Compare commits
118 Commits
python-dec...v0.186.12
| SHA1 |
|---|
| 5898c2e18c |
| a748c42c9a |
| 08b81a8b16 |
| 74f7a3ca1f |
| 1cfa4389bb |
| 55f4f66371 |
| 564a77df3e |
| 97495c3426 |
| 091294aa8e |
| e5461065c3 |
| 7af03d3b24 |
| 5929637344 |
| 82cb908405 |
| c78e470ab9 |
| 1599594c45 |
| b313e0b4c0 |
| d7e2d66c2b |
| 34356acb99 |
| a2134c1e03 |
| 758cb8e491 |
| 3ed1b3b52d |
| 5b61a59e6c |
| 98f12f19b2 |
| d3bbf52f85 |
| 46f6e8b791 |
| eaf4f8f654 |
| 374842e0e0 |
| 6a51b14d2a |
| dad7d94b4b |
| 917c0c110a |
| 595abe1fa7 |
| 267cfe9f19 |
| c3906d6816 |
| 159f042f05 |
| f3c922a2ee |
| 7a966869dd |
| 62d58c349b |
| a6e8c06abd |
| 72b7c6bb65 |
| 09a6fcd2e2 |
| 28ae9f8804 |
| ff829e4b1a |
| 4aef0fb095 |
| 9d00e26928 |
| b492ff3fa0 |
| 2c63af94c8 |
| 38e4b3bf50 |
| 07c471e229 |
| b6d5aabeec |
| a71dbfcb58 |
| c2d436d655 |
| 3a207da92d |
| d4591ea64a |
| 71e4885874 |
| ca9a1e58af |
| 4fc7849bc3 |
| 2ae7e167c2 |
| b061f44fc0 |
| 9a2fb77732 |
| 5ba2869283 |
| 2c80aa103c |
| 2423b6d8c2 |
| 183912fdb2 |
| a21283b858 |
| 07868df84f |
| 4a0025d7b5 |
| 02f09a5d53 |
| 8afba12eb4 |
| 2c5d8f9889 |
| 49689ef7c2 |
| 5e9d6f473b |
| b94b5e6982 |
| 65ada5cd8f |
| ac6f1c9003 |
| af88bd4e88 |
| 3a73f7c4c8 |
| f3d84f0bc9 |
| a230730d3a |
| 5ae8072d22 |
| f9b46ebf8a |
| 762fc428a5 |
| 0490d255c6 |
| 1d9c2ddbc7 |
| 8594cefc97 |
| 1afd18629b |
| 4aa38ce39a |
| 8a742fba43 |
| 055e35e2b9 |
| 2c131c3d34 |
| 206d7341a1 |
| efbe678dcd |
| 0f9607710b |
| 91b1f60e5b |
| 29bb9aaf76 |
| 9cd2806ce1 |
| 96b328d0ff |
| da333757f1 |
| fb1ac6ef61 |
| 190d6129bb |
| a206c31e97 |
| e0b27290f3 |
| 78f9852eda |
| 6eee3440e7 |
| 080ef043c3 |
| a7b882c1cb |
| 202a19e0ed |
| ace7f573c3 |
| d0da6f75d7 |
| 13743ef2ef |
| 03f9b1e74e |
| 453125eb03 |
| e8dadb16c3 |
| a1c5a58521 |
| 703af39558 |
| dc9b1d316d |
| affec54b8e |
| 4cde597d39 |
| 2e2ad6c80e |
@@ -13,6 +13,12 @@ rustflags = ["-C", "link-arg=-fuse-ld=mold"]
linker = "clang"
rustflags = ["-C", "link-arg=-fuse-ld=mold"]

[target.aarch64-apple-darwin]
rustflags = ["-C", "link-args=-all_load"]

[target.x86_64-apple-darwin]
rustflags = ["-C", "link-args=-all_load"]

[target.'cfg(target_os = "windows")']
rustflags = [
"--cfg",
@@ -30,7 +30,3 @@ ffdda588b41f7d9d270ffe76cab116f828ad545e
# 2024-07-05 Improved formatting of default keymaps (single line per bind)
# https://github.com/zed-industries/zed/pull/13887
813cc3f5e537372fc86720b5e71b6e1c815440ab

# 2024-07-24 docs: Format docs
# https://github.com/zed-industries/zed/pull/15352
3a44a59f8ec114ac1ba22f7da1652717ef7e4e5c
4 .github/ISSUE_TEMPLATE/01_bug_agent.yml (vendored)
@@ -29,8 +29,8 @@ body:
id: environment
attributes:
label: Zed Version and System Specs
description: 'Open Zed, and in the command palette select "zed: copy system specs into clipboard"'
description: 'Open Zed, and in the command palette select "zed: Copy System Specs Into Clipboard"'
placeholder: |
Output of "zed: copy system specs into clipboard"
Output of "zed: Copy System Specs Into Clipboard"
validations:
required: true

@@ -29,8 +29,8 @@ body:
id: environment
attributes:
label: Zed Version and System Specs
description: 'Open Zed, and in the command palette select "zed: copy system specs into clipboard"'
description: 'Open Zed, and in the command palette select "zed: Copy System Specs Into Clipboard"'
placeholder: |
Output of "zed: copy system specs into clipboard"
Output of "zed: Copy System Specs Into Clipboard"
validations:
required: true
4 .github/ISSUE_TEMPLATE/03_bug_git.yml (vendored)
@@ -28,8 +28,8 @@ body:
id: environment
attributes:
label: Zed Version and System Specs
description: 'Open Zed, and in the command palette select "zed: copy system specs into clipboard"'
description: 'Open Zed, and in the command palette select "zed: Copy System Specs Into Clipboard"'
placeholder: |
Output of "zed: copy system specs into clipboard"
Output of "zed: Copy System Specs Into Clipboard"
validations:
required: true
35 .github/ISSUE_TEMPLATE/04_bug_debugger.yml (vendored)
@@ -1,35 +0,0 @@
name: Bug Report (Debugger)
description: Zed Debugger-Related Bugs
type: "Bug"
labels: ["debugger"]
title: "Debugger: <a short description of the Debugger bug>"
body:
- type: textarea
attributes:
label: Summary
description: Describe the bug with a one line summary, and provide detailed reproduction steps
value: |
<!-- Please insert a one line summary of the issue below -->
SUMMARY_SENTENCE_HERE

### Description
<!-- Describe with sufficient detail to reproduce from a clean Zed install. -->
Steps to trigger the problem:
1.
2.
3.

Actual Behavior:
Expected Behavior:

validations:
required: true
- type: textarea
id: environment
attributes:
label: Zed Version and System Specs
description: 'Open Zed, and in the command palette select "zed: copy system specs into clipboard"'
placeholder: |
Output of "zed: copy system specs into clipboard"
validations:
required: true
4 .github/ISSUE_TEMPLATE/10_bug_report.yml (vendored)
@@ -49,8 +49,8 @@ body:
attributes:
label: Zed Version and System Specs
description: |
Open Zed, from the command palette select "zed: copy system specs into clipboard"
Open Zed, from the command palette select "zed: Copy System Specs Into Clipboard"
placeholder: |
Output of "zed: copy system specs into clipboard"
Output of "zed: Copy System Specs Into Clipboard"
validations:
required: true
4 .github/ISSUE_TEMPLATE/11_crash_report.yml (vendored)
@@ -26,9 +26,9 @@ body:
id: environment
attributes:
label: Zed Version and System Specs
description: 'Open Zed, and in the command palette select "zed: copy system specs into clipboard"'
description: 'Open Zed, and in the command palette select "zed: Copy System Specs Into Clipboard"'
placeholder: |
Output of "zed: copy system specs into clipboard"
Output of "zed: Copy System Specs Into Clipboard"
validations:
required: true
- type: textarea
1 .gitignore (vendored)
@@ -2,7 +2,6 @@
**/cargo-target
**/target
**/venv
**/.direnv
*.wasm
*.xcodeproj
.DS_Store
20 .mailmap
@@ -19,8 +19,6 @@ amtoaer <amtoaer@gmail.com>
amtoaer <amtoaer@gmail.com> <amtoaer@outlook.com>
Andrei Zvonimir Crnković <andrei@0x7f.dev>
Andrei Zvonimir Crnković <andrei@0x7f.dev> <andreicek@0x7f.dev>
Angelk90 <angelo.k90@hotmail.it>
Angelk90 <angelo.k90@hotmail.it> <20476002+Angelk90@users.noreply.github.com>
Antonio Scandurra <me@as-cii.com>
Antonio Scandurra <me@as-cii.com> <antonio@zed.dev>
Ben Kunkle <ben@zed.dev>
@@ -40,8 +38,6 @@ Dairon Medina <dairon.medina@gmail.com>
Danilo Leal <danilo@zed.dev>
Danilo Leal <danilo@zed.dev> <67129314+danilo-leal@users.noreply.github.com>
Edwin Aronsson <75266237+4teapo@users.noreply.github.com>
Elvis Pranskevichus <elvis@geldata.com>
Elvis Pranskevichus <elvis@geldata.com> <elvis@magic.io>
Evren Sen <nervenes@icloud.com>
Evren Sen <nervenes@icloud.com> <146845123+evrensen467@users.noreply.github.com>
Evren Sen <nervenes@icloud.com> <146845123+evrsen@users.noreply.github.com>
@@ -73,8 +69,6 @@ Lilith Iris <itslirissama@gmail.com> <83819417+Irilith@users.noreply.github.com>
LoganDark <contact@logandark.mozmail.com>
LoganDark <contact@logandark.mozmail.com> <git@logandark.mozmail.com>
LoganDark <contact@logandark.mozmail.com> <github@logandark.mozmail.com>
Marko Kungla <marko.kungla@gmail.com>
Marko Kungla <marko.kungla@gmail.com> <marko@mkungla.dev>
Marshall Bowers <git@maxdeviant.com>
Marshall Bowers <git@maxdeviant.com> <elliott.codes@gmail.com>
Marshall Bowers <git@maxdeviant.com> <marshall@zed.dev>
@@ -90,7 +84,6 @@ Michael Sloan <michael@zed.dev> <mgsloan@google.com>
Mikayla Maki <mikayla@zed.dev>
Mikayla Maki <mikayla@zed.dev> <mikayla.c.maki@gmail.com>
Mikayla Maki <mikayla@zed.dev> <mikayla.c.maki@icloud.com>
Morgan Krey <morgan@zed.dev>
Muhammad Talal Anwar <mail@talal.io>
Muhammad Talal Anwar <mail@talal.io> <talalanwar@outlook.com>
Nate Butler <iamnbutler@gmail.com>
@@ -123,18 +116,11 @@ Shish <webmaster@shishnet.org>
Shish <webmaster@shishnet.org> <shish@shishnet.org>
Smit Barmase <0xtimsb@gmail.com>
Smit Barmase <0xtimsb@gmail.com> <smit@zed.dev>
Thomas <github.thomaub@gmail.com>
Thomas <github.thomaub@gmail.com> <thomas.aubry94@gmail.com>
Thomas <github.thomaub@gmail.com> <thomas.aubry@paylead.fr>
Thomas Heartman <thomasheartman+github@gmail.com>
Thomas Heartman <thomasheartman+github@gmail.com> <thomas@getunleash.io>
Thomas Mickley-Doyle <tmickleydoyle@gmail.com>
Thomas Mickley-Doyle <tmickleydoyle@gmail.com> <thomas@zed.dev>
Thorben Kröger <dev@thorben.net>
Thorben Kröger <dev@thorben.net> <thorben.kroeger@hexagon.com>
Thorsten Ball <mrnugget@gmail.com>
Thorsten Ball <mrnugget@gmail.com> <me@thorstenball.com>
Thorsten Ball <mrnugget@gmail.com> <thorsten@zed.dev>
Thorsten Ball <thorsten@zed.dev>
Thorsten Ball <thorsten@zed.dev> <me@thorstenball.com>
Thorsten Ball <thorsten@zed.dev> <mrnugget@gmail.com>
Tristan Hume <tris.hume@gmail.com>
Tristan Hume <tris.hume@gmail.com> <tristan@anthropic.com>
Uladzislau Kaminski <i@uladkaminski.com>
2 .rules
@@ -115,7 +115,7 @@ Other entities can then register a callback to handle these events by doing `cx.
GPUI has had some changes to its APIs. Always write code using the new APIs:

* `spawn` methods now take async closures (`AsyncFn`), and so should be called like `cx.spawn(async move |cx| ...)`.
* Use `Entity<T>`. This replaces `Model<T>` and `View<T>` which no longer exist and should NEVER be used.
* Use `Entity<T>`. This replaces `Model<T>` and `View<T>` which longer exists and should NEVER be used.
* Use `App` references. This replaces `AppContext` which no longer exists and should NEVER be used.
* Use `Context<T>` references. This replaces `ModelContext<T>` which no longer exists and should NEVER be used.
* `Window` is now passed around explicitly. The new interface adds a `Window` reference parameter to some methods, and adds some new "*_in" methods for plumbing `Window`. The old types `WindowContext` and `ViewContext<T>` should NEVER be used.
@@ -2,14 +2,16 @@
{
"label": "Debug Zed (CodeLLDB)",
"adapter": "CodeLLDB",
"program": "target/debug/zed",
"request": "launch"
"program": "$ZED_WORKTREE_ROOT/target/debug/zed",
"request": "launch",
"cwd": "$ZED_WORKTREE_ROOT"
},
{
"label": "Debug Zed (GDB)",
"adapter": "GDB",
"program": "target/debug/zed",
"program": "$ZED_WORKTREE_ROOT/target/debug/zed",
"request": "launch",
"cwd": "$ZED_WORKTREE_ROOT",
"initialize_args": {
"stopAtBeginningOfMainSubprogram": true
}
1669 Cargo.lock (generated)
File diff suppressed because it is too large
27 Cargo.toml
@@ -31,13 +31,13 @@ members = [
"crates/command_palette",
"crates/command_palette_hooks",
"crates/component",
"crates/component_preview",
"crates/context_server",
"crates/copilot",
"crates/credentials_provider",
"crates/dap",
"crates/dap_adapters",
"crates/db",
"crates/debug_adapter_extension",
"crates/debugger_tools",
"crates/debugger_ui",
"crates/deepseek",
@@ -74,8 +74,6 @@ members = [
"crates/inline_completion",
"crates/inline_completion_button",
"crates/install_cli",
"crates/jj",
"crates/jj_ui",
"crates/journal",
"crates/language",
"crates/language_extension",
@@ -240,13 +238,13 @@ collections = { path = "crates/collections" }
command_palette = { path = "crates/command_palette" }
command_palette_hooks = { path = "crates/command_palette_hooks" }
component = { path = "crates/component" }
component_preview = { path = "crates/component_preview" }
context_server = { path = "crates/context_server" }
copilot = { path = "crates/copilot" }
credentials_provider = { path = "crates/credentials_provider" }
dap = { path = "crates/dap" }
dap_adapters = { path = "crates/dap_adapters" }
db = { path = "crates/db" }
debug_adapter_extension = { path = "crates/debug_adapter_extension" }
debugger_tools = { path = "crates/debugger_tools" }
debugger_ui = { path = "crates/debugger_ui" }
deepseek = { path = "crates/deepseek" }
@@ -281,8 +279,6 @@ indexed_docs = { path = "crates/indexed_docs" }
inline_completion = { path = "crates/inline_completion" }
inline_completion_button = { path = "crates/inline_completion_button" }
install_cli = { path = "crates/install_cli" }
jj = { path = "crates/jj" }
jj_ui = { path = "crates/jj_ui" }
journal = { path = "crates/journal" }
language = { path = "crates/language" }
language_extension = { path = "crates/language_extension" }
@@ -414,9 +410,9 @@ aws-smithy-runtime-api = { version = "1.7.4", features = ["http-1x", "client"] }
aws-smithy-types = { version = "1.3.0", features = ["http-body-1-x"] }
base64 = "0.22"
bitflags = "2.6.0"
blade-graphics = { git = "https://github.com/kvark/blade", rev = "416375211bb0b5826b3584dccdb6a43369e499ad" }
blade-macros = { git = "https://github.com/kvark/blade", rev = "416375211bb0b5826b3584dccdb6a43369e499ad" }
blade-util = { git = "https://github.com/kvark/blade", rev = "416375211bb0b5826b3584dccdb6a43369e499ad" }
blade-graphics = { git = "https://github.com/kvark/blade", rev = "b16f5c7bd873c7126f48c82c39e7ae64602ae74f" }
blade-macros = { git = "https://github.com/kvark/blade", rev = "b16f5c7bd873c7126f48c82c39e7ae64602ae74f" }
blade-util = { git = "https://github.com/kvark/blade", rev = "b16f5c7bd873c7126f48c82c39e7ae64602ae74f" }
blake3 = "1.5.3"
bytes = "1.0"
cargo_metadata = "0.19"
@@ -462,7 +458,6 @@ indexmap = { version = "2.7.0", features = ["serde"] }
indoc = "2"
inventory = "0.3.19"
itertools = "0.14.0"
jj-lib = { git = "https://github.com/jj-vcs/jj", rev = "e18eb8e05efaa153fad5ef46576af145bba1807f" }
jsonschema = "0.30.0"
jsonwebtoken = "9.3"
jupyter-protocol = { git = "https://github.com/ConradIrwin/runtimed", rev = "7130c804216b6914355d15d0b91ea91f6babd734" }
@@ -470,12 +465,13 @@ jupyter-websocket-client = { git = "https://github.com/ConradIrwin/runtimed" ,r
libc = "0.2"
libsqlite3-sys = { version = "0.30.1", features = ["bundled"] }
linkify = "0.10.0"
linkme = "0.3.31"
log = { version = "0.4.16", features = ["kv_unstable_serde", "serde"] }
lsp-types = { git = "https://github.com/zed-industries/lsp-types", rev = "c9c189f1c5dd53c624a419ce35bc77ad6a908d18" }
markup5ever_rcdom = "0.3.0"
metal = "0.29"
mlua = { version = "0.10", features = ["lua54", "vendored", "async", "send"] }
naga = { version = "25.0", features = ["wgsl-in"] }
naga = { version = "23.1.0", features = ["wgsl-in"] }
nanoid = "0.4"
nbformat = { git = "https://github.com/ConradIrwin/runtimed", rev = "7130c804216b6914355d15d0b91ea91f6babd734" }
nix = "0.29"
@@ -553,7 +549,7 @@ syn = { version = "1.0.72", features = ["full", "extra-traits"] }
sys-locale = "0.3.1"
sysinfo = "0.31.0"
take-until = "0.2.0"
tempfile = "3.20.0"
tempfile = "3.9.0"
thiserror = "2.0.12"
tiktoken-rs = "0.6.0"
time = { version = "0.3", features = [
@@ -599,7 +595,6 @@ url = "2.2"
urlencoding = "2.1.2"
uuid = { version = "1.1.2", features = ["v4", "v5", "v7", "serde"] }
walkdir = "2.3"
wasi-preview1-component-adapter-provider = "29"
wasm-encoder = "0.221"
wasmparser = "0.221"
wasmtime = { version = "29", default-features = false, features = [
@@ -613,7 +608,7 @@ wasmtime-wasi = "29"
which = "6.0.0"
wit-component = "0.221"
workspace-hack = "0.1.0"
zed_llm_client = "0.8.1"
zed_llm_client = "0.8.0"
zstd = "0.11"

[workspace.dependencies.async-stripe]
@@ -793,9 +788,6 @@ let_underscore_future = "allow"
# running afoul of the borrow checker.
too_many_arguments = "allow"

# We often have large enum variants yet we rarely actually bother with splitting them up.
large_enum_variant = "allow"

[workspace.metadata.cargo-machete]
ignored = [
"bindgen",
@@ -803,6 +795,7 @@ ignored = [
"prost_build",
"serde",
"component",
"linkme",
"documented",
"workspace-hack",
]
@@ -1,6 +1,6 @@
# syntax = docker/dockerfile:1.2

FROM rust:1.87-bookworm as builder
FROM rust:1.86-bookworm as builder
WORKDIR app
COPY . .
@@ -8,6 +8,10 @@ Welcome to Zed, a high-performance, multiplayer code editor from the creators of

### Installation

<a href="https://repology.org/project/zed-editor/versions">
<img src="https://repology.org/badge/vertical-allrepos/zed-editor.svg?minversion=0.143.5" alt="Packaging status" align="right">
</a>

On macOS and Linux you can [download Zed directly](https://zed.dev/download) or [install Zed via your local package manager](https://zed.dev/docs/linux#installing-via-a-package-manager).

Other platforms are not yet available:
@@ -217,6 +217,7 @@
|
||||
"context": "ContextEditor > Editor",
|
||||
"bindings": {
|
||||
"ctrl-enter": "assistant::Assist",
|
||||
"ctrl-shift-enter": "assistant::Edit",
|
||||
"ctrl-s": "workspace::Save",
|
||||
"save": "workspace::Save",
|
||||
"ctrl->": "assistant::QuoteSelection",
|
||||
@@ -766,7 +767,7 @@
|
||||
"alt-ctrl-r": "project_panel::RevealInFileManager",
|
||||
"ctrl-shift-enter": "project_panel::OpenWithSystem",
|
||||
"shift-find": "project_panel::NewSearchInDirectory",
|
||||
"ctrl-alt-shift-f": "project_panel::NewSearchInDirectory",
|
||||
"ctrl-shift-f": "project_panel::NewSearchInDirectory",
|
||||
"shift-down": "menu::SelectNext",
|
||||
"shift-up": "menu::SelectPrevious",
|
||||
"escape": "menu::Cancel"
|
||||
@@ -934,7 +935,6 @@
|
||||
"ctrl-e": ["terminal::SendKeystroke", "ctrl-e"],
|
||||
"ctrl-o": ["terminal::SendKeystroke", "ctrl-o"],
|
||||
"ctrl-w": ["terminal::SendKeystroke", "ctrl-w"],
|
||||
"ctrl-backspace": ["terminal::SendKeystroke", "ctrl-w"],
|
||||
"ctrl-shift-a": "editor::SelectAll",
|
||||
"find": "buffer_search::Deploy",
|
||||
"ctrl-shift-f": "buffer_search::Deploy",
|
||||
@@ -952,10 +952,7 @@
|
||||
"shift-down": "terminal::ScrollLineDown",
|
||||
"shift-home": "terminal::ScrollToTop",
|
||||
"shift-end": "terminal::ScrollToBottom",
|
||||
"ctrl-shift-space": "terminal::ToggleViMode",
|
||||
"ctrl-shift-r": "terminal::RerunTask",
|
||||
"ctrl-alt-r": "terminal::RerunTask",
|
||||
"alt-t": "terminal::RerunTask"
|
||||
"ctrl-shift-space": "terminal::ToggleViMode"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -978,12 +975,5 @@
|
||||
"bindings": {
|
||||
"ctrl-r": "diagnostics::ToggleDiagnosticsRefresh"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "DebugConsole > Editor",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"enter": "menu::Confirm"
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
@@ -263,6 +263,7 @@
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"cmd-enter": "assistant::Assist",
|
||||
"cmd-shift-enter": "assistant::Edit",
|
||||
"cmd-s": "workspace::Save",
|
||||
"cmd->": "assistant::QuoteSelection",
|
||||
"cmd-<": "assistant::InsertIntoEditor",
|
||||
@@ -825,7 +826,7 @@
|
||||
"alt-cmd-r": "project_panel::RevealInFileManager",
|
||||
"ctrl-shift-enter": "project_panel::OpenWithSystem",
|
||||
"cmd-alt-backspace": ["project_panel::Delete", { "skip_prompt": false }],
|
||||
"cmd-alt-shift-f": "project_panel::NewSearchInDirectory",
|
||||
"cmd-shift-f": "project_panel::NewSearchInDirectory",
|
||||
"shift-down": "menu::SelectNext",
|
||||
"shift-up": "menu::SelectPrevious",
|
||||
"escape": "menu::Cancel"
|
||||
@@ -1021,7 +1022,6 @@
|
||||
"escape": ["terminal::SendKeystroke", "escape"],
|
||||
"enter": ["terminal::SendKeystroke", "enter"],
|
||||
"ctrl-c": ["terminal::SendKeystroke", "ctrl-c"],
|
||||
"ctrl-backspace": ["terminal::SendKeystroke", "ctrl-w"],
|
||||
"shift-pageup": "terminal::ScrollPageUp",
|
||||
"cmd-up": "terminal::ScrollPageUp",
|
||||
"shift-pagedown": "terminal::ScrollPageDown",
|
||||
@@ -1038,8 +1038,7 @@
|
||||
"ctrl-alt-up": "pane::SplitUp",
|
||||
"ctrl-alt-down": "pane::SplitDown",
|
||||
"ctrl-alt-left": "pane::SplitLeft",
|
||||
"ctrl-alt-right": "pane::SplitRight",
|
||||
"cmd-alt-r": "terminal::RerunTask"
|
||||
"ctrl-alt-right": "pane::SplitRight"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -1084,12 +1083,5 @@
|
||||
"bindings": {
|
||||
"ctrl-r": "diagnostics::ToggleDiagnosticsRefresh"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "DebugConsole > Editor",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"enter": "menu::Confirm"
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
@@ -152,7 +152,6 @@
"g end": ["vim::EndOfLine", { "display_lines": true }],
"g 0": ["vim::StartOfLine", { "display_lines": true }],
"g home": ["vim::StartOfLine", { "display_lines": true }],
"g shift-m": ["vim::MiddleOfLine", { "display_lines": true }],
"g ^": ["vim::FirstNonWhitespace", { "display_lines": true }],
"g v": "vim::RestoreVisualSelection",
"g ]": "editor::GoToDiagnostic",
@@ -49,9 +49,10 @@ And here's the section to rewrite based on that prompt again for reference:
</rewrite_this>

{{#if diagnostic_errors}}
{{#each diagnostic_errors}}

Below are the diagnostic errors visible to the user. If the user requests problems to be fixed, use this information, but do not try to fix these errors if the user hasn't asked you to.

{{#each diagnostic_errors}}
<diagnostic_error>
<line_number>{{line_number}}</line_number>
<error_message>{{error_message}}</error_message>
206 assets/prompts/suggest_edits.hbs (Normal file)
@@ -0,0 +1,206 @@
|
||||
<task_description>
|
||||
|
||||
The user of a code editor wants to make a change to their codebase.
|
||||
You must describe the change using the following XML structure:
|
||||
|
||||
- <patch> - A group of related code changes.
|
||||
Child tags:
|
||||
- <title> (required) - A high-level description of the changes. This should be as short
|
||||
as possible, possibly using common abbreviations.
|
||||
- <edit> (1 or more) - An edit to make at a particular range within a file.
|
||||
Includes the following child tags:
|
||||
- <path> (required) - The path to the file that will be changed.
|
||||
- <description> (optional) - An arbitrarily-long comment that describes the purpose
|
||||
of this edit.
|
||||
- <old_text> (optional) - An excerpt from the file's current contents that uniquely
|
||||
identifies a range within the file where the edit should occur. Required for all operations
|
||||
except `create`.
|
||||
- <new_text> (required) - The new text to insert into the file.
|
||||
- <operation> (required) - The type of change that should occur at the given range
|
||||
of the file. Must be one of the following values:
|
||||
- `update`: Replaces the entire range with the new text.
|
||||
- `insert_before`: Inserts the new text before the range.
|
||||
- `insert_after`: Inserts new text after the range.
|
||||
- `create`: Creates or overwrites a file with the given path and the new text.
|
||||
- `delete`: Deletes the specified range from the file.
|
||||
|
||||
<guidelines>
|
||||
- Never provide multiple edits whose ranges intersect each other. Instead, merge them into one edit.
|
||||
- Prefer multiple edits to smaller, disjoint ranges, rather than one edit to a larger range.
|
||||
- There's no need to escape angle brackets within XML tags.
|
||||
- Always ensure imports are added if you're referencing symbols that are not in scope.
|
||||
</guidelines>
|
||||
|
||||
Here are some concrete examples.
|
||||
|
||||
<example>
|
||||
<message role="user">
|
||||
|
||||
```rs src/shapes.rs
|
||||
pub mod rectangle;
|
||||
pub mod circle;
|
||||
```
|
||||
|
||||
```rs src/shapes/rectangle.rs
|
||||
pub struct Rectangle {
|
||||
width: f64,
|
||||
height: f64,
|
||||
}
|
||||
|
||||
impl Rectangle {
|
||||
pub fn new(width: f64, height: f64) -> Self {
|
||||
Rectangle { width, height }
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
```rs src/shapes/circle.rs
|
||||
pub struct Circle {
|
||||
radius: f64,
|
||||
}
|
||||
|
||||
impl Circle {
|
||||
pub fn new(radius: f64) -> Self {
|
||||
Circle { radius }
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Update all shapes to store their origin as an (x, y) tuple and implement Display.
|
||||
</message>
|
||||
|
||||
<message role="assistant">
|
||||
We'll need to update both the rectangle and circle modules.
|
||||
|
||||
<patch>
|
||||
<title>Add origins and display impls to shapes</title>
|
||||
<edit>
|
||||
<path>src/shapes/rectangle.rs</path>
|
||||
<description>Add the origin field to Rectangle struct</description>
|
||||
<operation>insert_after</operation>
|
||||
<old_text>
|
||||
pub struct Rectangle {
|
||||
</old_text>
|
||||
<new_text>
|
||||
origin: (f64, f64),
|
||||
</new_text>
|
||||
</edit>
|
||||
|
||||
<edit>
|
||||
<path>src/shapes/rectangle.rs</path>
|
||||
<operation>update</operation>
|
||||
<old_text>
|
||||
fn new(width: f64, height: f64) -> Self {
|
||||
Rectangle { width, height }
|
||||
}
|
||||
</old_text>
|
||||
<new_text>
|
||||
fn new(origin: (f64, f64), width: f64, height: f64) -> Self {
|
||||
Rectangle { origin, width, height }
|
||||
}
|
||||
</new_text>
|
||||
</edit>
|
||||
|
||||
<edit>
|
||||
<path>src/shapes/circle.rs</path>
|
||||
<description>Add the origin field to Circle struct</description>
|
||||
<operation>insert_after</operation>
|
||||
<old_text>
|
||||
pub struct Circle {
|
||||
radius: f64,
|
||||
</old_text>
|
||||
<new_text>
|
||||
origin: (f64, f64),
|
||||
</new_text>
|
||||
</edit>
|
||||
|
||||
<edit>
|
||||
<path>src/shapes/circle.rs</path>
|
||||
<operation>update</operation>
|
||||
<old_text>
|
||||
fn new(radius: f64) -> Self {
|
||||
Circle { radius }
|
||||
}
|
||||
</old_text>
|
||||
<new_text>
|
||||
fn new(origin: (f64, f64), radius: f64) -> Self {
|
||||
Circle { origin, radius }
|
||||
}
|
||||
</new_text>
|
||||
</edit>
|
||||
</step>
|
||||
|
||||
<edit>
|
||||
<path>src/shapes/rectangle.rs</path>
|
||||
<operation>insert_before</operation>
|
||||
<old_text>
|
||||
struct Rectangle {
|
||||
</old_text>
|
||||
<new_text>
|
||||
use std::fmt;
|
||||
|
||||
</new_text>
|
||||
</edit>
|
||||
|
||||
<edit>
|
||||
<path>src/shapes/rectangle.rs</path>
|
||||
<description>
|
||||
Add a manual Display implementation for Rectangle.
|
||||
Currently, this is the same as a derived Display implementation.
|
||||
</description>
|
||||
<operation>insert_after</operation>
|
||||
<old_text>
|
||||
Rectangle { width, height }
|
||||
}
|
||||
}
|
||||
</old_text>
|
||||
<new_text>
|
||||
impl fmt::Display for Rectangle {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.format_struct(f, "Rectangle")
|
||||
.field("origin", &self.origin)
|
||||
.field("width", &self.width)
|
||||
.field("height", &self.height)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
</new_text>
|
||||
</edit>
|
||||
|
||||
<edit>
|
||||
<path>src/shapes/circle.rs</path>
|
||||
<operation>insert_before</operation>
|
||||
<old_text>
|
||||
struct Circle {
|
||||
</old_text>
|
||||
<new_text>
|
||||
use std::fmt;
|
||||
</new_text>
|
||||
</edit>
|
||||
|
||||
<edit>
|
||||
<path>src/shapes/circle.rs</path>
|
||||
<operation>insert_after</operation>
|
||||
<old_text>
|
||||
Circle { radius }
|
||||
}
|
||||
}
|
||||
</old_text>
|
||||
<new_text>
|
||||
impl fmt::Display for Rectangle {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.format_struct(f, "Rectangle")
|
||||
.field("origin", &self.origin)
|
||||
.field("width", &self.width)
|
||||
.field("height", &self.height)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
</new_text>
|
||||
</edit>
|
||||
</patch>
|
||||
|
||||
</message>
|
||||
</example>
|
||||
|
||||
</task_description>
|
||||
@@ -113,8 +113,8 @@
|
||||
// Whether to show the informational hover box when moving the mouse
|
||||
// over symbols in the editor.
|
||||
"hover_popover_enabled": true,
|
||||
// Time to wait in milliseconds before showing the informational hover box.
|
||||
"hover_popover_delay": 300,
|
||||
// Time to wait before showing the informational hover box
|
||||
"hover_popover_delay": 350,
|
||||
// Whether to confirm before quitting Zed.
|
||||
"confirm_quit": false,
|
||||
// Whether to restore last closed project when fresh Zed instance is opened.
|
||||
@@ -218,23 +218,6 @@
|
||||
// 1. Do nothing: `none`
|
||||
// 2. Find references for the same symbol: `find_all_references` (default)
|
||||
"go_to_definition_fallback": "find_all_references",
|
||||
// Which level to use to filter out diagnostics displayed in the editor.
|
||||
//
|
||||
// Affects the editor rendering only, and does not interrupt
|
||||
// the functionality of diagnostics fetching and project diagnostics editor.
|
||||
// Which files containing diagnostic errors/warnings to mark in the tabs.
|
||||
// Diagnostics are only shown when file icons are also active.
|
||||
// This setting only works when can take the following three values:
|
||||
//
|
||||
// Which diagnostic indicators to show in the scrollbar, their level should be more or equal to the specified severity level.
|
||||
// Possible values:
|
||||
// - "off" — no diagnostics are allowed
|
||||
// - "error"
|
||||
// - "warning" (default)
|
||||
// - "info"
|
||||
// - "hint"
|
||||
// - null — allow all diagnostics
|
||||
"diagnostics_max_severity": "warning",
|
||||
// Whether to show wrap guides (vertical rulers) in the editor.
|
||||
// Setting this to true will show a guide at the 'preferred_line_length' value
|
||||
// if 'soft_wrap' is set to 'preferred_line_length', and will show any
|
||||
@@ -328,16 +311,10 @@
|
||||
"title_bar": {
|
||||
// Whether to show the branch icon beside branch switcher in the titlebar.
|
||||
"show_branch_icon": false,
|
||||
// Whether to show the branch name button in the titlebar.
|
||||
"show_branch_name": true,
|
||||
// Whether to show the project host and name in the titlebar.
|
||||
"show_project_items": true,
|
||||
// Whether to show onboarding banners in the titlebar.
|
||||
"show_onboarding_banner": true,
|
||||
// Whether to show user picture in the titlebar.
|
||||
"show_user_picture": true,
|
||||
// Whether to show the sign in button in the titlebar.
|
||||
"show_sign_in": true
|
||||
"show_user_picture": true
|
||||
},
|
||||
// Scrollbar related settings
|
||||
"scrollbar": {
|
||||
@@ -379,45 +356,6 @@
|
||||
"vertical": true
|
||||
}
|
||||
},
|
||||
// Minimap related settings
|
||||
"minimap": {
|
||||
// When to show the minimap in the editor.
|
||||
// This setting can take three values:
|
||||
// 1. Show the minimap if the editor's scrollbar is visible:
|
||||
// "auto"
|
||||
// 2. Always show the minimap:
|
||||
// "always"
|
||||
// 3. Never show the minimap:
|
||||
// "never" (default)
|
||||
"show": "never",
|
||||
// When to show the minimap thumb.
|
||||
// This setting can take two values:
|
||||
// 1. Show the minimap thumb if the mouse is over the minimap:
|
||||
// "hover"
|
||||
// 2. Always show the minimap thumb:
|
||||
// "always" (default)
|
||||
"thumb": "always",
|
||||
// How the minimap thumb border should look.
|
||||
// This setting can take five values:
|
||||
// 1. Display a border on all sides of the thumb:
|
||||
// "thumb_border": "full"
|
||||
// 2. Display a border on all sides except the left side of the thumb:
|
||||
// "thumb_border": "left_open" (default)
|
||||
// 3. Display a border on all sides except the right side of the thumb:
|
||||
// "thumb_border": "right_open"
|
||||
// 4. Display a border only on the left side of the thumb:
|
||||
// "thumb_border": "left_only"
|
||||
// 5. Display the thumb without any border:
|
||||
// "thumb_border": "none"
|
||||
"thumb_border": "left_open",
|
||||
// How to highlight the current line in the minimap.
|
||||
// This setting can take the following values:
|
||||
//
|
||||
// 1. `null` to inherit the editor `current_line_highlight` setting (default)
|
||||
// 2. "line" or "all" to highlight the current line in the minimap.
|
||||
// 3. "gutter" or "none" to not highlight the current line in the minimap.
|
||||
"current_line_highlight": null
|
||||
},
|
||||
// Enable middle-click paste on Linux.
|
||||
"middle_click_paste": true,
|
||||
// What to do when multibuffer is double clicked in some of its excerpts
|
||||
@@ -432,6 +370,8 @@
|
||||
"gutter": {
|
||||
// Whether to show line numbers in the gutter.
|
||||
"line_numbers": true,
|
||||
// Whether to show code action buttons in the gutter.
|
||||
"code_actions": true,
|
||||
// Whether to show runnables buttons in the gutter.
|
||||
"runnables": true,
|
||||
// Whether to show breakpoints in the gutter.
|
||||
@@ -476,8 +416,6 @@
|
||||
"search_wrap": true,
|
||||
// Search options to enable by default when opening new project and buffer searches.
|
||||
"search": {
|
||||
// Whether to show the project search button in the status bar.
|
||||
"button": true,
|
||||
"whole_word": false,
|
||||
"case_sensitive": false,
|
||||
"include_ignored": false,
|
||||
@@ -758,8 +696,6 @@
|
||||
"stream_edits": false,
|
||||
// When enabled, agent edits will be displayed in single-file editors for review
|
||||
"single_file_review": true,
|
||||
// When enabled, show voting thumbs for feedback on agent edits.
|
||||
"enable_feedback": true,
|
||||
"default_profile": "write",
|
||||
"profiles": {
|
||||
"write": {
|
||||
@@ -1012,8 +948,6 @@
|
||||
"auto_update": true,
|
||||
// Diagnostics configuration.
|
||||
"diagnostics": {
|
||||
// Whether to show the project diagnostics button in the status bar.
|
||||
"button": true,
|
||||
// Whether to show warnings or not by default.
|
||||
"include_warnings": true,
|
||||
// Settings for inline diagnostics
|
||||
@@ -1031,7 +965,7 @@
|
||||
// longer than this value will still push diagnostics further to the right.
|
||||
"min_column": 0,
|
||||
// The minimum severity of the diagnostics to show inline.
|
||||
// Inherits editor's diagnostics' max severity settings when `null`.
|
||||
// Shows all diagnostics when not specified.
|
||||
"max_severity": null
|
||||
},
|
||||
"cargo": {
|
||||
@@ -1715,8 +1649,6 @@
|
||||
// }
|
||||
// ]
|
||||
"ssh_connections": [],
|
||||
// Whether to read ~/.ssh/config for ssh connection sources.
|
||||
"read_ssh_config": true,
|
||||
// Configures context servers for use by the agent.
|
||||
"context_servers": {},
|
||||
"debugger": {
|
||||
|
||||
@@ -47,12 +47,12 @@ heed.workspace = true
html_to_markdown.workspace = true
http_client.workspace = true
indexed_docs.workspace = true
inventory.workspace = true
itertools.workspace = true
jsonschema.workspace = true
language.workspace = true
language_model.workspace = true
language_model_selector.workspace = true
linkme.workspace = true
log.workspace = true
lsp.workspace = true
markdown.workspace = true
@@ -185,14 +185,12 @@ pub(crate) fn default_markdown_style(window: &Window, cx: &App) -> MarkdownStyle
|
||||
let ui_font_size = TextSize::Default.rems(cx);
|
||||
let buffer_font_size = TextSize::Small.rems(cx);
|
||||
let mut text_style = window.text_style();
|
||||
let line_height = buffer_font_size * 1.75;
|
||||
|
||||
text_style.refine(&TextStyleRefinement {
|
||||
font_family: Some(theme_settings.ui_font.family.clone()),
|
||||
font_fallbacks: theme_settings.ui_font.fallbacks.clone(),
|
||||
font_features: Some(theme_settings.ui_font.features.clone()),
|
||||
font_size: Some(ui_font_size.into()),
|
||||
line_height: Some(line_height.into()),
|
||||
color: Some(cx.theme().colors().text),
|
||||
..Default::default()
|
||||
});
|
||||
@@ -1014,7 +1012,6 @@ impl ActiveThread {
|
||||
self.push_message(message_id, &message_segments, window, cx);
|
||||
}
|
||||
|
||||
self.scroll_to_bottom(cx);
|
||||
self.save_thread(cx);
|
||||
cx.notify();
|
||||
}
|
||||
@@ -1028,7 +1025,6 @@ impl ActiveThread {
|
||||
self.edited_message(message_id, &message_segments, window, cx);
|
||||
}
|
||||
|
||||
self.scroll_to_bottom(cx);
|
||||
self.save_thread(cx);
|
||||
cx.notify();
|
||||
}
|
||||
@@ -1542,15 +1538,11 @@ impl ActiveThread {
|
||||
let project = self.thread.read(cx).project().clone();
|
||||
let prompt_store = self.thread_store.read(cx).prompt_store().clone();
|
||||
|
||||
let git_store = project.read(cx).git_store().clone();
|
||||
let checkpoint = git_store.update(cx, |git_store, cx| git_store.checkpoint(cx));
|
||||
|
||||
let load_context_task =
|
||||
crate::context::load_context(new_context, &project, &prompt_store, cx);
|
||||
self._load_edited_message_context_task =
|
||||
Some(cx.spawn_in(window, async move |this, cx| {
|
||||
let (context, checkpoint) =
|
||||
futures::future::join(load_context_task, checkpoint).await;
|
||||
let context = load_context_task.await;
|
||||
let _ = this
|
||||
.update_in(cx, |this, window, cx| {
|
||||
this.thread.update(cx, |thread, cx| {
|
||||
@@ -1559,7 +1551,6 @@ impl ActiveThread {
|
||||
Role::User,
|
||||
vec![MessageSegment::Text(edited_text)],
|
||||
Some(context.loaded_context),
|
||||
checkpoint.ok(),
|
||||
cx,
|
||||
);
|
||||
for message_id in this.messages_after(message_id) {
|
||||
@@ -1729,11 +1720,10 @@ impl ActiveThread {
|
||||
.on_action(cx.listener(Self::confirm_editing_message))
|
||||
.capture_action(cx.listener(Self::paste))
|
||||
.min_h_6()
|
||||
.w_full()
|
||||
.flex_grow()
|
||||
.w_full()
|
||||
.gap_2()
|
||||
.child(state.context_strip.clone())
|
||||
.child(div().pt(px(-3.)).px_neg_0p5().child(EditorElement::new(
|
||||
.child(EditorElement::new(
|
||||
&state.editor,
|
||||
EditorStyle {
|
||||
background: colors.editor_background,
|
||||
@@ -1742,7 +1732,8 @@ impl ActiveThread {
|
||||
syntax: cx.theme().syntax().clone(),
|
||||
..Default::default()
|
||||
},
|
||||
)))
|
||||
))
|
||||
.child(state.context_strip.clone())
|
||||
}
|
||||
|
||||
fn render_message(&self, ix: usize, window: &mut Window, cx: &mut Context<Self>) -> AnyElement {
|
||||
@@ -1870,8 +1861,7 @@ impl ActiveThread {
|
||||
.child(open_as_markdown),
|
||||
)
|
||||
.into_any_element(),
|
||||
None if AssistantSettings::get_global(cx).enable_feedback =>
|
||||
feedback_container
|
||||
None => feedback_container
|
||||
.child(
|
||||
div().visible_on_hover("feedback_container").child(
|
||||
Label::new(
|
||||
@@ -1914,9 +1904,6 @@ impl ActiveThread {
|
||||
.child(open_as_markdown),
|
||||
)
|
||||
.into_any_element(),
|
||||
None => feedback_container
|
||||
.child(h_flex().child(open_as_markdown))
|
||||
.into_any_element(),
|
||||
};
|
||||
|
||||
let message_is_empty = message.should_display_content();
|
||||
@@ -1930,6 +1917,16 @@ impl ActiveThread {
|
||||
v_flex()
|
||||
.w_full()
|
||||
.gap_1()
|
||||
.when(!message_is_empty, |parent| {
|
||||
parent.child(div().min_h_6().child(self.render_message_content(
|
||||
message_id,
|
||||
rendered_message,
|
||||
has_tool_uses,
|
||||
workspace.clone(),
|
||||
window,
|
||||
cx,
|
||||
)))
|
||||
})
|
||||
.when(!added_context.is_empty(), |parent| {
|
||||
parent.child(h_flex().flex_wrap().gap_1().children(
|
||||
added_context.into_iter().map(|added_context| {
|
||||
@@ -1948,16 +1945,6 @@ impl ActiveThread {
|
||||
}),
|
||||
))
|
||||
})
|
||||
.when(!message_is_empty, |parent| {
|
||||
parent.child(div().pt_0p5().min_h_6().child(self.render_message_content(
|
||||
message_id,
|
||||
rendered_message,
|
||||
has_tool_uses,
|
||||
workspace.clone(),
|
||||
window,
|
||||
cx,
|
||||
)))
|
||||
})
|
||||
.into_any_element()
|
||||
}
|
||||
});
|
||||
@@ -1983,7 +1970,6 @@ impl ActiveThread {
|
||||
h_flex()
|
||||
.p_2p5()
|
||||
.gap_1()
|
||||
.items_end()
|
||||
.children(message_content)
|
||||
.when_some(editing_message_state, |this, state| {
|
||||
let focus_handle = state.editor.focus_handle(cx).clone();
|
||||
@@ -1997,7 +1983,6 @@ impl ActiveThread {
|
||||
)
|
||||
.shape(ui::IconButtonShape::Square)
|
||||
.icon_color(Color::Error)
|
||||
.icon_size(IconSize::Small)
|
||||
.tooltip({
|
||||
let focus_handle = focus_handle.clone();
|
||||
move |window, cx| {
|
||||
@@ -2015,12 +2000,11 @@ impl ActiveThread {
|
||||
.child(
|
||||
IconButton::new(
|
||||
"confirm-edit-message",
|
||||
IconName::Return,
|
||||
IconName::Check,
|
||||
)
|
||||
.disabled(state.editor.read(cx).is_empty(cx))
|
||||
.shape(ui::IconButtonShape::Square)
|
||||
.icon_color(Color::Muted)
|
||||
.icon_size(IconSize::Small)
|
||||
.icon_color(Color::Success)
|
||||
.tooltip({
|
||||
let focus_handle = focus_handle.clone();
|
||||
move |window, cx| {
|
||||
@@ -2040,6 +2024,9 @@ impl ActiveThread {
|
||||
)
|
||||
}),
|
||||
)
|
||||
.when(editing_message_state.is_none(), |this| {
|
||||
this.tooltip(Tooltip::text("Click To Edit"))
|
||||
})
|
||||
.on_click(cx.listener({
|
||||
let message_segments = message.segments.clone();
|
||||
move |this, _, window, cx| {
|
||||
@@ -2080,16 +2067,6 @@ impl ActiveThread {
|
||||
|
||||
let panel_background = cx.theme().colors().panel_background;
|
||||
|
||||
let backdrop = div()
|
||||
.id("backdrop")
|
||||
.stop_mouse_events_except_scroll()
|
||||
.absolute()
|
||||
.inset_0()
|
||||
.size_full()
|
||||
.bg(panel_background)
|
||||
.opacity(0.8)
|
||||
.on_click(cx.listener(Self::handle_cancel_click));
|
||||
|
||||
v_flex()
|
||||
.w_full()
|
||||
.map(|parent| {
|
||||
@@ -2259,7 +2236,15 @@ impl ActiveThread {
|
||||
})
|
||||
.when(after_editing_message, |parent| {
|
||||
// Backdrop to dim out the whole thread below the editing user message
|
||||
parent.relative().child(backdrop)
|
||||
parent.relative().child(
|
||||
div()
|
||||
.stop_mouse_events_except_scroll()
|
||||
.absolute()
|
||||
.inset_0()
|
||||
.size_full()
|
||||
.bg(panel_background)
|
||||
.opacity(0.8),
|
||||
)
|
||||
})
|
||||
.into_any()
|
||||
}
|
||||
@@ -2374,7 +2359,6 @@ impl ActiveThread {
|
||||
move |el, range, metadata, _, cx| {
|
||||
let can_expand = metadata.line_count
|
||||
>= MAX_UNCOLLAPSED_LINES_IN_CODE_BLOCK;
|
||||
|
||||
if !can_expand {
|
||||
return el;
|
||||
}
|
||||
@@ -2382,7 +2366,6 @@ impl ActiveThread {
|
||||
let is_expanded = active_thread
|
||||
.read(cx)
|
||||
.is_codeblock_expanded(message_id, range.start);
|
||||
|
||||
if is_expanded {
|
||||
return el;
|
||||
}
|
||||
@@ -3405,21 +3388,16 @@ impl ActiveThread {
|
||||
self.expanded_code_blocks
|
||||
.get(&(message_id, ix))
|
||||
.copied()
|
||||
.unwrap_or(true)
|
||||
.unwrap_or(false)
|
||||
}
|
||||
|
||||
pub fn toggle_codeblock_expanded(&mut self, message_id: MessageId, ix: usize) {
|
||||
let is_expanded = self
|
||||
.expanded_code_blocks
|
||||
.entry((message_id, ix))
|
||||
.or_insert(true);
|
||||
.or_insert(false);
|
||||
*is_expanded = !*is_expanded;
|
||||
}
|
||||
|
||||
pub fn scroll_to_bottom(&mut self, cx: &mut Context<Self>) {
|
||||
self.list_state.reset(self.messages.len());
|
||||
cx.notify();
|
||||
}
|
||||
}
|
||||
|
||||
pub enum ActiveThreadEvent {
|
||||
@@ -3433,7 +3411,6 @@ impl Render for ActiveThread {
|
||||
v_flex()
|
||||
.size_full()
|
||||
.relative()
|
||||
.bg(cx.theme().colors().panel_background)
|
||||
.on_mouse_move(cx.listener(|this, _, _, cx| {
|
||||
this.show_scrollbar = true;
|
||||
this.hide_scrollbar_later(cx);
|
||||
|
||||
@@ -85,6 +85,7 @@ actions!(
KeepAll,
Follow,
ResetTrialUpsell,
ResetTrialEndUpsell,
]
);
@@ -18,8 +18,8 @@ use language_model::{LanguageModelProvider, LanguageModelProviderId, LanguageMod
|
||||
use project::context_server_store::{ContextServerStatus, ContextServerStore};
|
||||
use settings::{Settings, update_settings_file};
|
||||
use ui::{
|
||||
Disclosure, ElevationIndex, Indicator, Scrollbar, ScrollbarState, Switch, SwitchColor, Tooltip,
|
||||
prelude::*,
|
||||
Disclosure, Divider, DividerColor, ElevationIndex, Indicator, Scrollbar, ScrollbarState,
|
||||
Switch, SwitchColor, Tooltip, prelude::*,
|
||||
};
|
||||
use util::ResultExt as _;
|
||||
use zed_actions::ExtensionCategoryFilter;
|
||||
@@ -142,7 +142,7 @@ impl AgentConfiguration {
|
||||
.expanded_provider_configurations
|
||||
.get(&provider.id())
|
||||
.copied()
|
||||
.unwrap_or(false);
|
||||
.unwrap_or(true);
|
||||
|
||||
v_flex()
|
||||
.pt_3()
|
||||
@@ -201,12 +201,12 @@ impl AgentConfiguration {
|
||||
.on_click(cx.listener({
|
||||
let provider_id = provider.id().clone();
|
||||
move |this, _event, _window, _cx| {
|
||||
let is_expanded = this
|
||||
let is_open = this
|
||||
.expanded_provider_configurations
|
||||
.entry(provider_id.clone())
|
||||
.or_insert(false);
|
||||
.or_insert(true);
|
||||
|
||||
*is_expanded = !*is_expanded;
|
||||
*is_open = !*is_open;
|
||||
}
|
||||
})),
|
||||
),
|
||||
@@ -214,9 +214,9 @@ impl AgentConfiguration {
|
||||
)
|
||||
.when(is_expanded, |parent| match configuration_view {
|
||||
Some(configuration_view) => parent.child(configuration_view),
|
||||
None => parent.child(Label::new(format!(
|
||||
None => parent.child(div().child(Label::new(format!(
|
||||
"No configuration view for {provider_name}",
|
||||
))),
|
||||
)))),
|
||||
})
|
||||
}
|
||||
|
||||
@@ -230,8 +230,7 @@ impl AgentConfiguration {
|
||||
.p(DynamicSpacing::Base16.rems(cx))
|
||||
.pr(DynamicSpacing::Base20.rems(cx))
|
||||
.gap_4()
|
||||
.border_b_1()
|
||||
.border_color(cx.theme().colors().border)
|
||||
.flex_1()
|
||||
.child(
|
||||
v_flex()
|
||||
.gap_0p5()
|
||||
@@ -332,8 +331,7 @@ impl AgentConfiguration {
|
||||
.p(DynamicSpacing::Base16.rems(cx))
|
||||
.pr(DynamicSpacing::Base20.rems(cx))
|
||||
.gap_2p5()
|
||||
.border_b_1()
|
||||
.border_color(cx.theme().colors().border)
|
||||
.flex_1()
|
||||
.child(Headline::new("General Settings"))
|
||||
.child(self.render_command_permission(cx))
|
||||
.child(self.render_single_file_review(cx))
|
||||
@@ -346,17 +344,18 @@ impl AgentConfiguration {
|
||||
) -> impl IntoElement {
|
||||
let context_server_ids = self.context_server_store.read(cx).all_server_ids().clone();
|
||||
|
||||
const SUBHEADING: &str = "Connect to context servers via the Model Context Protocol either via Zed extensions or directly.";
|
||||
|
||||
v_flex()
|
||||
.p(DynamicSpacing::Base16.rems(cx))
|
||||
.pr(DynamicSpacing::Base20.rems(cx))
|
||||
.gap_2()
|
||||
.border_b_1()
|
||||
.border_color(cx.theme().colors().border)
|
||||
.flex_1()
|
||||
.child(
|
||||
v_flex()
|
||||
.gap_0p5()
|
||||
.child(Headline::new("Model Context Protocol (MCP) Servers"))
|
||||
.child(Label::new("Connect to context servers via the Model Context Protocol either via Zed extensions or directly.").color(Color::Muted)),
|
||||
.child(Label::new(SUBHEADING).color(Color::Muted)),
|
||||
)
|
||||
.children(
|
||||
context_server_ids.into_iter().map(|context_server_id| {
|
||||
@@ -423,7 +422,6 @@ impl AgentConfiguration {
|
||||
.unwrap_or(ContextServerStatus::Stopped);
|
||||
|
||||
let is_running = matches!(server_status, ContextServerStatus::Running);
|
||||
let item_id = SharedString::from(context_server_id.0.clone());
|
||||
|
||||
let error = if let ContextServerStatus::Error(error) = server_status.clone() {
|
||||
Some(error)
|
||||
@@ -445,38 +443,9 @@ impl AgentConfiguration {
|
||||
let tool_count = tools.len();
|
||||
|
||||
let border_color = cx.theme().colors().border.opacity(0.6);
|
||||
let success_color = Color::Success.color(cx);
|
||||
|
||||
let (status_indicator, tooltip_text) = match server_status {
|
||||
ContextServerStatus::Starting => (
|
||||
Indicator::dot()
|
||||
.color(Color::Success)
|
||||
.with_animation(
|
||||
SharedString::from(format!("{}-starting", context_server_id.0.clone(),)),
|
||||
Animation::new(Duration::from_secs(2))
|
||||
.repeat()
|
||||
.with_easing(pulsating_between(0.4, 1.)),
|
||||
move |this, delta| this.color(success_color.alpha(delta).into()),
|
||||
)
|
||||
.into_any_element(),
|
||||
"Server is starting.",
|
||||
),
|
||||
ContextServerStatus::Running => (
|
||||
Indicator::dot().color(Color::Success).into_any_element(),
|
||||
"Server is running.",
|
||||
),
|
||||
ContextServerStatus::Error(_) => (
|
||||
Indicator::dot().color(Color::Error).into_any_element(),
|
||||
"Server has an error.",
|
||||
),
|
||||
ContextServerStatus::Stopped => (
|
||||
Indicator::dot().color(Color::Muted).into_any_element(),
|
||||
"Server is stopped.",
|
||||
),
|
||||
};
|
||||
|
||||
v_flex()
|
||||
.id(item_id.clone())
|
||||
.id(SharedString::from(context_server_id.0.clone()))
|
||||
.border_1()
|
||||
.rounded_md()
|
||||
.border_color(border_color)
|
||||
@@ -511,12 +480,35 @@ impl AgentConfiguration {
|
||||
}
|
||||
})),
|
||||
)
|
||||
.child(
|
||||
div()
|
||||
.id(item_id.clone())
|
||||
.tooltip(Tooltip::text(tooltip_text))
|
||||
.child(status_indicator),
|
||||
)
|
||||
.child(match server_status {
|
||||
ContextServerStatus::Starting => {
|
||||
let color = Color::Success.color(cx);
|
||||
Indicator::dot()
|
||||
.color(Color::Success)
|
||||
.with_animation(
|
||||
SharedString::from(format!(
|
||||
"{}-starting",
|
||||
context_server_id.0.clone(),
|
||||
)),
|
||||
Animation::new(Duration::from_secs(2))
|
||||
.repeat()
|
||||
.with_easing(pulsating_between(0.4, 1.)),
|
||||
move |this, delta| {
|
||||
this.color(color.alpha(delta).into())
|
||||
},
|
||||
)
|
||||
.into_any_element()
|
||||
}
|
||||
ContextServerStatus::Running => {
|
||||
Indicator::dot().color(Color::Success).into_any_element()
|
||||
}
|
||||
ContextServerStatus::Error(_) => {
|
||||
Indicator::dot().color(Color::Error).into_any_element()
|
||||
}
|
||||
ContextServerStatus::Stopped => {
|
||||
Indicator::dot().color(Color::Muted).into_any_element()
|
||||
}
|
||||
})
|
||||
.child(Label::new(context_server_id.0.clone()).ml_0p5())
|
||||
.when(is_running, |this| {
|
||||
this.child(
|
||||
@@ -631,7 +623,9 @@ impl Render for AgentConfiguration {
|
||||
.size_full()
|
||||
.overflow_y_scroll()
|
||||
.child(self.render_general_settings_section(cx))
|
||||
.child(Divider::horizontal().color(DividerColor::Border))
|
||||
.child(self.render_context_servers_section(window, cx))
|
||||
.child(Divider::horizontal().color(DividerColor::Border))
|
||||
.child(self.render_provider_configuration_section(cx)),
|
||||
)
|
||||
.child(
|
||||
|
||||
@@ -30,6 +30,7 @@ pub(crate) struct ConfigureContextServerModal {
context_server_store: Entity<ContextServerStore>,
}

#[allow(clippy::large_enum_variant)]
enum Configuration {
NotAvailable,
Required(ConfigurationRequiredState),
@@ -3,7 +3,7 @@ use std::path::Path;
use std::sync::Arc;
use std::time::Duration;

use db::kvp::KEY_VALUE_STORE;
use db::kvp::{Dismissable, KEY_VALUE_STORE};
use markdown::Markdown;
use serde::{Deserialize, Serialize};
@@ -66,8 +66,8 @@ use crate::ui::AgentOnboardingModal;
|
||||
use crate::{
|
||||
AddContextServer, AgentDiffPane, ContextStore, DeleteRecentlyOpenThread, ExpandMessageEditor,
|
||||
Follow, InlineAssistant, NewTextThread, NewThread, OpenActiveThreadAsMarkdown, OpenAgentDiff,
|
||||
OpenHistory, ResetTrialUpsell, TextThreadStore, ThreadEvent, ToggleContextPicker,
|
||||
ToggleNavigationMenu, ToggleOptionsMenu,
|
||||
OpenHistory, ResetTrialEndUpsell, ResetTrialUpsell, TextThreadStore, ThreadEvent,
|
||||
ToggleContextPicker, ToggleNavigationMenu, ToggleOptionsMenu,
|
||||
};
|
||||
|
||||
const AGENT_PANEL_KEY: &str = "agent_panel";
|
||||
@@ -157,7 +157,10 @@ pub fn init(cx: &mut App) {
|
||||
window.refresh();
|
||||
})
|
||||
.register_action(|_workspace, _: &ResetTrialUpsell, _window, cx| {
|
||||
set_trial_upsell_dismissed(false, cx);
|
||||
TrialUpsell::set_dismissed(false, cx);
|
||||
})
|
||||
.register_action(|_workspace, _: &ResetTrialEndUpsell, _window, cx| {
|
||||
TrialEndUpsell::set_dismissed(false, cx);
|
||||
});
|
||||
},
|
||||
)
|
||||
@@ -567,15 +570,6 @@ impl AgentPanel {
|
||||
menu = menu.header("Recently Opened");
|
||||
|
||||
for entry in recently_opened.iter() {
|
||||
if let RecentEntry::Context(context) = entry {
|
||||
if context.read(cx).path().is_none() {
|
||||
log::error!(
|
||||
"bug: text thread in recent history list was never saved"
|
||||
);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
let summary = entry.summary(cx);
|
||||
|
||||
menu = menu.entry_with_end_slot_on_hover(
|
||||
@@ -1299,26 +1293,14 @@ impl AgentPanel {
|
||||
let new_is_history = matches!(new_view, ActiveView::History);
|
||||
|
||||
match &self.active_view {
|
||||
ActiveView::Thread { thread, .. } => {
|
||||
ActiveView::Thread { thread, .. } => self.history_store.update(cx, |store, cx| {
|
||||
if let Some(thread) = thread.upgrade() {
|
||||
if thread.read(cx).is_empty() {
|
||||
let id = thread.read(cx).id().clone();
|
||||
self.history_store.update(cx, |store, cx| {
|
||||
store.remove_recently_opened_thread(id, cx);
|
||||
});
|
||||
store.remove_recently_opened_thread(id, cx);
|
||||
}
|
||||
}
|
||||
}
|
||||
ActiveView::PromptEditor { context_editor, .. } => {
|
||||
let context = context_editor.read(cx).context();
|
||||
// When switching away from an unsaved text thread, delete its entry.
|
||||
if context.read(cx).path().is_none() {
|
||||
let context = context.clone();
|
||||
self.history_store.update(cx, |store, cx| {
|
||||
store.remove_recently_opened_entry(&RecentEntry::Context(context), cx);
|
||||
});
|
||||
}
|
||||
}
|
||||
}),
|
||||
_ => {}
|
||||
}
|
||||
|
||||
@@ -1932,12 +1914,23 @@ impl AgentPanel {
|
||||
}
|
||||
}
|
||||
|
||||
fn should_render_trial_end_upsell(&self, cx: &mut Context<Self>) -> bool {
|
||||
if TrialEndUpsell::dismissed() {
|
||||
return false;
|
||||
}
|
||||
|
||||
let plan = self.user_store.read(cx).current_plan();
|
||||
let has_previous_trial = self.user_store.read(cx).trial_started_at().is_some();
|
||||
|
||||
matches!(plan, Some(Plan::Free)) && has_previous_trial
|
||||
}
|
||||
|
||||
fn should_render_upsell(&self, cx: &mut Context<Self>) -> bool {
|
||||
if !matches!(self.active_view, ActiveView::Thread { .. }) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if self.hide_trial_upsell || dismissed_trial_upsell() {
|
||||
if self.hide_trial_upsell || TrialUpsell::dismissed() {
|
||||
return false;
|
||||
}
|
||||
|
||||
@@ -1983,125 +1976,115 @@ impl AgentPanel {
|
||||
move |toggle_state, _window, cx| {
|
||||
let toggle_state_bool = toggle_state.selected();
|
||||
|
||||
set_trial_upsell_dismissed(toggle_state_bool, cx);
|
||||
TrialUpsell::set_dismissed(toggle_state_bool, cx);
|
||||
},
|
||||
);
|
||||
|
||||
Some(
|
||||
div().p_2().child(
|
||||
v_flex()
|
||||
let contents = div()
|
||||
.size_full()
|
||||
.gap_2()
|
||||
.flex()
|
||||
.flex_col()
|
||||
.child(Headline::new("Build better with Zed Pro").size(HeadlineSize::Small))
|
||||
.child(
|
||||
Label::new("Try Zed Pro for free for 14 days - no credit card required.")
|
||||
.size(LabelSize::Small),
|
||||
)
|
||||
.child(
|
||||
Label::new(
|
||||
"Use your own API keys or enable usage-based billing once you hit the cap.",
|
||||
)
|
||||
.color(Color::Muted),
|
||||
)
|
||||
.child(
|
||||
h_flex()
|
||||
.w_full()
|
||||
.elevation_2(cx)
|
||||
.rounded(px(8.))
|
||||
.bg(cx.theme().colors().background.alpha(0.5))
|
||||
.p(px(3.))
|
||||
|
||||
.px_neg_1()
|
||||
.justify_between()
|
||||
.items_center()
|
||||
.child(h_flex().items_center().gap_1().child(checkbox))
|
||||
.child(
|
||||
div()
|
||||
h_flex()
|
||||
.gap_2()
|
||||
.flex()
|
||||
.flex_col()
|
||||
.size_full()
|
||||
.border_1()
|
||||
.rounded(px(5.))
|
||||
.border_color(cx.theme().colors().text.alpha(0.1))
|
||||
.overflow_hidden()
|
||||
.relative()
|
||||
.bg(cx.theme().colors().panel_background)
|
||||
.px_4()
|
||||
.py_3()
|
||||
.child(
|
||||
div()
|
||||
.absolute()
|
||||
.top_0()
|
||||
.right(px(-1.0))
|
||||
.w(px(441.))
|
||||
.h(px(167.))
|
||||
.child(
|
||||
Vector::new(VectorName::Grid, rems_from_px(441.), rems_from_px(167.)).color(ui::Color::Custom(cx.theme().colors().text.alpha(0.1)))
|
||||
)
|
||||
Button::new("dismiss-button", "Not Now")
|
||||
.style(ButtonStyle::Transparent)
|
||||
.color(Color::Muted)
|
||||
.on_click({
|
||||
let agent_panel = cx.entity();
|
||||
move |_, _, cx| {
|
||||
agent_panel.update(cx, |this, cx| {
|
||||
this.hide_trial_upsell = true;
|
||||
cx.notify();
|
||||
});
|
||||
}
|
||||
}),
|
||||
)
|
||||
.child(
|
||||
div()
|
||||
.absolute()
|
||||
.top(px(-8.0))
|
||||
.right_0()
|
||||
.w(px(400.))
|
||||
.h(px(92.))
|
||||
.child(
|
||||
Vector::new(VectorName::AiGrid, rems_from_px(400.), rems_from_px(92.)).color(ui::Color::Custom(cx.theme().colors().text.alpha(0.32)))
|
||||
)
|
||||
)
|
||||
// .child(
|
||||
// div()
|
||||
// .absolute()
|
||||
// .top_0()
|
||||
// .right(px(360.))
|
||||
// .size(px(401.))
|
||||
// .overflow_hidden()
|
||||
// .bg(cx.theme().colors().panel_background)
|
||||
// )
|
||||
.child(
|
||||
div()
|
||||
.absolute()
|
||||
.top_0()
|
||||
.right_0()
|
||||
.w(px(660.))
|
||||
.h(px(401.))
|
||||
.overflow_hidden()
|
||||
.bg(linear_gradient(
|
||||
75.,
|
||||
linear_color_stop(cx.theme().colors().panel_background.alpha(0.01), 1.0),
|
||||
linear_color_stop(cx.theme().colors().panel_background, 0.45),
|
||||
))
|
||||
)
|
||||
.child(Headline::new("Build better with Zed Pro").size(HeadlineSize::Small))
|
||||
.child(Label::new("Try Zed Pro for free for 14 days - no credit card required.").size(LabelSize::Small))
|
||||
.child(Label::new("Use your own API keys or enable usage-based billing once you hit the cap.").color(Color::Muted))
|
||||
Button::new("cta-button", "Start Trial")
|
||||
.style(ButtonStyle::Transparent)
|
||||
.on_click(|_, _, cx| cx.open_url(&zed_urls::account_url(cx))),
|
||||
),
|
||||
),
|
||||
);
|
||||
|
||||
Some(self.render_upsell_container(cx, contents))
|
||||
}
|
||||
|
||||
fn render_trial_end_upsell(
|
||||
&self,
|
||||
_window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Option<impl IntoElement> {
|
||||
if !self.should_render_trial_end_upsell(cx) {
|
||||
return None;
|
||||
}
|
||||
|
||||
Some(
|
||||
self.render_upsell_container(
|
||||
cx,
|
||||
div()
|
||||
.size_full()
|
||||
.gap_2()
|
||||
.flex()
|
||||
.flex_col()
|
||||
.child(
|
||||
Headline::new("Your Zed Pro trial has expired.").size(HeadlineSize::Small),
|
||||
)
|
||||
.child(
|
||||
Label::new("You've been automatically reset to the free plan.")
|
||||
.size(LabelSize::Small),
|
||||
)
|
||||
.child(
|
||||
h_flex()
|
||||
.w_full()
|
||||
.px_neg_1()
|
||||
.justify_between()
|
||||
.items_center()
|
||||
.child(div())
|
||||
.child(
|
||||
h_flex()
|
||||
.w_full()
|
||||
.px_neg_1()
|
||||
.justify_between()
|
||||
.items_center()
|
||||
.child(h_flex().items_center().gap_1().child(checkbox))
|
||||
.gap_2()
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_2()
|
||||
.child(
|
||||
Button::new("dismiss-button", "Not Now")
|
||||
.style(ButtonStyle::Transparent)
|
||||
.color(Color::Muted)
|
||||
.on_click({
|
||||
let agent_panel = cx.entity();
|
||||
move |_, _, cx| {
|
||||
agent_panel.update(
|
||||
cx,
|
||||
|this, cx| {
|
||||
let hidden =
|
||||
this.hide_trial_upsell;
|
||||
println!("hidden: {}", hidden);
|
||||
this.hide_trial_upsell = true;
|
||||
let new_hidden =
|
||||
this.hide_trial_upsell;
|
||||
println!(
|
||||
"new_hidden: {}",
|
||||
new_hidden
|
||||
);
|
||||
|
||||
cx.notify();
|
||||
},
|
||||
);
|
||||
}
|
||||
}),
|
||||
)
|
||||
.child(
|
||||
Button::new("cta-button", "Start Trial")
|
||||
.style(ButtonStyle::Transparent)
|
||||
.on_click(|_, _, cx| {
|
||||
cx.open_url(&zed_urls::account_url(cx))
|
||||
}),
|
||||
),
|
||||
Button::new("dismiss-button", "Stay on Free")
|
||||
.style(ButtonStyle::Transparent)
|
||||
.color(Color::Muted)
|
||||
.on_click({
|
||||
let agent_panel = cx.entity();
|
||||
move |_, _, cx| {
|
||||
agent_panel.update(cx, |_this, cx| {
|
||||
TrialEndUpsell::set_dismissed(true, cx);
|
||||
cx.notify();
|
||||
});
|
||||
}
|
||||
}),
|
||||
)
|
||||
.child(
|
||||
Button::new("cta-button", "Upgrade to Zed Pro")
|
||||
.style(ButtonStyle::Transparent)
|
||||
.on_click(|_, _, cx| {
|
||||
cx.open_url(&zed_urls::account_url(cx))
|
||||
}),
|
||||
),
|
||||
),
|
||||
),
|
||||
@@ -2109,6 +2092,91 @@ impl AgentPanel {
|
||||
)
|
||||
}
|
||||
|
||||
fn render_upsell_container(&self, cx: &mut Context<Self>, content: Div) -> Div {
|
||||
div().p_2().child(
|
||||
v_flex()
|
||||
.w_full()
|
||||
.elevation_2(cx)
|
||||
.rounded(px(8.))
|
||||
.bg(cx.theme().colors().background.alpha(0.5))
|
||||
.p(px(3.))
|
||||
.child(
|
||||
div()
|
||||
.gap_2()
|
||||
.flex()
|
||||
.flex_col()
|
||||
.size_full()
|
||||
.border_1()
|
||||
.rounded(px(5.))
|
||||
.border_color(cx.theme().colors().text.alpha(0.1))
|
||||
.overflow_hidden()
|
||||
.relative()
|
||||
.bg(cx.theme().colors().panel_background)
|
||||
.px_4()
|
||||
.py_3()
|
||||
.child(
|
||||
div()
|
||||
.absolute()
|
||||
.top_0()
|
||||
.right(px(-1.0))
|
||||
.w(px(441.))
|
||||
.h(px(167.))
|
||||
.child(
|
||||
Vector::new(
|
||||
VectorName::Grid,
|
||||
rems_from_px(441.),
|
||||
rems_from_px(167.),
|
||||
)
|
||||
.color(ui::Color::Custom(cx.theme().colors().text.alpha(0.1))),
|
||||
),
|
||||
)
|
||||
.child(
|
||||
div()
|
||||
.absolute()
|
||||
.top(px(-8.0))
|
||||
.right_0()
|
||||
.w(px(400.))
|
||||
.h(px(92.))
|
||||
.child(
|
||||
Vector::new(
|
||||
VectorName::AiGrid,
|
||||
rems_from_px(400.),
|
||||
rems_from_px(92.),
|
||||
)
|
||||
.color(ui::Color::Custom(cx.theme().colors().text.alpha(0.32))),
|
||||
),
|
||||
)
|
||||
// .child(
|
||||
// div()
|
||||
// .absolute()
|
||||
// .top_0()
|
||||
// .right(px(360.))
|
||||
// .size(px(401.))
|
||||
// .overflow_hidden()
|
||||
// .bg(cx.theme().colors().panel_background)
|
||||
// )
|
||||
.child(
|
||||
div()
|
||||
.absolute()
|
||||
.top_0()
|
||||
.right_0()
|
||||
.w(px(660.))
|
||||
.h(px(401.))
|
||||
.overflow_hidden()
|
||||
.bg(linear_gradient(
|
||||
75.,
|
||||
linear_color_stop(
|
||||
cx.theme().colors().panel_background.alpha(0.01),
|
||||
1.0,
|
||||
),
|
||||
linear_color_stop(cx.theme().colors().panel_background, 0.45),
|
||||
)),
|
||||
)
|
||||
.child(content),
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
fn render_active_thread_or_empty_state(
|
||||
&self,
|
||||
window: &mut Window,
|
||||
@@ -2156,7 +2224,6 @@ impl AgentPanel {
|
||||
|
||||
v_flex()
|
||||
.size_full()
|
||||
.bg(cx.theme().colors().panel_background)
|
||||
.when(recent_history.is_empty(), |this| {
|
||||
let configuration_error_ref = &configuration_error;
|
||||
this.child(
|
||||
@@ -2461,6 +2528,9 @@ impl AgentPanel {
|
||||
.occlude()
|
||||
.child(match last_error {
|
||||
ThreadError::PaymentRequired => self.render_payment_required_error(cx),
|
||||
ThreadError::MaxMonthlySpendReached => {
|
||||
self.render_max_monthly_spend_reached_error(cx)
|
||||
}
|
||||
ThreadError::ModelRequestLimitReached { plan } => {
|
||||
self.render_model_request_limit_reached_error(plan, cx)
|
||||
}
|
||||
@@ -2520,6 +2590,56 @@ impl AgentPanel {
|
||||
.into_any()
|
||||
}
|
||||
|
||||
fn render_max_monthly_spend_reached_error(&self, cx: &mut Context<Self>) -> AnyElement {
|
||||
const ERROR_MESSAGE: &str = "You have reached your maximum monthly spend. Increase your spend limit to continue using Zed LLMs.";
|
||||
|
||||
v_flex()
|
||||
.gap_0p5()
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_1p5()
|
||||
.items_center()
|
||||
.child(Icon::new(IconName::XCircle).color(Color::Error))
|
||||
.child(Label::new("Max Monthly Spend Reached").weight(FontWeight::MEDIUM)),
|
||||
)
|
||||
.child(
|
||||
div()
|
||||
.id("error-message")
|
||||
.max_h_24()
|
||||
.overflow_y_scroll()
|
||||
.child(Label::new(ERROR_MESSAGE)),
|
||||
)
|
||||
.child(
|
||||
h_flex()
|
||||
.justify_end()
|
||||
.mt_1()
|
||||
.gap_1()
|
||||
.child(self.create_copy_button(ERROR_MESSAGE))
|
||||
.child(
|
||||
Button::new("subscribe", "Update Monthly Spend Limit").on_click(
|
||||
cx.listener(|this, _, _, cx| {
|
||||
this.thread.update(cx, |this, _cx| {
|
||||
this.clear_last_error();
|
||||
});
|
||||
|
||||
cx.open_url(&zed_urls::account_url(cx));
|
||||
cx.notify();
|
||||
}),
|
||||
),
|
||||
)
|
||||
.child(Button::new("dismiss", "Dismiss").on_click(cx.listener(
|
||||
|this, _, _, cx| {
|
||||
this.thread.update(cx, |this, _cx| {
|
||||
this.clear_last_error();
|
||||
});
|
||||
|
||||
cx.notify();
|
||||
},
|
||||
))),
|
||||
)
|
||||
.into_any()
|
||||
}
|
||||
|
||||
fn render_model_request_limit_reached_error(
|
||||
&self,
|
||||
plan: Plan,
|
||||
@@ -2827,6 +2947,7 @@ impl Render for AgentPanel {
|
||||
.on_action(cx.listener(Self::toggle_zoom))
|
||||
.child(self.render_toolbar(window, cx))
|
||||
.children(self.render_trial_upsell(window, cx))
|
||||
.children(self.render_trial_end_upsell(window, cx))
|
||||
.map(|parent| match &self.active_view {
|
||||
ActiveView::Thread { .. } => parent
|
||||
.relative()
|
||||
@@ -3014,25 +3135,14 @@ impl AgentPanelDelegate for ConcreteAssistantPanelDelegate {
|
||||
}
|
||||
}
|
||||
|
||||
const DISMISSED_TRIAL_UPSELL_KEY: &str = "dismissed-trial-upsell";
|
||||
struct TrialUpsell;
|
||||
|
||||
fn dismissed_trial_upsell() -> bool {
|
||||
db::kvp::KEY_VALUE_STORE
|
||||
.read_kvp(DISMISSED_TRIAL_UPSELL_KEY)
|
||||
.log_err()
|
||||
.map_or(false, |s| s.is_some())
|
||||
impl Dismissable for TrialUpsell {
|
||||
const KEY: &'static str = "dismissed-trial-upsell";
|
||||
}
|
||||
|
||||
fn set_trial_upsell_dismissed(is_dismissed: bool, cx: &mut App) {
|
||||
db::write_and_log(cx, move || async move {
|
||||
if is_dismissed {
|
||||
db::kvp::KEY_VALUE_STORE
|
||||
.write_kvp(DISMISSED_TRIAL_UPSELL_KEY.into(), "1".into())
|
||||
.await
|
||||
} else {
|
||||
db::kvp::KEY_VALUE_STORE
|
||||
.delete_kvp(DISMISSED_TRIAL_UPSELL_KEY.into())
|
||||
.await
|
||||
}
|
||||
})
|
||||
struct TrialEndUpsell;
|
||||
|
||||
impl Dismissable for TrialEndUpsell {
|
||||
const KEY: &'static str = "dismissed-trial-end-upsell";
|
||||
}
|
||||
|
||||
@@ -749,11 +749,11 @@ pub enum ImageStatus {
|
||||
|
||||
impl ImageContext {
|
||||
pub fn eq_for_key(&self, other: &Self) -> bool {
|
||||
self.original_image.id() == other.original_image.id()
|
||||
self.original_image.id == other.original_image.id
|
||||
}
|
||||
|
||||
pub fn hash_for_key<H: Hasher>(&self, state: &mut H) {
|
||||
self.original_image.id().hash(state);
|
||||
self.original_image.id.hash(state);
|
||||
}
|
||||
|
||||
pub fn image(&self) -> Option<LanguageModelImage> {
|
||||
|
||||
@@ -942,8 +942,8 @@ impl MentionLink {
|
||||
format!("[@{}]({}:{})", title, Self::THREAD, id)
|
||||
}
|
||||
ThreadContextEntry::Context { path, title } => {
|
||||
let filename = path.file_name().unwrap_or_default().to_string_lossy();
|
||||
let escaped_filename = urlencoding::encode(&filename);
|
||||
let filename = path.file_name().unwrap_or_default();
|
||||
let escaped_filename = urlencoding::encode(&filename.to_string_lossy()).to_string();
|
||||
format!(
|
||||
"[@{}]({}:{}{})",
|
||||
title,
|
||||
|
||||
@@ -84,12 +84,6 @@ impl ContextStrip {
|
||||
}
|
||||
}
|
||||
|
||||
/// Whether or not the context strip has items to display
|
||||
pub fn has_context_items(&self, cx: &App) -> bool {
|
||||
self.context_store.read(cx).context().next().is_some()
|
||||
|| self.suggested_context(cx).is_some()
|
||||
}
|
||||
|
||||
fn added_contexts(&self, cx: &App) -> Vec<AddedContext> {
|
||||
if let Some(workspace) = self.workspace.upgrade() {
|
||||
let project = workspace.read(cx).project().read(cx);
|
||||
@@ -110,14 +104,14 @@ impl ContextStrip {
|
||||
}
|
||||
}
|
||||
|
||||
fn suggested_context(&self, cx: &App) -> Option<SuggestedContext> {
|
||||
fn suggested_context(&self, cx: &Context<Self>) -> Option<SuggestedContext> {
|
||||
match self.suggest_context_kind {
|
||||
SuggestContextKind::File => self.suggested_file(cx),
|
||||
SuggestContextKind::Thread => self.suggested_thread(cx),
|
||||
}
|
||||
}
|
||||
|
||||
fn suggested_file(&self, cx: &App) -> Option<SuggestedContext> {
|
||||
fn suggested_file(&self, cx: &Context<Self>) -> Option<SuggestedContext> {
|
||||
let workspace = self.workspace.upgrade()?;
|
||||
let active_item = workspace.read(cx).active_item(cx)?;
|
||||
|
||||
@@ -144,7 +138,7 @@ impl ContextStrip {
|
||||
})
|
||||
}
|
||||
|
||||
fn suggested_thread(&self, cx: &App) -> Option<SuggestedContext> {
|
||||
fn suggested_thread(&self, cx: &Context<Self>) -> Option<SuggestedContext> {
|
||||
if !self.context_picker.read(cx).allow_threads() {
|
||||
return None;
|
||||
}
|
||||
|
||||
@@ -75,7 +75,7 @@ impl Default for DebugAccountState {
|
||||
Self {
|
||||
enabled: false,
|
||||
trial_expired: false,
|
||||
plan: Plan::ZedFree,
|
||||
plan: Plan::Free,
|
||||
custom_prompt_usage: RequestUsage {
|
||||
limit: UsageLimit::Unlimited,
|
||||
amount: 0,
|
||||
|
||||
@@ -8,10 +8,9 @@ use anyhow::{Context as _, Result};
|
||||
use assistant_settings::AssistantSettings;
|
||||
use client::telemetry::Telemetry;
|
||||
use collections::{HashMap, HashSet, VecDeque, hash_map};
|
||||
use editor::display_map::EditorMargins;
|
||||
use editor::{
|
||||
Anchor, AnchorRangeExt, CodeActionProvider, Editor, EditorEvent, ExcerptId, ExcerptRange,
|
||||
MultiBuffer, MultiBufferSnapshot, ToOffset as _, ToPoint,
|
||||
GutterDimensions, MultiBuffer, MultiBufferSnapshot, ToOffset as _, ToPoint,
|
||||
actions::SelectAll,
|
||||
display_map::{
|
||||
BlockContext, BlockPlacement, BlockProperties, BlockStyle, CustomBlockId, RenderBlock,
|
||||
@@ -472,11 +471,11 @@ impl InlineAssistant {
|
||||
)
|
||||
});
|
||||
|
||||
let editor_margins = Arc::new(Mutex::new(EditorMargins::default()));
|
||||
let gutter_dimensions = Arc::new(Mutex::new(GutterDimensions::default()));
|
||||
let prompt_editor = cx.new(|cx| {
|
||||
PromptEditor::new_buffer(
|
||||
assist_id,
|
||||
editor_margins,
|
||||
gutter_dimensions.clone(),
|
||||
self.prompt_history.clone(),
|
||||
prompt_buffer.clone(),
|
||||
codegen.clone(),
|
||||
@@ -591,11 +590,11 @@ impl InlineAssistant {
|
||||
)
|
||||
});
|
||||
|
||||
let editor_margins = Arc::new(Mutex::new(EditorMargins::default()));
|
||||
let gutter_dimensions = Arc::new(Mutex::new(GutterDimensions::default()));
|
||||
let prompt_editor = cx.new(|cx| {
|
||||
PromptEditor::new_buffer(
|
||||
assist_id,
|
||||
editor_margins,
|
||||
gutter_dimensions.clone(),
|
||||
self.prompt_history.clone(),
|
||||
prompt_buffer.clone(),
|
||||
codegen.clone(),
|
||||
@@ -664,7 +663,6 @@ impl InlineAssistant {
|
||||
height: Some(prompt_editor_height),
|
||||
render: build_assist_editor_renderer(prompt_editor),
|
||||
priority: 0,
|
||||
render_in_minimap: false,
|
||||
},
|
||||
BlockProperties {
|
||||
style: BlockStyle::Sticky,
|
||||
@@ -679,7 +677,6 @@ impl InlineAssistant {
|
||||
.into_any_element()
|
||||
}),
|
||||
priority: 0,
|
||||
render_in_minimap: false,
|
||||
},
|
||||
];
|
||||
|
||||
@@ -1421,11 +1418,11 @@ impl InlineAssistant {
|
||||
|
||||
enum DeletedLines {}
|
||||
let mut editor = Editor::for_multibuffer(multi_buffer, None, window, cx);
|
||||
editor.disable_scrollbars_and_minimap(window, cx);
|
||||
editor.set_soft_wrap_mode(language::language_settings::SoftWrap::None, cx);
|
||||
editor.set_show_wrap_guides(false, cx);
|
||||
editor.set_show_gutter(false, cx);
|
||||
editor.scroll_manager.set_forbid_vertical_scroll(true);
|
||||
editor.set_show_scrollbars(false, cx);
|
||||
editor.set_read_only(true);
|
||||
editor.set_show_edit_predictions(Some(false), window, cx);
|
||||
editor.highlight_rows::<DeletedLines>(
|
||||
@@ -1449,12 +1446,11 @@ impl InlineAssistant {
|
||||
.bg(cx.theme().status().deleted_background)
|
||||
.size_full()
|
||||
.h(height as f32 * cx.window.line_height())
|
||||
.pl(cx.margins.gutter.full_width())
|
||||
.pl(cx.gutter_dimensions.full_width())
|
||||
.child(deleted_lines_editor.clone())
|
||||
.into_any_element()
|
||||
}),
|
||||
priority: 0,
|
||||
render_in_minimap: false,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -1612,9 +1608,9 @@ fn build_assist_editor_renderer(editor: &Entity<PromptEditor<BufferCodegen>>) ->
|
||||
let editor = editor.clone();
|
||||
|
||||
Arc::new(move |cx: &mut BlockContext| {
|
||||
let editor_margins = editor.read(cx).editor_margins();
|
||||
let gutter_dimensions = editor.read(cx).gutter_dimensions();
|
||||
|
||||
*editor_margins.lock() = *cx.margins;
|
||||
*gutter_dimensions.lock() = *cx.gutter_dimensions;
|
||||
editor.clone().into_any_element()
|
||||
})
|
||||
}
|
||||
|
||||
@@ -11,9 +11,10 @@ use crate::{CycleNextInlineAssist, CyclePreviousInlineAssist};
|
||||
use crate::{RemoveAllContext, ToggleContextPicker};
|
||||
use client::ErrorExt;
|
||||
use collections::VecDeque;
|
||||
use editor::display_map::EditorMargins;
|
||||
use db::kvp::Dismissable;
|
||||
use editor::{
|
||||
ContextMenuOptions, Editor, EditorElement, EditorEvent, EditorMode, EditorStyle, MultiBuffer,
|
||||
ContextMenuOptions, Editor, EditorElement, EditorEvent, EditorMode, EditorStyle,
|
||||
GutterDimensions, MultiBuffer,
|
||||
actions::{MoveDown, MoveUp},
|
||||
};
|
||||
use feature_flags::{FeatureFlagAppExt as _, ZedProFeatureFlag};
|
||||
@@ -33,7 +34,6 @@ use ui::utils::WithRemSize;
|
||||
use ui::{
|
||||
CheckboxWithLabel, IconButtonShape, KeyBinding, Popover, PopoverMenuHandle, Tooltip, prelude::*,
|
||||
};
|
||||
use util::ResultExt;
|
||||
use workspace::Workspace;
|
||||
|
||||
pub struct PromptEditor<T> {
|
||||
@@ -61,13 +61,11 @@ impl<T: 'static> Render for PromptEditor<T> {
|
||||
let ui_font_size = ThemeSettings::get_global(cx).ui_font_size(cx);
|
||||
let mut buttons = Vec::new();
|
||||
|
||||
const RIGHT_PADDING: Pixels = px(9.);
|
||||
|
||||
let (left_gutter_width, right_padding) = match &self.mode {
|
||||
let left_gutter_width = match &self.mode {
|
||||
PromptEditorMode::Buffer {
|
||||
id: _,
|
||||
codegen,
|
||||
editor_margins,
|
||||
gutter_dimensions,
|
||||
} => {
|
||||
let codegen = codegen.read(cx);
|
||||
|
||||
@@ -75,17 +73,13 @@ impl<T: 'static> Render for PromptEditor<T> {
|
||||
buttons.push(self.render_cycle_controls(&codegen, cx));
|
||||
}
|
||||
|
||||
let editor_margins = editor_margins.lock();
|
||||
let gutter = editor_margins.gutter;
|
||||
let gutter_dimensions = gutter_dimensions.lock();
|
||||
|
||||
let left_gutter_width = gutter.full_width() + (gutter.margin / 2.0);
|
||||
let right_padding = editor_margins.right + RIGHT_PADDING;
|
||||
|
||||
(left_gutter_width, right_padding)
|
||||
gutter_dimensions.full_width() + (gutter_dimensions.margin / 2.0)
|
||||
}
|
||||
PromptEditorMode::Terminal { .. } => {
|
||||
// Give the equivalent of the same left-padding that we're using on the right
|
||||
(Pixels::from(40.0), Pixels::from(24.))
|
||||
Pixels::from(40.0)
|
||||
}
|
||||
};
|
||||
|
||||
@@ -106,7 +100,7 @@ impl<T: 'static> Render for PromptEditor<T> {
|
||||
.size_full()
|
||||
.pt_0p5()
|
||||
.pb(bottom_padding)
|
||||
.pr(right_padding)
|
||||
.pr_6()
|
||||
.child(
|
||||
h_flex()
|
||||
.items_start()
|
||||
@@ -451,7 +445,7 @@ impl<T: 'static> PromptEditor<T> {
|
||||
editor.move_to_end(&Default::default(), window, cx)
|
||||
});
|
||||
}
|
||||
} else if self.context_strip.read(cx).has_context_items(cx) {
|
||||
} else {
|
||||
self.context_strip.focus_handle(cx).focus(window);
|
||||
}
|
||||
}
|
||||
@@ -722,7 +716,7 @@ impl<T: 'static> PromptEditor<T> {
|
||||
.child(CheckboxWithLabel::new(
|
||||
"dont-show-again",
|
||||
Label::new("Don't show again"),
|
||||
if dismissed_rate_limit_notice() {
|
||||
if RateLimitNotice::dismissed() {
|
||||
ui::ToggleState::Selected
|
||||
} else {
|
||||
ui::ToggleState::Unselected
|
||||
@@ -734,7 +728,7 @@ impl<T: 'static> PromptEditor<T> {
|
||||
ui::ToggleState::Selected => true,
|
||||
};
|
||||
|
||||
set_rate_limit_notice_dismissed(is_dismissed, cx)
|
||||
RateLimitNotice::set_dismissed(is_dismissed, cx);
|
||||
},
|
||||
))
|
||||
.child(
|
||||
@@ -812,7 +806,7 @@ pub enum PromptEditorMode {
|
||||
Buffer {
|
||||
id: InlineAssistId,
|
||||
codegen: Entity<BufferCodegen>,
|
||||
editor_margins: Arc<Mutex<EditorMargins>>,
|
||||
gutter_dimensions: Arc<Mutex<GutterDimensions>>,
|
||||
},
|
||||
Terminal {
|
||||
id: TerminalInlineAssistId,
|
||||
@@ -844,7 +838,7 @@ impl InlineAssistId {
|
||||
impl PromptEditor<BufferCodegen> {
|
||||
pub fn new_buffer(
|
||||
id: InlineAssistId,
|
||||
editor_margins: Arc<Mutex<EditorMargins>>,
|
||||
gutter_dimensions: Arc<Mutex<GutterDimensions>>,
|
||||
prompt_history: VecDeque<String>,
|
||||
prompt_buffer: Entity<MultiBuffer>,
|
||||
codegen: Entity<BufferCodegen>,
|
||||
@@ -861,7 +855,7 @@ impl PromptEditor<BufferCodegen> {
|
||||
let mode = PromptEditorMode::Buffer {
|
||||
id,
|
||||
codegen,
|
||||
editor_margins,
|
||||
gutter_dimensions,
|
||||
};
|
||||
|
||||
let prompt_editor = cx.new(|cx| {
|
||||
@@ -974,7 +968,7 @@ impl PromptEditor<BufferCodegen> {
|
||||
CodegenStatus::Error(error) => {
|
||||
if cx.has_flag::<ZedProFeatureFlag>()
|
||||
&& error.error_code() == proto::ErrorCode::RateLimitExceeded
|
||||
&& !dismissed_rate_limit_notice()
|
||||
&& !RateLimitNotice::dismissed()
|
||||
{
|
||||
self.show_rate_limit_notice = true;
|
||||
cx.notify();
|
||||
@@ -1001,9 +995,11 @@ impl PromptEditor<BufferCodegen> {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn editor_margins(&self) -> &Arc<Mutex<EditorMargins>> {
|
||||
pub fn gutter_dimensions(&self) -> &Arc<Mutex<GutterDimensions>> {
|
||||
match &self.mode {
|
||||
PromptEditorMode::Buffer { editor_margins, .. } => editor_margins,
|
||||
PromptEditorMode::Buffer {
|
||||
gutter_dimensions, ..
|
||||
} => gutter_dimensions,
|
||||
PromptEditorMode::Terminal { .. } => unreachable!(),
|
||||
}
|
||||
}
|
||||
@@ -1180,27 +1176,10 @@ impl PromptEditor<TerminalCodegen> {
|
||||
}
|
||||
}
|
||||
|
||||
const DISMISSED_RATE_LIMIT_NOTICE_KEY: &str = "dismissed-rate-limit-notice";
|
||||
struct RateLimitNotice;
|
||||
|
||||
fn dismissed_rate_limit_notice() -> bool {
|
||||
db::kvp::KEY_VALUE_STORE
|
||||
.read_kvp(DISMISSED_RATE_LIMIT_NOTICE_KEY)
|
||||
.log_err()
|
||||
.map_or(false, |s| s.is_some())
|
||||
}
|
||||
|
||||
fn set_rate_limit_notice_dismissed(is_dismissed: bool, cx: &mut App) {
|
||||
db::write_and_log(cx, move || async move {
|
||||
if is_dismissed {
|
||||
db::kvp::KEY_VALUE_STORE
|
||||
.write_kvp(DISMISSED_RATE_LIMIT_NOTICE_KEY.into(), "1".into())
|
||||
.await
|
||||
} else {
|
||||
db::kvp::KEY_VALUE_STORE
|
||||
.delete_kvp(DISMISSED_RATE_LIMIT_NOTICE_KEY.into())
|
||||
.await
|
||||
}
|
||||
})
|
||||
impl Dismissable for RateLimitNotice {
|
||||
const KEY: &'static str = "dismissed-rate-limit-notice";
|
||||
}
|
||||
|
||||
pub enum CodegenStatus {
|
||||
|
||||
@@ -401,7 +401,7 @@ impl MessageEditor {
|
||||
fn move_up(&mut self, _: &MoveUp, window: &mut Window, cx: &mut Context<Self>) {
|
||||
if self.context_picker_menu_handle.is_deployed() {
|
||||
cx.propagate();
|
||||
} else if self.context_strip.read(cx).has_context_items(cx) {
|
||||
} else {
|
||||
self.context_strip.focus_handle(cx).focus(window);
|
||||
}
|
||||
}
|
||||
@@ -1085,11 +1085,11 @@ impl MessageEditor {
|
||||
let plan = user_store
|
||||
.current_plan()
|
||||
.map(|plan| match plan {
|
||||
Plan::Free => zed_llm_client::Plan::ZedFree,
|
||||
Plan::Free => zed_llm_client::Plan::Free,
|
||||
Plan::ZedPro => zed_llm_client::Plan::ZedPro,
|
||||
Plan::ZedProTrial => zed_llm_client::Plan::ZedProTrial,
|
||||
})
|
||||
.unwrap_or(zed_llm_client::Plan::ZedFree);
|
||||
.unwrap_or(zed_llm_client::Plan::Free);
|
||||
let usage = self.thread.read(cx).last_usage().or_else(|| {
|
||||
maybe!({
|
||||
let amount = user_store.model_request_usage_amount()?;
|
||||
|
||||
@@ -191,7 +191,7 @@ impl TerminalInlineAssistant {
|
||||
};
|
||||
|
||||
self.prompt_history.retain(|prompt| *prompt != user_prompt);
|
||||
self.prompt_history.push_back(user_prompt);
|
||||
self.prompt_history.push_back(user_prompt.clone());
|
||||
if self.prompt_history.len() > PROMPT_HISTORY_MAX_LEN {
|
||||
self.prompt_history.pop_front();
|
||||
}
|
||||
|
||||
@@ -22,9 +22,9 @@ use language_model::{
|
||||
ConfiguredModel, LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent,
|
||||
LanguageModelId, LanguageModelKnownError, LanguageModelRegistry, LanguageModelRequest,
|
||||
LanguageModelRequestMessage, LanguageModelRequestTool, LanguageModelToolResult,
|
||||
LanguageModelToolResultContent, LanguageModelToolUseId, MessageContent,
|
||||
ModelRequestLimitReachedError, PaymentRequiredError, RequestUsage, Role, SelectedModel,
|
||||
StopReason, TokenUsage,
|
||||
LanguageModelToolResultContent, LanguageModelToolUseId, MaxMonthlySpendReachedError,
|
||||
MessageContent, ModelRequestLimitReachedError, PaymentRequiredError, RequestUsage, Role,
|
||||
SelectedModel, StopReason, TokenUsage,
|
||||
};
|
||||
use postage::stream::Stream as _;
|
||||
use project::Project;
|
||||
@@ -214,7 +214,7 @@ pub struct GitState {
|
||||
pub diff: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
#[derive(Clone)]
|
||||
pub struct ThreadCheckpoint {
|
||||
message_id: MessageId,
|
||||
git_checkpoint: GitStoreCheckpoint,
|
||||
@@ -996,7 +996,6 @@ impl Thread {
|
||||
new_role: Role,
|
||||
new_segments: Vec<MessageSegment>,
|
||||
loaded_context: Option<LoadedContext>,
|
||||
checkpoint: Option<GitStoreCheckpoint>,
|
||||
cx: &mut Context<Self>,
|
||||
) -> bool {
|
||||
let Some(message) = self.messages.iter_mut().find(|message| message.id == id) else {
|
||||
@@ -1007,15 +1006,6 @@ impl Thread {
|
||||
if let Some(context) = loaded_context {
|
||||
message.loaded_context = context;
|
||||
}
|
||||
if let Some(git_checkpoint) = checkpoint {
|
||||
self.checkpoints_by_message.insert(
|
||||
id,
|
||||
ThreadCheckpoint {
|
||||
message_id: id,
|
||||
git_checkpoint,
|
||||
},
|
||||
);
|
||||
}
|
||||
self.touch_updated_at();
|
||||
cx.emit(ThreadEvent::MessageEdited(id));
|
||||
true
|
||||
@@ -1698,6 +1688,10 @@ impl Thread {
|
||||
|
||||
if error.is::<PaymentRequiredError>() {
|
||||
cx.emit(ThreadEvent::ShowError(ThreadError::PaymentRequired));
|
||||
} else if error.is::<MaxMonthlySpendReachedError>() {
|
||||
cx.emit(ThreadEvent::ShowError(
|
||||
ThreadError::MaxMonthlySpendReached,
|
||||
));
|
||||
} else if let Some(error) =
|
||||
error.downcast_ref::<ModelRequestLimitReachedError>()
|
||||
{
|
||||
@@ -2253,7 +2247,7 @@ impl Thread {
|
||||
.read(cx)
|
||||
.enabled_tools(cx)
|
||||
.iter()
|
||||
.map(|tool| tool.name())
|
||||
.map(|tool| tool.name().to_string())
|
||||
.collect();
|
||||
|
||||
self.message_feedback.insert(message_id, feedback);
|
||||
@@ -2593,7 +2587,7 @@ impl Thread {
|
||||
.read(cx)
|
||||
.current_user()
|
||||
.map(|user| user.github_login.clone());
|
||||
let client = self.project.read(cx).client();
|
||||
let client = self.project.read(cx).client().clone();
|
||||
let serialize_task = self.serialize(cx);
|
||||
|
||||
cx.background_executor()
|
||||
@@ -2712,6 +2706,8 @@ impl Thread {
|
||||
pub enum ThreadError {
|
||||
#[error("Payment required")]
|
||||
PaymentRequired,
|
||||
#[error("Max monthly spend reached")]
|
||||
MaxMonthlySpendReached,
|
||||
#[error("Model request limit reached")]
|
||||
ModelRequestLimitReached { plan: Plan },
|
||||
#[error("Message {header}: {message}")]
|
||||
|
||||
@@ -260,7 +260,10 @@ impl ThreadHistory {
|
||||
}
|
||||
});
|
||||
|
||||
self.search_state = SearchState::Searching { query, _task: task };
|
||||
self.search_state = SearchState::Searching {
|
||||
query: query.clone(),
|
||||
_task: task,
|
||||
};
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
|
||||
@@ -505,8 +505,8 @@ impl ThreadStore {
|
||||
ToolSource::Native,
|
||||
&profile
|
||||
.tools
|
||||
.into_iter()
|
||||
.filter_map(|(tool, enabled)| enabled.then(|| tool))
|
||||
.iter()
|
||||
.filter_map(|(tool, enabled)| enabled.then(|| tool.clone()))
|
||||
.collect::<Vec<_>>(),
|
||||
cx,
|
||||
);
|
||||
@@ -530,32 +530,32 @@ impl ThreadStore {
|
||||
});
|
||||
}
|
||||
// Enable all the tools from all context servers, but disable the ones that are explicitly disabled
|
||||
for (context_server_id, preset) in profile.context_servers {
|
||||
for (context_server_id, preset) in &profile.context_servers {
|
||||
self.tools.update(cx, |tools, cx| {
|
||||
tools.disable(
|
||||
ToolSource::ContextServer {
|
||||
id: context_server_id.into(),
|
||||
id: context_server_id.clone().into(),
|
||||
},
|
||||
&preset
|
||||
.tools
|
||||
.into_iter()
|
||||
.filter_map(|(tool, enabled)| (!enabled).then(|| tool))
|
||||
.iter()
|
||||
.filter_map(|(tool, enabled)| (!enabled).then(|| tool.clone()))
|
||||
.collect::<Vec<_>>(),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
}
|
||||
} else {
|
||||
for (context_server_id, preset) in profile.context_servers {
|
||||
for (context_server_id, preset) in &profile.context_servers {
|
||||
self.tools.update(cx, |tools, cx| {
|
||||
tools.enable(
|
||||
ToolSource::ContextServer {
|
||||
id: context_server_id.into(),
|
||||
id: context_server_id.clone().into(),
|
||||
},
|
||||
&preset
|
||||
.tools
|
||||
.into_iter()
|
||||
.filter_map(|(tool, enabled)| enabled.then(|| tool))
|
||||
.iter()
|
||||
.filter_map(|(tool, enabled)| enabled.then(|| tool.clone()))
|
||||
.collect::<Vec<_>>(),
|
||||
cx,
|
||||
)
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
use std::sync::OnceLock;
|
||||
|
||||
use collections::HashMap;
|
||||
use component::ComponentId;
|
||||
use gpui::{App, Entity, WeakEntity};
|
||||
use linkme::distributed_slice;
|
||||
use std::sync::OnceLock;
|
||||
use ui::{AnyElement, Component, ComponentScope, Window};
|
||||
use workspace::Workspace;
|
||||
|
||||
@@ -12,15 +12,9 @@ use crate::ActiveThread;
|
||||
pub type PreviewFn =
|
||||
fn(WeakEntity<Workspace>, Entity<ActiveThread>, &mut Window, &mut App) -> Option<AnyElement>;
|
||||
|
||||
pub struct AgentPreviewFn(fn() -> (ComponentId, PreviewFn));
|
||||
|
||||
impl AgentPreviewFn {
|
||||
pub const fn new(f: fn() -> (ComponentId, PreviewFn)) -> Self {
|
||||
Self(f)
|
||||
}
|
||||
}
|
||||
|
||||
inventory::collect!(AgentPreviewFn);
|
||||
/// Distributed slice for preview registration functions
|
||||
#[distributed_slice]
|
||||
pub static __ALL_AGENT_PREVIEWS: [fn() -> (ComponentId, PreviewFn)] = [..];
|
||||
|
||||
/// Trait that must be implemented by components that provide agent previews.
|
||||
pub trait AgentPreview: Component + Sized {
|
||||
@@ -42,14 +36,16 @@ pub trait AgentPreview: Component + Sized {
|
||||
#[macro_export]
|
||||
macro_rules! register_agent_preview {
|
||||
($type:ty) => {
|
||||
inventory::submit! {
|
||||
$crate::ui::preview::AgentPreviewFn::new(|| {
|
||||
(
|
||||
<$type as component::Component>::id(),
|
||||
<$type as $crate::ui::preview::AgentPreview>::agent_preview,
|
||||
)
|
||||
})
|
||||
}
|
||||
#[linkme::distributed_slice($crate::ui::preview::__ALL_AGENT_PREVIEWS)]
|
||||
static __REGISTER_AGENT_PREVIEW: fn() -> (
|
||||
component::ComponentId,
|
||||
$crate::ui::preview::PreviewFn,
|
||||
) = || {
|
||||
(
|
||||
<$type as component::Component>::id(),
|
||||
<$type as $crate::ui::preview::AgentPreview>::agent_preview,
|
||||
)
|
||||
};
|
||||
};
|
||||
}
|
||||
|
||||
@@ -60,8 +56,8 @@ static AGENT_PREVIEW_REGISTRY: OnceLock<HashMap<ComponentId, PreviewFn>> = OnceL
|
||||
fn get_or_init_registry() -> &'static HashMap<ComponentId, PreviewFn> {
|
||||
AGENT_PREVIEW_REGISTRY.get_or_init(|| {
|
||||
let mut map = HashMap::default();
|
||||
for register_fn in inventory::iter::<AgentPreviewFn>() {
|
||||
let (id, preview_fn) = (register_fn.0)();
|
||||
for register_fn in __ALL_AGENT_PREVIEWS.iter() {
|
||||
let (id, preview_fn) = register_fn();
|
||||
map.insert(id, preview_fn);
|
||||
}
|
||||
map
|
||||
|
||||
@@ -39,7 +39,7 @@ impl RenderOnce for UsageCallout {
|
||||
|
||||
let (title, message, button_text, url) = if is_limit_reached {
|
||||
match self.plan {
|
||||
Plan::ZedFree => (
|
||||
Plan::Free => (
|
||||
"Out of free prompts",
|
||||
"Upgrade to continue, wait for the next reset, or switch to API key."
|
||||
.to_string(),
|
||||
@@ -61,7 +61,7 @@ impl RenderOnce for UsageCallout {
|
||||
}
|
||||
} else {
|
||||
match self.plan {
|
||||
Plan::ZedFree => (
|
||||
Plan::Free => (
|
||||
"Reaching free plan limit soon",
|
||||
format!(
|
||||
"{remaining} remaining - Upgrade to increase limit, or switch providers",
|
||||
@@ -120,7 +120,7 @@ impl Component for UsageCallout {
|
||||
single_example(
|
||||
"Approaching limit (90%)",
|
||||
UsageCallout::new(
|
||||
Plan::ZedFree,
|
||||
Plan::Free,
|
||||
RequestUsage {
|
||||
limit: UsageLimit::Limited(50),
|
||||
amount: 45, // 90% of limit
|
||||
@@ -131,7 +131,7 @@ impl Component for UsageCallout {
|
||||
single_example(
|
||||
"Limit reached (100%)",
|
||||
UsageCallout::new(
|
||||
Plan::ZedFree,
|
||||
Plan::Free,
|
||||
RequestUsage {
|
||||
limit: UsageLimit::Limited(50),
|
||||
amount: 50, // 100% of limit
|
||||
|
||||
@@ -163,10 +163,8 @@ impl AskPassSession {
|
||||
#[cfg(unix)]
|
||||
fn get_shell_safe_zed_path() -> anyhow::Result<String> {
|
||||
let zed_path = std::env::current_exe()
|
||||
.context("Failed to determine current executable path for use in askpass")?
|
||||
.context("Failed to figure out current executable path for use in askpass")?
|
||||
.to_string_lossy()
|
||||
// see https://github.com/rust-lang/rust/issues/69343
|
||||
.trim_end_matches(" (deleted)")
|
||||
.to_string();
|
||||
|
||||
// NOTE: this was previously enabled, however, it caused errors when it shouldn't have
|
||||
|
||||
@@ -46,6 +46,7 @@ serde_json.workspace = true
|
||||
settings.workspace = true
|
||||
smallvec.workspace = true
|
||||
smol.workspace = true
|
||||
strum.workspace = true
|
||||
telemetry_events.workspace = true
|
||||
text.workspace = true
|
||||
theme.workspace = true
|
||||
|
||||
@@ -2,6 +2,7 @@ mod context;
|
||||
mod context_editor;
|
||||
mod context_history;
|
||||
mod context_store;
|
||||
mod patch;
|
||||
mod slash_command;
|
||||
mod slash_command_picker;
|
||||
|
||||
@@ -15,6 +16,7 @@ pub use crate::context::*;
|
||||
pub use crate::context_editor::*;
|
||||
pub use crate::context_history::*;
|
||||
pub use crate::context_store::*;
|
||||
pub use crate::patch::*;
|
||||
pub use crate::slash_command::*;
|
||||
|
||||
pub fn init(client: Arc<Client>, cx: &mut App) {
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
#[cfg(test)]
|
||||
mod context_tests;
|
||||
|
||||
use crate::patch::{AssistantEdit, AssistantPatch, AssistantPatchStatus};
|
||||
use anyhow::{Context as _, Result, anyhow, bail};
|
||||
use assistant_settings::AssistantSettings;
|
||||
use assistant_slash_command::{
|
||||
@@ -21,8 +22,8 @@ use language::{AnchorRangeExt, Bias, Buffer, LanguageRegistry, OffsetRangeExt, P
|
||||
use language_model::{
|
||||
LanguageModel, LanguageModelCacheConfiguration, LanguageModelCompletionEvent,
|
||||
LanguageModelImage, LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage,
|
||||
LanguageModelToolUseId, MessageContent, PaymentRequiredError, Role, StopReason,
|
||||
report_assistant_event,
|
||||
LanguageModelToolUseId, MaxMonthlySpendReachedError, MessageContent, PaymentRequiredError,
|
||||
Role, StopReason, report_assistant_event,
|
||||
};
|
||||
use open_ai::Model as OpenAiModel;
|
||||
use paths::contexts_dir;
|
||||
@@ -36,6 +37,7 @@ use std::{
|
||||
iter, mem,
|
||||
ops::Range,
|
||||
path::Path,
|
||||
str::FromStr as _,
|
||||
sync::Arc,
|
||||
time::{Duration, Instant},
|
||||
};
|
||||
@@ -120,6 +122,14 @@ impl MessageStatus {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
|
||||
pub enum RequestType {
|
||||
/// Request a normal chat response from the model.
|
||||
Chat,
|
||||
/// Add a preamble to the message, which tells the model to return a structured response that suggests edits.
|
||||
SuggestEdits,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub enum ContextOperation {
|
||||
InsertMessage {
|
||||
@@ -447,12 +457,17 @@ impl ContextOperation {
|
||||
pub enum ContextEvent {
|
||||
ShowAssistError(SharedString),
|
||||
ShowPaymentRequiredError,
|
||||
ShowMaxMonthlySpendReachedError,
|
||||
MessagesEdited,
|
||||
SummaryChanged,
|
||||
SummaryGenerated,
|
||||
StreamedCompletion,
|
||||
StartedThoughtProcess(Range<language::Anchor>),
|
||||
EndedThoughtProcess(language::Anchor),
|
||||
PatchesUpdated {
|
||||
removed: Vec<Range<language::Anchor>>,
|
||||
updated: Vec<Range<language::Anchor>>,
|
||||
},
|
||||
InvokedSlashCommandChanged {
|
||||
command_id: InvokedSlashCommandId,
|
||||
},
|
||||
@@ -652,6 +667,26 @@ struct PendingCompletion {
|
||||
#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)]
|
||||
pub struct InvokedSlashCommandId(clock::Lamport);
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct XmlTag {
|
||||
pub kind: XmlTagKind,
|
||||
pub range: Range<text::Anchor>,
|
||||
pub is_open_tag: bool,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, strum::EnumString, PartialEq, Eq, strum::AsRefStr)]
|
||||
#[strum(serialize_all = "snake_case")]
|
||||
pub enum XmlTagKind {
|
||||
Patch,
|
||||
Title,
|
||||
Edit,
|
||||
Path,
|
||||
Description,
|
||||
OldText,
|
||||
NewText,
|
||||
Operation,
|
||||
}
|
||||
|
||||
pub struct AssistantContext {
|
||||
id: ContextId,
|
||||
timestamp: clock::Lamport,
|
||||
@@ -680,6 +715,8 @@ pub struct AssistantContext {
|
||||
_subscriptions: Vec<Subscription>,
|
||||
telemetry: Option<Arc<Telemetry>>,
|
||||
language_registry: Arc<LanguageRegistry>,
|
||||
patches: Vec<AssistantPatch>,
|
||||
xml_tags: Vec<XmlTag>,
|
||||
project: Option<Entity<Project>>,
|
||||
prompt_builder: Arc<PromptBuilder>,
|
||||
}
|
||||
@@ -694,6 +731,18 @@ impl ContextAnnotation for ParsedSlashCommand {
|
||||
}
|
||||
}
|
||||
|
||||
impl ContextAnnotation for AssistantPatch {
|
||||
fn range(&self) -> &Range<language::Anchor> {
|
||||
&self.range
|
||||
}
|
||||
}
|
||||
|
||||
impl ContextAnnotation for XmlTag {
|
||||
fn range(&self) -> &Range<language::Anchor> {
|
||||
&self.range
|
||||
}
|
||||
}
|
||||
|
||||
impl EventEmitter<ContextEvent> for AssistantContext {}
|
||||
|
||||
impl AssistantContext {
|
||||
@@ -770,6 +819,8 @@ impl AssistantContext {
|
||||
project,
|
||||
language_registry,
|
||||
slash_commands,
|
||||
patches: Vec::new(),
|
||||
xml_tags: Vec::new(),
|
||||
prompt_builder,
|
||||
};
|
||||
|
||||
@@ -1165,6 +1216,48 @@ impl AssistantContext {
|
||||
&self.summary
|
||||
}
|
||||
|
||||
pub fn patch_containing(&self, position: Point, cx: &App) -> Option<&AssistantPatch> {
|
||||
let buffer = self.buffer.read(cx);
|
||||
let index = self.patches.binary_search_by(|patch| {
|
||||
let patch_range = patch.range.to_point(&buffer);
|
||||
if position < patch_range.start {
|
||||
Ordering::Greater
|
||||
} else if position > patch_range.end {
|
||||
Ordering::Less
|
||||
} else {
|
||||
Ordering::Equal
|
||||
}
|
||||
});
|
||||
if let Ok(ix) = index {
|
||||
Some(&self.patches[ix])
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
pub fn patch_ranges(&self) -> impl Iterator<Item = Range<language::Anchor>> + '_ {
|
||||
self.patches.iter().map(|patch| patch.range.clone())
|
||||
}
|
||||
|
||||
pub fn patch_for_range(
|
||||
&self,
|
||||
range: &Range<language::Anchor>,
|
||||
cx: &App,
|
||||
) -> Option<&AssistantPatch> {
|
||||
let buffer = self.buffer.read(cx);
|
||||
let index = self.patch_index_for_range(range, buffer).ok()?;
|
||||
Some(&self.patches[index])
|
||||
}
|
||||
|
||||
fn patch_index_for_range(
|
||||
&self,
|
||||
tagged_range: &Range<text::Anchor>,
|
||||
buffer: &text::BufferSnapshot,
|
||||
) -> Result<usize, usize> {
|
||||
self.patches
|
||||
.binary_search_by(|probe| probe.range.cmp(&tagged_range, buffer))
|
||||
}
|
||||
|
||||
pub fn parsed_slash_commands(&self) -> &[ParsedSlashCommand] {
|
||||
&self.parsed_slash_commands
|
||||
}
|
||||
@@ -1244,7 +1337,7 @@ impl AssistantContext {
|
||||
let Some(model) = LanguageModelRegistry::read_global(cx).default_model() else {
|
||||
return;
|
||||
};
|
||||
let request = self.to_completion_request(Some(&model.model), cx);
|
||||
let request = self.to_completion_request(Some(&model.model), RequestType::Chat, cx);
|
||||
let debounce = self.token_count.is_some();
|
||||
self.pending_token_count = cx.spawn(async move |this, cx| {
|
||||
async move {
|
||||
@@ -1390,7 +1483,7 @@ impl AssistantContext {
|
||||
}
|
||||
|
||||
let request = {
|
||||
let mut req = self.to_completion_request(Some(&model), cx);
|
||||
let mut req = self.to_completion_request(Some(&model), RequestType::Chat, cx);
|
||||
// Skip the last message because it's likely to change and
|
||||
// therefore would be a waste to cache.
|
||||
req.messages.pop();
|
||||
@@ -1465,6 +1558,8 @@ impl AssistantContext {
|
||||
|
||||
let mut removed_parsed_slash_command_ranges = Vec::new();
|
||||
let mut updated_parsed_slash_commands = Vec::new();
|
||||
let mut removed_patches = Vec::new();
|
||||
let mut updated_patches = Vec::new();
|
||||
while let Some(mut row_range) = row_ranges.next() {
|
||||
while let Some(next_row_range) = row_ranges.peek() {
|
||||
if row_range.end >= next_row_range.start {
|
||||
@@ -1489,6 +1584,13 @@ impl AssistantContext {
|
||||
cx,
|
||||
);
|
||||
self.invalidate_pending_slash_commands(&buffer, cx);
|
||||
self.reparse_patches_in_range(
|
||||
start..end,
|
||||
&buffer,
|
||||
&mut updated_patches,
|
||||
&mut removed_patches,
|
||||
cx,
|
||||
);
|
||||
}
|
||||
|
||||
if !updated_parsed_slash_commands.is_empty()
|
||||
@@ -1499,6 +1601,13 @@ impl AssistantContext {
|
||||
updated: updated_parsed_slash_commands,
|
||||
});
|
||||
}
|
||||
|
||||
if !updated_patches.is_empty() || !removed_patches.is_empty() {
|
||||
cx.emit(ContextEvent::PatchesUpdated {
|
||||
removed: removed_patches,
|
||||
updated: updated_patches,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
fn reparse_slash_commands_in_range(
|
||||
@@ -1589,6 +1698,267 @@ impl AssistantContext {
|
||||
}
|
||||
}
|
||||
|
||||
fn reparse_patches_in_range(
|
||||
&mut self,
|
||||
range: Range<text::Anchor>,
|
||||
buffer: &BufferSnapshot,
|
||||
updated: &mut Vec<Range<text::Anchor>>,
|
||||
removed: &mut Vec<Range<text::Anchor>>,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
// Rebuild the XML tags in the edited range.
|
||||
let intersecting_tags_range =
|
||||
self.indices_intersecting_buffer_range(&self.xml_tags, range.clone(), cx);
|
||||
let new_tags = self.parse_xml_tags_in_range(buffer, range.clone(), cx);
|
||||
self.xml_tags
|
||||
.splice(intersecting_tags_range.clone(), new_tags);
|
||||
|
||||
// Find which patches intersect the changed range.
|
||||
let intersecting_patches_range =
|
||||
self.indices_intersecting_buffer_range(&self.patches, range.clone(), cx);
|
||||
|
||||
// Reparse all tags after the last unchanged patch before the change.
|
||||
let mut tags_start_ix = 0;
|
||||
if let Some(preceding_unchanged_patch) =
|
||||
self.patches[..intersecting_patches_range.start].last()
|
||||
{
|
||||
tags_start_ix = match self.xml_tags.binary_search_by(|tag| {
|
||||
tag.range
|
||||
.start
|
||||
.cmp(&preceding_unchanged_patch.range.end, buffer)
|
||||
.then(Ordering::Less)
|
||||
}) {
|
||||
Ok(ix) | Err(ix) => ix,
|
||||
};
|
||||
}
|
||||
|
||||
// Rebuild the patches in the range.
|
||||
let new_patches = self.parse_patches(tags_start_ix, range.end, buffer, cx);
|
||||
updated.extend(new_patches.iter().map(|patch| patch.range.clone()));
|
||||
let removed_patches = self.patches.splice(intersecting_patches_range, new_patches);
|
||||
removed.extend(
|
||||
removed_patches
|
||||
.map(|patch| patch.range)
|
||||
.filter(|range| !updated.contains(&range)),
|
||||
);
|
||||
}
|
||||
|
||||
fn parse_xml_tags_in_range(
|
||||
&self,
|
||||
buffer: &BufferSnapshot,
|
||||
range: Range<text::Anchor>,
|
||||
cx: &App,
|
||||
) -> Vec<XmlTag> {
|
||||
let mut messages = self.messages(cx).peekable();
|
||||
|
||||
let mut tags = Vec::new();
|
||||
let mut lines = buffer.text_for_range(range).lines();
|
||||
let mut offset = lines.offset();
|
||||
|
||||
while let Some(line) = lines.next() {
|
||||
while let Some(message) = messages.peek() {
|
||||
if offset < message.offset_range.end {
|
||||
break;
|
||||
} else {
|
||||
messages.next();
|
||||
}
|
||||
}
|
||||
|
||||
let is_assistant_message = messages
|
||||
.peek()
|
||||
.map_or(false, |message| message.role == Role::Assistant);
|
||||
if is_assistant_message {
|
||||
for (start_ix, _) in line.match_indices('<') {
|
||||
let mut name_start_ix = start_ix + 1;
|
||||
let closing_bracket_ix = line[start_ix..].find('>').map(|i| start_ix + i);
|
||||
if let Some(closing_bracket_ix) = closing_bracket_ix {
|
||||
let end_ix = closing_bracket_ix + 1;
|
||||
let mut is_open_tag = true;
|
||||
if line[name_start_ix..closing_bracket_ix].starts_with('/') {
|
||||
name_start_ix += 1;
|
||||
is_open_tag = false;
|
||||
}
|
||||
let tag_inner = &line[name_start_ix..closing_bracket_ix];
|
||||
let tag_name_len = tag_inner
|
||||
.find(|c: char| c.is_whitespace())
|
||||
.unwrap_or(tag_inner.len());
|
||||
if let Ok(kind) = XmlTagKind::from_str(&tag_inner[..tag_name_len]) {
|
||||
tags.push(XmlTag {
|
||||
range: buffer.anchor_after(offset + start_ix)
|
||||
..buffer.anchor_before(offset + end_ix),
|
||||
is_open_tag,
|
||||
kind,
|
||||
});
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
offset = lines.offset();
|
||||
}
|
||||
tags
|
||||
}
|
||||
|
||||
fn parse_patches(
|
||||
&mut self,
|
||||
tags_start_ix: usize,
|
||||
buffer_end: text::Anchor,
|
||||
buffer: &BufferSnapshot,
|
||||
cx: &App,
|
||||
) -> Vec<AssistantPatch> {
|
||||
let mut new_patches = Vec::new();
|
||||
let mut pending_patch = None;
|
||||
let mut patch_tag_depth = 0;
|
||||
let mut tags = self.xml_tags[tags_start_ix..].iter().peekable();
|
||||
'tags: while let Some(tag) = tags.next() {
|
||||
if tag.range.start.cmp(&buffer_end, buffer).is_gt() && patch_tag_depth == 0 {
|
||||
break;
|
||||
}
|
||||
|
||||
if tag.kind == XmlTagKind::Patch && tag.is_open_tag {
|
||||
patch_tag_depth += 1;
|
||||
let patch_start = tag.range.start;
|
||||
let mut edits = Vec::<Result<AssistantEdit>>::new();
|
||||
let mut patch = AssistantPatch {
|
||||
range: patch_start..patch_start,
|
||||
title: String::new().into(),
|
||||
edits: Default::default(),
|
||||
status: crate::AssistantPatchStatus::Pending,
|
||||
};
|
||||
|
||||
while let Some(tag) = tags.next() {
|
||||
if tag.kind == XmlTagKind::Patch && !tag.is_open_tag {
|
||||
patch_tag_depth -= 1;
|
||||
if patch_tag_depth == 0 {
|
||||
patch.range.end = tag.range.end;
|
||||
|
||||
// Include the line immediately after this <patch> tag if it's empty.
|
||||
let patch_end_offset = patch.range.end.to_offset(buffer);
|
||||
let mut patch_end_chars = buffer.chars_at(patch_end_offset);
|
||||
if patch_end_chars.next() == Some('\n')
|
||||
&& patch_end_chars.next().map_or(true, |ch| ch == '\n')
|
||||
{
|
||||
let messages = self.messages_for_offsets(
|
||||
[patch_end_offset, patch_end_offset + 1],
|
||||
cx,
|
||||
);
|
||||
if messages.len() == 1 {
|
||||
patch.range.end = buffer.anchor_before(patch_end_offset + 1);
|
||||
}
|
||||
}
|
||||
|
||||
edits.sort_unstable_by(|a, b| {
|
||||
if let (Ok(a), Ok(b)) = (a, b) {
|
||||
a.path.cmp(&b.path)
|
||||
} else {
|
||||
Ordering::Equal
|
||||
}
|
||||
});
|
||||
patch.edits = edits.into();
|
||||
patch.status = AssistantPatchStatus::Ready;
|
||||
new_patches.push(patch);
|
||||
continue 'tags;
|
||||
}
|
||||
}
|
||||
|
||||
if tag.kind == XmlTagKind::Title && tag.is_open_tag {
|
||||
let content_start = tag.range.end;
|
||||
while let Some(tag) = tags.next() {
|
||||
if tag.kind == XmlTagKind::Title && !tag.is_open_tag {
|
||||
let content_end = tag.range.start;
|
||||
patch.title =
|
||||
trimmed_text_in_range(buffer, content_start..content_end)
|
||||
.into();
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if tag.kind == XmlTagKind::Edit && tag.is_open_tag {
|
||||
let mut path = None;
|
||||
let mut old_text = None;
|
||||
let mut new_text = None;
|
||||
let mut operation = None;
|
||||
let mut description = None;
|
||||
|
||||
while let Some(tag) = tags.next() {
|
||||
if tag.kind == XmlTagKind::Edit && !tag.is_open_tag {
|
||||
edits.push(AssistantEdit::new(
|
||||
path,
|
||||
operation,
|
||||
old_text,
|
||||
new_text,
|
||||
description,
|
||||
));
|
||||
break;
|
||||
}
|
||||
|
||||
if tag.is_open_tag
|
||||
&& [
|
||||
XmlTagKind::Path,
|
||||
XmlTagKind::OldText,
|
||||
XmlTagKind::NewText,
|
||||
XmlTagKind::Operation,
|
||||
XmlTagKind::Description,
|
||||
]
|
||||
.contains(&tag.kind)
|
||||
{
|
||||
let kind = tag.kind;
|
||||
let content_start = tag.range.end;
|
||||
if let Some(tag) = tags.peek() {
|
||||
if tag.kind == kind && !tag.is_open_tag {
|
||||
let tag = tags.next().unwrap();
|
||||
let content_end = tag.range.start;
|
||||
let content = trimmed_text_in_range(
|
||||
buffer,
|
||||
content_start..content_end,
|
||||
);
|
||||
match kind {
|
||||
XmlTagKind::Path => path = Some(content),
|
||||
XmlTagKind::Operation => operation = Some(content),
|
||||
XmlTagKind::OldText => {
|
||||
old_text = Some(content).filter(|s| !s.is_empty())
|
||||
}
|
||||
XmlTagKind::NewText => {
|
||||
new_text = Some(content).filter(|s| !s.is_empty())
|
||||
}
|
||||
XmlTagKind::Description => {
|
||||
description =
|
||||
Some(content).filter(|s| !s.is_empty())
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
patch.edits = edits.into();
|
||||
pending_patch = Some(patch);
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(mut pending_patch) = pending_patch {
|
||||
let patch_start = pending_patch.range.start.to_offset(buffer);
|
||||
if let Some(message) = self.message_for_offset(patch_start, cx) {
|
||||
if message.anchor_range.end == text::Anchor::MAX {
|
||||
pending_patch.range.end = text::Anchor::MAX;
|
||||
} else {
|
||||
let message_end = buffer.anchor_after(message.offset_range.end - 1);
|
||||
pending_patch.range.end = message_end;
|
||||
}
|
||||
} else {
|
||||
pending_patch.range.end = text::Anchor::MAX;
|
||||
}
|
||||
|
||||
new_patches.push(pending_patch);
|
||||
}
|
||||
|
||||
new_patches
|
||||
}
|
||||
|
||||
pub fn pending_command_for_position(
|
||||
&mut self,
|
||||
position: language::Anchor,
|
||||
@@ -1993,7 +2363,11 @@ impl AssistantContext {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn assist(&mut self, cx: &mut Context<Self>) -> Option<MessageAnchor> {
|
||||
pub fn assist(
|
||||
&mut self,
|
||||
request_type: RequestType,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Option<MessageAnchor> {
|
||||
let model_registry = LanguageModelRegistry::read_global(cx);
|
||||
let model = model_registry.default_model()?;
|
||||
let last_message_id = self.get_last_valid_message_id(cx)?;
|
||||
@@ -2008,7 +2382,7 @@ impl AssistantContext {
|
||||
// Compute which messages to cache, including the last one.
|
||||
self.mark_cache_anchors(&model.cache_configuration(), false, cx);
|
||||
|
||||
let request = self.to_completion_request(Some(&model), cx);
|
||||
let request = self.to_completion_request(Some(&model), request_type, cx);
|
||||
|
||||
let assistant_message = self
|
||||
.insert_message_after(last_message_id, Role::Assistant, MessageStatus::Pending, cx)
|
||||
@@ -2154,6 +2528,12 @@ impl AssistantContext {
|
||||
metadata.status = MessageStatus::Canceled;
|
||||
});
|
||||
Some(error.to_string())
|
||||
} else if error.is::<MaxMonthlySpendReachedError>() {
|
||||
cx.emit(ContextEvent::ShowMaxMonthlySpendReachedError);
|
||||
this.update_metadata(assistant_message_id, cx, |metadata| {
|
||||
metadata.status = MessageStatus::Canceled;
|
||||
});
|
||||
Some(error.to_string())
|
||||
} else {
|
||||
let error_message = error
|
||||
.chain()
|
||||
@@ -2243,6 +2623,7 @@ impl AssistantContext {
|
||||
pub fn to_completion_request(
|
||||
&self,
|
||||
model: Option<&Arc<dyn LanguageModel>>,
|
||||
request_type: RequestType,
|
||||
cx: &App,
|
||||
) -> LanguageModelRequest {
|
||||
let buffer = self.buffer.read(cx);
|
||||
@@ -2323,6 +2704,25 @@ impl AssistantContext {
|
||||
}
|
||||
}
|
||||
|
||||
if let RequestType::SuggestEdits = request_type {
|
||||
if let Ok(preamble) = self.prompt_builder.generate_suggest_edits_prompt() {
|
||||
let last_elem_index = completion_request.messages.len();
|
||||
|
||||
completion_request
|
||||
.messages
|
||||
.push(LanguageModelRequestMessage {
|
||||
role: Role::User,
|
||||
content: vec![MessageContent::Text(preamble)],
|
||||
cache: false,
|
||||
});
|
||||
|
||||
// The preamble message should be sent right before the last actual user message.
|
||||
completion_request
|
||||
.messages
|
||||
.swap(last_elem_index, last_elem_index.saturating_sub(1));
|
||||
}
|
||||
}
|
||||
|
||||
completion_request
|
||||
}
|
||||
|
||||
@@ -2357,6 +2757,17 @@ impl AssistantContext {
|
||||
ranges.push(message.anchor_range.clone());
|
||||
}
|
||||
}
|
||||
|
||||
let buffer = self.buffer.read(cx).text_snapshot();
|
||||
let mut updated = Vec::new();
|
||||
let mut removed = Vec::new();
|
||||
for range in ranges {
|
||||
self.reparse_patches_in_range(range, &buffer, &mut updated, &mut removed, cx);
|
||||
}
|
||||
|
||||
if !updated.is_empty() || !removed.is_empty() {
|
||||
cx.emit(ContextEvent::PatchesUpdated { removed, updated })
|
||||
}
|
||||
}
|
||||
|
||||
pub fn update_metadata(
|
||||
@@ -2634,7 +3045,7 @@ impl AssistantContext {
|
||||
return;
|
||||
}
|
||||
|
||||
let mut request = self.to_completion_request(Some(&model.model), cx);
|
||||
let mut request = self.to_completion_request(Some(&model.model), RequestType::Chat, cx);
|
||||
request.messages.push(LanguageModelRequestMessage {
|
||||
role: Role::User,
|
||||
content: vec![
|
||||
@@ -2906,6 +3317,24 @@ impl AssistantContext {
|
||||
}
|
||||
}
|
||||
|
||||
fn trimmed_text_in_range(buffer: &BufferSnapshot, range: Range<text::Anchor>) -> String {
|
||||
let mut is_start = true;
|
||||
let mut content = buffer
|
||||
.text_for_range(range)
|
||||
.map(|mut chunk| {
|
||||
if is_start {
|
||||
chunk = chunk.trim_start_matches('\n');
|
||||
if !chunk.is_empty() {
|
||||
is_start = false;
|
||||
}
|
||||
}
|
||||
chunk
|
||||
})
|
||||
.collect::<String>();
|
||||
content.truncate(content.trim_end().len());
|
||||
content
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub struct ContextVersion {
|
||||
context: clock::Global,
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
use crate::{
|
||||
AssistantContext, CacheStatus, ContextEvent, ContextId, ContextOperation, ContextSummary,
|
||||
InvokedSlashCommandId, MessageCacheMetadata, MessageId, MessageStatus,
|
||||
AssistantContext, AssistantEdit, AssistantEditKind, CacheStatus, ContextEvent, ContextId,
|
||||
ContextOperation, ContextSummary, InvokedSlashCommandId, MessageCacheMetadata, MessageId,
|
||||
MessageStatus, RequestType,
|
||||
};
|
||||
use anyhow::Result;
|
||||
use assistant_slash_command::{
|
||||
@@ -35,10 +36,13 @@ use std::{
|
||||
rc::Rc,
|
||||
sync::{Arc, atomic::AtomicBool},
|
||||
};
|
||||
use text::{ReplicaId, ToOffset, network::Network};
|
||||
use text::{OffsetRangeExt as _, ReplicaId, ToOffset, network::Network};
|
||||
use ui::{IconName, Window};
|
||||
use unindent::Unindent;
|
||||
use util::RandomCharIter;
|
||||
use util::{
|
||||
RandomCharIter,
|
||||
test::{generate_marked_text, marked_text_ranges},
|
||||
};
|
||||
use workspace::Workspace;
|
||||
|
||||
#[gpui::test]
|
||||
@@ -664,6 +668,401 @@ async fn test_slash_commands(cx: &mut TestAppContext) {
|
||||
}
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_workflow_step_parsing(cx: &mut TestAppContext) {
|
||||
cx.update(|cx| {
|
||||
init_test(cx);
|
||||
cx.update_global(|settings_store: &mut SettingsStore, cx| {
|
||||
settings_store
|
||||
.set_user_settings(
|
||||
r#"{ "assistant": { "enable_experimental_live_diffs": true } }"#,
|
||||
cx,
|
||||
)
|
||||
.unwrap()
|
||||
})
|
||||
});
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
let project = Project::test(fs, [Path::new("/root")], cx).await;
|
||||
|
||||
let registry = Arc::new(LanguageRegistry::test(cx.executor()));
|
||||
|
||||
// Create a new context
|
||||
let prompt_builder = Arc::new(PromptBuilder::new(None).unwrap());
|
||||
let context = cx.new(|cx| {
|
||||
AssistantContext::local(
|
||||
registry.clone(),
|
||||
Some(project),
|
||||
None,
|
||||
prompt_builder.clone(),
|
||||
Arc::new(SlashCommandWorkingSet::default()),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
// Insert an assistant message to simulate a response.
|
||||
let assistant_message_id = context.update(cx, |context, cx| {
|
||||
let user_message_id = context.messages(cx).next().unwrap().id;
|
||||
context
|
||||
.insert_message_after(user_message_id, Role::Assistant, MessageStatus::Done, cx)
|
||||
.unwrap()
|
||||
.id
|
||||
});
|
||||
|
||||
// No edit tags
|
||||
edit(
|
||||
&context,
|
||||
"
|
||||
|
||||
«one
|
||||
two
|
||||
»",
|
||||
cx,
|
||||
);
|
||||
expect_patches(
|
||||
&context,
|
||||
"
|
||||
|
||||
one
|
||||
two
|
||||
",
|
||||
&[],
|
||||
cx,
|
||||
);
|
||||
|
||||
// Partial edit step tag is added
|
||||
edit(
|
||||
&context,
|
||||
"
|
||||
|
||||
one
|
||||
two
|
||||
«
|
||||
<patch»",
|
||||
cx,
|
||||
);
|
||||
expect_patches(
|
||||
&context,
|
||||
"
|
||||
|
||||
one
|
||||
two
|
||||
|
||||
<patch",
|
||||
&[],
|
||||
cx,
|
||||
);
|
||||
|
||||
// The rest of the step tag is added. The unclosed
|
||||
// step is treated as incomplete.
|
||||
edit(
|
||||
&context,
|
||||
"
|
||||
|
||||
one
|
||||
two
|
||||
|
||||
<patch«>
|
||||
<edit>»",
|
||||
cx,
|
||||
);
|
||||
expect_patches(
|
||||
&context,
|
||||
"
|
||||
|
||||
one
|
||||
two
|
||||
|
||||
«<patch>
|
||||
<edit>»",
|
||||
&[&[]],
|
||||
cx,
|
||||
);
|
||||
|
||||
// The full patch is added
|
||||
edit(
|
||||
&context,
|
||||
"
|
||||
|
||||
one
|
||||
two
|
||||
|
||||
<patch>
|
||||
<edit>«
|
||||
<description>add a `two` function</description>
|
||||
<path>src/lib.rs</path>
|
||||
<operation>insert_after</operation>
|
||||
<old_text>fn one</old_text>
|
||||
<new_text>
|
||||
fn two() {}
|
||||
</new_text>
|
||||
</edit>
|
||||
</patch>
|
||||
|
||||
also,»",
|
||||
cx,
|
||||
);
|
||||
expect_patches(
|
||||
&context,
|
||||
"
|
||||
|
||||
one
|
||||
two
|
||||
|
||||
«<patch>
|
||||
<edit>
|
||||
<description>add a `two` function</description>
|
||||
<path>src/lib.rs</path>
|
||||
<operation>insert_after</operation>
|
||||
<old_text>fn one</old_text>
|
||||
<new_text>
|
||||
fn two() {}
|
||||
</new_text>
|
||||
</edit>
|
||||
</patch>
|
||||
»
|
||||
also,",
|
||||
&[&[AssistantEdit {
|
||||
path: "src/lib.rs".into(),
|
||||
kind: AssistantEditKind::InsertAfter {
|
||||
old_text: "fn one".into(),
|
||||
new_text: "fn two() {}".into(),
|
||||
description: Some("add a `two` function".into()),
|
||||
},
|
||||
}]],
|
||||
cx,
|
||||
);
|
||||
|
||||
// The step is manually edited.
|
||||
edit(
|
||||
&context,
|
||||
"
|
||||
|
||||
one
|
||||
two
|
||||
|
||||
<patch>
|
||||
<edit>
|
||||
<description>add a `two` function</description>
|
||||
<path>src/lib.rs</path>
|
||||
<operation>insert_after</operation>
|
||||
<old_text>«fn zero»</old_text>
|
||||
<new_text>
|
||||
fn two() {}
|
||||
</new_text>
|
||||
</edit>
|
||||
</patch>
|
||||
|
||||
also,",
|
||||
cx,
|
||||
);
|
||||
expect_patches(
|
||||
&context,
|
||||
"
|
||||
|
||||
one
|
||||
two
|
||||
|
||||
«<patch>
|
||||
<edit>
|
||||
<description>add a `two` function</description>
|
||||
<path>src/lib.rs</path>
|
||||
<operation>insert_after</operation>
|
||||
<old_text>fn zero</old_text>
|
||||
<new_text>
|
||||
fn two() {}
|
||||
</new_text>
|
||||
</edit>
|
||||
</patch>
|
||||
»
|
||||
also,",
|
||||
&[&[AssistantEdit {
|
||||
path: "src/lib.rs".into(),
|
||||
kind: AssistantEditKind::InsertAfter {
|
||||
old_text: "fn zero".into(),
|
||||
new_text: "fn two() {}".into(),
|
||||
description: Some("add a `two` function".into()),
|
||||
},
|
||||
}]],
|
||||
cx,
|
||||
);
|
||||
|
||||
// When setting the message role to User, the steps are cleared.
|
||||
context.update(cx, |context, cx| {
|
||||
context.cycle_message_roles(HashSet::from_iter([assistant_message_id]), cx);
|
||||
context.cycle_message_roles(HashSet::from_iter([assistant_message_id]), cx);
|
||||
});
|
||||
expect_patches(
|
||||
&context,
|
||||
"
|
||||
|
||||
one
|
||||
two
|
||||
|
||||
<patch>
|
||||
<edit>
|
||||
<description>add a `two` function</description>
|
||||
<path>src/lib.rs</path>
|
||||
<operation>insert_after</operation>
|
||||
<old_text>fn zero</old_text>
|
||||
<new_text>
|
||||
fn two() {}
|
||||
</new_text>
|
||||
</edit>
|
||||
</patch>
|
||||
|
||||
also,",
|
||||
&[],
|
||||
cx,
|
||||
);
|
||||
|
||||
// When setting the message role back to Assistant, the steps are reparsed.
|
||||
context.update(cx, |context, cx| {
|
||||
context.cycle_message_roles(HashSet::from_iter([assistant_message_id]), cx);
|
||||
});
|
||||
expect_patches(
|
||||
&context,
|
||||
"
|
||||
|
||||
one
|
||||
two
|
||||
|
||||
«<patch>
|
||||
<edit>
|
||||
<description>add a `two` function</description>
|
||||
<path>src/lib.rs</path>
|
||||
<operation>insert_after</operation>
|
||||
<old_text>fn zero</old_text>
|
||||
<new_text>
|
||||
fn two() {}
|
||||
</new_text>
|
||||
</edit>
|
||||
</patch>
|
||||
»
|
||||
also,",
|
||||
&[&[AssistantEdit {
|
||||
path: "src/lib.rs".into(),
|
||||
kind: AssistantEditKind::InsertAfter {
|
||||
old_text: "fn zero".into(),
|
||||
new_text: "fn two() {}".into(),
|
||||
description: Some("add a `two` function".into()),
|
||||
},
|
||||
}]],
|
||||
cx,
|
||||
);
|
||||
|
||||
// Ensure steps are re-parsed when deserializing.
|
||||
let serialized_context = context.read_with(cx, |context, cx| context.serialize(cx));
|
||||
let deserialized_context = cx.new(|cx| {
|
||||
AssistantContext::deserialize(
|
||||
serialized_context,
|
||||
Path::new("").into(),
|
||||
registry.clone(),
|
||||
prompt_builder.clone(),
|
||||
Arc::new(SlashCommandWorkingSet::default()),
|
||||
None,
|
||||
None,
|
||||
cx,
|
||||
)
|
||||
});
|
||||
expect_patches(
|
||||
&deserialized_context,
|
||||
"
|
||||
|
||||
one
|
||||
two
|
||||
|
||||
«<patch>
|
||||
<edit>
|
||||
<description>add a `two` function</description>
|
||||
<path>src/lib.rs</path>
|
||||
<operation>insert_after</operation>
|
||||
<old_text>fn zero</old_text>
|
||||
<new_text>
|
||||
fn two() {}
|
||||
</new_text>
|
||||
</edit>
|
||||
</patch>
|
||||
»
|
||||
also,",
|
||||
&[&[AssistantEdit {
|
||||
path: "src/lib.rs".into(),
|
||||
kind: AssistantEditKind::InsertAfter {
|
||||
old_text: "fn zero".into(),
|
||||
new_text: "fn two() {}".into(),
|
||||
description: Some("add a `two` function".into()),
|
||||
},
|
||||
}]],
|
||||
cx,
|
||||
);
|
||||
|
||||
fn edit(
|
||||
context: &Entity<AssistantContext>,
|
||||
new_text_marked_with_edits: &str,
|
||||
cx: &mut TestAppContext,
|
||||
) {
|
||||
context.update(cx, |context, cx| {
|
||||
context.buffer.update(cx, |buffer, cx| {
|
||||
buffer.edit_via_marked_text(&new_text_marked_with_edits.unindent(), None, cx);
|
||||
});
|
||||
});
|
||||
cx.executor().run_until_parked();
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
fn expect_patches(
|
||||
context: &Entity<AssistantContext>,
|
||||
expected_marked_text: &str,
|
||||
expected_suggestions: &[&[AssistantEdit]],
|
||||
cx: &mut TestAppContext,
|
||||
) {
|
||||
let expected_marked_text = expected_marked_text.unindent();
|
||||
let (expected_text, _) = marked_text_ranges(&expected_marked_text, false);
|
||||
|
||||
let (buffer_text, ranges, patches) = context.update(cx, |context, cx| {
|
||||
context.buffer.read_with(cx, |buffer, _| {
|
||||
let ranges = context
|
||||
.patches
|
||||
.iter()
|
||||
.map(|entry| entry.range.to_offset(buffer))
|
||||
.collect::<Vec<_>>();
|
||||
(
|
||||
buffer.text(),
|
||||
ranges,
|
||||
context
|
||||
.patches
|
||||
.iter()
|
||||
.map(|step| step.edits.clone())
|
||||
.collect::<Vec<_>>(),
|
||||
)
|
||||
})
|
||||
});
|
||||
|
||||
assert_eq!(buffer_text, expected_text);
|
||||
|
||||
let actual_marked_text = generate_marked_text(&expected_text, &ranges, false);
|
||||
assert_eq!(actual_marked_text, expected_marked_text);
|
||||
|
||||
assert_eq!(
|
||||
patches
|
||||
.iter()
|
||||
.map(|patch| {
|
||||
patch
|
||||
.iter()
|
||||
.map(|edit| {
|
||||
let edit = edit.as_ref().unwrap();
|
||||
AssistantEdit {
|
||||
path: edit.path.clone(),
|
||||
kind: edit.kind.clone(),
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
})
|
||||
.collect::<Vec<_>>(),
|
||||
expected_suggestions
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_serialization(cx: &mut TestAppContext) {
|
||||
cx.update(init_test);
|
||||
@@ -1199,7 +1598,7 @@ async fn test_summarization(cx: &mut TestAppContext) {
|
||||
|
||||
// Send a message
|
||||
context.update(cx, |context, cx| {
|
||||
context.assist(cx);
|
||||
context.assist(RequestType::Chat, cx);
|
||||
});
|
||||
|
||||
simulate_successful_response(&fake_model, cx);
|
||||
@@ -1254,7 +1653,7 @@ async fn test_thread_summary_error_retry(cx: &mut TestAppContext) {
|
||||
|
||||
// Sending another message should not trigger another summarize request
|
||||
context.update(cx, |context, cx| {
|
||||
context.assist(cx);
|
||||
context.assist(RequestType::Chat, cx);
|
||||
});
|
||||
|
||||
simulate_successful_response(&fake_model, cx);
|
||||
@@ -1297,7 +1696,7 @@ fn test_summarize_error(
|
||||
|
||||
// Send a message
|
||||
context.update(cx, |context, cx| {
|
||||
context.assist(cx);
|
||||
context.assist(RequestType::Chat, cx);
|
||||
});
|
||||
|
||||
simulate_successful_response(&model, cx);
|
||||
|
||||
File diff suppressed because it is too large
957 crates/assistant_context_editor/src/patch.rs (new file)
@@ -0,0 +1,957 @@
use anyhow::{Context as _, Result, anyhow};
use collections::HashMap;
use editor::ProposedChangesEditor;
use futures::{TryFutureExt as _, future};
use gpui::{App, AppContext as _, AsyncApp, Entity, SharedString};
use language::{AutoindentMode, Buffer, BufferSnapshot};
use project::{Project, ProjectPath};
use std::{cmp, ops::Range, path::Path, sync::Arc};
use text::{AnchorRangeExt as _, Bias, OffsetRangeExt as _, Point};

#[derive(Clone, Debug)]
pub struct AssistantPatch {
pub range: Range<language::Anchor>,
pub title: SharedString,
pub edits: Arc<[Result<AssistantEdit>]>,
pub status: AssistantPatchStatus,
}

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum AssistantPatchStatus {
Pending,
Ready,
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub struct AssistantEdit {
pub path: String,
pub kind: AssistantEditKind,
}

#[derive(Clone, Debug, PartialEq, Eq)]
pub enum AssistantEditKind {
Update {
old_text: String,
new_text: String,
description: Option<String>,
},
Create {
new_text: String,
description: Option<String>,
},
InsertBefore {
old_text: String,
new_text: String,
description: Option<String>,
},
InsertAfter {
old_text: String,
new_text: String,
description: Option<String>,
},
Delete {
old_text: String,
},
}

#[derive(Clone, Debug, Eq, PartialEq)]
|
||||
pub struct ResolvedPatch {
|
||||
pub edit_groups: HashMap<Entity<Buffer>, Vec<ResolvedEditGroup>>,
|
||||
pub errors: Vec<AssistantPatchResolutionError>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Eq, PartialEq)]
|
||||
pub struct ResolvedEditGroup {
|
||||
pub context_range: Range<language::Anchor>,
|
||||
pub edits: Vec<ResolvedEdit>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Eq, PartialEq)]
|
||||
pub struct ResolvedEdit {
|
||||
range: Range<language::Anchor>,
|
||||
new_text: String,
|
||||
description: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Eq, PartialEq)]
|
||||
pub struct AssistantPatchResolutionError {
|
||||
pub edit_ix: usize,
|
||||
pub message: String,
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
|
||||
enum SearchDirection {
|
||||
Up,
|
||||
Left,
|
||||
Diagonal,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
|
||||
struct SearchState {
|
||||
cost: u32,
|
||||
direction: SearchDirection,
|
||||
}
|
||||
|
||||
impl SearchState {
|
||||
fn new(cost: u32, direction: SearchDirection) -> Self {
|
||||
Self { cost, direction }
|
||||
}
|
||||
}
|
||||
|
||||
struct SearchMatrix {
|
||||
cols: usize,
|
||||
data: Vec<SearchState>,
|
||||
}
|
||||
|
||||
impl SearchMatrix {
|
||||
fn new(rows: usize, cols: usize) -> Self {
|
||||
SearchMatrix {
|
||||
cols,
|
||||
data: vec![SearchState::new(0, SearchDirection::Diagonal); rows * cols],
|
||||
}
|
||||
}
|
||||
|
||||
fn get(&self, row: usize, col: usize) -> SearchState {
|
||||
self.data[row * self.cols + col]
|
||||
}
|
||||
|
||||
fn set(&mut self, row: usize, col: usize, cost: SearchState) {
|
||||
self.data[row * self.cols + col] = cost;
|
||||
}
|
||||
}
|
||||
|
||||
impl ResolvedPatch {
|
||||
pub fn apply(&self, editor: &ProposedChangesEditor, cx: &mut App) {
|
||||
for (buffer, groups) in &self.edit_groups {
|
||||
let branch = editor.branch_buffer_for_base(buffer).unwrap();
|
||||
Self::apply_edit_groups(groups, &branch, cx);
|
||||
}
|
||||
editor.recalculate_all_buffer_diffs();
|
||||
}
|
||||
|
||||
fn apply_edit_groups(groups: &Vec<ResolvedEditGroup>, buffer: &Entity<Buffer>, cx: &mut App) {
|
||||
let mut edits = Vec::new();
|
||||
for group in groups {
|
||||
for suggestion in &group.edits {
|
||||
edits.push((suggestion.range.clone(), suggestion.new_text.clone()));
|
||||
}
|
||||
}
|
||||
buffer.update(cx, |buffer, cx| {
|
||||
buffer.edit(
|
||||
edits,
|
||||
Some(AutoindentMode::Block {
|
||||
original_indent_columns: Vec::new(),
|
||||
}),
|
||||
cx,
|
||||
);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
impl ResolvedEdit {
|
||||
pub fn try_merge(&mut self, other: &Self, buffer: &text::BufferSnapshot) -> bool {
|
||||
let range = &self.range;
|
||||
let other_range = &other.range;
|
||||
|
||||
// Don't merge if we don't contain the other suggestion.
|
||||
if range.start.cmp(&other_range.start, buffer).is_gt()
|
||||
|| range.end.cmp(&other_range.end, buffer).is_lt()
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
let other_offset_range = other_range.to_offset(buffer);
|
||||
let offset_range = range.to_offset(buffer);
|
||||
|
||||
// If the other range is empty at the start of this edit's range, combine the new text
|
||||
if other_offset_range.is_empty() && other_offset_range.start == offset_range.start {
|
||||
self.new_text = format!("{}\n{}", other.new_text, self.new_text);
|
||||
self.range.start = other_range.start;
|
||||
|
||||
if let Some((description, other_description)) =
|
||||
self.description.as_mut().zip(other.description.as_ref())
|
||||
{
|
||||
*description = format!("{}\n{}", other_description, description)
|
||||
}
|
||||
} else {
|
||||
if let Some((description, other_description)) =
|
||||
self.description.as_mut().zip(other.description.as_ref())
|
||||
{
|
||||
description.push('\n');
|
||||
description.push_str(other_description);
|
||||
}
|
||||
}
|
||||
|
||||
true
|
||||
}
|
||||
}
|
||||
|
||||
impl AssistantEdit {
|
||||
pub fn new(
|
||||
path: Option<String>,
|
||||
operation: Option<String>,
|
||||
old_text: Option<String>,
|
||||
new_text: Option<String>,
|
||||
description: Option<String>,
|
||||
) -> Result<Self> {
|
||||
let path = path.ok_or_else(|| anyhow!("missing path"))?;
|
||||
let operation = operation.ok_or_else(|| anyhow!("missing operation"))?;
|
||||
|
||||
let kind = match operation.as_str() {
|
||||
"update" => AssistantEditKind::Update {
|
||||
old_text: old_text.ok_or_else(|| anyhow!("missing old_text"))?,
|
||||
new_text: new_text.ok_or_else(|| anyhow!("missing new_text"))?,
|
||||
description,
|
||||
},
|
||||
"insert_before" => AssistantEditKind::InsertBefore {
|
||||
old_text: old_text.ok_or_else(|| anyhow!("missing old_text"))?,
|
||||
new_text: new_text.ok_or_else(|| anyhow!("missing new_text"))?,
|
||||
description,
|
||||
},
|
||||
"insert_after" => AssistantEditKind::InsertAfter {
|
||||
old_text: old_text.ok_or_else(|| anyhow!("missing old_text"))?,
|
||||
new_text: new_text.ok_or_else(|| anyhow!("missing new_text"))?,
|
||||
description,
|
||||
},
|
||||
"delete" => AssistantEditKind::Delete {
|
||||
old_text: old_text.ok_or_else(|| anyhow!("missing old_text"))?,
|
||||
},
|
||||
"create" => AssistantEditKind::Create {
|
||||
description,
|
||||
new_text: new_text.ok_or_else(|| anyhow!("missing new_text"))?,
|
||||
},
|
||||
_ => Err(anyhow!("unknown operation {operation:?}"))?,
|
||||
};
|
||||
|
||||
Ok(Self { path, kind })
|
||||
}
|
||||
|
||||
pub async fn resolve(
|
||||
&self,
|
||||
project: Entity<Project>,
|
||||
mut cx: AsyncApp,
|
||||
) -> Result<(Entity<Buffer>, ResolvedEdit)> {
|
||||
let path = self.path.clone();
|
||||
let kind = self.kind.clone();
|
||||
let buffer = project
|
||||
.update(&mut cx, |project, cx| {
|
||||
let project_path = project
|
||||
.find_project_path(Path::new(&path), cx)
|
||||
.or_else(|| {
|
||||
// If we couldn't find a project path for it, put it in the active worktree
|
||||
// so that when we create the buffer, it can be saved.
|
||||
let worktree = project
|
||||
.active_entry()
|
||||
.and_then(|entry_id| project.worktree_for_entry(entry_id, cx))
|
||||
.or_else(|| project.worktrees(cx).next())?;
|
||||
let worktree = worktree.read(cx);
|
||||
|
||||
Some(ProjectPath {
|
||||
worktree_id: worktree.id(),
|
||||
path: Arc::from(Path::new(&path)),
|
||||
})
|
||||
})
|
||||
.with_context(|| format!("worktree not found for {:?}", path))?;
|
||||
anyhow::Ok(project.open_buffer(project_path, cx))
|
||||
})??
|
||||
.await?;
|
||||
|
||||
let snapshot = buffer.update(&mut cx, |buffer, _| buffer.snapshot())?;
|
||||
let suggestion = cx
|
||||
.background_spawn(async move { kind.resolve(&snapshot) })
|
||||
.await;
|
||||
|
||||
Ok((buffer, suggestion))
|
||||
}
|
||||
}
|
||||
|
||||
impl AssistantEditKind {
|
||||
fn resolve(self, snapshot: &BufferSnapshot) -> ResolvedEdit {
|
||||
match self {
|
||||
Self::Update {
|
||||
old_text,
|
||||
new_text,
|
||||
description,
|
||||
} => {
|
||||
let range = Self::resolve_location(&snapshot, &old_text);
|
||||
ResolvedEdit {
|
||||
range,
|
||||
new_text,
|
||||
description,
|
||||
}
|
||||
}
|
||||
Self::Create {
|
||||
new_text,
|
||||
description,
|
||||
} => ResolvedEdit {
|
||||
range: text::Anchor::MIN..text::Anchor::MAX,
|
||||
description,
|
||||
new_text,
|
||||
},
|
||||
Self::InsertBefore {
|
||||
old_text,
|
||||
mut new_text,
|
||||
description,
|
||||
} => {
|
||||
let range = Self::resolve_location(&snapshot, &old_text);
|
||||
new_text.push('\n');
|
||||
ResolvedEdit {
|
||||
range: range.start..range.start,
|
||||
new_text,
|
||||
description,
|
||||
}
|
||||
}
|
||||
Self::InsertAfter {
|
||||
old_text,
|
||||
mut new_text,
|
||||
description,
|
||||
} => {
|
||||
let range = Self::resolve_location(&snapshot, &old_text);
|
||||
new_text.insert(0, '\n');
|
||||
ResolvedEdit {
|
||||
range: range.end..range.end,
|
||||
new_text,
|
||||
description,
|
||||
}
|
||||
}
|
||||
Self::Delete { old_text } => {
|
||||
let range = Self::resolve_location(&snapshot, &old_text);
|
||||
ResolvedEdit {
|
||||
range,
|
||||
new_text: String::new(),
|
||||
description: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn resolve_location(buffer: &text::BufferSnapshot, search_query: &str) -> Range<text::Anchor> {
|
||||
const INSERTION_COST: u32 = 3;
|
||||
const DELETION_COST: u32 = 10;
|
||||
const WHITESPACE_INSERTION_COST: u32 = 1;
|
||||
const WHITESPACE_DELETION_COST: u32 = 1;
|
||||
|
||||
let buffer_len = buffer.len();
|
||||
let query_len = search_query.len();
|
||||
let mut matrix = SearchMatrix::new(query_len + 1, buffer_len + 1);
|
||||
let mut leading_deletion_cost = 0_u32;
|
||||
for (row, query_byte) in search_query.bytes().enumerate() {
|
||||
let deletion_cost = if query_byte.is_ascii_whitespace() {
|
||||
WHITESPACE_DELETION_COST
|
||||
} else {
|
||||
DELETION_COST
|
||||
};
|
||||
|
||||
leading_deletion_cost = leading_deletion_cost.saturating_add(deletion_cost);
|
||||
matrix.set(
|
||||
row + 1,
|
||||
0,
|
||||
SearchState::new(leading_deletion_cost, SearchDirection::Diagonal),
|
||||
);
|
||||
|
||||
for (col, buffer_byte) in buffer.bytes_in_range(0..buffer.len()).flatten().enumerate() {
|
||||
let insertion_cost = if buffer_byte.is_ascii_whitespace() {
|
||||
WHITESPACE_INSERTION_COST
|
||||
} else {
|
||||
INSERTION_COST
|
||||
};
|
||||
|
||||
let up = SearchState::new(
|
||||
matrix.get(row, col + 1).cost.saturating_add(deletion_cost),
|
||||
SearchDirection::Up,
|
||||
);
|
||||
let left = SearchState::new(
|
||||
matrix.get(row + 1, col).cost.saturating_add(insertion_cost),
|
||||
SearchDirection::Left,
|
||||
);
|
||||
let diagonal = SearchState::new(
|
||||
if query_byte == *buffer_byte {
|
||||
matrix.get(row, col).cost
|
||||
} else {
|
||||
matrix
|
||||
.get(row, col)
|
||||
.cost
|
||||
.saturating_add(deletion_cost + insertion_cost)
|
||||
},
|
||||
SearchDirection::Diagonal,
|
||||
);
|
||||
matrix.set(row + 1, col + 1, up.min(left).min(diagonal));
|
||||
}
|
||||
}
|
||||
|
||||
// Traceback to find the best match
|
||||
let mut best_buffer_end = buffer_len;
|
||||
let mut best_cost = u32::MAX;
|
||||
for col in 1..=buffer_len {
|
||||
let cost = matrix.get(query_len, col).cost;
|
||||
if cost < best_cost {
|
||||
best_cost = cost;
|
||||
best_buffer_end = col;
|
||||
}
|
||||
}
|
||||
|
||||
let mut query_ix = query_len;
|
||||
let mut buffer_ix = best_buffer_end;
|
||||
while query_ix > 0 && buffer_ix > 0 {
|
||||
let current = matrix.get(query_ix, buffer_ix);
|
||||
match current.direction {
|
||||
SearchDirection::Diagonal => {
|
||||
query_ix -= 1;
|
||||
buffer_ix -= 1;
|
||||
}
|
||||
SearchDirection::Up => {
|
||||
query_ix -= 1;
|
||||
}
|
||||
SearchDirection::Left => {
|
||||
buffer_ix -= 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut start = buffer.offset_to_point(buffer.clip_offset(buffer_ix, Bias::Left));
|
||||
start.column = 0;
|
||||
let mut end = buffer.offset_to_point(buffer.clip_offset(best_buffer_end, Bias::Right));
|
||||
if end.column > 0 {
|
||||
end.column = buffer.line_len(end.row);
|
||||
}
|
||||
|
||||
buffer.anchor_after(start)..buffer.anchor_before(end)
|
||||
}
|
||||
}
|
||||
|
||||
impl AssistantPatch {
|
||||
pub async fn resolve(&self, project: Entity<Project>, cx: &mut AsyncApp) -> ResolvedPatch {
|
||||
let mut resolve_tasks = Vec::new();
|
||||
for (ix, edit) in self.edits.iter().enumerate() {
|
||||
if let Ok(edit) = edit.as_ref() {
|
||||
resolve_tasks.push(
|
||||
edit.resolve(project.clone(), cx.clone())
|
||||
.map_err(move |error| (ix, error)),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
let edits = future::join_all(resolve_tasks).await;
|
||||
let mut errors = Vec::new();
|
||||
let mut edits_by_buffer = HashMap::default();
|
||||
for entry in edits {
|
||||
match entry {
|
||||
Ok((buffer, edit)) => {
|
||||
edits_by_buffer
|
||||
.entry(buffer)
|
||||
.or_insert_with(Vec::new)
|
||||
.push(edit);
|
||||
}
|
||||
Err((edit_ix, error)) => errors.push(AssistantPatchResolutionError {
|
||||
edit_ix,
|
||||
message: error.to_string(),
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
// Expand the context ranges of each edit and group edits with overlapping context ranges.
|
||||
let mut edit_groups_by_buffer = HashMap::default();
|
||||
for (buffer, edits) in edits_by_buffer {
|
||||
if let Ok(snapshot) = buffer.update(cx, |buffer, _| buffer.text_snapshot()) {
|
||||
edit_groups_by_buffer.insert(buffer, Self::group_edits(edits, &snapshot));
|
||||
}
|
||||
}
|
||||
|
||||
ResolvedPatch {
|
||||
edit_groups: edit_groups_by_buffer,
|
||||
errors,
|
||||
}
|
||||
}
|
||||
|
||||
fn group_edits(
|
||||
mut edits: Vec<ResolvedEdit>,
|
||||
snapshot: &text::BufferSnapshot,
|
||||
) -> Vec<ResolvedEditGroup> {
|
||||
let mut edit_groups = Vec::<ResolvedEditGroup>::new();
|
||||
// Sort edits by their range so that earlier, larger ranges come first
|
||||
edits.sort_by(|a, b| a.range.cmp(&b.range, &snapshot));
|
||||
|
||||
// Merge overlapping edits
|
||||
edits.dedup_by(|a, b| b.try_merge(a, &snapshot));
|
||||
|
||||
// Create context ranges for each edit
|
||||
for edit in edits {
|
||||
let context_range = {
|
||||
let edit_point_range = edit.range.to_point(&snapshot);
|
||||
let start_row = edit_point_range.start.row.saturating_sub(5);
|
||||
let end_row = cmp::min(edit_point_range.end.row + 5, snapshot.max_point().row);
|
||||
let start = snapshot.anchor_before(Point::new(start_row, 0));
|
||||
let end = snapshot.anchor_after(Point::new(end_row, snapshot.line_len(end_row)));
|
||||
start..end
|
||||
};
|
||||
|
||||
if let Some(last_group) = edit_groups.last_mut() {
|
||||
if last_group
|
||||
.context_range
|
||||
.end
|
||||
.cmp(&context_range.start, &snapshot)
|
||||
.is_ge()
|
||||
{
|
||||
// Merge with the previous group if context ranges overlap
|
||||
last_group.context_range.end = context_range.end;
|
||||
last_group.edits.push(edit);
|
||||
} else {
|
||||
// Create a new group
|
||||
edit_groups.push(ResolvedEditGroup {
|
||||
context_range,
|
||||
edits: vec![edit],
|
||||
});
|
||||
}
|
||||
} else {
|
||||
// Create the first group
|
||||
edit_groups.push(ResolvedEditGroup {
|
||||
context_range,
|
||||
edits: vec![edit],
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
edit_groups
|
||||
}
|
||||
|
||||
pub fn path_count(&self) -> usize {
|
||||
self.paths().count()
|
||||
}
|
||||
|
||||
pub fn paths(&self) -> impl '_ + Iterator<Item = &str> {
|
||||
let mut prev_path = None;
|
||||
self.edits.iter().filter_map(move |edit| {
|
||||
if let Ok(edit) = edit {
|
||||
let path = Some(edit.path.as_str());
|
||||
if path != prev_path {
|
||||
prev_path = path;
|
||||
return path;
|
||||
}
|
||||
}
|
||||
None
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for AssistantPatch {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.range == other.range
|
||||
&& self.title == other.title
|
||||
&& Arc::ptr_eq(&self.edits, &other.edits)
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for AssistantPatch {}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use gpui::App;
|
||||
use language::{
|
||||
Language, LanguageConfig, LanguageMatcher, language_settings::AllLanguageSettings,
|
||||
};
|
||||
use settings::SettingsStore;
|
||||
use ui::BorrowAppContext;
|
||||
use unindent::Unindent as _;
|
||||
use util::test::{generate_marked_text, marked_text_ranges};
|
||||
|
||||
#[gpui::test]
|
||||
fn test_resolve_location(cx: &mut App) {
|
||||
assert_location_resolution(
|
||||
concat!(
|
||||
" Lorem\n",
|
||||
"« ipsum\n",
|
||||
" dolor sit amet»\n",
|
||||
" consecteur",
|
||||
),
|
||||
"ipsum\ndolor",
|
||||
cx,
|
||||
);
|
||||
|
||||
assert_location_resolution(
|
||||
&"
|
||||
«fn foo1(a: usize) -> usize {
|
||||
40
|
||||
}»
|
||||
|
||||
fn foo2(b: usize) -> usize {
|
||||
42
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
"fn foo1(b: usize) {\n40\n}",
|
||||
cx,
|
||||
);
|
||||
|
||||
assert_location_resolution(
|
||||
&"
|
||||
fn main() {
|
||||
« Foo
|
||||
.bar()
|
||||
.baz()
|
||||
.qux()»
|
||||
}
|
||||
|
||||
fn foo2(b: usize) -> usize {
|
||||
42
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
"Foo.bar.baz.qux()",
|
||||
cx,
|
||||
);
|
||||
|
||||
assert_location_resolution(
|
||||
&"
|
||||
class Something {
|
||||
one() { return 1; }
|
||||
« two() { return 2222; }
|
||||
three() { return 333; }
|
||||
four() { return 4444; }
|
||||
five() { return 5555; }
|
||||
six() { return 6666; }
|
||||
» seven() { return 7; }
|
||||
eight() { return 8; }
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
&"
|
||||
two() { return 2222; }
|
||||
four() { return 4444; }
|
||||
five() { return 5555; }
|
||||
six() { return 6666; }
|
||||
"
|
||||
.unindent(),
|
||||
cx,
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_resolve_edits(cx: &mut App) {
|
||||
init_test(cx);
|
||||
|
||||
assert_edits(
|
||||
"
|
||||
/// A person
|
||||
struct Person {
|
||||
name: String,
|
||||
age: usize,
|
||||
}
|
||||
|
||||
/// A dog
|
||||
struct Dog {
|
||||
weight: f32,
|
||||
}
|
||||
|
||||
impl Person {
|
||||
fn name(&self) -> &str {
|
||||
&self.name
|
||||
}
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
vec![
|
||||
AssistantEditKind::Update {
|
||||
old_text: "
|
||||
name: String,
|
||||
"
|
||||
.unindent(),
|
||||
new_text: "
|
||||
first_name: String,
|
||||
last_name: String,
|
||||
"
|
||||
.unindent(),
|
||||
description: None,
|
||||
},
|
||||
AssistantEditKind::Update {
|
||||
old_text: "
|
||||
fn name(&self) -> &str {
|
||||
&self.name
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
new_text: "
|
||||
fn name(&self) -> String {
|
||||
format!(\"{} {}\", self.first_name, self.last_name)
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
description: None,
|
||||
},
|
||||
],
|
||||
"
|
||||
/// A person
|
||||
struct Person {
|
||||
first_name: String,
|
||||
last_name: String,
|
||||
age: usize,
|
||||
}
|
||||
|
||||
/// A dog
|
||||
struct Dog {
|
||||
weight: f32,
|
||||
}
|
||||
|
||||
impl Person {
|
||||
fn name(&self) -> String {
|
||||
format!(\"{} {}\", self.first_name, self.last_name)
|
||||
}
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
cx,
|
||||
);
|
||||
|
||||
// Ensure InsertBefore merges correctly with Update of the same text
|
||||
assert_edits(
|
||||
"
|
||||
fn foo() {
|
||||
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
vec![
|
||||
AssistantEditKind::InsertBefore {
|
||||
old_text: "
|
||||
fn foo() {"
|
||||
.unindent(),
|
||||
new_text: "
|
||||
fn bar() {
|
||||
qux();
|
||||
}"
|
||||
.unindent(),
|
||||
description: Some("implement bar".into()),
|
||||
},
|
||||
AssistantEditKind::Update {
|
||||
old_text: "
|
||||
fn foo() {
|
||||
|
||||
}"
|
||||
.unindent(),
|
||||
new_text: "
|
||||
fn foo() {
|
||||
bar();
|
||||
}"
|
||||
.unindent(),
|
||||
description: Some("call bar in foo".into()),
|
||||
},
|
||||
AssistantEditKind::InsertAfter {
|
||||
old_text: "
|
||||
fn foo() {
|
||||
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
new_text: "
|
||||
fn qux() {
|
||||
// todo
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
description: Some("implement qux".into()),
|
||||
},
|
||||
],
|
||||
"
|
||||
fn bar() {
|
||||
qux();
|
||||
}
|
||||
|
||||
fn foo() {
|
||||
bar();
|
||||
}
|
||||
|
||||
fn qux() {
|
||||
// todo
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
cx,
|
||||
);
|
||||
|
||||
// Correctly indent new text when replacing multiple adjacent indented blocks.
|
||||
assert_edits(
|
||||
"
|
||||
impl Numbers {
|
||||
fn one() {
|
||||
1
|
||||
}
|
||||
|
||||
fn two() {
|
||||
2
|
||||
}
|
||||
|
||||
fn three() {
|
||||
3
|
||||
}
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
vec![
|
||||
AssistantEditKind::Update {
|
||||
old_text: "
|
||||
fn one() {
|
||||
1
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
new_text: "
|
||||
fn one() {
|
||||
101
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
description: None,
|
||||
},
|
||||
AssistantEditKind::Update {
|
||||
old_text: "
|
||||
fn two() {
|
||||
2
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
new_text: "
|
||||
fn two() {
|
||||
102
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
description: None,
|
||||
},
|
||||
AssistantEditKind::Update {
|
||||
old_text: "
|
||||
fn three() {
|
||||
3
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
new_text: "
|
||||
fn three() {
|
||||
103
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
description: None,
|
||||
},
|
||||
],
|
||||
"
|
||||
impl Numbers {
|
||||
fn one() {
|
||||
101
|
||||
}
|
||||
|
||||
fn two() {
|
||||
102
|
||||
}
|
||||
|
||||
fn three() {
|
||||
103
|
||||
}
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
cx,
|
||||
);
|
||||
|
||||
assert_edits(
|
||||
"
|
||||
impl Person {
|
||||
fn set_name(&mut self, name: String) {
|
||||
self.name = name;
|
||||
}
|
||||
|
||||
fn name(&self) -> String {
|
||||
return self.name;
|
||||
}
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
vec![
|
||||
AssistantEditKind::Update {
|
||||
old_text: "self.name = name;".unindent(),
|
||||
new_text: "self._name = name;".unindent(),
|
||||
description: None,
|
||||
},
|
||||
AssistantEditKind::Update {
|
||||
old_text: "return self.name;\n".unindent(),
|
||||
new_text: "return self._name;\n".unindent(),
|
||||
description: None,
|
||||
},
|
||||
],
|
||||
"
|
||||
impl Person {
|
||||
fn set_name(&mut self, name: String) {
|
||||
self._name = name;
|
||||
}
|
||||
|
||||
fn name(&self) -> String {
|
||||
return self._name;
|
||||
}
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
cx,
|
||||
);
|
||||
}
|
||||
|
||||
fn init_test(cx: &mut App) {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
language::init(cx);
|
||||
cx.update_global::<SettingsStore, _>(|settings, cx| {
|
||||
settings.update_user_settings::<AllLanguageSettings>(cx, |_| {});
|
||||
});
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
fn assert_location_resolution(text_with_expected_range: &str, query: &str, cx: &mut App) {
|
||||
let (text, _) = marked_text_ranges(text_with_expected_range, false);
|
||||
let buffer = cx.new(|cx| Buffer::local(text.clone(), cx));
|
||||
let snapshot = buffer.read(cx).snapshot();
|
||||
let range = AssistantEditKind::resolve_location(&snapshot, query).to_offset(&snapshot);
|
||||
let text_with_actual_range = generate_marked_text(&text, &[range], false);
|
||||
pretty_assertions::assert_eq!(text_with_actual_range, text_with_expected_range);
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
fn assert_edits(
|
||||
old_text: String,
|
||||
edits: Vec<AssistantEditKind>,
|
||||
new_text: String,
|
||||
cx: &mut App,
|
||||
) {
|
||||
let buffer =
|
||||
cx.new(|cx| Buffer::local(old_text, cx).with_language(Arc::new(rust_lang()), cx));
|
||||
let snapshot = buffer.read(cx).snapshot();
|
||||
let resolved_edits = edits
|
||||
.into_iter()
|
||||
.map(|kind| kind.resolve(&snapshot))
|
||||
.collect();
|
||||
let edit_groups = AssistantPatch::group_edits(resolved_edits, &snapshot);
|
||||
ResolvedPatch::apply_edit_groups(&edit_groups, &buffer, cx);
|
||||
let actual_new_text = buffer.read(cx).text();
|
||||
pretty_assertions::assert_eq!(actual_new_text, new_text);
|
||||
}
|
||||
|
||||
fn rust_lang() -> Language {
|
||||
Language::new(
|
||||
LanguageConfig {
|
||||
name: "Rust".into(),
|
||||
matcher: LanguageMatcher {
|
||||
path_suffixes: vec!["rs".to_string()],
|
||||
..Default::default()
|
||||
},
|
||||
..Default::default()
|
||||
},
|
||||
Some(language::tree_sitter_rust::LANGUAGE.into()),
|
||||
)
|
||||
.with_indents_query(
|
||||
r#"
|
||||
(call_expression) @indent
|
||||
(field_expression) @indent
|
||||
(_ "(" ")" @end) @indent
|
||||
(_ "{" "}" @end) @indent
|
||||
"#,
|
||||
)
|
||||
.unwrap()
|
||||
}
|
||||
}
|
||||
@@ -278,8 +278,8 @@ impl CompletionProvider for SlashCommandCompletionProvider {
buffer.anchor_after(Point::new(position.row, first_arg_start.start as u32));
let arguments = call
.arguments
.into_iter()
.filter_map(|argument| Some(line.get(argument)?.to_string()))
.iter()
.filter_map(|argument| Some(line.get(argument.clone())?.to_string()))
.collect::<Vec<_>>();
let argument_range = first_arg_start..buffer_position;
(

@@ -41,7 +41,6 @@ pub enum NotifyWhenAgentWaiting {
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
|
||||
#[serde(tag = "name", rename_all = "snake_case")]
|
||||
#[schemars(deny_unknown_fields)]
|
||||
pub enum AssistantProviderContentV1 {
|
||||
#[serde(rename = "zed.dev")]
|
||||
ZedDotDev { default_model: Option<CloudModel> },
|
||||
@@ -86,6 +85,7 @@ pub struct AssistantSettings {
|
||||
pub thread_summary_model: Option<LanguageModelSelection>,
|
||||
pub inline_alternatives: Vec<LanguageModelSelection>,
|
||||
pub using_outdated_settings_version: bool,
|
||||
pub enable_experimental_live_diffs: bool,
|
||||
pub default_profile: AgentProfileId,
|
||||
pub profiles: IndexMap<AgentProfileId, AgentProfile>,
|
||||
pub always_allow_tool_actions: bool,
|
||||
@@ -94,7 +94,6 @@ pub struct AssistantSettings {
|
||||
pub single_file_review: bool,
|
||||
pub model_parameters: Vec<LanguageModelParameters>,
|
||||
pub preferred_completion_mode: CompletionMode,
|
||||
pub enable_feedback: bool,
|
||||
}
|
||||
|
||||
impl AssistantSettings {
|
||||
@@ -107,6 +106,10 @@ impl AssistantSettings {
|
||||
.and_then(|m| m.temperature)
|
||||
}
|
||||
|
||||
pub fn are_live_diffs_enabled(&self, _cx: &App) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
pub fn set_inline_assistant_model(&mut self, provider: String, model: String) {
|
||||
self.inline_assistant_model = Some(LanguageModelSelection {
|
||||
provider: provider.into(),
|
||||
@@ -254,6 +257,7 @@ impl AssistantSettingsContent {
|
||||
commit_message_model: None,
|
||||
thread_summary_model: None,
|
||||
inline_alternatives: None,
|
||||
enable_experimental_live_diffs: None,
|
||||
default_profile: None,
|
||||
profiles: None,
|
||||
always_allow_tool_actions: None,
|
||||
@@ -262,7 +266,6 @@ impl AssistantSettingsContent {
|
||||
single_file_review: None,
|
||||
model_parameters: Vec::new(),
|
||||
preferred_completion_mode: None,
|
||||
enable_feedback: None,
|
||||
},
|
||||
VersionedAssistantSettingsContent::V2(ref settings) => settings.clone(),
|
||||
},
|
||||
@@ -285,6 +288,7 @@ impl AssistantSettingsContent {
|
||||
commit_message_model: None,
|
||||
thread_summary_model: None,
|
||||
inline_alternatives: None,
|
||||
enable_experimental_live_diffs: None,
|
||||
default_profile: None,
|
||||
profiles: None,
|
||||
always_allow_tool_actions: None,
|
||||
@@ -293,7 +297,6 @@ impl AssistantSettingsContent {
|
||||
single_file_review: None,
|
||||
model_parameters: Vec::new(),
|
||||
preferred_completion_mode: None,
|
||||
enable_feedback: None,
|
||||
},
|
||||
None => AssistantSettingsContentV2::default(),
|
||||
}
|
||||
@@ -547,7 +550,6 @@ impl AssistantSettingsContent {
|
||||
|
||||
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
|
||||
#[serde(tag = "version")]
|
||||
#[schemars(deny_unknown_fields)]
|
||||
pub enum VersionedAssistantSettingsContent {
|
||||
#[serde(rename = "1")]
|
||||
V1(AssistantSettingsContentV1),
|
||||
@@ -568,6 +570,7 @@ impl Default for VersionedAssistantSettingsContent {
|
||||
commit_message_model: None,
|
||||
thread_summary_model: None,
|
||||
inline_alternatives: None,
|
||||
enable_experimental_live_diffs: None,
|
||||
default_profile: None,
|
||||
profiles: None,
|
||||
always_allow_tool_actions: None,
|
||||
@@ -576,13 +579,11 @@ impl Default for VersionedAssistantSettingsContent {
|
||||
single_file_review: None,
|
||||
model_parameters: Vec::new(),
|
||||
preferred_completion_mode: None,
|
||||
enable_feedback: None,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug, Default)]
|
||||
#[schemars(deny_unknown_fields)]
|
||||
pub struct AssistantSettingsContentV2 {
|
||||
/// Whether the Assistant is enabled.
|
||||
///
|
||||
@@ -614,6 +615,10 @@ pub struct AssistantSettingsContentV2 {
|
||||
thread_summary_model: Option<LanguageModelSelection>,
|
||||
/// Additional models with which to generate alternatives when performing inline assists.
|
||||
inline_alternatives: Option<Vec<LanguageModelSelection>>,
|
||||
/// Enable experimental live diffs in the assistant panel.
|
||||
///
|
||||
/// Default: false
|
||||
enable_experimental_live_diffs: Option<bool>,
|
||||
/// The default profile to use in the Agent.
|
||||
///
|
||||
/// Default: write
|
||||
@@ -651,10 +656,6 @@ pub struct AssistantSettingsContentV2 {
|
||||
///
|
||||
/// Default: normal
|
||||
preferred_completion_mode: Option<CompletionMode>,
|
||||
/// Whether to show thumb buttons for feedback in the agent panel.
|
||||
///
|
||||
/// Default: true
|
||||
enable_feedback: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Default)]
|
||||
@@ -692,7 +693,7 @@ impl JsonSchema for LanguageModelProviderSetting {
|
||||
schemars::schema::SchemaObject {
|
||||
enum_values: Some(vec![
|
||||
"anthropic".into(),
|
||||
"amazon-bedrock".into(),
|
||||
"bedrock".into(),
|
||||
"google".into(),
|
||||
"lmstudio".into(),
|
||||
"ollama".into(),
|
||||
@@ -745,7 +746,6 @@ pub struct ContextServerPresetContent {
|
||||
}
|
||||
|
||||
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
|
||||
#[schemars(deny_unknown_fields)]
|
||||
pub struct AssistantSettingsContentV1 {
|
||||
/// Whether the Assistant is enabled.
|
||||
///
|
||||
@@ -775,7 +775,6 @@ pub struct AssistantSettingsContentV1 {
|
||||
}
|
||||
|
||||
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
|
||||
#[schemars(deny_unknown_fields)]
|
||||
pub struct LegacyAssistantSettingsContent {
|
||||
/// Whether to show the assistant panel button in the status bar.
|
||||
///
|
||||
@@ -846,6 +845,10 @@ impl Settings for AssistantSettings {
|
||||
.thread_summary_model
|
||||
.or(settings.thread_summary_model.take());
|
||||
merge(&mut settings.inline_alternatives, value.inline_alternatives);
|
||||
merge(
|
||||
&mut settings.enable_experimental_live_diffs,
|
||||
value.enable_experimental_live_diffs,
|
||||
);
|
||||
merge(
|
||||
&mut settings.always_allow_tool_actions,
|
||||
value.always_allow_tool_actions,
|
||||
@@ -861,7 +864,6 @@ impl Settings for AssistantSettings {
|
||||
&mut settings.preferred_completion_mode,
|
||||
value.preferred_completion_mode,
|
||||
);
|
||||
merge(&mut settings.enable_feedback, value.enable_feedback);
|
||||
|
||||
settings
|
||||
.model_parameters
|
||||
@@ -992,13 +994,13 @@ mod tests {
|
||||
dock: None,
|
||||
default_width: None,
|
||||
default_height: None,
|
||||
enable_experimental_live_diffs: None,
|
||||
default_profile: None,
|
||||
profiles: None,
|
||||
always_allow_tool_actions: None,
|
||||
notify_when_agent_waiting: None,
|
||||
stream_edits: None,
|
||||
single_file_review: None,
|
||||
enable_feedback: None,
|
||||
model_parameters: Vec::new(),
|
||||
preferred_completion_mode: None,
|
||||
},
|
||||
|
||||
@@ -35,6 +35,7 @@ indoc.workspace = true
|
||||
itertools.workspace = true
|
||||
language.workspace = true
|
||||
language_model.workspace = true
|
||||
linkme.workspace = true
|
||||
log.workspace = true
|
||||
markdown.workspace = true
|
||||
open.workspace = true
|
||||
|
||||
@@ -15,7 +15,7 @@ use gpui::{AppContext, TestAppContext};
|
||||
use indoc::{formatdoc, indoc};
|
||||
use language_model::{
|
||||
LanguageModelRegistry, LanguageModelRequestTool, LanguageModelToolResult,
|
||||
LanguageModelToolResultContent, LanguageModelToolUse, LanguageModelToolUseId, SelectedModel,
|
||||
LanguageModelToolResultContent, LanguageModelToolUse, LanguageModelToolUseId,
|
||||
};
|
||||
use project::Project;
|
||||
use rand::prelude::*;
|
||||
@@ -25,7 +25,6 @@ use std::{
|
||||
cmp::Reverse,
|
||||
fmt::{self, Display},
|
||||
io::Write as _,
|
||||
str::FromStr,
|
||||
sync::mpsc,
|
||||
};
|
||||
use util::path;
|
||||
@@ -1217,7 +1216,7 @@ fn report_progress(evaluated_count: usize, failed_count: usize, iterations: usiz
|
||||
passed_count as f64 / evaluated_count as f64
|
||||
};
|
||||
print!(
|
||||
"\r\x1b[KEvaluated {}/{} ({:.2}% passed)",
|
||||
"\r\x1b[KEvaluated {}/{} ({:.2}%)",
|
||||
evaluated_count,
|
||||
iterations,
|
||||
passed_ratio * 100.0
|
||||
@@ -1256,21 +1255,13 @@ impl EditAgentTest {
|
||||
|
||||
fs.insert_tree("/root", json!({})).await;
|
||||
let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
|
||||
let agent_model = SelectedModel::from_str(
|
||||
&std::env::var("ZED_AGENT_MODEL")
|
||||
.unwrap_or("anthropic/claude-3-7-sonnet-latest".into()),
|
||||
)
|
||||
.unwrap();
|
||||
let judge_model = SelectedModel::from_str(
|
||||
&std::env::var("ZED_JUDGE_MODEL")
|
||||
.unwrap_or("anthropic/claude-3-7-sonnet-latest".into()),
|
||||
)
|
||||
.unwrap();
|
||||
let (agent_model, judge_model) = cx
|
||||
.update(|cx| {
|
||||
cx.spawn(async move |cx| {
|
||||
let agent_model = Self::load_model(&agent_model, cx).await;
|
||||
let judge_model = Self::load_model(&judge_model, cx).await;
|
||||
let agent_model =
|
||||
Self::load_model("anthropic", "claude-3-7-sonnet-latest", cx).await;
|
||||
let judge_model =
|
||||
Self::load_model("anthropic", "claude-3-7-sonnet-latest", cx).await;
|
||||
(agent_model.unwrap(), judge_model.unwrap())
|
||||
})
|
||||
})
|
||||
@@ -1285,17 +1276,15 @@ impl EditAgentTest {
|
||||
}
|
||||
|
||||
async fn load_model(
|
||||
selected_model: &SelectedModel,
|
||||
provider: &str,
|
||||
id: &str,
|
||||
cx: &mut AsyncApp,
|
||||
) -> Result<Arc<dyn LanguageModel>> {
|
||||
let (provider, model) = cx.update(|cx| {
|
||||
let models = LanguageModelRegistry::read_global(cx);
|
||||
let model = models
|
||||
.available_models(cx)
|
||||
.find(|model| {
|
||||
model.provider_id() == selected_model.provider
|
||||
&& model.id() == selected_model.model
|
||||
})
|
||||
.find(|model| model.provider_id().0 == provider && model.id().0 == id)
|
||||
.unwrap();
|
||||
let provider = models.provider(&model.provider_id()).unwrap();
|
||||
(provider, model)
|
||||
|
||||
@@ -1249,7 +1249,7 @@ pub struct ActiveDiagnosticGroup {
}

#[derive(Debug, PartialEq, Eq)]
#[allow(clippy::large_enum_variant)]
pub(crate) enum ActiveDiagnostic {
None,
All,

@@ -22,7 +22,7 @@ use language::{
|
||||
};
|
||||
use language_model::{LanguageModel, LanguageModelRequest, LanguageModelToolSchemaFormat};
|
||||
use markdown::{Markdown, MarkdownElement, MarkdownStyle};
|
||||
use project::Project;
|
||||
use project::{Project, ProjectPath};
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::Settings;
|
||||
@@ -86,7 +86,7 @@ pub struct EditFileToolInput {
|
||||
pub mode: EditFileMode,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)]
|
||||
#[serde(rename_all = "lowercase")]
|
||||
pub enum EditFileMode {
|
||||
Edit,
|
||||
@@ -171,12 +171,9 @@ impl Tool for EditFileTool {
|
||||
Err(err) => return Task::ready(Err(anyhow!(err))).into(),
|
||||
};
|
||||
|
||||
let Some(project_path) = project.read(cx).find_project_path(&input.path, cx) else {
|
||||
return Task::ready(Err(anyhow!(
|
||||
"Path {} not found in project",
|
||||
input.path.display()
|
||||
)))
|
||||
.into();
|
||||
let project_path = match resolve_path(&input, project.clone(), cx) {
|
||||
Ok(path) => path,
|
||||
Err(err) => return Task::ready(Err(anyhow!(err))).into(),
|
||||
};
|
||||
|
||||
let card = window.and_then(|window| {
|
||||
@@ -199,20 +196,6 @@ impl Tool for EditFileTool {
|
||||
})?
|
||||
.await?;
|
||||
|
||||
let exists = buffer.read_with(cx, |buffer, _| {
|
||||
buffer
|
||||
.file()
|
||||
.as_ref()
|
||||
.map_or(false, |file| file.disk_state().exists())
|
||||
})?;
|
||||
let create_or_overwrite = match input.mode {
|
||||
EditFileMode::Create | EditFileMode::Overwrite => true,
|
||||
_ => false,
|
||||
};
|
||||
if !create_or_overwrite && !exists {
|
||||
return Err(anyhow!("{} not found", input.path.display()));
|
||||
}
|
||||
|
||||
let old_snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?;
|
||||
let old_text = cx
|
||||
.background_spawn({
|
||||
@@ -221,15 +204,15 @@ impl Tool for EditFileTool {
|
||||
})
|
||||
.await;
|
||||
|
||||
let (output, mut events) = if create_or_overwrite {
|
||||
edit_agent.overwrite(
|
||||
let (output, mut events) = if matches!(input.mode, EditFileMode::Edit) {
|
||||
edit_agent.edit(
|
||||
buffer.clone(),
|
||||
input.display_description.clone(),
|
||||
&request,
|
||||
cx,
|
||||
)
|
||||
} else {
|
||||
edit_agent.edit(
|
||||
edit_agent.overwrite(
|
||||
buffer.clone(),
|
||||
input.display_description.clone(),
|
||||
&request,
|
||||
@@ -349,6 +332,72 @@ impl Tool for EditFileTool {
|
||||
}
|
||||
}
|
||||
|
||||
/// Validate that the file path is valid, meaning:
///
/// - For `edit` and `overwrite`, the path must point to an existing file.
/// - For `create`, the file must not already exist, but its parent directory must exist.
fn resolve_path(
|
||||
input: &EditFileToolInput,
|
||||
project: Entity<Project>,
|
||||
cx: &mut App,
|
||||
) -> Result<ProjectPath> {
|
||||
let project = project.read(cx);
|
||||
|
||||
match input.mode {
|
||||
EditFileMode::Edit | EditFileMode::Overwrite => {
|
||||
let path = project
|
||||
.find_project_path(&input.path, cx)
|
||||
.ok_or_else(|| anyhow!("Can't edit file: path not found"))?;
|
||||
|
||||
let entry = project
|
||||
.entry_for_path(&path, cx)
|
||||
.ok_or_else(|| anyhow!("Can't edit file: path not found"))?;
|
||||
|
||||
if !entry.is_file() {
|
||||
return Err(anyhow!("Can't edit file: path is a directory"));
|
||||
}
|
||||
|
||||
Ok(path)
|
||||
}
|
||||
|
||||
EditFileMode::Create => {
|
||||
if let Some(path) = project.find_project_path(&input.path, cx) {
|
||||
if project.entry_for_path(&path, cx).is_some() {
|
||||
return Err(anyhow!("Can't create file: file already exists"));
|
||||
}
|
||||
}
|
||||
|
||||
let parent_path = input
|
||||
.path
|
||||
.parent()
|
||||
.ok_or_else(|| anyhow!("Can't create file: incorrect path"))?;
|
||||
|
||||
let parent_project_path = project.find_project_path(&parent_path, cx);
|
||||
|
||||
let parent_entry = parent_project_path
|
||||
.as_ref()
|
||||
.and_then(|path| project.entry_for_path(&path, cx))
|
||||
.ok_or_else(|| anyhow!("Can't create file: parent directory doesn't exist"))?;
|
||||
|
||||
if !parent_entry.is_dir() {
|
||||
return Err(anyhow!("Can't create file: parent is not a directory"));
|
||||
}
|
||||
|
||||
let file_name = input
|
||||
.path
|
||||
.file_name()
|
||||
.ok_or_else(|| anyhow!("Can't create file: invalid filename"))?;
|
||||
|
||||
let new_file_path = parent_project_path.map(|parent| ProjectPath {
|
||||
path: Arc::from(parent.path.join(file_name)),
|
||||
..parent
|
||||
});
|
||||
|
||||
new_file_path.ok_or_else(|| anyhow!("Can't create file"))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct EditFileToolCard {
|
||||
path: PathBuf,
|
||||
editor: Entity<Editor>,
|
||||
@@ -380,9 +429,9 @@ impl EditFileToolCard {
|
||||
editor.set_show_gutter(false, cx);
|
||||
editor.disable_inline_diagnostics();
|
||||
editor.disable_expand_excerpt_buttons(cx);
|
||||
editor.disable_scrollbars_and_minimap(window, cx);
|
||||
editor.set_soft_wrap_mode(SoftWrap::None, cx);
|
||||
editor.scroll_manager.set_forbid_vertical_scroll(true);
|
||||
editor.set_show_scrollbars(false, cx);
|
||||
editor.set_show_indent_guides(false, cx);
|
||||
editor.set_read_only(true);
|
||||
editor.set_show_breakpoints(false, cx);
|
||||
@@ -868,7 +917,10 @@ async fn build_buffer_diff(
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::result::Result;
|
||||
|
||||
use super::*;
|
||||
use client::TelemetrySettings;
|
||||
use fs::FakeFs;
|
||||
use gpui::TestAppContext;
|
||||
use language_model::fake_provider::FakeLanguageModel;
|
||||
@@ -908,10 +960,102 @@ mod tests {
|
||||
.await;
|
||||
assert_eq!(
|
||||
result.unwrap_err().to_string(),
|
||||
"root/nonexistent_file.txt not found"
|
||||
"Can't edit file: path not found"
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_resolve_path_for_creating_file(cx: &mut TestAppContext) {
|
||||
let mode = &EditFileMode::Create;
|
||||
|
||||
let result = test_resolve_path(mode, "root/new.txt", cx);
|
||||
assert_resolved_path_eq(result.await, "new.txt");
|
||||
|
||||
let result = test_resolve_path(mode, "new.txt", cx);
|
||||
assert_resolved_path_eq(result.await, "new.txt");
|
||||
|
||||
let result = test_resolve_path(mode, "dir/new.txt", cx);
|
||||
assert_resolved_path_eq(result.await, "dir/new.txt");
|
||||
|
||||
let result = test_resolve_path(mode, "root/dir/subdir/existing.txt", cx);
|
||||
assert_eq!(
|
||||
result.await.unwrap_err().to_string(),
|
||||
"Can't create file: file already exists"
|
||||
);
|
||||
|
||||
let result = test_resolve_path(mode, "root/dir/nonexistent_dir/new.txt", cx);
|
||||
assert_eq!(
|
||||
result.await.unwrap_err().to_string(),
|
||||
"Can't create file: parent directory doesn't exist"
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_resolve_path_for_editing_file(cx: &mut TestAppContext) {
|
||||
let mode = &EditFileMode::Edit;
|
||||
|
||||
let path_with_root = "root/dir/subdir/existing.txt";
|
||||
let path_without_root = "dir/subdir/existing.txt";
|
||||
let result = test_resolve_path(mode, path_with_root, cx);
|
||||
assert_resolved_path_eq(result.await, path_without_root);
|
||||
|
||||
let result = test_resolve_path(mode, path_without_root, cx);
|
||||
assert_resolved_path_eq(result.await, path_without_root);
|
||||
|
||||
let result = test_resolve_path(mode, "root/nonexistent.txt", cx);
|
||||
assert_eq!(
|
||||
result.await.unwrap_err().to_string(),
|
||||
"Can't edit file: path not found"
|
||||
);
|
||||
|
||||
let result = test_resolve_path(mode, "root/dir", cx);
|
||||
assert_eq!(
|
||||
result.await.unwrap_err().to_string(),
|
||||
"Can't edit file: path is a directory"
|
||||
);
|
||||
}
|
||||
|
||||
async fn test_resolve_path(
|
||||
mode: &EditFileMode,
|
||||
path: &str,
|
||||
cx: &mut TestAppContext,
|
||||
) -> Result<ProjectPath, anyhow::Error> {
|
||||
init_test(cx);
|
||||
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
fs.insert_tree(
|
||||
"/root",
|
||||
json!({
|
||||
"dir": {
|
||||
"subdir": {
|
||||
"existing.txt": "hello"
|
||||
}
|
||||
}
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
|
||||
|
||||
let input = EditFileToolInput {
|
||||
display_description: "Some edit".into(),
|
||||
path: path.into(),
|
||||
mode: mode.clone(),
|
||||
};
|
||||
|
||||
let result = cx.update(|cx| resolve_path(&input, project, cx));
|
||||
result
|
||||
}
|
||||
|
||||
fn assert_resolved_path_eq(path: Result<ProjectPath, anyhow::Error>, expected: &str) {
|
||||
let actual = path
|
||||
.expect("Should return valid path")
|
||||
.path
|
||||
.to_str()
|
||||
.unwrap()
|
||||
.replace("\\", "/"); // Naive Windows path normalization
|
||||
assert_eq!(actual, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn still_streaming_ui_text_with_path() {
|
||||
let input = json!({
|
||||
@@ -984,6 +1128,7 @@ mod tests {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
language::init(cx);
|
||||
TelemetrySettings::register(cx);
|
||||
Project::init_settings(cx);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -12,7 +12,6 @@ pub struct CallSettings {
|
||||
|
||||
/// Configuration of voice calls in Zed.
|
||||
#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)]
|
||||
#[schemars(deny_unknown_fields)]
|
||||
pub struct CallSettingsContent {
|
||||
/// Whether the microphone should be muted when joining a channel or a call.
|
||||
///
|
||||
|
||||
@@ -19,7 +19,6 @@ test-support = ["clock/test-support", "collections/test-support", "gpui/test-sup
|
||||
anyhow.workspace = true
|
||||
async-recursion = "0.3"
|
||||
async-tungstenite = { workspace = true, features = ["tokio", "tokio-rustls-manual-roots"] }
|
||||
base64.workspace = true
|
||||
chrono = { workspace = true, features = ["serde"] }
|
||||
clock.workspace = true
|
||||
collections.workspace = true
|
||||
@@ -30,7 +29,6 @@ gpui.workspace = true
|
||||
gpui_tokio.workspace = true
|
||||
http_client.workspace = true
|
||||
http_client_tls.workspace = true
|
||||
httparse = "1.10"
|
||||
log.workspace = true
|
||||
paths.workspace = true
|
||||
parking_lot.workspace = true
|
||||
@@ -49,7 +47,6 @@ text.workspace = true
|
||||
thiserror.workspace = true
|
||||
time.workspace = true
|
||||
tiny_http = "0.8"
|
||||
tokio-native-tls = "0.3"
|
||||
tokio-socks = { version = "0.5.2", default-features = false, features = ["futures-io"] }
|
||||
url.workspace = true
|
||||
util.workspace = true
|
||||
|
||||
@@ -1,7 +1,7 @@
#[cfg(any(test, feature = "test-support"))]
pub mod test;

mod proxy;
mod socks;
pub mod telemetry;
pub mod user;
pub mod zed_urls;
@@ -24,13 +24,13 @@ use gpui::{App, AsyncApp, Entity, Global, Task, WeakEntity, actions};
use http_client::{AsyncBody, HttpClient, HttpClientWithUrl};
use parking_lot::RwLock;
use postage::watch;
use proxy::connect_proxy_stream;
use rand::prelude::*;
use release_channel::{AppVersion, ReleaseChannel};
use rpc::proto::{AnyTypedEnvelope, EnvelopedMessage, PeerId, RequestMessage};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsSources};
use socks::connect_socks_proxy_stream;
use std::pin::Pin;
use std::{
    any::TypeId,
@@ -49,7 +49,7 @@ use telemetry::Telemetry;
use thiserror::Error;
use tokio::net::TcpStream;
use url::Url;
use util::{ConnectionResult, ResultExt};
use util::{ResultExt, TryFutureExt};

pub use rpc::*;
pub use telemetry_events::Event;
@@ -151,19 +151,9 @@ pub fn init(client: &Arc<Client>, cx: &mut App) {
        let client = client.clone();
        move |_: &SignIn, cx| {
            if let Some(client) = client.upgrade() {
                cx.spawn(
                    async move |cx| match client.authenticate_and_connect(true, &cx).await {
                        ConnectionResult::Timeout => {
                            log::error!("Initial authentication timed out");
                        }
                        ConnectionResult::ConnectionReset => {
                            log::error!("Initial authentication connection reset");
                        }
                        ConnectionResult::Result(r) => {
                            r.log_err();
                        }
                    },
                )
                cx.spawn(async move |cx| {
                    client.authenticate_and_connect(true, &cx).log_err().await
                })
                .detach();
            }
        }
@@ -668,7 +658,7 @@ impl Client {
                state._reconnect_task = None;
            }
            Status::ConnectionLost => {
                let client = self.clone();
                let this = self.clone();
                state._reconnect_task = Some(cx.spawn(async move |cx| {
                    #[cfg(any(test, feature = "test-support"))]
                    let mut rng = StdRng::seed_from_u64(0);
@@ -676,25 +666,10 @@ impl Client {
                    let mut rng = StdRng::from_entropy();

                    let mut delay = INITIAL_RECONNECTION_DELAY;
                    loop {
                        match client.authenticate_and_connect(true, &cx).await {
                            ConnectionResult::Timeout => {
                                log::error!("client connect attempt timed out")
                            }
                            ConnectionResult::ConnectionReset => {
                                log::error!("client connect attempt reset")
                            }
                            ConnectionResult::Result(r) => {
                                if let Err(error) = r {
                                    log::error!("failed to connect: {error}");
                                } else {
                                    break;
                                }
                            }
                        }

                        if matches!(*client.status().borrow(), Status::ConnectionError) {
                            client.set_status(
                    while let Err(error) = this.authenticate_and_connect(true, &cx).await {
                        log::error!("failed to connect {}", error);
                        if matches!(*this.status().borrow(), Status::ConnectionError) {
                            this.set_status(
                                Status::ReconnectionError {
                                    next_reconnection: Instant::now() + delay,
                                },
@@ -852,7 +827,7 @@ impl Client {
        self: &Arc<Self>,
        try_provider: bool,
        cx: &AsyncApp,
    ) -> ConnectionResult<()> {
    ) -> anyhow::Result<()> {
        let was_disconnected = match *self.status().borrow() {
            Status::SignedOut => true,
            Status::ConnectionError
@@ -861,14 +836,9 @@ impl Client {
            | Status::Reauthenticating { .. }
            | Status::ReconnectionError { .. } => false,
            Status::Connected { .. } | Status::Connecting { .. } | Status::Reconnecting { .. } => {
                return ConnectionResult::Result(Ok(()));
            }
            Status::UpgradeRequired => {
                return ConnectionResult::Result(
                    Err(EstablishConnectionError::UpgradeRequired)
                        .context("client auth and connect"),
                );
                return Ok(());
            }
            Status::UpgradeRequired => return Err(EstablishConnectionError::UpgradeRequired)?,
        };
        if was_disconnected {
            self.set_status(Status::Authenticating, cx);
@@ -892,12 +862,12 @@ impl Client {
                        Ok(creds) => credentials = Some(creds),
                        Err(err) => {
                            self.set_status(Status::ConnectionError, cx);
                            return ConnectionResult::Result(Err(err));
                            return Err(err);
                        }
                    }
                }
                _ = status_rx.next().fuse() => {
                    return ConnectionResult::Result(Err(anyhow!("authentication canceled")));
                    return Err(anyhow!("authentication canceled"));
                }
            }
        }
@@ -922,10 +892,10 @@ impl Client {
        }

        futures::select_biased! {
            result = self.set_connection(conn, cx).fuse() => ConnectionResult::Result(result.context("client auth and connect")),
            result = self.set_connection(conn, cx).fuse() => result,
            _ = timeout => {
                self.set_status(Status::ConnectionError, cx);
                ConnectionResult::Timeout
                Err(anyhow!("timed out waiting on hello message from server"))
            }
        }
    }
@@ -937,22 +907,22 @@ impl Client {
                        self.authenticate_and_connect(false, cx).await
                    } else {
                        self.set_status(Status::ConnectionError, cx);
                        ConnectionResult::Result(Err(EstablishConnectionError::Unauthorized).context("client auth and connect"))
                        Err(EstablishConnectionError::Unauthorized)?
                    }
                }
                Err(EstablishConnectionError::UpgradeRequired) => {
                    self.set_status(Status::UpgradeRequired, cx);
                    ConnectionResult::Result(Err(EstablishConnectionError::UpgradeRequired).context("client auth and connect"))
                    Err(EstablishConnectionError::UpgradeRequired)?
                }
                Err(error) => {
                    self.set_status(Status::ConnectionError, cx);
                    ConnectionResult::Result(Err(error).context("client auth and connect"))
                    Err(error)?
                }
            }
        }
        _ = &mut timeout => {
            self.set_status(Status::ConnectionError, cx);
            ConnectionResult::Timeout
            Err(anyhow!("timed out trying to establish connection"))
        }
    }
}
@@ -968,7 +938,10 @@ impl Client {

        let peer_id = async {
            log::debug!("waiting for server hello");
            let message = incoming.next().await.context("no hello message received")?;
            let message = incoming
                .next()
                .await
                .ok_or_else(|| anyhow!("no hello message received"))?;
            log::debug!("got server hello");
            let hello_message_type_name = message.payload_type_name().to_string();
            let hello = message
@@ -1156,7 +1129,7 @@ impl Client {
            let handle = cx.update(|cx| gpui_tokio::Tokio::handle(cx)).ok().unwrap();
            let _guard = handle.enter();
            match proxy {
                Some(proxy) => connect_proxy_stream(&proxy, rpc_host).await?,
                Some(proxy) => connect_socks_proxy_stream(&proxy, rpc_host).await?,
                None => Box::new(TcpStream::connect(rpc_host).await?),
            }
        };
@@ -1770,7 +1743,7 @@ mod tests {
            status.next().await,
            Some(Status::ConnectionError { .. })
        ));
        auth_and_connect.await.into_response().unwrap_err();
        auth_and_connect.await.unwrap_err();

        // Allow the connection to be established.
        let server = FakeServer::for_client(user_id, &client, cx).await;
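The reconnect path above loops on `authenticate_and_connect`, logging each failure and backing off before retrying. Below is a minimal, self-contained sketch of that pattern, assuming tokio instead of the gpui executor and a local stand-in `ConnectionResult` enum; `try_connect`, the delay values, and the cap are all hypothetical illustration, not Zed's actual API.

```rust
// Illustrative sketch only: reconnect with exponential backoff, mirroring the
// shape of the loop in client.rs but with stand-in types and a fake connector.
use std::time::Duration;

enum ConnectionResult<T> {
    Timeout,
    ConnectionReset,
    Result(anyhow::Result<T>),
}

// Hypothetical connect function standing in for authenticate_and_connect.
async fn try_connect(attempt: u32) -> ConnectionResult<()> {
    if attempt < 3 {
        ConnectionResult::Timeout
    } else {
        ConnectionResult::Result(Ok(()))
    }
}

#[tokio::main]
async fn main() {
    let mut delay = Duration::from_millis(100); // stand-in for INITIAL_RECONNECTION_DELAY
    let mut attempt = 0;
    loop {
        match try_connect(attempt).await {
            ConnectionResult::Timeout => eprintln!("connect attempt timed out"),
            ConnectionResult::ConnectionReset => eprintln!("connect attempt reset"),
            ConnectionResult::Result(Err(error)) => eprintln!("failed to connect: {error}"),
            ConnectionResult::Result(Ok(())) => break,
        }
        tokio::time::sleep(delay).await;
        delay = (delay * 2).min(Duration::from_secs(10)); // exponential backoff with a cap
        attempt += 1;
    }
    println!("connected after {attempt} retries");
}
```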
@@ -1,66 +0,0 @@
//! client proxy

mod http_proxy;
mod socks_proxy;

use anyhow::{Context, Result, anyhow};
use http_client::Url;
use http_proxy::{HttpProxyType, connect_http_proxy_stream, parse_http_proxy};
use socks_proxy::{SocksVersion, connect_socks_proxy_stream, parse_socks_proxy};

pub(crate) async fn connect_proxy_stream(
    proxy: &Url,
    rpc_host: (&str, u16),
) -> Result<Box<dyn AsyncReadWrite>> {
    let Some(((proxy_domain, proxy_port), proxy_type)) = parse_proxy_type(proxy) else {
        // If parsing the proxy URL fails, we must avoid falling back to an insecure connection.
        // SOCKS proxies are often used in contexts where security and privacy are critical,
        // so any fallback could expose users to significant risks.
        return Err(anyhow!("Parsing proxy url failed"));
    };

    // Connect to proxy and wrap protocol later
    let stream = tokio::net::TcpStream::connect((proxy_domain.as_str(), proxy_port))
        .await
        .context("Failed to connect to proxy")?;

    let proxy_stream = match proxy_type {
        ProxyType::SocksProxy(proxy) => connect_socks_proxy_stream(stream, proxy, rpc_host).await?,
        ProxyType::HttpProxy(proxy) => {
            connect_http_proxy_stream(stream, proxy, rpc_host, &proxy_domain).await?
        }
    };

    Ok(proxy_stream)
}

enum ProxyType<'t> {
    SocksProxy(SocksVersion<'t>),
    HttpProxy(HttpProxyType<'t>),
}

fn parse_proxy_type<'t>(proxy: &'t Url) -> Option<((String, u16), ProxyType<'t>)> {
    let scheme = proxy.scheme();
    let host = proxy.host()?.to_string();
    let port = proxy.port_or_known_default()?;
    let proxy_type = match scheme {
        scheme if scheme.starts_with("socks") => {
            Some(ProxyType::SocksProxy(parse_socks_proxy(scheme, proxy)))
        }
        scheme if scheme.starts_with("http") => {
            Some(ProxyType::HttpProxy(parse_http_proxy(scheme, proxy)))
        }
        _ => None,
    }?;

    Some(((host, port), proxy_type))
}

pub(crate) trait AsyncReadWrite:
    tokio::io::AsyncRead + tokio::io::AsyncWrite + Unpin + Send + 'static
{
}
impl<T: tokio::io::AsyncRead + tokio::io::AsyncWrite + Unpin + Send + 'static> AsyncReadWrite
    for T
{
}
@@ -1,171 +0,0 @@
use anyhow::{Context, Result};
use base64::Engine;
use httparse::{EMPTY_HEADER, Response};
use tokio::{
    io::{AsyncBufReadExt, AsyncWriteExt, BufStream},
    net::TcpStream,
};
use tokio_native_tls::{TlsConnector, native_tls};
use url::Url;

use super::AsyncReadWrite;

pub(super) enum HttpProxyType<'t> {
    HTTP(Option<HttpProxyAuthorization<'t>>),
    HTTPS(Option<HttpProxyAuthorization<'t>>),
}

pub(super) struct HttpProxyAuthorization<'t> {
    username: &'t str,
    password: &'t str,
}

pub(super) fn parse_http_proxy<'t>(scheme: &str, proxy: &'t Url) -> HttpProxyType<'t> {
    let auth = proxy.password().map(|password| HttpProxyAuthorization {
        username: proxy.username(),
        password,
    });
    if scheme.starts_with("https") {
        HttpProxyType::HTTPS(auth)
    } else {
        HttpProxyType::HTTP(auth)
    }
}

pub(crate) async fn connect_http_proxy_stream(
    stream: TcpStream,
    http_proxy: HttpProxyType<'_>,
    rpc_host: (&str, u16),
    proxy_domain: &str,
) -> Result<Box<dyn AsyncReadWrite>> {
    match http_proxy {
        HttpProxyType::HTTP(auth) => http_connect(stream, rpc_host, auth).await,
        HttpProxyType::HTTPS(auth) => https_connect(stream, rpc_host, auth, proxy_domain).await,
    }
    .context("error connecting to http/https proxy")
}

async fn http_connect<T>(
    stream: T,
    target: (&str, u16),
    auth: Option<HttpProxyAuthorization<'_>>,
) -> Result<Box<dyn AsyncReadWrite>>
where
    T: AsyncReadWrite,
{
    let mut stream = BufStream::new(stream);
    let request = make_request(target, auth);
    stream.write_all(request.as_bytes()).await?;
    stream.flush().await?;
    check_response(&mut stream).await?;
    Ok(Box::new(stream))
}

async fn https_connect<T>(
    stream: T,
    target: (&str, u16),
    auth: Option<HttpProxyAuthorization<'_>>,
    proxy_domain: &str,
) -> Result<Box<dyn AsyncReadWrite>>
where
    T: AsyncReadWrite,
{
    let tls_connector = TlsConnector::from(native_tls::TlsConnector::new()?);
    let stream = tls_connector.connect(proxy_domain, stream).await?;
    http_connect(stream, target, auth).await
}

fn make_request(target: (&str, u16), auth: Option<HttpProxyAuthorization<'_>>) -> String {
    let (host, port) = target;
    let mut request = format!(
        "CONNECT {host}:{port} HTTP/1.1\r\nHost: {host}:{port}\r\nProxy-Connection: Keep-Alive\r\n"
    );
    if let Some(HttpProxyAuthorization { username, password }) = auth {
        let auth =
            base64::prelude::BASE64_STANDARD.encode(format!("{username}:{password}").as_bytes());
        let auth = format!("Proxy-Authorization: Basic {auth}\r\n");
        request.push_str(&auth);
    }
    request.push_str("\r\n");
    request
}

async fn check_response<T>(stream: &mut BufStream<T>) -> Result<()>
where
    T: AsyncReadWrite,
{
    let response = recv_response(stream).await?;
    let mut dummy_headers = [EMPTY_HEADER; MAX_RESPONSE_HEADERS];
    let mut parser = Response::new(&mut dummy_headers);
    parser.parse(response.as_bytes())?;

    match parser.code {
        Some(code) => {
            if code == 200 {
                Ok(())
            } else {
                Err(anyhow::anyhow!(
                    "Proxy connection failed with HTTP code: {code}"
                ))
            }
        }
        None => Err(anyhow::anyhow!(
            "Proxy connection failed with no HTTP code: {}",
            parser.reason.unwrap_or("Unknown reason")
        )),
    }
}

const MAX_RESPONSE_HEADER_LENGTH: usize = 4096;
const MAX_RESPONSE_HEADERS: usize = 16;

async fn recv_response<T>(stream: &mut BufStream<T>) -> Result<String>
where
    T: AsyncReadWrite,
{
    let mut response = String::new();
    loop {
        if stream.read_line(&mut response).await? == 0 {
            return Err(anyhow::anyhow!("End of stream"));
        }

        if MAX_RESPONSE_HEADER_LENGTH < response.len() {
            return Err(anyhow::anyhow!("Maximum response header length exceeded"));
        }

        if response.ends_with("\r\n\r\n") {
            return Ok(response);
        }
    }
}

#[cfg(test)]
mod tests {
    use url::Url;

    use super::{HttpProxyAuthorization, HttpProxyType, parse_http_proxy};

    #[test]
    fn test_parse_http_proxy() {
        let proxy = Url::parse("http://proxy.example.com:1080").unwrap();
        let scheme = proxy.scheme();

        let version = parse_http_proxy(scheme, &proxy);
        assert!(matches!(version, HttpProxyType::HTTP(None)))
    }

    #[test]
    fn test_parse_http_proxy_with_auth() {
        let proxy = Url::parse("http://username:password@proxy.example.com:1080").unwrap();
        let scheme = proxy.scheme();

        let version = parse_http_proxy(scheme, &proxy);
        assert!(matches!(
            version,
            HttpProxyType::HTTP(Some(HttpProxyAuthorization {
                username: "username",
                password: "password"
            }))
        ))
    }
}
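The deleted module above tunnels through an HTTP proxy by sending a `CONNECT host:port` request with an optional `Proxy-Authorization: Basic` header and expecting a 200 response. The sketch below re-derives only the request formatting, assuming the `base64` crate; the `connect_request` helper is illustrative, not the module's actual API.

```rust
// Sketch of the CONNECT handshake request format used for HTTP proxies,
// mirroring make_request above; the helper name and signature are hypothetical.
use base64::Engine as _;

fn connect_request(host: &str, port: u16, auth: Option<(&str, &str)>) -> String {
    let mut request = format!(
        "CONNECT {host}:{port} HTTP/1.1\r\nHost: {host}:{port}\r\nProxy-Connection: Keep-Alive\r\n"
    );
    if let Some((username, password)) = auth {
        // Basic auth is base64("user:pass"); only safe over a TLS connection to the proxy.
        let encoded = base64::prelude::BASE64_STANDARD.encode(format!("{username}:{password}"));
        request.push_str(&format!("Proxy-Authorization: Basic {encoded}\r\n"));
    }
    request.push_str("\r\n"); // blank line terminates the header block
    request
}

fn main() {
    let req = connect_request("zed.dev", 443, Some(("user", "secret")));
    assert!(req.starts_with("CONNECT zed.dev:443 HTTP/1.1\r\n"));
    print!("{req}");
}
```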
@@ -1,19 +1,15 @@
//! socks proxy

use anyhow::{Context, Result};
use tokio::net::TcpStream;
use anyhow::{Context, Result, anyhow};
use http_client::Url;
use tokio_socks::tcp::{Socks4Stream, Socks5Stream};
use url::Url;

use super::AsyncReadWrite;

/// Identification to a Socks V4 Proxy
pub(super) struct Socks4Identification<'a> {
struct Socks4Identification<'a> {
    user_id: &'a str,
}

/// Authorization to a Socks V5 Proxy
pub(super) struct Socks5Authorization<'a> {
struct Socks5Authorization<'a> {
    username: &'a str,
    password: &'a str,
}
@@ -22,50 +18,45 @@ pub(super) struct Socks5Authorization<'a> {
///
/// V4 allows idenfication using a user_id
/// V5 allows authorization using a username and password
pub(super) enum SocksVersion<'a> {
enum SocksVersion<'a> {
    V4(Option<Socks4Identification<'a>>),
    V5(Option<Socks5Authorization<'a>>),
}

pub(super) fn parse_socks_proxy<'t>(scheme: &str, proxy: &'t Url) -> SocksVersion<'t> {
    if scheme.starts_with("socks4") {
        let identification = match proxy.username() {
            "" => None,
            username => Some(Socks4Identification { user_id: username }),
        };
        SocksVersion::V4(identification)
    } else {
        let authorization = proxy.password().map(|password| Socks5Authorization {
            username: proxy.username(),
            password,
        });
        SocksVersion::V5(authorization)
    }
}

pub(super) async fn connect_socks_proxy_stream(
    stream: TcpStream,
    socks_version: SocksVersion<'_>,
pub(crate) async fn connect_socks_proxy_stream(
    proxy: &Url,
    rpc_host: (&str, u16),
) -> Result<Box<dyn AsyncReadWrite>> {
    match socks_version {
    let Some((socks_proxy, version)) = parse_socks_proxy(proxy) else {
        // If parsing the proxy URL fails, we must avoid falling back to an insecure connection.
        // SOCKS proxies are often used in contexts where security and privacy are critical,
        // so any fallback could expose users to significant risks.
        return Err(anyhow!("Parsing proxy url failed"));
    };

    // Connect to proxy and wrap protocol later
    let stream = tokio::net::TcpStream::connect(socks_proxy)
        .await
        .context("Failed to connect to socks proxy")?;

    let socks: Box<dyn AsyncReadWrite> = match version {
        SocksVersion::V4(None) => {
            let socks = Socks4Stream::connect_with_socket(stream, rpc_host)
                .await
                .context("error connecting to socks")?;
            Ok(Box::new(socks))
            Box::new(socks)
        }
        SocksVersion::V4(Some(Socks4Identification { user_id })) => {
            let socks = Socks4Stream::connect_with_userid_and_socket(stream, rpc_host, user_id)
                .await
                .context("error connecting to socks")?;
            Ok(Box::new(socks))
            Box::new(socks)
        }
        SocksVersion::V5(None) => {
            let socks = Socks5Stream::connect_with_socket(stream, rpc_host)
                .await
                .context("error connecting to socks")?;
            Ok(Box::new(socks))
            Box::new(socks)
        }
        SocksVersion::V5(Some(Socks5Authorization { username, password })) => {
            let socks = Socks5Stream::connect_with_password_and_socket(
@@ -73,9 +64,44 @@ pub(super) async fn connect_socks_proxy_stream(
            )
            .await
            .context("error connecting to socks")?;
            Ok(Box::new(socks))
            Box::new(socks)
        }
    }
    };

    Ok(socks)
}

fn parse_socks_proxy(proxy: &Url) -> Option<((String, u16), SocksVersion<'_>)> {
    let scheme = proxy.scheme();
    let socks_version = if scheme.starts_with("socks4") {
        let identification = match proxy.username() {
            "" => None,
            username => Some(Socks4Identification { user_id: username }),
        };
        SocksVersion::V4(identification)
    } else if scheme.starts_with("socks") {
        let authorization = proxy.password().map(|password| Socks5Authorization {
            username: proxy.username(),
            password,
        });
        SocksVersion::V5(authorization)
    } else {
        return None;
    };

    let host = proxy.host()?.to_string();
    let port = proxy.port_or_known_default()?;

    Some(((host, port), socks_version))
}

pub(crate) trait AsyncReadWrite:
    tokio::io::AsyncRead + tokio::io::AsyncWrite + Unpin + Send + 'static
{
}
impl<T: tokio::io::AsyncRead + tokio::io::AsyncWrite + Unpin + Send + 'static> AsyncReadWrite
    for T
{
}

#[cfg(test)]
@@ -87,18 +113,20 @@ mod tests {
    #[test]
    fn parse_socks4() {
        let proxy = Url::parse("socks4://proxy.example.com:1080").unwrap();
        let scheme = proxy.scheme();

        let version = parse_socks_proxy(scheme, &proxy);
        let ((host, port), version) = parse_socks_proxy(&proxy).unwrap();
        assert_eq!(host, "proxy.example.com");
        assert_eq!(port, 1080);
        assert!(matches!(version, SocksVersion::V4(None)))
    }

    #[test]
    fn parse_socks4_with_identification() {
        let proxy = Url::parse("socks4://userid@proxy.example.com:1080").unwrap();
        let scheme = proxy.scheme();

        let version = parse_socks_proxy(scheme, &proxy);
        let ((host, port), version) = parse_socks_proxy(&proxy).unwrap();
        assert_eq!(host, "proxy.example.com");
        assert_eq!(port, 1080);
        assert!(matches!(
            version,
            SocksVersion::V4(Some(Socks4Identification { user_id: "userid" }))
@@ -108,18 +136,20 @@ mod tests {
    #[test]
    fn parse_socks5() {
        let proxy = Url::parse("socks5://proxy.example.com:1080").unwrap();
        let scheme = proxy.scheme();

        let version = parse_socks_proxy(scheme, &proxy);
        let ((host, port), version) = parse_socks_proxy(&proxy).unwrap();
        assert_eq!(host, "proxy.example.com");
        assert_eq!(port, 1080);
        assert!(matches!(version, SocksVersion::V5(None)))
    }

    #[test]
    fn parse_socks5_with_authorization() {
        let proxy = Url::parse("socks5://username:password@proxy.example.com:1080").unwrap();
        let scheme = proxy.scheme();

        let version = parse_socks_proxy(scheme, &proxy);
        let ((host, port), version) = parse_socks_proxy(&proxy).unwrap();
        assert_eq!(host, "proxy.example.com");
        assert_eq!(port, 1080);
        assert!(matches!(
            version,
            SocksVersion::V5(Some(Socks5Authorization {
@@ -128,4 +158,19 @@ mod tests {
            }))
        ))
    }

    /// If parsing the proxy URL fails, we must avoid falling back to an insecure connection.
    /// SOCKS proxies are often used in contexts where security and privacy are critical,
    /// so any fallback could expose users to significant risks.
    #[tokio::test]
    async fn fails_on_bad_proxy() {
        // Should fail connecting because http is not a valid Socks proxy scheme
        let proxy = Url::parse("http://localhost:2313").unwrap();

        let result = connect_socks_proxy_stream(&proxy, ("test", 1080)).await;
        match result {
            Err(e) => assert_eq!(e.to_string(), "Parsing proxy url failed"),
            Ok(_) => panic!("Connecting on bad proxy should fail"),
        };
    }
}
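The `parse_socks_proxy` rework above decides the SOCKS version from the URL scheme and extracts the host and port before any connection is attempted, refusing unknown schemes instead of falling back. Here is a small self-contained sketch of the same dispatch using the `url` crate; the `ProxyKind` enum and `classify_proxy` function are illustrative stand-ins, not the crate's API.

```rust
// Sketch of scheme-based proxy dispatch with the `url` crate, paralleling
// parse_socks_proxy above; names here are hypothetical.
use url::Url;

#[derive(Debug, PartialEq)]
enum ProxyKind {
    Socks4,
    Socks5,
    Http,
}

fn classify_proxy(proxy: &Url) -> Option<((String, u16), ProxyKind)> {
    let kind = match proxy.scheme() {
        s if s.starts_with("socks4") => ProxyKind::Socks4,
        s if s.starts_with("socks") => ProxyKind::Socks5,
        s if s.starts_with("http") => ProxyKind::Http,
        _ => return None, // unknown scheme: refuse rather than fall back insecurely
    };
    let host = proxy.host()?.to_string();
    let port = proxy.port_or_known_default()?;
    Some(((host, port), kind))
}

fn main() {
    let url = Url::parse("socks5://user:pass@proxy.example.com:1080").unwrap();
    let ((host, port), kind) = classify_proxy(&url).unwrap();
    assert_eq!((host.as_str(), port, kind), ("proxy.example.com", 1080, ProxyKind::Socks5));
}
```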
@@ -137,14 +137,18 @@ pub fn os_version() -> String {
        log::error!("Failed to load /etc/os-release, /usr/lib/os-release");
        "".to_string()
    };
    let mut name = "unknown";
    let mut version = "unknown";
    let mut name = "unknown".to_string();
    let mut version = "unknown".to_string();

    for line in content.lines() {
        match line.split_once('=') {
            Some(("ID", val)) => name = val.trim_matches('"'),
            Some(("VERSION_ID", val)) => version = val.trim_matches('"'),
            _ => {}
        if line.starts_with("ID=") {
            name = line.trim_start_matches("ID=").trim_matches('"').to_string();
        }
        if line.starts_with("VERSION_ID=") {
            version = line
                .trim_start_matches("VERSION_ID=")
                .trim_matches('"')
                .to_string();
        }
    }
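Both versions of `os_version` above scan an os-release style file for the `ID` and `VERSION_ID` keys and strip surrounding quotes. A minimal standalone sketch of the `split_once`-based variant, with made-up sample content for illustration:

```rust
// Sketch of parsing os-release key=value lines, matching the split_once branch above.
fn parse_os_release(content: &str) -> (String, String) {
    let mut name = "unknown".to_string();
    let mut version = "unknown".to_string();
    for line in content.lines() {
        match line.split_once('=') {
            Some(("ID", val)) => name = val.trim_matches('"').to_string(),
            Some(("VERSION_ID", val)) => version = val.trim_matches('"').to_string(),
            _ => {}
        }
    }
    (name, version)
}

fn main() {
    let sample = "NAME=\"Ubuntu\"\nID=ubuntu\nVERSION_ID=\"24.04\"\n";
    assert_eq!(parse_os_release(sample), ("ubuntu".into(), "24.04".into()));
}
```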
@@ -218,7 +222,7 @@ impl Telemetry {
        cx.background_spawn({
            let state = state.clone();
            let os_version = os_version();
            state.lock().os_version = Some(os_version);
            state.lock().os_version = Some(os_version.clone());
            async move {
                if let Some(tempfile) = File::create(Self::log_file_path()).log_err() {
                    state.lock().log_file = Some(tempfile);
@@ -365,7 +369,7 @@ impl Telemetry {
        telemetry::event!(
            "Editor Edited",
            duration = duration,
            environment = environment,
            environment = environment.to_string(),
            is_via_ssh = is_via_ssh
        );
    }
@@ -427,8 +431,9 @@ impl Telemetry {

        if state.flush_events_task.is_none() {
            let this = self.clone();
            let executor = self.executor.clone();
            state.flush_events_task = Some(self.executor.spawn(async move {
                this.executor.timer(FLUSH_INTERVAL).await;
                executor.timer(FLUSH_INTERVAL).await;
                this.flush_events().detach();
            }));
        }
@@ -479,12 +484,12 @@ impl Telemetry {
        self: &Arc<Self>,
        // We take in the JSON bytes buffer so we can reuse the existing allocation.
        mut json_bytes: Vec<u8>,
        event_request: &EventRequestBody,
        event_request: EventRequestBody,
    ) -> Result<Request<AsyncBody>> {
        json_bytes.clear();
        serde_json::to_writer(&mut json_bytes, event_request)?;
        serde_json::to_writer(&mut json_bytes, &event_request)?;

        let checksum = calculate_json_checksum(&json_bytes).unwrap_or_default();
        let checksum = calculate_json_checksum(&json_bytes).unwrap_or("".to_string());

        Ok(Request::builder()
            .method(Method::POST)
@@ -501,7 +506,7 @@ impl Telemetry {
    pub fn flush_events(self: &Arc<Self>) -> Task<()> {
        let mut state = self.state.lock();
        state.first_event_date_time = None;
        let events = mem::take(&mut state.events_queue);
        let mut events = mem::take(&mut state.events_queue);
        state.flush_events_task.take();
        drop(state);
        if events.is_empty() {
@@ -514,7 +519,7 @@ impl Telemetry {
            let mut json_bytes = Vec::new();

            if let Some(file) = &mut this.state.lock().log_file {
                for event in &events {
                for event in &mut events {
                    json_bytes.clear();
                    serde_json::to_writer(&mut json_bytes, event)?;
                    file.write_all(&json_bytes)?;
@@ -541,7 +546,7 @@ impl Telemetry {
                }
            };

            let request = this.build_request(json_bytes, &request_body)?;
            let request = this.build_request(json_bytes, request_body)?;
            let response = this.http_client.send(request).await?;
            if response.status() != 200 {
                log::error!("Failed to send events: HTTP {:?}", response.status());
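The telemetry changes above keep reusing a single `Vec<u8>` buffer: it is cleared and refilled with `serde_json::to_writer` for each event and for the final request body, avoiding a fresh allocation per serialization. A minimal sketch of that pattern, assuming only `serde` and `serde_json`; the `Event` struct is a hypothetical stand-in for the real event types:

```rust
// Sketch of the buffer-reuse pattern from build_request/flush_events above.
use serde::Serialize;

#[derive(Serialize)]
struct Event {
    // Hypothetical fields; the real request body has a different shape.
    name: &'static str,
    duration_ms: u64,
}

fn main() -> anyhow::Result<()> {
    let events = vec![
        Event { name: "Editor Edited", duration_ms: 120 },
        Event { name: "Editor Edited", duration_ms: 75 },
    ];

    let mut json_bytes = Vec::new();
    for event in &events {
        json_bytes.clear(); // keep the allocation, drop the previous contents
        serde_json::to_writer(&mut json_bytes, event)?;
        json_bytes.push(b'\n'); // newline-delimited JSON, one line per event
        // ...write json_bytes to the log file or append it to the request body here...
    }
    Ok(())
}
```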
@@ -107,7 +107,6 @@ impl FakeServer {
        client
            .authenticate_and_connect(false, &cx.to_async())
            .await
            .into_response()
            .unwrap();

        server
@@ -18,8 +18,7 @@ use stripe::{
    CreateBillingPortalSessionFlowDataSubscriptionUpdateConfirm,
    CreateBillingPortalSessionFlowDataSubscriptionUpdateConfirmItems,
    CreateBillingPortalSessionFlowDataType, CreateCustomer, Customer, CustomerId, EventObject,
    EventType, Expandable, ListEvents, PaymentMethod, Subscription, SubscriptionId,
    SubscriptionStatus,
    EventType, Expandable, ListEvents, Subscription, SubscriptionId, SubscriptionStatus,
};
use util::{ResultExt, maybe};

@@ -72,7 +71,6 @@ struct GetBillingPreferencesParams {

#[derive(Debug, Serialize)]
struct BillingPreferencesResponse {
    trial_started_at: Option<String>,
    max_monthly_llm_usage_spending_in_cents: i32,
    model_request_overages_enabled: bool,
    model_request_overages_spend_limit_in_cents: i32,
@@ -88,17 +86,9 @@ async fn get_billing_preferences(
        .await?
        .ok_or_else(|| anyhow!("user not found"))?;

    let billing_customer = app.db.get_billing_customer_by_user_id(user.id).await?;
    let preferences = app.db.get_billing_preferences(user.id).await?;

    Ok(Json(BillingPreferencesResponse {
        trial_started_at: billing_customer
            .and_then(|billing_customer| billing_customer.trial_started_at)
            .map(|trial_started_at| {
                trial_started_at
                    .and_utc()
                    .to_rfc3339_opts(SecondsFormat::Millis, true)
            }),
        max_monthly_llm_usage_spending_in_cents: preferences
            .as_ref()
            .map_or(DEFAULT_MAX_MONTHLY_SPEND.0 as i32, |preferences| {
@@ -137,8 +127,6 @@ async fn update_billing_preferences(
        .await?
        .ok_or_else(|| anyhow!("user not found"))?;

    let billing_customer = app.db.get_billing_customer_by_user_id(user.id).await?;

    let max_monthly_llm_usage_spending_in_cents =
        body.max_monthly_llm_usage_spending_in_cents.max(0);
    let model_request_overages_spend_limit_in_cents =
@@ -194,13 +182,6 @@ async fn update_billing_preferences(
    rpc_server.refresh_llm_tokens_for_user(user.id).await;

    Ok(Json(BillingPreferencesResponse {
        trial_started_at: billing_customer
            .and_then(|billing_customer| billing_customer.trial_started_at)
            .map(|trial_started_at| {
                trial_started_at
                    .and_utc()
                    .to_rfc3339_opts(SecondsFormat::Millis, true)
            }),
        max_monthly_llm_usage_spending_in_cents: billing_preferences
            .max_monthly_llm_usage_spending_in_cents,
        model_request_overages_enabled: billing_preferences.model_request_overages_enabled,
@@ -320,6 +301,13 @@ async fn create_billing_subscription(
            "not supported".into(),
        ))?
    };
    let Some(llm_db) = app.llm_db.clone() else {
        log::error!("failed to retrieve LLM database");
        Err(Error::http(
            StatusCode::NOT_IMPLEMENTED,
            "not supported".into(),
        ))?
    };

    if app.db.has_active_billing_subscription(user.id).await? {
        return Err(Error::http(
@@ -411,10 +399,16 @@ async fn create_billing_subscription(
                .await?
        }
        None => {
            return Err(Error::http(
                StatusCode::BAD_REQUEST,
                "No product selected".into(),
            ));
            let default_model = llm_db.model(
                zed_llm_client::LanguageModelProvider::Anthropic,
                "claude-3-7-sonnet",
            )?;
            let stripe_model = stripe_billing
                .register_model_for_token_based_usage(default_model)
                .await?;
            stripe_billing
                .checkout(customer_id, &user.github_login, &stripe_model, &success_url)
                .await?
        }
    };

@@ -430,8 +424,6 @@ enum ManageSubscriptionIntent {
    ///
    /// This will open the Stripe billing portal without putting the user in a specific flow.
    ManageSubscription,
    /// The user intends to update their payment method.
    UpdatePaymentMethod,
    /// The user intends to upgrade to Zed Pro.
    UpgradeToPro,
    /// The user intends to cancel their subscription.
@@ -446,7 +438,6 @@ struct ManageBillingSubscriptionBody {
    intent: ManageSubscriptionIntent,
    /// The ID of the subscription to manage.
    subscription_id: BillingSubscriptionId,
    redirect_to: Option<String>,
}

#[derive(Debug, Serialize)]
@@ -544,23 +535,6 @@ async fn manage_billing_subscription(
            .map_or(false, |price| price.id == zed_pro_price_id)
    });
    if is_on_zed_pro_trial {
        let payment_methods = PaymentMethod::list(
            &stripe_client,
            &stripe::ListPaymentMethods {
                customer: Some(stripe_subscription.customer.id()),
                ..Default::default()
            },
        )
        .await?;

        let has_payment_method = !payment_methods.data.is_empty();
        if !has_payment_method {
            return Err(Error::http(
                StatusCode::BAD_REQUEST,
                "missing payment method".into(),
            ));
        }

        // If the user is already on a Zed Pro trial and wants to upgrade to Pro, we just need to end their trial early.
        Subscription::update(
            &stripe_client,
@@ -610,21 +584,6 @@ async fn manage_billing_subscription(
            ..Default::default()
        })
    }
    ManageSubscriptionIntent::UpdatePaymentMethod => Some(CreateBillingPortalSessionFlowData {
        type_: CreateBillingPortalSessionFlowDataType::PaymentMethodUpdate,
        after_completion: Some(CreateBillingPortalSessionFlowDataAfterCompletion {
            type_: stripe::CreateBillingPortalSessionFlowDataAfterCompletionType::Redirect,
            redirect: Some(CreateBillingPortalSessionFlowDataAfterCompletionRedirect {
                return_url: format!(
                    "{}{path}",
                    app.config.zed_dot_dev_url(),
                    path = body.redirect_to.unwrap_or_else(|| "/account".to_string())
                ),
            }),
            ..Default::default()
        }),
        ..Default::default()
    }),
    ManageSubscriptionIntent::Cancel => Some(CreateBillingPortalSessionFlowData {
        type_: CreateBillingPortalSessionFlowDataType::SubscriptionCancel,
        after_completion: Some(CreateBillingPortalSessionFlowDataAfterCompletion {
@@ -1137,12 +1096,6 @@ async fn handle_customer_subscription_event(
            .await?;
    }

    // When the user's subscription changes, push down any changes to their plan.
    rpc_server
        .update_plan_for_user(billing_customer.user_id)
        .await
        .trace_err();

    // When the user's subscription changes, we want to refresh their LLM tokens
    // to either grant/revoke access.
    rpc_server
@@ -1280,7 +1233,7 @@ async fn get_current_usage(
        subscription
            .kind
            .map(Into::into)
            .unwrap_or(zed_llm_client::Plan::ZedFree)
            .unwrap_or(zed_llm_client::Plan::Free)
    });

    let model_requests_limit = match plan.model_requests_limit() {
@@ -1428,6 +1381,81 @@ async fn find_or_create_billing_customer(
    Ok(Some(billing_customer))
}

const SYNC_LLM_TOKEN_USAGE_WITH_STRIPE_INTERVAL: Duration = Duration::from_secs(60);

pub fn sync_llm_token_usage_with_stripe_periodically(app: Arc<AppState>) {
    let Some(stripe_billing) = app.stripe_billing.clone() else {
        log::warn!("failed to retrieve Stripe billing object");
        return;
    };
    let Some(llm_db) = app.llm_db.clone() else {
        log::warn!("failed to retrieve LLM database");
        return;
    };

    let executor = app.executor.clone();
    executor.spawn_detached({
        let executor = executor.clone();
        async move {
            loop {
                sync_token_usage_with_stripe(&app, &llm_db, &stripe_billing)
                    .await
                    .context("failed to sync LLM usage to Stripe")
                    .trace_err();
                executor
                    .sleep(SYNC_LLM_TOKEN_USAGE_WITH_STRIPE_INTERVAL)
                    .await;
            }
        }
    });
}

async fn sync_token_usage_with_stripe(
    app: &Arc<AppState>,
    llm_db: &Arc<LlmDatabase>,
    stripe_billing: &Arc<StripeBilling>,
) -> anyhow::Result<()> {
    let events = llm_db.get_billing_events().await?;
    let user_ids = events
        .iter()
        .map(|(event, _)| event.user_id)
        .collect::<HashSet<UserId>>();
    let stripe_subscriptions = app.db.get_active_billing_subscriptions(user_ids).await?;

    for (event, model) in events {
        let Some((stripe_db_customer, stripe_db_subscription)) =
            stripe_subscriptions.get(&event.user_id)
        else {
            tracing::warn!(
                user_id = event.user_id.0,
                "Registered billing event for user who is not a Stripe customer. Billing events should only be created for users who are Stripe customers, so this is a mistake on our side."
            );
            continue;
        };
        let stripe_subscription_id: stripe::SubscriptionId = stripe_db_subscription
            .stripe_subscription_id
            .parse()
            .context("failed to parse stripe subscription id from db")?;
        let stripe_customer_id: stripe::CustomerId = stripe_db_customer
            .stripe_customer_id
            .parse()
            .context("failed to parse stripe customer id from db")?;

        let stripe_model = stripe_billing
            .register_model_for_token_based_usage(&model)
            .await?;
        stripe_billing
            .subscribe_to_model(&stripe_subscription_id, &stripe_model)
            .await?;
        stripe_billing
            .bill_model_token_usage(&stripe_customer_id, &stripe_model, &event)
            .await?;
        llm_db.consume_billing_event(event.id).await?;
    }

    Ok(())
}

const SYNC_LLM_REQUEST_USAGE_WITH_STRIPE_INTERVAL: Duration = Duration::from_secs(60);

pub fn sync_llm_request_usage_with_stripe_periodically(app: Arc<AppState>) {
@@ -543,7 +543,7 @@ pub struct MembershipUpdated {

/// The result of setting a member's role.
#[derive(Debug)]

#[allow(clippy::large_enum_variant)]
pub enum SetMemberRoleResult {
    InviteUpdated(Channel),
    MembershipUpdated(MembershipUpdated),

@@ -99,7 +99,7 @@ impl From<SubscriptionKind> for zed_llm_client::Plan {
        match value {
            SubscriptionKind::ZedPro => Self::ZedPro,
            SubscriptionKind::ZedProTrial => Self::ZedProTrial,
            SubscriptionKind::ZedFree => Self::ZedFree,
            SubscriptionKind::ZedFree => Self::Free,
        }
    }
}
@@ -1,5 +1,6 @@
use super::*;

pub mod billing_events;
pub mod providers;
pub mod subscription_usage_meters;
pub mod subscription_usages;

crates/collab/src/llm/db/queries/billing_events.rs (new file, 31 lines)
@@ -0,0 +1,31 @@
use super::*;
use crate::Result;
use anyhow::Context as _;

impl LlmDatabase {
    pub async fn get_billing_events(&self) -> Result<Vec<(billing_event::Model, model::Model)>> {
        self.transaction(|tx| async move {
            let events_with_models = billing_event::Entity::find()
                .find_also_related(model::Entity)
                .all(&*tx)
                .await?;
            events_with_models
                .into_iter()
                .map(|(event, model)| {
                    let model =
                        model.context("could not find model associated with billing event")?;
                    Ok((event, model))
                })
                .collect()
        })
        .await
    }

    pub async fn consume_billing_event(&self, id: BillingEventId) -> Result<()> {
        self.transaction(|tx| async move {
            billing_event::Entity::delete_by_id(id).exec(&*tx).await?;
            Ok(())
        })
        .await
    }
}
@@ -1,3 +1,4 @@
pub mod billing_event;
pub mod model;
pub mod monthly_usage;
pub mod provider;

crates/collab/src/llm/db/tables/billing_event.rs (new file, 37 lines)
@@ -0,0 +1,37 @@
use crate::{
    db::UserId,
    llm::db::{BillingEventId, ModelId},
};
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, PartialEq, DeriveEntityModel)]
#[sea_orm(table_name = "billing_events")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: BillingEventId,
    pub idempotency_key: Uuid,
    pub user_id: UserId,
    pub model_id: ModelId,
    pub input_tokens: i64,
    pub input_cache_creation_tokens: i64,
    pub input_cache_read_tokens: i64,
    pub output_tokens: i64,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::model::Entity",
        from = "Column::ModelId",
        to = "super::model::Column::Id"
    )]
    Model,
}

impl Related<super::model::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Model.def()
    }
}

impl ActiveModelBehavior for ActiveModel {}
@@ -31,6 +31,8 @@ pub enum Relation {
    Provider,
    #[sea_orm(has_many = "super::usage::Entity")]
    Usages,
    #[sea_orm(has_many = "super::billing_event::Entity")]
    BillingEvents,
}

impl Related<super::provider::Entity> for Entity {
@@ -45,4 +47,10 @@ impl Related<super::usage::Entity> for Entity {
    }
}

impl Related<super::billing_event::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::BillingEvents.def()
    }
}

impl ActiveModelBehavior for ActiveModel {}
@@ -1,6 +1,9 @@
use crate::Cents;
use crate::db::billing_subscription::SubscriptionKind;
use crate::db::{billing_subscription, user};
use crate::llm::AGENT_EXTENDED_TRIAL_FEATURE_FLAG;
use crate::llm::{
    AGENT_EXTENDED_TRIAL_FEATURE_FLAG, DEFAULT_MAX_MONTHLY_SPEND, FREE_TIER_MONTHLY_SPENDING_LIMIT,
};
use crate::{Config, db::billing_preference};
use anyhow::{Result, anyhow};
use chrono::{NaiveDateTime, Utc};
@@ -25,12 +28,23 @@ pub struct LlmTokenClaims {
    pub is_staff: bool,
    pub has_llm_closed_beta_feature_flag: bool,
    pub bypass_account_age_check: bool,
    pub has_llm_subscription: bool,
    #[serde(default)]
    pub use_llm_request_queue: bool,
    pub max_monthly_spend_in_cents: u32,
    pub custom_llm_monthly_allowance_in_cents: Option<u32>,
    #[serde(default)]
    pub use_new_billing: bool,
    pub plan: Plan,
    #[serde(default)]
    pub has_extended_trial: bool,
    pub subscription_period: (NaiveDateTime, NaiveDateTime),
    #[serde(default)]
    pub subscription_period: Option<(NaiveDateTime, NaiveDateTime)>,
    #[serde(default)]
    pub enable_model_request_overages: bool,
    #[serde(default)]
    pub model_request_overages_spend_limit_in_cents: u32,
    #[serde(default)]
    pub can_use_web_search_tool: bool,
}

@@ -42,6 +56,7 @@ impl LlmTokenClaims {
        is_staff: bool,
        billing_preferences: Option<billing_preference::Model>,
        feature_flags: &Vec<String>,
        has_legacy_llm_subscription: bool,
        subscription: Option<billing_subscription::Model>,
        system_id: Option<String>,
        config: &Config,
@@ -51,23 +66,6 @@ impl LlmTokenClaims {
            .as_ref()
            .ok_or_else(|| anyhow!("no LLM API secret"))?;

        let plan = if is_staff {
            Plan::ZedPro
        } else {
            subscription
                .as_ref()
                .and_then(|subscription| subscription.kind)
                .map_or(Plan::ZedFree, |kind| match kind {
                    SubscriptionKind::ZedFree => Plan::ZedFree,
                    SubscriptionKind::ZedPro => Plan::ZedPro,
                    SubscriptionKind::ZedProTrial => Plan::ZedProTrial,
                })
        };
        let subscription_period =
            billing_subscription::Model::current_period(subscription, is_staff)
                .map(|(start, end)| (start.naive_utc(), end.naive_utc()))
                .ok_or_else(|| anyhow!("A plan is required to use Zed's hosted models or edit predictions. Visit https://zed.dev/account to get started."))?;

        let now = Utc::now();
        let claims = Self {
            iat: now.timestamp() as u64,
@@ -85,13 +83,38 @@ impl LlmTokenClaims {
            bypass_account_age_check: feature_flags
                .iter()
                .any(|flag| flag == "bypass-account-age-check"),
            can_use_web_search_tool: true,
            can_use_web_search_tool: feature_flags.iter().any(|flag| flag == "assistant2"),
            has_llm_subscription: has_legacy_llm_subscription,
            max_monthly_spend_in_cents: billing_preferences
                .as_ref()
                .map_or(DEFAULT_MAX_MONTHLY_SPEND.0, |preferences| {
                    preferences.max_monthly_llm_usage_spending_in_cents as u32
                }),
            custom_llm_monthly_allowance_in_cents: user
                .custom_llm_monthly_allowance_in_cents
                .map(|allowance| allowance as u32),
            use_new_billing: feature_flags.iter().any(|flag| flag == "new-billing"),
            use_llm_request_queue: feature_flags.iter().any(|flag| flag == "llm-request-queue"),
            plan,
            plan: if is_staff {
                Plan::ZedPro
            } else {
                subscription
                    .as_ref()
                    .and_then(|subscription| subscription.kind)
                    .map_or(Plan::Free, |kind| match kind {
                        SubscriptionKind::ZedFree => Plan::Free,
                        SubscriptionKind::ZedPro => Plan::ZedPro,
                        SubscriptionKind::ZedProTrial => Plan::ZedProTrial,
                    })
            },
            has_extended_trial: feature_flags
                .iter()
                .any(|flag| flag == AGENT_EXTENDED_TRIAL_FEATURE_FLAG),
            subscription_period,
            subscription_period: billing_subscription::Model::current_period(
                subscription,
                is_staff,
            )
            .map(|(start, end)| (start.naive_utc(), end.naive_utc())),
            enable_model_request_overages: billing_preferences
                .as_ref()
                .map_or(false, |preferences| {
@@ -132,6 +155,12 @@ impl LlmTokenClaims {
            }
        }
    }

    pub fn free_tier_monthly_spending_limit(&self) -> Cents {
        self.custom_llm_monthly_allowance_in_cents
            .map(Cents)
            .unwrap_or(FREE_TIER_MONTHLY_SPENDING_LIMIT)
    }
}

#[derive(Error, Debug)]
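A detail worth noting in the claims diff above: the newer fields (and the now-optional `subscription_period`) are annotated with `#[serde(default)]`, so tokens minted before those fields existed still deserialize, with the missing fields falling back to their defaults. A trimmed, hypothetical stand-in illustrating the mechanism with plain `serde`/`serde_json`:

```rust
// Sketch: why #[serde(default)] keeps older tokens deserializable.
use serde::{Deserialize, Serialize};

#[derive(Debug, Serialize, Deserialize)]
struct Claims {
    exp: u64,
    is_staff: bool,
    #[serde(default)]
    use_llm_request_queue: bool, // absent in old tokens -> false
    #[serde(default)]
    subscription_period: Option<(i64, i64)>, // absent in old tokens -> None
}

fn main() -> anyhow::Result<()> {
    // Payload shaped like an older token, without the newer fields.
    let old_payload = r#"{"exp": 1700000000, "is_staff": false}"#;
    let claims: Claims = serde_json::from_str(old_payload)?;
    assert!(!claims.use_llm_request_queue);
    assert!(claims.subscription_period.is_none());
    println!("{claims:?}");
    Ok(())
}
```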
@@ -8,7 +8,9 @@ use axum::{
};

use collab::api::CloudflareIpCountryHeader;
use collab::api::billing::sync_llm_request_usage_with_stripe_periodically;
use collab::api::billing::{
    sync_llm_request_usage_with_stripe_periodically, sync_llm_token_usage_with_stripe_periodically,
};
use collab::llm::db::LlmDatabase;
use collab::migrations::run_database_migrations;
use collab::user_backfiller::spawn_user_backfiller;
@@ -36,7 +38,6 @@ use util::{ResultExt as _, maybe};
const VERSION: &str = env!("CARGO_PKG_VERSION");
const REVISION: Option<&'static str> = option_env!("GITHUB_SHA");

#[expect(clippy::result_large_err)]
#[tokio::main]
async fn main() -> Result<()> {
    if let Err(error) = env::load_dotenv() {
@@ -154,6 +155,7 @@ async fn main() -> Result<()> {
    if let Some(mut llm_db) = llm_db {
        llm_db.initialize().await?;
        sync_llm_request_usage_with_stripe_periodically(state.clone());
        sync_llm_token_usage_with_stripe_periodically(state.clone());
    }

    app = app
@@ -2,7 +2,6 @@ mod connection_pool;
|
||||
|
||||
use crate::api::{CloudflareIpCountryHeader, SystemIdHeader};
|
||||
use crate::db::billing_subscription::SubscriptionKind;
|
||||
use crate::llm::db::LlmDatabase;
|
||||
use crate::llm::{AGENT_EXTENDED_TRIAL_FEATURE_FLAG, LlmTokenClaims};
|
||||
use crate::{
|
||||
AppState, Error, Result, auth,
|
||||
@@ -68,7 +67,7 @@ use std::{
|
||||
time::{Duration, Instant},
|
||||
};
|
||||
use time::OffsetDateTime;
|
||||
use tokio::sync::{Semaphore, watch};
|
||||
use tokio::sync::{MutexGuard, Semaphore, watch};
|
||||
use tower::ServiceBuilder;
|
||||
use tracing::{
|
||||
Instrument,
|
||||
@@ -167,6 +166,42 @@ impl Session {
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn has_llm_subscription(
|
||||
&self,
|
||||
db: &MutexGuard<'_, DbHandle>,
|
||||
) -> anyhow::Result<bool> {
|
||||
if self.is_staff() {
|
||||
return Ok(true);
|
||||
}
|
||||
|
||||
let user_id = self.user_id();
|
||||
|
||||
Ok(db.has_active_billing_subscription(user_id).await?)
|
||||
}
|
||||
|
||||
pub async fn current_plan(&self, db: &MutexGuard<'_, DbHandle>) -> anyhow::Result<proto::Plan> {
|
||||
if self.is_staff() {
|
||||
return Ok(proto::Plan::ZedPro);
|
||||
}
|
||||
|
||||
let user_id = self.user_id();
|
||||
|
||||
let subscription = db.get_active_billing_subscription(user_id).await?;
|
||||
let subscription_kind = subscription.and_then(|subscription| subscription.kind);
|
||||
|
||||
let plan = if let Some(subscription_kind) = subscription_kind {
|
||||
match subscription_kind {
|
||||
SubscriptionKind::ZedPro => proto::Plan::ZedPro,
|
||||
SubscriptionKind::ZedProTrial => proto::Plan::ZedProTrial,
|
||||
SubscriptionKind::ZedFree => proto::Plan::Free,
|
||||
}
|
||||
} else {
|
||||
proto::Plan::Free
|
||||
};
|
||||
|
||||
Ok(plan)
|
||||
}
|
||||
|
||||
fn user_id(&self) -> UserId {
|
||||
match &self.principal {
|
||||
Principal::User(user) => user.id,
|
||||
@@ -931,32 +966,6 @@ impl Server {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn update_plan_for_user(self: &Arc<Self>, user_id: UserId) -> Result<()> {
|
||||
let user = self
|
||||
.app_state
|
||||
.db
|
||||
.get_user_by_id(user_id)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("user not found"))?;
|
||||
|
||||
let update_user_plan = make_update_user_plan_message(
|
||||
&self.app_state.db,
|
||||
self.app_state.llm_db.clone(),
|
||||
user_id,
|
||||
user.admin,
|
||||
)
|
||||
.await?;
|
||||
|
||||
let pool = self.connection_pool.lock();
|
||||
for connection_id in pool.user_connection_ids(user_id) {
|
||||
self.peer
|
||||
.send(connection_id, update_user_plan.clone())
|
||||
.trace_err();
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn refresh_llm_tokens_for_user(self: &Arc<Self>, user_id: UserId) {
|
||||
let pool = self.connection_pool.lock();
|
||||
for connection_id in pool.user_connection_ids(user_id) {
|
||||
@@ -2692,43 +2701,21 @@ fn should_auto_subscribe_to_channels(version: ZedVersion) -> bool {
|
||||
version.0.minor() < 139
|
||||
}
|
||||
|
||||
async fn current_plan(db: &Arc<Database>, user_id: UserId, is_staff: bool) -> Result<proto::Plan> {
|
||||
if is_staff {
|
||||
return Ok(proto::Plan::ZedPro);
|
||||
}
|
||||
async fn update_user_plan(user_id: UserId, session: &Session) -> Result<()> {
|
||||
let db = session.db().await;
|
||||
|
||||
let subscription = db.get_active_billing_subscription(user_id).await?;
|
||||
let subscription_kind = subscription.and_then(|subscription| subscription.kind);
|
||||
|
||||
let plan = if let Some(subscription_kind) = subscription_kind {
|
||||
match subscription_kind {
|
||||
SubscriptionKind::ZedPro => proto::Plan::ZedPro,
|
||||
SubscriptionKind::ZedProTrial => proto::Plan::ZedProTrial,
|
||||
SubscriptionKind::ZedFree => proto::Plan::Free,
|
||||
}
|
||||
} else {
|
||||
proto::Plan::Free
|
||||
};
|
||||
|
||||
Ok(plan)
|
||||
}
|
||||
|
||||
async fn make_update_user_plan_message(
|
||||
db: &Arc<Database>,
|
||||
llm_db: Option<Arc<LlmDatabase>>,
|
||||
user_id: UserId,
|
||||
is_staff: bool,
|
||||
) -> Result<proto::UpdateUserPlan> {
|
||||
let feature_flags = db.get_user_flags(user_id).await?;
|
||||
let plan = current_plan(db, user_id, is_staff).await?;
|
||||
let plan = session.current_plan(&db).await?;
|
||||
let billing_customer = db.get_billing_customer_by_user_id(user_id).await?;
|
||||
let billing_preferences = db.get_billing_preferences(user_id).await?;
|
||||
|
||||
let (subscription_period, usage) = if let Some(llm_db) = llm_db {
|
||||
let (subscription_period, usage) = if let Some(llm_db) = session.app_state.llm_db.clone() {
|
||||
let subscription = db.get_active_billing_subscription(user_id).await?;
|
||||
|
||||
let subscription_period =
|
||||
crate::db::billing_subscription::Model::current_period(subscription, is_staff);
|
||||
let subscription_period = crate::db::billing_subscription::Model::current_period(
|
||||
subscription,
|
||||
session.is_staff(),
|
||||
);
|
||||
|
||||
let usage = if let Some((period_start_at, period_end_at)) = subscription_period {
|
||||
llm_db
|
||||
@@ -2743,92 +2730,92 @@ async fn make_update_user_plan_message(
|
||||
(None, None)
|
||||
};
|
||||
|
||||
Ok(proto::UpdateUserPlan {
|
||||
plan: plan.into(),
|
||||
trial_started_at: billing_customer
|
||||
.and_then(|billing_customer| billing_customer.trial_started_at)
|
||||
.map(|trial_started_at| trial_started_at.and_utc().timestamp() as u64),
|
||||
is_usage_based_billing_enabled: if is_staff {
|
||||
Some(true)
|
||||
} else {
|
||||
billing_preferences.map(|preferences| preferences.model_request_overages_enabled)
|
||||
},
|
||||
subscription_period: subscription_period.map(|(started_at, ended_at)| {
|
||||
proto::SubscriptionPeriod {
|
||||
started_at: started_at.timestamp() as u64,
|
||||
ended_at: ended_at.timestamp() as u64,
|
||||
}
|
||||
}),
|
||||
usage: usage.map(|usage| {
|
||||
let plan = match plan {
|
||||
proto::Plan::Free => zed_llm_client::Plan::ZedFree,
|
||||
proto::Plan::ZedPro => zed_llm_client::Plan::ZedPro,
|
||||
proto::Plan::ZedProTrial => zed_llm_client::Plan::ZedProTrial,
|
||||
};
|
||||
|
||||
let model_requests_limit = match plan.model_requests_limit() {
|
||||
zed_llm_client::UsageLimit::Limited(limit) => {
|
||||
let limit = if plan == zed_llm_client::Plan::ZedProTrial
|
||||
&& feature_flags
|
||||
.iter()
|
||||
.any(|flag| flag == AGENT_EXTENDED_TRIAL_FEATURE_FLAG)
|
||||
{
|
||||
1_000
|
||||
} else {
|
||||
limit
|
||||
};
|
||||
|
||||
zed_llm_client::UsageLimit::Limited(limit)
|
||||
}
|
||||
zed_llm_client::UsageLimit::Unlimited => zed_llm_client::UsageLimit::Unlimited,
|
||||
};
|
||||
|
||||
proto::SubscriptionUsage {
|
||||
model_requests_usage_amount: usage.model_requests as u32,
|
||||
model_requests_usage_limit: Some(proto::UsageLimit {
|
||||
variant: Some(match model_requests_limit {
|
||||
zed_llm_client::UsageLimit::Limited(limit) => {
|
||||
proto::usage_limit::Variant::Limited(proto::usage_limit::Limited {
|
||||
limit: limit as u32,
|
||||
})
|
||||
}
|
||||
zed_llm_client::UsageLimit::Unlimited => {
|
||||
proto::usage_limit::Variant::Unlimited(proto::usage_limit::Unlimited {})
|
||||
}
|
||||
}),
|
||||
}),
|
||||
edit_predictions_usage_amount: usage.edit_predictions as u32,
|
||||
edit_predictions_usage_limit: Some(proto::UsageLimit {
|
||||
variant: Some(match plan.edit_predictions_limit() {
|
||||
zed_llm_client::UsageLimit::Limited(limit) => {
|
||||
proto::usage_limit::Variant::Limited(proto::usage_limit::Limited {
|
||||
limit: limit as u32,
|
||||
})
|
||||
}
|
||||
zed_llm_client::UsageLimit::Unlimited => {
|
||||
proto::usage_limit::Variant::Unlimited(proto::usage_limit::Unlimited {})
|
||||
}
|
||||
}),
|
||||
}),
|
||||
}
|
||||
}),
|
||||
})
|
||||
}
|
||||
|
||||
async fn update_user_plan(user_id: UserId, session: &Session) -> Result<()> {
|
||||
let db = session.db().await;
|
||||
|
||||
let update_user_plan = make_update_user_plan_message(
|
||||
&db.0,
|
||||
session.app_state.llm_db.clone(),
|
||||
user_id,
|
||||
session.is_staff(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
session
|
||||
.peer
|
||||
.send(session.connection_id, update_user_plan)
|
||||
.send(
|
||||
session.connection_id,
|
||||
proto::UpdateUserPlan {
|
||||
plan: plan.into(),
|
||||
trial_started_at: billing_customer
|
||||
.and_then(|billing_customer| billing_customer.trial_started_at)
|
||||
.map(|trial_started_at| trial_started_at.and_utc().timestamp() as u64),
|
||||
is_usage_based_billing_enabled: if session.is_staff() {
|
||||
Some(true)
|
||||
} else {
|
||||
billing_preferences
|
||||
.map(|preferences| preferences.model_request_overages_enabled)
|
||||
},
|
||||
subscription_period: subscription_period.map(|(started_at, ended_at)| {
|
||||
proto::SubscriptionPeriod {
|
||||
started_at: started_at.timestamp() as u64,
|
||||
ended_at: ended_at.timestamp() as u64,
|
||||
}
|
||||
}),
|
||||
usage: usage.map(|usage| {
|
||||
let plan = match plan {
|
||||
proto::Plan::Free => zed_llm_client::Plan::Free,
|
||||
proto::Plan::ZedPro => zed_llm_client::Plan::ZedPro,
|
||||
proto::Plan::ZedProTrial => zed_llm_client::Plan::ZedProTrial,
|
||||
};
|
||||
|
||||
let model_requests_limit = match plan.model_requests_limit() {
|
||||
zed_llm_client::UsageLimit::Limited(limit) => {
|
||||
let limit = if plan == zed_llm_client::Plan::ZedProTrial
|
||||
&& feature_flags
|
||||
.iter()
|
||||
.any(|flag| flag == AGENT_EXTENDED_TRIAL_FEATURE_FLAG)
|
||||
{
|
||||
1_000
|
||||
} else {
|
||||
limit
|
||||
};
|
||||
|
||||
zed_llm_client::UsageLimit::Limited(limit)
|
||||
}
|
||||
zed_llm_client::UsageLimit::Unlimited => {
|
||||
zed_llm_client::UsageLimit::Unlimited
|
||||
}
|
||||
};
|
||||
|
||||
proto::SubscriptionUsage {
|
||||
model_requests_usage_amount: usage.model_requests as u32,
|
||||
model_requests_usage_limit: Some(proto::UsageLimit {
|
||||
variant: Some(match model_requests_limit {
|
||||
zed_llm_client::UsageLimit::Limited(limit) => {
|
||||
proto::usage_limit::Variant::Limited(
|
||||
proto::usage_limit::Limited {
|
||||
limit: limit as u32,
|
||||
},
|
||||
)
|
||||
}
|
||||
zed_llm_client::UsageLimit::Unlimited => {
|
||||
proto::usage_limit::Variant::Unlimited(
|
||||
proto::usage_limit::Unlimited {},
|
||||
)
|
||||
}
|
||||
}),
|
||||
}),
|
||||
edit_predictions_usage_amount: usage.edit_predictions as u32,
|
||||
edit_predictions_usage_limit: Some(proto::UsageLimit {
|
||||
variant: Some(match plan.edit_predictions_limit() {
|
||||
zed_llm_client::UsageLimit::Limited(limit) => {
|
||||
proto::usage_limit::Variant::Limited(
|
||||
proto::usage_limit::Limited {
|
||||
limit: limit as u32,
|
||||
},
|
||||
)
|
||||
}
|
||||
zed_llm_client::UsageLimit::Unlimited => {
|
||||
proto::usage_limit::Variant::Unlimited(
|
||||
proto::usage_limit::Unlimited {},
|
||||
)
|
||||
}
|
||||
}),
|
||||
}),
|
||||
}
|
||||
}),
|
||||
},
|
||||
)
|
||||
.trace_err();
|
||||
|
||||
Ok(())
|
||||
@@ -4013,6 +4000,11 @@ async fn get_llm_api_token(
    let db = session.db().await;

    let flags = db.get_user_flags(session.user_id()).await?;
    let has_language_models_feature_flag = flags.iter().any(|flag| flag == "language-models");

    if !session.is_staff() && !has_language_models_feature_flag {
        Err(anyhow!("permission denied"))?
    }

    let user_id = session.user_id();
    let user = db
@@ -4024,6 +4016,7 @@ async fn get_llm_api_token(
        Err(anyhow!("terms of service not accepted"))?
    }

    let has_legacy_llm_subscription = session.has_llm_subscription(&db).await?;
    let billing_subscription = db.get_active_billing_subscription(user.id).await?;
    let billing_preferences = db.get_billing_preferences(user.id).await?;

@@ -4032,6 +4025,7 @@ async fn get_llm_api_token(
        session.is_staff(),
        billing_preferences,
        &flags,
        has_legacy_llm_subscription,
        billing_subscription,
        session.system_id.clone(),
        &session.app_state.config,
@@ -1,9 +1,9 @@
use std::sync::Arc;

use crate::Result;
use crate::llm::AGENT_EXTENDED_TRIAL_FEATURE_FLAG;
use crate::llm::{self, AGENT_EXTENDED_TRIAL_FEATURE_FLAG};
use crate::{Cents, Result};
use anyhow::{Context as _, anyhow};
use chrono::Utc;
use chrono::{Datelike, Utc};
use collections::HashMap;
use serde::{Deserialize, Serialize};
use stripe::PriceId;
@@ -22,6 +22,18 @@ struct StripeBillingState {
    prices_by_lookup_key: HashMap<String, stripe::Price>,
}

pub struct StripeModelTokenPrices {
    input_tokens_price: StripeBillingPrice,
    input_cache_creation_tokens_price: StripeBillingPrice,
    input_cache_read_tokens_price: StripeBillingPrice,
    output_tokens_price: StripeBillingPrice,
}

struct StripeBillingPrice {
    id: stripe::PriceId,
    meter_event_name: String,
}

impl StripeBilling {
    pub fn new(client: Arc<stripe::Client>) -> Self {
        Self {
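The hunk that follows registers one metered Stripe price per token class for a model; the per-unit amount sent to Stripe is derived from a price stored as cents per million tokens. As a rough, hedged sketch of that conversion (the helper name and the example value are illustrative, not part of the diff):

fn unit_amount_decimal(cents_per_million_tokens: u32) -> String {
    // Stripe's `unit_amount_decimal` is expressed per single unit (here: per
    // token), so divide the per-million-token price by 1_000_000.
    format!("{:.12}", cents_per_million_tokens as f64 / 1_000_000f64)
}

fn main() {
    // 300 cents ($3.00) per million input tokens
    // => 0.000300000000 cents billed per token reported on the meter.
    assert_eq!(unit_amount_decimal(300), "0.000300000000");
    println!("{}", unit_amount_decimal(300));
}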
@@ -97,6 +109,142 @@ impl StripeBilling {
|
||||
.ok_or_else(|| crate::Error::Internal(anyhow!("no price found for {lookup_key:?}")))
|
||||
}
|
||||
|
||||
pub async fn register_model_for_token_based_usage(
|
||||
&self,
|
||||
model: &llm::db::model::Model,
|
||||
) -> Result<StripeModelTokenPrices> {
|
||||
let input_tokens_price = self
|
||||
.get_or_insert_token_price(
|
||||
&format!("model_{}/input_tokens", model.id),
|
||||
&format!("{} (Input Tokens)", model.name),
|
||||
Cents::new(model.price_per_million_input_tokens as u32),
|
||||
)
|
||||
.await?;
|
||||
let input_cache_creation_tokens_price = self
|
||||
.get_or_insert_token_price(
|
||||
&format!("model_{}/input_cache_creation_tokens", model.id),
|
||||
&format!("{} (Input Cache Creation Tokens)", model.name),
|
||||
Cents::new(model.price_per_million_cache_creation_input_tokens as u32),
|
||||
)
|
||||
.await?;
|
||||
let input_cache_read_tokens_price = self
|
||||
.get_or_insert_token_price(
|
||||
&format!("model_{}/input_cache_read_tokens", model.id),
|
||||
&format!("{} (Input Cache Read Tokens)", model.name),
|
||||
Cents::new(model.price_per_million_cache_read_input_tokens as u32),
|
||||
)
|
||||
.await?;
|
||||
let output_tokens_price = self
|
||||
.get_or_insert_token_price(
|
||||
&format!("model_{}/output_tokens", model.id),
|
||||
&format!("{} (Output Tokens)", model.name),
|
||||
Cents::new(model.price_per_million_output_tokens as u32),
|
||||
)
|
||||
.await?;
|
||||
Ok(StripeModelTokenPrices {
|
||||
input_tokens_price,
|
||||
input_cache_creation_tokens_price,
|
||||
input_cache_read_tokens_price,
|
||||
output_tokens_price,
|
||||
})
|
||||
}
|
||||
|
||||
async fn get_or_insert_token_price(
|
||||
&self,
|
||||
meter_event_name: &str,
|
||||
price_description: &str,
|
||||
price_per_million_tokens: Cents,
|
||||
) -> Result<StripeBillingPrice> {
|
||||
// Fast code path when the meter and the price already exist.
|
||||
{
|
||||
let state = self.state.read().await;
|
||||
if let Some(meter) = state.meters_by_event_name.get(meter_event_name) {
|
||||
if let Some(price_id) = state.price_ids_by_meter_id.get(&meter.id) {
|
||||
return Ok(StripeBillingPrice {
|
||||
id: price_id.clone(),
|
||||
meter_event_name: meter_event_name.to_string(),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut state = self.state.write().await;
|
||||
let meter = if let Some(meter) = state.meters_by_event_name.get(meter_event_name) {
|
||||
meter.clone()
|
||||
} else {
|
||||
let meter = StripeMeter::create(
|
||||
&self.client,
|
||||
StripeCreateMeterParams {
|
||||
default_aggregation: DefaultAggregation { formula: "sum" },
|
||||
display_name: price_description.to_string(),
|
||||
event_name: meter_event_name,
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
state
|
||||
.meters_by_event_name
|
||||
.insert(meter_event_name.to_string(), meter.clone());
|
||||
meter
|
||||
};
|
||||
|
||||
let price_id = if let Some(price_id) = state.price_ids_by_meter_id.get(&meter.id) {
|
||||
price_id.clone()
|
||||
} else {
|
||||
let price = stripe::Price::create(
|
||||
&self.client,
|
||||
stripe::CreatePrice {
|
||||
active: Some(true),
|
||||
billing_scheme: Some(stripe::PriceBillingScheme::PerUnit),
|
||||
currency: stripe::Currency::USD,
|
||||
currency_options: None,
|
||||
custom_unit_amount: None,
|
||||
expand: &[],
|
||||
lookup_key: None,
|
||||
metadata: None,
|
||||
nickname: None,
|
||||
product: None,
|
||||
product_data: Some(stripe::CreatePriceProductData {
|
||||
id: None,
|
||||
active: Some(true),
|
||||
metadata: None,
|
||||
name: price_description.to_string(),
|
||||
statement_descriptor: None,
|
||||
tax_code: None,
|
||||
unit_label: None,
|
||||
}),
|
||||
recurring: Some(stripe::CreatePriceRecurring {
|
||||
aggregate_usage: None,
|
||||
interval: stripe::CreatePriceRecurringInterval::Month,
|
||||
interval_count: None,
|
||||
trial_period_days: None,
|
||||
usage_type: Some(stripe::CreatePriceRecurringUsageType::Metered),
|
||||
meter: Some(meter.id.clone()),
|
||||
}),
|
||||
tax_behavior: None,
|
||||
tiers: None,
|
||||
tiers_mode: None,
|
||||
transfer_lookup_key: None,
|
||||
transform_quantity: None,
|
||||
unit_amount: None,
|
||||
unit_amount_decimal: Some(&format!(
|
||||
"{:.12}",
|
||||
price_per_million_tokens.0 as f64 / 1_000_000f64
|
||||
)),
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
state
|
||||
.price_ids_by_meter_id
|
||||
.insert(meter.id, price.id.clone());
|
||||
price.id
|
||||
};
|
||||
|
||||
Ok(StripeBillingPrice {
|
||||
id: price_id,
|
||||
meter_event_name: meter_event_name.to_string(),
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn subscribe_to_price(
|
||||
&self,
|
||||
subscription_id: &stripe::SubscriptionId,
|
||||
@@ -135,6 +283,142 @@ impl StripeBilling {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn subscribe_to_model(
|
||||
&self,
|
||||
subscription_id: &stripe::SubscriptionId,
|
||||
model: &StripeModelTokenPrices,
|
||||
) -> Result<()> {
|
||||
let subscription =
|
||||
stripe::Subscription::retrieve(&self.client, &subscription_id, &[]).await?;
|
||||
|
||||
let mut items = Vec::new();
|
||||
|
||||
if !subscription_contains_price(&subscription, &model.input_tokens_price.id) {
|
||||
items.push(stripe::UpdateSubscriptionItems {
|
||||
price: Some(model.input_tokens_price.id.to_string()),
|
||||
..Default::default()
|
||||
});
|
||||
}
|
||||
|
||||
if !subscription_contains_price(&subscription, &model.input_cache_creation_tokens_price.id)
|
||||
{
|
||||
items.push(stripe::UpdateSubscriptionItems {
|
||||
price: Some(model.input_cache_creation_tokens_price.id.to_string()),
|
||||
..Default::default()
|
||||
});
|
||||
}
|
||||
|
||||
if !subscription_contains_price(&subscription, &model.input_cache_read_tokens_price.id) {
|
||||
items.push(stripe::UpdateSubscriptionItems {
|
||||
price: Some(model.input_cache_read_tokens_price.id.to_string()),
|
||||
..Default::default()
|
||||
});
|
||||
}
|
||||
|
||||
if !subscription_contains_price(&subscription, &model.output_tokens_price.id) {
|
||||
items.push(stripe::UpdateSubscriptionItems {
|
||||
price: Some(model.output_tokens_price.id.to_string()),
|
||||
..Default::default()
|
||||
});
|
||||
}
|
||||
|
||||
if !items.is_empty() {
|
||||
items.extend(subscription.items.data.iter().map(|item| {
|
||||
stripe::UpdateSubscriptionItems {
|
||||
id: Some(item.id.to_string()),
|
||||
..Default::default()
|
||||
}
|
||||
}));
|
||||
|
||||
stripe::Subscription::update(
|
||||
&self.client,
|
||||
subscription_id,
|
||||
stripe::UpdateSubscription {
|
||||
items: Some(items),
|
||||
..Default::default()
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn bill_model_token_usage(
|
||||
&self,
|
||||
customer_id: &stripe::CustomerId,
|
||||
model: &StripeModelTokenPrices,
|
||||
event: &llm::db::billing_event::Model,
|
||||
) -> Result<()> {
|
||||
let timestamp = Utc::now().timestamp();
|
||||
|
||||
if event.input_tokens > 0 {
|
||||
StripeMeterEvent::create(
|
||||
&self.client,
|
||||
StripeCreateMeterEventParams {
|
||||
identifier: &format!("input_tokens/{}", event.idempotency_key),
|
||||
event_name: &model.input_tokens_price.meter_event_name,
|
||||
payload: StripeCreateMeterEventPayload {
|
||||
value: event.input_tokens as u64,
|
||||
stripe_customer_id: customer_id,
|
||||
},
|
||||
timestamp: Some(timestamp),
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
if event.input_cache_creation_tokens > 0 {
|
||||
StripeMeterEvent::create(
|
||||
&self.client,
|
||||
StripeCreateMeterEventParams {
|
||||
identifier: &format!("input_cache_creation_tokens/{}", event.idempotency_key),
|
||||
event_name: &model.input_cache_creation_tokens_price.meter_event_name,
|
||||
payload: StripeCreateMeterEventPayload {
|
||||
value: event.input_cache_creation_tokens as u64,
|
||||
stripe_customer_id: customer_id,
|
||||
},
|
||||
timestamp: Some(timestamp),
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
if event.input_cache_read_tokens > 0 {
|
||||
StripeMeterEvent::create(
|
||||
&self.client,
|
||||
StripeCreateMeterEventParams {
|
||||
identifier: &format!("input_cache_read_tokens/{}", event.idempotency_key),
|
||||
event_name: &model.input_cache_read_tokens_price.meter_event_name,
|
||||
payload: StripeCreateMeterEventPayload {
|
||||
value: event.input_cache_read_tokens as u64,
|
||||
stripe_customer_id: customer_id,
|
||||
},
|
||||
timestamp: Some(timestamp),
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
if event.output_tokens > 0 {
|
||||
StripeMeterEvent::create(
|
||||
&self.client,
|
||||
StripeCreateMeterEventParams {
|
||||
identifier: &format!("output_tokens/{}", event.idempotency_key),
|
||||
event_name: &model.output_tokens_price.meter_event_name,
|
||||
payload: StripeCreateMeterEventPayload {
|
||||
value: event.output_tokens as u64,
|
||||
stripe_customer_id: customer_id,
|
||||
},
|
||||
timestamp: Some(timestamp),
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn bill_model_request_usage(
|
||||
&self,
|
||||
customer_id: &stripe::CustomerId,
|
||||
@@ -161,6 +445,47 @@ impl StripeBilling {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn checkout(
|
||||
&self,
|
||||
customer_id: stripe::CustomerId,
|
||||
github_login: &str,
|
||||
model: &StripeModelTokenPrices,
|
||||
success_url: &str,
|
||||
) -> Result<String> {
|
||||
let first_of_next_month = Utc::now()
|
||||
.checked_add_months(chrono::Months::new(1))
|
||||
.unwrap()
|
||||
.with_day(1)
|
||||
.unwrap();
|
||||
|
||||
let mut params = stripe::CreateCheckoutSession::new();
|
||||
params.mode = Some(stripe::CheckoutSessionMode::Subscription);
|
||||
params.customer = Some(customer_id);
|
||||
params.client_reference_id = Some(github_login);
|
||||
params.subscription_data = Some(stripe::CreateCheckoutSessionSubscriptionData {
|
||||
billing_cycle_anchor: Some(first_of_next_month.timestamp()),
|
||||
..Default::default()
|
||||
});
|
||||
params.line_items = Some(
|
||||
[
|
||||
&model.input_tokens_price.id,
|
||||
&model.input_cache_creation_tokens_price.id,
|
||||
&model.input_cache_read_tokens_price.id,
|
||||
&model.output_tokens_price.id,
|
||||
]
|
||||
.into_iter()
|
||||
.map(|price_id| stripe::CreateCheckoutSessionLineItems {
|
||||
price: Some(price_id.to_string()),
|
||||
..Default::default()
|
||||
})
|
||||
.collect(),
|
||||
);
|
||||
params.success_url = Some(success_url);
|
||||
|
||||
let session = stripe::CheckoutSession::create(&self.client, params).await?;
|
||||
Ok(session.url.context("no checkout session URL")?)
|
||||
}
|
||||
|
||||
pub async fn checkout_with_zed_pro(
|
||||
&self,
|
||||
customer_id: stripe::CustomerId,
|
||||
@@ -248,8 +573,6 @@ impl StripeBilling {

        let mut params = stripe::CreateCheckoutSession::new();
        params.mode = Some(stripe::CheckoutSessionMode::Subscription);
        params.payment_method_collection =
            Some(stripe::CheckoutSessionPaymentMethodCollection::IfRequired);
        params.customer = Some(customer_id);
        params.client_reference_id = Some(github_login);
        params.line_items = Some(vec![stripe::CreateCheckoutSessionLineItems {
@@ -264,6 +587,18 @@ impl StripeBilling {
    }
}

#[derive(Serialize)]
struct DefaultAggregation {
    formula: &'static str,
}

#[derive(Serialize)]
struct StripeCreateMeterParams<'a> {
    default_aggregation: DefaultAggregation,
    display_name: String,
    event_name: &'a str,
}

#[derive(Clone, Deserialize)]
struct StripeMeter {
    id: String,
@@ -271,6 +606,13 @@ struct StripeMeter {
}

impl StripeMeter {
    pub fn create(
        client: &stripe::Client,
        params: StripeCreateMeterParams,
    ) -> stripe::Response<Self> {
        client.post_form("/billing/meters", params)
    }

    pub fn list(client: &stripe::Client) -> stripe::Response<stripe::List<Self>> {
        #[derive(Serialize)]
        struct Params {

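For context, a hypothetical usage sketch of the meter wrapper above, assuming the async-stripe `stripe::Client` used elsewhere in this file; the display name and event name are made up, and `StripeMeter`, `StripeCreateMeterParams`, and `DefaultAggregation` are the types defined in this diff:

async fn ensure_example_meter(client: &stripe::Client) -> anyhow::Result<StripeMeter> {
    // POSTs to /billing/meters via the custom wrapper shown above.
    let meter = StripeMeter::create(
        client,
        StripeCreateMeterParams {
            default_aggregation: DefaultAggregation { formula: "sum" },
            display_name: "Example Model (Input Tokens)".to_string(),
            event_name: "model_example/input_tokens",
        },
    )
    .await?;
    Ok(meter)
}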
@@ -36,8 +36,8 @@ fn room_participants(room: &Entity<Room>, cx: &mut TestAppContext) -> RoomPartic
    room.read_with(cx, |room, _| {
        let mut remote = room
            .remote_participants()
            .values()
            .map(|participant| participant.user.github_login.clone())
            .iter()
            .map(|(_, participant)| participant.user.github_login.clone())
            .collect::<Vec<_>>();
        let mut pending = room
            .pending_participants()
@@ -1740,7 +1740,6 @@ async fn test_mutual_editor_inlay_hint_cache_update(
    fake_language_server
        .request::<lsp::request::InlayHintRefreshRequest>(())
        .await
        .into_response()
        .expect("inlay refresh request failed");

    executor.run_until_parked();
@@ -1931,7 +1930,6 @@ async fn test_inlay_hint_refresh_is_forwarded(
    fake_language_server
        .request::<lsp::request::InlayHintRefreshRequest>(())
        .await
        .into_response()
        .expect("inlay refresh request failed");
    executor.run_until_parked();
    editor_a.update(cx_a, |editor, _| {

@@ -1253,7 +1253,6 @@ async fn test_calls_on_multiple_connections(
    client_b1
        .authenticate_and_connect(false, &cx_b1.to_async())
        .await
        .into_response()
        .unwrap();

    // User B hangs up, and user A calls them again.
@@ -1634,7 +1633,6 @@ async fn test_project_reconnect(
    client_a
        .authenticate_and_connect(false, &cx_a.to_async())
        .await
        .into_response()
        .unwrap();
    executor.run_until_parked();

@@ -1763,7 +1761,6 @@ async fn test_project_reconnect(
    client_b
        .authenticate_and_connect(false, &cx_b.to_async())
        .await
        .into_response()
        .unwrap();
    executor.run_until_parked();

@@ -4320,7 +4317,6 @@ async fn test_collaborating_with_lsp_progress_updates_and_diagnostics_ordering(
            token: lsp::NumberOrString::String("the-disk-based-token".to_string()),
        })
        .await
        .into_response()
        .unwrap();
    fake_language_server.notify::<lsp::notification::Progress>(&lsp::ProgressParams {
        token: lsp::NumberOrString::String("the-disk-based-token".to_string()),
@@ -5703,7 +5699,6 @@ async fn test_contacts(
    client_c
        .authenticate_and_connect(false, &cx_c.to_async())
        .await
        .into_response()
        .unwrap();

    executor.run_until_parked();
@@ -6234,7 +6229,6 @@ async fn test_contact_requests(
        client
            .authenticate_and_connect(false, &cx.to_async())
            .await
            .into_response()
            .unwrap();
    }
}

@@ -313,7 +313,6 @@ impl TestServer {
        client
            .authenticate_and_connect(false, &cx.to_async())
            .await
            .into_response()
            .unwrap();

        let client = TestClient {

@@ -42,7 +42,6 @@ futures.workspace = true
fuzzy.workspace = true
gpui.workspace = true
language.workspace = true
log.workspace = true
menu.workspace = true
notifications.workspace = true
picker.workspace = true

@@ -1059,7 +1059,7 @@ impl Render for ChatPanel {
    .child(
        Label::new(format!(
            "@{}",
            user_being_replied_to.github_login
            user_being_replied_to.github_login.clone()
        ))
        .size(LabelSize::Small)
        .weight(FontWeight::BOLD),

@@ -378,27 +378,16 @@ impl CollabPanel {
|
||||
workspace: WeakEntity<Workspace>,
|
||||
mut cx: AsyncWindowContext,
|
||||
) -> anyhow::Result<Entity<Self>> {
|
||||
let serialized_panel = match workspace
|
||||
.read_with(&cx, |workspace, _| {
|
||||
CollabPanel::serialization_key(workspace)
|
||||
})
|
||||
.ok()
|
||||
let serialized_panel = cx
|
||||
.background_spawn(async move { KEY_VALUE_STORE.read_kvp(COLLABORATION_PANEL_KEY) })
|
||||
.await
|
||||
.map_err(|_| anyhow::anyhow!("Failed to read collaboration panel from key value store"))
|
||||
.log_err()
|
||||
.flatten()
|
||||
{
|
||||
Some(serialization_key) => cx
|
||||
.background_spawn(async move { KEY_VALUE_STORE.read_kvp(&serialization_key) })
|
||||
.await
|
||||
.map_err(|_| {
|
||||
anyhow::anyhow!("Failed to read collaboration panel from key value store")
|
||||
})
|
||||
.log_err()
|
||||
.flatten()
|
||||
.map(|panel| serde_json::from_str::<SerializedCollabPanel>(&panel))
|
||||
.transpose()
|
||||
.log_err()
|
||||
.flatten(),
|
||||
None => None,
|
||||
};
|
||||
.map(|panel| serde_json::from_str::<SerializedCollabPanel>(&panel))
|
||||
.transpose()
|
||||
.log_err()
|
||||
.flatten();
|
||||
|
||||
workspace.update_in(&mut cx, |workspace, window, cx| {
|
||||
let panel = CollabPanel::new(workspace, window, cx);
|
||||
@@ -418,30 +407,14 @@ impl CollabPanel {
|
||||
})
|
||||
}
|
||||
|
||||
fn serialization_key(workspace: &Workspace) -> Option<String> {
|
||||
workspace
|
||||
.database_id()
|
||||
.map(|id| i64::from(id).to_string())
|
||||
.or(workspace.session_id())
|
||||
.map(|id| format!("{}-{:?}", COLLABORATION_PANEL_KEY, id))
|
||||
}
|
||||
|
||||
fn serialize(&mut self, cx: &mut Context<Self>) {
|
||||
let Some(serialization_key) = self
|
||||
.workspace
|
||||
.update(cx, |workspace, _| CollabPanel::serialization_key(workspace))
|
||||
.ok()
|
||||
.flatten()
|
||||
else {
|
||||
return;
|
||||
};
|
||||
let width = self.width;
|
||||
let collapsed_channels = self.collapsed_channels.clone();
|
||||
self.pending_serialization = cx.background_spawn(
|
||||
async move {
|
||||
KEY_VALUE_STORE
|
||||
.write_kvp(
|
||||
serialization_key,
|
||||
COLLABORATION_PANEL_KEY.into(),
|
||||
serde_json::to_string(&SerializedCollabPanel {
|
||||
width,
|
||||
collapsed_channels: Some(
|
||||
@@ -1490,9 +1463,7 @@ impl CollabPanel {
|
||||
}
|
||||
|
||||
fn cancel(&mut self, _: &Cancel, window: &mut Window, cx: &mut Context<Self>) {
|
||||
if cx.stop_active_drag(window) {
|
||||
return;
|
||||
} else if self.take_editing_state(window, cx) {
|
||||
if self.take_editing_state(window, cx) {
|
||||
window.focus(&self.filter_editor.focus_handle(cx));
|
||||
} else if !self.reset_filter_editor_text(window, cx) {
|
||||
self.focus_handle.focus(window);
|
||||
@@ -2254,7 +2225,6 @@ impl CollabPanel {
|
||||
client
|
||||
.authenticate_and_connect(true, &cx)
|
||||
.await
|
||||
.into_response()
|
||||
.notify_async_err(cx);
|
||||
})
|
||||
.detach()
|
||||
@@ -3026,12 +2996,10 @@ impl Panel for CollabPanel {
|
||||
.unwrap_or_else(|| CollaborationPanelSettings::get_global(cx).default_width)
|
||||
}
|
||||
|
||||
fn set_size(&mut self, size: Option<Pixels>, window: &mut Window, cx: &mut Context<Self>) {
|
||||
fn set_size(&mut self, size: Option<Pixels>, _: &mut Window, cx: &mut Context<Self>) {
|
||||
self.width = size;
|
||||
self.serialize(cx);
|
||||
cx.notify();
|
||||
cx.defer_in(window, |this, _, cx| {
|
||||
this.serialize(cx);
|
||||
});
|
||||
}
|
||||
|
||||
fn icon(&self, _window: &Window, cx: &App) -> Option<ui::IconName> {
|
||||
|
||||
@@ -646,20 +646,10 @@ impl Render for NotificationPanel {
            let client = client.clone();
            window
                .spawn(cx, async move |cx| {
                    match client
                    client
                        .authenticate_and_connect(true, &cx)
                        .await
                    {
                        util::ConnectionResult::Timeout => {
                            log::error!("Connection timeout");
                        }
                        util::ConnectionResult::ConnectionReset => {
                            log::error!("Connection reset");
                        }
                        util::ConnectionResult::Result(r) => {
                            r.log_err();
                        }
                    }
                        .log_err()
                        .await;
                })
                .detach()
        }

@@ -7,11 +7,11 @@ use crate::notifications::collab_notification::CollabNotification;
pub struct CollabNotificationStory;

impl Render for CollabNotificationStory {
    fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
    fn render(&mut self, _window: &mut Window, _cx: &mut Context<Self>) -> impl IntoElement {
        let window_container = |width, height| div().w(px(width)).h(px(height));

        Story::container(cx)
            .child(Story::title_for::<CollabNotification>(cx))
        Story::container()
            .child(Story::title_for::<CollabNotification>())
            .child(
                StorySection::new().child(StoryItem::new(
                    "Incoming Call Notification",

@@ -28,7 +28,6 @@ pub struct ChatPanelSettings {
}

#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)]
#[schemars(deny_unknown_fields)]
pub struct ChatPanelSettingsContent {
    /// When to show the panel button in the status bar.
    ///
@@ -52,7 +51,6 @@ pub struct NotificationPanelSettings {
}

#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)]
#[schemars(deny_unknown_fields)]
pub struct PanelSettingsContent {
    /// Whether to show the panel button in the status bar.
    ///
@@ -69,7 +67,6 @@ pub struct PanelSettingsContent {
}

#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)]
#[schemars(deny_unknown_fields)]
pub struct MessageEditorSettings {
    /// Whether to automatically replace emoji shortcodes with emoji characters.
    /// For example: typing `:wave:` gets replaced with `👋`.

@@ -14,7 +14,7 @@ path = "src/component.rs"
[dependencies]
collections.workspace = true
gpui.workspace = true
inventory.workspace = true
linkme.workspace = true
parking_lot.workspace = true
strum.workspace = true
theme.workspace = true

@@ -9,12 +9,13 @@

mod component_layout;

use std::sync::LazyLock;

pub use component_layout::*;

use std::sync::LazyLock;

use collections::HashMap;
use gpui::{AnyElement, App, SharedString, Window};
use linkme::distributed_slice;
use parking_lot::RwLock;
use strum::{Display, EnumString};

@@ -23,27 +24,12 @@ pub fn components() -> ComponentRegistry {
}

pub fn init() {
    for f in inventory::iter::<ComponentFn>() {
        (f.0)();
    let component_fns: Vec<_> = __ALL_COMPONENTS.iter().cloned().collect();
    for f in component_fns {
        f();
    }
}

pub struct ComponentFn(fn());

impl ComponentFn {
    pub const fn new(f: fn()) -> Self {
        Self(f)
    }
}

inventory::collect!(ComponentFn);

/// Private internals for macros.
#[doc(hidden)]
pub mod __private {
    pub use inventory;
}

pub fn register_component<T: Component>() {
    let id = T::id();
    let metadata = ComponentMetadata {
@@ -60,6 +46,9 @@ pub fn register_component<T: Component>() {
    data.components.insert(id, metadata);
}

#[distributed_slice]
pub static __ALL_COMPONENTS: [fn()] = [..];

pub static COMPONENT_DATA: LazyLock<RwLock<ComponentRegistry>> =
    LazyLock::new(|| RwLock::new(ComponentRegistry::default()));

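The hunks above replace the `inventory`-based component registration with a `linkme` distributed slice. A minimal, self-contained sketch of that mechanism, assuming only the `linkme` crate (the slice and element names here are illustrative, not the crate's real registrations):

use linkme::distributed_slice;

// Any crate in the dependency graph can append a function pointer to this
// slice; the linker gathers all elements into a single static array.
#[distributed_slice]
pub static ALL_COMPONENTS: [fn()] = [..];

#[distributed_slice(ALL_COMPONENTS)]
static REGISTER_BUTTON: fn() = register_button;

fn register_button() {
    println!("registering Button");
}

fn main() {
    // Equivalent of `component::init()`: run every registration hook that was
    // linked into the binary.
    let component_fns: Vec<fn()> = ALL_COMPONENTS.iter().cloned().collect();
    for f in component_fns {
        f();
    }
}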
crates/component_preview/Cargo.toml (new file, 37 lines)
@@ -0,0 +1,37 @@
|
||||
[package]
|
||||
name = "component_preview"
|
||||
version = "0.1.0"
|
||||
edition.workspace = true
|
||||
publish.workspace = true
|
||||
license = "GPL-3.0-or-later"
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
[lib]
|
||||
path = "src/component_preview.rs"
|
||||
|
||||
[features]
|
||||
default = []
|
||||
|
||||
[dependencies]
|
||||
agent.workspace = true
|
||||
anyhow.workspace = true
|
||||
client.workspace = true
|
||||
collections.workspace = true
|
||||
component.workspace = true
|
||||
db.workspace = true
|
||||
futures.workspace = true
|
||||
gpui.workspace = true
|
||||
languages.workspace = true
|
||||
log.workspace = true
|
||||
notifications.workspace = true
|
||||
project.workspace = true
|
||||
prompt_store.workspace = true
|
||||
serde.workspace = true
|
||||
ui.workspace = true
|
||||
ui_input.workspace = true
|
||||
util.workspace = true
|
||||
workspace-hack.workspace = true
|
||||
workspace.workspace = true
|
||||
assistant_tool.workspace = true
|
||||
@@ -1,12 +1,12 @@
|
||||
use languages::LanguageRegistry;
|
||||
use project::Project;
|
||||
use std::sync::Arc;
|
||||
|
||||
use agent::{ActiveThread, ContextStore, MessageSegment, TextThreadStore, ThreadStore};
|
||||
use anyhow::{Result, anyhow};
|
||||
use assistant_tool::ToolWorkingSet;
|
||||
use gpui::{AppContext, AsyncApp, Entity, Task, WeakEntity};
|
||||
use indoc::indoc;
|
||||
use languages::LanguageRegistry;
|
||||
use project::Project;
|
||||
use prompt_store::PromptBuilder;
|
||||
use std::sync::Arc;
|
||||
use ui::{App, Window};
|
||||
use workspace::Workspace;
|
||||
|
||||
@@ -60,30 +60,16 @@ pub fn static_active_thread(
|
||||
let thread = thread_store.update(cx, |thread_store, cx| thread_store.create_thread(cx));
|
||||
thread.update(cx, |thread, cx| {
|
||||
thread.insert_assistant_message(vec![
|
||||
MessageSegment::Text(indoc! {"
|
||||
I'll help you fix the lifetime error in your `cx.spawn` call. When working with async operations in GPUI, there are specific patterns to follow for proper lifetime management.
|
||||
|
||||
Let's look at what's happening in your code:
|
||||
|
||||
---
|
||||
|
||||
Let's check the current state of the active_thread.rs file to understand what might have changed:
|
||||
|
||||
---
|
||||
|
||||
Looking at the implementation of `load_preview_thread_store` and understanding GPUI's async patterns, here's the issue:
|
||||
|
||||
1. `load_preview_thread_store` returns a `Task<anyhow::Result<Entity<ThreadStore>>>`, which means it's already a task.
|
||||
2. When you call this function inside another `spawn` call, you're nesting tasks incorrectly.
|
||||
|
||||
Here's the correct way to implement this:
|
||||
|
||||
---
|
||||
|
||||
The problem is in how you're setting up the async closure and trying to reference variables like `window` and `language_registry` that aren't accessible in that scope.
|
||||
|
||||
Here's how to fix it:
|
||||
"}.to_string()),
|
||||
MessageSegment::Text("I'll help you fix the lifetime error in your `cx.spawn` call. When working with async operations in GPUI, there are specific patterns to follow for proper lifetime management.".to_string()),
|
||||
MessageSegment::Text("\n\nLet's look at what's happening in your code:".to_string()),
|
||||
MessageSegment::Text("\n\n---\n\nLet's check the current state of the active_thread.rs file to understand what might have changed:".to_string()),
|
||||
MessageSegment::Text("\n\n---\n\nLooking at the implementation of `load_preview_thread_store` and understanding GPUI's async patterns, here's the issue:".to_string()),
|
||||
MessageSegment::Text("\n\n1. `load_preview_thread_store` returns a `Task<anyhow::Result<Entity<ThreadStore>>>`, which means it's already a task".to_string()),
|
||||
MessageSegment::Text("\n2. When you call this function inside another `spawn` call, you're nesting tasks incorrectly".to_string()),
|
||||
MessageSegment::Text("\n3. The `this` parameter you're trying to use in your closure has the wrong context".to_string()),
|
||||
MessageSegment::Text("\n\nHere's the correct way to implement this:".to_string()),
|
||||
MessageSegment::Text("\n\n---\n\nThe problem is in how you're setting up the async closure and trying to reference variables like `window` and `language_registry` that aren't accessible in that scope.".to_string()),
|
||||
MessageSegment::Text("\n\nHere's how to fix it:".to_string()),
|
||||
], cx);
|
||||
});
|
||||
cx.new(|cx| {
|
||||
@@ -5,7 +5,7 @@ mod sign_in;
|
||||
|
||||
use crate::sign_in::initiate_sign_in_within_workspace;
|
||||
use ::fs::Fs;
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use anyhow::{Result, anyhow};
|
||||
use collections::{HashMap, HashSet};
|
||||
use command_palette_hooks::CommandPaletteFilter;
|
||||
use futures::{Future, FutureExt, TryFutureExt, channel::oneshot, future::Shared};
|
||||
@@ -531,15 +531,11 @@ impl Copilot {
|
||||
.request::<request::CheckStatus>(request::CheckStatusParams {
|
||||
local_checks_only: false,
|
||||
})
|
||||
.await
|
||||
.into_response()
|
||||
.context("copilot: check status")?;
|
||||
.await?;
|
||||
|
||||
server
|
||||
.request::<request::SetEditorInfo>(editor_info)
|
||||
.await
|
||||
.into_response()
|
||||
.context("copilot: set editor info")?;
|
||||
.await?;
|
||||
|
||||
anyhow::Ok((server, status))
|
||||
};
|
||||
@@ -585,9 +581,7 @@ impl Copilot {
|
||||
.request::<request::SignInInitiate>(
|
||||
request::SignInInitiateParams {},
|
||||
)
|
||||
.await
|
||||
.into_response()
|
||||
.context("copilot sign-in")?;
|
||||
.await?;
|
||||
match sign_in {
|
||||
request::SignInInitiateResult::AlreadySignedIn { user } => {
|
||||
Ok(request::SignInStatus::Ok { user: Some(user) })
|
||||
@@ -615,9 +609,7 @@ impl Copilot {
|
||||
user_code: flow.user_code,
|
||||
},
|
||||
)
|
||||
.await
|
||||
.into_response()
|
||||
.context("copilot: sign in confirm")?;
|
||||
.await?;
|
||||
Ok(response)
|
||||
}
|
||||
}
|
||||
@@ -664,9 +656,7 @@ impl Copilot {
|
||||
cx.background_spawn(async move {
|
||||
server
|
||||
.request::<request::SignOut>(request::SignOutParams {})
|
||||
.await
|
||||
.into_response()
|
||||
.context("copilot: sign in confirm")?;
|
||||
.await?;
|
||||
anyhow::Ok(())
|
||||
})
|
||||
}
|
||||
@@ -883,10 +873,7 @@ impl Copilot {
|
||||
uuid: completion.uuid.clone(),
|
||||
});
|
||||
cx.background_spawn(async move {
|
||||
request
|
||||
.await
|
||||
.into_response()
|
||||
.context("copilot: notify accepted")?;
|
||||
request.await?;
|
||||
Ok(())
|
||||
})
|
||||
}
|
||||
@@ -910,10 +897,7 @@ impl Copilot {
|
||||
.collect(),
|
||||
});
|
||||
cx.background_spawn(async move {
|
||||
request
|
||||
.await
|
||||
.into_response()
|
||||
.context("copilot: notify rejected")?;
|
||||
request.await?;
|
||||
Ok(())
|
||||
})
|
||||
}
|
||||
@@ -973,9 +957,7 @@ impl Copilot {
|
||||
version: version.try_into().unwrap(),
|
||||
},
|
||||
})
|
||||
.await
|
||||
.into_response()
|
||||
.context("copilot: get completions")?;
|
||||
.await?;
|
||||
let completions = result
|
||||
.completions
|
||||
.into_iter()
|
||||
|
||||
@@ -36,6 +36,7 @@ gpui.workspace = true
http_client.workspace = true
language.workspace = true
log.workspace = true
lsp-types.workspace = true
node_runtime.workspace = true
parking_lot.workspace = true
paths.workspace = true

@@ -4,11 +4,11 @@ use async_compression::futures::bufread::GzipDecoder;
use async_tar::Archive;
use async_trait::async_trait;
use collections::HashMap;
pub use dap_types::{StartDebuggingRequestArguments, StartDebuggingRequestArgumentsRequest};
use dap_types::{StartDebuggingRequestArguments, StartDebuggingRequestArgumentsRequest};
use futures::io::BufReader;
use gpui::{AsyncApp, SharedString};
pub use http_client::{HttpClient, github::latest_github_release};
use language::{LanguageName, LanguageToolchainStore};
use language::LanguageToolchainStore;
use node_runtime::NodeRuntime;
use serde::{Deserialize, Serialize};
use settings::WorktreeId;
@@ -78,11 +78,6 @@ impl From<DebugAdapterName> for SharedString {
        name.0
    }
}
impl From<SharedString> for DebugAdapterName {
    fn from(name: SharedString) -> Self {
        DebugAdapterName(name)
    }
}

impl<'a> From<&'a str> for DebugAdapterName {
    fn from(str: &'a str) -> DebugAdapterName {
@@ -407,6 +402,10 @@ pub async fn fetch_latest_adapter_version_from_github(
    })
}

pub trait InlineValueProvider {
    fn provide(&self, variables: Vec<(String, lsp_types::Range)>) -> Vec<lsp_types::InlineValue>;
}

#[async_trait(?Send)]
pub trait DebugAdapter: 'static + Send + Sync {
    fn name(&self) -> DebugAdapterName;
@@ -419,8 +418,7 @@ pub trait DebugAdapter: 'static + Send + Sync {
        cx: &mut AsyncApp,
    ) -> Result<DebugAdapterBinary>;

    /// Returns the language name of an adapter if it only supports one language
    fn adapter_language_name(&self) -> Option<LanguageName> {
    fn inline_value_provider(&self) -> Option<Box<dyn InlineValueProvider>> {
        None
    }
}

@@ -7,14 +7,21 @@ use dap_types::{
    messages::{Message, Response},
    requests::Request,
};
use futures::channel::oneshot;
use gpui::{AppContext, AsyncApp};
use futures::{FutureExt as _, channel::oneshot, select};
use gpui::{AppContext, AsyncApp, BackgroundExecutor};
use smol::channel::{Receiver, Sender};
use std::{
    hash::Hash,
    sync::atomic::{AtomicU64, Ordering},
    time::Duration,
};

#[cfg(any(test, feature = "test-support"))]
const DAP_REQUEST_TIMEOUT: Duration = Duration::from_secs(2);

#[cfg(not(any(test, feature = "test-support")))]
const DAP_REQUEST_TIMEOUT: Duration = Duration::from_secs(12);

#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[repr(transparent)]
pub struct SessionId(pub u32);
@@ -34,6 +41,7 @@ pub struct DebugAdapterClient {
    id: SessionId,
    sequence_count: AtomicU64,
    binary: DebugAdapterBinary,
    executor: BackgroundExecutor,
    transport_delegate: TransportDelegate,
}

@@ -53,6 +61,7 @@ impl DebugAdapterClient {
            binary,
            transport_delegate,
            sequence_count: AtomicU64::new(1),
            executor: cx.background_executor().clone(),
        };
        log::info!("Successfully connected to debug adapter");

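The hunk that follows races each DAP response against `DAP_REQUEST_TIMEOUT` with `futures::select!`. A minimal, self-contained sketch of that pattern, using `smol::Timer` in place of GPUI's `BackgroundExecutor::timer`; the function and channel payload are illustrative:

use std::time::Duration;

use anyhow::{anyhow, Result};
use futures::{channel::oneshot, select, FutureExt as _};

async fn wait_with_timeout(rx: oneshot::Receiver<String>, timeout: Duration) -> Result<String> {
    // Both futures must be fused so `select!` can poll them safely.
    let mut response = rx.fuse();
    let mut timer = smol::Timer::after(timeout).fuse();

    select! {
        response = response => response.map_err(|_| anyhow!("request channel dropped")),
        _ = timer => Err(anyhow!("request timed out after {timeout:?}")),
    }
}

fn main() {
    smol::block_on(async {
        let (tx, rx) = oneshot::channel();
        tx.send("response body".to_string()).ok();
        match wait_with_timeout(rx, Duration::from_secs(2)).await {
            Ok(body) => println!("got: {body}"),
            Err(error) => eprintln!("error: {error}"),
        }
    });
}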
@@ -164,33 +173,40 @@ impl DebugAdapterClient {
|
||||
|
||||
self.send_message(Message::Request(request)).await?;
|
||||
|
||||
let mut timeout = self.executor.timer(DAP_REQUEST_TIMEOUT).fuse();
|
||||
let command = R::COMMAND.to_string();
|
||||
|
||||
let response = callback_rx.await??;
|
||||
log::debug!(
|
||||
"Client {} received response for: `{}` sequence_id: {}",
|
||||
self.id.0,
|
||||
command,
|
||||
sequence_id
|
||||
);
|
||||
match response.success {
|
||||
true => {
|
||||
if let Some(json) = response.body {
|
||||
Ok(serde_json::from_value(json)?)
|
||||
// Note: dap types configure themselves to return `None` when an empty object is received,
|
||||
// which then fails here...
|
||||
} else if let Ok(result) =
|
||||
serde_json::from_value(serde_json::Value::Object(Default::default()))
|
||||
{
|
||||
Ok(result)
|
||||
} else {
|
||||
Ok(serde_json::from_value(Default::default())?)
|
||||
select! {
|
||||
response = callback_rx.fuse() => {
|
||||
log::debug!(
|
||||
"Client {} received response for: `{}` sequence_id: {}",
|
||||
self.id.0,
|
||||
command,
|
||||
sequence_id
|
||||
);
|
||||
|
||||
let response = response??;
|
||||
match response.success {
|
||||
true => {
|
||||
if let Some(json) = response.body {
|
||||
Ok(serde_json::from_value(json)?)
|
||||
// Note: dap types configure themselves to return `None` when an empty object is received,
|
||||
// which then fails here...
|
||||
} else if let Ok(result) = serde_json::from_value(serde_json::Value::Object(Default::default())) {
|
||||
Ok(result)
|
||||
} else {
|
||||
Ok(serde_json::from_value(Default::default())?)
|
||||
}
|
||||
}
|
||||
false => Err(anyhow!("Request failed: {}", response.message.unwrap_or_default())),
|
||||
}
|
||||
}
|
||||
false => Err(anyhow!(
|
||||
"Request failed: {}",
|
||||
response.message.unwrap_or_default()
|
||||
)),
|
||||
|
||||
_ = timeout => {
|
||||
self.transport_delegate.cancel_pending_request(&sequence_id).await;
|
||||
log::error!("Cancelled DAP request for {command:?} id {sequence_id} which took over {DAP_REQUEST_TIMEOUT:?}");
|
||||
anyhow::bail!("DAP request timeout");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
pub mod adapters;
|
||||
pub mod client;
|
||||
pub mod debugger_settings;
|
||||
pub mod inline_value;
|
||||
pub mod proto_conversions;
|
||||
mod registry;
|
||||
pub mod transport;
|
||||
|
||||
@@ -1,277 +0,0 @@
|
||||
use std::collections::{HashMap, HashSet};
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub enum VariableLookupKind {
|
||||
Variable,
|
||||
Expression,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub enum VariableScope {
|
||||
Local,
|
||||
Global,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct InlineValueLocation {
|
||||
pub variable_name: String,
|
||||
pub scope: VariableScope,
|
||||
pub lookup: VariableLookupKind,
|
||||
pub row: usize,
|
||||
pub column: usize,
|
||||
}
|
||||
|
||||
/// A trait for providing inline values for debugging purposes.
|
||||
///
|
||||
/// Implementors of this trait are responsible for analyzing a given node in the
|
||||
/// source code and extracting variable information, including their names,
|
||||
/// scopes, and positions. This information is used to display inline values
|
||||
/// during debugging sessions. Implementors must also handle variable scoping
|
||||
/// themselves by traversing the syntax tree upwards to determine whether a
|
||||
/// variable is local or global.
|
||||
pub trait InlineValueProvider: 'static + Send + Sync {
|
||||
/// Provides a list of inline value locations based on the given node and source code.
|
||||
///
|
||||
/// # Parameters
|
||||
/// - `node`: The root node of the active debug line. Implementors should traverse
|
||||
/// upwards from this node to gather variable information and determine their scope.
|
||||
/// - `source`: The source code as a string slice, used to extract variable names.
|
||||
/// - `max_row`: The maximum row to consider when collecting variables. Variables
|
||||
/// declared beyond this row should be ignored.
|
||||
///
|
||||
/// # Returns
|
||||
/// A vector of `InlineValueLocation` instances, each representing a variable's
|
||||
/// name, scope, and the position of the inline value should be shown.
|
||||
fn provide(
|
||||
&self,
|
||||
node: language::Node,
|
||||
source: &str,
|
||||
max_row: usize,
|
||||
) -> Vec<InlineValueLocation>;
|
||||
}
|
||||
|
||||
pub struct RustInlineValueProvider;
|
||||
|
||||
impl InlineValueProvider for RustInlineValueProvider {
|
||||
fn provide(
|
||||
&self,
|
||||
mut node: language::Node,
|
||||
source: &str,
|
||||
max_row: usize,
|
||||
) -> Vec<InlineValueLocation> {
|
||||
let mut variables = Vec::new();
|
||||
let mut variable_names = HashSet::new();
|
||||
let mut scope = VariableScope::Local;
|
||||
|
||||
loop {
|
||||
let mut variable_names_in_scope = HashMap::new();
|
||||
for child in node.named_children(&mut node.walk()) {
|
||||
if child.start_position().row >= max_row {
|
||||
break;
|
||||
}
|
||||
|
||||
if scope == VariableScope::Local && child.kind() == "let_declaration" {
|
||||
if let Some(identifier) = child.child_by_field_name("pattern") {
|
||||
let variable_name = source[identifier.byte_range()].to_string();
|
||||
|
||||
if variable_names.contains(&variable_name) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if let Some(index) = variable_names_in_scope.get(&variable_name) {
|
||||
variables.remove(*index);
|
||||
}
|
||||
|
||||
variable_names_in_scope.insert(variable_name.clone(), variables.len());
|
||||
variables.push(InlineValueLocation {
|
||||
variable_name,
|
||||
scope: VariableScope::Local,
|
||||
lookup: VariableLookupKind::Variable,
|
||||
row: identifier.end_position().row,
|
||||
column: identifier.end_position().column,
|
||||
});
|
||||
}
|
||||
} else if child.kind() == "static_item" {
|
||||
if let Some(name) = child.child_by_field_name("name") {
|
||||
let variable_name = source[name.byte_range()].to_string();
|
||||
variables.push(InlineValueLocation {
|
||||
variable_name,
|
||||
scope: scope.clone(),
|
||||
lookup: VariableLookupKind::Expression,
|
||||
row: name.end_position().row,
|
||||
column: name.end_position().column,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
variable_names.extend(variable_names_in_scope.keys().cloned());
|
||||
|
||||
if matches!(node.kind(), "function_item" | "closure_expression") {
|
||||
scope = VariableScope::Global;
|
||||
}
|
||||
|
||||
if let Some(parent) = node.parent() {
|
||||
node = parent;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
variables
|
||||
}
|
||||
}
|
||||
|
||||
pub struct PythonInlineValueProvider;
|
||||
|
||||
impl InlineValueProvider for PythonInlineValueProvider {
|
||||
fn provide(
|
||||
&self,
|
||||
mut node: language::Node,
|
||||
source: &str,
|
||||
max_row: usize,
|
||||
) -> Vec<InlineValueLocation> {
|
||||
let mut variables = Vec::new();
|
||||
let mut variable_names = HashSet::new();
|
||||
let mut scope = VariableScope::Local;
|
||||
|
||||
loop {
|
||||
let mut variable_names_in_scope = HashMap::new();
|
||||
for child in node.named_children(&mut node.walk()) {
|
||||
if child.start_position().row >= max_row {
|
||||
break;
|
||||
}
|
||||
|
||||
if scope == VariableScope::Local {
|
||||
match child.kind() {
|
||||
"expression_statement" => {
|
||||
if let Some(expr) = child.child(0) {
|
||||
if expr.kind() == "assignment" {
|
||||
if let Some(param) = expr.child(0) {
|
||||
let param_identifier = if param.kind() == "identifier" {
|
||||
Some(param)
|
||||
} else if param.kind() == "typed_parameter" {
|
||||
param.child(0)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
if let Some(identifier) = param_identifier {
|
||||
if identifier.kind() == "identifier" {
|
||||
let variable_name =
|
||||
source[identifier.byte_range()].to_string();
|
||||
|
||||
if variable_names.contains(&variable_name) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if let Some(index) =
|
||||
variable_names_in_scope.get(&variable_name)
|
||||
{
|
||||
variables.remove(*index);
|
||||
}
|
||||
|
||||
variable_names_in_scope
|
||||
.insert(variable_name.clone(), variables.len());
|
||||
variables.push(InlineValueLocation {
|
||||
variable_name,
|
||||
scope: VariableScope::Local,
|
||||
lookup: VariableLookupKind::Variable,
|
||||
row: identifier.end_position().row,
|
||||
column: identifier.end_position().column,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
"function_definition" => {
|
||||
if let Some(params) = child.child_by_field_name("parameters") {
|
||||
for param in params.named_children(&mut params.walk()) {
|
||||
let param_identifier = if param.kind() == "identifier" {
|
||||
Some(param)
|
||||
} else if param.kind() == "typed_parameter" {
|
||||
param.child(0)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
if let Some(identifier) = param_identifier {
|
||||
if identifier.kind() == "identifier" {
|
||||
let variable_name =
|
||||
source[identifier.byte_range()].to_string();
|
||||
|
||||
if variable_names.contains(&variable_name) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if let Some(index) =
|
||||
variable_names_in_scope.get(&variable_name)
|
||||
{
|
||||
variables.remove(*index);
|
||||
}
|
||||
|
||||
variable_names_in_scope
|
||||
.insert(variable_name.clone(), variables.len());
|
||||
variables.push(InlineValueLocation {
|
||||
variable_name,
|
||||
scope: VariableScope::Local,
|
||||
lookup: VariableLookupKind::Variable,
|
||||
row: identifier.end_position().row,
|
||||
column: identifier.end_position().column,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
"for_statement" => {
|
||||
if let Some(target) = child.child_by_field_name("left") {
|
||||
if target.kind() == "identifier" {
|
||||
let variable_name = source[target.byte_range()].to_string();
|
||||
|
||||
if variable_names.contains(&variable_name) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if let Some(index) = variable_names_in_scope.get(&variable_name)
|
||||
{
|
||||
variables.remove(*index);
|
||||
}
|
||||
|
||||
variable_names_in_scope
|
||||
.insert(variable_name.clone(), variables.len());
|
||||
variables.push(InlineValueLocation {
|
||||
variable_name,
|
||||
scope: VariableScope::Local,
|
||||
lookup: VariableLookupKind::Variable,
|
||||
row: target.end_position().row,
|
||||
column: target.end_position().column,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
variable_names.extend(variable_names_in_scope.keys().cloned());
|
||||
|
||||
if matches!(node.kind(), "function_definition" | "module")
|
||||
&& node.range().end_point.row < max_row
|
||||
{
|
||||
scope = VariableScope::Global;
|
||||
}
|
||||
|
||||
if let Some(parent) = node.parent() {
|
||||
node = parent;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
variables
|
||||
}
|
||||
}
|
||||
@@ -2,14 +2,10 @@ use anyhow::Result;
|
||||
use async_trait::async_trait;
|
||||
use collections::FxHashMap;
|
||||
use gpui::{App, Global, SharedString};
|
||||
use language::LanguageName;
|
||||
use parking_lot::RwLock;
|
||||
use task::{DebugRequest, DebugScenario, SpawnInTerminal, TaskTemplate};
|
||||
|
||||
use crate::{
|
||||
adapters::{DebugAdapter, DebugAdapterName},
|
||||
inline_value::InlineValueProvider,
|
||||
};
|
||||
use crate::adapters::{DebugAdapter, DebugAdapterName};
|
||||
use std::{collections::BTreeMap, sync::Arc};
|
||||
|
||||
/// Given a user build configuration, locator creates a fill-in debug target ([DebugRequest]) on behalf of the user.
|
||||
@@ -17,12 +13,7 @@ use std::{collections::BTreeMap, sync::Arc};
|
||||
pub trait DapLocator: Send + Sync {
|
||||
fn name(&self) -> SharedString;
|
||||
/// Determines whether this locator can generate debug target for given task.
|
||||
fn create_scenario(
|
||||
&self,
|
||||
build_config: &TaskTemplate,
|
||||
resolved_label: &str,
|
||||
adapter: DebugAdapterName,
|
||||
) -> Option<DebugScenario>;
|
||||
fn create_scenario(&self, build_config: &TaskTemplate, adapter: &str) -> Option<DebugScenario>;
|
||||
|
||||
async fn run(&self, build_config: SpawnInTerminal) -> Result<DebugRequest>;
|
||||
}
|
||||
@@ -31,7 +22,6 @@ pub trait DapLocator: Send + Sync {
|
||||
struct DapRegistryState {
|
||||
adapters: BTreeMap<DebugAdapterName, Arc<dyn DebugAdapter>>,
|
||||
locators: FxHashMap<SharedString, Arc<dyn DapLocator>>,
|
||||
inline_value_providers: FxHashMap<String, Arc<dyn InlineValueProvider>>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Default)]
|
||||
@@ -60,11 +50,6 @@ impl DapRegistry {
|
||||
);
|
||||
}
|
||||
|
||||
pub fn adapter_language(&self, adapter_name: &str) -> Option<LanguageName> {
|
||||
self.adapter(adapter_name)
|
||||
.and_then(|adapter| adapter.adapter_language_name())
|
||||
}
|
||||
|
||||
pub fn add_locator(&self, locator: Arc<dyn DapLocator>) {
|
||||
let _previous_value = self.0.write().locators.insert(locator.name(), locator);
|
||||
debug_assert!(
|
||||
@@ -73,22 +58,6 @@ impl DapRegistry {
|
||||
);
|
||||
}
|
||||
|
||||
pub fn add_inline_value_provider(
|
||||
&self,
|
||||
language: String,
|
||||
provider: Arc<dyn InlineValueProvider>,
|
||||
) {
|
||||
let _previous_value = self
|
||||
.0
|
||||
.write()
|
||||
.inline_value_providers
|
||||
.insert(language, provider);
|
||||
debug_assert!(
|
||||
_previous_value.is_none(),
|
||||
"Attempted to insert a new inline value provider when one is already registered"
|
||||
);
|
||||
}
|
||||
|
||||
pub fn locators(&self) -> FxHashMap<SharedString, Arc<dyn DapLocator>> {
|
||||
self.0.read().locators.clone()
|
||||
}
|
||||
@@ -97,10 +66,6 @@ impl DapRegistry {
|
||||
self.0.read().adapters.get(name).cloned()
|
||||
}
|
||||
|
||||
pub fn inline_value_provider(&self, language: &str) -> Option<Arc<dyn InlineValueProvider>> {
|
||||
self.0.read().inline_value_providers.get(language).cloned()
|
||||
}
|
||||
|
||||
pub fn enumerate_adapters(&self) -> Vec<DebugAdapterName> {
|
||||
self.0.read().adapters.keys().cloned().collect()
|
||||
}
|
||||
|
||||
@@ -224,6 +224,11 @@ impl TransportDelegate {
        pending_requests.insert(sequence_id, request);
    }

    pub(crate) async fn cancel_pending_request(&self, sequence_id: &u64) {
        let mut pending_requests = self.pending_requests.lock().await;
        pending_requests.remove(sequence_id);
    }

    pub(crate) async fn send_message(&self, message: Message) -> Result<()> {
        if let Some(server_tx) = self.server_tx.lock().await.as_ref() {
            server_tx
@@ -575,31 +580,21 @@ impl TcpTransport {
|
||||
.unwrap_or(2000u64)
|
||||
});
|
||||
|
||||
let (mut process, (rx, tx)) = select! {
|
||||
let (rx, tx) = select! {
|
||||
_ = cx.background_executor().timer(Duration::from_millis(timeout)).fuse() => {
|
||||
return Err(anyhow!(format!("Connection to TCP DAP timeout {}:{}", host, port)))
|
||||
},
|
||||
result = cx.spawn(async move |cx| {
|
||||
loop {
|
||||
match TcpStream::connect(address).await {
|
||||
Ok(stream) => return Ok((process, stream.split())),
|
||||
Ok(stream) => return stream.split(),
|
||||
Err(_) => {
|
||||
if let Ok(Some(_)) = process.try_status() {
|
||||
let output = process.output().await?;
|
||||
let output = if output.stderr.is_empty() {
|
||||
String::from_utf8_lossy(&output.stdout).to_string()
|
||||
} else {
|
||||
String::from_utf8_lossy(&output.stderr).to_string()
|
||||
};
|
||||
return Err(anyhow!("{}\nerror: process exited before debugger attached.", output));
|
||||
}
|
||||
cx.background_executor().timer(Duration::from_millis(100)).await;
|
||||
}
|
||||
}
|
||||
}
|
||||
}).fuse() => result?
|
||||
}).fuse() => result
|
||||
};
|
||||
|
||||
log::info!(
|
||||
"Debug adapter has connected to TCP server {}:{}",
|
||||
host,
|
||||
|
||||