Compare commits
210 Commits
restore-de...v0.185.16
Commit SHA1s:
d43cb1e5e3, 0a7d3e4cab, e74ab2bfe6, ac2b411091, 44c29a9db6, 7cde151bfa, 29f43fc319, 690ff347ef, 31227831c3, 828d8e1a22,
2e92343df8, 101705d567, c59a00821e, bb9732dccd, 2c0d7529b4, 070f1bc6e2, 6b58e38f4a, dc505bd9df, f73984ce36, 5be974e58d,
299f600cbf, 05596ab493, 83c178782f, 73ca7074a8, d158360797, 5f055f5dce, 2843b5a8f3, 3a716b1c67, 28544d818b, a20e7e457d,
de4c309183, 29e32640a5, 466d63dada, cb30afaf1c, 805678bbdd, 307896cce3, 3db19e141a, 76b295541e, 5ba44fe8ea, db79f4ffde,
2940962840, c09d53bfb8, 150f394b98, cd0ef6fe78, a9e5beda8b, c0e128e8db, 3c22ad9e93, eff23ab177, 743f75f743, f14322741e,
ce6e82cd7e, 009963fa70, 5d4967562f, 5c22af3f67, 7bffd47e99, ee9040ad6e, 35ed225a08, e6165798ea, cb22acc102, 552382f04a,
317103bac9, 59cab2af5f, cd4024a038, 7a37a02080, 0fe173d1b5, 0cbfd55a0a, 4e8d053b80, cfaf16a3f1, cf30cc73b0, 3845fbef42,
3c9d39b4e3, 85f63c9fad, 3cabcd7667, c833618aa5, 1d6a5b20ce, 3f96f70475, b5c6567924, 792bae2949, 012f4c2304, 9b6f44275f,
f4c6b2f07a, 150f3561ae, 8512ba44b1, b6cb653f4d, 41fe53b3a7, 6c29cbe5b4, 520bbd2120, 42894f6c8b, 44ef5bd95f, 06704846e1,
ac70f228e7, 400a4faa1a, 8ce8a67c26, 6ae2417dea, 6cc6acccdd, ca51c0c406, 4da8f4a2f0, 764c529849, 121e3b5dfd, 80a85a31ab,
f6ae21ca7c, 0c424ec197, 80d8731010, 1c8f779b60, 21bb98bceb, abf613d84f, c172f4c045, 61161a6ae5, 8722b7a268, 224ad682e4,
69fd7d57d7, d30adffe2c, ac042a090f, 093f95346a, 4239ab4f9d, 56c3fc66b6, 586ab70201, 4de9657154, ff44d0b21a, 000c37f114,
778ff511a6, ebb04b0411, cd96a084c8, a47a8d8547, cc2d7b9c7d, a20c38463b, 94b335241c, 31419db05f, 51bfe9df8a, 971fc5c982,
f9fa99eb16, aae502e687, cb6209121b, 108005f1b8, 71f7100083, 6b5c834659, b65b251958, 3b32760d10, 185bd0601c, 22dda1f4af,
ba35bcb233, b577114584, c2dc7a70f0, 7249da16a0, 3ffaf278a8, 571db35dcf, 395d4e9a35, 711a855699, 298b30c1f0, 124e420685,
29178459fe, beecd17d30, d9f33183e2, 96ce034229, 2c6e9fe065, b2bc1b45a8, 6300518375, 04fdfd619c, 8dc98c2487, cdbd8c328e,
ae0716edfb, d6c26084e3, ed49ee9db1, 23b669321b, 6183034c3c, f30c146770, e8b8cd39eb, b4234faf33, c41a0c08d7, 081edd653e,
1450f04cbb, 2c60192379, 0158a2f350, bbb364df85, 5399aa37f6, c71e41467d, 550c3fb506, 02f9df3c7b, 5c3c2f5efb, f4ac0cf14a,
1d7b2960e1, 184db010e4, 6f3b39b908, acff230164, 31d6a4e484, f32e0968a9, a5087f57c0, bc3b4c0bf0, 887d6bf833, 472b820bf9,
7da116619b, a329e67ebc, cde6c2f767, 40b51c2c2b, 7c56afda70, 8e31309bee, 990acdfa30, e4d7f98445, c8b456d216, 6dbb35620c,
a6a94f79b5, 91074731b0, 88bd4eeafd, 60acc3ea27, ec4dc956b6, 6030ecb4d3, a1f0dfc5a3, ad11646804, 87d1beb062, cdcf035b7e
@@ -13,6 +13,12 @@ rustflags = ["-C", "link-arg=-fuse-ld=mold"]
linker = "clang"
rustflags = ["-C", "link-arg=-fuse-ld=mold"]

[target.aarch64-apple-darwin]
rustflags = ["-C", "link-args=-all_load"]

[target.x86_64-apple-darwin]
rustflags = ["-C", "link-args=-all_load"]

[target.'cfg(target_os = "windows")']
rustflags = [
"--cfg",
@@ -30,7 +30,3 @@ ffdda588b41f7d9d270ffe76cab116f828ad545e
# 2024-07-05 Improved formatting of default keymaps (single line per bind)
# https://github.com/zed-industries/zed/pull/13887
813cc3f5e537372fc86720b5e71b6e1c815440ab

# 2024-07-24 docs: Format docs
# https://github.com/zed-industries/zed/pull/15352
3a44a59f8ec114ac1ba22f7da1652717ef7e4e5c
4 .github/ISSUE_TEMPLATE/01_bug_agent.yml vendored
@@ -29,8 +29,8 @@ body:
id: environment
attributes:
label: Zed Version and System Specs
description: 'Open Zed, and in the command palette select "zed: copy system specs into clipboard"'
description: 'Open Zed, and in the command palette select "zed: Copy System Specs Into Clipboard"'
placeholder: |
Output of "zed: copy system specs into clipboard"
Output of "zed: Copy System Specs Into Clipboard"
validations:
required: true
@@ -29,8 +29,8 @@ body:
id: environment
attributes:
label: Zed Version and System Specs
description: 'Open Zed, and in the command palette select "zed: copy system specs into clipboard"'
description: 'Open Zed, and in the command palette select "zed: Copy System Specs Into Clipboard"'
placeholder: |
Output of "zed: copy system specs into clipboard"
Output of "zed: Copy System Specs Into Clipboard"
validations:
required: true
4 .github/ISSUE_TEMPLATE/03_bug_git.yml vendored
@@ -28,8 +28,8 @@ body:
id: environment
attributes:
label: Zed Version and System Specs
description: 'Open Zed, and in the command palette select "zed: copy system specs into clipboard"'
description: 'Open Zed, and in the command palette select "zed: Copy System Specs Into Clipboard"'
placeholder: |
Output of "zed: copy system specs into clipboard"
Output of "zed: Copy System Specs Into Clipboard"
validations:
required: true
35 .github/ISSUE_TEMPLATE/04_bug_debugger.yml vendored
@@ -1,35 +0,0 @@
name: Bug Report (Debugger)
description: Zed Debugger-Related Bugs
type: "Bug"
labels: ["debugger"]
title: "Debugger: <a short description of the Debugger bug>"
body:
- type: textarea
attributes:
label: Summary
description: Describe the bug with a one line summary, and provide detailed reproduction steps
value: |
<!-- Please insert a one line summary of the issue below -->
SUMMARY_SENTENCE_HERE

### Description
<!-- Describe with sufficient detail to reproduce from a clean Zed install. -->
Steps to trigger the problem:
1.
2.
3.

Actual Behavior:
Expected Behavior:

validations:
required: true
- type: textarea
id: environment
attributes:
label: Zed Version and System Specs
description: 'Open Zed, and in the command palette select "zed: copy system specs into clipboard"'
placeholder: |
Output of "zed: copy system specs into clipboard"
validations:
required: true
4 .github/ISSUE_TEMPLATE/10_bug_report.yml vendored
@@ -49,8 +49,8 @@ body:
attributes:
label: Zed Version and System Specs
description: |
Open Zed, from the command palette select "zed: copy system specs into clipboard"
Open Zed, from the command palette select "zed: Copy System Specs Into Clipboard"
placeholder: |
Output of "zed: copy system specs into clipboard"
Output of "zed: Copy System Specs Into Clipboard"
validations:
required: true
4 .github/ISSUE_TEMPLATE/11_crash_report.yml vendored
@@ -26,9 +26,9 @@ body:
id: environment
attributes:
label: Zed Version and System Specs
description: 'Open Zed, and in the command palette select "zed: copy system specs into clipboard"'
description: 'Open Zed, and in the command palette select "zed: Copy System Specs Into Clipboard"'
placeholder: |
Output of "zed: copy system specs into clipboard"
Output of "zed: Copy System Specs Into Clipboard"
validations:
required: true
- type: textarea
12 .github/workflows/ci.yml vendored
@@ -162,23 +162,13 @@ jobs:
working-directory: ./docs
run: |
pnpm dlx prettier@${PRETTIER_VERSION} . --check || {
echo "To fix, run from the root of the Zed repo:"
echo "To fix, run from the root of the zed repo:"
echo " cd docs && pnpm dlx prettier@${PRETTIER_VERSION} . --write && cd .."
false
}
env:
PRETTIER_VERSION: 3.5.0

- name: Prettier Check on default.json
run: |
pnpm dlx prettier@${PRETTIER_VERSION} assets/settings/default.json --check || {
echo "To fix, run from the root of the Zed repo:"
echo " pnpm dlx prettier@${PRETTIER_VERSION} assets/settings/default.json --write"
false
}
env:
PRETTIER_VERSION: 3.5.0

# To support writing comments that they will certainly be revisited.
- name: Check for todo! and FIXME comments
run: script/check-todos
2 .github/workflows/eval.yml vendored
@@ -69,7 +69,7 @@ jobs:
run: cargo build --package=eval

- name: Run eval
run: cargo run --package=eval -- --repetitions=8 --concurrency=1
run: cargo run --package=eval -- --repetitions=3 --concurrency=1

# Even the Linux runner is not stateful, in theory there is no need to do this cleanup.
# But, to avoid potential issues in the future if we choose to use a stateful Linux runner and forget to add code
1 .gitignore vendored
@@ -2,7 +2,6 @@
**/cargo-target
**/target
**/venv
**/.direnv
*.wasm
*.xcodeproj
.DS_Store
20 .mailmap
@@ -19,8 +19,6 @@ amtoaer <amtoaer@gmail.com>
amtoaer <amtoaer@gmail.com> <amtoaer@outlook.com>
Andrei Zvonimir Crnković <andrei@0x7f.dev>
Andrei Zvonimir Crnković <andrei@0x7f.dev> <andreicek@0x7f.dev>
Angelk90 <angelo.k90@hotmail.it>
Angelk90 <angelo.k90@hotmail.it> <20476002+Angelk90@users.noreply.github.com>
Antonio Scandurra <me@as-cii.com>
Antonio Scandurra <me@as-cii.com> <antonio@zed.dev>
Ben Kunkle <ben@zed.dev>
@@ -40,8 +38,6 @@ Dairon Medina <dairon.medina@gmail.com>
Danilo Leal <danilo@zed.dev>
Danilo Leal <danilo@zed.dev> <67129314+danilo-leal@users.noreply.github.com>
Edwin Aronsson <75266237+4teapo@users.noreply.github.com>
Elvis Pranskevichus <elvis@geldata.com>
Elvis Pranskevichus <elvis@geldata.com> <elvis@magic.io>
Evren Sen <nervenes@icloud.com>
Evren Sen <nervenes@icloud.com> <146845123+evrensen467@users.noreply.github.com>
Evren Sen <nervenes@icloud.com> <146845123+evrsen@users.noreply.github.com>
@@ -73,8 +69,6 @@ Lilith Iris <itslirissama@gmail.com> <83819417+Irilith@users.noreply.github.com>
LoganDark <contact@logandark.mozmail.com>
LoganDark <contact@logandark.mozmail.com> <git@logandark.mozmail.com>
LoganDark <contact@logandark.mozmail.com> <github@logandark.mozmail.com>
Marko Kungla <marko.kungla@gmail.com>
Marko Kungla <marko.kungla@gmail.com> <marko@mkungla.dev>
Marshall Bowers <git@maxdeviant.com>
Marshall Bowers <git@maxdeviant.com> <elliott.codes@gmail.com>
Marshall Bowers <git@maxdeviant.com> <marshall@zed.dev>
@@ -90,7 +84,6 @@ Michael Sloan <michael@zed.dev> <mgsloan@google.com>
Mikayla Maki <mikayla@zed.dev>
Mikayla Maki <mikayla@zed.dev> <mikayla.c.maki@gmail.com>
Mikayla Maki <mikayla@zed.dev> <mikayla.c.maki@icloud.com>
Morgan Krey <morgan@zed.dev>
Muhammad Talal Anwar <mail@talal.io>
Muhammad Talal Anwar <mail@talal.io> <talalanwar@outlook.com>
Nate Butler <iamnbutler@gmail.com>
@@ -123,18 +116,11 @@ Shish <webmaster@shishnet.org>
Shish <webmaster@shishnet.org> <shish@shishnet.org>
Smit Barmase <0xtimsb@gmail.com>
Smit Barmase <0xtimsb@gmail.com> <smit@zed.dev>
Thomas <github.thomaub@gmail.com>
Thomas <github.thomaub@gmail.com> <thomas.aubry94@gmail.com>
Thomas <github.thomaub@gmail.com> <thomas.aubry@paylead.fr>
Thomas Heartman <thomasheartman+github@gmail.com>
Thomas Heartman <thomasheartman+github@gmail.com> <thomas@getunleash.io>
Thomas Mickley-Doyle <tmickleydoyle@gmail.com>
Thomas Mickley-Doyle <tmickleydoyle@gmail.com> <thomas@zed.dev>
Thorben Kröger <dev@thorben.net>
Thorben Kröger <dev@thorben.net> <thorben.kroeger@hexagon.com>
Thorsten Ball <mrnugget@gmail.com>
Thorsten Ball <mrnugget@gmail.com> <me@thorstenball.com>
Thorsten Ball <mrnugget@gmail.com> <thorsten@zed.dev>
Thorsten Ball <thorsten@zed.dev>
Thorsten Ball <thorsten@zed.dev> <me@thorstenball.com>
Thorsten Ball <thorsten@zed.dev> <mrnugget@gmail.com>
Tristan Hume <tris.hume@gmail.com>
Tristan Hume <tris.hume@gmail.com> <tristan@anthropic.com>
Uladzislau Kaminski <i@uladkaminski.com>
@@ -1,3 +0,0 @@
{
"printWidth": 120
}
2 .rules
@@ -115,7 +115,7 @@ Other entities can then register a callback to handle these events by doing `cx.
GPUI has had some changes to its APIs. Always write code using the new APIs:

* `spawn` methods now take async closures (`AsyncFn`), and so should be called like `cx.spawn(async move |cx| ...)`.
* Use `Entity<T>`. This replaces `Model<T>` and `View<T>` which no longer exist and should NEVER be used.
* Use `Entity<T>`. This replaces `Model<T>` and `View<T>` which longer exists and should NEVER be used.
* Use `App` references. This replaces `AppContext` which no longer exists and should NEVER be used.
* Use `Context<T>` references. This replaces `ModelContext<T>` which no longer exists and should NEVER be used.
* `Window` is now passed around explicitly. The new interface adds a `Window` reference parameter to some methods, and adds some new "*_in" methods for plumbing `Window`. The old types `WindowContext` and `ViewContext<T>` should NEVER be used.
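The bullets above describe the direction of the GPUI migration. As a minimal illustrative sketch only (the `Counter` entity and its methods are hypothetical; the calls mirror `cx.new`, `cx.spawn`, `cx.notify`, and `update` as they appear elsewhere in this diff), new-style code looks roughly like this:

```rs
use gpui::{App, Context, Entity};

struct Counter {
    count: usize,
}

impl Counter {
    fn new(cx: &mut App) -> Entity<Self> {
        // `cx.new` hands back an `Entity<T>`, the replacement for `Model<T>`/`View<T>`.
        cx.new(|_cx| Counter { count: 0 })
    }

    fn bump_in_background(&mut self, cx: &mut Context<Self>) {
        // `spawn` takes an async closure; `this` is a weak handle back to this entity.
        cx.spawn(async move |this, cx| {
            this.update(cx, |counter, cx| {
                counter.count += 1;
                cx.notify();
            })?;
            anyhow::Ok(())
        })
        .detach();
    }
}
```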
@@ -2,14 +2,16 @@
{
"label": "Debug Zed (CodeLLDB)",
"adapter": "CodeLLDB",
"program": "target/debug/zed",
"request": "launch"
"program": "$ZED_WORKTREE_ROOT/target/debug/zed",
"request": "launch",
"cwd": "$ZED_WORKTREE_ROOT"
},
{
"label": "Debug Zed (GDB)",
"adapter": "GDB",
"program": "target/debug/zed",
"program": "$ZED_WORKTREE_ROOT/target/debug/zed",
"request": "launch",
"cwd": "$ZED_WORKTREE_ROOT",
"initialize_args": {
"stopAtBeginningOfMainSubprogram": true
}
@@ -46,17 +46,5 @@
"formatter": "auto",
"remove_trailing_whitespace_on_save": true,
"ensure_final_newline_on_save": true,
"file_scan_exclusions": [
"crates/eval/worktrees/",
"crates/eval/repos/",
"**/.git",
"**/.svn",
"**/.hg",
"**/.jj",
"**/CVS",
"**/.DS_Store",
"**/Thumbs.db",
"**/.classpath",
"**/.settings"
]
"file_scan_exclusions": ["crates/eval/worktrees/", "crates/eval/repos/"]
}
1692 Cargo.lock generated
File diff suppressed because it is too large
33 Cargo.toml
@@ -31,13 +31,13 @@ members = [
"crates/command_palette",
"crates/command_palette_hooks",
"crates/component",
"crates/component_preview",
"crates/context_server",
"crates/copilot",
"crates/credentials_provider",
"crates/dap",
"crates/dap_adapters",
"crates/db",
"crates/debug_adapter_extension",
"crates/debugger_tools",
"crates/debugger_ui",
"crates/deepseek",
@@ -74,8 +74,6 @@ members = [
"crates/inline_completion",
"crates/inline_completion_button",
"crates/install_cli",
"crates/jj",
"crates/jj_ui",
"crates/journal",
"crates/language",
"crates/language_extension",
@@ -240,13 +238,13 @@ collections = { path = "crates/collections" }
command_palette = { path = "crates/command_palette" }
command_palette_hooks = { path = "crates/command_palette_hooks" }
component = { path = "crates/component" }
component_preview = { path = "crates/component_preview" }
context_server = { path = "crates/context_server" }
copilot = { path = "crates/copilot" }
credentials_provider = { path = "crates/credentials_provider" }
dap = { path = "crates/dap" }
dap_adapters = { path = "crates/dap_adapters" }
db = { path = "crates/db" }
debug_adapter_extension = { path = "crates/debug_adapter_extension" }
debugger_tools = { path = "crates/debugger_tools" }
debugger_ui = { path = "crates/debugger_ui" }
deepseek = { path = "crates/deepseek" }
@@ -281,8 +279,6 @@ indexed_docs = { path = "crates/indexed_docs" }
inline_completion = { path = "crates/inline_completion" }
inline_completion_button = { path = "crates/inline_completion_button" }
install_cli = { path = "crates/install_cli" }
jj = { path = "crates/jj" }
jj_ui = { path = "crates/jj_ui" }
journal = { path = "crates/journal" }
language = { path = "crates/language" }
language_extension = { path = "crates/language_extension" }
@@ -414,9 +410,9 @@ aws-smithy-runtime-api = { version = "1.7.4", features = ["http-1x", "client"] }
aws-smithy-types = { version = "1.3.0", features = ["http-body-1-x"] }
base64 = "0.22"
bitflags = "2.6.0"
blade-graphics = { git = "https://github.com/kvark/blade", rev = "416375211bb0b5826b3584dccdb6a43369e499ad" }
blade-macros = { git = "https://github.com/kvark/blade", rev = "416375211bb0b5826b3584dccdb6a43369e499ad" }
blade-util = { git = "https://github.com/kvark/blade", rev = "416375211bb0b5826b3584dccdb6a43369e499ad" }
blade-graphics = { git = "https://github.com/kvark/blade", rev = "b16f5c7bd873c7126f48c82c39e7ae64602ae74f" }
blade-macros = { git = "https://github.com/kvark/blade", rev = "b16f5c7bd873c7126f48c82c39e7ae64602ae74f" }
blade-util = { git = "https://github.com/kvark/blade", rev = "b16f5c7bd873c7126f48c82c39e7ae64602ae74f" }
blake3 = "1.5.3"
bytes = "1.0"
cargo_metadata = "0.19"
@@ -430,13 +426,11 @@ convert_case = "0.8.0"
core-foundation = "0.10.0"
core-foundation-sys = "0.8.6"
core-video = { version = "0.4.3", features = ["metal"] }
criterion = { version = "0.5", features = ["html_reports"] }
ctor = "0.4.0"
dap-types = { git = "https://github.com/zed-industries/dap-types", rev = "be69a016ba710191b9fdded28c8b042af4b617f7" }
dashmap = "6.0"
derive_more = "0.99.17"
dirs = "4.0"
documented = "0.9.1"
dotenv = "0.15.0"
ec4rs = "1.1"
emojis = "0.6.1"
@@ -463,7 +457,6 @@ indexmap = { version = "2.7.0", features = ["serde"] }
indoc = "2"
inventory = "0.3.19"
itertools = "0.14.0"
jj-lib = { git = "https://github.com/jj-vcs/jj", rev = "e18eb8e05efaa153fad5ef46576af145bba1807f" }
jsonschema = "0.30.0"
jsonwebtoken = "9.3"
jupyter-protocol = { git = "https://github.com/ConradIrwin/runtimed", rev = "7130c804216b6914355d15d0b91ea91f6babd734" }
@@ -471,12 +464,13 @@ jupyter-websocket-client = { git = "https://github.com/ConradIrwin/runtimed" ,r
libc = "0.2"
libsqlite3-sys = { version = "0.30.1", features = ["bundled"] }
linkify = "0.10.0"
linkme = "0.3.31"
log = { version = "0.4.16", features = ["kv_unstable_serde", "serde"] }
lsp-types = { git = "https://github.com/zed-industries/lsp-types", rev = "c9c189f1c5dd53c624a419ce35bc77ad6a908d18" }
markup5ever_rcdom = "0.3.0"
metal = "0.29"
mlua = { version = "0.10", features = ["lua54", "vendored", "async", "send"] }
naga = { version = "25.0", features = ["wgsl-in"] }
naga = { version = "23.1.0", features = ["wgsl-in"] }
nanoid = "0.4"
nbformat = { git = "https://github.com/ConradIrwin/runtimed", rev = "7130c804216b6914355d15d0b91ea91f6babd734" }
nix = "0.29"
@@ -554,9 +548,9 @@ syn = { version = "1.0.72", features = ["full", "extra-traits"] }
sys-locale = "0.3.1"
sysinfo = "0.31.0"
take-until = "0.2.0"
tempfile = "3.20.0"
tempfile = "3.9.0"
thiserror = "2.0.12"
tiktoken-rs = "0.7.0"
tiktoken-rs = "0.6.0"
time = { version = "0.3", features = [
"macros",
"parsing",
@@ -600,7 +594,6 @@ url = "2.2"
urlencoding = "2.1.2"
uuid = { version = "1.1.2", features = ["v4", "v5", "v7", "serde"] }
walkdir = "2.3"
wasi-preview1-component-adapter-provider = "29"
wasm-encoder = "0.221"
wasmparser = "0.221"
wasmtime = { version = "29", default-features = false, features = [
@@ -609,13 +602,12 @@ wasmtime = { version = "29", default-features = false, features = [
"runtime",
"cranelift",
"component-model",
"parallel-compilation",
] }
wasmtime-wasi = "29"
which = "6.0.0"
wit-component = "0.221"
workspace-hack = "0.1.0"
zed_llm_client = "0.8.1"
zed_llm_client = "0.8.0"
zstd = "0.11"

[workspace.dependencies.async-stripe]
@@ -795,9 +787,6 @@ let_underscore_future = "allow"
# running afoul of the borrow checker.
too_many_arguments = "allow"

# We often have large enum variants yet we rarely actually bother with splitting them up.
large_enum_variant = "allow"

[workspace.metadata.cargo-machete]
ignored = [
"bindgen",
@@ -805,6 +794,6 @@ ignored = [
"prost_build",
"serde",
"component",
"documented",
"linkme",
"workspace-hack",
]
@@ -1,6 +1,6 @@
# syntax = docker/dockerfile:1.2

FROM rust:1.87-bookworm as builder
FROM rust:1.86-bookworm as builder
WORKDIR app
COPY . .
@@ -8,6 +8,10 @@ Welcome to Zed, a high-performance, multiplayer code editor from the creators of

### Installation

<a href="https://repology.org/project/zed-editor/versions">
<img src="https://repology.org/badge/vertical-allrepos/zed-editor.svg?minversion=0.143.5" alt="Packaging status" align="right">
</a>

On macOS and Linux you can [download Zed directly](https://zed.dev/download) or [install Zed via your local package manager](https://zed.dev/docs/linux#installing-via-a-package-manager).

Other platforms are not yet available:
Before Size: 474 B, After Size: 474 B
@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-user-round-check-icon lucide-user-round-check"><path d="M2 21a8 8 0 0 1 13.292-6"/><circle cx="10" cy="8" r="5"/><path d="m16 19 2 2 4-4"/></svg>
Before Size: 348 B
@@ -217,6 +217,7 @@
|
||||
"context": "ContextEditor > Editor",
|
||||
"bindings": {
|
||||
"ctrl-enter": "assistant::Assist",
|
||||
"ctrl-shift-enter": "assistant::Edit",
|
||||
"ctrl-s": "workspace::Save",
|
||||
"save": "workspace::Save",
|
||||
"ctrl->": "assistant::QuoteSelection",
|
||||
@@ -512,8 +513,6 @@
|
||||
"alt-ctrl-o": "projects::OpenRecent",
|
||||
"alt-shift-open": "projects::OpenRemote",
|
||||
"alt-ctrl-shift-o": "projects::OpenRemote",
|
||||
// Change to open path modal for existing remote connection by setting the parameter
|
||||
// "alt-ctrl-shift-o": "["projects::OpenRemote", { "from_existing_connection": true }]",
|
||||
"alt-ctrl-shift-b": "branches::OpenRecent",
|
||||
"alt-shift-enter": "toast::RunAction",
|
||||
"ctrl-~": "workspace::NewTerminal",
|
||||
@@ -768,7 +767,7 @@
|
||||
"alt-ctrl-r": "project_panel::RevealInFileManager",
|
||||
"ctrl-shift-enter": "project_panel::OpenWithSystem",
|
||||
"shift-find": "project_panel::NewSearchInDirectory",
|
||||
"ctrl-alt-shift-f": "project_panel::NewSearchInDirectory",
|
||||
"ctrl-shift-f": "project_panel::NewSearchInDirectory",
|
||||
"shift-down": "menu::SelectNext",
|
||||
"shift-up": "menu::SelectPrevious",
|
||||
"escape": "menu::Cancel"
|
||||
@@ -930,14 +929,12 @@
|
||||
"alt-b": ["terminal::SendText", "\u001bb"],
|
||||
"alt-f": ["terminal::SendText", "\u001bf"],
|
||||
"alt-.": ["terminal::SendText", "\u001b."],
|
||||
"ctrl-delete": ["terminal::SendText", "\u001bd"],
|
||||
// Overrides for conflicting keybindings
|
||||
"ctrl-b": ["terminal::SendKeystroke", "ctrl-b"],
|
||||
"ctrl-c": ["terminal::SendKeystroke", "ctrl-c"],
|
||||
"ctrl-e": ["terminal::SendKeystroke", "ctrl-e"],
|
||||
"ctrl-o": ["terminal::SendKeystroke", "ctrl-o"],
|
||||
"ctrl-w": ["terminal::SendKeystroke", "ctrl-w"],
|
||||
"ctrl-backspace": ["terminal::SendKeystroke", "ctrl-w"],
|
||||
"ctrl-shift-a": "editor::SelectAll",
|
||||
"find": "buffer_search::Deploy",
|
||||
"ctrl-shift-f": "buffer_search::Deploy",
|
||||
@@ -955,10 +952,7 @@
|
||||
"shift-down": "terminal::ScrollLineDown",
|
||||
"shift-home": "terminal::ScrollToTop",
|
||||
"shift-end": "terminal::ScrollToBottom",
|
||||
"ctrl-shift-space": "terminal::ToggleViMode",
|
||||
"ctrl-shift-r": "terminal::RerunTask",
|
||||
"ctrl-alt-r": "terminal::RerunTask",
|
||||
"alt-t": "terminal::RerunTask"
|
||||
"ctrl-shift-space": "terminal::ToggleViMode"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -974,19 +968,5 @@
|
||||
"enter": "editor::Newline",
|
||||
"ctrl-enter": "menu::Confirm"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Diagnostics",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"ctrl-r": "diagnostics::ToggleDiagnosticsRefresh"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "DebugConsole > Editor",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"enter": "menu::Confirm"
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
@@ -263,6 +263,7 @@
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"cmd-enter": "assistant::Assist",
|
||||
"cmd-shift-enter": "assistant::Edit",
|
||||
"cmd-s": "workspace::Save",
|
||||
"cmd->": "assistant::QuoteSelection",
|
||||
"cmd-<": "assistant::InsertIntoEditor",
|
||||
@@ -588,7 +589,6 @@
|
||||
// "alt-cmd-o": ["projects::OpenRecent", {"create_new_window": true }],
|
||||
"alt-cmd-o": "projects::OpenRecent",
|
||||
"ctrl-cmd-o": "projects::OpenRemote",
|
||||
"ctrl-cmd-shift-o": ["projects::OpenRemote", { "from_existing_connection": true }],
|
||||
"alt-cmd-b": "branches::OpenRecent",
|
||||
"ctrl-~": "workspace::NewTerminal",
|
||||
"cmd-s": "workspace::Save",
|
||||
@@ -826,7 +826,7 @@
|
||||
"alt-cmd-r": "project_panel::RevealInFileManager",
|
||||
"ctrl-shift-enter": "project_panel::OpenWithSystem",
|
||||
"cmd-alt-backspace": ["project_panel::Delete", { "skip_prompt": false }],
|
||||
"cmd-alt-shift-f": "project_panel::NewSearchInDirectory",
|
||||
"cmd-shift-f": "project_panel::NewSearchInDirectory",
|
||||
"shift-down": "menu::SelectNext",
|
||||
"shift-up": "menu::SelectPrevious",
|
||||
"escape": "menu::Cancel"
|
||||
@@ -1013,7 +1013,6 @@
|
||||
"alt-b": ["terminal::SendText", "\u001bb"],
|
||||
"alt-f": ["terminal::SendText", "\u001bf"],
|
||||
"alt-.": ["terminal::SendText", "\u001b."],
|
||||
"ctrl-delete": ["terminal::SendText", "\u001bd"],
|
||||
// There are conflicting bindings for these keys in the global context.
|
||||
// these bindings override them, remove at your own risk:
|
||||
"up": ["terminal::SendKeystroke", "up"],
|
||||
@@ -1023,7 +1022,6 @@
|
||||
"escape": ["terminal::SendKeystroke", "escape"],
|
||||
"enter": ["terminal::SendKeystroke", "enter"],
|
||||
"ctrl-c": ["terminal::SendKeystroke", "ctrl-c"],
|
||||
"ctrl-backspace": ["terminal::SendKeystroke", "ctrl-w"],
|
||||
"shift-pageup": "terminal::ScrollPageUp",
|
||||
"cmd-up": "terminal::ScrollPageUp",
|
||||
"shift-pagedown": "terminal::ScrollPageDown",
|
||||
@@ -1040,8 +1038,7 @@
|
||||
"ctrl-alt-up": "pane::SplitUp",
|
||||
"ctrl-alt-down": "pane::SplitDown",
|
||||
"ctrl-alt-left": "pane::SplitLeft",
|
||||
"ctrl-alt-right": "pane::SplitRight",
|
||||
"cmd-alt-r": "terminal::RerunTask"
|
||||
"ctrl-alt-right": "pane::SplitRight"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -1079,19 +1076,5 @@
|
||||
"enter": "editor::Newline",
|
||||
"cmd-enter": "menu::Confirm"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Diagnostics",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"ctrl-r": "diagnostics::ToggleDiagnosticsRefresh"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "DebugConsole > Editor",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"enter": "menu::Confirm"
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
@@ -72,9 +72,7 @@
"alt-left": "editor::SelectToPreviousWordStart",
"alt-right": "editor::SelectToNextWordEnd",
"pagedown": "editor::SelectPageDown",
"ctrl-v": "editor::SelectPageDown",
"pageup": "editor::SelectPageUp",
"alt-v": "editor::SelectPageUp",
"ctrl-f": "editor::SelectRight",
"ctrl-b": "editor::SelectLeft",
"ctrl-n": "editor::SelectDown",
@@ -72,9 +72,7 @@
"alt-left": "editor::SelectToPreviousWordStart",
"alt-right": "editor::SelectToNextWordEnd",
"pagedown": "editor::SelectPageDown",
"ctrl-v": "editor::SelectPageDown",
"pageup": "editor::SelectPageUp",
"alt-v": "editor::SelectPageUp",
"ctrl-f": "editor::SelectRight",
"ctrl-b": "editor::SelectLeft",
"ctrl-n": "editor::SelectDown",
@@ -152,7 +152,6 @@
"g end": ["vim::EndOfLine", { "display_lines": true }],
"g 0": ["vim::StartOfLine", { "display_lines": true }],
"g home": ["vim::StartOfLine", { "display_lines": true }],
"g shift-m": ["vim::MiddleOfLine", { "display_lines": true }],
"g ^": ["vim::FirstNonWhitespace", { "display_lines": true }],
"g v": "vim::RestoreVisualSelection",
"g ]": "editor::GoToDiagnostic",
@@ -49,9 +49,10 @@ And here's the section to rewrite based on that prompt again for reference:
</rewrite_this>

{{#if diagnostic_errors}}
{{#each diagnostic_errors}}

Below are the diagnostic errors visible to the user. If the user requests problems to be fixed, use this information, but do not try to fix these errors if the user hasn't asked you to.

{{#each diagnostic_errors}}
<diagnostic_error>
<line_number>{{line_number}}</line_number>
<error_message>{{error_message}}</error_message>
206 assets/prompts/suggest_edits.hbs Normal file
@@ -0,0 +1,206 @@
|
||||
<task_description>
|
||||
|
||||
The user of a code editor wants to make a change to their codebase.
|
||||
You must describe the change using the following XML structure:
|
||||
|
||||
- <patch> - A group of related code changes.
|
||||
Child tags:
|
||||
- <title> (required) - A high-level description of the changes. This should be as short
|
||||
as possible, possibly using common abbreviations.
|
||||
- <edit> (1 or more) - An edit to make at a particular range within a file.
|
||||
Includes the following child tags:
|
||||
- <path> (required) - The path to the file that will be changed.
|
||||
- <description> (optional) - An arbitrarily-long comment that describes the purpose
|
||||
of this edit.
|
||||
- <old_text> (optional) - An excerpt from the file's current contents that uniquely
|
||||
identifies a range within the file where the edit should occur. Required for all operations
|
||||
except `create`.
|
||||
- <new_text> (required) - The new text to insert into the file.
|
||||
- <operation> (required) - The type of change that should occur at the given range
|
||||
of the file. Must be one of the following values:
|
||||
- `update`: Replaces the entire range with the new text.
|
||||
- `insert_before`: Inserts the new text before the range.
|
||||
- `insert_after`: Inserts new text after the range.
|
||||
- `create`: Creates or overwrites a file with the given path and the new text.
|
||||
- `delete`: Deletes the specified range from the file.
|
||||
|
||||
<guidelines>
|
||||
- Never provide multiple edits whose ranges intersect each other. Instead, merge them into one edit.
|
||||
- Prefer multiple edits to smaller, disjoint ranges, rather than one edit to a larger range.
|
||||
- There's no need to escape angle brackets within XML tags.
|
||||
- Always ensure imports are added if you're referencing symbols that are not in scope.
|
||||
</guidelines>
|
||||
|
||||
Here are some concrete examples.
|
||||
|
||||
<example>
|
||||
<message role="user">
|
||||
|
||||
```rs src/shapes.rs
|
||||
pub mod rectangle;
|
||||
pub mod circle;
|
||||
```
|
||||
|
||||
```rs src/shapes/rectangle.rs
|
||||
pub struct Rectangle {
|
||||
width: f64,
|
||||
height: f64,
|
||||
}
|
||||
|
||||
impl Rectangle {
|
||||
pub fn new(width: f64, height: f64) -> Self {
|
||||
Rectangle { width, height }
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
```rs src/shapes/circle.rs
|
||||
pub struct Circle {
|
||||
radius: f64,
|
||||
}
|
||||
|
||||
impl Circle {
|
||||
pub fn new(radius: f64) -> Self {
|
||||
Circle { radius }
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Update all shapes to store their origin as an (x, y) tuple and implement Display.
|
||||
</message>
|
||||
|
||||
<message role="assistant">
|
||||
We'll need to update both the rectangle and circle modules.
|
||||
|
||||
<patch>
|
||||
<title>Add origins and display impls to shapes</title>
|
||||
<edit>
|
||||
<path>src/shapes/rectangle.rs</path>
|
||||
<description>Add the origin field to Rectangle struct</description>
|
||||
<operation>insert_after</operation>
|
||||
<old_text>
|
||||
pub struct Rectangle {
|
||||
</old_text>
|
||||
<new_text>
|
||||
origin: (f64, f64),
|
||||
</new_text>
|
||||
</edit>
|
||||
|
||||
<edit>
|
||||
<path>src/shapes/rectangle.rs</path>
|
||||
<operation>update</operation>
|
||||
<old_text>
|
||||
fn new(width: f64, height: f64) -> Self {
|
||||
Rectangle { width, height }
|
||||
}
|
||||
</old_text>
|
||||
<new_text>
|
||||
fn new(origin: (f64, f64), width: f64, height: f64) -> Self {
|
||||
Rectangle { origin, width, height }
|
||||
}
|
||||
</new_text>
|
||||
</edit>
|
||||
|
||||
<edit>
|
||||
<path>src/shapes/circle.rs</path>
|
||||
<description>Add the origin field to Circle struct</description>
|
||||
<operation>insert_after</operation>
|
||||
<old_text>
|
||||
pub struct Circle {
|
||||
radius: f64,
|
||||
</old_text>
|
||||
<new_text>
|
||||
origin: (f64, f64),
|
||||
</new_text>
|
||||
</edit>
|
||||
|
||||
<edit>
|
||||
<path>src/shapes/circle.rs</path>
|
||||
<operation>update</operation>
|
||||
<old_text>
|
||||
fn new(radius: f64) -> Self {
|
||||
Circle { radius }
|
||||
}
|
||||
</old_text>
|
||||
<new_text>
|
||||
fn new(origin: (f64, f64), radius: f64) -> Self {
|
||||
Circle { origin, radius }
|
||||
}
|
||||
</new_text>
|
||||
</edit>
|
||||
</step>
|
||||
|
||||
<edit>
|
||||
<path>src/shapes/rectangle.rs</path>
|
||||
<operation>insert_before</operation>
|
||||
<old_text>
|
||||
struct Rectangle {
|
||||
</old_text>
|
||||
<new_text>
|
||||
use std::fmt;
|
||||
|
||||
</new_text>
|
||||
</edit>
|
||||
|
||||
<edit>
|
||||
<path>src/shapes/rectangle.rs</path>
|
||||
<description>
|
||||
Add a manual Display implementation for Rectangle.
|
||||
Currently, this is the same as a derived Display implementation.
|
||||
</description>
|
||||
<operation>insert_after</operation>
|
||||
<old_text>
|
||||
Rectangle { width, height }
|
||||
}
|
||||
}
|
||||
</old_text>
|
||||
<new_text>
|
||||
impl fmt::Display for Rectangle {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.format_struct(f, "Rectangle")
|
||||
.field("origin", &self.origin)
|
||||
.field("width", &self.width)
|
||||
.field("height", &self.height)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
</new_text>
|
||||
</edit>
|
||||
|
||||
<edit>
|
||||
<path>src/shapes/circle.rs</path>
|
||||
<operation>insert_before</operation>
|
||||
<old_text>
|
||||
struct Circle {
|
||||
</old_text>
|
||||
<new_text>
|
||||
use std::fmt;
|
||||
</new_text>
|
||||
</edit>
|
||||
|
||||
<edit>
|
||||
<path>src/shapes/circle.rs</path>
|
||||
<operation>insert_after</operation>
|
||||
<old_text>
|
||||
Circle { radius }
|
||||
}
|
||||
}
|
||||
</old_text>
|
||||
<new_text>
|
||||
impl fmt::Display for Rectangle {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.format_struct(f, "Rectangle")
|
||||
.field("origin", &self.origin)
|
||||
.field("width", &self.width)
|
||||
.field("height", &self.height)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
</new_text>
|
||||
</edit>
|
||||
</patch>
|
||||
|
||||
</message>
|
||||
</example>
|
||||
|
||||
</task_description>
|
||||
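The <operation> values in the task description above (update, insert_before, insert_after, create, delete) can be read as plain string edits against a file's contents. A minimal sketch of that reading, not Zed's actual implementation (the names `Operation` and `apply_edit` are hypothetical):

```rs
enum Operation {
    Update,
    InsertBefore,
    InsertAfter,
    Create,
    Delete,
}

/// Applies one <edit> to a file's contents. `old_text` is expected to uniquely
/// identify a range; this sketch simply takes its first occurrence.
fn apply_edit(contents: &str, old_text: &str, new_text: &str, op: Operation) -> Option<String> {
    if let Operation::Create = op {
        // `create` creates or overwrites the file with `new_text`.
        return Some(new_text.to_string());
    }
    let start = contents.find(old_text)?;
    let end = start + old_text.len();
    Some(match op {
        Operation::Update => format!("{}{}{}", &contents[..start], new_text, &contents[end..]),
        Operation::InsertBefore => format!("{}{}{}", &contents[..start], new_text, &contents[start..]),
        Operation::InsertAfter => format!("{}{}{}", &contents[..end], new_text, &contents[end..]),
        Operation::Delete => format!("{}{}", &contents[..start], &contents[end..]),
        Operation::Create => unreachable!(),
    })
}
```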
@@ -113,8 +113,8 @@
|
||||
// Whether to show the informational hover box when moving the mouse
|
||||
// over symbols in the editor.
|
||||
"hover_popover_enabled": true,
|
||||
// Time to wait in milliseconds before showing the informational hover box.
|
||||
"hover_popover_delay": 300,
|
||||
// Time to wait before showing the informational hover box
|
||||
"hover_popover_delay": 350,
|
||||
// Whether to confirm before quitting Zed.
|
||||
"confirm_quit": false,
|
||||
// Whether to restore last closed project when fresh Zed instance is opened.
|
||||
@@ -218,23 +218,6 @@
|
||||
// 1. Do nothing: `none`
|
||||
// 2. Find references for the same symbol: `find_all_references` (default)
|
||||
"go_to_definition_fallback": "find_all_references",
|
||||
// Which level to use to filter out diagnostics displayed in the editor.
|
||||
//
|
||||
// Affects the editor rendering only, and does not interrupt
|
||||
// the functionality of diagnostics fetching and project diagnostics editor.
|
||||
// Which files containing diagnostic errors/warnings to mark in the tabs.
|
||||
// Diagnostics are only shown when file icons are also active.
|
||||
// This setting only works when can take the following three values:
|
||||
//
|
||||
// Which diagnostic indicators to show in the scrollbar, their level should be more or equal to the specified severity level.
|
||||
// Possible values:
|
||||
// - "off" — no diagnostics are allowed
|
||||
// - "error"
|
||||
// - "warning" (default)
|
||||
// - "info"
|
||||
// - "hint"
|
||||
// - null — allow all diagnostics
|
||||
"diagnostics_max_severity": "warning",
|
||||
// Whether to show wrap guides (vertical rulers) in the editor.
|
||||
// Setting this to true will show a guide at the 'preferred_line_length' value
|
||||
// if 'soft_wrap' is set to 'preferred_line_length', and will show any
|
||||
@@ -324,21 +307,6 @@
|
||||
// Whether to show agent review buttons in the editor toolbar.
|
||||
"agent_review": true
|
||||
},
|
||||
// Titlebar related settings
|
||||
"title_bar": {
|
||||
// Whether to show the branch icon beside branch switcher in the titlebar.
|
||||
"show_branch_icon": false,
|
||||
// Whether to show the branch name button in the titlebar.
|
||||
"show_branch_name": true,
|
||||
// Whether to show the project host and name in the titlebar.
|
||||
"show_project_items": true,
|
||||
// Whether to show onboarding banners in the titlebar.
|
||||
"show_onboarding_banner": true,
|
||||
// Whether to show user picture in the titlebar.
|
||||
"show_user_picture": true,
|
||||
// Whether to show the sign in button in the titlebar.
|
||||
"show_sign_in": true
|
||||
},
|
||||
// Scrollbar related settings
|
||||
"scrollbar": {
|
||||
// When to show the scrollbar in the editor.
|
||||
@@ -379,45 +347,6 @@
|
||||
"vertical": true
|
||||
}
|
||||
},
|
||||
// Minimap related settings
|
||||
"minimap": {
|
||||
// When to show the minimap in the editor.
|
||||
// This setting can take three values:
|
||||
// 1. Show the minimap if the editor's scrollbar is visible:
|
||||
// "auto"
|
||||
// 2. Always show the minimap:
|
||||
// "always"
|
||||
// 3. Never show the minimap:
|
||||
// "never" (default)
|
||||
"show": "never",
|
||||
// When to show the minimap thumb.
|
||||
// This setting can take two values:
|
||||
// 1. Show the minimap thumb if the mouse is over the minimap:
|
||||
// "hover"
|
||||
// 2. Always show the minimap thumb:
|
||||
// "always" (default)
|
||||
"thumb": "always",
|
||||
// How the minimap thumb border should look.
|
||||
// This setting can take five values:
|
||||
// 1. Display a border on all sides of the thumb:
|
||||
// "thumb_border": "full"
|
||||
// 2. Display a border on all sides except the left side of the thumb:
|
||||
// "thumb_border": "left_open" (default)
|
||||
// 3. Display a border on all sides except the right side of the thumb:
|
||||
// "thumb_border": "right_open"
|
||||
// 4. Display a border only on the left side of the thumb:
|
||||
// "thumb_border": "left_only"
|
||||
// 5. Display the thumb without any border:
|
||||
// "thumb_border": "none"
|
||||
"thumb_border": "left_open",
|
||||
// How to highlight the current line in the minimap.
|
||||
// This setting can take the following values:
|
||||
//
|
||||
// 1. `null` to inherit the editor `current_line_highlight` setting (default)
|
||||
// 2. "line" or "all" to highlight the current line in the minimap.
|
||||
// 3. "gutter" or "none" to not highlight the current line in the minimap.
|
||||
"current_line_highlight": null
|
||||
},
|
||||
// Enable middle-click paste on Linux.
|
||||
"middle_click_paste": true,
|
||||
// What to do when multibuffer is double clicked in some of its excerpts
|
||||
@@ -432,6 +361,8 @@
|
||||
"gutter": {
|
||||
// Whether to show line numbers in the gutter.
|
||||
"line_numbers": true,
|
||||
// Whether to show code action buttons in the gutter.
|
||||
"code_actions": true,
|
||||
// Whether to show runnables buttons in the gutter.
|
||||
"runnables": true,
|
||||
// Whether to show breakpoints in the gutter.
|
||||
@@ -476,8 +407,6 @@
|
||||
"search_wrap": true,
|
||||
// Search options to enable by default when opening new project and buffer searches.
|
||||
"search": {
|
||||
// Whether to show the project search button in the status bar.
|
||||
"button": true,
|
||||
"whole_word": false,
|
||||
"case_sensitive": false,
|
||||
"include_ignored": false,
|
||||
@@ -758,8 +687,6 @@
|
||||
"stream_edits": false,
|
||||
// When enabled, agent edits will be displayed in single-file editors for review
|
||||
"single_file_review": true,
|
||||
// When enabled, show voting thumbs for feedback on agent edits.
|
||||
"enable_feedback": true,
|
||||
"default_profile": "write",
|
||||
"profiles": {
|
||||
"write": {
|
||||
@@ -933,20 +860,7 @@
|
||||
// "modal_max_width": "full"
|
||||
//
|
||||
// Default: small
|
||||
"modal_max_width": "small",
|
||||
// Determines whether the file finder should skip focus for the active file in search results.
|
||||
// There are 2 possible values:
|
||||
//
|
||||
// 1. true: When searching for files, if the currently active file appears as the first result,
|
||||
// auto-focus will skip it and focus the second result instead.
|
||||
// "skip_focus_for_active_in_search": true
|
||||
//
|
||||
// 2. false: When searching for files, the first result will always receive focus,
|
||||
// even if it's the currently active file.
|
||||
// "skip_focus_for_active_in_search": false
|
||||
//
|
||||
// Default: true
|
||||
"skip_focus_for_active_in_search": true
|
||||
"modal_max_width": "small"
|
||||
},
|
||||
// Whether or not to remove any trailing whitespace from lines of a buffer
|
||||
// before saving it.
|
||||
@@ -998,8 +912,6 @@
|
||||
"hard_tabs": false,
|
||||
// How many columns a tab should occupy.
|
||||
"tab_size": 4,
|
||||
// What debuggers are preferred by default for all languages.
|
||||
"debuggers": [],
|
||||
// Control what info is collected by Zed.
|
||||
"telemetry": {
|
||||
// Send debug info like crash reports.
|
||||
@@ -1012,8 +924,6 @@
|
||||
"auto_update": true,
|
||||
// Diagnostics configuration.
|
||||
"diagnostics": {
|
||||
// Whether to show the project diagnostics button in the status bar.
|
||||
"button": true,
|
||||
// Whether to show warnings or not by default.
|
||||
"include_warnings": true,
|
||||
// Settings for inline diagnostics
|
||||
@@ -1031,13 +941,8 @@
|
||||
// longer than this value will still push diagnostics further to the right.
|
||||
"min_column": 0,
|
||||
// The minimum severity of the diagnostics to show inline.
|
||||
// Inherits editor's diagnostics' max severity settings when `null`.
|
||||
// Shows all diagnostics when not specified.
|
||||
"max_severity": null
|
||||
},
|
||||
"cargo": {
|
||||
// When enabled, Zed disables rust-analyzer's check on save and starts to query
|
||||
// Cargo diagnostics separately.
|
||||
"fetch_cargo_diagnostics": false
|
||||
}
|
||||
},
|
||||
// Files or globs of files that will be excluded by Zed entirely. They will be skipped during file
|
||||
@@ -1422,9 +1327,6 @@
|
||||
"Elixir": {
|
||||
"language_servers": ["elixir-ls", "!next-ls", "!lexical", "..."]
|
||||
},
|
||||
"Elm": {
|
||||
"tab_size": 4
|
||||
},
|
||||
"Erlang": {
|
||||
"language_servers": ["erlang-ls", "!elp", "..."]
|
||||
},
|
||||
@@ -1699,6 +1601,8 @@
|
||||
// "W": "workspace::Save"
|
||||
// }
|
||||
"command_aliases": {},
|
||||
// Whether to show user picture in titlebar.
|
||||
"show_user_picture": true,
|
||||
// ssh_connections is an array of ssh connections.
|
||||
// You can configure these from `project: Open Remote` in the command palette.
|
||||
// Zed's ssh support will pull configuration from your ~/.ssh too.
|
||||
@@ -1715,8 +1619,6 @@
|
||||
// }
|
||||
// ]
|
||||
"ssh_connections": [],
|
||||
// Whether to read ~/.ssh/config for ssh connection sources.
|
||||
"read_ssh_config": true,
|
||||
// Configures context servers for use by the agent.
|
||||
"context_servers": {},
|
||||
"debugger": {
|
||||
|
||||
@@ -43,7 +43,6 @@ pub struct ActivityIndicator {
|
||||
context_menu_handle: PopoverMenuHandle<ContextMenu>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
struct ServerStatus {
|
||||
name: SharedString,
|
||||
status: BinaryStatus,
|
||||
@@ -71,7 +70,6 @@ impl ActivityIndicator {
|
||||
) -> Entity<ActivityIndicator> {
|
||||
let project = workspace.project().clone();
|
||||
let auto_updater = AutoUpdater::get(cx);
|
||||
let workspace_handle = cx.entity();
|
||||
let this = cx.new(|cx| {
|
||||
let mut status_events = languages.language_server_binary_statuses();
|
||||
cx.spawn(async move |this, cx| {
|
||||
@@ -86,23 +84,17 @@ impl ActivityIndicator {
|
||||
})
|
||||
.detach();
|
||||
|
||||
cx.subscribe_in(
|
||||
&workspace_handle,
|
||||
window,
|
||||
|activity_indicator, _, event, window, cx| match event {
|
||||
workspace::Event::ClearActivityIndicator { .. } => {
|
||||
if activity_indicator.statuses.pop().is_some() {
|
||||
activity_indicator.dismiss_error_message(
|
||||
&DismissErrorMessage,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
cx.notify();
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
},
|
||||
)
|
||||
let mut status_events = languages.dap_server_binary_statuses();
|
||||
cx.spawn(async move |this, cx| {
|
||||
while let Some((name, status)) = status_events.next().await {
|
||||
this.update(cx, |this, cx| {
|
||||
this.statuses.retain(|s| s.name != name);
|
||||
this.statuses.push(ServerStatus { name, status });
|
||||
cx.notify();
|
||||
})?;
|
||||
}
|
||||
anyhow::Ok(())
|
||||
})
|
||||
.detach();
|
||||
|
||||
cx.subscribe(
|
||||
@@ -136,7 +128,7 @@ impl ActivityIndicator {
|
||||
}
|
||||
|
||||
Self {
|
||||
statuses: Vec::new(),
|
||||
statuses: Default::default(),
|
||||
project: project.clone(),
|
||||
auto_updater,
|
||||
context_menu_handle: Default::default(),
|
||||
@@ -206,8 +198,11 @@ impl ActivityIndicator {
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
if let Some(updater) = &self.auto_updater {
|
||||
updater.update(cx, |updater, cx| updater.dismiss_error(cx));
|
||||
updater.update(cx, |updater, cx| {
|
||||
updater.dismiss_error(cx);
|
||||
});
|
||||
}
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
fn pending_language_server_work<'a>(
|
||||
|
||||
@@ -47,12 +47,12 @@ heed.workspace = true
|
||||
html_to_markdown.workspace = true
|
||||
http_client.workspace = true
|
||||
indexed_docs.workspace = true
|
||||
inventory.workspace = true
|
||||
itertools.workspace = true
|
||||
jsonschema.workspace = true
|
||||
language.workspace = true
|
||||
language_model.workspace = true
|
||||
language_model_selector.workspace = true
|
||||
linkme.workspace = true
|
||||
log.workspace = true
|
||||
lsp.workspace = true
|
||||
markdown.workspace = true
|
||||
|
||||
@@ -33,9 +33,7 @@ use language_model::{
|
||||
LanguageModelRequestMessage, LanguageModelToolUseId, MessageContent, Role, StopReason,
|
||||
};
|
||||
use markdown::parser::{CodeBlockKind, CodeBlockMetadata};
|
||||
use markdown::{
|
||||
HeadingLevelStyles, Markdown, MarkdownElement, MarkdownStyle, ParsedMarkdown, PathWithRange,
|
||||
};
|
||||
use markdown::{HeadingLevelStyles, Markdown, MarkdownElement, MarkdownStyle, ParsedMarkdown};
|
||||
use project::{ProjectEntryId, ProjectItem as _};
|
||||
use rope::Point;
|
||||
use settings::{Settings as _, SettingsStore, update_settings_file};
|
||||
@@ -185,14 +183,12 @@ pub(crate) fn default_markdown_style(window: &Window, cx: &App) -> MarkdownStyle
|
||||
let ui_font_size = TextSize::Default.rems(cx);
|
||||
let buffer_font_size = TextSize::Small.rems(cx);
|
||||
let mut text_style = window.text_style();
|
||||
let line_height = buffer_font_size * 1.75;
|
||||
|
||||
text_style.refine(&TextStyleRefinement {
|
||||
font_family: Some(theme_settings.ui_font.family.clone()),
|
||||
font_fallbacks: theme_settings.ui_font.fallbacks.clone(),
|
||||
font_features: Some(theme_settings.ui_font.features.clone()),
|
||||
font_size: Some(ui_font_size.into()),
|
||||
line_height: Some(line_height.into()),
|
||||
color: Some(cx.theme().colors().text),
|
||||
..Default::default()
|
||||
});
|
||||
@@ -333,6 +329,7 @@ fn tool_use_markdown_style(window: &Window, cx: &mut App) -> MarkdownStyle {
|
||||
}
|
||||
|
||||
const CODEBLOCK_CONTAINER_GROUP: &str = "codeblock_container";
|
||||
const MAX_UNCOLLAPSED_LINES_IN_CODE_BLOCK: usize = 10;
|
||||
|
||||
fn render_markdown_code_block(
|
||||
message_id: MessageId,
|
||||
@@ -345,20 +342,17 @@ fn render_markdown_code_block(
|
||||
_window: &Window,
|
||||
cx: &App,
|
||||
) -> Div {
|
||||
let label_size = rems(0.8125);
|
||||
|
||||
let label = match kind {
|
||||
CodeBlockKind::Indented => None,
|
||||
CodeBlockKind::Fenced => Some(
|
||||
h_flex()
|
||||
.px_1()
|
||||
.gap_1()
|
||||
.child(
|
||||
Icon::new(IconName::Code)
|
||||
.color(Color::Muted)
|
||||
.size(IconSize::XSmall),
|
||||
)
|
||||
.child(div().text_size(label_size).child("Plain Text"))
|
||||
.child(Label::new("untitled").size(LabelSize::Small))
|
||||
.into_any_element(),
|
||||
),
|
||||
CodeBlockKind::FencedLang(raw_language_name) => Some(render_code_language(
|
||||
@@ -387,36 +381,28 @@ fn render_markdown_code_block(
|
||||
)
|
||||
} else {
|
||||
let content = if let Some(parent) = path_range.path.parent() {
|
||||
let file_name = file_name.to_string_lossy().to_string();
|
||||
let path = parent.to_string_lossy().to_string();
|
||||
let path_and_file = format!("{}/{}", path, file_name);
|
||||
|
||||
h_flex()
|
||||
.id(("code-block-header-label", ix))
|
||||
.ml_1()
|
||||
.gap_1()
|
||||
.child(div().text_size(label_size).child(file_name))
|
||||
.child(Label::new(path).color(Color::Muted).size(LabelSize::Small))
|
||||
.tooltip(move |window, cx| {
|
||||
Tooltip::with_meta(
|
||||
"Jump to File",
|
||||
None,
|
||||
path_and_file.clone(),
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.child(
|
||||
Label::new(file_name.to_string_lossy().to_string())
|
||||
.size(LabelSize::Small),
|
||||
)
|
||||
.child(
|
||||
Label::new(parent.to_string_lossy().to_string())
|
||||
.color(Color::Muted)
|
||||
.size(LabelSize::Small),
|
||||
)
|
||||
.into_any_element()
|
||||
} else {
|
||||
div()
|
||||
Label::new(path_range.path.to_string_lossy().to_string())
|
||||
.size(LabelSize::Small)
|
||||
.ml_1()
|
||||
.text_size(label_size)
|
||||
.child(path_range.path.to_string_lossy().to_string())
|
||||
.into_any_element()
|
||||
};
|
||||
|
||||
h_flex()
|
||||
.id(("code-block-header-button", ix))
|
||||
.id(("code-block-header-label", ix))
|
||||
.w_full()
|
||||
.max_w_full()
|
||||
.px_1()
|
||||
@@ -424,6 +410,7 @@ fn render_markdown_code_block(
|
||||
.cursor_pointer()
|
||||
.rounded_sm()
|
||||
.hover(|item| item.bg(cx.theme().colors().element_hover.opacity(0.5)))
|
||||
.tooltip(Tooltip::text("Jump to File"))
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_0p5()
|
||||
@@ -443,8 +430,49 @@ fn render_markdown_code_block(
|
||||
let path_range = path_range.clone();
|
||||
move |_, window, cx| {
|
||||
workspace
|
||||
.update(cx, |workspace, cx| {
|
||||
open_path(&path_range, window, workspace, cx)
|
||||
.update(cx, {
|
||||
|workspace, cx| {
|
||||
let Some(project_path) = workspace
|
||||
.project()
|
||||
.read(cx)
|
||||
.find_project_path(&path_range.path, cx)
|
||||
else {
|
||||
return;
|
||||
};
|
||||
let Some(target) = path_range.range.as_ref().map(|range| {
|
||||
Point::new(
|
||||
// Line number is 1-based
|
||||
range.start.line.saturating_sub(1),
|
||||
range.start.col.unwrap_or(0),
|
||||
)
|
||||
}) else {
|
||||
return;
|
||||
};
|
||||
let open_task = workspace.open_path(
|
||||
project_path,
|
||||
None,
|
||||
true,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
window
|
||||
.spawn(cx, async move |cx| {
|
||||
let item = open_task.await?;
|
||||
if let Some(active_editor) =
|
||||
item.downcast::<Editor>()
|
||||
{
|
||||
active_editor
|
||||
.update_in(cx, |editor, window, cx| {
|
||||
editor.go_to_singleton_buffer_point(
|
||||
target, window, cx,
|
||||
);
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
anyhow::Ok(())
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
}
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
@@ -459,157 +487,124 @@ fn render_markdown_code_block(
.copied_code_block_ids
.contains(&(message_id, ix));

let is_expanded = active_thread.read(cx).is_codeblock_expanded(message_id, ix);
let can_expand = metadata.line_count > MAX_UNCOLLAPSED_LINES_IN_CODE_BLOCK;

let is_expanded = if can_expand {
active_thread
.read(cx)
.expanded_code_blocks
.get(&(message_id, ix))
.copied()
.unwrap_or(false)
} else {
false
};

let codeblock_header_bg = cx
.theme()
.colors()
.element_background
.blend(cx.theme().colors().editor_foreground.opacity(0.025));

let control_buttons = h_flex()
.visible_on_hover(CODEBLOCK_CONTAINER_GROUP)
.absolute()
.top_0()
.right_0()
.h_full()
.bg(codeblock_header_bg)
.rounded_tr_md()
.px_1()
.gap_1()
.child(
IconButton::new(
("copy-markdown-code", ix),
if codeblock_was_copied {
IconName::Check
} else {
IconName::Copy
},
)
.icon_color(Color::Muted)
.shape(ui::IconButtonShape::Square)
.tooltip(Tooltip::text("Copy Code"))
.on_click({
let active_thread = active_thread.clone();
let parsed_markdown = parsed_markdown.clone();
let code_block_range = metadata.content_range.clone();
move |_event, _window, cx| {
active_thread.update(cx, |this, cx| {
this.copied_code_block_ids.insert((message_id, ix));

let code = parsed_markdown.source()[code_block_range.clone()].to_string();
cx.write_to_clipboard(ClipboardItem::new_string(code));

cx.spawn(async move |this, cx| {
cx.background_executor().timer(Duration::from_secs(2)).await;

cx.update(|cx| {
this.update(cx, |this, cx| {
this.copied_code_block_ids.remove(&(message_id, ix));
cx.notify();
})
})
.ok();
})
.detach();
});
}
}),
)
.child(
IconButton::new(
("expand-collapse-code", ix),
if is_expanded {
IconName::ChevronUp
} else {
IconName::ChevronDown
},
)
.icon_color(Color::Muted)
.shape(ui::IconButtonShape::Square)
.tooltip(Tooltip::text(if is_expanded {
"Collapse Code"
} else {
"Expand Code"
}))
.on_click({
let active_thread = active_thread.clone();
move |_event, _window, cx| {
active_thread.update(cx, |this, cx| {
this.toggle_codeblock_expanded(message_id, ix);
cx.notify();
});
}
}),
);
.blend(cx.theme().colors().editor_foreground.opacity(0.01));
|
||||
|
||||
let codeblock_header = h_flex()
|
||||
.relative()
|
||||
.p_1()
|
||||
.py_1()
|
||||
.pl_1p5()
|
||||
.pr_1()
|
||||
.gap_1()
|
||||
.justify_between()
|
||||
.border_b_1()
|
||||
.border_color(cx.theme().colors().border.opacity(0.6))
|
||||
.bg(codeblock_header_bg)
|
||||
.map(|this| {
|
||||
if !is_expanded {
|
||||
this.rounded_md()
|
||||
} else {
|
||||
this.rounded_t_md()
|
||||
.border_b_1()
|
||||
.border_color(cx.theme().colors().border.opacity(0.6))
|
||||
}
|
||||
})
|
||||
.rounded_t_md()
|
||||
.children(label)
|
||||
.child(control_buttons);
|
||||
.child(
|
||||
h_flex()
|
||||
.visible_on_hover(CODEBLOCK_CONTAINER_GROUP)
|
||||
.gap_1()
|
||||
.child(
|
||||
IconButton::new(
|
||||
("copy-markdown-code", ix),
|
||||
if codeblock_was_copied {
|
||||
IconName::Check
|
||||
} else {
|
||||
IconName::Copy
|
||||
},
|
||||
)
|
||||
.icon_color(Color::Muted)
|
||||
.shape(ui::IconButtonShape::Square)
|
||||
.tooltip(Tooltip::text("Copy Code"))
|
||||
.on_click({
|
||||
let active_thread = active_thread.clone();
|
||||
let parsed_markdown = parsed_markdown.clone();
|
||||
let code_block_range = metadata.content_range.clone();
|
||||
move |_event, _window, cx| {
|
||||
active_thread.update(cx, |this, cx| {
|
||||
this.copied_code_block_ids.insert((message_id, ix));
|
||||
|
||||
let code =
|
||||
parsed_markdown.source()[code_block_range.clone()].to_string();
|
||||
cx.write_to_clipboard(ClipboardItem::new_string(code));
|
||||
|
||||
cx.spawn(async move |this, cx| {
|
||||
cx.background_executor().timer(Duration::from_secs(2)).await;
|
||||
|
||||
cx.update(|cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
this.copied_code_block_ids.remove(&(message_id, ix));
|
||||
cx.notify();
|
||||
})
|
||||
})
|
||||
.ok();
|
||||
})
|
||||
.detach();
|
||||
});
|
||||
}
|
||||
}),
|
||||
)
|
||||
.when(can_expand, |header| {
|
||||
header.child(
|
||||
IconButton::new(
|
||||
("expand-collapse-code", ix),
|
||||
if is_expanded {
|
||||
IconName::ChevronUp
|
||||
} else {
|
||||
IconName::ChevronDown
|
||||
},
|
||||
)
|
||||
.icon_color(Color::Muted)
|
||||
.shape(ui::IconButtonShape::Square)
|
||||
.tooltip(Tooltip::text(if is_expanded {
|
||||
"Collapse Code"
|
||||
} else {
|
||||
"Expand Code"
|
||||
}))
|
||||
.on_click({
|
||||
let active_thread = active_thread.clone();
|
||||
move |_event, _window, cx| {
|
||||
active_thread.update(cx, |this, cx| {
|
||||
let is_expanded = this
|
||||
.expanded_code_blocks
|
||||
.entry((message_id, ix))
|
||||
.or_insert(true);
|
||||
*is_expanded = !*is_expanded;
|
||||
cx.notify();
|
||||
});
|
||||
}
|
||||
}),
|
||||
)
|
||||
}),
|
||||
);
|
||||
|
||||
v_flex()
|
||||
.group(CODEBLOCK_CONTAINER_GROUP)
|
||||
.my_2()
|
||||
.overflow_hidden()
|
||||
.rounded_md()
|
||||
.rounded_lg()
|
||||
.border_1()
|
||||
.border_color(cx.theme().colors().border.opacity(0.6))
|
||||
.bg(cx.theme().colors().editor_background)
|
||||
.child(codeblock_header)
|
||||
.when(!is_expanded, |this| this.h(rems_from_px(31.)))
|
||||
}
|
||||
|
||||
fn open_path(
path_range: &PathWithRange,
window: &mut Window,
workspace: &mut Workspace,
cx: &mut Context<'_, Workspace>,
) {
let Some(project_path) = workspace
.project()
.read(cx)
.find_project_path(&path_range.path, cx)
else {
return; // TODO instead of just bailing out, open that path in a buffer.
};

let Some(target) = path_range.range.as_ref().map(|range| {
Point::new(
// Line number is 1-based
range.start.line.saturating_sub(1),
range.start.col.unwrap_or(0),
)
}) else {
return;
};
let open_task = workspace.open_path(project_path, None, true, window, cx);
window
.spawn(cx, async move |cx| {
let item = open_task.await?;
if let Some(active_editor) = item.downcast::<Editor>() {
active_editor
.update_in(cx, |editor, window, cx| {
editor.go_to_singleton_buffer_point(target, window, cx);
})
.ok();
}
anyhow::Ok(())
})
.detach_and_log_err(cx);
.when(can_expand && !is_expanded, |this| this.max_h_80())
}
|
||||
|
||||
fn render_code_language(
|
||||
@@ -631,13 +626,10 @@ fn render_code_language(
|
||||
.map(|language| language.name().into())
|
||||
.unwrap_or(name_fallback);
|
||||
|
||||
let label_size = rems(0.8125);
|
||||
|
||||
h_flex()
|
||||
.px_1()
|
||||
.gap_1p5()
|
||||
.children(icon_path.map(|icon| icon.color(Color::Muted).size(IconSize::XSmall)))
|
||||
.child(div().text_size(label_size).child(language_label))
|
||||
.gap_1()
|
||||
.children(icon_path.map(|icon| icon.color(Color::Muted).size(IconSize::Small)))
|
||||
.child(Label::new(language_label).size(LabelSize::Small))
|
||||
.into_any_element()
|
||||
}
|
||||
|
||||
@@ -1018,7 +1010,6 @@ impl ActiveThread {
|
||||
self.push_message(message_id, &message_segments, window, cx);
|
||||
}
|
||||
|
||||
self.scroll_to_bottom(cx);
|
||||
self.save_thread(cx);
|
||||
cx.notify();
|
||||
}
|
||||
@@ -1032,7 +1023,6 @@ impl ActiveThread {
|
||||
self.edited_message(message_id, &message_segments, window, cx);
|
||||
}
|
||||
|
||||
self.scroll_to_bottom(cx);
|
||||
self.save_thread(cx);
|
||||
cx.notify();
|
||||
}
|
||||
@@ -1546,15 +1536,11 @@ impl ActiveThread {
|
||||
let project = self.thread.read(cx).project().clone();
|
||||
let prompt_store = self.thread_store.read(cx).prompt_store().clone();
|
||||
|
||||
let git_store = project.read(cx).git_store().clone();
|
||||
let checkpoint = git_store.update(cx, |git_store, cx| git_store.checkpoint(cx));
|
||||
|
||||
let load_context_task =
|
||||
crate::context::load_context(new_context, &project, &prompt_store, cx);
|
||||
self._load_edited_message_context_task =
|
||||
Some(cx.spawn_in(window, async move |this, cx| {
|
||||
let (context, checkpoint) =
|
||||
futures::future::join(load_context_task, checkpoint).await;
|
||||
let context = load_context_task.await;
|
||||
let _ = this
|
||||
.update_in(cx, |this, window, cx| {
|
||||
this.thread.update(cx, |thread, cx| {
|
||||
@@ -1563,7 +1549,6 @@ impl ActiveThread {
|
||||
Role::User,
|
||||
vec![MessageSegment::Text(edited_text)],
|
||||
Some(context.loaded_context),
|
||||
checkpoint.ok(),
|
||||
cx,
|
||||
);
|
||||
for message_id in this.messages_after(message_id) {
|
||||
@@ -1733,11 +1718,10 @@ impl ActiveThread {
|
||||
.on_action(cx.listener(Self::confirm_editing_message))
|
||||
.capture_action(cx.listener(Self::paste))
|
||||
.min_h_6()
|
||||
.w_full()
|
||||
.flex_grow()
|
||||
.w_full()
|
||||
.gap_2()
|
||||
.child(state.context_strip.clone())
|
||||
.child(div().pt(px(-3.)).px_neg_0p5().child(EditorElement::new(
|
||||
.child(EditorElement::new(
|
||||
&state.editor,
|
||||
EditorStyle {
|
||||
background: colors.editor_background,
|
||||
@@ -1746,7 +1730,8 @@ impl ActiveThread {
|
||||
syntax: cx.theme().syntax().clone(),
|
||||
..Default::default()
|
||||
},
|
||||
)))
|
||||
))
|
||||
.child(state.context_strip.clone())
|
||||
}
|
||||
|
||||
fn render_message(&self, ix: usize, window: &mut Window, cx: &mut Context<Self>) -> AnyElement {
|
||||
@@ -1874,8 +1859,7 @@ impl ActiveThread {
|
||||
.child(open_as_markdown),
|
||||
)
|
||||
.into_any_element(),
|
||||
None if AssistantSettings::get_global(cx).enable_feedback =>
|
||||
feedback_container
|
||||
None => feedback_container
|
||||
.child(
|
||||
div().visible_on_hover("feedback_container").child(
|
||||
Label::new(
|
||||
@@ -1918,9 +1902,6 @@ impl ActiveThread {
|
||||
.child(open_as_markdown),
|
||||
)
|
||||
.into_any_element(),
|
||||
None => feedback_container
|
||||
.child(h_flex().child(open_as_markdown))
|
||||
.into_any_element(),
|
||||
};
|
||||
|
||||
let message_is_empty = message.should_display_content();
|
||||
@@ -1934,6 +1915,16 @@ impl ActiveThread {
|
||||
v_flex()
|
||||
.w_full()
|
||||
.gap_1()
|
||||
.when(!message_is_empty, |parent| {
|
||||
parent.child(div().min_h_6().child(self.render_message_content(
|
||||
message_id,
|
||||
rendered_message,
|
||||
has_tool_uses,
|
||||
workspace.clone(),
|
||||
window,
|
||||
cx,
|
||||
)))
|
||||
})
|
||||
.when(!added_context.is_empty(), |parent| {
|
||||
parent.child(h_flex().flex_wrap().gap_1().children(
|
||||
added_context.into_iter().map(|added_context| {
|
||||
@@ -1952,16 +1943,6 @@ impl ActiveThread {
|
||||
}),
|
||||
))
|
||||
})
|
||||
.when(!message_is_empty, |parent| {
|
||||
parent.child(div().pt_0p5().min_h_6().child(self.render_message_content(
|
||||
message_id,
|
||||
rendered_message,
|
||||
has_tool_uses,
|
||||
workspace.clone(),
|
||||
window,
|
||||
cx,
|
||||
)))
|
||||
})
|
||||
.into_any_element()
|
||||
}
|
||||
});
|
||||
@@ -1987,7 +1968,6 @@ impl ActiveThread {
|
||||
h_flex()
|
||||
.p_2p5()
|
||||
.gap_1()
|
||||
.items_end()
|
||||
.children(message_content)
|
||||
.when_some(editing_message_state, |this, state| {
|
||||
let focus_handle = state.editor.focus_handle(cx).clone();
|
||||
@@ -2001,7 +1981,6 @@ impl ActiveThread {
|
||||
)
|
||||
.shape(ui::IconButtonShape::Square)
|
||||
.icon_color(Color::Error)
|
||||
.icon_size(IconSize::Small)
|
||||
.tooltip({
|
||||
let focus_handle = focus_handle.clone();
|
||||
move |window, cx| {
|
||||
@@ -2019,12 +1998,11 @@ impl ActiveThread {
|
||||
.child(
|
||||
IconButton::new(
|
||||
"confirm-edit-message",
|
||||
IconName::Return,
|
||||
IconName::Check,
|
||||
)
|
||||
.disabled(state.editor.read(cx).is_empty(cx))
|
||||
.shape(ui::IconButtonShape::Square)
|
||||
.icon_color(Color::Muted)
|
||||
.icon_size(IconSize::Small)
|
||||
.icon_color(Color::Success)
|
||||
.tooltip({
|
||||
let focus_handle = focus_handle.clone();
|
||||
move |window, cx| {
|
||||
@@ -2044,6 +2022,9 @@ impl ActiveThread {
|
||||
)
|
||||
}),
|
||||
)
|
||||
.when(editing_message_state.is_none(), |this| {
|
||||
this.tooltip(Tooltip::text("Click To Edit"))
|
||||
})
|
||||
.on_click(cx.listener({
|
||||
let message_segments = message.segments.clone();
|
||||
move |this, _, window, cx| {
|
||||
@@ -2084,16 +2065,6 @@ impl ActiveThread {
|
||||
|
||||
let panel_background = cx.theme().colors().panel_background;
|
||||
|
||||
let backdrop = div()
|
||||
.id("backdrop")
|
||||
.stop_mouse_events_except_scroll()
|
||||
.absolute()
|
||||
.inset_0()
|
||||
.size_full()
|
||||
.bg(panel_background)
|
||||
.opacity(0.8)
|
||||
.on_click(cx.listener(Self::handle_cancel_click));
|
||||
|
||||
v_flex()
|
||||
.w_full()
|
||||
.map(|parent| {
|
||||
@@ -2263,7 +2234,15 @@ impl ActiveThread {
|
||||
})
|
||||
.when(after_editing_message, |parent| {
|
||||
// Backdrop to dim out the whole thread below the editing user message
|
||||
parent.relative().child(backdrop)
|
||||
parent.relative().child(
|
||||
div()
|
||||
.stop_mouse_events_except_scroll()
|
||||
.absolute()
|
||||
.inset_0()
|
||||
.size_full()
|
||||
.bg(panel_background)
|
||||
.opacity(0.8),
|
||||
)
|
||||
})
|
||||
.into_any()
|
||||
}
|
||||
@@ -2373,17 +2352,42 @@ impl ActiveThread {
|
||||
}),
|
||||
transform: Some(Arc::new({
|
||||
let active_thread = cx.entity();
|
||||
let editor_bg = cx.theme().colors().editor_background;
|
||||
|
||||
move |element, range, _, _, cx| {
|
||||
let is_expanded = active_thread
|
||||
.read(cx)
|
||||
.is_codeblock_expanded(message_id, range.start);
|
||||
|
||||
if is_expanded {
|
||||
return element;
|
||||
move |el, range, metadata, _, cx| {
|
||||
let can_expand = metadata.line_count
|
||||
> MAX_UNCOLLAPSED_LINES_IN_CODE_BLOCK;
|
||||
if !can_expand {
|
||||
return el;
|
||||
}
|
||||
|
||||
element
|
||||
let is_expanded = active_thread
|
||||
.read(cx)
|
||||
.expanded_code_blocks
|
||||
.get(&(message_id, range.start))
|
||||
.copied()
|
||||
.unwrap_or(false);
|
||||
if is_expanded {
|
||||
return el;
|
||||
}
|
||||
|
||||
el.child(
|
||||
div()
|
||||
.absolute()
|
||||
.bottom_0()
|
||||
.left_0()
|
||||
.w_full()
|
||||
.h_1_4()
|
||||
.rounded_b_lg()
|
||||
.bg(linear_gradient(
|
||||
0.,
|
||||
linear_color_stop(editor_bg, 0.),
|
||||
linear_color_stop(
|
||||
editor_bg.opacity(0.),
|
||||
1.,
|
||||
),
|
||||
)),
|
||||
)
|
||||
}
|
||||
})),
|
||||
},
|
||||
@@ -3380,26 +3384,6 @@ impl ActiveThread {
|
||||
.log_err();
|
||||
}))
|
||||
}
|
||||
|
||||
pub fn is_codeblock_expanded(&self, message_id: MessageId, ix: usize) -> bool {
|
||||
self.expanded_code_blocks
|
||||
.get(&(message_id, ix))
|
||||
.copied()
|
||||
.unwrap_or(true)
|
||||
}
|
||||
|
||||
pub fn toggle_codeblock_expanded(&mut self, message_id: MessageId, ix: usize) {
|
||||
let is_expanded = self
|
||||
.expanded_code_blocks
|
||||
.entry((message_id, ix))
|
||||
.or_insert(true);
|
||||
*is_expanded = !*is_expanded;
|
||||
}
|
||||
|
||||
pub fn scroll_to_bottom(&mut self, cx: &mut Context<Self>) {
|
||||
self.list_state.reset(self.messages.len());
|
||||
cx.notify();
|
||||
}
|
||||
}
|
||||
|
||||
pub enum ActiveThreadEvent {
|
||||
@@ -3413,7 +3397,6 @@ impl Render for ActiveThread {
|
||||
v_flex()
|
||||
.size_full()
|
||||
.relative()
|
||||
.bg(cx.theme().colors().panel_background)
|
||||
.on_mouse_move(cx.listener(|this, _, _, cx| {
|
||||
this.show_scrollbar = true;
|
||||
this.hide_scrollbar_later(cx);
|
||||
|
||||
@@ -49,7 +49,7 @@ pub use crate::context::{ContextLoadResult, LoadedContext};
|
||||
pub use crate::inline_assistant::InlineAssistant;
|
||||
use crate::slash_command_settings::SlashCommandSettings;
|
||||
pub use crate::thread::{Message, MessageSegment, Thread, ThreadEvent};
|
||||
pub use crate::thread_store::{SerializedThread, TextThreadStore, ThreadStore};
|
||||
pub use crate::thread_store::{TextThreadStore, ThreadStore};
|
||||
pub use agent_diff::{AgentDiffPane, AgentDiffToolbar};
|
||||
pub use context_store::ContextStore;
|
||||
pub use ui::preview::{all_agent_previews, get_agent_preview};
|
||||
|
||||
@@ -18,8 +18,8 @@ use language_model::{LanguageModelProvider, LanguageModelProviderId, LanguageMod
|
||||
use project::context_server_store::{ContextServerStatus, ContextServerStore};
|
||||
use settings::{Settings, update_settings_file};
|
||||
use ui::{
|
||||
Disclosure, ElevationIndex, Indicator, Scrollbar, ScrollbarState, Switch, SwitchColor, Tooltip,
|
||||
prelude::*,
|
||||
Disclosure, Divider, DividerColor, ElevationIndex, Indicator, Scrollbar, ScrollbarState,
|
||||
Switch, SwitchColor, Tooltip, prelude::*,
|
||||
};
|
||||
use util::ResultExt as _;
|
||||
use zed_actions::ExtensionCategoryFilter;
|
||||
@@ -142,7 +142,7 @@ impl AgentConfiguration {
|
||||
.expanded_provider_configurations
|
||||
.get(&provider.id())
|
||||
.copied()
|
||||
.unwrap_or(false);
|
||||
.unwrap_or(true);
|
||||
|
||||
v_flex()
|
||||
.pt_3()
|
||||
@@ -201,12 +201,12 @@ impl AgentConfiguration {
|
||||
.on_click(cx.listener({
|
||||
let provider_id = provider.id().clone();
|
||||
move |this, _event, _window, _cx| {
|
||||
let is_expanded = this
|
||||
let is_open = this
|
||||
.expanded_provider_configurations
|
||||
.entry(provider_id.clone())
|
||||
.or_insert(false);
|
||||
.or_insert(true);
|
||||
|
||||
*is_expanded = !*is_expanded;
|
||||
*is_open = !*is_open;
|
||||
}
|
||||
})),
|
||||
),
|
||||
@@ -214,9 +214,9 @@ impl AgentConfiguration {
|
||||
)
|
||||
.when(is_expanded, |parent| match configuration_view {
|
||||
Some(configuration_view) => parent.child(configuration_view),
|
||||
None => parent.child(Label::new(format!(
|
||||
None => parent.child(div().child(Label::new(format!(
|
||||
"No configuration view for {provider_name}",
|
||||
))),
|
||||
)))),
|
||||
})
|
||||
}
|
||||
|
||||
@@ -230,8 +230,7 @@ impl AgentConfiguration {
|
||||
.p(DynamicSpacing::Base16.rems(cx))
|
||||
.pr(DynamicSpacing::Base20.rems(cx))
|
||||
.gap_4()
|
||||
.border_b_1()
|
||||
.border_color(cx.theme().colors().border)
|
||||
.flex_1()
|
||||
.child(
|
||||
v_flex()
|
||||
.gap_0p5()
|
||||
@@ -332,8 +331,7 @@ impl AgentConfiguration {
|
||||
.p(DynamicSpacing::Base16.rems(cx))
|
||||
.pr(DynamicSpacing::Base20.rems(cx))
|
||||
.gap_2p5()
|
||||
.border_b_1()
|
||||
.border_color(cx.theme().colors().border)
|
||||
.flex_1()
|
||||
.child(Headline::new("General Settings"))
|
||||
.child(self.render_command_permission(cx))
|
||||
.child(self.render_single_file_review(cx))
|
||||
@@ -346,17 +344,18 @@ impl AgentConfiguration {
|
||||
) -> impl IntoElement {
|
||||
let context_server_ids = self.context_server_store.read(cx).all_server_ids().clone();
|
||||
|
||||
const SUBHEADING: &str = "Connect to context servers via the Model Context Protocol either via Zed extensions or directly.";
|
||||
|
||||
v_flex()
|
||||
.p(DynamicSpacing::Base16.rems(cx))
|
||||
.pr(DynamicSpacing::Base20.rems(cx))
|
||||
.gap_2()
|
||||
.border_b_1()
|
||||
.border_color(cx.theme().colors().border)
|
||||
.flex_1()
|
||||
.child(
|
||||
v_flex()
|
||||
.gap_0p5()
|
||||
.child(Headline::new("Model Context Protocol (MCP) Servers"))
|
||||
.child(Label::new("Connect to context servers via the Model Context Protocol either via Zed extensions or directly.").color(Color::Muted)),
|
||||
.child(Label::new(SUBHEADING).color(Color::Muted)),
|
||||
)
|
||||
.children(
|
||||
context_server_ids.into_iter().map(|context_server_id| {
|
||||
@@ -423,7 +422,6 @@ impl AgentConfiguration {
|
||||
.unwrap_or(ContextServerStatus::Stopped);
|
||||
|
||||
let is_running = matches!(server_status, ContextServerStatus::Running);
|
||||
let item_id = SharedString::from(context_server_id.0.clone());
|
||||
|
||||
let error = if let ContextServerStatus::Error(error) = server_status.clone() {
|
||||
Some(error)
|
||||
@@ -445,38 +443,9 @@ impl AgentConfiguration {
|
||||
let tool_count = tools.len();
|
||||
|
||||
let border_color = cx.theme().colors().border.opacity(0.6);
|
||||
let success_color = Color::Success.color(cx);
|
||||
|
||||
let (status_indicator, tooltip_text) = match server_status {
|
||||
ContextServerStatus::Starting => (
|
||||
Indicator::dot()
|
||||
.color(Color::Success)
|
||||
.with_animation(
|
||||
SharedString::from(format!("{}-starting", context_server_id.0.clone(),)),
|
||||
Animation::new(Duration::from_secs(2))
|
||||
.repeat()
|
||||
.with_easing(pulsating_between(0.4, 1.)),
|
||||
move |this, delta| this.color(success_color.alpha(delta).into()),
|
||||
)
|
||||
.into_any_element(),
|
||||
"Server is starting.",
|
||||
),
|
||||
ContextServerStatus::Running => (
|
||||
Indicator::dot().color(Color::Success).into_any_element(),
|
||||
"Server is running.",
|
||||
),
|
||||
ContextServerStatus::Error(_) => (
|
||||
Indicator::dot().color(Color::Error).into_any_element(),
|
||||
"Server has an error.",
|
||||
),
|
||||
ContextServerStatus::Stopped => (
|
||||
Indicator::dot().color(Color::Muted).into_any_element(),
|
||||
"Server is stopped.",
|
||||
),
|
||||
};
|
||||
|
||||
v_flex()
|
||||
.id(item_id.clone())
|
||||
.id(SharedString::from(context_server_id.0.clone()))
|
||||
.border_1()
|
||||
.rounded_md()
|
||||
.border_color(border_color)
|
||||
@@ -511,12 +480,35 @@ impl AgentConfiguration {
|
||||
}
|
||||
})),
|
||||
)
|
||||
.child(
|
||||
div()
|
||||
.id(item_id.clone())
|
||||
.tooltip(Tooltip::text(tooltip_text))
|
||||
.child(status_indicator),
|
||||
)
|
||||
.child(match server_status {
|
||||
ContextServerStatus::Starting => {
|
||||
let color = Color::Success.color(cx);
|
||||
Indicator::dot()
|
||||
.color(Color::Success)
|
||||
.with_animation(
|
||||
SharedString::from(format!(
|
||||
"{}-starting",
|
||||
context_server_id.0.clone(),
|
||||
)),
|
||||
Animation::new(Duration::from_secs(2))
|
||||
.repeat()
|
||||
.with_easing(pulsating_between(0.4, 1.)),
|
||||
move |this, delta| {
|
||||
this.color(color.alpha(delta).into())
|
||||
},
|
||||
)
|
||||
.into_any_element()
|
||||
}
|
||||
ContextServerStatus::Running => {
|
||||
Indicator::dot().color(Color::Success).into_any_element()
|
||||
}
|
||||
ContextServerStatus::Error(_) => {
|
||||
Indicator::dot().color(Color::Error).into_any_element()
|
||||
}
|
||||
ContextServerStatus::Stopped => {
|
||||
Indicator::dot().color(Color::Muted).into_any_element()
|
||||
}
|
||||
})
|
||||
.child(Label::new(context_server_id.0.clone()).ml_0p5())
|
||||
.when(is_running, |this| {
|
||||
this.child(
|
||||
@@ -631,7 +623,9 @@ impl Render for AgentConfiguration {
|
||||
.size_full()
|
||||
.overflow_y_scroll()
|
||||
.child(self.render_general_settings_section(cx))
|
||||
.child(Divider::horizontal().color(DividerColor::Border))
|
||||
.child(self.render_context_servers_section(window, cx))
|
||||
.child(Divider::horizontal().color(DividerColor::Border))
|
||||
.child(self.render_provider_configuration_section(cx)),
|
||||
)
|
||||
.child(
|
||||
|
||||
@@ -30,6 +30,7 @@ pub(crate) struct ConfigureContextServerModal {
|
||||
context_server_store: Entity<ContextServerStore>,
|
||||
}
|
||||
|
||||
#[allow(clippy::large_enum_variant)]
|
||||
enum Configuration {
|
||||
NotAvailable,
|
||||
Required(ConfigurationRequiredState),
|
||||
|
||||
@@ -567,15 +567,6 @@ impl AgentPanel {
|
||||
menu = menu.header("Recently Opened");
|
||||
|
||||
for entry in recently_opened.iter() {
|
||||
if let RecentEntry::Context(context) = entry {
|
||||
if context.read(cx).path().is_none() {
|
||||
log::error!(
|
||||
"bug: text thread in recent history list was never saved"
|
||||
);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
let summary = entry.summary(cx);
|
||||
|
||||
menu = menu.entry_with_end_slot_on_hover(
|
||||
@@ -1299,26 +1290,14 @@ impl AgentPanel {
|
||||
let new_is_history = matches!(new_view, ActiveView::History);
|
||||
|
||||
match &self.active_view {
|
||||
ActiveView::Thread { thread, .. } => {
|
||||
ActiveView::Thread { thread, .. } => self.history_store.update(cx, |store, cx| {
|
||||
if let Some(thread) = thread.upgrade() {
|
||||
if thread.read(cx).is_empty() {
|
||||
let id = thread.read(cx).id().clone();
|
||||
self.history_store.update(cx, |store, cx| {
|
||||
store.remove_recently_opened_thread(id, cx);
|
||||
});
|
||||
store.remove_recently_opened_thread(id, cx);
|
||||
}
|
||||
}
|
||||
}
|
||||
ActiveView::PromptEditor { context_editor, .. } => {
|
||||
let context = context_editor.read(cx).context();
|
||||
// When switching away from an unsaved text thread, delete its entry.
|
||||
if context.read(cx).path().is_none() {
|
||||
let context = context.clone();
|
||||
self.history_store.update(cx, |store, cx| {
|
||||
store.remove_recently_opened_entry(&RecentEntry::Context(context), cx);
|
||||
});
|
||||
}
|
||||
}
|
||||
}),
|
||||
_ => {}
|
||||
}
|
||||
|
||||
@@ -2156,7 +2135,6 @@ impl AgentPanel {
|
||||
|
||||
v_flex()
|
||||
.size_full()
|
||||
.bg(cx.theme().colors().panel_background)
|
||||
.when(recent_history.is_empty(), |this| {
|
||||
let configuration_error_ref = &configuration_error;
|
||||
this.child(
|
||||
@@ -2461,6 +2439,9 @@ impl AgentPanel {
|
||||
.occlude()
|
||||
.child(match last_error {
|
||||
ThreadError::PaymentRequired => self.render_payment_required_error(cx),
|
||||
ThreadError::MaxMonthlySpendReached => {
|
||||
self.render_max_monthly_spend_reached_error(cx)
|
||||
}
|
||||
ThreadError::ModelRequestLimitReached { plan } => {
|
||||
self.render_model_request_limit_reached_error(plan, cx)
|
||||
}
|
||||
@@ -2520,6 +2501,56 @@ impl AgentPanel {
|
||||
.into_any()
|
||||
}
|
||||
|
||||
fn render_max_monthly_spend_reached_error(&self, cx: &mut Context<Self>) -> AnyElement {
|
||||
const ERROR_MESSAGE: &str = "You have reached your maximum monthly spend. Increase your spend limit to continue using Zed LLMs.";
|
||||
|
||||
v_flex()
|
||||
.gap_0p5()
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_1p5()
|
||||
.items_center()
|
||||
.child(Icon::new(IconName::XCircle).color(Color::Error))
|
||||
.child(Label::new("Max Monthly Spend Reached").weight(FontWeight::MEDIUM)),
|
||||
)
|
||||
.child(
|
||||
div()
|
||||
.id("error-message")
|
||||
.max_h_24()
|
||||
.overflow_y_scroll()
|
||||
.child(Label::new(ERROR_MESSAGE)),
|
||||
)
|
||||
.child(
|
||||
h_flex()
|
||||
.justify_end()
|
||||
.mt_1()
|
||||
.gap_1()
|
||||
.child(self.create_copy_button(ERROR_MESSAGE))
|
||||
.child(
|
||||
Button::new("subscribe", "Update Monthly Spend Limit").on_click(
|
||||
cx.listener(|this, _, _, cx| {
|
||||
this.thread.update(cx, |this, _cx| {
|
||||
this.clear_last_error();
|
||||
});
|
||||
|
||||
cx.open_url(&zed_urls::account_url(cx));
|
||||
cx.notify();
|
||||
}),
|
||||
),
|
||||
)
|
||||
.child(Button::new("dismiss", "Dismiss").on_click(cx.listener(
|
||||
|this, _, _, cx| {
|
||||
this.thread.update(cx, |this, _cx| {
|
||||
this.clear_last_error();
|
||||
});
|
||||
|
||||
cx.notify();
|
||||
},
|
||||
))),
|
||||
)
|
||||
.into_any()
|
||||
}
|
||||
|
||||
fn render_model_request_limit_reached_error(
|
||||
&self,
|
||||
plan: Plan,
|
||||
|
||||
@@ -749,11 +749,11 @@ pub enum ImageStatus {
|
||||
|
||||
impl ImageContext {
|
||||
pub fn eq_for_key(&self, other: &Self) -> bool {
|
||||
self.original_image.id() == other.original_image.id()
|
||||
self.original_image.id == other.original_image.id
|
||||
}
|
||||
|
||||
pub fn hash_for_key<H: Hasher>(&self, state: &mut H) {
|
||||
self.original_image.id().hash(state);
|
||||
self.original_image.id.hash(state);
|
||||
}
|
||||
|
||||
pub fn image(&self) -> Option<LanguageModelImage> {
|
||||
|
||||
@@ -942,8 +942,8 @@ impl MentionLink {
|
||||
format!("[@{}]({}:{})", title, Self::THREAD, id)
|
||||
}
|
||||
ThreadContextEntry::Context { path, title } => {
|
||||
let filename = path.file_name().unwrap_or_default().to_string_lossy();
|
||||
let escaped_filename = urlencoding::encode(&filename);
|
||||
let filename = path.file_name().unwrap_or_default();
|
||||
let escaped_filename = urlencoding::encode(&filename.to_string_lossy()).to_string();
|
||||
format!(
|
||||
"[@{}]({}:{}{})",
|
||||
title,
|
||||
|
||||
@@ -84,12 +84,6 @@ impl ContextStrip {
|
||||
}
|
||||
}
|
||||
|
||||
/// Whether or not the context strip has items to display
|
||||
pub fn has_context_items(&self, cx: &App) -> bool {
|
||||
self.context_store.read(cx).context().next().is_some()
|
||||
|| self.suggested_context(cx).is_some()
|
||||
}
|
||||
|
||||
fn added_contexts(&self, cx: &App) -> Vec<AddedContext> {
|
||||
if let Some(workspace) = self.workspace.upgrade() {
|
||||
let project = workspace.read(cx).project().read(cx);
|
||||
@@ -110,14 +104,14 @@ impl ContextStrip {
|
||||
}
|
||||
}
|
||||
|
||||
fn suggested_context(&self, cx: &App) -> Option<SuggestedContext> {
|
||||
fn suggested_context(&self, cx: &Context<Self>) -> Option<SuggestedContext> {
|
||||
match self.suggest_context_kind {
|
||||
SuggestContextKind::File => self.suggested_file(cx),
|
||||
SuggestContextKind::Thread => self.suggested_thread(cx),
|
||||
}
|
||||
}
|
||||
|
||||
fn suggested_file(&self, cx: &App) -> Option<SuggestedContext> {
|
||||
fn suggested_file(&self, cx: &Context<Self>) -> Option<SuggestedContext> {
|
||||
let workspace = self.workspace.upgrade()?;
|
||||
let active_item = workspace.read(cx).active_item(cx)?;
|
||||
|
||||
@@ -144,7 +138,7 @@ impl ContextStrip {
|
||||
})
|
||||
}
|
||||
|
||||
fn suggested_thread(&self, cx: &App) -> Option<SuggestedContext> {
|
||||
fn suggested_thread(&self, cx: &Context<Self>) -> Option<SuggestedContext> {
|
||||
if !self.context_picker.read(cx).allow_threads() {
|
||||
return None;
|
||||
}
|
||||
|
||||
@@ -75,7 +75,7 @@ impl Default for DebugAccountState {
|
||||
Self {
|
||||
enabled: false,
|
||||
trial_expired: false,
|
||||
plan: Plan::ZedFree,
|
||||
plan: Plan::Free,
|
||||
custom_prompt_usage: RequestUsage {
|
||||
limit: UsageLimit::Unlimited,
|
||||
amount: 0,
|
||||
|
||||
@@ -8,10 +8,9 @@ use anyhow::{Context as _, Result};
|
||||
use assistant_settings::AssistantSettings;
|
||||
use client::telemetry::Telemetry;
|
||||
use collections::{HashMap, HashSet, VecDeque, hash_map};
|
||||
use editor::display_map::EditorMargins;
|
||||
use editor::{
|
||||
Anchor, AnchorRangeExt, CodeActionProvider, Editor, EditorEvent, ExcerptId, ExcerptRange,
|
||||
MultiBuffer, MultiBufferSnapshot, ToOffset as _, ToPoint,
|
||||
GutterDimensions, MultiBuffer, MultiBufferSnapshot, ToOffset as _, ToPoint,
|
||||
actions::SelectAll,
|
||||
display_map::{
|
||||
BlockContext, BlockPlacement, BlockProperties, BlockStyle, CustomBlockId, RenderBlock,
|
||||
@@ -338,27 +337,13 @@ impl InlineAssistant {
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) {
|
||||
let (snapshot, initial_selections, newest_selection) = editor.update(cx, |editor, cx| {
|
||||
let selections = editor.selections.all::<Point>(cx);
|
||||
let newest_selection = editor.selections.newest::<Point>(cx);
|
||||
(editor.snapshot(window, cx), selections, newest_selection)
|
||||
let (snapshot, initial_selections) = editor.update(cx, |editor, cx| {
|
||||
(
|
||||
editor.snapshot(window, cx),
|
||||
editor.selections.all::<Point>(cx),
|
||||
)
|
||||
});
|
||||
|
||||
// Check if there is already an inline assistant that contains the
|
||||
// newest selection, if there is, focus it
|
||||
if let Some(editor_assists) = self.assists_by_editor.get(&editor.downgrade()) {
|
||||
for assist_id in &editor_assists.assist_ids {
|
||||
let assist = &self.assists[assist_id];
|
||||
let range = assist.range.to_point(&snapshot.buffer_snapshot);
|
||||
if range.start.row <= newest_selection.start.row
|
||||
&& newest_selection.end.row <= range.end.row
|
||||
{
|
||||
self.focus_assist(*assist_id, window, cx);
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut selections = Vec::<Selection<Point>>::new();
|
||||
let mut newest_selection = None;
|
||||
for mut selection in initial_selections {
|
||||
@@ -472,11 +457,11 @@ impl InlineAssistant {
|
||||
)
|
||||
});
|
||||
|
||||
let editor_margins = Arc::new(Mutex::new(EditorMargins::default()));
|
||||
let gutter_dimensions = Arc::new(Mutex::new(GutterDimensions::default()));
|
||||
let prompt_editor = cx.new(|cx| {
|
||||
PromptEditor::new_buffer(
|
||||
assist_id,
|
||||
editor_margins,
|
||||
gutter_dimensions.clone(),
|
||||
self.prompt_history.clone(),
|
||||
prompt_buffer.clone(),
|
||||
codegen.clone(),
|
||||
@@ -591,11 +576,11 @@ impl InlineAssistant {
|
||||
)
|
||||
});
|
||||
|
||||
let editor_margins = Arc::new(Mutex::new(EditorMargins::default()));
|
||||
let gutter_dimensions = Arc::new(Mutex::new(GutterDimensions::default()));
|
||||
let prompt_editor = cx.new(|cx| {
|
||||
PromptEditor::new_buffer(
|
||||
assist_id,
|
||||
editor_margins,
|
||||
gutter_dimensions.clone(),
|
||||
self.prompt_history.clone(),
|
||||
prompt_buffer.clone(),
|
||||
codegen.clone(),
|
||||
@@ -664,7 +649,6 @@ impl InlineAssistant {
|
||||
height: Some(prompt_editor_height),
|
||||
render: build_assist_editor_renderer(prompt_editor),
|
||||
priority: 0,
|
||||
render_in_minimap: false,
|
||||
},
|
||||
BlockProperties {
|
||||
style: BlockStyle::Sticky,
|
||||
@@ -679,7 +663,6 @@ impl InlineAssistant {
|
||||
.into_any_element()
|
||||
}),
|
||||
priority: 0,
|
||||
render_in_minimap: false,
|
||||
},
|
||||
];
|
||||
|
||||
@@ -1421,11 +1404,11 @@ impl InlineAssistant {
|
||||
|
||||
enum DeletedLines {}
|
||||
let mut editor = Editor::for_multibuffer(multi_buffer, None, window, cx);
|
||||
editor.disable_scrollbars_and_minimap(window, cx);
|
||||
editor.set_soft_wrap_mode(language::language_settings::SoftWrap::None, cx);
|
||||
editor.set_show_wrap_guides(false, cx);
|
||||
editor.set_show_gutter(false, cx);
|
||||
editor.scroll_manager.set_forbid_vertical_scroll(true);
|
||||
editor.set_show_scrollbars(false, cx);
|
||||
editor.set_read_only(true);
|
||||
editor.set_show_edit_predictions(Some(false), window, cx);
|
||||
editor.highlight_rows::<DeletedLines>(
|
||||
@@ -1449,12 +1432,11 @@ impl InlineAssistant {
|
||||
.bg(cx.theme().status().deleted_background)
|
||||
.size_full()
|
||||
.h(height as f32 * cx.window.line_height())
|
||||
.pl(cx.margins.gutter.full_width())
|
||||
.pl(cx.gutter_dimensions.full_width())
|
||||
.child(deleted_lines_editor.clone())
|
||||
.into_any_element()
|
||||
}),
|
||||
priority: 0,
|
||||
render_in_minimap: false,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -1612,9 +1594,9 @@ fn build_assist_editor_renderer(editor: &Entity<PromptEditor<BufferCodegen>>) ->
|
||||
let editor = editor.clone();
|
||||
|
||||
Arc::new(move |cx: &mut BlockContext| {
|
||||
let editor_margins = editor.read(cx).editor_margins();
|
||||
let gutter_dimensions = editor.read(cx).gutter_dimensions();
|
||||
|
||||
*editor_margins.lock() = *cx.margins;
|
||||
*gutter_dimensions.lock() = *cx.gutter_dimensions;
|
||||
editor.clone().into_any_element()
|
||||
})
|
||||
}
|
||||
|
||||
@@ -11,9 +11,9 @@ use crate::{CycleNextInlineAssist, CyclePreviousInlineAssist};
|
||||
use crate::{RemoveAllContext, ToggleContextPicker};
|
||||
use client::ErrorExt;
|
||||
use collections::VecDeque;
|
||||
use editor::display_map::EditorMargins;
|
||||
use editor::{
|
||||
ContextMenuOptions, Editor, EditorElement, EditorEvent, EditorMode, EditorStyle, MultiBuffer,
|
||||
ContextMenuOptions, Editor, EditorElement, EditorEvent, EditorMode, EditorStyle,
|
||||
GutterDimensions, MultiBuffer,
|
||||
actions::{MoveDown, MoveUp},
|
||||
};
|
||||
use feature_flags::{FeatureFlagAppExt as _, ZedProFeatureFlag};
|
||||
@@ -61,13 +61,11 @@ impl<T: 'static> Render for PromptEditor<T> {
|
||||
let ui_font_size = ThemeSettings::get_global(cx).ui_font_size(cx);
|
||||
let mut buttons = Vec::new();
|
||||
|
||||
const RIGHT_PADDING: Pixels = px(9.);
|
||||
|
||||
let (left_gutter_width, right_padding) = match &self.mode {
|
||||
let left_gutter_width = match &self.mode {
|
||||
PromptEditorMode::Buffer {
|
||||
id: _,
|
||||
codegen,
|
||||
editor_margins,
|
||||
gutter_dimensions,
|
||||
} => {
|
||||
let codegen = codegen.read(cx);
|
||||
|
||||
@@ -75,17 +73,13 @@ impl<T: 'static> Render for PromptEditor<T> {
|
||||
buttons.push(self.render_cycle_controls(&codegen, cx));
|
||||
}
|
||||
|
||||
let editor_margins = editor_margins.lock();
|
||||
let gutter = editor_margins.gutter;
|
||||
let gutter_dimensions = gutter_dimensions.lock();
|
||||
|
||||
let left_gutter_width = gutter.full_width() + (gutter.margin / 2.0);
|
||||
let right_padding = editor_margins.right + RIGHT_PADDING;
|
||||
|
||||
(left_gutter_width, right_padding)
|
||||
gutter_dimensions.full_width() + (gutter_dimensions.margin / 2.0)
|
||||
}
|
||||
PromptEditorMode::Terminal { .. } => {
|
||||
// Give the equivalent of the same left-padding that we're using on the right
|
||||
(Pixels::from(40.0), Pixels::from(24.))
|
||||
Pixels::from(40.0)
|
||||
}
|
||||
};
|
||||
|
||||
@@ -106,7 +100,7 @@ impl<T: 'static> Render for PromptEditor<T> {
|
||||
.size_full()
|
||||
.pt_0p5()
|
||||
.pb(bottom_padding)
|
||||
.pr(right_padding)
|
||||
.pr_6()
|
||||
.child(
|
||||
h_flex()
|
||||
.items_start()
|
||||
@@ -451,7 +445,7 @@ impl<T: 'static> PromptEditor<T> {
|
||||
editor.move_to_end(&Default::default(), window, cx)
|
||||
});
|
||||
}
|
||||
} else if self.context_strip.read(cx).has_context_items(cx) {
|
||||
} else {
|
||||
self.context_strip.focus_handle(cx).focus(window);
|
||||
}
|
||||
}
|
||||
@@ -812,7 +806,7 @@ pub enum PromptEditorMode {
|
||||
Buffer {
|
||||
id: InlineAssistId,
|
||||
codegen: Entity<BufferCodegen>,
|
||||
editor_margins: Arc<Mutex<EditorMargins>>,
|
||||
gutter_dimensions: Arc<Mutex<GutterDimensions>>,
|
||||
},
|
||||
Terminal {
|
||||
id: TerminalInlineAssistId,
|
||||
@@ -844,7 +838,7 @@ impl InlineAssistId {
|
||||
impl PromptEditor<BufferCodegen> {
|
||||
pub fn new_buffer(
|
||||
id: InlineAssistId,
|
||||
editor_margins: Arc<Mutex<EditorMargins>>,
|
||||
gutter_dimensions: Arc<Mutex<GutterDimensions>>,
|
||||
prompt_history: VecDeque<String>,
|
||||
prompt_buffer: Entity<MultiBuffer>,
|
||||
codegen: Entity<BufferCodegen>,
|
||||
@@ -861,7 +855,7 @@ impl PromptEditor<BufferCodegen> {
|
||||
let mode = PromptEditorMode::Buffer {
|
||||
id,
|
||||
codegen,
|
||||
editor_margins,
|
||||
gutter_dimensions,
|
||||
};
|
||||
|
||||
let prompt_editor = cx.new(|cx| {
|
||||
@@ -1001,9 +995,11 @@ impl PromptEditor<BufferCodegen> {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn editor_margins(&self) -> &Arc<Mutex<EditorMargins>> {
|
||||
pub fn gutter_dimensions(&self) -> &Arc<Mutex<GutterDimensions>> {
|
||||
match &self.mode {
|
||||
PromptEditorMode::Buffer { editor_margins, .. } => editor_margins,
|
||||
PromptEditorMode::Buffer {
|
||||
gutter_dimensions, ..
|
||||
} => gutter_dimensions,
|
||||
PromptEditorMode::Terminal { .. } => unreachable!(),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -401,7 +401,7 @@ impl MessageEditor {
|
||||
fn move_up(&mut self, _: &MoveUp, window: &mut Window, cx: &mut Context<Self>) {
|
||||
if self.context_picker_menu_handle.is_deployed() {
|
||||
cx.propagate();
|
||||
} else if self.context_strip.read(cx).has_context_items(cx) {
|
||||
} else {
|
||||
self.context_strip.focus_handle(cx).focus(window);
|
||||
}
|
||||
}
|
||||
@@ -1085,11 +1085,11 @@ impl MessageEditor {
|
||||
let plan = user_store
|
||||
.current_plan()
|
||||
.map(|plan| match plan {
|
||||
Plan::Free => zed_llm_client::Plan::ZedFree,
|
||||
Plan::Free => zed_llm_client::Plan::Free,
|
||||
Plan::ZedPro => zed_llm_client::Plan::ZedPro,
|
||||
Plan::ZedProTrial => zed_llm_client::Plan::ZedProTrial,
|
||||
})
|
||||
.unwrap_or(zed_llm_client::Plan::ZedFree);
|
||||
.unwrap_or(zed_llm_client::Plan::Free);
|
||||
let usage = self.thread.read(cx).last_usage().or_else(|| {
|
||||
maybe!({
|
||||
let amount = user_store.model_request_usage_amount()?;
|
||||
|
||||
@@ -191,7 +191,7 @@ impl TerminalInlineAssistant {
|
||||
};
|
||||
|
||||
self.prompt_history.retain(|prompt| *prompt != user_prompt);
|
||||
self.prompt_history.push_back(user_prompt);
|
||||
self.prompt_history.push_back(user_prompt.clone());
|
||||
if self.prompt_history.len() > PROMPT_HISTORY_MAX_LEN {
|
||||
self.prompt_history.pop_front();
|
||||
}
|
||||
|
||||
@@ -22,7 +22,7 @@ use language_model::{
|
||||
ConfiguredModel, LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent,
|
||||
LanguageModelId, LanguageModelKnownError, LanguageModelRegistry, LanguageModelRequest,
|
||||
LanguageModelRequestMessage, LanguageModelRequestTool, LanguageModelToolResult,
|
||||
LanguageModelToolResultContent, LanguageModelToolUseId, MessageContent,
|
||||
LanguageModelToolUseId, MaxMonthlySpendReachedError, MessageContent,
|
||||
ModelRequestLimitReachedError, PaymentRequiredError, RequestUsage, Role, SelectedModel,
|
||||
StopReason, TokenUsage,
|
||||
};
|
||||
@@ -214,7 +214,7 @@ pub struct GitState {
|
||||
pub diff: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
#[derive(Clone)]
|
||||
pub struct ThreadCheckpoint {
|
||||
message_id: MessageId,
|
||||
git_checkpoint: GitStoreCheckpoint,
|
||||
@@ -458,7 +458,7 @@ impl Thread {
|
||||
tools: Entity<ToolWorkingSet>,
|
||||
prompt_builder: Arc<PromptBuilder>,
|
||||
project_context: SharedProjectContext,
|
||||
window: Option<&mut Window>, // None in headless mode
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
let next_message_id = MessageId(
|
||||
@@ -880,13 +880,7 @@ impl Thread {
|
||||
}
|
||||
|
||||
pub fn output_for_tool(&self, id: &LanguageModelToolUseId) -> Option<&Arc<str>> {
|
||||
match &self.tool_use.tool_result(id)?.content {
|
||||
LanguageModelToolResultContent::Text(str) => Some(str),
|
||||
LanguageModelToolResultContent::Image(_) => {
|
||||
// TODO: We should display image
|
||||
None
|
||||
}
|
||||
}
|
||||
Some(&self.tool_use.tool_result(id)?.content)
|
||||
}
|
||||
|
||||
pub fn card_for_tool(&self, id: &LanguageModelToolUseId) -> Option<AnyToolCard> {
|
||||
@@ -996,7 +990,6 @@ impl Thread {
|
||||
new_role: Role,
|
||||
new_segments: Vec<MessageSegment>,
|
||||
loaded_context: Option<LoadedContext>,
|
||||
checkpoint: Option<GitStoreCheckpoint>,
|
||||
cx: &mut Context<Self>,
|
||||
) -> bool {
|
||||
let Some(message) = self.messages.iter_mut().find(|message| message.id == id) else {
|
||||
@@ -1007,15 +1000,6 @@ impl Thread {
|
||||
if let Some(context) = loaded_context {
|
||||
message.loaded_context = context;
|
||||
}
|
||||
if let Some(git_checkpoint) = checkpoint {
|
||||
self.checkpoints_by_message.insert(
|
||||
id,
|
||||
ThreadCheckpoint {
|
||||
message_id: id,
|
||||
git_checkpoint,
|
||||
},
|
||||
);
|
||||
}
|
||||
self.touch_updated_at();
|
||||
cx.emit(ThreadEvent::MessageEdited(id));
|
||||
true
|
||||
@@ -1698,6 +1682,10 @@ impl Thread {
|
||||
|
||||
if error.is::<PaymentRequiredError>() {
|
||||
cx.emit(ThreadEvent::ShowError(ThreadError::PaymentRequired));
|
||||
} else if error.is::<MaxMonthlySpendReachedError>() {
|
||||
cx.emit(ThreadEvent::ShowError(
|
||||
ThreadError::MaxMonthlySpendReached,
|
||||
));
|
||||
} else if let Some(error) =
|
||||
error.downcast_ref::<ModelRequestLimitReachedError>()
|
||||
{
|
||||
@@ -2253,7 +2241,7 @@ impl Thread {
|
||||
.read(cx)
|
||||
.enabled_tools(cx)
|
||||
.iter()
|
||||
.map(|tool| tool.name())
|
||||
.map(|tool| tool.name().to_string())
|
||||
.collect();
|
||||
|
||||
self.message_feedback.insert(message_id, feedback);
|
||||
@@ -2413,7 +2401,7 @@ impl Thread {
|
||||
.map(|repo| {
|
||||
repo.update(cx, |repo, _| {
|
||||
let current_branch =
|
||||
repo.branch.as_ref().map(|branch| branch.name().to_owned());
|
||||
repo.branch.as_ref().map(|branch| branch.name.to_string());
|
||||
repo.send_job(None, |state, _| async move {
|
||||
let RepositoryState::Local { backend, .. } = state else {
|
||||
return GitState {
|
||||
@@ -2514,15 +2502,7 @@ impl Thread {
|
||||
}
|
||||
|
||||
writeln!(markdown, "**\n")?;
|
||||
match &tool_result.content {
|
||||
LanguageModelToolResultContent::Text(str) => {
|
||||
writeln!(markdown, "{}", str)?;
|
||||
}
|
||||
LanguageModelToolResultContent::Image(image) => {
|
||||
writeln!(markdown, "", image.source)?;
|
||||
}
|
||||
}
|
||||
|
||||
writeln!(markdown, "{}", tool_result.content)?;
|
||||
if let Some(output) = tool_result.output.as_ref() {
|
||||
writeln!(
|
||||
markdown,
|
||||
@@ -2593,7 +2573,7 @@ impl Thread {
|
||||
.read(cx)
|
||||
.current_user()
|
||||
.map(|user| user.github_login.clone());
|
||||
let client = self.project.read(cx).client();
|
||||
let client = self.project.read(cx).client().clone();
|
||||
let serialize_task = self.serialize(cx);
|
||||
|
||||
cx.background_executor()
|
||||
@@ -2712,6 +2692,8 @@ impl Thread {
|
||||
pub enum ThreadError {
|
||||
#[error("Payment required")]
|
||||
PaymentRequired,
|
||||
#[error("Max monthly spend reached")]
|
||||
MaxMonthlySpendReached,
|
||||
#[error("Model request limit reached")]
|
||||
ModelRequestLimitReached { plan: Plan },
|
||||
#[error("Message {header}: {message}")]
|
||||
|
||||
@@ -260,7 +260,10 @@ impl ThreadHistory {
|
||||
}
|
||||
});
|
||||
|
||||
self.search_state = SearchState::Searching { query, _task: task };
|
||||
self.search_state = SearchState::Searching {
|
||||
query: query.clone(),
|
||||
_task: task,
|
||||
};
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
|
||||
@@ -19,7 +19,7 @@ use gpui::{
|
||||
};
|
||||
use heed::Database;
|
||||
use heed::types::SerdeBincode;
|
||||
use language_model::{LanguageModelToolResultContent, LanguageModelToolUseId, Role, TokenUsage};
|
||||
use language_model::{LanguageModelToolUseId, Role, TokenUsage};
|
||||
use project::context_server_store::{ContextServerStatus, ContextServerStore};
|
||||
use project::{Project, ProjectItem, ProjectPath, Worktree};
|
||||
use prompt_store::{
|
||||
@@ -386,25 +386,6 @@ impl ThreadStore {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn create_thread_from_serialized(
|
||||
&mut self,
|
||||
serialized: SerializedThread,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Entity<Thread> {
|
||||
cx.new(|cx| {
|
||||
Thread::deserialize(
|
||||
ThreadId::new(),
|
||||
serialized,
|
||||
self.project.clone(),
|
||||
self.tools.clone(),
|
||||
self.prompt_builder.clone(),
|
||||
self.project_context.clone(),
|
||||
None,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
pub fn open_thread(
|
||||
&self,
|
||||
id: &ThreadId,
|
||||
@@ -430,7 +411,7 @@ impl ThreadStore {
|
||||
this.tools.clone(),
|
||||
this.prompt_builder.clone(),
|
||||
this.project_context.clone(),
|
||||
Some(window),
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
@@ -505,8 +486,8 @@ impl ThreadStore {
|
||||
ToolSource::Native,
|
||||
&profile
|
||||
.tools
|
||||
.into_iter()
|
||||
.filter_map(|(tool, enabled)| enabled.then(|| tool))
|
||||
.iter()
|
||||
.filter_map(|(tool, enabled)| enabled.then(|| tool.clone()))
|
||||
.collect::<Vec<_>>(),
|
||||
cx,
|
||||
);
|
||||
@@ -530,32 +511,32 @@ impl ThreadStore {
|
||||
});
|
||||
}
|
||||
// Enable all the tools from all context servers, but disable the ones that are explicitly disabled
|
||||
for (context_server_id, preset) in profile.context_servers {
|
||||
for (context_server_id, preset) in &profile.context_servers {
|
||||
self.tools.update(cx, |tools, cx| {
|
||||
tools.disable(
|
||||
ToolSource::ContextServer {
|
||||
id: context_server_id.into(),
|
||||
id: context_server_id.clone().into(),
|
||||
},
|
||||
&preset
|
||||
.tools
|
||||
.into_iter()
|
||||
.filter_map(|(tool, enabled)| (!enabled).then(|| tool))
|
||||
.iter()
|
||||
.filter_map(|(tool, enabled)| (!enabled).then(|| tool.clone()))
|
||||
.collect::<Vec<_>>(),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
}
|
||||
} else {
|
||||
for (context_server_id, preset) in profile.context_servers {
|
||||
for (context_server_id, preset) in &profile.context_servers {
|
||||
self.tools.update(cx, |tools, cx| {
|
||||
tools.enable(
|
||||
ToolSource::ContextServer {
|
||||
id: context_server_id.into(),
|
||||
id: context_server_id.clone().into(),
|
||||
},
|
||||
&preset
|
||||
.tools
|
||||
.into_iter()
|
||||
.filter_map(|(tool, enabled)| enabled.then(|| tool))
|
||||
.iter()
|
||||
.filter_map(|(tool, enabled)| enabled.then(|| tool.clone()))
|
||||
.collect::<Vec<_>>(),
|
||||
cx,
|
||||
)
|
||||
@@ -794,7 +775,7 @@ pub struct SerializedToolUse {
|
||||
pub struct SerializedToolResult {
|
||||
pub tool_use_id: LanguageModelToolUseId,
|
||||
pub is_error: bool,
|
||||
pub content: LanguageModelToolResultContent,
|
||||
pub content: Arc<str>,
|
||||
pub output: Option<serde_json::Value>,
|
||||
}
|
||||
|
||||
|
||||
@@ -1,16 +1,14 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use anyhow::Result;
|
||||
use assistant_tool::{
|
||||
AnyToolCard, Tool, ToolResultContent, ToolResultOutput, ToolUseStatus, ToolWorkingSet,
|
||||
};
|
||||
use assistant_tool::{AnyToolCard, Tool, ToolResultOutput, ToolUseStatus, ToolWorkingSet};
|
||||
use collections::HashMap;
|
||||
use futures::FutureExt as _;
|
||||
use futures::future::Shared;
|
||||
use gpui::{App, Entity, SharedString, Task};
|
||||
use language_model::{
|
||||
ConfiguredModel, LanguageModel, LanguageModelRequest, LanguageModelToolResult,
|
||||
LanguageModelToolResultContent, LanguageModelToolUse, LanguageModelToolUseId, Role,
|
||||
LanguageModelToolUse, LanguageModelToolUseId, Role,
|
||||
};
|
||||
use project::Project;
|
||||
use ui::{IconName, Window};
|
||||
@@ -54,19 +52,15 @@ impl ToolUseState {
|
||||
/// Constructs a [`ToolUseState`] from the given list of [`SerializedMessage`]s.
|
||||
///
|
||||
/// Accepts a function to filter the tools that should be used to populate the state.
|
||||
///
|
||||
/// If `window` is `None` (e.g., when in headless mode or when running evals),
|
||||
/// tool cards won't be deserialized
|
||||
pub fn from_serialized_messages(
|
||||
tools: Entity<ToolWorkingSet>,
|
||||
messages: &[SerializedMessage],
|
||||
project: Entity<Project>,
|
||||
window: Option<&mut Window>, // None in headless mode
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) -> Self {
|
||||
let mut this = Self::new(tools);
|
||||
let mut tool_names_by_id = HashMap::default();
|
||||
let mut window = window;
|
||||
|
||||
for message in messages {
|
||||
match message.role {
|
||||
@@ -111,17 +105,12 @@ impl ToolUseState {
|
||||
},
|
||||
);
|
||||
|
||||
if let Some(window) = &mut window {
|
||||
if let Some(tool) = this.tools.read(cx).tool(tool_use, cx) {
|
||||
if let Some(output) = tool_result.output.clone() {
|
||||
if let Some(card) = tool.deserialize_card(
|
||||
output,
|
||||
project.clone(),
|
||||
window,
|
||||
cx,
|
||||
) {
|
||||
this.tool_result_cards.insert(tool_use_id, card);
|
||||
}
|
||||
if let Some(tool) = this.tools.read(cx).tool(tool_use, cx) {
|
||||
if let Some(output) = tool_result.output.clone() {
|
||||
if let Some(card) =
|
||||
tool.deserialize_card(output, project.clone(), window, cx)
|
||||
{
|
||||
this.tool_result_cards.insert(tool_use_id, card);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -176,16 +165,10 @@ impl ToolUseState {
|
||||
|
||||
let status = (|| {
|
||||
if let Some(tool_result) = tool_result {
|
||||
let content = tool_result
|
||||
.content
|
||||
.to_str()
|
||||
.map(|str| str.to_owned().into())
|
||||
.unwrap_or_default();
|
||||
|
||||
return if tool_result.is_error {
|
||||
ToolUseStatus::Error(content)
|
||||
ToolUseStatus::Error(tool_result.content.clone().into())
|
||||
} else {
|
||||
ToolUseStatus::Finished(content)
|
||||
ToolUseStatus::Finished(tool_result.content.clone().into())
|
||||
};
|
||||
}
|
||||
|
||||
@@ -416,45 +399,21 @@ impl ToolUseState {
|
||||
let tool_result = output.content;
|
||||
const BYTES_PER_TOKEN_ESTIMATE: usize = 3;
|
||||
|
||||
let old_use = self.pending_tool_uses_by_id.remove(&tool_use_id);
|
||||
|
||||
// Protect from overly large output
|
||||
// Protect from clearly large output
|
||||
let tool_output_limit = configured_model
|
||||
.map(|model| model.model.max_token_count() * BYTES_PER_TOKEN_ESTIMATE)
|
||||
.unwrap_or(usize::MAX);
|
||||
|
||||
let content = match tool_result {
|
||||
ToolResultContent::Text(text) => {
|
||||
let text = if text.len() < tool_output_limit {
|
||||
text
|
||||
} else {
|
||||
let truncated = truncate_lines_to_byte_limit(&text, tool_output_limit);
|
||||
format!(
|
||||
"Tool result too long. The first {} bytes:\n\n{}",
|
||||
truncated.len(),
|
||||
truncated
|
||||
)
|
||||
};
|
||||
LanguageModelToolResultContent::Text(text.into())
|
||||
}
|
||||
ToolResultContent::Image(language_model_image) => {
|
||||
if language_model_image.estimate_tokens() < tool_output_limit {
|
||||
LanguageModelToolResultContent::Image(language_model_image)
|
||||
} else {
|
||||
self.tool_results.insert(
|
||||
tool_use_id.clone(),
|
||||
LanguageModelToolResult {
|
||||
tool_use_id: tool_use_id.clone(),
|
||||
tool_name,
|
||||
content: "Tool responded with an image that would exceeded the remaining tokens".into(),
|
||||
is_error: true,
|
||||
output: None,
|
||||
},
|
||||
);
|
||||
let tool_result = if tool_result.len() <= tool_output_limit {
|
||||
tool_result
|
||||
} else {
|
||||
let truncated = truncate_lines_to_byte_limit(&tool_result, tool_output_limit);
|
||||
|
||||
return old_use;
|
||||
}
|
||||
}
|
||||
format!(
|
||||
"Tool result too long. The first {} bytes:\n\n{}",
|
||||
truncated.len(),
|
||||
truncated
|
||||
)
|
||||
};
|
||||
|
||||
self.tool_results.insert(
|
||||
@@ -462,13 +421,12 @@ impl ToolUseState {
|
||||
LanguageModelToolResult {
|
||||
tool_use_id: tool_use_id.clone(),
|
||||
tool_name,
|
||||
content,
|
||||
content: tool_result.into(),
|
||||
is_error: false,
|
||||
output: output.output,
|
||||
},
|
||||
);
|
||||
|
||||
old_use
|
||||
self.pending_tool_uses_by_id.remove(&tool_use_id)
|
||||
}
|
||||
Err(err) => {
|
||||
self.tool_results.insert(
|
||||
@@ -476,7 +434,7 @@ impl ToolUseState {
|
||||
LanguageModelToolResult {
|
||||
tool_use_id: tool_use_id.clone(),
|
||||
tool_name,
|
||||
content: LanguageModelToolResultContent::Text(err.to_string().into()),
|
||||
content: err.to_string().into(),
|
||||
is_error: true,
|
||||
output: None,
|
||||
},
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
use std::sync::OnceLock;
|
||||
|
||||
use collections::HashMap;
|
||||
use component::ComponentId;
|
||||
use gpui::{App, Entity, WeakEntity};
|
||||
use linkme::distributed_slice;
|
||||
use std::sync::OnceLock;
|
||||
use ui::{AnyElement, Component, ComponentScope, Window};
|
||||
use workspace::Workspace;
|
||||
|
||||
@@ -12,15 +12,9 @@ use crate::ActiveThread;
|
||||
pub type PreviewFn =
|
||||
fn(WeakEntity<Workspace>, Entity<ActiveThread>, &mut Window, &mut App) -> Option<AnyElement>;
|
||||
|
||||
pub struct AgentPreviewFn(fn() -> (ComponentId, PreviewFn));
|
||||
|
||||
impl AgentPreviewFn {
|
||||
pub const fn new(f: fn() -> (ComponentId, PreviewFn)) -> Self {
|
||||
Self(f)
|
||||
}
|
||||
}
|
||||
|
||||
inventory::collect!(AgentPreviewFn);
|
||||
/// Distributed slice for preview registration functions
|
||||
#[distributed_slice]
|
||||
pub static __ALL_AGENT_PREVIEWS: [fn() -> (ComponentId, PreviewFn)] = [..];
|
||||
|
||||
/// Trait that must be implemented by components that provide agent previews.
|
||||
pub trait AgentPreview: Component + Sized {
|
||||
@@ -42,14 +36,16 @@ pub trait AgentPreview: Component + Sized {
|
||||
#[macro_export]
|
||||
macro_rules! register_agent_preview {
|
||||
($type:ty) => {
|
||||
inventory::submit! {
|
||||
$crate::ui::preview::AgentPreviewFn::new(|| {
|
||||
(
|
||||
<$type as component::Component>::id(),
|
||||
<$type as $crate::ui::preview::AgentPreview>::agent_preview,
|
||||
)
|
||||
})
|
||||
}
|
||||
#[linkme::distributed_slice($crate::ui::preview::__ALL_AGENT_PREVIEWS)]
|
||||
static __REGISTER_AGENT_PREVIEW: fn() -> (
|
||||
component::ComponentId,
|
||||
$crate::ui::preview::PreviewFn,
|
||||
) = || {
|
||||
(
|
||||
<$type as component::Component>::id(),
|
||||
<$type as $crate::ui::preview::AgentPreview>::agent_preview,
|
||||
)
|
||||
};
|
||||
};
|
||||
}
|
||||
|
||||
@@ -60,8 +56,8 @@ static AGENT_PREVIEW_REGISTRY: OnceLock<HashMap<ComponentId, PreviewFn>> = OnceL
|
||||
fn get_or_init_registry() -> &'static HashMap<ComponentId, PreviewFn> {
|
||||
AGENT_PREVIEW_REGISTRY.get_or_init(|| {
|
||||
let mut map = HashMap::default();
|
||||
for register_fn in inventory::iter::<AgentPreviewFn>() {
|
||||
let (id, preview_fn) = (register_fn.0)();
|
||||
for register_fn in __ALL_AGENT_PREVIEWS.iter() {
|
||||
let (id, preview_fn) = register_fn();
|
||||
map.insert(id, preview_fn);
|
||||
}
|
||||
map
|
||||
|
||||
@@ -39,7 +39,7 @@ impl RenderOnce for UsageCallout {

let (title, message, button_text, url) = if is_limit_reached {
match self.plan {
Plan::ZedFree => (
Plan::Free => (
"Out of free prompts",
"Upgrade to continue, wait for the next reset, or switch to API key."
.to_string(),
@@ -61,7 +61,7 @@ impl RenderOnce for UsageCallout {
}
} else {
match self.plan {
Plan::ZedFree => (
Plan::Free => (
"Reaching free plan limit soon",
format!(
"{remaining} remaining - Upgrade to increase limit, or switch providers",
@@ -120,7 +120,7 @@ impl Component for UsageCallout {
single_example(
"Approaching limit (90%)",
UsageCallout::new(
Plan::ZedFree,
Plan::Free,
RequestUsage {
limit: UsageLimit::Limited(50),
amount: 45, // 90% of limit
@@ -131,7 +131,7 @@ impl Component for UsageCallout {
single_example(
"Limit reached (100%)",
UsageCallout::new(
Plan::ZedFree,
Plan::Free,
RequestUsage {
limit: UsageLimit::Limited(50),
amount: 50, // 100% of limit

@@ -534,26 +534,12 @@ pub enum RequestContent {
ToolResult {
tool_use_id: String,
is_error: bool,
content: ToolResultContent,
content: String,
#[serde(skip_serializing_if = "Option::is_none")]
cache_control: Option<CacheControl>,
},
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(untagged)]
pub enum ToolResultContent {
Plain(String),
Multipart(Vec<ToolResultPart>),
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "lowercase")]
pub enum ToolResultPart {
Text { text: String },
Image { source: ImageSource },
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum ResponseContent {

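For reference, the removed ToolResultContent relied on #[serde(untagged)], so a plain string and a multipart list serialize to different JSON shapes with no explicit tag. A minimal standalone sketch of that behavior, assuming serde (with derive) and serde_json, with the field set trimmed to the Text variant for illustration:

use serde::{Deserialize, Serialize};

#[derive(Debug, Serialize, Deserialize)]
#[serde(untagged)]
enum ToolResultContent {
    Plain(String),
    Multipart(Vec<ToolResultPart>),
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "lowercase")]
enum ToolResultPart {
    Text { text: String },
}

fn main() {
    let plain = ToolResultContent::Plain("done".into());
    let parts = ToolResultContent::Multipart(vec![ToolResultPart::Text {
        text: "done".into(),
    }]);
    // Untagged: a bare JSON string vs. an array of tagged parts.
    println!("{}", serde_json::to_string(&plain).unwrap()); // "done"
    println!("{}", serde_json::to_string(&parts).unwrap()); // [{"type":"text","text":"done"}]
}
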
@@ -163,26 +163,20 @@ impl AskPassSession {
#[cfg(unix)]
fn get_shell_safe_zed_path() -> anyhow::Result<String> {
let zed_path = std::env::current_exe()
.context("Failed to determine current executable path for use in askpass")?
.context("Failed to figure out current executable path for use in askpass")?
.to_string_lossy()
// see https://github.com/rust-lang/rust/issues/69343
.trim_end_matches(" (deleted)")
.to_string();

// NOTE: this was previously enabled, however, it caused errors when it shouldn't have
// (see https://github.com/zed-industries/zed/issues/29819)
// The zed path failing to execute within the askpass script results in very vague ssh
// authentication failed errors, so this was done to try and surface a better error
//
// use std::os::unix::fs::MetadataExt;
// let metadata = std::fs::metadata(&zed_path)
//     .context("Failed to check metadata of Zed executable path for use in askpass")?;
// let is_executable = metadata.is_file() && metadata.mode() & 0o111 != 0;
// anyhow::ensure!(
//     is_executable,
//     "Failed to verify Zed executable path for use in askpass"
// );

// sanity check on unix systems that the path exists and is executable
// todo(windows): implement this check for windows (or just use `is-executable` crate)
use std::os::unix::fs::MetadataExt;
let metadata = std::fs::metadata(&zed_path)
.context("Failed to check metadata of Zed executable path for use in askpass")?;
let is_executable = metadata.is_file() && metadata.mode() & 0o111 != 0;
anyhow::ensure!(
is_executable,
"Failed to verify Zed executable path for use in askpass"
);
// As of writing, this can only fail if the path contains a null byte, which shouldn't be possible
// but shlex has annotated the error as #[non_exhaustive] so we can't make it a compile error if other
// errors are introduced in the future :(

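The trailing comments above refer to the shlex crate and its #[non_exhaustive] quoting error. As a rough illustration only, not the diff's own code, this sketch shows the kind of shell-quoting step implied there, assuming shlex's try_quote is available:

fn main() {
    let zed_path = std::env::current_exe()
        .expect("failed to get current executable path")
        .to_string_lossy()
        // see https://github.com/rust-lang/rust/issues/69343
        .trim_end_matches(" (deleted)")
        .to_string();

    // try_quote only fails when the input contains a NUL byte.
    match shlex::try_quote(&zed_path) {
        Ok(quoted) => println!("shell-safe path: {quoted}"),
        Err(err) => eprintln!("could not shell-quote the Zed path: {err}"),
    }
}
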
@@ -46,6 +46,7 @@ serde_json.workspace = true
settings.workspace = true
smallvec.workspace = true
smol.workspace = true
strum.workspace = true
telemetry_events.workspace = true
text.workspace = true
theme.workspace = true

@@ -2,6 +2,7 @@ mod context;
mod context_editor;
mod context_history;
mod context_store;
mod patch;
mod slash_command;
mod slash_command_picker;

@@ -15,6 +16,7 @@ pub use crate::context::*;
pub use crate::context_editor::*;
pub use crate::context_history::*;
pub use crate::context_store::*;
pub use crate::patch::*;
pub use crate::slash_command::*;

pub fn init(client: Arc<Client>, cx: &mut App) {

@@ -1,6 +1,7 @@
|
||||
#[cfg(test)]
|
||||
mod context_tests;
|
||||
|
||||
use crate::patch::{AssistantEdit, AssistantPatch, AssistantPatchStatus};
|
||||
use anyhow::{Context as _, Result, anyhow, bail};
|
||||
use assistant_settings::AssistantSettings;
|
||||
use assistant_slash_command::{
|
||||
@@ -21,8 +22,8 @@ use language::{AnchorRangeExt, Bias, Buffer, LanguageRegistry, OffsetRangeExt, P
|
||||
use language_model::{
|
||||
LanguageModel, LanguageModelCacheConfiguration, LanguageModelCompletionEvent,
|
||||
LanguageModelImage, LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage,
|
||||
LanguageModelToolUseId, MessageContent, PaymentRequiredError, Role, StopReason,
|
||||
report_assistant_event,
|
||||
LanguageModelToolUseId, MaxMonthlySpendReachedError, MessageContent, PaymentRequiredError,
|
||||
Role, StopReason, report_assistant_event,
|
||||
};
|
||||
use open_ai::Model as OpenAiModel;
|
||||
use paths::contexts_dir;
|
||||
@@ -36,6 +37,7 @@ use std::{
|
||||
iter, mem,
|
||||
ops::Range,
|
||||
path::Path,
|
||||
str::FromStr as _,
|
||||
sync::Arc,
|
||||
time::{Duration, Instant},
|
||||
};
|
||||
@@ -120,6 +122,14 @@ impl MessageStatus {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
|
||||
pub enum RequestType {
|
||||
/// Request a normal chat response from the model.
|
||||
Chat,
|
||||
/// Add a preamble to the message, which tells the model to return a structured response that suggests edits.
|
||||
SuggestEdits,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub enum ContextOperation {
|
||||
InsertMessage {
|
||||
@@ -447,12 +457,17 @@ impl ContextOperation {
|
||||
pub enum ContextEvent {
|
||||
ShowAssistError(SharedString),
|
||||
ShowPaymentRequiredError,
|
||||
ShowMaxMonthlySpendReachedError,
|
||||
MessagesEdited,
|
||||
SummaryChanged,
|
||||
SummaryGenerated,
|
||||
StreamedCompletion,
|
||||
StartedThoughtProcess(Range<language::Anchor>),
|
||||
EndedThoughtProcess(language::Anchor),
|
||||
PatchesUpdated {
|
||||
removed: Vec<Range<language::Anchor>>,
|
||||
updated: Vec<Range<language::Anchor>>,
|
||||
},
|
||||
InvokedSlashCommandChanged {
|
||||
command_id: InvokedSlashCommandId,
|
||||
},
|
||||
@@ -652,6 +667,26 @@ struct PendingCompletion {
|
||||
#[derive(Copy, Clone, Debug, Hash, Eq, PartialEq)]
|
||||
pub struct InvokedSlashCommandId(clock::Lamport);
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct XmlTag {
|
||||
pub kind: XmlTagKind,
|
||||
pub range: Range<text::Anchor>,
|
||||
pub is_open_tag: bool,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, strum::EnumString, PartialEq, Eq, strum::AsRefStr)]
|
||||
#[strum(serialize_all = "snake_case")]
|
||||
pub enum XmlTagKind {
|
||||
Patch,
|
||||
Title,
|
||||
Edit,
|
||||
Path,
|
||||
Description,
|
||||
OldText,
|
||||
NewText,
|
||||
Operation,
|
||||
}
|
||||
|
||||
pub struct AssistantContext {
|
||||
id: ContextId,
|
||||
timestamp: clock::Lamport,
|
||||
@@ -680,6 +715,8 @@ pub struct AssistantContext {
|
||||
_subscriptions: Vec<Subscription>,
|
||||
telemetry: Option<Arc<Telemetry>>,
|
||||
language_registry: Arc<LanguageRegistry>,
|
||||
patches: Vec<AssistantPatch>,
|
||||
xml_tags: Vec<XmlTag>,
|
||||
project: Option<Entity<Project>>,
|
||||
prompt_builder: Arc<PromptBuilder>,
|
||||
}
|
||||
@@ -694,6 +731,18 @@ impl ContextAnnotation for ParsedSlashCommand {
|
||||
}
|
||||
}
|
||||
|
||||
impl ContextAnnotation for AssistantPatch {
|
||||
fn range(&self) -> &Range<language::Anchor> {
|
||||
&self.range
|
||||
}
|
||||
}
|
||||
|
||||
impl ContextAnnotation for XmlTag {
|
||||
fn range(&self) -> &Range<language::Anchor> {
|
||||
&self.range
|
||||
}
|
||||
}
|
||||
|
||||
impl EventEmitter<ContextEvent> for AssistantContext {}
|
||||
|
||||
impl AssistantContext {
|
||||
@@ -770,6 +819,8 @@ impl AssistantContext {
|
||||
project,
|
||||
language_registry,
|
||||
slash_commands,
|
||||
patches: Vec::new(),
|
||||
xml_tags: Vec::new(),
|
||||
prompt_builder,
|
||||
};
|
||||
|
||||
@@ -1165,6 +1216,48 @@ impl AssistantContext {
|
||||
&self.summary
|
||||
}
|
||||
|
||||
pub fn patch_containing(&self, position: Point, cx: &App) -> Option<&AssistantPatch> {
|
||||
let buffer = self.buffer.read(cx);
|
||||
let index = self.patches.binary_search_by(|patch| {
|
||||
let patch_range = patch.range.to_point(&buffer);
|
||||
if position < patch_range.start {
|
||||
Ordering::Greater
|
||||
} else if position > patch_range.end {
|
||||
Ordering::Less
|
||||
} else {
|
||||
Ordering::Equal
|
||||
}
|
||||
});
|
||||
if let Ok(ix) = index {
|
||||
Some(&self.patches[ix])
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
pub fn patch_ranges(&self) -> impl Iterator<Item = Range<language::Anchor>> + '_ {
|
||||
self.patches.iter().map(|patch| patch.range.clone())
|
||||
}
|
||||
|
||||
pub fn patch_for_range(
|
||||
&self,
|
||||
range: &Range<language::Anchor>,
|
||||
cx: &App,
|
||||
) -> Option<&AssistantPatch> {
|
||||
let buffer = self.buffer.read(cx);
|
||||
let index = self.patch_index_for_range(range, buffer).ok()?;
|
||||
Some(&self.patches[index])
|
||||
}
|
||||
|
||||
fn patch_index_for_range(
|
||||
&self,
|
||||
tagged_range: &Range<text::Anchor>,
|
||||
buffer: &text::BufferSnapshot,
|
||||
) -> Result<usize, usize> {
|
||||
self.patches
|
||||
.binary_search_by(|probe| probe.range.cmp(&tagged_range, buffer))
|
||||
}
|
||||
|
||||
pub fn parsed_slash_commands(&self) -> &[ParsedSlashCommand] {
|
||||
&self.parsed_slash_commands
|
||||
}
|
||||
@@ -1244,7 +1337,7 @@ impl AssistantContext {
|
||||
let Some(model) = LanguageModelRegistry::read_global(cx).default_model() else {
|
||||
return;
|
||||
};
|
||||
let request = self.to_completion_request(Some(&model.model), cx);
|
||||
let request = self.to_completion_request(Some(&model.model), RequestType::Chat, cx);
|
||||
let debounce = self.token_count.is_some();
|
||||
self.pending_token_count = cx.spawn(async move |this, cx| {
|
||||
async move {
|
||||
@@ -1390,7 +1483,7 @@ impl AssistantContext {
|
||||
}
|
||||
|
||||
let request = {
|
||||
let mut req = self.to_completion_request(Some(&model), cx);
|
||||
let mut req = self.to_completion_request(Some(&model), RequestType::Chat, cx);
|
||||
// Skip the last message because it's likely to change and
|
||||
// therefore would be a waste to cache.
|
||||
req.messages.pop();
|
||||
@@ -1465,6 +1558,8 @@ impl AssistantContext {
|
||||
|
||||
let mut removed_parsed_slash_command_ranges = Vec::new();
|
||||
let mut updated_parsed_slash_commands = Vec::new();
|
||||
let mut removed_patches = Vec::new();
|
||||
let mut updated_patches = Vec::new();
|
||||
while let Some(mut row_range) = row_ranges.next() {
|
||||
while let Some(next_row_range) = row_ranges.peek() {
|
||||
if row_range.end >= next_row_range.start {
|
||||
@@ -1489,6 +1584,13 @@ impl AssistantContext {
|
||||
cx,
|
||||
);
|
||||
self.invalidate_pending_slash_commands(&buffer, cx);
|
||||
self.reparse_patches_in_range(
|
||||
start..end,
|
||||
&buffer,
|
||||
&mut updated_patches,
|
||||
&mut removed_patches,
|
||||
cx,
|
||||
);
|
||||
}
|
||||
|
||||
if !updated_parsed_slash_commands.is_empty()
|
||||
@@ -1499,6 +1601,13 @@ impl AssistantContext {
|
||||
updated: updated_parsed_slash_commands,
|
||||
});
|
||||
}
|
||||
|
||||
if !updated_patches.is_empty() || !removed_patches.is_empty() {
|
||||
cx.emit(ContextEvent::PatchesUpdated {
|
||||
removed: removed_patches,
|
||||
updated: updated_patches,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
fn reparse_slash_commands_in_range(
|
||||
@@ -1589,6 +1698,267 @@ impl AssistantContext {
|
||||
}
|
||||
}
|
||||
|
||||
fn reparse_patches_in_range(
|
||||
&mut self,
|
||||
range: Range<text::Anchor>,
|
||||
buffer: &BufferSnapshot,
|
||||
updated: &mut Vec<Range<text::Anchor>>,
|
||||
removed: &mut Vec<Range<text::Anchor>>,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
// Rebuild the XML tags in the edited range.
|
||||
let intersecting_tags_range =
|
||||
self.indices_intersecting_buffer_range(&self.xml_tags, range.clone(), cx);
|
||||
let new_tags = self.parse_xml_tags_in_range(buffer, range.clone(), cx);
|
||||
self.xml_tags
|
||||
.splice(intersecting_tags_range.clone(), new_tags);
|
||||
|
||||
// Find which patches intersect the changed range.
|
||||
let intersecting_patches_range =
|
||||
self.indices_intersecting_buffer_range(&self.patches, range.clone(), cx);
|
||||
|
||||
// Reparse all tags after the last unchanged patch before the change.
|
||||
let mut tags_start_ix = 0;
|
||||
if let Some(preceding_unchanged_patch) =
|
||||
self.patches[..intersecting_patches_range.start].last()
|
||||
{
|
||||
tags_start_ix = match self.xml_tags.binary_search_by(|tag| {
|
||||
tag.range
|
||||
.start
|
||||
.cmp(&preceding_unchanged_patch.range.end, buffer)
|
||||
.then(Ordering::Less)
|
||||
}) {
|
||||
Ok(ix) | Err(ix) => ix,
|
||||
};
|
||||
}
|
||||
|
||||
// Rebuild the patches in the range.
|
||||
let new_patches = self.parse_patches(tags_start_ix, range.end, buffer, cx);
|
||||
updated.extend(new_patches.iter().map(|patch| patch.range.clone()));
|
||||
let removed_patches = self.patches.splice(intersecting_patches_range, new_patches);
|
||||
removed.extend(
|
||||
removed_patches
|
||||
.map(|patch| patch.range)
|
||||
.filter(|range| !updated.contains(&range)),
|
||||
);
|
||||
}
|
||||
|
||||
fn parse_xml_tags_in_range(
|
||||
&self,
|
||||
buffer: &BufferSnapshot,
|
||||
range: Range<text::Anchor>,
|
||||
cx: &App,
|
||||
) -> Vec<XmlTag> {
|
||||
let mut messages = self.messages(cx).peekable();
|
||||
|
||||
let mut tags = Vec::new();
|
||||
let mut lines = buffer.text_for_range(range).lines();
|
||||
let mut offset = lines.offset();
|
||||
|
||||
while let Some(line) = lines.next() {
|
||||
while let Some(message) = messages.peek() {
|
||||
if offset < message.offset_range.end {
|
||||
break;
|
||||
} else {
|
||||
messages.next();
|
||||
}
|
||||
}
|
||||
|
||||
let is_assistant_message = messages
|
||||
.peek()
|
||||
.map_or(false, |message| message.role == Role::Assistant);
|
||||
if is_assistant_message {
|
||||
for (start_ix, _) in line.match_indices('<') {
|
||||
let mut name_start_ix = start_ix + 1;
|
||||
let closing_bracket_ix = line[start_ix..].find('>').map(|i| start_ix + i);
|
||||
if let Some(closing_bracket_ix) = closing_bracket_ix {
|
||||
let end_ix = closing_bracket_ix + 1;
|
||||
let mut is_open_tag = true;
|
||||
if line[name_start_ix..closing_bracket_ix].starts_with('/') {
|
||||
name_start_ix += 1;
|
||||
is_open_tag = false;
|
||||
}
|
||||
let tag_inner = &line[name_start_ix..closing_bracket_ix];
|
||||
let tag_name_len = tag_inner
|
||||
.find(|c: char| c.is_whitespace())
|
||||
.unwrap_or(tag_inner.len());
|
||||
if let Ok(kind) = XmlTagKind::from_str(&tag_inner[..tag_name_len]) {
|
||||
tags.push(XmlTag {
|
||||
range: buffer.anchor_after(offset + start_ix)
|
||||
..buffer.anchor_before(offset + end_ix),
|
||||
is_open_tag,
|
||||
kind,
|
||||
});
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
offset = lines.offset();
|
||||
}
|
||||
tags
|
||||
}
|
||||
|
||||
fn parse_patches(
|
||||
&mut self,
|
||||
tags_start_ix: usize,
|
||||
buffer_end: text::Anchor,
|
||||
buffer: &BufferSnapshot,
|
||||
cx: &App,
|
||||
) -> Vec<AssistantPatch> {
|
||||
let mut new_patches = Vec::new();
|
||||
let mut pending_patch = None;
|
||||
let mut patch_tag_depth = 0;
|
||||
let mut tags = self.xml_tags[tags_start_ix..].iter().peekable();
|
||||
'tags: while let Some(tag) = tags.next() {
|
||||
if tag.range.start.cmp(&buffer_end, buffer).is_gt() && patch_tag_depth == 0 {
|
||||
break;
|
||||
}
|
||||
|
||||
if tag.kind == XmlTagKind::Patch && tag.is_open_tag {
|
||||
patch_tag_depth += 1;
|
||||
let patch_start = tag.range.start;
|
||||
let mut edits = Vec::<Result<AssistantEdit>>::new();
|
||||
let mut patch = AssistantPatch {
|
||||
range: patch_start..patch_start,
|
||||
title: String::new().into(),
|
||||
edits: Default::default(),
|
||||
status: crate::AssistantPatchStatus::Pending,
|
||||
};
|
||||
|
||||
while let Some(tag) = tags.next() {
|
||||
if tag.kind == XmlTagKind::Patch && !tag.is_open_tag {
|
||||
patch_tag_depth -= 1;
|
||||
if patch_tag_depth == 0 {
|
||||
patch.range.end = tag.range.end;
|
||||
|
||||
// Include the line immediately after this <patch> tag if it's empty.
|
||||
let patch_end_offset = patch.range.end.to_offset(buffer);
|
||||
let mut patch_end_chars = buffer.chars_at(patch_end_offset);
|
||||
if patch_end_chars.next() == Some('\n')
|
||||
&& patch_end_chars.next().map_or(true, |ch| ch == '\n')
|
||||
{
|
||||
let messages = self.messages_for_offsets(
|
||||
[patch_end_offset, patch_end_offset + 1],
|
||||
cx,
|
||||
);
|
||||
if messages.len() == 1 {
|
||||
patch.range.end = buffer.anchor_before(patch_end_offset + 1);
|
||||
}
|
||||
}
|
||||
|
||||
edits.sort_unstable_by(|a, b| {
|
||||
if let (Ok(a), Ok(b)) = (a, b) {
|
||||
a.path.cmp(&b.path)
|
||||
} else {
|
||||
Ordering::Equal
|
||||
}
|
||||
});
|
||||
patch.edits = edits.into();
|
||||
patch.status = AssistantPatchStatus::Ready;
|
||||
new_patches.push(patch);
|
||||
continue 'tags;
|
||||
}
|
||||
}
|
||||
|
||||
if tag.kind == XmlTagKind::Title && tag.is_open_tag {
|
||||
let content_start = tag.range.end;
|
||||
while let Some(tag) = tags.next() {
|
||||
if tag.kind == XmlTagKind::Title && !tag.is_open_tag {
|
||||
let content_end = tag.range.start;
|
||||
patch.title =
|
||||
trimmed_text_in_range(buffer, content_start..content_end)
|
||||
.into();
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if tag.kind == XmlTagKind::Edit && tag.is_open_tag {
|
||||
let mut path = None;
|
||||
let mut old_text = None;
|
||||
let mut new_text = None;
|
||||
let mut operation = None;
|
||||
let mut description = None;
|
||||
|
||||
while let Some(tag) = tags.next() {
|
||||
if tag.kind == XmlTagKind::Edit && !tag.is_open_tag {
|
||||
edits.push(AssistantEdit::new(
|
||||
path,
|
||||
operation,
|
||||
old_text,
|
||||
new_text,
|
||||
description,
|
||||
));
|
||||
break;
|
||||
}
|
||||
|
||||
if tag.is_open_tag
|
||||
&& [
|
||||
XmlTagKind::Path,
|
||||
XmlTagKind::OldText,
|
||||
XmlTagKind::NewText,
|
||||
XmlTagKind::Operation,
|
||||
XmlTagKind::Description,
|
||||
]
|
||||
.contains(&tag.kind)
|
||||
{
|
||||
let kind = tag.kind;
|
||||
let content_start = tag.range.end;
|
||||
if let Some(tag) = tags.peek() {
|
||||
if tag.kind == kind && !tag.is_open_tag {
|
||||
let tag = tags.next().unwrap();
|
||||
let content_end = tag.range.start;
|
||||
let content = trimmed_text_in_range(
|
||||
buffer,
|
||||
content_start..content_end,
|
||||
);
|
||||
match kind {
|
||||
XmlTagKind::Path => path = Some(content),
|
||||
XmlTagKind::Operation => operation = Some(content),
|
||||
XmlTagKind::OldText => {
|
||||
old_text = Some(content).filter(|s| !s.is_empty())
|
||||
}
|
||||
XmlTagKind::NewText => {
|
||||
new_text = Some(content).filter(|s| !s.is_empty())
|
||||
}
|
||||
XmlTagKind::Description => {
|
||||
description =
|
||||
Some(content).filter(|s| !s.is_empty())
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
patch.edits = edits.into();
|
||||
pending_patch = Some(patch);
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(mut pending_patch) = pending_patch {
|
||||
let patch_start = pending_patch.range.start.to_offset(buffer);
|
||||
if let Some(message) = self.message_for_offset(patch_start, cx) {
|
||||
if message.anchor_range.end == text::Anchor::MAX {
|
||||
pending_patch.range.end = text::Anchor::MAX;
|
||||
} else {
|
||||
let message_end = buffer.anchor_after(message.offset_range.end - 1);
|
||||
pending_patch.range.end = message_end;
|
||||
}
|
||||
} else {
|
||||
pending_patch.range.end = text::Anchor::MAX;
|
||||
}
|
||||
|
||||
new_patches.push(pending_patch);
|
||||
}
|
||||
|
||||
new_patches
|
||||
}
|
||||
|
||||
pub fn pending_command_for_position(
|
||||
&mut self,
|
||||
position: language::Anchor,
|
||||
@@ -1993,7 +2363,11 @@ impl AssistantContext {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn assist(&mut self, cx: &mut Context<Self>) -> Option<MessageAnchor> {
|
||||
pub fn assist(
|
||||
&mut self,
|
||||
request_type: RequestType,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Option<MessageAnchor> {
|
||||
let model_registry = LanguageModelRegistry::read_global(cx);
|
||||
let model = model_registry.default_model()?;
|
||||
let last_message_id = self.get_last_valid_message_id(cx)?;
|
||||
@@ -2008,7 +2382,7 @@ impl AssistantContext {
|
||||
// Compute which messages to cache, including the last one.
|
||||
self.mark_cache_anchors(&model.cache_configuration(), false, cx);
|
||||
|
||||
let request = self.to_completion_request(Some(&model), cx);
|
||||
let request = self.to_completion_request(Some(&model), request_type, cx);
|
||||
|
||||
let assistant_message = self
|
||||
.insert_message_after(last_message_id, Role::Assistant, MessageStatus::Pending, cx)
|
||||
@@ -2154,6 +2528,12 @@ impl AssistantContext {
|
||||
metadata.status = MessageStatus::Canceled;
|
||||
});
|
||||
Some(error.to_string())
|
||||
} else if error.is::<MaxMonthlySpendReachedError>() {
|
||||
cx.emit(ContextEvent::ShowMaxMonthlySpendReachedError);
|
||||
this.update_metadata(assistant_message_id, cx, |metadata| {
|
||||
metadata.status = MessageStatus::Canceled;
|
||||
});
|
||||
Some(error.to_string())
|
||||
} else {
|
||||
let error_message = error
|
||||
.chain()
|
||||
@@ -2243,6 +2623,7 @@ impl AssistantContext {
|
||||
pub fn to_completion_request(
|
||||
&self,
|
||||
model: Option<&Arc<dyn LanguageModel>>,
|
||||
request_type: RequestType,
|
||||
cx: &App,
|
||||
) -> LanguageModelRequest {
|
||||
let buffer = self.buffer.read(cx);
|
||||
@@ -2323,6 +2704,25 @@ impl AssistantContext {
|
||||
}
|
||||
}
|
||||
|
||||
if let RequestType::SuggestEdits = request_type {
|
||||
if let Ok(preamble) = self.prompt_builder.generate_suggest_edits_prompt() {
|
||||
let last_elem_index = completion_request.messages.len();
|
||||
|
||||
completion_request
|
||||
.messages
|
||||
.push(LanguageModelRequestMessage {
|
||||
role: Role::User,
|
||||
content: vec![MessageContent::Text(preamble)],
|
||||
cache: false,
|
||||
});
|
||||
|
||||
// The preamble message should be sent right before the last actual user message.
|
||||
completion_request
|
||||
.messages
|
||||
.swap(last_elem_index, last_elem_index.saturating_sub(1));
|
||||
}
|
||||
}
|
||||
|
||||
completion_request
|
||||
}
|
||||
|
||||
@@ -2357,6 +2757,17 @@ impl AssistantContext {
|
||||
ranges.push(message.anchor_range.clone());
|
||||
}
|
||||
}
|
||||
|
||||
let buffer = self.buffer.read(cx).text_snapshot();
|
||||
let mut updated = Vec::new();
|
||||
let mut removed = Vec::new();
|
||||
for range in ranges {
|
||||
self.reparse_patches_in_range(range, &buffer, &mut updated, &mut removed, cx);
|
||||
}
|
||||
|
||||
if !updated.is_empty() || !removed.is_empty() {
|
||||
cx.emit(ContextEvent::PatchesUpdated { removed, updated })
|
||||
}
|
||||
}
|
||||
|
||||
pub fn update_metadata(
|
||||
@@ -2634,7 +3045,7 @@ impl AssistantContext {
|
||||
return;
|
||||
}
|
||||
|
||||
let mut request = self.to_completion_request(Some(&model.model), cx);
|
||||
let mut request = self.to_completion_request(Some(&model.model), RequestType::Chat, cx);
|
||||
request.messages.push(LanguageModelRequestMessage {
|
||||
role: Role::User,
|
||||
content: vec![
|
||||
@@ -2906,6 +3317,24 @@ impl AssistantContext {
|
||||
}
|
||||
}
|
||||
|
||||
fn trimmed_text_in_range(buffer: &BufferSnapshot, range: Range<text::Anchor>) -> String {
|
||||
let mut is_start = true;
|
||||
let mut content = buffer
|
||||
.text_for_range(range)
|
||||
.map(|mut chunk| {
|
||||
if is_start {
|
||||
chunk = chunk.trim_start_matches('\n');
|
||||
if !chunk.is_empty() {
|
||||
is_start = false;
|
||||
}
|
||||
}
|
||||
chunk
|
||||
})
|
||||
.collect::<String>();
|
||||
content.truncate(content.trim_end().len());
|
||||
content
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub struct ContextVersion {
|
||||
context: clock::Global,
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
use crate::{
|
||||
AssistantContext, CacheStatus, ContextEvent, ContextId, ContextOperation, ContextSummary,
|
||||
InvokedSlashCommandId, MessageCacheMetadata, MessageId, MessageStatus,
|
||||
AssistantContext, AssistantEdit, AssistantEditKind, CacheStatus, ContextEvent, ContextId,
|
||||
ContextOperation, ContextSummary, InvokedSlashCommandId, MessageCacheMetadata, MessageId,
|
||||
MessageStatus, RequestType,
|
||||
};
|
||||
use anyhow::Result;
|
||||
use assistant_slash_command::{
|
||||
@@ -35,10 +36,13 @@ use std::{
|
||||
rc::Rc,
|
||||
sync::{Arc, atomic::AtomicBool},
|
||||
};
|
||||
use text::{ReplicaId, ToOffset, network::Network};
|
||||
use text::{OffsetRangeExt as _, ReplicaId, ToOffset, network::Network};
|
||||
use ui::{IconName, Window};
|
||||
use unindent::Unindent;
|
||||
use util::RandomCharIter;
|
||||
use util::{
|
||||
RandomCharIter,
|
||||
test::{generate_marked_text, marked_text_ranges},
|
||||
};
|
||||
use workspace::Workspace;
|
||||
|
||||
#[gpui::test]
|
||||
@@ -664,6 +668,401 @@ async fn test_slash_commands(cx: &mut TestAppContext) {
|
||||
}
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_workflow_step_parsing(cx: &mut TestAppContext) {
|
||||
cx.update(|cx| {
|
||||
init_test(cx);
|
||||
cx.update_global(|settings_store: &mut SettingsStore, cx| {
|
||||
settings_store
|
||||
.set_user_settings(
|
||||
r#"{ "assistant": { "enable_experimental_live_diffs": true } }"#,
|
||||
cx,
|
||||
)
|
||||
.unwrap()
|
||||
})
|
||||
});
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
let project = Project::test(fs, [Path::new("/root")], cx).await;
|
||||
|
||||
let registry = Arc::new(LanguageRegistry::test(cx.executor()));
|
||||
|
||||
// Create a new context
|
||||
let prompt_builder = Arc::new(PromptBuilder::new(None).unwrap());
|
||||
let context = cx.new(|cx| {
|
||||
AssistantContext::local(
|
||||
registry.clone(),
|
||||
Some(project),
|
||||
None,
|
||||
prompt_builder.clone(),
|
||||
Arc::new(SlashCommandWorkingSet::default()),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
// Insert an assistant message to simulate a response.
|
||||
let assistant_message_id = context.update(cx, |context, cx| {
|
||||
let user_message_id = context.messages(cx).next().unwrap().id;
|
||||
context
|
||||
.insert_message_after(user_message_id, Role::Assistant, MessageStatus::Done, cx)
|
||||
.unwrap()
|
||||
.id
|
||||
});
|
||||
|
||||
// No edit tags
|
||||
edit(
|
||||
&context,
|
||||
"
|
||||
|
||||
«one
|
||||
two
|
||||
»",
|
||||
cx,
|
||||
);
|
||||
expect_patches(
|
||||
&context,
|
||||
"
|
||||
|
||||
one
|
||||
two
|
||||
",
|
||||
&[],
|
||||
cx,
|
||||
);
|
||||
|
||||
// Partial edit step tag is added
|
||||
edit(
|
||||
&context,
|
||||
"
|
||||
|
||||
one
|
||||
two
|
||||
«
|
||||
<patch»",
|
||||
cx,
|
||||
);
|
||||
expect_patches(
|
||||
&context,
|
||||
"
|
||||
|
||||
one
|
||||
two
|
||||
|
||||
<patch",
|
||||
&[],
|
||||
cx,
|
||||
);
|
||||
|
||||
// The rest of the step tag is added. The unclosed
|
||||
// step is treated as incomplete.
|
||||
edit(
|
||||
&context,
|
||||
"
|
||||
|
||||
one
|
||||
two
|
||||
|
||||
<patch«>
|
||||
<edit>»",
|
||||
cx,
|
||||
);
|
||||
expect_patches(
|
||||
&context,
|
||||
"
|
||||
|
||||
one
|
||||
two
|
||||
|
||||
«<patch>
|
||||
<edit>»",
|
||||
&[&[]],
|
||||
cx,
|
||||
);
|
||||
|
||||
// The full patch is added
|
||||
edit(
|
||||
&context,
|
||||
"
|
||||
|
||||
one
|
||||
two
|
||||
|
||||
<patch>
|
||||
<edit>«
|
||||
<description>add a `two` function</description>
|
||||
<path>src/lib.rs</path>
|
||||
<operation>insert_after</operation>
|
||||
<old_text>fn one</old_text>
|
||||
<new_text>
|
||||
fn two() {}
|
||||
</new_text>
|
||||
</edit>
|
||||
</patch>
|
||||
|
||||
also,»",
|
||||
cx,
|
||||
);
|
||||
expect_patches(
|
||||
&context,
|
||||
"
|
||||
|
||||
one
|
||||
two
|
||||
|
||||
«<patch>
|
||||
<edit>
|
||||
<description>add a `two` function</description>
|
||||
<path>src/lib.rs</path>
|
||||
<operation>insert_after</operation>
|
||||
<old_text>fn one</old_text>
|
||||
<new_text>
|
||||
fn two() {}
|
||||
</new_text>
|
||||
</edit>
|
||||
</patch>
|
||||
»
|
||||
also,",
|
||||
&[&[AssistantEdit {
|
||||
path: "src/lib.rs".into(),
|
||||
kind: AssistantEditKind::InsertAfter {
|
||||
old_text: "fn one".into(),
|
||||
new_text: "fn two() {}".into(),
|
||||
description: Some("add a `two` function".into()),
|
||||
},
|
||||
}]],
|
||||
cx,
|
||||
);
|
||||
|
||||
// The step is manually edited.
|
||||
edit(
|
||||
&context,
|
||||
"
|
||||
|
||||
one
|
||||
two
|
||||
|
||||
<patch>
|
||||
<edit>
|
||||
<description>add a `two` function</description>
|
||||
<path>src/lib.rs</path>
|
||||
<operation>insert_after</operation>
|
||||
<old_text>«fn zero»</old_text>
|
||||
<new_text>
|
||||
fn two() {}
|
||||
</new_text>
|
||||
</edit>
|
||||
</patch>
|
||||
|
||||
also,",
|
||||
cx,
|
||||
);
|
||||
expect_patches(
|
||||
&context,
|
||||
"
|
||||
|
||||
one
|
||||
two
|
||||
|
||||
«<patch>
|
||||
<edit>
|
||||
<description>add a `two` function</description>
|
||||
<path>src/lib.rs</path>
|
||||
<operation>insert_after</operation>
|
||||
<old_text>fn zero</old_text>
|
||||
<new_text>
|
||||
fn two() {}
|
||||
</new_text>
|
||||
</edit>
|
||||
</patch>
|
||||
»
|
||||
also,",
|
||||
&[&[AssistantEdit {
|
||||
path: "src/lib.rs".into(),
|
||||
kind: AssistantEditKind::InsertAfter {
|
||||
old_text: "fn zero".into(),
|
||||
new_text: "fn two() {}".into(),
|
||||
description: Some("add a `two` function".into()),
|
||||
},
|
||||
}]],
|
||||
cx,
|
||||
);
|
||||
|
||||
// When setting the message role to User, the steps are cleared.
|
||||
context.update(cx, |context, cx| {
|
||||
context.cycle_message_roles(HashSet::from_iter([assistant_message_id]), cx);
|
||||
context.cycle_message_roles(HashSet::from_iter([assistant_message_id]), cx);
|
||||
});
|
||||
expect_patches(
|
||||
&context,
|
||||
"
|
||||
|
||||
one
|
||||
two
|
||||
|
||||
<patch>
|
||||
<edit>
|
||||
<description>add a `two` function</description>
|
||||
<path>src/lib.rs</path>
|
||||
<operation>insert_after</operation>
|
||||
<old_text>fn zero</old_text>
|
||||
<new_text>
|
||||
fn two() {}
|
||||
</new_text>
|
||||
</edit>
|
||||
</patch>
|
||||
|
||||
also,",
|
||||
&[],
|
||||
cx,
|
||||
);
|
||||
|
||||
// When setting the message role back to Assistant, the steps are reparsed.
|
||||
context.update(cx, |context, cx| {
|
||||
context.cycle_message_roles(HashSet::from_iter([assistant_message_id]), cx);
|
||||
});
|
||||
expect_patches(
|
||||
&context,
|
||||
"
|
||||
|
||||
one
|
||||
two
|
||||
|
||||
«<patch>
|
||||
<edit>
|
||||
<description>add a `two` function</description>
|
||||
<path>src/lib.rs</path>
|
||||
<operation>insert_after</operation>
|
||||
<old_text>fn zero</old_text>
|
||||
<new_text>
|
||||
fn two() {}
|
||||
</new_text>
|
||||
</edit>
|
||||
</patch>
|
||||
»
|
||||
also,",
|
||||
&[&[AssistantEdit {
|
||||
path: "src/lib.rs".into(),
|
||||
kind: AssistantEditKind::InsertAfter {
|
||||
old_text: "fn zero".into(),
|
||||
new_text: "fn two() {}".into(),
|
||||
description: Some("add a `two` function".into()),
|
||||
},
|
||||
}]],
|
||||
cx,
|
||||
);
|
||||
|
||||
// Ensure steps are re-parsed when deserializing.
|
||||
let serialized_context = context.read_with(cx, |context, cx| context.serialize(cx));
|
||||
let deserialized_context = cx.new(|cx| {
|
||||
AssistantContext::deserialize(
|
||||
serialized_context,
|
||||
Path::new("").into(),
|
||||
registry.clone(),
|
||||
prompt_builder.clone(),
|
||||
Arc::new(SlashCommandWorkingSet::default()),
|
||||
None,
|
||||
None,
|
||||
cx,
|
||||
)
|
||||
});
|
||||
expect_patches(
|
||||
&deserialized_context,
|
||||
"
|
||||
|
||||
one
|
||||
two
|
||||
|
||||
«<patch>
|
||||
<edit>
|
||||
<description>add a `two` function</description>
|
||||
<path>src/lib.rs</path>
|
||||
<operation>insert_after</operation>
|
||||
<old_text>fn zero</old_text>
|
||||
<new_text>
|
||||
fn two() {}
|
||||
</new_text>
|
||||
</edit>
|
||||
</patch>
|
||||
»
|
||||
also,",
|
||||
&[&[AssistantEdit {
|
||||
path: "src/lib.rs".into(),
|
||||
kind: AssistantEditKind::InsertAfter {
|
||||
old_text: "fn zero".into(),
|
||||
new_text: "fn two() {}".into(),
|
||||
description: Some("add a `two` function".into()),
|
||||
},
|
||||
}]],
|
||||
cx,
|
||||
);
|
||||
|
||||
fn edit(
|
||||
context: &Entity<AssistantContext>,
|
||||
new_text_marked_with_edits: &str,
|
||||
cx: &mut TestAppContext,
|
||||
) {
|
||||
context.update(cx, |context, cx| {
|
||||
context.buffer.update(cx, |buffer, cx| {
|
||||
buffer.edit_via_marked_text(&new_text_marked_with_edits.unindent(), None, cx);
|
||||
});
|
||||
});
|
||||
cx.executor().run_until_parked();
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
fn expect_patches(
|
||||
context: &Entity<AssistantContext>,
|
||||
expected_marked_text: &str,
|
||||
expected_suggestions: &[&[AssistantEdit]],
|
||||
cx: &mut TestAppContext,
|
||||
) {
|
||||
let expected_marked_text = expected_marked_text.unindent();
|
||||
let (expected_text, _) = marked_text_ranges(&expected_marked_text, false);
|
||||
|
||||
let (buffer_text, ranges, patches) = context.update(cx, |context, cx| {
|
||||
context.buffer.read_with(cx, |buffer, _| {
|
||||
let ranges = context
|
||||
.patches
|
||||
.iter()
|
||||
.map(|entry| entry.range.to_offset(buffer))
|
||||
.collect::<Vec<_>>();
|
||||
(
|
||||
buffer.text(),
|
||||
ranges,
|
||||
context
|
||||
.patches
|
||||
.iter()
|
||||
.map(|step| step.edits.clone())
|
||||
.collect::<Vec<_>>(),
|
||||
)
|
||||
})
|
||||
});
|
||||
|
||||
assert_eq!(buffer_text, expected_text);
|
||||
|
||||
let actual_marked_text = generate_marked_text(&expected_text, &ranges, false);
|
||||
assert_eq!(actual_marked_text, expected_marked_text);
|
||||
|
||||
assert_eq!(
|
||||
patches
|
||||
.iter()
|
||||
.map(|patch| {
|
||||
patch
|
||||
.iter()
|
||||
.map(|edit| {
|
||||
let edit = edit.as_ref().unwrap();
|
||||
AssistantEdit {
|
||||
path: edit.path.clone(),
|
||||
kind: edit.kind.clone(),
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
})
|
||||
.collect::<Vec<_>>(),
|
||||
expected_suggestions
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_serialization(cx: &mut TestAppContext) {
|
||||
cx.update(init_test);
|
||||
@@ -1199,7 +1598,7 @@ async fn test_summarization(cx: &mut TestAppContext) {
|
||||
|
||||
// Send a message
|
||||
context.update(cx, |context, cx| {
|
||||
context.assist(cx);
|
||||
context.assist(RequestType::Chat, cx);
|
||||
});
|
||||
|
||||
simulate_successful_response(&fake_model, cx);
|
||||
@@ -1254,7 +1653,7 @@ async fn test_thread_summary_error_retry(cx: &mut TestAppContext) {
|
||||
|
||||
// Sending another message should not trigger another summarize request
|
||||
context.update(cx, |context, cx| {
|
||||
context.assist(cx);
|
||||
context.assist(RequestType::Chat, cx);
|
||||
});
|
||||
|
||||
simulate_successful_response(&fake_model, cx);
|
||||
@@ -1297,7 +1696,7 @@ fn test_summarize_error(
|
||||
|
||||
// Send a message
|
||||
context.update(cx, |context, cx| {
|
||||
context.assist(cx);
|
||||
context.assist(RequestType::Chat, cx);
|
||||
});
|
||||
|
||||
simulate_successful_response(&model, cx);
|
||||
|
||||
File diff suppressed because it is too large

crates/assistant_context_editor/src/patch.rs (new file, 957 lines)
@@ -0,0 +1,957 @@
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use collections::HashMap;
|
||||
use editor::ProposedChangesEditor;
|
||||
use futures::{TryFutureExt as _, future};
|
||||
use gpui::{App, AppContext as _, AsyncApp, Entity, SharedString};
|
||||
use language::{AutoindentMode, Buffer, BufferSnapshot};
|
||||
use project::{Project, ProjectPath};
|
||||
use std::{cmp, ops::Range, path::Path, sync::Arc};
|
||||
use text::{AnchorRangeExt as _, Bias, OffsetRangeExt as _, Point};
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct AssistantPatch {
|
||||
pub range: Range<language::Anchor>,
|
||||
pub title: SharedString,
|
||||
pub edits: Arc<[Result<AssistantEdit>]>,
|
||||
pub status: AssistantPatchStatus,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
|
||||
pub enum AssistantPatchStatus {
|
||||
Pending,
|
||||
Ready,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub struct AssistantEdit {
|
||||
pub path: String,
|
||||
pub kind: AssistantEditKind,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub enum AssistantEditKind {
|
||||
Update {
|
||||
old_text: String,
|
||||
new_text: String,
|
||||
description: Option<String>,
|
||||
},
|
||||
Create {
|
||||
new_text: String,
|
||||
description: Option<String>,
|
||||
},
|
||||
InsertBefore {
|
||||
old_text: String,
|
||||
new_text: String,
|
||||
description: Option<String>,
|
||||
},
|
||||
InsertAfter {
|
||||
old_text: String,
|
||||
new_text: String,
|
||||
description: Option<String>,
|
||||
},
|
||||
Delete {
|
||||
old_text: String,
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Eq, PartialEq)]
|
||||
pub struct ResolvedPatch {
|
||||
pub edit_groups: HashMap<Entity<Buffer>, Vec<ResolvedEditGroup>>,
|
||||
pub errors: Vec<AssistantPatchResolutionError>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Eq, PartialEq)]
|
||||
pub struct ResolvedEditGroup {
|
||||
pub context_range: Range<language::Anchor>,
|
||||
pub edits: Vec<ResolvedEdit>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Eq, PartialEq)]
|
||||
pub struct ResolvedEdit {
|
||||
range: Range<language::Anchor>,
|
||||
new_text: String,
|
||||
description: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Eq, PartialEq)]
|
||||
pub struct AssistantPatchResolutionError {
|
||||
pub edit_ix: usize,
|
||||
pub message: String,
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
|
||||
enum SearchDirection {
|
||||
Up,
|
||||
Left,
|
||||
Diagonal,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
|
||||
struct SearchState {
|
||||
cost: u32,
|
||||
direction: SearchDirection,
|
||||
}
|
||||
|
||||
impl SearchState {
|
||||
fn new(cost: u32, direction: SearchDirection) -> Self {
|
||||
Self { cost, direction }
|
||||
}
|
||||
}
|
||||
|
||||
struct SearchMatrix {
|
||||
cols: usize,
|
||||
data: Vec<SearchState>,
|
||||
}
|
||||
|
||||
impl SearchMatrix {
|
||||
fn new(rows: usize, cols: usize) -> Self {
|
||||
SearchMatrix {
|
||||
cols,
|
||||
data: vec![SearchState::new(0, SearchDirection::Diagonal); rows * cols],
|
||||
}
|
||||
}
|
||||
|
||||
fn get(&self, row: usize, col: usize) -> SearchState {
|
||||
self.data[row * self.cols + col]
|
||||
}
|
||||
|
||||
fn set(&mut self, row: usize, col: usize, cost: SearchState) {
|
||||
self.data[row * self.cols + col] = cost;
|
||||
}
|
||||
}
|
||||
|
||||
impl ResolvedPatch {
|
||||
pub fn apply(&self, editor: &ProposedChangesEditor, cx: &mut App) {
|
||||
for (buffer, groups) in &self.edit_groups {
|
||||
let branch = editor.branch_buffer_for_base(buffer).unwrap();
|
||||
Self::apply_edit_groups(groups, &branch, cx);
|
||||
}
|
||||
editor.recalculate_all_buffer_diffs();
|
||||
}
|
||||
|
||||
fn apply_edit_groups(groups: &Vec<ResolvedEditGroup>, buffer: &Entity<Buffer>, cx: &mut App) {
|
||||
let mut edits = Vec::new();
|
||||
for group in groups {
|
||||
for suggestion in &group.edits {
|
||||
edits.push((suggestion.range.clone(), suggestion.new_text.clone()));
|
||||
}
|
||||
}
|
||||
buffer.update(cx, |buffer, cx| {
|
||||
buffer.edit(
|
||||
edits,
|
||||
Some(AutoindentMode::Block {
|
||||
original_indent_columns: Vec::new(),
|
||||
}),
|
||||
cx,
|
||||
);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
impl ResolvedEdit {
|
||||
pub fn try_merge(&mut self, other: &Self, buffer: &text::BufferSnapshot) -> bool {
|
||||
let range = &self.range;
|
||||
let other_range = &other.range;
|
||||
|
||||
// Don't merge if we don't contain the other suggestion.
|
||||
if range.start.cmp(&other_range.start, buffer).is_gt()
|
||||
|| range.end.cmp(&other_range.end, buffer).is_lt()
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
let other_offset_range = other_range.to_offset(buffer);
|
||||
let offset_range = range.to_offset(buffer);
|
||||
|
||||
// If the other range is empty at the start of this edit's range, combine the new text
|
||||
if other_offset_range.is_empty() && other_offset_range.start == offset_range.start {
|
||||
self.new_text = format!("{}\n{}", other.new_text, self.new_text);
|
||||
self.range.start = other_range.start;
|
||||
|
||||
if let Some((description, other_description)) =
|
||||
self.description.as_mut().zip(other.description.as_ref())
|
||||
{
|
||||
*description = format!("{}\n{}", other_description, description)
|
||||
}
|
||||
} else {
|
||||
if let Some((description, other_description)) =
|
||||
self.description.as_mut().zip(other.description.as_ref())
|
||||
{
|
||||
description.push('\n');
|
||||
description.push_str(other_description);
|
||||
}
|
||||
}
|
||||
|
||||
true
|
||||
}
|
||||
}
|
||||
|
||||
impl AssistantEdit {
|
||||
pub fn new(
|
||||
path: Option<String>,
|
||||
operation: Option<String>,
|
||||
old_text: Option<String>,
|
||||
new_text: Option<String>,
|
||||
description: Option<String>,
|
||||
) -> Result<Self> {
|
||||
let path = path.ok_or_else(|| anyhow!("missing path"))?;
|
||||
let operation = operation.ok_or_else(|| anyhow!("missing operation"))?;
|
||||
|
||||
let kind = match operation.as_str() {
|
||||
"update" => AssistantEditKind::Update {
|
||||
old_text: old_text.ok_or_else(|| anyhow!("missing old_text"))?,
|
||||
new_text: new_text.ok_or_else(|| anyhow!("missing new_text"))?,
|
||||
description,
|
||||
},
|
||||
"insert_before" => AssistantEditKind::InsertBefore {
|
||||
old_text: old_text.ok_or_else(|| anyhow!("missing old_text"))?,
|
||||
new_text: new_text.ok_or_else(|| anyhow!("missing new_text"))?,
|
||||
description,
|
||||
},
|
||||
"insert_after" => AssistantEditKind::InsertAfter {
|
||||
old_text: old_text.ok_or_else(|| anyhow!("missing old_text"))?,
|
||||
new_text: new_text.ok_or_else(|| anyhow!("missing new_text"))?,
|
||||
description,
|
||||
},
|
||||
"delete" => AssistantEditKind::Delete {
|
||||
old_text: old_text.ok_or_else(|| anyhow!("missing old_text"))?,
|
||||
},
|
||||
"create" => AssistantEditKind::Create {
|
||||
description,
|
||||
new_text: new_text.ok_or_else(|| anyhow!("missing new_text"))?,
|
||||
},
|
||||
_ => Err(anyhow!("unknown operation {operation:?}"))?,
|
||||
};
|
||||
|
||||
Ok(Self { path, kind })
|
||||
}
|
||||
|
||||
pub async fn resolve(
|
||||
&self,
|
||||
project: Entity<Project>,
|
||||
mut cx: AsyncApp,
|
||||
) -> Result<(Entity<Buffer>, ResolvedEdit)> {
|
||||
let path = self.path.clone();
|
||||
let kind = self.kind.clone();
|
||||
let buffer = project
|
||||
.update(&mut cx, |project, cx| {
|
||||
let project_path = project
|
||||
.find_project_path(Path::new(&path), cx)
|
||||
.or_else(|| {
|
||||
// If we couldn't find a project path for it, put it in the active worktree
|
||||
// so that when we create the buffer, it can be saved.
|
||||
let worktree = project
|
||||
.active_entry()
|
||||
.and_then(|entry_id| project.worktree_for_entry(entry_id, cx))
|
||||
.or_else(|| project.worktrees(cx).next())?;
|
||||
let worktree = worktree.read(cx);
|
||||
|
||||
Some(ProjectPath {
|
||||
worktree_id: worktree.id(),
|
||||
path: Arc::from(Path::new(&path)),
|
||||
})
|
||||
})
|
||||
.with_context(|| format!("worktree not found for {:?}", path))?;
|
||||
anyhow::Ok(project.open_buffer(project_path, cx))
|
||||
})??
|
||||
.await?;
|
||||
|
||||
let snapshot = buffer.update(&mut cx, |buffer, _| buffer.snapshot())?;
|
||||
let suggestion = cx
|
||||
.background_spawn(async move { kind.resolve(&snapshot) })
|
||||
.await;
|
||||
|
||||
Ok((buffer, suggestion))
|
||||
}
|
||||
}
|
||||
|
||||
impl AssistantEditKind {
|
||||
fn resolve(self, snapshot: &BufferSnapshot) -> ResolvedEdit {
|
||||
match self {
|
||||
Self::Update {
|
||||
old_text,
|
||||
new_text,
|
||||
description,
|
||||
} => {
|
||||
let range = Self::resolve_location(&snapshot, &old_text);
|
||||
ResolvedEdit {
|
||||
range,
|
||||
new_text,
|
||||
description,
|
||||
}
|
||||
}
|
||||
Self::Create {
|
||||
new_text,
|
||||
description,
|
||||
} => ResolvedEdit {
|
||||
range: text::Anchor::MIN..text::Anchor::MAX,
|
||||
description,
|
||||
new_text,
|
||||
},
|
||||
Self::InsertBefore {
|
||||
old_text,
|
||||
mut new_text,
|
||||
description,
|
||||
} => {
|
||||
let range = Self::resolve_location(&snapshot, &old_text);
|
||||
new_text.push('\n');
|
||||
ResolvedEdit {
|
||||
range: range.start..range.start,
|
||||
new_text,
|
||||
description,
|
||||
}
|
||||
}
|
||||
Self::InsertAfter {
|
||||
old_text,
|
||||
mut new_text,
|
||||
description,
|
||||
} => {
|
||||
let range = Self::resolve_location(&snapshot, &old_text);
|
||||
new_text.insert(0, '\n');
|
||||
ResolvedEdit {
|
||||
range: range.end..range.end,
|
||||
new_text,
|
||||
description,
|
||||
}
|
||||
}
|
||||
Self::Delete { old_text } => {
|
||||
let range = Self::resolve_location(&snapshot, &old_text);
|
||||
ResolvedEdit {
|
||||
range,
|
||||
new_text: String::new(),
|
||||
description: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn resolve_location(buffer: &text::BufferSnapshot, search_query: &str) -> Range<text::Anchor> {
|
||||
const INSERTION_COST: u32 = 3;
|
||||
const DELETION_COST: u32 = 10;
|
||||
const WHITESPACE_INSERTION_COST: u32 = 1;
|
||||
const WHITESPACE_DELETION_COST: u32 = 1;
|
||||
|
||||
let buffer_len = buffer.len();
|
||||
let query_len = search_query.len();
|
||||
let mut matrix = SearchMatrix::new(query_len + 1, buffer_len + 1);
|
||||
let mut leading_deletion_cost = 0_u32;
|
||||
for (row, query_byte) in search_query.bytes().enumerate() {
|
||||
let deletion_cost = if query_byte.is_ascii_whitespace() {
|
||||
WHITESPACE_DELETION_COST
|
||||
} else {
|
||||
DELETION_COST
|
||||
};
|
||||
|
||||
leading_deletion_cost = leading_deletion_cost.saturating_add(deletion_cost);
|
||||
matrix.set(
|
||||
row + 1,
|
||||
0,
|
||||
SearchState::new(leading_deletion_cost, SearchDirection::Diagonal),
|
||||
);
|
||||
|
||||
for (col, buffer_byte) in buffer.bytes_in_range(0..buffer.len()).flatten().enumerate() {
|
||||
let insertion_cost = if buffer_byte.is_ascii_whitespace() {
|
||||
WHITESPACE_INSERTION_COST
|
||||
} else {
|
||||
INSERTION_COST
|
||||
};
|
||||
|
||||
let up = SearchState::new(
|
||||
matrix.get(row, col + 1).cost.saturating_add(deletion_cost),
|
||||
SearchDirection::Up,
|
||||
);
|
||||
let left = SearchState::new(
|
||||
matrix.get(row + 1, col).cost.saturating_add(insertion_cost),
|
||||
SearchDirection::Left,
|
||||
);
|
||||
let diagonal = SearchState::new(
|
||||
if query_byte == *buffer_byte {
|
||||
matrix.get(row, col).cost
|
||||
} else {
|
||||
matrix
|
||||
.get(row, col)
|
||||
.cost
|
||||
.saturating_add(deletion_cost + insertion_cost)
|
||||
},
|
||||
SearchDirection::Diagonal,
|
||||
);
|
||||
matrix.set(row + 1, col + 1, up.min(left).min(diagonal));
|
||||
}
|
||||
}
|
||||
|
||||
// Traceback to find the best match
|
||||
let mut best_buffer_end = buffer_len;
|
||||
let mut best_cost = u32::MAX;
|
||||
for col in 1..=buffer_len {
|
||||
let cost = matrix.get(query_len, col).cost;
|
||||
if cost < best_cost {
|
||||
best_cost = cost;
|
||||
best_buffer_end = col;
|
||||
}
|
||||
}
|
||||
|
||||
let mut query_ix = query_len;
|
||||
let mut buffer_ix = best_buffer_end;
|
||||
while query_ix > 0 && buffer_ix > 0 {
|
||||
let current = matrix.get(query_ix, buffer_ix);
|
||||
match current.direction {
|
||||
SearchDirection::Diagonal => {
|
||||
query_ix -= 1;
|
||||
buffer_ix -= 1;
|
||||
}
|
||||
SearchDirection::Up => {
|
||||
query_ix -= 1;
|
||||
}
|
||||
SearchDirection::Left => {
|
||||
buffer_ix -= 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut start = buffer.offset_to_point(buffer.clip_offset(buffer_ix, Bias::Left));
|
||||
start.column = 0;
|
||||
let mut end = buffer.offset_to_point(buffer.clip_offset(best_buffer_end, Bias::Right));
|
||||
if end.column > 0 {
|
||||
end.column = buffer.line_len(end.row);
|
||||
}
|
||||
|
||||
buffer.anchor_after(start)..buffer.anchor_before(end)
|
||||
}
|
||||
}
|
||||
|
||||
impl AssistantPatch {
|
||||
pub async fn resolve(&self, project: Entity<Project>, cx: &mut AsyncApp) -> ResolvedPatch {
|
||||
let mut resolve_tasks = Vec::new();
|
||||
for (ix, edit) in self.edits.iter().enumerate() {
|
||||
if let Ok(edit) = edit.as_ref() {
|
||||
resolve_tasks.push(
|
||||
edit.resolve(project.clone(), cx.clone())
|
||||
.map_err(move |error| (ix, error)),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
let edits = future::join_all(resolve_tasks).await;
|
||||
let mut errors = Vec::new();
|
||||
let mut edits_by_buffer = HashMap::default();
|
||||
for entry in edits {
|
||||
match entry {
|
||||
Ok((buffer, edit)) => {
|
||||
edits_by_buffer
|
||||
.entry(buffer)
|
||||
.or_insert_with(Vec::new)
|
||||
.push(edit);
|
||||
}
|
||||
Err((edit_ix, error)) => errors.push(AssistantPatchResolutionError {
|
||||
edit_ix,
|
||||
message: error.to_string(),
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
// Expand the context ranges of each edit and group edits with overlapping context ranges.
|
||||
let mut edit_groups_by_buffer = HashMap::default();
|
||||
for (buffer, edits) in edits_by_buffer {
|
||||
if let Ok(snapshot) = buffer.update(cx, |buffer, _| buffer.text_snapshot()) {
|
||||
edit_groups_by_buffer.insert(buffer, Self::group_edits(edits, &snapshot));
|
||||
}
|
||||
}
|
||||
|
||||
ResolvedPatch {
|
||||
edit_groups: edit_groups_by_buffer,
|
||||
errors,
|
||||
}
|
||||
}
|
||||
|
||||
fn group_edits(
|
||||
mut edits: Vec<ResolvedEdit>,
|
||||
snapshot: &text::BufferSnapshot,
|
||||
) -> Vec<ResolvedEditGroup> {
|
||||
let mut edit_groups = Vec::<ResolvedEditGroup>::new();
|
||||
// Sort edits by their range so that earlier, larger ranges come first
|
||||
edits.sort_by(|a, b| a.range.cmp(&b.range, &snapshot));
|
||||
|
||||
// Merge overlapping edits
|
||||
edits.dedup_by(|a, b| b.try_merge(a, &snapshot));
|
||||
|
||||
// Create context ranges for each edit
|
||||
for edit in edits {
|
||||
let context_range = {
|
||||
let edit_point_range = edit.range.to_point(&snapshot);
|
||||
let start_row = edit_point_range.start.row.saturating_sub(5);
|
||||
let end_row = cmp::min(edit_point_range.end.row + 5, snapshot.max_point().row);
|
||||
let start = snapshot.anchor_before(Point::new(start_row, 0));
|
||||
let end = snapshot.anchor_after(Point::new(end_row, snapshot.line_len(end_row)));
|
||||
start..end
|
||||
};
|
||||
|
||||
if let Some(last_group) = edit_groups.last_mut() {
|
||||
if last_group
|
||||
.context_range
|
||||
.end
|
||||
.cmp(&context_range.start, &snapshot)
|
||||
.is_ge()
|
||||
{
|
||||
// Merge with the previous group if context ranges overlap
|
||||
last_group.context_range.end = context_range.end;
|
||||
last_group.edits.push(edit);
|
||||
} else {
|
||||
// Create a new group
|
||||
edit_groups.push(ResolvedEditGroup {
|
||||
context_range,
|
||||
edits: vec![edit],
|
||||
});
|
||||
}
|
||||
} else {
|
||||
// Create the first group
|
||||
edit_groups.push(ResolvedEditGroup {
|
||||
context_range,
|
||||
edits: vec![edit],
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
edit_groups
|
||||
}
|
||||
|
||||
pub fn path_count(&self) -> usize {
|
||||
self.paths().count()
|
||||
}
|
||||
|
||||
pub fn paths(&self) -> impl '_ + Iterator<Item = &str> {
|
||||
let mut prev_path = None;
|
||||
self.edits.iter().filter_map(move |edit| {
|
||||
if let Ok(edit) = edit {
|
||||
let path = Some(edit.path.as_str());
|
||||
if path != prev_path {
|
||||
prev_path = path;
|
||||
return path;
|
||||
}
|
||||
}
|
||||
None
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for AssistantPatch {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.range == other.range
|
||||
&& self.title == other.title
|
||||
&& Arc::ptr_eq(&self.edits, &other.edits)
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for AssistantPatch {}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use gpui::App;
|
||||
use language::{
|
||||
Language, LanguageConfig, LanguageMatcher, language_settings::AllLanguageSettings,
|
||||
};
|
||||
use settings::SettingsStore;
|
||||
use ui::BorrowAppContext;
|
||||
use unindent::Unindent as _;
|
||||
use util::test::{generate_marked_text, marked_text_ranges};
|
||||
|
||||
#[gpui::test]
|
||||
fn test_resolve_location(cx: &mut App) {
|
||||
assert_location_resolution(
|
||||
concat!(
|
||||
" Lorem\n",
|
||||
"« ipsum\n",
|
||||
" dolor sit amet»\n",
|
||||
" consecteur",
|
||||
),
|
||||
"ipsum\ndolor",
|
||||
cx,
|
||||
);
|
||||
|
||||
assert_location_resolution(
|
||||
&"
|
||||
«fn foo1(a: usize) -> usize {
|
||||
40
|
||||
}»
|
||||
|
||||
fn foo2(b: usize) -> usize {
|
||||
42
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
"fn foo1(b: usize) {\n40\n}",
|
||||
cx,
|
||||
);
|
||||
|
||||
assert_location_resolution(
|
||||
&"
|
||||
fn main() {
|
||||
« Foo
|
||||
.bar()
|
||||
.baz()
|
||||
.qux()»
|
||||
}
|
||||
|
||||
fn foo2(b: usize) -> usize {
|
||||
42
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
"Foo.bar.baz.qux()",
|
||||
cx,
|
||||
);
|
||||
|
||||
assert_location_resolution(
|
||||
&"
|
||||
class Something {
|
||||
one() { return 1; }
|
||||
« two() { return 2222; }
|
||||
three() { return 333; }
|
||||
four() { return 4444; }
|
||||
five() { return 5555; }
|
||||
six() { return 6666; }
|
||||
» seven() { return 7; }
|
||||
eight() { return 8; }
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
&"
|
||||
two() { return 2222; }
|
||||
four() { return 4444; }
|
||||
five() { return 5555; }
|
||||
six() { return 6666; }
|
||||
"
|
||||
.unindent(),
|
||||
cx,
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_resolve_edits(cx: &mut App) {
|
||||
init_test(cx);
|
||||
|
||||
assert_edits(
|
||||
"
|
||||
/// A person
|
||||
struct Person {
|
||||
name: String,
|
||||
age: usize,
|
||||
}
|
||||
|
||||
/// A dog
|
||||
struct Dog {
|
||||
weight: f32,
|
||||
}
|
||||
|
||||
impl Person {
|
||||
fn name(&self) -> &str {
|
||||
&self.name
|
||||
}
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
vec![
|
||||
AssistantEditKind::Update {
|
||||
old_text: "
|
||||
name: String,
|
||||
"
|
||||
.unindent(),
|
||||
new_text: "
|
||||
first_name: String,
|
||||
last_name: String,
|
||||
"
|
||||
.unindent(),
|
||||
description: None,
|
||||
},
|
||||
AssistantEditKind::Update {
|
||||
old_text: "
|
||||
fn name(&self) -> &str {
|
||||
&self.name
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
new_text: "
|
||||
fn name(&self) -> String {
|
||||
format!(\"{} {}\", self.first_name, self.last_name)
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
description: None,
|
||||
},
|
||||
],
|
||||
"
|
||||
/// A person
|
||||
struct Person {
|
||||
first_name: String,
|
||||
last_name: String,
|
||||
age: usize,
|
||||
}
|
||||
|
||||
/// A dog
|
||||
struct Dog {
|
||||
weight: f32,
|
||||
}
|
||||
|
||||
impl Person {
|
||||
fn name(&self) -> String {
|
||||
format!(\"{} {}\", self.first_name, self.last_name)
|
||||
}
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
cx,
|
||||
);
|
||||
|
||||
// Ensure InsertBefore merges correctly with Update of the same text
|
||||
assert_edits(
|
||||
"
|
||||
fn foo() {
|
||||
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
vec![
|
||||
AssistantEditKind::InsertBefore {
|
||||
old_text: "
|
||||
fn foo() {"
|
||||
.unindent(),
|
||||
new_text: "
|
||||
fn bar() {
|
||||
qux();
|
||||
}"
|
||||
.unindent(),
|
||||
description: Some("implement bar".into()),
|
||||
},
|
||||
AssistantEditKind::Update {
|
||||
old_text: "
|
||||
fn foo() {
|
||||
|
||||
}"
|
||||
.unindent(),
|
||||
new_text: "
|
||||
fn foo() {
|
||||
bar();
|
||||
}"
|
||||
.unindent(),
|
||||
description: Some("call bar in foo".into()),
|
||||
},
|
||||
AssistantEditKind::InsertAfter {
|
||||
old_text: "
|
||||
fn foo() {
|
||||
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
new_text: "
|
||||
fn qux() {
|
||||
// todo
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
description: Some("implement qux".into()),
|
||||
},
|
||||
],
|
||||
"
|
||||
fn bar() {
|
||||
qux();
|
||||
}
|
||||
|
||||
fn foo() {
|
||||
bar();
|
||||
}
|
||||
|
||||
fn qux() {
|
||||
// todo
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
cx,
|
||||
);
|
||||
|
||||
// Correctly indent new text when replacing multiple adjacent indented blocks.
assert_edits(
"
impl Numbers {
fn one() {
1
}

fn two() {
2
}

fn three() {
3
}
}
"
.unindent(),
vec![
AssistantEditKind::Update {
old_text: "
fn one() {
1
}
"
.unindent(),
new_text: "
fn one() {
101
}
"
.unindent(),
description: None,
},
AssistantEditKind::Update {
old_text: "
fn two() {
2
}
"
.unindent(),
new_text: "
fn two() {
102
}
"
.unindent(),
description: None,
},
AssistantEditKind::Update {
old_text: "
fn three() {
3
}
"
.unindent(),
new_text: "
fn three() {
103
}
"
.unindent(),
description: None,
},
],
"
impl Numbers {
fn one() {
101
}

fn two() {
102
}

fn three() {
103
}
}
"
.unindent(),
cx,
);

assert_edits(
"
impl Person {
fn set_name(&mut self, name: String) {
self.name = name;
}

fn name(&self) -> String {
return self.name;
}
}
"
.unindent(),
vec![
AssistantEditKind::Update {
old_text: "self.name = name;".unindent(),
new_text: "self._name = name;".unindent(),
description: None,
},
AssistantEditKind::Update {
old_text: "return self.name;\n".unindent(),
new_text: "return self._name;\n".unindent(),
description: None,
},
],
"
impl Person {
fn set_name(&mut self, name: String) {
self._name = name;
}

fn name(&self) -> String {
return self._name;
}
}
"
.unindent(),
cx,
);
}

fn init_test(cx: &mut App) {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
language::init(cx);
cx.update_global::<SettingsStore, _>(|settings, cx| {
settings.update_user_settings::<AllLanguageSettings>(cx, |_| {});
});
}

#[track_caller]
fn assert_location_resolution(text_with_expected_range: &str, query: &str, cx: &mut App) {
let (text, _) = marked_text_ranges(text_with_expected_range, false);
let buffer = cx.new(|cx| Buffer::local(text.clone(), cx));
let snapshot = buffer.read(cx).snapshot();
let range = AssistantEditKind::resolve_location(&snapshot, query).to_offset(&snapshot);
let text_with_actual_range = generate_marked_text(&text, &[range], false);
pretty_assertions::assert_eq!(text_with_actual_range, text_with_expected_range);
}

#[track_caller]
fn assert_edits(
old_text: String,
edits: Vec<AssistantEditKind>,
new_text: String,
cx: &mut App,
) {
let buffer =
cx.new(|cx| Buffer::local(old_text, cx).with_language(Arc::new(rust_lang()), cx));
let snapshot = buffer.read(cx).snapshot();
let resolved_edits = edits
.into_iter()
.map(|kind| kind.resolve(&snapshot))
.collect();
let edit_groups = AssistantPatch::group_edits(resolved_edits, &snapshot);
ResolvedPatch::apply_edit_groups(&edit_groups, &buffer, cx);
let actual_new_text = buffer.read(cx).text();
pretty_assertions::assert_eq!(actual_new_text, new_text);
}

fn rust_lang() -> Language {
Language::new(
LanguageConfig {
name: "Rust".into(),
matcher: LanguageMatcher {
path_suffixes: vec!["rs".to_string()],
..Default::default()
},
..Default::default()
},
Some(language::tree_sitter_rust::LANGUAGE.into()),
)
.with_indents_query(
r#"
(call_expression) @indent
(field_expression) @indent
(_ "(" ")" @end) @indent
(_ "{" "}" @end) @indent
"#,
)
.unwrap()
}
}
@@ -278,8 +278,8 @@ impl CompletionProvider for SlashCommandCompletionProvider {
buffer.anchor_after(Point::new(position.row, first_arg_start.start as u32));
let arguments = call
.arguments
.into_iter()
.filter_map(|argument| Some(line.get(argument)?.to_string()))
.iter()
.filter_map(|argument| Some(line.get(argument.clone())?.to_string()))
.collect::<Vec<_>>();
let argument_range = first_arg_start..buffer_position;
(

@@ -23,7 +23,6 @@ log.workspace = true
ollama = { workspace = true, features = ["schemars"] }
open_ai = { workspace = true, features = ["schemars"] }
deepseek = { workspace = true, features = ["schemars"] }
mistral = { workspace = true, features = ["schemars"] }
schemars.workspace = true
serde.workspace = true
settings.workspace = true

@@ -10,7 +10,6 @@ use deepseek::Model as DeepseekModel;
use gpui::{App, Pixels, SharedString};
use language_model::{CloudModel, LanguageModel};
use lmstudio::Model as LmStudioModel;
use mistral::Model as MistralModel;
use ollama::Model as OllamaModel;
use schemars::{JsonSchema, schema::Schema};
use serde::{Deserialize, Serialize};
@@ -42,7 +41,6 @@ pub enum NotifyWhenAgentWaiting {

#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
#[serde(tag = "name", rename_all = "snake_case")]
#[schemars(deny_unknown_fields)]
pub enum AssistantProviderContentV1 {
#[serde(rename = "zed.dev")]
ZedDotDev { default_model: Option<CloudModel> },
@@ -72,11 +70,6 @@ pub enum AssistantProviderContentV1 {
default_model: Option<DeepseekModel>,
api_url: Option<String>,
},
#[serde(rename = "mistral")]
Mistral {
default_model: Option<MistralModel>,
api_url: Option<String>,
},
}

#[derive(Default, Clone, Debug)]
@@ -92,6 +85,7 @@ pub struct AssistantSettings {
pub thread_summary_model: Option<LanguageModelSelection>,
pub inline_alternatives: Vec<LanguageModelSelection>,
pub using_outdated_settings_version: bool,
pub enable_experimental_live_diffs: bool,
pub default_profile: AgentProfileId,
pub profiles: IndexMap<AgentProfileId, AgentProfile>,
pub always_allow_tool_actions: bool,
@@ -100,7 +94,6 @@ pub struct AssistantSettings {
pub single_file_review: bool,
pub model_parameters: Vec<LanguageModelParameters>,
pub preferred_completion_mode: CompletionMode,
pub enable_feedback: bool,
}

impl AssistantSettings {
@@ -113,6 +106,10 @@ impl AssistantSettings {
.and_then(|m| m.temperature)
}

pub fn are_live_diffs_enabled(&self, _cx: &App) -> bool {
false
}

pub fn set_inline_assistant_model(&mut self, provider: String, model: String) {
self.inline_assistant_model = Some(LanguageModelSelection {
provider: provider.into(),
@@ -255,17 +252,12 @@ impl AssistantSettingsContent {
model: model.id().to_string(),
})
}
AssistantProviderContentV1::Mistral { default_model, .. } => {
default_model.map(|model| LanguageModelSelection {
provider: "mistral".into(),
model: model.id().to_string(),
})
}
}),
inline_assistant_model: None,
commit_message_model: None,
thread_summary_model: None,
inline_alternatives: None,
enable_experimental_live_diffs: None,
default_profile: None,
profiles: None,
always_allow_tool_actions: None,
@@ -274,7 +266,6 @@ impl AssistantSettingsContent {
single_file_review: None,
model_parameters: Vec::new(),
preferred_completion_mode: None,
enable_feedback: None,
},
VersionedAssistantSettingsContent::V2(ref settings) => settings.clone(),
},
@@ -297,6 +288,7 @@ impl AssistantSettingsContent {
commit_message_model: None,
thread_summary_model: None,
inline_alternatives: None,
enable_experimental_live_diffs: None,
default_profile: None,
profiles: None,
always_allow_tool_actions: None,
@@ -305,7 +297,6 @@ impl AssistantSettingsContent {
single_file_review: None,
model_parameters: Vec::new(),
preferred_completion_mode: None,
enable_feedback: None,
},
None => AssistantSettingsContentV2::default(),
}
@@ -559,7 +550,6 @@ impl AssistantSettingsContent {

#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
#[serde(tag = "version")]
#[schemars(deny_unknown_fields)]
pub enum VersionedAssistantSettingsContent {
#[serde(rename = "1")]
V1(AssistantSettingsContentV1),
@@ -580,6 +570,7 @@ impl Default for VersionedAssistantSettingsContent {
commit_message_model: None,
thread_summary_model: None,
inline_alternatives: None,
enable_experimental_live_diffs: None,
default_profile: None,
profiles: None,
always_allow_tool_actions: None,
@@ -588,13 +579,11 @@ impl Default for VersionedAssistantSettingsContent {
single_file_review: None,
model_parameters: Vec::new(),
preferred_completion_mode: None,
enable_feedback: None,
})
}
}

#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug, Default)]
#[schemars(deny_unknown_fields)]
pub struct AssistantSettingsContentV2 {
/// Whether the Assistant is enabled.
///
@@ -626,6 +615,10 @@ pub struct AssistantSettingsContentV2 {
thread_summary_model: Option<LanguageModelSelection>,
/// Additional models with which to generate alternatives when performing inline assists.
inline_alternatives: Option<Vec<LanguageModelSelection>>,
/// Enable experimental live diffs in the assistant panel.
///
/// Default: false
enable_experimental_live_diffs: Option<bool>,
/// The default profile to use in the Agent.
///
/// Default: write
@@ -663,10 +656,6 @@ pub struct AssistantSettingsContentV2 {
///
/// Default: normal
preferred_completion_mode: Option<CompletionMode>,
/// Whether to show thumb buttons for feedback in the agent panel.
///
/// Default: true
enable_feedback: Option<bool>,
}

#[derive(Clone, Copy, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Default)]
@@ -704,7 +693,7 @@ impl JsonSchema for LanguageModelProviderSetting {
schemars::schema::SchemaObject {
enum_values: Some(vec![
"anthropic".into(),
"amazon-bedrock".into(),
"bedrock".into(),
"google".into(),
"lmstudio".into(),
"ollama".into(),
@@ -712,7 +701,6 @@ impl JsonSchema for LanguageModelProviderSetting {
"zed.dev".into(),
"copilot_chat".into(),
"deepseek".into(),
"mistral".into(),
]),
..Default::default()
}
@@ -758,7 +746,6 @@ pub struct ContextServerPresetContent {
}

#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
#[schemars(deny_unknown_fields)]
pub struct AssistantSettingsContentV1 {
/// Whether the Assistant is enabled.
///
@@ -788,7 +775,6 @@ pub struct AssistantSettingsContentV1 {
}

#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
#[schemars(deny_unknown_fields)]
pub struct LegacyAssistantSettingsContent {
/// Whether to show the assistant panel button in the status bar.
///
@@ -859,6 +845,10 @@ impl Settings for AssistantSettings {
.thread_summary_model
.or(settings.thread_summary_model.take());
merge(&mut settings.inline_alternatives, value.inline_alternatives);
merge(
&mut settings.enable_experimental_live_diffs,
value.enable_experimental_live_diffs,
);
merge(
&mut settings.always_allow_tool_actions,
value.always_allow_tool_actions,
@@ -874,7 +864,6 @@ impl Settings for AssistantSettings {
&mut settings.preferred_completion_mode,
value.preferred_completion_mode,
);
merge(&mut settings.enable_feedback, value.enable_feedback);

settings
.model_parameters
@@ -1005,13 +994,13 @@ mod tests {
dock: None,
default_width: None,
default_height: None,
enable_experimental_live_diffs: None,
default_profile: None,
profiles: None,
always_allow_tool_actions: None,
notify_when_agent_waiting: None,
stream_edits: None,
single_file_review: None,
enable_feedback: None,
model_parameters: Vec::new(),
preferred_completion_mode: None,
},

@@ -49,37 +49,6 @@ impl ActionLog {
|
||||
is_created: bool,
|
||||
cx: &mut Context<Self>,
|
||||
) -> &mut TrackedBuffer {
|
||||
let status = if is_created {
|
||||
if let Some(tracked) = self.tracked_buffers.remove(&buffer) {
|
||||
match tracked.status {
|
||||
TrackedBufferStatus::Created {
|
||||
existing_file_content,
|
||||
} => TrackedBufferStatus::Created {
|
||||
existing_file_content,
|
||||
},
|
||||
TrackedBufferStatus::Modified | TrackedBufferStatus::Deleted => {
|
||||
TrackedBufferStatus::Created {
|
||||
existing_file_content: Some(tracked.diff_base),
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if buffer
|
||||
.read(cx)
|
||||
.file()
|
||||
.map_or(false, |file| file.disk_state().exists())
|
||||
{
|
||||
TrackedBufferStatus::Created {
|
||||
existing_file_content: Some(buffer.read(cx).as_rope().clone()),
|
||||
}
|
||||
} else {
|
||||
TrackedBufferStatus::Created {
|
||||
existing_file_content: None,
|
||||
}
|
||||
}
|
||||
} else {
|
||||
TrackedBufferStatus::Modified
|
||||
};
|
||||
|
||||
let tracked_buffer = self
|
||||
.tracked_buffers
|
||||
.entry(buffer.clone())
|
||||
@@ -91,21 +60,36 @@ impl ActionLog {
|
||||
let text_snapshot = buffer.read(cx).text_snapshot();
|
||||
let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
|
||||
let (diff_update_tx, diff_update_rx) = mpsc::unbounded();
|
||||
let diff_base;
|
||||
let base_text;
|
||||
let status;
|
||||
let unreviewed_changes;
|
||||
if is_created {
|
||||
diff_base = Rope::default();
|
||||
let existing_file_content = if buffer
|
||||
.read(cx)
|
||||
.file()
|
||||
.map_or(false, |file| file.disk_state().exists())
|
||||
{
|
||||
Some(text_snapshot.as_rope().clone())
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
base_text = Rope::default();
|
||||
status = TrackedBufferStatus::Created {
|
||||
existing_file_content,
|
||||
};
|
||||
unreviewed_changes = Patch::new(vec![Edit {
|
||||
old: 0..1,
|
||||
new: 0..text_snapshot.max_point().row + 1,
|
||||
}])
|
||||
} else {
|
||||
diff_base = buffer.read(cx).as_rope().clone();
|
||||
base_text = buffer.read(cx).as_rope().clone();
|
||||
status = TrackedBufferStatus::Modified;
|
||||
unreviewed_changes = Patch::default();
|
||||
}
|
||||
TrackedBuffer {
|
||||
buffer: buffer.clone(),
|
||||
diff_base,
|
||||
base_text,
|
||||
unreviewed_changes,
|
||||
snapshot: text_snapshot.clone(),
|
||||
status,
|
||||
@@ -200,7 +184,7 @@ impl ActionLog {
|
||||
.context("buffer not tracked")?;
|
||||
|
||||
let rebase = cx.background_spawn({
|
||||
let mut base_text = tracked_buffer.diff_base.clone();
|
||||
let mut base_text = tracked_buffer.base_text.clone();
|
||||
let old_snapshot = tracked_buffer.snapshot.clone();
|
||||
let new_snapshot = buffer_snapshot.clone();
|
||||
let unreviewed_changes = tracked_buffer.unreviewed_changes.clone();
|
||||
@@ -226,7 +210,7 @@ impl ActionLog {
|
||||
))
|
||||
})??;
|
||||
|
||||
let (new_base_text, new_diff_base) = rebase.await;
|
||||
let (new_base_text, new_base_text_rope) = rebase.await;
|
||||
let diff_snapshot = BufferDiff::update_diff(
|
||||
diff.clone(),
|
||||
buffer_snapshot.clone(),
|
||||
@@ -245,23 +229,24 @@ impl ActionLog {
|
||||
.background_spawn({
|
||||
let diff_snapshot = diff_snapshot.clone();
|
||||
let buffer_snapshot = buffer_snapshot.clone();
|
||||
let new_diff_base = new_diff_base.clone();
|
||||
let new_base_text_rope = new_base_text_rope.clone();
|
||||
async move {
|
||||
let mut unreviewed_changes = Patch::default();
|
||||
for hunk in diff_snapshot.hunks_intersecting_range(
|
||||
Anchor::MIN..Anchor::MAX,
|
||||
&buffer_snapshot,
|
||||
) {
|
||||
let old_range = new_diff_base
|
||||
let old_range = new_base_text_rope
|
||||
.offset_to_point(hunk.diff_base_byte_range.start)
|
||||
..new_diff_base.offset_to_point(hunk.diff_base_byte_range.end);
|
||||
..new_base_text_rope
|
||||
.offset_to_point(hunk.diff_base_byte_range.end);
|
||||
let new_range = hunk.range.start..hunk.range.end;
|
||||
unreviewed_changes.push(point_to_row_edit(
|
||||
Edit {
|
||||
old: old_range,
|
||||
new: new_range,
|
||||
},
|
||||
&new_diff_base,
|
||||
&new_base_text_rope,
|
||||
&buffer_snapshot.as_rope(),
|
||||
));
|
||||
}
|
||||
@@ -279,7 +264,7 @@ impl ActionLog {
|
||||
.tracked_buffers
|
||||
.get_mut(&buffer)
|
||||
.context("buffer not tracked")?;
|
||||
tracked_buffer.diff_base = new_diff_base;
|
||||
tracked_buffer.base_text = new_base_text_rope;
|
||||
tracked_buffer.snapshot = buffer_snapshot;
|
||||
tracked_buffer.unreviewed_changes = unreviewed_changes;
|
||||
cx.notify();
|
||||
@@ -298,6 +283,7 @@ impl ActionLog {
|
||||
/// Mark a buffer as edited, so we can refresh it in the context
|
||||
pub fn buffer_created(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
|
||||
self.edited_since_project_diagnostics_check = true;
|
||||
self.tracked_buffers.remove(&buffer);
|
||||
self.track_buffer_internal(buffer.clone(), true, cx);
|
||||
}
|
||||
|
||||
@@ -360,11 +346,11 @@ impl ActionLog {
|
||||
true
|
||||
} else {
|
||||
let old_range = tracked_buffer
|
||||
.diff_base
|
||||
.base_text
|
||||
.point_to_offset(Point::new(edit.old.start, 0))
|
||||
..tracked_buffer.diff_base.point_to_offset(cmp::min(
|
||||
..tracked_buffer.base_text.point_to_offset(cmp::min(
|
||||
Point::new(edit.old.end, 0),
|
||||
tracked_buffer.diff_base.max_point(),
|
||||
tracked_buffer.base_text.max_point(),
|
||||
));
|
||||
let new_range = tracked_buffer
|
||||
.snapshot
|
||||
@@ -373,7 +359,7 @@ impl ActionLog {
|
||||
Point::new(edit.new.end, 0),
|
||||
tracked_buffer.snapshot.max_point(),
|
||||
));
|
||||
tracked_buffer.diff_base.replace(
|
||||
tracked_buffer.base_text.replace(
|
||||
old_range,
|
||||
&tracked_buffer
|
||||
.snapshot
|
||||
@@ -431,7 +417,7 @@ impl ActionLog {
|
||||
}
|
||||
TrackedBufferStatus::Deleted => {
|
||||
buffer.update(cx, |buffer, cx| {
|
||||
buffer.set_text(tracked_buffer.diff_base.to_string(), cx)
|
||||
buffer.set_text(tracked_buffer.base_text.to_string(), cx)
|
||||
});
|
||||
let save = self
|
||||
.project
|
||||
@@ -478,14 +464,14 @@ impl ActionLog {
|
||||
|
||||
if revert {
|
||||
let old_range = tracked_buffer
|
||||
.diff_base
|
||||
.base_text
|
||||
.point_to_offset(Point::new(edit.old.start, 0))
|
||||
..tracked_buffer.diff_base.point_to_offset(cmp::min(
|
||||
..tracked_buffer.base_text.point_to_offset(cmp::min(
|
||||
Point::new(edit.old.end, 0),
|
||||
tracked_buffer.diff_base.max_point(),
|
||||
tracked_buffer.base_text.max_point(),
|
||||
));
|
||||
let old_text = tracked_buffer
|
||||
.diff_base
|
||||
.base_text
|
||||
.chunks_in_range(old_range)
|
||||
.collect::<String>();
|
||||
edits_to_revert.push((new_range, old_text));
|
||||
@@ -506,7 +492,7 @@ impl ActionLog {
|
||||
TrackedBufferStatus::Deleted => false,
|
||||
_ => {
|
||||
tracked_buffer.unreviewed_changes.clear();
|
||||
tracked_buffer.diff_base = tracked_buffer.snapshot.as_rope().clone();
|
||||
tracked_buffer.base_text = tracked_buffer.snapshot.as_rope().clone();
|
||||
tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
|
||||
true
|
||||
}
|
||||
@@ -669,7 +655,7 @@ enum TrackedBufferStatus {
|
||||
|
||||
struct TrackedBuffer {
|
||||
buffer: Entity<Buffer>,
|
||||
diff_base: Rope,
|
||||
base_text: Rope,
|
||||
unreviewed_changes: Patch<u32>,
|
||||
status: TrackedBufferStatus,
|
||||
version: clock::Global,
|
||||
@@ -1108,86 +1094,6 @@ mod tests {
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 10)]
|
||||
async fn test_overwriting_previously_edited_files(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
fs.insert_tree(
|
||||
path!("/dir"),
|
||||
json!({
|
||||
"file1": "Lorem ipsum dolor"
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
|
||||
let action_log = cx.new(|_| ActionLog::new(project.clone()));
|
||||
let file_path = project
|
||||
.read_with(cx, |project, cx| project.find_project_path("dir/file1", cx))
|
||||
.unwrap();
|
||||
|
||||
let buffer = project
|
||||
.update(cx, |project, cx| project.open_buffer(file_path, cx))
|
||||
.await
|
||||
.unwrap();
|
||||
cx.update(|cx| {
|
||||
action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
|
||||
buffer.update(cx, |buffer, cx| buffer.append(" sit amet consecteur", cx));
|
||||
action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
|
||||
});
|
||||
project
|
||||
.update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
|
||||
.await
|
||||
.unwrap();
|
||||
cx.run_until_parked();
|
||||
assert_eq!(
|
||||
unreviewed_hunks(&action_log, cx),
|
||||
vec![(
|
||||
buffer.clone(),
|
||||
vec![HunkStatus {
|
||||
range: Point::new(0, 0)..Point::new(0, 37),
|
||||
diff_status: DiffHunkStatusKind::Modified,
|
||||
old_text: "Lorem ipsum dolor".into(),
|
||||
}],
|
||||
)]
|
||||
);
|
||||
|
||||
cx.update(|cx| {
|
||||
action_log.update(cx, |log, cx| log.buffer_created(buffer.clone(), cx));
|
||||
buffer.update(cx, |buffer, cx| buffer.set_text("rewritten", cx));
|
||||
action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
|
||||
});
|
||||
project
|
||||
.update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
|
||||
.await
|
||||
.unwrap();
|
||||
cx.run_until_parked();
|
||||
assert_eq!(
|
||||
unreviewed_hunks(&action_log, cx),
|
||||
vec![(
|
||||
buffer.clone(),
|
||||
vec![HunkStatus {
|
||||
range: Point::new(0, 0)..Point::new(0, 9),
|
||||
diff_status: DiffHunkStatusKind::Added,
|
||||
old_text: "".into(),
|
||||
}],
|
||||
)]
|
||||
);
|
||||
|
||||
action_log
|
||||
.update(cx, |log, cx| {
|
||||
log.reject_edits_in_ranges(buffer.clone(), vec![2..5], cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
cx.run_until_parked();
|
||||
assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
|
||||
assert_eq!(
|
||||
buffer.read_with(cx, |buffer, _cx| buffer.text()),
|
||||
"Lorem ipsum dolor"
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 10)]
|
||||
async fn test_deleting_files(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
@@ -1695,7 +1601,7 @@ mod tests {
|
||||
cx.run_until_parked();
|
||||
action_log.update(cx, |log, cx| {
|
||||
let tracked_buffer = log.tracked_buffers.get(&buffer).unwrap();
|
||||
let mut old_text = tracked_buffer.diff_base.clone();
|
||||
let mut old_text = tracked_buffer.base_text.clone();
|
||||
let new_text = buffer.read(cx).as_rope();
|
||||
for edit in tracked_buffer.unreviewed_changes.edits() {
|
||||
let old_start = old_text.point_to_offset(Point::new(edit.new.start, 0));
|
||||
|
||||
@@ -19,7 +19,6 @@ use gpui::Window;
|
||||
use gpui::{App, Entity, SharedString, Task, WeakEntity};
|
||||
use icons::IconName;
|
||||
use language_model::LanguageModel;
|
||||
use language_model::LanguageModelImage;
|
||||
use language_model::LanguageModelRequest;
|
||||
use language_model::LanguageModelToolSchemaFormat;
|
||||
use project::Project;
|
||||
@@ -66,50 +65,21 @@ impl ToolUseStatus {
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct ToolResultOutput {
|
||||
pub content: ToolResultContent,
|
||||
pub content: String,
|
||||
pub output: Option<serde_json::Value>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
pub enum ToolResultContent {
|
||||
Text(String),
|
||||
Image(LanguageModelImage),
|
||||
}
|
||||
|
||||
impl ToolResultContent {
|
||||
pub fn len(&self) -> usize {
|
||||
match self {
|
||||
ToolResultContent::Text(str) => str.len(),
|
||||
ToolResultContent::Image(image) => image.len(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_empty(&self) -> bool {
|
||||
match self {
|
||||
ToolResultContent::Text(str) => str.is_empty(),
|
||||
ToolResultContent::Image(image) => image.is_empty(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn as_str(&self) -> Option<&str> {
|
||||
match self {
|
||||
ToolResultContent::Text(str) => Some(str),
|
||||
ToolResultContent::Image(_) => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<String> for ToolResultOutput {
|
||||
fn from(value: String) -> Self {
|
||||
ToolResultOutput {
|
||||
content: ToolResultContent::Text(value),
|
||||
content: value,
|
||||
output: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Deref for ToolResultOutput {
|
||||
type Target = ToolResultContent;
|
||||
type Target = String;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.content
|
||||
|
||||
@@ -35,6 +35,7 @@ indoc.workspace = true
|
||||
itertools.workspace = true
|
||||
language.workspace = true
|
||||
language_model.workspace = true
|
||||
linkme.workspace = true
|
||||
log.workspace = true
|
||||
markdown.workspace = true
|
||||
open.workspace = true
|
||||
|
||||
@@ -40,12 +40,13 @@ use crate::find_path_tool::FindPathTool;
|
||||
use crate::grep_tool::GrepTool;
|
||||
use crate::list_directory_tool::ListDirectoryTool;
|
||||
use crate::now_tool::NowTool;
|
||||
use crate::read_file_tool::ReadFileTool;
|
||||
use crate::thinking_tool::ThinkingTool;
|
||||
|
||||
pub use edit_file_tool::{EditFileMode, EditFileToolInput};
|
||||
pub use edit_file_tool::EditFileToolInput;
|
||||
pub use find_path_tool::FindPathToolInput;
|
||||
pub use open_tool::OpenTool;
|
||||
pub use read_file_tool::{ReadFileTool, ReadFileToolInput};
|
||||
pub use read_file_tool::ReadFileToolInput;
|
||||
pub use terminal_tool::TerminalTool;
|
||||
|
||||
pub fn init(http_client: Arc<HttpClientWithUrl>, cx: &mut App) {
|
||||
|
||||
@@ -1,9 +1,5 @@
|
||||
use super::*;
|
||||
use crate::{
|
||||
ReadFileToolInput,
|
||||
edit_file_tool::{EditFileMode, EditFileToolInput},
|
||||
grep_tool::GrepToolInput,
|
||||
};
|
||||
use crate::{ReadFileToolInput, edit_file_tool::EditFileToolInput, grep_tool::GrepToolInput};
|
||||
use Role::*;
|
||||
use anyhow::anyhow;
|
||||
use assistant_tool::ToolRegistry;
|
||||
@@ -14,8 +10,8 @@ use futures::{FutureExt, future::LocalBoxFuture};
|
||||
use gpui::{AppContext, TestAppContext};
|
||||
use indoc::{formatdoc, indoc};
|
||||
use language_model::{
|
||||
LanguageModelRegistry, LanguageModelRequestTool, LanguageModelToolResult,
|
||||
LanguageModelToolResultContent, LanguageModelToolUse, LanguageModelToolUseId, SelectedModel,
|
||||
LanguageModelRegistry, LanguageModelRequestTool, LanguageModelToolResult, LanguageModelToolUse,
|
||||
LanguageModelToolUseId,
|
||||
};
|
||||
use project::Project;
|
||||
use rand::prelude::*;
|
||||
@@ -25,7 +21,6 @@ use std::{
|
||||
cmp::Reverse,
|
||||
fmt::{self, Display},
|
||||
io::Write as _,
|
||||
str::FromStr,
|
||||
sync::mpsc,
|
||||
};
|
||||
use util::path;
|
||||
@@ -76,7 +71,7 @@ fn eval_extract_handle_command_output() {
|
||||
EditFileToolInput {
|
||||
display_description: edit_description.into(),
|
||||
path: input_file_path.into(),
|
||||
mode: EditFileMode::Edit,
|
||||
create_or_overwrite: false,
|
||||
},
|
||||
)],
|
||||
),
|
||||
@@ -132,7 +127,7 @@ fn eval_delete_run_git_blame() {
|
||||
EditFileToolInput {
|
||||
display_description: edit_description.into(),
|
||||
path: input_file_path.into(),
|
||||
mode: EditFileMode::Edit,
|
||||
create_or_overwrite: false,
|
||||
},
|
||||
)],
|
||||
),
|
||||
@@ -187,7 +182,7 @@ fn eval_translate_doc_comments() {
|
||||
EditFileToolInput {
|
||||
display_description: edit_description.into(),
|
||||
path: input_file_path.into(),
|
||||
mode: EditFileMode::Edit,
|
||||
create_or_overwrite: false,
|
||||
},
|
||||
)],
|
||||
),
|
||||
@@ -302,7 +297,7 @@ fn eval_use_wasi_sdk_in_compile_parser_to_wasm() {
|
||||
EditFileToolInput {
|
||||
display_description: edit_description.into(),
|
||||
path: input_file_path.into(),
|
||||
mode: EditFileMode::Edit,
|
||||
create_or_overwrite: false,
|
||||
},
|
||||
)],
|
||||
),
|
||||
@@ -377,7 +372,7 @@ fn eval_disable_cursor_blinking() {
|
||||
EditFileToolInput {
|
||||
display_description: edit_description.into(),
|
||||
path: input_file_path.into(),
|
||||
mode: EditFileMode::Edit,
|
||||
create_or_overwrite: false,
|
||||
},
|
||||
)],
|
||||
),
|
||||
@@ -571,7 +566,7 @@ fn eval_from_pixels_constructor() {
|
||||
EditFileToolInput {
|
||||
display_description: edit_description.into(),
|
||||
path: input_file_path.into(),
|
||||
mode: EditFileMode::Edit,
|
||||
create_or_overwrite: false,
|
||||
},
|
||||
)],
|
||||
),
|
||||
@@ -648,7 +643,7 @@ fn eval_zode() {
|
||||
EditFileToolInput {
|
||||
display_description: edit_description.into(),
|
||||
path: input_file_path.into(),
|
||||
mode: EditFileMode::Create,
|
||||
create_or_overwrite: true,
|
||||
},
|
||||
),
|
||||
],
|
||||
@@ -893,7 +888,7 @@ fn eval_add_overwrite_test() {
|
||||
EditFileToolInput {
|
||||
display_description: edit_description.into(),
|
||||
path: input_file_path.into(),
|
||||
mode: EditFileMode::Edit,
|
||||
create_or_overwrite: false,
|
||||
},
|
||||
),
|
||||
],
|
||||
@@ -956,7 +951,7 @@ fn tool_result(
|
||||
tool_use_id: LanguageModelToolUseId::from(id.into()),
|
||||
tool_name: name.into(),
|
||||
is_error: false,
|
||||
content: LanguageModelToolResultContent::Text(result.into()),
|
||||
content: result.into(),
|
||||
output: None,
|
||||
})
|
||||
}
|
||||
@@ -1217,7 +1212,7 @@ fn report_progress(evaluated_count: usize, failed_count: usize, iterations: usiz
|
||||
passed_count as f64 / evaluated_count as f64
|
||||
};
|
||||
print!(
|
||||
"\r\x1b[KEvaluated {}/{} ({:.2}% passed)",
|
||||
"\r\x1b[KEvaluated {}/{} ({:.2}%)",
|
||||
evaluated_count,
|
||||
iterations,
|
||||
passed_ratio * 100.0
|
||||
@@ -1256,21 +1251,13 @@ impl EditAgentTest {
|
||||
|
||||
fs.insert_tree("/root", json!({})).await;
|
||||
let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
|
||||
let agent_model = SelectedModel::from_str(
|
||||
&std::env::var("ZED_AGENT_MODEL")
|
||||
.unwrap_or("anthropic/claude-3-7-sonnet-latest".into()),
|
||||
)
|
||||
.unwrap();
|
||||
let judge_model = SelectedModel::from_str(
|
||||
&std::env::var("ZED_JUDGE_MODEL")
|
||||
.unwrap_or("anthropic/claude-3-7-sonnet-latest".into()),
|
||||
)
|
||||
.unwrap();
|
||||
let (agent_model, judge_model) = cx
|
||||
.update(|cx| {
|
||||
cx.spawn(async move |cx| {
|
||||
let agent_model = Self::load_model(&agent_model, cx).await;
|
||||
let judge_model = Self::load_model(&judge_model, cx).await;
|
||||
let agent_model =
|
||||
Self::load_model("anthropic", "claude-3-7-sonnet-latest", cx).await;
|
||||
let judge_model =
|
||||
Self::load_model("anthropic", "claude-3-7-sonnet-latest", cx).await;
|
||||
(agent_model.unwrap(), judge_model.unwrap())
|
||||
})
|
||||
})
|
||||
@@ -1285,17 +1272,15 @@ impl EditAgentTest {
|
||||
}
|
||||
|
||||
async fn load_model(
|
||||
selected_model: &SelectedModel,
|
||||
provider: &str,
|
||||
id: &str,
|
||||
cx: &mut AsyncApp,
|
||||
) -> Result<Arc<dyn LanguageModel>> {
|
||||
let (provider, model) = cx.update(|cx| {
|
||||
let models = LanguageModelRegistry::read_global(cx);
|
||||
let model = models
|
||||
.available_models(cx)
|
||||
.find(|model| {
|
||||
model.provider_id() == selected_model.provider
|
||||
&& model.id() == selected_model.model
|
||||
})
|
||||
.find(|model| model.provider_id().0 == provider && model.id().0 == id)
|
||||
.unwrap();
|
||||
let provider = models.provider(&model.provider_id()).unwrap();
|
||||
(provider, model)
|
||||
|
||||
@@ -1249,7 +1249,7 @@ pub struct ActiveDiagnosticGroup {
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
|
||||
#[allow(clippy::large_enum_variant)]
|
||||
pub(crate) enum ActiveDiagnostic {
|
||||
None,
|
||||
All,
|
||||
|
||||
@@ -5,8 +5,7 @@ use crate::{
|
||||
};
|
||||
use anyhow::{Result, anyhow};
|
||||
use assistant_tool::{
|
||||
ActionLog, AnyToolCard, Tool, ToolCard, ToolResult, ToolResultContent, ToolResultOutput,
|
||||
ToolUseStatus,
|
||||
ActionLog, AnyToolCard, Tool, ToolCard, ToolResult, ToolResultOutput, ToolUseStatus,
|
||||
};
|
||||
use buffer_diff::{BufferDiff, BufferDiffSnapshot};
|
||||
use editor::{Editor, EditorMode, MultiBuffer, PathKey};
|
||||
@@ -22,7 +21,7 @@ use language::{
|
||||
};
|
||||
use language_model::{LanguageModel, LanguageModelRequest, LanguageModelToolSchemaFormat};
|
||||
use markdown::{Markdown, MarkdownElement, MarkdownStyle};
|
||||
use project::{Project, ProjectPath};
|
||||
use project::Project;
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::Settings;
|
||||
@@ -76,22 +75,12 @@ pub struct EditFileToolInput {
|
||||
/// </example>
|
||||
pub path: PathBuf,
|
||||
|
||||
/// The mode of operation on the file. Possible values:
|
||||
/// - 'edit': Make granular edits to an existing file.
|
||||
/// - 'create': Create a new file if it doesn't exist.
|
||||
/// - 'overwrite': Replace the entire contents of an existing file.
|
||||
/// If true, this tool will recreate the file from scratch.
|
||||
/// If false, this tool will produce granular edits to an existing file.
|
||||
///
|
||||
/// When a file already exists or you just created it, prefer editing
|
||||
/// When a file already exists or you just created it, always prefer editing
|
||||
/// it as opposed to recreating it from scratch.
|
||||
pub mode: EditFileMode,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)]
|
||||
#[serde(rename_all = "lowercase")]
|
||||
pub enum EditFileMode {
|
||||
Edit,
|
||||
Create,
|
||||
Overwrite,
|
||||
pub create_or_overwrite: bool,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
|
||||
@@ -171,9 +160,12 @@ impl Tool for EditFileTool {
|
||||
Err(err) => return Task::ready(Err(anyhow!(err))).into(),
|
||||
};
|
||||
|
||||
let project_path = match resolve_path(&input, project.clone(), cx) {
|
||||
Ok(path) => path,
|
||||
Err(err) => return Task::ready(Err(anyhow!(err))).into(),
|
||||
let Some(project_path) = project.read(cx).find_project_path(&input.path, cx) else {
|
||||
return Task::ready(Err(anyhow!(
|
||||
"Path {} not found in project",
|
||||
input.path.display()
|
||||
)))
|
||||
.into();
|
||||
};
|
||||
|
||||
let card = window.and_then(|window| {
|
||||
@@ -196,6 +188,16 @@ impl Tool for EditFileTool {
|
||||
})?
|
||||
.await?;
|
||||
|
||||
let exists = buffer.read_with(cx, |buffer, _| {
|
||||
buffer
|
||||
.file()
|
||||
.as_ref()
|
||||
.map_or(false, |file| file.disk_state().exists())
|
||||
})?;
|
||||
if !input.create_or_overwrite && !exists {
|
||||
return Err(anyhow!("{} not found", input.path.display()));
|
||||
}
|
||||
|
||||
let old_snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?;
|
||||
let old_text = cx
|
||||
.background_spawn({
|
||||
@@ -204,15 +206,15 @@ impl Tool for EditFileTool {
|
||||
})
|
||||
.await;
|
||||
|
||||
let (output, mut events) = if matches!(input.mode, EditFileMode::Edit) {
|
||||
edit_agent.edit(
|
||||
let (output, mut events) = if input.create_or_overwrite {
|
||||
edit_agent.overwrite(
|
||||
buffer.clone(),
|
||||
input.display_description.clone(),
|
||||
&request,
|
||||
cx,
|
||||
)
|
||||
} else {
|
||||
edit_agent.overwrite(
|
||||
edit_agent.edit(
|
||||
buffer.clone(),
|
||||
input.display_description.clone(),
|
||||
&request,
|
||||
@@ -290,10 +292,7 @@ impl Tool for EditFileTool {
|
||||
}
|
||||
} else {
|
||||
Ok(ToolResultOutput {
|
||||
content: ToolResultContent::Text(format!(
|
||||
"Edited {}:\n\n```diff\n{}\n```",
|
||||
input_path, diff
|
||||
)),
|
||||
content: format!("Edited {}:\n\n```diff\n{}\n```", input_path, diff),
|
||||
output: serde_json::to_value(output).ok(),
|
||||
})
|
||||
}
|
||||
@@ -332,72 +331,6 @@ impl Tool for EditFileTool {
|
||||
}
|
||||
}
|
||||
|
||||
/// Validate that the file path is valid, meaning:
|
||||
///
|
||||
/// - For `edit` and `overwrite`, the path must point to an existing file.
|
||||
/// - For `create`, the file must not already exist, but it's parent dir must exist.
|
||||
fn resolve_path(
|
||||
input: &EditFileToolInput,
|
||||
project: Entity<Project>,
|
||||
cx: &mut App,
|
||||
) -> Result<ProjectPath> {
|
||||
let project = project.read(cx);
|
||||
|
||||
match input.mode {
|
||||
EditFileMode::Edit | EditFileMode::Overwrite => {
|
||||
let path = project
|
||||
.find_project_path(&input.path, cx)
|
||||
.ok_or_else(|| anyhow!("Can't edit file: path not found"))?;
|
||||
|
||||
let entry = project
|
||||
.entry_for_path(&path, cx)
|
||||
.ok_or_else(|| anyhow!("Can't edit file: path not found"))?;
|
||||
|
||||
if !entry.is_file() {
|
||||
return Err(anyhow!("Can't edit file: path is a directory"));
|
||||
}
|
||||
|
||||
Ok(path)
|
||||
}
|
||||
|
||||
EditFileMode::Create => {
|
||||
if let Some(path) = project.find_project_path(&input.path, cx) {
|
||||
if project.entry_for_path(&path, cx).is_some() {
|
||||
return Err(anyhow!("Can't create file: file already exists"));
|
||||
}
|
||||
}
|
||||
|
||||
let parent_path = input
|
||||
.path
|
||||
.parent()
|
||||
.ok_or_else(|| anyhow!("Can't create file: incorrect path"))?;
|
||||
|
||||
let parent_project_path = project.find_project_path(&parent_path, cx);
|
||||
|
||||
let parent_entry = parent_project_path
|
||||
.as_ref()
|
||||
.and_then(|path| project.entry_for_path(&path, cx))
|
||||
.ok_or_else(|| anyhow!("Can't create file: parent directory doesn't exist"))?;
|
||||
|
||||
if !parent_entry.is_dir() {
|
||||
return Err(anyhow!("Can't create file: parent is not a directory"));
|
||||
}
|
||||
|
||||
let file_name = input
|
||||
.path
|
||||
.file_name()
|
||||
.ok_or_else(|| anyhow!("Can't create file: invalid filename"))?;
|
||||
|
||||
let new_file_path = parent_project_path.map(|parent| ProjectPath {
|
||||
path: Arc::from(parent.path.join(file_name)),
|
||||
..parent
|
||||
});
|
||||
|
||||
new_file_path.ok_or_else(|| anyhow!("Can't create file"))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct EditFileToolCard {
|
||||
path: PathBuf,
|
||||
editor: Entity<Editor>,
|
||||
@@ -429,9 +362,9 @@ impl EditFileToolCard {
|
||||
editor.set_show_gutter(false, cx);
|
||||
editor.disable_inline_diagnostics();
|
||||
editor.disable_expand_excerpt_buttons(cx);
|
||||
editor.disable_scrollbars_and_minimap(window, cx);
|
||||
editor.set_soft_wrap_mode(SoftWrap::None, cx);
|
||||
editor.scroll_manager.set_forbid_vertical_scroll(true);
|
||||
editor.set_show_scrollbars(false, cx);
|
||||
editor.set_show_indent_guides(false, cx);
|
||||
editor.set_read_only(true);
|
||||
editor.set_show_breakpoints(false, cx);
|
||||
@@ -449,7 +382,7 @@ impl EditFileToolCard {
|
||||
diff_task: None,
|
||||
preview_expanded: true,
|
||||
error_expanded: None,
|
||||
full_height_expanded: true,
|
||||
full_height_expanded: false,
|
||||
total_lines: None,
|
||||
}
|
||||
}
|
||||
@@ -917,10 +850,7 @@ async fn build_buffer_diff(
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::result::Result;
|
||||
|
||||
use super::*;
|
||||
use client::TelemetrySettings;
|
||||
use fs::FakeFs;
|
||||
use gpui::TestAppContext;
|
||||
use language_model::fake_provider::FakeLanguageModel;
|
||||
@@ -942,7 +872,7 @@ mod tests {
|
||||
let input = serde_json::to_value(EditFileToolInput {
|
||||
display_description: "Some edit".into(),
|
||||
path: "root/nonexistent_file.txt".into(),
|
||||
mode: EditFileMode::Edit,
|
||||
create_or_overwrite: false,
|
||||
})
|
||||
.unwrap();
|
||||
Arc::new(EditFileTool)
|
||||
@@ -960,102 +890,10 @@ mod tests {
|
||||
.await;
|
||||
assert_eq!(
|
||||
result.unwrap_err().to_string(),
|
||||
"Can't edit file: path not found"
|
||||
"root/nonexistent_file.txt not found"
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_resolve_path_for_creating_file(cx: &mut TestAppContext) {
|
||||
let mode = &EditFileMode::Create;
|
||||
|
||||
let result = test_resolve_path(mode, "root/new.txt", cx);
|
||||
assert_resolved_path_eq(result.await, "new.txt");
|
||||
|
||||
let result = test_resolve_path(mode, "new.txt", cx);
|
||||
assert_resolved_path_eq(result.await, "new.txt");
|
||||
|
||||
let result = test_resolve_path(mode, "dir/new.txt", cx);
|
||||
assert_resolved_path_eq(result.await, "dir/new.txt");
|
||||
|
||||
let result = test_resolve_path(mode, "root/dir/subdir/existing.txt", cx);
|
||||
assert_eq!(
|
||||
result.await.unwrap_err().to_string(),
|
||||
"Can't create file: file already exists"
|
||||
);
|
||||
|
||||
let result = test_resolve_path(mode, "root/dir/nonexistent_dir/new.txt", cx);
|
||||
assert_eq!(
|
||||
result.await.unwrap_err().to_string(),
|
||||
"Can't create file: parent directory doesn't exist"
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_resolve_path_for_editing_file(cx: &mut TestAppContext) {
|
||||
let mode = &EditFileMode::Edit;
|
||||
|
||||
let path_with_root = "root/dir/subdir/existing.txt";
|
||||
let path_without_root = "dir/subdir/existing.txt";
|
||||
let result = test_resolve_path(mode, path_with_root, cx);
|
||||
assert_resolved_path_eq(result.await, path_without_root);
|
||||
|
||||
let result = test_resolve_path(mode, path_without_root, cx);
|
||||
assert_resolved_path_eq(result.await, path_without_root);
|
||||
|
||||
let result = test_resolve_path(mode, "root/nonexistent.txt", cx);
|
||||
assert_eq!(
|
||||
result.await.unwrap_err().to_string(),
|
||||
"Can't edit file: path not found"
|
||||
);
|
||||
|
||||
let result = test_resolve_path(mode, "root/dir", cx);
|
||||
assert_eq!(
|
||||
result.await.unwrap_err().to_string(),
|
||||
"Can't edit file: path is a directory"
|
||||
);
|
||||
}
|
||||
|
||||
async fn test_resolve_path(
|
||||
mode: &EditFileMode,
|
||||
path: &str,
|
||||
cx: &mut TestAppContext,
|
||||
) -> Result<ProjectPath, anyhow::Error> {
|
||||
init_test(cx);
|
||||
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
fs.insert_tree(
|
||||
"/root",
|
||||
json!({
|
||||
"dir": {
|
||||
"subdir": {
|
||||
"existing.txt": "hello"
|
||||
}
|
||||
}
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
|
||||
|
||||
let input = EditFileToolInput {
|
||||
display_description: "Some edit".into(),
|
||||
path: path.into(),
|
||||
mode: mode.clone(),
|
||||
};
|
||||
|
||||
let result = cx.update(|cx| resolve_path(&input, project, cx));
|
||||
result
|
||||
}
|
||||
|
||||
fn assert_resolved_path_eq(path: Result<ProjectPath, anyhow::Error>, expected: &str) {
|
||||
let actual = path
|
||||
.expect("Should return valid path")
|
||||
.path
|
||||
.to_str()
|
||||
.unwrap()
|
||||
.replace("\\", "/"); // Naive Windows paths normalization
|
||||
assert_eq!(actual, expected);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn still_streaming_ui_text_with_path() {
|
||||
let input = json!({
|
||||
@@ -1128,7 +966,6 @@ mod tests {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
language::init(cx);
|
||||
TelemetrySettings::register(cx);
|
||||
Project::init_settings(cx);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -1,8 +1,6 @@
|
||||
use crate::{schema::json_schema_for, ui::ToolCallCardHeader};
|
||||
use anyhow::{Result, anyhow};
|
||||
use assistant_tool::{
|
||||
ActionLog, Tool, ToolCard, ToolResult, ToolResultContent, ToolResultOutput, ToolUseStatus,
|
||||
};
|
||||
use assistant_tool::{ActionLog, Tool, ToolCard, ToolResult, ToolUseStatus};
|
||||
use editor::Editor;
|
||||
use futures::channel::oneshot::{self, Receiver};
|
||||
use gpui::{
|
||||
@@ -40,12 +38,6 @@ pub struct FindPathToolInput {
|
||||
pub offset: usize,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
struct FindPathToolOutput {
|
||||
glob: String,
|
||||
paths: Vec<PathBuf>,
|
||||
}
|
||||
|
||||
const RESULTS_PER_PAGE: usize = 50;
|
||||
|
||||
pub struct FindPathTool;
|
||||
@@ -119,18 +111,10 @@ impl Tool for FindPathTool {
|
||||
)
|
||||
.unwrap();
|
||||
}
|
||||
let output = FindPathToolOutput {
|
||||
glob,
|
||||
paths: matches.clone(),
|
||||
};
|
||||
|
||||
for mat in matches.into_iter().skip(offset).take(RESULTS_PER_PAGE) {
|
||||
write!(&mut message, "\n{}", mat.display()).unwrap();
|
||||
}
|
||||
Ok(ToolResultOutput {
|
||||
content: ToolResultContent::Text(message),
|
||||
output: Some(serde_json::to_value(output)?),
|
||||
})
|
||||
Ok(message.into())
|
||||
}
|
||||
});
|
||||
|
||||
@@ -139,18 +123,6 @@ impl Tool for FindPathTool {
|
||||
card: Some(card.into()),
|
||||
}
|
||||
}
|
||||
|
||||
fn deserialize_card(
|
||||
self: Arc<Self>,
|
||||
output: serde_json::Value,
|
||||
_project: Entity<Project>,
|
||||
_window: &mut Window,
|
||||
cx: &mut App,
|
||||
) -> Option<assistant_tool::AnyToolCard> {
|
||||
let output = serde_json::from_value::<FindPathToolOutput>(output).ok()?;
|
||||
let card = cx.new(|_| FindPathToolCard::from_output(output));
|
||||
Some(card.into())
|
||||
}
|
||||
}
|
||||
|
||||
fn search_paths(glob: &str, project: Entity<Project>, cx: &mut App) -> Task<Result<Vec<PathBuf>>> {
|
||||
@@ -208,15 +180,6 @@ impl FindPathToolCard {
|
||||
_receiver_task: Some(_receiver_task),
|
||||
}
|
||||
}
|
||||
|
||||
fn from_output(output: FindPathToolOutput) -> Self {
|
||||
Self {
|
||||
glob: output.glob,
|
||||
paths: output.paths,
|
||||
expanded: false,
|
||||
_receiver_task: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ToolCard for FindPathToolCard {
|
||||
|
||||
@@ -752,9 +752,9 @@ mod tests {
|
||||
match task.output.await {
|
||||
Ok(result) => {
|
||||
if cfg!(windows) {
|
||||
result.content.as_str().unwrap().replace("root\\", "root/")
|
||||
result.content.replace("root\\", "root/")
|
||||
} else {
|
||||
result.content.as_str().unwrap().to_string()
|
||||
result.content
|
||||
}
|
||||
}
|
||||
Err(e) => panic!("Failed to run grep tool: {}", e),
|
||||
|
||||
@@ -1,17 +1,13 @@
|
||||
use crate::schema::json_schema_for;
|
||||
use anyhow::{Result, anyhow};
|
||||
use assistant_tool::outline;
|
||||
use assistant_tool::{ActionLog, Tool, ToolResult};
|
||||
use assistant_tool::{ToolResultContent, outline};
|
||||
use gpui::{AnyWindowHandle, App, Entity, Task};
|
||||
use project::{ImageItem, image_store};
|
||||
|
||||
use assistant_tool::ToolResultOutput;
|
||||
use indoc::formatdoc;
|
||||
use itertools::Itertools;
|
||||
use language::{Anchor, Point};
|
||||
use language_model::{
|
||||
LanguageModel, LanguageModelImage, LanguageModelRequest, LanguageModelToolSchemaFormat,
|
||||
};
|
||||
use language_model::{LanguageModel, LanguageModelRequest, LanguageModelToolSchemaFormat};
|
||||
use project::{AgentLocation, Project};
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
@@ -90,7 +86,7 @@ impl Tool for ReadFileTool {
|
||||
_request: Arc<LanguageModelRequest>,
|
||||
project: Entity<Project>,
|
||||
action_log: Entity<ActionLog>,
|
||||
model: Arc<dyn LanguageModel>,
|
||||
_model: Arc<dyn LanguageModel>,
|
||||
_window: Option<AnyWindowHandle>,
|
||||
cx: &mut App,
|
||||
) -> ToolResult {
|
||||
@@ -104,42 +100,6 @@ impl Tool for ReadFileTool {
|
||||
};
|
||||
|
||||
let file_path = input.path.clone();
|
||||
|
||||
if image_store::is_image_file(&project, &project_path, cx) {
|
||||
if !model.supports_images() {
|
||||
return Task::ready(Err(anyhow!(
|
||||
"Attempted to read an image, but Zed doesn't currently sending images to {}.",
|
||||
model.name().0
|
||||
)))
|
||||
.into();
|
||||
}
|
||||
|
||||
let task = cx.spawn(async move |cx| -> Result<ToolResultOutput> {
|
||||
let image_entity: Entity<ImageItem> = cx
|
||||
.update(|cx| {
|
||||
project.update(cx, |project, cx| {
|
||||
project.open_image(project_path.clone(), cx)
|
||||
})
|
||||
})?
|
||||
.await?;
|
||||
|
||||
let image =
|
||||
image_entity.read_with(cx, |image_item, _| Arc::clone(&image_item.image))?;
|
||||
|
||||
let language_model_image = cx
|
||||
.update(|cx| LanguageModelImage::from_image(image, cx))?
|
||||
.await
|
||||
.ok_or_else(|| anyhow!("Failed to process image"))?;
|
||||
|
||||
Ok(ToolResultOutput {
|
||||
content: ToolResultContent::Image(language_model_image),
|
||||
output: None,
|
||||
})
|
||||
});
|
||||
|
||||
return task.into();
|
||||
}
|
||||
|
||||
cx.spawn(async move |cx| {
|
||||
let buffer = cx
|
||||
.update(|cx| {
|
||||
@@ -322,10 +282,7 @@ mod test {
|
||||
.output
|
||||
})
|
||||
.await;
|
||||
assert_eq!(
|
||||
result.unwrap().content.as_str(),
|
||||
Some("This is a small file content")
|
||||
);
|
||||
assert_eq!(result.unwrap().content, "This is a small file content");
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
@@ -365,7 +322,6 @@ mod test {
|
||||
})
|
||||
.await;
|
||||
let content = result.unwrap();
|
||||
let content = content.as_str().unwrap();
|
||||
assert_eq!(
|
||||
content.lines().skip(4).take(6).collect::<Vec<_>>(),
|
||||
vec![
|
||||
@@ -409,8 +365,6 @@ mod test {
|
||||
.collect::<Vec<_>>();
|
||||
pretty_assertions::assert_eq!(
|
||||
content
|
||||
.as_str()
|
||||
.unwrap()
|
||||
.lines()
|
||||
.skip(4)
|
||||
.take(expected_content.len())
|
||||
@@ -454,10 +408,7 @@ mod test {
|
||||
.output
|
||||
})
|
||||
.await;
|
||||
assert_eq!(
|
||||
result.unwrap().content.as_str(),
|
||||
Some("Line 2\nLine 3\nLine 4")
|
||||
);
|
||||
assert_eq!(result.unwrap().content, "Line 2\nLine 3\nLine 4");
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
@@ -497,7 +448,7 @@ mod test {
|
||||
.output
|
||||
})
|
||||
.await;
|
||||
assert_eq!(result.unwrap().content.as_str(), Some("Line 1\nLine 2"));
|
||||
assert_eq!(result.unwrap().content, "Line 1\nLine 2");
|
||||
|
||||
// end_line of 0 should result in at least 1 line
|
||||
let result = cx
|
||||
@@ -520,7 +471,7 @@ mod test {
|
||||
.output
|
||||
})
|
||||
.await;
|
||||
assert_eq!(result.unwrap().content.as_str(), Some("Line 1"));
|
||||
assert_eq!(result.unwrap().content, "Line 1");
|
||||
|
||||
// when start_line > end_line, should still return at least 1 line
|
||||
let result = cx
|
||||
@@ -543,7 +494,7 @@ mod test {
|
||||
.output
|
||||
})
|
||||
.await;
|
||||
assert_eq!(result.unwrap().content.as_str(), Some("Line 3"));
|
||||
assert_eq!(result.unwrap().content, "Line 3");
|
||||
}
|
||||
|
||||
fn init_test(cx: &mut TestAppContext) {
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
use crate::schema::json_schema_for;
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use anyhow::{Context as _, Result, anyhow, bail};
|
||||
use assistant_tool::{ActionLog, Tool, ToolCard, ToolResult, ToolUseStatus};
|
||||
use futures::{FutureExt as _, future::Shared};
|
||||
use gpui::{
|
||||
@@ -125,24 +125,14 @@ impl Tool for TerminalTool {
|
||||
Err(err) => return Task::ready(Err(anyhow!(err))).into(),
|
||||
};
|
||||
|
||||
let working_dir = match working_dir(&input, &project, cx) {
|
||||
let input_path = Path::new(&input.cd);
|
||||
let working_dir = match working_dir(&input, &project, input_path, cx) {
|
||||
Ok(dir) => dir,
|
||||
Err(err) => return Task::ready(Err(err)).into(),
|
||||
};
|
||||
let program = self.determine_shell.clone();
|
||||
let command = if cfg!(windows) {
|
||||
format!("$null | & {{{}}}", input.command.replace("\"", "'"))
|
||||
} else if let Some(cwd) = working_dir
|
||||
.as_ref()
|
||||
.and_then(|cwd| cwd.as_os_str().to_str())
|
||||
{
|
||||
// Make sure once we're *inside* the shell, we cd into `cwd`
|
||||
format!("(cd {cwd}; {}) </dev/null", input.command)
|
||||
} else {
|
||||
format!("({}) </dev/null", input.command)
|
||||
};
|
||||
let args = vec!["-c".into(), command];
|
||||
|
||||
let command = format!("({}) </dev/null", input.command);
|
||||
let args = vec!["-c".into(), command.clone()];
|
||||
let cwd = working_dir.clone();
|
||||
let env = match &working_dir {
|
||||
Some(dir) => project.update(cx, |project, cx| {
|
||||
@@ -325,13 +315,19 @@ fn process_content(
|
||||
} else {
|
||||
content
|
||||
};
|
||||
let content = content.trim();
|
||||
let is_empty = content.is_empty();
|
||||
let content = format!("```\n{content}\n```");
|
||||
let is_empty = content.trim().is_empty();
|
||||
|
||||
let content = format!(
|
||||
"```\n{}{}```",
|
||||
content,
|
||||
if content.ends_with('\n') { "" } else { "\n" }
|
||||
);
|
||||
|
||||
let content = if should_truncate {
|
||||
format!(
|
||||
"Command output too long. The first {} bytes:\n\n{content}",
|
||||
"Command output too long. The first {} bytes:\n\n{}",
|
||||
content.len(),
|
||||
content,
|
||||
)
|
||||
} else {
|
||||
content
|
||||
@@ -371,47 +367,42 @@ fn process_content(
|
||||
fn working_dir(
|
||||
input: &TerminalToolInput,
|
||||
project: &Entity<Project>,
|
||||
input_path: &Path,
|
||||
cx: &mut App,
|
||||
) -> Result<Option<PathBuf>> {
|
||||
let project = project.read(cx);
|
||||
let cd = &input.cd;
|
||||
|
||||
if cd == "." || cd == "" {
|
||||
// Accept "." or "" as meaning "the one worktree" if we only have one worktree.
|
||||
if input.cd == "." {
|
||||
// Accept "." as meaning "the one worktree" if we only have one worktree.
|
||||
let mut worktrees = project.worktrees(cx);
|
||||
|
||||
match worktrees.next() {
|
||||
Some(worktree) => {
|
||||
if worktrees.next().is_none() {
|
||||
Ok(Some(worktree.read(cx).abs_path().to_path_buf()))
|
||||
} else {
|
||||
Err(anyhow!(
|
||||
if worktrees.next().is_some() {
|
||||
bail!(
|
||||
"'.' is ambiguous in multi-root workspaces. Please specify a root directory explicitly.",
|
||||
))
|
||||
);
|
||||
}
|
||||
Ok(Some(worktree.read(cx).abs_path().to_path_buf()))
|
||||
}
|
||||
None => Ok(None),
|
||||
}
|
||||
} else {
|
||||
let input_path = Path::new(cd);
|
||||
|
||||
if input_path.is_absolute() {
|
||||
// Absolute paths are allowed, but only if they're in one of the project's worktrees.
|
||||
if project
|
||||
.worktrees(cx)
|
||||
.any(|worktree| input_path.starts_with(&worktree.read(cx).abs_path()))
|
||||
{
|
||||
return Ok(Some(input_path.into()));
|
||||
}
|
||||
} else {
|
||||
if let Some(worktree) = project.worktree_for_root_name(cd, cx) {
|
||||
return Ok(Some(worktree.read(cx).abs_path().to_path_buf()));
|
||||
}
|
||||
} else if input_path.is_absolute() {
|
||||
// Absolute paths are allowed, but only if they're in one of the project's worktrees.
|
||||
if !project
|
||||
.worktrees(cx)
|
||||
.any(|worktree| input_path.starts_with(&worktree.read(cx).abs_path()))
|
||||
{
|
||||
bail!("The absolute path must be within one of the project's worktrees");
|
||||
}
|
||||
|
||||
Err(anyhow!(
|
||||
"`cd` directory {cd:?} was not in any of the project's worktrees."
|
||||
))
|
||||
Ok(Some(input_path.into()))
|
||||
} else {
|
||||
let Some(worktree) = project.worktree_for_root_name(&input.cd, cx) else {
|
||||
bail!("`cd` directory {:?} not found in the project", input.cd);
|
||||
};
|
||||
|
||||
Ok(Some(worktree.read(cx).abs_path().to_path_buf()))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -732,8 +723,8 @@ mod tests {
|
||||
)
|
||||
});
|
||||
|
||||
let output = result.output.await.log_err().unwrap().content;
|
||||
assert_eq!(output.as_str().unwrap(), "Command executed successfully.");
|
||||
let output = result.output.await.log_err().map(|output| output.content);
|
||||
assert_eq!(output, Some("Command executed successfully.".into()));
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
@@ -766,13 +757,12 @@ mod tests {
|
||||
cx,
|
||||
);
|
||||
cx.spawn(async move |_| {
|
||||
let output = headless_result.output.await.map(|output| output.content);
|
||||
assert_eq!(
|
||||
output
|
||||
.ok()
|
||||
.and_then(|content| content.as_str().map(ToString::to_string)),
|
||||
expected
|
||||
);
|
||||
let output = headless_result
|
||||
.output
|
||||
.await
|
||||
.log_err()
|
||||
.map(|output| output.content);
|
||||
assert_eq!(output, expected);
|
||||
})
|
||||
};
|
||||
|
||||
@@ -780,7 +770,7 @@ mod tests {
|
||||
check(
|
||||
TerminalToolInput {
|
||||
command: "pwd".into(),
|
||||
cd: ".".into(),
|
||||
cd: "project".into(),
|
||||
},
|
||||
Some(format!(
|
||||
"```\n{}\n```",
|
||||
@@ -795,9 +785,12 @@ mod tests {
|
||||
check(
|
||||
TerminalToolInput {
|
||||
command: "pwd".into(),
|
||||
cd: "other-project".into(),
|
||||
cd: ".".into(),
|
||||
},
|
||||
None, // other-project is a dir, but *not* a worktree (yet)
|
||||
Some(format!(
|
||||
"```\n{}\n```",
|
||||
tree.path().join("project").display()
|
||||
)),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
|
||||
@@ -3,9 +3,7 @@ use std::{sync::Arc, time::Duration};
|
||||
use crate::schema::json_schema_for;
|
||||
use crate::ui::ToolCallCardHeader;
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use assistant_tool::{
|
||||
ActionLog, Tool, ToolCard, ToolResult, ToolResultContent, ToolResultOutput, ToolUseStatus,
|
||||
};
|
||||
use assistant_tool::{ActionLog, Tool, ToolCard, ToolResult, ToolUseStatus};
|
||||
use futures::{Future, FutureExt, TryFutureExt};
|
||||
use gpui::{
|
||||
AnyWindowHandle, App, AppContext, Context, Entity, IntoElement, Task, WeakEntity, Window,
|
||||
@@ -75,13 +73,9 @@ impl Tool for WebSearchTool {
|
||||
let search_task = search_task.clone();
|
||||
async move {
|
||||
let response = search_task.await.map_err(|err| anyhow!(err))?;
|
||||
Ok(ToolResultOutput {
|
||||
content: ToolResultContent::Text(
|
||||
serde_json::to_string(&response)
|
||||
.context("Failed to serialize search results")?,
|
||||
),
|
||||
output: Some(serde_json::to_value(response)?),
|
||||
})
|
||||
serde_json::to_string(&response)
|
||||
.context("Failed to serialize search results")
|
||||
.map(Into::into)
|
||||
}
|
||||
});
|
||||
|
||||
@@ -90,18 +84,6 @@ impl Tool for WebSearchTool {
|
||||
card: Some(cx.new(|cx| WebSearchToolCard::new(search_task, cx)).into()),
|
||||
}
|
||||
}
|
||||
|
||||
fn deserialize_card(
|
||||
self: Arc<Self>,
|
||||
output: serde_json::Value,
|
||||
_project: Entity<Project>,
|
||||
_window: &mut Window,
|
||||
cx: &mut App,
|
||||
) -> Option<assistant_tool::AnyToolCard> {
|
||||
let output = serde_json::from_value::<WebSearchResponse>(output).ok()?;
|
||||
let card = cx.new(|cx| WebSearchToolCard::new(Task::ready(Ok(output)), cx));
|
||||
Some(card.into())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(RegisterComponent)]
|
||||
|
||||
@@ -335,13 +335,9 @@ impl AutoUpdater {
|
||||
self.status.clone()
|
||||
}
|
||||
|
||||
pub fn dismiss_error(&mut self, cx: &mut Context<Self>) -> bool {
|
||||
if self.status == AutoUpdateStatus::Idle {
|
||||
return false;
|
||||
}
|
||||
pub fn dismiss_error(&mut self, cx: &mut Context<Self>) {
|
||||
self.status = AutoUpdateStatus::Idle;
|
||||
cx.notify();
|
||||
true
|
||||
}
|
||||
|
||||
// If you are packaging Zed and need to override the place it downloads SSH remotes from,
|
||||
|
||||
@@ -155,7 +155,6 @@ pub fn notify_if_app_was_updated(cx: &mut App) {
|
||||
}
|
||||
cx.emit(DismissEvent);
|
||||
})
|
||||
.show_suppress_button(false)
|
||||
})
|
||||
},
|
||||
);
|
||||
|
||||
@@ -12,7 +12,6 @@ pub struct CallSettings {
|
||||
|
||||
/// Configuration of voice calls in Zed.
|
||||
#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)]
|
||||
#[schemars(deny_unknown_fields)]
|
||||
pub struct CallSettingsContent {
|
||||
/// Whether the microphone should be muted when joining a channel or a call.
|
||||
///
|
||||
|
||||
@@ -19,7 +19,6 @@ test-support = ["clock/test-support", "collections/test-support", "gpui/test-sup
|
||||
anyhow.workspace = true
|
||||
async-recursion = "0.3"
|
||||
async-tungstenite = { workspace = true, features = ["tokio", "tokio-rustls-manual-roots"] }
|
||||
base64.workspace = true
|
||||
chrono = { workspace = true, features = ["serde"] }
|
||||
clock.workspace = true
|
||||
collections.workspace = true
|
||||
@@ -30,7 +29,6 @@ gpui.workspace = true
|
||||
gpui_tokio.workspace = true
|
||||
http_client.workspace = true
|
||||
http_client_tls.workspace = true
|
||||
httparse = "1.10"
|
||||
log.workspace = true
|
||||
paths.workspace = true
|
||||
parking_lot.workspace = true
|
||||
@@ -49,7 +47,6 @@ text.workspace = true
|
||||
thiserror.workspace = true
|
||||
time.workspace = true
|
||||
tiny_http = "0.8"
|
||||
tokio-native-tls = "0.3"
|
||||
tokio-socks = { version = "0.5.2", default-features = false, features = ["futures-io"] }
|
||||
url.workspace = true
|
||||
util.workspace = true
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
pub mod test;
|
||||
|
||||
mod proxy;
|
||||
mod socks;
|
||||
pub mod telemetry;
|
||||
pub mod user;
|
||||
pub mod zed_urls;
|
||||
@@ -24,13 +24,13 @@ use gpui::{App, AsyncApp, Entity, Global, Task, WeakEntity, actions};
|
||||
use http_client::{AsyncBody, HttpClient, HttpClientWithUrl};
|
||||
use parking_lot::RwLock;
|
||||
use postage::watch;
|
||||
use proxy::connect_proxy_stream;
|
||||
use rand::prelude::*;
|
||||
use release_channel::{AppVersion, ReleaseChannel};
|
||||
use rpc::proto::{AnyTypedEnvelope, EnvelopedMessage, PeerId, RequestMessage};
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::{Settings, SettingsSources};
|
||||
use socks::connect_socks_proxy_stream;
|
||||
use std::pin::Pin;
|
||||
use std::{
|
||||
any::TypeId,
|
||||
@@ -47,9 +47,8 @@ use std::{
|
||||
};
|
||||
use telemetry::Telemetry;
|
||||
use thiserror::Error;
|
||||
use tokio::net::TcpStream;
|
||||
use url::Url;
|
||||
use util::{ConnectionResult, ResultExt};
|
||||
use util::{ResultExt, TryFutureExt};
|
||||
|
||||
pub use rpc::*;
|
||||
pub use telemetry_events::Event;
|
||||
@@ -151,19 +150,9 @@ pub fn init(client: &Arc<Client>, cx: &mut App) {
|
||||
let client = client.clone();
|
||||
move |_: &SignIn, cx| {
|
||||
if let Some(client) = client.upgrade() {
|
||||
cx.spawn(
|
||||
async move |cx| match client.authenticate_and_connect(true, &cx).await {
|
||||
ConnectionResult::Timeout => {
|
||||
log::error!("Initial authentication timed out");
|
||||
}
|
||||
ConnectionResult::ConnectionReset => {
|
||||
log::error!("Initial authentication connection reset");
|
||||
}
|
||||
ConnectionResult::Result(r) => {
|
||||
r.log_err();
|
||||
}
|
||||
},
|
||||
)
|
||||
cx.spawn(async move |cx| {
|
||||
client.authenticate_and_connect(true, &cx).log_err().await
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
}
|
||||
@@ -668,7 +657,7 @@ impl Client {
|
||||
state._reconnect_task = None;
|
||||
}
|
||||
Status::ConnectionLost => {
|
||||
let client = self.clone();
|
||||
let this = self.clone();
|
||||
state._reconnect_task = Some(cx.spawn(async move |cx| {
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
let mut rng = StdRng::seed_from_u64(0);
|
||||
@@ -676,25 +665,10 @@ impl Client {
|
||||
let mut rng = StdRng::from_entropy();
|
||||
|
||||
let mut delay = INITIAL_RECONNECTION_DELAY;
|
||||
loop {
|
||||
match client.authenticate_and_connect(true, &cx).await {
|
||||
ConnectionResult::Timeout => {
|
||||
log::error!("client connect attempt timed out")
|
||||
}
|
||||
ConnectionResult::ConnectionReset => {
|
||||
log::error!("client connect attempt reset")
|
||||
}
|
||||
ConnectionResult::Result(r) => {
|
||||
if let Err(error) = r {
|
||||
log::error!("failed to connect: {error}");
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if matches!(*client.status().borrow(), Status::ConnectionError) {
|
||||
client.set_status(
|
||||
while let Err(error) = this.authenticate_and_connect(true, &cx).await {
|
||||
log::error!("failed to connect {}", error);
|
||||
if matches!(*this.status().borrow(), Status::ConnectionError) {
|
||||
this.set_status(
|
||||
Status::ReconnectionError {
|
||||
next_reconnection: Instant::now() + delay,
|
||||
},
|
||||
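Note: the reconnection hunk above retries `authenticate_and_connect` in a loop, logging failures and growing the delay between attempts. A minimal sketch of that retry-with-backoff shape, with the connect step stubbed out; the delay cap and the absence of jitter are assumptions for illustration (the real code also seeds an RNG and tracks connection status):

```rust
use std::time::Duration;

const INITIAL_RECONNECTION_DELAY: Duration = Duration::from_millis(500);
// Assumed cap for the sketch; not taken from this diff.
const MAX_RECONNECTION_DELAY: Duration = Duration::from_secs(30);

// Stub for a connect attempt; the real code calls `authenticate_and_connect`.
async fn try_connect(attempt: u32) -> Result<(), String> {
    if attempt < 3 { Err(format!("attempt {attempt} failed")) } else { Ok(()) }
}

async fn reconnect_loop() {
    let mut delay = INITIAL_RECONNECTION_DELAY;
    let mut attempt = 0;
    loop {
        match try_connect(attempt).await {
            Ok(()) => break,
            Err(error) => {
                eprintln!("failed to connect: {error}");
                // Sleep, then grow the delay (the real code also adds random jitter).
                tokio::time::sleep(delay).await;
                delay = (delay * 2).min(MAX_RECONNECTION_DELAY);
                attempt += 1;
            }
        }
    }
}

#[tokio::main]
async fn main() {
    reconnect_loop().await;
    println!("connected");
}
```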
@@ -852,7 +826,7 @@ impl Client {
|
||||
self: &Arc<Self>,
|
||||
try_provider: bool,
|
||||
cx: &AsyncApp,
|
||||
) -> ConnectionResult<()> {
|
||||
) -> anyhow::Result<()> {
|
||||
let was_disconnected = match *self.status().borrow() {
|
||||
Status::SignedOut => true,
|
||||
Status::ConnectionError
|
||||
@@ -861,14 +835,9 @@ impl Client {
|
||||
| Status::Reauthenticating { .. }
|
||||
| Status::ReconnectionError { .. } => false,
|
||||
Status::Connected { .. } | Status::Connecting { .. } | Status::Reconnecting { .. } => {
|
||||
return ConnectionResult::Result(Ok(()));
|
||||
}
|
||||
Status::UpgradeRequired => {
|
||||
return ConnectionResult::Result(
|
||||
Err(EstablishConnectionError::UpgradeRequired)
|
||||
.context("client auth and connect"),
|
||||
);
|
||||
return Ok(());
|
||||
}
|
||||
Status::UpgradeRequired => return Err(EstablishConnectionError::UpgradeRequired)?,
|
||||
};
|
||||
if was_disconnected {
|
||||
self.set_status(Status::Authenticating, cx);
|
||||
@@ -892,12 +861,12 @@ impl Client {
|
||||
Ok(creds) => credentials = Some(creds),
|
||||
Err(err) => {
|
||||
self.set_status(Status::ConnectionError, cx);
|
||||
return ConnectionResult::Result(Err(err));
|
||||
return Err(err);
|
||||
}
|
||||
}
|
||||
}
|
||||
_ = status_rx.next().fuse() => {
|
||||
return ConnectionResult::Result(Err(anyhow!("authentication canceled")));
|
||||
return Err(anyhow!("authentication canceled"));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -922,10 +891,10 @@ impl Client {
|
||||
}
|
||||
|
||||
futures::select_biased! {
|
||||
result = self.set_connection(conn, cx).fuse() => ConnectionResult::Result(result.context("client auth and connect")),
|
||||
result = self.set_connection(conn, cx).fuse() => result,
|
||||
_ = timeout => {
|
||||
self.set_status(Status::ConnectionError, cx);
|
||||
ConnectionResult::Timeout
|
||||
Err(anyhow!("timed out waiting on hello message from server"))
|
||||
}
|
||||
}
|
||||
}
|
||||
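Note: the hunks around here change `authenticate_and_connect` from returning `anyhow::Result<()>` to a `ConnectionResult<()>` that distinguishes timeouts and connection resets from ordinary errors, and the tests call `.into_response()` on it. The sketch below is an assumed shape for such a type, not the actual `util::ConnectionResult` definition:

```rust
use anyhow::anyhow;

// Assumed shape; the real type lives in the `util` crate.
#[derive(Debug)]
enum ConnectionResult<T> {
    Timeout,
    ConnectionReset,
    Result(anyhow::Result<T>),
}

impl<T> ConnectionResult<T> {
    /// Collapse the dedicated variants back into a plain `Result`,
    /// mirroring what the tests' `into_response()` call appears to do.
    fn into_response(self) -> anyhow::Result<T> {
        match self {
            ConnectionResult::Timeout => Err(anyhow!("connection timed out")),
            ConnectionResult::ConnectionReset => Err(anyhow!("connection reset")),
            ConnectionResult::Result(result) => result,
        }
    }
}

fn main() {
    let ok: ConnectionResult<()> = ConnectionResult::Result(Ok(()));
    assert!(ok.into_response().is_ok());
    let timed_out: ConnectionResult<()> = ConnectionResult::Timeout;
    assert!(timed_out.into_response().is_err());
}
```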
@@ -937,22 +906,22 @@ impl Client {
|
||||
self.authenticate_and_connect(false, cx).await
|
||||
} else {
|
||||
self.set_status(Status::ConnectionError, cx);
|
||||
ConnectionResult::Result(Err(EstablishConnectionError::Unauthorized).context("client auth and connect"))
|
||||
Err(EstablishConnectionError::Unauthorized)?
|
||||
}
|
||||
}
|
||||
Err(EstablishConnectionError::UpgradeRequired) => {
|
||||
self.set_status(Status::UpgradeRequired, cx);
|
||||
ConnectionResult::Result(Err(EstablishConnectionError::UpgradeRequired).context("client auth and connect"))
|
||||
Err(EstablishConnectionError::UpgradeRequired)?
|
||||
}
|
||||
Err(error) => {
|
||||
self.set_status(Status::ConnectionError, cx);
|
||||
ConnectionResult::Result(Err(error).context("client auth and connect"))
|
||||
Err(error)?
|
||||
}
|
||||
}
|
||||
}
|
||||
_ = &mut timeout => {
|
||||
self.set_status(Status::ConnectionError, cx);
|
||||
ConnectionResult::Timeout
|
||||
Err(anyhow!("timed out trying to establish connection"))
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -968,7 +937,10 @@ impl Client {
|
||||
|
||||
let peer_id = async {
|
||||
log::debug!("waiting for server hello");
|
||||
let message = incoming.next().await.context("no hello message received")?;
|
||||
let message = incoming
|
||||
.next()
|
||||
.await
|
||||
.ok_or_else(|| anyhow!("no hello message received"))?;
|
||||
log::debug!("got server hello");
|
||||
let hello_message_type_name = message.payload_type_name().to_string();
|
||||
let hello = message
|
||||
@@ -1155,10 +1127,7 @@ impl Client {
|
||||
let stream = {
|
||||
let handle = cx.update(|cx| gpui_tokio::Tokio::handle(cx)).ok().unwrap();
|
||||
let _guard = handle.enter();
|
||||
match proxy {
|
||||
Some(proxy) => connect_proxy_stream(&proxy, rpc_host).await?,
|
||||
None => Box::new(TcpStream::connect(rpc_host).await?),
|
||||
}
|
||||
connect_socks_proxy_stream(proxy.as_ref(), rpc_host).await?
|
||||
};
|
||||
|
||||
log::info!("connected to rpc endpoint {}", rpc_url);
|
||||
@@ -1770,7 +1739,7 @@ mod tests {
|
||||
status.next().await,
|
||||
Some(Status::ConnectionError { .. })
|
||||
));
|
||||
auth_and_connect.await.into_response().unwrap_err();
|
||||
auth_and_connect.await.unwrap_err();
|
||||
|
||||
// Allow the connection to be established.
|
||||
let server = FakeServer::for_client(user_id, &client, cx).await;
|
||||
|
||||
@@ -1,66 +0,0 @@
|
||||
//! client proxy
|
||||
|
||||
mod http_proxy;
|
||||
mod socks_proxy;
|
||||
|
||||
use anyhow::{Context, Result, anyhow};
|
||||
use http_client::Url;
|
||||
use http_proxy::{HttpProxyType, connect_http_proxy_stream, parse_http_proxy};
|
||||
use socks_proxy::{SocksVersion, connect_socks_proxy_stream, parse_socks_proxy};
|
||||
|
||||
pub(crate) async fn connect_proxy_stream(
|
||||
proxy: &Url,
|
||||
rpc_host: (&str, u16),
|
||||
) -> Result<Box<dyn AsyncReadWrite>> {
|
||||
let Some(((proxy_domain, proxy_port), proxy_type)) = parse_proxy_type(proxy) else {
|
||||
// If parsing the proxy URL fails, we must avoid falling back to an insecure connection.
|
||||
// SOCKS proxies are often used in contexts where security and privacy are critical,
|
||||
// so any fallback could expose users to significant risks.
|
||||
return Err(anyhow!("Parsing proxy url failed"));
|
||||
};
|
||||
|
||||
// Connect to proxy and wrap protocol later
|
||||
let stream = tokio::net::TcpStream::connect((proxy_domain.as_str(), proxy_port))
|
||||
.await
|
||||
.context("Failed to connect to proxy")?;
|
||||
|
||||
let proxy_stream = match proxy_type {
|
||||
ProxyType::SocksProxy(proxy) => connect_socks_proxy_stream(stream, proxy, rpc_host).await?,
|
||||
ProxyType::HttpProxy(proxy) => {
|
||||
connect_http_proxy_stream(stream, proxy, rpc_host, &proxy_domain).await?
|
||||
}
|
||||
};
|
||||
|
||||
Ok(proxy_stream)
|
||||
}
|
||||
|
||||
enum ProxyType<'t> {
|
||||
SocksProxy(SocksVersion<'t>),
|
||||
HttpProxy(HttpProxyType<'t>),
|
||||
}
|
||||
|
||||
fn parse_proxy_type<'t>(proxy: &'t Url) -> Option<((String, u16), ProxyType<'t>)> {
|
||||
let scheme = proxy.scheme();
|
||||
let host = proxy.host()?.to_string();
|
||||
let port = proxy.port_or_known_default()?;
|
||||
let proxy_type = match scheme {
|
||||
scheme if scheme.starts_with("socks") => {
|
||||
Some(ProxyType::SocksProxy(parse_socks_proxy(scheme, proxy)))
|
||||
}
|
||||
scheme if scheme.starts_with("http") => {
|
||||
Some(ProxyType::HttpProxy(parse_http_proxy(scheme, proxy)))
|
||||
}
|
||||
_ => None,
|
||||
}?;
|
||||
|
||||
Some(((host, port), proxy_type))
|
||||
}
|
||||
|
||||
pub(crate) trait AsyncReadWrite:
|
||||
tokio::io::AsyncRead + tokio::io::AsyncWrite + Unpin + Send + 'static
|
||||
{
|
||||
}
|
||||
impl<T: tokio::io::AsyncRead + tokio::io::AsyncWrite + Unpin + Send + 'static> AsyncReadWrite
|
||||
for T
|
||||
{
|
||||
}
|
||||
@@ -1,171 +0,0 @@
|
||||
use anyhow::{Context, Result};
|
||||
use base64::Engine;
|
||||
use httparse::{EMPTY_HEADER, Response};
|
||||
use tokio::{
|
||||
io::{AsyncBufReadExt, AsyncWriteExt, BufStream},
|
||||
net::TcpStream,
|
||||
};
|
||||
use tokio_native_tls::{TlsConnector, native_tls};
|
||||
use url::Url;
|
||||
|
||||
use super::AsyncReadWrite;
|
||||
|
||||
pub(super) enum HttpProxyType<'t> {
|
||||
HTTP(Option<HttpProxyAuthorization<'t>>),
|
||||
HTTPS(Option<HttpProxyAuthorization<'t>>),
|
||||
}
|
||||
|
||||
pub(super) struct HttpProxyAuthorization<'t> {
|
||||
username: &'t str,
|
||||
password: &'t str,
|
||||
}
|
||||
|
||||
pub(super) fn parse_http_proxy<'t>(scheme: &str, proxy: &'t Url) -> HttpProxyType<'t> {
|
||||
let auth = proxy.password().map(|password| HttpProxyAuthorization {
|
||||
username: proxy.username(),
|
||||
password,
|
||||
});
|
||||
if scheme.starts_with("https") {
|
||||
HttpProxyType::HTTPS(auth)
|
||||
} else {
|
||||
HttpProxyType::HTTP(auth)
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) async fn connect_http_proxy_stream(
|
||||
stream: TcpStream,
|
||||
http_proxy: HttpProxyType<'_>,
|
||||
rpc_host: (&str, u16),
|
||||
proxy_domain: &str,
|
||||
) -> Result<Box<dyn AsyncReadWrite>> {
|
||||
match http_proxy {
|
||||
HttpProxyType::HTTP(auth) => http_connect(stream, rpc_host, auth).await,
|
||||
HttpProxyType::HTTPS(auth) => https_connect(stream, rpc_host, auth, proxy_domain).await,
|
||||
}
|
||||
.context("error connecting to http/https proxy")
|
||||
}
|
||||
|
||||
async fn http_connect<T>(
|
||||
stream: T,
|
||||
target: (&str, u16),
|
||||
auth: Option<HttpProxyAuthorization<'_>>,
|
||||
) -> Result<Box<dyn AsyncReadWrite>>
|
||||
where
|
||||
T: AsyncReadWrite,
|
||||
{
|
||||
let mut stream = BufStream::new(stream);
|
||||
let request = make_request(target, auth);
|
||||
stream.write_all(request.as_bytes()).await?;
|
||||
stream.flush().await?;
|
||||
check_response(&mut stream).await?;
|
||||
Ok(Box::new(stream))
|
||||
}
|
||||
|
||||
async fn https_connect<T>(
|
||||
stream: T,
|
||||
target: (&str, u16),
|
||||
auth: Option<HttpProxyAuthorization<'_>>,
|
||||
proxy_domain: &str,
|
||||
) -> Result<Box<dyn AsyncReadWrite>>
|
||||
where
|
||||
T: AsyncReadWrite,
|
||||
{
|
||||
let tls_connector = TlsConnector::from(native_tls::TlsConnector::new()?);
|
||||
let stream = tls_connector.connect(proxy_domain, stream).await?;
|
||||
http_connect(stream, target, auth).await
|
||||
}
|
||||
|
||||
fn make_request(target: (&str, u16), auth: Option<HttpProxyAuthorization<'_>>) -> String {
|
||||
let (host, port) = target;
|
||||
let mut request = format!(
|
||||
"CONNECT {host}:{port} HTTP/1.1\r\nHost: {host}:{port}\r\nProxy-Connection: Keep-Alive\r\n"
|
||||
);
|
||||
if let Some(HttpProxyAuthorization { username, password }) = auth {
|
||||
let auth =
|
||||
base64::prelude::BASE64_STANDARD.encode(format!("{username}:{password}").as_bytes());
|
||||
let auth = format!("Proxy-Authorization: Basic {auth}\r\n");
|
||||
request.push_str(&auth);
|
||||
}
|
||||
request.push_str("\r\n");
|
||||
request
|
||||
}
|
||||
|
||||
async fn check_response<T>(stream: &mut BufStream<T>) -> Result<()>
|
||||
where
|
||||
T: AsyncReadWrite,
|
||||
{
|
||||
let response = recv_response(stream).await?;
|
||||
let mut dummy_headers = [EMPTY_HEADER; MAX_RESPONSE_HEADERS];
|
||||
let mut parser = Response::new(&mut dummy_headers);
|
||||
parser.parse(response.as_bytes())?;
|
||||
|
||||
match parser.code {
|
||||
Some(code) => {
|
||||
if code == 200 {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(anyhow::anyhow!(
|
||||
"Proxy connection failed with HTTP code: {code}"
|
||||
))
|
||||
}
|
||||
}
|
||||
None => Err(anyhow::anyhow!(
|
||||
"Proxy connection failed with no HTTP code: {}",
|
||||
parser.reason.unwrap_or("Unknown reason")
|
||||
)),
|
||||
}
|
||||
}
|
||||
|
||||
const MAX_RESPONSE_HEADER_LENGTH: usize = 4096;
|
||||
const MAX_RESPONSE_HEADERS: usize = 16;
|
||||
|
||||
async fn recv_response<T>(stream: &mut BufStream<T>) -> Result<String>
|
||||
where
|
||||
T: AsyncReadWrite,
|
||||
{
|
||||
let mut response = String::new();
|
||||
loop {
|
||||
if stream.read_line(&mut response).await? == 0 {
|
||||
return Err(anyhow::anyhow!("End of stream"));
|
||||
}
|
||||
|
||||
if MAX_RESPONSE_HEADER_LENGTH < response.len() {
|
||||
return Err(anyhow::anyhow!("Maximum response header length exceeded"));
|
||||
}
|
||||
|
||||
if response.ends_with("\r\n\r\n") {
|
||||
return Ok(response);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
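Note: the removed `http_proxy.rs` above tunnels through an HTTP proxy by sending a CONNECT request and requiring a 200 response. A small standalone sketch of building that request string with optional Basic proxy auth, mirroring `make_request` above; the host, port, and credentials in `main` are example values only:

```rust
use base64::Engine;

fn make_connect_request(host: &str, port: u16, auth: Option<(&str, &str)>) -> String {
    let mut request = format!(
        "CONNECT {host}:{port} HTTP/1.1\r\nHost: {host}:{port}\r\nProxy-Connection: Keep-Alive\r\n"
    );
    if let Some((username, password)) = auth {
        // Basic auth is the base64 of "username:password".
        let encoded =
            base64::prelude::BASE64_STANDARD.encode(format!("{username}:{password}").as_bytes());
        request.push_str(&format!("Proxy-Authorization: Basic {encoded}\r\n"));
    }
    request.push_str("\r\n"); // blank line terminates the header block
    request
}

fn main() {
    let req = make_connect_request("collab.example.com", 443, Some(("user", "secret")));
    assert!(req.starts_with("CONNECT collab.example.com:443 HTTP/1.1\r\n"));
    assert!(req.ends_with("\r\n\r\n"));
    print!("{req}");
}
```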
#[cfg(test)]
|
||||
mod tests {
|
||||
use url::Url;
|
||||
|
||||
use super::{HttpProxyAuthorization, HttpProxyType, parse_http_proxy};
|
||||
|
||||
#[test]
|
||||
fn test_parse_http_proxy() {
|
||||
let proxy = Url::parse("http://proxy.example.com:1080").unwrap();
|
||||
let scheme = proxy.scheme();
|
||||
|
||||
let version = parse_http_proxy(scheme, &proxy);
|
||||
assert!(matches!(version, HttpProxyType::HTTP(None)))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_http_proxy_with_auth() {
|
||||
let proxy = Url::parse("http://username:password@proxy.example.com:1080").unwrap();
|
||||
let scheme = proxy.scheme();
|
||||
|
||||
let version = parse_http_proxy(scheme, &proxy);
|
||||
assert!(matches!(
|
||||
version,
|
||||
HttpProxyType::HTTP(Some(HttpProxyAuthorization {
|
||||
username: "username",
|
||||
password: "password"
|
||||
}))
|
||||
))
|
||||
}
|
||||
}
|
||||
@@ -1,131 +0,0 @@
|
||||
//! socks proxy
|
||||
|
||||
use anyhow::{Context, Result};
|
||||
use tokio::net::TcpStream;
|
||||
use tokio_socks::tcp::{Socks4Stream, Socks5Stream};
|
||||
use url::Url;
|
||||
|
||||
use super::AsyncReadWrite;
|
||||
|
||||
/// Identification to a Socks V4 Proxy
|
||||
pub(super) struct Socks4Identification<'a> {
|
||||
user_id: &'a str,
|
||||
}
|
||||
|
||||
/// Authorization to a Socks V5 Proxy
|
||||
pub(super) struct Socks5Authorization<'a> {
|
||||
username: &'a str,
|
||||
password: &'a str,
|
||||
}
|
||||
|
||||
/// Socks Proxy Protocol Version
|
||||
///
|
||||
/// V4 allows identification using a user_id
|
||||
/// V5 allows authorization using a username and password
|
||||
pub(super) enum SocksVersion<'a> {
|
||||
V4(Option<Socks4Identification<'a>>),
|
||||
V5(Option<Socks5Authorization<'a>>),
|
||||
}
|
||||
|
||||
pub(super) fn parse_socks_proxy<'t>(scheme: &str, proxy: &'t Url) -> SocksVersion<'t> {
|
||||
if scheme.starts_with("socks4") {
|
||||
let identification = match proxy.username() {
|
||||
"" => None,
|
||||
username => Some(Socks4Identification { user_id: username }),
|
||||
};
|
||||
SocksVersion::V4(identification)
|
||||
} else {
|
||||
let authorization = proxy.password().map(|password| Socks5Authorization {
|
||||
username: proxy.username(),
|
||||
password,
|
||||
});
|
||||
SocksVersion::V5(authorization)
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) async fn connect_socks_proxy_stream(
|
||||
stream: TcpStream,
|
||||
socks_version: SocksVersion<'_>,
|
||||
rpc_host: (&str, u16),
|
||||
) -> Result<Box<dyn AsyncReadWrite>> {
|
||||
match socks_version {
|
||||
SocksVersion::V4(None) => {
|
||||
let socks = Socks4Stream::connect_with_socket(stream, rpc_host)
|
||||
.await
|
||||
.context("error connecting to socks")?;
|
||||
Ok(Box::new(socks))
|
||||
}
|
||||
SocksVersion::V4(Some(Socks4Identification { user_id })) => {
|
||||
let socks = Socks4Stream::connect_with_userid_and_socket(stream, rpc_host, user_id)
|
||||
.await
|
||||
.context("error connecting to socks")?;
|
||||
Ok(Box::new(socks))
|
||||
}
|
||||
SocksVersion::V5(None) => {
|
||||
let socks = Socks5Stream::connect_with_socket(stream, rpc_host)
|
||||
.await
|
||||
.context("error connecting to socks")?;
|
||||
Ok(Box::new(socks))
|
||||
}
|
||||
SocksVersion::V5(Some(Socks5Authorization { username, password })) => {
|
||||
let socks = Socks5Stream::connect_with_password_and_socket(
|
||||
stream, rpc_host, username, password,
|
||||
)
|
||||
.await
|
||||
.context("error connecting to socks")?;
|
||||
Ok(Box::new(socks))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use url::Url;
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn parse_socks4() {
|
||||
let proxy = Url::parse("socks4://proxy.example.com:1080").unwrap();
|
||||
let scheme = proxy.scheme();
|
||||
|
||||
let version = parse_socks_proxy(scheme, &proxy);
|
||||
assert!(matches!(version, SocksVersion::V4(None)))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_socks4_with_identification() {
|
||||
let proxy = Url::parse("socks4://userid@proxy.example.com:1080").unwrap();
|
||||
let scheme = proxy.scheme();
|
||||
|
||||
let version = parse_socks_proxy(scheme, &proxy);
|
||||
assert!(matches!(
|
||||
version,
|
||||
SocksVersion::V4(Some(Socks4Identification { user_id: "userid" }))
|
||||
))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_socks5() {
|
||||
let proxy = Url::parse("socks5://proxy.example.com:1080").unwrap();
|
||||
let scheme = proxy.scheme();
|
||||
|
||||
let version = parse_socks_proxy(scheme, &proxy);
|
||||
assert!(matches!(version, SocksVersion::V5(None)))
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_socks5_with_authorization() {
|
||||
let proxy = Url::parse("socks5://username:password@proxy.example.com:1080").unwrap();
|
||||
let scheme = proxy.scheme();
|
||||
|
||||
let version = parse_socks_proxy(scheme, &proxy);
|
||||
assert!(matches!(
|
||||
version,
|
||||
SocksVersion::V5(Some(Socks5Authorization {
|
||||
username: "username",
|
||||
password: "password"
|
||||
}))
|
||||
))
|
||||
}
|
||||
}
|
||||
crates/client/src/socks.rs (new file, 68 lines)
@@ -0,0 +1,68 @@
//! socks proxy
use anyhow::{Result, anyhow};
use http_client::Url;
use tokio_socks::tcp::{Socks4Stream, Socks5Stream};

pub(crate) async fn connect_socks_proxy_stream(
    proxy: Option<&Url>,
    rpc_host: (&str, u16),
) -> Result<Box<dyn AsyncReadWrite>> {
    let stream = match parse_socks_proxy(proxy) {
        Some((socks_proxy, SocksVersion::V4)) => {
            let stream = Socks4Stream::connect_with_socket(
                tokio::net::TcpStream::connect(socks_proxy).await?,
                rpc_host,
            )
            .await
            .map_err(|err| anyhow!("error connecting to socks {}", err))?;
            Box::new(stream) as Box<dyn AsyncReadWrite>
        }
        Some((socks_proxy, SocksVersion::V5)) => Box::new(
            Socks5Stream::connect_with_socket(
                tokio::net::TcpStream::connect(socks_proxy).await?,
                rpc_host,
            )
            .await
            .map_err(|err| anyhow!("error connecting to socks {}", err))?,
        ) as Box<dyn AsyncReadWrite>,
        None => {
            Box::new(tokio::net::TcpStream::connect(rpc_host).await?) as Box<dyn AsyncReadWrite>
        }
    };
    Ok(stream)
}

fn parse_socks_proxy(proxy: Option<&Url>) -> Option<((String, u16), SocksVersion)> {
    let proxy_url = proxy?;
    let scheme = proxy_url.scheme();
    let socks_version = if scheme.starts_with("socks4") {
        // socks4
        SocksVersion::V4
    } else if scheme.starts_with("socks") {
        // socks, socks5
        SocksVersion::V5
    } else {
        return None;
    };
    if let Some((host, port)) = proxy_url.host().zip(proxy_url.port_or_known_default()) {
        Some(((host.to_string(), port), socks_version))
    } else {
        None
    }
}

// private helper structs and traits

enum SocksVersion {
    V4,
    V5,
}

pub(crate) trait AsyncReadWrite:
    tokio::io::AsyncRead + tokio::io::AsyncWrite + Unpin + Send + 'static
{
}
impl<T: tokio::io::AsyncRead + tokio::io::AsyncWrite + Unpin + Send + 'static> AsyncReadWrite
    for T
{
}
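Note: the new `socks.rs` above picks SOCKS4 vs SOCKS5 purely from the proxy URL's scheme, and unlike the removed `socks_proxy.rs` it ships without tests. A small standalone check of that scheme-based selection; the version-selection logic is re-implemented here because `parse_socks_proxy` is private to the crate:

```rust
use url::Url;

#[derive(Debug, PartialEq)]
enum SocksVersion { V4, V5 }

// Re-implementation of the scheme check from `parse_socks_proxy` above.
fn socks_version_for(proxy: &Url) -> Option<(String, u16, SocksVersion)> {
    let scheme = proxy.scheme();
    let version = if scheme.starts_with("socks4") {
        SocksVersion::V4
    } else if scheme.starts_with("socks") {
        // "socks" and "socks5" are both treated as SOCKS5.
        SocksVersion::V5
    } else {
        return None;
    };
    let host = proxy.host()?.to_string();
    let port = proxy.port_or_known_default()?;
    Some((host, port, version))
}

fn main() {
    let v4 = Url::parse("socks4://proxy.example.com:1080").unwrap();
    let v5 = Url::parse("socks5://proxy.example.com:1080").unwrap();
    assert_eq!(socks_version_for(&v4).unwrap().2, SocksVersion::V4);
    assert_eq!(socks_version_for(&v5).unwrap().2, SocksVersion::V5);
    assert!(socks_version_for(&Url::parse("http://example.com").unwrap()).is_none());
}
```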
@@ -137,14 +137,18 @@ pub fn os_version() -> String {
log::error!("Failed to load /etc/os-release, /usr/lib/os-release");
"".to_string()
};
let mut name = "unknown";
let mut version = "unknown";
let mut name = "unknown".to_string();
let mut version = "unknown".to_string();

for line in content.lines() {
match line.split_once('=') {
Some(("ID", val)) => name = val.trim_matches('"'),
Some(("VERSION_ID", val)) => version = val.trim_matches('"'),
_ => {}
if line.starts_with("ID=") {
name = line.trim_start_matches("ID=").trim_matches('"').to_string();
}
if line.starts_with("VERSION_ID=") {
version = line
.trim_start_matches("VERSION_ID=")
.trim_matches('"')
.to_string();
}
}

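Note: the hunk above reworks the `/etc/os-release` parsing; one side uses `split_once('=')`, the other `starts_with`/`trim_start_matches`. A standalone sketch of the `split_once` form against a sample file body (the sample content is illustrative):

```rust
fn parse_os_release(content: &str) -> (String, String) {
    let mut name = "unknown".to_string();
    let mut version = "unknown".to_string();
    for line in content.lines() {
        // Each interesting line looks like `KEY=value` or `KEY="value"`.
        match line.split_once('=') {
            Some(("ID", val)) => name = val.trim_matches('"').to_string(),
            Some(("VERSION_ID", val)) => version = val.trim_matches('"').to_string(),
            _ => {}
        }
    }
    (name, version)
}

fn main() {
    let sample = "NAME=\"Ubuntu\"\nID=ubuntu\nVERSION_ID=\"24.04\"\n";
    assert_eq!(parse_os_release(sample), ("ubuntu".into(), "24.04".into()));
}
```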
@@ -218,7 +222,7 @@ impl Telemetry {
|
||||
cx.background_spawn({
|
||||
let state = state.clone();
|
||||
let os_version = os_version();
|
||||
state.lock().os_version = Some(os_version);
|
||||
state.lock().os_version = Some(os_version.clone());
|
||||
async move {
|
||||
if let Some(tempfile) = File::create(Self::log_file_path()).log_err() {
|
||||
state.lock().log_file = Some(tempfile);
|
||||
@@ -365,7 +369,7 @@ impl Telemetry {
|
||||
telemetry::event!(
|
||||
"Editor Edited",
|
||||
duration = duration,
|
||||
environment = environment,
|
||||
environment = environment.to_string(),
|
||||
is_via_ssh = is_via_ssh
|
||||
);
|
||||
}
|
||||
@@ -427,8 +431,9 @@ impl Telemetry {
|
||||
|
||||
if state.flush_events_task.is_none() {
|
||||
let this = self.clone();
|
||||
let executor = self.executor.clone();
|
||||
state.flush_events_task = Some(self.executor.spawn(async move {
|
||||
this.executor.timer(FLUSH_INTERVAL).await;
|
||||
executor.timer(FLUSH_INTERVAL).await;
|
||||
this.flush_events().detach();
|
||||
}));
|
||||
}
|
||||
@@ -479,12 +484,12 @@ impl Telemetry {
|
||||
self: &Arc<Self>,
|
||||
// We take in the JSON bytes buffer so we can reuse the existing allocation.
|
||||
mut json_bytes: Vec<u8>,
|
||||
event_request: &EventRequestBody,
|
||||
event_request: EventRequestBody,
|
||||
) -> Result<Request<AsyncBody>> {
|
||||
json_bytes.clear();
|
||||
serde_json::to_writer(&mut json_bytes, event_request)?;
|
||||
serde_json::to_writer(&mut json_bytes, &event_request)?;
|
||||
|
||||
let checksum = calculate_json_checksum(&json_bytes).unwrap_or_default();
|
||||
let checksum = calculate_json_checksum(&json_bytes).unwrap_or("".to_string());
|
||||
|
||||
Ok(Request::builder()
|
||||
.method(Method::POST)
|
||||
@@ -501,7 +506,7 @@ impl Telemetry {
|
||||
pub fn flush_events(self: &Arc<Self>) -> Task<()> {
|
||||
let mut state = self.state.lock();
|
||||
state.first_event_date_time = None;
|
||||
let events = mem::take(&mut state.events_queue);
|
||||
let mut events = mem::take(&mut state.events_queue);
|
||||
state.flush_events_task.take();
|
||||
drop(state);
|
||||
if events.is_empty() {
|
||||
@@ -514,7 +519,7 @@ impl Telemetry {
|
||||
let mut json_bytes = Vec::new();
|
||||
|
||||
if let Some(file) = &mut this.state.lock().log_file {
|
||||
for event in &events {
|
||||
for event in &mut events {
|
||||
json_bytes.clear();
|
||||
serde_json::to_writer(&mut json_bytes, event)?;
|
||||
file.write_all(&json_bytes)?;
|
||||
@@ -541,7 +546,7 @@ impl Telemetry {
|
||||
}
|
||||
};
|
||||
|
||||
let request = this.build_request(json_bytes, &request_body)?;
|
||||
let request = this.build_request(json_bytes, request_body)?;
|
||||
let response = this.http_client.send(request).await?;
|
||||
if response.status() != 200 {
|
||||
log::error!("Failed to send events: HTTP {:?}", response.status());
|
||||
|
||||
@@ -107,7 +107,6 @@ impl FakeServer {
client
.authenticate_and_connect(false, &cx.to_async())
.await
.into_response()
.unwrap();

server

@@ -1,23 +0,0 @@
|
||||
create table subscription_usages_v2 (
|
||||
id uuid primary key,
|
||||
user_id integer not null,
|
||||
period_start_at timestamp without time zone not null,
|
||||
period_end_at timestamp without time zone not null,
|
||||
plan text not null,
|
||||
model_requests int not null default 0,
|
||||
edit_predictions int not null default 0
|
||||
);
|
||||
|
||||
create unique index uix_subscription_usages_v2_on_user_id_start_at_end_at on subscription_usages_v2 (user_id, period_start_at, period_end_at);
|
||||
|
||||
create index ix_subscription_usages_v2_on_plan on subscription_usages_v2 (plan);
|
||||
|
||||
create table subscription_usage_meters_v2 (
|
||||
id uuid primary key,
|
||||
subscription_usage_id uuid not null references subscription_usages_v2 (id) on delete cascade,
|
||||
model_id integer not null references models (id) on delete cascade,
|
||||
mode text not null,
|
||||
requests integer not null default 0
|
||||
);
|
||||
|
||||
create unique index uix_subscription_usage_meters_v2_on_usage_model_mode on subscription_usage_meters_v2 (subscription_usage_id, model_id, mode);
|
||||
@@ -1,2 +0,0 @@
|
||||
drop table subscription_usage_meters;
|
||||
drop table subscription_usages;
|
||||
@@ -18,8 +18,7 @@ use stripe::{
|
||||
CreateBillingPortalSessionFlowDataSubscriptionUpdateConfirm,
|
||||
CreateBillingPortalSessionFlowDataSubscriptionUpdateConfirmItems,
|
||||
CreateBillingPortalSessionFlowDataType, CreateCustomer, Customer, CustomerId, EventObject,
|
||||
EventType, Expandable, ListEvents, PaymentMethod, Subscription, SubscriptionId,
|
||||
SubscriptionStatus,
|
||||
EventType, Expandable, ListEvents, Subscription, SubscriptionId, SubscriptionStatus,
|
||||
};
|
||||
use util::{ResultExt, maybe};
|
||||
|
||||
@@ -28,9 +27,7 @@ use crate::db::billing_subscription::{
|
||||
StripeCancellationReason, StripeSubscriptionStatus, SubscriptionKind,
|
||||
};
|
||||
use crate::llm::db::subscription_usage_meter::CompletionMode;
|
||||
use crate::llm::{
|
||||
AGENT_EXTENDED_TRIAL_FEATURE_FLAG, DEFAULT_MAX_MONTHLY_SPEND, FREE_TIER_MONTHLY_SPENDING_LIMIT,
|
||||
};
|
||||
use crate::llm::{DEFAULT_MAX_MONTHLY_SPEND, FREE_TIER_MONTHLY_SPENDING_LIMIT};
|
||||
use crate::rpc::{ResultExt as _, Server};
|
||||
use crate::{AppState, Cents, Error, Result};
|
||||
use crate::{db::UserId, llm::db::LlmDatabase};
|
||||
@@ -57,14 +54,6 @@ pub fn router() -> Router {
|
||||
"/billing/subscriptions/manage",
|
||||
post(manage_billing_subscription),
|
||||
)
|
||||
.route(
|
||||
"/billing/subscriptions/migrate",
|
||||
post(migrate_to_new_billing),
|
||||
)
|
||||
.route(
|
||||
"/billing/subscriptions/sync",
|
||||
post(sync_billing_subscription),
|
||||
)
|
||||
.route("/billing/monthly_spend", get(get_monthly_spend))
|
||||
.route("/billing/usage", get(get_current_usage))
|
||||
}
|
||||
@@ -76,7 +65,6 @@ struct GetBillingPreferencesParams {
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
struct BillingPreferencesResponse {
|
||||
trial_started_at: Option<String>,
|
||||
max_monthly_llm_usage_spending_in_cents: i32,
|
||||
model_request_overages_enabled: bool,
|
||||
model_request_overages_spend_limit_in_cents: i32,
|
||||
@@ -92,17 +80,9 @@ async fn get_billing_preferences(
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("user not found"))?;
|
||||
|
||||
let billing_customer = app.db.get_billing_customer_by_user_id(user.id).await?;
|
||||
let preferences = app.db.get_billing_preferences(user.id).await?;
|
||||
|
||||
Ok(Json(BillingPreferencesResponse {
|
||||
trial_started_at: billing_customer
|
||||
.and_then(|billing_customer| billing_customer.trial_started_at)
|
||||
.map(|trial_started_at| {
|
||||
trial_started_at
|
||||
.and_utc()
|
||||
.to_rfc3339_opts(SecondsFormat::Millis, true)
|
||||
}),
|
||||
max_monthly_llm_usage_spending_in_cents: preferences
|
||||
.as_ref()
|
||||
.map_or(DEFAULT_MAX_MONTHLY_SPEND.0 as i32, |preferences| {
|
||||
@@ -141,8 +121,6 @@ async fn update_billing_preferences(
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("user not found"))?;
|
||||
|
||||
let billing_customer = app.db.get_billing_customer_by_user_id(user.id).await?;
|
||||
|
||||
let max_monthly_llm_usage_spending_in_cents =
|
||||
body.max_monthly_llm_usage_spending_in_cents.max(0);
|
||||
let model_request_overages_spend_limit_in_cents =
|
||||
@@ -198,13 +176,6 @@ async fn update_billing_preferences(
|
||||
rpc_server.refresh_llm_tokens_for_user(user.id).await;
|
||||
|
||||
Ok(Json(BillingPreferencesResponse {
|
||||
trial_started_at: billing_customer
|
||||
.and_then(|billing_customer| billing_customer.trial_started_at)
|
||||
.map(|trial_started_at| {
|
||||
trial_started_at
|
||||
.and_utc()
|
||||
.to_rfc3339_opts(SecondsFormat::Millis, true)
|
||||
}),
|
||||
max_monthly_llm_usage_spending_in_cents: billing_preferences
|
||||
.max_monthly_llm_usage_spending_in_cents,
|
||||
model_request_overages_enabled: billing_preferences.model_request_overages_enabled,
|
||||
@@ -285,7 +256,6 @@ async fn list_billing_subscriptions(
|
||||
enum ProductCode {
|
||||
ZedPro,
|
||||
ZedProTrial,
|
||||
ZedFree,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
@@ -324,6 +294,13 @@ async fn create_billing_subscription(
|
||||
"not supported".into(),
|
||||
))?
|
||||
};
|
||||
let Some(llm_db) = app.llm_db.clone() else {
|
||||
log::error!("failed to retrieve LLM database");
|
||||
Err(Error::http(
|
||||
StatusCode::NOT_IMPLEMENTED,
|
||||
"not supported".into(),
|
||||
))?
|
||||
};
|
||||
|
||||
if app.db.has_active_billing_subscription(user.id).await? {
|
||||
return Err(Error::http(
|
||||
@@ -385,7 +362,12 @@ async fn create_billing_subscription(
|
||||
let checkout_session_url = match body.product {
|
||||
Some(ProductCode::ZedPro) => {
|
||||
stripe_billing
|
||||
.checkout_with_zed_pro(customer_id, &user.github_login, &success_url)
|
||||
.checkout_with_price(
|
||||
app.config.zed_pro_price_id()?,
|
||||
customer_id,
|
||||
&user.github_login,
|
||||
&success_url,
|
||||
)
|
||||
.await?
|
||||
}
|
||||
Some(ProductCode::ZedProTrial) => {
|
||||
@@ -402,6 +384,7 @@ async fn create_billing_subscription(
|
||||
|
||||
stripe_billing
|
||||
.checkout_with_zed_pro_trial(
|
||||
app.config.zed_pro_price_id()?,
|
||||
customer_id,
|
||||
&user.github_login,
|
||||
feature_flags,
|
||||
@@ -409,16 +392,17 @@ async fn create_billing_subscription(
|
||||
)
|
||||
.await?
|
||||
}
|
||||
Some(ProductCode::ZedFree) => {
|
||||
stripe_billing
|
||||
.checkout_with_zed_free(customer_id, &user.github_login, &success_url)
|
||||
.await?
|
||||
}
|
||||
None => {
|
||||
return Err(Error::http(
|
||||
StatusCode::BAD_REQUEST,
|
||||
"No product selected".into(),
|
||||
));
|
||||
let default_model = llm_db.model(
|
||||
zed_llm_client::LanguageModelProvider::Anthropic,
|
||||
"claude-3-7-sonnet",
|
||||
)?;
|
||||
let stripe_model = stripe_billing
|
||||
.register_model_for_token_based_usage(default_model)
|
||||
.await?;
|
||||
stripe_billing
|
||||
.checkout(customer_id, &user.github_login, &stripe_model, &success_url)
|
||||
.await?
|
||||
}
|
||||
};
|
||||
|
||||
@@ -434,8 +418,6 @@ enum ManageSubscriptionIntent {
|
||||
///
|
||||
/// This will open the Stripe billing portal without putting the user in a specific flow.
|
||||
ManageSubscription,
|
||||
/// The user intends to update their payment method.
|
||||
UpdatePaymentMethod,
|
||||
/// The user intends to upgrade to Zed Pro.
|
||||
UpgradeToPro,
|
||||
/// The user intends to cancel their subscription.
|
||||
@@ -450,7 +432,6 @@ struct ManageBillingSubscriptionBody {
|
||||
intent: ManageSubscriptionIntent,
|
||||
/// The ID of the subscription to manage.
|
||||
subscription_id: BillingSubscriptionId,
|
||||
redirect_to: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
@@ -477,14 +458,6 @@ async fn manage_billing_subscription(
|
||||
))?
|
||||
};
|
||||
|
||||
let Some(stripe_billing) = app.stripe_billing.clone() else {
|
||||
log::error!("failed to retrieve Stripe billing object");
|
||||
Err(Error::http(
|
||||
StatusCode::NOT_IMPLEMENTED,
|
||||
"not supported".into(),
|
||||
))?
|
||||
};
|
||||
|
||||
let customer = app
|
||||
.db
|
||||
.get_billing_customer_by_user_id(user.id)
|
||||
@@ -535,8 +508,8 @@ async fn manage_billing_subscription(
|
||||
let flow = match body.intent {
|
||||
ManageSubscriptionIntent::ManageSubscription => None,
|
||||
ManageSubscriptionIntent::UpgradeToPro => {
|
||||
let zed_pro_price_id = stripe_billing.zed_pro_price_id().await?;
|
||||
let zed_free_price_id = stripe_billing.zed_free_price_id().await?;
|
||||
let zed_pro_price_id = app.config.zed_pro_price_id()?;
|
||||
let zed_free_price_id = app.config.zed_free_price_id()?;
|
||||
|
||||
let stripe_subscription =
|
||||
Subscription::retrieve(&stripe_client, &subscription_id, &[]).await?;
|
||||
@@ -548,23 +521,6 @@ async fn manage_billing_subscription(
|
||||
.map_or(false, |price| price.id == zed_pro_price_id)
|
||||
});
|
||||
if is_on_zed_pro_trial {
|
||||
let payment_methods = PaymentMethod::list(
|
||||
&stripe_client,
|
||||
&stripe::ListPaymentMethods {
|
||||
customer: Some(stripe_subscription.customer.id()),
|
||||
..Default::default()
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
|
||||
let has_payment_method = !payment_methods.data.is_empty();
|
||||
if !has_payment_method {
|
||||
return Err(Error::http(
|
||||
StatusCode::BAD_REQUEST,
|
||||
"missing payment method".into(),
|
||||
));
|
||||
}
|
||||
|
||||
// If the user is already on a Zed Pro trial and wants to upgrade to Pro, we just need to end their trial early.
|
||||
Subscription::update(
|
||||
&stripe_client,
|
||||
@@ -614,21 +570,6 @@ async fn manage_billing_subscription(
|
||||
..Default::default()
|
||||
})
|
||||
}
|
||||
ManageSubscriptionIntent::UpdatePaymentMethod => Some(CreateBillingPortalSessionFlowData {
|
||||
type_: CreateBillingPortalSessionFlowDataType::PaymentMethodUpdate,
|
||||
after_completion: Some(CreateBillingPortalSessionFlowDataAfterCompletion {
|
||||
type_: stripe::CreateBillingPortalSessionFlowDataAfterCompletionType::Redirect,
|
||||
redirect: Some(CreateBillingPortalSessionFlowDataAfterCompletionRedirect {
|
||||
return_url: format!(
|
||||
"{}{path}",
|
||||
app.config.zed_dot_dev_url(),
|
||||
path = body.redirect_to.unwrap_or_else(|| "/account".to_string())
|
||||
),
|
||||
}),
|
||||
..Default::default()
|
||||
}),
|
||||
..Default::default()
|
||||
}),
|
||||
ManageSubscriptionIntent::Cancel => Some(CreateBillingPortalSessionFlowData {
|
||||
type_: CreateBillingPortalSessionFlowDataType::SubscriptionCancel,
|
||||
after_completion: Some(CreateBillingPortalSessionFlowDataAfterCompletion {
|
||||
@@ -661,153 +602,6 @@ async fn manage_billing_subscription(
|
||||
}))
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct MigrateToNewBillingBody {
|
||||
github_user_id: i32,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
struct MigrateToNewBillingResponse {
|
||||
/// The ID of the subscription that was canceled.
|
||||
canceled_subscription_id: Option<String>,
|
||||
}
|
||||
|
||||
async fn migrate_to_new_billing(
|
||||
Extension(app): Extension<Arc<AppState>>,
|
||||
extract::Json(body): extract::Json<MigrateToNewBillingBody>,
|
||||
) -> Result<Json<MigrateToNewBillingResponse>> {
|
||||
let Some(stripe_client) = app.stripe_client.clone() else {
|
||||
log::error!("failed to retrieve Stripe client");
|
||||
Err(Error::http(
|
||||
StatusCode::NOT_IMPLEMENTED,
|
||||
"not supported".into(),
|
||||
))?
|
||||
};
|
||||
|
||||
let user = app
|
||||
.db
|
||||
.get_user_by_github_user_id(body.github_user_id)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("user not found"))?;
|
||||
|
||||
let old_billing_subscriptions_by_user = app
|
||||
.db
|
||||
.get_active_billing_subscriptions(HashSet::from_iter([user.id]))
|
||||
.await?;
|
||||
|
||||
let canceled_subscription_id = if let Some((_billing_customer, billing_subscription)) =
|
||||
old_billing_subscriptions_by_user.get(&user.id)
|
||||
{
|
||||
let stripe_subscription_id = billing_subscription
|
||||
.stripe_subscription_id
|
||||
.parse::<stripe::SubscriptionId>()
|
||||
.context("failed to parse Stripe subscription ID from database")?;
|
||||
|
||||
Subscription::cancel(
|
||||
&stripe_client,
|
||||
&stripe_subscription_id,
|
||||
stripe::CancelSubscription {
|
||||
invoice_now: Some(true),
|
||||
..Default::default()
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
|
||||
Some(stripe_subscription_id)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let all_feature_flags = app.db.list_feature_flags().await?;
|
||||
let user_feature_flags = app.db.get_user_flags(user.id).await?;
|
||||
|
||||
for feature_flag in ["new-billing", "assistant2"] {
|
||||
let already_in_feature_flag = user_feature_flags.iter().any(|flag| flag == feature_flag);
|
||||
if already_in_feature_flag {
|
||||
continue;
|
||||
}
|
||||
|
||||
let feature_flag = all_feature_flags
|
||||
.iter()
|
||||
.find(|flag| flag.flag == feature_flag)
|
||||
.context("failed to find feature flag: {feature_flag:?}")?;
|
||||
|
||||
app.db.add_user_flag(user.id, feature_flag.id).await?;
|
||||
}
|
||||
|
||||
Ok(Json(MigrateToNewBillingResponse {
|
||||
canceled_subscription_id: canceled_subscription_id
|
||||
.map(|subscription_id| subscription_id.to_string()),
|
||||
}))
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct SyncBillingSubscriptionBody {
|
||||
github_user_id: i32,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
struct SyncBillingSubscriptionResponse {
|
||||
stripe_customer_id: String,
|
||||
}
|
||||
|
||||
async fn sync_billing_subscription(
|
||||
Extension(app): Extension<Arc<AppState>>,
|
||||
extract::Json(body): extract::Json<SyncBillingSubscriptionBody>,
|
||||
) -> Result<Json<SyncBillingSubscriptionResponse>> {
|
||||
let Some(stripe_client) = app.stripe_client.clone() else {
|
||||
log::error!("failed to retrieve Stripe client");
|
||||
Err(Error::http(
|
||||
StatusCode::NOT_IMPLEMENTED,
|
||||
"not supported".into(),
|
||||
))?
|
||||
};
|
||||
|
||||
let user = app
|
||||
.db
|
||||
.get_user_by_github_user_id(body.github_user_id)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("user not found"))?;
|
||||
|
||||
let billing_customer = app
|
||||
.db
|
||||
.get_billing_customer_by_user_id(user.id)
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("billing customer not found"))?;
|
||||
let stripe_customer_id = billing_customer
|
||||
.stripe_customer_id
|
||||
.parse::<stripe::CustomerId>()
|
||||
.context("failed to parse Stripe customer ID from database")?;
|
||||
|
||||
let subscriptions = Subscription::list(
|
||||
&stripe_client,
|
||||
&stripe::ListSubscriptions {
|
||||
customer: Some(stripe_customer_id),
|
||||
// Sync all non-canceled subscriptions.
|
||||
status: None,
|
||||
..Default::default()
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
|
||||
for subscription in subscriptions.data {
|
||||
let subscription_id = subscription.id.clone();
|
||||
|
||||
sync_subscription(&app, &stripe_client, subscription)
|
||||
.await
|
||||
.with_context(|| {
|
||||
format!(
|
||||
"failed to sync subscription {subscription_id} for user {}",
|
||||
user.id,
|
||||
)
|
||||
})?;
|
||||
}
|
||||
|
||||
Ok(Json(SyncBillingSubscriptionResponse {
|
||||
stripe_customer_id: billing_customer.stripe_customer_id.clone(),
|
||||
}))
|
||||
}
|
||||
|
||||
/// The amount of time we wait in between each poll of Stripe events.
|
||||
///
|
||||
/// This value should strike a balance between:
|
||||
@@ -1050,19 +844,39 @@ async fn handle_customer_event(
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn sync_subscription(
|
||||
async fn handle_customer_subscription_event(
|
||||
app: &Arc<AppState>,
|
||||
rpc_server: &Arc<Server>,
|
||||
stripe_client: &stripe::Client,
|
||||
subscription: stripe::Subscription,
|
||||
) -> anyhow::Result<billing_customer::Model> {
|
||||
let subscription_kind = if let Some(stripe_billing) = &app.stripe_billing {
|
||||
stripe_billing
|
||||
.determine_subscription_kind(&subscription)
|
||||
.await
|
||||
} else {
|
||||
None
|
||||
event: stripe::Event,
|
||||
) -> anyhow::Result<()> {
|
||||
let EventObject::Subscription(subscription) = event.data.object else {
|
||||
bail!("unexpected event payload for {}", event.id);
|
||||
};
|
||||
|
||||
log::info!("handling Stripe {} event: {}", event.type_, event.id);
|
||||
|
||||
let subscription_kind = maybe!({
|
||||
let zed_pro_price_id = app.config.zed_pro_price_id().ok()?;
|
||||
let zed_free_price_id = app.config.zed_free_price_id().ok()?;
|
||||
|
||||
subscription.items.data.iter().find_map(|item| {
|
||||
let price = item.price.as_ref()?;
|
||||
|
||||
if price.id == zed_pro_price_id {
|
||||
Some(if subscription.status == SubscriptionStatus::Trialing {
|
||||
SubscriptionKind::ZedProTrial
|
||||
} else {
|
||||
SubscriptionKind::ZedPro
|
||||
})
|
||||
} else if price.id == zed_free_price_id {
|
||||
Some(SubscriptionKind::ZedFree)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
});
|
||||
|
||||
let billing_customer =
|
||||
find_or_create_billing_customer(app, stripe_client, subscription.customer)
|
||||
.await?
|
||||
@@ -1112,6 +926,28 @@ async fn sync_subscription(
|
||||
.get_billing_subscription_by_stripe_subscription_id(&subscription.id)
|
||||
.await?
|
||||
{
|
||||
let llm_db = app
|
||||
.llm_db
|
||||
.clone()
|
||||
.ok_or_else(|| anyhow!("LLM DB not initialized"))?;
|
||||
|
||||
let new_period_start_at =
|
||||
chrono::DateTime::from_timestamp(subscription.current_period_start, 0)
|
||||
.ok_or_else(|| anyhow!("No subscription period start"))?;
|
||||
let new_period_end_at =
|
||||
chrono::DateTime::from_timestamp(subscription.current_period_end, 0)
|
||||
.ok_or_else(|| anyhow!("No subscription period end"))?;
|
||||
|
||||
llm_db
|
||||
.transfer_existing_subscription_usage(
|
||||
billing_customer.user_id,
|
||||
&existing_subscription,
|
||||
subscription_kind,
|
||||
new_period_start_at,
|
||||
new_period_end_at,
|
||||
)
|
||||
.await?;
|
||||
|
||||
app.db
|
||||
.update_billing_subscription(
|
||||
existing_subscription.id,
|
||||
@@ -1166,7 +1002,7 @@ async fn sync_subscription(
|
||||
user_id = billing_customer.user_id,
|
||||
subscription_id = subscription.id
|
||||
);
|
||||
return Ok(billing_customer);
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
app.db
|
||||
@@ -1185,29 +1021,6 @@ async fn sync_subscription(
|
||||
.await?;
|
||||
}
|
||||
|
||||
Ok(billing_customer)
|
||||
}
|
||||
|
||||
async fn handle_customer_subscription_event(
|
||||
app: &Arc<AppState>,
|
||||
rpc_server: &Arc<Server>,
|
||||
stripe_client: &stripe::Client,
|
||||
event: stripe::Event,
|
||||
) -> anyhow::Result<()> {
|
||||
let EventObject::Subscription(subscription) = event.data.object else {
|
||||
bail!("unexpected event payload for {}", event.id);
|
||||
};
|
||||
|
||||
log::info!("handling Stripe {} event: {}", event.type_, event.id);
|
||||
|
||||
let billing_customer = sync_subscription(app, stripe_client, subscription).await?;
|
||||
|
||||
// When the user's subscription changes, push down any changes to their plan.
|
||||
rpc_server
|
||||
.update_plan_for_user(billing_customer.user_id)
|
||||
.await
|
||||
.trace_err();
|
||||
|
||||
// When the user's subscription changes, we want to refresh their LLM tokens
|
||||
// to either grant/revoke access.
|
||||
rpc_server
|
||||
@@ -1278,23 +1091,9 @@ struct UsageCounts {
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
struct ModelRequestUsage {
|
||||
pub model: String,
|
||||
pub mode: CompletionMode,
|
||||
pub requests: i32,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
struct CurrentUsage {
|
||||
pub model_requests: UsageCounts,
|
||||
pub model_request_usage: Vec<ModelRequestUsage>,
|
||||
pub edit_predictions: UsageCounts,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, Serialize)]
|
||||
struct GetCurrentUsageResponse {
|
||||
pub plan: String,
|
||||
pub current_usage: Option<CurrentUsage>,
|
||||
pub model_requests: UsageCounts,
|
||||
pub edit_predictions: UsageCounts,
|
||||
}
|
||||
|
||||
async fn get_current_usage(
|
||||
@@ -1307,11 +1106,6 @@ async fn get_current_usage(
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("user not found"))?;
|
||||
|
||||
let feature_flags = app.db.get_user_flags(user.id).await?;
|
||||
let has_extended_trial = feature_flags
|
||||
.iter()
|
||||
.any(|flag| flag == AGENT_EXTENDED_TRIAL_FEATURE_FLAG);
|
||||
|
||||
let Some(llm_db) = app.llm_db.clone() else {
|
||||
return Err(Error::http(
|
||||
StatusCode::NOT_IMPLEMENTED,
|
||||
@@ -1319,8 +1113,21 @@ async fn get_current_usage(
|
||||
));
|
||||
};
|
||||
|
||||
let empty_usage = GetCurrentUsageResponse {
|
||||
model_requests: UsageCounts {
|
||||
used: 0,
|
||||
limit: Some(0),
|
||||
remaining: Some(0),
|
||||
},
|
||||
edit_predictions: UsageCounts {
|
||||
used: 0,
|
||||
limit: Some(0),
|
||||
remaining: Some(0),
|
||||
},
|
||||
};
|
||||
|
||||
let Some(subscription) = app.db.get_active_billing_subscription(user.id).await? else {
|
||||
return Ok(Json(GetCurrentUsageResponse::default()));
|
||||
return Ok(Json(empty_usage));
|
||||
};
|
||||
|
||||
let subscription_period = maybe!({
|
||||
@@ -1331,93 +1138,42 @@ async fn get_current_usage(
|
||||
});
|
||||
|
||||
let Some((period_start_at, period_end_at)) = subscription_period else {
|
||||
return Ok(Json(GetCurrentUsageResponse::default()));
|
||||
return Ok(Json(empty_usage));
|
||||
};
|
||||
|
||||
let usage = llm_db
|
||||
.get_subscription_usage_for_period(user.id, period_start_at, period_end_at)
|
||||
.await?;
|
||||
|
||||
let plan = usage
|
||||
.as_ref()
|
||||
.map(|usage| usage.plan.into())
|
||||
.unwrap_or_else(|| {
|
||||
subscription
|
||||
.kind
|
||||
.map(Into::into)
|
||||
.unwrap_or(zed_llm_client::Plan::ZedFree)
|
||||
});
|
||||
|
||||
let model_requests_limit = match plan.model_requests_limit() {
|
||||
zed_llm_client::UsageLimit::Limited(limit) => {
|
||||
let limit = if plan == zed_llm_client::Plan::ZedProTrial && has_extended_trial {
|
||||
1_000
|
||||
} else {
|
||||
limit
|
||||
};
|
||||
|
||||
Some(limit)
|
||||
}
|
||||
zed_llm_client::UsageLimit::Unlimited => None,
|
||||
let Some(usage) = usage else {
|
||||
return Ok(Json(empty_usage));
|
||||
};
|
||||
|
||||
let edit_predictions_limit = match plan.edit_predictions_limit() {
|
||||
let plan = match usage.plan {
|
||||
SubscriptionKind::ZedPro => zed_llm_client::Plan::ZedPro,
|
||||
SubscriptionKind::ZedProTrial => zed_llm_client::Plan::ZedProTrial,
|
||||
SubscriptionKind::ZedFree => zed_llm_client::Plan::Free,
|
||||
};
|
||||
|
||||
let model_requests_limit = match plan.model_requests_limit() {
|
||||
zed_llm_client::UsageLimit::Limited(limit) => Some(limit),
|
||||
zed_llm_client::UsageLimit::Unlimited => None,
|
||||
};
|
||||
let edit_prediction_limit = match plan.edit_predictions_limit() {
|
||||
zed_llm_client::UsageLimit::Limited(limit) => Some(limit),
|
||||
zed_llm_client::UsageLimit::Unlimited => None,
|
||||
};
|
||||
|
||||
let Some(usage) = usage else {
|
||||
return Ok(Json(GetCurrentUsageResponse {
|
||||
plan: plan.as_str().to_string(),
|
||||
current_usage: Some(CurrentUsage {
|
||||
model_requests: UsageCounts {
|
||||
used: 0,
|
||||
limit: model_requests_limit,
|
||||
remaining: model_requests_limit,
|
||||
},
|
||||
model_request_usage: Vec::new(),
|
||||
edit_predictions: UsageCounts {
|
||||
used: 0,
|
||||
limit: edit_predictions_limit,
|
||||
remaining: edit_predictions_limit,
|
||||
},
|
||||
}),
|
||||
}));
|
||||
};
|
||||
|
||||
let subscription_usage_meters = llm_db
|
||||
.get_current_subscription_usage_meters_for_user(user.id, Utc::now())
|
||||
.await?;
|
||||
|
||||
let model_request_usage = subscription_usage_meters
|
||||
.into_iter()
|
||||
.filter_map(|(usage_meter, _usage)| {
|
||||
let model = llm_db.model_by_id(usage_meter.model_id).ok()?;
|
||||
|
||||
Some(ModelRequestUsage {
|
||||
model: model.name.clone(),
|
||||
mode: usage_meter.mode,
|
||||
requests: usage_meter.requests,
|
||||
})
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
Ok(Json(GetCurrentUsageResponse {
|
||||
plan: plan.as_str().to_string(),
|
||||
current_usage: Some(CurrentUsage {
|
||||
model_requests: UsageCounts {
|
||||
used: usage.model_requests,
|
||||
limit: model_requests_limit,
|
||||
remaining: model_requests_limit.map(|limit| (limit - usage.model_requests).max(0)),
|
||||
},
|
||||
model_request_usage,
|
||||
edit_predictions: UsageCounts {
|
||||
used: usage.edit_predictions,
|
||||
limit: edit_predictions_limit,
|
||||
remaining: edit_predictions_limit
|
||||
.map(|limit| (limit - usage.edit_predictions).max(0)),
|
||||
},
|
||||
}),
|
||||
model_requests: UsageCounts {
|
||||
used: usage.model_requests,
|
||||
limit: model_requests_limit,
|
||||
remaining: model_requests_limit.map(|limit| (limit - usage.model_requests).max(0)),
|
||||
},
|
||||
edit_predictions: UsageCounts {
|
||||
used: usage.edit_predictions,
|
||||
limit: edit_prediction_limit,
|
||||
remaining: edit_prediction_limit.map(|limit| (limit - usage.edit_predictions).max(0)),
|
||||
},
|
||||
}))
|
||||
}
|
||||
|
||||
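Note: the `get_current_usage` hunk above reports used/limit/remaining counts, where an unlimited plan is modeled as no limit and remaining is clamped at zero. A small sketch of that arithmetic (the struct mirrors the `UsageCounts` fields shown above; the numbers in `main` are examples):

```rust
#[derive(Debug, PartialEq)]
struct UsageCounts {
    used: i32,
    limit: Option<i32>,
    remaining: Option<i32>,
}

// `limit` of `None` models an unlimited plan; remaining is clamped at zero.
fn usage_counts(used: i32, limit: Option<i32>) -> UsageCounts {
    UsageCounts {
        used,
        limit,
        remaining: limit.map(|limit| (limit - used).max(0)),
    }
}

fn main() {
    assert_eq!(usage_counts(120, Some(500)).remaining, Some(380));
    assert_eq!(usage_counts(600, Some(500)).remaining, Some(0));
    assert_eq!(usage_counts(600, None).remaining, None);
}
```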
@@ -1493,6 +1249,81 @@ async fn find_or_create_billing_customer(
    Ok(Some(billing_customer))
}

const SYNC_LLM_TOKEN_USAGE_WITH_STRIPE_INTERVAL: Duration = Duration::from_secs(60);

pub fn sync_llm_token_usage_with_stripe_periodically(app: Arc<AppState>) {
    let Some(stripe_billing) = app.stripe_billing.clone() else {
        log::warn!("failed to retrieve Stripe billing object");
        return;
    };
    let Some(llm_db) = app.llm_db.clone() else {
        log::warn!("failed to retrieve LLM database");
        return;
    };

    let executor = app.executor.clone();
    executor.spawn_detached({
        let executor = executor.clone();
        async move {
            loop {
                sync_token_usage_with_stripe(&app, &llm_db, &stripe_billing)
                    .await
                    .context("failed to sync LLM usage to Stripe")
                    .trace_err();
                executor
                    .sleep(SYNC_LLM_TOKEN_USAGE_WITH_STRIPE_INTERVAL)
                    .await;
            }
        }
    });
}

async fn sync_token_usage_with_stripe(
    app: &Arc<AppState>,
    llm_db: &Arc<LlmDatabase>,
    stripe_billing: &Arc<StripeBilling>,
) -> anyhow::Result<()> {
    let events = llm_db.get_billing_events().await?;
    let user_ids = events
        .iter()
        .map(|(event, _)| event.user_id)
        .collect::<HashSet<UserId>>();
    let stripe_subscriptions = app.db.get_active_billing_subscriptions(user_ids).await?;

    for (event, model) in events {
        let Some((stripe_db_customer, stripe_db_subscription)) =
            stripe_subscriptions.get(&event.user_id)
        else {
            tracing::warn!(
                user_id = event.user_id.0,
                "Registered billing event for user who is not a Stripe customer. Billing events should only be created for users who are Stripe customers, so this is a mistake on our side."
            );
            continue;
        };
        let stripe_subscription_id: stripe::SubscriptionId = stripe_db_subscription
            .stripe_subscription_id
            .parse()
            .context("failed to parse stripe subscription id from db")?;
        let stripe_customer_id: stripe::CustomerId = stripe_db_customer
            .stripe_customer_id
            .parse()
            .context("failed to parse stripe customer id from db")?;

        let stripe_model = stripe_billing
            .register_model_for_token_based_usage(&model)
            .await?;
        stripe_billing
            .subscribe_to_model(&stripe_subscription_id, &stripe_model)
            .await?;
        stripe_billing
            .bill_model_token_usage(&stripe_customer_id, &stripe_model, &event)
            .await?;
        llm_db.consume_billing_event(event.id).await?;
    }

    Ok(())
}
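
For orientation, a sketch of how these periodic sync loops are presumably started when the collab server boots; the actual call site is not part of this diff, so the function name below is hypothetical. The design keeps billing failures non-fatal: each iteration logs errors via `trace_err()` and retries on the next interval, and a billing event is only consumed after Stripe has been billed for it.

// Hypothetical startup wiring, assuming an `Arc<AppState>` is available at boot.
fn start_stripe_sync(app: Arc<AppState>) {
    sync_llm_token_usage_with_stripe_periodically(app.clone());
    sync_llm_request_usage_with_stripe_periodically(app);
}
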
const SYNC_LLM_REQUEST_USAGE_WITH_STRIPE_INTERVAL: Duration = Duration::from_secs(60);

pub fn sync_llm_request_usage_with_stripe_periodically(app: Arc<AppState>) {
@@ -1527,19 +1358,9 @@ async fn sync_model_request_usage_with_stripe(
    llm_db: &Arc<LlmDatabase>,
    stripe_billing: &Arc<StripeBilling>,
) -> anyhow::Result<()> {
    let staff_users = app.db.get_staff_users().await?;
    let staff_user_ids = staff_users
        .iter()
        .map(|user| user.id)
        .collect::<HashSet<UserId>>();

    let usage_meters = llm_db
        .get_current_subscription_usage_meters(Utc::now())
        .await?;
    let usage_meters = usage_meters
        .into_iter()
        .filter(|(_, usage)| !staff_user_ids.contains(&usage.user_id))
        .collect::<Vec<_>>();
    let user_ids = usage_meters
        .iter()
        .map(|(_, usage)| usage.user_id)
@@ -1581,12 +1402,12 @@ async fn sync_model_request_usage_with_stripe(

        let model = llm_db.model_by_id(usage_meter.model_id)?;

        let (price, meter_event_name) = match model.name.as_str() {
            "claude-3-5-sonnet" => (&claude_3_5_sonnet, "claude_3_5_sonnet/requests"),
        let (price_id, meter_event_name) = match model.name.as_str() {
            "claude-3-5-sonnet" => (&claude_3_5_sonnet.id, "claude_3_5_sonnet/requests"),
            "claude-3-7-sonnet" => match usage_meter.mode {
                CompletionMode::Normal => (&claude_3_7_sonnet, "claude_3_7_sonnet/requests"),
                CompletionMode::Normal => (&claude_3_7_sonnet.id, "claude_3_7_sonnet/requests"),
                CompletionMode::Max => {
                    (&claude_3_7_sonnet_max, "claude_3_7_sonnet/requests/max")
                    (&claude_3_7_sonnet_max.id, "claude_3_7_sonnet/requests/max")
                }
            },
            model_name => {
@@ -1595,7 +1416,7 @@ async fn sync_model_request_usage_with_stripe(
        };

        stripe_billing
            .subscribe_to_price(&stripe_subscription_id, price)
            .subscribe_to_price(&stripe_subscription_id, price_id)
            .await?;
        stripe_billing
            .bill_model_request_usage(

@@ -543,7 +543,7 @@ pub struct MembershipUpdated {

/// The result of setting a member's role.
#[derive(Debug)]
#[allow(clippy::large_enum_variant)]
pub enum SetMemberRoleResult {
    InviteUpdated(Channel),
    MembershipUpdated(MembershipUpdated),

@@ -65,18 +65,6 @@ impl Database {
        .await
    }

    /// Returns all users flagged as staff.
    pub async fn get_staff_users(&self) -> Result<Vec<user::Model>> {
        self.transaction(|tx| async {
            let tx = tx;
            Ok(user::Entity::find()
                .filter(user::Column::Admin.eq(true))
                .all(&*tx)
                .await?)
        })
        .await
    }

    /// Returns a user by email address. There are no access checks here, so this should only be used internally.
    pub async fn get_user_by_email(&self, email: &str) -> Result<Option<User>> {
        self.transaction(|tx| async move {

@@ -1,5 +1,4 @@
use crate::db::{BillingCustomerId, BillingSubscriptionId};
use chrono::{Datelike as _, NaiveDate, Utc};
use sea_orm::entity::prelude::*;
use serde::Serialize;

@@ -30,38 +29,6 @@ impl Model {
        let period_end = self.stripe_current_period_end?;
        chrono::DateTime::from_timestamp(period_end, 0)
    }

    pub fn current_period(
        subscription: Option<Self>,
        is_staff: bool,
    ) -> Option<(DateTimeUtc, DateTimeUtc)> {
        if is_staff {
            let now = Utc::now();
            let year = now.year();
            let month = now.month();

            let first_day_of_this_month =
                NaiveDate::from_ymd_opt(year, month, 1)?.and_hms_opt(0, 0, 0)?;

            let next_month = if month == 12 { 1 } else { month + 1 };
            let next_month_year = if month == 12 { year + 1 } else { year };
            let first_day_of_next_month =
                NaiveDate::from_ymd_opt(next_month_year, next_month, 1)?.and_hms_opt(23, 59, 59)?;

            let last_day_of_this_month = first_day_of_next_month - chrono::Days::new(1);

            Some((
                first_day_of_this_month.and_utc(),
                last_day_of_this_month.and_utc(),
            ))
        } else {
            let subscription = subscription?;
            let period_start_at = subscription.current_period_start_at()?;
            let period_end_at = subscription.current_period_end_at()?;

            Some((period_start_at, period_end_at))
        }
    }
}

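To make the staff branch of `current_period` above concrete: for staff the period is synthesized from the calendar month rather than read from a Stripe subscription, running from midnight on the 1st to 23:59:59 on the last day of the month. A small standalone sketch of the same date arithmetic with a fixed date (illustrative only; the real code uses `Utc::now()`):

use chrono::{Datelike as _, NaiveDate};

fn main() {
    // Pretend "now" is 2025-02-14; the synthesized period should span all of February.
    let now = NaiveDate::from_ymd_opt(2025, 2, 14).unwrap();
    let (year, month) = (now.year(), now.month());

    let first_day_of_this_month = NaiveDate::from_ymd_opt(year, month, 1).unwrap();

    // The first day of the next month, minus one day, is the last day of this month.
    let (next_year, next_month) = if month == 12 { (year + 1, 1) } else { (year, month + 1) };
    let last_day_of_this_month =
        NaiveDate::from_ymd_opt(next_year, next_month, 1).unwrap() - chrono::Days::new(1);

    assert_eq!(first_day_of_this_month, NaiveDate::from_ymd_opt(2025, 2, 1).unwrap());
    assert_eq!(last_day_of_this_month, NaiveDate::from_ymd_opt(2025, 2, 28).unwrap());
}
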
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
@@ -94,16 +61,6 @@ pub enum SubscriptionKind {
    ZedFree,
}

impl From<SubscriptionKind> for zed_llm_client::Plan {
    fn from(value: SubscriptionKind) -> Self {
        match value {
            SubscriptionKind::ZedPro => Self::ZedPro,
            SubscriptionKind::ZedProTrial => Self::ZedProTrial,
            SubscriptionKind::ZedFree => Self::ZedFree,
        }
    }
}

/// The status of a Stripe subscription.
///
/// [Stripe docs](https://docs.stripe.com/api/subscriptions/object#subscription_object-status)

@@ -180,6 +180,9 @@ pub struct Config {
    pub slack_panics_webhook: Option<String>,
    pub auto_join_channel_id: Option<ChannelId>,
    pub stripe_api_key: Option<String>,
    pub stripe_zed_pro_price_id: Option<String>,
    pub stripe_zed_pro_trial_price_id: Option<String>,
    pub stripe_zed_free_price_id: Option<String>,
    pub supermaven_admin_api_key: Option<Arc<str>>,
    pub user_backfiller_github_access_token: Option<Arc<str>>,
}
@@ -198,6 +201,22 @@ impl Config {
        }
    }

    pub fn zed_pro_price_id(&self) -> anyhow::Result<stripe::PriceId> {
        Self::parse_stripe_price_id("Zed Pro", self.stripe_zed_pro_price_id.as_deref())
    }

    pub fn zed_free_price_id(&self) -> anyhow::Result<stripe::PriceId> {
        Self::parse_stripe_price_id("Zed Free", self.stripe_zed_free_price_id.as_deref())
    }

    fn parse_stripe_price_id(name: &str, value: Option<&str>) -> anyhow::Result<stripe::PriceId> {
        use std::str::FromStr as _;

        let price_id = value.ok_or_else(|| anyhow!("{name} price ID not set"))?;

        Ok(stripe::PriceId::from_str(price_id)?)
    }

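A minimal sketch of how these accessors are presumably consumed when wiring up Stripe billing; the helper below is hypothetical and not part of this diff. Because each accessor names the missing setting in its error, a misconfigured deployment fails loudly rather than silently skipping billing.

// Hypothetical call site, assuming a `Config` like the one defined above.
fn load_stripe_price_ids(config: &Config) -> anyhow::Result<(stripe::PriceId, stripe::PriceId)> {
    let zed_pro = config.zed_pro_price_id()?;
    let zed_free = config.zed_free_price_id()?;
    Ok((zed_pro, zed_free))
}
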
    #[cfg(test)]
    pub fn test() -> Self {
        Self {
@@ -235,6 +254,9 @@ impl Config {
            migrations_path: None,
            seed_path: None,
            stripe_api_key: None,
            stripe_zed_pro_price_id: None,
            stripe_zed_pro_trial_price_id: None,
            stripe_zed_free_price_id: None,
            supermaven_admin_api_key: None,
            user_backfiller_github_access_token: None,
            kinesis_region: None,

@@ -1,5 +1,6 @@
use super::*;

pub mod billing_events;
pub mod providers;
pub mod subscription_usage_meters;
pub mod subscription_usages;

crates/collab/src/llm/db/queries/billing_events.rs (new file, 31 lines)
@@ -0,0 +1,31 @@
use super::*;
use crate::Result;
use anyhow::Context as _;

impl LlmDatabase {
    pub async fn get_billing_events(&self) -> Result<Vec<(billing_event::Model, model::Model)>> {
        self.transaction(|tx| async move {
            let events_with_models = billing_event::Entity::find()
                .find_also_related(model::Entity)
                .all(&*tx)
                .await?;
            events_with_models
                .into_iter()
                .map(|(event, model)| {
                    let model =
                        model.context("could not find model associated with billing event")?;
                    Ok((event, model))
                })
                .collect()
        })
        .await
    }

    pub async fn consume_billing_event(&self, id: BillingEventId) -> Result<()> {
        self.transaction(|tx| async move {
            billing_event::Entity::delete_by_id(id).exec(&*tx).await?;
            Ok(())
        })
        .await
    }
}
@@ -1,4 +1,3 @@
use crate::db::UserId;
use crate::llm::db::queries::subscription_usages::convert_chrono_to_time;

use super::*;
@@ -35,38 +34,4 @@ impl LlmDatabase {
        })
        .await
    }

    /// Returns all current subscription usage meters for the given user as of the given timestamp.
    pub async fn get_current_subscription_usage_meters_for_user(
        &self,
        user_id: UserId,
        now: DateTimeUtc,
    ) -> Result<Vec<(subscription_usage_meter::Model, subscription_usage::Model)>> {
        let now = convert_chrono_to_time(now)?;

        self.transaction(|tx| async move {
            let result = subscription_usage_meter::Entity::find()
                .inner_join(subscription_usage::Entity)
                .filter(subscription_usage::Column::UserId.eq(user_id))
                .filter(
                    subscription_usage::Column::PeriodStartAt
                        .lte(now)
                        .and(subscription_usage::Column::PeriodEndAt.gte(now)),
                )
                .select_also(subscription_usage::Entity)
                .all(&*tx)
                .await?;

            let result = result
                .into_iter()
                .filter_map(|(meter, usage)| {
                    let usage = usage?;
                    Some((meter, usage))
                })
                .collect();

            Ok(result)
        })
        .await
    }
}

@@ -1,6 +1,8 @@
use chrono::Timelike;
use time::PrimitiveDateTime;

use crate::db::UserId;
use crate::db::billing_subscription::SubscriptionKind;
use crate::db::{UserId, billing_subscription};

use super::*;

@@ -24,6 +26,62 @@ pub fn convert_chrono_to_time(datetime: DateTimeUtc) -> anyhow::Result<Primitive
}

impl LlmDatabase {
    pub async fn create_subscription_usage(
        &self,
        user_id: UserId,
        period_start_at: DateTimeUtc,
        period_end_at: DateTimeUtc,
        plan: SubscriptionKind,
        model_requests: i32,
        edit_predictions: i32,
    ) -> Result<subscription_usage::Model> {
        self.transaction(|tx| async move {
            self.create_subscription_usage_in_tx(
                user_id,
                period_start_at,
                period_end_at,
                plan,
                model_requests,
                edit_predictions,
                &tx,
            )
            .await
        })
        .await
    }

    async fn create_subscription_usage_in_tx(
        &self,
        user_id: UserId,
        period_start_at: DateTimeUtc,
        period_end_at: DateTimeUtc,
        plan: SubscriptionKind,
        model_requests: i32,
        edit_predictions: i32,
        tx: &DatabaseTransaction,
    ) -> Result<subscription_usage::Model> {
        // Clear out the nanoseconds so that these timestamps are comparable with Unix timestamps.
        let period_start_at = period_start_at.with_nanosecond(0).unwrap();
        let period_end_at = period_end_at.with_nanosecond(0).unwrap();

        let period_start_at = convert_chrono_to_time(period_start_at)?;
        let period_end_at = convert_chrono_to_time(period_end_at)?;

        Ok(
            subscription_usage::Entity::insert(subscription_usage::ActiveModel {
                id: ActiveValue::not_set(),
                user_id: ActiveValue::set(user_id),
                period_start_at: ActiveValue::set(period_start_at),
                period_end_at: ActiveValue::set(period_end_at),
                plan: ActiveValue::set(plan),
                model_requests: ActiveValue::set(model_requests),
                edit_predictions: ActiveValue::set(edit_predictions),
            })
            .exec_with_returning(tx)
            .await?,
        )
    }

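The nanosecond truncation near the top of `create_subscription_usage_in_tx` exists so the stored period boundaries compare cleanly against whole-second Unix timestamps (such as the Stripe period fields seen earlier in this diff). A small standalone illustration of that truncation using chrono (values are made up):

use chrono::{TimeZone as _, Timelike as _, Utc};

fn main() {
    // A timestamp with sub-second precision...
    let t = Utc.timestamp_opt(1_700_000_000, 123_456_789).unwrap();

    // ...rounds down to the same whole second once the nanoseconds are cleared.
    let truncated = t.with_nanosecond(0).unwrap();
    assert_eq!(truncated.timestamp(), 1_700_000_000);
    assert_eq!(truncated.nanosecond(), 0);
}
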
    pub async fn get_subscription_usage_for_period(
        &self,
        user_id: UserId,
@@ -56,4 +114,53 @@ impl LlmDatabase {
            .one(tx)
            .await?)
    }

    pub async fn transfer_existing_subscription_usage(
        &self,
        user_id: UserId,
        existing_subscription: &billing_subscription::Model,
        new_subscription_kind: Option<SubscriptionKind>,
        new_period_start_at: DateTimeUtc,
        new_period_end_at: DateTimeUtc,
    ) -> Result<Option<subscription_usage::Model>> {
        self.transaction(|tx| async move {
            match existing_subscription.kind {
                Some(SubscriptionKind::ZedProTrial) => {
                    let trial_period_start_at = existing_subscription
                        .current_period_start_at()
                        .ok_or_else(|| anyhow!("No trial subscription period start"))?;
                    let trial_period_end_at = existing_subscription
                        .current_period_end_at()
                        .ok_or_else(|| anyhow!("No trial subscription period end"))?;

                    let existing_usage = self
                        .get_subscription_usage_for_period_in_tx(
                            user_id,
                            trial_period_start_at,
                            trial_period_end_at,
                            &tx,
                        )
                        .await?;
                    if let Some(existing_usage) = existing_usage {
                        return Ok(Some(
                            self.create_subscription_usage_in_tx(
                                user_id,
                                new_period_start_at,
                                new_period_end_at,
                                new_subscription_kind.unwrap_or(existing_usage.plan),
                                existing_usage.model_requests,
                                existing_usage.edit_predictions,
                                &tx,
                            )
                            .await?,
                        ));
                    }
                }
                _ => {}
            }

            Ok(None)
        })
        .await
    }
}

@@ -1,3 +1,4 @@
pub mod billing_event;
pub mod model;
pub mod monthly_usage;
pub mod provider;

crates/collab/src/llm/db/tables/billing_event.rs (new file, 37 lines)
@@ -0,0 +1,37 @@
use crate::{
    db::UserId,
    llm::db::{BillingEventId, ModelId},
};
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, PartialEq, DeriveEntityModel)]
#[sea_orm(table_name = "billing_events")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: BillingEventId,
    pub idempotency_key: Uuid,
    pub user_id: UserId,
    pub model_id: ModelId,
    pub input_tokens: i64,
    pub input_cache_creation_tokens: i64,
    pub input_cache_read_tokens: i64,
    pub output_tokens: i64,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::model::Entity",
        from = "Column::ModelId",
        to = "super::model::Column::Id"
    )]
    Model,
}

impl Related<super::model::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Model.def()
    }
}

impl ActiveModelBehavior for ActiveModel {}
@@ -31,6 +31,8 @@ pub enum Relation {
    Provider,
    #[sea_orm(has_many = "super::usage::Entity")]
    Usages,
    #[sea_orm(has_many = "super::billing_event::Entity")]
    BillingEvents,
}

impl Related<super::provider::Entity> for Entity {
@@ -45,4 +47,10 @@ impl Related<super::usage::Entity> for Entity {
    }
}

impl Related<super::billing_event::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::BillingEvents.def()
    }
}

impl ActiveModelBehavior for ActiveModel {}

Some files were not shown because too many files have changed in this diff.