Compare commits: path-based...hide-curso
339 Commits:
72688a7420, 45764bc943, 12d6162bd8, f4c707b01a, 3537d8384e, de67d93a92, a02f7e5cda, d70ac64fe4, df583d73b9, e091c5faaf,
931a6d6f40, a605b66ce1, 7376c6f377, ee08776f34, 30f7e896cf, 8c8f50d916, 76192ea93c, fb2586a553, 28e0bb5a12, a65ea2708c,
1574a3a2fd, 152432f1d9, 5953c6167b, aab02a4166, 9fc570c4be, 581d67398a, 4a30b960d4, 503bf607c5, 46e86f003f, 0339d654d2,
24d76a64c3, 3ba624391f, 31e3c13ea9, 0fec04a1b7, bf255486c0, cd1e56d6c7, 2fe2028e20, f9212a001e, 954a56ce0c, 275cecb262,
6b7167a32d, 430814c0a9, 5aba5e1b3d, 408e157d0f, 2414855121, e7f7ed349b, e273de5490, 7046b9641d, 9b883e02a6, ffee218070,
d9dcc59334, 8f1023360d, 9468b9699e, f1a9fdddab, d40f8889b9, 6715c8582f, baf03e355b, 35ec4753b4, b85492bd00, fca7ce9a14,
42f01cc903, ffa736e566, e9e6529df4, 3205ae3884, 03102b4d7e, c32dece1b8, bcfc9e4437, a52095f558, be83c5e1c5, 46d67a33c7,
10c04afc81, 920eda07a5, a469c0e261, 5d05c4aa70, 5465198d0d, bbc7fcc54f, 11552cc0bd, 699369995b, 4a5f89aded, e661a0afd6,
57ffd6d78d, 350c1e41d2, a23e293ca2, f2be201495, 43712285bf, f38ee81e83, ddf8d07f02, 7db9077835, 4e33aaa55c, d253d46fdf,
07727f939e, 08e8109e79, f19e1e3b5f, 8294b9f674, 352c71f8a6, 9f0b09007b, f4d1e7901c, 044eb7b990, d96a50b029, b5e5959339,
9918b6cade, fa677bdc38, d82b547596, 4c86cda909, 90649fbc89, c783fd072f, 85a761cb2b, 739f45eb23, 16ad7424d6, b32c792b68,
1db621dc50, 6143da95fc, 7ced1b7a90, 0e9e2d70cd, a52e2f9553, a551a6139c, c394a3a890, 1cca2e37b0, 0de5c2ed53, 6397872c49,
93bd32b425, f119550838, 4e93e38b0a, 05aa8880a4, 6bced3a834, e14ebcf267, 9d965bc98a, 579868110b, a709d4c7c6, 2ffce4f516,
33fc1f4af2, 7ade7d8e45, 8f86cd758a, 962709f42c, cf7d639fbc, 9134630841, bc1c0a2297, 700af63c45, 4b5df2189b, 48b1a43f5e,
9609e04bb2, a74f2bb18b, ac452799b0, 7b80cd865d, 7931b1d345, 27ebedf517, f9f5126d2c, 6408ae81d1, c60a7034c8, 7feb50fafe,
f365b80814, d0641a38a4, 2e8c0ff244, 4421bdd12e, aa2fe9cce1, e3578fc44a, aae81fd54c, de99febd9b, 5bef32f3ed, 23e8519057,
1180b6fbc7, 14920ab910, 000b981cb4, c9bff6e762, 9fd2d064ee, 11425cf5f1, b54c92079f, 3bbdc546ec, e4e3ce6a38, 8cd96cbf59,
274124256d, 1cf252f8eb, e46c72f4a8, 63f656faae, 31b8c36479, dfdca540ec, 14c036931d, 5387ae9ed8, c30fb5f1ec, f7e2b7b679,
b3bf3e2d53, 1cc59b317c, efd3f8a8f1, 930dba4a7f, 7cfd919523, edd1b48e7c, 3ec69a5bc0, 33faa66e35, 68262fe7e4, 2491426be7,
4487dc1064, e03edc2a76, d722067000, d51cd15e4d, ef14bc8e76, 9fe243efa5, 74a39c7263, 5f398071b2, 410a942d57, 06ffdc6791,
394215599a, e8a40085de, 6303751325, 3edf930007, 584a70ca5e, 2230f3b09d, 84a8d48178, ac5dafc6b2, 23686aa394, 3874d315ec,
1d33bfde37, 9377ef9817, c3b5046347, 44fff08ed6, d4daa0a3a2, 81582cd7f3, 0f5a3afe94, 382f9f6151, 15d2420031, 026c7274d9,
1aefa5178b, 7f2e3fb5bd, 68a572873b, c042a02cf4, 73ac3d9a99, 2269f996f7, e9033a75ac, a2ae6a1c77, 985ac4e5f2, 89ae4ca9a3,
1d4afe6daa, 777c88bcea, 959a024861, ed510b5e93, 674c572a28, 4a39fc2644, 48fe134408, 22b8662275, cc36cd9768, 628a61d929,
7f23875c5e, e7bba1c252, 41a60ffecf, ed4e654fdf, baaafddbeb, b70f21c08d, 5615be51cc, 06e9f0e309, 41a2be7e54, e38ae423f1,
68bb3bd5eb, 122e73f152, 4b775505f5, a9f7c0549c, ac617e278e, 26f4b2a491, fdcacb3849, f61d3d28e0, a5621662b2, b6198ad516,
5210d9e8b4, 1139904ef5, b4ef3791bb, 88907eeb38, cd5d7e82d0, 0851842d2c, 1397e01735, 2b2b9c1624, a05066cd83, cb439e672d,
6b0a282c9c, 25772b8777, 94b63808e0, 798af67dc1, db1d2defa5, 430bd83e4d, dbe5399fc4, aba242d576, ddc210abfc, 65994c0576,
011f823f33, 3d1ae68f83, 1f62274a89, c2f62d261b, 7d433a30ec, 52567f4b72, a0ee84d3ac, 6cac0b33dc, 45606abfdb, 8ba6ce43ac,
040d42fc24, 22d905dc03, bf735da3f2, 210d8d5530, a0f995d2ae, 8f560daec2, d5bb12631a, 8a31dcaeb0, ef91e7afae, c220fb387d,
adbde210fd, b81a1ad91d, 5f390f1bf8, c282acbe65, 021d6584cc, b547cd1c70, 8f841d1ab7, 4b153e7f7f, b61171f152, 0b492c11de,
265caed15e, 148131786f, 7c1405db37, 96b747e31d, 7a888de9f5, e9b4fa1465, ead60d1857, 768dfc8b6b, f2f9c786da, e5d2678d94,
3ad9074e63, f40b22c02a, 8490d0d4ef, afd0da97b9, 1bf1c7223f, ba8b9ec2c7, 685536c27e, ae017c3f96, f587e95a7e, 83dfdb0cfe,
566c5f91a7, 21057e3af7, f68a475eca, c62210b178, ad14dcc57b, b9432dbe42, 41c373eff1, 6a95ec6a64, 8d7b021f92
@@ -19,6 +19,10 @@
# https://github.com/zed-industries/zed/pull/2394
eca93c124a488b4e538946cd2d313bd571aa2b86

# 2024-02-15 Format YAML files
# https://github.com/zed-industries/zed/pull/7887
a161a7d0c95ca7505bf9218bfae640ee5444c88b

# 2024-02-25 Format JSON files in assets/
# https://github.com/zed-industries/zed/pull/8405
ffdda588b41f7d9d270ffe76cab116f828ad545e

.github/actions/run_tests/action.yml (vendored, 2 changed lines)
@@ -10,7 +10,7 @@ runs:
cargo install cargo-nextest --locked

- name: Install Node
uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4
uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
with:
node-version: "18"

.github/actions/run_tests_windows/action.yml (vendored, 2 changed lines)
@@ -16,7 +16,7 @@ runs:
run: cargo install cargo-nextest --locked

- name: Install Node
uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4
uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
with:
node-version: "18"

.github/workflows/ci.yml (vendored, 23 changed lines)
@@ -235,7 +235,7 @@ jobs:
clean: false

- name: Cache dependencies
uses: swatinem/rust-cache@f0deed1e0edfc6a9be95417288c0e1099b1eeec3 # v2
uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
cache-provider: "buildjet"
@@ -287,7 +287,7 @@ jobs:
clean: false

- name: Cache dependencies
uses: swatinem/rust-cache@f0deed1e0edfc6a9be95417288c0e1099b1eeec3 # v2
uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
cache-provider: "buildjet"
@@ -334,7 +334,7 @@ jobs:
Copy-Item -Path "${{ github.workspace }}" -Destination "${{ env.ZED_WORKSPACE }}" -Recurse

- name: Cache dependencies
uses: swatinem/rust-cache@f0deed1e0edfc6a9be95417288c0e1099b1eeec3 # v2
uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
workspaces: ${{ env.ZED_WORKSPACE }}
@@ -393,7 +393,7 @@ jobs:
Copy-Item -Path "${{ github.workspace }}" -Destination "${{ env.ZED_WORKSPACE }}" -Recurse

- name: Cache dependencies
uses: swatinem/rust-cache@f0deed1e0edfc6a9be95417288c0e1099b1eeec3 # v2
uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
workspaces: ${{ env.ZED_WORKSPACE }}
@@ -453,7 +453,6 @@ jobs:
[[ "${{ needs.linux_tests.result }}" != 'success' ]] && { RET_CODE=1; echo "Linux tests failed"; }
[[ "${{ needs.windows_tests.result }}" != 'success' ]] && { RET_CODE=1; echo "Windows tests failed"; }
[[ "${{ needs.windows_clippy.result }}" != 'success' ]] && { RET_CODE=1; echo "Windows clippy failed"; }
[[ "${{ needs.migration_checks.result }}" != 'success' ]] && { RET_CODE=1; echo "Migration checks failed"; }
[[ "${{ needs.build_remote_server.result }}" != 'success' ]] && { RET_CODE=1; echo "Remote server build failed"; }
fi
if [[ "$RET_CODE" -eq 0 ]]; then
@@ -483,7 +482,7 @@ jobs:
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
steps:
- name: Install Node
uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4
uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
with:
node-version: "18"

@@ -527,14 +526,14 @@ jobs:
mv target/x86_64-apple-darwin/release/Zed.dmg target/x86_64-apple-darwin/release/Zed-x86_64.dmg

- name: Upload app bundle (aarch64) to workflow run if main branch or specific label
uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
with:
name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.dmg
path: target/aarch64-apple-darwin/release/Zed-aarch64.dmg

- name: Upload app bundle (x86_64) to workflow run if main branch or specific label
uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
with:
name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.dmg
@@ -587,7 +586,7 @@ jobs:
run: script/bundle-linux

- name: Upload Linux bundle to workflow run if main branch or specific label
uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
if: |
github.ref == 'refs/heads/main'
|| contains(github.event.pull_request.labels.*.name, 'run-bundling')
@@ -596,7 +595,7 @@ jobs:
path: target/release/zed-*.tar.gz

- name: Upload Linux remote server to workflow run if main branch or specific label
uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
if: |
github.ref == 'refs/heads/main'
|| contains(github.event.pull_request.labels.*.name, 'run-bundling')
@@ -648,7 +647,7 @@ jobs:
run: script/bundle-linux

- name: Upload Linux bundle to workflow run if main branch or specific label
uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
if: |
github.ref == 'refs/heads/main'
|| contains(github.event.pull_request.labels.*.name, 'run-bundling')
@@ -657,7 +656,7 @@ jobs:
path: target/release/zed-*.tar.gz

- name: Upload Linux remote server to workflow run if main branch or specific label
uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
if: |
github.ref == 'refs/heads/main'
|| contains(github.event.pull_request.labels.*.name, 'run-bundling')

@@ -1,7 +1,7 @@
name: "Close Stale Issues"
on:
schedule:
- cron: "0 11 * * 2"
- cron: "0 7,9,11 * * 3"
workflow_dispatch:

jobs:

.github/workflows/community_release_actions.yml (vendored, 39 changed lines)
@@ -13,11 +13,12 @@ jobs:
id: get-release-url
run: |
if [ "${{ github.event.release.prerelease }}" == "true" ]; then
URL="https://zed.dev/releases/preview/latest"
URL="https://zed.dev/releases/preview/latest"
else
URL="https://zed.dev/releases/stable/latest"
URL="https://zed.dev/releases/stable/latest"
fi
echo "::set-output name=URL::$URL"

echo "URL=$URL" >> $GITHUB_OUTPUT
- name: Get content
uses: 2428392/gh-truncate-string-action@b3ff790d21cf42af3ca7579146eedb93c8fb0757 # v1.4.1
id: get-content
@@ -33,3 +34,35 @@ jobs:
with:
webhook-url: ${{ secrets.DISCORD_WEBHOOK_URL }}
content: ${{ steps.get-content.outputs.string }}

send_release_notes_email:
if: github.repository_owner == 'zed-industries' && !github.event.release.prerelease
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
fetch-depth: 0

- name: Check if release was promoted from preview
id: check-promotion-from-preview
run: |
VERSION="${{ github.event.release.tag_name }}"
PREVIEW_TAG="${VERSION}-pre"

if git rev-parse "$PREVIEW_TAG" > /dev/null 2>&1; then
echo "was_promoted_from_preview=true" >> $GITHUB_OUTPUT
else
echo "was_promoted_from_preview=false" >> $GITHUB_OUTPUT
fi

- name: Send release notes email
if: steps.check-promotion-from-preview.outputs.was_promoted_from_preview == 'true'
run: |
TAG="${{ github.event.release.tag_name }}"
echo \"${{ toJSON(github.event.release.body) }}\" > release_body.txt
jq -n --arg tag "$TAG" --rawfile body release_body.txt '{version: $tag, markdown_body: $body}' \
> release_data.json
curl -X POST "https://zed.dev/api/send_release_notes_email" \
-H "Authorization: Bearer ${{ secrets.RELEASE_NOTES_API_TOKEN }}" \
-H "Content-Type: application/json" \
-d @release_data.json

.github/workflows/danger.yml (vendored, 2 changed lines)
@@ -22,7 +22,7 @@ jobs:
version: 9

- name: Setup Node
uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4
uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
with:
node-version: "20"
cache: "pnpm"

.github/workflows/deploy_cloudflare.yml (vendored, 10 changed lines)
@@ -37,35 +37,35 @@ jobs:
mdbook build ./docs --dest-dir=../target/deploy/docs/

- name: Deploy Docs
uses: cloudflare/wrangler-action@392082e81ffbcb9ebdde27400634aa004b35ea37 # v3
uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3
with:
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
command: pages deploy target/deploy --project-name=docs

- name: Deploy Install
uses: cloudflare/wrangler-action@392082e81ffbcb9ebdde27400634aa004b35ea37 # v3
uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3
with:
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
command: r2 object put -f script/install.sh zed-open-source-website-assets/install.sh

- name: Deploy Docs Workers
uses: cloudflare/wrangler-action@392082e81ffbcb9ebdde27400634aa004b35ea37 # v3
uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3
with:
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
command: deploy .cloudflare/docs-proxy/src/worker.js

- name: Deploy Install Workers
uses: cloudflare/wrangler-action@392082e81ffbcb9ebdde27400634aa004b35ea37 # v3
uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3
with:
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
command: deploy .cloudflare/docs-proxy/src/worker.js

- name: Preserve Wrangler logs
uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
if: always()
with:
name: wrangler_logs

.github/workflows/issue_response.yml (vendored, 2 changed lines)
@@ -18,7 +18,7 @@ jobs:
version: 9

- name: Setup Node
uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4
uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
with:
node-version: "20"
cache: "pnpm"

.github/workflows/publish_extension_cli.yml (vendored, 2 changed lines)
@@ -22,7 +22,7 @@ jobs:
clean: false

- name: Cache dependencies
uses: swatinem/rust-cache@f0deed1e0edfc6a9be95417288c0e1099b1eeec3 # v2
uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2
with:
save-if: ${{ github.ref == 'refs/heads/main' }}
cache-provider: "github"

.github/workflows/randomized_tests.yml (vendored, 2 changed lines)
@@ -23,7 +23,7 @@ jobs:
- buildjet-16vcpu-ubuntu-2204
steps:
- name: Install Node
uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4
uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
with:
node-version: "18"

53
.github/workflows/release_nightly.yml
vendored
@@ -71,7 +71,7 @@ jobs:
|
||||
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
|
||||
steps:
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4
|
||||
uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
|
||||
with:
|
||||
node-version: "18"
|
||||
|
||||
@@ -170,6 +170,57 @@ jobs:
|
||||
- name: Upload Zed Nightly
|
||||
run: script/upload-nightly linux-targz
|
||||
|
||||
bundle-nix:
|
||||
timeout-minutes: 60
|
||||
name: (${{ matrix.system.os }}) Nix Build
|
||||
continue-on-error: true
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
system:
|
||||
- os: x86 Linux
|
||||
runner: buildjet-16vcpu-ubuntu-2204
|
||||
install_nix: true
|
||||
- os: arm Mac
|
||||
runner: [macOS, ARM64, test]
|
||||
install_nix: false
|
||||
- os: arm Linux
|
||||
runner: buildjet-16vcpu-ubuntu-2204-arm
|
||||
install_nix: true
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
runs-on: ${{ matrix.system.runner }}
|
||||
needs: tests
|
||||
env:
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
|
||||
GIT_LFS_SKIP_SMUDGE: 1 # breaks the livekit rust sdk examples which we don't actually depend on
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
clean: false
|
||||
|
||||
# on our macs we manually install nix. for some reason the cachix action is running
|
||||
# under a non-login /bin/bash shell which doesn't source the proper script to add the
|
||||
# nix profile to PATH, so we manually add them here
|
||||
- name: Set path
|
||||
if: ${{ ! matrix.system.install_nix }}
|
||||
run: |
|
||||
echo "/nix/var/nix/profiles/default/bin" >> $GITHUB_PATH
|
||||
echo "/Users/administrator/.nix-profile/bin" >> $GITHUB_PATH
|
||||
|
||||
- uses: cachix/install-nix-action@02a151ada4993995686f9ed4f1be7cfbb229e56f # v31
|
||||
if: ${{ matrix.system.install_nix }}
|
||||
with:
|
||||
github_access_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- uses: cachix/cachix-action@0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad # v16
|
||||
with:
|
||||
name: zed-industries
|
||||
authToken: "${{ secrets.CACHIX_AUTH_TOKEN }}"
|
||||
- run: nix build
|
||||
- run: nix-collect-garbage -d
|
||||
|
||||
update-nightly-tag:
|
||||
name: Update nightly tag
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
|
||||
19
.zed/debug.json
Normal file
@@ -0,0 +1,19 @@
|
||||
[
|
||||
{
|
||||
"label": "Debug Zed with LLDB",
|
||||
"adapter": "lldb",
|
||||
"program": "$ZED_WORKTREE_ROOT/target/debug/zed",
|
||||
"request": "launch",
|
||||
"cwd": "$ZED_WORKTREE_ROOT"
|
||||
},
|
||||
{
|
||||
"label": "Debug Zed with GDB",
|
||||
"adapter": "gdb",
|
||||
"program": "$ZED_WORKTREE_ROOT/target/debug/zed",
|
||||
"request": "launch",
|
||||
"cwd": "$ZED_WORKTREE_ROOT",
|
||||
"initialize_args": {
|
||||
"stopAtBeginningOfMainSubprogram": true
|
||||
}
|
||||
}
|
||||
]
|
||||
791
Cargo.lock
generated
32
Cargo.toml
@@ -8,6 +8,7 @@ members = [
|
||||
"crates/assistant",
|
||||
"crates/assistant2",
|
||||
"crates/assistant_context_editor",
|
||||
"crates/assistant_eval",
|
||||
"crates/assistant_settings",
|
||||
"crates/assistant_slash_command",
|
||||
"crates/assistant_slash_commands",
|
||||
@@ -36,6 +37,10 @@ members = [
|
||||
"crates/context_server_settings",
|
||||
"crates/copilot",
|
||||
"crates/credentials_provider",
|
||||
"crates/dap",
|
||||
"crates/dap_adapters",
|
||||
"crates/debugger_tools",
|
||||
"crates/debugger_ui",
|
||||
"crates/db",
|
||||
"crates/deepseek",
|
||||
"crates/diagnostics",
|
||||
@@ -64,6 +69,8 @@ members = [
|
||||
"crates/gpui_tokio",
|
||||
"crates/html_to_markdown",
|
||||
"crates/http_client",
|
||||
"crates/http_client_tls",
|
||||
"crates/icons",
|
||||
"crates/image_viewer",
|
||||
"crates/indexed_docs",
|
||||
"crates/inline_completion",
|
||||
@@ -118,7 +125,6 @@ members = [
|
||||
"crates/rope",
|
||||
"crates/rpc",
|
||||
"crates/schema_generator",
|
||||
"crates/scripting_tool",
|
||||
"crates/search",
|
||||
"crates/semantic_index",
|
||||
"crates/semantic_version",
|
||||
@@ -154,6 +160,7 @@ members = [
|
||||
"crates/ui",
|
||||
"crates/ui_input",
|
||||
"crates/ui_macros",
|
||||
"crates/ui_prompt",
|
||||
"crates/util",
|
||||
"crates/util_macros",
|
||||
"crates/vim",
|
||||
@@ -164,6 +171,8 @@ members = [
|
||||
"crates/zed",
|
||||
"crates/zed_actions",
|
||||
"crates/zeta",
|
||||
"crates/zlog",
|
||||
"crates/zlog_settings",
|
||||
|
||||
#
|
||||
# Extensions
|
||||
@@ -206,6 +215,7 @@ assets = { path = "crates/assets" }
|
||||
assistant = { path = "crates/assistant" }
|
||||
assistant2 = { path = "crates/assistant2" }
|
||||
assistant_context_editor = { path = "crates/assistant_context_editor" }
|
||||
assistant_eval = { path = "crates/assistant_eval" }
|
||||
assistant_settings = { path = "crates/assistant_settings" }
|
||||
assistant_slash_command = { path = "crates/assistant_slash_command" }
|
||||
assistant_slash_commands = { path = "crates/assistant_slash_commands" }
|
||||
@@ -233,7 +243,11 @@ context_server = { path = "crates/context_server" }
|
||||
context_server_settings = { path = "crates/context_server_settings" }
|
||||
copilot = { path = "crates/copilot" }
|
||||
credentials_provider = { path = "crates/credentials_provider" }
|
||||
dap = { path = "crates/dap" }
|
||||
dap_adapters = { path = "crates/dap_adapters" }
|
||||
db = { path = "crates/db" }
|
||||
debugger_ui = { path = "crates/debugger_ui" }
|
||||
debugger_tools = { path = "crates/debugger_tools" }
|
||||
deepseek = { path = "crates/deepseek" }
|
||||
diagnostics = { path = "crates/diagnostics" }
|
||||
buffer_diff = { path = "crates/buffer_diff" }
|
||||
@@ -260,6 +274,8 @@ gpui_macros = { path = "crates/gpui_macros" }
|
||||
gpui_tokio = { path = "crates/gpui_tokio" }
|
||||
html_to_markdown = { path = "crates/html_to_markdown" }
|
||||
http_client = { path = "crates/http_client" }
|
||||
http_client_tls = { path = "crates/http_client_tls" }
|
||||
icons = { path = "crates/icons" }
|
||||
image_viewer = { path = "crates/image_viewer" }
|
||||
indexed_docs = { path = "crates/indexed_docs" }
|
||||
inline_completion = { path = "crates/inline_completion" }
|
||||
@@ -314,7 +330,6 @@ reqwest_client = { path = "crates/reqwest_client" }
|
||||
rich_text = { path = "crates/rich_text" }
|
||||
rope = { path = "crates/rope" }
|
||||
rpc = { path = "crates/rpc" }
|
||||
scripting_tool = { path = "crates/scripting_tool" }
|
||||
search = { path = "crates/search" }
|
||||
semantic_index = { path = "crates/semantic_index" }
|
||||
semantic_version = { path = "crates/semantic_version" }
|
||||
@@ -350,6 +365,7 @@ toolchain_selector = { path = "crates/toolchain_selector" }
|
||||
ui = { path = "crates/ui" }
|
||||
ui_input = { path = "crates/ui_input" }
|
||||
ui_macros = { path = "crates/ui_macros" }
|
||||
ui_prompt = { path = "crates/ui_prompt" }
|
||||
util = { path = "crates/util" }
|
||||
util_macros = { path = "crates/util_macros" }
|
||||
vim = { path = "crates/vim" }
|
||||
@@ -360,6 +376,8 @@ worktree = { path = "crates/worktree" }
|
||||
zed = { path = "crates/zed" }
|
||||
zed_actions = { path = "crates/zed_actions" }
|
||||
zeta = { path = "crates/zeta" }
|
||||
zlog = { path = "crates/zlog" }
|
||||
zlog_settings = { path = "crates/zlog_settings" }
|
||||
|
||||
#
|
||||
# External crates
|
||||
@@ -398,6 +416,7 @@ bytes = "1.0"
|
||||
cargo_metadata = "0.19"
|
||||
cargo_toml = "0.21"
|
||||
chrono = { version = "0.4", features = ["serde"] }
|
||||
circular-buffer = "1.0"
|
||||
clap = { version = "4.4", features = ["derive"] }
|
||||
cocoa = "0.26"
|
||||
cocoa-foundation = "0.2.0"
|
||||
@@ -406,6 +425,7 @@ core-foundation = "0.9.3"
|
||||
core-foundation-sys = "0.8.6"
|
||||
ctor = "0.4.0"
|
||||
dashmap = "6.0"
|
||||
dap-types = { git = "https://github.com/zed-industries/dap-types", rev = "bfd4af0" }
|
||||
derive_more = "0.99.17"
|
||||
dirs = "4.0"
|
||||
ec4rs = "1.1"
|
||||
@@ -417,8 +437,7 @@ fork = "0.2.0"
|
||||
futures = "0.3"
|
||||
futures-batch = "0.6.1"
|
||||
futures-lite = "1.13"
|
||||
# TODO: get back to regular versions when https://github.com/rust-lang/git2-rs/pull/1120 is released
|
||||
git2 = { git = "https://github.com/rust-lang/git2-rs", rev = "a3b90cb3756c1bb63e2317bf9cfa57838178de5c", default-features = false }
|
||||
git2 = { version = "0.20.1", default-features = false }
|
||||
globset = "0.4"
|
||||
handlebars = "4.3"
|
||||
heed = { version = "0.21.0", features = ["read-txn-no-tls"] }
|
||||
@@ -520,7 +539,7 @@ sys-locale = "0.3.1"
|
||||
sysinfo = "0.31.0"
|
||||
take-until = "0.2.0"
|
||||
tempfile = "3.9.0"
|
||||
thiserror = "1.0.29"
|
||||
thiserror = "2.0.12"
|
||||
tiktoken-rs = "0.6.0"
|
||||
time = { version = "0.3", features = [
|
||||
"macros",
|
||||
@@ -561,6 +580,7 @@ unindent = "0.2.0"
|
||||
unicode-segmentation = "1.10"
|
||||
unicode-script = "0.5.7"
|
||||
url = "2.2"
|
||||
urlencoding = "2.1.2"
|
||||
uuid = { version = "1.1.2", features = ["v4", "v5", "v7", "serde"] }
|
||||
wasmparser = "0.221"
|
||||
wasm-encoder = "0.221"
|
||||
@@ -593,7 +613,7 @@ features = [
|
||||
]
|
||||
|
||||
[workspace.dependencies.windows]
|
||||
version = "0.60"
|
||||
version = "0.61"
|
||||
features = [
|
||||
"Foundation_Collections",
|
||||
"Foundation_Numerics",
|
||||
|
||||
1
assets/icons/arrow_right_left.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-arrow-right-left"><path d="m16 3 4 4-4 4"/><path d="M20 7H4"/><path d="m8 21-4-4 4-4"/><path d="M4 17h16"/></svg>
|
||||
|
After Width: | Height: | Size: 316 B |
3
assets/icons/arrow_up_right_alt.svg
Normal file
@@ -0,0 +1,3 @@
|
||||
<svg width="8" height="8" viewBox="0 0 8 8" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M5.4 2.6H5.75C5.75 2.50717 5.71312 2.41815 5.64749 2.35251C5.58185 2.28688 5.49283 2.25 5.4 2.25V2.6ZM2.6 2.25C2.4067 2.25 2.25 2.4067 2.25 2.6C2.25 2.7933 2.4067 2.95 2.6 2.95V2.25ZM5.05 5.4C5.05 5.5933 5.2067 5.75 5.4 5.75C5.5933 5.75 5.75 5.5933 5.75 5.4H5.05ZM2.35252 5.15251C2.21583 5.2892 2.21583 5.5108 2.35252 5.64748C2.4892 5.78417 2.7108 5.78417 2.84749 5.64748L2.35252 5.15251ZM5.4 2.25H2.6V2.95H5.4V2.25ZM5.05 2.6V5.4H5.75V2.6H5.05ZM5.15252 2.35251L2.35252 5.15251L2.84749 5.64748L5.64749 2.84748L5.15252 2.35251Z" fill="black"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 650 B |
1
assets/icons/brain.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-brain"><path d="M12 5a3 3 0 1 0-5.997.125 4 4 0 0 0-2.526 5.77 4 4 0 0 0 .556 6.588A4 4 0 1 0 12 18Z"/><path d="M12 5a3 3 0 1 1 5.997.125 4 4 0 0 1 2.526 5.77 4 4 0 0 1-.556 6.588A4 4 0 1 1 12 18Z"/><path d="M15 13a4.5 4.5 0 0 1-3-4 4.5 4.5 0 0 1-3 4"/><path d="M17.599 6.5a3 3 0 0 0 .399-1.375"/><path d="M6.003 5.125A3 3 0 0 0 6.401 6.5"/><path d="M3.477 10.896a4 4 0 0 1 .585-.396"/><path d="M19.938 10.5a4 4 0 0 1 .585.396"/><path d="M6 18a4 4 0 0 1-1.967-.516"/><path d="M19.967 17.484A4 4 0 0 1 18 18"/></svg>
|
||||
|
After Width: | Height: | Size: 718 B |
1
assets/icons/clipboard.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-clipboard"><rect width="8" height="4" x="8" y="2" rx="1" ry="1"/><path d="M16 4h2a2 2 0 0 1 2 2v14a2 2 0 0 1-2 2H6a2 2 0 0 1-2-2V6a2 2 0 0 1 2-2h2"/></svg>
|
||||
|
After Width: | Height: | Size: 358 B |
1
assets/icons/cog.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-cog"><path d="M12 20a8 8 0 1 0 0-16 8 8 0 0 0 0 16Z"/><path d="M12 14a2 2 0 1 0 0-4 2 2 0 0 0 0 4Z"/><path d="M12 2v2"/><path d="M12 22v-2"/><path d="m17 20.66-1-1.73"/><path d="M11 10.27 7 3.34"/><path d="m20.66 17-1.73-1"/><path d="m3.34 7 1.73 1"/><path d="M14 12h8"/><path d="M2 12h2"/><path d="m20.66 7-1.73 1"/><path d="m3.34 17 1.73-1"/><path d="m17 3.34-1 1.73"/><path d="m11 13.73-4 6.93"/></svg>
|
||||
|
After Width: | Height: | Size: 608 B |
1
assets/icons/debug.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-bug"><path d="m8 2 1.88 1.88"/><path d="M14.12 3.88 16 2"/><path d="M9 7.13v-1a3.003 3.003 0 1 1 6 0v1"/><path d="M12 20c-3.3 0-6-2.7-6-6v-3a4 4 0 0 1 4-4h4a4 4 0 0 1 4 4v3c0 3.3-2.7 6-6 6"/><path d="M12 20v-9"/><path d="M6.53 9C4.6 8.8 3 7.1 3 5"/><path d="M6 13H2"/><path d="M3 21c0-2.1 1.7-3.9 3.8-4"/><path d="M20.97 5c0 2.1-1.6 3.8-3.5 4"/><path d="M22 13h-4"/><path d="M17.2 17c2.1.1 3.8 1.9 3.8 4"/></svg>
|
||||
|
After Width: | Height: | Size: 615 B |
1
assets/icons/debug_breakpoint.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="currentColor" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-circle"><circle cx="12" cy="12" r="10"/></svg>
|
||||
|
After Width: | Height: | Size: 257 B |
1
assets/icons/debug_continue.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-step-forward"><line x1="6" x2="6" y1="4" y2="20"/><polygon points="10,4 20,12 10,20"/></svg>
|
||||
|
After Width: | Height: | Size: 295 B |
1
assets/icons/debug_disconnect.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-unplug"><path d="m19 5 3-3"/><path d="m2 22 3-3"/><path d="M6.3 20.3a2.4 2.4 0 0 0 3.4 0L12 18l-6-6-2.3 2.3a2.4 2.4 0 0 0 0 3.4Z"/><path d="M7.5 13.5 10 11"/><path d="M10.5 16.5 13 14"/><path d="m12 6 6 6 2.3-2.3a2.4 2.4 0 0 0 0-3.4l-2.6-2.6a2.4 2.4 0 0 0-3.4 0Z"/></svg>
|
||||
|
After Width: | Height: | Size: 474 B |
1
assets/icons/debug_ignore_breakpoints.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-circle-off"><path d="m2 2 20 20"/><path d="M8.35 2.69A10 10 0 0 1 21.3 15.65"/><path d="M19.08 19.08A10 10 0 1 1 4.92 4.92"/></svg>
|
||||
|
After Width: | Height: | Size: 334 B |
1
assets/icons/debug_log_breakpoint.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="currentColor" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-message-circle"><path d="M7.9 20A9 9 0 1 0 4 16.1L2 22Z"/></svg>
|
||||
|
After Width: | Height: | Size: 275 B |
1
assets/icons/debug_pause.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-pause"><rect x="14" y="4" width="4" height="16" rx="1"/><rect x="6" y="4" width="4" height="16" rx="1"/></svg>
|
||||
|
After Width: | Height: | Size: 313 B |
1
assets/icons/debug_restart.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-rotate-ccw"><path d="M3 12a9 9 0 1 0 9-9 9.75 9.75 0 0 0-6.74 2.74L3 8"/><path d="M3 3v5h5"/></svg>
|
||||
|
After Width: | Height: | Size: 302 B |
1
assets/icons/debug_step_back.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-undo-dot"><path d="M21 17a9 9 0 0 0-15-6.7L3 13"/><path d="M3 7v6h6"/><circle cx="12" cy="17" r="1"/></svg>
|
||||
|
After Width: | Height: | Size: 310 B |
5
assets/icons/debug_step_into.svg
Normal file
@@ -0,0 +1,5 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-arrow-up-from-dot">
|
||||
<path d="m5 15 7 7 7-7"/>
|
||||
<path d="M12 8v14"/>
|
||||
<circle cx="12" cy="3" r="1"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 313 B |
5
assets/icons/debug_step_out.svg
Normal file
@@ -0,0 +1,5 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-arrow-up-from-dot">
|
||||
<path d="m3 10 9-8 9 8"/>
|
||||
<path d="M12 17V2"/>
|
||||
<circle cx="12" cy="21" r="1"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 314 B |
5
assets/icons/debug_step_over.svg
Normal file
@@ -0,0 +1,5 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-redo-dot">
|
||||
<circle cx="12" cy="17" r="1"/>
|
||||
<path d="M21 7v6h-6"/>
|
||||
<path d="M3 17a9 9 0 0 1 9-9 9 9 0 0 1 6 2.3l3 2.7"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 335 B |
1
assets/icons/debug_stop.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-square"><rect width="18" height="18" x="3" y="3" rx="2"/></svg>
|
||||
|
After Width: | Height: | Size: 266 B |
@@ -1,6 +1,6 @@
|
||||
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M3 4H8" stroke="black" stroke-width="1.75" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M6 10L11 10" stroke="black" stroke-width="1.75" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<circle cx="4" cy="10" r="1.875" stroke="black" stroke-width="1.75"/>
|
||||
<circle cx="10" cy="4" r="1.875" stroke="black" stroke-width="1.75"/>
|
||||
<path d="M3 4H8" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M6 10L11 10" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<circle cx="4" cy="10" r="1.875" stroke="black" stroke-width="1.5"/>
|
||||
<circle cx="10" cy="4" r="1.875" stroke="black" stroke-width="1.5"/>
|
||||
</svg>
|
||||
|
||||
|
Before Width: | Height: | Size: 450 B After Width: | Height: | Size: 446 B |
@@ -30,6 +30,13 @@
|
||||
"ctrl-0": ["zed::ResetBufferFontSize", { "persist": false }],
|
||||
"ctrl-,": "zed::OpenSettings",
|
||||
"ctrl-q": "zed::Quit",
|
||||
"f4": "debugger::Start",
|
||||
"f5": "debugger::Continue",
|
||||
"shift-f5": "debugger::Stop",
|
||||
"f6": "debugger::Pause",
|
||||
"f7": "debugger::StepOver",
|
||||
"cmd-f11": "debugger::StepInto",
|
||||
"shift-f11": "debugger::StepOut",
|
||||
"f11": "zed::ToggleFullScreen",
|
||||
"ctrl-alt-z": "edit_prediction::RateCompletions",
|
||||
"ctrl-shift-i": "edit_prediction::ToggleMenu"
|
||||
@@ -46,7 +53,9 @@
|
||||
"context": "Prompt",
|
||||
"bindings": {
|
||||
"left": "menu::SelectPrevious",
|
||||
"right": "menu::SelectNext"
|
||||
"right": "menu::SelectNext",
|
||||
"h": "menu::SelectPrevious",
|
||||
"l": "menu::SelectNext"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -107,6 +116,7 @@
|
||||
"ctrl-a": "editor::SelectAll",
|
||||
"ctrl-l": "editor::SelectLine",
|
||||
"ctrl-shift-i": "editor::Format",
|
||||
"alt-shift-o": "editor::OrganizeImports",
|
||||
// "cmd-shift-left": ["editor::SelectToBeginningOfLine", {"stop_at_soft_wraps": true, "stop_at_indent": true }],
|
||||
// "ctrl-shift-a": ["editor::SelectToBeginningOfLine", { "stop_at_soft_wraps": true, "stop_at_indent": true }],
|
||||
"shift-home": ["editor::SelectToBeginningOfLine", { "stop_at_soft_wraps": true, "stop_at_indent": true }],
|
||||
@@ -123,7 +133,9 @@
|
||||
"alt-g b": "editor::ToggleGitBlame",
|
||||
"menu": "editor::OpenContextMenu",
|
||||
"shift-f10": "editor::OpenContextMenu",
|
||||
"ctrl-shift-e": "editor::ToggleEditPrediction"
|
||||
"ctrl-shift-e": "editor::ToggleEditPrediction",
|
||||
"f9": "editor::ToggleBreakpoint",
|
||||
"shift-f9": "editor::EditLogBreakpoint"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -183,7 +195,7 @@
|
||||
"ctrl-shift-g": "search::SelectPreviousMatch",
|
||||
"ctrl-alt-/": "assistant::ToggleModelSelector",
|
||||
"ctrl-k h": "assistant::DeployHistory",
|
||||
"ctrl-k l": "assistant::DeployPromptLibrary",
|
||||
"ctrl-k l": "assistant::OpenPromptLibrary",
|
||||
"new": "assistant::NewChat",
|
||||
"ctrl-t": "assistant::NewChat",
|
||||
"ctrl-n": "assistant::NewChat"
|
||||
@@ -362,6 +374,7 @@
|
||||
"ctrl-k ctrl-0": "editor::FoldAll",
|
||||
"ctrl-k ctrl-j": "editor::UnfoldAll",
|
||||
"ctrl-space": "editor::ShowCompletions",
|
||||
"ctrl-shift-space": "editor::ShowWordCompletions",
|
||||
"ctrl-.": "editor::ToggleCodeActions",
|
||||
"ctrl-k r": "editor::RevealInFileManager",
|
||||
"ctrl-k p": "editor::CopyPath",
|
||||
@@ -741,7 +754,8 @@
|
||||
"escape": "git_panel::ToggleFocus",
|
||||
"ctrl-enter": "git::Commit",
|
||||
"alt-enter": "menu::SecondaryConfirm",
|
||||
"backspace": "git::RestoreFile"
|
||||
"shift-delete": "git::RestoreFile",
|
||||
"ctrl-delete": "git::RestoreFile"
|
||||
}
|
||||
},
|
||||
{
|
||||
|
||||
@@ -14,6 +14,13 @@
|
||||
{
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"f4": "debugger::Start",
|
||||
"f5": "debugger::Continue",
|
||||
"shift-f5": "debugger::Stop",
|
||||
"f6": "debugger::Pause",
|
||||
"f7": "debugger::StepOver",
|
||||
"f11": "debugger::StepInto",
|
||||
"shift-f11": "debugger::StepOut",
|
||||
"home": "menu::SelectFirst",
|
||||
"shift-pageup": "menu::SelectFirst",
|
||||
"pageup": "menu::SelectFirst",
|
||||
@@ -148,6 +155,8 @@
|
||||
"cmd-\"": "editor::ExpandAllDiffHunks",
|
||||
"cmd-alt-g b": "editor::ToggleGitBlame",
|
||||
"cmd-i": "editor::ShowSignatureHelp",
|
||||
"f9": "editor::ToggleBreakpoint",
|
||||
"shift-f9": "editor::EditLogBreakpoint",
|
||||
"ctrl-f12": "editor::GoToDeclaration",
|
||||
"alt-ctrl-f12": "editor::GoToDeclarationSplit",
|
||||
"ctrl-cmd-e": "editor::ToggleEditPrediction"
|
||||
@@ -232,7 +241,7 @@
|
||||
"cmd-shift-g": "search::SelectPreviousMatch",
|
||||
"cmd-alt-/": "assistant::ToggleModelSelector",
|
||||
"cmd-k h": "assistant::DeployHistory",
|
||||
"cmd-k l": "assistant::DeployPromptLibrary",
|
||||
"cmd-k l": "assistant::OpenPromptLibrary",
|
||||
"cmd-t": "assistant::NewChat",
|
||||
"cmd-n": "assistant::NewChat"
|
||||
}
|
||||
@@ -278,7 +287,9 @@
|
||||
"context": "MessageEditor > Editor",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"enter": "assistant2::Chat"
|
||||
"enter": "assistant2::Chat",
|
||||
"cmd-g d": "git::Diff",
|
||||
"shift-escape": "git::ExpandCommitEditor"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -466,6 +477,7 @@
|
||||
// Using `ctrl-space` in Zed requires disabling the macOS global shortcut.
|
||||
// System Preferences->Keyboard->Keyboard Shortcuts->Input Sources->Select the previous input source (uncheck)
|
||||
"ctrl-space": "editor::ShowCompletions",
|
||||
"ctrl-shift-space": "editor::ShowWordCompletions",
|
||||
"cmd-.": "editor::ToggleCodeActions",
|
||||
"cmd-k r": "editor::RevealInFileManager",
|
||||
"cmd-k p": "editor::CopyPath",
|
||||
@@ -695,6 +707,16 @@
|
||||
"ctrl-]": "assistant::CycleNextInlineAssist"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Prompt",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"left": "menu::SelectPrevious",
|
||||
"right": "menu::SelectNext",
|
||||
"h": "menu::SelectPrevious",
|
||||
"l": "menu::SelectNext"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "ProjectSearchBar && !in_replace",
|
||||
"use_key_equivalents": true,
|
||||
@@ -755,6 +777,14 @@
|
||||
"space": "project_panel::Open"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "VariableList",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"left": "variable_list::CollapseSelectedEntry",
|
||||
"right": "variable_list::ExpandSelectedEntry"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "GitPanel && ChangesList",
|
||||
"use_key_equivalents": true,
|
||||
@@ -773,7 +803,7 @@
|
||||
"shift-tab": "git_panel::FocusEditor",
|
||||
"escape": "git_panel::ToggleFocus",
|
||||
"cmd-enter": "git::Commit",
|
||||
"backspace": "git::RestoreFile"
|
||||
"cmd-backspace": "git::RestoreFile"
|
||||
}
|
||||
},
|
||||
{
|
||||
|
||||
@@ -3,7 +3,14 @@
|
||||
"bindings": {
|
||||
"ctrl-alt-s": "zed::OpenSettings",
|
||||
"ctrl-{": "pane::ActivatePreviousItem",
|
||||
"ctrl-}": "pane::ActivateNextItem"
|
||||
"ctrl-}": "pane::ActivateNextItem",
|
||||
"ctrl-f2": "debugger::Stop",
|
||||
"f6": "debugger::Pause",
|
||||
"f7": "debugger::StepInto",
|
||||
"f8": "debugger::StepOver",
|
||||
"shift-f8": "debugger::StepOut",
|
||||
"f9": "debugger::Continue",
|
||||
"alt-shift-f9": "debugger::Start"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -31,6 +38,7 @@
|
||||
"shift-alt-up": "editor::MoveLineUp",
|
||||
"shift-alt-down": "editor::MoveLineDown",
|
||||
"ctrl-alt-l": "editor::Format",
|
||||
"ctrl-alt-o": "editor::OrganizeImports",
|
||||
"shift-f6": "editor::Rename",
|
||||
"ctrl-alt-left": "pane::GoBack",
|
||||
"ctrl-alt-right": "pane::GoForward",
|
||||
@@ -48,7 +56,9 @@
|
||||
"ctrl-home": "editor::MoveToBeginning",
|
||||
"ctrl-end": "editor::MoveToEnd",
|
||||
"ctrl-shift-home": "editor::SelectToBeginning",
|
||||
"ctrl-shift-end": "editor::SelectToEnd"
|
||||
"ctrl-shift-end": "editor::SelectToEnd",
|
||||
"ctrl-f8": "editor::ToggleBreakpoint",
|
||||
"ctrl-shift-f8": "editor::EditLogBreakpoint"
|
||||
}
|
||||
},
|
||||
{
|
||||
|
||||
@@ -2,7 +2,14 @@
|
||||
{
|
||||
"bindings": {
|
||||
"cmd-{": "pane::ActivatePreviousItem",
|
||||
"cmd-}": "pane::ActivateNextItem"
|
||||
"cmd-}": "pane::ActivateNextItem",
|
||||
"ctrl-f2": "debugger::Stop",
|
||||
"f6": "debugger::Pause",
|
||||
"f7": "debugger::StepInto",
|
||||
"f8": "debugger::StepOver",
|
||||
"shift-f8": "debugger::StepOut",
|
||||
"f9": "debugger::Continue",
|
||||
"alt-shift-f9": "debugger::Start"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -29,6 +36,7 @@
|
||||
"shift-alt-up": "editor::MoveLineUp",
|
||||
"shift-alt-down": "editor::MoveLineDown",
|
||||
"cmd-alt-l": "editor::Format",
|
||||
"ctrl-alt-o": "editor::OrganizeImports",
|
||||
"shift-f6": "editor::Rename",
|
||||
"cmd-[": "pane::GoBack",
|
||||
"cmd-]": "pane::GoForward",
|
||||
@@ -45,7 +53,9 @@
|
||||
"cmd-home": "editor::MoveToBeginning",
|
||||
"cmd-end": "editor::MoveToEnd",
|
||||
"cmd-shift-home": "editor::SelectToBeginning",
|
||||
"cmd-shift-end": "editor::SelectToEnd"
|
||||
"cmd-shift-end": "editor::SelectToEnd",
|
||||
"ctrl-f8": "editor::ToggleBreakpoint",
|
||||
"ctrl-shift-f8": "editor::EditLogBreakpoint"
|
||||
}
|
||||
},
|
||||
{
|
||||
|
||||
@@ -11,8 +11,22 @@ You should only perform actions that modify the user’s system if explicitly re
|
||||
|
||||
Be concise and direct in your responses.
|
||||
|
||||
The user has opened a project that contains the following root directories/files:
|
||||
The user has opened a project that contains the following root directories/files. Whenever you specify a path in the project, it must be a relative path which begins with one of these root directories/files:
|
||||
|
||||
{{#each worktrees}}
|
||||
- {{root_name}} (absolute path: {{abs_path}})
|
||||
- `{{root_name}}` (absolute path: `{{abs_path}}`)
|
||||
{{/each}}
|
||||
{{#if has_rules}}
|
||||
|
||||
There are rules that apply to these root directories:
|
||||
{{#each worktrees}}
|
||||
{{#if rules_file}}
|
||||
|
||||
`{{root_name}}/{{rules_file.rel_path}}`:
|
||||
|
||||
``````
|
||||
{{{rules_file.text}}}
|
||||
``````
|
||||
{{/if}}
|
||||
{{/each}}
|
||||
{{/if}}
|
||||
|
||||
@@ -25,7 +25,7 @@
|
||||
// Features that can be globally enabled or disabled
|
||||
"features": {
|
||||
// Which edit prediction provider to use.
|
||||
"edit_prediction_provider": "copilot"
|
||||
"edit_prediction_provider": "zed"
|
||||
},
|
||||
// The name of a font to use for rendering text in the editor
|
||||
"buffer_font_family": "Zed Plex Mono",
|
||||
@@ -136,6 +136,11 @@
|
||||
// Whether to use the system provided dialogs for Open and Save As.
|
||||
// When set to false, Zed will use the built-in keyboard-first pickers.
|
||||
"use_system_path_prompts": true,
|
||||
// Whether to use the system provided dialogs for prompts, such as confirmation
|
||||
// prompts.
|
||||
// When set to false, Zed will use its built-in prompts. Note that on Linux,
|
||||
// this option is ignored and Zed will always use the built-in prompts.
|
||||
"use_system_prompts": true,
|
||||
// Whether the cursor blinks in the editor.
|
||||
"cursor_blink": true,
|
||||
// Cursor shape for the default editor.
|
||||
@@ -150,6 +155,8 @@
|
||||
//
|
||||
// Default: not set, defaults to "bar"
|
||||
"cursor_shape": null,
|
||||
// Determines whether the mouse cursor is hidden when typing in an editor or input box.
|
||||
"hide_mouse_while_typing": true,
|
||||
// How to highlight the current line in the editor.
|
||||
//
|
||||
// 1. Don't highlight the current line:
|
||||
@@ -179,6 +186,11 @@
|
||||
// Whether to show the signature help after completion or a bracket pair inserted.
|
||||
// If `auto_signature_help` is enabled, this setting will be treated as enabled also.
|
||||
"show_signature_help_after_edits": false,
|
||||
// What to do when go to definition yields no results.
|
||||
//
|
||||
// 1. Do nothing: `none`
|
||||
// 2. Find references for the same symbol: `find_all_references` (default)
|
||||
"go_to_definition_fallback": "find_all_references",
|
||||
// Whether to show wrap guides (vertical rulers) in the editor.
|
||||
// Setting this to true will show a guide at the 'preferred_line_length' value
|
||||
// if 'soft_wrap' is set to 'preferred_line_length', and will show any
|
||||
@@ -324,6 +336,8 @@
|
||||
"code_actions": true,
|
||||
// Whether to show runnables buttons in the gutter.
|
||||
"runnables": true,
|
||||
// Whether to show breakpoints in the gutter.
|
||||
"breakpoints": true,
|
||||
// Whether to show fold buttons in the gutter.
|
||||
"folds": true
|
||||
},
|
||||
@@ -607,7 +621,43 @@
|
||||
"provider": "zed.dev",
|
||||
// The model to use.
|
||||
"model": "claude-3-5-sonnet-latest"
|
||||
}
|
||||
},
|
||||
"profiles": {
|
||||
"read-only": {
|
||||
"name": "Read-only",
|
||||
"tools": {
|
||||
"diagnostics": true,
|
||||
"fetch": true,
|
||||
"list-directory": true,
|
||||
"now": true,
|
||||
"path-search": true,
|
||||
"read-file": true,
|
||||
"regex-search": true,
|
||||
"thinking": true
|
||||
}
|
||||
},
|
||||
"code-writer": {
|
||||
"name": "Code Writer",
|
||||
"tools": {
|
||||
"bash": true,
|
||||
"copy-path": true,
|
||||
"create-file": true,
|
||||
"delete-path": true,
|
||||
"diagnostics": true,
|
||||
"find-replace-file": true,
|
||||
"edit-files": false,
|
||||
"fetch": true,
|
||||
"list-directory": true,
|
||||
"move-path": true,
|
||||
"now": true,
|
||||
"path-search": true,
|
||||
"read-file": true,
|
||||
"regex-search": true,
|
||||
"thinking": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"notify_when_agent_waiting": true
|
||||
},
|
||||
// The settings for slash commands.
|
||||
"slash_commands": {
|
||||
@@ -850,8 +900,24 @@
|
||||
//
|
||||
// The minimum column number to show the inline blame information at
|
||||
// "min_column": 0
|
||||
}
|
||||
},
|
||||
// How git hunks are displayed visually in the editor.
|
||||
// This setting can take two values:
|
||||
//
|
||||
// 1. Show unstaged hunks filled and staged hunks hollow:
|
||||
// "hunk_style": "staged_hollow"
|
||||
// 2. Show unstaged hunks hollow and staged hunks filled:
|
||||
// "hunk_style": "unstaged_hollow"
|
||||
"hunk_style": "staged_hollow"
|
||||
},
|
||||
// The list of custom Git hosting providers.
|
||||
"git_hosting_providers": [
|
||||
// {
|
||||
// "provider": "github",
|
||||
// "name": "BigCorp GitHub",
|
||||
// "base_url": "https://code.big-corp.com"
|
||||
// }
|
||||
],
|
||||
// Configuration for how direnv configuration should be loaded. May take 2 values:
|
||||
// 1. Load direnv configuration using `direnv export json` directly.
|
||||
// "load_direnv": "direct"
|
||||
@@ -958,7 +1024,7 @@
|
||||
// "alternate_scroll": "on",
|
||||
// 2. Default alternate scroll mode to off
|
||||
// "alternate_scroll": "off",
|
||||
"alternate_scroll": "off",
|
||||
"alternate_scroll": "on",
|
||||
// Set whether the option key behaves as the meta key.
|
||||
// May take 2 values:
|
||||
// 1. Rely on default platform handling of option key, on macOS
|
||||
@@ -1092,11 +1158,12 @@
|
||||
//
|
||||
// May take 3 values:
|
||||
// 1. "enabled"
|
||||
// Always fetch document's words for completions.
|
||||
// Always fetch document's words for completions along with LSP completions.
|
||||
// 2. "fallback"
|
||||
// Only if LSP response errors/times out/is empty, use document's words to show completions.
|
||||
// Only if LSP response errors or times out, use document's words to show completions.
|
||||
// 3. "disabled"
|
||||
// Never fetch or complete document's words for completions.
|
||||
// (Word-based completions can still be queried via a separate action)
|
||||
//
|
||||
// Default: fallback
|
||||
"words": "fallback",
|
||||
@@ -1107,8 +1174,8 @@
|
||||
// When fetching LSP completions, determines how long to wait for a response of a particular server.
|
||||
// When set to 0, waits indefinitely.
|
||||
//
|
||||
// Default: 500
|
||||
"lsp_fetch_timeout_ms": 500
|
||||
// Default: 0
|
||||
"lsp_fetch_timeout_ms": 0
|
||||
},
|
||||
// Different settings for specific languages.
|
||||
"languages": {
|
||||
@@ -1200,11 +1267,19 @@
|
||||
"allowed": true
|
||||
}
|
||||
},
|
||||
"LaTeX": {
|
||||
"format_on_save": "on",
|
||||
"formatter": "language_server",
|
||||
"language_servers": ["texlab", "..."],
|
||||
"prettier": {
|
||||
"allowed": false
|
||||
}
|
||||
},
|
||||
"Markdown": {
|
||||
"format_on_save": "off",
|
||||
"use_on_type_format": false,
|
||||
"allow_rewrap": "anywhere",
|
||||
"soft_wrap": "bounded",
|
||||
"soft_wrap": "editor_width",
|
||||
"prettier": {
|
||||
"allowed": true
|
||||
}
|
||||
@@ -1436,6 +1511,12 @@
|
||||
// }
|
||||
// ]
|
||||
"ssh_connections": [],
|
||||
|
||||
// Configures context servers for use in the Assistant.
|
||||
"context_servers": {}
|
||||
"context_servers": {},
|
||||
"debugger": {
|
||||
"stepping_granularity": "line",
|
||||
"save_breakpoints": true,
|
||||
"button": true
|
||||
}
|
||||
}
|
||||
|
||||
32
assets/settings/initial_debug_tasks.json
Normal file
@@ -0,0 +1,32 @@
|
||||
[
|
||||
{
|
||||
"label": "Debug active PHP file",
|
||||
"adapter": "php",
|
||||
"program": "$ZED_FILE",
|
||||
"request": "launch",
|
||||
"cwd": "$ZED_WORKTREE_ROOT"
|
||||
},
|
||||
{
|
||||
"label": "Debug active Python file",
|
||||
"adapter": "python",
|
||||
"program": "$ZED_FILE",
|
||||
"request": "launch",
|
||||
"cwd": "$ZED_WORKTREE_ROOT"
|
||||
},
|
||||
{
|
||||
"label": "Debug active JavaScript file",
|
||||
"adapter": "javascript",
|
||||
"program": "$ZED_FILE",
|
||||
"request": "launch",
|
||||
"cwd": "$ZED_WORKTREE_ROOT"
|
||||
},
|
||||
{
|
||||
"label": "JavaScript debug terminal",
|
||||
"adapter": "javascript",
|
||||
"request": "launch",
|
||||
"cwd": "$ZED_WORKTREE_ROOT",
|
||||
"initialize_args": {
|
||||
"console": "integratedTerminal"
|
||||
}
|
||||
}
|
||||
]
|
||||
@@ -20,7 +20,6 @@ extension_host.workspace = true
|
||||
futures.workspace = true
|
||||
gpui.workspace = true
|
||||
language.workspace = true
|
||||
lsp.workspace = true
|
||||
project.workspace = true
|
||||
smallvec.workspace = true
|
||||
ui.workspace = true
|
||||
|
||||
@@ -7,8 +7,7 @@ use gpui::{
EventEmitter, InteractiveElement as _, ParentElement as _, Render, SharedString,
StatefulInteractiveElement, Styled, Transformation, Window,
};
use language::{LanguageRegistry, LanguageServerBinaryStatus, LanguageServerId};
use lsp::LanguageServerName;
use language::{BinaryStatus, LanguageRegistry, LanguageServerId};
use project::{
EnvironmentErrorMessage, LanguageServerProgress, LspStoreEvent, Project,
ProjectEnvironmentEvent, WorktreeId,
@@ -23,21 +22,21 @@ actions!(activity_indicator, [ShowErrorMessage]);

pub enum Event {
ShowError {
lsp_name: LanguageServerName,
server_name: SharedString,
error: String,
},
}

pub struct ActivityIndicator {
statuses: Vec<LspStatus>,
statuses: Vec<ServerStatus>,
project: Entity<Project>,
auto_updater: Option<Entity<AutoUpdater>>,
context_menu_handle: PopoverMenuHandle<ContextMenu>,
}

struct LspStatus {
name: LanguageServerName,
status: LanguageServerBinaryStatus,
struct ServerStatus {
name: SharedString,
status: BinaryStatus,
}

struct PendingWork<'a> {
@@ -64,11 +63,24 @@ impl ActivityIndicator {
let auto_updater = AutoUpdater::get(cx);
let this = cx.new(|cx| {
let mut status_events = languages.language_server_binary_statuses();
cx.spawn(|this, mut cx| async move {
cx.spawn(async move |this, cx| {
while let Some((name, status)) = status_events.next().await {
this.update(&mut cx, |this: &mut ActivityIndicator, cx| {
this.update(cx, |this: &mut ActivityIndicator, cx| {
this.statuses.retain(|s| s.name != name);
this.statuses.push(LspStatus { name, status });
this.statuses.push(ServerStatus { name, status });
cx.notify();
})?;
}
anyhow::Ok(())
})
.detach();

let mut status_events = languages.dap_server_binary_statuses();
cx.spawn(async move |this, cx| {
while let Some((name, status)) = status_events.next().await {
this.update(cx, |this, cx| {
this.statuses.retain(|s| s.name != name);
this.statuses.push(ServerStatus { name, status });
cx.notify();
})?;
}
@@ -106,25 +118,25 @@ impl ActivityIndicator {
});

cx.subscribe_in(&this, window, move |_, _, event, window, cx| match event {
Event::ShowError { lsp_name, error } => {
Event::ShowError { server_name, error } => {
let create_buffer = project.update(cx, |project, cx| project.create_buffer(cx));
let project = project.clone();
let error = error.clone();
let lsp_name = lsp_name.clone();
cx.spawn_in(window, |workspace, mut cx| async move {
let server_name = server_name.clone();
cx.spawn_in(window, async move |workspace, cx| {
let buffer = create_buffer.await?;
buffer.update(&mut cx, |buffer, cx| {
buffer.update(cx, |buffer, cx| {
buffer.edit(
[(
0..0,
format!("Language server error: {}\n\n{}", lsp_name, error),
format!("Language server error: {}\n\n{}", server_name, error),
)],
None,
cx,
);
buffer.set_capability(language::Capability::ReadOnly, cx);
})?;
workspace.update_in(&mut cx, |workspace, window, cx| {
workspace.update_in(cx, |workspace, window, cx| {
workspace.add_item_to_active_pane(
Box::new(cx.new(|cx| {
Editor::for_buffer(buffer, Some(project.clone()), window, cx)
@@ -147,9 +159,9 @@ impl ActivityIndicator {

fn show_error_message(&mut self, _: &ShowErrorMessage, _: &mut Window, cx: &mut Context<Self>) {
self.statuses.retain(|status| {
if let LanguageServerBinaryStatus::Failed { error } = &status.status {
if let BinaryStatus::Failed { error } = &status.status {
cx.emit(Event::ShowError {
lsp_name: status.name.clone(),
server_name: status.name.clone(),
error: error.clone(),
});
false
@@ -278,12 +290,10 @@ impl ActivityIndicator {
let mut failed = SmallVec::<[_; 3]>::new();
for status in &self.statuses {
match status.status {
LanguageServerBinaryStatus::CheckingForUpdate => {
checking_for_update.push(status.name.clone())
}
LanguageServerBinaryStatus::Downloading => downloading.push(status.name.clone()),
LanguageServerBinaryStatus::Failed { .. } => failed.push(status.name.clone()),
LanguageServerBinaryStatus::None => {}
BinaryStatus::CheckingForUpdate => checking_for_update.push(status.name.clone()),
BinaryStatus::Downloading => downloading.push(status.name.clone()),
BinaryStatus::Failed { .. } => failed.push(status.name.clone()),
BinaryStatus::None => {}
}
}

@@ -296,7 +306,7 @@ impl ActivityIndicator {
),
message: format!(
"Downloading {}...",
downloading.iter().map(|name| name.0.as_ref()).fold(
downloading.iter().map(|name| name.as_ref()).fold(
String::new(),
|mut acc, s| {
if !acc.is_empty() {
@@ -324,7 +334,7 @@ impl ActivityIndicator {
),
message: format!(
"Checking for updates to {}...",
checking_for_update.iter().map(|name| name.0.as_ref()).fold(
checking_for_update.iter().map(|name| name.as_ref()).fold(
String::new(),
|mut acc, s| {
if !acc.is_empty() {
@@ -354,7 +364,7 @@ impl ActivityIndicator {
"Failed to run {}. Click to show error.",
failed
.iter()
.map(|name| name.0.as_ref())
.map(|name| name.as_ref())
.fold(String::new(), |mut acc, s| {
if !acc.is_empty() {
acc.push_str(", ");

@@ -24,6 +24,16 @@ pub struct AnthropicModelCacheConfiguration {
|
||||
pub max_cache_anchors: usize,
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
|
||||
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq)]
|
||||
pub enum AnthropicModelMode {
|
||||
#[default]
|
||||
Default,
|
||||
Thinking {
|
||||
budget_tokens: Option<u32>,
|
||||
},
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
|
||||
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, EnumIter)]
|
||||
pub enum Model {
|
||||
@@ -32,6 +42,11 @@ pub enum Model {
|
||||
Claude3_5Sonnet,
|
||||
#[serde(rename = "claude-3-7-sonnet", alias = "claude-3-7-sonnet-latest")]
|
||||
Claude3_7Sonnet,
|
||||
#[serde(
|
||||
rename = "claude-3-7-sonnet-thinking",
|
||||
alias = "claude-3-7-sonnet-thinking-latest"
|
||||
)]
|
||||
Claude3_7SonnetThinking,
|
||||
#[serde(rename = "claude-3-5-haiku", alias = "claude-3-5-haiku-latest")]
|
||||
Claude3_5Haiku,
|
||||
#[serde(rename = "claude-3-opus", alias = "claude-3-opus-latest")]
|
||||
@@ -54,6 +69,8 @@ pub enum Model {
|
||||
default_temperature: Option<f32>,
|
||||
#[serde(default)]
|
||||
extra_beta_headers: Vec<String>,
|
||||
#[serde(default)]
|
||||
mode: AnthropicModelMode,
|
||||
},
|
||||
}
|
||||
|
||||
@@ -61,6 +78,8 @@ impl Model {
|
||||
pub fn from_id(id: &str) -> Result<Self> {
|
||||
if id.starts_with("claude-3-5-sonnet") {
|
||||
Ok(Self::Claude3_5Sonnet)
|
||||
} else if id.starts_with("claude-3-7-sonnet-thinking") {
|
||||
Ok(Self::Claude3_7SonnetThinking)
|
||||
} else if id.starts_with("claude-3-7-sonnet") {
|
||||
Ok(Self::Claude3_7Sonnet)
|
||||
} else if id.starts_with("claude-3-5-haiku") {
|
||||
@@ -80,6 +99,20 @@ impl Model {
|
||||
match self {
|
||||
Model::Claude3_5Sonnet => "claude-3-5-sonnet-latest",
|
||||
Model::Claude3_7Sonnet => "claude-3-7-sonnet-latest",
|
||||
Model::Claude3_7SonnetThinking => "claude-3-7-sonnet-thinking-latest",
|
||||
Model::Claude3_5Haiku => "claude-3-5-haiku-latest",
|
||||
Model::Claude3Opus => "claude-3-opus-latest",
|
||||
Model::Claude3Sonnet => "claude-3-sonnet-20240229",
|
||||
Model::Claude3Haiku => "claude-3-haiku-20240307",
|
||||
Self::Custom { name, .. } => name,
|
||||
}
|
||||
}
|
||||
|
||||
/// The id of the model that should be used for making API requests
|
||||
pub fn request_id(&self) -> &str {
|
||||
match self {
|
||||
Model::Claude3_5Sonnet => "claude-3-5-sonnet-latest",
|
||||
Model::Claude3_7Sonnet | Model::Claude3_7SonnetThinking => "claude-3-7-sonnet-latest",
|
||||
Model::Claude3_5Haiku => "claude-3-5-haiku-latest",
|
||||
Model::Claude3Opus => "claude-3-opus-latest",
|
||||
Model::Claude3Sonnet => "claude-3-sonnet-20240229",
|
||||
@@ -92,6 +125,7 @@ impl Model {
|
||||
match self {
|
||||
Self::Claude3_7Sonnet => "Claude 3.7 Sonnet",
|
||||
Self::Claude3_5Sonnet => "Claude 3.5 Sonnet",
|
||||
Self::Claude3_7SonnetThinking => "Claude 3.7 Sonnet Thinking",
|
||||
Self::Claude3_5Haiku => "Claude 3.5 Haiku",
|
||||
Self::Claude3Opus => "Claude 3 Opus",
|
||||
Self::Claude3Sonnet => "Claude 3 Sonnet",
|
||||
@@ -107,6 +141,7 @@ impl Model {
|
||||
Self::Claude3_5Sonnet
|
||||
| Self::Claude3_5Haiku
|
||||
| Self::Claude3_7Sonnet
|
||||
| Self::Claude3_7SonnetThinking
|
||||
| Self::Claude3Haiku => Some(AnthropicModelCacheConfiguration {
|
||||
min_total_token: 2_048,
|
||||
should_speculate: true,
|
||||
@@ -125,6 +160,7 @@ impl Model {
|
||||
Self::Claude3_5Sonnet
|
||||
| Self::Claude3_5Haiku
|
||||
| Self::Claude3_7Sonnet
|
||||
| Self::Claude3_7SonnetThinking
|
||||
| Self::Claude3Opus
|
||||
| Self::Claude3Sonnet
|
||||
| Self::Claude3Haiku => 200_000,
|
||||
@@ -135,7 +171,10 @@ impl Model {
|
||||
pub fn max_output_tokens(&self) -> u32 {
|
||||
match self {
|
||||
Self::Claude3Opus | Self::Claude3Sonnet | Self::Claude3Haiku => 4_096,
|
||||
Self::Claude3_5Sonnet | Self::Claude3_7Sonnet | Self::Claude3_5Haiku => 8_192,
|
||||
Self::Claude3_5Sonnet
|
||||
| Self::Claude3_7Sonnet
|
||||
| Self::Claude3_7SonnetThinking
|
||||
| Self::Claude3_5Haiku => 8_192,
|
||||
Self::Custom {
|
||||
max_output_tokens, ..
|
||||
} => max_output_tokens.unwrap_or(4_096),
|
||||
@@ -146,6 +185,7 @@ impl Model {
|
||||
match self {
|
||||
Self::Claude3_5Sonnet
|
||||
| Self::Claude3_7Sonnet
|
||||
| Self::Claude3_7SonnetThinking
|
||||
| Self::Claude3_5Haiku
|
||||
| Self::Claude3Opus
|
||||
| Self::Claude3Sonnet
|
||||
@@ -157,6 +197,21 @@ impl Model {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn mode(&self) -> AnthropicModelMode {
|
||||
match self {
|
||||
Self::Claude3_5Sonnet
|
||||
| Self::Claude3_7Sonnet
|
||||
| Self::Claude3_5Haiku
|
||||
| Self::Claude3Opus
|
||||
| Self::Claude3Sonnet
|
||||
| Self::Claude3Haiku => AnthropicModelMode::Default,
|
||||
Self::Claude3_7SonnetThinking => AnthropicModelMode::Thinking {
|
||||
budget_tokens: Some(4_096),
|
||||
},
|
||||
Self::Custom { mode, .. } => mode.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
pub const DEFAULT_BETA_HEADERS: &[&str] = &["prompt-caching-2024-07-31"];
|
||||
|
||||
pub fn beta_headers(&self) -> String {
|
||||
@@ -188,7 +243,7 @@ impl Model {
|
||||
{
|
||||
tool_override
|
||||
} else {
|
||||
self.id()
|
||||
self.request_id()
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -409,6 +464,8 @@ pub async fn extract_tool_args_from_events(
|
||||
Err(error) => Some(Err(error)),
|
||||
Ok(Event::ContentBlockDelta { index, delta }) => match delta {
|
||||
ContentDelta::TextDelta { .. } => None,
|
||||
ContentDelta::ThinkingDelta { .. } => None,
|
||||
ContentDelta::SignatureDelta { .. } => None,
|
||||
ContentDelta::InputJsonDelta { partial_json } => {
|
||||
if index == tool_use_index {
|
||||
Some(Ok(partial_json))
|
||||
@@ -487,6 +544,10 @@ pub enum RequestContent {
|
||||
pub enum ResponseContent {
|
||||
#[serde(rename = "text")]
|
||||
Text { text: String },
|
||||
#[serde(rename = "thinking")]
|
||||
Thinking { thinking: String },
|
||||
#[serde(rename = "redacted_thinking")]
|
||||
RedactedThinking { data: String },
|
||||
#[serde(rename = "tool_use")]
|
||||
ToolUse {
|
||||
id: String,
|
||||
@@ -518,6 +579,12 @@ pub enum ToolChoice {
|
||||
Tool { name: String },
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(tag = "type", rename_all = "lowercase")]
|
||||
pub enum Thinking {
|
||||
Enabled { budget_tokens: Option<u32> },
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct Request {
|
||||
pub model: String,
|
||||
@@ -526,6 +593,8 @@ pub struct Request {
|
||||
#[serde(default, skip_serializing_if = "Vec::is_empty")]
|
||||
pub tools: Vec<Tool>,
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
pub thinking: Option<Thinking>,
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
pub tool_choice: Option<ToolChoice>,
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
pub system: Option<String>,
|
||||
@@ -609,6 +678,10 @@ pub enum Event {
|
||||
pub enum ContentDelta {
|
||||
#[serde(rename = "text_delta")]
|
||||
TextDelta { text: String },
|
||||
#[serde(rename = "thinking_delta")]
|
||||
ThinkingDelta { thinking: String },
|
||||
#[serde(rename = "signature_delta")]
|
||||
SignatureDelta { signature: String },
|
||||
#[serde(rename = "input_json_delta")]
|
||||
InputJsonDelta { partial_json: String },
|
||||
}
|
||||
|
||||
@@ -34,9 +34,9 @@ impl AskPassDelegate {
|
||||
password_prompt: impl Fn(String, oneshot::Sender<String>, &mut AsyncApp) + Send + Sync + 'static,
|
||||
) -> Self {
|
||||
let (tx, mut rx) = mpsc::unbounded::<(String, oneshot::Sender<String>)>();
|
||||
let task = cx.spawn(|mut cx| async move {
|
||||
let task = cx.spawn(async move |cx: &mut AsyncApp| {
|
||||
while let Some((prompt, channel)) = rx.next().await {
|
||||
password_prompt(prompt, channel, &mut cx);
|
||||
password_prompt(prompt, channel, cx);
|
||||
}
|
||||
});
|
||||
Self { tx, _task: task }
|
||||
@@ -188,7 +188,7 @@ impl AskPassSession {
|
||||
}
|
||||
|
||||
pub async fn run(&mut self) -> AskPassResult {
|
||||
futures::FutureExt::fuse(smol::Timer::after(Duration::from_secs(10))).await;
|
||||
futures::FutureExt::fuse(smol::Timer::after(Duration::from_secs(20))).await;
|
||||
AskPassResult::Timedout
|
||||
}
|
||||
}
|
||||
|
||||
@@ -98,9 +98,9 @@ pub fn init(
|
||||
AssistantSettings::register(cx);
|
||||
SlashCommandSettings::register(cx);
|
||||
|
||||
cx.spawn(|mut cx| {
|
||||
cx.spawn({
|
||||
let client = client.clone();
|
||||
async move {
|
||||
async move |cx| {
|
||||
let is_search_slash_command_enabled = cx
|
||||
.update(|cx| cx.wait_for_flag::<SearchSlashCommandFeatureFlag>())?
|
||||
.await;
|
||||
@@ -116,7 +116,7 @@ pub fn init(
|
||||
let semantic_index = SemanticDb::new(
|
||||
paths::embeddings_dir().join("semantic-index-db.0.mdb"),
|
||||
Arc::new(embedding_provider),
|
||||
&mut cx,
|
||||
cx,
|
||||
)
|
||||
.await?;
|
||||
|
||||
|
||||
@@ -38,7 +38,7 @@ use workspace::{
|
||||
dock::{DockPosition, Panel, PanelEvent},
|
||||
pane, DraggedSelection, Pane, ShowConfiguration, ToggleZoom, Workspace,
|
||||
};
|
||||
use zed_actions::assistant::{DeployPromptLibrary, InlineAssist, ToggleFocus};
|
||||
use zed_actions::assistant::{InlineAssist, OpenPromptLibrary, ToggleFocus};
|
||||
|
||||
pub fn init(cx: &mut App) {
|
||||
workspace::FollowableViewRegistry::register::<ContextEditor>(cx);
|
||||
@@ -98,16 +98,16 @@ impl AssistantPanel {
|
||||
prompt_builder: Arc<PromptBuilder>,
|
||||
cx: AsyncWindowContext,
|
||||
) -> Task<Result<Entity<Self>>> {
|
||||
cx.spawn(|mut cx| async move {
|
||||
cx.spawn(async move |cx| {
|
||||
let slash_commands = Arc::new(SlashCommandWorkingSet::default());
|
||||
let context_store = workspace
|
||||
.update(&mut cx, |workspace, cx| {
|
||||
.update(cx, |workspace, cx| {
|
||||
let project = workspace.project().clone();
|
||||
ContextStore::new(project, prompt_builder.clone(), slash_commands, cx)
|
||||
})?
|
||||
.await?;
|
||||
|
||||
workspace.update_in(&mut cx, |workspace, window, cx| {
|
||||
workspace.update_in(cx, |workspace, window, cx| {
|
||||
// TODO: deserialize state.
|
||||
cx.new(|cx| Self::new(workspace, context_store, window, cx))
|
||||
})
|
||||
@@ -259,7 +259,7 @@ impl AssistantPanel {
|
||||
menu.context(focus_handle.clone())
|
||||
.action("New Chat", Box::new(NewChat))
|
||||
.action("History", Box::new(DeployHistory))
|
||||
.action("Prompt Library", Box::new(DeployPromptLibrary))
|
||||
.action("Prompt Library", Box::new(OpenPromptLibrary))
|
||||
.action("Configure", Box::new(ShowConfiguration))
|
||||
.action(zoom_label, Box::new(ToggleZoom))
|
||||
}))
|
||||
@@ -357,9 +357,9 @@ impl AssistantPanel {
|
||||
) -> Task<()> {
|
||||
let mut status_rx = client.status();
|
||||
|
||||
cx.spawn_in(window, |this, mut cx| async move {
|
||||
cx.spawn_in(window, async move |this, cx| {
|
||||
while let Some(status) = status_rx.next().await {
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
if this.client_status.is_none()
|
||||
|| this
|
||||
.client_status
|
||||
@@ -371,7 +371,7 @@ impl AssistantPanel {
|
||||
})
|
||||
.log_err();
|
||||
}
|
||||
this.update(&mut cx, |this, _cx| this.watch_client_status = None)
|
||||
this.update(cx, |this, _cx| this.watch_client_status = None)
|
||||
.log_err();
|
||||
})
|
||||
}
|
||||
@@ -576,11 +576,11 @@ impl AssistantPanel {
|
||||
if self.authenticate_provider_task.is_none() {
|
||||
self.authenticate_provider_task = Some((
|
||||
provider.id(),
|
||||
cx.spawn_in(window, |this, mut cx| async move {
|
||||
cx.spawn_in(window, async move |this, cx| {
|
||||
if let Some(future) = load_credentials {
|
||||
let _ = future.await;
|
||||
}
|
||||
this.update(&mut cx, |this, _cx| {
|
||||
this.update(cx, |this, _cx| {
|
||||
this.authenticate_provider_task = None;
|
||||
})
|
||||
.log_err();
|
||||
@@ -641,9 +641,9 @@ impl AssistantPanel {
|
||||
}
|
||||
} else {
|
||||
let assistant_panel = assistant_panel.downgrade();
|
||||
cx.spawn_in(window, |workspace, mut cx| async move {
|
||||
cx.spawn_in(window, async move |workspace, cx| {
|
||||
let Some(task) =
|
||||
assistant_panel.update(&mut cx, |assistant, cx| assistant.authenticate(cx))?
|
||||
assistant_panel.update(cx, |assistant, cx| assistant.authenticate(cx))?
|
||||
else {
|
||||
let answer = cx
|
||||
.prompt(
|
||||
@@ -665,7 +665,7 @@ impl AssistantPanel {
|
||||
return Ok(());
|
||||
};
|
||||
task.await?;
|
||||
if assistant_panel.update(&mut cx, |panel, cx| panel.is_authenticated(cx))? {
|
||||
if assistant_panel.update(cx, |panel, cx| panel.is_authenticated(cx))? {
|
||||
cx.update(|window, cx| match inline_assist_target {
|
||||
InlineAssistTarget::Editor(active_editor, include_context) => {
|
||||
let assistant_panel = if include_context {
|
||||
@@ -698,7 +698,7 @@ impl AssistantPanel {
|
||||
}
|
||||
})?
|
||||
} else {
|
||||
workspace.update_in(&mut cx, |workspace, window, cx| {
|
||||
workspace.update_in(cx, |workspace, window, cx| {
|
||||
workspace.focus_panel::<AssistantPanel>(window, cx)
|
||||
})?;
|
||||
}
|
||||
@@ -791,10 +791,10 @@ impl AssistantPanel {
|
||||
.context_store
|
||||
.update(cx, |store, cx| store.create_remote_context(cx));
|
||||
|
||||
cx.spawn_in(window, |this, mut cx| async move {
|
||||
cx.spawn_in(window, async move |this, cx| {
|
||||
let context = task.await?;
|
||||
|
||||
this.update_in(&mut cx, |this, window, cx| {
|
||||
this.update_in(cx, |this, window, cx| {
|
||||
let workspace = this.workspace.clone();
|
||||
let project = this.project.clone();
|
||||
let lsp_adapter_delegate =
|
||||
@@ -847,9 +847,9 @@ impl AssistantPanel {
|
||||
|
||||
self.show_context(editor.clone(), window, cx);
|
||||
let workspace = self.workspace.clone();
|
||||
cx.spawn_in(window, move |_, mut cx| async move {
|
||||
cx.spawn_in(window, async move |_, cx| {
|
||||
workspace
|
||||
.update_in(&mut cx, |workspace, window, cx| {
|
||||
.update_in(cx, |workspace, window, cx| {
|
||||
workspace.focus_panel::<AssistantPanel>(window, cx);
|
||||
})
|
||||
.ok();
|
||||
@@ -1028,7 +1028,7 @@ impl AssistantPanel {
|
||||
|
||||
fn deploy_prompt_library(
|
||||
&mut self,
|
||||
_: &DeployPromptLibrary,
|
||||
_: &OpenPromptLibrary,
|
||||
_window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
@@ -1069,8 +1069,8 @@ impl AssistantPanel {
|
||||
.filter(|editor| editor.read(cx).context().read(cx).path() == Some(&path))
|
||||
});
|
||||
if let Some(existing_context) = existing_context {
|
||||
return cx.spawn_in(window, |this, mut cx| async move {
|
||||
this.update_in(&mut cx, |this, window, cx| {
|
||||
return cx.spawn_in(window, async move |this, cx| {
|
||||
this.update_in(cx, |this, window, cx| {
|
||||
this.show_context(existing_context, window, cx)
|
||||
})
|
||||
});
|
||||
@@ -1085,9 +1085,9 @@ impl AssistantPanel {
|
||||
|
||||
let lsp_adapter_delegate = make_lsp_adapter_delegate(&project, cx).log_err().flatten();
|
||||
|
||||
cx.spawn_in(window, |this, mut cx| async move {
|
||||
cx.spawn_in(window, async move |this, cx| {
|
||||
let context = context.await?;
|
||||
this.update_in(&mut cx, |this, window, cx| {
|
||||
this.update_in(cx, |this, window, cx| {
|
||||
let editor = cx.new(|cx| {
|
||||
ContextEditor::for_context(
|
||||
context,
|
||||
@@ -1117,8 +1117,8 @@ impl AssistantPanel {
|
||||
.filter(|editor| *editor.read(cx).context().read(cx).id() == id)
|
||||
});
|
||||
if let Some(existing_context) = existing_context {
|
||||
return cx.spawn_in(window, |this, mut cx| async move {
|
||||
this.update_in(&mut cx, |this, window, cx| {
|
||||
return cx.spawn_in(window, async move |this, cx| {
|
||||
this.update_in(cx, |this, window, cx| {
|
||||
this.show_context(existing_context.clone(), window, cx)
|
||||
})?;
|
||||
Ok(existing_context)
|
||||
@@ -1134,9 +1134,9 @@ impl AssistantPanel {
|
||||
.log_err()
|
||||
.flatten();
|
||||
|
||||
cx.spawn_in(window, |this, mut cx| async move {
|
||||
cx.spawn_in(window, async move |this, cx| {
|
||||
let context = context.await?;
|
||||
this.update_in(&mut cx, |this, window, cx| {
|
||||
this.update_in(cx, |this, window, cx| {
|
||||
let editor = cx.new(|cx| {
|
||||
ContextEditor::for_context(
|
||||
context,
|
||||
|
||||
@@ -232,7 +232,7 @@ impl InlineAssistant {
|
||||
) {
|
||||
let (snapshot, initial_selections) = editor.update(cx, |editor, cx| {
|
||||
(
|
||||
editor.buffer().read(cx).snapshot(cx),
|
||||
editor.snapshot(window, cx),
|
||||
editor.selections.all::<Point>(cx),
|
||||
)
|
||||
});
|
||||
@@ -246,7 +246,37 @@ impl InlineAssistant {
|
||||
if selection.end.column == 0 {
|
||||
selection.end.row -= 1;
|
||||
}
|
||||
selection.end.column = snapshot.line_len(MultiBufferRow(selection.end.row));
|
||||
selection.end.column = snapshot
|
||||
.buffer_snapshot
|
||||
.line_len(MultiBufferRow(selection.end.row));
|
||||
} else if let Some(fold) =
|
||||
snapshot.crease_for_buffer_row(MultiBufferRow(selection.end.row))
|
||||
{
|
||||
selection.start = fold.range().start;
|
||||
selection.end = fold.range().end;
|
||||
if MultiBufferRow(selection.end.row) < snapshot.buffer_snapshot.max_row() {
|
||||
let chars = snapshot
|
||||
.buffer_snapshot
|
||||
.chars_at(Point::new(selection.end.row + 1, 0));
|
||||
|
||||
for c in chars {
|
||||
if c == '\n' {
|
||||
break;
|
||||
}
|
||||
if c.is_whitespace() {
|
||||
continue;
|
||||
}
|
||||
if snapshot
|
||||
.language_at(selection.end)
|
||||
.is_some_and(|language| language.config().brackets.is_closing_brace(c))
|
||||
{
|
||||
selection.end.row += 1;
|
||||
selection.end.column = snapshot
|
||||
.buffer_snapshot
|
||||
.line_len(MultiBufferRow(selection.end.row));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(prev_selection) = selections.last_mut() {
|
||||
@@ -262,6 +292,7 @@ impl InlineAssistant {
|
||||
}
|
||||
selections.push(selection);
|
||||
}
|
||||
let snapshot = &snapshot.buffer_snapshot;
|
||||
let newest_selection = newest_selection.unwrap();
|
||||
|
||||
let mut codegen_ranges = Vec::new();
|
||||
@@ -1311,9 +1342,9 @@ impl EditorInlineAssists {
|
||||
assist_ids: Vec::new(),
|
||||
scroll_lock: None,
|
||||
highlight_updates: highlight_updates_tx,
|
||||
_update_highlights: cx.spawn(|cx| {
|
||||
_update_highlights: cx.spawn({
|
||||
let editor = editor.downgrade();
|
||||
async move {
|
||||
async move |cx| {
|
||||
while let Ok(()) = highlight_updates_rx.changed().await {
|
||||
let editor = editor.upgrade().context("editor was dropped")?;
|
||||
cx.update_global(|assistant: &mut InlineAssistant, cx| {
|
||||
@@ -1850,7 +1881,7 @@ impl PromptEditor {
|
||||
|
||||
fn count_tokens(&mut self, cx: &mut Context<Self>) {
|
||||
let assist_id = self.id;
|
||||
self.pending_token_count = cx.spawn(|this, mut cx| async move {
|
||||
self.pending_token_count = cx.spawn(async move |this, cx| {
|
||||
cx.background_executor().timer(Duration::from_secs(1)).await;
|
||||
let token_count = cx
|
||||
.update_global(|inline_assistant: &mut InlineAssistant, cx| {
|
||||
@@ -1862,7 +1893,7 @@ impl PromptEditor {
|
||||
})??
|
||||
.await?;
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
this.token_counts = Some(token_count);
|
||||
cx.notify();
|
||||
})
|
||||
@@ -2882,7 +2913,7 @@ impl CodegenAlternative {
|
||||
let request = self.build_request(user_prompt, assistant_panel_context, cx)?;
|
||||
self.request = Some(request.clone());
|
||||
|
||||
cx.spawn(|_, cx| async move { model.stream_completion_text(request, &cx).await })
|
||||
cx.spawn(async move |_, cx| model.stream_completion_text(request, &cx).await)
|
||||
.boxed_local()
|
||||
};
|
||||
self.handle_stream(telemetry_id, provider_id.to_string(), api_key, stream, cx);
|
||||
@@ -2999,213 +3030,207 @@ impl CodegenAlternative {
|
||||
let completion = Arc::new(Mutex::new(String::new()));
|
||||
let completion_clone = completion.clone();
|
||||
|
||||
self.generation = cx.spawn(|codegen, mut cx| {
|
||||
async move {
|
||||
let stream = stream.await;
|
||||
let message_id = stream
|
||||
.as_ref()
|
||||
.ok()
|
||||
.and_then(|stream| stream.message_id.clone());
|
||||
let generate = async {
|
||||
let (mut diff_tx, mut diff_rx) = mpsc::channel(1);
|
||||
let executor = cx.background_executor().clone();
|
||||
let message_id = message_id.clone();
|
||||
let line_based_stream_diff: Task<anyhow::Result<()>> =
|
||||
cx.background_spawn(async move {
|
||||
let mut response_latency = None;
|
||||
let request_start = Instant::now();
|
||||
let diff = async {
|
||||
let chunks = StripInvalidSpans::new(stream?.stream);
|
||||
futures::pin_mut!(chunks);
|
||||
let mut diff = StreamingDiff::new(selected_text.to_string());
|
||||
let mut line_diff = LineDiff::default();
|
||||
self.generation = cx.spawn(async move |codegen, cx| {
|
||||
let stream = stream.await;
|
||||
let message_id = stream
|
||||
.as_ref()
|
||||
.ok()
|
||||
.and_then(|stream| stream.message_id.clone());
|
||||
let generate = async {
|
||||
let (mut diff_tx, mut diff_rx) = mpsc::channel(1);
|
||||
let executor = cx.background_executor().clone();
|
||||
let message_id = message_id.clone();
|
||||
let line_based_stream_diff: Task<anyhow::Result<()>> =
|
||||
cx.background_spawn(async move {
|
||||
let mut response_latency = None;
|
||||
let request_start = Instant::now();
|
||||
let diff = async {
|
||||
let chunks = StripInvalidSpans::new(stream?.stream);
|
||||
futures::pin_mut!(chunks);
|
||||
let mut diff = StreamingDiff::new(selected_text.to_string());
|
||||
let mut line_diff = LineDiff::default();
|
||||
|
||||
let mut new_text = String::new();
|
||||
let mut base_indent = None;
|
||||
let mut line_indent = None;
|
||||
let mut first_line = true;
|
||||
let mut new_text = String::new();
|
||||
let mut base_indent = None;
|
||||
let mut line_indent = None;
|
||||
let mut first_line = true;
|
||||
|
||||
while let Some(chunk) = chunks.next().await {
|
||||
if response_latency.is_none() {
|
||||
response_latency = Some(request_start.elapsed());
|
||||
}
|
||||
let chunk = chunk?;
|
||||
completion_clone.lock().push_str(&chunk);
|
||||
while let Some(chunk) = chunks.next().await {
|
||||
if response_latency.is_none() {
|
||||
response_latency = Some(request_start.elapsed());
|
||||
}
|
||||
let chunk = chunk?;
|
||||
completion_clone.lock().push_str(&chunk);
|
||||
|
||||
let mut lines = chunk.split('\n').peekable();
|
||||
while let Some(line) = lines.next() {
|
||||
new_text.push_str(line);
|
||||
if line_indent.is_none() {
|
||||
if let Some(non_whitespace_ch_ix) =
|
||||
new_text.find(|ch: char| !ch.is_whitespace())
|
||||
{
|
||||
line_indent = Some(non_whitespace_ch_ix);
|
||||
base_indent = base_indent.or(line_indent);
|
||||
let mut lines = chunk.split('\n').peekable();
|
||||
while let Some(line) = lines.next() {
|
||||
new_text.push_str(line);
|
||||
if line_indent.is_none() {
|
||||
if let Some(non_whitespace_ch_ix) =
|
||||
new_text.find(|ch: char| !ch.is_whitespace())
|
||||
{
|
||||
line_indent = Some(non_whitespace_ch_ix);
|
||||
base_indent = base_indent.or(line_indent);
|
||||
|
||||
let line_indent = line_indent.unwrap();
|
||||
let base_indent = base_indent.unwrap();
|
||||
let indent_delta =
|
||||
line_indent as i32 - base_indent as i32;
|
||||
let mut corrected_indent_len = cmp::max(
|
||||
0,
|
||||
suggested_line_indent.len as i32 + indent_delta,
|
||||
)
|
||||
as usize;
|
||||
if first_line {
|
||||
corrected_indent_len = corrected_indent_len
|
||||
.saturating_sub(
|
||||
selection_start.column as usize,
|
||||
);
|
||||
}
|
||||
|
||||
let indent_char = suggested_line_indent.char();
|
||||
let mut indent_buffer = [0; 4];
|
||||
let indent_str =
|
||||
indent_char.encode_utf8(&mut indent_buffer);
|
||||
new_text.replace_range(
|
||||
..line_indent,
|
||||
&indent_str.repeat(corrected_indent_len),
|
||||
);
|
||||
let line_indent = line_indent.unwrap();
|
||||
let base_indent = base_indent.unwrap();
|
||||
let indent_delta =
|
||||
line_indent as i32 - base_indent as i32;
|
||||
let mut corrected_indent_len = cmp::max(
|
||||
0,
|
||||
suggested_line_indent.len as i32 + indent_delta,
|
||||
)
|
||||
as usize;
|
||||
if first_line {
|
||||
corrected_indent_len = corrected_indent_len
|
||||
.saturating_sub(
|
||||
selection_start.column as usize,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if line_indent.is_some() {
|
||||
let char_ops = diff.push_new(&new_text);
|
||||
line_diff
|
||||
.push_char_operations(&char_ops, &selected_text);
|
||||
diff_tx
|
||||
.send((char_ops, line_diff.line_operations()))
|
||||
.await?;
|
||||
let indent_char = suggested_line_indent.char();
|
||||
let mut indent_buffer = [0; 4];
|
||||
let indent_str =
|
||||
indent_char.encode_utf8(&mut indent_buffer);
|
||||
new_text.replace_range(
|
||||
..line_indent,
|
||||
&indent_str.repeat(corrected_indent_len),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if line_indent.is_some() {
|
||||
let char_ops = diff.push_new(&new_text);
|
||||
line_diff.push_char_operations(&char_ops, &selected_text);
|
||||
diff_tx
|
||||
.send((char_ops, line_diff.line_operations()))
|
||||
.await?;
|
||||
new_text.clear();
|
||||
}
|
||||
|
||||
if lines.peek().is_some() {
|
||||
let char_ops = diff.push_new("\n");
|
||||
line_diff.push_char_operations(&char_ops, &selected_text);
|
||||
diff_tx
|
||||
.send((char_ops, line_diff.line_operations()))
|
||||
.await?;
|
||||
if line_indent.is_none() {
|
||||
// Don't write out the leading indentation in empty lines on the next line
|
||||
// This is the case where the above if statement didn't clear the buffer
|
||||
new_text.clear();
|
||||
}
|
||||
|
||||
if lines.peek().is_some() {
|
||||
let char_ops = diff.push_new("\n");
|
||||
line_diff
|
||||
.push_char_operations(&char_ops, &selected_text);
|
||||
diff_tx
|
||||
.send((char_ops, line_diff.line_operations()))
|
||||
.await?;
|
||||
if line_indent.is_none() {
|
||||
// Don't write out the leading indentation in empty lines on the next line
|
||||
// This is the case where the above if statement didn't clear the buffer
|
||||
new_text.clear();
|
||||
}
|
||||
line_indent = None;
|
||||
first_line = false;
|
||||
}
|
||||
line_indent = None;
|
||||
first_line = false;
|
||||
}
|
||||
}
|
||||
|
||||
let mut char_ops = diff.push_new(&new_text);
|
||||
char_ops.extend(diff.finish());
|
||||
line_diff.push_char_operations(&char_ops, &selected_text);
|
||||
line_diff.finish(&selected_text);
|
||||
diff_tx
|
||||
.send((char_ops, line_diff.line_operations()))
|
||||
.await?;
|
||||
|
||||
anyhow::Ok(())
|
||||
};
|
||||
|
||||
let result = diff.await;
|
||||
|
||||
let error_message =
|
||||
result.as_ref().err().map(|error| error.to_string());
|
||||
report_assistant_event(
|
||||
AssistantEvent {
|
||||
conversation_id: None,
|
||||
message_id,
|
||||
kind: AssistantKind::Inline,
|
||||
phase: AssistantPhase::Response,
|
||||
model: model_telemetry_id,
|
||||
model_provider: model_provider_id.to_string(),
|
||||
response_latency,
|
||||
error_message,
|
||||
language_name: language_name.map(|name| name.to_proto()),
|
||||
},
|
||||
telemetry,
|
||||
http_client,
|
||||
model_api_key,
|
||||
&executor,
|
||||
);
|
||||
|
||||
result?;
|
||||
Ok(())
|
||||
});
|
||||
|
||||
while let Some((char_ops, line_ops)) = diff_rx.next().await {
|
||||
codegen.update(&mut cx, |codegen, cx| {
|
||||
codegen.last_equal_ranges.clear();
|
||||
|
||||
let edits = char_ops
|
||||
.into_iter()
|
||||
.filter_map(|operation| match operation {
|
||||
CharOperation::Insert { text } => {
|
||||
let edit_start = snapshot.anchor_after(edit_start);
|
||||
Some((edit_start..edit_start, text))
|
||||
}
|
||||
CharOperation::Delete { bytes } => {
|
||||
let edit_end = edit_start + bytes;
|
||||
let edit_range = snapshot.anchor_after(edit_start)
|
||||
..snapshot.anchor_before(edit_end);
|
||||
edit_start = edit_end;
|
||||
Some((edit_range, String::new()))
|
||||
}
|
||||
CharOperation::Keep { bytes } => {
|
||||
let edit_end = edit_start + bytes;
|
||||
let edit_range = snapshot.anchor_after(edit_start)
|
||||
..snapshot.anchor_before(edit_end);
|
||||
edit_start = edit_end;
|
||||
codegen.last_equal_ranges.push(edit_range);
|
||||
None
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
if codegen.active {
|
||||
codegen.apply_edits(edits.iter().cloned(), cx);
|
||||
codegen.reapply_line_based_diff(line_ops.iter().cloned(), cx);
|
||||
}
|
||||
codegen.edits.extend(edits);
|
||||
codegen.line_operations = line_ops;
|
||||
codegen.edit_position = Some(snapshot.anchor_after(edit_start));
|
||||
|
||||
cx.notify();
|
||||
})?;
|
||||
}
|
||||
let mut char_ops = diff.push_new(&new_text);
|
||||
char_ops.extend(diff.finish());
|
||||
line_diff.push_char_operations(&char_ops, &selected_text);
|
||||
line_diff.finish(&selected_text);
|
||||
diff_tx
|
||||
.send((char_ops, line_diff.line_operations()))
|
||||
.await?;
|
||||
|
||||
// Streaming stopped and we have the new text in the buffer, and a line-based diff applied for the whole new buffer.
|
||||
// That diff is not what a regular diff is and might look unexpected, ergo apply a regular diff.
|
||||
// It's fine to apply even if the rest of the line diffing fails, as no more hunks are coming through `diff_rx`.
|
||||
let batch_diff_task =
|
||||
codegen.update(&mut cx, |codegen, cx| codegen.reapply_batch_diff(cx))?;
|
||||
let (line_based_stream_diff, ()) =
|
||||
join!(line_based_stream_diff, batch_diff_task);
|
||||
line_based_stream_diff?;
|
||||
anyhow::Ok(())
|
||||
};
|
||||
|
||||
anyhow::Ok(())
|
||||
};
|
||||
let result = diff.await;
|
||||
|
||||
let result = generate.await;
|
||||
let elapsed_time = start_time.elapsed().as_secs_f64();
|
||||
let error_message = result.as_ref().err().map(|error| error.to_string());
|
||||
report_assistant_event(
|
||||
AssistantEvent {
|
||||
conversation_id: None,
|
||||
message_id,
|
||||
kind: AssistantKind::Inline,
|
||||
phase: AssistantPhase::Response,
|
||||
model: model_telemetry_id,
|
||||
model_provider: model_provider_id.to_string(),
|
||||
response_latency,
|
||||
error_message,
|
||||
language_name: language_name.map(|name| name.to_proto()),
|
||||
},
|
||||
telemetry,
|
||||
http_client,
|
||||
model_api_key,
|
||||
&executor,
|
||||
);
|
||||
|
||||
codegen
|
||||
.update(&mut cx, |this, cx| {
|
||||
this.message_id = message_id;
|
||||
this.last_equal_ranges.clear();
|
||||
if let Err(error) = result {
|
||||
this.status = CodegenStatus::Error(error);
|
||||
} else {
|
||||
this.status = CodegenStatus::Done;
|
||||
result?;
|
||||
Ok(())
|
||||
});
|
||||
|
||||
while let Some((char_ops, line_ops)) = diff_rx.next().await {
|
||||
codegen.update(cx, |codegen, cx| {
|
||||
codegen.last_equal_ranges.clear();
|
||||
|
||||
let edits = char_ops
|
||||
.into_iter()
|
||||
.filter_map(|operation| match operation {
|
||||
CharOperation::Insert { text } => {
|
||||
let edit_start = snapshot.anchor_after(edit_start);
|
||||
Some((edit_start..edit_start, text))
|
||||
}
|
||||
CharOperation::Delete { bytes } => {
|
||||
let edit_end = edit_start + bytes;
|
||||
let edit_range = snapshot.anchor_after(edit_start)
|
||||
..snapshot.anchor_before(edit_end);
|
||||
edit_start = edit_end;
|
||||
Some((edit_range, String::new()))
|
||||
}
|
||||
CharOperation::Keep { bytes } => {
|
||||
let edit_end = edit_start + bytes;
|
||||
let edit_range = snapshot.anchor_after(edit_start)
|
||||
..snapshot.anchor_before(edit_end);
|
||||
edit_start = edit_end;
|
||||
codegen.last_equal_ranges.push(edit_range);
|
||||
None
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
if codegen.active {
|
||||
codegen.apply_edits(edits.iter().cloned(), cx);
|
||||
codegen.reapply_line_based_diff(line_ops.iter().cloned(), cx);
|
||||
}
|
||||
this.elapsed_time = Some(elapsed_time);
|
||||
this.completion = Some(completion.lock().clone());
|
||||
cx.emit(CodegenEvent::Finished);
|
||||
codegen.edits.extend(edits);
|
||||
codegen.line_operations = line_ops;
|
||||
codegen.edit_position = Some(snapshot.anchor_after(edit_start));
|
||||
|
||||
cx.notify();
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
})?;
|
||||
}
|
||||
|
||||
// Streaming stopped and we have the new text in the buffer, and a line-based diff applied for the whole new buffer.
|
||||
// That diff is not what a regular diff is and might look unexpected, ergo apply a regular diff.
|
||||
// It's fine to apply even if the rest of the line diffing fails, as no more hunks are coming through `diff_rx`.
|
||||
let batch_diff_task =
|
||||
codegen.update(cx, |codegen, cx| codegen.reapply_batch_diff(cx))?;
|
||||
let (line_based_stream_diff, ()) = join!(line_based_stream_diff, batch_diff_task);
|
||||
line_based_stream_diff?;
|
||||
|
||||
anyhow::Ok(())
|
||||
};
|
||||
|
||||
let result = generate.await;
|
||||
let elapsed_time = start_time.elapsed().as_secs_f64();
|
||||
|
||||
codegen
|
||||
.update(cx, |this, cx| {
|
||||
this.message_id = message_id;
|
||||
this.last_equal_ranges.clear();
|
||||
if let Err(error) = result {
|
||||
this.status = CodegenStatus::Error(error);
|
||||
} else {
|
||||
this.status = CodegenStatus::Done;
|
||||
}
|
||||
this.elapsed_time = Some(elapsed_time);
|
||||
this.completion = Some(completion.lock().clone());
|
||||
cx.emit(CodegenEvent::Finished);
|
||||
cx.notify();
|
||||
})
|
||||
.ok();
|
||||
});
|
||||
cx.notify();
|
||||
}
|
||||
@@ -3323,7 +3348,7 @@ impl CodegenAlternative {
|
||||
let new_snapshot = self.buffer.read(cx).snapshot(cx);
|
||||
let new_range = self.range.to_point(&new_snapshot);
|
||||
|
||||
cx.spawn(|codegen, mut cx| async move {
|
||||
cx.spawn(async move |codegen, cx| {
|
||||
let (deleted_row_ranges, inserted_row_ranges) = cx
|
||||
.background_spawn(async move {
|
||||
let old_text = old_snapshot
|
||||
@@ -3373,7 +3398,7 @@ impl CodegenAlternative {
|
||||
.await;
|
||||
|
||||
codegen
|
||||
.update(&mut cx, |codegen, cx| {
|
||||
.update(cx, |codegen, cx| {
|
||||
codegen.diff.deleted_row_ranges = deleted_row_ranges;
|
||||
codegen.diff.inserted_row_ranges = inserted_row_ranges;
|
||||
cx.notify();
|
||||
@@ -3569,6 +3594,7 @@ impl CodeActionProvider for AssistantCodeActionProvider {
|
||||
title: "Fix with Assistant".into(),
|
||||
..Default::default()
|
||||
})),
|
||||
resolved: true,
|
||||
}]))
|
||||
} else {
|
||||
Task::ready(Ok(Vec::new()))
|
||||
@@ -3586,10 +3612,10 @@ impl CodeActionProvider for AssistantCodeActionProvider {
|
||||
) -> Task<Result<ProjectTransaction>> {
|
||||
let editor = self.editor.clone();
|
||||
let workspace = self.workspace.clone();
|
||||
window.spawn(cx, |mut cx| async move {
|
||||
window.spawn(cx, async move |cx| {
|
||||
let editor = editor.upgrade().context("editor was released")?;
|
||||
let range = editor
|
||||
.update(&mut cx, |editor, cx| {
|
||||
.update(cx, |editor, cx| {
|
||||
editor.buffer().update(cx, |multibuffer, cx| {
|
||||
let buffer = buffer.read(cx);
|
||||
let multibuffer_snapshot = multibuffer.read(cx);
|
||||
@@ -3624,7 +3650,7 @@ impl CodeActionProvider for AssistantCodeActionProvider {
|
||||
})
|
||||
})?
|
||||
.context("invalid range")?;
|
||||
let assistant_panel = workspace.update(&mut cx, |workspace, cx| {
|
||||
let assistant_panel = workspace.update(cx, |workspace, cx| {
|
||||
workspace
|
||||
.panel::<AssistantPanel>(cx)
|
||||
.context("assistant panel was released")
|
||||
|
||||
@@ -825,7 +825,7 @@ impl PromptEditor {
|
||||
let Some(model) = LanguageModelRegistry::read_global(cx).active_model() else {
|
||||
return;
|
||||
};
|
||||
self.pending_token_count = cx.spawn(|this, mut cx| async move {
|
||||
self.pending_token_count = cx.spawn(async move |this, cx| {
|
||||
cx.background_executor().timer(Duration::from_secs(1)).await;
|
||||
let request =
|
||||
cx.update_global(|inline_assistant: &mut TerminalInlineAssistant, cx| {
|
||||
@@ -833,7 +833,7 @@ impl PromptEditor {
|
||||
})??;
|
||||
|
||||
let token_count = cx.update(|cx| model.count_tokens(request, cx))?.await?;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
this.token_count = Some(token_count);
|
||||
cx.notify();
|
||||
})
|
||||
@@ -1140,7 +1140,7 @@ impl Codegen {
|
||||
let telemetry = self.telemetry.clone();
|
||||
self.status = CodegenStatus::Pending;
|
||||
self.transaction = Some(TerminalTransaction::start(self.terminal.clone()));
|
||||
self.generation = cx.spawn(|this, mut cx| async move {
|
||||
self.generation = cx.spawn(async move |this, cx| {
|
||||
let model_telemetry_id = model.telemetry_id();
|
||||
let model_provider_id = model.provider_id();
|
||||
let response = model.stream_completion_text(prompt, &cx).await;
|
||||
@@ -1197,12 +1197,12 @@ impl Codegen {
|
||||
}
|
||||
});
|
||||
|
||||
this.update(&mut cx, |this, _| {
|
||||
this.update(cx, |this, _| {
|
||||
this.message_id = message_id;
|
||||
})?;
|
||||
|
||||
while let Some(hunk) = hunks_rx.next().await {
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
if let Some(transaction) = &mut this.transaction {
|
||||
transaction.push(hunk, cx);
|
||||
cx.notify();
|
||||
@@ -1216,7 +1216,7 @@ impl Codegen {
|
||||
|
||||
let result = generate.await;
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
if let Err(error) = result {
|
||||
this.status = CodegenStatus::Error(error);
|
||||
} else {
|
||||
|
||||
@@ -38,10 +38,13 @@ file_icons.workspace = true
|
||||
fs.workspace = true
|
||||
futures.workspace = true
|
||||
fuzzy.workspace = true
|
||||
git.workspace = true
|
||||
git_ui.workspace = true
|
||||
gpui.workspace = true
|
||||
heed.workspace = true
|
||||
html_to_markdown.workspace = true
|
||||
http_client.workspace = true
|
||||
indexmap.workspace = true
|
||||
itertools.workspace = true
|
||||
language.workspace = true
|
||||
language_model.workspace = true
|
||||
@@ -58,13 +61,15 @@ project.workspace = true
|
||||
prompt_library.workspace = true
|
||||
prompt_store.workspace = true
|
||||
proto.workspace = true
|
||||
release_channel.workspace = true
|
||||
rope.workspace = true
|
||||
scripting_tool.workspace = true
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
settings.workspace = true
|
||||
smallvec.workspace = true
|
||||
smol.workspace = true
|
||||
streaming_diff.workspace = true
|
||||
telemetry.workspace = true
|
||||
telemetry_events.workspace = true
|
||||
terminal.workspace = true
|
||||
terminal_view.workspace = true
|
||||
|
||||
@@ -31,8 +31,12 @@ use gpui::{actions, App};
|
||||
use prompt_store::PromptBuilder;
|
||||
use settings::Settings as _;
|
||||
|
||||
pub use crate::active_thread::ActiveThread;
|
||||
use crate::assistant_configuration::AddContextServerModal;
|
||||
pub use crate::assistant_panel::{AssistantPanel, ConcreteAssistantPanelDelegate};
|
||||
pub use crate::inline_assistant::InlineAssistant;
|
||||
pub use crate::thread::{Message, RequestKind, Thread, ThreadEvent};
|
||||
pub use crate::thread_store::ThreadStore;
|
||||
|
||||
actions!(
|
||||
assistant2,
|
||||
@@ -43,6 +47,7 @@ actions!(
|
||||
RemoveAllContext,
|
||||
OpenHistory,
|
||||
OpenConfiguration,
|
||||
AddContextServer,
|
||||
RemoveSelectedThread,
|
||||
Chat,
|
||||
ChatMode,
|
||||
@@ -83,6 +88,7 @@ pub fn init(
|
||||
client.telemetry().clone(),
|
||||
cx,
|
||||
);
|
||||
cx.observe_new(AddContextServerModal::register).detach();
|
||||
|
||||
feature_gate_assistant2_actions(cx);
|
||||
}
|
||||
|
||||
@@ -1,19 +1,36 @@
|
||||
mod add_context_server_modal;
|
||||
|
||||
use std::sync::Arc;
|
||||
|
||||
use assistant_tool::{ToolSource, ToolWorkingSet};
|
||||
use collections::HashMap;
|
||||
use gpui::{Action, AnyView, App, EventEmitter, FocusHandle, Focusable, Subscription};
|
||||
use context_server::manager::ContextServerManager;
|
||||
use gpui::{Action, AnyView, App, Entity, EventEmitter, FocusHandle, Focusable, Subscription};
|
||||
use language_model::{LanguageModelProvider, LanguageModelProviderId, LanguageModelRegistry};
|
||||
use ui::{prelude::*, Divider, DividerColor, ElevationIndex};
|
||||
use zed_actions::assistant::DeployPromptLibrary;
|
||||
use ui::{prelude::*, Disclosure, Divider, DividerColor, ElevationIndex, Indicator, Switch};
|
||||
use util::ResultExt as _;
|
||||
use zed_actions::ExtensionCategoryFilter;
|
||||
|
||||
pub(crate) use add_context_server_modal::AddContextServerModal;
|
||||
|
||||
use crate::AddContextServer;
|
||||
|
||||
pub struct AssistantConfiguration {
|
||||
focus_handle: FocusHandle,
|
||||
configuration_views_by_provider: HashMap<LanguageModelProviderId, AnyView>,
|
||||
context_server_manager: Entity<ContextServerManager>,
|
||||
expanded_context_server_tools: HashMap<Arc<str>, bool>,
|
||||
tools: Arc<ToolWorkingSet>,
|
||||
_registry_subscription: Subscription,
|
||||
}
|
||||
|
||||
impl AssistantConfiguration {
|
||||
pub fn new(window: &mut Window, cx: &mut Context<Self>) -> Self {
|
||||
pub fn new(
|
||||
context_server_manager: Entity<ContextServerManager>,
|
||||
tools: Arc<ToolWorkingSet>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
let focus_handle = cx.focus_handle();
|
||||
|
||||
let registry_subscription = cx.subscribe_in(
|
||||
@@ -36,6 +53,9 @@ impl AssistantConfiguration {
|
||||
let mut this = Self {
|
||||
focus_handle,
|
||||
configuration_views_by_provider: HashMap::default(),
|
||||
context_server_manager,
|
||||
expanded_context_server_tools: HashMap::default(),
|
||||
tools,
|
||||
_registry_subscription: registry_subscription,
|
||||
};
|
||||
this.build_provider_configuration_views(window, cx);
|
||||
@@ -143,6 +163,186 @@ impl AssistantConfiguration {
|
||||
}),
|
||||
)
|
||||
}
|
||||
|
||||
fn render_context_servers_section(&mut self, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
let context_servers = self.context_server_manager.read(cx).all_servers().clone();
|
||||
let tools_by_source = self.tools.tools_by_source(cx);
|
||||
let empty = Vec::new();
|
||||
|
||||
const SUBHEADING: &str = "Connect to context servers via the Model Context Protocol either via Zed extensions or directly.";
|
||||
|
||||
v_flex()
|
||||
.p(DynamicSpacing::Base16.rems(cx))
|
||||
.gap_2()
|
||||
.flex_1()
|
||||
.child(
|
||||
v_flex()
|
||||
.gap_0p5()
|
||||
.child(Headline::new("Context Servers (MCP)").size(HeadlineSize::Small))
|
||||
.child(Label::new(SUBHEADING).color(Color::Muted)),
|
||||
)
|
||||
.children(context_servers.into_iter().map(|context_server| {
|
||||
let is_running = context_server.client().is_some();
|
||||
let are_tools_expanded = self
|
||||
.expanded_context_server_tools
|
||||
.get(&context_server.id())
|
||||
.copied()
|
||||
.unwrap_or_default();
|
||||
|
||||
let tools = tools_by_source
|
||||
.get(&ToolSource::ContextServer {
|
||||
id: context_server.id().into(),
|
||||
})
|
||||
.unwrap_or_else(|| &empty);
|
||||
let tool_count = tools.len();
|
||||
|
||||
v_flex()
|
||||
.id(SharedString::from(context_server.id()))
|
||||
.border_1()
|
||||
.rounded_sm()
|
||||
.border_color(cx.theme().colors().border)
|
||||
.bg(cx.theme().colors().editor_background)
|
||||
.child(
|
||||
h_flex()
|
||||
.justify_between()
|
||||
.px_2()
|
||||
.py_1()
|
||||
.when(are_tools_expanded, |element| {
|
||||
element
|
||||
.border_b_1()
|
||||
.border_color(cx.theme().colors().border)
|
||||
})
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_2()
|
||||
.child(
|
||||
Disclosure::new("tool-list-disclosure", are_tools_expanded)
|
||||
.on_click(cx.listener({
|
||||
let context_server_id = context_server.id();
|
||||
move |this, _event, _window, _cx| {
|
||||
let is_open = this
|
||||
.expanded_context_server_tools
|
||||
.entry(context_server_id.clone())
|
||||
.or_insert(false);
|
||||
|
||||
*is_open = !*is_open;
|
||||
}
|
||||
})),
|
||||
)
|
||||
.child(Indicator::dot().color(if is_running {
|
||||
Color::Success
|
||||
} else {
|
||||
Color::Error
|
||||
}))
|
||||
.child(Label::new(context_server.id()))
|
||||
.child(
|
||||
Label::new(format!("{tool_count} tools"))
|
||||
.color(Color::Muted),
|
||||
),
|
||||
)
|
||||
.child(h_flex().child(
|
||||
Switch::new("context-server-switch", is_running.into()).on_click({
|
||||
let context_server_manager =
|
||||
self.context_server_manager.clone();
|
||||
let context_server = context_server.clone();
|
||||
move |state, _window, cx| match state {
|
||||
ToggleState::Unselected | ToggleState::Indeterminate => {
|
||||
context_server_manager.update(cx, |this, cx| {
|
||||
this.stop_server(context_server.clone(), cx)
|
||||
.log_err();
|
||||
});
|
||||
}
|
||||
ToggleState::Selected => {
|
||||
cx.spawn({
|
||||
let context_server_manager =
|
||||
context_server_manager.clone();
|
||||
let context_server = context_server.clone();
|
||||
async move |cx| {
|
||||
if let Some(start_server_task) =
|
||||
context_server_manager
|
||||
.update(cx, |this, cx| {
|
||||
this.start_server(
|
||||
context_server,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.log_err()
|
||||
{
|
||||
start_server_task.await.log_err();
|
||||
}
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
}
|
||||
}),
|
||||
)),
|
||||
)
|
||||
.map(|parent| {
|
||||
if !are_tools_expanded {
|
||||
return parent;
|
||||
}
|
||||
|
||||
parent.child(v_flex().children(tools.into_iter().enumerate().map(
|
||||
|(ix, tool)| {
|
||||
h_flex()
|
||||
.px_2()
|
||||
.py_1()
|
||||
.when(ix < tool_count - 1, |element| {
|
||||
element
|
||||
.border_b_1()
|
||||
.border_color(cx.theme().colors().border)
|
||||
})
|
||||
.child(Label::new(tool.name()))
|
||||
},
|
||||
)))
|
||||
})
|
||||
}))
|
||||
.child(
|
||||
h_flex()
|
||||
.justify_between()
|
||||
.gap_2()
|
||||
.child(
|
||||
h_flex().w_full().child(
|
||||
Button::new("add-context-server", "Add Context Server")
|
||||
.style(ButtonStyle::Filled)
|
||||
.layer(ElevationIndex::ModalSurface)
|
||||
.full_width()
|
||||
.icon(IconName::Plus)
|
||||
.icon_size(IconSize::Small)
|
||||
.icon_position(IconPosition::Start)
|
||||
.on_click(|_event, window, cx| {
|
||||
window.dispatch_action(AddContextServer.boxed_clone(), cx)
|
||||
}),
|
||||
),
|
||||
)
|
||||
.child(
|
||||
h_flex().w_full().child(
|
||||
Button::new(
|
||||
"install-context-server-extensions",
|
||||
"Install Context Server Extensions",
|
||||
)
|
||||
.style(ButtonStyle::Filled)
|
||||
.layer(ElevationIndex::ModalSurface)
|
||||
.full_width()
|
||||
.icon(IconName::DatabaseZap)
|
||||
.icon_size(IconSize::Small)
|
||||
.icon_position(IconPosition::Start)
|
||||
.on_click(|_event, window, cx| {
|
||||
window.dispatch_action(
|
||||
zed_actions::Extensions {
|
||||
category_filter: Some(
|
||||
ExtensionCategoryFilter::ContextServers,
|
||||
),
|
||||
}
|
||||
.boxed_clone(),
|
||||
cx,
|
||||
)
|
||||
}),
|
||||
),
|
||||
),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl Render for AssistantConfiguration {
|
||||
@@ -155,32 +355,7 @@ impl Render for AssistantConfiguration {
|
||||
.bg(cx.theme().colors().panel_background)
|
||||
.size_full()
|
||||
.overflow_y_scroll()
|
||||
.child(
|
||||
v_flex()
|
||||
.p(DynamicSpacing::Base16.rems(cx))
|
||||
.gap_2()
|
||||
.child(
|
||||
v_flex()
|
||||
.gap_0p5()
|
||||
.child(Headline::new("Prompt Library").size(HeadlineSize::Small))
|
||||
.child(
|
||||
Label::new("Create reusable prompts and tag which ones you want sent in every LLM interaction.")
|
||||
.color(Color::Muted),
|
||||
),
|
||||
)
|
||||
.child(
|
||||
Button::new("open-prompt-library", "Open Prompt Library")
|
||||
.style(ButtonStyle::Filled)
|
||||
.layer(ElevationIndex::ModalSurface)
|
||||
.full_width()
|
||||
.icon(IconName::Book)
|
||||
.icon_size(IconSize::Small)
|
||||
.icon_position(IconPosition::Start)
|
||||
.on_click(|_event, window, cx| {
|
||||
window.dispatch_action(DeployPromptLibrary.boxed_clone(), cx)
|
||||
}),
|
||||
),
|
||||
)
|
||||
.child(self.render_context_servers_section(cx))
|
||||
.child(Divider::horizontal().color(DividerColor::Border))
|
||||
.child(
|
||||
v_flex()
|
||||
|
||||
@@ -0,0 +1,164 @@
use context_server::{ContextServerSettings, ServerCommand, ServerConfig};
use editor::Editor;
use gpui::{prelude::*, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, WeakEntity};
use serde_json::json;
use settings::update_settings_file;
use ui::{prelude::*, Modal, ModalFooter, ModalHeader, Section, Tooltip};
use workspace::{ModalView, Workspace};

use crate::AddContextServer;

pub struct AddContextServerModal {
    workspace: WeakEntity<Workspace>,
    name_editor: Entity<Editor>,
    command_editor: Entity<Editor>,
}

impl AddContextServerModal {
    pub fn register(
        workspace: &mut Workspace,
        _window: Option<&mut Window>,
        _cx: &mut Context<Workspace>,
    ) {
        workspace.register_action(|workspace, _: &AddContextServer, window, cx| {
            let workspace_handle = cx.entity().downgrade();
            workspace.toggle_modal(window, cx, |window, cx| {
                Self::new(workspace_handle, window, cx)
            })
        });
    }

    pub fn new(
        workspace: WeakEntity<Workspace>,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) -> Self {
        let name_editor = cx.new(|cx| Editor::single_line(window, cx));
        let command_editor = cx.new(|cx| Editor::single_line(window, cx));

        name_editor.update(cx, |editor, cx| {
            editor.set_placeholder_text("Context server name", cx);
        });

        command_editor.update(cx, |editor, cx| {
            editor.set_placeholder_text("Command to run the context server", cx);
        });

        Self {
            name_editor,
            command_editor,
            workspace,
        }
    }

    fn confirm(&mut self, cx: &mut Context<Self>) {
        let name = self.name_editor.read(cx).text(cx).trim().to_string();
        let command = self.command_editor.read(cx).text(cx).trim().to_string();

        if name.is_empty() || command.is_empty() {
            return;
        }

        let mut command_parts = command.split(' ').map(|part| part.trim().to_string());
        let Some(path) = command_parts.next() else {
            return;
        };
        let args = command_parts.collect::<Vec<_>>();

        if let Some(workspace) = self.workspace.upgrade() {
            workspace.update(cx, |workspace, cx| {
                let fs = workspace.app_state().fs.clone();
                update_settings_file::<ContextServerSettings>(fs.clone(), cx, |settings, _| {
                    settings.context_servers.insert(
                        name.into(),
                        ServerConfig {
                            command: Some(ServerCommand {
                                path,
                                args,
                                env: None,
                            }),
                            settings: Some(json!({})),
                        },
                    );
                });
            });
        }

        cx.emit(DismissEvent);
    }

    fn cancel(&mut self, cx: &mut Context<Self>) {
        cx.emit(DismissEvent);
    }
}

impl ModalView for AddContextServerModal {}

impl Focusable for AddContextServerModal {
    fn focus_handle(&self, cx: &App) -> FocusHandle {
        self.name_editor.focus_handle(cx).clone()
    }
}

impl EventEmitter<DismissEvent> for AddContextServerModal {}

impl Render for AddContextServerModal {
    fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
        let is_name_empty = self.name_editor.read(cx).text(cx).trim().is_empty();
        let is_command_empty = self.command_editor.read(cx).text(cx).trim().is_empty();

        div()
            .elevation_3(cx)
            .w(rems(34.))
            .key_context("AddContextServerModal")
            .on_action(cx.listener(|this, _: &menu::Cancel, _window, cx| this.cancel(cx)))
            .on_action(cx.listener(|this, _: &menu::Confirm, _window, cx| this.confirm(cx)))
            .capture_any_mouse_down(cx.listener(|this, _, window, cx| {
                this.focus_handle(cx).focus(window);
            }))
            .on_mouse_down_out(cx.listener(|_this, _, _, cx| cx.emit(DismissEvent)))
            .child(
                Modal::new("add-context-server", None)
                    .header(ModalHeader::new().headline("Add Context Server"))
                    .section(
                        Section::new()
                            .child(
                                v_flex()
                                    .gap_1()
                                    .child(Label::new("Name"))
                                    .child(self.name_editor.clone()),
                            )
                            .child(
                                v_flex()
                                    .gap_1()
                                    .child(Label::new("Command"))
                                    .child(self.command_editor.clone()),
                            ),
                    )
                    .footer(
                        ModalFooter::new()
                            .start_slot(
                                Button::new("cancel", "Cancel").on_click(
                                    cx.listener(|this, _event, _window, cx| this.cancel(cx)),
                                ),
                            )
                            .end_slot(
                                Button::new("add-server", "Add Server")
                                    .disabled(is_name_empty || is_command_empty)
                                    .map(|button| {
                                        if is_name_empty {
                                            button.tooltip(Tooltip::text("Name is required"))
                                        } else if is_command_empty {
                                            button.tooltip(Tooltip::text("Command is required"))
                                        } else {
                                            button
                                        }
                                    })
                                    .on_click(
                                        cx.listener(|this, _event, _window, cx| this.confirm(cx)),
                                    ),
                            ),
                    ),
            )
    }
}
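
As a quick illustration of what the modal's `confirm` step does with the command field, here is a minimal std-only sketch of the same whitespace split into an executable path plus arguments. The helper name and the example command are made up for illustration; the real code feeds the result into `ServerCommand` and writes it to settings via `update_settings_file`.

```rust
// Minimal sketch of the command parsing in AddContextServerModal::confirm.
// `parse_server_command` and the example command string are illustrative only.
fn parse_server_command(command: &str) -> Option<(String, Vec<String>)> {
    let mut parts = command.split(' ').map(|part| part.trim().to_string());
    let path = parts.next()?; // first token becomes ServerCommand::path
    let args = parts.collect::<Vec<_>>(); // remaining tokens become ServerCommand::args
    Some((path, args))
}

fn main() {
    let (path, args) = parse_server_command("npx -y some-context-server").unwrap();
    assert_eq!(path, "npx");
    assert_eq!(args, vec!["-y".to_string(), "some-context-server".to_string()]);
}
```

Splitting on a single space treats the command field as a plain token list; quoting and escaping are not interpreted here.
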
@@ -14,9 +14,9 @@ use client::zed_urls;
|
||||
use editor::{Editor, MultiBuffer};
|
||||
use fs::Fs;
|
||||
use gpui::{
|
||||
prelude::*, Action, AnyElement, App, AsyncWindowContext, Corner, Entity, EventEmitter,
|
||||
FocusHandle, Focusable, FontWeight, KeyContext, Pixels, Subscription, Task, UpdateGlobal,
|
||||
WeakEntity,
|
||||
action_with_deprecated_aliases, prelude::*, Action, AnyElement, App, AsyncWindowContext,
|
||||
Corner, Entity, EventEmitter, FocusHandle, Focusable, FontWeight, KeyContext, Pixels,
|
||||
Subscription, Task, UpdateGlobal, WeakEntity,
|
||||
};
|
||||
use language::LanguageRegistry;
|
||||
use language_model::{LanguageModelProviderTosView, LanguageModelRegistry};
|
||||
@@ -29,7 +29,7 @@ use ui::{prelude::*, ContextMenu, KeyBinding, PopoverMenu, PopoverMenuHandle, Ta
|
||||
use util::ResultExt as _;
|
||||
use workspace::dock::{DockPosition, Panel, PanelEvent};
|
||||
use workspace::Workspace;
|
||||
use zed_actions::assistant::{DeployPromptLibrary, ToggleFocus};
|
||||
use zed_actions::assistant::ToggleFocus;
|
||||
|
||||
use crate::active_thread::ActiveThread;
|
||||
use crate::assistant_configuration::{AssistantConfiguration, AssistantConfigurationEvent};
|
||||
@@ -43,6 +43,12 @@ use crate::{
|
||||
OpenHistory,
|
||||
};
|
||||
|
||||
action_with_deprecated_aliases!(
|
||||
assistant,
|
||||
OpenPromptLibrary,
|
||||
["assistant::DeployPromptLibrary"]
|
||||
);
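
The effect of the macro above is that keymaps still referring to the old action name keep resolving to the renamed action. As a rough, std-only illustration of that alias lookup (not gpui's actual implementation, and the exact serialized names are assumed here):

```rust
use std::collections::HashMap;

// Illustration only: gpui's action registry is more involved, but the observable
// behavior is a name table where deprecated names resolve to the new action.
fn main() {
    let mut aliases: HashMap<&str, &str> = HashMap::new();
    aliases.insert("assistant::DeployPromptLibrary", "assistant::OpenPromptLibrary");

    let requested = "assistant::DeployPromptLibrary"; // e.g. from an old keymap entry
    let resolved = aliases.get(requested).copied().unwrap_or(requested);
    assert_eq!(resolved, "assistant::OpenPromptLibrary");
}
```
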
|
||||
|
||||
pub fn init(cx: &mut App) {
|
||||
cx.observe_new(
|
||||
|workspace: &mut Workspace, _window, _cx: &mut Context<Workspace>| {
|
||||
@@ -65,6 +71,14 @@ pub fn init(cx: &mut App) {
|
||||
panel.update(cx, |panel, cx| panel.new_prompt_editor(window, cx));
|
||||
}
|
||||
})
|
||||
.register_action(|workspace, _: &OpenPromptLibrary, window, cx| {
|
||||
if let Some(panel) = workspace.panel::<AssistantPanel>(cx) {
|
||||
workspace.focus_panel::<AssistantPanel>(window, cx);
|
||||
panel.update(cx, |panel, cx| {
|
||||
panel.deploy_prompt_library(&OpenPromptLibrary, window, cx)
|
||||
});
|
||||
}
|
||||
})
|
||||
.register_action(|workspace, _: &OpenConfiguration, window, cx| {
|
||||
if let Some(panel) = workspace.panel::<AssistantPanel>(cx) {
|
||||
workspace.focus_panel::<AssistantPanel>(window, cx);
|
||||
@@ -110,19 +124,16 @@ impl AssistantPanel {
|
||||
prompt_builder: Arc<PromptBuilder>,
|
||||
cx: AsyncWindowContext,
|
||||
) -> Task<Result<Entity<Self>>> {
|
||||
cx.spawn(|mut cx| async move {
|
||||
cx.spawn(async move |cx| {
|
||||
let tools = Arc::new(ToolWorkingSet::default());
|
||||
log::info!("[assistant2-debug] initializing ThreadStore");
|
||||
let thread_store = workspace.update(&mut cx, |workspace, cx| {
|
||||
let thread_store = workspace.update(cx, |workspace, cx| {
|
||||
let project = workspace.project().clone();
|
||||
ThreadStore::new(project, tools.clone(), prompt_builder.clone(), cx)
|
||||
})??;
|
||||
log::info!("[assistant2-debug] finished initializing ThreadStore");
|
||||
|
||||
let slash_commands = Arc::new(SlashCommandWorkingSet::default());
|
||||
log::info!("[assistant2-debug] initializing ContextStore");
|
||||
let context_store = workspace
|
||||
.update(&mut cx, |workspace, cx| {
|
||||
.update(cx, |workspace, cx| {
|
||||
let project = workspace.project().clone();
|
||||
assistant_context_editor::ContextStore::new(
|
||||
project,
|
||||
@@ -132,9 +143,8 @@ impl AssistantPanel {
|
||||
)
|
||||
})?
|
||||
.await?;
|
||||
log::info!("[assistant2-debug] finished initializing ContextStore");
|
||||
|
||||
workspace.update_in(&mut cx, |workspace, window, cx| {
|
||||
workspace.update_in(cx, |workspace, window, cx| {
|
||||
cx.new(|cx| Self::new(workspace, thread_store, context_store, window, cx))
|
||||
})
|
||||
})
|
||||
@@ -147,7 +157,6 @@ impl AssistantPanel {
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
log::info!("[assistant2-debug] AssistantPanel::new");
|
||||
let thread = thread_store.update(cx, |this, cx| this.create_thread(cx));
|
||||
let fs = workspace.app_state().fs.clone();
|
||||
let project = workspace.project().clone();
|
||||
@@ -155,10 +164,14 @@ impl AssistantPanel {
|
||||
let workspace = workspace.weak_handle();
|
||||
let weak_self = cx.entity().downgrade();
|
||||
|
||||
let message_editor_context_store =
|
||||
cx.new(|_cx| crate::context_store::ContextStore::new(workspace.clone()));
|
||||
|
||||
let message_editor = cx.new(|cx| {
|
||||
MessageEditor::new(
|
||||
fs.clone(),
|
||||
workspace.clone(),
|
||||
message_editor_context_store.clone(),
|
||||
thread_store.downgrade(),
|
||||
thread.clone(),
|
||||
window,
|
||||
@@ -174,6 +187,8 @@ impl AssistantPanel {
|
||||
thread.clone(),
|
||||
thread_store.clone(),
|
||||
language_registry.clone(),
|
||||
message_editor_context_store.clone(),
|
||||
workspace.clone(),
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
@@ -242,11 +257,17 @@ impl AssistantPanel {
|
||||
.update(cx, |this, cx| this.create_thread(cx));
|
||||
|
||||
self.active_view = ActiveView::Thread;
|
||||
|
||||
let message_editor_context_store =
|
||||
cx.new(|_cx| crate::context_store::ContextStore::new(self.workspace.clone()));
|
||||
|
||||
self.thread = cx.new(|cx| {
|
||||
ActiveThread::new(
|
||||
thread.clone(),
|
||||
self.thread_store.clone(),
|
||||
self.language_registry.clone(),
|
||||
message_editor_context_store.clone(),
|
||||
self.workspace.clone(),
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
@@ -255,6 +276,7 @@ impl AssistantPanel {
|
||||
MessageEditor::new(
|
||||
self.fs.clone(),
|
||||
self.workspace.clone(),
|
||||
message_editor_context_store,
|
||||
self.thread_store.downgrade(),
|
||||
thread,
|
||||
window,
|
||||
@@ -295,7 +317,7 @@ impl AssistantPanel {
|
||||
|
||||
fn deploy_prompt_library(
|
||||
&mut self,
|
||||
_: &DeployPromptLibrary,
|
||||
_: &OpenPromptLibrary,
|
||||
_window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
@@ -338,9 +360,9 @@ impl AssistantPanel {
|
||||
|
||||
let lsp_adapter_delegate = make_lsp_adapter_delegate(&project, cx).log_err().flatten();
|
||||
|
||||
cx.spawn_in(window, |this, mut cx| async move {
|
||||
cx.spawn_in(window, async move |this, cx| {
|
||||
let context = context.await?;
|
||||
this.update_in(&mut cx, |this, window, cx| {
|
||||
this.update_in(cx, |this, window, cx| {
|
||||
let editor = cx.new(|cx| {
|
||||
ContextEditor::for_context(
|
||||
context,
|
||||
@@ -371,15 +393,19 @@ impl AssistantPanel {
|
||||
.thread_store
|
||||
.update(cx, |this, cx| this.open_thread(thread_id, cx));
|
||||
|
||||
cx.spawn_in(window, |this, mut cx| async move {
|
||||
cx.spawn_in(window, async move |this, cx| {
|
||||
let thread = open_thread_task.await?;
|
||||
this.update_in(&mut cx, |this, window, cx| {
|
||||
this.update_in(cx, |this, window, cx| {
|
||||
this.active_view = ActiveView::Thread;
|
||||
let message_editor_context_store =
|
||||
cx.new(|_cx| crate::context_store::ContextStore::new(this.workspace.clone()));
|
||||
this.thread = cx.new(|cx| {
|
||||
ActiveThread::new(
|
||||
thread.clone(),
|
||||
this.thread_store.clone(),
|
||||
this.language_registry.clone(),
|
||||
message_editor_context_store.clone(),
|
||||
this.workspace.clone(),
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
@@ -388,6 +414,7 @@ impl AssistantPanel {
|
||||
MessageEditor::new(
|
||||
this.fs.clone(),
|
||||
this.workspace.clone(),
|
||||
message_editor_context_store,
|
||||
this.thread_store.downgrade(),
|
||||
thread,
|
||||
window,
|
||||
@@ -400,8 +427,13 @@ impl AssistantPanel {
|
||||
}
|
||||
|
||||
pub(crate) fn open_configuration(&mut self, window: &mut Window, cx: &mut Context<Self>) {
|
||||
let context_server_manager = self.thread_store.read(cx).context_server_manager();
|
||||
let tools = self.thread_store.read(cx).tools();
|
||||
|
||||
self.active_view = ActiveView::Configuration;
|
||||
self.configuration = Some(cx.new(|cx| AssistantConfiguration::new(window, cx)));
|
||||
self.configuration = Some(
|
||||
cx.new(|cx| AssistantConfiguration::new(context_server_manager, tools, window, cx)),
|
||||
);
|
||||
|
||||
if let Some(configuration) = self.configuration.as_ref() {
|
||||
self.configuration_subscription = Some(cx.subscribe_in(
|
||||
@@ -435,12 +467,12 @@ impl AssistantPanel {
|
||||
.languages
|
||||
.language_for_name("Markdown");
|
||||
let thread = self.active_thread(cx);
|
||||
cx.spawn_in(window, |_this, mut cx| async move {
|
||||
cx.spawn_in(window, async move |_this, cx| {
|
||||
let markdown_language = markdown_language_task.await?;
|
||||
|
||||
workspace.update_in(&mut cx, |workspace, window, cx| {
|
||||
workspace.update_in(cx, |workspace, window, cx| {
|
||||
let thread = thread.read(cx);
|
||||
let markdown = thread.to_markdown()?;
|
||||
let markdown = thread.to_markdown(cx)?;
|
||||
let thread_summary = thread
|
||||
.summary()
|
||||
.map(|summary| summary.to_string())
|
||||
@@ -907,8 +939,8 @@ impl AssistantPanel {
|
||||
ThreadError::MaxMonthlySpendReached => {
|
||||
self.render_max_monthly_spend_reached_error(cx)
|
||||
}
|
||||
ThreadError::Message(error_message) => {
|
||||
self.render_error_message(&error_message, cx)
|
||||
ThreadError::Message { header, message } => {
|
||||
self.render_error_message(header, message, cx)
|
||||
}
|
||||
})
|
||||
.into_any(),
|
||||
@@ -1011,7 +1043,8 @@ impl AssistantPanel {
|
||||
|
||||
fn render_error_message(
|
||||
&self,
|
||||
error_message: &SharedString,
|
||||
header: SharedString,
|
||||
message: SharedString,
|
||||
cx: &mut Context<Self>,
|
||||
) -> AnyElement {
|
||||
v_flex()
|
||||
@@ -1021,17 +1054,14 @@ impl AssistantPanel {
|
||||
.gap_1p5()
|
||||
.items_center()
|
||||
.child(Icon::new(IconName::XCircle).color(Color::Error))
|
||||
.child(
|
||||
Label::new("Error interacting with language model")
|
||||
.weight(FontWeight::MEDIUM),
|
||||
),
|
||||
.child(Label::new(header).weight(FontWeight::MEDIUM)),
|
||||
)
|
||||
.child(
|
||||
div()
|
||||
.id("error-message")
|
||||
.max_h_32()
|
||||
.overflow_y_scroll()
|
||||
.child(Label::new(error_message.clone())),
|
||||
.child(Label::new(message)),
|
||||
)
|
||||
.child(
|
||||
h_flex()
|
||||
|
||||
@@ -367,7 +367,7 @@ impl CodegenAlternative {
|
||||
let request = self.build_request(user_prompt, cx)?;
|
||||
self.request = Some(request.clone());
|
||||
|
||||
cx.spawn(|_, cx| async move { model.stream_completion_text(request, &cx).await })
|
||||
cx.spawn(async move |_, cx| model.stream_completion_text(request, &cx).await)
|
||||
.boxed_local()
|
||||
};
|
||||
self.handle_stream(telemetry_id, provider_id.to_string(), api_key, stream, cx);
|
||||
@@ -480,213 +480,207 @@ impl CodegenAlternative {
|
||||
let completion = Arc::new(Mutex::new(String::new()));
|
||||
let completion_clone = completion.clone();
|
||||
|
||||
self.generation = cx.spawn(|codegen, mut cx| {
|
||||
async move {
|
||||
let stream = stream.await;
|
||||
let message_id = stream
|
||||
.as_ref()
|
||||
.ok()
|
||||
.and_then(|stream| stream.message_id.clone());
|
||||
let generate = async {
|
||||
let (mut diff_tx, mut diff_rx) = mpsc::channel(1);
|
||||
let executor = cx.background_executor().clone();
|
||||
let message_id = message_id.clone();
|
||||
let line_based_stream_diff: Task<anyhow::Result<()>> =
|
||||
cx.background_spawn(async move {
|
||||
let mut response_latency = None;
|
||||
let request_start = Instant::now();
|
||||
let diff = async {
|
||||
let chunks = StripInvalidSpans::new(stream?.stream);
|
||||
futures::pin_mut!(chunks);
|
||||
let mut diff = StreamingDiff::new(selected_text.to_string());
|
||||
let mut line_diff = LineDiff::default();
|
||||
self.generation = cx.spawn(async move |codegen, cx| {
|
||||
let stream = stream.await;
|
||||
let message_id = stream
|
||||
.as_ref()
|
||||
.ok()
|
||||
.and_then(|stream| stream.message_id.clone());
|
||||
let generate = async {
|
||||
let (mut diff_tx, mut diff_rx) = mpsc::channel(1);
|
||||
let executor = cx.background_executor().clone();
|
||||
let message_id = message_id.clone();
|
||||
let line_based_stream_diff: Task<anyhow::Result<()>> =
|
||||
cx.background_spawn(async move {
|
||||
let mut response_latency = None;
|
||||
let request_start = Instant::now();
|
||||
let diff = async {
|
||||
let chunks = StripInvalidSpans::new(stream?.stream);
|
||||
futures::pin_mut!(chunks);
|
||||
let mut diff = StreamingDiff::new(selected_text.to_string());
|
||||
let mut line_diff = LineDiff::default();
|
||||
|
||||
let mut new_text = String::new();
|
||||
let mut base_indent = None;
|
||||
let mut line_indent = None;
|
||||
let mut first_line = true;
|
||||
let mut new_text = String::new();
|
||||
let mut base_indent = None;
|
||||
let mut line_indent = None;
|
||||
let mut first_line = true;
|
||||
|
||||
while let Some(chunk) = chunks.next().await {
|
||||
if response_latency.is_none() {
|
||||
response_latency = Some(request_start.elapsed());
|
||||
}
|
||||
let chunk = chunk?;
|
||||
completion_clone.lock().push_str(&chunk);
|
||||
while let Some(chunk) = chunks.next().await {
|
||||
if response_latency.is_none() {
|
||||
response_latency = Some(request_start.elapsed());
|
||||
}
|
||||
let chunk = chunk?;
|
||||
completion_clone.lock().push_str(&chunk);
|
||||
|
||||
let mut lines = chunk.split('\n').peekable();
|
||||
while let Some(line) = lines.next() {
|
||||
new_text.push_str(line);
|
||||
if line_indent.is_none() {
|
||||
if let Some(non_whitespace_ch_ix) =
|
||||
new_text.find(|ch: char| !ch.is_whitespace())
|
||||
{
|
||||
line_indent = Some(non_whitespace_ch_ix);
|
||||
base_indent = base_indent.or(line_indent);
|
||||
let mut lines = chunk.split('\n').peekable();
|
||||
while let Some(line) = lines.next() {
|
||||
new_text.push_str(line);
|
||||
if line_indent.is_none() {
|
||||
if let Some(non_whitespace_ch_ix) =
|
||||
new_text.find(|ch: char| !ch.is_whitespace())
|
||||
{
|
||||
line_indent = Some(non_whitespace_ch_ix);
|
||||
base_indent = base_indent.or(line_indent);
|
||||
|
||||
let line_indent = line_indent.unwrap();
|
||||
let base_indent = base_indent.unwrap();
|
||||
let indent_delta =
|
||||
line_indent as i32 - base_indent as i32;
|
||||
let mut corrected_indent_len = cmp::max(
|
||||
0,
|
||||
suggested_line_indent.len as i32 + indent_delta,
|
||||
)
|
||||
as usize;
|
||||
if first_line {
|
||||
corrected_indent_len = corrected_indent_len
|
||||
.saturating_sub(
|
||||
selection_start.column as usize,
|
||||
);
|
||||
}
|
||||
|
||||
let indent_char = suggested_line_indent.char();
|
||||
let mut indent_buffer = [0; 4];
|
||||
let indent_str =
|
||||
indent_char.encode_utf8(&mut indent_buffer);
|
||||
new_text.replace_range(
|
||||
..line_indent,
|
||||
&indent_str.repeat(corrected_indent_len),
|
||||
);
|
||||
let line_indent = line_indent.unwrap();
|
||||
let base_indent = base_indent.unwrap();
|
||||
let indent_delta =
|
||||
line_indent as i32 - base_indent as i32;
|
||||
let mut corrected_indent_len = cmp::max(
|
||||
0,
|
||||
suggested_line_indent.len as i32 + indent_delta,
|
||||
)
|
||||
as usize;
|
||||
if first_line {
|
||||
corrected_indent_len = corrected_indent_len
|
||||
.saturating_sub(
|
||||
selection_start.column as usize,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if line_indent.is_some() {
|
||||
let char_ops = diff.push_new(&new_text);
|
||||
line_diff
|
||||
.push_char_operations(&char_ops, &selected_text);
|
||||
diff_tx
|
||||
.send((char_ops, line_diff.line_operations()))
|
||||
.await?;
|
||||
let indent_char = suggested_line_indent.char();
|
||||
let mut indent_buffer = [0; 4];
|
||||
let indent_str =
|
||||
indent_char.encode_utf8(&mut indent_buffer);
|
||||
new_text.replace_range(
|
||||
..line_indent,
|
||||
&indent_str.repeat(corrected_indent_len),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if line_indent.is_some() {
|
||||
let char_ops = diff.push_new(&new_text);
|
||||
line_diff.push_char_operations(&char_ops, &selected_text);
|
||||
diff_tx
|
||||
.send((char_ops, line_diff.line_operations()))
|
||||
.await?;
|
||||
new_text.clear();
|
||||
}
|
||||
|
||||
if lines.peek().is_some() {
|
||||
let char_ops = diff.push_new("\n");
|
||||
line_diff.push_char_operations(&char_ops, &selected_text);
|
||||
diff_tx
|
||||
.send((char_ops, line_diff.line_operations()))
|
||||
.await?;
|
||||
if line_indent.is_none() {
|
||||
// Don't write out the leading indentation in empty lines on the next line
|
||||
// This is the case where the above if statement didn't clear the buffer
|
||||
new_text.clear();
|
||||
}
|
||||
|
||||
if lines.peek().is_some() {
|
||||
let char_ops = diff.push_new("\n");
|
||||
line_diff
|
||||
.push_char_operations(&char_ops, &selected_text);
|
||||
diff_tx
|
||||
.send((char_ops, line_diff.line_operations()))
|
||||
.await?;
|
||||
if line_indent.is_none() {
|
||||
// Don't write out the leading indentation in empty lines on the next line
|
||||
// This is the case where the above if statement didn't clear the buffer
|
||||
new_text.clear();
|
||||
}
|
||||
line_indent = None;
|
||||
first_line = false;
|
||||
}
|
||||
line_indent = None;
|
||||
first_line = false;
|
||||
}
|
||||
}
|
||||
|
||||
let mut char_ops = diff.push_new(&new_text);
|
||||
char_ops.extend(diff.finish());
|
||||
line_diff.push_char_operations(&char_ops, &selected_text);
|
||||
line_diff.finish(&selected_text);
|
||||
diff_tx
|
||||
.send((char_ops, line_diff.line_operations()))
|
||||
.await?;
|
||||
|
||||
anyhow::Ok(())
|
||||
};
|
||||
|
||||
let result = diff.await;
|
||||
|
||||
let error_message =
|
||||
result.as_ref().err().map(|error| error.to_string());
|
||||
report_assistant_event(
|
||||
AssistantEvent {
|
||||
conversation_id: None,
|
||||
message_id,
|
||||
kind: AssistantKind::Inline,
|
||||
phase: AssistantPhase::Response,
|
||||
model: model_telemetry_id,
|
||||
model_provider: model_provider_id.to_string(),
|
||||
response_latency,
|
||||
error_message,
|
||||
language_name: language_name.map(|name| name.to_proto()),
|
||||
},
|
||||
telemetry,
|
||||
http_client,
|
||||
model_api_key,
|
||||
&executor,
|
||||
);
|
||||
|
||||
result?;
|
||||
Ok(())
|
||||
});
|
||||
|
||||
while let Some((char_ops, line_ops)) = diff_rx.next().await {
|
||||
codegen.update(&mut cx, |codegen, cx| {
|
||||
codegen.last_equal_ranges.clear();
|
||||
|
||||
let edits = char_ops
|
||||
.into_iter()
|
||||
.filter_map(|operation| match operation {
|
||||
CharOperation::Insert { text } => {
|
||||
let edit_start = snapshot.anchor_after(edit_start);
|
||||
Some((edit_start..edit_start, text))
|
||||
}
|
||||
CharOperation::Delete { bytes } => {
|
||||
let edit_end = edit_start + bytes;
|
||||
let edit_range = snapshot.anchor_after(edit_start)
|
||||
..snapshot.anchor_before(edit_end);
|
||||
edit_start = edit_end;
|
||||
Some((edit_range, String::new()))
|
||||
}
|
||||
CharOperation::Keep { bytes } => {
|
||||
let edit_end = edit_start + bytes;
|
||||
let edit_range = snapshot.anchor_after(edit_start)
|
||||
..snapshot.anchor_before(edit_end);
|
||||
edit_start = edit_end;
|
||||
codegen.last_equal_ranges.push(edit_range);
|
||||
None
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
if codegen.active {
|
||||
codegen.apply_edits(edits.iter().cloned(), cx);
|
||||
codegen.reapply_line_based_diff(line_ops.iter().cloned(), cx);
|
||||
}
|
||||
codegen.edits.extend(edits);
|
||||
codegen.line_operations = line_ops;
|
||||
codegen.edit_position = Some(snapshot.anchor_after(edit_start));
|
||||
|
||||
cx.notify();
|
||||
})?;
|
||||
}
|
||||
let mut char_ops = diff.push_new(&new_text);
|
||||
char_ops.extend(diff.finish());
|
||||
line_diff.push_char_operations(&char_ops, &selected_text);
|
||||
line_diff.finish(&selected_text);
|
||||
diff_tx
|
||||
.send((char_ops, line_diff.line_operations()))
|
||||
.await?;
|
||||
|
||||
// Streaming stopped and we have the new text in the buffer, and a line-based diff applied for the whole new buffer.
|
||||
// That diff is not what a regular diff is and might look unexpected, ergo apply a regular diff.
|
||||
// It's fine to apply even if the rest of the line diffing fails, as no more hunks are coming through `diff_rx`.
|
||||
let batch_diff_task =
|
||||
codegen.update(&mut cx, |codegen, cx| codegen.reapply_batch_diff(cx))?;
|
||||
let (line_based_stream_diff, ()) =
|
||||
join!(line_based_stream_diff, batch_diff_task);
|
||||
line_based_stream_diff?;
|
||||
anyhow::Ok(())
|
||||
};
|
||||
|
||||
anyhow::Ok(())
|
||||
};
|
||||
let result = diff.await;
|
||||
|
||||
let result = generate.await;
|
||||
let elapsed_time = start_time.elapsed().as_secs_f64();
|
||||
let error_message = result.as_ref().err().map(|error| error.to_string());
|
||||
report_assistant_event(
|
||||
AssistantEvent {
|
||||
conversation_id: None,
|
||||
message_id,
|
||||
kind: AssistantKind::Inline,
|
||||
phase: AssistantPhase::Response,
|
||||
model: model_telemetry_id,
|
||||
model_provider: model_provider_id.to_string(),
|
||||
response_latency,
|
||||
error_message,
|
||||
language_name: language_name.map(|name| name.to_proto()),
|
||||
},
|
||||
telemetry,
|
||||
http_client,
|
||||
model_api_key,
|
||||
&executor,
|
||||
);
|
||||
|
||||
codegen
|
||||
.update(&mut cx, |this, cx| {
|
||||
this.message_id = message_id;
|
||||
this.last_equal_ranges.clear();
|
||||
if let Err(error) = result {
|
||||
this.status = CodegenStatus::Error(error);
|
||||
} else {
|
||||
this.status = CodegenStatus::Done;
|
||||
result?;
|
||||
Ok(())
|
||||
});
|
||||
|
||||
while let Some((char_ops, line_ops)) = diff_rx.next().await {
|
||||
codegen.update(cx, |codegen, cx| {
|
||||
codegen.last_equal_ranges.clear();
|
||||
|
||||
let edits = char_ops
|
||||
.into_iter()
|
||||
.filter_map(|operation| match operation {
|
||||
CharOperation::Insert { text } => {
|
||||
let edit_start = snapshot.anchor_after(edit_start);
|
||||
Some((edit_start..edit_start, text))
|
||||
}
|
||||
CharOperation::Delete { bytes } => {
|
||||
let edit_end = edit_start + bytes;
|
||||
let edit_range = snapshot.anchor_after(edit_start)
|
||||
..snapshot.anchor_before(edit_end);
|
||||
edit_start = edit_end;
|
||||
Some((edit_range, String::new()))
|
||||
}
|
||||
CharOperation::Keep { bytes } => {
|
||||
let edit_end = edit_start + bytes;
|
||||
let edit_range = snapshot.anchor_after(edit_start)
|
||||
..snapshot.anchor_before(edit_end);
|
||||
edit_start = edit_end;
|
||||
codegen.last_equal_ranges.push(edit_range);
|
||||
None
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
if codegen.active {
|
||||
codegen.apply_edits(edits.iter().cloned(), cx);
|
||||
codegen.reapply_line_based_diff(line_ops.iter().cloned(), cx);
|
||||
}
|
||||
this.elapsed_time = Some(elapsed_time);
|
||||
this.completion = Some(completion.lock().clone());
|
||||
cx.emit(CodegenEvent::Finished);
|
||||
codegen.edits.extend(edits);
|
||||
codegen.line_operations = line_ops;
|
||||
codegen.edit_position = Some(snapshot.anchor_after(edit_start));
|
||||
|
||||
cx.notify();
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
})?;
|
||||
}
|
||||
|
||||
// Streaming stopped and we have the new text in the buffer, and a line-based diff applied for the whole new buffer.
|
||||
// That diff is not what a regular diff is and might look unexpected, ergo apply a regular diff.
|
||||
// It's fine to apply even if the rest of the line diffing fails, as no more hunks are coming through `diff_rx`.
|
||||
let batch_diff_task =
|
||||
codegen.update(cx, |codegen, cx| codegen.reapply_batch_diff(cx))?;
|
||||
let (line_based_stream_diff, ()) = join!(line_based_stream_diff, batch_diff_task);
|
||||
line_based_stream_diff?;
|
||||
|
||||
anyhow::Ok(())
|
||||
};
|
||||
|
||||
let result = generate.await;
|
||||
let elapsed_time = start_time.elapsed().as_secs_f64();
|
||||
|
||||
codegen
|
||||
.update(cx, |this, cx| {
|
||||
this.message_id = message_id;
|
||||
this.last_equal_ranges.clear();
|
||||
if let Err(error) = result {
|
||||
this.status = CodegenStatus::Error(error);
|
||||
} else {
|
||||
this.status = CodegenStatus::Done;
|
||||
}
|
||||
this.elapsed_time = Some(elapsed_time);
|
||||
this.completion = Some(completion.lock().clone());
|
||||
cx.emit(CodegenEvent::Finished);
|
||||
cx.notify();
|
||||
})
|
||||
.ok();
|
||||
});
|
||||
cx.notify();
|
||||
}
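
One detail worth noting in the rewritten generation task above is the final `join!`: the fallible line-based streaming diff and the infallible batch re-diff must both finish before the streaming error, if any, is propagated. A small self-contained sketch of that pattern using the `futures` and `anyhow` crates (the task names are placeholders):

```rust
use futures::join;

// Placeholder stand-ins for the streaming diff task and the batch re-diff task.
async fn line_based_stream_diff() -> anyhow::Result<()> {
    Ok(())
}

async fn batch_diff_task() {}

fn main() -> anyhow::Result<()> {
    futures::executor::block_on(async {
        // Wait for both tasks; only then surface the streaming task's error.
        let (stream_result, ()) = join!(line_based_stream_diff(), batch_diff_task());
        stream_result?;
        anyhow::Ok(())
    })
}
```
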
|
||||
@@ -804,7 +798,7 @@ impl CodegenAlternative {
|
||||
let new_snapshot = self.buffer.read(cx).snapshot(cx);
|
||||
let new_range = self.range.to_point(&new_snapshot);
|
||||
|
||||
cx.spawn(|codegen, mut cx| async move {
|
||||
cx.spawn(async move |codegen, cx| {
|
||||
let (deleted_row_ranges, inserted_row_ranges) = cx
|
||||
.background_spawn(async move {
|
||||
let old_text = old_snapshot
|
||||
@@ -854,7 +848,7 @@ impl CodegenAlternative {
|
||||
.await;
|
||||
|
||||
codegen
|
||||
.update(&mut cx, |codegen, cx| {
|
||||
.update(cx, |codegen, cx| {
|
||||
codegen.diff.deleted_row_ranges = deleted_row_ranges;
|
||||
codegen.diff.inserted_row_ranges = inserted_row_ranges;
|
||||
cx.notify();
|
||||
|
||||
@@ -43,15 +43,6 @@ pub enum ContextKind {
}

impl ContextKind {
    pub fn label(&self) -> &'static str {
        match self {
            ContextKind::File => "File",
            ContextKind::Directory => "Folder",
            ContextKind::FetchedUrl => "Fetch",
            ContextKind::Thread => "Thread",
        }
    }

    pub fn icon(&self) -> IconName {
        match self {
            ContextKind::File => IconName::File,
@@ -1,22 +1,28 @@
|
||||
mod directory_context_picker;
|
||||
mod completion_provider;
|
||||
mod fetch_context_picker;
|
||||
mod file_context_picker;
|
||||
mod thread_context_picker;
|
||||
|
||||
use std::ops::Range;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
use anyhow::{anyhow, Result};
|
||||
use editor::Editor;
|
||||
use editor::display_map::{Crease, FoldId};
|
||||
use editor::{Anchor, AnchorRangeExt as _, Editor, ExcerptId, FoldPlaceholder, ToOffset};
|
||||
use file_context_picker::render_file_context_entry;
|
||||
use gpui::{App, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Task, WeakEntity};
|
||||
use gpui::{
|
||||
App, DismissEvent, Empty, Entity, EventEmitter, FocusHandle, Focusable, Task, WeakEntity,
|
||||
};
|
||||
use multi_buffer::MultiBufferRow;
|
||||
use project::ProjectPath;
|
||||
use thread_context_picker::{render_thread_context_entry, ThreadContextEntry};
|
||||
use ui::{prelude::*, ContextMenu, ContextMenuEntry, ContextMenuItem};
|
||||
use ui::{
|
||||
prelude::*, ButtonLike, ContextMenu, ContextMenuEntry, ContextMenuItem, Disclosure, TintColor,
|
||||
};
|
||||
use workspace::{notifications::NotifyResultExt, Workspace};
|
||||
|
||||
use crate::context::ContextKind;
|
||||
use crate::context_picker::directory_context_picker::DirectoryContextPicker;
|
||||
pub use crate::context_picker::completion_provider::ContextPickerCompletionProvider;
|
||||
use crate::context_picker::fetch_context_picker::FetchContextPicker;
|
||||
use crate::context_picker::file_context_picker::FileContextPicker;
|
||||
use crate::context_picker::thread_context_picker::ThreadContextPicker;
|
||||
@@ -30,19 +36,63 @@ pub enum ConfirmBehavior {
|
||||
Close,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
enum ContextPickerMode {
|
||||
File,
|
||||
Fetch,
|
||||
Thread,
|
||||
}
|
||||
|
||||
impl TryFrom<&str> for ContextPickerMode {
|
||||
type Error = String;
|
||||
|
||||
fn try_from(value: &str) -> Result<Self, Self::Error> {
|
||||
match value {
|
||||
"file" => Ok(Self::File),
|
||||
"fetch" => Ok(Self::Fetch),
|
||||
"thread" => Ok(Self::Thread),
|
||||
_ => Err(format!("Invalid context picker mode: {}", value)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ContextPickerMode {
|
||||
pub fn mention_prefix(&self) -> &'static str {
|
||||
match self {
|
||||
Self::File => "file",
|
||||
Self::Fetch => "fetch",
|
||||
Self::Thread => "thread",
|
||||
}
|
||||
}
|
||||
|
||||
pub fn label(&self) -> &'static str {
|
||||
match self {
|
||||
Self::File => "Files & Directories",
|
||||
Self::Fetch => "Fetch",
|
||||
Self::Thread => "Thread",
|
||||
}
|
||||
}
|
||||
|
||||
pub fn icon(&self) -> IconName {
|
||||
match self {
|
||||
Self::File => IconName::File,
|
||||
Self::Fetch => IconName::Globe,
|
||||
Self::Thread => IconName::MessageCircle,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
enum ContextPickerState {
|
||||
Default(Entity<ContextMenu>),
|
||||
File(Entity<FileContextPicker>),
|
||||
Directory(Entity<DirectoryContextPicker>),
|
||||
Fetch(Entity<FetchContextPicker>),
|
||||
Thread(Entity<ThreadContextPicker>),
|
||||
}
|
||||
|
||||
pub(super) struct ContextPicker {
|
||||
mode: ContextPickerMode,
|
||||
mode: ContextPickerState,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
editor: WeakEntity<Editor>,
|
||||
context_store: WeakEntity<ContextStore>,
|
||||
thread_store: Option<WeakEntity<ThreadStore>>,
|
||||
confirm_behavior: ConfirmBehavior,
|
||||
@@ -53,13 +103,12 @@ impl ContextPicker {
|
||||
workspace: WeakEntity<Workspace>,
|
||||
thread_store: Option<WeakEntity<ThreadStore>>,
|
||||
context_store: WeakEntity<ContextStore>,
|
||||
editor: WeakEntity<Editor>,
|
||||
confirm_behavior: ConfirmBehavior,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
ContextPicker {
|
||||
mode: ContextPickerMode::Default(ContextMenu::build(
|
||||
mode: ContextPickerState::Default(ContextMenu::build(
|
||||
window,
|
||||
cx,
|
||||
|menu, _window, _cx| menu,
|
||||
@@ -67,13 +116,12 @@ impl ContextPicker {
|
||||
workspace,
|
||||
context_store,
|
||||
thread_store,
|
||||
editor,
|
||||
confirm_behavior,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn init(&mut self, window: &mut Window, cx: &mut Context<Self>) {
|
||||
self.mode = ContextPickerMode::Default(self.build_menu(window, cx));
|
||||
self.mode = ContextPickerState::Default(self.build_menu(window, cx));
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
@@ -88,14 +136,7 @@ impl ContextPicker {
|
||||
.enumerate()
|
||||
.map(|(ix, entry)| self.recent_menu_item(context_picker.clone(), ix, entry));
|
||||
|
||||
let mut context_kinds = vec![
|
||||
ContextKind::File,
|
||||
ContextKind::Directory,
|
||||
ContextKind::FetchedUrl,
|
||||
];
|
||||
if self.allow_threads() {
|
||||
context_kinds.push(ContextKind::Thread);
|
||||
}
|
||||
let modes = supported_context_picker_modes(&self.thread_store);
|
||||
|
||||
let menu = menu
|
||||
.when(has_recent, |menu| {
|
||||
@@ -112,15 +153,15 @@ impl ContextPicker {
|
||||
})
|
||||
.extend(recent_entries)
|
||||
.when(has_recent, |menu| menu.separator())
|
||||
.extend(context_kinds.into_iter().map(|kind| {
|
||||
.extend(modes.into_iter().map(|mode| {
|
||||
let context_picker = context_picker.clone();
|
||||
|
||||
ContextMenuEntry::new(kind.label())
|
||||
.icon(kind.icon())
|
||||
ContextMenuEntry::new(mode.label())
|
||||
.icon(mode.icon())
|
||||
.icon_size(IconSize::XSmall)
|
||||
.icon_color(Color::Muted)
|
||||
.handler(move |window, cx| {
|
||||
context_picker.update(cx, |this, cx| this.select_kind(kind, window, cx))
|
||||
context_picker.update(cx, |this, cx| this.select_mode(mode, window, cx))
|
||||
})
|
||||
}));
|
||||
|
||||
@@ -143,16 +184,20 @@ impl ContextPicker {
|
||||
self.thread_store.is_some()
|
||||
}
|
||||
|
||||
fn select_kind(&mut self, kind: ContextKind, window: &mut Window, cx: &mut Context<Self>) {
|
||||
fn select_mode(
|
||||
&mut self,
|
||||
mode: ContextPickerMode,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let context_picker = cx.entity().downgrade();
|
||||
|
||||
match kind {
|
||||
ContextKind::File => {
|
||||
self.mode = ContextPickerMode::File(cx.new(|cx| {
|
||||
match mode {
|
||||
ContextPickerMode::File => {
|
||||
self.mode = ContextPickerState::File(cx.new(|cx| {
|
||||
FileContextPicker::new(
|
||||
context_picker.clone(),
|
||||
self.workspace.clone(),
|
||||
self.editor.clone(),
|
||||
self.context_store.clone(),
|
||||
self.confirm_behavior,
|
||||
window,
|
||||
@@ -160,20 +205,8 @@ impl ContextPicker {
|
||||
)
|
||||
}));
|
||||
}
|
||||
ContextKind::Directory => {
|
||||
self.mode = ContextPickerMode::Directory(cx.new(|cx| {
|
||||
DirectoryContextPicker::new(
|
||||
context_picker.clone(),
|
||||
self.workspace.clone(),
|
||||
self.context_store.clone(),
|
||||
self.confirm_behavior,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
}));
|
||||
}
|
||||
ContextKind::FetchedUrl => {
|
||||
self.mode = ContextPickerMode::Fetch(cx.new(|cx| {
|
||||
ContextPickerMode::Fetch => {
|
||||
self.mode = ContextPickerState::Fetch(cx.new(|cx| {
|
||||
FetchContextPicker::new(
|
||||
context_picker.clone(),
|
||||
self.workspace.clone(),
|
||||
@@ -184,9 +217,9 @@ impl ContextPicker {
|
||||
)
|
||||
}));
|
||||
}
|
||||
ContextKind::Thread => {
|
||||
ContextPickerMode::Thread => {
|
||||
if let Some(thread_store) = self.thread_store.as_ref() {
|
||||
self.mode = ContextPickerMode::Thread(cx.new(|cx| {
|
||||
self.mode = ContextPickerState::Thread(cx.new(|cx| {
|
||||
ThreadContextPicker::new(
|
||||
thread_store.clone(),
|
||||
context_picker.clone(),
|
||||
@@ -224,6 +257,7 @@ impl ContextPicker {
|
||||
ElementId::NamedInteger("ctx-recent".into(), ix),
|
||||
&path,
|
||||
&path_prefix,
|
||||
false,
|
||||
context_store.clone(),
|
||||
cx,
|
||||
)
|
||||
@@ -267,13 +301,11 @@ impl ContextPicker {
|
||||
};
|
||||
|
||||
let task = context_store.update(cx, |context_store, cx| {
|
||||
context_store.add_file_from_path(project_path.clone(), cx)
|
||||
context_store.add_file_from_path(project_path.clone(), true, cx)
|
||||
});
|
||||
|
||||
cx.spawn_in(window, |_, mut cx| async move {
|
||||
task.await.notify_async_err(&mut cx)
|
||||
})
|
||||
.detach();
|
||||
cx.spawn_in(window, async move |_, cx| task.await.notify_async_err(cx))
|
||||
.detach();
|
||||
|
||||
cx.notify();
|
||||
}
|
||||
@@ -296,13 +328,13 @@ impl ContextPicker {
|
||||
};
|
||||
|
||||
let open_thread_task = thread_store.update(cx, |this, cx| this.open_thread(&thread.id, cx));
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
cx.spawn(async move |this, cx| {
|
||||
let thread = open_thread_task.await?;
|
||||
context_store.update(&mut cx, |context_store, cx| {
|
||||
context_store.add_thread(thread, cx);
|
||||
context_store.update(cx, |context_store, cx| {
|
||||
context_store.add_thread(thread, true, cx);
|
||||
})?;
|
||||
|
||||
this.update(&mut cx, |_this, cx| cx.notify())
|
||||
this.update(cx, |_this, cx| cx.notify())
|
||||
})
|
||||
}
|
||||
|
||||
@@ -319,7 +351,7 @@ impl ContextPicker {
|
||||
|
||||
let mut current_files = context_store.file_paths(cx);
|
||||
|
||||
if let Some(active_path) = Self::active_singleton_buffer_path(&workspace, cx) {
|
||||
if let Some(active_path) = active_singleton_buffer_path(&workspace, cx) {
|
||||
current_files.insert(active_path);
|
||||
}
|
||||
|
||||
@@ -375,16 +407,6 @@ impl ContextPicker {
|
||||
|
||||
recent
|
||||
}
|
||||
|
||||
fn active_singleton_buffer_path(workspace: &Workspace, cx: &App) -> Option<PathBuf> {
|
||||
let active_item = workspace.active_item(cx)?;
|
||||
|
||||
let editor = active_item.to_any().downcast::<Editor>().ok()?.read(cx);
|
||||
let buffer = editor.buffer().read(cx).as_singleton()?;
|
||||
|
||||
let path = buffer.read(cx).file()?.path().to_path_buf();
|
||||
Some(path)
|
||||
}
|
||||
}
|
||||
|
||||
impl EventEmitter<DismissEvent> for ContextPicker {}
|
||||
@@ -392,11 +414,10 @@ impl EventEmitter<DismissEvent> for ContextPicker {}
|
||||
impl Focusable for ContextPicker {
|
||||
fn focus_handle(&self, cx: &App) -> FocusHandle {
|
||||
match &self.mode {
|
||||
ContextPickerMode::Default(menu) => menu.focus_handle(cx),
|
||||
ContextPickerMode::File(file_picker) => file_picker.focus_handle(cx),
|
||||
ContextPickerMode::Directory(directory_picker) => directory_picker.focus_handle(cx),
|
||||
ContextPickerMode::Fetch(fetch_picker) => fetch_picker.focus_handle(cx),
|
||||
ContextPickerMode::Thread(thread_picker) => thread_picker.focus_handle(cx),
|
||||
ContextPickerState::Default(menu) => menu.focus_handle(cx),
|
||||
ContextPickerState::File(file_picker) => file_picker.focus_handle(cx),
|
||||
ContextPickerState::Fetch(fetch_picker) => fetch_picker.focus_handle(cx),
|
||||
ContextPickerState::Thread(thread_picker) => thread_picker.focus_handle(cx),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -407,13 +428,10 @@ impl Render for ContextPicker {
|
||||
.w(px(400.))
|
||||
.min_w(px(400.))
|
||||
.map(|parent| match &self.mode {
|
||||
ContextPickerMode::Default(menu) => parent.child(menu.clone()),
|
||||
ContextPickerMode::File(file_picker) => parent.child(file_picker.clone()),
|
||||
ContextPickerMode::Directory(directory_picker) => {
|
||||
parent.child(directory_picker.clone())
|
||||
}
|
||||
ContextPickerMode::Fetch(fetch_picker) => parent.child(fetch_picker.clone()),
|
||||
ContextPickerMode::Thread(thread_picker) => parent.child(thread_picker.clone()),
|
||||
ContextPickerState::Default(menu) => parent.child(menu.clone()),
|
||||
ContextPickerState::File(file_picker) => parent.child(file_picker.clone()),
|
||||
ContextPickerState::Fetch(fetch_picker) => parent.child(fetch_picker.clone()),
|
||||
ContextPickerState::Thread(thread_picker) => parent.child(thread_picker.clone()),
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -424,3 +442,212 @@ enum RecentEntry {
|
||||
},
|
||||
Thread(ThreadContextEntry),
|
||||
}
|
||||
|
||||
fn supported_context_picker_modes(
|
||||
thread_store: &Option<WeakEntity<ThreadStore>>,
|
||||
) -> Vec<ContextPickerMode> {
|
||||
let mut modes = vec![ContextPickerMode::File, ContextPickerMode::Fetch];
|
||||
if thread_store.is_some() {
|
||||
modes.push(ContextPickerMode::Thread);
|
||||
}
|
||||
modes
|
||||
}
|
||||
|
||||
fn active_singleton_buffer_path(workspace: &Workspace, cx: &App) -> Option<PathBuf> {
|
||||
let active_item = workspace.active_item(cx)?;
|
||||
|
||||
let editor = active_item.to_any().downcast::<Editor>().ok()?.read(cx);
|
||||
let buffer = editor.buffer().read(cx).as_singleton()?;
|
||||
|
||||
let path = buffer.read(cx).file()?.path().to_path_buf();
|
||||
Some(path)
|
||||
}
|
||||
|
||||
fn recent_context_picker_entries(
|
||||
context_store: Entity<ContextStore>,
|
||||
thread_store: Option<WeakEntity<ThreadStore>>,
|
||||
workspace: Entity<Workspace>,
|
||||
cx: &App,
|
||||
) -> Vec<RecentEntry> {
|
||||
let mut recent = Vec::with_capacity(6);
|
||||
|
||||
let mut current_files = context_store.read(cx).file_paths(cx);
|
||||
|
||||
let workspace = workspace.read(cx);
|
||||
|
||||
if let Some(active_path) = active_singleton_buffer_path(workspace, cx) {
|
||||
current_files.insert(active_path);
|
||||
}
|
||||
|
||||
let project = workspace.project().read(cx);
|
||||
|
||||
recent.extend(
|
||||
workspace
|
||||
.recent_navigation_history_iter(cx)
|
||||
.filter(|(path, _)| !current_files.contains(&path.path.to_path_buf()))
|
||||
.take(4)
|
||||
.filter_map(|(project_path, _)| {
|
||||
project
|
||||
.worktree_for_id(project_path.worktree_id, cx)
|
||||
.map(|worktree| RecentEntry::File {
|
||||
project_path,
|
||||
path_prefix: worktree.read(cx).root_name().into(),
|
||||
})
|
||||
}),
|
||||
);
|
||||
|
||||
let mut current_threads = context_store.read(cx).thread_ids();
|
||||
|
||||
if let Some(active_thread) = workspace
|
||||
.panel::<AssistantPanel>(cx)
|
||||
.map(|panel| panel.read(cx).active_thread(cx))
|
||||
{
|
||||
current_threads.insert(active_thread.read(cx).id().clone());
|
||||
}
|
||||
|
||||
if let Some(thread_store) = thread_store.and_then(|thread_store| thread_store.upgrade()) {
|
||||
recent.extend(
|
||||
thread_store
|
||||
.read(cx)
|
||||
.threads()
|
||||
.into_iter()
|
||||
.filter(|thread| !current_threads.contains(&thread.id))
|
||||
.take(2)
|
||||
.map(|thread| {
|
||||
RecentEntry::Thread(ThreadContextEntry {
|
||||
id: thread.id,
|
||||
summary: thread.summary,
|
||||
})
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
recent
|
||||
}
|
||||
|
||||
pub(crate) fn insert_crease_for_mention(
|
||||
excerpt_id: ExcerptId,
|
||||
crease_start: text::Anchor,
|
||||
content_len: usize,
|
||||
crease_label: SharedString,
|
||||
crease_icon_path: SharedString,
|
||||
editor_entity: Entity<Editor>,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) {
|
||||
editor_entity.update(cx, |editor, cx| {
|
||||
let snapshot = editor.buffer().read(cx).snapshot(cx);
|
||||
|
||||
let Some(start) = snapshot.anchor_in_excerpt(excerpt_id, crease_start) else {
|
||||
return;
|
||||
};
|
||||
|
||||
let end = snapshot.anchor_before(start.to_offset(&snapshot) + content_len);
|
||||
|
||||
let placeholder = FoldPlaceholder {
|
||||
render: render_fold_icon_button(
|
||||
crease_icon_path,
|
||||
crease_label,
|
||||
editor_entity.downgrade(),
|
||||
),
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
let render_trailer =
|
||||
move |_row, _unfold, _window: &mut Window, _cx: &mut App| Empty.into_any();
|
||||
|
||||
let crease = Crease::inline(
|
||||
start..end,
|
||||
placeholder.clone(),
|
||||
fold_toggle("mention"),
|
||||
render_trailer,
|
||||
);
|
||||
|
||||
editor.insert_creases(vec![crease.clone()], cx);
|
||||
editor.fold_creases(vec![crease], false, window, cx);
|
||||
});
|
||||
}
|
||||
|
||||
fn render_fold_icon_button(
|
||||
icon_path: SharedString,
|
||||
label: SharedString,
|
||||
editor: WeakEntity<Editor>,
|
||||
) -> Arc<dyn Send + Sync + Fn(FoldId, Range<Anchor>, &mut App) -> AnyElement> {
|
||||
Arc::new({
|
||||
move |fold_id, fold_range, cx| {
|
||||
let is_in_text_selection = editor.upgrade().is_some_and(|editor| {
|
||||
editor.update(cx, |editor, cx| {
|
||||
let snapshot = editor
|
||||
.buffer()
|
||||
.update(cx, |multi_buffer, cx| multi_buffer.snapshot(cx));
|
||||
|
||||
let is_in_pending_selection = || {
|
||||
editor
|
||||
.selections
|
||||
.pending
|
||||
.as_ref()
|
||||
.is_some_and(|pending_selection| {
|
||||
pending_selection
|
||||
.selection
|
||||
.range()
|
||||
.includes(&fold_range, &snapshot)
|
||||
})
|
||||
};
|
||||
|
||||
let mut is_in_complete_selection = || {
|
||||
editor
|
||||
.selections
|
||||
.disjoint_in_range::<usize>(fold_range.clone(), cx)
|
||||
.into_iter()
|
||||
.any(|selection| {
|
||||
// This is needed to cover a corner case, if we just check for an existing
|
||||
// selection in the fold range, having a cursor at the start of the fold
|
||||
// marks it as selected. Non-empty selections don't cause this.
|
||||
let length = selection.end - selection.start;
|
||||
length > 0
|
||||
})
|
||||
};
|
||||
|
||||
is_in_pending_selection() || is_in_complete_selection()
|
||||
})
|
||||
});
|
||||
|
||||
ButtonLike::new(fold_id)
|
||||
.style(ButtonStyle::Filled)
|
||||
.selected_style(ButtonStyle::Tinted(TintColor::Accent))
|
||||
.toggle_state(is_in_text_selection)
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_1()
|
||||
.child(
|
||||
Icon::from_path(icon_path.clone())
|
||||
.size(IconSize::Small)
|
||||
.color(Color::Muted),
|
||||
)
|
||||
.child(
|
||||
Label::new(label.clone())
|
||||
.size(LabelSize::Small)
|
||||
.single_line(),
|
||||
),
|
||||
)
|
||||
.into_any_element()
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
fn fold_toggle(
|
||||
name: &'static str,
|
||||
) -> impl Fn(
|
||||
MultiBufferRow,
|
||||
bool,
|
||||
Arc<dyn Fn(bool, &mut Window, &mut App) + Send + Sync>,
|
||||
&mut Window,
|
||||
&mut App,
|
||||
) -> AnyElement {
|
||||
move |row, is_folded, fold, _window, _cx| {
|
||||
Disclosure::new((name, row.0 as u64), !is_folded)
|
||||
.toggle_state(is_folded)
|
||||
.on_click(move |_e, window, cx| fold(!is_folded, window, cx))
|
||||
.into_any_element()
|
||||
}
|
||||
}
|
||||
|
||||
crates/assistant2/src/context_picker/completion_provider.rs: new file, 1024 lines (contents not shown in this view)

@@ -1,269 +0,0 @@ (deleted file: the DirectoryContextPicker implementation below is removed)
|
||||
use std::path::Path;
|
||||
use std::sync::atomic::AtomicBool;
|
||||
use std::sync::Arc;
|
||||
|
||||
use fuzzy::PathMatch;
|
||||
use gpui::{App, DismissEvent, Entity, FocusHandle, Focusable, Task, WeakEntity};
|
||||
use picker::{Picker, PickerDelegate};
|
||||
use project::{PathMatchCandidateSet, ProjectPath, WorktreeId};
|
||||
use ui::{prelude::*, ListItem};
|
||||
use util::ResultExt as _;
|
||||
use workspace::{notifications::NotifyResultExt, Workspace};
|
||||
|
||||
use crate::context_picker::{ConfirmBehavior, ContextPicker};
|
||||
use crate::context_store::ContextStore;
|
||||
|
||||
pub struct DirectoryContextPicker {
|
||||
picker: Entity<Picker<DirectoryContextPickerDelegate>>,
|
||||
}
|
||||
|
||||
impl DirectoryContextPicker {
|
||||
pub fn new(
|
||||
context_picker: WeakEntity<ContextPicker>,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
context_store: WeakEntity<ContextStore>,
|
||||
confirm_behavior: ConfirmBehavior,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
let delegate = DirectoryContextPickerDelegate::new(
|
||||
context_picker,
|
||||
workspace,
|
||||
context_store,
|
||||
confirm_behavior,
|
||||
);
|
||||
let picker = cx.new(|cx| Picker::uniform_list(delegate, window, cx));
|
||||
|
||||
Self { picker }
|
||||
}
|
||||
}
|
||||
|
||||
impl Focusable for DirectoryContextPicker {
|
||||
fn focus_handle(&self, cx: &App) -> FocusHandle {
|
||||
self.picker.focus_handle(cx)
|
||||
}
|
||||
}
|
||||
|
||||
impl Render for DirectoryContextPicker {
|
||||
fn render(&mut self, _window: &mut Window, _cx: &mut Context<Self>) -> impl IntoElement {
|
||||
self.picker.clone()
|
||||
}
|
||||
}
|
||||
|
||||
pub struct DirectoryContextPickerDelegate {
|
||||
context_picker: WeakEntity<ContextPicker>,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
context_store: WeakEntity<ContextStore>,
|
||||
confirm_behavior: ConfirmBehavior,
|
||||
matches: Vec<PathMatch>,
|
||||
selected_index: usize,
|
||||
}
|
||||
|
||||
impl DirectoryContextPickerDelegate {
|
||||
pub fn new(
|
||||
context_picker: WeakEntity<ContextPicker>,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
context_store: WeakEntity<ContextStore>,
|
||||
confirm_behavior: ConfirmBehavior,
|
||||
) -> Self {
|
||||
Self {
|
||||
context_picker,
|
||||
workspace,
|
||||
context_store,
|
||||
confirm_behavior,
|
||||
matches: Vec::new(),
|
||||
selected_index: 0,
|
||||
}
|
||||
}
|
||||
|
||||
fn search(
|
||||
&mut self,
|
||||
query: String,
|
||||
cancellation_flag: Arc<AtomicBool>,
|
||||
workspace: &Entity<Workspace>,
|
||||
cx: &mut Context<Picker<Self>>,
|
||||
) -> Task<Vec<PathMatch>> {
|
||||
if query.is_empty() {
|
||||
let workspace = workspace.read(cx);
|
||||
let project = workspace.project().read(cx);
|
||||
let directory_matches = project.worktrees(cx).flat_map(|worktree| {
|
||||
let worktree = worktree.read(cx);
|
||||
let path_prefix: Arc<str> = worktree.root_name().into();
|
||||
worktree.directories(false, 0).map(move |entry| PathMatch {
|
||||
score: 0.,
|
||||
positions: Vec::new(),
|
||||
worktree_id: worktree.id().to_usize(),
|
||||
path: entry.path.clone(),
|
||||
path_prefix: path_prefix.clone(),
|
||||
distance_to_relative_ancestor: 0,
|
||||
is_dir: true,
|
||||
})
|
||||
});
|
||||
|
||||
Task::ready(directory_matches.collect())
|
||||
} else {
|
||||
let worktrees = workspace.read(cx).visible_worktrees(cx).collect::<Vec<_>>();
|
||||
let candidate_sets = worktrees
|
||||
.into_iter()
|
||||
.map(|worktree| {
|
||||
let worktree = worktree.read(cx);
|
||||
|
||||
PathMatchCandidateSet {
|
||||
snapshot: worktree.snapshot(),
|
||||
include_ignored: worktree
|
||||
.root_entry()
|
||||
.map_or(false, |entry| entry.is_ignored),
|
||||
include_root_name: true,
|
||||
candidates: project::Candidates::Directories,
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let executor = cx.background_executor().clone();
|
||||
cx.foreground_executor().spawn(async move {
|
||||
fuzzy::match_path_sets(
|
||||
candidate_sets.as_slice(),
|
||||
query.as_str(),
|
||||
None,
|
||||
false,
|
||||
100,
|
||||
&cancellation_flag,
|
||||
executor,
|
||||
)
|
||||
.await
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl PickerDelegate for DirectoryContextPickerDelegate {
|
||||
type ListItem = ListItem;
|
||||
|
||||
fn match_count(&self) -> usize {
|
||||
self.matches.len()
|
||||
}
|
||||
|
||||
fn selected_index(&self) -> usize {
|
||||
self.selected_index
|
||||
}
|
||||
|
||||
fn set_selected_index(
|
||||
&mut self,
|
||||
ix: usize,
|
||||
_window: &mut Window,
|
||||
_cx: &mut Context<Picker<Self>>,
|
||||
) {
|
||||
self.selected_index = ix;
|
||||
}
|
||||
|
||||
fn placeholder_text(&self, _window: &mut Window, _cx: &mut App) -> Arc<str> {
|
||||
"Search folders…".into()
|
||||
}
|
||||
|
||||
fn update_matches(
|
||||
&mut self,
|
||||
query: String,
|
||||
_window: &mut Window,
|
||||
cx: &mut Context<Picker<Self>>,
|
||||
) -> Task<()> {
|
||||
let Some(workspace) = self.workspace.upgrade() else {
|
||||
return Task::ready(());
|
||||
};
|
||||
|
||||
let search_task = self.search(query, Arc::<AtomicBool>::default(), &workspace, cx);
|
||||
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
let mut paths = search_task.await;
|
||||
let empty_path = Path::new("");
|
||||
paths.retain(|path_match| path_match.path.as_ref() != empty_path);
|
||||
|
||||
this.update(&mut cx, |this, _cx| {
|
||||
this.delegate.matches = paths;
|
||||
})
|
||||
.log_err();
|
||||
})
|
||||
}
|
||||
|
||||
fn confirm(&mut self, _secondary: bool, window: &mut Window, cx: &mut Context<Picker<Self>>) {
|
||||
let Some(mat) = self.matches.get(self.selected_index) else {
|
||||
return;
|
||||
};
|
||||
|
||||
let project_path = ProjectPath {
|
||||
worktree_id: WorktreeId::from_usize(mat.worktree_id),
|
||||
path: mat.path.clone(),
|
||||
};
|
||||
|
||||
let Some(task) = self
|
||||
.context_store
|
||||
.update(cx, |context_store, cx| {
|
||||
context_store.add_directory(project_path, cx)
|
||||
})
|
||||
.ok()
|
||||
else {
|
||||
return;
|
||||
};
|
||||
|
||||
let confirm_behavior = self.confirm_behavior;
|
||||
cx.spawn_in(window, |this, mut cx| async move {
|
||||
match task.await.notify_async_err(&mut cx) {
|
||||
None => anyhow::Ok(()),
|
||||
Some(()) => this.update_in(&mut cx, |this, window, cx| match confirm_behavior {
|
||||
ConfirmBehavior::KeepOpen => {}
|
||||
ConfirmBehavior::Close => this.delegate.dismissed(window, cx),
|
||||
}),
|
||||
}
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
}
|
||||
|
||||
fn dismissed(&mut self, _window: &mut Window, cx: &mut Context<Picker<Self>>) {
|
||||
self.context_picker
|
||||
.update(cx, |_, cx| {
|
||||
cx.emit(DismissEvent);
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
|
||||
fn render_match(
|
||||
&self,
|
||||
ix: usize,
|
||||
selected: bool,
|
||||
_window: &mut Window,
|
||||
cx: &mut Context<Picker<Self>>,
|
||||
) -> Option<Self::ListItem> {
|
||||
let path_match = &self.matches[ix];
|
||||
let directory_name = path_match.path.to_string_lossy().to_string();
|
||||
|
||||
let added = self.context_store.upgrade().map_or(false, |context_store| {
|
||||
context_store
|
||||
.read(cx)
|
||||
.includes_directory(&path_match.path)
|
||||
.is_some()
|
||||
});
|
||||
|
||||
Some(
|
||||
ListItem::new(ix)
|
||||
.inset(true)
|
||||
.toggle_state(selected)
|
||||
.start_slot(
|
||||
Icon::new(IconName::Folder)
|
||||
.size(IconSize::XSmall)
|
||||
.color(Color::Muted),
|
||||
)
|
||||
.child(Label::new(directory_name))
|
||||
.when(added, |el| {
|
||||
el.end_slot(
|
||||
h_flex()
|
||||
.gap_1()
|
||||
.child(
|
||||
Icon::new(IconName::Check)
|
||||
.size(IconSize::Small)
|
||||
.color(Color::Success),
|
||||
)
|
||||
.child(Label::new("Added").size(LabelSize::Small)),
|
||||
)
|
||||
}),
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -81,77 +81,80 @@ impl FetchContextPickerDelegate {
|
||||
url: String::new(),
}
}
}

async fn build_message(http_client: Arc<HttpClientWithUrl>, url: String) -> Result<String> {
let url = if !url.starts_with("https://") && !url.starts_with("http://") {
format!("https://{url}")
} else {
url
};
pub(crate) async fn fetch_url_content(
http_client: Arc<HttpClientWithUrl>,
url: String,
) -> Result<String> {
let url = if !url.starts_with("https://") && !url.starts_with("http://") {
format!("https://{url}")
} else {
url
};

let mut response = http_client.get(&url, AsyncBody::default(), true).await?;
let mut response = http_client.get(&url, AsyncBody::default(), true).await?;

let mut body = Vec::new();
response
.body_mut()
.read_to_end(&mut body)
.await
.context("error reading response body")?;
let mut body = Vec::new();
response
.body_mut()
.read_to_end(&mut body)
.await
.context("error reading response body")?;

if response.status().is_client_error() {
let text = String::from_utf8_lossy(body.as_slice());
bail!(
"status error {}, response: {text:?}",
response.status().as_u16()
);
if response.status().is_client_error() {
let text = String::from_utf8_lossy(body.as_slice());
bail!(
"status error {}, response: {text:?}",
response.status().as_u16()
);
}

let Some(content_type) = response.headers().get("content-type") else {
bail!("missing Content-Type header");
};
let content_type = content_type
.to_str()
.context("invalid Content-Type header")?;
let content_type = match content_type {
"text/html" => ContentType::Html,
"text/plain" => ContentType::Plaintext,
"application/json" => ContentType::Json,
_ => ContentType::Html,
};

match content_type {
ContentType::Html => {
let mut handlers: Vec<TagHandler> = vec![
Rc::new(RefCell::new(markdown::WebpageChromeRemover)),
Rc::new(RefCell::new(markdown::ParagraphHandler)),
Rc::new(RefCell::new(markdown::HeadingHandler)),
Rc::new(RefCell::new(markdown::ListHandler)),
Rc::new(RefCell::new(markdown::TableHandler::new())),
Rc::new(RefCell::new(markdown::StyledTextHandler)),
];
if url.contains("wikipedia.org") {
use html_to_markdown::structure::wikipedia;

handlers.push(Rc::new(RefCell::new(wikipedia::WikipediaChromeRemover)));
handlers.push(Rc::new(RefCell::new(wikipedia::WikipediaInfoboxHandler)));
handlers.push(Rc::new(
RefCell::new(wikipedia::WikipediaCodeHandler::new()),
));
} else {
handlers.push(Rc::new(RefCell::new(markdown::CodeHandler)));
}

convert_html_to_markdown(&body[..], &mut handlers)
}
ContentType::Plaintext => Ok(std::str::from_utf8(&body)?.to_owned()),
ContentType::Json => {
let json: serde_json::Value = serde_json::from_slice(&body)?;

let Some(content_type) = response.headers().get("content-type") else {
bail!("missing Content-Type header");
};
let content_type = content_type
.to_str()
.context("invalid Content-Type header")?;
let content_type = match content_type {
"text/html" => ContentType::Html,
"text/plain" => ContentType::Plaintext,
"application/json" => ContentType::Json,
_ => ContentType::Html,
};

match content_type {
ContentType::Html => {
let mut handlers: Vec<TagHandler> = vec![
Rc::new(RefCell::new(markdown::WebpageChromeRemover)),
Rc::new(RefCell::new(markdown::ParagraphHandler)),
Rc::new(RefCell::new(markdown::HeadingHandler)),
Rc::new(RefCell::new(markdown::ListHandler)),
Rc::new(RefCell::new(markdown::TableHandler::new())),
Rc::new(RefCell::new(markdown::StyledTextHandler)),
];
if url.contains("wikipedia.org") {
use html_to_markdown::structure::wikipedia;

handlers.push(Rc::new(RefCell::new(wikipedia::WikipediaChromeRemover)));
handlers.push(Rc::new(RefCell::new(wikipedia::WikipediaInfoboxHandler)));
handlers.push(Rc::new(
RefCell::new(wikipedia::WikipediaCodeHandler::new()),
));
} else {
handlers.push(Rc::new(RefCell::new(markdown::CodeHandler)));
}

convert_html_to_markdown(&body[..], &mut handlers)
}
ContentType::Plaintext => Ok(std::str::from_utf8(&body)?.to_owned()),
ContentType::Json => {
let json: serde_json::Value = serde_json::from_slice(&body)?;

Ok(format!(
"```json\n{}\n```",
serde_json::to_string_pretty(&json)?
))
}
Ok(format!(
"```json\n{}\n```",
serde_json::to_string_pretty(&json)?
))
}
}
}
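
Note on the hunk above: `build_message` becomes the crate-visible `fetch_url_content`, but the flow is unchanged: normalize the URL scheme, fetch the body, check the status, then dispatch on the Content-Type header. The `ContentType` enum itself is defined elsewhere in the file, so the sketch below is an assumed reconstruction of that enum and of the header dispatch, kept standalone for illustration.

// Minimal sketch (an assumption, not code from this diff): the ContentType enum
// that fetch_url_content matches on, with the same header-to-variant mapping.
// Unknown types fall back to Html, mirroring the `_ => ContentType::Html` arm above.
#[derive(Debug, PartialEq)]
enum ContentType {
    Html,
    Plaintext,
    Json,
}

fn content_type_from_header(value: &str) -> ContentType {
    match value {
        "text/html" => ContentType::Html,
        "text/plain" => ContentType::Plaintext,
        "application/json" => ContentType::Json,
        _ => ContentType::Html,
    }
}

fn main() {
    assert_eq!(content_type_from_header("application/json"), ContentType::Json);
    assert_eq!(content_type_from_header("image/png"), ContentType::Html);
}
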
@@ -206,12 +209,12 @@ impl PickerDelegate for FetchContextPickerDelegate {
let http_client = workspace.read(cx).client().http_client().clone();
let url = self.url.clone();
let confirm_behavior = self.confirm_behavior;
cx.spawn_in(window, |this, mut cx| async move {
cx.spawn_in(window, async move |this, cx| {
let text = cx
.background_spawn(Self::build_message(http_client, url.clone()))
.background_spawn(fetch_url_content(http_client, url.clone()))
.await?;

this.update_in(&mut cx, |this, window, cx| {
this.update_in(cx, |this, window, cx| {
this.delegate
.context_store
.update(cx, |context_store, _cx| {

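The hunk above also shows the spawn-style migration that repeats through the rest of this diff: the gpui spawn methods now take an async closure that receives the async context as an argument (`async move |this, cx|`), so downstream calls pass `cx` directly instead of `&mut cx`. A schematic comparison, assembled from the before/after lines in these hunks (not a standalone compilable program; `do_work()` and the closure bodies are placeholders):

// Before: a closure returns an `async move` block that owns its copy of the context.
cx.spawn_in(window, |this, mut cx| async move {
    let text = cx.background_spawn(do_work()).await?;
    this.update_in(&mut cx, |this, window, cx| { /* use the result */ })
});

// After: an async closure borrows the context, so `cx` is passed as-is.
cx.spawn_in(window, async move |this, cx| {
    let text = cx.background_spawn(do_work()).await?;
    this.update_in(cx, |this, window, cx| { /* use the result */ })
});
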
@@ -1,25 +1,15 @@
use std::collections::BTreeSet;
use std::ops::Range;
use std::path::Path;
use std::sync::atomic::AtomicBool;
use std::sync::Arc;

use editor::actions::FoldAt;
use editor::display_map::{Crease, FoldId};
use editor::scroll::Autoscroll;
use editor::{Anchor, AnchorRangeExt, Editor, FoldPlaceholder, ToPoint};
use file_icons::FileIcons;
use fuzzy::PathMatch;
use gpui::{
AnyElement, App, AppContext, DismissEvent, Empty, Entity, FocusHandle, Focusable, Stateful,
Task, WeakEntity,
App, AppContext, DismissEvent, Entity, FocusHandle, Focusable, Stateful, Task, WeakEntity,
};
use multi_buffer::{MultiBufferPoint, MultiBufferRow};
use picker::{Picker, PickerDelegate};
use project::{PathMatchCandidateSet, ProjectPath, WorktreeId};
use rope::Point;
use text::SelectionGoal;
use ui::{prelude::*, ButtonLike, Disclosure, ListItem, TintColor, Tooltip};
use ui::{prelude::*, ListItem, Tooltip};
use util::ResultExt as _;
use workspace::{notifications::NotifyResultExt, Workspace};

@@ -34,7 +24,6 @@ impl FileContextPicker {
|
||||
pub fn new(
|
||||
context_picker: WeakEntity<ContextPicker>,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
editor: WeakEntity<Editor>,
|
||||
context_store: WeakEntity<ContextStore>,
|
||||
confirm_behavior: ConfirmBehavior,
|
||||
window: &mut Window,
|
||||
@@ -43,7 +32,6 @@ impl FileContextPicker {
|
||||
let delegate = FileContextPickerDelegate::new(
|
||||
context_picker,
|
||||
workspace,
|
||||
editor,
|
||||
context_store,
|
||||
confirm_behavior,
|
||||
);
|
||||
@@ -68,7 +56,6 @@ impl Render for FileContextPicker {
|
||||
pub struct FileContextPickerDelegate {
|
||||
context_picker: WeakEntity<ContextPicker>,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
editor: WeakEntity<Editor>,
|
||||
context_store: WeakEntity<ContextStore>,
|
||||
confirm_behavior: ConfirmBehavior,
|
||||
matches: Vec<PathMatch>,
|
||||
@@ -79,96 +66,18 @@ impl FileContextPickerDelegate {
|
||||
pub fn new(
|
||||
context_picker: WeakEntity<ContextPicker>,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
editor: WeakEntity<Editor>,
|
||||
context_store: WeakEntity<ContextStore>,
|
||||
confirm_behavior: ConfirmBehavior,
|
||||
) -> Self {
|
||||
Self {
|
||||
context_picker,
|
||||
workspace,
|
||||
editor,
|
||||
context_store,
|
||||
confirm_behavior,
|
||||
matches: Vec::new(),
|
||||
selected_index: 0,
|
||||
}
|
||||
}
|
||||
|
||||
fn search(
|
||||
&mut self,
|
||||
query: String,
|
||||
cancellation_flag: Arc<AtomicBool>,
|
||||
workspace: &Entity<Workspace>,
|
||||
|
||||
cx: &mut Context<Picker<Self>>,
|
||||
) -> Task<Vec<PathMatch>> {
|
||||
if query.is_empty() {
|
||||
let workspace = workspace.read(cx);
|
||||
let project = workspace.project().read(cx);
|
||||
let recent_matches = workspace
|
||||
.recent_navigation_history(Some(10), cx)
|
||||
.into_iter()
|
||||
.filter_map(|(project_path, _)| {
|
||||
let worktree = project.worktree_for_id(project_path.worktree_id, cx)?;
|
||||
Some(PathMatch {
|
||||
score: 0.,
|
||||
positions: Vec::new(),
|
||||
worktree_id: project_path.worktree_id.to_usize(),
|
||||
path: project_path.path,
|
||||
path_prefix: worktree.read(cx).root_name().into(),
|
||||
distance_to_relative_ancestor: 0,
|
||||
is_dir: false,
|
||||
})
|
||||
});
|
||||
|
||||
let file_matches = project.worktrees(cx).flat_map(|worktree| {
|
||||
let worktree = worktree.read(cx);
|
||||
let path_prefix: Arc<str> = worktree.root_name().into();
|
||||
worktree.files(false, 0).map(move |entry| PathMatch {
|
||||
score: 0.,
|
||||
positions: Vec::new(),
|
||||
worktree_id: worktree.id().to_usize(),
|
||||
path: entry.path.clone(),
|
||||
path_prefix: path_prefix.clone(),
|
||||
distance_to_relative_ancestor: 0,
|
||||
is_dir: false,
|
||||
})
|
||||
});
|
||||
|
||||
Task::ready(recent_matches.chain(file_matches).collect())
|
||||
} else {
|
||||
let worktrees = workspace.read(cx).visible_worktrees(cx).collect::<Vec<_>>();
|
||||
let candidate_sets = worktrees
|
||||
.into_iter()
|
||||
.map(|worktree| {
|
||||
let worktree = worktree.read(cx);
|
||||
|
||||
PathMatchCandidateSet {
|
||||
snapshot: worktree.snapshot(),
|
||||
include_ignored: worktree
|
||||
.root_entry()
|
||||
.map_or(false, |entry| entry.is_ignored),
|
||||
include_root_name: true,
|
||||
candidates: project::Candidates::Files,
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let executor = cx.background_executor().clone();
|
||||
cx.foreground_executor().spawn(async move {
|
||||
fuzzy::match_path_sets(
|
||||
candidate_sets.as_slice(),
|
||||
query.as_str(),
|
||||
None,
|
||||
false,
|
||||
100,
|
||||
&cancellation_flag,
|
||||
executor,
|
||||
)
|
||||
.await
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl PickerDelegate for FileContextPickerDelegate {
|
||||
@@ -192,7 +101,7 @@ impl PickerDelegate for FileContextPickerDelegate {
|
||||
}
|
||||
|
||||
fn placeholder_text(&self, _window: &mut Window, _cx: &mut App) -> Arc<str> {
|
||||
"Search files…".into()
|
||||
"Search files & directories…".into()
|
||||
}
|
||||
|
||||
fn update_matches(
|
||||
@@ -205,13 +114,13 @@ impl PickerDelegate for FileContextPickerDelegate {
|
||||
return Task::ready(());
|
||||
};
|
||||
|
||||
let search_task = self.search(query, Arc::<AtomicBool>::default(), &workspace, cx);
|
||||
let search_task = search_paths(query, Arc::<AtomicBool>::default(), &workspace, cx);
|
||||
|
||||
cx.spawn_in(window, |this, mut cx| async move {
|
||||
cx.spawn_in(window, async move |this, cx| {
|
||||
// TODO: This should probably be run in the background.
|
||||
let paths = search_task.await;
|
||||
|
||||
this.update(&mut cx, |this, _cx| {
|
||||
this.update(cx, |this, _cx| {
|
||||
this.delegate.matches = paths;
|
||||
})
|
||||
.log_err();
|
||||
@@ -223,114 +132,21 @@ impl PickerDelegate for FileContextPickerDelegate {
|
||||
return;
|
||||
};
|
||||
|
||||
let Some(file_name) = mat
|
||||
.path
|
||||
.file_name()
|
||||
.map(|os_str| os_str.to_string_lossy().into_owned())
|
||||
else {
|
||||
return;
|
||||
};
|
||||
|
||||
let full_path = mat.path.display().to_string();
|
||||
|
||||
let project_path = ProjectPath {
|
||||
worktree_id: WorktreeId::from_usize(mat.worktree_id),
|
||||
path: mat.path.clone(),
|
||||
};
|
||||
|
||||
let Some(editor_entity) = self.editor.upgrade() else {
|
||||
return;
|
||||
};
|
||||
|
||||
editor_entity.update(cx, |editor, cx| {
|
||||
editor.transact(window, cx, |editor, window, cx| {
|
||||
// Move empty selections left by 1 column to select the `@`s, so they get overwritten when we insert.
|
||||
{
|
||||
let mut selections = editor.selections.all::<MultiBufferPoint>(cx);
|
||||
|
||||
for selection in selections.iter_mut() {
|
||||
if selection.is_empty() {
|
||||
let old_head = selection.head();
|
||||
let new_head = MultiBufferPoint::new(
|
||||
old_head.row,
|
||||
old_head.column.saturating_sub(1),
|
||||
);
|
||||
selection.set_head(new_head, SelectionGoal::None);
|
||||
}
|
||||
}
|
||||
|
||||
editor.change_selections(Some(Autoscroll::fit()), window, cx, |s| {
|
||||
s.select(selections)
|
||||
});
|
||||
}
|
||||
|
||||
let start_anchors = {
|
||||
let snapshot = editor.buffer().read(cx).snapshot(cx);
|
||||
editor
|
||||
.selections
|
||||
.all::<Point>(cx)
|
||||
.into_iter()
|
||||
.map(|selection| snapshot.anchor_before(selection.start))
|
||||
.collect::<Vec<_>>()
|
||||
};
|
||||
|
||||
editor.insert(&full_path, window, cx);
|
||||
|
||||
let end_anchors = {
|
||||
let snapshot = editor.buffer().read(cx).snapshot(cx);
|
||||
editor
|
||||
.selections
|
||||
.all::<Point>(cx)
|
||||
.into_iter()
|
||||
.map(|selection| snapshot.anchor_after(selection.end))
|
||||
.collect::<Vec<_>>()
|
||||
};
|
||||
|
||||
editor.insert("\n", window, cx); // Needed to end the fold
|
||||
|
||||
let file_icon = FileIcons::get_icon(&Path::new(&full_path), cx)
|
||||
.unwrap_or_else(|| SharedString::new(""));
|
||||
|
||||
let placeholder = FoldPlaceholder {
|
||||
render: render_fold_icon_button(
|
||||
file_icon,
|
||||
file_name.into(),
|
||||
editor_entity.downgrade(),
|
||||
),
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
let render_trailer =
|
||||
move |_row, _unfold, _window: &mut Window, _cx: &mut App| Empty.into_any();
|
||||
|
||||
let buffer = editor.buffer().read(cx).snapshot(cx);
|
||||
let mut rows_to_fold = BTreeSet::new();
|
||||
let crease_iter = start_anchors
|
||||
.into_iter()
|
||||
.zip(end_anchors)
|
||||
.map(|(start, end)| {
|
||||
rows_to_fold.insert(MultiBufferRow(start.to_point(&buffer).row));
|
||||
|
||||
Crease::inline(
|
||||
start..end,
|
||||
placeholder.clone(),
|
||||
fold_toggle("tool-use"),
|
||||
render_trailer,
|
||||
)
|
||||
});
|
||||
|
||||
editor.insert_creases(crease_iter, cx);
|
||||
|
||||
for buffer_row in rows_to_fold {
|
||||
editor.fold_at(&FoldAt { buffer_row }, window, cx);
|
||||
}
|
||||
});
|
||||
});
|
||||
let is_directory = mat.is_dir;
|
||||
|
||||
let Some(task) = self
|
||||
.context_store
|
||||
.update(cx, |context_store, cx| {
|
||||
context_store.add_file_from_path(project_path, cx)
|
||||
if is_directory {
|
||||
context_store.add_directory(project_path, true, cx)
|
||||
} else {
|
||||
context_store.add_file_from_path(project_path, true, cx)
|
||||
}
|
||||
})
|
||||
.ok()
|
||||
else {
|
||||
@@ -338,10 +154,10 @@ impl PickerDelegate for FileContextPickerDelegate {
|
||||
};
|
||||
|
||||
let confirm_behavior = self.confirm_behavior;
|
||||
cx.spawn_in(window, |this, mut cx| async move {
|
||||
match task.await.notify_async_err(&mut cx) {
|
||||
cx.spawn_in(window, async move |this, cx| {
|
||||
match task.await.notify_async_err(cx) {
|
||||
None => anyhow::Ok(()),
|
||||
Some(()) => this.update_in(&mut cx, |this, window, cx| match confirm_behavior {
|
||||
Some(()) => this.update_in(cx, |this, window, cx| match confirm_behavior {
|
||||
ConfirmBehavior::KeepOpen => {}
|
||||
ConfirmBehavior::Close => this.delegate.dismissed(window, cx),
|
||||
}),
|
||||
@@ -375,6 +191,7 @@ impl PickerDelegate for FileContextPickerDelegate {
|
||||
ElementId::NamedInteger("file-ctx-picker".into(), ix),
|
||||
&path_match.path,
|
||||
&path_match.path_prefix,
|
||||
path_match.is_dir,
|
||||
self.context_store.clone(),
|
||||
cx,
|
||||
)),
|
||||
@@ -382,10 +199,85 @@ impl PickerDelegate for FileContextPickerDelegate {
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn search_paths(
|
||||
query: String,
|
||||
cancellation_flag: Arc<AtomicBool>,
|
||||
workspace: &Entity<Workspace>,
|
||||
cx: &App,
|
||||
) -> Task<Vec<PathMatch>> {
|
||||
if query.is_empty() {
|
||||
let workspace = workspace.read(cx);
|
||||
let project = workspace.project().read(cx);
|
||||
let recent_matches = workspace
|
||||
.recent_navigation_history(Some(10), cx)
|
||||
.into_iter()
|
||||
.filter_map(|(project_path, _)| {
|
||||
let worktree = project.worktree_for_id(project_path.worktree_id, cx)?;
|
||||
Some(PathMatch {
|
||||
score: 0.,
|
||||
positions: Vec::new(),
|
||||
worktree_id: project_path.worktree_id.to_usize(),
|
||||
path: project_path.path,
|
||||
path_prefix: worktree.read(cx).root_name().into(),
|
||||
distance_to_relative_ancestor: 0,
|
||||
is_dir: false,
|
||||
})
|
||||
});
|
||||
|
||||
let file_matches = project.worktrees(cx).flat_map(|worktree| {
|
||||
let worktree = worktree.read(cx);
|
||||
let path_prefix: Arc<str> = worktree.root_name().into();
|
||||
worktree.entries(false, 0).map(move |entry| PathMatch {
|
||||
score: 0.,
|
||||
positions: Vec::new(),
|
||||
worktree_id: worktree.id().to_usize(),
|
||||
path: entry.path.clone(),
|
||||
path_prefix: path_prefix.clone(),
|
||||
distance_to_relative_ancestor: 0,
|
||||
is_dir: entry.is_dir(),
|
||||
})
|
||||
});
|
||||
|
||||
Task::ready(recent_matches.chain(file_matches).collect())
|
||||
} else {
|
||||
let worktrees = workspace.read(cx).visible_worktrees(cx).collect::<Vec<_>>();
|
||||
let candidate_sets = worktrees
|
||||
.into_iter()
|
||||
.map(|worktree| {
|
||||
let worktree = worktree.read(cx);
|
||||
|
||||
PathMatchCandidateSet {
|
||||
snapshot: worktree.snapshot(),
|
||||
include_ignored: worktree
|
||||
.root_entry()
|
||||
.map_or(false, |entry| entry.is_ignored),
|
||||
include_root_name: true,
|
||||
candidates: project::Candidates::Entries,
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let executor = cx.background_executor().clone();
|
||||
cx.foreground_executor().spawn(async move {
|
||||
fuzzy::match_path_sets(
|
||||
candidate_sets.as_slice(),
|
||||
query.as_str(),
|
||||
None,
|
||||
false,
|
||||
100,
|
||||
&cancellation_flag,
|
||||
executor,
|
||||
)
|
||||
.await
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub fn render_file_context_entry(
|
||||
id: ElementId,
|
||||
path: &Path,
|
||||
path_prefix: &Arc<str>,
|
||||
is_directory: bool,
|
||||
context_store: WeakEntity<ContextStore>,
|
||||
cx: &App,
|
||||
) -> Stateful<Div> {
|
||||
@@ -409,13 +301,24 @@ pub fn render_file_context_entry(
|
||||
(file_name, Some(directory))
|
||||
};
|
||||
|
||||
let added = context_store
|
||||
.upgrade()
|
||||
.and_then(|context_store| context_store.read(cx).will_include_file_path(path, cx));
|
||||
let added = context_store.upgrade().and_then(|context_store| {
|
||||
if is_directory {
|
||||
context_store
|
||||
.read(cx)
|
||||
.includes_directory(path)
|
||||
.map(FileInclusion::Direct)
|
||||
} else {
|
||||
context_store.read(cx).will_include_file_path(path, cx)
|
||||
}
|
||||
});
|
||||
|
||||
let file_icon = FileIcons::get_icon(&path, cx)
|
||||
.map(Icon::from_path)
|
||||
.unwrap_or_else(|| Icon::new(IconName::File));
|
||||
let file_icon = if is_directory {
|
||||
FileIcons::get_folder_icon(false, cx)
|
||||
} else {
|
||||
FileIcons::get_icon(&path, cx)
|
||||
}
|
||||
.map(Icon::from_path)
|
||||
.unwrap_or_else(|| Icon::new(IconName::File));
|
||||
|
||||
h_flex()
|
||||
.id(id)
|
||||
@@ -464,85 +367,3 @@ pub fn render_file_context_entry(
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
fn render_fold_icon_button(
|
||||
icon: SharedString,
|
||||
label: SharedString,
|
||||
editor: WeakEntity<Editor>,
|
||||
) -> Arc<dyn Send + Sync + Fn(FoldId, Range<Anchor>, &mut App) -> AnyElement> {
|
||||
Arc::new(move |fold_id, fold_range, cx| {
|
||||
let is_in_text_selection = editor.upgrade().is_some_and(|editor| {
|
||||
editor.update(cx, |editor, cx| {
|
||||
let snapshot = editor
|
||||
.buffer()
|
||||
.update(cx, |multi_buffer, cx| multi_buffer.snapshot(cx));
|
||||
|
||||
let is_in_pending_selection = || {
|
||||
editor
|
||||
.selections
|
||||
.pending
|
||||
.as_ref()
|
||||
.is_some_and(|pending_selection| {
|
||||
pending_selection
|
||||
.selection
|
||||
.range()
|
||||
.includes(&fold_range, &snapshot)
|
||||
})
|
||||
};
|
||||
|
||||
let mut is_in_complete_selection = || {
|
||||
editor
|
||||
.selections
|
||||
.disjoint_in_range::<usize>(fold_range.clone(), cx)
|
||||
.into_iter()
|
||||
.any(|selection| {
|
||||
// This is needed to cover a corner case, if we just check for an existing
|
||||
// selection in the fold range, having a cursor at the start of the fold
|
||||
// marks it as selected. Non-empty selections don't cause this.
|
||||
let length = selection.end - selection.start;
|
||||
length > 0
|
||||
})
|
||||
};
|
||||
|
||||
is_in_pending_selection() || is_in_complete_selection()
|
||||
})
|
||||
});
|
||||
|
||||
ButtonLike::new(fold_id)
|
||||
.style(ButtonStyle::Filled)
|
||||
.selected_style(ButtonStyle::Tinted(TintColor::Accent))
|
||||
.toggle_state(is_in_text_selection)
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_1()
|
||||
.child(
|
||||
Icon::from_path(icon.clone())
|
||||
.size(IconSize::Small)
|
||||
.color(Color::Muted),
|
||||
)
|
||||
.child(
|
||||
Label::new(label.clone())
|
||||
.size(LabelSize::Small)
|
||||
.single_line(),
|
||||
),
|
||||
)
|
||||
.into_any_element()
|
||||
})
|
||||
}
|
||||
|
||||
fn fold_toggle(
|
||||
name: &'static str,
|
||||
) -> impl Fn(
|
||||
MultiBufferRow,
|
||||
bool,
|
||||
Arc<dyn Fn(bool, &mut Window, &mut App) + Send + Sync>,
|
||||
&mut Window,
|
||||
&mut App,
|
||||
) -> AnyElement {
|
||||
move |row, is_folded, fold, _window, _cx| {
|
||||
Disclosure::new((name, row.0 as u64), !is_folded)
|
||||
.toggle_state(is_folded)
|
||||
.on_click(move |_e, window, cx| fold(!is_folded, window, cx))
|
||||
.into_any_element()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -110,48 +110,14 @@ impl PickerDelegate for ThreadContextPickerDelegate {
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Picker<Self>>,
|
||||
) -> Task<()> {
|
||||
let Ok(threads) = self.thread_store.update(cx, |this, _cx| {
|
||||
this.threads()
|
||||
.into_iter()
|
||||
.map(|thread| ThreadContextEntry {
|
||||
id: thread.id,
|
||||
summary: thread.summary,
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
}) else {
|
||||
let Some(threads) = self.thread_store.upgrade() else {
|
||||
return Task::ready(());
|
||||
};
|
||||
|
||||
let executor = cx.background_executor().clone();
|
||||
let search_task = cx.background_spawn(async move {
|
||||
if query.is_empty() {
|
||||
threads
|
||||
} else {
|
||||
let candidates = threads
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(id, thread)| StringMatchCandidate::new(id, &thread.summary))
|
||||
.collect::<Vec<_>>();
|
||||
let matches = fuzzy::match_strings(
|
||||
&candidates,
|
||||
&query,
|
||||
false,
|
||||
100,
|
||||
&Default::default(),
|
||||
executor,
|
||||
)
|
||||
.await;
|
||||
|
||||
matches
|
||||
.into_iter()
|
||||
.map(|mat| threads[mat.candidate_id].clone())
|
||||
.collect()
|
||||
}
|
||||
});
|
||||
|
||||
cx.spawn_in(window, |this, mut cx| async move {
|
||||
let search_task = search_threads(query, threads, cx);
|
||||
cx.spawn_in(window, async move |this, cx| {
|
||||
let matches = search_task.await;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
this.delegate.matches = matches;
|
||||
this.delegate.selected_index = 0;
|
||||
cx.notify();
|
||||
@@ -171,12 +137,14 @@ impl PickerDelegate for ThreadContextPickerDelegate {
|
||||
|
||||
let open_thread_task = thread_store.update(cx, |this, cx| this.open_thread(&entry.id, cx));
|
||||
|
||||
cx.spawn_in(window, |this, mut cx| async move {
|
||||
cx.spawn_in(window, async move |this, cx| {
|
||||
let thread = open_thread_task.await?;
|
||||
this.update_in(&mut cx, |this, window, cx| {
|
||||
this.update_in(cx, |this, window, cx| {
|
||||
this.delegate
|
||||
.context_store
|
||||
.update(cx, |context_store, cx| context_store.add_thread(thread, cx))
|
||||
.update(cx, |context_store, cx| {
|
||||
context_store.add_thread(thread, true, cx)
|
||||
})
|
||||
.ok();
|
||||
|
||||
match this.delegate.confirm_behavior {
|
||||
@@ -223,13 +191,18 @@ pub fn render_thread_context_entry(
|
||||
h_flex()
|
||||
.gap_1p5()
|
||||
.w_full()
|
||||
.justify_between()
|
||||
.child(
|
||||
Icon::new(IconName::MessageCircle)
|
||||
.size(IconSize::XSmall)
|
||||
.color(Color::Muted),
|
||||
h_flex()
|
||||
.gap_1p5()
|
||||
.max_w_72()
|
||||
.child(
|
||||
Icon::new(IconName::MessageCircle)
|
||||
.size(IconSize::XSmall)
|
||||
.color(Color::Muted),
|
||||
)
|
||||
.child(Label::new(thread.summary.clone()).truncate()),
|
||||
)
|
||||
.child(Label::new(thread.summary.clone()))
|
||||
.child(div().w_full())
|
||||
.when(added, |el| {
|
||||
el.child(
|
||||
h_flex()
|
||||
@@ -243,3 +216,46 @@ pub fn render_thread_context_entry(
)
})
}

pub(crate) fn search_threads(
query: String,
thread_store: Entity<ThreadStore>,
cx: &mut App,
) -> Task<Vec<ThreadContextEntry>> {
let threads = thread_store.update(cx, |this, _cx| {
this.threads()
.into_iter()
.map(|thread| ThreadContextEntry {
id: thread.id,
summary: thread.summary,
})
.collect::<Vec<_>>()
});

let executor = cx.background_executor().clone();
cx.background_spawn(async move {
if query.is_empty() {
threads
} else {
let candidates = threads
.iter()
.enumerate()
.map(|(id, thread)| StringMatchCandidate::new(id, &thread.summary))
.collect::<Vec<_>>();
let matches = fuzzy::match_strings(
&candidates,
&query,
false,
100,
&Default::default(),
executor,
)
.await;

matches
.into_iter()
.map(|mat| threads[mat.candidate_id].clone())
.collect()
}
})
}

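The new `search_threads` helper above returns every thread summary for an empty query and otherwise ranks summaries with `fuzzy::match_strings` on the background executor. The sketch below mirrors that shape using only the standard library, substituting a naive case-insensitive substring filter for the fuzzy ranking; `ThreadContextEntry` is simplified to a plain struct, so this illustrates the control flow rather than the real API.

#[derive(Clone, Debug)]
struct ThreadContextEntry {
    id: usize,       // simplified stand-in for the real thread id type
    summary: String,
}

// Stand-in for search_threads: an empty query returns everything; otherwise keep
// summaries that match, in place of the fuzzy ranking used above.
fn search_threads_sketch(query: &str, threads: Vec<ThreadContextEntry>) -> Vec<ThreadContextEntry> {
    if query.is_empty() {
        return threads;
    }
    let query = query.to_lowercase();
    threads
        .into_iter()
        .filter(|thread| thread.summary.to_lowercase().contains(&query))
        .collect()
}

fn main() {
    let threads = vec![
        ThreadContextEntry { id: 0, summary: "Refactor fetch context picker".into() },
        ThreadContextEntry { id: 1, summary: "Fix folder icons".into() },
    ];
    let hits = search_threads_sketch("fetch", threads);
    assert_eq!(hits.len(), 1);
    println!("{:?}", hits);
}
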
@@ -9,6 +9,7 @@ use language::Buffer;
use project::{ProjectPath, Worktree};
use rope::Rope;
use text::BufferId;
use util::maybe;
use workspace::Workspace;

use crate::context::{
@@ -63,6 +64,7 @@ impl ContextStore {
pub fn add_file_from_path(
&mut self,
project_path: ProjectPath,
remove_if_exists: bool,
cx: &mut Context<Self>,
) -> Task<Result<()>> {
let workspace = self.workspace.clone();
@@ -74,18 +76,20 @@ impl ContextStore {
|
||||
return Task::ready(Err(anyhow!("failed to read project")));
|
||||
};
|
||||
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
let open_buffer_task = project.update(&mut cx, |project, cx| {
|
||||
cx.spawn(async move |this, cx| {
|
||||
let open_buffer_task = project.update(cx, |project, cx| {
|
||||
project.open_buffer(project_path.clone(), cx)
|
||||
})?;
|
||||
|
||||
let buffer_entity = open_buffer_task.await?;
|
||||
let buffer_id = this.update(&mut cx, |_, cx| buffer_entity.read(cx).remote_id())?;
|
||||
let buffer_id = this.update(cx, |_, cx| buffer_entity.read(cx).remote_id())?;
|
||||
|
||||
let already_included = this.update(&mut cx, |this, _cx| {
|
||||
let already_included = this.update(cx, |this, _cx| {
|
||||
match this.will_include_buffer(buffer_id, &project_path.path) {
|
||||
Some(FileInclusion::Direct(context_id)) => {
|
||||
this.remove_context(context_id);
|
||||
if remove_if_exists {
|
||||
this.remove_context(context_id);
|
||||
}
|
||||
true
|
||||
}
|
||||
Some(FileInclusion::InDirectory(_)) => true,
|
||||
@@ -97,7 +101,7 @@ impl ContextStore {
|
||||
return anyhow::Ok(());
|
||||
}
|
||||
|
||||
let (buffer_info, text_task) = this.update(&mut cx, |_, cx| {
|
||||
let (buffer_info, text_task) = this.update(cx, |_, cx| {
|
||||
let buffer = buffer_entity.read(cx);
|
||||
collect_buffer_info_and_text(
|
||||
project_path.path.clone(),
|
||||
@@ -109,7 +113,7 @@ impl ContextStore {
|
||||
|
||||
let text = text_task.await;
|
||||
|
||||
this.update(&mut cx, |this, _cx| {
|
||||
this.update(cx, |this, _cx| {
|
||||
this.insert_file(make_context_buffer(buffer_info, text));
|
||||
})?;
|
||||
|
||||
@@ -122,8 +126,8 @@ impl ContextStore {
|
||||
buffer_entity: Entity<Buffer>,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Task<Result<()>> {
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
let (buffer_info, text_task) = this.update(&mut cx, |_, cx| {
|
||||
cx.spawn(async move |this, cx| {
|
||||
let (buffer_info, text_task) = this.update(cx, |_, cx| {
|
||||
let buffer = buffer_entity.read(cx);
|
||||
let Some(file) = buffer.file() else {
|
||||
return Err(anyhow!("Buffer has no path."));
|
||||
@@ -138,7 +142,7 @@ impl ContextStore {
|
||||
|
||||
let text = text_task.await;
|
||||
|
||||
this.update(&mut cx, |this, _cx| {
|
||||
this.update(cx, |this, _cx| {
|
||||
this.insert_file(make_context_buffer(buffer_info, text))
|
||||
})?;
|
||||
|
||||
@@ -156,6 +160,7 @@ impl ContextStore {
|
||||
pub fn add_directory(
|
||||
&mut self,
|
||||
project_path: ProjectPath,
|
||||
remove_if_exists: bool,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Task<Result<()>> {
|
||||
let workspace = self.workspace.clone();
|
||||
@@ -168,7 +173,9 @@ impl ContextStore {
|
||||
|
||||
let already_included = if let Some(context_id) = self.includes_directory(&project_path.path)
|
||||
{
|
||||
self.remove_context(context_id);
|
||||
if remove_if_exists {
|
||||
self.remove_context(context_id);
|
||||
}
|
||||
true
|
||||
} else {
|
||||
false
|
||||
@@ -178,18 +185,18 @@ impl ContextStore {
|
||||
}
|
||||
|
||||
let worktree_id = project_path.worktree_id;
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
let worktree = project.update(&mut cx, |project, cx| {
|
||||
cx.spawn(async move |this, cx| {
|
||||
let worktree = project.update(cx, |project, cx| {
|
||||
project
|
||||
.worktree_for_id(worktree_id, cx)
|
||||
.ok_or_else(|| anyhow!("no worktree found for {worktree_id:?}"))
|
||||
})??;
|
||||
|
||||
let files = worktree.update(&mut cx, |worktree, _cx| {
|
||||
let files = worktree.update(cx, |worktree, _cx| {
|
||||
collect_files_in_path(worktree, &project_path.path)
|
||||
})?;
|
||||
|
||||
let open_buffers_task = project.update(&mut cx, |project, cx| {
|
||||
let open_buffers_task = project.update(cx, |project, cx| {
|
||||
let tasks = files.iter().map(|file_path| {
|
||||
project.open_buffer(
|
||||
ProjectPath {
|
||||
@@ -206,7 +213,7 @@ impl ContextStore {
|
||||
|
||||
let mut buffer_infos = Vec::new();
|
||||
let mut text_tasks = Vec::new();
|
||||
this.update(&mut cx, |_, cx| {
|
||||
this.update(cx, |_, cx| {
|
||||
for (path, buffer_entity) in files.into_iter().zip(buffers) {
|
||||
// Skip all binary files and other non-UTF8 files
|
||||
if let Ok(buffer_entity) = buffer_entity {
|
||||
@@ -235,7 +242,7 @@ impl ContextStore {
|
||||
bail!("No text files found in {}", &project_path.path.display());
|
||||
}
|
||||
|
||||
this.update(&mut cx, |this, _| {
|
||||
this.update(cx, |this, _| {
|
||||
this.insert_directory(&project_path.path, context_buffers);
|
||||
})?;
|
||||
|
||||
@@ -255,9 +262,16 @@ impl ContextStore {
|
||||
)));
|
||||
}
|
||||
|
||||
pub fn add_thread(&mut self, thread: Entity<Thread>, cx: &mut Context<Self>) {
|
||||
pub fn add_thread(
|
||||
&mut self,
|
||||
thread: Entity<Thread>,
|
||||
remove_if_exists: bool,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
if let Some(context_id) = self.includes_thread(&thread.read(cx).id()) {
|
||||
self.remove_context(context_id);
|
||||
if remove_if_exists {
|
||||
self.remove_context(context_id);
|
||||
}
|
||||
} else {
|
||||
self.insert_thread(thread, cx);
|
||||
}
|
||||
@@ -531,35 +545,59 @@ fn collect_files_in_path(worktree: &Worktree, path: &Path) -> Vec<Arc<Path>> {
|
||||
|
||||
pub fn refresh_context_store_text(
|
||||
context_store: Entity<ContextStore>,
|
||||
changed_buffers: &HashSet<Entity<Buffer>>,
|
||||
cx: &App,
|
||||
) -> impl Future<Output = ()> {
|
||||
) -> impl Future<Output = Vec<ContextId>> {
|
||||
let mut tasks = Vec::new();
|
||||
|
||||
for context in &context_store.read(cx).context {
|
||||
match context {
|
||||
AssistantContext::File(file_context) => {
|
||||
let context_store = context_store.clone();
|
||||
if let Some(task) = refresh_file_text(context_store, file_context, cx) {
|
||||
tasks.push(task);
|
||||
let id = context.id();
|
||||
|
||||
let task = maybe!({
|
||||
match context {
|
||||
AssistantContext::File(file_context) => {
|
||||
if changed_buffers.is_empty()
|
||||
|| changed_buffers.contains(&file_context.context_buffer.buffer)
|
||||
{
|
||||
let context_store = context_store.clone();
|
||||
return refresh_file_text(context_store, file_context, cx);
|
||||
}
|
||||
}
|
||||
}
|
||||
AssistantContext::Directory(directory_context) => {
|
||||
let context_store = context_store.clone();
|
||||
if let Some(task) = refresh_directory_text(context_store, directory_context, cx) {
|
||||
tasks.push(task);
|
||||
AssistantContext::Directory(directory_context) => {
|
||||
let should_refresh = changed_buffers.is_empty()
|
||||
|| changed_buffers.iter().any(|buffer| {
|
||||
let buffer = buffer.read(cx);
|
||||
|
||||
buffer_path_log_err(&buffer)
|
||||
.map_or(false, |path| path.starts_with(&directory_context.path))
|
||||
});
|
||||
|
||||
if should_refresh {
|
||||
let context_store = context_store.clone();
|
||||
return refresh_directory_text(context_store, directory_context, cx);
|
||||
}
|
||||
}
|
||||
AssistantContext::Thread(thread_context) => {
|
||||
if changed_buffers.is_empty() {
|
||||
let context_store = context_store.clone();
|
||||
return Some(refresh_thread_text(context_store, thread_context, cx));
|
||||
}
|
||||
}
|
||||
// Intentionally omit refreshing fetched URLs as it doesn't seem all that useful,
|
||||
// and doing the caching properly could be tricky (unless it's already handled by
|
||||
// the HttpClient?).
|
||||
AssistantContext::FetchedUrl(_) => {}
|
||||
}
|
||||
AssistantContext::Thread(thread_context) => {
|
||||
let context_store = context_store.clone();
|
||||
tasks.push(refresh_thread_text(context_store, thread_context, cx));
|
||||
}
|
||||
// Intentionally omit refreshing fetched URLs as it doesn't seem all that useful,
|
||||
// and doing the caching properly could be tricky (unless it's already handled by
|
||||
// the HttpClient?).
|
||||
AssistantContext::FetchedUrl(_) => {}
|
||||
|
||||
None
|
||||
});
|
||||
|
||||
if let Some(task) = task {
|
||||
tasks.push(task.map(move |_| id));
|
||||
}
|
||||
}
|
||||
|
||||
future::join_all(tasks).map(|_| ())
|
||||
future::join_all(tasks)
|
||||
}
|
||||
|
||||
fn refresh_file_text(
|
||||
@@ -570,10 +608,10 @@ fn refresh_file_text(
|
||||
let id = file_context.id;
|
||||
let task = refresh_context_buffer(&file_context.context_buffer, cx);
|
||||
if let Some(task) = task {
|
||||
Some(cx.spawn(|mut cx| async move {
|
||||
Some(cx.spawn(async move |cx| {
|
||||
let context_buffer = task.await;
|
||||
context_store
|
||||
.update(&mut cx, |context_store, _| {
|
||||
.update(cx, |context_store, _| {
|
||||
let new_file_context = FileContext { id, context_buffer };
|
||||
context_store.replace_context(AssistantContext::File(new_file_context));
|
||||
})
|
||||
@@ -611,10 +649,10 @@ fn refresh_directory_text(
|
||||
|
||||
let id = directory_context.snapshot.id;
|
||||
let path = directory_context.path.clone();
|
||||
Some(cx.spawn(|mut cx| async move {
|
||||
Some(cx.spawn(async move |cx| {
|
||||
let context_buffers = context_buffers.await;
|
||||
context_store
|
||||
.update(&mut cx, |context_store, _| {
|
||||
.update(cx, |context_store, _| {
|
||||
let new_directory_context = DirectoryContext::new(id, &path, context_buffers);
|
||||
context_store.replace_context(AssistantContext::Directory(new_directory_context));
|
||||
})
|
||||
@@ -629,9 +667,9 @@ fn refresh_thread_text(
|
||||
) -> Task<()> {
|
||||
let id = thread_context.id;
|
||||
let thread = thread_context.thread.clone();
|
||||
cx.spawn(move |mut cx| async move {
|
||||
cx.spawn(async move |cx| {
|
||||
context_store
|
||||
.update(&mut cx, |context_store, cx| {
|
||||
.update(cx, |context_store, cx| {
|
||||
let text = thread.read(cx).text().into();
|
||||
context_store.replace_context(AssistantContext::Thread(ThreadContext {
|
||||
id,
|
||||
|
||||
@@ -39,7 +39,6 @@ impl ContextStrip {
|
||||
pub fn new(
|
||||
context_store: Entity<ContextStore>,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
editor: WeakEntity<Editor>,
|
||||
thread_store: Option<WeakEntity<ThreadStore>>,
|
||||
context_picker_menu_handle: PopoverMenuHandle<ContextPicker>,
|
||||
suggest_context_kind: SuggestContextKind,
|
||||
@@ -51,7 +50,6 @@ impl ContextStrip {
|
||||
workspace.clone(),
|
||||
thread_store.clone(),
|
||||
context_store.downgrade(),
|
||||
editor.clone(),
|
||||
ConfirmBehavior::KeepOpen,
|
||||
window,
|
||||
cx,
|
||||
@@ -335,12 +333,12 @@ impl ContextStrip {
|
||||
context_store.accept_suggested_context(&suggested, cx)
|
||||
});
|
||||
|
||||
cx.spawn_in(window, |this, mut cx| async move {
|
||||
match task.await.notify_async_err(&mut cx) {
|
||||
cx.spawn_in(window, async move |this, cx| {
|
||||
match task.await.notify_async_err(cx) {
|
||||
None => {}
|
||||
Some(()) => {
|
||||
if let Some(this) = this.upgrade() {
|
||||
this.update(&mut cx, |_, cx| cx.notify())?;
|
||||
this.update(cx, |_, cx| cx.notify())?;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,10 +2,10 @@ use assistant_context_editor::SavedContextMetadata;
|
||||
use chrono::{DateTime, Utc};
|
||||
use gpui::{prelude::*, Entity};
|
||||
|
||||
use crate::thread_store::{SavedThreadMetadata, ThreadStore};
|
||||
use crate::thread_store::{SerializedThreadMetadata, ThreadStore};
|
||||
|
||||
pub enum HistoryEntry {
|
||||
Thread(SavedThreadMetadata),
|
||||
Thread(SerializedThreadMetadata),
|
||||
Context(SavedContextMetadata),
|
||||
}
|
||||
|
||||
|
||||
@@ -276,7 +276,7 @@ impl InlineAssistant {
|
||||
if is_authenticated() {
|
||||
handle_assist(window, cx);
|
||||
} else {
|
||||
cx.spawn_in(window, |_workspace, mut cx| async move {
|
||||
cx.spawn_in(window, async move |_workspace, cx| {
|
||||
let Some(task) = cx.update(|_, cx| {
|
||||
LanguageModelRegistry::read_global(cx)
|
||||
.active_provider()
|
||||
@@ -324,7 +324,7 @@ impl InlineAssistant {
|
||||
) {
|
||||
let (snapshot, initial_selections) = editor.update(cx, |editor, cx| {
|
||||
(
|
||||
editor.buffer().read(cx).snapshot(cx),
|
||||
editor.snapshot(window, cx),
|
||||
editor.selections.all::<Point>(cx),
|
||||
)
|
||||
});
|
||||
@@ -338,7 +338,37 @@ impl InlineAssistant {
|
||||
if selection.end.column == 0 {
|
||||
selection.end.row -= 1;
|
||||
}
|
||||
selection.end.column = snapshot.line_len(MultiBufferRow(selection.end.row));
|
||||
selection.end.column = snapshot
|
||||
.buffer_snapshot
|
||||
.line_len(MultiBufferRow(selection.end.row));
|
||||
} else if let Some(fold) =
|
||||
snapshot.crease_for_buffer_row(MultiBufferRow(selection.end.row))
|
||||
{
|
||||
selection.start = fold.range().start;
|
||||
selection.end = fold.range().end;
|
||||
if MultiBufferRow(selection.end.row) < snapshot.buffer_snapshot.max_row() {
|
||||
let chars = snapshot
|
||||
.buffer_snapshot
|
||||
.chars_at(Point::new(selection.end.row + 1, 0));
|
||||
|
||||
for c in chars {
|
||||
if c == '\n' {
|
||||
break;
|
||||
}
|
||||
if c.is_whitespace() {
|
||||
continue;
|
||||
}
|
||||
if snapshot
|
||||
.language_at(selection.end)
|
||||
.is_some_and(|language| language.config().brackets.is_closing_brace(c))
|
||||
{
|
||||
selection.end.row += 1;
|
||||
selection.end.column = snapshot
|
||||
.buffer_snapshot
|
||||
.line_len(MultiBufferRow(selection.end.row));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(prev_selection) = selections.last_mut() {
|
||||
@@ -354,6 +384,7 @@ impl InlineAssistant {
|
||||
}
|
||||
selections.push(selection);
|
||||
}
|
||||
let snapshot = &snapshot.buffer_snapshot;
|
||||
let newest_selection = newest_selection.unwrap();
|
||||
|
||||
let mut codegen_ranges = Vec::new();
|
||||
@@ -1456,9 +1487,9 @@ impl EditorInlineAssists {
|
||||
assist_ids: Vec::new(),
|
||||
scroll_lock: None,
|
||||
highlight_updates: highlight_updates_tx,
|
||||
_update_highlights: cx.spawn(|cx| {
|
||||
_update_highlights: cx.spawn({
|
||||
let editor = editor.downgrade();
|
||||
async move {
|
||||
async move |cx| {
|
||||
while let Ok(()) = highlight_updates_rx.changed().await {
|
||||
let editor = editor.upgrade().context("editor was dropped")?;
|
||||
cx.update_global(|assistant: &mut InlineAssistant, cx| {
|
||||
@@ -1729,6 +1760,7 @@ impl CodeActionProvider for AssistantCodeActionProvider {
|
||||
title: "Fix with Assistant".into(),
|
||||
..Default::default()
|
||||
})),
|
||||
resolved: true,
|
||||
}]))
|
||||
} else {
|
||||
Task::ready(Ok(Vec::new()))
|
||||
@@ -1747,10 +1779,10 @@ impl CodeActionProvider for AssistantCodeActionProvider {
|
||||
let editor = self.editor.clone();
|
||||
let workspace = self.workspace.clone();
|
||||
let thread_store = self.thread_store.clone();
|
||||
window.spawn(cx, |mut cx| async move {
|
||||
window.spawn(cx, async move |cx| {
|
||||
let editor = editor.upgrade().context("editor was released")?;
|
||||
let range = editor
|
||||
.update(&mut cx, |editor, cx| {
|
||||
.update(cx, |editor, cx| {
|
||||
editor.buffer().update(cx, |multibuffer, cx| {
|
||||
let buffer = buffer.read(cx);
|
||||
let multibuffer_snapshot = multibuffer.read(cx);
|
||||
|
||||
@@ -861,7 +861,6 @@ impl PromptEditor<BufferCodegen> {
|
||||
ContextStrip::new(
|
||||
context_store.clone(),
|
||||
workspace.clone(),
|
||||
prompt_editor.downgrade(),
|
||||
thread_store.clone(),
|
||||
context_picker_menu_handle.clone(),
|
||||
SuggestContextKind::Thread,
|
||||
@@ -1014,7 +1013,6 @@ impl PromptEditor<TerminalCodegen> {
|
||||
ContextStrip::new(
|
||||
context_store.clone(),
|
||||
workspace.clone(),
|
||||
prompt_editor.downgrade(),
|
||||
thread_store.clone(),
|
||||
context_picker_menu_handle.clone(),
|
||||
SuggestContextKind::Thread,
|
||||
|
||||
@@ -1,39 +1,42 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use collections::HashSet;
|
||||
use editor::actions::MoveUp;
|
||||
use editor::{Editor, EditorElement, EditorEvent, EditorStyle};
|
||||
use file_icons::FileIcons;
|
||||
use editor::{ContextMenuOptions, ContextMenuPlacement, Editor, EditorElement, EditorStyle};
|
||||
use fs::Fs;
|
||||
use git::ExpandCommitEditor;
|
||||
use git_ui::git_panel;
|
||||
use gpui::{
|
||||
Animation, AnimationExt, App, DismissEvent, Entity, Focusable, Subscription, TextStyle,
|
||||
point, Animation, AnimationExt, App, DismissEvent, Entity, Focusable, Subscription, TextStyle,
|
||||
WeakEntity,
|
||||
};
|
||||
use language_model::LanguageModelRegistry;
|
||||
use language_model_selector::ToggleModelSelector;
|
||||
use rope::Point;
|
||||
use project::Project;
|
||||
use settings::Settings;
|
||||
use std::time::Duration;
|
||||
use text::Bias;
|
||||
use theme::ThemeSettings;
|
||||
use ui::{
|
||||
prelude::*, ButtonLike, Disclosure, KeyBinding, PlatformStyle, PopoverMenu, PopoverMenuHandle,
|
||||
Tooltip,
|
||||
prelude::*, ButtonLike, KeyBinding, PlatformStyle, PopoverMenu, PopoverMenuHandle, Tooltip,
|
||||
};
|
||||
use vim_mode_setting::VimModeSetting;
|
||||
use workspace::Workspace;
|
||||
|
||||
use crate::assistant_model_selector::AssistantModelSelector;
|
||||
use crate::context_picker::{ConfirmBehavior, ContextPicker};
|
||||
use crate::context_picker::{ConfirmBehavior, ContextPicker, ContextPickerCompletionProvider};
|
||||
use crate::context_store::{refresh_context_store_text, ContextStore};
|
||||
use crate::context_strip::{ContextStrip, ContextStripEvent, SuggestContextKind};
|
||||
use crate::thread::{RequestKind, Thread};
|
||||
use crate::thread_store::ThreadStore;
|
||||
use crate::tool_selector::ToolSelector;
|
||||
use crate::{Chat, ChatMode, RemoveAllContext, ToggleContextPicker};
|
||||
use crate::{Chat, ChatMode, RemoveAllContext, ThreadEvent, ToggleContextPicker};
|
||||
|
||||
pub struct MessageEditor {
|
||||
thread: Entity<Thread>,
|
||||
editor: Entity<Editor>,
|
||||
#[allow(dead_code)]
|
||||
workspace: WeakEntity<Workspace>,
|
||||
project: Entity<Project>,
|
||||
context_store: Entity<ContextStore>,
|
||||
context_strip: Entity<ContextStrip>,
|
||||
context_picker_menu_handle: PopoverMenuHandle<ContextPicker>,
|
||||
@@ -41,7 +44,6 @@ pub struct MessageEditor {
|
||||
inline_context_picker_menu_handle: PopoverMenuHandle<ContextPicker>,
|
||||
model_selector: Entity<AssistantModelSelector>,
|
||||
tool_selector: Entity<ToolSelector>,
|
||||
edits_expanded: bool,
|
||||
_subscriptions: Vec<Subscription>,
|
||||
}
|
||||
|
||||
@@ -49,13 +51,13 @@ impl MessageEditor {
|
||||
pub fn new(
|
||||
fs: Arc<dyn Fs>,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
context_store: Entity<ContextStore>,
|
||||
thread_store: WeakEntity<ThreadStore>,
|
||||
thread: Entity<Thread>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
let tools = thread.read(cx).tools().clone();
|
||||
let context_store = cx.new(|_cx| ContextStore::new(workspace.clone()));
|
||||
let context_picker_menu_handle = PopoverMenuHandle::default();
|
||||
let inline_context_picker_menu_handle = PopoverMenuHandle::default();
|
||||
let model_selector_menu_handle = PopoverMenuHandle::default();
|
||||
@@ -64,16 +66,30 @@ impl MessageEditor {
|
||||
let mut editor = Editor::auto_height(10, window, cx);
|
||||
editor.set_placeholder_text("Ask anything, @ to mention, ↑ to select", cx);
|
||||
editor.set_show_indent_guides(false, cx);
|
||||
editor.set_context_menu_options(ContextMenuOptions {
|
||||
min_entries_visible: 12,
|
||||
max_entries_visible: 12,
|
||||
placement: Some(ContextMenuPlacement::Above),
|
||||
});
|
||||
|
||||
editor
|
||||
});
|
||||
|
||||
let editor_entity = editor.downgrade();
|
||||
editor.update(cx, |editor, _| {
|
||||
editor.set_completion_provider(Some(Box::new(ContextPickerCompletionProvider::new(
|
||||
workspace.clone(),
|
||||
context_store.downgrade(),
|
||||
Some(thread_store.clone()),
|
||||
editor_entity,
|
||||
))));
|
||||
});
|
||||
|
||||
let inline_context_picker = cx.new(|cx| {
|
||||
ContextPicker::new(
|
||||
workspace.clone(),
|
||||
Some(thread_store.clone()),
|
||||
context_store.downgrade(),
|
||||
editor.downgrade(),
|
||||
ConfirmBehavior::Close,
|
||||
window,
|
||||
cx,
|
||||
@@ -84,7 +100,6 @@ impl MessageEditor {
|
||||
ContextStrip::new(
|
||||
context_store.clone(),
|
||||
workspace.clone(),
|
||||
editor.downgrade(),
|
||||
Some(thread_store.clone()),
|
||||
context_picker_menu_handle.clone(),
|
||||
SuggestContextKind::File,
|
||||
@@ -94,7 +109,6 @@ impl MessageEditor {
|
||||
});
|
||||
|
||||
let subscriptions = vec![
|
||||
cx.subscribe_in(&editor, window, Self::handle_editor_event),
|
||||
cx.subscribe_in(
|
||||
&inline_context_picker,
|
||||
window,
|
||||
@@ -104,8 +118,10 @@ impl MessageEditor {
|
||||
];
|
||||
|
||||
Self {
|
||||
thread,
|
||||
editor: editor.clone(),
|
||||
project: thread.read(cx).project().clone(),
|
||||
thread,
|
||||
workspace,
|
||||
context_store,
|
||||
context_strip,
|
||||
context_picker_menu_handle,
|
||||
@@ -121,7 +137,6 @@ impl MessageEditor {
|
||||
)
|
||||
}),
|
||||
tool_selector: cx.new(|cx| ToolSelector::new(tools, cx)),
|
||||
edits_expanded: false,
|
||||
_subscriptions: subscriptions,
|
||||
}
|
||||
}
|
||||
@@ -138,7 +153,6 @@ impl MessageEditor {
|
||||
) {
|
||||
self.context_picker_menu_handle.toggle(window, cx);
|
||||
}
|
||||
|
||||
pub fn remove_all_context(
|
||||
&mut self,
|
||||
_: &RemoveAllContext,
|
||||
@@ -154,7 +168,7 @@ impl MessageEditor {
|
||||
return;
|
||||
}
|
||||
|
||||
if self.thread.read(cx).is_streaming() {
|
||||
if self.thread.read(cx).is_generating() {
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -197,16 +211,30 @@ impl MessageEditor {
|
||||
text
|
||||
});
|
||||
|
||||
let refresh_task = refresh_context_store_text(self.context_store.clone(), cx);
|
||||
let refresh_task =
|
||||
refresh_context_store_text(self.context_store.clone(), &HashSet::default(), cx);
|
||||
|
||||
let system_prompt_context_task = self.thread.read(cx).load_system_prompt_context(cx);
|
||||
|
||||
let thread = self.thread.clone();
|
||||
let context_store = self.context_store.clone();
|
||||
cx.spawn(move |_, mut cx| async move {
|
||||
let checkpoint = self.project.read(cx).git_store().read(cx).checkpoint(cx);
|
||||
cx.spawn(async move |_, cx| {
|
||||
let checkpoint = checkpoint.await.ok();
|
||||
refresh_task.await;
|
||||
let (system_prompt_context, load_error) = system_prompt_context_task.await;
|
||||
thread
|
||||
.update(&mut cx, |thread, cx| {
|
||||
.update(cx, |thread, cx| {
|
||||
thread.set_system_prompt_context(system_prompt_context);
|
||||
if let Some(load_error) = load_error {
|
||||
cx.emit(ThreadEvent::ShowError(load_error));
|
||||
}
|
||||
})
|
||||
.ok();
|
||||
thread
|
||||
.update(cx, |thread, cx| {
|
||||
let context = context_store.read(cx).snapshot(cx).collect::<Vec<_>>();
|
||||
thread.insert_user_message(user_message, context, cx);
|
||||
thread.insert_user_message(user_message, context, checkpoint, cx);
|
||||
thread.send_to_model(model, request_kind, cx);
|
||||
})
|
||||
.ok();
|
||||
@@ -214,34 +242,6 @@ impl MessageEditor {
|
||||
.detach();
|
||||
}
|
||||
|
||||
fn handle_editor_event(
|
||||
&mut self,
|
||||
editor: &Entity<Editor>,
|
||||
event: &EditorEvent,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
match event {
|
||||
EditorEvent::SelectionsChanged { .. } => {
|
||||
editor.update(cx, |editor, cx| {
|
||||
let snapshot = editor.buffer().read(cx).snapshot(cx);
|
||||
let newest_cursor = editor.selections.newest::<Point>(cx).head();
|
||||
if newest_cursor.column > 0 {
|
||||
let behind_cursor = snapshot.clip_point(
|
||||
Point::new(newest_cursor.row, newest_cursor.column - 1),
|
||||
Bias::Left,
|
||||
);
|
||||
let char_behind_cursor = snapshot.chars_at(behind_cursor).next();
|
||||
if char_behind_cursor == Some('@') {
|
||||
self.inline_context_picker_menu_handle.show(window, cx);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_inline_context_picker_event(
|
||||
&mut self,
|
||||
_inline_context_picker: &Entity<ContextPicker>,
|
||||
@@ -292,10 +292,12 @@ impl Render for MessageEditor {
|
||||
fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
let font_size = TextSize::Default.rems(cx);
|
||||
let line_height = font_size.to_pixels(window.rem_size()) * 1.5;
|
||||
|
||||
let focus_handle = self.editor.focus_handle(cx);
|
||||
let inline_context_picker = self.inline_context_picker.clone();
|
||||
let bg_color = cx.theme().colors().editor_background;
|
||||
let is_streaming_completion = self.thread.read(cx).is_streaming();
|
||||
|
||||
let empty_thread = self.thread.read(cx).is_empty();
|
||||
let is_generating = self.thread.read(cx).is_generating();
|
||||
let is_model_selected = self.is_model_selected(cx);
|
||||
let is_editor_empty = self.is_editor_empty(cx);
|
||||
let submit_label_color = if is_editor_empty {
|
||||
@@ -314,12 +316,34 @@ impl Render for MessageEditor {
|
||||
px(64.)
|
||||
};
|
||||
|
||||
let changed_buffers = self.thread.read(cx).scripting_changed_buffers(cx);
|
||||
let changed_buffers_count = changed_buffers.len();
|
||||
let project = self.thread.read(cx).project();
|
||||
let changed_files = if let Some(repository) = project.read(cx).active_repository(cx) {
|
||||
repository.read(cx).status().count()
|
||||
} else {
|
||||
0
|
||||
};
|
||||
|
||||
let border_color = cx.theme().colors().border;
|
||||
let active_color = cx.theme().colors().element_selected;
|
||||
let editor_bg_color = cx.theme().colors().editor_background;
|
||||
let bg_edit_files_disclosure = editor_bg_color.blend(active_color.opacity(0.3));
|
||||
|
||||
let edit_files_container = || {
|
||||
h_flex()
|
||||
.mx_2()
|
||||
.py_1()
|
||||
.pl_2p5()
|
||||
.pr_1()
|
||||
.bg(bg_edit_files_disclosure)
|
||||
.border_1()
|
||||
.border_color(border_color)
|
||||
.justify_between()
|
||||
.flex_wrap()
|
||||
};
|
||||
|
||||
v_flex()
|
||||
.size_full()
|
||||
.when(is_streaming_completion, |parent| {
|
||||
.when(is_generating, |parent| {
|
||||
let focus_handle = self.editor.focus_handle(cx).clone();
|
||||
parent.child(
|
||||
h_flex().py_3().w_full().justify_center().child(
|
||||
@@ -328,7 +352,7 @@ impl Render for MessageEditor {
|
||||
.pl_2()
|
||||
.pr_1()
|
||||
.py_1()
|
||||
.bg(cx.theme().colors().editor_background)
|
||||
.bg(editor_bg_color)
|
||||
.border_1()
|
||||
.border_color(cx.theme().colors().border_variant)
|
||||
.rounded_lg()
|
||||
@@ -377,109 +401,163 @@ impl Render for MessageEditor {
|
||||
),
|
||||
)
|
||||
})
|
||||
.when(changed_buffers_count > 0, |parent| {
|
||||
parent.child(
|
||||
v_flex()
|
||||
.mx_2()
|
||||
.bg(cx.theme().colors().element_background)
|
||||
.border_1()
|
||||
.border_b_0()
|
||||
.border_color(cx.theme().colors().border)
|
||||
.rounded_t_md()
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_2()
|
||||
.p_2()
|
||||
.child(
|
||||
Disclosure::new("edits-disclosure", self.edits_expanded)
|
||||
.on_click(cx.listener(|this, _ev, _window, cx| {
|
||||
this.edits_expanded = !this.edits_expanded;
|
||||
cx.notify();
|
||||
})),
|
||||
)
|
||||
.child(
|
||||
Label::new("Edits")
|
||||
.size(LabelSize::XSmall)
|
||||
.color(Color::Muted),
|
||||
)
|
||||
.child(Label::new("•").size(LabelSize::XSmall).color(Color::Muted))
|
||||
.child(
|
||||
Label::new(format!(
|
||||
"{} {}",
|
||||
changed_buffers_count,
|
||||
if changed_buffers_count == 1 {
|
||||
"file"
|
||||
} else {
|
||||
"files"
|
||||
}
|
||||
))
|
||||
.size(LabelSize::XSmall)
|
||||
.color(Color::Muted),
|
||||
),
|
||||
)
|
||||
.when(self.edits_expanded, |parent| {
|
||||
parent.child(
|
||||
v_flex().bg(cx.theme().colors().editor_background).children(
|
||||
changed_buffers.enumerate().flat_map(|(index, buffer)| {
|
||||
let file = buffer.read(cx).file()?;
|
||||
let path = file.path();
|
||||
|
||||
let parent_label = path.parent().and_then(|parent| {
|
||||
let parent_str = parent.to_string_lossy();
|
||||
|
||||
if parent_str.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(
|
||||
Label::new(format!(
|
||||
"{}{}",
|
||||
parent_str,
|
||||
std::path::MAIN_SEPARATOR_STR
|
||||
))
|
||||
.color(Color::Muted)
|
||||
.size(LabelSize::Small),
|
||||
)
|
||||
}
|
||||
});
|
||||
|
||||
let name_label = path.file_name().map(|name| {
|
||||
Label::new(name.to_string_lossy().to_string())
|
||||
.size(LabelSize::Small)
|
||||
});
|
||||
|
||||
let file_icon = FileIcons::get_icon(&path, cx)
|
||||
.map(Icon::from_path)
|
||||
.unwrap_or_else(|| Icon::new(IconName::File));
|
||||
|
||||
let element = div()
|
||||
.p_2()
|
||||
.when(index + 1 < changed_buffers_count, |parent| {
|
||||
parent
|
||||
.border_color(cx.theme().colors().border)
|
||||
.border_b_1()
|
||||
})
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_2()
|
||||
.child(file_icon)
|
||||
.child(
|
||||
// TODO: handle overflow
|
||||
h_flex()
|
||||
.children(parent_label)
|
||||
.children(name_label),
|
||||
)
|
||||
// TODO: show lines changed
|
||||
.child(Label::new("+").color(Color::Created))
|
||||
.child(Label::new("-").color(Color::Deleted)),
|
||||
);
|
||||
|
||||
Some(element)
|
||||
}),
|
||||
),
|
||||
.when(
|
||||
changed_files > 0 && !is_generating && !empty_thread,
|
||||
|parent| {
|
||||
parent.child(
|
||||
edit_files_container()
|
||||
.border_b_0()
|
||||
.rounded_t_md()
|
||||
.shadow(smallvec::smallvec![gpui::BoxShadow {
|
||||
color: gpui::black().opacity(0.15),
|
||||
offset: point(px(1.), px(-1.)),
|
||||
blur_radius: px(3.),
|
||||
spread_radius: px(0.),
|
||||
}])
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_2()
|
||||
.child(Label::new("Edits").size(LabelSize::XSmall))
|
||||
.child(div().size_1().rounded_full().bg(border_color))
|
||||
.child(
|
||||
Label::new(format!(
|
||||
"{} {}",
|
||||
changed_files,
|
||||
if changed_files == 1 { "file" } else { "files" }
|
||||
))
|
||||
.size(LabelSize::XSmall),
|
||||
),
|
||||
)
|
||||
}),
|
||||
)
|
||||
})
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_1()
|
||||
.child(
|
||||
Button::new("panel", "Open Git Panel")
|
||||
.label_size(LabelSize::XSmall)
|
||||
.key_binding({
|
||||
let focus_handle = focus_handle.clone();
|
||||
KeyBinding::for_action_in(
|
||||
&git_panel::ToggleFocus,
|
||||
&focus_handle,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
.map(|kb| kb.size(rems_from_px(10.)))
|
||||
})
|
||||
.on_click(|_ev, _window, cx| {
|
||||
cx.defer(|cx| {
|
||||
cx.dispatch_action(&git_panel::ToggleFocus)
|
||||
});
|
||||
}),
|
||||
)
|
||||
.child(
|
||||
Button::new("review", "Review Diff")
|
||||
.label_size(LabelSize::XSmall)
|
||||
.key_binding({
|
||||
let focus_handle = focus_handle.clone();
|
||||
KeyBinding::for_action_in(
|
||||
&git_ui::project_diff::Diff,
|
||||
&focus_handle,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
.map(|kb| kb.size(rems_from_px(10.)))
|
||||
})
|
||||
.on_click(|_event, _window, cx| {
|
||||
cx.defer(|cx| {
|
||||
cx.dispatch_action(&git_ui::project_diff::Diff)
|
||||
});
|
||||
}),
|
||||
)
|
||||
.child(
|
||||
Button::new("commit", "Commit Changes")
|
||||
.label_size(LabelSize::XSmall)
|
||||
.key_binding({
|
||||
let focus_handle = focus_handle.clone();
|
||||
KeyBinding::for_action_in(
|
||||
&ExpandCommitEditor,
|
||||
&focus_handle,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
.map(|kb| kb.size(rems_from_px(10.)))
|
||||
})
|
||||
.on_click(|_event, _window, cx| {
|
||||
cx.defer(|cx| {
|
||||
cx.dispatch_action(&ExpandCommitEditor)
|
||||
});
|
||||
}),
|
||||
),
|
||||
),
|
||||
)
|
||||
},
|
||||
)
|
||||
.when(
|
||||
changed_files > 0 && !is_generating && empty_thread,
|
||||
|parent| {
|
||||
parent.child(
|
||||
edit_files_container()
|
||||
.mb_2()
|
||||
.rounded_md()
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_2()
|
||||
.child(Label::new("Consider committing your changes before starting a fresh thread").size(LabelSize::XSmall))
|
||||
.child(div().size_1().rounded_full().bg(border_color))
|
||||
.child(
|
||||
Label::new(format!(
|
||||
"{} {}",
|
||||
changed_files,
|
||||
if changed_files == 1 { "file" } else { "files" }
|
||||
))
|
||||
.size(LabelSize::XSmall),
|
||||
),
|
||||
)
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_1()
|
||||
.child(
|
||||
Button::new("review", "Review Diff")
|
||||
.label_size(LabelSize::XSmall)
|
||||
.key_binding({
|
||||
let focus_handle = focus_handle.clone();
|
||||
KeyBinding::for_action_in(
|
||||
&git_ui::project_diff::Diff,
|
||||
&focus_handle,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
.map(|kb| kb.size(rems_from_px(10.)))
|
||||
})
|
||||
.on_click(|_event, _window, cx| {
|
||||
cx.defer(|cx| {
|
||||
cx.dispatch_action(&git_ui::project_diff::Diff)
|
||||
});
|
||||
}),
|
||||
)
|
||||
.child(
|
||||
Button::new("commit", "Commit Changes")
|
||||
.label_size(LabelSize::XSmall)
|
||||
.key_binding({
|
||||
let focus_handle = focus_handle.clone();
|
||||
KeyBinding::for_action_in(
|
||||
&ExpandCommitEditor,
|
||||
&focus_handle,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
.map(|kb| kb.size(rems_from_px(10.)))
|
||||
})
|
||||
.on_click(|_event, _window, cx| {
|
||||
cx.defer(|cx| {
|
||||
cx.dispatch_action(&ExpandCommitEditor)
|
||||
});
|
||||
}),
|
||||
),
|
||||
),
|
||||
)
|
||||
},
|
||||
)
|
||||
.child(
|
||||
v_flex()
|
||||
.key_context("MessageEditor")
|
||||
@@ -494,10 +572,10 @@ impl Render for MessageEditor {
|
||||
.on_action(cx.listener(Self::toggle_chat_mode))
|
||||
.gap_2()
|
||||
.p_2()
|
||||
.bg(bg_color)
|
||||
.bg(editor_bg_color)
|
||||
.border_t_1()
|
||||
.border_color(cx.theme().colors().border)
|
||||
.child(self.context_strip.clone())
|
||||
.child(h_flex().justify_between().child(self.context_strip.clone()))
|
||||
.child(
|
||||
v_flex()
|
||||
.gap_5()
|
||||
@@ -517,9 +595,10 @@ impl Render for MessageEditor {
|
||||
EditorElement::new(
|
||||
&self.editor,
|
||||
EditorStyle {
|
||||
background: bg_color,
|
||||
background: editor_bg_color,
|
||||
local_player: cx.theme().players().local(),
|
||||
text: text_style,
|
||||
syntax: cx.theme().syntax().clone(),
|
||||
..Default::default()
|
||||
},
|
||||
)
|
||||
@@ -554,7 +633,7 @@ impl Render for MessageEditor {
|
||||
.disabled(
|
||||
is_editor_empty
|
||||
|| !is_model_selected
|
||||
|| is_streaming_completion,
|
||||
|| is_generating,
|
||||
)
|
||||
.child(
|
||||
h_flex()
|
||||
@@ -589,7 +668,7 @@ impl Render for MessageEditor {
|
||||
"Type a message to submit",
|
||||
))
|
||||
})
|
||||
.when(is_streaming_completion, |button| {
|
||||
.when(is_generating, |button| {
|
||||
button.tooltip(Tooltip::text(
|
||||
"Cancel to submit a new message",
|
||||
))
|
||||
|
||||
@@ -40,7 +40,7 @@ impl TerminalCodegen {
|
||||
let telemetry = self.telemetry.clone();
|
||||
self.status = CodegenStatus::Pending;
|
||||
self.transaction = Some(TerminalTransaction::start(self.terminal.clone()));
|
||||
self.generation = cx.spawn(|this, mut cx| async move {
|
||||
self.generation = cx.spawn(async move |this, cx| {
|
||||
let model_telemetry_id = model.telemetry_id();
|
||||
let model_provider_id = model.provider_id();
|
||||
let response = model.stream_completion_text(prompt, &cx).await;
|
||||
@@ -97,12 +97,12 @@ impl TerminalCodegen {
|
||||
}
|
||||
});
|
||||
|
||||
this.update(&mut cx, |this, _| {
|
||||
this.update(cx, |this, _| {
|
||||
this.message_id = message_id;
|
||||
})?;
|
||||
|
||||
while let Some(hunk) = hunks_rx.next().await {
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
if let Some(transaction) = &mut this.transaction {
|
||||
transaction.push(hunk, cx);
|
||||
cx.notify();
|
||||
@@ -116,7 +116,7 @@ impl TerminalCodegen {
|
||||
|
||||
let result = generate.await;
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
if let Err(error) = result {
|
||||
this.status = CodegenStatus::Error(error);
|
||||
} else {
|
||||
|
||||
@@ -7,7 +7,7 @@ use time::{OffsetDateTime, UtcOffset};
|
||||
use ui::{prelude::*, IconButtonShape, ListItem, ListItemSpacing, Tooltip};
|
||||
|
||||
use crate::history_store::{HistoryEntry, HistoryStore};
|
||||
use crate::thread_store::SavedThreadMetadata;
|
||||
use crate::thread_store::SerializedThreadMetadata;
|
||||
use crate::{AssistantPanel, RemoveSelectedThread};
|
||||
|
||||
pub struct ThreadHistory {
|
||||
@@ -221,14 +221,14 @@ impl Render for ThreadHistory {
|
||||
|
||||
#[derive(IntoElement)]
|
||||
pub struct PastThread {
|
||||
thread: SavedThreadMetadata,
|
||||
thread: SerializedThreadMetadata,
|
||||
assistant_panel: WeakEntity<AssistantPanel>,
|
||||
selected: bool,
|
||||
}
|
||||
|
||||
impl PastThread {
|
||||
pub fn new(
|
||||
thread: SavedThreadMetadata,
|
||||
thread: SerializedThreadMetadata,
|
||||
assistant_panel: WeakEntity<AssistantPanel>,
|
||||
selected: bool,
|
||||
) -> Self {
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
use std::borrow::Cow;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
@@ -12,7 +13,7 @@ use futures::FutureExt as _;
|
||||
use gpui::{
|
||||
prelude::*, App, BackgroundExecutor, Context, Entity, Global, ReadGlobal, SharedString, Task,
|
||||
};
|
||||
use heed::types::{SerdeBincode, SerdeJson};
|
||||
use heed::types::SerdeBincode;
|
||||
use heed::Database;
|
||||
use language_model::{LanguageModelToolUseId, Role};
|
||||
use project::Project;
|
||||
@@ -20,7 +21,7 @@ use prompt_store::PromptBuilder;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use util::ResultExt as _;
|
||||
|
||||
use crate::thread::{MessageId, Thread, ThreadId};
|
||||
use crate::thread::{MessageId, ProjectSnapshot, Thread, ThreadEvent, ThreadId};
|
||||
|
||||
pub fn init(cx: &mut App) {
|
||||
ThreadsDatabase::init(cx);
|
||||
@@ -32,7 +33,7 @@ pub struct ThreadStore {
|
||||
prompt_builder: Arc<PromptBuilder>,
|
||||
context_server_manager: Entity<ContextServerManager>,
|
||||
context_server_tool_ids: HashMap<Arc<str>, Vec<ToolId>>,
|
||||
threads: Vec<SavedThreadMetadata>,
|
||||
threads: Vec<SerializedThreadMetadata>,
|
||||
}
|
||||
|
||||
impl ThreadStore {
|
||||
@@ -65,18 +66,26 @@ impl ThreadStore {
|
||||
Ok(this)
|
||||
}
|
||||
|
||||
pub fn context_server_manager(&self) -> Entity<ContextServerManager> {
|
||||
self.context_server_manager.clone()
|
||||
}
|
||||
|
||||
pub fn tools(&self) -> Arc<ToolWorkingSet> {
|
||||
self.tools.clone()
|
||||
}
|
||||
|
||||
/// Returns the number of threads.
|
||||
pub fn thread_count(&self) -> usize {
|
||||
self.threads.len()
|
||||
}
|
||||
|
||||
pub fn threads(&self) -> Vec<SavedThreadMetadata> {
|
||||
pub fn threads(&self) -> Vec<SerializedThreadMetadata> {
|
||||
let mut threads = self.threads.iter().cloned().collect::<Vec<_>>();
|
||||
threads.sort_unstable_by_key(|thread| std::cmp::Reverse(thread.updated_at));
|
||||
threads
|
||||
}
|
||||
|
||||
pub fn recent_threads(&self, limit: usize) -> Vec<SavedThreadMetadata> {
|
||||
pub fn recent_threads(&self, limit: usize) -> Vec<SerializedThreadMetadata> {
|
||||
self.threads().into_iter().take(limit).collect()
|
||||
}
|
||||
|
||||
@@ -98,16 +107,16 @@ impl ThreadStore {
|
||||
) -> Task<Result<Entity<Thread>>> {
|
||||
let id = id.clone();
|
||||
let database_future = ThreadsDatabase::global_future(cx);
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
cx.spawn(async move |this, cx| {
|
||||
let database = database_future.await.map_err(|err| anyhow!(err))?;
|
||||
let thread = database
|
||||
.try_find_thread(id.clone())
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no thread found with ID: {id:?}"))?;
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
let thread = this.update(cx, |this, cx| {
|
||||
cx.new(|cx| {
|
||||
Thread::from_saved(
|
||||
Thread::deserialize(
|
||||
id.clone(),
|
||||
thread,
|
||||
this.project.clone(),
|
||||
@@ -116,71 +125,44 @@ impl ThreadStore {
|
||||
cx,
|
||||
)
|
||||
})
|
||||
})
|
||||
})?;
|
||||
|
||||
let (system_prompt_context, load_error) = thread
|
||||
.update(cx, |thread, cx| thread.load_system_prompt_context(cx))?
|
||||
.await;
|
||||
thread.update(cx, |thread, cx| {
|
||||
thread.set_system_prompt_context(system_prompt_context);
|
||||
if let Some(load_error) = load_error {
|
||||
cx.emit(ThreadEvent::ShowError(load_error));
|
||||
}
|
||||
})?;
|
||||
|
||||
Ok(thread)
|
||||
})
|
||||
}
|
||||
|
||||
pub fn save_thread(&self, thread: &Entity<Thread>, cx: &mut Context<Self>) -> Task<Result<()>> {
|
||||
let (metadata, thread) = thread.update(cx, |thread, _cx| {
|
||||
let id = thread.id().clone();
|
||||
let thread = SavedThread {
|
||||
summary: thread.summary_or_default(),
|
||||
updated_at: thread.updated_at(),
|
||||
messages: thread
|
||||
.messages()
|
||||
.map(|message| {
|
||||
let all_tool_uses = thread
|
||||
.tool_uses_for_message(message.id)
|
||||
.into_iter()
|
||||
.chain(thread.scripting_tool_uses_for_message(message.id))
|
||||
.map(|tool_use| SavedToolUse {
|
||||
id: tool_use.id,
|
||||
name: tool_use.name,
|
||||
input: tool_use.input,
|
||||
})
|
||||
.collect();
|
||||
let all_tool_results = thread
|
||||
.tool_results_for_message(message.id)
|
||||
.into_iter()
|
||||
.chain(thread.scripting_tool_results_for_message(message.id))
|
||||
.map(|tool_result| SavedToolResult {
|
||||
tool_use_id: tool_result.tool_use_id.clone(),
|
||||
is_error: tool_result.is_error,
|
||||
content: tool_result.content.clone(),
|
||||
})
|
||||
.collect();
|
||||
|
||||
SavedMessage {
|
||||
id: message.id,
|
||||
role: message.role,
|
||||
text: message.text.clone(),
|
||||
tool_uses: all_tool_uses,
|
||||
tool_results: all_tool_results,
|
||||
}
|
||||
})
|
||||
.collect(),
|
||||
};
|
||||
|
||||
(id, thread)
|
||||
});
|
||||
let (metadata, serialized_thread) =
|
||||
thread.update(cx, |thread, cx| (thread.id().clone(), thread.serialize(cx)));
|
||||
|
||||
let database_future = ThreadsDatabase::global_future(cx);
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
cx.spawn(async move |this, cx| {
|
||||
let serialized_thread = serialized_thread.await?;
|
||||
let database = database_future.await.map_err(|err| anyhow!(err))?;
|
||||
database.save_thread(metadata, thread).await?;
|
||||
database.save_thread(metadata, serialized_thread).await?;
|
||||
|
||||
this.update(&mut cx, |this, cx| this.reload(cx))?.await
|
||||
this.update(cx, |this, cx| this.reload(cx))?.await
|
||||
})
|
||||
}
|
||||
|
||||
pub fn delete_thread(&mut self, id: &ThreadId, cx: &mut Context<Self>) -> Task<Result<()>> {
|
||||
let id = id.clone();
|
||||
let database_future = ThreadsDatabase::global_future(cx);
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
cx.spawn(async move |this, cx| {
|
||||
let database = database_future.await.map_err(|err| anyhow!(err))?;
|
||||
database.delete_thread(id.clone()).await?;
|
||||
|
||||
this.update(&mut cx, |this, _cx| {
|
||||
this.update(cx, |this, _cx| {
|
||||
this.threads.retain(|thread| thread.id != id)
|
||||
})
|
||||
})
|
||||
@@ -188,14 +170,14 @@ impl ThreadStore {
|
||||
|
||||
pub fn reload(&self, cx: &mut Context<Self>) -> Task<Result<()>> {
|
||||
let database_future = ThreadsDatabase::global_future(cx);
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
cx.spawn(async move |this, cx| {
|
||||
let threads = database_future
|
||||
.await
|
||||
.map_err(|err| anyhow!(err))?
|
||||
.list_threads()
|
||||
.await?;
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
this.threads = threads;
|
||||
cx.notify();
|
||||
})
|
||||
@@ -224,7 +206,7 @@ impl ThreadStore {
|
||||
cx.spawn({
|
||||
let server = server.clone();
|
||||
let server_id = server_id.clone();
|
||||
|this, mut cx| async move {
|
||||
async move |this, cx| {
|
||||
let Some(protocol) = server.client() else {
|
||||
return;
|
||||
};
|
||||
@@ -249,7 +231,7 @@ impl ThreadStore {
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
this.update(&mut cx, |this, _cx| {
|
||||
this.update(cx, |this, _cx| {
|
||||
this.context_server_tool_ids.insert(server_id, tool_ids);
|
||||
})
|
||||
.log_err();
|
||||
@@ -270,44 +252,129 @@ impl ThreadStore {
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SavedThreadMetadata {
pub struct SerializedThreadMetadata {
    pub id: ThreadId,
    pub summary: SharedString,
    pub updated_at: DateTime<Utc>,
}

#[derive(Serialize, Deserialize)]
pub struct SavedThread {
pub struct SerializedThread {
    pub version: String,
    pub summary: SharedString,
    pub updated_at: DateTime<Utc>,
    pub messages: Vec<SavedMessage>,
    pub messages: Vec<SerializedMessage>,
    #[serde(default)]
    pub initial_project_snapshot: Option<Arc<ProjectSnapshot>>,
}

impl SerializedThread {
    pub const VERSION: &'static str = "0.1.0";

    pub fn from_json(json: &[u8]) -> Result<Self> {
        let saved_thread_json = serde_json::from_slice::<serde_json::Value>(json)?;
        match saved_thread_json.get("version") {
            Some(serde_json::Value::String(version)) => match version.as_str() {
                SerializedThread::VERSION => Ok(serde_json::from_value::<SerializedThread>(
                    saved_thread_json,
                )?),
                _ => Err(anyhow!(
                    "unrecognized serialized thread version: {}",
                    version
                )),
            },
            None => {
                let saved_thread =
                    serde_json::from_value::<LegacySerializedThread>(saved_thread_json)?;
                Ok(saved_thread.upgrade())
            }
            version => Err(anyhow!(
                "unrecognized serialized thread version: {:?}",
                version
            )),
        }
    }
}
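A minimal sketch of how this versioned loader behaves (not part of the diff; the literal JSON payloads below are illustrative assumptions based on the serde derives above): a payload carrying `"version": "0.1.0"` deserializes directly, a payload with no `version` key is parsed as `LegacySerializedThread` and upgraded, and any other version value is rejected.

// Sketch only; field values are invented for illustration.
let current = br#"{"version":"0.1.0","summary":"s","updated_at":"2024-01-01T00:00:00Z","messages":[]}"#;
let legacy = br#"{"summary":"s","updated_at":"2024-01-01T00:00:00Z","messages":[]}"#;
let future = br#"{"version":"9.9.9","summary":"s","updated_at":"2024-01-01T00:00:00Z","messages":[]}"#;
assert!(SerializedThread::from_json(current).is_ok());
assert!(SerializedThread::from_json(legacy).is_ok()); // upgraded via LegacySerializedThread
assert!(SerializedThread::from_json(future).is_err()); // unrecognized version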

#[derive(Debug, Serialize, Deserialize)]
pub struct SavedMessage {
pub struct SerializedMessage {
    pub id: MessageId,
    pub role: Role,
    pub text: String,
    #[serde(default)]
    pub tool_uses: Vec<SavedToolUse>,
    pub segments: Vec<SerializedMessageSegment>,
    #[serde(default)]
    pub tool_results: Vec<SavedToolResult>,
    pub tool_uses: Vec<SerializedToolUse>,
    #[serde(default)]
    pub tool_results: Vec<SerializedToolResult>,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct SavedToolUse {
#[serde(tag = "type")]
pub enum SerializedMessageSegment {
    #[serde(rename = "text")]
    Text { text: String },
    #[serde(rename = "thinking")]
    Thinking { text: String },
}

#[derive(Debug, Serialize, Deserialize)]
pub struct SerializedToolUse {
    pub id: LanguageModelToolUseId,
    pub name: SharedString,
    pub input: serde_json::Value,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct SavedToolResult {
pub struct SerializedToolResult {
    pub tool_use_id: LanguageModelToolUseId,
    pub is_error: bool,
    pub content: Arc<str>,
}
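Given the `#[serde(tag = "type")]` and rename attributes above, segments serialize as internally tagged objects. A small sketch (not in the diff) of the resulting JSON shape:

// Sketch only; the segment texts are illustrative values.
let segments = vec![
    SerializedMessageSegment::Thinking { text: "weighing options".into() },
    SerializedMessageSegment::Text { text: "final answer".into() },
];
assert_eq!(
    serde_json::to_value(&segments).unwrap(),
    serde_json::json!([
        { "type": "thinking", "text": "weighing options" },
        { "type": "text", "text": "final answer" }
    ])
);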

#[derive(Serialize, Deserialize)]
struct LegacySerializedThread {
    pub summary: SharedString,
    pub updated_at: DateTime<Utc>,
    pub messages: Vec<LegacySerializedMessage>,
    #[serde(default)]
    pub initial_project_snapshot: Option<Arc<ProjectSnapshot>>,
}

impl LegacySerializedThread {
    pub fn upgrade(self) -> SerializedThread {
        SerializedThread {
            version: SerializedThread::VERSION.to_string(),
            summary: self.summary,
            updated_at: self.updated_at,
            messages: self.messages.into_iter().map(|msg| msg.upgrade()).collect(),
            initial_project_snapshot: self.initial_project_snapshot,
        }
    }
}

#[derive(Debug, Serialize, Deserialize)]
struct LegacySerializedMessage {
    pub id: MessageId,
    pub role: Role,
    pub text: String,
    #[serde(default)]
    pub tool_uses: Vec<SerializedToolUse>,
    #[serde(default)]
    pub tool_results: Vec<SerializedToolResult>,
}

impl LegacySerializedMessage {
    fn upgrade(self) -> SerializedMessage {
        SerializedMessage {
            id: self.id,
            role: self.role,
            segments: vec![SerializedMessageSegment::Text { text: self.text }],
            tool_uses: self.tool_uses,
            tool_results: self.tool_results,
        }
    }
}

struct GlobalThreadsDatabase(
    Shared<BoxFuture<'static, Result<Arc<ThreadsDatabase>, Arc<anyhow::Error>>>>,
);
@@ -317,7 +384,25 @@ impl Global for GlobalThreadsDatabase {}
pub(crate) struct ThreadsDatabase {
    executor: BackgroundExecutor,
    env: heed::Env,
    threads: Database<SerdeBincode<ThreadId>, SerdeJson<SavedThread>>,
    threads: Database<SerdeBincode<ThreadId>, SerializedThread>,
}

impl heed::BytesEncode<'_> for SerializedThread {
    type EItem = SerializedThread;

    fn bytes_encode(item: &Self::EItem) -> Result<Cow<[u8]>, heed::BoxedError> {
        serde_json::to_vec(item).map(Cow::Owned).map_err(Into::into)
    }
}

impl<'a> heed::BytesDecode<'a> for SerializedThread {
    type DItem = SerializedThread;

    fn bytes_decode(bytes: &'a [u8]) -> Result<Self::DItem, heed::BoxedError> {
        // We implement this type manually because we want to call `SerializedThread::from_json`,
        // instead of the Deserialize trait implementation for `SerializedThread`.
        SerializedThread::from_json(bytes).map_err(Into::into)
    }
}
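With the manual codec above, the value type of the table is `SerializedThread` itself, so every read funnels through `SerializedThread::from_json` and transparently upgrades legacy rows. A sketch of how such a table could be opened (assumption: a heed 0.20-style API; the actual open code is not part of this hunk):

// Sketch only; `env` is an already-opened heed::Env.
let mut wtxn = env.write_txn()?;
let threads: Database<SerdeBincode<ThreadId>, SerializedThread> =
    env.create_database(&mut wtxn, Some("threads"))?;
wtxn.commit()?;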
|
||||
|
||||
impl ThreadsDatabase {
|
||||
@@ -364,7 +449,7 @@ impl ThreadsDatabase {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn list_threads(&self) -> Task<Result<Vec<SavedThreadMetadata>>> {
|
||||
pub fn list_threads(&self) -> Task<Result<Vec<SerializedThreadMetadata>>> {
|
||||
let env = self.env.clone();
|
||||
let threads = self.threads;
|
||||
|
||||
@@ -373,7 +458,7 @@ impl ThreadsDatabase {
|
||||
let mut iter = threads.iter(&txn)?;
|
||||
let mut threads = Vec::new();
|
||||
while let Some((key, value)) = iter.next().transpose()? {
|
||||
threads.push(SavedThreadMetadata {
|
||||
threads.push(SerializedThreadMetadata {
|
||||
id: key,
|
||||
summary: value.summary,
|
||||
updated_at: value.updated_at,
|
||||
@@ -384,7 +469,7 @@ impl ThreadsDatabase {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn try_find_thread(&self, id: ThreadId) -> Task<Result<Option<SavedThread>>> {
|
||||
pub fn try_find_thread(&self, id: ThreadId) -> Task<Result<Option<SerializedThread>>> {
|
||||
let env = self.env.clone();
|
||||
let threads = self.threads;
|
||||
|
||||
@@ -395,7 +480,7 @@ impl ThreadsDatabase {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn save_thread(&self, id: ThreadId, thread: SavedThread) -> Task<Result<()>> {
|
||||
pub fn save_thread(&self, id: ThreadId, thread: SerializedThread) -> Task<Result<()>> {
|
||||
let env = self.env.clone();
|
||||
let threads = self.threads;
|
||||
|
||||
|
||||
@@ -1,17 +1,38 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use assistant_settings::{AgentProfile, AssistantSettings};
|
||||
use assistant_tool::{ToolSource, ToolWorkingSet};
|
||||
use gpui::Entity;
|
||||
use scripting_tool::ScriptingTool;
|
||||
use ui::{prelude::*, ContextMenu, IconButtonShape, PopoverMenu, Tooltip};
|
||||
use gpui::{Entity, Subscription};
|
||||
use indexmap::IndexMap;
|
||||
use settings::{Settings as _, SettingsStore};
|
||||
use ui::{prelude::*, ContextMenu, PopoverMenu, Tooltip};
|
||||
|
||||
pub struct ToolSelector {
|
||||
profiles: IndexMap<Arc<str>, AgentProfile>,
|
||||
tools: Arc<ToolWorkingSet>,
|
||||
_subscriptions: Vec<Subscription>,
|
||||
}
|
||||
|
||||
impl ToolSelector {
|
||||
pub fn new(tools: Arc<ToolWorkingSet>, _cx: &mut Context<Self>) -> Self {
|
||||
Self { tools }
|
||||
pub fn new(tools: Arc<ToolWorkingSet>, cx: &mut Context<Self>) -> Self {
|
||||
let settings_subscription = cx.observe_global::<SettingsStore>(move |this, cx| {
|
||||
this.refresh_profiles(cx);
|
||||
});
|
||||
|
||||
let mut this = Self {
|
||||
profiles: IndexMap::default(),
|
||||
tools,
|
||||
_subscriptions: vec![settings_subscription],
|
||||
};
|
||||
this.refresh_profiles(cx);
|
||||
|
||||
this
|
||||
}
|
||||
|
||||
fn refresh_profiles(&mut self, cx: &mut Context<Self>) {
|
||||
let settings = AssistantSettings::get_global(cx);
|
||||
|
||||
self.profiles = settings.profiles.clone();
|
||||
}
|
||||
|
||||
fn build_context_menu(
|
||||
@@ -19,27 +40,58 @@ impl ToolSelector {
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Entity<ContextMenu> {
|
||||
ContextMenu::build(window, cx, |mut menu, _window, cx| {
|
||||
let profiles = self.profiles.clone();
|
||||
let tool_set = self.tools.clone();
|
||||
ContextMenu::build_persistent(window, cx, move |mut menu, _window, cx| {
|
||||
let icon_position = IconPosition::End;
|
||||
let tools_by_source = self.tools.tools_by_source(cx);
|
||||
|
||||
let all_tools_enabled = self.tools.are_all_tools_enabled();
|
||||
menu = menu.header("Tools").toggleable_entry(
|
||||
"All Tools",
|
||||
all_tools_enabled,
|
||||
icon_position,
|
||||
None,
|
||||
{
|
||||
let tools = self.tools.clone();
|
||||
menu = menu.header("Profiles");
|
||||
for (_id, profile) in profiles.clone() {
|
||||
menu = menu.toggleable_entry(profile.name.clone(), false, icon_position, None, {
|
||||
let tools = tool_set.clone();
|
||||
move |_window, cx| {
|
||||
if all_tools_enabled {
|
||||
tools.disable_all_tools(cx);
|
||||
} else {
|
||||
tools.enable_all_tools();
|
||||
tools.disable_all_tools(cx);
|
||||
|
||||
tools.enable(
|
||||
ToolSource::Native,
|
||||
&profile
|
||||
.tools
|
||||
.iter()
|
||||
.filter_map(|(tool, enabled)| enabled.then(|| tool.clone()))
|
||||
.collect::<Vec<_>>(),
|
||||
);
|
||||
|
||||
for (context_server_id, preset) in &profile.context_servers {
|
||||
tools.enable(
|
||||
ToolSource::ContextServer {
|
||||
id: context_server_id.clone().into(),
|
||||
},
|
||||
&preset
|
||||
.tools
|
||||
.iter()
|
||||
.filter_map(|(tool, enabled)| enabled.then(|| tool.clone()))
|
||||
.collect::<Vec<_>>(),
|
||||
)
|
||||
}
|
||||
}
|
||||
},
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
menu = menu.separator();
|
||||
|
||||
let tools_by_source = tool_set.tools_by_source(cx);
|
||||
|
||||
let all_tools_enabled = tool_set.are_all_tools_enabled();
|
||||
menu = menu.toggleable_entry("All Tools", all_tools_enabled, icon_position, None, {
|
||||
let tools = tool_set.clone();
|
||||
move |_window, cx| {
|
||||
if all_tools_enabled {
|
||||
tools.disable_all_tools(cx);
|
||||
} else {
|
||||
tools.enable_all_tools();
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
for (source, tools) in tools_by_source {
|
||||
let mut tools = tools
|
||||
@@ -47,26 +99,21 @@ impl ToolSelector {
|
||||
.map(|tool| {
|
||||
let source = tool.source();
|
||||
let name = tool.name().into();
|
||||
let is_enabled = self.tools.is_enabled(&source, &name);
|
||||
let is_enabled = tool_set.is_enabled(&source, &name);
|
||||
|
||||
(source, name, is_enabled)
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
if ToolSource::Native == source {
|
||||
tools.push((
|
||||
ToolSource::Native,
|
||||
ScriptingTool::NAME.into(),
|
||||
self.tools.is_scripting_tool_enabled(),
|
||||
));
|
||||
tools.sort_by(|(_, name_a, _), (_, name_b, _)| name_a.cmp(name_b));
|
||||
}
|
||||
|
||||
menu = match &source {
|
||||
ToolSource::Native => menu.header("Zed"),
|
||||
ToolSource::Native => menu.separator().header("Zed Tools"),
|
||||
ToolSource::ContextServer { id } => {
|
||||
let all_tools_from_source_enabled =
|
||||
self.tools.are_all_tools_from_source_enabled(&source);
|
||||
tool_set.are_all_tools_from_source_enabled(&source);
|
||||
|
||||
menu.separator().header(id).toggleable_entry(
|
||||
"All Tools",
|
||||
@@ -74,7 +121,7 @@ impl ToolSelector {
|
||||
icon_position,
|
||||
None,
|
||||
{
|
||||
let tools = self.tools.clone();
|
||||
let tools = tool_set.clone();
|
||||
let source = source.clone();
|
||||
move |_window, cx| {
|
||||
if all_tools_from_source_enabled {
|
||||
@@ -90,20 +137,12 @@ impl ToolSelector {
|
||||
|
||||
for (source, name, is_enabled) in tools {
|
||||
menu = menu.toggleable_entry(name.clone(), is_enabled, icon_position, None, {
|
||||
let tools = self.tools.clone();
|
||||
let tools = tool_set.clone();
|
||||
move |_window, _cx| {
|
||||
if name.as_ref() == ScriptingTool::NAME {
|
||||
if is_enabled {
|
||||
tools.disable_scripting_tool();
|
||||
} else {
|
||||
tools.enable_scripting_tool();
|
||||
}
|
||||
if is_enabled {
|
||||
tools.disable(source.clone(), &[name.clone()]);
|
||||
} else {
|
||||
if is_enabled {
|
||||
tools.disable(source.clone(), &[name.clone()]);
|
||||
} else {
|
||||
tools.enable(source.clone(), &[name.clone()]);
|
||||
}
|
||||
tools.enable(source.clone(), &[name.clone()]);
|
||||
}
|
||||
}
|
||||
});
|
||||
@@ -124,7 +163,6 @@ impl Render for ToolSelector {
|
||||
})
|
||||
.trigger_with_tooltip(
|
||||
IconButton::new("tool-selector-button", IconName::SettingsAlt)
|
||||
.shape(IconButtonShape::Square)
|
||||
.icon_size(IconSize::Small)
|
||||
.icon_color(Color::Muted),
|
||||
Tooltip::text("Customize Tools"),
|
||||
|
||||
@@ -1,28 +1,33 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use anyhow::Result;
|
||||
use assistant_tool::{Tool, ToolWorkingSet};
|
||||
use collections::HashMap;
|
||||
use futures::future::Shared;
|
||||
use futures::FutureExt as _;
|
||||
use gpui::{SharedString, Task};
|
||||
use gpui::{App, SharedString, Task};
|
||||
use language_model::{
|
||||
LanguageModelRequestMessage, LanguageModelToolResult, LanguageModelToolUse,
|
||||
LanguageModelToolUseId, MessageContent, Role,
|
||||
};
|
||||
use ui::IconName;
|
||||
|
||||
use crate::thread::MessageId;
|
||||
use crate::thread_store::SavedMessage;
|
||||
use crate::thread_store::SerializedMessage;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct ToolUse {
|
||||
pub id: LanguageModelToolUseId,
|
||||
pub name: SharedString,
|
||||
pub ui_text: SharedString,
|
||||
pub status: ToolUseStatus,
|
||||
pub input: serde_json::Value,
|
||||
pub icon: ui::IconName,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum ToolUseStatus {
|
||||
NeedsConfirmation,
|
||||
Pending,
|
||||
Running,
|
||||
Finished(SharedString),
|
||||
@@ -30,6 +35,7 @@ pub enum ToolUseStatus {
|
||||
}
|
||||
|
||||
pub struct ToolUseState {
|
||||
tools: Arc<ToolWorkingSet>,
|
||||
tool_uses_by_assistant_message: HashMap<MessageId, Vec<LanguageModelToolUse>>,
|
||||
tool_uses_by_user_message: HashMap<MessageId, Vec<LanguageModelToolUseId>>,
|
||||
tool_results: HashMap<LanguageModelToolUseId, LanguageModelToolResult>,
|
||||
@@ -37,8 +43,9 @@ pub struct ToolUseState {
|
||||
}
|
||||
|
||||
impl ToolUseState {
|
||||
pub fn new() -> Self {
|
||||
pub fn new(tools: Arc<ToolWorkingSet>) -> Self {
|
||||
Self {
|
||||
tools,
|
||||
tool_uses_by_assistant_message: HashMap::default(),
|
||||
tool_uses_by_user_message: HashMap::default(),
|
||||
tool_results: HashMap::default(),
|
||||
@@ -46,14 +53,15 @@ impl ToolUseState {
|
||||
}
|
||||
}
|
||||
|
||||
/// Constructs a [`ToolUseState`] from the given list of [`SavedMessage`]s.
|
||||
/// Constructs a [`ToolUseState`] from the given list of [`SerializedMessage`]s.
|
||||
///
|
||||
/// Accepts a function to filter the tools that should be used to populate the state.
|
||||
pub fn from_saved_messages(
|
||||
messages: &[SavedMessage],
|
||||
pub fn from_serialized_messages(
|
||||
tools: Arc<ToolWorkingSet>,
|
||||
messages: &[SerializedMessage],
|
||||
mut filter_by_tool_name: impl FnMut(&str) -> bool,
|
||||
) -> Self {
|
||||
let mut this = Self::new();
|
||||
let mut this = Self::new(tools);
|
||||
let mut tool_names_by_id = HashMap::default();
|
||||
|
||||
for message in messages {
|
||||
@@ -118,11 +126,27 @@ impl ToolUseState {
|
||||
this
|
||||
}
|
||||
|
||||
    pub fn cancel_pending(&mut self) -> Vec<PendingToolUse> {
        let mut pending_tools = Vec::new();
        for (tool_use_id, tool_use) in self.pending_tool_uses_by_id.drain() {
            self.tool_results.insert(
                tool_use_id.clone(),
                LanguageModelToolResult {
                    tool_use_id,
                    content: "Tool canceled by user".into(),
                    is_error: true,
                },
            );
            pending_tools.push(tool_use.clone());
        }
        pending_tools
    }
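A short sketch (not in the diff; `state` is a hypothetical `ToolUseState` value) of the invariant this gives the caller: after cancellation, every formerly pending tool call has a synthetic error result recorded, so a follow-up request can still pair each tool use with a tool result.

// Sketch only.
let cancelled = state.cancel_pending();
for tool_use in &cancelled {
    let result = state.tool_result(&tool_use.id).expect("result recorded on cancel");
    assert!(result.is_error);
}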
|
||||
|
||||
pub fn pending_tool_uses(&self) -> Vec<&PendingToolUse> {
|
||||
self.pending_tool_uses_by_id.values().collect()
|
||||
}
|
||||
|
||||
pub fn tool_uses_for_message(&self, id: MessageId) -> Vec<ToolUse> {
|
||||
pub fn tool_uses_for_message(&self, id: MessageId, cx: &App) -> Vec<ToolUse> {
|
||||
let Some(tool_uses_for_message) = &self.tool_uses_by_assistant_message.get(&id) else {
|
||||
return Vec::new();
|
||||
};
|
||||
@@ -142,29 +166,53 @@ impl ToolUseState {
|
||||
}
|
||||
|
||||
if let Some(pending_tool_use) = self.pending_tool_uses_by_id.get(&tool_use.id) {
|
||||
return match pending_tool_use.status {
|
||||
match pending_tool_use.status {
|
||||
PendingToolUseStatus::Idle => ToolUseStatus::Pending,
|
||||
PendingToolUseStatus::NeedsConfirmation { .. } => {
|
||||
ToolUseStatus::NeedsConfirmation
|
||||
}
|
||||
PendingToolUseStatus::Running { .. } => ToolUseStatus::Running,
|
||||
PendingToolUseStatus::Error(ref err) => {
|
||||
ToolUseStatus::Error(err.clone().into())
|
||||
}
|
||||
};
|
||||
}
|
||||
} else {
|
||||
ToolUseStatus::Pending
|
||||
}
|
||||
|
||||
ToolUseStatus::Pending
|
||||
})();
|
||||
|
||||
let icon = if let Some(tool) = self.tools.tool(&tool_use.name, cx) {
|
||||
tool.icon()
|
||||
} else {
|
||||
IconName::Cog
|
||||
};
|
||||
|
||||
tool_uses.push(ToolUse {
|
||||
id: tool_use.id.clone(),
|
||||
name: tool_use.name.clone().into(),
|
||||
ui_text: self.tool_ui_label(&tool_use.name, &tool_use.input, cx),
|
||||
input: tool_use.input.clone(),
|
||||
status,
|
||||
icon,
|
||||
})
|
||||
}
|
||||
|
||||
tool_uses
|
||||
}
|
||||
|
||||
pub fn tool_ui_label(
|
||||
&self,
|
||||
tool_name: &str,
|
||||
input: &serde_json::Value,
|
||||
cx: &App,
|
||||
) -> SharedString {
|
||||
if let Some(tool) = self.tools.tool(tool_name, cx) {
|
||||
tool.ui_text(input).into()
|
||||
} else {
|
||||
"Unknown tool".into()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn tool_results_for_message(&self, message_id: MessageId) -> Vec<&LanguageModelToolResult> {
|
||||
let empty = Vec::new();
|
||||
|
||||
@@ -182,10 +230,18 @@ impl ToolUseState {
|
||||
.map_or(false, |results| !results.is_empty())
|
||||
}
|
||||
|
||||
pub fn tool_result(
|
||||
&self,
|
||||
tool_use_id: &LanguageModelToolUseId,
|
||||
) -> Option<&LanguageModelToolResult> {
|
||||
self.tool_results.get(tool_use_id)
|
||||
}
|
||||
|
||||
pub fn request_tool_use(
|
||||
&mut self,
|
||||
assistant_message_id: MessageId,
|
||||
tool_use: LanguageModelToolUse,
|
||||
cx: &App,
|
||||
) {
|
||||
self.tool_uses_by_assistant_message
|
||||
.entry(assistant_message_id)
|
||||
@@ -205,33 +261,64 @@ impl ToolUseState {
|
||||
PendingToolUse {
|
||||
assistant_message_id,
|
||||
id: tool_use.id,
|
||||
name: tool_use.name,
|
||||
name: tool_use.name.clone(),
|
||||
ui_text: self
|
||||
.tool_ui_label(&tool_use.name, &tool_use.input, cx)
|
||||
.into(),
|
||||
input: tool_use.input,
|
||||
status: PendingToolUseStatus::Idle,
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
pub fn run_pending_tool(&mut self, tool_use_id: LanguageModelToolUseId, task: Task<()>) {
|
||||
pub fn run_pending_tool(
|
||||
&mut self,
|
||||
tool_use_id: LanguageModelToolUseId,
|
||||
ui_text: SharedString,
|
||||
task: Task<()>,
|
||||
) {
|
||||
if let Some(tool_use) = self.pending_tool_uses_by_id.get_mut(&tool_use_id) {
|
||||
tool_use.ui_text = ui_text.into();
|
||||
tool_use.status = PendingToolUseStatus::Running {
|
||||
_task: task.shared(),
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
pub fn confirm_tool_use(
|
||||
&mut self,
|
||||
tool_use_id: LanguageModelToolUseId,
|
||||
ui_text: impl Into<Arc<str>>,
|
||||
input: serde_json::Value,
|
||||
messages: Arc<Vec<LanguageModelRequestMessage>>,
|
||||
tool: Arc<dyn Tool>,
|
||||
) {
|
||||
if let Some(tool_use) = self.pending_tool_uses_by_id.get_mut(&tool_use_id) {
|
||||
let ui_text = ui_text.into();
|
||||
tool_use.ui_text = ui_text.clone();
|
||||
let confirmation = Confirmation {
|
||||
tool_use_id,
|
||||
input,
|
||||
messages,
|
||||
tool,
|
||||
ui_text,
|
||||
};
|
||||
tool_use.status = PendingToolUseStatus::NeedsConfirmation(Arc::new(confirmation));
|
||||
}
|
||||
}
|
||||
|
||||
pub fn insert_tool_output(
|
||||
&mut self,
|
||||
tool_use_id: LanguageModelToolUseId,
|
||||
output: Result<String>,
|
||||
) -> Option<PendingToolUse> {
|
||||
match output {
|
||||
Ok(output) => {
|
||||
Ok(tool_result) => {
|
||||
self.tool_results.insert(
|
||||
tool_use_id.clone(),
|
||||
LanguageModelToolResult {
|
||||
tool_use_id: tool_use_id.clone(),
|
||||
content: output.into(),
|
||||
content: tool_result.into(),
|
||||
is_error: false,
|
||||
},
|
||||
);
|
||||
@@ -263,9 +350,17 @@ impl ToolUseState {
|
||||
) {
|
||||
if let Some(tool_uses) = self.tool_uses_by_assistant_message.get(&message_id) {
|
||||
for tool_use in tool_uses {
|
||||
request_message
|
||||
.content
|
||||
.push(MessageContent::ToolUse(tool_use.clone()));
|
||||
if self.tool_results.contains_key(&tool_use.id) {
|
||||
// Do not send tool uses until they are completed
|
||||
request_message
|
||||
.content
|
||||
.push(MessageContent::ToolUse(tool_use.clone()));
|
||||
} else {
|
||||
log::debug!(
|
||||
"skipped tool use {:?} because it is still pending",
|
||||
tool_use
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -278,9 +373,19 @@ impl ToolUseState {
|
||||
if let Some(tool_uses) = self.tool_uses_by_user_message.get(&message_id) {
|
||||
for tool_use_id in tool_uses {
|
||||
if let Some(tool_result) = self.tool_results.get(tool_use_id) {
|
||||
request_message
|
||||
.content
|
||||
.push(MessageContent::ToolResult(tool_result.clone()));
|
||||
request_message.content.push(MessageContent::ToolResult(
|
||||
LanguageModelToolResult {
|
||||
tool_use_id: tool_use_id.clone(),
|
||||
is_error: tool_result.is_error,
|
||||
content: if tool_result.content.is_empty() {
|
||||
// Surprisingly, the API fails if we return an empty string here.
|
||||
// It thinks we are sending a tool use without a tool result.
|
||||
"<Tool returned an empty string>".into()
|
||||
} else {
|
||||
tool_result.content.clone()
|
||||
},
|
||||
},
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -294,13 +399,24 @@ pub struct PendingToolUse {
|
||||
#[allow(unused)]
|
||||
pub assistant_message_id: MessageId,
|
||||
pub name: Arc<str>,
|
||||
pub ui_text: Arc<str>,
|
||||
pub input: serde_json::Value,
|
||||
pub status: PendingToolUseStatus,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct Confirmation {
|
||||
pub tool_use_id: LanguageModelToolUseId,
|
||||
pub input: serde_json::Value,
|
||||
pub ui_text: Arc<str>,
|
||||
pub messages: Arc<Vec<LanguageModelRequestMessage>>,
|
||||
pub tool: Arc<dyn Tool>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum PendingToolUseStatus {
|
||||
Idle,
|
||||
NeedsConfirmation(Arc<Confirmation>),
|
||||
Running { _task: Shared<Task<()>> },
|
||||
Error(#[allow(unused)] Arc<str>),
|
||||
}
|
||||
@@ -313,4 +429,8 @@ impl PendingToolUseStatus {
|
||||
pub fn is_error(&self) -> bool {
|
||||
matches!(self, PendingToolUseStatus::Error(_))
|
||||
}
|
||||
|
||||
pub fn needs_confirmation(&self) -> bool {
|
||||
matches!(self, PendingToolUseStatus::NeedsConfirmation { .. })
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
mod context_pill;
|
||||
mod tool_ready_pop_up;
|
||||
|
||||
pub use context_pill::*;
|
||||
pub use tool_ready_pop_up::*;
|
||||
|
||||
crates/assistant2/src/ui/tool_ready_pop_up.rs (new file, 115 lines)
@@ -0,0 +1,115 @@
|
||||
use gpui::{
|
||||
point, App, Context, EventEmitter, IntoElement, PlatformDisplay, Size, Window,
|
||||
WindowBackgroundAppearance, WindowBounds, WindowDecorations, WindowKind, WindowOptions,
|
||||
};
|
||||
use release_channel::ReleaseChannel;
|
||||
use std::rc::Rc;
|
||||
use theme;
|
||||
use ui::{prelude::*, Render};
|
||||
|
||||
pub struct ToolReadyPopUp {
|
||||
caption: SharedString,
|
||||
}
|
||||
|
||||
impl ToolReadyPopUp {
|
||||
pub fn new(caption: impl Into<SharedString>) -> Self {
|
||||
Self {
|
||||
caption: caption.into(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn window_options(screen: Rc<dyn PlatformDisplay>, cx: &App) -> WindowOptions {
|
||||
let size = Size {
|
||||
width: px(440.),
|
||||
height: px(72.),
|
||||
};
|
||||
|
||||
let notification_margin_width = px(16.);
|
||||
let notification_margin_height = px(-48.);
|
||||
|
||||
let bounds = gpui::Bounds::<Pixels> {
|
||||
origin: screen.bounds().top_right()
|
||||
- point(
|
||||
size.width + notification_margin_width,
|
||||
notification_margin_height,
|
||||
),
|
||||
size,
|
||||
};
|
||||
|
||||
let app_id = ReleaseChannel::global(cx).app_id();
|
||||
|
||||
WindowOptions {
|
||||
window_bounds: Some(WindowBounds::Windowed(bounds)),
|
||||
titlebar: None,
|
||||
focus: false,
|
||||
show: true,
|
||||
kind: WindowKind::PopUp,
|
||||
is_movable: false,
|
||||
display_id: Some(screen.id()),
|
||||
window_background: WindowBackgroundAppearance::Transparent,
|
||||
app_id: Some(app_id.to_owned()),
|
||||
window_min_size: None,
|
||||
window_decorations: Some(WindowDecorations::Client),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub enum ToolReadyPopupEvent {
|
||||
Accepted,
|
||||
Dismissed,
|
||||
}
|
||||
|
||||
impl EventEmitter<ToolReadyPopupEvent> for ToolReadyPopUp {}
|
||||
|
||||
impl Render for ToolReadyPopUp {
|
||||
fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
let ui_font = theme::setup_ui_font(window, cx);
|
||||
let line_height = window.line_height();
|
||||
|
||||
h_flex()
|
||||
.size_full()
|
||||
.p_3()
|
||||
.gap_4()
|
||||
.justify_between()
|
||||
.elevation_3(cx)
|
||||
.text_ui(cx)
|
||||
.font(ui_font)
|
||||
.border_color(cx.theme().colors().border)
|
||||
.rounded_xl()
|
||||
.child(
|
||||
h_flex()
|
||||
.items_start()
|
||||
.gap_2()
|
||||
.child(
|
||||
h_flex().h(line_height).justify_center().child(
|
||||
Icon::new(IconName::Info)
|
||||
.size(IconSize::Small)
|
||||
.color(Color::Muted),
|
||||
),
|
||||
)
|
||||
.child(
|
||||
v_flex()
|
||||
.child(Headline::new("Agent Panel").size(HeadlineSize::XSmall))
|
||||
.child(Label::new(self.caption.clone()).color(Color::Muted)),
|
||||
),
|
||||
)
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_0p5()
|
||||
.child(
|
||||
Button::new("open", "View Panel")
|
||||
.style(ButtonStyle::Tinted(ui::TintColor::Accent))
|
||||
.on_click({
|
||||
cx.listener(move |_this, _event, _, cx| {
|
||||
cx.emit(ToolReadyPopupEvent::Accepted);
|
||||
})
|
||||
}),
|
||||
)
|
||||
.child(Button::new("dismiss", "Dismiss").on_click({
|
||||
cx.listener(move |_, _event, _, cx| {
|
||||
cx.emit(ToolReadyPopupEvent::Dismissed);
|
||||
})
|
||||
})),
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -162,6 +162,11 @@ pub enum ContextOperation {
|
||||
section: SlashCommandOutputSection<language::Anchor>,
|
||||
version: clock::Global,
|
||||
},
|
||||
ThoughtProcessOutputSectionAdded {
|
||||
timestamp: clock::Lamport,
|
||||
section: ThoughtProcessOutputSection<language::Anchor>,
|
||||
version: clock::Global,
|
||||
},
|
||||
BufferOperation(language::Operation),
|
||||
}
|
||||
|
||||
@@ -259,6 +264,20 @@ impl ContextOperation {
|
||||
version: language::proto::deserialize_version(&message.version),
|
||||
})
|
||||
}
|
||||
proto::context_operation::Variant::ThoughtProcessOutputSectionAdded(message) => {
|
||||
let section = message.section.context("missing section")?;
|
||||
Ok(Self::ThoughtProcessOutputSectionAdded {
|
||||
timestamp: language::proto::deserialize_timestamp(
|
||||
message.timestamp.context("missing timestamp")?,
|
||||
),
|
||||
section: ThoughtProcessOutputSection {
|
||||
range: language::proto::deserialize_anchor_range(
|
||||
section.range.context("invalid range")?,
|
||||
)?,
|
||||
},
|
||||
version: language::proto::deserialize_version(&message.version),
|
||||
})
|
||||
}
|
||||
proto::context_operation::Variant::BufferOperation(op) => Ok(Self::BufferOperation(
|
||||
language::proto::deserialize_operation(
|
||||
op.operation.context("invalid buffer operation")?,
|
||||
@@ -370,6 +389,27 @@ impl ContextOperation {
|
||||
},
|
||||
)),
|
||||
},
|
||||
Self::ThoughtProcessOutputSectionAdded {
|
||||
timestamp,
|
||||
section,
|
||||
version,
|
||||
} => proto::ContextOperation {
|
||||
variant: Some(
|
||||
proto::context_operation::Variant::ThoughtProcessOutputSectionAdded(
|
||||
proto::context_operation::ThoughtProcessOutputSectionAdded {
|
||||
timestamp: Some(language::proto::serialize_timestamp(*timestamp)),
|
||||
section: Some({
|
||||
proto::ThoughtProcessOutputSection {
|
||||
range: Some(language::proto::serialize_anchor_range(
|
||||
section.range.clone(),
|
||||
)),
|
||||
}
|
||||
}),
|
||||
version: language::proto::serialize_version(version),
|
||||
},
|
||||
),
|
||||
),
|
||||
},
|
||||
Self::BufferOperation(operation) => proto::ContextOperation {
|
||||
variant: Some(proto::context_operation::Variant::BufferOperation(
|
||||
proto::context_operation::BufferOperation {
|
||||
@@ -387,7 +427,8 @@ impl ContextOperation {
|
||||
Self::UpdateSummary { summary, .. } => summary.timestamp,
|
||||
Self::SlashCommandStarted { id, .. } => id.0,
|
||||
Self::SlashCommandOutputSectionAdded { timestamp, .. }
|
||||
| Self::SlashCommandFinished { timestamp, .. } => *timestamp,
|
||||
| Self::SlashCommandFinished { timestamp, .. }
|
||||
| Self::ThoughtProcessOutputSectionAdded { timestamp, .. } => *timestamp,
|
||||
Self::BufferOperation(_) => {
|
||||
panic!("reading the timestamp of a buffer operation is not supported")
|
||||
}
|
||||
@@ -402,7 +443,8 @@ impl ContextOperation {
|
||||
| Self::UpdateSummary { version, .. }
|
||||
| Self::SlashCommandStarted { version, .. }
|
||||
| Self::SlashCommandOutputSectionAdded { version, .. }
|
||||
| Self::SlashCommandFinished { version, .. } => version,
|
||||
| Self::SlashCommandFinished { version, .. }
|
||||
| Self::ThoughtProcessOutputSectionAdded { version, .. } => version,
|
||||
Self::BufferOperation(_) => {
|
||||
panic!("reading the version of a buffer operation is not supported")
|
||||
}
|
||||
@@ -418,6 +460,8 @@ pub enum ContextEvent {
|
||||
MessagesEdited,
|
||||
SummaryChanged,
|
||||
StreamedCompletion,
|
||||
StartedThoughtProcess(Range<language::Anchor>),
|
||||
EndedThoughtProcess(language::Anchor),
|
||||
PatchesUpdated {
|
||||
removed: Vec<Range<language::Anchor>>,
|
||||
updated: Vec<Range<language::Anchor>>,
|
||||
@@ -498,6 +542,17 @@ impl MessageMetadata {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct ThoughtProcessOutputSection<T> {
    pub range: Range<T>,
}

impl ThoughtProcessOutputSection<language::Anchor> {
    pub fn is_valid(&self, buffer: &language::TextBuffer) -> bool {
        self.range.start.is_valid(buffer) && !self.range.to_offset(buffer).is_empty()
    }
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct Message {
|
||||
pub offset_range: Range<usize>,
|
||||
@@ -580,6 +635,7 @@ pub struct AssistantContext {
|
||||
edits_since_last_parse: language::Subscription,
|
||||
slash_commands: Arc<SlashCommandWorkingSet>,
|
||||
slash_command_output_sections: Vec<SlashCommandOutputSection<language::Anchor>>,
|
||||
thought_process_output_sections: Vec<ThoughtProcessOutputSection<language::Anchor>>,
|
||||
message_anchors: Vec<MessageAnchor>,
|
||||
contents: Vec<Content>,
|
||||
messages_metadata: HashMap<MessageId, MessageMetadata>,
|
||||
@@ -682,6 +738,7 @@ impl AssistantContext {
|
||||
parsed_slash_commands: Vec::new(),
|
||||
invoked_slash_commands: HashMap::default(),
|
||||
slash_command_output_sections: Vec::new(),
|
||||
thought_process_output_sections: Vec::new(),
|
||||
edits_since_last_parse: edits_since_last_slash_command_parse,
|
||||
summary: None,
|
||||
pending_summary: Task::ready(None),
|
||||
@@ -764,6 +821,18 @@ impl AssistantContext {
|
||||
}
|
||||
})
|
||||
.collect(),
|
||||
thought_process_output_sections: self
|
||||
.thought_process_output_sections
|
||||
.iter()
|
||||
.filter_map(|section| {
|
||||
if section.is_valid(buffer) {
|
||||
let range = section.range.to_offset(buffer);
|
||||
Some(ThoughtProcessOutputSection { range })
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.collect(),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -957,6 +1026,16 @@ impl AssistantContext {
|
||||
cx.emit(ContextEvent::SlashCommandOutputSectionAdded { section });
|
||||
}
|
||||
}
|
||||
ContextOperation::ThoughtProcessOutputSectionAdded { section, .. } => {
|
||||
let buffer = self.buffer.read(cx);
|
||||
if let Err(ix) = self
|
||||
.thought_process_output_sections
|
||||
.binary_search_by(|probe| probe.range.cmp(&section.range, buffer))
|
||||
{
|
||||
self.thought_process_output_sections
|
||||
.insert(ix, section.clone());
|
||||
}
|
||||
}
|
||||
ContextOperation::SlashCommandFinished {
|
||||
id,
|
||||
error_message,
|
||||
@@ -1020,6 +1099,9 @@ impl AssistantContext {
|
||||
ContextOperation::SlashCommandOutputSectionAdded { section, .. } => {
|
||||
self.has_received_operations_for_anchor_range(section.range.clone(), cx)
|
||||
}
|
||||
ContextOperation::ThoughtProcessOutputSectionAdded { section, .. } => {
|
||||
self.has_received_operations_for_anchor_range(section.range.clone(), cx)
|
||||
}
|
||||
ContextOperation::SlashCommandFinished { .. } => true,
|
||||
ContextOperation::BufferOperation(_) => {
|
||||
panic!("buffer operations should always be applied")
|
||||
@@ -1128,6 +1210,12 @@ impl AssistantContext {
|
||||
&self.slash_command_output_sections
|
||||
}
|
||||
|
||||
pub fn thought_process_output_sections(
|
||||
&self,
|
||||
) -> &[ThoughtProcessOutputSection<language::Anchor>] {
|
||||
&self.thought_process_output_sections
|
||||
}
|
||||
|
||||
pub fn contains_files(&self, cx: &App) -> bool {
|
||||
let buffer = self.buffer.read(cx);
|
||||
self.slash_command_output_sections.iter().any(|section| {
|
||||
@@ -1144,9 +1232,9 @@ impl AssistantContext {
|
||||
|
||||
fn set_language(&mut self, cx: &mut Context<Self>) {
|
||||
let markdown = self.language_registry.language_for_name("Markdown");
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
cx.spawn(async move |this, cx| {
|
||||
let markdown = markdown.await?;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
this.buffer
|
||||
.update(cx, |buffer, cx| buffer.set_language(Some(markdown), cx));
|
||||
})
|
||||
@@ -1188,7 +1276,7 @@ impl AssistantContext {
|
||||
return;
|
||||
};
|
||||
let debounce = self.token_count.is_some();
|
||||
self.pending_token_count = cx.spawn(|this, mut cx| {
|
||||
self.pending_token_count = cx.spawn(async move |this, cx| {
|
||||
async move {
|
||||
if debounce {
|
||||
cx.background_executor()
|
||||
@@ -1197,13 +1285,14 @@ impl AssistantContext {
|
||||
}
|
||||
|
||||
let token_count = cx.update(|cx| model.count_tokens(request, cx))?.await?;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
this.token_count = Some(token_count);
|
||||
this.start_cache_warming(&model, cx);
|
||||
cx.notify()
|
||||
})
|
||||
}
|
||||
.log_err()
|
||||
.await
|
||||
});
|
||||
}
|
||||
|
||||
@@ -1342,7 +1431,7 @@ impl AssistantContext {
|
||||
};
|
||||
|
||||
let model = Arc::clone(model);
|
||||
self.pending_cache_warming_task = cx.spawn(|this, mut cx| {
|
||||
self.pending_cache_warming_task = cx.spawn(async move |this, cx| {
|
||||
async move {
|
||||
match model.stream_completion(request, &cx).await {
|
||||
Ok(mut stream) => {
|
||||
@@ -1353,13 +1442,14 @@ impl AssistantContext {
|
||||
log::warn!("Cache warming failed: {}", e);
|
||||
}
|
||||
};
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
this.update_cache_status_for_completion(cx);
|
||||
})
|
||||
.ok();
|
||||
anyhow::Ok(())
|
||||
}
|
||||
.log_err()
|
||||
.await
|
||||
});
|
||||
}
|
||||
|
||||
@@ -1916,7 +2006,7 @@ impl AssistantContext {
|
||||
});
|
||||
self.reparse(cx);
|
||||
|
||||
let insert_output_task = cx.spawn(|this, mut cx| async move {
|
||||
let insert_output_task = cx.spawn(async move |this, cx| {
|
||||
let run_command = async {
|
||||
let mut stream = output.await?;
|
||||
|
||||
@@ -1933,7 +2023,7 @@ impl AssistantContext {
|
||||
|
||||
while let Some(event) = stream.next().await {
|
||||
let event = event?;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
this.buffer.update(cx, |buffer, _cx| {
|
||||
buffer.finalize_last_transaction();
|
||||
buffer.start_transaction()
|
||||
@@ -2034,7 +2124,7 @@ impl AssistantContext {
|
||||
})?;
|
||||
}
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
this.buffer.update(cx, |buffer, cx| {
|
||||
buffer.finalize_last_transaction();
|
||||
buffer.start_transaction();
|
||||
@@ -2080,7 +2170,7 @@ impl AssistantContext {
|
||||
|
||||
let command_result = run_command.await;
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
let version = this.version.clone();
|
||||
let timestamp = this.next_timestamp();
|
||||
let Some(invoked_slash_command) = this.invoked_slash_commands.get_mut(&command_id)
|
||||
@@ -2166,6 +2256,35 @@ impl AssistantContext {
|
||||
);
|
||||
}
|
||||
|
||||
    fn insert_thought_process_output_section(
        &mut self,
        section: ThoughtProcessOutputSection<language::Anchor>,
        cx: &mut Context<Self>,
    ) {
        let buffer = self.buffer.read(cx);
        let insertion_ix = match self
            .thought_process_output_sections
            .binary_search_by(|probe| probe.range.cmp(&section.range, buffer))
        {
            Ok(ix) | Err(ix) => ix,
        };
        self.thought_process_output_sections
            .insert(insertion_ix, section.clone());
        // cx.emit(ContextEvent::ThoughtProcessOutputSectionAdded {
        //     section: section.clone(),
        // });
        let version = self.version.clone();
        let timestamp = self.next_timestamp();
        self.push_op(
            ContextOperation::ThoughtProcessOutputSectionAdded {
                timestamp,
                section,
                version,
            },
            cx,
        );
    }
|
||||
|
||||
pub fn completion_provider_changed(&mut self, cx: &mut Context<Self>) {
|
||||
self.count_remaining_tokens(cx);
|
||||
}
|
||||
@@ -2210,7 +2329,7 @@ impl AssistantContext {
|
||||
let pending_completion_id = post_inc(&mut self.completion_count);
|
||||
|
||||
let task = cx.spawn({
|
||||
|this, mut cx| async move {
|
||||
async move |this, cx| {
|
||||
let stream = model.stream_completion(request, &cx);
|
||||
let assistant_message_id = assistant_message.id;
|
||||
let mut response_latency = None;
|
||||
@@ -2218,6 +2337,10 @@ impl AssistantContext {
|
||||
let request_start = Instant::now();
|
||||
let mut events = stream.await?;
|
||||
let mut stop_reason = StopReason::EndTurn;
|
||||
let mut thought_process_stack = Vec::new();
|
||||
|
||||
const THOUGHT_PROCESS_START_MARKER: &str = "<think>\n";
|
||||
const THOUGHT_PROCESS_END_MARKER: &str = "\n</think>";
|
||||
|
||||
while let Some(event) = events.next().await {
|
||||
if response_latency.is_none() {
|
||||
@@ -2225,7 +2348,10 @@ impl AssistantContext {
|
||||
}
|
||||
let event = event?;
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
let mut context_event = None;
|
||||
let mut thought_process_output_section = None;
|
||||
|
||||
this.update(cx, |this, cx| {
|
||||
let message_ix = this
|
||||
.message_anchors
|
||||
.iter()
|
||||
@@ -2243,7 +2369,50 @@ impl AssistantContext {
|
||||
LanguageModelCompletionEvent::Stop(reason) => {
|
||||
stop_reason = reason;
|
||||
}
|
||||
LanguageModelCompletionEvent::Text(chunk) => {
LanguageModelCompletionEvent::Thinking(chunk) => {
    if thought_process_stack.is_empty() {
        let start =
            buffer.anchor_before(message_old_end_offset);
        thought_process_stack.push(start);
        let chunk =
            format!("{THOUGHT_PROCESS_START_MARKER}{chunk}{THOUGHT_PROCESS_END_MARKER}");
        let chunk_len = chunk.len();
        buffer.edit(
            [(
                message_old_end_offset..message_old_end_offset,
                chunk,
            )],
            None,
            cx,
        );
        let end = buffer
            .anchor_before(message_old_end_offset + chunk_len);
        context_event = Some(
            ContextEvent::StartedThoughtProcess(start..end),
        );
    } else {
        // This ensures that all the thinking chunks are inserted inside the thinking tag
        let insertion_position =
            message_old_end_offset - THOUGHT_PROCESS_END_MARKER.len();
        buffer.edit(
            [(insertion_position..insertion_position, chunk)],
            None,
            cx,
        );
    }
}
LanguageModelCompletionEvent::Text(mut chunk) => {
    if let Some(start) = thought_process_stack.pop() {
        let end = buffer.anchor_before(message_old_end_offset);
        context_event =
            Some(ContextEvent::EndedThoughtProcess(end));
        thought_process_output_section =
            Some(ThoughtProcessOutputSection {
                range: start..end,
            });
        chunk.insert_str(0, "\n\n");
    }
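// Illustration (not part of the diff) of the buffer layout the two arms above produce:
// the first Thinking chunk inserts "<think>\n" + chunk + "\n</think>" in a single edit,
// every later Thinking chunk is spliced in just before the trailing "\n</think>", and the
// first Text chunk closes the section and prefixes itself with "\n\n", e.g.:
//
//   <think>
//   first thinking chunk...second thinking chunk...
//   </think>
//
//   first text chunk...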
|
||||
|
||||
buffer.edit(
|
||||
[(
|
||||
message_old_end_offset..message_old_end_offset,
|
||||
@@ -2258,13 +2427,20 @@ impl AssistantContext {
|
||||
}
|
||||
});
|
||||
|
||||
if let Some(section) = thought_process_output_section.take() {
|
||||
this.insert_thought_process_output_section(section, cx);
|
||||
}
|
||||
if let Some(context_event) = context_event.take() {
|
||||
cx.emit(context_event);
|
||||
}
|
||||
|
||||
cx.emit(ContextEvent::StreamedCompletion);
|
||||
|
||||
Some(())
|
||||
})?;
|
||||
smol::future::yield_now().await;
|
||||
}
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
this.pending_completions
|
||||
.retain(|completion| completion.id != pending_completion_id);
|
||||
this.summarize(false, cx);
|
||||
@@ -2276,7 +2452,7 @@ impl AssistantContext {
|
||||
|
||||
let result = stream_completion.await;
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
let error_message = if let Some(error) = result.as_ref().err() {
|
||||
if error.is::<PaymentRequiredError>() {
|
||||
cx.emit(ContextEvent::ShowPaymentRequiredError);
|
||||
@@ -2786,7 +2962,7 @@ impl AssistantContext {
|
||||
cache: false,
|
||||
});
|
||||
|
||||
self.pending_summary = cx.spawn(|this, mut cx| {
|
||||
self.pending_summary = cx.spawn(async move |this, cx| {
|
||||
async move {
|
||||
let stream = model.stream_completion_text(request, &cx);
|
||||
let mut messages = stream.await?;
|
||||
@@ -2795,7 +2971,7 @@ impl AssistantContext {
|
||||
while let Some(message) = messages.stream.next().await {
|
||||
let text = message?;
|
||||
let mut lines = text.lines();
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
let version = this.version.clone();
|
||||
let timestamp = this.next_timestamp();
|
||||
let summary = this.summary.get_or_insert(ContextSummary::default());
|
||||
@@ -2819,7 +2995,7 @@ impl AssistantContext {
|
||||
}
|
||||
}
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
let version = this.version.clone();
|
||||
let timestamp = this.next_timestamp();
|
||||
if let Some(summary) = this.summary.as_mut() {
|
||||
@@ -2837,6 +3013,7 @@ impl AssistantContext {
|
||||
anyhow::Ok(())
|
||||
}
|
||||
.log_err()
|
||||
.await
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -2943,12 +3120,12 @@ impl AssistantContext {
|
||||
return;
|
||||
}
|
||||
|
||||
self.pending_save = cx.spawn(|this, mut cx| async move {
|
||||
self.pending_save = cx.spawn(async move |this, cx| {
|
||||
if let Some(debounce) = debounce {
|
||||
cx.background_executor().timer(debounce).await;
|
||||
}
|
||||
|
||||
let (old_path, summary) = this.read_with(&cx, |this, _| {
|
||||
let (old_path, summary) = this.read_with(cx, |this, _| {
|
||||
let path = this.path.clone();
|
||||
let summary = if let Some(summary) = this.summary.as_ref() {
|
||||
if summary.done {
|
||||
@@ -2963,7 +3140,7 @@ impl AssistantContext {
|
||||
})?;
|
||||
|
||||
if let Some(summary) = summary {
|
||||
let context = this.read_with(&cx, |this, cx| this.serialize(cx))?;
|
||||
let context = this.read_with(cx, |this, cx| this.serialize(cx))?;
|
||||
let mut discriminant = 1;
|
||||
let mut new_path;
|
||||
loop {
|
||||
@@ -2995,7 +3172,7 @@ impl AssistantContext {
|
||||
}
|
||||
}
|
||||
|
||||
this.update(&mut cx, |this, _| this.path = Some(new_path))?;
|
||||
this.update(cx, |this, _| this.path = Some(new_path))?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
@@ -3124,6 +3301,8 @@ pub struct SavedContext {
|
||||
pub summary: String,
|
||||
pub slash_command_output_sections:
|
||||
Vec<assistant_slash_command::SlashCommandOutputSection<usize>>,
|
||||
#[serde(default)]
|
||||
pub thought_process_output_sections: Vec<ThoughtProcessOutputSection<usize>>,
|
||||
}
|
||||
|
||||
impl SavedContext {
|
||||
@@ -3225,6 +3404,20 @@ impl SavedContext {
|
||||
version.observe(timestamp);
|
||||
}
|
||||
|
||||
for section in self.thought_process_output_sections {
|
||||
let timestamp = next_timestamp.tick();
|
||||
operations.push(ContextOperation::ThoughtProcessOutputSectionAdded {
|
||||
timestamp,
|
||||
section: ThoughtProcessOutputSection {
|
||||
range: buffer.anchor_after(section.range.start)
|
||||
..buffer.anchor_before(section.range.end),
|
||||
},
|
||||
version: version.clone(),
|
||||
});
|
||||
|
||||
version.observe(timestamp);
|
||||
}
|
||||
|
||||
let timestamp = next_timestamp.tick();
|
||||
operations.push(ContextOperation::UpdateSummary {
|
||||
summary: ContextSummary {
|
||||
@@ -3299,6 +3492,7 @@ impl SavedContextV0_3_0 {
|
||||
.collect(),
|
||||
summary: self.summary,
|
||||
slash_command_output_sections: self.slash_command_output_sections,
|
||||
thought_process_output_sections: Vec::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -13,7 +13,7 @@ use editor::{
|
||||
BlockContext, BlockId, BlockPlacement, BlockProperties, BlockStyle, Crease, CreaseMetadata,
|
||||
CustomBlockId, FoldId, RenderBlock, ToDisplayPoint,
|
||||
},
|
||||
scroll::{Autoscroll, AutoscrollStrategy},
|
||||
scroll::Autoscroll,
|
||||
Anchor, Editor, EditorEvent, MenuInlineCompletionsPolicy, ProposedChangeLocation,
|
||||
ProposedChangesEditor, RowExt, ToOffset as _, ToPoint,
|
||||
};
|
||||
@@ -64,7 +64,10 @@ use workspace::{
|
||||
Workspace,
|
||||
};
|
||||
|
||||
use crate::{slash_command::SlashCommandCompletionProvider, slash_command_picker};
|
||||
use crate::{
|
||||
slash_command::SlashCommandCompletionProvider, slash_command_picker,
|
||||
ThoughtProcessOutputSection,
|
||||
};
|
||||
use crate::{
|
||||
AssistantContext, AssistantPatch, AssistantPatchStatus, CacheStatus, Content, ContextEvent,
|
||||
ContextId, InvokedSlashCommandId, InvokedSlashCommandStatus, Message, MessageId,
|
||||
@@ -120,6 +123,11 @@ enum AssistError {
|
||||
Message(SharedString),
|
||||
}
|
||||
|
||||
pub enum ThoughtProcessStatus {
|
||||
Pending,
|
||||
Completed,
|
||||
}
|
||||
|
||||
pub trait AssistantPanelDelegate {
|
||||
fn active_context_editor(
|
||||
&self,
|
||||
@@ -178,6 +186,7 @@ pub struct ContextEditor {
|
||||
project: Entity<Project>,
|
||||
lsp_adapter_delegate: Option<Arc<dyn LspAdapterDelegate>>,
|
||||
editor: Entity<Editor>,
|
||||
pending_thought_process: Option<(CreaseId, language::Anchor)>,
|
||||
blocks: HashMap<MessageId, (MessageHeader, CustomBlockId)>,
|
||||
image_blocks: HashSet<CustomBlockId>,
|
||||
scroll_position: Option<ScrollPosition>,
|
||||
@@ -229,6 +238,7 @@ impl ContextEditor {
|
||||
editor.set_show_git_diff_gutter(false, cx);
|
||||
editor.set_show_code_actions(false, cx);
|
||||
editor.set_show_runnables(false, cx);
|
||||
editor.set_show_breakpoints(false, cx);
|
||||
editor.set_show_wrap_guides(false, cx);
|
||||
editor.set_show_indent_guides(false, cx);
|
||||
editor.set_completion_provider(Some(Box::new(completion_provider)));
|
||||
@@ -252,7 +262,8 @@ impl ContextEditor {
|
||||
cx.observe_global_in::<SettingsStore>(window, Self::settings_changed),
|
||||
];
|
||||
|
||||
let sections = context.read(cx).slash_command_output_sections().to_vec();
|
||||
let slash_command_sections = context.read(cx).slash_command_output_sections().to_vec();
|
||||
let thought_process_sections = context.read(cx).thought_process_output_sections().to_vec();
|
||||
let patch_ranges = context.read(cx).patch_ranges().collect::<Vec<_>>();
|
||||
let slash_commands = context.read(cx).slash_commands().clone();
|
||||
let mut this = Self {
|
||||
@@ -264,6 +275,7 @@ impl ContextEditor {
|
||||
image_blocks: Default::default(),
|
||||
scroll_position: None,
|
||||
remote_id: None,
|
||||
pending_thought_process: None,
|
||||
fs: fs.clone(),
|
||||
workspace,
|
||||
project,
|
||||
@@ -293,7 +305,14 @@ impl ContextEditor {
|
||||
};
|
||||
this.update_message_headers(cx);
|
||||
this.update_image_blocks(cx);
|
||||
this.insert_slash_command_output_sections(sections, false, window, cx);
|
||||
this.insert_slash_command_output_sections(slash_command_sections, false, window, cx);
|
||||
this.insert_thought_process_output_sections(
|
||||
thought_process_sections
|
||||
.into_iter()
|
||||
.map(|section| (section, ThoughtProcessStatus::Completed)),
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
this.patches_updated(&Vec::new(), &patch_ranges, window, cx);
|
||||
this
|
||||
}
|
||||
@@ -395,12 +414,9 @@ impl ContextEditor {
|
||||
cursor..cursor
|
||||
};
|
||||
self.editor.update(cx, |editor, cx| {
|
||||
editor.change_selections(
|
||||
Some(Autoscroll::Strategy(AutoscrollStrategy::Fit)),
|
||||
window,
|
||||
cx,
|
||||
|selections| selections.select_ranges([new_selection]),
|
||||
);
|
||||
editor.change_selections(Some(Autoscroll::fit()), window, cx, |selections| {
|
||||
selections.select_ranges([new_selection])
|
||||
});
|
||||
});
|
||||
// Avoid scrolling to the new cursor position so the assistant's output is stable.
|
||||
cx.defer_in(window, |this, _, _| this.scroll_position = None);
|
||||
@@ -598,6 +614,47 @@ impl ContextEditor {
|
||||
context.save(Some(Duration::from_millis(500)), self.fs.clone(), cx);
|
||||
});
|
||||
}
|
||||
ContextEvent::StartedThoughtProcess(range) => {
|
||||
let creases = self.insert_thought_process_output_sections(
|
||||
[(
|
||||
ThoughtProcessOutputSection {
|
||||
range: range.clone(),
|
||||
},
|
||||
ThoughtProcessStatus::Pending,
|
||||
)],
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
self.pending_thought_process = Some((creases[0], range.start));
|
||||
}
|
||||
ContextEvent::EndedThoughtProcess(end) => {
|
||||
if let Some((crease_id, start)) = self.pending_thought_process.take() {
|
||||
self.editor.update(cx, |editor, cx| {
|
||||
let multi_buffer_snapshot = editor.buffer().read(cx).snapshot(cx);
|
||||
let (excerpt_id, _, _) = multi_buffer_snapshot.as_singleton().unwrap();
|
||||
let start_anchor = multi_buffer_snapshot
|
||||
.anchor_in_excerpt(*excerpt_id, start)
|
||||
.unwrap();
|
||||
|
||||
editor.display_map.update(cx, |display_map, cx| {
|
||||
display_map.unfold_intersecting(
|
||||
vec![start_anchor..start_anchor],
|
||||
true,
|
||||
cx,
|
||||
);
|
||||
});
|
||||
editor.remove_creases(vec![crease_id], cx);
|
||||
});
|
||||
self.insert_thought_process_output_sections(
|
||||
[(
|
||||
ThoughtProcessOutputSection { range: start..*end },
|
||||
ThoughtProcessStatus::Completed,
|
||||
)],
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
}
|
||||
}
|
||||
ContextEvent::StreamedCompletion => {
|
||||
self.editor.update(cx, |editor, cx| {
|
||||
if let Some(scroll_position) = self.scroll_position {
|
||||
@@ -906,7 +963,7 @@ impl ContextEditor {
|
||||
if editor_state.opened_patch != patch {
|
||||
state.update_task = Some({
|
||||
let this = this.clone();
|
||||
cx.spawn_in(window, |_, cx| async move {
|
||||
cx.spawn_in(window, async move |_, cx| {
|
||||
Self::update_patch_editor(this.clone(), patch, cx)
|
||||
.await
|
||||
.log_err();
|
||||
@@ -945,6 +1002,62 @@ impl ContextEditor {
|
||||
self.update_active_patch(window, cx);
|
||||
}
|
||||
|
||||
fn insert_thought_process_output_sections(
|
||||
&mut self,
|
||||
sections: impl IntoIterator<
|
||||
Item = (
|
||||
ThoughtProcessOutputSection<language::Anchor>,
|
||||
ThoughtProcessStatus,
|
||||
),
|
||||
>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Vec<CreaseId> {
|
||||
self.editor.update(cx, |editor, cx| {
|
||||
let buffer = editor.buffer().read(cx).snapshot(cx);
|
||||
let excerpt_id = *buffer.as_singleton().unwrap().0;
|
||||
let mut buffer_rows_to_fold = BTreeSet::new();
|
||||
let mut creases = Vec::new();
|
||||
for (section, status) in sections {
|
||||
let start = buffer
|
||||
.anchor_in_excerpt(excerpt_id, section.range.start)
|
||||
.unwrap();
|
||||
let end = buffer
|
||||
.anchor_in_excerpt(excerpt_id, section.range.end)
|
||||
.unwrap();
|
||||
let buffer_row = MultiBufferRow(start.to_point(&buffer).row);
|
||||
buffer_rows_to_fold.insert(buffer_row);
|
||||
creases.push(
|
||||
Crease::inline(
|
||||
start..end,
|
||||
FoldPlaceholder {
|
||||
render: render_thought_process_fold_icon_button(
|
||||
cx.entity().downgrade(),
|
||||
status,
|
||||
),
|
||||
merge_adjacent: false,
|
||||
..Default::default()
|
||||
},
|
||||
render_slash_command_output_toggle,
|
||||
|_, _, _, _| Empty.into_any_element(),
|
||||
)
|
||||
.with_metadata(CreaseMetadata {
|
||||
icon: IconName::Ai,
|
||||
label: "Thinking Process".into(),
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
let creases = editor.insert_creases(creases, cx);
|
||||
|
||||
for buffer_row in buffer_rows_to_fold.into_iter().rev() {
|
||||
editor.fold_at(&FoldAt { buffer_row }, window, cx);
|
||||
}
|
||||
|
||||
creases
|
||||
})
|
||||
}
|
||||
|
||||
fn insert_slash_command_output_sections(
|
||||
&mut self,
|
||||
sections: impl IntoIterator<Item = SlashCommandOutputSection<language::Anchor>>,
|
||||
@@ -1069,10 +1182,9 @@ impl ContextEditor {
|
||||
})
|
||||
.ok();
|
||||
} else {
|
||||
patch_state.update_task =
|
||||
Some(cx.spawn_in(window, move |this, cx| async move {
|
||||
Self::open_patch_editor(this, new_patch, cx).await.log_err();
|
||||
}));
|
||||
patch_state.update_task = Some(cx.spawn_in(window, async move |this, cx| {
|
||||
Self::open_patch_editor(this, new_patch, cx).await.log_err();
|
||||
}));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1102,10 +1214,10 @@ impl ContextEditor {
|
||||
async fn open_patch_editor(
|
||||
this: WeakEntity<Self>,
|
||||
patch: AssistantPatch,
|
||||
mut cx: AsyncWindowContext,
|
||||
cx: &mut AsyncWindowContext,
|
||||
) -> Result<()> {
|
||||
let project = this.read_with(&cx, |this, _| this.project.clone())?;
|
||||
let resolved_patch = patch.resolve(project.clone(), &mut cx).await;
|
||||
let project = this.read_with(cx, |this, _| this.project.clone())?;
|
||||
let resolved_patch = patch.resolve(project.clone(), cx).await;
|
||||
|
||||
let editor = cx.new_window_entity(|window, cx| {
|
||||
let editor = ProposedChangesEditor::new(
|
||||
@@ -1129,7 +1241,7 @@ impl ContextEditor {
|
||||
editor
|
||||
})?;
|
||||
|
||||
this.update(&mut cx, |this, _| {
|
||||
this.update(cx, |this, _| {
|
||||
if let Some(patch_state) = this.patches.get_mut(&patch.range) {
|
||||
patch_state.editor = Some(PatchEditorState {
|
||||
editor: editor.downgrade(),
|
||||
@@ -1138,8 +1250,8 @@ impl ContextEditor {
|
||||
patch_state.update_task.take();
|
||||
}
|
||||
})?;
|
||||
this.read_with(&cx, |this, _| this.workspace.clone())?
|
||||
.update_in(&mut cx, |workspace, window, cx| {
|
||||
this.read_with(cx, |this, _| this.workspace.clone())?
|
||||
.update_in(cx, |workspace, window, cx| {
|
||||
workspace.add_item_to_active_pane(Box::new(editor.clone()), None, false, window, cx)
|
||||
})
|
||||
.log_err();
|
||||
@@ -1150,11 +1262,11 @@ impl ContextEditor {
|
||||
async fn update_patch_editor(
|
||||
this: WeakEntity<Self>,
|
||||
patch: AssistantPatch,
|
||||
mut cx: AsyncWindowContext,
|
||||
cx: &mut AsyncWindowContext,
|
||||
) -> Result<()> {
|
||||
let project = this.update(&mut cx, |this, _| this.project.clone())?;
|
||||
let resolved_patch = patch.resolve(project.clone(), &mut cx).await;
|
||||
this.update_in(&mut cx, |this, window, cx| {
|
||||
let project = this.update(cx, |this, _| this.project.clone())?;
|
||||
let resolved_patch = patch.resolve(project.clone(), cx).await;
|
||||
this.update_in(cx, |this, window, cx| {
|
||||
let patch_state = this.patches.get_mut(&patch.range)?;
|
||||
|
||||
let locations = resolved_patch
|
||||
@@ -1624,14 +1736,14 @@ impl ContextEditor {
|
||||
.map(|path| Workspace::project_path_for_path(project.clone(), &path, false, cx))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
cx.spawn(move |_, cx| async move {
|
||||
cx.spawn(async move |_, cx| {
|
||||
let mut paths = vec![];
|
||||
let mut worktrees = vec![];
|
||||
|
||||
let opened_paths = futures::future::join_all(tasks).await;
|
||||
for (worktree, project_path) in opened_paths.into_iter().flatten() {
|
||||
let Ok(worktree_root_name) =
|
||||
worktree.read_with(&cx, |worktree, _| worktree.root_name().to_string())
|
||||
worktree.read_with(cx, |worktree, _| worktree.root_name().to_string())
|
||||
else {
|
||||
continue;
|
||||
};
|
||||
@@ -1648,12 +1760,12 @@ impl ContextEditor {
|
||||
};
|
||||
|
||||
window
|
||||
.spawn(cx, |mut cx| async move {
|
||||
.spawn(cx, async move |cx| {
|
||||
let (paths, dragged_file_worktrees) = paths.await;
|
||||
let cmd_name = FileSlashCommand.name();
|
||||
|
||||
context_editor_view
|
||||
.update_in(&mut cx, |context_editor, window, cx| {
|
||||
.update_in(cx, |context_editor, window, cx| {
|
||||
let file_argument = paths
|
||||
.into_iter()
|
||||
.map(|path| path.to_string_lossy().to_string())
|
||||
@@ -2199,9 +2311,9 @@ impl ContextEditor {
|
||||
.log_err();
|
||||
|
||||
if let Some(client) = client {
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
client.authenticate_and_connect(true, &mut cx).await?;
|
||||
this.update(&mut cx, |_, cx| cx.notify())
|
||||
cx.spawn(async move |this, cx| {
|
||||
client.authenticate_and_connect(true, cx).await?;
|
||||
this.update(cx, |_, cx| cx.notify())
|
||||
})
|
||||
.detach_and_log_err(cx)
|
||||
}
|
||||
@@ -2652,6 +2764,52 @@ fn find_surrounding_code_block(snapshot: &BufferSnapshot, offset: usize) -> Opti
|
||||
None
|
||||
}
|
||||
|
||||
fn render_thought_process_fold_icon_button(
|
||||
editor: WeakEntity<Editor>,
|
||||
status: ThoughtProcessStatus,
|
||||
) -> Arc<dyn Send + Sync + Fn(FoldId, Range<Anchor>, &mut App) -> AnyElement> {
|
||||
Arc::new(move |fold_id, fold_range, _cx| {
|
||||
let editor = editor.clone();
|
||||
|
||||
let button = ButtonLike::new(fold_id).layer(ElevationIndex::ElevatedSurface);
|
||||
let button = match status {
|
||||
ThoughtProcessStatus::Pending => button
|
||||
.child(
|
||||
Icon::new(IconName::Brain)
|
||||
.size(IconSize::Small)
|
||||
.color(Color::Muted),
|
||||
)
|
||||
.child(
|
||||
Label::new("Thinking…").color(Color::Muted).with_animation(
|
||||
"pulsating-label",
|
||||
Animation::new(Duration::from_secs(2))
|
||||
.repeat()
|
||||
.with_easing(pulsating_between(0.4, 0.8)),
|
||||
|label, delta| label.alpha(delta),
|
||||
),
|
||||
),
|
||||
ThoughtProcessStatus::Completed => button
|
||||
.style(ButtonStyle::Filled)
|
||||
.child(Icon::new(IconName::Brain).size(IconSize::Small))
|
||||
.child(Label::new("Thought Process").single_line()),
|
||||
};
|
||||
|
||||
button
|
||||
.on_click(move |_, window, cx| {
|
||||
editor
|
||||
.update(cx, |editor, cx| {
|
||||
let buffer_start = fold_range
|
||||
.start
|
||||
.to_point(&editor.buffer().read(cx).read(cx));
|
||||
let buffer_row = MultiBufferRow(buffer_start.row);
|
||||
editor.unfold_at(&UnfoldAt { buffer_row }, window, cx);
|
||||
})
|
||||
.ok();
|
||||
})
|
||||
.into_any_element()
|
||||
})
|
||||
}
|
||||
|
||||
fn render_fold_icon_button(
|
||||
editor: WeakEntity<Editor>,
|
||||
icon: IconName,
|
||||
@@ -3160,10 +3318,10 @@ impl FollowableItem for ContextEditor {
|
||||
assistant_panel_delegate.open_remote_context(workspace, context_id, window, cx)
|
||||
});
|
||||
|
||||
Some(window.spawn(cx, |mut cx| async move {
|
||||
Some(window.spawn(cx, async move |cx| {
|
||||
let context_editor = context_editor_task.await?;
|
||||
context_editor
|
||||
.update_in(&mut cx, |context_editor, window, cx| {
|
||||
.update_in(cx, |context_editor, window, cx| {
|
||||
context_editor.remote_id = Some(id);
|
||||
context_editor.editor.update(cx, |editor, cx| {
|
||||
editor.apply_update_proto(
|
||||
|
||||
@@ -164,9 +164,9 @@ impl PickerDelegate for SavedContextPickerDelegate {
|
||||
cx: &mut Context<Picker<Self>>,
|
||||
) -> Task<()> {
|
||||
let search = self.store.read(cx).search(query, cx);
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
cx.spawn(async move |this, cx| {
|
||||
let matches = search.await;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
let host_contexts = this.delegate.store.read(cx).host_contexts();
|
||||
this.delegate.matches = host_contexts
|
||||
.iter()
|
||||
|
||||
@@ -100,7 +100,7 @@ impl ContextStore {
|
||||
let fs = project.read(cx).fs().clone();
|
||||
let languages = project.read(cx).languages().clone();
|
||||
let telemetry = project.read(cx).client().telemetry().clone();
|
||||
cx.spawn(|mut cx| async move {
|
||||
cx.spawn(async move |cx| {
|
||||
const CONTEXT_WATCH_DURATION: Duration = Duration::from_millis(100);
|
||||
let (mut events, _) = fs.watch(contexts_dir(), CONTEXT_WATCH_DURATION).await;
|
||||
|
||||
@@ -125,16 +125,15 @@ impl ContextStore {
|
||||
languages,
|
||||
slash_commands,
|
||||
telemetry,
|
||||
_watch_updates: cx.spawn(|this, mut cx| {
|
||||
_watch_updates: cx.spawn(async move |this, cx| {
|
||||
async move {
|
||||
while events.next().await.is_some() {
|
||||
this.update(&mut cx, |this, cx| this.reload(cx))?
|
||||
.await
|
||||
.log_err();
|
||||
this.update(cx, |this, cx| this.reload(cx))?.await.log_err();
|
||||
}
|
||||
anyhow::Ok(())
|
||||
}
|
||||
.log_err()
|
||||
.await
|
||||
}),
|
||||
client_subscription: None,
|
||||
_project_subscriptions: vec![
|
||||
@@ -395,7 +394,7 @@ impl ContextStore {
|
||||
let prompt_builder = self.prompt_builder.clone();
|
||||
let slash_commands = self.slash_commands.clone();
|
||||
let request = self.client.request(proto::CreateContext { project_id });
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
cx.spawn(async move |this, cx| {
|
||||
let response = request.await?;
|
||||
let context_id = ContextId::from_proto(response.context_id);
|
||||
let context_proto = response.context.context("invalid context")?;
|
||||
@@ -421,8 +420,8 @@ impl ContextStore {
|
||||
.collect::<Result<Vec<_>>>()
|
||||
})
|
||||
.await?;
|
||||
context.update(&mut cx, |context, cx| context.apply_ops(operations, cx))?;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
context.update(cx, |context, cx| context.apply_ops(operations, cx))?;
|
||||
this.update(cx, |this, cx| {
|
||||
if let Some(existing_context) = this.loaded_context_for_id(&context_id, cx) {
|
||||
existing_context
|
||||
} else {
|
||||
@@ -457,7 +456,7 @@ impl ContextStore {
|
||||
let prompt_builder = self.prompt_builder.clone();
|
||||
let slash_commands = self.slash_commands.clone();
|
||||
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
cx.spawn(async move |this, cx| {
|
||||
let saved_context = load.await?;
|
||||
let context = cx.new(|cx| {
|
||||
AssistantContext::deserialize(
|
||||
@@ -471,7 +470,7 @@ impl ContextStore {
|
||||
cx,
|
||||
)
|
||||
})?;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
if let Some(existing_context) = this.loaded_context_for_path(&path, cx) {
|
||||
existing_context
|
||||
} else {
|
||||
@@ -489,7 +488,7 @@ impl ContextStore {
|
||||
) -> Task<Result<()>> {
|
||||
let fs = self.fs.clone();
|
||||
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
cx.spawn(async move |this, cx| {
|
||||
fs.remove_file(
|
||||
&path,
|
||||
RemoveOptions {
|
||||
@@ -499,7 +498,7 @@ impl ContextStore {
|
||||
)
|
||||
.await?;
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
this.contexts.retain(|context| {
|
||||
context
|
||||
.upgrade()
|
||||
@@ -565,7 +564,7 @@ impl ContextStore {
|
||||
});
|
||||
let prompt_builder = self.prompt_builder.clone();
|
||||
let slash_commands = self.slash_commands.clone();
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
cx.spawn(async move |this, cx| {
|
||||
let response = request.await?;
|
||||
let context_proto = response.context.context("invalid context")?;
|
||||
let context = cx.new(|cx| {
|
||||
@@ -590,8 +589,8 @@ impl ContextStore {
|
||||
.collect::<Result<Vec<_>>>()
|
||||
})
|
||||
.await?;
|
||||
context.update(&mut cx, |context, cx| context.apply_ops(operations, cx))?;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
context.update(cx, |context, cx| context.apply_ops(operations, cx))?;
|
||||
this.update(cx, |this, cx| {
|
||||
if let Some(existing_context) = this.loaded_context_for_id(&context_id, cx) {
|
||||
existing_context
|
||||
} else {
|
||||
@@ -700,12 +699,12 @@ impl ContextStore {
|
||||
project_id,
|
||||
contexts,
|
||||
});
|
||||
cx.spawn(|this, cx| async move {
|
||||
cx.spawn(async move |this, cx| {
|
||||
let response = request.await?;
|
||||
|
||||
let mut context_ids = Vec::new();
|
||||
let mut operations = Vec::new();
|
||||
this.read_with(&cx, |this, cx| {
|
||||
this.read_with(cx, |this, cx| {
|
||||
for context_version_proto in response.contexts {
|
||||
let context_version = ContextVersion::from_proto(&context_version_proto);
|
||||
let context_id = ContextId::from_proto(context_version_proto.context_id);
|
||||
@@ -768,7 +767,7 @@ impl ContextStore {
|
||||
|
||||
fn reload(&mut self, cx: &mut Context<Self>) -> Task<Result<()>> {
|
||||
let fs = self.fs.clone();
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
cx.spawn(async move |this, cx| {
|
||||
fs.create_dir(contexts_dir()).await?;
|
||||
|
||||
let mut paths = fs.read_dir(contexts_dir()).await?;
|
||||
@@ -808,7 +807,7 @@ impl ContextStore {
|
||||
}
|
||||
contexts.sort_unstable_by_key(|context| Reverse(context.mtime));
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
this.contexts_metadata = contexts;
|
||||
cx.notify();
|
||||
})
|
||||
@@ -819,7 +818,7 @@ impl ContextStore {
|
||||
cx.update_entity(
|
||||
&self.context_server_manager,
|
||||
|context_server_manager, cx| {
|
||||
for server in context_server_manager.servers() {
|
||||
for server in context_server_manager.running_servers() {
|
||||
context_server_manager
|
||||
.restart_server(&server.id(), cx)
|
||||
.detach_and_log_err(cx);
|
||||
@@ -850,7 +849,7 @@ impl ContextStore {
|
||||
cx.spawn({
|
||||
let server = server.clone();
|
||||
let server_id = server_id.clone();
|
||||
|this, mut cx| async move {
|
||||
async move |this, cx| {
|
||||
let Some(protocol) = server.client() else {
|
||||
return;
|
||||
};
|
||||
@@ -875,7 +874,7 @@ impl ContextStore {
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
this.update(&mut cx, |this, _cx| {
|
||||
this.update(cx, |this, _cx| {
|
||||
this.context_server_slash_command_ids
|
||||
.insert(server_id.clone(), slash_command_ids);
|
||||
})
|
||||
|
||||
@@ -2,7 +2,7 @@ use crate::context_editor::ContextEditor;
|
||||
use anyhow::Result;
|
||||
pub use assistant_slash_command::SlashCommand;
|
||||
use assistant_slash_command::{AfterCompletion, SlashCommandLine, SlashCommandWorkingSet};
|
||||
use editor::{CompletionProvider, Editor};
|
||||
use editor::{CompletionProvider, Editor, ExcerptId};
|
||||
use fuzzy::{match_strings, StringMatchCandidate};
|
||||
use gpui::{App, AppContext as _, Context, Entity, Task, WeakEntity, Window};
|
||||
use language::{Anchor, Buffer, ToPoint};
|
||||
@@ -48,7 +48,7 @@ impl SlashCommandCompletionProvider {
|
||||
name_range: Range<Anchor>,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<Vec<project::Completion>>> {
|
||||
) -> Task<Result<Option<Vec<project::Completion>>>> {
|
||||
let slash_commands = self.slash_commands.clone();
|
||||
let candidates = slash_commands
|
||||
.command_names(cx)
|
||||
@@ -59,7 +59,7 @@ impl SlashCommandCompletionProvider {
|
||||
let command_name = command_name.to_string();
|
||||
let editor = self.editor.clone();
|
||||
let workspace = self.workspace.clone();
|
||||
window.spawn(cx, |mut cx| async move {
|
||||
window.spawn(cx, async move |cx| {
|
||||
let matches = match_strings(
|
||||
&candidates,
|
||||
&command_name,
|
||||
@@ -71,65 +71,68 @@ impl SlashCommandCompletionProvider {
|
||||
.await;
|
||||
|
||||
cx.update(|_, cx| {
|
||||
matches
|
||||
.into_iter()
|
||||
.filter_map(|mat| {
|
||||
let command = slash_commands.command(&mat.string, cx)?;
|
||||
let mut new_text = mat.string.clone();
|
||||
let requires_argument = command.requires_argument();
|
||||
let accepts_arguments = command.accepts_arguments();
|
||||
if requires_argument || accepts_arguments {
|
||||
new_text.push(' ');
|
||||
}
|
||||
Some(
|
||||
matches
|
||||
.into_iter()
|
||||
.filter_map(|mat| {
|
||||
let command = slash_commands.command(&mat.string, cx)?;
|
||||
let mut new_text = mat.string.clone();
|
||||
let requires_argument = command.requires_argument();
|
||||
let accepts_arguments = command.accepts_arguments();
|
||||
if requires_argument || accepts_arguments {
|
||||
new_text.push(' ');
|
||||
}
|
||||
|
||||
let confirm =
|
||||
editor
|
||||
.clone()
|
||||
.zip(workspace.clone())
|
||||
.map(|(editor, workspace)| {
|
||||
let command_name = mat.string.clone();
|
||||
let command_range = command_range.clone();
|
||||
let editor = editor.clone();
|
||||
let workspace = workspace.clone();
|
||||
Arc::new(
|
||||
move |intent: CompletionIntent,
|
||||
window: &mut Window,
|
||||
cx: &mut App| {
|
||||
if !requires_argument
|
||||
&& (!accepts_arguments || intent.is_complete())
|
||||
{
|
||||
editor
|
||||
.update(cx, |editor, cx| {
|
||||
editor.run_command(
|
||||
command_range.clone(),
|
||||
&command_name,
|
||||
&[],
|
||||
true,
|
||||
workspace.clone(),
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
})
|
||||
.ok();
|
||||
false
|
||||
} else {
|
||||
requires_argument || accepts_arguments
|
||||
}
|
||||
},
|
||||
) as Arc<_>
|
||||
});
|
||||
Some(project::Completion {
|
||||
old_range: name_range.clone(),
|
||||
documentation: Some(CompletionDocumentation::SingleLine(
|
||||
command.description().into(),
|
||||
)),
|
||||
new_text,
|
||||
label: command.label(cx),
|
||||
confirm,
|
||||
source: CompletionSource::Custom,
|
||||
let confirm =
|
||||
editor
|
||||
.clone()
|
||||
.zip(workspace.clone())
|
||||
.map(|(editor, workspace)| {
|
||||
let command_name = mat.string.clone();
|
||||
let command_range = command_range.clone();
|
||||
let editor = editor.clone();
|
||||
let workspace = workspace.clone();
|
||||
Arc::new(
|
||||
move |intent: CompletionIntent,
|
||||
window: &mut Window,
|
||||
cx: &mut App| {
|
||||
if !requires_argument
|
||||
&& (!accepts_arguments || intent.is_complete())
|
||||
{
|
||||
editor
|
||||
.update(cx, |editor, cx| {
|
||||
editor.run_command(
|
||||
command_range.clone(),
|
||||
&command_name,
|
||||
&[],
|
||||
true,
|
||||
workspace.clone(),
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
})
|
||||
.ok();
|
||||
false
|
||||
} else {
|
||||
requires_argument || accepts_arguments
|
||||
}
|
||||
},
|
||||
) as Arc<_>
|
||||
});
|
||||
Some(project::Completion {
|
||||
old_range: name_range.clone(),
|
||||
documentation: Some(CompletionDocumentation::SingleLine(
|
||||
command.description().into(),
|
||||
)),
|
||||
new_text,
|
||||
label: command.label(cx),
|
||||
icon_path: None,
|
||||
confirm,
|
||||
source: CompletionSource::Custom,
|
||||
})
|
||||
})
|
||||
})
|
||||
.collect()
|
||||
.collect(),
|
||||
)
|
||||
})
|
||||
})
|
||||
}
|
||||
@@ -143,7 +146,7 @@ impl SlashCommandCompletionProvider {
|
||||
last_argument_range: Range<Anchor>,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<Vec<project::Completion>>> {
|
||||
) -> Task<Result<Option<Vec<project::Completion>>>> {
|
||||
let new_cancel_flag = Arc::new(AtomicBool::new(false));
|
||||
let mut flag = self.cancel_flag.lock();
|
||||
flag.store(true, SeqCst);
|
||||
@@ -161,27 +164,28 @@ impl SlashCommandCompletionProvider {
|
||||
let workspace = self.workspace.clone();
|
||||
let arguments = arguments.to_vec();
|
||||
cx.background_spawn(async move {
|
||||
Ok(completions
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(|new_argument| {
|
||||
let confirm =
|
||||
editor
|
||||
.clone()
|
||||
.zip(workspace.clone())
|
||||
.map(|(editor, workspace)| {
|
||||
Arc::new({
|
||||
let mut completed_arguments = arguments.clone();
|
||||
if new_argument.replace_previous_arguments {
|
||||
completed_arguments.clear();
|
||||
} else {
|
||||
completed_arguments.pop();
|
||||
}
|
||||
completed_arguments.push(new_argument.new_text.clone());
|
||||
Ok(Some(
|
||||
completions
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(|new_argument| {
|
||||
let confirm =
|
||||
editor
|
||||
.clone()
|
||||
.zip(workspace.clone())
|
||||
.map(|(editor, workspace)| {
|
||||
Arc::new({
|
||||
let mut completed_arguments = arguments.clone();
|
||||
if new_argument.replace_previous_arguments {
|
||||
completed_arguments.clear();
|
||||
} else {
|
||||
completed_arguments.pop();
|
||||
}
|
||||
completed_arguments.push(new_argument.new_text.clone());
|
||||
|
||||
let command_range = command_range.clone();
|
||||
let command_name = command_name.clone();
|
||||
move |intent: CompletionIntent,
|
||||
let command_range = command_range.clone();
|
||||
let command_name = command_name.clone();
|
||||
move |intent: CompletionIntent,
|
||||
window: &mut Window,
|
||||
cx: &mut App| {
|
||||
if new_argument.after_completion.run()
|
||||
@@ -205,31 +209,33 @@ impl SlashCommandCompletionProvider {
|
||||
!new_argument.after_completion.run()
|
||||
}
|
||||
}
|
||||
}) as Arc<_>
|
||||
});
|
||||
}) as Arc<_>
|
||||
});
|
||||
|
||||
let mut new_text = new_argument.new_text.clone();
|
||||
if new_argument.after_completion == AfterCompletion::Continue {
|
||||
new_text.push(' ');
|
||||
}
|
||||
let mut new_text = new_argument.new_text.clone();
|
||||
if new_argument.after_completion == AfterCompletion::Continue {
|
||||
new_text.push(' ');
|
||||
}
|
||||
|
||||
project::Completion {
|
||||
old_range: if new_argument.replace_previous_arguments {
|
||||
argument_range.clone()
|
||||
} else {
|
||||
last_argument_range.clone()
|
||||
},
|
||||
label: new_argument.label,
|
||||
new_text,
|
||||
documentation: None,
|
||||
confirm,
|
||||
source: CompletionSource::Custom,
|
||||
}
|
||||
})
|
||||
.collect())
|
||||
project::Completion {
|
||||
old_range: if new_argument.replace_previous_arguments {
|
||||
argument_range.clone()
|
||||
} else {
|
||||
last_argument_range.clone()
|
||||
},
|
||||
label: new_argument.label,
|
||||
icon_path: None,
|
||||
new_text,
|
||||
documentation: None,
|
||||
confirm,
|
||||
source: CompletionSource::Custom,
|
||||
}
|
||||
})
|
||||
.collect(),
|
||||
))
|
||||
})
|
||||
} else {
|
||||
Task::ready(Ok(Vec::new()))
|
||||
Task::ready(Ok(Some(Vec::new())))
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -237,12 +243,13 @@ impl SlashCommandCompletionProvider {
|
||||
impl CompletionProvider for SlashCommandCompletionProvider {
|
||||
fn completions(
|
||||
&self,
|
||||
_excerpt_id: ExcerptId,
|
||||
buffer: &Entity<Buffer>,
|
||||
buffer_position: Anchor,
|
||||
_: editor::CompletionContext,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Editor>,
|
||||
) -> Task<Result<Vec<project::Completion>>> {
|
||||
) -> Task<Result<Option<Vec<project::Completion>>>> {
|
||||
let Some((name, arguments, command_range, last_argument_range)) =
|
||||
buffer.update(cx, |buffer, _cx| {
|
||||
let position = buffer_position.to_point(buffer);
|
||||
@@ -286,7 +293,7 @@ impl CompletionProvider for SlashCommandCompletionProvider {
|
||||
Some((name, arguments, command_range, last_argument_range))
|
||||
})
|
||||
else {
|
||||
return Task::ready(Ok(Vec::new()));
|
||||
return Task::ready(Ok(Some(Vec::new())));
|
||||
};
|
||||
|
||||
if let Some((arguments, argument_range)) = arguments {
|
||||
|
||||
@@ -100,7 +100,7 @@ impl PickerDelegate for SlashCommandDelegate {
|
||||
cx: &mut Context<Picker<Self>>,
|
||||
) -> Task<()> {
|
||||
let all_commands = self.all_commands.clone();
|
||||
cx.spawn_in(window, |this, mut cx| async move {
|
||||
cx.spawn_in(window, async move |this, cx| {
|
||||
let filtered_commands = cx
|
||||
.background_spawn(async move {
|
||||
if query.is_empty() {
|
||||
@@ -119,7 +119,7 @@ impl PickerDelegate for SlashCommandDelegate {
|
||||
})
|
||||
.await;
|
||||
|
||||
this.update_in(&mut cx, |this, window, cx| {
|
||||
this.update_in(cx, |this, window, cx| {
|
||||
this.delegate.filtered_commands = filtered_commands;
|
||||
this.delegate.set_selected_index(0, window, cx);
|
||||
cx.notify();
|
||||
|
||||
crates/assistant_eval/Cargo.toml (new file, 44 lines)
@@ -0,0 +1,44 @@
[package]
name = "assistant_eval"
version = "0.1.0"
edition.workspace = true
publish.workspace = true
license = "GPL-3.0-or-later"

[lints]
workspace = true

[[bin]]
name = "assistant_eval"
path = "src/main.rs"

[dependencies]
anyhow.workspace = true
assistant2.workspace = true
assistant_tool.workspace = true
assistant_tools.workspace = true
clap.workspace = true
client.workspace = true
collections.workspace = true
context_server.workspace = true
env_logger.workspace = true
fs.workspace = true
futures.workspace = true
gpui.workspace = true
gpui_tokio.workspace = true
itertools.workspace = true
language.workspace = true
language_model.workspace = true
language_models.workspace = true
node_runtime.workspace = true
project.workspace = true
prompt_store.workspace = true
regex.workspace = true
release_channel.workspace = true
reqwest_client.workspace = true
serde.workspace = true
serde_json.workspace = true
serde_json_lenient.workspace = true
settings.workspace = true
smol.workspace = true
util.workspace = true
crates/assistant_eval/README.md (new file, 77 lines)
@@ -0,0 +1,77 @@
# Tool Evals

A framework for evaluating and benchmarking AI assistant performance in the Zed editor.

## Overview

Tool Evals provides a headless environment for running assistant evaluations on code repositories. It automates the process of:

1. Cloning and setting up test repositories
2. Sending prompts to language models
3. Allowing the assistant to use tools to modify code
4. Collecting metrics on performance
5. Evaluating results against known good solutions

## How It Works

The system consists of several key components:

- **Eval**: Loads test cases from the `evaluation_data` directory, clones repos, and executes evaluations
- **HeadlessAssistant**: Provides a headless environment for running the AI assistant
- **Judge**: Compares AI-generated diffs with reference solutions and scores their functional similarity

The evaluation flow:

1. An evaluation is loaded from the `evaluation_data` directory
2. The target repository is cloned and checked out at a specific commit
3. A HeadlessAssistant instance is created with the specified language model
4. The user prompt is sent to the assistant
5. The assistant responds and uses tools to modify code
6. Upon completion, a diff is generated from the changes
7. Results are saved, including the diff, the assistant's response, and performance metrics
8. If a reference solution exists, a Judge evaluates how closely the generated solution matches it

## Setup Requirements

### Prerequisites

- Rust and Cargo
- Git
- Network access to clone repositories
- Appropriate API keys for language models and git services (Anthropic, GitHub, etc.)

### Environment Variables

Ensure you have the required API keys set, either from a dev run of Zed or via these environment variables:

- `ZED_ANTHROPIC_API_KEY` for Claude models
- `ZED_OPENAI_API_KEY` for OpenAI models
- `ZED_GITHUB_API_KEY` for GitHub API (or similar)

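For example, the keys can be exported in the shell that runs the eval binary. This is a sketch only; the values are placeholders, not real credentials:

```bash
# Placeholder values; substitute your own keys.
export ZED_ANTHROPIC_API_KEY="<your Anthropic API key>"
export ZED_OPENAI_API_KEY="<your OpenAI API key>"
export ZED_GITHUB_API_KEY="<your GitHub API token>"
```
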
## Usage

### Running a Single Evaluation

To run a specific evaluation:

```bash
cargo run -p assistant_eval -- bubbletea-add-set-window-title
```

The arguments are regex patterns matched against evaluation names, so to run every evaluation whose name contains `bubbletea`, run:

```bash
cargo run -p assistant_eval -- bubbletea
```

To run all evaluations:

```bash
cargo run -p assistant_eval -- --all
```

## Evaluation Data Structure

Each evaluation should be placed in the `evaluation_data` directory with the following structure:

* `prompt.txt`: The user's prompt.
* `original.diff`: The `git diff` of the change anticipated for this prompt.
* `setup.json`: Information about the repo used for the evaluation (see the sketch below).
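This README does not spell out the `setup.json` format; the runner's `EvalSetup` struct in `src/eval.rs` reads two fields, `url` and `base_sha`. A minimal sketch, with illustrative values only:

```json
{
  "url": "https://github.com/charmbracelet/bubbletea",
  "base_sha": "<full commit SHA to check out before the eval runs>"
}
```
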
crates/assistant_eval/build.rs (new file, 61 lines)
@@ -0,0 +1,61 @@
|
||||
// Copied from `crates/zed/build.rs`, with removal of code for including the zed icon on windows.
|
||||
|
||||
use std::process::Command;
|
||||
|
||||
fn main() {
|
||||
if cfg!(target_os = "macos") {
|
||||
println!("cargo:rustc-env=MACOSX_DEPLOYMENT_TARGET=10.15.7");
|
||||
|
||||
println!("cargo:rerun-if-env-changed=ZED_BUNDLE");
|
||||
if std::env::var("ZED_BUNDLE").ok().as_deref() == Some("true") {
|
||||
// Find WebRTC.framework in the Frameworks folder when running as part of an application bundle.
|
||||
println!("cargo:rustc-link-arg=-Wl,-rpath,@executable_path/../Frameworks");
|
||||
} else {
|
||||
// Find WebRTC.framework as a sibling of the executable when running outside of an application bundle.
|
||||
println!("cargo:rustc-link-arg=-Wl,-rpath,@executable_path");
|
||||
}
|
||||
|
||||
// Weakly link ReplayKit to ensure Zed can be used on macOS 10.15+.
|
||||
println!("cargo:rustc-link-arg=-Wl,-weak_framework,ReplayKit");
|
||||
|
||||
// Seems to be required to enable Swift concurrency
|
||||
println!("cargo:rustc-link-arg=-Wl,-rpath,/usr/lib/swift");
|
||||
|
||||
// Register exported Objective-C selectors, protocols, etc
|
||||
println!("cargo:rustc-link-arg=-Wl,-ObjC");
|
||||
}
|
||||
|
||||
// Populate git sha environment variable if git is available
|
||||
println!("cargo:rerun-if-changed=../../.git/logs/HEAD");
|
||||
println!(
|
||||
"cargo:rustc-env=TARGET={}",
|
||||
std::env::var("TARGET").unwrap()
|
||||
);
|
||||
if let Ok(output) = Command::new("git").args(["rev-parse", "HEAD"]).output() {
|
||||
if output.status.success() {
|
||||
let git_sha = String::from_utf8_lossy(&output.stdout);
|
||||
let git_sha = git_sha.trim();
|
||||
|
||||
println!("cargo:rustc-env=ZED_COMMIT_SHA={git_sha}");
|
||||
|
||||
if let Ok(build_profile) = std::env::var("PROFILE") {
|
||||
if build_profile == "release" {
|
||||
// This is currently the best way to make `cargo build ...`'s build script
|
||||
// to print something to stdout without extra verbosity.
|
||||
println!(
|
||||
"cargo:warning=Info: using '{git_sha}' hash for ZED_COMMIT_SHA env var"
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
#[cfg(target_env = "msvc")]
|
||||
{
|
||||
// todo(windows): This is to avoid stack overflow. Remove it when solved.
|
||||
println!("cargo:rustc-link-arg=/stack:{}", 8 * 1024 * 1024);
|
||||
}
|
||||
}
|
||||
}
|
||||
crates/assistant_eval/src/eval.rs (new file, 267 lines)
@@ -0,0 +1,267 @@
|
||||
use crate::headless_assistant::{HeadlessAppState, HeadlessAssistant};
|
||||
use anyhow::anyhow;
|
||||
use assistant2::RequestKind;
|
||||
use collections::HashMap;
|
||||
use gpui::{App, Task};
|
||||
use language_model::{LanguageModel, TokenUsage};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{
|
||||
fs,
|
||||
io::Write,
|
||||
path::{Path, PathBuf},
|
||||
sync::Arc,
|
||||
time::Duration,
|
||||
};
|
||||
use util::command::new_smol_command;
|
||||
|
||||
pub struct Eval {
|
||||
pub name: String,
|
||||
pub path: PathBuf,
|
||||
pub repo_path: PathBuf,
|
||||
pub eval_setup: EvalSetup,
|
||||
pub user_prompt: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
pub struct EvalOutput {
|
||||
pub diff: String,
|
||||
pub last_message: String,
|
||||
pub elapsed_time: Duration,
|
||||
pub assistant_response_count: usize,
|
||||
pub tool_use_counts: HashMap<Arc<str>, u32>,
|
||||
pub token_usage: TokenUsage,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct EvalSetup {
|
||||
pub url: String,
|
||||
pub base_sha: String,
|
||||
}
|
||||
|
||||
impl Eval {
|
||||
/// Loads the eval from a path (typically in `evaluation_data`). Clones and checks out the repo
|
||||
/// if necessary.
|
||||
pub async fn load(name: String, path: PathBuf, repos_dir: &Path) -> anyhow::Result<Self> {
|
||||
let prompt_path = path.join("prompt.txt");
|
||||
let user_prompt = smol::unblock(|| std::fs::read_to_string(prompt_path)).await?;
|
||||
let setup_path = path.join("setup.json");
|
||||
let setup_contents = smol::unblock(|| std::fs::read_to_string(setup_path)).await?;
|
||||
let eval_setup = serde_json_lenient::from_str_lenient::<EvalSetup>(&setup_contents)?;
|
||||
let repo_path = repos_dir.join(repo_dir_name(&eval_setup.url));
|
||||
Ok(Eval {
|
||||
name,
|
||||
path,
|
||||
repo_path,
|
||||
eval_setup,
|
||||
user_prompt,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn run(
|
||||
self,
|
||||
app_state: Arc<HeadlessAppState>,
|
||||
model: Arc<dyn LanguageModel>,
|
||||
cx: &mut App,
|
||||
) -> Task<anyhow::Result<EvalOutput>> {
|
||||
cx.spawn(async move |cx| {
|
||||
checkout_repo(&self.eval_setup, &self.repo_path).await?;
|
||||
|
||||
let (assistant, done_rx) =
|
||||
cx.update(|cx| HeadlessAssistant::new(app_state.clone(), cx))??;
|
||||
|
||||
let _worktree = assistant
|
||||
.update(cx, |assistant, cx| {
|
||||
assistant.project.update(cx, |project, cx| {
|
||||
project.create_worktree(&self.repo_path, true, cx)
|
||||
})
|
||||
})?
|
||||
.await?;
|
||||
|
||||
let start_time = std::time::SystemTime::now();
|
||||
|
||||
let (system_prompt_context, load_error) = cx
|
||||
.update(|cx| {
|
||||
assistant
|
||||
.read(cx)
|
||||
.thread
|
||||
.read(cx)
|
||||
.load_system_prompt_context(cx)
|
||||
})?
|
||||
.await;
|
||||
|
||||
if let Some(load_error) = load_error {
|
||||
return Err(anyhow!("{:?}", load_error));
|
||||
};
|
||||
|
||||
assistant.update(cx, |assistant, cx| {
|
||||
assistant.thread.update(cx, |thread, cx| {
|
||||
let context = vec![];
|
||||
thread.insert_user_message(self.user_prompt.clone(), context, None, cx);
|
||||
thread.set_system_prompt_context(system_prompt_context);
|
||||
thread.send_to_model(model, RequestKind::Chat, cx);
|
||||
});
|
||||
})?;
|
||||
|
||||
done_rx.recv().await??;
|
||||
|
||||
let elapsed_time = start_time.elapsed()?;
|
||||
|
||||
let diff = query_git(&self.repo_path, vec!["diff"]).await?;
|
||||
|
||||
assistant.update(cx, |assistant, cx| {
|
||||
let thread = assistant.thread.read(cx);
|
||||
let last_message = thread.messages().last().unwrap();
|
||||
if last_message.role != language_model::Role::Assistant {
|
||||
return Err(anyhow!("Last message is not from assistant"));
|
||||
}
|
||||
let assistant_response_count = thread
|
||||
.messages()
|
||||
.filter(|message| message.role == language_model::Role::Assistant)
|
||||
.count();
|
||||
Ok(EvalOutput {
|
||||
diff,
|
||||
last_message: last_message.to_string(),
|
||||
elapsed_time,
|
||||
assistant_response_count,
|
||||
tool_use_counts: assistant.tool_use_counts.clone(),
|
||||
token_usage: thread.cumulative_token_usage(),
|
||||
})
|
||||
})?
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl EvalOutput {
|
||||
// Method to save the output to a directory
|
||||
pub fn save_to_directory(
|
||||
&self,
|
||||
output_dir: &Path,
|
||||
eval_output_value: String,
|
||||
) -> anyhow::Result<()> {
|
||||
// Create the output directory if it doesn't exist
|
||||
fs::create_dir_all(&output_dir)?;
|
||||
|
||||
// Save the diff to a file
|
||||
let diff_path = output_dir.join("diff.patch");
|
||||
let mut diff_file = fs::File::create(&diff_path)?;
|
||||
diff_file.write_all(self.diff.as_bytes())?;
|
||||
|
||||
// Save the last message to a file
|
||||
let message_path = output_dir.join("assistant_response.txt");
|
||||
let mut message_file = fs::File::create(&message_path)?;
|
||||
message_file.write_all(self.last_message.as_bytes())?;
|
||||
|
||||
// Current metrics for this run
|
||||
let current_metrics = serde_json::json!({
|
||||
"elapsed_time_ms": self.elapsed_time.as_millis(),
|
||||
"assistant_response_count": self.assistant_response_count,
|
||||
"tool_use_counts": self.tool_use_counts,
|
||||
"token_usage": self.token_usage,
|
||||
"eval_output_value": eval_output_value,
|
||||
});
|
||||
|
||||
// Get current timestamp in milliseconds
|
||||
let timestamp = std::time::SystemTime::now()
|
||||
.duration_since(std::time::UNIX_EPOCH)?
|
||||
.as_millis()
|
||||
.to_string();
|
||||
|
||||
// Path to metrics file
|
||||
let metrics_path = output_dir.join("metrics.json");
|
||||
|
||||
// Load existing metrics if the file exists, or create a new object
|
||||
let mut historical_metrics = if metrics_path.exists() {
|
||||
let metrics_content = fs::read_to_string(&metrics_path)?;
|
||||
serde_json::from_str::<serde_json::Value>(&metrics_content)
|
||||
.unwrap_or_else(|_| serde_json::json!({}))
|
||||
} else {
|
||||
serde_json::json!({})
|
||||
};
|
||||
|
||||
// Add new run with timestamp as key
|
||||
if let serde_json::Value::Object(ref mut map) = historical_metrics {
|
||||
map.insert(timestamp, current_metrics);
|
||||
}
|
||||
|
||||
// Write updated metrics back to file
|
||||
let metrics_json = serde_json::to_string_pretty(&historical_metrics)?;
|
||||
let mut metrics_file = fs::File::create(&metrics_path)?;
|
||||
metrics_file.write_all(metrics_json.as_bytes())?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
fn repo_dir_name(url: &str) -> String {
|
||||
url.trim_start_matches("https://")
|
||||
.replace(|c: char| !c.is_alphanumeric(), "_")
|
||||
}
|
||||
|
||||
async fn checkout_repo(eval_setup: &EvalSetup, repo_path: &Path) -> anyhow::Result<()> {
|
||||
if !repo_path.exists() {
|
||||
smol::unblock({
|
||||
let repo_path = repo_path.to_path_buf();
|
||||
|| std::fs::create_dir_all(repo_path)
|
||||
})
|
||||
.await?;
|
||||
run_git(repo_path, vec!["init"]).await?;
|
||||
run_git(repo_path, vec!["remote", "add", "origin", &eval_setup.url]).await?;
|
||||
} else {
|
||||
let actual_origin = query_git(repo_path, vec!["remote", "get-url", "origin"]).await?;
|
||||
if actual_origin != eval_setup.url {
|
||||
return Err(anyhow!(
|
||||
"remote origin {} does not match expected origin {}",
|
||||
actual_origin,
|
||||
eval_setup.url
|
||||
));
|
||||
}
|
||||
|
||||
// TODO: consider including "-x" to remove ignored files. The downside of this is that it will
|
||||
// also remove build artifacts, and so prevent incremental reuse there.
|
||||
run_git(repo_path, vec!["clean", "--force", "-d"]).await?;
|
||||
run_git(repo_path, vec!["reset", "--hard", "HEAD"]).await?;
|
||||
}
|
||||
|
||||
run_git(
|
||||
repo_path,
|
||||
vec!["fetch", "--depth", "1", "origin", &eval_setup.base_sha],
|
||||
)
|
||||
.await?;
|
||||
run_git(repo_path, vec!["checkout", &eval_setup.base_sha]).await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn run_git(repo_path: &Path, args: Vec<&str>) -> anyhow::Result<()> {
|
||||
let exit_status = new_smol_command("git")
|
||||
.current_dir(repo_path)
|
||||
.args(args.clone())
|
||||
.status()
|
||||
.await?;
|
||||
if exit_status.success() {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(anyhow!(
|
||||
"`git {}` failed with {}",
|
||||
args.join(" "),
|
||||
exit_status,
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
async fn query_git(repo_path: &Path, args: Vec<&str>) -> anyhow::Result<String> {
|
||||
let output = new_smol_command("git")
|
||||
.current_dir(repo_path)
|
||||
.args(args.clone())
|
||||
.output()
|
||||
.await?;
|
||||
if output.status.success() {
|
||||
Ok(String::from_utf8(output.stdout)?.trim().to_string())
|
||||
} else {
|
||||
Err(anyhow!(
|
||||
"`git {}` failed with {}",
|
||||
args.join(" "),
|
||||
output.status
|
||||
))
|
||||
}
|
||||
}
|
||||
crates/assistant_eval/src/headless_assistant.rs (new file, 236 lines)
@@ -0,0 +1,236 @@
|
||||
use anyhow::anyhow;
|
||||
use assistant2::{RequestKind, Thread, ThreadEvent, ThreadStore};
|
||||
use assistant_tool::ToolWorkingSet;
|
||||
use client::{Client, UserStore};
|
||||
use collections::HashMap;
|
||||
use futures::StreamExt;
|
||||
use gpui::{prelude::*, App, AsyncApp, Entity, SemanticVersion, Subscription, Task};
|
||||
use language::LanguageRegistry;
|
||||
use language_model::{
|
||||
AuthenticateError, LanguageModel, LanguageModelProviderId, LanguageModelRegistry,
|
||||
LanguageModelRequest,
|
||||
};
|
||||
use node_runtime::NodeRuntime;
|
||||
use project::{Project, RealFs};
|
||||
use prompt_store::PromptBuilder;
|
||||
use settings::SettingsStore;
|
||||
use smol::channel;
|
||||
use std::sync::Arc;
|
||||
|
||||
/// Subset of `workspace::AppState` needed by `HeadlessAssistant`, with additional fields.
|
||||
pub struct HeadlessAppState {
|
||||
pub languages: Arc<LanguageRegistry>,
|
||||
pub client: Arc<Client>,
|
||||
pub user_store: Entity<UserStore>,
|
||||
pub fs: Arc<dyn fs::Fs>,
|
||||
pub node_runtime: NodeRuntime,
|
||||
|
||||
// Additional fields not present in `workspace::AppState`.
|
||||
pub prompt_builder: Arc<PromptBuilder>,
|
||||
}
|
||||
|
||||
pub struct HeadlessAssistant {
|
||||
pub thread: Entity<Thread>,
|
||||
pub project: Entity<Project>,
|
||||
#[allow(dead_code)]
|
||||
pub thread_store: Entity<ThreadStore>,
|
||||
pub tool_use_counts: HashMap<Arc<str>, u32>,
|
||||
pub done_tx: channel::Sender<anyhow::Result<()>>,
|
||||
_subscription: Subscription,
|
||||
}
|
||||
|
||||
impl HeadlessAssistant {
|
||||
pub fn new(
|
||||
app_state: Arc<HeadlessAppState>,
|
||||
cx: &mut App,
|
||||
) -> anyhow::Result<(Entity<Self>, channel::Receiver<anyhow::Result<()>>)> {
|
||||
let env = None;
|
||||
let project = Project::local(
|
||||
app_state.client.clone(),
|
||||
app_state.node_runtime.clone(),
|
||||
app_state.user_store.clone(),
|
||||
app_state.languages.clone(),
|
||||
app_state.fs.clone(),
|
||||
env,
|
||||
cx,
|
||||
);
|
||||
|
||||
let tools = Arc::new(ToolWorkingSet::default());
|
||||
let thread_store =
|
||||
ThreadStore::new(project.clone(), tools, app_state.prompt_builder.clone(), cx)?;
|
||||
|
||||
let thread = thread_store.update(cx, |thread_store, cx| thread_store.create_thread(cx));
|
||||
|
||||
let (done_tx, done_rx) = channel::unbounded::<anyhow::Result<()>>();
|
||||
|
||||
let headless_thread = cx.new(move |cx| Self {
|
||||
_subscription: cx.subscribe(&thread, Self::handle_thread_event),
|
||||
thread,
|
||||
project,
|
||||
thread_store,
|
||||
tool_use_counts: HashMap::default(),
|
||||
done_tx,
|
||||
});
|
||||
|
||||
Ok((headless_thread, done_rx))
|
||||
}
|
||||
|
||||
fn handle_thread_event(
|
||||
&mut self,
|
||||
thread: Entity<Thread>,
|
||||
event: &ThreadEvent,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
match event {
|
||||
ThreadEvent::ShowError(err) => self
|
||||
.done_tx
|
||||
.send_blocking(Err(anyhow!("{:?}", err)))
|
||||
.unwrap(),
|
||||
ThreadEvent::DoneStreaming => {
|
||||
let thread = thread.read(cx);
|
||||
if let Some(message) = thread.messages().last() {
|
||||
println!("Message: {}", message.to_string());
|
||||
}
|
||||
if thread.all_tools_finished() {
|
||||
self.done_tx.send_blocking(Ok(())).unwrap()
|
||||
}
|
||||
}
|
||||
ThreadEvent::UsePendingTools => {
|
||||
thread.update(cx, |thread, cx| {
|
||||
thread.use_pending_tools(cx);
|
||||
});
|
||||
}
|
||||
ThreadEvent::ToolFinished {
|
||||
tool_use_id,
|
||||
pending_tool_use,
|
||||
..
|
||||
} => {
|
||||
if let Some(pending_tool_use) = pending_tool_use {
|
||||
println!(
|
||||
"Used tool {} with input: {}",
|
||||
pending_tool_use.name, pending_tool_use.input
|
||||
);
|
||||
*self
|
||||
.tool_use_counts
|
||||
.entry(pending_tool_use.name.clone())
|
||||
.or_insert(0) += 1;
|
||||
}
|
||||
if let Some(tool_result) = thread.read(cx).tool_result(tool_use_id) {
|
||||
println!("Tool result: {:?}", tool_result);
|
||||
}
|
||||
if thread.read(cx).all_tools_finished() {
|
||||
let model_registry = LanguageModelRegistry::read_global(cx);
|
||||
if let Some(model) = model_registry.active_model() {
|
||||
thread.update(cx, |thread, cx| {
|
||||
thread.attach_tool_results(vec![], cx);
|
||||
thread.send_to_model(model, RequestKind::Chat, cx);
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn init(cx: &mut App) -> Arc<HeadlessAppState> {
|
||||
release_channel::init(SemanticVersion::default(), cx);
|
||||
gpui_tokio::init(cx);
|
||||
|
||||
let mut settings_store = SettingsStore::new(cx);
|
||||
settings_store
|
||||
.set_default_settings(settings::default_settings().as_ref(), cx)
|
||||
.unwrap();
|
||||
cx.set_global(settings_store);
|
||||
client::init_settings(cx);
|
||||
Project::init_settings(cx);
|
||||
|
||||
let client = Client::production(cx);
|
||||
cx.set_http_client(client.http_client().clone());
|
||||
|
||||
let git_binary_path = None;
|
||||
let fs = Arc::new(RealFs::new(git_binary_path));
|
||||
|
||||
let languages = Arc::new(LanguageRegistry::new(cx.background_executor().clone()));
|
||||
|
||||
let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
|
||||
|
||||
language::init(cx);
|
||||
language_model::init(client.clone(), cx);
|
||||
language_models::init(user_store.clone(), client.clone(), fs.clone(), cx);
|
||||
assistant_tools::init(client.http_client().clone(), cx);
|
||||
context_server::init(cx);
|
||||
let stdout_is_a_pty = false;
|
||||
let prompt_builder = PromptBuilder::load(fs.clone(), stdout_is_a_pty, cx);
|
||||
assistant2::init(fs.clone(), client.clone(), prompt_builder.clone(), cx);
|
||||
|
||||
Arc::new(HeadlessAppState {
|
||||
languages,
|
||||
client,
|
||||
user_store,
|
||||
fs,
|
||||
node_runtime: NodeRuntime::unavailable(),
|
||||
prompt_builder,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn find_model(model_name: &str, cx: &App) -> anyhow::Result<Arc<dyn LanguageModel>> {
|
||||
let model_registry = LanguageModelRegistry::read_global(cx);
|
||||
let model = model_registry
|
||||
.available_models(cx)
|
||||
.find(|model| model.id().0 == model_name);
|
||||
|
||||
let Some(model) = model else {
|
||||
return Err(anyhow!(
|
||||
"No language model named {} was available. Available models: {}",
|
||||
model_name,
|
||||
model_registry
|
||||
.available_models(cx)
|
||||
.map(|model| model.id().0.clone())
|
||||
.collect::<Vec<_>>()
|
||||
.join(", ")
|
||||
));
|
||||
};
|
||||
|
||||
Ok(model)
|
||||
}
|
||||
|
||||
pub fn authenticate_model_provider(
|
||||
provider_id: LanguageModelProviderId,
|
||||
cx: &mut App,
|
||||
) -> Task<std::result::Result<(), AuthenticateError>> {
|
||||
let model_registry = LanguageModelRegistry::read_global(cx);
|
||||
let model_provider = model_registry.provider(&provider_id).unwrap();
|
||||
model_provider.authenticate(cx)
|
||||
}
|
||||
|
||||
pub async fn send_language_model_request(
|
||||
model: Arc<dyn LanguageModel>,
|
||||
request: LanguageModelRequest,
|
||||
cx: &mut AsyncApp,
|
||||
) -> anyhow::Result<String> {
|
||||
match model.stream_completion_text(request, &cx).await {
|
||||
Ok(mut stream) => {
|
||||
let mut full_response = String::new();
|
||||
|
||||
// Process the response stream
|
||||
while let Some(chunk_result) = stream.stream.next().await {
|
||||
match chunk_result {
|
||||
Ok(chunk_str) => {
|
||||
full_response.push_str(&chunk_str);
|
||||
}
|
||||
Err(err) => {
|
||||
return Err(anyhow!(
|
||||
"Error receiving response from language model: {err}"
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(full_response)
|
||||
}
|
||||
Err(err) => Err(anyhow!(
|
||||
"Failed to get response from language model. Error was: {err}"
|
||||
)),
|
||||
}
|
||||
}
|
||||
crates/assistant_eval/src/judge.rs (new file, 121 lines)
@@ -0,0 +1,121 @@
use crate::eval::EvalOutput;
use crate::headless_assistant::send_language_model_request;
use anyhow::anyhow;
use gpui::{App, Task};
use language_model::{
    LanguageModel, LanguageModelRequest, LanguageModelRequestMessage, MessageContent, Role,
};
use std::{path::Path, sync::Arc};

pub struct Judge {
    pub original_diff: Option<String>,
    #[allow(dead_code)]
    pub original_message: Option<String>,
    pub model: Arc<dyn LanguageModel>,
}

impl Judge {
    pub async fn load(eval_path: &Path, model: Arc<dyn LanguageModel>) -> anyhow::Result<Judge> {
        let original_diff_path = eval_path.join("original.diff");
        let original_diff = smol::unblock(move || {
            if std::fs::exists(&original_diff_path)? {
                anyhow::Ok(Some(std::fs::read_to_string(&original_diff_path)?))
            } else {
                anyhow::Ok(None)
            }
        });

        let original_message_path = eval_path.join("original_message.txt");
        let original_message = smol::unblock(move || {
            if std::fs::exists(&original_message_path)? {
                anyhow::Ok(Some(std::fs::read_to_string(&original_message_path)?))
            } else {
                anyhow::Ok(None)
            }
        });

        Ok(Self {
            original_diff: original_diff.await?,
            original_message: original_message.await?,
            model,
        })
    }

    pub fn run(&self, eval_output: &EvalOutput, cx: &mut App) -> Task<anyhow::Result<String>> {
        let Some(original_diff) = self.original_diff.as_ref() else {
            return Task::ready(Err(anyhow!("No original.diff found")));
        };

        // TODO: check for empty diff?
        let prompt = diff_comparison_prompt(&original_diff, &eval_output.diff);

        let request = LanguageModelRequest {
            messages: vec![LanguageModelRequestMessage {
                role: Role::User,
                content: vec![MessageContent::Text(prompt)],
                cache: false,
            }],
            temperature: Some(0.0),
            tools: Vec::new(),
            stop: Vec::new(),
        };

        let model = self.model.clone();
        cx.spawn(async move |cx| send_language_model_request(model, request, cx).await)
    }
}

pub fn diff_comparison_prompt(original_diff: &str, new_diff: &str) -> String {
    format!(
        r#"# Git Diff Similarity Evaluation Template

## Instructions

Compare the two diffs and score them between 0.0 and 1.0 based on their functional similarity.
- 1.0 = Perfect functional match (achieves identical results)
- 0.0 = No functional similarity whatsoever

## Evaluation Criteria

Please consider the following aspects in order of importance:

1. **Functional Equivalence (60%)**
   - Do both diffs achieve the same end result?
   - Are the changes functionally equivalent despite possibly using different approaches?
   - Do the modifications address the same issues or implement the same features?

2. **Logical Structure (20%)**
   - Are the logical flows similar?
   - Do the modifications affect the same code paths?
   - Are control structures (if/else, loops, etc.) modified in similar ways?

3. **Code Content (15%)**
   - Are similar lines added/removed?
   - Are the same variables, functions, or methods being modified?
   - Are the same APIs or libraries being used?

4. **File Layout (5%)**
   - Are the same files being modified?
   - Are changes occurring in similar locations within files?

## Input

Original Diff:
```git
{}
```

New Diff:
```git
{}
```

## Output Format

THE ONLY OUTPUT SHOULD BE A SCORE BETWEEN 0.0 AND 1.0.

Example output:
0.85"#,
        original_diff, new_diff
    )
}
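The prompt above tells the judge to reply with nothing but a score between 0.0 and 1.0, and the rest of the crate passes that reply around as a plain String. If a caller wanted the numeric value, a minimal parsing helper might look like the sketch below; parse_judge_score is hypothetical and not part of judge.rs.

// Hypothetical helper: convert the judge's raw reply into a score.
// Assumes the model followed the "only output a score" instruction.
fn parse_judge_score(raw: &str) -> anyhow::Result<f32> {
    let score: f32 = raw.trim().parse()?;
    anyhow::ensure!(
        (0.0..=1.0).contains(&score),
        "judge score {score} is outside the 0.0..=1.0 range"
    );
    Ok(score)
}

fn main() {
    assert_eq!(parse_judge_score("0.85\n").unwrap(), 0.85);
}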
243
crates/assistant_eval/src/main.rs
Normal file
@@ -0,0 +1,243 @@
mod eval;
mod headless_assistant;
mod judge;

use clap::Parser;
use eval::{Eval, EvalOutput};
use futures::future;
use gpui::{Application, AsyncApp};
use headless_assistant::{authenticate_model_provider, find_model, HeadlessAppState};
use itertools::Itertools;
use judge::Judge;
use language_model::{LanguageModel, LanguageModelRegistry};
use regex::Regex;
use reqwest_client::ReqwestClient;
use std::{cmp, path::PathBuf, sync::Arc};

#[derive(Parser, Debug)]
#[command(
    name = "assistant_eval",
    disable_version_flag = true,
    before_help = "Tool eval runner"
)]
struct Args {
    /// Regexes to match the names of evals to run.
    eval_name_regexes: Vec<String>,
    /// Runs all evals in `evaluation_data`, causing the regexes to be ignored.
    #[arg(long)]
    all: bool,
    /// Name of the model (default: "claude-3-7-sonnet-latest")
    #[arg(long, default_value = "claude-3-7-sonnet-latest")]
    model_name: String,
    /// Name of the editor model (default: value of `--model_name`).
    #[arg(long)]
    editor_model_name: Option<String>,
    /// Name of the judge model (default: value of `--model_name`).
    #[arg(long)]
    judge_model_name: Option<String>,
    /// Number of evaluations to run concurrently (default: 10)
    #[arg(short, long, default_value = "10")]
    concurrency: usize,
}

fn main() {
    env_logger::init();
    let args = Args::parse();
    let http_client = Arc::new(ReqwestClient::new());
    let app = Application::headless().with_http_client(http_client.clone());

    let crate_dir = PathBuf::from("../zed-agent-bench");
    let evaluation_data_dir = crate_dir.join("evaluation_data").canonicalize().unwrap();

    let repos_dir = crate_dir.join("repos");
    if !repos_dir.exists() {
        std::fs::create_dir_all(&repos_dir).unwrap();
    }
    let repos_dir = repos_dir.canonicalize().unwrap();

    let all_evals = std::fs::read_dir(&evaluation_data_dir)
        .unwrap()
        .map(|path| path.unwrap().file_name().to_string_lossy().to_string())
        .collect::<Vec<_>>();

    let evals_to_run = if args.all {
        all_evals
    } else {
        args.eval_name_regexes
            .into_iter()
            .map(|regex_string| Regex::new(&regex_string).unwrap())
            .flat_map(|regex| {
                all_evals
                    .iter()
                    .filter(|eval_name| regex.is_match(eval_name))
                    .cloned()
                    .collect::<Vec<_>>()
            })
            .collect::<Vec<_>>()
    };

    if evals_to_run.is_empty() {
        panic!("Names of evals to run must be provided or `--all` specified");
    }

    println!("Will run the following evals: {evals_to_run:?}");
    println!("Running up to {} evals concurrently", args.concurrency);

    let editor_model_name = if let Some(model_name) = args.editor_model_name {
        model_name
    } else {
        args.model_name.clone()
    };

    let judge_model_name = if let Some(model_name) = args.judge_model_name {
        model_name
    } else {
        args.model_name.clone()
    };

    app.run(move |cx| {
        let app_state = headless_assistant::init(cx);

        let model = find_model(&args.model_name, cx).unwrap();
        let editor_model = find_model(&editor_model_name, cx).unwrap();
        let judge_model = find_model(&judge_model_name, cx).unwrap();

        LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
            registry.set_active_model(Some(model.clone()), cx);
            registry.set_editor_model(Some(editor_model.clone()), cx);
        });

        let model_provider_id = model.provider_id();
        let editor_model_provider_id = editor_model.provider_id();
        let judge_model_provider_id = judge_model.provider_id();

        cx.spawn(async move |cx| {
            // Authenticate all model providers first.
            cx.update(|cx| authenticate_model_provider(model_provider_id.clone(), cx))
                .unwrap()
                .await
                .unwrap();
            cx.update(|cx| authenticate_model_provider(editor_model_provider_id.clone(), cx))
                .unwrap()
                .await
                .unwrap();
            cx.update(|cx| authenticate_model_provider(judge_model_provider_id.clone(), cx))
                .unwrap()
                .await
                .unwrap();

            let eval_load_futures = evals_to_run
                .into_iter()
                .map(|eval_name| {
                    let eval_path = evaluation_data_dir.join(&eval_name);
                    let load_future = Eval::load(eval_name.clone(), eval_path, &repos_dir);
                    async move {
                        match load_future.await {
                            Ok(eval) => Some(eval),
                            Err(err) => {
                                // TODO: Persist errors / surface errors at the end.
                                println!("Error loading {eval_name}: {err}");
                                None
                            }
                        }
                    }
                })
                .collect::<Vec<_>>();

            let loaded_evals = future::join_all(eval_load_futures)
                .await
                .into_iter()
                .flatten()
                .collect::<Vec<_>>();

            // The evals need to be loaded and grouped by URL before running concurrently,
            // since evals that use the same remote URL share the same working directory.
            let mut evals_grouped_by_url: Vec<Vec<Eval>> = loaded_evals
                .into_iter()
                .map(|eval| (eval.eval_setup.url.clone(), eval))
                .into_group_map()
                .into_values()
                .collect::<Vec<_>>();

            // Sort groups in descending order, so that bigger groups start first.
            evals_grouped_by_url.sort_by_key(|evals| cmp::Reverse(evals.len()));

            let result_futures = evals_grouped_by_url
                .into_iter()
                .map(|evals| {
                    let model = model.clone();
                    let judge_model = judge_model.clone();
                    let app_state = app_state.clone();
                    let cx = cx.clone();

                    async move {
                        let mut results = Vec::new();
                        for eval in evals {
                            let name = eval.name.clone();
                            println!("Starting eval named {}", name);
                            let result = run_eval(
                                eval,
                                model.clone(),
                                judge_model.clone(),
                                app_state.clone(),
                                cx.clone(),
                            )
                            .await;
                            results.push((name, result));
                        }
                        results
                    }
                })
                .collect::<Vec<_>>();

            let results = future::join_all(result_futures)
                .await
                .into_iter()
                .flatten()
                .collect::<Vec<_>>();

            // Process results in order of completion.
            for (eval_name, result) in results {
                match result {
                    Ok((eval_output, judge_output)) => {
                        println!("Generated diff for {eval_name}:\n");
                        println!("{}\n", eval_output.diff);
                        println!("Last message for {eval_name}:\n");
                        println!("{}\n", eval_output.last_message);
                        println!("Elapsed time: {:?}", eval_output.elapsed_time);
                        println!(
                            "Assistant response count: {}",
                            eval_output.assistant_response_count
                        );
                        println!("Tool use counts: {:?}", eval_output.tool_use_counts);
                        println!("Judge output for {eval_name}: {judge_output}");
                    }
                    Err(err) => {
                        // TODO: Persist errors / surface errors at the end.
                        println!("Error running {eval_name}: {err}");
                    }
                }
            }

            cx.update(|cx| cx.quit()).unwrap();
        })
        .detach();
    });

    println!("Done running evals");
}

async fn run_eval(
    eval: Eval,
    model: Arc<dyn LanguageModel>,
    judge_model: Arc<dyn LanguageModel>,
    app_state: Arc<HeadlessAppState>,
    cx: AsyncApp,
) -> anyhow::Result<(EvalOutput, String)> {
    let path = eval.path.clone();
    let judge = Judge::load(&path, judge_model).await?;
    let eval_output = cx.update(|cx| eval.run(app_state, model, cx))?.await?;
    let judge_output = cx.update(|cx| judge.run(&eval_output, cx))?.await?;
    eval_output.save_to_directory(&path, judge_output.to_string())?;
    Ok((eval_output, judge_output))
}
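The grouping step in main.rs exists because evals that share a remote URL also share an on-disk working directory, so they must run one after another within their group while separate groups run concurrently. Below is a standalone sketch of that itertools::into_group_map grouping; the URLs and eval names are invented for illustration.

use itertools::Itertools;
use std::cmp;

fn main() {
    // Hypothetical (url, eval_name) pairs standing in for loaded evals.
    let evals = vec![
        ("https://example.com/repo-a.git", "eval_one"),
        ("https://example.com/repo-b.git", "eval_two"),
        ("https://example.com/repo-a.git", "eval_three"),
    ];

    // Evals that share a URL land in the same group and will run one after
    // another; separate groups can run concurrently.
    let mut groups: Vec<Vec<&str>> = evals
        .into_iter()
        .into_group_map()
        .into_values()
        .collect();

    // Bigger groups first, mirroring the sort in main.rs, so the longest
    // sequential chain starts as early as possible.
    groups.sort_by_key(|group| cmp::Reverse(group.len()));

    assert_eq!(groups[0], vec!["eval_one", "eval_three"]);
}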
@@ -16,6 +16,7 @@ anthropic = { workspace = true, features = ["schemars"] }
anyhow.workspace = true
feature_flags.workspace = true
gpui.workspace = true
indexmap.workspace = true
language_model.workspace = true
lmstudio = { workspace = true, features = ["schemars"] }
log.workspace = true
18
crates/assistant_settings/src/agent_profile.rs
Normal file
@@ -0,0 +1,18 @@
use std::sync::Arc;

use gpui::SharedString;
use indexmap::IndexMap;

/// A profile for the Zed Agent that controls its behavior.
#[derive(Debug, Clone)]
pub struct AgentProfile {
    /// The name of the profile.
    pub name: SharedString,
    pub tools: IndexMap<Arc<str>, bool>,
    pub context_servers: IndexMap<Arc<str>, ContextServerPreset>,
}

#[derive(Debug, Clone)]
pub struct ContextServerPreset {
    pub tools: IndexMap<Arc<str>, bool>,
}
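As a rough illustration of the shape of this data, the sketch below builds one profile by hand; the profile name and tool names are invented, and the AgentProfile and ContextServerPreset types defined above are assumed to be in scope.

use std::sync::Arc;
use indexmap::IndexMap;

fn example_profile() -> AgentProfile {
    // Hypothetical tool toggles: keys are tool names, values enable or disable them.
    let mut tools: IndexMap<Arc<str>, bool> = IndexMap::new();
    tools.insert("edit-files".into(), true);
    tools.insert("terminal".into(), false);

    AgentProfile {
        name: "write-code".into(), // hypothetical profile name
        tools,
        // No context-server presets in this example.
        context_servers: IndexMap::new(),
    }
}

IndexMap preserves insertion order, which is presumably why it is used here instead of a plain HashMap: tools stay in the order they were declared.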
@@ -1,3 +1,5 @@
mod agent_profile;

use std::sync::Arc;

use ::open_ai::Model as OpenAiModel;

@@ -5,6 +7,7 @@ use anthropic::Model as AnthropicModel;
use deepseek::Model as DeepseekModel;
use feature_flags::FeatureFlagAppExt;
use gpui::{App, Pixels};
use indexmap::IndexMap;
use language_model::{CloudModel, LanguageModel};
use lmstudio::Model as LmStudioModel;
use ollama::Model as OllamaModel;

@@ -12,6 +15,8 @@ use schemars::{schema::Schema, JsonSchema};
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsSources};

pub use crate::agent_profile::*;

#[derive(Copy, Clone, Default, Debug, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum AssistantDockPosition {

@@ -66,6 +71,9 @@ pub struct AssistantSettings {
    pub inline_alternatives: Vec<LanguageModelSelection>,
    pub using_outdated_settings_version: bool,
    pub enable_experimental_live_diffs: bool,
    pub profiles: IndexMap<Arc<str>, AgentProfile>,
    pub always_allow_tool_actions: bool,
    pub notify_when_agent_waiting: bool,
}

impl AssistantSettings {

@@ -166,6 +174,9 @@ impl AssistantSettingsContent {
                editor_model: None,
                inline_alternatives: None,
                enable_experimental_live_diffs: None,
                profiles: None,
                always_allow_tool_actions: None,
                notify_when_agent_waiting: None,
            },
            VersionedAssistantSettingsContent::V2(settings) => settings.clone(),
        },

@@ -187,6 +198,9 @@ impl AssistantSettingsContent {
                editor_model: None,
                inline_alternatives: None,
                enable_experimental_live_diffs: None,
                profiles: None,
                always_allow_tool_actions: None,
                notify_when_agent_waiting: None,
            },
        }
    }

@@ -316,6 +330,9 @@ impl Default for VersionedAssistantSettingsContent {
            editor_model: None,
            inline_alternatives: None,
            enable_experimental_live_diffs: None,
            profiles: None,
            always_allow_tool_actions: None,
            notify_when_agent_waiting: None,
        })
    }
}

@@ -352,6 +369,17 @@ pub struct AssistantSettingsContentV2 {
    ///
    /// Default: false
    enable_experimental_live_diffs: Option<bool>,
    #[schemars(skip)]
    profiles: Option<IndexMap<Arc<str>, AgentProfileContent>>,
    /// Whenever a tool action would normally wait for your confirmation
    /// that you allow it, always choose to allow it.
    ///
    /// Default: false
    always_allow_tool_actions: Option<bool>,
    /// Whether to show a popup notification when the agent is waiting for user input.
    ///
    /// Default: true
    notify_when_agent_waiting: Option<bool>,
}

#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]

@@ -388,6 +416,19 @@ impl Default for LanguageModelSelection {
    }
}

#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, JsonSchema)]
pub struct AgentProfileContent {
    pub name: Arc<str>,
    pub tools: IndexMap<Arc<str>, bool>,
    #[serde(default)]
    pub context_servers: IndexMap<Arc<str>, ContextServerPresetContent>,
}

#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, JsonSchema)]
pub struct ContextServerPresetContent {
    pub tools: IndexMap<Arc<str>, bool>,
}

#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
pub struct AssistantSettingsContentV1 {
    /// Whether the Assistant is enabled.

@@ -482,6 +523,40 @@ impl Settings for AssistantSettings {
            &mut settings.enable_experimental_live_diffs,
            value.enable_experimental_live_diffs,
        );
        merge(
            &mut settings.always_allow_tool_actions,
            value.always_allow_tool_actions,
        );
        merge(
            &mut settings.notify_when_agent_waiting,
            value.notify_when_agent_waiting,
        );

        if let Some(profiles) = value.profiles {
            settings
                .profiles
                .extend(profiles.into_iter().map(|(id, profile)| {
                    (
                        id,
                        AgentProfile {
                            name: profile.name.into(),
                            tools: profile.tools,
                            context_servers: profile
                                .context_servers
                                .into_iter()
                                .map(|(context_server_id, preset)| {
                                    (
                                        context_server_id,
                                        ContextServerPreset {
                                            tools: preset.tools.clone(),
                                        },
                                    )
                                })
                                .collect(),
                        },
                    )
                }));
        }
    }

    Ok(settings)
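For reference, the profiles merged above arrive from user settings as AgentProfileContent values before being converted into AgentProfile. The sketch below shows how such a fragment might deserialize, assuming serde_json is available, indexmap is built with its serde feature, and AgentProfileContent from the earlier hunk is in scope; the profile id, name, and tool names are invented.

use std::sync::Arc;
use indexmap::IndexMap;

// Assumes AgentProfileContent (defined above) is in scope and derives Deserialize.
fn parse_profiles(json: &str) -> serde_json::Result<IndexMap<Arc<str>, AgentProfileContent>> {
    serde_json::from_str(json)
}

fn main() {
    // Hypothetical settings fragment; field names mirror AgentProfileContent.
    let json = r#"{
        "write-code": {
            "name": "Write Code",
            "tools": { "edit-files": true, "terminal": false }
        }
    }"#;

    let profiles = parse_profiles(json).unwrap();
    let profile = profiles.get("write-code").unwrap();
    assert!(profile.tools["edit-files"]);
    // context_servers was omitted, so #[serde(default)] leaves it empty.
    assert!(profile.context_servers.is_empty());
}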
@@ -546,6 +621,9 @@ mod tests {
                    default_width: None,
                    default_height: None,
                    enable_experimental_live_diffs: None,
                    profiles: None,
                    always_allow_tool_actions: None,
                    notify_when_agent_waiting: None,
                }),
            )
        },
@@ -77,8 +77,8 @@ impl SlashCommand for AutoCommand {

        let cx: &mut App = cx;

-        cx.spawn(|cx: gpui::AsyncApp| async move {
-            let task = project_index.read_with(&cx, |project_index, cx| {
+        cx.spawn(async move |cx| {
+            let task = project_index.read_with(cx, |project_index, cx| {
                project_index.flush_summary_backlogs(cx)
            })?;

@@ -117,9 +117,9 @@ impl SlashCommand for AutoCommand {
            return Task::ready(Err(anyhow!("no project indexer")));
        };

-        let task = window.spawn(cx, |cx| async move {
+        let task = window.spawn(cx, async move |cx| {
            let summaries = project_index
-                .read_with(&cx, |project_index, cx| project_index.all_summaries(cx))?
+                .read_with(cx, |project_index, cx| project_index.all_summaries(cx))?
                .await?;

            commands_for_summaries(&summaries, &original_prompt, &cx).await

@@ -186,7 +186,7 @@ impl SlashCommand for DiagnosticsSlashCommand {

        let task = collect_diagnostics(workspace.read(cx).project().clone(), options, cx);

-        window.spawn(cx, move |_| async move {
+        window.spawn(cx, async move |_| {
            task.await?
                .map(|output| output.to_event_stream())
                .ok_or_else(|| anyhow!("No diagnostics found"))

@@ -268,7 +268,7 @@ fn collect_diagnostics(
        })
        .collect();

-    cx.spawn(|mut cx| async move {
+    cx.spawn(async move |cx| {
        let mut output = SlashCommandOutput::default();

        if let Some(error_source) = error_source.as_ref() {

@@ -299,7 +299,7 @@ fn collect_diagnostics(
        }

        if let Some(buffer) = project_handle
-            .update(&mut cx, |project, cx| project.open_buffer(project_path, cx))?
+            .update(cx, |project, cx| project.open_buffer(project_path, cx))?
            .await
            .log_err()
        {

@@ -241,7 +241,7 @@ fn collect_files(
        .collect::<Vec<_>>();

    let (events_tx, events_rx) = mpsc::unbounded();
-    cx.spawn(|mut cx| async move {
+    cx.spawn(async move |cx| {
        for snapshot in snapshots {
            let worktree_id = snapshot.id();
            let mut directory_stack: Vec<Arc<Path>> = Vec::new();

@@ -352,7 +352,7 @@ fn collect_files(
                )))?;
            } else if entry.is_file() {
                let Some(open_buffer_task) = project_handle
-                    .update(&mut cx, |project, cx| {
+                    .update(cx, |project, cx| {
                        project.open_buffer((worktree_id, &entry.path), cx)
                    })
                    .ok()

@@ -361,7 +361,7 @@ fn collect_files(
                };
                if let Some(buffer) = open_buffer_task.await.log_err() {
                    let mut output = SlashCommandOutput::default();
-                    let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot())?;
+                    let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot())?;
                    append_buffer_to_output(
                        &snapshot,
                        Some(&path_including_worktree_name),

@@ -99,7 +99,7 @@ impl SlashCommand for ProjectSlashCommand {
            return Task::ready(Err(anyhow::anyhow!("no project indexer")));
        };

-        window.spawn(cx, |mut cx| async move {
+        window.spawn(cx, async move |cx| {
            let current_model = current_model.ok_or_else(|| anyhow!("no model selected"))?;

            let prompt =

@@ -123,7 +123,7 @@ impl SlashCommand for ProjectSlashCommand {
                .search_queries;

            let results = project_index
-                .read_with(&cx, |project_index, cx| {
+                .read_with(cx, |project_index, cx| {
                    project_index.search(search_queries.clone(), 25, cx)
                })?
                .await?;

@@ -109,9 +109,9 @@ impl SlashCommand for SearchSlashCommand {
            return Task::ready(Err(anyhow::anyhow!("no project indexer")));
        };

-        window.spawn(cx, |cx| async move {
+        window.spawn(cx, async move |cx| {
            let results = project_index
-                .read_with(&cx, |project_index, cx| {
+                .read_with(cx, |project_index, cx| {
                    project_index.search(vec![query.clone()], limit.unwrap_or(5), cx)
                })?
                .await?;

@@ -86,7 +86,7 @@ impl SlashCommand for TabSlashCommand {
            tab_items_for_queries(workspace, &[current_query], cancel, false, window, cx);

        let comment_id = cx.theme().syntax().highlight_id("comment").map(HighlightId);
-        window.spawn(cx, |_| async move {
+        window.spawn(cx, async move |_| {
            let tab_items = tab_items_search.await?;
            let run_command = tab_items.len() == 1;
            let tab_completion_items = tab_items.into_iter().filter_map(|(path, ..)| {

@@ -172,11 +172,11 @@ fn tab_items_for_queries(
) -> Task<anyhow::Result<Vec<(Option<PathBuf>, BufferSnapshot, usize)>>> {
    let empty_query = queries.is_empty() || queries.iter().all(|query| query.trim().is_empty());
    let queries = queries.to_owned();
-    window.spawn(cx, |mut cx| async move {
+    window.spawn(cx, async move |cx| {
        let mut open_buffers =
            workspace
                .context("no workspace")?
-                .update(&mut cx, |workspace, cx| {
+                .update(cx, |workspace, cx| {
                    if strict_match && empty_query {
                        let snapshot = active_item_buffer(workspace, cx)?;
                        let full_path = snapshot.resolve_file_path(cx, true);