Compare commits
356 Commits
lipstick
...
ssh-remote
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
7ffa87e566 | ||
|
|
f7a532add8 | ||
|
|
3a67fb68da | ||
|
|
dd7105ff38 | ||
|
|
15dfb47b17 | ||
|
|
0f57932945 | ||
|
|
42c96212c4 | ||
|
|
0fd5030297 | ||
|
|
46ecd7d190 | ||
|
|
88b03bc074 | ||
|
|
db4ff7da6b | ||
|
|
fb35f15526 | ||
|
|
78120cc568 | ||
|
|
4ddf2cbb9f | ||
|
|
69e76a3bb9 | ||
|
|
80c25960dd | ||
|
|
26f2369fa6 | ||
|
|
b19356ac69 | ||
|
|
7523a7a437 | ||
|
|
abc712014a | ||
|
|
e7c8dba54f | ||
|
|
99d45ba694 | ||
|
|
1447a9d48c | ||
|
|
f45af17fd4 | ||
|
|
e1b05bf7a3 | ||
|
|
c0ea806afe | ||
|
|
1404e328cf | ||
|
|
8ea8e81c86 | ||
|
|
e1c42a5c85 | ||
|
|
e17a5c1412 | ||
|
|
20f85b946d | ||
|
|
abb5800d20 | ||
|
|
4e2b08b909 | ||
|
|
c697eaba82 | ||
|
|
93642c9c51 | ||
|
|
25cdd2ad25 | ||
|
|
182b7af299 | ||
|
|
72b5cda356 | ||
|
|
912ed20a3b | ||
|
|
3d94ed3242 | ||
|
|
3a593fe803 | ||
|
|
f08be779c0 | ||
|
|
278864e19f | ||
|
|
9245015d1a | ||
|
|
b7a66e4491 | ||
|
|
59dd7c9138 | ||
|
|
3c577e1a42 | ||
|
|
bb725d3158 | ||
|
|
5250866c1a | ||
|
|
1ae96025f5 | ||
|
|
6b9fa68dc5 | ||
|
|
db0c1fd592 | ||
|
|
1e39d407c2 | ||
|
|
61ca36ecad | ||
|
|
eb9eae09b1 | ||
|
|
136f75ee9a | ||
|
|
1f8fa82ac3 | ||
|
|
8895084604 | ||
|
|
1abbe9c65d | ||
|
|
ec98e71190 | ||
|
|
1d986b0c77 | ||
|
|
feab1261c8 | ||
|
|
406d3b413d | ||
|
|
643d60f551 | ||
|
|
0229d3ccac | ||
|
|
f85ca387a7 | ||
|
|
96bcceed40 | ||
|
|
2ad9a742dd | ||
|
|
9f0438b540 | ||
|
|
d274be67d6 | ||
|
|
19f0c4af6d | ||
|
|
09c698d8d7 | ||
|
|
8a5fcc2c22 | ||
|
|
28568429aa | ||
|
|
f1778dd9de | ||
|
|
36d51fe4a5 | ||
|
|
c4c07583c3 | ||
|
|
a82cc80d1d | ||
|
|
a3672d96d4 | ||
|
|
a1438a49df | ||
|
|
d2a7caa84b | ||
|
|
ff7017c308 | ||
|
|
85731dfe8e | ||
|
|
8381b06d2d | ||
|
|
a89844bcc9 | ||
|
|
5c0d800b21 | ||
|
|
ebcc2945c5 | ||
|
|
e884d0060e | ||
|
|
0c980cde74 | ||
|
|
9951df7709 | ||
|
|
936466e02c | ||
|
|
1eb1e16954 | ||
|
|
b67404323c | ||
|
|
c251a50e41 | ||
|
|
e482fcde5b | ||
|
|
f185269d03 | ||
|
|
1f0dc8b754 | ||
|
|
395a68133d | ||
|
|
77c08fade5 | ||
|
|
f7f7cd5bb9 | ||
|
|
6f5674691c | ||
|
|
8993a9f2ee | ||
|
|
9f66f12f7b | ||
|
|
3eb5488c63 | ||
|
|
30bfa56a24 | ||
|
|
0042c24d3c | ||
|
|
971db5c6f6 | ||
|
|
b5bd8a5c5d | ||
|
|
41fc6d0885 | ||
|
|
90897707c3 | ||
|
|
7fbea39566 | ||
|
|
037cf1393c | ||
|
|
69aae2037d | ||
|
|
bac8e81e73 | ||
|
|
0bea4d5fa6 | ||
|
|
4dec7806cb | ||
|
|
de41c151c8 | ||
|
|
56f1ab9459 | ||
|
|
911112d94a | ||
|
|
e68b2d5ecc | ||
|
|
f651333896 | ||
|
|
14fa4abce4 | ||
|
|
8a320668ed | ||
|
|
86efde4b76 | ||
|
|
43e13df9f3 | ||
|
|
b9176fe4bb | ||
|
|
11753914d7 | ||
|
|
6f93b42ecb | ||
|
|
10a996cbc4 | ||
|
|
5e6e465294 | ||
|
|
8841d6faad | ||
|
|
c9c5eef8f2 | ||
|
|
7c268d0c6d | ||
|
|
e4a591dcbd | ||
|
|
07d5e22cbe | ||
|
|
ebecd7e65f | ||
|
|
18f0626e08 | ||
|
|
3d997e5fd6 | ||
|
|
1b1070e0f7 | ||
|
|
9ef3306f55 | ||
|
|
0fdc9d0f05 | ||
|
|
907d76208d | ||
|
|
ae9e6a9daa | ||
|
|
e36e605c96 | ||
|
|
35cd397a40 | ||
|
|
2180dbdb50 | ||
|
|
23d56a1a84 | ||
|
|
a9441879c3 | ||
|
|
6cfbb54ede | ||
|
|
7a5acc0b0c | ||
|
|
463ac7f5e4 | ||
|
|
ee27114b35 | ||
|
|
ebac9a7342 | ||
|
|
455850505f | ||
|
|
a3a6ebcf31 | ||
|
|
b4f5f5024e | ||
|
|
7eab57a264 | ||
|
|
f1a2638d29 | ||
|
|
0fe22f2a48 | ||
|
|
f7f5a25584 | ||
|
|
40d97fd346 | ||
|
|
ad44b459cd | ||
|
|
bac39d7743 | ||
|
|
46fb917e02 | ||
|
|
09b6e3f2a6 | ||
|
|
364a58a262 | ||
|
|
c896ff292c | ||
|
|
da2bfbd29f | ||
|
|
0b407164d0 | ||
|
|
b151241d84 | ||
|
|
f65b2b9a2d | ||
|
|
776442f3ae | ||
|
|
583959f82a | ||
|
|
9233418cb8 | ||
|
|
df20bae80e | ||
|
|
cb423bcb6f | ||
|
|
5e05821d18 | ||
|
|
ff83e5b55a | ||
|
|
6b7664ef4a | ||
|
|
931883aca9 | ||
|
|
e982ff7b9e | ||
|
|
0b3e5b2649 | ||
|
|
7434b56e68 | ||
|
|
b764174e8b | ||
|
|
c45adce2e3 | ||
|
|
5a30e29848 | ||
|
|
03b843ebf3 | ||
|
|
02ea6ac845 | ||
|
|
d54818fd9e | ||
|
|
add0f0dbe5 | ||
|
|
506b7198c2 | ||
|
|
e10f2d8deb | ||
|
|
3c8d890702 | ||
|
|
05f7583444 | ||
|
|
e6ed97051d | ||
|
|
e0cabbd142 | ||
|
|
102796979b | ||
|
|
4c390b82fb | ||
|
|
1117d89057 | ||
|
|
0df4d12234 | ||
|
|
796cba9e0e | ||
|
|
a6461f90a1 | ||
|
|
2e1750d5e2 | ||
|
|
18aff55f34 | ||
|
|
ec062cfe66 | ||
|
|
e39671cad5 | ||
|
|
ffaecbaa0c | ||
|
|
7a693235a5 | ||
|
|
177aa7d9c0 | ||
|
|
271e774713 | ||
|
|
e8bae839ed | ||
|
|
b55e8383c8 | ||
|
|
ccd8f75cff | ||
|
|
66e750eea2 | ||
|
|
04ee5e3e6e | ||
|
|
8ad7d69378 | ||
|
|
8df21f7bcd | ||
|
|
6365000b68 | ||
|
|
6713e40875 | ||
|
|
793a90c3e1 | ||
|
|
340a1d145e | ||
|
|
caf222a71d | ||
|
|
f5532afaa7 | ||
|
|
8d9dcf1e78 | ||
|
|
aa31f9ded0 | ||
|
|
f8a72b5d0a | ||
|
|
8fe2de1737 | ||
|
|
88a12b60a9 | ||
|
|
0eb96c72e3 | ||
|
|
55563831c5 | ||
|
|
e28681c27e | ||
|
|
b7dcd4e4d6 | ||
|
|
8b8335f449 | ||
|
|
97469cd049 | ||
|
|
5cb4de4ec6 | ||
|
|
47628515e1 | ||
|
|
c3edbd7d9a | ||
|
|
aa12ae0e3c | ||
|
|
c6a1d9aa33 | ||
|
|
07c21bb8ee | ||
|
|
2f5031bd28 | ||
|
|
170ad46f5e | ||
|
|
a81e355dc5 | ||
|
|
bd71e9192c | ||
|
|
7eeb37262a | ||
|
|
d4761a3296 | ||
|
|
ac30ed0754 | ||
|
|
b1a581e81b | ||
|
|
e3b0de5dda | ||
|
|
03796e79b0 | ||
|
|
fa51651d06 | ||
|
|
7aed240729 | ||
|
|
7b613cb169 | ||
|
|
8a9c58e515 | ||
|
|
0dbecee03f | ||
|
|
82529499df | ||
|
|
4450ebff6b | ||
|
|
98a2ab0686 | ||
|
|
47eed12f77 | ||
|
|
cade9fbd3d | ||
|
|
fe190359d5 | ||
|
|
ab98f16280 | ||
|
|
ac6bff12b9 | ||
|
|
081cbcebd9 | ||
|
|
c2b254a67a | ||
|
|
af36d4934c | ||
|
|
b36d1386a9 | ||
|
|
ee6a40137f | ||
|
|
1c189e82a0 | ||
|
|
a515442a36 | ||
|
|
e662bfc74f | ||
|
|
b4c22cc861 | ||
|
|
0bc9fc9487 | ||
|
|
dbcd06642c | ||
|
|
68ae347077 | ||
|
|
f956257638 | ||
|
|
8a148f3a13 | ||
|
|
3bebb8b401 | ||
|
|
98516b5527 | ||
|
|
f398ecc3fb | ||
|
|
a3c79218c4 | ||
|
|
1674e12ccb | ||
|
|
f3ec8d425f | ||
|
|
ebdde5994d | ||
|
|
df70e901af | ||
|
|
63aef7f798 | ||
|
|
9ae0d5388e | ||
|
|
ebdb755fef | ||
|
|
fbb533b3e0 | ||
|
|
98f314ba21 | ||
|
|
b6b081596a | ||
|
|
fc64843dd5 | ||
|
|
f952126319 | ||
|
|
f992cfdc7f | ||
|
|
6389c613a2 | ||
|
|
bab4da78b7 | ||
|
|
a15a9565ab | ||
|
|
48f6193628 | ||
|
|
355aebd0e4 | ||
|
|
10937c6e37 | ||
|
|
4818e0b7eb | ||
|
|
2490b050ee | ||
|
|
e99113e4cb | ||
|
|
3140d6ce8c | ||
|
|
492f6b9cdf | ||
|
|
36a560dcd0 | ||
|
|
6f104fecad | ||
|
|
550e139b61 | ||
|
|
33e120d964 | ||
|
|
8688b2ad19 | ||
|
|
423c7b999a | ||
|
|
0932818829 | ||
|
|
fbebb73d7b | ||
|
|
d96afde5bf | ||
|
|
b1c69c2178 | ||
|
|
eb3c4b0e46 | ||
|
|
c633fa5a10 | ||
|
|
46a4dd3ab1 | ||
|
|
49f760eeda | ||
|
|
225726ba4a | ||
|
|
c1872e9cb0 | ||
|
|
09c9ed4765 | ||
|
|
19d8422933 | ||
|
|
173f6e7c8f | ||
|
|
e783142528 | ||
|
|
06833d7360 | ||
|
|
7a600411cf | ||
|
|
240b7c641c | ||
|
|
06625bfe94 | ||
|
|
2bc503771b | ||
|
|
5b8ce91048 | ||
|
|
abb6d40fbf | ||
|
|
bd59af1df5 | ||
|
|
b7d6b0a096 | ||
|
|
b28507d2e0 | ||
|
|
a39f1f5133 | ||
|
|
fbc629df7d | ||
|
|
8ba5207c6c | ||
|
|
c3cfaade7d | ||
|
|
8681eeb0e2 | ||
|
|
8f9bcbe739 | ||
|
|
9a211b239c | ||
|
|
a71bfd41cc | ||
|
|
73fb8277fc | ||
|
|
514b79e461 | ||
|
|
793cd88792 | ||
|
|
92496f33e7 | ||
|
|
b9159d98ea | ||
|
|
f3abb7e724 | ||
|
|
389cb86e43 | ||
|
|
fea8f16df0 | ||
|
|
76d58ac295 | ||
|
|
e69b0833aa | ||
|
|
da8d1306af | ||
|
|
a5961c8d45 | ||
|
|
e9ddca1075 |
@@ -3,6 +3,15 @@ export default {
|
||||
const url = new URL(request.url);
|
||||
url.hostname = "docs-anw.pages.dev";
|
||||
|
||||
// These pages were removed, but may still be served due to Cloudflare's
|
||||
// [asset retention](https://developers.cloudflare.com/pages/configuration/serving-pages/#asset-retention).
|
||||
if (
|
||||
url.pathname === "/docs/assistant/context-servers" ||
|
||||
url.pathname === "/docs/assistant/model-context-protocol"
|
||||
) {
|
||||
return await fetch("https://zed.dev/404");
|
||||
}
|
||||
|
||||
let res = await fetch(url, request);
|
||||
|
||||
if (res.status === 404) {
|
||||
|
||||
6
.github/pull_request_template.md
vendored
@@ -1,13 +1,15 @@
|
||||
|
||||
Closes #ISSUE
|
||||
|
||||
Release Notes:
|
||||
|
||||
- Added/Fixed/Improved ... ([#NNNNN](https://github.com/zed-industries/zed/issues/NNNNN)).
|
||||
- Added/Fixed/Improved ...
|
||||
|
||||
Optionally, include screenshots / media showcasing your addition that can be included in the release notes.
|
||||
|
||||
### Or...
|
||||
|
||||
Closes #ISSUE
|
||||
|
||||
Release Notes:
|
||||
|
||||
- N/A
|
||||
|
||||
15
.github/workflows/ci.yml
vendored
@@ -167,6 +167,7 @@ jobs:
|
||||
APPLE_NOTARIZATION_USERNAME: ${{ secrets.APPLE_NOTARIZATION_USERNAME }}
|
||||
APPLE_NOTARIZATION_PASSWORD: ${{ secrets.APPLE_NOTARIZATION_PASSWORD }}
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
|
||||
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
|
||||
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
|
||||
steps:
|
||||
@@ -231,20 +232,20 @@ jobs:
|
||||
mv target/x86_64-apple-darwin/release/Zed.dmg target/x86_64-apple-darwin/release/Zed-x86_64.dmg
|
||||
|
||||
- name: Upload app bundle (universal) to workflow run if main branch or specific label
|
||||
uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4
|
||||
uses: actions/upload-artifact@834a144ee995460fba8ed112a2fc961b36a5ec5a # v4
|
||||
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
|
||||
with:
|
||||
name: Zed_${{ github.event.pull_request.head.sha || github.sha }}.dmg
|
||||
path: target/release/Zed.dmg
|
||||
- name: Upload app bundle (aarch64) to workflow run if main branch or specific label
|
||||
uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4
|
||||
uses: actions/upload-artifact@834a144ee995460fba8ed112a2fc961b36a5ec5a # v4
|
||||
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
|
||||
with:
|
||||
name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.dmg
|
||||
path: target/aarch64-apple-darwin/release/Zed-aarch64.dmg
|
||||
|
||||
- name: Upload app bundle (x86_64) to workflow run if main branch or specific label
|
||||
uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4
|
||||
uses: actions/upload-artifact@834a144ee995460fba8ed112a2fc961b36a5ec5a # v4
|
||||
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
|
||||
with:
|
||||
name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.dmg
|
||||
@@ -276,6 +277,7 @@ jobs:
|
||||
needs: [linux_tests]
|
||||
env:
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
|
||||
steps:
|
||||
- name: Add Rust to the PATH
|
||||
run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH
|
||||
@@ -319,7 +321,7 @@ jobs:
|
||||
run: script/bundle-linux
|
||||
|
||||
- name: Upload Linux bundle to workflow run if main branch or specific label
|
||||
uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4
|
||||
uses: actions/upload-artifact@834a144ee995460fba8ed112a2fc961b36a5ec5a # v4
|
||||
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
|
||||
with:
|
||||
name: zed-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz
|
||||
@@ -346,6 +348,7 @@ jobs:
|
||||
needs: [linux_tests]
|
||||
env:
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
|
||||
@@ -360,7 +363,7 @@ jobs:
|
||||
sudo apt-get install -y llvm-10 clang-10 build-essential cmake pkg-config libasound2-dev libfontconfig-dev libwayland-dev libxkbcommon-x11-dev libssl-dev libsqlite3-dev libzstd-dev libvulkan1 libgit2-dev
|
||||
echo "/usr/lib/llvm-10/bin" >> $GITHUB_PATH
|
||||
|
||||
- uses: rui314/setup-mold@2e332a0b602c2fc65d2d3995941b1b29a5f554a0 # v1
|
||||
- uses: rui314/setup-mold@0bf4f07ef9048ec62a45f9dbf2f098afa49695f0 # v1
|
||||
with:
|
||||
mold-version: 2.32.0
|
||||
|
||||
@@ -403,7 +406,7 @@ jobs:
|
||||
run: script/bundle-linux
|
||||
|
||||
- name: Upload Linux bundle to workflow run if main branch or specific label
|
||||
uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4
|
||||
uses: actions/upload-artifact@834a144ee995460fba8ed112a2fc961b36a5ec5a # v4
|
||||
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
|
||||
with:
|
||||
name: zed-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz
|
||||
|
||||
2
.github/workflows/release_actions.yml
vendored
@@ -16,7 +16,7 @@ jobs:
|
||||
fi
|
||||
echo "::set-output name=URL::$URL"
|
||||
- name: Get content
|
||||
uses: 2428392/gh-truncate-string-action@67b1b814955634208b103cff064be3cb1c7a19be # v1.3.0
|
||||
uses: 2428392/gh-truncate-string-action@e6b5885fb83c81ca9a700a91b079baec2133be3e # v1.4.0
|
||||
id: get-content
|
||||
with:
|
||||
stringToTruncate: |
|
||||
|
||||
5
.github/workflows/release_nightly.yml
vendored
@@ -67,6 +67,7 @@ jobs:
|
||||
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
|
||||
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
|
||||
steps:
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4
|
||||
@@ -106,6 +107,7 @@ jobs:
|
||||
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
|
||||
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
|
||||
@@ -139,6 +141,7 @@ jobs:
|
||||
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
|
||||
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
|
||||
@@ -154,7 +157,7 @@ jobs:
|
||||
sudo apt-get install -y llvm-10 clang-10 build-essential cmake pkg-config libasound2-dev libfontconfig-dev libwayland-dev libxkbcommon-x11-dev libssl-dev libsqlite3-dev libzstd-dev libvulkan1 libgit2-dev
|
||||
echo "/usr/lib/llvm-10/bin" >> $GITHUB_PATH
|
||||
|
||||
- uses: rui314/setup-mold@2e332a0b602c2fc65d2d3995941b1b29a5f554a0 # v1
|
||||
- uses: rui314/setup-mold@0bf4f07ef9048ec62a45f9dbf2f098afa49695f0 # v1
|
||||
with:
|
||||
mold-version: 2.32.0
|
||||
|
||||
|
||||
3
.mailmap
@@ -24,7 +24,8 @@ Conrad Irwin <conrad@zed.dev>
|
||||
Conrad Irwin <conrad@zed.dev> <conrad.irwin@gmail.com>
|
||||
Danilo Leal <danilo@zed.dev>
|
||||
Danilo Leal <danilo@zed.dev> <67129314+danilo-leal@users.noreply.github.com>
|
||||
Evren Sen <146845123+evrsen@users.noreply.github.com>
|
||||
Evren Sen <146845123+evrensen467@users.noreply.github.com>
|
||||
Evren Sen <146845123+evrensen467@users.noreply.github.com> <146845123+evrsen@users.noreply.github.com>
|
||||
Fernando Tagawa <tagawafernando@gmail.com>
|
||||
Fernando Tagawa <tagawafernando@gmail.com> <fernando.tagawa.gamail.com@gmail.com>
|
||||
Greg Morenz <greg-morenz@droid.cafe>
|
||||
|
||||
@@ -3,5 +3,10 @@
|
||||
"label": "clippy",
|
||||
"command": "./script/clippy",
|
||||
"args": []
|
||||
},
|
||||
{
|
||||
"label": "cargo run --profile release-fast",
|
||||
"command": "cargo",
|
||||
"args": ["run", "--profile", "release-fast"]
|
||||
}
|
||||
]
|
||||
|
||||
2489
Cargo.lock
generated
21
Cargo.toml
@@ -19,6 +19,7 @@ members = [
|
||||
"crates/collections",
|
||||
"crates/command_palette",
|
||||
"crates/command_palette_hooks",
|
||||
"crates/context_servers",
|
||||
"crates/copilot",
|
||||
"crates/db",
|
||||
"crates/dev_server_projects",
|
||||
@@ -69,6 +70,7 @@ members = [
|
||||
"crates/outline",
|
||||
"crates/outline_panel",
|
||||
"crates/paths",
|
||||
"crates/performance",
|
||||
"crates/picker",
|
||||
"crates/prettier",
|
||||
"crates/project",
|
||||
@@ -144,10 +146,12 @@ members = [
|
||||
"extensions/lua",
|
||||
"extensions/ocaml",
|
||||
"extensions/php",
|
||||
"extensions/perplexity",
|
||||
"extensions/prisma",
|
||||
"extensions/purescript",
|
||||
"extensions/ruff",
|
||||
"extensions/ruby",
|
||||
"extensions/slash-commands-example",
|
||||
"extensions/snippets",
|
||||
"extensions/svelte",
|
||||
"extensions/terraform",
|
||||
@@ -189,6 +193,7 @@ collab_ui = { path = "crates/collab_ui" }
|
||||
collections = { path = "crates/collections" }
|
||||
command_palette = { path = "crates/command_palette" }
|
||||
command_palette_hooks = { path = "crates/command_palette_hooks" }
|
||||
context_servers = { path = "crates/context_servers" }
|
||||
copilot = { path = "crates/copilot" }
|
||||
db = { path = "crates/db" }
|
||||
dev_server_projects = { path = "crates/dev_server_projects" }
|
||||
@@ -238,6 +243,7 @@ open_ai = { path = "crates/open_ai" }
|
||||
outline = { path = "crates/outline" }
|
||||
outline_panel = { path = "crates/outline_panel" }
|
||||
paths = { path = "crates/paths" }
|
||||
performance = { path = "crates/performance" }
|
||||
picker = { path = "crates/picker" }
|
||||
plugin = { path = "crates/plugin" }
|
||||
plugin_macros = { path = "crates/plugin_macros" }
|
||||
@@ -299,7 +305,7 @@ zed_actions = { path = "crates/zed_actions" }
|
||||
#
|
||||
|
||||
aho-corasick = "1.1"
|
||||
alacritty_terminal = { git = "https://github.com/alacritty/alacritty", rev = "cacdb5bb3b72bad2c729227537979d95af75978f" }
|
||||
alacritty_terminal = { git = "https://github.com/alacritty/alacritty", rev = "91d034ff8b53867143c005acfaa14609147c9a2c" }
|
||||
any_vec = "0.14"
|
||||
anyhow = "1.0.86"
|
||||
ashpd = "0.9.1"
|
||||
@@ -315,9 +321,9 @@ async-watch = "0.3.1"
|
||||
async_zip = { version = "0.0.17", features = ["deflate", "deflate64"] }
|
||||
base64 = "0.22"
|
||||
bitflags = "2.6.0"
|
||||
blade-graphics = { git = "https://github.com/kvark/blade", rev = "ac25c77ed8d86c386a541c935ffe0a0f6024e701" }
|
||||
blade-macros = { git = "https://github.com/kvark/blade", rev = "ac25c77ed8d86c386a541c935ffe0a0f6024e701" }
|
||||
blade-util = { git = "https://github.com/kvark/blade", rev = "ac25c77ed8d86c386a541c935ffe0a0f6024e701" }
|
||||
blade-graphics = { git = "https://github.com/kvark/blade", rev = "7f54ddfc001edc48225e6602d3c38ebb855421aa" }
|
||||
blade-macros = { git = "https://github.com/kvark/blade", rev = "7f54ddfc001edc48225e6602d3c38ebb855421aa" }
|
||||
blade-util = { git = "https://github.com/kvark/blade", rev = "7f54ddfc001edc48225e6602d3c38ebb855421aa" }
|
||||
cargo_metadata = "0.18"
|
||||
cargo_toml = "0.20"
|
||||
chrono = { version = "0.4", features = ["serde"] }
|
||||
@@ -353,7 +359,6 @@ isahc = { version = "1.7.2", default-features = false, features = [
|
||||
] }
|
||||
itertools = "0.11.0"
|
||||
jsonwebtoken = "9.3"
|
||||
lazy_static = "1.4.0"
|
||||
libc = "0.2"
|
||||
linkify = "0.10.0"
|
||||
log = { version = "0.4.16", features = ["kv_unstable_serde", "serde"] }
|
||||
@@ -377,13 +382,13 @@ rand = "0.8.5"
|
||||
regex = "1.5"
|
||||
repair_json = "0.1.0"
|
||||
rsa = "0.9.6"
|
||||
runtimelib = { version = "0.14", default-features = false, features = [
|
||||
runtimelib = { version = "0.15", default-features = false, features = [
|
||||
"async-dispatcher-runtime",
|
||||
] }
|
||||
rusqlite = { version = "0.29.0", features = ["blob", "array", "modern_sqlite"] }
|
||||
rustc-demangle = "0.1.23"
|
||||
rust-embed = { version = "8.4", features = ["include-exclude"] }
|
||||
schemars = {version = "0.8", features = ["impl_json_schema"]}
|
||||
schemars = { version = "0.8", features = ["impl_json_schema"] }
|
||||
semver = "1.0"
|
||||
serde = { version = "1.0", features = ["derive", "rc"] }
|
||||
serde_derive = { version = "1.0", features = ["deserialize_in_place"] }
|
||||
@@ -461,7 +466,7 @@ which = "6.0.0"
|
||||
wit-component = "0.201"
|
||||
|
||||
[workspace.dependencies.async-stripe]
|
||||
version = "0.37"
|
||||
version = "0.38"
|
||||
default-features = false
|
||||
features = [
|
||||
"runtime-tokio-hyper-rustls",
|
||||
|
||||
2
Procfile.postgrest
Normal file
@@ -0,0 +1,2 @@
|
||||
app: postgrest crates/collab/postgrest_app.conf
|
||||
llm: postgrest crates/collab/postgrest_llm.conf
|
||||
@@ -1,4 +1,11 @@
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M3.43331 10.1846L6.66616 2.33334L9.89902 10.1846M3.43331 10.1846L1.9995 13.6667M3.43331 10.1846H9.89902M11.3328 13.6667L9.89902 10.1846" stroke="black" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M14.0613 13.647L9.34721 2.33334" stroke="black" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<g clip-path="url(#clip0_1896_18)">
|
||||
<path d="M11.094 3.09999H8.952L12.858 12.9H15L11.094 3.09999Z" fill="#1F1F1E"/>
|
||||
<path d="M4.906 3.09999L1 12.9H3.184L3.98284 10.842H8.06915L8.868 12.9H11.052L7.146 3.09999H4.906ZM4.68928 9.02199L6.026 5.57799L7.3627 9.02199H4.68928Z" fill="#1F1F1E"/>
|
||||
</g>
|
||||
<defs>
|
||||
<clipPath id="clip0_1896_18">
|
||||
<rect width="14" height="9.8" fill="white" transform="translate(1 3.09999)"/>
|
||||
</clipPath>
|
||||
</defs>
|
||||
</svg>
|
||||
|
||||
|
Before Width: | Height: | Size: 459 B After Width: | Height: | Size: 530 B |
12
assets/icons/ai_anthropic_hosted.svg
Normal file
@@ -0,0 +1,12 @@
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<rect width="16" height="16" rx="2" fill="black" fill-opacity="0.2"/>
|
||||
<g clip-path="url(#clip0_1916_18)">
|
||||
<path d="M10.652 3.79999H8.816L12.164 12.2H14L10.652 3.79999Z" fill="#1F1F1E"/>
|
||||
<path d="M5.348 3.79999L2 12.2H3.872L4.55672 10.436H8.05927L8.744 12.2H10.616L7.268 3.79999H5.348ZM5.16224 8.87599L6.308 5.92399L7.45374 8.87599H5.16224Z" fill="#1F1F1E"/>
|
||||
</g>
|
||||
<defs>
|
||||
<clipPath id="clip0_1916_18">
|
||||
<rect width="12" height="8.4" fill="white" transform="translate(2 3.79999)"/>
|
||||
</clipPath>
|
||||
</defs>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 601 B |
@@ -1,3 +1,3 @@
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M14.8695 8.16639C14.8695 12.2258 12.0896 15.1147 7.98425 15.1147C4.04818 15.1147 0.869492 11.9361 0.869492 7.99999C0.869492 4.06393 4.04818 0.885239 7.98425 0.885239C9.90064 0.885239 11.5129 1.58811 12.7551 2.74712L10.8187 4.60901C8.28547 2.16475 3.57482 4.00081 3.57482 7.99999C3.57482 10.4816 5.5572 12.4926 7.98425 12.4926C10.8015 12.4926 11.8572 10.4729 12.0236 9.42581H7.98425V6.97868H14.7576C14.8236 7.34303 14.8695 7.69303 14.8695 8.16639Z" fill="black"/>
|
||||
<path d="M13.8695 8.14262C13.8695 11.6221 11.4867 14.0984 7.96785 14.0984C4.59408 14.0984 1.86949 11.3738 1.86949 7.99999C1.86949 4.62622 4.59408 1.90163 7.96785 1.90163C9.61048 1.90163 10.9924 2.50409 12.0572 3.49754L10.3974 5.09344C8.22605 2.99836 4.18834 4.57213 4.18834 7.99999C4.18834 10.127 5.88752 11.8508 7.96785 11.8508C10.3826 11.8508 11.2875 10.1197 11.4301 9.22213H7.96785V7.12458H13.7736C13.8301 7.43688 13.8695 7.73688 13.8695 8.14262Z" fill="black"/>
|
||||
</svg>
|
||||
|
||||
|
Before Width: | Height: | Size: 575 B After Width: | Height: | Size: 569 B |
|
Before Width: | Height: | Size: 1.1 KiB After Width: | Height: | Size: 8.5 KiB |
1
assets/icons/database_zap.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-database-zap"><ellipse cx="12" cy="5" rx="9" ry="3"/><path d="M3 5V19A9 3 0 0 0 15 21.84"/><path d="M21 5V8"/><path d="M21 12L18 17H22L19 22"/><path d="M3 12A9 3 0 0 0 14.59 14.87"/></svg>
|
||||
|
After Width: | Height: | Size: 391 B |
@@ -1 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-text-search"><path d="M21 6H3"/><path d="M10 12H3"/><path d="M10 18H3"/><circle cx="17" cy="15" r="3"/><path d="m21 19-1.9-1.9"/></svg>
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-ellipsis-vertical"><circle cx="12" cy="12" r="1"/><circle cx="12" cy="5" r="1"/><circle cx="12" cy="19" r="1"/></svg>
|
||||
|
Before Width: | Height: | Size: 338 B After Width: | Height: | Size: 320 B |
@@ -1,10 +0,0 @@
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M3 13L7.01562 8.98438" stroke="black" stroke-width="1.5" stroke-linecap="round"/>
|
||||
<path d="M8.6875 7.3125L9.5 6.5" stroke="black" stroke-width="1.5" stroke-linecap="round"/>
|
||||
<path d="M7 5V3" stroke="black" stroke-width="1.5" stroke-linecap="round"/>
|
||||
<path d="M12 5V3" stroke="black" stroke-width="1.5" stroke-linecap="round"/>
|
||||
<path d="M12 10V8" stroke="black" stroke-width="1.5" stroke-linecap="round"/>
|
||||
<path d="M6 4L8 4" stroke="black" stroke-width="1.5" stroke-linecap="round"/>
|
||||
<path d="M11 4L13 4" stroke="black" stroke-width="1.5" stroke-linecap="round"/>
|
||||
<path d="M11 9L13 9" stroke="black" stroke-width="1.5" stroke-linecap="round"/>
|
||||
</svg>
|
||||
|
Before Width: | Height: | Size: 787 B |
1
assets/icons/microscope.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-microscope"><path d="M6 18h8"/><path d="M3 22h18"/><path d="M14 22a7 7 0 1 0 0-14h-1"/><path d="M9 14h2"/><path d="M9 12a2 2 0 0 1-2-2V6h6v4a2 2 0 0 1-2 2Z"/><path d="M12 6V3a1 1 0 0 0-1-1H9a1 1 0 0 0-1 1v3"/></svg>
|
||||
|
After Width: | Height: | Size: 418 B |
1
assets/icons/search_code.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-search-code"><path d="m13 13.5 2-2.5-2-2.5"/><path d="m21 21-4.3-4.3"/><path d="M9 8.5 7 11l2 2.5"/><circle cx="11" cy="11" r="8"/></svg>
|
||||
|
After Width: | Height: | Size: 340 B |
1
assets/icons/slash.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-slash"><path d="M22 2 2 22"/></svg>
|
||||
|
After Width: | Height: | Size: 238 B |
1
assets/icons/slash_square.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-square-slash"><rect width="18" height="18" x="3" y="3" rx="2"/><line x1="9" x2="15" y1="15" y2="9"/></svg>
|
||||
|
After Width: | Height: | Size: 309 B |
1
assets/icons/text_select.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-text-select"><path d="M5 3a2 2 0 0 0-2 2"/><path d="M19 3a2 2 0 0 1 2 2"/><path d="M21 19a2 2 0 0 1-2 2"/><path d="M5 21a2 2 0 0 1-2-2"/><path d="M9 3h1"/><path d="M9 21h1"/><path d="M14 3h1"/><path d="M14 21h1"/><path d="M3 9v1"/><path d="M21 9v1"/><path d="M3 14v1"/><path d="M21 14v1"/><line x1="7" x2="15" y1="8" y2="8"/><line x1="7" x2="17" y1="12" y2="12"/><line x1="7" x2="13" y1="16" y2="16"/></svg>
|
||||
|
After Width: | Height: | Size: 610 B |
1
assets/icons/undo.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-undo"><path d="M3 7v6h6"/><path d="M21 17a9 9 0 0 0-9-9 9 9 0 0 0-6 2.3L3 13"/></svg>
|
||||
|
After Width: | Height: | Size: 288 B |
@@ -437,7 +437,7 @@
|
||||
"context": "Editor && showing_completions",
|
||||
"bindings": {
|
||||
"enter": "editor::ConfirmCompletion",
|
||||
"tab": "editor::ConfirmCompletion"
|
||||
"tab": "editor::ComposeCompletion"
|
||||
}
|
||||
},
|
||||
{
|
||||
|
||||
@@ -474,7 +474,7 @@
|
||||
"context": "Editor && showing_completions",
|
||||
"bindings": {
|
||||
"enter": "editor::ConfirmCompletion",
|
||||
"tab": "editor::ConfirmCompletion"
|
||||
"tab": "editor::ComposeCompletion"
|
||||
}
|
||||
},
|
||||
{
|
||||
|
||||
@@ -4,7 +4,6 @@
|
||||
"bindings": {
|
||||
"i": ["vim::PushOperator", { "Object": { "around": false } }],
|
||||
"a": ["vim::PushOperator", { "Object": { "around": true } }],
|
||||
":": "command_palette::Toggle",
|
||||
"h": "vim::Left",
|
||||
"left": "vim::Left",
|
||||
"backspace": "vim::Backspace",
|
||||
@@ -93,6 +92,7 @@
|
||||
"g y": "editor::GoToTypeDefinition",
|
||||
"g shift-i": "editor::GoToImplementation",
|
||||
"g x": "editor::OpenUrl",
|
||||
"g f": "editor::OpenFile",
|
||||
"g n": "vim::SelectNextMatch",
|
||||
"g shift-n": "vim::SelectPreviousMatch",
|
||||
"g l": "vim::SelectNext",
|
||||
@@ -177,19 +177,19 @@
|
||||
"ctrl-w ctrl-p": "workspace::ActivatePreviousPane",
|
||||
"ctrl-w shift-w": "workspace::ActivatePreviousPane",
|
||||
"ctrl-w ctrl-shift-w": "workspace::ActivatePreviousPane",
|
||||
"ctrl-w v": "pane::SplitLeft",
|
||||
"ctrl-w ctrl-v": "pane::SplitLeft",
|
||||
"ctrl-w s": "pane::SplitUp",
|
||||
"ctrl-w shift-s": "pane::SplitUp",
|
||||
"ctrl-w ctrl-s": "pane::SplitUp",
|
||||
"ctrl-w v": "pane::SplitVertical",
|
||||
"ctrl-w ctrl-v": "pane::SplitVertical",
|
||||
"ctrl-w s": "pane::SplitHorizontal",
|
||||
"ctrl-w shift-s": "pane::SplitHorizontal",
|
||||
"ctrl-w ctrl-s": "pane::SplitHorizontal",
|
||||
"ctrl-w c": "pane::CloseAllItems",
|
||||
"ctrl-w ctrl-c": "pane::CloseAllItems",
|
||||
"ctrl-w q": "pane::CloseAllItems",
|
||||
"ctrl-w ctrl-q": "pane::CloseAllItems",
|
||||
"ctrl-w o": "workspace::CloseInactiveTabsAndPanes",
|
||||
"ctrl-w ctrl-o": "workspace::CloseInactiveTabsAndPanes",
|
||||
"ctrl-w n": ["workspace::NewFileInDirection", "Up"],
|
||||
"ctrl-w ctrl-n": ["workspace::NewFileInDirection", "Up"],
|
||||
"ctrl-w n": "workspace::NewFileSplitHorizontal",
|
||||
"ctrl-w ctrl-n": "workspace::NewFileSplitHorizontal",
|
||||
"ctrl-w d": "editor::GoToDefinitionSplit",
|
||||
"ctrl-w g d": "editor::GoToDefinitionSplit",
|
||||
"ctrl-w shift-d": "editor::GoToTypeDefinitionSplit",
|
||||
@@ -199,17 +199,12 @@
|
||||
"ctrl-6": "pane::AlternateFile"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "VimControl && VimCount",
|
||||
"bindings": {
|
||||
"0": ["vim::Number", 0]
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "vim_mode == normal",
|
||||
"bindings": {
|
||||
"escape": "editor::Cancel",
|
||||
"ctrl-[": "editor::Cancel",
|
||||
":": "command_palette::Toggle",
|
||||
".": "vim::Repeat",
|
||||
"c": ["vim::PushOperator", "Change"],
|
||||
"shift-c": "vim::ChangeToEndOfLine",
|
||||
@@ -257,9 +252,17 @@
|
||||
"g c": ["vim::PushOperator", "ToggleComments"]
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "VimControl && VimCount",
|
||||
"bindings": {
|
||||
"0": ["vim::Number", 0],
|
||||
":": "vim::CountCommand"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "vim_mode == visual",
|
||||
"bindings": {
|
||||
":": "vim::VisualCommand",
|
||||
"u": "vim::ConvertToLowerCase",
|
||||
"U": "vim::ConvertToUpperCase",
|
||||
"o": "vim::OtherEnd",
|
||||
|
||||
@@ -32,7 +32,7 @@ Match the indentation in the original file in the inserted {{content_type}}, don
|
||||
Immediately start with the following format with no remarks:
|
||||
|
||||
```
|
||||
{{INSERTED_CODE}}
|
||||
\{{INSERTED_CODE}}
|
||||
```
|
||||
{{else}}
|
||||
Edit the section of {{content_type}} in <rewrite_this></rewrite_this> tags based on the following prompt:
|
||||
@@ -56,6 +56,6 @@ Start at the indentation level in the original file in the rewritten {{content_t
|
||||
Immediately start with the following format with no remarks:
|
||||
|
||||
```
|
||||
{{REWRITTEN_CODE}}
|
||||
\{{REWRITTEN_CODE}}
|
||||
```
|
||||
{{/if}}
|
||||
|
||||
@@ -1,87 +1,302 @@
|
||||
<workflow>
|
||||
Guide the user through code changes in numbered steps that focus on individual functions, type definitions, etc.
|
||||
Surround each distinct step in a <step></step> XML tag.
|
||||
Surround each distinct step in a <step></step> XML tag. The user will be performing these steps in a code editor
|
||||
named Zed, which is where they will have entered this prompt and will be seeing the response.
|
||||
|
||||
Here's an example of a user-assistant dialog with step numbers and a follow-up question in Rust:
|
||||
<instructions>
|
||||
- Use the language of the file for code fence blocks unless otherwise specified.
|
||||
- Include a code or file action in each step.
|
||||
- Only put code in separate steps if it should either go in separate files, or in different (non-contiguous) places in the same file.
|
||||
- Provide error handling and input validation where appropriate.
|
||||
- Adapt explanations based on the user's perceived level of expertise.
|
||||
- Include comments in code examples to enhance understanding.
|
||||
- Consider more complex programming scenarios when relevant.
|
||||
- Avoid using the terminal to perform filesystem operations such as creating, deleting, renaming, and editing files. Assume the user prefers to use Zed for these operations, unless the prompt suggests that the user is expecting terminal commands.
|
||||
- When creating a new file, Zed will automatically create any necessary directories in that file's path. So when a step will create a new file in Zed, don't mention needing to create the directory that it will go in.
|
||||
- Assume the user is performing these actions in the Zed code editor, so avoid redundancies like "In your code editor, ..." or "In Zed, ..."
|
||||
- Be concise without leaving out important information.
|
||||
</instructions>
|
||||
|
||||
1. User: I want to create a User struct with name and age fields, and a method to print the user's information. Can you help me with that?
|
||||
A: Certainly! Let's create a User struct with the specified fields and a method to print the information:
|
||||
<step>Define the User struct
|
||||
Examples of user-assistant dialogs with step numbers and user follow-up requests:
|
||||
|
||||
<example language="rust">
|
||||
<turn number="1">
|
||||
<user>Create a Library struct with a vector of Book structs. Include methods to add books and find books by author. Use generics for the book ID.</user>
|
||||
<assistant>Certainly! Let's create a Library struct with a vector of Book structs, including methods to add books and find books by author, using generics for the book ID:
|
||||
|
||||
<step>Define the Book and Library structs
|
||||
```rust
|
||||
struct User {
|
||||
name: String,
|
||||
age: u32,
|
||||
use std::fmt::Display;
|
||||
|
||||
struct Book<T>
|
||||
where
|
||||
T: Display + Clone
|
||||
{
|
||||
id: T,
|
||||
title: String,
|
||||
author: String,
|
||||
}
|
||||
|
||||
struct Library<T>
|
||||
where
|
||||
T: Display + Clone
|
||||
{
|
||||
books: Vec<Book<T>>,
|
||||
}
|
||||
```
|
||||
</step>
|
||||
<step>Implement methods for the User struct
|
||||
<step>Implement methods for the Library struct
|
||||
```rust
|
||||
impl User {
|
||||
fn new(name: String, age: u32) -> Self {
|
||||
User { name, age }
|
||||
impl<T> Library<T>
|
||||
where
|
||||
T: Display + Clone
|
||||
{
|
||||
fn new() -> Self {
|
||||
Library { books: Vec::new() }
|
||||
}
|
||||
fn print_info(&self) {
|
||||
println!("Name: {}, Age: {}", self.name, self.age);
|
||||
|
||||
fn add_book(&mut self, id: T, title: String, author: String) {
|
||||
let book = Book { id, title, author };
|
||||
self.books.push(book);
|
||||
}
|
||||
|
||||
fn find_books_by_author(&self, author: &str) -> Vec<&Book<T>> {
|
||||
self.books
|
||||
.iter()
|
||||
.filter(|book| book.author.to_lowercase() == author.to_lowercase())
|
||||
.collect()
|
||||
}
|
||||
}
|
||||
```
|
||||
</step>
|
||||
|
||||
This code defines a User struct and implements two methods:
|
||||
- `new`: a constructor to create a new User
|
||||
- `print_info`: a method to print the user's information
|
||||
This code defines:
|
||||
1. A generic Book struct with an ID of type T, title, and author
|
||||
2. A Library struct containing a vector of Books
|
||||
3. Methods to create a new Library, add books, and find books by author
|
||||
The use of generics allows flexibility in the type of ID used for books.</assistant>
|
||||
</turn>
|
||||
|
||||
2. User: Great! Now, how can I create a User instance and call the print_info method?
|
||||
A: You can create a User instance and call the print_info method like this:
|
||||
<turn number="2">
|
||||
<user>How can I use this Library struct in a main function?</user>
|
||||
<assistant>Here's how you can use the Library struct in a main function:
|
||||
|
||||
<step>Create a User instance and call print_info
|
||||
<step>Create a main function to demonstrate Library usage
|
||||
```rust
|
||||
fn main() {
|
||||
let user = User::new(String::from("Alice"), 30);
|
||||
user.print_info();
|
||||
}
|
||||
```
|
||||
</step>
|
||||
// Create a new Library with string IDs
|
||||
let mut library = Library::new();
|
||||
|
||||
This will output: `Name: Alice, Age: 30`
|
||||
// Add some books
|
||||
library.add_book(String::from("001"), String::from("The Rust Programming Language"), String::from("Steve Klabnik"));
|
||||
library.add_book(String::from("002"), String::from("Programming Rust"), String::from("Jim Blandy"));
|
||||
library.add_book(String::from("003"), String::from("Rust in Action"), String::from("Tim McNamara"));
|
||||
|
||||
3. User: Can you add an email field to the User struct and update the print_info method to include it?
|
||||
A: Certainly! Let's modify the User struct and update the print_info method:
|
||||
// Find books by author
|
||||
let author = "Steve Klabnik";
|
||||
let books = library.find_books_by_author(author);
|
||||
|
||||
<step>Add email field to User struct
|
||||
```rust
|
||||
struct User {
|
||||
name: String,
|
||||
age: u32,
|
||||
email: String,
|
||||
}
|
||||
```
|
||||
</step>
|
||||
|
||||
<step>Update the new method and print_info method
|
||||
```rust
|
||||
impl User {
|
||||
fn new(name: String, age: u32, email: String) -> Self {
|
||||
User { name, age, email }
|
||||
}
|
||||
fn print_info(&self) {
|
||||
println!("Name: {}, Age: {}, Email: {}", self.name, self.age, self.email);
|
||||
println!("Books by {}:", author);
|
||||
for book in books {
|
||||
println!("ID: {}, Title: {}", book.id, book.title);
|
||||
}
|
||||
}
|
||||
```
|
||||
</step>
|
||||
|
||||
<step>Update the main function
|
||||
This main function demonstrates:
|
||||
1. Creating a new Library instance
|
||||
2. Adding books with string IDs
|
||||
3. Finding and displaying books by a specific author</assistant>
|
||||
</turn>
|
||||
|
||||
<turn number="3">
|
||||
<user>Can you add a method to remove a book by ID and handle potential errors?</user>
|
||||
<assistant>Certainly! Let's add a method to remove a book by ID and handle potential errors:
|
||||
|
||||
<step>Add a remove_book method to the Library struct
|
||||
```rust
|
||||
use std::fmt::Display;
|
||||
use std::error::Error;
|
||||
use std::fmt;
|
||||
|
||||
#[derive(Debug)]
|
||||
struct BookNotFoundError;
|
||||
|
||||
impl Error for BookNotFoundError {}
|
||||
|
||||
impl fmt::Display for BookNotFoundError {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
write!(f, "Book not found")
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> Library<T>
|
||||
where
|
||||
T: Display + Clone + PartialEq
|
||||
{
|
||||
// ... (previous methods remain the same)
|
||||
|
||||
fn remove_book(&mut self, id: &T) -> Result<Book<T>, BookNotFoundError> {
|
||||
if let Some(index) = self.books.iter().position(|book| &book.id == id) {
|
||||
Ok(self.books.remove(index))
|
||||
} else {
|
||||
Err(BookNotFoundError)
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
</step>
|
||||
<step>Update the main function to demonstrate book removal
|
||||
```rust
|
||||
fn main() {
|
||||
let user = User::new(String::from("Alice"), 30, String::from("alice@example.com"));
|
||||
user.print_info();
|
||||
// ... (previous code remains the same)
|
||||
|
||||
// Attempt to remove a book
|
||||
let id_to_remove = String::from("002");
|
||||
match library.remove_book(&id_to_remove) {
|
||||
Ok(removed_book) => println!("Removed book: {} by {}", removed_book.title, removed_book.author),
|
||||
Err(e) => println!("Error removing book: {}", e),
|
||||
}
|
||||
|
||||
// Attempt to remove a non-existent book
|
||||
let non_existent_id = String::from("999");
|
||||
match library.remove_book(&non_existent_id) {
|
||||
Ok(_) => println!("Book removed successfully"),
|
||||
Err(e) => println!("Error removing book: {}", e),
|
||||
}
|
||||
|
||||
// Display remaining books
|
||||
println!("\nRemaining books:");
|
||||
for book in &library.books {
|
||||
println!("ID: {}, Title: {} by {}", book.id, book.title, book.author);
|
||||
}
|
||||
}
|
||||
```
|
||||
</step>
|
||||
|
||||
This will now output: `Name: Alice, Age: 30, Email: alice@example.com`
|
||||
The changes include:
|
||||
1. Adding the `email` field to the User struct
|
||||
2. Updating the `new` method to accept an email parameter
|
||||
3. Modifying the `print_info` method to include the email
|
||||
4. Updating the main function to provide an email when creating a User instance
|
||||
This update includes:
|
||||
1. A new remove_book method that returns a Result type
|
||||
2. A custom BookNotFoundError for error handling
|
||||
3. Demonstration of successful book removal and error handling for non-existent books
|
||||
4. Display of remaining books after removal attempts
|
||||
|
||||
The remove_book method uses the PartialEq trait to compare book IDs, allowing for flexible ID types while maintaining type safety.</assistant>
|
||||
</turn>
|
||||
</example>
|
||||
|
||||
<example language="go">
|
||||
<turn number="1">
|
||||
<user>Create a Person struct with name, age, and email fields, and a method to greet. Include error handling for age.</user>
|
||||
<assistant>Certainly! Let's create a Person struct with the specified fields and a method to greet in Go:
|
||||
|
||||
<step>Define the Person struct and NewPerson function with error handling
|
||||
```go
|
||||
type Person struct {
|
||||
Name string
|
||||
Age int
|
||||
Email string
|
||||
}
|
||||
|
||||
func NewPerson(name string, age int, email string) (*Person, error) {
|
||||
if age < 0 || age > 150 {
|
||||
return nil, fmt.Errorf("invalid age: %d", age)
|
||||
}
|
||||
return &Person{Name: name, Age: age, Email: email}, nil
|
||||
}
|
||||
```
|
||||
</step>
|
||||
<step>Implement the Greet method for the Person struct
|
||||
```go
|
||||
func (p *Person) Greet() string {
|
||||
return fmt.Sprintf("Hello, I'm %s, %d years old. You can reach me at %s.", p.Name, p.Age, p.Email)
|
||||
}
|
||||
```
|
||||
</step>
|
||||
|
||||
This code defines a Person struct, a NewPerson function with age validation, and a Greet method.</assistant>
|
||||
</turn>
|
||||
|
||||
<turn number="2">
|
||||
<user>How can I use these in a main function?</user>
|
||||
<assistant>Here's how to use the Person struct and its methods in a main function:
|
||||
|
||||
<step>Create a main function with error handling
|
||||
```go
|
||||
func main() {
|
||||
// Create a new Person instance
|
||||
person, err := NewPerson("Alice", 30, "alice@example.com")
|
||||
if err != nil {
|
||||
fmt.Println("Error creating person:", err)
|
||||
return
|
||||
}
|
||||
|
||||
// Use the Greet method
|
||||
fmt.Println(person.Greet())
|
||||
|
||||
// Demonstrate error handling with invalid age
|
||||
invalidPerson, err := NewPerson("Bob", 200, "bob@example.com")
|
||||
if err != nil {
|
||||
fmt.Println("Error creating person with invalid age:", err)
|
||||
}
|
||||
}
|
||||
```
|
||||
</step>
|
||||
|
||||
This main function demonstrates:
|
||||
1. Creating a valid Person instance
|
||||
2. Using the Greet method
|
||||
3. Handling errors when creating a Person with an invalid age</assistant>
|
||||
</turn>
|
||||
|
||||
<turn number="3">
|
||||
<user>Can you add a method to update the email with validation?</user>
|
||||
<assistant>Certainly! Let's add a method to update the email with validation:
|
||||
|
||||
<step>Add UpdateEmail method with validation
|
||||
```go
|
||||
import (
|
||||
"fmt"
|
||||
"regexp"
|
||||
)
|
||||
|
||||
func (p *Person) UpdateEmail(newEmail string) error {
|
||||
// Simple email validation using regex
|
||||
emailRegex := regexp.MustCompile(`^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$`)
|
||||
if !emailRegex.MatchString(newEmail) {
|
||||
return fmt.Errorf("invalid email format: %s", newEmail)
|
||||
}
|
||||
p.Email = newEmail
|
||||
return nil
|
||||
}
|
||||
```
|
||||
</step>
|
||||
<step>Update the main function to demonstrate email update
|
||||
```go
|
||||
func main() {
|
||||
// ... (previous code remains the same)
|
||||
|
||||
// Demonstrate email update
|
||||
err = person.UpdateEmail("alice.new@example.com")
|
||||
if err != nil {
|
||||
fmt.Println("Error updating email:", err)
|
||||
} else {
|
||||
fmt.Println("Updated greeting:", person.Greet())
|
||||
}
|
||||
|
||||
// Demonstrate email update with invalid email
|
||||
err = person.UpdateEmail("invalid-email")
|
||||
if err != nil {
|
||||
fmt.Println("Error updating email:", err)
|
||||
}
|
||||
}
|
||||
```
|
||||
</step>
|
||||
|
||||
This update includes:
|
||||
1. An UpdateEmail method with email format validation
|
||||
2. Demonstration of successful email update in the main function
|
||||
3. Handling of invalid email update attempt</assistant>
|
||||
</turn>
|
||||
</example>
|
||||
</workflow>
|
||||
|
||||
@@ -1,22 +1,28 @@
|
||||
Your task is to map a step from the conversation above to suggestions on symbols inside the provided source files.
|
||||
<overview>
|
||||
Your task is to map a step from a workflow to locations in source code where code needs to be changed to fulfill that step.
|
||||
Given a workflow containing background context plus a series of <step> tags, you will resolve *one* of these step tags to resolve to one or more locations in the code.
|
||||
With each location, you will produce a brief, one-line description of the changes to be made.
|
||||
|
||||
Guidelines:
|
||||
<guidelines>
|
||||
- There's no need to describe *what* to do, just *where* to do it.
|
||||
- Only reference locations that actually exist (unless you're creating a file).
|
||||
- If creating a file, assume any subsequent updates are included at the time of creation.
|
||||
- Don't create and then update a file.
|
||||
- We'll create it in one shot.
|
||||
- Don't create and then update a file. Always create new files in shot.
|
||||
- Prefer updating symbols lower in the syntax tree if possible.
|
||||
- Never include suggestions on a parent symbol and one of its children in the same suggestions block.
|
||||
- Never nest an operation with another operation or include CDATA or other content. All suggestions are leaf nodes.
|
||||
- Include a description attribute for each operation with a brief, one-line description of the change to perform.
|
||||
- Descriptions are required for all suggestions except delete.
|
||||
- When generating multiple suggestions, ensure the descriptions are specific to each individual operation.
|
||||
- Avoid referring to the location in the description. Focus on the change to be made, not the location where it's made. That's implicit with the symbol you provide.
|
||||
- Don't generate multiple suggestions at the same location. Instead, combine them together in a single operation with a succinct combined description.
|
||||
- To add imports respond with a suggestion where the `"symbol"` key is set to `"#imports"`
|
||||
</guidelines>
|
||||
</overview>
|
||||
|
||||
Example 1:
|
||||
|
||||
User:
|
||||
<examples>
|
||||
<example>
|
||||
<workflow_context>
|
||||
<message role="user">
|
||||
```rs src/rectangle.rs
|
||||
struct Rectangle {
|
||||
width: f64,
|
||||
@@ -30,12 +36,21 @@ impl Rectangle {
|
||||
}
|
||||
```
|
||||
|
||||
We need to add methods to calculate the area and perimeter of the rectangle. Can you help with that?
|
||||
</message>
|
||||
<message role="assistant">
|
||||
Sure, I can help with that!
|
||||
|
||||
<step>Add new methods 'calculate_area' and 'calculate_perimeter' to the Rectangle struct</step>
|
||||
<step>Implement the 'Display' trait for the Rectangle struct</step>
|
||||
</message>
|
||||
</workflow_context>
|
||||
|
||||
What are the suggestions for the step: <step>Add a new method 'calculate_area' to the Rectangle struct</step>
|
||||
<step_to_resolve>
|
||||
Add new methods 'calculate_area' and 'calculate_perimeter' to the Rectangle struct
|
||||
</step_to_resolve>
|
||||
|
||||
A (wrong):
|
||||
<incorrect_output reason="NEVER append multiple children at the same location.">
|
||||
{
|
||||
"title": "Add Rectangle methods",
|
||||
"suggestions": [
|
||||
@@ -53,10 +68,9 @@ A (wrong):
|
||||
}
|
||||
]
|
||||
}
|
||||
</incorrect_output>
|
||||
|
||||
This demonstrates what NOT to do. NEVER append multiple children at the same location.
|
||||
|
||||
A (corrected):
|
||||
<correct_output>
|
||||
{
|
||||
"title": "Add Rectangle methods",
|
||||
"suggestions": [
|
||||
@@ -68,11 +82,13 @@ A (corrected):
|
||||
}
|
||||
]
|
||||
}
|
||||
</correct_output>
|
||||
|
||||
User:
|
||||
What are the suggestions for the step: <step>Implement the 'Display' trait for the Rectangle struct</step>
|
||||
<step_to_resolve>
|
||||
Implement the 'Display' trait for the Rectangle struct
|
||||
</step_to_resolve>
|
||||
|
||||
A:
|
||||
<output>
|
||||
{
|
||||
"title": "Implement Display for Rectangle",
|
||||
"suggestions": [
|
||||
@@ -84,10 +100,11 @@ A:
|
||||
}
|
||||
]
|
||||
}
|
||||
</output>
|
||||
|
||||
Example 2:
|
||||
|
||||
User:
|
||||
<example>
|
||||
<workflow_context>
|
||||
<message role="user">
|
||||
```rs src/user.rs
|
||||
struct User {
|
||||
pub name: String,
|
||||
@@ -105,13 +122,19 @@ impl User {
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
</message>
|
||||
<message role="assistant">
|
||||
Certainly!
|
||||
<step>Update the 'print_info' method to use formatted output</step>
|
||||
<step>Remove the 'email' field from the User struct</step>
|
||||
</message>
|
||||
</workflow_context>
|
||||
|
||||
What are the suggestions for the step: <step>Update the 'print_info' method to use formatted output</step>
|
||||
<step_to_resolve>
|
||||
Update the 'print_info' method to use formatted output
|
||||
</step_to_resolve>
|
||||
|
||||
A:
|
||||
<output>
|
||||
{
|
||||
"title": "Use formatted output",
|
||||
"suggestions": [
|
||||
@@ -123,11 +146,13 @@ A:
|
||||
}
|
||||
]
|
||||
}
|
||||
</output>
|
||||
|
||||
User:
|
||||
What are the suggestions for the step: <step>Remove the 'email' field from the User struct</step>
|
||||
<step_to_resolve>
|
||||
Remove the 'email' field from the User struct
|
||||
</step_to_resolve>
|
||||
|
||||
A:
|
||||
<output>
|
||||
{
|
||||
"title": "Remove email field",
|
||||
"suggestions": [
|
||||
@@ -138,10 +163,12 @@ A:
|
||||
}
|
||||
]
|
||||
}
|
||||
</output>
|
||||
</example>
|
||||
|
||||
Example 3:
|
||||
|
||||
User:
|
||||
<example>
|
||||
<workflow_context>
|
||||
<message role="user">
|
||||
```rs src/vehicle.rs
|
||||
struct Vehicle {
|
||||
make: String,
|
||||
@@ -159,28 +186,36 @@ impl Vehicle {
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
</message>
|
||||
<message role="assistant">
|
||||
<step>Add a 'use std::fmt;' statement at the beginning of the file</step>
|
||||
<step>Add a new method 'start_engine' in the Vehicle impl block</step>
|
||||
</message>
|
||||
</workflow_context>
|
||||
|
||||
What are the suggestions for the step: <step>Add a 'use std::fmt;' statement at the beginning of the file</step>
|
||||
<step_to_resolve>
|
||||
Add a 'use std::fmt;' statement at the beginning of the file
|
||||
</step_to_resolve>
|
||||
|
||||
A:
|
||||
<output>
|
||||
{
|
||||
"title": "Add use std::fmt statement",
|
||||
"suggestions": [
|
||||
{
|
||||
"kind": "PrependChild",
|
||||
"path": "src/vehicle.rs",
|
||||
"symbol": "#imports",
|
||||
"description": "Add 'use std::fmt' statement"
|
||||
}
|
||||
]
|
||||
}
|
||||
</output>
|
||||
|
||||
User:
|
||||
What are the suggestions for the step: <step>Add a new method 'start_engine' in the Vehicle impl block</step>
|
||||
<step_to_resolve>
|
||||
Add a new method 'start_engine' in the Vehicle impl block
|
||||
</step_to_resolve>
|
||||
|
||||
A:
|
||||
<output>
|
||||
{
|
||||
"title": "Add start_engine method",
|
||||
"suggestions": [
|
||||
@@ -192,10 +227,12 @@ A:
|
||||
}
|
||||
]
|
||||
}
|
||||
</output>
|
||||
</example>
|
||||
|
||||
Example 4:
|
||||
|
||||
User:
|
||||
<example>
|
||||
<workflow_context>
|
||||
<message role="user">
|
||||
```rs src/employee.rs
|
||||
struct Employee {
|
||||
name: String,
|
||||
@@ -219,12 +256,18 @@ impl Employee {
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
</message>
|
||||
<message role="assistant">
|
||||
<step>Make salary an f32</step>
|
||||
<step>Remove the 'department' field and update the 'print_details' method</step>
|
||||
</message>
|
||||
</workflow_context>
|
||||
|
||||
What are the suggestions for the step: <step>Make salary an f32</step>
|
||||
<step_to_resolve>
|
||||
Make salary an f32
|
||||
</step_to_resolve>
|
||||
|
||||
A (wrong):
|
||||
<incorrect_output reason="NEVER include suggestions on a parent symbol and one of its children in the same suggestions block.">
|
||||
{
|
||||
"title": "Change salary to f32",
|
||||
"suggestions": [
|
||||
@@ -242,10 +285,9 @@ A (wrong):
|
||||
}
|
||||
]
|
||||
}
|
||||
</incorrect_output>
|
||||
|
||||
This example demonstrates what not to do. `struct Employee salary` is a child of `struct Employee`.
|
||||
|
||||
A (corrected):
|
||||
<correct_output>
|
||||
{
|
||||
"title": "Change salary to f32",
|
||||
"suggestions": [
|
||||
@@ -257,11 +299,13 @@ A (corrected):
|
||||
}
|
||||
]
|
||||
}
|
||||
</correct_output>
|
||||
|
||||
User:
|
||||
What are the correct suggestions for the step: <step>Remove the 'department' field and update the 'print_details' method</step>
|
||||
<step_to_resolve>
|
||||
Remove the 'department' field and update the 'print_details' method
|
||||
</step_to_resolve>
|
||||
|
||||
A:
|
||||
<output>
|
||||
{
|
||||
"title": "Remove department",
|
||||
"suggestions": [
|
||||
@@ -278,10 +322,12 @@ A:
|
||||
}
|
||||
]
|
||||
}
|
||||
</output>
|
||||
</example>

Example 5:

User:
<example>
<workflow_context>
<message role="user">
```rs src/game.rs
struct Player {
    name: String,
@@ -305,10 +351,17 @@ impl Game {
    }
}
```

</message>
<message role="assistant">
<step>Add a 'level' field to Player and update the 'new' method</step>
</message>
</workflow_context>

A:
<step_to_resolve>
Add a 'level' field to Player and update the 'new' method
</step_to_resolve>

<output>
{
  "title": "Add level field to Player",
  "suggestions": [
@@ -326,10 +379,12 @@ A:
    }
  ]
}
</output>
</example>

Example 6:

User:
<example>
<workflow_context>
<message role="user">
```rs src/config.rs
use std::collections::HashMap;

@@ -343,16 +398,24 @@ impl Config {
    }
}
```

</message>
<message role="assistant">
<step>Add a 'load_from_file' method to Config and import necessary modules</step>
</message>
</workflow_context>

A:
<step_to_resolve>
Add a 'load_from_file' method to Config and import necessary modules
</step_to_resolve>

<output>
{
  "title": "Add load_from_file method",
  "suggestions": [
    {
      "kind": "PrependChild",
      "path": "src/config.rs",
      "symbol": "#imports",
      "description": "Import std::fs and std::io modules"
    },
    {
@@ -363,10 +426,12 @@ A:
    }
  ]
}
</output>
</example>

Example 7:

User:
<example>
<workflow_context>
<message role="user">
```rs src/database.rs
pub(crate) struct Database {
    connection: Connection,
@@ -383,10 +448,17 @@ impl Database {
    }
}
```

</message>
<message role="assistant">
<step>Add error handling to the 'query' method and create a custom error type</step>
</message>
</workflow_context>

A:
<step_to_resolve>
Add error handling to the 'query' method and create a custom error type
</step_to_resolve>

<output>
{
  "title": "Add error handling to query",
  "suggestions": [
@@ -409,5 +481,16 @@ A:
    }
  ]
}
</output>
</example>
</examples>

Now generate the suggestions for the following step:

<workflow_context>
{{{workflow_context}}}
</workflow_context>

<step_to_resolve>
{{{step_to_resolve}}}
</step_to_resolve>

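For orientation only, not part of the diff: every example output above shares one JSON shape, a `title` plus a `suggestions` array whose entries carry `kind`, `path`, `symbol`, and `description`. Below is a minimal sketch of how that payload could be modeled and parsed with serde; the field names are taken from the examples, and the real schema used by the workflow code may differ.

```rs
use serde::{Deserialize, Serialize};

// Hypothetical mirror of the example outputs above; names come from the examples,
// not from the actual workflow schema.
#[derive(Debug, Serialize, Deserialize)]
struct StepSuggestions {
    title: String,
    suggestions: Vec<Suggestion>,
}

#[derive(Debug, Serialize, Deserialize)]
struct Suggestion {
    kind: String,
    path: String,
    symbol: String,
    description: String,
}

fn main() {
    let raw = r#"{
        "title": "Add use std::fmt statement",
        "suggestions": [{
            "kind": "PrependChild",
            "path": "src/vehicle.rs",
            "symbol": "#imports",
            "description": "Add 'use std::fmt' statement"
        }]
    }"#;
    let parsed: StepSuggestions = serde_json::from_str(raw).expect("example output should parse");
    assert_eq!(parsed.suggestions[0].kind, "PrependChild");
}
```
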
@@ -64,9 +64,15 @@
"ui_font_weight": 400,
// The default font size for text in the UI
"ui_font_size": 16,
// How much to fade out unused code.
"unnecessary_code_fade": 0.3,
// The factor to grow the active pane by. Defaults to 1.0
// which gives the same size as all other panes.
"active_pane_magnification": 1.0,
// The direction that you want to split panes horizontally. Defaults to "up"
"pane_split_direction_horizontal": "up",
// The direction that you want to split panes vertically. Defaults to "left"
"pane_split_direction_vertical": "left",
// Centered layout related settings.
"centered_layout": {
// The relative width of the left padding of the central pane from the
@@ -395,9 +401,22 @@
// The default model to use when creating new contexts.
"default_model": {
// The provider to use.
"provider": "openai",
"provider": "zed.dev",
// The model to use.
"model": "gpt-4o"
"model": "claude-3-5-sonnet"
}
},
// The settings for slash commands.
"slash_commands": {
// Settings for the `/docs` slash command.
"docs": {
// Whether `/docs` is enabled.
"enabled": false
},
// Settings for the `/project` slash command.
"project": {
// Whether `/project` is enabled.
"enabled": false
}
},
// Whether the screen sharing icon is shown in the os status bar.
@@ -491,6 +510,8 @@
// "soft_wrap": "editor_width",
// 4. Soft wrap lines at the preferred line length.
// "soft_wrap": "preferred_line_length",
// 5. Soft wrap lines at the preferred line length or the editor width (whichever is smaller).
// "soft_wrap": "bounded",
"soft_wrap": "prefer_line",
// The column at which to soft-wrap lines, for buffers where soft-wrap
// is enabled.
@@ -823,6 +844,7 @@
"language_servers": ["starpls", "!buck2-lsp", "..."]
},
"Svelte": {
"language_servers": ["svelte-language-server", "..."],
"prettier": {
"allowed": true,
"plugins": ["prettier-plugin-svelte"]
@@ -846,6 +868,7 @@
}
},
"Vue.js": {
"language_servers": ["vue-language-server", "..."],
"prettier": {
"allowed": true
}
@@ -997,5 +1020,16 @@
// ]
// }
// ]
"ssh_connections": null
"ssh_connections": null,
// Configures the Context Server Protocol binaries
//
// Examples:
// {
// "id": "server-1",
// "executable": "/path",
// "args": ["arg1", "args2"]
// }
"experimental.context_servers": {
"servers": []
}
}

compose.yml
@@ -33,5 +33,31 @@ services:
    volumes:
      - ./livekit.yaml:/livekit.yaml

  postgrest_app:
    image: postgrest/postgrest
    container_name: postgrest_app
    ports:
      - 8081:8081
    environment:
      PGRST_DB_URI: postgres://postgres@postgres:5432/zed
    volumes:
      - ./crates/collab/postgrest_app.conf:/etc/postgrest.conf
    command: postgrest /etc/postgrest.conf
    depends_on:
      - postgres

  postgrest_llm:
    image: postgrest/postgrest
    container_name: postgrest_llm
    ports:
      - 8082:8082
    environment:
      PGRST_DB_URI: postgres://postgres@postgres:5432/zed_llm
    volumes:
      - ./crates/collab/postgrest_llm.conf:/etc/postgrest.conf
    command: postgrest /etc/postgrest.conf
    depends_on:
      - postgres

volumes:
  postgres_data:

@@ -3,10 +3,9 @@ use editor::Editor;
|
||||
use extension::ExtensionStore;
|
||||
use futures::StreamExt;
|
||||
use gpui::{
|
||||
actions, anchored, deferred, percentage, Animation, AnimationExt as _, AppContext, CursorStyle,
|
||||
DismissEvent, EventEmitter, InteractiveElement as _, Model, ParentElement as _, Render,
|
||||
SharedString, StatefulInteractiveElement, Styled, Transformation, View, ViewContext,
|
||||
VisualContext as _,
|
||||
actions, percentage, Animation, AnimationExt as _, AppContext, CursorStyle, EventEmitter,
|
||||
InteractiveElement as _, Model, ParentElement as _, Render, SharedString,
|
||||
StatefulInteractiveElement, Styled, Transformation, View, ViewContext, VisualContext as _,
|
||||
};
|
||||
use language::{
|
||||
LanguageRegistry, LanguageServerBinaryStatus, LanguageServerId, LanguageServerName,
|
||||
@@ -14,7 +13,7 @@ use language::{
|
||||
use project::{LanguageServerProgress, Project};
|
||||
use smallvec::SmallVec;
|
||||
use std::{cmp::Reverse, fmt::Write, sync::Arc, time::Duration};
|
||||
use ui::{prelude::*, ContextMenu};
|
||||
use ui::{prelude::*, ButtonLike, ContextMenu, PopoverMenu, PopoverMenuHandle};
|
||||
use workspace::{item::ItemHandle, StatusItemView, Workspace};
|
||||
|
||||
actions!(activity_indicator, [ShowErrorMessage]);
|
||||
@@ -27,7 +26,7 @@ pub struct ActivityIndicator {
|
||||
statuses: Vec<LspStatus>,
|
||||
project: Model<Project>,
|
||||
auto_updater: Option<Model<AutoUpdater>>,
|
||||
context_menu: Option<View<ContextMenu>>,
|
||||
context_menu_handle: PopoverMenuHandle<ContextMenu>,
|
||||
}
|
||||
|
||||
struct LspStatus {
|
||||
@@ -79,7 +78,7 @@ impl ActivityIndicator {
|
||||
statuses: Default::default(),
|
||||
project: project.clone(),
|
||||
auto_updater,
|
||||
context_menu: None,
|
||||
context_menu_handle: Default::default(),
|
||||
}
|
||||
});
|
||||
|
||||
@@ -368,72 +367,7 @@ impl ActivityIndicator {
|
||||
}
|
||||
|
||||
fn toggle_language_server_work_context_menu(&mut self, cx: &mut ViewContext<Self>) {
|
||||
if self.context_menu.take().is_some() {
|
||||
return;
|
||||
}
|
||||
|
||||
self.build_lsp_work_context_menu(cx);
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
fn build_lsp_work_context_menu(&mut self, cx: &mut ViewContext<Self>) {
|
||||
let mut has_work = false;
|
||||
let this = cx.view().downgrade();
|
||||
let context_menu = ContextMenu::build(cx, |mut menu, cx| {
|
||||
for work in self.pending_language_server_work(cx) {
|
||||
has_work = true;
|
||||
|
||||
let this = this.clone();
|
||||
let title = SharedString::from(
|
||||
work.progress
|
||||
.title
|
||||
.as_deref()
|
||||
.unwrap_or(work.progress_token)
|
||||
.to_string(),
|
||||
);
|
||||
if work.progress.is_cancellable {
|
||||
let language_server_id = work.language_server_id;
|
||||
let token = work.progress_token.to_string();
|
||||
menu = menu.custom_entry(
|
||||
move |_| {
|
||||
h_flex()
|
||||
.w_full()
|
||||
.justify_between()
|
||||
.child(Label::new(title.clone()))
|
||||
.child(Icon::new(IconName::XCircle))
|
||||
.into_any_element()
|
||||
},
|
||||
move |cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
this.project.update(cx, |project, cx| {
|
||||
project.cancel_language_server_work(
|
||||
language_server_id,
|
||||
Some(token.clone()),
|
||||
cx,
|
||||
);
|
||||
});
|
||||
this.context_menu.take();
|
||||
})
|
||||
.ok();
|
||||
},
|
||||
);
|
||||
} else {
|
||||
menu = menu.label(title.clone());
|
||||
}
|
||||
}
|
||||
menu
|
||||
});
|
||||
|
||||
if has_work {
|
||||
cx.subscribe(&context_menu, |this, _, _: &DismissEvent, cx| {
|
||||
this.context_menu.take();
|
||||
cx.notify();
|
||||
})
|
||||
.detach();
|
||||
cx.focus_view(&context_menu);
|
||||
self.context_menu = Some(context_menu);
|
||||
cx.notify();
|
||||
}
|
||||
self.context_menu_handle.toggle(cx);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -455,19 +389,72 @@ impl Render for ActivityIndicator {
|
||||
on_click(this, cx);
|
||||
}))
|
||||
}
|
||||
|
||||
result
|
||||
.gap_2()
|
||||
.children(content.icon)
|
||||
.child(Label::new(SharedString::from(content.message)).size(LabelSize::Small))
|
||||
.children(self.context_menu.as_ref().map(|menu| {
|
||||
deferred(
|
||||
anchored()
|
||||
.anchor(gpui::AnchorCorner::BottomLeft)
|
||||
.child(menu.clone()),
|
||||
let this = cx.view().downgrade();
|
||||
result.gap_2().child(
|
||||
PopoverMenu::new("activity-indicator-popover")
|
||||
.trigger(
|
||||
ButtonLike::new("activity-indicator-trigger").child(
|
||||
h_flex()
|
||||
.gap_2()
|
||||
.children(content.icon)
|
||||
.child(Label::new(content.message).size(LabelSize::Small)),
|
||||
),
|
||||
)
|
||||
.with_priority(1)
|
||||
}))
|
||||
.anchor(gpui::AnchorCorner::BottomLeft)
|
||||
.menu(move |cx| {
|
||||
let strong_this = this.upgrade()?;
|
||||
ContextMenu::build(cx, |mut menu, cx| {
|
||||
for work in strong_this.read(cx).pending_language_server_work(cx) {
|
||||
let this = this.clone();
|
||||
let mut title = work
|
||||
.progress
|
||||
.title
|
||||
.as_deref()
|
||||
.unwrap_or(work.progress_token)
|
||||
.to_owned();
|
||||
|
||||
if work.progress.is_cancellable {
|
||||
let language_server_id = work.language_server_id;
|
||||
let token = work.progress_token.to_string();
|
||||
let title = SharedString::from(title);
|
||||
menu = menu.custom_entry(
|
||||
move |_| {
|
||||
h_flex()
|
||||
.w_full()
|
||||
.justify_between()
|
||||
.child(Label::new(title.clone()))
|
||||
.child(Icon::new(IconName::XCircle))
|
||||
.into_any_element()
|
||||
},
|
||||
move |cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
this.project.update(cx, |project, cx| {
|
||||
project.cancel_language_server_work(
|
||||
language_server_id,
|
||||
Some(token.clone()),
|
||||
cx,
|
||||
);
|
||||
});
|
||||
this.context_menu_handle.hide(cx);
|
||||
cx.notify();
|
||||
})
|
||||
.ok();
|
||||
},
|
||||
);
|
||||
} else {
|
||||
if let Some(progress_message) = work.progress.message.as_ref() {
|
||||
title.push_str(": ");
|
||||
title.push_str(progress_message);
|
||||
}
|
||||
|
||||
menu = menu.label(title);
|
||||
}
|
||||
}
|
||||
menu
|
||||
})
|
||||
.into()
|
||||
}),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -17,6 +17,7 @@ path = "src/anthropic.rs"
|
||||
|
||||
[dependencies]
|
||||
anyhow.workspace = true
|
||||
chrono.workspace = true
|
||||
futures.workspace = true
|
||||
http_client.workspace = true
|
||||
isahc.workspace = true
|
||||
@@ -24,6 +25,8 @@ schemars = { workspace = true, optional = true }
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
strum.workspace = true
|
||||
thiserror.workspace = true
|
||||
util.workspace = true
|
||||
|
||||
[dev-dependencies]
|
||||
tokio.workspace = true
|
||||
|
||||
@@ -1,35 +1,53 @@
|
||||
mod supported_countries;
|
||||
|
||||
use anyhow::{anyhow, Result};
|
||||
use anyhow::{anyhow, Context, Result};
|
||||
use chrono::{DateTime, Utc};
|
||||
use futures::{io::BufReader, stream::BoxStream, AsyncBufReadExt, AsyncReadExt, Stream, StreamExt};
|
||||
use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest};
|
||||
use isahc::config::Configurable;
|
||||
use isahc::http::{HeaderMap, HeaderValue};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::time::Duration;
|
||||
use strum::EnumIter;
|
||||
use std::{pin::Pin, str::FromStr};
|
||||
use strum::{EnumIter, EnumString};
|
||||
use thiserror::Error;
|
||||
use util::ResultExt as _;
|
||||
|
||||
pub use supported_countries::*;
|
||||
|
||||
pub const ANTHROPIC_API_URL: &'static str = "https://api.anthropic.com";
|
||||
|
||||
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
|
||||
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq)]
|
||||
pub struct AnthropicModelCacheConfiguration {
|
||||
pub min_total_token: usize,
|
||||
pub should_speculate: bool,
|
||||
pub max_cache_anchors: usize,
|
||||
}
|
||||
|
||||
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
|
||||
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, EnumIter)]
|
||||
pub enum Model {
|
||||
#[default]
|
||||
#[serde(alias = "claude-3-5-sonnet", rename = "claude-3-5-sonnet-20240620")]
|
||||
#[serde(rename = "claude-3-5-sonnet", alias = "claude-3-5-sonnet-20240620")]
|
||||
Claude3_5Sonnet,
|
||||
#[serde(alias = "claude-3-opus", rename = "claude-3-opus-20240229")]
|
||||
#[serde(rename = "claude-3-opus", alias = "claude-3-opus-20240229")]
|
||||
Claude3Opus,
|
||||
#[serde(alias = "claude-3-sonnet", rename = "claude-3-sonnet-20240229")]
|
||||
#[serde(rename = "claude-3-sonnet", alias = "claude-3-sonnet-20240229")]
|
||||
Claude3Sonnet,
|
||||
#[serde(alias = "claude-3-haiku", rename = "claude-3-haiku-20240307")]
|
||||
#[serde(rename = "claude-3-haiku", alias = "claude-3-haiku-20240307")]
|
||||
Claude3Haiku,
|
||||
#[serde(rename = "custom")]
|
||||
Custom {
|
||||
name: String,
|
||||
max_tokens: usize,
|
||||
/// The name displayed in the UI, such as in the assistant panel model dropdown menu.
|
||||
display_name: Option<String>,
|
||||
/// Override this model with a different Anthropic model for tool calls.
|
||||
tool_override: Option<String>,
|
||||
/// Indicates whether this custom model supports caching.
|
||||
cache_configuration: Option<AnthropicModelCacheConfiguration>,
|
||||
max_output_tokens: Option<u32>,
|
||||
},
|
||||
}
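
An aside, not part of the diff: swapping `rename` and `alias` above changes which model name serde writes out while keeping the old dated identifier accepted on input. A self-contained sketch of that behaviour, assuming serde and serde_json and reproducing only the Sonnet variant:

```rs
use serde::{Deserialize, Serialize};

#[derive(Debug, PartialEq, Serialize, Deserialize)]
enum Model {
    // Mirrors the attribute order after this change: short name out, dated name still in.
    #[serde(rename = "claude-3-5-sonnet", alias = "claude-3-5-sonnet-20240620")]
    Claude3_5Sonnet,
}

fn main() {
    // Serialization now emits the short name...
    assert_eq!(
        serde_json::to_string(&Model::Claude3_5Sonnet).unwrap(),
        "\"claude-3-5-sonnet\""
    );
    // ...while the previously emitted dated name still deserializes via `alias`.
    let parsed: Model = serde_json::from_str("\"claude-3-5-sonnet-20240620\"").unwrap();
    assert_eq!(parsed, Model::Claude3_5Sonnet);
}
```
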
|
||||
|
||||
@@ -53,7 +71,7 @@ impl Model {
|
||||
Model::Claude3_5Sonnet => "claude-3-5-sonnet-20240620",
|
||||
Model::Claude3Opus => "claude-3-opus-20240229",
|
||||
Model::Claude3Sonnet => "claude-3-sonnet-20240229",
|
||||
Model::Claude3Haiku => "claude-3-opus-20240307",
|
||||
Model::Claude3Haiku => "claude-3-haiku-20240307",
|
||||
Self::Custom { name, .. } => name,
|
||||
}
|
||||
}
|
||||
@@ -64,7 +82,24 @@ impl Model {
|
||||
Self::Claude3Opus => "Claude 3 Opus",
|
||||
Self::Claude3Sonnet => "Claude 3 Sonnet",
|
||||
Self::Claude3Haiku => "Claude 3 Haiku",
|
||||
Self::Custom { name, .. } => name,
|
||||
Self::Custom {
|
||||
name, display_name, ..
|
||||
} => display_name.as_ref().unwrap_or(name),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn cache_configuration(&self) -> Option<AnthropicModelCacheConfiguration> {
|
||||
match self {
|
||||
Self::Claude3_5Sonnet | Self::Claude3Haiku => Some(AnthropicModelCacheConfiguration {
|
||||
min_total_token: 2_048,
|
||||
should_speculate: true,
|
||||
max_cache_anchors: 4,
|
||||
}),
|
||||
Self::Custom {
|
||||
cache_configuration,
|
||||
..
|
||||
} => cache_configuration.clone(),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -78,6 +113,16 @@ impl Model {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn max_output_tokens(&self) -> u32 {
|
||||
match self {
|
||||
Self::Claude3Opus | Self::Claude3Sonnet | Self::Claude3Haiku => 4_096,
|
||||
Self::Claude3_5Sonnet => 8_192,
|
||||
Self::Custom {
|
||||
max_output_tokens, ..
|
||||
} => max_output_tokens.unwrap_or(4_096),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn tool_model_id(&self) -> &str {
|
||||
if let Self::Custom {
|
||||
tool_override: Some(tool_override),
|
||||
@@ -96,34 +141,53 @@ pub async fn complete(
|
||||
api_url: &str,
|
||||
api_key: &str,
|
||||
request: Request,
|
||||
) -> Result<Response> {
|
||||
) -> Result<Response, AnthropicError> {
|
||||
let uri = format!("{api_url}/v1/messages");
|
||||
let request_builder = HttpRequest::builder()
|
||||
.method(Method::POST)
|
||||
.uri(uri)
|
||||
.header("Anthropic-Version", "2023-06-01")
|
||||
.header("Anthropic-Beta", "tools-2024-04-04")
|
||||
.header(
|
||||
"Anthropic-Beta",
|
||||
"tools-2024-04-04,prompt-caching-2024-07-31,max-tokens-3-5-sonnet-2024-07-15",
|
||||
)
|
||||
.header("X-Api-Key", api_key)
|
||||
.header("Content-Type", "application/json");
|
||||
|
||||
let serialized_request = serde_json::to_string(&request)?;
|
||||
let request = request_builder.body(AsyncBody::from(serialized_request))?;
|
||||
let serialized_request =
|
||||
serde_json::to_string(&request).context("failed to serialize request")?;
|
||||
let request = request_builder
|
||||
.body(AsyncBody::from(serialized_request))
|
||||
.context("failed to construct request body")?;
|
||||
|
||||
let mut response = client.send(request).await?;
|
||||
let mut response = client
|
||||
.send(request)
|
||||
.await
|
||||
.context("failed to send request to Anthropic")?;
|
||||
if response.status().is_success() {
|
||||
let mut body = Vec::new();
|
||||
response.body_mut().read_to_end(&mut body).await?;
|
||||
let response_message: Response = serde_json::from_slice(&body)?;
|
||||
response
|
||||
.body_mut()
|
||||
.read_to_end(&mut body)
|
||||
.await
|
||||
.context("failed to read response body")?;
|
||||
let response_message: Response =
|
||||
serde_json::from_slice(&body).context("failed to deserialize response body")?;
|
||||
Ok(response_message)
|
||||
} else {
|
||||
let mut body = Vec::new();
|
||||
response.body_mut().read_to_end(&mut body).await?;
|
||||
let body_str = std::str::from_utf8(&body)?;
|
||||
Err(anyhow!(
|
||||
response
|
||||
.body_mut()
|
||||
.read_to_end(&mut body)
|
||||
.await
|
||||
.context("failed to read response body")?;
|
||||
let body_str =
|
||||
std::str::from_utf8(&body).context("failed to parse response body as UTF-8")?;
|
||||
Err(AnthropicError::Other(anyhow!(
|
||||
"Failed to connect to API: {} {}",
|
||||
response.status(),
|
||||
body_str
|
||||
))
|
||||
)))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -133,7 +197,67 @@ pub async fn stream_completion(
|
||||
api_key: &str,
|
||||
request: Request,
|
||||
low_speed_timeout: Option<Duration>,
|
||||
) -> Result<BoxStream<'static, Result<Event>>> {
|
||||
) -> Result<BoxStream<'static, Result<Event, AnthropicError>>, AnthropicError> {
|
||||
stream_completion_with_rate_limit_info(client, api_url, api_key, request, low_speed_timeout)
|
||||
.await
|
||||
.map(|output| output.0)
|
||||
}
|
||||
|
||||
/// https://docs.anthropic.com/en/api/rate-limits#response-headers
|
||||
#[derive(Debug)]
|
||||
pub struct RateLimitInfo {
|
||||
pub requests_limit: usize,
|
||||
pub requests_remaining: usize,
|
||||
pub requests_reset: DateTime<Utc>,
|
||||
pub tokens_limit: usize,
|
||||
pub tokens_remaining: usize,
|
||||
pub tokens_reset: DateTime<Utc>,
|
||||
}
|
||||
|
||||
impl RateLimitInfo {
|
||||
fn from_headers(headers: &HeaderMap<HeaderValue>) -> Result<Self> {
|
||||
let tokens_limit = get_header("anthropic-ratelimit-tokens-limit", headers)?.parse()?;
|
||||
let requests_limit = get_header("anthropic-ratelimit-requests-limit", headers)?.parse()?;
|
||||
let tokens_remaining =
|
||||
get_header("anthropic-ratelimit-tokens-remaining", headers)?.parse()?;
|
||||
let requests_remaining =
|
||||
get_header("anthropic-ratelimit-requests-remaining", headers)?.parse()?;
|
||||
let requests_reset = get_header("anthropic-ratelimit-requests-reset", headers)?;
|
||||
let tokens_reset = get_header("anthropic-ratelimit-tokens-reset", headers)?;
|
||||
let requests_reset = DateTime::parse_from_rfc3339(requests_reset)?.to_utc();
|
||||
let tokens_reset = DateTime::parse_from_rfc3339(tokens_reset)?.to_utc();
|
||||
|
||||
Ok(Self {
|
||||
requests_limit,
|
||||
tokens_limit,
|
||||
requests_remaining,
|
||||
tokens_remaining,
|
||||
requests_reset,
|
||||
tokens_reset,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
fn get_header<'a>(key: &str, headers: &'a HeaderMap) -> Result<&'a str, anyhow::Error> {
|
||||
Ok(headers
|
||||
.get(key)
|
||||
.ok_or_else(|| anyhow!("missing header `{key}`"))?
|
||||
.to_str()?)
|
||||
}
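
Illustrative sketch only, not from the diff: the headers consumed by `RateLimitInfo::from_headers` are plain integers plus RFC 3339 timestamps, so parsing them in isolation looks roughly like this. It assumes the `http` crate's `HeaderMap` (the code above gets an equivalent type via isahc) and `chrono`; the sample values are invented, the header names come from the code above.

```rs
use chrono::{DateTime, Utc};
use http::{HeaderMap, HeaderValue};

fn main() {
    let mut headers = HeaderMap::new();
    headers.insert(
        "anthropic-ratelimit-tokens-limit",
        HeaderValue::from_static("400000"),
    );
    headers.insert(
        "anthropic-ratelimit-tokens-reset",
        HeaderValue::from_static("2024-08-01T00:00:00Z"),
    );

    // Counters are plain integers...
    let tokens_limit: usize = headers
        .get("anthropic-ratelimit-tokens-limit")
        .unwrap()
        .to_str()
        .unwrap()
        .parse()
        .unwrap();

    // ...and reset times are RFC 3339 timestamps, normalized to UTC.
    let tokens_reset: DateTime<Utc> = DateTime::parse_from_rfc3339(
        headers
            .get("anthropic-ratelimit-tokens-reset")
            .unwrap()
            .to_str()
            .unwrap(),
    )
    .unwrap()
    .to_utc();

    assert_eq!(tokens_limit, 400_000);
    assert_eq!(tokens_reset.timestamp(), 1_722_470_400);
}
```
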
|
||||
|
||||
pub async fn stream_completion_with_rate_limit_info(
|
||||
client: &dyn HttpClient,
|
||||
api_url: &str,
|
||||
api_key: &str,
|
||||
request: Request,
|
||||
low_speed_timeout: Option<Duration>,
|
||||
) -> Result<
|
||||
(
|
||||
BoxStream<'static, Result<Event, AnthropicError>>,
|
||||
Option<RateLimitInfo>,
|
||||
),
|
||||
AnthropicError,
|
||||
> {
|
||||
let request = StreamingRequest {
|
||||
base: request,
|
||||
stream: true,
|
||||
@@ -143,19 +267,29 @@ pub async fn stream_completion(
|
||||
.method(Method::POST)
|
||||
.uri(uri)
|
||||
.header("Anthropic-Version", "2023-06-01")
|
||||
.header("Anthropic-Beta", "tools-2024-04-04")
|
||||
.header(
|
||||
"Anthropic-Beta",
|
||||
"tools-2024-04-04,prompt-caching-2024-07-31,max-tokens-3-5-sonnet-2024-07-15",
|
||||
)
|
||||
.header("X-Api-Key", api_key)
|
||||
.header("Content-Type", "application/json");
|
||||
if let Some(low_speed_timeout) = low_speed_timeout {
|
||||
request_builder = request_builder.low_speed_timeout(100, low_speed_timeout);
|
||||
}
|
||||
let serialized_request = serde_json::to_string(&request)?;
|
||||
let request = request_builder.body(AsyncBody::from(serialized_request))?;
|
||||
let serialized_request =
|
||||
serde_json::to_string(&request).context("failed to serialize request")?;
|
||||
let request = request_builder
|
||||
.body(AsyncBody::from(serialized_request))
|
||||
.context("failed to construct request body")?;
|
||||
|
||||
let mut response = client.send(request).await?;
|
||||
let mut response = client
|
||||
.send(request)
|
||||
.await
|
||||
.context("failed to send request to Anthropic")?;
|
||||
if response.status().is_success() {
|
||||
let rate_limits = RateLimitInfo::from_headers(response.headers());
|
||||
let reader = BufReader::new(response.into_body());
|
||||
Ok(reader
|
||||
let stream = reader
|
||||
.lines()
|
||||
.filter_map(|line| async move {
|
||||
match line {
|
||||
@@ -163,48 +297,54 @@ pub async fn stream_completion(
|
||||
let line = line.strip_prefix("data: ")?;
|
||||
match serde_json::from_str(line) {
|
||||
Ok(response) => Some(Ok(response)),
|
||||
Err(error) => Some(Err(anyhow!(error))),
|
||||
Err(error) => Some(Err(AnthropicError::Other(anyhow!(error)))),
|
||||
}
|
||||
}
|
||||
Err(error) => Some(Err(anyhow!(error))),
|
||||
Err(error) => Some(Err(AnthropicError::Other(anyhow!(error)))),
|
||||
}
|
||||
})
|
||||
.boxed())
|
||||
.boxed();
|
||||
Ok((stream, rate_limits.log_err()))
|
||||
} else {
|
||||
let mut body = Vec::new();
|
||||
response.body_mut().read_to_end(&mut body).await?;
|
||||
response
|
||||
.body_mut()
|
||||
.read_to_end(&mut body)
|
||||
.await
|
||||
.context("failed to read response body")?;
|
||||
|
||||
let body_str = std::str::from_utf8(&body)?;
|
||||
let body_str =
|
||||
std::str::from_utf8(&body).context("failed to parse response body as UTF-8")?;
|
||||
|
||||
match serde_json::from_str::<Event>(body_str) {
|
||||
Ok(Event::Error { error }) => Err(api_error_to_err(error)),
|
||||
Ok(_) => Err(anyhow!(
|
||||
Ok(Event::Error { error }) => Err(AnthropicError::ApiError(error)),
|
||||
Ok(_) => Err(AnthropicError::Other(anyhow!(
|
||||
"Unexpected success response while expecting an error: '{body_str}'",
|
||||
)),
|
||||
Err(_) => Err(anyhow!(
|
||||
))),
|
||||
Err(_) => Err(AnthropicError::Other(anyhow!(
|
||||
"Failed to connect to API: {} {}",
|
||||
response.status(),
|
||||
body_str,
|
||||
)),
|
||||
))),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn extract_text_from_events(
|
||||
response: impl Stream<Item = Result<Event>>,
|
||||
) -> impl Stream<Item = Result<String>> {
|
||||
response: impl Stream<Item = Result<Event, AnthropicError>>,
|
||||
) -> impl Stream<Item = Result<String, AnthropicError>> {
|
||||
response.filter_map(|response| async move {
|
||||
match response {
|
||||
Ok(response) => match response {
|
||||
Event::ContentBlockStart { content_block, .. } => match content_block {
|
||||
Content::Text { text } => Some(Ok(text)),
|
||||
Content::Text { text, .. } => Some(Ok(text)),
|
||||
_ => None,
|
||||
},
|
||||
Event::ContentBlockDelta { delta, .. } => match delta {
|
||||
ContentDelta::TextDelta { text } => Some(Ok(text)),
|
||||
_ => None,
|
||||
},
|
||||
Event::Error { error } => Some(Err(api_error_to_err(error))),
|
||||
Event::Error { error } => Some(Err(AnthropicError::ApiError(error))),
|
||||
_ => None,
|
||||
},
|
||||
Err(error) => Some(Err(error)),
|
||||
@@ -212,13 +352,60 @@ pub fn extract_text_from_events(
|
||||
})
|
||||
}
|
||||
|
||||
fn api_error_to_err(
|
||||
ApiError {
|
||||
error_type,
|
||||
message,
|
||||
}: ApiError,
|
||||
) -> anyhow::Error {
|
||||
anyhow!("API error. Type: '{error_type}', message: '{message}'",)
|
||||
pub async fn extract_tool_args_from_events(
|
||||
tool_name: String,
|
||||
mut events: Pin<Box<dyn Send + Stream<Item = Result<Event>>>>,
|
||||
) -> Result<impl Send + Stream<Item = Result<String>>> {
|
||||
let mut tool_use_index = None;
|
||||
while let Some(event) = events.next().await {
|
||||
if let Event::ContentBlockStart {
|
||||
index,
|
||||
content_block,
|
||||
} = event?
|
||||
{
|
||||
if let Content::ToolUse { name, .. } = content_block {
|
||||
if name == tool_name {
|
||||
tool_use_index = Some(index);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let Some(tool_use_index) = tool_use_index else {
|
||||
return Err(anyhow!("tool not used"));
|
||||
};
|
||||
|
||||
Ok(events.filter_map(move |event| {
|
||||
let result = match event {
|
||||
Err(error) => Some(Err(error)),
|
||||
Ok(Event::ContentBlockDelta { index, delta }) => match delta {
|
||||
ContentDelta::TextDelta { .. } => None,
|
||||
ContentDelta::InputJsonDelta { partial_json } => {
|
||||
if index == tool_use_index {
|
||||
Some(Ok(partial_json))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
},
|
||||
_ => None,
|
||||
};
|
||||
|
||||
async move { result }
|
||||
}))
|
||||
}
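
A side note rather than part of the change: `extract_tool_args_from_events` can forward each `InputJsonDelta.partial_json` chunk as-is because the deltas for one tool-use block are consecutive fragments of a single JSON document, so a caller only needs to concatenate them. A tiny sketch, assuming serde_json; the fragment boundaries are invented:

```rs
fn main() {
    // Hypothetical deltas for one tool-use block, split at arbitrary points.
    let partial_json_chunks = ["{\"cit", "y\": \"Par", "is\"}"];

    // Concatenating the chunks in order reassembles the full argument object.
    let assembled: String = partial_json_chunks.concat();
    let args: serde_json::Value = serde_json::from_str(&assembled).unwrap();

    assert_eq!(args["city"], "Paris");
}
```
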
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, Copy, Clone)]
|
||||
#[serde(rename_all = "lowercase")]
|
||||
pub enum CacheControlType {
|
||||
Ephemeral,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, Copy, Clone)]
|
||||
pub struct CacheControl {
|
||||
#[serde(rename = "type")]
|
||||
pub cache_type: CacheControlType,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
@@ -227,7 +414,7 @@ pub struct Message {
|
||||
pub content: Vec<Content>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[derive(Debug, Serialize, Deserialize, Eq, PartialEq, Hash)]
|
||||
#[serde(rename_all = "lowercase")]
|
||||
pub enum Role {
|
||||
User,
|
||||
@@ -238,19 +425,31 @@ pub enum Role {
|
||||
#[serde(tag = "type")]
|
||||
pub enum Content {
|
||||
#[serde(rename = "text")]
|
||||
Text { text: String },
|
||||
Text {
|
||||
text: String,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
cache_control: Option<CacheControl>,
|
||||
},
|
||||
#[serde(rename = "image")]
|
||||
Image { source: ImageSource },
|
||||
Image {
|
||||
source: ImageSource,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
cache_control: Option<CacheControl>,
|
||||
},
|
||||
#[serde(rename = "tool_use")]
|
||||
ToolUse {
|
||||
id: String,
|
||||
name: String,
|
||||
input: serde_json::Value,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
cache_control: Option<CacheControl>,
|
||||
},
|
||||
#[serde(rename = "tool_result")]
|
||||
ToolResult {
|
||||
tool_use_id: String,
|
||||
content: String,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
cache_control: Option<CacheControl>,
|
||||
},
|
||||
}
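
For illustration only: the `cache_control` fields added above are all `Option` guarded by `skip_serializing_if`, so requests that do not opt into prompt caching serialize exactly as before. A minimal standalone sketch of that behaviour, assuming serde and serde_json and simplifying `cache_control` to a string instead of the real `CacheControl` struct:

```rs
use serde::Serialize;

#[derive(Serialize)]
#[serde(tag = "type", rename_all = "lowercase")]
enum Content {
    Text {
        text: String,
        // Stand-in for Option<CacheControl>; the skipping behaviour is the same.
        #[serde(skip_serializing_if = "Option::is_none")]
        cache_control: Option<String>,
    },
}

fn main() {
    let plain = Content::Text {
        text: "hello".into(),
        cache_control: None,
    };
    // With `cache_control: None` the field is omitted entirely, so the wire
    // format is unchanged for callers that do not use prompt caching.
    assert_eq!(
        serde_json::to_string(&plain).unwrap(),
        r#"{"type":"text","text":"hello"}"#
    );
}
```
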
|
||||
|
||||
@@ -374,9 +573,53 @@ pub struct MessageDelta {
|
||||
pub stop_sequence: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Error, Debug)]
|
||||
pub enum AnthropicError {
|
||||
#[error("an error occurred while interacting with the Anthropic API: {error_type}: {message}", error_type = .0.error_type, message = .0.message)]
|
||||
ApiError(ApiError),
|
||||
#[error("{0}")]
|
||||
Other(#[from] anyhow::Error),
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct ApiError {
|
||||
#[serde(rename = "type")]
|
||||
pub error_type: String,
|
||||
pub message: String,
|
||||
}
|
||||
|
||||
/// An Anthropic API error code.
|
||||
/// https://docs.anthropic.com/en/api/errors#http-errors
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Copy, EnumString)]
|
||||
#[strum(serialize_all = "snake_case")]
|
||||
pub enum ApiErrorCode {
|
||||
/// 400 - `invalid_request_error`: There was an issue with the format or content of your request.
|
||||
InvalidRequestError,
|
||||
/// 401 - `authentication_error`: There's an issue with your API key.
|
||||
AuthenticationError,
|
||||
/// 403 - `permission_error`: Your API key does not have permission to use the specified resource.
|
||||
PermissionError,
|
||||
/// 404 - `not_found_error`: The requested resource was not found.
|
||||
NotFoundError,
|
||||
/// 413 - `request_too_large`: Request exceeds the maximum allowed number of bytes.
|
||||
RequestTooLarge,
|
||||
/// 429 - `rate_limit_error`: Your account has hit a rate limit.
|
||||
RateLimitError,
|
||||
/// 500 - `api_error`: An unexpected error has occurred internal to Anthropic's systems.
|
||||
ApiError,
|
||||
/// 529 - `overloaded_error`: Anthropic's API is temporarily overloaded.
|
||||
OverloadedError,
|
||||
}
|
||||
|
||||
impl ApiError {
|
||||
pub fn code(&self) -> Option<ApiErrorCode> {
|
||||
ApiErrorCode::from_str(&self.error_type).ok()
|
||||
}
|
||||
|
||||
pub fn is_rate_limit_error(&self) -> bool {
|
||||
match self.error_type.as_str() {
|
||||
"rate_limit_error" => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
}
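
One more aside, not in the diff: `ApiError::code` relies on strum's `EnumString` derive with `serialize_all = "snake_case"`, which maps the wire strings documented above onto `ApiErrorCode` variants. A standalone sketch of that mapping, assuming the `strum` crate with its derive feature and reproducing only two variants:

```rs
use std::str::FromStr;
use strum::EnumString;

#[derive(Debug, PartialEq, EnumString)]
#[strum(serialize_all = "snake_case")]
enum ApiErrorCode {
    RateLimitError,
    OverloadedError,
}

fn main() {
    // snake_case serialization means the API's "rate_limit_error" string parses
    // straight into the RateLimitError variant.
    assert_eq!(
        ApiErrorCode::from_str("rate_limit_error"),
        Ok(ApiErrorCode::RateLimitError)
    );
    // Unknown error types fail to parse, which is why `ApiError::code()` returns an Option.
    assert!(ApiErrorCode::from_str("brand_new_error").is_err());
}
```
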
|
||||
|
||||
@@ -33,11 +33,13 @@ clock.workspace = true
|
||||
collections.workspace = true
|
||||
command_palette_hooks.workspace = true
|
||||
db.workspace = true
|
||||
context_servers.workspace = true
|
||||
editor.workspace = true
|
||||
feature_flags.workspace = true
|
||||
fs.workspace = true
|
||||
futures.workspace = true
|
||||
fuzzy.workspace = true
|
||||
globset.workspace = true
|
||||
gpui.workspace = true
|
||||
handlebars.workspace = true
|
||||
heed.workspace = true
|
||||
@@ -66,6 +68,8 @@ semantic_index.workspace = true
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
settings.workspace = true
|
||||
smallvec.workspace = true
|
||||
similar.workspace = true
|
||||
smol.workspace = true
|
||||
telemetry_events.workspace = true
|
||||
terminal.workspace = true
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
#![cfg_attr(target_os = "windows", allow(unused, dead_code))]
|
||||
|
||||
pub mod assistant_panel;
|
||||
pub mod assistant_settings;
|
||||
mod context;
|
||||
@@ -7,8 +9,11 @@ mod model_selector;
|
||||
mod prompt_library;
|
||||
mod prompts;
|
||||
mod slash_command;
|
||||
pub(crate) mod slash_command_picker;
|
||||
pub mod slash_command_settings;
|
||||
mod streaming_diff;
|
||||
mod terminal_inline_assistant;
|
||||
mod workflow;
|
||||
|
||||
pub use assistant_panel::{AssistantPanel, AssistantPanelEvent};
|
||||
use assistant_settings::AssistantSettings;
|
||||
@@ -16,8 +21,11 @@ use assistant_slash_command::SlashCommandRegistry;
|
||||
use client::{proto, Client};
|
||||
use command_palette_hooks::CommandPaletteFilter;
|
||||
pub use context::*;
|
||||
use context_servers::ContextServerRegistry;
|
||||
pub use context_store::*;
|
||||
use feature_flags::FeatureFlagAppExt;
|
||||
use fs::Fs;
|
||||
use gpui::Context as _;
|
||||
use gpui::{actions, impl_actions, AppContext, Global, SharedString, UpdateGlobal};
|
||||
use indexed_docs::IndexedDocsRegistry;
|
||||
pub(crate) use inline_assistant::*;
|
||||
@@ -26,17 +34,21 @@ use language_model::{
|
||||
};
|
||||
pub(crate) use model_selector::*;
|
||||
pub use prompts::PromptBuilder;
|
||||
use prompts::PromptLoadingParams;
|
||||
use semantic_index::{CloudEmbeddingProvider, SemanticIndex};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::{update_settings_file, Settings, SettingsStore};
|
||||
use slash_command::{
|
||||
active_command, default_command, diagnostics_command, docs_command, fetch_command,
|
||||
context_server_command, default_command, diagnostics_command, docs_command, fetch_command,
|
||||
file_command, now_command, project_command, prompt_command, search_command, symbols_command,
|
||||
tabs_command, term_command, workflow_command,
|
||||
tab_command, terminal_command, workflow_command,
|
||||
};
|
||||
use std::sync::Arc;
|
||||
pub(crate) use streaming_diff::*;
|
||||
use util::ResultExt;
|
||||
pub use workflow::*;
|
||||
|
||||
use crate::slash_command_settings::SlashCommandSettings;
|
||||
|
||||
actions!(
|
||||
assistant,
|
||||
@@ -48,16 +60,14 @@ actions!(
|
||||
InsertIntoEditor,
|
||||
ToggleFocus,
|
||||
InsertActivePrompt,
|
||||
ShowConfiguration,
|
||||
DeployHistory,
|
||||
DeployPromptLibrary,
|
||||
ConfirmCommand,
|
||||
ToggleModelSelector,
|
||||
DebugEditSteps
|
||||
]
|
||||
);
|
||||
|
||||
const DEFAULT_CONTEXT_LINES: usize = 20;
|
||||
const DEFAULT_CONTEXT_LINES: usize = 50;
|
||||
|
||||
#[derive(Clone, Default, Deserialize, PartialEq)]
|
||||
pub struct InlineAssist {
|
||||
@@ -94,6 +104,7 @@ pub enum MessageStatus {
|
||||
Pending,
|
||||
Done,
|
||||
Error(SharedString),
|
||||
Canceled,
|
||||
}
|
||||
|
||||
impl MessageStatus {
|
||||
@@ -104,6 +115,7 @@ impl MessageStatus {
|
||||
Some(proto::context_message_status::Variant::Error(error)) => {
|
||||
MessageStatus::Error(error.message.into())
|
||||
}
|
||||
Some(proto::context_message_status::Variant::Canceled(_)) => MessageStatus::Canceled,
|
||||
None => MessageStatus::Pending,
|
||||
}
|
||||
}
|
||||
@@ -127,6 +139,11 @@ impl MessageStatus {
|
||||
},
|
||||
)),
|
||||
},
|
||||
MessageStatus::Canceled => proto::ContextMessageStatus {
|
||||
variant: Some(proto::context_message_status::Variant::Canceled(
|
||||
proto::context_message_status::Canceled {},
|
||||
)),
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -164,9 +181,15 @@ impl Assistant {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn init(fs: Arc<dyn Fs>, client: Arc<Client>, cx: &mut AppContext) -> Arc<PromptBuilder> {
|
||||
pub fn init(
|
||||
fs: Arc<dyn Fs>,
|
||||
client: Arc<Client>,
|
||||
stdout_is_a_pty: bool,
|
||||
cx: &mut AppContext,
|
||||
) -> Arc<PromptBuilder> {
|
||||
cx.set_global(Assistant::default());
|
||||
AssistantSettings::register(cx);
|
||||
SlashCommandSettings::register(cx);
|
||||
|
||||
// TODO: remove this when 0.148.0 is released.
|
||||
if AssistantSettings::get_global(cx).using_outdated_settings_version {
|
||||
@@ -198,11 +221,18 @@ pub fn init(fs: Arc<dyn Fs>, client: Arc<Client>, cx: &mut AppContext) -> Arc<Pr
|
||||
init_language_model_settings(cx);
|
||||
assistant_slash_command::init(cx);
|
||||
assistant_panel::init(cx);
|
||||
context_servers::init(cx);
|
||||
|
||||
let prompt_builder = prompts::PromptBuilder::new(Some((fs.clone(), cx)))
|
||||
.log_err()
|
||||
.map(Arc::new)
|
||||
.unwrap_or_else(|| Arc::new(prompts::PromptBuilder::new(None).unwrap()));
|
||||
let prompt_builder = prompts::PromptBuilder::new(Some(PromptLoadingParams {
|
||||
fs: fs.clone(),
|
||||
repo_path: stdout_is_a_pty
|
||||
.then(|| std::env::current_dir().log_err())
|
||||
.flatten(),
|
||||
cx,
|
||||
}))
|
||||
.log_err()
|
||||
.map(Arc::new)
|
||||
.unwrap_or_else(|| Arc::new(prompts::PromptBuilder::new(None).unwrap()));
|
||||
register_slash_commands(Some(prompt_builder.clone()), cx);
|
||||
inline_assistant::init(
|
||||
fs.clone(),
|
||||
@@ -234,9 +264,69 @@ pub fn init(fs: Arc<dyn Fs>, client: Arc<Client>, cx: &mut AppContext) -> Arc<Pr
|
||||
})
|
||||
.detach();
|
||||
|
||||
register_context_server_handlers(cx);
|
||||
|
||||
prompt_builder
|
||||
}
|
||||
|
||||
fn register_context_server_handlers(cx: &mut AppContext) {
|
||||
cx.subscribe(
|
||||
&context_servers::manager::ContextServerManager::global(cx),
|
||||
|manager, event, cx| match event {
|
||||
context_servers::manager::Event::ServerStarted { server_id } => {
|
||||
cx.update_model(
|
||||
&manager,
|
||||
|manager: &mut context_servers::manager::ContextServerManager, cx| {
|
||||
let slash_command_registry = SlashCommandRegistry::global(cx);
|
||||
let context_server_registry = ContextServerRegistry::global(cx);
|
||||
if let Some(server) = manager.get_server(server_id) {
|
||||
cx.spawn(|_, _| async move {
|
||||
let Some(protocol) = server.client.read().clone() else {
|
||||
return;
|
||||
};
|
||||
|
||||
if let Some(prompts) = protocol.list_prompts().await.log_err() {
|
||||
for prompt in prompts
|
||||
.into_iter()
|
||||
.filter(context_server_command::acceptable_prompt)
|
||||
{
|
||||
log::info!(
|
||||
"registering context server command: {:?}",
|
||||
prompt.name
|
||||
);
|
||||
context_server_registry.register_command(
|
||||
server.id.clone(),
|
||||
prompt.name.as_str(),
|
||||
);
|
||||
slash_command_registry.register_command(
|
||||
context_server_command::ContextServerSlashCommand::new(
|
||||
&server, prompt,
|
||||
),
|
||||
true,
|
||||
);
|
||||
}
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
},
|
||||
);
|
||||
}
|
||||
context_servers::manager::Event::ServerStopped { server_id } => {
|
||||
let slash_command_registry = SlashCommandRegistry::global(cx);
|
||||
let context_server_registry = ContextServerRegistry::global(cx);
|
||||
if let Some(commands) = context_server_registry.get_commands(server_id) {
|
||||
for command_name in commands {
|
||||
slash_command_registry.unregister_command_by_name(&command_name);
|
||||
context_server_registry.unregister_command(&server_id, &command_name);
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
)
|
||||
.detach();
|
||||
}
|
||||
|
||||
fn init_language_model_settings(cx: &mut AppContext) {
|
||||
update_active_language_model_from_settings(cx);
|
||||
|
||||
@@ -268,17 +358,15 @@ fn update_active_language_model_from_settings(cx: &mut AppContext) {
|
||||
fn register_slash_commands(prompt_builder: Option<Arc<PromptBuilder>>, cx: &mut AppContext) {
|
||||
let slash_command_registry = SlashCommandRegistry::global(cx);
|
||||
slash_command_registry.register_command(file_command::FileSlashCommand, true);
|
||||
slash_command_registry.register_command(active_command::ActiveSlashCommand, true);
|
||||
slash_command_registry.register_command(symbols_command::OutlineSlashCommand, true);
|
||||
slash_command_registry.register_command(tabs_command::TabsSlashCommand, true);
|
||||
slash_command_registry.register_command(tab_command::TabSlashCommand, true);
|
||||
slash_command_registry.register_command(project_command::ProjectSlashCommand, true);
|
||||
slash_command_registry.register_command(search_command::SearchSlashCommand, true);
|
||||
slash_command_registry.register_command(prompt_command::PromptSlashCommand, true);
|
||||
slash_command_registry.register_command(default_command::DefaultSlashCommand, true);
|
||||
slash_command_registry.register_command(term_command::TermSlashCommand, true);
|
||||
slash_command_registry.register_command(now_command::NowSlashCommand, true);
|
||||
slash_command_registry.register_command(default_command::DefaultSlashCommand, false);
|
||||
slash_command_registry.register_command(terminal_command::TerminalSlashCommand, true);
|
||||
slash_command_registry.register_command(now_command::NowSlashCommand, false);
|
||||
slash_command_registry.register_command(diagnostics_command::DiagnosticsSlashCommand, true);
|
||||
slash_command_registry.register_command(docs_command::DocsSlashCommand, true);
|
||||
|
||||
if let Some(prompt_builder) = prompt_builder {
|
||||
slash_command_registry.register_command(
|
||||
workflow_command::WorkflowSlashCommand::new(prompt_builder),
|
||||
@@ -286,6 +374,37 @@ fn register_slash_commands(prompt_builder: Option<Arc<PromptBuilder>>, cx: &mut
|
||||
);
|
||||
}
|
||||
slash_command_registry.register_command(fetch_command::FetchSlashCommand, false);
|
||||
|
||||
update_slash_commands_from_settings(cx);
|
||||
cx.observe_global::<SettingsStore>(update_slash_commands_from_settings)
|
||||
.detach();
|
||||
|
||||
cx.observe_flag::<search_command::SearchSlashCommandFeatureFlag, _>({
|
||||
let slash_command_registry = slash_command_registry.clone();
|
||||
move |is_enabled, _cx| {
|
||||
if is_enabled {
|
||||
slash_command_registry.register_command(search_command::SearchSlashCommand, true);
|
||||
}
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
|
||||
fn update_slash_commands_from_settings(cx: &mut AppContext) {
|
||||
let slash_command_registry = SlashCommandRegistry::global(cx);
|
||||
let settings = SlashCommandSettings::get_global(cx);
|
||||
|
||||
if settings.docs.enabled {
|
||||
slash_command_registry.register_command(docs_command::DocsSlashCommand, true);
|
||||
} else {
|
||||
slash_command_registry.unregister_command(docs_command::DocsSlashCommand);
|
||||
}
|
||||
|
||||
if settings.project.enabled {
|
||||
slash_command_registry.register_command(project_command::ProjectSlashCommand, true);
|
||||
} else {
|
||||
slash_command_registry.unregister_command(project_command::ProjectSlashCommand);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn humanize_token_count(count: usize) -> String {
|
||||
|
||||
@@ -153,8 +153,8 @@ impl AssistantSettingsContent {
|
||||
models
|
||||
.into_iter()
|
||||
.filter_map(|model| match model {
|
||||
open_ai::Model::Custom { name, max_tokens } => {
|
||||
Some(language_model::provider::open_ai::AvailableModel { name, max_tokens })
|
||||
open_ai::Model::Custom { name, max_tokens, max_output_tokens } => {
|
||||
Some(language_model::provider::open_ai::AvailableModel { name, max_tokens, max_output_tokens })
|
||||
}
|
||||
_ => None,
|
||||
})
|
||||
@@ -543,8 +543,8 @@ mod tests {
|
||||
assert_eq!(
|
||||
AssistantSettings::get_global(cx).default_model,
|
||||
LanguageModelSelection {
|
||||
provider: "openai".into(),
|
||||
model: "gpt-4o".into(),
|
||||
provider: "zed.dev".into(),
|
||||
model: "claude-3-5-sonnet".into(),
|
||||
}
|
||||
);
|
||||
});
|
||||
|
||||
1222
crates/assistant/src/context/context_tests.rs
Normal file
@@ -161,7 +161,7 @@ impl ContextStore {
|
||||
) -> Result<proto::OpenContextResponse> {
|
||||
let context_id = ContextId::from_proto(envelope.payload.context_id);
|
||||
let operations = this.update(&mut cx, |this, cx| {
|
||||
if this.project.read(cx).is_remote() {
|
||||
if this.project.read(cx).is_via_collab() {
|
||||
return Err(anyhow!("only the host contexts can be opened"));
|
||||
}
|
||||
|
||||
@@ -190,7 +190,7 @@ impl ContextStore {
|
||||
mut cx: AsyncAppContext,
|
||||
) -> Result<proto::CreateContextResponse> {
|
||||
let (context_id, operations) = this.update(&mut cx, |this, cx| {
|
||||
if this.project.read(cx).is_remote() {
|
||||
if this.project.read(cx).is_via_collab() {
|
||||
return Err(anyhow!("can only create contexts as the host"));
|
||||
}
|
||||
|
||||
@@ -234,7 +234,7 @@ impl ContextStore {
|
||||
mut cx: AsyncAppContext,
|
||||
) -> Result<proto::SynchronizeContextsResponse> {
|
||||
this.update(&mut cx, |this, cx| {
|
||||
if this.project.read(cx).is_remote() {
|
||||
if this.project.read(cx).is_via_collab() {
|
||||
return Err(anyhow!("only the host can synchronize contexts"));
|
||||
}
|
||||
|
||||
@@ -356,7 +356,7 @@ impl ContextStore {
|
||||
let Some(project_id) = project.remote_id() else {
|
||||
return Task::ready(Err(anyhow!("project was not remote")));
|
||||
};
|
||||
if project.is_local() {
|
||||
if project.is_local_or_ssh() {
|
||||
return Task::ready(Err(anyhow!("cannot create remote contexts as the host")));
|
||||
}
|
||||
|
||||
@@ -487,7 +487,7 @@ impl ContextStore {
|
||||
let Some(project_id) = project.remote_id() else {
|
||||
return Task::ready(Err(anyhow!("project was not remote")));
|
||||
};
|
||||
if project.is_local() {
|
||||
if project.is_local_or_ssh() {
|
||||
return Task::ready(Err(anyhow!("cannot open remote contexts as the host")));
|
||||
}
|
||||
|
||||
@@ -589,7 +589,7 @@ impl ContextStore {
|
||||
};
|
||||
|
||||
// For now, only the host can advertise their open contexts.
|
||||
if self.project.read(cx).is_remote() {
|
||||
if self.project.read(cx).is_via_collab() {
|
||||
return;
|
||||
}
|
||||
|
||||
|
||||
@@ -19,6 +19,7 @@ use fs::Fs;
|
||||
use futures::{
|
||||
channel::mpsc,
|
||||
future::{BoxFuture, LocalBoxFuture},
|
||||
join,
|
||||
stream::{self, BoxStream},
|
||||
SinkExt, Stream, StreamExt,
|
||||
};
|
||||
@@ -58,6 +59,13 @@ pub fn init(
|
||||
cx: &mut AppContext,
|
||||
) {
|
||||
cx.set_global(InlineAssistant::new(fs, prompt_builder, telemetry));
|
||||
cx.observe_new_views(|_, cx| {
|
||||
let workspace = cx.view().clone();
|
||||
InlineAssistant::update_global(cx, |inline_assistant, cx| {
|
||||
inline_assistant.register_workspace(&workspace, cx)
|
||||
})
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
|
||||
const PROMPT_HISTORY_MAX_LEN: usize = 20;
|
||||
@@ -68,12 +76,33 @@ pub struct InlineAssistant {
|
||||
assists: HashMap<InlineAssistId, InlineAssist>,
|
||||
assists_by_editor: HashMap<WeakView<Editor>, EditorInlineAssists>,
|
||||
assist_groups: HashMap<InlineAssistGroupId, InlineAssistGroup>,
|
||||
assist_observations: HashMap<
|
||||
InlineAssistId,
|
||||
(
|
||||
async_watch::Sender<AssistStatus>,
|
||||
async_watch::Receiver<AssistStatus>,
|
||||
),
|
||||
>,
|
||||
confirmed_assists: HashMap<InlineAssistId, Model<Codegen>>,
|
||||
prompt_history: VecDeque<String>,
|
||||
prompt_builder: Arc<PromptBuilder>,
|
||||
telemetry: Option<Arc<Telemetry>>,
|
||||
fs: Arc<dyn Fs>,
|
||||
}
|
||||
|
||||
pub enum AssistStatus {
|
||||
Idle,
|
||||
Started,
|
||||
Stopped,
|
||||
Finished,
|
||||
}
|
||||
|
||||
impl AssistStatus {
|
||||
pub fn is_done(&self) -> bool {
|
||||
matches!(self, Self::Stopped | Self::Finished)
|
||||
}
|
||||
}
|
||||
|
||||
impl Global for InlineAssistant {}
|
||||
|
||||
impl InlineAssistant {
|
||||
@@ -88,6 +117,8 @@ impl InlineAssistant {
|
||||
assists: HashMap::default(),
|
||||
assists_by_editor: HashMap::default(),
|
||||
assist_groups: HashMap::default(),
|
||||
assist_observations: HashMap::default(),
|
||||
confirmed_assists: HashMap::default(),
|
||||
prompt_history: VecDeque::default(),
|
||||
prompt_builder,
|
||||
telemetry: Some(telemetry),
|
||||
@@ -95,6 +126,29 @@ impl InlineAssistant {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn register_workspace(&mut self, workspace: &View<Workspace>, cx: &mut WindowContext) {
|
||||
cx.subscribe(workspace, |_, event, cx| {
|
||||
Self::update_global(cx, |this, cx| this.handle_workspace_event(event, cx));
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
|
||||
fn handle_workspace_event(&mut self, event: &workspace::Event, cx: &mut WindowContext) {
|
||||
// When the user manually saves an editor, automatically accepts all finished transformations.
|
||||
if let workspace::Event::UserSavedItem { item, .. } = event {
|
||||
if let Some(editor) = item.upgrade().and_then(|item| item.act_as::<Editor>(cx)) {
|
||||
if let Some(editor_assists) = self.assists_by_editor.get(&editor.downgrade()) {
|
||||
for assist_id in editor_assists.assist_ids.clone() {
|
||||
let assist = &self.assists[&assist_id];
|
||||
if let CodegenStatus::Done = &assist.codegen.read(cx).status {
|
||||
self.finish_assist(assist_id, false, cx)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn assist(
|
||||
&mut self,
|
||||
editor: &View<Editor>,
|
||||
@@ -343,6 +397,7 @@ impl InlineAssistant {
|
||||
height: prompt_editor_height,
|
||||
render: build_assist_editor_renderer(prompt_editor),
|
||||
disposition: BlockDisposition::Above,
|
||||
priority: 0,
|
||||
},
|
||||
BlockProperties {
|
||||
style: BlockStyle::Sticky,
|
||||
@@ -357,6 +412,7 @@ impl InlineAssistant {
|
||||
.into_any_element()
|
||||
}),
|
||||
disposition: BlockDisposition::Below,
|
||||
priority: 0,
|
||||
},
|
||||
];
|
||||
|
||||
@@ -491,17 +547,43 @@ impl InlineAssistant {
|
||||
if editor.selections.count() == 1 {
|
||||
let selection = editor.selections.newest::<usize>(cx);
|
||||
let buffer = editor.buffer().read(cx).snapshot(cx);
|
||||
let mut closest_assist_fallback = None;
|
||||
for assist_id in &editor_assists.assist_ids {
|
||||
let assist = &self.assists[assist_id];
|
||||
let assist_range = assist.range.to_offset(&buffer);
|
||||
if assist.decorations.is_some()
|
||||
&& assist_range.contains(&selection.start)
|
||||
&& assist_range.contains(&selection.end)
|
||||
{
|
||||
self.focus_assist(*assist_id, cx);
|
||||
return;
|
||||
if assist.decorations.is_some() {
|
||||
if assist_range.contains(&selection.start)
|
||||
&& assist_range.contains(&selection.end)
|
||||
{
|
||||
self.focus_assist(*assist_id, cx);
|
||||
return;
|
||||
} else {
|
||||
let distance_from_selection = assist_range
|
||||
.start
|
||||
.abs_diff(selection.start)
|
||||
.min(assist_range.start.abs_diff(selection.end))
|
||||
+ assist_range
|
||||
.end
|
||||
.abs_diff(selection.start)
|
||||
.min(assist_range.end.abs_diff(selection.end));
|
||||
match closest_assist_fallback {
|
||||
Some((_, old_distance)) => {
|
||||
if distance_from_selection < old_distance {
|
||||
closest_assist_fallback =
|
||||
Some((assist_id, distance_from_selection));
|
||||
}
|
||||
}
|
||||
None => {
|
||||
closest_assist_fallback = Some((assist_id, distance_from_selection))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if let Some((&assist_id, _)) = closest_assist_fallback {
|
||||
self.focus_assist(assist_id, cx);
|
||||
}
|
||||
}
|
||||
|
||||
cx.propagate();
|
||||
@@ -551,14 +633,6 @@ impl InlineAssistant {
|
||||
};
|
||||
|
||||
match event {
|
||||
EditorEvent::Saved => {
|
||||
for assist_id in editor_assists.assist_ids.clone() {
|
||||
let assist = &self.assists[&assist_id];
|
||||
if let CodegenStatus::Done = &assist.codegen.read(cx).status {
|
||||
self.finish_assist(assist_id, false, cx)
|
||||
}
|
||||
}
|
||||
}
|
||||
EditorEvent::Edited { transaction_id } => {
|
||||
let buffer = editor.read(cx).buffer().read(cx);
|
||||
let edited_ranges =
|
||||
@@ -654,8 +728,21 @@ impl InlineAssistant {
|
||||
|
||||
if undo {
|
||||
assist.codegen.update(cx, |codegen, cx| codegen.undo(cx));
|
||||
} else {
|
||||
self.confirmed_assists.insert(assist_id, assist.codegen);
|
||||
}
|
||||
}
|
||||
|
||||
// Remove the assist from the status updates map
|
||||
self.assist_observations.remove(&assist_id);
|
||||
}
|
||||
|
||||
pub fn undo_assist(&mut self, assist_id: InlineAssistId, cx: &mut WindowContext) -> bool {
|
||||
let Some(codegen) = self.confirmed_assists.remove(&assist_id) else {
|
||||
return false;
|
||||
};
|
||||
codegen.update(cx, |this, cx| this.undo(cx));
|
||||
true
|
||||
}
|
||||
|
||||
fn dismiss_assist(&mut self, assist_id: InlineAssistId, cx: &mut WindowContext) -> bool {
|
||||
@@ -854,6 +941,10 @@ impl InlineAssistant {
|
||||
)
|
||||
})
|
||||
.log_err();
|
||||
|
||||
if let Some((tx, _)) = self.assist_observations.get(&assist_id) {
|
||||
tx.send(AssistStatus::Started).ok();
|
||||
}
|
||||
}
|
||||
|
||||
pub fn stop_assist(&mut self, assist_id: InlineAssistId, cx: &mut WindowContext) {
|
||||
@@ -864,19 +955,24 @@ impl InlineAssistant {
|
||||
};
|
||||
|
||||
assist.codegen.update(cx, |codegen, cx| codegen.stop(cx));
|
||||
|
||||
if let Some((tx, _)) = self.assist_observations.get(&assist_id) {
|
||||
tx.send(AssistStatus::Stopped).ok();
|
||||
}
|
||||
}
|
||||
|
||||
pub fn status_for_assist(
|
||||
&self,
|
||||
assist_id: InlineAssistId,
|
||||
cx: &WindowContext,
|
||||
) -> Option<CodegenStatus> {
|
||||
let assist = self.assists.get(&assist_id)?;
|
||||
match &assist.codegen.read(cx).status {
|
||||
CodegenStatus::Idle => Some(CodegenStatus::Idle),
|
||||
CodegenStatus::Pending => Some(CodegenStatus::Pending),
|
||||
CodegenStatus::Done => Some(CodegenStatus::Done),
|
||||
CodegenStatus::Error(error) => Some(CodegenStatus::Error(anyhow!("{:?}", error))),
|
||||
pub fn assist_status(&self, assist_id: InlineAssistId, cx: &AppContext) -> InlineAssistStatus {
|
||||
if let Some(assist) = self.assists.get(&assist_id) {
|
||||
match &assist.codegen.read(cx).status {
|
||||
CodegenStatus::Idle => InlineAssistStatus::Idle,
|
||||
CodegenStatus::Pending => InlineAssistStatus::Pending,
|
||||
CodegenStatus::Done => InlineAssistStatus::Done,
|
||||
CodegenStatus::Error(_) => InlineAssistStatus::Error,
|
||||
}
|
||||
} else if self.confirmed_assists.contains_key(&assist_id) {
|
||||
InlineAssistStatus::Confirmed
|
||||
} else {
|
||||
InlineAssistStatus::Canceled
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1026,6 +1122,7 @@ impl InlineAssistant {
|
||||
editor.set_show_gutter(false, cx);
|
||||
editor.scroll_manager.set_forbid_vertical_scroll(true);
|
||||
editor.set_read_only(true);
|
||||
editor.set_show_inline_completions(false);
|
||||
editor.highlight_rows::<DeletedLines>(
|
||||
Anchor::min()..=Anchor::max(),
|
||||
Some(cx.theme().status().deleted_background),
|
||||
@@ -1051,6 +1148,7 @@ impl InlineAssistant {
|
||||
.into_any_element()
|
||||
}),
|
||||
disposition: BlockDisposition::Above,
|
||||
priority: 0,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -1060,6 +1158,40 @@ impl InlineAssistant {
|
||||
.collect();
|
||||
})
|
||||
}
|
||||
|
||||
pub fn observe_assist(
|
||||
&mut self,
|
||||
assist_id: InlineAssistId,
|
||||
) -> async_watch::Receiver<AssistStatus> {
|
||||
if let Some((_, rx)) = self.assist_observations.get(&assist_id) {
|
||||
rx.clone()
|
||||
} else {
|
||||
let (tx, rx) = async_watch::channel(AssistStatus::Idle);
|
||||
self.assist_observations.insert(assist_id, (tx, rx.clone()));
|
||||
rx
|
||||
}
|
||||
}
|
||||
}
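
Not part of the diff: the `assist_observations` map above pairs a watch-style sender with a receiver so that callers of `observe_assist` wake up on status transitions and can read the latest status. The sketch below illustrates that pattern with tokio's `watch` channel as a stand-in, since the `async_watch` crate used here exposes a similar sender/receiver API; it assumes tokio with the `sync`, `rt`, and `macros` features.

```rs
use tokio::sync::watch;

#[derive(Clone, Copy, Debug, PartialEq)]
enum AssistStatus {
    Idle,
    Started,
    Finished,
}

#[tokio::main(flavor = "current_thread")]
async fn main() {
    let (tx, mut rx) = watch::channel(AssistStatus::Idle);

    // The assist side pushes status transitions...
    tx.send(AssistStatus::Started).ok();
    tx.send(AssistStatus::Finished).ok();

    // ...and an observer wakes up on the change and reads the latest value.
    rx.changed().await.ok();
    assert_eq!(*rx.borrow(), AssistStatus::Finished);
}
```
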
|
||||
|
||||
pub enum InlineAssistStatus {
|
||||
Idle,
|
||||
Pending,
|
||||
Done,
|
||||
Error,
|
||||
Confirmed,
|
||||
Canceled,
|
||||
}
|
||||
|
||||
impl InlineAssistStatus {
|
||||
pub(crate) fn is_pending(&self) -> bool {
|
||||
matches!(self, Self::Pending)
|
||||
}
|
||||
pub(crate) fn is_confirmed(&self) -> bool {
|
||||
matches!(self, Self::Confirmed)
|
||||
}
|
||||
pub(crate) fn is_done(&self) -> bool {
|
||||
matches!(self, Self::Done)
|
||||
}
|
||||
}
|
||||
|
||||
struct EditorInlineAssists {
|
||||
@@ -1167,12 +1299,6 @@ fn build_assist_editor_renderer(editor: &View<PromptEditor>) -> RenderBlock {
|
||||
})
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
|
||||
pub enum InitialInsertion {
|
||||
NewlineBefore,
|
||||
NewlineAfter,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Default, Debug, PartialEq, Eq, Hash)]
|
||||
pub struct InlineAssistId(usize);
|
||||
|
||||
@@ -1216,12 +1342,18 @@ struct PromptEditor {
|
||||
_codegen_subscription: Subscription,
|
||||
editor_subscriptions: Vec<Subscription>,
|
||||
pending_token_count: Task<Result<()>>,
|
||||
token_count: Option<usize>,
|
||||
token_counts: Option<TokenCounts>,
|
||||
_token_count_subscriptions: Vec<Subscription>,
|
||||
workspace: Option<WeakView<Workspace>>,
|
||||
show_rate_limit_notice: bool,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone)]
|
||||
pub struct TokenCounts {
|
||||
total: usize,
|
||||
assistant_panel: usize,
|
||||
}
|
||||
|
||||
impl EventEmitter<PromptEditorEvent> for PromptEditor {}

impl Render for PromptEditor {

@@ -1467,7 +1599,7 @@ impl PromptEditor {
codegen,
fs,
pending_token_count: Task::ready(Ok(())),
token_count: None,
token_counts: None,
_token_count_subscriptions: token_count_subscriptions,
workspace,
show_rate_limit_notice: false,

@@ -1557,7 +1689,7 @@ impl PromptEditor {
.await?;

this.update(&mut cx, |this, cx| {
this.token_count = Some(token_count);
this.token_counts = Some(token_count);
cx.notify();
})
})

@@ -1698,13 +1830,13 @@ impl PromptEditor {

fn render_token_count(&self, cx: &mut ViewContext<Self>) -> Option<impl IntoElement> {
let model = LanguageModelRegistry::read_global(cx).active_model()?;
let token_count = self.token_count?;
let token_counts = self.token_counts?;
let max_token_count = model.max_token_count();

let remaining_tokens = max_token_count as isize - token_count as isize;
let remaining_tokens = max_token_count as isize - token_counts.total as isize;
let token_count_color = if remaining_tokens <= 0 {
Color::Error
} else if token_count as f32 / max_token_count as f32 >= 0.8 {
} else if token_counts.total as f32 / max_token_count as f32 >= 0.8 {
Color::Warning
} else {
Color::Muted

@@ -1714,7 +1846,7 @@ impl PromptEditor {
.id("token_count")
.gap_0p5()
.child(
Label::new(humanize_token_count(token_count))
Label::new(humanize_token_count(token_counts.total))
.size(LabelSize::Small)
.color(token_count_color),
)

@@ -1726,11 +1858,14 @@ impl PromptEditor {
);
if let Some(workspace) = self.workspace.clone() {
token_count = token_count
.tooltip(|cx| {
.tooltip(move |cx| {
Tooltip::with_meta(
"Tokens Used by Inline Assistant",
format!(
"Tokens Used ({} from the Assistant Panel)",
humanize_token_count(token_counts.assistant_panel)
),
None,
"Click to Open Assistant Panel",
"Click to open the Assistant Panel",
cx,
)
})

@@ -1747,7 +1882,7 @@ impl PromptEditor {
} else {
token_count = token_count
.cursor_default()
.tooltip(|cx| Tooltip::text("Tokens Used by Inline Assistant", cx));
.tooltip(|cx| Tooltip::text("Tokens used", cx));
}

Some(token_count)
@@ -1964,6 +2099,8 @@ impl InlineAssist {

if assist.decorations.is_none() {
this.finish_assist(assist_id, false, cx);
} else if let Some(tx) = this.assist_observations.get(&assist_id) {
tx.0.send(AssistStatus::Finished).ok();
}
}
})

@@ -1994,7 +2131,7 @@ impl InlineAssist {
}
}

pub fn count_tokens(&self, cx: &WindowContext) -> BoxFuture<'static, Result<usize>> {
pub fn count_tokens(&self, cx: &WindowContext) -> BoxFuture<'static, Result<TokenCounts>> {
let Some(user_prompt) = self.user_prompt(cx) else {
return future::ready(Err(anyhow!("no user prompt"))).boxed();
};

@@ -2037,7 +2174,7 @@ pub struct Codegen {
builder: Arc<PromptBuilder>,
}

pub enum CodegenStatus {
enum CodegenStatus {
Idle,
Pending,
Done,
@@ -2131,11 +2268,25 @@ impl Codegen {
user_prompt: String,
assistant_panel_context: Option<LanguageModelRequest>,
cx: &AppContext,
) -> BoxFuture<'static, Result<usize>> {
) -> BoxFuture<'static, Result<TokenCounts>> {
if let Some(model) = LanguageModelRegistry::read_global(cx).active_model() {
let request = self.build_request(user_prompt, assistant_panel_context, edit_range, cx);
let request =
self.build_request(user_prompt, assistant_panel_context.clone(), edit_range, cx);
match request {
Ok(request) => model.count_tokens(request, cx),
Ok(request) => {
let total_count = model.count_tokens(request.clone(), cx);
let assistant_panel_count = assistant_panel_context
.map(|context| model.count_tokens(context, cx))
.unwrap_or_else(|| future::ready(Ok(0)).boxed());

async move {
Ok(TokenCounts {
total: total_count.await?,
assistant_panel: assistant_panel_count.await?,
})
}
.boxed()
}
Err(error) => futures::future::ready(Err(error)).boxed(),
}
} else {
@@ -2156,7 +2307,7 @@ impl Codegen {

if let Some(transformation_transaction_id) = self.transformation_transaction_id.take() {
self.buffer.update(cx, |buffer, cx| {
buffer.undo_transaction(transformation_transaction_id, cx)
buffer.undo_transaction(transformation_transaction_id, cx);
});
}

@@ -2227,6 +2378,7 @@ impl Codegen {
} else {
return Err(anyhow::anyhow!("invalid transformation range"));
};

let prompt = self
.builder
.generate_content_prompt(user_prompt, language_name, buffer, range)

@@ -2239,7 +2391,8 @@ impl Codegen {

messages.push(LanguageModelRequestMessage {
role: Role::User,
content: prompt,
content: vec![prompt.into()],
cache: false,
});

Ok(LanguageModelRequest {

@@ -2286,12 +2439,12 @@ impl Codegen {
self.diff = Diff::default();
self.status = CodegenStatus::Pending;
let mut edit_start = edit_range.start.to_offset(&snapshot);
self.generation = cx.spawn(|this, mut cx| {
self.generation = cx.spawn(|codegen, mut cx| {
async move {
let chunks = stream.await;
let generate = async {
let (mut diff_tx, mut diff_rx) = mpsc::channel(1);
let diff: Task<anyhow::Result<()>> =
let line_based_stream_diff: Task<anyhow::Result<()>> =
cx.background_executor().spawn(async move {
let mut response_latency = None;
let request_start = Instant::now();
@@ -2407,10 +2560,10 @@ impl Codegen {
});

while let Some((char_ops, line_diff)) = diff_rx.next().await {
this.update(&mut cx, |this, cx| {
this.last_equal_ranges.clear();
codegen.update(&mut cx, |codegen, cx| {
codegen.last_equal_ranges.clear();

let transaction = this.buffer.update(cx, |buffer, cx| {
let transaction = codegen.buffer.update(cx, |buffer, cx| {
// Avoid grouping assistant edits with user edits.
buffer.finalize_last_transaction(cx);

@@ -2435,23 +2588,24 @@ impl Codegen {
let edit_range = snapshot.anchor_after(edit_start)
..snapshot.anchor_before(edit_end);
edit_start = edit_end;
this.last_equal_ranges.push(edit_range);
codegen.last_equal_ranges.push(edit_range);
None
}
}),
None,
cx,
);
this.edit_position = Some(snapshot.anchor_after(edit_start));
codegen.edit_position = Some(snapshot.anchor_after(edit_start));

buffer.end_transaction(cx)
});

if let Some(transaction) = transaction {
if let Some(first_transaction) = this.transformation_transaction_id
if let Some(first_transaction) =
codegen.transformation_transaction_id
{
// Group all assistant edits into the first transaction.
this.buffer.update(cx, |buffer, cx| {
codegen.buffer.update(cx, |buffer, cx| {
buffer.merge_transactions(
transaction,
first_transaction,

@@ -2459,36 +2613,45 @@ impl Codegen {
)
});
} else {
this.transformation_transaction_id = Some(transaction);
this.buffer.update(cx, |buffer, cx| {
codegen.transformation_transaction_id = Some(transaction);
codegen.buffer.update(cx, |buffer, cx| {
buffer.finalize_last_transaction(cx)
});
}
}

this.update_diff(edit_range.clone(), line_diff, cx);
codegen.reapply_line_based_diff(edit_range.clone(), line_diff, cx);

cx.notify();
})?;
}

diff.await?;
// Streaming stopped and we have the new text in the buffer, and a line-based diff applied for the whole new buffer.
// That diff is not what a regular diff is and might look unexpected, ergo apply a regular diff.
// It's fine to apply even if the rest of the line diffing fails, as no more hunks are coming through `diff_rx`.
let batch_diff_task = codegen.update(&mut cx, |codegen, cx| {
codegen.reapply_batch_diff(edit_range.clone(), cx)
})?;
let (line_based_stream_diff, ()) =
join!(line_based_stream_diff, batch_diff_task);
line_based_stream_diff?;

anyhow::Ok(())
};

let result = generate.await;
this.update(&mut cx, |this, cx| {
this.last_equal_ranges.clear();
if let Err(error) = result {
this.status = CodegenStatus::Error(error);
} else {
this.status = CodegenStatus::Done;
}
cx.emit(CodegenEvent::Finished);
cx.notify();
})
.ok();
codegen
.update(&mut cx, |this, cx| {
this.last_equal_ranges.clear();
if let Err(error) = result {
this.status = CodegenStatus::Error(error);
} else {
this.status = CodegenStatus::Done;
}
cx.emit(CodegenEvent::Finished);
cx.notify();
})
.ok();
}
});
cx.notify();
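The generation task above drives the streaming line diff and the final batch diff to completion together via futures' `join!` macro. A tiny self-contained sketch of that pattern, with placeholder futures rather than Zed tasks:

use futures::{executor::block_on, future, join};

fn main() {
    let line_based_stream_diff = future::ready(Ok::<(), String>(()));
    let batch_diff_task = future::ready(());
    // `join!` polls both futures to completion and yields both outputs as a tuple.
    let (stream_result, ()) = block_on(async { join!(line_based_stream_diff, batch_diff_task) });
    stream_result.unwrap();
}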
@@ -2510,15 +2673,17 @@ impl Codegen {
self.buffer.update(cx, |buffer, cx| {
if let Some(transaction_id) = self.transformation_transaction_id.take() {
buffer.undo_transaction(transaction_id, cx);
buffer.refresh_preview(cx);
}

if let Some(transaction_id) = self.initial_transaction_id.take() {
buffer.undo_transaction(transaction_id, cx);
buffer.refresh_preview(cx);
}
});
}

fn update_diff(
fn reapply_line_based_diff(
&mut self,
edit_range: Range<Anchor>,
line_operations: Vec<LineOperation>,

@@ -2577,6 +2742,99 @@ impl Codegen {
cx.notify();
}
}
fn reapply_batch_diff(
&mut self,
edit_range: Range<Anchor>,
cx: &mut ModelContext<Self>,
) -> Task<()> {
let old_snapshot = self.snapshot.clone();
let old_range = edit_range.to_point(&old_snapshot);
let new_snapshot = self.buffer.read(cx).snapshot(cx);
let new_range = edit_range.to_point(&new_snapshot);

cx.spawn(|codegen, mut cx| async move {
let (deleted_row_ranges, inserted_row_ranges) = cx
.background_executor()
.spawn(async move {
let old_text = old_snapshot
.text_for_range(
Point::new(old_range.start.row, 0)
..Point::new(
old_range.end.row,
old_snapshot.line_len(MultiBufferRow(old_range.end.row)),
),
)
.collect::<String>();
let new_text = new_snapshot
.text_for_range(
Point::new(new_range.start.row, 0)
..Point::new(
new_range.end.row,
new_snapshot.line_len(MultiBufferRow(new_range.end.row)),
),
)
.collect::<String>();

let mut old_row = old_range.start.row;
let mut new_row = new_range.start.row;
let batch_diff =
similar::TextDiff::from_lines(old_text.as_str(), new_text.as_str());

let mut deleted_row_ranges: Vec<(Anchor, RangeInclusive<u32>)> = Vec::new();
let mut inserted_row_ranges = Vec::new();
for change in batch_diff.iter_all_changes() {
let line_count = change.value().lines().count() as u32;
match change.tag() {
similar::ChangeTag::Equal => {
old_row += line_count;
new_row += line_count;
}
similar::ChangeTag::Delete => {
let old_end_row = old_row + line_count - 1;
let new_row = new_snapshot.anchor_before(Point::new(new_row, 0));

if let Some((_, last_deleted_row_range)) =
deleted_row_ranges.last_mut()
{
if *last_deleted_row_range.end() + 1 == old_row {
*last_deleted_row_range =
*last_deleted_row_range.start()..=old_end_row;
} else {
deleted_row_ranges.push((new_row, old_row..=old_end_row));
}
} else {
deleted_row_ranges.push((new_row, old_row..=old_end_row));
}

old_row += line_count;
}
similar::ChangeTag::Insert => {
let new_end_row = new_row + line_count - 1;
let start = new_snapshot.anchor_before(Point::new(new_row, 0));
let end = new_snapshot.anchor_before(Point::new(
new_end_row,
new_snapshot.line_len(MultiBufferRow(new_end_row)),
));
inserted_row_ranges.push(start..=end);
new_row += line_count;
}
}
}

(deleted_row_ranges, inserted_row_ranges)
})
.await;

codegen
.update(&mut cx, |codegen, cx| {
codegen.diff.deleted_row_ranges = deleted_row_ranges;
codegen.diff.inserted_row_ranges = inserted_row_ranges;
cx.notify();
})
.ok();
})
}
}
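The batch pass above leans on the `similar` crate's line diff to turn equal/delete/insert runs into row ranges. A minimal standalone sketch of the same row bookkeeping, using toy strings instead of buffer snapshots:

use similar::{ChangeTag, TextDiff};

fn main() {
    let old_text = "alpha\nbeta\ngamma\n";
    let new_text = "alpha\nBETA\ngamma\ndelta\n";
    let diff = TextDiff::from_lines(old_text, new_text);

    let (mut old_row, mut new_row) = (0u32, 0u32);
    for change in diff.iter_all_changes() {
        // With `from_lines`, each change covers whole lines.
        let lines = change.value().lines().count() as u32;
        match change.tag() {
            ChangeTag::Equal => {
                old_row += lines;
                new_row += lines;
            }
            ChangeTag::Delete => {
                println!("deleted old rows {}..={}", old_row, old_row + lines - 1);
                old_row += lines;
            }
            ChangeTag::Insert => {
                println!("inserted new rows {}..={}", new_row, new_row + lines - 1);
                new_row += lines;
            }
        }
    }
}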
struct StripInvalidSpans<T> {

@@ -1,14 +1,16 @@
use feature_flags::ZedPro;
use gpui::Action;
use gpui::DismissEvent;

use language_model::{LanguageModel, LanguageModelAvailability, LanguageModelRegistry};
use proto::Plan;
use workspace::ShowConfiguration;

use std::sync::Arc;
use ui::ListItemSpacing;

use crate::assistant_settings::AssistantSettings;
use crate::ShowConfiguration;
use fs::Fs;
use gpui::Action;
use gpui::SharedString;
use gpui::Task;
use picker::{Picker, PickerDelegate};
@@ -35,7 +37,7 @@ pub struct ModelPickerDelegate {
#[derive(Clone)]
struct ModelInfo {
model: Arc<dyn LanguageModel>,
provider_icon: IconName,
icon: IconName,
availability: LanguageModelAvailability,
is_selected: bool,
}

@@ -132,6 +134,8 @@ impl PickerDelegate for ModelPickerDelegate {
model.is_selected = model.model.id() == selected_model_id
&& model.model.provider_id() == selected_provider_id;
}

cx.emit(DismissEvent);
}
}

@@ -146,6 +150,8 @@ impl PickerDelegate for ModelPickerDelegate {
use feature_flags::FeatureFlagAppExt;
let model_info = self.filtered_models.get(ix)?;
let show_badges = cx.has_flag::<ZedPro>();
let provider_name: String = model_info.model.provider_name().0.into();

Some(
ListItem::new(ix)
.inset(true)

@@ -153,9 +159,9 @@ impl PickerDelegate for ModelPickerDelegate {
.selected(selected)
.start_slot(
div().pr_1().child(
Icon::new(model_info.provider_icon)
Icon::new(model_info.icon)
.color(Color::Muted)
.size(IconSize::XSmall),
.size(IconSize::Medium),
),
)
.child(

@@ -163,11 +169,16 @@ impl PickerDelegate for ModelPickerDelegate {
.w_full()
.justify_between()
.font_buffer(cx)
.min_w(px(200.))
.min_w(px(240.))
.child(
h_flex()
.gap_2()
.child(Label::new(model_info.model.name().0.clone()))
.child(
Label::new(provider_name)
.size(LabelSize::XSmall)
.color(Color::Muted),
)
.children(match model_info.availability {
LanguageModelAvailability::Public => None,
LanguageModelAvailability::RequiresPlan(Plan::Free) => None,

@@ -258,16 +269,17 @@ impl<T: PopoverTrigger> RenderOnce for ModelSelector<T> {
.iter()
.flat_map(|provider| {
let provider_id = provider.id();
let provider_icon = provider.icon();
let icon = provider.icon();
let selected_model = selected_model.clone();
let selected_provider = selected_provider.clone();

provider.provided_models(cx).into_iter().map(move |model| {
let model = model.clone();
let icon = model.icon().unwrap_or(icon);

ModelInfo {
model: model.clone(),
provider_icon,
icon,
availability: model.availability(),
is_selected: selected_model.as_ref() == Some(&model.id())
&& selected_provider.as_ref() == Some(&provider_id),

@@ -292,5 +304,6 @@ impl<T: PopoverTrigger> RenderOnce for ModelSelector<T> {
.menu(move |_cx| Some(picker_view.clone()))
.trigger(self.trigger)
.attach(gpui::AnchorCorner::BottomLeft)
.when_some(self.handle, |menu, handle| menu.with_handle(handle))
}
}
@@ -11,8 +11,8 @@ use futures::{
};
use fuzzy::StringMatchCandidate;
use gpui::{
actions, point, size, transparent_black, AppContext, BackgroundExecutor, Bounds, EventEmitter,
Global, HighlightStyle, PromptLevel, ReadGlobal, Subscription, Task, TextStyle,
actions, point, size, transparent_black, Action, AppContext, BackgroundExecutor, Bounds,
EventEmitter, Global, HighlightStyle, PromptLevel, ReadGlobal, Subscription, Task, TextStyle,
TitlebarOptions, UpdateGlobal, View, WindowBounds, WindowHandle, WindowOptions,
};
use heed::{

@@ -38,7 +38,7 @@ use std::{
use text::LineEnding;
use theme::ThemeSettings;
use ui::{
div, prelude::*, IconButtonShape, ListItem, ListItemSpacing, ParentElement, Render,
div, prelude::*, IconButtonShape, KeyBinding, ListItem, ListItemSpacing, ParentElement, Render,
SharedString, Styled, Tooltip, ViewContext, VisualContext,
};
use util::{ResultExt, TryFutureExt};

@@ -100,7 +100,7 @@ pub fn open_prompt_library(
WindowOptions {
titlebar: Some(TitlebarOptions {
title: Some("Prompt Library".into()),
appears_transparent: true,
appears_transparent: !cfg!(windows),
traffic_light_position: Some(point(px(9.0), px(9.0))),
}),
window_bounds: Some(WindowBounds::Windowed(bounds)),

@@ -155,6 +155,14 @@ impl PickerDelegate for PromptPickerDelegate {
self.matches.len()
}

fn no_matches_text(&self, _cx: &mut WindowContext) -> SharedString {
if self.store.prompt_count() == 0 {
"No prompts.".into()
} else {
"No prompts found matching your search.".into()
}
}

fn selected_index(&self) -> usize {
self.selected_index
}
@@ -486,7 +494,10 @@ impl PromptLibrary {
let mut editor = Editor::auto_width(cx);
editor.set_placeholder_text("Untitled", cx);
editor.set_text(prompt_metadata.title.unwrap_or_default(), cx);
editor.set_read_only(prompt_id.is_built_in());
if prompt_id.is_built_in() {
editor.set_read_only(true);
editor.set_show_inline_completions(false);
}
editor
});
let body_editor = cx.new_view(|cx| {

@@ -498,7 +509,10 @@ impl PromptLibrary {
});

let mut editor = Editor::for_buffer(buffer, None, cx);
editor.set_read_only(prompt_id.is_built_in());
if prompt_id.is_built_in() {
editor.set_read_only(true);
editor.set_show_inline_completions(false);
}
editor.set_soft_wrap_mode(SoftWrap::EditorWidth, cx);
editor.set_show_gutter(false, cx);
editor.set_show_wrap_guides(false, cx);

@@ -775,7 +789,8 @@ impl PromptLibrary {
LanguageModelRequest {
messages: vec![LanguageModelRequestMessage {
role: Role::System,
content: body.to_string(),
content: vec![body.to_string().into()],
cache: false,
}],
stop: Vec::new(),
temperature: 1.,

@@ -911,6 +926,7 @@ impl PromptLibrary {
color: Some(cx.theme().status().predictive),
..HighlightStyle::default()
},
..EditorStyle::default()
},
)),
),
@@ -1094,7 +1110,55 @@ impl Render for PromptLibrary {
.font(ui_font)
.text_color(theme.colors().text)
.child(self.render_prompt_list(cx))
.child(self.render_active_prompt(cx))
.map(|el| {
if self.store.prompt_count() == 0 {
el.child(
v_flex()
.w_2_3()
.h_full()
.items_center()
.justify_center()
.gap_4()
.bg(cx.theme().colors().editor_background)
.child(
h_flex()
.gap_2()
.child(
Icon::new(IconName::Book)
.size(IconSize::Medium)
.color(Color::Muted),
)
.child(
Label::new("No prompts yet")
.size(LabelSize::Large)
.color(Color::Muted),
),
)
.child(
h_flex()
.child(h_flex())
.child(
v_flex()
.gap_1()
.child(Label::new("Create your first prompt:"))
.child(
Button::new("create-prompt", "New Prompt")
.full_width()
.key_binding(KeyBinding::for_action(
&NewPrompt, cx,
))
.on_click(|_, cx| {
cx.dispatch_action(NewPrompt.boxed_clone())
}),
),
)
.child(h_flex()),
),
)
} else {
el.child(self.render_active_prompt(cx))
}
})
}
}
@@ -1342,6 +1406,11 @@ impl PromptStore {
})
}

/// Returns the number of prompts in the store.
fn prompt_count(&self) -> usize {
self.metadata_cache.read().metadata.len()
}

fn metadata(&self, id: PromptId) -> Option<PromptMetadata> {
self.metadata_cache.read().metadata_by_id.get(&id).cloned()
}
@@ -1,11 +1,13 @@
use anyhow::Result;
use assets::Assets;
use fs::Fs;
use futures::StreamExt;
use handlebars::{Handlebars, RenderError, TemplateError};
use gpui::AssetSource;
use handlebars::{Handlebars, RenderError};
use language::BufferSnapshot;
use parking_lot::Mutex;
use serde::Serialize;
use std::{ops::Range, sync::Arc, time::Duration};
use std::{ops::Range, path::PathBuf, sync::Arc, time::Duration};
use util::ResultExt;

#[derive(Serialize)]
@@ -29,115 +31,171 @@ pub struct TerminalAssistantPromptContext {
pub user_prompt: String,
}

/// Context required to generate a workflow step resolution prompt.
#[derive(Debug, Serialize)]
pub struct StepResolutionContext {
/// The full context, including <step>...</step> tags
pub workflow_context: String,
/// The text of the specific step from the context to resolve
pub step_to_resolve: String,
}

pub struct PromptLoadingParams<'a> {
pub fs: Arc<dyn Fs>,
pub repo_path: Option<PathBuf>,
pub cx: &'a gpui::AppContext,
}

pub struct PromptBuilder {
handlebars: Arc<Mutex<Handlebars<'static>>>,
}

impl PromptBuilder {
pub fn new(
fs_and_cx: Option<(Arc<dyn Fs>, &gpui::AppContext)>,
) -> Result<Self, Box<TemplateError>> {
pub fn new(loading_params: Option<PromptLoadingParams>) -> Result<Self> {
let mut handlebars = Handlebars::new();
Self::register_templates(&mut handlebars)?;
Self::register_built_in_templates(&mut handlebars)?;

let handlebars = Arc::new(Mutex::new(handlebars));

if let Some((fs, cx)) = fs_and_cx {
Self::watch_fs_for_template_overrides(fs, cx, handlebars.clone());
if let Some(params) = loading_params {
Self::watch_fs_for_template_overrides(params, handlebars.clone());
}

Ok(Self { handlebars })
}
/// Watches the filesystem for changes to prompt template overrides.
///
/// This function sets up a file watcher on the prompt templates directory. It performs
/// an initial scan of the directory and registers any existing template overrides.
/// Then it continuously monitors for changes, reloading templates as they are
/// modified or added.
///
/// If the templates directory doesn't exist initially, it waits for it to be created.
/// If the directory is removed, it restores the built-in templates and waits for the
/// directory to be recreated.
///
/// # Arguments
///
/// * `params` - A `PromptLoadingParams` struct containing the filesystem, repository path,
/// and application context.
/// * `handlebars` - An `Arc<Mutex<Handlebars>>` for registering and updating templates.
fn watch_fs_for_template_overrides(
fs: Arc<dyn Fs>,
cx: &gpui::AppContext,
mut params: PromptLoadingParams,
handlebars: Arc<Mutex<Handlebars<'static>>>,
) {
let templates_dir = paths::prompt_templates_dir();

cx.background_executor()
params.repo_path = None;
let templates_dir = paths::prompt_overrides_dir(params.repo_path.as_deref());
params.cx.background_executor()
.spawn(async move {
// Create the prompt templates directory if it doesn't exist
if !fs.is_dir(templates_dir).await {
if let Err(e) = fs.create_dir(templates_dir).await {
log::error!("Failed to create prompt templates directory: {}", e);
return;
let Some(parent_dir) = templates_dir.parent() else {
return;
};
let mut found_dir_once = false;
loop {
// Check if the templates directory exists and handle its status
// If it exists, log its presence and check if it's a symlink
// If it doesn't exist:
// - Log that we're using built-in prompts
// - Check if it's a broken symlink and log if so
// - Set up a watcher to detect when it's created
// After the first check, set the `found_dir_once` flag
// This allows us to avoid logging when looping back around after deleting the prompt overrides directory.
let dir_status = params.fs.is_dir(&templates_dir).await;
let symlink_status = params.fs.read_link(&templates_dir).await.ok();
if dir_status {
let mut log_message = format!("Prompt template overrides directory found at {}", templates_dir.display());
if let Some(target) = symlink_status {
log_message.push_str(" -> ");
log_message.push_str(&target.display().to_string());
}
log::info!("{}.", log_message);
} else {
if !found_dir_once {
log::info!("No prompt template overrides directory found at {}. Using built-in prompts.", templates_dir.display());
if let Some(target) = symlink_status {
log::info!("Symlink found pointing to {}, but target is invalid.", target.display());
}
}

if params.fs.is_dir(parent_dir).await {
let (mut changes, _watcher) = params.fs.watch(parent_dir, Duration::from_secs(1)).await;
while let Some(changed_paths) = changes.next().await {
if changed_paths.iter().any(|p| p == &templates_dir) {
let mut log_message = format!("Prompt template overrides directory detected at {}", templates_dir.display());
if let Ok(target) = params.fs.read_link(&templates_dir).await {
log_message.push_str(" -> ");
log_message.push_str(&target.display().to_string());
}
log::info!("{}.", log_message);
break;
}
}
} else {
return;
}
}
}
// Initial scan of the prompts directory
if let Ok(mut entries) = fs.read_dir(templates_dir).await {
while let Some(Ok(file_path)) = entries.next().await {
if file_path.to_string_lossy().ends_with(".hbs") {
if let Ok(content) = fs.load(&file_path).await {
let file_name = file_path.file_stem().unwrap().to_string_lossy();
found_dir_once = true;

match handlebars.lock().register_template_string(&file_name, content) {
Ok(_) => {
log::info!(
"Successfully registered template override: {} ({})",
file_name,
file_path.display()
);
},
Err(e) => {
log::error!(
"Failed to register template during initial scan: {} ({})",
e,
file_path.display()
);
},
// Initial scan of the prompt overrides directory
if let Ok(mut entries) = params.fs.read_dir(&templates_dir).await {
while let Some(Ok(file_path)) = entries.next().await {
if file_path.to_string_lossy().ends_with(".hbs") {
if let Ok(content) = params.fs.load(&file_path).await {
let file_name = file_path.file_stem().unwrap().to_string_lossy();
log::info!("Registering prompt template override: {}", file_name);
handlebars.lock().register_template_string(&file_name, content).log_err();
}
}
}
}
}
// Watch for changes
let (mut changes, watcher) = fs.watch(templates_dir, Duration::from_secs(1)).await;
while let Some(changed_paths) = changes.next().await {
for changed_path in changed_paths {
if changed_path.extension().map_or(false, |ext| ext == "hbs") {
log::info!("Reloading template: {}", changed_path.display());
if let Some(content) = fs.load(&changed_path).await.log_err() {
let file_name = changed_path.file_stem().unwrap().to_string_lossy();
let file_path = changed_path.to_string_lossy();
match handlebars.lock().register_template_string(&file_name, content) {
Ok(_) => log::info!(
"Successfully reloaded template: {} ({})",
file_name,
file_path
),
Err(e) => log::error!(
"Failed to register template: {} ({})",
e,
file_path
),
// Watch both the parent directory and the template overrides directory:
// - Monitor the parent directory to detect if the template overrides directory is deleted.
// - Monitor the template overrides directory to re-register templates when they change.
// Combine both watch streams into a single stream.
let (parent_changes, parent_watcher) = params.fs.watch(parent_dir, Duration::from_secs(1)).await;
let (changes, watcher) = params.fs.watch(&templates_dir, Duration::from_secs(1)).await;
let mut combined_changes = futures::stream::select(changes, parent_changes);

while let Some(changed_paths) = combined_changes.next().await {
if changed_paths.iter().any(|p| p == &templates_dir) {
if !params.fs.is_dir(&templates_dir).await {
log::info!("Prompt template overrides directory removed. Restoring built-in prompt templates.");
Self::register_built_in_templates(&mut handlebars.lock()).log_err();
break;
}
}
for changed_path in changed_paths {
if changed_path.starts_with(&templates_dir) && changed_path.extension().map_or(false, |ext| ext == "hbs") {
log::info!("Reloading prompt template override: {}", changed_path.display());
if let Some(content) = params.fs.load(&changed_path).await.log_err() {
let file_name = changed_path.file_stem().unwrap().to_string_lossy();
handlebars.lock().register_template_string(&file_name, content).log_err();
}
}
}
}

drop(watcher);
drop(parent_watcher);
}
drop(watcher);
})
.detach();
}
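The watcher above merges the parent-directory stream and the overrides-directory stream with `futures::stream::select`. A small self-contained sketch of that combinator, using toy in-memory streams rather than filesystem watchers:

use futures::{executor::block_on, stream, StreamExt};

fn main() {
    let a = stream::iter(vec!["a1", "a2"]);
    let b = stream::iter(vec!["b1"]);
    // `select` interleaves items from both streams and ends once both are exhausted.
    let mut combined = stream::select(a, b);
    block_on(async {
        while let Some(item) = combined.next().await {
            println!("{item}");
        }
    });
}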
fn register_templates(handlebars: &mut Handlebars) -> Result<(), Box<TemplateError>> {
let mut register_template = |id: &str| {
let prompt = Assets::get(&format!("prompts/{}.hbs", id))
.unwrap_or_else(|| panic!("{} prompt template not found", id))
.data;
handlebars
.register_template_string(id, String::from_utf8_lossy(&prompt))
.map_err(Box::new)
};

register_template("content_prompt")?;
register_template("terminal_assistant_prompt")?;
register_template("edit_workflow")?;
register_template("step_resolution")?;
fn register_built_in_templates(handlebars: &mut Handlebars) -> Result<()> {
for path in Assets.list("prompts")? {
if let Some(id) = path.split('/').last().and_then(|s| s.strip_suffix(".hbs")) {
if let Some(prompt) = Assets.load(path.as_ref()).log_err().flatten() {
log::info!("Registering built-in prompt template: {}", id);
handlebars
.register_template_string(id, String::from_utf8_lossy(prompt.as_ref()))?
}
}
}

Ok(())
}
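Both the old and new registration paths go through handlebars' `register_template_string`. A minimal sketch of registering and rendering a template with the handlebars crate; the template text and data here are invented for illustration, and `serde_json` is assumed as an extra dependency:

use handlebars::Handlebars;
use serde_json::json;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let mut handlebars = Handlebars::new();
    // Register a template under an id, then render it with any serializable data.
    handlebars.register_template_string(
        "content_prompt_demo",
        "Rewrite this {{language}} code: {{user_prompt}}",
    )?;
    let rendered = handlebars.render(
        "content_prompt_demo",
        &json!({ "language": "Rust", "user_prompt": "add error handling" }),
    )?;
    println!("{rendered}");
    Ok(())
}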
@@ -239,7 +297,10 @@ impl PromptBuilder {
self.handlebars.lock().render("edit_workflow", &())
}

pub fn generate_step_resolution_prompt(&self) -> Result<String, RenderError> {
self.handlebars.lock().render("step_resolution", &())
pub fn generate_step_resolution_prompt(
&self,
context: &StepResolutionContext,
) -> Result<String, RenderError> {
self.handlebars.lock().render("step_resolution", context)
}
}
@@ -1,11 +1,13 @@
use crate::assistant_panel::ContextEditor;
use anyhow::Result;
use assistant_slash_command::AfterCompletion;
pub use assistant_slash_command::{SlashCommand, SlashCommandOutput, SlashCommandRegistry};
use editor::{CompletionProvider, Editor};
use fuzzy::{match_strings, StringMatchCandidate};
use gpui::{AppContext, Model, Task, ViewContext, WeakView, WindowContext};
use language::{Anchor, Buffer, CodeLabel, Documentation, HighlightId, LanguageServerId, ToPoint};
use parking_lot::{Mutex, RwLock};
use project::CompletionIntent;
use rope::Point;
use std::{
ops::Range,

@@ -17,7 +19,7 @@ use std::{
use ui::ActiveTheme;
use workspace::Workspace;

pub mod active_command;
pub mod context_server_command;
pub mod default_command;
pub mod diagnostics_command;
pub mod docs_command;

@@ -28,8 +30,8 @@ pub mod project_command;
pub mod prompt_command;
pub mod search_command;
pub mod symbols_command;
pub mod tabs_command;
pub mod term_command;
pub mod tab_command;
pub mod terminal_command;
pub mod workflow_command;

pub(crate) struct SlashCommandCompletionProvider {

@@ -41,8 +43,8 @@ pub(crate) struct SlashCommandCompletionProvider {
pub(crate) struct SlashCommandLine {
/// The range within the line containing the command name.
pub name: Range<usize>,
/// The range within the line containing the command argument.
pub argument: Option<Range<usize>>,
/// Ranges within the line containing the command arguments.
pub arguments: Vec<Range<usize>>,
}
impl SlashCommandCompletionProvider {

@@ -96,34 +98,45 @@ impl SlashCommandCompletionProvider {
let command = commands.command(&mat.string)?;
let mut new_text = mat.string.clone();
let requires_argument = command.requires_argument();
if requires_argument {
let accepts_arguments = command.accepts_arguments();
if requires_argument || accepts_arguments {
new_text.push(' ');
}

let confirm = editor.clone().zip(workspace.clone()).and_then(
|(editor, workspace)| {
(!requires_argument).then(|| {
let confirm =
editor
.clone()
.zip(workspace.clone())
.map(|(editor, workspace)| {
let command_name = mat.string.clone();
let command_range = command_range.clone();
let editor = editor.clone();
let workspace = workspace.clone();
Arc::new(move |cx: &mut WindowContext| {
editor
.update(cx, |editor, cx| {
editor.run_command(
command_range.clone(),
&command_name,
None,
true,
workspace.clone(),
cx,
);
})
.ok();
}) as Arc<_>
})
},
);
Arc::new(
move |intent: CompletionIntent, cx: &mut WindowContext| {
if !requires_argument
&& (!accepts_arguments || intent.is_complete())
{
editor
.update(cx, |editor, cx| {
editor.run_command(
command_range.clone(),
&command_name,
&[],
true,
false,
workspace.clone(),
cx,
);
})
.ok();
false
} else {
requires_argument || accepts_arguments
}
},
) as Arc<_>
});
Some(project::Completion {
old_range: name_range.clone(),
documentation: Some(Documentation::SingleLine(command.description())),

@@ -131,7 +144,6 @@ impl SlashCommandCompletionProvider {
label: command.label(cx),
server_id: LanguageServerId(0),
lsp_completion: Default::default(),
show_new_completions_on_confirm: requires_argument,
confirm,
})
})
@@ -143,20 +155,20 @@ impl SlashCommandCompletionProvider {
fn complete_command_argument(
&self,
command_name: &str,
argument: String,
arguments: &[String],
command_range: Range<Anchor>,
argument_range: Range<Anchor>,
last_argument_range: Range<Anchor>,
cx: &mut WindowContext,
) -> Task<Result<Vec<project::Completion>>> {
let new_cancel_flag = Arc::new(AtomicBool::new(false));
let mut flag = self.cancel_flag.lock();
flag.store(true, SeqCst);
*flag = new_cancel_flag.clone();

let commands = SlashCommandRegistry::global(cx);
if let Some(command) = commands.command(command_name) {
let completions = command.complete_argument(
argument,
arguments,
new_cancel_flag.clone(),
self.workspace.clone(),
cx,

@@ -164,61 +176,76 @@ impl SlashCommandCompletionProvider {
let command_name: Arc<str> = command_name.into();
let editor = self.editor.clone();
let workspace = self.workspace.clone();
let arguments = arguments.to_vec();
cx.background_executor().spawn(async move {
Ok(completions
.await?
.into_iter()
.map(|command_argument| {
let confirm = if command_argument.run_command {
.map(|new_argument| {
let confirm =
editor
.clone()
.zip(workspace.clone())
.map(|(editor, workspace)| {
Arc::new({
let mut completed_arguments = arguments.clone();
if new_argument.replace_previous_arguments {
completed_arguments.clear();
} else {
completed_arguments.pop();
}
completed_arguments.push(new_argument.new_text.clone());

let command_range = command_range.clone();
let command_name = command_name.clone();
let command_argument = command_argument.new_text.clone();
move |cx: &mut WindowContext| {
editor
.update(cx, |editor, cx| {
editor.run_command(
command_range.clone(),
&command_name,
Some(&command_argument),
true,
workspace.clone(),
cx,
);
})
.ok();
move |intent: CompletionIntent, cx: &mut WindowContext| {
if new_argument.after_completion.run()
|| intent.is_complete()
{
editor
.update(cx, |editor, cx| {
editor.run_command(
command_range.clone(),
&command_name,
&completed_arguments,
true,
false,
workspace.clone(),
cx,
);
})
.ok();
false
} else {
!new_argument.after_completion.run()
}
}
}) as Arc<_>
})
} else {
None
};
});

let mut new_text = command_argument.new_text.clone();
if !command_argument.run_command {
let mut new_text = new_argument.new_text.clone();
if new_argument.after_completion == AfterCompletion::Continue {
new_text.push(' ');
}

project::Completion {
old_range: argument_range.clone(),
label: CodeLabel::plain(command_argument.label, None),
old_range: if new_argument.replace_previous_arguments {
argument_range.clone()
} else {
last_argument_range.clone()
},
label: new_argument.label,
new_text,
documentation: None,
server_id: LanguageServerId(0),
lsp_completion: Default::default(),
show_new_completions_on_confirm: !command_argument.run_command,
confirm,
}
})
.collect())
})
} else {
cx.background_executor()
.spawn(async move { Ok(Vec::new()) })
Task::ready(Ok(Vec::new()))
}
}
}
@@ -231,7 +258,7 @@ impl CompletionProvider for SlashCommandCompletionProvider {
_: editor::CompletionContext,
cx: &mut ViewContext<Editor>,
) -> Task<Result<Vec<project::Completion>>> {
let Some((name, argument, command_range, argument_range)) =
let Some((name, arguments, command_range, last_argument_range)) =
buffer.update(cx, |buffer, _cx| {
let position = buffer_position.to_point(buffer);
let line_start = Point::new(position.row, 0);

@@ -242,32 +269,52 @@ impl CompletionProvider for SlashCommandCompletionProvider {
let command_range_start = Point::new(position.row, call.name.start as u32 - 1);
let command_range_end = Point::new(
position.row,
call.argument.as_ref().map_or(call.name.end, |arg| arg.end) as u32,
call.arguments.last().map_or(call.name.end, |arg| arg.end) as u32,
);
let command_range = buffer.anchor_after(command_range_start)
..buffer.anchor_after(command_range_end);

let name = line[call.name.clone()].to_string();

Some(if let Some(argument) = call.argument {
let start =
let (arguments, last_argument_range) = if let Some(argument) = call.arguments.last()
{
let last_arg_start =
buffer.anchor_after(Point::new(position.row, argument.start as u32));
let argument = line[argument.clone()].to_string();
(name, Some(argument), command_range, start..buffer_position)
let first_arg_start = call.arguments.first().expect("we have the last element");
let first_arg_start =
buffer.anchor_after(Point::new(position.row, first_arg_start.start as u32));
let arguments = call
.arguments
.iter()
.filter_map(|argument| Some(line.get(argument.clone())?.to_string()))
.collect::<Vec<_>>();
let argument_range = first_arg_start..buffer_position;
(
Some((arguments, argument_range)),
last_arg_start..buffer_position,
)
} else {
let start =
buffer.anchor_after(Point::new(position.row, call.name.start as u32));
(name, None, command_range, start..buffer_position)
})
(None, start..buffer_position)
};

Some((name, arguments, command_range, last_argument_range))
})
else {
return Task::ready(Ok(Vec::new()));
};

if let Some(argument) = argument {
self.complete_command_argument(&name, argument, command_range, argument_range, cx)
if let Some((arguments, argument_range)) = arguments {
self.complete_command_argument(
&name,
&arguments,
command_range,
argument_range,
last_argument_range,
cx,
)
} else {
self.complete_command_name(&name, command_range, argument_range, cx)
self.complete_command_name(&name, command_range, last_argument_range, cx)
}
}
@@ -309,6 +356,10 @@ impl CompletionProvider for SlashCommandCompletionProvider {
false
}
}

fn sort_completions(&self) -> bool {
false
}
}

impl SlashCommandLine {

@@ -320,16 +371,23 @@ impl SlashCommandLine {
if let Some(call) = &mut call {
// The command arguments start at the first non-whitespace character
// after the command name, and continue until the end of the line.
if let Some(argument) = &mut call.argument {
if (*argument).is_empty() && c.is_whitespace() {
argument.start = next_ix;
if let Some(argument) = call.arguments.last_mut() {
if c.is_whitespace() {
if (*argument).is_empty() {
argument.start = next_ix;
argument.end = next_ix;
} else {
argument.end = ix;
call.arguments.push(next_ix..next_ix);
}
} else {
argument.end = next_ix;
}
argument.end = next_ix;
}
// The command name ends at the first whitespace character.
else if !call.name.is_empty() {
if c.is_whitespace() {
call.argument = Some(next_ix..next_ix);
call.arguments = vec![next_ix..next_ix];
} else {
call.name.end = next_ix;
}

@@ -345,7 +403,7 @@ impl SlashCommandLine {
else if c == '/' {
call = Some(SlashCommandLine {
name: next_ix..next_ix,
argument: None,
arguments: Vec::new(),
});
}
// The line can't contain anything before the slash except for whitespace.
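The parser change above turns the single argument range into a Vec of whitespace-separated ranges. A small standalone sketch of that splitting idea on a plain string; this is illustrative only, not the SlashCommandLine implementation:

use std::ops::Range;

// Split everything after the command name into per-argument byte ranges.
fn argument_ranges(rest: &str) -> Vec<Range<usize>> {
    let mut ranges = Vec::new();
    let mut start = None;
    for (ix, c) in rest.char_indices() {
        if c.is_whitespace() {
            if let Some(s) = start.take() {
                ranges.push(s..ix);
            }
        } else if start.is_none() {
            start = Some(ix);
        }
    }
    if let Some(s) = start {
        ranges.push(s..rest.len());
    }
    ranges
}

fn main() {
    let line = "/search foo bar";
    let rest = &line["/search".len()..];
    let args: Vec<_> = argument_ranges(rest).into_iter().map(|r| &rest[r]).collect();
    assert_eq!(args, vec!["foo", "bar"]);
}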
@@ -1,102 +0,0 @@
use super::{
diagnostics_command::write_single_file_diagnostics,
file_command::{build_entry_output_section, codeblock_fence_for_path},
SlashCommand, SlashCommandOutput,
};
use anyhow::{anyhow, Result};
use assistant_slash_command::ArgumentCompletion;
use editor::Editor;
use gpui::{AppContext, Task, WeakView};
use language::LspAdapterDelegate;
use std::sync::atomic::AtomicBool;
use std::sync::Arc;
use ui::WindowContext;
use workspace::Workspace;

pub(crate) struct ActiveSlashCommand;

impl SlashCommand for ActiveSlashCommand {
fn name(&self) -> String {
"active".into()
}

fn description(&self) -> String {
"insert active tab".into()
}

fn menu_text(&self) -> String {
"Insert Active Tab".into()
}

fn complete_argument(
self: Arc<Self>,
_query: String,
_cancel: Arc<AtomicBool>,
_workspace: Option<WeakView<Workspace>>,
_cx: &mut AppContext,
) -> Task<Result<Vec<ArgumentCompletion>>> {
Task::ready(Err(anyhow!("this command does not require argument")))
}

fn requires_argument(&self) -> bool {
false
}

fn run(
self: Arc<Self>,
_argument: Option<&str>,
workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
) -> Task<Result<SlashCommandOutput>> {
let output = workspace.update(cx, |workspace, cx| {
let Some(active_item) = workspace.active_item(cx) else {
return Task::ready(Err(anyhow!("no active tab")));
};
let Some(buffer) = active_item
.downcast::<Editor>()
.and_then(|editor| editor.read(cx).buffer().read(cx).as_singleton())
else {
return Task::ready(Err(anyhow!("active tab is not an editor")));
};

let snapshot = buffer.read(cx).snapshot();
let path = snapshot.resolve_file_path(cx, true);
let task = cx.background_executor().spawn({
let path = path.clone();
async move {
let mut output = String::new();
output.push_str(&codeblock_fence_for_path(path.as_deref(), None));
for chunk in snapshot.as_rope().chunks() {
output.push_str(chunk);
}
if !output.ends_with('\n') {
output.push('\n');
}
output.push_str("```\n");
let has_diagnostics =
write_single_file_diagnostics(&mut output, path.as_deref(), &snapshot);
if output.ends_with('\n') {
output.pop();
}
(output, has_diagnostics)
}
});
cx.foreground_executor().spawn(async move {
let (text, has_diagnostics) = task.await;
let range = 0..text.len();
Ok(SlashCommandOutput {
text,
sections: vec![build_entry_output_section(
range,
path.as_deref(),
false,
None,
)],
run_commands_in_text: has_diagnostics,
})
})
});
output.unwrap_or_else(|error| Task::ready(Err(error)))
}
}
crates/assistant/src/slash_command/context_server_command.rs (new file, 134 lines)
@@ -0,0 +1,134 @@
use anyhow::{anyhow, Result};
use assistant_slash_command::{
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
};
use collections::HashMap;
use context_servers::{
manager::{ContextServer, ContextServerManager},
protocol::PromptInfo,
};
use gpui::{Task, WeakView, WindowContext};
use language::LspAdapterDelegate;
use std::sync::atomic::AtomicBool;
use std::sync::Arc;
use ui::{IconName, SharedString};
use workspace::Workspace;

pub struct ContextServerSlashCommand {
server_id: String,
prompt: PromptInfo,
}

impl ContextServerSlashCommand {
pub fn new(server: &Arc<ContextServer>, prompt: PromptInfo) -> Self {
Self {
server_id: server.id.clone(),
prompt,
}
}
}

impl SlashCommand for ContextServerSlashCommand {
fn name(&self) -> String {
self.prompt.name.clone()
}

fn description(&self) -> String {
format!("Run context server command: {}", self.prompt.name)
}

fn menu_text(&self) -> String {
format!("Run '{}' from {}", self.prompt.name, self.server_id)
}

fn requires_argument(&self) -> bool {
self.prompt
.arguments
.as_ref()
.map_or(false, |args| !args.is_empty())
}

fn complete_argument(
self: Arc<Self>,
_arguments: &[String],
_cancel: Arc<AtomicBool>,
_workspace: Option<WeakView<Workspace>>,
_cx: &mut WindowContext,
) -> Task<Result<Vec<ArgumentCompletion>>> {
Task::ready(Ok(Vec::new()))
}

fn run(
self: Arc<Self>,
arguments: &[String],
_workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
) -> Task<Result<SlashCommandOutput>> {
let server_id = self.server_id.clone();
let prompt_name = self.prompt.name.clone();

let prompt_args = match prompt_arguments(&self.prompt, arguments) {
Ok(args) => args,
Err(e) => return Task::ready(Err(e)),
};

let manager = ContextServerManager::global(cx);
let manager = manager.read(cx);
if let Some(server) = manager.get_server(&server_id) {
cx.foreground_executor().spawn(async move {
let Some(protocol) = server.client.read().clone() else {
return Err(anyhow!("Context server not initialized"));
};
let result = protocol.run_prompt(&prompt_name, prompt_args).await?;

Ok(SlashCommandOutput {
sections: vec![SlashCommandOutputSection {
range: 0..result.len(),
icon: IconName::ZedAssistant,
label: SharedString::from(format!("Result from {}", prompt_name)),
}],
text: result,
run_commands_in_text: false,
})
})
} else {
Task::ready(Err(anyhow!("Context server not found")))
}
}
}

fn prompt_arguments(prompt: &PromptInfo, arguments: &[String]) -> Result<HashMap<String, String>> {
match &prompt.arguments {
Some(args) if args.len() > 1 => Err(anyhow!(
"Prompt has more than one argument, which is not supported"
)),
Some(args) if args.len() == 1 => {
if !arguments.is_empty() {
let mut map = HashMap::default();
map.insert(args[0].name.clone(), arguments.join(" "));
Ok(map)
} else {
Err(anyhow!("Prompt expects argument but none given"))
}
}
Some(_) | None => {
if arguments.is_empty() {
Ok(HashMap::default())
} else {
Err(anyhow!("Prompt expects no arguments but some were given"))
}
}
}
}

/// MCP servers can return prompts with multiple arguments. Since we only
/// support one argument, we ignore all others. This is the necessary predicate
/// for this.
pub fn acceptable_prompt(prompt: &PromptInfo) -> bool {
match &prompt.arguments {
None => true,
Some(args) if args.len() == 1 => true,
_ => false,
}
}
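For reference, the mapping `prompt_arguments` performs: a single declared prompt argument receives all slash-command arguments joined by spaces. A tiny illustration using hypothetical stand-in types (the real `PromptInfo` lives in the `context_servers` crate and may carry more fields):

use std::collections::HashMap;

// Hypothetical stand-ins, for illustration only.
struct ArgInfo { name: String }
struct PromptInfoLike { arguments: Option<Vec<ArgInfo>> }

fn join_into_single_argument(prompt: &PromptInfoLike, args: &[String]) -> Option<HashMap<String, String>> {
    match prompt.arguments.as_deref() {
        // Exactly one declared argument: hand it every provided word, joined.
        Some([only]) if !args.is_empty() => {
            Some(HashMap::from([(only.name.clone(), args.join(" "))]))
        }
        _ => None,
    }
}

fn main() {
    let prompt = PromptInfoLike { arguments: Some(vec![ArgInfo { name: "query".into() }]) };
    let map = join_into_single_argument(&prompt, &["foo".into(), "bar".into()]).unwrap();
    assert_eq!(map["query"], "foo bar");
}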
@@ -2,7 +2,7 @@ use super::{SlashCommand, SlashCommandOutput};
use crate::prompt_library::PromptStore;
use anyhow::{anyhow, Result};
use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection};
use gpui::{AppContext, Task, WeakView};
use gpui::{Task, WeakView};
use language::LspAdapterDelegate;
use std::{
fmt::Write,

@@ -32,17 +32,17 @@ impl SlashCommand for DefaultSlashCommand {

fn complete_argument(
self: Arc<Self>,
_query: String,
_arguments: &[String],
_cancellation_flag: Arc<AtomicBool>,
_workspace: Option<WeakView<Workspace>>,
_cx: &mut AppContext,
_cx: &mut WindowContext,
) -> Task<Result<Vec<ArgumentCompletion>>> {
Task::ready(Err(anyhow!("this command does not require argument")))
}

fn run(
self: Arc<Self>,
_argument: Option<&str>,
_arguments: &[String],
_workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
@@ -43,6 +43,7 @@ impl DiagnosticsSlashCommand {
worktree_id: entry.worktree_id.to_usize(),
path: entry.path.clone(),
path_prefix: path_prefix.clone(),
is_dir: false, // Diagnostics can't be produced for directories
distance_to_relative_ancestor: 0,
})
.collect(),

@@ -102,17 +103,21 @@ impl SlashCommand for DiagnosticsSlashCommand {
false
}

fn accepts_arguments(&self) -> bool {
true
}

fn complete_argument(
self: Arc<Self>,
query: String,
arguments: &[String],
cancellation_flag: Arc<AtomicBool>,
workspace: Option<WeakView<Workspace>>,
cx: &mut AppContext,
cx: &mut WindowContext,
) -> Task<Result<Vec<ArgumentCompletion>>> {
let Some(workspace) = workspace.and_then(|workspace| workspace.upgrade()) else {
return Task::ready(Err(anyhow!("workspace was dropped")));
};
let query = query.split_whitespace().last().unwrap_or("").to_string();
let query = arguments.last().cloned().unwrap_or_default();

let paths = self.search_paths(query.clone(), cancellation_flag.clone(), &workspace, cx);
let executor = cx.background_executor().clone();

@@ -146,9 +151,10 @@ impl SlashCommand for DiagnosticsSlashCommand {
Ok(matches
.into_iter()
.map(|completion| ArgumentCompletion {
label: completion.clone(),
label: completion.clone().into(),
new_text: completion,
run_command: true,
after_completion: assistant_slash_command::AfterCompletion::Run,
replace_previous_arguments: false,
})
.collect())
})

@@ -156,7 +162,7 @@ impl SlashCommand for DiagnosticsSlashCommand {

fn run(
self: Arc<Self>,
argument: Option<&str>,
arguments: &[String],
workspace: WeakView<Workspace>,
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
cx: &mut WindowContext,
@@ -165,61 +171,69 @@ impl SlashCommand for DiagnosticsSlashCommand {
return Task::ready(Err(anyhow!("workspace was dropped")));
};

let options = Options::parse(argument);
let options = Options::parse(arguments);

let task = collect_diagnostics(workspace.read(cx).project().clone(), options, cx);

cx.spawn(move |_| async move {
let Some((text, sections)) = task.await? else {
return Ok(SlashCommandOutput::default());
return Ok(SlashCommandOutput {
sections: vec![SlashCommandOutputSection {
range: 0..1,
icon: IconName::Library,
label: "No Diagnostics".into(),
}],
text: "\n".to_string(),
run_commands_in_text: true,
});
};

let sections = sections
.into_iter()
.map(|(range, placeholder_type)| SlashCommandOutputSection {
range,
icon: match placeholder_type {
PlaceholderType::Root(_, _) => IconName::ExclamationTriangle,
PlaceholderType::File(_) => IconName::File,
PlaceholderType::Diagnostic(DiagnosticType::Error, _) => IconName::XCircle,
PlaceholderType::Diagnostic(DiagnosticType::Warning, _) => {
IconName::ExclamationTriangle
}
},
label: match placeholder_type {
PlaceholderType::Root(summary, source) => {
let mut label = String::new();
label.push_str("Diagnostics");
if let Some(source) = source {
write!(label, " ({})", source).unwrap();
}

if summary.error_count > 0 || summary.warning_count > 0 {
label.push(':');

if summary.error_count > 0 {
write!(label, " {} errors", summary.error_count).unwrap();
if summary.warning_count > 0 {
label.push_str(",");
}
}

if summary.warning_count > 0 {
write!(label, " {} warnings", summary.warning_count).unwrap();
}
}

label.into()
}
PlaceholderType::File(file_path) => file_path.into(),
PlaceholderType::Diagnostic(_, message) => message.into(),
},
})
.collect();

Ok(SlashCommandOutput {
text,
sections: sections
.into_iter()
.map(|(range, placeholder_type)| SlashCommandOutputSection {
range,
icon: match placeholder_type {
PlaceholderType::Root(_, _) => IconName::ExclamationTriangle,
PlaceholderType::File(_) => IconName::File,
PlaceholderType::Diagnostic(DiagnosticType::Error, _) => {
IconName::XCircle
}
PlaceholderType::Diagnostic(DiagnosticType::Warning, _) => {
IconName::ExclamationTriangle
}
},
label: match placeholder_type {
PlaceholderType::Root(summary, source) => {
let mut label = String::new();
label.push_str("Diagnostics");
if let Some(source) = source {
write!(label, " ({})", source).unwrap();
}

if summary.error_count > 0 || summary.warning_count > 0 {
label.push(':');

if summary.error_count > 0 {
write!(label, " {} errors", summary.error_count).unwrap();
if summary.warning_count > 0 {
label.push_str(",");
}
}

if summary.warning_count > 0 {
write!(label, " {} warnings", summary.warning_count)
.unwrap();
}
}

label.into()
}
PlaceholderType::File(file_path) => file_path.into(),
PlaceholderType::Diagnostic(_, message) => message.into(),
},
})
.collect(),
sections,
run_commands_in_text: false,
})
})
@@ -235,25 +249,20 @@ struct Options {
const INCLUDE_WARNINGS_ARGUMENT: &str = "--include-warnings";

impl Options {
fn parse(arguments_line: Option<&str>) -> Self {
arguments_line
.map(|arguments_line| {
let args = arguments_line.split_whitespace().collect::<Vec<_>>();
let mut include_warnings = false;
let mut path_matcher = None;
for arg in args {
if arg == INCLUDE_WARNINGS_ARGUMENT {
include_warnings = true;
} else {
path_matcher = PathMatcher::new(&[arg.to_owned()]).log_err();
}
}
Self {
include_warnings,
path_matcher,
}
})
.unwrap_or_default()
fn parse(arguments: &[String]) -> Self {
let mut include_warnings = false;
let mut path_matcher = None;
for arg in arguments {
if arg == INCLUDE_WARNINGS_ARGUMENT {
include_warnings = true;
} else {
path_matcher = PathMatcher::new(&[arg.to_owned()]).log_err();
}
}
Self {
include_warnings,
path_matcher,
}
}

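For reference, here is a minimal standalone sketch of the new slice-based parsing above. `SimpleOptions` and the raw `path_glob` string are stand-ins for the real `Options` / `PathMatcher` pair, which this sketch does not reproduce.

```rust
// Sketch only: mirrors the loop above, but keeps the raw glob string instead of
// building a PathMatcher and logging failures as the real command does.
const INCLUDE_WARNINGS_ARGUMENT: &str = "--include-warnings";

#[derive(Debug, Default)]
struct SimpleOptions {
    include_warnings: bool,
    path_glob: Option<String>,
}

fn parse(arguments: &[String]) -> SimpleOptions {
    let mut options = SimpleOptions::default();
    for arg in arguments {
        if arg == INCLUDE_WARNINGS_ARGUMENT {
            options.include_warnings = true;
        } else {
            // The real implementation builds a glob matcher here.
            options.path_glob = Some(arg.clone());
        }
    }
    options
}

fn main() {
    let args = vec!["--include-warnings".to_string(), "src/**/*.rs".to_string()];
    let options = parse(&args);
    assert!(options.include_warnings);
    assert_eq!(options.path_glob.as_deref(), Some("src/**/*.rs"));
}
```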
fn match_candidates_for_args() -> [StringMatchCandidate; 1] {
|
||||
|
||||
@@ -161,30 +161,28 @@ impl SlashCommand for DocsSlashCommand {
|
||||
|
||||
fn complete_argument(
|
||||
self: Arc<Self>,
|
||||
query: String,
|
||||
arguments: &[String],
|
||||
_cancel: Arc<AtomicBool>,
|
||||
workspace: Option<WeakView<Workspace>>,
|
||||
cx: &mut AppContext,
|
||||
cx: &mut WindowContext,
|
||||
) -> Task<Result<Vec<ArgumentCompletion>>> {
|
||||
self.ensure_rust_doc_providers_are_registered(workspace, cx);
|
||||
|
||||
let indexed_docs_registry = IndexedDocsRegistry::global(cx);
|
||||
let args = DocsSlashCommandArgs::parse(&query);
|
||||
let args = DocsSlashCommandArgs::parse(arguments);
|
||||
let store = args
|
||||
.provider()
|
||||
.ok_or_else(|| anyhow!("no docs provider specified"))
|
||||
.and_then(|provider| IndexedDocsStore::try_global(provider, cx));
|
||||
cx.background_executor().spawn(async move {
|
||||
fn build_completions(
|
||||
provider: ProviderId,
|
||||
items: Vec<String>,
|
||||
) -> Vec<ArgumentCompletion> {
|
||||
fn build_completions(items: Vec<String>) -> Vec<ArgumentCompletion> {
|
||||
items
|
||||
.into_iter()
|
||||
.map(|item| ArgumentCompletion {
|
||||
label: item.clone(),
|
||||
new_text: format!("{provider} {item}"),
|
||||
run_command: true,
|
||||
label: item.clone().into(),
|
||||
new_text: item.to_string(),
|
||||
after_completion: assistant_slash_command::AfterCompletion::Run,
|
||||
replace_previous_arguments: false,
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
@@ -194,18 +192,20 @@ impl SlashCommand for DocsSlashCommand {
|
||||
let providers = indexed_docs_registry.list_providers();
|
||||
if providers.is_empty() {
|
||||
return Ok(vec![ArgumentCompletion {
|
||||
label: "No available docs providers.".to_string(),
|
||||
label: "No available docs providers.".into(),
|
||||
new_text: String::new(),
|
||||
run_command: false,
|
||||
after_completion: false.into(),
|
||||
replace_previous_arguments: false,
|
||||
}]);
|
||||
}
|
||||
|
||||
Ok(providers
|
||||
.into_iter()
|
||||
.map(|provider| ArgumentCompletion {
|
||||
label: provider.to_string(),
|
||||
label: provider.to_string().into(),
|
||||
new_text: provider.to_string(),
|
||||
run_command: false,
|
||||
after_completion: false.into(),
|
||||
replace_previous_arguments: false,
|
||||
})
|
||||
.collect())
|
||||
}
|
||||
@@ -222,17 +222,45 @@ impl SlashCommand for DocsSlashCommand {
|
||||
drop(store.clone().index(package.as_str().into()));
|
||||
}
|
||||
|
||||
let items = store.search(package).await;
|
||||
Ok(build_completions(provider, items))
|
||||
let suggested_packages = store.clone().suggest_packages().await?;
|
||||
let search_results = store.search(package).await;
|
||||
|
||||
let mut items = build_completions(search_results);
|
||||
let workspace_crate_completions = suggested_packages
|
||||
.into_iter()
|
||||
.filter(|package_name| {
|
||||
!items
|
||||
.iter()
|
||||
.any(|item| item.label.text() == package_name.as_ref())
|
||||
})
|
||||
.map(|package_name| ArgumentCompletion {
|
||||
label: format!("{package_name} (unindexed)").into(),
|
||||
new_text: format!("{package_name}"),
|
||||
after_completion: true.into(),
|
||||
replace_previous_arguments: false,
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
items.extend(workspace_crate_completions);
|
||||
|
||||
if items.is_empty() {
|
||||
return Ok(vec![ArgumentCompletion {
|
||||
label: format!(
|
||||
"Enter a {package_term} name.",
|
||||
package_term = package_term(&provider)
|
||||
)
|
||||
.into(),
|
||||
new_text: provider.to_string(),
|
||||
after_completion: false.into(),
|
||||
replace_previous_arguments: false,
|
||||
}]);
|
||||
}
|
||||
|
||||
Ok(items)
|
||||
}
|
||||
DocsSlashCommandArgs::SearchItemDocs {
|
||||
provider,
|
||||
item_path,
|
||||
..
|
||||
} => {
|
||||
DocsSlashCommandArgs::SearchItemDocs { item_path, .. } => {
|
||||
let store = store?;
|
||||
let items = store.search(item_path).await;
|
||||
Ok(build_completions(provider, items))
|
||||
Ok(build_completions(items))
|
||||
}
|
||||
}
|
||||
})
|
||||
@@ -240,16 +268,16 @@ impl SlashCommand for DocsSlashCommand {
|
||||
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
argument: Option<&str>,
|
||||
arguments: &[String],
|
||||
_workspace: WeakView<Workspace>,
|
||||
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
|
||||
cx: &mut WindowContext,
|
||||
) -> Task<Result<SlashCommandOutput>> {
|
||||
let Some(argument) = argument else {
|
||||
return Task::ready(Err(anyhow!("missing argument")));
|
||||
if arguments.is_empty() {
|
||||
return Task::ready(Err(anyhow!("missing an argument")));
|
||||
};
|
||||
|
||||
let args = DocsSlashCommandArgs::parse(argument);
|
||||
let args = DocsSlashCommandArgs::parse(arguments);
|
||||
let executor = cx.background_executor().clone();
|
||||
let task = cx.background_executor().spawn({
|
||||
let store = args
|
||||
@@ -269,6 +297,13 @@ impl SlashCommand for DocsSlashCommand {
|
||||
} => (provider, item_path),
|
||||
};
|
||||
|
||||
if key.trim().is_empty() {
|
||||
bail!(
|
||||
"no {package_term} name provided",
|
||||
package_term = package_term(&provider)
|
||||
);
|
||||
}
|
||||
|
||||
let store = store?;
|
||||
|
||||
if let Some(package) = args.package() {
|
||||
@@ -342,12 +377,18 @@ pub(crate) enum DocsSlashCommandArgs {
}

impl DocsSlashCommandArgs {
pub fn parse(argument: &str) -> Self {
let Some((provider, argument)) = argument.split_once(' ') else {
pub fn parse(arguments: &[String]) -> Self {
let Some(provider) = arguments
.get(0)
.cloned()
.filter(|arg| !arg.trim().is_empty())
else {
return Self::NoProvider;
};

let provider = ProviderId(provider.into());
let Some(argument) = arguments.get(1) else {
return Self::NoProvider;
};

if let Some((package, rest)) = argument.split_once(is_item_path_delimiter) {
if rest.trim().is_empty() {
@@ -391,6 +432,15 @@ impl DocsSlashCommandArgs {
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the term used to refer to a package.
fn package_term(provider: &ProviderId) -> &'static str {
if provider == &DocsDotRsProvider::id() || provider == &LocalRustdocProvider::id() {
return "crate";
}

"package"
}

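A hedged, self-contained sketch of how the two-element argument slice for `/docs` breaks down into provider, package, and item path. The `':'`/`'/'` delimiter set is assumed from the tests below, and the tuple-returning helper is illustrative only; the real `DocsSlashCommandArgs::parse` returns richer enum variants.

```rust
// Illustrative split of a /docs argument slice into (provider, package, item path).
fn is_item_path_delimiter(c: char) -> bool {
    c == ':' || c == '/'
}

fn split_docs_argument(arguments: &[String]) -> Option<(String, String, String)> {
    let provider = arguments.first().filter(|arg| !arg.trim().is_empty())?.clone();
    let argument = arguments.get(1)?;
    match argument.split_once(is_item_path_delimiter) {
        Some((package, item_path)) => {
            Some((provider, package.to_string(), item_path.to_string()))
        }
        None => Some((provider, argument.clone(), String::new())),
    }
}

fn main() {
    let args = vec!["gleam".to_string(), "gleam_stdlib/gleam/int".to_string()];
    let (provider, package, item_path) = split_docs_argument(&args).unwrap();
    assert_eq!(provider, "gleam");
    assert_eq!(package, "gleam_stdlib");
    assert_eq!(item_path, "gleam/int");
}
```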
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
@@ -398,16 +448,16 @@ mod tests {
|
||||
#[test]
|
||||
fn test_parse_docs_slash_command_args() {
|
||||
assert_eq!(
|
||||
DocsSlashCommandArgs::parse(""),
|
||||
DocsSlashCommandArgs::parse(&["".to_string()]),
|
||||
DocsSlashCommandArgs::NoProvider
|
||||
);
|
||||
assert_eq!(
|
||||
DocsSlashCommandArgs::parse("rustdoc"),
|
||||
DocsSlashCommandArgs::parse(&["rustdoc".to_string()]),
|
||||
DocsSlashCommandArgs::NoProvider
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
DocsSlashCommandArgs::parse("rustdoc "),
|
||||
DocsSlashCommandArgs::parse(&["rustdoc".to_string(), "".to_string()]),
|
||||
DocsSlashCommandArgs::SearchPackageDocs {
|
||||
provider: ProviderId("rustdoc".into()),
|
||||
package: "".into(),
|
||||
@@ -415,7 +465,7 @@ mod tests {
|
||||
}
|
||||
);
|
||||
assert_eq!(
|
||||
DocsSlashCommandArgs::parse("gleam "),
|
||||
DocsSlashCommandArgs::parse(&["gleam".to_string(), "".to_string()]),
|
||||
DocsSlashCommandArgs::SearchPackageDocs {
|
||||
provider: ProviderId("gleam".into()),
|
||||
package: "".into(),
|
||||
@@ -424,7 +474,7 @@ mod tests {
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
DocsSlashCommandArgs::parse("rustdoc gpui"),
|
||||
DocsSlashCommandArgs::parse(&["rustdoc".to_string(), "gpui".to_string()]),
|
||||
DocsSlashCommandArgs::SearchPackageDocs {
|
||||
provider: ProviderId("rustdoc".into()),
|
||||
package: "gpui".into(),
|
||||
@@ -432,7 +482,7 @@ mod tests {
|
||||
}
|
||||
);
|
||||
assert_eq!(
|
||||
DocsSlashCommandArgs::parse("gleam gleam_stdlib"),
|
||||
DocsSlashCommandArgs::parse(&["gleam".to_string(), "gleam_stdlib".to_string()]),
|
||||
DocsSlashCommandArgs::SearchPackageDocs {
|
||||
provider: ProviderId("gleam".into()),
|
||||
package: "gleam_stdlib".into(),
|
||||
@@ -442,7 +492,7 @@ mod tests {
|
||||
|
||||
// Adding an item path delimiter indicates we can start indexing.
|
||||
assert_eq!(
|
||||
DocsSlashCommandArgs::parse("rustdoc gpui:"),
|
||||
DocsSlashCommandArgs::parse(&["rustdoc".to_string(), "gpui:".to_string()]),
|
||||
DocsSlashCommandArgs::SearchPackageDocs {
|
||||
provider: ProviderId("rustdoc".into()),
|
||||
package: "gpui".into(),
|
||||
@@ -450,7 +500,7 @@ mod tests {
|
||||
}
|
||||
);
|
||||
assert_eq!(
|
||||
DocsSlashCommandArgs::parse("gleam gleam_stdlib/"),
|
||||
DocsSlashCommandArgs::parse(&["gleam".to_string(), "gleam_stdlib/".to_string()]),
|
||||
DocsSlashCommandArgs::SearchPackageDocs {
|
||||
provider: ProviderId("gleam".into()),
|
||||
package: "gleam_stdlib".into(),
|
||||
@@ -459,7 +509,10 @@ mod tests {
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
DocsSlashCommandArgs::parse("rustdoc gpui::foo::bar::Baz"),
|
||||
DocsSlashCommandArgs::parse(&[
|
||||
"rustdoc".to_string(),
|
||||
"gpui::foo::bar::Baz".to_string()
|
||||
]),
|
||||
DocsSlashCommandArgs::SearchItemDocs {
|
||||
provider: ProviderId("rustdoc".into()),
|
||||
package: "gpui".into(),
|
||||
@@ -467,7 +520,10 @@ mod tests {
|
||||
}
|
||||
);
|
||||
assert_eq!(
|
||||
DocsSlashCommandArgs::parse("gleam gleam_stdlib/gleam/int"),
|
||||
DocsSlashCommandArgs::parse(&[
|
||||
"gleam".to_string(),
|
||||
"gleam_stdlib/gleam/int".to_string()
|
||||
]),
|
||||
DocsSlashCommandArgs::SearchItemDocs {
|
||||
provider: ProviderId("gleam".into()),
|
||||
package: "gleam_stdlib".into(),
|
||||
|
||||
@@ -8,7 +8,7 @@ use assistant_slash_command::{
|
||||
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
|
||||
};
|
||||
use futures::AsyncReadExt;
|
||||
use gpui::{AppContext, Task, WeakView};
|
||||
use gpui::{Task, WeakView};
|
||||
use html_to_markdown::{convert_html_to_markdown, markdown, TagHandler};
|
||||
use http_client::{AsyncBody, HttpClient, HttpClientWithUrl};
|
||||
use language::LspAdapterDelegate;
|
||||
@@ -117,22 +117,22 @@ impl SlashCommand for FetchSlashCommand {
|
||||
|
||||
fn complete_argument(
|
||||
self: Arc<Self>,
|
||||
_query: String,
|
||||
_arguments: &[String],
|
||||
_cancel: Arc<AtomicBool>,
|
||||
_workspace: Option<WeakView<Workspace>>,
|
||||
_cx: &mut AppContext,
|
||||
_cx: &mut WindowContext,
|
||||
) -> Task<Result<Vec<ArgumentCompletion>>> {
|
||||
Task::ready(Ok(Vec::new()))
|
||||
}
|
||||
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
argument: Option<&str>,
|
||||
arguments: &[String],
|
||||
workspace: WeakView<Workspace>,
|
||||
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
|
||||
cx: &mut WindowContext,
|
||||
) -> Task<Result<SlashCommandOutput>> {
|
||||
let Some(argument) = argument else {
|
||||
let Some(argument) = arguments.first() else {
|
||||
return Task::ready(Err(anyhow!("missing URL")));
|
||||
};
|
||||
let Some(workspace) = workspace.upgrade() else {
|
||||
@@ -150,6 +150,10 @@ impl SlashCommand for FetchSlashCommand {
|
||||
let url = SharedString::from(url);
|
||||
cx.foreground_executor().spawn(async move {
|
||||
let text = text.await?;
|
||||
if text.trim().is_empty() {
|
||||
bail!("no textual content found");
|
||||
}
|
||||
|
||||
let range = 0..text.len();
|
||||
Ok(SlashCommandOutput {
|
||||
text,
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
use super::{diagnostics_command::write_single_file_diagnostics, SlashCommand, SlashCommandOutput};
|
||||
use anyhow::{anyhow, Result};
|
||||
use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection};
|
||||
use anyhow::{anyhow, Context as _, Result};
|
||||
use assistant_slash_command::{AfterCompletion, ArgumentCompletion, SlashCommandOutputSection};
|
||||
use fuzzy::PathMatch;
|
||||
use gpui::{AppContext, Model, Task, View, WeakView};
|
||||
use language::{BufferSnapshot, LineEnding, LspAdapterDelegate};
|
||||
use language::{BufferSnapshot, CodeLabel, HighlightId, LineEnding, LspAdapterDelegate};
|
||||
use project::{PathMatchCandidateSet, Project};
|
||||
use std::{
|
||||
fmt::Write,
|
||||
@@ -12,7 +12,7 @@ use std::{
|
||||
sync::{atomic::AtomicBool, Arc},
|
||||
};
|
||||
use ui::prelude::*;
|
||||
use util::{paths::PathMatcher, ResultExt};
|
||||
use util::ResultExt;
|
||||
use workspace::Workspace;
|
||||
|
||||
pub(crate) struct FileSlashCommand;
|
||||
@@ -29,11 +29,30 @@ impl FileSlashCommand {
|
||||
let workspace = workspace.read(cx);
|
||||
let project = workspace.project().read(cx);
|
||||
let entries = workspace.recent_navigation_history(Some(10), cx);
|
||||
|
||||
let entries = entries
|
||||
.into_iter()
|
||||
.map(|entries| (entries.0, false))
|
||||
.chain(project.worktrees(cx).flat_map(|worktree| {
|
||||
let worktree = worktree.read(cx);
|
||||
let id = worktree.id();
|
||||
worktree.child_entries(Path::new("")).map(move |entry| {
|
||||
(
|
||||
project::ProjectPath {
|
||||
worktree_id: id,
|
||||
path: entry.path.clone(),
|
||||
},
|
||||
entry.kind.is_dir(),
|
||||
)
|
||||
})
|
||||
}))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let path_prefix: Arc<str> = Arc::default();
|
||||
Task::ready(
|
||||
entries
|
||||
.into_iter()
|
||||
.filter_map(|(entry, _)| {
|
||||
.filter_map(|(entry, is_dir)| {
|
||||
let worktree = project.worktree_for_id(entry.worktree_id, cx)?;
|
||||
let mut full_path = PathBuf::from(worktree.read(cx).root_name());
|
||||
full_path.push(&entry.path);
|
||||
@@ -44,6 +63,7 @@ impl FileSlashCommand {
|
||||
path: full_path.into(),
|
||||
path_prefix: path_prefix.clone(),
|
||||
distance_to_relative_ancestor: 0,
|
||||
is_dir,
|
||||
})
|
||||
})
|
||||
.collect(),
|
||||
@@ -54,6 +74,7 @@ impl FileSlashCommand {
|
||||
.into_iter()
|
||||
.map(|worktree| {
|
||||
let worktree = worktree.read(cx);
|
||||
|
||||
PathMatchCandidateSet {
|
||||
snapshot: worktree.snapshot(),
|
||||
include_ignored: worktree
|
||||
@@ -101,32 +122,55 @@ impl SlashCommand for FileSlashCommand {
|
||||
|
||||
fn complete_argument(
|
||||
self: Arc<Self>,
|
||||
query: String,
|
||||
arguments: &[String],
|
||||
cancellation_flag: Arc<AtomicBool>,
|
||||
workspace: Option<WeakView<Workspace>>,
|
||||
cx: &mut AppContext,
|
||||
cx: &mut WindowContext,
|
||||
) -> Task<Result<Vec<ArgumentCompletion>>> {
|
||||
let Some(workspace) = workspace.and_then(|workspace| workspace.upgrade()) else {
|
||||
return Task::ready(Err(anyhow!("workspace was dropped")));
|
||||
};
|
||||
|
||||
let paths = self.search_paths(query, cancellation_flag, &workspace, cx);
|
||||
let paths = self.search_paths(
|
||||
arguments.last().cloned().unwrap_or_default(),
|
||||
cancellation_flag,
|
||||
&workspace,
|
||||
cx,
|
||||
);
|
||||
let comment_id = cx.theme().syntax().highlight_id("comment").map(HighlightId);
|
||||
cx.background_executor().spawn(async move {
|
||||
Ok(paths
|
||||
.await
|
||||
.into_iter()
|
||||
.map(|path_match| {
|
||||
.filter_map(|path_match| {
|
||||
let text = format!(
|
||||
"{}{}",
|
||||
path_match.path_prefix,
|
||||
path_match.path.to_string_lossy()
|
||||
);
|
||||
|
||||
ArgumentCompletion {
|
||||
label: text.clone(),
|
||||
let mut label = CodeLabel::default();
|
||||
let file_name = path_match.path.file_name()?.to_string_lossy();
|
||||
let label_text = if path_match.is_dir {
|
||||
format!("{}/ ", file_name)
|
||||
} else {
|
||||
format!("{} ", file_name)
|
||||
};
|
||||
|
||||
label.push_str(label_text.as_str(), None);
|
||||
label.push_str(&text, comment_id);
|
||||
label.filter_range = 0..file_name.len();
|
||||
|
||||
Some(ArgumentCompletion {
|
||||
label,
|
||||
new_text: text,
|
||||
run_command: true,
|
||||
}
|
||||
after_completion: if path_match.is_dir {
|
||||
AfterCompletion::Compose
|
||||
} else {
|
||||
AfterCompletion::Run
|
||||
},
|
||||
replace_previous_arguments: false,
|
||||
})
|
||||
})
|
||||
.collect())
|
||||
})
|
||||
@@ -134,7 +178,7 @@ impl SlashCommand for FileSlashCommand {
|
||||
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
argument: Option<&str>,
|
||||
arguments: &[String],
|
||||
workspace: WeakView<Workspace>,
|
||||
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
|
||||
cx: &mut WindowContext,
|
||||
@@ -143,23 +187,24 @@ impl SlashCommand for FileSlashCommand {
|
||||
return Task::ready(Err(anyhow!("workspace was dropped")));
|
||||
};
|
||||
|
||||
let Some(argument) = argument else {
|
||||
if arguments.is_empty() {
|
||||
return Task::ready(Err(anyhow!("missing path")));
|
||||
};
|
||||
|
||||
let task = collect_files(workspace.read(cx).project().clone(), argument, cx);
|
||||
let task = collect_files(workspace.read(cx).project().clone(), arguments, cx);
|
||||
|
||||
cx.foreground_executor().spawn(async move {
|
||||
let (text, ranges) = task.await?;
|
||||
let output = task.await?;
|
||||
Ok(SlashCommandOutput {
|
||||
text,
|
||||
sections: ranges
|
||||
text: output.completion_text,
|
||||
sections: output
|
||||
.files
|
||||
.into_iter()
|
||||
.map(|(range, path, entry_type)| {
|
||||
.map(|file| {
|
||||
build_entry_output_section(
|
||||
range,
|
||||
Some(&path),
|
||||
entry_type == EntryType::Directory,
|
||||
file.range_in_text,
|
||||
Some(&file.path),
|
||||
file.entry_type == EntryType::Directory,
|
||||
None,
|
||||
)
|
||||
})
|
||||
@@ -170,18 +215,38 @@ impl SlashCommand for FileSlashCommand {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, PartialEq)]
|
||||
#[derive(Clone, Copy, PartialEq, Debug)]
|
||||
enum EntryType {
|
||||
File,
|
||||
Directory,
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Debug)]
|
||||
struct FileCommandOutput {
|
||||
completion_text: String,
|
||||
files: Vec<OutputFile>,
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Debug)]
|
||||
struct OutputFile {
|
||||
range_in_text: Range<usize>,
|
||||
path: PathBuf,
|
||||
entry_type: EntryType,
|
||||
}
|
||||
|
||||
fn collect_files(
|
||||
project: Model<Project>,
|
||||
glob_input: &str,
|
||||
glob_inputs: &[String],
|
||||
cx: &mut AppContext,
|
||||
) -> Task<Result<(String, Vec<(Range<usize>, PathBuf, EntryType)>)>> {
|
||||
let Ok(matcher) = PathMatcher::new(&[glob_input.to_owned()]) else {
|
||||
) -> Task<Result<FileCommandOutput>> {
|
||||
let Ok(matchers) = glob_inputs
|
||||
.into_iter()
|
||||
.map(|glob_input| {
|
||||
custom_path_matcher::PathMatcher::new(&[glob_input.to_owned()])
|
||||
.with_context(|| format!("invalid path {glob_input}"))
|
||||
})
|
||||
.collect::<anyhow::Result<Vec<custom_path_matcher::PathMatcher>>>()
|
||||
else {
|
||||
return Task::ready(Err(anyhow!("invalid path")));
|
||||
};
|
||||
|
||||
@@ -191,6 +256,7 @@ fn collect_files(
|
||||
.worktrees(cx)
|
||||
.map(|worktree| worktree.read(cx).snapshot())
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
cx.spawn(|mut cx| async move {
|
||||
let mut text = String::new();
|
||||
let mut ranges = Vec::new();
|
||||
@@ -199,11 +265,16 @@ fn collect_files(
|
||||
let mut directory_stack: Vec<(Arc<Path>, String, usize)> = Vec::new();
|
||||
let mut folded_directory_names_stack = Vec::new();
|
||||
let mut is_top_level_directory = true;
|
||||
|
||||
for entry in snapshot.entries(false, 0) {
|
||||
let mut path_including_worktree_name = PathBuf::new();
|
||||
path_including_worktree_name.push(snapshot.root_name());
|
||||
path_including_worktree_name.push(&entry.path);
|
||||
if !matcher.is_match(&path_including_worktree_name) {
|
||||
|
||||
if !matchers
|
||||
.iter()
|
||||
.any(|matcher| matcher.is_match(&path_including_worktree_name))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
@@ -212,11 +283,11 @@ fn collect_files(
|
||||
break;
|
||||
}
|
||||
let (_, entry_name, start) = directory_stack.pop().unwrap();
|
||||
ranges.push((
|
||||
start..text.len().saturating_sub(1),
|
||||
PathBuf::from(entry_name),
|
||||
EntryType::Directory,
|
||||
));
|
||||
ranges.push(OutputFile {
|
||||
range_in_text: start..text.len().saturating_sub(1),
|
||||
path: PathBuf::from(entry_name),
|
||||
entry_type: EntryType::Directory,
|
||||
});
|
||||
}
|
||||
|
||||
let filename = entry
|
||||
@@ -289,24 +360,39 @@ fn collect_files(
|
||||
) {
|
||||
text.pop();
|
||||
}
|
||||
ranges.push((
|
||||
prev_len..text.len(),
|
||||
path_including_worktree_name,
|
||||
EntryType::File,
|
||||
));
|
||||
ranges.push(OutputFile {
|
||||
range_in_text: prev_len..text.len(),
|
||||
path: path_including_worktree_name,
|
||||
entry_type: EntryType::File,
|
||||
});
|
||||
text.push('\n');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
while let Some((dir, _, start)) = directory_stack.pop() {
|
||||
let mut root_path = PathBuf::new();
|
||||
root_path.push(snapshot.root_name());
|
||||
root_path.push(&dir);
|
||||
ranges.push((start..text.len(), root_path, EntryType::Directory));
|
||||
while let Some((dir, entry, start)) = directory_stack.pop() {
|
||||
if directory_stack.is_empty() {
|
||||
let mut root_path = PathBuf::new();
|
||||
root_path.push(snapshot.root_name());
|
||||
root_path.push(&dir);
|
||||
ranges.push(OutputFile {
|
||||
range_in_text: start..text.len(),
|
||||
path: root_path,
|
||||
entry_type: EntryType::Directory,
|
||||
});
|
||||
} else {
|
||||
ranges.push(OutputFile {
|
||||
range_in_text: start..text.len(),
|
||||
path: PathBuf::from(entry.as_str()),
|
||||
entry_type: EntryType::Directory,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok((text, ranges))
|
||||
Ok(FileCommandOutput {
|
||||
completion_text: text,
|
||||
files: ranges,
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
@@ -374,3 +460,300 @@ pub fn build_entry_output_section(
|
||||
label: label.into(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Contains a small fork of `util::paths::PathMatcher` that is stricter about the prefix
/// check: only subpaths pass the prefix check, rather than any string prefix.
mod custom_path_matcher {
|
||||
use std::{fmt::Debug as _, path::Path};
|
||||
|
||||
use globset::{Glob, GlobSet, GlobSetBuilder};
|
||||
|
||||
#[derive(Clone, Debug, Default)]
|
||||
pub struct PathMatcher {
|
||||
sources: Vec<String>,
|
||||
sources_with_trailing_slash: Vec<String>,
|
||||
glob: GlobSet,
|
||||
}
|
||||
|
||||
impl std::fmt::Display for PathMatcher {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
self.sources.fmt(f)
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for PathMatcher {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.sources.eq(&other.sources)
|
||||
}
|
||||
}
|
||||
|
||||
impl Eq for PathMatcher {}
|
||||
|
||||
impl PathMatcher {
|
||||
pub fn new(globs: &[String]) -> Result<Self, globset::Error> {
|
||||
let globs = globs
|
||||
.into_iter()
|
||||
.map(|glob| Glob::new(&glob))
|
||||
.collect::<Result<Vec<_>, _>>()?;
|
||||
let sources = globs.iter().map(|glob| glob.glob().to_owned()).collect();
|
||||
let sources_with_trailing_slash = globs
|
||||
.iter()
|
||||
.map(|glob| glob.glob().to_string() + std::path::MAIN_SEPARATOR_STR)
|
||||
.collect();
|
||||
let mut glob_builder = GlobSetBuilder::new();
|
||||
for single_glob in globs {
|
||||
glob_builder.add(single_glob);
|
||||
}
|
||||
let glob = glob_builder.build()?;
|
||||
Ok(PathMatcher {
|
||||
glob,
|
||||
sources,
|
||||
sources_with_trailing_slash,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn sources(&self) -> &[String] {
|
||||
&self.sources
|
||||
}
|
||||
|
||||
pub fn is_match<P: AsRef<Path>>(&self, other: P) -> bool {
|
||||
let other_path = other.as_ref();
|
||||
self.sources
|
||||
.iter()
|
||||
.zip(self.sources_with_trailing_slash.iter())
|
||||
.any(|(source, with_slash)| {
|
||||
let as_bytes = other_path.as_os_str().as_encoded_bytes();
|
||||
let with_slash = if source.ends_with("/") {
|
||||
source.as_bytes()
|
||||
} else {
|
||||
with_slash.as_bytes()
|
||||
};
|
||||
|
||||
as_bytes.starts_with(with_slash) || as_bytes.ends_with(source.as_bytes())
|
||||
})
|
||||
|| self.glob.is_match(other_path)
|
||||
|| self.check_with_end_separator(other_path)
|
||||
}
|
||||
|
||||
fn check_with_end_separator(&self, path: &Path) -> bool {
|
||||
let path_str = path.to_string_lossy();
|
||||
let separator = std::path::MAIN_SEPARATOR_STR;
|
||||
if path_str.ends_with(separator) {
|
||||
return false;
|
||||
} else {
|
||||
self.glob.is_match(path_str.to_string() + separator)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
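A minimal sketch of the stricter prefix rule the module above describes: a source like `root/dir` should match entries beneath `root/dir/` but not a sibling such as `root/dir.rs`. This stand-in uses plain `/`-separated strings and skips the globset and platform-separator handling of the real matcher.

```rust
// Simplified assumption: paths use '/' and no glob patterns are involved.
fn is_strict_prefix_match(source: &str, path: &str) -> bool {
    let with_slash = if source.ends_with('/') {
        source.to_string()
    } else {
        format!("{source}/")
    };
    path == source || path.starts_with(&with_slash)
}

fn main() {
    assert!(is_strict_prefix_match("root/dir", "root/dir/file_1"));
    assert!(is_strict_prefix_match("root/dir", "root/dir"));
    // A plain starts_with("root/dir") would also accept this sibling file.
    assert!(!is_strict_prefix_match("root/dir", "root/dir.rs"));
}
```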
#[cfg(test)]
|
||||
mod test {
|
||||
use fs::FakeFs;
|
||||
use gpui::TestAppContext;
|
||||
use project::Project;
|
||||
use serde_json::json;
|
||||
use settings::SettingsStore;
|
||||
|
||||
use crate::slash_command::file_command::collect_files;
|
||||
|
||||
pub fn init_test(cx: &mut gpui::TestAppContext) {
|
||||
if std::env::var("RUST_LOG").is_ok() {
|
||||
env_logger::try_init().ok();
|
||||
}
|
||||
|
||||
cx.update(|cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
// release_channel::init(SemanticVersion::default(), cx);
|
||||
language::init(cx);
|
||||
Project::init_settings(cx);
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_file_exact_matching(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
|
||||
fs.insert_tree(
|
||||
"/root",
|
||||
json!({
|
||||
"dir": {
|
||||
"subdir": {
|
||||
"file_0": "0"
|
||||
},
|
||||
"file_1": "1",
|
||||
"file_2": "2",
|
||||
"file_3": "3",
|
||||
},
|
||||
"dir.rs": "4"
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
|
||||
let project = Project::test(fs, ["/root".as_ref()], cx).await;
|
||||
|
||||
let result_1 = cx
|
||||
.update(|cx| collect_files(project.clone(), &["root/dir".to_string()], cx))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
assert!(result_1.completion_text.starts_with("root/dir"));
|
||||
// 4 files + 2 directories
|
||||
assert_eq!(6, result_1.files.len());
|
||||
|
||||
let result_2 = cx
|
||||
.update(|cx| collect_files(project.clone(), &["root/dir/".to_string()], cx))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(result_1, result_2);
|
||||
|
||||
let result = cx
|
||||
.update(|cx| collect_files(project.clone(), &["root/dir*".to_string()], cx))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
assert!(result.completion_text.starts_with("root/dir"));
|
||||
// 5 files + 2 directories
|
||||
assert_eq!(7, result.files.len());
|
||||
|
||||
// Ensure that the project lasts until after the last await
|
||||
drop(project);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_file_sub_directory_rendering(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
|
||||
fs.insert_tree(
|
||||
"/zed",
|
||||
json!({
|
||||
"assets": {
|
||||
"dir1": {
|
||||
".gitkeep": ""
|
||||
},
|
||||
"dir2": {
|
||||
".gitkeep": ""
|
||||
},
|
||||
"themes": {
|
||||
"ayu": {
|
||||
"LICENSE": "1",
|
||||
},
|
||||
"andromeda": {
|
||||
"LICENSE": "2",
|
||||
},
|
||||
"summercamp": {
|
||||
"LICENSE": "3",
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
|
||||
let project = Project::test(fs, ["/zed".as_ref()], cx).await;
|
||||
|
||||
let result = cx
|
||||
.update(|cx| collect_files(project.clone(), &["zed/assets/themes".to_string()], cx))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// Sanity check
|
||||
assert!(result.completion_text.starts_with("zed/assets/themes\n"));
|
||||
assert_eq!(7, result.files.len());
|
||||
|
||||
// Ensure that full file paths are included in the real output
|
||||
assert!(result
|
||||
.completion_text
|
||||
.contains("zed/assets/themes/andromeda/LICENSE"));
|
||||
assert!(result
|
||||
.completion_text
|
||||
.contains("zed/assets/themes/ayu/LICENSE"));
|
||||
assert!(result
|
||||
.completion_text
|
||||
.contains("zed/assets/themes/summercamp/LICENSE"));
|
||||
|
||||
assert_eq!("summercamp", result.files[5].path.to_string_lossy());
|
||||
|
||||
// Ensure that things are in descending order, with properly relativized paths
|
||||
assert_eq!(
|
||||
"zed/assets/themes/andromeda/LICENSE",
|
||||
result.files[0].path.to_string_lossy()
|
||||
);
|
||||
assert_eq!("andromeda", result.files[1].path.to_string_lossy());
|
||||
assert_eq!(
|
||||
"zed/assets/themes/ayu/LICENSE",
|
||||
result.files[2].path.to_string_lossy()
|
||||
);
|
||||
assert_eq!("ayu", result.files[3].path.to_string_lossy());
|
||||
assert_eq!(
|
||||
"zed/assets/themes/summercamp/LICENSE",
|
||||
result.files[4].path.to_string_lossy()
|
||||
);
|
||||
|
||||
// Ensure that the project lasts until after the last await
|
||||
drop(project);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_file_deep_sub_directory_rendering(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
|
||||
fs.insert_tree(
|
||||
"/zed",
|
||||
json!({
|
||||
"assets": {
|
||||
"themes": {
|
||||
"LICENSE": "1",
|
||||
"summercamp": {
|
||||
"LICENSE": "1",
|
||||
"subdir": {
|
||||
"LICENSE": "1",
|
||||
"subsubdir": {
|
||||
"LICENSE": "3",
|
||||
}
|
||||
}
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
|
||||
let project = Project::test(fs, ["/zed".as_ref()], cx).await;
|
||||
|
||||
let result = cx
|
||||
.update(|cx| collect_files(project.clone(), &["zed/assets/themes".to_string()], cx))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
assert!(result.completion_text.starts_with("zed/assets/themes\n"));
|
||||
assert_eq!(
|
||||
"zed/assets/themes/LICENSE",
|
||||
result.files[0].path.to_string_lossy()
|
||||
);
|
||||
assert_eq!(
|
||||
"zed/assets/themes/summercamp/LICENSE",
|
||||
result.files[1].path.to_string_lossy()
|
||||
);
|
||||
assert_eq!(
|
||||
"zed/assets/themes/summercamp/subdir/LICENSE",
|
||||
result.files[2].path.to_string_lossy()
|
||||
);
|
||||
assert_eq!(
|
||||
"zed/assets/themes/summercamp/subdir/subsubdir/LICENSE",
|
||||
result.files[3].path.to_string_lossy()
|
||||
);
|
||||
assert_eq!("subsubdir", result.files[4].path.to_string_lossy());
|
||||
assert_eq!("subdir", result.files[5].path.to_string_lossy());
|
||||
assert_eq!("summercamp", result.files[6].path.to_string_lossy());
|
||||
assert_eq!("zed/assets/themes", result.files[7].path.to_string_lossy());
|
||||
|
||||
// Ensure that the project lasts until after the last await
|
||||
drop(project);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6,7 +6,7 @@ use assistant_slash_command::{
|
||||
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
|
||||
};
|
||||
use chrono::Local;
|
||||
use gpui::{AppContext, Task, WeakView};
|
||||
use gpui::{Task, WeakView};
|
||||
use language::LspAdapterDelegate;
|
||||
use ui::prelude::*;
|
||||
use workspace::Workspace;
|
||||
@@ -32,17 +32,17 @@ impl SlashCommand for NowSlashCommand {
|
||||
|
||||
fn complete_argument(
|
||||
self: Arc<Self>,
|
||||
_query: String,
|
||||
_arguments: &[String],
|
||||
_cancel: Arc<AtomicBool>,
|
||||
_workspace: Option<WeakView<Workspace>>,
|
||||
_cx: &mut AppContext,
|
||||
_cx: &mut WindowContext,
|
||||
) -> Task<Result<Vec<ArgumentCompletion>>> {
|
||||
Task::ready(Ok(Vec::new()))
|
||||
}
|
||||
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
_argument: Option<&str>,
|
||||
_arguments: &[String],
|
||||
_workspace: WeakView<Workspace>,
|
||||
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
|
||||
_cx: &mut WindowContext,
|
||||
|
||||
@@ -103,10 +103,10 @@ impl SlashCommand for ProjectSlashCommand {
|
||||
|
||||
fn complete_argument(
|
||||
self: Arc<Self>,
|
||||
_query: String,
|
||||
_arguments: &[String],
|
||||
_cancel: Arc<AtomicBool>,
|
||||
_workspace: Option<WeakView<Workspace>>,
|
||||
_cx: &mut AppContext,
|
||||
_cx: &mut WindowContext,
|
||||
) -> Task<Result<Vec<ArgumentCompletion>>> {
|
||||
Task::ready(Err(anyhow!("this command does not require argument")))
|
||||
}
|
||||
@@ -117,7 +117,7 @@ impl SlashCommand for ProjectSlashCommand {
|
||||
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
_argument: Option<&str>,
|
||||
_arguments: &[String],
|
||||
workspace: WeakView<Workspace>,
|
||||
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
|
||||
cx: &mut WindowContext,
|
||||
|
||||
@@ -2,7 +2,7 @@ use super::{SlashCommand, SlashCommandOutput};
|
||||
use crate::prompt_library::PromptStore;
|
||||
use anyhow::{anyhow, Context, Result};
|
||||
use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection};
|
||||
use gpui::{AppContext, Task, WeakView};
|
||||
use gpui::{Task, WeakView};
|
||||
use language::LspAdapterDelegate;
|
||||
use std::sync::{atomic::AtomicBool, Arc};
|
||||
use ui::prelude::*;
|
||||
@@ -29,12 +29,13 @@ impl SlashCommand for PromptSlashCommand {
|
||||
|
||||
fn complete_argument(
|
||||
self: Arc<Self>,
|
||||
query: String,
|
||||
arguments: &[String],
|
||||
_cancellation_flag: Arc<AtomicBool>,
|
||||
_workspace: Option<WeakView<Workspace>>,
|
||||
cx: &mut AppContext,
|
||||
cx: &mut WindowContext,
|
||||
) -> Task<Result<Vec<ArgumentCompletion>>> {
|
||||
let store = PromptStore::global(cx);
|
||||
let query = arguments.to_owned().join(" ");
|
||||
cx.background_executor().spawn(async move {
|
||||
let prompts = store.await?.search(query).await;
|
||||
Ok(prompts
|
||||
@@ -42,9 +43,10 @@ impl SlashCommand for PromptSlashCommand {
|
||||
.filter_map(|prompt| {
|
||||
let prompt_title = prompt.title?.to_string();
|
||||
Some(ArgumentCompletion {
|
||||
label: prompt_title.clone(),
|
||||
label: prompt_title.clone().into(),
|
||||
new_text: prompt_title,
|
||||
run_command: true,
|
||||
after_completion: true.into(),
|
||||
replace_previous_arguments: true,
|
||||
})
|
||||
})
|
||||
.collect())
|
||||
@@ -53,17 +55,18 @@ impl SlashCommand for PromptSlashCommand {
|
||||
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
title: Option<&str>,
|
||||
arguments: &[String],
|
||||
_workspace: WeakView<Workspace>,
|
||||
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
|
||||
cx: &mut WindowContext,
|
||||
) -> Task<Result<SlashCommandOutput>> {
|
||||
let Some(title) = title else {
|
||||
let title = arguments.to_owned().join(" ");
|
||||
if title.trim().is_empty() {
|
||||
return Task::ready(Err(anyhow!("missing prompt name")));
|
||||
};
|
||||
|
||||
let store = PromptStore::global(cx);
|
||||
let title = SharedString::from(title.to_string());
|
||||
let title = SharedString::from(title.clone());
|
||||
let prompt = cx.background_executor().spawn({
|
||||
let title = title.clone();
|
||||
async move {
|
||||
@@ -77,6 +80,11 @@ impl SlashCommand for PromptSlashCommand {
|
||||
});
|
||||
cx.foreground_executor().spawn(async move {
|
||||
let mut prompt = prompt.await?;
|
||||
|
||||
if prompt.starts_with('/') {
|
||||
// Prevent an edge case where the inserted prompt starts with a slash command (that leads to funky rendering).
|
||||
prompt.insert(0, '\n');
|
||||
}
|
||||
if prompt.is_empty() {
|
||||
prompt.push('\n');
|
||||
}
|
||||
|
||||
@@ -5,6 +5,7 @@ use super::{
|
||||
};
|
||||
use anyhow::Result;
|
||||
use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection};
|
||||
use feature_flags::FeatureFlag;
|
||||
use gpui::{AppContext, Task, WeakView};
|
||||
use language::{CodeLabel, LineEnding, LspAdapterDelegate};
|
||||
use semantic_index::SemanticIndex;
|
||||
@@ -17,6 +18,12 @@ use ui::{prelude::*, IconName};
|
||||
use util::ResultExt;
|
||||
use workspace::Workspace;
|
||||
|
||||
pub(crate) struct SearchSlashCommandFeatureFlag;
|
||||
|
||||
impl FeatureFlag for SearchSlashCommandFeatureFlag {
|
||||
const NAME: &'static str = "search-slash-command";
|
||||
}
|
||||
|
||||
pub(crate) struct SearchSlashCommand;
|
||||
|
||||
impl SlashCommand for SearchSlashCommand {
|
||||
@@ -42,17 +49,17 @@ impl SlashCommand for SearchSlashCommand {
|
||||
|
||||
fn complete_argument(
|
||||
self: Arc<Self>,
|
||||
_query: String,
|
||||
_arguments: &[String],
|
||||
_cancel: Arc<AtomicBool>,
|
||||
_workspace: Option<WeakView<Workspace>>,
|
||||
_cx: &mut AppContext,
|
||||
_cx: &mut WindowContext,
|
||||
) -> Task<Result<Vec<ArgumentCompletion>>> {
|
||||
Task::ready(Ok(Vec::new()))
|
||||
}
|
||||
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
argument: Option<&str>,
|
||||
arguments: &[String],
|
||||
workspace: WeakView<Workspace>,
|
||||
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
|
||||
cx: &mut WindowContext,
|
||||
@@ -60,13 +67,13 @@ impl SlashCommand for SearchSlashCommand {
|
||||
let Some(workspace) = workspace.upgrade() else {
|
||||
return Task::ready(Err(anyhow::anyhow!("workspace was dropped")));
|
||||
};
|
||||
let Some(argument) = argument else {
|
||||
if arguments.is_empty() {
|
||||
return Task::ready(Err(anyhow::anyhow!("missing search query")));
|
||||
};
|
||||
|
||||
let mut limit = None;
|
||||
let mut query = String::new();
|
||||
for part in argument.split(' ') {
|
||||
for part in arguments {
|
||||
if let Some(parameter) = part.strip_prefix("--") {
|
||||
if let Ok(count) = parameter.parse::<usize>() {
|
||||
limit = Some(count);
|
||||
|
||||
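The search-command hunk above is cut off mid-loop, so here is a hedged, standalone sketch of the `--<count>` limit handling it shows; joining the remaining words back into a query string with spaces is an assumption, not something taken from the diff.

```rust
// Sketch of the limit extraction shown above; the query-building part is assumed.
fn parse_search_arguments(arguments: &[String]) -> (String, Option<usize>) {
    let mut limit = None;
    let mut query = String::new();
    for part in arguments {
        if let Some(parameter) = part.strip_prefix("--") {
            if let Ok(count) = parameter.parse::<usize>() {
                limit = Some(count);
                continue;
            }
        }
        if !query.is_empty() {
            query.push(' ');
        }
        query.push_str(part);
    }
    (query, limit)
}

fn main() {
    let args = vec!["--5".to_string(), "http".to_string(), "client".to_string()];
    assert_eq!(
        parse_search_arguments(&args),
        ("http client".to_string(), Some(5))
    );
}
```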
@@ -2,7 +2,7 @@ use super::{SlashCommand, SlashCommandOutput};
|
||||
use anyhow::{anyhow, Context as _, Result};
|
||||
use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection};
|
||||
use editor::Editor;
|
||||
use gpui::{AppContext, Task, WeakView};
|
||||
use gpui::{Task, WeakView};
|
||||
use language::LspAdapterDelegate;
|
||||
use std::sync::Arc;
|
||||
use std::{path::Path, sync::atomic::AtomicBool};
|
||||
@@ -26,10 +26,10 @@ impl SlashCommand for OutlineSlashCommand {
|
||||
|
||||
fn complete_argument(
|
||||
self: Arc<Self>,
|
||||
_query: String,
|
||||
_arguments: &[String],
|
||||
_cancel: Arc<AtomicBool>,
|
||||
_workspace: Option<WeakView<Workspace>>,
|
||||
_cx: &mut AppContext,
|
||||
_cx: &mut WindowContext,
|
||||
) -> Task<Result<Vec<ArgumentCompletion>>> {
|
||||
Task::ready(Err(anyhow!("this command does not require argument")))
|
||||
}
|
||||
@@ -40,7 +40,7 @@ impl SlashCommand for OutlineSlashCommand {
|
||||
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
_argument: Option<&str>,
|
||||
_arguments: &[String],
|
||||
workspace: WeakView<Workspace>,
|
||||
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
|
||||
cx: &mut WindowContext,
|
||||
|
||||
crates/assistant/src/slash_command/tab_command.rs (new file, 321 lines)
@@ -0,0 +1,321 @@
|
||||
use super::{
|
||||
diagnostics_command::write_single_file_diagnostics,
|
||||
file_command::{build_entry_output_section, codeblock_fence_for_path},
|
||||
SlashCommand, SlashCommandOutput,
|
||||
};
|
||||
use anyhow::{Context, Result};
|
||||
use assistant_slash_command::ArgumentCompletion;
|
||||
use collections::{HashMap, HashSet};
|
||||
use editor::Editor;
|
||||
use futures::future::join_all;
|
||||
use gpui::{Entity, Task, WeakView};
|
||||
use language::{BufferSnapshot, LspAdapterDelegate};
|
||||
use std::{
|
||||
fmt::Write,
|
||||
path::PathBuf,
|
||||
sync::{atomic::AtomicBool, Arc},
|
||||
};
|
||||
use ui::WindowContext;
|
||||
use workspace::Workspace;
|
||||
|
||||
pub(crate) struct TabSlashCommand;
|
||||
|
||||
const ALL_TABS_COMPLETION_ITEM: &str = "all";
|
||||
|
||||
impl SlashCommand for TabSlashCommand {
|
||||
fn name(&self) -> String {
|
||||
"tab".into()
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
"insert open tabs (active tab by default)".to_owned()
|
||||
}
|
||||
|
||||
fn menu_text(&self) -> String {
|
||||
"Insert Open Tabs".to_owned()
|
||||
}
|
||||
|
||||
fn requires_argument(&self) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
fn accepts_arguments(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn complete_argument(
|
||||
self: Arc<Self>,
|
||||
arguments: &[String],
|
||||
cancel: Arc<AtomicBool>,
|
||||
workspace: Option<WeakView<Workspace>>,
|
||||
cx: &mut WindowContext,
|
||||
) -> Task<Result<Vec<ArgumentCompletion>>> {
|
||||
let mut has_all_tabs_completion_item = false;
|
||||
let argument_set = arguments
|
||||
.iter()
|
||||
.filter(|argument| {
|
||||
if has_all_tabs_completion_item || ALL_TABS_COMPLETION_ITEM == argument.as_str() {
|
||||
has_all_tabs_completion_item = true;
|
||||
false
|
||||
} else {
|
||||
true
|
||||
}
|
||||
})
|
||||
.cloned()
|
||||
.collect::<HashSet<_>>();
|
||||
if has_all_tabs_completion_item {
|
||||
return Task::ready(Ok(Vec::new()));
|
||||
}
|
||||
|
||||
let active_item_path = workspace.as_ref().and_then(|workspace| {
|
||||
workspace
|
||||
.update(cx, |workspace, cx| {
|
||||
let snapshot = active_item_buffer(workspace, cx).ok()?;
|
||||
snapshot.resolve_file_path(cx, true)
|
||||
})
|
||||
.ok()
|
||||
.flatten()
|
||||
});
|
||||
let current_query = arguments.last().cloned().unwrap_or_default();
|
||||
let tab_items_search =
|
||||
tab_items_for_queries(workspace, &[current_query], cancel, false, cx);
|
||||
cx.spawn(|_| async move {
|
||||
let tab_items = tab_items_search.await?;
|
||||
let run_command = tab_items.len() == 1;
|
||||
let tab_completion_items = tab_items.into_iter().filter_map(|(path, ..)| {
|
||||
let path_string = path.as_deref()?.to_string_lossy().to_string();
|
||||
if argument_set.contains(&path_string) {
|
||||
return None;
|
||||
}
|
||||
if active_item_path.is_some() && active_item_path == path {
|
||||
return None;
|
||||
}
|
||||
Some(ArgumentCompletion {
|
||||
label: path_string.clone().into(),
|
||||
new_text: path_string,
|
||||
replace_previous_arguments: false,
|
||||
after_completion: run_command.into(),
|
||||
})
|
||||
});
|
||||
|
||||
let active_item_completion = active_item_path
|
||||
.as_deref()
|
||||
.map(|active_item_path| active_item_path.to_string_lossy().to_string())
|
||||
.filter(|path_string| !argument_set.contains(path_string))
|
||||
.map(|path_string| ArgumentCompletion {
|
||||
label: path_string.clone().into(),
|
||||
new_text: path_string,
|
||||
replace_previous_arguments: false,
|
||||
after_completion: run_command.into(),
|
||||
});
|
||||
|
||||
Ok(active_item_completion
|
||||
.into_iter()
|
||||
.chain(Some(ArgumentCompletion {
|
||||
label: ALL_TABS_COMPLETION_ITEM.into(),
|
||||
new_text: ALL_TABS_COMPLETION_ITEM.to_owned(),
|
||||
replace_previous_arguments: false,
|
||||
after_completion: true.into(),
|
||||
}))
|
||||
.chain(tab_completion_items)
|
||||
.collect())
|
||||
})
|
||||
}
|
||||
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
arguments: &[String],
|
||||
workspace: WeakView<Workspace>,
|
||||
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
|
||||
cx: &mut WindowContext,
|
||||
) -> Task<Result<SlashCommandOutput>> {
|
||||
let tab_items_search = tab_items_for_queries(
|
||||
Some(workspace),
|
||||
arguments,
|
||||
Arc::new(AtomicBool::new(false)),
|
||||
true,
|
||||
cx,
|
||||
);
|
||||
|
||||
cx.background_executor().spawn(async move {
|
||||
let mut sections = Vec::new();
|
||||
let mut text = String::new();
|
||||
let mut has_diagnostics = false;
|
||||
for (full_path, buffer, _) in tab_items_search.await? {
|
||||
let section_start_ix = text.len();
|
||||
text.push_str(&codeblock_fence_for_path(full_path.as_deref(), None));
|
||||
for chunk in buffer.as_rope().chunks() {
|
||||
text.push_str(chunk);
|
||||
}
|
||||
if !text.ends_with('\n') {
|
||||
text.push('\n');
|
||||
}
|
||||
writeln!(text, "```").unwrap();
|
||||
if write_single_file_diagnostics(&mut text, full_path.as_deref(), &buffer) {
|
||||
has_diagnostics = true;
|
||||
}
|
||||
if !text.ends_with('\n') {
|
||||
text.push('\n');
|
||||
}
|
||||
|
||||
let section_end_ix = text.len() - 1;
|
||||
sections.push(build_entry_output_section(
|
||||
section_start_ix..section_end_ix,
|
||||
full_path.as_deref(),
|
||||
false,
|
||||
None,
|
||||
));
|
||||
}
|
||||
|
||||
Ok(SlashCommandOutput {
|
||||
text,
|
||||
sections,
|
||||
run_commands_in_text: has_diagnostics,
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
fn tab_items_for_queries(
|
||||
workspace: Option<WeakView<Workspace>>,
|
||||
queries: &[String],
|
||||
cancel: Arc<AtomicBool>,
|
||||
strict_match: bool,
|
||||
cx: &mut WindowContext,
|
||||
) -> Task<anyhow::Result<Vec<(Option<PathBuf>, BufferSnapshot, usize)>>> {
|
||||
let empty_query = queries.is_empty() || queries.iter().all(|query| query.trim().is_empty());
|
||||
let queries = queries.to_owned();
|
||||
cx.spawn(|mut cx| async move {
|
||||
let mut open_buffers =
|
||||
workspace
|
||||
.context("no workspace")?
|
||||
.update(&mut cx, |workspace, cx| {
|
||||
if strict_match && empty_query {
|
||||
let snapshot = active_item_buffer(workspace, cx)?;
|
||||
let full_path = snapshot.resolve_file_path(cx, true);
|
||||
return anyhow::Ok(vec![(full_path, snapshot, 0)]);
|
||||
}
|
||||
|
||||
let mut timestamps_by_entity_id = HashMap::default();
|
||||
let mut visited_buffers = HashSet::default();
|
||||
let mut open_buffers = Vec::new();
|
||||
|
||||
for pane in workspace.panes() {
|
||||
let pane = pane.read(cx);
|
||||
for entry in pane.activation_history() {
|
||||
timestamps_by_entity_id.insert(entry.entity_id, entry.timestamp);
|
||||
}
|
||||
}
|
||||
|
||||
for editor in workspace.items_of_type::<Editor>(cx) {
|
||||
if let Some(buffer) = editor.read(cx).buffer().read(cx).as_singleton() {
|
||||
if let Some(timestamp) =
|
||||
timestamps_by_entity_id.get(&editor.entity_id())
|
||||
{
|
||||
if visited_buffers.insert(buffer.read(cx).remote_id()) {
|
||||
let snapshot = buffer.read(cx).snapshot();
|
||||
let full_path = snapshot.resolve_file_path(cx, true);
|
||||
open_buffers.push((full_path, snapshot, *timestamp));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(open_buffers)
|
||||
})??;
|
||||
|
||||
let background_executor = cx.background_executor().clone();
|
||||
cx.background_executor()
|
||||
.spawn(async move {
|
||||
open_buffers.sort_by_key(|(_, _, timestamp)| *timestamp);
|
||||
if empty_query
|
||||
|| queries
|
||||
.iter()
|
||||
.any(|query| query == ALL_TABS_COMPLETION_ITEM)
|
||||
{
|
||||
return Ok(open_buffers);
|
||||
}
|
||||
|
||||
let matched_items = if strict_match {
|
||||
let match_candidates = open_buffers
|
||||
.iter()
|
||||
.enumerate()
|
||||
.filter_map(|(id, (full_path, ..))| {
|
||||
let path_string = full_path.as_deref()?.to_string_lossy().to_string();
|
||||
Some((id, path_string))
|
||||
})
|
||||
.fold(HashMap::default(), |mut candidates, (id, path_string)| {
|
||||
candidates
|
||||
.entry(path_string)
|
||||
.or_insert_with(|| Vec::new())
|
||||
.push(id);
|
||||
candidates
|
||||
});
|
||||
|
||||
queries
|
||||
.iter()
|
||||
.filter_map(|query| match_candidates.get(query))
|
||||
.flatten()
|
||||
.copied()
|
||||
.filter_map(|id| open_buffers.get(id))
|
||||
.cloned()
|
||||
.collect()
|
||||
} else {
|
||||
let match_candidates = open_buffers
|
||||
.iter()
|
||||
.enumerate()
|
||||
.filter_map(|(id, (full_path, ..))| {
|
||||
let path_string = full_path.as_deref()?.to_string_lossy().to_string();
|
||||
Some(fuzzy::StringMatchCandidate {
|
||||
id,
|
||||
char_bag: path_string.as_str().into(),
|
||||
string: path_string,
|
||||
})
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
let mut processed_matches = HashSet::default();
|
||||
let file_queries = queries.iter().map(|query| {
|
||||
fuzzy::match_strings(
|
||||
&match_candidates,
|
||||
query,
|
||||
true,
|
||||
usize::MAX,
|
||||
&cancel,
|
||||
background_executor.clone(),
|
||||
)
|
||||
});
|
||||
|
||||
join_all(file_queries)
|
||||
.await
|
||||
.into_iter()
|
||||
.flatten()
|
||||
.filter(|string_match| processed_matches.insert(string_match.candidate_id))
|
||||
.filter_map(|string_match| open_buffers.get(string_match.candidate_id))
|
||||
.cloned()
|
||||
.collect()
|
||||
};
|
||||
Ok(matched_items)
|
||||
})
|
||||
.await
|
||||
})
|
||||
}
|
||||
|
||||
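As a rough illustration of the query handling in `tab_items_for_queries`: an empty query or an explicit `all` keeps every open tab, while the strict branch keeps only exact path matches. The fuzzy branch is omitted here, and `select_tabs` is a simplified stand-in rather than the function above.

```rust
// Simplified: tabs are plain path strings; the real code carries buffer snapshots
// and timestamps, and falls back to fuzzy matching when strict_match is false.
fn select_tabs<'a>(open_tabs: &'a [String], queries: &[String]) -> Vec<&'a String> {
    let empty_query = queries.is_empty() || queries.iter().all(|q| q.trim().is_empty());
    if empty_query || queries.iter().any(|q| q == "all") {
        return open_tabs.iter().collect();
    }
    open_tabs
        .iter()
        .filter(|path| queries.iter().any(|q| q == *path))
        .collect()
}

fn main() {
    let tabs = vec!["src/main.rs".to_string(), "README.md".to_string()];
    assert_eq!(select_tabs(&tabs, &["all".to_string()]).len(), 2);
    assert_eq!(select_tabs(&tabs, &["src/main.rs".to_string()]).len(), 1);
}
```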
fn active_item_buffer(
|
||||
workspace: &mut Workspace,
|
||||
cx: &mut ui::ViewContext<Workspace>,
|
||||
) -> anyhow::Result<BufferSnapshot> {
|
||||
let active_editor = workspace
|
||||
.active_item(cx)
|
||||
.context("no active item")?
|
||||
.downcast::<Editor>()
|
||||
.context("active item is not an editor")?;
|
||||
let snapshot = active_editor
|
||||
.read(cx)
|
||||
.buffer()
|
||||
.read(cx)
|
||||
.as_singleton()
|
||||
.context("active editor is not a singleton buffer")?
|
||||
.read(cx)
|
||||
.snapshot();
|
||||
Ok(snapshot)
|
||||
}
|
||||
@@ -1,118 +0,0 @@
|
||||
use super::{
|
||||
diagnostics_command::write_single_file_diagnostics,
|
||||
file_command::{build_entry_output_section, codeblock_fence_for_path},
|
||||
SlashCommand, SlashCommandOutput,
|
||||
};
|
||||
use anyhow::{anyhow, Result};
|
||||
use assistant_slash_command::ArgumentCompletion;
|
||||
use collections::HashMap;
|
||||
use editor::Editor;
|
||||
use gpui::{AppContext, Entity, Task, WeakView};
|
||||
use language::LspAdapterDelegate;
|
||||
use std::{fmt::Write, sync::Arc};
|
||||
use ui::WindowContext;
|
||||
use workspace::Workspace;
|
||||
|
||||
pub(crate) struct TabsSlashCommand;
|
||||
|
||||
impl SlashCommand for TabsSlashCommand {
|
||||
fn name(&self) -> String {
|
||||
"tabs".into()
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
"insert open tabs".into()
|
||||
}
|
||||
|
||||
fn menu_text(&self) -> String {
|
||||
"Insert Open Tabs".into()
|
||||
}
|
||||
|
||||
fn requires_argument(&self) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
fn complete_argument(
|
||||
self: Arc<Self>,
|
||||
_query: String,
|
||||
_cancel: Arc<std::sync::atomic::AtomicBool>,
|
||||
_workspace: Option<WeakView<Workspace>>,
|
||||
_cx: &mut AppContext,
|
||||
) -> Task<Result<Vec<ArgumentCompletion>>> {
|
||||
Task::ready(Err(anyhow!("this command does not require argument")))
|
||||
}
|
||||
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
_argument: Option<&str>,
|
||||
workspace: WeakView<Workspace>,
|
||||
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
|
||||
cx: &mut WindowContext,
|
||||
) -> Task<Result<SlashCommandOutput>> {
|
||||
let open_buffers = workspace.update(cx, |workspace, cx| {
|
||||
let mut timestamps_by_entity_id = HashMap::default();
|
||||
let mut open_buffers = Vec::new();
|
||||
|
||||
for pane in workspace.panes() {
|
||||
let pane = pane.read(cx);
|
||||
for entry in pane.activation_history() {
|
||||
timestamps_by_entity_id.insert(entry.entity_id, entry.timestamp);
|
||||
}
|
||||
}
|
||||
|
||||
for editor in workspace.items_of_type::<Editor>(cx) {
|
||||
if let Some(buffer) = editor.read(cx).buffer().read(cx).as_singleton() {
|
||||
if let Some(timestamp) = timestamps_by_entity_id.get(&editor.entity_id()) {
|
||||
let snapshot = buffer.read(cx).snapshot();
|
||||
let full_path = snapshot.resolve_file_path(cx, true);
|
||||
open_buffers.push((full_path, snapshot, *timestamp));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
open_buffers
|
||||
});
|
||||
|
||||
match open_buffers {
|
||||
Ok(mut open_buffers) => cx.background_executor().spawn(async move {
|
||||
open_buffers.sort_by_key(|(_, _, timestamp)| *timestamp);
|
||||
|
||||
let mut sections = Vec::new();
|
||||
let mut text = String::new();
|
||||
let mut has_diagnostics = false;
|
||||
for (full_path, buffer, _) in open_buffers {
|
||||
let section_start_ix = text.len();
|
||||
text.push_str(&codeblock_fence_for_path(full_path.as_deref(), None));
|
||||
for chunk in buffer.as_rope().chunks() {
|
||||
text.push_str(chunk);
|
||||
}
|
||||
if !text.ends_with('\n') {
|
||||
text.push('\n');
|
||||
}
|
||||
writeln!(text, "```").unwrap();
|
||||
if write_single_file_diagnostics(&mut text, full_path.as_deref(), &buffer) {
|
||||
has_diagnostics = true;
|
||||
}
|
||||
if !text.ends_with('\n') {
|
||||
text.push('\n');
|
||||
}
|
||||
|
||||
let section_end_ix = text.len() - 1;
|
||||
sections.push(build_entry_output_section(
|
||||
section_start_ix..section_end_ix,
|
||||
full_path.as_deref(),
|
||||
false,
|
||||
None,
|
||||
));
|
||||
}
|
||||
|
||||
Ok(SlashCommandOutput {
|
||||
text,
|
||||
sections,
|
||||
run_commands_in_text: has_diagnostics,
|
||||
})
|
||||
}),
|
||||
Err(error) => Task::ready(Err(error)),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -5,7 +5,7 @@ use anyhow::Result;
|
||||
use assistant_slash_command::{
|
||||
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
|
||||
};
|
||||
use gpui::{AppContext, Task, WeakView};
|
||||
use gpui::{AppContext, Task, View, WeakView};
|
||||
use language::{CodeLabel, LspAdapterDelegate};
|
||||
use terminal_view::{terminal_panel::TerminalPanel, TerminalView};
|
||||
use ui::prelude::*;
|
||||
@@ -15,17 +15,17 @@ use crate::DEFAULT_CONTEXT_LINES;
|
||||
|
||||
use super::create_label_for_command;
|
||||
|
||||
pub(crate) struct TermSlashCommand;
|
||||
pub(crate) struct TerminalSlashCommand;
|
||||
|
||||
const LINE_COUNT_ARG: &str = "--line-count";
|
||||
|
||||
impl SlashCommand for TermSlashCommand {
|
||||
impl SlashCommand for TerminalSlashCommand {
|
||||
fn name(&self) -> String {
|
||||
"term".into()
|
||||
"terminal".into()
|
||||
}
|
||||
|
||||
fn label(&self, cx: &AppContext) -> CodeLabel {
|
||||
create_label_for_command("term", &[LINE_COUNT_ARG], cx)
|
||||
create_label_for_command("terminal", &[LINE_COUNT_ARG], cx)
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
@@ -40,23 +40,23 @@ impl SlashCommand for TermSlashCommand {
|
||||
false
|
||||
}
|
||||
|
||||
fn accepts_arguments(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn complete_argument(
|
||||
self: Arc<Self>,
|
||||
_query: String,
|
||||
_arguments: &[String],
|
||||
_cancel: Arc<AtomicBool>,
|
||||
_workspace: Option<WeakView<Workspace>>,
|
||||
_cx: &mut AppContext,
|
||||
_cx: &mut WindowContext,
|
||||
) -> Task<Result<Vec<ArgumentCompletion>>> {
|
||||
Task::ready(Ok(vec![ArgumentCompletion {
|
||||
label: LINE_COUNT_ARG.to_string(),
|
||||
new_text: LINE_COUNT_ARG.to_string(),
|
||||
run_command: true,
|
||||
}]))
|
||||
Task::ready(Ok(Vec::new()))
|
||||
}
|
||||
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
argument: Option<&str>,
|
||||
arguments: &[String],
|
||||
workspace: WeakView<Workspace>,
|
||||
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
|
||||
cx: &mut WindowContext,
|
||||
@@ -64,19 +64,14 @@ impl SlashCommand for TermSlashCommand {
|
||||
let Some(workspace) = workspace.upgrade() else {
|
||||
return Task::ready(Err(anyhow::anyhow!("workspace was dropped")));
|
||||
};
|
||||
let Some(terminal_panel) = workspace.read(cx).panel::<TerminalPanel>(cx) else {
|
||||
return Task::ready(Err(anyhow::anyhow!("no terminal panel open")));
|
||||
};
|
||||
let Some(active_terminal) = terminal_panel.read(cx).pane().and_then(|pane| {
|
||||
pane.read(cx)
|
||||
.active_item()
|
||||
.and_then(|t| t.downcast::<TerminalView>())
|
||||
}) else {
|
||||
|
||||
let Some(active_terminal) = resolve_active_terminal(&workspace, cx) else {
|
||||
return Task::ready(Err(anyhow::anyhow!("no active terminal")));
|
||||
};
|
||||
|
||||
let line_count = argument
|
||||
.and_then(|a| parse_argument(a))
|
||||
let line_count = arguments
|
||||
.get(0)
|
||||
.and_then(|s| s.parse::<usize>().ok())
|
||||
.unwrap_or(DEFAULT_CONTEXT_LINES);
|
||||
|
||||
let lines = active_terminal
|
||||
@@ -102,12 +97,22 @@ impl SlashCommand for TermSlashCommand {
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_argument(argument: &str) -> Option<usize> {
|
||||
let mut args = argument.split(' ');
|
||||
if args.next() == Some(LINE_COUNT_ARG) {
|
||||
if let Some(line_count) = args.next().and_then(|s| s.parse::<usize>().ok()) {
|
||||
return Some(line_count);
|
||||
}
|
||||
fn resolve_active_terminal(
|
||||
workspace: &View<Workspace>,
|
||||
cx: &WindowContext,
|
||||
) -> Option<View<TerminalView>> {
|
||||
if let Some(terminal_view) = workspace
|
||||
.read(cx)
|
||||
.active_item(cx)
|
||||
.and_then(|item| item.act_as::<TerminalView>(cx))
|
||||
{
|
||||
return Some(terminal_view);
|
||||
}
|
||||
None
|
||||
|
||||
let terminal_panel = workspace.read(cx).panel::<TerminalPanel>(cx)?;
|
||||
terminal_panel.read(cx).pane().and_then(|pane| {
|
||||
pane.read(cx)
|
||||
.active_item()
|
||||
.and_then(|t| t.downcast::<TerminalView>())
|
||||
})
|
||||
}
|
||||
@@ -7,7 +7,7 @@ use anyhow::Result;
|
||||
use assistant_slash_command::{
|
||||
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
|
||||
};
|
||||
use gpui::{AppContext, Task, WeakView};
|
||||
use gpui::{Task, WeakView};
|
||||
use language::LspAdapterDelegate;
|
||||
use ui::prelude::*;
|
||||
|
||||
@@ -42,17 +42,17 @@ impl SlashCommand for WorkflowSlashCommand {
|
||||
|
||||
fn complete_argument(
|
||||
self: Arc<Self>,
|
||||
_query: String,
|
||||
_arguments: &[String],
|
||||
_cancel: Arc<AtomicBool>,
|
||||
_workspace: Option<WeakView<Workspace>>,
|
||||
_cx: &mut AppContext,
|
||||
_cx: &mut WindowContext,
|
||||
) -> Task<Result<Vec<ArgumentCompletion>>> {
|
||||
Task::ready(Ok(Vec::new()))
|
||||
}
|
||||
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
_argument: Option<&str>,
|
||||
_arguments: &[String],
|
||||
_workspace: WeakView<Workspace>,
|
||||
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
|
||||
cx: &mut WindowContext,
|
||||
|
||||
crates/assistant/src/slash_command_picker.rs (new file, 306 lines)
@@ -0,0 +1,306 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use assistant_slash_command::SlashCommandRegistry;
|
||||
use gpui::AnyElement;
|
||||
use gpui::DismissEvent;
|
||||
use gpui::WeakView;
|
||||
use picker::PickerEditorPosition;
|
||||
|
||||
use ui::ListItemSpacing;
|
||||
|
||||
use gpui::SharedString;
|
||||
use gpui::Task;
|
||||
use picker::{Picker, PickerDelegate};
|
||||
use ui::{prelude::*, ListItem, PopoverMenu, PopoverTrigger};
|
||||
|
||||
use crate::assistant_panel::ContextEditor;
|
||||
|
||||
#[derive(IntoElement)]
|
||||
pub(super) struct SlashCommandSelector<T: PopoverTrigger> {
|
||||
registry: Arc<SlashCommandRegistry>,
|
||||
active_context_editor: WeakView<ContextEditor>,
|
||||
trigger: T,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
struct SlashCommandInfo {
|
||||
name: SharedString,
|
||||
description: SharedString,
|
||||
args: Option<SharedString>,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
enum SlashCommandEntry {
|
||||
Info(SlashCommandInfo),
|
||||
Advert {
|
||||
name: SharedString,
|
||||
renderer: fn(&mut WindowContext<'_>) -> AnyElement,
|
||||
on_confirm: fn(&mut WindowContext<'_>),
|
||||
},
|
||||
}
|
||||
|
||||
impl AsRef<str> for SlashCommandEntry {
|
||||
fn as_ref(&self) -> &str {
|
||||
match self {
|
||||
SlashCommandEntry::Info(SlashCommandInfo { name, .. })
|
||||
| SlashCommandEntry::Advert { name, .. } => name,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) struct SlashCommandDelegate {
|
||||
all_commands: Vec<SlashCommandEntry>,
|
||||
filtered_commands: Vec<SlashCommandEntry>,
|
||||
active_context_editor: WeakView<ContextEditor>,
|
||||
selected_index: usize,
|
||||
}
|
||||
|
||||
impl<T: PopoverTrigger> SlashCommandSelector<T> {
|
||||
pub(crate) fn new(
|
||||
registry: Arc<SlashCommandRegistry>,
|
||||
active_context_editor: WeakView<ContextEditor>,
|
||||
trigger: T,
|
||||
) -> Self {
|
||||
SlashCommandSelector {
|
||||
registry,
|
||||
active_context_editor,
|
||||
trigger,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl PickerDelegate for SlashCommandDelegate {
|
||||
type ListItem = ListItem;
|
||||
|
||||
fn match_count(&self) -> usize {
|
||||
self.filtered_commands.len()
|
||||
}
|
||||
|
||||
fn selected_index(&self) -> usize {
|
||||
self.selected_index
|
||||
}
|
||||
|
||||
fn set_selected_index(&mut self, ix: usize, cx: &mut ViewContext<Picker<Self>>) {
|
||||
self.selected_index = ix.min(self.filtered_commands.len().saturating_sub(1));
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
fn placeholder_text(&self, _cx: &mut WindowContext) -> Arc<str> {
|
||||
"Select a command...".into()
|
||||
}
|
||||
|
||||
fn update_matches(&mut self, query: String, cx: &mut ViewContext<Picker<Self>>) -> Task<()> {
|
||||
let all_commands = self.all_commands.clone();
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
let filtered_commands = cx
|
||||
.background_executor()
|
||||
.spawn(async move {
|
||||
if query.is_empty() {
|
||||
all_commands
|
||||
} else {
|
||||
all_commands
|
||||
.into_iter()
|
||||
.filter(|model_info| {
|
||||
model_info
|
||||
.as_ref()
|
||||
.to_lowercase()
|
||||
.contains(&query.to_lowercase())
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
})
|
||||
.await;
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.delegate.filtered_commands = filtered_commands;
|
||||
this.delegate.set_selected_index(0, cx);
|
||||
cx.notify();
|
||||
})
|
||||
.ok();
|
||||
})
|
||||
}
|
||||
|
||||
fn separators_after_indices(&self) -> Vec<usize> {
|
||||
let mut ret = vec![];
|
||||
let mut previous_is_advert = false;
|
||||
|
||||
for (index, command) in self.filtered_commands.iter().enumerate() {
|
||||
if previous_is_advert {
|
||||
if let SlashCommandEntry::Info(_) = command {
|
||||
previous_is_advert = false;
|
||||
debug_assert_ne!(
|
||||
index, 0,
|
||||
"index cannot be zero, as we can never have a separator at 0th position"
|
||||
);
|
||||
ret.push(index - 1);
|
||||
}
|
||||
} else {
|
||||
if let SlashCommandEntry::Advert { .. } = command {
|
||||
previous_is_advert = true;
|
||||
if index != 0 {
|
||||
ret.push(index - 1);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
ret
|
||||
}
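// Worked example for `separators_after_indices` above (an illustration added by
// the editor, not part of the original change): given filtered entries
// [Info, Info, Advert, Advert, Info], the loop pushes separator indices [1, 3],
// i.e. one separator between the last plain command and the advert run, and one
// between the advert run and the following command entry.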
|
||||
fn confirm(&mut self, _secondary: bool, cx: &mut ViewContext<Picker<Self>>) {
|
||||
if let Some(command) = self.filtered_commands.get(self.selected_index) {
|
||||
if let SlashCommandEntry::Info(info) = command {
|
||||
self.active_context_editor
|
||||
.update(cx, |context_editor, cx| {
|
||||
context_editor.insert_command(&info.name, cx)
|
||||
})
|
||||
.ok();
|
||||
} else if let SlashCommandEntry::Advert { on_confirm, .. } = command {
|
||||
on_confirm(cx);
|
||||
}
|
||||
cx.emit(DismissEvent);
|
||||
}
|
||||
}
|
||||
|
||||
fn dismissed(&mut self, _cx: &mut ViewContext<Picker<Self>>) {}
|
||||
|
||||
fn editor_position(&self) -> PickerEditorPosition {
|
||||
PickerEditorPosition::End
|
||||
}
|
||||
|
||||
fn render_match(
|
||||
&self,
|
||||
ix: usize,
|
||||
selected: bool,
|
||||
cx: &mut ViewContext<Picker<Self>>,
|
||||
) -> Option<Self::ListItem> {
|
||||
let command_info = self.filtered_commands.get(ix)?;
|
||||
|
||||
match command_info {
|
||||
SlashCommandEntry::Info(info) => Some(
|
||||
ListItem::new(ix)
|
||||
.inset(true)
|
||||
.spacing(ListItemSpacing::Sparse)
|
||||
.selected(selected)
|
||||
.child(
|
||||
h_flex()
|
||||
.group(format!("command-entry-label-{ix}"))
|
||||
.w_full()
|
||||
.min_w(px(220.))
|
||||
.child(
|
||||
v_flex()
|
||||
.child(
|
||||
h_flex()
|
||||
.child(div().font_buffer(cx).child({
|
||||
let mut label = format!("/{}", info.name);
|
||||
if let Some(args) =
|
||||
info.args.as_ref().filter(|_| selected)
|
||||
{
|
||||
label.push_str(&args);
|
||||
}
|
||||
Label::new(label).size(LabelSize::Small)
|
||||
}))
|
||||
.children(info.args.clone().filter(|_| !selected).map(
|
||||
|args| {
|
||||
div()
|
||||
.font_buffer(cx)
|
||||
.child(
|
||||
Label::new(args).size(LabelSize::Small),
|
||||
)
|
||||
.visible_on_hover(format!(
|
||||
"command-entry-label-{ix}"
|
||||
))
|
||||
},
|
||||
)),
|
||||
)
|
||||
.child(
|
||||
Label::new(info.description.clone())
|
||||
.size(LabelSize::Small)
|
||||
.color(Color::Muted),
|
||||
),
|
||||
),
|
||||
),
|
||||
),
|
||||
SlashCommandEntry::Advert { renderer, .. } => Some(
|
||||
ListItem::new(ix)
|
||||
.inset(true)
|
||||
.spacing(ListItemSpacing::Sparse)
|
||||
.selected(selected)
|
||||
.child(renderer(cx)),
|
||||
),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: PopoverTrigger> RenderOnce for SlashCommandSelector<T> {
|
||||
fn render(self, cx: &mut WindowContext) -> impl IntoElement {
|
||||
let all_models = self
|
||||
.registry
|
||||
.featured_command_names()
|
||||
.into_iter()
|
||||
.filter_map(|command_name| {
|
||||
let command = self.registry.command(&command_name)?;
|
||||
let menu_text = SharedString::from(Arc::from(command.menu_text()));
|
||||
let label = command.label(cx);
|
||||
let args = label.filter_range.end.ne(&label.text.len()).then(|| {
|
||||
SharedString::from(
|
||||
label.text[label.filter_range.end..label.text.len()].to_owned(),
|
||||
)
|
||||
});
|
||||
Some(SlashCommandEntry::Info(SlashCommandInfo {
|
||||
name: command_name.into(),
|
||||
description: menu_text,
|
||||
args,
|
||||
}))
|
||||
})
|
||||
.chain([SlashCommandEntry::Advert {
|
||||
name: "create-your-command".into(),
|
||||
renderer: |cx| {
|
||||
v_flex()
|
||||
.child(
|
||||
h_flex()
|
||||
.font_buffer(cx)
|
||||
.items_center()
|
||||
.gap_1()
|
||||
.child(div().font_buffer(cx).child(
|
||||
Label::new("create-your-command").size(LabelSize::Small),
|
||||
))
|
||||
.child(Icon::new(IconName::ArrowUpRight).size(IconSize::XSmall)),
|
||||
)
|
||||
.child(
|
||||
Label::new("Learn how to create a custom command")
|
||||
.size(LabelSize::Small)
|
||||
.color(Color::Muted),
|
||||
)
|
||||
.into_any_element()
|
||||
},
|
||||
on_confirm: |cx| cx.open_url("https://zed.dev/docs/extensions/slash-commands"),
|
||||
}])
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let delegate = SlashCommandDelegate {
|
||||
all_commands: all_models.clone(),
|
||||
active_context_editor: self.active_context_editor.clone(),
|
||||
filtered_commands: all_models,
|
||||
selected_index: 0,
|
||||
};
|
||||
|
||||
let picker_view = cx.new_view(|cx| {
|
||||
let picker = Picker::uniform_list(delegate, cx).max_height(Some(rems(20.).into()));
|
||||
picker
|
||||
});
|
||||
|
||||
let handle = self
|
||||
.active_context_editor
|
||||
.update(cx, |this, _| this.slash_menu_handle.clone())
|
||||
.ok();
|
||||
PopoverMenu::new("model-switcher")
|
||||
.menu(move |_cx| Some(picker_view.clone()))
|
||||
.trigger(self.trigger)
|
||||
.attach(gpui::AnchorCorner::TopLeft)
|
||||
.anchor(gpui::AnchorCorner::BottomLeft)
|
||||
.offset(gpui::Point {
|
||||
x: px(0.0),
|
||||
y: px(-16.0),
|
||||
})
|
||||
.when_some(handle, |this, handle| this.with_handle(handle))
|
||||
}
|
||||
}
|
||||
crates/assistant/src/slash_command_settings.rs (new file, 44 lines)
@@ -0,0 +1,44 @@
|
||||
use anyhow::Result;
|
||||
use gpui::AppContext;
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::{Settings, SettingsSources};
|
||||
|
||||
/// Settings for slash commands.
|
||||
#[derive(Deserialize, Serialize, Debug, Default, Clone, JsonSchema)]
|
||||
pub struct SlashCommandSettings {
|
||||
/// Settings for the `/docs` slash command.
|
||||
#[serde(default)]
|
||||
pub docs: DocsCommandSettings,
|
||||
/// Settings for the `/project` slash command.
|
||||
#[serde(default)]
|
||||
pub project: ProjectCommandSettings,
|
||||
}
|
||||
|
||||
/// Settings for the `/docs` slash command.
|
||||
#[derive(Deserialize, Serialize, Debug, Default, Clone, JsonSchema)]
|
||||
pub struct DocsCommandSettings {
|
||||
/// Whether `/docs` is enabled.
|
||||
#[serde(default)]
|
||||
pub enabled: bool,
|
||||
}
|
||||
|
||||
/// Settings for the `/project` slash command.
|
||||
#[derive(Deserialize, Serialize, Debug, Default, Clone, JsonSchema)]
|
||||
pub struct ProjectCommandSettings {
|
||||
/// Whether `/project` is enabled.
|
||||
#[serde(default)]
|
||||
pub enabled: bool,
|
||||
}
|
||||
|
||||
impl Settings for SlashCommandSettings {
|
||||
const KEY: Option<&'static str> = Some("slash_commands");
|
||||
|
||||
type FileContent = Self;
|
||||
|
||||
fn load(sources: SettingsSources<Self::FileContent>, _cx: &mut AppContext) -> Result<Self> {
|
||||
SettingsSources::<Self::FileContent>::json_merge_with(
|
||||
[sources.default].into_iter().chain(sources.user),
|
||||
)
|
||||
}
|
||||
}
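// A minimal usage sketch (an assumption, not part of this diff): once
// `SlashCommandSettings` is registered with the settings store, the merged
// value can be read back anywhere an `AppContext` is available via the
// `Settings` trait. The helper name below is hypothetical.
fn docs_command_enabled(cx: &AppContext) -> bool {
    SlashCommandSettings::get_global(cx).docs.enabled
}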
|
||||
@@ -276,7 +276,8 @@ impl TerminalInlineAssistant {
|
||||
|
||||
messages.push(LanguageModelRequestMessage {
|
||||
role: Role::User,
|
||||
content: prompt,
|
||||
content: vec![prompt.into()],
|
||||
cache: false,
|
||||
});
|
||||
|
||||
Ok(LanguageModelRequest {
|
||||
|
||||
@@ -1,25 +0,0 @@
### Using the Assistant

Once you have configured a provider, you can interact with the provider's language models in a context editor.

To create a new context editor, use the menu in the top right of the assistant panel and choose the `New Context` option.

In the context editor, select a model from one of the configured providers, type a message in the `You` block, and submit with `cmd-enter` (or `ctrl-enter` on Linux).

### Inline assistant

When you're in a normal editor, you can use `ctrl-enter` to open the inline assistant.

The inline assistant allows you to send the current selection (or the current line) to a language model and modify the selection with the language model's response.

### Adding Prompts

You can customize the default prompts that are used in new context editors by opening the `Prompt Library`.

Open the `Prompt Library` either by using the menu in the top right of the assistant panel and choosing the `Prompt Library` option, or by running the `assistant: deploy prompt library` command when the assistant panel is focused.

### Viewing past contexts

You can view all previous contexts by opening the `History` tab in the assistant panel.

Open the `History` tab using the menu in the top right of the assistant panel and choosing the `History` option.

crates/assistant/src/workflow.rs (new file, 803 lines)
@@ -0,0 +1,803 @@
|
||||
mod step_view;
|
||||
|
||||
use crate::{
|
||||
prompts::StepResolutionContext, AssistantPanel, Context, InlineAssistId, InlineAssistant,
|
||||
};
|
||||
use anyhow::{anyhow, Error, Result};
|
||||
use collections::HashMap;
|
||||
use editor::Editor;
|
||||
use futures::future;
|
||||
use gpui::{
|
||||
Model, ModelContext, Task, UpdateGlobal as _, View, WeakModel, WeakView, WindowContext,
|
||||
};
|
||||
use language::{Anchor, Buffer, BufferSnapshot, SymbolPath};
|
||||
use language_model::{LanguageModelRegistry, LanguageModelRequestMessage, Role};
|
||||
use project::Project;
|
||||
use rope::Point;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use smol::stream::StreamExt;
|
||||
use std::{cmp, fmt::Write, ops::Range, sync::Arc};
|
||||
use text::{AnchorRangeExt as _, OffsetRangeExt as _};
|
||||
use util::ResultExt as _;
|
||||
use workspace::Workspace;
|
||||
|
||||
pub use step_view::WorkflowStepView;
|
||||
|
||||
const IMPORTS_SYMBOL: &str = "#imports";
|
||||
|
||||
pub struct WorkflowStep {
|
||||
context: WeakModel<Context>,
|
||||
context_buffer_range: Range<Anchor>,
|
||||
tool_output: String,
|
||||
resolve_task: Option<Task<()>>,
|
||||
pub resolution: Option<Result<WorkflowStepResolution, Arc<Error>>>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Eq, PartialEq)]
|
||||
pub struct WorkflowStepResolution {
|
||||
pub title: String,
|
||||
pub suggestion_groups: HashMap<Model<Buffer>, Vec<WorkflowSuggestionGroup>>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Eq, PartialEq)]
|
||||
pub struct WorkflowSuggestionGroup {
|
||||
pub context_range: Range<language::Anchor>,
|
||||
pub suggestions: Vec<WorkflowSuggestion>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Eq, PartialEq)]
|
||||
pub enum WorkflowSuggestion {
|
||||
Update {
|
||||
symbol_path: SymbolPath,
|
||||
range: Range<language::Anchor>,
|
||||
description: String,
|
||||
},
|
||||
CreateFile {
|
||||
description: String,
|
||||
},
|
||||
InsertSiblingBefore {
|
||||
symbol_path: SymbolPath,
|
||||
position: language::Anchor,
|
||||
description: String,
|
||||
},
|
||||
InsertSiblingAfter {
|
||||
symbol_path: SymbolPath,
|
||||
position: language::Anchor,
|
||||
description: String,
|
||||
},
|
||||
PrependChild {
|
||||
symbol_path: Option<SymbolPath>,
|
||||
position: language::Anchor,
|
||||
description: String,
|
||||
},
|
||||
AppendChild {
|
||||
symbol_path: Option<SymbolPath>,
|
||||
position: language::Anchor,
|
||||
description: String,
|
||||
},
|
||||
Delete {
|
||||
symbol_path: SymbolPath,
|
||||
range: Range<language::Anchor>,
|
||||
},
|
||||
}
|
||||
|
||||
impl WorkflowStep {
|
||||
pub fn new(range: Range<Anchor>, context: WeakModel<Context>) -> Self {
|
||||
Self {
|
||||
context_buffer_range: range,
|
||||
tool_output: String::new(),
|
||||
context,
|
||||
resolution: None,
|
||||
resolve_task: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn resolve(&mut self, cx: &mut ModelContext<WorkflowStep>) -> Option<()> {
|
||||
let range = self.context_buffer_range.clone();
|
||||
let context = self.context.upgrade()?;
|
||||
let context = context.read(cx);
|
||||
let project = context.project()?;
|
||||
let prompt_builder = context.prompt_builder();
|
||||
let mut request = context.to_completion_request(cx);
|
||||
let model = LanguageModelRegistry::read_global(cx).active_model();
|
||||
let context_buffer = context.buffer();
|
||||
let step_text = context_buffer
|
||||
.read(cx)
|
||||
.text_for_range(range.clone())
|
||||
.collect::<String>();
|
||||
|
||||
let mut workflow_context = String::new();
|
||||
for message in context.messages(cx) {
|
||||
write!(&mut workflow_context, "<message role={}>", message.role).unwrap();
|
||||
for chunk in context_buffer.read(cx).text_for_range(message.offset_range) {
|
||||
write!(&mut workflow_context, "{chunk}").unwrap();
|
||||
}
|
||||
write!(&mut workflow_context, "</message>").unwrap();
|
||||
}
|
||||
|
||||
self.resolve_task = Some(cx.spawn(|this, mut cx| async move {
|
||||
let result = async {
|
||||
let Some(model) = model else {
|
||||
return Err(anyhow!("no model selected"));
|
||||
};
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.tool_output.clear();
|
||||
this.resolution = None;
|
||||
this.result_updated(cx);
|
||||
cx.notify();
|
||||
})?;
|
||||
|
||||
let resolution_context = StepResolutionContext {
|
||||
workflow_context,
|
||||
step_to_resolve: step_text.clone(),
|
||||
};
|
||||
let mut prompt =
|
||||
prompt_builder.generate_step_resolution_prompt(&resolution_context)?;
|
||||
prompt.push_str(&step_text);
|
||||
request.messages.push(LanguageModelRequestMessage {
|
||||
role: Role::User,
|
||||
content: vec![prompt.into()],
|
||||
cache: false,
|
||||
});
|
||||
|
||||
// Invoke the model to get its edit suggestions for this workflow step.
|
||||
let mut stream = model
|
||||
.use_tool_stream::<tool::WorkflowStepResolutionTool>(request, &cx)
|
||||
.await?;
|
||||
while let Some(chunk) = stream.next().await {
|
||||
let chunk = chunk?;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.tool_output.push_str(&chunk);
|
||||
cx.notify();
|
||||
})?;
|
||||
}
|
||||
|
||||
let resolution = this.update(&mut cx, |this, _| {
|
||||
serde_json::from_str::<tool::WorkflowStepResolutionTool>(&this.tool_output)
|
||||
})??;
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.tool_output = serde_json::to_string_pretty(&resolution).unwrap();
|
||||
cx.notify();
|
||||
})?;
|
||||
|
||||
// Translate the parsed suggestions to our internal types, which anchor the suggestions to locations in the code.
|
||||
let suggestion_tasks: Vec<_> = resolution
|
||||
.suggestions
|
||||
.iter()
|
||||
.map(|suggestion| suggestion.resolve(project.clone(), cx.clone()))
|
||||
.collect();
|
||||
|
||||
// Expand the context ranges of each suggestion and group suggestions with overlapping context ranges.
|
||||
let suggestions = future::join_all(suggestion_tasks)
|
||||
.await
|
||||
.into_iter()
|
||||
.filter_map(|task| task.log_err())
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let mut suggestions_by_buffer = HashMap::default();
|
||||
for (buffer, suggestion) in suggestions {
|
||||
suggestions_by_buffer
|
||||
.entry(buffer)
|
||||
.or_insert_with(Vec::new)
|
||||
.push(suggestion);
|
||||
}
|
||||
|
||||
let mut suggestion_groups_by_buffer = HashMap::default();
|
||||
for (buffer, mut suggestions) in suggestions_by_buffer {
|
||||
let mut suggestion_groups = Vec::<WorkflowSuggestionGroup>::new();
|
||||
let snapshot = buffer.update(&mut cx, |buffer, _| buffer.snapshot())?;
|
||||
// Sort suggestions by their range so that earlier, larger ranges come first
|
||||
suggestions.sort_by(|a, b| a.range().cmp(&b.range(), &snapshot));
|
||||
|
||||
// Merge overlapping suggestions
|
||||
suggestions.dedup_by(|a, b| b.try_merge(a, &snapshot));
|
||||
|
||||
// Create context ranges for each suggestion
|
||||
for suggestion in suggestions {
|
||||
let context_range = {
|
||||
let suggestion_point_range = suggestion.range().to_point(&snapshot);
|
||||
let start_row = suggestion_point_range.start.row.saturating_sub(5);
|
||||
let end_row = cmp::min(
|
||||
suggestion_point_range.end.row + 5,
|
||||
snapshot.max_point().row,
|
||||
);
|
||||
let start = snapshot.anchor_before(Point::new(start_row, 0));
|
||||
let end = snapshot
|
||||
.anchor_after(Point::new(end_row, snapshot.line_len(end_row)));
|
||||
start..end
|
||||
};
|
||||
|
||||
if let Some(last_group) = suggestion_groups.last_mut() {
|
||||
if last_group
|
||||
.context_range
|
||||
.end
|
||||
.cmp(&context_range.start, &snapshot)
|
||||
.is_ge()
|
||||
{
|
||||
// Merge with the previous group if context ranges overlap
|
||||
last_group.context_range.end = context_range.end;
|
||||
last_group.suggestions.push(suggestion);
|
||||
} else {
|
||||
// Create a new group
|
||||
suggestion_groups.push(WorkflowSuggestionGroup {
|
||||
context_range,
|
||||
suggestions: vec![suggestion],
|
||||
});
|
||||
}
|
||||
} else {
|
||||
// Create the first group
|
||||
suggestion_groups.push(WorkflowSuggestionGroup {
|
||||
context_range,
|
||||
suggestions: vec![suggestion],
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
suggestion_groups_by_buffer.insert(buffer, suggestion_groups);
|
||||
}
|
||||
|
||||
Ok((resolution.step_title, suggestion_groups_by_buffer))
|
||||
};
|
||||
|
||||
let result = result.await;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.resolution = Some(match result {
|
||||
Ok((title, suggestion_groups)) => Ok(WorkflowStepResolution {
|
||||
title,
|
||||
suggestion_groups,
|
||||
}),
|
||||
Err(error) => Err(Arc::new(error)),
|
||||
});
|
||||
this.context
|
||||
.update(cx, |context, cx| context.workflow_step_updated(range, cx))
|
||||
.ok();
|
||||
cx.notify();
|
||||
})
|
||||
.ok();
|
||||
}));
|
||||
None
|
||||
}
|
||||
|
||||
fn result_updated(&mut self, cx: &mut ModelContext<Self>) {
|
||||
self.context
|
||||
.update(cx, |context, cx| {
|
||||
context.workflow_step_updated(self.context_buffer_range.clone(), cx)
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
}
|
||||
|
||||
impl WorkflowSuggestion {
|
||||
pub fn range(&self) -> Range<language::Anchor> {
|
||||
match self {
|
||||
Self::Update { range, .. } => range.clone(),
|
||||
Self::CreateFile { .. } => language::Anchor::MIN..language::Anchor::MAX,
|
||||
Self::InsertSiblingBefore { position, .. }
|
||||
| Self::InsertSiblingAfter { position, .. }
|
||||
| Self::PrependChild { position, .. }
|
||||
| Self::AppendChild { position, .. } => *position..*position,
|
||||
Self::Delete { range, .. } => range.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn description(&self) -> Option<&str> {
|
||||
match self {
|
||||
Self::Update { description, .. }
|
||||
| Self::CreateFile { description }
|
||||
| Self::InsertSiblingBefore { description, .. }
|
||||
| Self::InsertSiblingAfter { description, .. }
|
||||
| Self::PrependChild { description, .. }
|
||||
| Self::AppendChild { description, .. } => Some(description),
|
||||
Self::Delete { .. } => None,
|
||||
}
|
||||
}
|
||||
|
||||
fn description_mut(&mut self) -> Option<&mut String> {
|
||||
match self {
|
||||
Self::Update { description, .. }
|
||||
| Self::CreateFile { description }
|
||||
| Self::InsertSiblingBefore { description, .. }
|
||||
| Self::InsertSiblingAfter { description, .. }
|
||||
| Self::PrependChild { description, .. }
|
||||
| Self::AppendChild { description, .. } => Some(description),
|
||||
Self::Delete { .. } => None,
|
||||
}
|
||||
}
|
||||
|
||||
fn symbol_path(&self) -> Option<&SymbolPath> {
|
||||
match self {
|
||||
Self::Update { symbol_path, .. } => Some(symbol_path),
|
||||
Self::InsertSiblingBefore { symbol_path, .. } => Some(symbol_path),
|
||||
Self::InsertSiblingAfter { symbol_path, .. } => Some(symbol_path),
|
||||
Self::PrependChild { symbol_path, .. } => symbol_path.as_ref(),
|
||||
Self::AppendChild { symbol_path, .. } => symbol_path.as_ref(),
|
||||
Self::Delete { symbol_path, .. } => Some(symbol_path),
|
||||
Self::CreateFile { .. } => None,
|
||||
}
|
||||
}
|
||||
|
||||
fn kind(&self) -> &str {
|
||||
match self {
|
||||
Self::Update { .. } => "Update",
|
||||
Self::CreateFile { .. } => "CreateFile",
|
||||
Self::InsertSiblingBefore { .. } => "InsertSiblingBefore",
|
||||
Self::InsertSiblingAfter { .. } => "InsertSiblingAfter",
|
||||
Self::PrependChild { .. } => "PrependChild",
|
||||
Self::AppendChild { .. } => "AppendChild",
|
||||
Self::Delete { .. } => "Delete",
|
||||
}
|
||||
}
|
||||
|
||||
fn try_merge(&mut self, other: &Self, buffer: &BufferSnapshot) -> bool {
|
||||
let range = self.range();
|
||||
let other_range = other.range();
|
||||
|
||||
// Don't merge if we don't contain the other suggestion.
|
||||
if range.start.cmp(&other_range.start, buffer).is_gt()
|
||||
|| range.end.cmp(&other_range.end, buffer).is_lt()
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
if let Some(description) = self.description_mut() {
|
||||
if let Some(other_description) = other.description() {
|
||||
description.push('\n');
|
||||
description.push_str(other_description);
|
||||
}
|
||||
}
|
||||
true
|
||||
}
|
||||
|
||||
pub fn show(
|
||||
&self,
|
||||
editor: &View<Editor>,
|
||||
excerpt_id: editor::ExcerptId,
|
||||
workspace: &WeakView<Workspace>,
|
||||
assistant_panel: &View<AssistantPanel>,
|
||||
cx: &mut WindowContext,
|
||||
) -> Option<InlineAssistId> {
|
||||
let mut initial_transaction_id = None;
|
||||
let initial_prompt;
|
||||
let suggestion_range;
|
||||
let buffer = editor.read(cx).buffer().clone();
|
||||
let snapshot = buffer.read(cx).snapshot(cx);
|
||||
|
||||
match self {
|
||||
Self::Update {
|
||||
range, description, ..
|
||||
} => {
|
||||
initial_prompt = description.clone();
|
||||
suggestion_range = snapshot.anchor_in_excerpt(excerpt_id, range.start)?
|
||||
..snapshot.anchor_in_excerpt(excerpt_id, range.end)?;
|
||||
}
|
||||
Self::CreateFile { description } => {
|
||||
initial_prompt = description.clone();
|
||||
suggestion_range = editor::Anchor::min()..editor::Anchor::min();
|
||||
}
|
||||
Self::InsertSiblingBefore {
|
||||
position,
|
||||
description,
|
||||
..
|
||||
} => {
|
||||
let position = snapshot.anchor_in_excerpt(excerpt_id, *position)?;
|
||||
initial_prompt = description.clone();
|
||||
suggestion_range = buffer.update(cx, |buffer, cx| {
|
||||
buffer.start_transaction(cx);
|
||||
let line_start = buffer.insert_empty_line(position, true, true, cx);
|
||||
initial_transaction_id = buffer.end_transaction(cx);
|
||||
buffer.refresh_preview(cx);
|
||||
|
||||
let line_start = buffer.read(cx).anchor_before(line_start);
|
||||
line_start..line_start
|
||||
});
|
||||
}
|
||||
Self::InsertSiblingAfter {
|
||||
position,
|
||||
description,
|
||||
..
|
||||
} => {
|
||||
let position = snapshot.anchor_in_excerpt(excerpt_id, *position)?;
|
||||
initial_prompt = description.clone();
|
||||
suggestion_range = buffer.update(cx, |buffer, cx| {
|
||||
buffer.start_transaction(cx);
|
||||
let line_start = buffer.insert_empty_line(position, true, true, cx);
|
||||
initial_transaction_id = buffer.end_transaction(cx);
|
||||
buffer.refresh_preview(cx);
|
||||
|
||||
let line_start = buffer.read(cx).anchor_before(line_start);
|
||||
line_start..line_start
|
||||
});
|
||||
}
|
||||
Self::PrependChild {
|
||||
position,
|
||||
description,
|
||||
..
|
||||
} => {
|
||||
let position = snapshot.anchor_in_excerpt(excerpt_id, *position)?;
|
||||
initial_prompt = description.clone();
|
||||
suggestion_range = buffer.update(cx, |buffer, cx| {
|
||||
buffer.start_transaction(cx);
|
||||
let line_start = buffer.insert_empty_line(position, false, true, cx);
|
||||
initial_transaction_id = buffer.end_transaction(cx);
|
||||
buffer.refresh_preview(cx);
|
||||
|
||||
let line_start = buffer.read(cx).anchor_before(line_start);
|
||||
line_start..line_start
|
||||
});
|
||||
}
|
||||
Self::AppendChild {
|
||||
position,
|
||||
description,
|
||||
..
|
||||
} => {
|
||||
let position = snapshot.anchor_in_excerpt(excerpt_id, *position)?;
|
||||
initial_prompt = description.clone();
|
||||
suggestion_range = buffer.update(cx, |buffer, cx| {
|
||||
buffer.start_transaction(cx);
|
||||
let line_start = buffer.insert_empty_line(position, true, false, cx);
|
||||
initial_transaction_id = buffer.end_transaction(cx);
|
||||
buffer.refresh_preview(cx);
|
||||
|
||||
let line_start = buffer.read(cx).anchor_before(line_start);
|
||||
line_start..line_start
|
||||
});
|
||||
}
|
||||
Self::Delete { range, .. } => {
|
||||
initial_prompt = "Delete".to_string();
|
||||
suggestion_range = snapshot.anchor_in_excerpt(excerpt_id, range.start)?
|
||||
..snapshot.anchor_in_excerpt(excerpt_id, range.end)?;
|
||||
}
|
||||
}
|
||||
|
||||
InlineAssistant::update_global(cx, |inline_assistant, cx| {
|
||||
Some(inline_assistant.suggest_assist(
|
||||
editor,
|
||||
suggestion_range,
|
||||
initial_prompt,
|
||||
initial_transaction_id,
|
||||
Some(workspace.clone()),
|
||||
Some(assistant_panel),
|
||||
cx,
|
||||
))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub mod tool {
|
||||
use super::*;
|
||||
use anyhow::Context as _;
|
||||
use gpui::AsyncAppContext;
|
||||
use language::{Outline, OutlineItem, ParseStatus};
|
||||
use language_model::LanguageModelTool;
|
||||
use project::ProjectPath;
|
||||
use schemars::JsonSchema;
|
||||
use std::path::Path;
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct WorkflowStepResolutionTool {
|
||||
/// An extremely short title for the edit step represented by these operations.
|
||||
pub step_title: String,
|
||||
/// A sequence of operations to apply to the codebase.
|
||||
/// When multiple operations are required for a step, be sure to include multiple operations in this list.
|
||||
pub suggestions: Vec<WorkflowSuggestionTool>,
|
||||
}
|
||||
|
||||
impl LanguageModelTool for WorkflowStepResolutionTool {
|
||||
fn name() -> String {
|
||||
"edit".into()
|
||||
}
|
||||
|
||||
fn description() -> String {
|
||||
"suggest edits to one or more locations in the codebase".into()
|
||||
}
|
||||
}
|
||||
|
||||
/// A description of an operation to apply to one location in the codebase.
|
||||
///
|
||||
/// This object represents a single edit operation that can be performed on a specific file
|
||||
/// in the codebase. It encapsulates both the location (file path) and the nature of the
|
||||
/// edit to be made.
|
||||
///
|
||||
/// # Fields
|
||||
///
|
||||
/// * `path`: A string representing the file path where the edit operation should be applied.
|
||||
/// This path is relative to the root of the project or repository.
|
||||
///
|
||||
/// * `kind`: An enum representing the specific type of edit operation to be performed.
|
||||
///
|
||||
/// # Usage
|
||||
///
|
||||
/// `WorkflowSuggestionTool` is used within a code editor to represent and apply
|
||||
/// programmatic changes to source code. It provides a structured way to describe
|
||||
/// edits for features like refactoring tools or AI-assisted coding suggestions.
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct WorkflowSuggestionTool {
|
||||
/// The path to the file containing the relevant operation
|
||||
pub path: String,
|
||||
#[serde(flatten)]
|
||||
pub kind: WorkflowSuggestionToolKind,
|
||||
}
|
||||
|
||||
impl WorkflowSuggestionTool {
|
||||
pub(super) async fn resolve(
|
||||
&self,
|
||||
project: Model<Project>,
|
||||
mut cx: AsyncAppContext,
|
||||
) -> Result<(Model<Buffer>, super::WorkflowSuggestion)> {
|
||||
let path = self.path.clone();
|
||||
let kind = self.kind.clone();
|
||||
let buffer = project
|
||||
.update(&mut cx, |project, cx| {
|
||||
let project_path = project
|
||||
.find_project_path(Path::new(&path), cx)
|
||||
.or_else(|| {
|
||||
// If we couldn't find a project path for it, put it in the active worktree
|
||||
// so that when we create the buffer, it can be saved.
|
||||
let worktree = project
|
||||
.active_entry()
|
||||
.and_then(|entry_id| project.worktree_for_entry(entry_id, cx))
|
||||
.or_else(|| project.worktrees(cx).next())?;
|
||||
let worktree = worktree.read(cx);
|
||||
|
||||
Some(ProjectPath {
|
||||
worktree_id: worktree.id(),
|
||||
path: Arc::from(Path::new(&path)),
|
||||
})
|
||||
})
|
||||
.with_context(|| format!("worktree not found for {:?}", path))?;
|
||||
anyhow::Ok(project.open_buffer(project_path, cx))
|
||||
})??
|
||||
.await?;
|
||||
|
||||
let mut parse_status = buffer.read_with(&cx, |buffer, _cx| buffer.parse_status())?;
|
||||
while *parse_status.borrow() != ParseStatus::Idle {
|
||||
parse_status.changed().await?;
|
||||
}
|
||||
|
||||
let snapshot = buffer.update(&mut cx, |buffer, _| buffer.snapshot())?;
|
||||
let outline = snapshot.outline(None).context("no outline for buffer")?;
|
||||
|
||||
let suggestion = match kind {
|
||||
WorkflowSuggestionToolKind::Update {
|
||||
symbol,
|
||||
description,
|
||||
} => {
|
||||
let (symbol_path, symbol) = Self::resolve_symbol(&snapshot, &outline, &symbol)?;
|
||||
let start = symbol
|
||||
.annotation_range
|
||||
.map_or(symbol.range.start, |range| range.start);
|
||||
let start = Point::new(start.row, 0);
|
||||
let end = Point::new(
|
||||
symbol.range.end.row,
|
||||
snapshot.line_len(symbol.range.end.row),
|
||||
);
|
||||
let range = snapshot.anchor_before(start)..snapshot.anchor_after(end);
|
||||
WorkflowSuggestion::Update {
|
||||
range,
|
||||
description,
|
||||
symbol_path,
|
||||
}
|
||||
}
|
||||
WorkflowSuggestionToolKind::Create { description } => {
|
||||
WorkflowSuggestion::CreateFile { description }
|
||||
}
|
||||
WorkflowSuggestionToolKind::InsertSiblingBefore {
|
||||
symbol,
|
||||
description,
|
||||
} => {
|
||||
let (symbol_path, symbol) = Self::resolve_symbol(&snapshot, &outline, &symbol)?;
|
||||
let position = snapshot.anchor_before(
|
||||
symbol
|
||||
.annotation_range
|
||||
.map_or(symbol.range.start, |annotation_range| {
|
||||
annotation_range.start
|
||||
}),
|
||||
);
|
||||
WorkflowSuggestion::InsertSiblingBefore {
|
||||
position,
|
||||
description,
|
||||
symbol_path,
|
||||
}
|
||||
}
|
||||
WorkflowSuggestionToolKind::InsertSiblingAfter {
|
||||
symbol,
|
||||
description,
|
||||
} => {
|
||||
let (symbol_path, symbol) = Self::resolve_symbol(&snapshot, &outline, &symbol)?;
|
||||
let position = snapshot.anchor_after(symbol.range.end);
|
||||
WorkflowSuggestion::InsertSiblingAfter {
|
||||
position,
|
||||
description,
|
||||
symbol_path,
|
||||
}
|
||||
}
|
||||
WorkflowSuggestionToolKind::PrependChild {
|
||||
symbol,
|
||||
description,
|
||||
} => {
|
||||
if let Some(symbol) = symbol {
|
||||
let (symbol_path, symbol) =
|
||||
Self::resolve_symbol(&snapshot, &outline, &symbol)?;
|
||||
|
||||
let position = snapshot.anchor_after(
|
||||
symbol
|
||||
.body_range
|
||||
.map_or(symbol.range.start, |body_range| body_range.start),
|
||||
);
|
||||
WorkflowSuggestion::PrependChild {
|
||||
position,
|
||||
description,
|
||||
symbol_path: Some(symbol_path),
|
||||
}
|
||||
} else {
|
||||
WorkflowSuggestion::PrependChild {
|
||||
position: language::Anchor::MIN,
|
||||
description,
|
||||
symbol_path: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
WorkflowSuggestionToolKind::AppendChild {
|
||||
symbol,
|
||||
description,
|
||||
} => {
|
||||
if let Some(symbol) = symbol {
|
||||
let (symbol_path, symbol) =
|
||||
Self::resolve_symbol(&snapshot, &outline, &symbol)?;
|
||||
|
||||
let position = snapshot.anchor_before(
|
||||
symbol
|
||||
.body_range
|
||||
.map_or(symbol.range.end, |body_range| body_range.end),
|
||||
);
|
||||
WorkflowSuggestion::AppendChild {
|
||||
position,
|
||||
description,
|
||||
symbol_path: Some(symbol_path),
|
||||
}
|
||||
} else {
|
||||
WorkflowSuggestion::PrependChild {
|
||||
position: language::Anchor::MAX,
|
||||
description,
|
||||
symbol_path: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
WorkflowSuggestionToolKind::Delete { symbol } => {
|
||||
let (symbol_path, symbol) = Self::resolve_symbol(&snapshot, &outline, &symbol)?;
|
||||
let start = symbol
|
||||
.annotation_range
|
||||
.map_or(symbol.range.start, |range| range.start);
|
||||
let start = Point::new(start.row, 0);
|
||||
let end = Point::new(
|
||||
symbol.range.end.row,
|
||||
snapshot.line_len(symbol.range.end.row),
|
||||
);
|
||||
let range = snapshot.anchor_before(start)..snapshot.anchor_after(end);
|
||||
WorkflowSuggestion::Delete { range, symbol_path }
|
||||
}
|
||||
};
|
||||
|
||||
Ok((buffer, suggestion))
|
||||
}
|
||||
|
||||
fn resolve_symbol(
|
||||
snapshot: &BufferSnapshot,
|
||||
outline: &Outline<Anchor>,
|
||||
symbol: &str,
|
||||
) -> Result<(SymbolPath, OutlineItem<Point>)> {
|
||||
if symbol == IMPORTS_SYMBOL {
|
||||
let target_row = find_first_non_comment_line(snapshot);
|
||||
Ok((
|
||||
SymbolPath(IMPORTS_SYMBOL.to_string()),
|
||||
OutlineItem {
|
||||
range: Point::new(target_row, 0)..Point::new(target_row + 1, 0),
|
||||
..Default::default()
|
||||
},
|
||||
))
|
||||
} else {
|
||||
let (symbol_path, symbol) = outline
|
||||
.find_most_similar(symbol)
|
||||
.with_context(|| format!("symbol not found: {symbol}"))?;
|
||||
Ok((symbol_path, symbol.to_point(snapshot)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn find_first_non_comment_line(snapshot: &BufferSnapshot) -> u32 {
|
||||
let Some(language) = snapshot.language() else {
|
||||
return 0;
|
||||
};
|
||||
|
||||
let scope = language.default_scope();
|
||||
let comment_prefixes = scope.line_comment_prefixes();
|
||||
|
||||
let mut chunks = snapshot.as_rope().chunks();
|
||||
let mut target_row = 0;
|
||||
loop {
|
||||
let starts_with_comment = chunks
|
||||
.peek()
|
||||
.map(|chunk| {
|
||||
comment_prefixes
|
||||
.iter()
|
||||
.any(|s| chunk.starts_with(s.as_ref().trim_end()))
|
||||
})
|
||||
.unwrap_or(false);
|
||||
|
||||
if !starts_with_comment {
|
||||
break;
|
||||
}
|
||||
|
||||
target_row += 1;
|
||||
if !chunks.next_line() {
|
||||
break;
|
||||
}
|
||||
}
|
||||
target_row
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, JsonSchema)]
|
||||
#[serde(tag = "kind")]
|
||||
pub enum WorkflowSuggestionToolKind {
|
||||
/// Rewrites the specified symbol entirely based on the given description.
|
||||
/// This operation completely replaces the existing symbol with new content.
|
||||
Update {
|
||||
/// A fully-qualified reference to the symbol, e.g. `mod foo impl Bar pub fn baz` instead of just `fn baz`.
|
||||
/// The path should uniquely identify the symbol within the containing file.
|
||||
symbol: String,
|
||||
/// A brief description of the transformation to apply to the symbol.
|
||||
description: String,
|
||||
},
|
||||
/// Creates a new file with the given path based on the provided description.
|
||||
/// This operation adds a new file to the codebase.
|
||||
Create {
|
||||
/// A brief description of the file to be created.
|
||||
description: String,
|
||||
},
|
||||
/// Inserts a new symbol based on the given description before the specified symbol.
|
||||
/// This operation adds new content immediately preceding an existing symbol.
|
||||
InsertSiblingBefore {
|
||||
/// A fully-qualified reference to the symbol, e.g. `mod foo impl Bar pub fn baz` instead of just `fn baz`.
|
||||
/// The new content will be inserted immediately before this symbol.
|
||||
symbol: String,
|
||||
/// A brief description of the new symbol to be inserted.
|
||||
description: String,
|
||||
},
|
||||
/// Inserts a new symbol based on the given description after the specified symbol.
|
||||
/// This operation adds new content immediately following an existing symbol.
|
||||
InsertSiblingAfter {
|
||||
/// A fully-qualified reference to the symbol, e.g. `mod foo impl Bar pub fn baz` instead of just `fn baz`.
|
||||
/// The new content will be inserted immediately after this symbol.
|
||||
symbol: String,
|
||||
/// A brief description of the new symbol to be inserted.
|
||||
description: String,
|
||||
},
|
||||
/// Inserts a new symbol as a child of the specified symbol at the start.
|
||||
/// This operation adds new content as the first child of an existing symbol (or file if no symbol is provided).
|
||||
PrependChild {
|
||||
/// An optional fully-qualified reference to the symbol after the code you want to insert, e.g. `mod foo impl Bar pub fn baz` instead of just `fn baz`.
|
||||
/// If provided, the new content will be inserted as the first child of this symbol.
|
||||
/// If not provided, the new content will be inserted at the top of the file.
|
||||
symbol: Option<String>,
|
||||
/// A brief description of the new symbol to be inserted.
|
||||
description: String,
|
||||
},
|
||||
/// Inserts a new symbol as a child of the specified symbol at the end.
|
||||
/// This operation adds new content as the last child of an existing symbol (or file if no symbol is provided).
|
||||
AppendChild {
|
||||
/// An optional fully-qualified reference to the symbol before the code you want to insert, e.g. `mod foo impl Bar pub fn baz` instead of just `fn baz`.
|
||||
/// If provided, the new content will be inserted as the last child of this symbol.
|
||||
/// If not provided, the new content will be applied at the bottom of the file.
|
||||
symbol: Option<String>,
|
||||
/// A brief description of the new symbol to be inserted.
|
||||
description: String,
|
||||
},
|
||||
/// Deletes the specified symbol from the containing file.
|
||||
Delete {
|
||||
/// A fully-qualified reference to the symbol to be deleted, e.g. `mod foo impl Bar pub fn baz` instead of just `fn baz`.
|
||||
symbol: String,
|
||||
},
|
||||
}
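// Illustrative sketch (an assumption, not part of the original change): because
// of `#[serde(tag = "kind")]` on the enum and `#[serde(flatten)]` on the `kind`
// field, a suggestion serializes with the discriminant and the variant's fields
// flattened alongside `path`.
#[cfg(test)]
mod wire_format_example {
    use super::*;

    #[test]
    fn suggestion_tool_json_shape() {
        let suggestion = WorkflowSuggestionTool {
            path: "src/lib.rs".into(),
            kind: WorkflowSuggestionToolKind::InsertSiblingAfter {
                symbol: "impl Foo fn bar".into(),
                description: "Add a baz method".into(),
            },
        };
        let json = serde_json::to_value(&suggestion).unwrap();
        assert_eq!(json["path"], "src/lib.rs");
        assert_eq!(json["kind"], "InsertSiblingAfter");
    }
}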
|
||||
}
|
||||
crates/assistant/src/workflow/step_view.rs (new file, 315 lines)
@@ -0,0 +1,315 @@
|
||||
use super::WorkflowStep;
|
||||
use crate::{Assist, Context};
|
||||
use editor::{
|
||||
display_map::{BlockDisposition, BlockProperties, BlockStyle},
|
||||
Editor, EditorEvent, ExcerptRange, MultiBuffer,
|
||||
};
|
||||
use gpui::{
|
||||
div, AnyElement, AppContext, Context as _, Empty, EventEmitter, FocusableView, IntoElement,
|
||||
Model, ParentElement as _, Render, SharedString, Styled as _, View, ViewContext,
|
||||
VisualContext as _, WeakModel, WindowContext,
|
||||
};
|
||||
use language::{language_settings::SoftWrap, Anchor, Buffer, LanguageRegistry};
|
||||
use std::{ops::DerefMut, sync::Arc};
|
||||
use text::OffsetRangeExt;
|
||||
use theme::ActiveTheme as _;
|
||||
use ui::{
|
||||
h_flex, v_flex, ButtonCommon as _, ButtonLike, ButtonStyle, Color, Icon, IconName,
|
||||
InteractiveElement as _, Label, LabelCommon as _,
|
||||
};
|
||||
use workspace::{
|
||||
item::{self, Item},
|
||||
pane,
|
||||
searchable::SearchableItemHandle,
|
||||
};
|
||||
|
||||
pub struct WorkflowStepView {
|
||||
step: WeakModel<WorkflowStep>,
|
||||
tool_output_buffer: Model<Buffer>,
|
||||
editor: View<Editor>,
|
||||
}
|
||||
|
||||
impl WorkflowStepView {
|
||||
pub fn new(
|
||||
context: Model<Context>,
|
||||
step: Model<WorkflowStep>,
|
||||
language_registry: Arc<LanguageRegistry>,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Self {
|
||||
let tool_output_buffer =
|
||||
cx.new_model(|cx| Buffer::local(step.read(cx).tool_output.clone(), cx));
|
||||
let buffer = cx.new_model(|cx| {
|
||||
let mut buffer = MultiBuffer::without_headers(0, language::Capability::ReadWrite);
|
||||
buffer.push_excerpts(
|
||||
context.read(cx).buffer().clone(),
|
||||
[ExcerptRange {
|
||||
context: step.read(cx).context_buffer_range.clone(),
|
||||
primary: None,
|
||||
}],
|
||||
cx,
|
||||
);
|
||||
buffer.push_excerpts(
|
||||
tool_output_buffer.clone(),
|
||||
[ExcerptRange {
|
||||
context: Anchor::MIN..Anchor::MAX,
|
||||
primary: None,
|
||||
}],
|
||||
cx,
|
||||
);
|
||||
buffer
|
||||
});
|
||||
|
||||
let buffer_snapshot = buffer.read(cx).snapshot(cx);
|
||||
let output_excerpt = buffer_snapshot.excerpts().skip(1).next().unwrap().0;
|
||||
let input_start_anchor = multi_buffer::Anchor::min();
|
||||
let output_start_anchor = buffer_snapshot
|
||||
.anchor_in_excerpt(output_excerpt, Anchor::MIN)
|
||||
.unwrap();
|
||||
let output_end_anchor = multi_buffer::Anchor::max();
|
||||
|
||||
let handle = cx.view().downgrade();
|
||||
let editor = cx.new_view(|cx| {
|
||||
let mut editor = Editor::for_multibuffer(buffer.clone(), None, false, cx);
|
||||
editor.set_soft_wrap_mode(SoftWrap::EditorWidth, cx);
|
||||
editor.set_show_line_numbers(false, cx);
|
||||
editor.set_show_git_diff_gutter(false, cx);
|
||||
editor.set_show_code_actions(false, cx);
|
||||
editor.set_show_runnables(false, cx);
|
||||
editor.set_show_wrap_guides(false, cx);
|
||||
editor.set_show_indent_guides(false, cx);
|
||||
editor.set_read_only(true);
|
||||
editor.set_show_inline_completions(false);
|
||||
editor.insert_blocks(
|
||||
[
|
||||
BlockProperties {
|
||||
position: input_start_anchor,
|
||||
height: 1,
|
||||
style: BlockStyle::Fixed,
|
||||
render: Box::new(|cx| section_header("Step Input", cx)),
|
||||
disposition: BlockDisposition::Above,
|
||||
priority: 0,
|
||||
},
|
||||
BlockProperties {
|
||||
position: output_start_anchor,
|
||||
height: 1,
|
||||
style: BlockStyle::Fixed,
|
||||
render: Box::new(|cx| section_header("Tool Output", cx)),
|
||||
disposition: BlockDisposition::Above,
|
||||
priority: 0,
|
||||
},
|
||||
BlockProperties {
|
||||
position: output_end_anchor,
|
||||
height: 1,
|
||||
style: BlockStyle::Fixed,
|
||||
render: Box::new(move |cx| {
|
||||
if let Some(result) = handle.upgrade().and_then(|this| {
|
||||
this.update(cx.deref_mut(), |this, cx| this.render_result(cx))
|
||||
}) {
|
||||
v_flex()
|
||||
.child(section_header("Output", cx))
|
||||
.child(
|
||||
div().pl(cx.gutter_dimensions.full_width()).child(result),
|
||||
)
|
||||
.into_any_element()
|
||||
} else {
|
||||
Empty.into_any_element()
|
||||
}
|
||||
}),
|
||||
disposition: BlockDisposition::Below,
|
||||
priority: 0,
|
||||
},
|
||||
],
|
||||
None,
|
||||
cx,
|
||||
);
|
||||
editor
|
||||
});
|
||||
|
||||
cx.observe(&step, Self::step_updated).detach();
|
||||
cx.observe_release(&step, Self::step_released).detach();
|
||||
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
if let Ok(language) = language_registry.language_for_name("JSON").await {
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.tool_output_buffer.update(cx, |buffer, cx| {
|
||||
buffer.set_language(Some(language), cx);
|
||||
});
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
|
||||
Self {
|
||||
tool_output_buffer,
|
||||
step: step.downgrade(),
|
||||
editor,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn step(&self) -> &WeakModel<WorkflowStep> {
|
||||
&self.step
|
||||
}
|
||||
|
||||
fn render_result(&mut self, cx: &mut ViewContext<Self>) -> Option<AnyElement> {
|
||||
let step = self.step.upgrade()?;
|
||||
let result = step.read(cx).resolution.as_ref()?;
|
||||
match result {
|
||||
Ok(result) => {
|
||||
Some(
|
||||
v_flex()
|
||||
.child(result.title.clone())
|
||||
.children(result.suggestion_groups.iter().filter_map(
|
||||
|(buffer, suggestion_groups)| {
|
||||
let buffer = buffer.read(cx);
|
||||
let path = buffer.file().map(|f| f.path());
|
||||
let snapshot = buffer.snapshot();
|
||||
v_flex()
|
||||
.mb_2()
|
||||
.border_b_1()
|
||||
.children(path.map(|path| format!("path: {}", path.display())))
|
||||
.children(suggestion_groups.iter().map(|group| {
|
||||
v_flex().pt_2().pl_2().children(
|
||||
group.suggestions.iter().map(|suggestion| {
|
||||
let range = suggestion.range().to_point(&snapshot);
|
||||
v_flex()
|
||||
.children(
|
||||
suggestion.description().map(|desc| {
|
||||
format!("description: {desc}")
|
||||
}),
|
||||
)
|
||||
.child(format!("kind: {}", suggestion.kind()))
|
||||
.children(suggestion.symbol_path().map(
|
||||
|path| format!("symbol path: {}", path.0),
|
||||
))
|
||||
.child(format!(
|
||||
"lines: {} - {}",
|
||||
range.start.row + 1,
|
||||
range.end.row + 1
|
||||
))
|
||||
}),
|
||||
)
|
||||
}))
|
||||
.into()
|
||||
},
|
||||
))
|
||||
.into_any_element(),
|
||||
)
|
||||
}
|
||||
Err(error) => Some(format!("{:?}", error).into_any_element()),
|
||||
}
|
||||
}
|
||||
|
||||
fn step_updated(&mut self, step: Model<WorkflowStep>, cx: &mut ViewContext<Self>) {
|
||||
self.tool_output_buffer.update(cx, |buffer, cx| {
|
||||
let text = step.read(cx).tool_output.clone();
|
||||
buffer.set_text(text, cx);
|
||||
});
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
fn step_released(&mut self, _: &mut WorkflowStep, cx: &mut ViewContext<Self>) {
|
||||
cx.emit(EditorEvent::Closed);
|
||||
}
|
||||
|
||||
fn resolve(&mut self, _: &Assist, cx: &mut ViewContext<Self>) {
|
||||
self.step
|
||||
.update(cx, |step, cx| {
|
||||
step.resolve(cx);
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
}
|
||||
|
||||
fn section_header(
|
||||
name: &'static str,
|
||||
cx: &mut editor::display_map::BlockContext,
|
||||
) -> gpui::AnyElement {
|
||||
h_flex()
|
||||
.pl(cx.gutter_dimensions.full_width())
|
||||
.h_11()
|
||||
.w_full()
|
||||
.relative()
|
||||
.gap_1()
|
||||
.child(
|
||||
ButtonLike::new("role")
|
||||
.style(ButtonStyle::Filled)
|
||||
.child(Label::new(name).color(Color::Default)),
|
||||
)
|
||||
.into_any_element()
|
||||
}
|
||||
|
||||
impl Render for WorkflowStepView {
|
||||
fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
|
||||
div()
|
||||
.key_context("ContextEditor")
|
||||
.on_action(cx.listener(Self::resolve))
|
||||
.flex_grow()
|
||||
.bg(cx.theme().colors().editor_background)
|
||||
.child(self.editor.clone())
|
||||
}
|
||||
}
|
||||
|
||||
impl EventEmitter<EditorEvent> for WorkflowStepView {}
|
||||
|
||||
impl FocusableView for WorkflowStepView {
|
||||
fn focus_handle(&self, cx: &gpui::AppContext) -> gpui::FocusHandle {
|
||||
self.editor.read(cx).focus_handle(cx)
|
||||
}
|
||||
}
|
||||
|
||||
impl Item for WorkflowStepView {
|
||||
type Event = EditorEvent;
|
||||
|
||||
fn tab_content_text(&self, cx: &WindowContext) -> Option<SharedString> {
|
||||
let step = self.step.upgrade()?.read(cx);
|
||||
let context = step.context.upgrade()?.read(cx);
|
||||
let buffer = context.buffer().read(cx);
|
||||
let index = context
|
||||
.workflow_step_index_for_range(&step.context_buffer_range, buffer)
|
||||
.ok()?
|
||||
+ 1;
|
||||
Some(format!("Step {index}").into())
|
||||
}
|
||||
|
||||
fn tab_icon(&self, _cx: &WindowContext) -> Option<ui::Icon> {
|
||||
Some(Icon::new(IconName::SearchCode))
|
||||
}
|
||||
|
||||
fn to_item_events(event: &Self::Event, mut f: impl FnMut(item::ItemEvent)) {
|
||||
match event {
|
||||
EditorEvent::Edited { .. } => {
|
||||
f(item::ItemEvent::Edit);
|
||||
}
|
||||
EditorEvent::TitleChanged => {
|
||||
f(item::ItemEvent::UpdateTab);
|
||||
}
|
||||
EditorEvent::Closed => f(item::ItemEvent::CloseItem),
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
fn tab_tooltip_text(&self, _cx: &AppContext) -> Option<SharedString> {
|
||||
None
|
||||
}
|
||||
|
||||
fn as_searchable(&self, _handle: &View<Self>) -> Option<Box<dyn SearchableItemHandle>> {
|
||||
None
|
||||
}
|
||||
|
||||
fn set_nav_history(&mut self, nav_history: pane::ItemNavHistory, cx: &mut ViewContext<Self>) {
|
||||
self.editor.update(cx, |editor, cx| {
|
||||
Item::set_nav_history(editor, nav_history, cx)
|
||||
})
|
||||
}
|
||||
|
||||
fn navigate(&mut self, data: Box<dyn std::any::Any>, cx: &mut ViewContext<Self>) -> bool {
|
||||
self.editor
|
||||
.update(cx, |editor, cx| Item::navigate(editor, data, cx))
|
||||
}
|
||||
|
||||
fn deactivated(&mut self, cx: &mut ViewContext<Self>) {
|
||||
self.editor
|
||||
.update(cx, |editor, cx| Item::deactivated(editor, cx))
|
||||
}
|
||||
}
|
||||
@@ -15,14 +15,45 @@ pub fn init(cx: &mut AppContext) {
|
||||
SlashCommandRegistry::default_global(cx);
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
|
||||
pub enum AfterCompletion {
|
||||
/// Run the command
|
||||
Run,
|
||||
/// Continue composing the current argument; doesn't add a space
|
||||
Compose,
|
||||
/// Continue the command composition; adds a space
|
||||
Continue,
|
||||
}
|
||||
|
||||
impl From<bool> for AfterCompletion {
|
||||
fn from(value: bool) -> Self {
|
||||
if value {
|
||||
AfterCompletion::Run
|
||||
} else {
|
||||
AfterCompletion::Continue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl AfterCompletion {
|
||||
pub fn run(&self) -> bool {
|
||||
match self {
|
||||
AfterCompletion::Run => true,
|
||||
AfterCompletion::Compose | AfterCompletion::Continue => false,
|
||||
}
|
||||
}
|
||||
}
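// Small sketch (an assumption, not part of this diff): the `From<bool>` impl
// above preserves the old boolean `run_command` semantics, so call sites that
// still track "run after completion" as a bool can convert losslessly.
fn after_completion_for(run_command: bool) -> AfterCompletion {
    let after: AfterCompletion = run_command.into();
    debug_assert_eq!(after.run(), run_command);
    after
}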
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct ArgumentCompletion {
|
||||
/// The label to display for this completion.
|
||||
pub label: String,
|
||||
pub label: CodeLabel,
|
||||
/// The new text that should be inserted into the command when this completion is accepted.
|
||||
pub new_text: String,
|
||||
/// Whether the command should be run when accepting this completion.
|
||||
pub run_command: bool,
|
||||
pub after_completion: AfterCompletion,
|
||||
/// Whether to replace all of the arguments, or whether to treat this as an independent argument.
|
||||
pub replace_previous_arguments: bool,
|
||||
}
|
||||
|
||||
pub trait SlashCommand: 'static + Send + Sync {

@@ -34,15 +65,18 @@ pub trait SlashCommand: 'static + Send + Sync {
    fn menu_text(&self) -> String;
    fn complete_argument(
        self: Arc<Self>,
        query: String,
        arguments: &[String],
        cancel: Arc<AtomicBool>,
        workspace: Option<WeakView<Workspace>>,
        cx: &mut AppContext,
        cx: &mut WindowContext,
    ) -> Task<Result<Vec<ArgumentCompletion>>>;
    fn requires_argument(&self) -> bool;
    fn accepts_arguments(&self) -> bool {
        self.requires_argument()
    }
    fn run(
        self: Arc<Self>,
        argument: Option<&str>,
        arguments: &[String],
        workspace: WeakView<Workspace>,
        // TODO: We're just using the `LspAdapterDelegate` here because that is
        // what the extension API is already expecting.

@@ -56,6 +56,18 @@ impl SlashCommandRegistry {
        state.commands.insert(command_name, Arc::new(command));
    }

    /// Unregisters the provided [`SlashCommand`].
    pub fn unregister_command(&self, command: impl SlashCommand) {
        self.unregister_command_by_name(command.name().as_str())
    }

    /// Unregisters the command with the given name.
    pub fn unregister_command_by_name(&self, command_name: &str) {
        let mut state = self.state.write();
        state.featured_commands.remove(command_name);
        state.commands.remove(command_name);
    }

    /// Returns the names of registered [`SlashCommand`]s.
    pub fn command_names(&self) -> Vec<Arc<str>> {
        self.state.read().commands.keys().cloned().collect()

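A hypothetical caller-side sketch of the new unregistration methods (the `registry` handle and the command name here are illustrative, not taken from the diff):

```rust
// Assumes `registry` is a SlashCommandRegistry on which "my-command" was
// registered earlier; removing it by name also drops it from the featured set.
registry.unregister_command_by_name("my-command");
assert!(!registry
    .command_names()
    .iter()
    .any(|name| name.as_ref() == "my-command"));
```
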
@@ -26,6 +26,7 @@ paths.workspace = true
release_channel.workspace = true
serde.workspace = true
util.workspace = true
tempfile.workspace = true

[target.'cfg(target_os = "linux")'.dependencies]
exec.workspace = true

@@ -11,6 +11,7 @@ use std::{
    sync::Arc,
    thread::{self, JoinHandle},
};
use tempfile::NamedTempFile;
use util::paths::PathWithPosition;

struct Detect;
@@ -22,7 +23,11 @@ trait InstalledApp {
}

#[derive(Parser, Debug)]
#[command(name = "zed", disable_version_flag = true)]
#[command(
    name = "zed",
    disable_version_flag = true,
    after_help = "To read from stdin, append '-' (e.g. 'ps axf | zed -')"
)]
struct Args {
    /// Wait for all of the given paths to be opened/closed before exiting.
    #[arg(short, long)]

@@ -120,6 +125,7 @@ fn main() -> Result<()> {
    let exit_status = Arc::new(Mutex::new(None));
    let mut paths = vec![];
    let mut urls = vec![];
    let mut stdin_tmp_file: Option<fs::File> = None;
    for path in args.paths_with_position.iter() {
        if path.starts_with("zed://")
            || path.starts_with("http://")
@@ -128,6 +134,11 @@
            || path.starts_with("ssh://")
        {
            urls.push(path.to_string());
        } else if path == "-" && args.paths_with_position.len() == 1 {
            let file = NamedTempFile::new()?;
            paths.push(file.path().to_string_lossy().to_string());
            let (file, _) = file.keep()?;
            stdin_tmp_file = Some(file);
        } else {
            paths.push(parse_path_with_position(path)?)
        }
@@ -162,11 +173,31 @@ fn main() -> Result<()> {
        }
    });

    let pipe_handle: JoinHandle<anyhow::Result<()>> = thread::spawn(move || {
        if let Some(mut tmp_file) = stdin_tmp_file {
            let mut stdin = std::io::stdin().lock();
            if io::IsTerminal::is_terminal(&stdin) {
                return Ok(());
            }
            let mut buffer = [0; 8 * 1024];
            loop {
                let bytes_read = io::Read::read(&mut stdin, &mut buffer)?;
                if bytes_read == 0 {
                    break;
                }
                io::Write::write(&mut tmp_file, &buffer[..bytes_read])?;
            }
            io::Write::flush(&mut tmp_file)?;
        }
        Ok(())
    });

    if args.foreground {
        app.run_foreground(url)?;
    } else {
        app.launch(url)?;
        sender.join().unwrap()?;
        pipe_handle.join().unwrap()?;
    }

    if let Some(exit_status) = exit_status.lock().take() {

@@ -27,7 +27,6 @@ fs.workspace = true
futures.workspace = true
gpui.workspace = true
http_client.workspace = true
lazy_static.workspace = true
log.workspace = true
once_cell.workspace = true
paths.workspace = true
@@ -49,6 +48,7 @@ text.workspace = true
thiserror.workspace = true
time.workspace = true
tiny_http = "0.8"
tokio-socks = { version = "0.5.2", default-features = false, features = ["futures-io"] }
url.workspace = true
util.workspace = true
worktree.workspace = true

@@ -1,6 +1,7 @@
#[cfg(any(test, feature = "test-support"))]
pub mod test;

mod socks;
pub mod telemetry;
pub mod user;

@@ -22,7 +23,6 @@ use gpui::{
    actions, AnyModel, AnyWeakModel, AppContext, AsyncAppContext, Global, Model, Task, WeakModel,
};
use http_client::{AsyncBody, HttpClient, HttpClientWithUrl};
use lazy_static::lazy_static;
use parking_lot::RwLock;
use postage::watch;
use proto::ProtoClient;
@@ -32,6 +32,7 @@ use rpc::proto::{AnyTypedEnvelope, EntityMessage, EnvelopedMessage, PeerId, Requ
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsSources};
use socks::connect_socks_proxy_stream;
use std::fmt;
use std::pin::Pin;
use std::{
@@ -43,7 +44,7 @@ use std::{
    path::PathBuf,
    sync::{
        atomic::{AtomicU64, Ordering},
        Arc, Weak,
        Arc, LazyLock, Weak,
    },
    time::{Duration, Instant},
};

@@ -65,27 +66,35 @@ impl fmt::Display for DevServerToken {
    }
}

lazy_static! {
    static ref ZED_SERVER_URL: Option<String> = std::env::var("ZED_SERVER_URL").ok();
    static ref ZED_RPC_URL: Option<String> = std::env::var("ZED_RPC_URL").ok();
    /// An environment variable whose presence indicates that the development auth
    /// provider should be used.
    ///
    /// Only works in development. Setting this environment variable in other release
    /// channels is a no-op.
    pub static ref ZED_DEVELOPMENT_AUTH: bool =
        std::env::var("ZED_DEVELOPMENT_AUTH").map_or(false, |value| !value.is_empty());
    pub static ref IMPERSONATE_LOGIN: Option<String> = std::env::var("ZED_IMPERSONATE")
static ZED_SERVER_URL: LazyLock<Option<String>> =
    LazyLock::new(|| std::env::var("ZED_SERVER_URL").ok());
static ZED_RPC_URL: LazyLock<Option<String>> = LazyLock::new(|| std::env::var("ZED_RPC_URL").ok());

/// An environment variable whose presence indicates that the development auth
/// provider should be used.
///
/// Only works in development. Setting this environment variable in other release
/// channels is a no-op.
pub static ZED_DEVELOPMENT_AUTH: LazyLock<bool> = LazyLock::new(|| {
    std::env::var("ZED_DEVELOPMENT_AUTH").map_or(false, |value| !value.is_empty())
});
pub static IMPERSONATE_LOGIN: LazyLock<Option<String>> = LazyLock::new(|| {
    std::env::var("ZED_IMPERSONATE")
        .ok()
        .and_then(|s| if s.is_empty() { None } else { Some(s) });
    pub static ref ADMIN_API_TOKEN: Option<String> = std::env::var("ZED_ADMIN_API_TOKEN")
        .and_then(|s| if s.is_empty() { None } else { Some(s) })
});

pub static ADMIN_API_TOKEN: LazyLock<Option<String>> = LazyLock::new(|| {
    std::env::var("ZED_ADMIN_API_TOKEN")
        .ok()
        .and_then(|s| if s.is_empty() { None } else { Some(s) });
    pub static ref ZED_APP_PATH: Option<PathBuf> =
        std::env::var("ZED_APP_PATH").ok().map(PathBuf::from);
    pub static ref ZED_ALWAYS_ACTIVE: bool =
        std::env::var("ZED_ALWAYS_ACTIVE").map_or(false, |e| !e.is_empty());
}
        .and_then(|s| if s.is_empty() { None } else { Some(s) })
});

pub static ZED_APP_PATH: LazyLock<Option<PathBuf>> =
    LazyLock::new(|| std::env::var("ZED_APP_PATH").ok().map(PathBuf::from));

pub static ZED_ALWAYS_ACTIVE: LazyLock<bool> =
    LazyLock::new(|| std::env::var("ZED_ALWAYS_ACTIVE").map_or(false, |e| !e.is_empty()));

pub const INITIAL_RECONNECTION_DELAY: Duration = Duration::from_millis(500);
pub const MAX_RECONNECTION_DELAY: Duration = Duration::from_secs(10);

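For reference, a self-contained sketch of the `lazy_static` to `std::sync::LazyLock` pattern used above (the variable name here is made up): the initializer closure runs once, on first dereference, instead of going through a macro-generated type.

```rust
use std::sync::LazyLock;

// Hypothetical flag, mirroring the statics above: the environment variable is
// read exactly once, the first time the static is dereferenced.
static EXAMPLE_FLAG: LazyLock<bool> =
    LazyLock::new(|| std::env::var("EXAMPLE_FLAG").map_or(false, |value| !value.is_empty()));

fn main() {
    if *EXAMPLE_FLAG {
        println!("EXAMPLE_FLAG is set");
    }
}
```
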
@@ -1170,6 +1179,7 @@ impl Client {
            .unwrap_or_default();

        let http = self.http.clone();
        let proxy = http.proxy().cloned();
        let credentials = credentials.clone();
        let rpc_url = self.rpc_url(http, release_channel);
        cx.background_executor().spawn(async move {
@@ -1191,7 +1201,7 @@ impl Client {
                .host_str()
                .zip(rpc_url.port_or_known_default())
                .ok_or_else(|| anyhow!("missing host in rpc url"))?;
            let stream = smol::net::TcpStream::connect(rpc_host).await?;
            let stream = connect_socks_proxy_stream(proxy.as_ref(), rpc_host).await?;

            log::info!("connected to rpc endpoint {}", rpc_url);

crates/client/src/socks.rs (new file, 68 lines)
@@ -0,0 +1,68 @@
//! socks proxy
use anyhow::{anyhow, Result};
use futures::io::{AsyncRead, AsyncWrite};
use http_client::Uri;
use tokio_socks::{
    io::Compat,
    tcp::{Socks4Stream, Socks5Stream},
};

pub(crate) async fn connect_socks_proxy_stream(
    proxy: Option<&Uri>,
    rpc_host: (&str, u16),
) -> Result<Box<dyn AsyncReadWrite>> {
    let stream = match parse_socks_proxy(proxy) {
        Some((socks_proxy, SocksVersion::V4)) => {
            let stream = Socks4Stream::connect_with_socket(
                Compat::new(smol::net::TcpStream::connect(socks_proxy).await?),
                rpc_host,
            )
            .await
            .map_err(|err| anyhow!("error connecting to socks {}", err))?;
            Box::new(stream) as Box<dyn AsyncReadWrite>
        }
        Some((socks_proxy, SocksVersion::V5)) => Box::new(
            Socks5Stream::connect_with_socket(
                Compat::new(smol::net::TcpStream::connect(socks_proxy).await?),
                rpc_host,
            )
            .await
            .map_err(|err| anyhow!("error connecting to socks {}", err))?,
        ) as Box<dyn AsyncReadWrite>,
        None => Box::new(smol::net::TcpStream::connect(rpc_host).await?) as Box<dyn AsyncReadWrite>,
    };
    Ok(stream)
}

fn parse_socks_proxy(proxy: Option<&Uri>) -> Option<((String, u16), SocksVersion)> {
    let Some(proxy_uri) = proxy else {
        return None;
    };
    let Some(scheme) = proxy_uri.scheme_str() else {
        return None;
    };
    let socks_version = if scheme.starts_with("socks4") {
        // socks4
        SocksVersion::V4
    } else if scheme.starts_with("socks") {
        // socks, socks5
        SocksVersion::V5
    } else {
        return None;
    };
    if let (Some(host), Some(port)) = (proxy_uri.host(), proxy_uri.port_u16()) {
        Some(((host.to_string(), port), socks_version))
    } else {
        None
    }
}

// private helper structs and traits

enum SocksVersion {
    V4,
    V5,
}

pub(crate) trait AsyncReadWrite: AsyncRead + AsyncWrite + Unpin + Send + 'static {}
impl<T: AsyncRead + AsyncWrite + Unpin + Send + 'static> AsyncReadWrite for T {}

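A standalone sketch of the scheme check inside `parse_socks_proxy` above (this helper is illustrative and not part of the crate): a `socks4`-prefixed scheme selects SOCKS4, any other `socks`-prefixed scheme selects SOCKS5, and anything else means no proxy, so the client connects directly.

```rust
// Mirrors the `starts_with` logic above; not the crate's actual API.
fn socks_version_for_scheme(scheme: &str) -> Option<&'static str> {
    if scheme.starts_with("socks4") {
        Some("SOCKS4") // socks4, socks4a
    } else if scheme.starts_with("socks") {
        Some("SOCKS5") // socks, socks5, socks5h
    } else {
        None // not a SOCKS proxy; connect directly
    }
}

fn main() {
    assert_eq!(socks_version_for_scheme("socks4a"), Some("SOCKS4"));
    assert_eq!(socks_version_for_scheme("socks5"), Some("SOCKS5"));
    assert_eq!(socks_version_for_scheme("http"), None);
}
```
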
@@ -1,5 +1,6 @@
use crate::{Client, Connection, Credentials, EstablishConnectionError, UserStore};
use anyhow::{anyhow, Result};
use chrono::Duration;
use futures::{stream::BoxStream, StreamExt};
use gpui::{BackgroundExecutor, Context, Model, TestAppContext};
use parking_lot::Mutex;
@@ -162,6 +163,11 @@ impl FakeServer {
            return Ok(*message.downcast().unwrap());
        }

        let accepted_tos_at = chrono::Utc::now()
            .checked_sub_signed(Duration::hours(5))
            .expect("failed to build accepted_tos_at")
            .timestamp() as u64;

        if message.is::<TypedEnvelope<GetPrivateUserInfo>>() {
            self.respond(
                message
@@ -172,6 +178,7 @@ impl FakeServer {
                    metrics_id: "the-metrics-id".into(),
                    staff: false,
                    flags: Default::default(),
                    accepted_tos_at: Some(accepted_tos_at),
                },
            );
            continue;

@@ -1,5 +1,6 @@
use super::{proto, Client, Status, TypedEnvelope};
use anyhow::{anyhow, Context, Result};
use chrono::{DateTime, Utc};
use collections::{hash_map::Entry, HashMap, HashSet};
use feature_flags::FeatureFlagAppExt;
use futures::{channel::mpsc, Future, StreamExt};
@@ -94,6 +95,7 @@ pub struct UserStore {
    update_contacts_tx: mpsc::UnboundedSender<UpdateContacts>,
    current_plan: Option<proto::Plan>,
    current_user: watch::Receiver<Option<Arc<User>>>,
    accepted_tos_at: Option<Option<DateTime<Utc>>>,
    contacts: Vec<Arc<Contact>>,
    incoming_contact_requests: Vec<Arc<User>>,
    outgoing_contact_requests: Vec<Arc<User>>,
@@ -150,6 +152,7 @@ impl UserStore {
            by_github_login: Default::default(),
            current_user: current_user_rx,
            current_plan: None,
            accepted_tos_at: None,
            contacts: Default::default(),
            incoming_contact_requests: Default::default(),
            participant_indices: Default::default(),
@@ -189,9 +192,10 @@ impl UserStore {
                        } else {
                            break;
                        };
                        let fetch_metrics_id =
                        let fetch_private_user_info =
                            client.request(proto::GetPrivateUserInfo {}).log_err();
                        let (user, info) = futures::join!(fetch_user, fetch_metrics_id);
                        let (user, info) =
                            futures::join!(fetch_user, fetch_private_user_info);

                        cx.update(|cx| {
                            if let Some(info) = info {
@@ -202,9 +206,17 @@ impl UserStore {
                                client.telemetry.set_authenticated_user_info(
                                    Some(info.metrics_id.clone()),
                                    staff,
                                )
                                );

                                this.update(cx, |this, _| {
                                    this.set_current_user_accepted_tos_at(
                                        info.accepted_tos_at,
                                    );
                                })
                            } else {
                                anyhow::Ok(())
                            }
                        })?;
                        })??;

                        current_user_tx.send(user).await.ok();

@@ -680,6 +692,39 @@ impl UserStore {
        self.current_user.clone()
    }

    pub fn current_user_has_accepted_terms(&self) -> Option<bool> {
        self.accepted_tos_at
            .map(|accepted_tos_at| accepted_tos_at.is_some())
    }

    pub fn accept_terms_of_service(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
        if self.current_user().is_none() {
            return Task::ready(Err(anyhow!("no current user")));
        };

        let client = self.client.clone();
        cx.spawn(move |this, mut cx| async move {
            if let Some(client) = client.upgrade() {
                let response = client
                    .request(proto::AcceptTermsOfService {})
                    .await
                    .context("error accepting tos")?;

                this.update(&mut cx, |this, _| {
                    this.set_current_user_accepted_tos_at(Some(response.accepted_tos_at))
                })
            } else {
                Err(anyhow!("client not found"))
            }
        })
    }

    fn set_current_user_accepted_tos_at(&mut self, accepted_tos_at: Option<u64>) {
        self.accepted_tos_at = Some(
            accepted_tos_at.and_then(|timestamp| DateTime::from_timestamp(timestamp as i64, 0)),
        );
    }

    fn load_users(
        &mut self,
        request: impl RequestMessage<Response = UsersResponse>,

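A hypothetical caller-side sketch of the new terms-of-service API (the `user_store` handle and surrounding context are assumed, not shown in the diff): `current_user_has_accepted_terms` is `None` until the private user info has been fetched, then `Some(true)` or `Some(false)`.

```rust
// Sketch only: `user_store` is assumed to be a gpui Model<UserStore> and `cx`
// a context that can read and update it.
if user_store.read(cx).current_user_has_accepted_terms() == Some(false) {
    let task = user_store.update(cx, |store, cx| store.accept_terms_of_service(cx));
    task.detach(); // the request updates `accepted_tos_at` when it completes
}
```
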
@@ -10,7 +10,7 @@ It contains our back-end logic for collaboration, to which we connect from the Z

Before you can run the collab server locally, you'll need to set up a zed Postgres database.

```
```sh
script/bootstrap
```

@@ -32,13 +32,13 @@ To use a different set of admin users, create `crates/collab/seed.json`.

In one terminal, run Zed's collaboration server and the livekit dev server:

```
```sh
foreman start
```

In a second terminal, run two or more instances of Zed.

```
```sh
script/zed-local -2
```

@@ -64,7 +64,7 @@ You can tell what is currently deployed with `./script/what-is-deployed`.

To create a new migration:

```
```sh
./script/create-migration <name>
```

@@ -97,6 +97,13 @@ spec:
              secretKeyRef:
                name: llm-token
                key: secret
          - name: LLM_DATABASE_URL
            valueFrom:
              secretKeyRef:
                name: llm-database
                key: url
          - name: LLM_DATABASE_MAX_CONNECTIONS
            value: "${LLM_DATABASE_MAX_CONNECTIONS}"
          - name: ZED_CLIENT_CHECKSUM_SEED
            valueFrom:
              secretKeyRef:
@@ -127,6 +134,16 @@ spec:
              secretKeyRef:
                name: anthropic
                key: api_key
          - name: ANTHROPIC_STAFF_API_KEY
            valueFrom:
              secretKeyRef:
                name: anthropic
                key: staff_api_key
          - name: LLM_CLOSED_BETA_MODEL_NAME
            valueFrom:
              secretKeyRef:
                name: llm-closed-beta
                key: model_name
          - name: GOOGLE_AI_API_KEY
            valueFrom:
              secretKeyRef:
@@ -199,6 +216,12 @@ spec:
              secretKeyRef:
                name: supermaven
                key: api_key
          - name: USER_BACKFILLER_GITHUB_ACCESS_TOKEN
            valueFrom:
              secretKeyRef:
                name: user-backfiller
                key: github_access_token
                optional: true
          - name: INVITE_LINK_PREFIX
            value: ${INVITE_LINK_PREFIX}
          - name: RUST_BACKTRACE

@@ -3,3 +3,4 @@ RUST_LOG=info
INVITE_LINK_PREFIX=https://zed.dev/invites/
AUTO_JOIN_CHANNEL_ID=283
DATABASE_MAX_CONNECTIONS=85
LLM_DATABASE_MAX_CONNECTIONS=25

@@ -2,4 +2,5 @@ ZED_ENVIRONMENT=staging
RUST_LOG=info
INVITE_LINK_PREFIX=https://staging.zed.dev/invites/
DATABASE_MAX_CONNECTIONS=5
LLM_DATABASE_MAX_CONNECTIONS=5
AUTO_JOIN_CHANNEL_ID=8

@@ -12,7 +12,7 @@ metadata:
spec:
  type: LoadBalancer
  selector:
    app: postgrest
    app: nginx
  ports:
    - name: web
      protocol: TCP
@@ -24,17 +24,99 @@ apiVersion: apps/v1
kind: Deployment
metadata:
  namespace: ${ZED_KUBE_NAMESPACE}
  name: postgrest

  name: nginx
spec:
  replicas: 1
  selector:
    matchLabels:
      app: postgrest
      app: nginx
  template:
    metadata:
      labels:
        app: postgrest
        app: nginx
    spec:
      containers:
        - name: nginx
          image: nginx:latest
          ports:
            - containerPort: 8080
              protocol: TCP
          volumeMounts:
            - name: nginx-config
              mountPath: /etc/nginx/nginx.conf
              subPath: nginx.conf
      volumes:
        - name: nginx-config
          configMap:
            name: nginx-config

---
apiVersion: v1
kind: ConfigMap
metadata:
  namespace: ${ZED_KUBE_NAMESPACE}
  name: nginx-config
data:
  nginx.conf: |
    events {}

    http {
      server {
        listen 8080;

        location /app/ {
          proxy_pass http://postgrest-app:8080/;
        }

        location /llm/ {
          proxy_pass http://postgrest-llm:8080/;
        }
      }
    }

---
apiVersion: v1
kind: Service
metadata:
  namespace: ${ZED_KUBE_NAMESPACE}
  name: postgrest-app
spec:
  selector:
    app: postgrest-app
  ports:
    - protocol: TCP
      port: 8080
      targetPort: 8080

---
apiVersion: v1
kind: Service
metadata:
  namespace: ${ZED_KUBE_NAMESPACE}
  name: postgrest-llm
spec:
  selector:
    app: postgrest-llm
  ports:
    - protocol: TCP
      port: 8080
      targetPort: 8080

---
apiVersion: apps/v1
kind: Deployment
metadata:
  namespace: ${ZED_KUBE_NAMESPACE}
  name: postgrest-app
spec:
  replicas: 1
  selector:
    matchLabels:
      app: postgrest-app
  template:
    metadata:
      labels:
        app: postgrest-app
    spec:
      containers:
        - name: postgrest
@@ -55,3 +137,39 @@ spec:
              secretKeyRef:
                name: postgrest
                key: jwt_secret

---
apiVersion: apps/v1
kind: Deployment
metadata:
  namespace: ${ZED_KUBE_NAMESPACE}
  name: postgrest-llm
spec:
  replicas: 1
  selector:
    matchLabels:
      app: postgrest-llm
  template:
    metadata:
      labels:
        app: postgrest-llm
    spec:
      containers:
        - name: postgrest
          image: "postgrest/postgrest"
          ports:
            - containerPort: 8080
              protocol: TCP
          env:
            - name: PGRST_SERVER_PORT
              value: "8080"
            - name: PGRST_DB_URI
              valueFrom:
                secretKeyRef:
                  name: llm-database
                  key: url
            - name: PGRST_JWT_SECRET
              valueFrom:
                secretKeyRef:
                  name: postgrest
                  key: jwt_secret

@@ -9,12 +9,14 @@ CREATE TABLE "users" (
    "connected_once" BOOLEAN NOT NULL DEFAULT false,
    "created_at" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "metrics_id" TEXT,
    "github_user_id" INTEGER
    "github_user_id" INTEGER NOT NULL,
    "accepted_tos_at" TIMESTAMP WITHOUT TIME ZONE,
    "github_user_created_at" TIMESTAMP WITHOUT TIME ZONE
);
CREATE UNIQUE INDEX "index_users_github_login" ON "users" ("github_login");
CREATE UNIQUE INDEX "index_invite_code_users" ON "users" ("invite_code");
CREATE INDEX "index_users_on_email_address" ON "users" ("email_address");
CREATE INDEX "index_users_on_github_user_id" ON "users" ("github_user_id");
CREATE UNIQUE INDEX "index_users_on_github_user_id" ON "users" ("github_user_id");

CREATE TABLE "access_tokens" (
    "id" INTEGER PRIMARY KEY AUTOINCREMENT,
@@ -293,7 +295,8 @@ CREATE UNIQUE INDEX "index_channel_buffer_collaborators_on_channel_id_connection

CREATE TABLE "feature_flags" (
    "id" INTEGER PRIMARY KEY AUTOINCREMENT,
    "flag" TEXT NOT NULL UNIQUE
    "flag" TEXT NOT NULL UNIQUE,
    "enabled_for_all" BOOLEAN NOT NULL DEFAULT false
);

CREATE INDEX "index_feature_flags" ON "feature_flags" ("id");

@@ -0,0 +1 @@
ALTER TABLE users ADD accepted_tos_at TIMESTAMP WITHOUT TIME ZONE;
@@ -0,0 +1 @@
ALTER TABLE "users" ADD COLUMN "github_user_created_at" TIMESTAMP WITHOUT TIME ZONE;
@@ -0,0 +1 @@
alter table feature_flags add column enabled_for_all boolean not null default false;
@@ -0,0 +1,4 @@
alter table users alter column github_user_id set not null;

drop index index_users_on_github_user_id;
create unique index uix_users_on_github_user_id on users (github_user_id);

@@ -1,32 +0,0 @@
create table providers (
    id integer primary key autoincrement,
    name text not null
);

create unique index uix_providers_on_name on providers (name);

create table models (
    id integer primary key autoincrement,
    provider_id integer not null references providers (id) on delete cascade,
    name text not null
);

create unique index uix_models_on_provider_id_name on models (provider_id, name);
create index ix_models_on_provider_id on models (provider_id);
create index ix_models_on_name on models (name);

create table if not exists usages (
    id integer primary key autoincrement,
    user_id integer not null,
    model_id integer not null references models (id) on delete cascade,
    requests_this_minute integer not null default 0,
    tokens_this_minute integer not null default 0,
    requests_this_day integer not null default 0,
    tokens_this_day integer not null default 0,
    requests_this_month integer not null default 0,
    tokens_this_month integer not null default 0
);

create index ix_usages_on_user_id on usages (user_id);
create index ix_usages_on_model_id on usages (model_id);
create unique index uix_usages_on_user_id_model_id on usages (user_id, model_id);
@@ -8,7 +8,10 @@ create unique index uix_providers_on_name on providers (name);
create table if not exists models (
    id serial primary key,
    provider_id integer not null references providers (id) on delete cascade,
    name text not null
    name text not null,
    max_requests_per_minute integer not null,
    max_tokens_per_minute integer not null,
    max_tokens_per_day integer not null
);

create unique index uix_models_on_provider_id_name on models (provider_id, name);

@@ -1,15 +1,19 @@
create table usage_measures (
    id serial primary key,
    name text not null
);

create unique index uix_usage_measures_on_name on usage_measures (name);

create table if not exists usages (
    id serial primary key,
    user_id integer not null,
    model_id integer not null references models (id) on delete cascade,
    requests_this_minute integer not null default 0,
    tokens_this_minute bigint not null default 0,
    requests_this_day integer not null default 0,
    tokens_this_day bigint not null default 0,
    requests_this_month integer not null default 0,
    tokens_this_month bigint not null default 0
    measure_id integer not null references usage_measures (id) on delete cascade,
    timestamp timestamp without time zone not null,
    buckets bigint[] not null
);

create index ix_usages_on_user_id on usages (user_id);
create index ix_usages_on_model_id on usages (model_id);
create unique index uix_usages_on_user_id_model_id on usages (user_id, model_id);
create unique index uix_usages_on_user_id_model_id_measure_id on usages (user_id, model_id, measure_id);

@@ -0,0 +1,4 @@
ALTER TABLE models
    ALTER COLUMN max_requests_per_minute TYPE bigint,
    ALTER COLUMN max_tokens_per_minute TYPE bigint,
    ALTER COLUMN max_tokens_per_day TYPE bigint;
@@ -0,0 +1,3 @@
ALTER TABLE models
    ADD COLUMN price_per_million_input_tokens integer NOT NULL DEFAULT 0,
    ADD COLUMN price_per_million_output_tokens integer NOT NULL DEFAULT 0;

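A rough, purely illustrative sketch of how per-million price columns like the ones added above could translate into a per-request cost; the unit of the price values is not specified in the migration, so the function below just carries whatever unit the columns store.

```rust
// Illustrative only: `price_in`/`price_out` are per-million-token prices in an
// unspecified unit; integer division truncates any fractional remainder.
fn request_cost(input_tokens: u64, output_tokens: u64, price_in: u64, price_out: u64) -> u64 {
    (input_tokens * price_in + output_tokens * price_out) / 1_000_000
}

fn main() {
    // 2,000 input and 500 output tokens at prices 300 and 1,500 per million tokens.
    assert_eq!(request_cost(2_000, 500, 300, 1_500), 1);
}
```
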
@@ -0,0 +1 @@
alter table usages add column is_staff boolean not null default false;
@@ -0,0 +1,9 @@
create table lifetime_usages (
    id serial primary key,
    user_id integer not null,
    model_id integer not null references models (id) on delete cascade,
    input_tokens bigint not null default 0,
    output_tokens bigint not null default 0
);

create unique index uix_lifetime_usages_on_user_id_model_id on lifetime_usages (user_id, model_id);
@@ -0,0 +1,7 @@
create table revoked_access_tokens (
    id serial primary key,
    jti text not null,
    revoked_at timestamp without time zone not null default now()
);

create unique index uix_revoked_access_tokens_on_jti on revoked_access_tokens (jti);

crates/collab/postgrest_llm.conf (new file, 4 lines)
@@ -0,0 +1,4 @@
db-uri = "postgres://postgres@localhost/zed_llm"
server-port = 8082
jwt-secret = "the-postgrest-jwt-secret-for-authorization"
log-level = "info"

@@ -108,9 +108,10 @@ pub async fn validate_api_token<B>(req: Request<B>, next: Next<B>) -> impl IntoR

#[derive(Debug, Deserialize)]
struct AuthenticatedUserParams {
    github_user_id: Option<i32>,
    github_user_id: i32,
    github_login: String,
    github_email: Option<String>,
    github_user_created_at: Option<chrono::DateTime<chrono::Utc>>,
}

#[derive(Debug, Serialize)]
@@ -131,6 +132,7 @@ async fn get_authenticated_user(
            &params.github_login,
            params.github_user_id,
            params.github_email.as_deref(),
            params.github_user_created_at,
            initial_channel_id,
        )
        .await?;

@@ -115,6 +115,7 @@ async fn add_contributor(
            &params.github_login,
            params.github_user_id,
            params.github_email.as_deref(),
            params.github_user_created_at,
            initial_channel_id,
        )
        .await

@@ -1,5 +1,6 @@
use super::ips_file::IpsFile;
use crate::api::CloudflareIpCountryHeader;
use crate::clickhouse::write_to_table;
use crate::{api::slack, AppState, Error, Result};
use anyhow::{anyhow, Context};
use aws_sdk_s3::primitives::ByteStream;
@@ -529,12 +530,12 @@ struct ToUpload {
impl ToUpload {
    pub async fn upload(&self, clickhouse_client: &clickhouse::Client) -> anyhow::Result<()> {
        const EDITOR_EVENTS_TABLE: &str = "editor_events";
        Self::upload_to_table(EDITOR_EVENTS_TABLE, &self.editor_events, clickhouse_client)
        write_to_table(EDITOR_EVENTS_TABLE, &self.editor_events, clickhouse_client)
            .await
            .with_context(|| format!("failed to upload to table '{EDITOR_EVENTS_TABLE}'"))?;

        const INLINE_COMPLETION_EVENTS_TABLE: &str = "inline_completion_events";
        Self::upload_to_table(
        write_to_table(
            INLINE_COMPLETION_EVENTS_TABLE,
            &self.inline_completion_events,
            clickhouse_client,
@@ -543,7 +544,7 @@ impl ToUpload {
        .with_context(|| format!("failed to upload to table '{INLINE_COMPLETION_EVENTS_TABLE}'"))?;

        const ASSISTANT_EVENTS_TABLE: &str = "assistant_events";
        Self::upload_to_table(
        write_to_table(
            ASSISTANT_EVENTS_TABLE,
            &self.assistant_events,
            clickhouse_client,
@@ -552,27 +553,27 @@ impl ToUpload {
        .with_context(|| format!("failed to upload to table '{ASSISTANT_EVENTS_TABLE}'"))?;

        const CALL_EVENTS_TABLE: &str = "call_events";
        Self::upload_to_table(CALL_EVENTS_TABLE, &self.call_events, clickhouse_client)
        write_to_table(CALL_EVENTS_TABLE, &self.call_events, clickhouse_client)
            .await
            .with_context(|| format!("failed to upload to table '{CALL_EVENTS_TABLE}'"))?;

        const CPU_EVENTS_TABLE: &str = "cpu_events";
        Self::upload_to_table(CPU_EVENTS_TABLE, &self.cpu_events, clickhouse_client)
        write_to_table(CPU_EVENTS_TABLE, &self.cpu_events, clickhouse_client)
            .await
            .with_context(|| format!("failed to upload to table '{CPU_EVENTS_TABLE}'"))?;

        const MEMORY_EVENTS_TABLE: &str = "memory_events";
        Self::upload_to_table(MEMORY_EVENTS_TABLE, &self.memory_events, clickhouse_client)
        write_to_table(MEMORY_EVENTS_TABLE, &self.memory_events, clickhouse_client)
            .await
            .with_context(|| format!("failed to upload to table '{MEMORY_EVENTS_TABLE}'"))?;

        const APP_EVENTS_TABLE: &str = "app_events";
        Self::upload_to_table(APP_EVENTS_TABLE, &self.app_events, clickhouse_client)
        write_to_table(APP_EVENTS_TABLE, &self.app_events, clickhouse_client)
            .await
            .with_context(|| format!("failed to upload to table '{APP_EVENTS_TABLE}'"))?;

        const SETTING_EVENTS_TABLE: &str = "setting_events";
        Self::upload_to_table(
        write_to_table(
            SETTING_EVENTS_TABLE,
            &self.setting_events,
            clickhouse_client,
@@ -581,7 +582,7 @@ impl ToUpload {
        .with_context(|| format!("failed to upload to table '{SETTING_EVENTS_TABLE}'"))?;

        const EXTENSION_EVENTS_TABLE: &str = "extension_events";
        Self::upload_to_table(
        write_to_table(
            EXTENSION_EVENTS_TABLE,
            &self.extension_events,
            clickhouse_client,
@@ -590,48 +591,22 @@ impl ToUpload {
        .with_context(|| format!("failed to upload to table '{EXTENSION_EVENTS_TABLE}'"))?;

        const EDIT_EVENTS_TABLE: &str = "edit_events";
        Self::upload_to_table(EDIT_EVENTS_TABLE, &self.edit_events, clickhouse_client)
        write_to_table(EDIT_EVENTS_TABLE, &self.edit_events, clickhouse_client)
            .await
            .with_context(|| format!("failed to upload to table '{EDIT_EVENTS_TABLE}'"))?;

        const ACTION_EVENTS_TABLE: &str = "action_events";
        Self::upload_to_table(ACTION_EVENTS_TABLE, &self.action_events, clickhouse_client)
        write_to_table(ACTION_EVENTS_TABLE, &self.action_events, clickhouse_client)
            .await
            .with_context(|| format!("failed to upload to table '{ACTION_EVENTS_TABLE}'"))?;

        const REPL_EVENTS_TABLE: &str = "repl_events";
        Self::upload_to_table(REPL_EVENTS_TABLE, &self.repl_events, clickhouse_client)
        write_to_table(REPL_EVENTS_TABLE, &self.repl_events, clickhouse_client)
            .await
            .with_context(|| format!("failed to upload to table '{REPL_EVENTS_TABLE}'"))?;

        Ok(())
    }

    async fn upload_to_table<T: clickhouse::Row + Serialize + std::fmt::Debug>(
        table: &str,
        rows: &[T],
        clickhouse_client: &clickhouse::Client,
    ) -> anyhow::Result<()> {
        if rows.is_empty() {
            return Ok(());
        }

        let mut insert = clickhouse_client.insert(table)?;

        for event in rows {
            insert.write(event).await?;
        }

        insert.end().await?;

        let event_count = rows.len();
        log::info!(
            "wrote {event_count} {event_specifier} to '{table}'",
            event_specifier = if event_count == 1 { "event" } else { "events" }
        );

        Ok(())
    }
}

pub fn serialize_country_code<S>(country_code: &str, serializer: S) -> Result<S::Ok, S::Error>