Compare commits: tool-calli...go-example

359 Commits
6
.github/pull_request_template.md
vendored
@@ -1,13 +1,15 @@
Closes #ISSUE

Release Notes:

- Added/Fixed/Improved ... ([#NNNNN](https://github.com/zed-industries/zed/issues/NNNNN)).
- Added/Fixed/Improved ...

Optionally, include screenshots / media showcasing your addition that can be included in the release notes.

### Or...

Closes #ISSUE

Release Notes:

- N/A
10
.github/workflows/ci.yml
vendored
@@ -231,20 +231,20 @@ jobs:
mv target/x86_64-apple-darwin/release/Zed.dmg target/x86_64-apple-darwin/release/Zed-x86_64.dmg

- name: Upload app bundle (universal) to workflow run if main branch or specific label
uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4
uses: actions/upload-artifact@834a144ee995460fba8ed112a2fc961b36a5ec5a # v4
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
with:
name: Zed_${{ github.event.pull_request.head.sha || github.sha }}.dmg
path: target/release/Zed.dmg
- name: Upload app bundle (aarch64) to workflow run if main branch or specific label
uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4
uses: actions/upload-artifact@834a144ee995460fba8ed112a2fc961b36a5ec5a # v4
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
with:
name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.dmg
path: target/aarch64-apple-darwin/release/Zed-aarch64.dmg

- name: Upload app bundle (x86_64) to workflow run if main branch or specific label
uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4
uses: actions/upload-artifact@834a144ee995460fba8ed112a2fc961b36a5ec5a # v4
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
with:
name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.dmg

@@ -319,7 +319,7 @@ jobs:
run: script/bundle-linux

- name: Upload Linux bundle to workflow run if main branch or specific label
uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4
uses: actions/upload-artifact@834a144ee995460fba8ed112a2fc961b36a5ec5a # v4
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
with:
name: zed-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz

@@ -403,7 +403,7 @@ jobs:
run: script/bundle-linux

- name: Upload Linux bundle to workflow run if main branch or specific label
uses: actions/upload-artifact@0b2256b8c012f0828dc542b3febcab082c67f72b # v4
uses: actions/upload-artifact@834a144ee995460fba8ed112a2fc961b36a5ec5a # v4
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
with:
name: zed-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz
2
.github/workflows/danger.yml
vendored
@@ -16,7 +16,7 @@ jobs:
steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4

- uses: pnpm/action-setup@v3
- uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2 # v4.0.0
with:
version: 9
8
.github/workflows/deploy_collab.yml
vendored
@@ -106,10 +106,12 @@ jobs:
export ZED_KUBE_NAMESPACE=production
export ZED_COLLAB_LOAD_BALANCER_SIZE_UNIT=10
export ZED_API_LOAD_BALANCER_SIZE_UNIT=2
export ZED_LLM_LOAD_BALANCER_SIZE_UNIT=2
elif [[ $GITHUB_REF_NAME = "collab-staging" ]]; then
export ZED_KUBE_NAMESPACE=staging
export ZED_COLLAB_LOAD_BALANCER_SIZE_UNIT=1
export ZED_API_LOAD_BALANCER_SIZE_UNIT=1
export ZED_LLM_LOAD_BALANCER_SIZE_UNIT=1
else
echo "cowardly refusing to deploy from an unknown branch"
exit 1

@@ -134,3 +136,9 @@ jobs:
envsubst < crates/collab/k8s/collab.template.yml | kubectl apply -f -
kubectl -n "$ZED_KUBE_NAMESPACE" rollout status deployment/$ZED_SERVICE_NAME --watch
echo "deployed ${ZED_SERVICE_NAME} to ${ZED_KUBE_NAMESPACE}"

export ZED_SERVICE_NAME=llm
export ZED_LOAD_BALANCER_SIZE_UNIT=$ZED_LLM_LOAD_BALANCER_SIZE_UNIT
envsubst < crates/collab/k8s/collab.template.yml | kubectl apply -f -
kubectl -n "$ZED_KUBE_NAMESPACE" rollout status deployment/$ZED_SERVICE_NAME --watch
echo "deployed ${ZED_SERVICE_NAME} to ${ZED_KUBE_NAMESPACE}"
24
.github/workflows/docs.yml
vendored
Normal file
@@ -0,0 +1,24 @@
name: Docs

on:
pull_request:
paths:
- "docs/**"
push:
branches:
- main

jobs:
check_formatting:
name: "Check formatting"
runs-on: ubuntu-latest

steps:
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4

- uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2 # v4.0.0
with:
version: 9

- run: pnpm dlx prettier . --check
working-directory: ./docs
3
.gitignore
vendored
@@ -29,3 +29,6 @@ DerivedData/
.vscode
.wrangler
.flatpak-builder

# Don't commit any secrets to the repo.
.env.secret.toml
@@ -26,6 +26,10 @@
"tab_size": 2,
"formatter": "prettier"
},
"CSS": {
"tab_size": 2,
"formatter": "prettier"
},
"Rust": {
"tasks": {
"variables": {
2812
Cargo.lock
generated
30
Cargo.toml
@@ -19,7 +19,6 @@ members = [
"crates/collections",
"crates/command_palette",
"crates/command_palette_hooks",
"crates/completion",
"crates/copilot",
"crates/db",
"crates/dev_server_projects",

@@ -190,7 +189,6 @@ collab_ui = { path = "crates/collab_ui" }
collections = { path = "crates/collections" }
command_palette = { path = "crates/command_palette" }
command_palette_hooks = { path = "crates/command_palette_hooks" }
completion = { path = "crates/completion" }
copilot = { path = "crates/copilot" }
db = { path = "crates/db" }
dev_server_projects = { path = "crates/dev_server_projects" }

@@ -211,6 +209,7 @@ go_to_line = { path = "crates/go_to_line" }
google_ai = { path = "crates/google_ai" }
gpui = { path = "crates/gpui" }
gpui_macros = { path = "crates/gpui_macros" }
handlebars = "4.3"
headless = { path = "crates/headless" }
html_to_markdown = { path = "crates/html_to_markdown" }
http_client = { path = "crates/http_client" }

@@ -300,7 +299,7 @@ zed_actions = { path = "crates/zed_actions" }
#

aho-corasick = "1.1"
alacritty_terminal = "0.23"
alacritty_terminal = { git = "https://github.com/alacritty/alacritty", rev = "cacdb5bb3b72bad2c729227537979d95af75978f" }
any_vec = "0.14"
anyhow = "1.0.86"
ashpd = "0.9.1"

@@ -316,9 +315,9 @@ async-watch = "0.3.1"
async_zip = { version = "0.0.17", features = ["deflate", "deflate64"] }
base64 = "0.22"
bitflags = "2.6.0"
blade-graphics = { git = "https://github.com/zed-industries/blade", rev = "7e497c534d5d4a30c18d9eb182cf39eaf0aaa25e" }
blade-macros = { git = "https://github.com/zed-industries/blade", rev = "7e497c534d5d4a30c18d9eb182cf39eaf0aaa25e" }
blade-util = { git = "https://github.com/zed-industries/blade", rev = "7e497c534d5d4a30c18d9eb182cf39eaf0aaa25e" }
blade-graphics = { git = "https://github.com/kvark/blade", rev = "ac25c77ed8d86c386a541c935ffe0a0f6024e701" }
blade-macros = { git = "https://github.com/kvark/blade", rev = "ac25c77ed8d86c386a541c935ffe0a0f6024e701" }
blade-util = { git = "https://github.com/kvark/blade", rev = "ac25c77ed8d86c386a541c935ffe0a0f6024e701" }
cargo_metadata = "0.18"
cargo_toml = "0.20"
chrono = { version = "0.4", features = ["serde"] }

@@ -342,6 +341,7 @@ git2 = { version = "0.19", default-features = false }
globset = "0.4"
heed = { version = "0.20.1", features = ["read-txn-no-tls"] }
hex = "0.4.3"
hyper = "0.14"
html5ever = "0.27.0"
ignore = "0.4.22"
image = "0.25.1"

@@ -377,7 +377,7 @@ rand = "0.8.5"
regex = "1.5"
repair_json = "0.1.0"
rsa = "0.9.6"
runtimelib = { version = "0.12", default-features = false, features = [
runtimelib = { version = "0.14", default-features = false, features = [
"async-dispatcher-runtime",
] }
rusqlite = { version = "0.29.0", features = ["blob", "array", "modern_sqlite"] }

@@ -401,6 +401,7 @@ similar = "1.3"
simplelog = "0.12.2"
smallvec = { version = "1.6", features = ["union"] }
smol = "1.2"
strsim = "0.11"
strum = { version = "0.25.0", features = ["derive"] }
subtle = "2.5.0"
sys-locale = "0.3.1"

@@ -427,7 +428,7 @@ tree-sitter-css = "0.21"
tree-sitter-elixir = "0.2"
tree-sitter-embedded-template = "0.20.0"
tree-sitter-go = "0.21"
tree-sitter-go-mod = { git = "https://github.com/SomeoneToIgnore/tree-sitter-go-mod", rev = "8c1f54f12bb4c846336b634bc817645d6f35d641", package = "tree-sitter-gomod" }
tree-sitter-go-mod = { git = "https://github.com/camdencheek/tree-sitter-go-mod", rev = "1f55029bacd0a6a11f6eb894c4312d429dcf735c", package = "tree-sitter-gomod" }
tree-sitter-gowork = { git = "https://github.com/d1y/tree-sitter-go-work", rev = "dcbabff454703c3a4bc98a23cf8778d4be46fd22" }
tree-sitter-heex = { git = "https://github.com/phoenixframework/tree-sitter-heex", rev = "6dd0303acf7138dd2b9b432a229e16539581c701" }
tree-sitter-html = "0.20"

@@ -459,6 +460,19 @@ wasmtime-wasi = "21.0.1"
which = "6.0.0"
wit-component = "0.201"

[workspace.dependencies.async-stripe]
version = "0.37"
default-features = false
features = [
"runtime-tokio-hyper-rustls",
"billing",
"checkout",
"events",
# The features below are only enabled to get the `events` feature to build.
"chrono",
"connect",
]

[workspace.dependencies.windows]
version = "0.58"
features = [
@@ -27,5 +27,7 @@ RUN apt-get update; \
WORKDIR app
COPY --from=builder /app/collab /app/collab
COPY --from=builder /app/crates/collab/migrations /app/migrations
COPY --from=builder /app/crates/collab/migrations_llm /app/migrations_llm
ENV MIGRATIONS_PATH=/app/migrations
ENV LLM_DATABASE_MIGRATIONS_PATH=/app/migrations_llm
ENTRYPOINT ["/app/collab"]
2
Procfile
@@ -1,3 +1,3 @@
collab: RUST_LOG=${RUST_LOG:-info} cargo run --package=collab serve
collab: RUST_LOG=${RUST_LOG:-info} cargo run --package=collab serve all
livekit: livekit-server --dev
blob_store: ./script/run-local-minio
2
Procfile.postgrest
Normal file
@@ -0,0 +1,2 @@
app: postgrest crates/collab/postgrest_app.conf
llm: postgrest crates/collab/postgrest_llm.conf
11
assets/icons/ai_anthropic.svg
Normal file
@@ -0,0 +1,11 @@
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<g clip-path="url(#clip0_1896_18)">
|
||||
<path d="M11.094 3.09999H8.952L12.858 12.9H15L11.094 3.09999Z" fill="#1F1F1E"/>
|
||||
<path d="M4.906 3.09999L1 12.9H3.184L3.98284 10.842H8.06915L8.868 12.9H11.052L7.146 3.09999H4.906ZM4.68928 9.02199L6.026 5.57799L7.3627 9.02199H4.68928Z" fill="#1F1F1E"/>
|
||||
</g>
|
||||
<defs>
|
||||
<clipPath id="clip0_1896_18">
|
||||
<rect width="14" height="9.8" fill="white" transform="translate(1 3.09999)"/>
|
||||
</clipPath>
|
||||
</defs>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 530 B |
3
assets/icons/ai_google.svg
Normal file
@@ -0,0 +1,3 @@
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M13.8695 8.14262C13.8695 11.6221 11.4867 14.0984 7.96785 14.0984C4.59408 14.0984 1.86949 11.3738 1.86949 7.99999C1.86949 4.62622 4.59408 1.90163 7.96785 1.90163C9.61048 1.90163 10.9924 2.50409 12.0572 3.49754L10.3974 5.09344C8.22605 2.99836 4.18834 4.57213 4.18834 7.99999C4.18834 10.127 5.88752 11.8508 7.96785 11.8508C10.3826 11.8508 11.2875 10.1197 11.4301 9.22213H7.96785V7.12458H13.7736C13.8301 7.43688 13.8695 7.73688 13.8695 8.14262Z" fill="black"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 569 B |
14
assets/icons/ai_ollama.svg
Normal file
|
After Width: | Height: | Size: 8.5 KiB |
3
assets/icons/ai_open_ai.svg
Normal file
@@ -0,0 +1,3 @@
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M14.0768 6.72994C14.3987 5.77663 14.2879 4.73232 13.7731 3.86519C12.9989 2.53519 11.4427 1.85094 9.92272 2.17294C9.24656 1.42132 8.2751 0.993879 7.25664 1C5.70301 0.996504 4.32452 1.9835 3.84655 3.44213C2.84849 3.64382 1.98699 4.26025 1.48286 5.13394C0.70294 6.46044 0.880738 8.13257 1.9227 9.27007C1.6008 10.2234 1.71164 11.2677 2.22642 12.1348C3.00057 13.4648 4.55686 14.1491 6.07679 13.8271C6.75251 14.5787 7.72441 15.0061 8.74287 14.9996C10.2974 15.0035 11.6763 14.0156 12.1543 12.5557C13.1524 12.354 14.0139 11.7376 14.518 10.8639C15.297 9.53738 15.1188 7.86657 14.0773 6.72907L14.0768 6.72994ZM8.74376 14.0848C8.12169 14.0856 7.51912 13.8708 7.0416 13.4775C7.06332 13.4661 7.10101 13.4456 7.1254 13.4307L9.95066 11.8207C10.0952 11.7398 10.1839 11.5879 10.183 11.4239V7.49382L11.377 8.17413C11.3899 8.18025 11.3983 8.1925 11.4001 8.2065V11.4611C11.3983 12.9083 10.2105 14.0817 8.74376 14.0848ZM3.03116 11.6772C2.71946 11.1461 2.60729 10.5235 2.71414 9.91932C2.73498 9.93157 2.77178 9.95388 2.79794 9.96875L5.6232 11.5788C5.76642 11.6614 5.94377 11.6614 6.08743 11.5788L9.53654 9.6135V10.9741C9.53742 10.9881 9.53077 11.0017 9.51969 11.0104L6.66383 12.6375C5.39175 13.3603 3.76719 12.9306 3.03161 11.6772H3.03116ZM2.2876 5.592C2.59797 5.06 3.08792 4.65313 3.67141 4.44182C3.67141 4.46588 3.67008 4.50832 3.67008 4.53807V7.7585C3.6692 7.92213 3.75787 8.07394 3.90198 8.15488L7.35108 10.1197L6.15704 10.8C6.14507 10.8079 6.12999 10.8092 6.11669 10.8035L3.26039 9.17513C1.99098 8.44975 1.55557 6.84719 2.28716 5.59244L2.2876 5.592ZM12.098 7.84469L8.64887 5.87944L9.84292 5.19957C9.85489 5.19169 9.86996 5.19038 9.88326 5.19607L12.7396 6.82313C14.0112 7.54807 14.447 9.15325 13.7124 10.408C13.4015 10.9391 12.912 11.346 12.329 11.5578V8.24107C12.3303 8.07744 12.2421 7.92607 12.0984 7.84469H12.098ZM13.2863 6.07982C13.2654 6.06713 13.2286 6.04525 13.2025 6.03038L10.3772 4.42038C10.234 4.33769 10.0566 4.33769 9.91297 4.42038L6.46386 6.38563V5.025C6.46298 5.011 6.46963 4.99744 6.48071 4.98869L9.33657 3.36294C10.6086 2.63888 12.235 3.06982 12.9683 4.32544C13.2783 4.85569 13.3905 5.4765 13.2854 6.07982H13.2863ZM5.81475 8.50488L4.62026 7.82457C4.6074 7.81844 4.59898 7.80619 4.59721 7.79219V4.53763C4.59809 3.08863 5.78947 1.91438 7.25797 1.91525C7.87916 1.91525 8.48039 2.1305 8.95792 2.5225C8.93619 2.53388 8.89894 2.55444 8.87412 2.56932L6.04885 4.17932C5.90431 4.26025 5.81563 4.41163 5.81652 4.57569L5.81475 8.504V8.50488ZM6.46342 7.125L7.99976 6.24957L9.53609 7.12457V8.875L7.99976 9.75L6.46342 8.875V7.125Z" fill="black"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 2.6 KiB |
10
assets/icons/ai_zed.svg
Normal file
@@ -0,0 +1,10 @@
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<g clip-path="url(#clip0_1882_101)">
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M2.3125 1.875C2.07088 1.875 1.875 2.07088 1.875 2.3125V11.9375H1V2.3125C1 1.58763 1.58763 1 2.3125 1H14.0344C14.6191 1 14.9118 1.70688 14.4984 2.12029L7.27887 9.33984H9.3125V8.4375H10.1875V9.55859C10.1875 9.92103 9.89369 10.2148 9.53125 10.2148H6.40387L4.89996 11.7187H11.7187V6.25H12.5937V11.7187C12.5937 12.202 12.202 12.5937 11.7187 12.5937H4.02496L2.49371 14.125H13.6875C13.9291 14.125 14.125 13.9291 14.125 13.6875V4.0625H15V13.6875C15 14.4124 14.4124 15 13.6875 15H1.96561C1.38095 15 1.08816 14.2931 1.50157 13.8797L8.69379 6.6875H6.6875V7.5625H5.8125V6.46875C5.8125 6.10631 6.10631 5.8125 6.46875 5.8125H9.56879L11.1 4.28125H4.28125V9.75H3.40625V4.28125C3.40625 3.798 3.798 3.40625 4.28125 3.40625H11.975L13.5063 1.875H2.3125Z" fill="black"/>
|
||||
</g>
|
||||
<defs>
|
||||
<clipPath id="clip0_1882_101">
|
||||
<rect width="14" height="14" fill="white" transform="translate(1 1)"/>
|
||||
</clipPath>
|
||||
</defs>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 1.0 KiB |
@@ -1,10 +1 @@
|
||||
<svg width="12" height="12" viewBox="0 0 12 12" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<g clip-path="url(#clip0_430_1270)">
|
||||
<path d="M4.30957 0.857736V4.28922L2.35703 4.28788C2.10067 4.28788 1.86816 4.44057 1.76636 4.67656C1.66511 4.91229 1.71332 5.18633 1.88959 5.37304L5.53269 9.23312C5.77565 9.49028 6.22488 9.49028 6.46784 9.23312L10.1123 5.37277C10.2875 5.18794 10.3354 4.9147 10.2342 4.6763C10.1337 4.44057 9.90066 4.28788 9.66761 4.28788H7.73891L7.73891 0.857736C7.73891 0.383865 7.35504 2.35669e-07 6.88171 2.14979e-07L5.16731 1.4004e-07C4.66906 -0.000267757 4.30957 0.383865 4.30957 0.857736ZM11.1433 11.1187C11.1434 10.6687 10.7595 10.2856 10.2861 10.2856H1.71413C1.23972 10.2856 0.856659 10.6687 0.856659 11.1187C0.856659 11.6169 1.23972 12 1.71386 12H10.2861C10.7595 12 11.1433 11.6169 11.1433 11.1187Z" fill="white"/>
|
||||
</g>
|
||||
<defs>
|
||||
<clipPath id="clip0_430_1270">
|
||||
<rect width="12" height="12" fill="white"/>
|
||||
</clipPath>
|
||||
</defs>
|
||||
</svg>
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-download"><path d="M21 15v4a2 2 0 0 1-2 2H5a2 2 0 0 1-2-2v-4"/><polyline points="7 10 12 15 17 10"/><line x1="12" x2="12" y1="15" y2="3"/></svg>
|
||||
|
||||
|
Before Width: | Height: | Size: 954 B After Width: | Height: | Size: 347 B |
1
assets/icons/microscope.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-microscope"><path d="M6 18h8"/><path d="M3 22h18"/><path d="M14 22a7 7 0 1 0 0-14h-1"/><path d="M9 14h2"/><path d="M9 12a2 2 0 0 1-2-2V6h6v4a2 2 0 0 1-2 2Z"/><path d="M12 6V3a1 1 0 0 0-1-1H9a1 1 0 0 0-1 1v3"/></svg>
|
||||
|
After Width: | Height: | Size: 418 B |
1
assets/icons/route.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-route"><circle cx="6" cy="19" r="3"/><path d="M9 19h8.5a3.5 3.5 0 0 0 0-7h-11a3.5 3.5 0 0 1 0-7H15"/><circle cx="18" cy="5" r="3"/></svg>
|
||||
|
After Width: | Height: | Size: 340 B |
1
assets/icons/text_select.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-text-select"><path d="M5 3a2 2 0 0 0-2 2"/><path d="M19 3a2 2 0 0 1 2 2"/><path d="M21 19a2 2 0 0 1-2 2"/><path d="M5 21a2 2 0 0 1-2-2"/><path d="M9 3h1"/><path d="M9 21h1"/><path d="M14 3h1"/><path d="M14 21h1"/><path d="M3 9v1"/><path d="M21 9v1"/><path d="M3 14v1"/><path d="M21 14v1"/><line x1="7" x2="15" y1="8" y2="8"/><line x1="7" x2="17" y1="12" y2="12"/><line x1="7" x2="13" y1="16" y2="16"/></svg>
|
||||
|
After Width: | Height: | Size: 610 B |
1
assets/icons/undo.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-undo"><path d="M3 7v6h6"/><path d="M21 17a9 9 0 0 0-9-9 9 9 0 0 0-6 2.3L3 13"/></svg>
|
||||
|
After Width: | Height: | Size: 288 B |
@@ -2,13 +2,11 @@
// Standard Linux bindings
{
"bindings": {
"up": "menu::SelectPrev",
"shift-tab": "menu::SelectPrev",
"home": "menu::SelectFirst",
"pageup": "menu::SelectFirst",
"shift-pageup": "menu::SelectFirst",
"ctrl-p": "menu::SelectPrev",
"down": "menu::SelectNext",
"tab": "menu::SelectNext",
"end": "menu::SelectLast",
"pagedown": "menu::SelectLast",

@@ -33,6 +31,20 @@
"f11": "zed::ToggleFullScreen"
}
},
{
"context": "Picker || menu",
"bindings": {
"up": "menu::SelectPrev",
"down": "menu::SelectNext"
}
},
{
"context": "Prompt",
"bindings": {
"left": "menu::SelectPrev",
"right": "menu::SelectNext"
}
},
{
"context": "Editor",
"bindings": {

@@ -121,7 +133,7 @@
"bindings": {
"alt-]": "editor::NextInlineCompletion",
"alt-[": "editor::PreviousInlineCompletion",
"alt-right": "editor::AcceptPartialInlineCompletion"
"ctrl-right": "editor::AcceptPartialInlineCompletion"
}
},
{

@@ -149,7 +161,9 @@
"bindings": {
"ctrl-g": "search::SelectNextMatch",
"ctrl-shift-g": "search::SelectPrevMatch",
"alt-m": "assistant::ToggleModelSelector"
"alt-m": "assistant::ToggleModelSelector",
"ctrl-k h": "assistant::DeployHistory",
"ctrl-k l": "assistant::DeployPromptLibrary"
}
},
{

@@ -244,13 +258,6 @@
"ctrl-alt-shift-x": "search::ToggleRegex"
}
},
{
"context": "Terminal",
"bindings": {
"ctrl-w": ["terminal::SendKeystroke", "ctrl-w"],
"ctrl-e": ["terminal::SendKeystroke", "ctrl-e"]
}
},
// Bindings from VS Code
{
"context": "Editor",

@@ -430,7 +437,7 @@
"context": "Editor && showing_completions",
"bindings": {
"enter": "editor::ConfirmCompletion",
"tab": "editor::ConfirmCompletion"
"tab": "editor::ComposeCompletion"
}
},
{

@@ -460,12 +467,16 @@
{
"bindings": {
"ctrl-alt-shift-f": "workspace::FollowNextCollaborator",
// TODO: Move this to a dock open action
"ctrl-shift-c": "collab_panel::ToggleFocus",
"ctrl-alt-i": "zed::DebugElements",
"ctrl-:": "editor::ToggleInlayHints"
}
},
{
"context": "!Terminal",
"bindings": {
"ctrl-shift-c": "collab_panel::ToggleFocus"
}
},
{
"context": "Editor && mode == full",
"bindings": {

@@ -480,7 +491,8 @@
{
"context": "Editor && jupyter && !ContextEditor",
"bindings": {
"ctrl-shift-enter": "repl::Run"
"ctrl-shift-enter": "repl::Run",
"ctrl-alt-enter": "repl::RunInPlace"
}
},
{

@@ -599,12 +611,14 @@
"context": "Terminal",
"bindings": {
"ctrl-alt-space": "terminal::ShowCharacterPalette",
"shift-ctrl-c": "terminal::Copy",
"ctrl-shift-c": "terminal::Copy",
"ctrl-insert": "terminal::Copy",
"ctrl-a": "editor::SelectAll",
"shift-ctrl-v": "terminal::Paste",
// "ctrl-a": "editor::SelectAll", // conflicts with readline
"ctrl-shift-v": "terminal::Paste",
"shift-insert": "terminal::Paste",
"ctrl-enter": "assistant::InlineAssist",
"ctrl-w": ["terminal::SendKeystroke", "ctrl-w"],
"ctrl-e": ["terminal::SendKeystroke", "ctrl-e"],
"up": ["terminal::SendKeystroke", "up"],
"pageup": ["terminal::SendKeystroke", "pageup"],
"down": ["terminal::SendKeystroke", "down"],
@@ -89,9 +89,9 @@
"alt-right": "editor::MoveToNextWordEnd",
"alt-f": "editor::MoveToNextWordEnd",
"cmd-left": "editor::MoveToBeginningOfLine",
"ctrl-a": "editor::MoveToBeginningOfLine",
"ctrl-a": ["editor::MoveToBeginningOfLine", { "stop_at_soft_wraps": false }],
"cmd-right": "editor::MoveToEndOfLine",
"ctrl-e": "editor::MoveToEndOfLine",
"ctrl-e": ["editor::MoveToEndOfLine", { "stop_at_soft_wraps": false }],
"cmd-up": "editor::MoveToBeginning",
"cmd-down": "editor::MoveToEnd",
"shift-up": "editor::SelectUp",

@@ -127,7 +127,9 @@
"cmd-'": "editor::ToggleHunkDiff",
"cmd-\"": "editor::ExpandAllHunkDiffs",
"cmd-alt-g b": "editor::ToggleGitBlame",
"cmd-i": "editor::ShowSignatureHelp"
"cmd-i": "editor::ShowSignatureHelp",
"ctrl-f12": "editor::GoToDeclaration",
"alt-ctrl-f12": "editor::GoToDeclarationSplit"
}
},
{

@@ -152,7 +154,7 @@
"bindings": {
"alt-]": "editor::NextInlineCompletion",
"alt-[": "editor::PreviousInlineCompletion",
"alt-right": "editor::AcceptPartialInlineCompletion"
"cmd-right": "editor::AcceptPartialInlineCompletion"
}
},
{

@@ -178,7 +180,8 @@
{
"context": "Editor && jupyter && !ContextEditor",
"bindings": {
"ctrl-shift-enter": "repl::Run"
"ctrl-shift-enter": "repl::Run",
"ctrl-alt-enter": "repl::RunInPlace"
}
},
{

@@ -186,7 +189,9 @@
"bindings": {
"cmd-g": "search::SelectNextMatch",
"cmd-shift-g": "search::SelectPrevMatch",
"alt-m": "assistant::ToggleModelSelector"
"alt-m": "assistant::ToggleModelSelector",
"cmd-k h": "assistant::DeployHistory",
"cmd-k l": "assistant::DeployPromptLibrary"
}
},
{

@@ -469,7 +474,7 @@
"context": "Editor && showing_completions",
"bindings": {
"enter": "editor::ConfirmCompletion",
"tab": "editor::ConfirmCompletion"
"tab": "editor::ComposeCompletion"
}
},
{
@@ -3,7 +3,7 @@
// For information on binding keys, see the Zed
// documentation: https://zed.dev/docs/key-bindings
//
// To see the default key bindings run `zed: Open Default Keymap`
// To see the default key bindings run `zed: open default keymap`
// from the command palette.
[
{
@@ -4,7 +4,6 @@
"bindings": {
"i": ["vim::PushOperator", { "Object": { "around": false } }],
"a": ["vim::PushOperator", { "Object": { "around": true } }],
":": "command_palette::Toggle",
"h": "vim::Left",
"left": "vim::Left",
"backspace": "vim::Backspace",

@@ -89,8 +88,9 @@
"g t": "pane::ActivateNextItem",
"g shift-t": "pane::ActivatePrevItem",
"g d": "editor::GoToDefinition",
"g shift-d": "editor::GoToTypeDefinition",
"g cmd-d": "editor::GoToImplementation",
"g shift-d": "editor::GoToDeclaration",
"g y": "editor::GoToTypeDefinition",
"g shift-i": "editor::GoToImplementation",
"g x": "editor::OpenUrl",
"g n": "vim::SelectNextMatch",
"g shift-n": "vim::SelectPreviousMatch",

@@ -198,17 +198,12 @@
"ctrl-6": "pane::AlternateFile"
}
},
{
"context": "VimControl && VimCount",
"bindings": {
"0": ["vim::Number", 0]
}
},
{
"context": "vim_mode == normal",
"bindings": {
"escape": "editor::Cancel",
"ctrl-[": "editor::Cancel",
":": "command_palette::Toggle",
".": "vim::Repeat",
"c": ["vim::PushOperator", "Change"],
"shift-c": "vim::ChangeToEndOfLine",

@@ -256,9 +251,17 @@
"g c": ["vim::PushOperator", "ToggleComments"]
}
},
{
"context": "VimControl && VimCount",
"bindings": {
"0": ["vim::Number", 0],
":": "vim::CountCommand"
}
},
{
"context": "vim_mode == visual",
"bindings": {
":": "vim::VisualCommand",
"u": "vim::ConvertToLowerCase",
"U": "vim::ConvertToUpperCase",
"o": "vim::OtherEnd",

@@ -316,6 +319,7 @@
"ctrl-u": "editor::DeleteToBeginningOfLine",
"ctrl-t": "vim::Indent",
"ctrl-d": "vim::Outdent",
"ctrl-k": ["vim::PushOperator", { "Digraph": {} }],
"ctrl-r": ["vim::PushOperator", "Register"]
}
},

@@ -325,6 +329,7 @@
"escape": "vim::NormalBefore",
"ctrl-c": "vim::NormalBefore",
"ctrl-[": "vim::NormalBefore",
"ctrl-k": ["vim::PushOperator", { "Digraph": {} }],
"backspace": "vim::UndoReplace",
"tab": "vim::Tab",
"enter": "vim::Enter"

@@ -337,7 +342,8 @@
"enter": "vim::Enter",
"escape": "vim::ClearOperators",
"ctrl-c": "vim::ClearOperators",
"ctrl-[": "vim::ClearOperators"
"ctrl-[": "vim::ClearOperators",
"ctrl-k": ["vim::PushOperator", { "Digraph": {} }]
}
},
{
41
assets/prompts/content_prompt.hbs
Normal file
@@ -0,0 +1,41 @@
{{#if language_name}}
File language: {{language_name}}
{{/if}}

<document>
{{{document_content}}}
</document>

{{#if is_truncated}}
Note: Context around the relevant section has been truncated for brevity.
{{/if}}

Editing instructions:
1. Rewrite the section marked with <rewrite_this></rewrite_this> tags based on this prompt:
<prompt>
{{{user_prompt}}}
</prompt>

2. Within <rewrite_this></rewrite_this>, make changes only in these subsections:
{{#if has_insertion}}
- Insert text where marked with <insert_here></insert_here> tags
{{/if}}
{{#if has_replacement}}
- Edit text surrounded by <edit_here></edit_here> tags
{{/if}}

3. Section to edit:
<rewrite_this>
{{{rewrite_section_with_selections}}}
</rewrite_this>

4. Guidelines:
- Only make changes necessary to fulfill the prompt
- Preserve all surrounding {{content_type}}
- Maintain the original indentation level
- Rewrite the entire section, even if no changes are needed
- Do not include <rewrite_this>, <insert_here>, or <edit_here> tags in your output

Output format:
Immediately start with the following, ensuring no leading whitespace:
```{{REWRITTEN_CODE}}```
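This compare also appears to bring `handlebars = "4.3"` into the workspace dependencies (see the Cargo.toml hunk above), which suggests these `.hbs` prompt files are rendered with the `handlebars` crate. As a rough sketch only, not Zed's actual code, rendering `content_prompt.hbs` might look like the following; the field names mirror the template variables above, and the values are invented for illustration:

```rust
// Hypothetical sketch: render content_prompt.hbs with the `handlebars` and
// `serde_json` crates. This is not Zed's real code path; the data values are made up.
use handlebars::Handlebars;
use serde_json::json;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let reg = Handlebars::new();
    let template = std::fs::read_to_string("assets/prompts/content_prompt.hbs")?;
    let prompt = reg.render_template(
        &template,
        &json!({
            "language_name": "Rust",
            "document_content": "fn main() {}",
            "is_truncated": false,
            "user_prompt": "Add a doc comment to main",
            "has_insertion": true,
            "has_replacement": false,
            "rewrite_section_with_selections": "<insert_here></insert_here>fn main() {}",
            "content_type": "code"
        }),
    )?;
    println!("{prompt}");
    Ok(())
}
```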
181
assets/prompts/edit_workflow.hbs
Normal file
@@ -0,0 +1,181 @@
|
||||
Guide the user through code changes in numbered steps that focus on individual functions, type definitions, etc.
|
||||
Surround each distinct step in a <step></step> XML tag.
|
||||
|
||||
<instructions>
|
||||
- Always include code or a file action in each step. Do not close the `<step></step>` tags with code following after them.
|
||||
</instructions>
|
||||
|
||||
Here's an example of a user-assistant dialog with step numbers and a follow-up question in Rust:
|
||||
|
||||
<example language="Rust">
|
||||
<turn number="1">
|
||||
<user>I want to create a User struct with name and age fields, and a method to print the user's information. Can you help me with that?</user>
|
||||
<assistant>Certainly! Let's create a User struct with the specified fields and a method to print the information:
|
||||
<step>Define the User struct
|
||||
```rust
|
||||
struct User {
|
||||
name: String,
|
||||
age: u32,
|
||||
}
|
||||
```
|
||||
</step>
|
||||
<step>Implement methods for the User struct
|
||||
```rust
|
||||
impl User {
|
||||
fn new(name: String, age: u32) -> Self {
|
||||
User { name, age }
|
||||
}
|
||||
fn print_info(&self) {
|
||||
println!("Name: {}, Age: {}", self.name, self.age);
|
||||
}
|
||||
}
|
||||
```
|
||||
</step>
|
||||
|
||||
This code defines a User struct and implements two methods:
|
||||
- `new`: a constructor to create a new User
|
||||
- `print_info`: a method to print the user's information</assistant>
|
||||
</turn>
|
||||
<turn number="2">
|
||||
<user>Great! Now, how can I create a User instance and call the print_info method?</user>
|
||||
<assistant>You can create a User instance and call the print_info method like this:
|
||||
|
||||
<step>Create a User instance and call print_info
|
||||
```rust
|
||||
fn main() {
|
||||
let user = User::new(String::from("Alice"), 30);
|
||||
user.print_info();
|
||||
}
|
||||
```
|
||||
</step>
|
||||
|
||||
This will output: `Name: Alice, Age: 30`</assistant>
|
||||
</turn>
|
||||
<turn number="3">
|
||||
<user>Can you add an email field to the User struct and update the print_info method to include it?</user>
|
||||
<assistant>Certainly! Let's modify the User struct and update the print_info method:
|
||||
|
||||
<step>Add email field to User struct
|
||||
```rust
|
||||
struct User {
|
||||
name: String,
|
||||
age: u32,
|
||||
email: String,
|
||||
}
|
||||
```
|
||||
</step>
|
||||
|
||||
<step>Update the new method and print_info method
|
||||
```rust
|
||||
impl User {
|
||||
fn new(name: String, age: u32, email: String) -> Self {
|
||||
User { name, age, email }
|
||||
}
|
||||
fn print_info(&self) {
|
||||
println!("Name: {}, Age: {}, Email: {}", self.name, self.age, self.email);
|
||||
}
|
||||
}
|
||||
```
|
||||
</step>
|
||||
|
||||
<step>Update the main function
|
||||
```rust
|
||||
fn main() {
|
||||
let user = User::new(String::from("Alice"), 30, String::from("alice@example.com"));
|
||||
user.print_info();
|
||||
}
|
||||
```
|
||||
</step>
|
||||
|
||||
This will now output: `Name: Alice, Age: 30, Email: alice@example.com`
|
||||
The changes include:
|
||||
1. Adding the `email` field to the User struct
|
||||
2. Updating the `new` method to accept an email parameter
|
||||
3. Modifying the `print_info` method to include the email
|
||||
4. Updating the main function to provide an email when creating a User instance
|
||||
</assistant>
|
||||
</turn>
|
||||
</example>
|
||||
|
||||
|
||||
<example language="go">
|
||||
<turn number="1">
|
||||
<user>I want to create a Person struct with name and age fields, and a method to greet. Can you help me with that in Go?</user>
|
||||
<assistant>Certainly! Let's create a Person struct with the specified fields and a method to greet in Go:
|
||||
|
||||
<step>Define the Person struct
|
||||
```go
|
||||
type Person struct {
|
||||
Name string
|
||||
Age int
|
||||
}
|
||||
```
|
||||
</step>
|
||||
|
||||
<step>Implement the Greet method for the Person struct
|
||||
```go
|
||||
func (p Person) Greet() string {
|
||||
return fmt.Sprintf("Hello, my name is %s and I'm %d years old.", p.Name, p.Age)
|
||||
}
|
||||
```
|
||||
</step>
|
||||
|
||||
This code defines a Person struct and implements a Greet method that returns a greeting string.</assistant>
|
||||
</turn>
|
||||
|
||||
<turn number="2">
|
||||
<user>Great! How can I create a Person instance and use the Greet method?</user>
|
||||
<assistant>You can create a Person instance and use the Greet method like this:
|
||||
|
||||
<step>Create a Person instance and call the Greet method
|
||||
```go
|
||||
func main() {
|
||||
person := Person{Name: "Alice", Age: 30}
|
||||
fmt.Println(person.Greet())
|
||||
}
|
||||
```
|
||||
</step>
|
||||
|
||||
This will output: `Hello, my name is Alice and I'm 30 years old.`</assistant>
|
||||
</turn>
|
||||
|
||||
<turn number="3">
|
||||
<user>Can you add an Email field to the Person struct and update the Greet method to include it?</user>
|
||||
<assistant>Certainly! Let's modify the Person struct and update the Greet method:
|
||||
|
||||
<step>Add Email field to Person struct
|
||||
```go
|
||||
type Person struct {
|
||||
Name string
|
||||
Age int
|
||||
Email string
|
||||
}
|
||||
```
|
||||
</step>
|
||||
|
||||
<step>Update the Greet method
|
||||
```go
|
||||
func (p Person) Greet() string {
|
||||
return fmt.Sprintf("Hello, my name is %s, I'm %d years old, and my email is %s.", p.Name, p.Age, p.Email)
|
||||
}
|
||||
```
|
||||
</step>
|
||||
|
||||
<step>Update the main function
|
||||
```go
|
||||
func main() {
|
||||
person := Person{Name: "Alice", Age: 30, Email: "alice@example.com"}
|
||||
fmt.Println(person.Greet())
|
||||
}
|
||||
```
|
||||
</step>
|
||||
|
||||
This will now output: `Hello, my name is Alice, I'm 30 years old, and my email is alice@example.com.`
|
||||
|
||||
The changes include:
|
||||
1. Adding the `Email` field to the Person struct
|
||||
2. Modifying the `Greet` method to include the email
|
||||
3. Updating the main function to provide an email when creating a Person instance
|
||||
</assistant>
|
||||
</turn>
|
||||
</example>
|
||||
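The `edit_workflow.hbs` template above asks the model to wrap each step in `<step></step>` tags. As a purely illustrative sketch, not Zed's actual parser, extracting the step bodies from such a response could be as simple as:

```rust
// Hypothetical helper: collect the text between <step> and </step> tags in order.
fn extract_steps(response: &str) -> Vec<&str> {
    let mut steps = Vec::new();
    let mut rest = response;
    while let Some(start) = rest.find("<step>") {
        let after = &rest[start + "<step>".len()..];
        let Some(end) = after.find("</step>") else { break };
        steps.push(after[..end].trim());
        rest = &after[end + "</step>".len()..];
    }
    steps
}

fn main() {
    let reply = "<step>Define the Person struct\ntype Person struct{}\n</step>";
    for step in extract_steps(reply) {
        println!("---\n{step}");
    }
}
```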
@@ -1,241 +0,0 @@
|
||||
Your task is to map a step from the conversation above to operations on symbols inside the provided source files.
|
||||
|
||||
Guidelines:
|
||||
- There's no need to describe *what* to do, just *where* to do it.
|
||||
- If creating a file, assume any subsequent updates are included at the time of creation.
|
||||
- Don't create and then update a file.
|
||||
- We'll create it in one shot.
|
||||
- Prefer updating symbols lower in the syntax tree if possible.
|
||||
- Never include operations on a parent symbol and one of its children in the same <operations> block.
|
||||
- Never nest an operation with another operation or include CDATA or other content. All operations are leaf nodes.
|
||||
- Include a description attribute for each operation with a brief, one-line description of the change to perform.
|
||||
- Descriptions are required for all operations except delete.
|
||||
- When generating multiple operations, ensure the descriptions are specific to each individual operation.
|
||||
- Avoid referring to the location in the description. Focus on the change to be made, not the location where it's made. That's implicit with the symbol you provide.
|
||||
- Don't generate multiple operations at the same location. Instead, combine them together in a single operation with a succinct combined description.
|
||||
|
||||
The available operation types are:
|
||||
|
||||
1. <update>: Modify an existing symbol in a file.
|
||||
2. <create_file>: Create a new file.
|
||||
3. <insert_sibling_after>: Add a new symbol as sibling after an existing symbol in a file.
|
||||
4. <append_child>: Add a new symbol as the last child of an existing symbol in a file.
|
||||
5. <prepend_child>: Add a new symbol as the first child of an existing symbol in a file.
|
||||
6. <delete>: Remove an existing symbol from a file. The `description` attribute is invalid for delete, but required for other ops.
|
||||
|
||||
All operations *require* a path.
|
||||
Operations that *require* a symbol: <update>, <insert_sibling_after>, <delete>
|
||||
Operations that don't allow a symbol: <create>
|
||||
Operations that have an *optional* symbol: <prepend_child>, <append_child>
|
||||
|
||||
Example 1:
|
||||
|
||||
User:
|
||||
```rs src/rectangle.rs
|
||||
struct Rectangle {
|
||||
width: f64,
|
||||
height: f64,
|
||||
}
|
||||
|
||||
impl Rectangle {
|
||||
fn new(width: f64, height: f64) -> Self {
|
||||
Rectangle { width, height }
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Symbols for src/rectangle.rs:
|
||||
- struct Rectangle
|
||||
- impl Rectangle
|
||||
- impl Rectangle fn new
|
||||
|
||||
<step>Add new methods 'calculate_area' and 'calculate_perimeter' to the Rectangle struct</step>
|
||||
<step>Implement the 'Display' trait for the Rectangle struct</step>
|
||||
|
||||
What are the operations for the step: <step>Add a new method 'calculate_area' to the Rectangle struct</step>
|
||||
|
||||
Assistant (wrong):
|
||||
<operations>
|
||||
<append_child path="src/shapes.rs" symbol="impl Rectangle" description="Add calculate_area method" />
|
||||
<append_child path="src/shapes.rs" symbol="impl Rectangle" description="Add calculate_perimeter method" />
|
||||
</operations>
|
||||
|
||||
This demonstrates what NOT to do. NEVER append multiple children at the same location.
|
||||
|
||||
Assistant (corrected):
|
||||
<operations>
|
||||
<append_child path="src/shapes.rs" symbol="impl Rectangle" description="Add calculate area and perimeter methods" />
|
||||
</operations>
|
||||
|
||||
User:
|
||||
What are the operations for the step: <step>Implement the 'Display' trait for the Rectangle struct</step>
|
||||
|
||||
Assistant:
|
||||
<operations>
|
||||
<insert_sibling_after path="src/shapes.rs" symbol="impl Rectangle" description="Implement Display trait for Rectangle"/>
|
||||
</operations>
|
||||
|
||||
Example 2:
|
||||
|
||||
User:
|
||||
```rs src/user.rs
|
||||
struct User {
|
||||
pub name: String,
|
||||
age: u32,
|
||||
email: String,
|
||||
}
|
||||
|
||||
impl User {
|
||||
fn new(name: String, age: u32, email: String) -> Self {
|
||||
User { name, age, email }
|
||||
}
|
||||
|
||||
pub fn print_info(&self) {
|
||||
println!("Name: {}, Age: {}, Email: {}", self.name, self.age, self.email);
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Symbols for src/user.rs:
|
||||
- struct User
|
||||
- struct User pub name
|
||||
- struct User age
|
||||
- struct User email
|
||||
- impl User
|
||||
- impl User fn new
|
||||
- impl User pub fn print_info
|
||||
|
||||
<step>Update the 'print_info' method to use formatted output</step>
|
||||
<step>Remove the 'email' field from the User struct</step>
|
||||
|
||||
What are the operations for the step: <step>Update the 'print_info' method to use formatted output</step>
|
||||
|
||||
Assistant:
|
||||
<operations>
|
||||
<update path="src/user.rs" symbol="impl User fn print_info" description="Use formatted output" />
|
||||
</operations>
|
||||
|
||||
User:
|
||||
What are the operations for the step: <step>Remove the 'email' field from the User struct</step>
|
||||
|
||||
Assistant:
|
||||
<operations>
|
||||
<delete path="src/user.rs" symbol="struct User email" description="Remove the email field" />
|
||||
</operations>
|
||||
|
||||
Example 3:
|
||||
|
||||
User:
|
||||
```rs src/vehicle.rs
|
||||
struct Vehicle {
|
||||
make: String,
|
||||
model: String,
|
||||
year: u32,
|
||||
}
|
||||
|
||||
impl Vehicle {
|
||||
fn new(make: String, model: String, year: u32) -> Self {
|
||||
Vehicle { make, model, year }
|
||||
}
|
||||
|
||||
fn print_year(&self) {
|
||||
println!("Year: {}", self.year);
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Symbols for src/vehicle.rs:
|
||||
- struct Vehicle
|
||||
- struct Vehicle make
|
||||
- struct Vehicle model
|
||||
- struct Vehicle year
|
||||
- impl Vehicle
|
||||
- impl Vehicle fn new
|
||||
- impl Vehicle fn print_year
|
||||
|
||||
<step>Add a 'use std::fmt;' statement at the beginning of the file</step>
|
||||
<step>Add a new method 'start_engine' in the Vehicle impl block</step>
|
||||
|
||||
What are the operations for the step: <step>Add a 'use std::fmt;' statement at the beginning of the file</step>
|
||||
|
||||
Assistant:
|
||||
<operations>
|
||||
<prepend_child path="src/vehicle.rs" description="Add 'use std::fmt' statement" />
|
||||
</operations>
|
||||
|
||||
User:
|
||||
What are the operations for the step: <step>Add a new method 'start_engine' in the Vehicle impl block</step>
|
||||
|
||||
Assistant:
|
||||
<operations>
|
||||
<insert_sibling_after path="src/vehicle.rs" symbol="impl Vehicle fn new" description="Add start_engine method"/>
|
||||
</operations>
|
||||
|
||||
Example 4:
|
||||
|
||||
User:
|
||||
```rs src/employee.rs
|
||||
struct Employee {
|
||||
name: String,
|
||||
position: String,
|
||||
salary: u32,
|
||||
department: String,
|
||||
}
|
||||
|
||||
impl Employee {
|
||||
fn new(name: String, position: String, salary: u32, department: String) -> Self {
|
||||
Employee { name, position, salary, department }
|
||||
}
|
||||
|
||||
fn print_details(&self) {
|
||||
println!("Name: {}, Position: {}, Salary: {}, Department: {}",
|
||||
self.name, self.position, self.salary, self.department);
|
||||
}
|
||||
|
||||
fn give_raise(&mut self, amount: u32) {
|
||||
self.salary += amount;
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
Symbols for src/employee.rs:
|
||||
- struct Employee
|
||||
- struct Employee name
|
||||
- struct Employee position
|
||||
- struct Employee salary
|
||||
- struct Employee department
|
||||
- impl Employee
|
||||
- impl Employee fn new
|
||||
- impl Employee fn print_details
|
||||
- impl Employee fn give_raise
|
||||
|
||||
<step>Make salary an f32</step>
|
||||
|
||||
What are the operations for the step: <step>Make salary an f32</step>
|
||||
|
||||
A (wrong):
|
||||
<operations>
|
||||
<update path="src/employee.rs" symbol="struct Employee" description="Change the type of salary to an f32" />
|
||||
<update path="src/employee.rs" symbol="struct Employee salary" description="Change the type to an f32" />
|
||||
</operations>
|
||||
|
||||
This example demonstrates what not to do. `struct Employee salary` is a child of `struct Employee`.
|
||||
|
||||
A (corrected):
|
||||
<operations>
|
||||
<update path="src/employee.rs" symbol="struct Employee salary" description="Change the type to an f32" />
|
||||
</operations>
|
||||
|
||||
User:
|
||||
What are the correct operations for the step: <step>Remove the 'department' field and update the 'print_details' method</step>
|
||||
|
||||
A:
|
||||
<operations>
|
||||
<delete path="src/employee.rs" symbol="struct Employee department" />
|
||||
<update path="src/employee.rs" symbol="impl Employee fn print_details" description="Don't print the 'department' field" />
|
||||
</operations>
|
||||
|
||||
Now generate the operations for the following step.
|
||||
Output only valid XML containing valid operations with their required attributes.
|
||||
NEVER output code or any other text inside <operation> tags. If you do, you will replaced with another model.
|
||||
Your response *MUST* begin with <operations> and end with </operations>:
|
||||
413
assets/prompts/step_resolution.hbs
Normal file
@@ -0,0 +1,413 @@
|
||||
Your task is to map a step from the conversation above to suggestions on symbols inside the provided source files.
|
||||
|
||||
Guidelines:
|
||||
- There's no need to describe *what* to do, just *where* to do it.
|
||||
- If creating a file, assume any subsequent updates are included at the time of creation.
|
||||
- Don't create and then update a file.
|
||||
- We'll create it in one shot.
|
||||
- Prefer updating symbols lower in the syntax tree if possible.
|
||||
- Never include suggestions on a parent symbol and one of its children in the same suggestions block.
|
||||
- Never nest an operation with another operation or include CDATA or other content. All suggestions are leaf nodes.
|
||||
- Include a description attribute for each operation with a brief, one-line description of the change to perform.
|
||||
- Descriptions are required for all suggestions except delete.
|
||||
- When generating multiple suggestions, ensure the descriptions are specific to each individual operation.
|
||||
- Avoid referring to the location in the description. Focus on the change to be made, not the location where it's made. That's implicit with the symbol you provide.
|
||||
- Don't generate multiple suggestions at the same location. Instead, combine them together in a single operation with a succinct combined description.
|
||||
|
||||
Example 1:
|
||||
|
||||
User:
|
||||
```rs src/rectangle.rs
|
||||
struct Rectangle {
|
||||
width: f64,
|
||||
height: f64,
|
||||
}
|
||||
|
||||
impl Rectangle {
|
||||
fn new(width: f64, height: f64) -> Self {
|
||||
Rectangle { width, height }
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
<step>Add new methods 'calculate_area' and 'calculate_perimeter' to the Rectangle struct</step>
|
||||
<step>Implement the 'Display' trait for the Rectangle struct</step>
|
||||
|
||||
What are the suggestions for the step: <step>Add a new method 'calculate_area' to the Rectangle struct</step>
|
||||
|
||||
A (wrong):
|
||||
{
|
||||
"title": "Add Rectangle methods",
|
||||
"suggestions": [
|
||||
{
|
||||
"kind": "AppendChild",
|
||||
"path": "src/shapes.rs",
|
||||
"symbol": "impl Rectangle",
|
||||
"description": "Add calculate_area method"
|
||||
},
|
||||
{
|
||||
"kind": "AppendChild",
|
||||
"path": "src/shapes.rs",
|
||||
"symbol": "impl Rectangle",
|
||||
"description": "Add calculate_perimeter method"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
This demonstrates what NOT to do. NEVER append multiple children at the same location.
|
||||
|
||||
A (corrected):
|
||||
{
|
||||
"title": "Add Rectangle methods",
|
||||
"suggestions": [
|
||||
{
|
||||
"kind": "AppendChild",
|
||||
"path": "src/shapes.rs",
|
||||
"symbol": "impl Rectangle",
|
||||
"description": "Add calculate area and perimeter methods"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
User:
|
||||
What are the suggestions for the step: <step>Implement the 'Display' trait for the Rectangle struct</step>
|
||||
|
||||
A:
|
||||
{
|
||||
"title": "Implement Display for Rectangle",
|
||||
"suggestions": [
|
||||
{
|
||||
"kind": "InsertSiblingAfter",
|
||||
"path": "src/shapes.rs",
|
||||
"symbol": "impl Rectangle",
|
||||
"description": "Implement Display trait for Rectangle"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
Example 2:
|
||||
|
||||
User:
|
||||
```rs src/user.rs
|
||||
struct User {
|
||||
pub name: String,
|
||||
age: u32,
|
||||
email: String,
|
||||
}
|
||||
|
||||
impl User {
|
||||
fn new(name: String, age: u32, email: String) -> Self {
|
||||
User { name, age, email }
|
||||
}
|
||||
|
||||
pub fn print_info(&self) {
|
||||
println!("Name: {}, Age: {}, Email: {}", self.name, self.age, self.email);
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
<step>Update the 'print_info' method to use formatted output</step>
|
||||
<step>Remove the 'email' field from the User struct</step>
|
||||
|
||||
What are the suggestions for the step: <step>Update the 'print_info' method to use formatted output</step>
|
||||
|
||||
A:
|
||||
{
|
||||
"title": "Use formatted output",
|
||||
"suggestions": [
|
||||
{
|
||||
"kind": "Update",
|
||||
"path": "src/user.rs",
|
||||
"symbol": "impl User pub fn print_info",
|
||||
"description": "Use formatted output"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
User:
|
||||
What are the suggestions for the step: <step>Remove the 'email' field from the User struct</step>
|
||||
|
||||
A:
|
||||
{
|
||||
"title": "Remove email field",
|
||||
"suggestions": [
|
||||
{
|
||||
"kind": "Delete",
|
||||
"path": "src/user.rs",
|
||||
"symbol": "struct User email"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
Example 3:
|
||||
|
||||
User:
|
||||
```rs src/vehicle.rs
|
||||
struct Vehicle {
|
||||
make: String,
|
||||
model: String,
|
||||
year: u32,
|
||||
}
|
||||
|
||||
impl Vehicle {
|
||||
fn new(make: String, model: String, year: u32) -> Self {
|
||||
Vehicle { make, model, year }
|
||||
}
|
||||
|
||||
fn print_year(&self) {
|
||||
println!("Year: {}", self.year);
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
<step>Add a 'use std::fmt;' statement at the beginning of the file</step>
|
||||
<step>Add a new method 'start_engine' in the Vehicle impl block</step>
|
||||
|
||||
What are the suggestions for the step: <step>Add a 'use std::fmt;' statement at the beginning of the file</step>
|
||||
|
||||
A:
|
||||
{
|
||||
"title": "Add use std::fmt statement",
|
||||
"suggestions": [
|
||||
{
|
||||
"kind": "PrependChild",
|
||||
"path": "src/vehicle.rs",
|
||||
"description": "Add 'use std::fmt' statement"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
User:
|
||||
What are the suggestions for the step: <step>Add a new method 'start_engine' in the Vehicle impl block</step>
|
||||
|
||||
A:
|
||||
{
|
||||
"title": "Add start_engine method",
|
||||
"suggestions": [
|
||||
{
|
||||
"kind": "InsertSiblingAfter",
|
||||
"path": "src/vehicle.rs",
|
||||
"symbol": "impl Vehicle fn new",
|
||||
"description": "Add start_engine method"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
Example 4:
|
||||
|
||||
User:
|
||||
```rs src/employee.rs
|
||||
struct Employee {
|
||||
name: String,
|
||||
position: String,
|
||||
salary: u32,
|
||||
department: String,
|
||||
}
|
||||
|
||||
impl Employee {
|
||||
fn new(name: String, position: String, salary: u32, department: String) -> Self {
|
||||
Employee { name, position, salary, department }
|
||||
}
|
||||
|
||||
fn print_details(&self) {
|
||||
println!("Name: {}, Position: {}, Salary: {}, Department: {}",
|
||||
self.name, self.position, self.salary, self.department);
|
||||
}
|
||||
|
||||
fn give_raise(&mut self, amount: u32) {
|
||||
self.salary += amount;
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
<step>Make salary an f32</step>
|
||||
|
||||
What are the suggestions for the step: <step>Make salary an f32</step>
|
||||
|
||||
A (wrong):
|
||||
{
|
||||
"title": "Change salary to f32",
|
||||
"suggestions": [
|
||||
{
|
||||
"kind": "Update",
|
||||
"path": "src/employee.rs",
|
||||
"symbol": "struct Employee",
|
||||
"description": "Change the type of salary to an f32"
|
||||
},
|
||||
{
|
||||
"kind": "Update",
|
||||
"path": "src/employee.rs",
|
||||
"symbol": "struct Employee salary",
|
||||
"description": "Change the type to an f32"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
This example demonstrates what not to do. `struct Employee salary` is a child of `struct Employee`.
|
||||
|
||||
A (corrected):
|
||||
{
|
||||
"title": "Change salary to f32",
|
||||
"suggestions": [
|
||||
{
|
||||
"kind": "Update",
|
||||
"path": "src/employee.rs",
|
||||
"symbol": "struct Employee salary",
|
||||
"description": "Change the type to an f32"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
User:
|
||||
What are the correct suggestions for the step: <step>Remove the 'department' field and update the 'print_details' method</step>
|
||||
|
||||
A:
|
||||
{
|
||||
"title": "Remove department",
|
||||
"suggestions": [
|
||||
{
|
||||
"kind": "Delete",
|
||||
"path": "src/employee.rs",
|
||||
"symbol": "struct Employee department"
|
||||
},
|
||||
{
|
||||
"kind": "Update",
|
||||
"path": "src/employee.rs",
|
||||
"symbol": "impl Employee fn print_details",
|
||||
"description": "Don't print the 'department' field"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
Example 5:
|
||||
|
||||
User:
|
||||
```rs src/game.rs
|
||||
struct Player {
|
||||
name: String,
|
||||
health: i32,
|
||||
pub score: u32,
|
||||
}
|
||||
|
||||
impl Player {
|
||||
pub fn new(name: String) -> Self {
|
||||
Player { name, health: 100, score: 0 }
|
||||
}
|
||||
}
|
||||
|
||||
struct Game {
|
||||
players: Vec<Player>,
|
||||
}
|
||||
|
||||
impl Game {
|
||||
fn new() -> Self {
|
||||
Game { players: Vec::new() }
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
<step>Add a 'level' field to Player and update the 'new' method</step>
|
||||
|
||||
A:
|
||||
{
|
||||
"title": "Add level field to Player",
|
||||
"suggestions": [
|
||||
{
|
||||
"kind": "InsertSiblingAfter",
|
||||
"path": "src/game.rs",
|
||||
"symbol": "struct Player pub score",
|
||||
"description": "Add level field to Player"
|
||||
},
|
||||
{
|
||||
"kind": "Update",
|
||||
"path": "src/game.rs",
|
||||
"symbol": "impl Player pub fn new",
|
||||
"description": "Initialize level in new method"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
Example 6:
|
||||
|
||||
User:
|
||||
```rs src/config.rs
|
||||
use std::collections::HashMap;
|
||||
|
||||
struct Config {
|
||||
settings: HashMap<String, String>,
|
||||
}
|
||||
|
||||
impl Config {
|
||||
fn new() -> Self {
|
||||
Config { settings: HashMap::new() }
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
<step>Add a 'load_from_file' method to Config and import necessary modules</step>
|
||||
|
||||
A:
|
||||
{
|
||||
"title": "Add load_from_file method",
|
||||
"suggestions": [
|
||||
{
|
||||
"kind": "PrependChild",
|
||||
"path": "src/config.rs",
|
||||
"description": "Import std::fs and std::io modules"
|
||||
},
|
||||
{
|
||||
"kind": "AppendChild",
|
||||
"path": "src/config.rs",
|
||||
"symbol": "impl Config",
|
||||
"description": "Add load_from_file method"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
Example 7:
|
||||
|
||||
User:
|
||||
```rs src/database.rs
|
||||
pub(crate) struct Database {
|
||||
connection: Connection,
|
||||
}
|
||||
|
||||
impl Database {
|
||||
fn new(url: &str) -> Result<Self, Error> {
|
||||
let connection = Connection::connect(url)?;
|
||||
Ok(Database { connection })
|
||||
}
|
||||
|
||||
async fn query(&self, sql: &str) -> Result<Vec<Row>, Error> {
|
||||
self.connection.query(sql, &[])
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
<step>Add error handling to the 'query' method and create a custom error type</step>
|
||||
|
||||
A:
|
||||
{
|
||||
"title": "Add error handling to query",
|
||||
"suggestions": [
|
||||
{
|
||||
"kind": "PrependChild",
|
||||
"path": "src/database.rs",
|
||||
"description": "Import necessary error handling modules"
|
||||
},
|
||||
{
|
||||
"kind": "InsertSiblingBefore",
|
||||
"path": "src/database.rs",
|
||||
"symbol": "pub(crate) struct Database",
|
||||
"description": "Define custom DatabaseError enum"
|
||||
},
|
||||
{
|
||||
"kind": "Update",
|
||||
"path": "src/database.rs",
|
||||
"symbol": "impl Database async fn query",
|
||||
"description": "Implement error handling in query method"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
Now generate the suggestions for the following step:
|
||||
18
assets/prompts/terminal_assistant_prompt.hbs
Normal file
@@ -0,0 +1,18 @@
|
||||
You are an expert terminal user.
|
||||
You will be given a description of a command and you need to respond with a command that matches the description.
|
||||
Do not include markdown blocks or any other text formatting in your response; always respond with a single command that can be executed in the given shell.
|
||||
Current OS name is '{{os}}', architecture is '{{arch}}'.
|
||||
{{#if shell}}
|
||||
Current shell is '{{shell}}'.
|
||||
{{/if}}
|
||||
{{#if working_directory}}
|
||||
Current working directory is '{{working_directory}}'.
|
||||
{{/if}}
|
||||
{{#if latest_output}}
|
||||
Latest non-empty terminal output:
|
||||
{{#each latest_output as |line|}}
|
||||
{{line}}
|
||||
{{/each}}
|
||||
{{/if}}
|
||||
Here is the description of the command:
|
||||
{{{user_prompt}}}
|
||||
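The file above is a plain Handlebars template. As a rough sketch of how such a template could be rendered — assuming the `handlebars` crate that appears later in this diff as an assistant-crate dependency, and a hypothetical context struct whose fields simply mirror the template variables — consider:

```rs
use handlebars::Handlebars;
use serde::Serialize;

// Hypothetical context type; the field names mirror the template variables above.
#[derive(Serialize)]
struct TerminalAssistantContext {
    os: String,
    arch: String,
    shell: Option<String>,
    working_directory: Option<String>,
    latest_output: Vec<String>,
    user_prompt: String,
}

// Render the template with the given context; `{{{user_prompt}}}` is inserted
// without HTML escaping because of the triple braces.
fn render_prompt(
    template: &str,
    ctx: &TerminalAssistantContext,
) -> Result<String, handlebars::RenderError> {
    let registry = Handlebars::new();
    registry.render_template(template, ctx)
}
```

In the actual change the loading and overriding of prompt templates presumably goes through the `PromptBuilder` that shows up later in this diff; the sketch only illustrates the rendering step.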
@@ -400,6 +400,19 @@
|
||||
"model": "gpt-4o"
|
||||
}
|
||||
},
|
||||
// The settings for slash commands.
|
||||
"slash_commands": {
|
||||
// Settings for the `/docs` slash command.
|
||||
"docs": {
|
||||
// Whether `/docs` is enabled.
|
||||
"enabled": false
|
||||
},
|
||||
// Settings for the `/project` slash command.
|
||||
"project": {
|
||||
// Whether `/project` is enabled.
|
||||
"enabled": false
|
||||
}
|
||||
},
|
||||
// Whether the screen sharing icon is shown in the os status bar.
|
||||
"show_call_status_icon": true,
|
||||
// Whether to use language servers to provide code intelligence.
|
||||
@@ -635,7 +648,7 @@
|
||||
// "option_to_meta": false,
|
||||
// 2. Make the option keys behave as a 'meta' key, e.g. for emacs
|
||||
// "option_to_meta": true,
|
||||
"option_as_meta": false,
|
||||
"option_as_meta": true,
|
||||
// Whether or not selecting text in the terminal will automatically
|
||||
// copy to the system clipboard.
|
||||
"copy_on_select": false,
|
||||
@@ -865,13 +878,18 @@
|
||||
// Different settings for specific language models.
|
||||
"language_models": {
|
||||
"anthropic": {
|
||||
"version": "1",
|
||||
"api_url": "https://api.anthropic.com"
|
||||
},
|
||||
"openai": {
|
||||
"api_url": "https://api.openai.com/v1"
|
||||
"google": {
|
||||
"api_url": "https://generativelanguage.googleapis.com"
|
||||
},
|
||||
"ollama": {
|
||||
"api_url": "http://localhost:11434"
|
||||
},
|
||||
"openai": {
|
||||
"version": "1",
|
||||
"api_url": "https://api.openai.com/v1"
|
||||
}
|
||||
},
|
||||
// Zed's Prettier integration settings.
|
||||
@@ -919,7 +937,8 @@
|
||||
"vim": {
|
||||
"use_system_clipboard": "always",
|
||||
"use_multiline_find": false,
|
||||
"use_smartcase_find": false
|
||||
"use_smartcase_find": false,
|
||||
"custom_digraphs": {}
|
||||
},
|
||||
// The server to connect to. If the environment variable
|
||||
// ZED_SERVER_URL is set, it will override this setting.
|
||||
|
||||
@@ -4,8 +4,8 @@
|
||||
// documentation: https://zed.dev/docs/configuring-zed
|
||||
//
|
||||
// To see all of Zed's default settings without changing your
|
||||
// custom settings, run the `zed: Open Default Settings` command
|
||||
// from the command palette
|
||||
// custom settings, run `zed: open default settings` from the
|
||||
// command palette
|
||||
{
|
||||
"ui_font_size": 16,
|
||||
"buffer_font_size": 16,
|
||||
|
||||
@@ -24,6 +24,7 @@ schemars = { workspace = true, optional = true }
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
strum.workspace = true
|
||||
thiserror.workspace = true
|
||||
|
||||
[dev-dependencies]
|
||||
tokio.workspace = true
|
||||
|
||||
@@ -1,10 +1,16 @@
|
||||
use anyhow::{anyhow, Result};
|
||||
use futures::{io::BufReader, stream::BoxStream, AsyncBufReadExt, AsyncReadExt, StreamExt};
|
||||
mod supported_countries;
|
||||
|
||||
use anyhow::{anyhow, Context, Result};
|
||||
use futures::{io::BufReader, stream::BoxStream, AsyncBufReadExt, AsyncReadExt, Stream, StreamExt};
|
||||
use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest};
|
||||
use isahc::config::Configurable;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{convert::TryFrom, time::Duration};
|
||||
use strum::EnumIter;
|
||||
use std::str::FromStr;
|
||||
use std::time::Duration;
|
||||
use strum::{EnumIter, EnumString};
|
||||
use thiserror::Error;
|
||||
|
||||
pub use supported_countries::*;
|
||||
|
||||
pub const ANTHROPIC_API_URL: &'static str = "https://api.anthropic.com";
|
||||
|
||||
@@ -12,16 +18,21 @@ pub const ANTHROPIC_API_URL: &'static str = "https://api.anthropic.com";
|
||||
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, EnumIter)]
|
||||
pub enum Model {
|
||||
#[default]
|
||||
#[serde(alias = "claude-3-5-sonnet", rename = "claude-3-5-sonnet-20240620")]
|
||||
#[serde(rename = "claude-3-5-sonnet", alias = "claude-3-5-sonnet-20240620")]
|
||||
Claude3_5Sonnet,
|
||||
#[serde(alias = "claude-3-opus", rename = "claude-3-opus-20240229")]
|
||||
#[serde(rename = "claude-3-opus", alias = "claude-3-opus-20240229")]
|
||||
Claude3Opus,
|
||||
#[serde(alias = "claude-3-sonnet", rename = "claude-3-sonnet-20240229")]
|
||||
#[serde(rename = "claude-3-sonnet", alias = "claude-3-sonnet-20240229")]
|
||||
Claude3Sonnet,
|
||||
#[serde(alias = "claude-3-haiku", rename = "claude-3-haiku-20240307")]
|
||||
#[serde(rename = "claude-3-haiku", alias = "claude-3-haiku-20240307")]
|
||||
Claude3Haiku,
|
||||
#[serde(rename = "custom")]
|
||||
Custom { name: String, max_tokens: usize },
|
||||
Custom {
|
||||
name: String,
|
||||
max_tokens: usize,
|
||||
/// Override this model with a different Anthropic model for tool calls.
|
||||
tool_override: Option<String>,
|
||||
},
|
||||
}
|
||||
|
||||
impl Model {
|
||||
@@ -44,7 +55,7 @@ impl Model {
|
||||
Model::Claude3_5Sonnet => "claude-3-5-sonnet-20240620",
|
||||
Model::Claude3Opus => "claude-3-opus-20240229",
|
||||
Model::Claude3Sonnet => "claude-3-sonnet-20240229",
|
||||
Model::Claude3Haiku => "claude-3-opus-20240307",
|
||||
Model::Claude3Haiku => "claude-3-haiku-20240307",
|
||||
Self::Custom { name, .. } => name,
|
||||
}
|
||||
}
|
||||
@@ -68,114 +79,83 @@ impl Model {
|
||||
Self::Custom { max_tokens, .. } => *max_tokens,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Serialize, Deserialize, Debug, Eq, PartialEq)]
|
||||
#[serde(rename_all = "lowercase")]
|
||||
pub enum Role {
|
||||
User,
|
||||
Assistant,
|
||||
}
|
||||
|
||||
impl TryFrom<String> for Role {
|
||||
type Error = anyhow::Error;
|
||||
|
||||
fn try_from(value: String) -> Result<Self> {
|
||||
match value.as_str() {
|
||||
"user" => Ok(Self::User),
|
||||
"assistant" => Ok(Self::Assistant),
|
||||
_ => Err(anyhow!("invalid role '{value}'")),
|
||||
pub fn tool_model_id(&self) -> &str {
|
||||
if let Self::Custom {
|
||||
tool_override: Some(tool_override),
|
||||
..
|
||||
} = self
|
||||
{
|
||||
tool_override
|
||||
} else {
|
||||
self.id()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Role> for String {
|
||||
fn from(val: Role) -> Self {
|
||||
match val {
|
||||
Role::User => "user".to_owned(),
|
||||
Role::Assistant => "assistant".to_owned(),
|
||||
}
|
||||
pub async fn complete(
|
||||
client: &dyn HttpClient,
|
||||
api_url: &str,
|
||||
api_key: &str,
|
||||
request: Request,
|
||||
) -> Result<Response, AnthropicError> {
|
||||
let uri = format!("{api_url}/v1/messages");
|
||||
let request_builder = HttpRequest::builder()
|
||||
.method(Method::POST)
|
||||
.uri(uri)
|
||||
.header("Anthropic-Version", "2023-06-01")
|
||||
.header("Anthropic-Beta", "tools-2024-04-04")
|
||||
.header("X-Api-Key", api_key)
|
||||
.header("Content-Type", "application/json");
|
||||
|
||||
let serialized_request =
|
||||
serde_json::to_string(&request).context("failed to serialize request")?;
|
||||
let request = request_builder
|
||||
.body(AsyncBody::from(serialized_request))
|
||||
.context("failed to construct request body")?;
|
||||
|
||||
let mut response = client
|
||||
.send(request)
|
||||
.await
|
||||
.context("failed to send request to Anthropic")?;
|
||||
if response.status().is_success() {
|
||||
let mut body = Vec::new();
|
||||
response
|
||||
.body_mut()
|
||||
.read_to_end(&mut body)
|
||||
.await
|
||||
.context("failed to read response body")?;
|
||||
let response_message: Response =
|
||||
serde_json::from_slice(&body).context("failed to deserialize response body")?;
|
||||
Ok(response_message)
|
||||
} else {
|
||||
let mut body = Vec::new();
|
||||
response
|
||||
.body_mut()
|
||||
.read_to_end(&mut body)
|
||||
.await
|
||||
.context("failed to read response body")?;
|
||||
let body_str =
|
||||
std::str::from_utf8(&body).context("failed to parse response body as UTF-8")?;
|
||||
Err(AnthropicError::Other(anyhow!(
|
||||
"Failed to connect to API: {} {}",
|
||||
response.status(),
|
||||
body_str
|
||||
)))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
pub struct Request {
|
||||
pub model: String,
|
||||
pub messages: Vec<RequestMessage>,
|
||||
pub stream: bool,
|
||||
pub system: String,
|
||||
pub max_tokens: u32,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
|
||||
pub struct RequestMessage {
|
||||
pub role: Role,
|
||||
pub content: String,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Debug)]
|
||||
#[serde(tag = "type", rename_all = "snake_case")]
|
||||
pub enum ResponseEvent {
|
||||
MessageStart {
|
||||
message: ResponseMessage,
|
||||
},
|
||||
ContentBlockStart {
|
||||
index: u32,
|
||||
content_block: ContentBlock,
|
||||
},
|
||||
Ping {},
|
||||
ContentBlockDelta {
|
||||
index: u32,
|
||||
delta: TextDelta,
|
||||
},
|
||||
ContentBlockStop {
|
||||
index: u32,
|
||||
},
|
||||
MessageDelta {
|
||||
delta: ResponseMessage,
|
||||
usage: Usage,
|
||||
},
|
||||
MessageStop {},
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Debug)]
|
||||
pub struct ResponseMessage {
|
||||
#[serde(rename = "type")]
|
||||
pub message_type: Option<String>,
|
||||
pub id: Option<String>,
|
||||
pub role: Option<String>,
|
||||
pub content: Option<Vec<String>>,
|
||||
pub model: Option<String>,
|
||||
pub stop_reason: Option<String>,
|
||||
pub stop_sequence: Option<String>,
|
||||
pub usage: Option<Usage>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Debug)]
|
||||
pub struct Usage {
|
||||
pub input_tokens: Option<u32>,
|
||||
pub output_tokens: Option<u32>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Debug)]
|
||||
#[serde(tag = "type", rename_all = "snake_case")]
|
||||
pub enum ContentBlock {
|
||||
Text { text: String },
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Debug)]
|
||||
#[serde(tag = "type", rename_all = "snake_case")]
|
||||
pub enum TextDelta {
|
||||
TextDelta { text: String },
|
||||
}
|
||||
|
||||
pub async fn stream_completion(
|
||||
client: &dyn HttpClient,
|
||||
api_url: &str,
|
||||
api_key: &str,
|
||||
request: Request,
|
||||
low_speed_timeout: Option<Duration>,
|
||||
) -> Result<BoxStream<'static, Result<ResponseEvent>>> {
|
||||
) -> Result<BoxStream<'static, Result<Event, AnthropicError>>, AnthropicError> {
|
||||
let request = StreamingRequest {
|
||||
base: request,
|
||||
stream: true,
|
||||
};
|
||||
let uri = format!("{api_url}/v1/messages");
|
||||
let mut request_builder = HttpRequest::builder()
|
||||
.method(Method::POST)
|
||||
@@ -187,8 +167,16 @@ pub async fn stream_completion(
|
||||
if let Some(low_speed_timeout) = low_speed_timeout {
|
||||
request_builder = request_builder.low_speed_timeout(100, low_speed_timeout);
|
||||
}
|
||||
let request = request_builder.body(AsyncBody::from(serde_json::to_string(&request)?))?;
|
||||
let mut response = client.send(request).await?;
|
||||
let serialized_request =
|
||||
serde_json::to_string(&request).context("failed to serialize request")?;
|
||||
let request = request_builder
|
||||
.body(AsyncBody::from(serialized_request))
|
||||
.context("failed to construct request body")?;
|
||||
|
||||
let mut response = client
|
||||
.send(request)
|
||||
.await
|
||||
.context("failed to send request to Anthropic")?;
|
||||
if response.status().is_success() {
|
||||
let reader = BufReader::new(response.into_body());
|
||||
Ok(reader
|
||||
@@ -199,69 +187,260 @@ pub async fn stream_completion(
|
||||
let line = line.strip_prefix("data: ")?;
|
||||
match serde_json::from_str(line) {
|
||||
Ok(response) => Some(Ok(response)),
|
||||
Err(error) => Some(Err(anyhow!(error))),
|
||||
Err(error) => Some(Err(AnthropicError::Other(anyhow!(error)))),
|
||||
}
|
||||
}
|
||||
Err(error) => Some(Err(anyhow!(error))),
|
||||
Err(error) => Some(Err(AnthropicError::Other(anyhow!(error)))),
|
||||
}
|
||||
})
|
||||
.boxed())
|
||||
} else {
|
||||
let mut body = Vec::new();
|
||||
response.body_mut().read_to_end(&mut body).await?;
|
||||
response
|
||||
.body_mut()
|
||||
.read_to_end(&mut body)
|
||||
.await
|
||||
.context("failed to read response body")?;
|
||||
|
||||
let body_str = std::str::from_utf8(&body)?;
|
||||
let body_str =
|
||||
std::str::from_utf8(&body).context("failed to parse response body as UTF-8")?;
|
||||
|
||||
match serde_json::from_str::<ResponseEvent>(body_str) {
|
||||
Ok(_) => Err(anyhow!(
|
||||
"Unexpected success response while expecting an error: {}",
|
||||
body_str,
|
||||
)),
|
||||
Err(_) => Err(anyhow!(
|
||||
match serde_json::from_str::<Event>(body_str) {
|
||||
Ok(Event::Error { error }) => Err(AnthropicError::ApiError(error)),
|
||||
Ok(_) => Err(AnthropicError::Other(anyhow!(
|
||||
"Unexpected success response while expecting an error: '{body_str}'",
|
||||
))),
|
||||
Err(_) => Err(AnthropicError::Other(anyhow!(
|
||||
"Failed to connect to API: {} {}",
|
||||
response.status(),
|
||||
body_str,
|
||||
)),
|
||||
))),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// #[cfg(test)]
|
||||
// mod tests {
|
||||
// use super::*;
|
||||
// use http::IsahcHttpClient;
|
||||
pub fn extract_text_from_events(
|
||||
response: impl Stream<Item = Result<Event, AnthropicError>>,
|
||||
) -> impl Stream<Item = Result<String, AnthropicError>> {
|
||||
response.filter_map(|response| async move {
|
||||
match response {
|
||||
Ok(response) => match response {
|
||||
Event::ContentBlockStart { content_block, .. } => match content_block {
|
||||
Content::Text { text } => Some(Ok(text)),
|
||||
_ => None,
|
||||
},
|
||||
Event::ContentBlockDelta { delta, .. } => match delta {
|
||||
ContentDelta::TextDelta { text } => Some(Ok(text)),
|
||||
_ => None,
|
||||
},
|
||||
Event::Error { error } => Some(Err(AnthropicError::ApiError(error))),
|
||||
_ => None,
|
||||
},
|
||||
Err(error) => Some(Err(error)),
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// #[tokio::test]
|
||||
// async fn stream_completion_success() {
|
||||
// let http_client = IsahcHttpClient::new().unwrap();
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct Message {
|
||||
pub role: Role,
|
||||
pub content: Vec<Content>,
|
||||
}
|
||||
|
||||
// let request = Request {
|
||||
// model: Model::Claude3Opus,
|
||||
// messages: vec![RequestMessage {
|
||||
// role: Role::User,
|
||||
// content: "Ping".to_string(),
|
||||
// }],
|
||||
// stream: true,
|
||||
// system: "Respond to ping with pong".to_string(),
|
||||
// max_tokens: 4096,
|
||||
// };
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "lowercase")]
|
||||
pub enum Role {
|
||||
User,
|
||||
Assistant,
|
||||
}
|
||||
|
||||
// let stream = stream_completion(
|
||||
// &http_client,
|
||||
// "https://api.anthropic.com",
|
||||
// &std::env::var("ANTHROPIC_API_KEY").expect("ANTHROPIC_API_KEY not set"),
|
||||
// request,
|
||||
// )
|
||||
// .await
|
||||
// .unwrap();
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(tag = "type")]
|
||||
pub enum Content {
|
||||
#[serde(rename = "text")]
|
||||
Text { text: String },
|
||||
#[serde(rename = "image")]
|
||||
Image { source: ImageSource },
|
||||
#[serde(rename = "tool_use")]
|
||||
ToolUse {
|
||||
id: String,
|
||||
name: String,
|
||||
input: serde_json::Value,
|
||||
},
|
||||
#[serde(rename = "tool_result")]
|
||||
ToolResult {
|
||||
tool_use_id: String,
|
||||
content: String,
|
||||
},
|
||||
}
|
||||
|
||||
// stream
|
||||
// .for_each(|event| async {
|
||||
// match event {
|
||||
// Ok(event) => println!("{:?}", event),
|
||||
// Err(e) => eprintln!("Error: {:?}", e),
|
||||
// }
|
||||
// })
|
||||
// .await;
|
||||
// }
|
||||
// }
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct ImageSource {
|
||||
#[serde(rename = "type")]
|
||||
pub source_type: String,
|
||||
pub media_type: String,
|
||||
pub data: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct Tool {
|
||||
pub name: String,
|
||||
pub description: String,
|
||||
pub input_schema: serde_json::Value,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(tag = "type", rename_all = "lowercase")]
|
||||
pub enum ToolChoice {
|
||||
Auto,
|
||||
Any,
|
||||
Tool { name: String },
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct Request {
|
||||
pub model: String,
|
||||
pub max_tokens: u32,
|
||||
pub messages: Vec<Message>,
|
||||
#[serde(default, skip_serializing_if = "Vec::is_empty")]
|
||||
pub tools: Vec<Tool>,
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
pub tool_choice: Option<ToolChoice>,
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
pub system: Option<String>,
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
pub metadata: Option<Metadata>,
|
||||
#[serde(default, skip_serializing_if = "Vec::is_empty")]
|
||||
pub stop_sequences: Vec<String>,
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
pub temperature: Option<f32>,
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
pub top_k: Option<u32>,
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
pub top_p: Option<f32>,
|
||||
}
|
||||
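For orientation, a hypothetical construction of this request type (illustrative values only; the model id matches the `Claude3_5Sonnet` id defined earlier in the file) could look like:

```rs
// Illustration only: building a non-streaming request for the messages API.
let request = Request {
    model: "claude-3-5-sonnet-20240620".to_string(),
    max_tokens: 1024,
    messages: vec![Message {
        role: Role::User,
        content: vec![Content::Text { text: "Hello".to_string() }],
    }],
    tools: Vec::new(),
    tool_choice: None,
    system: Some("You are a helpful assistant.".to_string()),
    metadata: None,
    stop_sequences: Vec::new(),
    temperature: None,
    top_k: None,
    top_p: None,
};
// The same value can be passed to `complete`, or wrapped in `StreamingRequest`
// (below) by `stream_completion` for the streaming code path.
```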
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
struct StreamingRequest {
|
||||
#[serde(flatten)]
|
||||
pub base: Request,
|
||||
pub stream: bool,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct Metadata {
|
||||
pub user_id: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct Usage {
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
pub input_tokens: Option<u32>,
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
pub output_tokens: Option<u32>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct Response {
|
||||
pub id: String,
|
||||
#[serde(rename = "type")]
|
||||
pub response_type: String,
|
||||
pub role: Role,
|
||||
pub content: Vec<Content>,
|
||||
pub model: String,
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
pub stop_reason: Option<String>,
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
pub stop_sequence: Option<String>,
|
||||
pub usage: Usage,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(tag = "type")]
|
||||
pub enum Event {
|
||||
#[serde(rename = "message_start")]
|
||||
MessageStart { message: Response },
|
||||
#[serde(rename = "content_block_start")]
|
||||
ContentBlockStart {
|
||||
index: usize,
|
||||
content_block: Content,
|
||||
},
|
||||
#[serde(rename = "content_block_delta")]
|
||||
ContentBlockDelta { index: usize, delta: ContentDelta },
|
||||
#[serde(rename = "content_block_stop")]
|
||||
ContentBlockStop { index: usize },
|
||||
#[serde(rename = "message_delta")]
|
||||
MessageDelta { delta: MessageDelta, usage: Usage },
|
||||
#[serde(rename = "message_stop")]
|
||||
MessageStop,
|
||||
#[serde(rename = "ping")]
|
||||
Ping,
|
||||
#[serde(rename = "error")]
|
||||
Error { error: ApiError },
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(tag = "type")]
|
||||
pub enum ContentDelta {
|
||||
#[serde(rename = "text_delta")]
|
||||
TextDelta { text: String },
|
||||
#[serde(rename = "input_json_delta")]
|
||||
InputJsonDelta { partial_json: String },
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct MessageDelta {
|
||||
pub stop_reason: Option<String>,
|
||||
pub stop_sequence: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Error, Debug)]
|
||||
pub enum AnthropicError {
|
||||
#[error("an error occurred while interacting with the Anthropic API: {error_type}: {message}", error_type = .0.error_type, message = .0.message)]
|
||||
ApiError(ApiError),
|
||||
#[error("{0}")]
|
||||
Other(#[from] anyhow::Error),
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct ApiError {
|
||||
#[serde(rename = "type")]
|
||||
pub error_type: String,
|
||||
pub message: String,
|
||||
}
|
||||
|
||||
/// An Anthropic API error code.
|
||||
/// https://docs.anthropic.com/en/api/errors#http-errors
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Copy, EnumString)]
|
||||
#[strum(serialize_all = "snake_case")]
|
||||
pub enum ApiErrorCode {
|
||||
/// 400 - `invalid_request_error`: There was an issue with the format or content of your request.
|
||||
InvalidRequestError,
|
||||
/// 401 - `authentication_error`: There's an issue with your API key.
|
||||
AuthenticationError,
|
||||
/// 403 - `permission_error`: Your API key does not have permission to use the specified resource.
|
||||
PermissionError,
|
||||
/// 404 - `not_found_error`: The requested resource was not found.
|
||||
NotFoundError,
|
||||
/// 413 - `request_too_large`: Request exceeds the maximum allowed number of bytes.
|
||||
RequestTooLarge,
|
||||
/// 429 - `rate_limit_error`: Your account has hit a rate limit.
|
||||
RateLimitError,
|
||||
/// 500 - `api_error`: An unexpected error has occurred internal to Anthropic's systems.
|
||||
ApiError,
|
||||
/// 529 - `overloaded_error`: Anthropic's API is temporarily overloaded.
|
||||
OverloadedError,
|
||||
}
|
||||
|
||||
impl ApiError {
|
||||
pub fn code(&self) -> Option<ApiErrorCode> {
|
||||
ApiErrorCode::from_str(&self.error_type).ok()
|
||||
}
|
||||
|
||||
pub fn is_rate_limit_error(&self) -> bool {
|
||||
match self.error_type.as_str() {
|
||||
"rate_limit_error" => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
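A small sketch of how these error types fit together, using a hypothetical payload in the shape the structs above deserialize (not part of the change itself):

```rs
#[test]
fn rate_limit_error_maps_to_enum() {
    // Hypothetical error body: an "error" event wrapping an ApiError.
    let body = r#"{"type": "error", "error": {"type": "rate_limit_error", "message": "Too many requests"}}"#;

    match serde_json::from_str::<Event>(body).unwrap() {
        Event::Error { error } => {
            // `code()` maps the string error type onto `ApiErrorCode` via strum's `EnumString`.
            assert_eq!(error.code(), Some(ApiErrorCode::RateLimitError));
            assert!(error.is_rate_limit_error());
        }
        _ => panic!("expected an error event"),
    }
}
```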
194
crates/anthropic/src/supported_countries.rs
Normal file
@@ -0,0 +1,194 @@
|
||||
use std::collections::HashSet;
|
||||
use std::sync::LazyLock;
|
||||
|
||||
/// Returns whether the given country code is supported by Anthropic.
|
||||
///
|
||||
/// https://www.anthropic.com/supported-countries
|
||||
pub fn is_supported_country(country_code: &str) -> bool {
|
||||
SUPPORTED_COUNTRIES.contains(&country_code)
|
||||
}
|
||||
|
||||
/// The list of country codes supported by Anthropic.
|
||||
///
|
||||
/// https://www.anthropic.com/supported-countries
|
||||
static SUPPORTED_COUNTRIES: LazyLock<HashSet<&'static str>> = LazyLock::new(|| {
|
||||
vec![
|
||||
"AL", // Albania
|
||||
"DZ", // Algeria
|
||||
"AD", // Andorra
|
||||
"AO", // Angola
|
||||
"AG", // Antigua and Barbuda
|
||||
"AR", // Argentina
|
||||
"AM", // Armenia
|
||||
"AU", // Australia
|
||||
"AT", // Austria
|
||||
"AZ", // Azerbaijan
|
||||
"BS", // Bahamas
|
||||
"BH", // Bahrain
|
||||
"BD", // Bangladesh
|
||||
"BB", // Barbados
|
||||
"BE", // Belgium
|
||||
"BZ", // Belize
|
||||
"BJ", // Benin
|
||||
"BT", // Bhutan
|
||||
"BO", // Bolivia
|
||||
"BA", // Bosnia and Herzegovina
|
||||
"BW", // Botswana
|
||||
"BR", // Brazil
|
||||
"BN", // Brunei
|
||||
"BG", // Bulgaria
|
||||
"BF", // Burkina Faso
|
||||
"BI", // Burundi
|
||||
"CV", // Cabo Verde
|
||||
"KH", // Cambodia
|
||||
"CM", // Cameroon
|
||||
"CA", // Canada
|
||||
"TD", // Chad
|
||||
"CL", // Chile
|
||||
"CO", // Colombia
|
||||
"KM", // Comoros
|
||||
"CG", // Congo (Brazzaville)
|
||||
"CR", // Costa Rica
|
||||
"CI", // Côte d'Ivoire
|
||||
"HR", // Croatia
|
||||
"CY", // Cyprus
|
||||
"CZ", // Czechia (Czech Republic)
|
||||
"DK", // Denmark
|
||||
"DJ", // Djibouti
|
||||
"DM", // Dominica
|
||||
"DO", // Dominican Republic
|
||||
"EC", // Ecuador
|
||||
"EG", // Egypt
|
||||
"SV", // El Salvador
|
||||
"GQ", // Equatorial Guinea
|
||||
"EE", // Estonia
|
||||
"SZ", // Eswatini
|
||||
"FJ", // Fiji
|
||||
"FI", // Finland
|
||||
"FR", // France
|
||||
"GA", // Gabon
|
||||
"GM", // Gambia
|
||||
"GE", // Georgia
|
||||
"DE", // Germany
|
||||
"GH", // Ghana
|
||||
"GR", // Greece
|
||||
"GD", // Grenada
|
||||
"GT", // Guatemala
|
||||
"GN", // Guinea
|
||||
"GW", // Guinea-Bissau
|
||||
"GY", // Guyana
|
||||
"HT", // Haiti
|
||||
"HN", // Honduras
|
||||
"HU", // Hungary
|
||||
"IS", // Iceland
|
||||
"IN", // India
|
||||
"ID", // Indonesia
|
||||
"IQ", // Iraq
|
||||
"IE", // Ireland
|
||||
"IL", // Israel
|
||||
"IT", // Italy
|
||||
"JM", // Jamaica
|
||||
"JP", // Japan
|
||||
"JO", // Jordan
|
||||
"KZ", // Kazakhstan
|
||||
"KE", // Kenya
|
||||
"KI", // Kiribati
|
||||
"KW", // Kuwait
|
||||
"KG", // Kyrgyzstan
|
||||
"LA", // Laos
|
||||
"LV", // Latvia
|
||||
"LB", // Lebanon
|
||||
"LS", // Lesotho
|
||||
"LR", // Liberia
|
||||
"LI", // Liechtenstein
|
||||
"LT", // Lithuania
|
||||
"LU", // Luxembourg
|
||||
"MG", // Madagascar
|
||||
"MW", // Malawi
|
||||
"MY", // Malaysia
|
||||
"MV", // Maldives
|
||||
"MT", // Malta
|
||||
"MH", // Marshall Islands
|
||||
"MR", // Mauritania
|
||||
"MU", // Mauritius
|
||||
"MX", // Mexico
|
||||
"FM", // Micronesia
|
||||
"MD", // Moldova
|
||||
"MC", // Monaco
|
||||
"MN", // Mongolia
|
||||
"ME", // Montenegro
|
||||
"MA", // Morocco
|
||||
"MZ", // Mozambique
|
||||
"NA", // Namibia
|
||||
"NR", // Nauru
|
||||
"NP", // Nepal
|
||||
"NL", // Netherlands
|
||||
"NZ", // New Zealand
|
||||
"NE", // Niger
|
||||
"NG", // Nigeria
|
||||
"MK", // North Macedonia
|
||||
"NO", // Norway
|
||||
"OM", // Oman
|
||||
"PK", // Pakistan
|
||||
"PW", // Palau
|
||||
"PS", // Palestine
|
||||
"PA", // Panama
|
||||
"PG", // Papua New Guinea
|
||||
"PY", // Paraguay
|
||||
"PE", // Peru
|
||||
"PH", // Philippines
|
||||
"PL", // Poland
|
||||
"PT", // Portugal
|
||||
"QA", // Qatar
|
||||
"RO", // Romania
|
||||
"RW", // Rwanda
|
||||
"KN", // Saint Kitts and Nevis
|
||||
"LC", // Saint Lucia
|
||||
"VC", // Saint Vincent and the Grenadines
|
||||
"WS", // Samoa
|
||||
"SM", // San Marino
|
||||
"ST", // São Tomé and Príncipe
|
||||
"SA", // Saudi Arabia
|
||||
"SN", // Senegal
|
||||
"RS", // Serbia
|
||||
"SC", // Seychelles
|
||||
"SL", // Sierra Leone
|
||||
"SG", // Singapore
|
||||
"SK", // Slovakia
|
||||
"SI", // Slovenia
|
||||
"SB", // Solomon Islands
|
||||
"ZA", // South Africa
|
||||
"KR", // South Korea
|
||||
"ES", // Spain
|
||||
"LK", // Sri Lanka
|
||||
"SR", // Suriname
|
||||
"SE", // Sweden
|
||||
"CH", // Switzerland
|
||||
"TW", // Taiwan
|
||||
"TJ", // Tajikistan
|
||||
"TZ", // Tanzania
|
||||
"TH", // Thailand
|
||||
"TL", // Timor-Leste
|
||||
"TG", // Togo
|
||||
"TO", // Tonga
|
||||
"TT", // Trinidad and Tobago
|
||||
"TN", // Tunisia
|
||||
"TR", // Türkiye (Turkey)
|
||||
"TM", // Turkmenistan
|
||||
"TV", // Tuvalu
|
||||
"UG", // Uganda
|
||||
"UA", // Ukraine (except Crimea, Donetsk, and Luhansk regions)
|
||||
"AE", // United Arab Emirates
|
||||
"GB", // United Kingdom
|
||||
"US", // United States of America
|
||||
"UY", // Uruguay
|
||||
"UZ", // Uzbekistan
|
||||
"VU", // Vanuatu
|
||||
"VA", // Vatican City
|
||||
"VN", // Vietnam
|
||||
"ZM", // Zambia
|
||||
"ZW", // Zimbabwe
|
||||
]
|
||||
.into_iter()
|
||||
.collect()
|
||||
});
|
||||
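The lookup is a plain `HashSet` membership test on the two-letter code; a sketch of a test that could exercise it:

```rs
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn membership_is_a_simple_lookup() {
        assert!(is_supported_country("US")); // present in the list above
        assert!(is_supported_country("DE"));
        assert!(!is_supported_country("XX")); // codes not in the set return false
    }
}
```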
@@ -11,6 +11,7 @@ use rust_embed::RustEmbed;
|
||||
#[include = "themes/**/*"]
|
||||
#[exclude = "themes/src/*"]
|
||||
#[include = "sounds/**/*"]
|
||||
#[include = "prompts/**/*"]
|
||||
#[include = "*.md"]
|
||||
#[exclude = "*.DS_Store"]
|
||||
pub struct Assets;
|
||||
|
||||
@@ -32,13 +32,14 @@ client.workspace = true
|
||||
clock.workspace = true
|
||||
collections.workspace = true
|
||||
command_palette_hooks.workspace = true
|
||||
completion.workspace = true
|
||||
db.workspace = true
|
||||
editor.workspace = true
|
||||
feature_flags.workspace = true
|
||||
fs.workspace = true
|
||||
futures.workspace = true
|
||||
fuzzy.workspace = true
|
||||
gpui.workspace = true
|
||||
handlebars.workspace = true
|
||||
heed.workspace = true
|
||||
html_to_markdown.workspace = true
|
||||
http_client.workspace = true
|
||||
@@ -47,6 +48,7 @@ indoc.workspace = true
|
||||
language.workspace = true
|
||||
language_model.workspace = true
|
||||
log.workspace = true
|
||||
markdown.workspace = true
|
||||
menu.workspace = true
|
||||
multi_buffer.workspace = true
|
||||
ollama = { workspace = true, features = ["schemars"] }
|
||||
@@ -55,6 +57,7 @@ ordered-float.workspace = true
|
||||
parking_lot.workspace = true
|
||||
paths.workspace = true
|
||||
project.workspace = true
|
||||
proto.workspace = true
|
||||
regex.workspace = true
|
||||
rope.workspace = true
|
||||
schemars.workspace = true
|
||||
@@ -63,11 +66,12 @@ semantic_index.workspace = true
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
settings.workspace = true
|
||||
similar.workspace = true
|
||||
smallvec.workspace = true
|
||||
smol.workspace = true
|
||||
telemetry_events.workspace = true
|
||||
terminal.workspace = true
|
||||
terminal_view.workspace = true
|
||||
text.workspace = true
|
||||
theme.workspace = true
|
||||
toml.workspace = true
|
||||
ui.workspace = true
|
||||
@@ -75,16 +79,17 @@ util.workspace = true
|
||||
uuid.workspace = true
|
||||
workspace.workspace = true
|
||||
picker.workspace = true
|
||||
roxmltree = "0.20.0"
|
||||
zed_actions.workspace = true
|
||||
|
||||
[dev-dependencies]
|
||||
completion = { workspace = true, features = ["test-support"] }
|
||||
ctor.workspace = true
|
||||
editor = { workspace = true, features = ["test-support"] }
|
||||
env_logger.workspace = true
|
||||
language = { workspace = true, features = ["test-support"] }
|
||||
language_model = { workspace = true, features = ["test-support"] }
|
||||
log.workspace = true
|
||||
project = { workspace = true, features = ["test-support"] }
|
||||
rand.workspace = true
|
||||
serde_json_lenient.workspace = true
|
||||
text = { workspace = true, features = ["test-support"] }
|
||||
unindent.workspace = true
|
||||
|
||||
@@ -1,12 +1,16 @@
|
||||
#![cfg_attr(target_os = "windows", allow(unused, dead_code))]
|
||||
|
||||
pub mod assistant_panel;
|
||||
pub mod assistant_settings;
|
||||
mod context;
|
||||
pub(crate) mod context_inspector;
|
||||
pub mod context_store;
|
||||
mod inline_assistant;
|
||||
mod model_selector;
|
||||
mod prompt_library;
|
||||
mod prompts;
|
||||
mod slash_command;
|
||||
pub mod slash_command_settings;
|
||||
mod streaming_diff;
|
||||
mod terminal_inline_assistant;
|
||||
|
||||
@@ -15,9 +19,9 @@ use assistant_settings::AssistantSettings;
|
||||
use assistant_slash_command::SlashCommandRegistry;
|
||||
use client::{proto, Client};
|
||||
use command_palette_hooks::CommandPaletteFilter;
|
||||
use completion::LanguageModelCompletionProvider;
|
||||
pub use context::*;
|
||||
pub use context_store::*;
|
||||
use feature_flags::FeatureFlagAppExt;
|
||||
use fs::Fs;
|
||||
use gpui::{actions, impl_actions, AppContext, Global, SharedString, UpdateGlobal};
|
||||
use indexed_docs::IndexedDocsRegistry;
|
||||
@@ -26,16 +30,21 @@ use language_model::{
|
||||
LanguageModelId, LanguageModelProviderId, LanguageModelRegistry, LanguageModelResponseMessage,
|
||||
};
|
||||
pub(crate) use model_selector::*;
|
||||
pub use prompts::PromptBuilder;
|
||||
use prompts::PromptOverrideContext;
|
||||
use semantic_index::{CloudEmbeddingProvider, SemanticIndex};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::{update_settings_file, Settings, SettingsStore};
|
||||
use slash_command::{
|
||||
active_command, default_command, diagnostics_command, docs_command, fetch_command,
|
||||
file_command, now_command, project_command, prompt_command, search_command, symbols_command,
|
||||
tabs_command, term_command,
|
||||
default_command, diagnostics_command, docs_command, fetch_command, file_command, now_command,
|
||||
project_command, prompt_command, search_command, symbols_command, tabs_command,
|
||||
terminal_command, workflow_command,
|
||||
};
|
||||
use std::sync::Arc;
|
||||
pub(crate) use streaming_diff::*;
|
||||
use util::ResultExt;
|
||||
|
||||
use crate::slash_command_settings::SlashCommandSettings;
|
||||
|
||||
actions!(
|
||||
assistant,
|
||||
@@ -46,16 +55,18 @@ actions!(
|
||||
QuoteSelection,
|
||||
InsertIntoEditor,
|
||||
ToggleFocus,
|
||||
ResetKey,
|
||||
InsertActivePrompt,
|
||||
ShowConfiguration,
|
||||
DeployHistory,
|
||||
DeployPromptLibrary,
|
||||
ConfirmCommand,
|
||||
ToggleModelSelector,
|
||||
DebugEditSteps
|
||||
DebugWorkflowSteps
|
||||
]
|
||||
);
|
||||
|
||||
const DEFAULT_CONTEXT_LINES: usize = 20;
|
||||
|
||||
#[derive(Clone, Default, Deserialize, PartialEq)]
|
||||
pub struct InlineAssist {
|
||||
prompt: Option<String>,
|
||||
@@ -91,6 +102,7 @@ pub enum MessageStatus {
|
||||
Pending,
|
||||
Done,
|
||||
Error(SharedString),
|
||||
Canceled,
|
||||
}
|
||||
|
||||
impl MessageStatus {
|
||||
@@ -101,6 +113,7 @@ impl MessageStatus {
|
||||
Some(proto::context_message_status::Variant::Error(error)) => {
|
||||
MessageStatus::Error(error.message.into())
|
||||
}
|
||||
Some(proto::context_message_status::Variant::Canceled(_)) => MessageStatus::Canceled,
|
||||
None => MessageStatus::Pending,
|
||||
}
|
||||
}
|
||||
@@ -124,6 +137,11 @@ impl MessageStatus {
|
||||
},
|
||||
)),
|
||||
},
|
||||
MessageStatus::Canceled => proto::ContextMessageStatus {
|
||||
variant: Some(proto::context_message_status::Variant::Canceled(
|
||||
proto::context_message_status::Canceled {},
|
||||
)),
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -161,9 +179,15 @@ impl Assistant {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn init(fs: Arc<dyn Fs>, client: Arc<Client>, cx: &mut AppContext) {
|
||||
pub fn init(
|
||||
fs: Arc<dyn Fs>,
|
||||
client: Arc<Client>,
|
||||
dev_mode: bool,
|
||||
cx: &mut AppContext,
|
||||
) -> Arc<PromptBuilder> {
|
||||
cx.set_global(Assistant::default());
|
||||
AssistantSettings::register(cx);
|
||||
SlashCommandSettings::register(cx);
|
||||
|
||||
// TODO: remove this when 0.148.0 is released.
|
||||
if AssistantSettings::get_global(cx).using_outdated_settings_version {
|
||||
@@ -192,12 +216,31 @@ pub fn init(fs: Arc<dyn Fs>, client: Arc<Client>, cx: &mut AppContext) {
|
||||
|
||||
context_store::init(&client);
|
||||
prompt_library::init(cx);
|
||||
init_completion_provider(cx);
|
||||
init_language_model_settings(cx);
|
||||
assistant_slash_command::init(cx);
|
||||
register_slash_commands(cx);
|
||||
assistant_panel::init(cx);
|
||||
inline_assistant::init(fs.clone(), client.telemetry().clone(), cx);
|
||||
terminal_inline_assistant::init(fs.clone(), client.telemetry().clone(), cx);
|
||||
|
||||
let prompt_builder = prompts::PromptBuilder::new(Some(PromptOverrideContext {
|
||||
dev_mode,
|
||||
fs: fs.clone(),
|
||||
cx,
|
||||
}))
|
||||
.log_err()
|
||||
.map(Arc::new)
|
||||
.unwrap_or_else(|| Arc::new(prompts::PromptBuilder::new(None).unwrap()));
|
||||
register_slash_commands(Some(prompt_builder.clone()), cx);
|
||||
inline_assistant::init(
|
||||
fs.clone(),
|
||||
prompt_builder.clone(),
|
||||
client.telemetry().clone(),
|
||||
cx,
|
||||
);
|
||||
terminal_inline_assistant::init(
|
||||
fs.clone(),
|
||||
prompt_builder.clone(),
|
||||
client.telemetry().clone(),
|
||||
cx,
|
||||
);
|
||||
IndexedDocsRegistry::init_global(cx);
|
||||
|
||||
CommandPaletteFilter::update_global(cx, |filter, _cx| {
|
||||
@@ -215,17 +258,26 @@ pub fn init(fs: Arc<dyn Fs>, client: Arc<Client>, cx: &mut AppContext) {
|
||||
});
|
||||
})
|
||||
.detach();
|
||||
|
||||
prompt_builder
|
||||
}
|
||||
|
||||
fn init_completion_provider(cx: &mut AppContext) {
|
||||
completion::init(cx);
|
||||
fn init_language_model_settings(cx: &mut AppContext) {
|
||||
update_active_language_model_from_settings(cx);
|
||||
|
||||
cx.observe_global::<SettingsStore>(update_active_language_model_from_settings)
|
||||
.detach();
|
||||
cx.observe(&LanguageModelRegistry::global(cx), |_, cx| {
|
||||
update_active_language_model_from_settings(cx)
|
||||
})
|
||||
cx.subscribe(
|
||||
&LanguageModelRegistry::global(cx),
|
||||
|_, event: &language_model::Event, cx| match event {
|
||||
language_model::Event::ProviderStateChanged
|
||||
| language_model::Event::AddedProvider(_)
|
||||
| language_model::Event::RemovedProvider(_) => {
|
||||
update_active_language_model_from_settings(cx);
|
||||
}
|
||||
_ => {}
|
||||
},
|
||||
)
|
||||
.detach();
|
||||
}
|
||||
|
||||
@@ -233,37 +285,61 @@ fn update_active_language_model_from_settings(cx: &mut AppContext) {
|
||||
let settings = AssistantSettings::get_global(cx);
|
||||
let provider_name = LanguageModelProviderId::from(settings.default_model.provider.clone());
|
||||
let model_id = LanguageModelId::from(settings.default_model.model.clone());
|
||||
|
||||
let Some(provider) = LanguageModelRegistry::global(cx)
|
||||
.read(cx)
|
||||
.provider(&provider_name)
|
||||
else {
|
||||
return;
|
||||
};
|
||||
|
||||
let models = provider.provided_models(cx);
|
||||
if let Some(model) = models.iter().find(|model| model.id() == model_id).cloned() {
|
||||
LanguageModelCompletionProvider::global(cx).update(cx, |completion_provider, cx| {
|
||||
completion_provider.set_active_model(model, cx);
|
||||
});
|
||||
}
|
||||
LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
|
||||
registry.select_active_model(&provider_name, &model_id, cx);
|
||||
});
|
||||
}
|
||||
|
||||
fn register_slash_commands(cx: &mut AppContext) {
|
||||
fn register_slash_commands(prompt_builder: Option<Arc<PromptBuilder>>, cx: &mut AppContext) {
|
||||
let slash_command_registry = SlashCommandRegistry::global(cx);
|
||||
slash_command_registry.register_command(file_command::FileSlashCommand, true);
|
||||
slash_command_registry.register_command(active_command::ActiveSlashCommand, true);
|
||||
slash_command_registry.register_command(symbols_command::OutlineSlashCommand, true);
|
||||
slash_command_registry.register_command(tabs_command::TabsSlashCommand, true);
|
||||
slash_command_registry.register_command(project_command::ProjectSlashCommand, true);
|
||||
slash_command_registry.register_command(search_command::SearchSlashCommand, true);
|
||||
slash_command_registry.register_command(prompt_command::PromptSlashCommand, true);
|
||||
slash_command_registry.register_command(default_command::DefaultSlashCommand, true);
|
||||
slash_command_registry.register_command(term_command::TermSlashCommand, true);
|
||||
slash_command_registry.register_command(now_command::NowSlashCommand, true);
|
||||
slash_command_registry.register_command(default_command::DefaultSlashCommand, false);
|
||||
slash_command_registry.register_command(terminal_command::TerminalSlashCommand, true);
|
||||
slash_command_registry.register_command(now_command::NowSlashCommand, false);
|
||||
slash_command_registry.register_command(diagnostics_command::DiagnosticsSlashCommand, true);
|
||||
slash_command_registry.register_command(docs_command::DocsSlashCommand, true);
|
||||
|
||||
if let Some(prompt_builder) = prompt_builder {
|
||||
slash_command_registry.register_command(
|
||||
workflow_command::WorkflowSlashCommand::new(prompt_builder),
|
||||
true,
|
||||
);
|
||||
}
|
||||
slash_command_registry.register_command(fetch_command::FetchSlashCommand, false);
|
||||
|
||||
update_slash_commands_from_settings(cx);
|
||||
cx.observe_global::<SettingsStore>(update_slash_commands_from_settings)
|
||||
.detach();
|
||||
|
||||
cx.observe_flag::<search_command::SearchSlashCommandFeatureFlag, _>({
|
||||
let slash_command_registry = slash_command_registry.clone();
|
||||
move |is_enabled, _cx| {
|
||||
if is_enabled {
|
||||
slash_command_registry.register_command(search_command::SearchSlashCommand, true);
|
||||
}
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
|
||||
fn update_slash_commands_from_settings(cx: &mut AppContext) {
|
||||
let slash_command_registry = SlashCommandRegistry::global(cx);
|
||||
let settings = SlashCommandSettings::get_global(cx);
|
||||
|
||||
if settings.docs.enabled {
|
||||
slash_command_registry.register_command(docs_command::DocsSlashCommand, true);
|
||||
} else {
|
||||
slash_command_registry.unregister_command(docs_command::DocsSlashCommand);
|
||||
}
|
||||
|
||||
if settings.project.enabled {
|
||||
slash_command_registry.register_command(project_command::ProjectSlashCommand, true);
|
||||
} else {
|
||||
slash_command_registry.unregister_command(project_command::ProjectSlashCommand);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn humanize_token_count(count: usize) -> String {
|
||||
|
||||
@@ -52,7 +52,7 @@ pub struct AssistantSettings {
|
||||
pub dock: AssistantDockPosition,
|
||||
pub default_width: Pixels,
|
||||
pub default_height: Pixels,
|
||||
pub default_model: AssistantDefaultModel,
|
||||
pub default_model: LanguageModelSelection,
|
||||
pub using_outdated_settings_version: bool,
|
||||
}
|
||||
|
||||
@@ -110,11 +110,15 @@ impl AssistantSettingsContent {
|
||||
move |content, _| {
|
||||
if content.anthropic.is_none() {
|
||||
content.anthropic =
|
||||
Some(language_model::settings::AnthropicSettingsContent {
|
||||
api_url,
|
||||
low_speed_timeout_in_seconds,
|
||||
..Default::default()
|
||||
});
|
||||
Some(language_model::settings::AnthropicSettingsContent::Versioned(
|
||||
language_model::settings::VersionedAnthropicSettingsContent::V1(
|
||||
language_model::settings::AnthropicSettingsContentV1 {
|
||||
api_url,
|
||||
low_speed_timeout_in_seconds,
|
||||
available_models: None
|
||||
}
|
||||
)
|
||||
));
|
||||
}
|
||||
},
|
||||
),
|
||||
@@ -145,12 +149,27 @@ impl AssistantSettingsContent {
|
||||
cx,
|
||||
move |content, _| {
|
||||
if content.openai.is_none() {
|
||||
let available_models = available_models.map(|models| {
|
||||
models
|
||||
.into_iter()
|
||||
.filter_map(|model| match model {
|
||||
open_ai::Model::Custom { name, max_tokens } => {
|
||||
Some(language_model::provider::open_ai::AvailableModel { name, max_tokens })
|
||||
}
|
||||
_ => None,
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
});
|
||||
content.openai =
|
||||
Some(language_model::settings::OpenAiSettingsContent {
|
||||
api_url,
|
||||
low_speed_timeout_in_seconds,
|
||||
available_models,
|
||||
});
|
||||
Some(language_model::settings::OpenAiSettingsContent::Versioned(
|
||||
language_model::settings::VersionedOpenAiSettingsContent::V1(
|
||||
language_model::settings::OpenAiSettingsContentV1 {
|
||||
api_url,
|
||||
low_speed_timeout_in_seconds,
|
||||
available_models
|
||||
}
|
||||
)
|
||||
));
|
||||
}
|
||||
},
|
||||
),
|
||||
@@ -179,25 +198,25 @@ impl AssistantSettingsContent {
|
||||
.clone()
|
||||
.and_then(|provider| match provider {
|
||||
AssistantProviderContentV1::ZedDotDev { default_model } => {
|
||||
default_model.map(|model| AssistantDefaultModel {
|
||||
default_model.map(|model| LanguageModelSelection {
|
||||
provider: "zed.dev".to_string(),
|
||||
model: model.id().to_string(),
|
||||
})
|
||||
}
|
||||
AssistantProviderContentV1::OpenAi { default_model, .. } => {
|
||||
default_model.map(|model| AssistantDefaultModel {
|
||||
default_model.map(|model| LanguageModelSelection {
|
||||
provider: "openai".to_string(),
|
||||
model: model.id().to_string(),
|
||||
})
|
||||
}
|
||||
AssistantProviderContentV1::Anthropic { default_model, .. } => {
|
||||
default_model.map(|model| AssistantDefaultModel {
|
||||
default_model.map(|model| LanguageModelSelection {
|
||||
provider: "anthropic".to_string(),
|
||||
model: model.id().to_string(),
|
||||
})
|
||||
}
|
||||
AssistantProviderContentV1::Ollama { default_model, .. } => {
|
||||
default_model.map(|model| AssistantDefaultModel {
|
||||
default_model.map(|model| LanguageModelSelection {
|
||||
provider: "ollama".to_string(),
|
||||
model: model.id().to_string(),
|
||||
})
|
||||
@@ -212,7 +231,7 @@ impl AssistantSettingsContent {
|
||||
dock: settings.dock,
|
||||
default_width: settings.default_width,
|
||||
default_height: settings.default_height,
|
||||
default_model: Some(AssistantDefaultModel {
|
||||
default_model: Some(LanguageModelSelection {
|
||||
provider: "openai".to_string(),
|
||||
model: settings
|
||||
.default_open_ai_model
|
||||
@@ -249,9 +268,7 @@ impl AssistantSettingsContent {
|
||||
AssistantSettingsContent::Versioned(settings) => match settings {
|
||||
VersionedAssistantSettingsContent::V1(settings) => match provider.as_ref() {
|
||||
"zed.dev" => {
|
||||
settings.provider = Some(AssistantProviderContentV1::ZedDotDev {
|
||||
default_model: CloudModel::from_id(&model).ok(),
|
||||
});
|
||||
log::warn!("attempted to set zed.dev model on outdated settings");
|
||||
}
|
||||
"anthropic" => {
|
||||
let (api_url, low_speed_timeout_in_seconds) = match &settings.provider {
|
||||
@@ -308,7 +325,7 @@ impl AssistantSettingsContent {
|
||||
_ => {}
|
||||
},
|
||||
VersionedAssistantSettingsContent::V2(settings) => {
|
||||
settings.default_model = Some(AssistantDefaultModel { provider, model });
|
||||
settings.default_model = Some(LanguageModelSelection { provider, model });
|
||||
}
|
||||
},
|
||||
AssistantSettingsContent::Legacy(settings) => {
|
||||
@@ -365,11 +382,11 @@ pub struct AssistantSettingsContentV2 {
|
||||
/// Default: 320
|
||||
default_height: Option<f32>,
|
||||
/// The default model to use when creating new contexts.
|
||||
default_model: Option<AssistantDefaultModel>,
|
||||
default_model: Option<LanguageModelSelection>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
|
||||
pub struct AssistantDefaultModel {
|
||||
pub struct LanguageModelSelection {
|
||||
#[schemars(schema_with = "providers_schema")]
|
||||
pub provider: String,
|
||||
pub model: String,
|
||||
@@ -379,16 +396,18 @@ fn providers_schema(_: &mut schemars::gen::SchemaGenerator) -> schemars::schema:
|
||||
schemars::schema::SchemaObject {
|
||||
enum_values: Some(vec![
|
||||
"anthropic".into(),
|
||||
"google".into(),
|
||||
"ollama".into(),
|
||||
"openai".into(),
|
||||
"zed.dev".into(),
|
||||
"copilot_chat".into(),
|
||||
]),
|
||||
..Default::default()
|
||||
}
|
||||
.into()
|
||||
}
|
||||
|
||||
impl Default for AssistantDefaultModel {
|
||||
impl Default for LanguageModelSelection {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
provider: "openai".to_string(),
|
||||
@@ -421,7 +440,7 @@ pub struct AssistantSettingsContentV1 {
|
||||
default_height: Option<f32>,
|
||||
/// The provider of the assistant service.
|
||||
///
|
||||
/// This can either be the internal `zed.dev` service or an external `openai` service,
|
||||
/// This can be "openai", "anthropic", "ollama", "zed.dev"
|
||||
/// each with their respective default models and configurations.
|
||||
provider: Option<AssistantProviderContentV1>,
|
||||
}
|
||||
@@ -457,6 +476,8 @@ pub struct LegacyAssistantSettingsContent {
|
||||
impl Settings for AssistantSettings {
|
||||
const KEY: Option<&'static str> = Some("assistant");
|
||||
|
||||
const PRESERVED_KEYS: Option<&'static [&'static str]> = Some(&["version"]);
|
||||
|
||||
type FileContent = AssistantSettingsContent;
|
||||
|
||||
fn load(
|
||||
@@ -498,103 +519,70 @@ fn merge<T>(target: &mut T, value: Option<T>) {
|
||||
}
|
||||
}
|
||||
|
||||
// #[cfg(test)]
|
||||
// mod tests {
|
||||
// use gpui::{AppContext, UpdateGlobal};
|
||||
// use settings::SettingsStore;
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use gpui::{ReadGlobal, TestAppContext};
|
||||
|
||||
// use super::*;
|
||||
use super::*;
|
||||
|
||||
// #[gpui::test]
|
||||
// fn test_deserialize_assistant_settings(cx: &mut AppContext) {
|
||||
// let store = settings::SettingsStore::test(cx);
|
||||
// cx.set_global(store);
|
||||
#[gpui::test]
|
||||
async fn test_deserialize_assistant_settings_with_version(cx: &mut TestAppContext) {
|
||||
let fs = fs::FakeFs::new(cx.executor().clone());
|
||||
fs.create_dir(paths::settings_file().parent().unwrap())
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// // Settings default to gpt-4-turbo.
|
||||
// AssistantSettings::register(cx);
|
||||
// assert_eq!(
|
||||
// AssistantSettings::get_global(cx).provider,
|
||||
// AssistantProvider::OpenAi {
|
||||
// model: OpenAiModel::FourOmni,
|
||||
// api_url: open_ai::OPEN_AI_API_URL.into(),
|
||||
// low_speed_timeout_in_seconds: None,
|
||||
// available_models: Default::default(),
|
||||
// }
|
||||
// );
|
||||
cx.update(|cx| {
|
||||
let test_settings = settings::SettingsStore::test(cx);
|
||||
cx.set_global(test_settings);
|
||||
AssistantSettings::register(cx);
|
||||
});
|
||||
|
||||
// // Ensure backward-compatibility.
|
||||
// SettingsStore::update_global(cx, |store, cx| {
|
||||
// store
|
||||
// .set_user_settings(
|
||||
// r#"{
|
||||
// "assistant": {
|
||||
// "openai_api_url": "test-url",
|
||||
// }
|
||||
// }"#,
|
||||
// cx,
|
||||
// )
|
||||
// .unwrap();
|
||||
// });
|
||||
// assert_eq!(
|
||||
// AssistantSettings::get_global(cx).provider,
|
||||
// AssistantProvider::OpenAi {
|
||||
// model: OpenAiModel::FourOmni,
|
||||
// api_url: "test-url".into(),
|
||||
// low_speed_timeout_in_seconds: None,
|
||||
// available_models: Default::default(),
|
||||
// }
|
||||
// );
|
||||
// SettingsStore::update_global(cx, |store, cx| {
|
||||
// store
|
||||
// .set_user_settings(
|
||||
// r#"{
|
||||
// "assistant": {
|
||||
// "default_open_ai_model": "gpt-4-0613"
|
||||
// }
|
||||
// }"#,
|
||||
// cx,
|
||||
// )
|
||||
// .unwrap();
|
||||
// });
|
||||
// assert_eq!(
|
||||
// AssistantSettings::get_global(cx).provider,
|
||||
// AssistantProvider::OpenAi {
|
||||
// model: OpenAiModel::Four,
|
||||
// api_url: open_ai::OPEN_AI_API_URL.into(),
|
||||
// low_speed_timeout_in_seconds: None,
|
||||
// available_models: Default::default(),
|
||||
// }
|
||||
// );
|
||||
cx.update(|cx| {
|
||||
assert!(!AssistantSettings::get_global(cx).using_outdated_settings_version);
|
||||
assert_eq!(
|
||||
AssistantSettings::get_global(cx).default_model,
|
||||
LanguageModelSelection {
|
||||
provider: "openai".into(),
|
||||
model: "gpt-4o".into(),
|
||||
}
|
||||
);
|
||||
});
|
||||
|
||||
// // The new version supports setting a custom model when using zed.dev.
|
||||
// SettingsStore::update_global(cx, |store, cx| {
|
||||
// store
|
||||
// .set_user_settings(
|
||||
// r#"{
|
||||
// "assistant": {
|
||||
// "version": "1",
|
||||
// "provider": {
|
||||
// "name": "zed.dev",
|
||||
// "default_model": {
|
||||
// "custom": {
|
||||
// "name": "custom-provider"
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
// }"#,
|
||||
// cx,
|
||||
// )
|
||||
// .unwrap();
|
||||
// });
|
||||
// assert_eq!(
|
||||
// AssistantSettings::get_global(cx).provider,
|
||||
// AssistantProvider::ZedDotDev {
|
||||
// model: CloudModel::Custom {
|
||||
// name: "custom-provider".into(),
|
||||
// max_tokens: None
|
||||
// }
|
||||
// }
|
||||
// );
|
||||
// }
|
||||
// }
|
||||
cx.update(|cx| {
|
||||
settings::SettingsStore::global(cx).update_settings_file::<AssistantSettings>(
|
||||
fs.clone(),
|
||||
|settings, _| {
|
||||
*settings = AssistantSettingsContent::Versioned(
|
||||
VersionedAssistantSettingsContent::V2(AssistantSettingsContentV2 {
|
||||
default_model: Some(LanguageModelSelection {
|
||||
provider: "test-provider".into(),
|
||||
model: "gpt-99".into(),
|
||||
}),
|
||||
enabled: None,
|
||||
button: None,
|
||||
dock: None,
|
||||
default_width: None,
|
||||
default_height: None,
|
||||
}),
|
||||
)
|
||||
},
|
||||
);
|
||||
});

cx.run_until_parked();

let raw_settings_value = fs.load(paths::settings_file()).await.unwrap();
assert!(raw_settings_value.contains(r#""version": "2""#));

#[derive(Debug, Deserialize)]
struct AssistantSettingsTest {
assistant: AssistantSettingsContent,
}

let assistant_settings: AssistantSettingsTest =
serde_json_lenient::from_str(&raw_settings_value).unwrap();

assert!(!assistant_settings.assistant.is_version_outdated());
}
}
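For reference, a user-level settings.json under the version-2 schema exercised by this test would look roughly like the following (a sketch assembled from AssistantSettingsContentV2 and the assertions above; the provider/model values are illustrative):
{
  "assistant": {
    "version": "2",
    "default_model": {
      "provider": "openai",
      "model": "gpt-4o"
    }
  }
}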
crates/assistant/src/context_inspector.rs (new file, 219 lines)
@@ -0,0 +1,219 @@
use std::{ops::Range, sync::Arc};

use collections::{HashMap, HashSet};
use editor::{
display_map::{BlockDisposition, BlockProperties, BlockStyle, CustomBlockId},
Editor,
};
use gpui::{AppContext, Model, View};
use text::{Bias, ToOffset, ToPoint};
use ui::{
div, h_flex, px, Color, Element as _, ParentElement as _, Styled, ViewContext, WindowContext,
};

use crate::{Context, ResolvedWorkflowStep, WorkflowSuggestion};

type StepRange = Range<language::Anchor>;
|
||||
|
||||
struct DebugInfo {
|
||||
range: Range<editor::Anchor>,
|
||||
block_id: CustomBlockId,
|
||||
}
|
||||
|
||||
pub(crate) struct ContextInspector {
|
||||
active_debug_views: HashMap<Range<language::Anchor>, DebugInfo>,
|
||||
context: Model<Context>,
|
||||
editor: View<Editor>,
|
||||
}
|
||||
|
||||
impl ContextInspector {
|
||||
pub(crate) fn new(editor: View<Editor>, context: Model<Context>) -> Self {
|
||||
Self {
|
||||
editor,
|
||||
context,
|
||||
active_debug_views: Default::default(),
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn is_active(&self, range: &StepRange) -> bool {
|
||||
self.active_debug_views.contains_key(range)
|
||||
}
|
||||
|
||||
pub(crate) fn refresh(&mut self, range: &StepRange, cx: &mut WindowContext<'_>) {
|
||||
if self.deactivate_for(range, cx) {
|
||||
self.activate_for_step(range.clone(), cx);
|
||||
}
|
||||
}
|
||||
fn crease_content(
|
||||
context: &Model<Context>,
|
||||
range: StepRange,
|
||||
cx: &mut AppContext,
|
||||
) -> Option<Arc<str>> {
|
||||
use std::fmt::Write;
|
||||
let step = context.read(cx).workflow_step_for_range(range)?;
|
||||
let mut output = String::from("\n\n");
|
||||
match &step.status {
|
||||
crate::WorkflowStepStatus::Resolved(ResolvedWorkflowStep { title, suggestions }) => {
|
||||
writeln!(output, "Resolution:").ok()?;
|
||||
writeln!(output, " {title:?}").ok()?;
|
||||
for (buffer, suggestion_groups) in suggestions {
|
||||
let buffer = buffer.read(cx);
|
||||
let buffer_path = buffer
|
||||
.file()
|
||||
.and_then(|file| file.path().to_str())
|
||||
.unwrap_or("untitled");
|
||||
let snapshot = buffer.text_snapshot();
|
||||
writeln!(output, " {buffer_path}:").ok()?;
|
||||
for group in suggestion_groups {
|
||||
for suggestion in &group.suggestions {
|
||||
pretty_print_workflow_suggestion(&mut output, suggestion, &snapshot);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
crate::WorkflowStepStatus::Pending(_) => {
|
||||
writeln!(output, "Resolution: Pending").ok()?;
|
||||
}
|
||||
crate::WorkflowStepStatus::Error(error) => {
|
||||
writeln!(output, "Resolution: Error").ok()?;
|
||||
writeln!(output, "{error:?}").ok()?;
|
||||
}
|
||||
}
|
||||
|
||||
Some(output.into())
|
||||
}
|
||||
pub(crate) fn activate_for_step(&mut self, range: StepRange, cx: &mut WindowContext<'_>) {
|
||||
let text = Self::crease_content(&self.context, range.clone(), cx)
|
||||
.unwrap_or_else(|| Arc::from("Error fetching debug info"));
|
||||
self.editor.update(cx, |editor, cx| {
|
||||
let buffer = editor.buffer().read(cx).as_singleton()?;
|
||||
let snapshot = buffer.read(cx).text_snapshot();
|
||||
let start_offset = range.end.to_offset(&snapshot) + 1;
|
||||
let start_offset = snapshot.clip_offset(start_offset, Bias::Right);
|
||||
let text_len = text.len();
|
||||
buffer.update(cx, |this, cx| {
|
||||
this.edit([(start_offset..start_offset, text)], None, cx);
|
||||
});
|
||||
|
||||
let end_offset = start_offset + text_len;
|
||||
let multibuffer_snapshot = editor.buffer().read(cx).snapshot(cx);
|
||||
let anchor_before = multibuffer_snapshot.anchor_after(start_offset);
|
||||
let anchor_after = multibuffer_snapshot.anchor_before(end_offset);
|
||||
|
||||
let block_id = editor
|
||||
.insert_blocks(
|
||||
[BlockProperties {
|
||||
position: anchor_after,
|
||||
height: 0,
|
||||
style: BlockStyle::Sticky,
|
||||
render: Box::new(move |cx| {
|
||||
div()
|
||||
.w_full()
|
||||
.px(cx.gutter_dimensions.full_width())
|
||||
.child(h_flex().h(px(1.)).bg(Color::Warning.color(cx)))
|
||||
.into_any()
|
||||
}),
|
||||
disposition: BlockDisposition::Below,
|
||||
priority: 0,
|
||||
}],
|
||||
None,
|
||||
cx,
|
||||
)
|
||||
.into_iter()
|
||||
.next()?;
|
||||
let info = DebugInfo {
|
||||
range: anchor_before..anchor_after,
|
||||
block_id,
|
||||
};
|
||||
self.active_debug_views.insert(range, info);
|
||||
Some(())
|
||||
});
|
||||
}
|
||||
|
||||
fn deactivate_impl(editor: &mut Editor, debug_data: DebugInfo, cx: &mut ViewContext<Editor>) {
|
||||
editor.remove_blocks(HashSet::from_iter([debug_data.block_id]), None, cx);
|
||||
editor.edit([(debug_data.range, Arc::<str>::default())], cx)
|
||||
}
|
||||
pub(crate) fn deactivate_for(&mut self, range: &StepRange, cx: &mut WindowContext<'_>) -> bool {
|
||||
if let Some(debug_data) = self.active_debug_views.remove(range) {
|
||||
self.editor.update(cx, |this, cx| {
|
||||
Self::deactivate_impl(this, debug_data, cx);
|
||||
});
|
||||
true
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn deactivate(&mut self, cx: &mut WindowContext<'_>) {
|
||||
let steps_to_disable = std::mem::take(&mut self.active_debug_views);
|
||||
|
||||
self.editor.update(cx, move |editor, cx| {
|
||||
for (_, debug_data) in steps_to_disable {
|
||||
Self::deactivate_impl(editor, debug_data, cx);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
fn pretty_print_anchor(
|
||||
out: &mut String,
|
||||
anchor: &language::Anchor,
|
||||
snapshot: &text::BufferSnapshot,
|
||||
) {
|
||||
use std::fmt::Write;
|
||||
let point = anchor.to_point(snapshot);
|
||||
write!(out, "{}:{}", point.row, point.column).ok();
|
||||
}
|
||||
fn pretty_print_range(
|
||||
out: &mut String,
|
||||
range: &Range<language::Anchor>,
|
||||
snapshot: &text::BufferSnapshot,
|
||||
) {
|
||||
use std::fmt::Write;
|
||||
write!(out, " Range: ").ok();
|
||||
pretty_print_anchor(out, &range.start, snapshot);
|
||||
write!(out, "..").ok();
|
||||
pretty_print_anchor(out, &range.end, snapshot);
|
||||
}
|
||||
|
||||
fn pretty_print_workflow_suggestion(
|
||||
out: &mut String,
|
||||
suggestion: &WorkflowSuggestion,
|
||||
snapshot: &text::BufferSnapshot,
|
||||
) {
|
||||
use std::fmt::Write;
|
||||
let (range, description, position) = match suggestion {
|
||||
WorkflowSuggestion::Update { range, description } => (Some(range), Some(description), None),
|
||||
WorkflowSuggestion::CreateFile { description } => (None, Some(description), None),
|
||||
WorkflowSuggestion::AppendChild {
|
||||
position,
|
||||
description,
|
||||
}
|
||||
| WorkflowSuggestion::InsertSiblingBefore {
|
||||
position,
|
||||
description,
|
||||
}
|
||||
| WorkflowSuggestion::InsertSiblingAfter {
|
||||
position,
|
||||
description,
|
||||
}
|
||||
| WorkflowSuggestion::PrependChild {
|
||||
position,
|
||||
description,
|
||||
} => (None, Some(description), Some(position)),
|
||||
|
||||
WorkflowSuggestion::Delete { range } => (Some(range), None, None),
|
||||
};
|
||||
if let Some(description) = description {
|
||||
writeln!(out, " Description: {description}").ok();
|
||||
}
|
||||
if let Some(range) = range {
|
||||
pretty_print_range(out, range, snapshot);
|
||||
}
|
||||
if let Some(position) = position {
|
||||
write!(out, " Position: ").ok();
|
||||
pretty_print_anchor(out, position, snapshot);
|
||||
write!(out, "\n").ok();
|
||||
}
|
||||
write!(out, "\n").ok();
|
||||
}
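Taken together, the crease text appended after a resolved step ends up looking roughly like this (format taken from the writeln! calls above; the title, path, and positions are illustrative):
Resolution:
 "Add error handling to the parser"
 src/parser.rs:
 Description: wrap the read loop in a Result
 Range: 12:0..48:1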
@@ -1,6 +1,6 @@
|
||||
use crate::{
|
||||
Context, ContextEvent, ContextId, ContextOperation, ContextVersion, SavedContext,
|
||||
SavedContextMetadata,
|
||||
prompts::PromptBuilder, Context, ContextEvent, ContextId, ContextOperation, ContextVersion,
|
||||
SavedContext, SavedContextMetadata,
|
||||
};
|
||||
use anyhow::{anyhow, Context as _, Result};
|
||||
use client::{proto, telemetry::Telemetry, Client, TypedEnvelope};
|
||||
@@ -8,7 +8,9 @@ use clock::ReplicaId;
|
||||
use fs::Fs;
|
||||
use futures::StreamExt;
|
||||
use fuzzy::StringMatchCandidate;
|
||||
use gpui::{AppContext, AsyncAppContext, Context as _, Model, ModelContext, Task, WeakModel};
|
||||
use gpui::{
|
||||
AppContext, AsyncAppContext, Context as _, EventEmitter, Model, ModelContext, Task, WeakModel,
|
||||
};
|
||||
use language::LanguageRegistry;
|
||||
use paths::contexts_dir;
|
||||
use project::Project;
|
||||
@@ -26,6 +28,7 @@ use util::{ResultExt, TryFutureExt};
|
||||
pub fn init(client: &Arc<Client>) {
|
||||
client.add_model_message_handler(ContextStore::handle_advertise_contexts);
|
||||
client.add_model_request_handler(ContextStore::handle_open_context);
|
||||
client.add_model_request_handler(ContextStore::handle_create_context);
|
||||
client.add_model_message_handler(ContextStore::handle_update_context);
|
||||
client.add_model_request_handler(ContextStore::handle_synchronize_contexts);
|
||||
}
|
||||
@@ -49,8 +52,15 @@ pub struct ContextStore {
|
||||
project_is_shared: bool,
|
||||
client_subscription: Option<client::Subscription>,
|
||||
_project_subscriptions: Vec<gpui::Subscription>,
|
||||
prompt_builder: Arc<PromptBuilder>,
|
||||
}
|
||||
|
||||
pub enum ContextStoreEvent {
|
||||
ContextCreated(ContextId),
|
||||
}
|
||||
|
||||
impl EventEmitter<ContextStoreEvent> for ContextStore {}
|
||||
|
||||
enum ContextHandle {
|
||||
Weak(WeakModel<Context>),
|
||||
Strong(Model<Context>),
|
||||
@@ -73,7 +83,11 @@ impl ContextHandle {
|
||||
}
|
||||
|
||||
impl ContextStore {
|
||||
pub fn new(project: Model<Project>, cx: &mut AppContext) -> Task<Result<Model<Self>>> {
|
||||
pub fn new(
|
||||
project: Model<Project>,
|
||||
prompt_builder: Arc<PromptBuilder>,
|
||||
cx: &mut AppContext,
|
||||
) -> Task<Result<Model<Self>>> {
|
||||
let fs = project.read(cx).fs().clone();
|
||||
let languages = project.read(cx).languages().clone();
|
||||
let telemetry = project.read(cx).client().telemetry().clone();
|
||||
@@ -108,6 +122,7 @@ impl ContextStore {
|
||||
project_is_shared: false,
|
||||
client: project.read(cx).client(),
|
||||
project: project.clone(),
|
||||
prompt_builder,
|
||||
};
|
||||
this.handle_project_changed(project, cx);
|
||||
this.synchronize_contexts(cx);
|
||||
@@ -169,6 +184,34 @@ impl ContextStore {
|
||||
})
|
||||
}
|
||||
|
||||
async fn handle_create_context(
|
||||
this: Model<Self>,
|
||||
_: TypedEnvelope<proto::CreateContext>,
|
||||
mut cx: AsyncAppContext,
|
||||
) -> Result<proto::CreateContextResponse> {
|
||||
let (context_id, operations) = this.update(&mut cx, |this, cx| {
|
||||
if this.project.read(cx).is_remote() {
|
||||
return Err(anyhow!("can only create contexts as the host"));
|
||||
}
|
||||
|
||||
let context = this.create(cx);
|
||||
let context_id = context.read(cx).id().clone();
|
||||
cx.emit(ContextStoreEvent::ContextCreated(context_id.clone()));
|
||||
|
||||
anyhow::Ok((
|
||||
context_id,
|
||||
context
|
||||
.read(cx)
|
||||
.serialize_ops(&ContextVersion::default(), cx),
|
||||
))
|
||||
})??;
|
||||
let operations = operations.await;
|
||||
Ok(proto::CreateContextResponse {
|
||||
context_id: context_id.to_proto(),
|
||||
context: Some(proto::Context { operations }),
|
||||
})
|
||||
}
|
||||
|
||||
async fn handle_update_context(
|
||||
this: Model<Self>,
|
||||
envelope: TypedEnvelope<proto::UpdateContext>,
|
||||
@@ -293,12 +336,76 @@ impl ContextStore {
|
||||
|
||||
pub fn create(&mut self, cx: &mut ModelContext<Self>) -> Model<Context> {
|
||||
let context = cx.new_model(|cx| {
|
||||
Context::local(self.languages.clone(), Some(self.telemetry.clone()), cx)
|
||||
Context::local(
|
||||
self.languages.clone(),
|
||||
Some(self.project.clone()),
|
||||
Some(self.telemetry.clone()),
|
||||
self.prompt_builder.clone(),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
self.register_context(&context, cx);
|
||||
context
|
||||
}
|
||||
|
||||
pub fn create_remote_context(
|
||||
&mut self,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Task<Result<Model<Context>>> {
|
||||
let project = self.project.read(cx);
|
||||
let Some(project_id) = project.remote_id() else {
|
||||
return Task::ready(Err(anyhow!("project was not remote")));
|
||||
};
|
||||
if project.is_local() {
|
||||
return Task::ready(Err(anyhow!("cannot create remote contexts as the host")));
|
||||
}
|
||||
|
||||
let replica_id = project.replica_id();
|
||||
let capability = project.capability();
|
||||
let language_registry = self.languages.clone();
|
||||
let project = self.project.clone();
|
||||
let telemetry = self.telemetry.clone();
|
||||
let prompt_builder = self.prompt_builder.clone();
|
||||
let request = self.client.request(proto::CreateContext { project_id });
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
let response = request.await?;
|
||||
let context_id = ContextId::from_proto(response.context_id);
|
||||
let context_proto = response.context.context("invalid context")?;
|
||||
let context = cx.new_model(|cx| {
|
||||
Context::new(
|
||||
context_id.clone(),
|
||||
replica_id,
|
||||
capability,
|
||||
language_registry,
|
||||
prompt_builder,
|
||||
Some(project),
|
||||
Some(telemetry),
|
||||
cx,
|
||||
)
|
||||
})?;
|
||||
let operations = cx
|
||||
.background_executor()
|
||||
.spawn(async move {
|
||||
context_proto
|
||||
.operations
|
||||
.into_iter()
|
||||
.map(|op| ContextOperation::from_proto(op))
|
||||
.collect::<Result<Vec<_>>>()
|
||||
})
|
||||
.await?;
|
||||
context.update(&mut cx, |context, cx| context.apply_ops(operations, cx))??;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
if let Some(existing_context) = this.loaded_context_for_id(&context_id, cx) {
|
||||
existing_context
|
||||
} else {
|
||||
this.register_context(&context, cx);
|
||||
this.synchronize_contexts(cx);
|
||||
context
|
||||
}
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
pub fn open_local_context(
|
||||
&mut self,
|
||||
path: PathBuf,
|
||||
@@ -310,6 +417,7 @@ impl ContextStore {
|
||||
|
||||
let fs = self.fs.clone();
|
||||
let languages = self.languages.clone();
|
||||
let project = self.project.clone();
|
||||
let telemetry = self.telemetry.clone();
|
||||
let load = cx.background_executor().spawn({
|
||||
let path = path.clone();
|
||||
@@ -318,11 +426,20 @@ impl ContextStore {
|
||||
SavedContext::from_json(&saved_context)
|
||||
}
|
||||
});
|
||||
let prompt_builder = self.prompt_builder.clone();
|
||||
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
let saved_context = load.await?;
|
||||
let context = cx.new_model(|cx| {
|
||||
Context::deserialize(saved_context, path.clone(), languages, Some(telemetry), cx)
|
||||
Context::deserialize(
|
||||
saved_context,
|
||||
path.clone(),
|
||||
languages,
|
||||
prompt_builder,
|
||||
Some(project),
|
||||
Some(telemetry),
|
||||
cx,
|
||||
)
|
||||
})?;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
if let Some(existing_context) = this.loaded_context_for_path(&path, cx) {
|
||||
@@ -346,7 +463,11 @@ impl ContextStore {
|
||||
})
|
||||
}
|
||||
|
||||
fn loaded_context_for_id(&self, id: &ContextId, cx: &AppContext) -> Option<Model<Context>> {
|
||||
pub(super) fn loaded_context_for_id(
|
||||
&self,
|
||||
id: &ContextId,
|
||||
cx: &AppContext,
|
||||
) -> Option<Model<Context>> {
|
||||
self.contexts.iter().find_map(|context| {
|
||||
let context = context.upgrade()?;
|
||||
if context.read(cx).id() == id {
|
||||
@@ -377,11 +498,13 @@ impl ContextStore {
|
||||
let replica_id = project.replica_id();
|
||||
let capability = project.capability();
|
||||
let language_registry = self.languages.clone();
|
||||
let project = self.project.clone();
|
||||
let telemetry = self.telemetry.clone();
|
||||
let request = self.client.request(proto::OpenContext {
|
||||
project_id,
|
||||
context_id: context_id.to_proto(),
|
||||
});
|
||||
let prompt_builder = self.prompt_builder.clone();
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
let response = request.await?;
|
||||
let context_proto = response.context.context("invalid context")?;
|
||||
@@ -391,6 +514,8 @@ impl ContextStore {
|
||||
replica_id,
|
||||
capability,
|
||||
language_registry,
|
||||
prompt_builder,
|
||||
Some(project),
|
||||
Some(telemetry),
|
||||
cx,
|
||||
)
|
||||
|
||||
@@ -1,20 +1,46 @@
|
||||
use std::sync::Arc;
|
||||
use feature_flags::ZedPro;
|
||||
use gpui::DismissEvent;
|
||||
use language_model::{LanguageModel, LanguageModelAvailability, LanguageModelRegistry};
|
||||
use proto::Plan;
|
||||
|
||||
use crate::{assistant_settings::AssistantSettings, LanguageModelCompletionProvider};
|
||||
use std::sync::Arc;
|
||||
use ui::ListItemSpacing;
|
||||
|
||||
use crate::assistant_settings::AssistantSettings;
|
||||
use crate::ShowConfiguration;
|
||||
use fs::Fs;
|
||||
use gpui::Action;
|
||||
use gpui::SharedString;
|
||||
use language_model::LanguageModelRegistry;
|
||||
use gpui::Task;
|
||||
use picker::{Picker, PickerDelegate};
|
||||
use settings::update_settings_file;
|
||||
use ui::{prelude::*, ContextMenu, PopoverMenu, PopoverMenuHandle, PopoverTrigger};
|
||||
use ui::{prelude::*, ListItem, PopoverMenu, PopoverMenuHandle, PopoverTrigger};
|
||||
|
||||
const TRY_ZED_PRO_URL: &str = "https://zed.dev/pro";
|
||||
|
||||
#[derive(IntoElement)]
|
||||
pub struct ModelSelector<T: PopoverTrigger> {
|
||||
handle: Option<PopoverMenuHandle<ContextMenu>>,
|
||||
handle: Option<PopoverMenuHandle<Picker<ModelPickerDelegate>>>,
|
||||
fs: Arc<dyn Fs>,
|
||||
trigger: T,
|
||||
info_text: Option<SharedString>,
|
||||
}
|
||||
|
||||
pub struct ModelPickerDelegate {
|
||||
fs: Arc<dyn Fs>,
|
||||
all_models: Vec<ModelInfo>,
|
||||
filtered_models: Vec<ModelInfo>,
|
||||
selected_index: usize,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
struct ModelInfo {
|
||||
model: Arc<dyn LanguageModel>,
|
||||
provider_icon: IconName,
|
||||
availability: LanguageModelAvailability,
|
||||
is_selected: bool,
|
||||
}
|
||||
|
||||
impl<T: PopoverTrigger> ModelSelector<T> {
|
||||
pub fn new(fs: Arc<dyn Fs>, trigger: T) -> Self {
|
||||
ModelSelector {
|
||||
@@ -25,7 +51,7 @@ impl<T: PopoverTrigger> ModelSelector<T> {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn with_handle(mut self, handle: PopoverMenuHandle<ContextMenu>) -> Self {
|
||||
pub fn with_handle(mut self, handle: PopoverMenuHandle<Picker<ModelPickerDelegate>>) -> Self {
|
||||
self.handle = Some(handle);
|
||||
self
|
||||
}
|
||||
@@ -36,113 +62,238 @@ impl<T: PopoverTrigger> ModelSelector<T> {
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: PopoverTrigger> RenderOnce for ModelSelector<T> {
|
||||
fn render(self, _: &mut WindowContext) -> impl IntoElement {
|
||||
let mut menu = PopoverMenu::new("model-switcher");
|
||||
if let Some(handle) = self.handle {
|
||||
menu = menu.with_handle(handle);
|
||||
}
|
||||
impl PickerDelegate for ModelPickerDelegate {
|
||||
type ListItem = ListItem;
|
||||
|
||||
let info_text = self.info_text.clone();
|
||||
fn match_count(&self) -> usize {
|
||||
self.filtered_models.len()
|
||||
}
|
||||
|
||||
menu.menu(move |cx| {
|
||||
ContextMenu::build(cx, |mut menu, cx| {
|
||||
if let Some(info_text) = info_text.clone() {
|
||||
menu = menu
|
||||
.custom_row(move |_cx| {
|
||||
Label::new(info_text.clone())
|
||||
.color(Color::Muted)
|
||||
.into_any_element()
|
||||
})
|
||||
.separator();
|
||||
}
|
||||
fn selected_index(&self) -> usize {
|
||||
self.selected_index
|
||||
}
|
||||
|
||||
for (index, provider) in LanguageModelRegistry::global(cx)
|
||||
.read(cx)
|
||||
.providers()
|
||||
.enumerate()
|
||||
{
|
||||
if index > 0 {
|
||||
menu = menu.separator();
|
||||
fn set_selected_index(&mut self, ix: usize, cx: &mut ViewContext<Picker<Self>>) {
|
||||
self.selected_index = ix.min(self.filtered_models.len().saturating_sub(1));
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
fn placeholder_text(&self, _cx: &mut WindowContext) -> Arc<str> {
|
||||
"Select a model...".into()
|
||||
}
|
||||
|
||||
fn update_matches(&mut self, query: String, cx: &mut ViewContext<Picker<Self>>) -> Task<()> {
|
||||
let all_models = self.all_models.clone();
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
let filtered_models = cx
|
||||
.background_executor()
|
||||
.spawn(async move {
|
||||
if query.is_empty() {
|
||||
all_models
|
||||
} else {
|
||||
all_models
|
||||
.into_iter()
|
||||
.filter(|model_info| {
|
||||
model_info
|
||||
.model
|
||||
.name()
|
||||
.0
|
||||
.to_lowercase()
|
||||
.contains(&query.to_lowercase())
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
menu = menu.header(provider.name().0);
|
||||
})
|
||||
.await;
|
||||
|
||||
let available_models = provider.provided_models(cx);
|
||||
if available_models.is_empty() {
|
||||
menu = menu.custom_entry(
|
||||
{
|
||||
move |_| {
|
||||
h_flex()
|
||||
.w_full()
|
||||
.gap_1()
|
||||
.child(Icon::new(IconName::Settings))
|
||||
.child(Label::new("Configure"))
|
||||
.into_any()
|
||||
}
|
||||
},
|
||||
{
|
||||
let provider = provider.id();
|
||||
move |cx| {
|
||||
LanguageModelCompletionProvider::global(cx).update(
|
||||
cx,
|
||||
|completion_provider, cx| {
|
||||
completion_provider
|
||||
.set_active_provider(provider.clone(), cx)
|
||||
},
|
||||
);
|
||||
}
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
let selected_model = LanguageModelCompletionProvider::read_global(cx)
|
||||
.active_model()
|
||||
.map(|m| m.id());
|
||||
let selected_provider = LanguageModelCompletionProvider::read_global(cx)
|
||||
.active_provider()
|
||||
.map(|m| m.id());
|
||||
|
||||
for available_model in available_models {
|
||||
menu = menu.custom_entry(
|
||||
{
|
||||
let id = available_model.id();
|
||||
let provider_id = available_model.provider_id();
|
||||
let model_name = available_model.name().0.clone();
|
||||
let selected_model = selected_model.clone();
|
||||
let selected_provider = selected_provider.clone();
|
||||
move |_| {
|
||||
h_flex()
|
||||
.w_full()
|
||||
.justify_between()
|
||||
.child(Label::new(model_name.clone()))
|
||||
.when(
|
||||
selected_model.as_ref() == Some(&id)
|
||||
&& selected_provider.as_ref() == Some(&provider_id),
|
||||
|this| this.child(Icon::new(IconName::Check)),
|
||||
)
|
||||
.into_any()
|
||||
}
|
||||
},
|
||||
{
|
||||
let fs = self.fs.clone();
|
||||
let model = available_model.clone();
|
||||
move |cx| {
|
||||
let model = model.clone();
|
||||
update_settings_file::<AssistantSettings>(
|
||||
fs.clone(),
|
||||
cx,
|
||||
move |settings, _| settings.set_model(model),
|
||||
);
|
||||
}
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
menu
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.delegate.filtered_models = filtered_models;
|
||||
this.delegate.set_selected_index(0, cx);
|
||||
cx.notify();
|
||||
})
|
||||
.into()
|
||||
.ok();
|
||||
})
|
||||
.trigger(self.trigger)
|
||||
.attach(gpui::AnchorCorner::BottomLeft)
|
||||
}
|
||||
|
||||
fn confirm(&mut self, _secondary: bool, cx: &mut ViewContext<Picker<Self>>) {
|
||||
if let Some(model_info) = self.filtered_models.get(self.selected_index) {
|
||||
let model = model_info.model.clone();
|
||||
update_settings_file::<AssistantSettings>(self.fs.clone(), cx, move |settings, _| {
|
||||
settings.set_model(model.clone())
|
||||
});
|
||||
|
||||
// Update the selection status
|
||||
let selected_model_id = model_info.model.id();
|
||||
let selected_provider_id = model_info.model.provider_id();
|
||||
for model in &mut self.all_models {
|
||||
model.is_selected = model.model.id() == selected_model_id
|
||||
&& model.model.provider_id() == selected_provider_id;
|
||||
}
|
||||
for model in &mut self.filtered_models {
|
||||
model.is_selected = model.model.id() == selected_model_id
|
||||
&& model.model.provider_id() == selected_provider_id;
|
||||
}
|
||||
|
||||
cx.emit(DismissEvent);
|
||||
}
|
||||
}
|
||||
|
||||
fn dismissed(&mut self, _cx: &mut ViewContext<Picker<Self>>) {}
|
||||
|
||||
fn render_match(
|
||||
&self,
|
||||
ix: usize,
|
||||
selected: bool,
|
||||
cx: &mut ViewContext<Picker<Self>>,
|
||||
) -> Option<Self::ListItem> {
|
||||
use feature_flags::FeatureFlagAppExt;
|
||||
let model_info = self.filtered_models.get(ix)?;
|
||||
let show_badges = cx.has_flag::<ZedPro>();
|
||||
Some(
|
||||
ListItem::new(ix)
|
||||
.inset(true)
|
||||
.spacing(ListItemSpacing::Sparse)
|
||||
.selected(selected)
|
||||
.start_slot(
|
||||
div().pr_1().child(
|
||||
Icon::new(model_info.provider_icon)
|
||||
.color(Color::Muted)
|
||||
.size(IconSize::Medium),
|
||||
),
|
||||
)
|
||||
.child(
|
||||
h_flex()
|
||||
.w_full()
|
||||
.justify_between()
|
||||
.font_buffer(cx)
|
||||
.min_w(px(200.))
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_2()
|
||||
.child(Label::new(model_info.model.name().0.clone()))
|
||||
.children(match model_info.availability {
|
||||
LanguageModelAvailability::Public => None,
|
||||
LanguageModelAvailability::RequiresPlan(Plan::Free) => None,
|
||||
LanguageModelAvailability::RequiresPlan(Plan::ZedPro) => {
|
||||
show_badges.then(|| {
|
||||
Label::new("Pro")
|
||||
.size(LabelSize::XSmall)
|
||||
.color(Color::Muted)
|
||||
})
|
||||
}
|
||||
}),
|
||||
)
|
||||
.child(div().when(model_info.is_selected, |this| {
|
||||
this.child(
|
||||
Icon::new(IconName::Check)
|
||||
.color(Color::Accent)
|
||||
.size(IconSize::Small),
|
||||
)
|
||||
})),
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
fn render_footer(&self, cx: &mut ViewContext<Picker<Self>>) -> Option<gpui::AnyElement> {
|
||||
use feature_flags::FeatureFlagAppExt;
|
||||
|
||||
let plan = proto::Plan::ZedPro;
|
||||
let is_trial = false;
|
||||
|
||||
Some(
|
||||
h_flex()
|
||||
.w_full()
|
||||
.border_t_1()
|
||||
.border_color(cx.theme().colors().border)
|
||||
.p_1()
|
||||
.gap_4()
|
||||
.justify_between()
|
||||
.when(cx.has_flag::<ZedPro>(), |this| {
|
||||
this.child(match plan {
|
||||
// Already a zed pro subscriber
|
||||
Plan::ZedPro => Button::new("zed-pro", "Zed Pro")
|
||||
.icon(IconName::ZedAssistant)
|
||||
.icon_size(IconSize::Small)
|
||||
.icon_color(Color::Muted)
|
||||
.icon_position(IconPosition::Start)
|
||||
.on_click(|_, cx| {
|
||||
cx.dispatch_action(Box::new(zed_actions::OpenAccountSettings))
|
||||
}),
|
||||
// Free user
|
||||
Plan::Free => Button::new(
|
||||
"try-pro",
|
||||
if is_trial {
|
||||
"Upgrade to Pro"
|
||||
} else {
|
||||
"Try Pro"
|
||||
},
|
||||
)
|
||||
.on_click(|_, cx| cx.open_url(TRY_ZED_PRO_URL)),
|
||||
})
|
||||
})
|
||||
.child(
|
||||
Button::new("configure", "Configure")
|
||||
.icon(IconName::Settings)
|
||||
.icon_size(IconSize::Small)
|
||||
.icon_color(Color::Muted)
|
||||
.icon_position(IconPosition::Start)
|
||||
.on_click(|_, cx| {
|
||||
cx.dispatch_action(ShowConfiguration.boxed_clone());
|
||||
}),
|
||||
)
|
||||
.into_any(),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: PopoverTrigger> RenderOnce for ModelSelector<T> {
|
||||
fn render(self, cx: &mut WindowContext) -> impl IntoElement {
|
||||
let selected_provider = LanguageModelRegistry::read_global(cx)
|
||||
.active_provider()
|
||||
.map(|m| m.id());
|
||||
let selected_model = LanguageModelRegistry::read_global(cx)
|
||||
.active_model()
|
||||
.map(|m| m.id());
|
||||
|
||||
let all_models = LanguageModelRegistry::global(cx)
|
||||
.read(cx)
|
||||
.providers()
|
||||
.iter()
|
||||
.flat_map(|provider| {
|
||||
let provider_id = provider.id();
|
||||
let provider_icon = provider.icon();
|
||||
let selected_model = selected_model.clone();
|
||||
let selected_provider = selected_provider.clone();
|
||||
|
||||
provider.provided_models(cx).into_iter().map(move |model| {
|
||||
let model = model.clone();
|
||||
|
||||
ModelInfo {
|
||||
model: model.clone(),
|
||||
provider_icon,
|
||||
availability: model.availability(),
|
||||
is_selected: selected_model.as_ref() == Some(&model.id())
|
||||
&& selected_provider.as_ref() == Some(&provider_id),
|
||||
}
|
||||
})
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let delegate = ModelPickerDelegate {
|
||||
fs: self.fs.clone(),
|
||||
all_models: all_models.clone(),
|
||||
filtered_models: all_models,
|
||||
selected_index: 0,
|
||||
};
|
||||
|
||||
let picker_view = cx.new_view(|cx| {
|
||||
let picker = Picker::uniform_list(delegate, cx).max_height(Some(rems(20.).into()));
|
||||
picker
|
||||
});
|
||||
|
||||
PopoverMenu::new("model-switcher")
|
||||
.menu(move |_cx| Some(picker_view.clone()))
|
||||
.trigger(self.trigger)
|
||||
.attach(gpui::AnchorCorner::BottomLeft)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,9 +1,7 @@
|
||||
use crate::{
|
||||
slash_command::SlashCommandCompletionProvider, AssistantPanel, InlineAssist, InlineAssistant,
|
||||
LanguageModelCompletionProvider,
|
||||
};
|
||||
use anyhow::{anyhow, Result};
|
||||
use assets::Assets;
|
||||
use chrono::{DateTime, Utc};
|
||||
use collections::{HashMap, HashSet};
|
||||
use editor::{actions::Tab, CurrentLineHighlight, Editor, EditorElement, EditorEvent, EditorStyle};
|
||||
@@ -13,13 +11,18 @@ use futures::{
|
||||
};
|
||||
use fuzzy::StringMatchCandidate;
|
||||
use gpui::{
|
||||
actions, point, size, transparent_black, AppContext, AssetSource, BackgroundExecutor, Bounds,
|
||||
EventEmitter, Global, HighlightStyle, PromptLevel, ReadGlobal, Subscription, Task, TextStyle,
|
||||
actions, point, size, transparent_black, AppContext, BackgroundExecutor, Bounds, EventEmitter,
|
||||
Global, HighlightStyle, PromptLevel, ReadGlobal, Subscription, Task, TextStyle,
|
||||
TitlebarOptions, UpdateGlobal, View, WindowBounds, WindowHandle, WindowOptions,
|
||||
};
|
||||
use heed::{types::SerdeBincode, Database, RoTxn};
|
||||
use heed::{
|
||||
types::{SerdeBincode, SerdeJson, Str},
|
||||
Database, RoTxn,
|
||||
};
|
||||
use language::{language_settings::SoftWrap, Buffer, LanguageRegistry};
|
||||
use language_model::{LanguageModelRequest, LanguageModelRequestMessage, Role};
|
||||
use language_model::{
|
||||
LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, Role,
|
||||
};
|
||||
use parking_lot::RwLock;
|
||||
use picker::{Picker, PickerDelegate};
|
||||
use rope::Rope;
|
||||
@@ -32,6 +35,7 @@ use std::{
|
||||
sync::{atomic::AtomicBool, Arc},
|
||||
time::Duration,
|
||||
};
|
||||
use text::LineEnding;
|
||||
use theme::ThemeSettings;
|
||||
use ui::{
|
||||
div, prelude::*, IconButtonShape, ListItem, ListItemSpacing, ParentElement, Render,
|
||||
@@ -62,6 +66,11 @@ pub fn init(cx: &mut AppContext) {
|
||||
cx.set_global(GlobalPromptStore(prompt_store_future))
|
||||
}
|
||||
|
||||
const BUILT_IN_TOOLTIP_TEXT: &'static str = concat!(
|
||||
"This prompt supports special functionality.\n",
|
||||
"It's read-only, but you can remove it from your default prompt."
|
||||
);
|
||||
|
||||
/// This function opens a new prompt library window if one doesn't exist already.
|
||||
/// If one exists, it brings it to the foreground.
|
||||
///
|
||||
@@ -229,15 +238,29 @@ impl PickerDelegate for PromptPickerDelegate {
|
||||
.end_hover_slot(
|
||||
h_flex()
|
||||
.gap_2()
|
||||
.child(
|
||||
.child(if prompt_id.is_built_in() {
|
||||
div()
|
||||
.id("built-in-prompt")
|
||||
.child(Icon::new(IconName::FileLock).color(Color::Muted))
|
||||
.tooltip(move |cx| {
|
||||
Tooltip::with_meta(
|
||||
"Built-in prompt",
|
||||
None,
|
||||
BUILT_IN_TOOLTIP_TEXT,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.into_any()
|
||||
} else {
|
||||
IconButton::new("delete-prompt", IconName::Trash)
|
||||
.icon_color(Color::Muted)
|
||||
.shape(IconButtonShape::Square)
|
||||
.tooltip(move |cx| Tooltip::text("Delete Prompt", cx))
|
||||
.on_click(cx.listener(move |_, _, cx| {
|
||||
cx.emit(PromptPickerEvent::Deleted { prompt_id })
|
||||
})),
|
||||
)
|
||||
}))
|
||||
.into_any_element()
|
||||
})
|
||||
.child(
|
||||
IconButton::new("toggle-default-prompt", IconName::Sparkle)
|
||||
.selected(default)
|
||||
@@ -350,6 +373,10 @@ impl PromptLibrary {
|
||||
pub fn save_prompt(&mut self, prompt_id: PromptId, cx: &mut ViewContext<Self>) {
|
||||
const SAVE_THROTTLE: Duration = Duration::from_millis(500);
|
||||
|
||||
if prompt_id.is_built_in() {
|
||||
return;
|
||||
}
|
||||
|
||||
let prompt_metadata = self.store.metadata(prompt_id).unwrap();
|
||||
let prompt_editor = self.prompt_editors.get_mut(&prompt_id).unwrap();
|
||||
let title = prompt_editor.title_editor.read(cx).text(cx);
|
||||
@@ -459,6 +486,7 @@ impl PromptLibrary {
|
||||
let mut editor = Editor::auto_width(cx);
|
||||
editor.set_placeholder_text("Untitled", cx);
|
||||
editor.set_text(prompt_metadata.title.unwrap_or_default(), cx);
|
||||
editor.set_read_only(prompt_id.is_built_in());
|
||||
editor
|
||||
});
|
||||
let body_editor = cx.new_view(|cx| {
|
||||
@@ -470,6 +498,7 @@ impl PromptLibrary {
|
||||
});
|
||||
|
||||
let mut editor = Editor::for_buffer(buffer, None, cx);
|
||||
editor.set_read_only(prompt_id.is_built_in());
|
||||
editor.set_soft_wrap_mode(SoftWrap::EditorWidth, cx);
|
||||
editor.set_show_gutter(false, cx);
|
||||
editor.set_show_wrap_guides(false, cx);
|
||||
@@ -636,7 +665,10 @@ impl PromptLibrary {
|
||||
};
|
||||
|
||||
let prompt_editor = &self.prompt_editors[&active_prompt_id].body_editor;
|
||||
let provider = LanguageModelCompletionProvider::read_global(cx);
|
||||
let Some(provider) = LanguageModelRegistry::read_global(cx).active_provider() else {
|
||||
return;
|
||||
};
|
||||
|
||||
let initial_prompt = action.prompt.clone();
|
||||
if provider.is_authenticated(cx) {
|
||||
InlineAssistant::update_global(cx, |assistant, cx| {
|
||||
@@ -725,6 +757,9 @@ impl PromptLibrary {
|
||||
}
|
||||
|
||||
fn count_tokens(&mut self, prompt_id: PromptId, cx: &mut ViewContext<Self>) {
|
||||
let Some(model) = LanguageModelRegistry::read_global(cx).active_model() else {
|
||||
return;
|
||||
};
|
||||
if let Some(prompt) = self.prompt_editors.get_mut(&prompt_id) {
|
||||
let editor = &prompt.body_editor.read(cx);
|
||||
let buffer = &editor.buffer().read(cx).as_singleton().unwrap().read(cx);
|
||||
@@ -734,29 +769,27 @@ impl PromptLibrary {
|
||||
const DEBOUNCE_TIMEOUT: Duration = Duration::from_secs(1);
|
||||
|
||||
cx.background_executor().timer(DEBOUNCE_TIMEOUT).await;
|
||||
if let Some(token_count) = cx.update(|cx| {
|
||||
LanguageModelCompletionProvider::read_global(cx).count_tokens(
|
||||
LanguageModelRequest {
|
||||
messages: vec![LanguageModelRequestMessage {
|
||||
role: Role::System,
|
||||
content: body.to_string(),
|
||||
}],
|
||||
stop: Vec::new(),
|
||||
temperature: 1.,
|
||||
},
|
||||
cx,
|
||||
)
|
||||
})? {
|
||||
let token_count = token_count.await?;
|
||||
let token_count = cx
|
||||
.update(|cx| {
|
||||
model.count_tokens(
|
||||
LanguageModelRequest {
|
||||
messages: vec![LanguageModelRequestMessage {
|
||||
role: Role::System,
|
||||
content: vec![body.to_string().into()],
|
||||
}],
|
||||
stop: Vec::new(),
|
||||
temperature: 1.,
|
||||
},
|
||||
cx,
|
||||
)
|
||||
})?
|
||||
.await?;
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
let prompt_editor = this.prompt_editors.get_mut(&prompt_id).unwrap();
|
||||
prompt_editor.token_count = Some(token_count);
|
||||
cx.notify();
|
||||
})
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
this.update(&mut cx, |this, cx| {
|
||||
let prompt_editor = this.prompt_editors.get_mut(&prompt_id).unwrap();
|
||||
prompt_editor.token_count = Some(token_count);
|
||||
cx.notify();
|
||||
})
|
||||
}
|
||||
.log_err()
|
||||
});
|
||||
@@ -806,7 +839,7 @@ impl PromptLibrary {
|
||||
let prompt_metadata = self.store.metadata(prompt_id)?;
|
||||
let prompt_editor = &self.prompt_editors[&prompt_id];
|
||||
let focus_handle = prompt_editor.body_editor.focus_handle(cx);
|
||||
let current_model = LanguageModelCompletionProvider::read_global(cx).active_model();
|
||||
let model = LanguageModelRegistry::read_global(cx).active_model();
|
||||
let settings = ThemeSettings::get_global(cx);
|
||||
|
||||
Some(
|
||||
@@ -916,7 +949,7 @@ impl PromptLibrary {
|
||||
None,
|
||||
format!(
|
||||
"Model: {}",
|
||||
current_model
|
||||
model
|
||||
.as_ref()
|
||||
.map(|model| model
|
||||
.name()
|
||||
@@ -935,7 +968,23 @@ impl PromptLibrary {
|
||||
)
|
||||
},
|
||||
))
|
||||
.child(
|
||||
.child(if prompt_id.is_built_in() {
|
||||
div()
|
||||
.id("built-in-prompt")
|
||||
.child(
|
||||
Icon::new(IconName::FileLock)
|
||||
.color(Color::Muted),
|
||||
)
|
||||
.tooltip(move |cx| {
|
||||
Tooltip::with_meta(
|
||||
"Built-in prompt",
|
||||
None,
|
||||
BUILT_IN_TOOLTIP_TEXT,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.into_any()
|
||||
} else {
|
||||
IconButton::new(
|
||||
"delete-prompt",
|
||||
IconName::Trash,
|
||||
@@ -953,8 +1002,9 @@ impl PromptLibrary {
|
||||
})
|
||||
.on_click(|_, cx| {
|
||||
cx.dispatch_action(Box::new(DeletePrompt));
|
||||
}),
|
||||
)
|
||||
})
|
||||
.into_any_element()
|
||||
})
|
||||
.child(
|
||||
IconButton::new(
|
||||
"duplicate-prompt",
|
||||
@@ -1057,20 +1107,30 @@ pub struct PromptMetadata {
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct PromptId(Uuid);
#[serde(tag = "kind")]
pub enum PromptId {
User { uuid: Uuid },
EditWorkflow,
}

impl PromptId {
pub fn new() -> PromptId {
PromptId(Uuid::new_v4())
PromptId::User {
uuid: Uuid::new_v4(),
}
}

pub fn is_built_in(&self) -> bool {
!matches!(self, PromptId::User { .. })
}
}
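Because PromptId is now a tagged enum, keys in the new ".v2" databases are stored as JSON objects via SerdeJson rather than bincode-encoded bare UUIDs; the two variants serialize roughly as follows (illustrative UUID):
{ "kind": "User", "uuid": "8f7c1f2e-0000-4000-8000-000000000000" }
{ "kind": "EditWorkflow" }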
pub struct PromptStore {
executor: BackgroundExecutor,
env: heed::Env,
bodies: Database<SerdeBincode<PromptId>, SerdeBincode<String>>,
metadata: Database<SerdeBincode<PromptId>, SerdeBincode<PromptMetadata>>,
metadata_cache: RwLock<MetadataCache>,
metadata: Database<SerdeJson<PromptId>, SerdeJson<PromptMetadata>>,
bodies: Database<SerdeJson<PromptId>, Str>,
}
|
||||
#[derive(Default)]
|
||||
@@ -1081,7 +1141,7 @@ struct MetadataCache {
|
||||
|
||||
impl MetadataCache {
|
||||
fn from_db(
|
||||
db: Database<SerdeBincode<PromptId>, SerdeBincode<PromptMetadata>>,
|
||||
db: Database<SerdeJson<PromptId>, SerdeJson<PromptMetadata>>,
|
||||
txn: &RoTxn,
|
||||
) -> Result<Self> {
|
||||
let mut cache = MetadataCache::default();
|
||||
@@ -1133,35 +1193,123 @@ impl PromptStore {
|
||||
let db_env = unsafe {
|
||||
heed::EnvOpenOptions::new()
|
||||
.map_size(1024 * 1024 * 1024) // 1GB
|
||||
.max_dbs(2) // bodies and metadata
|
||||
.max_dbs(4) // Metadata and bodies (possibly v1 of both as well)
|
||||
.open(db_path)?
|
||||
};
|
||||
|
||||
let mut txn = db_env.write_txn()?;
|
||||
let bodies = db_env.create_database(&mut txn, Some("bodies"))?;
|
||||
let metadata = db_env.create_database(&mut txn, Some("metadata"))?;
|
||||
let metadata = db_env.create_database(&mut txn, Some("metadata.v2"))?;
|
||||
let bodies = db_env.create_database(&mut txn, Some("bodies.v2"))?;
|
||||
|
||||
// Remove edit workflow prompt, as we decided to opt into it using
|
||||
// a slash command instead.
|
||||
metadata.delete(&mut txn, &PromptId::EditWorkflow).ok();
|
||||
bodies.delete(&mut txn, &PromptId::EditWorkflow).ok();
|
||||
|
||||
txn.commit()?;
|
||||
|
||||
Self::upgrade_dbs(&db_env, metadata, bodies).log_err();
|
||||
|
||||
let txn = db_env.read_txn()?;
|
||||
let metadata_cache = MetadataCache::from_db(metadata, &txn)?;
|
||||
txn.commit()?;
|
||||
|
||||
Ok(PromptStore {
|
||||
executor,
|
||||
env: db_env,
|
||||
bodies,
|
||||
metadata,
|
||||
metadata_cache: RwLock::new(metadata_cache),
|
||||
metadata,
|
||||
bodies,
|
||||
})
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
fn upgrade_dbs(
|
||||
env: &heed::Env,
|
||||
metadata_db: heed::Database<SerdeJson<PromptId>, SerdeJson<PromptMetadata>>,
|
||||
bodies_db: heed::Database<SerdeJson<PromptId>, Str>,
|
||||
) -> Result<()> {
|
||||
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize, Hash)]
|
||||
pub struct PromptIdV1(Uuid);
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize)]
|
||||
pub struct PromptMetadataV1 {
|
||||
pub id: PromptIdV1,
|
||||
pub title: Option<SharedString>,
|
||||
pub default: bool,
|
||||
pub saved_at: DateTime<Utc>,
|
||||
}
|
||||
|
||||
let mut txn = env.write_txn()?;
|
||||
let Some(bodies_v1_db) = env
|
||||
.open_database::<SerdeBincode<PromptIdV1>, SerdeBincode<String>>(
|
||||
&txn,
|
||||
Some("bodies"),
|
||||
)?
|
||||
else {
|
||||
return Ok(());
|
||||
};
|
||||
let mut bodies_v1 = bodies_v1_db
|
||||
.iter(&txn)?
|
||||
.collect::<heed::Result<HashMap<_, _>>>()?;
|
||||
|
||||
let Some(metadata_v1_db) = env
|
||||
.open_database::<SerdeBincode<PromptIdV1>, SerdeBincode<PromptMetadataV1>>(
|
||||
&txn,
|
||||
Some("metadata"),
|
||||
)?
|
||||
else {
|
||||
return Ok(());
|
||||
};
|
||||
let metadata_v1 = metadata_v1_db
|
||||
.iter(&txn)?
|
||||
.collect::<heed::Result<HashMap<_, _>>>()?;
|
||||
|
||||
for (prompt_id_v1, metadata_v1) in metadata_v1 {
|
||||
let prompt_id_v2 = PromptId::User {
|
||||
uuid: prompt_id_v1.0,
|
||||
};
|
||||
let Some(body_v1) = bodies_v1.remove(&prompt_id_v1) else {
|
||||
continue;
|
||||
};
|
||||
|
||||
if metadata_db
|
||||
.get(&txn, &prompt_id_v2)?
|
||||
.map_or(true, |metadata_v2| {
|
||||
metadata_v1.saved_at > metadata_v2.saved_at
|
||||
})
|
||||
{
|
||||
metadata_db.put(
|
||||
&mut txn,
|
||||
&prompt_id_v2,
|
||||
&PromptMetadata {
|
||||
id: prompt_id_v2,
|
||||
title: metadata_v1.title.clone(),
|
||||
default: metadata_v1.default,
|
||||
saved_at: metadata_v1.saved_at,
|
||||
},
|
||||
)?;
|
||||
bodies_db.put(&mut txn, &prompt_id_v2, &body_v1)?;
|
||||
}
|
||||
}
|
||||
|
||||
txn.commit()?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn load(&self, id: PromptId) -> Task<Result<String>> {
|
||||
let env = self.env.clone();
|
||||
let bodies = self.bodies;
|
||||
self.executor.spawn(async move {
|
||||
let txn = env.read_txn()?;
|
||||
bodies
|
||||
let mut prompt = bodies
|
||||
.get(&txn, &id)?
|
||||
.ok_or_else(|| anyhow!("prompt not found"))
|
||||
.ok_or_else(|| anyhow!("prompt not found"))?
|
||||
.into();
|
||||
LineEnding::normalize(&mut prompt);
|
||||
Ok(prompt)
|
||||
})
|
||||
}
|
||||
|
||||
@@ -1250,6 +1398,10 @@ impl PromptStore {
|
||||
default: bool,
|
||||
body: Rope,
|
||||
) -> Task<Result<()>> {
|
||||
if id.is_built_in() {
|
||||
return Task::ready(Err(anyhow!("built-in prompts cannot be saved")));
|
||||
}
|
||||
|
||||
let prompt_metadata = PromptMetadata {
|
||||
id,
|
||||
title,
|
||||
@@ -1277,16 +1429,26 @@ impl PromptStore {
|
||||
fn save_metadata(
|
||||
&self,
|
||||
id: PromptId,
|
||||
title: Option<SharedString>,
|
||||
mut title: Option<SharedString>,
|
||||
default: bool,
|
||||
) -> Task<Result<()>> {
|
||||
let mut cache = self.metadata_cache.write();
|
||||
|
||||
if id.is_built_in() {
|
||||
title = cache
|
||||
.metadata_by_id
|
||||
.get(&id)
|
||||
.and_then(|metadata| metadata.title.clone());
|
||||
}
|
||||
|
||||
let prompt_metadata = PromptMetadata {
|
||||
id,
|
||||
title,
|
||||
default,
|
||||
saved_at: Utc::now(),
|
||||
};
|
||||
self.metadata_cache.write().insert(prompt_metadata.clone());
|
||||
|
||||
cache.insert(prompt_metadata.clone());
|
||||
|
||||
let db_connection = self.env.clone();
|
||||
let metadata = self.metadata;
|
||||
@@ -1303,17 +1465,6 @@ impl PromptStore {
|
||||
fn first(&self) -> Option<PromptMetadata> {
|
||||
self.metadata_cache.read().metadata.first().cloned()
|
||||
}
|
||||
|
||||
pub fn operations_prompt(&self) -> String {
|
||||
String::from_utf8(
|
||||
Assets
|
||||
.load("prompts/operations.md")
|
||||
.unwrap()
|
||||
.unwrap()
|
||||
.to_vec(),
|
||||
)
|
||||
.unwrap()
|
||||
}
|
||||
}
|
||||
|
||||
/// Wraps a shared future to a prompt store so it can be assigned as a context global.
|
||||
|
||||
@@ -1,135 +1,284 @@
|
||||
use assets::Assets;
|
||||
use fs::Fs;
|
||||
use futures::StreamExt;
|
||||
use handlebars::{Handlebars, RenderError, TemplateError};
|
||||
use language::BufferSnapshot;
|
||||
use std::{fmt::Write, ops::Range};
|
||||
use parking_lot::Mutex;
|
||||
use serde::Serialize;
|
||||
use std::{ops::Range, sync::Arc, time::Duration};
|
||||
use util::ResultExt;
|
||||
|
||||
pub fn generate_content_prompt(
|
||||
user_prompt: String,
|
||||
language_name: Option<&str>,
|
||||
buffer: BufferSnapshot,
|
||||
range: Range<usize>,
|
||||
_project_name: Option<String>,
|
||||
) -> anyhow::Result<String> {
|
||||
let mut prompt = String::new();
|
||||
#[derive(Serialize)]
|
||||
pub struct ContentPromptContext {
|
||||
pub content_type: String,
|
||||
pub language_name: Option<String>,
|
||||
pub is_truncated: bool,
|
||||
pub document_content: String,
|
||||
pub user_prompt: String,
|
||||
pub rewrite_section: String,
|
||||
pub rewrite_section_with_selections: String,
|
||||
pub has_insertion: bool,
|
||||
pub has_replacement: bool,
|
||||
}
|
||||
|
||||
let content_type = match language_name {
|
||||
None | Some("Markdown" | "Plain Text") => {
|
||||
writeln!(
|
||||
prompt,
|
||||
"Here's a file of text that I'm going to ask you to make an edit to."
|
||||
)?;
|
||||
"text"
|
||||
#[derive(Serialize)]
|
||||
pub struct TerminalAssistantPromptContext {
|
||||
pub os: String,
|
||||
pub arch: String,
|
||||
pub shell: Option<String>,
|
||||
pub working_directory: Option<String>,
|
||||
pub latest_output: Vec<String>,
|
||||
pub user_prompt: String,
|
||||
}
|
||||
|
||||
pub struct PromptBuilder {
|
||||
handlebars: Arc<Mutex<Handlebars<'static>>>,
|
||||
}
|
||||
|
||||
pub struct PromptOverrideContext<'a> {
|
||||
pub dev_mode: bool,
|
||||
pub fs: Arc<dyn Fs>,
|
||||
pub cx: &'a mut gpui::AppContext,
|
||||
}
|
||||
|
||||
impl PromptBuilder {
|
||||
pub fn new(override_cx: Option<PromptOverrideContext>) -> Result<Self, Box<TemplateError>> {
|
||||
let mut handlebars = Handlebars::new();
|
||||
Self::register_templates(&mut handlebars)?;
|
||||
|
||||
let handlebars = Arc::new(Mutex::new(handlebars));
|
||||
|
||||
if let Some(override_cx) = override_cx {
|
||||
Self::watch_fs_for_template_overrides(override_cx, handlebars.clone());
|
||||
}
|
||||
Some(language_name) => {
|
||||
writeln!(
|
||||
prompt,
|
||||
"Here's a file of {language_name} that I'm going to ask you to make an edit to."
|
||||
)?;
|
||||
"code"
|
||||
|
||||
Ok(Self { handlebars })
|
||||
}
|
||||
|
||||
fn watch_fs_for_template_overrides(
|
||||
PromptOverrideContext { dev_mode, fs, cx }: PromptOverrideContext,
|
||||
handlebars: Arc<Mutex<Handlebars<'static>>>,
|
||||
) {
|
||||
cx.background_executor()
|
||||
.spawn(async move {
|
||||
let templates_dir = if dev_mode {
|
||||
std::env::current_dir()
|
||||
.ok()
|
||||
.and_then(|pwd| {
|
||||
let pwd_assets_prompts = pwd.join("assets").join("prompts");
|
||||
pwd_assets_prompts.exists().then_some(pwd_assets_prompts)
|
||||
})
|
||||
.unwrap_or_else(|| paths::prompt_overrides_dir().clone())
|
||||
} else {
|
||||
paths::prompt_overrides_dir().clone()
|
||||
};
|
||||
|
||||
// Create the prompt templates directory if it doesn't exist
|
||||
if !fs.is_dir(&templates_dir).await {
|
||||
if let Err(e) = fs.create_dir(&templates_dir).await {
|
||||
log::error!("Failed to create prompt templates directory: {}", e);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// Initial scan of the prompts directory
|
||||
if let Ok(mut entries) = fs.read_dir(&templates_dir).await {
|
||||
while let Some(Ok(file_path)) = entries.next().await {
|
||||
if file_path.to_string_lossy().ends_with(".hbs") {
|
||||
if let Ok(content) = fs.load(&file_path).await {
|
||||
let file_name = file_path.file_stem().unwrap().to_string_lossy();
|
||||
|
||||
match handlebars.lock().register_template_string(&file_name, content) {
|
||||
Ok(_) => {
|
||||
log::info!(
|
||||
"Successfully registered template override: {} ({})",
|
||||
file_name,
|
||||
file_path.display()
|
||||
);
|
||||
},
|
||||
Err(e) => {
|
||||
log::error!(
|
||||
"Failed to register template during initial scan: {} ({})",
|
||||
e,
|
||||
file_path.display()
|
||||
);
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Watch for changes
|
||||
let (mut changes, watcher) = fs.watch(&templates_dir, Duration::from_secs(1)).await;
|
||||
while let Some(changed_paths) = changes.next().await {
|
||||
for changed_path in changed_paths {
|
||||
if changed_path.extension().map_or(false, |ext| ext == "hbs") {
|
||||
log::info!("Reloading template: {}", changed_path.display());
|
||||
if let Some(content) = fs.load(&changed_path).await.log_err() {
|
||||
let file_name = changed_path.file_stem().unwrap().to_string_lossy();
|
||||
let file_path = changed_path.to_string_lossy();
|
||||
match handlebars.lock().register_template_string(&file_name, content) {
|
||||
Ok(_) => log::info!(
|
||||
"Successfully reloaded template: {} ({})",
|
||||
file_name,
|
||||
file_path
|
||||
),
|
||||
Err(e) => log::error!(
|
||||
"Failed to register template: {} ({})",
|
||||
e,
|
||||
file_path
|
||||
),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
drop(watcher);
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
|
||||
fn register_templates(handlebars: &mut Handlebars) -> Result<(), Box<TemplateError>> {
|
||||
let mut register_template = |id: &str| {
|
||||
let prompt = Assets::get(&format!("prompts/{}.hbs", id))
|
||||
.unwrap_or_else(|| panic!("{} prompt template not found", id))
|
||||
.data;
|
||||
handlebars
|
||||
.register_template_string(id, String::from_utf8_lossy(&prompt))
|
||||
.map_err(Box::new)
|
||||
};
|
||||
|
||||
register_template("content_prompt")?;
|
||||
register_template("terminal_assistant_prompt")?;
|
||||
register_template("edit_workflow")?;
|
||||
register_template("step_resolution")?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn generate_content_prompt(
|
||||
&self,
|
||||
user_prompt: String,
|
||||
language_name: Option<&str>,
|
||||
buffer: BufferSnapshot,
|
||||
transform_range: Range<usize>,
|
||||
selected_ranges: Vec<Range<usize>>,
|
||||
) -> Result<String, RenderError> {
|
||||
let content_type = match language_name {
|
||||
None | Some("Markdown" | "Plain Text") => "text",
|
||||
Some(_) => "code",
|
||||
};
|
||||
|
||||
const MAX_CTX: usize = 50000;
|
||||
let mut is_truncated = false;
|
||||
|
||||
let before_range = 0..transform_range.start;
|
||||
let truncated_before = if before_range.len() > MAX_CTX {
|
||||
is_truncated = true;
|
||||
transform_range.start - MAX_CTX..transform_range.start
|
||||
} else {
|
||||
before_range
|
||||
};
|
||||
|
||||
let after_range = transform_range.end..buffer.len();
|
||||
let truncated_after = if after_range.len() > MAX_CTX {
|
||||
is_truncated = true;
|
||||
transform_range.end..transform_range.end + MAX_CTX
|
||||
} else {
|
||||
after_range
|
||||
};
|
||||
|
||||
let mut document_content = String::new();
|
||||
for chunk in buffer.text_for_range(truncated_before) {
|
||||
document_content.push_str(chunk);
|
||||
}
|
||||
};
|
||||
|
||||
const MAX_CTX: usize = 50000;
|
||||
let mut is_truncated = false;
|
||||
if range.is_empty() {
|
||||
prompt.push_str("The point you'll need to insert at is marked with <insert_here></insert_here>.\n\n<document>");
|
||||
} else {
|
||||
prompt.push_str("The section you'll need to rewrite is marked with <rewrite_this></rewrite_this> tags.\n\n<document>");
|
||||
}
|
||||
// Include file content.
|
||||
let before_range = 0..range.start;
|
||||
let truncated_before = if before_range.len() > MAX_CTX {
|
||||
is_truncated = true;
|
||||
range.start - MAX_CTX..range.start
|
||||
} else {
|
||||
before_range
|
||||
};
|
||||
let mut non_rewrite_len = truncated_before.len();
|
||||
for chunk in buffer.text_for_range(truncated_before) {
|
||||
prompt.push_str(chunk);
|
||||
}
|
||||
if !range.is_empty() {
|
||||
prompt.push_str("<rewrite_this>\n");
|
||||
for chunk in buffer.text_for_range(range.clone()) {
|
||||
prompt.push_str(chunk);
|
||||
document_content.push_str("<rewrite_this>\n");
|
||||
for chunk in buffer.text_for_range(transform_range.clone()) {
|
||||
document_content.push_str(chunk);
|
||||
}
|
||||
prompt.push_str("\n<rewrite_this>");
|
||||
} else {
|
||||
prompt.push_str("<insert_here></insert_here>");
|
||||
}
|
||||
let after_range = range.end..buffer.len();
|
||||
let truncated_after = if after_range.len() > MAX_CTX {
|
||||
is_truncated = true;
|
||||
range.end..range.end + MAX_CTX
|
||||
} else {
|
||||
after_range
|
||||
};
|
||||
non_rewrite_len += truncated_after.len();
|
||||
for chunk in buffer.text_for_range(truncated_after) {
|
||||
prompt.push_str(chunk);
|
||||
}
|
||||
document_content.push_str("\n</rewrite_this>");
|
||||
|
||||
write!(prompt, "</document>\n\n").unwrap();
|
||||
for chunk in buffer.text_for_range(truncated_after) {
|
||||
document_content.push_str(chunk);
|
||||
}
|
||||
|
||||
if is_truncated {
|
||||
writeln!(prompt, "The context around the relevant section has been truncated (possibly in the middle of a line) for brevity.\n")?;
|
||||
}
|
||||
let mut rewrite_section = String::new();
|
||||
for chunk in buffer.text_for_range(transform_range.clone()) {
|
||||
rewrite_section.push_str(chunk);
|
||||
}
|
||||
|
||||
if range.is_empty() {
|
||||
writeln!(
|
||||
prompt,
|
||||
"You can't replace {content_type}, your answer will be inserted in place of the `<insert_here></insert_here>` tags. Don't include the insert_here tags in your output.",
|
||||
)
|
||||
.unwrap();
|
||||
writeln!(
|
||||
prompt,
|
||||
"Generate {content_type} based on the following prompt:\n\n<prompt>\n{user_prompt}\n</prompt>",
|
||||
)
|
||||
.unwrap();
|
||||
writeln!(prompt, "Match the indentation in the original file in the inserted {content_type}, don't include any indentation on blank lines.\n").unwrap();
|
||||
prompt.push_str("Immediately start with the following format with no remarks:\n\n```\n{{INSERTED_CODE}}\n```");
|
||||
} else {
|
||||
writeln!(prompt, "Edit the section of {content_type} in <rewrite_this></rewrite_this> tags based on the following prompt:'").unwrap();
|
||||
writeln!(prompt, "\n<prompt>\n{user_prompt}\n</prompt>\n").unwrap();
|
||||
let rewrite_len = range.end - range.start;
|
||||
if rewrite_len < 20000 && rewrite_len * 2 < non_rewrite_len {
|
||||
writeln!(prompt, "And here's the section to rewrite based on that prompt again for reference:\n\n<rewrite_this>\n").unwrap();
|
||||
for chunk in buffer.text_for_range(range.clone()) {
|
||||
prompt.push_str(chunk);
|
||||
let rewrite_section_with_selections = {
|
||||
let mut section_with_selections = String::new();
|
||||
let mut last_end = 0;
|
||||
for selected_range in &selected_ranges {
|
||||
if selected_range.start > last_end {
|
||||
section_with_selections.push_str(
|
||||
&rewrite_section[last_end..selected_range.start - transform_range.start],
|
||||
);
|
||||
}
|
||||
if selected_range.start == selected_range.end {
|
||||
section_with_selections.push_str("<insert_here></insert_here>");
|
||||
} else {
|
||||
section_with_selections.push_str("<edit_here>");
|
||||
section_with_selections.push_str(
|
||||
&rewrite_section[selected_range.start - transform_range.start
|
||||
..selected_range.end - transform_range.start],
|
||||
);
|
||||
section_with_selections.push_str("</edit_here>");
|
||||
}
|
||||
last_end = selected_range.end - transform_range.start;
|
||||
}
|
||||
writeln!(prompt, "\n</rewrite_this>\n").unwrap();
|
||||
}
|
||||
writeln!(prompt, "Only make changes that are necessary to fulfill the prompt, leave everything else as-is. All surrounding {content_type} will be preserved.\n").unwrap();
|
||||
write!(
|
||||
prompt,
|
||||
"Start at the indentation level in the original file in the rewritten {content_type}. "
|
||||
)
|
||||
.unwrap();
|
||||
prompt.push_str("Don't stop until you've rewritten the entire section, even if you have no more changes to make, always write out the whole section with no unnecessary elisions.");
|
||||
prompt.push_str("\n\nImmediately start with the following format with no remarks:\n\n```\n{{REWRITTEN_CODE}}\n```");
|
||||
if last_end < rewrite_section.len() {
|
||||
section_with_selections.push_str(&rewrite_section[last_end..]);
|
||||
}
|
||||
section_with_selections
|
||||
};
|
||||
|
||||
let has_insertion = selected_ranges.iter().any(|range| range.start == range.end);
|
||||
let has_replacement = selected_ranges.iter().any(|range| range.start != range.end);
|
||||
|
||||
let context = ContentPromptContext {
|
||||
content_type: content_type.to_string(),
|
||||
language_name: language_name.map(|s| s.to_string()),
|
||||
is_truncated,
|
||||
document_content,
|
||||
user_prompt,
|
||||
rewrite_section,
|
||||
rewrite_section_with_selections,
|
||||
has_insertion,
|
||||
has_replacement,
|
||||
};
|
||||
|
||||
self.handlebars.lock().render("content_prompt", &context)
|
||||
}
|
||||
|
||||
Ok(prompt)
|
||||
}
|
||||
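Both the old and new bodies of `generate_content_prompt` above clamp the context on either side of the transform range to `MAX_CTX` bytes and record whether anything was dropped. A standalone sketch of that windowing step, using plain byte offsets in place of Zed's `BufferSnapshot` (the `text_len` parameter and the values in `main` are made up for illustration):

```rust
use std::ops::Range;

const MAX_CTX: usize = 50_000;

/// Clamp the text before and after `transform_range` to at most `MAX_CTX`
/// bytes each, reporting whether anything was dropped.
fn truncated_context(
    text_len: usize,
    transform_range: &Range<usize>,
) -> (Range<usize>, Range<usize>, bool) {
    let mut is_truncated = false;

    let before = if transform_range.start > MAX_CTX {
        is_truncated = true;
        transform_range.start - MAX_CTX..transform_range.start
    } else {
        0..transform_range.start
    };

    let after = if text_len - transform_range.end > MAX_CTX {
        is_truncated = true;
        transform_range.end..transform_range.end + MAX_CTX
    } else {
        transform_range.end..text_len
    };

    (before, after, is_truncated)
}

fn main() {
    let (before, after, truncated) = truncated_context(200_000, &(80_000..80_500));
    assert_eq!(before, 30_000..80_000);
    assert_eq!(after, 80_500..130_500);
    assert!(truncated);
    println!("before: {before:?}, after: {after:?}, truncated: {truncated}");
}
```

Because the cut is made at raw byte offsets, it can land mid-line, which is why the prompt warns the model that the surrounding context may be truncated "possibly in the middle of a line".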
pub fn generate_terminal_assistant_prompt(
|
||||
&self,
|
||||
user_prompt: &str,
|
||||
shell: Option<&str>,
|
||||
working_directory: Option<&str>,
|
||||
latest_output: &[String],
|
||||
) -> Result<String, RenderError> {
|
||||
let context = TerminalAssistantPromptContext {
|
||||
os: std::env::consts::OS.to_string(),
|
||||
arch: std::env::consts::ARCH.to_string(),
|
||||
shell: shell.map(|s| s.to_string()),
|
||||
working_directory: working_directory.map(|s| s.to_string()),
|
||||
latest_output: latest_output.to_vec(),
|
||||
user_prompt: user_prompt.to_string(),
|
||||
};
|
||||
|
||||
pub fn generate_terminal_assistant_prompt(
|
||||
user_prompt: &str,
|
||||
shell: Option<&str>,
|
||||
working_directory: Option<&str>,
|
||||
) -> String {
|
||||
let mut prompt = String::new();
|
||||
writeln!(&mut prompt, "You are an expert terminal user.").unwrap();
|
||||
writeln!(&mut prompt, "You will be given a description of a command and you need to respond with a command that matches the description.").unwrap();
|
||||
writeln!(&mut prompt, "Do not include markdown blocks or any other text formatting in your response, always respond with a single command that can be executed in the given shell.").unwrap();
|
||||
if let Some(shell) = shell {
|
||||
writeln!(&mut prompt, "Current shell is '{shell}'.").unwrap();
|
||||
self.handlebars
|
||||
.lock()
|
||||
.render("terminal_assistant_prompt", &context)
|
||||
}
|
||||
if let Some(working_directory) = working_directory {
|
||||
writeln!(
|
||||
&mut prompt,
|
||||
"Current working directory is '{working_directory}'."
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
pub fn generate_workflow_prompt(&self) -> Result<String, RenderError> {
|
||||
self.handlebars.lock().render("edit_workflow", &())
|
||||
}
|
||||
|
||||
pub fn generate_step_resolution_prompt(&self) -> Result<String, RenderError> {
|
||||
self.handlebars.lock().render("step_resolution", &())
|
||||
}
|
||||
writeln!(&mut prompt, "Here is the description of the command:").unwrap();
|
||||
prompt.push_str(user_prompt);
|
||||
prompt
|
||||
}
|
||||
|
||||
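This change moves prompt assembly from hand-written `writeln!` calls to registered Handlebars templates rendered from serializable context structs. A rough sketch of that pattern using the `handlebars` and `serde` crates; the inline template string is illustrative only, not the `terminal_assistant_prompt` template that ships with the editor, and the `arch`/`latest_output` fields are omitted for brevity:

```rust
use handlebars::Handlebars;
use serde::Serialize;

#[derive(Serialize)]
struct TerminalAssistantPromptContext {
    os: String,
    shell: Option<String>,
    working_directory: Option<String>,
    user_prompt: String,
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let mut handlebars = Handlebars::new();
    // Illustrative template only; the real template is bundled with the editor.
    handlebars.register_template_string(
        "terminal_assistant_prompt",
        "You are an expert terminal user on {{os}}.\n\
         {{#if shell}}Current shell is '{{shell}}'.\n{{/if}}\
         {{#if working_directory}}Current working directory is '{{working_directory}}'.\n{{/if}}\
         Here is the description of the command:\n{{user_prompt}}",
    )?;

    let context = TerminalAssistantPromptContext {
        os: std::env::consts::OS.to_string(),
        shell: Some("zsh".to_string()),
        working_directory: Some("/tmp".to_string()),
        user_prompt: "list files sorted by size".to_string(),
    };

    println!("{}", handlebars.render("terminal_assistant_prompt", &context)?);
    Ok(())
}
```

The payoff is that the prompt text becomes data (a template) rather than code, so it can be overridden or iterated on without touching the Rust side.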
@@ -6,6 +6,7 @@ use fuzzy::{match_strings, StringMatchCandidate};
|
||||
use gpui::{AppContext, Model, Task, ViewContext, WeakView, WindowContext};
|
||||
use language::{Anchor, Buffer, CodeLabel, Documentation, HighlightId, LanguageServerId, ToPoint};
|
||||
use parking_lot::{Mutex, RwLock};
|
||||
use project::CompletionIntent;
|
||||
use rope::Point;
|
||||
use std::{
|
||||
ops::Range,
|
||||
@@ -17,7 +18,6 @@ use std::{
|
||||
use ui::ActiveTheme;
|
||||
use workspace::Workspace;
|
||||
|
||||
pub mod active_command;
|
||||
pub mod default_command;
|
||||
pub mod diagnostics_command;
|
||||
pub mod docs_command;
|
||||
@@ -29,7 +29,8 @@ pub mod prompt_command;
|
||||
pub mod search_command;
|
||||
pub mod symbols_command;
|
||||
pub mod tabs_command;
|
||||
pub mod term_command;
|
||||
pub mod terminal_command;
|
||||
pub mod workflow_command;
|
||||
|
||||
pub(crate) struct SlashCommandCompletionProvider {
|
||||
cancel_flag: Mutex<Arc<AtomicBool>>,
|
||||
@@ -106,20 +107,24 @@ impl SlashCommandCompletionProvider {
|
||||
let command_range = command_range.clone();
|
||||
let editor = editor.clone();
|
||||
let workspace = workspace.clone();
|
||||
Arc::new(move |cx: &mut WindowContext| {
|
||||
editor
|
||||
.update(cx, |editor, cx| {
|
||||
editor.run_command(
|
||||
command_range.clone(),
|
||||
&command_name,
|
||||
None,
|
||||
true,
|
||||
workspace.clone(),
|
||||
cx,
|
||||
);
|
||||
})
|
||||
.ok();
|
||||
}) as Arc<_>
|
||||
Arc::new(
|
||||
move |intent: CompletionIntent, cx: &mut WindowContext| {
|
||||
if intent.is_complete() {
|
||||
editor
|
||||
.update(cx, |editor, cx| {
|
||||
editor.run_command(
|
||||
command_range.clone(),
|
||||
&command_name,
|
||||
None,
|
||||
true,
|
||||
workspace.clone(),
|
||||
cx,
|
||||
);
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
},
|
||||
) as Arc<_>
|
||||
})
|
||||
},
|
||||
);
|
||||
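The completion confirm callbacks now take a `CompletionIntent` and only run the slash command when the completion was explicitly confirmed. A simplified sketch of that gating; the two-variant `CompletionIntent` below is a stand-in for the real type in Zed's `project` crate, not its actual definition:

```rust
// Stand-in for project::CompletionIntent, reduced to the distinction that matters here.
#[derive(Clone, Copy, Debug)]
enum CompletionIntent {
    Complete, // the user confirmed the completion
    Continue, // the completion was inserted as part of ongoing typing
}

impl CompletionIntent {
    fn is_complete(self) -> bool {
        matches!(self, CompletionIntent::Complete)
    }
}

fn confirm_callback(run_command: impl Fn() + 'static) -> Box<dyn Fn(CompletionIntent)> {
    Box::new(move |intent| {
        // Only run the slash command when the user explicitly confirmed.
        if intent.is_complete() {
            run_command();
        }
    })
}

fn main() {
    let callback = confirm_callback(|| println!("running /file command"));
    callback(CompletionIntent::Continue); // ignored: the completion wasn't confirmed
    callback(CompletionIntent::Complete); // runs the slash command
}
```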
@@ -151,7 +156,6 @@ impl SlashCommandCompletionProvider {
|
||||
let mut flag = self.cancel_flag.lock();
|
||||
flag.store(true, SeqCst);
|
||||
*flag = new_cancel_flag.clone();
|
||||
|
||||
let commands = SlashCommandRegistry::global(cx);
|
||||
if let Some(command) = commands.command(command_name) {
|
||||
let completions = command.complete_argument(
|
||||
@@ -177,19 +181,21 @@ impl SlashCommandCompletionProvider {
|
||||
let command_range = command_range.clone();
|
||||
let command_name = command_name.clone();
|
||||
let command_argument = command_argument.new_text.clone();
|
||||
move |cx: &mut WindowContext| {
|
||||
editor
|
||||
.update(cx, |editor, cx| {
|
||||
editor.run_command(
|
||||
command_range.clone(),
|
||||
&command_name,
|
||||
Some(&command_argument),
|
||||
true,
|
||||
workspace.clone(),
|
||||
cx,
|
||||
);
|
||||
})
|
||||
.ok();
|
||||
move |intent: CompletionIntent, cx: &mut WindowContext| {
|
||||
if intent.is_complete() {
|
||||
editor
|
||||
.update(cx, |editor, cx| {
|
||||
editor.run_command(
|
||||
command_range.clone(),
|
||||
&command_name,
|
||||
Some(&command_argument),
|
||||
true,
|
||||
workspace.clone(),
|
||||
cx,
|
||||
);
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
}
|
||||
}) as Arc<_>
|
||||
})
|
||||
@@ -204,7 +210,7 @@ impl SlashCommandCompletionProvider {
|
||||
|
||||
project::Completion {
|
||||
old_range: argument_range.clone(),
|
||||
label: CodeLabel::plain(command_argument.label, None),
|
||||
label: command_argument.label,
|
||||
new_text,
|
||||
documentation: None,
|
||||
server_id: LanguageServerId(0),
|
||||
|
||||
@@ -1,102 +0,0 @@
|
||||
use super::{
|
||||
diagnostics_command::write_single_file_diagnostics,
|
||||
file_command::{build_entry_output_section, codeblock_fence_for_path},
|
||||
SlashCommand, SlashCommandOutput,
|
||||
};
|
||||
use anyhow::{anyhow, Result};
|
||||
use assistant_slash_command::ArgumentCompletion;
|
||||
use editor::Editor;
|
||||
use gpui::{AppContext, Task, WeakView};
|
||||
use language::LspAdapterDelegate;
|
||||
use std::sync::atomic::AtomicBool;
|
||||
use std::sync::Arc;
|
||||
use ui::WindowContext;
|
||||
use workspace::Workspace;
|
||||
|
||||
pub(crate) struct ActiveSlashCommand;
|
||||
|
||||
impl SlashCommand for ActiveSlashCommand {
|
||||
fn name(&self) -> String {
|
||||
"active".into()
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
"insert active tab".into()
|
||||
}
|
||||
|
||||
fn menu_text(&self) -> String {
|
||||
"Insert Active Tab".into()
|
||||
}
|
||||
|
||||
fn complete_argument(
|
||||
self: Arc<Self>,
|
||||
_query: String,
|
||||
_cancel: Arc<AtomicBool>,
|
||||
_workspace: Option<WeakView<Workspace>>,
|
||||
_cx: &mut AppContext,
|
||||
) -> Task<Result<Vec<ArgumentCompletion>>> {
|
||||
Task::ready(Err(anyhow!("this command does not require argument")))
|
||||
}
|
||||
|
||||
fn requires_argument(&self) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
_argument: Option<&str>,
|
||||
workspace: WeakView<Workspace>,
|
||||
_delegate: Arc<dyn LspAdapterDelegate>,
|
||||
cx: &mut WindowContext,
|
||||
) -> Task<Result<SlashCommandOutput>> {
|
||||
let output = workspace.update(cx, |workspace, cx| {
|
||||
let Some(active_item) = workspace.active_item(cx) else {
|
||||
return Task::ready(Err(anyhow!("no active tab")));
|
||||
};
|
||||
let Some(buffer) = active_item
|
||||
.downcast::<Editor>()
|
||||
.and_then(|editor| editor.read(cx).buffer().read(cx).as_singleton())
|
||||
else {
|
||||
return Task::ready(Err(anyhow!("active tab is not an editor")));
|
||||
};
|
||||
|
||||
let snapshot = buffer.read(cx).snapshot();
|
||||
let path = snapshot.resolve_file_path(cx, true);
|
||||
let task = cx.background_executor().spawn({
|
||||
let path = path.clone();
|
||||
async move {
|
||||
let mut output = String::new();
|
||||
output.push_str(&codeblock_fence_for_path(path.as_deref(), None));
|
||||
for chunk in snapshot.as_rope().chunks() {
|
||||
output.push_str(chunk);
|
||||
}
|
||||
if !output.ends_with('\n') {
|
||||
output.push('\n');
|
||||
}
|
||||
output.push_str("```\n");
|
||||
let has_diagnostics =
|
||||
write_single_file_diagnostics(&mut output, path.as_deref(), &snapshot);
|
||||
if output.ends_with('\n') {
|
||||
output.pop();
|
||||
}
|
||||
(output, has_diagnostics)
|
||||
}
|
||||
});
|
||||
cx.foreground_executor().spawn(async move {
|
||||
let (text, has_diagnostics) = task.await;
|
||||
let range = 0..text.len();
|
||||
Ok(SlashCommandOutput {
|
||||
text,
|
||||
sections: vec![build_entry_output_section(
|
||||
range,
|
||||
path.as_deref(),
|
||||
false,
|
||||
None,
|
||||
)],
|
||||
run_commands_in_text: has_diagnostics,
|
||||
})
|
||||
})
|
||||
});
|
||||
output.unwrap_or_else(|error| Task::ready(Err(error)))
|
||||
}
|
||||
}
|
||||
@@ -2,7 +2,7 @@ use super::{SlashCommand, SlashCommandOutput};
|
||||
use crate::prompt_library::PromptStore;
|
||||
use anyhow::{anyhow, Result};
|
||||
use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection};
|
||||
use gpui::{AppContext, Task, WeakView};
|
||||
use gpui::{Task, WeakView};
|
||||
use language::LspAdapterDelegate;
|
||||
use std::{
|
||||
fmt::Write,
|
||||
@@ -35,7 +35,7 @@ impl SlashCommand for DefaultSlashCommand {
|
||||
_query: String,
|
||||
_cancellation_flag: Arc<AtomicBool>,
|
||||
_workspace: Option<WeakView<Workspace>>,
|
||||
_cx: &mut AppContext,
|
||||
_cx: &mut WindowContext,
|
||||
) -> Task<Result<Vec<ArgumentCompletion>>> {
|
||||
Task::ready(Err(anyhow!("this command does not require argument")))
|
||||
}
|
||||
@@ -44,7 +44,7 @@ impl SlashCommand for DefaultSlashCommand {
|
||||
self: Arc<Self>,
|
||||
_argument: Option<&str>,
|
||||
_workspace: WeakView<Workspace>,
|
||||
_delegate: Arc<dyn LspAdapterDelegate>,
|
||||
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
|
||||
cx: &mut WindowContext,
|
||||
) -> Task<Result<SlashCommandOutput>> {
|
||||
let store = PromptStore::global(cx);
|
||||
|
||||
@@ -43,6 +43,7 @@ impl DiagnosticsSlashCommand {
|
||||
worktree_id: entry.worktree_id.to_usize(),
|
||||
path: entry.path.clone(),
|
||||
path_prefix: path_prefix.clone(),
|
||||
is_dir: false, // Diagnostics can't be produced for directories
|
||||
distance_to_relative_ancestor: 0,
|
||||
})
|
||||
.collect(),
|
||||
@@ -107,7 +108,7 @@ impl SlashCommand for DiagnosticsSlashCommand {
|
||||
query: String,
|
||||
cancellation_flag: Arc<AtomicBool>,
|
||||
workspace: Option<WeakView<Workspace>>,
|
||||
cx: &mut AppContext,
|
||||
cx: &mut WindowContext,
|
||||
) -> Task<Result<Vec<ArgumentCompletion>>> {
|
||||
let Some(workspace) = workspace.and_then(|workspace| workspace.upgrade()) else {
|
||||
return Task::ready(Err(anyhow!("workspace was dropped")));
|
||||
@@ -146,7 +147,7 @@ impl SlashCommand for DiagnosticsSlashCommand {
|
||||
Ok(matches
|
||||
.into_iter()
|
||||
.map(|completion| ArgumentCompletion {
|
||||
label: completion.clone(),
|
||||
label: completion.clone().into(),
|
||||
new_text: completion,
|
||||
run_command: true,
|
||||
})
|
||||
@@ -158,7 +159,7 @@ impl SlashCommand for DiagnosticsSlashCommand {
|
||||
self: Arc<Self>,
|
||||
argument: Option<&str>,
|
||||
workspace: WeakView<Workspace>,
|
||||
_delegate: Arc<dyn LspAdapterDelegate>,
|
||||
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
|
||||
cx: &mut WindowContext,
|
||||
) -> Task<Result<SlashCommandOutput>> {
|
||||
let Some(workspace) = workspace.upgrade() else {
|
||||
@@ -168,58 +169,66 @@ impl SlashCommand for DiagnosticsSlashCommand {
|
||||
let options = Options::parse(argument);
|
||||
|
||||
let task = collect_diagnostics(workspace.read(cx).project().clone(), options, cx);
|
||||
|
||||
cx.spawn(move |_| async move {
|
||||
let Some((text, sections)) = task.await? else {
|
||||
return Ok(SlashCommandOutput::default());
|
||||
return Ok(SlashCommandOutput {
|
||||
sections: vec![SlashCommandOutputSection {
|
||||
range: 0..1,
|
||||
icon: IconName::Library,
|
||||
label: "No Diagnostics".into(),
|
||||
}],
|
||||
text: "\n".to_string(),
|
||||
run_commands_in_text: true,
|
||||
});
|
||||
};
|
||||
|
||||
let sections = sections
|
||||
.into_iter()
|
||||
.map(|(range, placeholder_type)| SlashCommandOutputSection {
|
||||
range,
|
||||
icon: match placeholder_type {
|
||||
PlaceholderType::Root(_, _) => IconName::ExclamationTriangle,
|
||||
PlaceholderType::File(_) => IconName::File,
|
||||
PlaceholderType::Diagnostic(DiagnosticType::Error, _) => IconName::XCircle,
|
||||
PlaceholderType::Diagnostic(DiagnosticType::Warning, _) => {
|
||||
IconName::ExclamationTriangle
|
||||
}
|
||||
},
|
||||
label: match placeholder_type {
|
||||
PlaceholderType::Root(summary, source) => {
|
||||
let mut label = String::new();
|
||||
label.push_str("Diagnostics");
|
||||
if let Some(source) = source {
|
||||
write!(label, " ({})", source).unwrap();
|
||||
}
|
||||
|
||||
if summary.error_count > 0 || summary.warning_count > 0 {
|
||||
label.push(':');
|
||||
|
||||
if summary.error_count > 0 {
|
||||
write!(label, " {} errors", summary.error_count).unwrap();
|
||||
if summary.warning_count > 0 {
|
||||
label.push_str(",");
|
||||
}
|
||||
}
|
||||
|
||||
if summary.warning_count > 0 {
|
||||
write!(label, " {} warnings", summary.warning_count).unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
label.into()
|
||||
}
|
||||
PlaceholderType::File(file_path) => file_path.into(),
|
||||
PlaceholderType::Diagnostic(_, message) => message.into(),
|
||||
},
|
||||
})
|
||||
.collect();
|
||||
|
||||
Ok(SlashCommandOutput {
|
||||
text,
|
||||
sections: sections
|
||||
.into_iter()
|
||||
.map(|(range, placeholder_type)| SlashCommandOutputSection {
|
||||
range,
|
||||
icon: match placeholder_type {
|
||||
PlaceholderType::Root(_, _) => IconName::ExclamationTriangle,
|
||||
PlaceholderType::File(_) => IconName::File,
|
||||
PlaceholderType::Diagnostic(DiagnosticType::Error, _) => {
|
||||
IconName::XCircle
|
||||
}
|
||||
PlaceholderType::Diagnostic(DiagnosticType::Warning, _) => {
|
||||
IconName::ExclamationTriangle
|
||||
}
|
||||
},
|
||||
label: match placeholder_type {
|
||||
PlaceholderType::Root(summary, source) => {
|
||||
let mut label = String::new();
|
||||
label.push_str("Diagnostics");
|
||||
if let Some(source) = source {
|
||||
write!(label, " ({})", source).unwrap();
|
||||
}
|
||||
|
||||
if summary.error_count > 0 || summary.warning_count > 0 {
|
||||
label.push(':');
|
||||
|
||||
if summary.error_count > 0 {
|
||||
write!(label, " {} errors", summary.error_count).unwrap();
|
||||
if summary.warning_count > 0 {
|
||||
label.push_str(",");
|
||||
}
|
||||
}
|
||||
|
||||
if summary.warning_count > 0 {
|
||||
write!(label, " {} warnings", summary.warning_count)
|
||||
.unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
label.into()
|
||||
}
|
||||
PlaceholderType::File(file_path) => file_path.into(),
|
||||
PlaceholderType::Diagnostic(_, message) => message.into(),
|
||||
},
|
||||
})
|
||||
.collect(),
|
||||
sections,
|
||||
run_commands_in_text: false,
|
||||
})
|
||||
})
|
||||
|
||||
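The root diagnostics section label is assembled by hand from the source name and the error/warning counts. The same formatting extracted into a small standalone function (it keeps the source's simple pluralization, so a single warning still reads "1 warnings"):

```rust
use std::fmt::Write as _;

/// Build a label like "Diagnostics (rustc): 2 errors, 1 warnings".
fn diagnostics_label(source: Option<&str>, error_count: usize, warning_count: usize) -> String {
    let mut label = String::from("Diagnostics");
    if let Some(source) = source {
        write!(label, " ({})", source).unwrap();
    }
    if error_count > 0 || warning_count > 0 {
        label.push(':');
        if error_count > 0 {
            write!(label, " {} errors", error_count).unwrap();
            if warning_count > 0 {
                label.push(',');
            }
        }
        if warning_count > 0 {
            write!(label, " {} warnings", warning_count).unwrap();
        }
    }
    label
}

fn main() {
    assert_eq!(diagnostics_label(None, 0, 0), "Diagnostics");
    assert_eq!(
        diagnostics_label(Some("rustc"), 2, 1),
        "Diagnostics (rustc): 2 errors, 1 warnings"
    );
    println!("{}", diagnostics_label(Some("rustc"), 2, 1));
}
```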
@@ -164,7 +164,7 @@ impl SlashCommand for DocsSlashCommand {
|
||||
query: String,
|
||||
_cancel: Arc<AtomicBool>,
|
||||
workspace: Option<WeakView<Workspace>>,
|
||||
cx: &mut AppContext,
|
||||
cx: &mut WindowContext,
|
||||
) -> Task<Result<Vec<ArgumentCompletion>>> {
|
||||
self.ensure_rust_doc_providers_are_registered(workspace, cx);
|
||||
|
||||
@@ -182,7 +182,7 @@ impl SlashCommand for DocsSlashCommand {
|
||||
items
|
||||
.into_iter()
|
||||
.map(|item| ArgumentCompletion {
|
||||
label: item.clone(),
|
||||
label: item.clone().into(),
|
||||
new_text: format!("{provider} {item}"),
|
||||
run_command: true,
|
||||
})
|
||||
@@ -194,7 +194,7 @@ impl SlashCommand for DocsSlashCommand {
|
||||
let providers = indexed_docs_registry.list_providers();
|
||||
if providers.is_empty() {
|
||||
return Ok(vec![ArgumentCompletion {
|
||||
label: "No available docs providers.".to_string(),
|
||||
label: "No available docs providers.".into(),
|
||||
new_text: String::new(),
|
||||
run_command: false,
|
||||
}]);
|
||||
@@ -203,7 +203,7 @@ impl SlashCommand for DocsSlashCommand {
|
||||
Ok(providers
|
||||
.into_iter()
|
||||
.map(|provider| ArgumentCompletion {
|
||||
label: provider.to_string(),
|
||||
label: provider.to_string().into(),
|
||||
new_text: provider.to_string(),
|
||||
run_command: false,
|
||||
})
|
||||
@@ -222,8 +222,38 @@ impl SlashCommand for DocsSlashCommand {
|
||||
drop(store.clone().index(package.as_str().into()));
|
||||
}
|
||||
|
||||
let items = store.search(package).await;
|
||||
Ok(build_completions(provider, items))
|
||||
let suggested_packages = store.clone().suggest_packages().await?;
|
||||
let search_results = store.search(package).await;
|
||||
|
||||
let mut items = build_completions(provider.clone(), search_results);
|
||||
let workspace_crate_completions = suggested_packages
|
||||
.into_iter()
|
||||
.filter(|package_name| {
|
||||
!items
|
||||
.iter()
|
||||
.any(|item| item.label.text() == package_name.as_ref())
|
||||
})
|
||||
.map(|package_name| ArgumentCompletion {
|
||||
label: format!("{package_name} (unindexed)").into(),
|
||||
new_text: format!("{provider} {package_name}"),
|
||||
run_command: true,
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
items.extend(workspace_crate_completions);
|
||||
|
||||
if items.is_empty() {
|
||||
return Ok(vec![ArgumentCompletion {
|
||||
label: format!(
|
||||
"Enter a {package_term} name.",
|
||||
package_term = package_term(&provider)
|
||||
)
|
||||
.into(),
|
||||
new_text: provider.to_string(),
|
||||
run_command: false,
|
||||
}]);
|
||||
}
|
||||
|
||||
Ok(items)
|
||||
}
|
||||
DocsSlashCommandArgs::SearchItemDocs {
|
||||
provider,
|
||||
@@ -242,7 +272,7 @@ impl SlashCommand for DocsSlashCommand {
|
||||
self: Arc<Self>,
|
||||
argument: Option<&str>,
|
||||
_workspace: WeakView<Workspace>,
|
||||
_delegate: Arc<dyn LspAdapterDelegate>,
|
||||
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
|
||||
cx: &mut WindowContext,
|
||||
) -> Task<Result<SlashCommandOutput>> {
|
||||
let Some(argument) = argument else {
|
||||
@@ -269,6 +299,13 @@ impl SlashCommand for DocsSlashCommand {
|
||||
} => (provider, item_path),
|
||||
};
|
||||
|
||||
if key.trim().is_empty() {
|
||||
bail!(
|
||||
"no {package_term} name provided",
|
||||
package_term = package_term(&provider)
|
||||
);
|
||||
}
|
||||
|
||||
let store = store?;
|
||||
|
||||
if let Some(package) = args.package() {
|
||||
@@ -391,6 +428,15 @@ impl DocsSlashCommandArgs {
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the term used to refer to a package.
|
||||
fn package_term(provider: &ProviderId) -> &'static str {
|
||||
if provider == &DocsDotRsProvider::id() || provider == &LocalRustdocProvider::id() {
|
||||
return "crate";
|
||||
}
|
||||
|
||||
"package"
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
|
||||
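For package completions, `/docs` now merges indexed search results with unindexed workspace suggestions, skipping any suggestion whose name is already present among the results. The dedup step in isolation, over plain strings rather than `ArgumentCompletion` values:

```rust
fn merge_completions(search_results: Vec<String>, suggested: Vec<String>) -> Vec<String> {
    let mut items = search_results;
    let extra: Vec<String> = suggested
        .into_iter()
        .filter(|package| !items.iter().any(|item| item == package))
        .map(|package| format!("{package} (unindexed)"))
        .collect();
    items.extend(extra);
    items
}

fn main() {
    let merged = merge_completions(
        vec!["serde".into(), "tokio".into()],
        vec!["serde".into(), "my_local_crate".into()],
    );
    assert_eq!(merged, vec!["serde", "tokio", "my_local_crate (unindexed)"]);
}
```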
@@ -8,7 +8,7 @@ use assistant_slash_command::{
|
||||
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
|
||||
};
|
||||
use futures::AsyncReadExt;
|
||||
use gpui::{AppContext, Task, WeakView};
|
||||
use gpui::{Task, WeakView};
|
||||
use html_to_markdown::{convert_html_to_markdown, markdown, TagHandler};
|
||||
use http_client::{AsyncBody, HttpClient, HttpClientWithUrl};
|
||||
use language::LspAdapterDelegate;
|
||||
@@ -120,7 +120,7 @@ impl SlashCommand for FetchSlashCommand {
|
||||
_query: String,
|
||||
_cancel: Arc<AtomicBool>,
|
||||
_workspace: Option<WeakView<Workspace>>,
|
||||
_cx: &mut AppContext,
|
||||
_cx: &mut WindowContext,
|
||||
) -> Task<Result<Vec<ArgumentCompletion>>> {
|
||||
Task::ready(Ok(Vec::new()))
|
||||
}
|
||||
@@ -129,7 +129,7 @@ impl SlashCommand for FetchSlashCommand {
|
||||
self: Arc<Self>,
|
||||
argument: Option<&str>,
|
||||
workspace: WeakView<Workspace>,
|
||||
_delegate: Arc<dyn LspAdapterDelegate>,
|
||||
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
|
||||
cx: &mut WindowContext,
|
||||
) -> Task<Result<SlashCommandOutput>> {
|
||||
let Some(argument) = argument else {
|
||||
@@ -150,6 +150,10 @@ impl SlashCommand for FetchSlashCommand {
|
||||
let url = SharedString::from(url);
|
||||
cx.foreground_executor().spawn(async move {
|
||||
let text = text.await?;
|
||||
if text.trim().is_empty() {
|
||||
bail!("no textual content found");
|
||||
}
|
||||
|
||||
let range = 0..text.len();
|
||||
Ok(SlashCommandOutput {
|
||||
text,
|
||||
|
||||
@@ -3,7 +3,7 @@ use anyhow::{anyhow, Result};
|
||||
use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection};
|
||||
use fuzzy::PathMatch;
|
||||
use gpui::{AppContext, Model, Task, View, WeakView};
|
||||
use language::{BufferSnapshot, LineEnding, LspAdapterDelegate};
|
||||
use language::{BufferSnapshot, CodeLabel, HighlightId, LineEnding, LspAdapterDelegate};
|
||||
use project::{PathMatchCandidateSet, Project};
|
||||
use std::{
|
||||
fmt::Write,
|
||||
@@ -29,11 +29,30 @@ impl FileSlashCommand {
|
||||
let workspace = workspace.read(cx);
|
||||
let project = workspace.project().read(cx);
|
||||
let entries = workspace.recent_navigation_history(Some(10), cx);
|
||||
|
||||
let entries = entries
|
||||
.into_iter()
|
||||
.map(|entries| (entries.0, false))
|
||||
.chain(project.worktrees(cx).flat_map(|worktree| {
|
||||
let worktree = worktree.read(cx);
|
||||
let id = worktree.id();
|
||||
worktree.child_entries(Path::new("")).map(move |entry| {
|
||||
(
|
||||
project::ProjectPath {
|
||||
worktree_id: id,
|
||||
path: entry.path.clone(),
|
||||
},
|
||||
entry.kind.is_dir(),
|
||||
)
|
||||
})
|
||||
}))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let path_prefix: Arc<str> = Arc::default();
|
||||
Task::ready(
|
||||
entries
|
||||
.into_iter()
|
||||
.filter_map(|(entry, _)| {
|
||||
.filter_map(|(entry, is_dir)| {
|
||||
let worktree = project.worktree_for_id(entry.worktree_id, cx)?;
|
||||
let mut full_path = PathBuf::from(worktree.read(cx).root_name());
|
||||
full_path.push(&entry.path);
|
||||
@@ -44,6 +63,7 @@ impl FileSlashCommand {
|
||||
path: full_path.into(),
|
||||
path_prefix: path_prefix.clone(),
|
||||
distance_to_relative_ancestor: 0,
|
||||
is_dir,
|
||||
})
|
||||
})
|
||||
.collect(),
|
||||
@@ -54,6 +74,7 @@ impl FileSlashCommand {
|
||||
.into_iter()
|
||||
.map(|worktree| {
|
||||
let worktree = worktree.read(cx);
|
||||
|
||||
PathMatchCandidateSet {
|
||||
snapshot: worktree.snapshot(),
|
||||
include_ignored: worktree
|
||||
@@ -104,29 +125,42 @@ impl SlashCommand for FileSlashCommand {
|
||||
query: String,
|
||||
cancellation_flag: Arc<AtomicBool>,
|
||||
workspace: Option<WeakView<Workspace>>,
|
||||
cx: &mut AppContext,
|
||||
cx: &mut WindowContext,
|
||||
) -> Task<Result<Vec<ArgumentCompletion>>> {
|
||||
let Some(workspace) = workspace.and_then(|workspace| workspace.upgrade()) else {
|
||||
return Task::ready(Err(anyhow!("workspace was dropped")));
|
||||
};
|
||||
|
||||
let paths = self.search_paths(query, cancellation_flag, &workspace, cx);
|
||||
let comment_id = cx.theme().syntax().highlight_id("comment").map(HighlightId);
|
||||
cx.background_executor().spawn(async move {
|
||||
Ok(paths
|
||||
.await
|
||||
.into_iter()
|
||||
.map(|path_match| {
|
||||
.filter_map(|path_match| {
|
||||
let text = format!(
|
||||
"{}{}",
|
||||
path_match.path_prefix,
|
||||
path_match.path.to_string_lossy()
|
||||
);
|
||||
|
||||
ArgumentCompletion {
|
||||
label: text.clone(),
|
||||
let mut label = CodeLabel::default();
|
||||
let file_name = path_match.path.file_name()?.to_string_lossy();
|
||||
let label_text = if path_match.is_dir {
|
||||
format!("{}/ ", file_name)
|
||||
} else {
|
||||
format!("{} ", file_name)
|
||||
};
|
||||
|
||||
label.push_str(label_text.as_str(), None);
|
||||
label.push_str(&text, comment_id);
|
||||
label.filter_range = 0..file_name.len();
|
||||
|
||||
Some(ArgumentCompletion {
|
||||
label,
|
||||
new_text: text,
|
||||
run_command: true,
|
||||
}
|
||||
})
|
||||
})
|
||||
.collect())
|
||||
})
|
||||
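The `/file` completions now use a `CodeLabel` whose `filter_range` covers only the file name, so fuzzy filtering matches the name while the full path is shown after it as dimmed context. A sketch of that label shape with a minimal stand-in struct (Zed's real `CodeLabel` also carries syntax highlight runs):

```rust
use std::ops::Range;

// Stand-in for language::CodeLabel; just enough to show the completion label shape.
#[derive(Debug)]
struct CodeLabel {
    text: String,
    // Portion of `text` that fuzzy filtering should match against.
    filter_range: Range<usize>,
}

/// File name first (used for filtering), followed by the full path as context.
fn file_completion_label(file_name: &str, full_path: &str, is_dir: bool) -> CodeLabel {
    let shown_name = if is_dir {
        format!("{}/ ", file_name)
    } else {
        format!("{} ", file_name)
    };
    CodeLabel {
        text: format!("{shown_name}{full_path}"),
        filter_range: 0..file_name.len(),
    }
}

fn main() {
    let label = file_completion_label("main.rs", "zed/crates/assistant/src/main.rs", false);
    assert_eq!(&label.text[label.filter_range.clone()], "main.rs");
    println!("{:?}", label);
}
```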
@@ -136,7 +170,7 @@ impl SlashCommand for FileSlashCommand {
|
||||
self: Arc<Self>,
|
||||
argument: Option<&str>,
|
||||
workspace: WeakView<Workspace>,
|
||||
_delegate: Arc<dyn LspAdapterDelegate>,
|
||||
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
|
||||
cx: &mut WindowContext,
|
||||
) -> Task<Result<SlashCommandOutput>> {
|
||||
let Some(workspace) = workspace.upgrade() else {
|
||||
@@ -273,20 +307,25 @@ fn collect_files(
|
||||
continue;
|
||||
};
|
||||
if let Some(buffer) = open_buffer_task.await.log_err() {
|
||||
let snapshot = cx.read_model(&buffer, |buffer, _| buffer.snapshot())?;
|
||||
let buffer_snapshot =
|
||||
cx.read_model(&buffer, |buffer, _| buffer.snapshot())?;
|
||||
let prev_len = text.len();
|
||||
collect_file_content(&mut text, &snapshot, filename.clone());
|
||||
collect_file_content(
|
||||
&mut text,
|
||||
&buffer_snapshot,
|
||||
path_including_worktree_name.to_string_lossy().to_string(),
|
||||
);
|
||||
text.push('\n');
|
||||
if !write_single_file_diagnostics(
|
||||
&mut text,
|
||||
Some(&path_including_worktree_name),
|
||||
&snapshot,
|
||||
&buffer_snapshot,
|
||||
) {
|
||||
text.pop();
|
||||
}
|
||||
ranges.push((
|
||||
prev_len..text.len(),
|
||||
PathBuf::from(filename),
|
||||
path_including_worktree_name,
|
||||
EntryType::File,
|
||||
));
|
||||
text.push('\n');
|
||||
|
||||
@@ -6,7 +6,7 @@ use assistant_slash_command::{
|
||||
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
|
||||
};
|
||||
use chrono::Local;
|
||||
use gpui::{AppContext, Task, WeakView};
|
||||
use gpui::{Task, WeakView};
|
||||
use language::LspAdapterDelegate;
|
||||
use ui::prelude::*;
|
||||
use workspace::Workspace;
|
||||
@@ -35,7 +35,7 @@ impl SlashCommand for NowSlashCommand {
|
||||
_query: String,
|
||||
_cancel: Arc<AtomicBool>,
|
||||
_workspace: Option<WeakView<Workspace>>,
|
||||
_cx: &mut AppContext,
|
||||
_cx: &mut WindowContext,
|
||||
) -> Task<Result<Vec<ArgumentCompletion>>> {
|
||||
Task::ready(Ok(Vec::new()))
|
||||
}
|
||||
@@ -44,7 +44,7 @@ impl SlashCommand for NowSlashCommand {
|
||||
self: Arc<Self>,
|
||||
_argument: Option<&str>,
|
||||
_workspace: WeakView<Workspace>,
|
||||
_delegate: Arc<dyn LspAdapterDelegate>,
|
||||
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
|
||||
_cx: &mut WindowContext,
|
||||
) -> Task<Result<SlashCommandOutput>> {
|
||||
let now = Local::now();
|
||||
|
||||
@@ -106,7 +106,7 @@ impl SlashCommand for ProjectSlashCommand {
|
||||
_query: String,
|
||||
_cancel: Arc<AtomicBool>,
|
||||
_workspace: Option<WeakView<Workspace>>,
|
||||
_cx: &mut AppContext,
|
||||
_cx: &mut WindowContext,
|
||||
) -> Task<Result<Vec<ArgumentCompletion>>> {
|
||||
Task::ready(Err(anyhow!("this command does not require argument")))
|
||||
}
|
||||
@@ -119,7 +119,7 @@ impl SlashCommand for ProjectSlashCommand {
|
||||
self: Arc<Self>,
|
||||
_argument: Option<&str>,
|
||||
workspace: WeakView<Workspace>,
|
||||
_delegate: Arc<dyn LspAdapterDelegate>,
|
||||
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
|
||||
cx: &mut WindowContext,
|
||||
) -> Task<Result<SlashCommandOutput>> {
|
||||
let output = workspace.update(cx, |workspace, cx| {
|
||||
|
||||
@@ -2,7 +2,7 @@ use super::{SlashCommand, SlashCommandOutput};
|
||||
use crate::prompt_library::PromptStore;
|
||||
use anyhow::{anyhow, Context, Result};
|
||||
use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection};
|
||||
use gpui::{AppContext, Task, WeakView};
|
||||
use gpui::{Task, WeakView};
|
||||
use language::LspAdapterDelegate;
|
||||
use std::sync::{atomic::AtomicBool, Arc};
|
||||
use ui::prelude::*;
|
||||
@@ -32,7 +32,7 @@ impl SlashCommand for PromptSlashCommand {
|
||||
query: String,
|
||||
_cancellation_flag: Arc<AtomicBool>,
|
||||
_workspace: Option<WeakView<Workspace>>,
|
||||
cx: &mut AppContext,
|
||||
cx: &mut WindowContext,
|
||||
) -> Task<Result<Vec<ArgumentCompletion>>> {
|
||||
let store = PromptStore::global(cx);
|
||||
cx.background_executor().spawn(async move {
|
||||
@@ -42,7 +42,7 @@ impl SlashCommand for PromptSlashCommand {
|
||||
.filter_map(|prompt| {
|
||||
let prompt_title = prompt.title?.to_string();
|
||||
Some(ArgumentCompletion {
|
||||
label: prompt_title.clone(),
|
||||
label: prompt_title.clone().into(),
|
||||
new_text: prompt_title,
|
||||
run_command: true,
|
||||
})
|
||||
@@ -55,7 +55,7 @@ impl SlashCommand for PromptSlashCommand {
|
||||
self: Arc<Self>,
|
||||
title: Option<&str>,
|
||||
_workspace: WeakView<Workspace>,
|
||||
_delegate: Arc<dyn LspAdapterDelegate>,
|
||||
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
|
||||
cx: &mut WindowContext,
|
||||
) -> Task<Result<SlashCommandOutput>> {
|
||||
let Some(title) = title else {
|
||||
@@ -77,6 +77,11 @@ impl SlashCommand for PromptSlashCommand {
|
||||
});
|
||||
cx.foreground_executor().spawn(async move {
|
||||
let mut prompt = prompt.await?;
|
||||
|
||||
if prompt.starts_with('/') {
|
||||
// Prevent an edge case where the inserted prompt starts with a slash command (that leads to funky rendering).
|
||||
prompt.insert(0, '\n');
|
||||
}
|
||||
if prompt.is_empty() {
|
||||
prompt.push('\n');
|
||||
}
|
||||
|
||||
@@ -5,6 +5,7 @@ use super::{
|
||||
};
|
||||
use anyhow::Result;
|
||||
use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection};
|
||||
use feature_flags::FeatureFlag;
|
||||
use gpui::{AppContext, Task, WeakView};
|
||||
use language::{CodeLabel, LineEnding, LspAdapterDelegate};
|
||||
use semantic_index::SemanticIndex;
|
||||
@@ -17,6 +18,12 @@ use ui::{prelude::*, IconName};
|
||||
use util::ResultExt;
|
||||
use workspace::Workspace;
|
||||
|
||||
pub(crate) struct SearchSlashCommandFeatureFlag;
|
||||
|
||||
impl FeatureFlag for SearchSlashCommandFeatureFlag {
|
||||
const NAME: &'static str = "search-slash-command";
|
||||
}
|
||||
|
||||
pub(crate) struct SearchSlashCommand;
|
||||
|
||||
impl SlashCommand for SearchSlashCommand {
|
||||
@@ -45,7 +52,7 @@ impl SlashCommand for SearchSlashCommand {
|
||||
_query: String,
|
||||
_cancel: Arc<AtomicBool>,
|
||||
_workspace: Option<WeakView<Workspace>>,
|
||||
_cx: &mut AppContext,
|
||||
_cx: &mut WindowContext,
|
||||
) -> Task<Result<Vec<ArgumentCompletion>>> {
|
||||
Task::ready(Ok(Vec::new()))
|
||||
}
|
||||
@@ -54,7 +61,7 @@ impl SlashCommand for SearchSlashCommand {
|
||||
self: Arc<Self>,
|
||||
argument: Option<&str>,
|
||||
workspace: WeakView<Workspace>,
|
||||
_delegate: Arc<dyn LspAdapterDelegate>,
|
||||
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
|
||||
cx: &mut WindowContext,
|
||||
) -> Task<Result<SlashCommandOutput>> {
|
||||
let Some(workspace) = workspace.upgrade() else {
|
||||
|
||||
@@ -2,7 +2,7 @@ use super::{SlashCommand, SlashCommandOutput};
|
||||
use anyhow::{anyhow, Context as _, Result};
|
||||
use assistant_slash_command::{ArgumentCompletion, SlashCommandOutputSection};
|
||||
use editor::Editor;
|
||||
use gpui::{AppContext, Task, WeakView};
|
||||
use gpui::{Task, WeakView};
|
||||
use language::LspAdapterDelegate;
|
||||
use std::sync::Arc;
|
||||
use std::{path::Path, sync::atomic::AtomicBool};
|
||||
@@ -29,7 +29,7 @@ impl SlashCommand for OutlineSlashCommand {
|
||||
_query: String,
|
||||
_cancel: Arc<AtomicBool>,
|
||||
_workspace: Option<WeakView<Workspace>>,
|
||||
_cx: &mut AppContext,
|
||||
_cx: &mut WindowContext,
|
||||
) -> Task<Result<Vec<ArgumentCompletion>>> {
|
||||
Task::ready(Err(anyhow!("this command does not require argument")))
|
||||
}
|
||||
@@ -42,7 +42,7 @@ impl SlashCommand for OutlineSlashCommand {
|
||||
self: Arc<Self>,
|
||||
_argument: Option<&str>,
|
||||
workspace: WeakView<Workspace>,
|
||||
_delegate: Arc<dyn LspAdapterDelegate>,
|
||||
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
|
||||
cx: &mut WindowContext,
|
||||
) -> Task<Result<SlashCommandOutput>> {
|
||||
let output = workspace.update(cx, |workspace, cx| {
|
||||
|
||||
@@ -3,29 +3,35 @@ use super::{
|
||||
file_command::{build_entry_output_section, codeblock_fence_for_path},
|
||||
SlashCommand, SlashCommandOutput,
|
||||
};
|
||||
use anyhow::{anyhow, Result};
|
||||
use anyhow::{Context, Result};
|
||||
use assistant_slash_command::ArgumentCompletion;
|
||||
use collections::HashMap;
|
||||
use editor::Editor;
|
||||
use gpui::{AppContext, Entity, Task, WeakView};
|
||||
use language::LspAdapterDelegate;
|
||||
use std::{fmt::Write, sync::Arc};
|
||||
use gpui::{Entity, Task, WeakView};
|
||||
use language::{BufferSnapshot, LspAdapterDelegate};
|
||||
use std::{
|
||||
fmt::Write,
|
||||
path::PathBuf,
|
||||
sync::{atomic::AtomicBool, Arc},
|
||||
};
|
||||
use ui::WindowContext;
|
||||
use workspace::Workspace;
|
||||
|
||||
pub(crate) struct TabsSlashCommand;
|
||||
|
||||
const ALL_TABS_COMPLETION_ITEM: &str = "all";
|
||||
|
||||
impl SlashCommand for TabsSlashCommand {
|
||||
fn name(&self) -> String {
|
||||
"tabs".into()
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
"insert open tabs".into()
|
||||
"insert open tabs (active tab by default)".to_owned()
|
||||
}
|
||||
|
||||
fn menu_text(&self) -> String {
|
||||
"Insert Open Tabs".into()
|
||||
"Insert Open Tabs".to_owned()
|
||||
}
|
||||
|
||||
fn requires_argument(&self) -> bool {
|
||||
@@ -34,85 +40,188 @@ impl SlashCommand for TabsSlashCommand {
|
||||
|
||||
fn complete_argument(
|
||||
self: Arc<Self>,
|
||||
_query: String,
|
||||
_cancel: Arc<std::sync::atomic::AtomicBool>,
|
||||
_workspace: Option<WeakView<Workspace>>,
|
||||
_cx: &mut AppContext,
|
||||
query: String,
|
||||
cancel: Arc<AtomicBool>,
|
||||
workspace: Option<WeakView<Workspace>>,
|
||||
cx: &mut WindowContext,
|
||||
) -> Task<Result<Vec<ArgumentCompletion>>> {
|
||||
Task::ready(Err(anyhow!("this command does not require argument")))
|
||||
let all_tabs_completion_item = if ALL_TABS_COMPLETION_ITEM.contains(&query) {
|
||||
Some(ArgumentCompletion {
|
||||
label: ALL_TABS_COMPLETION_ITEM.into(),
|
||||
new_text: ALL_TABS_COMPLETION_ITEM.to_owned(),
|
||||
run_command: true,
|
||||
})
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let tab_items_search = tab_items_for_query(workspace, query, cancel, false, cx);
|
||||
cx.spawn(|_| async move {
|
||||
let tab_completion_items =
|
||||
tab_items_search
|
||||
.await?
|
||||
.into_iter()
|
||||
.filter_map(|(path, ..)| {
|
||||
let path_string = path.as_deref()?.to_string_lossy().to_string();
|
||||
Some(ArgumentCompletion {
|
||||
label: path_string.clone().into(),
|
||||
new_text: path_string,
|
||||
run_command: true,
|
||||
})
|
||||
});
|
||||
Ok(all_tabs_completion_item
|
||||
.into_iter()
|
||||
.chain(tab_completion_items)
|
||||
.collect::<Vec<_>>())
|
||||
})
|
||||
}
|
||||
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
_argument: Option<&str>,
|
||||
argument: Option<&str>,
|
||||
workspace: WeakView<Workspace>,
|
||||
_delegate: Arc<dyn LspAdapterDelegate>,
|
||||
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
|
||||
cx: &mut WindowContext,
|
||||
) -> Task<Result<SlashCommandOutput>> {
|
||||
let open_buffers = workspace.update(cx, |workspace, cx| {
|
||||
let mut timestamps_by_entity_id = HashMap::default();
|
||||
let mut open_buffers = Vec::new();
|
||||
let tab_items_search = tab_items_for_query(
|
||||
Some(workspace),
|
||||
argument.map(ToOwned::to_owned).unwrap_or_default(),
|
||||
Arc::new(AtomicBool::new(false)),
|
||||
true,
|
||||
cx,
|
||||
);
|
||||
|
||||
for pane in workspace.panes() {
|
||||
let pane = pane.read(cx);
|
||||
for entry in pane.activation_history() {
|
||||
timestamps_by_entity_id.insert(entry.entity_id, entry.timestamp);
|
||||
cx.background_executor().spawn(async move {
|
||||
let mut sections = Vec::new();
|
||||
let mut text = String::new();
|
||||
let mut has_diagnostics = false;
|
||||
for (full_path, buffer, _) in tab_items_search.await? {
|
||||
let section_start_ix = text.len();
|
||||
text.push_str(&codeblock_fence_for_path(full_path.as_deref(), None));
|
||||
for chunk in buffer.as_rope().chunks() {
|
||||
text.push_str(chunk);
|
||||
}
|
||||
if !text.ends_with('\n') {
|
||||
text.push('\n');
|
||||
}
|
||||
writeln!(text, "```").unwrap();
|
||||
if write_single_file_diagnostics(&mut text, full_path.as_deref(), &buffer) {
|
||||
has_diagnostics = true;
|
||||
}
|
||||
if !text.ends_with('\n') {
|
||||
text.push('\n');
|
||||
}
|
||||
|
||||
let section_end_ix = text.len() - 1;
|
||||
sections.push(build_entry_output_section(
|
||||
section_start_ix..section_end_ix,
|
||||
full_path.as_deref(),
|
||||
false,
|
||||
None,
|
||||
));
|
||||
}
|
||||
|
||||
for editor in workspace.items_of_type::<Editor>(cx) {
|
||||
if let Some(buffer) = editor.read(cx).buffer().read(cx).as_singleton() {
|
||||
if let Some(timestamp) = timestamps_by_entity_id.get(&editor.entity_id()) {
|
||||
let snapshot = buffer.read(cx).snapshot();
|
||||
let full_path = snapshot.resolve_file_path(cx, true);
|
||||
open_buffers.push((full_path, snapshot, *timestamp));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
open_buffers
|
||||
});
|
||||
|
||||
match open_buffers {
|
||||
Ok(mut open_buffers) => cx.background_executor().spawn(async move {
|
||||
open_buffers.sort_by_key(|(_, _, timestamp)| *timestamp);
|
||||
|
||||
let mut sections = Vec::new();
|
||||
let mut text = String::new();
|
||||
let mut has_diagnostics = false;
|
||||
for (full_path, buffer, _) in open_buffers {
|
||||
let section_start_ix = text.len();
|
||||
text.push_str(&codeblock_fence_for_path(full_path.as_deref(), None));
|
||||
for chunk in buffer.as_rope().chunks() {
|
||||
text.push_str(chunk);
|
||||
}
|
||||
if !text.ends_with('\n') {
|
||||
text.push('\n');
|
||||
}
|
||||
writeln!(text, "```").unwrap();
|
||||
if write_single_file_diagnostics(&mut text, full_path.as_deref(), &buffer) {
|
||||
has_diagnostics = true;
|
||||
}
|
||||
if !text.ends_with('\n') {
|
||||
text.push('\n');
|
||||
}
|
||||
|
||||
let section_end_ix = text.len() - 1;
|
||||
sections.push(build_entry_output_section(
|
||||
section_start_ix..section_end_ix,
|
||||
full_path.as_deref(),
|
||||
false,
|
||||
None,
|
||||
));
|
||||
}
|
||||
|
||||
Ok(SlashCommandOutput {
|
||||
text,
|
||||
sections,
|
||||
run_commands_in_text: has_diagnostics,
|
||||
})
|
||||
}),
|
||||
Err(error) => Task::ready(Err(error)),
|
||||
}
|
||||
Ok(SlashCommandOutput {
|
||||
text,
|
||||
sections,
|
||||
run_commands_in_text: has_diagnostics,
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
fn tab_items_for_query(
|
||||
workspace: Option<WeakView<Workspace>>,
|
||||
mut query: String,
|
||||
cancel: Arc<AtomicBool>,
|
||||
use_active_tab_for_empty_query: bool,
|
||||
cx: &mut WindowContext,
|
||||
) -> Task<anyhow::Result<Vec<(Option<PathBuf>, BufferSnapshot, usize)>>> {
|
||||
cx.spawn(|mut cx| async move {
|
||||
query.make_ascii_lowercase();
|
||||
let mut open_buffers =
|
||||
workspace
|
||||
.context("no workspace")?
|
||||
.update(&mut cx, |workspace, cx| {
|
||||
if use_active_tab_for_empty_query && query.trim().is_empty() {
|
||||
let active_editor = workspace
|
||||
.active_item(cx)
|
||||
.context("no active item")?
|
||||
.downcast::<Editor>()
|
||||
.context("active item is not an editor")?;
|
||||
let snapshot = active_editor
|
||||
.read(cx)
|
||||
.buffer()
|
||||
.read(cx)
|
||||
.as_singleton()
|
||||
.context("active editor is not a singleton buffer")?
|
||||
.read(cx)
|
||||
.snapshot();
|
||||
let full_path = snapshot.resolve_file_path(cx, true);
|
||||
return anyhow::Ok(vec![(full_path, snapshot, 0)]);
|
||||
}
|
||||
|
||||
let mut timestamps_by_entity_id = HashMap::default();
|
||||
let mut open_buffers = Vec::new();
|
||||
|
||||
for pane in workspace.panes() {
|
||||
let pane = pane.read(cx);
|
||||
for entry in pane.activation_history() {
|
||||
timestamps_by_entity_id.insert(entry.entity_id, entry.timestamp);
|
||||
}
|
||||
}
|
||||
|
||||
for editor in workspace.items_of_type::<Editor>(cx) {
|
||||
if let Some(buffer) = editor.read(cx).buffer().read(cx).as_singleton() {
|
||||
if let Some(timestamp) =
|
||||
timestamps_by_entity_id.get(&editor.entity_id())
|
||||
{
|
||||
let snapshot = buffer.read(cx).snapshot();
|
||||
let full_path = snapshot.resolve_file_path(cx, true);
|
||||
open_buffers.push((full_path, snapshot, *timestamp));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(open_buffers)
|
||||
})??;
|
||||
|
||||
let background_executor = cx.background_executor().clone();
|
||||
cx.background_executor()
|
||||
.spawn(async move {
|
||||
open_buffers.sort_by_key(|(_, _, timestamp)| *timestamp);
|
||||
let query = query.trim();
|
||||
if query.is_empty() || query == ALL_TABS_COMPLETION_ITEM {
|
||||
return Ok(open_buffers);
|
||||
}
|
||||
|
||||
let match_candidates = open_buffers
|
||||
.iter()
|
||||
.enumerate()
|
||||
.filter_map(|(id, (full_path, ..))| {
|
||||
let path_string = full_path.as_deref()?.to_string_lossy().to_string();
|
||||
Some(fuzzy::StringMatchCandidate {
|
||||
id,
|
||||
char_bag: path_string.as_str().into(),
|
||||
string: path_string,
|
||||
})
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
let string_matches = fuzzy::match_strings(
|
||||
&match_candidates,
|
||||
&query,
|
||||
true,
|
||||
usize::MAX,
|
||||
&cancel,
|
||||
background_executor,
|
||||
)
|
||||
.await;
|
||||
|
||||
Ok(string_matches
|
||||
.into_iter()
|
||||
.filter_map(|string_match| open_buffers.get(string_match.candidate_id))
|
||||
.cloned()
|
||||
.collect())
|
||||
})
|
||||
.await
|
||||
})
|
||||
}
|
||||
|
||||
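`tab_items_for_query` sorts open buffers by activation time and then filters them by fuzzy-matching the query against their paths, mapping matches back through the candidate index. A simplified standalone version of that flow; a substring test stands in for Zed's `fuzzy::match_strings`, and the `&'static str` tags stand in for buffer snapshots:

```rust
use std::path::PathBuf;

// Stand-in for fuzzy::StringMatchCandidate: remember which buffer each
// candidate came from so matches can be mapped back afterwards.
struct Candidate {
    id: usize,
    path_string: String,
}

/// Filter open buffers by a query over their paths, preserving the original
/// (timestamp-sorted) order; empty queries and "all" keep every tab.
fn filter_tabs(open_buffers: &[(Option<PathBuf>, &'static str)], query: &str) -> Vec<&'static str> {
    if query.trim().is_empty() || query == "all" {
        return open_buffers.iter().map(|(_, tag)| *tag).collect();
    }
    let candidates: Vec<Candidate> = open_buffers
        .iter()
        .enumerate()
        .filter_map(|(id, (path, _))| {
            Some(Candidate {
                id,
                path_string: path.as_deref()?.to_string_lossy().to_lowercase(),
            })
        })
        .collect();
    candidates
        .iter()
        .filter(|candidate| candidate.path_string.contains(query))
        .filter_map(|candidate| open_buffers.get(candidate.id).map(|(_, tag)| *tag))
        .collect()
}

fn main() {
    let buffers = [
        (Some(PathBuf::from("src/main.rs")), "main"),
        (Some(PathBuf::from("README.md")), "readme"),
        (None, "untitled"),
    ];
    assert_eq!(filter_tabs(&buffers, ""), vec!["main", "readme", "untitled"]);
    assert_eq!(filter_tabs(&buffers, "read"), vec!["readme"]);
    println!("{:?}", filter_tabs(&buffers, "main"));
}
```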
@@ -9,21 +9,23 @@ use gpui::{AppContext, Task, WeakView};
|
||||
use language::{CodeLabel, LspAdapterDelegate};
|
||||
use terminal_view::{terminal_panel::TerminalPanel, TerminalView};
|
||||
use ui::prelude::*;
|
||||
use workspace::Workspace;
|
||||
use workspace::{dock::Panel, Workspace};
|
||||
|
||||
use crate::DEFAULT_CONTEXT_LINES;
|
||||
|
||||
use super::create_label_for_command;
|
||||
|
||||
pub(crate) struct TermSlashCommand;
|
||||
pub(crate) struct TerminalSlashCommand;
|
||||
|
||||
const LINE_COUNT_ARG: &str = "--line-count";
|
||||
|
||||
impl SlashCommand for TermSlashCommand {
|
||||
impl SlashCommand for TerminalSlashCommand {
|
||||
fn name(&self) -> String {
|
||||
"term".into()
|
||||
"terminal".into()
|
||||
}
|
||||
|
||||
fn label(&self, cx: &AppContext) -> CodeLabel {
|
||||
create_label_for_command("term", &[LINE_COUNT_ARG], cx)
|
||||
create_label_for_command("terminal", &[LINE_COUNT_ARG], cx)
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
@@ -43,10 +45,10 @@ impl SlashCommand for TermSlashCommand {
|
||||
_query: String,
|
||||
_cancel: Arc<AtomicBool>,
|
||||
_workspace: Option<WeakView<Workspace>>,
|
||||
_cx: &mut AppContext,
|
||||
_cx: &mut WindowContext,
|
||||
) -> Task<Result<Vec<ArgumentCompletion>>> {
|
||||
Task::ready(Ok(vec![ArgumentCompletion {
|
||||
label: LINE_COUNT_ARG.to_string(),
|
||||
label: LINE_COUNT_ARG.into(),
|
||||
new_text: LINE_COUNT_ARG.to_string(),
|
||||
run_command: true,
|
||||
}]))
|
||||
@@ -56,7 +58,7 @@ impl SlashCommand for TermSlashCommand {
|
||||
self: Arc<Self>,
|
||||
argument: Option<&str>,
|
||||
workspace: WeakView<Workspace>,
|
||||
_delegate: Arc<dyn LspAdapterDelegate>,
|
||||
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
|
||||
cx: &mut WindowContext,
|
||||
) -> Task<Result<SlashCommandOutput>> {
|
||||
let Some(workspace) = workspace.upgrade() else {
|
||||
@@ -65,17 +67,17 @@ impl SlashCommand for TermSlashCommand {
|
||||
let Some(terminal_panel) = workspace.read(cx).panel::<TerminalPanel>(cx) else {
|
||||
return Task::ready(Err(anyhow::anyhow!("no terminal panel open")));
|
||||
};
|
||||
let Some(active_terminal) = terminal_panel
|
||||
.read(cx)
|
||||
.pane()
|
||||
.read(cx)
|
||||
.active_item()
|
||||
.and_then(|t| t.downcast::<TerminalView>())
|
||||
else {
|
||||
let Some(active_terminal) = terminal_panel.read(cx).pane().and_then(|pane| {
|
||||
pane.read(cx)
|
||||
.active_item()
|
||||
.and_then(|t| t.downcast::<TerminalView>())
|
||||
}) else {
|
||||
return Task::ready(Err(anyhow::anyhow!("no active terminal")));
|
||||
};
|
||||
|
||||
let line_count = argument.and_then(|a| parse_argument(a)).unwrap_or(20);
|
||||
let line_count = argument
|
||||
.and_then(|a| parse_argument(a))
|
||||
.unwrap_or(DEFAULT_CONTEXT_LINES);
|
||||
|
||||
let lines = active_terminal
|
||||
.read(cx)
|
||||
crates/assistant/src/slash_command/workflow_command.rs (new file, 76 lines)
@@ -0,0 +1,76 @@
|
||||
use crate::prompts::PromptBuilder;
use std::sync::Arc;

use std::sync::atomic::AtomicBool;

use anyhow::Result;
use assistant_slash_command::{
    ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
};
use gpui::{Task, WeakView};
use language::LspAdapterDelegate;
use ui::prelude::*;

use workspace::Workspace;

pub(crate) struct WorkflowSlashCommand {
    prompt_builder: Arc<PromptBuilder>,
}

impl WorkflowSlashCommand {
    pub fn new(prompt_builder: Arc<PromptBuilder>) -> Self {
        Self { prompt_builder }
    }
}

impl SlashCommand for WorkflowSlashCommand {
    fn name(&self) -> String {
        "workflow".into()
    }

    fn description(&self) -> String {
        "insert a prompt that opts into the edit workflow".into()
    }

    fn menu_text(&self) -> String {
        "Insert Workflow Prompt".into()
    }

    fn requires_argument(&self) -> bool {
        false
    }

    fn complete_argument(
        self: Arc<Self>,
        _query: String,
        _cancel: Arc<AtomicBool>,
        _workspace: Option<WeakView<Workspace>>,
        _cx: &mut WindowContext,
    ) -> Task<Result<Vec<ArgumentCompletion>>> {
        Task::ready(Ok(Vec::new()))
    }

    fn run(
        self: Arc<Self>,
        _argument: Option<&str>,
        _workspace: WeakView<Workspace>,
        _delegate: Option<Arc<dyn LspAdapterDelegate>>,
        cx: &mut WindowContext,
    ) -> Task<Result<SlashCommandOutput>> {
        let prompt_builder = self.prompt_builder.clone();
        cx.spawn(|_cx| async move {
            let text = prompt_builder.generate_workflow_prompt()?;
            let range = 0..text.len();

            Ok(SlashCommandOutput {
                text,
                sections: vec![SlashCommandOutputSection {
                    range,
                    icon: IconName::Route,
                    label: "Workflow".into(),
                }],
                run_commands_in_text: false,
            })
        })
    }
}
|
||||
crates/assistant/src/slash_command_settings.rs (new file, 44 lines)
@@ -0,0 +1,44 @@
|
||||
use anyhow::Result;
use gpui::AppContext;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsSources};

/// Settings for slash commands.
#[derive(Deserialize, Serialize, Debug, Default, Clone, JsonSchema)]
pub struct SlashCommandSettings {
    /// Settings for the `/docs` slash command.
    #[serde(default)]
    pub docs: DocsCommandSettings,
    /// Settings for the `/project` slash command.
    #[serde(default)]
    pub project: ProjectCommandSettings,
}

/// Settings for the `/docs` slash command.
#[derive(Deserialize, Serialize, Debug, Default, Clone, JsonSchema)]
pub struct DocsCommandSettings {
    /// Whether `/docs` is enabled.
    #[serde(default)]
    pub enabled: bool,
}

/// Settings for the `/project` slash command.
#[derive(Deserialize, Serialize, Debug, Default, Clone, JsonSchema)]
pub struct ProjectCommandSettings {
    /// Whether `/project` is enabled.
    #[serde(default)]
    pub enabled: bool,
}

impl Settings for SlashCommandSettings {
    const KEY: Option<&'static str> = Some("slash_commands");

    type FileContent = Self;

    fn load(sources: SettingsSources<Self::FileContent>, _cx: &mut AppContext) -> Result<Self> {
        SettingsSources::<Self::FileContent>::json_merge_with(
            [sources.default].into_iter().chain(sources.user),
        )
    }
}
|
||||
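`SlashCommandSettings::load` above layers user settings over the defaults with `SettingsSources::json_merge_with`. A rough sketch of that default-plus-user JSON merge using `serde_json` directly, outside Zed's settings machinery (the recursive `json_merge` helper is illustrative, not the actual implementation):

```rust
use serde::Deserialize;
use serde_json::{json, Value};

#[derive(Deserialize, Debug, Default)]
struct DocsCommandSettings {
    #[serde(default)]
    enabled: bool,
}

#[derive(Deserialize, Debug, Default)]
struct SlashCommandSettings {
    #[serde(default)]
    docs: DocsCommandSettings,
}

/// Recursively overlay `user` on top of `base`, object key by object key.
fn json_merge(base: &mut Value, user: &Value) {
    match (base, user) {
        (Value::Object(base_map), Value::Object(user_map)) => {
            for (key, user_value) in user_map {
                json_merge(base_map.entry(key.clone()).or_insert(Value::Null), user_value);
            }
        }
        (base, user) => *base = user.clone(),
    }
}

fn main() -> serde_json::Result<()> {
    let mut merged = json!({ "docs": { "enabled": false } }); // defaults
    json_merge(&mut merged, &json!({ "docs": { "enabled": true } })); // user settings
    let settings: SlashCommandSettings = serde_json::from_value(merged)?;
    assert!(settings.docs.enabled);
    println!("{settings:?}");
    Ok(())
}
```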
@@ -1,5 +1,6 @@
|
||||
use collections::HashMap;
|
||||
use ordered_float::OrderedFloat;
|
||||
use rope::{Point, Rope, TextSummary};
|
||||
use std::collections::{BTreeSet, HashMap};
|
||||
use std::{
|
||||
cmp,
|
||||
fmt::{self, Debug},
|
||||
@@ -64,11 +65,11 @@ impl Debug for Matrix {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum Hunk {
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum CharOperation {
|
||||
Insert { text: String },
|
||||
Remove { len: usize },
|
||||
Keep { len: usize },
|
||||
Delete { bytes: usize },
|
||||
Keep { bytes: usize },
|
||||
}
|
||||
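The renamed `CharOperation` variants describe a character-level diff as inserted text plus byte counts to delete or keep from the old text. Replaying such a sequence against the old text reconstructs the new text, which is the invariant the randomized test later in this file checks. A minimal standalone replay (byte counts are assumed to land on char boundaries, as they do in this ASCII example):

```rust
#[derive(Debug, Clone)]
enum CharOperation {
    Insert { text: String },
    Delete { bytes: usize },
    Keep { bytes: usize },
}

/// Replay a sequence of character operations against the old text to
/// reconstruct the new text; `Delete`/`Keep` counts are byte lengths.
fn apply(old: &str, operations: &[CharOperation]) -> String {
    let mut new = String::new();
    let mut old_ix = 0;
    for operation in operations {
        match operation {
            CharOperation::Insert { text } => new.push_str(text),
            CharOperation::Delete { bytes } => old_ix += bytes,
            CharOperation::Keep { bytes } => {
                new.push_str(&old[old_ix..old_ix + bytes]);
                old_ix += bytes;
            }
        }
    }
    new
}

fn main() {
    let old = "hello world";
    let operations = [
        CharOperation::Keep { bytes: 6 },
        CharOperation::Delete { bytes: 5 },
        CharOperation::Insert { text: "there".into() },
    ];
    assert_eq!(apply(old, &operations), "hello there");
    println!("{}", apply(old, &operations));
}
```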
|
||||
pub struct StreamingDiff {
|
||||
@@ -103,7 +104,7 @@ impl StreamingDiff {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn push_new(&mut self, text: &str) -> Vec<Hunk> {
|
||||
pub fn push_new(&mut self, text: &str) -> Vec<CharOperation> {
|
||||
self.new.extend(text.chars());
|
||||
self.scores.resize(self.old.len() + 1, self.new.len() + 1);
|
||||
|
||||
@@ -145,7 +146,7 @@ impl StreamingDiff {
|
||||
hunks
|
||||
}
|
||||
|
||||
fn backtrack(&self, old_text_ix: usize, new_text_ix: usize) -> Vec<Hunk> {
|
||||
fn backtrack(&self, old_text_ix: usize, new_text_ix: usize) -> Vec<CharOperation> {
|
||||
let mut pending_insert: Option<Range<usize>> = None;
|
||||
let mut hunks = Vec::new();
|
||||
let mut i = old_text_ix;
|
||||
@@ -185,22 +186,22 @@ impl StreamingDiff {
|
||||
}
|
||||
} else {
|
||||
if let Some(range) = pending_insert.take() {
|
||||
hunks.push(Hunk::Insert {
|
||||
hunks.push(CharOperation::Insert {
|
||||
text: self.new[range].iter().collect(),
|
||||
});
|
||||
}
|
||||
|
||||
let char_len = self.old[i - 1].len_utf8();
|
||||
if prev_i == i - 1 && prev_j == j {
|
||||
if let Some(Hunk::Remove { len }) = hunks.last_mut() {
|
||||
if let Some(CharOperation::Delete { bytes: len }) = hunks.last_mut() {
|
||||
*len += char_len;
|
||||
} else {
|
||||
hunks.push(Hunk::Remove { len: char_len })
|
||||
hunks.push(CharOperation::Delete { bytes: char_len })
|
||||
}
|
||||
} else if let Some(Hunk::Keep { len }) = hunks.last_mut() {
|
||||
} else if let Some(CharOperation::Keep { bytes: len }) = hunks.last_mut() {
|
||||
*len += char_len;
|
||||
} else {
|
||||
hunks.push(Hunk::Keep { len: char_len })
|
||||
hunks.push(CharOperation::Keep { bytes: char_len })
|
||||
}
|
||||
}
|
||||
|
||||
@@ -209,7 +210,7 @@ impl StreamingDiff {
|
||||
}
|
||||
|
||||
if let Some(range) = pending_insert.take() {
|
||||
hunks.push(Hunk::Insert {
|
||||
hunks.push(CharOperation::Insert {
|
||||
text: self.new[range].iter().collect(),
|
||||
});
|
||||
}
|
||||
@@ -218,74 +219,853 @@ impl StreamingDiff {
|
||||
hunks
|
||||
}
|
||||
|
||||
pub fn finish(self) -> Vec<Hunk> {
|
||||
pub fn finish(self) -> Vec<CharOperation> {
|
||||
self.backtrack(self.old.len(), self.new.len())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::env;
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub enum LineOperation {
|
||||
Insert { lines: u32 },
|
||||
Delete { lines: u32 },
|
||||
Keep { lines: u32 },
|
||||
}
|
||||
|
||||
use super::*;
|
||||
use rand::prelude::*;
|
||||
#[derive(Debug, Default)]
|
||||
pub struct LineDiff {
|
||||
inserted_newline_at_end: bool,
|
||||
/// The extent of kept and deleted text.
|
||||
old_end: Point,
|
||||
/// The extent of kept and inserted text.
|
||||
new_end: Point,
|
||||
/// Deleted rows, expressed in terms of the old text.
|
||||
deleted_rows: BTreeSet<u32>,
|
||||
/// Inserted rows, expressed in terms of the new text.
|
||||
inserted_rows: BTreeSet<u32>,
|
||||
buffered_insert: String,
|
||||
/// After deleting a newline, we buffer deletion until we keep or insert a character.
|
||||
buffered_delete: usize,
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 100)]
|
||||
fn test_random_diffs(mut rng: StdRng) {
|
||||
let old_text_len = env::var("OLD_TEXT_LEN")
|
||||
.map(|i| i.parse().expect("invalid `OLD_TEXT_LEN` variable"))
|
||||
.unwrap_or(10);
|
||||
let new_text_len = env::var("NEW_TEXT_LEN")
|
||||
.map(|i| i.parse().expect("invalid `NEW_TEXT_LEN` variable"))
|
||||
.unwrap_or(10);
|
||||
|
||||
let old = util::RandomCharIter::new(&mut rng)
|
||||
.take(old_text_len)
|
||||
.collect::<String>();
|
||||
log::info!("old text: {:?}", old);
|
||||
|
||||
let mut diff = StreamingDiff::new(old.clone());
|
||||
let mut hunks = Vec::new();
|
||||
let mut new_len = 0;
|
||||
let mut new = String::new();
|
||||
while new_len < new_text_len {
|
||||
let new_chunk_len = rng.gen_range(1..=new_text_len - new_len);
|
||||
let new_chunk = util::RandomCharIter::new(&mut rng)
|
||||
.take(new_len)
|
||||
.collect::<String>();
|
||||
log::info!("new chunk: {:?}", new_chunk);
|
||||
new_len += new_chunk_len;
|
||||
new.push_str(&new_chunk);
|
||||
let new_hunks = diff.push_new(&new_chunk);
|
||||
log::info!("hunks: {:?}", new_hunks);
|
||||
hunks.extend(new_hunks);
|
||||
impl LineDiff {
|
||||
pub fn push_char_operations<'a>(
|
||||
&mut self,
|
||||
operations: impl IntoIterator<Item = &'a CharOperation>,
|
||||
old_text: &Rope,
|
||||
) {
|
||||
for operation in operations {
|
||||
self.push_char_operation(operation, old_text);
|
||||
}
|
||||
let final_hunks = diff.finish();
|
||||
log::info!("final hunks: {:?}", final_hunks);
|
||||
hunks.extend(final_hunks);
|
||||
}
|
||||
|
||||
log::info!("new text: {:?}", new);
|
||||
let mut old_ix = 0;
|
||||
let mut new_ix = 0;
|
||||
let mut patched = String::new();
|
||||
for hunk in hunks {
|
||||
match hunk {
|
||||
Hunk::Keep { len } => {
|
||||
assert_eq!(&old[old_ix..old_ix + len], &new[new_ix..new_ix + len]);
|
||||
patched.push_str(&old[old_ix..old_ix + len]);
|
||||
old_ix += len;
|
||||
new_ix += len;
|
||||
pub fn push_char_operation(&mut self, operation: &CharOperation, old_text: &Rope) {
|
||||
match operation {
|
||||
CharOperation::Insert { text } => {
|
||||
self.flush_delete(old_text);
|
||||
|
||||
if is_line_start(self.old_end) {
|
||||
if let Some(newline_ix) = text.rfind('\n') {
|
||||
let (prefix, suffix) = text.split_at(newline_ix + 1);
|
||||
self.buffered_insert.push_str(prefix);
|
||||
self.flush_insert(old_text);
|
||||
self.buffered_insert.push_str(suffix);
|
||||
} else {
|
||||
self.buffered_insert.push_str(&text);
|
||||
}
|
||||
} else {
|
||||
self.buffered_insert.push_str(&text);
|
||||
if !text.ends_with('\n') {
|
||||
self.flush_insert(old_text);
|
||||
}
|
||||
}
|
||||
Hunk::Remove { len } => {
|
||||
old_ix += len;
|
||||
}
|
||||
CharOperation::Delete { bytes } => {
|
||||
self.buffered_delete += bytes;
|
||||
|
||||
let common_suffix_len = self.trim_buffered_end(old_text);
|
||||
self.flush_insert(old_text);
|
||||
|
||||
if common_suffix_len > 0 || !is_line_end(self.old_end, old_text) {
|
||||
self.flush_delete(old_text);
|
||||
self.keep(common_suffix_len, old_text);
|
||||
}
|
||||
Hunk::Insert { text } => {
|
||||
assert_eq!(text, &new[new_ix..new_ix + text.len()]);
|
||||
patched.push_str(&text);
|
||||
new_ix += text.len();
|
||||
}
|
||||
CharOperation::Keep { bytes } => {
|
||||
self.flush_delete(old_text);
|
||||
self.flush_insert(old_text);
|
||||
self.keep(*bytes, old_text);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn flush_insert(&mut self, old_text: &Rope) {
|
||||
if self.buffered_insert.is_empty() {
|
||||
return;
|
||||
}
|
||||
|
||||
let new_start = self.new_end;
|
||||
let lines = TextSummary::from(self.buffered_insert.as_str()).lines;
|
||||
self.new_end += lines;
|
||||
|
||||
if is_line_start(self.old_end) {
|
||||
if self.new_end.column == 0 {
|
||||
self.inserted_rows.extend(new_start.row..self.new_end.row);
|
||||
} else {
|
||||
self.deleted_rows.insert(self.old_end.row);
|
||||
self.inserted_rows.extend(new_start.row..=self.new_end.row);
|
||||
}
|
||||
} else if is_line_end(self.old_end, old_text) {
|
||||
if self.buffered_insert.starts_with('\n') {
|
||||
self.inserted_rows
|
||||
.extend(new_start.row + 1..=self.new_end.row);
|
||||
self.inserted_newline_at_end = true;
|
||||
} else {
|
||||
if !self.inserted_newline_at_end {
|
||||
self.deleted_rows.insert(self.old_end.row);
|
||||
}
|
||||
self.inserted_rows.extend(new_start.row..=self.new_end.row);
|
||||
}
|
||||
} else {
|
||||
self.deleted_rows.insert(self.old_end.row);
|
||||
self.inserted_rows.extend(new_start.row..=self.new_end.row);
|
||||
}
|
||||
|
||||
self.buffered_insert.clear();
|
||||
}
|
||||
|
||||
fn flush_delete(&mut self, old_text: &Rope) {
|
||||
if self.buffered_delete == 0 {
|
||||
return;
|
||||
}
|
||||
|
||||
let old_start = self.old_end;
|
||||
self.old_end =
|
||||
old_text.offset_to_point(old_text.point_to_offset(self.old_end) + self.buffered_delete);
|
||||
|
||||
if is_line_end(old_start, old_text) && is_line_end(self.old_end, old_text) {
|
||||
self.deleted_rows
|
||||
.extend(old_start.row + 1..=self.old_end.row);
|
||||
} else if is_line_start(old_start)
|
||||
&& (is_line_start(self.old_end) && self.old_end < old_text.max_point())
|
||||
&& self.new_end.column == 0
|
||||
{
|
||||
self.deleted_rows.extend(old_start.row..self.old_end.row);
|
||||
} else {
|
||||
self.inserted_rows.insert(self.new_end.row);
|
||||
self.deleted_rows.extend(old_start.row..=self.old_end.row);
|
||||
}
|
||||
|
||||
self.inserted_newline_at_end = false;
|
||||
self.buffered_delete = 0;
|
||||
}
|
||||
|
||||
fn keep(&mut self, bytes: usize, old_text: &Rope) {
|
||||
if bytes == 0 {
|
||||
return;
|
||||
}
|
||||
|
||||
let lines =
|
||||
old_text.offset_to_point(old_text.point_to_offset(self.old_end) + bytes) - self.old_end;
|
||||
self.old_end += lines;
|
||||
self.new_end += lines;
|
||||
self.inserted_newline_at_end = false;
|
||||
}
|
||||
|
||||
fn trim_buffered_end(&mut self, old_text: &Rope) -> usize {
|
||||
let old_start_offset = old_text.point_to_offset(self.old_end);
|
||||
let old_end_offset = old_start_offset + self.buffered_delete;
|
||||
|
||||
let new_chars = self.buffered_insert.chars().rev();
|
||||
let old_chars = old_text
|
||||
.chunks_in_range(old_start_offset..old_end_offset)
|
||||
.flat_map(|chunk| chunk.chars().rev());
|
||||
|
||||
let mut common_suffix_len = 0;
|
||||
for (new_ch, old_ch) in new_chars.zip(old_chars) {
|
||||
if new_ch == old_ch {
|
||||
common_suffix_len += new_ch.len_utf8();
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
self.buffered_delete -= common_suffix_len;
|
||||
self.buffered_insert
|
||||
.truncate(self.buffered_insert.len() - common_suffix_len);
|
||||
|
||||
common_suffix_len
|
||||
}
|
||||
|
||||
pub fn finish(&mut self, old_text: &Rope) {
|
||||
self.flush_insert(old_text);
|
||||
self.flush_delete(old_text);
|
||||
|
||||
let old_start = self.old_end;
|
||||
self.old_end = old_text.max_point();
|
||||
self.new_end += self.old_end - old_start;
|
||||
}
|
||||
|
||||
pub fn line_operations(&self) -> Vec<LineOperation> {
|
||||
let mut ops = Vec::new();
|
||||
let mut deleted_rows = self.deleted_rows.iter().copied().peekable();
|
||||
let mut inserted_rows = self.inserted_rows.iter().copied().peekable();
|
||||
let mut old_row = 0;
|
||||
let mut new_row = 0;
|
||||
|
||||
while deleted_rows.peek().is_some() || inserted_rows.peek().is_some() {
|
||||
// Check for a run of deleted lines at the current old row.
|
||||
if Some(old_row) == deleted_rows.peek().copied() {
|
||||
if let Some(LineOperation::Delete { lines }) = ops.last_mut() {
|
||||
*lines += 1;
|
||||
} else {
|
||||
ops.push(LineOperation::Delete { lines: 1 });
|
||||
}
|
||||
old_row += 1;
|
||||
deleted_rows.next();
|
||||
} else if Some(new_row) == inserted_rows.peek().copied() {
|
||||
if let Some(LineOperation::Insert { lines }) = ops.last_mut() {
|
||||
*lines += 1;
|
||||
} else {
|
||||
ops.push(LineOperation::Insert { lines: 1 });
|
||||
}
|
||||
new_row += 1;
|
||||
inserted_rows.next();
|
||||
} else {
|
||||
// Keep lines until the next deletion, insertion, or the end of the old text.
|
||||
let lines_to_next_deletion = inserted_rows
|
||||
.peek()
|
||||
.copied()
|
||||
.unwrap_or(self.new_end.row + 1)
|
||||
- new_row;
|
||||
let lines_to_next_insertion =
|
||||
deleted_rows.peek().copied().unwrap_or(self.old_end.row + 1) - old_row;
|
||||
let kept_lines =
|
||||
cmp::max(1, cmp::min(lines_to_next_insertion, lines_to_next_deletion));
|
||||
if kept_lines > 0 {
|
||||
ops.push(LineOperation::Keep { lines: kept_lines });
|
||||
old_row += kept_lines;
|
||||
new_row += kept_lines;
|
||||
}
|
||||
}
|
||||
}
|
||||
assert_eq!(patched, new);
|
||||
|
||||
if old_row < self.old_end.row + 1 {
|
||||
ops.push(LineOperation::Keep {
|
||||
lines: self.old_end.row + 1 - old_row,
|
||||
});
|
||||
}
|
||||
|
||||
ops
|
||||
}
|
||||
}
|
||||
|
||||
fn is_line_start(point: Point) -> bool {
|
||||
point.column == 0
|
||||
}
|
||||
|
||||
fn is_line_end(point: Point, text: &Rope) -> bool {
|
||||
text.line_len(point.row) == point.column
|
||||
}
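
For readers skimming this diff, here is how the two layers shown above are meant to compose: `StreamingDiff` turns streamed chunks of new text into `CharOperation`s, and `LineDiff` folds those into whole-line `LineOperation`s. This is a minimal sketch that assumes the types from this file (`StreamingDiff`, `CharOperation`, `LineDiff`, `LineOperation`, `Rope`) are in scope; the helper name and the chunking are illustrative, and only the method calls come from the diff itself.

```rust
// Sketch only: assumes StreamingDiff, CharOperation, LineDiff, LineOperation
// and Rope are in scope, as in the file shown above.
fn diff_by_chars_then_lines(old: &str, new_chunks: &[&str]) -> (Vec<CharOperation>, Vec<LineOperation>) {
    // Character level: feed the new text in as it streams.
    let mut streaming = StreamingDiff::new(old.to_string());
    let mut char_ops = Vec::new();
    for chunk in new_chunks {
        char_ops.extend(streaming.push_new(chunk));
    }
    char_ops.extend(streaming.finish());

    // Line level: fold the character operations into whole-line operations.
    let old_rope = Rope::from(old);
    let mut line_diff = LineDiff::default();
    for op in &char_ops {
        line_diff.push_char_operation(op, &old_rope);
    }
    line_diff.finish(&old_rope);

    (char_ops, line_diff.line_operations())
}
```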
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use rand::prelude::*;
|
||||
use std::env;
|
||||
|
||||
#[test]
|
||||
fn test_delete_first_of_two_lines() {
|
||||
let old_text = "aaaa\nbbbb";
|
||||
let char_ops = vec![
|
||||
CharOperation::Delete { bytes: 5 },
|
||||
CharOperation::Keep { bytes: 4 },
|
||||
];
|
||||
let expected_line_ops = vec![
|
||||
LineOperation::Delete { lines: 1 },
|
||||
LineOperation::Keep { lines: 1 },
|
||||
];
|
||||
let new_text = apply_char_operations(old_text, &char_ops);
|
||||
assert_eq!(
|
||||
new_text,
|
||||
apply_line_operations(old_text, &new_text, &expected_line_ops)
|
||||
);
|
||||
|
||||
let line_ops = char_ops_to_line_ops(&old_text, &char_ops);
|
||||
assert_eq!(line_ops, expected_line_ops);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_delete_second_of_two_lines() {
|
||||
let old_text = "aaaa\nbbbb";
|
||||
let char_ops = vec![
|
||||
CharOperation::Keep { bytes: 5 },
|
||||
CharOperation::Delete { bytes: 4 },
|
||||
];
|
||||
let line_ops = char_ops_to_line_ops(&old_text, &char_ops);
|
||||
assert_eq!(
|
||||
line_ops,
|
||||
vec![
|
||||
LineOperation::Keep { lines: 1 },
|
||||
LineOperation::Delete { lines: 1 },
|
||||
LineOperation::Insert { lines: 1 }
|
||||
]
|
||||
);
|
||||
let new_text = apply_char_operations(old_text, &char_ops);
|
||||
assert_eq!(
|
||||
new_text,
|
||||
apply_line_operations(old_text, &new_text, &line_ops)
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_add_new_line() {
|
||||
let old_text = "aaaa\nbbbb";
|
||||
let char_ops = vec![
|
||||
CharOperation::Keep { bytes: 9 },
|
||||
CharOperation::Insert {
|
||||
text: "\ncccc".into(),
|
||||
},
|
||||
];
|
||||
let line_ops = char_ops_to_line_ops(&old_text, &char_ops);
|
||||
assert_eq!(
|
||||
line_ops,
|
||||
vec![
|
||||
LineOperation::Keep { lines: 2 },
|
||||
LineOperation::Insert { lines: 1 }
|
||||
]
|
||||
);
|
||||
let new_text = apply_char_operations(old_text, &char_ops);
|
||||
assert_eq!(
|
||||
new_text,
|
||||
apply_line_operations(old_text, &new_text, &line_ops)
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_delete_line_in_middle() {
|
||||
let old_text = "aaaa\nbbbb\ncccc";
|
||||
let char_ops = vec![
|
||||
CharOperation::Keep { bytes: 5 },
|
||||
CharOperation::Delete { bytes: 5 },
|
||||
CharOperation::Keep { bytes: 4 },
|
||||
];
|
||||
let line_ops = char_ops_to_line_ops(&old_text, &char_ops);
|
||||
assert_eq!(
|
||||
line_ops,
|
||||
vec![
|
||||
LineOperation::Keep { lines: 1 },
|
||||
LineOperation::Delete { lines: 1 },
|
||||
LineOperation::Keep { lines: 1 }
|
||||
]
|
||||
);
|
||||
let new_text = apply_char_operations(old_text, &char_ops);
|
||||
assert_eq!(
|
||||
new_text,
|
||||
apply_line_operations(old_text, &new_text, &line_ops)
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_replace_line() {
|
||||
let old_text = "aaaa\nbbbb\ncccc";
|
||||
let char_ops = vec![
|
||||
CharOperation::Keep { bytes: 5 },
|
||||
CharOperation::Delete { bytes: 4 },
|
||||
CharOperation::Insert {
|
||||
text: "BBBB".into(),
|
||||
},
|
||||
CharOperation::Keep { bytes: 5 },
|
||||
];
|
||||
let line_ops = char_ops_to_line_ops(&old_text, &char_ops);
|
||||
assert_eq!(
|
||||
line_ops,
|
||||
vec![
|
||||
LineOperation::Keep { lines: 1 },
|
||||
LineOperation::Delete { lines: 1 },
|
||||
LineOperation::Insert { lines: 1 },
|
||||
LineOperation::Keep { lines: 1 }
|
||||
]
|
||||
);
|
||||
let new_text = apply_char_operations(old_text, &char_ops);
|
||||
assert_eq!(
|
||||
new_text,
|
||||
apply_line_operations(old_text, &new_text, &line_ops)
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_multiple_edits_on_different_lines() {
|
||||
let old_text = "aaaa\nbbbb\ncccc\ndddd";
|
||||
let char_ops = vec![
|
||||
CharOperation::Insert { text: "A".into() },
|
||||
CharOperation::Keep { bytes: 9 },
|
||||
CharOperation::Delete { bytes: 5 },
|
||||
CharOperation::Keep { bytes: 4 },
|
||||
CharOperation::Insert {
|
||||
text: "\nEEEE".into(),
|
||||
},
|
||||
];
|
||||
let line_ops = char_ops_to_line_ops(&old_text, &char_ops);
|
||||
assert_eq!(
|
||||
line_ops,
|
||||
vec![
|
||||
LineOperation::Delete { lines: 1 },
|
||||
LineOperation::Insert { lines: 1 },
|
||||
LineOperation::Keep { lines: 1 },
|
||||
LineOperation::Delete { lines: 2 },
|
||||
LineOperation::Insert { lines: 2 },
|
||||
]
|
||||
);
|
||||
let new_text = apply_char_operations(old_text, &char_ops);
|
||||
assert_eq!(
|
||||
new_text,
|
||||
apply_line_operations(old_text, &new_text, &line_ops)
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_edit_at_end_of_line() {
|
||||
let old_text = "aaaa\nbbbb\ncccc";
|
||||
let char_ops = vec![
|
||||
CharOperation::Keep { bytes: 4 },
|
||||
CharOperation::Insert { text: "A".into() },
|
||||
CharOperation::Keep { bytes: 10 },
|
||||
];
|
||||
let line_ops = char_ops_to_line_ops(&old_text, &char_ops);
|
||||
assert_eq!(
|
||||
line_ops,
|
||||
vec![
|
||||
LineOperation::Delete { lines: 1 },
|
||||
LineOperation::Insert { lines: 1 },
|
||||
LineOperation::Keep { lines: 2 }
|
||||
]
|
||||
);
|
||||
let new_text = apply_char_operations(old_text, &char_ops);
|
||||
assert_eq!(
|
||||
new_text,
|
||||
apply_line_operations(old_text, &new_text, &line_ops)
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_insert_newline_character() {
|
||||
let old_text = "aaaabbbb";
|
||||
let char_ops = vec![
|
||||
CharOperation::Keep { bytes: 4 },
|
||||
CharOperation::Insert { text: "\n".into() },
|
||||
CharOperation::Keep { bytes: 4 },
|
||||
];
|
||||
let new_text = apply_char_operations(old_text, &char_ops);
|
||||
let line_ops = char_ops_to_line_ops(&old_text, &char_ops);
|
||||
assert_eq!(
|
||||
line_ops,
|
||||
vec![
|
||||
LineOperation::Delete { lines: 1 },
|
||||
LineOperation::Insert { lines: 2 }
|
||||
]
|
||||
);
|
||||
assert_eq!(
|
||||
new_text,
|
||||
apply_line_operations(old_text, &new_text, &line_ops)
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_insert_newline_at_beginning() {
|
||||
let old_text = "aaaa\nbbbb";
|
||||
let char_ops = vec![
|
||||
CharOperation::Insert { text: "\n".into() },
|
||||
CharOperation::Keep { bytes: 9 },
|
||||
];
|
||||
let line_ops = char_ops_to_line_ops(&old_text, &char_ops);
|
||||
assert_eq!(
|
||||
line_ops,
|
||||
vec![
|
||||
LineOperation::Insert { lines: 1 },
|
||||
LineOperation::Keep { lines: 2 }
|
||||
]
|
||||
);
|
||||
let new_text = apply_char_operations(old_text, &char_ops);
|
||||
assert_eq!(
|
||||
new_text,
|
||||
apply_line_operations(old_text, &new_text, &line_ops)
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_delete_newline() {
|
||||
let old_text = "aaaa\nbbbb";
|
||||
let char_ops = vec![
|
||||
CharOperation::Keep { bytes: 4 },
|
||||
CharOperation::Delete { bytes: 1 },
|
||||
CharOperation::Keep { bytes: 4 },
|
||||
];
|
||||
let line_ops = char_ops_to_line_ops(&old_text, &char_ops);
|
||||
assert_eq!(
|
||||
line_ops,
|
||||
vec![
|
||||
LineOperation::Delete { lines: 2 },
|
||||
LineOperation::Insert { lines: 1 }
|
||||
]
|
||||
);
|
||||
|
||||
let new_text = apply_char_operations(old_text, &char_ops);
|
||||
assert_eq!(
|
||||
new_text,
|
||||
apply_line_operations(old_text, &new_text, &line_ops)
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_insert_multiple_newlines() {
|
||||
let old_text = "aaaa\nbbbb";
|
||||
let char_ops = vec![
|
||||
CharOperation::Keep { bytes: 5 },
|
||||
CharOperation::Insert {
|
||||
text: "\n\n".into(),
|
||||
},
|
||||
CharOperation::Keep { bytes: 4 },
|
||||
];
|
||||
let line_ops = char_ops_to_line_ops(&old_text, &char_ops);
|
||||
assert_eq!(
|
||||
line_ops,
|
||||
vec![
|
||||
LineOperation::Keep { lines: 1 },
|
||||
LineOperation::Insert { lines: 2 },
|
||||
LineOperation::Keep { lines: 1 }
|
||||
]
|
||||
);
|
||||
let new_text = apply_char_operations(old_text, &char_ops);
|
||||
assert_eq!(
|
||||
new_text,
|
||||
apply_line_operations(old_text, &new_text, &line_ops)
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_delete_multiple_newlines() {
|
||||
let old_text = "aaaa\n\n\nbbbb";
|
||||
let char_ops = vec![
|
||||
CharOperation::Keep { bytes: 5 },
|
||||
CharOperation::Delete { bytes: 2 },
|
||||
CharOperation::Keep { bytes: 4 },
|
||||
];
|
||||
let line_ops = char_ops_to_line_ops(&old_text, &char_ops);
|
||||
assert_eq!(
|
||||
line_ops,
|
||||
vec![
|
||||
LineOperation::Keep { lines: 1 },
|
||||
LineOperation::Delete { lines: 2 },
|
||||
LineOperation::Keep { lines: 1 }
|
||||
]
|
||||
);
|
||||
let new_text = apply_char_operations(old_text, &char_ops);
|
||||
assert_eq!(
|
||||
new_text,
|
||||
apply_line_operations(old_text, &new_text, &line_ops)
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_complex_scenario() {
|
||||
let old_text = "line1\nline2\nline3\nline4";
|
||||
let char_ops = vec![
|
||||
CharOperation::Keep { bytes: 6 },
|
||||
CharOperation::Insert {
|
||||
text: "inserted\n".into(),
|
||||
},
|
||||
CharOperation::Delete { bytes: 6 },
|
||||
CharOperation::Keep { bytes: 5 },
|
||||
CharOperation::Insert {
|
||||
text: "\nnewline".into(),
|
||||
},
|
||||
CharOperation::Keep { bytes: 6 },
|
||||
];
|
||||
let line_ops = char_ops_to_line_ops(&old_text, &char_ops);
|
||||
assert_eq!(
|
||||
line_ops,
|
||||
vec![
|
||||
LineOperation::Keep { lines: 1 },
|
||||
LineOperation::Delete { lines: 1 },
|
||||
LineOperation::Insert { lines: 1 },
|
||||
LineOperation::Keep { lines: 1 },
|
||||
LineOperation::Insert { lines: 1 },
|
||||
LineOperation::Keep { lines: 1 }
|
||||
]
|
||||
);
|
||||
let new_text = apply_char_operations(old_text, &char_ops);
|
||||
assert_eq!(new_text, "line1\ninserted\nline3\nnewline\nline4");
|
||||
assert_eq!(
|
||||
apply_line_operations(old_text, &new_text, &line_ops),
|
||||
new_text,
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_cleaning_up_common_suffix() {
|
||||
let old_text = concat!(
|
||||
" for y in 0..size.y() {\n",
|
||||
" let a = 10;\n",
|
||||
" let b = 20;\n",
|
||||
" }",
|
||||
);
|
||||
let char_ops = [
|
||||
CharOperation::Keep { bytes: 8 },
|
||||
CharOperation::Insert { text: "let".into() },
|
||||
CharOperation::Insert {
|
||||
text: " mut".into(),
|
||||
},
|
||||
CharOperation::Insert { text: " y".into() },
|
||||
CharOperation::Insert { text: " =".into() },
|
||||
CharOperation::Insert { text: " 0".into() },
|
||||
CharOperation::Insert { text: ";".into() },
|
||||
CharOperation::Insert { text: "\n".into() },
|
||||
CharOperation::Insert {
|
||||
text: " while".into(),
|
||||
},
|
||||
CharOperation::Insert { text: " y".into() },
|
||||
CharOperation::Insert {
|
||||
text: " < size".into(),
|
||||
},
|
||||
CharOperation::Insert { text: ".".into() },
|
||||
CharOperation::Insert { text: "y".into() },
|
||||
CharOperation::Insert { text: "()".into() },
|
||||
CharOperation::Insert { text: " {".into() },
|
||||
CharOperation::Insert { text: "\n".into() },
|
||||
CharOperation::Delete { bytes: 23 },
|
||||
CharOperation::Keep { bytes: 23 },
|
||||
CharOperation::Keep { bytes: 1 },
|
||||
CharOperation::Keep { bytes: 23 },
|
||||
CharOperation::Keep { bytes: 1 },
|
||||
CharOperation::Keep { bytes: 8 },
|
||||
CharOperation::Insert {
|
||||
text: " y".into(),
|
||||
},
|
||||
CharOperation::Insert { text: " +=".into() },
|
||||
CharOperation::Insert { text: " 1".into() },
|
||||
CharOperation::Insert { text: ";".into() },
|
||||
CharOperation::Insert { text: "\n".into() },
|
||||
CharOperation::Insert {
|
||||
text: " ".into(),
|
||||
},
|
||||
CharOperation::Keep { bytes: 1 },
|
||||
];
|
||||
let line_ops = char_ops_to_line_ops(old_text, &char_ops);
|
||||
assert_eq!(
|
||||
line_ops,
|
||||
vec![
|
||||
LineOperation::Delete { lines: 1 },
|
||||
LineOperation::Insert { lines: 2 },
|
||||
LineOperation::Keep { lines: 2 },
|
||||
LineOperation::Delete { lines: 1 },
|
||||
LineOperation::Insert { lines: 2 },
|
||||
]
|
||||
);
|
||||
let new_text = apply_char_operations(old_text, &char_ops);
|
||||
assert_eq!(
|
||||
new_text,
|
||||
apply_line_operations(old_text, &new_text, &line_ops)
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_random_diffs() {
|
||||
random_test(|mut rng| {
|
||||
let old_text_len = env::var("OLD_TEXT_LEN")
|
||||
.map(|i| i.parse().expect("invalid `OLD_TEXT_LEN` variable"))
|
||||
.unwrap_or(10);
|
||||
|
||||
let old = random_text(&mut rng, old_text_len);
|
||||
println!("old text: {:?}", old);
|
||||
|
||||
let new = randomly_edit(&old, &mut rng);
|
||||
println!("new text: {:?}", new);
|
||||
|
||||
let char_operations = random_streaming_diff(&mut rng, &old, &new);
|
||||
println!("char operations: {:?}", char_operations);
|
||||
|
||||
// Use apply_char_operations to verify the result
|
||||
let patched = apply_char_operations(&old, &char_operations);
|
||||
assert_eq!(patched, new);
|
||||
|
||||
// Test char_ops_to_line_ops
|
||||
let line_ops = char_ops_to_line_ops(&old, &char_operations);
|
||||
println!("line operations: {:?}", line_ops);
|
||||
let patched = apply_line_operations(&old, &new, &line_ops);
|
||||
assert_eq!(patched, new);
|
||||
});
|
||||
}
|
||||
|
||||
fn char_ops_to_line_ops(old_text: &str, char_ops: &[CharOperation]) -> Vec<LineOperation> {
|
||||
let old_rope = Rope::from(old_text);
|
||||
let mut diff = LineDiff::default();
|
||||
for op in char_ops {
|
||||
diff.push_char_operation(op, &old_rope);
|
||||
}
|
||||
diff.finish(&old_rope);
|
||||
diff.line_operations()
|
||||
}
|
||||
|
||||
fn random_streaming_diff(rng: &mut impl Rng, old: &str, new: &str) -> Vec<CharOperation> {
|
||||
let mut diff = StreamingDiff::new(old.to_string());
|
||||
let mut char_operations = Vec::new();
|
||||
let mut new_len = 0;
|
||||
|
||||
while new_len < new.len() {
|
||||
let mut chunk_len = rng.gen_range(1..=new.len() - new_len);
|
||||
while !new.is_char_boundary(new_len + chunk_len) {
|
||||
chunk_len += 1;
|
||||
}
|
||||
let chunk = &new[new_len..new_len + chunk_len];
|
||||
let new_hunks = diff.push_new(chunk);
|
||||
char_operations.extend(new_hunks);
|
||||
new_len += chunk_len;
|
||||
}
|
||||
|
||||
char_operations.extend(diff.finish());
|
||||
char_operations
|
||||
}
|
||||
|
||||
fn random_test<F>(mut test_fn: F)
|
||||
where
|
||||
F: FnMut(StdRng),
|
||||
{
|
||||
let iterations = env::var("ITERATIONS")
|
||||
.map(|i| i.parse().expect("invalid `ITERATIONS` variable"))
|
||||
.unwrap_or(100);
|
||||
|
||||
let seed: u64 = env::var("SEED")
|
||||
.map(|s| s.parse().expect("invalid `SEED` variable"))
|
||||
.unwrap_or(0);
|
||||
|
||||
println!(
|
||||
"Running test with {} iterations and seed {}",
|
||||
iterations, seed
|
||||
);
|
||||
|
||||
for i in 0..iterations {
|
||||
println!("Iteration {}", i + 1);
|
||||
let rng = StdRng::seed_from_u64(seed + i);
|
||||
test_fn(rng);
|
||||
}
|
||||
}
|
||||
|
||||
fn apply_line_operations(old_text: &str, new_text: &str, line_ops: &[LineOperation]) -> String {
|
||||
let mut result: Vec<&str> = Vec::new();
|
||||
|
||||
let old_lines: Vec<&str> = old_text.split('\n').collect();
|
||||
let new_lines: Vec<&str> = new_text.split('\n').collect();
|
||||
let mut old_start = 0_usize;
|
||||
let mut new_start = 0_usize;
|
||||
|
||||
for op in line_ops {
|
||||
match op {
|
||||
LineOperation::Keep { lines } => {
|
||||
let old_end = old_start + *lines as usize;
|
||||
result.extend(&old_lines[old_start..old_end]);
|
||||
old_start = old_end;
|
||||
new_start += *lines as usize;
|
||||
}
|
||||
LineOperation::Delete { lines } => {
|
||||
old_start += *lines as usize;
|
||||
}
|
||||
LineOperation::Insert { lines } => {
|
||||
let new_end = new_start + *lines as usize;
|
||||
result.extend(&new_lines[new_start..new_end]);
|
||||
new_start = new_end;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
result.join("\n")
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_apply_char_operations() {
|
||||
let old_text = "Hello, world!";
|
||||
let char_ops = vec![
|
||||
CharOperation::Keep { bytes: 7 },
|
||||
CharOperation::Delete { bytes: 5 },
|
||||
CharOperation::Insert {
|
||||
text: "Rust".to_string(),
|
||||
},
|
||||
CharOperation::Keep { bytes: 1 },
|
||||
];
|
||||
let result = apply_char_operations(old_text, &char_ops);
|
||||
assert_eq!(result, "Hello, Rust!");
|
||||
}
|
||||
|
||||
fn random_text(rng: &mut impl Rng, length: usize) -> String {
|
||||
util::RandomCharIter::new(rng).take(length).collect()
|
||||
}
|
||||
|
||||
fn randomly_edit(text: &str, rng: &mut impl Rng) -> String {
|
||||
let mut result = String::from(text);
|
||||
let edit_count = rng.gen_range(1..=5);
|
||||
|
||||
fn random_char_range(text: &str, rng: &mut impl Rng) -> (usize, usize) {
|
||||
let mut start = rng.gen_range(0..=text.len());
|
||||
while !text.is_char_boundary(start) {
|
||||
start -= 1;
|
||||
}
|
||||
let mut end = rng.gen_range(start..=text.len());
|
||||
while !text.is_char_boundary(end) {
|
||||
end += 1;
|
||||
}
|
||||
(start, end)
|
||||
}
|
||||
|
||||
for _ in 0..edit_count {
|
||||
match rng.gen_range(0..3) {
|
||||
0 => {
|
||||
// Insert
|
||||
let (pos, _) = random_char_range(&result, rng);
|
||||
let insert_len = rng.gen_range(1..=5);
|
||||
let insert_text: String = random_text(rng, insert_len);
|
||||
result.insert_str(pos, &insert_text);
|
||||
}
|
||||
1 => {
|
||||
// Delete
|
||||
if !result.is_empty() {
|
||||
let (start, end) = random_char_range(&result, rng);
|
||||
result.replace_range(start..end, "");
|
||||
}
|
||||
}
|
||||
2 => {
|
||||
// Replace
|
||||
if !result.is_empty() {
|
||||
let (start, end) = random_char_range(&result, rng);
|
||||
let replace_len = end - start;
|
||||
let replace_text: String = random_text(rng, replace_len);
|
||||
result.replace_range(start..end, &replace_text);
|
||||
}
|
||||
}
|
||||
_ => unreachable!(),
|
||||
}
|
||||
}
|
||||
|
||||
result
|
||||
}
|
||||
|
||||
fn apply_char_operations(old_text: &str, char_ops: &[CharOperation]) -> String {
|
||||
let mut result = String::new();
|
||||
let mut old_ix = 0;
|
||||
|
||||
for operation in char_ops {
|
||||
match operation {
|
||||
CharOperation::Keep { bytes } => {
|
||||
result.push_str(&old_text[old_ix..old_ix + bytes]);
|
||||
old_ix += bytes;
|
||||
}
|
||||
CharOperation::Delete { bytes } => {
|
||||
old_ix += bytes;
|
||||
}
|
||||
CharOperation::Insert { text } => {
|
||||
result.push_str(text);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
result
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,86 +0,0 @@
|
||||
When the user asks you to suggest edits for a buffer, use a strict template consisting of:
|
||||
|
||||
* A markdown code block with the file path as the language identifier.
|
||||
* The original code that should be replaced
|
||||
* A separator line (`---`)
|
||||
* The new text that should replace the original lines
|
||||
|
||||
Each code block may only contain an edit for one single contiguous range of text. Use multiple code blocks for multiple edits.
|
||||
|
||||
## Example
|
||||
|
||||
If you have a buffer with the following lines:
|
||||
|
||||
```path/to/file.rs
|
||||
fn quicksort(arr: &mut [i32]) {
|
||||
if arr.len() <= 1 {
|
||||
return;
|
||||
}
|
||||
let pivot_index = partition(arr);
|
||||
let (left, right) = arr.split_at_mut(pivot_index);
|
||||
quicksort(left);
|
||||
quicksort(&mut right[1..]);
|
||||
}
|
||||
|
||||
fn partition(arr: &mut [i32]) -> usize {
|
||||
let last_index = arr.len() - 1;
|
||||
let pivot = arr[last_index];
|
||||
let mut i = 0;
|
||||
for j in 0..last_index {
|
||||
if arr[j] <= pivot {
|
||||
arr.swap(i, j);
|
||||
i += 1;
|
||||
}
|
||||
}
|
||||
arr.swap(i, last_index);
|
||||
i
|
||||
}
|
||||
```
|
||||
|
||||
And you want to replace the for loop inside `partition`, output the following.
|
||||
|
||||
```edit path/to/file.rs
|
||||
for j in 0..last_index {
|
||||
if arr[j] <= pivot {
|
||||
arr.swap(i, j);
|
||||
i += 1;
|
||||
}
|
||||
}
|
||||
---
|
||||
let mut j = 0;
|
||||
while j < last_index {
|
||||
if arr[j] <= pivot {
|
||||
arr.swap(i, j);
|
||||
i += 1;
|
||||
}
|
||||
j += 1;
|
||||
}
|
||||
```
|
||||
|
||||
If you wanted to insert comments above the partition function, output the following:
|
||||
|
||||
```edit path/to/file.rs
|
||||
fn partition(arr: &mut [i32]) -> usize {
|
||||
---
|
||||
// A helper function used for quicksort.
|
||||
fn partition(arr: &mut [i32]) -> usize {
|
||||
```
|
||||
|
||||
If you wanted to delete the partition function, output the following:
|
||||
|
||||
```edit path/to/file.rs
|
||||
fn partition(arr: &mut [i32]) -> usize {
|
||||
let last_index = arr.len() - 1;
|
||||
let pivot = arr[last_index];
|
||||
let mut i = 0;
|
||||
for j in 0..last_index {
|
||||
if arr[j] <= pivot {
|
||||
arr.swap(i, j);
|
||||
i += 1;
|
||||
}
|
||||
}
|
||||
arr.swap(i, last_index);
|
||||
i
|
||||
}
|
||||
---
|
||||
```
|
||||
@@ -1,6 +1,6 @@
|
||||
use crate::{
|
||||
humanize_token_count, prompts::generate_terminal_assistant_prompt, AssistantPanel,
|
||||
AssistantPanelEvent, LanguageModelCompletionProvider, ModelSelector,
|
||||
humanize_token_count, prompts::PromptBuilder, AssistantPanel, AssistantPanelEvent,
|
||||
ModelSelector, DEFAULT_CONTEXT_LINES,
|
||||
};
|
||||
use anyhow::{Context as _, Result};
|
||||
use client::telemetry::Telemetry;
|
||||
@@ -16,7 +16,9 @@ use gpui::{
|
||||
Subscription, Task, TextStyle, UpdateGlobal, View, WeakView,
|
||||
};
|
||||
use language::Buffer;
|
||||
use language_model::{LanguageModelRequest, LanguageModelRequestMessage, Role};
|
||||
use language_model::{
|
||||
LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, Role,
|
||||
};
|
||||
use settings::Settings;
|
||||
use std::{
|
||||
cmp,
|
||||
@@ -30,8 +32,13 @@ use ui::{prelude::*, IconButtonShape, Tooltip};
|
||||
use util::ResultExt;
|
||||
use workspace::{notifications::NotificationId, Toast, Workspace};
|
||||
|
||||
pub fn init(fs: Arc<dyn Fs>, telemetry: Arc<Telemetry>, cx: &mut AppContext) {
|
||||
cx.set_global(TerminalInlineAssistant::new(fs, telemetry));
|
||||
pub fn init(
|
||||
fs: Arc<dyn Fs>,
|
||||
prompt_builder: Arc<PromptBuilder>,
|
||||
telemetry: Arc<Telemetry>,
|
||||
cx: &mut AppContext,
|
||||
) {
|
||||
cx.set_global(TerminalInlineAssistant::new(fs, prompt_builder, telemetry));
|
||||
}
|
||||
|
||||
const PROMPT_HISTORY_MAX_LEN: usize = 20;
|
||||
@@ -53,18 +60,24 @@ pub struct TerminalInlineAssistant {
|
||||
prompt_history: VecDeque<String>,
|
||||
telemetry: Option<Arc<Telemetry>>,
|
||||
fs: Arc<dyn Fs>,
|
||||
prompt_builder: Arc<PromptBuilder>,
|
||||
}
|
||||
|
||||
impl Global for TerminalInlineAssistant {}
|
||||
|
||||
impl TerminalInlineAssistant {
|
||||
pub fn new(fs: Arc<dyn Fs>, telemetry: Arc<Telemetry>) -> Self {
|
||||
pub fn new(
|
||||
fs: Arc<dyn Fs>,
|
||||
prompt_builder: Arc<PromptBuilder>,
|
||||
telemetry: Arc<Telemetry>,
|
||||
) -> Self {
|
||||
Self {
|
||||
next_assist_id: TerminalInlineAssistId::default(),
|
||||
assists: HashMap::default(),
|
||||
prompt_history: VecDeque::default(),
|
||||
telemetry: Some(telemetry),
|
||||
fs,
|
||||
prompt_builder,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -215,17 +228,18 @@ impl TerminalInlineAssistant {
|
||||
let assist = self.assists.get(&assist_id).context("invalid assist")?;
|
||||
|
||||
let shell = std::env::var("SHELL").ok();
|
||||
let working_directory = assist
|
||||
let (latest_output, working_directory) = assist
|
||||
.terminal
|
||||
.update(cx, |terminal, cx| {
|
||||
terminal
|
||||
.model()
|
||||
.read(cx)
|
||||
let terminal = terminal.model().read(cx);
|
||||
let latest_output = terminal.last_n_non_empty_lines(DEFAULT_CONTEXT_LINES);
|
||||
let working_directory = terminal
|
||||
.working_directory()
|
||||
.map(|path| path.to_string_lossy().to_string())
|
||||
.map(|path| path.to_string_lossy().to_string());
|
||||
(latest_output, working_directory)
|
||||
})
|
||||
.ok()
|
||||
.flatten();
|
||||
.unwrap_or_default();
|
||||
|
||||
let context_request = if assist.include_context {
|
||||
assist.workspace.as_ref().and_then(|workspace| {
|
||||
@@ -243,7 +257,7 @@ impl TerminalInlineAssistant {
|
||||
None
|
||||
};
|
||||
|
||||
let prompt = generate_terminal_assistant_prompt(
|
||||
let prompt = self.prompt_builder.generate_terminal_assistant_prompt(
|
||||
&assist
|
||||
.prompt_editor
|
||||
.clone()
|
||||
@@ -252,7 +266,8 @@ impl TerminalInlineAssistant {
|
||||
.prompt(cx),
|
||||
shell.as_deref(),
|
||||
working_directory.as_deref(),
|
||||
);
|
||||
&latest_output,
|
||||
)?;
|
||||
|
||||
let mut messages = Vec::new();
|
||||
if let Some(context_request) = context_request {
|
||||
@@ -261,7 +276,7 @@ impl TerminalInlineAssistant {
|
||||
|
||||
messages.push(LanguageModelRequestMessage {
|
||||
role: Role::User,
|
||||
content: prompt,
|
||||
content: vec![prompt.into()],
|
||||
});
|
||||
|
||||
Ok(LanguageModelRequest {
|
||||
@@ -548,7 +563,7 @@ impl Render for PromptEditor {
|
||||
.gap_2()
|
||||
.child(ModelSelector::new(
|
||||
self.fs.clone(),
|
||||
IconButton::new("context", IconName::Settings)
|
||||
IconButton::new("context", IconName::SlidersAlt)
|
||||
.shape(IconButtonShape::Square)
|
||||
.icon_size(IconSize::Small)
|
||||
.icon_color(Color::Muted)
|
||||
@@ -556,7 +571,7 @@ impl Render for PromptEditor {
|
||||
Tooltip::with_meta(
|
||||
format!(
|
||||
"Using {}",
|
||||
LanguageModelCompletionProvider::read_global(cx)
|
||||
LanguageModelRegistry::read_global(cx)
|
||||
.active_model()
|
||||
.map(|model| model.name().0)
|
||||
.unwrap_or_else(|| "No model selected".into()),
|
||||
@@ -700,6 +715,9 @@ impl PromptEditor {
|
||||
|
||||
fn count_tokens(&mut self, cx: &mut ViewContext<Self>) {
|
||||
let assist_id = self.id;
|
||||
let Some(model) = LanguageModelRegistry::read_global(cx).active_model() else {
|
||||
return;
|
||||
};
|
||||
self.pending_token_count = cx.spawn(|this, mut cx| async move {
|
||||
cx.background_executor().timer(Duration::from_secs(1)).await;
|
||||
let request =
|
||||
@@ -707,18 +725,11 @@ impl PromptEditor {
|
||||
inline_assistant.request_for_inline_assist(assist_id, cx)
|
||||
})??;
|
||||
|
||||
if let Some(token_count) = cx.update(|cx| {
|
||||
LanguageModelCompletionProvider::read_global(cx).count_tokens(request, cx)
|
||||
})? {
|
||||
let token_count = token_count.await?;
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.token_count = Some(token_count);
|
||||
cx.notify();
|
||||
})
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
let token_count = cx.update(|cx| model.count_tokens(request, cx))?.await?;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.token_count = Some(token_count);
|
||||
cx.notify();
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
@@ -843,7 +854,7 @@ impl PromptEditor {
|
||||
}
|
||||
|
||||
fn render_token_count(&self, cx: &mut ViewContext<Self>) -> Option<impl IntoElement> {
|
||||
let model = LanguageModelCompletionProvider::read_global(cx).active_model()?;
|
||||
let model = LanguageModelRegistry::read_global(cx).active_model()?;
|
||||
let token_count = self.token_count?;
|
||||
let max_token_count = model.max_token_count();
|
||||
|
||||
@@ -985,19 +996,16 @@ impl Codegen {
|
||||
}
|
||||
|
||||
pub fn start(&mut self, prompt: LanguageModelRequest, cx: &mut ModelContext<Self>) {
|
||||
self.status = CodegenStatus::Pending;
|
||||
self.transaction = Some(TerminalTransaction::start(self.terminal.clone()));
|
||||
let Some(model) = LanguageModelRegistry::read_global(cx).active_model() else {
|
||||
return;
|
||||
};
|
||||
|
||||
let telemetry = self.telemetry.clone();
|
||||
let model_telemetry_id = LanguageModelCompletionProvider::read_global(cx)
|
||||
.active_model()
|
||||
.map(|m| m.telemetry_id())
|
||||
.unwrap_or_default();
|
||||
let response =
|
||||
LanguageModelCompletionProvider::read_global(cx).stream_completion(prompt, cx);
|
||||
|
||||
self.status = CodegenStatus::Pending;
|
||||
self.transaction = Some(TerminalTransaction::start(self.terminal.clone()));
|
||||
self.generation = cx.spawn(|this, mut cx| async move {
|
||||
let response = response.await;
|
||||
let model_telemetry_id = model.telemetry_id();
|
||||
let response = model.stream_completion(prompt, &cx).await;
|
||||
let generate = async {
|
||||
let (mut hunks_tx, mut hunks_rx) = mpsc::channel(1);
|
||||
|
||||
|
||||
crates/assistant/src/using-the-assistant.md (new file, 25 lines)
@@ -0,0 +1,25 @@
### Using the Assistant

Once you have configured a provider, you can interact with the provider's language models in a context editor.

To create a new context editor, use the menu in the top right of the assistant panel and choose the `New Context` option.

In the context editor, select a model from one of the configured providers, type a message in the `You` block, and submit with `cmd-enter` (or `ctrl-enter` on Linux).

### Inline assistant

When you're in a normal editor, you can use `ctrl-enter` to open the inline assistant.

The inline assistant allows you to send the current selection (or the current line) to a language model and modify the selection with the language model's response.

### Adding Prompts

You can customize the default prompts that are used in new context editors by opening the `Prompt Library`.

Open the `Prompt Library` either by using the menu in the top right of the assistant panel and choosing the `Prompt Library` option, or by running the `assistant: deploy prompt library` command when the assistant panel is focused.

### Viewing past contexts

You can view all previous contexts by opening the `History` tab in the assistant panel.

Open the `History` tab using the menu in the top right of the assistant panel and choosing `History`.
@@ -18,7 +18,7 @@ pub fn init(cx: &mut AppContext) {
|
||||
#[derive(Debug)]
|
||||
pub struct ArgumentCompletion {
|
||||
/// The label to display for this completion.
|
||||
pub label: String,
|
||||
pub label: CodeLabel,
|
||||
/// The new text that should be inserted into the command when this completion is accepted.
|
||||
pub new_text: String,
|
||||
/// Whether the command should be run when accepting this completion.
|
||||
@@ -37,7 +37,7 @@ pub trait SlashCommand: 'static + Send + Sync {
|
||||
query: String,
|
||||
cancel: Arc<AtomicBool>,
|
||||
workspace: Option<WeakView<Workspace>>,
|
||||
cx: &mut AppContext,
|
||||
cx: &mut WindowContext,
|
||||
) -> Task<Result<Vec<ArgumentCompletion>>>;
|
||||
fn requires_argument(&self) -> bool;
|
||||
fn run(
|
||||
@@ -49,7 +49,7 @@ pub trait SlashCommand: 'static + Send + Sync {
|
||||
//
|
||||
// It may be that `LspAdapterDelegate` needs a more general name, or
|
||||
// perhaps another kind of delegate is needed here.
|
||||
delegate: Arc<dyn LspAdapterDelegate>,
|
||||
delegate: Option<Arc<dyn LspAdapterDelegate>>,
|
||||
cx: &mut WindowContext,
|
||||
) -> Task<Result<SlashCommandOutput>>;
|
||||
}
|
||||
@@ -60,7 +60,7 @@ pub type RenderFoldPlaceholder = Arc<
|
||||
+ Fn(ElementId, Arc<dyn Fn(&mut WindowContext)>, &mut WindowContext) -> AnyElement,
|
||||
>;
|
||||
|
||||
#[derive(Default)]
|
||||
#[derive(Debug, Default)]
|
||||
pub struct SlashCommandOutput {
|
||||
pub text: String,
|
||||
pub sections: Vec<SlashCommandOutputSection<usize>>,
|
||||
|
||||
@@ -56,6 +56,14 @@ impl SlashCommandRegistry {
|
||||
state.commands.insert(command_name, Arc::new(command));
|
||||
}
|
||||
|
||||
/// Unregisters the provided [`SlashCommand`].
|
||||
pub fn unregister_command(&self, command: impl SlashCommand) {
|
||||
let mut state = self.state.write();
|
||||
let command_name: Arc<str> = command.name().into();
|
||||
state.featured_commands.remove(&command_name);
|
||||
state.commands.remove(&command_name);
|
||||
}
|
||||
|
||||
/// Returns the names of registered [`SlashCommand`]s.
|
||||
pub fn command_names(&self) -> Vec<Arc<str>> {
|
||||
self.state.read().commands.keys().cloned().collect()
|
||||
|
||||
@@ -55,6 +55,8 @@ struct UpdateRequestBody {
|
||||
installation_id: Option<Arc<str>>,
|
||||
release_channel: Option<&'static str>,
|
||||
telemetry: bool,
|
||||
is_staff: Option<bool>,
|
||||
destination: &'static str,
|
||||
}
|
||||
|
||||
#[derive(Clone, PartialEq, Eq)]
|
||||
@@ -575,18 +577,27 @@ async fn download_remote_server_binary(
|
||||
cx: &AsyncAppContext,
|
||||
) -> Result<()> {
|
||||
let mut target_file = File::create(&target_path).await?;
|
||||
let (installation_id, release_channel, telemetry) = cx.update(|cx| {
|
||||
let installation_id = Client::global(cx).telemetry().installation_id();
|
||||
let (installation_id, release_channel, telemetry_enabled, is_staff) = cx.update(|cx| {
|
||||
let telemetry = Client::global(cx).telemetry().clone();
|
||||
let is_staff = telemetry.is_staff();
|
||||
let installation_id = telemetry.installation_id();
|
||||
let release_channel =
|
||||
ReleaseChannel::try_global(cx).map(|release_channel| release_channel.display_name());
|
||||
let telemetry = TelemetrySettings::get_global(cx).metrics;
|
||||
let telemetry_enabled = TelemetrySettings::get_global(cx).metrics;
|
||||
|
||||
(installation_id, release_channel, telemetry)
|
||||
(
|
||||
installation_id,
|
||||
release_channel,
|
||||
telemetry_enabled,
|
||||
is_staff,
|
||||
)
|
||||
})?;
|
||||
let request_body = AsyncBody::from(serde_json::to_string(&UpdateRequestBody {
|
||||
installation_id,
|
||||
release_channel,
|
||||
telemetry,
|
||||
telemetry: telemetry_enabled,
|
||||
is_staff,
|
||||
destination: "remote",
|
||||
})?);
|
||||
|
||||
let mut response = client.get(&release.url, request_body, true).await?;
|
||||
@@ -602,19 +613,28 @@ async fn download_release(
|
||||
) -> Result<()> {
|
||||
let mut target_file = File::create(&target_path).await?;
|
||||
|
||||
let (installation_id, release_channel, telemetry) = cx.update(|cx| {
|
||||
let installation_id = Client::global(cx).telemetry().installation_id();
|
||||
let (installation_id, release_channel, telemetry_enabled, is_staff) = cx.update(|cx| {
|
||||
let telemetry = Client::global(cx).telemetry().clone();
|
||||
let is_staff = telemetry.is_staff();
|
||||
let installation_id = telemetry.installation_id();
|
||||
let release_channel =
|
||||
ReleaseChannel::try_global(cx).map(|release_channel| release_channel.display_name());
|
||||
let telemetry = TelemetrySettings::get_global(cx).metrics;
|
||||
let telemetry_enabled = TelemetrySettings::get_global(cx).metrics;
|
||||
|
||||
(installation_id, release_channel, telemetry)
|
||||
(
|
||||
installation_id,
|
||||
release_channel,
|
||||
telemetry_enabled,
|
||||
is_staff,
|
||||
)
|
||||
})?;
|
||||
|
||||
let request_body = AsyncBody::from(serde_json::to_string(&UpdateRequestBody {
|
||||
installation_id,
|
||||
release_channel,
|
||||
telemetry,
|
||||
telemetry: telemetry_enabled,
|
||||
is_staff,
|
||||
destination: "local",
|
||||
})?);
|
||||
|
||||
let mut response = client.get(&release.url, request_body, true).await?;
|
||||
|
||||
@@ -5,14 +5,13 @@ use clap::Parser;
|
||||
use cli::{ipc::IpcOneShotServer, CliRequest, CliResponse, IpcHandshake};
|
||||
use parking_lot::Mutex;
|
||||
use std::{
|
||||
convert::Infallible,
|
||||
env, fs, io,
|
||||
path::{Path, PathBuf},
|
||||
process::ExitStatus,
|
||||
sync::Arc,
|
||||
thread::{self, JoinHandle},
|
||||
};
|
||||
use util::paths::PathLikeWithPosition;
|
||||
use util::paths::PathWithPosition;
|
||||
|
||||
struct Detect;
|
||||
|
||||
@@ -54,13 +53,10 @@ struct Args {
|
||||
}
|
||||
|
||||
fn parse_path_with_position(argument_str: &str) -> Result<String, std::io::Error> {
|
||||
let path_like = PathLikeWithPosition::parse_str::<Infallible>(argument_str, |_, path_str| {
|
||||
Ok(Path::new(path_str).to_path_buf())
|
||||
})
|
||||
.unwrap();
|
||||
let path = PathWithPosition::parse_str(argument_str);
|
||||
let curdir = env::current_dir()?;
|
||||
|
||||
let canonicalized = path_like.map_path_like(|path| match fs::canonicalize(&path) {
|
||||
let canonicalized = path.map_path(|path| match fs::canonicalize(&path) {
|
||||
Ok(path) => Ok(path),
|
||||
Err(e) => {
|
||||
if let Some(mut parent) = path.parent() {
|
||||
|
||||
@@ -541,9 +541,16 @@ impl Client {
|
||||
}
|
||||
|
||||
pub fn production(cx: &mut AppContext) -> Arc<Self> {
|
||||
let user_agent = format!(
|
||||
"Zed/{} ({}; {})",
|
||||
AppVersion::global(cx),
|
||||
std::env::consts::OS,
|
||||
std::env::consts::ARCH
|
||||
);
|
||||
let clock = Arc::new(clock::RealSystemClock);
|
||||
let http = Arc::new(HttpClientWithUrl::new(
|
||||
&ClientSettings::get_global(cx).server_url,
|
||||
Some(user_agent),
|
||||
ProxySettings::get_global(cx).proxy.clone(),
|
||||
));
|
||||
Self::new(clock, http.clone(), cx)
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
use crate::{Client, Connection, Credentials, EstablishConnectionError, UserStore};
|
||||
use anyhow::{anyhow, Result};
|
||||
use chrono::Duration;
|
||||
use futures::{stream::BoxStream, StreamExt};
|
||||
use gpui::{BackgroundExecutor, Context, Model, TestAppContext};
|
||||
use parking_lot::Mutex;
|
||||
@@ -162,6 +163,11 @@ impl FakeServer {
|
||||
return Ok(*message.downcast().unwrap());
|
||||
}
|
||||
|
||||
let accepted_tos_at = chrono::Utc::now()
|
||||
.checked_sub_signed(Duration::hours(5))
|
||||
.expect("failed to build accepted_tos_at")
|
||||
.timestamp() as u64;
|
||||
|
||||
if message.is::<TypedEnvelope<GetPrivateUserInfo>>() {
|
||||
self.respond(
|
||||
message
|
||||
@@ -172,6 +178,7 @@ impl FakeServer {
|
||||
metrics_id: "the-metrics-id".into(),
|
||||
staff: false,
|
||||
flags: Default::default(),
|
||||
accepted_tos_at: Some(accepted_tos_at),
|
||||
},
|
||||
);
|
||||
continue;
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
use super::{proto, Client, Status, TypedEnvelope};
|
||||
use anyhow::{anyhow, Context, Result};
|
||||
use chrono::{DateTime, Utc};
|
||||
use collections::{hash_map::Entry, HashMap, HashSet};
|
||||
use feature_flags::FeatureFlagAppExt;
|
||||
use futures::{channel::mpsc, Future, StreamExt};
|
||||
@@ -92,7 +93,9 @@ pub struct UserStore {
|
||||
by_github_login: HashMap<String, u64>,
|
||||
participant_indices: HashMap<u64, ParticipantIndex>,
|
||||
update_contacts_tx: mpsc::UnboundedSender<UpdateContacts>,
|
||||
current_plan: Option<proto::Plan>,
|
||||
current_user: watch::Receiver<Option<Arc<User>>>,
|
||||
accepted_tos_at: Option<Option<DateTime<Utc>>>,
|
||||
contacts: Vec<Arc<Contact>>,
|
||||
incoming_contact_requests: Vec<Arc<User>>,
|
||||
outgoing_contact_requests: Vec<Arc<User>>,
|
||||
@@ -139,6 +142,7 @@ impl UserStore {
|
||||
let (mut current_user_tx, current_user_rx) = watch::channel();
|
||||
let (update_contacts_tx, mut update_contacts_rx) = mpsc::unbounded();
|
||||
let rpc_subscriptions = vec![
|
||||
client.add_message_handler(cx.weak_model(), Self::handle_update_plan),
|
||||
client.add_message_handler(cx.weak_model(), Self::handle_update_contacts),
|
||||
client.add_message_handler(cx.weak_model(), Self::handle_update_invite_info),
|
||||
client.add_message_handler(cx.weak_model(), Self::handle_show_contacts),
|
||||
@@ -147,6 +151,8 @@ impl UserStore {
|
||||
users: Default::default(),
|
||||
by_github_login: Default::default(),
|
||||
current_user: current_user_rx,
|
||||
current_plan: None,
|
||||
accepted_tos_at: None,
|
||||
contacts: Default::default(),
|
||||
incoming_contact_requests: Default::default(),
|
||||
participant_indices: Default::default(),
|
||||
@@ -186,9 +192,10 @@ impl UserStore {
|
||||
} else {
|
||||
break;
|
||||
};
|
||||
let fetch_metrics_id =
|
||||
let fetch_private_user_info =
|
||||
client.request(proto::GetPrivateUserInfo {}).log_err();
|
||||
let (user, info) = futures::join!(fetch_user, fetch_metrics_id);
|
||||
let (user, info) =
|
||||
futures::join!(fetch_user, fetch_private_user_info);
|
||||
|
||||
cx.update(|cx| {
|
||||
if let Some(info) = info {
|
||||
@@ -199,9 +206,17 @@ impl UserStore {
|
||||
client.telemetry.set_authenticated_user_info(
|
||||
Some(info.metrics_id.clone()),
|
||||
staff,
|
||||
)
|
||||
);
|
||||
|
||||
this.update(cx, |this, _| {
|
||||
this.set_current_user_accepted_tos_at(
|
||||
info.accepted_tos_at,
|
||||
);
|
||||
})
|
||||
} else {
|
||||
anyhow::Ok(())
|
||||
}
|
||||
})?;
|
||||
})??;
|
||||
|
||||
current_user_tx.send(user).await.ok();
|
||||
|
||||
@@ -280,6 +295,18 @@ impl UserStore {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn handle_update_plan(
|
||||
this: Model<Self>,
|
||||
message: TypedEnvelope<proto::UpdateUserPlan>,
|
||||
mut cx: AsyncAppContext,
|
||||
) -> Result<()> {
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.current_plan = Some(message.payload.plan());
|
||||
cx.notify();
|
||||
})?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn update_contacts(
|
||||
&mut self,
|
||||
message: UpdateContacts,
|
||||
@@ -657,10 +684,47 @@ impl UserStore {
|
||||
self.current_user.borrow().clone()
|
||||
}
|
||||
|
||||
pub fn current_plan(&self) -> Option<proto::Plan> {
|
||||
self.current_plan
|
||||
}
|
||||
|
||||
pub fn watch_current_user(&self) -> watch::Receiver<Option<Arc<User>>> {
|
||||
self.current_user.clone()
|
||||
}
|
||||
|
||||
pub fn current_user_has_accepted_terms(&self) -> Option<bool> {
|
||||
self.accepted_tos_at
|
||||
.map(|accepted_tos_at| accepted_tos_at.is_some())
|
||||
}
|
||||
|
||||
pub fn accept_terms_of_service(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
|
||||
if self.current_user().is_none() {
|
||||
return Task::ready(Err(anyhow!("no current user")));
|
||||
};
|
||||
|
||||
let client = self.client.clone();
|
||||
cx.spawn(move |this, mut cx| async move {
|
||||
if let Some(client) = client.upgrade() {
|
||||
let response = client
|
||||
.request(proto::AcceptTermsOfService {})
|
||||
.await
|
||||
.context("error accepting tos")?;
|
||||
|
||||
this.update(&mut cx, |this, _| {
|
||||
this.set_current_user_accepted_tos_at(Some(response.accepted_tos_at))
|
||||
})
|
||||
} else {
|
||||
Err(anyhow!("client not found"))
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
fn set_current_user_accepted_tos_at(&mut self, accepted_tos_at: Option<u64>) {
|
||||
self.accepted_tos_at = Some(
|
||||
accepted_tos_at.and_then(|timestamp| DateTime::from_timestamp(timestamp as i64, 0)),
|
||||
);
|
||||
}
|
||||
|
||||
fn load_users(
|
||||
&mut self,
|
||||
request: impl RequestMessage<Response = UsersResponse>,
|
||||
|
||||
@@ -15,6 +15,9 @@ BLOB_STORE_URL = "http://127.0.0.1:9000"
|
||||
BLOB_STORE_REGION = "the-region"
|
||||
ZED_CLIENT_CHECKSUM_SEED = "development-checksum-seed"
|
||||
SEED_PATH = "crates/collab/seed.default.json"
|
||||
LLM_DATABASE_URL = "postgres://postgres@localhost/zed_llm"
|
||||
LLM_DATABASE_MAX_CONNECTIONS = 5
|
||||
LLM_API_SECRET = "llm-secret"
|
||||
|
||||
# CLICKHOUSE_URL = ""
|
||||
# CLICKHOUSE_USER = "default"
|
||||
|
||||
@@ -20,6 +20,7 @@ test-support = ["sqlite"]
|
||||
[dependencies]
|
||||
anthropic.workspace = true
|
||||
anyhow.workspace = true
|
||||
async-stripe.workspace = true
|
||||
async-tungstenite.workspace = true
|
||||
aws-config = { version = "1.1.5" }
|
||||
aws-sdk-s3 = { version = "1.15.0" }
|
||||
@@ -36,6 +37,7 @@ futures.workspace = true
|
||||
google_ai.workspace = true
|
||||
hex.workspace = true
|
||||
http_client.workspace = true
|
||||
jsonwebtoken.workspace = true
|
||||
live_kit_server.workspace = true
|
||||
log.workspace = true
|
||||
nanoid.workspace = true
|
||||
@@ -56,10 +58,12 @@ serde_derive.workspace = true
|
||||
serde_json.workspace = true
|
||||
sha2.workspace = true
|
||||
sqlx = { version = "0.7", features = ["runtime-tokio-rustls", "postgres", "json", "time", "uuid", "any"] }
|
||||
strum.workspace = true
|
||||
subtle.workspace = true
|
||||
rustc-demangle.workspace = true
|
||||
telemetry_events.workspace = true
|
||||
text.workspace = true
|
||||
thiserror.workspace = true
|
||||
time.workspace = true
|
||||
tokio.workspace = true
|
||||
toml.workspace = true
|
||||
@@ -79,7 +83,6 @@ channel.workspace = true
|
||||
client = { workspace = true, features = ["test-support"] }
|
||||
collab_ui = { workspace = true, features = ["test-support"] }
|
||||
collections = { workspace = true, features = ["test-support"] }
|
||||
completion = { workspace = true, features = ["test-support"] }
|
||||
ctor.workspace = true
|
||||
editor = { workspace = true, features = ["test-support"] }
|
||||
env_logger.workspace = true
|
||||
@@ -88,6 +91,7 @@ fs = { workspace = true, features = ["test-support"] }
|
||||
git = { workspace = true, features = ["test-support"] }
|
||||
git_hosting_providers.workspace = true
|
||||
gpui = { workspace = true, features = ["test-support"] }
|
||||
hyper.workspace = true
|
||||
indoc.workspace = true
|
||||
language = { workspace = true, features = ["test-support"] }
|
||||
language_model = { workspace = true, features = ["test-support"] }
|
||||
@@ -116,3 +120,6 @@ util.workspace = true
|
||||
workspace = { workspace = true, features = ["test-support"] }
|
||||
worktree = { workspace = true, features = ["test-support"] }
|
||||
headless.workspace = true
|
||||
|
||||
[package.metadata.cargo-machete]
|
||||
ignored = ["async-stripe"]
|
||||
|
||||
@@ -10,7 +10,7 @@ It contains our back-end logic for collaboration, to which we connect from the Z
|
||||
|
||||
Before you can run the collab server locally, you'll need to set up a zed Postgres database.
|
||||
|
||||
```
|
||||
```sh
|
||||
script/bootstrap
|
||||
```
|
||||
|
||||
@@ -32,13 +32,13 @@ To use a different set of admin users, create `crates/collab/seed.json`.
|
||||
|
||||
In one terminal, run Zed's collaboration server and the livekit dev server:
|
||||
|
||||
```
|
||||
```sh
|
||||
foreman start
|
||||
```
|
||||
|
||||
In a second terminal, run two or more instances of Zed.
|
||||
|
||||
```
|
||||
```sh
|
||||
script/zed-local -2
|
||||
```
|
||||
|
||||
@@ -64,7 +64,7 @@ You can tell what is currently deployed with `./script/what-is-deployed`.
|
||||
|
||||
To create a new migration:
|
||||
|
||||
```
|
||||
```sh
|
||||
./script/create-migration <name>
|
||||
```
|
||||
|
||||
|
||||
@@ -92,6 +92,18 @@ spec:
|
||||
secretKeyRef:
|
||||
name: api
|
||||
key: token
|
||||
- name: LLM_API_SECRET
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: llm-token
|
||||
key: secret
|
||||
- name: LLM_DATABASE_URL
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: llm-database
|
||||
key: url
|
||||
- name: LLM_DATABASE_MAX_CONNECTIONS
|
||||
value: "${LLM_DATABASE_MAX_CONNECTIONS}"
|
||||
- name: ZED_CLIENT_CHECKSUM_SEED
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
@@ -122,11 +134,26 @@ spec:
|
||||
secretKeyRef:
|
||||
name: anthropic
|
||||
key: api_key
|
||||
- name: ANTHROPIC_STAFF_API_KEY
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: anthropic
|
||||
key: staff_api_key
|
||||
- name: GOOGLE_AI_API_KEY
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: google-ai
|
||||
key: api_key
|
||||
- name: QWEN2_7B_API_KEY
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: hugging-face
|
||||
key: api_key
|
||||
- name: QWEN2_7B_API_URL
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: hugging-face
|
||||
key: qwen2_api_url
|
||||
- name: BLOB_STORE_ACCESS_KEY
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
|
||||
@@ -3,3 +3,4 @@ RUST_LOG=info
|
||||
INVITE_LINK_PREFIX=https://zed.dev/invites/
|
||||
AUTO_JOIN_CHANNEL_ID=283
|
||||
DATABASE_MAX_CONNECTIONS=85
|
||||
LLM_DATABASE_MAX_CONNECTIONS=25
|
||||
|
||||
@@ -2,4 +2,5 @@ ZED_ENVIRONMENT=staging
|
||||
RUST_LOG=info
|
||||
INVITE_LINK_PREFIX=https://staging.zed.dev/invites/
|
||||
DATABASE_MAX_CONNECTIONS=5
|
||||
LLM_DATABASE_MAX_CONNECTIONS=5
|
||||
AUTO_JOIN_CHANNEL_ID=8
|
||||
|
||||
@@ -12,7 +12,7 @@ metadata:
spec:
type: LoadBalancer
selector:
app: postgrest
app: nginx
ports:
- name: web
protocol: TCP
@@ -24,17 +24,99 @@ apiVersion: apps/v1
kind: Deployment
metadata:
namespace: ${ZED_KUBE_NAMESPACE}
name: postgrest
name: nginx
spec:
replicas: 1
selector:
matchLabels:
app: postgrest
app: nginx
template:
metadata:
labels:
app: postgrest
app: nginx
spec:
containers:
- name: nginx
image: nginx:latest
ports:
- containerPort: 8080
protocol: TCP
volumeMounts:
- name: nginx-config
mountPath: /etc/nginx/nginx.conf
subPath: nginx.conf
volumes:
- name: nginx-config
configMap:
name: nginx-config

---
apiVersion: v1
kind: ConfigMap
metadata:
namespace: ${ZED_KUBE_NAMESPACE}
name: nginx-config
data:
nginx.conf: |
events {}

http {
server {
listen 8080;

location /app/ {
proxy_pass http://postgrest-app:8080/;
}

location /llm/ {
proxy_pass http://postgrest-llm:8080/;
}
}
}

---
apiVersion: v1
kind: Service
metadata:
namespace: ${ZED_KUBE_NAMESPACE}
name: postgrest-app
spec:
selector:
app: postgrest-app
ports:
- protocol: TCP
port: 8080
targetPort: 8080

---
apiVersion: v1
kind: Service
metadata:
namespace: ${ZED_KUBE_NAMESPACE}
name: postgrest-llm
spec:
selector:
app: postgrest-llm
ports:
- protocol: TCP
port: 8080
targetPort: 8080

---
apiVersion: apps/v1
kind: Deployment
metadata:
namespace: ${ZED_KUBE_NAMESPACE}
name: postgrest-app
spec:
replicas: 1
selector:
matchLabels:
app: postgrest-app
template:
metadata:
labels:
app: postgrest-app
spec:
containers:
- name: postgrest
@@ -55,3 +137,39 @@ spec:
secretKeyRef:
name: postgrest
key: jwt_secret

---
apiVersion: apps/v1
kind: Deployment
metadata:
namespace: ${ZED_KUBE_NAMESPACE}
name: postgrest-llm
spec:
replicas: 1
selector:
matchLabels:
app: postgrest-llm
template:
metadata:
labels:
app: postgrest-llm
spec:
containers:
- name: postgrest
image: "postgrest/postgrest"
ports:
- containerPort: 8080
protocol: TCP
env:
- name: PGRST_SERVER_PORT
value: "8080"
- name: PGRST_DB_URI
valueFrom:
secretKeyRef:
name: llm-database
key: url
- name: PGRST_JWT_SECRET
valueFrom:
secretKeyRef:
name: postgrest
key: jwt_secret
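Taken together, these manifests put a single nginx LoadBalancer in front of the two PostgREST deployments: requests under `/app/` are proxied to `postgrest-app`, and requests under `/llm/` to `postgrest-llm`. A minimal sketch of exercising that routing, assuming a hypothetical `COLLAB_HOST` that resolves to the nginx service and PostgREST's usual table-per-endpoint REST interface (depending on the configured database roles, a JWT may also be required):

```sh
# COLLAB_HOST is a placeholder; it is not defined anywhere in this change.
COLLAB_HOST=collab.example.internal

# /llm/ is proxied to postgrest-llm, whose PGRST_DB_URI points at the LLM database,
# so tables such as `models` from the migrations below would be reachable here.
curl "http://$COLLAB_HOST/llm/models?select=name,max_tokens_per_minute"

# /app/ is proxied to postgrest-app; the table name is illustrative.
curl "http://$COLLAB_HOST/app/users?limit=1"
```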
@@ -9,7 +9,9 @@ CREATE TABLE "users" (
"connected_once" BOOLEAN NOT NULL DEFAULT false,
"created_at" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
"metrics_id" TEXT,
"github_user_id" INTEGER
"github_user_id" INTEGER,
"accepted_tos_at" TIMESTAMP WITHOUT TIME ZONE,
"github_user_created_at" TIMESTAMP WITHOUT TIME ZONE
);
CREATE UNIQUE INDEX "index_users_github_login" ON "users" ("github_login");
CREATE UNIQUE INDEX "index_invite_code_users" ON "users" ("invite_code");
@@ -416,3 +418,34 @@ CREATE TABLE dev_server_projects (
dev_server_id INTEGER NOT NULL REFERENCES dev_servers(id),
paths TEXT NOT NULL
);

CREATE TABLE IF NOT EXISTS billing_customers (
id INTEGER PRIMARY KEY AUTOINCREMENT,
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
user_id INTEGER NOT NULL REFERENCES users(id),
stripe_customer_id TEXT NOT NULL
);

CREATE UNIQUE INDEX "uix_billing_customers_on_user_id" ON billing_customers (user_id);
CREATE UNIQUE INDEX "uix_billing_customers_on_stripe_customer_id" ON billing_customers (stripe_customer_id);

CREATE TABLE IF NOT EXISTS billing_subscriptions (
id INTEGER PRIMARY KEY AUTOINCREMENT,
created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
billing_customer_id INTEGER NOT NULL REFERENCES billing_customers(id),
stripe_subscription_id TEXT NOT NULL,
stripe_subscription_status TEXT NOT NULL,
stripe_cancel_at TIMESTAMP
);

CREATE INDEX "ix_billing_subscriptions_on_billing_customer_id" ON billing_subscriptions (billing_customer_id);
CREATE UNIQUE INDEX "uix_billing_subscriptions_on_stripe_subscription_id" ON billing_subscriptions (stripe_subscription_id);

CREATE TABLE IF NOT EXISTS processed_stripe_events (
stripe_event_id TEXT PRIMARY KEY,
stripe_event_type TEXT NOT NULL,
stripe_event_created_timestamp INTEGER NOT NULL,
processed_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
);

CREATE INDEX "ix_processed_stripe_events_on_stripe_event_created_timestamp" ON processed_stripe_events (stripe_event_created_timestamp);
@@ -0,0 +1,12 @@
CREATE TABLE IF NOT EXISTS billing_subscriptions (
id SERIAL PRIMARY KEY,
created_at TIMESTAMP WITHOUT TIME ZONE NOT NULL DEFAULT now(),
user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
stripe_customer_id TEXT NOT NULL,
stripe_subscription_id TEXT NOT NULL,
stripe_subscription_status TEXT NOT NULL
);

CREATE INDEX "ix_billing_subscriptions_on_user_id" ON billing_subscriptions (user_id);
CREATE INDEX "ix_billing_subscriptions_on_stripe_customer_id" ON billing_subscriptions (stripe_customer_id);
CREATE UNIQUE INDEX "uix_billing_subscriptions_on_stripe_subscription_id" ON billing_subscriptions (stripe_subscription_id);
@@ -0,0 +1,18 @@
CREATE TABLE IF NOT EXISTS billing_customers (
id SERIAL PRIMARY KEY,
created_at TIMESTAMP WITHOUT TIME ZONE NOT NULL DEFAULT now(),
user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
stripe_customer_id TEXT NOT NULL
);

CREATE UNIQUE INDEX "uix_billing_customers_on_user_id" ON billing_customers (user_id);
CREATE UNIQUE INDEX "uix_billing_customers_on_stripe_customer_id" ON billing_customers (stripe_customer_id);

-- Make `billing_subscriptions` reference `billing_customers` instead of having its
-- own `user_id` and `stripe_customer_id`.
DROP INDEX IF EXISTS "ix_billing_subscriptions_on_user_id";
DROP INDEX IF EXISTS "ix_billing_subscriptions_on_stripe_customer_id";
ALTER TABLE billing_subscriptions DROP COLUMN user_id;
ALTER TABLE billing_subscriptions DROP COLUMN stripe_customer_id;
ALTER TABLE billing_subscriptions ADD COLUMN billing_customer_id INTEGER NOT NULL REFERENCES billing_customers (id) ON DELETE CASCADE;
CREATE INDEX "ix_billing_subscriptions_on_billing_customer_id" ON billing_subscriptions (billing_customer_id);
@@ -0,0 +1,2 @@
ALTER TABLE billing_customers ADD COLUMN last_stripe_event_id TEXT;
ALTER TABLE billing_subscriptions ADD COLUMN last_stripe_event_id TEXT;
@@ -0,0 +1,11 @@
ALTER TABLE billing_customers DROP COLUMN last_stripe_event_id;
ALTER TABLE billing_subscriptions DROP COLUMN last_stripe_event_id;

CREATE TABLE IF NOT EXISTS processed_stripe_events (
stripe_event_id TEXT PRIMARY KEY,
stripe_event_type TEXT NOT NULL,
stripe_event_created_timestamp BIGINT NOT NULL,
processed_at TIMESTAMP WITHOUT TIME ZONE NOT NULL DEFAULT now()
);

CREATE INDEX "ix_processed_stripe_events_on_stripe_event_created_timestamp" ON processed_stripe_events (stripe_event_created_timestamp);
@@ -0,0 +1 @@
ALTER TABLE billing_subscriptions ADD COLUMN stripe_cancel_at TIMESTAMP WITHOUT TIME ZONE;
@@ -0,0 +1 @@
ALTER TABLE users ADD accepted_tos_at TIMESTAMP WITHOUT TIME ZONE;
@@ -0,0 +1 @@
ALTER TABLE "users" ADD COLUMN "github_user_created_at" TIMESTAMP WITHOUT TIME ZONE;
@@ -0,0 +1,19 @@
create table if not exists providers (
id serial primary key,
name text not null
);

create unique index uix_providers_on_name on providers (name);

create table if not exists models (
id serial primary key,
provider_id integer not null references providers (id) on delete cascade,
name text not null,
max_requests_per_minute integer not null,
max_tokens_per_minute integer not null,
max_tokens_per_day integer not null
);

create unique index uix_models_on_provider_id_name on models (provider_id, name);
create index ix_models_on_provider_id on models (provider_id);
create index ix_models_on_name on models (name);
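These two tables store the per-provider models and their request/token limits. A sketch of inspecting them once the migration has run, assuming the local `zed_llm` database named in the `postgrest_app.conf` added later in this change:

```sh
# Connection string mirrors postgrest_app.conf; adjust for your environment.
psql "postgres://postgres@localhost/zed_llm" -c "
  SELECT p.name AS provider, m.name AS model,
         m.max_requests_per_minute, m.max_tokens_per_minute, m.max_tokens_per_day
  FROM models m
  JOIN providers p ON p.id = m.provider_id;"
```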
@@ -0,0 +1,19 @@
create table usage_measures (
id serial primary key,
name text not null
);

create unique index uix_usage_measures_on_name on usage_measures (name);

create table if not exists usages (
id serial primary key,
user_id integer not null,
model_id integer not null references models (id) on delete cascade,
measure_id integer not null references usage_measures (id) on delete cascade,
timestamp timestamp without time zone not null,
buckets bigint[] not null
);

create index ix_usages_on_user_id on usages (user_id);
create index ix_usages_on_model_id on usages (model_id);
create unique index uix_usages_on_user_id_model_id_measure_id on usages (user_id, model_id, measure_id);
@@ -0,0 +1,4 @@
ALTER TABLE models
ALTER COLUMN max_requests_per_minute TYPE bigint,
ALTER COLUMN max_tokens_per_minute TYPE bigint,
ALTER COLUMN max_tokens_per_day TYPE bigint;
@@ -0,0 +1,3 @@
ALTER TABLE models
ADD COLUMN price_per_million_input_tokens integer NOT NULL DEFAULT 0,
ADD COLUMN price_per_million_output_tokens integer NOT NULL DEFAULT 0;
@@ -0,0 +1 @@
alter table usages add column is_staff boolean not null default false;
@@ -0,0 +1,9 @@
create table lifetime_usages (
id serial primary key,
user_id integer not null,
model_id integer not null references models (id) on delete cascade,
input_tokens bigint not null default 0,
output_tokens bigint not null default 0
);

create unique index uix_lifetime_usages_on_user_id_model_id on lifetime_usages (user_id, model_id);
@@ -0,0 +1,7 @@
create table revoked_access_tokens (
id serial primary key,
jti text not null,
revoked_at timestamp without time zone not null default now()
);

create unique index uix_revoked_access_tokens_on_jti on revoked_access_tokens (jti);
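The unique index on `jti` makes a revocation check a single indexed lookup. A sketch, assuming these migrations target the same local `zed_llm` database as above and using a placeholder token ID:

```sh
# 'example-jti' is a placeholder; real values come from issued access tokens.
psql "postgres://postgres@localhost/zed_llm" -c "
  SELECT EXISTS (
    SELECT 1 FROM revoked_access_tokens WHERE jti = 'example-jti'
  ) AS revoked;"
```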
crates/collab/postgrest_app.conf (new file, 4 lines)
@@ -0,0 +1,4 @@
db-uri = "postgres://postgres@localhost/zed_llm"
server-port = 8082
jwt-secret = "the-postgrest-jwt-secret-for-authorization"
log-level = "info"
@@ -1,86 +0,0 @@
use anyhow::{anyhow, Result};
use rpc::proto;

pub fn language_model_request_to_open_ai(
request: proto::CompleteWithLanguageModel,
) -> Result<open_ai::Request> {
Ok(open_ai::Request {
model: open_ai::Model::from_id(&request.model).unwrap_or(open_ai::Model::FourTurbo),
messages: request
.messages
.into_iter()
.map(|message: proto::LanguageModelRequestMessage| {
let role = proto::LanguageModelRole::from_i32(message.role)
.ok_or_else(|| anyhow!("invalid role {}", message.role))?;

let openai_message = match role {
proto::LanguageModelRole::LanguageModelUser => open_ai::RequestMessage::User {
content: message.content,
},
proto::LanguageModelRole::LanguageModelAssistant => {
open_ai::RequestMessage::Assistant {
content: Some(message.content),
tool_calls: Vec::new(),
}
}
proto::LanguageModelRole::LanguageModelSystem => {
open_ai::RequestMessage::System {
content: message.content,
}
}
};

Ok(openai_message)
})
.collect::<Result<Vec<open_ai::RequestMessage>>>()?,
stream: true,
stop: request.stop,
temperature: request.temperature,
tool_choice: None,
tools: Vec::new(),
})
}

pub fn language_model_request_to_google_ai(
request: proto::CompleteWithLanguageModel,
) -> Result<google_ai::GenerateContentRequest> {
Ok(google_ai::GenerateContentRequest {
contents: request
.messages
.into_iter()
.map(language_model_request_message_to_google_ai)
.collect::<Result<Vec<_>>>()?,
generation_config: None,
safety_settings: None,
})
}

pub fn language_model_request_message_to_google_ai(
message: proto::LanguageModelRequestMessage,
) -> Result<google_ai::Content> {
let role = proto::LanguageModelRole::from_i32(message.role)
.ok_or_else(|| anyhow!("invalid role {}", message.role))?;

Ok(google_ai::Content {
parts: vec![google_ai::Part::TextPart(google_ai::TextPart {
text: message.content,
})],
role: match role {
proto::LanguageModelRole::LanguageModelUser => google_ai::Role::User,
proto::LanguageModelRole::LanguageModelAssistant => google_ai::Role::Model,
proto::LanguageModelRole::LanguageModelSystem => google_ai::Role::User,
},
})
}

pub fn count_tokens_request_to_google_ai(
request: proto::CountTokensWithLanguageModel,
) -> Result<google_ai::CountTokensRequest> {
Ok(google_ai::CountTokensRequest {
contents: request
.messages
.into_iter()
.map(language_model_request_message_to_google_ai)
.collect::<Result<Vec<_>>>()?,
})
}