Compare commits: show-lua-s...parse-bash (343 commits)
.github/ISSUE_TEMPLATE/0_git_beta_bug_report.yml (vendored, 51 lines removed)
@@ -1,51 +0,0 @@
name: Git Beta
description: There is a bug related to new Git features in Zed
type: "Bug"
labels: [git]
title: "Git Beta: <a short description of the Git bug>"
body:
  - type: textarea
    attributes:
      label: Summary
      description: Describe the bug with a one line summary, and provide detailed reproduction steps
      value: |
        <!-- Please insert a one line summary of the issue below -->

        <!-- Include all steps necessary to reproduce from a clean Zed installation. Be verbose -->
        Steps to trigger the problem:
        1.
        2.
        3.

        Actual Behavior:

        Expected Behavior:

    validations:
      required: true
  - type: textarea
    id: environment
    attributes:
      label: Zed Version and System Specs
      description: 'Open Zed, and in the command palette select "zed: Copy System Specs Into Clipboard"'
      placeholder: |
        Output of "zed: Copy System Specs Into Clipboard"
    validations:
      required: true
  - type: textarea
    attributes:
      label: If applicable, attach your `~/Library/Logs/Zed/Zed.log` file to this issue.
      description: |
        macOS: `~/Library/Logs/Zed/Zed.log`
        Linux: `~/.local/share/zed/logs/Zed.log` or $XDG_DATA_HOME
        If you only need the most recent lines, you can run the `zed: open log` command palette action to see the last 1000.
      value: |
        <details><summary>Zed.log</summary>

        <!-- Click below this line and paste or drag-and-drop your log-->
        ```

        ```
        <!-- Click above this line and paste or drag-and-drop your log--></details>
    validations:
      required: false
.github/actions/run_tests/action.yml (vendored, 2 changed lines)
@@ -10,7 +10,7 @@ runs:
        cargo install cargo-nextest --locked

    - name: Install Node
-      uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4
+      uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
      with:
        node-version: "18"
.github/actions/run_tests_windows/action.yml (vendored, 2 changed lines)
@@ -16,7 +16,7 @@ runs:
      run: cargo install cargo-nextest --locked

    - name: Install Node
-      uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4
+      uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
      with:
        node-version: "18"
.github/workflows/ci.yml (vendored, 206 changed lines)
@@ -23,9 +23,53 @@ env:
  RUST_BACKTRACE: 1

jobs:
+  job_spec:
+    name: Decide which jobs to run
+    if: github.repository_owner == 'zed-industries'
+    outputs:
+      run_tests: ${{ steps.filter.outputs.run_tests }}
+      run_license: ${{ steps.filter.outputs.run_license }}
+    runs-on:
+      - ubuntu-latest
+    steps:
+      - name: Checkout repo
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
+        with:
+          # 350 is arbitrary; ~10days of history on main (5secs); full history is ~25secs
+          fetch-depth: ${{ github.ref == 'refs/heads/main' && 2 || 350 }}
+      - name: Fetch git history and generate output filters
+        id: filter
+        run: |
+          if [ -z "$GITHUB_BASE_REF" ]; then
+            echo "Not in a PR context (i.e., push to main/stable/preview)"
+            COMPARE_REV=$(git rev-parse HEAD~1)
+          else
+            echo "In a PR context comparing to pull_request.base.ref"
+            git fetch origin "$GITHUB_BASE_REF" --depth=350
+            COMPARE_REV=$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)
+          fi
+          # Specify anything which should skip full CI in this regex:
+          # - docs/
+          # - .github/ISSUE_TEMPLATE/
+          # - .github/workflows/ (except .github/workflows/ci.yml)
+          SKIP_REGEX='^(docs/|\.github/(ISSUE_TEMPLATE|workflows/(?!ci)))'
+          if [[ $(git diff --name-only $COMPARE_REV ${{ github.sha }} | grep -vP "$SKIP_REGEX") ]]; then
+            echo "run_tests=true" >> $GITHUB_OUTPUT
+          else
+            echo "run_tests=false" >> $GITHUB_OUTPUT
+          fi
+          if [[ $(git diff --name-only $COMPARE_REV ${{ github.sha }} | grep '^Cargo.lock') ]]; then
+            echo "run_license=true" >> $GITHUB_OUTPUT
+          else
+            echo "run_license=false" >> $GITHUB_OUTPUT
+          fi
+
  migration_checks:
    name: Check Postgres and Protobuf migrations, mergability
-    if: github.repository_owner == 'zed-industries'
+    needs: [job_spec]
+    if: |
+      github.repository_owner == 'zed-industries' &&
+      needs.job_spec.outputs.run_tests == 'true'
    timeout-minutes: 60
    runs-on:
      - self-hosted
@@ -69,6 +113,7 @@ jobs:
  style:
    timeout-minutes: 60
    name: Check formatting and spelling
+    needs: [job_spec]
    if: github.repository_owner == 'zed-industries'
    runs-on:
      - buildjet-8vcpu-ubuntu-2204
@@ -76,6 +121,21 @@
      - name: Checkout repo
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4

+      - uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2 # v4.0.0
+        with:
+          version: 9
+
+      - name: Prettier Check on /docs
+        working-directory: ./docs
+        run: |
+          pnpm dlx prettier@${PRETTIER_VERSION} . --check || {
+            echo "To fix, run from the root of the zed repo:"
+            echo "  cd docs && pnpm dlx prettier@${PRETTIER_VERSION} . --write && cd .."
+            false
+          }
+        env:
+          PRETTIER_VERSION: 3.5.0
+
      # To support writing comments that they will certainly be revisited.
      - name: Check for todo! and FIXME comments
        run: script/check-todos
@@ -91,7 +151,10 @@ jobs:
  macos_tests:
    timeout-minutes: 60
    name: (macOS) Run Clippy and tests
-    if: github.repository_owner == 'zed-industries'
+    needs: [job_spec]
+    if: |
+      github.repository_owner == 'zed-industries' &&
+      needs.job_spec.outputs.run_tests == 'true'
    runs-on:
      - self-hosted
      - test
@@ -123,7 +186,9 @@ jobs:
      - name: Check licenses
        run: |
          script/check-licenses
-          script/generate-licenses /tmp/zed_licenses_output
+          if [[ "${{ needs.job_spec.outputs.run_license }}" == "true" ]]; then
+            script/generate-licenses /tmp/zed_licenses_output
+          fi

      - name: Check for new vulnerable dependencies
        if: github.event_name == 'pull_request'
@@ -154,7 +219,10 @@ jobs:
  linux_tests:
    timeout-minutes: 60
    name: (Linux) Run Clippy and tests
-    if: github.repository_owner == 'zed-industries'
+    needs: [job_spec]
+    if: |
+      github.repository_owner == 'zed-industries' &&
+      needs.job_spec.outputs.run_tests == 'true'
    runs-on:
      - buildjet-16vcpu-ubuntu-2204
    steps:
@@ -203,9 +271,12 @@ jobs:
  build_remote_server:
    timeout-minutes: 60
    name: (Linux) Build Remote Server
-    if: github.repository_owner == 'zed-industries'
+    needs: [job_spec]
+    if: |
+      github.repository_owner == 'zed-industries' &&
+      needs.job_spec.outputs.run_tests == 'true'
    runs-on:
-      - buildjet-16vcpu-ubuntu-2204
+      - buildjet-8vcpu-ubuntu-2204
    steps:
      - name: Add Rust to the PATH
        run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH
@@ -239,21 +310,12 @@ jobs:
  windows_clippy:
    timeout-minutes: 60
    name: (Windows) Run Clippy
-    if: github.repository_owner == 'zed-industries'
-    runs-on: hosted-windows-2
+    needs: [job_spec]
+    if: |
+      github.repository_owner == 'zed-industries' &&
+      needs.job_spec.outputs.run_tests == 'true'
+    runs-on: windows-2025-16
    steps:
-      # Temporarily Collect some metadata about the hardware behind our runners.
-      - name: GHA Runner Info
-        run: |
-          Invoke-RestMethod -Headers @{"Metadata"="true"} -Method GET -Uri "http://169.254.169.254/metadata/instance/compute?api-version=2023-07-01" |
-            ConvertTo-Json -Depth 10 |
-            jq "{ vm_size: .vmSize, location: .location, os_disk_gb: (.storageProfile.osDisk.diskSizeGB | tonumber), rs_disk_gb: (.storageProfile.resourceDisk.size | tonumber / 1024) }"
-          @{
-            Cores = (Get-CimInstance Win32_Processor).NumberOfCores
-            vCPUs = (Get-CimInstance Win32_Processor).NumberOfLogicalProcessors
-            RamGb = [math]::Round((Get-CimInstance Win32_ComputerSystem).TotalPhysicalMemory / 1GB, 2)
-            cpuid = (Get-CimInstance Win32_Processor).Name.Trim()
-          } | ConvertTo-Json
      # more info here:- https://github.com/rust-lang/cargo/issues/13020
      - name: Enable longer pathnames for git
        run: git config --system core.longpaths true
@@ -306,21 +368,13 @@ jobs:
  windows_tests:
    timeout-minutes: 60
    name: (Windows) Run Tests
-    if: ${{ github.repository_owner == 'zed-industries' && (github.ref == 'refs/heads/main' || contains(github.event.pull_request.labels.*.name, 'windows')) }}
-    runs-on: hosted-windows-2
+    needs: [job_spec]
+    if: |
+      github.repository_owner == 'zed-industries' &&
+      needs.job_spec.outputs.run_tests == 'true'
+    # Use bigger runners for PRs (speed); smaller for async (cost)
+    runs-on: ${{ github.event_name == 'pull_request' && 'windows-2025-32' || 'windows-2025-16' }}
    steps:
-      # Temporarily Collect some metadata about the hardware behind our runners.
-      - name: GHA Runner Info
-        run: |
-          Invoke-RestMethod -Headers @{"Metadata"="true"} -Method GET -Uri "http://169.254.169.254/metadata/instance/compute?api-version=2023-07-01" |
-            ConvertTo-Json -Depth 10 |
-            jq "{ vm_size: .vmSize, location: .location, os_disk_gb: (.storageProfile.osDisk.diskSizeGB | tonumber), rs_disk_gb: (.storageProfile.resourceDisk.size | tonumber / 1024) }"
-          @{
-            Cores = (Get-CimInstance Win32_Processor).NumberOfCores
-            vCPUs = (Get-CimInstance Win32_Processor).NumberOfLogicalProcessors
-            RamGb = [math]::Round((Get-CimInstance Win32_ComputerSystem).TotalPhysicalMemory / 1GB, 2)
-            cpuid = (Get-CimInstance Win32_Processor).Name.Trim()
-          } | ConvertTo-Json
      # more info here:- https://github.com/rust-lang/cargo/issues/13020
      - name: Enable longer pathnames for git
        run: git config --system core.longpaths true
@@ -372,13 +426,49 @@ jobs:
          Remove-Item -Path "${{ env.CARGO_HOME }}/config.toml" -Force
        }

+  tests_pass:
+    name: Tests Pass
+    runs-on: ubuntu-latest
+    needs:
+      - job_spec
+      - style
+      - migration_checks
+      - linux_tests
+      - build_remote_server
+      - macos_tests
+      - windows_clippy
+      - windows_tests
+    if: always()
+    steps:
+      - name: Check all tests passed
+        run: |
+          # Check dependent jobs...
+          RET_CODE=0
+          # Always check style
+          [[ "${{ needs.style.result }}" != 'success' ]] && { RET_CODE=1; echo "style tests failed"; }
+
+          # Only check test jobs if they were supposed to run
+          if [[ "${{ needs.job_spec.outputs.run_tests }}" == "true" ]]; then
+            [[ "${{ needs.macos_tests.result }}" != 'success' ]] && { RET_CODE=1; echo "macOS tests failed"; }
+            [[ "${{ needs.linux_tests.result }}" != 'success' ]] && { RET_CODE=1; echo "Linux tests failed"; }
+            [[ "${{ needs.windows_tests.result }}" != 'success' ]] && { RET_CODE=1; echo "Windows tests failed"; }
+            [[ "${{ needs.windows_clippy.result }}" != 'success' ]] && { RET_CODE=1; echo "Windows clippy failed"; }
+            [[ "${{ needs.build_remote_server.result }}" != 'success' ]] && { RET_CODE=1; echo "Remote server build failed"; }
+          fi
+          if [[ "$RET_CODE" -eq 0 ]]; then
+            echo "All tests passed successfully!"
+          fi
+          exit $RET_CODE
+
  bundle-mac:
    timeout-minutes: 120
    name: Create a macOS bundle
    runs-on:
      - self-hosted
      - bundle
-    if: ${{ startsWith(github.ref, 'refs/tags/v') || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
+    if: |
+      startsWith(github.ref, 'refs/tags/v')
+      || contains(github.event.pull_request.labels.*.name, 'run-bundling')
    needs: [macos_tests]
    env:
      MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
@@ -392,7 +482,7 @@ jobs:
      DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
    steps:
      - name: Install Node
-        uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4
+        uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
        with:
          node-version: "18"

@@ -436,14 +526,14 @@ jobs:
          mv target/x86_64-apple-darwin/release/Zed.dmg target/x86_64-apple-darwin/release/Zed-x86_64.dmg

      - name: Upload app bundle (aarch64) to workflow run if main branch or specific label
-        uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
        if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
        with:
          name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.dmg
          path: target/aarch64-apple-darwin/release/Zed-aarch64.dmg

      - name: Upload app bundle (x86_64) to workflow run if main branch or specific label
-        uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
        if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
        with:
          name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.dmg
@@ -468,7 +558,9 @@ jobs:
    name: Linux x86_x64 release bundle
    runs-on:
      - buildjet-16vcpu-ubuntu-2004
-    if: ${{ startsWith(github.ref, 'refs/tags/v') || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
+    if: |
+      startsWith(github.ref, 'refs/tags/v')
+      || contains(github.event.pull_request.labels.*.name, 'run-bundling')
    needs: [linux_tests]
    env:
      ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
@@ -485,7 +577,7 @@ jobs:
        run: ./script/linux && ./script/install-mold 2.34.0

      - name: Determine version and release channel
-        if: ${{ startsWith(github.ref, 'refs/tags/v') }}
+        if: startsWith(github.ref, 'refs/tags/v')
        run: |
          # This exports RELEASE_CHANNEL into env (GITHUB_ENV)
          script/determine-release-channel
@@ -494,15 +586,19 @@ jobs:
        run: script/bundle-linux

      - name: Upload Linux bundle to workflow run if main branch or specific label
-        uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4
-        if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
+        if: |
+          github.ref == 'refs/heads/main'
+          || contains(github.event.pull_request.labels.*.name, 'run-bundling')
        with:
          name: zed-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz
          path: target/release/zed-*.tar.gz

      - name: Upload Linux remote server to workflow run if main branch or specific label
-        uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4
-        if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
+        if: |
+          github.ref == 'refs/heads/main'
+          || contains(github.event.pull_request.labels.*.name, 'run-bundling')
        with:
          name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.gz
          path: target/zed-remote-server-linux-x86_64.gz
@@ -523,7 +619,9 @@ jobs:
    name: Linux arm64 release bundle
    runs-on:
      - buildjet-16vcpu-ubuntu-2204-arm
-    if: ${{ startsWith(github.ref, 'refs/tags/v') || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
+    if: |
+      startsWith(github.ref, 'refs/tags/v')
+      || contains(github.event.pull_request.labels.*.name, 'run-bundling')
    needs: [linux_tests]
    env:
      ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
@@ -540,7 +638,7 @@ jobs:
        run: ./script/linux

      - name: Determine version and release channel
-        if: ${{ startsWith(github.ref, 'refs/tags/v') }}
+        if: startsWith(github.ref, 'refs/tags/v')
        run: |
          # This exports RELEASE_CHANNEL into env (GITHUB_ENV)
          script/determine-release-channel
@@ -549,15 +647,19 @@ jobs:
        run: script/bundle-linux

      - name: Upload Linux bundle to workflow run if main branch or specific label
-        uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4
-        if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
+        if: |
+          github.ref == 'refs/heads/main'
+          || contains(github.event.pull_request.labels.*.name, 'run-bundling')
        with:
          name: zed-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz
          path: target/release/zed-*.tar.gz

      - name: Upload Linux remote server to workflow run if main branch or specific label
-        uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4
-        if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
+        if: |
+          github.ref == 'refs/heads/main'
+          || contains(github.event.pull_request.labels.*.name, 'run-bundling')
        with:
          name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.gz
          path: target/zed-remote-server-linux-aarch64.gz
@@ -575,7 +677,9 @@ jobs:

  auto-release-preview:
    name: Auto release preview
-    if: ${{ startsWith(github.ref, 'refs/tags/v') && endsWith(github.ref, '-pre') && !endsWith(github.ref, '.0-pre') }}
+    if: |
+      startsWith(github.ref, 'refs/tags/v')
+      && endsWith(github.ref, '-pre') && !endsWith(github.ref, '.0-pre')
    needs: [bundle-mac, bundle-linux-x86_x64, bundle-linux-aarch64]
    runs-on:
      - self-hosted
@@ -1,7 +1,7 @@
name: "Close Stale Issues"
on:
  schedule:
-    - cron: "0 11 * * 2"
+    - cron: "0 7,9,11 * * 2"
  workflow_dispatch:

jobs:
.github/workflows/community_release_actions.yml (vendored, 39 changed lines)
@@ -13,11 +13,12 @@ jobs:
        id: get-release-url
        run: |
          if [ "${{ github.event.release.prerelease }}" == "true" ]; then
-            URL="https://zed.dev/releases/preview/latest"
+            URL="https://zed.dev/releases/preview/latest"
          else
-            URL="https://zed.dev/releases/stable/latest"
+            URL="https://zed.dev/releases/stable/latest"
          fi
-          echo "::set-output name=URL::$URL"
+
+          echo "URL=$URL" >> $GITHUB_OUTPUT
      - name: Get content
        uses: 2428392/gh-truncate-string-action@b3ff790d21cf42af3ca7579146eedb93c8fb0757 # v1.4.1
        id: get-content
@@ -33,3 +34,35 @@ jobs:
        with:
          webhook-url: ${{ secrets.DISCORD_WEBHOOK_URL }}
          content: ${{ steps.get-content.outputs.string }}
+
+  send_release_notes_email:
+    if: github.repository_owner == 'zed-industries' && !github.event.release.prerelease
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
+        with:
+          fetch-depth: 0
+
+      - name: Check if release was promoted from preview
+        id: check-promotion-from-preview
+        run: |
+          VERSION="${{ github.event.release.tag_name }}"
+          PREVIEW_TAG="${VERSION}-pre"
+
+          if git rev-parse "$PREVIEW_TAG" > /dev/null 2>&1; then
+            echo "was_promoted_from_preview=true" >> $GITHUB_OUTPUT
+          else
+            echo "was_promoted_from_preview=false" >> $GITHUB_OUTPUT
+          fi
+
+      - name: Send release notes email
+        if: steps.check-promotion-from-preview.outputs.was_promoted_from_preview == 'true'
+        run: |
+          TAG="${{ github.event.release.tag_name }}"
+          echo \"${{ toJSON(github.event.release.body) }}\" > release_body.txt
+          jq -n --arg tag "$TAG" --rawfile body release_body.txt '{version: $tag, markdown_body: $body}' \
+            > release_data.json
+          curl -X POST "https://zed.dev/api/send_release_notes_email" \
+            -H "Authorization: Bearer ${{ secrets.RELEASE_NOTES_API_TOKEN }}" \
+            -H "Content-Type: application/json" \
+            -d @release_data.json
.github/workflows/danger.yml (vendored, 2 changed lines)
@@ -22,7 +22,7 @@ jobs:
          version: 9

      - name: Setup Node
-        uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4
+        uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
        with:
          node-version: "20"
          cache: "pnpm"
.github/workflows/deploy_cloudflare.yml (vendored, 10 changed lines)
@@ -37,35 +37,35 @@ jobs:
          mdbook build ./docs --dest-dir=../target/deploy/docs/

      - name: Deploy Docs
-        uses: cloudflare/wrangler-action@392082e81ffbcb9ebdde27400634aa004b35ea37 # v3
+        uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3
        with:
          apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
          accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
          command: pages deploy target/deploy --project-name=docs

      - name: Deploy Install
-        uses: cloudflare/wrangler-action@392082e81ffbcb9ebdde27400634aa004b35ea37 # v3
+        uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3
        with:
          apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
          accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
          command: r2 object put -f script/install.sh zed-open-source-website-assets/install.sh

      - name: Deploy Docs Workers
-        uses: cloudflare/wrangler-action@392082e81ffbcb9ebdde27400634aa004b35ea37 # v3
+        uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3
        with:
          apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
          accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
          command: deploy .cloudflare/docs-proxy/src/worker.js

      - name: Deploy Install Workers
-        uses: cloudflare/wrangler-action@392082e81ffbcb9ebdde27400634aa004b35ea37 # v3
+        uses: cloudflare/wrangler-action@da0e0dfe58b7a431659754fdf3f186c529afbe65 # v3
        with:
          apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN }}
          accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
          command: deploy .cloudflare/docs-proxy/src/worker.js

      - name: Preserve Wrangler logs
-        uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
        if: always()
        with:
          name: wrangler_logs
.github/workflows/docs.yml (vendored, 39 lines removed)
@@ -1,39 +0,0 @@
name: Docs

on:
  pull_request:
    paths:
      - "docs/**"
  push:
    branches:
      - main

jobs:
  check_formatting:
    name: "Check formatting"
    if: github.repository_owner == 'zed-industries'
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4

      - uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2 # v4.0.0
        with:
          version: 9

      - name: Prettier Check on /docs
        working-directory: ./docs
        run: |
          pnpm dlx prettier@${PRETTIER_VERSION} . --check || {
            echo "To fix, run from the root of the zed repo:"
            echo "  cd docs && pnpm dlx prettier@${PRETTIER_VERSION} . --write && cd .."
            false
          }
        env:
          PRETTIER_VERSION: 3.5.0

      - name: Check for Typos with Typos-CLI
        uses: crate-ci/typos@8e6a4285bcbde632c5d79900a7779746e8b7ea3f # v1.24.6
        with:
          config: ./typos.toml
          files: ./docs/
.github/workflows/issue_response.yml (vendored, 2 changed lines)
@@ -18,7 +18,7 @@ jobs:
          version: 9

      - name: Setup Node
-        uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4
+        uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
        with:
          node-version: "20"
          cache: "pnpm"
.github/workflows/randomized_tests.yml (vendored, 2 changed lines)
@@ -23,7 +23,7 @@ jobs:
      - buildjet-16vcpu-ubuntu-2204
    steps:
      - name: Install Node
-        uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4
+        uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
        with:
          node-version: "18"
.github/workflows/release_nightly.yml (vendored, 54 changed lines)
@@ -71,7 +71,7 @@ jobs:
      ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
    steps:
      - name: Install Node
-        uses: actions/setup-node@1d0ff469b7ec7b3cb9d8673fde0c81c44821de2a # v4
+        uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4
        with:
          node-version: "18"

@@ -170,6 +170,58 @@ jobs:
      - name: Upload Zed Nightly
        run: script/upload-nightly linux-targz

+  bundle-nix:
+    timeout-minutes: 60
+    name: (${{ matrix.system.os }}) Nix Build
+    continue-on-error: true
+    strategy:
+      fail-fast: false
+      matrix:
+        system:
+          - os: x86 Linux
+            runner: buildjet-16vcpu-ubuntu-2204
+            install_nix: true
+          - os: arm Mac
+            # TODO: once other macs are provisioned for nix, remove that constraint from the runner
+            runner: [macOS, ARM64, nix]
+            install_nix: false
+          - os: arm Linux
+            runner: buildjet-16vcpu-ubuntu-2204-arm
+            install_nix: true
+    if: github.repository_owner == 'zed-industries'
+    runs-on: ${{ matrix.system.runner }}
+    needs: tests
+    env:
+      ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
+      ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
+      GIT_LFS_SKIP_SMUDGE: 1 # breaks the livekit rust sdk examples which we don't actually depend on
+    steps:
+      - name: Checkout repo
+        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
+        with:
+          clean: false
+
+      # on our macs we manually install nix. for some reason the cachix action is running
+      # under a non-login /bin/bash shell which doesn't source the proper script to add the
+      # nix profile to PATH, so we manually add them here
+      - name: Set path
+        if: ${{ ! matrix.system.install_nix }}
+        run: |
+          echo "/nix/var/nix/profiles/default/bin" >> $GITHUB_PATH
+          echo "/Users/administrator/.nix-profile/bin" >> $GITHUB_PATH
+
+      - uses: cachix/install-nix-action@02a151ada4993995686f9ed4f1be7cfbb229e56f # v31
+        if: ${{ matrix.system.install_nix }}
+        with:
+          github_access_token: ${{ secrets.GITHUB_TOKEN }}
+
+      - uses: cachix/cachix-action@0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad # v16
+        with:
+          name: zed-industries
+          authToken: "${{ secrets.CACHIX_AUTH_TOKEN }}"
+      - run: nix build
+      - run: nix-collect-garbage -d
+
  update-nightly-tag:
    name: Update nightly tag
    if: github.repository_owner == 'zed-industries'
.zed/debug.json (new file, 19 lines)
@@ -0,0 +1,19 @@
[
  {
    "label": "Debug Zed with LLDB",
    "adapter": "lldb",
    "program": "$ZED_WORKTREE_ROOT/target/debug/zed",
    "request": "launch",
    "cwd": "$ZED_WORKTREE_ROOT"
  },
  {
    "label": "Debug Zed with GDB",
    "adapter": "gdb",
    "program": "$ZED_WORKTREE_ROOT/target/debug/zed",
    "request": "launch",
    "cwd": "$ZED_WORKTREE_ROOT",
    "initialize_args": {
      "stopAtBeginningOfMainSubprogram": true
    }
  }
]
Cargo.lock (generated, 923 changed lines; contents not shown)
Cargo.toml (43 changed lines)
@@ -8,7 +8,7 @@ members = [
    "crates/assistant",
    "crates/assistant2",
    "crates/assistant_context_editor",
-    "crates/assistant_scripting",
+    "crates/assistant_eval",
    "crates/assistant_settings",
    "crates/assistant_slash_command",
    "crates/assistant_slash_commands",
@@ -37,6 +37,10 @@ members = [
    "crates/context_server_settings",
    "crates/copilot",
    "crates/credentials_provider",
+    "crates/dap",
+    "crates/dap_adapters",
+    "crates/debugger_tools",
+    "crates/debugger_ui",
    "crates/db",
    "crates/deepseek",
    "crates/diagnostics",
@@ -65,6 +69,7 @@ members = [
    "crates/gpui_tokio",
    "crates/html_to_markdown",
    "crates/http_client",
+    "crates/http_client_tls",
    "crates/image_viewer",
    "crates/indexed_docs",
    "crates/inline_completion",
@@ -119,12 +124,14 @@ members = [
    "crates/rope",
    "crates/rpc",
    "crates/schema_generator",
+    "crates/scripting_tool",
    "crates/search",
    "crates/semantic_index",
    "crates/semantic_version",
    "crates/session",
    "crates/settings",
    "crates/settings_ui",
+    "crates/shell_parser",
    "crates/snippet",
    "crates/snippet_provider",
    "crates/snippets_ui",
@@ -154,6 +161,7 @@ members = [
    "crates/ui",
    "crates/ui_input",
    "crates/ui_macros",
+    "crates/ui_prompt",
    "crates/util",
    "crates/util_macros",
    "crates/vim",
@@ -174,15 +182,11 @@ members = [
    "extensions/html",
    "extensions/perplexity",
    "extensions/proto",
    "extensions/purescript",
    "extensions/ruff",
    "extensions/slash-commands-example",
    "extensions/snippets",
    "extensions/terraform",
    "extensions/test-extension",
    "extensions/toml",
    "extensions/uiua",
    "extensions/zig",

    #
    # Tooling
@@ -210,6 +214,7 @@ assets = { path = "crates/assets" }
assistant = { path = "crates/assistant" }
assistant2 = { path = "crates/assistant2" }
assistant_context_editor = { path = "crates/assistant_context_editor" }
+assistant_eval = { path = "crates/assistant_eval" }
assistant_settings = { path = "crates/assistant_settings" }
assistant_slash_command = { path = "crates/assistant_slash_command" }
assistant_slash_commands = { path = "crates/assistant_slash_commands" }
@@ -237,7 +242,11 @@ context_server = { path = "crates/context_server" }
context_server_settings = { path = "crates/context_server_settings" }
copilot = { path = "crates/copilot" }
credentials_provider = { path = "crates/credentials_provider" }
+dap = { path = "crates/dap" }
+dap_adapters = { path = "crates/dap_adapters" }
db = { path = "crates/db" }
+debugger_ui = { path = "crates/debugger_ui" }
+debugger_tools = { path = "crates/debugger_tools" }
deepseek = { path = "crates/deepseek" }
diagnostics = { path = "crates/diagnostics" }
buffer_diff = { path = "crates/buffer_diff" }
@@ -264,6 +273,7 @@ gpui_macros = { path = "crates/gpui_macros" }
gpui_tokio = { path = "crates/gpui_tokio" }
html_to_markdown = { path = "crates/html_to_markdown" }
http_client = { path = "crates/http_client" }
+http_client_tls = { path = "crates/http_client_tls" }
image_viewer = { path = "crates/image_viewer" }
indexed_docs = { path = "crates/indexed_docs" }
inline_completion = { path = "crates/inline_completion" }
@@ -318,7 +328,7 @@ reqwest_client = { path = "crates/reqwest_client" }
rich_text = { path = "crates/rich_text" }
rope = { path = "crates/rope" }
rpc = { path = "crates/rpc" }
-assistant_scripting = { path = "crates/assistant_scripting" }
+scripting_tool = { path = "crates/scripting_tool" }
search = { path = "crates/search" }
semantic_index = { path = "crates/semantic_index" }
semantic_version = { path = "crates/semantic_version" }
@@ -354,6 +364,7 @@ toolchain_selector = { path = "crates/toolchain_selector" }
ui = { path = "crates/ui" }
ui_input = { path = "crates/ui_input" }
ui_macros = { path = "crates/ui_macros" }
+ui_prompt = { path = "crates/ui_prompt" }
util = { path = "crates/util" }
util_macros = { path = "crates/util_macros" }
vim = { path = "crates/vim" }
@@ -402,6 +413,7 @@ bytes = "1.0"
cargo_metadata = "0.19"
cargo_toml = "0.21"
chrono = { version = "0.4", features = ["serde"] }
+circular-buffer = "1.0"
clap = { version = "4.4", features = ["derive"] }
cocoa = "0.26"
cocoa-foundation = "0.2.0"
@@ -410,6 +422,7 @@ core-foundation = "0.9.3"
core-foundation-sys = "0.8.6"
ctor = "0.4.0"
dashmap = "6.0"
+dap-types = { git = "https://github.com/zed-industries/dap-types", rev = "bfd4af0" }
derive_more = "0.99.17"
dirs = "4.0"
ec4rs = "1.1"
@@ -421,8 +434,7 @@ fork = "0.2.0"
futures = "0.3"
futures-batch = "0.6.1"
futures-lite = "1.13"
-# TODO: get back to regular versions when https://github.com/rust-lang/git2-rs/pull/1120 is released
-git2 = { git = "https://github.com/rust-lang/git2-rs", rev = "a3b90cb3756c1bb63e2317bf9cfa57838178de5c", default-features = false }
+git2 = { version = "0.20.1", default-features = false }
globset = "0.4"
handlebars = "4.3"
heed = { version = "0.21.0", features = ["read-txn-no-tls"] }
@@ -524,7 +536,7 @@ sys-locale = "0.3.1"
sysinfo = "0.31.0"
take-until = "0.2.0"
tempfile = "3.9.0"
-thiserror = "1.0.29"
+thiserror = "2.0.12"
tiktoken-rs = "0.6.0"
time = { version = "0.3", features = [
    "macros",
@@ -565,6 +577,7 @@ unindent = "0.2.0"
unicode-segmentation = "1.10"
unicode-script = "0.5.7"
url = "2.2"
+urlencoding = "2.1.2"
uuid = { version = "1.1.2", features = ["v4", "v5", "v7", "serde"] }
wasmparser = "0.221"
wasm-encoder = "0.221"
@@ -597,12 +610,12 @@ features = [
]

[workspace.dependencies.windows]
-version = "0.58"
+version = "0.60"
features = [
    "implement",
    "Foundation_Collections",
    "Foundation_Numerics",
    "Storage",
    "Storage_Search",
    "Storage_Streams",
    "System_Threading",
    "UI_StartScreen",
    "UI_ViewManagement",
@@ -623,9 +636,11 @@ features = [
    "Win32_System_Com_StructuredStorage",
    "Win32_System_Console",
    "Win32_System_DataExchange",
    "Win32_System_IO",
    "Win32_System_LibraryLoader",
    "Win32_System_Memory",
    "Win32_System_Ole",
    "Win32_System_Pipes",
    "Win32_System_SystemInformation",
    "Win32_System_SystemServices",
    "Win32_System_Threading",
@@ -751,5 +766,9 @@ new_ret_no_self = { level = "allow" }
should_implement_trait = { level = "allow" }
let_underscore_future = "allow"

+# in Rust it can be very tedious to reduce argument count without
+# running afoul of the borrow checker.
+too_many_arguments = "allow"
+
[workspace.metadata.cargo-machete]
ignored = ["bindgen", "cbindgen", "prost_build", "serde", "component", "linkme"]
assets/icons/debug.svg (new file, 615 B)
@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-bug"><path d="m8 2 1.88 1.88"/><path d="M14.12 3.88 16 2"/><path d="M9 7.13v-1a3.003 3.003 0 1 1 6 0v1"/><path d="M12 20c-3.3 0-6-2.7-6-6v-3a4 4 0 0 1 4-4h4a4 4 0 0 1 4 4v3c0 3.3-2.7 6-6 6"/><path d="M12 20v-9"/><path d="M6.53 9C4.6 8.8 3 7.1 3 5"/><path d="M6 13H2"/><path d="M3 21c0-2.1 1.7-3.9 3.8-4"/><path d="M20.97 5c0 2.1-1.6 3.8-3.5 4"/><path d="M22 13h-4"/><path d="M17.2 17c2.1.1 3.8 1.9 3.8 4"/></svg>
assets/icons/debug_breakpoint.svg (new file, 257 B)
@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="currentColor" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-circle"><circle cx="12" cy="12" r="10"/></svg>
assets/icons/debug_continue.svg (new file, 295 B)
@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-step-forward"><line x1="6" x2="6" y1="4" y2="20"/><polygon points="10,4 20,12 10,20"/></svg>
assets/icons/debug_disconnect.svg (new file, 474 B)
@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-unplug"><path d="m19 5 3-3"/><path d="m2 22 3-3"/><path d="M6.3 20.3a2.4 2.4 0 0 0 3.4 0L12 18l-6-6-2.3 2.3a2.4 2.4 0 0 0 0 3.4Z"/><path d="M7.5 13.5 10 11"/><path d="M10.5 16.5 13 14"/><path d="m12 6 6 6 2.3-2.3a2.4 2.4 0 0 0 0-3.4l-2.6-2.6a2.4 2.4 0 0 0-3.4 0Z"/></svg>
assets/icons/debug_ignore_breakpoints.svg (new file, 334 B)
@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-circle-off"><path d="m2 2 20 20"/><path d="M8.35 2.69A10 10 0 0 1 21.3 15.65"/><path d="M19.08 19.08A10 10 0 1 1 4.92 4.92"/></svg>
assets/icons/debug_log_breakpoint.svg (new file, 275 B)
@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="currentColor" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-message-circle"><path d="M7.9 20A9 9 0 1 0 4 16.1L2 22Z"/></svg>
assets/icons/debug_pause.svg (new file, 313 B)
@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-pause"><rect x="14" y="4" width="4" height="16" rx="1"/><rect x="6" y="4" width="4" height="16" rx="1"/></svg>
assets/icons/debug_restart.svg (new file, 302 B)
@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-rotate-ccw"><path d="M3 12a9 9 0 1 0 9-9 9.75 9.75 0 0 0-6.74 2.74L3 8"/><path d="M3 3v5h5"/></svg>
assets/icons/debug_step_back.svg (new file, 310 B)
@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-undo-dot"><path d="M21 17a9 9 0 0 0-15-6.7L3 13"/><path d="M3 7v6h6"/><circle cx="12" cy="17" r="1"/></svg>
assets/icons/debug_step_into.svg (new file, 313 B)
@@ -0,0 +1,5 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-arrow-up-from-dot">
<path d="m5 15 7 7 7-7"/>
<path d="M12 8v14"/>
<circle cx="12" cy="3" r="1"/>
</svg>
assets/icons/debug_step_out.svg (new file, 314 B)
@@ -0,0 +1,5 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-arrow-up-from-dot">
<path d="m3 10 9-8 9 8"/>
<path d="M12 17V2"/>
<circle cx="12" cy="21" r="1"/>
</svg>
assets/icons/debug_step_over.svg (new file, 335 B)
@@ -0,0 +1,5 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-redo-dot">
<circle cx="12" cy="17" r="1"/>
<path d="M21 7v6h-6"/>
<path d="M3 17a9 9 0 0 1 9-9 9 9 0 0 1 6 2.3l3 2.7"/>
</svg>
assets/icons/debug_stop.svg (new file, 266 B)
@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-square"><rect width="18" height="18" x="3" y="3" rx="2"/></svg>
assets/icons/expand_down.svg (new file, 248 B)
@@ -0,0 +1,4 @@
<svg width="15" height="15" viewBox="0 0 15 15" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M10.5 8.5L7.5 11.5M7.5 11.5L4.5 8.5M7.5 11.5L7.5 5.5" stroke="black" stroke-linecap="square"/>
<path d="M5 3.5L10 3.5" stroke="black"/>
</svg>
assets/icons/expand_up.svg (new file, 238 B)
@@ -0,0 +1,4 @@
<svg width="15" height="15" viewBox="0 0 15 15" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M4.5 6.5L7.5 3.5M7.5 3.5L10.5 6.5M7.5 3.5V9.5" stroke="black" stroke-linecap="square"/>
<path d="M5 11.5H10" stroke="black"/>
</svg>
assets/icons/git_onboarding_bg.svg (new file, 5.4 KiB)
@@ -0,0 +1,40 @@
<svg width="400" height="120" xmlns="http://www.w3.org/2000/svg">
<defs>
<pattern id="tilePattern" width="124" height="24" patternUnits="userSpaceOnUse">
<svg width="124" height="24" viewBox="0 0 124 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<g opacity="0.2">
<path d="M16.666 12.0013L11.9993 16.668L7.33268 12.0013" stroke="white" stroke-width="1.6" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M12 7.33464L12 16.668" stroke="white" stroke-width="1.6" stroke-linecap="round" stroke-linejoin="round"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M29 8.33464C29.3682 8.33464 29.6667 8.03616 29.6667 7.66797C29.6667 7.29978 29.3682 7.0013 29 7.0013C28.6318 7.0013 28.3333 7.29978 28.3333 7.66797C28.3333 8.03616 28.6318 8.33464 29 8.33464ZM29 9.66797C30.1046 9.66797 31 8.77254 31 7.66797C31 6.5634 30.1046 5.66797 29 5.66797C27.8954 5.66797 27 6.5634 27 7.66797C27 8.77254 27.8954 9.66797 29 9.66797Z" fill="white"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M35 8.33464C35.3682 8.33464 35.6667 8.03616 35.6667 7.66797C35.6667 7.29978 35.3682 7.0013 35 7.0013C34.6318 7.0013 34.3333 7.29978 34.3333 7.66797C34.3333 8.03616 34.6318 8.33464 35 8.33464ZM35 9.66797C36.1046 9.66797 37 8.77254 37 7.66797C37 6.5634 36.1046 5.66797 35 5.66797C33.8954 5.66797 33 6.5634 33 7.66797C33 8.77254 33.8954 9.66797 35 9.66797Z" fill="white"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M29 16.9987C29.3682 16.9987 29.6667 16.7002 29.6667 16.332C29.6667 15.9638 29.3682 15.6654 29 15.6654C28.6318 15.6654 28.3333 15.9638 28.3333 16.332C28.3333 16.7002 28.6318 16.9987 29 16.9987ZM29 18.332C30.1046 18.332 31 17.4366 31 16.332C31 15.2275 30.1046 14.332 29 14.332C27.8954 14.332 27 15.2275 27 16.332C27 17.4366 27.8954 18.332 29 18.332Z" fill="white"/>
<path fill-rule="evenodd" clip-rule="evenodd" d="M28.334 9H29.6673V11.4615C30.2383 11.1443 31.0005 11 32.0007 11H33.6675C34.0356 11 34.334 10.7017 34.334 10.3333V9H35.6673V10.3333C35.6673 11.4378 34.7723 12.3333 33.6675 12.3333H32.0007C30.8614 12.3333 30.3692 12.5484 30.1298 12.7549C29.9016 12.9516 29.7857 13.2347 29.6673 13.742V15H28.334V9Z" fill="white"/>
<path d="M48.668 8.66406H55.3346V15.3307" stroke="white" stroke-width="1.6" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M48.668 15.3307L55.3346 8.66406" stroke="white" stroke-width="1.6" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M76.5871 9.40624C76.8514 9.14195 77 8.78346 77 8.40965C77 8.03583 76.8516 7.67731 76.5873 7.41295C76.323 7.14859 75.9645 7.00005 75.5907 7C75.2169 6.99995 74.8584 7.14841 74.594 7.4127L67.921 14.0874C67.8049 14.2031 67.719 14.3456 67.671 14.5024L67.0105 16.6784C66.9975 16.7217 66.9966 16.7676 67.0076 16.8113C67.0187 16.8551 67.0414 16.895 67.0734 16.9269C67.1053 16.9588 67.1453 16.9815 67.1891 16.9925C67.2328 17.0035 67.2788 17.0024 67.322 16.9894L69.4985 16.3294C69.6551 16.2818 69.7976 16.1964 69.9135 16.0809L76.5871 9.40624Z" stroke="white" stroke-width="1.33" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M74 8L76 10" stroke="white" stroke-width="1.33" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M70.3877 7.53516V6.53516" stroke="white" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M73.5693 16.6992V17.6992" stroke="white" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M66.3877 10.5352H67.3877" stroke="white" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M77.5693 13.6992H76.5693" stroke="white" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M68.3877 8.53516L67.3877 7.53516" stroke="white" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M75.5693 15.6992L76.5693 16.6992" stroke="white" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M87.334 11.9987L92.0007 7.33203L96.6673 11.9987" stroke="white" stroke-width="1.6" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M92 16.6654V7.33203" stroke="white" stroke-width="1.6" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M117 12C117 10.6739 116.473 9.40215 115.536 8.46447C114.598 7.52678 113.326 7 112 7C110.602 7.00526 109.261 7.55068 108.256 8.52222L107 9.77778" stroke="white" stroke-width="1.6" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M107 7V9.77778H109.778" stroke="white" stroke-width="1.6" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M107 12C107 13.3261 107.527 14.5979 108.464 15.5355C109.402 16.4732 110.674 17 112 17C113.398 16.9947 114.739 16.4493 115.744 15.4778L117 14.2222" stroke="white" stroke-width="1.6" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M114.223 14.2188H117V16.9965" stroke="white" stroke-width="1.6" stroke-linecap="round" stroke-linejoin="round"/>
</g>
</svg>
</pattern>
<linearGradient id="fade" y2="1" x2="0">
<stop offset="0" stop-color="white" stop-opacity=".52"/>
<stop offset="1" stop-color="white" stop-opacity="0"/>
</linearGradient>
<mask id="fadeMask" maskContentUnits="objectBoundingBox">
<rect width="1" height="1" fill="url(#fade)"/>
</mask>
</defs>
<rect width="100%" height="100%" fill="url(#tilePattern)" mask="url(#fadeMask)"/>
</svg>
@@ -1,6 +1,6 @@
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
-<path d="M3 4H8" stroke="black" stroke-width="1.75" stroke-linecap="round" stroke-linejoin="round"/>
-<path d="M6 10L11 10" stroke="black" stroke-width="1.75" stroke-linecap="round" stroke-linejoin="round"/>
-<circle cx="4" cy="10" r="1.875" stroke="black" stroke-width="1.75"/>
-<circle cx="10" cy="4" r="1.875" stroke="black" stroke-width="1.75"/>
+<path d="M3 4H8" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
+<path d="M6 10L11 10" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
+<circle cx="4" cy="10" r="1.875" stroke="black" stroke-width="1.5"/>
+<circle cx="10" cy="4" r="1.875" stroke="black" stroke-width="1.5"/>
</svg>

Before: 450 B | After: 446 B
@@ -30,6 +30,13 @@
"ctrl-0": ["zed::ResetBufferFontSize", { "persist": false }],
"ctrl-,": "zed::OpenSettings",
"ctrl-q": "zed::Quit",
"f4": "debugger::Start",
"f5": "debugger::Continue",
"shift-f5": "debugger::Stop",
"f6": "debugger::Pause",
"f7": "debugger::StepOver",
"cmd-f11": "debugger::StepInto",
"shift-f11": "debugger::StepOut",
"f11": "zed::ToggleFullScreen",
"ctrl-alt-z": "edit_prediction::RateCompletions",
"ctrl-shift-i": "edit_prediction::ToggleMenu"
@@ -46,7 +53,9 @@
"context": "Prompt",
"bindings": {
"left": "menu::SelectPrevious",
"right": "menu::SelectNext"
"right": "menu::SelectNext",
"h": "menu::SelectPrevious",
"l": "menu::SelectNext"
}
},
{
@@ -107,6 +116,7 @@
|
||||
"ctrl-a": "editor::SelectAll",
|
||||
"ctrl-l": "editor::SelectLine",
|
||||
"ctrl-shift-i": "editor::Format",
|
||||
"alt-shift-o": "editor::OrganizeImports",
|
||||
// "cmd-shift-left": ["editor::SelectToBeginningOfLine", {"stop_at_soft_wraps": true, "stop_at_indent": true }],
|
||||
// "ctrl-shift-a": ["editor::SelectToBeginningOfLine", { "stop_at_soft_wraps": true, "stop_at_indent": true }],
|
||||
"shift-home": ["editor::SelectToBeginningOfLine", { "stop_at_soft_wraps": true, "stop_at_indent": true }],
|
||||
@@ -123,7 +133,9 @@
|
||||
"alt-g b": "editor::ToggleGitBlame",
|
||||
"menu": "editor::OpenContextMenu",
|
||||
"shift-f10": "editor::OpenContextMenu",
|
||||
"ctrl-shift-e": "editor::ToggleEditPrediction"
|
||||
"ctrl-shift-e": "editor::ToggleEditPrediction",
|
||||
"f9": "editor::ToggleBreakpoint",
|
||||
"shift-f9": "editor::EditLogBreakpoint"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -362,6 +374,7 @@
|
||||
"ctrl-k ctrl-0": "editor::FoldAll",
|
||||
"ctrl-k ctrl-j": "editor::UnfoldAll",
|
||||
"ctrl-space": "editor::ShowCompletions",
|
||||
"ctrl-shift-space": "editor::ShowWordCompletions",
|
||||
"ctrl-.": "editor::ToggleCodeActions",
|
||||
"ctrl-k r": "editor::RevealInFileManager",
|
||||
"ctrl-k p": "editor::CopyPath",
|
||||
@@ -393,6 +406,7 @@
|
||||
"alt-shift-open": "projects::OpenRemote",
|
||||
"alt-ctrl-shift-o": "projects::OpenRemote",
|
||||
"alt-ctrl-shift-b": "branches::OpenRecent",
|
||||
"alt-shift-enter": "toast::RunAction",
|
||||
"ctrl-~": "workspace::NewTerminal",
|
||||
"save": "workspace::Save",
|
||||
"ctrl-s": "workspace::Save",
|
||||
@@ -731,28 +745,50 @@
|
||||
"up": "menu::SelectPrevious",
|
||||
"down": "menu::SelectNext",
|
||||
"enter": "menu::Confirm",
|
||||
"alt-y": "git::StageFile",
|
||||
"alt-shift-y": "git::UnstageFile",
|
||||
"ctrl-alt-y": "git::ToggleStaged",
|
||||
"space": "git::ToggleStaged",
|
||||
"ctrl-space": "git::StageAll",
|
||||
"ctrl-shift-space": "git::UnstageAll",
|
||||
"tab": "git_panel::FocusEditor",
|
||||
"shift-tab": "git_panel::FocusEditor",
|
||||
"escape": "git_panel::ToggleFocus",
|
||||
"ctrl-enter": "git::Commit",
|
||||
"alt-enter": "menu::SecondaryConfirm"
|
||||
"alt-enter": "menu::SecondaryConfirm",
|
||||
"delete": "git::RestoreFile",
|
||||
"shift-delete": "git::RestoreFile",
|
||||
"backspace": "git::RestoreFile"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "GitCommit > Editor",
|
||||
"bindings": {
|
||||
"escape": "menu::Cancel",
|
||||
"enter": "editor::Newline",
|
||||
"ctrl-enter": "git::Commit",
|
||||
"alt-l": "git::GenerateCommitMessage"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "GitPanel",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"ctrl-g ctrl-g": "git::Fetch",
|
||||
"ctrl-g up": "git::Push",
|
||||
"ctrl-g down": "git::Pull",
|
||||
"ctrl-g shift-up": "git::ForcePush",
|
||||
"ctrl-g d": "git::Diff",
|
||||
"ctrl-g backspace": "git::RestoreTrackedFiles",
|
||||
"ctrl-g shift-backspace": "git::TrashUntrackedFiles",
|
||||
"ctrl-space": "git::StageAll",
|
||||
"ctrl-shift-space": "git::UnstageAll"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "GitDiff > Editor",
|
||||
"bindings": {
|
||||
"ctrl-enter": "git::Commit"
|
||||
"ctrl-enter": "git::Commit",
|
||||
"ctrl-space": "git::StageAll",
|
||||
"ctrl-shift-space": "git::UnstageAll"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -767,6 +803,7 @@
|
||||
"escape": "git_panel::FocusChanges",
|
||||
"tab": "git_panel::FocusChanges",
|
||||
"shift-tab": "git_panel::FocusChanges",
|
||||
"enter": "editor::Newline",
|
||||
"ctrl-enter": "git::Commit",
|
||||
"alt-up": "git_panel::FocusChanges",
|
||||
"alt-l": "git::GenerateCommitMessage"
|
||||
@@ -840,21 +877,22 @@
|
||||
"alt-b": ["terminal::SendText", "\u001bb"],
|
||||
"alt-f": ["terminal::SendText", "\u001bf"],
|
||||
// Overrides for conflicting keybindings
|
||||
"ctrl-b": ["terminal::SendKeystroke", "ctrl-b"],
|
||||
"ctrl-c": ["terminal::SendKeystroke", "ctrl-c"],
|
||||
"ctrl-e": ["terminal::SendKeystroke", "ctrl-e"],
|
||||
"ctrl-o": ["terminal::SendKeystroke", "ctrl-o"],
|
||||
"ctrl-w": ["terminal::SendKeystroke", "ctrl-w"],
|
||||
"ctrl-shift-a": "editor::SelectAll",
|
||||
"find": "buffer_search::Deploy",
|
||||
"ctrl-shift-f": "buffer_search::Deploy",
|
||||
"ctrl-shift-l": "terminal::Clear",
|
||||
"ctrl-shift-w": "pane::CloseActiveItem",
|
||||
"ctrl-e": ["terminal::SendKeystroke", "ctrl-e"],
|
||||
"up": ["terminal::SendKeystroke", "up"],
|
||||
"pageup": ["terminal::SendKeystroke", "pageup"],
|
||||
"down": ["terminal::SendKeystroke", "down"],
|
||||
"pagedown": ["terminal::SendKeystroke", "pagedown"],
|
||||
"escape": ["terminal::SendKeystroke", "escape"],
|
||||
"enter": ["terminal::SendKeystroke", "enter"],
|
||||
"ctrl-b": ["terminal::SendKeystroke", "ctrl-b"],
|
||||
"ctrl-c": ["terminal::SendKeystroke", "ctrl-c"],
|
||||
"shift-pageup": "terminal::ScrollPageUp",
|
||||
"shift-pagedown": "terminal::ScrollPageDown",
|
||||
"shift-up": "terminal::ScrollLineUp",
|
||||
|
||||
@@ -14,6 +14,13 @@
|
||||
{
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"f4": "debugger::Start",
|
||||
"f5": "debugger::Continue",
|
||||
"shift-f5": "debugger::Stop",
|
||||
"f6": "debugger::Pause",
|
||||
"f7": "debugger::StepOver",
|
||||
"f11": "debugger::StepInto",
|
||||
"shift-f11": "debugger::StepOut",
|
||||
"home": "menu::SelectFirst",
|
||||
"shift-pageup": "menu::SelectFirst",
|
||||
"pageup": "menu::SelectFirst",
|
||||
@@ -31,13 +38,13 @@
|
||||
"enter": "menu::Confirm",
|
||||
"ctrl-enter": "menu::SecondaryConfirm",
|
||||
"cmd-enter": "menu::SecondaryConfirm",
|
||||
"cmd-escape": "menu::Cancel",
|
||||
"ctrl-escape": "menu::Cancel",
|
||||
"ctrl-c": "menu::Cancel",
|
||||
"escape": "menu::Cancel",
|
||||
"alt-shift-enter": "menu::Restart",
|
||||
"cmd-shift-w": "workspace::CloseWindow",
|
||||
"shift-escape": "workspace::ToggleZoom",
|
||||
"cmd-escape": "menu::Cancel",
|
||||
"cmd-o": "workspace::Open",
|
||||
"cmd-=": ["zed::IncreaseBufferFontSize", { "persist": false }],
|
||||
"cmd-+": ["zed::IncreaseBufferFontSize", { "persist": false }],
|
||||
@@ -148,6 +155,8 @@
|
||||
"cmd-\"": "editor::ExpandAllDiffHunks",
|
||||
"cmd-alt-g b": "editor::ToggleGitBlame",
|
||||
"cmd-i": "editor::ShowSignatureHelp",
|
||||
"f9": "editor::ToggleBreakpoint",
|
||||
"shift-f9": "editor::EditLogBreakpoint",
|
||||
"ctrl-f12": "editor::GoToDeclaration",
|
||||
"alt-ctrl-f12": "editor::GoToDeclarationSplit",
|
||||
"ctrl-cmd-e": "editor::ToggleEditPrediction"
|
||||
@@ -466,6 +475,7 @@
|
||||
// Using `ctrl-space` in Zed requires disabling the macOS global shortcut.
|
||||
// System Preferences->Keyboard->Keyboard Shortcuts->Input Sources->Select the previous input source (uncheck)
|
||||
"ctrl-space": "editor::ShowCompletions",
|
||||
"ctrl-shift-space": "editor::ShowWordCompletions",
|
||||
"cmd-.": "editor::ToggleCodeActions",
|
||||
"cmd-k r": "editor::RevealInFileManager",
|
||||
"cmd-k p": "editor::CopyPath",
|
||||
@@ -514,6 +524,7 @@
|
||||
"ctrl-~": "workspace::NewTerminal",
|
||||
"cmd-s": "workspace::Save",
|
||||
"cmd-k s": "workspace::SaveWithoutFormat",
|
||||
"alt-shift-enter": "toast::RunAction",
|
||||
"cmd-shift-s": "workspace::SaveAs",
|
||||
"cmd-shift-n": "workspace::NewWindow",
|
||||
"ctrl-`": "terminal_panel::ToggleFocus",
|
||||
@@ -694,6 +705,16 @@
|
||||
"ctrl-]": "assistant::CycleNextInlineAssist"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Prompt",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"left": "menu::SelectPrevious",
|
||||
"right": "menu::SelectNext",
|
||||
"h": "menu::SelectPrevious",
|
||||
"l": "menu::SelectNext"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "ProjectSearchBar && !in_replace",
|
||||
"use_key_equivalents": true,
|
||||
@@ -754,6 +775,14 @@
|
||||
"space": "project_panel::Open"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "VariableList",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"left": "variable_list::CollapseSelectedEntry",
|
||||
"right": "variable_list::ExpandSelectedEntry"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "GitPanel && ChangesList",
|
||||
"use_key_equivalents": true,
|
||||
@@ -763,28 +792,27 @@
|
||||
"cmd-up": "menu::SelectFirst",
|
||||
"cmd-down": "menu::SelectLast",
|
||||
"enter": "menu::Confirm",
|
||||
"cmd-alt-y": "git::ToggleStaged",
|
||||
"space": "git::ToggleStaged",
|
||||
"cmd-shift-space": "git::StageAll",
|
||||
"ctrl-shift-space": "git::UnstageAll",
|
||||
"cmd-y": "git::StageFile",
|
||||
"cmd-shift-y": "git::UnstageFile",
|
||||
"alt-down": "git_panel::FocusEditor",
|
||||
"tab": "git_panel::FocusEditor",
|
||||
"shift-tab": "git_panel::FocusEditor",
|
||||
"escape": "git_panel::ToggleFocus",
|
||||
"cmd-enter": "git::Commit"
|
||||
"cmd-enter": "git::Commit",
|
||||
"delete": "git::RestoreFile",
|
||||
"cmd-backspace": "git::RestoreFile",
|
||||
"backspace": "git::RestoreFile"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "GitDiff > Editor",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"cmd-enter": "git::Commit"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "AskPass > Editor",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"enter": "menu::Confirm"
|
||||
"cmd-enter": "git::Commit",
|
||||
"cmd-ctrl-y": "git::StageAll",
|
||||
"cmd-ctrl-shift-y": "git::UnstageAll"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -800,11 +828,27 @@
|
||||
"alt-tab": "git::GenerateCommitMessage"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "GitPanel",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"ctrl-g ctrl-g": "git::Fetch",
|
||||
"ctrl-g up": "git::Push",
|
||||
"ctrl-g down": "git::Pull",
|
||||
"ctrl-g shift-up": "git::ForcePush",
|
||||
"ctrl-g d": "git::Diff",
|
||||
"ctrl-g backspace": "git::RestoreTrackedFiles",
|
||||
"ctrl-g shift-backspace": "git::TrashUntrackedFiles",
|
||||
"cmd-ctrl-y": "git::StageAll",
|
||||
"cmd-ctrl-shift-y": "git::UnstageAll"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "GitCommit > Editor",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"enter": "editor::Newline",
|
||||
"escape": "menu::Cancel",
|
||||
"cmd-enter": "git::Commit",
|
||||
"alt-tab": "git::GenerateCommitMessage"
|
||||
}
|
||||
|
||||
@@ -3,7 +3,14 @@
|
||||
"bindings": {
|
||||
"ctrl-alt-s": "zed::OpenSettings",
|
||||
"ctrl-{": "pane::ActivatePreviousItem",
|
||||
"ctrl-}": "pane::ActivateNextItem"
|
||||
"ctrl-}": "pane::ActivateNextItem",
|
||||
"ctrl-f2": "debugger::Stop",
|
||||
"f6": "debugger::Pause",
|
||||
"f7": "debugger::StepInto",
|
||||
"f8": "debugger::StepOver",
|
||||
"shift-f8": "debugger::StepOut",
|
||||
"f9": "debugger::Continue",
|
||||
"alt-shift-f9": "debugger::Start"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -31,6 +38,7 @@
|
||||
"shift-alt-up": "editor::MoveLineUp",
|
||||
"shift-alt-down": "editor::MoveLineDown",
|
||||
"ctrl-alt-l": "editor::Format",
|
||||
"ctrl-alt-o": "editor::OrganizeImports",
|
||||
"shift-f6": "editor::Rename",
|
||||
"ctrl-alt-left": "pane::GoBack",
|
||||
"ctrl-alt-right": "pane::GoForward",
|
||||
@@ -48,7 +56,9 @@
|
||||
"ctrl-home": "editor::MoveToBeginning",
|
||||
"ctrl-end": "editor::MoveToEnd",
|
||||
"ctrl-shift-home": "editor::SelectToBeginning",
|
||||
"ctrl-shift-end": "editor::SelectToEnd"
|
||||
"ctrl-shift-end": "editor::SelectToEnd",
|
||||
"ctrl-f8": "editor::ToggleBreakpoint",
|
||||
"ctrl-shift-f8": "editor::EditLogBreakpoint"
|
||||
}
|
||||
},
|
||||
{
|
||||
|
||||
@@ -2,7 +2,14 @@
|
||||
{
|
||||
"bindings": {
|
||||
"cmd-{": "pane::ActivatePreviousItem",
|
||||
"cmd-}": "pane::ActivateNextItem"
|
||||
"cmd-}": "pane::ActivateNextItem",
|
||||
"ctrl-f2": "debugger::Stop",
|
||||
"f6": "debugger::Pause",
|
||||
"f7": "debugger::StepInto",
|
||||
"f8": "debugger::StepOver",
|
||||
"shift-f8": "debugger::StepOut",
|
||||
"f9": "debugger::Continue",
|
||||
"alt-shift-f9": "debugger::Start"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -29,6 +36,7 @@
|
||||
"shift-alt-up": "editor::MoveLineUp",
|
||||
"shift-alt-down": "editor::MoveLineDown",
|
||||
"cmd-alt-l": "editor::Format",
|
||||
"ctrl-alt-o": "editor::OrganizeImports",
|
||||
"shift-f6": "editor::Rename",
|
||||
"cmd-[": "pane::GoBack",
|
||||
"cmd-]": "pane::GoForward",
|
||||
@@ -45,7 +53,9 @@
|
||||
"cmd-home": "editor::MoveToBeginning",
|
||||
"cmd-end": "editor::MoveToEnd",
|
||||
"cmd-shift-home": "editor::SelectToBeginning",
|
||||
"cmd-shift-end": "editor::SelectToEnd"
|
||||
"cmd-shift-end": "editor::SelectToEnd",
|
||||
"ctrl-f8": "editor::ToggleBreakpoint",
|
||||
"ctrl-shift-f8": "editor::EditLogBreakpoint"
|
||||
}
|
||||
},
|
||||
{
|
||||
|
||||
18
assets/prompts/assistant_system_prompt.hbs
Normal file
@@ -0,0 +1,18 @@
You are an AI assistant integrated into a text editor. Your goal is to do one of the following two things:

1. Help users answer questions and perform tasks related to their codebase.
2. Answer general-purpose questions unrelated to their particular codebase.

It will be up to you to decide which of these you are doing based on what the user has told you. When unclear, ask clarifying questions to understand the user's intent before proceeding.

You should only perform actions that modify the user’s system if explicitly requested by the user:
- If the user asks a question about how to accomplish a task, provide guidance or information, and use read-only tools (e.g., search) to assist. You may suggest potential actions, but do not directly modify the user’s system without explicit instruction.
- If the user clearly requests that you perform an action, carry out the action directly without explaining why you are doing so.

Be concise and direct in your responses.

The user has opened a project that contains the following root directories/files:

{{#each worktrees}}
- {{root_name}} (absolute path: {{abs_path}})
{{/each}}
@@ -136,6 +136,11 @@
// Whether to use the system provided dialogs for Open and Save As.
// When set to false, Zed will use the built-in keyboard-first pickers.
"use_system_path_prompts": true,
// Whether to use the system provided dialogs for prompts, such as confirmation
// prompts.
// When set to false, Zed will use its built-in prompts. Note that on Linux,
// this option is ignored and Zed will always use the built-in prompts.
"use_system_prompts": true,
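For example, someone who prefers Zed's built-in, keyboard-first pickers and prompts everywhere could override both flags in their own settings.json, as in the minimal sketch below (per the comment above, the second flag has no effect on Linux):

{
  // Use Zed's built-in file pickers instead of the OS dialogs.
  "use_system_path_prompts": false,
  // Use Zed's built-in confirmation prompts (ignored on Linux).
  "use_system_prompts": false
}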
// Whether the cursor blinks in the editor.
"cursor_blink": true,
// Cursor shape for the default editor.
@@ -324,6 +329,8 @@
|
||||
"code_actions": true,
|
||||
// Whether to show runnables buttons in the gutter.
|
||||
"runnables": true,
|
||||
// Whether to show breakpoints in the gutter.
|
||||
"breakpoints": true,
|
||||
// Whether to show fold buttons in the gutter.
|
||||
"folds": true
|
||||
},
|
||||
@@ -336,14 +343,14 @@
|
||||
"active_line_width": 1,
|
||||
// Determines how indent guides are colored.
|
||||
// This setting can take the following three values:
|
||||
///
|
||||
//
|
||||
// 1. "disabled"
|
||||
// 2. "fixed"
|
||||
// 3. "indent_aware"
|
||||
"coloring": "fixed",
|
||||
// Determines how indent guide backgrounds are colored.
|
||||
// This setting can take the following two values:
|
||||
///
|
||||
//
|
||||
// 1. "disabled"
|
||||
// 2. "indent_aware"
|
||||
"background_coloring": "disabled"
|
||||
@@ -402,8 +409,8 @@
|
||||
// Time to wait after scrolling the buffer, before requesting the hints,
|
||||
// set to 0 to disable debouncing.
|
||||
"scroll_debounce_ms": 50,
|
||||
/// A set of modifiers which, when pressed, will toggle the visibility of inlay hints.
|
||||
/// If the set if empty or not all the modifiers specified are pressed, inlay hints will not be toggled.
|
||||
// A set of modifiers which, when pressed, will toggle the visibility of inlay hints.
|
||||
// If the set if empty or not all the modifiers specified are pressed, inlay hints will not be toggled.
|
||||
"toggle_on_modifiers_press": {
|
||||
"control": false,
|
||||
"shift": false,
|
||||
@@ -440,7 +447,7 @@
|
||||
"scrollbar": {
|
||||
// When to show the scrollbar in the project panel.
|
||||
// This setting can take five values:
|
||||
///
|
||||
//
|
||||
// 1. null (default): Inherit editor settings
|
||||
// 2. Show the scrollbar if there's important information or
|
||||
// follow the system's configured behavior (default):
|
||||
@@ -455,7 +462,7 @@
|
||||
},
|
||||
// Which files containing diagnostic errors/warnings to mark in the project panel.
|
||||
// This setting can take the following three values:
|
||||
///
|
||||
//
|
||||
// 1. Do not mark any files:
|
||||
// "off"
|
||||
// 2. Only mark files with errors:
|
||||
@@ -512,7 +519,7 @@
|
||||
"scrollbar": {
|
||||
// When to show the scrollbar in the project panel.
|
||||
// This setting can take five values:
|
||||
///
|
||||
//
|
||||
// 1. null (default): Inherit editor settings
|
||||
// 2. Show the scrollbar if there's important information or
|
||||
// follow the system's configured behavior (default):
|
||||
@@ -547,7 +554,7 @@
|
||||
"git_panel": {
|
||||
// Whether to show the git panel button in the status bar.
|
||||
"button": true,
|
||||
// Where to the git panel. Can be 'left' or 'right'.
|
||||
// Where to show the git panel. Can be 'left' or 'right'.
|
||||
"dock": "left",
|
||||
// Default width of the git panel.
|
||||
"default_width": 360,
|
||||
@@ -555,6 +562,12 @@
|
||||
//
|
||||
// Default: icon
|
||||
"status_style": "icon",
|
||||
// What branch name to use if init.defaultBranch
|
||||
// is not set
|
||||
//
|
||||
// Default: main
|
||||
"fallback_branch_name": "main",
|
||||
|
||||
"scrollbar": {
|
||||
// When to show the scrollbar in the git panel.
|
||||
//
|
||||
@@ -594,6 +607,13 @@
|
||||
"provider": "zed.dev",
|
||||
// The model to use.
|
||||
"model": "claude-3-5-sonnet-latest"
|
||||
},
|
||||
// The model to use when applying edits from the assistant.
|
||||
"editor_model": {
|
||||
// The provider to use.
|
||||
"provider": "zed.dev",
|
||||
// The model to use.
|
||||
"model": "claude-3-5-sonnet-latest"
|
||||
}
|
||||
},
|
||||
// The settings for slash commands.
|
||||
@@ -673,7 +693,7 @@
|
||||
// Which files containing diagnostic errors/warnings to mark in the tabs.
|
||||
// Diagnostics are only shown when file icons are also active.
|
||||
// This setting can take the following three values:
|
||||
///
|
||||
//
|
||||
// 1. Do not mark any files:
|
||||
// "off"
|
||||
// 2. Only mark files with errors:
|
||||
@@ -841,12 +861,20 @@
|
||||
// How git hunks are displayed visually in the editor.
|
||||
// This setting can take two values:
|
||||
//
|
||||
// 1. Show unstaged hunks with a transparent background (default):
|
||||
// "hunk_style": "transparent"
|
||||
// 2. Show unstaged hunks with a pattern background:
|
||||
// "hunk_style": "pattern"
|
||||
"hunk_style": "staged_border"
|
||||
// 1. Show unstaged hunks filled and staged hunks hollow:
|
||||
// "hunk_style": "staged_hollow"
|
||||
// 2. Show unstaged hunks hollow and staged hunks filled:
|
||||
// "hunk_style": "unstaged_hollow"
|
||||
"hunk_style": "staged_hollow"
|
||||
},
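For readers updating their own configuration after this change, a minimal user-level override selecting the other documented style might look like the sketch below; it assumes hunk_style still sits under the top-level "git" object, as in the default settings:

{
  "git": {
    // Unstaged hunks hollow, staged hunks filled (the second documented value).
    "hunk_style": "unstaged_hollow"
  }
}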
|
||||
// The list of custom Git hosting providers.
|
||||
"git_hosting_providers": [
|
||||
// {
|
||||
// "provider": "github",
|
||||
// "name": "BigCorp GitHub",
|
||||
// "base_url": "https://code.big-corp.com"
|
||||
// }
|
||||
],
|
||||
// Configuration for how direnv configuration should be loaded. May take 2 values:
|
||||
// 1. Load direnv configuration using `direnv export json` directly.
|
||||
// "load_direnv": "direct"
|
||||
@@ -1009,7 +1037,7 @@
|
||||
"scrollbar": {
|
||||
// When to show the scrollbar in the terminal.
|
||||
// This setting can take five values:
|
||||
///
|
||||
//
|
||||
// 1. null (default): Inherit editor settings
|
||||
// 2. Show the scrollbar if there's important information or
|
||||
// follow the system's configured behavior (default):
|
||||
@@ -1080,6 +1108,32 @@
"auto_install_extensions": {
"html": true
},
// Controls how completions are processed for this language.
"completions": {
// Controls how words are completed.
// For large documents, not all words may be fetched for completion.
//
// May take 3 values:
// 1. "enabled"
// Always fetch document's words for completions along with LSP completions.
// 2. "fallback"
// Only if LSP response errors or times out, use document's words to show completions.
// 3. "disabled"
// Never fetch or complete document's words for completions.
// (Word-based completions can still be queried via a separate action)
//
// Default: fallback
"words": "fallback",
// Whether to fetch LSP completions or not.
//
// Default: true
"lsp": true,
// When fetching LSP completions, determines how long to wait for a response of a particular server.
// When set to 0, waits indefinitely.
//
// Default: 0
"lsp_fetch_timeout_ms": 0
},
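As a quick illustration of the options documented above, a user-level settings.json override could look like the following sketch; the values are the ones listed in the comments, and the timeout figure is an arbitrary example:

{
  "completions": {
    // Always offer document words alongside LSP completions.
    "words": "enabled",
    "lsp": true,
    // Example value only: wait up to two seconds for a slow server before falling back.
    "lsp_fetch_timeout_ms": 2000
  }
}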
// Different settings for specific languages.
"languages": {
"Astro": {
@@ -1300,8 +1354,7 @@
|
||||
},
|
||||
// Settings for auto-closing of JSX tags.
|
||||
"jsx_tag_auto_close": {
|
||||
// // Whether to auto-close JSX tags.
|
||||
// "enabled": true
|
||||
"enabled": true
|
||||
},
|
||||
// LSP Specific settings.
|
||||
"lsp": {
|
||||
@@ -1407,6 +1460,12 @@
|
||||
// }
|
||||
// ]
|
||||
"ssh_connections": [],
|
||||
|
||||
// Configures context servers for use in the Assistant.
|
||||
"context_servers": {}
|
||||
"context_servers": {},
|
||||
"debugger": {
|
||||
"stepping_granularity": "line",
|
||||
"save_breakpoints": true,
|
||||
"button": true
|
||||
}
|
||||
}
|
||||
|
||||
32
assets/settings/initial_debug_tasks.json
Normal file
@@ -0,0 +1,32 @@
[
  {
    "label": "Debug active PHP file",
    "adapter": "php",
    "program": "$ZED_FILE",
    "request": "launch",
    "cwd": "$ZED_WORKTREE_ROOT"
  },
  {
    "label": "Debug active Python file",
    "adapter": "python",
    "program": "$ZED_FILE",
    "request": "launch",
    "cwd": "$ZED_WORKTREE_ROOT"
  },
  {
    "label": "Debug active JavaScript file",
    "adapter": "javascript",
    "program": "$ZED_FILE",
    "request": "launch",
    "cwd": "$ZED_WORKTREE_ROOT"
  },
  {
    "label": "JavaScript debug terminal",
    "adapter": "javascript",
    "request": "launch",
    "cwd": "$ZED_WORKTREE_ROOT",
    "initialize_args": {
      "console": "integratedTerminal"
    }
  }
]
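All of the entries above share the same shape (label, adapter, program, request, cwd). As a sketch, a task that targets one specific file instead of the active buffer could reuse that shape as follows; the path main.py is a made-up placeholder:

{
  "label": "Debug project entry point",
  "adapter": "python",
  // "main.py" is a placeholder; point this at a real file in your worktree.
  "program": "$ZED_WORKTREE_ROOT/main.py",
  "request": "launch",
  "cwd": "$ZED_WORKTREE_ROOT"
}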
@@ -6,15 +6,7 @@
|
||||
{
|
||||
"name": "Gruvbox Dark",
|
||||
"appearance": "dark",
|
||||
"accents": [
|
||||
"#cc241dff",
|
||||
"#98971aff",
|
||||
"#d79921ff",
|
||||
"#458588ff",
|
||||
"#b16286ff",
|
||||
"#689d6aff",
|
||||
"#d65d0eff"
|
||||
],
|
||||
"accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"],
|
||||
"style": {
|
||||
"border": "#5b534dff",
|
||||
"border.variant": "#494340ff",
|
||||
@@ -105,9 +97,9 @@
|
||||
"terminal.ansi.bright_white": "#fbf1c7ff",
|
||||
"terminal.ansi.dim_white": "#b0a189ff",
|
||||
"link_text.hover": "#83a598ff",
|
||||
"version_control_added": "#b7bb26ff",
|
||||
"version_control_modified": "#f9bd2fff",
|
||||
"version_control_deleted": "#fb4a35ff",
|
||||
"version_control.added": "#b7bb26ff",
|
||||
"version_control.modified": "#f9bd2fff",
|
||||
"version_control.deleted": "#fb4a35ff",
|
||||
"conflict": "#f9bd2fff",
|
||||
"conflict.background": "#572e10ff",
|
||||
"conflict.border": "#754916ff",
|
||||
@@ -383,6 +375,11 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"variable.special": {
|
||||
"color": "#83a598ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"variant": {
|
||||
"color": "#83a598ff",
|
||||
"font_style": null,
|
||||
@@ -394,15 +391,7 @@
|
||||
{
|
||||
"name": "Gruvbox Dark Hard",
|
||||
"appearance": "dark",
|
||||
"accents": [
|
||||
"#cc241dff",
|
||||
"#98971aff",
|
||||
"#d79921ff",
|
||||
"#458588ff",
|
||||
"#b16286ff",
|
||||
"#689d6aff",
|
||||
"#d65d0eff"
|
||||
],
|
||||
"accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"],
|
||||
"style": {
|
||||
"border": "#5b534dff",
|
||||
"border.variant": "#494340ff",
|
||||
@@ -493,9 +482,9 @@
|
||||
"terminal.ansi.bright_white": "#fbf1c7ff",
|
||||
"terminal.ansi.dim_white": "#b0a189ff",
|
||||
"link_text.hover": "#83a598ff",
|
||||
"version_control_added": "#b7bb26ff",
|
||||
"version_control_modified": "#f9bd2fff",
|
||||
"version_control_deleted": "#fb4a35ff",
|
||||
"version_control.added": "#b7bb26ff",
|
||||
"version_control.modified": "#f9bd2fff",
|
||||
"version_control.deleted": "#fb4a35ff",
|
||||
"conflict": "#f9bd2fff",
|
||||
"conflict.background": "#572e10ff",
|
||||
"conflict.border": "#754916ff",
|
||||
@@ -771,6 +760,11 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"variable.special": {
|
||||
"color": "#83a598ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"variant": {
|
||||
"color": "#83a598ff",
|
||||
"font_style": null,
|
||||
@@ -782,15 +776,7 @@
|
||||
{
|
||||
"name": "Gruvbox Dark Soft",
|
||||
"appearance": "dark",
|
||||
"accents": [
|
||||
"#cc241dff",
|
||||
"#98971aff",
|
||||
"#d79921ff",
|
||||
"#458588ff",
|
||||
"#b16286ff",
|
||||
"#689d6aff",
|
||||
"#d65d0eff"
|
||||
],
|
||||
"accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"],
|
||||
"style": {
|
||||
"border": "#5b534dff",
|
||||
"border.variant": "#494340ff",
|
||||
@@ -881,9 +867,9 @@
|
||||
"terminal.ansi.bright_white": "#fbf1c7ff",
|
||||
"terminal.ansi.dim_white": "#b0a189ff",
|
||||
"link_text.hover": "#83a598ff",
|
||||
"version_control_added": "#b7bb26ff",
|
||||
"version_control_modified": "#f9bd2fff",
|
||||
"version_control_deleted": "#fb4a35ff",
|
||||
"version_control.added": "#b7bb26ff",
|
||||
"version_control.modified": "#f9bd2fff",
|
||||
"version_control.deleted": "#fb4a35ff",
|
||||
"conflict": "#f9bd2fff",
|
||||
"conflict.background": "#572e10ff",
|
||||
"conflict.border": "#754916ff",
|
||||
@@ -1159,6 +1145,11 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"variable.special": {
|
||||
"color": "#83a598ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"variant": {
|
||||
"color": "#83a598ff",
|
||||
"font_style": null,
|
||||
@@ -1170,15 +1161,7 @@
|
||||
{
|
||||
"name": "Gruvbox Light",
|
||||
"appearance": "light",
|
||||
"accents": [
|
||||
"#cc241dff",
|
||||
"#98971aff",
|
||||
"#d79921ff",
|
||||
"#458588ff",
|
||||
"#b16286ff",
|
||||
"#689d6aff",
|
||||
"#d65d0eff"
|
||||
],
|
||||
"accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"],
|
||||
"style": {
|
||||
"border": "#c8b899ff",
|
||||
"border.variant": "#ddcca7ff",
|
||||
@@ -1269,9 +1252,9 @@
|
||||
"terminal.ansi.bright_white": "#282828ff",
|
||||
"terminal.ansi.dim_white": "#73675eff",
|
||||
"link_text.hover": "#0b6678ff",
|
||||
"version_control_added": "#797410ff",
|
||||
"version_control_modified": "#b57615ff",
|
||||
"version_control_deleted": "#9d0308ff",
|
||||
"version_control.added": "#797410ff",
|
||||
"version_control.modified": "#b57615ff",
|
||||
"version_control.deleted": "#9d0308ff",
|
||||
"conflict": "#b57615ff",
|
||||
"conflict.background": "#f5e2d0ff",
|
||||
"conflict.border": "#ebccabff",
|
||||
@@ -1547,6 +1530,11 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"variable.special": {
|
||||
"color": "#066578ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"variant": {
|
||||
"color": "#0b6678ff",
|
||||
"font_style": null,
|
||||
@@ -1558,15 +1546,7 @@
|
||||
{
|
||||
"name": "Gruvbox Light Hard",
|
||||
"appearance": "light",
|
||||
"accents": [
|
||||
"#cc241dff",
|
||||
"#98971aff",
|
||||
"#d79921ff",
|
||||
"#458588ff",
|
||||
"#b16286ff",
|
||||
"#689d6aff",
|
||||
"#d65d0eff"
|
||||
],
|
||||
"accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"],
|
||||
"style": {
|
||||
"border": "#c8b899ff",
|
||||
"border.variant": "#ddcca7ff",
|
||||
@@ -1657,9 +1637,9 @@
|
||||
"terminal.ansi.bright_white": "#282828ff",
|
||||
"terminal.ansi.dim_white": "#73675eff",
|
||||
"link_text.hover": "#0b6678ff",
|
||||
"version_control_added": "#797410ff",
|
||||
"version_control_modified": "#b57615ff",
|
||||
"version_control_deleted": "#9d0308ff",
|
||||
"version_control.added": "#797410ff",
|
||||
"version_control.modified": "#b57615ff",
|
||||
"version_control.deleted": "#9d0308ff",
|
||||
"conflict": "#b57615ff",
|
||||
"conflict.background": "#f5e2d0ff",
|
||||
"conflict.border": "#ebccabff",
|
||||
@@ -1935,6 +1915,11 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"variable.special": {
|
||||
"color": "#066578ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"variant": {
|
||||
"color": "#0b6678ff",
|
||||
"font_style": null,
|
||||
@@ -1946,15 +1931,7 @@
|
||||
{
|
||||
"name": "Gruvbox Light Soft",
|
||||
"appearance": "light",
|
||||
"accents": [
|
||||
"#cc241dff",
|
||||
"#98971aff",
|
||||
"#d79921ff",
|
||||
"#458588ff",
|
||||
"#b16286ff",
|
||||
"#689d6aff",
|
||||
"#d65d0eff"
|
||||
],
|
||||
"accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"],
|
||||
"style": {
|
||||
"border": "#c8b899ff",
|
||||
"border.variant": "#ddcca7ff",
|
||||
@@ -2045,9 +2022,9 @@
|
||||
"terminal.ansi.bright_white": "#282828ff",
|
||||
"terminal.ansi.dim_white": "#73675eff",
|
||||
"link_text.hover": "#0b6678ff",
|
||||
"version_control_added": "#797410ff",
|
||||
"version_control_modified": "#b57615ff",
|
||||
"version_control_deleted": "#9d0308ff",
|
||||
"version_control.added": "#797410ff",
|
||||
"version_control.modified": "#b57615ff",
|
||||
"version_control.deleted": "#9d0308ff",
|
||||
"conflict": "#b57615ff",
|
||||
"conflict.background": "#f5e2d0ff",
|
||||
"conflict.border": "#ebccabff",
|
||||
@@ -2323,6 +2300,11 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"variable.special": {
|
||||
"color": "#066578ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"variant": {
|
||||
"color": "#0b6678ff",
|
||||
"font_style": null,
|
||||
|
||||
@@ -96,9 +96,9 @@
|
||||
"terminal.ansi.bright_white": "#dce0e5ff",
|
||||
"terminal.ansi.dim_white": "#575d65ff",
|
||||
"link_text.hover": "#74ade8ff",
|
||||
"version_control_added": "#a7c088ff",
|
||||
"version_control_modified": "#dec184ff",
|
||||
"version_control_deleted": "#d07277ff",
|
||||
"version_control.added": "#27a657ff",
|
||||
"version_control.modified": "#d3b020ff",
|
||||
"version_control.deleted": "#e06c76ff",
|
||||
"conflict": "#dec184ff",
|
||||
"conflict.background": "#dec1841a",
|
||||
"conflict.border": "#5d4c2fff",
|
||||
@@ -475,9 +475,9 @@
|
||||
"terminal.ansi.bright_white": "#242529ff",
|
||||
"terminal.ansi.dim_white": "#97979aff",
|
||||
"link_text.hover": "#5c78e2ff",
|
||||
"version_control_added": "#669f59ff",
|
||||
"version_control_modified": "#a48819ff",
|
||||
"version_control_deleted": "#d36151ff",
|
||||
"version_control.added": "#27a657ff",
|
||||
"version_control.modified": "#d3b020ff",
|
||||
"version_control.deleted": "#e06c76ff",
|
||||
"conflict": "#a48819ff",
|
||||
"conflict.background": "#faf2e6ff",
|
||||
"conflict.border": "#f4e7d1ff",
|
||||
|
||||
@@ -20,7 +20,6 @@ extension_host.workspace = true
futures.workspace = true
gpui.workspace = true
language.workspace = true
lsp.workspace = true
project.workspace = true
smallvec.workspace = true
ui.workspace = true
@@ -7,8 +7,7 @@ use gpui::{
EventEmitter, InteractiveElement as _, ParentElement as _, Render, SharedString,
StatefulInteractiveElement, Styled, Transformation, Window,
};
use language::{LanguageRegistry, LanguageServerBinaryStatus, LanguageServerId};
use lsp::LanguageServerName;
use language::{BinaryStatus, LanguageRegistry, LanguageServerId};
use project::{
EnvironmentErrorMessage, LanguageServerProgress, LspStoreEvent, Project,
ProjectEnvironmentEvent, WorktreeId,
@@ -23,21 +22,21 @@ actions!(activity_indicator, [ShowErrorMessage]);
|
||||
|
||||
pub enum Event {
|
||||
ShowError {
|
||||
lsp_name: LanguageServerName,
|
||||
server_name: SharedString,
|
||||
error: String,
|
||||
},
|
||||
}
|
||||
|
||||
pub struct ActivityIndicator {
|
||||
statuses: Vec<LspStatus>,
|
||||
statuses: Vec<ServerStatus>,
|
||||
project: Entity<Project>,
|
||||
auto_updater: Option<Entity<AutoUpdater>>,
|
||||
context_menu_handle: PopoverMenuHandle<ContextMenu>,
|
||||
}
|
||||
|
||||
struct LspStatus {
|
||||
name: LanguageServerName,
|
||||
status: LanguageServerBinaryStatus,
|
||||
struct ServerStatus {
|
||||
name: SharedString,
|
||||
status: BinaryStatus,
|
||||
}
|
||||
|
||||
struct PendingWork<'a> {
|
||||
@@ -64,11 +63,24 @@ impl ActivityIndicator {
|
||||
let auto_updater = AutoUpdater::get(cx);
|
||||
let this = cx.new(|cx| {
|
||||
let mut status_events = languages.language_server_binary_statuses();
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
cx.spawn(async move |this, cx| {
|
||||
while let Some((name, status)) = status_events.next().await {
|
||||
this.update(&mut cx, |this: &mut ActivityIndicator, cx| {
|
||||
this.update(cx, |this: &mut ActivityIndicator, cx| {
|
||||
this.statuses.retain(|s| s.name != name);
|
||||
this.statuses.push(LspStatus { name, status });
|
||||
this.statuses.push(ServerStatus { name, status });
|
||||
cx.notify();
|
||||
})?;
|
||||
}
|
||||
anyhow::Ok(())
|
||||
})
|
||||
.detach();
|
||||
|
||||
let mut status_events = languages.dap_server_binary_statuses();
|
||||
cx.spawn(async move |this, cx| {
|
||||
while let Some((name, status)) = status_events.next().await {
|
||||
this.update(cx, |this, cx| {
|
||||
this.statuses.retain(|s| s.name != name);
|
||||
this.statuses.push(ServerStatus { name, status });
|
||||
cx.notify();
|
||||
})?;
|
||||
}
|
||||
@@ -106,25 +118,25 @@ impl ActivityIndicator {
|
||||
});
|
||||
|
||||
cx.subscribe_in(&this, window, move |_, _, event, window, cx| match event {
|
||||
Event::ShowError { lsp_name, error } => {
|
||||
Event::ShowError { server_name, error } => {
|
||||
let create_buffer = project.update(cx, |project, cx| project.create_buffer(cx));
|
||||
let project = project.clone();
|
||||
let error = error.clone();
|
||||
let lsp_name = lsp_name.clone();
|
||||
cx.spawn_in(window, |workspace, mut cx| async move {
|
||||
let server_name = server_name.clone();
|
||||
cx.spawn_in(window, async move |workspace, cx| {
|
||||
let buffer = create_buffer.await?;
|
||||
buffer.update(&mut cx, |buffer, cx| {
|
||||
buffer.update(cx, |buffer, cx| {
|
||||
buffer.edit(
|
||||
[(
|
||||
0..0,
|
||||
format!("Language server error: {}\n\n{}", lsp_name, error),
|
||||
format!("Language server error: {}\n\n{}", server_name, error),
|
||||
)],
|
||||
None,
|
||||
cx,
|
||||
);
|
||||
buffer.set_capability(language::Capability::ReadOnly, cx);
|
||||
})?;
|
||||
workspace.update_in(&mut cx, |workspace, window, cx| {
|
||||
workspace.update_in(cx, |workspace, window, cx| {
|
||||
workspace.add_item_to_active_pane(
|
||||
Box::new(cx.new(|cx| {
|
||||
Editor::for_buffer(buffer, Some(project.clone()), window, cx)
|
||||
@@ -147,9 +159,9 @@ impl ActivityIndicator {
|
||||
|
||||
fn show_error_message(&mut self, _: &ShowErrorMessage, _: &mut Window, cx: &mut Context<Self>) {
|
||||
self.statuses.retain(|status| {
|
||||
if let LanguageServerBinaryStatus::Failed { error } = &status.status {
|
||||
if let BinaryStatus::Failed { error } = &status.status {
|
||||
cx.emit(Event::ShowError {
|
||||
lsp_name: status.name.clone(),
|
||||
server_name: status.name.clone(),
|
||||
error: error.clone(),
|
||||
});
|
||||
false
|
||||
@@ -278,12 +290,10 @@ impl ActivityIndicator {
|
||||
let mut failed = SmallVec::<[_; 3]>::new();
|
||||
for status in &self.statuses {
|
||||
match status.status {
|
||||
LanguageServerBinaryStatus::CheckingForUpdate => {
|
||||
checking_for_update.push(status.name.clone())
|
||||
}
|
||||
LanguageServerBinaryStatus::Downloading => downloading.push(status.name.clone()),
|
||||
LanguageServerBinaryStatus::Failed { .. } => failed.push(status.name.clone()),
|
||||
LanguageServerBinaryStatus::None => {}
|
||||
BinaryStatus::CheckingForUpdate => checking_for_update.push(status.name.clone()),
|
||||
BinaryStatus::Downloading => downloading.push(status.name.clone()),
|
||||
BinaryStatus::Failed { .. } => failed.push(status.name.clone()),
|
||||
BinaryStatus::None => {}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -296,7 +306,7 @@ impl ActivityIndicator {
|
||||
),
|
||||
message: format!(
|
||||
"Downloading {}...",
|
||||
downloading.iter().map(|name| name.0.as_ref()).fold(
|
||||
downloading.iter().map(|name| name.as_ref()).fold(
|
||||
String::new(),
|
||||
|mut acc, s| {
|
||||
if !acc.is_empty() {
|
||||
@@ -324,7 +334,7 @@ impl ActivityIndicator {
|
||||
),
|
||||
message: format!(
|
||||
"Checking for updates to {}...",
|
||||
checking_for_update.iter().map(|name| name.0.as_ref()).fold(
|
||||
checking_for_update.iter().map(|name| name.as_ref()).fold(
|
||||
String::new(),
|
||||
|mut acc, s| {
|
||||
if !acc.is_empty() {
|
||||
@@ -354,7 +364,7 @@ impl ActivityIndicator {
|
||||
"Failed to run {}. Click to show error.",
|
||||
failed
|
||||
.iter()
|
||||
.map(|name| name.0.as_ref())
|
||||
.map(|name| name.as_ref())
|
||||
.fold(String::new(), |mut acc, s| {
|
||||
if !acc.is_empty() {
|
||||
acc.push_str(", ");
|
||||
|
||||
@@ -553,7 +553,7 @@ pub struct Metadata {
|
||||
pub user_id: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[derive(Debug, Serialize, Deserialize, Default)]
|
||||
pub struct Usage {
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
pub input_tokens: Option<u32>,
|
||||
|
||||
@@ -34,9 +34,9 @@ impl AskPassDelegate {
|
||||
password_prompt: impl Fn(String, oneshot::Sender<String>, &mut AsyncApp) + Send + Sync + 'static,
|
||||
) -> Self {
|
||||
let (tx, mut rx) = mpsc::unbounded::<(String, oneshot::Sender<String>)>();
|
||||
let task = cx.spawn(|mut cx| async move {
|
||||
let task = cx.spawn(async move |cx: &mut AsyncApp| {
|
||||
while let Some((prompt, channel)) = rx.next().await {
|
||||
password_prompt(prompt, channel, &mut cx);
|
||||
password_prompt(prompt, channel, cx);
|
||||
}
|
||||
});
|
||||
Self { tx, _task: task }
|
||||
|
||||
@@ -98,9 +98,9 @@ pub fn init(
|
||||
AssistantSettings::register(cx);
|
||||
SlashCommandSettings::register(cx);
|
||||
|
||||
cx.spawn(|mut cx| {
|
||||
cx.spawn({
|
||||
let client = client.clone();
|
||||
async move {
|
||||
async move |cx| {
|
||||
let is_search_slash_command_enabled = cx
|
||||
.update(|cx| cx.wait_for_flag::<SearchSlashCommandFeatureFlag>())?
|
||||
.await;
|
||||
@@ -116,7 +116,7 @@ pub fn init(
|
||||
let semantic_index = SemanticDb::new(
|
||||
paths::embeddings_dir().join("semantic-index-db.0.mdb"),
|
||||
Arc::new(embedding_provider),
|
||||
&mut cx,
|
||||
cx,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@@ -186,8 +186,12 @@ fn init_language_model_settings(cx: &mut App) {
|
||||
|
||||
fn update_active_language_model_from_settings(cx: &mut App) {
|
||||
let settings = AssistantSettings::get_global(cx);
|
||||
let provider_name = LanguageModelProviderId::from(settings.default_model.provider.clone());
|
||||
let model_id = LanguageModelId::from(settings.default_model.model.clone());
|
||||
let active_model_provider_name =
|
||||
LanguageModelProviderId::from(settings.default_model.provider.clone());
|
||||
let active_model_id = LanguageModelId::from(settings.default_model.model.clone());
|
||||
let editor_provider_name =
|
||||
LanguageModelProviderId::from(settings.editor_model.provider.clone());
|
||||
let editor_model_id = LanguageModelId::from(settings.editor_model.model.clone());
|
||||
let inline_alternatives = settings
|
||||
.inline_alternatives
|
||||
.iter()
|
||||
@@ -199,7 +203,8 @@ fn update_active_language_model_from_settings(cx: &mut App) {
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
|
||||
registry.select_active_model(&provider_name, &model_id, cx);
|
||||
registry.select_active_model(&active_model_provider_name, &active_model_id, cx);
|
||||
registry.select_editor_model(&editor_provider_name, &editor_model_id, cx);
|
||||
registry.select_inline_alternative_models(inline_alternatives, cx);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -98,16 +98,16 @@ impl AssistantPanel {
|
||||
prompt_builder: Arc<PromptBuilder>,
|
||||
cx: AsyncWindowContext,
|
||||
) -> Task<Result<Entity<Self>>> {
|
||||
cx.spawn(|mut cx| async move {
|
||||
cx.spawn(async move |cx| {
|
||||
let slash_commands = Arc::new(SlashCommandWorkingSet::default());
|
||||
let context_store = workspace
|
||||
.update(&mut cx, |workspace, cx| {
|
||||
.update(cx, |workspace, cx| {
|
||||
let project = workspace.project().clone();
|
||||
ContextStore::new(project, prompt_builder.clone(), slash_commands, cx)
|
||||
})?
|
||||
.await?;
|
||||
|
||||
workspace.update_in(&mut cx, |workspace, window, cx| {
|
||||
workspace.update_in(cx, |workspace, window, cx| {
|
||||
// TODO: deserialize state.
|
||||
cx.new(|cx| Self::new(workspace, context_store, window, cx))
|
||||
})
|
||||
@@ -297,7 +297,8 @@ impl AssistantPanel {
|
||||
&LanguageModelRegistry::global(cx),
|
||||
window,
|
||||
|this, _, event: &language_model::Event, window, cx| match event {
|
||||
language_model::Event::ActiveModelChanged => {
|
||||
language_model::Event::ActiveModelChanged
|
||||
| language_model::Event::EditorModelChanged => {
|
||||
this.completion_provider_changed(window, cx);
|
||||
}
|
||||
language_model::Event::ProviderStateChanged => {
|
||||
@@ -356,9 +357,9 @@ impl AssistantPanel {
|
||||
) -> Task<()> {
|
||||
let mut status_rx = client.status();
|
||||
|
||||
cx.spawn_in(window, |this, mut cx| async move {
|
||||
cx.spawn_in(window, async move |this, cx| {
|
||||
while let Some(status) = status_rx.next().await {
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
if this.client_status.is_none()
|
||||
|| this
|
||||
.client_status
|
||||
@@ -370,7 +371,7 @@ impl AssistantPanel {
|
||||
})
|
||||
.log_err();
|
||||
}
|
||||
this.update(&mut cx, |this, _cx| this.watch_client_status = None)
|
||||
this.update(cx, |this, _cx| this.watch_client_status = None)
|
||||
.log_err();
|
||||
})
|
||||
}
|
||||
@@ -575,11 +576,11 @@ impl AssistantPanel {
|
||||
if self.authenticate_provider_task.is_none() {
|
||||
self.authenticate_provider_task = Some((
|
||||
provider.id(),
|
||||
cx.spawn_in(window, |this, mut cx| async move {
|
||||
cx.spawn_in(window, async move |this, cx| {
|
||||
if let Some(future) = load_credentials {
|
||||
let _ = future.await;
|
||||
}
|
||||
this.update(&mut cx, |this, _cx| {
|
||||
this.update(cx, |this, _cx| {
|
||||
this.authenticate_provider_task = None;
|
||||
})
|
||||
.log_err();
|
||||
@@ -640,9 +641,9 @@ impl AssistantPanel {
|
||||
}
|
||||
} else {
|
||||
let assistant_panel = assistant_panel.downgrade();
|
||||
cx.spawn_in(window, |workspace, mut cx| async move {
|
||||
cx.spawn_in(window, async move |workspace, cx| {
|
||||
let Some(task) =
|
||||
assistant_panel.update(&mut cx, |assistant, cx| assistant.authenticate(cx))?
|
||||
assistant_panel.update(cx, |assistant, cx| assistant.authenticate(cx))?
|
||||
else {
|
||||
let answer = cx
|
||||
.prompt(
|
||||
@@ -664,7 +665,7 @@ impl AssistantPanel {
|
||||
return Ok(());
|
||||
};
|
||||
task.await?;
|
||||
if assistant_panel.update(&mut cx, |panel, cx| panel.is_authenticated(cx))? {
|
||||
if assistant_panel.update(cx, |panel, cx| panel.is_authenticated(cx))? {
|
||||
cx.update(|window, cx| match inline_assist_target {
|
||||
InlineAssistTarget::Editor(active_editor, include_context) => {
|
||||
let assistant_panel = if include_context {
|
||||
@@ -697,7 +698,7 @@ impl AssistantPanel {
|
||||
}
|
||||
})?
|
||||
} else {
|
||||
workspace.update_in(&mut cx, |workspace, window, cx| {
|
||||
workspace.update_in(cx, |workspace, window, cx| {
|
||||
workspace.focus_panel::<AssistantPanel>(window, cx)
|
||||
})?;
|
||||
}
|
||||
@@ -790,10 +791,10 @@ impl AssistantPanel {
|
||||
.context_store
|
||||
.update(cx, |store, cx| store.create_remote_context(cx));
|
||||
|
||||
cx.spawn_in(window, |this, mut cx| async move {
|
||||
cx.spawn_in(window, async move |this, cx| {
|
||||
let context = task.await?;
|
||||
|
||||
this.update_in(&mut cx, |this, window, cx| {
|
||||
this.update_in(cx, |this, window, cx| {
|
||||
let workspace = this.workspace.clone();
|
||||
let project = this.project.clone();
|
||||
let lsp_adapter_delegate =
|
||||
@@ -846,9 +847,9 @@ impl AssistantPanel {
|
||||
|
||||
self.show_context(editor.clone(), window, cx);
|
||||
let workspace = self.workspace.clone();
|
||||
cx.spawn_in(window, move |_, mut cx| async move {
|
||||
cx.spawn_in(window, async move |_, cx| {
|
||||
workspace
|
||||
.update_in(&mut cx, |workspace, window, cx| {
|
||||
.update_in(cx, |workspace, window, cx| {
|
||||
workspace.focus_panel::<AssistantPanel>(window, cx);
|
||||
})
|
||||
.ok();
|
||||
@@ -1068,8 +1069,8 @@ impl AssistantPanel {
|
||||
.filter(|editor| editor.read(cx).context().read(cx).path() == Some(&path))
|
||||
});
|
||||
if let Some(existing_context) = existing_context {
|
||||
return cx.spawn_in(window, |this, mut cx| async move {
|
||||
this.update_in(&mut cx, |this, window, cx| {
|
||||
return cx.spawn_in(window, async move |this, cx| {
|
||||
this.update_in(cx, |this, window, cx| {
|
||||
this.show_context(existing_context, window, cx)
|
||||
})
|
||||
});
|
||||
@@ -1084,9 +1085,9 @@ impl AssistantPanel {
|
||||
|
||||
let lsp_adapter_delegate = make_lsp_adapter_delegate(&project, cx).log_err().flatten();
|
||||
|
||||
cx.spawn_in(window, |this, mut cx| async move {
|
||||
cx.spawn_in(window, async move |this, cx| {
|
||||
let context = context.await?;
|
||||
this.update_in(&mut cx, |this, window, cx| {
|
||||
this.update_in(cx, |this, window, cx| {
|
||||
let editor = cx.new(|cx| {
|
||||
ContextEditor::for_context(
|
||||
context,
|
||||
@@ -1116,8 +1117,8 @@ impl AssistantPanel {
|
||||
.filter(|editor| *editor.read(cx).context().read(cx).id() == id)
|
||||
});
|
||||
if let Some(existing_context) = existing_context {
|
||||
return cx.spawn_in(window, |this, mut cx| async move {
|
||||
this.update_in(&mut cx, |this, window, cx| {
|
||||
return cx.spawn_in(window, async move |this, cx| {
|
||||
this.update_in(cx, |this, window, cx| {
|
||||
this.show_context(existing_context.clone(), window, cx)
|
||||
})?;
|
||||
Ok(existing_context)
|
||||
@@ -1133,9 +1134,9 @@ impl AssistantPanel {
|
||||
.log_err()
|
||||
.flatten();
|
||||
|
||||
cx.spawn_in(window, |this, mut cx| async move {
|
||||
cx.spawn_in(window, async move |this, cx| {
|
||||
let context = context.await?;
|
||||
this.update_in(&mut cx, |this, window, cx| {
|
||||
this.update_in(cx, |this, window, cx| {
|
||||
let editor = cx.new(|cx| {
|
||||
ContextEditor::for_context(
|
||||
context,
|
||||
|
||||
@@ -386,7 +386,6 @@ impl InlineAssistant {
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn suggest_assist(
|
||||
&mut self,
|
||||
editor: &Entity<Editor>,
|
||||
@@ -1247,7 +1246,7 @@ impl InlineAssistant {
|
||||
});
|
||||
|
||||
enum DeletedLines {}
|
||||
let mut editor = Editor::for_multibuffer(multi_buffer, None, true, window, cx);
|
||||
let mut editor = Editor::for_multibuffer(multi_buffer, None, window, cx);
|
||||
editor.set_soft_wrap_mode(language::language_settings::SoftWrap::None, cx);
|
||||
editor.set_show_wrap_guides(false, cx);
|
||||
editor.set_show_gutter(false, cx);
|
||||
@@ -1312,9 +1311,9 @@ impl EditorInlineAssists {
|
||||
assist_ids: Vec::new(),
|
||||
scroll_lock: None,
|
||||
highlight_updates: highlight_updates_tx,
|
||||
_update_highlights: cx.spawn(|cx| {
|
||||
_update_highlights: cx.spawn({
|
||||
let editor = editor.downgrade();
|
||||
async move {
|
||||
async move |cx| {
|
||||
while let Ok(()) = highlight_updates_rx.changed().await {
|
||||
let editor = editor.upgrade().context("editor was dropped")?;
|
||||
cx.update_global(|assistant: &mut InlineAssistant, cx| {
|
||||
@@ -1674,7 +1673,6 @@ impl Focusable for PromptEditor {
|
||||
impl PromptEditor {
|
||||
const MAX_LINES: u8 = 8;
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn new(
|
||||
id: InlineAssistId,
|
||||
gutter_dimensions: Arc<Mutex<GutterDimensions>>,
|
||||
@@ -1695,7 +1693,6 @@ impl PromptEditor {
|
||||
},
|
||||
prompt_buffer,
|
||||
None,
|
||||
false,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
@@ -1853,7 +1850,7 @@ impl PromptEditor {
|
||||
|
||||
fn count_tokens(&mut self, cx: &mut Context<Self>) {
|
||||
let assist_id = self.id;
|
||||
self.pending_token_count = cx.spawn(|this, mut cx| async move {
|
||||
self.pending_token_count = cx.spawn(async move |this, cx| {
|
||||
cx.background_executor().timer(Duration::from_secs(1)).await;
|
||||
let token_count = cx
|
||||
.update_global(|inline_assistant: &mut InlineAssistant, cx| {
|
||||
@@ -1865,7 +1862,7 @@ impl PromptEditor {
|
||||
})??
|
||||
.await?;
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
this.token_counts = Some(token_count);
|
||||
cx.notify();
|
||||
})
|
||||
@@ -2333,7 +2330,6 @@ struct InlineAssist {
|
||||
}
|
||||
|
||||
impl InlineAssist {
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn new(
|
||||
assist_id: InlineAssistId,
|
||||
group_id: InlineAssistGroupId,
|
||||
@@ -2886,7 +2882,7 @@ impl CodegenAlternative {
|
||||
let request = self.build_request(user_prompt, assistant_panel_context, cx)?;
|
||||
self.request = Some(request.clone());
|
||||
|
||||
cx.spawn(|_, cx| async move { model.stream_completion_text(request, &cx).await })
|
||||
cx.spawn(async move |_, cx| model.stream_completion_text(request, &cx).await)
|
||||
.boxed_local()
|
||||
};
|
||||
self.handle_stream(telemetry_id, provider_id.to_string(), api_key, stream, cx);
|
||||
@@ -3003,213 +2999,207 @@ impl CodegenAlternative {
|
||||
let completion = Arc::new(Mutex::new(String::new()));
|
||||
let completion_clone = completion.clone();
|
||||
|
||||
self.generation = cx.spawn(|codegen, mut cx| {
|
||||
async move {
|
||||
let stream = stream.await;
|
||||
let message_id = stream
|
||||
.as_ref()
|
||||
.ok()
|
||||
.and_then(|stream| stream.message_id.clone());
|
||||
let generate = async {
|
||||
let (mut diff_tx, mut diff_rx) = mpsc::channel(1);
|
||||
let executor = cx.background_executor().clone();
|
||||
let message_id = message_id.clone();
|
||||
let line_based_stream_diff: Task<anyhow::Result<()>> =
|
||||
cx.background_spawn(async move {
|
||||
let mut response_latency = None;
|
||||
let request_start = Instant::now();
|
||||
let diff = async {
|
||||
let chunks = StripInvalidSpans::new(stream?.stream);
|
||||
futures::pin_mut!(chunks);
|
||||
let mut diff = StreamingDiff::new(selected_text.to_string());
|
||||
let mut line_diff = LineDiff::default();
|
||||
self.generation = cx.spawn(async move |codegen, cx| {
|
||||
let stream = stream.await;
|
||||
let message_id = stream
|
||||
.as_ref()
|
||||
.ok()
|
||||
.and_then(|stream| stream.message_id.clone());
|
||||
let generate = async {
|
||||
let (mut diff_tx, mut diff_rx) = mpsc::channel(1);
|
||||
let executor = cx.background_executor().clone();
|
||||
let message_id = message_id.clone();
|
||||
let line_based_stream_diff: Task<anyhow::Result<()>> =
|
||||
cx.background_spawn(async move {
|
||||
let mut response_latency = None;
|
||||
let request_start = Instant::now();
|
||||
let diff = async {
|
||||
let chunks = StripInvalidSpans::new(stream?.stream);
|
||||
futures::pin_mut!(chunks);
|
||||
let mut diff = StreamingDiff::new(selected_text.to_string());
|
||||
let mut line_diff = LineDiff::default();
|
||||
|
||||
let mut new_text = String::new();
|
||||
let mut base_indent = None;
let mut line_indent = None;
let mut first_line = true;
let mut new_text = String::new();
let mut base_indent = None;
let mut line_indent = None;
let mut first_line = true;

while let Some(chunk) = chunks.next().await {
if response_latency.is_none() {
response_latency = Some(request_start.elapsed());
}
let chunk = chunk?;
completion_clone.lock().push_str(&chunk);
while let Some(chunk) = chunks.next().await {
if response_latency.is_none() {
response_latency = Some(request_start.elapsed());
}
let chunk = chunk?;
completion_clone.lock().push_str(&chunk);

let mut lines = chunk.split('\n').peekable();
while let Some(line) = lines.next() {
new_text.push_str(line);
if line_indent.is_none() {
if let Some(non_whitespace_ch_ix) =
new_text.find(|ch: char| !ch.is_whitespace())
{
line_indent = Some(non_whitespace_ch_ix);
base_indent = base_indent.or(line_indent);
let mut lines = chunk.split('\n').peekable();
while let Some(line) = lines.next() {
new_text.push_str(line);
if line_indent.is_none() {
if let Some(non_whitespace_ch_ix) =
new_text.find(|ch: char| !ch.is_whitespace())
{
line_indent = Some(non_whitespace_ch_ix);
base_indent = base_indent.or(line_indent);

let line_indent = line_indent.unwrap();
let base_indent = base_indent.unwrap();
let indent_delta =
line_indent as i32 - base_indent as i32;
let mut corrected_indent_len = cmp::max(
0,
suggested_line_indent.len as i32 + indent_delta,
)
as usize;
if first_line {
corrected_indent_len = corrected_indent_len
.saturating_sub(
selection_start.column as usize,
);
}

let indent_char = suggested_line_indent.char();
let mut indent_buffer = [0; 4];
let indent_str =
indent_char.encode_utf8(&mut indent_buffer);
new_text.replace_range(
..line_indent,
&indent_str.repeat(corrected_indent_len),
);
let line_indent = line_indent.unwrap();
let base_indent = base_indent.unwrap();
let indent_delta =
line_indent as i32 - base_indent as i32;
let mut corrected_indent_len = cmp::max(
0,
suggested_line_indent.len as i32 + indent_delta,
)
as usize;
if first_line {
corrected_indent_len = corrected_indent_len
.saturating_sub(
selection_start.column as usize,
);
}
}

if line_indent.is_some() {
let char_ops = diff.push_new(&new_text);
line_diff
.push_char_operations(&char_ops, &selected_text);
diff_tx
.send((char_ops, line_diff.line_operations()))
.await?;
let indent_char = suggested_line_indent.char();
let mut indent_buffer = [0; 4];
let indent_str =
indent_char.encode_utf8(&mut indent_buffer);
new_text.replace_range(
..line_indent,
&indent_str.repeat(corrected_indent_len),
);
}
}

if line_indent.is_some() {
let char_ops = diff.push_new(&new_text);
line_diff.push_char_operations(&char_ops, &selected_text);
diff_tx
.send((char_ops, line_diff.line_operations()))
.await?;
new_text.clear();
}

if lines.peek().is_some() {
let char_ops = diff.push_new("\n");
line_diff.push_char_operations(&char_ops, &selected_text);
diff_tx
.send((char_ops, line_diff.line_operations()))
.await?;
if line_indent.is_none() {
// Don't write out the leading indentation in empty lines on the next line
// This is the case where the above if statement didn't clear the buffer
new_text.clear();
}

if lines.peek().is_some() {
let char_ops = diff.push_new("\n");
line_diff
.push_char_operations(&char_ops, &selected_text);
diff_tx
.send((char_ops, line_diff.line_operations()))
.await?;
if line_indent.is_none() {
// Don't write out the leading indentation in empty lines on the next line
// This is the case where the above if statement didn't clear the buffer
new_text.clear();
}
line_indent = None;
first_line = false;
}
line_indent = None;
first_line = false;
}
}

let mut char_ops = diff.push_new(&new_text);
char_ops.extend(diff.finish());
line_diff.push_char_operations(&char_ops, &selected_text);
line_diff.finish(&selected_text);
diff_tx
.send((char_ops, line_diff.line_operations()))
.await?;

anyhow::Ok(())
};

let result = diff.await;

let error_message =
result.as_ref().err().map(|error| error.to_string());
report_assistant_event(
AssistantEvent {
conversation_id: None,
message_id,
kind: AssistantKind::Inline,
phase: AssistantPhase::Response,
model: model_telemetry_id,
model_provider: model_provider_id.to_string(),
response_latency,
error_message,
language_name: language_name.map(|name| name.to_proto()),
},
telemetry,
http_client,
model_api_key,
&executor,
);

result?;
Ok(())
});
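The loop above re-bases each streamed line's indentation onto the editor's suggested indent: the first non-empty streamed line establishes a base, every later line keeps its delta relative to that base, and the first line additionally discounts the cursor column. A standalone sketch of that arithmetic follows; the names are illustrative stand-ins, since the real code works against editor snapshots rather than plain numbers.

use std::cmp;

fn corrected_indent_len(
    suggested_len: usize,          // indent width the editor suggests for this row
    base_indent: usize,            // indent of the first non-empty streamed line
    line_indent: usize,            // indent of the current streamed line
    first_line: bool,              // the first line starts at the cursor, not at column 0
    selection_start_column: usize,
) -> usize {
    let indent_delta = line_indent as i32 - base_indent as i32;
    let mut len = cmp::max(0, suggested_len as i32 + indent_delta) as usize;
    if first_line {
        // The first streamed line is inserted after the cursor, so the columns the
        // cursor already occupies must not be re-emitted as indentation.
        len = len.saturating_sub(selection_start_column);
    }
    len
}

fn main() {
    // A line indented 4 columns past the streamed base, with a suggested indent of 8.
    assert_eq!(corrected_indent_len(8, 0, 4, false, 0), 12);
    // The first line subtracts the column the cursor already occupies.
    assert_eq!(corrected_indent_len(8, 0, 0, true, 8), 0);
}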
while let Some((char_ops, line_ops)) = diff_rx.next().await {
codegen.update(&mut cx, |codegen, cx| {
codegen.last_equal_ranges.clear();

let edits = char_ops
.into_iter()
.filter_map(|operation| match operation {
CharOperation::Insert { text } => {
let edit_start = snapshot.anchor_after(edit_start);
Some((edit_start..edit_start, text))
}
CharOperation::Delete { bytes } => {
let edit_end = edit_start + bytes;
let edit_range = snapshot.anchor_after(edit_start)
..snapshot.anchor_before(edit_end);
edit_start = edit_end;
Some((edit_range, String::new()))
}
CharOperation::Keep { bytes } => {
let edit_end = edit_start + bytes;
let edit_range = snapshot.anchor_after(edit_start)
..snapshot.anchor_before(edit_end);
edit_start = edit_end;
codegen.last_equal_ranges.push(edit_range);
None
}
})
.collect::<Vec<_>>();

if codegen.active {
codegen.apply_edits(edits.iter().cloned(), cx);
codegen.reapply_line_based_diff(line_ops.iter().cloned(), cx);
}
codegen.edits.extend(edits);
codegen.line_operations = line_ops;
codegen.edit_position = Some(snapshot.anchor_after(edit_start));

cx.notify();
})?;
}
let mut char_ops = diff.push_new(&new_text);
char_ops.extend(diff.finish());
line_diff.push_char_operations(&char_ops, &selected_text);
line_diff.finish(&selected_text);
diff_tx
.send((char_ops, line_diff.line_operations()))
.await?;

// Streaming stopped and we have the new text in the buffer, and a line-based diff applied for the whole new buffer.
// That diff is not what a regular diff is and might look unexpected, ergo apply a regular diff.
// It's fine to apply even if the rest of the line diffing fails, as no more hunks are coming through `diff_rx`.
let batch_diff_task =
codegen.update(&mut cx, |codegen, cx| codegen.reapply_batch_diff(cx))?;
let (line_based_stream_diff, ()) =
join!(line_based_stream_diff, batch_diff_task);
line_based_stream_diff?;
anyhow::Ok(())
};

anyhow::Ok(())
};
let result = diff.await;

let result = generate.await;
let elapsed_time = start_time.elapsed().as_secs_f64();
let error_message = result.as_ref().err().map(|error| error.to_string());
report_assistant_event(
AssistantEvent {
conversation_id: None,
message_id,
kind: AssistantKind::Inline,
phase: AssistantPhase::Response,
model: model_telemetry_id,
model_provider: model_provider_id.to_string(),
response_latency,
error_message,
language_name: language_name.map(|name| name.to_proto()),
},
telemetry,
http_client,
model_api_key,
&executor,
);

codegen
.update(&mut cx, |this, cx| {
this.message_id = message_id;
this.last_equal_ranges.clear();
if let Err(error) = result {
this.status = CodegenStatus::Error(error);
} else {
this.status = CodegenStatus::Done;
result?;
Ok(())
});

while let Some((char_ops, line_ops)) = diff_rx.next().await {
codegen.update(cx, |codegen, cx| {
codegen.last_equal_ranges.clear();

let edits = char_ops
.into_iter()
.filter_map(|operation| match operation {
CharOperation::Insert { text } => {
let edit_start = snapshot.anchor_after(edit_start);
Some((edit_start..edit_start, text))
}
CharOperation::Delete { bytes } => {
let edit_end = edit_start + bytes;
let edit_range = snapshot.anchor_after(edit_start)
..snapshot.anchor_before(edit_end);
edit_start = edit_end;
Some((edit_range, String::new()))
}
CharOperation::Keep { bytes } => {
let edit_end = edit_start + bytes;
let edit_range = snapshot.anchor_after(edit_start)
..snapshot.anchor_before(edit_end);
edit_start = edit_end;
codegen.last_equal_ranges.push(edit_range);
None
}
})
.collect::<Vec<_>>();

if codegen.active {
codegen.apply_edits(edits.iter().cloned(), cx);
codegen.reapply_line_based_diff(line_ops.iter().cloned(), cx);
}
this.elapsed_time = Some(elapsed_time);
this.completion = Some(completion.lock().clone());
cx.emit(CodegenEvent::Finished);
codegen.edits.extend(edits);
codegen.line_operations = line_ops;
codegen.edit_position = Some(snapshot.anchor_after(edit_start));

cx.notify();
})
.ok();
}
})?;
}

// Streaming stopped and we have the new text in the buffer, and a line-based diff applied for the whole new buffer.
// That diff is not what a regular diff is and might look unexpected, ergo apply a regular diff.
// It's fine to apply even if the rest of the line diffing fails, as no more hunks are coming through `diff_rx`.
let batch_diff_task =
codegen.update(cx, |codegen, cx| codegen.reapply_batch_diff(cx))?;
let (line_based_stream_diff, ()) = join!(line_based_stream_diff, batch_diff_task);
line_based_stream_diff?;

anyhow::Ok(())
};

let result = generate.await;
let elapsed_time = start_time.elapsed().as_secs_f64();

codegen
.update(cx, |this, cx| {
this.message_id = message_id;
this.last_equal_ranges.clear();
if let Err(error) = result {
this.status = CodegenStatus::Error(error);
} else {
this.status = CodegenStatus::Done;
}
this.elapsed_time = Some(elapsed_time);
this.completion = Some(completion.lock().clone());
cx.emit(CodegenEvent::Finished);
cx.notify();
})
.ok();
});
cx.notify();
}
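The `filter_map` above turns a stream of character operations into buffer edits while tracking how far into the old text the stream has advanced. The sketch below shows the same bookkeeping in a self-contained form; `CharOp` and `ops_to_edits` are simplified stand-ins, using plain byte ranges instead of the anchors and `CharOperation` type used by the real streaming-diff code.

enum CharOp {
    Insert { text: String },
    Delete { bytes: usize },
    Keep { bytes: usize },
}

fn ops_to_edits(ops: Vec<CharOp>) -> Vec<(std::ops::Range<usize>, String)> {
    let mut offset = 0; // current position in the old text, in bytes
    let mut edits = Vec::new();
    for op in ops {
        match op {
            // Insertions add text at the current offset without consuming old text.
            CharOp::Insert { text } => edits.push((offset..offset, text)),
            // Deletions replace a span of the old text with the empty string.
            CharOp::Delete { bytes } => {
                edits.push((offset..offset + bytes, String::new()));
                offset += bytes;
            }
            // Kept spans produce no edit; they only advance the offset.
            CharOp::Keep { bytes } => offset += bytes,
        }
    }
    edits
}

fn main() {
    let edits = ops_to_edits(vec![
        CharOp::Keep { bytes: 5 },
        CharOp::Delete { bytes: 3 },
        CharOp::Insert { text: "new".to_string() },
    ]);
    assert_eq!(edits, vec![(5..8, String::new()), (8..8, "new".to_string())]);
}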
@@ -3327,7 +3317,7 @@ impl CodegenAlternative {
let new_snapshot = self.buffer.read(cx).snapshot(cx);
let new_range = self.range.to_point(&new_snapshot);

cx.spawn(|codegen, mut cx| async move {
cx.spawn(async move |codegen, cx| {
let (deleted_row_ranges, inserted_row_ranges) = cx
.background_spawn(async move {
let old_text = old_snapshot
@@ -3377,7 +3367,7 @@ impl CodegenAlternative {
.await;

codegen
.update(&mut cx, |codegen, cx| {
.update(cx, |codegen, cx| {
codegen.diff.deleted_row_ranges = deleted_row_ranges;
codegen.diff.inserted_row_ranges = inserted_row_ranges;
cx.notify();
@@ -3573,6 +3563,7 @@ impl CodeActionProvider for AssistantCodeActionProvider {
title: "Fix with Assistant".into(),
..Default::default()
})),
resolved: true,
}]))
} else {
Task::ready(Ok(Vec::new()))
@@ -3590,10 +3581,10 @@ impl CodeActionProvider for AssistantCodeActionProvider {
) -> Task<Result<ProjectTransaction>> {
let editor = self.editor.clone();
let workspace = self.workspace.clone();
window.spawn(cx, |mut cx| async move {
window.spawn(cx, async move |cx| {
let editor = editor.upgrade().context("editor was released")?;
let range = editor
.update(&mut cx, |editor, cx| {
.update(cx, |editor, cx| {
editor.buffer().update(cx, |multibuffer, cx| {
let buffer = buffer.read(cx);
let multibuffer_snapshot = multibuffer.read(cx);
@@ -3628,7 +3619,7 @@ impl CodeActionProvider for AssistantCodeActionProvider {
})
})?
.context("invalid range")?;
let assistant_panel = workspace.update(&mut cx, |workspace, cx| {
let assistant_panel = workspace.update(cx, |workspace, cx| {
workspace
.panel::<AssistantPanel>(cx)
.context("assistant panel was released")
@@ -702,7 +702,6 @@ impl Focusable for PromptEditor {
impl PromptEditor {
const MAX_LINES: u8 = 8;

#[allow(clippy::too_many_arguments)]
fn new(
id: TerminalInlineAssistId,
prompt_history: VecDeque<String>,
@@ -721,7 +720,6 @@ impl PromptEditor {
},
prompt_buffer,
None,
false,
window,
cx,
);
@@ -827,7 +825,7 @@ impl PromptEditor {
let Some(model) = LanguageModelRegistry::read_global(cx).active_model() else {
return;
};
self.pending_token_count = cx.spawn(|this, mut cx| async move {
self.pending_token_count = cx.spawn(async move |this, cx| {
cx.background_executor().timer(Duration::from_secs(1)).await;
let request =
cx.update_global(|inline_assistant: &mut TerminalInlineAssistant, cx| {
@@ -835,7 +833,7 @@ impl PromptEditor {
})??;

let token_count = cx.update(|cx| model.count_tokens(request, cx))?.await?;
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
this.token_count = Some(token_count);
cx.notify();
})
@@ -1142,7 +1140,7 @@ impl Codegen {
let telemetry = self.telemetry.clone();
self.status = CodegenStatus::Pending;
self.transaction = Some(TerminalTransaction::start(self.terminal.clone()));
self.generation = cx.spawn(|this, mut cx| async move {
self.generation = cx.spawn(async move |this, cx| {
let model_telemetry_id = model.telemetry_id();
let model_provider_id = model.provider_id();
let response = model.stream_completion_text(prompt, &cx).await;
@@ -1199,12 +1197,12 @@ impl Codegen {
}
});

this.update(&mut cx, |this, _| {
this.update(cx, |this, _| {
this.message_id = message_id;
})?;

while let Some(hunk) = hunks_rx.next().await {
this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
if let Some(transaction) = &mut this.transaction {
transaction.push(hunk, cx);
cx.notify();
@@ -1218,7 +1216,7 @@ impl Codegen {

let result = generate.await;

this.update(&mut cx, |this, cx| {
this.update(cx, |this, cx| {
if let Err(error) = result {
this.status = CodegenStatus::Error(error);
} else {
@@ -21,7 +21,6 @@ test-support = [
[dependencies]
anyhow.workspace = true
assistant_context_editor.workspace = true
assistant_scripting.workspace = true
assistant_settings.workspace = true
assistant_slash_command.workspace = true
assistant_tool.workspace = true
@@ -39,6 +38,8 @@ file_icons.workspace = true
fs.workspace = true
futures.workspace = true
fuzzy.workspace = true
git.workspace = true
git_ui.workspace = true
gpui.workspace = true
heed.workspace = true
html_to_markdown.workspace = true
@@ -60,11 +61,13 @@ prompt_library.workspace = true
prompt_store.workspace = true
proto.workspace = true
rope.workspace = true
scripting_tool.workspace = true
serde.workspace = true
serde_json.workspace = true
settings.workspace = true
smol.workspace = true
streaming_diff.workspace = true
telemetry.workspace = true
telemetry_events.workspace = true
terminal.workspace = true
terminal_view.workspace = true

@@ -1,39 +1,41 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use assistant_scripting::{ScriptId, ScriptState};
|
||||
use collections::{HashMap, HashSet};
|
||||
use editor::{Editor, MultiBuffer};
|
||||
use gpui::{
|
||||
list, AbsoluteLength, AnyElement, App, ClickEvent, DefiniteLength, EdgesRefinement, Empty,
|
||||
Entity, Focusable, Length, ListAlignment, ListOffset, ListState, StyleRefinement, Subscription,
|
||||
Task, TextStyleRefinement, UnderlineStyle, WeakEntity,
|
||||
};
|
||||
use language::{Buffer, LanguageRegistry};
|
||||
use language_model::{LanguageModelRegistry, LanguageModelToolUseId, Role};
|
||||
use markdown::{Markdown, MarkdownStyle};
|
||||
use settings::Settings as _;
|
||||
use theme::ThemeSettings;
|
||||
use ui::{prelude::*, Disclosure, KeyBinding};
|
||||
use util::ResultExt as _;
|
||||
use workspace::Workspace;
|
||||
|
||||
use crate::thread::{MessageId, RequestKind, Thread, ThreadError, ThreadEvent};
|
||||
use crate::thread_store::ThreadStore;
|
||||
use crate::tool_use::{ToolUse, ToolUseStatus};
|
||||
use crate::ui::ContextPill;
|
||||
use collections::HashMap;
|
||||
use editor::{Editor, MultiBuffer};
|
||||
use gpui::{
|
||||
list, percentage, AbsoluteLength, Animation, AnimationExt, AnyElement, App, ClickEvent,
|
||||
DefiniteLength, EdgesRefinement, Empty, Entity, Focusable, Length, ListAlignment, ListOffset,
|
||||
ListState, StyleRefinement, Subscription, Task, TextStyleRefinement, Transformation,
|
||||
UnderlineStyle,
|
||||
};
|
||||
use language::{Buffer, LanguageRegistry};
|
||||
use language_model::{LanguageModelRegistry, LanguageModelToolUseId, Role};
|
||||
use markdown::{Markdown, MarkdownStyle};
|
||||
use scripting_tool::{ScriptingTool, ScriptingToolInput};
|
||||
use settings::Settings as _;
|
||||
use std::sync::Arc;
|
||||
use std::time::Duration;
|
||||
use theme::ThemeSettings;
|
||||
use ui::Color;
|
||||
use ui::{prelude::*, Disclosure, KeyBinding};
|
||||
use util::ResultExt as _;
|
||||
|
||||
use crate::context_store::{refresh_context_store_text, ContextStore};
|
||||
|
||||
pub struct ActiveThread {
|
||||
workspace: WeakEntity<Workspace>,
|
||||
language_registry: Arc<LanguageRegistry>,
|
||||
thread_store: Entity<ThreadStore>,
|
||||
thread: Entity<Thread>,
|
||||
context_store: Entity<ContextStore>,
|
||||
save_thread_task: Option<Task<()>>,
|
||||
messages: Vec<MessageId>,
|
||||
list_state: ListState,
|
||||
rendered_messages_by_id: HashMap<MessageId, Entity<Markdown>>,
|
||||
rendered_scripting_tool_uses: HashMap<LanguageModelToolUseId, Entity<Markdown>>,
|
||||
editing_message: Option<(MessageId, EditMessageState)>,
|
||||
expanded_tool_uses: HashMap<LanguageModelToolUseId, bool>,
|
||||
expanded_scripts: HashSet<ScriptId>,
|
||||
last_error: Option<ThreadError>,
|
||||
_subscriptions: Vec<Subscription>,
|
||||
}
|
||||
@@ -44,10 +46,10 @@ struct EditMessageState {
|
||||
|
||||
impl ActiveThread {
|
||||
pub fn new(
|
||||
workspace: WeakEntity<Workspace>,
|
||||
thread: Entity<Thread>,
|
||||
thread_store: Entity<ThreadStore>,
|
||||
language_registry: Arc<LanguageRegistry>,
|
||||
context_store: Entity<ContextStore>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
@@ -57,15 +59,15 @@ impl ActiveThread {
|
||||
];
|
||||
|
||||
let mut this = Self {
|
||||
workspace,
|
||||
language_registry,
|
||||
thread_store,
|
||||
thread: thread.clone(),
|
||||
context_store,
|
||||
save_thread_task: None,
|
||||
messages: Vec::new(),
|
||||
rendered_messages_by_id: HashMap::default(),
|
||||
rendered_scripting_tool_uses: HashMap::default(),
|
||||
expanded_tool_uses: HashMap::default(),
|
||||
expanded_scripts: HashSet::default(),
|
||||
list_state: ListState::new(0, ListAlignment::Bottom, px(1024.), {
|
||||
let this = cx.entity().downgrade();
|
||||
move |ix, window: &mut Window, cx: &mut App| {
|
||||
@@ -80,6 +82,16 @@ impl ActiveThread {
|
||||
|
||||
for message in thread.read(cx).messages().cloned().collect::<Vec<_>>() {
|
||||
this.push_message(&message.id, message.text.clone(), window, cx);
|
||||
|
||||
for tool_use in thread.read(cx).scripting_tool_uses_for_message(message.id) {
|
||||
this.render_scripting_tool_use_markdown(
|
||||
tool_use.id.clone(),
|
||||
tool_use.name.as_ref(),
|
||||
tool_use.input.clone(),
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
this
|
||||
@@ -104,7 +116,7 @@ impl ActiveThread {
|
||||
pub fn cancel_last_completion(&mut self, cx: &mut App) -> bool {
|
||||
self.last_error.take();
|
||||
self.thread
|
||||
.update(cx, |thread, _cx| thread.cancel_last_completion())
|
||||
.update(cx, |thread, cx| thread.cancel_last_completion(cx))
|
||||
}
|
||||
|
||||
pub fn last_error(&self) -> Option<ThreadError> {
|
||||
@@ -246,6 +258,32 @@ impl ActiveThread {
|
||||
})
|
||||
}
|
||||
|
||||
/// Renders the input of a scripting tool use to Markdown.
|
||||
///
|
||||
/// Does nothing if the tool use does not correspond to the scripting tool.
|
||||
fn render_scripting_tool_use_markdown(
|
||||
&mut self,
|
||||
tool_use_id: LanguageModelToolUseId,
|
||||
tool_name: &str,
|
||||
tool_input: serde_json::Value,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
if tool_name != ScriptingTool::NAME {
|
||||
return;
|
||||
}
|
||||
|
||||
let lua_script = serde_json::from_value::<ScriptingToolInput>(tool_input)
|
||||
.map(|input| input.lua_script)
|
||||
.unwrap_or_default();
|
||||
|
||||
let lua_script =
|
||||
self.render_markdown(format!("```lua\n{lua_script}\n```").into(), window, cx);
|
||||
|
||||
self.rendered_scripting_tool_uses
|
||||
.insert(tool_use_id, lua_script);
|
||||
}
|
||||
|
||||
fn handle_thread_event(
|
||||
&mut self,
|
||||
_thread: &Entity<Thread>,
|
||||
@@ -260,6 +298,7 @@ impl ActiveThread {
|
||||
ThreadEvent::StreamedCompletion | ThreadEvent::SummaryChanged => {
|
||||
self.save_thread(cx);
|
||||
}
|
||||
ThreadEvent::DoneStreaming => {}
|
||||
ThreadEvent::StreamedAssistantText(message_id, text) => {
|
||||
if let Some(markdown) = self.rendered_messages_by_id.get_mut(&message_id) {
|
||||
markdown.update(cx, |markdown, cx| {
|
||||
@@ -303,24 +342,74 @@ impl ActiveThread {
|
||||
thread.use_pending_tools(cx);
|
||||
});
|
||||
}
|
||||
ThreadEvent::ToolFinished { .. } => {
|
||||
ThreadEvent::ToolFinished {
|
||||
pending_tool_use,
|
||||
canceled,
|
||||
..
|
||||
} => {
|
||||
let canceled = *canceled;
|
||||
if let Some(tool_use) = pending_tool_use {
|
||||
self.render_scripting_tool_use_markdown(
|
||||
tool_use.id.clone(),
|
||||
tool_use.name.as_ref(),
|
||||
tool_use.input.clone(),
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
}
|
||||
|
||||
if self.thread.read(cx).all_tools_finished() {
|
||||
let pending_refresh_buffers = self.thread.update(cx, |thread, cx| {
|
||||
thread.action_log().update(cx, |action_log, _cx| {
|
||||
action_log.take_stale_buffers_in_context()
|
||||
})
|
||||
});
|
||||
|
||||
let context_update_task = if !pending_refresh_buffers.is_empty() {
|
||||
let refresh_task = refresh_context_store_text(
|
||||
self.context_store.clone(),
|
||||
&pending_refresh_buffers,
|
||||
cx,
|
||||
);
|
||||
|
||||
cx.spawn(async move |this, cx| {
|
||||
let updated_context_ids = refresh_task.await;
|
||||
|
||||
this.update(cx, |this, cx| {
|
||||
this.context_store.read_with(cx, |context_store, cx| {
|
||||
context_store
|
||||
.context()
|
||||
.iter()
|
||||
.filter(|context| {
|
||||
updated_context_ids.contains(&context.id())
|
||||
})
|
||||
.flat_map(|context| context.snapshot(cx))
|
||||
.collect()
|
||||
})
|
||||
})
|
||||
})
|
||||
} else {
|
||||
Task::ready(anyhow::Ok(Vec::new()))
|
||||
};
|
||||
|
||||
let model_registry = LanguageModelRegistry::read_global(cx);
|
||||
if let Some(model) = model_registry.active_model() {
|
||||
self.thread.update(cx, |thread, cx| {
|
||||
thread.send_tool_results_to_model(model, cx);
|
||||
});
|
||||
cx.spawn(async move |this, cx| {
|
||||
let updated_context = context_update_task.await?;
|
||||
|
||||
this.update(cx, |this, cx| {
|
||||
this.thread.update(cx, |thread, cx| {
|
||||
thread.attach_tool_results(updated_context, cx);
|
||||
if !canceled {
|
||||
thread.send_to_model(model, RequestKind::Chat, cx);
|
||||
}
|
||||
});
|
||||
})
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
}
|
||||
}
|
||||
ThreadEvent::ScriptFinished => {
|
||||
let model_registry = LanguageModelRegistry::read_global(cx);
|
||||
if let Some(model) = model_registry.active_model() {
|
||||
self.thread.update(cx, |thread, cx| {
|
||||
thread.send_to_model(model, RequestKind::Chat, false, cx);
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -329,9 +418,9 @@ impl ActiveThread {
|
||||
/// Only one task to save the thread will be in flight at a time.
|
||||
fn save_thread(&mut self, cx: &mut Context<Self>) {
|
||||
let thread = self.thread.clone();
|
||||
self.save_thread_task = Some(cx.spawn(|this, mut cx| async move {
|
||||
self.save_thread_task = Some(cx.spawn(async move |this, cx| {
|
||||
let task = this
|
||||
.update(&mut cx, |this, cx| {
|
||||
.update(cx, |this, cx| {
|
||||
this.thread_store
|
||||
.update(cx, |thread_store, cx| thread_store.save_thread(&thread, cx))
|
||||
})
|
||||
@@ -358,7 +447,6 @@ impl ActiveThread {
|
||||
editor::EditorMode::AutoHeight { max_lines: 8 },
|
||||
buffer,
|
||||
None,
|
||||
false,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
@@ -411,7 +499,7 @@ impl ActiveThread {
|
||||
};
|
||||
|
||||
self.thread.update(cx, |thread, cx| {
|
||||
thread.send_to_model(model, RequestKind::Chat, false, cx)
|
||||
thread.send_to_model(model, RequestKind::Chat, cx)
|
||||
});
|
||||
cx.notify();
|
||||
}
|
||||
@@ -461,14 +549,16 @@ impl ActiveThread {
|
||||
};
|
||||
|
||||
let thread = self.thread.read(cx);
|
||||
|
||||
// Get all the data we need from thread before we start using it in closures
|
||||
let checkpoint = thread.checkpoint_for_message(message_id);
|
||||
let context = thread.context_for_message(message_id);
|
||||
let tool_uses = thread.tool_uses_for_message(message_id);
|
||||
let scripting_tool_uses = thread.scripting_tool_uses_for_message(message_id);
|
||||
|
||||
// Don't render user messages that are just there for returning tool results.
|
||||
if message.role == Role::User
|
||||
&& (thread.message_has_tool_results(message_id)
|
||||
|| thread.message_has_script_output(message_id))
|
||||
|| thread.message_has_scripting_tool_results(message_id))
|
||||
{
|
||||
return Empty.into_any();
|
||||
}
|
||||
@@ -494,7 +584,7 @@ impl ActiveThread {
|
||||
.p_2p5()
|
||||
.child(edit_message_editor)
|
||||
} else {
|
||||
div().p_2p5().text_ui(cx).child(markdown.clone())
|
||||
div().text_ui(cx).child(markdown.clone())
|
||||
},
|
||||
)
|
||||
.when_some(context, |parent, context| {
|
||||
@@ -514,15 +604,16 @@ impl ActiveThread {
|
||||
let styled_message = match message.role {
|
||||
Role::User => v_flex()
|
||||
.id(("message-container", ix))
|
||||
.pt_2p5()
|
||||
.px_2p5()
|
||||
.pt_2()
|
||||
.pl_2()
|
||||
.pr_2p5()
|
||||
.child(
|
||||
v_flex()
|
||||
.bg(colors.editor_background)
|
||||
.rounded_lg()
|
||||
.border_1()
|
||||
.border_color(colors.border)
|
||||
.shadow_sm()
|
||||
.shadow_md()
|
||||
.child(
|
||||
h_flex()
|
||||
.py_1()
|
||||
@@ -613,34 +704,56 @@ impl ActiveThread {
|
||||
},
|
||||
),
|
||||
)
|
||||
.child(message_content),
|
||||
.child(div().p_2().child(message_content)),
|
||||
),
|
||||
Role::Assistant => div()
|
||||
.id(("message-container", ix))
|
||||
.child(message_content)
|
||||
.children(self.render_script(message_id, cx))
|
||||
.map(|parent| {
|
||||
if tool_uses.is_empty() {
|
||||
return parent;
|
||||
}
|
||||
|
||||
parent.child(
|
||||
v_flex().children(
|
||||
tool_uses
|
||||
.into_iter()
|
||||
.map(|tool_use| self.render_tool_use(tool_use, cx)),
|
||||
),
|
||||
Role::Assistant => {
|
||||
v_flex()
|
||||
.id(("message-container", ix))
|
||||
.child(div().py_3().px_4().child(message_content))
|
||||
.when(
|
||||
!tool_uses.is_empty() || !scripting_tool_uses.is_empty(),
|
||||
|parent| {
|
||||
parent.child(
|
||||
v_flex()
|
||||
.children(
|
||||
tool_uses
|
||||
.into_iter()
|
||||
.map(|tool_use| self.render_tool_use(tool_use, cx)),
|
||||
)
|
||||
.children(scripting_tool_uses.into_iter().map(|tool_use| {
|
||||
self.render_scripting_tool_use(tool_use, cx)
|
||||
})),
|
||||
)
|
||||
},
|
||||
)
|
||||
}),
|
||||
}
|
||||
Role::System => div().id(("message-container", ix)).py_1().px_2().child(
|
||||
v_flex()
|
||||
.bg(colors.editor_background)
|
||||
.rounded_sm()
|
||||
.child(message_content),
|
||||
.child(div().p_4().child(message_content)),
|
||||
),
|
||||
};
|
||||
|
||||
styled_message.into_any()
|
||||
v_flex()
|
||||
.when_some(checkpoint, |parent, checkpoint| {
|
||||
parent.child(
|
||||
h_flex().pl_2().child(
|
||||
Button::new("restore-checkpoint", "Restore Checkpoint")
|
||||
.icon(IconName::Undo)
|
||||
.size(ButtonSize::Compact)
|
||||
.on_click(cx.listener(move |this, _, _window, cx| {
|
||||
this.thread.update(cx, |thread, cx| {
|
||||
thread
|
||||
.restore_checkpoint(checkpoint.clone(), cx)
|
||||
.detach_and_log_err(cx);
|
||||
});
|
||||
})),
|
||||
),
|
||||
)
|
||||
})
|
||||
.child(styled_message)
|
||||
.into_any()
|
||||
}
|
||||
|
||||
fn render_tool_use(&self, tool_use: ToolUse, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
@@ -650,6 +763,184 @@ impl ActiveThread {
|
||||
.copied()
|
||||
.unwrap_or_default();
|
||||
|
||||
let lighter_border = cx.theme().colors().border.opacity(0.5);
|
||||
|
||||
div().px_4().child(
|
||||
v_flex()
|
||||
.rounded_lg()
|
||||
.border_1()
|
||||
.border_color(lighter_border)
|
||||
.child(
|
||||
h_flex()
|
||||
.justify_between()
|
||||
.py_1()
|
||||
.pl_1()
|
||||
.pr_2()
|
||||
.bg(cx.theme().colors().editor_foreground.opacity(0.025))
|
||||
.map(|element| {
|
||||
if is_open {
|
||||
element.border_b_1().rounded_t_md()
|
||||
} else {
|
||||
element.rounded_md()
|
||||
}
|
||||
})
|
||||
.border_color(lighter_border)
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_1()
|
||||
.child(Disclosure::new("tool-use-disclosure", is_open).on_click(
|
||||
cx.listener({
|
||||
let tool_use_id = tool_use.id.clone();
|
||||
move |this, _event, _window, _cx| {
|
||||
let is_open = this
|
||||
.expanded_tool_uses
|
||||
.entry(tool_use_id.clone())
|
||||
.or_insert(false);
|
||||
|
||||
*is_open = !*is_open;
|
||||
}
|
||||
}),
|
||||
))
|
||||
.child(
|
||||
Label::new(tool_use.name)
|
||||
.size(LabelSize::Small)
|
||||
.buffer_font(cx),
|
||||
),
|
||||
)
|
||||
.child({
|
||||
let (icon_name, color, animated) = match &tool_use.status {
|
||||
ToolUseStatus::Pending => {
|
||||
(IconName::Warning, Color::Warning, false)
|
||||
}
|
||||
ToolUseStatus::Running => {
|
||||
(IconName::ArrowCircle, Color::Accent, true)
|
||||
}
|
||||
ToolUseStatus::Finished(_) => {
|
||||
(IconName::Check, Color::Success, false)
|
||||
}
|
||||
ToolUseStatus::Error(_) => (IconName::Close, Color::Error, false),
|
||||
};
|
||||
|
||||
let icon = Icon::new(icon_name).color(color).size(IconSize::Small);
|
||||
|
||||
if animated {
|
||||
icon.with_animation(
|
||||
"arrow-circle",
|
||||
Animation::new(Duration::from_secs(2)).repeat(),
|
||||
|icon, delta| {
|
||||
icon.transform(Transformation::rotate(percentage(delta)))
|
||||
},
|
||||
)
|
||||
.into_any_element()
|
||||
} else {
|
||||
icon.into_any_element()
|
||||
}
|
||||
}),
|
||||
)
|
||||
.map(|parent| {
|
||||
if !is_open {
|
||||
return parent;
|
||||
}
|
||||
|
||||
let content_container = || v_flex().py_1().gap_0p5().px_2p5();
|
||||
|
||||
parent.child(
|
||||
v_flex()
|
||||
.gap_1()
|
||||
.bg(cx.theme().colors().editor_background)
|
||||
.rounded_b_lg()
|
||||
.child(
|
||||
content_container()
|
||||
.border_b_1()
|
||||
.border_color(lighter_border)
|
||||
.child(
|
||||
Label::new("Input")
|
||||
.size(LabelSize::XSmall)
|
||||
.color(Color::Muted)
|
||||
.buffer_font(cx),
|
||||
)
|
||||
.child(
|
||||
Label::new(
|
||||
serde_json::to_string_pretty(&tool_use.input)
|
||||
.unwrap_or_default(),
|
||||
)
|
||||
.size(LabelSize::Small)
|
||||
.buffer_font(cx),
|
||||
),
|
||||
)
|
||||
.map(|container| match tool_use.status {
|
||||
ToolUseStatus::Finished(output) => container.child(
|
||||
content_container()
|
||||
.child(
|
||||
Label::new("Result")
|
||||
.size(LabelSize::XSmall)
|
||||
.color(Color::Muted)
|
||||
.buffer_font(cx),
|
||||
)
|
||||
.child(
|
||||
Label::new(output)
|
||||
.size(LabelSize::Small)
|
||||
.buffer_font(cx),
|
||||
),
|
||||
),
|
||||
ToolUseStatus::Running => container.child(
|
||||
content_container().child(
|
||||
h_flex()
|
||||
.gap_1()
|
||||
.pb_1()
|
||||
.child(
|
||||
Icon::new(IconName::ArrowCircle)
|
||||
.size(IconSize::Small)
|
||||
.color(Color::Accent)
|
||||
.with_animation(
|
||||
"arrow-circle",
|
||||
Animation::new(Duration::from_secs(2))
|
||||
.repeat(),
|
||||
|icon, delta| {
|
||||
icon.transform(Transformation::rotate(
|
||||
percentage(delta),
|
||||
))
|
||||
},
|
||||
),
|
||||
)
|
||||
.child(
|
||||
Label::new("Running…")
|
||||
.size(LabelSize::XSmall)
|
||||
.color(Color::Muted)
|
||||
.buffer_font(cx),
|
||||
),
|
||||
),
|
||||
),
|
||||
ToolUseStatus::Error(err) => container.child(
|
||||
content_container()
|
||||
.child(
|
||||
Label::new("Error")
|
||||
.size(LabelSize::XSmall)
|
||||
.color(Color::Muted)
|
||||
.buffer_font(cx),
|
||||
)
|
||||
.child(
|
||||
Label::new(err).size(LabelSize::Small).buffer_font(cx),
|
||||
),
|
||||
),
|
||||
ToolUseStatus::Pending => container,
|
||||
}),
|
||||
)
|
||||
}),
|
||||
)
|
||||
}
|
||||
|
||||
fn render_scripting_tool_use(
|
||||
&self,
|
||||
tool_use: ToolUse,
|
||||
cx: &mut Context<Self>,
|
||||
) -> impl IntoElement {
|
||||
let is_open = self
|
||||
.expanded_tool_uses
|
||||
.get(&tool_use.id)
|
||||
.copied()
|
||||
.unwrap_or_default();
|
||||
|
||||
div().px_2p5().child(
|
||||
v_flex()
|
||||
.gap_1()
|
||||
@@ -663,8 +954,13 @@ impl ActiveThread {
|
||||
.pl_1()
|
||||
.pr_2()
|
||||
.bg(cx.theme().colors().editor_foreground.opacity(0.02))
|
||||
.when(is_open, |element| element.border_b_1().rounded_t(px(6.)))
|
||||
.when(!is_open, |element| element.rounded_md())
|
||||
.map(|element| {
|
||||
if is_open {
|
||||
element.border_b_1().rounded_t_md()
|
||||
} else {
|
||||
element.rounded_md()
|
||||
}
|
||||
})
|
||||
.border_color(cx.theme().colors().border)
|
||||
.child(
|
||||
h_flex()
|
||||
@@ -700,6 +996,9 @@ impl ActiveThread {
|
||||
return parent;
|
||||
}
|
||||
|
||||
let lua_script_markdown =
|
||||
self.rendered_scripting_tool_uses.get(&tool_use.id).cloned();
|
||||
|
||||
parent.child(
|
||||
v_flex()
|
||||
.child(
|
||||
@@ -710,10 +1009,15 @@ impl ActiveThread {
|
||||
.border_b_1()
|
||||
.border_color(cx.theme().colors().border)
|
||||
.child(Label::new("Input:"))
|
||||
.child(Label::new(
|
||||
serde_json::to_string_pretty(&tool_use.input)
|
||||
.unwrap_or_default(),
|
||||
)),
|
||||
.map(|parent| {
|
||||
if let Some(markdown) = lua_script_markdown {
|
||||
parent.child(markdown)
|
||||
} else {
|
||||
parent.child(Label::new(
|
||||
"Failed to render script input to Markdown",
|
||||
))
|
||||
}
|
||||
}),
|
||||
)
|
||||
.map(|parent| match tool_use.status {
|
||||
ToolUseStatus::Finished(output) => parent.child(
|
||||
@@ -738,139 +1042,6 @@ impl ActiveThread {
|
||||
}),
|
||||
)
|
||||
}
|
||||
|
||||
fn render_script(&self, message_id: MessageId, cx: &mut Context<Self>) -> Option<AnyElement> {
|
||||
let script = self.thread.read(cx).script_for_message(message_id, cx)?;
|
||||
|
||||
let is_open = self.expanded_scripts.contains(&script.id);
|
||||
let colors = cx.theme().colors();
|
||||
|
||||
let element = div().px_2p5().child(
|
||||
v_flex()
|
||||
.gap_1()
|
||||
.rounded_lg()
|
||||
.border_1()
|
||||
.border_color(colors.border)
|
||||
.child(
|
||||
h_flex()
|
||||
.justify_between()
|
||||
.py_0p5()
|
||||
.pl_1()
|
||||
.pr_2()
|
||||
.bg(colors.editor_foreground.opacity(0.02))
|
||||
.when(is_open, |element| element.border_b_1().rounded_t(px(6.)))
|
||||
.when(!is_open, |element| element.rounded_md())
|
||||
.border_color(colors.border)
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_1()
|
||||
.child(Disclosure::new("script-disclosure", is_open).on_click(
|
||||
cx.listener({
|
||||
let script_id = script.id;
|
||||
move |this, _event, _window, _cx| {
|
||||
if this.expanded_scripts.contains(&script_id) {
|
||||
this.expanded_scripts.remove(&script_id);
|
||||
} else {
|
||||
this.expanded_scripts.insert(script_id);
|
||||
}
|
||||
}
|
||||
}),
|
||||
))
|
||||
// TODO: Generate script description
|
||||
.child(Label::new("Script")),
|
||||
)
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_1()
|
||||
.child(
|
||||
Label::new(match script.state {
|
||||
ScriptState::Generating => "Generating",
|
||||
ScriptState::Running { .. } => "Running",
|
||||
ScriptState::Succeeded { .. } => "Finished",
|
||||
ScriptState::Failed { .. } => "Error",
|
||||
})
|
||||
.size(LabelSize::XSmall)
|
||||
.buffer_font(cx),
|
||||
)
|
||||
.child(
|
||||
IconButton::new("view-source", IconName::Eye)
|
||||
.icon_color(Color::Muted)
|
||||
.disabled(matches!(script.state, ScriptState::Generating))
|
||||
.on_click(cx.listener({
|
||||
let source = script.source.clone();
|
||||
move |this, _event, window, cx| {
|
||||
this.open_script_source(source.clone(), window, cx);
|
||||
}
|
||||
})),
|
||||
),
|
||||
),
|
||||
)
|
||||
.when(is_open, |parent| {
|
||||
let stdout = script.stdout_snapshot();
|
||||
let error = script.error();
|
||||
|
||||
parent.child(
|
||||
v_flex()
|
||||
.p_2()
|
||||
.bg(colors.editor_background)
|
||||
.gap_2()
|
||||
.child(if stdout.is_empty() && error.is_none() {
|
||||
Label::new("No output yet")
|
||||
.size(LabelSize::Small)
|
||||
.color(Color::Muted)
|
||||
} else {
|
||||
Label::new(stdout).size(LabelSize::Small).buffer_font(cx)
|
||||
})
|
||||
.children(script.error().map(|err| {
|
||||
Label::new(err.to_string())
|
||||
.size(LabelSize::Small)
|
||||
.color(Color::Error)
|
||||
})),
|
||||
)
|
||||
}),
|
||||
);
|
||||
|
||||
Some(element.into_any())
|
||||
}
|
||||
|
||||
fn open_script_source(
|
||||
&mut self,
|
||||
source: SharedString,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<'_, ActiveThread>,
|
||||
) {
|
||||
let language_registry = self.language_registry.clone();
|
||||
let workspace = self.workspace.clone();
|
||||
let source = source.clone();
|
||||
|
||||
cx.spawn_in(window, |_, mut cx| async move {
|
||||
let lua = language_registry.language_for_name("Lua").await.log_err();
|
||||
|
||||
workspace.update_in(&mut cx, |workspace, window, cx| {
|
||||
let project = workspace.project().clone();
|
||||
|
||||
let buffer = project.update(cx, |project, cx| {
|
||||
project.create_local_buffer(&source.trim(), lua, cx)
|
||||
});
|
||||
|
||||
let buffer = cx.new(|cx| {
|
||||
MultiBuffer::singleton(buffer, cx)
|
||||
// TODO: Generate script description
|
||||
.with_title("Assistant script".into())
|
||||
});
|
||||
|
||||
let editor = cx.new(|cx| {
|
||||
let mut editor =
|
||||
Editor::for_multibuffer(buffer, Some(project), true, window, cx);
|
||||
editor.set_read_only(true);
|
||||
editor
|
||||
});
|
||||
|
||||
workspace.add_item_to_active_pane(Box::new(editor), None, true, window, cx);
|
||||
})
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
}
|
||||
}
|
||||
|
||||
impl Render for ActiveThread {
|
||||
|
||||
crates/assistant2/src/agent_profile.rs (new file, 59 lines)
@@ -0,0 +1,59 @@
use std::sync::Arc;

use collections::HashMap;
use gpui::SharedString;

/// A profile for the Zed Agent that controls its behavior.
#[derive(Debug, Clone)]
pub struct AgentProfile {
/// The name of the profile.
pub name: SharedString,
pub tools: HashMap<Arc<str>, bool>,
#[allow(dead_code)]
pub context_servers: HashMap<Arc<str>, ContextServerPreset>,
}

#[derive(Debug, Clone)]
pub struct ContextServerPreset {
#[allow(dead_code)]
pub tools: HashMap<Arc<str>, bool>,
}

impl AgentProfile {
pub fn read_only() -> Self {
Self {
name: "Read-only".into(),
tools: HashMap::from_iter([
("diagnostics".into(), true),
("fetch".into(), true),
("list-directory".into(), true),
("now".into(), true),
("path-search".into(), true),
("read-file".into(), true),
("regex-search".into(), true),
("thinking".into(), true),
]),
context_servers: HashMap::default(),
}
}

pub fn code_writer() -> Self {
Self {
name: "Code Writer".into(),
tools: HashMap::from_iter([
("bash".into(), true),
("delete-path".into(), true),
("diagnostics".into(), true),
("edit-files".into(), true),
("fetch".into(), true),
("list-directory".into(), true),
("now".into(), true),
("path-search".into(), true),
("read-file".into(), true),
("regex-search".into(), true),
("thinking".into(), true),
]),
context_servers: HashMap::default(),
}
}
}
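A hedged usage sketch for the profile type above: how a tool map like this might gate whether a given tool is allowed to run. It uses std::collections::HashMap instead of the workspace `collections` crate, and `is_tool_enabled` is an assumed helper for illustration, not part of the diff.

use std::collections::HashMap;
use std::sync::Arc;

struct AgentProfile {
    name: String,
    tools: HashMap<Arc<str>, bool>,
}

impl AgentProfile {
    // A tool is allowed only if the profile explicitly enables it.
    fn is_tool_enabled(&self, tool_name: &str) -> bool {
        self.tools.get(tool_name).copied().unwrap_or(false)
    }
}

fn main() {
    let read_only = AgentProfile {
        name: "Read-only".into(),
        tools: HashMap::from_iter([("read-file".into(), true), ("edit-files".into(), false)]),
    };
    assert!(read_only.is_tool_enabled("read-file"));
    assert!(!read_only.is_tool_enabled("bash")); // tools absent from the map stay disabled
    let _ = read_only.name;
}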
@@ -1,4 +1,5 @@
mod active_thread;
mod agent_profile;
mod assistant_configuration;
mod assistant_model_selector;
mod assistant_panel;
@@ -16,6 +17,7 @@ mod terminal_inline_assistant;
mod thread;
mod thread_history;
mod thread_store;
mod tool_selector;
mod tool_use;
mod ui;

@@ -30,8 +32,11 @@ use gpui::{actions, App};
use prompt_store::PromptBuilder;
use settings::Settings as _;

pub use crate::active_thread::ActiveThread;
pub use crate::assistant_panel::{AssistantPanel, ConcreteAssistantPanelDelegate};
pub use crate::inline_assistant::InlineAssistant;
pub use crate::thread::{Message, RequestKind, Thread, ThreadEvent};
pub use crate::thread_store::ThreadStore;

actions!(
assistant2,
@@ -52,7 +57,8 @@ actions!(
FocusLeft,
FocusRight,
RemoveFocusedContext,
AcceptSuggestedContext
AcceptSuggestedContext,
OpenActiveThreadAsMarkdown
]
);

@@ -1,19 +1,33 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use assistant_tool::{ToolSource, ToolWorkingSet};
|
||||
use collections::HashMap;
|
||||
use gpui::{Action, AnyView, App, EventEmitter, FocusHandle, Focusable, Subscription};
|
||||
use context_server::manager::ContextServerManager;
|
||||
use gpui::{Action, AnyView, App, Entity, EventEmitter, FocusHandle, Focusable, Subscription};
|
||||
use language_model::{LanguageModelProvider, LanguageModelProviderId, LanguageModelRegistry};
|
||||
use ui::{prelude::*, Divider, DividerColor, ElevationIndex};
|
||||
use ui::{
|
||||
prelude::*, Disclosure, Divider, DividerColor, ElevationIndex, Indicator, Switch, Tooltip,
|
||||
};
|
||||
use util::ResultExt as _;
|
||||
use zed_actions::assistant::DeployPromptLibrary;
|
||||
use zed_actions::ExtensionCategoryFilter;
|
||||
|
||||
pub struct AssistantConfiguration {
|
||||
focus_handle: FocusHandle,
|
||||
configuration_views_by_provider: HashMap<LanguageModelProviderId, AnyView>,
|
||||
context_server_manager: Entity<ContextServerManager>,
|
||||
expanded_context_server_tools: HashMap<Arc<str>, bool>,
|
||||
tools: Arc<ToolWorkingSet>,
|
||||
_registry_subscription: Subscription,
|
||||
}
|
||||
|
||||
impl AssistantConfiguration {
|
||||
pub fn new(window: &mut Window, cx: &mut Context<Self>) -> Self {
|
||||
pub fn new(
|
||||
context_server_manager: Entity<ContextServerManager>,
|
||||
tools: Arc<ToolWorkingSet>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
let focus_handle = cx.focus_handle();
|
||||
|
||||
let registry_subscription = cx.subscribe_in(
|
||||
@@ -36,6 +50,9 @@ impl AssistantConfiguration {
|
||||
let mut this = Self {
|
||||
focus_handle,
|
||||
configuration_views_by_provider: HashMap::default(),
|
||||
context_server_manager,
|
||||
expanded_context_server_tools: HashMap::default(),
|
||||
tools,
|
||||
_registry_subscription: registry_subscription,
|
||||
};
|
||||
this.build_provider_configuration_views(window, cx);
|
||||
@@ -143,6 +160,185 @@ impl AssistantConfiguration {
|
||||
}),
|
||||
)
|
||||
}
|
||||
|
||||
fn render_context_servers_section(&mut self, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
let context_servers = self.context_server_manager.read(cx).all_servers().clone();
|
||||
let tools_by_source = self.tools.tools_by_source(cx);
|
||||
let empty = Vec::new();
|
||||
|
||||
const SUBHEADING: &str = "Connect to context servers via the Model Context Protocol either via Zed extensions or directly.";
|
||||
|
||||
v_flex()
|
||||
.p(DynamicSpacing::Base16.rems(cx))
|
||||
.mt_1()
|
||||
.gap_2()
|
||||
.flex_1()
|
||||
.child(
|
||||
v_flex()
|
||||
.gap_0p5()
|
||||
.child(Headline::new("Context Servers (MCP)").size(HeadlineSize::Small))
|
||||
.child(Label::new(SUBHEADING).color(Color::Muted)),
|
||||
)
|
||||
.children(context_servers.into_iter().map(|context_server| {
|
||||
let is_running = context_server.client().is_some();
|
||||
let are_tools_expanded = self
|
||||
.expanded_context_server_tools
|
||||
.get(&context_server.id())
|
||||
.copied()
|
||||
.unwrap_or_default();
|
||||
|
||||
let tools = tools_by_source
|
||||
.get(&ToolSource::ContextServer {
|
||||
id: context_server.id().into(),
|
||||
})
|
||||
.unwrap_or_else(|| &empty);
|
||||
let tool_count = tools.len();
|
||||
|
||||
v_flex()
|
||||
.border_1()
|
||||
.rounded_sm()
|
||||
.border_color(cx.theme().colors().border)
|
||||
.bg(cx.theme().colors().editor_background)
|
||||
.child(
|
||||
h_flex()
|
||||
.justify_between()
|
||||
.px_2()
|
||||
.py_1()
|
||||
.when(are_tools_expanded, |element| {
|
||||
element
|
||||
.border_b_1()
|
||||
.border_color(cx.theme().colors().border)
|
||||
})
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_2()
|
||||
.child(
|
||||
Disclosure::new("tool-list-disclosure", are_tools_expanded)
|
||||
.on_click(cx.listener({
|
||||
let context_server_id = context_server.id();
|
||||
move |this, _event, _window, _cx| {
|
||||
let is_open = this
|
||||
.expanded_context_server_tools
|
||||
.entry(context_server_id.clone())
|
||||
.or_insert(false);
|
||||
|
||||
*is_open = !*is_open;
|
||||
}
|
||||
})),
|
||||
)
|
||||
.child(Indicator::dot().color(if is_running {
|
||||
Color::Success
|
||||
} else {
|
||||
Color::Error
|
||||
}))
|
||||
.child(Label::new(context_server.id()))
|
||||
.child(
|
||||
Label::new(format!("{tool_count} tools"))
|
||||
.color(Color::Muted),
|
||||
),
|
||||
)
|
||||
.child(h_flex().child(
|
||||
Switch::new("context-server-switch", is_running.into()).on_click({
|
||||
let context_server_manager =
|
||||
self.context_server_manager.clone();
|
||||
let context_server = context_server.clone();
|
||||
move |state, _window, cx| match state {
|
||||
ToggleState::Unselected | ToggleState::Indeterminate => {
|
||||
context_server_manager.update(cx, |this, cx| {
|
||||
this.stop_server(context_server.clone(), cx)
|
||||
.log_err();
|
||||
});
|
||||
}
|
||||
ToggleState::Selected => {
|
||||
cx.spawn({
|
||||
let context_server_manager =
|
||||
context_server_manager.clone();
|
||||
let context_server = context_server.clone();
|
||||
async move |cx| {
|
||||
if let Some(start_server_task) =
|
||||
context_server_manager
|
||||
.update(cx, |this, cx| {
|
||||
this.start_server(
|
||||
context_server,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.log_err()
|
||||
{
|
||||
start_server_task.await.log_err();
|
||||
}
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
}
|
||||
}),
|
||||
)),
|
||||
)
|
||||
.map(|parent| {
|
||||
if !are_tools_expanded {
|
||||
return parent;
|
||||
}
|
||||
|
||||
parent.child(v_flex().children(tools.into_iter().enumerate().map(
|
||||
|(ix, tool)| {
|
||||
h_flex()
|
||||
.px_2()
|
||||
.py_1()
|
||||
.when(ix < tool_count - 1, |element| {
|
||||
element
|
||||
.border_b_1()
|
||||
.border_color(cx.theme().colors().border)
|
||||
})
|
||||
.child(Label::new(tool.name()))
|
||||
},
|
||||
)))
|
||||
})
|
||||
}))
|
||||
.child(
|
||||
h_flex()
|
||||
.justify_between()
|
||||
.gap_2()
|
||||
.child(
|
||||
h_flex().w_full().child(
|
||||
Button::new("add-context-server", "Add Context Server")
|
||||
.style(ButtonStyle::Filled)
|
||||
.layer(ElevationIndex::ModalSurface)
|
||||
.full_width()
|
||||
.icon(IconName::Plus)
|
||||
.icon_size(IconSize::Small)
|
||||
.icon_position(IconPosition::Start)
|
||||
.disabled(true)
|
||||
.tooltip(Tooltip::text("Not yet implemented")),
|
||||
),
|
||||
)
|
||||
.child(
|
||||
h_flex().w_full().child(
|
||||
Button::new(
|
||||
"install-context-server-extensions",
|
||||
"Install Context Server Extensions",
|
||||
)
|
||||
.style(ButtonStyle::Filled)
|
||||
.layer(ElevationIndex::ModalSurface)
|
||||
.full_width()
|
||||
.icon(IconName::DatabaseZap)
|
||||
.icon_size(IconSize::Small)
|
||||
.icon_position(IconPosition::Start)
|
||||
.on_click(|_event, window, cx| {
|
||||
window.dispatch_action(
|
||||
zed_actions::Extensions {
|
||||
category_filter: Some(
|
||||
ExtensionCategoryFilter::ContextServers,
|
||||
),
|
||||
}
|
||||
.boxed_clone(),
|
||||
cx,
|
||||
)
|
||||
}),
|
||||
),
|
||||
),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl Render for AssistantConfiguration {
|
||||
@@ -182,6 +378,8 @@ impl Render for AssistantConfiguration {
|
||||
),
|
||||
)
|
||||
.child(Divider::horizontal().color(DividerColor::Border))
|
||||
.child(self.render_context_servers_section(cx))
|
||||
.child(Divider::horizontal().color(DividerColor::Border))
|
||||
.child(
|
||||
v_flex()
|
||||
.p(DynamicSpacing::Base16.rems(cx))
|
||||
|
||||
@@ -11,7 +11,7 @@ use assistant_slash_command::SlashCommandWorkingSet;
|
||||
use assistant_tool::ToolWorkingSet;
|
||||
|
||||
use client::zed_urls;
|
||||
use editor::Editor;
|
||||
use editor::{Editor, MultiBuffer};
|
||||
use fs::Fs;
|
||||
use gpui::{
|
||||
prelude::*, Action, AnyElement, App, AsyncWindowContext, Corner, Entity, EventEmitter,
|
||||
@@ -38,7 +38,10 @@ use crate::message_editor::MessageEditor;
|
||||
use crate::thread::{Thread, ThreadError, ThreadId};
|
||||
use crate::thread_history::{PastContext, PastThread, ThreadHistory};
|
||||
use crate::thread_store::ThreadStore;
|
||||
use crate::{InlineAssistant, NewPromptEditor, NewThread, OpenConfiguration, OpenHistory};
|
||||
use crate::{
|
||||
InlineAssistant, NewPromptEditor, NewThread, OpenActiveThreadAsMarkdown, OpenConfiguration,
|
||||
OpenHistory,
|
||||
};
|
||||
|
||||
pub fn init(cx: &mut App) {
|
||||
cx.observe_new(
|
||||
@@ -107,19 +110,16 @@ impl AssistantPanel {
|
||||
prompt_builder: Arc<PromptBuilder>,
|
||||
cx: AsyncWindowContext,
|
||||
) -> Task<Result<Entity<Self>>> {
|
||||
cx.spawn(|mut cx| async move {
|
||||
cx.spawn(async move |cx| {
|
||||
let tools = Arc::new(ToolWorkingSet::default());
|
||||
log::info!("[assistant2-debug] initializing ThreadStore");
|
||||
let thread_store = workspace.update(&mut cx, |workspace, cx| {
|
||||
let thread_store = workspace.update(cx, |workspace, cx| {
|
||||
let project = workspace.project().clone();
|
||||
ThreadStore::new(project, tools.clone(), cx)
|
||||
ThreadStore::new(project, tools.clone(), prompt_builder.clone(), cx)
|
||||
})??;
|
||||
log::info!("[assistant2-debug] finished initializing ThreadStore");
|
||||
|
||||
let slash_commands = Arc::new(SlashCommandWorkingSet::default());
|
||||
log::info!("[assistant2-debug] initializing ContextStore");
|
||||
let context_store = workspace
|
||||
.update(&mut cx, |workspace, cx| {
|
||||
.update(cx, |workspace, cx| {
|
||||
let project = workspace.project().clone();
|
||||
assistant_context_editor::ContextStore::new(
|
||||
project,
|
||||
@@ -129,9 +129,8 @@ impl AssistantPanel {
|
||||
)
|
||||
})?
|
||||
.await?;
|
||||
log::info!("[assistant2-debug] finished initializing ContextStore");
|
||||
|
||||
workspace.update_in(&mut cx, |workspace, window, cx| {
|
||||
workspace.update_in(cx, |workspace, window, cx| {
|
||||
cx.new(|cx| Self::new(workspace, thread_store, context_store, window, cx))
|
||||
})
|
||||
})
|
||||
@@ -144,7 +143,6 @@ impl AssistantPanel {
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
log::info!("[assistant2-debug] AssistantPanel::new");
|
||||
let thread = thread_store.update(cx, |this, cx| this.create_thread(cx));
|
||||
let fs = workspace.app_state().fs.clone();
|
||||
let project = workspace.project().clone();
|
||||
@@ -152,10 +150,14 @@ impl AssistantPanel {
|
||||
let workspace = workspace.weak_handle();
|
||||
let weak_self = cx.entity().downgrade();
|
||||
|
||||
let message_editor_context_store =
|
||||
cx.new(|_cx| crate::context_store::ContextStore::new(workspace.clone()));
|
||||
|
||||
let message_editor = cx.new(|cx| {
|
||||
MessageEditor::new(
|
||||
fs.clone(),
|
||||
workspace.clone(),
|
||||
message_editor_context_store.clone(),
|
||||
thread_store.downgrade(),
|
||||
thread.clone(),
|
||||
window,
|
||||
@@ -168,10 +170,10 @@ impl AssistantPanel {
|
||||
|
||||
let thread = cx.new(|cx| {
|
||||
ActiveThread::new(
|
||||
workspace.clone(),
|
||||
thread.clone(),
|
||||
thread_store.clone(),
|
||||
language_registry.clone(),
|
||||
message_editor_context_store.clone(),
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
@@ -240,12 +242,16 @@ impl AssistantPanel {
|
||||
.update(cx, |this, cx| this.create_thread(cx));
|
||||
|
||||
self.active_view = ActiveView::Thread;
|
||||
|
||||
let message_editor_context_store =
|
||||
cx.new(|_cx| crate::context_store::ContextStore::new(self.workspace.clone()));
|
||||
|
||||
self.thread = cx.new(|cx| {
|
||||
ActiveThread::new(
|
||||
self.workspace.clone(),
|
||||
thread.clone(),
|
||||
self.thread_store.clone(),
|
||||
self.language_registry.clone(),
|
||||
message_editor_context_store.clone(),
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
@@ -254,6 +260,7 @@ impl AssistantPanel {
|
||||
MessageEditor::new(
|
||||
self.fs.clone(),
|
||||
self.workspace.clone(),
|
||||
message_editor_context_store,
|
||||
self.thread_store.downgrade(),
|
||||
thread,
|
||||
window,
|
||||
@@ -337,9 +344,9 @@ impl AssistantPanel {
|
||||
|
||||
let lsp_adapter_delegate = make_lsp_adapter_delegate(&project, cx).log_err().flatten();
|
||||
|
||||
cx.spawn_in(window, |this, mut cx| async move {
|
||||
cx.spawn_in(window, async move |this, cx| {
|
||||
let context = context.await?;
|
||||
this.update_in(&mut cx, |this, window, cx| {
|
||||
this.update_in(cx, |this, window, cx| {
|
||||
let editor = cx.new(|cx| {
|
||||
ContextEditor::for_context(
|
||||
context,
|
||||
@@ -370,16 +377,18 @@ impl AssistantPanel {
|
||||
.thread_store
|
||||
.update(cx, |this, cx| this.open_thread(thread_id, cx));
|
||||
|
||||
cx.spawn_in(window, |this, mut cx| async move {
|
||||
cx.spawn_in(window, async move |this, cx| {
|
||||
let thread = open_thread_task.await?;
|
||||
this.update_in(&mut cx, |this, window, cx| {
|
||||
this.update_in(cx, |this, window, cx| {
|
||||
this.active_view = ActiveView::Thread;
|
||||
let message_editor_context_store =
|
||||
cx.new(|_cx| crate::context_store::ContextStore::new(this.workspace.clone()));
|
||||
this.thread = cx.new(|cx| {
|
||||
ActiveThread::new(
|
||||
this.workspace.clone(),
|
||||
thread.clone(),
|
||||
this.thread_store.clone(),
|
||||
this.language_registry.clone(),
|
||||
message_editor_context_store.clone(),
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
@@ -388,6 +397,7 @@ impl AssistantPanel {
|
||||
MessageEditor::new(
|
||||
this.fs.clone(),
|
||||
this.workspace.clone(),
|
||||
message_editor_context_store,
|
||||
this.thread_store.downgrade(),
|
||||
thread,
|
||||
window,
|
||||
@@ -400,8 +410,13 @@ impl AssistantPanel {
|
||||
}
|
||||
|
||||
pub(crate) fn open_configuration(&mut self, window: &mut Window, cx: &mut Context<Self>) {
|
||||
let context_server_manager = self.thread_store.read(cx).context_server_manager();
|
||||
let tools = self.thread_store.read(cx).tools();
|
||||
|
||||
self.active_view = ActiveView::Configuration;
|
||||
self.configuration = Some(cx.new(|cx| AssistantConfiguration::new(window, cx)));
|
||||
self.configuration = Some(
|
||||
cx.new(|cx| AssistantConfiguration::new(context_server_manager, tools, window, cx)),
|
||||
);
|
||||
|
||||
if let Some(configuration) = self.configuration.as_ref() {
|
||||
self.configuration_subscription = Some(cx.subscribe_in(
|
||||
@@ -414,6 +429,65 @@ impl AssistantPanel {
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn open_active_thread_as_markdown(
|
||||
&mut self,
|
||||
_: &OpenActiveThreadAsMarkdown,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let Some(workspace) = self
|
||||
.workspace
|
||||
.upgrade()
|
||||
.ok_or_else(|| anyhow!("workspace dropped"))
|
||||
.log_err()
|
||||
else {
|
||||
return;
|
||||
};
|
||||
|
||||
let markdown_language_task = workspace
|
||||
.read(cx)
|
||||
.app_state()
|
||||
.languages
|
||||
.language_for_name("Markdown");
|
||||
let thread = self.active_thread(cx);
|
||||
cx.spawn_in(window, async move |_this, cx| {
|
||||
let markdown_language = markdown_language_task.await?;
|
||||
|
||||
workspace.update_in(cx, |workspace, window, cx| {
|
||||
let thread = thread.read(cx);
|
||||
let markdown = thread.to_markdown()?;
|
||||
let thread_summary = thread
|
||||
.summary()
|
||||
.map(|summary| summary.to_string())
|
||||
.unwrap_or_else(|| "Thread".to_string());
|
||||
|
||||
let project = workspace.project().clone();
|
||||
let buffer = project.update(cx, |project, cx| {
|
||||
project.create_local_buffer(&markdown, Some(markdown_language), cx)
|
||||
});
|
||||
let buffer = cx.new(|cx| {
|
||||
MultiBuffer::singleton(buffer, cx).with_title(thread_summary.clone())
|
||||
});
|
||||
|
||||
workspace.add_item_to_active_pane(
|
||||
Box::new(cx.new(|cx| {
|
||||
let mut editor =
|
||||
Editor::for_multibuffer(buffer, Some(project.clone()), window, cx);
|
||||
editor.set_breadcrumb_header(thread_summary);
|
||||
editor
|
||||
})),
|
||||
None,
|
||||
true,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
|
||||
anyhow::Ok(())
|
||||
})
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
}
|
||||
|
||||
fn handle_assistant_configuration_event(
|
||||
&mut self,
|
||||
_entity: &Entity<AssistantConfiguration>,
|
||||
@@ -1014,6 +1088,7 @@ impl Render for AssistantPanel {
|
||||
.on_action(cx.listener(|this, _: &OpenHistory, window, cx| {
|
||||
this.open_history(window, cx);
|
||||
}))
|
||||
.on_action(cx.listener(Self::open_active_thread_as_markdown))
|
||||
.on_action(cx.listener(Self::deploy_prompt_library))
|
||||
.child(self.render_toolbar(cx))
|
||||
.map(|parent| match self.active_view {
|
||||
|
||||
@@ -367,7 +367,7 @@ impl CodegenAlternative {
|
||||
let request = self.build_request(user_prompt, cx)?;
|
||||
self.request = Some(request.clone());
|
||||
|
||||
cx.spawn(|_, cx| async move { model.stream_completion_text(request, &cx).await })
|
||||
cx.spawn(async move |_, cx| model.stream_completion_text(request, &cx).await)
|
||||
.boxed_local()
|
||||
};
|
||||
self.handle_stream(telemetry_id, provider_id.to_string(), api_key, stream, cx);
|
||||
@@ -480,213 +480,207 @@ impl CodegenAlternative {
|
||||
let completion = Arc::new(Mutex::new(String::new()));
|
||||
let completion_clone = completion.clone();
|
||||
|
||||
self.generation = cx.spawn(|codegen, mut cx| {
|
||||
async move {
|
||||
let stream = stream.await;
|
||||
let message_id = stream
|
||||
.as_ref()
|
||||
.ok()
|
||||
.and_then(|stream| stream.message_id.clone());
|
||||
let generate = async {
|
||||
let (mut diff_tx, mut diff_rx) = mpsc::channel(1);
|
||||
let executor = cx.background_executor().clone();
|
||||
let message_id = message_id.clone();
|
||||
let line_based_stream_diff: Task<anyhow::Result<()>> =
|
||||
cx.background_spawn(async move {
|
||||
let mut response_latency = None;
|
||||
let request_start = Instant::now();
|
||||
let diff = async {
|
||||
let chunks = StripInvalidSpans::new(stream?.stream);
|
||||
futures::pin_mut!(chunks);
|
||||
let mut diff = StreamingDiff::new(selected_text.to_string());
|
||||
let mut line_diff = LineDiff::default();
|
||||
self.generation = cx.spawn(async move |codegen, cx| {
|
||||
let stream = stream.await;
|
||||
let message_id = stream
|
||||
.as_ref()
|
||||
.ok()
|
||||
.and_then(|stream| stream.message_id.clone());
|
||||
let generate = async {
|
||||
let (mut diff_tx, mut diff_rx) = mpsc::channel(1);
|
||||
let executor = cx.background_executor().clone();
|
||||
let message_id = message_id.clone();
|
||||
let line_based_stream_diff: Task<anyhow::Result<()>> =
|
||||
cx.background_spawn(async move {
|
||||
let mut response_latency = None;
|
||||
let request_start = Instant::now();
|
||||
let diff = async {
|
||||
let chunks = StripInvalidSpans::new(stream?.stream);
|
||||
futures::pin_mut!(chunks);
|
||||
let mut diff = StreamingDiff::new(selected_text.to_string());
|
||||
let mut line_diff = LineDiff::default();
|
||||
|
||||
let mut new_text = String::new();
|
||||
let mut base_indent = None;
|
||||
let mut line_indent = None;
|
||||
let mut first_line = true;
|
||||
let mut new_text = String::new();
|
||||
let mut base_indent = None;
|
||||
let mut line_indent = None;
|
||||
let mut first_line = true;
|
||||
|
||||
while let Some(chunk) = chunks.next().await {
|
||||
if response_latency.is_none() {
|
||||
response_latency = Some(request_start.elapsed());
|
||||
}
|
||||
let chunk = chunk?;
|
||||
completion_clone.lock().push_str(&chunk);
|
||||
while let Some(chunk) = chunks.next().await {
|
||||
if response_latency.is_none() {
|
||||
response_latency = Some(request_start.elapsed());
|
||||
}
|
||||
let chunk = chunk?;
|
||||
completion_clone.lock().push_str(&chunk);
|
||||
|
||||
let mut lines = chunk.split('\n').peekable();
|
||||
while let Some(line) = lines.next() {
|
||||
new_text.push_str(line);
|
||||
if line_indent.is_none() {
|
||||
if let Some(non_whitespace_ch_ix) =
|
||||
new_text.find(|ch: char| !ch.is_whitespace())
|
||||
{
|
||||
line_indent = Some(non_whitespace_ch_ix);
|
||||
base_indent = base_indent.or(line_indent);
|
||||
let mut lines = chunk.split('\n').peekable();
|
||||
while let Some(line) = lines.next() {
|
||||
new_text.push_str(line);
|
||||
if line_indent.is_none() {
|
||||
if let Some(non_whitespace_ch_ix) =
|
||||
new_text.find(|ch: char| !ch.is_whitespace())
|
||||
{
|
||||
line_indent = Some(non_whitespace_ch_ix);
|
||||
base_indent = base_indent.or(line_indent);
|
||||
|
||||
let line_indent = line_indent.unwrap();
|
||||
let base_indent = base_indent.unwrap();
|
||||
let indent_delta =
|
||||
line_indent as i32 - base_indent as i32;
|
||||
let mut corrected_indent_len = cmp::max(
|
||||
0,
|
||||
suggested_line_indent.len as i32 + indent_delta,
|
||||
)
|
||||
as usize;
|
||||
if first_line {
|
||||
corrected_indent_len = corrected_indent_len
|
||||
.saturating_sub(
|
||||
selection_start.column as usize,
|
||||
);
|
||||
}
|
||||
|
||||
let indent_char = suggested_line_indent.char();
|
||||
let mut indent_buffer = [0; 4];
|
||||
let indent_str =
|
||||
indent_char.encode_utf8(&mut indent_buffer);
|
||||
new_text.replace_range(
|
||||
..line_indent,
|
||||
&indent_str.repeat(corrected_indent_len),
|
||||
);
|
||||
let line_indent = line_indent.unwrap();
|
||||
let base_indent = base_indent.unwrap();
|
||||
let indent_delta =
|
||||
line_indent as i32 - base_indent as i32;
|
||||
let mut corrected_indent_len = cmp::max(
|
||||
0,
|
||||
suggested_line_indent.len as i32 + indent_delta,
|
||||
)
|
||||
as usize;
|
||||
if first_line {
|
||||
corrected_indent_len = corrected_indent_len
|
||||
.saturating_sub(
|
||||
selection_start.column as usize,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if line_indent.is_some() {
|
||||
let char_ops = diff.push_new(&new_text);
|
||||
line_diff
|
||||
.push_char_operations(&char_ops, &selected_text);
|
||||
diff_tx
|
||||
.send((char_ops, line_diff.line_operations()))
|
||||
.await?;
|
||||
let indent_char = suggested_line_indent.char();
|
||||
let mut indent_buffer = [0; 4];
|
||||
let indent_str =
|
||||
indent_char.encode_utf8(&mut indent_buffer);
|
||||
new_text.replace_range(
|
||||
..line_indent,
|
||||
&indent_str.repeat(corrected_indent_len),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
if line_indent.is_some() {
|
||||
let char_ops = diff.push_new(&new_text);
|
||||
line_diff.push_char_operations(&char_ops, &selected_text);
|
||||
diff_tx
|
||||
.send((char_ops, line_diff.line_operations()))
|
||||
.await?;
|
||||
new_text.clear();
|
||||
}
|
||||
|
||||
if lines.peek().is_some() {
|
||||
let char_ops = diff.push_new("\n");
|
||||
line_diff.push_char_operations(&char_ops, &selected_text);
|
||||
diff_tx
|
||||
.send((char_ops, line_diff.line_operations()))
|
||||
.await?;
|
||||
if line_indent.is_none() {
|
||||
// Don't write out the leading indentation in empty lines on the next line
|
||||
// This is the case where the above if statement didn't clear the buffer
|
||||
new_text.clear();
|
||||
}
|
||||
|
||||
if lines.peek().is_some() {
|
||||
let char_ops = diff.push_new("\n");
|
||||
line_diff
|
||||
.push_char_operations(&char_ops, &selected_text);
|
||||
diff_tx
|
||||
.send((char_ops, line_diff.line_operations()))
|
||||
.await?;
|
||||
if line_indent.is_none() {
|
||||
// Don't write out the leading indentation in empty lines on the next line
|
||||
// This is the case where the above if statement didn't clear the buffer
|
||||
new_text.clear();
|
||||
}
|
||||
line_indent = None;
|
||||
first_line = false;
|
||||
}
|
||||
line_indent = None;
|
||||
first_line = false;
|
||||
}
|
||||
}
|
||||
|
||||
let mut char_ops = diff.push_new(&new_text);
|
||||
char_ops.extend(diff.finish());
|
||||
line_diff.push_char_operations(&char_ops, &selected_text);
|
||||
line_diff.finish(&selected_text);
|
||||
diff_tx
|
||||
.send((char_ops, line_diff.line_operations()))
|
||||
.await?;
|
||||
|
||||
anyhow::Ok(())
|
||||
};
|
||||
|
||||
let result = diff.await;
|
||||
|
||||
let error_message =
|
||||
result.as_ref().err().map(|error| error.to_string());
|
||||
report_assistant_event(
|
||||
AssistantEvent {
|
||||
conversation_id: None,
|
||||
message_id,
|
||||
kind: AssistantKind::Inline,
|
||||
phase: AssistantPhase::Response,
|
||||
model: model_telemetry_id,
|
||||
model_provider: model_provider_id.to_string(),
|
||||
response_latency,
|
||||
error_message,
|
||||
language_name: language_name.map(|name| name.to_proto()),
|
||||
},
|
||||
telemetry,
|
||||
http_client,
|
||||
model_api_key,
|
||||
&executor,
|
||||
);
|
||||
|
||||
result?;
|
||||
Ok(())
|
||||
});
|
||||
|
||||
while let Some((char_ops, line_ops)) = diff_rx.next().await {
|
||||
codegen.update(&mut cx, |codegen, cx| {
|
||||
codegen.last_equal_ranges.clear();
|
||||
|
||||
let edits = char_ops
|
||||
.into_iter()
|
||||
.filter_map(|operation| match operation {
|
||||
CharOperation::Insert { text } => {
|
||||
let edit_start = snapshot.anchor_after(edit_start);
|
||||
Some((edit_start..edit_start, text))
|
||||
}
|
||||
CharOperation::Delete { bytes } => {
|
||||
let edit_end = edit_start + bytes;
|
||||
let edit_range = snapshot.anchor_after(edit_start)
|
||||
..snapshot.anchor_before(edit_end);
|
||||
edit_start = edit_end;
|
||||
Some((edit_range, String::new()))
|
||||
}
|
||||
CharOperation::Keep { bytes } => {
|
||||
let edit_end = edit_start + bytes;
|
||||
let edit_range = snapshot.anchor_after(edit_start)
|
||||
..snapshot.anchor_before(edit_end);
|
||||
edit_start = edit_end;
|
||||
codegen.last_equal_ranges.push(edit_range);
|
||||
None
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
if codegen.active {
|
||||
codegen.apply_edits(edits.iter().cloned(), cx);
|
||||
codegen.reapply_line_based_diff(line_ops.iter().cloned(), cx);
|
||||
}
|
||||
codegen.edits.extend(edits);
|
||||
codegen.line_operations = line_ops;
|
||||
codegen.edit_position = Some(snapshot.anchor_after(edit_start));
|
||||
|
||||
cx.notify();
|
||||
})?;
|
||||
}
|
||||
let mut char_ops = diff.push_new(&new_text);
|
||||
char_ops.extend(diff.finish());
|
||||
line_diff.push_char_operations(&char_ops, &selected_text);
|
||||
line_diff.finish(&selected_text);
|
||||
diff_tx
|
||||
.send((char_ops, line_diff.line_operations()))
|
||||
.await?;
|
||||
|
||||
// Streaming stopped and we have the new text in the buffer, and a line-based diff applied for the whole new buffer.
|
||||
// That diff is not what a regular diff is and might look unexpected, ergo apply a regular diff.
|
||||
// It's fine to apply even if the rest of the line diffing fails, as no more hunks are coming through `diff_rx`.
|
||||
let batch_diff_task =
|
||||
codegen.update(&mut cx, |codegen, cx| codegen.reapply_batch_diff(cx))?;
|
||||
let (line_based_stream_diff, ()) =
|
||||
join!(line_based_stream_diff, batch_diff_task);
|
||||
line_based_stream_diff?;
|
||||
anyhow::Ok(())
|
||||
};
|
||||
|
||||
anyhow::Ok(())
|
||||
};
|
||||
let result = diff.await;
|
||||
|
||||
let result = generate.await;
|
||||
let elapsed_time = start_time.elapsed().as_secs_f64();
|
||||
let error_message = result.as_ref().err().map(|error| error.to_string());
|
||||
report_assistant_event(
|
||||
AssistantEvent {
|
||||
conversation_id: None,
|
||||
message_id,
|
||||
kind: AssistantKind::Inline,
|
||||
phase: AssistantPhase::Response,
|
||||
model: model_telemetry_id,
|
||||
model_provider: model_provider_id.to_string(),
|
||||
response_latency,
|
||||
error_message,
|
||||
language_name: language_name.map(|name| name.to_proto()),
|
||||
},
|
||||
telemetry,
|
||||
http_client,
|
||||
model_api_key,
|
||||
&executor,
|
||||
);
|
||||
|
||||
codegen
|
||||
.update(&mut cx, |this, cx| {
|
||||
this.message_id = message_id;
|
||||
this.last_equal_ranges.clear();
|
||||
if let Err(error) = result {
|
||||
this.status = CodegenStatus::Error(error);
|
||||
} else {
|
||||
this.status = CodegenStatus::Done;
|
||||
result?;
|
||||
Ok(())
|
||||
});
|
||||
|
||||
while let Some((char_ops, line_ops)) = diff_rx.next().await {
|
||||
codegen.update(cx, |codegen, cx| {
|
||||
codegen.last_equal_ranges.clear();
|
||||
|
||||
let edits = char_ops
|
||||
.into_iter()
|
||||
.filter_map(|operation| match operation {
|
||||
CharOperation::Insert { text } => {
|
||||
let edit_start = snapshot.anchor_after(edit_start);
|
||||
Some((edit_start..edit_start, text))
|
||||
}
|
||||
CharOperation::Delete { bytes } => {
|
||||
let edit_end = edit_start + bytes;
|
||||
let edit_range = snapshot.anchor_after(edit_start)
|
||||
..snapshot.anchor_before(edit_end);
|
||||
edit_start = edit_end;
|
||||
Some((edit_range, String::new()))
|
||||
}
|
||||
CharOperation::Keep { bytes } => {
|
||||
let edit_end = edit_start + bytes;
|
||||
let edit_range = snapshot.anchor_after(edit_start)
|
||||
..snapshot.anchor_before(edit_end);
|
||||
edit_start = edit_end;
|
||||
codegen.last_equal_ranges.push(edit_range);
|
||||
None
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
if codegen.active {
|
||||
codegen.apply_edits(edits.iter().cloned(), cx);
|
||||
codegen.reapply_line_based_diff(line_ops.iter().cloned(), cx);
|
||||
}
|
||||
this.elapsed_time = Some(elapsed_time);
|
||||
this.completion = Some(completion.lock().clone());
|
||||
cx.emit(CodegenEvent::Finished);
|
||||
codegen.edits.extend(edits);
|
||||
codegen.line_operations = line_ops;
|
||||
codegen.edit_position = Some(snapshot.anchor_after(edit_start));
|
||||
|
||||
cx.notify();
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
})?;
|
||||
}
|
||||
|
||||
// Streaming stopped and we have the new text in the buffer, and a line-based diff applied for the whole new buffer.
|
||||
// That diff is not what a regular diff is and might look unexpected, ergo apply a regular diff.
|
||||
// It's fine to apply even if the rest of the line diffing fails, as no more hunks are coming through `diff_rx`.
|
||||
let batch_diff_task =
|
||||
codegen.update(cx, |codegen, cx| codegen.reapply_batch_diff(cx))?;
|
||||
let (line_based_stream_diff, ()) = join!(line_based_stream_diff, batch_diff_task);
|
||||
line_based_stream_diff?;
|
||||
|
||||
anyhow::Ok(())
|
||||
};
|
||||
|
||||
let result = generate.await;
|
||||
let elapsed_time = start_time.elapsed().as_secs_f64();
|
||||
|
||||
codegen
|
||||
.update(cx, |this, cx| {
|
||||
this.message_id = message_id;
|
||||
this.last_equal_ranges.clear();
|
||||
if let Err(error) = result {
|
||||
this.status = CodegenStatus::Error(error);
|
||||
} else {
|
||||
this.status = CodegenStatus::Done;
|
||||
}
|
||||
this.elapsed_time = Some(elapsed_time);
|
||||
this.completion = Some(completion.lock().clone());
|
||||
cx.emit(CodegenEvent::Finished);
|
||||
cx.notify();
|
||||
})
|
||||
.ok();
|
||||
});
|
||||
cx.notify();
|
||||
}
|
||||
@@ -804,7 +798,7 @@ impl CodegenAlternative {
|
||||
let new_snapshot = self.buffer.read(cx).snapshot(cx);
|
||||
let new_range = self.range.to_point(&new_snapshot);
|
||||
|
||||
cx.spawn(|codegen, mut cx| async move {
|
||||
cx.spawn(async move |codegen, cx| {
|
||||
let (deleted_row_ranges, inserted_row_ranges) = cx
|
||||
.background_spawn(async move {
|
||||
let old_text = old_snapshot
|
||||
@@ -854,7 +848,7 @@ impl CodegenAlternative {
|
||||
.await;
|
||||
|
||||
codegen
|
||||
.update(&mut cx, |codegen, cx| {
|
||||
.update(cx, |codegen, cx| {
|
||||
codegen.diff.deleted_row_ranges = deleted_row_ranges;
|
||||
codegen.diff.inserted_row_ranges = inserted_row_ranges;
|
||||
cx.notify();
|
||||
|
||||
@@ -43,15 +43,6 @@ pub enum ContextKind {
|
||||
}
|
||||
|
||||
impl ContextKind {
|
||||
pub fn label(&self) -> &'static str {
|
||||
match self {
|
||||
ContextKind::File => "File",
|
||||
ContextKind::Directory => "Folder",
|
||||
ContextKind::FetchedUrl => "Fetch",
|
||||
ContextKind::Thread => "Thread",
|
||||
}
|
||||
}
|
||||
|
||||
pub fn icon(&self) -> IconName {
|
||||
match self {
|
||||
ContextKind::File => IconName::File,
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
mod directory_context_picker;
|
||||
mod fetch_context_picker;
|
||||
mod file_context_picker;
|
||||
mod thread_context_picker;
|
||||
@@ -15,8 +14,6 @@ use thread_context_picker::{render_thread_context_entry, ThreadContextEntry};
|
||||
use ui::{prelude::*, ContextMenu, ContextMenuEntry, ContextMenuItem};
|
||||
use workspace::{notifications::NotifyResultExt, Workspace};
|
||||
|
||||
use crate::context::ContextKind;
|
||||
use crate::context_picker::directory_context_picker::DirectoryContextPicker;
|
||||
use crate::context_picker::fetch_context_picker::FetchContextPicker;
|
||||
use crate::context_picker::file_context_picker::FileContextPicker;
|
||||
use crate::context_picker::thread_context_picker::ThreadContextPicker;
|
||||
@@ -30,17 +27,41 @@ pub enum ConfirmBehavior {
|
||||
Close,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
enum ContextPickerMode {
|
||||
File,
|
||||
Fetch,
|
||||
Thread,
|
||||
}
|
||||
|
||||
impl ContextPickerMode {
|
||||
pub fn label(&self) -> &'static str {
|
||||
match self {
|
||||
Self::File => "File/Directory",
|
||||
Self::Fetch => "Fetch",
|
||||
Self::Thread => "Thread",
|
||||
}
|
||||
}
|
||||
|
||||
pub fn icon(&self) -> IconName {
|
||||
match self {
|
||||
Self::File => IconName::File,
|
||||
Self::Fetch => IconName::Globe,
|
||||
Self::Thread => IconName::MessageCircle,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
enum ContextPickerState {
|
||||
Default(Entity<ContextMenu>),
|
||||
File(Entity<FileContextPicker>),
|
||||
Directory(Entity<DirectoryContextPicker>),
|
||||
Fetch(Entity<FetchContextPicker>),
|
||||
Thread(Entity<ThreadContextPicker>),
|
||||
}
|
||||
|
||||
pub(super) struct ContextPicker {
|
||||
mode: ContextPickerMode,
|
||||
mode: ContextPickerState,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
editor: WeakEntity<Editor>,
|
||||
context_store: WeakEntity<ContextStore>,
|
||||
@@ -59,7 +80,7 @@ impl ContextPicker {
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
ContextPicker {
|
||||
mode: ContextPickerMode::Default(ContextMenu::build(
|
||||
mode: ContextPickerState::Default(ContextMenu::build(
|
||||
window,
|
||||
cx,
|
||||
|menu, _window, _cx| menu,
|
||||
@@ -73,7 +94,7 @@ impl ContextPicker {
|
||||
}
|
||||
|
||||
pub fn init(&mut self, window: &mut Window, cx: &mut Context<Self>) {
|
||||
self.mode = ContextPickerMode::Default(self.build_menu(window, cx));
|
||||
self.mode = ContextPickerState::Default(self.build_menu(window, cx));
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
@@ -88,13 +109,9 @@ impl ContextPicker {
|
||||
.enumerate()
|
||||
.map(|(ix, entry)| self.recent_menu_item(context_picker.clone(), ix, entry));
|
||||
|
||||
let mut context_kinds = vec![
|
||||
ContextKind::File,
|
||||
ContextKind::Directory,
|
||||
ContextKind::FetchedUrl,
|
||||
];
|
||||
let mut modes = vec![ContextPickerMode::File, ContextPickerMode::Fetch];
|
||||
if self.allow_threads() {
|
||||
context_kinds.push(ContextKind::Thread);
|
||||
modes.push(ContextPickerMode::Thread);
|
||||
}
|
||||
|
||||
let menu = menu
|
||||
@@ -112,15 +129,15 @@ impl ContextPicker {
|
||||
})
|
||||
.extend(recent_entries)
|
||||
.when(has_recent, |menu| menu.separator())
|
||||
.extend(context_kinds.into_iter().map(|kind| {
|
||||
.extend(modes.into_iter().map(|mode| {
|
||||
let context_picker = context_picker.clone();
|
||||
|
||||
ContextMenuEntry::new(kind.label())
|
||||
.icon(kind.icon())
|
||||
ContextMenuEntry::new(mode.label())
|
||||
.icon(mode.icon())
|
||||
.icon_size(IconSize::XSmall)
|
||||
.icon_color(Color::Muted)
|
||||
.handler(move |window, cx| {
|
||||
context_picker.update(cx, |this, cx| this.select_kind(kind, window, cx))
|
||||
context_picker.update(cx, |this, cx| this.select_mode(mode, window, cx))
|
||||
})
|
||||
}));
|
||||
|
||||
@@ -143,12 +160,17 @@ impl ContextPicker {
|
||||
self.thread_store.is_some()
|
||||
}
|
||||
|
||||
fn select_kind(&mut self, kind: ContextKind, window: &mut Window, cx: &mut Context<Self>) {
|
||||
fn select_mode(
|
||||
&mut self,
|
||||
mode: ContextPickerMode,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let context_picker = cx.entity().downgrade();
|
||||
|
||||
match kind {
|
||||
ContextKind::File => {
|
||||
self.mode = ContextPickerMode::File(cx.new(|cx| {
|
||||
match mode {
|
||||
ContextPickerMode::File => {
|
||||
self.mode = ContextPickerState::File(cx.new(|cx| {
|
||||
FileContextPicker::new(
|
||||
context_picker.clone(),
|
||||
self.workspace.clone(),
|
||||
@@ -160,20 +182,8 @@ impl ContextPicker {
|
||||
)
|
||||
}));
|
||||
}
|
||||
ContextKind::Directory => {
|
||||
self.mode = ContextPickerMode::Directory(cx.new(|cx| {
|
||||
DirectoryContextPicker::new(
|
||||
context_picker.clone(),
|
||||
self.workspace.clone(),
|
||||
self.context_store.clone(),
|
||||
self.confirm_behavior,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
}));
|
||||
}
|
||||
ContextKind::FetchedUrl => {
|
||||
self.mode = ContextPickerMode::Fetch(cx.new(|cx| {
|
||||
ContextPickerMode::Fetch => {
|
||||
self.mode = ContextPickerState::Fetch(cx.new(|cx| {
|
||||
FetchContextPicker::new(
|
||||
context_picker.clone(),
|
||||
self.workspace.clone(),
|
||||
@@ -184,9 +194,9 @@ impl ContextPicker {
|
||||
)
|
||||
}));
|
||||
}
|
||||
ContextKind::Thread => {
|
||||
ContextPickerMode::Thread => {
|
||||
if let Some(thread_store) = self.thread_store.as_ref() {
|
||||
self.mode = ContextPickerMode::Thread(cx.new(|cx| {
|
||||
self.mode = ContextPickerState::Thread(cx.new(|cx| {
|
||||
ThreadContextPicker::new(
|
||||
thread_store.clone(),
|
||||
context_picker.clone(),
|
||||
@@ -224,6 +234,7 @@ impl ContextPicker {
|
||||
ElementId::NamedInteger("ctx-recent".into(), ix),
|
||||
&path,
|
||||
&path_prefix,
|
||||
false,
|
||||
context_store.clone(),
|
||||
cx,
|
||||
)
|
||||
@@ -270,10 +281,8 @@ impl ContextPicker {
|
||||
context_store.add_file_from_path(project_path.clone(), cx)
|
||||
});
|
||||
|
||||
cx.spawn_in(window, |_, mut cx| async move {
|
||||
task.await.notify_async_err(&mut cx)
|
||||
})
|
||||
.detach();
|
||||
cx.spawn_in(window, async move |_, cx| task.await.notify_async_err(cx))
|
||||
.detach();
|
||||
|
||||
cx.notify();
|
||||
}
|
||||
@@ -296,13 +305,13 @@ impl ContextPicker {
|
||||
};
|
||||
|
||||
let open_thread_task = thread_store.update(cx, |this, cx| this.open_thread(&thread.id, cx));
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
cx.spawn(async move |this, cx| {
|
||||
let thread = open_thread_task.await?;
|
||||
context_store.update(&mut cx, |context_store, cx| {
|
||||
context_store.update(cx, |context_store, cx| {
|
||||
context_store.add_thread(thread, cx);
|
||||
})?;
|
||||
|
||||
this.update(&mut cx, |_this, cx| cx.notify())
|
||||
this.update(cx, |_this, cx| cx.notify())
|
||||
})
|
||||
}
|
||||
|
||||
@@ -392,11 +401,10 @@ impl EventEmitter<DismissEvent> for ContextPicker {}
|
||||
impl Focusable for ContextPicker {
|
||||
fn focus_handle(&self, cx: &App) -> FocusHandle {
|
||||
match &self.mode {
|
||||
ContextPickerMode::Default(menu) => menu.focus_handle(cx),
|
||||
ContextPickerMode::File(file_picker) => file_picker.focus_handle(cx),
|
||||
ContextPickerMode::Directory(directory_picker) => directory_picker.focus_handle(cx),
|
||||
ContextPickerMode::Fetch(fetch_picker) => fetch_picker.focus_handle(cx),
|
||||
ContextPickerMode::Thread(thread_picker) => thread_picker.focus_handle(cx),
|
||||
ContextPickerState::Default(menu) => menu.focus_handle(cx),
|
||||
ContextPickerState::File(file_picker) => file_picker.focus_handle(cx),
|
||||
ContextPickerState::Fetch(fetch_picker) => fetch_picker.focus_handle(cx),
|
||||
ContextPickerState::Thread(thread_picker) => thread_picker.focus_handle(cx),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -407,13 +415,10 @@ impl Render for ContextPicker {
|
||||
.w(px(400.))
|
||||
.min_w(px(400.))
|
||||
.map(|parent| match &self.mode {
|
||||
ContextPickerMode::Default(menu) => parent.child(menu.clone()),
|
||||
ContextPickerMode::File(file_picker) => parent.child(file_picker.clone()),
|
||||
ContextPickerMode::Directory(directory_picker) => {
|
||||
parent.child(directory_picker.clone())
|
||||
}
|
||||
ContextPickerMode::Fetch(fetch_picker) => parent.child(fetch_picker.clone()),
|
||||
ContextPickerMode::Thread(thread_picker) => parent.child(thread_picker.clone()),
|
||||
ContextPickerState::Default(menu) => parent.child(menu.clone()),
|
||||
ContextPickerState::File(file_picker) => parent.child(file_picker.clone()),
|
||||
ContextPickerState::Fetch(fetch_picker) => parent.child(fetch_picker.clone()),
|
||||
ContextPickerState::Thread(thread_picker) => parent.child(thread_picker.clone()),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,269 +0,0 @@
|
||||
use std::path::Path;
|
||||
use std::sync::atomic::AtomicBool;
|
||||
use std::sync::Arc;
|
||||
|
||||
use fuzzy::PathMatch;
|
||||
use gpui::{App, DismissEvent, Entity, FocusHandle, Focusable, Task, WeakEntity};
|
||||
use picker::{Picker, PickerDelegate};
|
||||
use project::{PathMatchCandidateSet, ProjectPath, WorktreeId};
|
||||
use ui::{prelude::*, ListItem};
|
||||
use util::ResultExt as _;
|
||||
use workspace::{notifications::NotifyResultExt, Workspace};
|
||||
|
||||
use crate::context_picker::{ConfirmBehavior, ContextPicker};
|
||||
use crate::context_store::ContextStore;
|
||||
|
||||
pub struct DirectoryContextPicker {
|
||||
picker: Entity<Picker<DirectoryContextPickerDelegate>>,
|
||||
}
|
||||
|
||||
impl DirectoryContextPicker {
|
||||
pub fn new(
|
||||
context_picker: WeakEntity<ContextPicker>,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
context_store: WeakEntity<ContextStore>,
|
||||
confirm_behavior: ConfirmBehavior,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
let delegate = DirectoryContextPickerDelegate::new(
|
||||
context_picker,
|
||||
workspace,
|
||||
context_store,
|
||||
confirm_behavior,
|
||||
);
|
||||
let picker = cx.new(|cx| Picker::uniform_list(delegate, window, cx));
|
||||
|
||||
Self { picker }
|
||||
}
|
||||
}
|
||||
|
||||
impl Focusable for DirectoryContextPicker {
|
||||
fn focus_handle(&self, cx: &App) -> FocusHandle {
|
||||
self.picker.focus_handle(cx)
|
||||
}
|
||||
}
|
||||
|
||||
impl Render for DirectoryContextPicker {
|
||||
fn render(&mut self, _window: &mut Window, _cx: &mut Context<Self>) -> impl IntoElement {
|
||||
self.picker.clone()
|
||||
}
|
||||
}
|
||||
|
||||
pub struct DirectoryContextPickerDelegate {
|
||||
context_picker: WeakEntity<ContextPicker>,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
context_store: WeakEntity<ContextStore>,
|
||||
confirm_behavior: ConfirmBehavior,
|
||||
matches: Vec<PathMatch>,
|
||||
selected_index: usize,
|
||||
}
|
||||
|
||||
impl DirectoryContextPickerDelegate {
|
||||
pub fn new(
|
||||
context_picker: WeakEntity<ContextPicker>,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
context_store: WeakEntity<ContextStore>,
|
||||
confirm_behavior: ConfirmBehavior,
|
||||
) -> Self {
|
||||
Self {
|
||||
context_picker,
|
||||
workspace,
|
||||
context_store,
|
||||
confirm_behavior,
|
||||
matches: Vec::new(),
|
||||
selected_index: 0,
|
||||
}
|
||||
}
|
||||
|
||||
fn search(
|
||||
&mut self,
|
||||
query: String,
|
||||
cancellation_flag: Arc<AtomicBool>,
|
||||
workspace: &Entity<Workspace>,
|
||||
cx: &mut Context<Picker<Self>>,
|
||||
) -> Task<Vec<PathMatch>> {
|
||||
if query.is_empty() {
|
||||
let workspace = workspace.read(cx);
|
||||
let project = workspace.project().read(cx);
|
||||
let directory_matches = project.worktrees(cx).flat_map(|worktree| {
|
||||
let worktree = worktree.read(cx);
|
||||
let path_prefix: Arc<str> = worktree.root_name().into();
|
||||
worktree.directories(false, 0).map(move |entry| PathMatch {
|
||||
score: 0.,
|
||||
positions: Vec::new(),
|
||||
worktree_id: worktree.id().to_usize(),
|
||||
path: entry.path.clone(),
|
||||
path_prefix: path_prefix.clone(),
|
||||
distance_to_relative_ancestor: 0,
|
||||
is_dir: true,
|
||||
})
|
||||
});
|
||||
|
||||
Task::ready(directory_matches.collect())
|
||||
} else {
|
||||
let worktrees = workspace.read(cx).visible_worktrees(cx).collect::<Vec<_>>();
|
||||
let candidate_sets = worktrees
|
||||
.into_iter()
|
||||
.map(|worktree| {
|
||||
let worktree = worktree.read(cx);
|
||||
|
||||
PathMatchCandidateSet {
|
||||
snapshot: worktree.snapshot(),
|
||||
include_ignored: worktree
|
||||
.root_entry()
|
||||
.map_or(false, |entry| entry.is_ignored),
|
||||
include_root_name: true,
|
||||
candidates: project::Candidates::Directories,
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let executor = cx.background_executor().clone();
|
||||
cx.foreground_executor().spawn(async move {
|
||||
fuzzy::match_path_sets(
|
||||
candidate_sets.as_slice(),
|
||||
query.as_str(),
|
||||
None,
|
||||
false,
|
||||
100,
|
||||
&cancellation_flag,
|
||||
executor,
|
||||
)
|
||||
.await
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl PickerDelegate for DirectoryContextPickerDelegate {
|
||||
type ListItem = ListItem;
|
||||
|
||||
fn match_count(&self) -> usize {
|
||||
self.matches.len()
|
||||
}
|
||||
|
||||
fn selected_index(&self) -> usize {
|
||||
self.selected_index
|
||||
}
|
||||
|
||||
fn set_selected_index(
|
||||
&mut self,
|
||||
ix: usize,
|
||||
_window: &mut Window,
|
||||
_cx: &mut Context<Picker<Self>>,
|
||||
) {
|
||||
self.selected_index = ix;
|
||||
}
|
||||
|
||||
fn placeholder_text(&self, _window: &mut Window, _cx: &mut App) -> Arc<str> {
|
||||
"Search folders…".into()
|
||||
}
|
||||
|
||||
fn update_matches(
|
||||
&mut self,
|
||||
query: String,
|
||||
_window: &mut Window,
|
||||
cx: &mut Context<Picker<Self>>,
|
||||
) -> Task<()> {
|
||||
let Some(workspace) = self.workspace.upgrade() else {
|
||||
return Task::ready(());
|
||||
};
|
||||
|
||||
let search_task = self.search(query, Arc::<AtomicBool>::default(), &workspace, cx);
|
||||
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
let mut paths = search_task.await;
|
||||
let empty_path = Path::new("");
|
||||
paths.retain(|path_match| path_match.path.as_ref() != empty_path);
|
||||
|
||||
this.update(&mut cx, |this, _cx| {
|
||||
this.delegate.matches = paths;
|
||||
})
|
||||
.log_err();
|
||||
})
|
||||
}
|
||||
|
||||
fn confirm(&mut self, _secondary: bool, window: &mut Window, cx: &mut Context<Picker<Self>>) {
|
||||
let Some(mat) = self.matches.get(self.selected_index) else {
|
||||
return;
|
||||
};
|
||||
|
||||
let project_path = ProjectPath {
|
||||
worktree_id: WorktreeId::from_usize(mat.worktree_id),
|
||||
path: mat.path.clone(),
|
||||
};
|
||||
|
||||
let Some(task) = self
|
||||
.context_store
|
||||
.update(cx, |context_store, cx| {
|
||||
context_store.add_directory(project_path, cx)
|
||||
})
|
||||
.ok()
|
||||
else {
|
||||
return;
|
||||
};
|
||||
|
||||
let confirm_behavior = self.confirm_behavior;
|
||||
cx.spawn_in(window, |this, mut cx| async move {
|
||||
match task.await.notify_async_err(&mut cx) {
|
||||
None => anyhow::Ok(()),
|
||||
Some(()) => this.update_in(&mut cx, |this, window, cx| match confirm_behavior {
|
||||
ConfirmBehavior::KeepOpen => {}
|
||||
ConfirmBehavior::Close => this.delegate.dismissed(window, cx),
|
||||
}),
|
||||
}
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
}
|
||||
|
||||
fn dismissed(&mut self, _window: &mut Window, cx: &mut Context<Picker<Self>>) {
|
||||
self.context_picker
|
||||
.update(cx, |_, cx| {
|
||||
cx.emit(DismissEvent);
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
|
||||
fn render_match(
|
||||
&self,
|
||||
ix: usize,
|
||||
selected: bool,
|
||||
_window: &mut Window,
|
||||
cx: &mut Context<Picker<Self>>,
|
||||
) -> Option<Self::ListItem> {
|
||||
let path_match = &self.matches[ix];
|
||||
let directory_name = path_match.path.to_string_lossy().to_string();
|
||||
|
||||
let added = self.context_store.upgrade().map_or(false, |context_store| {
|
||||
context_store
|
||||
.read(cx)
|
||||
.includes_directory(&path_match.path)
|
||||
.is_some()
|
||||
});
|
||||
|
||||
Some(
|
||||
ListItem::new(ix)
|
||||
.inset(true)
|
||||
.toggle_state(selected)
|
||||
.start_slot(
|
||||
Icon::new(IconName::Folder)
|
||||
.size(IconSize::XSmall)
|
||||
.color(Color::Muted),
|
||||
)
|
||||
.child(Label::new(directory_name))
|
||||
.when(added, |el| {
|
||||
el.end_slot(
|
||||
h_flex()
|
||||
.gap_1()
|
||||
.child(
|
||||
Icon::new(IconName::Check)
|
||||
.size(IconSize::Small)
|
||||
.color(Color::Success),
|
||||
)
|
||||
.child(Label::new("Added").size(LabelSize::Small)),
|
||||
)
|
||||
}),
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -167,8 +167,8 @@ impl PickerDelegate for FetchContextPickerDelegate {
|
||||
}
|
||||
}
|
||||
|
||||
fn no_matches_text(&self, _window: &mut Window, _cx: &mut App) -> SharedString {
|
||||
"Enter the URL that you would like to fetch".into()
|
||||
fn no_matches_text(&self, _window: &mut Window, _cx: &mut App) -> Option<SharedString> {
|
||||
Some("Enter the URL that you would like to fetch".into())
|
||||
}
|
||||
|
||||
fn selected_index(&self) -> usize {
|
||||
@@ -206,12 +206,12 @@ impl PickerDelegate for FetchContextPickerDelegate {
|
||||
let http_client = workspace.read(cx).client().http_client().clone();
|
||||
let url = self.url.clone();
|
||||
let confirm_behavior = self.confirm_behavior;
|
||||
cx.spawn_in(window, |this, mut cx| async move {
|
||||
cx.spawn_in(window, async move |this, cx| {
|
||||
let text = cx
|
||||
.background_spawn(Self::build_message(http_client, url.clone()))
|
||||
.await?;
|
||||
|
||||
this.update_in(&mut cx, |this, window, cx| {
|
||||
this.update_in(cx, |this, window, cx| {
|
||||
this.delegate
|
||||
.context_store
|
||||
.update(cx, |context_store, _cx| {
|
||||
|
||||
@@ -99,7 +99,6 @@ impl FileContextPickerDelegate {
|
||||
query: String,
|
||||
cancellation_flag: Arc<AtomicBool>,
|
||||
workspace: &Entity<Workspace>,
|
||||
|
||||
cx: &mut Context<Picker<Self>>,
|
||||
) -> Task<Vec<PathMatch>> {
|
||||
if query.is_empty() {
|
||||
@@ -124,14 +123,14 @@ impl FileContextPickerDelegate {
|
||||
let file_matches = project.worktrees(cx).flat_map(|worktree| {
|
||||
let worktree = worktree.read(cx);
|
||||
let path_prefix: Arc<str> = worktree.root_name().into();
|
||||
worktree.files(false, 0).map(move |entry| PathMatch {
|
||||
worktree.entries(false, 0).map(move |entry| PathMatch {
|
||||
score: 0.,
|
||||
positions: Vec::new(),
|
||||
worktree_id: worktree.id().to_usize(),
|
||||
path: entry.path.clone(),
|
||||
path_prefix: path_prefix.clone(),
|
||||
distance_to_relative_ancestor: 0,
|
||||
is_dir: false,
|
||||
is_dir: entry.is_dir(),
|
||||
})
|
||||
});
|
||||
|
||||
@@ -149,7 +148,7 @@ impl FileContextPickerDelegate {
|
||||
.root_entry()
|
||||
.map_or(false, |entry| entry.is_ignored),
|
||||
include_root_name: true,
|
||||
candidates: project::Candidates::Files,
|
||||
candidates: project::Candidates::Entries,
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
@@ -192,7 +191,7 @@ impl PickerDelegate for FileContextPickerDelegate {
|
||||
}
|
||||
|
||||
fn placeholder_text(&self, _window: &mut Window, _cx: &mut App) -> Arc<str> {
|
||||
"Search files…".into()
|
||||
"Search files & directories…".into()
|
||||
}
|
||||
|
||||
fn update_matches(
|
||||
@@ -207,11 +206,11 @@ impl PickerDelegate for FileContextPickerDelegate {
|
||||
|
||||
let search_task = self.search(query, Arc::<AtomicBool>::default(), &workspace, cx);
|
||||
|
||||
cx.spawn_in(window, |this, mut cx| async move {
|
||||
cx.spawn_in(window, async move |this, cx| {
|
||||
// TODO: This should be probably be run in the background.
|
||||
let paths = search_task.await;
|
||||
|
||||
this.update(&mut cx, |this, _cx| {
|
||||
this.update(cx, |this, _cx| {
|
||||
this.delegate.matches = paths;
|
||||
})
|
||||
.log_err();
|
||||
@@ -223,13 +222,11 @@ impl PickerDelegate for FileContextPickerDelegate {
|
||||
return;
|
||||
};
|
||||
|
||||
let Some(file_name) = mat
|
||||
let file_name = mat
|
||||
.path
|
||||
.file_name()
|
||||
.map(|os_str| os_str.to_string_lossy().into_owned())
|
||||
else {
|
||||
return;
|
||||
};
|
||||
.unwrap_or(mat.path_prefix.to_string());
|
||||
|
||||
let full_path = mat.path.display().to_string();
|
||||
|
||||
@@ -238,6 +235,8 @@ impl PickerDelegate for FileContextPickerDelegate {
|
||||
path: mat.path.clone(),
|
||||
};
|
||||
|
||||
let is_directory = mat.is_dir;
|
||||
|
||||
let Some(editor_entity) = self.editor.upgrade() else {
|
||||
return;
|
||||
};
|
||||
@@ -288,8 +287,12 @@ impl PickerDelegate for FileContextPickerDelegate {
|
||||
|
||||
editor.insert("\n", window, cx); // Needed to end the fold
|
||||
|
||||
let file_icon = FileIcons::get_icon(&Path::new(&full_path), cx)
|
||||
.unwrap_or_else(|| SharedString::new(""));
|
||||
let file_icon = if is_directory {
|
||||
FileIcons::get_folder_icon(false, cx)
|
||||
} else {
|
||||
FileIcons::get_icon(&Path::new(&full_path), cx)
|
||||
}
|
||||
.unwrap_or_else(|| SharedString::new(""));
|
||||
|
||||
let placeholder = FoldPlaceholder {
|
||||
render: render_fold_icon_button(
|
||||
@@ -330,7 +333,11 @@ impl PickerDelegate for FileContextPickerDelegate {
|
||||
let Some(task) = self
|
||||
.context_store
|
||||
.update(cx, |context_store, cx| {
|
||||
context_store.add_file_from_path(project_path, cx)
|
||||
if is_directory {
|
||||
context_store.add_directory(project_path, cx)
|
||||
} else {
|
||||
context_store.add_file_from_path(project_path, cx)
|
||||
}
|
||||
})
|
||||
.ok()
|
||||
else {
|
||||
@@ -338,10 +345,10 @@ impl PickerDelegate for FileContextPickerDelegate {
|
||||
};
|
||||
|
||||
let confirm_behavior = self.confirm_behavior;
|
||||
cx.spawn_in(window, |this, mut cx| async move {
|
||||
match task.await.notify_async_err(&mut cx) {
|
||||
cx.spawn_in(window, async move |this, cx| {
|
||||
match task.await.notify_async_err(cx) {
|
||||
None => anyhow::Ok(()),
|
||||
Some(()) => this.update_in(&mut cx, |this, window, cx| match confirm_behavior {
|
||||
Some(()) => this.update_in(cx, |this, window, cx| match confirm_behavior {
|
||||
ConfirmBehavior::KeepOpen => {}
|
||||
ConfirmBehavior::Close => this.delegate.dismissed(window, cx),
|
||||
}),
|
||||
@@ -375,6 +382,7 @@ impl PickerDelegate for FileContextPickerDelegate {
|
||||
ElementId::NamedInteger("file-ctx-picker".into(), ix),
|
||||
&path_match.path,
|
||||
&path_match.path_prefix,
|
||||
path_match.is_dir,
|
||||
self.context_store.clone(),
|
||||
cx,
|
||||
)),
|
||||
@@ -386,6 +394,7 @@ pub fn render_file_context_entry(
|
||||
id: ElementId,
|
||||
path: &Path,
|
||||
path_prefix: &Arc<str>,
|
||||
is_directory: bool,
|
||||
context_store: WeakEntity<ContextStore>,
|
||||
cx: &App,
|
||||
) -> Stateful<Div> {
|
||||
@@ -409,13 +418,24 @@ pub fn render_file_context_entry(
|
||||
(file_name, Some(directory))
|
||||
};
|
||||
|
||||
let added = context_store
|
||||
.upgrade()
|
||||
.and_then(|context_store| context_store.read(cx).will_include_file_path(path, cx));
|
||||
let added = context_store.upgrade().and_then(|context_store| {
|
||||
if is_directory {
|
||||
context_store
|
||||
.read(cx)
|
||||
.includes_directory(path)
|
||||
.map(FileInclusion::Direct)
|
||||
} else {
|
||||
context_store.read(cx).will_include_file_path(path, cx)
|
||||
}
|
||||
});
|
||||
|
||||
let file_icon = FileIcons::get_icon(&path, cx)
|
||||
.map(Icon::from_path)
|
||||
.unwrap_or_else(|| Icon::new(IconName::File));
|
||||
let file_icon = if is_directory {
|
||||
FileIcons::get_folder_icon(false, cx)
|
||||
} else {
|
||||
FileIcons::get_icon(&path, cx)
|
||||
}
|
||||
.map(Icon::from_path)
|
||||
.unwrap_or_else(|| Icon::new(IconName::File));
|
||||
|
||||
h_flex()
|
||||
.id(id)
|
||||
|
||||
@@ -149,9 +149,9 @@ impl PickerDelegate for ThreadContextPickerDelegate {
|
||||
}
|
||||
});
|
||||
|
||||
cx.spawn_in(window, |this, mut cx| async move {
|
||||
cx.spawn_in(window, async move |this, cx| {
|
||||
let matches = search_task.await;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
this.delegate.matches = matches;
|
||||
this.delegate.selected_index = 0;
|
||||
cx.notify();
|
||||
@@ -171,9 +171,9 @@ impl PickerDelegate for ThreadContextPickerDelegate {
|
||||
|
||||
let open_thread_task = thread_store.update(cx, |this, cx| this.open_thread(&entry.id, cx));
|
||||
|
||||
cx.spawn_in(window, |this, mut cx| async move {
|
||||
cx.spawn_in(window, async move |this, cx| {
|
||||
let thread = open_thread_task.await?;
|
||||
this.update_in(&mut cx, |this, window, cx| {
|
||||
this.update_in(cx, |this, window, cx| {
|
||||
this.delegate
|
||||
.context_store
|
||||
.update(cx, |context_store, cx| context_store.add_thread(thread, cx))
|
||||
@@ -223,13 +223,18 @@ pub fn render_thread_context_entry(
|
||||
h_flex()
|
||||
.gap_1p5()
|
||||
.w_full()
|
||||
.justify_between()
|
||||
.child(
|
||||
Icon::new(IconName::MessageCircle)
|
||||
.size(IconSize::XSmall)
|
||||
.color(Color::Muted),
|
||||
h_flex()
|
||||
.gap_1p5()
|
||||
.max_w_72()
|
||||
.child(
|
||||
Icon::new(IconName::MessageCircle)
|
||||
.size(IconSize::XSmall)
|
||||
.color(Color::Muted),
|
||||
)
|
||||
.child(Label::new(thread.summary.clone()).truncate()),
|
||||
)
|
||||
.child(Label::new(thread.summary.clone()))
|
||||
.child(div().w_full())
|
||||
.when(added, |el| {
|
||||
el.child(
|
||||
h_flex()
|
||||
|
||||
@@ -9,6 +9,7 @@ use language::Buffer;
|
||||
use project::{ProjectPath, Worktree};
|
||||
use rope::Rope;
|
||||
use text::BufferId;
|
||||
use util::maybe;
|
||||
use workspace::Workspace;
|
||||
|
||||
use crate::context::{
|
||||
@@ -74,15 +75,15 @@ impl ContextStore {
|
||||
return Task::ready(Err(anyhow!("failed to read project")));
|
||||
};
|
||||
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
let open_buffer_task = project.update(&mut cx, |project, cx| {
|
||||
cx.spawn(async move |this, cx| {
|
||||
let open_buffer_task = project.update(cx, |project, cx| {
|
||||
project.open_buffer(project_path.clone(), cx)
|
||||
})?;
|
||||
|
||||
let buffer_entity = open_buffer_task.await?;
|
||||
let buffer_id = this.update(&mut cx, |_, cx| buffer_entity.read(cx).remote_id())?;
|
||||
let buffer_id = this.update(cx, |_, cx| buffer_entity.read(cx).remote_id())?;
|
||||
|
||||
let already_included = this.update(&mut cx, |this, _cx| {
|
||||
let already_included = this.update(cx, |this, _cx| {
|
||||
match this.will_include_buffer(buffer_id, &project_path.path) {
|
||||
Some(FileInclusion::Direct(context_id)) => {
|
||||
this.remove_context(context_id);
|
||||
@@ -97,7 +98,7 @@ impl ContextStore {
|
||||
return anyhow::Ok(());
|
||||
}
|
||||
|
||||
let (buffer_info, text_task) = this.update(&mut cx, |_, cx| {
|
||||
let (buffer_info, text_task) = this.update(cx, |_, cx| {
|
||||
let buffer = buffer_entity.read(cx);
|
||||
collect_buffer_info_and_text(
|
||||
project_path.path.clone(),
|
||||
@@ -109,7 +110,7 @@ impl ContextStore {
|
||||
|
||||
let text = text_task.await;
|
||||
|
||||
this.update(&mut cx, |this, _cx| {
|
||||
this.update(cx, |this, _cx| {
|
||||
this.insert_file(make_context_buffer(buffer_info, text));
|
||||
})?;
|
||||
|
||||
@@ -122,8 +123,8 @@ impl ContextStore {
|
||||
buffer_entity: Entity<Buffer>,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Task<Result<()>> {
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
let (buffer_info, text_task) = this.update(&mut cx, |_, cx| {
|
||||
cx.spawn(async move |this, cx| {
|
||||
let (buffer_info, text_task) = this.update(cx, |_, cx| {
|
||||
let buffer = buffer_entity.read(cx);
|
||||
let Some(file) = buffer.file() else {
|
||||
return Err(anyhow!("Buffer has no path."));
|
||||
@@ -138,7 +139,7 @@ impl ContextStore {
|
||||
|
||||
let text = text_task.await;
|
||||
|
||||
this.update(&mut cx, |this, _cx| {
|
||||
this.update(cx, |this, _cx| {
|
||||
this.insert_file(make_context_buffer(buffer_info, text))
|
||||
})?;
|
||||
|
||||
@@ -178,18 +179,18 @@ impl ContextStore {
|
||||
}
|
||||
|
||||
let worktree_id = project_path.worktree_id;
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
let worktree = project.update(&mut cx, |project, cx| {
|
||||
cx.spawn(async move |this, cx| {
|
||||
let worktree = project.update(cx, |project, cx| {
|
||||
project
|
||||
.worktree_for_id(worktree_id, cx)
|
||||
.ok_or_else(|| anyhow!("no worktree found for {worktree_id:?}"))
|
||||
})??;
|
||||
|
||||
let files = worktree.update(&mut cx, |worktree, _cx| {
|
||||
let files = worktree.update(cx, |worktree, _cx| {
|
||||
collect_files_in_path(worktree, &project_path.path)
|
||||
})?;
|
||||
|
||||
let open_buffers_task = project.update(&mut cx, |project, cx| {
|
||||
let open_buffers_task = project.update(cx, |project, cx| {
|
||||
let tasks = files.iter().map(|file_path| {
|
||||
project.open_buffer(
|
||||
ProjectPath {
|
||||
@@ -206,7 +207,7 @@ impl ContextStore {
|
||||
|
||||
let mut buffer_infos = Vec::new();
|
||||
let mut text_tasks = Vec::new();
|
||||
this.update(&mut cx, |_, cx| {
|
||||
this.update(cx, |_, cx| {
|
||||
for (path, buffer_entity) in files.into_iter().zip(buffers) {
|
||||
// Skip all binary files and other non-UTF8 files
|
||||
if let Ok(buffer_entity) = buffer_entity {
|
||||
@@ -235,7 +236,7 @@ impl ContextStore {
|
||||
bail!("No text files found in {}", &project_path.path.display());
|
||||
}
|
||||
|
||||
this.update(&mut cx, |this, _| {
|
||||
this.update(cx, |this, _| {
|
||||
this.insert_directory(&project_path.path, context_buffers);
|
||||
})?;
|
||||
|
||||
@@ -531,35 +532,59 @@ fn collect_files_in_path(worktree: &Worktree, path: &Path) -> Vec<Arc<Path>> {
|
||||
|
||||
pub fn refresh_context_store_text(
|
||||
context_store: Entity<ContextStore>,
|
||||
changed_buffers: &HashSet<Entity<Buffer>>,
|
||||
cx: &App,
|
||||
) -> impl Future<Output = ()> {
|
||||
) -> impl Future<Output = Vec<ContextId>> {
|
||||
let mut tasks = Vec::new();
|
||||
|
||||
for context in &context_store.read(cx).context {
|
||||
match context {
|
||||
AssistantContext::File(file_context) => {
|
||||
let context_store = context_store.clone();
|
||||
if let Some(task) = refresh_file_text(context_store, file_context, cx) {
|
||||
tasks.push(task);
|
||||
let id = context.id();
|
||||
|
||||
let task = maybe!({
|
||||
match context {
|
||||
AssistantContext::File(file_context) => {
|
||||
if changed_buffers.is_empty()
|
||||
|| changed_buffers.contains(&file_context.context_buffer.buffer)
|
||||
{
|
||||
let context_store = context_store.clone();
|
||||
return refresh_file_text(context_store, file_context, cx);
|
||||
}
|
||||
}
|
||||
}
|
||||
AssistantContext::Directory(directory_context) => {
|
||||
let context_store = context_store.clone();
|
||||
if let Some(task) = refresh_directory_text(context_store, directory_context, cx) {
|
||||
tasks.push(task);
|
||||
AssistantContext::Directory(directory_context) => {
|
||||
let should_refresh = changed_buffers.is_empty()
|
||||
|| changed_buffers.iter().any(|buffer| {
|
||||
let buffer = buffer.read(cx);
|
||||
|
||||
buffer_path_log_err(&buffer)
|
||||
.map_or(false, |path| path.starts_with(&directory_context.path))
|
||||
});
|
||||
|
||||
if should_refresh {
|
||||
let context_store = context_store.clone();
|
||||
return refresh_directory_text(context_store, directory_context, cx);
|
||||
}
|
||||
}
|
||||
AssistantContext::Thread(thread_context) => {
|
||||
if changed_buffers.is_empty() {
|
||||
let context_store = context_store.clone();
|
||||
return Some(refresh_thread_text(context_store, thread_context, cx));
|
||||
}
|
||||
}
|
||||
// Intentionally omit refreshing fetched URLs as it doesn't seem all that useful,
|
||||
// and doing the caching properly could be tricky (unless it's already handled by
|
||||
// the HttpClient?).
|
||||
AssistantContext::FetchedUrl(_) => {}
|
||||
}
|
||||
AssistantContext::Thread(thread_context) => {
|
||||
let context_store = context_store.clone();
|
||||
tasks.push(refresh_thread_text(context_store, thread_context, cx));
|
||||
}
|
||||
// Intentionally omit refreshing fetched URLs as it doesn't seem all that useful,
|
||||
// and doing the caching properly could be tricky (unless it's already handled by
|
||||
// the HttpClient?).
|
||||
AssistantContext::FetchedUrl(_) => {}
|
||||
|
||||
None
|
||||
});
|
||||
|
||||
if let Some(task) = task {
|
||||
tasks.push(task.map(move |_| id));
|
||||
}
|
||||
}
|
||||
|
||||
future::join_all(tasks).map(|_| ())
|
||||
future::join_all(tasks)
|
||||
}
|
||||
|
||||
fn refresh_file_text(
|
||||
@@ -570,10 +595,10 @@ fn refresh_file_text(
|
||||
let id = file_context.id;
|
||||
let task = refresh_context_buffer(&file_context.context_buffer, cx);
|
||||
if let Some(task) = task {
|
||||
Some(cx.spawn(|mut cx| async move {
|
||||
Some(cx.spawn(async move |cx| {
|
||||
let context_buffer = task.await;
|
||||
context_store
|
||||
.update(&mut cx, |context_store, _| {
|
||||
.update(cx, |context_store, _| {
|
||||
let new_file_context = FileContext { id, context_buffer };
|
||||
context_store.replace_context(AssistantContext::File(new_file_context));
|
||||
})
|
||||
@@ -611,10 +636,10 @@ fn refresh_directory_text(
|
||||
|
||||
let id = directory_context.snapshot.id;
|
||||
let path = directory_context.path.clone();
|
||||
Some(cx.spawn(|mut cx| async move {
|
||||
Some(cx.spawn(async move |cx| {
|
||||
let context_buffers = context_buffers.await;
|
||||
context_store
|
||||
.update(&mut cx, |context_store, _| {
|
||||
.update(cx, |context_store, _| {
|
||||
let new_directory_context = DirectoryContext::new(id, &path, context_buffers);
|
||||
context_store.replace_context(AssistantContext::Directory(new_directory_context));
|
||||
})
|
||||
@@ -629,9 +654,9 @@ fn refresh_thread_text(
|
||||
) -> Task<()> {
|
||||
let id = thread_context.id;
|
||||
let thread = thread_context.thread.clone();
|
||||
cx.spawn(move |mut cx| async move {
|
||||
cx.spawn(async move |cx| {
|
||||
context_store
|
||||
.update(&mut cx, |context_store, cx| {
|
||||
.update(cx, |context_store, cx| {
|
||||
let text = thread.read(cx).text().into();
|
||||
context_store.replace_context(AssistantContext::Thread(ThreadContext {
|
||||
id,
|
||||
|
||||
@@ -25,7 +25,7 @@ use crate::{
|
||||
|
||||
pub struct ContextStrip {
|
||||
context_store: Entity<ContextStore>,
|
||||
pub context_picker: Entity<ContextPicker>,
|
||||
context_picker: Entity<ContextPicker>,
|
||||
context_picker_menu_handle: PopoverMenuHandle<ContextPicker>,
|
||||
focus_handle: FocusHandle,
|
||||
suggest_context_kind: SuggestContextKind,
|
||||
@@ -36,7 +36,6 @@ pub struct ContextStrip {
|
||||
}
|
||||
|
||||
impl ContextStrip {
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn new(
|
||||
context_store: Entity<ContextStore>,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
@@ -336,12 +335,12 @@ impl ContextStrip {
|
||||
context_store.accept_suggested_context(&suggested, cx)
|
||||
});
|
||||
|
||||
cx.spawn_in(window, |this, mut cx| async move {
|
||||
match task.await.notify_async_err(&mut cx) {
|
||||
cx.spawn_in(window, async move |this, cx| {
|
||||
match task.await.notify_async_err(cx) {
|
||||
None => {}
|
||||
Some(()) => {
|
||||
if let Some(this) = this.upgrade() {
|
||||
this.update(&mut cx, |_, cx| cx.notify())?;
|
||||
this.update(cx, |_, cx| cx.notify())?;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,10 +2,10 @@ use assistant_context_editor::SavedContextMetadata;
|
||||
use chrono::{DateTime, Utc};
|
||||
use gpui::{prelude::*, Entity};
|
||||
|
||||
use crate::thread_store::{SavedThreadMetadata, ThreadStore};
|
||||
use crate::thread_store::{SerializedThreadMetadata, ThreadStore};
|
||||
|
||||
pub enum HistoryEntry {
|
||||
Thread(SavedThreadMetadata),
|
||||
Thread(SerializedThreadMetadata),
|
||||
Context(SavedContextMetadata),
|
||||
}
|
||||
|
||||
|
||||
@@ -276,7 +276,7 @@ impl InlineAssistant {
|
||||
if is_authenticated() {
|
||||
handle_assist(window, cx);
|
||||
} else {
|
||||
cx.spawn_in(window, |_workspace, mut cx| async move {
|
||||
cx.spawn_in(window, async move |_workspace, cx| {
|
||||
let Some(task) = cx.update(|_, cx| {
|
||||
LanguageModelRegistry::read_global(cx)
|
||||
.active_provider()
|
||||
@@ -480,7 +480,6 @@ impl InlineAssistant {
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn suggest_assist(
|
||||
&mut self,
|
||||
editor: &Entity<Editor>,
|
||||
@@ -1342,7 +1341,7 @@ impl InlineAssistant {
|
||||
});
|
||||
|
||||
enum DeletedLines {}
|
||||
let mut editor = Editor::for_multibuffer(multi_buffer, None, true, window, cx);
|
||||
let mut editor = Editor::for_multibuffer(multi_buffer, None, window, cx);
|
||||
editor.set_soft_wrap_mode(language::language_settings::SoftWrap::None, cx);
|
||||
editor.set_show_wrap_guides(false, cx);
|
||||
editor.set_show_gutter(false, cx);
|
||||
@@ -1451,16 +1450,15 @@ struct InlineAssistScrollLock {
|
||||
}
|
||||
|
||||
impl EditorInlineAssists {
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn new(editor: &Entity<Editor>, window: &mut Window, cx: &mut App) -> Self {
|
||||
let (highlight_updates_tx, mut highlight_updates_rx) = async_watch::channel(());
|
||||
Self {
|
||||
assist_ids: Vec::new(),
|
||||
scroll_lock: None,
|
||||
highlight_updates: highlight_updates_tx,
|
||||
_update_highlights: cx.spawn(|cx| {
|
||||
_update_highlights: cx.spawn({
|
||||
let editor = editor.downgrade();
|
||||
async move {
|
||||
async move |cx| {
|
||||
while let Ok(()) = highlight_updates_rx.changed().await {
|
||||
let editor = editor.upgrade().context("editor was dropped")?;
|
||||
cx.update_global(|assistant: &mut InlineAssistant, cx| {
|
||||
@@ -1563,7 +1561,6 @@ pub struct InlineAssist {
|
||||
}
|
||||
|
||||
impl InlineAssist {
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn new(
|
||||
assist_id: InlineAssistId,
|
||||
group_id: InlineAssistGroupId,
|
||||
@@ -1732,6 +1729,7 @@ impl CodeActionProvider for AssistantCodeActionProvider {
|
||||
title: "Fix with Assistant".into(),
|
||||
..Default::default()
|
||||
})),
|
||||
resolved: true,
|
||||
}]))
|
||||
} else {
|
||||
Task::ready(Ok(Vec::new()))
|
||||
@@ -1750,10 +1748,10 @@ impl CodeActionProvider for AssistantCodeActionProvider {
|
||||
let editor = self.editor.clone();
|
||||
let workspace = self.workspace.clone();
|
||||
let thread_store = self.thread_store.clone();
|
||||
window.spawn(cx, |mut cx| async move {
|
||||
window.spawn(cx, async move |cx| {
|
||||
let editor = editor.upgrade().context("editor was released")?;
|
||||
let range = editor
|
||||
.update(&mut cx, |editor, cx| {
|
||||
.update(cx, |editor, cx| {
|
||||
editor.buffer().update(cx, |multibuffer, cx| {
|
||||
let buffer = buffer.read(cx);
|
||||
let multibuffer_snapshot = multibuffer.read(cx);
|
||||
|
||||
@@ -816,7 +816,6 @@ impl InlineAssistId {
|
||||
}
|
||||
|
||||
impl PromptEditor<BufferCodegen> {
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn new_buffer(
|
||||
id: InlineAssistId,
|
||||
gutter_dimensions: Arc<Mutex<GutterDimensions>>,
|
||||
@@ -844,7 +843,6 @@ impl PromptEditor<BufferCodegen> {
|
||||
},
|
||||
prompt_buffer,
|
||||
None,
|
||||
false,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
@@ -976,7 +974,6 @@ impl TerminalInlineAssistId {
|
||||
}
|
||||
|
||||
impl PromptEditor<TerminalCodegen> {
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn new_terminal(
|
||||
id: TerminalInlineAssistId,
|
||||
prompt_history: VecDeque<String>,
|
||||
@@ -1003,7 +1000,6 @@ impl PromptEditor<TerminalCodegen> {
|
||||
},
|
||||
prompt_buffer,
|
||||
None,
|
||||
false,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
|
||||
@@ -1,25 +1,30 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use collections::HashSet;
|
||||
use editor::actions::MoveUp;
|
||||
use editor::{Editor, EditorElement, EditorEvent, EditorStyle};
|
||||
use fs::Fs;
|
||||
use git::ExpandCommitEditor;
|
||||
use git_ui::git_panel;
|
||||
use gpui::{
|
||||
Animation, AnimationExt, App, DismissEvent, Entity, Focusable, Subscription, TextStyle,
|
||||
WeakEntity,
|
||||
};
|
||||
use language_model::LanguageModelRegistry;
|
||||
use language_model_selector::ToggleModelSelector;
|
||||
use project::Project;
|
||||
use rope::Point;
|
||||
use settings::Settings;
|
||||
use std::time::Duration;
|
||||
use text::Bias;
|
||||
use theme::ThemeSettings;
|
||||
use ui::{
|
||||
prelude::*, ButtonLike, KeyBinding, PlatformStyle, PopoverMenu, PopoverMenuHandle, Switch,
|
||||
Tooltip,
|
||||
prelude::*, ButtonLike, KeyBinding, PlatformStyle, PopoverMenu, PopoverMenuHandle, Tooltip,
|
||||
};
|
||||
use util::ResultExt;
|
||||
use vim_mode_setting::VimModeSetting;
|
||||
use workspace::Workspace;
|
||||
use workspace::notifications::{NotificationId, NotifyTaskExt};
|
||||
use workspace::{Toast, Workspace};
|
||||
|
||||
use crate::assistant_model_selector::AssistantModelSelector;
|
||||
use crate::context_picker::{ConfirmBehavior, ContextPicker};
|
||||
@@ -27,18 +32,21 @@ use crate::context_store::{refresh_context_store_text, ContextStore};
|
||||
use crate::context_strip::{ContextStrip, ContextStripEvent, SuggestContextKind};
|
||||
use crate::thread::{RequestKind, Thread};
|
||||
use crate::thread_store::ThreadStore;
|
||||
use crate::tool_selector::ToolSelector;
|
||||
use crate::{Chat, ChatMode, RemoveAllContext, ToggleContextPicker};
|
||||
|
||||
pub struct MessageEditor {
|
||||
thread: Entity<Thread>,
|
||||
editor: Entity<Editor>,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
project: Entity<Project>,
|
||||
context_store: Entity<ContextStore>,
|
||||
context_strip: Entity<ContextStrip>,
|
||||
context_picker_menu_handle: PopoverMenuHandle<ContextPicker>,
|
||||
inline_context_picker: Entity<ContextPicker>,
|
||||
inline_context_picker_menu_handle: PopoverMenuHandle<ContextPicker>,
|
||||
model_selector: Entity<AssistantModelSelector>,
|
||||
use_tools: bool,
|
||||
tool_selector: Entity<ToolSelector>,
|
||||
_subscriptions: Vec<Subscription>,
|
||||
}
|
||||
|
||||
@@ -46,12 +54,13 @@ impl MessageEditor {
|
||||
pub fn new(
|
||||
fs: Arc<dyn Fs>,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
context_store: Entity<ContextStore>,
|
||||
thread_store: WeakEntity<ThreadStore>,
|
||||
thread: Entity<Thread>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
let context_store = cx.new(|_cx| ContextStore::new(workspace.clone()));
|
||||
let tools = thread.read(cx).tools().clone();
|
||||
let context_picker_menu_handle = PopoverMenuHandle::default();
|
||||
let inline_context_picker_menu_handle = PopoverMenuHandle::default();
|
||||
let model_selector_menu_handle = PopoverMenuHandle::default();
|
||||
@@ -100,8 +109,10 @@ impl MessageEditor {
|
||||
];
|
||||
|
||||
Self {
|
||||
thread,
|
||||
editor: editor.clone(),
|
||||
project: thread.read(cx).project().clone(),
|
||||
thread,
|
||||
workspace,
|
||||
context_store,
|
||||
context_strip,
|
||||
context_picker_menu_handle,
|
||||
@@ -116,13 +127,12 @@ impl MessageEditor {
|
||||
cx,
|
||||
)
|
||||
}),
|
||||
use_tools: false,
|
||||
tool_selector: cx.new(|cx| ToolSelector::new(tools, cx)),
|
||||
_subscriptions: subscriptions,
|
||||
}
|
||||
}
|
||||
|
||||
fn toggle_chat_mode(&mut self, _: &ChatMode, _window: &mut Window, cx: &mut Context<Self>) {
|
||||
self.use_tools = !self.use_tools;
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
@@ -146,6 +156,14 @@ impl MessageEditor {
|
||||
}
|
||||
|
||||
fn chat(&mut self, _: &Chat, window: &mut Window, cx: &mut Context<Self>) {
|
||||
if self.is_editor_empty(cx) {
|
||||
return;
|
||||
}
|
||||
|
||||
if self.thread.read(cx).is_generating() {
|
||||
return;
|
||||
}
|
||||
|
||||
self.send_to_model(RequestKind::Chat, window, cx);
|
||||
}
|
||||
|
||||
@@ -185,18 +203,21 @@ impl MessageEditor {
text
});

let refresh_task = refresh_context_store_text(self.context_store.clone(), cx);
let refresh_task =
refresh_context_store_text(self.context_store.clone(), &HashSet::default(), cx);

let thread = self.thread.clone();
let context_store = self.context_store.clone();
let use_tools = self.use_tools;
cx.spawn(move |_, mut cx| async move {
let git_store = self.project.read(cx).git_store();
let checkpoint = git_store.read(cx).checkpoint(cx);
cx.spawn(async move |_, cx| {
refresh_task.await;
let checkpoint = checkpoint.await.log_err();
thread
.update(&mut cx, |thread, cx| {
.update(cx, |thread, cx| {
let context = context_store.read(cx).snapshot(cx).collect::<Vec<_>>();
thread.insert_user_message(user_message, context, cx);
thread.send_to_model(model, request_kind, use_tools, cx);
thread.insert_user_message(user_message, context, checkpoint, cx);
thread.send_to_model(model, request_kind, cx);
})
.ok();
})
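Most of the remaining churn in this file (and in the thread-store and context hunks below) is the same mechanical migration: `cx.spawn` now takes gpui's async-closure form, so the context arrives as a closure parameter instead of being captured as `mut cx`. A condensed before/after sketch, stitched together from the `reload` lines that appear verbatim in the thread-store hunks further down:

// Old form: the async block owns the context handle, so every entity call
// has to thread `&mut cx` through explicitly.
cx.spawn(|this, mut cx| async move {
    this.update(&mut cx, |this, cx| this.reload(cx))?.await
})

// New form: `async move |this, cx|` receives the context as a parameter,
// and call sites pass `cx` directly.
cx.spawn(async move |this, cx| {
    this.update(cx, |this, cx| this.reload(cx))?.await
})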
@@ -269,6 +290,34 @@ impl MessageEditor {
|
||||
self.context_strip.focus_handle(cx).focus(window);
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_feedback_click(
|
||||
&mut self,
|
||||
is_positive: bool,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let workspace = self.workspace.clone();
|
||||
let report = self
|
||||
.thread
|
||||
.update(cx, |thread, cx| thread.report_feedback(is_positive, cx));
|
||||
|
||||
cx.spawn(async move |_, cx| {
|
||||
report.await?;
|
||||
workspace.update(cx, |workspace, cx| {
|
||||
let message = if is_positive {
|
||||
"Positive feedback recorded. Thank you!"
|
||||
} else {
|
||||
"Negative feedback recorded. Thank you for helping us improve!"
|
||||
};
|
||||
|
||||
struct ThreadFeedback;
|
||||
let id = NotificationId::unique::<ThreadFeedback>();
|
||||
workspace.show_toast(Toast::new(id, message).autohide(), cx)
|
||||
})
|
||||
})
|
||||
.detach_and_notify_err(window, cx);
|
||||
}
|
||||
}
|
||||
|
||||
impl Focusable for MessageEditor {
|
||||
@@ -284,7 +333,7 @@ impl Render for MessageEditor {
|
||||
let focus_handle = self.editor.focus_handle(cx);
|
||||
let inline_context_picker = self.inline_context_picker.clone();
|
||||
let bg_color = cx.theme().colors().editor_background;
|
||||
let is_streaming_completion = self.thread.read(cx).is_streaming();
|
||||
let is_generating = self.thread.read(cx).is_generating();
|
||||
let is_model_selected = self.is_model_selected(cx);
|
||||
let is_editor_empty = self.is_editor_empty(cx);
|
||||
let submit_label_color = if is_editor_empty {
|
||||
@@ -303,9 +352,16 @@ impl Render for MessageEditor {
|
||||
px(64.)
|
||||
};
|
||||
|
||||
let project = self.thread.read(cx).project();
|
||||
let changed_files = if let Some(repository) = project.read(cx).active_repository(cx) {
|
||||
repository.read(cx).status().count()
|
||||
} else {
|
||||
0
|
||||
};
|
||||
|
||||
v_flex()
|
||||
.size_full()
|
||||
.when(is_streaming_completion, |parent| {
|
||||
.when(is_generating, |parent| {
|
||||
let focus_handle = self.editor.focus_handle(cx).clone();
|
||||
parent.child(
|
||||
h_flex().py_3().w_full().justify_center().child(
|
||||
@@ -363,6 +419,73 @@ impl Render for MessageEditor {
|
||||
),
|
||||
)
|
||||
})
|
||||
.when(changed_files > 0, |parent| {
|
||||
parent.child(
|
||||
v_flex()
|
||||
.mx_2()
|
||||
.bg(cx.theme().colors().element_background)
|
||||
.border_1()
|
||||
.border_b_0()
|
||||
.border_color(cx.theme().colors().border)
|
||||
.rounded_t_md()
|
||||
.child(
|
||||
h_flex()
|
||||
.justify_between()
|
||||
.p_2()
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_2()
|
||||
.child(
|
||||
IconButton::new(
|
||||
"edits-disclosure",
|
||||
IconName::GitBranchSmall,
|
||||
)
|
||||
.icon_size(IconSize::Small)
|
||||
.on_click(
|
||||
|_ev, _window, cx| {
|
||||
cx.defer(|cx| {
|
||||
cx.dispatch_action(&git_panel::ToggleFocus)
|
||||
});
|
||||
},
|
||||
),
|
||||
)
|
||||
.child(
|
||||
Label::new(format!(
|
||||
"{} {} changed",
|
||||
changed_files,
|
||||
if changed_files == 1 { "file" } else { "files" }
|
||||
))
|
||||
.size(LabelSize::XSmall)
|
||||
.color(Color::Muted),
|
||||
),
|
||||
)
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_2()
|
||||
.child(
|
||||
Button::new("review", "Review")
|
||||
.label_size(LabelSize::XSmall)
|
||||
.on_click(|_event, _window, cx| {
|
||||
cx.defer(|cx| {
|
||||
cx.dispatch_action(
|
||||
&git_ui::project_diff::Diff,
|
||||
);
|
||||
});
|
||||
}),
|
||||
)
|
||||
.child(
|
||||
Button::new("commit", "Commit")
|
||||
.label_size(LabelSize::XSmall)
|
||||
.on_click(|_event, _window, cx| {
|
||||
cx.defer(|cx| {
|
||||
cx.dispatch_action(&ExpandCommitEditor)
|
||||
});
|
||||
}),
|
||||
),
|
||||
),
|
||||
),
|
||||
)
|
||||
})
|
||||
.child(
|
||||
v_flex()
|
||||
.key_context("MessageEditor")
|
||||
@@ -380,7 +503,45 @@ impl Render for MessageEditor {
|
||||
.bg(bg_color)
|
||||
.border_t_1()
|
||||
.border_color(cx.theme().colors().border)
|
||||
.child(self.context_strip.clone())
|
||||
.child(
|
||||
h_flex()
|
||||
.justify_between()
|
||||
.child(self.context_strip.clone())
|
||||
.when(!self.thread.read(cx).is_empty(), |this| {
|
||||
this.child(
|
||||
h_flex()
|
||||
.gap_2()
|
||||
.child(
|
||||
IconButton::new(
|
||||
"feedback-thumbs-up",
|
||||
IconName::ThumbsUp,
|
||||
)
|
||||
.style(ButtonStyle::Subtle)
|
||||
.icon_size(IconSize::Small)
|
||||
.tooltip(Tooltip::text("Helpful"))
|
||||
.on_click(
|
||||
cx.listener(|this, _, window, cx| {
|
||||
this.handle_feedback_click(true, window, cx);
|
||||
}),
|
||||
),
|
||||
)
|
||||
.child(
|
||||
IconButton::new(
|
||||
"feedback-thumbs-down",
|
||||
IconName::ThumbsDown,
|
||||
)
|
||||
.style(ButtonStyle::Subtle)
|
||||
.icon_size(IconSize::Small)
|
||||
.tooltip(Tooltip::text("Not Helpful"))
|
||||
.on_click(
|
||||
cx.listener(|this, _, window, cx| {
|
||||
this.handle_feedback_click(false, window, cx);
|
||||
}),
|
||||
),
|
||||
),
|
||||
)
|
||||
}),
|
||||
)
|
||||
.child(
|
||||
v_flex()
|
||||
.gap_5()
|
||||
@@ -428,25 +589,7 @@ impl Render for MessageEditor {
|
||||
.child(
|
||||
h_flex()
|
||||
.justify_between()
|
||||
.child(
|
||||
Switch::new("use-tools", self.use_tools.into())
|
||||
.label("Tools")
|
||||
.on_click(cx.listener(
|
||||
|this, selection, _window, _cx| {
|
||||
this.use_tools = match selection {
|
||||
ToggleState::Selected => true,
|
||||
ToggleState::Unselected
|
||||
| ToggleState::Indeterminate => false,
|
||||
};
|
||||
},
|
||||
))
|
||||
.key_binding(KeyBinding::for_action_in(
|
||||
&ChatMode,
|
||||
&focus_handle,
|
||||
window,
|
||||
cx,
|
||||
)),
|
||||
)
|
||||
.child(h_flex().gap_2().child(self.tool_selector.clone()))
|
||||
.child(
|
||||
h_flex().gap_1().child(self.model_selector.clone()).child(
|
||||
ButtonLike::new("submit-message")
|
||||
@@ -455,7 +598,7 @@ impl Render for MessageEditor {
|
||||
.disabled(
|
||||
is_editor_empty
|
||||
|| !is_model_selected
|
||||
|| is_streaming_completion,
|
||||
|| is_generating,
|
||||
)
|
||||
.child(
|
||||
h_flex()
|
||||
@@ -490,7 +633,7 @@ impl Render for MessageEditor {
|
||||
"Type a message to submit",
|
||||
))
|
||||
})
|
||||
.when(is_streaming_completion, |button| {
|
||||
.when(is_generating, |button| {
|
||||
button.tooltip(Tooltip::text(
|
||||
"Cancel to submit a new message",
|
||||
))
|
||||
|
||||
@@ -40,7 +40,7 @@ impl TerminalCodegen {
|
||||
let telemetry = self.telemetry.clone();
|
||||
self.status = CodegenStatus::Pending;
|
||||
self.transaction = Some(TerminalTransaction::start(self.terminal.clone()));
|
||||
self.generation = cx.spawn(|this, mut cx| async move {
|
||||
self.generation = cx.spawn(async move |this, cx| {
|
||||
let model_telemetry_id = model.telemetry_id();
|
||||
let model_provider_id = model.provider_id();
|
||||
let response = model.stream_completion_text(prompt, &cx).await;
|
||||
@@ -97,12 +97,12 @@ impl TerminalCodegen {
|
||||
}
|
||||
});
|
||||
|
||||
this.update(&mut cx, |this, _| {
|
||||
this.update(cx, |this, _| {
|
||||
this.message_id = message_id;
|
||||
})?;
|
||||
|
||||
while let Some(hunk) = hunks_rx.next().await {
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
if let Some(transaction) = &mut this.transaction {
|
||||
transaction.push(hunk, cx);
|
||||
cx.notify();
|
||||
@@ -116,7 +116,7 @@ impl TerminalCodegen {
|
||||
|
||||
let result = generate.await;
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
if let Err(error) = result {
|
||||
this.status = CodegenStatus::Error(error);
|
||||
} else {
|
||||
|
||||
@@ -7,7 +7,7 @@ use time::{OffsetDateTime, UtcOffset};
|
||||
use ui::{prelude::*, IconButtonShape, ListItem, ListItemSpacing, Tooltip};
|
||||
|
||||
use crate::history_store::{HistoryEntry, HistoryStore};
|
||||
use crate::thread_store::SavedThreadMetadata;
|
||||
use crate::thread_store::SerializedThreadMetadata;
|
||||
use crate::{AssistantPanel, RemoveSelectedThread};
|
||||
|
||||
pub struct ThreadHistory {
|
||||
@@ -221,14 +221,14 @@ impl Render for ThreadHistory {
|
||||
|
||||
#[derive(IntoElement)]
|
||||
pub struct PastThread {
|
||||
thread: SavedThreadMetadata,
|
||||
thread: SerializedThreadMetadata,
|
||||
assistant_panel: WeakEntity<AssistantPanel>,
|
||||
selected: bool,
|
||||
}
|
||||
|
||||
impl PastThread {
|
||||
pub fn new(
|
||||
thread: SavedThreadMetadata,
|
||||
thread: SerializedThreadMetadata,
|
||||
assistant_panel: WeakEntity<AssistantPanel>,
|
||||
selected: bool,
|
||||
) -> Self {
|
||||
|
||||
@@ -16,10 +16,11 @@ use heed::types::{SerdeBincode, SerdeJson};
|
||||
use heed::Database;
|
||||
use language_model::{LanguageModelToolUseId, Role};
|
||||
use project::Project;
|
||||
use prompt_store::PromptBuilder;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use util::ResultExt as _;
|
||||
|
||||
use crate::thread::{MessageId, Thread, ThreadId};
|
||||
use crate::thread::{MessageId, ProjectSnapshot, Thread, ThreadId};
|
||||
|
||||
pub fn init(cx: &mut App) {
|
||||
ThreadsDatabase::init(cx);
|
||||
@@ -28,15 +29,17 @@ pub fn init(cx: &mut App) {
|
||||
pub struct ThreadStore {
|
||||
project: Entity<Project>,
|
||||
tools: Arc<ToolWorkingSet>,
|
||||
prompt_builder: Arc<PromptBuilder>,
|
||||
context_server_manager: Entity<ContextServerManager>,
|
||||
context_server_tool_ids: HashMap<Arc<str>, Vec<ToolId>>,
|
||||
threads: Vec<SavedThreadMetadata>,
|
||||
threads: Vec<SerializedThreadMetadata>,
|
||||
}
|
||||
|
||||
impl ThreadStore {
|
||||
pub fn new(
|
||||
project: Entity<Project>,
|
||||
tools: Arc<ToolWorkingSet>,
|
||||
prompt_builder: Arc<PromptBuilder>,
|
||||
cx: &mut App,
|
||||
) -> Result<Entity<Self>> {
|
||||
let this = cx.new(|cx| {
|
||||
@@ -48,6 +51,7 @@ impl ThreadStore {
|
||||
let this = Self {
|
||||
project,
|
||||
tools,
|
||||
prompt_builder,
|
||||
context_server_manager,
|
||||
context_server_tool_ids: HashMap::default(),
|
||||
threads: Vec::new(),
|
||||
@@ -61,23 +65,38 @@ impl ThreadStore {
|
||||
Ok(this)
|
||||
}
|
||||
|
||||
pub fn context_server_manager(&self) -> Entity<ContextServerManager> {
|
||||
self.context_server_manager.clone()
|
||||
}
|
||||
|
||||
pub fn tools(&self) -> Arc<ToolWorkingSet> {
|
||||
self.tools.clone()
|
||||
}
|
||||
|
||||
/// Returns the number of threads.
|
||||
pub fn thread_count(&self) -> usize {
|
||||
self.threads.len()
|
||||
}
|
||||
|
||||
pub fn threads(&self) -> Vec<SavedThreadMetadata> {
|
||||
pub fn threads(&self) -> Vec<SerializedThreadMetadata> {
|
||||
let mut threads = self.threads.iter().cloned().collect::<Vec<_>>();
|
||||
threads.sort_unstable_by_key(|thread| std::cmp::Reverse(thread.updated_at));
|
||||
threads
|
||||
}
|
||||
|
||||
pub fn recent_threads(&self, limit: usize) -> Vec<SavedThreadMetadata> {
|
||||
pub fn recent_threads(&self, limit: usize) -> Vec<SerializedThreadMetadata> {
|
||||
self.threads().into_iter().take(limit).collect()
|
||||
}
|
||||
|
||||
pub fn create_thread(&mut self, cx: &mut Context<Self>) -> Entity<Thread> {
|
||||
cx.new(|cx| Thread::new(self.project.clone(), self.tools.clone(), cx))
|
||||
cx.new(|cx| {
|
||||
Thread::new(
|
||||
self.project.clone(),
|
||||
self.tools.clone(),
|
||||
self.prompt_builder.clone(),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
pub fn open_thread(
|
||||
@@ -87,20 +106,21 @@ impl ThreadStore {
|
||||
) -> Task<Result<Entity<Thread>>> {
|
||||
let id = id.clone();
|
||||
let database_future = ThreadsDatabase::global_future(cx);
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
cx.spawn(async move |this, cx| {
|
||||
let database = database_future.await.map_err(|err| anyhow!(err))?;
|
||||
let thread = database
|
||||
.try_find_thread(id.clone())
|
||||
.await?
|
||||
.ok_or_else(|| anyhow!("no thread found with ID: {id:?}"))?;
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
cx.new(|cx| {
|
||||
Thread::from_saved(
|
||||
Thread::deserialize(
|
||||
id.clone(),
|
||||
thread,
|
||||
this.project.clone(),
|
||||
this.tools.clone(),
|
||||
this.prompt_builder.clone(),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
@@ -109,59 +129,27 @@ impl ThreadStore {
|
||||
}
|
||||
|
||||
pub fn save_thread(&self, thread: &Entity<Thread>, cx: &mut Context<Self>) -> Task<Result<()>> {
|
||||
let (metadata, thread) = thread.update(cx, |thread, _cx| {
|
||||
let id = thread.id().clone();
|
||||
let thread = SavedThread {
|
||||
summary: thread.summary_or_default(),
|
||||
updated_at: thread.updated_at(),
|
||||
messages: thread
|
||||
.messages()
|
||||
.map(|message| SavedMessage {
|
||||
id: message.id,
|
||||
role: message.role,
|
||||
text: message.text.clone(),
|
||||
tool_uses: thread
|
||||
.tool_uses_for_message(message.id)
|
||||
.into_iter()
|
||||
.map(|tool_use| SavedToolUse {
|
||||
id: tool_use.id,
|
||||
name: tool_use.name,
|
||||
input: tool_use.input,
|
||||
})
|
||||
.collect(),
|
||||
tool_results: thread
|
||||
.tool_results_for_message(message.id)
|
||||
.into_iter()
|
||||
.map(|tool_result| SavedToolResult {
|
||||
tool_use_id: tool_result.tool_use_id.clone(),
|
||||
is_error: tool_result.is_error,
|
||||
content: tool_result.content.clone(),
|
||||
})
|
||||
.collect(),
|
||||
})
|
||||
.collect(),
|
||||
};
|
||||
|
||||
(id, thread)
|
||||
});
|
||||
let (metadata, serialized_thread) =
|
||||
thread.update(cx, |thread, cx| (thread.id().clone(), thread.serialize(cx)));
|
||||
|
||||
let database_future = ThreadsDatabase::global_future(cx);
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
cx.spawn(async move |this, cx| {
|
||||
let serialized_thread = serialized_thread.await?;
|
||||
let database = database_future.await.map_err(|err| anyhow!(err))?;
|
||||
database.save_thread(metadata, thread).await?;
|
||||
database.save_thread(metadata, serialized_thread).await?;
|
||||
|
||||
this.update(&mut cx, |this, cx| this.reload(cx))?.await
|
||||
this.update(cx, |this, cx| this.reload(cx))?.await
|
||||
})
|
||||
}
|
||||
|
||||
pub fn delete_thread(&mut self, id: &ThreadId, cx: &mut Context<Self>) -> Task<Result<()>> {
|
||||
let id = id.clone();
|
||||
let database_future = ThreadsDatabase::global_future(cx);
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
cx.spawn(async move |this, cx| {
|
||||
let database = database_future.await.map_err(|err| anyhow!(err))?;
|
||||
database.delete_thread(id.clone()).await?;
|
||||
|
||||
this.update(&mut cx, |this, _cx| {
|
||||
this.update(cx, |this, _cx| {
|
||||
this.threads.retain(|thread| thread.id != id)
|
||||
})
|
||||
})
|
||||
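The `save_thread` and `open_thread` hunks above replace the hand-built `SavedThread` with `Thread::serialize` and `Thread::deserialize`. A schematic of the round trip as these hunks use it (the method names come from the hunks; the free variables stand in for the store's fields, and the glue and error handling are illustrative only):

// Persist: the thread serializes itself; the result is stored under its id.
let (id, serialized) =
    thread.update(cx, |thread, cx| (thread.id().clone(), thread.serialize(cx)));
let serialized = serialized.await?;
database.save_thread(id.clone(), serialized).await?;

// Restore: look the row up by id and rebuild a live Thread entity from it
// (in the real code this happens inside a ThreadStore update).
let row = database
    .try_find_thread(id.clone())
    .await?
    .ok_or_else(|| anyhow!("no thread found with ID: {id:?}"))?;
let thread = cx.new(|cx| {
    Thread::deserialize(id, row, project.clone(), tools.clone(), prompt_builder.clone(), cx)
});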
@@ -169,14 +157,14 @@ impl ThreadStore {
|
||||
|
||||
pub fn reload(&self, cx: &mut Context<Self>) -> Task<Result<()>> {
|
||||
let database_future = ThreadsDatabase::global_future(cx);
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
cx.spawn(async move |this, cx| {
|
||||
let threads = database_future
|
||||
.await
|
||||
.map_err(|err| anyhow!(err))?
|
||||
.list_threads()
|
||||
.await?;
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
this.threads = threads;
|
||||
cx.notify();
|
||||
})
|
||||
@@ -205,7 +193,7 @@ impl ThreadStore {
|
||||
cx.spawn({
|
||||
let server = server.clone();
|
||||
let server_id = server_id.clone();
|
||||
|this, mut cx| async move {
|
||||
async move |this, cx| {
|
||||
let Some(protocol) = server.client() else {
|
||||
return;
|
||||
};
|
||||
@@ -230,7 +218,7 @@ impl ThreadStore {
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
this.update(&mut cx, |this, _cx| {
|
||||
this.update(cx, |this, _cx| {
|
||||
this.context_server_tool_ids.insert(server_id, tool_ids);
|
||||
})
|
||||
.log_err();
|
||||
@@ -251,39 +239,41 @@ impl ThreadStore {
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SavedThreadMetadata {
pub struct SerializedThreadMetadata {
pub id: ThreadId,
pub summary: SharedString,
pub updated_at: DateTime<Utc>,
}

#[derive(Serialize, Deserialize)]
pub struct SavedThread {
pub struct SerializedThread {
pub summary: SharedString,
pub updated_at: DateTime<Utc>,
pub messages: Vec<SavedMessage>,
pub messages: Vec<SerializedMessage>,
#[serde(default)]
pub initial_project_snapshot: Option<Arc<ProjectSnapshot>>,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct SavedMessage {
pub struct SerializedMessage {
pub id: MessageId,
pub role: Role,
pub text: String,
#[serde(default)]
pub tool_uses: Vec<SavedToolUse>,
pub tool_uses: Vec<SerializedToolUse>,
#[serde(default)]
pub tool_results: Vec<SavedToolResult>,
pub tool_results: Vec<SerializedToolResult>,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct SavedToolUse {
pub struct SerializedToolUse {
pub id: LanguageModelToolUseId,
pub name: SharedString,
pub input: serde_json::Value,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct SavedToolResult {
pub struct SerializedToolResult {
pub tool_use_id: LanguageModelToolUseId,
pub is_error: bool,
pub content: Arc<str>,
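One practical effect of the `#[serde(default)]` attributes above: thread rows saved before these fields existed still deserialize, with the missing fields falling back to their defaults. A stand-alone sketch (the JSON values are invented; it assumes only the derives shown above):

// An old row with no tool_uses, tool_results, or initial_project_snapshot
// still parses; the defaulted fields come back empty or None.
let json = r#"{
    "summary": "example thread",
    "updated_at": "2024-01-01T00:00:00Z",
    "messages": []
}"#;
let thread: SerializedThread = serde_json::from_str(json).unwrap();
assert!(thread.messages.is_empty());
assert!(thread.initial_project_snapshot.is_none());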
|
||||
@@ -298,7 +288,7 @@ impl Global for GlobalThreadsDatabase {}
|
||||
pub(crate) struct ThreadsDatabase {
|
||||
executor: BackgroundExecutor,
|
||||
env: heed::Env,
|
||||
threads: Database<SerdeBincode<ThreadId>, SerdeJson<SavedThread>>,
|
||||
threads: Database<SerdeBincode<ThreadId>, SerdeJson<SerializedThread>>,
|
||||
}
|
||||
|
||||
impl ThreadsDatabase {
|
||||
@@ -345,7 +335,7 @@ impl ThreadsDatabase {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn list_threads(&self) -> Task<Result<Vec<SavedThreadMetadata>>> {
|
||||
pub fn list_threads(&self) -> Task<Result<Vec<SerializedThreadMetadata>>> {
|
||||
let env = self.env.clone();
|
||||
let threads = self.threads;
|
||||
|
||||
@@ -354,7 +344,7 @@ impl ThreadsDatabase {
|
||||
let mut iter = threads.iter(&txn)?;
|
||||
let mut threads = Vec::new();
|
||||
while let Some((key, value)) = iter.next().transpose()? {
|
||||
threads.push(SavedThreadMetadata {
|
||||
threads.push(SerializedThreadMetadata {
|
||||
id: key,
|
||||
summary: value.summary,
|
||||
updated_at: value.updated_at,
|
||||
@@ -365,7 +355,7 @@ impl ThreadsDatabase {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn try_find_thread(&self, id: ThreadId) -> Task<Result<Option<SavedThread>>> {
|
||||
pub fn try_find_thread(&self, id: ThreadId) -> Task<Result<Option<SerializedThread>>> {
|
||||
let env = self.env.clone();
|
||||
let threads = self.threads;
|
||||
|
||||
@@ -376,7 +366,7 @@ impl ThreadsDatabase {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn save_thread(&self, id: ThreadId, thread: SavedThread) -> Task<Result<()>> {
|
||||
pub fn save_thread(&self, id: ThreadId, thread: SerializedThread) -> Task<Result<()>> {
|
||||
let env = self.env.clone();
|
||||
let threads = self.threads;
|
||||
|
||||
|
||||
crates/assistant2/src/tool_selector.rs (new file, 156 lines)
@@ -0,0 +1,156 @@
use std::sync::Arc;

use assistant_tool::{ToolSource, ToolWorkingSet};
use gpui::Entity;
use scripting_tool::ScriptingTool;
use ui::{prelude::*, ContextMenu, PopoverMenu, Tooltip};

use crate::agent_profile::AgentProfile;

pub struct ToolSelector {
profiles: Vec<AgentProfile>,
tools: Arc<ToolWorkingSet>,
}
|
||||
|
||||
impl ToolSelector {
|
||||
pub fn new(tools: Arc<ToolWorkingSet>, _cx: &mut Context<Self>) -> Self {
|
||||
Self {
|
||||
profiles: vec![AgentProfile::read_only(), AgentProfile::code_writer()],
|
||||
tools,
|
||||
}
|
||||
}
|
||||
|
||||
fn build_context_menu(
|
||||
&self,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Entity<ContextMenu> {
|
||||
let profiles = self.profiles.clone();
|
||||
let tool_set = self.tools.clone();
|
||||
ContextMenu::build_persistent(window, cx, move |mut menu, _window, cx| {
|
||||
let icon_position = IconPosition::End;
|
||||
|
||||
menu = menu.header("Profiles");
|
||||
for profile in profiles.clone() {
|
||||
menu = menu.toggleable_entry(profile.name.clone(), false, icon_position, None, {
|
||||
let tools = tool_set.clone();
|
||||
move |_window, cx| {
|
||||
tools.disable_source(ToolSource::Native, cx);
|
||||
tools.enable(
|
||||
ToolSource::Native,
|
||||
&profile
|
||||
.tools
|
||||
.iter()
|
||||
.filter_map(|(tool, enabled)| enabled.then(|| tool.clone()))
|
||||
.collect::<Vec<_>>(),
|
||||
);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
menu = menu.separator();
|
||||
|
||||
let tools_by_source = tool_set.tools_by_source(cx);
|
||||
|
||||
let all_tools_enabled = tool_set.are_all_tools_enabled();
|
||||
menu = menu.toggleable_entry("All Tools", all_tools_enabled, icon_position, None, {
|
||||
let tools = tool_set.clone();
|
||||
move |_window, cx| {
|
||||
if all_tools_enabled {
|
||||
tools.disable_all_tools(cx);
|
||||
} else {
|
||||
tools.enable_all_tools();
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
for (source, tools) in tools_by_source {
|
||||
let mut tools = tools
|
||||
.into_iter()
|
||||
.map(|tool| {
|
||||
let source = tool.source();
|
||||
let name = tool.name().into();
|
||||
let is_enabled = tool_set.is_enabled(&source, &name);
|
||||
|
||||
(source, name, is_enabled)
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
if ToolSource::Native == source {
|
||||
tools.push((
|
||||
ToolSource::Native,
|
||||
ScriptingTool::NAME.into(),
|
||||
tool_set.is_scripting_tool_enabled(),
|
||||
));
|
||||
tools.sort_by(|(_, name_a, _), (_, name_b, _)| name_a.cmp(name_b));
|
||||
}
|
||||
|
||||
menu = match &source {
|
||||
ToolSource::Native => menu.separator().header("Zed Tools"),
|
||||
ToolSource::ContextServer { id } => {
|
||||
let all_tools_from_source_enabled =
|
||||
tool_set.are_all_tools_from_source_enabled(&source);
|
||||
|
||||
menu.separator().header(id).toggleable_entry(
|
||||
"All Tools",
|
||||
all_tools_from_source_enabled,
|
||||
icon_position,
|
||||
None,
|
||||
{
|
||||
let tools = tool_set.clone();
|
||||
let source = source.clone();
|
||||
move |_window, cx| {
|
||||
if all_tools_from_source_enabled {
|
||||
tools.disable_source(source.clone(), cx);
|
||||
} else {
|
||||
tools.enable_source(&source);
|
||||
}
|
||||
}
|
||||
},
|
||||
)
|
||||
}
|
||||
};
|
||||
|
||||
for (source, name, is_enabled) in tools {
|
||||
menu = menu.toggleable_entry(name.clone(), is_enabled, icon_position, None, {
|
||||
let tools = tool_set.clone();
|
||||
move |_window, _cx| {
|
||||
if name.as_ref() == ScriptingTool::NAME {
|
||||
if is_enabled {
|
||||
tools.disable_scripting_tool();
|
||||
} else {
|
||||
tools.enable_scripting_tool();
|
||||
}
|
||||
} else {
|
||||
if is_enabled {
|
||||
tools.disable(source.clone(), &[name.clone()]);
|
||||
} else {
|
||||
tools.enable(source.clone(), &[name.clone()]);
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
menu
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl Render for ToolSelector {
|
||||
fn render(&mut self, _window: &mut Window, cx: &mut Context<'_, Self>) -> impl IntoElement {
|
||||
let this = cx.entity().clone();
|
||||
PopoverMenu::new("tool-selector")
|
||||
.menu(move |window, cx| {
|
||||
Some(this.update(cx, |this, cx| this.build_context_menu(window, cx)))
|
||||
})
|
||||
.trigger_with_tooltip(
|
||||
IconButton::new("tool-selector-button", IconName::SettingsAlt)
|
||||
.icon_size(IconSize::Small)
|
||||
.icon_color(Color::Muted),
|
||||
Tooltip::text("Customize Tools"),
|
||||
)
|
||||
.anchor(gpui::Corner::BottomLeft)
|
||||
}
|
||||
}
|
||||
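For orientation, the selector defined above is wired into `MessageEditor` earlier in this diff; the relevant lines, condensed from those hunks (the surrounding functions are elided):

// In MessageEditor::new: the selector is built from the thread's tool set.
let tools = thread.read(cx).tools().clone();
// ...
tool_selector: cx.new(|cx| ToolSelector::new(tools, cx)),

// In MessageEditor::render: it sits in the row next to the model selector.
.child(h_flex().gap_2().child(self.tool_selector.clone()))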
@@ -11,7 +11,7 @@ use language_model::{
|
||||
};
|
||||
|
||||
use crate::thread::MessageId;
|
||||
use crate::thread_store::SavedMessage;
|
||||
use crate::thread_store::SerializedMessage;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct ToolUse {
|
||||
@@ -46,25 +46,39 @@ impl ToolUseState {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn from_saved_messages(messages: &[SavedMessage]) -> Self {
/// Constructs a [`ToolUseState`] from the given list of [`SerializedMessage`]s.
///
/// Accepts a function to filter the tools that should be used to populate the state.
pub fn from_serialized_messages(
messages: &[SerializedMessage],
mut filter_by_tool_name: impl FnMut(&str) -> bool,
) -> Self {
|
||||
let mut this = Self::new();
|
||||
let mut tool_names_by_id = HashMap::default();
|
||||
|
||||
for message in messages {
|
||||
match message.role {
|
||||
Role::Assistant => {
|
||||
if !message.tool_uses.is_empty() {
|
||||
this.tool_uses_by_assistant_message.insert(
|
||||
message.id,
|
||||
message
|
||||
.tool_uses
|
||||
let tool_uses = message
|
||||
.tool_uses
|
||||
.iter()
|
||||
.filter(|tool_use| (filter_by_tool_name)(tool_use.name.as_ref()))
|
||||
.map(|tool_use| LanguageModelToolUse {
|
||||
id: tool_use.id.clone(),
|
||||
name: tool_use.name.clone().into(),
|
||||
input: tool_use.input.clone(),
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
tool_names_by_id.extend(
|
||||
tool_uses
|
||||
.iter()
|
||||
.map(|tool_use| LanguageModelToolUse {
|
||||
id: tool_use.id.clone(),
|
||||
name: tool_use.name.clone().into(),
|
||||
input: tool_use.input.clone(),
|
||||
})
|
||||
.collect(),
|
||||
.map(|tool_use| (tool_use.id.clone(), tool_use.name.clone())),
|
||||
);
|
||||
|
||||
this.tool_uses_by_assistant_message
|
||||
.insert(message.id, tool_uses);
|
||||
}
|
||||
}
|
||||
Role::User => {
|
||||
@@ -76,6 +90,14 @@ impl ToolUseState {
|
||||
|
||||
for tool_result in &message.tool_results {
|
||||
let tool_use_id = tool_result.tool_use_id.clone();
|
||||
let Some(tool_use) = tool_names_by_id.get(&tool_use_id) else {
|
||||
log::warn!("no tool name found for tool use: {tool_use_id:?}");
|
||||
continue;
|
||||
};
|
||||
|
||||
if !(filter_by_tool_name)(tool_use.as_ref()) {
|
||||
continue;
|
||||
}
|
||||
|
||||
tool_uses_by_user_message.push(tool_use_id.clone());
|
||||
this.tool_results.insert(
|
||||
@@ -96,6 +118,22 @@ impl ToolUseState {
|
||||
this
|
||||
}
|
||||
|
||||
pub fn cancel_pending(&mut self) -> Vec<PendingToolUse> {
|
||||
let mut pending_tools = Vec::new();
|
||||
for (tool_use_id, tool_use) in self.pending_tool_uses_by_id.drain() {
|
||||
self.tool_results.insert(
|
||||
tool_use_id.clone(),
|
||||
LanguageModelToolResult {
|
||||
tool_use_id,
|
||||
content: "Tool canceled by user".into(),
|
||||
is_error: true,
|
||||
},
|
||||
);
|
||||
pending_tools.push(tool_use.clone());
|
||||
}
|
||||
pending_tools
|
||||
}
|
||||
|
||||
pub fn pending_tool_uses(&self) -> Vec<&PendingToolUse> {
|
||||
self.pending_tool_uses_by_id.values().collect()
|
||||
}
|
||||
@@ -160,6 +198,13 @@ impl ToolUseState {
|
||||
.map_or(false, |results| !results.is_empty())
|
||||
}
|
||||
|
||||
pub fn tool_result(
|
||||
&self,
|
||||
tool_use_id: &LanguageModelToolUseId,
|
||||
) -> Option<&LanguageModelToolResult> {
|
||||
self.tool_results.get(tool_use_id)
|
||||
}
|
||||
|
||||
pub fn request_tool_use(
|
||||
&mut self,
|
||||
assistant_message_id: MessageId,
|
||||
@@ -202,18 +247,18 @@ impl ToolUseState {
|
||||
&mut self,
|
||||
tool_use_id: LanguageModelToolUseId,
|
||||
output: Result<String>,
|
||||
) {
|
||||
) -> Option<PendingToolUse> {
|
||||
match output {
|
||||
Ok(output) => {
|
||||
Ok(tool_result) => {
|
||||
self.tool_results.insert(
|
||||
tool_use_id.clone(),
|
||||
LanguageModelToolResult {
|
||||
tool_use_id: tool_use_id.clone(),
|
||||
content: output.into(),
|
||||
content: tool_result.into(),
|
||||
is_error: false,
|
||||
},
|
||||
);
|
||||
self.pending_tool_uses_by_id.remove(&tool_use_id);
|
||||
self.pending_tool_uses_by_id.remove(&tool_use_id)
|
||||
}
|
||||
Err(err) => {
|
||||
self.tool_results.insert(
|
||||
@@ -228,6 +273,8 @@ impl ToolUseState {
|
||||
if let Some(tool_use) = self.pending_tool_uses_by_id.get_mut(&tool_use_id) {
|
||||
tool_use.status = PendingToolUseStatus::Error(err.to_string().into());
|
||||
}
|
||||
|
||||
self.pending_tool_uses_by_id.get(&tool_use_id).cloned()
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -239,9 +286,17 @@ impl ToolUseState {
|
||||
) {
|
||||
if let Some(tool_uses) = self.tool_uses_by_assistant_message.get(&message_id) {
|
||||
for tool_use in tool_uses {
|
||||
request_message
|
||||
.content
|
||||
.push(MessageContent::ToolUse(tool_use.clone()));
|
||||
if self.tool_results.contains_key(&tool_use.id) {
|
||||
// Do not send tool uses until they are completed
|
||||
request_message
|
||||
.content
|
||||
.push(MessageContent::ToolUse(tool_use.clone()));
|
||||
} else {
|
||||
log::debug!(
|
||||
"skipped tool use {:?} because it is still pending",
|
||||
tool_use
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -254,9 +309,19 @@ impl ToolUseState {
|
||||
if let Some(tool_uses) = self.tool_uses_by_user_message.get(&message_id) {
|
||||
for tool_use_id in tool_uses {
|
||||
if let Some(tool_result) = self.tool_results.get(tool_use_id) {
|
||||
request_message
|
||||
.content
|
||||
.push(MessageContent::ToolResult(tool_result.clone()));
|
||||
request_message.content.push(MessageContent::ToolResult(
|
||||
LanguageModelToolResult {
|
||||
tool_use_id: tool_use_id.clone(),
|
||||
is_error: tool_result.is_error,
|
||||
content: if tool_result.content.is_empty() {
|
||||
// Surprisingly, the API fails if we return an empty string here.
|
||||
// It thinks we are sending a tool use without a tool result.
|
||||
"<Tool returned an empty string>".into()
|
||||
} else {
|
||||
tool_result.content.clone()
|
||||
},
|
||||
},
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -267,6 +332,7 @@ impl ToolUseState {
|
||||
pub struct PendingToolUse {
|
||||
pub id: LanguageModelToolUseId,
|
||||
/// The ID of the Assistant message in which the tool use was requested.
|
||||
#[allow(unused)]
|
||||
pub assistant_message_id: MessageId,
|
||||
pub name: Arc<str>,
|
||||
pub input: serde_json::Value,
|
||||
|
||||
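Before the context-pill changes below, one note on the `ToolUseState::from_serialized_messages` constructor introduced above: the filter runs over both the recorded tool uses and their results, so a disabled tool drops out of a restored thread consistently. A hypothetical call site (the predicate and surrounding variables are illustrative, not taken from the crate):

// Rebuild tool-use state from a deserialized thread, keeping only tool uses
// and results whose tool is still allowed (here: everything but scripting).
let tool_use_state = ToolUseState::from_serialized_messages(
    &serialized_thread.messages,
    |tool_name| tool_name != ScriptingTool::NAME,
);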
@@ -126,7 +126,13 @@ impl RenderOnce for ContextPill {
|
||||
h_flex()
|
||||
.id("context-data")
|
||||
.gap_1()
|
||||
.child(Label::new(context.name.clone()).size(LabelSize::Small))
|
||||
.child(
|
||||
div().max_w_64().child(
|
||||
Label::new(context.name.clone())
|
||||
.size(LabelSize::Small)
|
||||
.truncate(),
|
||||
),
|
||||
)
|
||||
.when_some(context.parent.as_ref(), |element, parent_name| {
|
||||
if *dupe_name {
|
||||
element.child(
|
||||
@@ -174,21 +180,22 @@ impl RenderOnce for ContextPill {
|
||||
})
|
||||
.hover(|style| style.bg(color.element_hover.opacity(0.5)))
|
||||
.child(
|
||||
Label::new(name.clone())
|
||||
.size(LabelSize::Small)
|
||||
.color(Color::Muted),
|
||||
div().px_0p5().max_w_64().child(
|
||||
Label::new(name.clone())
|
||||
.size(LabelSize::Small)
|
||||
.color(Color::Muted)
|
||||
.truncate(),
|
||||
),
|
||||
)
|
||||
.child(
|
||||
div().px_0p5().child(
|
||||
Label::new(match kind {
|
||||
ContextKind::File => "Active Tab",
|
||||
ContextKind::Thread
|
||||
| ContextKind::Directory
|
||||
| ContextKind::FetchedUrl => "Active",
|
||||
})
|
||||
.size(LabelSize::XSmall)
|
||||
.color(Color::Muted),
|
||||
),
|
||||
Label::new(match kind {
|
||||
ContextKind::File => "Active Tab",
|
||||
ContextKind::Thread | ContextKind::Directory | ContextKind::FetchedUrl => {
|
||||
"Active"
|
||||
}
|
||||
})
|
||||
.size(LabelSize::XSmall)
|
||||
.color(Color::Muted),
|
||||
)
|
||||
.child(
|
||||
Icon::new(IconName::Plus)
|
||||
|
||||
@@ -647,7 +647,6 @@ impl AssistantContext {
|
||||
)
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn new(
|
||||
id: ContextId,
|
||||
replica_id: ReplicaId,
|
||||
@@ -768,7 +767,6 @@ impl AssistantContext {
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn deserialize(
|
||||
saved_context: SavedContext,
|
||||
path: PathBuf,
|
||||
@@ -1146,9 +1144,9 @@ impl AssistantContext {
|
||||
|
||||
fn set_language(&mut self, cx: &mut Context<Self>) {
|
||||
let markdown = self.language_registry.language_for_name("Markdown");
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
cx.spawn(async move |this, cx| {
|
||||
let markdown = markdown.await?;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
this.buffer
|
||||
.update(cx, |buffer, cx| buffer.set_language(Some(markdown), cx));
|
||||
})
|
||||
@@ -1190,7 +1188,7 @@ impl AssistantContext {
|
||||
return;
|
||||
};
|
||||
let debounce = self.token_count.is_some();
|
||||
self.pending_token_count = cx.spawn(|this, mut cx| {
|
||||
self.pending_token_count = cx.spawn(async move |this, cx| {
|
||||
async move {
|
||||
if debounce {
|
||||
cx.background_executor()
|
||||
@@ -1199,13 +1197,14 @@ impl AssistantContext {
|
||||
}
|
||||
|
||||
let token_count = cx.update(|cx| model.count_tokens(request, cx))?.await?;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
this.token_count = Some(token_count);
|
||||
this.start_cache_warming(&model, cx);
|
||||
cx.notify()
|
||||
})
|
||||
}
|
||||
.log_err()
|
||||
.await
|
||||
});
|
||||
}
|
||||
|
||||
@@ -1344,7 +1343,7 @@ impl AssistantContext {
|
||||
};
|
||||
|
||||
let model = Arc::clone(model);
|
||||
self.pending_cache_warming_task = cx.spawn(|this, mut cx| {
|
||||
self.pending_cache_warming_task = cx.spawn(async move |this, cx| {
|
||||
async move {
|
||||
match model.stream_completion(request, &cx).await {
|
||||
Ok(mut stream) => {
|
||||
@@ -1355,13 +1354,14 @@ impl AssistantContext {
|
||||
log::warn!("Cache warming failed: {}", e);
|
||||
}
|
||||
};
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
this.update_cache_status_for_completion(cx);
|
||||
})
|
||||
.ok();
|
||||
anyhow::Ok(())
|
||||
}
|
||||
.log_err()
|
||||
.await
|
||||
});
|
||||
}
|
||||
|
||||
@@ -1918,7 +1918,7 @@ impl AssistantContext {
|
||||
});
|
||||
self.reparse(cx);
|
||||
|
||||
let insert_output_task = cx.spawn(|this, mut cx| async move {
|
||||
let insert_output_task = cx.spawn(async move |this, cx| {
|
||||
let run_command = async {
|
||||
let mut stream = output.await?;
|
||||
|
||||
@@ -1935,7 +1935,7 @@ impl AssistantContext {
|
||||
|
||||
while let Some(event) = stream.next().await {
|
||||
let event = event?;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
this.buffer.update(cx, |buffer, _cx| {
|
||||
buffer.finalize_last_transaction();
|
||||
buffer.start_transaction()
|
||||
@@ -2036,7 +2036,7 @@ impl AssistantContext {
|
||||
})?;
|
||||
}
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
this.buffer.update(cx, |buffer, cx| {
|
||||
buffer.finalize_last_transaction();
|
||||
buffer.start_transaction();
|
||||
@@ -2082,7 +2082,7 @@ impl AssistantContext {
|
||||
|
||||
let command_result = run_command.await;
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
let version = this.version.clone();
|
||||
let timestamp = this.next_timestamp();
|
||||
let Some(invoked_slash_command) = this.invoked_slash_commands.get_mut(&command_id)
|
||||
@@ -2212,7 +2212,7 @@ impl AssistantContext {
|
||||
let pending_completion_id = post_inc(&mut self.completion_count);
|
||||
|
||||
let task = cx.spawn({
|
||||
|this, mut cx| async move {
|
||||
async move |this, cx| {
|
||||
let stream = model.stream_completion(request, &cx);
|
||||
let assistant_message_id = assistant_message.id;
|
||||
let mut response_latency = None;
|
||||
@@ -2227,7 +2227,7 @@ impl AssistantContext {
|
||||
}
|
||||
let event = event?;
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
let message_ix = this
|
||||
.message_anchors
|
||||
.iter()
|
||||
@@ -2256,6 +2256,7 @@ impl AssistantContext {
|
||||
);
|
||||
}
|
||||
LanguageModelCompletionEvent::ToolUse(_) => {}
|
||||
LanguageModelCompletionEvent::UsageUpdate(_) => {}
|
||||
}
|
||||
});
|
||||
|
||||
@@ -2265,7 +2266,7 @@ impl AssistantContext {
|
||||
})?;
|
||||
smol::future::yield_now().await;
|
||||
}
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
this.pending_completions
|
||||
.retain(|completion| completion.id != pending_completion_id);
|
||||
this.summarize(false, cx);
|
||||
@@ -2277,7 +2278,7 @@ impl AssistantContext {
|
||||
|
||||
let result = stream_completion.await;
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
let error_message = if let Some(error) = result.as_ref().err() {
|
||||
if error.is::<PaymentRequiredError>() {
|
||||
cx.emit(ContextEvent::ShowPaymentRequiredError);
|
||||
@@ -2787,7 +2788,7 @@ impl AssistantContext {
|
||||
cache: false,
|
||||
});
|
||||
|
||||
self.pending_summary = cx.spawn(|this, mut cx| {
|
||||
self.pending_summary = cx.spawn(async move |this, cx| {
|
||||
async move {
|
||||
let stream = model.stream_completion_text(request, &cx);
|
||||
let mut messages = stream.await?;
|
||||
@@ -2796,7 +2797,7 @@ impl AssistantContext {
|
||||
while let Some(message) = messages.stream.next().await {
|
||||
let text = message?;
|
||||
let mut lines = text.lines();
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
let version = this.version.clone();
|
||||
let timestamp = this.next_timestamp();
|
||||
let summary = this.summary.get_or_insert(ContextSummary::default());
|
||||
@@ -2820,7 +2821,7 @@ impl AssistantContext {
|
||||
}
|
||||
}
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
let version = this.version.clone();
|
||||
let timestamp = this.next_timestamp();
|
||||
if let Some(summary) = this.summary.as_mut() {
|
||||
@@ -2838,6 +2839,7 @@ impl AssistantContext {
|
||||
anyhow::Ok(())
|
||||
}
|
||||
.log_err()
|
||||
.await
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -2944,12 +2946,12 @@ impl AssistantContext {
|
||||
return;
|
||||
}
|
||||
|
||||
self.pending_save = cx.spawn(|this, mut cx| async move {
|
||||
self.pending_save = cx.spawn(async move |this, cx| {
|
||||
if let Some(debounce) = debounce {
|
||||
cx.background_executor().timer(debounce).await;
|
||||
}
|
||||
|
||||
let (old_path, summary) = this.read_with(&cx, |this, _| {
|
||||
let (old_path, summary) = this.read_with(cx, |this, _| {
|
||||
let path = this.path.clone();
|
||||
let summary = if let Some(summary) = this.summary.as_ref() {
|
||||
if summary.done {
|
||||
@@ -2964,7 +2966,7 @@ impl AssistantContext {
|
||||
})?;
|
||||
|
||||
if let Some(summary) = summary {
|
||||
let context = this.read_with(&cx, |this, cx| this.serialize(cx))?;
|
||||
let context = this.read_with(cx, |this, cx| this.serialize(cx))?;
|
||||
let mut discriminant = 1;
|
||||
let mut new_path;
|
||||
loop {
|
||||
@@ -2996,7 +2998,7 @@ impl AssistantContext {
|
||||
}
|
||||
}
|
||||
|
||||
this.update(&mut cx, |this, _| this.path = Some(new_path))?;
|
||||
this.update(cx, |this, _| this.path = Some(new_path))?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
|
||||
@@ -229,6 +229,7 @@ impl ContextEditor {
|
||||
editor.set_show_git_diff_gutter(false, cx);
|
||||
editor.set_show_code_actions(false, cx);
|
||||
editor.set_show_runnables(false, cx);
|
||||
editor.set_show_breakpoints(false, cx);
|
||||
editor.set_show_wrap_guides(false, cx);
|
||||
editor.set_show_indent_guides(false, cx);
|
||||
editor.set_completion_provider(Some(Box::new(completion_provider)));
|
||||
@@ -535,7 +536,6 @@ impl ContextEditor {
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn run_command(
|
||||
&mut self,
|
||||
command_range: Range<language::Anchor>,
|
||||
@@ -907,7 +907,7 @@ impl ContextEditor {
|
||||
if editor_state.opened_patch != patch {
|
||||
state.update_task = Some({
|
||||
let this = this.clone();
|
||||
cx.spawn_in(window, |_, cx| async move {
|
||||
cx.spawn_in(window, async move |_, cx| {
|
||||
Self::update_patch_editor(this.clone(), patch, cx)
|
||||
.await
|
||||
.log_err();
|
||||
@@ -1070,10 +1070,9 @@ impl ContextEditor {
|
||||
})
|
||||
.ok();
|
||||
} else {
|
||||
patch_state.update_task =
|
||||
Some(cx.spawn_in(window, move |this, cx| async move {
|
||||
Self::open_patch_editor(this, new_patch, cx).await.log_err();
|
||||
}));
|
||||
patch_state.update_task = Some(cx.spawn_in(window, async move |this, cx| {
|
||||
Self::open_patch_editor(this, new_patch, cx).await.log_err();
|
||||
}));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1103,10 +1102,10 @@ impl ContextEditor {
|
||||
async fn open_patch_editor(
|
||||
this: WeakEntity<Self>,
|
||||
patch: AssistantPatch,
|
||||
mut cx: AsyncWindowContext,
|
||||
cx: &mut AsyncWindowContext,
|
||||
) -> Result<()> {
|
||||
let project = this.read_with(&cx, |this, _| this.project.clone())?;
|
||||
let resolved_patch = patch.resolve(project.clone(), &mut cx).await;
|
||||
let project = this.read_with(cx, |this, _| this.project.clone())?;
|
||||
let resolved_patch = patch.resolve(project.clone(), cx).await;
|
||||
|
||||
let editor = cx.new_window_entity(|window, cx| {
|
||||
let editor = ProposedChangesEditor::new(
|
||||
@@ -1130,7 +1129,7 @@ impl ContextEditor {
|
||||
editor
|
||||
})?;
|
||||
|
||||
this.update(&mut cx, |this, _| {
|
||||
this.update(cx, |this, _| {
|
||||
if let Some(patch_state) = this.patches.get_mut(&patch.range) {
|
||||
patch_state.editor = Some(PatchEditorState {
|
||||
editor: editor.downgrade(),
|
||||
@@ -1139,8 +1138,8 @@ impl ContextEditor {
|
||||
patch_state.update_task.take();
|
||||
}
|
||||
})?;
|
||||
this.read_with(&cx, |this, _| this.workspace.clone())?
|
||||
.update_in(&mut cx, |workspace, window, cx| {
|
||||
this.read_with(cx, |this, _| this.workspace.clone())?
|
||||
.update_in(cx, |workspace, window, cx| {
|
||||
workspace.add_item_to_active_pane(Box::new(editor.clone()), None, false, window, cx)
|
||||
})
|
||||
.log_err();
|
||||
@@ -1151,11 +1150,11 @@ impl ContextEditor {
|
||||
async fn update_patch_editor(
|
||||
this: WeakEntity<Self>,
|
||||
patch: AssistantPatch,
|
||||
mut cx: AsyncWindowContext,
|
||||
cx: &mut AsyncWindowContext,
|
||||
) -> Result<()> {
|
||||
let project = this.update(&mut cx, |this, _| this.project.clone())?;
|
||||
let resolved_patch = patch.resolve(project.clone(), &mut cx).await;
|
||||
this.update_in(&mut cx, |this, window, cx| {
|
||||
let project = this.update(cx, |this, _| this.project.clone())?;
|
||||
let resolved_patch = patch.resolve(project.clone(), cx).await;
|
||||
this.update_in(cx, |this, window, cx| {
|
||||
let patch_state = this.patches.get_mut(&patch.range)?;
|
||||
|
||||
let locations = resolved_patch
|
||||
@@ -1625,14 +1624,14 @@ impl ContextEditor {
|
||||
.map(|path| Workspace::project_path_for_path(project.clone(), &path, false, cx))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
cx.spawn(move |_, cx| async move {
|
||||
cx.spawn(async move |_, cx| {
|
||||
let mut paths = vec![];
|
||||
let mut worktrees = vec![];
|
||||
|
||||
let opened_paths = futures::future::join_all(tasks).await;
|
||||
for (worktree, project_path) in opened_paths.into_iter().flatten() {
|
||||
let Ok(worktree_root_name) =
|
||||
worktree.read_with(&cx, |worktree, _| worktree.root_name().to_string())
|
||||
worktree.read_with(cx, |worktree, _| worktree.root_name().to_string())
|
||||
else {
|
||||
continue;
|
||||
};
|
||||
@@ -1649,12 +1648,12 @@ impl ContextEditor {
|
||||
};
|
||||
|
||||
window
|
||||
.spawn(cx, |mut cx| async move {
|
||||
.spawn(cx, async move |cx| {
|
||||
let (paths, dragged_file_worktrees) = paths.await;
|
||||
let cmd_name = FileSlashCommand.name();
|
||||
|
||||
context_editor_view
|
||||
.update_in(&mut cx, |context_editor, window, cx| {
|
||||
.update_in(cx, |context_editor, window, cx| {
|
||||
let file_argument = paths
|
||||
.into_iter()
|
||||
.map(|path| path.to_string_lossy().to_string())
|
||||
@@ -2057,7 +2056,6 @@ impl ContextEditor {
|
||||
.unwrap_or_else(|| Cow::Borrowed(DEFAULT_TAB_TITLE))
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn render_patch_block(
|
||||
&mut self,
|
||||
range: Range<text::Anchor>,
|
||||
@@ -2201,9 +2199,9 @@ impl ContextEditor {
|
||||
.log_err();
|
||||
|
||||
if let Some(client) = client {
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
client.authenticate_and_connect(true, &mut cx).await?;
|
||||
this.update(&mut cx, |_, cx| cx.notify())
|
||||
cx.spawn(async move |this, cx| {
|
||||
client.authenticate_and_connect(true, cx).await?;
|
||||
this.update(cx, |_, cx| cx.notify())
|
||||
})
|
||||
.detach_and_log_err(cx)
|
||||
}
|
||||
@@ -3162,10 +3160,10 @@ impl FollowableItem for ContextEditor {
|
||||
assistant_panel_delegate.open_remote_context(workspace, context_id, window, cx)
|
||||
});
|
||||
|
||||
Some(window.spawn(cx, |mut cx| async move {
|
||||
Some(window.spawn(cx, async move |cx| {
|
||||
let context_editor = context_editor_task.await?;
|
||||
context_editor
|
||||
.update_in(&mut cx, |context_editor, window, cx| {
|
||||
.update_in(cx, |context_editor, window, cx| {
|
||||
context_editor.remote_id = Some(id);
|
||||
context_editor.editor.update(cx, |editor, cx| {
|
||||
editor.apply_update_proto(
|
||||
|
||||
@@ -164,9 +164,9 @@ impl PickerDelegate for SavedContextPickerDelegate {
|
||||
cx: &mut Context<Picker<Self>>,
|
||||
) -> Task<()> {
|
||||
let search = self.store.read(cx).search(query, cx);
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
cx.spawn(async move |this, cx| {
|
||||
let matches = search.await;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
let host_contexts = this.delegate.store.read(cx).host_contexts();
|
||||
this.delegate.matches = host_contexts
|
||||
.iter()
|
||||
|
||||
@@ -100,7 +100,7 @@ impl ContextStore {
|
||||
let fs = project.read(cx).fs().clone();
|
||||
let languages = project.read(cx).languages().clone();
|
||||
let telemetry = project.read(cx).client().telemetry().clone();
|
||||
cx.spawn(|mut cx| async move {
|
||||
cx.spawn(async move |cx| {
|
||||
const CONTEXT_WATCH_DURATION: Duration = Duration::from_millis(100);
|
||||
let (mut events, _) = fs.watch(contexts_dir(), CONTEXT_WATCH_DURATION).await;
|
||||
|
||||
@@ -125,16 +125,15 @@ impl ContextStore {
|
||||
languages,
|
||||
slash_commands,
|
||||
telemetry,
|
||||
_watch_updates: cx.spawn(|this, mut cx| {
|
||||
_watch_updates: cx.spawn(async move |this, cx| {
|
||||
async move {
|
||||
while events.next().await.is_some() {
|
||||
this.update(&mut cx, |this, cx| this.reload(cx))?
|
||||
.await
|
||||
.log_err();
|
||||
this.update(cx, |this, cx| this.reload(cx))?.await.log_err();
|
||||
}
|
||||
anyhow::Ok(())
|
||||
}
|
||||
.log_err()
|
||||
.await
|
||||
}),
|
||||
client_subscription: None,
|
||||
_project_subscriptions: vec![
|
||||
@@ -395,7 +394,7 @@ impl ContextStore {
|
||||
let prompt_builder = self.prompt_builder.clone();
|
||||
let slash_commands = self.slash_commands.clone();
|
||||
let request = self.client.request(proto::CreateContext { project_id });
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
cx.spawn(async move |this, cx| {
|
||||
let response = request.await?;
|
||||
let context_id = ContextId::from_proto(response.context_id);
|
||||
let context_proto = response.context.context("invalid context")?;
|
||||
@@ -421,8 +420,8 @@ impl ContextStore {
|
||||
.collect::<Result<Vec<_>>>()
|
||||
})
|
||||
.await?;
|
||||
context.update(&mut cx, |context, cx| context.apply_ops(operations, cx))?;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
context.update(cx, |context, cx| context.apply_ops(operations, cx))?;
|
||||
this.update(cx, |this, cx| {
|
||||
if let Some(existing_context) = this.loaded_context_for_id(&context_id, cx) {
|
||||
existing_context
|
||||
} else {
|
||||
@@ -457,7 +456,7 @@ impl ContextStore {
|
||||
let prompt_builder = self.prompt_builder.clone();
|
||||
let slash_commands = self.slash_commands.clone();
|
||||
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
cx.spawn(async move |this, cx| {
|
||||
let saved_context = load.await?;
|
||||
let context = cx.new(|cx| {
|
||||
AssistantContext::deserialize(
|
||||
@@ -471,7 +470,7 @@ impl ContextStore {
|
||||
cx,
|
||||
)
|
||||
})?;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
if let Some(existing_context) = this.loaded_context_for_path(&path, cx) {
|
||||
existing_context
|
||||
} else {
|
||||
@@ -489,7 +488,7 @@ impl ContextStore {
|
||||
) -> Task<Result<()>> {
|
||||
let fs = self.fs.clone();
|
||||
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
cx.spawn(async move |this, cx| {
|
||||
fs.remove_file(
|
||||
&path,
|
||||
RemoveOptions {
|
||||
@@ -499,7 +498,7 @@ impl ContextStore {
|
||||
)
|
||||
.await?;
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
this.contexts.retain(|context| {
|
||||
context
|
||||
.upgrade()
|
||||
@@ -565,7 +564,7 @@ impl ContextStore {
|
||||
});
|
||||
let prompt_builder = self.prompt_builder.clone();
|
||||
let slash_commands = self.slash_commands.clone();
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
cx.spawn(async move |this, cx| {
|
||||
let response = request.await?;
|
||||
let context_proto = response.context.context("invalid context")?;
|
||||
let context = cx.new(|cx| {
|
||||
@@ -590,8 +589,8 @@ impl ContextStore {
|
||||
.collect::<Result<Vec<_>>>()
|
||||
})
|
||||
.await?;
|
||||
context.update(&mut cx, |context, cx| context.apply_ops(operations, cx))?;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
context.update(cx, |context, cx| context.apply_ops(operations, cx))?;
|
||||
this.update(cx, |this, cx| {
|
||||
if let Some(existing_context) = this.loaded_context_for_id(&context_id, cx) {
|
||||
existing_context
|
||||
} else {
|
||||
@@ -700,12 +699,12 @@ impl ContextStore {
|
||||
project_id,
|
||||
contexts,
|
||||
});
|
||||
cx.spawn(|this, cx| async move {
|
||||
cx.spawn(async move |this, cx| {
|
||||
let response = request.await?;
|
||||
|
||||
let mut context_ids = Vec::new();
|
||||
let mut operations = Vec::new();
|
||||
this.read_with(&cx, |this, cx| {
|
||||
this.read_with(cx, |this, cx| {
|
||||
for context_version_proto in response.contexts {
|
||||
let context_version = ContextVersion::from_proto(&context_version_proto);
|
||||
let context_id = ContextId::from_proto(context_version_proto.context_id);
|
||||
@@ -768,7 +767,7 @@ impl ContextStore {
|
||||
|
||||
fn reload(&mut self, cx: &mut Context<Self>) -> Task<Result<()>> {
|
||||
let fs = self.fs.clone();
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
cx.spawn(async move |this, cx| {
|
||||
fs.create_dir(contexts_dir()).await?;
|
||||
|
||||
let mut paths = fs.read_dir(contexts_dir()).await?;
|
||||
@@ -808,7 +807,7 @@ impl ContextStore {
|
||||
}
|
||||
contexts.sort_unstable_by_key(|context| Reverse(context.mtime));
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
this.contexts_metadata = contexts;
|
||||
cx.notify();
|
||||
})
|
||||
@@ -819,7 +818,7 @@ impl ContextStore {
|
||||
cx.update_entity(
|
||||
&self.context_server_manager,
|
||||
|context_server_manager, cx| {
|
||||
for server in context_server_manager.servers() {
|
||||
for server in context_server_manager.running_servers() {
|
||||
context_server_manager
|
||||
.restart_server(&server.id(), cx)
|
||||
.detach_and_log_err(cx);
|
||||
@@ -850,7 +849,7 @@ impl ContextStore {
|
||||
cx.spawn({
|
||||
let server = server.clone();
|
||||
let server_id = server_id.clone();
|
||||
|this, mut cx| async move {
|
||||
async move |this, cx| {
|
||||
let Some(protocol) = server.client() else {
|
||||
return;
|
||||
};
|
||||
@@ -875,7 +874,7 @@ impl ContextStore {
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
this.update(&mut cx, |this, _cx| {
|
||||
this.update(cx, |this, _cx| {
|
||||
this.context_server_slash_command_ids
|
||||
.insert(server_id.clone(), slash_command_ids);
|
||||
})
|
||||
|
||||
@@ -48,7 +48,7 @@ impl SlashCommandCompletionProvider {
|
||||
name_range: Range<Anchor>,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<Vec<project::Completion>>> {
|
||||
) -> Task<Result<Option<Vec<project::Completion>>>> {
|
||||
let slash_commands = self.slash_commands.clone();
|
||||
let candidates = slash_commands
|
||||
.command_names(cx)
|
||||
@@ -59,7 +59,7 @@ impl SlashCommandCompletionProvider {
|
||||
let command_name = command_name.to_string();
|
||||
let editor = self.editor.clone();
|
||||
let workspace = self.workspace.clone();
|
||||
window.spawn(cx, |mut cx| async move {
|
||||
window.spawn(cx, async move |cx| {
|
||||
let matches = match_strings(
|
||||
&candidates,
|
||||
&command_name,
|
||||
@@ -71,70 +71,71 @@ impl SlashCommandCompletionProvider {
|
||||
.await;
|
||||
|
||||
cx.update(|_, cx| {
|
||||
matches
|
||||
.into_iter()
|
||||
.filter_map(|mat| {
|
||||
let command = slash_commands.command(&mat.string, cx)?;
|
||||
let mut new_text = mat.string.clone();
|
||||
let requires_argument = command.requires_argument();
|
||||
let accepts_arguments = command.accepts_arguments();
|
||||
if requires_argument || accepts_arguments {
|
||||
new_text.push(' ');
|
||||
}
|
||||
Some(
|
||||
matches
|
||||
.into_iter()
|
||||
.filter_map(|mat| {
|
||||
let command = slash_commands.command(&mat.string, cx)?;
|
||||
let mut new_text = mat.string.clone();
|
||||
let requires_argument = command.requires_argument();
|
||||
let accepts_arguments = command.accepts_arguments();
|
||||
if requires_argument || accepts_arguments {
|
||||
new_text.push(' ');
|
||||
}
|
||||
|
||||
let confirm =
|
||||
editor
|
||||
.clone()
|
||||
.zip(workspace.clone())
|
||||
.map(|(editor, workspace)| {
|
||||
let command_name = mat.string.clone();
|
||||
let command_range = command_range.clone();
|
||||
let editor = editor.clone();
|
||||
let workspace = workspace.clone();
|
||||
Arc::new(
|
||||
move |intent: CompletionIntent,
|
||||
window: &mut Window,
|
||||
cx: &mut App| {
|
||||
if !requires_argument
|
||||
&& (!accepts_arguments || intent.is_complete())
|
||||
{
|
||||
editor
|
||||
.update(cx, |editor, cx| {
|
||||
editor.run_command(
|
||||
command_range.clone(),
|
||||
&command_name,
|
||||
&[],
|
||||
true,
|
||||
workspace.clone(),
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
})
|
||||
.ok();
|
||||
false
|
||||
} else {
|
||||
requires_argument || accepts_arguments
|
||||
}
|
||||
},
|
||||
) as Arc<_>
|
||||
});
|
||||
Some(project::Completion {
|
||||
old_range: name_range.clone(),
|
||||
documentation: Some(CompletionDocumentation::SingleLine(
|
||||
command.description().into(),
|
||||
)),
|
||||
new_text,
|
||||
label: command.label(cx),
|
||||
confirm,
|
||||
source: CompletionSource::Custom,
|
||||
let confirm =
|
||||
editor
|
||||
.clone()
|
||||
.zip(workspace.clone())
|
||||
.map(|(editor, workspace)| {
|
||||
let command_name = mat.string.clone();
|
||||
let command_range = command_range.clone();
|
||||
let editor = editor.clone();
|
||||
let workspace = workspace.clone();
|
||||
Arc::new(
|
||||
move |intent: CompletionIntent,
|
||||
window: &mut Window,
|
||||
cx: &mut App| {
|
||||
if !requires_argument
|
||||
&& (!accepts_arguments || intent.is_complete())
|
||||
{
|
||||
editor
|
||||
.update(cx, |editor, cx| {
|
||||
editor.run_command(
|
||||
command_range.clone(),
|
||||
&command_name,
|
||||
&[],
|
||||
true,
|
||||
workspace.clone(),
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
})
|
||||
.ok();
|
||||
false
|
||||
} else {
|
||||
requires_argument || accepts_arguments
|
||||
}
|
||||
},
|
||||
) as Arc<_>
|
||||
});
|
||||
Some(project::Completion {
|
||||
old_range: name_range.clone(),
|
||||
documentation: Some(CompletionDocumentation::SingleLine(
|
||||
command.description().into(),
|
||||
)),
|
||||
new_text,
|
||||
label: command.label(cx),
|
||||
confirm,
|
||||
source: CompletionSource::Custom,
|
||||
})
|
||||
})
|
||||
})
|
||||
.collect()
|
||||
.collect(),
|
||||
)
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn complete_command_argument(
|
||||
&self,
|
||||
command_name: &str,
|
||||
@@ -144,7 +145,7 @@ impl SlashCommandCompletionProvider {
|
||||
last_argument_range: Range<Anchor>,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<Vec<project::Completion>>> {
|
||||
) -> Task<Result<Option<Vec<project::Completion>>>> {
|
||||
let new_cancel_flag = Arc::new(AtomicBool::new(false));
|
||||
let mut flag = self.cancel_flag.lock();
|
||||
flag.store(true, SeqCst);
|
||||
@@ -162,27 +163,28 @@ impl SlashCommandCompletionProvider {
|
||||
let workspace = self.workspace.clone();
|
||||
let arguments = arguments.to_vec();
|
||||
cx.background_spawn(async move {
|
||||
Ok(completions
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(|new_argument| {
|
||||
let confirm =
|
||||
editor
|
||||
.clone()
|
||||
.zip(workspace.clone())
|
||||
.map(|(editor, workspace)| {
|
||||
Arc::new({
|
||||
let mut completed_arguments = arguments.clone();
|
||||
if new_argument.replace_previous_arguments {
|
||||
completed_arguments.clear();
|
||||
} else {
|
||||
completed_arguments.pop();
|
||||
}
|
||||
completed_arguments.push(new_argument.new_text.clone());
|
||||
Ok(Some(
|
||||
completions
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(|new_argument| {
|
||||
let confirm =
|
||||
editor
|
||||
.clone()
|
||||
.zip(workspace.clone())
|
||||
.map(|(editor, workspace)| {
|
||||
Arc::new({
|
||||
let mut completed_arguments = arguments.clone();
|
||||
if new_argument.replace_previous_arguments {
|
||||
completed_arguments.clear();
|
||||
} else {
|
||||
completed_arguments.pop();
|
||||
}
|
||||
completed_arguments.push(new_argument.new_text.clone());
|
||||
|
||||
let command_range = command_range.clone();
|
||||
let command_name = command_name.clone();
|
||||
move |intent: CompletionIntent,
|
||||
let command_range = command_range.clone();
|
||||
let command_name = command_name.clone();
|
||||
move |intent: CompletionIntent,
|
||||
window: &mut Window,
|
||||
cx: &mut App| {
|
||||
if new_argument.after_completion.run()
|
||||
@@ -206,31 +208,32 @@ impl SlashCommandCompletionProvider {
|
||||
!new_argument.after_completion.run()
|
||||
}
|
||||
}
|
||||
}) as Arc<_>
|
||||
});
|
||||
}) as Arc<_>
|
||||
});
|
||||
|
||||
let mut new_text = new_argument.new_text.clone();
|
||||
if new_argument.after_completion == AfterCompletion::Continue {
|
||||
new_text.push(' ');
|
||||
}
|
||||
let mut new_text = new_argument.new_text.clone();
|
||||
if new_argument.after_completion == AfterCompletion::Continue {
|
||||
new_text.push(' ');
|
||||
}
|
||||
|
||||
project::Completion {
|
||||
old_range: if new_argument.replace_previous_arguments {
|
||||
argument_range.clone()
|
||||
} else {
|
||||
last_argument_range.clone()
|
||||
},
|
||||
label: new_argument.label,
|
||||
new_text,
|
||||
documentation: None,
|
||||
confirm,
|
||||
source: CompletionSource::Custom,
|
||||
}
|
||||
})
|
||||
.collect())
|
||||
project::Completion {
|
||||
old_range: if new_argument.replace_previous_arguments {
|
||||
argument_range.clone()
|
||||
} else {
|
||||
last_argument_range.clone()
|
||||
},
|
||||
label: new_argument.label,
|
||||
new_text,
|
||||
documentation: None,
|
||||
confirm,
|
||||
source: CompletionSource::Custom,
|
||||
}
|
||||
})
|
||||
.collect(),
|
||||
))
|
||||
})
|
||||
} else {
|
||||
Task::ready(Ok(Vec::new()))
|
||||
Task::ready(Ok(Some(Vec::new())))
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -243,7 +246,7 @@ impl CompletionProvider for SlashCommandCompletionProvider {
|
||||
_: editor::CompletionContext,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Editor>,
|
||||
) -> Task<Result<Vec<project::Completion>>> {
|
||||
) -> Task<Result<Option<Vec<project::Completion>>>> {
|
||||
let Some((name, arguments, command_range, last_argument_range)) =
|
||||
buffer.update(cx, |buffer, _cx| {
|
||||
let position = buffer_position.to_point(buffer);
|
||||
@@ -287,7 +290,7 @@ impl CompletionProvider for SlashCommandCompletionProvider {
|
||||
Some((name, arguments, command_range, last_argument_range))
|
||||
})
|
||||
else {
|
||||
return Task::ready(Ok(Vec::new()));
|
||||
return Task::ready(Ok(Some(Vec::new())));
|
||||
};
|
||||
|
||||
if let Some((arguments, argument_range)) = arguments {
|
||||
|
||||
@@ -100,7 +100,7 @@ impl PickerDelegate for SlashCommandDelegate {
|
||||
cx: &mut Context<Picker<Self>>,
|
||||
) -> Task<()> {
|
||||
let all_commands = self.all_commands.clone();
|
||||
cx.spawn_in(window, |this, mut cx| async move {
|
||||
cx.spawn_in(window, async move |this, cx| {
|
||||
let filtered_commands = cx
|
||||
.background_spawn(async move {
|
||||
if query.is_empty() {
|
||||
@@ -119,7 +119,7 @@ impl PickerDelegate for SlashCommandDelegate {
|
||||
})
|
||||
.await;
|
||||
|
||||
this.update_in(&mut cx, |this, window, cx| {
|
||||
this.update_in(cx, |this, window, cx| {
|
||||
this.delegate.filtered_commands = filtered_commands;
|
||||
this.delegate.set_selected_index(0, window, cx);
|
||||
cx.notify();
|
||||
|
||||
44
crates/assistant_eval/Cargo.toml
Normal file
@@ -0,0 +1,44 @@
[package]
name = "assistant_eval"
version = "0.1.0"
edition.workspace = true
publish.workspace = true
license = "GPL-3.0-or-later"

[lints]
workspace = true

[[bin]]
name = "assistant_eval"
path = "src/main.rs"

[dependencies]
anyhow.workspace = true
assistant2.workspace = true
assistant_tool.workspace = true
assistant_tools.workspace = true
clap.workspace = true
client.workspace = true
collections.workspace = true
context_server.workspace = true
env_logger.workspace = true
fs.workspace = true
futures.workspace = true
gpui.workspace = true
gpui_tokio.workspace = true
itertools.workspace = true
language.workspace = true
language_model.workspace = true
language_models.workspace = true
node_runtime.workspace = true
project.workspace = true
prompt_store.workspace = true
regex.workspace = true
release_channel.workspace = true
reqwest_client.workspace = true
serde.workspace = true
serde_json.workspace = true
serde_json_lenient.workspace = true
settings.workspace = true
smol.workspace = true
util.workspace = true
77
crates/assistant_eval/README.md
Normal file
@@ -0,0 +1,77 @@
# Tool Evals

A framework for evaluating and benchmarking AI assistant performance in the Zed editor.

## Overview

Tool Evals provides a headless environment for running assistant evaluations on code repositories. It automates the process of:

1. Cloning and setting up test repositories
2. Sending prompts to language models
3. Allowing the assistant to use tools to modify code
4. Collecting metrics on performance
5. Evaluating results against known good solutions

## How It Works

The system consists of several key components:

- **Eval**: Loads test cases from the `evaluation_data` directory, clones repos, and executes evaluations
- **HeadlessAssistant**: Provides a headless environment for running the AI assistant
- **Judge**: Compares AI-generated diffs with reference solutions and scores their functional similarity

The evaluation flow (a minimal code sketch follows after this list):
1. An evaluation is loaded from the `evaluation_data` directory
2. The target repository is cloned and checked out at a specific commit
3. A HeadlessAssistant instance is created with the specified language model
4. The user prompt is sent to the assistant
5. The assistant responds and uses tools to modify code
6. Upon completion, a diff is generated from the changes
7. Results are saved, including the diff, the assistant's response, and performance metrics
8. If a reference solution exists, a Judge evaluates the similarity of the solution

## Setup Requirements

### Prerequisites

- Rust and Cargo
- Git
- Network access to clone repositories
- Appropriate API keys for language models and git services (Anthropic, GitHub, etc.)

### Environment Variables

Ensure you have the required API keys set, either from a dev run of Zed or via these environment variables:

- `ZED_ANTHROPIC_API_KEY` for Claude models
- `ZED_OPENAI_API_KEY` for OpenAI models
- `ZED_GITHUB_API_KEY` for the GitHub API (or similar)

## Usage

### Running a Single Evaluation

To run a specific evaluation:

```bash
cargo run -p assistant_eval -- bubbletea-add-set-window-title
```

The arguments are regex patterns matched against evaluation names, so to run all evaluations whose names contain `bubbletea`, run:

```bash
cargo run -p assistant_eval -- bubbletea
```

To run all evaluations:

```bash
cargo run -p assistant_eval -- --all
```

## Evaluation Data Structure

Each evaluation should be placed in the `evaluation_data` directory with the following structure (see the sketch after this list for how `setup.json` is read):

* `prompt.txt`: The user's prompt.
* `original.diff`: The `git diff` of the change anticipated for this prompt.
* `setup.json`: Information about the repo used for the evaluation.
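The fields of `setup.json` correspond to the `EvalSetup` struct in `crates/assistant_eval/src/eval.rs`. The sketch below is illustrative only: the URL and SHA are made up rather than taken from a real eval, and it simply mirrors the lenient JSON parsing that `Eval::load` performs.

```rust
use serde::Deserialize;

/// Mirrors `EvalSetup` in `crates/assistant_eval/src/eval.rs`.
#[derive(Debug, Deserialize)]
struct EvalSetup {
    url: String,      // remote repository to clone
    base_sha: String, // commit to check out before running the eval
}

fn main() -> anyhow::Result<()> {
    // Hypothetical contents of an evaluation's `setup.json`.
    let setup_contents = r#"{
        "url": "https://github.com/example/bubbletea",
        "base_sha": "0123456789abcdef0123456789abcdef01234567"
    }"#;

    // `Eval::load` parses the file the same way.
    let setup: EvalSetup = serde_json_lenient::from_str_lenient(setup_contents)?;
    println!("{} @ {}", setup.url, setup.base_sha);
    Ok(())
}
```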
61
crates/assistant_eval/build.rs
Normal file
@@ -0,0 +1,61 @@
// Copied from `crates/zed/build.rs`, with removal of code for including the zed icon on windows.

use std::process::Command;

fn main() {
    if cfg!(target_os = "macos") {
        println!("cargo:rustc-env=MACOSX_DEPLOYMENT_TARGET=10.15.7");

        println!("cargo:rerun-if-env-changed=ZED_BUNDLE");
        if std::env::var("ZED_BUNDLE").ok().as_deref() == Some("true") {
            // Find WebRTC.framework in the Frameworks folder when running as part of an application bundle.
            println!("cargo:rustc-link-arg=-Wl,-rpath,@executable_path/../Frameworks");
        } else {
            // Find WebRTC.framework as a sibling of the executable when running outside of an application bundle.
            println!("cargo:rustc-link-arg=-Wl,-rpath,@executable_path");
        }

        // Weakly link ReplayKit to ensure Zed can be used on macOS 10.15+.
        println!("cargo:rustc-link-arg=-Wl,-weak_framework,ReplayKit");

        // Seems to be required to enable Swift concurrency
        println!("cargo:rustc-link-arg=-Wl,-rpath,/usr/lib/swift");

        // Register exported Objective-C selectors, protocols, etc
        println!("cargo:rustc-link-arg=-Wl,-ObjC");
    }

    // Populate git sha environment variable if git is available
    println!("cargo:rerun-if-changed=../../.git/logs/HEAD");
    println!(
        "cargo:rustc-env=TARGET={}",
        std::env::var("TARGET").unwrap()
    );
    if let Ok(output) = Command::new("git").args(["rev-parse", "HEAD"]).output() {
        if output.status.success() {
            let git_sha = String::from_utf8_lossy(&output.stdout);
            let git_sha = git_sha.trim();

            println!("cargo:rustc-env=ZED_COMMIT_SHA={git_sha}");

            if let Ok(build_profile) = std::env::var("PROFILE") {
                if build_profile == "release" {
                    // This is currently the best way to make `cargo build ...`'s build script
                    // to print something to stdout without extra verbosity.
                    println!(
                        "cargo:warning=Info: using '{git_sha}' hash for ZED_COMMIT_SHA env var"
                    );
                }
            }
        }
    }

    #[cfg(target_os = "windows")]
    {
        #[cfg(target_env = "msvc")]
        {
            // todo(windows): This is to avoid stack overflow. Remove it when solved.
            println!("cargo:rustc-link-arg=/stack:{}", 8 * 1024 * 1024);
        }
    }
}
252
crates/assistant_eval/src/eval.rs
Normal file
@@ -0,0 +1,252 @@
|
||||
use crate::headless_assistant::{HeadlessAppState, HeadlessAssistant};
|
||||
use anyhow::anyhow;
|
||||
use assistant2::RequestKind;
|
||||
use collections::HashMap;
|
||||
use gpui::{App, Task};
|
||||
use language_model::{LanguageModel, TokenUsage};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{
|
||||
fs,
|
||||
io::Write,
|
||||
path::{Path, PathBuf},
|
||||
sync::Arc,
|
||||
time::Duration,
|
||||
};
|
||||
use util::command::new_smol_command;
|
||||
|
||||
pub struct Eval {
|
||||
pub name: String,
|
||||
pub path: PathBuf,
|
||||
pub repo_path: PathBuf,
|
||||
pub eval_setup: EvalSetup,
|
||||
pub user_prompt: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
pub struct EvalOutput {
|
||||
pub diff: String,
|
||||
pub last_message: String,
|
||||
pub elapsed_time: Duration,
|
||||
pub assistant_response_count: usize,
|
||||
pub tool_use_counts: HashMap<Arc<str>, u32>,
|
||||
pub token_usage: TokenUsage,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct EvalSetup {
|
||||
pub url: String,
|
||||
pub base_sha: String,
|
||||
}
|
||||
|
||||
impl Eval {
|
||||
/// Loads the eval from a path (typically in `evaluation_data`). Clones and checks out the repo
|
||||
/// if necessary.
|
||||
pub async fn load(name: String, path: PathBuf, repos_dir: &Path) -> anyhow::Result<Self> {
|
||||
let prompt_path = path.join("prompt.txt");
|
||||
let user_prompt = smol::unblock(|| std::fs::read_to_string(prompt_path)).await?;
|
||||
let setup_path = path.join("setup.json");
|
||||
let setup_contents = smol::unblock(|| std::fs::read_to_string(setup_path)).await?;
|
||||
let eval_setup = serde_json_lenient::from_str_lenient::<EvalSetup>(&setup_contents)?;
|
||||
let repo_path = repos_dir.join(repo_dir_name(&eval_setup.url));
|
||||
Ok(Eval {
|
||||
name,
|
||||
path,
|
||||
repo_path,
|
||||
eval_setup,
|
||||
user_prompt,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn run(
|
||||
self,
|
||||
app_state: Arc<HeadlessAppState>,
|
||||
model: Arc<dyn LanguageModel>,
|
||||
cx: &mut App,
|
||||
) -> Task<anyhow::Result<EvalOutput>> {
|
||||
cx.spawn(async move |cx| {
|
||||
checkout_repo(&self.eval_setup, &self.repo_path).await?;
|
||||
|
||||
let (assistant, done_rx) =
|
||||
cx.update(|cx| HeadlessAssistant::new(app_state.clone(), cx))??;
|
||||
|
||||
let _worktree = assistant
|
||||
.update(cx, |assistant, cx| {
|
||||
assistant.project.update(cx, |project, cx| {
|
||||
project.create_worktree(&self.repo_path, true, cx)
|
||||
})
|
||||
})?
|
||||
.await?;
|
||||
|
||||
let start_time = std::time::SystemTime::now();
|
||||
|
||||
assistant.update(cx, |assistant, cx| {
|
||||
assistant.thread.update(cx, |thread, cx| {
|
||||
let context = vec![];
|
||||
thread.insert_user_message(self.user_prompt.clone(), context, None, cx);
|
||||
thread.send_to_model(model, RequestKind::Chat, cx);
|
||||
});
|
||||
})?;
|
||||
|
||||
done_rx.recv().await??;
|
||||
|
||||
let elapsed_time = start_time.elapsed()?;
|
||||
|
||||
let diff = query_git(&self.repo_path, vec!["diff"]).await?;
|
||||
|
||||
assistant.update(cx, |assistant, cx| {
|
||||
let thread = assistant.thread.read(cx);
|
||||
let last_message = thread.messages().last().unwrap();
|
||||
if last_message.role != language_model::Role::Assistant {
|
||||
return Err(anyhow!("Last message is not from assistant"));
|
||||
}
|
||||
let assistant_response_count = thread
|
||||
.messages()
|
||||
.filter(|message| message.role == language_model::Role::Assistant)
|
||||
.count();
|
||||
Ok(EvalOutput {
|
||||
diff,
|
||||
last_message: last_message.text.clone(),
|
||||
elapsed_time,
|
||||
assistant_response_count,
|
||||
tool_use_counts: assistant.tool_use_counts.clone(),
|
||||
token_usage: thread.cumulative_token_usage(),
|
||||
})
|
||||
})?
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl EvalOutput {
|
||||
// Method to save the output to a directory
|
||||
pub fn save_to_directory(
|
||||
&self,
|
||||
output_dir: &Path,
|
||||
eval_output_value: String,
|
||||
) -> anyhow::Result<()> {
|
||||
// Create the output directory if it doesn't exist
|
||||
fs::create_dir_all(&output_dir)?;
|
||||
|
||||
// Save the diff to a file
|
||||
let diff_path = output_dir.join("diff.patch");
|
||||
let mut diff_file = fs::File::create(&diff_path)?;
|
||||
diff_file.write_all(self.diff.as_bytes())?;
|
||||
|
||||
// Save the last message to a file
|
||||
let message_path = output_dir.join("assistant_response.txt");
|
||||
let mut message_file = fs::File::create(&message_path)?;
|
||||
message_file.write_all(self.last_message.as_bytes())?;
|
||||
|
||||
// Current metrics for this run
|
||||
let current_metrics = serde_json::json!({
|
||||
"elapsed_time_ms": self.elapsed_time.as_millis(),
|
||||
"assistant_response_count": self.assistant_response_count,
|
||||
"tool_use_counts": self.tool_use_counts,
|
||||
"token_usage": self.token_usage,
|
||||
"eval_output_value": eval_output_value,
|
||||
});
|
||||
|
||||
// Get current timestamp in milliseconds
|
||||
let timestamp = std::time::SystemTime::now()
|
||||
.duration_since(std::time::UNIX_EPOCH)?
|
||||
.as_millis()
|
||||
.to_string();
|
||||
|
||||
// Path to metrics file
|
||||
let metrics_path = output_dir.join("metrics.json");
|
||||
|
||||
// Load existing metrics if the file exists, or create a new object
|
||||
let mut historical_metrics = if metrics_path.exists() {
|
||||
let metrics_content = fs::read_to_string(&metrics_path)?;
|
||||
serde_json::from_str::<serde_json::Value>(&metrics_content)
|
||||
.unwrap_or_else(|_| serde_json::json!({}))
|
||||
} else {
|
||||
serde_json::json!({})
|
||||
};
|
||||
|
||||
// Add new run with timestamp as key
|
||||
if let serde_json::Value::Object(ref mut map) = historical_metrics {
|
||||
map.insert(timestamp, current_metrics);
|
||||
}
|
||||
|
||||
// Write updated metrics back to file
|
||||
let metrics_json = serde_json::to_string_pretty(&historical_metrics)?;
|
||||
let mut metrics_file = fs::File::create(&metrics_path)?;
|
||||
metrics_file.write_all(metrics_json.as_bytes())?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
fn repo_dir_name(url: &str) -> String {
|
||||
url.trim_start_matches("https://")
|
||||
.replace(|c: char| !c.is_alphanumeric(), "_")
|
||||
}
|
||||
|
||||
async fn checkout_repo(eval_setup: &EvalSetup, repo_path: &Path) -> anyhow::Result<()> {
|
||||
if !repo_path.exists() {
|
||||
smol::unblock({
|
||||
let repo_path = repo_path.to_path_buf();
|
||||
|| std::fs::create_dir_all(repo_path)
|
||||
})
|
||||
.await?;
|
||||
run_git(repo_path, vec!["init"]).await?;
|
||||
run_git(repo_path, vec!["remote", "add", "origin", &eval_setup.url]).await?;
|
||||
} else {
|
||||
let actual_origin = query_git(repo_path, vec!["remote", "get-url", "origin"]).await?;
|
||||
if actual_origin != eval_setup.url {
|
||||
return Err(anyhow!(
|
||||
"remote origin {} does not match expected origin {}",
|
||||
actual_origin,
|
||||
eval_setup.url
|
||||
));
|
||||
}
|
||||
|
||||
// TODO: consider including "-x" to remove ignored files. The downside of this is that it will
|
||||
// also remove build artifacts, and so prevent incremental reuse there.
|
||||
run_git(repo_path, vec!["clean", "--force", "-d"]).await?;
|
||||
run_git(repo_path, vec!["reset", "--hard", "HEAD"]).await?;
|
||||
}
|
||||
|
||||
run_git(
|
||||
repo_path,
|
||||
vec!["fetch", "--depth", "1", "origin", &eval_setup.base_sha],
|
||||
)
|
||||
.await?;
|
||||
run_git(repo_path, vec!["checkout", &eval_setup.base_sha]).await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn run_git(repo_path: &Path, args: Vec<&str>) -> anyhow::Result<()> {
|
||||
let exit_status = new_smol_command("git")
|
||||
.current_dir(repo_path)
|
||||
.args(args.clone())
|
||||
.status()
|
||||
.await?;
|
||||
if exit_status.success() {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(anyhow!(
|
||||
"`git {}` failed with {}",
|
||||
args.join(" "),
|
||||
exit_status,
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
async fn query_git(repo_path: &Path, args: Vec<&str>) -> anyhow::Result<String> {
|
||||
let output = new_smol_command("git")
|
||||
.current_dir(repo_path)
|
||||
.args(args.clone())
|
||||
.output()
|
||||
.await?;
|
||||
if output.status.success() {
|
||||
Ok(String::from_utf8(output.stdout)?.trim().to_string())
|
||||
} else {
|
||||
Err(anyhow!(
|
||||
"`git {}` failed with {}",
|
||||
args.join(" "),
|
||||
output.status
|
||||
))
|
||||
}
|
||||
}
|
||||
241
crates/assistant_eval/src/headless_assistant.rs
Normal file
@@ -0,0 +1,241 @@
|
||||
use anyhow::anyhow;
|
||||
use assistant2::{RequestKind, Thread, ThreadEvent, ThreadStore};
|
||||
use assistant_tool::ToolWorkingSet;
|
||||
use client::{Client, UserStore};
|
||||
use collections::HashMap;
|
||||
use futures::StreamExt;
|
||||
use gpui::{prelude::*, App, AsyncApp, Entity, SemanticVersion, Subscription, Task};
|
||||
use language::LanguageRegistry;
|
||||
use language_model::{
|
||||
AuthenticateError, LanguageModel, LanguageModelProviderId, LanguageModelRegistry,
|
||||
LanguageModelRequest,
|
||||
};
|
||||
use node_runtime::NodeRuntime;
|
||||
use project::{Project, RealFs};
|
||||
use prompt_store::PromptBuilder;
|
||||
use settings::SettingsStore;
|
||||
use smol::channel;
|
||||
use std::sync::Arc;
|
||||
|
||||
/// Subset of `workspace::AppState` needed by `HeadlessAssistant`, with additional fields.
|
||||
pub struct HeadlessAppState {
|
||||
pub languages: Arc<LanguageRegistry>,
|
||||
pub client: Arc<Client>,
|
||||
pub user_store: Entity<UserStore>,
|
||||
pub fs: Arc<dyn fs::Fs>,
|
||||
pub node_runtime: NodeRuntime,
|
||||
|
||||
// Additional fields not present in `workspace::AppState`.
|
||||
pub prompt_builder: Arc<PromptBuilder>,
|
||||
}
|
||||
|
||||
pub struct HeadlessAssistant {
|
||||
pub thread: Entity<Thread>,
|
||||
pub project: Entity<Project>,
|
||||
#[allow(dead_code)]
|
||||
pub thread_store: Entity<ThreadStore>,
|
||||
pub tool_use_counts: HashMap<Arc<str>, u32>,
|
||||
pub done_tx: channel::Sender<anyhow::Result<()>>,
|
||||
_subscription: Subscription,
|
||||
}
|
||||
|
||||
impl HeadlessAssistant {
|
||||
pub fn new(
|
||||
app_state: Arc<HeadlessAppState>,
|
||||
cx: &mut App,
|
||||
) -> anyhow::Result<(Entity<Self>, channel::Receiver<anyhow::Result<()>>)> {
|
||||
let env = None;
|
||||
let project = Project::local(
|
||||
app_state.client.clone(),
|
||||
app_state.node_runtime.clone(),
|
||||
app_state.user_store.clone(),
|
||||
app_state.languages.clone(),
|
||||
app_state.fs.clone(),
|
||||
env,
|
||||
cx,
|
||||
);
|
||||
|
||||
let tools = Arc::new(ToolWorkingSet::default());
|
||||
let thread_store =
|
||||
ThreadStore::new(project.clone(), tools, app_state.prompt_builder.clone(), cx)?;
|
||||
|
||||
let thread = thread_store.update(cx, |thread_store, cx| thread_store.create_thread(cx));
|
||||
|
||||
let (done_tx, done_rx) = channel::unbounded::<anyhow::Result<()>>();
|
||||
|
||||
let headless_thread = cx.new(move |cx| Self {
|
||||
_subscription: cx.subscribe(&thread, Self::handle_thread_event),
|
||||
thread,
|
||||
project,
|
||||
thread_store,
|
||||
tool_use_counts: HashMap::default(),
|
||||
done_tx,
|
||||
});
|
||||
|
||||
Ok((headless_thread, done_rx))
|
||||
}
|
||||
|
||||
fn handle_thread_event(
|
||||
&mut self,
|
||||
thread: Entity<Thread>,
|
||||
event: &ThreadEvent,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
match event {
|
||||
ThreadEvent::ShowError(err) => self
|
||||
.done_tx
|
||||
.send_blocking(Err(anyhow!("{:?}", err)))
|
||||
.unwrap(),
|
||||
ThreadEvent::DoneStreaming => {
|
||||
let thread = thread.read(cx);
|
||||
if let Some(message) = thread.messages().last() {
|
||||
println!("Message: {}", message.text,);
|
||||
}
|
||||
if thread.all_tools_finished() {
|
||||
self.done_tx.send_blocking(Ok(())).unwrap()
|
||||
}
|
||||
}
|
||||
ThreadEvent::UsePendingTools => {
|
||||
thread.update(cx, |thread, cx| {
|
||||
thread.use_pending_tools(cx);
|
||||
});
|
||||
}
|
||||
ThreadEvent::ToolFinished {
|
||||
tool_use_id,
|
||||
pending_tool_use,
|
||||
..
|
||||
} => {
|
||||
if let Some(pending_tool_use) = pending_tool_use {
|
||||
println!(
|
||||
"Used tool {} with input: {}",
|
||||
pending_tool_use.name, pending_tool_use.input
|
||||
);
|
||||
*self
|
||||
.tool_use_counts
|
||||
.entry(pending_tool_use.name.clone())
|
||||
.or_insert(0) += 1;
|
||||
}
|
||||
if let Some(tool_result) = thread.read(cx).tool_result(tool_use_id) {
|
||||
println!("Tool result: {:?}", tool_result);
|
||||
}
|
||||
if thread.read(cx).all_tools_finished() {
|
||||
let model_registry = LanguageModelRegistry::read_global(cx);
|
||||
if let Some(model) = model_registry.active_model() {
|
||||
thread.update(cx, |thread, cx| {
|
||||
thread.attach_tool_results(vec![], cx);
|
||||
thread.send_to_model(model, RequestKind::Chat, cx);
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
ThreadEvent::StreamedCompletion
|
||||
| ThreadEvent::SummaryChanged
|
||||
| ThreadEvent::StreamedAssistantText(_, _)
|
||||
| ThreadEvent::MessageAdded(_)
|
||||
| ThreadEvent::MessageEdited(_)
|
||||
| ThreadEvent::MessageDeleted(_) => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn init(cx: &mut App) -> Arc<HeadlessAppState> {
|
||||
release_channel::init(SemanticVersion::default(), cx);
|
||||
gpui_tokio::init(cx);
|
||||
|
||||
let mut settings_store = SettingsStore::new(cx);
|
||||
settings_store
|
||||
.set_default_settings(settings::default_settings().as_ref(), cx)
|
||||
.unwrap();
|
||||
cx.set_global(settings_store);
|
||||
client::init_settings(cx);
|
||||
Project::init_settings(cx);
|
||||
|
||||
let client = Client::production(cx);
|
||||
cx.set_http_client(client.http_client().clone());
|
||||
|
||||
let git_binary_path = None;
|
||||
let fs = Arc::new(RealFs::new(git_binary_path));
|
||||
|
||||
let languages = Arc::new(LanguageRegistry::new(cx.background_executor().clone()));
|
||||
|
||||
let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
|
||||
|
||||
language::init(cx);
|
||||
language_model::init(client.clone(), cx);
|
||||
language_models::init(user_store.clone(), client.clone(), fs.clone(), cx);
|
||||
assistant_tools::init(client.http_client().clone(), cx);
|
||||
context_server::init(cx);
|
||||
let stdout_is_a_pty = false;
|
||||
let prompt_builder = PromptBuilder::load(fs.clone(), stdout_is_a_pty, cx);
|
||||
assistant2::init(fs.clone(), client.clone(), prompt_builder.clone(), cx);
|
||||
|
||||
Arc::new(HeadlessAppState {
|
||||
languages,
|
||||
client,
|
||||
user_store,
|
||||
fs,
|
||||
node_runtime: NodeRuntime::unavailable(),
|
||||
prompt_builder,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn find_model(model_name: &str, cx: &App) -> anyhow::Result<Arc<dyn LanguageModel>> {
|
||||
let model_registry = LanguageModelRegistry::read_global(cx);
|
||||
let model = model_registry
|
||||
.available_models(cx)
|
||||
.find(|model| model.id().0 == model_name);
|
||||
|
||||
let Some(model) = model else {
|
||||
return Err(anyhow!(
|
||||
"No language model named {} was available. Available models: {}",
|
||||
model_name,
|
||||
model_registry
|
||||
.available_models(cx)
|
||||
.map(|model| model.id().0.clone())
|
||||
.collect::<Vec<_>>()
|
||||
.join(", ")
|
||||
));
|
||||
};
|
||||
|
||||
Ok(model)
|
||||
}
|
||||
|
||||
pub fn authenticate_model_provider(
|
||||
provider_id: LanguageModelProviderId,
|
||||
cx: &mut App,
|
||||
) -> Task<std::result::Result<(), AuthenticateError>> {
|
||||
let model_registry = LanguageModelRegistry::read_global(cx);
|
||||
let model_provider = model_registry.provider(&provider_id).unwrap();
|
||||
model_provider.authenticate(cx)
|
||||
}
|
||||
|
||||
pub async fn send_language_model_request(
|
||||
model: Arc<dyn LanguageModel>,
|
||||
request: LanguageModelRequest,
|
||||
cx: &mut AsyncApp,
|
||||
) -> anyhow::Result<String> {
|
||||
match model.stream_completion_text(request, &cx).await {
|
||||
Ok(mut stream) => {
|
||||
let mut full_response = String::new();
|
||||
|
||||
// Process the response stream
|
||||
while let Some(chunk_result) = stream.stream.next().await {
|
||||
match chunk_result {
|
||||
Ok(chunk_str) => {
|
||||
full_response.push_str(&chunk_str);
|
||||
}
|
||||
Err(err) => {
|
||||
return Err(anyhow!(
|
||||
"Error receiving response from language model: {err}"
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(full_response)
|
||||
}
|
||||
Err(err) => Err(anyhow!(
|
||||
"Failed to get response from language model. Error was: {err}"
|
||||
)),
|
||||
}
|
||||
}
|
||||
121
crates/assistant_eval/src/judge.rs
Normal file
@@ -0,0 +1,121 @@
|
||||
use crate::eval::EvalOutput;
|
||||
use crate::headless_assistant::send_language_model_request;
|
||||
use anyhow::anyhow;
|
||||
use gpui::{App, Task};
|
||||
use language_model::{
|
||||
LanguageModel, LanguageModelRequest, LanguageModelRequestMessage, MessageContent, Role,
|
||||
};
|
||||
use std::{path::Path, sync::Arc};
|
||||
|
||||
pub struct Judge {
|
||||
pub original_diff: Option<String>,
|
||||
#[allow(dead_code)]
|
||||
pub original_message: Option<String>,
|
||||
pub model: Arc<dyn LanguageModel>,
|
||||
}
|
||||
|
||||
impl Judge {
|
||||
pub async fn load(eval_path: &Path, model: Arc<dyn LanguageModel>) -> anyhow::Result<Judge> {
|
||||
let original_diff_path = eval_path.join("original.diff");
|
||||
let original_diff = smol::unblock(move || {
|
||||
if std::fs::exists(&original_diff_path)? {
|
||||
anyhow::Ok(Some(std::fs::read_to_string(&original_diff_path)?))
|
||||
} else {
|
||||
anyhow::Ok(None)
|
||||
}
|
||||
});
|
||||
|
||||
let original_message_path = eval_path.join("original_message.txt");
|
||||
let original_message = smol::unblock(move || {
|
||||
if std::fs::exists(&original_message_path)? {
|
||||
anyhow::Ok(Some(std::fs::read_to_string(&original_message_path)?))
|
||||
} else {
|
||||
anyhow::Ok(None)
|
||||
}
|
||||
});
|
||||
|
||||
Ok(Self {
|
||||
original_diff: original_diff.await?,
|
||||
original_message: original_message.await?,
|
||||
model,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn run(&self, eval_output: &EvalOutput, cx: &mut App) -> Task<anyhow::Result<String>> {
|
||||
let Some(original_diff) = self.original_diff.as_ref() else {
|
||||
return Task::ready(Err(anyhow!("No original.diff found")));
|
||||
};
|
||||
|
||||
// TODO: check for empty diff?
|
||||
let prompt = diff_comparison_prompt(&original_diff, &eval_output.diff);
|
||||
|
||||
let request = LanguageModelRequest {
|
||||
messages: vec![LanguageModelRequestMessage {
|
||||
role: Role::User,
|
||||
content: vec![MessageContent::Text(prompt)],
|
||||
cache: false,
|
||||
}],
|
||||
temperature: Some(0.0),
|
||||
tools: Vec::new(),
|
||||
stop: Vec::new(),
|
||||
};
|
||||
|
||||
let model = self.model.clone();
|
||||
cx.spawn(async move |cx| send_language_model_request(model, request, cx).await)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn diff_comparison_prompt(original_diff: &str, new_diff: &str) -> String {
|
||||
format!(
|
||||
r#"# Git Diff Similarity Evaluation Template
|
||||
|
||||
## Instructions
|
||||
|
||||
Compare the two diffs and score them between 0.0 and 1.0 based on their functional similarity.
|
||||
- 1.0 = Perfect functional match (achieves identical results)
|
||||
- 0.0 = No functional similarity whatsoever
|
||||
|
||||
## Evaluation Criteria
|
||||
|
||||
Please consider the following aspects in order of importance:
|
||||
|
||||
1. **Functional Equivalence (60%)**
|
||||
- Do both diffs achieve the same end result?
|
||||
- Are the changes functionally equivalent despite possibly using different approaches?
|
||||
- Do the modifications address the same issues or implement the same features?
|
||||
|
||||
2. **Logical Structure (20%)**
|
||||
- Are the logical flows similar?
|
||||
- Do the modifications affect the same code paths?
|
||||
- Are control structures (if/else, loops, etc.) modified in similar ways?
|
||||
|
||||
3. **Code Content (15%)**
|
||||
- Are similar lines added/removed?
|
||||
- Are the same variables, functions, or methods being modified?
|
||||
- Are the same APIs or libraries being used?
|
||||
|
||||
4. **File Layout (5%)**
|
||||
- Are the same files being modified?
|
||||
- Are changes occurring in similar locations within files?
|
||||
|
||||
## Input
|
||||
|
||||
Original Diff:
|
||||
```git
|
||||
{}
|
||||
```
|
||||
|
||||
New Diff:
|
||||
```git
|
||||
{}
|
||||
```
|
||||
|
||||
## Output Format
|
||||
|
||||
THE ONLY OUTPUT SHOULD BE A SCORE BETWEEN 0.0 AND 1.0.
|
||||
|
||||
Example output:
|
||||
0.85"#,
|
||||
original_diff, new_diff
|
||||
)
|
||||
}
|
||||
243
crates/assistant_eval/src/main.rs
Normal file
@@ -0,0 +1,243 @@
|
||||
mod eval;
|
||||
mod headless_assistant;
|
||||
mod judge;
|
||||
|
||||
use clap::Parser;
|
||||
use eval::{Eval, EvalOutput};
|
||||
use futures::future;
|
||||
use gpui::{Application, AsyncApp};
|
||||
use headless_assistant::{authenticate_model_provider, find_model, HeadlessAppState};
|
||||
use itertools::Itertools;
|
||||
use judge::Judge;
|
||||
use language_model::{LanguageModel, LanguageModelRegistry};
|
||||
use regex::Regex;
|
||||
use reqwest_client::ReqwestClient;
|
||||
use std::{cmp, path::PathBuf, sync::Arc};
|
||||
|
||||
#[derive(Parser, Debug)]
|
||||
#[command(
|
||||
name = "assistant_eval",
|
||||
disable_version_flag = true,
|
||||
before_help = "Tool eval runner"
|
||||
)]
|
||||
struct Args {
|
||||
/// Regexes to match the names of evals to run.
|
||||
eval_name_regexes: Vec<String>,
|
||||
/// Runs all evals in `evaluation_data`, causes the regex to be ignored.
|
||||
#[arg(long)]
|
||||
all: bool,
|
||||
/// Name of the model (default: "claude-3-7-sonnet-latest")
|
||||
#[arg(long, default_value = "claude-3-7-sonnet-latest")]
|
||||
model_name: String,
|
||||
/// Name of the editor model (default: value of `--model_name`).
|
||||
#[arg(long)]
|
||||
editor_model_name: Option<String>,
|
||||
/// Name of the judge model (default: value of `--model_name`).
|
||||
#[arg(long)]
|
||||
judge_model_name: Option<String>,
|
||||
/// Number of evaluations to run concurrently (default: 10)
|
||||
#[arg(short, long, default_value = "10")]
|
||||
concurrency: usize,
|
||||
}
|
||||
|
||||
fn main() {
|
||||
env_logger::init();
|
||||
let args = Args::parse();
|
||||
let http_client = Arc::new(ReqwestClient::new());
|
||||
let app = Application::headless().with_http_client(http_client.clone());
|
||||
|
||||
let crate_dir = PathBuf::from("../zed-agent-bench");
|
||||
let evaluation_data_dir = crate_dir.join("evaluation_data").canonicalize().unwrap();
|
||||
|
||||
let repos_dir = crate_dir.join("repos");
|
||||
if !repos_dir.exists() {
|
||||
std::fs::create_dir_all(&repos_dir).unwrap();
|
||||
}
|
||||
let repos_dir = repos_dir.canonicalize().unwrap();
|
||||
|
||||
let all_evals = std::fs::read_dir(&evaluation_data_dir)
|
||||
.unwrap()
|
||||
.map(|path| path.unwrap().file_name().to_string_lossy().to_string())
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let evals_to_run = if args.all {
|
||||
all_evals
|
||||
} else {
|
||||
args.eval_name_regexes
|
||||
.into_iter()
|
||||
.map(|regex_string| Regex::new(®ex_string).unwrap())
|
||||
.flat_map(|regex| {
|
||||
all_evals
|
||||
.iter()
|
||||
.filter(|eval_name| regex.is_match(eval_name))
|
||||
.cloned()
|
||||
.collect::<Vec<_>>()
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
};
|
||||
|
||||
if evals_to_run.is_empty() {
|
||||
panic!("Names of evals to run must be provided or `--all` specified");
|
||||
}
|
||||
|
||||
println!("Will run the following evals: {evals_to_run:?}");
|
||||
println!("Running up to {} evals concurrently", args.concurrency);
|
||||
|
||||
let editor_model_name = if let Some(model_name) = args.editor_model_name {
|
||||
model_name
|
||||
} else {
|
||||
args.model_name.clone()
|
||||
};
|
||||
|
||||
let judge_model_name = if let Some(model_name) = args.judge_model_name {
|
||||
model_name
|
||||
} else {
|
||||
args.model_name.clone()
|
||||
};
|
||||
|
||||
app.run(move |cx| {
|
||||
let app_state = headless_assistant::init(cx);
|
||||
|
||||
let model = find_model(&args.model_name, cx).unwrap();
|
||||
let editor_model = find_model(&editor_model_name, cx).unwrap();
|
||||
let judge_model = find_model(&judge_model_name, cx).unwrap();
|
||||
|
||||
LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
|
||||
registry.set_active_model(Some(model.clone()), cx);
|
||||
registry.set_editor_model(Some(editor_model.clone()), cx);
|
||||
});
|
||||
|
||||
let model_provider_id = model.provider_id();
|
||||
let editor_model_provider_id = editor_model.provider_id();
|
||||
let judge_model_provider_id = judge_model.provider_id();
|
||||
|
||||
cx.spawn(async move |cx| {
|
||||
// Authenticate all model providers first
|
||||
cx.update(|cx| authenticate_model_provider(model_provider_id.clone(), cx))
|
||||
.unwrap()
|
||||
.await
|
||||
.unwrap();
|
||||
cx.update(|cx| authenticate_model_provider(editor_model_provider_id.clone(), cx))
|
||||
.unwrap()
|
||||
.await
|
||||
.unwrap();
|
||||
cx.update(|cx| authenticate_model_provider(judge_model_provider_id.clone(), cx))
|
||||
.unwrap()
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let eval_load_futures = evals_to_run
|
||||
.into_iter()
|
||||
.map(|eval_name| {
|
||||
let eval_path = evaluation_data_dir.join(&eval_name);
|
||||
let load_future = Eval::load(eval_name.clone(), eval_path, &repos_dir);
|
||||
async move {
|
||||
match load_future.await {
|
||||
Ok(eval) => Some(eval),
|
||||
Err(err) => {
|
||||
// TODO: Persist errors / surface errors at the end.
|
||||
println!("Error loading {eval_name}: {err}");
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let loaded_evals = future::join_all(eval_load_futures)
|
||||
.await
|
||||
.into_iter()
|
||||
.flatten()
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
// The evals need to be loaded and grouped by URL before concurrently running, since
|
||||
// evals that use the same remote URL will use the same working directory.
|
||||
let mut evals_grouped_by_url: Vec<Vec<Eval>> = loaded_evals
|
||||
.into_iter()
|
||||
.map(|eval| (eval.eval_setup.url.clone(), eval))
|
||||
.into_group_map()
|
||||
.into_values()
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
// Sort groups in descending order, so that bigger groups start first.
|
||||
evals_grouped_by_url.sort_by_key(|evals| cmp::Reverse(evals.len()));
|
||||
|
||||
let result_futures = evals_grouped_by_url
|
||||
.into_iter()
|
||||
.map(|evals| {
|
||||
let model = model.clone();
|
||||
let judge_model = judge_model.clone();
|
||||
let app_state = app_state.clone();
|
||||
let cx = cx.clone();
|
||||
|
||||
async move {
|
||||
let mut results = Vec::new();
|
||||
for eval in evals {
|
||||
let name = eval.name.clone();
|
||||
println!("Starting eval named {}", name);
|
||||
let result = run_eval(
|
||||
eval,
|
||||
model.clone(),
|
||||
judge_model.clone(),
|
||||
app_state.clone(),
|
||||
cx.clone(),
|
||||
)
|
||||
.await;
|
||||
results.push((name, result));
|
||||
}
|
||||
results
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let results = future::join_all(result_futures)
|
||||
.await
|
||||
.into_iter()
|
||||
.flatten()
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
// Process results in order of completion
|
||||
for (eval_name, result) in results {
|
||||
match result {
|
||||
Ok((eval_output, judge_output)) => {
|
||||
println!("Generated diff for {eval_name}:\n");
|
||||
println!("{}\n", eval_output.diff);
|
||||
println!("Last message for {eval_name}:\n");
|
||||
println!("{}\n", eval_output.last_message);
|
||||
println!("Elapsed time: {:?}", eval_output.elapsed_time);
|
||||
println!(
|
||||
"Assistant response count: {}",
|
||||
eval_output.assistant_response_count
|
||||
);
|
||||
println!("Tool use counts: {:?}", eval_output.tool_use_counts);
|
||||
println!("Judge output for {eval_name}: {judge_output}");
|
||||
}
|
||||
Err(err) => {
|
||||
// TODO: Persist errors / surface errors at the end.
|
||||
println!("Error running {eval_name}: {err}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
cx.update(|cx| cx.quit()).unwrap();
|
||||
})
|
||||
.detach();
|
||||
});
|
||||
|
||||
println!("Done running evals");
|
||||
}
|
||||
|
||||
async fn run_eval(
|
||||
eval: Eval,
|
||||
model: Arc<dyn LanguageModel>,
|
||||
judge_model: Arc<dyn LanguageModel>,
|
||||
app_state: Arc<HeadlessAppState>,
|
||||
cx: AsyncApp,
|
||||
) -> anyhow::Result<(EvalOutput, String)> {
|
||||
let path = eval.path.clone();
|
||||
let judge = Judge::load(&path, judge_model).await?;
|
||||
let eval_output = cx.update(|cx| eval.run(app_state, model, cx))?.await?;
|
||||
let judge_output = cx.update(|cx| judge.run(&eval_output, cx))?.await?;
|
||||
eval_output.save_to_directory(&path, judge_output.to_string())?;
|
||||
Ok((eval_output, judge_output))
|
||||
}
|
||||
@@ -1,7 +0,0 @@
|
||||
mod session;
|
||||
mod tag;
|
||||
|
||||
pub use session::*;
|
||||
pub use tag::*;
|
||||
|
||||
pub const SCRIPTING_PROMPT: &str = include_str!("./system_prompt.txt");
|
||||
@@ -1,953 +0,0 @@
|
||||
use anyhow::anyhow;
|
||||
use collections::{HashMap, HashSet};
|
||||
use futures::{
|
||||
channel::{mpsc, oneshot},
|
||||
pin_mut, SinkExt, StreamExt,
|
||||
};
|
||||
use gpui::{AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Task, WeakEntity};
|
||||
use mlua::{ExternalResult, Lua, MultiValue, Table, UserData, UserDataMethods};
|
||||
use parking_lot::Mutex;
|
||||
use project::{search::SearchQuery, Fs, Project};
|
||||
use regex::Regex;
|
||||
use std::{
|
||||
cell::RefCell,
|
||||
path::{Path, PathBuf},
|
||||
sync::Arc,
|
||||
};
|
||||
use util::{paths::PathMatcher, ResultExt};
|
||||
|
||||
use crate::{SCRIPT_END_TAG, SCRIPT_START_TAG};
|
||||
|
||||
struct ForegroundFn(Box<dyn FnOnce(WeakEntity<ScriptSession>, AsyncApp) + Send>);
|
||||
|
||||
pub struct ScriptSession {
|
||||
project: Entity<Project>,
|
||||
// TODO Remove this
|
||||
fs_changes: Arc<Mutex<HashMap<PathBuf, Vec<u8>>>>,
|
||||
foreground_fns_tx: mpsc::Sender<ForegroundFn>,
|
||||
_invoke_foreground_fns: Task<()>,
|
||||
scripts: Vec<Script>,
|
||||
}
|
||||
|
||||
impl ScriptSession {
|
||||
pub fn new(project: Entity<Project>, cx: &mut Context<Self>) -> Self {
|
||||
let (foreground_fns_tx, mut foreground_fns_rx) = mpsc::channel(128);
|
||||
ScriptSession {
|
||||
project,
|
||||
fs_changes: Arc::new(Mutex::new(HashMap::default())),
|
||||
foreground_fns_tx,
|
||||
_invoke_foreground_fns: cx.spawn(|this, cx| async move {
|
||||
while let Some(foreground_fn) = foreground_fns_rx.next().await {
|
||||
foreground_fn.0(this.clone(), cx.clone());
|
||||
}
|
||||
}),
|
||||
scripts: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn new_script(&mut self) -> ScriptId {
|
||||
let id = ScriptId(self.scripts.len() as u32);
|
||||
let script = Script {
|
||||
id,
|
||||
state: ScriptState::Generating,
|
||||
source: SharedString::new_static(""),
|
||||
};
|
||||
self.scripts.push(script);
|
||||
id
|
||||
}
|
||||
|
||||
pub fn run_script(
|
||||
&mut self,
|
||||
script_id: ScriptId,
|
||||
script_src: String,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Task<anyhow::Result<()>> {
|
||||
let script = self.get_mut(script_id);
|
||||
|
||||
let stdout = Arc::new(Mutex::new(String::new()));
|
||||
script.source = script_src.clone().into();
|
||||
script.state = ScriptState::Running {
|
||||
stdout: stdout.clone(),
|
||||
};
|
||||
|
||||
let task = self.run_lua(script_src, stdout, cx);
|
||||
|
||||
cx.emit(ScriptEvent::Spawned(script_id));
|
||||
|
||||
cx.spawn(|session, mut cx| async move {
|
||||
let result = task.await;
|
||||
|
||||
session.update(&mut cx, |session, cx| {
|
||||
let script = session.get_mut(script_id);
|
||||
let stdout = script.stdout_snapshot();
|
||||
|
||||
script.state = match result {
|
||||
Ok(()) => ScriptState::Succeeded { stdout },
|
||||
Err(error) => ScriptState::Failed { stdout, error },
|
||||
};
|
||||
|
||||
cx.emit(ScriptEvent::Exited(script_id))
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
fn run_lua(
|
||||
&mut self,
|
||||
script: String,
|
||||
stdout: Arc<Mutex<String>>,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Task<anyhow::Result<()>> {
|
||||
const SANDBOX_PREAMBLE: &str = include_str!("sandbox_preamble.lua");
|
||||
|
||||
// TODO Remove fs_changes
|
||||
let fs_changes = self.fs_changes.clone();
|
||||
// TODO Honor all worktrees instead of the first one
|
||||
let root_dir = self
|
||||
.project
|
||||
.read(cx)
|
||||
.visible_worktrees(cx)
|
||||
.next()
|
||||
.map(|worktree| worktree.read(cx).abs_path());
|
||||
|
||||
let fs = self.project.read(cx).fs().clone();
|
||||
let foreground_fns_tx = self.foreground_fns_tx.clone();
|
||||
|
||||
let task = cx.background_spawn({
|
||||
let stdout = stdout.clone();
|
||||
|
||||
async move {
|
||||
let lua = Lua::new();
|
||||
lua.set_memory_limit(2 * 1024 * 1024 * 1024)?; // 2 GB
|
||||
let globals = lua.globals();
|
||||
|
||||
// Use the project root dir as the script's current working dir.
|
||||
if let Some(root_dir) = &root_dir {
|
||||
if let Some(root_dir) = root_dir.to_str() {
|
||||
globals.set("cwd", root_dir)?;
|
||||
}
|
||||
}
|
||||
|
||||
globals.set(
|
||||
"sb_print",
|
||||
lua.create_function({
|
||||
let stdout = stdout.clone();
|
||||
move |_, args: MultiValue| Self::print(args, &stdout)
|
||||
})?,
|
||||
)?;
|
||||
globals.set(
|
||||
"search",
|
||||
lua.create_async_function({
|
||||
let foreground_fns_tx = foreground_fns_tx.clone();
|
||||
move |lua, regex| {
|
||||
let mut foreground_fns_tx = foreground_fns_tx.clone();
|
||||
let fs = fs.clone();
|
||||
async move {
|
||||
Self::search(&lua, &mut foreground_fns_tx, fs, regex)
|
||||
.await
|
||||
.into_lua_err()
|
||||
}
|
||||
}
|
||||
})?,
|
||||
)?;
|
||||
globals.set(
|
||||
"outline",
|
||||
lua.create_async_function({
|
||||
let root_dir = root_dir.clone();
|
||||
move |_lua, path| {
|
||||
let mut foreground_fns_tx = foreground_fns_tx.clone();
|
||||
let root_dir = root_dir.clone();
|
||||
async move {
|
||||
Self::outline(root_dir, &mut foreground_fns_tx, path)
|
||||
.await
|
||||
.into_lua_err()
|
||||
}
|
||||
}
|
||||
})?,
|
||||
)?;
|
||||
globals.set(
|
||||
"sb_io_open",
|
||||
lua.create_function({
|
||||
let fs_changes = fs_changes.clone();
|
||||
let root_dir = root_dir.clone();
|
||||
move |lua, (path_str, mode)| {
|
||||
Self::io_open(&lua, &fs_changes, root_dir.as_ref(), path_str, mode)
|
||||
}
|
||||
})?,
|
||||
)?;
|
||||
globals.set("user_script", script)?;
|
||||
|
||||
lua.load(SANDBOX_PREAMBLE).exec_async().await?;
|
||||
|
||||
// Drop Lua instance to decrement reference count.
|
||||
drop(lua);
|
||||
|
||||
anyhow::Ok(())
|
||||
}
|
||||
});
|
||||
|
||||
task
|
||||
}
|
||||
|
||||
pub fn get(&self, script_id: ScriptId) -> &Script {
|
||||
&self.scripts[script_id.0 as usize]
|
||||
}
|
||||
|
||||
fn get_mut(&mut self, script_id: ScriptId) -> &mut Script {
|
||||
&mut self.scripts[script_id.0 as usize]
|
||||
}
|
||||
|
||||
/// Sandboxed print() function in Lua.
|
||||
fn print(args: MultiValue, stdout: &Mutex<String>) -> mlua::Result<()> {
|
||||
for (index, arg) in args.into_iter().enumerate() {
|
||||
// Lua's `print()` prints tab characters between each argument.
|
||||
if index > 0 {
|
||||
stdout.lock().push('\t');
|
||||
}
|
||||
|
||||
// If the argument's to_string() fails, have the whole function call fail.
|
||||
stdout.lock().push_str(&arg.to_string()?);
|
||||
}
|
||||
stdout.lock().push('\n');
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Sandboxed io.open() function in Lua.
|
||||
fn io_open(
|
||||
lua: &Lua,
|
||||
fs_changes: &Arc<Mutex<HashMap<PathBuf, Vec<u8>>>>,
|
||||
root_dir: Option<&Arc<Path>>,
|
||||
path_str: String,
|
||||
mode: Option<String>,
|
||||
) -> mlua::Result<(Option<Table>, String)> {
|
||||
let root_dir = root_dir
|
||||
.ok_or_else(|| mlua::Error::runtime("cannot open file without a root directory"))?;
|
||||
|
||||
let mode = mode.unwrap_or_else(|| "r".to_string());
|
||||
|
||||
// Parse the mode string to determine read/write permissions
|
||||
let read_perm = mode.contains('r');
|
||||
let write_perm = mode.contains('w') || mode.contains('a') || mode.contains('+');
|
||||
let append = mode.contains('a');
|
||||
let truncate = mode.contains('w');
|
||||
|
||||
// This will be the Lua value returned from the `open` function.
|
||||
let file = lua.create_table()?;
|
||||
|
||||
// Store file metadata in the file
|
||||
file.set("__path", path_str.clone())?;
|
||||
file.set("__mode", mode.clone())?;
|
||||
file.set("__read_perm", read_perm)?;
|
||||
file.set("__write_perm", write_perm)?;
|
||||
|
||||
let path = match Self::parse_abs_path_in_root_dir(&root_dir, &path_str) {
|
||||
Ok(path) => path,
|
||||
Err(err) => return Ok((None, format!("{err}"))),
|
||||
};
|
||||
|
||||
// close method
|
||||
let close_fn = {
|
||||
let fs_changes = fs_changes.clone();
|
||||
lua.create_function(move |_lua, file_userdata: mlua::Table| {
|
||||
let write_perm = file_userdata.get::<bool>("__write_perm")?;
|
||||
let path = file_userdata.get::<String>("__path")?;
|
||||
|
||||
if write_perm {
|
||||
// When closing a writable file, record the content
|
||||
let content = file_userdata.get::<mlua::AnyUserData>("__content")?;
|
||||
let content_ref = content.borrow::<FileContent>()?;
|
||||
let content_vec = content_ref.0.borrow();
|
||||
|
||||
// Don't actually write to disk; instead, just update fs_changes.
|
||||
let path_buf = PathBuf::from(&path);
|
||||
fs_changes
|
||||
.lock()
|
||||
.insert(path_buf.clone(), content_vec.clone());
|
||||
}
|
||||
|
||||
Ok(true)
|
||||
})?
|
||||
};
|
||||
file.set("close", close_fn)?;
|
||||
|
||||
// If it's a directory, give it a custom read() and return early.
|
||||
if path.is_dir() {
|
||||
// TODO handle the case where we changed it in the in-memory fs
|
||||
|
||||
// Create a special directory handle
|
||||
file.set("__is_directory", true)?;
|
||||
|
||||
// Store directory entries
|
||||
let entries = match std::fs::read_dir(&path) {
|
||||
Ok(entries) => {
|
||||
let mut entry_names = Vec::new();
|
||||
for entry in entries.flatten() {
|
||||
entry_names.push(entry.file_name().to_string_lossy().into_owned());
|
||||
}
|
||||
entry_names
|
||||
}
|
||||
Err(e) => return Ok((None, format!("Error reading directory: {}", e))),
|
||||
};
|
||||
|
||||
// Save the list of entries
|
||||
file.set("__dir_entries", entries)?;
|
||||
file.set("__dir_position", 0usize)?;
|
||||
|
||||
// Create a directory-specific read function
|
||||
let read_fn = lua.create_function(|_lua, file_userdata: mlua::Table| {
|
||||
let position = file_userdata.get::<usize>("__dir_position")?;
|
||||
let entries = file_userdata.get::<Vec<String>>("__dir_entries")?;
|
||||
|
||||
if position >= entries.len() {
|
||||
return Ok(None); // No more entries
|
||||
}
|
||||
|
||||
let entry = entries[position].clone();
|
||||
file_userdata.set("__dir_position", position + 1)?;
|
||||
|
||||
Ok(Some(entry))
|
||||
})?;
|
||||
file.set("read", read_fn)?;
|
||||
|
||||
// If we got this far, the directory was opened successfully
|
||||
return Ok((Some(file), String::new()));
|
||||
}
|
||||
|
||||
let fs_changes_map = fs_changes.lock();
|
||||
|
||||
        let is_in_changes = fs_changes_map.contains_key(&path);
        let file_exists = is_in_changes || path.exists();
        let mut file_content = Vec::new();

        if file_exists && !truncate {
            if is_in_changes {
                file_content = fs_changes_map.get(&path).unwrap().clone();
            } else {
                // Try to read existing content if file exists and we're not truncating
                match std::fs::read(&path) {
                    Ok(content) => file_content = content,
                    Err(e) => return Ok((None, format!("Error reading file: {}", e))),
                }
            }
        }

        drop(fs_changes_map); // Unlock the fs_changes mutex.

        // If in append mode, position should be at the end
        let position = if append && file_exists {
            file_content.len()
        } else {
            0
        };
        file.set("__position", position)?;
        file.set(
            "__content",
            lua.create_userdata(FileContent(RefCell::new(file_content)))?,
        )?;

        // Create file methods

        // read method
        let read_fn = {
            lua.create_function(
                |_lua, (file_userdata, format): (mlua::Table, Option<mlua::Value>)| {
                    let read_perm = file_userdata.get::<bool>("__read_perm")?;
                    if !read_perm {
                        return Err(mlua::Error::runtime("File not open for reading"));
                    }

                    let content = file_userdata.get::<mlua::AnyUserData>("__content")?;
                    let mut position = file_userdata.get::<usize>("__position")?;
                    let content_ref = content.borrow::<FileContent>()?;
                    let content_vec = content_ref.0.borrow();

                    if position >= content_vec.len() {
                        return Ok(None); // EOF
                    }

                    match format {
                        Some(mlua::Value::String(s)) => {
                            let lossy_string = s.to_string_lossy();
                            let format_str: &str = lossy_string.as_ref();

                            // Only consider the first 2 bytes, since it's common to pass e.g. "*all" instead of "*a"
                            match &format_str[0..2] {
                                "*a" => {
                                    // Read entire file from current position
                                    let result = String::from_utf8_lossy(&content_vec[position..])
                                        .to_string();
                                    position = content_vec.len();
                                    file_userdata.set("__position", position)?;
                                    Ok(Some(result))
                                }
                                "*l" => {
                                    // Read next line
                                    let mut line = Vec::new();
                                    let mut found_newline = false;

                                    while position < content_vec.len() {
                                        let byte = content_vec[position];
                                        position += 1;

                                        if byte == b'\n' {
                                            found_newline = true;
                                            break;
                                        }

                                        // Skip \r in \r\n sequence but add it if it's alone
                                        if byte == b'\r' {
                                            if position < content_vec.len()
                                                && content_vec[position] == b'\n'
                                            {
                                                position += 1;
                                                found_newline = true;
                                                break;
                                            }
                                        }

                                        line.push(byte);
                                    }

                                    file_userdata.set("__position", position)?;

                                    if !found_newline
                                        && line.is_empty()
                                        && position >= content_vec.len()
                                    {
                                        return Ok(None); // EOF
                                    }

                                    let result = String::from_utf8_lossy(&line).to_string();
                                    Ok(Some(result))
                                }
                                "*n" => {
                                    // Try to parse as a number (number of bytes to read)
                                    match format_str.parse::<usize>() {
                                        Ok(n) => {
                                            let end =
                                                std::cmp::min(position + n, content_vec.len());
                                            let bytes = &content_vec[position..end];
                                            let result = String::from_utf8_lossy(bytes).to_string();
                                            position = end;
                                            file_userdata.set("__position", position)?;
                                            Ok(Some(result))
                                        }
                                        Err(_) => Err(mlua::Error::runtime(format!(
                                            "Invalid format: {}",
                                            format_str
                                        ))),
                                    }
                                }
                                "*L" => {
                                    // Read next line keeping the end of line
                                    let mut line = Vec::new();

                                    while position < content_vec.len() {
                                        let byte = content_vec[position];
                                        position += 1;

                                        line.push(byte);

                                        if byte == b'\n' {
                                            break;
                                        }

                                        // If we encounter a \r, add it and check if the next is \n
                                        if byte == b'\r'
                                            && position < content_vec.len()
                                            && content_vec[position] == b'\n'
                                        {
                                            line.push(content_vec[position]);
                                            position += 1;
                                            break;
                                        }
                                    }

                                    file_userdata.set("__position", position)?;

                                    if line.is_empty() && position >= content_vec.len() {
                                        return Ok(None); // EOF
                                    }

                                    let result = String::from_utf8_lossy(&line).to_string();
                                    Ok(Some(result))
                                }
                                _ => Err(mlua::Error::runtime(format!(
                                    "Unsupported format: {}",
                                    format_str
                                ))),
                            }
                        }
                        Some(mlua::Value::Number(n)) => {
                            // Read n bytes
                            let n = n as usize;
                            let end = std::cmp::min(position + n, content_vec.len());
                            let bytes = &content_vec[position..end];
                            let result = String::from_utf8_lossy(bytes).to_string();
                            position = end;
                            file_userdata.set("__position", position)?;
                            Ok(Some(result))
                        }
                        Some(_) => Err(mlua::Error::runtime("Invalid format")),
                        None => {
                            // Default is to read a line
                            let mut line = Vec::new();
                            let mut found_newline = false;

                            while position < content_vec.len() {
                                let byte = content_vec[position];
                                position += 1;

                                if byte == b'\n' {
                                    found_newline = true;
                                    break;
                                }

                                // Handle \r\n
                                if byte == b'\r' {
                                    if position < content_vec.len()
                                        && content_vec[position] == b'\n'
                                    {
                                        position += 1;
                                        found_newline = true;
                                        break;
                                    }
                                }

                                line.push(byte);
                            }

                            file_userdata.set("__position", position)?;

                            if !found_newline && line.is_empty() && position >= content_vec.len() {
                                return Ok(None); // EOF
                            }

                            let result = String::from_utf8_lossy(&line).to_string();
                            Ok(Some(result))
                        }
                    }
                },
            )?
        };
        file.set("read", read_fn)?;

        // write method
        let write_fn = {
            let fs_changes = fs_changes.clone();

            lua.create_function(move |_lua, (file_userdata, text): (mlua::Table, String)| {
                let write_perm = file_userdata.get::<bool>("__write_perm")?;
                if !write_perm {
                    return Err(mlua::Error::runtime("File not open for writing"));
                }

                let content = file_userdata.get::<mlua::AnyUserData>("__content")?;
                let position = file_userdata.get::<usize>("__position")?;
                let content_ref = content.borrow::<FileContent>()?;
                let mut content_vec = content_ref.0.borrow_mut();

                let bytes = text.as_bytes();

                // Ensure the vector has enough capacity
                if position + bytes.len() > content_vec.len() {
                    content_vec.resize(position + bytes.len(), 0);
                }

                // Write the bytes
                for (i, &byte) in bytes.iter().enumerate() {
                    content_vec[position + i] = byte;
                }

                // Update position
                let new_position = position + bytes.len();
                file_userdata.set("__position", new_position)?;

                // Update fs_changes
                let path = file_userdata.get::<String>("__path")?;
                let path_buf = PathBuf::from(path);
                fs_changes.lock().insert(path_buf, content_vec.clone());

                Ok(true)
            })?
        };
        file.set("write", write_fn)?;

        // If we got this far, the file was opened successfully
        Ok((Some(file), String::new()))
    }

    async fn search(
        lua: &Lua,
        foreground_tx: &mut mpsc::Sender<ForegroundFn>,
        fs: Arc<dyn Fs>,
        regex: String,
    ) -> anyhow::Result<Table> {
        // TODO: Allow specification of these options.
        let search_query = SearchQuery::regex(
            &regex,
            false,
            false,
            false,
            PathMatcher::default(),
            PathMatcher::default(),
            None,
        );
        let search_query = match search_query {
            Ok(query) => query,
            Err(e) => return Err(anyhow!("Invalid search query: {}", e)),
        };

        // TODO: Should use `search_query.regex`. The tool description should also be updated,
        // as it specifies standard regex.
        let search_regex = match Regex::new(&regex) {
            Ok(re) => re,
            Err(e) => return Err(anyhow!("Invalid regex: {}", e)),
        };

        let mut abs_paths_rx = Self::find_search_candidates(search_query, foreground_tx).await?;

        let mut search_results: Vec<Table> = Vec::new();
        while let Some(path) = abs_paths_rx.next().await {
            // Skip files larger than 1MB
            if let Ok(Some(metadata)) = fs.metadata(&path).await {
                if metadata.len > 1_000_000 {
                    continue;
                }
            }

            // Attempt to read the file as text
            if let Ok(content) = fs.load(&path).await {
                let mut matches = Vec::new();

                // Find all regex matches in the content
                for capture in search_regex.find_iter(&content) {
                    matches.push(capture.as_str().to_string());
                }

                // If we found matches, create a result entry
                if !matches.is_empty() {
                    let result_entry = lua.create_table()?;
                    result_entry.set("path", path.to_string_lossy().to_string())?;

                    let matches_table = lua.create_table()?;
                    for (ix, m) in matches.iter().enumerate() {
                        matches_table.set(ix + 1, m.clone())?;
                    }
                    result_entry.set("matches", matches_table)?;

                    search_results.push(result_entry);
                }
            }
        }

        // Create a table to hold our results
        let results_table = lua.create_table()?;
        for (ix, entry) in search_results.into_iter().enumerate() {
            results_table.set(ix + 1, entry)?;
        }

        Ok(results_table)
    }

    async fn find_search_candidates(
        search_query: SearchQuery,
        foreground_tx: &mut mpsc::Sender<ForegroundFn>,
    ) -> anyhow::Result<mpsc::UnboundedReceiver<PathBuf>> {
        Self::run_foreground_fn(
            "finding search file candidates",
            foreground_tx,
            Box::new(move |session, mut cx| {
                session.update(&mut cx, |session, cx| {
                    session.project.update(cx, |project, cx| {
                        project.worktree_store().update(cx, |worktree_store, cx| {
                            // TODO: Better limit? For now this is the same as
                            // MAX_SEARCH_RESULT_FILES.
                            let limit = 5000;
                            // TODO: Providing non-empty open_entries can make this a bit more
                            // efficient as it can skip checking that these paths are textual.
                            let open_entries = HashSet::default();
                            let candidates = worktree_store.find_search_candidates(
                                search_query,
                                limit,
                                open_entries,
                                project.fs().clone(),
                                cx,
                            );
                            let (abs_paths_tx, abs_paths_rx) = mpsc::unbounded();
                            cx.spawn(|worktree_store, cx| async move {
                                pin_mut!(candidates);

                                while let Some(project_path) = candidates.next().await {
                                    worktree_store.read_with(&cx, |worktree_store, cx| {
                                        if let Some(worktree) = worktree_store
                                            .worktree_for_id(project_path.worktree_id, cx)
                                        {
                                            if let Some(abs_path) = worktree
                                                .read(cx)
                                                .absolutize(&project_path.path)
                                                .log_err()
                                            {
                                                abs_paths_tx.unbounded_send(abs_path)?;
                                            }
                                        }
                                        anyhow::Ok(())
                                    })??;
                                }
                                anyhow::Ok(())
                            })
                            .detach();
                            abs_paths_rx
                        })
                    })
                })
            }),
        )
        .await?
    }

    async fn outline(
        root_dir: Option<Arc<Path>>,
        foreground_tx: &mut mpsc::Sender<ForegroundFn>,
        path_str: String,
    ) -> anyhow::Result<String> {
        let root_dir = root_dir
            .ok_or_else(|| mlua::Error::runtime("cannot get outline without a root directory"))?;
        let path = Self::parse_abs_path_in_root_dir(&root_dir, &path_str)?;
        let outline = Self::run_foreground_fn(
            "getting code outline",
            foreground_tx,
            Box::new(move |session, cx| {
                cx.spawn(move |mut cx| async move {
                    // TODO: This will not use file content from `fs_changes`. It will also reflect
                    // user changes that have not been saved.
                    let buffer = session
                        .update(&mut cx, |session, cx| {
                            session
                                .project
                                .update(cx, |project, cx| project.open_local_buffer(&path, cx))
                        })?
                        .await?;
                    buffer.update(&mut cx, |buffer, _cx| {
                        if let Some(outline) = buffer.snapshot().outline(None) {
                            Ok(outline)
                        } else {
                            Err(anyhow!("No outline for file {path_str}"))
                        }
                    })
                })
            }),
        )
        .await?
        .await??;

        Ok(outline
            .items
            .into_iter()
            .map(|item| {
                if item.text.contains('\n') {
                    log::error!("Outline item unexpectedly contains newline");
                }
                format!("{}{}", "  ".repeat(item.depth), item.text)
            })
            .collect::<Vec<String>>()
            .join("\n"))
    }

    async fn run_foreground_fn<R: Send + 'static>(
        description: &str,
        foreground_tx: &mut mpsc::Sender<ForegroundFn>,
        function: Box<dyn FnOnce(WeakEntity<Self>, AsyncApp) -> R + Send>,
    ) -> anyhow::Result<R> {
        let (response_tx, response_rx) = oneshot::channel();
        let send_result = foreground_tx
            .send(ForegroundFn(Box::new(move |this, cx| {
                response_tx.send(function(this, cx)).ok();
            })))
            .await;
        match send_result {
            Ok(()) => (),
            Err(err) => {
                return Err(anyhow::Error::new(err).context(format!(
                    "Internal error while enqueuing work for {description}"
                )));
            }
        }
        match response_rx.await {
            Ok(result) => Ok(result),
            Err(oneshot::Canceled) => Err(anyhow!(
                "Internal error: response oneshot was canceled while {description}."
            )),
        }
    }

    fn parse_abs_path_in_root_dir(root_dir: &Path, path_str: &str) -> anyhow::Result<PathBuf> {
        let path = Path::new(&path_str);
        if path.is_absolute() {
            // Check if path starts with root_dir prefix without resolving symlinks
            if path.starts_with(&root_dir) {
                Ok(path.to_path_buf())
            } else {
                Err(anyhow!(
                    "Error: Absolute path {} is outside the current working directory",
                    path_str
                ))
            }
        } else {
            // TODO: Does use of `../` break sandbox - is path canonicalization needed?
            Ok(root_dir.join(path))
        }
    }
}

struct FileContent(RefCell<Vec<u8>>);

impl UserData for FileContent {
    fn add_methods<M: UserDataMethods<Self>>(_methods: &mut M) {
        // FileContent doesn't have any methods so far.
    }
}

#[derive(Debug)]
pub enum ScriptEvent {
    Spawned(ScriptId),
    Exited(ScriptId),
}

impl EventEmitter<ScriptEvent> for ScriptSession {}

#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
pub struct ScriptId(u32);

pub struct Script {
    pub id: ScriptId,
    pub state: ScriptState,
    pub source: SharedString,
}

pub enum ScriptState {
    Generating,
    Running {
        stdout: Arc<Mutex<String>>,
    },
    Succeeded {
        stdout: String,
    },
    Failed {
        stdout: String,
        error: anyhow::Error,
    },
}

impl Script {
    pub fn source_tag(&self) -> String {
        format!("{}{}{}", SCRIPT_START_TAG, self.source, SCRIPT_END_TAG)
    }

    /// If exited, returns a message with the output for the LLM
    pub fn output_message_for_llm(&self) -> Option<String> {
        match &self.state {
            ScriptState::Generating { .. } => None,
            ScriptState::Running { .. } => None,
            ScriptState::Succeeded { stdout } => {
                format!("Here's the script output:\n{}", stdout).into()
            }
            ScriptState::Failed { stdout, error } => format!(
                "The script failed with:\n{}\n\nHere's the output it managed to print:\n{}",
                error, stdout
            )
            .into(),
        }
    }

    /// Get a snapshot of the script's stdout
    pub fn stdout_snapshot(&self) -> String {
        match &self.state {
            ScriptState::Generating { .. } => String::new(),
            ScriptState::Running { stdout } => stdout.lock().clone(),
            ScriptState::Succeeded { stdout } => stdout.clone(),
            ScriptState::Failed { stdout, .. } => stdout.clone(),
        }
    }

    /// Returns the error if the script failed, otherwise None
    pub fn error(&self) -> Option<&anyhow::Error> {
        match &self.state {
            ScriptState::Generating { .. } => None,
            ScriptState::Running { .. } => None,
            ScriptState::Succeeded { .. } => None,
            ScriptState::Failed { error, .. } => Some(error),
        }
    }
}

#[cfg(test)]
mod tests {
    use gpui::TestAppContext;
    use project::FakeFs;
    use serde_json::json;
    use settings::SettingsStore;

    use super::*;

    #[gpui::test]
    async fn test_print(cx: &mut TestAppContext) {
        let script = r#"
            print("Hello", "world!")
            print("Goodbye", "moon!")
        "#;

        let output = test_script(script, cx).await.unwrap();
        assert_eq!(output, "Hello\tworld!\nGoodbye\tmoon!\n");
    }

    #[gpui::test]
    async fn test_search(cx: &mut TestAppContext) {
        let script = r#"
            local results = search("world")
            for i, result in ipairs(results) do
                print("File: " .. result.path)
                print("Matches:")
                for j, match in ipairs(result.matches) do
                    print(" " .. match)
                end
            end
        "#;

        let output = test_script(script, cx).await.unwrap();
        assert_eq!(output, "File: /file1.txt\nMatches:\n world\n");
    }

    async fn test_script(source: &str, cx: &mut TestAppContext) -> anyhow::Result<String> {
        init_test(cx);
        let fs = FakeFs::new(cx.executor());
        fs.insert_tree(
            "/",
            json!({
                "file1.txt": "Hello world!",
                "file2.txt": "Goodbye moon!"
            }),
        )
        .await;

        let project = Project::test(fs, [Path::new("/")], cx).await;
        let session = cx.new(|cx| ScriptSession::new(project, cx));

        let (script_id, task) = session.update(cx, |session, cx| {
            let script_id = session.new_script();
            let task = session.run_script(script_id, source.to_string(), cx);

            (script_id, task)
        });

        task.await?;

        Ok(session.read_with(cx, |session, _cx| session.get(script_id).stdout_snapshot()))
    }

    fn init_test(cx: &mut TestAppContext) {
        let settings_store = cx.update(SettingsStore::test);
        cx.set_global(settings_store);
        cx.update(Project::init_settings);
    }
}
@@ -1,36 +0,0 @@
You can write a Lua script and I'll run it on my codebase and tell you what its
output was, including both stdout and the git diff of changes it made to
the filesystem. That way, you can get more information about the code base, or
make changes to the code base directly.

Put the Lua script inside of an `<eval>` tag like so:

<eval type="lua">
print("Hello, world!")
</eval>

The Lua script will have access to `io` and it will run with the current working
directory being in the root of the code base, so you can use it to explore,
search, make changes, etc. You can also have the script print things, and I'll
tell you what the output was. Note that `io` only has `open`, and then the file
it returns only has the methods read, write, and close - it doesn't have popen
or anything else.

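For example, a minimal script could read a file like this (the file name here is
just an illustration):

<eval type="lua">
-- open for reading; on failure this returns nil plus an error message
local file = io.open("README.md", "r")
if file then
  -- "*a" reads the rest of the file; "*l" and "*L" read a line, and a number reads that many bytes
  local text = file:read("*a")
  print(text)
  file:close()
end
</eval>
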
There is a function called `search` which accepts a regex (it's implemented
using Rust's regex crate, so use that regex syntax) and runs that regex on the
contents of every file in the code base (aside from gitignored files), then
returns an array of tables with two fields: "path" (the path to the file that
had the matches) and "matches" (an array of strings, with each string being a
match that was found within the file).

There is a function called `outline` which accepts the path to a source file,
and returns a string where each line is a declaration. These lines are indented
with 2 spaces to indicate when a declaration is inside another.

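For example, this script would print every match for a pattern and then one
file's outline (the regex and the path here are just illustrations):

<eval type="lua">
local results = search("TODO")
for _, result in ipairs(results) do
  print(result.path)
  for _, match in ipairs(result.matches) do
    print("  " .. match)
  end
end
print(outline("src/main.rs"))
</eval>
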
When I send you the script output, do not thank me for running it;
act as if you ran it yourself.

IMPORTANT!
Only include a maximum of one Lua script at the very end of your message.
DO NOT WRITE ANYTHING ELSE AFTER THE SCRIPT. Wait for my response with the script
output to continue.
@@ -1,260 +0,0 @@
pub const SCRIPT_START_TAG: &str = "<eval type=\"lua\">";
pub const SCRIPT_END_TAG: &str = "</eval>";

const START_TAG: &[u8] = SCRIPT_START_TAG.as_bytes();
const END_TAG: &[u8] = SCRIPT_END_TAG.as_bytes();

/// Parses a script tag in an assistant message as it is being streamed.
pub struct ScriptTagParser {
    state: State,
    buffer: Vec<u8>,
    tag_match_ix: usize,
}

enum State {
    Unstarted,
    Streaming,
    Ended,
}

#[derive(Debug, PartialEq)]
pub struct ChunkOutput {
    /// The chunk with script tags removed.
    pub content: String,
    /// The full script tag content. `None` until closed.
    pub script_source: Option<String>,
}

impl ScriptTagParser {
    /// Create a new script tag parser.
    pub fn new() -> Self {
        Self {
            state: State::Unstarted,
            buffer: Vec::new(),
            tag_match_ix: 0,
        }
    }

    /// Returns true if the parser has found a script tag.
    pub fn found_script(&self) -> bool {
        match self.state {
            State::Unstarted => false,
            State::Streaming | State::Ended => true,
        }
    }

    /// Process a new chunk of input, splitting it into surrounding content and script source.
    pub fn parse_chunk(&mut self, input: &str) -> ChunkOutput {
        let mut content = Vec::with_capacity(input.len());

        for byte in input.bytes() {
            match self.state {
                State::Unstarted => {
                    if collect_until_tag(byte, START_TAG, &mut self.tag_match_ix, &mut content) {
                        self.state = State::Streaming;
                        self.buffer = Vec::with_capacity(1024);
                        self.tag_match_ix = 0;
                    }
                }
                State::Streaming => {
                    if collect_until_tag(byte, END_TAG, &mut self.tag_match_ix, &mut self.buffer) {
                        self.state = State::Ended;
                    }
                }
                State::Ended => content.push(byte),
            }
        }

        let content = unsafe { String::from_utf8_unchecked(content) };

        let script_source = if matches!(self.state, State::Ended) && !self.buffer.is_empty() {
            let source = unsafe { String::from_utf8_unchecked(std::mem::take(&mut self.buffer)) };

            Some(source)
        } else {
            None
        };

        ChunkOutput {
            content,
            script_source,
        }
    }
}

fn collect_until_tag(byte: u8, tag: &[u8], tag_match_ix: &mut usize, buffer: &mut Vec<u8>) -> bool {
    // this can't be a method because it'd require a mutable borrow on both self and self.buffer

    if match_tag_byte(byte, tag, tag_match_ix) {
        *tag_match_ix >= tag.len()
    } else {
        if *tag_match_ix > 0 {
            // push the partially matched tag to the buffer
            buffer.extend_from_slice(&tag[..*tag_match_ix]);
            *tag_match_ix = 0;

            // the tag might start to match again
            if match_tag_byte(byte, tag, tag_match_ix) {
                return *tag_match_ix >= tag.len();
            }
        }

        buffer.push(byte);

        false
    }
}

fn match_tag_byte(byte: u8, tag: &[u8], tag_match_ix: &mut usize) -> bool {
    if byte == tag[*tag_match_ix] {
        *tag_match_ix += 1;
        true
    } else {
        false
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_parse_complete_tag() {
        let mut parser = ScriptTagParser::new();
        let input = "<eval type=\"lua\">print(\"Hello, World!\")</eval>";
        let result = parser.parse_chunk(input);
        assert_eq!(result.content, "");
        assert_eq!(
            result.script_source,
            Some("print(\"Hello, World!\")".to_string())
        );
    }

    #[test]
    fn test_no_tag() {
        let mut parser = ScriptTagParser::new();
        let input = "No tags here, just plain text";
        let result = parser.parse_chunk(input);
        assert_eq!(result.content, "No tags here, just plain text");
        assert_eq!(result.script_source, None);
    }

    #[test]
    fn test_partial_end_tag() {
        let mut parser = ScriptTagParser::new();

        // Start the tag
        let result = parser.parse_chunk("<eval type=\"lua\">let x = '</e");
        assert_eq!(result.content, "");
        assert_eq!(result.script_source, None);

        // Finish with the rest
        let result = parser.parse_chunk("val' + 'not the end';</eval>");
        assert_eq!(result.content, "");
        assert_eq!(
            result.script_source,
            Some("let x = '</eval' + 'not the end';".to_string())
        );
    }

    #[test]
    fn test_text_before_and_after_tag() {
        let mut parser = ScriptTagParser::new();
        let input = "Before tag <eval type=\"lua\">print(\"Hello\")</eval> After tag";
        let result = parser.parse_chunk(input);
        assert_eq!(result.content, "Before tag  After tag");
        assert_eq!(result.script_source, Some("print(\"Hello\")".to_string()));
    }

    #[test]
    fn test_multiple_chunks_with_surrounding_text() {
        let mut parser = ScriptTagParser::new();

        // First chunk with text before
        let result = parser.parse_chunk("Before script <eval type=\"lua\">local x = 10");
        assert_eq!(result.content, "Before script ");
        assert_eq!(result.script_source, None);

        // Second chunk with script content
        let result = parser.parse_chunk("\nlocal y = 20");
        assert_eq!(result.content, "");
        assert_eq!(result.script_source, None);

        // Last chunk with text after
        let result = parser.parse_chunk("\nprint(x + y)</eval> After script");
        assert_eq!(result.content, " After script");
        assert_eq!(
            result.script_source,
            Some("local x = 10\nlocal y = 20\nprint(x + y)".to_string())
        );

        let result = parser.parse_chunk(" there's more text");
        assert_eq!(result.content, " there's more text");
        assert_eq!(result.script_source, None);
    }

    #[test]
    fn test_partial_start_tag_matching() {
        let mut parser = ScriptTagParser::new();

        // partial match of start tag...
        let result = parser.parse_chunk("<ev");
        assert_eq!(result.content, "");

        // ...that's abandoned when the < of a real tag is encountered
        let result = parser.parse_chunk("<eval type=\"lua\">script content</eval>");
        // ...so it gets pushed to content
        assert_eq!(result.content, "<ev");
        // ...and the real tag is parsed correctly
        assert_eq!(result.script_source, Some("script content".to_string()));
    }

    #[test]
    fn test_random_chunked_parsing() {
        use rand::rngs::StdRng;
        use rand::{Rng, SeedableRng};
        use std::time::{SystemTime, UNIX_EPOCH};

        let test_inputs = [
            "Before <eval type=\"lua\">print(\"Hello\")</eval> After",
            "No tags here at all",
            "<eval type=\"lua\">local x = 10\nlocal y = 20\nprint(x + y)</eval>",
            "Text <eval type=\"lua\">if true then\nprint(\"nested </e\")\nend</eval> more",
        ];

        let seed = SystemTime::now()
            .duration_since(UNIX_EPOCH)
            .unwrap()
            .as_secs();

        eprintln!("Using random seed: {}", seed);
        let mut rng = StdRng::seed_from_u64(seed);

        for test_input in &test_inputs {
            let mut reference_parser = ScriptTagParser::new();
            let expected = reference_parser.parse_chunk(test_input);

            let mut chunked_parser = ScriptTagParser::new();
            let mut remaining = test_input.as_bytes();
            let mut actual_content = String::new();
            let mut actual_script = None;

            while !remaining.is_empty() {
                let chunk_size = rng.gen_range(1..=remaining.len().min(5));
                let (chunk, rest) = remaining.split_at(chunk_size);
                remaining = rest;

                let chunk_str = std::str::from_utf8(chunk).unwrap();
                let result = chunked_parser.parse_chunk(chunk_str);

                actual_content.push_str(&result.content);
                if result.script_source.is_some() {
                    actual_script = result.script_source;
                }
            }

            assert_eq!(actual_content, expected.content);
            assert_eq!(actual_script, expected.script_source);
        }
    }
}
@@ -62,6 +62,7 @@ pub struct AssistantSettings {
    pub default_width: Pixels,
    pub default_height: Pixels,
    pub default_model: LanguageModelSelection,
    pub editor_model: LanguageModelSelection,
    pub inline_alternatives: Vec<LanguageModelSelection>,
    pub using_outdated_settings_version: bool,
    pub enable_experimental_live_diffs: bool,
@@ -162,6 +163,7 @@ impl AssistantSettingsContent {
                })
            }
        }),
        editor_model: None,
        inline_alternatives: None,
        enable_experimental_live_diffs: None,
    },
@@ -182,6 +184,7 @@ impl AssistantSettingsContent {
                .id()
                .to_string(),
        }),
        editor_model: None,
        inline_alternatives: None,
        enable_experimental_live_diffs: None,
    },
@@ -310,6 +313,7 @@ impl Default for VersionedAssistantSettingsContent {
        default_width: None,
        default_height: None,
        default_model: None,
        editor_model: None,
        inline_alternatives: None,
        enable_experimental_live_diffs: None,
    })
@@ -340,6 +344,8 @@ pub struct AssistantSettingsContentV2 {
    default_height: Option<f32>,
    /// The default model to use when creating new chats.
    default_model: Option<LanguageModelSelection>,
    /// The model to use when applying edits from the assistant.
    editor_model: Option<LanguageModelSelection>,
    /// Additional models with which to generate alternatives when performing inline assists.
    inline_alternatives: Option<Vec<LanguageModelSelection>>,
    /// Enable experimental live diffs in the assistant panel.
@@ -470,6 +476,7 @@ impl Settings for AssistantSettings {
                value.default_height.map(Into::into),
            );
            merge(&mut settings.default_model, value.default_model);
            merge(&mut settings.editor_model, value.editor_model);
            merge(&mut settings.inline_alternatives, value.inline_alternatives);
            merge(
                &mut settings.enable_experimental_live_diffs,
@@ -528,6 +535,10 @@ mod tests {
                provider: "test-provider".into(),
                model: "gpt-99".into(),
            }),
            editor_model: Some(LanguageModelSelection {
                provider: "test-provider".into(),
                model: "gpt-99".into(),
            }),
            inline_alternatives: None,
            enabled: None,
            button: None,

@@ -88,7 +88,6 @@ pub trait SlashCommand: 'static + Send + Sync {
    fn accepts_arguments(&self) -> bool {
        self.requires_argument()
    }
    #[allow(clippy::too_many_arguments)]
    fn run(
        self: Arc<Self>,
        arguments: &[String],

@@ -77,8 +77,8 @@ impl SlashCommand for AutoCommand {

        let cx: &mut App = cx;

        cx.spawn(|cx: gpui::AsyncApp| async move {
            let task = project_index.read_with(&cx, |project_index, cx| {
        cx.spawn(async move |cx| {
            let task = project_index.read_with(cx, |project_index, cx| {
                project_index.flush_summary_backlogs(cx)
            })?;

@@ -117,9 +117,9 @@ impl SlashCommand for AutoCommand {
            return Task::ready(Err(anyhow!("no project indexer")));
        };

        let task = window.spawn(cx, |cx| async move {
        let task = window.spawn(cx, async move |cx| {
            let summaries = project_index
                .read_with(&cx, |project_index, cx| project_index.all_summaries(cx))?
                .read_with(cx, |project_index, cx| project_index.all_summaries(cx))?
                .await?;

            commands_for_summaries(&summaries, &original_prompt, &cx).await

@@ -186,7 +186,7 @@ impl SlashCommand for DiagnosticsSlashCommand {

        let task = collect_diagnostics(workspace.read(cx).project().clone(), options, cx);

        window.spawn(cx, move |_| async move {
        window.spawn(cx, async move |_| {
            task.await?
                .map(|output| output.to_event_stream())
                .ok_or_else(|| anyhow!("No diagnostics found"))
@@ -268,7 +268,7 @@ fn collect_diagnostics(
        })
        .collect();

    cx.spawn(|mut cx| async move {
    cx.spawn(async move |cx| {
        let mut output = SlashCommandOutput::default();

        if let Some(error_source) = error_source.as_ref() {
@@ -299,7 +299,7 @@ fn collect_diagnostics(
        }

        if let Some(buffer) = project_handle
            .update(&mut cx, |project, cx| project.open_buffer(project_path, cx))?
            .update(cx, |project, cx| project.open_buffer(project_path, cx))?
            .await
            .log_err()
        {

@@ -241,7 +241,7 @@ fn collect_files(
        .collect::<Vec<_>>();

    let (events_tx, events_rx) = mpsc::unbounded();
    cx.spawn(|mut cx| async move {
    cx.spawn(async move |cx| {
        for snapshot in snapshots {
            let worktree_id = snapshot.id();
            let mut directory_stack: Vec<Arc<Path>> = Vec::new();
@@ -352,7 +352,7 @@ fn collect_files(
                )))?;
            } else if entry.is_file() {
                let Some(open_buffer_task) = project_handle
                    .update(&mut cx, |project, cx| {
                    .update(cx, |project, cx| {
                        project.open_buffer((worktree_id, &entry.path), cx)
                    })
                    .ok()
@@ -361,7 +361,7 @@ fn collect_files(
            };
            if let Some(buffer) = open_buffer_task.await.log_err() {
                let mut output = SlashCommandOutput::default();
                let snapshot = buffer.read_with(&cx, |buffer, _| buffer.snapshot())?;
                let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot())?;
                append_buffer_to_output(
                    &snapshot,
                    Some(&path_including_worktree_name),

@@ -99,7 +99,7 @@ impl SlashCommand for ProjectSlashCommand {
            return Task::ready(Err(anyhow::anyhow!("no project indexer")));
        };

        window.spawn(cx, |mut cx| async move {
        window.spawn(cx, async move |cx| {
            let current_model = current_model.ok_or_else(|| anyhow!("no model selected"))?;

            let prompt =
@@ -123,7 +123,7 @@ impl SlashCommand for ProjectSlashCommand {
                .search_queries;

            let results = project_index
                .read_with(&cx, |project_index, cx| {
                .read_with(cx, |project_index, cx| {
                    project_index.search(search_queries.clone(), 25, cx)
                })?
                .await?;

@@ -109,9 +109,9 @@ impl SlashCommand for SearchSlashCommand {
            return Task::ready(Err(anyhow::anyhow!("no project indexer")));
        };

        window.spawn(cx, |cx| async move {
        window.spawn(cx, async move |cx| {
            let results = project_index
                .read_with(&cx, |project_index, cx| {
                .read_with(cx, |project_index, cx| {
                    project_index.search(vec![query.clone()], limit.unwrap_or(5), cx)
                })?
                .await?;