Compare commits
295 Commits
new-ui-lis ... cole/limit
| Author | SHA1 | Date |
|---|---|---|
| | cc5adeb83b | |
| | c91efd0e50 | |
| | 88907eeb38 | |
| | cd5d7e82d0 | |
| | 0851842d2c | |
| | 1397e01735 | |
| | 2b2b9c1624 | |
| | a05066cd83 | |
| | cb439e672d | |
| | 6b0a282c9c | |
| | 25772b8777 | |
| | 94b63808e0 | |
| | 798af67dc1 | |
| | db1d2defa5 | |
| | 430bd83e4d | |
| | dbe5399fc4 | |
| | aba242d576 | |
| | ddc210abfc | |
| | 65994c0576 | |
| | 011f823f33 | |
| | 3d1ae68f83 | |
| | 1f62274a89 | |
| | c2f62d261b | |
| | 7d433a30ec | |
| | 52567f4b72 | |
| | a0ee84d3ac | |
| | 6cac0b33dc | |
| | 45606abfdb | |
| | 8ba6ce43ac | |
| | 040d42fc24 | |
| | 22d905dc03 | |
| | bf735da3f2 | |
| | 210d8d5530 | |
| | a0f995d2ae | |
| | 8f560daec2 | |
| | d5bb12631a | |
| | 8a31dcaeb0 | |
| | ef91e7afae | |
| | c220fb387d | |
| | adbde210fd | |
| | b81a1ad91d | |
| | 5f390f1bf8 | |
| | c282acbe65 | |
| | 021d6584cc | |
| | b547cd1c70 | |
| | 8f841d1ab7 | |
| | 4b153e7f7f | |
| | b61171f152 | |
| | 0b492c11de | |
| | 265caed15e | |
| | 148131786f | |
| | 7c1405db37 | |
| | 96b747e31d | |
| | 7a888de9f5 | |
| | e9b4fa1465 | |
| | ead60d1857 | |
| | 768dfc8b6b | |
| | f2f9c786da | |
| | e5d2678d94 | |
| | 3ad9074e63 | |
| | f40b22c02a | |
| | 8490d0d4ef | |
| | afd0da97b9 | |
| | 1bf1c7223f | |
| | ba8b9ec2c7 | |
| | 685536c27e | |
| | ae017c3f96 | |
| | f587e95a7e | |
| | 83dfdb0cfe | |
| | 566c5f91a7 | |
| | 21057e3af7 | |
| | f68a475eca | |
| | c62210b178 | |
| | ad14dcc57b | |
| | b9432dbe42 | |
| | 41c373eff1 | |
| | 6a95ec6a64 | |
| | 8d7b021f92 | |
| | 798a34bfc2 | |
| | a4a9f6bd07 | |
| | bfe4c40f73 | |
| | daa16bcf42 | |
| | 22ad7b17c5 | |
| | 728a5eb388 | |
| | 8d8e5d3635 | |
| | a05a480ed9 | |
| | d141fa027e | |
| | 8e0e291bd5 | |
| | e3c0f56a96 | |
| | 3935e8343a | |
| | 0c84170071 | |
| | a38687d278 | |
| | b75b308459 | |
| | dffa725c7d | |
| | 22f1429f97 | |
| | 6bdd2cf7db | |
| | a7f3b22051 | |
| | f3703fa8be | |
| | a0be6c8cb2 | |
| | b5a7fb13c3 | |
| | 2183fc674d | |
| | 0ad5979f19 | |
| | ed1938dd9a | |
| | f7927d3fa4 | |
| | 8361c32a34 | |
| | 2edadd9352 | |
| | 85384fb9c6 | |
| | 00359271d1 | |
| | 18fcdf1d2c | |
| | 55c927b039 | |
| | 1be3f81920 | |
| | 2eb4d6b7eb | |
| | 25f407baab | |
| | 79874872cb | |
| | 95208a6576 | |
| | 1034d1a6b5 | |
| | d4eab557b2 | |
| | b75964a636 | |
| | 87cdb68cca | |
| | b0b65420f6 | |
| | 8ec0309645 | |
| | 6767e98e00 | |
| | 8cf5af1a84 | |
| | 247ee880d2 | |
| | 2e217759c0 | |
| | 0a0c163692 | |
| | e80df25386 | |
| | d9590f3f0e | |
| | 4ecd1b5174 | |
| | 70c973f6c3 | |
| | e842b4eade | |
| | 606aa7a78c | |
| | 0081b816fe | |
| | 21949bcf1a | |
| | ee7ed6d5b8 | |
| | 07b67c1bd3 | |
| | f116b44ae8 | |
| | 43ab7fe0e2 | |
| | 6044773043 | |
| | 81af2c0bed | |
| | ab199fda47 | |
| | e60e8f3a0a | |
| | edeed7b619 | |
| | 9be7934f12 | |
| | 009b90291e | |
| | 8b17dc66f6 | |
| | de07b712fd | |
| | be8f3b3791 | |
| | 3131b0459f | |
| | 3ec323ce0d | |
| | c8b782d870 | |
| | 7bca15704b | |
| | 5268e74315 | |
| | 91c209900b | |
| | 74c29f1818 | |
| | 5858e61327 | |
| | 21cf2e38c5 | |
| | a3ca5554fd | |
| | acf9b22466 | |
| | ffcd023f83 | |
| | 6259ad559b | |
| | 8d259a9dbe | |
| | 010c5a2c4e | |
| | 45b126a977 | |
| | 5f74297576 | |
| | 349f57381f | |
| | 41eb586ec8 | |
| | 6bf6fcaa51 | |
| | 6e89537830 | |
| | 669c6a3d5e | |
| | 910531bc33 | |
| | 690f26cf8b | |
| | 6b56fee6b0 | |
| | d94001f445 | |
| | 6bcfc4014b | |
| | 47a89ad243 | |
| | f3f97895a9 | |
| | 30afba50a9 | |
| | 036c123488 | |
| | 050f5f6723 | |
| | 2cd970f137 | |
| | d6255fb3d2 | |
| | f9a66ecaed | |
| | cfb9a4beb0 | |
| | 9902cd54ce | |
| | 96510b72b8 | |
| | a364a13458 | |
| | 09a4cfd307 | |
| | 5d66c3db85 | |
| | 28f33d0103 | |
| | 55a90f576a | |
| | 8d6abf6537 | |
| | 04961a0186 | |
| | fd7ab20ea4 | |
| | 7019aca59d | |
| | d43bcc04db | |
| | 2b94a35aaa | |
| | e8208643bb | |
| | a90f80725f | |
| | 4e6c37d23b | |
| | 0cf6259fec | |
| | 5cb5e92185 | |
| | da61a28839 | |
| | efdb769f9b | |
| | 9cce5a650e | |
| | 2021ca5bff | |
| | 1771250b04 | |
| | 18259c0fd4 | |
| | 41ddd1cc97 | |
| | e175878008 | |
| | 1cfbfc199c | |
| | f59f2caf7e | |
| | 401342c6ec | |
| | 0df1e4a489 | |
| | 9bd3e156f5 | |
| | 42c655751b | |
| | ff1d78df3b | |
| | c2e4fdf63d | |
| | bf11b888c3 | |
| | d562f58e76 | |
| | 94e4aa626d | |
| | 8ceba89d81 | |
| | c37d6d5fed | |
| | 1a3597d726 | |
| | c747cccde3 | |
| | d81e7683ea | |
| | 8b29ee6033 | |
| | 96a75e08af | |
| | 06cbff6714 | |
| | ce05813e7c | |
| | 4d1d8d6d78 | |
| | 1f8b14f4f1 | |
| | 082cc6184c | |
| | 6cfc4dc857 | |
| | b9c48685e8 | |
| | 570c396e84 | |
| | 5fd034e604 | |
| | 63dab5f891 | |
| | a2d6df3ed6 | |
| | 30e86ac939 | |
| | 976fc3ee97 | |
| | 63091459d8 | |
| | 659fae70f8 | |
| | 02e970192f | |
| | 5ecc67f2ef | |
| | 73dfb10c16 | |
| | e513e81046 | |
| | 2fc4dec58f | |
| | 3891381d3e | |
| | b91e929086 | |
| | 013a646799 | |
| | ed52e759d7 | |
| | 6da099a9d7 | |
| | 5f159bc95e | |
| | a4462577bf | |
| | c147b58558 | |
| | 84fe1bfe9b | |
| | 657d7a911d | |
| | ee05cc3ad9 | |
| | 5ed144f9d2 | |
| | 2a862b3c54 | |
| | 4a7c84f490 | |
| | 230e2e4107 | |
| | d732b8ba0f | |
| | 7c3eecc9c7 | |
| | fff37ab823 | |
| | 8a7a78fafb | |
| | 6de3ac3e17 | |
| | 5aae3bdc69 | |
| | e298301b40 | |
| | ed6bf7f161 | |
| | f14d6670ba | |
| | 22d9b5d8ca | |
| | 6ed6e8bc26 | |
| | 4846e6fb3a | |
| | cb543f9546 | |
| | 450d727a04 | |
| | 60b3eb3f76 | |
| | bbe7c9a738 | |
| | f6345a6995 | |
| | e70d0edfac | |
| | 921c24e274 | |
| | 18f3f8097f | |
| | 4f6682c7fe | |
| | f57dece2d5 | |
| | 103ad635d9 | |
| | ec5e7a2653 | |
| | 05d3ee8555 | |
| | 1b34437839 | |
| | 3ff2c8fc38 | |
| | b0b0b00fae | |
| | 80fb88520f | |
| | aef84d453a | |
| | e06d010aab | |
| | 14148f53d4 | |
51
.github/ISSUE_TEMPLATE/0_git_beta_bug_report.yml
vendored
@@ -1,51 +0,0 @@
name: Git Beta
description: There is a bug related to new Git features in Zed
type: "Bug"
labels: [git]
title: "Git Beta: <a short description of the Git bug>"
body:
  - type: textarea
    attributes:
      label: Summary
      description: Describe the bug with a one line summary, and provide detailed reproduction steps
      value: |
        <!-- Please insert a one line summary of the issue below -->

        <!-- Include all steps necessary to reproduce from a clean Zed installation. Be verbose -->
        Steps to trigger the problem:
        1.
        2.
        3.

        Actual Behavior:

        Expected Behavior:

    validations:
      required: true
  - type: textarea
    id: environment
    attributes:
      label: Zed Version and System Specs
      description: 'Open Zed, and in the command palette select "zed: Copy System Specs Into Clipboard"'
      placeholder: |
        Output of "zed: Copy System Specs Into Clipboard"
    validations:
      required: true
  - type: textarea
    attributes:
      label: If applicable, attach your `~/Library/Logs/Zed/Zed.log` file to this issue.
      description: |
        macOS: `~/Library/Logs/Zed/Zed.log`
        Linux: `~/.local/share/zed/logs/Zed.log` or $XDG_DATA_HOME
        If you only need the most recent lines, you can run the `zed: open log` command palette action to see the last 1000.
      value: |
        <details><summary>Zed.log</summary>

        <!-- Click below this line and paste or drag-and-drop your log-->
        ```

        ```
        <!-- Click above this line and paste or drag-and-drop your log--></details>
    validations:
      required: false
192
.github/workflows/ci.yml
vendored
@@ -23,9 +23,53 @@ env:
|
||||
RUST_BACKTRACE: 1
|
||||
|
||||
jobs:
|
||||
job_spec:
|
||||
name: Decide which jobs to run
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
outputs:
|
||||
run_tests: ${{ steps.filter.outputs.run_tests }}
|
||||
run_license: ${{ steps.filter.outputs.run_license }}
|
||||
runs-on:
|
||||
- ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
# 350 is arbitrary; ~10 days of history on main (5 secs); full history is ~25 secs
|
||||
fetch-depth: ${{ github.ref == 'refs/heads/main' && 2 || 350 }}
|
||||
- name: Fetch git history and generate output filters
|
||||
id: filter
|
||||
run: |
|
||||
if [ -z "$GITHUB_BASE_REF" ]; then
|
||||
echo "Not in a PR context (i.e., push to main/stable/preview)"
|
||||
COMPARE_REV=$(git rev-parse HEAD~1)
|
||||
else
|
||||
echo "In a PR context comparing to pull_request.base.ref"
|
||||
git fetch origin "$GITHUB_BASE_REF" --depth=350
|
||||
COMPARE_REV=$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)
|
||||
fi
|
||||
# Specify anything which should skip full CI in this regex:
|
||||
# - docs/
|
||||
# - .github/ISSUE_TEMPLATE/
|
||||
# - .github/workflows/ (except .github/workflows/ci.yml)
|
||||
SKIP_REGEX='^(docs/|\.github/(ISSUE_TEMPLATE|workflows/(?!ci)))'
|
||||
if [[ $(git diff --name-only $COMPARE_REV ${{ github.sha }} | grep -vP "$SKIP_REGEX") ]]; then
|
||||
echo "run_tests=true" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "run_tests=false" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
if [[ $(git diff --name-only $COMPARE_REV ${{ github.sha }} | grep '^Cargo.lock') ]]; then
|
||||
echo "run_license=true" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "run_license=false" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
|
||||
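To see what this filter does in practice, here is a minimal Python sketch of the same decision logic, reusing the `SKIP_REGEX` pattern from the step above with a few illustrative file paths (the paths and the `ci_outputs` helper are hypothetical, not part of the workflow):

```python
import re

# Same pattern as SKIP_REGEX above; Python's re module supports the (?!ci) lookahead.
SKIP_REGEX = re.compile(r"^(docs/|\.github/(ISSUE_TEMPLATE|workflows/(?!ci)))")

def ci_outputs(changed_paths):
    # run_tests: true if any changed path is NOT covered by the skip regex.
    run_tests = any(not SKIP_REGEX.match(p) for p in changed_paths)
    # run_license: true if Cargo.lock changed (mirrors grep '^Cargo.lock').
    run_license = any(p.startswith("Cargo.lock") for p in changed_paths)
    return {"run_tests": run_tests, "run_license": run_license}

print(ci_outputs(["docs/src/git.md"]))                         # tests skipped
print(ci_outputs([".github/workflows/release.yml"]))           # tests skipped
print(ci_outputs([".github/workflows/ci.yml", "Cargo.lock"]))  # tests + license check
```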
migration_checks:
|
||||
name: Check Postgres and Protobuf migrations, mergability
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
needs: [job_spec]
|
||||
if: |
|
||||
github.repository_owner == 'zed-industries' &&
|
||||
needs.job_spec.outputs.run_tests == 'true'
|
||||
timeout-minutes: 60
|
||||
runs-on:
|
||||
- self-hosted
|
||||
@@ -69,6 +113,7 @@ jobs:
|
||||
style:
|
||||
timeout-minutes: 60
|
||||
name: Check formatting and spelling
|
||||
needs: [job_spec]
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
runs-on:
|
||||
- buildjet-8vcpu-ubuntu-2204
|
||||
@@ -76,6 +121,21 @@ jobs:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
|
||||
- uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2 # v4.0.0
|
||||
with:
|
||||
version: 9
|
||||
|
||||
- name: Prettier Check on /docs
|
||||
working-directory: ./docs
|
||||
run: |
|
||||
pnpm dlx prettier@${PRETTIER_VERSION} . --check || {
|
||||
echo "To fix, run from the root of the zed repo:"
|
||||
echo " cd docs && pnpm dlx prettier@${PRETTIER_VERSION} . --write && cd .."
|
||||
false
|
||||
}
|
||||
env:
|
||||
PRETTIER_VERSION: 3.5.0
|
||||
|
||||
# To support writing comments that will certainly be revisited.
|
||||
- name: Check for todo! and FIXME comments
|
||||
run: script/check-todos
|
||||
@@ -91,7 +151,10 @@ jobs:
|
||||
macos_tests:
|
||||
timeout-minutes: 60
|
||||
name: (macOS) Run Clippy and tests
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
needs: [job_spec]
|
||||
if: |
|
||||
github.repository_owner == 'zed-industries' &&
|
||||
needs.job_spec.outputs.run_tests == 'true'
|
||||
runs-on:
|
||||
- self-hosted
|
||||
- test
|
||||
@@ -123,7 +186,9 @@ jobs:
|
||||
- name: Check licenses
|
||||
run: |
|
||||
script/check-licenses
|
||||
script/generate-licenses /tmp/zed_licenses_output
|
||||
if [[ "${{ needs.job_spec.outputs.run_license }}" == "true" ]]; then
|
||||
script/generate-licenses /tmp/zed_licenses_output
|
||||
fi
|
||||
|
||||
- name: Check for new vulnerable dependencies
|
||||
if: github.event_name == 'pull_request'
|
||||
@@ -154,7 +219,10 @@ jobs:
|
||||
linux_tests:
|
||||
timeout-minutes: 60
|
||||
name: (Linux) Run Clippy and tests
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
needs: [job_spec]
|
||||
if: |
|
||||
github.repository_owner == 'zed-industries' &&
|
||||
needs.job_spec.outputs.run_tests == 'true'
|
||||
runs-on:
|
||||
- buildjet-16vcpu-ubuntu-2204
|
||||
steps:
|
||||
@@ -203,9 +271,12 @@ jobs:
|
||||
build_remote_server:
|
||||
timeout-minutes: 60
|
||||
name: (Linux) Build Remote Server
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
needs: [job_spec]
|
||||
if: |
|
||||
github.repository_owner == 'zed-industries' &&
|
||||
needs.job_spec.outputs.run_tests == 'true'
|
||||
runs-on:
|
||||
- buildjet-16vcpu-ubuntu-2204
|
||||
- buildjet-8vcpu-ubuntu-2204
|
||||
steps:
|
||||
- name: Add Rust to the PATH
|
||||
run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH
|
||||
@@ -239,21 +310,12 @@ jobs:
|
||||
windows_clippy:
|
||||
timeout-minutes: 60
|
||||
name: (Windows) Run Clippy
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
runs-on: hosted-windows-2
|
||||
needs: [job_spec]
|
||||
if: |
|
||||
github.repository_owner == 'zed-industries' &&
|
||||
needs.job_spec.outputs.run_tests == 'true'
|
||||
runs-on: windows-2025-16
|
||||
steps:
|
||||
# Temporarily collect some metadata about the hardware behind our runners.
|
||||
- name: GHA Runner Info
|
||||
run: |
|
||||
Invoke-RestMethod -Headers @{"Metadata"="true"} -Method GET -Uri "http://169.254.169.254/metadata/instance/compute?api-version=2023-07-01" |
|
||||
ConvertTo-Json -Depth 10 |
|
||||
jq "{ vm_size: .vmSize, location: .location, os_disk_gb: (.storageProfile.osDisk.diskSizeGB | tonumber), rs_disk_gb: (.storageProfile.resourceDisk.size | tonumber / 1024) }"
|
||||
@{
|
||||
Cores = (Get-CimInstance Win32_Processor).NumberOfCores
|
||||
vCPUs = (Get-CimInstance Win32_Processor).NumberOfLogicalProcessors
|
||||
RamGb = [math]::Round((Get-CimInstance Win32_ComputerSystem).TotalPhysicalMemory / 1GB, 2)
|
||||
cpuid = (Get-CimInstance Win32_Processor).Name.Trim()
|
||||
} | ConvertTo-Json
|
||||
# more info here: https://github.com/rust-lang/cargo/issues/13020
|
||||
- name: Enable longer pathnames for git
|
||||
run: git config --system core.longpaths true
|
||||
@@ -306,21 +368,13 @@ jobs:
|
||||
windows_tests:
|
||||
timeout-minutes: 60
|
||||
name: (Windows) Run Tests
|
||||
if: ${{ github.repository_owner == 'zed-industries' && (github.ref == 'refs/heads/main' || contains(github.event.pull_request.labels.*.name, 'windows')) }}
|
||||
runs-on: hosted-windows-2
|
||||
needs: [job_spec]
|
||||
if: |
|
||||
github.repository_owner == 'zed-industries' &&
|
||||
needs.job_spec.outputs.run_tests == 'true'
|
||||
# Use bigger runners for PRs (speed); smaller for async (cost)
|
||||
runs-on: ${{ github.event_name == 'pull_request' && 'windows-2025-32' || 'windows-2025-16' }}
|
||||
steps:
|
||||
# Temporarily collect some metadata about the hardware behind our runners.
|
||||
- name: GHA Runner Info
|
||||
run: |
|
||||
Invoke-RestMethod -Headers @{"Metadata"="true"} -Method GET -Uri "http://169.254.169.254/metadata/instance/compute?api-version=2023-07-01" |
|
||||
ConvertTo-Json -Depth 10 |
|
||||
jq "{ vm_size: .vmSize, location: .location, os_disk_gb: (.storageProfile.osDisk.diskSizeGB | tonumber), rs_disk_gb: (.storageProfile.resourceDisk.size | tonumber / 1024) }"
|
||||
@{
|
||||
Cores = (Get-CimInstance Win32_Processor).NumberOfCores
|
||||
vCPUs = (Get-CimInstance Win32_Processor).NumberOfLogicalProcessors
|
||||
RamGb = [math]::Round((Get-CimInstance Win32_ComputerSystem).TotalPhysicalMemory / 1GB, 2)
|
||||
cpuid = (Get-CimInstance Win32_Processor).Name.Trim()
|
||||
} | ConvertTo-Json
|
||||
# more info here: https://github.com/rust-lang/cargo/issues/13020
|
||||
- name: Enable longer pathnames for git
|
||||
run: git config --system core.longpaths true
|
||||
@@ -372,13 +426,49 @@ jobs:
|
||||
Remove-Item -Path "${{ env.CARGO_HOME }}/config.toml" -Force
|
||||
}
|
||||
|
||||
tests_pass:
|
||||
name: Tests Pass
|
||||
runs-on: ubuntu-latest
|
||||
needs:
|
||||
- job_spec
|
||||
- style
|
||||
- migration_checks
|
||||
- linux_tests
|
||||
- build_remote_server
|
||||
- macos_tests
|
||||
- windows_clippy
|
||||
- windows_tests
|
||||
if: always()
|
||||
steps:
|
||||
- name: Check all tests passed
|
||||
run: |
|
||||
# Check dependent jobs...
|
||||
RET_CODE=0
|
||||
# Always check style
|
||||
[[ "${{ needs.style.result }}" != 'success' ]] && { RET_CODE=1; echo "style tests failed"; }
|
||||
|
||||
# Only check test jobs if they were supposed to run
|
||||
if [[ "${{ needs.job_spec.outputs.run_tests }}" == "true" ]]; then
|
||||
[[ "${{ needs.macos_tests.result }}" != 'success' ]] && { RET_CODE=1; echo "macOS tests failed"; }
|
||||
[[ "${{ needs.linux_tests.result }}" != 'success' ]] && { RET_CODE=1; echo "Linux tests failed"; }
|
||||
[[ "${{ needs.windows_tests.result }}" != 'success' ]] && { RET_CODE=1; echo "Windows tests failed"; }
|
||||
[[ "${{ needs.windows_clippy.result }}" != 'success' ]] && { RET_CODE=1; echo "Windows clippy failed"; }
|
||||
[[ "${{ needs.build_remote_server.result }}" != 'success' ]] && { RET_CODE=1; echo "Remote server build failed"; }
|
||||
fi
|
||||
if [[ "$RET_CODE" -eq 0 ]]; then
|
||||
echo "All tests passed successfully!"
|
||||
fi
|
||||
exit $RET_CODE
|
||||
|
||||
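Because `tests_pass` runs with `if: always()`, the shell above has to interpret the results of jobs that may have been skipped. A small Python sketch of that decision table, with hypothetical job results (it mirrors the script above and is not part of CI):

```python
def tests_pass(results, run_tests):
    """results maps job name -> GitHub Actions result string
    ('success', 'failure', 'cancelled', or 'skipped')."""
    ok = results.get("style") == "success"  # style is always required
    if run_tests:
        # Test jobs only count when job_spec decided they should run.
        for job in ("macos_tests", "linux_tests", "windows_tests",
                    "windows_clippy", "build_remote_server"):
            ok = ok and results.get(job) == "success"
    return ok

# Hypothetical docs-only change: tests were skipped, so only style matters.
print(tests_pass({"style": "success", "linux_tests": "skipped"}, run_tests=False))  # True
```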
bundle-mac:
|
||||
timeout-minutes: 120
|
||||
name: Create a macOS bundle
|
||||
runs-on:
|
||||
- self-hosted
|
||||
- bundle
|
||||
if: ${{ startsWith(github.ref, 'refs/tags/v') || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
|
||||
if: |
|
||||
startsWith(github.ref, 'refs/tags/v')
|
||||
|| contains(github.event.pull_request.labels.*.name, 'run-bundling')
|
||||
needs: [macos_tests]
|
||||
env:
|
||||
MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
|
||||
@@ -468,7 +558,9 @@ jobs:
|
||||
name: Linux x86_x64 release bundle
|
||||
runs-on:
|
||||
- buildjet-16vcpu-ubuntu-2004
|
||||
if: ${{ startsWith(github.ref, 'refs/tags/v') || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
|
||||
if: |
|
||||
startsWith(github.ref, 'refs/tags/v')
|
||||
|| contains(github.event.pull_request.labels.*.name, 'run-bundling')
|
||||
needs: [linux_tests]
|
||||
env:
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
@@ -485,7 +577,7 @@ jobs:
|
||||
run: ./script/linux && ./script/install-mold 2.34.0
|
||||
|
||||
- name: Determine version and release channel
|
||||
if: ${{ startsWith(github.ref, 'refs/tags/v') }}
|
||||
if: startsWith(github.ref, 'refs/tags/v')
|
||||
run: |
|
||||
# This exports RELEASE_CHANNEL into env (GITHUB_ENV)
|
||||
script/determine-release-channel
|
||||
@@ -495,14 +587,18 @@ jobs:
|
||||
|
||||
- name: Upload Linux bundle to workflow run if main branch or specific label
|
||||
uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4
|
||||
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
|
||||
if: |
|
||||
github.ref == 'refs/heads/main'
|
||||
|| contains(github.event.pull_request.labels.*.name, 'run-bundling')
|
||||
with:
|
||||
name: zed-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz
|
||||
path: target/release/zed-*.tar.gz
|
||||
|
||||
- name: Upload Linux remote server to workflow run if main branch or specific label
|
||||
uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4
|
||||
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
|
||||
if: |
|
||||
github.ref == 'refs/heads/main'
|
||||
|| contains(github.event.pull_request.labels.*.name, 'run-bundling')
|
||||
with:
|
||||
name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.gz
|
||||
path: target/zed-remote-server-linux-x86_64.gz
|
||||
@@ -523,7 +619,9 @@ jobs:
|
||||
name: Linux arm64 release bundle
|
||||
runs-on:
|
||||
- buildjet-16vcpu-ubuntu-2204-arm
|
||||
if: ${{ startsWith(github.ref, 'refs/tags/v') || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
|
||||
if: |
|
||||
startsWith(github.ref, 'refs/tags/v')
|
||||
|| contains(github.event.pull_request.labels.*.name, 'run-bundling')
|
||||
needs: [linux_tests]
|
||||
env:
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
@@ -540,7 +638,7 @@ jobs:
|
||||
run: ./script/linux
|
||||
|
||||
- name: Determine version and release channel
|
||||
if: ${{ startsWith(github.ref, 'refs/tags/v') }}
|
||||
if: startsWith(github.ref, 'refs/tags/v')
|
||||
run: |
|
||||
# This exports RELEASE_CHANNEL into env (GITHUB_ENV)
|
||||
script/determine-release-channel
|
||||
@@ -550,14 +648,18 @@ jobs:
|
||||
|
||||
- name: Upload Linux bundle to workflow run if main branch or specific label
|
||||
uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4
|
||||
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
|
||||
if: |
|
||||
github.ref == 'refs/heads/main'
|
||||
|| contains(github.event.pull_request.labels.*.name, 'run-bundling')
|
||||
with:
|
||||
name: zed-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz
|
||||
path: target/release/zed-*.tar.gz
|
||||
|
||||
- name: Upload Linux remote server to workflow run if main branch or specific label
|
||||
uses: actions/upload-artifact@4cec3d8aa04e39d1a68397de0c4cd6fb9dce8ec1 # v4
|
||||
if: ${{ github.ref == 'refs/heads/main' }} || contains(github.event.pull_request.labels.*.name, 'run-bundling') }}
|
||||
if: |
|
||||
github.ref == 'refs/heads/main'
|
||||
|| contains(github.event.pull_request.labels.*.name, 'run-bundling')
|
||||
with:
|
||||
name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.gz
|
||||
path: target/zed-remote-server-linux-aarch64.gz
|
||||
@@ -575,7 +677,9 @@ jobs:
|
||||
|
||||
auto-release-preview:
|
||||
name: Auto release preview
|
||||
if: ${{ startsWith(github.ref, 'refs/tags/v') && endsWith(github.ref, '-pre') && !endsWith(github.ref, '.0-pre') }}
|
||||
if: |
|
||||
startsWith(github.ref, 'refs/tags/v')
|
||||
&& endsWith(github.ref, '-pre') && !endsWith(github.ref, '.0-pre')
|
||||
needs: [bundle-mac, bundle-linux-x86_x64, bundle-linux-aarch64]
|
||||
runs-on:
|
||||
- self-hosted
|
||||
|
||||
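The `auto-release-preview` condition above is easy to misread, so here is an illustrative Python rendering of the same predicate: preview point releases are published automatically, while the first preview of a series (`*.0-pre`) and stable tags are not. The tag names below are made up for the example:

```python
def auto_release_preview(ref):
    # startsWith(refs/tags/v) && endsWith(-pre) && !endsWith(.0-pre)
    return (ref.startswith("refs/tags/v")
            and ref.endswith("-pre")
            and not ref.endswith(".0-pre"))

for ref in ("refs/tags/v0.175.2-pre",   # True: preview point release
            "refs/tags/v0.176.0-pre",   # False: first preview of a series
            "refs/tags/v0.175.2"):      # False: stable tag
    print(ref, auto_release_preview(ref))
```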
39
.github/workflows/docs.yml
vendored
@@ -1,39 +0,0 @@
name: Docs

on:
  pull_request:
    paths:
      - "docs/**"
  push:
    branches:
      - main

jobs:
  check_formatting:
    name: "Check formatting"
    if: github.repository_owner == 'zed-industries'
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4

      - uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2 # v4.0.0
        with:
          version: 9

      - name: Prettier Check on /docs
        working-directory: ./docs
        run: |
          pnpm dlx prettier@${PRETTIER_VERSION} . --check || {
            echo "To fix, run from the root of the zed repo:"
            echo "  cd docs && pnpm dlx prettier@${PRETTIER_VERSION} . --write && cd .."
            false
          }
        env:
          PRETTIER_VERSION: 3.5.0

      - name: Check for Typos with Typos-CLI
        uses: crate-ci/typos@8e6a4285bcbde632c5d79900a7779746e8b7ea3f # v1.24.6
        with:
          config: ./typos.toml
          files: ./docs/
714
Cargo.lock
generated
24
Cargo.toml
@@ -8,6 +8,7 @@ members = [
|
||||
"crates/assistant",
|
||||
"crates/assistant2",
|
||||
"crates/assistant_context_editor",
|
||||
"crates/assistant_eval",
|
||||
"crates/assistant_settings",
|
||||
"crates/assistant_slash_command",
|
||||
"crates/assistant_slash_commands",
|
||||
@@ -64,6 +65,7 @@ members = [
|
||||
"crates/gpui_tokio",
|
||||
"crates/html_to_markdown",
|
||||
"crates/http_client",
|
||||
"crates/http_client_tls",
|
||||
"crates/image_viewer",
|
||||
"crates/indexed_docs",
|
||||
"crates/inline_completion",
|
||||
@@ -171,19 +173,14 @@ members = [
|
||||
|
||||
"extensions/emmet",
|
||||
"extensions/glsl",
|
||||
"extensions/haskell",
|
||||
"extensions/html",
|
||||
"extensions/perplexity",
|
||||
"extensions/proto",
|
||||
"extensions/purescript",
|
||||
"extensions/ruff",
|
||||
"extensions/slash-commands-example",
|
||||
"extensions/snippets",
|
||||
"extensions/terraform",
|
||||
"extensions/test-extension",
|
||||
"extensions/toml",
|
||||
"extensions/uiua",
|
||||
"extensions/zig",
|
||||
|
||||
#
|
||||
# Tooling
|
||||
@@ -211,6 +208,7 @@ assets = { path = "crates/assets" }
|
||||
assistant = { path = "crates/assistant" }
|
||||
assistant2 = { path = "crates/assistant2" }
|
||||
assistant_context_editor = { path = "crates/assistant_context_editor" }
|
||||
assistant_eval = { path = "crates/assistant_eval" }
|
||||
assistant_settings = { path = "crates/assistant_settings" }
|
||||
assistant_slash_command = { path = "crates/assistant_slash_command" }
|
||||
assistant_slash_commands = { path = "crates/assistant_slash_commands" }
|
||||
@@ -265,6 +263,7 @@ gpui_macros = { path = "crates/gpui_macros" }
|
||||
gpui_tokio = { path = "crates/gpui_tokio" }
|
||||
html_to_markdown = { path = "crates/html_to_markdown" }
|
||||
http_client = { path = "crates/http_client" }
|
||||
http_client_tls = { path = "crates/http_client_tls" }
|
||||
image_viewer = { path = "crates/image_viewer" }
|
||||
indexed_docs = { path = "crates/indexed_docs" }
|
||||
inline_completion = { path = "crates/inline_completion" }
|
||||
@@ -371,7 +370,7 @@ zeta = { path = "crates/zeta" }
|
||||
#
|
||||
|
||||
aho-corasick = "1.1"
|
||||
alacritty_terminal = { git = "https://github.com/zed-industries/alacritty.git", rev = "03c2907b44b4189aac5fdeaea331f5aab5c7072e" }
|
||||
alacritty_terminal = { git = "https://github.com/zed-industries/alacritty.git", branch = "add-hush-login-flag" }
|
||||
any_vec = "0.14"
|
||||
anyhow = "1.0.86"
|
||||
arrayvec = { version = "0.7.4", features = ["serde"] }
|
||||
@@ -566,6 +565,7 @@ unindent = "0.2.0"
|
||||
unicode-segmentation = "1.10"
|
||||
unicode-script = "0.5.7"
|
||||
url = "2.2"
|
||||
urlencoding = "2.1.2"
|
||||
uuid = { version = "1.1.2", features = ["v4", "v5", "v7", "serde"] }
|
||||
wasmparser = "0.221"
|
||||
wasm-encoder = "0.221"
|
||||
@@ -598,12 +598,12 @@ features = [
|
||||
]
|
||||
|
||||
[workspace.dependencies.windows]
|
||||
version = "0.58"
|
||||
version = "0.60"
|
||||
features = [
|
||||
"implement",
|
||||
"Foundation_Collections",
|
||||
"Foundation_Numerics",
|
||||
"Storage",
|
||||
"Storage_Search",
|
||||
"Storage_Streams",
|
||||
"System_Threading",
|
||||
"UI_StartScreen",
|
||||
"UI_ViewManagement",
|
||||
@@ -624,9 +624,11 @@ features = [
|
||||
"Win32_System_Com_StructuredStorage",
|
||||
"Win32_System_Console",
|
||||
"Win32_System_DataExchange",
|
||||
"Win32_System_IO",
|
||||
"Win32_System_LibraryLoader",
|
||||
"Win32_System_Memory",
|
||||
"Win32_System_Ole",
|
||||
"Win32_System_Pipes",
|
||||
"Win32_System_SystemInformation",
|
||||
"Win32_System_SystemServices",
|
||||
"Win32_System_Threading",
|
||||
@@ -752,5 +754,9 @@ new_ret_no_self = { level = "allow" }
|
||||
should_implement_trait = { level = "allow" }
|
||||
let_underscore_future = "allow"
|
||||
|
||||
# in Rust it can be very tedious to reduce argument count without
|
||||
# running afoul of the borrow checker.
|
||||
too_many_arguments = "allow"
|
||||
|
||||
[workspace.metadata.cargo-machete]
|
||||
ignored = ["bindgen", "cbindgen", "prost_build", "serde", "component", "linkme"]
|
||||
|
||||
4
assets/icons/expand_down.svg
Normal file
@@ -0,0 +1,4 @@
|
||||
<svg width="15" height="15" viewBox="0 0 15 15" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M10.5 8.5L7.5 11.5M7.5 11.5L4.5 8.5M7.5 11.5L7.5 5.5" stroke="black" stroke-linecap="square"/>
|
||||
<path d="M5 3.5L10 3.5" stroke="black"/>
|
||||
</svg>
|
||||
|
4
assets/icons/expand_up.svg
Normal file
@@ -0,0 +1,4 @@
|
||||
<svg width="15" height="15" viewBox="0 0 15 15" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M4.5 6.5L7.5 3.5M7.5 3.5L10.5 6.5M7.5 3.5V9.5" stroke="black" stroke-linecap="square"/>
|
||||
<path d="M5 11.5H10" stroke="black"/>
|
||||
</svg>
|
||||
|
3
assets/icons/file_icons/luau.svg
Normal file
@@ -0,0 +1,3 @@
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M6.36197 1.67985C5.3748 1.41534 4.36011 2.00117 4.0956 2.98834L2.17985 10.138C1.91534 11.1252 2.50117 12.1399 3.48833 12.4044L10.638 14.3202C11.6252 14.5847 12.6399 13.9988 12.9044 13.0117L14.8202 5.86197C15.0847 4.8748 14.4988 3.86012 13.5117 3.59561L6.36197 1.67985ZM10.0457 4.58266C9.77896 4.51119 9.50479 4.66948 9.43332 4.93621L8.76235 7.44028C8.69088 7.70701 8.84917 7.98118 9.11591 8.05265L11.62 8.72362C11.8867 8.79509 12.1609 8.6368 12.2324 8.37006L12.9033 5.86599C12.9748 5.59926 12.8165 5.32509 12.5498 5.25362L10.0457 4.58266Z" fill="black"/>
|
||||
</svg>
|
||||
|
7
assets/icons/file_icons/wgsl.svg
Normal file
@@ -0,0 +1,7 @@
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M6.5 13L1.5 5H11.5L6.5 13Z" fill="black"/>
|
||||
<path d="M14 9H9L11.5 5L14 9Z" fill="black" fill-opacity="0.75"/>
|
||||
<path d="M9 9L14 9L11.5 13L9 9Z" fill="black" fill-opacity="0.65"/>
|
||||
<path d="M14 5L15.25 7L12.75 7L14 5Z" fill="black" fill-opacity="0.5"/>
|
||||
<path d="M14 9L12.75 7H15.25L14 9Z" fill="black" fill-opacity="0.55"/>
|
||||
</svg>
|
||||
|
40
assets/icons/git_onboarding_bg.svg
Normal file
@@ -0,0 +1,40 @@
|
||||
<svg width="400" height="120" xmlns="http://www.w3.org/2000/svg">
|
||||
<defs>
|
||||
<pattern id="tilePattern" width="124" height="24" patternUnits="userSpaceOnUse">
|
||||
<svg width="124" height="24" viewBox="0 0 124 24" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<g opacity="0.2">
|
||||
<path d="M16.666 12.0013L11.9993 16.668L7.33268 12.0013" stroke="white" stroke-width="1.6" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M12 7.33464L12 16.668" stroke="white" stroke-width="1.6" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M29 8.33464C29.3682 8.33464 29.6667 8.03616 29.6667 7.66797C29.6667 7.29978 29.3682 7.0013 29 7.0013C28.6318 7.0013 28.3333 7.29978 28.3333 7.66797C28.3333 8.03616 28.6318 8.33464 29 8.33464ZM29 9.66797C30.1046 9.66797 31 8.77254 31 7.66797C31 6.5634 30.1046 5.66797 29 5.66797C27.8954 5.66797 27 6.5634 27 7.66797C27 8.77254 27.8954 9.66797 29 9.66797Z" fill="white"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M35 8.33464C35.3682 8.33464 35.6667 8.03616 35.6667 7.66797C35.6667 7.29978 35.3682 7.0013 35 7.0013C34.6318 7.0013 34.3333 7.29978 34.3333 7.66797C34.3333 8.03616 34.6318 8.33464 35 8.33464ZM35 9.66797C36.1046 9.66797 37 8.77254 37 7.66797C37 6.5634 36.1046 5.66797 35 5.66797C33.8954 5.66797 33 6.5634 33 7.66797C33 8.77254 33.8954 9.66797 35 9.66797Z" fill="white"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M29 16.9987C29.3682 16.9987 29.6667 16.7002 29.6667 16.332C29.6667 15.9638 29.3682 15.6654 29 15.6654C28.6318 15.6654 28.3333 15.9638 28.3333 16.332C28.3333 16.7002 28.6318 16.9987 29 16.9987ZM29 18.332C30.1046 18.332 31 17.4366 31 16.332C31 15.2275 30.1046 14.332 29 14.332C27.8954 14.332 27 15.2275 27 16.332C27 17.4366 27.8954 18.332 29 18.332Z" fill="white"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M28.334 9H29.6673V11.4615C30.2383 11.1443 31.0005 11 32.0007 11H33.6675C34.0356 11 34.334 10.7017 34.334 10.3333V9H35.6673V10.3333C35.6673 11.4378 34.7723 12.3333 33.6675 12.3333H32.0007C30.8614 12.3333 30.3692 12.5484 30.1298 12.7549C29.9016 12.9516 29.7857 13.2347 29.6673 13.742V15H28.334V9Z" fill="white"/>
|
||||
<path d="M48.668 8.66406H55.3346V15.3307" stroke="white" stroke-width="1.6" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M48.668 15.3307L55.3346 8.66406" stroke="white" stroke-width="1.6" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M76.5871 9.40624C76.8514 9.14195 77 8.78346 77 8.40965C77 8.03583 76.8516 7.67731 76.5873 7.41295C76.323 7.14859 75.9645 7.00005 75.5907 7C75.2169 6.99995 74.8584 7.14841 74.594 7.4127L67.921 14.0874C67.8049 14.2031 67.719 14.3456 67.671 14.5024L67.0105 16.6784C66.9975 16.7217 66.9966 16.7676 67.0076 16.8113C67.0187 16.8551 67.0414 16.895 67.0734 16.9269C67.1053 16.9588 67.1453 16.9815 67.1891 16.9925C67.2328 17.0035 67.2788 17.0024 67.322 16.9894L69.4985 16.3294C69.6551 16.2818 69.7976 16.1964 69.9135 16.0809L76.5871 9.40624Z" stroke="white" stroke-width="1.33" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M74 8L76 10" stroke="white" stroke-width="1.33" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M70.3877 7.53516V6.53516" stroke="white" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M73.5693 16.6992V17.6992" stroke="white" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M66.3877 10.5352H67.3877" stroke="white" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M77.5693 13.6992H76.5693" stroke="white" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M68.3877 8.53516L67.3877 7.53516" stroke="white" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M75.5693 15.6992L76.5693 16.6992" stroke="white" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M87.334 11.9987L92.0007 7.33203L96.6673 11.9987" stroke="white" stroke-width="1.6" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M92 16.6654V7.33203" stroke="white" stroke-width="1.6" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M117 12C117 10.6739 116.473 9.40215 115.536 8.46447C114.598 7.52678 113.326 7 112 7C110.602 7.00526 109.261 7.55068 108.256 8.52222L107 9.77778" stroke="white" stroke-width="1.6" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M107 7V9.77778H109.778" stroke="white" stroke-width="1.6" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M107 12C107 13.3261 107.527 14.5979 108.464 15.5355C109.402 16.4732 110.674 17 112 17C113.398 16.9947 114.739 16.4493 115.744 15.4778L117 14.2222" stroke="white" stroke-width="1.6" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M114.223 14.2188H117V16.9965" stroke="white" stroke-width="1.6" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
</g>
|
||||
</svg>
|
||||
</pattern>
|
||||
<linearGradient id="fade" y2="1" x2="0">
|
||||
<stop offset="0" stop-color="white" stop-opacity=".52"/>
|
||||
<stop offset="1" stop-color="white" stop-opacity="0"/>
|
||||
</linearGradient>
|
||||
<mask id="fadeMask" maskContentUnits="objectBoundingBox">
|
||||
<rect width="1" height="1" fill="url(#fade)"/>
|
||||
</mask>
|
||||
</defs>
|
||||
<rect width="100%" height="100%" fill="url(#tilePattern)" mask="url(#fadeMask)"/>
|
||||
</svg>
|
||||
|
@@ -1,6 +1,6 @@
|
||||
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M3 4H8" stroke="black" stroke-width="1.75" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M6 10L11 10" stroke="black" stroke-width="1.75" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<circle cx="4" cy="10" r="1.875" stroke="black" stroke-width="1.75"/>
|
||||
<circle cx="10" cy="4" r="1.875" stroke="black" stroke-width="1.75"/>
|
||||
<path d="M3 4H8" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M6 10L11 10" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<circle cx="4" cy="10" r="1.875" stroke="black" stroke-width="1.5"/>
|
||||
<circle cx="10" cy="4" r="1.875" stroke="black" stroke-width="1.5"/>
|
||||
</svg>
|
||||
|
||||
|
@@ -362,6 +362,7 @@
|
||||
"ctrl-k ctrl-0": "editor::FoldAll",
|
||||
"ctrl-k ctrl-j": "editor::UnfoldAll",
|
||||
"ctrl-space": "editor::ShowCompletions",
|
||||
"ctrl-shift-space": "editor::ShowWordCompletions",
|
||||
"ctrl-.": "editor::ToggleCodeActions",
|
||||
"ctrl-k r": "editor::RevealInFileManager",
|
||||
"ctrl-k p": "editor::CopyPath",
|
||||
@@ -393,6 +394,7 @@
|
||||
"alt-shift-open": "projects::OpenRemote",
|
||||
"alt-ctrl-shift-o": "projects::OpenRemote",
|
||||
"alt-ctrl-shift-b": "branches::OpenRecent",
|
||||
"alt-shift-enter": "toast::RunAction",
|
||||
"ctrl-~": "workspace::NewTerminal",
|
||||
"save": "workspace::Save",
|
||||
"ctrl-s": "workspace::Save",
|
||||
@@ -731,28 +733,48 @@
|
||||
"up": "menu::SelectPrevious",
|
||||
"down": "menu::SelectNext",
|
||||
"enter": "menu::Confirm",
|
||||
"alt-y": "git::StageFile",
|
||||
"alt-shift-y": "git::UnstageFile",
|
||||
"ctrl-alt-y": "git::ToggleStaged",
|
||||
"space": "git::ToggleStaged",
|
||||
"ctrl-space": "git::StageAll",
|
||||
"ctrl-shift-space": "git::UnstageAll",
|
||||
"tab": "git_panel::FocusEditor",
|
||||
"shift-tab": "git_panel::FocusEditor",
|
||||
"escape": "git_panel::ToggleFocus",
|
||||
"ctrl-enter": "git::Commit",
|
||||
"alt-enter": "menu::SecondaryConfirm"
|
||||
"alt-enter": "menu::SecondaryConfirm",
|
||||
"backspace": "git::RestoreFile"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "GitCommit > Editor",
|
||||
"bindings": {
|
||||
"escape": "menu::Cancel",
|
||||
"enter": "editor::Newline",
|
||||
"ctrl-enter": "git::Commit",
|
||||
"alt-l": "git::GenerateCommitMessage"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "GitPanel",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"ctrl-g ctrl-g": "git::Fetch",
|
||||
"ctrl-g up": "git::Push",
|
||||
"ctrl-g down": "git::Pull",
|
||||
"ctrl-g shift-up": "git::ForcePush",
|
||||
"ctrl-g d": "git::Diff",
|
||||
"ctrl-g backspace": "git::RestoreTrackedFiles",
|
||||
"ctrl-g shift-backspace": "git::TrashUntrackedFiles",
|
||||
"ctrl-space": "git::StageAll",
|
||||
"ctrl-shift-space": "git::UnstageAll"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "GitDiff > Editor",
|
||||
"bindings": {
|
||||
"ctrl-enter": "git::Commit"
|
||||
"ctrl-enter": "git::Commit",
|
||||
"ctrl-space": "git::StageAll",
|
||||
"ctrl-shift-space": "git::UnstageAll"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -767,6 +789,7 @@
|
||||
"escape": "git_panel::FocusChanges",
|
||||
"tab": "git_panel::FocusChanges",
|
||||
"shift-tab": "git_panel::FocusChanges",
|
||||
"enter": "editor::Newline",
|
||||
"ctrl-enter": "git::Commit",
|
||||
"alt-up": "git_panel::FocusChanges",
|
||||
"alt-l": "git::GenerateCommitMessage"
|
||||
@@ -840,21 +863,22 @@
|
||||
"alt-b": ["terminal::SendText", "\u001bb"],
|
||||
"alt-f": ["terminal::SendText", "\u001bf"],
|
||||
// Overrides for conflicting keybindings
|
||||
"ctrl-b": ["terminal::SendKeystroke", "ctrl-b"],
|
||||
"ctrl-c": ["terminal::SendKeystroke", "ctrl-c"],
|
||||
"ctrl-e": ["terminal::SendKeystroke", "ctrl-e"],
|
||||
"ctrl-o": ["terminal::SendKeystroke", "ctrl-o"],
|
||||
"ctrl-w": ["terminal::SendKeystroke", "ctrl-w"],
|
||||
"ctrl-shift-a": "editor::SelectAll",
|
||||
"find": "buffer_search::Deploy",
|
||||
"ctrl-shift-f": "buffer_search::Deploy",
|
||||
"ctrl-shift-l": "terminal::Clear",
|
||||
"ctrl-shift-w": "pane::CloseActiveItem",
|
||||
"ctrl-e": ["terminal::SendKeystroke", "ctrl-e"],
|
||||
"up": ["terminal::SendKeystroke", "up"],
|
||||
"pageup": ["terminal::SendKeystroke", "pageup"],
|
||||
"down": ["terminal::SendKeystroke", "down"],
|
||||
"pagedown": ["terminal::SendKeystroke", "pagedown"],
|
||||
"escape": ["terminal::SendKeystroke", "escape"],
|
||||
"enter": ["terminal::SendKeystroke", "enter"],
|
||||
"ctrl-b": ["terminal::SendKeystroke", "ctrl-b"],
|
||||
"ctrl-c": ["terminal::SendKeystroke", "ctrl-c"],
|
||||
"shift-pageup": "terminal::ScrollPageUp",
|
||||
"shift-pagedown": "terminal::ScrollPageDown",
|
||||
"shift-up": "terminal::ScrollLineUp",
|
||||
|
||||
@@ -31,13 +31,13 @@
|
||||
"enter": "menu::Confirm",
|
||||
"ctrl-enter": "menu::SecondaryConfirm",
|
||||
"cmd-enter": "menu::SecondaryConfirm",
|
||||
"cmd-escape": "menu::Cancel",
|
||||
"ctrl-escape": "menu::Cancel",
|
||||
"ctrl-c": "menu::Cancel",
|
||||
"escape": "menu::Cancel",
|
||||
"alt-shift-enter": "menu::Restart",
|
||||
"cmd-shift-w": "workspace::CloseWindow",
|
||||
"shift-escape": "workspace::ToggleZoom",
|
||||
"cmd-escape": "menu::Cancel",
|
||||
"cmd-o": "workspace::Open",
|
||||
"cmd-=": ["zed::IncreaseBufferFontSize", { "persist": false }],
|
||||
"cmd-+": ["zed::IncreaseBufferFontSize", { "persist": false }],
|
||||
@@ -108,8 +108,8 @@
|
||||
"cmd-right": ["editor::MoveToEndOfLine", { "stop_at_soft_wraps": true }],
|
||||
"ctrl-e": ["editor::MoveToEndOfLine", { "stop_at_soft_wraps": false }],
|
||||
"end": ["editor::MoveToEndOfLine", { "stop_at_soft_wraps": true }],
|
||||
"cmd-up": "editor::MoveToStartOfExcerpt",
|
||||
"cmd-down": "editor::MoveToEndOfExcerpt",
|
||||
"cmd-up": "editor::MoveToBeginning",
|
||||
"cmd-down": "editor::MoveToEnd",
|
||||
"cmd-home": "editor::MoveToBeginning", // Typed via `cmd-fn-left`
|
||||
"cmd-end": "editor::MoveToEnd", // Typed via `cmd-fn-right`
|
||||
"shift-up": "editor::SelectUp",
|
||||
@@ -124,8 +124,8 @@
|
||||
"alt-shift-right": "editor::SelectToNextWordEnd", // cursorWordRightSelect
|
||||
"ctrl-shift-up": "editor::SelectToStartOfParagraph",
|
||||
"ctrl-shift-down": "editor::SelectToEndOfParagraph",
|
||||
"cmd-shift-up": "editor::SelectToStartOfExcerpt",
|
||||
"cmd-shift-down": "editor::SelectToEndOfExcerpt",
|
||||
"cmd-shift-up": "editor::SelectToBeginning",
|
||||
"cmd-shift-down": "editor::SelectToEnd",
|
||||
"cmd-a": "editor::SelectAll",
|
||||
"cmd-l": "editor::SelectLine",
|
||||
"cmd-shift-i": "editor::Format",
|
||||
@@ -172,6 +172,16 @@
|
||||
"alt-enter": "editor::OpenSelectionsInMultibuffer"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Editor && multibuffer",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"cmd-up": "editor::MoveToStartOfExcerpt",
|
||||
"cmd-down": "editor::MoveToStartOfNextExcerpt",
|
||||
"cmd-shift-up": "editor::SelectToStartOfExcerpt",
|
||||
"cmd-shift-down": "editor::SelectToStartOfNextExcerpt"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Editor && mode == full && edit_prediction",
|
||||
"use_key_equivalents": true,
|
||||
@@ -456,6 +466,7 @@
|
||||
// Using `ctrl-space` in Zed requires disabling the macOS global shortcut.
|
||||
// System Preferences->Keyboard->Keyboard Shortcuts->Input Sources->Select the previous input source (uncheck)
|
||||
"ctrl-space": "editor::ShowCompletions",
|
||||
"ctrl-shift-space": "editor::ShowWordCompletions",
|
||||
"cmd-.": "editor::ToggleCodeActions",
|
||||
"cmd-k r": "editor::RevealInFileManager",
|
||||
"cmd-k p": "editor::CopyPath",
|
||||
@@ -504,6 +515,7 @@
|
||||
"ctrl-~": "workspace::NewTerminal",
|
||||
"cmd-s": "workspace::Save",
|
||||
"cmd-k s": "workspace::SaveWithoutFormat",
|
||||
"alt-shift-enter": "toast::RunAction",
|
||||
"cmd-shift-s": "workspace::SaveAs",
|
||||
"cmd-shift-n": "workspace::NewWindow",
|
||||
"ctrl-`": "terminal_panel::ToggleFocus",
|
||||
@@ -753,28 +765,25 @@
|
||||
"cmd-up": "menu::SelectFirst",
|
||||
"cmd-down": "menu::SelectLast",
|
||||
"enter": "menu::Confirm",
|
||||
"cmd-alt-y": "git::ToggleStaged",
|
||||
"space": "git::ToggleStaged",
|
||||
"cmd-shift-space": "git::StageAll",
|
||||
"ctrl-shift-space": "git::UnstageAll",
|
||||
"cmd-y": "git::StageFile",
|
||||
"cmd-shift-y": "git::UnstageFile",
|
||||
"alt-down": "git_panel::FocusEditor",
|
||||
"tab": "git_panel::FocusEditor",
|
||||
"shift-tab": "git_panel::FocusEditor",
|
||||
"escape": "git_panel::ToggleFocus",
|
||||
"cmd-enter": "git::Commit"
|
||||
"cmd-enter": "git::Commit",
|
||||
"backspace": "git::RestoreFile"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "GitDiff > Editor",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"cmd-enter": "git::Commit"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "AskPass > Editor",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"enter": "menu::Confirm"
|
||||
"cmd-enter": "git::Commit",
|
||||
"cmd-ctrl-y": "git::StageAll",
|
||||
"cmd-ctrl-shift-y": "git::UnstageAll"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -790,11 +799,27 @@
|
||||
"alt-tab": "git::GenerateCommitMessage"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "GitPanel",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"ctrl-g ctrl-g": "git::Fetch",
|
||||
"ctrl-g up": "git::Push",
|
||||
"ctrl-g down": "git::Pull",
|
||||
"ctrl-g shift-up": "git::ForcePush",
|
||||
"ctrl-g d": "git::Diff",
|
||||
"ctrl-g backspace": "git::RestoreTrackedFiles",
|
||||
"ctrl-g shift-backspace": "git::TrashUntrackedFiles",
|
||||
"cmd-ctrl-y": "git::StageAll",
|
||||
"cmd-ctrl-shift-y": "git::UnstageAll"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "GitCommit > Editor",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"enter": "editor::Newline",
|
||||
"escape": "menu::Cancel",
|
||||
"cmd-enter": "git::Commit",
|
||||
"alt-tab": "git::GenerateCommitMessage"
|
||||
}
|
||||
|
||||
18
assets/prompts/assistant_system_prompt.hbs
Normal file
@@ -0,0 +1,18 @@
You are an AI assistant integrated into a text editor. Your goal is to do one of the following two things:

1. Help users answer questions and perform tasks related to their codebase.
2. Answer general-purpose questions unrelated to their particular codebase.

It will be up to you to decide which of these you are doing based on what the user has told you. When unclear, ask clarifying questions to understand the user's intent before proceeding.

You should only perform actions that modify the user's system if explicitly requested by the user:
- If the user asks a question about how to accomplish a task, provide guidance or information, and use read-only tools (e.g., search) to assist. You may suggest potential actions, but do not directly modify the user's system without explicit instruction.
- If the user clearly requests that you perform an action, carry out the action directly without explaining why you are doing so.

Be concise and direct in your responses.

The user has opened a project that contains the following root directories/files:

{{#each worktrees}}
- {{root_name}} (absolute path: {{abs_path}})
{{/each}}
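For illustration, the `{{#each worktrees}}` block at the end of the prompt expands to one bullet per project root. A minimal Python sketch of that expansion with a hypothetical worktree (this is not Zed's templating code):

```python
def render_worktrees(worktrees):
    # Mirrors the {{#each worktrees}} section of the prompt above.
    return "\n".join(
        f"- {wt['root_name']} (absolute path: {wt['abs_path']})"
        for wt in worktrees
    )

# Hypothetical project:
print(render_worktrees([{"root_name": "zed", "abs_path": "/home/me/code/zed"}]))
# -> - zed (absolute path: /home/me/code/zed)
```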
@@ -336,14 +336,14 @@
|
||||
"active_line_width": 1,
|
||||
// Determines how indent guides are colored.
|
||||
// This setting can take the following three values:
|
||||
///
|
||||
//
|
||||
// 1. "disabled"
|
||||
// 2. "fixed"
|
||||
// 3. "indent_aware"
|
||||
"coloring": "fixed",
|
||||
// Determines how indent guide backgrounds are colored.
|
||||
// This setting can take the following two values:
|
||||
///
|
||||
//
|
||||
// 1. "disabled"
|
||||
// 2. "indent_aware"
|
||||
"background_coloring": "disabled"
|
||||
@@ -402,8 +402,8 @@
|
||||
// Time to wait after scrolling the buffer, before requesting the hints,
|
||||
// set to 0 to disable debouncing.
|
||||
"scroll_debounce_ms": 50,
|
||||
/// A set of modifiers which, when pressed, will toggle the visibility of inlay hints.
|
||||
/// If the set is empty or not all the modifiers specified are pressed, inlay hints will not be toggled.
|
||||
// A set of modifiers which, when pressed, will toggle the visibility of inlay hints.
|
||||
// If the set is empty or not all the modifiers specified are pressed, inlay hints will not be toggled.
|
||||
"toggle_on_modifiers_press": {
|
||||
"control": false,
|
||||
"shift": false,
|
||||
@@ -440,7 +440,7 @@
|
||||
"scrollbar": {
|
||||
// When to show the scrollbar in the project panel.
|
||||
// This setting can take five values:
|
||||
///
|
||||
//
|
||||
// 1. null (default): Inherit editor settings
|
||||
// 2. Show the scrollbar if there's important information or
|
||||
// follow the system's configured behavior (default):
|
||||
@@ -455,7 +455,7 @@
|
||||
},
|
||||
// Which files containing diagnostic errors/warnings to mark in the project panel.
|
||||
// This setting can take the following three values:
|
||||
///
|
||||
//
|
||||
// 1. Do not mark any files:
|
||||
// "off"
|
||||
// 2. Only mark files with errors:
|
||||
@@ -512,7 +512,7 @@
|
||||
"scrollbar": {
|
||||
// When to show the scrollbar in the project panel.
|
||||
// This setting can take five values:
|
||||
///
|
||||
//
|
||||
// 1. null (default): Inherit editor settings
|
||||
// 2. Show the scrollbar if there's important information or
|
||||
// follow the system's configured behavior (default):
|
||||
@@ -547,7 +547,7 @@
|
||||
"git_panel": {
|
||||
// Whether to show the git panel button in the status bar.
|
||||
"button": true,
|
||||
// Where to the git panel. Can be 'left' or 'right'.
|
||||
// Where to show the git panel. Can be 'left' or 'right'.
|
||||
"dock": "left",
|
||||
// Default width of the git panel.
|
||||
"default_width": 360,
|
||||
@@ -555,6 +555,12 @@
|
||||
//
|
||||
// Default: icon
|
||||
"status_style": "icon",
|
||||
// What branch name to use if init.defaultBranch
|
||||
// is not set
|
||||
//
|
||||
// Default: main
|
||||
"fallback_branch_name": "main",
|
||||
|
||||
"scrollbar": {
|
||||
// When to show the scrollbar in the git panel.
|
||||
//
|
||||
@@ -594,6 +600,13 @@
|
||||
"provider": "zed.dev",
|
||||
// The model to use.
|
||||
"model": "claude-3-5-sonnet-latest"
|
||||
},
|
||||
// The model to use when applying edits from the assistant.
|
||||
"editor_model": {
|
||||
// The provider to use.
|
||||
"provider": "zed.dev",
|
||||
// The model to use.
|
||||
"model": "claude-3-5-sonnet-latest"
|
||||
}
|
||||
},
|
||||
// The settings for slash commands.
|
||||
@@ -673,7 +686,7 @@
|
||||
// Which files containing diagnostic errors/warnings to mark in the tabs.
|
||||
// Diagnostics are only shown when file icons are also active.
|
||||
// This setting can take the following three values:
|
||||
///
|
||||
//
|
||||
// 1. Do not mark any files:
|
||||
// "off"
|
||||
// 2. Only mark files with errors:
|
||||
@@ -841,12 +854,20 @@
|
||||
// How git hunks are displayed visually in the editor.
|
||||
// This setting can take two values:
|
||||
//
|
||||
// 1. Show unstaged hunks with a transparent background (default):
|
||||
// "hunk_style": "transparent"
|
||||
// 2. Show unstaged hunks with a pattern background:
|
||||
// "hunk_style": "pattern"
|
||||
"hunk_style": "transparent"
|
||||
// 1. Show unstaged hunks filled and staged hunks hollow:
|
||||
// "hunk_style": "staged_hollow"
|
||||
// 2. Show unstaged hunks hollow and staged hunks filled:
|
||||
// "hunk_style": "unstaged_hollow"
|
||||
"hunk_style": "staged_hollow"
|
||||
},
|
||||
// The list of custom Git hosting providers.
|
||||
"git_hosting_providers": [
|
||||
// {
|
||||
// "provider": "github",
|
||||
// "name": "BigCorp GitHub",
|
||||
// "base_url": "https://code.big-corp.com"
|
||||
// }
|
||||
],
|
||||
// Configuration for how direnv configuration should be loaded. May take 2 values:
|
||||
// 1. Load direnv configuration using `direnv export json` directly.
|
||||
// "load_direnv": "direct"
|
||||
@@ -1009,7 +1030,7 @@
|
||||
"scrollbar": {
|
||||
// When to show the scrollbar in the terminal.
|
||||
// This setting can take five values:
|
||||
///
|
||||
//
|
||||
// 1. null (default): Inherit editor settings
|
||||
// 2. Show the scrollbar if there's important information or
|
||||
// follow the system's configured behavior (default):
|
||||
@@ -1080,6 +1101,32 @@
|
||||
"auto_install_extensions": {
|
||||
"html": true
|
||||
},
|
||||
// Controls how completions are processed for this language.
|
||||
"completions": {
|
||||
// Controls how words are completed.
|
||||
// For large documents, not all words may be fetched for completion.
|
||||
//
|
||||
// May take 3 values:
|
||||
// 1. "enabled"
|
||||
// Always fetch document's words for completions along with LSP completions.
|
||||
// 2. "fallback"
|
||||
// Only if LSP response errors or times out, use document's words to show completions.
|
||||
// 3. "disabled"
|
||||
// Never fetch or complete document's words for completions.
|
||||
// (Word-based completions can still be queried via a separate action)
|
||||
//
|
||||
// Default: fallback
|
||||
"words": "fallback",
|
||||
// Whether to fetch LSP completions or not.
|
||||
//
|
||||
// Default: true
|
||||
"lsp": true,
|
||||
// When fetching LSP completions, determines how long to wait for a response of a particular server.
|
||||
// When set to 0, waits indefinitely.
|
||||
//
|
||||
// Default: 0
|
||||
"lsp_fetch_timeout_ms": 0
|
||||
},
|
||||
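To make the three `words` modes above concrete, here is an illustrative Python sketch of how word-based and LSP completions could be combined under each mode. It only restates the documented behavior and is not Zed's implementation:

```python
def combine_completions(mode, lsp_items, lsp_failed, document_words):
    """mode is the `words` setting: 'enabled', 'fallback', or 'disabled'."""
    if mode == "enabled":
        # Always offer the document's words alongside LSP completions.
        return lsp_items + document_words
    if mode == "fallback":
        # Use document words only when the LSP request errored or timed out.
        return document_words if lsp_failed else lsp_items
    # 'disabled': never mix in document words automatically.
    return lsp_items

# LSP timed out, so fallback mode serves the document's words:
print(combine_completions("fallback", [], lsp_failed=True, document_words=["worktree", "hunk"]))
```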
// Different settings for specific languages.
|
||||
"languages": {
|
||||
"Astro": {
|
||||
@@ -1300,8 +1347,7 @@
|
||||
},
|
||||
// Settings for auto-closing of JSX tags.
|
||||
"jsx_tag_auto_close": {
|
||||
// // Whether to auto-close JSX tags.
|
||||
// "enabled": true
|
||||
"enabled": true
|
||||
},
|
||||
// LSP Specific settings.
|
||||
"lsp": {
|
||||
|
||||
@@ -6,15 +6,7 @@
|
||||
{
|
||||
"name": "Gruvbox Dark",
|
||||
"appearance": "dark",
|
||||
"accents": [
|
||||
"#cc241dff",
|
||||
"#98971aff",
|
||||
"#d79921ff",
|
||||
"#458588ff",
|
||||
"#b16286ff",
|
||||
"#689d6aff",
|
||||
"#d65d0eff"
|
||||
],
|
||||
"accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"],
|
||||
"style": {
|
||||
"border": "#5b534dff",
|
||||
"border.variant": "#494340ff",
|
||||
@@ -105,9 +97,9 @@
|
||||
"terminal.ansi.bright_white": "#fbf1c7ff",
|
||||
"terminal.ansi.dim_white": "#b0a189ff",
|
||||
"link_text.hover": "#83a598ff",
|
||||
"version_control_added": "#b7bb26ff",
|
||||
"version_control_modified": "#f9bd2fff",
|
||||
"version_control_deleted": "#fb4a35ff",
|
||||
"version_control.added": "#b7bb26ff",
|
||||
"version_control.modified": "#f9bd2fff",
|
||||
"version_control.deleted": "#fb4a35ff",
|
||||
"conflict": "#f9bd2fff",
|
||||
"conflict.background": "#572e10ff",
|
||||
"conflict.border": "#754916ff",
|
||||
@@ -383,6 +375,11 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"variable.special": {
|
||||
"color": "#83a598ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"variant": {
|
||||
"color": "#83a598ff",
|
||||
"font_style": null,
|
||||
@@ -394,15 +391,7 @@
|
||||
{
|
||||
"name": "Gruvbox Dark Hard",
|
||||
"appearance": "dark",
|
||||
"accents": [
|
||||
"#cc241dff",
|
||||
"#98971aff",
|
||||
"#d79921ff",
|
||||
"#458588ff",
|
||||
"#b16286ff",
|
||||
"#689d6aff",
|
||||
"#d65d0eff"
|
||||
],
|
||||
"accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"],
|
||||
"style": {
|
||||
"border": "#5b534dff",
|
||||
"border.variant": "#494340ff",
|
||||
@@ -493,9 +482,9 @@
|
||||
"terminal.ansi.bright_white": "#fbf1c7ff",
|
||||
"terminal.ansi.dim_white": "#b0a189ff",
|
||||
"link_text.hover": "#83a598ff",
|
||||
"version_control_added": "#b7bb26ff",
|
||||
"version_control_modified": "#f9bd2fff",
|
||||
"version_control_deleted": "#fb4a35ff",
|
||||
"version_control.added": "#b7bb26ff",
|
||||
"version_control.modified": "#f9bd2fff",
|
||||
"version_control.deleted": "#fb4a35ff",
|
||||
"conflict": "#f9bd2fff",
|
||||
"conflict.background": "#572e10ff",
|
||||
"conflict.border": "#754916ff",
|
||||
@@ -771,6 +760,11 @@
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"variable.special": {
|
||||
"color": "#83a598ff",
|
||||
"font_style": null,
|
||||
"font_weight": null
|
||||
},
|
||||
"variant": {
|
||||
"color": "#83a598ff",
|
||||
"font_style": null,
|
||||
@@ -782,15 +776,7 @@
{
"name": "Gruvbox Dark Soft",
"appearance": "dark",
"accents": [
"#cc241dff",
"#98971aff",
"#d79921ff",
"#458588ff",
"#b16286ff",
"#689d6aff",
"#d65d0eff"
],
"accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"],
"style": {
"border": "#5b534dff",
"border.variant": "#494340ff",
@@ -881,9 +867,9 @@
"terminal.ansi.bright_white": "#fbf1c7ff",
"terminal.ansi.dim_white": "#b0a189ff",
"link_text.hover": "#83a598ff",
"version_control_added": "#b7bb26ff",
"version_control_modified": "#f9bd2fff",
"version_control_deleted": "#fb4a35ff",
"version_control.added": "#b7bb26ff",
"version_control.modified": "#f9bd2fff",
"version_control.deleted": "#fb4a35ff",
"conflict": "#f9bd2fff",
"conflict.background": "#572e10ff",
"conflict.border": "#754916ff",
@@ -1159,6 +1145,11 @@
"font_style": null,
"font_weight": null
},
"variable.special": {
"color": "#83a598ff",
"font_style": null,
"font_weight": null
},
"variant": {
"color": "#83a598ff",
"font_style": null,
@@ -1170,15 +1161,7 @@
{
"name": "Gruvbox Light",
"appearance": "light",
"accents": [
"#cc241dff",
"#98971aff",
"#d79921ff",
"#458588ff",
"#b16286ff",
"#689d6aff",
"#d65d0eff"
],
"accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"],
"style": {
"border": "#c8b899ff",
"border.variant": "#ddcca7ff",
@@ -1269,9 +1252,9 @@
"terminal.ansi.bright_white": "#282828ff",
"terminal.ansi.dim_white": "#73675eff",
"link_text.hover": "#0b6678ff",
"version_control_added": "#797410ff",
"version_control_modified": "#b57615ff",
"version_control_deleted": "#9d0308ff",
"version_control.added": "#797410ff",
"version_control.modified": "#b57615ff",
"version_control.deleted": "#9d0308ff",
"conflict": "#b57615ff",
"conflict.background": "#f5e2d0ff",
"conflict.border": "#ebccabff",
@@ -1547,6 +1530,11 @@
"font_style": null,
"font_weight": null
},
"variable.special": {
"color": "#066578ff",
"font_style": null,
"font_weight": null
},
"variant": {
"color": "#0b6678ff",
"font_style": null,
@@ -1558,15 +1546,7 @@
{
"name": "Gruvbox Light Hard",
"appearance": "light",
"accents": [
"#cc241dff",
"#98971aff",
"#d79921ff",
"#458588ff",
"#b16286ff",
"#689d6aff",
"#d65d0eff"
],
"accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"],
"style": {
"border": "#c8b899ff",
"border.variant": "#ddcca7ff",
@@ -1657,9 +1637,9 @@
"terminal.ansi.bright_white": "#282828ff",
"terminal.ansi.dim_white": "#73675eff",
"link_text.hover": "#0b6678ff",
"version_control_added": "#797410ff",
"version_control_modified": "#b57615ff",
"version_control_deleted": "#9d0308ff",
"version_control.added": "#797410ff",
"version_control.modified": "#b57615ff",
"version_control.deleted": "#9d0308ff",
"conflict": "#b57615ff",
"conflict.background": "#f5e2d0ff",
"conflict.border": "#ebccabff",
@@ -1935,6 +1915,11 @@
"font_style": null,
"font_weight": null
},
"variable.special": {
"color": "#066578ff",
"font_style": null,
"font_weight": null
},
"variant": {
"color": "#0b6678ff",
"font_style": null,
@@ -1946,15 +1931,7 @@
{
"name": "Gruvbox Light Soft",
"appearance": "light",
"accents": [
"#cc241dff",
"#98971aff",
"#d79921ff",
"#458588ff",
"#b16286ff",
"#689d6aff",
"#d65d0eff"
],
"accents": ["#cc241dff", "#98971aff", "#d79921ff", "#458588ff", "#b16286ff", "#689d6aff", "#d65d0eff"],
"style": {
"border": "#c8b899ff",
"border.variant": "#ddcca7ff",
@@ -2045,9 +2022,9 @@
"terminal.ansi.bright_white": "#282828ff",
"terminal.ansi.dim_white": "#73675eff",
"link_text.hover": "#0b6678ff",
"version_control_added": "#797410ff",
"version_control_modified": "#b57615ff",
"version_control_deleted": "#9d0308ff",
"version_control.added": "#797410ff",
"version_control.modified": "#b57615ff",
"version_control.deleted": "#9d0308ff",
"conflict": "#b57615ff",
"conflict.background": "#f5e2d0ff",
"conflict.border": "#ebccabff",
@@ -2323,6 +2300,11 @@
"font_style": null,
"font_weight": null
},
"variable.special": {
"color": "#066578ff",
"font_style": null,
"font_weight": null
},
"variant": {
"color": "#0b6678ff",
"font_style": null,

@@ -96,9 +96,9 @@
"terminal.ansi.bright_white": "#dce0e5ff",
"terminal.ansi.dim_white": "#575d65ff",
"link_text.hover": "#74ade8ff",
"version_control_added": "#a7c088ff",
"version_control_modified": "#dec184ff",
"version_control_deleted": "#d07277ff",
"version_control.added": "#27a657ff",
"version_control.modified": "#d3b020ff",
"version_control.deleted": "#e06c76ff",
"conflict": "#dec184ff",
"conflict.background": "#dec1841a",
"conflict.border": "#5d4c2fff",
@@ -475,9 +475,9 @@
"terminal.ansi.bright_white": "#242529ff",
"terminal.ansi.dim_white": "#97979aff",
"link_text.hover": "#5c78e2ff",
"version_control_added": "#669f59ff",
"version_control_modified": "#a48819ff",
"version_control_deleted": "#d36151ff",
"version_control.added": "#27a657ff",
"version_control.modified": "#d3b020ff",
"version_control.deleted": "#e06c76ff",
"conflict": "#a48819ff",
"conflict.background": "#faf2e6ff",
"conflict.border": "#f4e7d1ff",

@@ -9,7 +9,10 @@ use gpui::{
};
use language::{LanguageRegistry, LanguageServerBinaryStatus, LanguageServerId};
use lsp::LanguageServerName;
use project::{EnvironmentErrorMessage, LanguageServerProgress, Project, WorktreeId};
use project::{
EnvironmentErrorMessage, LanguageServerProgress, LspStoreEvent, Project,
ProjectEnvironmentEvent, WorktreeId,
};
use smallvec::SmallVec;
use std::{cmp::Reverse, fmt::Write, sync::Arc, time::Duration};
use ui::{prelude::*, ButtonLike, ContextMenu, PopoverMenu, PopoverMenuHandle, Tooltip};
@@ -73,7 +76,22 @@ impl ActivityIndicator {
})
.detach();

cx.observe(&project, |_, _, cx| cx.notify()).detach();
cx.subscribe(
&project.read(cx).lsp_store(),
|_, _, event, cx| match event {
LspStoreEvent::LanguageServerUpdate { .. } => cx.notify(),
_ => {}
},
)
.detach();

cx.subscribe(
&project.read(cx).environment().clone(),
|_, _, event, cx| match event {
ProjectEnvironmentEvent::ErrorsUpdated => cx.notify(),
},
)
.detach();

if let Some(auto_updater) = auto_updater.as_ref() {
cx.observe(auto_updater, |_, _, cx| cx.notify()).detach();
@@ -204,7 +222,7 @@ impl ActivityIndicator {
message: error.0.clone(),
on_click: Some(Arc::new(move |this, window, cx| {
this.project.update(cx, |project, cx| {
project.remove_environment_error(cx, worktree_id);
project.remove_environment_error(worktree_id, cx);
});
window.dispatch_action(Box::new(workspace::OpenLog), cx);
})),

@@ -553,7 +553,7 @@ pub struct Metadata {
pub user_id: Option<String>,
}

#[derive(Debug, Serialize, Deserialize)]
#[derive(Debug, Serialize, Deserialize, Default)]
pub struct Usage {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub input_tokens: Option<u32>,

@@ -186,8 +186,12 @@ fn init_language_model_settings(cx: &mut App) {

fn update_active_language_model_from_settings(cx: &mut App) {
let settings = AssistantSettings::get_global(cx);
let provider_name = LanguageModelProviderId::from(settings.default_model.provider.clone());
let model_id = LanguageModelId::from(settings.default_model.model.clone());
let active_model_provider_name =
LanguageModelProviderId::from(settings.default_model.provider.clone());
let active_model_id = LanguageModelId::from(settings.default_model.model.clone());
let editor_provider_name =
LanguageModelProviderId::from(settings.editor_model.provider.clone());
let editor_model_id = LanguageModelId::from(settings.editor_model.model.clone());
let inline_alternatives = settings
.inline_alternatives
.iter()
@@ -199,7 +203,8 @@ fn update_active_language_model_from_settings(cx: &mut App) {
})
.collect::<Vec<_>>();
LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
registry.select_active_model(&provider_name, &model_id, cx);
registry.select_active_model(&active_model_provider_name, &active_model_id, cx);
registry.select_editor_model(&editor_provider_name, &editor_model_id, cx);
registry.select_inline_alternative_models(inline_alternatives, cx);
});
}

@@ -297,7 +297,8 @@ impl AssistantPanel {
&LanguageModelRegistry::global(cx),
window,
|this, _, event: &language_model::Event, window, cx| match event {
language_model::Event::ActiveModelChanged => {
language_model::Event::ActiveModelChanged
| language_model::Event::EditorModelChanged => {
this.completion_provider_changed(window, cx);
}
language_model::Event::ProviderStateChanged => {

@@ -38,7 +38,7 @@ use language_model::{
use language_model_selector::{LanguageModelSelector, LanguageModelSelectorPopoverMenu};
use multi_buffer::MultiBufferRow;
use parking_lot::Mutex;
use project::{ActionVariant, CodeAction, ProjectTransaction};
use project::{CodeAction, LspAction, ProjectTransaction};
use prompt_store::PromptBuilder;
use rope::Rope;
use settings::{update_settings_file, Settings, SettingsStore};
@@ -386,7 +386,6 @@ impl InlineAssistant {
}
}

#[allow(clippy::too_many_arguments)]
pub fn suggest_assist(
&mut self,
editor: &Entity<Editor>,
@@ -1247,7 +1246,7 @@ impl InlineAssistant {
});

enum DeletedLines {}
let mut editor = Editor::for_multibuffer(multi_buffer, None, true, window, cx);
let mut editor = Editor::for_multibuffer(multi_buffer, None, window, cx);
editor.set_soft_wrap_mode(language::language_settings::SoftWrap::None, cx);
editor.set_show_wrap_guides(false, cx);
editor.set_show_gutter(false, cx);
@@ -1674,7 +1673,6 @@ impl Focusable for PromptEditor {
impl PromptEditor {
const MAX_LINES: u8 = 8;

#[allow(clippy::too_many_arguments)]
fn new(
id: InlineAssistId,
gutter_dimensions: Arc<Mutex<GutterDimensions>>,
@@ -1695,7 +1693,6 @@ impl PromptEditor {
},
prompt_buffer,
None,
false,
window,
cx,
);
@@ -2333,7 +2330,6 @@ struct InlineAssist {
}

impl InlineAssist {
#[allow(clippy::too_many_arguments)]
fn new(
assist_id: InlineAssistId,
group_id: InlineAssistGroupId,
@@ -3569,10 +3565,11 @@ impl CodeActionProvider for AssistantCodeActionProvider {
Task::ready(Ok(vec![CodeAction {
server_id: language::LanguageServerId(0),
range: snapshot.anchor_before(range.start)..snapshot.anchor_after(range.end),
lsp_action: ActionVariant::Action(Box::new(lsp::CodeAction {
lsp_action: LspAction::Action(Box::new(lsp::CodeAction {
title: "Fix with Assistant".into(),
..Default::default()
})),
resolved: true,
}]))
} else {
Task::ready(Ok(Vec::new()))

@@ -702,7 +702,6 @@ impl Focusable for PromptEditor {
impl PromptEditor {
const MAX_LINES: u8 = 8;

#[allow(clippy::too_many_arguments)]
fn new(
id: TerminalInlineAssistId,
prompt_history: VecDeque<String>,
@@ -721,7 +720,6 @@ impl PromptEditor {
},
prompt_buffer,
None,
false,
window,
cx,
);

@@ -38,6 +38,7 @@ file_icons.workspace = true
fs.workspace = true
futures.workspace = true
fuzzy.workspace = true
git.workspace = true
gpui.workspace = true
heed.workspace = true
html_to_markdown.workspace = true
@@ -59,11 +60,13 @@ prompt_library.workspace = true
prompt_store.workspace = true
proto.workspace = true
rope.workspace = true
scripting_tool.workspace = true
serde.workspace = true
serde_json.workspace = true
settings.workspace = true
smol.workspace = true
streaming_diff.workspace = true
telemetry.workspace = true
telemetry_events.workspace = true
terminal.workspace = true
terminal_view.workspace = true
@@ -81,8 +84,8 @@ zed_actions.workspace = true
[dev-dependencies]
editor = { workspace = true, features = ["test-support"] }
gpui = { workspace = true, "features" = ["test-support"] }
indoc.workspace = true
language = { workspace = true, "features" = ["test-support"] }
language_model = { workspace = true, "features" = ["test-support"] }
project = { workspace = true, features = ["test-support"] }
rand.workspace = true
indoc.workspace = true

@@ -1,37 +1,39 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use assistant_tool::ToolWorkingSet;
|
||||
use collections::HashMap;
|
||||
use editor::{Editor, MultiBuffer};
|
||||
use gpui::{
|
||||
list, AbsoluteLength, AnyElement, App, ClickEvent, DefiniteLength, EdgesRefinement, Empty,
|
||||
Entity, Focusable, Length, ListAlignment, ListOffset, ListState, StyleRefinement, Subscription,
|
||||
Task, TextStyleRefinement, UnderlineStyle, WeakEntity,
|
||||
};
|
||||
use language::{Buffer, LanguageRegistry};
|
||||
use language_model::{LanguageModelRegistry, LanguageModelToolUseId, Role};
|
||||
use markdown::{Markdown, MarkdownStyle};
|
||||
use settings::Settings as _;
|
||||
use theme::ThemeSettings;
|
||||
use ui::{prelude::*, Disclosure, KeyBinding};
|
||||
use util::ResultExt as _;
|
||||
use workspace::Workspace;
|
||||
|
||||
use crate::thread::{MessageId, RequestKind, Thread, ThreadError, ThreadEvent};
|
||||
use crate::thread_store::ThreadStore;
|
||||
use crate::tool_use::{ToolUse, ToolUseStatus};
|
||||
use crate::ui::ContextPill;
|
||||
use collections::HashMap;
|
||||
use editor::{Editor, MultiBuffer};
|
||||
use gpui::{
|
||||
list, percentage, AbsoluteLength, Animation, AnimationExt, AnyElement, App, ClickEvent,
|
||||
DefiniteLength, EdgesRefinement, Empty, Entity, Focusable, Length, ListAlignment, ListOffset,
|
||||
ListState, StyleRefinement, Subscription, Task, TextStyleRefinement, Transformation,
|
||||
UnderlineStyle,
|
||||
};
|
||||
use language::{Buffer, LanguageRegistry};
|
||||
use language_model::{LanguageModelRegistry, LanguageModelToolUseId, Role};
|
||||
use markdown::{Markdown, MarkdownStyle};
|
||||
use scripting_tool::{ScriptingTool, ScriptingToolInput};
|
||||
use settings::Settings as _;
|
||||
use std::sync::Arc;
|
||||
use std::time::Duration;
|
||||
use theme::ThemeSettings;
|
||||
use ui::Color;
|
||||
use ui::{prelude::*, Disclosure, KeyBinding};
|
||||
use util::ResultExt as _;
|
||||
|
||||
use crate::context_store::{refresh_context_store_text, ContextStore};
|
||||
|
||||
pub struct ActiveThread {
|
||||
workspace: WeakEntity<Workspace>,
|
||||
language_registry: Arc<LanguageRegistry>,
|
||||
tools: Arc<ToolWorkingSet>,
|
||||
thread_store: Entity<ThreadStore>,
|
||||
thread: Entity<Thread>,
|
||||
context_store: Entity<ContextStore>,
|
||||
save_thread_task: Option<Task<()>>,
|
||||
messages: Vec<MessageId>,
|
||||
list_state: ListState,
|
||||
rendered_messages_by_id: HashMap<MessageId, Entity<Markdown>>,
|
||||
rendered_scripting_tool_uses: HashMap<LanguageModelToolUseId, Entity<Markdown>>,
|
||||
editing_message: Option<(MessageId, EditMessageState)>,
|
||||
expanded_tool_uses: HashMap<LanguageModelToolUseId, bool>,
|
||||
last_error: Option<ThreadError>,
|
||||
@@ -46,9 +48,8 @@ impl ActiveThread {
|
||||
pub fn new(
|
||||
thread: Entity<Thread>,
|
||||
thread_store: Entity<ThreadStore>,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
language_registry: Arc<LanguageRegistry>,
|
||||
tools: Arc<ToolWorkingSet>,
|
||||
context_store: Entity<ContextStore>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
@@ -58,14 +59,14 @@ impl ActiveThread {
|
||||
];
|
||||
|
||||
let mut this = Self {
|
||||
workspace,
|
||||
language_registry,
|
||||
tools,
|
||||
thread_store,
|
||||
thread: thread.clone(),
|
||||
context_store,
|
||||
save_thread_task: None,
|
||||
messages: Vec::new(),
|
||||
rendered_messages_by_id: HashMap::default(),
|
||||
rendered_scripting_tool_uses: HashMap::default(),
|
||||
expanded_tool_uses: HashMap::default(),
|
||||
list_state: ListState::new(0, ListAlignment::Bottom, px(1024.), {
|
||||
let this = cx.entity().downgrade();
|
||||
@@ -81,6 +82,16 @@ impl ActiveThread {
|
||||
|
||||
for message in thread.read(cx).messages().cloned().collect::<Vec<_>>() {
|
||||
this.push_message(&message.id, message.text.clone(), window, cx);
|
||||
|
||||
for tool_use in thread.read(cx).scripting_tool_uses_for_message(message.id) {
|
||||
this.render_scripting_tool_use_markdown(
|
||||
tool_use.id.clone(),
|
||||
tool_use.name.as_ref(),
|
||||
tool_use.input.clone(),
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
this
|
||||
@@ -105,7 +116,7 @@ impl ActiveThread {
|
||||
pub fn cancel_last_completion(&mut self, cx: &mut App) -> bool {
|
||||
self.last_error.take();
|
||||
self.thread
|
||||
.update(cx, |thread, _cx| thread.cancel_last_completion())
|
||||
.update(cx, |thread, cx| thread.cancel_last_completion(cx))
|
||||
}
|
||||
|
||||
pub fn last_error(&self) -> Option<ThreadError> {
|
||||
@@ -247,9 +258,35 @@ impl ActiveThread {
|
||||
})
|
||||
}
|
||||
|
||||
/// Renders the input of a scripting tool use to Markdown.
|
||||
///
|
||||
/// Does nothing if the tool use does not correspond to the scripting tool.
|
||||
fn render_scripting_tool_use_markdown(
|
||||
&mut self,
|
||||
tool_use_id: LanguageModelToolUseId,
|
||||
tool_name: &str,
|
||||
tool_input: serde_json::Value,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
if tool_name != ScriptingTool::NAME {
|
||||
return;
|
||||
}
|
||||
|
||||
let lua_script = serde_json::from_value::<ScriptingToolInput>(tool_input)
|
||||
.map(|input| input.lua_script)
|
||||
.unwrap_or_default();
|
||||
|
||||
let lua_script =
|
||||
self.render_markdown(format!("```lua\n{lua_script}\n```").into(), window, cx);
|
||||
|
||||
self.rendered_scripting_tool_uses
|
||||
.insert(tool_use_id, lua_script);
|
||||
}
|
||||
|
||||
fn handle_thread_event(
|
||||
&mut self,
|
||||
_: &Entity<Thread>,
|
||||
_thread: &Entity<Thread>,
|
||||
event: &ThreadEvent,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
@@ -261,6 +298,7 @@ impl ActiveThread {
|
||||
ThreadEvent::StreamedCompletion | ThreadEvent::SummaryChanged => {
|
||||
self.save_thread(cx);
|
||||
}
|
||||
ThreadEvent::DoneStreaming => {}
|
||||
ThreadEvent::StreamedAssistantText(message_id, text) => {
|
||||
if let Some(markdown) = self.rendered_messages_by_id.get_mut(&message_id) {
|
||||
markdown.update(cx, |markdown, cx| {
|
||||
@@ -300,47 +338,75 @@ impl ActiveThread {
|
||||
cx.notify();
|
||||
}
|
||||
ThreadEvent::UsePendingTools => {
|
||||
let pending_tool_uses = self
|
||||
.thread
|
||||
.read(cx)
|
||||
.pending_tool_uses()
|
||||
.into_iter()
|
||||
.filter(|tool_use| tool_use.status.is_idle())
|
||||
.cloned()
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
for tool_use in pending_tool_uses {
|
||||
if let Some(tool) = self.tools.tool(&tool_use.name, cx) {
|
||||
let task = tool.run(tool_use.input, self.workspace.clone(), window, cx);
|
||||
|
||||
self.thread.update(cx, |thread, cx| {
|
||||
thread.insert_tool_output(tool_use.id.clone(), task, cx);
|
||||
});
|
||||
}
|
||||
}
|
||||
self.thread.update(cx, |thread, cx| {
|
||||
thread.use_pending_tools(cx);
|
||||
});
|
||||
}
|
||||
ThreadEvent::ToolFinished { .. } => {
|
||||
let all_tools_finished = self
|
||||
.thread
|
||||
.read(cx)
|
||||
.pending_tool_uses()
|
||||
.into_iter()
|
||||
.all(|tool_use| tool_use.status.is_error());
|
||||
if all_tools_finished {
|
||||
ThreadEvent::ToolFinished {
|
||||
pending_tool_use,
|
||||
canceled,
|
||||
..
|
||||
} => {
|
||||
let canceled = *canceled;
|
||||
if let Some(tool_use) = pending_tool_use {
|
||||
self.render_scripting_tool_use_markdown(
|
||||
tool_use.id.clone(),
|
||||
tool_use.name.as_ref(),
|
||||
tool_use.input.clone(),
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
}
|
||||
|
||||
if self.thread.read(cx).all_tools_finished() {
|
||||
let pending_refresh_buffers = self.thread.update(cx, |thread, cx| {
|
||||
thread.action_log().update(cx, |action_log, _cx| {
|
||||
action_log.take_stale_buffers_in_context()
|
||||
})
|
||||
});
|
||||
|
||||
let context_update_task = if !pending_refresh_buffers.is_empty() {
|
||||
let refresh_task = refresh_context_store_text(
|
||||
self.context_store.clone(),
|
||||
&pending_refresh_buffers,
|
||||
cx,
|
||||
);
|
||||
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
let updated_context_ids = refresh_task.await;
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.context_store.read_with(cx, |context_store, cx| {
|
||||
context_store
|
||||
.context()
|
||||
.iter()
|
||||
.filter(|context| {
|
||||
updated_context_ids.contains(&context.id())
|
||||
})
|
||||
.flat_map(|context| context.snapshot(cx))
|
||||
.collect()
|
||||
})
|
||||
})
|
||||
})
|
||||
} else {
|
||||
Task::ready(anyhow::Ok(Vec::new()))
|
||||
};
|
||||
|
||||
let model_registry = LanguageModelRegistry::read_global(cx);
|
||||
if let Some(model) = model_registry.active_model() {
|
||||
self.thread.update(cx, |thread, cx| {
|
||||
// Insert a user message to contain the tool results.
|
||||
thread.insert_user_message(
|
||||
// TODO: Sending up a user message without any content results in the model sending back
|
||||
// responses that also don't have any content. We currently don't handle this case well,
|
||||
// so for now we provide some text to keep the model on track.
|
||||
"Here are the tool results.",
|
||||
Vec::new(),
|
||||
cx,
|
||||
);
|
||||
thread.send_to_model(model, RequestKind::Chat, true, cx);
|
||||
});
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
let updated_context = context_update_task.await?;
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.thread.update(cx, |thread, cx| {
|
||||
thread.attach_tool_results(updated_context, cx);
|
||||
if !canceled {
|
||||
thread.send_to_model(model, RequestKind::Chat, cx);
|
||||
}
|
||||
});
|
||||
})
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -381,7 +447,6 @@ impl ActiveThread {
|
||||
editor::EditorMode::AutoHeight { max_lines: 8 },
|
||||
buffer,
|
||||
None,
|
||||
false,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
@@ -434,7 +499,7 @@ impl ActiveThread {
|
||||
};
|
||||
|
||||
self.thread.update(cx, |thread, cx| {
|
||||
thread.send_to_model(model, RequestKind::Chat, false, cx)
|
||||
thread.send_to_model(model, RequestKind::Chat, cx)
|
||||
});
|
||||
cx.notify();
|
||||
}
|
||||
@@ -483,12 +548,17 @@ impl ActiveThread {
|
||||
return Empty.into_any();
|
||||
};
|
||||
|
||||
let context = self.thread.read(cx).context_for_message(message_id);
|
||||
let tool_uses = self.thread.read(cx).tool_uses_for_message(message_id);
|
||||
let colors = cx.theme().colors();
|
||||
let thread = self.thread.read(cx);
|
||||
// Get all the data we need from thread before we start using it in closures
|
||||
let context = thread.context_for_message(message_id);
|
||||
let tool_uses = thread.tool_uses_for_message(message_id);
|
||||
let scripting_tool_uses = thread.scripting_tool_uses_for_message(message_id);
|
||||
|
||||
// Don't render user messages that are just there for returning tool results.
|
||||
if message.role == Role::User && self.thread.read(cx).message_has_tool_results(message_id) {
|
||||
if message.role == Role::User
|
||||
&& (thread.message_has_tool_results(message_id)
|
||||
|| thread.message_has_scripting_tool_results(message_id))
|
||||
{
|
||||
return Empty.into_any();
|
||||
}
|
||||
|
||||
@@ -501,6 +571,8 @@ impl ActiveThread {
|
||||
.filter(|(id, _)| *id == message_id)
|
||||
.map(|(_, state)| state.editor.clone());
|
||||
|
||||
let colors = cx.theme().colors();
|
||||
|
||||
let message_content = v_flex()
|
||||
.child(
|
||||
if let Some(edit_message_editor) = edit_message_editor.clone() {
|
||||
@@ -632,22 +704,27 @@ impl ActiveThread {
|
||||
)
|
||||
.child(message_content),
|
||||
),
|
||||
Role::Assistant => div()
|
||||
.id(("message-container", ix))
|
||||
.child(message_content)
|
||||
.map(|parent| {
|
||||
if tool_uses.is_empty() {
|
||||
return parent;
|
||||
}
|
||||
|
||||
parent.child(
|
||||
v_flex().children(
|
||||
tool_uses
|
||||
.into_iter()
|
||||
.map(|tool_use| self.render_tool_use(tool_use, cx)),
|
||||
),
|
||||
Role::Assistant => {
|
||||
v_flex()
|
||||
.id(("message-container", ix))
|
||||
.child(message_content)
|
||||
.when(
|
||||
!tool_uses.is_empty() || !scripting_tool_uses.is_empty(),
|
||||
|parent| {
|
||||
parent.child(
|
||||
v_flex()
|
||||
.children(
|
||||
tool_uses
|
||||
.into_iter()
|
||||
.map(|tool_use| self.render_tool_use(tool_use, cx)),
|
||||
)
|
||||
.children(scripting_tool_uses.into_iter().map(|tool_use| {
|
||||
self.render_scripting_tool_use(tool_use, cx)
|
||||
})),
|
||||
)
|
||||
},
|
||||
)
|
||||
}),
|
||||
}
|
||||
Role::System => div().id(("message-container", ix)).py_1().px_2().child(
|
||||
v_flex()
|
||||
.bg(colors.editor_background)
|
||||
@@ -666,6 +743,184 @@ impl ActiveThread {
|
||||
.copied()
|
||||
.unwrap_or_default();
|
||||
|
||||
let lighter_border = cx.theme().colors().border.opacity(0.5);
|
||||
|
||||
div().px_2p5().child(
|
||||
v_flex()
|
||||
.rounded_lg()
|
||||
.border_1()
|
||||
.border_color(lighter_border)
|
||||
.child(
|
||||
h_flex()
|
||||
.justify_between()
|
||||
.py_1()
|
||||
.pl_1()
|
||||
.pr_2()
|
||||
.bg(cx.theme().colors().editor_foreground.opacity(0.025))
|
||||
.map(|element| {
|
||||
if is_open {
|
||||
element.border_b_1().rounded_t_md()
|
||||
} else {
|
||||
element.rounded_md()
|
||||
}
|
||||
})
|
||||
.border_color(lighter_border)
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_1()
|
||||
.child(Disclosure::new("tool-use-disclosure", is_open).on_click(
|
||||
cx.listener({
|
||||
let tool_use_id = tool_use.id.clone();
|
||||
move |this, _event, _window, _cx| {
|
||||
let is_open = this
|
||||
.expanded_tool_uses
|
||||
.entry(tool_use_id.clone())
|
||||
.or_insert(false);
|
||||
|
||||
*is_open = !*is_open;
|
||||
}
|
||||
}),
|
||||
))
|
||||
.child(
|
||||
Label::new(tool_use.name)
|
||||
.size(LabelSize::Small)
|
||||
.buffer_font(cx),
|
||||
),
|
||||
)
|
||||
.child({
|
||||
let (icon_name, color, animated) = match &tool_use.status {
|
||||
ToolUseStatus::Pending => {
|
||||
(IconName::Warning, Color::Warning, false)
|
||||
}
|
||||
ToolUseStatus::Running => {
|
||||
(IconName::ArrowCircle, Color::Accent, true)
|
||||
}
|
||||
ToolUseStatus::Finished(_) => {
|
||||
(IconName::Check, Color::Success, false)
|
||||
}
|
||||
ToolUseStatus::Error(_) => (IconName::Close, Color::Error, false),
|
||||
};
|
||||
|
||||
let icon = Icon::new(icon_name).color(color).size(IconSize::Small);
|
||||
|
||||
if animated {
|
||||
icon.with_animation(
|
||||
"arrow-circle",
|
||||
Animation::new(Duration::from_secs(2)).repeat(),
|
||||
|icon, delta| {
|
||||
icon.transform(Transformation::rotate(percentage(delta)))
|
||||
},
|
||||
)
|
||||
.into_any_element()
|
||||
} else {
|
||||
icon.into_any_element()
|
||||
}
|
||||
}),
|
||||
)
|
||||
.map(|parent| {
|
||||
if !is_open {
|
||||
return parent;
|
||||
}
|
||||
|
||||
let content_container = || v_flex().py_1().gap_0p5().px_2p5();
|
||||
|
||||
parent.child(
|
||||
v_flex()
|
||||
.gap_1()
|
||||
.bg(cx.theme().colors().editor_background)
|
||||
.rounded_b_lg()
|
||||
.child(
|
||||
content_container()
|
||||
.border_b_1()
|
||||
.border_color(lighter_border)
|
||||
.child(
|
||||
Label::new("Input")
|
||||
.size(LabelSize::XSmall)
|
||||
.color(Color::Muted)
|
||||
.buffer_font(cx),
|
||||
)
|
||||
.child(
|
||||
Label::new(
|
||||
serde_json::to_string_pretty(&tool_use.input)
|
||||
.unwrap_or_default(),
|
||||
)
|
||||
.size(LabelSize::Small)
|
||||
.buffer_font(cx),
|
||||
),
|
||||
)
|
||||
.map(|container| match tool_use.status {
|
||||
ToolUseStatus::Finished(output) => container.child(
|
||||
content_container()
|
||||
.child(
|
||||
Label::new("Result")
|
||||
.size(LabelSize::XSmall)
|
||||
.color(Color::Muted)
|
||||
.buffer_font(cx),
|
||||
)
|
||||
.child(
|
||||
Label::new(output)
|
||||
.size(LabelSize::Small)
|
||||
.buffer_font(cx),
|
||||
),
|
||||
),
|
||||
ToolUseStatus::Running => container.child(
|
||||
content_container().child(
|
||||
h_flex()
|
||||
.gap_1()
|
||||
.pb_1()
|
||||
.child(
|
||||
Icon::new(IconName::ArrowCircle)
|
||||
.size(IconSize::Small)
|
||||
.color(Color::Accent)
|
||||
.with_animation(
|
||||
"arrow-circle",
|
||||
Animation::new(Duration::from_secs(2))
|
||||
.repeat(),
|
||||
|icon, delta| {
|
||||
icon.transform(Transformation::rotate(
|
||||
percentage(delta),
|
||||
))
|
||||
},
|
||||
),
|
||||
)
|
||||
.child(
|
||||
Label::new("Running…")
|
||||
.size(LabelSize::XSmall)
|
||||
.color(Color::Muted)
|
||||
.buffer_font(cx),
|
||||
),
|
||||
),
|
||||
),
|
||||
ToolUseStatus::Error(err) => container.child(
|
||||
content_container()
|
||||
.child(
|
||||
Label::new("Error")
|
||||
.size(LabelSize::XSmall)
|
||||
.color(Color::Muted)
|
||||
.buffer_font(cx),
|
||||
)
|
||||
.child(
|
||||
Label::new(err).size(LabelSize::Small).buffer_font(cx),
|
||||
),
|
||||
),
|
||||
ToolUseStatus::Pending => container,
|
||||
}),
|
||||
)
|
||||
}),
|
||||
)
|
||||
}
|
||||
|
||||
fn render_scripting_tool_use(
|
||||
&self,
|
||||
tool_use: ToolUse,
|
||||
cx: &mut Context<Self>,
|
||||
) -> impl IntoElement {
|
||||
let is_open = self
|
||||
.expanded_tool_uses
|
||||
.get(&tool_use.id)
|
||||
.copied()
|
||||
.unwrap_or_default();
|
||||
|
||||
div().px_2p5().child(
|
||||
v_flex()
|
||||
.gap_1()
|
||||
@@ -679,8 +934,13 @@ impl ActiveThread {
|
||||
.pl_1()
|
||||
.pr_2()
|
||||
.bg(cx.theme().colors().editor_foreground.opacity(0.02))
|
||||
.when(is_open, |element| element.border_b_1().rounded_t(px(6.)))
|
||||
.when(!is_open, |element| element.rounded_md())
|
||||
.map(|element| {
|
||||
if is_open {
|
||||
element.border_b_1().rounded_t_md()
|
||||
} else {
|
||||
element.rounded_md()
|
||||
}
|
||||
})
|
||||
.border_color(cx.theme().colors().border)
|
||||
.child(
|
||||
h_flex()
|
||||
@@ -716,6 +976,9 @@ impl ActiveThread {
|
||||
return parent;
|
||||
}
|
||||
|
||||
let lua_script_markdown =
|
||||
self.rendered_scripting_tool_uses.get(&tool_use.id).cloned();
|
||||
|
||||
parent.child(
|
||||
v_flex()
|
||||
.child(
|
||||
@@ -726,10 +989,15 @@ impl ActiveThread {
|
||||
.border_b_1()
|
||||
.border_color(cx.theme().colors().border)
|
||||
.child(Label::new("Input:"))
|
||||
.child(Label::new(
|
||||
serde_json::to_string_pretty(&tool_use.input)
|
||||
.unwrap_or_default(),
|
||||
)),
|
||||
.map(|parent| {
|
||||
if let Some(markdown) = lua_script_markdown {
|
||||
parent.child(markdown)
|
||||
} else {
|
||||
parent.child(Label::new(
|
||||
"Failed to render script input to Markdown",
|
||||
))
|
||||
}
|
||||
}),
|
||||
)
|
||||
.map(|parent| match tool_use.status {
|
||||
ToolUseStatus::Finished(output) => parent.child(
|
||||
|
||||
@@ -16,6 +16,7 @@ mod terminal_inline_assistant;
|
||||
mod thread;
|
||||
mod thread_history;
|
||||
mod thread_store;
|
||||
mod tool_selector;
|
||||
mod tool_use;
|
||||
mod ui;
|
||||
|
||||
@@ -30,8 +31,11 @@ use gpui::{actions, App};
|
||||
use prompt_store::PromptBuilder;
|
||||
use settings::Settings as _;
|
||||
|
||||
pub use crate::active_thread::ActiveThread;
|
||||
pub use crate::assistant_panel::{AssistantPanel, ConcreteAssistantPanelDelegate};
|
||||
pub use crate::inline_assistant::InlineAssistant;
|
||||
pub use crate::thread::{Message, RequestKind, Thread, ThreadEvent};
|
||||
pub use crate::thread_store::ThreadStore;
|
||||
|
||||
actions!(
|
||||
assistant2,
|
||||
@@ -52,7 +56,8 @@ actions!(
|
||||
FocusLeft,
|
||||
FocusRight,
|
||||
RemoveFocusedContext,
|
||||
AcceptSuggestedContext
|
||||
AcceptSuggestedContext,
|
||||
OpenActiveThreadAsMarkdown
|
||||
]
|
||||
);
|
||||
|
||||
|
||||
@@ -11,7 +11,7 @@ use assistant_slash_command::SlashCommandWorkingSet;
|
||||
use assistant_tool::ToolWorkingSet;
|
||||
|
||||
use client::zed_urls;
|
||||
use editor::Editor;
|
||||
use editor::{Editor, MultiBuffer};
|
||||
use fs::Fs;
|
||||
use gpui::{
|
||||
prelude::*, Action, AnyElement, App, AsyncWindowContext, Corner, Entity, EventEmitter,
|
||||
@@ -38,7 +38,10 @@ use crate::message_editor::MessageEditor;
|
||||
use crate::thread::{Thread, ThreadError, ThreadId};
|
||||
use crate::thread_history::{PastContext, PastThread, ThreadHistory};
|
||||
use crate::thread_store::ThreadStore;
|
||||
use crate::{InlineAssistant, NewPromptEditor, NewThread, OpenConfiguration, OpenHistory};
|
||||
use crate::{
|
||||
InlineAssistant, NewPromptEditor, NewThread, OpenActiveThreadAsMarkdown, OpenConfiguration,
|
||||
OpenHistory,
|
||||
};
|
||||
|
||||
pub fn init(cx: &mut App) {
|
||||
cx.observe_new(
|
||||
@@ -92,7 +95,6 @@ pub struct AssistantPanel {
|
||||
context_editor: Option<Entity<ContextEditor>>,
|
||||
configuration: Option<Entity<AssistantConfiguration>>,
|
||||
configuration_subscription: Option<Subscription>,
|
||||
tools: Arc<ToolWorkingSet>,
|
||||
local_timezone: UtcOffset,
|
||||
active_view: ActiveView,
|
||||
history_store: Entity<HistoryStore>,
|
||||
@@ -113,7 +115,7 @@ impl AssistantPanel {
|
||||
log::info!("[assistant2-debug] initializing ThreadStore");
|
||||
let thread_store = workspace.update(&mut cx, |workspace, cx| {
|
||||
let project = workspace.project().clone();
|
||||
ThreadStore::new(project, tools.clone(), cx)
|
||||
ThreadStore::new(project, tools.clone(), prompt_builder.clone(), cx)
|
||||
})??;
|
||||
log::info!("[assistant2-debug] finished initializing ThreadStore");
|
||||
|
||||
@@ -133,7 +135,7 @@ impl AssistantPanel {
|
||||
log::info!("[assistant2-debug] finished initializing ContextStore");
|
||||
|
||||
workspace.update_in(&mut cx, |workspace, window, cx| {
|
||||
cx.new(|cx| Self::new(workspace, thread_store, context_store, tools, window, cx))
|
||||
cx.new(|cx| Self::new(workspace, thread_store, context_store, window, cx))
|
||||
})
|
||||
})
|
||||
}
|
||||
@@ -142,7 +144,6 @@ impl AssistantPanel {
|
||||
workspace: &Workspace,
|
||||
thread_store: Entity<ThreadStore>,
|
||||
context_store: Entity<assistant_context_editor::ContextStore>,
|
||||
tools: Arc<ToolWorkingSet>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
@@ -154,10 +155,14 @@ impl AssistantPanel {
|
||||
let workspace = workspace.weak_handle();
|
||||
let weak_self = cx.entity().downgrade();
|
||||
|
||||
let message_editor_context_store =
|
||||
cx.new(|_cx| crate::context_store::ContextStore::new(workspace.clone()));
|
||||
|
||||
let message_editor = cx.new(|cx| {
|
||||
MessageEditor::new(
|
||||
fs.clone(),
|
||||
workspace.clone(),
|
||||
message_editor_context_store.clone(),
|
||||
thread_store.downgrade(),
|
||||
thread.clone(),
|
||||
window,
|
||||
@@ -168,30 +173,30 @@ impl AssistantPanel {
|
||||
let history_store =
|
||||
cx.new(|cx| HistoryStore::new(thread_store.clone(), context_store.clone(), cx));
|
||||
|
||||
let thread = cx.new(|cx| {
|
||||
ActiveThread::new(
|
||||
thread.clone(),
|
||||
thread_store.clone(),
|
||||
language_registry.clone(),
|
||||
message_editor_context_store.clone(),
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
Self {
|
||||
active_view: ActiveView::Thread,
|
||||
workspace: workspace.clone(),
|
||||
project,
|
||||
workspace,
|
||||
project: project.clone(),
|
||||
fs: fs.clone(),
|
||||
language_registry: language_registry.clone(),
|
||||
language_registry,
|
||||
thread_store: thread_store.clone(),
|
||||
thread: cx.new(|cx| {
|
||||
ActiveThread::new(
|
||||
thread.clone(),
|
||||
thread_store.clone(),
|
||||
workspace,
|
||||
language_registry,
|
||||
tools.clone(),
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
}),
|
||||
thread,
|
||||
message_editor,
|
||||
context_store,
|
||||
context_editor: None,
|
||||
configuration: None,
|
||||
configuration_subscription: None,
|
||||
tools,
|
||||
local_timezone: UtcOffset::from_whole_seconds(
|
||||
chrono::Local::now().offset().local_minus_utc(),
|
||||
)
|
||||
@@ -242,13 +247,16 @@ impl AssistantPanel {
|
||||
.update(cx, |this, cx| this.create_thread(cx));
|
||||
|
||||
self.active_view = ActiveView::Thread;
|
||||
|
||||
let message_editor_context_store =
|
||||
cx.new(|_cx| crate::context_store::ContextStore::new(self.workspace.clone()));
|
||||
|
||||
self.thread = cx.new(|cx| {
|
||||
ActiveThread::new(
|
||||
thread.clone(),
|
||||
self.thread_store.clone(),
|
||||
self.workspace.clone(),
|
||||
self.language_registry.clone(),
|
||||
self.tools.clone(),
|
||||
message_editor_context_store.clone(),
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
@@ -257,6 +265,7 @@ impl AssistantPanel {
|
||||
MessageEditor::new(
|
||||
self.fs.clone(),
|
||||
self.workspace.clone(),
|
||||
message_editor_context_store,
|
||||
self.thread_store.downgrade(),
|
||||
thread,
|
||||
window,
|
||||
@@ -377,13 +386,14 @@ impl AssistantPanel {
|
||||
let thread = open_thread_task.await?;
|
||||
this.update_in(&mut cx, |this, window, cx| {
|
||||
this.active_view = ActiveView::Thread;
|
||||
let message_editor_context_store =
|
||||
cx.new(|_cx| crate::context_store::ContextStore::new(this.workspace.clone()));
|
||||
this.thread = cx.new(|cx| {
|
||||
ActiveThread::new(
|
||||
thread.clone(),
|
||||
this.thread_store.clone(),
|
||||
this.workspace.clone(),
|
||||
this.language_registry.clone(),
|
||||
this.tools.clone(),
|
||||
message_editor_context_store.clone(),
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
@@ -392,6 +402,7 @@ impl AssistantPanel {
|
||||
MessageEditor::new(
|
||||
this.fs.clone(),
|
||||
this.workspace.clone(),
|
||||
message_editor_context_store,
|
||||
this.thread_store.downgrade(),
|
||||
thread,
|
||||
window,
|
||||
@@ -418,6 +429,65 @@ impl AssistantPanel {
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn open_active_thread_as_markdown(
|
||||
&mut self,
|
||||
_: &OpenActiveThreadAsMarkdown,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let Some(workspace) = self
|
||||
.workspace
|
||||
.upgrade()
|
||||
.ok_or_else(|| anyhow!("workspace dropped"))
|
||||
.log_err()
|
||||
else {
|
||||
return;
|
||||
};
|
||||
|
||||
let markdown_language_task = workspace
|
||||
.read(cx)
|
||||
.app_state()
|
||||
.languages
|
||||
.language_for_name("Markdown");
|
||||
let thread = self.active_thread(cx);
|
||||
cx.spawn_in(window, |_this, mut cx| async move {
|
||||
let markdown_language = markdown_language_task.await?;
|
||||
|
||||
workspace.update_in(&mut cx, |workspace, window, cx| {
|
||||
let thread = thread.read(cx);
|
||||
let markdown = thread.to_markdown()?;
|
||||
let thread_summary = thread
|
||||
.summary()
|
||||
.map(|summary| summary.to_string())
|
||||
.unwrap_or_else(|| "Thread".to_string());
|
||||
|
||||
let project = workspace.project().clone();
|
||||
let buffer = project.update(cx, |project, cx| {
|
||||
project.create_local_buffer(&markdown, Some(markdown_language), cx)
|
||||
});
|
||||
let buffer = cx.new(|cx| {
|
||||
MultiBuffer::singleton(buffer, cx).with_title(thread_summary.clone())
|
||||
});
|
||||
|
||||
workspace.add_item_to_active_pane(
|
||||
Box::new(cx.new(|cx| {
|
||||
let mut editor =
|
||||
Editor::for_multibuffer(buffer, Some(project.clone()), window, cx);
|
||||
editor.set_breadcrumb_header(thread_summary);
|
||||
editor
|
||||
})),
|
||||
None,
|
||||
true,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
|
||||
anyhow::Ok(())
|
||||
})
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
}
|
||||
|
||||
fn handle_assistant_configuration_event(
|
||||
&mut self,
|
||||
_entity: &Entity<AssistantConfiguration>,
|
||||
@@ -1018,6 +1088,7 @@ impl Render for AssistantPanel {
|
||||
.on_action(cx.listener(|this, _: &OpenHistory, window, cx| {
|
||||
this.open_history(window, cx);
|
||||
}))
|
||||
.on_action(cx.listener(Self::open_active_thread_as_markdown))
|
||||
.on_action(cx.listener(Self::deploy_prompt_library))
|
||||
.child(self.render_toolbar(cx))
|
||||
.map(|parent| match self.active_view {
|
||||
|
||||
@@ -167,8 +167,8 @@ impl PickerDelegate for FetchContextPickerDelegate {
|
||||
}
|
||||
}
|
||||
|
||||
fn no_matches_text(&self, _window: &mut Window, _cx: &mut App) -> SharedString {
|
||||
"Enter the URL that you would like to fetch".into()
|
||||
fn no_matches_text(&self, _window: &mut Window, _cx: &mut App) -> Option<SharedString> {
|
||||
Some("Enter the URL that you would like to fetch".into())
|
||||
}
|
||||
|
||||
fn selected_index(&self) -> usize {
|
||||
|
||||
@@ -223,13 +223,18 @@ pub fn render_thread_context_entry(
|
||||
h_flex()
|
||||
.gap_1p5()
|
||||
.w_full()
|
||||
.justify_between()
|
||||
.child(
|
||||
Icon::new(IconName::MessageCircle)
|
||||
.size(IconSize::XSmall)
|
||||
.color(Color::Muted),
|
||||
h_flex()
|
||||
.gap_1p5()
|
||||
.max_w_72()
|
||||
.child(
|
||||
Icon::new(IconName::MessageCircle)
|
||||
.size(IconSize::XSmall)
|
||||
.color(Color::Muted),
|
||||
)
|
||||
.child(Label::new(thread.summary.clone()).truncate()),
|
||||
)
|
||||
.child(Label::new(thread.summary.clone()))
|
||||
.child(div().w_full())
|
||||
.when(added, |el| {
|
||||
el.child(
|
||||
h_flex()
|
||||
|
||||
@@ -9,6 +9,7 @@ use language::Buffer;
|
||||
use project::{ProjectPath, Worktree};
|
||||
use rope::Rope;
|
||||
use text::BufferId;
|
||||
use util::maybe;
|
||||
use workspace::Workspace;
|
||||
|
||||
use crate::context::{
|
||||
@@ -531,35 +532,59 @@ fn collect_files_in_path(worktree: &Worktree, path: &Path) -> Vec<Arc<Path>> {
|
||||
|
||||
pub fn refresh_context_store_text(
|
||||
context_store: Entity<ContextStore>,
|
||||
changed_buffers: &HashSet<Entity<Buffer>>,
|
||||
cx: &App,
|
||||
) -> impl Future<Output = ()> {
|
||||
) -> impl Future<Output = Vec<ContextId>> {
|
||||
let mut tasks = Vec::new();
|
||||
|
||||
for context in &context_store.read(cx).context {
|
||||
match context {
|
||||
AssistantContext::File(file_context) => {
|
||||
let context_store = context_store.clone();
|
||||
if let Some(task) = refresh_file_text(context_store, file_context, cx) {
|
||||
tasks.push(task);
|
||||
let id = context.id();
|
||||
|
||||
let task = maybe!({
|
||||
match context {
|
||||
AssistantContext::File(file_context) => {
|
||||
if changed_buffers.is_empty()
|
||||
|| changed_buffers.contains(&file_context.context_buffer.buffer)
|
||||
{
|
||||
let context_store = context_store.clone();
|
||||
return refresh_file_text(context_store, file_context, cx);
|
||||
}
|
||||
}
|
||||
}
|
||||
AssistantContext::Directory(directory_context) => {
|
||||
let context_store = context_store.clone();
|
||||
if let Some(task) = refresh_directory_text(context_store, directory_context, cx) {
|
||||
tasks.push(task);
|
||||
AssistantContext::Directory(directory_context) => {
|
||||
let should_refresh = changed_buffers.is_empty()
|
||||
|| changed_buffers.iter().any(|buffer| {
|
||||
let buffer = buffer.read(cx);
|
||||
|
||||
buffer_path_log_err(&buffer)
|
||||
.map_or(false, |path| path.starts_with(&directory_context.path))
|
||||
});
|
||||
|
||||
if should_refresh {
|
||||
let context_store = context_store.clone();
|
||||
return refresh_directory_text(context_store, directory_context, cx);
|
||||
}
|
||||
}
|
||||
AssistantContext::Thread(thread_context) => {
|
||||
if changed_buffers.is_empty() {
|
||||
let context_store = context_store.clone();
|
||||
return Some(refresh_thread_text(context_store, thread_context, cx));
|
||||
}
|
||||
}
|
||||
// Intentionally omit refreshing fetched URLs as it doesn't seem all that useful,
|
||||
// and doing the caching properly could be tricky (unless it's already handled by
|
||||
// the HttpClient?).
|
||||
AssistantContext::FetchedUrl(_) => {}
|
||||
}
|
||||
AssistantContext::Thread(thread_context) => {
|
||||
let context_store = context_store.clone();
|
||||
tasks.push(refresh_thread_text(context_store, thread_context, cx));
|
||||
}
|
||||
// Intentionally omit refreshing fetched URLs as it doesn't seem all that useful,
|
||||
// and doing the caching properly could be tricky (unless it's already handled by
|
||||
// the HttpClient?).
|
||||
AssistantContext::FetchedUrl(_) => {}
|
||||
|
||||
None
|
||||
});
|
||||
|
||||
if let Some(task) = task {
|
||||
tasks.push(task.map(move |_| id));
|
||||
}
|
||||
}
|
||||
|
||||
future::join_all(tasks).map(|_| ())
|
||||
future::join_all(tasks)
|
||||
}
|
||||
|
||||
fn refresh_file_text(
|
||||
|
||||
@@ -25,7 +25,7 @@ use crate::{
|
||||
|
||||
pub struct ContextStrip {
|
||||
context_store: Entity<ContextStore>,
|
||||
pub context_picker: Entity<ContextPicker>,
|
||||
context_picker: Entity<ContextPicker>,
|
||||
context_picker_menu_handle: PopoverMenuHandle<ContextPicker>,
|
||||
focus_handle: FocusHandle,
|
||||
suggest_context_kind: SuggestContextKind,
|
||||
@@ -36,7 +36,6 @@ pub struct ContextStrip {
|
||||
}
|
||||
|
||||
impl ContextStrip {
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn new(
|
||||
context_store: Entity<ContextStore>,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
|
||||
@@ -2,10 +2,10 @@ use assistant_context_editor::SavedContextMetadata;
|
||||
use chrono::{DateTime, Utc};
|
||||
use gpui::{prelude::*, Entity};
|
||||
|
||||
use crate::thread_store::{SavedThreadMetadata, ThreadStore};
|
||||
use crate::thread_store::{SerializedThreadMetadata, ThreadStore};
|
||||
|
||||
pub enum HistoryEntry {
|
||||
Thread(SavedThreadMetadata),
|
||||
Thread(SerializedThreadMetadata),
|
||||
Context(SavedContextMetadata),
|
||||
}
|
||||
|
||||
|
||||
@@ -27,7 +27,7 @@ use language::{Buffer, Point, Selection, TransactionId};
|
||||
use language_model::{report_assistant_event, LanguageModelRegistry};
|
||||
use multi_buffer::MultiBufferRow;
|
||||
use parking_lot::Mutex;
|
||||
use project::ActionVariant;
|
||||
use project::LspAction;
|
||||
use project::{CodeAction, ProjectTransaction};
|
||||
use prompt_store::PromptBuilder;
|
||||
use settings::{Settings, SettingsStore};
|
||||
@@ -480,7 +480,6 @@ impl InlineAssistant {
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn suggest_assist(
|
||||
&mut self,
|
||||
editor: &Entity<Editor>,
|
||||
@@ -1342,7 +1341,7 @@ impl InlineAssistant {
|
||||
});
|
||||
|
||||
enum DeletedLines {}
|
||||
let mut editor = Editor::for_multibuffer(multi_buffer, None, true, window, cx);
|
||||
let mut editor = Editor::for_multibuffer(multi_buffer, None, window, cx);
|
||||
editor.set_soft_wrap_mode(language::language_settings::SoftWrap::None, cx);
|
||||
editor.set_show_wrap_guides(false, cx);
|
||||
editor.set_show_gutter(false, cx);
|
||||
@@ -1451,7 +1450,6 @@ struct InlineAssistScrollLock {
|
||||
}
|
||||
|
||||
impl EditorInlineAssists {
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn new(editor: &Entity<Editor>, window: &mut Window, cx: &mut App) -> Self {
|
||||
let (highlight_updates_tx, mut highlight_updates_rx) = async_watch::channel(());
|
||||
Self {
|
||||
@@ -1563,7 +1561,6 @@ pub struct InlineAssist {
|
||||
}
|
||||
|
||||
impl InlineAssist {
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn new(
|
||||
assist_id: InlineAssistId,
|
||||
group_id: InlineAssistGroupId,
|
||||
@@ -1728,10 +1725,11 @@ impl CodeActionProvider for AssistantCodeActionProvider {
|
||||
Task::ready(Ok(vec![CodeAction {
|
||||
server_id: language::LanguageServerId(0),
|
||||
range: snapshot.anchor_before(range.start)..snapshot.anchor_after(range.end),
|
||||
lsp_action: ActionVariant::Action(Box::new(lsp::CodeAction {
|
||||
lsp_action: LspAction::Action(Box::new(lsp::CodeAction {
|
||||
title: "Fix with Assistant".into(),
|
||||
..Default::default()
|
||||
})),
|
||||
resolved: true,
|
||||
}]))
|
||||
} else {
|
||||
Task::ready(Ok(Vec::new()))
|
||||
|
||||
@@ -816,7 +816,6 @@ impl InlineAssistId {
|
||||
}
|
||||
|
||||
impl PromptEditor<BufferCodegen> {
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn new_buffer(
|
||||
id: InlineAssistId,
|
||||
gutter_dimensions: Arc<Mutex<GutterDimensions>>,
|
||||
@@ -844,7 +843,6 @@ impl PromptEditor<BufferCodegen> {
|
||||
},
|
||||
prompt_buffer,
|
||||
None,
|
||||
false,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
@@ -976,7 +974,6 @@ impl TerminalInlineAssistId {
|
||||
}
|
||||
|
||||
impl PromptEditor<TerminalCodegen> {
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn new_terminal(
|
||||
id: TerminalInlineAssistId,
|
||||
prompt_history: VecDeque<String>,
|
||||
@@ -1003,7 +1000,6 @@ impl PromptEditor<TerminalCodegen> {
|
||||
},
|
||||
prompt_buffer,
|
||||
None,
|
||||
false,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use collections::HashSet;
|
||||
use editor::actions::MoveUp;
|
||||
use editor::{Editor, EditorElement, EditorEvent, EditorStyle};
|
||||
use file_icons::FileIcons;
|
||||
use fs::Fs;
|
||||
use gpui::{
|
||||
Animation, AnimationExt, App, DismissEvent, Entity, Focusable, Subscription, TextStyle,
|
||||
@@ -15,11 +17,12 @@ use std::time::Duration;
|
||||
use text::Bias;
|
||||
use theme::ThemeSettings;
|
||||
use ui::{
|
||||
prelude::*, ButtonLike, KeyBinding, PlatformStyle, PopoverMenu, PopoverMenuHandle, Switch,
|
||||
prelude::*, ButtonLike, Disclosure, KeyBinding, PlatformStyle, PopoverMenu, PopoverMenuHandle,
|
||||
Tooltip,
|
||||
};
|
||||
use vim_mode_setting::VimModeSetting;
|
||||
use workspace::Workspace;
|
||||
use workspace::notifications::{NotificationId, NotifyTaskExt};
|
||||
use workspace::{Toast, Workspace};
|
||||
|
||||
use crate::assistant_model_selector::AssistantModelSelector;
|
||||
use crate::context_picker::{ConfirmBehavior, ContextPicker};
|
||||
@@ -27,18 +30,21 @@ use crate::context_store::{refresh_context_store_text, ContextStore};
|
||||
use crate::context_strip::{ContextStrip, ContextStripEvent, SuggestContextKind};
|
||||
use crate::thread::{RequestKind, Thread};
|
||||
use crate::thread_store::ThreadStore;
|
||||
use crate::tool_selector::ToolSelector;
|
||||
use crate::{Chat, ChatMode, RemoveAllContext, ToggleContextPicker};
|
||||
|
||||
pub struct MessageEditor {
|
||||
thread: Entity<Thread>,
|
||||
editor: Entity<Editor>,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
context_store: Entity<ContextStore>,
|
||||
context_strip: Entity<ContextStrip>,
|
||||
context_picker_menu_handle: PopoverMenuHandle<ContextPicker>,
|
||||
inline_context_picker: Entity<ContextPicker>,
|
||||
inline_context_picker_menu_handle: PopoverMenuHandle<ContextPicker>,
|
||||
model_selector: Entity<AssistantModelSelector>,
|
||||
use_tools: bool,
|
||||
tool_selector: Entity<ToolSelector>,
|
||||
edits_expanded: bool,
|
||||
_subscriptions: Vec<Subscription>,
|
||||
}
|
||||
|
||||
@@ -46,12 +52,13 @@ impl MessageEditor {
|
||||
pub fn new(
|
||||
fs: Arc<dyn Fs>,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
context_store: Entity<ContextStore>,
|
||||
thread_store: WeakEntity<ThreadStore>,
|
||||
thread: Entity<Thread>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
let context_store = cx.new(|_cx| ContextStore::new(workspace.clone()));
|
||||
let tools = thread.read(cx).tools().clone();
|
||||
let context_picker_menu_handle = PopoverMenuHandle::default();
|
||||
let inline_context_picker_menu_handle = PopoverMenuHandle::default();
|
||||
let model_selector_menu_handle = PopoverMenuHandle::default();
|
||||
@@ -102,6 +109,7 @@ impl MessageEditor {
|
||||
Self {
|
||||
thread,
|
||||
editor: editor.clone(),
|
||||
workspace,
|
||||
context_store,
|
||||
context_strip,
|
||||
context_picker_menu_handle,
|
||||
@@ -116,13 +124,13 @@ impl MessageEditor {
|
||||
cx,
|
||||
)
|
||||
}),
|
||||
use_tools: false,
|
||||
tool_selector: cx.new(|cx| ToolSelector::new(tools, cx)),
|
||||
edits_expanded: false,
|
||||
_subscriptions: subscriptions,
|
||||
}
|
||||
}
|
||||
|
||||
fn toggle_chat_mode(&mut self, _: &ChatMode, _window: &mut Window, cx: &mut Context<Self>) {
|
||||
self.use_tools = !self.use_tools;
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
@@ -146,6 +154,14 @@ impl MessageEditor {
|
||||
}
|
||||
|
||||
fn chat(&mut self, _: &Chat, window: &mut Window, cx: &mut Context<Self>) {
|
||||
if self.is_editor_empty(cx) {
|
||||
return;
|
||||
}
|
||||
|
||||
if self.thread.read(cx).is_generating() {
|
||||
return;
|
||||
}
|
||||
|
||||
self.send_to_model(RequestKind::Chat, window, cx);
|
||||
}
|
||||
|
||||
@@ -185,18 +201,18 @@ impl MessageEditor {
|
||||
text
|
||||
});
|
||||
|
||||
let refresh_task = refresh_context_store_text(self.context_store.clone(), cx);
|
||||
let refresh_task =
|
||||
refresh_context_store_text(self.context_store.clone(), &HashSet::default(), cx);
|
||||
|
||||
let thread = self.thread.clone();
|
||||
let context_store = self.context_store.clone();
|
||||
let use_tools = self.use_tools;
|
||||
cx.spawn(move |_, mut cx| async move {
|
||||
refresh_task.await;
|
||||
thread
|
||||
.update(&mut cx, |thread, cx| {
|
||||
let context = context_store.read(cx).snapshot(cx).collect::<Vec<_>>();
|
||||
thread.insert_user_message(user_message, context, cx);
|
||||
thread.send_to_model(model, request_kind, use_tools, cx);
|
||||
thread.send_to_model(model, request_kind, cx);
|
||||
})
|
||||
.ok();
|
||||
})
|
||||
@@ -269,6 +285,34 @@ impl MessageEditor {
|
||||
self.context_strip.focus_handle(cx).focus(window);
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_feedback_click(
|
||||
&mut self,
|
||||
is_positive: bool,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let workspace = self.workspace.clone();
|
||||
let report = self
|
||||
.thread
|
||||
.update(cx, |thread, cx| thread.report_feedback(is_positive, cx));
|
||||
|
||||
cx.spawn(|_, mut cx| async move {
|
||||
report.await?;
|
||||
workspace.update(&mut cx, |workspace, cx| {
|
||||
let message = if is_positive {
|
||||
"Positive feedback recorded. Thank you!"
|
||||
} else {
|
||||
"Negative feedback recorded. Thank you for helping us improve!"
|
||||
};
|
||||
|
||||
struct ThreadFeedback;
|
||||
let id = NotificationId::unique::<ThreadFeedback>();
|
||||
workspace.show_toast(Toast::new(id, message).autohide(), cx)
|
||||
})
|
||||
})
|
||||
.detach_and_notify_err(window, cx);
|
||||
}
|
||||
}
|
||||
|
||||
impl Focusable for MessageEditor {
|
||||
@@ -284,7 +328,7 @@ impl Render for MessageEditor {
|
||||
let focus_handle = self.editor.focus_handle(cx);
|
||||
let inline_context_picker = self.inline_context_picker.clone();
|
||||
let bg_color = cx.theme().colors().editor_background;
|
||||
let is_streaming_completion = self.thread.read(cx).is_streaming();
|
||||
let is_generating = self.thread.read(cx).is_generating();
|
||||
let is_model_selected = self.is_model_selected(cx);
|
||||
let is_editor_empty = self.is_editor_empty(cx);
|
||||
let submit_label_color = if is_editor_empty {
|
||||
@@ -303,9 +347,12 @@ impl Render for MessageEditor {
|
||||
px(64.)
|
||||
};
|
||||
|
||||
let changed_buffers = self.thread.read(cx).scripting_changed_buffers(cx);
|
||||
let changed_buffers_count = changed_buffers.len();
|
||||
|
||||
v_flex()
|
||||
.size_full()
|
||||
.when(is_streaming_completion, |parent| {
|
||||
.when(is_generating, |parent| {
|
||||
let focus_handle = self.editor.focus_handle(cx).clone();
|
||||
parent.child(
|
||||
h_flex().py_3().w_full().justify_center().child(
|
||||
@@ -363,6 +410,109 @@ impl Render for MessageEditor {
|
||||
),
|
||||
)
|
||||
})
|
||||
.when(changed_buffers_count > 0, |parent| {
|
||||
parent.child(
|
||||
v_flex()
|
||||
.mx_2()
|
||||
.bg(cx.theme().colors().element_background)
|
||||
.border_1()
|
||||
.border_b_0()
|
||||
.border_color(cx.theme().colors().border)
|
||||
.rounded_t_md()
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_2()
|
||||
.p_2()
|
||||
.child(
|
||||
Disclosure::new("edits-disclosure", self.edits_expanded)
|
||||
.on_click(cx.listener(|this, _ev, _window, cx| {
|
||||
this.edits_expanded = !this.edits_expanded;
|
||||
cx.notify();
|
||||
})),
|
||||
)
|
||||
.child(
|
||||
Label::new("Edits")
|
||||
.size(LabelSize::XSmall)
|
||||
.color(Color::Muted),
|
||||
)
|
||||
.child(Label::new("•").size(LabelSize::XSmall).color(Color::Muted))
|
||||
.child(
|
||||
Label::new(format!(
|
||||
"{} {}",
|
||||
changed_buffers_count,
|
||||
if changed_buffers_count == 1 {
|
||||
"file"
|
||||
} else {
|
||||
"files"
|
||||
}
|
||||
))
|
||||
.size(LabelSize::XSmall)
|
||||
.color(Color::Muted),
|
||||
),
|
||||
)
|
||||
.when(self.edits_expanded, |parent| {
|
||||
parent.child(
|
||||
v_flex().bg(cx.theme().colors().editor_background).children(
|
||||
changed_buffers.enumerate().flat_map(|(index, buffer)| {
|
||||
let file = buffer.read(cx).file()?;
|
||||
let path = file.path();
|
||||
|
||||
let parent_label = path.parent().and_then(|parent| {
|
||||
let parent_str = parent.to_string_lossy();
|
||||
|
||||
if parent_str.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(
|
||||
Label::new(format!(
|
||||
"{}{}",
|
||||
parent_str,
|
||||
std::path::MAIN_SEPARATOR_STR
|
||||
))
|
||||
.color(Color::Muted)
|
||||
.size(LabelSize::Small),
|
||||
)
|
||||
}
|
||||
});
|
||||
|
||||
let name_label = path.file_name().map(|name| {
|
||||
Label::new(name.to_string_lossy().to_string())
|
||||
.size(LabelSize::Small)
|
||||
});
|
||||
|
||||
let file_icon = FileIcons::get_icon(&path, cx)
|
||||
.map(Icon::from_path)
|
||||
.unwrap_or_else(|| Icon::new(IconName::File));
|
||||
|
||||
let element = div()
|
||||
.p_2()
|
||||
.when(index + 1 < changed_buffers_count, |parent| {
|
||||
parent
|
||||
.border_color(cx.theme().colors().border)
|
||||
.border_b_1()
|
||||
})
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_2()
|
||||
.child(file_icon)
|
||||
.child(
|
||||
// TODO: handle overflow
|
||||
h_flex()
|
||||
.children(parent_label)
|
||||
.children(name_label),
|
||||
)
|
||||
// TODO: show lines changed
|
||||
.child(Label::new("+").color(Color::Created))
|
||||
.child(Label::new("-").color(Color::Deleted)),
|
||||
);
|
||||
|
||||
Some(element)
|
||||
}),
|
||||
),
|
||||
)
|
||||
}),
|
||||
)
|
||||
})
|
||||
.child(
|
||||
v_flex()
|
||||
.key_context("MessageEditor")
|
||||
@@ -380,7 +530,45 @@ impl Render for MessageEditor {
|
||||
.bg(bg_color)
|
||||
.border_t_1()
|
||||
.border_color(cx.theme().colors().border)
|
||||
.child(self.context_strip.clone())
|
||||
.child(
|
||||
h_flex()
|
||||
.justify_between()
|
||||
.child(self.context_strip.clone())
|
||||
.when(!self.thread.read(cx).is_empty(), |this| {
|
||||
this.child(
|
||||
h_flex()
|
||||
.gap_2()
|
||||
.child(
|
||||
IconButton::new(
|
||||
"feedback-thumbs-up",
|
||||
IconName::ThumbsUp,
|
||||
)
|
||||
.style(ButtonStyle::Subtle)
|
||||
.icon_size(IconSize::Small)
|
||||
.tooltip(Tooltip::text("Helpful"))
|
||||
.on_click(
|
||||
cx.listener(|this, _, window, cx| {
|
||||
this.handle_feedback_click(true, window, cx);
|
||||
}),
|
||||
),
|
||||
)
|
||||
.child(
|
||||
IconButton::new(
|
||||
"feedback-thumbs-down",
|
||||
IconName::ThumbsDown,
|
||||
)
|
||||
.style(ButtonStyle::Subtle)
|
||||
.icon_size(IconSize::Small)
|
||||
.tooltip(Tooltip::text("Not Helpful"))
|
||||
.on_click(
|
||||
cx.listener(|this, _, window, cx| {
|
||||
this.handle_feedback_click(false, window, cx);
|
||||
}),
|
||||
),
|
||||
),
|
||||
)
|
||||
}),
|
||||
)
|
||||
.child(
|
||||
v_flex()
|
||||
.gap_5()
|
||||
@@ -428,25 +616,7 @@ impl Render for MessageEditor {
|
||||
.child(
|
||||
h_flex()
|
||||
.justify_between()
|
||||
.child(
|
||||
Switch::new("use-tools", self.use_tools.into())
|
||||
.label("Tools")
|
||||
.on_click(cx.listener(
|
||||
|this, selection, _window, _cx| {
|
||||
this.use_tools = match selection {
|
||||
ToggleState::Selected => true,
|
||||
ToggleState::Unselected
|
||||
| ToggleState::Indeterminate => false,
|
||||
};
|
||||
},
|
||||
))
|
||||
.key_binding(KeyBinding::for_action_in(
|
||||
&ChatMode,
|
||||
&focus_handle,
|
||||
window,
|
||||
cx,
|
||||
)),
|
||||
)
|
||||
.child(h_flex().gap_2().child(self.tool_selector.clone()))
|
||||
.child(
|
||||
h_flex().gap_1().child(self.model_selector.clone()).child(
|
||||
ButtonLike::new("submit-message")
|
||||
@@ -455,7 +625,7 @@ impl Render for MessageEditor {
|
||||
.disabled(
|
||||
is_editor_empty
|
||||
|| !is_model_selected
|
||||
|| is_streaming_completion,
|
||||
|| is_generating,
|
||||
)
|
||||
.child(
|
||||
h_flex()
|
||||
@@ -490,7 +660,7 @@ impl Render for MessageEditor {
|
||||
"Type a message to submit",
|
||||
))
|
||||
})
|
||||
.when(is_streaming_completion, |button| {
|
||||
.when(is_generating, |button| {
|
||||
button.tooltip(Tooltip::text(
|
||||
"Cancel to submit a new message",
|
||||
))
|
||||
|
||||
@@ -1,23 +1,32 @@
|
||||
use std::fmt::Write as _;
|
||||
use std::io::Write;
|
||||
use std::sync::Arc;
|
||||
|
||||
use anyhow::Result;
|
||||
use assistant_tool::ToolWorkingSet;
|
||||
use anyhow::{Context as _, Result};
|
||||
use assistant_tool::{ActionLog, ToolWorkingSet};
|
||||
use chrono::{DateTime, Utc};
|
||||
use collections::{BTreeMap, HashMap, HashSet};
|
||||
use futures::StreamExt as _;
|
||||
use gpui::{App, Context, EventEmitter, SharedString, Task};
|
||||
use futures::future::Shared;
|
||||
use futures::{FutureExt, StreamExt as _};
|
||||
use git;
|
||||
use gpui::{App, AppContext, Context, Entity, EventEmitter, SharedString, Task};
|
||||
use language_model::{
|
||||
LanguageModel, LanguageModelCompletionEvent, LanguageModelRegistry, LanguageModelRequest,
|
||||
LanguageModelRequestMessage, LanguageModelRequestTool, LanguageModelToolResult,
|
||||
LanguageModelToolUseId, MaxMonthlySpendReachedError, MessageContent, PaymentRequiredError,
|
||||
Role, StopReason,
|
||||
Role, StopReason, TokenUsage,
|
||||
};
|
||||
use project::Project;
|
||||
use prompt_store::{AssistantSystemPromptWorktree, PromptBuilder};
|
||||
use scripting_tool::{ScriptingSession, ScriptingTool};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use util::{post_inc, TryFutureExt as _};
|
||||
use util::{post_inc, ResultExt, TryFutureExt as _};
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::context::{attach_context_to_message, ContextId, ContextSnapshot};
|
||||
use crate::thread_store::SavedThread;
|
||||
use crate::thread_store::{
|
||||
SerializedMessage, SerializedThread, SerializedToolResult, SerializedToolUse,
|
||||
};
|
||||
use crate::tool_use::{PendingToolUse, ToolUse, ToolUseState};
|
||||
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
@@ -59,6 +68,27 @@ pub struct Message {
|
||||
pub text: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct ProjectSnapshot {
|
||||
pub worktree_snapshots: Vec<WorktreeSnapshot>,
|
||||
pub unsaved_buffer_paths: Vec<String>,
|
||||
pub timestamp: DateTime<Utc>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct WorktreeSnapshot {
|
||||
pub worktree_path: String,
|
||||
pub git_state: Option<GitState>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct GitState {
|
||||
pub remote_url: Option<String>,
|
||||
pub head_sha: Option<String>,
|
||||
pub current_branch: Option<String>,
|
||||
pub diff: Option<String>,
|
||||
}
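
For orientation, these structs serialize straightforwardly with serde. A minimal sketch of building and serializing one snapshot, assuming this module's types and the chrono/serde_json imports above; the paths, URL, and SHA are hypothetical placeholders, not values from this diff:

// Illustrative only: construct a snapshot by hand and turn it into JSON,
// mirroring how report_feedback later converts serialized data for telemetry.
let snapshot = ProjectSnapshot {
    worktree_snapshots: vec![WorktreeSnapshot {
        worktree_path: "/home/user/project".into(), // hypothetical path
        git_state: Some(GitState {
            remote_url: Some("https://example.com/repo.git".into()), // hypothetical
            head_sha: Some("abc1234".into()),                        // hypothetical
            current_branch: Some("main".into()),
            diff: None,
        }),
    }],
    unsaved_buffer_paths: vec!["src/main.rs".into()], // hypothetical
    timestamp: Utc::now(),
};
let json = serde_json::to_value(&snapshot).unwrap_or(serde_json::Value::Null);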
|
||||
|
||||
/// A thread of conversation with the LLM.
|
||||
pub struct Thread {
|
||||
id: ThreadId,
|
||||
@@ -71,12 +101,24 @@ pub struct Thread {
|
||||
context_by_message: HashMap<MessageId, Vec<ContextId>>,
|
||||
completion_count: usize,
|
||||
pending_completions: Vec<PendingCompletion>,
|
||||
project: Entity<Project>,
|
||||
prompt_builder: Arc<PromptBuilder>,
|
||||
tools: Arc<ToolWorkingSet>,
|
||||
tool_use: ToolUseState,
|
||||
action_log: Entity<ActionLog>,
|
||||
scripting_session: Entity<ScriptingSession>,
|
||||
scripting_tool_use: ToolUseState,
|
||||
initial_project_snapshot: Shared<Task<Option<Arc<ProjectSnapshot>>>>,
|
||||
cumulative_token_usage: TokenUsage,
|
||||
}
|
||||
|
||||
impl Thread {
|
||||
pub fn new(tools: Arc<ToolWorkingSet>, _cx: &mut Context<Self>) -> Self {
|
||||
pub fn new(
|
||||
project: Entity<Project>,
|
||||
tools: Arc<ToolWorkingSet>,
|
||||
prompt_builder: Arc<PromptBuilder>,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
Self {
|
||||
id: ThreadId::new(),
|
||||
updated_at: Utc::now(),
|
||||
@@ -88,32 +130,53 @@ impl Thread {
|
||||
context_by_message: HashMap::default(),
|
||||
completion_count: 0,
|
||||
pending_completions: Vec::new(),
|
||||
project: project.clone(),
|
||||
prompt_builder,
|
||||
tools,
|
||||
tool_use: ToolUseState::new(),
|
||||
scripting_session: cx.new(|cx| ScriptingSession::new(project.clone(), cx)),
|
||||
scripting_tool_use: ToolUseState::new(),
|
||||
action_log: cx.new(|_| ActionLog::new()),
|
||||
initial_project_snapshot: {
|
||||
let project_snapshot = Self::project_snapshot(project, cx);
|
||||
cx.foreground_executor()
|
||||
.spawn(async move { Some(project_snapshot.await) })
|
||||
.shared()
|
||||
},
|
||||
cumulative_token_usage: TokenUsage::default(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn from_saved(
|
||||
pub fn deserialize(
|
||||
id: ThreadId,
|
||||
saved: SavedThread,
|
||||
serialized: SerializedThread,
|
||||
project: Entity<Project>,
|
||||
tools: Arc<ToolWorkingSet>,
|
||||
_cx: &mut Context<Self>,
|
||||
prompt_builder: Arc<PromptBuilder>,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
let next_message_id = MessageId(
|
||||
saved
|
||||
serialized
|
||||
.messages
|
||||
.last()
|
||||
.map(|message| message.id.0 + 1)
|
||||
.unwrap_or(0),
|
||||
);
|
||||
let tool_use = ToolUseState::from_saved_messages(&saved.messages);
|
||||
let tool_use = ToolUseState::from_serialized_messages(&serialized.messages, |name| {
|
||||
name != ScriptingTool::NAME
|
||||
});
|
||||
let scripting_tool_use =
|
||||
ToolUseState::from_serialized_messages(&serialized.messages, |name| {
|
||||
name == ScriptingTool::NAME
|
||||
});
|
||||
let scripting_session = cx.new(|cx| ScriptingSession::new(project.clone(), cx));
|
||||
|
||||
Self {
|
||||
id,
|
||||
updated_at: saved.updated_at,
|
||||
summary: Some(saved.summary),
|
||||
updated_at: serialized.updated_at,
|
||||
summary: Some(serialized.summary),
|
||||
pending_summary: Task::ready(None),
|
||||
messages: saved
|
||||
messages: serialized
|
||||
.messages
|
||||
.into_iter()
|
||||
.map(|message| Message {
|
||||
@@ -127,8 +190,16 @@ impl Thread {
|
||||
context_by_message: HashMap::default(),
|
||||
completion_count: 0,
|
||||
pending_completions: Vec::new(),
|
||||
project,
|
||||
prompt_builder,
|
||||
tools,
|
||||
tool_use,
|
||||
action_log: cx.new(|_| ActionLog::new()),
|
||||
scripting_session,
|
||||
scripting_tool_use,
|
||||
initial_project_snapshot: Task::ready(serialized.initial_project_snapshot).shared(),
|
||||
// TODO: persist token usage?
|
||||
cumulative_token_usage: TokenUsage::default(),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -170,8 +241,8 @@ impl Thread {
|
||||
self.messages.iter()
|
||||
}
|
||||
|
||||
pub fn is_streaming(&self) -> bool {
|
||||
!self.pending_completions.is_empty()
|
||||
pub fn is_generating(&self) -> bool {
|
||||
!self.pending_completions.is_empty() || !self.all_tools_finished()
|
||||
}
|
||||
|
||||
pub fn tools(&self) -> &Arc<ToolWorkingSet> {
|
||||
@@ -189,33 +260,69 @@ impl Thread {
|
||||
)
|
||||
}
|
||||
|
||||
pub fn pending_tool_uses(&self) -> Vec<&PendingToolUse> {
|
||||
self.tool_use.pending_tool_uses()
|
||||
/// Returns whether all of the tool uses have finished running.
|
||||
pub fn all_tools_finished(&self) -> bool {
|
||||
let mut all_pending_tool_uses = self
|
||||
.tool_use
|
||||
.pending_tool_uses()
|
||||
.into_iter()
|
||||
.chain(self.scripting_tool_use.pending_tool_uses());
|
||||
|
||||
// If the only pending tool uses left are the ones with errors, then
|
||||
// that means that we've finished running all of the pending tools.
|
||||
all_pending_tool_uses.all(|tool_use| tool_use.status.is_error())
|
||||
}
|
||||
|
||||
pub fn tool_uses_for_message(&self, id: MessageId) -> Vec<ToolUse> {
|
||||
self.tool_use.tool_uses_for_message(id)
|
||||
}
|
||||
|
||||
pub fn scripting_tool_uses_for_message(&self, id: MessageId) -> Vec<ToolUse> {
|
||||
self.scripting_tool_use.tool_uses_for_message(id)
|
||||
}
|
||||
|
||||
pub fn tool_results_for_message(&self, id: MessageId) -> Vec<&LanguageModelToolResult> {
|
||||
self.tool_use.tool_results_for_message(id)
|
||||
}
|
||||
|
||||
pub fn tool_result(&self, id: &LanguageModelToolUseId) -> Option<&LanguageModelToolResult> {
|
||||
self.tool_use.tool_result(id)
|
||||
}
|
||||
|
||||
pub fn scripting_tool_results_for_message(
|
||||
&self,
|
||||
id: MessageId,
|
||||
) -> Vec<&LanguageModelToolResult> {
|
||||
self.scripting_tool_use.tool_results_for_message(id)
|
||||
}
|
||||
|
||||
pub fn scripting_changed_buffers<'a>(
|
||||
&self,
|
||||
cx: &'a App,
|
||||
) -> impl ExactSizeIterator<Item = &'a Entity<language::Buffer>> {
|
||||
self.scripting_session.read(cx).changed_buffers()
|
||||
}
|
||||
|
||||
pub fn message_has_tool_results(&self, message_id: MessageId) -> bool {
|
||||
self.tool_use.message_has_tool_results(message_id)
|
||||
}
|
||||
|
||||
pub fn message_has_scripting_tool_results(&self, message_id: MessageId) -> bool {
|
||||
self.scripting_tool_use.message_has_tool_results(message_id)
|
||||
}
|
||||
|
||||
pub fn insert_user_message(
|
||||
&mut self,
|
||||
text: impl Into<String>,
|
||||
context: Vec<ContextSnapshot>,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
) -> MessageId {
|
||||
let message_id = self.insert_message(Role::User, text, cx);
|
||||
let context_ids = context.iter().map(|context| context.id).collect::<Vec<_>>();
|
||||
self.context
|
||||
.extend(context.into_iter().map(|context| (context.id, context)));
|
||||
self.context_by_message.insert(message_id, context_ids);
|
||||
message_id
|
||||
}
|
||||
|
||||
pub fn insert_message(
|
||||
@@ -284,27 +391,75 @@ impl Thread {
|
||||
text
|
||||
}
|
||||
|
||||
/// Serializes this thread into a format for storage or telemetry.
|
||||
pub fn serialize(&self, cx: &mut Context<Self>) -> Task<Result<SerializedThread>> {
|
||||
let initial_project_snapshot = self.initial_project_snapshot.clone();
|
||||
cx.spawn(|this, cx| async move {
|
||||
let initial_project_snapshot = initial_project_snapshot.await;
|
||||
this.read_with(&cx, |this, _| SerializedThread {
|
||||
summary: this.summary_or_default(),
|
||||
updated_at: this.updated_at(),
|
||||
messages: this
|
||||
.messages()
|
||||
.map(|message| SerializedMessage {
|
||||
id: message.id,
|
||||
role: message.role,
|
||||
text: message.text.clone(),
|
||||
tool_uses: this
|
||||
.tool_uses_for_message(message.id)
|
||||
.into_iter()
|
||||
.chain(this.scripting_tool_uses_for_message(message.id))
|
||||
.map(|tool_use| SerializedToolUse {
|
||||
id: tool_use.id,
|
||||
name: tool_use.name,
|
||||
input: tool_use.input,
|
||||
})
|
||||
.collect(),
|
||||
tool_results: this
|
||||
.tool_results_for_message(message.id)
|
||||
.into_iter()
|
||||
.chain(this.scripting_tool_results_for_message(message.id))
|
||||
.map(|tool_result| SerializedToolResult {
|
||||
tool_use_id: tool_result.tool_use_id.clone(),
|
||||
is_error: tool_result.is_error,
|
||||
content: tool_result.content.clone(),
|
||||
})
|
||||
.collect(),
|
||||
})
|
||||
.collect(),
|
||||
initial_project_snapshot,
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
pub fn send_to_model(
|
||||
&mut self,
|
||||
model: Arc<dyn LanguageModel>,
|
||||
request_kind: RequestKind,
|
||||
use_tools: bool,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let mut request = self.to_completion_request(request_kind, cx);
|
||||
request.tools = {
|
||||
let mut tools = Vec::new();
|
||||
|
||||
if use_tools {
|
||||
request.tools = self
|
||||
.tools()
|
||||
.tools(cx)
|
||||
.into_iter()
|
||||
.map(|tool| LanguageModelRequestTool {
|
||||
if self.tools.is_scripting_tool_enabled() {
|
||||
tools.push(LanguageModelRequestTool {
|
||||
name: ScriptingTool::NAME.into(),
|
||||
description: ScriptingTool::DESCRIPTION.into(),
|
||||
input_schema: ScriptingTool::input_schema(),
|
||||
});
|
||||
}
|
||||
|
||||
tools.extend(self.tools().enabled_tools(cx).into_iter().map(|tool| {
|
||||
LanguageModelRequestTool {
|
||||
name: tool.name(),
|
||||
description: tool.description(),
|
||||
input_schema: tool.input_schema(),
|
||||
})
|
||||
.collect();
|
||||
}
|
||||
}
|
||||
}));
|
||||
|
||||
tools
|
||||
};
|
||||
|
||||
self.stream_completion(request, model, cx);
|
||||
}
|
||||
@@ -312,10 +467,33 @@ impl Thread {
|
||||
pub fn to_completion_request(
|
||||
&self,
|
||||
request_kind: RequestKind,
|
||||
_cx: &App,
|
||||
cx: &App,
|
||||
) -> LanguageModelRequest {
|
||||
let worktree_root_names = self
|
||||
.project
|
||||
.read(cx)
|
||||
.visible_worktrees(cx)
|
||||
.map(|worktree| {
|
||||
let worktree = worktree.read(cx);
|
||||
AssistantSystemPromptWorktree {
|
||||
root_name: worktree.root_name().into(),
|
||||
abs_path: worktree.abs_path(),
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
let system_prompt = self
|
||||
.prompt_builder
|
||||
.generate_assistant_system_prompt(worktree_root_names)
|
||||
.context("failed to generate assistant system prompt")
|
||||
.log_err()
|
||||
.unwrap_or_default();
|
||||
|
||||
let mut request = LanguageModelRequest {
|
||||
messages: vec![],
|
||||
messages: vec![LanguageModelRequestMessage {
|
||||
role: Role::System,
|
||||
content: vec![MessageContent::Text(system_prompt)],
|
||||
cache: true,
|
||||
}],
|
||||
tools: Vec::new(),
|
||||
stop: Vec::new(),
|
||||
temperature: None,
|
||||
@@ -333,10 +511,13 @@ impl Thread {
|
||||
content: Vec::new(),
|
||||
cache: false,
|
||||
};
|
||||
|
||||
match request_kind {
|
||||
RequestKind::Chat => {
|
||||
self.tool_use
|
||||
.attach_tool_results(message.id, &mut request_message);
|
||||
self.scripting_tool_use
|
||||
.attach_tool_results(message.id, &mut request_message);
|
||||
}
|
||||
RequestKind::Summarize => {
|
||||
// We don't care about tool use during summarization.
|
||||
@@ -353,11 +534,13 @@ impl Thread {
|
||||
RequestKind::Chat => {
|
||||
self.tool_use
|
||||
.attach_tool_uses(message.id, &mut request_message);
|
||||
self.scripting_tool_use
|
||||
.attach_tool_uses(message.id, &mut request_message);
|
||||
}
|
||||
RequestKind::Summarize => {
|
||||
// We don't care about tool use during summarization.
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
request.messages.push(request_message);
|
||||
}
|
||||
@@ -378,9 +561,39 @@ impl Thread {
|
||||
request.messages.push(context_message);
|
||||
}
|
||||
|
||||
self.attach_stale_files(&mut request.messages, cx);
|
||||
|
||||
request
|
||||
}
|
||||
|
||||
fn attach_stale_files(&self, messages: &mut Vec<LanguageModelRequestMessage>, cx: &App) {
|
||||
const STALE_FILES_HEADER: &str = "These files changed since last read:";
|
||||
|
||||
let mut stale_message = String::new();
|
||||
|
||||
for stale_file in self.action_log.read(cx).stale_buffers(cx) {
|
||||
let Some(file) = stale_file.read(cx).file() else {
|
||||
continue;
|
||||
};
|
||||
|
||||
if stale_message.is_empty() {
|
||||
write!(&mut stale_message, "{}", STALE_FILES_HEADER).ok();
|
||||
}
|
||||
|
||||
writeln!(&mut stale_message, "- {}", file.path().display()).ok();
|
||||
}
|
||||
|
||||
if !stale_message.is_empty() {
|
||||
let context_message = LanguageModelRequestMessage {
|
||||
role: Role::User,
|
||||
content: vec![stale_message.into()],
|
||||
cache: false,
|
||||
};
|
||||
|
||||
messages.push(context_message);
|
||||
}
|
||||
}
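
For reference, the message this attaches is plain text: the header followed by one "- <path>" line per stale buffer. The paths below are hypothetical; note that the header is written without a trailing newline, so the first path lands on the same line as the header:

These files changed since last read:- src/main.rs
- src/lib.rs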
|
||||
|
||||
pub fn stream_completion(
|
||||
&mut self,
|
||||
request: LanguageModelRequest,
|
||||
@@ -394,6 +607,7 @@ impl Thread {
|
||||
let stream_completion = async {
|
||||
let mut events = stream.await?;
|
||||
let mut stop_reason = StopReason::EndTurn;
|
||||
let mut current_token_usage = TokenUsage::default();
|
||||
|
||||
while let Some(event) = events.next().await {
|
||||
let event = event?;
|
||||
@@ -406,6 +620,12 @@ impl Thread {
|
||||
LanguageModelCompletionEvent::Stop(reason) => {
|
||||
stop_reason = reason;
|
||||
}
|
||||
LanguageModelCompletionEvent::UsageUpdate(token_usage) => {
|
||||
thread.cumulative_token_usage =
|
||||
thread.cumulative_token_usage.clone() + token_usage.clone()
|
||||
- current_token_usage.clone();
|
||||
current_token_usage = token_usage;
|
||||
}
|
||||
LanguageModelCompletionEvent::Text(chunk) => {
|
||||
if let Some(last_message) = thread.messages.last_mut() {
|
||||
if last_message.role == Role::Assistant {
|
||||
@@ -421,7 +641,7 @@ impl Thread {
|
||||
// Importantly: We do *not* want to emit a `StreamedAssistantText` event here, as it
|
||||
// will result in duplicating the text of the chunk in the rendered Markdown.
|
||||
thread.insert_message(Role::Assistant, chunk, cx);
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
LanguageModelCompletionEvent::ToolUse(tool_use) => {
|
||||
@@ -430,9 +650,15 @@ impl Thread {
|
||||
.iter()
|
||||
.rfind(|message| message.role == Role::Assistant)
|
||||
{
|
||||
thread
|
||||
.tool_use
|
||||
.request_tool_use(last_assistant_message.id, tool_use);
|
||||
if tool_use.name.as_ref() == ScriptingTool::NAME {
|
||||
thread
|
||||
.scripting_tool_use
|
||||
.request_tool_use(last_assistant_message.id, tool_use);
|
||||
} else {
|
||||
thread
|
||||
.tool_use
|
||||
.request_tool_use(last_assistant_message.id, tool_use);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -461,32 +687,37 @@ impl Thread {
|
||||
let result = stream_completion.await;
|
||||
|
||||
thread
|
||||
.update(&mut cx, |thread, cx| match result.as_ref() {
|
||||
Ok(stop_reason) => match stop_reason {
|
||||
StopReason::ToolUse => {
|
||||
cx.emit(ThreadEvent::UsePendingTools);
|
||||
}
|
||||
StopReason::EndTurn => {}
|
||||
StopReason::MaxTokens => {}
|
||||
},
|
||||
Err(error) => {
|
||||
if error.is::<PaymentRequiredError>() {
|
||||
cx.emit(ThreadEvent::ShowError(ThreadError::PaymentRequired));
|
||||
} else if error.is::<MaxMonthlySpendReachedError>() {
|
||||
cx.emit(ThreadEvent::ShowError(ThreadError::MaxMonthlySpendReached));
|
||||
} else {
|
||||
let error_message = error
|
||||
.chain()
|
||||
.map(|err| err.to_string())
|
||||
.collect::<Vec<_>>()
|
||||
.join("\n");
|
||||
cx.emit(ThreadEvent::ShowError(ThreadError::Message(
|
||||
SharedString::from(error_message.clone()),
|
||||
)));
|
||||
}
|
||||
.update(&mut cx, |thread, cx| {
|
||||
match result.as_ref() {
|
||||
Ok(stop_reason) => match stop_reason {
|
||||
StopReason::ToolUse => {
|
||||
cx.emit(ThreadEvent::UsePendingTools);
|
||||
}
|
||||
StopReason::EndTurn => {}
|
||||
StopReason::MaxTokens => {}
|
||||
},
|
||||
Err(error) => {
|
||||
if error.is::<PaymentRequiredError>() {
|
||||
cx.emit(ThreadEvent::ShowError(ThreadError::PaymentRequired));
|
||||
} else if error.is::<MaxMonthlySpendReachedError>() {
|
||||
cx.emit(ThreadEvent::ShowError(
|
||||
ThreadError::MaxMonthlySpendReached,
|
||||
));
|
||||
} else {
|
||||
let error_message = error
|
||||
.chain()
|
||||
.map(|err| err.to_string())
|
||||
.collect::<Vec<_>>()
|
||||
.join("\n");
|
||||
cx.emit(ThreadEvent::ShowError(ThreadError::Message(
|
||||
SharedString::from(error_message.clone()),
|
||||
)));
|
||||
}
|
||||
|
||||
thread.cancel_last_completion();
|
||||
thread.cancel_last_completion(cx);
|
||||
}
|
||||
}
|
||||
cx.emit(ThreadEvent::DoneStreaming);
|
||||
})
|
||||
.ok();
|
||||
});
|
||||
@@ -550,6 +781,70 @@ impl Thread {
|
||||
});
|
||||
}
|
||||
|
||||
pub fn use_pending_tools(&mut self, cx: &mut Context<Self>) {
|
||||
let request = self.to_completion_request(RequestKind::Chat, cx);
|
||||
let pending_tool_uses = self
|
||||
.tool_use
|
||||
.pending_tool_uses()
|
||||
.into_iter()
|
||||
.filter(|tool_use| tool_use.status.is_idle())
|
||||
.cloned()
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
for tool_use in pending_tool_uses {
|
||||
if let Some(tool) = self.tools.tool(&tool_use.name, cx) {
|
||||
let task = tool.run(
|
||||
tool_use.input,
|
||||
&request.messages,
|
||||
self.project.clone(),
|
||||
self.action_log.clone(),
|
||||
cx,
|
||||
);
|
||||
|
||||
self.insert_tool_output(tool_use.id.clone(), task, cx);
|
||||
}
|
||||
}
|
||||
|
||||
let pending_scripting_tool_uses = self
|
||||
.scripting_tool_use
|
||||
.pending_tool_uses()
|
||||
.into_iter()
|
||||
.filter(|tool_use| tool_use.status.is_idle())
|
||||
.cloned()
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
for scripting_tool_use in pending_scripting_tool_uses {
|
||||
let task = match ScriptingTool::deserialize_input(scripting_tool_use.input) {
|
||||
Err(err) => Task::ready(Err(err.into())),
|
||||
Ok(input) => {
|
||||
let (script_id, script_task) =
|
||||
self.scripting_session.update(cx, move |session, cx| {
|
||||
session.run_script(input.lua_script, cx)
|
||||
});
|
||||
|
||||
let session = self.scripting_session.clone();
|
||||
cx.spawn(|_, cx| async move {
|
||||
script_task.await;
|
||||
|
||||
let message = session.read_with(&cx, |session, _cx| {
|
||||
// Using an id to get the script output seems impractical.
// Why not just include it in the Task result?
// Because we'll later report the script's state as it runs,
// so the output needs to be queryable by id rather than returned once.
session
|
||||
.get(script_id)
|
||||
.output_message_for_llm()
|
||||
.expect("Script shouldn't still be running")
|
||||
})?;
|
||||
|
||||
Ok(message)
|
||||
})
|
||||
}
|
||||
};
|
||||
|
||||
self.insert_scripting_tool_output(scripting_tool_use.id.clone(), task, cx);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn insert_tool_output(
|
||||
&mut self,
|
||||
tool_use_id: LanguageModelToolUseId,
|
||||
@@ -562,11 +857,15 @@ impl Thread {
|
||||
let output = output.await;
|
||||
thread
|
||||
.update(&mut cx, |thread, cx| {
|
||||
thread
|
||||
let pending_tool_use = thread
|
||||
.tool_use
|
||||
.insert_tool_output(tool_use_id.clone(), output);
|
||||
|
||||
cx.emit(ThreadEvent::ToolFinished { tool_use_id });
|
||||
cx.emit(ThreadEvent::ToolFinished {
|
||||
tool_use_id,
|
||||
pending_tool_use,
|
||||
canceled: false,
|
||||
});
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
@@ -576,16 +875,260 @@ impl Thread {
|
||||
.run_pending_tool(tool_use_id, insert_output_task);
|
||||
}
|
||||
|
||||
pub fn insert_scripting_tool_output(
|
||||
&mut self,
|
||||
tool_use_id: LanguageModelToolUseId,
|
||||
output: Task<Result<String>>,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let insert_output_task = cx.spawn(|thread, mut cx| {
|
||||
let tool_use_id = tool_use_id.clone();
|
||||
async move {
|
||||
let output = output.await;
|
||||
thread
|
||||
.update(&mut cx, |thread, cx| {
|
||||
let pending_tool_use = thread
|
||||
.scripting_tool_use
|
||||
.insert_tool_output(tool_use_id.clone(), output);
|
||||
|
||||
cx.emit(ThreadEvent::ToolFinished {
|
||||
tool_use_id,
|
||||
pending_tool_use,
|
||||
canceled: false,
|
||||
});
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
});
|
||||
|
||||
self.scripting_tool_use
|
||||
.run_pending_tool(tool_use_id, insert_output_task);
|
||||
}
|
||||
|
||||
pub fn attach_tool_results(
|
||||
&mut self,
|
||||
updated_context: Vec<ContextSnapshot>,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.context.extend(
|
||||
updated_context
|
||||
.into_iter()
|
||||
.map(|context| (context.id, context)),
|
||||
);
|
||||
|
||||
// Insert a user message to contain the tool results.
|
||||
self.insert_user_message(
|
||||
// TODO: Sending up a user message without any content results in the model sending back
|
||||
// responses that also don't have any content. We currently don't handle this case well,
|
||||
// so for now we provide some text to keep the model on track.
|
||||
"Here are the tool results.",
|
||||
Vec::new(),
|
||||
cx,
|
||||
);
|
||||
}
|
||||
|
||||
/// Cancels the last pending completion, if there are any pending.
|
||||
///
|
||||
/// Returns whether a completion was canceled.
|
||||
pub fn cancel_last_completion(&mut self) -> bool {
|
||||
if let Some(_last_completion) = self.pending_completions.pop() {
|
||||
pub fn cancel_last_completion(&mut self, cx: &mut Context<Self>) -> bool {
|
||||
if self.pending_completions.pop().is_some() {
|
||||
true
|
||||
} else {
|
||||
false
|
||||
let mut canceled = false;
|
||||
for pending_tool_use in self.tool_use.cancel_pending() {
|
||||
canceled = true;
|
||||
cx.emit(ThreadEvent::ToolFinished {
|
||||
tool_use_id: pending_tool_use.id.clone(),
|
||||
pending_tool_use: Some(pending_tool_use),
|
||||
canceled: true,
|
||||
});
|
||||
}
|
||||
canceled
|
||||
}
|
||||
}
|
||||
|
||||
/// Reports feedback about the thread and stores it in our telemetry backend.
|
||||
pub fn report_feedback(&self, is_positive: bool, cx: &mut Context<Self>) -> Task<Result<()>> {
|
||||
let final_project_snapshot = Self::project_snapshot(self.project.clone(), cx);
|
||||
let serialized_thread = self.serialize(cx);
|
||||
let thread_id = self.id().clone();
|
||||
let client = self.project.read(cx).client();
|
||||
|
||||
cx.background_spawn(async move {
|
||||
let final_project_snapshot = final_project_snapshot.await;
|
||||
let serialized_thread = serialized_thread.await?;
|
||||
let thread_data =
|
||||
serde_json::to_value(serialized_thread).unwrap_or_else(|_| serde_json::Value::Null);
|
||||
|
||||
let rating = if is_positive { "positive" } else { "negative" };
|
||||
telemetry::event!(
|
||||
"Assistant Thread Rated",
|
||||
rating,
|
||||
thread_id,
|
||||
thread_data,
|
||||
final_project_snapshot
|
||||
);
|
||||
client.telemetry().flush_events();
|
||||
|
||||
Ok(())
|
||||
})
|
||||
}
|
||||
|
||||
/// Create a snapshot of the current project state including git information and unsaved buffers.
|
||||
fn project_snapshot(
|
||||
project: Entity<Project>,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Task<Arc<ProjectSnapshot>> {
|
||||
let worktree_snapshots: Vec<_> = project
|
||||
.read(cx)
|
||||
.visible_worktrees(cx)
|
||||
.map(|worktree| Self::worktree_snapshot(worktree, cx))
|
||||
.collect();
|
||||
|
||||
cx.spawn(move |_, cx| async move {
|
||||
let worktree_snapshots = futures::future::join_all(worktree_snapshots).await;
|
||||
|
||||
let mut unsaved_buffers = Vec::new();
|
||||
cx.update(|app_cx| {
|
||||
let buffer_store = project.read(app_cx).buffer_store();
|
||||
for buffer_handle in buffer_store.read(app_cx).buffers() {
|
||||
let buffer = buffer_handle.read(app_cx);
|
||||
if buffer.is_dirty() {
|
||||
if let Some(file) = buffer.file() {
|
||||
let path = file.path().to_string_lossy().to_string();
|
||||
unsaved_buffers.push(path);
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
.ok();
|
||||
|
||||
Arc::new(ProjectSnapshot {
|
||||
worktree_snapshots,
|
||||
unsaved_buffer_paths: unsaved_buffers,
|
||||
timestamp: Utc::now(),
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
fn worktree_snapshot(worktree: Entity<project::Worktree>, cx: &App) -> Task<WorktreeSnapshot> {
|
||||
cx.spawn(move |cx| async move {
|
||||
// Get worktree path and snapshot
|
||||
let worktree_info = cx.update(|app_cx| {
|
||||
let worktree = worktree.read(app_cx);
|
||||
let path = worktree.abs_path().to_string_lossy().to_string();
|
||||
let snapshot = worktree.snapshot();
|
||||
(path, snapshot)
|
||||
});
|
||||
|
||||
let Ok((worktree_path, snapshot)) = worktree_info else {
|
||||
return WorktreeSnapshot {
|
||||
worktree_path: String::new(),
|
||||
git_state: None,
|
||||
};
|
||||
};
|
||||
|
||||
// Extract git information
|
||||
let git_state = match snapshot.repositories().first() {
|
||||
None => None,
|
||||
Some(repo_entry) => {
|
||||
// Get branch information
|
||||
let current_branch = repo_entry.branch().map(|branch| branch.name.to_string());
|
||||
|
||||
// Get repository info
|
||||
let repo_result = worktree.read_with(&cx, |worktree, _cx| {
|
||||
if let project::Worktree::Local(local_worktree) = &worktree {
|
||||
local_worktree.get_local_repo(repo_entry).map(|local_repo| {
|
||||
let repo = local_repo.repo();
|
||||
(repo.remote_url("origin"), repo.head_sha(), repo.clone())
|
||||
})
|
||||
} else {
|
||||
None
|
||||
}
|
||||
});
|
||||
|
||||
match repo_result {
|
||||
Ok(Some((remote_url, head_sha, repository))) => {
|
||||
// Get diff asynchronously
|
||||
let diff = repository
|
||||
.diff(git::repository::DiffType::HeadToWorktree, cx)
|
||||
.await
|
||||
.ok();
|
||||
|
||||
Some(GitState {
|
||||
remote_url,
|
||||
head_sha,
|
||||
current_branch,
|
||||
diff,
|
||||
})
|
||||
}
|
||||
Err(_) | Ok(None) => None,
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
WorktreeSnapshot {
|
||||
worktree_path,
|
||||
git_state,
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
pub fn to_markdown(&self) -> Result<String> {
|
||||
let mut markdown = Vec::new();
|
||||
|
||||
if let Some(summary) = self.summary() {
|
||||
writeln!(markdown, "# {summary}\n")?;
|
||||
};
|
||||
|
||||
for message in self.messages() {
|
||||
writeln!(
|
||||
markdown,
|
||||
"## {role}\n",
|
||||
role = match message.role {
|
||||
Role::User => "User",
|
||||
Role::Assistant => "Assistant",
|
||||
Role::System => "System",
|
||||
}
|
||||
)?;
|
||||
writeln!(markdown, "{}\n", message.text)?;
|
||||
|
||||
for tool_use in self.tool_uses_for_message(message.id) {
|
||||
writeln!(
|
||||
markdown,
|
||||
"**Use Tool: {} ({})**",
|
||||
tool_use.name, tool_use.id
|
||||
)?;
|
||||
writeln!(markdown, "```json")?;
|
||||
writeln!(
|
||||
markdown,
|
||||
"{}",
|
||||
serde_json::to_string_pretty(&tool_use.input)?
|
||||
)?;
|
||||
writeln!(markdown, "```")?;
|
||||
}
|
||||
|
||||
for tool_result in self.tool_results_for_message(message.id) {
|
||||
write!(markdown, "**Tool Results: {}", tool_result.tool_use_id)?;
|
||||
if tool_result.is_error {
|
||||
write!(markdown, " (Error)")?;
|
||||
}
|
||||
|
||||
writeln!(markdown, "**\n")?;
|
||||
writeln!(markdown, "{}", tool_result.content)?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(String::from_utf8_lossy(&markdown).to_string())
|
||||
}
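
Given the writeln! calls above, the exported markdown takes roughly this shape; the placeholders in angle brackets stand in for the thread's actual summary, messages, tool names, ids, and contents, and the trailing user message matches the "Here are the tool results." text inserted by attach_tool_results:

# <thread summary>

## User

<user message text>

## Assistant

<assistant message text>

**Use Tool: <tool name> (<tool use id>)**
```json
{ "...tool input as pretty-printed JSON..." }
```

## User

Here are the tool results.

**Tool Results: <tool use id>**

<tool result content>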
|
||||
|
||||
pub fn action_log(&self) -> &Entity<ActionLog> {
|
||||
&self.action_log
|
||||
}
|
||||
|
||||
pub fn cumulative_token_usage(&self) -> TokenUsage {
|
||||
self.cumulative_token_usage.clone()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
@@ -600,6 +1143,7 @@ pub enum ThreadEvent {
|
||||
ShowError(ThreadError),
|
||||
StreamedCompletion,
|
||||
StreamedAssistantText(MessageId, String),
|
||||
DoneStreaming,
|
||||
MessageAdded(MessageId),
|
||||
MessageEdited(MessageId),
|
||||
MessageDeleted(MessageId),
|
||||
@@ -608,6 +1152,10 @@ pub enum ThreadEvent {
|
||||
ToolFinished {
|
||||
#[allow(unused)]
|
||||
tool_use_id: LanguageModelToolUseId,
|
||||
/// The pending tool use that corresponds to this tool.
|
||||
pending_tool_use: Option<PendingToolUse>,
|
||||
/// Whether the tool was canceled by the user.
|
||||
canceled: bool,
|
||||
},
|
||||
}
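
A consumer of these events (the panel entity that owns the thread, say) would pattern-match on the new fields; this is a rough sketch rather than code from this diff, and the exact closure signature depends on gpui's Context::subscribe:

cx.subscribe(&thread, |_this, _thread, event: &ThreadEvent, _cx| {
    if let ThreadEvent::ToolFinished { pending_tool_use, canceled, .. } = event {
        // `canceled` distinguishes user cancellation from normal completion;
        // `pending_tool_use` still carries the tool's name and input, when known.
        if *canceled {
            // e.g. skip rendering output for this tool use
        }
        let _ = pending_tool_use;
    }
})
.detach();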
|
||||
|
||||
|
||||
@@ -7,7 +7,7 @@ use time::{OffsetDateTime, UtcOffset};
|
||||
use ui::{prelude::*, IconButtonShape, ListItem, ListItemSpacing, Tooltip};
|
||||
|
||||
use crate::history_store::{HistoryEntry, HistoryStore};
|
||||
use crate::thread_store::SavedThreadMetadata;
|
||||
use crate::thread_store::SerializedThreadMetadata;
|
||||
use crate::{AssistantPanel, RemoveSelectedThread};
|
||||
|
||||
pub struct ThreadHistory {
|
||||
@@ -221,14 +221,14 @@ impl Render for ThreadHistory {
|
||||
|
||||
#[derive(IntoElement)]
|
||||
pub struct PastThread {
|
||||
thread: SavedThreadMetadata,
|
||||
thread: SerializedThreadMetadata,
|
||||
assistant_panel: WeakEntity<AssistantPanel>,
|
||||
selected: bool,
|
||||
}
|
||||
|
||||
impl PastThread {
|
||||
pub fn new(
|
||||
thread: SavedThreadMetadata,
|
||||
thread: SerializedThreadMetadata,
|
||||
assistant_panel: WeakEntity<AssistantPanel>,
|
||||
selected: bool,
|
||||
) -> Self {
|
||||
|
||||
@@ -16,28 +16,30 @@ use heed::types::{SerdeBincode, SerdeJson};
|
||||
use heed::Database;
|
||||
use language_model::{LanguageModelToolUseId, Role};
|
||||
use project::Project;
|
||||
use prompt_store::PromptBuilder;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use util::ResultExt as _;
|
||||
|
||||
use crate::thread::{MessageId, Thread, ThreadId};
|
||||
use crate::thread::{MessageId, ProjectSnapshot, Thread, ThreadId};
|
||||
|
||||
pub fn init(cx: &mut App) {
|
||||
ThreadsDatabase::init(cx);
|
||||
}
|
||||
|
||||
pub struct ThreadStore {
|
||||
#[allow(unused)]
|
||||
project: Entity<Project>,
|
||||
tools: Arc<ToolWorkingSet>,
|
||||
prompt_builder: Arc<PromptBuilder>,
|
||||
context_server_manager: Entity<ContextServerManager>,
|
||||
context_server_tool_ids: HashMap<Arc<str>, Vec<ToolId>>,
|
||||
threads: Vec<SavedThreadMetadata>,
|
||||
threads: Vec<SerializedThreadMetadata>,
|
||||
}
|
||||
|
||||
impl ThreadStore {
|
||||
pub fn new(
|
||||
project: Entity<Project>,
|
||||
tools: Arc<ToolWorkingSet>,
|
||||
prompt_builder: Arc<PromptBuilder>,
|
||||
cx: &mut App,
|
||||
) -> Result<Entity<Self>> {
|
||||
let this = cx.new(|cx| {
|
||||
@@ -49,6 +51,7 @@ impl ThreadStore {
|
||||
let this = Self {
|
||||
project,
|
||||
tools,
|
||||
prompt_builder,
|
||||
context_server_manager,
|
||||
context_server_tool_ids: HashMap::default(),
|
||||
threads: Vec::new(),
|
||||
@@ -67,18 +70,25 @@ impl ThreadStore {
|
||||
self.threads.len()
|
||||
}
|
||||
|
||||
pub fn threads(&self) -> Vec<SavedThreadMetadata> {
|
||||
pub fn threads(&self) -> Vec<SerializedThreadMetadata> {
|
||||
let mut threads = self.threads.iter().cloned().collect::<Vec<_>>();
|
||||
threads.sort_unstable_by_key(|thread| std::cmp::Reverse(thread.updated_at));
|
||||
threads
|
||||
}
|
||||
|
||||
pub fn recent_threads(&self, limit: usize) -> Vec<SavedThreadMetadata> {
|
||||
pub fn recent_threads(&self, limit: usize) -> Vec<SerializedThreadMetadata> {
|
||||
self.threads().into_iter().take(limit).collect()
|
||||
}
|
||||
|
||||
pub fn create_thread(&mut self, cx: &mut Context<Self>) -> Entity<Thread> {
|
||||
cx.new(|cx| Thread::new(self.tools.clone(), cx))
|
||||
cx.new(|cx| {
|
||||
Thread::new(
|
||||
self.project.clone(),
|
||||
self.tools.clone(),
|
||||
self.prompt_builder.clone(),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
pub fn open_thread(
|
||||
@@ -96,52 +106,29 @@ impl ThreadStore {
|
||||
.ok_or_else(|| anyhow!("no thread found with ID: {id:?}"))?;
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
cx.new(|cx| Thread::from_saved(id.clone(), thread, this.tools.clone(), cx))
|
||||
cx.new(|cx| {
|
||||
Thread::deserialize(
|
||||
id.clone(),
|
||||
thread,
|
||||
this.project.clone(),
|
||||
this.tools.clone(),
|
||||
this.prompt_builder.clone(),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
pub fn save_thread(&self, thread: &Entity<Thread>, cx: &mut Context<Self>) -> Task<Result<()>> {
|
||||
let (metadata, thread) = thread.update(cx, |thread, _cx| {
|
||||
let id = thread.id().clone();
|
||||
let thread = SavedThread {
|
||||
summary: thread.summary_or_default(),
|
||||
updated_at: thread.updated_at(),
|
||||
messages: thread
|
||||
.messages()
|
||||
.map(|message| SavedMessage {
|
||||
id: message.id,
|
||||
role: message.role,
|
||||
text: message.text.clone(),
|
||||
tool_uses: thread
|
||||
.tool_uses_for_message(message.id)
|
||||
.into_iter()
|
||||
.map(|tool_use| SavedToolUse {
|
||||
id: tool_use.id,
|
||||
name: tool_use.name,
|
||||
input: tool_use.input,
|
||||
})
|
||||
.collect(),
|
||||
tool_results: thread
|
||||
.tool_results_for_message(message.id)
|
||||
.into_iter()
|
||||
.map(|tool_result| SavedToolResult {
|
||||
tool_use_id: tool_result.tool_use_id.clone(),
|
||||
is_error: tool_result.is_error,
|
||||
content: tool_result.content.clone(),
|
||||
})
|
||||
.collect(),
|
||||
})
|
||||
.collect(),
|
||||
};
|
||||
|
||||
(id, thread)
|
||||
});
|
||||
let (metadata, serialized_thread) =
|
||||
thread.update(cx, |thread, cx| (thread.id().clone(), thread.serialize(cx)));
|
||||
|
||||
let database_future = ThreadsDatabase::global_future(cx);
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
let serialized_thread = serialized_thread.await?;
|
||||
let database = database_future.await.map_err(|err| anyhow!(err))?;
|
||||
database.save_thread(metadata, thread).await?;
|
||||
database.save_thread(metadata, serialized_thread).await?;
|
||||
|
||||
this.update(&mut cx, |this, cx| this.reload(cx))?.await
|
||||
})
|
||||
@@ -244,39 +231,41 @@ impl ThreadStore {
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct SavedThreadMetadata {
|
||||
pub struct SerializedThreadMetadata {
|
||||
pub id: ThreadId,
|
||||
pub summary: SharedString,
|
||||
pub updated_at: DateTime<Utc>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub struct SavedThread {
|
||||
pub struct SerializedThread {
|
||||
pub summary: SharedString,
|
||||
pub updated_at: DateTime<Utc>,
|
||||
pub messages: Vec<SavedMessage>,
|
||||
pub messages: Vec<SerializedMessage>,
|
||||
#[serde(default)]
|
||||
pub initial_project_snapshot: Option<Arc<ProjectSnapshot>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct SavedMessage {
|
||||
pub struct SerializedMessage {
|
||||
pub id: MessageId,
|
||||
pub role: Role,
|
||||
pub text: String,
|
||||
#[serde(default)]
|
||||
pub tool_uses: Vec<SavedToolUse>,
|
||||
pub tool_uses: Vec<SerializedToolUse>,
|
||||
#[serde(default)]
|
||||
pub tool_results: Vec<SavedToolResult>,
|
||||
pub tool_results: Vec<SerializedToolResult>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct SavedToolUse {
|
||||
pub struct SerializedToolUse {
|
||||
pub id: LanguageModelToolUseId,
|
||||
pub name: SharedString,
|
||||
pub input: serde_json::Value,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct SavedToolResult {
|
||||
pub struct SerializedToolResult {
|
||||
pub tool_use_id: LanguageModelToolUseId,
|
||||
pub is_error: bool,
|
||||
pub content: Arc<str>,
|
||||
@@ -291,7 +280,7 @@ impl Global for GlobalThreadsDatabase {}
|
||||
pub(crate) struct ThreadsDatabase {
|
||||
executor: BackgroundExecutor,
|
||||
env: heed::Env,
|
||||
threads: Database<SerdeBincode<ThreadId>, SerdeJson<SavedThread>>,
|
||||
threads: Database<SerdeBincode<ThreadId>, SerdeJson<SerializedThread>>,
|
||||
}
|
||||
|
||||
impl ThreadsDatabase {
|
||||
@@ -338,7 +327,7 @@ impl ThreadsDatabase {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn list_threads(&self) -> Task<Result<Vec<SavedThreadMetadata>>> {
|
||||
pub fn list_threads(&self) -> Task<Result<Vec<SerializedThreadMetadata>>> {
|
||||
let env = self.env.clone();
|
||||
let threads = self.threads;
|
||||
|
||||
@@ -347,7 +336,7 @@ impl ThreadsDatabase {
|
||||
let mut iter = threads.iter(&txn)?;
|
||||
let mut threads = Vec::new();
|
||||
while let Some((key, value)) = iter.next().transpose()? {
|
||||
threads.push(SavedThreadMetadata {
|
||||
threads.push(SerializedThreadMetadata {
|
||||
id: key,
|
||||
summary: value.summary,
|
||||
updated_at: value.updated_at,
|
||||
@@ -358,7 +347,7 @@ impl ThreadsDatabase {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn try_find_thread(&self, id: ThreadId) -> Task<Result<Option<SavedThread>>> {
|
||||
pub fn try_find_thread(&self, id: ThreadId) -> Task<Result<Option<SerializedThread>>> {
|
||||
let env = self.env.clone();
|
||||
let threads = self.threads;
|
||||
|
||||
@@ -369,7 +358,7 @@ impl ThreadsDatabase {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn save_thread(&self, id: ThreadId, thread: SavedThread) -> Task<Result<()>> {
|
||||
pub fn save_thread(&self, id: ThreadId, thread: SerializedThread) -> Task<Result<()>> {
|
||||
let env = self.env.clone();
|
||||
let threads = self.threads;
|
||||
|
||||
|
||||
crates/assistant2/src/tool_selector.rs (new file, 127 lines)
@@ -0,0 +1,127 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use assistant_tool::{ToolSource, ToolWorkingSet};
|
||||
use gpui::Entity;
|
||||
use scripting_tool::ScriptingTool;
|
||||
use ui::{prelude::*, ContextMenu, PopoverMenu, Tooltip};
|
||||
|
||||
pub struct ToolSelector {
|
||||
tools: Arc<ToolWorkingSet>,
|
||||
}
|
||||
|
||||
impl ToolSelector {
|
||||
pub fn new(tools: Arc<ToolWorkingSet>, _cx: &mut Context<Self>) -> Self {
|
||||
Self { tools }
|
||||
}
|
||||
|
||||
fn build_context_menu(
|
||||
&self,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Entity<ContextMenu> {
|
||||
ContextMenu::build(window, cx, |mut menu, _window, cx| {
|
||||
let icon_position = IconPosition::End;
|
||||
let tools_by_source = self.tools.tools_by_source(cx);
|
||||
|
||||
let all_tools_enabled = self.tools.are_all_tools_enabled();
|
||||
menu = menu.toggleable_entry("All Tools", all_tools_enabled, icon_position, None, {
|
||||
let tools = self.tools.clone();
|
||||
move |_window, cx| {
|
||||
if all_tools_enabled {
|
||||
tools.disable_all_tools(cx);
|
||||
} else {
|
||||
tools.enable_all_tools();
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
for (source, tools) in tools_by_source {
|
||||
let mut tools = tools
|
||||
.into_iter()
|
||||
.map(|tool| {
|
||||
let source = tool.source();
|
||||
let name = tool.name().into();
|
||||
let is_enabled = self.tools.is_enabled(&source, &name);
|
||||
|
||||
(source, name, is_enabled)
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
if ToolSource::Native == source {
|
||||
tools.push((
|
||||
ToolSource::Native,
|
||||
ScriptingTool::NAME.into(),
|
||||
self.tools.is_scripting_tool_enabled(),
|
||||
));
|
||||
tools.sort_by(|(_, name_a, _), (_, name_b, _)| name_a.cmp(name_b));
|
||||
}
|
||||
|
||||
menu = match &source {
|
||||
ToolSource::Native => menu.separator().header("Zed Tools"),
|
||||
ToolSource::ContextServer { id } => {
|
||||
let all_tools_from_source_enabled =
|
||||
self.tools.are_all_tools_from_source_enabled(&source);
|
||||
|
||||
menu.separator().header(id).toggleable_entry(
|
||||
"All Tools",
|
||||
all_tools_from_source_enabled,
|
||||
icon_position,
|
||||
None,
|
||||
{
|
||||
let tools = self.tools.clone();
|
||||
let source = source.clone();
|
||||
move |_window, cx| {
|
||||
if all_tools_from_source_enabled {
|
||||
tools.disable_source(source.clone(), cx);
|
||||
} else {
|
||||
tools.enable_source(&source);
|
||||
}
|
||||
}
|
||||
},
|
||||
)
|
||||
}
|
||||
};
|
||||
|
||||
for (source, name, is_enabled) in tools {
|
||||
menu = menu.toggleable_entry(name.clone(), is_enabled, icon_position, None, {
|
||||
let tools = self.tools.clone();
|
||||
move |_window, _cx| {
|
||||
if name.as_ref() == ScriptingTool::NAME {
|
||||
if is_enabled {
|
||||
tools.disable_scripting_tool();
|
||||
} else {
|
||||
tools.enable_scripting_tool();
|
||||
}
|
||||
} else {
|
||||
if is_enabled {
|
||||
tools.disable(source.clone(), &[name.clone()]);
|
||||
} else {
|
||||
tools.enable(source.clone(), &[name.clone()]);
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
menu
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl Render for ToolSelector {
|
||||
fn render(&mut self, _window: &mut Window, cx: &mut Context<'_, Self>) -> impl IntoElement {
|
||||
let this = cx.entity().clone();
|
||||
PopoverMenu::new("tool-selector")
|
||||
.menu(move |window, cx| {
|
||||
Some(this.update(cx, |this, cx| this.build_context_menu(window, cx)))
|
||||
})
|
||||
.trigger_with_tooltip(
|
||||
IconButton::new("tool-selector-button", IconName::SettingsAlt)
|
||||
.icon_size(IconSize::Small)
|
||||
.icon_color(Color::Muted),
|
||||
Tooltip::text("Customize Tools"),
|
||||
)
|
||||
.anchor(gpui::Corner::BottomLeft)
|
||||
}
|
||||
}
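
The earlier MessageEditor hunk renders a tool_selector child next to the model selector; wiring one up presumably looks something like this sketch, assuming the editor stores the entity in a tool_selector field and has access to the thread's ToolWorkingSet:

// Build the selector once, then render it in the toolbar row.
let tool_selector = cx.new(|cx| ToolSelector::new(thread.read(cx).tools().clone(), cx));
// ...later, inside render():
h_flex().gap_2().child(tool_selector.clone())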
|
||||
@@ -11,7 +11,7 @@ use language_model::{
|
||||
};
|
||||
|
||||
use crate::thread::MessageId;
|
||||
use crate::thread_store::SavedMessage;
|
||||
use crate::thread_store::SerializedMessage;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct ToolUse {
|
||||
@@ -46,25 +46,39 @@ impl ToolUseState {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn from_saved_messages(messages: &[SavedMessage]) -> Self {
|
||||
/// Constructs a [`ToolUseState`] from the given list of [`SerializedMessage`]s.
|
||||
///
|
||||
/// Accepts a function to filter the tools that should be used to populate the state.
|
||||
pub fn from_serialized_messages(
|
||||
messages: &[SerializedMessage],
|
||||
mut filter_by_tool_name: impl FnMut(&str) -> bool,
|
||||
) -> Self {
|
||||
let mut this = Self::new();
|
||||
let mut tool_names_by_id = HashMap::default();
|
||||
|
||||
for message in messages {
|
||||
match message.role {
|
||||
Role::Assistant => {
|
||||
if !message.tool_uses.is_empty() {
|
||||
this.tool_uses_by_assistant_message.insert(
|
||||
message.id,
|
||||
message
|
||||
.tool_uses
|
||||
let tool_uses = message
|
||||
.tool_uses
|
||||
.iter()
|
||||
.filter(|tool_use| (filter_by_tool_name)(tool_use.name.as_ref()))
|
||||
.map(|tool_use| LanguageModelToolUse {
|
||||
id: tool_use.id.clone(),
|
||||
name: tool_use.name.clone().into(),
|
||||
input: tool_use.input.clone(),
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
tool_names_by_id.extend(
|
||||
tool_uses
|
||||
.iter()
|
||||
.map(|tool_use| LanguageModelToolUse {
|
||||
id: tool_use.id.clone(),
|
||||
name: tool_use.name.clone().into(),
|
||||
input: tool_use.input.clone(),
|
||||
})
|
||||
.collect(),
|
||||
.map(|tool_use| (tool_use.id.clone(), tool_use.name.clone())),
|
||||
);
|
||||
|
||||
this.tool_uses_by_assistant_message
|
||||
.insert(message.id, tool_uses);
|
||||
}
|
||||
}
|
||||
Role::User => {
|
||||
@@ -76,6 +90,14 @@ impl ToolUseState {
|
||||
|
||||
for tool_result in &message.tool_results {
|
||||
let tool_use_id = tool_result.tool_use_id.clone();
|
||||
let Some(tool_use) = tool_names_by_id.get(&tool_use_id) else {
|
||||
log::warn!("no tool name found for tool use: {tool_use_id:?}");
|
||||
continue;
|
||||
};
|
||||
|
||||
if !(filter_by_tool_name)(tool_use.as_ref()) {
|
||||
continue;
|
||||
}
|
||||
|
||||
tool_uses_by_user_message.push(tool_use_id.clone());
|
||||
this.tool_results.insert(
|
||||
@@ -96,6 +118,22 @@ impl ToolUseState {
|
||||
this
|
||||
}
|
||||
|
||||
pub fn cancel_pending(&mut self) -> Vec<PendingToolUse> {
|
||||
let mut pending_tools = Vec::new();
|
||||
for (tool_use_id, tool_use) in self.pending_tool_uses_by_id.drain() {
|
||||
self.tool_results.insert(
|
||||
tool_use_id.clone(),
|
||||
LanguageModelToolResult {
|
||||
tool_use_id,
|
||||
content: "Tool canceled by user".into(),
|
||||
is_error: true,
|
||||
},
|
||||
);
|
||||
pending_tools.push(tool_use.clone());
|
||||
}
|
||||
pending_tools
|
||||
}
|
||||
|
||||
pub fn pending_tool_uses(&self) -> Vec<&PendingToolUse> {
|
||||
self.pending_tool_uses_by_id.values().collect()
|
||||
}
|
||||
@@ -160,6 +198,13 @@ impl ToolUseState {
|
||||
.map_or(false, |results| !results.is_empty())
|
||||
}
|
||||
|
||||
pub fn tool_result(
|
||||
&self,
|
||||
tool_use_id: &LanguageModelToolUseId,
|
||||
) -> Option<&LanguageModelToolResult> {
|
||||
self.tool_results.get(tool_use_id)
|
||||
}
|
||||
|
||||
pub fn request_tool_use(
|
||||
&mut self,
|
||||
assistant_message_id: MessageId,
|
||||
@@ -202,18 +247,18 @@ impl ToolUseState {
|
||||
&mut self,
|
||||
tool_use_id: LanguageModelToolUseId,
|
||||
output: Result<String>,
|
||||
) {
|
||||
) -> Option<PendingToolUse> {
|
||||
match output {
|
||||
Ok(output) => {
|
||||
Ok(tool_result) => {
|
||||
self.tool_results.insert(
|
||||
tool_use_id.clone(),
|
||||
LanguageModelToolResult {
|
||||
tool_use_id: tool_use_id.clone(),
|
||||
content: output.into(),
|
||||
content: tool_result.into(),
|
||||
is_error: false,
|
||||
},
|
||||
);
|
||||
self.pending_tool_uses_by_id.remove(&tool_use_id);
|
||||
self.pending_tool_uses_by_id.remove(&tool_use_id)
|
||||
}
|
||||
Err(err) => {
|
||||
self.tool_results.insert(
|
||||
@@ -228,6 +273,8 @@ impl ToolUseState {
|
||||
if let Some(tool_use) = self.pending_tool_uses_by_id.get_mut(&tool_use_id) {
|
||||
tool_use.status = PendingToolUseStatus::Error(err.to_string().into());
|
||||
}
|
||||
|
||||
self.pending_tool_uses_by_id.get(&tool_use_id).cloned()
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -267,6 +314,7 @@ impl ToolUseState {
|
||||
pub struct PendingToolUse {
|
||||
pub id: LanguageModelToolUseId,
|
||||
/// The ID of the Assistant message in which the tool use was requested.
|
||||
#[allow(unused)]
|
||||
pub assistant_message_id: MessageId,
|
||||
pub name: Arc<str>,
|
||||
pub input: serde_json::Value,
|
||||
|
||||
@@ -126,7 +126,13 @@ impl RenderOnce for ContextPill {
|
||||
h_flex()
|
||||
.id("context-data")
|
||||
.gap_1()
|
||||
.child(Label::new(context.name.clone()).size(LabelSize::Small))
|
||||
.child(
|
||||
div().max_w_64().child(
|
||||
Label::new(context.name.clone())
|
||||
.size(LabelSize::Small)
|
||||
.truncate(),
|
||||
),
|
||||
)
|
||||
.when_some(context.parent.as_ref(), |element, parent_name| {
|
||||
if *dupe_name {
|
||||
element.child(
|
||||
@@ -174,21 +180,22 @@ impl RenderOnce for ContextPill {
|
||||
})
|
||||
.hover(|style| style.bg(color.element_hover.opacity(0.5)))
|
||||
.child(
|
||||
Label::new(name.clone())
|
||||
.size(LabelSize::Small)
|
||||
.color(Color::Muted),
|
||||
div().px_0p5().max_w_64().child(
|
||||
Label::new(name.clone())
|
||||
.size(LabelSize::Small)
|
||||
.color(Color::Muted)
|
||||
.truncate(),
|
||||
),
|
||||
)
|
||||
.child(
|
||||
div().px_0p5().child(
|
||||
Label::new(match kind {
|
||||
ContextKind::File => "Active Tab",
|
||||
ContextKind::Thread
|
||||
| ContextKind::Directory
|
||||
| ContextKind::FetchedUrl => "Active",
|
||||
})
|
||||
.size(LabelSize::XSmall)
|
||||
.color(Color::Muted),
|
||||
),
|
||||
Label::new(match kind {
|
||||
ContextKind::File => "Active Tab",
|
||||
ContextKind::Thread | ContextKind::Directory | ContextKind::FetchedUrl => {
|
||||
"Active"
|
||||
}
|
||||
})
|
||||
.size(LabelSize::XSmall)
|
||||
.color(Color::Muted),
|
||||
)
|
||||
.child(
|
||||
Icon::new(IconName::Plus)
|
||||
|
||||
@@ -647,7 +647,6 @@ impl AssistantContext {
|
||||
)
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn new(
|
||||
id: ContextId,
|
||||
replica_id: ReplicaId,
|
||||
@@ -768,7 +767,6 @@ impl AssistantContext {
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn deserialize(
|
||||
saved_context: SavedContext,
|
||||
path: PathBuf,
|
||||
@@ -2256,6 +2254,7 @@ impl AssistantContext {
|
||||
);
|
||||
}
|
||||
LanguageModelCompletionEvent::ToolUse(_) => {}
|
||||
LanguageModelCompletionEvent::UsageUpdate(_) => {}
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
@@ -535,7 +535,6 @@ impl ContextEditor {
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn run_command(
|
||||
&mut self,
|
||||
command_range: Range<language::Anchor>,
|
||||
@@ -2057,7 +2056,6 @@ impl ContextEditor {
|
||||
.unwrap_or_else(|| Cow::Borrowed(DEFAULT_TAB_TITLE))
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn render_patch_block(
|
||||
&mut self,
|
||||
range: Range<text::Anchor>,
|
||||
|
||||
@@ -104,49 +104,53 @@ impl ContextStore {
|
||||
const CONTEXT_WATCH_DURATION: Duration = Duration::from_millis(100);
|
||||
let (mut events, _) = fs.watch(contexts_dir(), CONTEXT_WATCH_DURATION).await;
|
||||
|
||||
let this = cx.new(|cx: &mut Context<Self>| {
|
||||
let context_server_factory_registry =
|
||||
ContextServerFactoryRegistry::default_global(cx);
|
||||
let context_server_manager = cx.new(|cx| {
|
||||
ContextServerManager::new(context_server_factory_registry, project.clone(), cx)
|
||||
});
|
||||
let mut this = Self {
|
||||
contexts: Vec::new(),
|
||||
contexts_metadata: Vec::new(),
|
||||
context_server_manager,
|
||||
context_server_slash_command_ids: HashMap::default(),
|
||||
host_contexts: Vec::new(),
|
||||
fs,
|
||||
languages,
|
||||
slash_commands,
|
||||
telemetry,
|
||||
_watch_updates: cx.spawn(|this, mut cx| {
|
||||
async move {
|
||||
while events.next().await.is_some() {
|
||||
this.update(&mut cx, |this, cx| this.reload(cx))?
|
||||
.await
|
||||
.log_err();
|
||||
let this =
|
||||
cx.new(|cx: &mut Context<Self>| {
|
||||
let context_server_factory_registry =
|
||||
ContextServerFactoryRegistry::default_global(cx);
|
||||
let context_server_manager = cx.new(|cx| {
|
||||
ContextServerManager::new(
|
||||
context_server_factory_registry,
|
||||
project.clone(),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
let mut this = Self {
|
||||
contexts: Vec::new(),
|
||||
contexts_metadata: Vec::new(),
|
||||
context_server_manager,
|
||||
context_server_slash_command_ids: HashMap::default(),
|
||||
host_contexts: Vec::new(),
|
||||
fs,
|
||||
languages,
|
||||
slash_commands,
|
||||
telemetry,
|
||||
_watch_updates: cx.spawn(|this, mut cx| {
|
||||
async move {
|
||||
while events.next().await.is_some() {
|
||||
this.update(&mut cx, |this, cx| this.reload(cx))?
|
||||
.await
|
||||
.log_err();
|
||||
}
|
||||
anyhow::Ok(())
|
||||
}
|
||||
anyhow::Ok(())
|
||||
}
|
||||
.log_err()
|
||||
}),
|
||||
client_subscription: None,
|
||||
_project_subscriptions: vec![
|
||||
cx.observe(&project, Self::handle_project_changed),
|
||||
cx.subscribe(&project, Self::handle_project_event),
|
||||
],
|
||||
project_is_shared: false,
|
||||
client: project.read(cx).client(),
|
||||
project: project.clone(),
|
||||
prompt_builder,
|
||||
};
|
||||
this.handle_project_changed(project.clone(), cx);
|
||||
this.synchronize_contexts(cx);
|
||||
this.register_context_server_handlers(cx);
|
||||
this.reload(cx).detach_and_log_err(cx);
|
||||
this
|
||||
})?;
|
||||
.log_err()
|
||||
}),
|
||||
client_subscription: None,
|
||||
_project_subscriptions: vec![
|
||||
cx.subscribe(&project, Self::handle_project_event)
|
||||
],
|
||||
project_is_shared: false,
|
||||
client: project.read(cx).client(),
|
||||
project: project.clone(),
|
||||
prompt_builder,
|
||||
};
|
||||
this.handle_project_shared(project.clone(), cx);
|
||||
this.synchronize_contexts(cx);
|
||||
this.register_context_server_handlers(cx);
|
||||
this.reload(cx).detach_and_log_err(cx);
|
||||
this
|
||||
})?;
|
||||
|
||||
Ok(this)
|
||||
})
|
||||
@@ -288,7 +292,7 @@ impl ContextStore {
|
||||
})?
|
||||
}
|
||||
|
||||
fn handle_project_changed(&mut self, _: Entity<Project>, cx: &mut Context<Self>) {
|
||||
fn handle_project_shared(&mut self, _: Entity<Project>, cx: &mut Context<Self>) {
|
||||
let is_shared = self.project.read(cx).is_shared();
|
||||
let was_shared = mem::replace(&mut self.project_is_shared, is_shared);
|
||||
if is_shared == was_shared {
|
||||
@@ -318,11 +322,14 @@ impl ContextStore {
|
||||
|
||||
fn handle_project_event(
|
||||
&mut self,
|
||||
_: Entity<Project>,
|
||||
project: Entity<Project>,
|
||||
event: &project::Event,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
match event {
|
||||
project::Event::RemoteIdChanged(_) => {
|
||||
self.handle_project_shared(project, cx);
|
||||
}
|
||||
project::Event::Reshared => {
|
||||
self.advertise_contexts(cx);
|
||||
}
|
||||
|
||||
@@ -5,9 +5,9 @@ use assistant_slash_command::{AfterCompletion, SlashCommandLine, SlashCommandWor
|
||||
use editor::{CompletionProvider, Editor};
|
||||
use fuzzy::{match_strings, StringMatchCandidate};
|
||||
use gpui::{App, AppContext as _, Context, Entity, Task, WeakEntity, Window};
|
||||
use language::{Anchor, Buffer, LanguageServerId, ToPoint};
|
||||
use language::{Anchor, Buffer, ToPoint};
|
||||
use parking_lot::Mutex;
|
||||
use project::{lsp_store::CompletionDocumentation, CompletionIntent};
|
||||
use project::{lsp_store::CompletionDocumentation, CompletionIntent, CompletionSource};
|
||||
use rope::Point;
|
||||
use std::{
|
||||
cell::RefCell,
|
||||
@@ -48,7 +48,7 @@ impl SlashCommandCompletionProvider {
|
||||
name_range: Range<Anchor>,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<Vec<project::Completion>>> {
|
||||
) -> Task<Result<Option<Vec<project::Completion>>>> {
|
||||
let slash_commands = self.slash_commands.clone();
|
||||
let candidates = slash_commands
|
||||
.command_names(cx)
|
||||
@@ -71,72 +71,71 @@ impl SlashCommandCompletionProvider {
|
||||
.await;
|
||||
|
||||
cx.update(|_, cx| {
|
||||
matches
|
||||
.into_iter()
|
||||
.filter_map(|mat| {
|
||||
let command = slash_commands.command(&mat.string, cx)?;
|
||||
let mut new_text = mat.string.clone();
|
||||
let requires_argument = command.requires_argument();
|
||||
let accepts_arguments = command.accepts_arguments();
|
||||
if requires_argument || accepts_arguments {
|
||||
new_text.push(' ');
|
||||
}
|
||||
Some(
|
||||
matches
|
||||
.into_iter()
|
||||
.filter_map(|mat| {
|
||||
let command = slash_commands.command(&mat.string, cx)?;
|
||||
let mut new_text = mat.string.clone();
|
||||
let requires_argument = command.requires_argument();
|
||||
let accepts_arguments = command.accepts_arguments();
|
||||
if requires_argument || accepts_arguments {
|
||||
new_text.push(' ');
|
||||
}
|
||||
|
||||
let confirm =
|
||||
editor
|
||||
.clone()
|
||||
.zip(workspace.clone())
|
||||
.map(|(editor, workspace)| {
|
||||
let command_name = mat.string.clone();
|
||||
let command_range = command_range.clone();
|
||||
let editor = editor.clone();
|
||||
let workspace = workspace.clone();
|
||||
Arc::new(
|
||||
move |intent: CompletionIntent,
|
||||
window: &mut Window,
|
||||
cx: &mut App| {
|
||||
if !requires_argument
|
||||
&& (!accepts_arguments || intent.is_complete())
|
||||
{
|
||||
editor
|
||||
.update(cx, |editor, cx| {
|
||||
editor.run_command(
|
||||
command_range.clone(),
|
||||
&command_name,
|
||||
&[],
|
||||
true,
|
||||
workspace.clone(),
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
})
|
||||
.ok();
|
||||
false
|
||||
} else {
|
||||
requires_argument || accepts_arguments
|
||||
}
|
||||
},
|
||||
) as Arc<_>
|
||||
});
|
||||
Some(project::Completion {
|
||||
old_range: name_range.clone(),
|
||||
documentation: Some(CompletionDocumentation::SingleLine(
|
||||
command.description().into(),
|
||||
)),
|
||||
new_text,
|
||||
label: command.label(cx),
|
||||
server_id: LanguageServerId(0),
|
||||
lsp_completion: Default::default(),
|
||||
confirm,
|
||||
resolved: true,
|
||||
let confirm =
|
||||
editor
|
||||
.clone()
|
||||
.zip(workspace.clone())
|
||||
.map(|(editor, workspace)| {
|
||||
let command_name = mat.string.clone();
|
||||
let command_range = command_range.clone();
|
||||
let editor = editor.clone();
|
||||
let workspace = workspace.clone();
|
||||
Arc::new(
|
||||
move |intent: CompletionIntent,
|
||||
window: &mut Window,
|
||||
cx: &mut App| {
|
||||
if !requires_argument
|
||||
&& (!accepts_arguments || intent.is_complete())
|
||||
{
|
||||
editor
|
||||
.update(cx, |editor, cx| {
|
||||
editor.run_command(
|
||||
command_range.clone(),
|
||||
&command_name,
|
||||
&[],
|
||||
true,
|
||||
workspace.clone(),
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
})
|
||||
.ok();
|
||||
false
|
||||
} else {
|
||||
requires_argument || accepts_arguments
|
||||
}
|
||||
},
|
||||
) as Arc<_>
|
||||
});
|
||||
Some(project::Completion {
|
||||
old_range: name_range.clone(),
|
||||
documentation: Some(CompletionDocumentation::SingleLine(
|
||||
command.description().into(),
|
||||
)),
|
||||
new_text,
|
||||
label: command.label(cx),
|
||||
confirm,
|
||||
source: CompletionSource::Custom,
|
||||
})
|
||||
})
|
||||
})
|
||||
.collect()
|
||||
.collect(),
|
||||
)
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn complete_command_argument(
|
||||
&self,
|
||||
command_name: &str,
|
||||
@@ -146,7 +145,7 @@ impl SlashCommandCompletionProvider {
|
||||
last_argument_range: Range<Anchor>,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<Vec<project::Completion>>> {
|
||||
) -> Task<Result<Option<Vec<project::Completion>>>> {
|
||||
let new_cancel_flag = Arc::new(AtomicBool::new(false));
|
||||
let mut flag = self.cancel_flag.lock();
|
||||
flag.store(true, SeqCst);
|
||||
@@ -164,27 +163,28 @@ impl SlashCommandCompletionProvider {
|
||||
let workspace = self.workspace.clone();
|
||||
let arguments = arguments.to_vec();
|
||||
cx.background_spawn(async move {
|
||||
Ok(completions
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(|new_argument| {
|
||||
let confirm =
|
||||
editor
|
||||
.clone()
|
||||
.zip(workspace.clone())
|
||||
.map(|(editor, workspace)| {
|
||||
Arc::new({
|
||||
let mut completed_arguments = arguments.clone();
|
||||
if new_argument.replace_previous_arguments {
|
||||
completed_arguments.clear();
|
||||
} else {
|
||||
completed_arguments.pop();
|
||||
}
|
||||
completed_arguments.push(new_argument.new_text.clone());
|
||||
Ok(Some(
|
||||
completions
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(|new_argument| {
|
||||
let confirm =
|
||||
editor
|
||||
.clone()
|
||||
.zip(workspace.clone())
|
||||
.map(|(editor, workspace)| {
|
||||
Arc::new({
|
||||
let mut completed_arguments = arguments.clone();
|
||||
if new_argument.replace_previous_arguments {
|
||||
completed_arguments.clear();
|
||||
} else {
|
||||
completed_arguments.pop();
|
||||
}
|
||||
completed_arguments.push(new_argument.new_text.clone());
|
||||
|
||||
let command_range = command_range.clone();
|
||||
let command_name = command_name.clone();
|
||||
move |intent: CompletionIntent,
|
||||
let command_range = command_range.clone();
|
||||
let command_name = command_name.clone();
|
||||
move |intent: CompletionIntent,
|
||||
window: &mut Window,
|
||||
cx: &mut App| {
|
||||
if new_argument.after_completion.run()
|
||||
@@ -208,33 +208,32 @@ impl SlashCommandCompletionProvider {
|
||||
!new_argument.after_completion.run()
|
||||
}
|
||||
}
|
||||
}) as Arc<_>
|
||||
});
|
||||
}) as Arc<_>
|
||||
});
|
||||
|
||||
let mut new_text = new_argument.new_text.clone();
|
||||
if new_argument.after_completion == AfterCompletion::Continue {
|
||||
new_text.push(' ');
|
||||
}
|
||||
let mut new_text = new_argument.new_text.clone();
|
||||
if new_argument.after_completion == AfterCompletion::Continue {
|
||||
new_text.push(' ');
|
||||
}
|
||||
|
||||
project::Completion {
|
||||
old_range: if new_argument.replace_previous_arguments {
|
||||
argument_range.clone()
|
||||
} else {
|
||||
last_argument_range.clone()
|
||||
},
|
||||
label: new_argument.label,
|
||||
new_text,
|
||||
documentation: None,
|
||||
server_id: LanguageServerId(0),
|
||||
lsp_completion: Default::default(),
|
||||
confirm,
|
||||
resolved: true,
|
||||
}
|
||||
})
|
||||
.collect())
|
||||
project::Completion {
|
||||
old_range: if new_argument.replace_previous_arguments {
|
||||
argument_range.clone()
|
||||
} else {
|
||||
last_argument_range.clone()
|
||||
},
|
||||
label: new_argument.label,
|
||||
new_text,
|
||||
documentation: None,
|
||||
confirm,
|
||||
source: CompletionSource::Custom,
|
||||
}
|
||||
})
|
||||
.collect(),
|
||||
))
|
||||
})
|
||||
} else {
|
||||
Task::ready(Ok(Vec::new()))
|
||||
Task::ready(Ok(Some(Vec::new())))
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -247,7 +246,7 @@ impl CompletionProvider for SlashCommandCompletionProvider {
|
||||
_: editor::CompletionContext,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Editor>,
|
||||
) -> Task<Result<Vec<project::Completion>>> {
|
||||
) -> Task<Result<Option<Vec<project::Completion>>>> {
|
||||
let Some((name, arguments, command_range, last_argument_range)) =
|
||||
buffer.update(cx, |buffer, _cx| {
|
||||
let position = buffer_position.to_point(buffer);
|
||||
@@ -291,7 +290,7 @@ impl CompletionProvider for SlashCommandCompletionProvider {
|
||||
Some((name, arguments, command_range, last_argument_range))
|
||||
})
|
||||
else {
|
||||
return Task::ready(Ok(Vec::new()));
|
||||
return Task::ready(Ok(Some(Vec::new())));
|
||||
};
|
||||
|
||||
if let Some((arguments, argument_range)) = arguments {
|
||||
|
||||
44
crates/assistant_eval/Cargo.toml
Normal file
@@ -0,0 +1,44 @@
|
||||
[package]
|
||||
name = "assistant_eval"
|
||||
version = "0.1.0"
|
||||
edition.workspace = true
|
||||
publish.workspace = true
|
||||
license = "GPL-3.0-or-later"
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
[[bin]]
|
||||
name = "assistant_eval"
|
||||
path = "src/main.rs"
|
||||
|
||||
[dependencies]
|
||||
anyhow.workspace = true
|
||||
assistant2.workspace = true
|
||||
assistant_tool.workspace = true
|
||||
assistant_tools.workspace = true
|
||||
clap.workspace = true
|
||||
client.workspace = true
|
||||
collections.workspace = true
|
||||
context_server.workspace = true
|
||||
env_logger.workspace = true
|
||||
fs.workspace = true
|
||||
futures.workspace = true
|
||||
gpui.workspace = true
|
||||
gpui_tokio.workspace = true
|
||||
itertools.workspace = true
|
||||
language.workspace = true
|
||||
language_model.workspace = true
|
||||
language_models.workspace = true
|
||||
node_runtime.workspace = true
|
||||
project.workspace = true
|
||||
prompt_store.workspace = true
|
||||
regex.workspace = true
|
||||
release_channel.workspace = true
|
||||
reqwest_client.workspace = true
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
serde_json_lenient.workspace = true
|
||||
settings.workspace = true
|
||||
smol.workspace = true
|
||||
util.workspace = true
|
||||
1
crates/assistant_eval/LICENSE-GPL
Symbolic link
@@ -0,0 +1 @@
|
||||
../../LICENSE-GPL
|
||||
77
crates/assistant_eval/README.md
Normal file
@@ -0,0 +1,77 @@
# Tool Evals

A framework for evaluating and benchmarking AI assistant performance in the Zed editor.

## Overview

Tool Evals provides a headless environment for running assistant evaluations on code repositories. It automates the process of:

1. Cloning and setting up test repositories
2. Sending prompts to language models
3. Allowing the assistant to use tools to modify code
4. Collecting metrics on performance
5. Evaluating results against known good solutions

## How It Works

The system consists of several key components:

- **Eval**: Loads test cases from the evaluation_data directory, clones repos, and executes evaluations
- **HeadlessAssistant**: Provides a headless environment for running the AI assistant
- **Judge**: Compares AI-generated diffs with reference solutions and scores their functional similarity

The evaluation flow:

1. An evaluation is loaded from the evaluation_data directory
2. The target repository is cloned and checked out at a specific commit
3. A HeadlessAssistant instance is created with the specified language model
4. The user prompt is sent to the assistant
5. The assistant responds and uses tools to modify code
6. Upon completion, a diff is generated from the changes
7. Results are saved, including the diff, the assistant's response, and performance metrics
8. If a reference solution exists, a Judge evaluates how similar the generated solution is to it

## Setup Requirements

### Prerequisites

- Rust and Cargo
- Git
- Network access to clone repositories
- Appropriate API keys for language models and git services (Anthropic, GitHub, etc.)

### Environment Variables

Ensure you have the required API keys set, either from a dev run of Zed or via these environment variables (see the export example below):

- `ZED_ANTHROPIC_API_KEY` for Claude models
- `ZED_OPENAI_API_KEY` for OpenAI models
- `ZED_GITHUB_API_KEY` for GitHub API (or similar)
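For instance, a shell session might export them before running the evals. The values below are placeholders, not real keys:

```bash
# Placeholder values; substitute your own credentials.
export ZED_ANTHROPIC_API_KEY="sk-ant-..."
export ZED_OPENAI_API_KEY="sk-..."
export ZED_GITHUB_API_KEY="ghp_..."
```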
## Usage

### Running a Single Evaluation

To run a specific evaluation:

```bash
cargo run -p assistant_eval -- bubbletea-add-set-window-title
```

The arguments are regex patterns matched against evaluation names, so to run every evaluation whose name contains `bubbletea`, run:

```bash
cargo run -p assistant_eval -- bubbletea
```

To run all evaluations:

```bash
cargo run -p assistant_eval -- --all
```
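The runner also takes the flags defined on `Args` in `src/main.rs`, such as `--concurrency`, which caps how many evaluations run at once (default 10). A sketch, assuming the flags combine as defined there:

```bash
# Run every eval, but limit concurrent evaluations to 4.
cargo run -p assistant_eval -- --all --concurrency 4
```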
## Evaluation Data Structure

Each evaluation should be placed in the `evaluation_data` directory with the following structure:

* `prompt.txt`: The user's prompt.
* `original.diff`: The `git diff` of the change anticipated for this prompt.
* `setup.json`: Information about the repo used for the evaluation (see the sketch below).
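As a sketch of that last file: `setup.json` supplies the two fields read into `EvalSetup` in `src/eval.rs` (`url` and `base_sha`). The eval directory name, repository URL, and commit SHA below are placeholders:

```bash
# Hypothetical setup.json for an eval named "my-eval"; substitute a real repo URL and commit SHA.
cat > evaluation_data/my-eval/setup.json <<'EOF'
{
  "url": "https://github.com/example/repo",
  "base_sha": "0123456789abcdef0123456789abcdef01234567"
}
EOF
```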
61
crates/assistant_eval/build.rs
Normal file
@@ -0,0 +1,61 @@
|
||||
// Copied from `crates/zed/build.rs`, with removal of code for including the zed icon on windows.
|
||||
|
||||
use std::process::Command;
|
||||
|
||||
fn main() {
|
||||
if cfg!(target_os = "macos") {
|
||||
println!("cargo:rustc-env=MACOSX_DEPLOYMENT_TARGET=10.15.7");
|
||||
|
||||
println!("cargo:rerun-if-env-changed=ZED_BUNDLE");
|
||||
if std::env::var("ZED_BUNDLE").ok().as_deref() == Some("true") {
|
||||
// Find WebRTC.framework in the Frameworks folder when running as part of an application bundle.
|
||||
println!("cargo:rustc-link-arg=-Wl,-rpath,@executable_path/../Frameworks");
|
||||
} else {
|
||||
// Find WebRTC.framework as a sibling of the executable when running outside of an application bundle.
|
||||
println!("cargo:rustc-link-arg=-Wl,-rpath,@executable_path");
|
||||
}
|
||||
|
||||
// Weakly link ReplayKit to ensure Zed can be used on macOS 10.15+.
|
||||
println!("cargo:rustc-link-arg=-Wl,-weak_framework,ReplayKit");
|
||||
|
||||
// Seems to be required to enable Swift concurrency
|
||||
println!("cargo:rustc-link-arg=-Wl,-rpath,/usr/lib/swift");
|
||||
|
||||
// Register exported Objective-C selectors, protocols, etc
|
||||
println!("cargo:rustc-link-arg=-Wl,-ObjC");
|
||||
}
|
||||
|
||||
// Populate git sha environment variable if git is available
|
||||
println!("cargo:rerun-if-changed=../../.git/logs/HEAD");
|
||||
println!(
|
||||
"cargo:rustc-env=TARGET={}",
|
||||
std::env::var("TARGET").unwrap()
|
||||
);
|
||||
if let Ok(output) = Command::new("git").args(["rev-parse", "HEAD"]).output() {
|
||||
if output.status.success() {
|
||||
let git_sha = String::from_utf8_lossy(&output.stdout);
|
||||
let git_sha = git_sha.trim();
|
||||
|
||||
println!("cargo:rustc-env=ZED_COMMIT_SHA={git_sha}");
|
||||
|
||||
if let Ok(build_profile) = std::env::var("PROFILE") {
|
||||
if build_profile == "release" {
|
||||
// This is currently the best way to make `cargo build ...`'s build script
|
||||
// to print something to stdout without extra verbosity.
|
||||
println!(
|
||||
"cargo:warning=Info: using '{git_sha}' hash for ZED_COMMIT_SHA env var"
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
#[cfg(target_env = "msvc")]
|
||||
{
|
||||
// todo(windows): This is to avoid stack overflow. Remove it when solved.
|
||||
println!("cargo:rustc-link-arg=/stack:{}", 8 * 1024 * 1024);
|
||||
}
|
||||
}
|
||||
}
|
||||
252
crates/assistant_eval/src/eval.rs
Normal file
@@ -0,0 +1,252 @@
|
||||
use crate::headless_assistant::{HeadlessAppState, HeadlessAssistant};
|
||||
use anyhow::anyhow;
|
||||
use assistant2::RequestKind;
|
||||
use collections::HashMap;
|
||||
use gpui::{App, Task};
|
||||
use language_model::{LanguageModel, TokenUsage};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{
|
||||
fs,
|
||||
io::Write,
|
||||
path::{Path, PathBuf},
|
||||
sync::Arc,
|
||||
time::Duration,
|
||||
};
|
||||
use util::command::new_smol_command;
|
||||
|
||||
pub struct Eval {
|
||||
pub name: String,
|
||||
pub path: PathBuf,
|
||||
pub repo_path: PathBuf,
|
||||
pub eval_setup: EvalSetup,
|
||||
pub user_prompt: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
pub struct EvalOutput {
|
||||
pub diff: String,
|
||||
pub last_message: String,
|
||||
pub elapsed_time: Duration,
|
||||
pub assistant_response_count: usize,
|
||||
pub tool_use_counts: HashMap<Arc<str>, u32>,
|
||||
pub token_usage: TokenUsage,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct EvalSetup {
|
||||
pub url: String,
|
||||
pub base_sha: String,
|
||||
}
|
||||
|
||||
impl Eval {
|
||||
/// Loads the eval from a path (typically in `evaluation_data`). Clones and checks out the repo
|
||||
/// if necessary.
|
||||
pub async fn load(name: String, path: PathBuf, repos_dir: &Path) -> anyhow::Result<Self> {
|
||||
let prompt_path = path.join("prompt.txt");
|
||||
let user_prompt = smol::unblock(|| std::fs::read_to_string(prompt_path)).await?;
|
||||
let setup_path = path.join("setup.json");
|
||||
let setup_contents = smol::unblock(|| std::fs::read_to_string(setup_path)).await?;
|
||||
let eval_setup = serde_json_lenient::from_str_lenient::<EvalSetup>(&setup_contents)?;
|
||||
let repo_path = repos_dir.join(repo_dir_name(&eval_setup.url));
|
||||
Ok(Eval {
|
||||
name,
|
||||
path,
|
||||
repo_path,
|
||||
eval_setup,
|
||||
user_prompt,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn run(
|
||||
self,
|
||||
app_state: Arc<HeadlessAppState>,
|
||||
model: Arc<dyn LanguageModel>,
|
||||
cx: &mut App,
|
||||
) -> Task<anyhow::Result<EvalOutput>> {
|
||||
cx.spawn(move |mut cx| async move {
|
||||
checkout_repo(&self.eval_setup, &self.repo_path).await?;
|
||||
|
||||
let (assistant, done_rx) =
|
||||
cx.update(|cx| HeadlessAssistant::new(app_state.clone(), cx))??;
|
||||
|
||||
let _worktree = assistant
|
||||
.update(&mut cx, |assistant, cx| {
|
||||
assistant.project.update(cx, |project, cx| {
|
||||
project.create_worktree(&self.repo_path, true, cx)
|
||||
})
|
||||
})?
|
||||
.await?;
|
||||
|
||||
let start_time = std::time::SystemTime::now();
|
||||
|
||||
assistant.update(&mut cx, |assistant, cx| {
|
||||
assistant.thread.update(cx, |thread, cx| {
|
||||
let context = vec![];
|
||||
thread.insert_user_message(self.user_prompt.clone(), context, cx);
|
||||
thread.send_to_model(model, RequestKind::Chat, cx);
|
||||
});
|
||||
})?;
|
||||
|
||||
done_rx.recv().await??;
|
||||
|
||||
let elapsed_time = start_time.elapsed()?;
|
||||
|
||||
let diff = query_git(&self.repo_path, vec!["diff"]).await?;
|
||||
|
||||
assistant.update(&mut cx, |assistant, cx| {
|
||||
let thread = assistant.thread.read(cx);
|
||||
let last_message = thread.messages().last().unwrap();
|
||||
if last_message.role != language_model::Role::Assistant {
|
||||
return Err(anyhow!("Last message is not from assistant"));
|
||||
}
|
||||
let assistant_response_count = thread
|
||||
.messages()
|
||||
.filter(|message| message.role == language_model::Role::Assistant)
|
||||
.count();
|
||||
Ok(EvalOutput {
|
||||
diff,
|
||||
last_message: last_message.text.clone(),
|
||||
elapsed_time,
|
||||
assistant_response_count,
|
||||
tool_use_counts: assistant.tool_use_counts.clone(),
|
||||
token_usage: thread.cumulative_token_usage(),
|
||||
})
|
||||
})?
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl EvalOutput {
|
||||
// Method to save the output to a directory
|
||||
pub fn save_to_directory(
|
||||
&self,
|
||||
output_dir: &Path,
|
||||
eval_output_value: String,
|
||||
) -> anyhow::Result<()> {
|
||||
// Create the output directory if it doesn't exist
|
||||
fs::create_dir_all(&output_dir)?;
|
||||
|
||||
// Save the diff to a file
|
||||
let diff_path = output_dir.join("diff.patch");
|
||||
let mut diff_file = fs::File::create(&diff_path)?;
|
||||
diff_file.write_all(self.diff.as_bytes())?;
|
||||
|
||||
// Save the last message to a file
|
||||
let message_path = output_dir.join("assistant_response.txt");
|
||||
let mut message_file = fs::File::create(&message_path)?;
|
||||
message_file.write_all(self.last_message.as_bytes())?;
|
||||
|
||||
// Current metrics for this run
|
||||
let current_metrics = serde_json::json!({
|
||||
"elapsed_time_ms": self.elapsed_time.as_millis(),
|
||||
"assistant_response_count": self.assistant_response_count,
|
||||
"tool_use_counts": self.tool_use_counts,
|
||||
"token_usage": self.token_usage,
|
||||
"eval_output_value": eval_output_value,
|
||||
});
|
||||
|
||||
// Get current timestamp in milliseconds
|
||||
let timestamp = std::time::SystemTime::now()
|
||||
.duration_since(std::time::UNIX_EPOCH)?
|
||||
.as_millis()
|
||||
.to_string();
|
||||
|
||||
// Path to metrics file
|
||||
let metrics_path = output_dir.join("metrics.json");
|
||||
|
||||
// Load existing metrics if the file exists, or create a new object
|
||||
let mut historical_metrics = if metrics_path.exists() {
|
||||
let metrics_content = fs::read_to_string(&metrics_path)?;
|
||||
serde_json::from_str::<serde_json::Value>(&metrics_content)
|
||||
.unwrap_or_else(|_| serde_json::json!({}))
|
||||
} else {
|
||||
serde_json::json!({})
|
||||
};
|
||||
|
||||
// Add new run with timestamp as key
|
||||
if let serde_json::Value::Object(ref mut map) = historical_metrics {
|
||||
map.insert(timestamp, current_metrics);
|
||||
}
|
||||
|
||||
// Write updated metrics back to file
|
||||
let metrics_json = serde_json::to_string_pretty(&historical_metrics)?;
|
||||
let mut metrics_file = fs::File::create(&metrics_path)?;
|
||||
metrics_file.write_all(metrics_json.as_bytes())?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
fn repo_dir_name(url: &str) -> String {
|
||||
url.trim_start_matches("https://")
|
||||
.replace(|c: char| !c.is_alphanumeric(), "_")
|
||||
}
|
||||
|
||||
async fn checkout_repo(eval_setup: &EvalSetup, repo_path: &Path) -> anyhow::Result<()> {
|
||||
if !repo_path.exists() {
|
||||
smol::unblock({
|
||||
let repo_path = repo_path.to_path_buf();
|
||||
|| std::fs::create_dir_all(repo_path)
|
||||
})
|
||||
.await?;
|
||||
run_git(repo_path, vec!["init"]).await?;
|
||||
run_git(repo_path, vec!["remote", "add", "origin", &eval_setup.url]).await?;
|
||||
} else {
|
||||
let actual_origin = query_git(repo_path, vec!["remote", "get-url", "origin"]).await?;
|
||||
if actual_origin != eval_setup.url {
|
||||
return Err(anyhow!(
|
||||
"remote origin {} does not match expected origin {}",
|
||||
actual_origin,
|
||||
eval_setup.url
|
||||
));
|
||||
}
|
||||
|
||||
// TODO: consider including "-x" to remove ignored files. The downside of this is that it will
|
||||
// also remove build artifacts, and so prevent incremental reuse there.
|
||||
run_git(repo_path, vec!["clean", "--force", "-d"]).await?;
|
||||
run_git(repo_path, vec!["reset", "--hard", "HEAD"]).await?;
|
||||
}
|
||||
|
||||
run_git(
|
||||
repo_path,
|
||||
vec!["fetch", "--depth", "1", "origin", &eval_setup.base_sha],
|
||||
)
|
||||
.await?;
|
||||
run_git(repo_path, vec!["checkout", &eval_setup.base_sha]).await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn run_git(repo_path: &Path, args: Vec<&str>) -> anyhow::Result<()> {
|
||||
let exit_status = new_smol_command("git")
|
||||
.current_dir(repo_path)
|
||||
.args(args.clone())
|
||||
.status()
|
||||
.await?;
|
||||
if exit_status.success() {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(anyhow!(
|
||||
"`git {}` failed with {}",
|
||||
args.join(" "),
|
||||
exit_status,
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
async fn query_git(repo_path: &Path, args: Vec<&str>) -> anyhow::Result<String> {
|
||||
let output = new_smol_command("git")
|
||||
.current_dir(repo_path)
|
||||
.args(args.clone())
|
||||
.output()
|
||||
.await?;
|
||||
if output.status.success() {
|
||||
Ok(String::from_utf8(output.stdout)?.trim().to_string())
|
||||
} else {
|
||||
Err(anyhow!(
|
||||
"`git {}` failed with {}",
|
||||
args.join(" "),
|
||||
output.status
|
||||
))
|
||||
}
|
||||
}
|
||||
241
crates/assistant_eval/src/headless_assistant.rs
Normal file
@@ -0,0 +1,241 @@
|
||||
use anyhow::anyhow;
|
||||
use assistant2::{RequestKind, Thread, ThreadEvent, ThreadStore};
|
||||
use assistant_tool::ToolWorkingSet;
|
||||
use client::{Client, UserStore};
|
||||
use collections::HashMap;
|
||||
use futures::StreamExt;
|
||||
use gpui::{prelude::*, App, AsyncApp, Entity, SemanticVersion, Subscription, Task};
|
||||
use language::LanguageRegistry;
|
||||
use language_model::{
|
||||
AuthenticateError, LanguageModel, LanguageModelProviderId, LanguageModelRegistry,
|
||||
LanguageModelRequest,
|
||||
};
|
||||
use node_runtime::NodeRuntime;
|
||||
use project::{Project, RealFs};
|
||||
use prompt_store::PromptBuilder;
|
||||
use settings::SettingsStore;
|
||||
use smol::channel;
|
||||
use std::sync::Arc;
|
||||
|
||||
/// Subset of `workspace::AppState` needed by `HeadlessAssistant`, with additional fields.
|
||||
pub struct HeadlessAppState {
|
||||
pub languages: Arc<LanguageRegistry>,
|
||||
pub client: Arc<Client>,
|
||||
pub user_store: Entity<UserStore>,
|
||||
pub fs: Arc<dyn fs::Fs>,
|
||||
pub node_runtime: NodeRuntime,
|
||||
|
||||
// Additional fields not present in `workspace::AppState`.
|
||||
pub prompt_builder: Arc<PromptBuilder>,
|
||||
}
|
||||
|
||||
pub struct HeadlessAssistant {
|
||||
pub thread: Entity<Thread>,
|
||||
pub project: Entity<Project>,
|
||||
#[allow(dead_code)]
|
||||
pub thread_store: Entity<ThreadStore>,
|
||||
pub tool_use_counts: HashMap<Arc<str>, u32>,
|
||||
pub done_tx: channel::Sender<anyhow::Result<()>>,
|
||||
_subscription: Subscription,
|
||||
}
|
||||
|
||||
impl HeadlessAssistant {
|
||||
pub fn new(
|
||||
app_state: Arc<HeadlessAppState>,
|
||||
cx: &mut App,
|
||||
) -> anyhow::Result<(Entity<Self>, channel::Receiver<anyhow::Result<()>>)> {
|
||||
let env = None;
|
||||
let project = Project::local(
|
||||
app_state.client.clone(),
|
||||
app_state.node_runtime.clone(),
|
||||
app_state.user_store.clone(),
|
||||
app_state.languages.clone(),
|
||||
app_state.fs.clone(),
|
||||
env,
|
||||
cx,
|
||||
);
|
||||
|
||||
let tools = Arc::new(ToolWorkingSet::default());
|
||||
let thread_store =
|
||||
ThreadStore::new(project.clone(), tools, app_state.prompt_builder.clone(), cx)?;
|
||||
|
||||
let thread = thread_store.update(cx, |thread_store, cx| thread_store.create_thread(cx));
|
||||
|
||||
let (done_tx, done_rx) = channel::unbounded::<anyhow::Result<()>>();
|
||||
|
||||
let headless_thread = cx.new(move |cx| Self {
|
||||
_subscription: cx.subscribe(&thread, Self::handle_thread_event),
|
||||
thread,
|
||||
project,
|
||||
thread_store,
|
||||
tool_use_counts: HashMap::default(),
|
||||
done_tx,
|
||||
});
|
||||
|
||||
Ok((headless_thread, done_rx))
|
||||
}
|
||||
|
||||
fn handle_thread_event(
|
||||
&mut self,
|
||||
thread: Entity<Thread>,
|
||||
event: &ThreadEvent,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
match event {
|
||||
ThreadEvent::ShowError(err) => self
|
||||
.done_tx
|
||||
.send_blocking(Err(anyhow!("{:?}", err)))
|
||||
.unwrap(),
|
||||
ThreadEvent::DoneStreaming => {
|
||||
let thread = thread.read(cx);
|
||||
if let Some(message) = thread.messages().last() {
|
||||
println!("Message: {}", message.text,);
|
||||
}
|
||||
if thread.all_tools_finished() {
|
||||
self.done_tx.send_blocking(Ok(())).unwrap()
|
||||
}
|
||||
}
|
||||
ThreadEvent::UsePendingTools => {
|
||||
thread.update(cx, |thread, cx| {
|
||||
thread.use_pending_tools(cx);
|
||||
});
|
||||
}
|
||||
ThreadEvent::ToolFinished {
|
||||
tool_use_id,
|
||||
pending_tool_use,
|
||||
..
|
||||
} => {
|
||||
if let Some(pending_tool_use) = pending_tool_use {
|
||||
println!(
|
||||
"Used tool {} with input: {}",
|
||||
pending_tool_use.name, pending_tool_use.input
|
||||
);
|
||||
*self
|
||||
.tool_use_counts
|
||||
.entry(pending_tool_use.name.clone())
|
||||
.or_insert(0) += 1;
|
||||
}
|
||||
if let Some(tool_result) = thread.read(cx).tool_result(tool_use_id) {
|
||||
println!("Tool result: {:?}", tool_result);
|
||||
}
|
||||
if thread.read(cx).all_tools_finished() {
|
||||
let model_registry = LanguageModelRegistry::read_global(cx);
|
||||
if let Some(model) = model_registry.active_model() {
|
||||
thread.update(cx, |thread, cx| {
|
||||
thread.attach_tool_results(vec![], cx);
|
||||
thread.send_to_model(model, RequestKind::Chat, cx);
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
ThreadEvent::StreamedCompletion
|
||||
| ThreadEvent::SummaryChanged
|
||||
| ThreadEvent::StreamedAssistantText(_, _)
|
||||
| ThreadEvent::MessageAdded(_)
|
||||
| ThreadEvent::MessageEdited(_)
|
||||
| ThreadEvent::MessageDeleted(_) => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn init(cx: &mut App) -> Arc<HeadlessAppState> {
|
||||
release_channel::init(SemanticVersion::default(), cx);
|
||||
gpui_tokio::init(cx);
|
||||
|
||||
let mut settings_store = SettingsStore::new(cx);
|
||||
settings_store
|
||||
.set_default_settings(settings::default_settings().as_ref(), cx)
|
||||
.unwrap();
|
||||
cx.set_global(settings_store);
|
||||
client::init_settings(cx);
|
||||
Project::init_settings(cx);
|
||||
|
||||
let client = Client::production(cx);
|
||||
cx.set_http_client(client.http_client().clone());
|
||||
|
||||
let git_binary_path = None;
|
||||
let fs = Arc::new(RealFs::new(git_binary_path));
|
||||
|
||||
let languages = Arc::new(LanguageRegistry::new(cx.background_executor().clone()));
|
||||
|
||||
let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
|
||||
|
||||
language::init(cx);
|
||||
language_model::init(client.clone(), cx);
|
||||
language_models::init(user_store.clone(), client.clone(), fs.clone(), cx);
|
||||
assistant_tools::init(cx);
|
||||
context_server::init(cx);
|
||||
let stdout_is_a_pty = false;
|
||||
let prompt_builder = PromptBuilder::load(fs.clone(), stdout_is_a_pty, cx);
|
||||
assistant2::init(fs.clone(), client.clone(), prompt_builder.clone(), cx);
|
||||
|
||||
Arc::new(HeadlessAppState {
|
||||
languages,
|
||||
client,
|
||||
user_store,
|
||||
fs,
|
||||
node_runtime: NodeRuntime::unavailable(),
|
||||
prompt_builder,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn find_model(model_name: &str, cx: &App) -> anyhow::Result<Arc<dyn LanguageModel>> {
|
||||
let model_registry = LanguageModelRegistry::read_global(cx);
|
||||
let model = model_registry
|
||||
.available_models(cx)
|
||||
.find(|model| model.id().0 == model_name);
|
||||
|
||||
let Some(model) = model else {
|
||||
return Err(anyhow!(
|
||||
"No language model named {} was available. Available models: {}",
|
||||
model_name,
|
||||
model_registry
|
||||
.available_models(cx)
|
||||
.map(|model| model.id().0.clone())
|
||||
.collect::<Vec<_>>()
|
||||
.join(", ")
|
||||
));
|
||||
};
|
||||
|
||||
Ok(model)
|
||||
}
|
||||
|
||||
pub fn authenticate_model_provider(
|
||||
provider_id: LanguageModelProviderId,
|
||||
cx: &mut App,
|
||||
) -> Task<std::result::Result<(), AuthenticateError>> {
|
||||
let model_registry = LanguageModelRegistry::read_global(cx);
|
||||
let model_provider = model_registry.provider(&provider_id).unwrap();
|
||||
model_provider.authenticate(cx)
|
||||
}
|
||||
|
||||
pub async fn send_language_model_request(
|
||||
model: Arc<dyn LanguageModel>,
|
||||
request: LanguageModelRequest,
|
||||
cx: AsyncApp,
|
||||
) -> anyhow::Result<String> {
|
||||
match model.stream_completion_text(request, &cx).await {
|
||||
Ok(mut stream) => {
|
||||
let mut full_response = String::new();
|
||||
|
||||
// Process the response stream
|
||||
while let Some(chunk_result) = stream.stream.next().await {
|
||||
match chunk_result {
|
||||
Ok(chunk_str) => {
|
||||
full_response.push_str(&chunk_str);
|
||||
}
|
||||
Err(err) => {
|
||||
return Err(anyhow!(
|
||||
"Error receiving response from language model: {err}"
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(full_response)
|
||||
}
|
||||
Err(err) => Err(anyhow!(
|
||||
"Failed to get response from language model. Error was: {err}"
|
||||
)),
|
||||
}
|
||||
}
|
||||
121
crates/assistant_eval/src/judge.rs
Normal file
@@ -0,0 +1,121 @@
|
||||
use crate::eval::EvalOutput;
|
||||
use crate::headless_assistant::send_language_model_request;
|
||||
use anyhow::anyhow;
|
||||
use gpui::{App, Task};
|
||||
use language_model::{
|
||||
LanguageModel, LanguageModelRequest, LanguageModelRequestMessage, MessageContent, Role,
|
||||
};
|
||||
use std::{path::Path, sync::Arc};
|
||||
|
||||
pub struct Judge {
|
||||
pub original_diff: Option<String>,
|
||||
#[allow(dead_code)]
|
||||
pub original_message: Option<String>,
|
||||
pub model: Arc<dyn LanguageModel>,
|
||||
}
|
||||
|
||||
impl Judge {
|
||||
pub async fn load(eval_path: &Path, model: Arc<dyn LanguageModel>) -> anyhow::Result<Judge> {
|
||||
let original_diff_path = eval_path.join("original.diff");
|
||||
let original_diff = smol::unblock(move || {
|
||||
if std::fs::exists(&original_diff_path)? {
|
||||
anyhow::Ok(Some(std::fs::read_to_string(&original_diff_path)?))
|
||||
} else {
|
||||
anyhow::Ok(None)
|
||||
}
|
||||
});
|
||||
|
||||
let original_message_path = eval_path.join("original_message.txt");
|
||||
let original_message = smol::unblock(move || {
|
||||
if std::fs::exists(&original_message_path)? {
|
||||
anyhow::Ok(Some(std::fs::read_to_string(&original_message_path)?))
|
||||
} else {
|
||||
anyhow::Ok(None)
|
||||
}
|
||||
});
|
||||
|
||||
Ok(Self {
|
||||
original_diff: original_diff.await?,
|
||||
original_message: original_message.await?,
|
||||
model,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn run(&self, eval_output: &EvalOutput, cx: &mut App) -> Task<anyhow::Result<String>> {
|
||||
let Some(original_diff) = self.original_diff.as_ref() else {
|
||||
return Task::ready(Err(anyhow!("No original.diff found")));
|
||||
};
|
||||
|
||||
// TODO: check for empty diff?
|
||||
let prompt = diff_comparison_prompt(&original_diff, &eval_output.diff);
|
||||
|
||||
let request = LanguageModelRequest {
|
||||
messages: vec![LanguageModelRequestMessage {
|
||||
role: Role::User,
|
||||
content: vec![MessageContent::Text(prompt)],
|
||||
cache: false,
|
||||
}],
|
||||
temperature: Some(0.0),
|
||||
tools: Vec::new(),
|
||||
stop: Vec::new(),
|
||||
};
|
||||
|
||||
let model = self.model.clone();
|
||||
cx.spawn(move |cx| send_language_model_request(model, request, cx))
|
||||
}
|
||||
}
|
||||
|
||||
pub fn diff_comparison_prompt(original_diff: &str, new_diff: &str) -> String {
|
||||
format!(
|
||||
r#"# Git Diff Similarity Evaluation Template
|
||||
|
||||
## Instructions
|
||||
|
||||
Compare the two diffs and score them between 0.0 and 1.0 based on their functional similarity.
|
||||
- 1.0 = Perfect functional match (achieves identical results)
|
||||
- 0.0 = No functional similarity whatsoever
|
||||
|
||||
## Evaluation Criteria
|
||||
|
||||
Please consider the following aspects in order of importance:
|
||||
|
||||
1. **Functional Equivalence (60%)**
|
||||
- Do both diffs achieve the same end result?
|
||||
- Are the changes functionally equivalent despite possibly using different approaches?
|
||||
- Do the modifications address the same issues or implement the same features?
|
||||
|
||||
2. **Logical Structure (20%)**
|
||||
- Are the logical flows similar?
|
||||
- Do the modifications affect the same code paths?
|
||||
- Are control structures (if/else, loops, etc.) modified in similar ways?
|
||||
|
||||
3. **Code Content (15%)**
|
||||
- Are similar lines added/removed?
|
||||
- Are the same variables, functions, or methods being modified?
|
||||
- Are the same APIs or libraries being used?
|
||||
|
||||
4. **File Layout (5%)**
|
||||
- Are the same files being modified?
|
||||
- Are changes occurring in similar locations within files?
|
||||
|
||||
## Input
|
||||
|
||||
Original Diff:
|
||||
```git
|
||||
{}
|
||||
```
|
||||
|
||||
New Diff:
|
||||
```git
|
||||
{}
|
||||
```
|
||||
|
||||
## Output Format
|
||||
|
||||
THE ONLY OUTPUT SHOULD BE A SCORE BETWEEN 0.0 AND 1.0.
|
||||
|
||||
Example output:
|
||||
0.85"#,
|
||||
original_diff, new_diff
|
||||
)
|
||||
}
|
||||
239
crates/assistant_eval/src/main.rs
Normal file
@@ -0,0 +1,239 @@
|
||||
mod eval;
|
||||
mod headless_assistant;
|
||||
mod judge;
|
||||
|
||||
use clap::Parser;
|
||||
use eval::{Eval, EvalOutput};
|
||||
use futures::{stream, StreamExt};
|
||||
use gpui::{Application, AsyncApp};
|
||||
use headless_assistant::{authenticate_model_provider, find_model, HeadlessAppState};
|
||||
use itertools::Itertools;
|
||||
use judge::Judge;
|
||||
use language_model::{LanguageModel, LanguageModelRegistry};
|
||||
use regex::Regex;
|
||||
use reqwest_client::ReqwestClient;
|
||||
use std::{cmp, path::PathBuf, sync::Arc};
|
||||
|
||||
#[derive(Parser, Debug)]
|
||||
#[command(
|
||||
name = "assistant_eval",
|
||||
disable_version_flag = true,
|
||||
before_help = "Tool eval runner"
|
||||
)]
|
||||
struct Args {
|
||||
/// Regexes to match the names of evals to run.
|
||||
eval_name_regexes: Vec<String>,
|
||||
/// Runs all evals in `evaluation_data`, ignoring the name regexes.
|
||||
#[arg(long)]
|
||||
all: bool,
|
||||
/// Name of the model (default: "claude-3-7-sonnet-latest")
|
||||
#[arg(long, default_value = "claude-3-7-sonnet-latest")]
|
||||
model_name: String,
|
||||
/// Name of the editor model (default: value of `--model_name`).
|
||||
#[arg(long)]
|
||||
editor_model_name: Option<String>,
|
||||
/// Name of the judge model (default: value of `--model_name`).
|
||||
#[arg(long)]
|
||||
judge_model_name: Option<String>,
|
||||
/// Number of evaluations to run concurrently (default: 10)
|
||||
#[arg(short, long, default_value = "10")]
|
||||
concurrency: usize,
|
||||
}
|
||||
|
||||
fn main() {
|
||||
env_logger::init();
|
||||
let args = Args::parse();
|
||||
let http_client = Arc::new(ReqwestClient::new());
|
||||
let app = Application::headless().with_http_client(http_client.clone());
|
||||
|
||||
let crate_dir = PathBuf::from("../zed-agent-bench");
|
||||
let evaluation_data_dir = crate_dir.join("evaluation_data").canonicalize().unwrap();
|
||||
|
||||
let repos_dir = crate_dir.join("repos");
|
||||
if !repos_dir.exists() {
|
||||
std::fs::create_dir_all(&repos_dir).unwrap();
|
||||
}
|
||||
let repos_dir = repos_dir.canonicalize().unwrap();
|
||||
|
||||
let all_evals = std::fs::read_dir(&evaluation_data_dir)
|
||||
.unwrap()
|
||||
.map(|path| path.unwrap().file_name().to_string_lossy().to_string())
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let evals_to_run = if args.all {
|
||||
all_evals
|
||||
} else {
|
||||
args.eval_name_regexes
|
||||
.into_iter()
|
||||
.map(|regex_string| Regex::new(&regex_string).unwrap())
|
||||
.flat_map(|regex| {
|
||||
all_evals
|
||||
.iter()
|
||||
.filter(|eval_name| regex.is_match(eval_name))
|
||||
.cloned()
|
||||
.collect::<Vec<_>>()
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
};
|
||||
|
||||
if evals_to_run.is_empty() {
|
||||
panic!("Names of evals to run must be provided or `--all` specified");
|
||||
}
|
||||
|
||||
println!("Will run the following evals: {evals_to_run:?}");
|
||||
println!("Running up to {} evals concurrently", args.concurrency);
|
||||
|
||||
let editor_model_name = if let Some(model_name) = args.editor_model_name {
|
||||
model_name
|
||||
} else {
|
||||
args.model_name.clone()
|
||||
};
|
||||
|
||||
let judge_model_name = if let Some(model_name) = args.judge_model_name {
|
||||
model_name
|
||||
} else {
|
||||
args.model_name.clone()
|
||||
};
|
||||
|
||||
app.run(move |cx| {
|
||||
let app_state = headless_assistant::init(cx);
|
||||
|
||||
let model = find_model(&args.model_name, cx).unwrap();
|
||||
let editor_model = find_model(&editor_model_name, cx).unwrap();
|
||||
let judge_model = find_model(&judge_model_name, cx).unwrap();
|
||||
|
||||
LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
|
||||
registry.set_active_model(Some(model.clone()), cx);
|
||||
registry.set_editor_model(Some(editor_model.clone()), cx);
|
||||
});
|
||||
|
||||
let model_provider_id = model.provider_id();
|
||||
let editor_model_provider_id = editor_model.provider_id();
|
||||
let judge_model_provider_id = judge_model.provider_id();
|
||||
|
||||
cx.spawn(move |cx| async move {
|
||||
// Authenticate all model providers first
|
||||
cx.update(|cx| authenticate_model_provider(model_provider_id.clone(), cx))
|
||||
.unwrap()
|
||||
.await
|
||||
.unwrap();
|
||||
cx.update(|cx| authenticate_model_provider(editor_model_provider_id.clone(), cx))
|
||||
.unwrap()
|
||||
.await
|
||||
.unwrap();
|
||||
cx.update(|cx| authenticate_model_provider(judge_model_provider_id.clone(), cx))
|
||||
.unwrap()
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let loaded_evals = stream::iter(evals_to_run)
|
||||
.map(|eval_name| {
|
||||
let eval_path = evaluation_data_dir.join(&eval_name);
|
||||
let repos_dir = repos_dir.clone();
|
||||
async move {
|
||||
match Eval::load(eval_name.clone(), eval_path, &repos_dir).await {
|
||||
Ok(eval) => Some(eval),
|
||||
Err(err) => {
|
||||
// TODO: Persist errors / surface errors at the end.
|
||||
println!("Error loading {eval_name}: {err}");
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
.buffer_unordered(args.concurrency)
|
||||
.collect::<Vec<_>>()
|
||||
.await
|
||||
.into_iter()
|
||||
.flatten()
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
// The evals need to be loaded and grouped by URL before concurrently running, since
|
||||
// evals that use the same remote URL will use the same working directory.
|
||||
let mut evals_grouped_by_url: Vec<Vec<Eval>> = loaded_evals
|
||||
.into_iter()
|
||||
.map(|eval| (eval.eval_setup.url.clone(), eval))
|
||||
.into_group_map()
|
||||
.into_values()
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
// Sort groups in descending order, so that bigger groups start first.
|
||||
evals_grouped_by_url.sort_by_key(|evals| cmp::Reverse(evals.len()));
|
||||
|
||||
let results = stream::iter(evals_grouped_by_url)
|
||||
.map(|evals| {
|
||||
let model = model.clone();
|
||||
let judge_model = judge_model.clone();
|
||||
let app_state = app_state.clone();
|
||||
let cx = cx.clone();
|
||||
|
||||
async move {
|
||||
let mut results = Vec::new();
|
||||
for eval in evals {
|
||||
let name = eval.name.clone();
|
||||
println!("Starting eval named {}", name);
|
||||
let result = run_eval(
|
||||
eval,
|
||||
model.clone(),
|
||||
judge_model.clone(),
|
||||
app_state.clone(),
|
||||
cx.clone(),
|
||||
)
|
||||
.await;
|
||||
results.push((name, result));
|
||||
}
|
||||
results
|
||||
}
|
||||
})
|
||||
.buffer_unordered(args.concurrency)
|
||||
.collect::<Vec<_>>()
|
||||
.await
|
||||
.into_iter()
|
||||
.flatten()
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
// Process results in order of completion
|
||||
for (eval_name, result) in results {
|
||||
match result {
|
||||
Ok((eval_output, judge_output)) => {
|
||||
println!("Generated diff for {eval_name}:\n");
|
||||
println!("{}\n", eval_output.diff);
|
||||
println!("Last message for {eval_name}:\n");
|
||||
println!("{}\n", eval_output.last_message);
|
||||
println!("Elapsed time: {:?}", eval_output.elapsed_time);
|
||||
println!(
|
||||
"Assistant response count: {}",
|
||||
eval_output.assistant_response_count
|
||||
);
|
||||
println!("Tool use counts: {:?}", eval_output.tool_use_counts);
|
||||
println!("Judge output for {eval_name}: {judge_output}");
|
||||
}
|
||||
Err(err) => {
|
||||
// TODO: Persist errors / surface errors at the end.
|
||||
println!("Error running {eval_name}: {err}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
cx.update(|cx| cx.quit()).unwrap();
|
||||
})
|
||||
.detach();
|
||||
});
|
||||
|
||||
println!("Done running evals");
|
||||
}
|
||||
|
||||
async fn run_eval(
|
||||
eval: Eval,
|
||||
model: Arc<dyn LanguageModel>,
|
||||
judge_model: Arc<dyn LanguageModel>,
|
||||
app_state: Arc<HeadlessAppState>,
|
||||
cx: AsyncApp,
|
||||
) -> anyhow::Result<(EvalOutput, String)> {
|
||||
let path = eval.path.clone();
|
||||
let judge = Judge::load(&path, judge_model).await?;
|
||||
let eval_output = cx.update(|cx| eval.run(app_state, model, cx))?.await?;
|
||||
let judge_output = cx.update(|cx| judge.run(&eval_output, cx))?.await?;
|
||||
eval_output.save_to_directory(&path, judge_output.to_string())?;
|
||||
Ok((eval_output, judge_output))
|
||||
}
|
||||
@@ -62,6 +62,7 @@ pub struct AssistantSettings {
|
||||
pub default_width: Pixels,
|
||||
pub default_height: Pixels,
|
||||
pub default_model: LanguageModelSelection,
|
||||
pub editor_model: LanguageModelSelection,
|
||||
pub inline_alternatives: Vec<LanguageModelSelection>,
|
||||
pub using_outdated_settings_version: bool,
|
||||
pub enable_experimental_live_diffs: bool,
|
||||
@@ -162,6 +163,7 @@ impl AssistantSettingsContent {
|
||||
})
|
||||
}
|
||||
}),
|
||||
editor_model: None,
|
||||
inline_alternatives: None,
|
||||
enable_experimental_live_diffs: None,
|
||||
},
|
||||
@@ -182,6 +184,7 @@ impl AssistantSettingsContent {
|
||||
.id()
|
||||
.to_string(),
|
||||
}),
|
||||
editor_model: None,
|
||||
inline_alternatives: None,
|
||||
enable_experimental_live_diffs: None,
|
||||
},
|
||||
@@ -310,6 +313,7 @@ impl Default for VersionedAssistantSettingsContent {
|
||||
default_width: None,
|
||||
default_height: None,
|
||||
default_model: None,
|
||||
editor_model: None,
|
||||
inline_alternatives: None,
|
||||
enable_experimental_live_diffs: None,
|
||||
})
|
||||
@@ -340,6 +344,8 @@ pub struct AssistantSettingsContentV2 {
|
||||
default_height: Option<f32>,
|
||||
/// The default model to use when creating new chats.
|
||||
default_model: Option<LanguageModelSelection>,
|
||||
/// The model to use when applying edits from the assistant.
|
||||
editor_model: Option<LanguageModelSelection>,
|
||||
/// Additional models with which to generate alternatives when performing inline assists.
|
||||
inline_alternatives: Option<Vec<LanguageModelSelection>>,
|
||||
/// Enable experimental live diffs in the assistant panel.
|
||||
@@ -470,6 +476,7 @@ impl Settings for AssistantSettings {
|
||||
value.default_height.map(Into::into),
|
||||
);
|
||||
merge(&mut settings.default_model, value.default_model);
|
||||
merge(&mut settings.editor_model, value.editor_model);
|
||||
merge(&mut settings.inline_alternatives, value.inline_alternatives);
|
||||
merge(
|
||||
&mut settings.enable_experimental_live_diffs,
|
||||
@@ -528,6 +535,10 @@ mod tests {
|
||||
provider: "test-provider".into(),
|
||||
model: "gpt-99".into(),
|
||||
}),
|
||||
editor_model: Some(LanguageModelSelection {
|
||||
provider: "test-provider".into(),
|
||||
model: "gpt-99".into(),
|
||||
}),
|
||||
inline_alternatives: None,
|
||||
enabled: None,
|
||||
button: None,
|
||||
|
||||
@@ -88,7 +88,6 @@ pub trait SlashCommand: 'static + Send + Sync {
|
||||
fn accepts_arguments(&self) -> bool {
|
||||
self.requires_argument()
|
||||
}
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
arguments: &[String],
|
||||
|
||||
@@ -14,9 +14,12 @@ path = "src/assistant_tool.rs"
|
||||
[dependencies]
|
||||
anyhow.workspace = true
|
||||
collections.workspace = true
|
||||
clock.workspace = true
|
||||
derive_more.workspace = true
|
||||
gpui.workspace = true
|
||||
language.workspace = true
|
||||
language_model.workspace = true
|
||||
parking_lot.workspace = true
|
||||
project.workspace = true
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
workspace.workspace = true
|
||||
|
||||
@@ -4,8 +4,12 @@ mod tool_working_set;
|
||||
use std::sync::Arc;
|
||||
|
||||
use anyhow::Result;
|
||||
use gpui::{App, Task, WeakEntity, Window};
|
||||
use workspace::Workspace;
|
||||
use collections::{HashMap, HashSet};
|
||||
use gpui::Context;
|
||||
use gpui::{App, Entity, SharedString, Task};
|
||||
use language::Buffer;
|
||||
use language_model::LanguageModelRequestMessage;
|
||||
use project::Project;
|
||||
|
||||
pub use crate::tool_registry::*;
|
||||
pub use crate::tool_working_set::*;
|
||||
@@ -14,6 +18,14 @@ pub fn init(cx: &mut App) {
|
||||
ToolRegistry::default_global(cx);
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone)]
|
||||
pub enum ToolSource {
|
||||
/// A native tool built-in to Zed.
|
||||
Native,
|
||||
/// A tool provided by a context server.
|
||||
ContextServer { id: SharedString },
|
||||
}
|
||||
|
||||
/// A tool that can be used by a language model.
|
||||
pub trait Tool: 'static + Send + Sync {
|
||||
/// Returns the name of the tool.
|
||||
@@ -22,6 +34,11 @@ pub trait Tool: 'static + Send + Sync {
|
||||
/// Returns the description of the tool.
|
||||
fn description(&self) -> String;
|
||||
|
||||
/// Returns the source of the tool.
|
||||
fn source(&self) -> ToolSource {
|
||||
ToolSource::Native
|
||||
}
|
||||
|
||||
/// Returns the JSON schema that describes the tool's input.
|
||||
fn input_schema(&self) -> serde_json::Value {
|
||||
serde_json::Value::Object(serde_json::Map::default())
|
||||
@@ -31,8 +48,63 @@ pub trait Tool: 'static + Send + Sync {
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
input: serde_json::Value,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
window: &mut Window,
|
||||
messages: &[LanguageModelRequestMessage],
|
||||
project: Entity<Project>,
|
||||
action_log: Entity<ActionLog>,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<String>>;
|
||||
}
|
||||
|
||||
/// Tracks actions performed by tools in a thread
|
||||
#[derive(Debug)]
|
||||
pub struct ActionLog {
|
||||
/// Buffers that user manually added to the context, and whose content has
|
||||
/// changed since the model last saw them.
|
||||
stale_buffers_in_context: HashSet<Entity<Buffer>>,
|
||||
/// Buffers that we want to notify the model about when they change.
|
||||
tracked_buffers: HashMap<Entity<Buffer>, TrackedBuffer>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
struct TrackedBuffer {
|
||||
version: clock::Global,
|
||||
}
|
||||
|
||||
impl ActionLog {
|
||||
/// Creates a new, empty action log.
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
stale_buffers_in_context: HashSet::default(),
|
||||
tracked_buffers: HashMap::default(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Track a buffer as read, so we can notify the model about user edits.
|
||||
pub fn buffer_read(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
|
||||
let tracked_buffer = self.tracked_buffers.entry(buffer.clone()).or_default();
|
||||
tracked_buffer.version = buffer.read(cx).version();
|
||||
}
|
||||
|
||||
/// Mark a buffer as edited, so we can refresh it in the context
|
||||
pub fn buffer_edited(&mut self, buffers: HashSet<Entity<Buffer>>, cx: &mut Context<Self>) {
|
||||
for buffer in &buffers {
|
||||
let tracked_buffer = self.tracked_buffers.entry(buffer.clone()).or_default();
|
||||
tracked_buffer.version = buffer.read(cx).version();
|
||||
}
|
||||
|
||||
self.stale_buffers_in_context.extend(buffers);
|
||||
}
|
||||
|
||||
/// Iterate over buffers changed since last read or edited by the model
|
||||
pub fn stale_buffers<'a>(&'a self, cx: &'a App) -> impl Iterator<Item = &'a Entity<Buffer>> {
|
||||
self.tracked_buffers
|
||||
.iter()
|
||||
.filter(|(buffer, tracked)| tracked.version != buffer.read(cx).version)
|
||||
.map(|(buffer, _)| buffer)
|
||||
}
|
||||
|
||||
/// Takes and returns the set of buffers pending refresh, clearing internal state.
|
||||
pub fn take_stale_buffers_in_context(&mut self) -> HashSet<Entity<Buffer>> {
|
||||
std::mem::take(&mut self.stale_buffers_in_context)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use collections::HashMap;
|
||||
use collections::{HashMap, HashSet, IndexMap};
|
||||
use gpui::App;
|
||||
use parking_lot::Mutex;
|
||||
|
||||
use crate::{Tool, ToolRegistry};
|
||||
use crate::{Tool, ToolRegistry, ToolSource};
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, Hash, Default)]
|
||||
pub struct ToolId(usize);
|
||||
@@ -15,13 +15,26 @@ pub struct ToolWorkingSet {
|
||||
state: Mutex<WorkingSetState>,
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
struct WorkingSetState {
|
||||
context_server_tools_by_id: HashMap<ToolId, Arc<dyn Tool>>,
|
||||
context_server_tools_by_name: HashMap<String, Arc<dyn Tool>>,
|
||||
disabled_tools_by_source: HashMap<ToolSource, HashSet<Arc<str>>>,
|
||||
is_scripting_tool_disabled: bool,
|
||||
next_tool_id: ToolId,
|
||||
}
|
||||
|
||||
impl Default for WorkingSetState {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
context_server_tools_by_id: HashMap::default(),
|
||||
context_server_tools_by_name: HashMap::default(),
|
||||
disabled_tools_by_source: HashMap::default(),
|
||||
is_scripting_tool_disabled: true,
|
||||
next_tool_id: ToolId::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ToolWorkingSet {
|
||||
pub fn tool(&self, name: &str, cx: &App) -> Option<Arc<dyn Tool>> {
|
||||
self.state
|
||||
@@ -33,36 +46,99 @@ impl ToolWorkingSet {
|
||||
}
|
||||
|
||||
pub fn tools(&self, cx: &App) -> Vec<Arc<dyn Tool>> {
|
||||
let mut tools = ToolRegistry::global(cx).tools();
|
||||
tools.extend(
|
||||
self.state
|
||||
.lock()
|
||||
.context_server_tools_by_id
|
||||
.values()
|
||||
.cloned(),
|
||||
);
|
||||
|
||||
tools
|
||||
self.state.lock().tools(cx)
|
||||
}
|
||||
|
||||
pub fn insert(&self, command: Arc<dyn Tool>) -> ToolId {
|
||||
pub fn tools_by_source(&self, cx: &App) -> IndexMap<ToolSource, Vec<Arc<dyn Tool>>> {
|
||||
self.state.lock().tools_by_source(cx)
|
||||
}
|
||||
|
||||
pub fn are_all_tools_enabled(&self) -> bool {
|
||||
let state = self.state.lock();
|
||||
state.disabled_tools_by_source.is_empty() && !state.is_scripting_tool_disabled
|
||||
}
|
||||
|
||||
pub fn are_all_tools_from_source_enabled(&self, source: &ToolSource) -> bool {
|
||||
let state = self.state.lock();
|
||||
!state.disabled_tools_by_source.contains_key(source)
|
||||
}
|
||||
|
||||
pub fn enabled_tools(&self, cx: &App) -> Vec<Arc<dyn Tool>> {
|
||||
self.state.lock().enabled_tools(cx)
|
||||
}
|
||||
|
||||
pub fn enable_all_tools(&self) {
|
||||
let mut state = self.state.lock();
|
||||
let command_id = state.next_tool_id;
|
||||
state.disabled_tools_by_source.clear();
|
||||
state.enable_scripting_tool();
|
||||
}
|
||||
|
||||
pub fn disable_all_tools(&self, cx: &App) {
|
||||
let mut state = self.state.lock();
|
||||
state.disable_all_tools(cx);
|
||||
}
|
||||
|
||||
pub fn enable_source(&self, source: &ToolSource) {
|
||||
let mut state = self.state.lock();
|
||||
state.enable_source(source);
|
||||
}
|
||||
|
||||
pub fn disable_source(&self, source: ToolSource, cx: &App) {
|
||||
let mut state = self.state.lock();
|
||||
state.disable_source(source, cx);
|
||||
}
|
||||
|
||||
pub fn insert(&self, tool: Arc<dyn Tool>) -> ToolId {
|
||||
let mut state = self.state.lock();
|
||||
let tool_id = state.next_tool_id;
|
||||
state.next_tool_id.0 += 1;
|
||||
state
|
||||
.context_server_tools_by_id
|
||||
.insert(command_id, command.clone());
|
||||
.insert(tool_id, tool.clone());
|
||||
state.tools_changed();
|
||||
command_id
|
||||
tool_id
|
||||
}
|
||||
|
||||
pub fn remove(&self, command_ids_to_remove: &[ToolId]) {
|
||||
pub fn is_enabled(&self, source: &ToolSource, name: &Arc<str>) -> bool {
|
||||
self.state.lock().is_enabled(source, name)
|
||||
}
|
||||
|
||||
pub fn is_disabled(&self, source: &ToolSource, name: &Arc<str>) -> bool {
|
||||
self.state.lock().is_disabled(source, name)
|
||||
}
|
||||
|
||||
pub fn enable(&self, source: ToolSource, tools_to_enable: &[Arc<str>]) {
|
||||
let mut state = self.state.lock();
|
||||
state.enable(source, tools_to_enable);
|
||||
}
|
||||
|
||||
pub fn disable(&self, source: ToolSource, tools_to_disable: &[Arc<str>]) {
|
||||
let mut state = self.state.lock();
|
||||
state.disable(source, tools_to_disable);
|
||||
}
|
||||
|
||||
pub fn remove(&self, tool_ids_to_remove: &[ToolId]) {
|
||||
let mut state = self.state.lock();
|
||||
state
|
||||
.context_server_tools_by_id
|
||||
.retain(|id, _| !command_ids_to_remove.contains(id));
|
||||
.retain(|id, _| !tool_ids_to_remove.contains(id));
|
||||
state.tools_changed();
|
||||
}
|
||||
|
||||
pub fn is_scripting_tool_enabled(&self) -> bool {
|
||||
let state = self.state.lock();
|
||||
!state.is_scripting_tool_disabled
|
||||
}
|
||||
|
||||
pub fn enable_scripting_tool(&self) {
|
||||
let mut state = self.state.lock();
|
||||
state.enable_scripting_tool();
|
||||
}
|
||||
|
||||
pub fn disable_scripting_tool(&self) {
|
||||
let mut state = self.state.lock();
|
||||
state.disable_scripting_tool();
|
||||
}
|
||||
}
|
||||
|
||||
impl WorkingSetState {
|
||||
@@ -71,7 +147,108 @@ impl WorkingSetState {
|
||||
self.context_server_tools_by_name.extend(
|
||||
self.context_server_tools_by_id
|
||||
.values()
|
||||
.map(|command| (command.name(), command.clone())),
|
||||
.map(|tool| (tool.name(), tool.clone())),
|
||||
);
|
||||
}
|
||||
|
||||
fn tools(&self, cx: &App) -> Vec<Arc<dyn Tool>> {
|
||||
let mut tools = ToolRegistry::global(cx).tools();
|
||||
tools.extend(self.context_server_tools_by_id.values().cloned());
|
||||
|
||||
tools
|
||||
}
|
||||
|
||||
fn tools_by_source(&self, cx: &App) -> IndexMap<ToolSource, Vec<Arc<dyn Tool>>> {
|
||||
let mut tools_by_source = IndexMap::default();
|
||||
|
||||
for tool in self.tools(cx) {
|
||||
tools_by_source
|
||||
.entry(tool.source())
|
||||
.or_insert_with(Vec::new)
|
||||
.push(tool);
|
||||
}
|
||||
|
||||
for tools in tools_by_source.values_mut() {
|
||||
tools.sort_by_key(|tool| tool.name());
|
||||
}
|
||||
|
||||
tools_by_source.sort_unstable_keys();
|
||||
|
||||
tools_by_source
|
||||
}
|
||||
|
||||
fn enabled_tools(&self, cx: &App) -> Vec<Arc<dyn Tool>> {
|
||||
let all_tools = self.tools(cx);
|
||||
|
||||
all_tools
|
||||
.into_iter()
|
||||
.filter(|tool| self.is_enabled(&tool.source(), &tool.name().into()))
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn is_enabled(&self, source: &ToolSource, name: &Arc<str>) -> bool {
|
||||
!self.is_disabled(source, name)
|
||||
}
|
||||
|
||||
fn is_disabled(&self, source: &ToolSource, name: &Arc<str>) -> bool {
|
||||
self.disabled_tools_by_source
|
||||
.get(source)
|
||||
.map_or(false, |disabled_tools| disabled_tools.contains(name))
|
||||
}
|
||||
|
||||
fn enable(&mut self, source: ToolSource, tools_to_enable: &[Arc<str>]) {
|
||||
self.disabled_tools_by_source
|
||||
.entry(source)
|
||||
.or_default()
|
||||
.retain(|name| !tools_to_enable.contains(name));
|
||||
}
|
||||
|
||||
fn disable(&mut self, source: ToolSource, tools_to_disable: &[Arc<str>]) {
|
||||
self.disabled_tools_by_source
|
||||
.entry(source)
|
||||
.or_default()
|
||||
.extend(tools_to_disable.into_iter().cloned());
|
||||
}
|
||||
|
||||
fn enable_source(&mut self, source: &ToolSource) {
|
||||
self.disabled_tools_by_source.remove(source);
|
||||
}
|
||||
|
||||
fn disable_source(&mut self, source: ToolSource, cx: &App) {
|
||||
let tools_by_source = self.tools_by_source(cx);
|
||||
let Some(tools) = tools_by_source.get(&source) else {
|
||||
return;
|
||||
};
|
||||
|
||||
self.disabled_tools_by_source.insert(
|
||||
source,
|
||||
tools
|
||||
.into_iter()
|
||||
.map(|tool| tool.name().into())
|
||||
.collect::<HashSet<_>>(),
|
||||
);
|
||||
}
|
||||
|
||||
fn disable_all_tools(&mut self, cx: &App) {
|
||||
let tools = self.tools_by_source(cx);
|
||||
|
||||
for (source, tools) in tools {
|
||||
let tool_names = tools
|
||||
.into_iter()
|
||||
.map(|tool| tool.name().into())
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
self.disable(source, &tool_names);
|
||||
}
|
||||
|
||||
self.disable_scripting_tool();
|
||||
}
|
||||
|
||||
fn enable_scripting_tool(&mut self) {
|
||||
self.is_scripting_tool_disabled = false;
|
||||
}
|
||||
|
||||
fn disable_scripting_tool(&mut self) {
|
||||
self.is_scripting_tool_disabled = true;
|
||||
}
|
||||
}

@@ -15,9 +15,30 @@ path = "src/assistant_tools.rs"
anyhow.workspace = true
assistant_tool.workspace = true
chrono.workspace = true
collections.workspace = true
feature_flags.workspace = true
futures.workspace = true
gpui.workspace = true
language.workspace = true
language_model.workspace = true
project.workspace = true
release_channel.workspace = true
schemars.workspace = true
serde.workspace = true
serde_json.workspace = true
theme.workspace = true
ui.workspace = true
util.workspace = true
workspace.workspace = true
worktree.workspace = true
settings.workspace = true

[dev-dependencies]
rand.workspace = true
collections = { workspace = true, features = ["test-support"] }
gpui = { workspace = true, features = ["test-support"] }
language = { workspace = true, features = ["test-support"] }
pretty_assertions.workspace = true
project = { workspace = true, features = ["test-support"] }
unindent.workspace = true
workspace = { workspace = true, features = ["test-support"] }

@@ -1,19 +1,41 @@
mod list_worktrees_tool;
mod bash_tool;
mod delete_path_tool;
mod diagnostics_tool;
mod edit_files_tool;
mod list_directory_tool;
mod now_tool;
mod path_search_tool;
mod read_file_tool;
mod regex_search;
mod thinking_tool;

use assistant_tool::ToolRegistry;
use gpui::App;

use crate::list_worktrees_tool::ListWorktreesTool;
use crate::bash_tool::BashTool;
use crate::delete_path_tool::DeletePathTool;
use crate::diagnostics_tool::DiagnosticsTool;
use crate::edit_files_tool::EditFilesTool;
use crate::list_directory_tool::ListDirectoryTool;
use crate::now_tool::NowTool;
use crate::path_search_tool::PathSearchTool;
use crate::read_file_tool::ReadFileTool;
use crate::regex_search::RegexSearchTool;
use crate::thinking_tool::ThinkingTool;

pub fn init(cx: &mut App) {
assistant_tool::init(cx);
crate::edit_files_tool::log::init(cx);

let registry = ToolRegistry::global(cx);
registry.register_tool(BashTool);
registry.register_tool(DeletePathTool);
registry.register_tool(DiagnosticsTool);
registry.register_tool(EditFilesTool);
registry.register_tool(ListDirectoryTool);
registry.register_tool(NowTool);
registry.register_tool(ListWorktreesTool);
registry.register_tool(PathSearchTool);
registry.register_tool(ReadFileTool);
registry.register_tool(RegexSearchTool);
registry.register_tool(ThinkingTool);
}

crates/assistant_tools/src/bash_tool.rs (new file, 82 lines)
@@ -0,0 +1,82 @@
|
||||
use anyhow::{anyhow, Context as _, Result};
|
||||
use assistant_tool::{ActionLog, Tool};
|
||||
use gpui::{App, Entity, Task};
|
||||
use language_model::LanguageModelRequestMessage;
|
||||
use project::Project;
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::sync::Arc;
|
||||
use util::command::new_smol_command;
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct BashToolInput {
|
||||
/// The bash command to execute as a one-liner.
|
||||
command: String,
|
||||
/// Working directory for the command. This must be one of the root directories of the project.
|
||||
cd: String,
|
||||
}
|
||||
|
||||
pub struct BashTool;
|
||||
|
||||
impl Tool for BashTool {
|
||||
fn name(&self) -> String {
|
||||
"bash".to_string()
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
include_str!("./bash_tool/description.md").to_string()
|
||||
}
|
||||
|
||||
fn input_schema(&self) -> serde_json::Value {
|
||||
let schema = schemars::schema_for!(BashToolInput);
|
||||
serde_json::to_value(&schema).unwrap()
|
||||
}
|
||||
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
input: serde_json::Value,
|
||||
_messages: &[LanguageModelRequestMessage],
|
||||
project: Entity<Project>,
|
||||
_action_log: Entity<ActionLog>,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<String>> {
|
||||
let input: BashToolInput = match serde_json::from_value(input) {
|
||||
Ok(input) => input,
|
||||
Err(err) => return Task::ready(Err(anyhow!(err))),
|
||||
};
|
||||
|
||||
let Some(worktree) = project.read(cx).worktree_for_root_name(&input.cd, cx) else {
|
||||
return Task::ready(Err(anyhow!("Working directory not found in the project")));
|
||||
};
|
||||
let working_directory = worktree.read(cx).abs_path();
|
||||
|
||||
cx.spawn(|_| async move {
|
||||
// Add 2>&1 to merge stderr into stdout for proper interleaving.
|
||||
let command = format!("({}) 2>&1", input.command);
|
||||
|
||||
let output = new_smol_command("bash")
|
||||
.arg("-c")
|
||||
.arg(&command)
|
||||
.current_dir(working_directory)
|
||||
.output()
|
||||
.await
|
||||
.context("Failed to execute bash command")?;
|
||||
|
||||
let output_string = String::from_utf8_lossy(&output.stdout).to_string();
|
||||
|
||||
if output.status.success() {
|
||||
if output_string.is_empty() {
|
||||
Ok("Command executed successfully.".to_string())
|
||||
} else {
|
||||
Ok(output_string)
|
||||
}
|
||||
} else {
|
||||
Ok(format!(
|
||||
"Command failed with exit code {}\n{}",
|
||||
output.status.code().unwrap_or(-1),
|
||||
&output_string
|
||||
))
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
crates/assistant_tools/src/bash_tool/description.md (new file, 7 lines)
@@ -0,0 +1,7 @@
Executes a bash one-liner and returns the combined output.

This tool spawns a bash process, combines stdout and stderr into one interleaved stream as they are produced (preserving the order of writes), and captures that stream into a string which is returned.

Make sure you use the `cd` parameter to navigate to one of the root directories of the project. NEVER change directories as part of the `command` itself, otherwise the call will error.

Remember that each invocation of this tool will spawn a new bash process, so you can't rely on any state from previous invocations.
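
As a purely illustrative example (these values are hypothetical, not taken from a real project), an invocation's input might look like:

<example>
{
  "command": "cargo test",
  "cd": "my-project"
}
</example>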
crates/assistant_tools/src/delete_path_tool.rs (new file, 72 lines)
@@ -0,0 +1,72 @@
|
||||
use anyhow::{anyhow, Result};
|
||||
use assistant_tool::{ActionLog, Tool};
|
||||
use gpui::{App, AppContext, Entity, Task};
|
||||
use language_model::LanguageModelRequestMessage;
|
||||
use project::Project;
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::sync::Arc;
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct DeletePathToolInput {
|
||||
/// The path of the file or directory to delete.
|
||||
///
|
||||
/// <example>
|
||||
/// If the project has the following files:
|
||||
///
|
||||
/// - directory1/a/something.txt
|
||||
/// - directory2/a/things.txt
|
||||
/// - directory3/a/other.txt
|
||||
///
|
||||
/// You can delete the first file by providing a path of "directory1/a/something.txt"
|
||||
/// </example>
|
||||
pub path: String,
|
||||
}
|
||||
|
||||
pub struct DeletePathTool;
|
||||
|
||||
impl Tool for DeletePathTool {
|
||||
fn name(&self) -> String {
|
||||
"delete-path".into()
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
include_str!("./delete_path_tool/description.md").into()
|
||||
}
|
||||
|
||||
fn input_schema(&self) -> serde_json::Value {
|
||||
let schema = schemars::schema_for!(DeletePathToolInput);
|
||||
serde_json::to_value(&schema).unwrap()
|
||||
}
|
||||
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
input: serde_json::Value,
|
||||
_messages: &[LanguageModelRequestMessage],
|
||||
project: Entity<Project>,
|
||||
_action_log: Entity<ActionLog>,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<String>> {
|
||||
let path_str = match serde_json::from_value::<DeletePathToolInput>(input) {
|
||||
Ok(input) => input.path,
|
||||
Err(err) => return Task::ready(Err(anyhow!(err))),
|
||||
};
|
||||
|
||||
match project
|
||||
.read(cx)
|
||||
.find_project_path(&path_str, cx)
|
||||
.and_then(|path| project.update(cx, |project, cx| project.delete_file(path, false, cx)))
|
||||
{
|
||||
Some(deletion_task) => cx.background_spawn(async move {
|
||||
match deletion_task.await {
|
||||
Ok(()) => Ok(format!("Deleted {}", &path_str)),
|
||||
Err(err) => Err(anyhow!("Failed to delete {}: {}", &path_str, err)),
|
||||
}
|
||||
}),
|
||||
None => Task::ready(Err(anyhow!(
|
||||
"Couldn't delete {} because that path isn't in this project.",
|
||||
path_str
|
||||
))),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1 @@
Deletes the file or directory (and the directory's contents, recursively) at the specified path in the project, and returns confirmation of the deletion.
crates/assistant_tools/src/diagnostics_tool.rs (new file, 128 lines)
@@ -0,0 +1,128 @@
|
||||
use anyhow::{anyhow, Result};
|
||||
use assistant_tool::{ActionLog, Tool};
|
||||
use gpui::{App, Entity, Task};
|
||||
use language::{DiagnosticSeverity, OffsetRangeExt};
|
||||
use language_model::LanguageModelRequestMessage;
|
||||
use project::Project;
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{
|
||||
fmt::Write,
|
||||
path::{Path, PathBuf},
|
||||
sync::Arc,
|
||||
};
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct DiagnosticsToolInput {
|
||||
/// The path to get diagnostics for. If not provided, returns a project-wide summary.
|
||||
///
|
||||
/// This path should never be absolute, and the first component
|
||||
/// of the path should always be a root directory in a project.
|
||||
///
|
||||
/// <example>
|
||||
/// If the project has the following root directories:
|
||||
///
|
||||
/// - lorem
|
||||
/// - ipsum
|
||||
///
|
||||
/// If you want to access diagnostics for `dolor.txt` in `ipsum`, use the path `ipsum/dolor.txt`.
|
||||
/// </example>
|
||||
pub path: Option<PathBuf>,
|
||||
}
|
||||
|
||||
pub struct DiagnosticsTool;
|
||||
|
||||
impl Tool for DiagnosticsTool {
|
||||
fn name(&self) -> String {
|
||||
"diagnostics".into()
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
include_str!("./diagnostics_tool/description.md").into()
|
||||
}
|
||||
|
||||
fn input_schema(&self) -> serde_json::Value {
|
||||
let schema = schemars::schema_for!(DiagnosticsToolInput);
|
||||
serde_json::to_value(&schema).unwrap()
|
||||
}
|
||||
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
input: serde_json::Value,
|
||||
_messages: &[LanguageModelRequestMessage],
|
||||
project: Entity<Project>,
|
||||
_action_log: Entity<ActionLog>,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<String>> {
|
||||
let input = match serde_json::from_value::<DiagnosticsToolInput>(input) {
|
||||
Ok(input) => input,
|
||||
Err(err) => return Task::ready(Err(anyhow!(err))),
|
||||
};
|
||||
|
||||
if let Some(path) = input.path {
|
||||
let Some(project_path) = project.read(cx).find_project_path(&path, cx) else {
|
||||
return Task::ready(Err(anyhow!("Could not find path in project")));
|
||||
};
|
||||
let buffer = project.update(cx, |project, cx| project.open_buffer(project_path, cx));
|
||||
|
||||
cx.spawn(|cx| async move {
|
||||
let mut output = String::new();
|
||||
let buffer = buffer.await?;
|
||||
let snapshot = buffer.read_with(&cx, |buffer, _cx| buffer.snapshot())?;
|
||||
|
||||
for (_, group) in snapshot.diagnostic_groups(None) {
|
||||
let entry = &group.entries[group.primary_ix];
|
||||
let range = entry.range.to_point(&snapshot);
|
||||
let severity = match entry.diagnostic.severity {
|
||||
DiagnosticSeverity::ERROR => "error",
|
||||
DiagnosticSeverity::WARNING => "warning",
|
||||
_ => continue,
|
||||
};
|
||||
|
||||
writeln!(
|
||||
output,
|
||||
"{} at line {}: {}",
|
||||
severity,
|
||||
range.start.row + 1,
|
||||
entry.diagnostic.message
|
||||
)?;
|
||||
}
|
||||
|
||||
if output.is_empty() {
|
||||
Ok("File doesn't have errors or warnings!".to_string())
|
||||
} else {
|
||||
Ok(output)
|
||||
}
|
||||
})
|
||||
} else {
|
||||
let project = project.read(cx);
|
||||
let mut output = String::new();
|
||||
let mut has_diagnostics = false;
|
||||
|
||||
for (project_path, _, summary) in project.diagnostic_summaries(true, cx) {
|
||||
if summary.error_count > 0 || summary.warning_count > 0 {
|
||||
let Some(worktree) = project.worktree_for_id(project_path.worktree_id, cx)
|
||||
else {
|
||||
continue;
|
||||
};
|
||||
|
||||
has_diagnostics = true;
|
||||
output.push_str(&format!(
|
||||
"{}: {} error(s), {} warning(s)\n",
|
||||
Path::new(worktree.read(cx).root_name())
|
||||
.join(project_path.path)
|
||||
.display(),
|
||||
summary.error_count,
|
||||
summary.warning_count
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
if has_diagnostics {
|
||||
Task::ready(Ok(output))
|
||||
} else {
|
||||
Task::ready(Ok("No errors or warnings found in the project.".to_string()))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
crates/assistant_tools/src/diagnostics_tool/description.md (new file, 16 lines)
@@ -0,0 +1,16 @@
Get errors and warnings for the project or a specific file.

This tool can be invoked after a series of edits to determine if further edits are necessary, or if the user asks to fix errors or warnings in their codebase.

When a path is provided, shows all diagnostics for that specific file.
When no path is provided, shows a summary of error and warning counts for all files in the project.

<example>
To get diagnostics for a specific file:
{
"path": "src/main.rs"
}

To get a project-wide diagnostic summary:
{}
</example>
crates/assistant_tools/src/edit_files_tool.rs (new file, 360 lines)
@@ -0,0 +1,360 @@
|
||||
mod edit_action;
|
||||
pub mod log;
|
||||
mod resolve_search_block;
|
||||
|
||||
use anyhow::{anyhow, Context, Result};
|
||||
use assistant_tool::{ActionLog, Tool};
|
||||
use collections::HashSet;
|
||||
use edit_action::{EditAction, EditActionParser};
|
||||
use futures::StreamExt;
|
||||
use gpui::{App, AsyncApp, Entity, Task};
|
||||
use language::OffsetRangeExt;
|
||||
use language_model::{
|
||||
LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage, MessageContent, Role,
|
||||
};
|
||||
use log::{EditToolLog, EditToolRequestId};
|
||||
use project::Project;
|
||||
use resolve_search_block::resolve_search_block;
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::fmt::Write;
|
||||
use std::sync::Arc;
|
||||
use util::ResultExt;
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct EditFilesToolInput {
|
||||
/// High-level edit instructions. These will be interpreted by a smaller
|
||||
/// model, so explain the changes you want that model to make and which
|
||||
/// file paths need changing.
|
||||
///
|
||||
/// The description should be concise and clear. We will show this
|
||||
/// description to the user as well.
|
||||
///
|
||||
/// WARNING: When specifying which file paths need changing, you MUST
|
||||
/// start each path with one of the project's root directories.
|
||||
///
|
||||
/// WARNING: NEVER include code blocks or snippets in edit instructions.
|
||||
/// Only provide natural language descriptions of the changes needed! The tool will
|
||||
/// reject any instructions that contain code blocks or snippets.
|
||||
///
|
||||
/// The following examples assume we have two root directories in the project:
|
||||
/// - root-1
|
||||
/// - root-2
|
||||
///
|
||||
/// <example>
|
||||
/// If you want to introduce a new quit function to kill the process, your
|
||||
/// instructions should be: "Add a new `quit` function to
|
||||
/// `root-1/src/main.rs` to kill the process".
|
||||
///
|
||||
/// Notice how the file path starts with root-1. Without that, the path
|
||||
/// would be ambiguous and the call would fail!
|
||||
/// </example>
|
||||
///
|
||||
/// <example>
|
||||
/// If you want to change documentation to always start with a capital
|
||||
/// letter, your instructions should be: "In `root-2/db.js`,
|
||||
/// `root-2/inMemory.js` and `root-2/sql.js`, change all the documentation
|
||||
/// to start with a capital letter".
|
||||
///
|
||||
/// Notice how we never specify code snippets in the instructions!
|
||||
/// </example>
|
||||
pub edit_instructions: String,
|
||||
}
|
||||
|
||||
pub struct EditFilesTool;
|
||||
|
||||
impl Tool for EditFilesTool {
|
||||
fn name(&self) -> String {
|
||||
"edit-files".into()
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
include_str!("./edit_files_tool/description.md").into()
|
||||
}
|
||||
|
||||
fn input_schema(&self) -> serde_json::Value {
|
||||
let schema = schemars::schema_for!(EditFilesToolInput);
|
||||
serde_json::to_value(&schema).unwrap()
|
||||
}
|
||||
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
input: serde_json::Value,
|
||||
messages: &[LanguageModelRequestMessage],
|
||||
project: Entity<Project>,
|
||||
action_log: Entity<ActionLog>,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<String>> {
|
||||
let input = match serde_json::from_value::<EditFilesToolInput>(input) {
|
||||
Ok(input) => input,
|
||||
Err(err) => return Task::ready(Err(anyhow!(err))),
|
||||
};
|
||||
|
||||
match EditToolLog::try_global(cx) {
|
||||
Some(log) => {
|
||||
let req_id = log.update(cx, |log, cx| {
|
||||
log.new_request(input.edit_instructions.clone(), cx)
|
||||
});
|
||||
|
||||
let task = EditToolRequest::new(
|
||||
input,
|
||||
messages,
|
||||
project,
|
||||
action_log,
|
||||
Some((log.clone(), req_id)),
|
||||
cx,
|
||||
);
|
||||
|
||||
cx.spawn(|mut cx| async move {
|
||||
let result = task.await;
|
||||
|
||||
let str_result = match &result {
|
||||
Ok(out) => Ok(out.clone()),
|
||||
Err(err) => Err(err.to_string()),
|
||||
};
|
||||
|
||||
log.update(&mut cx, |log, cx| {
|
||||
log.set_tool_output(req_id, str_result, cx)
|
||||
})
|
||||
.log_err();
|
||||
|
||||
result
|
||||
})
|
||||
}
|
||||
|
||||
None => EditToolRequest::new(input, messages, project, action_log, None, cx),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct EditToolRequest {
|
||||
parser: EditActionParser,
|
||||
output: String,
|
||||
changed_buffers: HashSet<Entity<language::Buffer>>,
|
||||
project: Entity<Project>,
|
||||
action_log: Entity<ActionLog>,
|
||||
tool_log: Option<(Entity<EditToolLog>, EditToolRequestId)>,
|
||||
}
|
||||
|
||||
impl EditToolRequest {
|
||||
fn new(
|
||||
input: EditFilesToolInput,
|
||||
messages: &[LanguageModelRequestMessage],
|
||||
project: Entity<Project>,
|
||||
action_log: Entity<ActionLog>,
|
||||
tool_log: Option<(Entity<EditToolLog>, EditToolRequestId)>,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<String>> {
|
||||
let model_registry = LanguageModelRegistry::read_global(cx);
|
||||
let Some(model) = model_registry.editor_model() else {
|
||||
return Task::ready(Err(anyhow!("No editor model configured")));
|
||||
};
|
||||
|
||||
let mut messages = messages.to_vec();
|
||||
// Remove the last tool use (this run) to prevent an invalid request
|
||||
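// Note: providers typically reject a request whose final assistant message ends in a tool_use
// with no matching tool_result, so the dangling entry from this run is stripped below.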
'outer: for message in messages.iter_mut().rev() {
|
||||
for (index, content) in message.content.iter().enumerate().rev() {
|
||||
match content {
|
||||
MessageContent::ToolUse(_) => {
|
||||
message.content.remove(index);
|
||||
break 'outer;
|
||||
}
|
||||
MessageContent::ToolResult(_) => {
|
||||
// If we find any tool results before a tool use, the request is already valid
|
||||
break 'outer;
|
||||
}
|
||||
MessageContent::Text(_) | MessageContent::Image(_) => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
messages.push(LanguageModelRequestMessage {
|
||||
role: Role::User,
|
||||
content: vec![
|
||||
include_str!("./edit_files_tool/edit_prompt.md").into(),
|
||||
input.edit_instructions.into(),
|
||||
],
|
||||
cache: false,
|
||||
});
|
||||
|
||||
cx.spawn(|mut cx| async move {
|
||||
let llm_request = LanguageModelRequest {
|
||||
messages,
|
||||
tools: vec![],
|
||||
stop: vec![],
|
||||
temperature: Some(0.0),
|
||||
};
|
||||
|
||||
let stream = model.stream_completion_text(llm_request, &cx);
|
||||
let mut chunks = stream.await?;
|
||||
|
||||
let mut request = Self {
|
||||
parser: EditActionParser::new(),
|
||||
// we start with the success header so we don't need to shift the output in the common case
|
||||
output: Self::SUCCESS_OUTPUT_HEADER.to_string(),
|
||||
changed_buffers: HashSet::default(),
|
||||
action_log,
|
||||
project,
|
||||
tool_log,
|
||||
};
|
||||
|
||||
while let Some(chunk) = chunks.stream.next().await {
|
||||
request.process_response_chunk(&chunk?, &mut cx).await?;
|
||||
}
|
||||
|
||||
request.finalize(&mut cx).await
|
||||
})
|
||||
}
|
||||
|
||||
async fn process_response_chunk(&mut self, chunk: &str, cx: &mut AsyncApp) -> Result<()> {
|
||||
let new_actions = self.parser.parse_chunk(chunk);
|
||||
|
||||
if let Some((ref log, req_id)) = self.tool_log {
|
||||
log.update(cx, |log, cx| {
|
||||
log.push_editor_response_chunk(req_id, chunk, &new_actions, cx)
|
||||
})
|
||||
.log_err();
|
||||
}
|
||||
|
||||
for action in new_actions {
|
||||
self.apply_action(action, cx).await?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn apply_action(
|
||||
&mut self,
|
||||
(action, source): (EditAction, String),
|
||||
cx: &mut AsyncApp,
|
||||
) -> Result<()> {
|
||||
let project_path = self.project.read_with(cx, |project, cx| {
|
||||
project
|
||||
.find_project_path(action.file_path(), cx)
|
||||
.context("Path not found in project")
|
||||
})??;
|
||||
|
||||
let buffer = self
|
||||
.project
|
||||
.update(cx, |project, cx| project.open_buffer(project_path, cx))?
|
||||
.await?;
|
||||
|
||||
let diff = match action {
|
||||
EditAction::Replace {
|
||||
old,
|
||||
new,
|
||||
file_path: _,
|
||||
} => {
|
||||
let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?;
|
||||
|
||||
let diff = cx
|
||||
.background_executor()
|
||||
.spawn(Self::replace_diff(old, new, snapshot))
|
||||
.await;
|
||||
|
||||
anyhow::Ok(diff)
|
||||
}
|
||||
EditAction::Write { content, .. } => Ok(buffer
|
||||
.read_with(cx, |buffer, cx| buffer.diff(content, cx))?
|
||||
.await),
|
||||
}?;
|
||||
|
||||
let _clock = buffer.update(cx, |buffer, cx| buffer.apply_diff(diff, cx))?;
|
||||
|
||||
write!(&mut self.output, "\n\n{}", source)?;
|
||||
self.changed_buffers.insert(buffer);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn replace_diff(
|
||||
old: String,
|
||||
new: String,
|
||||
snapshot: language::BufferSnapshot,
|
||||
) -> language::Diff {
|
||||
let edit_range = resolve_search_block(&snapshot, &old).to_offset(&snapshot);
|
||||
let diff = language::text_diff(&old, &new);
|
||||
|
||||
let edits = diff
|
||||
.into_iter()
|
||||
.map(|(old_range, text)| {
|
||||
let start = edit_range.start + old_range.start;
|
||||
let end = edit_range.start + old_range.end;
|
||||
(start..end, text)
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let diff = language::Diff {
|
||||
base_version: snapshot.version().clone(),
|
||||
line_ending: snapshot.line_ending(),
|
||||
edits,
|
||||
};
|
||||
|
||||
diff
|
||||
}
|
||||
|
||||
const SUCCESS_OUTPUT_HEADER: &str = "Successfully applied. Here's a list of changes:";
|
||||
const ERROR_OUTPUT_HEADER_NO_EDITS: &str = "I couldn't apply any edits!";
|
||||
const ERROR_OUTPUT_HEADER_WITH_EDITS: &str =
|
||||
"Errors occurred. First, here's a list of the edits we managed to apply:";
|
||||
|
||||
async fn finalize(self, cx: &mut AsyncApp) -> Result<String> {
|
||||
let changed_buffer_count = self.changed_buffers.len();
|
||||
|
||||
// Save each buffer once at the end
|
||||
for buffer in &self.changed_buffers {
|
||||
self.project
|
||||
.update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))?
|
||||
.await?;
|
||||
}
|
||||
|
||||
self.action_log
|
||||
.update(cx, |log, cx| log.buffer_edited(self.changed_buffers, cx))
|
||||
.log_err();
|
||||
|
||||
let errors = self.parser.errors();
|
||||
|
||||
if errors.is_empty() {
|
||||
if changed_buffer_count == 0 {
|
||||
return Err(anyhow!(
|
||||
"The instructions didn't lead to any changes. You might need to consult the file contents first."
|
||||
));
|
||||
}
|
||||
|
||||
Ok(self.output)
|
||||
} else {
|
||||
let mut output = self.output;
|
||||
|
||||
if output.is_empty() {
|
||||
output.replace_range(
|
||||
0..Self::SUCCESS_OUTPUT_HEADER.len(),
|
||||
Self::ERROR_OUTPUT_HEADER_NO_EDITS,
|
||||
);
|
||||
} else {
|
||||
output.replace_range(
|
||||
0..Self::SUCCESS_OUTPUT_HEADER.len(),
|
||||
Self::ERROR_OUTPUT_HEADER_WITH_EDITS,
|
||||
);
|
||||
}
|
||||
|
||||
if !errors.is_empty() {
|
||||
writeln!(
|
||||
&mut output,
|
||||
"\n\nThese SEARCH/REPLACE blocks failed to parse:"
|
||||
)?;
|
||||
|
||||
for error in errors {
|
||||
writeln!(&mut output, "- {}", error)?;
|
||||
}
|
||||
}
|
||||
|
||||
writeln!(
|
||||
&mut output,
|
||||
"\nYou can fix errors by running the tool again. You can include instructions, \
|
||||
but errors are part of the conversation so you don't need to repeat them."
|
||||
)?;
|
||||
|
||||
Err(anyhow!(output))
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,5 @@
Edit files in the current project by specifying instructions in natural language.

When using this tool, you should suggest one coherent edit that can be made to the codebase.

When the set of edits you want to make is large or complex, feel free to invoke this tool multiple times, each time focusing on a specific change you want to make.
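
For instance (a hypothetical instruction; the file path is illustrative), the edit instructions might read:

<example>
"Add a new `quit` function to `root-1/src/main.rs` to kill the process."
</example>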
crates/assistant_tools/src/edit_files_tool/edit_action.rs (new file, 931 lines)
@@ -0,0 +1,931 @@
|
||||
use std::{
|
||||
mem::take,
|
||||
ops::Range,
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
use util::ResultExt;
|
||||
|
||||
/// Represents an edit action to be performed on a file.
|
||||
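/// For example (an illustrative value only, mirroring the tests at the bottom of this file),
/// replacing one function with another in `src/main.rs` would be represented as:
///
/// ```ignore
/// EditAction::Replace {
///     file_path: PathBuf::from("src/main.rs"),
///     old: "fn original() {}".to_string(),
///     new: "fn replacement() {}".to_string(),
/// }
/// ```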
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub enum EditAction {
|
||||
/// Replace specific content in a file with new content
|
||||
Replace {
|
||||
file_path: PathBuf,
|
||||
old: String,
|
||||
new: String,
|
||||
},
|
||||
/// Write content to a file (create or overwrite)
|
||||
Write { file_path: PathBuf, content: String },
|
||||
}
|
||||
|
||||
impl EditAction {
|
||||
pub fn file_path(&self) -> &Path {
|
||||
match self {
|
||||
EditAction::Replace { file_path, .. } => file_path,
|
||||
EditAction::Write { file_path, .. } => file_path,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Parses edit actions from an LLM response.
|
||||
/// See system.md for more details on the format.
|
||||
#[derive(Debug)]
|
||||
pub struct EditActionParser {
|
||||
state: State,
|
||||
line: usize,
|
||||
column: usize,
|
||||
marker_ix: usize,
|
||||
action_source: Vec<u8>,
|
||||
fence_start_offset: usize,
|
||||
block_range: Range<usize>,
|
||||
old_range: Range<usize>,
|
||||
new_range: Range<usize>,
|
||||
errors: Vec<ParseError>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
enum State {
|
||||
/// Anywhere outside an action
|
||||
Default,
|
||||
/// After opening ```, in optional language tag
|
||||
OpenFence,
|
||||
/// In SEARCH marker
|
||||
SearchMarker,
|
||||
/// In search block or divider
|
||||
SearchBlock,
|
||||
/// In replace block or REPLACE marker
|
||||
ReplaceBlock,
|
||||
/// In closing ```
|
||||
CloseFence,
|
||||
}
|
||||
|
||||
impl EditActionParser {
|
||||
/// Creates a new `EditActionParser`
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
state: State::Default,
|
||||
line: 1,
|
||||
column: 0,
|
||||
action_source: Vec::new(),
|
||||
fence_start_offset: 0,
|
||||
marker_ix: 0,
|
||||
block_range: Range::default(),
|
||||
old_range: Range::default(),
|
||||
new_range: Range::default(),
|
||||
errors: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Processes a chunk of input text and returns any completed edit actions.
|
||||
///
|
||||
/// This method can be called repeatedly with fragments of input. The parser
|
||||
/// maintains its state between calls, allowing you to process streaming input
|
||||
/// as it becomes available. Actions are only inserted once they are fully parsed.
|
||||
///
|
||||
/// If a block fails to parse, it will simply be skipped and an error will be recorded.
|
||||
/// All errors can be accessed through the `EditActionParser::errors` method.
|
||||
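///
/// # Example
///
/// A minimal sketch of streaming use (the chunk boundaries here are illustrative; see the tests below):
///
/// ```ignore
/// let mut parser = EditActionParser::new();
/// // The first chunk ends mid-block, so no action is returned yet.
/// assert!(parser.parse_chunk("src/main.rs\n```\n<<<<<<< SEARCH\nfn original() {}\n").is_empty());
/// // The closing chunk completes the block and yields one action.
/// let actions = parser.parse_chunk("=======\nfn replacement() {}\n>>>>>>> REPLACE\n```\n");
/// assert_eq!(actions.len(), 1);
/// ```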
pub fn parse_chunk(&mut self, input: &str) -> Vec<(EditAction, String)> {
|
||||
use State::*;
|
||||
|
||||
const FENCE: &[u8] = b"```";
|
||||
const SEARCH_MARKER: &[u8] = b"<<<<<<< SEARCH";
|
||||
const DIVIDER: &[u8] = b"=======";
|
||||
const NL_DIVIDER: &[u8] = b"\n=======";
|
||||
const REPLACE_MARKER: &[u8] = b">>>>>>> REPLACE";
|
||||
const NL_REPLACE_MARKER: &[u8] = b"\n>>>>>>> REPLACE";
|
||||
|
||||
let mut actions = Vec::new();
|
||||
|
||||
for byte in input.bytes() {
|
||||
// Update line and column tracking
|
||||
if byte == b'\n' {
|
||||
self.line += 1;
|
||||
self.column = 0;
|
||||
} else {
|
||||
self.column += 1;
|
||||
}
|
||||
|
||||
let action_offset = self.action_source.len();
|
||||
|
||||
match &self.state {
|
||||
Default => match self.match_marker(byte, FENCE, false) {
|
||||
MarkerMatch::Complete => {
|
||||
self.fence_start_offset = action_offset + 1 - FENCE.len();
|
||||
self.to_state(OpenFence);
|
||||
}
|
||||
MarkerMatch::Partial => {}
|
||||
MarkerMatch::None => {
|
||||
if self.marker_ix > 0 {
|
||||
self.marker_ix = 0;
|
||||
} else if self.action_source.ends_with(b"\n") {
|
||||
self.action_source.clear();
|
||||
}
|
||||
}
|
||||
},
|
||||
OpenFence => {
|
||||
// skip language tag
|
||||
if byte == b'\n' {
|
||||
self.to_state(SearchMarker);
|
||||
}
|
||||
}
|
||||
SearchMarker => {
|
||||
if self.expect_marker(byte, SEARCH_MARKER, true) {
|
||||
self.to_state(SearchBlock);
|
||||
}
|
||||
}
|
||||
SearchBlock => {
|
||||
if self.extend_block_range(byte, DIVIDER, NL_DIVIDER) {
|
||||
self.old_range = take(&mut self.block_range);
|
||||
self.to_state(ReplaceBlock);
|
||||
}
|
||||
}
|
||||
ReplaceBlock => {
|
||||
if self.extend_block_range(byte, REPLACE_MARKER, NL_REPLACE_MARKER) {
|
||||
self.new_range = take(&mut self.block_range);
|
||||
self.to_state(CloseFence);
|
||||
}
|
||||
}
|
||||
CloseFence => {
|
||||
if self.expect_marker(byte, FENCE, false) {
|
||||
self.action_source.push(byte);
|
||||
|
||||
if let Some(action) = self.action() {
|
||||
actions.push(action);
|
||||
}
|
||||
|
||||
self.errors();
|
||||
self.reset();
|
||||
|
||||
continue;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
self.action_source.push(byte);
|
||||
}
|
||||
|
||||
actions
|
||||
}
|
||||
|
||||
/// Returns a reference to the errors encountered during parsing.
|
||||
pub fn errors(&self) -> &[ParseError] {
|
||||
&self.errors
|
||||
}
|
||||
|
||||
fn action(&mut self) -> Option<(EditAction, String)> {
|
||||
let old_range = take(&mut self.old_range);
|
||||
let new_range = take(&mut self.new_range);
|
||||
|
||||
let action_source = take(&mut self.action_source);
|
||||
let action_source = String::from_utf8(action_source).log_err()?;
|
||||
|
||||
let mut file_path_bytes = action_source[..self.fence_start_offset].to_owned();
|
||||
|
||||
if file_path_bytes.ends_with("\n") {
|
||||
file_path_bytes.pop();
|
||||
if file_path_bytes.ends_with("\r") {
|
||||
file_path_bytes.pop();
|
||||
}
|
||||
}
|
||||
|
||||
let file_path = PathBuf::from(file_path_bytes);
|
||||
|
||||
if old_range.is_empty() && new_range.is_empty() {
|
||||
self.push_error(ParseErrorKind::NoOp);
|
||||
return None;
|
||||
}
|
||||
|
||||
if old_range.is_empty() {
|
||||
return Some((
|
||||
EditAction::Write {
|
||||
file_path,
|
||||
content: action_source[new_range].to_owned(),
|
||||
},
|
||||
action_source,
|
||||
));
|
||||
}
|
||||
|
||||
let old = action_source[old_range].to_owned();
|
||||
let new = action_source[new_range].to_owned();
|
||||
|
||||
let action = EditAction::Replace {
|
||||
file_path,
|
||||
old,
|
||||
new,
|
||||
};
|
||||
|
||||
Some((action, action_source))
|
||||
}
|
||||
|
||||
fn to_state(&mut self, state: State) {
|
||||
self.state = state;
|
||||
self.marker_ix = 0;
|
||||
}
|
||||
|
||||
fn reset(&mut self) {
|
||||
self.action_source.clear();
|
||||
self.block_range = Range::default();
|
||||
self.old_range = Range::default();
|
||||
self.new_range = Range::default();
|
||||
self.fence_start_offset = 0;
|
||||
self.marker_ix = 0;
|
||||
self.to_state(State::Default);
|
||||
}
|
||||
|
||||
fn push_error(&mut self, kind: ParseErrorKind) {
|
||||
self.errors.push(ParseError {
|
||||
line: self.line,
|
||||
column: self.column,
|
||||
kind,
|
||||
});
|
||||
}
|
||||
|
||||
fn expect_marker(&mut self, byte: u8, marker: &'static [u8], trailing_newline: bool) -> bool {
|
||||
match self.match_marker(byte, marker, trailing_newline) {
|
||||
MarkerMatch::Complete => true,
|
||||
MarkerMatch::Partial => false,
|
||||
MarkerMatch::None => {
|
||||
self.push_error(ParseErrorKind::ExpectedMarker {
|
||||
expected: marker,
|
||||
found: byte,
|
||||
});
|
||||
self.reset();
|
||||
false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn extend_block_range(&mut self, byte: u8, marker: &[u8], nl_marker: &[u8]) -> bool {
|
||||
let marker = if self.block_range.is_empty() {
|
||||
// do not require another newline if block is empty
|
||||
marker
|
||||
} else {
|
||||
nl_marker
|
||||
};
|
||||
|
||||
let offset = self.action_source.len();
|
||||
|
||||
match self.match_marker(byte, marker, true) {
|
||||
MarkerMatch::Complete => {
|
||||
if self.action_source[self.block_range.clone()].ends_with(b"\r") {
|
||||
self.block_range.end -= 1;
|
||||
}
|
||||
|
||||
true
|
||||
}
|
||||
MarkerMatch::Partial => false,
|
||||
MarkerMatch::None => {
|
||||
if self.marker_ix > 0 {
|
||||
self.marker_ix = 0;
|
||||
self.block_range.end = offset;
|
||||
|
||||
// The beginning of the marker might still match the current byte
|
||||
match self.match_marker(byte, marker, true) {
|
||||
MarkerMatch::Complete => return true,
|
||||
MarkerMatch::Partial => return false,
|
||||
MarkerMatch::None => { /* no match, keep collecting */ }
|
||||
}
|
||||
}
|
||||
|
||||
if self.block_range.is_empty() {
|
||||
self.block_range.start = offset;
|
||||
}
|
||||
self.block_range.end = offset + 1;
|
||||
|
||||
false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn match_marker(&mut self, byte: u8, marker: &[u8], trailing_newline: bool) -> MarkerMatch {
|
||||
if trailing_newline && self.marker_ix >= marker.len() {
|
||||
if byte == b'\n' {
|
||||
MarkerMatch::Complete
|
||||
} else if byte == b'\r' {
|
||||
MarkerMatch::Partial
|
||||
} else {
|
||||
MarkerMatch::None
|
||||
}
|
||||
} else if byte == marker[self.marker_ix] {
|
||||
self.marker_ix += 1;
|
||||
|
||||
if self.marker_ix < marker.len() || trailing_newline {
|
||||
MarkerMatch::Partial
|
||||
} else {
|
||||
MarkerMatch::Complete
|
||||
}
|
||||
} else {
|
||||
MarkerMatch::None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
enum MarkerMatch {
|
||||
None,
|
||||
Partial,
|
||||
Complete,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
pub struct ParseError {
|
||||
line: usize,
|
||||
column: usize,
|
||||
kind: ParseErrorKind,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq)]
|
||||
pub enum ParseErrorKind {
|
||||
ExpectedMarker { expected: &'static [u8], found: u8 },
|
||||
NoOp,
|
||||
}
|
||||
|
||||
impl std::fmt::Display for ParseErrorKind {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
ParseErrorKind::ExpectedMarker { expected, found } => {
|
||||
write!(
|
||||
f,
|
||||
"Expected marker {:?}, found {:?}",
|
||||
String::from_utf8_lossy(expected),
|
||||
*found as char
|
||||
)
|
||||
}
|
||||
ParseErrorKind::NoOp => {
|
||||
write!(f, "No search or replace")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for ParseError {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "input:{}:{}: {}", self.line, self.column, self.kind)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use rand::prelude::*;
|
||||
use util::line_endings;
|
||||
|
||||
#[test]
|
||||
fn test_simple_edit_action() {
|
||||
let input = r#"src/main.rs
|
||||
```
|
||||
<<<<<<< SEARCH
|
||||
fn original() {}
|
||||
=======
|
||||
fn replacement() {}
|
||||
>>>>>>> REPLACE
|
||||
```
|
||||
"#;
|
||||
|
||||
let mut parser = EditActionParser::new();
|
||||
let actions = parser.parse_chunk(input);
|
||||
|
||||
assert_no_errors(&parser);
|
||||
assert_eq!(actions.len(), 1);
|
||||
assert_eq!(
|
||||
actions[0].0,
|
||||
EditAction::Replace {
|
||||
file_path: PathBuf::from("src/main.rs"),
|
||||
old: "fn original() {}".to_string(),
|
||||
new: "fn replacement() {}".to_string(),
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_with_language_tag() {
|
||||
let input = r#"src/main.rs
|
||||
```rust
|
||||
<<<<<<< SEARCH
|
||||
fn original() {}
|
||||
=======
|
||||
fn replacement() {}
|
||||
>>>>>>> REPLACE
|
||||
```
|
||||
"#;
|
||||
|
||||
let mut parser = EditActionParser::new();
|
||||
let actions = parser.parse_chunk(input);
|
||||
|
||||
assert_no_errors(&parser);
|
||||
assert_eq!(actions.len(), 1);
|
||||
assert_eq!(
|
||||
actions[0].0,
|
||||
EditAction::Replace {
|
||||
file_path: PathBuf::from("src/main.rs"),
|
||||
old: "fn original() {}".to_string(),
|
||||
new: "fn replacement() {}".to_string(),
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_with_surrounding_text() {
|
||||
let input = r#"Here's a modification I'd like to make to the file:
|
||||
|
||||
src/main.rs
|
||||
```rust
|
||||
<<<<<<< SEARCH
|
||||
fn original() {}
|
||||
=======
|
||||
fn replacement() {}
|
||||
>>>>>>> REPLACE
|
||||
```
|
||||
|
||||
This change makes the function better.
|
||||
"#;
|
||||
|
||||
let mut parser = EditActionParser::new();
|
||||
let actions = parser.parse_chunk(input);
|
||||
|
||||
assert_no_errors(&parser);
|
||||
assert_eq!(actions.len(), 1);
|
||||
assert_eq!(
|
||||
actions[0].0,
|
||||
EditAction::Replace {
|
||||
file_path: PathBuf::from("src/main.rs"),
|
||||
old: "fn original() {}".to_string(),
|
||||
new: "fn replacement() {}".to_string(),
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_multiple_edit_actions() {
|
||||
let input = r#"First change:
|
||||
src/main.rs
|
||||
```
|
||||
<<<<<<< SEARCH
|
||||
fn original() {}
|
||||
=======
|
||||
fn replacement() {}
|
||||
>>>>>>> REPLACE
|
||||
```
|
||||
|
||||
Second change:
|
||||
src/utils.rs
|
||||
```rust
|
||||
<<<<<<< SEARCH
|
||||
fn old_util() -> bool { false }
|
||||
=======
|
||||
fn new_util() -> bool { true }
|
||||
>>>>>>> REPLACE
|
||||
```
|
||||
"#;
|
||||
|
||||
let mut parser = EditActionParser::new();
|
||||
let actions = parser.parse_chunk(input);
|
||||
|
||||
assert_no_errors(&parser);
|
||||
assert_eq!(actions.len(), 2);
|
||||
|
||||
let (action, _) = &actions[0];
|
||||
assert_eq!(
|
||||
action,
|
||||
&EditAction::Replace {
|
||||
file_path: PathBuf::from("src/main.rs"),
|
||||
old: "fn original() {}".to_string(),
|
||||
new: "fn replacement() {}".to_string(),
|
||||
}
|
||||
);
|
||||
let (action2, _) = &actions[1];
|
||||
assert_eq!(
|
||||
action2,
|
||||
&EditAction::Replace {
|
||||
file_path: PathBuf::from("src/utils.rs"),
|
||||
old: "fn old_util() -> bool { false }".to_string(),
|
||||
new: "fn new_util() -> bool { true }".to_string(),
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_multiline() {
|
||||
let input = r#"src/main.rs
|
||||
```rust
|
||||
<<<<<<< SEARCH
|
||||
fn original() {
|
||||
println!("This is the original function");
|
||||
let x = 42;
|
||||
if x > 0 {
|
||||
println!("Positive number");
|
||||
}
|
||||
}
|
||||
=======
|
||||
fn replacement() {
|
||||
println!("This is the replacement function");
|
||||
let x = 100;
|
||||
if x > 50 {
|
||||
println!("Large number");
|
||||
} else {
|
||||
println!("Small number");
|
||||
}
|
||||
}
|
||||
>>>>>>> REPLACE
|
||||
```
|
||||
"#;
|
||||
|
||||
let mut parser = EditActionParser::new();
|
||||
let actions = parser.parse_chunk(input);
|
||||
|
||||
assert_no_errors(&parser);
|
||||
assert_eq!(actions.len(), 1);
|
||||
|
||||
let (action, _) = &actions[0];
|
||||
assert_eq!(
|
||||
action,
|
||||
&EditAction::Replace {
|
||||
file_path: PathBuf::from("src/main.rs"),
|
||||
old: "fn original() {\n println!(\"This is the original function\");\n let x = 42;\n if x > 0 {\n println!(\"Positive number\");\n }\n}".to_string(),
|
||||
new: "fn replacement() {\n println!(\"This is the replacement function\");\n let x = 100;\n if x > 50 {\n println!(\"Large number\");\n } else {\n println!(\"Small number\");\n }\n}".to_string(),
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_write_action() {
|
||||
let input = r#"Create a new main.rs file:
|
||||
|
||||
src/main.rs
|
||||
```rust
|
||||
<<<<<<< SEARCH
|
||||
=======
|
||||
fn new_function() {
|
||||
println!("This function is being added");
|
||||
}
|
||||
>>>>>>> REPLACE
|
||||
```
|
||||
"#;
|
||||
|
||||
let mut parser = EditActionParser::new();
|
||||
let actions = parser.parse_chunk(input);
|
||||
|
||||
assert_no_errors(&parser);
|
||||
assert_eq!(actions.len(), 1);
|
||||
assert_eq!(
|
||||
actions[0].0,
|
||||
EditAction::Write {
|
||||
file_path: PathBuf::from("src/main.rs"),
|
||||
content: "fn new_function() {\n println!(\"This function is being added\");\n}"
|
||||
.to_string(),
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_empty_replace() {
|
||||
let input = r#"src/main.rs
|
||||
```rust
|
||||
<<<<<<< SEARCH
|
||||
fn this_will_be_deleted() {
|
||||
println!("Deleting this function");
|
||||
}
|
||||
=======
|
||||
>>>>>>> REPLACE
|
||||
```
|
||||
"#;
|
||||
|
||||
let mut parser = EditActionParser::new();
|
||||
let actions = parser.parse_chunk(&input);
|
||||
|
||||
assert_no_errors(&parser);
|
||||
assert_eq!(actions.len(), 1);
|
||||
assert_eq!(
|
||||
actions[0].0,
|
||||
EditAction::Replace {
|
||||
file_path: PathBuf::from("src/main.rs"),
|
||||
old: "fn this_will_be_deleted() {\n println!(\"Deleting this function\");\n}"
|
||||
.to_string(),
|
||||
new: "".to_string(),
|
||||
}
|
||||
);
|
||||
|
||||
let mut parser = EditActionParser::new();
|
||||
let actions = parser.parse_chunk(&input.replace("\n", "\r\n"));
|
||||
assert_no_errors(&parser);
|
||||
assert_eq!(actions.len(), 1);
|
||||
assert_eq!(
|
||||
actions[0].0,
|
||||
EditAction::Replace {
|
||||
file_path: PathBuf::from("src/main.rs"),
|
||||
old:
|
||||
"fn this_will_be_deleted() {\r\n println!(\"Deleting this function\");\r\n}"
|
||||
.to_string(),
|
||||
new: "".to_string(),
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_empty_both() {
|
||||
let input = r#"src/main.rs
|
||||
```rust
|
||||
<<<<<<< SEARCH
|
||||
=======
|
||||
>>>>>>> REPLACE
|
||||
```
|
||||
"#;
|
||||
|
||||
let mut parser = EditActionParser::new();
|
||||
let actions = parser.parse_chunk(input);
|
||||
|
||||
// Should not create an action when both sections are empty
|
||||
assert_eq!(actions.len(), 0);
|
||||
|
||||
// Check that the NoOp error was added
|
||||
assert_eq!(parser.errors().len(), 1);
|
||||
match parser.errors()[0].kind {
|
||||
ParseErrorKind::NoOp => {}
|
||||
_ => panic!("Expected NoOp error"),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_resumability() {
|
||||
let input_part1 = r#"src/main.rs
|
||||
```rust
|
||||
<<<<<<< SEARCH
|
||||
fn ori"#;
|
||||
|
||||
let input_part2 = r#"ginal() {}
|
||||
=======
|
||||
fn replacement() {}"#;
|
||||
|
||||
let input_part3 = r#"
|
||||
>>>>>>> REPLACE
|
||||
```
|
||||
"#;
|
||||
|
||||
let mut parser = EditActionParser::new();
|
||||
let actions1 = parser.parse_chunk(input_part1);
|
||||
assert_no_errors(&parser);
|
||||
assert_eq!(actions1.len(), 0);
|
||||
|
||||
let actions2 = parser.parse_chunk(input_part2);
|
||||
// No actions should be complete yet
|
||||
assert_no_errors(&parser);
|
||||
assert_eq!(actions2.len(), 0);
|
||||
|
||||
let actions3 = parser.parse_chunk(input_part3);
|
||||
// The third chunk should complete the action
|
||||
assert_no_errors(&parser);
|
||||
assert_eq!(actions3.len(), 1);
|
||||
let (action, _) = &actions3[0];
|
||||
assert_eq!(
|
||||
action,
|
||||
&EditAction::Replace {
|
||||
file_path: PathBuf::from("src/main.rs"),
|
||||
old: "fn original() {}".to_string(),
|
||||
new: "fn replacement() {}".to_string(),
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parser_state_preservation() {
|
||||
let mut parser = EditActionParser::new();
|
||||
let actions1 = parser.parse_chunk("src/main.rs\n```rust\n<<<<<<< SEARCH\n");
|
||||
|
||||
// Check parser is in the correct state
|
||||
assert_no_errors(&parser);
|
||||
assert_eq!(parser.state, State::SearchBlock);
|
||||
assert_eq!(
|
||||
parser.action_source,
|
||||
b"src/main.rs\n```rust\n<<<<<<< SEARCH\n"
|
||||
);
|
||||
|
||||
// Continue parsing
|
||||
let actions2 = parser.parse_chunk("original code\n=======\n");
|
||||
|
||||
assert_no_errors(&parser);
|
||||
assert_eq!(parser.state, State::ReplaceBlock);
|
||||
assert_eq!(
|
||||
&parser.action_source[parser.old_range.clone()],
|
||||
b"original code"
|
||||
);
|
||||
|
||||
let actions3 = parser.parse_chunk("replacement code\n>>>>>>> REPLACE\n```\n");
|
||||
|
||||
// After complete parsing, state should reset
|
||||
assert_no_errors(&parser);
|
||||
assert_eq!(parser.state, State::Default);
|
||||
assert_eq!(parser.action_source, b"\n");
|
||||
assert!(parser.old_range.is_empty());
|
||||
assert!(parser.new_range.is_empty());
|
||||
|
||||
assert_eq!(actions1.len(), 0);
|
||||
assert_eq!(actions2.len(), 0);
|
||||
assert_eq!(actions3.len(), 1);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_invalid_search_marker() {
|
||||
let input = r#"src/main.rs
|
||||
```rust
|
||||
<<<<<<< WRONG_MARKER
|
||||
fn original() {}
|
||||
=======
|
||||
fn replacement() {}
|
||||
>>>>>>> REPLACE
|
||||
```
|
||||
"#;
|
||||
|
||||
let mut parser = EditActionParser::new();
|
||||
let actions = parser.parse_chunk(input);
|
||||
assert_eq!(actions.len(), 0);
|
||||
|
||||
assert_eq!(parser.errors().len(), 1);
|
||||
let error = &parser.errors()[0];
|
||||
|
||||
assert_eq!(
|
||||
error.to_string(),
|
||||
"input:3:9: Expected marker \"<<<<<<< SEARCH\", found 'W'"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_missing_closing_fence() {
|
||||
let input = r#"src/main.rs
|
||||
```rust
|
||||
<<<<<<< SEARCH
|
||||
fn original() {}
|
||||
=======
|
||||
fn replacement() {}
|
||||
>>>>>>> REPLACE
|
||||
<!-- Missing closing fence -->
|
||||
|
||||
src/utils.rs
|
||||
```rust
|
||||
<<<<<<< SEARCH
|
||||
fn utils_func() {}
|
||||
=======
|
||||
fn new_utils_func() {}
|
||||
>>>>>>> REPLACE
|
||||
```
|
||||
"#;
|
||||
|
||||
let mut parser = EditActionParser::new();
|
||||
let actions = parser.parse_chunk(input);
|
||||
|
||||
// Only the second block should be parsed
|
||||
assert_eq!(actions.len(), 1);
|
||||
let (action, _) = &actions[0];
|
||||
assert_eq!(
|
||||
action,
|
||||
&EditAction::Replace {
|
||||
file_path: PathBuf::from("src/utils.rs"),
|
||||
old: "fn utils_func() {}".to_string(),
|
||||
new: "fn new_utils_func() {}".to_string(),
|
||||
}
|
||||
);
|
||||
assert_eq!(parser.errors().len(), 1);
|
||||
assert_eq!(
|
||||
parser.errors()[0].to_string(),
|
||||
"input:8:1: Expected marker \"```\", found '<'".to_string()
|
||||
);
|
||||
|
||||
// The parser should continue after an error
|
||||
assert_eq!(parser.state, State::Default);
|
||||
}
|
||||
|
||||
const SYSTEM_PROMPT: &str = include_str!("./edit_prompt.md");
|
||||
|
||||
#[test]
|
||||
fn test_parse_examples_in_system_prompt() {
|
||||
let mut parser = EditActionParser::new();
|
||||
let actions = parser.parse_chunk(SYSTEM_PROMPT);
|
||||
assert_examples_in_system_prompt(&actions, parser.errors());
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 10)]
|
||||
fn test_random_chunking_of_system_prompt(mut rng: StdRng) {
|
||||
let mut parser = EditActionParser::new();
|
||||
let mut remaining = SYSTEM_PROMPT;
|
||||
let mut actions = Vec::with_capacity(5);
|
||||
|
||||
while !remaining.is_empty() {
|
||||
let chunk_size = rng.gen_range(1..=std::cmp::min(remaining.len(), 100));
|
||||
|
||||
let (chunk, rest) = remaining.split_at(chunk_size);
|
||||
|
||||
let chunk_actions = parser.parse_chunk(chunk);
|
||||
actions.extend(chunk_actions);
|
||||
remaining = rest;
|
||||
}
|
||||
|
||||
assert_examples_in_system_prompt(&actions, parser.errors());
|
||||
}
|
||||
|
||||
fn assert_examples_in_system_prompt(actions: &[(EditAction, String)], errors: &[ParseError]) {
|
||||
assert_eq!(actions.len(), 5);
|
||||
|
||||
assert_eq!(
|
||||
actions[0].0,
|
||||
EditAction::Replace {
|
||||
file_path: PathBuf::from("mathweb/flask/app.py"),
|
||||
old: "from flask import Flask".to_string(),
|
||||
new: line_endings!("import math\nfrom flask import Flask").to_string(),
|
||||
},
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
actions[1].0,
|
||||
EditAction::Replace {
|
||||
file_path: PathBuf::from("mathweb/flask/app.py"),
|
||||
old: line_endings!("def factorial(n):\n \"compute factorial\"\n\n if n == 0:\n return 1\n else:\n return n * factorial(n-1)\n").to_string(),
|
||||
new: "".to_string(),
|
||||
}
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
actions[2].0,
|
||||
EditAction::Replace {
|
||||
file_path: PathBuf::from("mathweb/flask/app.py"),
|
||||
old: " return str(factorial(n))".to_string(),
|
||||
new: " return str(math.factorial(n))".to_string(),
|
||||
},
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
actions[3].0,
|
||||
EditAction::Write {
|
||||
file_path: PathBuf::from("hello.py"),
|
||||
content: line_endings!(
|
||||
"def hello():\n \"print a greeting\"\n\n print(\"hello\")"
|
||||
)
|
||||
.to_string(),
|
||||
},
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
actions[4].0,
|
||||
EditAction::Replace {
|
||||
file_path: PathBuf::from("main.py"),
|
||||
old: line_endings!(
|
||||
"def hello():\n \"print a greeting\"\n\n print(\"hello\")"
|
||||
)
|
||||
.to_string(),
|
||||
new: "from hello import hello".to_string(),
|
||||
},
|
||||
);
|
||||
|
||||
// The system prompt includes some text that would produce errors
|
||||
assert_eq!(
|
||||
errors[0].to_string(),
|
||||
"input:102:1: Expected marker \"<<<<<<< SEARCH\", found '3'"
|
||||
);
|
||||
#[cfg(not(windows))]
|
||||
assert_eq!(
|
||||
errors[1].to_string(),
|
||||
"input:109:0: Expected marker \"<<<<<<< SEARCH\", found '\\n'"
|
||||
);
|
||||
#[cfg(windows)]
|
||||
assert_eq!(
|
||||
errors[1].to_string(),
|
||||
"input:108:1: Expected marker \"<<<<<<< SEARCH\", found '\\r'"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_print_error() {
|
||||
let input = r#"src/main.rs
|
||||
```rust
|
||||
<<<<<<< WRONG_MARKER
|
||||
fn original() {}
|
||||
=======
|
||||
fn replacement() {}
|
||||
>>>>>>> REPLACE
|
||||
```
|
||||
"#;
|
||||
|
||||
let mut parser = EditActionParser::new();
|
||||
parser.parse_chunk(input);
|
||||
|
||||
assert_eq!(parser.errors().len(), 1);
|
||||
let error = &parser.errors()[0];
|
||||
let expected_error = r#"input:3:9: Expected marker "<<<<<<< SEARCH", found 'W'"#;
|
||||
|
||||
assert_eq!(format!("{}", error), expected_error);
|
||||
}
|
||||
|
||||
// helpers
|
||||
|
||||
fn assert_no_errors(parser: &EditActionParser) {
|
||||
let errors = parser.errors();
|
||||
|
||||
assert!(
|
||||
errors.is_empty(),
|
||||
"Expected no errors, but found:\n\n{}",
|
||||
errors
|
||||
.iter()
|
||||
.map(|e| e.to_string())
|
||||
.collect::<Vec<String>>()
|
||||
.join("\n")
|
||||
);
|
||||
}
|
||||
}
|
||||
crates/assistant_tools/src/edit_files_tool/edit_prompt.md (new file, 134 lines)
@@ -0,0 +1,134 @@
|
||||
Act as an expert software developer.
|
||||
Always use best practices when coding.
|
||||
Respect and use existing conventions, libraries, etc. that are already present in the code base.
|
||||
|
||||
Take requests for changes to the supplied code.
|
||||
Always reply to the user in the same language they are using.
|
||||
|
||||
Once you understand the request, you MUST:
|
||||
|
||||
1. Decide if you need to propose *SEARCH/REPLACE* edits to any files that haven't been added to the chat. You can create new files without asking!
|
||||
|
||||
2. Think step-by-step and explain the needed changes in a few short sentences.
|
||||
|
||||
3. Describe each change with a *SEARCH/REPLACE block* per the examples below.
|
||||
|
||||
All changes to files must use this *SEARCH/REPLACE block* format.
|
||||
ONLY EVER RETURN CODE IN A *SEARCH/REPLACE BLOCK*!
|
||||
|
||||
# Example conversations:
|
||||
|
||||
## USER: Change get_factorial() to use math.factorial
|
||||
|
||||
## ASSISTANT: To make this change we need to modify `mathweb/flask/app.py` to:
|
||||
|
||||
1. Import the math package.
|
||||
2. Remove the existing factorial() function.
|
||||
3. Update get_factorial() to call math.factorial instead.
|
||||
|
||||
Here are the *SEARCH/REPLACE* blocks:
|
||||
|
||||
mathweb/flask/app.py
|
||||
```python
|
||||
<<<<<<< SEARCH
|
||||
from flask import Flask
|
||||
=======
|
||||
import math
|
||||
from flask import Flask
|
||||
>>>>>>> REPLACE
|
||||
```
|
||||
|
||||
mathweb/flask/app.py
|
||||
```python
|
||||
<<<<<<< SEARCH
|
||||
def factorial(n):
|
||||
"compute factorial"
|
||||
|
||||
if n == 0:
|
||||
return 1
|
||||
else:
|
||||
return n * factorial(n-1)
|
||||
|
||||
=======
|
||||
>>>>>>> REPLACE
|
||||
```
|
||||
|
||||
mathweb/flask/app.py
|
||||
```python
|
||||
<<<<<<< SEARCH
|
||||
return str(factorial(n))
|
||||
=======
|
||||
return str(math.factorial(n))
|
||||
>>>>>>> REPLACE
|
||||
```
|
||||
|
||||
|
||||
## USER: Refactor hello() into its own file.
|
||||
|
||||
## ASSISTANT: To make this change we need to modify `main.py` and make a new file `hello.py`:
|
||||
|
||||
1. Make a new hello.py file with hello() in it.
|
||||
2. Remove hello() from main.py and replace it with an import.
|
||||
|
||||
Here are the *SEARCH/REPLACE* blocks:
|
||||
|
||||
hello.py
|
||||
```python
|
||||
<<<<<<< SEARCH
|
||||
=======
|
||||
def hello():
|
||||
"print a greeting"
|
||||
|
||||
print("hello")
|
||||
>>>>>>> REPLACE
|
||||
```
|
||||
|
||||
main.py
|
||||
```python
|
||||
<<<<<<< SEARCH
|
||||
def hello():
|
||||
"print a greeting"
|
||||
|
||||
print("hello")
|
||||
=======
|
||||
from hello import hello
|
||||
>>>>>>> REPLACE
|
||||
```
|
||||
# *SEARCH/REPLACE block* Rules:
|
||||
|
||||
Every *SEARCH/REPLACE block* must use this format:
|
||||
1. The *FULL* file path alone on a line, verbatim. No bold asterisks, no quotes around it, no escaping of characters, etc.
|
||||
2. The opening fence and code language, eg: ```python
|
||||
3. The start of search block: <<<<<<< SEARCH
|
||||
4. A contiguous chunk of lines to search for in the existing source code
|
||||
5. The dividing line: =======
|
||||
6. The lines to replace into the source code
|
||||
7. The end of the replace block: >>>>>>> REPLACE
|
||||
8. The closing fence: ```
|
||||
|
||||
Use the *FULL* file path, as shown to you by the user. Make sure to include the project's root directory name at the start of the path. *NEVER* specify the absolute path of the file!
|
||||
|
||||
Every *SEARCH* section must *EXACTLY MATCH* the existing file content, character for character, including all comments, docstrings, etc.
|
||||
If the file contains code or other data wrapped/escaped in json/xml/quotes or other containers, you need to propose edits to the literal contents of the file, including the container markup.
|
||||
|
||||
*SEARCH/REPLACE* blocks will *only* replace the first match occurrence.
|
||||
Include multiple unique *SEARCH/REPLACE* blocks if needed.
|
||||
Include enough lines in each SEARCH section to uniquely match each set of lines that need to change.
|
||||
|
||||
Keep *SEARCH/REPLACE* blocks concise.
|
||||
Break large *SEARCH/REPLACE* blocks into a series of smaller blocks that each change a small portion of the file.
|
||||
Include just the changing lines, and a few surrounding lines if needed for uniqueness.
|
||||
Do not include long runs of unchanging lines in *SEARCH/REPLACE* blocks.
|
||||
|
||||
Only create *SEARCH/REPLACE* blocks for files that the user has added to the chat!
|
||||
|
||||
To move code within a file, use 2 *SEARCH/REPLACE* blocks: 1 to delete it from its current location, 1 to insert it in the new location.
|
||||
|
||||
Pay attention to which filenames the user wants you to edit, especially if they are asking you to create a new file.
|
||||
|
||||
If you want to put code in a new file, use a *SEARCH/REPLACE block* with:
|
||||
- A new file path, including dir name if needed
|
||||
- An empty `SEARCH` section
|
||||
- The new file's contents in the `REPLACE` section
|
||||
|
||||
ONLY EVER RETURN CODE IN A *SEARCH/REPLACE BLOCK*!
|
||||
crates/assistant_tools/src/edit_files_tool/log.rs (new file, 417 lines)
@@ -0,0 +1,417 @@
|
||||
use std::path::Path;
|
||||
|
||||
use collections::HashSet;
|
||||
use feature_flags::FeatureFlagAppExt;
|
||||
use gpui::{
|
||||
actions, list, prelude::*, App, Empty, Entity, EventEmitter, FocusHandle, Focusable, Global,
|
||||
ListAlignment, ListState, SharedString, Subscription, Window,
|
||||
};
|
||||
use release_channel::ReleaseChannel;
|
||||
use settings::Settings;
|
||||
use ui::prelude::*;
|
||||
use workspace::{item::ItemEvent, Item, Workspace, WorkspaceId};
|
||||
|
||||
use super::edit_action::EditAction;
|
||||
|
||||
actions!(debug, [EditTool]);
|
||||
|
||||
pub fn init(cx: &mut App) {
|
||||
if cx.is_staff() || ReleaseChannel::global(cx) == ReleaseChannel::Dev {
|
||||
// Track events even before opening the log
|
||||
EditToolLog::global(cx);
|
||||
}
|
||||
|
||||
cx.observe_new(|workspace: &mut Workspace, _, _| {
|
||||
workspace.register_action(|workspace, _: &EditTool, window, cx| {
|
||||
let viewer = cx.new(EditToolLogViewer::new);
|
||||
workspace.add_item_to_active_pane(Box::new(viewer), None, true, window, cx)
|
||||
});
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
|
||||
pub struct GlobalEditToolLog(Entity<EditToolLog>);
|
||||
|
||||
impl Global for GlobalEditToolLog {}
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct EditToolLog {
|
||||
requests: Vec<EditToolRequest>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Hash, Eq, PartialEq)]
|
||||
pub struct EditToolRequestId(u32);
|
||||
|
||||
impl EditToolLog {
|
||||
pub fn global(cx: &mut App) -> Entity<Self> {
|
||||
match Self::try_global(cx) {
|
||||
Some(entity) => entity,
|
||||
None => {
|
||||
let entity = cx.new(|_cx| Self::default());
|
||||
cx.set_global(GlobalEditToolLog(entity.clone()));
|
||||
entity
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn try_global(cx: &App) -> Option<Entity<Self>> {
|
||||
cx.try_global::<GlobalEditToolLog>()
|
||||
.map(|log| log.0.clone())
|
||||
}
|
||||
|
||||
pub fn new_request(
|
||||
&mut self,
|
||||
instructions: String,
|
||||
cx: &mut Context<Self>,
|
||||
) -> EditToolRequestId {
|
||||
let id = EditToolRequestId(self.requests.len() as u32);
|
||||
self.requests.push(EditToolRequest {
|
||||
id,
|
||||
instructions,
|
||||
editor_response: None,
|
||||
tool_output: None,
|
||||
parsed_edits: Vec::new(),
|
||||
});
|
||||
cx.emit(EditToolLogEvent::Inserted);
|
||||
id
|
||||
}
|
||||
|
||||
pub fn push_editor_response_chunk(
|
||||
&mut self,
|
||||
id: EditToolRequestId,
|
||||
chunk: &str,
|
||||
new_actions: &[(EditAction, String)],
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
if let Some(request) = self.requests.get_mut(id.0 as usize) {
|
||||
match &mut request.editor_response {
|
||||
None => {
|
||||
request.editor_response = Some(chunk.to_string());
|
||||
}
|
||||
Some(response) => {
|
||||
response.push_str(chunk);
|
||||
}
|
||||
}
|
||||
request
|
||||
.parsed_edits
|
||||
.extend(new_actions.iter().cloned().map(|(action, _)| action));
|
||||
|
||||
cx.emit(EditToolLogEvent::Updated);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn set_tool_output(
|
||||
&mut self,
|
||||
id: EditToolRequestId,
|
||||
tool_output: Result<String, String>,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
if let Some(request) = self.requests.get_mut(id.0 as usize) {
|
||||
request.tool_output = Some(tool_output);
|
||||
cx.emit(EditToolLogEvent::Updated);
|
||||
}
|
||||
}
|
||||
}
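For orientation, the request lifecycle above is: `new_request` allocates an id and emits `Inserted`, `push_editor_response_chunk` accumulates streamed text plus parsed actions and emits `Updated`, and `set_tool_output` records the final result. A minimal sketch of driving that API from a `&mut App` context (the instruction string, chunk, and output strings are placeholders):

```rust
// Sketch only: exercises the EditToolLog API shown above.
let log = EditToolLog::global(cx);
let id = log.update(cx, |log, cx| {
    log.new_request("Rename `foo` to `bar`".to_string(), cx)
});
log.update(cx, |log, cx| {
    log.push_editor_response_chunk(id, "streamed editor output...", &[], cx)
});
log.update(cx, |log, cx| {
    log.set_tool_output(id, Ok("Applied 2 edits".to_string()), cx)
});
```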
|
||||
|
||||
enum EditToolLogEvent {
|
||||
Inserted,
|
||||
Updated,
|
||||
}
|
||||
|
||||
impl EventEmitter<EditToolLogEvent> for EditToolLog {}
|
||||
|
||||
pub struct EditToolRequest {
|
||||
id: EditToolRequestId,
|
||||
instructions: String,
|
||||
// we don't use a result here because the error might have occurred after we got a response
|
||||
editor_response: Option<String>,
|
||||
parsed_edits: Vec<EditAction>,
|
||||
tool_output: Option<Result<String, String>>,
|
||||
}
|
||||
|
||||
pub struct EditToolLogViewer {
|
||||
focus_handle: FocusHandle,
|
||||
log: Entity<EditToolLog>,
|
||||
list_state: ListState,
|
||||
expanded_edits: HashSet<(EditToolRequestId, usize)>,
|
||||
_subscription: Subscription,
|
||||
}
|
||||
|
||||
impl EditToolLogViewer {
|
||||
pub fn new(cx: &mut Context<Self>) -> Self {
|
||||
let log = EditToolLog::global(cx);
|
||||
|
||||
let subscription = cx.subscribe(&log, Self::handle_log_event);
|
||||
|
||||
Self {
|
||||
focus_handle: cx.focus_handle(),
|
||||
log: log.clone(),
|
||||
list_state: ListState::new(
|
||||
log.read(cx).requests.len(),
|
||||
ListAlignment::Bottom,
|
||||
px(1024.),
|
||||
{
|
||||
let this = cx.entity().downgrade();
|
||||
move |ix, window: &mut Window, cx: &mut App| {
|
||||
this.update(cx, |this, cx| this.render_request(ix, window, cx))
|
||||
.unwrap()
|
||||
}
|
||||
},
|
||||
),
|
||||
expanded_edits: HashSet::default(),
|
||||
_subscription: subscription,
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_log_event(
|
||||
&mut self,
|
||||
_: Entity<EditToolLog>,
|
||||
event: &EditToolLogEvent,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
match event {
|
||||
EditToolLogEvent::Inserted => {
|
||||
let count = self.list_state.item_count();
|
||||
self.list_state.splice(count..count, 1);
|
||||
}
|
||||
EditToolLogEvent::Updated => {}
|
||||
}
|
||||
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
fn render_request(
|
||||
&self,
|
||||
index: usize,
|
||||
_window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> AnyElement {
|
||||
let requests = &self.log.read(cx).requests;
|
||||
let request = &requests[index];
|
||||
|
||||
v_flex()
|
||||
.gap_3()
|
||||
.child(Self::render_section(IconName::ArrowRight, "Tool Input"))
|
||||
.child(request.instructions.clone())
|
||||
.py_5()
|
||||
.when(index + 1 < requests.len(), |element| {
|
||||
element
|
||||
.border_b_1()
|
||||
.border_color(cx.theme().colors().border)
|
||||
})
|
||||
.map(|parent| match &request.editor_response {
|
||||
None => {
|
||||
if request.tool_output.is_none() {
|
||||
parent.child("...")
|
||||
} else {
|
||||
parent
|
||||
}
|
||||
}
|
||||
Some(response) => parent
|
||||
.child(Self::render_section(
|
||||
IconName::ZedAssistant,
|
||||
"Editor Response",
|
||||
))
|
||||
.child(Label::new(response.clone()).buffer_font(cx)),
|
||||
})
|
||||
.when(!request.parsed_edits.is_empty(), |parent| {
|
||||
parent
|
||||
.child(Self::render_section(IconName::Microscope, "Parsed Edits"))
|
||||
.child(
|
||||
v_flex()
|
||||
.gap_2()
|
||||
.children(request.parsed_edits.iter().enumerate().map(
|
||||
|(index, edit)| {
|
||||
self.render_edit_action(edit, request.id, index, cx)
|
||||
},
|
||||
)),
|
||||
)
|
||||
})
|
||||
.when_some(request.tool_output.as_ref(), |parent, output| {
|
||||
parent
|
||||
.child(Self::render_section(IconName::ArrowLeft, "Tool Output"))
|
||||
.child(match output {
|
||||
Ok(output) => Label::new(output.clone()).color(Color::Success),
|
||||
Err(error) => Label::new(error.clone()).color(Color::Error),
|
||||
})
|
||||
})
|
||||
.into_any()
|
||||
}
|
||||
|
||||
fn render_section(icon: IconName, title: &'static str) -> AnyElement {
|
||||
h_flex()
|
||||
.gap_1()
|
||||
.child(Icon::new(icon).color(Color::Muted))
|
||||
.child(Label::new(title).size(LabelSize::Small).color(Color::Muted))
|
||||
.into_any()
|
||||
}
|
||||
|
||||
fn render_edit_action(
|
||||
&self,
|
||||
edit_action: &EditAction,
|
||||
request_id: EditToolRequestId,
|
||||
index: usize,
|
||||
cx: &Context<Self>,
|
||||
) -> AnyElement {
|
||||
let expanded_id = (request_id, index);
|
||||
|
||||
match edit_action {
|
||||
EditAction::Replace {
|
||||
file_path,
|
||||
old,
|
||||
new,
|
||||
} => self
|
||||
.render_edit_action_container(
|
||||
expanded_id,
|
||||
&file_path,
|
||||
[
|
||||
Self::render_block(IconName::MagnifyingGlass, "Search", old.clone(), cx)
|
||||
.border_r_1()
|
||||
.border_color(cx.theme().colors().border)
|
||||
.into_any(),
|
||||
Self::render_block(IconName::Replace, "Replace", new.clone(), cx)
|
||||
.into_any(),
|
||||
],
|
||||
cx,
|
||||
)
|
||||
.into_any(),
|
||||
EditAction::Write { file_path, content } => self
|
||||
.render_edit_action_container(
|
||||
expanded_id,
|
||||
&file_path,
|
||||
[
|
||||
Self::render_block(IconName::Pencil, "Write", content.clone(), cx)
|
||||
.into_any(),
|
||||
],
|
||||
cx,
|
||||
)
|
||||
.into_any(),
|
||||
}
|
||||
}
|
||||
|
||||
fn render_edit_action_container(
|
||||
&self,
|
||||
expanded_id: (EditToolRequestId, usize),
|
||||
file_path: &Path,
|
||||
content: impl IntoIterator<Item = AnyElement>,
|
||||
cx: &Context<Self>,
|
||||
) -> AnyElement {
|
||||
let is_expanded = self.expanded_edits.contains(&expanded_id);
|
||||
|
||||
v_flex()
|
||||
.child(
|
||||
h_flex()
|
||||
.bg(cx.theme().colors().element_background)
|
||||
.border_1()
|
||||
.border_color(cx.theme().colors().border)
|
||||
.rounded_t_md()
|
||||
.when(!is_expanded, |el| el.rounded_b_md())
|
||||
.py_1()
|
||||
.px_2()
|
||||
.gap_1()
|
||||
.child(
|
||||
ui::Disclosure::new(ElementId::Integer(expanded_id.1), is_expanded)
|
||||
.on_click(cx.listener(move |this, _ev, _window, cx| {
|
||||
if is_expanded {
|
||||
this.expanded_edits.remove(&expanded_id);
|
||||
} else {
|
||||
this.expanded_edits.insert(expanded_id);
|
||||
}
|
||||
|
||||
cx.notify();
|
||||
})),
|
||||
)
|
||||
.child(Label::new(file_path.display().to_string()).size(LabelSize::Small)),
|
||||
)
|
||||
.child(if is_expanded {
|
||||
h_flex()
|
||||
.border_1()
|
||||
.border_t_0()
|
||||
.border_color(cx.theme().colors().border)
|
||||
.rounded_b_md()
|
||||
.children(content)
|
||||
.into_any()
|
||||
} else {
|
||||
Empty.into_any()
|
||||
})
|
||||
.into_any()
|
||||
}
|
||||
|
||||
fn render_block(icon: IconName, title: &'static str, content: String, cx: &App) -> Div {
|
||||
v_flex()
|
||||
.p_1()
|
||||
.gap_1()
|
||||
.flex_1()
|
||||
.h_full()
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_1()
|
||||
.child(Icon::new(icon).color(Color::Muted))
|
||||
.child(Label::new(title).size(LabelSize::Small).color(Color::Muted)),
|
||||
)
|
||||
.font(theme::ThemeSettings::get_global(cx).buffer_font.clone())
|
||||
.text_sm()
|
||||
.child(content)
|
||||
.child(div().flex_1())
|
||||
}
|
||||
}
|
||||
|
||||
impl EventEmitter<()> for EditToolLogViewer {}
|
||||
|
||||
impl Focusable for EditToolLogViewer {
|
||||
fn focus_handle(&self, _: &App) -> gpui::FocusHandle {
|
||||
self.focus_handle.clone()
|
||||
}
|
||||
}
|
||||
|
||||
impl Item for EditToolLogViewer {
|
||||
type Event = ();
|
||||
|
||||
fn to_item_events(_: &Self::Event, _: impl FnMut(ItemEvent)) {}
|
||||
|
||||
fn tab_content_text(&self, _window: &Window, _cx: &App) -> Option<SharedString> {
|
||||
Some("Edit Tool Log".into())
|
||||
}
|
||||
|
||||
fn telemetry_event_text(&self) -> Option<&'static str> {
|
||||
None
|
||||
}
|
||||
|
||||
fn clone_on_split(
|
||||
&self,
|
||||
_workspace_id: Option<WorkspaceId>,
|
||||
_window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Option<Entity<Self>>
|
||||
where
|
||||
Self: Sized,
|
||||
{
|
||||
Some(cx.new(Self::new))
|
||||
}
|
||||
}
|
||||
|
||||
impl Render for EditToolLogViewer {
|
||||
fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
if self.list_state.item_count() == 0 {
|
||||
return v_flex()
|
||||
.justify_center()
|
||||
.size_full()
|
||||
.gap_1()
|
||||
.bg(cx.theme().colors().editor_background)
|
||||
.text_center()
|
||||
.text_lg()
|
||||
.child("No requests yet")
|
||||
.child(
|
||||
div()
|
||||
.text_ui(cx)
|
||||
.child("Go ask the assistant to perform some edits"),
|
||||
);
|
||||
}
|
||||
|
||||
v_flex()
|
||||
.p_4()
|
||||
.bg(cx.theme().colors().editor_background)
|
||||
.size_full()
|
||||
.child(list(self.list_state.clone()).flex_grow())
|
||||
}
|
||||
}
|
||||
crates/assistant_tools/src/edit_files_tool/resolve_search_block.rs (new file, 226 lines)
@@ -0,0 +1,226 @@
|
||||
use language::{Anchor, Bias, BufferSnapshot};
|
||||
use std::ops::Range;
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
|
||||
enum SearchDirection {
|
||||
Up,
|
||||
Left,
|
||||
Diagonal,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
|
||||
struct SearchState {
|
||||
cost: u32,
|
||||
direction: SearchDirection,
|
||||
}
|
||||
|
||||
impl SearchState {
|
||||
fn new(cost: u32, direction: SearchDirection) -> Self {
|
||||
Self { cost, direction }
|
||||
}
|
||||
}
|
||||
|
||||
struct SearchMatrix {
|
||||
cols: usize,
|
||||
data: Vec<SearchState>,
|
||||
}
|
||||
|
||||
impl SearchMatrix {
|
||||
fn new(rows: usize, cols: usize) -> Self {
|
||||
SearchMatrix {
|
||||
cols,
|
||||
data: vec![SearchState::new(0, SearchDirection::Diagonal); rows * cols],
|
||||
}
|
||||
}
|
||||
|
||||
fn get(&self, row: usize, col: usize) -> SearchState {
|
||||
self.data[row * self.cols + col]
|
||||
}
|
||||
|
||||
fn set(&mut self, row: usize, col: usize, cost: SearchState) {
|
||||
self.data[row * self.cols + col] = cost;
|
||||
}
|
||||
}
|
||||
|
||||
pub fn resolve_search_block(buffer: &BufferSnapshot, search_query: &str) -> Range<Anchor> {
|
||||
const INSERTION_COST: u32 = 3;
|
||||
const DELETION_COST: u32 = 10;
|
||||
const WHITESPACE_INSERTION_COST: u32 = 1;
|
||||
const WHITESPACE_DELETION_COST: u32 = 1;
|
||||
|
||||
let buffer_len = buffer.len();
|
||||
let query_len = search_query.len();
|
||||
let mut matrix = SearchMatrix::new(query_len + 1, buffer_len + 1);
|
||||
let mut leading_deletion_cost = 0_u32;
|
||||
for (row, query_byte) in search_query.bytes().enumerate() {
|
||||
let deletion_cost = if query_byte.is_ascii_whitespace() {
|
||||
WHITESPACE_DELETION_COST
|
||||
} else {
|
||||
DELETION_COST
|
||||
};
|
||||
|
||||
leading_deletion_cost = leading_deletion_cost.saturating_add(deletion_cost);
|
||||
matrix.set(
|
||||
row + 1,
|
||||
0,
|
||||
SearchState::new(leading_deletion_cost, SearchDirection::Diagonal),
|
||||
);
|
||||
|
||||
for (col, buffer_byte) in buffer.bytes_in_range(0..buffer.len()).flatten().enumerate() {
|
||||
let insertion_cost = if buffer_byte.is_ascii_whitespace() {
|
||||
WHITESPACE_INSERTION_COST
|
||||
} else {
|
||||
INSERTION_COST
|
||||
};
|
||||
|
||||
let up = SearchState::new(
|
||||
matrix.get(row, col + 1).cost.saturating_add(deletion_cost),
|
||||
SearchDirection::Up,
|
||||
);
|
||||
let left = SearchState::new(
|
||||
matrix.get(row + 1, col).cost.saturating_add(insertion_cost),
|
||||
SearchDirection::Left,
|
||||
);
|
||||
let diagonal = SearchState::new(
|
||||
if query_byte == *buffer_byte {
|
||||
matrix.get(row, col).cost
|
||||
} else {
|
||||
matrix
|
||||
.get(row, col)
|
||||
.cost
|
||||
.saturating_add(deletion_cost + insertion_cost)
|
||||
},
|
||||
SearchDirection::Diagonal,
|
||||
);
|
||||
matrix.set(row + 1, col + 1, up.min(left).min(diagonal));
|
||||
}
|
||||
}
|
||||
|
||||
// Traceback to find the best match
|
||||
let mut best_buffer_end = buffer_len;
|
||||
let mut best_cost = u32::MAX;
|
||||
for col in 1..=buffer_len {
|
||||
let cost = matrix.get(query_len, col).cost;
|
||||
if cost < best_cost {
|
||||
best_cost = cost;
|
||||
best_buffer_end = col;
|
||||
}
|
||||
}
|
||||
|
||||
let mut query_ix = query_len;
|
||||
let mut buffer_ix = best_buffer_end;
|
||||
while query_ix > 0 && buffer_ix > 0 {
|
||||
let current = matrix.get(query_ix, buffer_ix);
|
||||
match current.direction {
|
||||
SearchDirection::Diagonal => {
|
||||
query_ix -= 1;
|
||||
buffer_ix -= 1;
|
||||
}
|
||||
SearchDirection::Up => {
|
||||
query_ix -= 1;
|
||||
}
|
||||
SearchDirection::Left => {
|
||||
buffer_ix -= 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut start = buffer.offset_to_point(buffer.clip_offset(buffer_ix, Bias::Left));
|
||||
start.column = 0;
|
||||
let mut end = buffer.offset_to_point(buffer.clip_offset(best_buffer_end, Bias::Right));
|
||||
if end.column > 0 {
|
||||
end.column = buffer.line_len(end.row);
|
||||
}
|
||||
|
||||
buffer.anchor_after(start)..buffer.anchor_before(end)
|
||||
}
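`resolve_search_block` is a weighted edit distance over bytes: deleting a query byte is expensive, inserting a buffer byte is cheaper, and whitespace on either side is nearly free, so a search block still resolves when the file has been re-indented. Because row 0 of the matrix stays at cost zero, the match may start anywhere; the cheapest cell in the last row marks where it ends, and the traceback walks back to the start. A standalone sketch of the same idea over plain strings (the cost constants mirror the ones above; `fuzzy_match_end` is a made-up name for illustration):

```rust
// Sketch only: weighted edit distance over &str, without Anchor/BufferSnapshot.
fn fuzzy_match_end(text: &str, query: &str) -> Option<usize> {
    const INSERTION_COST: u32 = 3;
    const DELETION_COST: u32 = 10;
    const WHITESPACE_COST: u32 = 1;

    let (t, q): (Vec<u8>, Vec<u8>) = (text.bytes().collect(), query.bytes().collect());
    // cost[i][j]: cheapest alignment of query[..i] against a substring of text ending at j.
    let mut cost = vec![vec![0u32; t.len() + 1]; q.len() + 1];
    for i in 1..=q.len() {
        let del = if q[i - 1].is_ascii_whitespace() { WHITESPACE_COST } else { DELETION_COST };
        cost[i][0] = cost[i - 1][0] + del;
        for j in 1..=t.len() {
            let ins = if t[j - 1].is_ascii_whitespace() { WHITESPACE_COST } else { INSERTION_COST };
            let diag = cost[i - 1][j - 1] + if q[i - 1] == t[j - 1] { 0 } else { ins + del };
            cost[i][j] = (cost[i - 1][j] + del).min(cost[i][j - 1] + ins).min(diag);
        }
    }
    // The match ends at the lowest-cost column in the last row.
    (1..=t.len()).min_by_key(|&j| cost[q.len()][j])
}
```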
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::edit_files_tool::resolve_search_block::resolve_search_block;
|
||||
use gpui::{prelude::*, App};
|
||||
use language::{Buffer, OffsetRangeExt as _};
|
||||
use unindent::Unindent as _;
|
||||
use util::test::{generate_marked_text, marked_text_ranges};
|
||||
|
||||
#[gpui::test]
|
||||
fn test_resolve_search_block(cx: &mut App) {
|
||||
assert_resolved(
|
||||
concat!(
|
||||
" Lorem\n",
|
||||
"« ipsum\n",
|
||||
" dolor sit amet»\n",
|
||||
" consecteur",
|
||||
),
|
||||
"ipsum\ndolor",
|
||||
cx,
|
||||
);
|
||||
|
||||
assert_resolved(
|
||||
&"
|
||||
«fn foo1(a: usize) -> usize {
|
||||
40
|
||||
}»
|
||||
|
||||
fn foo2(b: usize) -> usize {
|
||||
42
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
"fn foo1(b: usize) {\n40\n}",
|
||||
cx,
|
||||
);
|
||||
|
||||
assert_resolved(
|
||||
&"
|
||||
fn main() {
|
||||
« Foo
|
||||
.bar()
|
||||
.baz()
|
||||
.qux()»
|
||||
}
|
||||
|
||||
fn foo2(b: usize) -> usize {
|
||||
42
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
"Foo.bar.baz.qux()",
|
||||
cx,
|
||||
);
|
||||
|
||||
assert_resolved(
|
||||
&"
|
||||
class Something {
|
||||
one() { return 1; }
|
||||
« two() { return 2222; }
|
||||
three() { return 333; }
|
||||
four() { return 4444; }
|
||||
five() { return 5555; }
|
||||
six() { return 6666; }
|
||||
» seven() { return 7; }
|
||||
eight() { return 8; }
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
&"
|
||||
two() { return 2222; }
|
||||
four() { return 4444; }
|
||||
five() { return 5555; }
|
||||
six() { return 6666; }
|
||||
"
|
||||
.unindent(),
|
||||
cx,
|
||||
);
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
fn assert_resolved(text_with_expected_range: &str, query: &str, cx: &mut App) {
|
||||
let (text, _) = marked_text_ranges(text_with_expected_range, false);
|
||||
let buffer = cx.new(|cx| Buffer::local(text.clone(), cx));
|
||||
let snapshot = buffer.read(cx).snapshot();
|
||||
let range = resolve_search_block(&snapshot, query).to_offset(&snapshot);
|
||||
let text_with_actual_range = generate_marked_text(&text, &[range], false);
|
||||
pretty_assertions::assert_eq!(text_with_actual_range, text_with_expected_range);
|
||||
}
|
||||
}
|
||||
crates/assistant_tools/src/list_directory_tool.rs (new file, 99 lines)
@@ -0,0 +1,99 @@
|
||||
use anyhow::{anyhow, Result};
|
||||
use assistant_tool::{ActionLog, Tool};
|
||||
use gpui::{App, Entity, Task};
|
||||
use language_model::LanguageModelRequestMessage;
|
||||
use project::Project;
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{fmt::Write, path::Path, sync::Arc};
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct ListDirectoryToolInput {
|
||||
/// The relative path of the directory to list.
|
||||
///
|
||||
/// This path should never be absolute, and the first component
|
||||
/// of the path should always be a root directory in a project.
|
||||
///
|
||||
/// <example>
|
||||
/// If the project has the following root directories:
|
||||
///
|
||||
/// - directory1
|
||||
/// - directory2
|
||||
///
|
||||
/// You can list the contents of `directory1` by using the path `directory1`.
|
||||
/// </example>
|
||||
///
|
||||
/// <example>
|
||||
/// If the project has the following root directories:
|
||||
///
|
||||
/// - foo
|
||||
/// - bar
|
||||
///
|
||||
/// If you want to list the contents of the directory `foo/baz`, you should use the path `foo/baz`.
|
||||
/// </example>
|
||||
pub path: Arc<Path>,
|
||||
}
|
||||
|
||||
pub struct ListDirectoryTool;
|
||||
|
||||
impl Tool for ListDirectoryTool {
|
||||
fn name(&self) -> String {
|
||||
"list-directory".into()
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
include_str!("./list_directory_tool/description.md").into()
|
||||
}
|
||||
|
||||
fn input_schema(&self) -> serde_json::Value {
|
||||
let schema = schemars::schema_for!(ListDirectoryToolInput);
|
||||
serde_json::to_value(&schema).unwrap()
|
||||
}
|
||||
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
input: serde_json::Value,
|
||||
_messages: &[LanguageModelRequestMessage],
|
||||
project: Entity<Project>,
|
||||
_action_log: Entity<ActionLog>,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<String>> {
|
||||
let input = match serde_json::from_value::<ListDirectoryToolInput>(input) {
|
||||
Ok(input) => input,
|
||||
Err(err) => return Task::ready(Err(anyhow!(err))),
|
||||
};
|
||||
|
||||
let Some(project_path) = project.read(cx).find_project_path(&input.path, cx) else {
|
||||
return Task::ready(Err(anyhow!("Path not found in project")));
|
||||
};
|
||||
let Some(worktree) = project
|
||||
.read(cx)
|
||||
.worktree_for_id(project_path.worktree_id, cx)
|
||||
else {
|
||||
return Task::ready(Err(anyhow!("Worktree not found")));
|
||||
};
|
||||
let worktree = worktree.read(cx);
|
||||
|
||||
let Some(entry) = worktree.entry_for_path(&project_path.path) else {
|
||||
return Task::ready(Err(anyhow!("Path not found: {}", input.path.display())));
|
||||
};
|
||||
|
||||
if !entry.is_dir() {
|
||||
return Task::ready(Err(anyhow!("{} is a file.", input.path.display())));
|
||||
}
|
||||
|
||||
let mut output = String::new();
|
||||
for entry in worktree.child_entries(&project_path.path) {
|
||||
writeln!(
|
||||
output,
|
||||
"{}",
|
||||
Path::new(worktree.root_name()).join(&entry.path).display(),
|
||||
)
|
||||
.unwrap();
|
||||
}
|
||||
if output.is_empty() {
|
||||
return Task::ready(Ok(format!("{} is empty.", input.path.display())));
|
||||
}
|
||||
Task::ready(Ok(output))
|
||||
}
|
||||
}
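In short, `run` resolves the relative path with `find_project_path`, rejects anything that is not a directory, and returns one line per child entry prefixed with the worktree root name. A hypothetical payload the input schema above would accept (the path itself is made up):

```rust
// Hypothetical tool input; "directory1/src" is an invented root-relative path.
let input = serde_json::json!({ "path": "directory1/src" });
```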
|
||||
@@ -0,0 +1 @@
|
||||
Lists files and directories in a given path.
|
||||
@@ -1,84 +0,0 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use anyhow::{anyhow, Result};
|
||||
use assistant_tool::Tool;
|
||||
use gpui::{App, Task, WeakEntity, Window};
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use workspace::Workspace;
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct ListWorktreesToolInput {}
|
||||
|
||||
pub struct ListWorktreesTool;
|
||||
|
||||
impl Tool for ListWorktreesTool {
|
||||
fn name(&self) -> String {
|
||||
"list-worktrees".into()
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
"Lists all worktrees in the current project. Use this tool when you need to find available worktrees and their IDs.".into()
|
||||
}
|
||||
|
||||
fn input_schema(&self) -> serde_json::Value {
|
||||
serde_json::json!(
|
||||
{
|
||||
"type": "object",
|
||||
"properties": {},
|
||||
"required": []
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
_input: serde_json::Value,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
_window: &mut Window,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<String>> {
|
||||
let Some(workspace) = workspace.upgrade() else {
|
||||
return Task::ready(Err(anyhow!("workspace dropped")));
|
||||
};
|
||||
|
||||
let project = workspace.read(cx).project().clone();
|
||||
|
||||
cx.spawn(|cx| async move {
|
||||
cx.update(|cx| {
|
||||
#[derive(Debug, Serialize)]
|
||||
struct WorktreeInfo {
|
||||
id: usize,
|
||||
root_name: String,
|
||||
root_dir: Option<String>,
|
||||
}
|
||||
|
||||
let worktrees = project.update(cx, |project, cx| {
|
||||
project
|
||||
.visible_worktrees(cx)
|
||||
.map(|worktree| {
|
||||
worktree.read_with(cx, |worktree, _cx| WorktreeInfo {
|
||||
id: worktree.id().to_usize(),
|
||||
root_dir: worktree
|
||||
.root_dir()
|
||||
.map(|root_dir| root_dir.to_string_lossy().to_string()),
|
||||
root_name: worktree.root_name().to_string(),
|
||||
})
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
});
|
||||
|
||||
if worktrees.is_empty() {
|
||||
return Ok("No worktrees found in the current project.".to_string());
|
||||
}
|
||||
|
||||
let mut result = String::from("Worktrees in the current project:\n\n");
|
||||
for worktree in worktrees {
|
||||
result.push_str(&serde_json::to_string(&worktree)?);
|
||||
}
|
||||
|
||||
Ok(result)
|
||||
})?
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -1,9 +1,11 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use anyhow::{anyhow, Result};
|
||||
use assistant_tool::Tool;
|
||||
use assistant_tool::{ActionLog, Tool};
|
||||
use chrono::{Local, Utc};
|
||||
use gpui::{App, Task, WeakEntity, Window};
|
||||
use gpui::{App, Entity, Task};
|
||||
use language_model::LanguageModelRequestMessage;
|
||||
use project::Project;
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
@@ -41,8 +43,9 @@ impl Tool for NowTool {
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
input: serde_json::Value,
|
||||
_workspace: WeakEntity<workspace::Workspace>,
|
||||
_window: &mut Window,
|
||||
_messages: &[LanguageModelRequestMessage],
|
||||
_project: Entity<Project>,
|
||||
_action_log: Entity<ActionLog>,
|
||||
_cx: &mut App,
|
||||
) -> Task<Result<String>> {
|
||||
let input: NowToolInput = match serde_json::from_value(input) {
|
||||
|
||||
crates/assistant_tools/src/path_search_tool.rs (new file, 94 lines)
@@ -0,0 +1,94 @@
|
||||
use anyhow::{anyhow, Result};
|
||||
use assistant_tool::{ActionLog, Tool};
|
||||
use gpui::{App, AppContext, Entity, Task};
|
||||
use language_model::LanguageModelRequestMessage;
|
||||
use project::Project;
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{path::PathBuf, sync::Arc};
|
||||
use util::paths::PathMatcher;
|
||||
use worktree::Snapshot;
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct PathSearchToolInput {
|
||||
/// The glob to search all project paths for.
|
||||
///
|
||||
/// <example>
|
||||
/// If the project has the following root directories:
|
||||
///
|
||||
/// - directory1/a/something.txt
|
||||
/// - directory2/a/things.txt
|
||||
/// - directory3/a/other.txt
|
||||
///
|
||||
/// You can get back the first two paths by providing a glob of "*thing*.txt"
|
||||
/// </example>
|
||||
pub glob: String,
|
||||
}
|
||||
|
||||
pub struct PathSearchTool;
|
||||
|
||||
impl Tool for PathSearchTool {
|
||||
fn name(&self) -> String {
|
||||
"path-search".into()
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
include_str!("./path_search_tool/description.md").into()
|
||||
}
|
||||
|
||||
fn input_schema(&self) -> serde_json::Value {
|
||||
let schema = schemars::schema_for!(PathSearchToolInput);
|
||||
serde_json::to_value(&schema).unwrap()
|
||||
}
|
||||
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
input: serde_json::Value,
|
||||
_messages: &[LanguageModelRequestMessage],
|
||||
project: Entity<Project>,
|
||||
_action_log: Entity<ActionLog>,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<String>> {
|
||||
let glob = match serde_json::from_value::<PathSearchToolInput>(input) {
|
||||
Ok(input) => input.glob,
|
||||
Err(err) => return Task::ready(Err(anyhow!(err))),
|
||||
};
|
||||
let path_matcher = match PathMatcher::new(&[glob.clone()]) {
|
||||
Ok(matcher) => matcher,
|
||||
Err(err) => return Task::ready(Err(anyhow!("Invalid glob: {}", err))),
|
||||
};
|
||||
let snapshots: Vec<Snapshot> = project
|
||||
.read(cx)
|
||||
.worktrees(cx)
|
||||
.map(|worktree| worktree.read(cx).snapshot())
|
||||
.collect();
|
||||
|
||||
cx.background_spawn(async move {
|
||||
let mut matches = Vec::new();
|
||||
|
||||
for worktree in snapshots {
|
||||
let root_name = worktree.root_name();
|
||||
|
||||
// Don't consider ignored entries.
|
||||
for entry in worktree.entries(false, 0) {
|
||||
if path_matcher.is_match(&entry.path) {
|
||||
matches.push(
|
||||
PathBuf::from(root_name)
|
||||
.join(&entry.path)
|
||||
.to_string_lossy()
|
||||
.to_string(),
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if matches.is_empty() {
|
||||
Ok(format!("No paths in the project matched the glob {glob:?}"))
|
||||
} else {
|
||||
// Sort to group entries in the same directory together.
|
||||
matches.sort();
|
||||
Ok(matches.join("\n"))
|
||||
}
|
||||
})
|
||||
}
|
||||
}
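`run` here snapshots the worktrees on the main thread and then walks entries on a background task, testing each entry's root-relative path against the glob. A hedged sketch of the matching step, assuming `util::paths::PathMatcher` behaves as it is used above (the glob and path come from the doc-comment example):

```rust
// Sketch only: build a matcher from a single glob, then test a root-relative entry path.
let matcher = util::paths::PathMatcher::new(&["*thing*.txt".to_string()])
    .expect("glob should be valid");
let is_hit: bool = matcher.is_match(std::path::Path::new("a/something.txt"));
```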
|
||||
@@ -0,0 +1 @@
|
||||
Returns all the paths in the project which match the given glob.
|
||||
@@ -2,20 +2,29 @@ use std::path::Path;
|
||||
use std::sync::Arc;
|
||||
|
||||
use anyhow::{anyhow, Result};
|
||||
use assistant_tool::Tool;
|
||||
use gpui::{App, Task, WeakEntity, Window};
|
||||
use project::{ProjectPath, WorktreeId};
|
||||
use assistant_tool::{ActionLog, Tool};
|
||||
use gpui::{App, Entity, Task};
|
||||
use language_model::LanguageModelRequestMessage;
|
||||
use project::Project;
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use workspace::Workspace;
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct ReadFileToolInput {
|
||||
/// The ID of the worktree in which the file resides.
|
||||
pub worktree_id: usize,
|
||||
/// The path to the file to read.
|
||||
/// The relative path of the file to read.
|
||||
///
|
||||
/// This path is relative to the worktree root, it must not be an absolute path.
|
||||
/// This path should never be absolute, and the first component
|
||||
/// of the path should always be a root directory in a project.
|
||||
///
|
||||
/// <example>
|
||||
/// If the project has the following root directories:
|
||||
///
|
||||
/// - directory1
|
||||
/// - directory2
|
||||
///
|
||||
/// If you want to access `file.txt` in `directory1`, you should use the path `directory1/file.txt`.
|
||||
/// If you want to access `file.txt` in `directory2`, you should use the path `directory2/file.txt`.
|
||||
/// </example>
|
||||
pub path: Arc<Path>,
|
||||
}
|
||||
|
||||
@@ -27,7 +36,7 @@ impl Tool for ReadFileTool {
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
"Reads the content of a file specified by a worktree ID and path. Use this tool when you need to access the contents of a file in the project.".into()
|
||||
include_str!("./read_file_tool/description.md").into()
|
||||
}
|
||||
|
||||
fn input_schema(&self) -> serde_json::Value {
|
||||
@@ -38,32 +47,43 @@ impl Tool for ReadFileTool {
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
input: serde_json::Value,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
_window: &mut Window,
|
||||
_messages: &[LanguageModelRequestMessage],
|
||||
project: Entity<Project>,
|
||||
action_log: Entity<ActionLog>,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<String>> {
|
||||
let Some(workspace) = workspace.upgrade() else {
|
||||
return Task::ready(Err(anyhow!("workspace dropped")));
|
||||
};
|
||||
|
||||
let input = match serde_json::from_value::<ReadFileToolInput>(input) {
|
||||
Ok(input) => input,
|
||||
Err(err) => return Task::ready(Err(anyhow!(err))),
|
||||
};
|
||||
|
||||
let project = workspace.read(cx).project().clone();
|
||||
let project_path = ProjectPath {
|
||||
worktree_id: WorktreeId::from_usize(input.worktree_id),
|
||||
path: input.path,
|
||||
let Some(project_path) = project.read(cx).find_project_path(&input.path, cx) else {
|
||||
return Task::ready(Err(anyhow!("Path not found in project")));
|
||||
};
|
||||
cx.spawn(|cx| async move {
|
||||
|
||||
cx.spawn(|mut cx| async move {
|
||||
let buffer = cx
|
||||
.update(|cx| {
|
||||
project.update(cx, |project, cx| project.open_buffer(project_path, cx))
|
||||
})?
|
||||
.await?;
|
||||
|
||||
cx.update(|cx| buffer.read(cx).text())
|
||||
let result = buffer.read_with(&cx, |buffer, _cx| {
|
||||
if buffer
|
||||
.file()
|
||||
.map_or(false, |file| file.disk_state().exists())
|
||||
{
|
||||
Ok(buffer.text())
|
||||
} else {
|
||||
Err(anyhow!("File does not exist"))
|
||||
}
|
||||
})??;
|
||||
|
||||
action_log.update(&mut cx, |log, cx| {
|
||||
log.buffer_read(buffer, cx);
|
||||
})?;
|
||||
|
||||
anyhow::Ok(result)
|
||||
})
|
||||
}
|
||||
}
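The visible effect of this change on callers: the `worktree_id` field is gone, the path must start with a project root directory name, the buffer's text is only returned if the file still exists on disk, and the read is recorded in the `ActionLog`. A hypothetical payload under the new schema (path taken from the doc-comment example):

```rust
// Hypothetical tool input after this change: no worktree_id, root-relative path.
let input = serde_json::json!({ "path": "directory1/file.txt" });
```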
|
||||
|
||||
crates/assistant_tools/src/read_file_tool/description.md (new file, 1 line)
@@ -0,0 +1 @@
|
||||
Reads the content of the given file in the project.
|
||||
crates/assistant_tools/src/regex_search.rs (new file, 120 lines)
@@ -0,0 +1,120 @@
|
||||
use anyhow::{anyhow, Result};
|
||||
use assistant_tool::{ActionLog, Tool};
|
||||
use futures::StreamExt;
|
||||
use gpui::{App, Entity, Task};
|
||||
use language::OffsetRangeExt;
|
||||
use language_model::LanguageModelRequestMessage;
|
||||
use project::{search::SearchQuery, Project};
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{cmp, fmt::Write, sync::Arc};
|
||||
use util::paths::PathMatcher;
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct RegexSearchToolInput {
|
||||
/// A regex pattern to search for in the entire project. Note that the regex
|
||||
/// will be parsed by the Rust `regex` crate.
|
||||
pub regex: String,
|
||||
}
|
||||
|
||||
pub struct RegexSearchTool;
|
||||
|
||||
impl Tool for RegexSearchTool {
|
||||
fn name(&self) -> String {
|
||||
"regex-search".into()
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
include_str!("./regex_search_tool/description.md").into()
|
||||
}
|
||||
|
||||
fn input_schema(&self) -> serde_json::Value {
|
||||
let schema = schemars::schema_for!(RegexSearchToolInput);
|
||||
serde_json::to_value(&schema).unwrap()
|
||||
}
|
||||
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
input: serde_json::Value,
|
||||
_messages: &[LanguageModelRequestMessage],
|
||||
project: Entity<Project>,
|
||||
_action_log: Entity<ActionLog>,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<String>> {
|
||||
const CONTEXT_LINES: u32 = 2;
|
||||
|
||||
let input = match serde_json::from_value::<RegexSearchToolInput>(input) {
|
||||
Ok(input) => input,
|
||||
Err(err) => return Task::ready(Err(anyhow!(err))),
|
||||
};
|
||||
|
||||
let query = match SearchQuery::regex(
|
||||
&input.regex,
|
||||
false,
|
||||
false,
|
||||
false,
|
||||
PathMatcher::default(),
|
||||
PathMatcher::default(),
|
||||
None,
|
||||
) {
|
||||
Ok(query) => query,
|
||||
Err(error) => return Task::ready(Err(error)),
|
||||
};
|
||||
|
||||
let results = project.update(cx, |project, cx| project.search(query, cx));
|
||||
cx.spawn(|cx| async move {
|
||||
futures::pin_mut!(results);
|
||||
|
||||
let mut output = String::new();
|
||||
while let Some(project::search::SearchResult::Buffer { buffer, ranges }) =
|
||||
results.next().await
|
||||
{
|
||||
if ranges.is_empty() {
|
||||
continue;
|
||||
}
|
||||
|
||||
buffer.read_with(&cx, |buffer, cx| {
|
||||
if let Some(path) = buffer.file().map(|file| file.full_path(cx)) {
|
||||
writeln!(output, "### Found matches in {}:\n", path.display()).unwrap();
|
||||
let mut ranges = ranges
|
||||
.into_iter()
|
||||
.map(|range| {
|
||||
let mut point_range = range.to_point(buffer);
|
||||
point_range.start.row =
|
||||
point_range.start.row.saturating_sub(CONTEXT_LINES);
|
||||
point_range.start.column = 0;
|
||||
point_range.end.row = cmp::min(
|
||||
buffer.max_point().row,
|
||||
point_range.end.row + CONTEXT_LINES,
|
||||
);
|
||||
point_range.end.column = buffer.line_len(point_range.end.row);
|
||||
point_range
|
||||
})
|
||||
.peekable();
|
||||
|
||||
while let Some(mut range) = ranges.next() {
|
||||
while let Some(next_range) = ranges.peek() {
|
||||
if range.end.row >= next_range.start.row {
|
||||
range.end = next_range.end;
|
||||
ranges.next();
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
writeln!(output, "```").unwrap();
|
||||
output.extend(buffer.text_for_range(range));
|
||||
writeln!(output, "\n```\n").unwrap();
|
||||
}
|
||||
}
|
||||
})?;
|
||||
}
|
||||
|
||||
if output.is_empty() {
|
||||
Ok("No matches found".to_string())
|
||||
} else {
|
||||
Ok(output)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
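The only non-obvious part of `run` above is the excerpt merging: each match range is widened by `CONTEXT_LINES` rows, and ranges whose expanded contexts touch are folded into a single excerpt before printing. That loop, read in isolation (this sketch assumes the row ranges arrive sorted with non-decreasing ends, as the search above produces):

```rust
// Sketch only: merge context-expanded row ranges that touch or overlap.
fn merge_row_ranges(ranges: Vec<std::ops::Range<u32>>) -> Vec<std::ops::Range<u32>> {
    let mut merged = Vec::new();
    let mut iter = ranges.into_iter().peekable();
    while let Some(mut range) = iter.next() {
        while let Some(next) = iter.peek() {
            if range.end >= next.start {
                range.end = next.end;
                iter.next();
            } else {
                break;
            }
        }
        merged.push(range);
    }
    merged
}
```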
|
||||
@@ -0,0 +1,3 @@
|
||||
Searches the entire project for the given regular expression.
|
||||
|
||||
Returns a list of paths that matched the query. For each path, it returns a list of excerpts of the matched text.
|
||||
crates/assistant_tools/src/thinking_tool.rs (new file, 48 lines)
@@ -0,0 +1,48 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use anyhow::{anyhow, Result};
|
||||
use assistant_tool::{ActionLog, Tool};
|
||||
use gpui::{App, Entity, Task};
|
||||
use language_model::LanguageModelRequestMessage;
|
||||
use project::Project;
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct ThinkingToolInput {
|
||||
/// Content to think about. This should be a description of what to think about or
|
||||
/// a problem to solve.
|
||||
content: String,
|
||||
}
|
||||
|
||||
pub struct ThinkingTool;
|
||||
|
||||
impl Tool for ThinkingTool {
|
||||
fn name(&self) -> String {
|
||||
"thinking".to_string()
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
include_str!("./thinking_tool/description.md").to_string()
|
||||
}
|
||||
|
||||
fn input_schema(&self) -> serde_json::Value {
|
||||
let schema = schemars::schema_for!(ThinkingToolInput);
|
||||
serde_json::to_value(&schema).unwrap()
|
||||
}
|
||||
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
input: serde_json::Value,
|
||||
_messages: &[LanguageModelRequestMessage],
|
||||
_project: Entity<Project>,
|
||||
_action_log: Entity<ActionLog>,
|
||||
_cx: &mut App,
|
||||
) -> Task<Result<String>> {
|
||||
// This tool just "thinks out loud" and doesn't perform any actions.
|
||||
Task::ready(match serde_json::from_value::<ThinkingToolInput>(input) {
|
||||
Ok(_input) => Ok("Finished thinking.".to_string()),
|
||||
Err(err) => Err(anyhow!(err)),
|
||||
})
|
||||
}
|
||||
}
|
||||
crates/assistant_tools/src/thinking_tool/description.md (new file, 1 line)
@@ -0,0 +1 @@
|
||||
A tool for thinking through problems, brainstorming ideas, or planning without executing any actions. Use this tool when you need to work through complex problems, develop strategies, or outline approaches before taking action.
|
||||
@@ -93,7 +93,7 @@ fn view_release_notes_locally(
|
||||
|
||||
let tab_description = SharedString::from(body.title.to_string());
|
||||
let editor = cx.new(|cx| {
|
||||
Editor::for_multibuffer(buffer, Some(project), true, window, cx)
|
||||
Editor::for_multibuffer(buffer, Some(project), window, cx)
|
||||
});
|
||||
let workspace_handle = workspace.weak_handle();
|
||||
let markdown_preview: Entity<MarkdownPreviewView> =
|
||||
|
||||
@@ -22,6 +22,7 @@ git2.workspace = true
|
||||
gpui.workspace = true
|
||||
language.workspace = true
|
||||
log.workspace = true
|
||||
pretty_assertions.workspace = true
|
||||
rope.workspace = true
|
||||
sum_tree.workspace = true
|
||||
text.workspace = true
|
||||
@@ -31,7 +32,6 @@ util.workspace = true
|
||||
ctor.workspace = true
|
||||
env_logger.workspace = true
|
||||
gpui = { workspace = true, features = ["test-support"] }
|
||||
pretty_assertions.workspace = true
|
||||
rand.workspace = true
|
||||
serde_json.workspace = true
|
||||
text = { workspace = true, features = ["test-support"] }
|
||||
|
||||
@@ -6,9 +6,9 @@ use rope::Rope;
|
||||
use std::cmp::Ordering;
|
||||
use std::mem;
|
||||
use std::{future::Future, iter, ops::Range, sync::Arc};
|
||||
use sum_tree::{SumTree, TreeMap};
|
||||
use text::ToOffset as _;
|
||||
use sum_tree::SumTree;
|
||||
use text::{Anchor, Bias, BufferId, OffsetRangeExt, Point};
|
||||
use text::{AnchorRangeExt, ToOffset as _};
|
||||
use util::ResultExt;
|
||||
|
||||
pub struct BufferDiff {
|
||||
@@ -26,7 +26,7 @@ pub struct BufferDiffSnapshot {
|
||||
#[derive(Clone)]
|
||||
struct BufferDiffInner {
|
||||
hunks: SumTree<InternalDiffHunk>,
|
||||
pending_hunks: TreeMap<usize, PendingHunk>,
|
||||
pending_hunks: SumTree<PendingHunk>,
|
||||
base_text: language::BufferSnapshot,
|
||||
base_text_exists: bool,
|
||||
}
|
||||
@@ -48,7 +48,7 @@ pub enum DiffHunkStatusKind {
|
||||
pub enum DiffHunkSecondaryStatus {
|
||||
HasSecondaryHunk,
|
||||
OverlapsWithSecondaryHunk,
|
||||
None,
|
||||
NoSecondaryHunk,
|
||||
SecondaryHunkAdditionPending,
|
||||
SecondaryHunkRemovalPending,
|
||||
}
|
||||
@@ -74,6 +74,8 @@ struct InternalDiffHunk {
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
struct PendingHunk {
|
||||
buffer_range: Range<Anchor>,
|
||||
diff_base_byte_range: Range<usize>,
|
||||
buffer_version: clock::Global,
|
||||
new_status: DiffHunkSecondaryStatus,
|
||||
}
|
||||
@@ -93,6 +95,16 @@ impl sum_tree::Item for InternalDiffHunk {
|
||||
}
|
||||
}
|
||||
|
||||
impl sum_tree::Item for PendingHunk {
|
||||
type Summary = DiffHunkSummary;
|
||||
|
||||
fn summary(&self, _cx: &text::BufferSnapshot) -> Self::Summary {
|
||||
DiffHunkSummary {
|
||||
buffer_range: self.buffer_range.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl sum_tree::Summary for DiffHunkSummary {
|
||||
type Context = text::BufferSnapshot;
|
||||
|
||||
@@ -176,6 +188,7 @@ impl BufferDiffSnapshot {
|
||||
}
|
||||
|
||||
impl BufferDiffInner {
|
||||
/// Returns the new index text and new pending hunks.
|
||||
fn stage_or_unstage_hunks(
|
||||
&mut self,
|
||||
unstaged_diff: &Self,
|
||||
@@ -183,7 +196,7 @@ impl BufferDiffInner {
|
||||
hunks: &[DiffHunk],
|
||||
buffer: &text::BufferSnapshot,
|
||||
file_exists: bool,
|
||||
) -> (Option<Rope>, Vec<(usize, PendingHunk)>) {
|
||||
) -> (Option<Rope>, SumTree<PendingHunk>) {
|
||||
let head_text = self
|
||||
.base_text_exists
|
||||
.then(|| self.base_text.as_rope().clone());
|
||||
@@ -195,41 +208,41 @@ impl BufferDiffInner {
|
||||
// entire file must be either created or deleted in the index.
|
||||
let (index_text, head_text) = match (index_text, head_text) {
|
||||
(Some(index_text), Some(head_text)) if file_exists || !stage => (index_text, head_text),
|
||||
(_, head_text @ _) => {
|
||||
if stage {
|
||||
(index_text, head_text) => {
|
||||
let (rope, new_status) = if stage {
|
||||
log::debug!("stage all");
|
||||
return (
|
||||
(
|
||||
file_exists.then(|| buffer.as_rope().clone()),
|
||||
vec![(
|
||||
0,
|
||||
PendingHunk {
|
||||
buffer_version: buffer.version().clone(),
|
||||
new_status: DiffHunkSecondaryStatus::SecondaryHunkRemovalPending,
|
||||
},
|
||||
)],
|
||||
);
|
||||
DiffHunkSecondaryStatus::SecondaryHunkRemovalPending,
|
||||
)
|
||||
} else {
|
||||
log::debug!("unstage all");
|
||||
return (
|
||||
(
|
||||
head_text,
|
||||
vec![(
|
||||
0,
|
||||
PendingHunk {
|
||||
buffer_version: buffer.version().clone(),
|
||||
new_status: DiffHunkSecondaryStatus::SecondaryHunkAdditionPending,
|
||||
},
|
||||
)],
|
||||
);
|
||||
}
|
||||
DiffHunkSecondaryStatus::SecondaryHunkAdditionPending,
|
||||
)
|
||||
};
|
||||
|
||||
let hunk = PendingHunk {
|
||||
buffer_range: Anchor::MIN..Anchor::MAX,
|
||||
diff_base_byte_range: 0..index_text.map_or(0, |rope| rope.len()),
|
||||
buffer_version: buffer.version().clone(),
|
||||
new_status,
|
||||
};
|
||||
let tree = SumTree::from_item(hunk, buffer);
|
||||
return (rope, tree);
|
||||
}
|
||||
};
|
||||
|
||||
let mut unstaged_hunk_cursor = unstaged_diff.hunks.cursor::<DiffHunkSummary>(buffer);
|
||||
unstaged_hunk_cursor.next(buffer);
|
||||
let mut edits = Vec::new();
|
||||
let mut pending_hunks = Vec::new();
|
||||
let mut prev_unstaged_hunk_buffer_offset = 0;
|
||||
let mut prev_unstaged_hunk_base_text_offset = 0;
|
||||
|
||||
let mut pending_hunks = SumTree::new(buffer);
|
||||
let mut old_pending_hunks = unstaged_diff
|
||||
.pending_hunks
|
||||
.cursor::<DiffHunkSummary>(buffer);
|
||||
|
||||
// first, merge new hunks into pending_hunks
|
||||
for DiffHunk {
|
||||
buffer_range,
|
||||
diff_base_byte_range,
|
||||
@@ -237,12 +250,58 @@ impl BufferDiffInner {
|
||||
..
|
||||
} in hunks.iter().cloned()
|
||||
{
|
||||
if (stage && secondary_status == DiffHunkSecondaryStatus::None)
|
||||
let preceding_pending_hunks =
|
||||
old_pending_hunks.slice(&buffer_range.start, Bias::Left, buffer);
|
||||
|
||||
pending_hunks.append(preceding_pending_hunks, buffer);
|
||||
|
||||
// skip all overlapping old pending hunks
|
||||
while old_pending_hunks
|
||||
.item()
|
||||
.is_some_and(|preceding_pending_hunk_item| {
|
||||
preceding_pending_hunk_item
|
||||
.buffer_range
|
||||
.overlaps(&buffer_range, buffer)
|
||||
})
|
||||
{
|
||||
old_pending_hunks.next(buffer);
|
||||
}
|
||||
|
||||
// merge into pending hunks
|
||||
if (stage && secondary_status == DiffHunkSecondaryStatus::NoSecondaryHunk)
|
||||
|| (!stage && secondary_status == DiffHunkSecondaryStatus::HasSecondaryHunk)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
pending_hunks.push(
|
||||
PendingHunk {
|
||||
buffer_range,
|
||||
diff_base_byte_range,
|
||||
buffer_version: buffer.version().clone(),
|
||||
new_status: if stage {
|
||||
DiffHunkSecondaryStatus::SecondaryHunkRemovalPending
|
||||
} else {
|
||||
DiffHunkSecondaryStatus::SecondaryHunkAdditionPending
|
||||
},
|
||||
},
|
||||
buffer,
|
||||
);
|
||||
}
|
||||
// append the remainder
|
||||
pending_hunks.append(old_pending_hunks.suffix(buffer), buffer);
|
||||
|
||||
let mut prev_unstaged_hunk_buffer_offset = 0;
|
||||
let mut prev_unstaged_hunk_base_text_offset = 0;
|
||||
let mut edits = Vec::<(Range<usize>, String)>::new();
|
||||
|
||||
// then, iterate over all pending hunks (both new ones and the existing ones) and compute the edits
|
||||
for PendingHunk {
|
||||
buffer_range,
|
||||
diff_base_byte_range,
|
||||
..
|
||||
} in pending_hunks.iter().cloned()
|
||||
{
|
||||
let skipped_hunks = unstaged_hunk_cursor.slice(&buffer_range.start, Bias::Left, buffer);
|
||||
|
||||
if let Some(secondary_hunk) = skipped_hunks.last() {
|
||||
@@ -294,22 +353,15 @@ impl BufferDiffInner {
|
||||
.chunks_in_range(diff_base_byte_range.clone())
|
||||
.collect::<String>()
|
||||
};
|
||||
pending_hunks.push((
|
||||
diff_base_byte_range.start,
|
||||
PendingHunk {
|
||||
buffer_version: buffer.version().clone(),
|
||||
new_status: if stage {
|
||||
DiffHunkSecondaryStatus::SecondaryHunkRemovalPending
|
||||
} else {
|
||||
DiffHunkSecondaryStatus::SecondaryHunkAdditionPending
|
||||
},
|
||||
},
|
||||
));
|
||||
|
||||
edits.push((index_range, replacement_text));
|
||||
}
|
||||
|
||||
debug_assert!(edits.iter().is_sorted_by_key(|(range, _)| range.start));
|
||||
|
||||
let mut new_index_text = Rope::new();
|
||||
let mut index_cursor = index_text.cursor(0);
|
||||
|
||||
for (old_range, replacement_text) in edits {
|
||||
new_index_text.append(index_cursor.slice(old_range.start));
|
||||
index_cursor.seek_forward(old_range.end);
|
||||
@@ -354,12 +406,14 @@ impl BufferDiffInner {
|
||||
});
|
||||
|
||||
let mut secondary_cursor = None;
|
||||
let mut pending_hunks = TreeMap::default();
|
||||
let mut pending_hunks_cursor = None;
|
||||
if let Some(secondary) = secondary.as_ref() {
|
||||
let mut cursor = secondary.hunks.cursor::<DiffHunkSummary>(buffer);
|
||||
cursor.next(buffer);
|
||||
secondary_cursor = Some(cursor);
|
||||
pending_hunks = secondary.pending_hunks.clone();
|
||||
let mut cursor = secondary.pending_hunks.cursor::<DiffHunkSummary>(buffer);
|
||||
cursor.next(buffer);
|
||||
pending_hunks_cursor = Some(cursor);
|
||||
}
|
||||
|
||||
let max_point = buffer.max_point();
|
||||
@@ -378,16 +432,33 @@ impl BufferDiffInner {
|
||||
end_anchor = buffer.anchor_before(end_point);
|
||||
}
|
||||
|
||||
let mut secondary_status = DiffHunkSecondaryStatus::None;
|
||||
let mut secondary_status = DiffHunkSecondaryStatus::NoSecondaryHunk;
|
||||
|
||||
let mut has_pending = false;
|
||||
if let Some(pending_hunk) = pending_hunks.get(&start_base) {
|
||||
if !buffer.has_edits_since_in_range(
|
||||
&pending_hunk.buffer_version,
|
||||
start_anchor..end_anchor,
|
||||
) {
|
||||
has_pending = true;
|
||||
secondary_status = pending_hunk.new_status;
|
||||
if let Some(pending_cursor) = pending_hunks_cursor.as_mut() {
|
||||
if start_anchor
|
||||
.cmp(&pending_cursor.start().buffer_range.start, buffer)
|
||||
.is_gt()
|
||||
{
|
||||
pending_cursor.seek_forward(&start_anchor, Bias::Left, buffer);
|
||||
}
|
||||
|
||||
if let Some(pending_hunk) = pending_cursor.item() {
|
||||
let mut pending_range = pending_hunk.buffer_range.to_point(buffer);
|
||||
if pending_range.end.column > 0 {
|
||||
pending_range.end.row += 1;
|
||||
pending_range.end.column = 0;
|
||||
}
|
||||
|
||||
if pending_range == (start_point..end_point) {
|
||||
if !buffer.has_edits_since_in_range(
|
||||
&pending_hunk.buffer_version,
|
||||
start_anchor..end_anchor,
|
||||
) {
|
||||
has_pending = true;
|
||||
secondary_status = pending_hunk.new_status;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -449,7 +520,7 @@ impl BufferDiffInner {
diff_base_byte_range: hunk.diff_base_byte_range.clone(),
buffer_range: hunk.buffer_range.clone(),
// The secondary status is not used by callers of this method.
secondary_status: DiffHunkSecondaryStatus::None,
secondary_status: DiffHunkSecondaryStatus::NoSecondaryHunk,
})
})
}
@@ -724,7 +795,7 @@ impl BufferDiff {
base_text,
hunks,
base_text_exists,
pending_hunks: TreeMap::default(),
pending_hunks: SumTree::new(&buffer),
}
}
}
@@ -740,8 +811,8 @@ impl BufferDiff {
cx.background_spawn(async move {
BufferDiffInner {
base_text: base_text_snapshot,
pending_hunks: SumTree::new(&buffer),
hunks: compute_hunks(base_text_pair, buffer),
pending_hunks: TreeMap::default(),
base_text_exists,
}
})
@@ -751,7 +822,7 @@ impl BufferDiff {
BufferDiffInner {
base_text: language::Buffer::build_empty_snapshot(cx),
hunks: SumTree::new(buffer),
pending_hunks: TreeMap::default(),
pending_hunks: SumTree::new(buffer),
base_text_exists: false,
}
}
@@ -767,7 +838,7 @@ impl BufferDiff {
pub fn clear_pending_hunks(&mut self, cx: &mut Context<Self>) {
if let Some(secondary_diff) = &self.secondary_diff {
secondary_diff.update(cx, |diff, _| {
diff.inner.pending_hunks.clear();
diff.inner.pending_hunks = SumTree::from_summary(DiffHunkSummary::default());
});
cx.emit(BufferDiffEvent::DiffChanged {
changed_range: Some(Anchor::MIN..Anchor::MAX),
@@ -783,18 +854,17 @@ impl BufferDiff {
file_exists: bool,
cx: &mut Context<Self>,
) -> Option<Rope> {
let (new_index_text, pending_hunks) = self.inner.stage_or_unstage_hunks(
let (new_index_text, new_pending_hunks) = self.inner.stage_or_unstage_hunks(
&self.secondary_diff.as_ref()?.read(cx).inner,
stage,
&hunks,
buffer,
file_exists,
);

if let Some(unstaged_diff) = &self.secondary_diff {
unstaged_diff.update(cx, |diff, _| {
for (offset, pending_hunk) in pending_hunks {
diff.inner.pending_hunks.insert(offset, pending_hunk);
}
diff.inner.pending_hunks = new_pending_hunks;
});
}
cx.emit(BufferDiffEvent::HunksStagedOrUnstaged(
@@ -828,7 +898,6 @@ impl BufferDiff {
Some(start..end)
}

#[allow(clippy::too_many_arguments)]
pub async fn update_diff(
this: Entity<BufferDiff>,
buffer: text::BufferSnapshot,
@@ -838,8 +907,8 @@ impl BufferDiff {
language: Option<Arc<Language>>,
language_registry: Option<Arc<LanguageRegistry>>,
cx: &mut AsyncApp,
) -> anyhow::Result<Option<Range<Anchor>>> {
let snapshot = if base_text_changed || language_changed {
) -> anyhow::Result<BufferDiffSnapshot> {
let inner = if base_text_changed || language_changed {
cx.update(|cx| {
Self::build(
buffer.clone(),
@@ -861,18 +930,45 @@ impl BufferDiff {
})?
.await
};

this.update(cx, |this, _| this.set_state(snapshot, &buffer))
Ok(BufferDiffSnapshot {
inner,
secondary_diff: None,
})
}

pub fn update_diff_from(
pub fn set_snapshot(
&mut self,
buffer: &text::BufferSnapshot,
other: &Entity<Self>,
new_snapshot: BufferDiffSnapshot,
language_changed: bool,
secondary_changed_range: Option<Range<Anchor>>,
cx: &mut Context<Self>,
) -> Option<Range<Anchor>> {
let other = other.read(cx).inner.clone();
self.set_state(other, buffer)
let changed_range = self.set_state(new_snapshot.inner, buffer);
if language_changed {
cx.emit(BufferDiffEvent::LanguageChanged);
}

let changed_range = match (secondary_changed_range, changed_range) {
(None, None) => None,
(Some(unstaged_range), None) => self.range_to_hunk_range(unstaged_range, &buffer, cx),
(None, Some(uncommitted_range)) => Some(uncommitted_range),
(Some(unstaged_range), Some(uncommitted_range)) => {
let mut start = uncommitted_range.start;
let mut end = uncommitted_range.end;
if let Some(unstaged_range) = self.range_to_hunk_range(unstaged_range, &buffer, cx)
{
start = unstaged_range.start.min(&uncommitted_range.start, &buffer);
end = unstaged_range.end.max(&uncommitted_range.end, &buffer);
}
Some(start..end)
}
};

cx.emit(BufferDiffEvent::DiffChanged {
changed_range: changed_range.clone(),
});
changed_range
}

fn set_state(
@@ -890,7 +986,9 @@ impl BufferDiff {
}
_ => (true, Some(text::Anchor::MIN..text::Anchor::MAX)),
};
let pending_hunks = mem::take(&mut self.inner.pending_hunks);

let pending_hunks = mem::replace(&mut self.inner.pending_hunks, SumTree::new(buffer));

self.inner = new_state;
if !base_text_changed {
self.inner.pending_hunks = pending_hunks;
@@ -1123,21 +1221,21 @@ impl DiffHunkStatus {
pub fn deleted_none() -> Self {
Self {
kind: DiffHunkStatusKind::Deleted,
secondary: DiffHunkSecondaryStatus::None,
secondary: DiffHunkSecondaryStatus::NoSecondaryHunk,
}
}

pub fn added_none() -> Self {
Self {
kind: DiffHunkStatusKind::Added,
secondary: DiffHunkSecondaryStatus::None,
secondary: DiffHunkSecondaryStatus::NoSecondaryHunk,
}
}

pub fn modified_none() -> Self {
Self {
kind: DiffHunkStatusKind::Modified,
secondary: DiffHunkSecondaryStatus::None,
secondary: DiffHunkSecondaryStatus::NoSecondaryHunk,
}
}
}
@@ -1145,13 +1243,14 @@ impl DiffHunkStatus {
/// Range (crossing new lines), old, new
#[cfg(any(test, feature = "test-support"))]
#[track_caller]
pub fn assert_hunks<Iter>(
diff_hunks: Iter,
pub fn assert_hunks<ExpectedText, HunkIter>(
diff_hunks: HunkIter,
buffer: &text::BufferSnapshot,
diff_base: &str,
expected_hunks: &[(Range<u32>, &str, &str, DiffHunkStatus)],
expected_hunks: &[(Range<u32>, ExpectedText, ExpectedText, DiffHunkStatus)],
) where
Iter: Iterator<Item = DiffHunk>,
HunkIter: Iterator<Item = DiffHunk>,
ExpectedText: AsRef<str>,
{
let actual_hunks = diff_hunks
.map(|hunk| {
@@ -1171,14 +1270,14 @@ pub fn assert_hunks<Iter>(
.map(|(r, old_text, new_text, status)| {
(
Point::new(r.start, 0)..Point::new(r.end, 0),
*old_text,
new_text.to_string(),
old_text.as_ref(),
new_text.as_ref().to_string(),
*status,
)
})
.collect();

assert_eq!(actual_hunks, expected_hunks);
pretty_assertions::assert_eq!(actual_hunks, expected_hunks);
}

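The signature change above makes the expected old/new text generic. A small, self-contained sketch of why an `AsRef<str>` bound helps: the same call site can pass either `&str` or `String` (the helper below is illustrative, not the real `assert_hunks`):

    fn matches<E: AsRef<str>>(actual: &str, expected: &[E]) -> bool {
        expected.iter().any(|e| e.as_ref() == actual)
    }

    fn main() {
        // Borrowed and owned expected text both satisfy the same bound.
        assert!(matches("fn main() {}", &["fn main() {}"]));
        assert!(matches("fn main() {}", &[String::from("fn main() {}")]));
    }
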
#[cfg(test)]
@@ -1237,7 +1336,7 @@ mod tests {
);

diff = cx.update(|cx| BufferDiff::build_empty(&buffer, cx));
assert_hunks(
assert_hunks::<&str, _>(
diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &buffer, None),
&buffer,
&diff_base,
@@ -1575,7 +1674,10 @@ mod tests {
.hunks_intersecting_range(hunk_range.clone(), &buffer, &cx)
.collect::<Vec<_>>();
for hunk in &hunks {
assert_ne!(hunk.secondary_status, DiffHunkSecondaryStatus::None)
assert_ne!(
hunk.secondary_status,
DiffHunkSecondaryStatus::NoSecondaryHunk
)
}

let new_index_text = diff
@@ -1854,10 +1956,10 @@ mod tests {
let hunk_to_change = hunk.clone();
let stage = match hunk.secondary_status {
DiffHunkSecondaryStatus::HasSecondaryHunk => {
hunk.secondary_status = DiffHunkSecondaryStatus::None;
hunk.secondary_status = DiffHunkSecondaryStatus::NoSecondaryHunk;
true
}
DiffHunkSecondaryStatus::None => {
DiffHunkSecondaryStatus::NoSecondaryHunk => {
hunk.secondary_status = DiffHunkSecondaryStatus::HasSecondaryHunk;
false
}

@@ -198,6 +198,8 @@ fn main() -> Result<()> {
let mut paths = vec![];
let mut urls = vec![];
let mut stdin_tmp_file: Option<fs::File> = None;
let mut anonymous_fd_tmp_files = vec![];

for path in args.paths_with_position.iter() {
if path.starts_with("zed://")
|| path.starts_with("http://")
@@ -211,6 +213,11 @@ fn main() -> Result<()> {
paths.push(file.path().to_string_lossy().to_string());
let (file, _) = file.keep()?;
stdin_tmp_file = Some(file);
} else if let Some(file) = anonymous_fd(path) {
let tmp_file = NamedTempFile::new()?;
paths.push(tmp_file.path().to_string_lossy().to_string());
let (tmp_file, _) = tmp_file.keep()?;
anonymous_fd_tmp_files.push((file, tmp_file));
} else {
paths.push(parse_path_with_position(path)?)
}
@@ -252,31 +259,33 @@ fn main() -> Result<()> {
}
});

let pipe_handle: JoinHandle<anyhow::Result<()>> = thread::spawn(move || {
if let Some(mut tmp_file) = stdin_tmp_file {
let mut stdin = std::io::stdin().lock();
if io::IsTerminal::is_terminal(&stdin) {
return Ok(());
}
let mut buffer = [0; 8 * 1024];
loop {
let bytes_read = io::Read::read(&mut stdin, &mut buffer)?;
if bytes_read == 0 {
break;
let stdin_pipe_handle: Option<JoinHandle<anyhow::Result<()>>> =
stdin_tmp_file.map(|tmp_file| {
thread::spawn(move || {
let stdin = std::io::stdin().lock();
if io::IsTerminal::is_terminal(&stdin) {
return Ok(());
}
io::Write::write(&mut tmp_file, &buffer[..bytes_read])?;
}
io::Write::flush(&mut tmp_file)?;
}
Ok(())
});
return pipe_to_tmp(stdin, tmp_file);
})
});

let anonymous_fd_pipe_handles: Vec<JoinHandle<anyhow::Result<()>>> = anonymous_fd_tmp_files
.into_iter()
.map(|(file, tmp_file)| thread::spawn(move || pipe_to_tmp(file, tmp_file)))
.collect();

if args.foreground {
app.run_foreground(url)?;
} else {
app.launch(url)?;
sender.join().unwrap()?;
pipe_handle.join().unwrap()?;
if let Some(handle) = stdin_pipe_handle {
handle.join().unwrap()?;
}
for handle in anonymous_fd_pipe_handles {
handle.join().unwrap()?;
}
}

if let Some(exit_status) = exit_status.lock().take() {
@@ -285,6 +294,64 @@ fn main() -> Result<()> {
Ok(())
}

fn pipe_to_tmp(mut src: impl io::Read, mut dest: fs::File) -> Result<()> {
let mut buffer = [0; 8 * 1024];
loop {
let bytes_read = match src.read(&mut buffer) {
Err(err) if err.kind() == io::ErrorKind::Interrupted => continue,
res => res?,
};
if bytes_read == 0 {
break;
}
io::Write::write_all(&mut dest, &buffer[..bytes_read])?;
}
io::Write::flush(&mut dest)?;
Ok(())
}

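A sketch of how the new `pipe_to_tmp` helper is driven for piped stdin (assumes the `anyhow` and `tempfile` crates the CLI already uses; `stdin_to_tmp_path` is a hypothetical wrapper, not part of the change):

    use std::{io, path::PathBuf};
    use tempfile::NamedTempFile;

    fn stdin_to_tmp_path() -> anyhow::Result<Option<PathBuf>> {
        let stdin = io::stdin().lock();
        if io::IsTerminal::is_terminal(&stdin) {
            return Ok(None); // interactive terminal: nothing was piped in
        }
        let tmp = NamedTempFile::new()?;
        // Keep the file on disk so the editor can open it by path later.
        let (file, path) = tmp.keep()?;
        pipe_to_tmp(stdin, file)?;
        Ok(Some(path))
    }
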
fn anonymous_fd(path: &str) -> Option<fs::File> {
#[cfg(target_os = "linux")]
{
use std::os::fd::{self, FromRawFd};

let fd_str = path.strip_prefix("/proc/self/fd/")?;

let link = fs::read_link(path).ok()?;
if !link.starts_with("memfd:") {
return None;
}

let fd: fd::RawFd = fd_str.parse().ok()?;
let file = unsafe { fs::File::from_raw_fd(fd) };
return Some(file);
}
#[cfg(target_os = "macos")]
{
use std::os::{
fd::{self, FromRawFd},
unix::fs::FileTypeExt,
};

let fd_str = path.strip_prefix("/dev/fd/")?;

let metadata = fs::metadata(path).ok()?;
let file_type = metadata.file_type();
if !file_type.is_fifo() && !file_type.is_socket() {
return None;
}
let fd: fd::RawFd = fd_str.parse().ok()?;
let file = unsafe { fs::File::from_raw_fd(fd) };
return Some(file);
}
#[cfg(not(any(target_os = "linux", target_os = "macos")))]
{
_ = path;
// not implemented for bsd, windows. Could be, but isn't yet
return None;
}
}

#[cfg(any(target_os = "linux", target_os = "freebsd"))]
mod linux {
use std::{

@@ -27,6 +27,7 @@ feature_flags.workspace = true
futures.workspace = true
gpui.workspace = true
http_client.workspace = true
http_client_tls.workspace = true
log.workspace = true
paths.workspace = true
parking_lot.workspace = true

@@ -1154,7 +1154,7 @@ impl Client {
async_tungstenite::async_tls::client_async_tls_with_connector(
request,
stream,
Some(http_client::tls_config().into()),
Some(http_client_tls::tls_config().into()),
)
.await?;
Ok(Connection::new(

@@ -29,6 +29,12 @@ impl std::fmt::Display for ChannelId {
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)]
pub struct ProjectId(pub u64);

impl ProjectId {
pub fn to_proto(&self) -> u64 {
self.0
}
}

#[derive(
Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, serde::Serialize, serde::Deserialize,
)]

@@ -660,6 +660,10 @@ fn for_snowflake(
e.event_type.clone(),
serde_json::to_value(&e.event_properties).unwrap(),
),
Event::AssistantThreadFeedback(e) => (
"Assistant Feedback".to_string(),
serde_json::to_value(&e).unwrap(),
),
};

if let serde_json::Value::Object(ref mut map) = event_properties {

@@ -229,7 +229,6 @@ impl Database {
}

/// Creates a new channel message.
#[allow(clippy::too_many_arguments)]
pub async fn create_channel_message(
&self,
channel_id: ChannelId,

@@ -122,7 +122,6 @@ impl Database {
.await
}

#[allow(clippy::too_many_arguments)]
pub async fn get_or_create_user_by_github_account_tx(
&self,
github_login: &str,

@@ -43,6 +43,20 @@ pub enum Relation {
Contributor,
}

impl Model {
/// Returns the timestamp of when the user's account was created.
///
/// This will be the earlier of the `created_at` and `github_user_created_at` timestamps.
pub fn account_created_at(&self) -> NaiveDateTime {
let mut account_created_at = self.created_at;
if let Some(github_created_at) = self.github_user_created_at {
account_created_at = account_created_at.min(github_created_at);
}

account_created_at
}
}

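The documented rule above, restated as a standalone sketch (hypothetical helper name, same `chrono` types):

    use chrono::NaiveDateTime;

    // The effective account creation time is the earlier of the local
    // `created_at` and the GitHub-reported creation timestamp, when present.
    fn effective_created_at(
        created_at: NaiveDateTime,
        github_user_created_at: Option<NaiveDateTime>,
    ) -> NaiveDateTime {
        github_user_created_at.map_or(created_at, |github| created_at.min(github))
    }
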
impl Related<super::access_token::Entity> for Entity {
fn to() -> RelationDef {
Relation::AccessToken.def()

@@ -5,6 +5,7 @@ mod token;
use crate::api::events::SnowflakeRow;
use crate::api::CloudflareIpCountryHeader;
use crate::build_kinesis_client;
use crate::rpc::MIN_ACCOUNT_AGE_FOR_LLM_USE;
use crate::{db::UserId, executor::Executor, Cents, Config, Error, Result};
use anyhow::{anyhow, Context as _};
use authorization::authorize_access_to_language_model;
@@ -217,6 +218,13 @@ async fn perform_completion(
params.model,
);

let bypass_account_age_check = claims.has_llm_subscription || claims.bypass_account_age_check;
if !bypass_account_age_check {
if Utc::now().naive_utc() - claims.account_created_at < MIN_ACCOUNT_AGE_FOR_LLM_USE {
Err(anyhow!("account too young"))?
}
}

authorize_access_to_language_model(
&state.config,
&claims,

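A self-contained sketch of the gate added above: completions are rejected while the account is younger than the 30-day minimum, unless the claims carry an LLM subscription or the bypass flag (the helper name and inlined 30-day constant are illustrative):

    use chrono::{Duration, NaiveDateTime, Utc};

    fn may_use_llm(
        account_created_at: NaiveDateTime,
        has_llm_subscription: bool,
        bypass_account_age_check: bool,
    ) -> bool {
        has_llm_subscription
            || bypass_account_age_check
            || Utc::now().naive_utc() - account_created_at >= Duration::days(30)
    }
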
@@ -289,7 +289,6 @@ impl LlmDatabase {
.await
}

#[allow(clippy::too_many_arguments)]
pub async fn record_usage(
&self,
user_id: UserId,
@@ -554,7 +553,6 @@ impl LlmDatabase {
.await
}

#[allow(clippy::too_many_arguments)]
async fn update_usage_for_measure(
&self,
user_id: UserId,

@@ -3,7 +3,7 @@ use crate::llm::{DEFAULT_MAX_MONTHLY_SPEND, FREE_TIER_MONTHLY_SPENDING_LIMIT};
use crate::Cents;
use crate::{db::billing_preference, Config};
use anyhow::{anyhow, Result};
use chrono::Utc;
use chrono::{NaiveDateTime, Utc};
use jsonwebtoken::{DecodingKey, EncodingKey, Header, Validation};
use serde::{Deserialize, Serialize};
use std::time::Duration;
@@ -20,9 +20,10 @@ pub struct LlmTokenClaims {
pub system_id: Option<String>,
pub metrics_id: Uuid,
pub github_user_login: String,
pub account_created_at: NaiveDateTime,
pub is_staff: bool,
pub has_llm_closed_beta_feature_flag: bool,
#[serde(default)]
pub bypass_account_age_check: bool,
pub has_predict_edits_feature_flag: bool,
pub has_llm_subscription: bool,
pub max_monthly_spend_in_cents: u32,
@@ -33,13 +34,11 @@ pub struct LlmTokenClaims {
const LLM_TOKEN_LIFETIME: Duration = Duration::from_secs(60 * 60);

impl LlmTokenClaims {
#[allow(clippy::too_many_arguments)]
pub fn create(
user: &user::Model,
is_staff: bool,
billing_preferences: Option<billing_preference::Model>,
has_llm_closed_beta_feature_flag: bool,
has_predict_edits_feature_flag: bool,
feature_flags: &Vec<String>,
has_llm_subscription: bool,
plan: rpc::proto::Plan,
system_id: Option<String>,
@@ -59,9 +58,17 @@ impl LlmTokenClaims {
system_id,
metrics_id: user.metrics_id,
github_user_login: user.github_login.clone(),
account_created_at: user.account_created_at(),
is_staff,
has_llm_closed_beta_feature_flag,
has_predict_edits_feature_flag,
has_llm_closed_beta_feature_flag: feature_flags
.iter()
.any(|flag| flag == "llm-closed-beta"),
bypass_account_age_check: feature_flags
.iter()
.any(|flag| flag == "bypass-account-age-check"),
has_predict_edits_feature_flag: feature_flags
.iter()
.any(|flag| flag == "predict-edits"),
has_llm_subscription,
max_monthly_spend_in_cents: billing_preferences
.map_or(DEFAULT_MAX_MONTHLY_SPEND.0, |preferences| {

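A sketch of the pattern the new `feature_flags: &Vec<String>` parameter enables: each claim boolean is derived from the flag list with `iter().any(...)` instead of being threaded through as a separate argument (helper and flag values below are illustrative):

    fn has_flag(feature_flags: &[String], name: &str) -> bool {
        feature_flags.iter().any(|flag| flag == name)
    }

    fn main() {
        let flags = vec![
            "bypass-account-age-check".to_string(),
            "predict-edits".to_string(),
        ];
        assert!(has_flag(&flags, "predict-edits"));
        assert!(has_flag(&flags, "bypass-account-age-check"));
        assert!(!has_flag(&flags, "llm-closed-beta"));
    }
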
@@ -307,6 +307,7 @@ impl Server {
.add_request_handler(forward_read_only_project_request::<proto::SynchronizeBuffers>)
.add_request_handler(forward_read_only_project_request::<proto::InlayHints>)
.add_request_handler(forward_read_only_project_request::<proto::ResolveInlayHint>)
.add_request_handler(forward_mutating_project_request::<proto::GetCodeLens>)
.add_request_handler(forward_read_only_project_request::<proto::OpenBufferByPath>)
.add_request_handler(forward_read_only_project_request::<proto::GitGetBranches>)
.add_request_handler(forward_read_only_project_request::<proto::OpenUnstagedDiff>)
@@ -347,6 +348,7 @@ impl Server {
.add_message_handler(create_buffer_for_peer)
.add_request_handler(update_buffer)
.add_message_handler(broadcast_project_message_from_host::<proto::RefreshInlayHints>)
.add_message_handler(broadcast_project_message_from_host::<proto::RefreshCodeLens>)
.add_message_handler(broadcast_project_message_from_host::<proto::UpdateBufferFile>)
.add_message_handler(broadcast_project_message_from_host::<proto::BufferReloaded>)
.add_message_handler(broadcast_project_message_from_host::<proto::BufferSaved>)
@@ -396,6 +398,7 @@ impl Server {
.add_request_handler(forward_mutating_project_request::<proto::Stage>)
.add_request_handler(forward_mutating_project_request::<proto::Unstage>)
.add_request_handler(forward_mutating_project_request::<proto::Commit>)
.add_request_handler(forward_mutating_project_request::<proto::GitInit>)
.add_request_handler(forward_read_only_project_request::<proto::GetRemotes>)
.add_request_handler(forward_read_only_project_request::<proto::GitShow>)
.add_request_handler(forward_read_only_project_request::<proto::GitReset>)
@@ -697,7 +700,6 @@ impl Server {
})
}

#[allow(clippy::too_many_arguments)]
pub fn handle_connection(
self: &Arc<Self>,
connection: Connection,
@@ -1081,7 +1083,6 @@ pub fn routes(server: Arc<Server>) -> Router<(), Body> {
.layer(Extension(server))
}

#[allow(clippy::too_many_arguments)]
pub async fn handle_websocket_request(
TypedHeader(ProtocolVersion(protocol_version)): TypedHeader<ProtocolVersion>,
app_version_header: Option<TypedHeader<AppVersionHeader>>,
@@ -4035,7 +4036,7 @@ async fn accept_terms_of_service(
}

/// The minimum account age an account must have in order to use the LLM service.
const MIN_ACCOUNT_AGE_FOR_LLM_USE: chrono::Duration = chrono::Duration::days(30);
pub const MIN_ACCOUNT_AGE_FOR_LLM_USE: chrono::Duration = chrono::Duration::days(30);

async fn get_llm_api_token(
_request: proto::GetLlmToken,
@@ -4046,8 +4047,6 @@ async fn get_llm_api_token(

let flags = db.get_user_flags(session.user_id()).await?;
let has_language_models_feature_flag = flags.iter().any(|flag| flag == "language-models");
let has_llm_closed_beta_feature_flag = flags.iter().any(|flag| flag == "llm-closed-beta");
let has_predict_edits_feature_flag = flags.iter().any(|flag| flag == "predict-edits");

if !session.is_staff() && !has_language_models_feature_flag {
Err(anyhow!("permission denied"))?
@@ -4064,27 +4063,13 @@ async fn get_llm_api_token(
}

let has_llm_subscription = session.has_llm_subscription(&db).await?;

let bypass_account_age_check =
has_llm_subscription || flags.iter().any(|flag| flag == "bypass-account-age-check");
if !bypass_account_age_check {
let mut account_created_at = user.created_at;
if let Some(github_created_at) = user.github_user_created_at {
account_created_at = account_created_at.min(github_created_at);
}
if Utc::now().naive_utc() - account_created_at < MIN_ACCOUNT_AGE_FOR_LLM_USE {
Err(anyhow!("account too young"))?
}
}

let billing_preferences = db.get_billing_preferences(user.id).await?;

let token = LlmTokenClaims::create(
&user,
session.is_staff(),
billing_preferences,
has_llm_closed_beta_feature_flag,
has_predict_edits_feature_flag,
&flags,
has_llm_subscription,
session.current_plan(&db).await?,
session.system_id.clone(),

@@ -3,7 +3,6 @@ use crate::{
tests::{rust_lang, TestServer},
};
use call::ActiveCall;
use collections::HashMap;
use editor::{
actions::{
ConfirmCodeAction, ConfirmCompletion, ConfirmRename, ContextMenuFirst, Redo, Rename,
@@ -1983,7 +1982,6 @@ async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestA
blame_entry("3a3a3a", 2..3),
blame_entry("4c4c4c", 3..4),
],
permalinks: HashMap::default(), // This field is deprecrated
messages: [
("1b1b1b", "message for idx-0"),
("0d0d0d", "message for idx-1"),
@@ -2040,7 +2038,7 @@ async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestA
// client_b now requests git blame for the open buffer
editor_b.update_in(cx_b, |editor_b, window, cx| {
assert!(editor_b.blame().is_none());
editor_b.toggle_git_blame(&editor::actions::ToggleGitBlame {}, window, cx);
editor_b.toggle_git_blame(&git::Blame {}, window, cx);
});

cx_a.executor().run_until_parked();
