Compare commits: fix-python...project-en (65 commits)
| SHA1 |
|---|
| 8001cb9037 |
| 18144e7d63 |
| f2bb495bff |
| 1106ba81ff |
| 3a9fe61516 |
| 28cca2527c |
| 3fde499eee |
| f176715f1e |
| 4a302a28b7 |
| 3154b530a0 |
| 959d5f52a4 |
| 2771623bac |
| 920487706d |
| 3d5035d8a6 |
| d047801295 |
| e7d1d731f4 |
| 149db5cad4 |
| 992d791852 |
| 18bf07822e |
| 3ba619db5c |
| 3559464840 |
| 32a03ee58a |
| 912ce5dc1a |
| 97e8f499fd |
| 736d5e6da1 |
| 68023fdc44 |
| 253ef7ef44 |
| 687cc5d661 |
| 290d249e38 |
| 4922a000aa |
| 96c9824997 |
| a83151730b |
| 99335128eb |
| e8457656c6 |
| b037c913e5 |
| aca7f54773 |
| 10a3ad078c |
| c4bbdd03c5 |
| 27a47233c9 |
| 0b8c14f0d6 |
| 800f40524b |
| 1f4a2d50a0 |
| c706acbfcb |
| 3f19ae1689 |
| 4744c4ff7e |
| aa168c696b |
| ce2918ef46 |
| 3cfaadd2af |
| b69219e9e0 |
| 0ae3518b1c |
| 73964353a4 |
| bf6e7cb6ee |
| 36c4f6082c |
| 3d032bcf2c |
| 3e28fa2cc4 |
| c42442974d |
| f23b972203 |
| 07d9cd7e88 |
| a48238701d |
| d17d747c62 |
| bc08df2dfd |
| 0f4b734d91 |
| 6df29d3279 |
| 647cca8c8d |
| 1f81674927 |
.github/ISSUE_TEMPLATE/01_bug_agent.yml (deleted, 36 lines)
@@ -1,36 +0,0 @@
name: Bug Report (Agent Panel)
description: Zed Agent Panel Bugs
type: "Bug"
labels: ["agent", "ai"]
title: "Agent Panel: <a short description of the Agent Panel bug>"
body:
- type: textarea
attributes:
label: Summary
description: Describe the bug with a one line summary, and provide detailed reproduction steps
value: |
<!-- Please insert a one line summary of the issue below -->
SUMMARY_SENTENCE_HERE

### Description
<!-- Describe with sufficient detail to reproduce from a clean Zed install. -->
<!-- Please include the LLM provider and model name you are using -->
Steps to trigger the problem:
1.
2.
3.

Actual Behavior:
Expected Behavior:
validations:
required: true

- type: textarea
id: environment
attributes:
label: Zed Version and System Specs
description: 'Open Zed, and in the command palette select "zed: Copy System Specs Into Clipboard"'
placeholder: |
Output of "zed: Copy System Specs Into Clipboard"
validations:
required: true
@@ -1,36 +0,0 @@
name: Bug Report (Edit Predictions)
description: Zed Edit Predictions bugs
type: "Bug"
labels: ["ai", "inline completion", "zeta"]
title: "Edit Predictions: <a short description of the Edit Prediction bug>"
body:
- type: textarea
attributes:
label: Summary
description: Describe the bug with a one line summary, and provide detailed reproduction steps
value: |
<!-- Please insert a one line summary of the issue below -->
SUMMARY_SENTENCE_HERE

### Description
<!-- Describe with sufficient detail to reproduce from a clean Zed install. -->
<!-- Please include the LLM provider and model name you are using -->
Steps to trigger the problem:
1.
2.
3.

Actual Behavior:
Expected Behavior:
validations:
required: true

- type: textarea
id: environment
attributes:
label: Zed Version and System Specs
description: 'Open Zed, and in the command palette select "zed: Copy System Specs Into Clipboard"'
placeholder: |
Output of "zed: Copy System Specs Into Clipboard"
validations:
required: true
.github/ISSUE_TEMPLATE/03_bug_git.yml (deleted, 35 lines)
@@ -1,35 +0,0 @@
name: Bug Report (Git)
description: Zed Git-Related Bugs
type: "Bug"
labels: ["git"]
title: "Git: <a short description of the Git bug>"
body:
- type: textarea
attributes:
label: Summary
description: Describe the bug with a one line summary, and provide detailed reproduction steps
value: |
<!-- Please insert a one line summary of the issue below -->
SUMMARY_SENTENCE_HERE

### Description
<!-- Describe with sufficient detail to reproduce from a clean Zed install. -->
Steps to trigger the problem:
1.
2.
3.

Actual Behavior:
Expected Behavior:

validations:
required: true
- type: textarea
id: environment
attributes:
label: Zed Version and System Specs
description: 'Open Zed, and in the command palette select "zed: Copy System Specs Into Clipboard"'
placeholder: |
Output of "zed: Copy System Specs Into Clipboard"
validations:
required: true
.github/ISSUE_TEMPLATE/10_bug_report.yml (deleted, 56 lines)
@@ -1,56 +0,0 @@
name: Bug Report (Other)
description: |
Something else is broken in Zed (exclude crashing).
type: "Bug"
body:
- type: textarea
attributes:
label: Summary
description: Provide a one sentence summary and detailed reproduction steps
value: |
<!-- Begin your issue with a one sentence summary -->
SUMMARY_SENTENCE_HERE

### Description
<!-- Describe with sufficient detail to reproduce from a clean Zed install.
- Any code must be sufficient to reproduce (include context!)
- Code must as text, not just as a screenshot.
- Issues with insufficient detail may be summarily closed.
-->

Steps to reproduce:
1.
2.
3.
4.

Expected Behavior:
Actual Behavior:

<!-- Before Submitting, did you:
1. Include settings.json, keymap.json, .editorconfig if relevant?
2. Check your Zed.log for relevant errors? (please include!)
3. Click Preview to ensure everything looks right?
4. Hide videos, large images and logs in ``` inside collapsible blocks:

<details><summary>click to expand</summary>

```json

```
</details>
-->

validations:
required: true

- type: textarea
id: environment
attributes:
label: Zed Version and System Specs
description: |
Open Zed, from the command palette select "zed: Copy System Specs Into Clipboard"
placeholder: |
Output of "zed: Copy System Specs Into Clipboard"
validations:
required: true
.github/ISSUE_TEMPLATE/1_bug_report.yml (new file, 57 lines)
@@ -0,0 +1,57 @@
name: Bug Report
description: |
Something is broken in Zed (exclude crashing).
type: "Bug"
body:
- type: textarea
attributes:
label: Summary
description: Describe the bug with a one line summary, and provide detailed reproduction steps
value: |
<!-- Please insert a one line summary of the issue below -->

SUMMARY_SENTENCE_HERE

<!-- Be verbose: Include all steps necessary to reproduce from a clean Zed installation. -->
<!-- Code snippets are better than images, a repository link that reproduces the issue is ideal. -->

Steps to trigger the problem:
1.
2.
3.
4.

Actual Behavior:

Expected Behavior:

<!--
Is there anything additional necessary to reproduce this issue?
- settings.json, keymap.json, .editorconfig etc?
- Does it happen intermittently or only with specific projects / file types?
- Have you found a workaround?

Did you check your Zed.log to see if there is any relevant details there?
- When including large items (videos, screenshots, logs, configs) please wrap with:

<details><summary>See inside for XXXXYYY</summary>

```shell
code
```

</details>
-->

validations:
required: true

- type: textarea
id: environment
attributes:
label: Zed Version and System Specs
description: 'Open Zed, and in the command palette select "zed: Copy System Specs Into Clipboard"'
placeholder: |
Output of "zed: Copy System Specs Into Clipboard"
validations:
required: true
@@ -5,12 +5,10 @@ body:
- type: textarea
attributes:
label: Summary
description: Summarize the issue with detailed reproduction steps
description: Describe the bug with a one line summary, and provide detailed reproduction steps
value: |
<!-- Begin your issue with a one sentence summary -->
SUMMARY_SENTENCE_HERE
<!-- Please insert a one line summary of the issue below -->

### Description
<!-- Include all steps necessary to reproduce from a clean Zed installation. Be verbose -->
Steps to trigger the problem:
1.
@@ -18,6 +16,7 @@ body:
3.

Actual Behavior:

Expected Behavior:

validations:
@@ -41,11 +40,10 @@ body:
value: |
<details><summary>Zed.log</summary>

<!-- Paste your log inside the code block. -->
```log

<!-- Click below this line and paste or drag-and-drop your log-->
```

</details>
```
<!-- Click above this line and paste or drag-and-drop your log--></details>
validations:
required: false
.github/ISSUE_TEMPLATE/99_other.yml (deleted, 19 lines)
@@ -1,19 +0,0 @@
name: Other [Staff Only]
description: Zed Staff Only
body:
- type: textarea
attributes:
label: Summary
value: |
<!-- Please insert a one line summary of the issue below -->
SUMMARY_SENTENCE_HERE

### Description

IF YOU DO NOT WORK FOR ZED INDUSTRIES DO NOT CREATE ISSUES WITH THIS TEMPLATE.
THEY WILL BE AUTO-CLOSED AND MAY RESULT IN YOU BEING BANNED FROM THE ZED ISSUE TRACKER.

FEATURE REQUESTS / SUPPORT REQUESTS SHOULD BE OPENED AS DISCUSSIONS:
https://github.com/zed-industries/zed/discussions/new/choose
validations:
required: true
.github/ISSUE_TEMPLATE/config.yml (5 lines changed)
@@ -4,6 +4,9 @@ contact_links:
- name: Feature Request
url: https://github.com/zed-industries/zed/discussions/new/choose
about: To request a feature, open a new Discussion in one of the appropriate Discussion categories
- name: "Zed Discord"
- name: Zed Discussion Forum
url: https://github.com/zed-industries/zed/discussions
about: A community discussion forum
- name: "Zed Discord: #Support Channel"
url: https://zed.dev/community-links
about: Real-time discussion and user support
.github/actions/run_tests_windows/action.yml (2 lines changed)
@@ -23,4 +23,4 @@ runs:
- name: Run tests
shell: pwsh
working-directory: ${{ inputs.working-directory }}
run: cargo nextest run --workspace --no-fail-fast --config='profile.dev.debug="limited"'
run: cargo nextest run --workspace --no-fail-fast
.github/workflows/ci.yml (93 lines changed)
@@ -114,9 +114,7 @@ jobs:
timeout-minutes: 60
name: Check workspace-hack crate
needs: [job_spec]
if: |
github.repository_owner == 'zed-industries' &&
needs.job_spec.outputs.run_tests == 'true'
if: github.repository_owner == 'zed-industries'
runs-on:
- buildjet-8vcpu-ubuntu-2204
steps:
@@ -133,13 +131,13 @@ jobs:
- name: Check workspace-hack Cargo.toml is up-to-date
run: |
cargo hakari generate --diff || {
echo "To fix, run script/update-workspace-hack or script/update-workspace-hack.ps1";
echo "To fix, run script/update-workspace-hack";
false
}
- name: Check all crates depend on workspace-hack
run: |
cargo hakari manage-deps --dry-run || {
echo "To fix, run script/update-workspace-hack or script/update-workspace-hack.ps1"
echo "To fix, run script/update-workspace-hack"
false
}

@@ -225,7 +223,7 @@ jobs:

- name: Check for new vulnerable dependencies
if: github.event_name == 'pull_request'
uses: actions/dependency-review-action@67d4f4bd7a9b17a0db54d2a7519187c65e339de8 # v4
uses: actions/dependency-review-action@3b139cfc5fae8b618d3eae3675e383bb1769c019 # v4
with:
license-check: false

@@ -465,7 +463,6 @@ jobs:
- job_spec
- style
- migration_checks
# run_tests: If adding required tests, add them here and to script below.
- workspace_hack
- linux_tests
- build_remote_server
@@ -483,14 +480,11 @@ jobs:

# Only check test jobs if they were supposed to run
if [[ "${{ needs.job_spec.outputs.run_tests }}" == "true" ]]; then
[[ "${{ needs.workspace_hack.result }}" != 'success' ]] && { RET_CODE=1; echo "Workspace Hack failed"; }
[[ "${{ needs.macos_tests.result }}" != 'success' ]] && { RET_CODE=1; echo "macOS tests failed"; }
[[ "${{ needs.linux_tests.result }}" != 'success' ]] && { RET_CODE=1; echo "Linux tests failed"; }
[[ "${{ needs.windows_tests.result }}" != 'success' ]] && { RET_CODE=1; echo "Windows tests failed"; }
[[ "${{ needs.windows_clippy.result }}" != 'success' ]] && { RET_CODE=1; echo "Windows clippy failed"; }
[[ "${{ needs.build_remote_server.result }}" != 'success' ]] && { RET_CODE=1; echo "Remote server build failed"; }
# This check is intentionally disabled. See: https://github.com/zed-industries/zed/pull/28431
# [[ "${{ needs.migration_checks.result }}" != 'success' ]] && { RET_CODE=1; echo "Migration Checks failed"; }
fi
if [[ "$RET_CODE" -eq 0 ]]; then
echo "All tests passed successfully!"
@@ -594,7 +588,7 @@ jobs:
timeout-minutes: 60
name: Linux x86_x64 release bundle
runs-on:
- buildjet-16vcpu-ubuntu-2004 # ubuntu 20.04 for minimal glibc
- buildjet-16vcpu-ubuntu-2004
if: |
startsWith(github.ref, 'refs/tags/v')
|| contains(github.event.pull_request.labels.*.name, 'run-bundling')
@@ -622,23 +616,26 @@ jobs:
- name: Create Linux .tar.gz bundle
run: script/bundle-linux

- name: Upload Artifact to Workflow - zed (run-bundling)
- name: Upload Linux bundle to workflow run if main branch or specific label
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
if: contains(github.event.pull_request.labels.*.name, 'run-bundling')
if: |
github.ref == 'refs/heads/main'
|| contains(github.event.pull_request.labels.*.name, 'run-bundling')
with:
name: zed-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz
path: target/release/zed-*.tar.gz

- name: Upload Artifact to Workflow - zed-remote-server (run-bundling)
- name: Upload Linux remote server to workflow run if main branch or specific label
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
if: contains(github.event.pull_request.labels.*.name, 'run-bundling')
if: |
github.ref == 'refs/heads/main'
|| contains(github.event.pull_request.labels.*.name, 'run-bundling')
with:
name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.gz
path: target/zed-remote-server-linux-x86_64.gz

- name: Upload Artifacts to release
- name: Upload app bundle to release
uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1
if: ${{ !(contains(github.event.pull_request.labels.*.name, 'run-bundling')) }}
with:
draft: true
prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }}
@@ -677,26 +674,29 @@ jobs:
# This exports RELEASE_CHANNEL into env (GITHUB_ENV)
script/determine-release-channel

- name: Create and upload Linux .tar.gz bundles
- name: Create and upload Linux .tar.gz bundle
run: script/bundle-linux

- name: Upload Artifact to Workflow - zed (run-bundling)
- name: Upload Linux bundle to workflow run if main branch or specific label
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
if: contains(github.event.pull_request.labels.*.name, 'run-bundling')
if: |
github.ref == 'refs/heads/main'
|| contains(github.event.pull_request.labels.*.name, 'run-bundling')
with:
name: zed-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz
path: target/release/zed-*.tar.gz

- name: Upload Artifact to Workflow - zed-remote-server (run-bundling)
- name: Upload Linux remote server to workflow run if main branch or specific label
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
if: contains(github.event.pull_request.labels.*.name, 'run-bundling')
if: |
github.ref == 'refs/heads/main'
|| contains(github.event.pull_request.labels.*.name, 'run-bundling')
with:
name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.gz
path: target/zed-remote-server-linux-aarch64.gz

- name: Upload Artifacts to release
- name: Upload app bundle to release
uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 # v1
if: ${{ !(contains(github.event.pull_request.labels.*.name, 'run-bundling')) }}
with:
draft: true
prerelease: ${{ env.RELEASE_CHANNEL == 'preview' }}
@@ -706,51 +706,6 @@ jobs:
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

nix-build:
timeout-minutes: 60
name: Nix Build
continue-on-error: true
if: github.repository_owner == 'zed-industries' && contains(github.event.pull_request.labels.*.name, 'run-nix')
strategy:
fail-fast: false
matrix:
system:
- os: x86 Linux
runner: buildjet-16vcpu-ubuntu-2204
install_nix: true
- os: arm Mac
runner: [macOS, ARM64, test]
install_nix: false
runs-on: ${{ matrix.system.runner }}
env:
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON: ${{ secrets.ZED_CLOUD_PROVIDER_ADDITIONAL_MODELS_JSON }}
GIT_LFS_SKIP_SMUDGE: 1 # breaks the livekit rust sdk examples which we don't actually depend on
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
with:
clean: false
- name: Set path
if: ${{ ! matrix.system.install_nix }}
run: |
echo "/nix/var/nix/profiles/default/bin" >> $GITHUB_PATH
echo "/Users/administrator/.nix-profile/bin" >> $GITHUB_PATH

- uses: cachix/install-nix-action@d1ca217b388ee87b2507a9a93bf01368bde7cec2 # v31
if: ${{ matrix.system.install_nix }}
with:
github_access_token: ${{ secrets.GITHUB_TOKEN }}

- uses: cachix/cachix-action@0fc020193b5a1fa3ac4575aa3a7d3aa6a35435ad # v16
with:
name: zed-industries
authToken: "${{ secrets.CACHIX_AUTH_TOKEN }}"
skipPush: true
- run: nix build .#debug
- name: Limit /nix/store to 50GB
run: "[ $(du -sm /nix/store | cut -f1) -gt 50000 ] && nix-collect-garbage -d"

auto-release-preview:
name: Auto release preview
if: |
.github/workflows/deploy_collab.yml (8 lines changed)
@@ -117,10 +117,12 @@ jobs:
export ZED_KUBE_NAMESPACE=production
export ZED_COLLAB_LOAD_BALANCER_SIZE_UNIT=10
export ZED_API_LOAD_BALANCER_SIZE_UNIT=2
export ZED_LLM_LOAD_BALANCER_SIZE_UNIT=2
elif [[ $GITHUB_REF_NAME = "collab-staging" ]]; then
export ZED_KUBE_NAMESPACE=staging
export ZED_COLLAB_LOAD_BALANCER_SIZE_UNIT=1
export ZED_API_LOAD_BALANCER_SIZE_UNIT=1
export ZED_LLM_LOAD_BALANCER_SIZE_UNIT=1
else
echo "cowardly refusing to deploy from an unknown branch"
exit 1
@@ -145,3 +147,9 @@ jobs:
envsubst < crates/collab/k8s/collab.template.yml | kubectl apply -f -
kubectl -n "$ZED_KUBE_NAMESPACE" rollout status deployment/$ZED_SERVICE_NAME --watch
echo "deployed ${ZED_SERVICE_NAME} to ${ZED_KUBE_NAMESPACE}"

export ZED_SERVICE_NAME=llm
export ZED_LOAD_BALANCER_SIZE_UNIT=$ZED_LLM_LOAD_BALANCER_SIZE_UNIT
envsubst < crates/collab/k8s/collab.template.yml | kubectl apply -f -
kubectl -n "$ZED_KUBE_NAMESPACE" rollout status deployment/$ZED_SERVICE_NAME --watch
echo "deployed ${ZED_SERVICE_NAME} to ${ZED_KUBE_NAMESPACE}"
.github/workflows/release_nightly.yml (5 lines changed)
@@ -206,7 +206,7 @@ jobs:
echo "/nix/var/nix/profiles/default/bin" >> $GITHUB_PATH
echo "/Users/administrator/.nix-profile/bin" >> $GITHUB_PATH

- uses: cachix/install-nix-action@d1ca217b388ee87b2507a9a93bf01368bde7cec2 # v31
- uses: cachix/install-nix-action@02a151ada4993995686f9ed4f1be7cfbb229e56f # v31
if: ${{ matrix.system.install_nix }}
with:
github_access_token: ${{ secrets.GITHUB_TOKEN }}
@@ -216,8 +216,7 @@ jobs:
name: zed-industries
authToken: "${{ secrets.CACHIX_AUTH_TOKEN }}"
- run: nix build
- name: Limit /nix/store to 50GB
run: '[ $(du -sm /nix/store | cut -f1) -gt 50000 ] && nix-collect-garbage -d'
- run: nix-collect-garbage -d

update-nightly-tag:
name: Update nightly tag
@@ -1,14 +1,14 @@
[
{
"label": "Debug Zed (CodeLLDB)",
"adapter": "CodeLLDB",
"label": "Debug Zed with LLDB",
"adapter": "lldb",
"program": "$ZED_WORKTREE_ROOT/target/debug/zed",
"request": "launch",
"cwd": "$ZED_WORKTREE_ROOT"
},
{
"label": "Debug Zed (GDB)",
"adapter": "GDB",
"label": "Debug Zed with GDB",
"adapter": "gdb",
"program": "$ZED_WORKTREE_ROOT/target/debug/zed",
"request": "launch",
"cwd": "$ZED_WORKTREE_ROOT",
Cargo.lock (generated, 553 lines changed)
Cargo.toml (62 lines changed)
@@ -8,6 +8,7 @@ members = [
"crates/assets",
"crates/assistant",
"crates/assistant_context_editor",
"crates/assistant_eval",
"crates/assistant_settings",
"crates/assistant_slash_command",
"crates/assistant_slash_commands",
@@ -15,7 +16,6 @@ members = [
"crates/assistant_tools",
"crates/audio",
"crates/auto_update",
"crates/auto_update_helper",
"crates/auto_update_ui",
"crates/aws_http_client",
"crates/bedrock",
@@ -46,7 +46,6 @@ members = [
"crates/diagnostics",
"crates/docs_preprocessor",
"crates/editor",
"crates/eval",
"crates/evals",
"crates/extension",
"crates/extension_api",
@@ -216,6 +215,7 @@ askpass = { path = "crates/askpass" }
assets = { path = "crates/assets" }
assistant = { path = "crates/assistant" }
assistant_context_editor = { path = "crates/assistant_context_editor" }
assistant_eval = { path = "crates/assistant_eval" }
assistant_settings = { path = "crates/assistant_settings" }
assistant_slash_command = { path = "crates/assistant_slash_command" }
assistant_slash_commands = { path = "crates/assistant_slash_commands" }
@@ -223,7 +223,6 @@ assistant_tool = { path = "crates/assistant_tool" }
assistant_tools = { path = "crates/assistant_tools" }
audio = { path = "crates/audio" }
auto_update = { path = "crates/auto_update" }
auto_update_helper = { path = "crates/auto_update_helper" }
auto_update_ui = { path = "crates/auto_update_ui" }
aws_http_client = { path = "crates/aws_http_client" }
bedrock = { path = "crates/bedrock" }
@@ -397,16 +396,12 @@ async-pipe = { git = "https://github.com/zed-industries/async-pipe-rs", rev = "8
async-recursion = "1.0.0"
async-tar = "0.5.0"
async-trait = "0.1"
async-tungstenite = "0.29.1"
async-tungstenite = "0.28"
async-watch = "0.3.1"
async_zip = { version = "0.0.17", features = ["deflate", "deflate64"] }
aws-config = { version = "1.6.1", features = ["behavior-version-latest"] }
aws-credential-types = { version = "1.2.2", features = [
"hardcoded-credentials",
] }
aws-sdk-bedrockruntime = { version = "1.80.0", features = [
"behavior-version-latest",
] }
aws-credential-types = { version = "1.2.2", features = ["hardcoded-credentials"] }
aws-sdk-bedrockruntime = { version = "1.80.0", features = ["behavior-version-latest"] }
aws-smithy-runtime-api = { version = "1.7.4", features = ["http-1x", "client"] }
aws-smithy-types = { version = "1.3.0", features = ["http-body-1-x"] }
base64 = "0.22"
@@ -430,7 +425,7 @@ core-foundation = "0.10.0"
core-foundation-sys = "0.8.6"
ctor = "0.4.0"
dashmap = "6.0"
dap-types = { git = "https://github.com/zed-industries/dap-types", rev = "be69a016ba710191b9fdded28c8b042af4b617f7" }
dap-types = { git = "https://github.com/zed-industries/dap-types", rev = "bfd4af0" }
derive_more = "0.99.17"
dirs = "4.0"
ec4rs = "1.1"
@@ -445,7 +440,6 @@ futures-lite = "1.13"
git2 = { version = "0.20.1", default-features = false }
globset = "0.4"
handlebars = "4.3"
heck = "0.5"
heed = { version = "0.21.0", features = ["read-txn-no-tls"] }
hex = "0.4.3"
html5ever = "0.27.0"
@@ -459,8 +453,8 @@ indoc = "2"
inventory = "0.3.19"
itertools = "0.14.0"
jsonwebtoken = "9.3"
jupyter-protocol = { git = "https://github.com/ConradIrwin/runtimed", rev = "7130c804216b6914355d15d0b91ea91f6babd734" }
jupyter-websocket-client = { git = "https://github.com/ConradIrwin/runtimed" ,rev = "7130c804216b6914355d15d0b91ea91f6babd734" }
jupyter-protocol = { version = "0.6.0" }
jupyter-websocket-client = { version = "0.9.0" }
libc = "0.2"
libsqlite3-sys = { version = "0.30.1", features = ["bundled"] }
linkify = "0.10.0"
@@ -469,22 +463,21 @@ log = { version = "0.4.16", features = ["kv_unstable_serde", "serde"] }
markup5ever_rcdom = "0.3.0"
mlua = { version = "0.10", features = ["lua54", "vendored", "async", "send"] }
nanoid = "0.4"
nbformat = { git = "https://github.com/ConradIrwin/runtimed", rev = "7130c804216b6914355d15d0b91ea91f6babd734" }
nbformat = { version = "0.10.0" }
nix = "0.29"
objc = "0.2"
open = "5.0.0"
num-format = "0.4.4"
ordered-float = "2.1.1"
palette = { version = "0.7.5", default-features = false, features = ["std"] }
parking_lot = "0.12.1"
pathdiff = "0.2"
pet = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "845945b830297a50de0e24020b980a65e4820559" }
pet-fs = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "845945b830297a50de0e24020b980a65e4820559" }
pet-pixi = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "845945b830297a50de0e24020b980a65e4820559" }
pet-conda = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "845945b830297a50de0e24020b980a65e4820559" }
pet-core = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "845945b830297a50de0e24020b980a65e4820559" }
pet-poetry = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "845945b830297a50de0e24020b980a65e4820559" }
pet-reporter = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "845945b830297a50de0e24020b980a65e4820559" }
pet = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "1abe5cec5ebfbe97ca71746a4cfc7fe89bddf8e0" }
pet-fs = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "1abe5cec5ebfbe97ca71746a4cfc7fe89bddf8e0" }
pet-pixi = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "1abe5cec5ebfbe97ca71746a4cfc7fe89bddf8e0" }
pet-conda = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "1abe5cec5ebfbe97ca71746a4cfc7fe89bddf8e0" }
pet-core = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "1abe5cec5ebfbe97ca71746a4cfc7fe89bddf8e0" }
pet-poetry = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "1abe5cec5ebfbe97ca71746a4cfc7fe89bddf8e0" }
pet-reporter = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "1abe5cec5ebfbe97ca71746a4cfc7fe89bddf8e0" }
postage = { version = "0.5", features = ["futures-traits"] }
pretty_assertions = { version = "1.3.0", features = ["unstable"] }
proc-macro2 = "1.0.93"
@@ -507,15 +500,14 @@ reqwest = { git = "https://github.com/zed-industries/reqwest.git", rev = "fd110f
"stream",
] }
rsa = "0.9.6"
runtimelib = { git = "https://github.com/ConradIrwin/runtimed", rev = "7130c804216b6914355d15d0b91ea91f6babd734", default-features = false, features = [
runtimelib = { version = "0.25.0", default-features = false, features = [
"async-dispatcher-runtime",
] }
rustc-demangle = "0.1.23"
rust-embed = { version = "8.4", features = ["include-exclude"] }
rustc-hash = "2.1.0"
rustls = { version = "0.23.26" }
rustls = { version = "0.23.22" }
rustls-platform-verifier = "0.5.0"
scap = { git = "https://github.com/zed-industries/scap", rev = "08f0a01417505cc0990b9931a37e5120db92e0d0", default-features = false }
schemars = { version = "0.8", features = ["impl_json_schema", "indexmap2"] }
semver = "1.0"
serde = { version = "1.0", features = ["derive", "rc"] }
@@ -555,7 +547,7 @@ time = { version = "0.3", features = [
tiny_http = "0.8"
toml = "0.8"
tokio = { version = "1" }
tokio-tungstenite = { version = "0.26", features = ["__rustls-tls"] }
tokio-tungstenite = { version = "0.26", features = ["__rustls-tls"]}
tower-http = "0.4.4"
tree-sitter = { version = "0.25.3", features = ["wasm"] }
tree-sitter-bash = "0.23"
@@ -622,10 +614,12 @@ features = [
[workspace.dependencies.windows]
version = "0.61"
features = [
"Foundation_Collections",
"Foundation_Numerics",
"Storage_Search",
"Storage_Streams",
"System_Threading",
"UI_StartScreen",
"UI_ViewManagement",
"Wdk_System_SystemServices",
"Win32_Globalization",
@@ -652,7 +646,6 @@ features = [
"Win32_System_SystemInformation",
"Win32_System_SystemServices",
"Win32_System_Threading",
"Win32_System_Variant",
"Win32_System_WinRT",
"Win32_UI_Controls",
"Win32_UI_HiDpi",
@@ -660,13 +653,13 @@ features = [
"Win32_UI_Input_KeyboardAndMouse",
"Win32_UI_Shell",
"Win32_UI_Shell_Common",
"Win32_UI_Shell_PropertiesSystem",
"Win32_UI_WindowsAndMessaging",
]

# TODO livekit https://github.com/RustAudio/cpal/pull/891
[patch.crates-io]
cpal = { git = "https://github.com/zed-industries/cpal", rev = "fd8bc2fd39f1f5fdee5a0690656caff9a26d9d50" }
real-async-tls = { git = "https://github.com/zed-industries/async-tls", rev = "1e759a4b5e370f87dc15e40756ac4f8815b61d9d", package = "async-tls" }
notify = { git = "https://github.com/zed-industries/notify.git", rev = "bbb9ea5ae52b253e095737847e367c30653a2e96" }
notify-types = { git = "https://github.com/zed-industries/notify.git", rev = "bbb9ea5ae52b253e095737847e367c30653a2e96" }

@@ -675,6 +668,7 @@ workspace-hack = { path = "tooling/workspace-hack" }

[profile.dev]
split-debuginfo = "unpacked"
debug = "limited"
codegen-units = 16

[profile.dev.package]
@@ -784,12 +778,4 @@ let_underscore_future = "allow"
too_many_arguments = "allow"

[workspace.metadata.cargo-machete]
ignored = [
"bindgen",
"cbindgen",
"prost_build",
"serde",
"component",
"linkme",
"workspace-hack",
]
ignored = ["bindgen", "cbindgen", "prost_build", "serde", "component", "linkme", "workspace-hack"]
@@ -1,6 +1,6 @@
# syntax = docker/dockerfile:1.2

FROM rust:1.86-bookworm as builder
FROM rust:1.81-bookworm as builder
WORKDIR app
COPY . .
assets/icons/ai_anthropic_hosted.svg (new file, 12 lines)
@@ -0,0 +1,12 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<rect width="16" height="16" rx="2" fill="black" fill-opacity="0.2"/>
<g clip-path="url(#clip0_1916_18)">
<path d="M10.652 3.79999H8.816L12.164 12.2H14L10.652 3.79999Z" fill="#1F1F1E"/>
<path d="M5.348 3.79999L2 12.2H3.872L4.55672 10.436H8.05927L8.744 12.2H10.616L7.268 3.79999H5.348ZM5.16224 8.87599L6.308 5.92399L7.45374 8.87599H5.16224Z" fill="#1F1F1E"/>
</g>
<defs>
<clipPath id="clip0_1916_18">
<rect width="12" height="8.4" fill="white" transform="translate(2 3.79999)"/>
</clipPath>
</defs>
</svg>
After: 601 B
@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-arrow-down-right-icon lucide-arrow-down-right"><path d="m7 7 10 10"/><path d="M17 7v10H7"/></svg>
Before: 300 B
@@ -1 +1,3 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-arrow-up-right-icon lucide-arrow-up-right"><path d="M7 7h10v10"/><path d="M7 17 17 7"/></svg>
<svg width="8" height="8" viewBox="0 0 8 8" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M6 2H6.5C6.5 1.86739 6.44732 1.74021 6.35355 1.64645C6.25979 1.55268 6.13261 1.5 6 1.5V2ZM2 1.5C1.72386 1.5 1.5 1.72386 1.5 2C1.5 2.27614 1.72386 2.5 2 2.5L2 1.5ZM5.5 6C5.5 6.27614 5.72386 6.5 6 6.5C6.27614 6.5 6.5 6.27614 6.5 6H5.5ZM1.64645 5.64645C1.45118 5.84171 1.45118 6.15829 1.64645 6.35355C1.84171 6.54882 2.15829 6.54882 2.35355 6.35355L1.64645 5.64645ZM6 1.5H2L2 2.5H6V1.5ZM5.5 2V6H6.5V2H5.5ZM5.64645 1.64645L1.64645 5.64645L2.35355 6.35355L6.35355 2.35355L5.64645 1.64645Z" fill="white"/>
</svg>
Before: 296 B, After: 608 B
@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-binary-icon lucide-binary"><rect x="14" y="14" width="4" height="6" rx="2"/><rect x="6" y="4" width="4" height="6" rx="2"/><path d="M6 20h4"/><path d="M14 10h4"/><path d="M6 14h2v6"/><path d="M14 4h2v6"/></svg>
Before: 413 B
@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-bug-off-icon lucide-bug-off"><path d="M15 7.13V6a3 3 0 0 0-5.14-2.1L8 2"/><path d="M14.12 3.88 16 2"/><path d="M22 13h-4v-2a4 4 0 0 0-4-4h-1.3"/><path d="M20.97 5c0 2.1-1.6 3.8-3.5 4"/><path d="m2 2 20 20"/><path d="M7.7 7.7A4 4 0 0 0 6 11v3a6 6 0 0 0 11.13 3.13"/><path d="M12 20v-8"/><path d="M6 13H2"/><path d="M3 21c0-2.1 1.7-3.9 3.8-4"/></svg>
Before: 551 B
@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-circle-off-icon lucide-circle-off"><path d="m2 2 20 20"/><path d="M8.35 2.69A10 10 0 0 1 21.3 15.65"/><path d="M19.08 19.08A10 10 0 1 1 4.92 4.92"/></svg>
Before: 357 B
@@ -1 +0,0 @@
<svg width="16" height="16" fill="none" xml:space="preserve" xmlns="http://www.w3.org/2000/svg"><g style="fill:#000;fill-opacity:1" fill="#180c25"><path d="m-116.1-101.4-28.9-28.9a6.7 6.7 0 0 1-1.8-4.7v-41.2c0-2.4-2.4-4.8-4.8-4.8h-9.6a5.2 5.2 0 0 0-4.8 4.8v48c0 2.5 1 5 2.7 6.8l33.6 33.6a9.6 9.6 0 0 0 6.8 2.8h4.8c2.7 0 4.8-2.2 4.8-4.8v-4.8c0-2.5-1-5-2.8-6.8zM-79.6-176.2c0-2.4-2.4-4.8-4.8-4.8h-9.7a5.2 5.2 0 0 0-4.7 4.8v41.2c0 1.8-.8 3.5-2 4.7l-9.6 9.7a9.5 9.5 0 0 0-2.8 6.8v4.8c0 2.6 2.1 4.7 4.8 4.7h4.8c2.4 0 4.9-.9 6.7-2.8l14.4-14.3a9.6 9.6 0 0 0 2.8-6.8v-48z" style="fill:#000;fill-opacity:1;stroke-width:.255894" transform="translate(21.6 22.7) scale(.11067)"/></g></svg>
Before: 677 B
@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-flame-icon lucide-flame"><path d="M8.5 14.5A2.5 2.5 0 0 0 11 12c0-1.38-.5-2-1-3-1.072-2.143-.224-4.054 2-6 .5 2.5 2 4.9 4 6.5 2 1.6 3 3.5 3 5.5a7 7 0 1 1-14 0c0-1.153.433-2.294 1-3a2.5 2.5 0 0 0 2.5 2.5z"/></svg>
Before: 415 B
@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-square-function-icon lucide-square-function"><rect width="18" height="18" x="3" y="3" rx="2" ry="2"/><path d="M9 17c2 0 2.8-1 2.8-2.8V10c0-2 1-3.3 3.2-3"/><path d="M9 11.2h5.7"/></svg>
Before: 387 B
@@ -1,5 +0,0 @@
<svg width="24" height="24" viewBox="0 0 24 24" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M20 14H4C3.44772 14 3 14.4477 3 15V20C3 20.5523 3.44772 21 4 21H20C20.5523 21 21 20.5523 21 20V15C21 14.4477 20.5523 14 20 14Z" stroke="black" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M11 3H4C3.44772 3 3 3.44772 3 4V9C3 9.55228 3.44772 10 4 10H11C11.5523 10 12 9.55228 12 9V4C12 3.44772 11.5523 3 11 3Z" stroke="black" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M20 3H17C16.4477 3 16 3.44772 16 4V9C16 9.55228 16.4477 10 17 10H20C20.5523 10 21 9.55228 21 9V4C21 3.44772 20.5523 3 20 3Z" stroke="black" stroke-width="2" stroke-linecap="round" stroke-linejoin="round"/>
</svg>
Before: 746 B
@@ -1,3 +0,0 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M10.1331 11.3776C10.2754 10.6665 10.1331 9.78593 11.1998 8.53327C11.82 7.80489 12.2664 6.96894 12.2664 6.04456C12.2664 4.91305 11.8169 3.82788 11.0168 3.02778C10.2167 2.22769 9.13152 1.7782 8.00001 1.7782C6.8685 1.7782 5.78334 2.22769 4.98324 3.02778C4.18314 3.82788 3.73364 4.91305 3.73364 6.04456C3.73364 6.75562 3.87586 7.6089 4.80024 8.53327C5.86683 9.80679 5.72462 10.6665 5.86683 11.3776M10.1331 11.3776V12.8821C10.1331 13.622 9.53341 14.2218 8.79353 14.2218H7.2065C6.46662 14.2218 5.86683 13.622 5.86683 12.8821V11.3776M10.1331 11.3776H5.86683" stroke="black" stroke-width="1.33333" stroke-linecap="round" stroke-linejoin="round"/>
</svg>
Before: 751 B
@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-power-icon lucide-power"><path d="M12 2v10"/><path d="M18.4 6.6a9 9 0 1 1-12.77.04"/></svg>
Before: 294 B
@@ -1,4 +0,0 @@
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M3.09666 3.02263C3.0567 3.00312 3.01178 2.9961 2.96778 3.0025C2.92377 3.00889 2.88271 3.02839 2.84995 3.05847C2.8172 3.08854 2.79426 3.12778 2.78413 3.17108C2.77401 3.21439 2.77716 3.25973 2.79319 3.30121L4.05638 6.69C4.13088 6.89005 4.13088 7.11022 4.05638 7.31027L2.79363 10.6991C2.77769 10.7405 2.77457 10.7858 2.78469 10.829C2.79481 10.8722 2.8177 10.9114 2.85038 10.9414C2.88306 10.9715 2.92402 10.991 2.96794 10.9975C3.01186 11.0039 3.05671 10.997 3.09666 10.9776L11.0943 7.20097C11.1324 7.18297 11.1645 7.15455 11.187 7.11899C11.2096 7.08344 11.2215 7.04222 11.2215 7.00014C11.2215 6.95805 11.2096 6.91683 11.187 6.88128C11.1645 6.84573 11.1324 6.8173 11.0943 6.79931L3.09666 3.02263Z" stroke="black" stroke-width="1.33333" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M4.11255 7.00014H11.2216" stroke="black" stroke-width="1.33333" stroke-linecap="round" stroke-linejoin="round"/>
</svg>
Before: 1014 B
@@ -1,3 +0,0 @@
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M4 9.8V4.2C4 4.08954 4.08954 4 4.2 4H9.8C9.91046 4 10 4.08954 10 4.2V9.8C10 9.91046 9.91046 10 9.8 10H4.2C4.08954 10 4 9.91046 4 9.8Z" fill="#C56757" stroke="#C56757" stroke-width="1.25" stroke-linejoin="round"/>
</svg>
Before: 325 B
@@ -150,9 +150,7 @@
"context": "AgentDiff",
"bindings": {
"ctrl-y": "agent::Keep",
"ctrl-n": "agent::Reject",
"ctrl-shift-y": "agent::KeepAll",
"ctrl-shift-n": "agent::RejectAll"
"ctrl-k ctrl-r": "agent::Reject"
}
},
{
@@ -313,7 +311,6 @@
"ctrl-k t": ["pane::CloseItemsToTheRight", { "close_pinned": false }],
"ctrl-k u": ["pane::CloseCleanItems", { "close_pinned": false }],
"ctrl-k w": ["pane::CloseAllItems", { "close_pinned": false }],
"ctrl-k ctrl-w": "workspace::CloseAllItemsAndPanes",
"back": "pane::GoBack",
"ctrl-alt--": "pane::GoBack",
"ctrl-alt-_": "pane::GoForward",
@@ -354,11 +351,11 @@
"alt-shift-left": "editor::SelectSmallerSyntaxNode", // Shrink Selection
"ctrl-shift-l": "editor::SelectAllMatches", // Select all occurrences of current selection
"ctrl-f2": "editor::SelectAllMatches", // Select all occurrences of current word
"ctrl-d": ["editor::SelectNext", { "replace_newest": false }], // editor.action.addSelectionToNextFindMatch / find_under_expand
"ctrl-shift-down": ["editor::SelectNext", { "replace_newest": false }], // editor.action.addSelectionToNextFindMatch
"ctrl-shift-up": ["editor::SelectPrevious", { "replace_newest": false }], // editor.action.addSelectionToPreviousFindMatch
"ctrl-k ctrl-d": ["editor::SelectNext", { "replace_newest": true }], // editor.action.moveSelectionToNextFindMatch / find_under_expand_skip
"ctrl-k ctrl-shift-d": ["editor::SelectPrevious", { "replace_newest": true }], // editor.action.moveSelectionToPreviousFindMatch
"ctrl-d": ["editor::SelectNext", { "replace_newest": false }],
"ctrl-shift-down": ["editor::SelectNext", { "replace_newest": false }], // Add selection to Next Find Match
"ctrl-shift-up": ["editor::SelectPrevious", { "replace_newest": false }],
"ctrl-k ctrl-d": ["editor::SelectNext", { "replace_newest": true }],
"ctrl-k ctrl-shift-d": ["editor::SelectPrevious", { "replace_newest": true }],
"ctrl-k ctrl-i": "editor::Hover",
"ctrl-/": ["editor::ToggleComments", { "advance_downwards": false }],
"ctrl-u": "editor::UndoSelection",
@@ -484,8 +481,6 @@
"alt-shift-r": ["task::Spawn", { "reveal_target": "center" }]
// also possible to spawn tasks by name:
// "foo-bar": ["task::Spawn", { "task_name": "MyTask", "reveal_target": "dock" }]
// or by tag:
// "foo-bar": ["task::Spawn", { "task_tag": "MyTag" }],
}
},
{
@@ -534,7 +529,6 @@
"context": "Editor && showing_completions",
"bindings": {
"enter": "editor::ConfirmCompletion",
"shift-enter": "editor::ConfirmCompletionReplace",
"tab": "editor::ComposeCompletion"
}
},
@@ -627,28 +621,22 @@
"context": "AgentPanel",
"bindings": {
"ctrl-n": "agent::NewThread",
"ctrl-alt-n": "agent::NewTextThread",
"new": "agent::NewThread",
"ctrl-alt-n": "agent::NewPromptEditor",
"ctrl-shift-h": "agent::OpenHistory",
"ctrl-alt-c": "agent::OpenConfiguration",
"ctrl-i": "agent::ToggleProfileSelector",
"ctrl-alt-/": "assistant::ToggleModelSelector",
"ctrl-shift-a": "agent::ToggleContextPicker",
"shift-escape": "agent::ExpandMessageEditor",
"ctrl-e": "agent::ChatMode",
"ctrl-alt-e": "agent::RemoveAllContext"
}
},
{
"context": "AgentPanel > Markdown",
"bindings": {
"copy": "markdown::CopyAsMarkdown",
"ctrl-c": "markdown::CopyAsMarkdown"
}
},
{
"context": "AgentPanel && prompt_editor",
"use_key_equivalents": true,
"bindings": {
"cmd-n": "agent::NewTextThread",
"cmd-n": "agent::NewPromptEditor",
"cmd-alt-t": "agent::NewThread"
}
},
@@ -662,6 +650,7 @@
},
{
"context": "EditMessageEditor > Editor",
"use_key_equivalents": true,
"bindings": {
"escape": "menu::Cancel",
"enter": "menu::Confirm",
@@ -670,6 +659,7 @@
},
{
"context": "AgentFeedbackMessageEditor > Editor",
"use_key_equivalents": true,
"bindings": {
"escape": "menu::Cancel",
"enter": "menu::Confirm",
@@ -782,7 +772,6 @@
"shift-tab": "git_panel::FocusEditor",
"escape": "git_panel::ToggleFocus",
"ctrl-enter": "git::Commit",
"ctrl-shift-enter": "git::Amend",
"alt-enter": "menu::SecondaryConfirm",
"delete": ["git::RestoreFile", { "skip_prompt": false }],
"backspace": ["git::RestoreFile", { "skip_prompt": false }],
@@ -791,25 +780,18 @@
"ctrl-delete": ["git::RestoreFile", { "skip_prompt": false }]
}
},
{
"context": "GitPanel && CommitEditor",
"use_key_equivalents": true,
"bindings": {
"escape": "git::Cancel"
}
},
{
"context": "GitCommit > Editor",
"bindings": {
"escape": "menu::Cancel",
"enter": "editor::Newline",
"ctrl-enter": "git::Commit",
"ctrl-shift-enter": "git::Amend",
"alt-l": "git::GenerateCommitMessage"
}
},
{
"context": "GitPanel",
"use_key_equivalents": true,
"bindings": {
"ctrl-g ctrl-g": "git::Fetch",
"ctrl-g up": "git::Push",
@@ -826,7 +808,6 @@
"context": "GitDiff > Editor",
"bindings": {
"ctrl-enter": "git::Commit",
"ctrl-shift-enter": "git::Amend",
"ctrl-space": "git::StageAll",
"ctrl-shift-space": "git::UnstageAll"
}
@@ -845,7 +826,6 @@
"shift-tab": "git_panel::FocusChanges",
"enter": "editor::Newline",
"ctrl-enter": "git::Commit",
"ctrl-shift-enter": "git::Amend",
"alt-up": "git_panel::FocusChanges",
"alt-l": "git::GenerateCommitMessage"
}
@@ -242,9 +242,7 @@
"use_key_equivalents": true,
"bindings": {
"cmd-y": "agent::Keep",
"cmd-n": "agent::Reject",
"cmd-shift-y": "agent::KeepAll",
"cmd-shift-n": "agent::RejectAll"
"cmd-alt-z": "agent::Reject"
}
},
{
@@ -283,29 +281,21 @@
"use_key_equivalents": true,
"bindings": {
"cmd-n": "agent::NewThread",
"cmd-alt-n": "agent::NewTextThread",
"cmd-alt-n": "agent::NewPromptEditor",
"cmd-shift-h": "agent::OpenHistory",
"cmd-alt-c": "agent::OpenConfiguration",
"cmd-i": "agent::ToggleProfileSelector",
"cmd-alt-/": "assistant::ToggleModelSelector",
"cmd-shift-a": "agent::ToggleContextPicker",
"shift-escape": "agent::ExpandMessageEditor",
"cmd-e": "agent::ChatMode",
"cmd-alt-e": "agent::RemoveAllContext"
}
},
{
"context": "AgentPanel > Markdown",
"use_key_equivalents": true,
"bindings": {
"cmd-c": "markdown::CopyAsMarkdown"
}
},
{
"context": "AgentPanel && prompt_editor",
"use_key_equivalents": true,
"bindings": {
"cmd-n": "agent::NewTextThread",
"cmd-n": "agent::NewPromptEditor",
"cmd-alt-t": "agent::NewThread"
}
},
@@ -348,24 +338,6 @@
"enter": "agent::AcceptSuggestedContext"
}
},
{
"context": "AgentConfiguration",
"bindings": {
"ctrl--": "pane::GoBack"
}
},
{
"context": "ThreadHistory",
"bindings": {
"ctrl--": "pane::GoBack"
}
},
{
"context": "ThreadHistory",
"bindings": {
"ctrl--": "pane::GoBack"
}
},
{
"context": "ThreadHistory > Editor",
"bindings": {
@@ -458,8 +430,7 @@
"cmd-k e": ["pane::CloseItemsToTheLeft", { "close_pinned": false }],
"cmd-k t": ["pane::CloseItemsToTheRight", { "close_pinned": false }],
"cmd-k u": ["pane::CloseCleanItems", { "close_pinned": false }],
"cmd-k w": ["pane::CloseAllItems", { "close_pinned": false }],
"cmd-k cmd-w": "workspace::CloseAllItemsAndPanes",
"cmd-k cmd-w": ["pane::CloseAllItems", { "close_pinned": false }],
"cmd-f": "project_search::ToggleFocus",
"cmd-g": "search::SelectNextMatch",
"cmd-shift-g": "search::SelectPreviousMatch",
@@ -491,15 +462,12 @@
"alt-shift-down": "editor::DuplicateLineDown",
"ctrl-shift-right": "editor::SelectLargerSyntaxNode", // Expand Selection
"ctrl-shift-left": "editor::SelectSmallerSyntaxNode", // Shrink Selection
"cmd-d": ["editor::SelectNext", { "replace_newest": false }], // editor.action.addSelectionToNextFindMatch / find_under_expand
"cmd-d": ["editor::SelectNext", { "replace_newest": false }], // Add selection to Next Find Match
"cmd-shift-l": "editor::SelectAllMatches", // Select all occurrences of current selection
"cmd-f2": "editor::SelectAllMatches", // Select all occurrences of current word
"cmd-k cmd-d": ["editor::SelectNext", { "replace_newest": true }], // editor.action.moveSelectionToNextFindMatch / find_under_expand_skip
// macOS binds `ctrl-cmd-d` to Show Dictionary which breaks these two binds
// To use `ctrl-cmd-d` or `ctrl-k ctrl-cmd-d` in Zed you must execute this command and then restart:
// defaults write com.apple.symbolichotkeys AppleSymbolicHotKeys -dict-add 70 '<dict><key>enabled</key><false/></dict>'
"ctrl-cmd-d": ["editor::SelectPrevious", { "replace_newest": false }], // editor.action.addSelectionToPreviousFindMatch
"cmd-k ctrl-cmd-d": ["editor::SelectPrevious", { "replace_newest": true }], // editor.action.moveSelectionToPreviousFindMatch
"ctrl-cmd-d": ["editor::SelectPrevious", { "replace_newest": false }],
"cmd-k cmd-d": ["editor::SelectNext", { "replace_newest": true }],
"cmd-k ctrl-cmd-d": ["editor::SelectPrevious", { "replace_newest": true }],
"cmd-k cmd-i": "editor::Hover",
"cmd-/": ["editor::ToggleComments", { "advance_downwards": false }],
"cmd-u": "editor::UndoSelection",
@@ -531,7 +499,7 @@
"cmd-k cmd-9": ["editor::FoldAtLevel", 9],
"cmd-k cmd-0": "editor::FoldAll",
"cmd-k cmd-j": "editor::UnfoldAll",
// Using `ctrl-space` / `ctrl-shift-space` in Zed requires disabling the macOS global shortcut.
// Using `ctrl-space` in Zed requires disabling the macOS global shortcut.
// System Preferences->Keyboard->Keyboard Shortcuts->Input Sources->Select the previous input source (uncheck)
"ctrl-space": "editor::ShowCompletions",
"ctrl-shift-space": "editor::ShowWordCompletions",
@@ -639,8 +607,6 @@
"ctrl-alt-shift-r": ["task::Spawn", { "reveal_target": "center" }]
// also possible to spawn tasks by name:
// "foo-bar": ["task::Spawn", { "task_name": "MyTask", "reveal_target": "dock" }]
// or by tag:
// "foo-bar": ["task::Spawn", { "task_tag": "MyTag" }],
}
},
// Bindings from Sublime Text
@@ -687,7 +653,6 @@
"use_key_equivalents": true,
"bindings": {
"enter": "editor::ConfirmCompletion",
"shift-enter": "editor::ConfirmCompletionReplace",
"tab": "editor::ComposeCompletion"
}
},
@@ -855,26 +820,17 @@
"shift-tab": "git_panel::FocusEditor",
"escape": "git_panel::ToggleFocus",
"cmd-enter": "git::Commit",
"cmd-shift-enter": "git::Amend",
"backspace": ["git::RestoreFile", { "skip_prompt": false }],
"delete": ["git::RestoreFile", { "skip_prompt": false }],
"cmd-backspace": ["git::RestoreFile", { "skip_prompt": true }],
"cmd-delete": ["git::RestoreFile", { "skip_prompt": true }]
}
},
{
"context": "GitPanel && CommitEditor",
"use_key_equivalents": true,
"bindings": {
"escape": "git::Cancel"
}
},
{
"context": "GitDiff > Editor",
"use_key_equivalents": true,
"bindings": {
"cmd-enter": "git::Commit",
"cmd-shift-enter": "git::Amend",
"cmd-ctrl-y": "git::StageAll",
"cmd-ctrl-shift-y": "git::UnstageAll"
}
@@ -885,7 +841,6 @@
"bindings": {
"enter": "editor::Newline",
"cmd-enter": "git::Commit",
"cmd-shift-enter": "git::Amend",
"tab": "git_panel::FocusChanges",
"shift-tab": "git_panel::FocusChanges",
"alt-up": "git_panel::FocusChanges",
@@ -915,7 +870,6 @@
"enter": "editor::Newline",
"escape": "menu::Cancel",
"cmd-enter": "git::Commit",
"cmd-shift-enter": "git::Amend",
"alt-tab": "git::GenerateCommitMessage"
}
},
@@ -1021,8 +975,6 @@
"cmd-home": "terminal::ScrollToTop",
"shift-end": "terminal::ScrollToBottom",
"cmd-end": "terminal::ScrollToBottom",
// Using `ctrl-shift-space` in Zed requires disabling the macOS global shortcut.
// System Preferences->Keyboard->Keyboard Shortcuts->Input Sources->Select the previous input source (uncheck)
"ctrl-shift-space": "terminal::ToggleViMode",
"ctrl-k up": "pane::SplitUp",
"ctrl-k down": "pane::SplitDown",
@@ -58,8 +58,7 @@
"ctrl-shift-home": "editor::SelectToBeginning",
"ctrl-shift-end": "editor::SelectToEnd",
"ctrl-f8": "editor::ToggleBreakpoint",
"ctrl-shift-f8": "editor::EditLogBreakpoint",
"ctrl-shift-u": "editor::ToggleCase"
"ctrl-shift-f8": "editor::EditLogBreakpoint"
}
},
{
@@ -37,8 +37,6 @@
"ctrl-shift-a": "editor::SelectLargerSyntaxNode",
"ctrl-shift-d": "editor::DuplicateSelection",
"alt-f3": "editor::SelectAllMatches", // find_all_under
// "ctrl-f3": "", // find_under (cancels any selections)
// "cmd-alt-shift-g": "" // find_under_prev (cancels any selections)
"f9": "editor::SortLinesCaseSensitive",
"ctrl-f9": "editor::SortLinesCaseInsensitive",
"f12": "editor::GoToDefinition",
@@ -51,9 +49,7 @@
"ctrl-k ctrl-l": "editor::ConvertToLowerCase",
"shift-alt-m": "markdown::OpenPreviewToTheSide",
"ctrl-backspace": "editor::DeleteToPreviousWordStart",
"ctrl-delete": "editor::DeleteToNextWordEnd",
"f3": "editor::FindNextMatch",
"shift-f3": "editor::FindPreviousMatch"
"ctrl-delete": "editor::DeleteToNextWordEnd"
}
},
{
@@ -62,12 +58,6 @@
"ctrl-r": "outline::Toggle"
}
},
{
"context": "Editor && !agent_diff",
"bindings": {
"ctrl-k ctrl-z": "git::Restore"
}
},
{
"context": "Pane",
"bindings": {
@@ -55,8 +55,7 @@
|
||||
"cmd-shift-home": "editor::SelectToBeginning",
|
||||
"cmd-shift-end": "editor::SelectToEnd",
|
||||
"ctrl-f8": "editor::ToggleBreakpoint",
|
||||
"ctrl-shift-f8": "editor::EditLogBreakpoint",
|
||||
"cmd-shift-u": "editor::ToggleCase"
|
||||
"ctrl-shift-f8": "editor::EditLogBreakpoint"
|
||||
}
|
||||
},
|
||||
{
|
||||
|
||||
@@ -38,8 +38,6 @@
|
||||
"cmd-shift-a": "editor::SelectLargerSyntaxNode",
|
||||
"cmd-shift-d": "editor::DuplicateSelection",
|
||||
"ctrl-cmd-g": "editor::SelectAllMatches", // find_all_under
|
||||
// "cmd-alt-g": "", // find_under (cancels any selections)
|
||||
// "cmd-alt-shift-g": "" // find_under_prev (cancels any selections)
|
||||
"f5": "editor::SortLinesCaseSensitive",
|
||||
"ctrl-f5": "editor::SortLinesCaseInsensitive",
|
||||
"shift-f12": "editor::FindAllReferences",
|
||||
@@ -53,9 +51,7 @@
|
||||
"cmd-shift-j": "editor::JoinLines",
|
||||
"shift-alt-m": "markdown::OpenPreviewToTheSide",
|
||||
"ctrl-backspace": "editor::DeleteToPreviousWordStart",
|
||||
"ctrl-delete": "editor::DeleteToNextWordEnd",
|
||||
"cmd-g": "editor::FindNextMatch",
|
||||
"cmd-shift-g": "editor::FindPreviousMatch"
|
||||
"ctrl-delete": "editor::DeleteToNextWordEnd"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -64,12 +60,6 @@
|
||||
"cmd-r": "outline::Toggle"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Editor && !agent_diff",
|
||||
"bindings": {
|
||||
"cmd-k cmd-z": "git::Restore"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Pane",
|
||||
"bindings": {
|
||||
|
||||
@@ -44,12 +44,6 @@
|
||||
"[ /": "vim::PreviousComment",
|
||||
"] *": "vim::NextComment",
|
||||
"] /": "vim::NextComment",
|
||||
"[ -": "vim::PreviousLesserIndent",
|
||||
"[ +": "vim::PreviousGreaterIndent",
|
||||
"[ =": "vim::PreviousSameIndent",
|
||||
"] -": "vim::NextLesserIndent",
|
||||
"] +": "vim::NextGreaterIndent",
|
||||
"] =": "vim::NextSameIndent",
|
||||
// Word motions
|
||||
"w": "vim::NextWordStart",
|
||||
"e": "vim::NextWordEnd",
|
||||
@@ -203,7 +197,6 @@
|
||||
"c": "vim::PushChange",
|
||||
"shift-c": "vim::ChangeToEndOfLine",
|
||||
"d": "vim::PushDelete",
|
||||
"delete": "vim::DeleteRight",
|
||||
"shift-d": "vim::DeleteToEndOfLine",
|
||||
"shift-j": "vim::JoinLines",
|
||||
"g shift-j": "vim::JoinLinesNoWhitespace",
|
||||
@@ -342,106 +335,27 @@
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "vim_mode == helix_normal && !menu",
|
||||
"context": "vim_mode == helix_normal",
|
||||
"bindings": {
|
||||
"escape": "editor::Cancel",
|
||||
"ctrl-[": "editor::Cancel",
|
||||
":": "command_palette::Toggle",
|
||||
"shift-d": "vim::DeleteToEndOfLine",
|
||||
"shift-j": "vim::JoinLines",
|
||||
"y": "editor::Copy",
|
||||
"shift-y": "vim::YankLine",
|
||||
"i": "vim::InsertBefore",
|
||||
"shift-i": "vim::InsertFirstNonWhitespace",
|
||||
"a": "vim::InsertAfter",
|
||||
"shift-a": "vim::InsertEndOfLine",
|
||||
"o": "vim::InsertLineBelow",
|
||||
"shift-o": "vim::InsertLineAbove",
|
||||
"~": "vim::ChangeCase",
|
||||
"ctrl-a": "vim::Increment",
|
||||
"ctrl-x": "vim::Decrement",
|
||||
"p": "vim::Paste",
|
||||
"shift-p": ["vim::Paste", { "before": true }],
|
||||
"u": "vim::Undo",
|
||||
"ctrl-r": "vim::Redo",
|
||||
"r": "vim::PushReplace",
|
||||
"s": "vim::Substitute",
|
||||
"shift-s": "vim::SubstituteLine",
|
||||
">": "vim::Indent",
|
||||
"<": "vim::Outdent",
|
||||
"=": "vim::AutoIndent",
|
||||
"g u": "vim::PushLowercase",
|
||||
"g shift-u": "vim::PushUppercase",
|
||||
"g ~": "vim::PushOppositeCase",
|
||||
"\"": "vim::PushRegister",
|
||||
"g q": "vim::PushRewrap",
|
||||
"g w": "vim::PushRewrap",
|
||||
"ctrl-pagedown": "pane::ActivateNextItem",
|
||||
"ctrl-pageup": "pane::ActivatePreviousItem",
|
||||
"insert": "vim::InsertBefore",
|
||||
// tree-sitter related commands
|
||||
"[ x": "editor::SelectLargerSyntaxNode",
|
||||
"] x": "editor::SelectSmallerSyntaxNode",
|
||||
"] d": "editor::GoToDiagnostic",
|
||||
"[ d": "editor::GoToPreviousDiagnostic",
|
||||
"] c": "editor::GoToHunk",
|
||||
"[ c": "editor::GoToPreviousHunk",
|
||||
// Goto mode
|
||||
"g n": "pane::ActivateNextItem",
|
||||
"g p": "pane::ActivatePreviousItem",
|
||||
// "tab": "pane::ActivateNextItem",
|
||||
// "shift-tab": "pane::ActivatePrevItem",
|
||||
"shift-h": "pane::ActivatePreviousItem",
|
||||
"shift-l": "pane::ActivateNextItem",
|
||||
"g l": "vim::EndOfLine",
|
||||
"g h": "vim::StartOfLine",
|
||||
"g s": "vim::FirstNonWhitespace", // "g s" default behavior is "space s"
|
||||
"g e": "vim::EndOfDocument",
|
||||
"g y": "editor::GoToTypeDefinition",
|
||||
"g r": "editor::FindAllReferences", // zed specific
|
||||
"g t": "vim::WindowTop",
|
||||
"g c": "vim::WindowMiddle",
|
||||
"g b": "vim::WindowBottom",
|
||||
|
||||
"x": "editor::SelectLine",
|
||||
"shift-x": "editor::SelectLine",
|
||||
// Window mode
|
||||
"space w h": "workspace::ActivatePaneLeft",
|
||||
"space w l": "workspace::ActivatePaneRight",
|
||||
"space w k": "workspace::ActivatePaneUp",
|
||||
"space w j": "workspace::ActivatePaneDown",
|
||||
"space w q": "pane::CloseActiveItem",
|
||||
"space w s": "pane::SplitRight",
|
||||
"space w r": "pane::SplitRight",
|
||||
"space w v": "pane::SplitDown",
|
||||
"space w d": "pane::SplitDown",
|
||||
// Space mode
|
||||
"space f": "file_finder::Toggle",
|
||||
"space k": "editor::Hover",
|
||||
"space s": "outline::Toggle",
|
||||
"space shift-s": "project_symbols::Toggle",
|
||||
"space d": "editor::GoToDiagnostic",
|
||||
"space r": "editor::Rename",
|
||||
"space a": "editor::ToggleCodeActions",
|
||||
"space h": "editor::SelectAllMatches",
|
||||
"space c": "editor::ToggleComments",
|
||||
"space y": "editor::Copy",
|
||||
"space p": "editor::Paste",
|
||||
// Match mode
|
||||
"m m": "vim::Matching",
|
||||
"m i w": ["workspace::SendKeystrokes", "v i w"],
|
||||
"shift-u": "editor::Redo",
|
||||
"ctrl-c": "editor::ToggleComments",
|
||||
"d": "vim::HelixDelete",
|
||||
"c": "vim::Substitute",
|
||||
"shift-c": "editor::AddSelectionBelow"
|
||||
"w": "vim::NextWordStart",
|
||||
"e": "vim::NextWordEnd",
|
||||
"b": "vim::PreviousWordStart",
|
||||
|
||||
"h": "vim::Left",
|
||||
"j": "vim::Down",
|
||||
"k": "vim::Up",
|
||||
"l": "vim::Right"
|
||||
}
|
||||
},
|
||||
|
||||
{
|
||||
"context": "vim_mode == insert && !(showing_code_actions || showing_completions)",
|
||||
"bindings": {
|
||||
"ctrl-p": "editor::ShowWordCompletions",
|
||||
"ctrl-n": "editor::ShowWordCompletions"
|
||||
"ctrl-p": "editor::ShowCompletions",
|
||||
"ctrl-n": "editor::ShowCompletions"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -539,7 +453,6 @@
|
||||
"bindings": {
|
||||
"d": "vim::CurrentLine",
|
||||
"s": "vim::PushDeleteSurrounds",
|
||||
"v": "vim::PushForcedMotion", // "d v"
|
||||
"o": "editor::ToggleSelectedDiffHunks", // "d o"
|
||||
"shift-o": "git::ToggleStaged",
|
||||
"p": "git::Restore", // "d p"
|
||||
@@ -588,7 +501,6 @@
|
||||
"context": "vim_operator == y",
|
||||
"bindings": {
|
||||
"y": "vim::CurrentLine",
|
||||
"v": "vim::PushForcedMotion",
|
||||
"s": ["vim::PushAddSurrounds", {}]
|
||||
}
|
||||
},
|
||||
|
||||
@@ -6,19 +6,9 @@ You are an AI assistant integrated into a code editor. You have the programming
It will be up to you to decide which of these you are doing based on what the user has told you. When unclear, ask clarifying questions to understand the user's intent before proceeding.

You should only perform actions that modify the user's system if explicitly requested by the user:
- If the user asks a question about how to accomplish a task, provide guidance or information, and use read-only tools (e.g., search) to assist. You may suggest potential actions, but do not directly modify the user's system without explicit instruction.
- If the user asks a question about how to accomplish a task, provide guidance or information, and use read-only tools (e.g., search) to assist. You may suggest potential actions, but do not directly modify the user’s system without explicit instruction.
- If the user clearly requests that you perform an action, carry out the action directly without explaining why you are doing so.

When answering questions, it's okay to give incomplete examples containing comments about what would go there in a real version. When being asked to directly perform tasks on the code base, you must ALWAYS make fully working code. You may never "simplify" the code by omitting or deleting functionality you know the user has requested, and you must NEVER write comments like "in a full version, this would..." - instead, you must actually implement the real version. Don't be lazy!

Note that project files are automatically backed up. The user can always get them back later if anything goes wrong, so there's
no need to create backup files (e.g. `.bak` files) because these files will just take up unnecessary space on the user's disk.

When attempting to resolve issues around failing tests, never simply remove the failing tests. Unless the user explicitly asks you to remove tests, ALWAYS attempt to fix the code causing the tests to fail.

Ignore "TODO"-type comments unless they're relevant to the user's explicit request or the user specifically asks you to address them. It is, however, okay to include them in codebase summaries.

<style>
Editing code:
- Make sure to take previous edits into account.
- The edits you perform might lead to errors or warnings. At the end of your changes, check whether you introduced any problems, and fix them before providing a summary of the changes you made.
@@ -44,106 +34,6 @@ Responding:
|
||||
For example, don't say "Now I'm going to check diagnostics to see if there are any warnings or errors," followed by running a tool which checks diagnostics and reports warnings or errors; instead, just request the tool call without saying anything.
|
||||
- All tool results are provided to you automatically, so DO NOT thank the user when this happens.
|
||||
|
||||
Whenever you mention a code block, you MUST use ONLY the following format:
|
||||
|
||||
```language path/to/Something.blah#L123-456
|
||||
(code goes here)
|
||||
```
|
||||
|
||||
The `#L123-456` means the line number range 123 through 456, and the path/to/Something.blah
|
||||
is a path in the project. (If there is no valid path in the project, then you can use
|
||||
/dev/null/path.extension for its path.) This is the ONLY valid way to format code blocks, because the Markdown parser
|
||||
does not understand the more common ```language syntax, or bare ``` blocks. It only
|
||||
understands this path-based syntax, and if the path is missing, then it will error and you will have to do it over again.
|
||||
|
||||
Just to be really clear about this, if you ever find yourself writing three backticks followed by a language name, STOP!
|
||||
You have made a mistake. You can only ever put paths after triple backticks!
|
||||
|
||||
<example>
|
||||
Based on all the information I've gathered, here's a summary of how this system works:
|
||||
1. The README file is loaded into the system.
|
||||
2. The system finds the first two headers, including everything in between. In this case, that would be:
|
||||
|
||||
```path/to/README.md#L8-12
|
||||
# First Header
|
||||
|
||||
This is the info under the first header.
|
||||
|
||||
## Sub-header
|
||||
```
|
||||
|
||||
3. Then the system finds the last header in the README:
|
||||
|
||||
```path/to/README.md#L27-29
|
||||
## Last Header
|
||||
|
||||
This is the last header in the README.
|
||||
```
|
||||
|
||||
4. Finally, it passes this information on to the next process.
|
||||
</example>
|
||||
|
||||
<example>
|
||||
In Markdown, hash marks signify headings. For example:
|
||||
|
||||
```/dev/null/example.md#L1-3
|
||||
# Level 1 heading
|
||||
## Level 2 heading
|
||||
### Level 3 heading
|
||||
```
|
||||
</example>
|
||||
|
||||
Here are examples of ways you must never render code blocks:
|
||||
|
||||
<bad_example_do_not_do_this>
|
||||
In Markdown, hash marks signify headings. For example:
|
||||
|
||||
```
|
||||
# Level 1 heading
|
||||
## Level 2 heading
|
||||
### Level 3 heading
|
||||
```
|
||||
</bad_example_do_not_do_this>
|
||||
|
||||
This example is unacceptable because it does not include the path.
|
||||
|
||||
<bad_example_do_not_do_this>
|
||||
In Markdown, hash marks signify headings. For example:
|
||||
|
||||
```markdown
|
||||
# Level 1 heading
|
||||
## Level 2 heading
|
||||
### Level 3 heading
|
||||
```
|
||||
</bad_example_do_not_do_this>
|
||||
|
||||
This example is unacceptable because it has the language instead of the path.
|
||||
|
||||
<bad_example_do_not_do_this>
|
||||
In Markdown, hash marks signify headings. For example:
|
||||
|
||||
# Level 1 heading
|
||||
## Level 2 heading
|
||||
### Level 3 heading
|
||||
</bad_example_do_not_do_this>
|
||||
|
||||
This example is unacceptable because it uses indentation to mark the code block
|
||||
instead of backticks with a path.
|
||||
|
||||
<bad_example_do_not_do_this>
|
||||
In Markdown, hash marks signify headings. For example:
|
||||
|
||||
```markdown
|
||||
/dev/null/example.md#L1-3
|
||||
# Level 1 heading
|
||||
## Level 2 heading
|
||||
### Level 3 heading
|
||||
```
|
||||
</bad_example_do_not_do_this>
|
||||
|
||||
This example is unacceptable because the path is in the wrong place. The path must be directly after the opening backticks.
|
||||
</style>
|
||||
|
||||
The user has opened a project that contains the following root directories/files. Whenever you specify a path in the project, it must be a relative path which begins with one of these root directories/files:
|
||||
|
||||
{{#each worktrees}}
|
||||
@@ -155,7 +45,7 @@ There are rules that apply to these root directories:
|
||||
{{#each worktrees}}
|
||||
{{#if rules_file}}
|
||||
|
||||
`{{root_name}}/{{rules_file.path_in_worktree}}`:
|
||||
`{{root_name}}/{{rules_file.rel_path}}`:
|
||||
|
||||
``````
|
||||
{{{rules_file.text}}}
|
||||
@@ -163,8 +53,3 @@ There are rules that apply to these root directories:
|
||||
{{/if}}
|
||||
{{/each}}
|
||||
{{/if}}
|
||||
|
||||
<user_environment>
|
||||
Operating System: {{os}} ({{arch}})
|
||||
Shell: {{shell}}
|
||||
</user_environment>
|
||||
|
||||
8
assets/prompts/project_slash_command.hbs
Normal file
@@ -0,0 +1,8 @@
A software developer is asking a question about their project. The source files in their project have been indexed into a database of semantic text embeddings.
Your task is to generate a list of 4 diverse search queries that can be run on this embedding database, in order to retrieve a list of code snippets
that are relevant to the developer's question. Redundant search queries will be heavily penalized, so only include another query if it's sufficiently
distinct from previous ones.

Here is the question that's been asked, together with context that the developer has added manually:

{{{context_buffer}}}
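The template above is only the prompt text; the diff does not show how it gets filled in. As a rough sketch, assuming the `handlebars` and `serde_json` crates and a hypothetical `render_query_prompt` helper (not Zed's actual plumbing), rendering it could look like this:

```rust
use handlebars::Handlebars;
use serde_json::json;

// Hypothetical helper: substitutes the developer's question (plus any manually
// added context) for `{{{context_buffer}}}` before the rendered prompt is sent
// to a model that proposes the four search queries.
fn render_query_prompt(
    template: &str,
    question_with_context: &str,
) -> Result<String, handlebars::RenderError> {
    let registry = Handlebars::new();
    // Triple braces in the template keep the buffer text unescaped.
    registry.render_template(
        template,
        &json!({ "context_buffer": question_with_context }),
    )
}
```

The four queries returned by the model could then be split on newlines and deduplicated before querying the embedding index, since the prompt explicitly penalizes redundant queries.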
@@ -80,8 +80,6 @@
|
||||
// Values are clamped to the [0.0, 1.0] range.
|
||||
"inactive_opacity": 1.0
|
||||
},
|
||||
// Layout mode of the bottom dock. Defaults to "contained"
|
||||
"bottom_dock_layout": "contained",
|
||||
// The direction that you want to split panes horizontally. Defaults to "up"
|
||||
"pane_split_direction_horizontal": "up",
|
||||
// The direction that you want to split panes vertically. Defaults to "left"
|
||||
@@ -626,14 +624,14 @@
|
||||
// The provider to use.
|
||||
"provider": "zed.dev",
|
||||
// The model to use.
|
||||
"model": "claude-3-7-sonnet-latest"
|
||||
"model": "claude-3-5-sonnet-latest"
|
||||
},
|
||||
// The model to use when applying edits from the assistant.
|
||||
"editor_model": {
|
||||
// The provider to use.
|
||||
"provider": "zed.dev",
|
||||
// The model to use.
|
||||
"model": "claude-3-7-sonnet-latest"
|
||||
"model": "claude-3-5-sonnet-latest"
|
||||
},
|
||||
// When enabled, the agent can run potentially destructive actions without asking for your confirmation.
|
||||
"always_allow_tool_actions": false,
|
||||
@@ -644,7 +642,6 @@
|
||||
// We don't know which of the context server tools are safe for the "Ask" profile, so we don't enable them by default.
|
||||
// "enable_all_context_servers": true,
|
||||
"tools": {
|
||||
"contents": true,
|
||||
"diagnostics": true,
|
||||
"fetch": true,
|
||||
"list_directory": false,
|
||||
@@ -659,11 +656,9 @@
|
||||
"name": "Write",
|
||||
"enable_all_context_servers": true,
|
||||
"tools": {
|
||||
"terminal": true,
|
||||
"bash": true,
|
||||
"batch_tool": true,
|
||||
"code_actions": true,
|
||||
"code_symbols": true,
|
||||
"contents": true,
|
||||
"copy_path": false,
|
||||
"create_file": true,
|
||||
"delete_path": false,
|
||||
@@ -676,7 +671,6 @@
|
||||
"path_search": true,
|
||||
"read_file": true,
|
||||
"regex_search": true,
|
||||
"rename": true,
|
||||
"symbol_info": true,
|
||||
"thinking": true
|
||||
}
|
||||
@@ -1142,8 +1136,7 @@
|
||||
"code_actions_on_format": {},
|
||||
// Settings related to running tasks.
|
||||
"tasks": {
|
||||
"variables": {},
|
||||
"enabled": true
|
||||
"variables": {}
|
||||
},
|
||||
// An object whose keys are language names, and whose values
|
||||
// are arrays of filenames or extensions of files that should
|
||||
@@ -1207,27 +1200,7 @@
|
||||
// When set to 0, waits indefinitely.
|
||||
//
|
||||
// Default: 0
|
||||
"lsp_fetch_timeout_ms": 0,
|
||||
// Controls what range to replace when accepting LSP completions.
|
||||
//
|
||||
// When LSP servers give an `InsertReplaceEdit` completion, they provide two ranges: `insert` and `replace`. Usually, `insert`
|
||||
// contains the word prefix before your cursor and `replace` contains the whole word.
|
||||
//
|
||||
// Effectively, this setting just changes whether Zed will use the received range for `insert` or `replace`, so the results may
|
||||
// differ depending on the underlying LSP server.
|
||||
//
|
||||
// Possible values:
|
||||
// 1. "insert"
|
||||
// Replaces text before the cursor, using the `insert` range described in the LSP specification.
|
||||
// 2. "replace"
|
||||
// Replaces text before and after the cursor, using the `replace` range described in the LSP specification.
|
||||
// 3. "replace_subsequence"
|
||||
// Behaves like `"replace"` if the text that would be replaced is a subsequence of the completion text,
|
||||
// and like `"insert"` otherwise.
|
||||
// 4. "replace_suffix"
|
||||
// Behaves like `"replace"` if the text after the cursor is a suffix of the completion, and like
|
||||
// `"insert"` otherwise.
|
||||
"lsp_insert_mode": "replace_suffix"
|
||||
"lsp_fetch_timeout_ms": 0
|
||||
},
|
||||
// Different settings for specific languages.
|
||||
"languages": {
|
||||
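The comment block removed above describes the four `lsp_insert_mode` values in prose. As an illustrative sketch only (the enum and function names below are made up, not Zed's implementation), the choice between the LSP `insert` and `replace` ranges could be expressed like this:

```rust
// Hypothetical types mirroring the four documented modes; not Zed's actual code.
#[derive(Clone, Copy)]
enum LspInsertMode {
    Insert,
    Replace,
    ReplaceSubsequence,
    ReplaceSuffix,
}

/// Returns true when the completion should use the server's `replace` range,
/// false when it should use the `insert` range.
fn use_replace_range(
    mode: LspInsertMode,
    replaced_text: &str,     // text covered by the `replace` range
    text_after_cursor: &str, // the part of that text after the cursor
    completion_text: &str,
) -> bool {
    match mode {
        LspInsertMode::Insert => false,
        LspInsertMode::Replace => true,
        // "replace" only if what would be replaced is a subsequence of the completion.
        LspInsertMode::ReplaceSubsequence => is_subsequence(replaced_text, completion_text),
        // "replace" only if the text after the cursor is a suffix of the completion.
        LspInsertMode::ReplaceSuffix => completion_text.ends_with(text_after_cursor),
    }
}

fn is_subsequence(needle: &str, haystack: &str) -> bool {
    let mut haystack_chars = haystack.chars();
    needle
        .chars()
        .all(|ch| haystack_chars.by_ref().any(|c| c == ch))
}
```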
@@ -1463,8 +1436,6 @@
|
||||
"lsp": {
|
||||
// Specify the LSP name as a key here.
|
||||
// "rust-analyzer": {
|
||||
// // A special flag for rust-analyzer integration, to use server-provided tasks
|
||||
// "enable_lsp_tasks": true,
|
||||
// // These initialization options are merged into Zed's defaults
|
||||
// "initialization_options": {
|
||||
// "check": {
|
||||
|
||||
@@ -43,8 +43,6 @@
|
||||
// "args": ["--login"]
|
||||
// }
|
||||
// }
|
||||
"shell": "system",
|
||||
// Represents the tags for inline runnable indicators, or spawning multiple tasks at once.
|
||||
"tags": []
|
||||
"shell": "system"
|
||||
}
|
||||
]
|
||||
|
||||
@@ -87,9 +87,9 @@
|
||||
"terminal.ansi.blue": "#83a598ff",
|
||||
"terminal.ansi.bright_blue": "#414f4aff",
|
||||
"terminal.ansi.dim_blue": "#c0d2cbff",
|
||||
"terminal.ansi.magenta": "#d3869bff",
|
||||
"terminal.ansi.bright_magenta": "#8e5868ff",
|
||||
"terminal.ansi.dim_magenta": "#ff9ebbff",
|
||||
"terminal.ansi.magenta": "#a89984ff",
|
||||
"terminal.ansi.bright_magenta": "#514a41ff",
|
||||
"terminal.ansi.dim_magenta": "#d2cabfff",
|
||||
"terminal.ansi.cyan": "#8ec07cff",
|
||||
"terminal.ansi.bright_cyan": "#45603eff",
|
||||
"terminal.ansi.dim_cyan": "#c7dfbdff",
|
||||
@@ -472,9 +472,9 @@
|
||||
"terminal.ansi.blue": "#83a598ff",
|
||||
"terminal.ansi.bright_blue": "#414f4aff",
|
||||
"terminal.ansi.dim_blue": "#c0d2cbff",
|
||||
"terminal.ansi.magenta": "#d3869bff",
|
||||
"terminal.ansi.bright_magenta": "#8e5868ff",
|
||||
"terminal.ansi.dim_magenta": "#ff9ebbff",
|
||||
"terminal.ansi.magenta": "#a89984ff",
|
||||
"terminal.ansi.bright_magenta": "#514a41ff",
|
||||
"terminal.ansi.dim_magenta": "#d2cabfff",
|
||||
"terminal.ansi.cyan": "#8ec07cff",
|
||||
"terminal.ansi.bright_cyan": "#45603eff",
|
||||
"terminal.ansi.dim_cyan": "#c7dfbdff",
|
||||
@@ -857,9 +857,9 @@
|
||||
"terminal.ansi.blue": "#83a598ff",
|
||||
"terminal.ansi.bright_blue": "#414f4aff",
|
||||
"terminal.ansi.dim_blue": "#c0d2cbff",
|
||||
"terminal.ansi.magenta": "#d3869bff",
|
||||
"terminal.ansi.bright_magenta": "#8e5868ff",
|
||||
"terminal.ansi.dim_magenta": "#ff9ebbff",
|
||||
"terminal.ansi.magenta": "#a89984ff",
|
||||
"terminal.ansi.bright_magenta": "#514a41ff",
|
||||
"terminal.ansi.dim_magenta": "#d2cabfff",
|
||||
"terminal.ansi.cyan": "#8ec07cff",
|
||||
"terminal.ansi.bright_cyan": "#45603eff",
|
||||
"terminal.ansi.dim_cyan": "#c7dfbdff",
|
||||
@@ -1242,9 +1242,9 @@
|
||||
"terminal.ansi.blue": "#0b6678ff",
|
||||
"terminal.ansi.bright_blue": "#8fb0baff",
|
||||
"terminal.ansi.dim_blue": "#14333bff",
|
||||
"terminal.ansi.magenta": "#8f3e71ff",
|
||||
"terminal.ansi.bright_magenta": "#c76da0ff",
|
||||
"terminal.ansi.dim_magenta": "#5c2848ff",
|
||||
"terminal.ansi.magenta": "#7c6f64ff",
|
||||
"terminal.ansi.bright_magenta": "#bcb5afff",
|
||||
"terminal.ansi.dim_magenta": "#3e3833ff",
|
||||
"terminal.ansi.cyan": "#437b59ff",
|
||||
"terminal.ansi.bright_cyan": "#9fbca8ff",
|
||||
"terminal.ansi.dim_cyan": "#253e2eff",
|
||||
@@ -1627,9 +1627,9 @@
|
||||
"terminal.ansi.blue": "#0b6678ff",
|
||||
"terminal.ansi.bright_blue": "#8fb0baff",
|
||||
"terminal.ansi.dim_blue": "#14333bff",
|
||||
"terminal.ansi.magenta": "#8f3e71ff",
|
||||
"terminal.ansi.bright_magenta": "#c76da0ff",
|
||||
"terminal.ansi.dim_magenta": "#5c2848ff",
|
||||
"terminal.ansi.magenta": "#7c6f64ff",
|
||||
"terminal.ansi.bright_magenta": "#bcb5afff",
|
||||
"terminal.ansi.dim_magenta": "#3e3833ff",
|
||||
"terminal.ansi.cyan": "#437b59ff",
|
||||
"terminal.ansi.bright_cyan": "#9fbca8ff",
|
||||
"terminal.ansi.dim_cyan": "#253e2eff",
|
||||
@@ -2012,9 +2012,9 @@
|
||||
"terminal.ansi.blue": "#0b6678ff",
|
||||
"terminal.ansi.bright_blue": "#8fb0baff",
|
||||
"terminal.ansi.dim_blue": "#14333bff",
|
||||
"terminal.ansi.magenta": "#8f3e71ff",
|
||||
"terminal.ansi.bright_magenta": "#c76da0ff",
|
||||
"terminal.ansi.dim_magenta": "#5c2848ff",
|
||||
"terminal.ansi.magenta": "#7c6f64ff",
|
||||
"terminal.ansi.bright_magenta": "#bcb5afff",
|
||||
"terminal.ansi.dim_magenta": "#3e3833ff",
|
||||
"terminal.ansi.cyan": "#437b59ff",
|
||||
"terminal.ansi.bright_cyan": "#9fbca8ff",
|
||||
"terminal.ansi.dim_cyan": "#253e2eff",
|
||||
|
||||
@@ -11,22 +11,13 @@ use language::{BinaryStatus, LanguageRegistry, LanguageServerId};
|
||||
use project::{
|
||||
EnvironmentErrorMessage, LanguageServerProgress, LspStoreEvent, Project,
|
||||
ProjectEnvironmentEvent,
|
||||
git_store::{GitStoreEvent, Repository},
|
||||
};
|
||||
use smallvec::SmallVec;
|
||||
use std::{
|
||||
cmp::Reverse,
|
||||
fmt::Write,
|
||||
path::Path,
|
||||
sync::Arc,
|
||||
time::{Duration, Instant},
|
||||
};
|
||||
use std::{cmp::Reverse, fmt::Write, path::Path, sync::Arc, time::Duration};
|
||||
use ui::{ButtonLike, ContextMenu, PopoverMenu, PopoverMenuHandle, Tooltip, prelude::*};
|
||||
use util::truncate_and_trailoff;
|
||||
use workspace::{StatusItemView, Workspace, item::ItemHandle};
|
||||
|
||||
const GIT_OPERATION_DELAY: Duration = Duration::from_millis(0);
|
||||
|
||||
actions!(activity_indicator, [ShowErrorMessage]);
|
||||
|
||||
pub enum Event {
|
||||
@@ -114,15 +105,6 @@ impl ActivityIndicator {
|
||||
)
|
||||
.detach();
|
||||
|
||||
cx.subscribe(
|
||||
&project.read(cx).git_store().clone(),
|
||||
|_, _, event: &GitStoreEvent, cx| match event {
|
||||
project::git_store::GitStoreEvent::JobsUpdated => cx.notify(),
|
||||
_ => {}
|
||||
},
|
||||
)
|
||||
.detach();
|
||||
|
||||
if let Some(auto_updater) = auto_updater.as_ref() {
|
||||
cx.observe(auto_updater, |_, _, cx| cx.notify()).detach();
|
||||
}
|
||||
@@ -303,34 +285,6 @@ impl ActivityIndicator {
|
||||
});
|
||||
}
|
||||
|
||||
let current_job = self
|
||||
.project
|
||||
.read(cx)
|
||||
.active_repository(cx)
|
||||
.map(|r| r.read(cx))
|
||||
.and_then(Repository::current_job);
|
||||
// Show any long-running git command
|
||||
if let Some(job_info) = current_job {
|
||||
if Instant::now() - job_info.start >= GIT_OPERATION_DELAY {
|
||||
return Some(Content {
|
||||
icon: Some(
|
||||
Icon::new(IconName::ArrowCircle)
|
||||
.size(IconSize::Small)
|
||||
.with_animation(
|
||||
"arrow-circle",
|
||||
Animation::new(Duration::from_secs(2)).repeat(),
|
||||
|icon, delta| {
|
||||
icon.transform(Transformation::rotate(percentage(delta)))
|
||||
},
|
||||
)
|
||||
.into_any_element(),
|
||||
),
|
||||
message: job_info.message.into(),
|
||||
on_click: None,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Show any language server installation info.
|
||||
let mut downloading = SmallVec::<[_; 3]>::new();
|
||||
let mut checking_for_update = SmallVec::<[_; 3]>::new();
|
||||
|
||||
@@ -31,7 +31,6 @@ client.workspace = true
|
||||
clock.workspace = true
|
||||
collections.workspace = true
|
||||
command_palette_hooks.workspace = true
|
||||
component.workspace = true
|
||||
context_server.workspace = true
|
||||
convert_case.workspace = true
|
||||
db.workspace = true
|
||||
@@ -51,7 +50,6 @@ itertools.workspace = true
|
||||
language.workspace = true
|
||||
language_model.workspace = true
|
||||
language_model_selector.workspace = true
|
||||
linkme.workspace = true
|
||||
log.workspace = true
|
||||
lsp.workspace = true
|
||||
markdown.workspace = true
|
||||
@@ -80,16 +78,16 @@ terminal.workspace = true
|
||||
terminal_view.workspace = true
|
||||
text.workspace = true
|
||||
theme.workspace = true
|
||||
thiserror.workspace = true
|
||||
time.workspace = true
|
||||
time_format.workspace = true
|
||||
ui.workspace = true
|
||||
ui_input.workspace = true
|
||||
util.workspace = true
|
||||
uuid.workspace = true
|
||||
workspace-hack.workspace = true
|
||||
vim_mode_setting.workspace = true
|
||||
workspace.workspace = true
|
||||
zed_actions.workspace = true
|
||||
workspace-hack.workspace = true
|
||||
|
||||
[dev-dependencies]
|
||||
buffer_diff = { workspace = true, features = ["test-support"] }
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
use crate::{Keep, KeepAll, Reject, RejectAll, Thread, ThreadEvent};
|
||||
use crate::{Keep, Reject, Thread, ThreadEvent};
|
||||
use anyhow::Result;
|
||||
use buffer_diff::DiffHunkStatus;
|
||||
use collections::HashSet;
|
||||
@@ -792,11 +792,15 @@ impl editor::Addon for AgentDiffAddon {
|
||||
|
||||
pub struct AgentDiffToolbar {
|
||||
agent_diff: Option<WeakEntity<AgentDiff>>,
|
||||
_workspace: WeakEntity<Workspace>,
|
||||
}
|
||||
|
||||
impl AgentDiffToolbar {
|
||||
pub fn new() -> Self {
|
||||
Self { agent_diff: None }
|
||||
pub fn new(workspace: &Workspace, _: &mut Context<Self>) -> Self {
|
||||
Self {
|
||||
agent_diff: None,
|
||||
_workspace: workspace.weak_handle(),
|
||||
}
|
||||
}
|
||||
|
||||
fn agent_diff(&self, _: &App) -> Option<Entity<AgentDiff>> {
|
||||
@@ -843,7 +847,7 @@ impl ToolbarItemView for AgentDiffToolbar {
|
||||
}
|
||||
|
||||
impl Render for AgentDiffToolbar {
|
||||
fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
let agent_diff = match self.agent_diff(cx) {
|
||||
Some(ad) => ad,
|
||||
None => return div(),
|
||||
@@ -855,8 +859,6 @@ impl Render for AgentDiffToolbar {
|
||||
return div();
|
||||
}
|
||||
|
||||
let focus_handle = agent_diff.focus_handle(cx);
|
||||
|
||||
h_group_xl()
|
||||
.my_neg_1()
|
||||
.items_center()
|
||||
@@ -866,25 +868,15 @@ impl Render for AgentDiffToolbar {
|
||||
.child(
|
||||
h_group_sm()
|
||||
.child(
|
||||
Button::new("reject-all", "Reject All")
|
||||
.key_binding({
|
||||
KeyBinding::for_action_in(&RejectAll, &focus_handle, window, cx)
|
||||
.map(|kb| kb.size(rems_from_px(12.)))
|
||||
})
|
||||
.on_click(cx.listener(|this, _, window, cx| {
|
||||
this.dispatch_action(&RejectAll, window, cx)
|
||||
})),
|
||||
Button::new("reject-all", "Reject All").on_click(cx.listener(
|
||||
|this, _, window, cx| {
|
||||
this.dispatch_action(&crate::RejectAll, window, cx)
|
||||
},
|
||||
)),
|
||||
)
|
||||
.child(
|
||||
Button::new("keep-all", "Keep All")
|
||||
.key_binding({
|
||||
KeyBinding::for_action_in(&KeepAll, &focus_handle, window, cx)
|
||||
.map(|kb| kb.size(rems_from_px(12.)))
|
||||
})
|
||||
.on_click(cx.listener(|this, _, window, cx| {
|
||||
this.dispatch_action(&KeepAll, window, cx)
|
||||
})),
|
||||
),
|
||||
.child(Button::new("keep-all", "Keep All").on_click(cx.listener(
|
||||
|this, _, window, cx| this.dispatch_action(&crate::KeepAll, window, cx),
|
||||
))),
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -894,7 +886,6 @@ mod tests {
|
||||
use super::*;
|
||||
use crate::{ThreadStore, thread_store};
|
||||
use assistant_settings::AssistantSettings;
|
||||
use assistant_tool::ToolWorkingSet;
|
||||
use context_server::ContextServerSettings;
|
||||
use editor::EditorSettings;
|
||||
use gpui::TestAppContext;
|
||||
@@ -934,16 +925,15 @@ mod tests {
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
let thread_store = cx
|
||||
.update(|cx| {
|
||||
ThreadStore::load(
|
||||
project.clone(),
|
||||
cx.new(|_| ToolWorkingSet::default()),
|
||||
Arc::new(PromptBuilder::new(None).unwrap()),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await;
|
||||
let thread_store = cx.update(|cx| {
|
||||
ThreadStore::new(
|
||||
project.clone(),
|
||||
Arc::default(),
|
||||
Arc::new(PromptBuilder::new(None).unwrap()),
|
||||
cx,
|
||||
)
|
||||
.unwrap()
|
||||
});
|
||||
let thread = thread_store.update(cx, |store, cx| store.create_thread(cx));
|
||||
let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone());
|
||||
|
||||
|
||||
@@ -18,7 +18,6 @@ mod terminal_inline_assistant;
|
||||
mod thread;
|
||||
mod thread_history;
|
||||
mod thread_store;
|
||||
mod tool_compatibility;
|
||||
mod tool_use;
|
||||
mod ui;
|
||||
|
||||
@@ -47,11 +46,10 @@ pub use agent_diff::{AgentDiff, AgentDiffToolbar};
|
||||
actions!(
|
||||
agent,
|
||||
[
|
||||
NewTextThread,
|
||||
NewPromptEditor,
|
||||
ToggleContextPicker,
|
||||
ToggleProfileSelector,
|
||||
RemoveAllContext,
|
||||
ExpandMessageEditor,
|
||||
OpenHistory,
|
||||
AddContextServer,
|
||||
RemoveSelectedThread,
|
||||
|
||||
@@ -12,9 +12,7 @@ use fs::Fs;
|
||||
use gpui::{Action, AnyView, App, Entity, EventEmitter, FocusHandle, Focusable, Subscription};
|
||||
use language_model::{LanguageModelProvider, LanguageModelProviderId, LanguageModelRegistry};
|
||||
use settings::{Settings, update_settings_file};
|
||||
use ui::{
|
||||
Disclosure, Divider, DividerColor, ElevationIndex, Indicator, Switch, Tooltip, prelude::*,
|
||||
};
|
||||
use ui::{Disclosure, Divider, DividerColor, ElevationIndex, Indicator, Switch, prelude::*};
|
||||
use util::ResultExt as _;
|
||||
use zed_actions::ExtensionCategoryFilter;
|
||||
|
||||
@@ -29,7 +27,7 @@ pub struct AssistantConfiguration {
|
||||
configuration_views_by_provider: HashMap<LanguageModelProviderId, AnyView>,
|
||||
context_server_manager: Entity<ContextServerManager>,
|
||||
expanded_context_server_tools: HashMap<Arc<str>, bool>,
|
||||
tools: Entity<ToolWorkingSet>,
|
||||
tools: Arc<ToolWorkingSet>,
|
||||
_registry_subscription: Subscription,
|
||||
}
|
||||
|
||||
@@ -37,7 +35,7 @@ impl AssistantConfiguration {
|
||||
pub fn new(
|
||||
fs: Arc<dyn Fs>,
|
||||
context_server_manager: Entity<ContextServerManager>,
|
||||
tools: Entity<ToolWorkingSet>,
|
||||
tools: Arc<ToolWorkingSet>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
@@ -226,7 +224,7 @@ impl AssistantConfiguration {
|
||||
|
||||
fn render_context_servers_section(&mut self, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
let context_servers = self.context_server_manager.read(cx).all_servers().clone();
|
||||
let tools_by_source = self.tools.read(cx).tools_by_source(cx);
|
||||
let tools_by_source = self.tools.tools_by_source(cx);
|
||||
let empty = Vec::new();
|
||||
|
||||
const SUBHEADING: &str = "Connect to context servers via the Model Context Protocol either via Zed extensions or directly.";
|
||||
@@ -238,10 +236,7 @@ impl AssistantConfiguration {
|
||||
.child(
|
||||
v_flex()
|
||||
.gap_0p5()
|
||||
.child(
|
||||
Headline::new("Model Context Protocol (MCP) Servers")
|
||||
.size(HeadlineSize::Small),
|
||||
)
|
||||
.child(Headline::new("Context Servers (MCP)").size(HeadlineSize::Small))
|
||||
.child(Label::new(SUBHEADING).color(Color::Muted)),
|
||||
)
|
||||
.children(context_servers.into_iter().map(|context_server| {
|
||||
@@ -267,9 +262,10 @@ impl AssistantConfiguration {
|
||||
.bg(cx.theme().colors().editor_background)
|
||||
.child(
|
||||
h_flex()
|
||||
.p_1()
|
||||
.justify_between()
|
||||
.when(are_tools_expanded && tool_count > 1, |element| {
|
||||
.px_2()
|
||||
.py_1()
|
||||
.when(are_tools_expanded, |element| {
|
||||
element
|
||||
.border_b_1()
|
||||
.border_color(cx.theme().colors().border)
|
||||
@@ -279,7 +275,6 @@ impl AssistantConfiguration {
|
||||
.gap_2()
|
||||
.child(
|
||||
Disclosure::new("tool-list-disclosure", are_tools_expanded)
|
||||
.disabled(tool_count == 0)
|
||||
.on_click(cx.listener({
|
||||
let context_server_id = context_server.id();
|
||||
move |this, _event, _window, _cx| {
|
||||
@@ -300,11 +295,10 @@ impl AssistantConfiguration {
|
||||
.child(Label::new(context_server.id()))
|
||||
.child(
|
||||
Label::new(format!("{tool_count} tools"))
|
||||
.color(Color::Muted)
|
||||
.size(LabelSize::Small),
|
||||
.color(Color::Muted),
|
||||
),
|
||||
)
|
||||
.child(
|
||||
.child(h_flex().child(
|
||||
Switch::new("context-server-switch", is_running.into()).on_click({
|
||||
let context_server_manager =
|
||||
self.context_server_manager.clone();
|
||||
@@ -340,7 +334,7 @@ impl AssistantConfiguration {
|
||||
}
|
||||
}
|
||||
}),
|
||||
),
|
||||
)),
|
||||
)
|
||||
.map(|parent| {
|
||||
if !are_tools_expanded {
|
||||
@@ -350,29 +344,14 @@ impl AssistantConfiguration {
|
||||
parent.child(v_flex().children(tools.into_iter().enumerate().map(
|
||||
|(ix, tool)| {
|
||||
h_flex()
|
||||
.id("tool-item")
|
||||
.pl_2()
|
||||
.pr_1()
|
||||
.px_2()
|
||||
.py_1()
|
||||
.gap_2()
|
||||
.justify_between()
|
||||
.when(ix < tool_count - 1, |element| {
|
||||
element
|
||||
.border_b_1()
|
||||
.border_color(cx.theme().colors().border_variant)
|
||||
.border_color(cx.theme().colors().border)
|
||||
})
|
||||
.child(
|
||||
Label::new(tool.name())
|
||||
.buffer_font(cx)
|
||||
.size(LabelSize::Small),
|
||||
)
|
||||
.child(
|
||||
IconButton::new(("tool-description", ix), IconName::Info)
|
||||
.shape(ui::IconButtonShape::Square)
|
||||
.icon_size(IconSize::Small)
|
||||
.icon_color(Color::Ignored)
|
||||
.tooltip(Tooltip::text(tool.description())),
|
||||
)
|
||||
.child(Label::new(tool.name()))
|
||||
},
|
||||
)))
|
||||
})
|
||||
@@ -383,7 +362,7 @@ impl AssistantConfiguration {
|
||||
.gap_2()
|
||||
.child(
|
||||
h_flex().w_full().child(
|
||||
Button::new("add-context-server", "Add MCPs Directly")
|
||||
Button::new("add-context-server", "Add Context Server")
|
||||
.style(ButtonStyle::Filled)
|
||||
.layer(ElevationIndex::ModalSurface)
|
||||
.full_width()
|
||||
@@ -399,7 +378,7 @@ impl AssistantConfiguration {
|
||||
h_flex().w_full().child(
|
||||
Button::new(
|
||||
"install-context-server-extensions",
|
||||
"Install MCP Extensions",
|
||||
"Install Context Server Extensions",
|
||||
)
|
||||
.style(ButtonStyle::Filled)
|
||||
.layer(ElevationIndex::ModalSurface)
|
||||
@@ -430,7 +409,6 @@ impl Render for AssistantConfiguration {
|
||||
|
||||
v_flex()
|
||||
.id("assistant-configuration")
|
||||
.key_context("AgentConfiguration")
|
||||
.track_focus(&self.focus_handle(cx))
|
||||
.bg(cx.theme().colors().panel_background)
|
||||
.size_full()
|
||||
|
||||
@@ -84,7 +84,7 @@ pub struct NewProfileMode {
|
||||
|
||||
pub struct ManageProfilesModal {
|
||||
fs: Arc<dyn Fs>,
|
||||
tools: Entity<ToolWorkingSet>,
|
||||
tools: Arc<ToolWorkingSet>,
|
||||
thread_store: WeakEntity<ThreadStore>,
|
||||
focus_handle: FocusHandle,
|
||||
mode: Mode,
|
||||
@@ -117,7 +117,7 @@ impl ManageProfilesModal {
|
||||
|
||||
pub fn new(
|
||||
fs: Arc<dyn Fs>,
|
||||
tools: Entity<ToolWorkingSet>,
|
||||
tools: Arc<ToolWorkingSet>,
|
||||
thread_store: WeakEntity<ThreadStore>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
|
||||
@@ -60,7 +60,7 @@ pub struct ToolPickerDelegate {
|
||||
impl ToolPickerDelegate {
|
||||
pub fn new(
|
||||
fs: Arc<dyn Fs>,
|
||||
tool_set: Entity<ToolWorkingSet>,
|
||||
tool_set: Arc<ToolWorkingSet>,
|
||||
thread_store: WeakEntity<ThreadStore>,
|
||||
profile_id: AgentProfileId,
|
||||
profile: AgentProfile,
|
||||
@@ -68,7 +68,7 @@ impl ToolPickerDelegate {
|
||||
) -> Self {
|
||||
let mut tool_entries = Vec::new();
|
||||
|
||||
for (source, tools) in tool_set.read(cx).tools_by_source(cx) {
|
||||
for (source, tools) in tool_set.tools_by_source(cx) {
|
||||
tool_entries.extend(tools.into_iter().map(|tool| ToolEntry {
|
||||
name: tool.name().into(),
|
||||
source: source.clone(),
|
||||
@@ -192,7 +192,7 @@ impl PickerDelegate for ToolPickerDelegate {
|
||||
if active_profile_id == &self.profile_id {
|
||||
self.thread_store
|
||||
.update(cx, |this, cx| {
|
||||
this.load_profile(self.profile.clone(), cx);
|
||||
this.load_profile(&self.profile, cx);
|
||||
})
|
||||
.log_err();
|
||||
}
|
||||
|
||||
@@ -80,16 +80,17 @@ impl AssistantModelSelector {
|
||||
|
||||
impl Render for AssistantModelSelector {
|
||||
fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
let focus_handle = self.focus_handle.clone();
|
||||
|
||||
let model_registry = LanguageModelRegistry::read_global(cx);
|
||||
|
||||
let model = match self.model_type {
|
||||
ModelType::Default => model_registry.default_model(),
|
||||
ModelType::InlineAssistant => model_registry.inline_assistant_model(),
|
||||
};
|
||||
let (model_name, model_icon) = match model {
|
||||
Some(model) => (model.model.name().0, Some(model.provider.icon())),
|
||||
_ => (SharedString::from("No model selected"), None),
|
||||
|
||||
let focus_handle = self.focus_handle.clone();
|
||||
let model_name = match model {
|
||||
Some(model) => model.model.name().0,
|
||||
_ => SharedString::from("No model selected"),
|
||||
};
|
||||
|
||||
LanguageModelSelectorPopoverMenu::new(
|
||||
@@ -99,16 +100,10 @@ impl Render for AssistantModelSelector {
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_0p5()
|
||||
.children(
|
||||
model_icon.map(|icon| {
|
||||
Icon::new(icon).color(Color::Muted).size(IconSize::Small)
|
||||
}),
|
||||
)
|
||||
.child(
|
||||
Label::new(model_name)
|
||||
.size(LabelSize::Small)
|
||||
.color(Color::Muted)
|
||||
.ml_1(),
|
||||
.color(Color::Muted),
|
||||
)
|
||||
.child(
|
||||
Icon::new(IconName::ChevronDown)
|
||||
|
||||
@@ -44,8 +44,8 @@ use crate::thread::{Thread, ThreadError, ThreadId, TokenUsageRatio};
|
||||
use crate::thread_history::{PastContext, PastThread, ThreadHistory};
|
||||
use crate::thread_store::ThreadStore;
|
||||
use crate::{
|
||||
AgentDiff, ExpandMessageEditor, InlineAssistant, NewTextThread, NewThread,
|
||||
OpenActiveThreadAsMarkdown, OpenAgentDiff, OpenHistory, ThreadEvent, ToggleContextPicker,
|
||||
AgentDiff, InlineAssistant, NewPromptEditor, NewThread, OpenActiveThreadAsMarkdown,
|
||||
OpenAgentDiff, OpenHistory, ThreadEvent, ToggleContextPicker,
|
||||
};
|
||||
|
||||
pub fn init(cx: &mut App) {
|
||||
@@ -70,7 +70,7 @@ pub fn init(cx: &mut App) {
|
||||
panel.update(cx, |panel, cx| panel.open_configuration(window, cx));
|
||||
}
|
||||
})
|
||||
.register_action(|workspace, _: &NewTextThread, window, cx| {
|
||||
.register_action(|workspace, _: &NewPromptEditor, window, cx| {
|
||||
if let Some(panel) = workspace.panel::<AssistantPanel>(cx) {
|
||||
workspace.focus_panel::<AssistantPanel>(window, cx);
|
||||
panel.update(cx, |panel, cx| panel.new_prompt_editor(window, cx));
|
||||
@@ -90,16 +90,6 @@ pub fn init(cx: &mut App) {
|
||||
let thread = panel.read(cx).thread.read(cx).thread().clone();
|
||||
AgentDiff::deploy_in_workspace(thread, workspace, window, cx);
|
||||
}
|
||||
})
|
||||
.register_action(|workspace, _: &ExpandMessageEditor, window, cx| {
|
||||
if let Some(panel) = workspace.panel::<AssistantPanel>(cx) {
|
||||
workspace.focus_panel::<AssistantPanel>(window, cx);
|
||||
panel.update(cx, |panel, cx| {
|
||||
panel.message_editor.update(cx, |editor, cx| {
|
||||
editor.expand_message_editor(&ExpandMessageEditor, window, cx);
|
||||
});
|
||||
});
|
||||
}
|
||||
});
|
||||
},
|
||||
)
|
||||
@@ -188,7 +178,6 @@ pub struct AssistantPanel {
|
||||
configuration_subscription: Option<Subscription>,
|
||||
local_timezone: UtcOffset,
|
||||
active_view: ActiveView,
|
||||
previous_view: Option<ActiveView>,
|
||||
history_store: Entity<HistoryStore>,
|
||||
history: Entity<ThreadHistory>,
|
||||
assistant_dropdown_menu_handle: PopoverMenuHandle<ContextMenu>,
|
||||
@@ -203,13 +192,11 @@ impl AssistantPanel {
|
||||
cx: AsyncWindowContext,
|
||||
) -> Task<Result<Entity<Self>>> {
|
||||
cx.spawn(async move |cx| {
|
||||
let tools = cx.new(|_| ToolWorkingSet::default())?;
|
||||
let thread_store = workspace
|
||||
.update(cx, |workspace, cx| {
|
||||
let project = workspace.project().clone();
|
||||
ThreadStore::load(project, tools.clone(), prompt_builder.clone(), cx)
|
||||
})?
|
||||
.await;
|
||||
let tools = Arc::new(ToolWorkingSet::default());
|
||||
let thread_store = workspace.update(cx, |workspace, cx| {
|
||||
let project = workspace.project().clone();
|
||||
ThreadStore::new(project, tools.clone(), prompt_builder.clone(), cx)
|
||||
})??;
|
||||
|
||||
let slash_commands = Arc::new(SlashCommandWorkingSet::default());
|
||||
let context_store = workspace
|
||||
@@ -239,14 +226,14 @@ impl AssistantPanel {
|
||||
) -> Self {
|
||||
let thread = thread_store.update(cx, |this, cx| this.create_thread(cx));
|
||||
let fs = workspace.app_state().fs.clone();
|
||||
let project = workspace.project();
|
||||
let project = workspace.project().clone();
|
||||
let language_registry = project.read(cx).languages().clone();
|
||||
let workspace = workspace.weak_handle();
|
||||
let weak_self = cx.entity().downgrade();
|
||||
|
||||
let message_editor_context_store = cx.new(|_cx| {
|
||||
crate::context_store::ContextStore::new(
|
||||
project.downgrade(),
|
||||
workspace.clone(),
|
||||
Some(thread_store.downgrade()),
|
||||
)
|
||||
});
|
||||
@@ -305,7 +292,6 @@ impl AssistantPanel {
|
||||
chrono::Local::now().offset().local_minus_utc(),
|
||||
)
|
||||
.unwrap(),
|
||||
previous_view: None,
|
||||
history_store: history_store.clone(),
|
||||
history: cx.new(|cx| ThreadHistory::new(weak_self, history_store, window, cx)),
|
||||
assistant_dropdown_menu_handle: PopoverMenuHandle::default(),
|
||||
@@ -351,12 +337,11 @@ impl AssistantPanel {
|
||||
.thread_store
|
||||
.update(cx, |this, cx| this.create_thread(cx));
|
||||
|
||||
let thread_view = ActiveView::thread(thread.clone(), window, cx);
|
||||
self.set_active_view(thread_view, window, cx);
|
||||
self.active_view = ActiveView::thread(thread.clone(), window, cx);
|
||||
|
||||
let message_editor_context_store = cx.new(|_cx| {
|
||||
crate::context_store::ContextStore::new(
|
||||
self.project.downgrade(),
|
||||
self.workspace.clone(),
|
||||
Some(self.thread_store.downgrade()),
|
||||
)
|
||||
});
|
||||
@@ -415,7 +400,7 @@ impl AssistantPanel {
|
||||
}
|
||||
|
||||
fn new_prompt_editor(&mut self, window: &mut Window, cx: &mut Context<Self>) {
|
||||
self.set_active_view(ActiveView::PromptEditor, window, cx);
|
||||
self.active_view = ActiveView::PromptEditor;
|
||||
|
||||
let context = self
|
||||
.context_store
|
||||
@@ -465,16 +450,11 @@ impl AssistantPanel {
|
||||
}
|
||||
|
||||
fn open_history(&mut self, window: &mut Window, cx: &mut Context<Self>) {
|
||||
if matches!(self.active_view, ActiveView::History) {
|
||||
if let Some(previous_view) = self.previous_view.take() {
|
||||
self.set_active_view(previous_view, window, cx);
|
||||
}
|
||||
} else {
|
||||
self.thread_store
|
||||
.update(cx, |thread_store, cx| thread_store.reload(cx))
|
||||
.detach_and_log_err(cx);
|
||||
self.set_active_view(ActiveView::History, window, cx);
|
||||
}
|
||||
self.thread_store
|
||||
.update(cx, |thread_store, cx| thread_store.reload(cx))
|
||||
.detach_and_log_err(cx);
|
||||
self.active_view = ActiveView::History;
|
||||
self.history.focus_handle(cx).focus(window);
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
@@ -507,7 +487,7 @@ impl AssistantPanel {
|
||||
cx,
|
||||
)
|
||||
});
|
||||
this.set_active_view(ActiveView::PromptEditor, window, cx);
|
||||
this.active_view = ActiveView::PromptEditor;
|
||||
this.context_editor = Some(editor);
|
||||
|
||||
anyhow::Ok(())
|
||||
@@ -529,11 +509,10 @@ impl AssistantPanel {
|
||||
cx.spawn_in(window, async move |this, cx| {
|
||||
let thread = open_thread_task.await?;
|
||||
this.update_in(cx, |this, window, cx| {
|
||||
let thread_view = ActiveView::thread(thread.clone(), window, cx);
|
||||
this.set_active_view(thread_view, window, cx);
|
||||
this.active_view = ActiveView::thread(thread.clone(), window, cx);
|
||||
let message_editor_context_store = cx.new(|_cx| {
|
||||
crate::context_store::ContextStore::new(
|
||||
this.project.downgrade(),
|
||||
this.workspace.clone(),
|
||||
Some(this.thread_store.downgrade()),
|
||||
)
|
||||
});
|
||||
@@ -564,18 +543,6 @@ impl AssistantPanel {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn go_back(&mut self, _: &workspace::GoBack, window: &mut Window, cx: &mut Context<Self>) {
|
||||
match self.active_view {
|
||||
ActiveView::Configuration | ActiveView::History => {
|
||||
self.active_view =
|
||||
ActiveView::thread(self.thread.read(cx).thread().clone(), window, cx);
|
||||
self.message_editor.focus_handle(cx).focus(window);
|
||||
cx.notify();
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn open_agent_diff(
|
||||
&mut self,
|
||||
_: &OpenAgentDiff,
|
||||
@@ -595,7 +562,7 @@ impl AssistantPanel {
|
||||
let tools = self.thread_store.read(cx).tools();
|
||||
let fs = self.fs.clone();
|
||||
|
||||
self.set_active_view(ActiveView::Configuration, window, cx);
|
||||
self.active_view = ActiveView::Configuration;
|
||||
self.configuration =
|
||||
Some(cx.new(|cx| {
|
||||
AssistantConfiguration::new(fs, context_server_manager, tools, window, cx)
|
||||
@@ -723,29 +690,6 @@ impl AssistantPanel {
|
||||
self.context_store
|
||||
.update(cx, |this, cx| this.delete_local_context(path, cx))
|
||||
}
|
||||
|
||||
fn set_active_view(
|
||||
&mut self,
|
||||
new_view: ActiveView,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let current_is_history = matches!(self.active_view, ActiveView::History);
|
||||
let new_is_history = matches!(new_view, ActiveView::History);
|
||||
|
||||
if current_is_history && !new_is_history {
|
||||
self.active_view = new_view;
|
||||
} else if !current_is_history && new_is_history {
|
||||
self.previous_view = Some(std::mem::replace(&mut self.active_view, new_view));
|
||||
} else {
|
||||
if !new_is_history {
|
||||
self.previous_view = None;
|
||||
}
|
||||
self.active_view = new_view;
|
||||
}
|
||||
|
||||
self.focus_handle(cx).focus(window);
|
||||
}
|
||||
}
|
||||
|
||||
impl Focusable for AssistantPanel {
|
||||
@@ -868,19 +812,13 @@ impl AssistantPanel {
|
||||
if is_empty {
|
||||
Label::new(Thread::DEFAULT_SUMMARY.clone())
|
||||
.truncate()
|
||||
.ml_2()
|
||||
.into_any_element()
|
||||
} else if summary.is_none() {
|
||||
Label::new(LOADING_SUMMARY_PLACEHOLDER)
|
||||
.ml_2()
|
||||
.truncate()
|
||||
.into_any_element()
|
||||
} else {
|
||||
div()
|
||||
.ml_2()
|
||||
.w_full()
|
||||
.child(change_title_editor.clone())
|
||||
.into_any_element()
|
||||
change_title_editor.clone().into_any_element()
|
||||
}
|
||||
}
|
||||
ActiveView::PromptEditor => {
|
||||
@@ -892,7 +830,7 @@ impl AssistantPanel {
|
||||
})
|
||||
.unwrap_or_else(|| SharedString::from(LOADING_SUMMARY_PLACEHOLDER));
|
||||
|
||||
Label::new(title).ml_2().truncate().into_any_element()
|
||||
Label::new(title).truncate().into_any_element()
|
||||
}
|
||||
ActiveView::History => Label::new("History").truncate().into_any_element(),
|
||||
ActiveView::Configuration => Label::new("Settings").truncate().into_any_element(),
|
||||
@@ -901,6 +839,7 @@ impl AssistantPanel {
|
||||
h_flex()
|
||||
.key_context("TitleEditor")
|
||||
.id("TitleEditor")
|
||||
.pl_2()
|
||||
.flex_grow()
|
||||
.w_full()
|
||||
.max_w_full()
|
||||
@@ -919,39 +858,12 @@ impl AssistantPanel {
|
||||
let is_empty = active_thread.is_empty();
|
||||
let focus_handle = self.focus_handle(cx);
|
||||
|
||||
let is_history = matches!(self.active_view, ActiveView::History);
|
||||
|
||||
let show_token_count = match &self.active_view {
|
||||
ActiveView::Thread { .. } => !is_empty,
|
||||
ActiveView::PromptEditor => self.context_editor.is_some(),
|
||||
_ => false,
|
||||
};
|
||||
|
||||
let go_back_button = match &self.active_view {
|
||||
ActiveView::History | ActiveView::Configuration => Some(
|
||||
div().pl_1().child(
|
||||
IconButton::new("go-back", IconName::ArrowLeft)
|
||||
.icon_size(IconSize::Small)
|
||||
.on_click(cx.listener(|this, _, window, cx| {
|
||||
this.go_back(&workspace::GoBack, window, cx);
|
||||
}))
|
||||
.tooltip({
|
||||
let focus_handle = focus_handle.clone();
|
||||
move |window, cx| {
|
||||
Tooltip::for_action_in(
|
||||
"Go Back",
|
||||
&workspace::GoBack,
|
||||
&focus_handle,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
}
|
||||
}),
|
||||
),
|
||||
),
|
||||
_ => None,
|
||||
};
|
||||
|
||||
h_flex()
|
||||
.id("assistant-toolbar")
|
||||
.h(Tab::container_height(cx))
|
||||
@@ -962,13 +874,7 @@ impl AssistantPanel {
|
||||
.bg(cx.theme().colors().tab_bar_background)
|
||||
.border_b_1()
|
||||
.border_color(cx.theme().colors().border)
|
||||
.child(
|
||||
h_flex()
|
||||
.w_full()
|
||||
.gap_1()
|
||||
.children(go_back_button)
|
||||
.child(self.render_title_view(window, cx)),
|
||||
)
|
||||
.child(self.render_title_view(window, cx))
|
||||
.child(
|
||||
h_flex()
|
||||
.h_full()
|
||||
@@ -1063,27 +969,6 @@ impl AssistantPanel {
|
||||
);
|
||||
}),
|
||||
)
|
||||
.child(
|
||||
IconButton::new("open-history", IconName::HistoryRerun)
|
||||
.icon_size(IconSize::Small)
|
||||
.toggle_state(is_history)
|
||||
.selected_icon_color(Color::Accent)
|
||||
.tooltip({
|
||||
let focus_handle = self.focus_handle(cx);
|
||||
move |window, cx| {
|
||||
Tooltip::for_action_in(
|
||||
"History",
|
||||
&OpenHistory,
|
||||
&focus_handle,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
}
|
||||
})
|
||||
.on_click(move |_event, window, cx| {
|
||||
window.dispatch_action(OpenHistory.boxed_clone(), cx);
|
||||
}),
|
||||
)
|
||||
.child(
|
||||
PopoverMenu::new("assistant-menu")
|
||||
.trigger_with_tooltip(
|
||||
@@ -1099,30 +984,27 @@ impl AssistantPanel {
|
||||
window,
|
||||
cx,
|
||||
|menu, _window, _cx| {
|
||||
menu
|
||||
.when(!is_empty, |menu| {
|
||||
menu.action(
|
||||
"Start New From Summary",
|
||||
Box::new(NewThread {
|
||||
from_thread_id: Some(thread_id.clone()),
|
||||
}),
|
||||
).separator()
|
||||
})
|
||||
.action(
|
||||
"New Text Thread",
|
||||
NewTextThread.boxed_clone(),
|
||||
menu.action(
|
||||
"New Thread",
|
||||
Box::new(NewThread {
|
||||
from_thread_id: None,
|
||||
}),
|
||||
)
|
||||
.action("Settings", OpenConfiguration.boxed_clone())
|
||||
.separator()
|
||||
.action(
|
||||
"Install MCPs",
|
||||
zed_actions::Extensions {
|
||||
category_filter: Some(
|
||||
zed_actions::ExtensionCategoryFilter::ContextServers,
|
||||
),
|
||||
}
|
||||
.boxed_clone(),
|
||||
"New Prompt Editor",
|
||||
NewPromptEditor.boxed_clone(),
|
||||
)
|
||||
.when(!is_empty, |menu| {
|
||||
menu.action(
|
||||
"Continue in New Thread",
|
||||
Box::new(NewThread {
|
||||
from_thread_id: Some(thread_id.clone()),
|
||||
}),
|
||||
)
|
||||
})
|
||||
.separator()
|
||||
.action("History", OpenHistory.boxed_clone())
|
||||
.action("Settings", OpenConfiguration.boxed_clone())
|
||||
},
|
||||
))
|
||||
}),
|
||||
@@ -1319,7 +1201,6 @@ impl AssistantPanel {
|
||||
let configuration_error_ref = &configuration_error;
|
||||
|
||||
parent
|
||||
.overflow_hidden()
|
||||
.p_1p5()
|
||||
.justify_end()
|
||||
.gap_1()
|
||||
@@ -1619,7 +1500,6 @@ impl Render for AssistantPanel {
|
||||
.on_action(cx.listener(Self::open_active_thread_as_markdown))
|
||||
.on_action(cx.listener(Self::deploy_prompt_library))
|
||||
.on_action(cx.listener(Self::open_agent_diff))
|
||||
.on_action(cx.listener(Self::go_back))
|
||||
.child(self.render_toolbar(window, cx))
|
||||
.map(|parent| match self.active_view {
|
||||
ActiveView::Thread { .. } => parent
|
||||
@@ -1652,21 +1532,7 @@ impl prompt_library::InlineAssistDelegate for PromptLibraryInlineAssist {
|
||||
cx: &mut Context<PromptLibrary>,
|
||||
) {
|
||||
InlineAssistant::update_global(cx, |assistant, cx| {
|
||||
let Some(project) = self
|
||||
.workspace
|
||||
.upgrade()
|
||||
.map(|workspace| workspace.read(cx).project().downgrade())
|
||||
else {
|
||||
return;
|
||||
};
|
||||
assistant.assist(
|
||||
&prompt_editor,
|
||||
self.workspace.clone(),
|
||||
project,
|
||||
None,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
assistant.assist(&prompt_editor, self.workspace.clone(), None, window, cx)
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
@@ -28,7 +28,7 @@ use std::{
|
||||
time::Instant,
|
||||
};
|
||||
use streaming_diff::{CharOperation, LineDiff, LineOperation, StreamingDiff};
|
||||
use telemetry_events::{AssistantEventData, AssistantKind, AssistantPhase};
|
||||
use telemetry_events::{AssistantEvent, AssistantKind, AssistantPhase};
|
||||
|
||||
pub struct BufferCodegen {
|
||||
alternatives: Vec<Entity<CodegenAlternative>>,
|
||||
@@ -601,7 +601,7 @@ impl CodegenAlternative {
|
||||
|
||||
let error_message = result.as_ref().err().map(|error| error.to_string());
|
||||
report_assistant_event(
|
||||
AssistantEventData {
|
||||
AssistantEvent {
|
||||
conversation_id: None,
|
||||
message_id,
|
||||
kind: AssistantKind::Inline,
|
||||
|
||||
@@ -1,9 +1,9 @@
use std::{ops::Range, path::Path, sync::Arc};
use std::{ops::Range, sync::Arc};

use gpui::{App, Entity, SharedString};
use language::{Buffer, File};
use language_model::LanguageModelRequestMessage;
use project::{ProjectPath, Worktree};
use project::ProjectPath;
use serde::{Deserialize, Serialize};
use text::{Anchor, BufferId};
use ui::IconName;
@@ -69,21 +69,10 @@ pub struct FileContext {
#[derive(Debug, Clone)]
pub struct DirectoryContext {
pub id: ContextId,
pub worktree: Entity<Worktree>,
pub path: Arc<Path>,
/// Buffers of the files within the directory.
pub project_path: ProjectPath,
pub context_buffers: Vec<ContextBuffer>,
}

impl DirectoryContext {
pub fn project_path(&self, cx: &App) -> ProjectPath {
ProjectPath {
worktree_id: self.worktree.read(cx).id(),
path: self.path.clone(),
}
}
}

#[derive(Debug, Clone)]
pub struct SymbolContext {
pub id: ContextId,
@@ -97,11 +86,12 @@ pub struct FetchedUrlContext {
pub text: SharedString,
}

// TODO: Model<Thread> holds onto the thread even if the thread is deleted. Can either handle this
// explicitly or have a WeakModel<Thread> and remove during snapshot.

#[derive(Debug, Clone)]
pub struct ThreadContext {
pub id: ContextId,
// TODO: Entity<Thread> holds onto the thread even if the thread is deleted. Should probably be
// a WeakEntity and handle removal from the UI when it has dropped.
pub thread: Entity<Thread>,
pub text: SharedString,
}
@@ -115,11 +105,12 @@ impl ThreadContext {
}
}

// TODO: Model<Buffer> holds onto the buffer even if the file is deleted and closed. Should remove
// the context from the message editor in this case.

#[derive(Clone)]
pub struct ContextBuffer {
pub id: BufferId,
// TODO: Entity<Buffer> holds onto the thread even if the thread is deleted. Should probably be
// a WeakEntity and handle removal from the UI when it has dropped.
pub buffer: Entity<Buffer>,
pub file: Arc<dyn File>,
pub version: clock::Global,

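Note on the DirectoryContext hunk above (reading each pair as removed-then-added, as the viewer renders them): the worktree handle and the lazily resolved path give way to a stored ProjectPath, so directory context no longer needs App access to name its path. A minimal sketch of the resulting shape, assuming the ContextId and ContextBuffer types defined in this same file:

    // Removed: path resolved on demand through the worktree entity.
    // pub worktree: Entity<Worktree>,
    // pub path: Arc<Path>,
    //
    // Added: the resolved ProjectPath is stored up front.
    pub struct DirectoryContext {
        pub id: ContextId,
        pub project_path: ProjectPath,
        pub context_buffers: Vec<ContextBuffer>,
    }
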
@@ -34,6 +34,12 @@ use crate::context_store::ContextStore;
use crate::thread::ThreadId;
use crate::thread_store::ThreadStore;

#[derive(Debug, Clone, Copy)]
pub enum ConfirmBehavior {
KeepOpen,
Close,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum ContextPickerMode {
File,
@@ -99,6 +105,7 @@ pub(super) struct ContextPicker {
workspace: WeakEntity<Workspace>,
context_store: WeakEntity<ContextStore>,
thread_store: Option<WeakEntity<ThreadStore>>,
confirm_behavior: ConfirmBehavior,
_subscriptions: Vec<Subscription>,
}

@@ -107,6 +114,7 @@ impl ContextPicker {
workspace: WeakEntity<Workspace>,
thread_store: Option<WeakEntity<ThreadStore>>,
context_store: WeakEntity<ContextStore>,
confirm_behavior: ConfirmBehavior,
window: &mut Window,
cx: &mut Context<Self>,
) -> Self {
@@ -135,6 +143,7 @@ impl ContextPicker {
workspace,
context_store,
thread_store,
confirm_behavior,
_subscriptions: subscriptions,
}
}
@@ -157,32 +166,37 @@ impl ContextPicker {

let modes = supported_context_picker_modes(&self.thread_store);

menu.when(has_recent, |menu| {
menu.custom_row(|_, _| {
div()
.mb_1()
.child(
Label::new("Recent")
.color(Color::Muted)
.size(LabelSize::Small),
)
.into_any_element()
})
})
.extend(recent_entries)
.when(has_recent, |menu| menu.separator())
.extend(modes.into_iter().map(|mode| {
let context_picker = context_picker.clone();

ContextMenuEntry::new(mode.label())
.icon(mode.icon())
.icon_size(IconSize::XSmall)
.icon_color(Color::Muted)
.handler(move |window, cx| {
context_picker.update(cx, |this, cx| this.select_mode(mode, window, cx))
let menu = menu
.when(has_recent, |menu| {
menu.custom_row(|_, _| {
div()
.mb_1()
.child(
Label::new("Recent")
.color(Color::Muted)
.size(LabelSize::Small),
)
.into_any_element()
})
}))
.keep_open_on_confirm()
})
.extend(recent_entries)
.when(has_recent, |menu| menu.separator())
.extend(modes.into_iter().map(|mode| {
let context_picker = context_picker.clone();

ContextMenuEntry::new(mode.label())
.icon(mode.icon())
.icon_size(IconSize::XSmall)
.icon_color(Color::Muted)
.handler(move |window, cx| {
context_picker.update(cx, |this, cx| this.select_mode(mode, window, cx))
})
}));

match self.confirm_behavior {
ConfirmBehavior::KeepOpen => menu.keep_open_on_confirm(),
ConfirmBehavior::Close => menu,
}
});

cx.subscribe(&menu, move |_, _, _: &DismissEvent, cx| {
@@ -213,6 +227,7 @@ impl ContextPicker {
context_picker.clone(),
self.workspace.clone(),
self.context_store.clone(),
self.confirm_behavior,
window,
cx,
)
@@ -224,6 +239,7 @@ impl ContextPicker {
context_picker.clone(),
self.workspace.clone(),
self.context_store.clone(),
self.confirm_behavior,
window,
cx,
)
@@ -235,6 +251,7 @@ impl ContextPicker {
context_picker.clone(),
self.workspace.clone(),
self.context_store.clone(),
self.confirm_behavior,
window,
cx,
)
@@ -247,6 +264,7 @@ impl ContextPicker {
thread_store.clone(),
context_picker.clone(),
self.context_store.clone(),
self.confirm_behavior,
window,
cx,
)
@@ -271,14 +289,12 @@ impl ContextPicker {
path_prefix,
} => {
let context_store = self.context_store.clone();
let worktree_id = project_path.worktree_id;
let path = project_path.path.clone();

ContextMenuItem::custom_entry(
move |_window, cx| {
render_file_context_entry(
ElementId::NamedInteger("ctx-recent".into(), ix),
worktree_id,
&path,
&path_prefix,
false,
@@ -450,7 +466,7 @@ fn recent_context_picker_entries(
recent.extend(
workspace
.recent_navigation_history_iter(cx)
.filter(|(path, _)| !current_files.contains(path))
.filter(|(path, _)| !current_files.contains(&path.path.to_path_buf()))
.take(4)
.filter_map(|(project_path, _)| {
project
@@ -491,13 +507,14 @@ fn recent_context_picker_entries(
recent
}

pub(crate) fn insert_fold_for_mention(
pub(crate) fn insert_crease_for_mention(
excerpt_id: ExcerptId,
crease_start: text::Anchor,
content_len: usize,
crease_label: SharedString,
crease_icon_path: SharedString,
editor_entity: Entity<Editor>,
window: &mut Window,
cx: &mut App,
) {
editor_entity.update(cx, |editor, cx| {
@@ -516,7 +533,6 @@ pub(crate) fn insert_fold_for_mention(
crease_label,
editor_entity.downgrade(),
),
merge_adjacent: false,
..Default::default()
};

@@ -530,9 +546,8 @@ pub(crate) fn insert_fold_for_mention(
render_trailer,
);

editor.display_map.update(cx, |display_map, cx| {
display_map.fold(vec![crease], cx);
});
editor.insert_creases(vec![crease.clone()], cx);
editor.fold_creases(vec![crease], false, window, cx);
});
}

@@ -589,13 +604,12 @@ fn render_fold_icon_button(
.gap_1()
.child(
Icon::from_path(icon_path.clone())
.size(IconSize::XSmall)
.size(IconSize::Small)
.color(Color::Muted),
)
.child(
Label::new(label.clone())
.size(LabelSize::Small)
.buffer_font(cx)
.single_line(),
),
)

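The insert_fold_for_mention → insert_crease_for_mention rename above also changes how the mention is collapsed: instead of folding the range directly on the display map, the editor first registers a crease and then folds it, which is why the function now takes a Window. Reassembled from the hunk (removed-then-added):

    // Removed: fold straight on the display map, no Window required.
    // editor.display_map.update(cx, |display_map, cx| {
    //     display_map.fold(vec![crease], cx);
    // });

    // Added: register the crease, then fold it through the editor APIs.
    editor.insert_creases(vec![crease.clone()], cx);
    editor.fold_creases(vec![crease], false, window, cx);
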
@@ -18,133 +18,16 @@ use text::{Anchor, ToPoint};
use ui::prelude::*;
use workspace::Workspace;

use crate::context_picker::file_context_picker::search_files;
use crate::context_picker::symbol_context_picker::search_symbols;
use crate::context::AssistantContext;
use crate::context_store::ContextStore;
use crate::thread_store::ThreadStore;

use super::fetch_context_picker::fetch_url_content;
use super::file_context_picker::FileMatch;
use super::symbol_context_picker::SymbolMatch;
use super::thread_context_picker::{ThreadContextEntry, ThreadMatch, search_threads};
use super::thread_context_picker::ThreadContextEntry;
use super::{
ContextPickerMode, MentionLink, RecentEntry, recent_context_picker_entries,
supported_context_picker_modes,
ContextPickerMode, MentionLink, recent_context_picker_entries, supported_context_picker_modes,
};

pub(crate) enum Match {
Symbol(SymbolMatch),
File(FileMatch),
Thread(ThreadMatch),
Fetch(SharedString),
Mode(ContextPickerMode),
}

fn search(
mode: Option<ContextPickerMode>,
query: String,
cancellation_flag: Arc<AtomicBool>,
recent_entries: Vec<RecentEntry>,
thread_store: Option<WeakEntity<ThreadStore>>,
workspace: Entity<Workspace>,
cx: &mut App,
) -> Task<Vec<Match>> {
match mode {
Some(ContextPickerMode::File) => {
let search_files_task =
search_files(query.clone(), cancellation_flag.clone(), &workspace, cx);
cx.background_spawn(async move {
search_files_task
.await
.into_iter()
.map(Match::File)
.collect()
})
}
Some(ContextPickerMode::Symbol) => {
let search_symbols_task =
search_symbols(query.clone(), cancellation_flag.clone(), &workspace, cx);
cx.background_spawn(async move {
search_symbols_task
.await
.into_iter()
.map(Match::Symbol)
.collect()
})
}
Some(ContextPickerMode::Thread) => {
if let Some(thread_store) = thread_store.as_ref().and_then(|t| t.upgrade()) {
let search_threads_task =
search_threads(query.clone(), cancellation_flag.clone(), thread_store, cx);
cx.background_spawn(async move {
search_threads_task
.await
.into_iter()
.map(Match::Thread)
.collect()
})
} else {
Task::ready(Vec::new())
}
}
Some(ContextPickerMode::Fetch) => {
if !query.is_empty() {
Task::ready(vec![Match::Fetch(query.into())])
} else {
Task::ready(Vec::new())
}
}
None => {
if query.is_empty() {
let mut matches = recent_entries
.into_iter()
.map(|entry| match entry {
super::RecentEntry::File {
project_path,
path_prefix,
} => Match::File(FileMatch {
mat: fuzzy::PathMatch {
score: 1.,
positions: Vec::new(),
worktree_id: project_path.worktree_id.to_usize(),
path: project_path.path,
path_prefix,
is_dir: false,
distance_to_relative_ancestor: 0,
},
is_recent: true,
}),
super::RecentEntry::Thread(thread_context_entry) => {
Match::Thread(ThreadMatch {
thread: thread_context_entry,
is_recent: true,
})
}
})
.collect::<Vec<_>>();

matches.extend(
supported_context_picker_modes(&thread_store)
.into_iter()
.map(Match::Mode),
);

Task::ready(matches)
} else {
let search_files_task =
search_files(query.clone(), cancellation_flag.clone(), &workspace, cx);
cx.background_spawn(async move {
search_files_task
.await
.into_iter()
.map(Match::File)
.collect()
})
}
}
}
}

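The Match enum and search helper above (present on one side of this compare) funnel every picker mode into a single task of Match values, which the matching completions() body further down turns into completions. A sketch of the call site, assembled from the lines that appear later in this file; the variable names are taken from that code:

    let search_task = search(
        mode,                          // Option<ContextPickerMode> parsed from the "@…" mention
        query,
        Arc::<AtomicBool>::default(),  // cancellation flag
        recent_entries,                // recent files/threads offered for an empty query
        thread_store.clone(),
        workspace.clone(),
        cx,
    );
    // later, inside the spawned future:
    // let matches: Vec<Match> = search_task.await;
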
pub struct ContextPickerCompletionProvider {
workspace: WeakEntity<Workspace>,
context_store: WeakEntity<ContextStore>,
@@ -167,20 +50,96 @@ impl ContextPickerCompletionProvider {
}
}

fn completion_for_mode(source_range: Range<Anchor>, mode: ContextPickerMode) -> Completion {
Completion {
replace_range: source_range.clone(),
new_text: format!("@{} ", mode.mention_prefix()),
label: CodeLabel::plain(mode.label().to_string(), None),
icon_path: Some(mode.icon().path().into()),
documentation: None,
source: project::CompletionSource::Custom,
insert_text_mode: None,
// This ensures that when a user accepts this completion, the
// completion menu will still be shown after "@category " is
// inserted
confirm: Some(Arc::new(|_, _, _| true)),
fn default_completions(
excerpt_id: ExcerptId,
source_range: Range<Anchor>,
context_store: Entity<ContextStore>,
thread_store: Option<WeakEntity<ThreadStore>>,
editor: Entity<Editor>,
workspace: Entity<Workspace>,
cx: &App,
) -> Vec<Completion> {
let mut completions = Vec::new();

completions.extend(
recent_context_picker_entries(
context_store.clone(),
thread_store.clone(),
workspace.clone(),
cx,
)
.iter()
.filter_map(|entry| match entry {
super::RecentEntry::File {
project_path,
path_prefix,
} => Some(Self::completion_for_path(
project_path.clone(),
path_prefix,
true,
false,
excerpt_id,
source_range.clone(),
editor.clone(),
context_store.clone(),
cx,
)),
super::RecentEntry::Thread(thread_context_entry) => {
let thread_store = thread_store
.as_ref()
.and_then(|thread_store| thread_store.upgrade())?;
Some(Self::completion_for_thread(
thread_context_entry.clone(),
excerpt_id,
source_range.clone(),
true,
editor.clone(),
context_store.clone(),
thread_store,
))
}
}),
);

completions.extend(
supported_context_picker_modes(&thread_store)
.iter()
.map(|mode| {
Completion {
old_range: source_range.clone(),
new_text: format!("@{} ", mode.mention_prefix()),
label: CodeLabel::plain(mode.label().to_string(), None),
icon_path: Some(mode.icon().path().into()),
documentation: None,
source: project::CompletionSource::Custom,
// This ensures that when a user accepts this completion, the
// completion menu will still be shown after "@category " is
// inserted
confirm: Some(Arc::new(|_, _, _| true)),
}
}),
);
completions
}

fn build_code_label_for_full_path(
file_name: &str,
directory: Option<&str>,
cx: &App,
) -> CodeLabel {
let comment_id = cx.theme().syntax().highlight_id("comment").map(HighlightId);
let mut label = CodeLabel::default();

label.push_str(&file_name, None);
label.push_str(" ", None);

if let Some(directory) = directory {
label.push_str(&directory, comment_id);
}

label.filter_range = 0..label.text().len();

label
}

fn completion_for_thread(
@@ -200,11 +159,10 @@ impl ContextPickerCompletionProvider {
let new_text = MentionLink::for_thread(&thread_entry);
let new_text_len = new_text.len();
Completion {
replace_range: source_range.clone(),
old_range: source_range.clone(),
new_text,
label: CodeLabel::plain(thread_entry.summary.to_string(), None),
documentation: None,
insert_text_mode: None,
source: project::CompletionSource::Custom,
icon_path: Some(icon_for_completion.path().into()),
confirm: Some(confirm_completion_callback(
@@ -245,13 +203,12 @@ impl ContextPickerCompletionProvider {
let new_text = MentionLink::for_fetch(&url_to_fetch);
let new_text_len = new_text.len();
Completion {
replace_range: source_range.clone(),
old_range: source_range.clone(),
new_text,
label: CodeLabel::plain(url_to_fetch.to_string(), None),
documentation: None,
source: project::CompletionSource::Custom,
icon_path: Some(IconName::Globe.path().into()),
insert_text_mode: None,
confirm: Some(confirm_completion_callback(
IconName::Globe.path().into(),
url_to_fetch.clone(),
@@ -301,8 +258,11 @@ impl ContextPickerCompletionProvider {
path_prefix,
);

let label =
build_code_label_for_full_path(&file_name, directory.as_ref().map(|s| s.as_ref()), cx);
let label = Self::build_code_label_for_full_path(
&file_name,
directory.as_ref().map(|s| s.as_ref()),
cx,
);
let full_path = if let Some(directory) = directory {
format!("{}{}", directory, file_name)
} else {
@@ -324,13 +284,12 @@ impl ContextPickerCompletionProvider {
let new_text = MentionLink::for_file(&file_name, &full_path);
let new_text_len = new_text.len();
Completion {
replace_range: source_range.clone(),
old_range: source_range.clone(),
new_text,
label,
documentation: None,
source: project::CompletionSource::Custom,
icon_path: Some(completion_icon_path),
insert_text_mode: None,
confirm: Some(confirm_completion_callback(
crease_icon_path,
file_name,
@@ -387,13 +346,12 @@ impl ContextPickerCompletionProvider {
let new_text = MentionLink::for_symbol(&symbol.name, &full_path);
let new_text_len = new_text.len();
Some(Completion {
replace_range: source_range.clone(),
old_range: source_range.clone(),
new_text,
label,
documentation: None,
source: project::CompletionSource::Custom,
icon_path: Some(IconName::Code.path().into()),
insert_text_mode: None,
confirm: Some(confirm_completion_callback(
IconName::Code.path().into(),
symbol.name.clone().into(),
@@ -419,22 +377,6 @@ impl ContextPickerCompletionProvider {
}
}

fn build_code_label_for_full_path(file_name: &str, directory: Option<&str>, cx: &App) -> CodeLabel {
let comment_id = cx.theme().syntax().highlight_id("comment").map(HighlightId);
let mut label = CodeLabel::default();

label.push_str(&file_name, None);
label.push_str(" ", None);

if let Some(directory) = directory {
label.push_str(&directory, comment_id);
}

label.filter_range = 0..label.text().len();

label
}

impl CompletionProvider for ContextPickerCompletionProvider {
fn completions(
&self,
@@ -457,9 +399,10 @@ impl CompletionProvider for ContextPickerCompletionProvider {
return Task::ready(Ok(None));
};

let Some((workspace, context_store)) =
self.workspace.upgrade().zip(self.context_store.upgrade())
else {
let Some(workspace) = self.workspace.upgrade() else {
return Task::ready(Ok(None));
};
let Some(context_store) = self.context_store.upgrade() else {
return Task::ready(Ok(None));
};

@@ -471,89 +414,154 @@ impl CompletionProvider for ContextPickerCompletionProvider {
let editor = self.editor.clone();
let http_client = workspace.read(cx).client().http_client().clone();

let MentionCompletion { mode, argument, .. } = state;
let query = argument.unwrap_or_else(|| "".to_string());

let recent_entries = recent_context_picker_entries(
context_store.clone(),
thread_store.clone(),
workspace.clone(),
cx,
);

let search_task = search(
mode,
query,
Arc::<AtomicBool>::default(),
recent_entries,
thread_store.clone(),
workspace.clone(),
cx,
);

cx.spawn(async move |_, cx| {
let matches = search_task.await;
let Some(editor) = editor.upgrade() else {
return Ok(None);
};
let mut completions = Vec::new();

Ok(Some(cx.update(|cx| {
matches
.into_iter()
.filter_map(|mat| match mat {
Match::File(FileMatch { mat, is_recent }) => {
Some(Self::completion_for_path(
ProjectPath {
worktree_id: WorktreeId::from_usize(mat.worktree_id),
path: mat.path.clone(),
},
&mat.path_prefix,
is_recent,
mat.is_dir,
excerpt_id,
source_range.clone(),
editor.clone(),
context_store.clone(),
let MentionCompletion { mode, argument, .. } = state;

let query = argument.unwrap_or_else(|| "".to_string());
match mode {
Some(ContextPickerMode::File) => {
let path_matches = cx
.update(|cx| {
super::file_context_picker::search_paths(
query,
Arc::<AtomicBool>::default(),
&workspace,
cx,
))
}
Match::Symbol(SymbolMatch { symbol, .. }) => Self::completion_for_symbol(
symbol,
excerpt_id,
source_range.clone(),
editor.clone(),
context_store.clone(),
workspace.clone(),
cx,
),
Match::Thread(ThreadMatch {
thread, is_recent, ..
}) => {
let thread_store = thread_store.as_ref().and_then(|t| t.upgrade())?;
Some(Self::completion_for_thread(
thread,
excerpt_id,
)
})?
.await;

if let Some(editor) = editor.upgrade() {
completions.reserve(path_matches.len());
cx.update(|cx| {
completions.extend(path_matches.iter().map(|mat| {
Self::completion_for_path(
ProjectPath {
worktree_id: WorktreeId::from_usize(mat.worktree_id),
path: mat.path.clone(),
},
&mat.path_prefix,
false,
mat.is_dir,
excerpt_id,
source_range.clone(),
editor.clone(),
context_store.clone(),
cx,
)
}));
})?;
}
}
Some(ContextPickerMode::Symbol) => {
if let Some(editor) = editor.upgrade() {
let symbol_matches = cx
.update(|cx| {
super::symbol_context_picker::search_symbols(
query,
Arc::new(AtomicBool::default()),
&workspace,
cx,
)
})?
.await?;
cx.update(|cx| {
completions.extend(symbol_matches.into_iter().filter_map(
|(_, symbol)| {
Self::completion_for_symbol(
symbol,
excerpt_id,
source_range.clone(),
editor.clone(),
context_store.clone(),
workspace.clone(),
cx,
)
},
));
})?;
}
}
Some(ContextPickerMode::Fetch) => {
if let Some(editor) = editor.upgrade() {
if !query.is_empty() {
completions.push(Self::completion_for_fetch(
source_range.clone(),
is_recent,
query.into(),
excerpt_id,
editor.clone(),
context_store.clone(),
thread_store,
))
http_client.clone(),
));
}
Match::Fetch(url) => Some(Self::completion_for_fetch(
source_range.clone(),
url,
excerpt_id,
editor.clone(),
context_store.clone(),
http_client.clone(),
)),
Match::Mode(mode) => {
Some(Self::completion_for_mode(source_range.clone(), mode))

context_store.update(cx, |store, _| {
let urls = store.context().iter().filter_map(|context| {
if let AssistantContext::FetchedUrl(context) = context {
Some(context.url.clone())
} else {
None
}
});
for url in urls {
completions.push(Self::completion_for_fetch(
source_range.clone(),
url,
excerpt_id,
editor.clone(),
context_store.clone(),
http_client.clone(),
));
}
})?;
}
}
Some(ContextPickerMode::Thread) => {
if let Some((thread_store, editor)) = thread_store
.and_then(|thread_store| thread_store.upgrade())
.zip(editor.upgrade())
{
let threads = cx
.update(|cx| {
super::thread_context_picker::search_threads(
query,
thread_store.clone(),
cx,
)
})?
.await;
for thread in threads {
completions.push(Self::completion_for_thread(
thread.clone(),
excerpt_id,
source_range.clone(),
false,
editor.clone(),
context_store.clone(),
thread_store.clone(),
));
}
})
.collect()
})?))
}
}
None => {
cx.update(|cx| {
if let Some(editor) = editor.upgrade() {
completions.extend(Self::default_completions(
excerpt_id,
source_range.clone(),
context_store.clone(),
thread_store.clone(),
editor,
workspace.clone(),
cx,
));
}
})?;
}
}
Ok(Some(completions))
})
}

@@ -610,20 +618,21 @@ fn confirm_completion_callback(
editor: Entity<Editor>,
add_context_fn: impl Fn(&mut App) -> () + Send + Sync + 'static,
) -> Arc<dyn Fn(CompletionIntent, &mut Window, &mut App) -> bool + Send + Sync> {
Arc::new(move |_, _, cx| {
Arc::new(move |_, window, cx| {
add_context_fn(cx);

let crease_text = crease_text.clone();
let crease_icon_path = crease_icon_path.clone();
let editor = editor.clone();
cx.defer(move |cx| {
crate::context_picker::insert_fold_for_mention(
window.defer(cx, move |window, cx| {
crate::context_picker::insert_crease_for_mention(
excerpt_id,
start,
content_len,
crease_text,
crease_icon_path,
editor,
window,
cx,
);
});
@@ -662,12 +671,7 @@ impl MentionCompletion {
let mut end = last_mention_start + 1;
if let Some(mode_text) = parts.next() {
end += mode_text.len();

if let Some(parsed_mode) = ContextPickerMode::try_from(mode_text).ok() {
mode = Some(parsed_mode);
} else {
argument = Some(mode_text.to_string());
}
mode = ContextPickerMode::try_from(mode_text).ok();
match rest_of_line[mode_text.len()..].find(|c: char| !c.is_whitespace()) {
Some(whitespace_count) => {
if let Some(argument_text) = parts.next() {
@@ -693,14 +697,13 @@ impl MentionCompletion {
#[cfg(test)]
mod tests {
use super::*;
use editor::AnchorRangeExt;
use gpui::{EventEmitter, FocusHandle, Focusable, TestAppContext, VisualTestContext};
use gpui::{Focusable, TestAppContext, VisualTestContext};
use project::{Project, ProjectPath};
use serde_json::json;
use settings::SettingsStore;
use std::ops::Deref;
use std::{ops::Deref, path::PathBuf};
use util::{path, separator};
use workspace::{AppState, Item};
use workspace::AppState;

#[test]
fn test_mention_completion_parse() {
@@ -760,42 +763,9 @@ mod tests {
})
);

assert_eq!(
MentionCompletion::try_parse("Lorem @main", 0),
Some(MentionCompletion {
source_range: 6..11,
mode: None,
argument: Some("main".to_string()),
})
);

assert_eq!(MentionCompletion::try_parse("test@", 0), None);
}

struct AtMentionEditor(Entity<Editor>);

impl Item for AtMentionEditor {
type Event = ();

fn include_in_nav_history() -> bool {
false
}
}

impl EventEmitter<()> for AtMentionEditor {}

impl Focusable for AtMentionEditor {
fn focus_handle(&self, cx: &App) -> FocusHandle {
self.0.read(cx).focus_handle(cx).clone()
}
}

impl Render for AtMentionEditor {
fn render(&mut self, _window: &mut Window, _cx: &mut Context<Self>) -> impl IntoElement {
self.0.clone().into_any_element()
}
}

#[gpui::test]
async fn test_context_completion_provider(cx: &mut TestAppContext) {
init_test(cx);
@@ -871,30 +841,28 @@ mod tests {
.unwrap();
}

let editor = workspace.update_in(&mut cx, |workspace, window, cx| {
let editor = cx.new(|cx| {
Editor::new(
editor::EditorMode::full(),
multi_buffer::MultiBuffer::build_simple("", cx),
let item = workspace
.update_in(&mut cx, |workspace, window, cx| {
workspace.open_path(
ProjectPath {
worktree_id,
path: PathBuf::from("editor").into(),
},
None,
true,
window,
cx,
)
});
workspace.active_pane().update(cx, |pane, cx| {
pane.add_item(
Box::new(cx.new(|_| AtMentionEditor(editor.clone()))),
true,
true,
None,
window,
cx,
);
});
editor
})
.await
.expect("Could not open test file");

let editor = cx.update(|_, cx| {
item.act_as::<Editor>(cx)
.expect("Opened test file wasn't an editor")
});

let context_store = cx.new(|_| ContextStore::new(project.downgrade(), None));
let context_store = cx.new(|_| ContextStore::new(workspace.downgrade(), None));

let editor_entity = editor.downgrade();
editor.update_in(&mut cx, |editor, window, cx| {
@@ -922,10 +890,10 @@ mod tests {
assert_eq!(
current_completion_labels(editor),
&[
"editor dir/",
"seven.txt dir/b/",
"six.txt dir/b/",
"five.txt dir/b/",
"four.txt dir/a/",
"Files & Directories",
"Symbols",
"Fetch"
@@ -967,7 +935,7 @@ mod tests {
assert_eq!(editor.text(cx), "Lorem [@one.txt](@file:dir/a/one.txt)",);
assert!(!editor.has_visible_completions_menu());
assert_eq!(
fold_ranges(editor, cx),
crease_ranges(editor, cx),
vec![Point::new(0, 6)..Point::new(0, 37)]
);
});
@@ -978,7 +946,7 @@ mod tests {
assert_eq!(editor.text(cx), "Lorem [@one.txt](@file:dir/a/one.txt) ",);
assert!(!editor.has_visible_completions_menu());
assert_eq!(
fold_ranges(editor, cx),
crease_ranges(editor, cx),
vec![Point::new(0, 6)..Point::new(0, 37)]
);
});
@@ -992,7 +960,7 @@ mod tests {
);
assert!(!editor.has_visible_completions_menu());
assert_eq!(
fold_ranges(editor, cx),
crease_ranges(editor, cx),
vec![Point::new(0, 6)..Point::new(0, 37)]
);
});
@@ -1006,7 +974,7 @@ mod tests {
);
assert!(editor.has_visible_completions_menu());
assert_eq!(
fold_ranges(editor, cx),
crease_ranges(editor, cx),
vec![Point::new(0, 6)..Point::new(0, 37)]
);
});
@@ -1020,14 +988,14 @@ mod tests {
editor.update(&mut cx, |editor, cx| {
assert_eq!(
editor.text(cx),
"Lorem [@one.txt](@file:dir/a/one.txt) Ipsum [@seven.txt](@file:dir/b/seven.txt)"
"Lorem [@one.txt](@file:dir/a/one.txt) Ipsum [@editor](@file:dir/editor)"
);
assert!(!editor.has_visible_completions_menu());
assert_eq!(
fold_ranges(editor, cx),
crease_ranges(editor, cx),
vec![
Point::new(0, 6)..Point::new(0, 37),
Point::new(0, 44)..Point::new(0, 79)
Point::new(0, 44)..Point::new(0, 71)
]
);
});
@@ -1037,14 +1005,14 @@ mod tests {
editor.update(&mut cx, |editor, cx| {
assert_eq!(
editor.text(cx),
"Lorem [@one.txt](@file:dir/a/one.txt) Ipsum [@seven.txt](@file:dir/b/seven.txt)\n@"
"Lorem [@one.txt](@file:dir/a/one.txt) Ipsum [@editor](@file:dir/editor)\n@"
);
assert!(editor.has_visible_completions_menu());
assert_eq!(
fold_ranges(editor, cx),
crease_ranges(editor, cx),
vec![
Point::new(0, 6)..Point::new(0, 37),
Point::new(0, 44)..Point::new(0, 79)
Point::new(0, 44)..Point::new(0, 71)
]
);
});
@@ -1058,27 +1026,29 @@ mod tests {
editor.update(&mut cx, |editor, cx| {
assert_eq!(
editor.text(cx),
"Lorem [@one.txt](@file:dir/a/one.txt) Ipsum [@seven.txt](@file:dir/b/seven.txt)\n[@six.txt](@file:dir/b/six.txt)"
"Lorem [@one.txt](@file:dir/a/one.txt) Ipsum [@editor](@file:dir/editor)\n[@seven.txt](@file:dir/b/seven.txt)"
);
assert!(!editor.has_visible_completions_menu());
assert_eq!(
fold_ranges(editor, cx),
crease_ranges(editor, cx),
vec![
Point::new(0, 6)..Point::new(0, 37),
Point::new(0, 44)..Point::new(0, 79),
Point::new(1, 0)..Point::new(1, 31)
Point::new(0, 44)..Point::new(0, 71),
Point::new(1, 0)..Point::new(1, 35)
]
);
});
}

fn fold_ranges(editor: &Editor, cx: &mut App) -> Vec<Range<Point>> {
fn crease_ranges(editor: &Editor, cx: &mut App) -> Vec<Range<Point>> {
let snapshot = editor.buffer().read(cx).snapshot(cx);
editor.display_map.update(cx, |display_map, cx| {
display_map
.snapshot(cx)
.folds_in_range(0..snapshot.len())
.map(|fold| fold.range.to_point(&snapshot))
.crease_snapshot
.crease_items_with_offsets(&snapshot)
.into_iter()
.map(|(_, range)| range)
.collect()
})
}

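The fold_ranges → crease_ranges rename in the tests above mirrors the crease-based mention handling: the helper now reads ranges from the display map's crease snapshot rather than from folds. The added side of the helper, reassembled from the hunk so the interleaved lines are easier to follow:

    fn crease_ranges(editor: &Editor, cx: &mut App) -> Vec<Range<Point>> {
        let snapshot = editor.buffer().read(cx).snapshot(cx);
        editor.display_map.update(cx, |display_map, cx| {
            display_map
                .snapshot(cx)
                .crease_snapshot
                .crease_items_with_offsets(&snapshot)
                .into_iter()
                .map(|(_, range)| range)
                .collect()
        })
    }
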
@@ -11,7 +11,7 @@ use picker::{Picker, PickerDelegate};
use ui::{Context, ListItem, Window, prelude::*};
use workspace::Workspace;

use crate::context_picker::ContextPicker;
use crate::context_picker::{ConfirmBehavior, ContextPicker};
use crate::context_store::ContextStore;

pub struct FetchContextPicker {
@@ -23,10 +23,16 @@ impl FetchContextPicker {
context_picker: WeakEntity<ContextPicker>,
workspace: WeakEntity<Workspace>,
context_store: WeakEntity<ContextStore>,
confirm_behavior: ConfirmBehavior,
window: &mut Window,
cx: &mut Context<Self>,
) -> Self {
let delegate = FetchContextPickerDelegate::new(context_picker, workspace, context_store);
let delegate = FetchContextPickerDelegate::new(
context_picker,
workspace,
context_store,
confirm_behavior,
);
let picker = cx.new(|cx| Picker::uniform_list(delegate, window, cx));

Self { picker }
@@ -56,6 +62,7 @@ pub struct FetchContextPickerDelegate {
context_picker: WeakEntity<ContextPicker>,
workspace: WeakEntity<Workspace>,
context_store: WeakEntity<ContextStore>,
confirm_behavior: ConfirmBehavior,
url: String,
}

@@ -64,11 +71,13 @@ impl FetchContextPickerDelegate {
context_picker: WeakEntity<ContextPicker>,
workspace: WeakEntity<Workspace>,
context_store: WeakEntity<ContextStore>,
confirm_behavior: ConfirmBehavior,
) -> Self {
FetchContextPickerDelegate {
context_picker,
workspace,
context_store,
confirm_behavior,
url: String::new(),
}
}
@@ -195,15 +204,25 @@ impl PickerDelegate for FetchContextPickerDelegate {

let http_client = workspace.read(cx).client().http_client().clone();
let url = self.url.clone();
let confirm_behavior = self.confirm_behavior;
cx.spawn_in(window, async move |this, cx| {
let text = cx
.background_spawn(fetch_url_content(http_client, url.clone()))
.await?;

this.update(cx, |this, cx| {
this.delegate.context_store.update(cx, |context_store, cx| {
context_store.add_fetched_url(url, text, cx)
})
this.update_in(cx, |this, window, cx| {
this.delegate
.context_store
.update(cx, |context_store, cx| {
context_store.add_fetched_url(url, text, cx)
})?;

match confirm_behavior {
ConfirmBehavior::KeepOpen => {}
ConfirmBehavior::Close => this.delegate.dismissed(window, cx),
}

anyhow::Ok(())
})??;

anyhow::Ok(())

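In the fetch picker's confirm path above, the new ConfirmBehavior field decides whether the picker stays open after the fetched URL is added; update_in is used so the Window needed by dismissed is available inside the async block. The added side of the hunk, reassembled:

    this.update_in(cx, |this, window, cx| {
        this.delegate
            .context_store
            .update(cx, |context_store, cx| {
                context_store.add_fetched_url(url, text, cx)
            })?;

        match confirm_behavior {
            ConfirmBehavior::KeepOpen => {}
            ConfirmBehavior::Close => this.delegate.dismissed(window, cx),
        }

        anyhow::Ok(())
    })??;
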
@@ -11,9 +11,9 @@ use picker::{Picker, PickerDelegate};
use project::{PathMatchCandidateSet, ProjectPath, WorktreeId};
use ui::{ListItem, Tooltip, prelude::*};
use util::ResultExt as _;
use workspace::Workspace;
use workspace::{Workspace, notifications::NotifyResultExt};

use crate::context_picker::ContextPicker;
use crate::context_picker::{ConfirmBehavior, ContextPicker};
use crate::context_store::{ContextStore, FileInclusion};

pub struct FileContextPicker {
@@ -25,10 +25,16 @@ impl FileContextPicker {
context_picker: WeakEntity<ContextPicker>,
workspace: WeakEntity<Workspace>,
context_store: WeakEntity<ContextStore>,
confirm_behavior: ConfirmBehavior,
window: &mut Window,
cx: &mut Context<Self>,
) -> Self {
let delegate = FileContextPickerDelegate::new(context_picker, workspace, context_store);
let delegate = FileContextPickerDelegate::new(
context_picker,
workspace,
context_store,
confirm_behavior,
);
let picker = cx.new(|cx| Picker::uniform_list(delegate, window, cx));

Self { picker }
@@ -51,7 +57,8 @@ pub struct FileContextPickerDelegate {
context_picker: WeakEntity<ContextPicker>,
workspace: WeakEntity<Workspace>,
context_store: WeakEntity<ContextStore>,
matches: Vec<FileMatch>,
confirm_behavior: ConfirmBehavior,
matches: Vec<PathMatch>,
selected_index: usize,
}

@@ -60,11 +67,13 @@ impl FileContextPickerDelegate {
context_picker: WeakEntity<ContextPicker>,
workspace: WeakEntity<Workspace>,
context_store: WeakEntity<ContextStore>,
confirm_behavior: ConfirmBehavior,
) -> Self {
Self {
context_picker,
workspace,
context_store,
confirm_behavior,
matches: Vec::new(),
selected_index: 0,
}
@@ -105,7 +114,7 @@ impl PickerDelegate for FileContextPickerDelegate {
return Task::ready(());
};

let search_task = search_files(query, Arc::<AtomicBool>::default(), &workspace, cx);
let search_task = search_paths(query, Arc::<AtomicBool>::default(), &workspace, cx);

cx.spawn_in(window, async move |this, cx| {
// TODO: This should be probably be run in the background.
@@ -118,8 +127,8 @@ impl PickerDelegate for FileContextPickerDelegate {
})
}

fn confirm(&mut self, _secondary: bool, _window: &mut Window, cx: &mut Context<Picker<Self>>) {
let Some(FileMatch { mat, .. }) = self.matches.get(self.selected_index) else {
fn confirm(&mut self, _secondary: bool, window: &mut Window, cx: &mut Context<Picker<Self>>) {
let Some(mat) = self.matches.get(self.selected_index) else {
return;
};

@@ -144,7 +153,17 @@ impl PickerDelegate for FileContextPickerDelegate {
return;
};

task.detach_and_log_err(cx);
let confirm_behavior = self.confirm_behavior;
cx.spawn_in(window, async move |this, cx| {
match task.await.notify_async_err(cx) {
None => anyhow::Ok(()),
Some(()) => this.update_in(cx, |this, window, cx| match confirm_behavior {
ConfirmBehavior::KeepOpen => {}
ConfirmBehavior::Close => this.delegate.dismissed(window, cx),
}),
}
})
.detach_and_log_err(cx);
}

fn dismissed(&mut self, _: &mut Window, cx: &mut Context<Picker<Self>>) {
@@ -162,7 +181,7 @@ impl PickerDelegate for FileContextPickerDelegate {
_window: &mut Window,
cx: &mut Context<Picker<Self>>,
) -> Option<Self::ListItem> {
let FileMatch { mat, .. } = &self.matches[ix];
let path_match = &self.matches[ix];

Some(
ListItem::new(ix)
@@ -170,10 +189,9 @@ impl PickerDelegate for FileContextPickerDelegate {
.toggle_state(selected)
.child(render_file_context_entry(
ElementId::NamedInteger("file-ctx-picker".into(), ix),
WorktreeId::from_usize(mat.worktree_id),
&mat.path,
&mat.path_prefix,
mat.is_dir,
&path_match.path,
&path_match.path_prefix,
path_match.is_dir,
self.context_store.clone(),
cx,
)),
@@ -181,17 +199,12 @@ impl PickerDelegate for FileContextPickerDelegate {
}
}

pub struct FileMatch {
pub mat: PathMatch,
pub is_recent: bool,
}

pub(crate) fn search_files(
pub(crate) fn search_paths(
query: String,
cancellation_flag: Arc<AtomicBool>,
workspace: &Entity<Workspace>,
cx: &App,
) -> Task<Vec<FileMatch>> {
) -> Task<Vec<PathMatch>> {
if query.is_empty() {
let workspace = workspace.read(cx);
let project = workspace.project().read(cx);
@@ -200,34 +213,28 @@ pub(crate) fn search_files(
.into_iter()
.filter_map(|(project_path, _)| {
let worktree = project.worktree_for_id(project_path.worktree_id, cx)?;
Some(FileMatch {
mat: PathMatch {
score: 0.,
positions: Vec::new(),
worktree_id: project_path.worktree_id.to_usize(),
path: project_path.path,
path_prefix: worktree.read(cx).root_name().into(),
distance_to_relative_ancestor: 0,
is_dir: false,
},
is_recent: true,
Some(PathMatch {
score: 0.,
positions: Vec::new(),
worktree_id: project_path.worktree_id.to_usize(),
path: project_path.path,
path_prefix: worktree.read(cx).root_name().into(),
distance_to_relative_ancestor: 0,
is_dir: false,
})
});

let file_matches = project.worktrees(cx).flat_map(|worktree| {
let worktree = worktree.read(cx);
let path_prefix: Arc<str> = worktree.root_name().into();
worktree.entries(false, 0).map(move |entry| FileMatch {
mat: PathMatch {
score: 0.,
positions: Vec::new(),
worktree_id: worktree.id().to_usize(),
path: entry.path.clone(),
path_prefix: path_prefix.clone(),
distance_to_relative_ancestor: 0,
is_dir: entry.is_dir(),
},
is_recent: false,
worktree.entries(false, 0).map(move |entry| PathMatch {
score: 0.,
positions: Vec::new(),
worktree_id: worktree.id().to_usize(),
path: entry.path.clone(),
path_prefix: path_prefix.clone(),
distance_to_relative_ancestor: 0,
is_dir: entry.is_dir(),
})
});

@@ -262,12 +269,6 @@ pub(crate) fn search_files(
executor,
)
.await
.into_iter()
.map(|mat| FileMatch {
mat,
is_recent: false,
})
.collect::<Vec<_>>()
})
}
}
@@ -310,26 +311,19 @@ pub fn extract_file_name_and_directory(

pub fn render_file_context_entry(
id: ElementId,
worktree_id: WorktreeId,
path: &Arc<Path>,
path: &Path,
path_prefix: &Arc<str>,
is_directory: bool,
context_store: WeakEntity<ContextStore>,
cx: &App,
) -> Stateful<Div> {
let (file_name, directory) = extract_file_name_and_directory(&path, path_prefix);
let (file_name, directory) = extract_file_name_and_directory(path, path_prefix);

let added = context_store.upgrade().and_then(|context_store| {
let project_path = ProjectPath {
worktree_id,
path: path.clone(),
};
if is_directory {
context_store.read(cx).includes_directory(&project_path)
context_store.read(cx).includes_directory(path)
} else {
context_store
.read(cx)
.will_include_file_path(&project_path, cx)
context_store.read(cx).will_include_file_path(path, cx)
}
});

@@ -369,9 +363,8 @@ pub fn render_file_context_entry(
)
.child(Label::new("Added").size(LabelSize::Small)),
),
FileInclusion::InDirectory(directory_project_path) => {
// TODO: Consider using worktree full_path to include worktree name.
let directory_path = directory_project_path.path.to_string_lossy().into_owned();
FileInclusion::InDirectory(dir_name) => {
let dir_name = dir_name.to_string_lossy().into_owned();

el.child(
h_flex()
@@ -385,7 +378,7 @@ pub fn render_file_context_entry(
)
.child(Label::new("Included").size(LabelSize::Small)),
)
.tooltip(Tooltip::text(format!("in {directory_path}")))
.tooltip(Tooltip::text(format!("in {dir_name}")))
}
})
}

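The added side of the hunks above replaces the FileMatch wrapper with plain fuzzy::PathMatch values, so search_paths returns Task<Vec<PathMatch>> and the delegate stores those directly. A hedged sketch of how update_matches might consume it (the delegate-assignment lines are not shown in the hunks and are assumed here):

    let search_task = search_paths(query, Arc::<AtomicBool>::default(), &workspace, cx);
    cx.spawn_in(window, async move |this, cx| {
        let matches: Vec<PathMatch> = search_task.await;
        this.update(cx, |this, cx| {
            this.delegate.matches = matches;  // assumed; not part of the diff above
            cx.notify();
        })
        .ok();
    })
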
@@ -2,7 +2,7 @@ use std::cmp::Reverse;
use std::sync::Arc;
use std::sync::atomic::AtomicBool;

use anyhow::Result;
use anyhow::{Context as _, Result};
use fuzzy::{StringMatch, StringMatchCandidate};
use gpui::{
App, AppContext, DismissEvent, Entity, FocusHandle, Focusable, Stateful, Task, WeakEntity,
@@ -15,7 +15,7 @@ use ui::{ListItem, prelude::*};
use util::ResultExt as _;
use workspace::Workspace;

use crate::context_picker::ContextPicker;
use crate::context_picker::{ConfirmBehavior, ContextPicker};
use crate::context_store::ContextStore;

pub struct SymbolContextPicker {
@@ -27,10 +27,16 @@ impl SymbolContextPicker {
context_picker: WeakEntity<ContextPicker>,
workspace: WeakEntity<Workspace>,
context_store: WeakEntity<ContextStore>,
confirm_behavior: ConfirmBehavior,
window: &mut Window,
cx: &mut Context<Self>,
) -> Self {
let delegate = SymbolContextPickerDelegate::new(context_picker, workspace, context_store);
let delegate = SymbolContextPickerDelegate::new(
context_picker,
workspace,
context_store,
confirm_behavior,
);
let picker = cx.new(|cx| Picker::uniform_list(delegate, window, cx));

Self { picker }
@@ -53,6 +59,7 @@ pub struct SymbolContextPickerDelegate {
context_picker: WeakEntity<ContextPicker>,
workspace: WeakEntity<Workspace>,
context_store: WeakEntity<ContextStore>,
confirm_behavior: ConfirmBehavior,
matches: Vec<SymbolEntry>,
selected_index: usize,
}
@@ -62,11 +69,13 @@ impl SymbolContextPickerDelegate {
context_picker: WeakEntity<ContextPicker>,
workspace: WeakEntity<Workspace>,
context_store: WeakEntity<ContextStore>,
confirm_behavior: ConfirmBehavior,
) -> Self {
Self {
context_picker,
workspace,
context_store,
confirm_behavior,
matches: Vec::new(),
selected_index: 0,
}
@@ -110,7 +119,11 @@ impl PickerDelegate for SymbolContextPickerDelegate {
let search_task = search_symbols(query, Arc::<AtomicBool>::default(), &workspace, cx);
let context_store = self.context_store.clone();
cx.spawn_in(window, async move |this, cx| {
let symbols = search_task.await;
let symbols = search_task
.await
.context("Failed to load symbols")
.log_err()
.unwrap_or_default();

let symbol_entries = context_store
.read_with(cx, |context_store, cx| {
@@ -126,7 +139,7 @@ impl PickerDelegate for SymbolContextPickerDelegate {
})
}

fn confirm(&mut self, _secondary: bool, _window: &mut Window, cx: &mut Context<Picker<Self>>) {
fn confirm(&mut self, _secondary: bool, window: &mut Window, cx: &mut Context<Picker<Self>>) {
let Some(mat) = self.matches.get(self.selected_index) else {
return;
};
@@ -134,6 +147,7 @@ impl PickerDelegate for SymbolContextPickerDelegate {
return;
};

let confirm_behavior = self.confirm_behavior;
let add_symbol_task = add_symbol(
mat.symbol.clone(),
true,
@@ -143,12 +157,16 @@ impl PickerDelegate for SymbolContextPickerDelegate {
);

let selected_index = self.selected_index;
cx.spawn(async move |this, cx| {
cx.spawn_in(window, async move |this, cx| {
let included = add_symbol_task.await?;
this.update(cx, |this, _| {
this.update_in(cx, |this, window, cx| {
if let Some(mat) = this.delegate.matches.get_mut(selected_index) {
mat.is_included = included;
}
match confirm_behavior {
ConfirmBehavior::KeepOpen => {}
ConfirmBehavior::Close => this.delegate.dismissed(window, cx),
}
})
})
.detach_and_log_err(cx);
@@ -267,16 +285,12 @@ fn find_matching_symbol(symbol: &Symbol, candidates: &[DocumentSymbol]) -> Optio
}
}

pub struct SymbolMatch {
pub symbol: Symbol,
}

pub(crate) fn search_symbols(
query: String,
cancellation_flag: Arc<AtomicBool>,
workspace: &Entity<Workspace>,
cx: &mut App,
) -> Task<Vec<SymbolMatch>> {
) -> Task<Result<Vec<(StringMatch, Symbol)>>> {
let symbols_task = workspace.update(cx, |workspace, cx| {
workspace
.project()
@@ -284,28 +298,19 @@ pub(crate) fn search_symbols(
});
let project = workspace.read(cx).project().clone();
cx.spawn(async move |cx| {
let Some(symbols) = symbols_task.await.log_err() else {
return Vec::new();
};
let Some((visible_match_candidates, external_match_candidates)): Option<(Vec<_>, Vec<_>)> =
project
.update(cx, |project, cx| {
symbols
.iter()
.enumerate()
.map(|(id, symbol)| {
StringMatchCandidate::new(id, &symbol.label.filter_text())
})
.partition(|candidate| {
project
.entry_for_path(&symbols[candidate.id].path, cx)
.map_or(false, |e| !e.is_ignored)
})
})
.log_err()
else {
return Vec::new();
};
let symbols = symbols_task.await?;
let (visible_match_candidates, external_match_candidates): (Vec<_>, Vec<_>) = project
.update(cx, |project, cx| {
symbols
.iter()
.enumerate()
.map(|(id, symbol)| StringMatchCandidate::new(id, &symbol.label.filter_text()))
.partition(|candidate| {
project
.entry_for_path(&symbols[candidate.id].path, cx)
.map_or(false, |e| !e.is_ignored)
})
})?;

const MAX_MATCHES: usize = 100;
let mut visible_matches = cx.background_executor().block(fuzzy::match_strings(
@@ -334,7 +339,7 @@ pub(crate) fn search_symbols(
let mut matches = visible_matches;
matches.append(&mut external_matches);

matches
Ok(matches
.into_iter()
.map(|mut mat| {
let symbol = symbols[mat.candidate_id].clone();
@@ -342,19 +347,19 @@ pub(crate) fn search_symbols(
for position in &mut mat.positions {
*position += filter_start;
}
SymbolMatch { symbol }
(mat, symbol)
})
.collect()
.collect())
})
}

fn compute_symbol_entries(
symbols: Vec<SymbolMatch>,
symbols: Vec<(StringMatch, Symbol)>,
context_store: &ContextStore,
cx: &App,
) -> Vec<SymbolEntry> {
let mut symbol_entries = Vec::with_capacity(symbols.len());
for SymbolMatch { symbol, .. } in symbols {
for (_, symbol) in symbols {
let symbols_for_path = context_store.included_symbols_by_path().get(&symbol.path);
let is_included = if let Some(symbols_for_path) = symbols_for_path {
let mut is_included = false;

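search_symbols above changes to return Result<Vec<(StringMatch, Symbol)>> instead of silently yielding an empty list on error, so the delegate now logs the failure and falls back to no matches. From the added lines of the hunk:

    let symbols = search_task
        .await
        .context("Failed to load symbols")
        .log_err()
        .unwrap_or_default();
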
@@ -1,12 +1,11 @@
|
||||
use std::sync::Arc;
|
||||
use std::sync::atomic::AtomicBool;
|
||||
|
||||
use fuzzy::StringMatchCandidate;
|
||||
use gpui::{App, DismissEvent, Entity, FocusHandle, Focusable, Task, WeakEntity};
|
||||
use picker::{Picker, PickerDelegate};
|
||||
use ui::{ListItem, prelude::*};
|
||||
|
||||
use crate::context_picker::ContextPicker;
|
||||
use crate::context_picker::{ConfirmBehavior, ContextPicker};
|
||||
use crate::context_store::{self, ContextStore};
|
||||
use crate::thread::ThreadId;
|
||||
use crate::thread_store::ThreadStore;
|
||||
@@ -20,11 +19,16 @@ impl ThreadContextPicker {
|
||||
thread_store: WeakEntity<ThreadStore>,
|
||||
context_picker: WeakEntity<ContextPicker>,
|
||||
context_store: WeakEntity<context_store::ContextStore>,
|
||||
confirm_behavior: ConfirmBehavior,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
let delegate =
|
||||
ThreadContextPickerDelegate::new(thread_store, context_picker, context_store);
|
||||
let delegate = ThreadContextPickerDelegate::new(
|
||||
thread_store,
|
||||
context_picker,
|
||||
context_store,
|
||||
confirm_behavior,
|
||||
);
|
||||
let picker = cx.new(|cx| Picker::uniform_list(delegate, window, cx));
|
||||
|
||||
ThreadContextPicker { picker }
@@ -53,6 +57,7 @@ pub struct ThreadContextPickerDelegate {
thread_store: WeakEntity<ThreadStore>,
context_picker: WeakEntity<ContextPicker>,
context_store: WeakEntity<context_store::ContextStore>,
confirm_behavior: ConfirmBehavior,
matches: Vec<ThreadContextEntry>,
selected_index: usize,
}
@@ -62,11 +67,13 @@ impl ThreadContextPickerDelegate {
thread_store: WeakEntity<ThreadStore>,
context_picker: WeakEntity<ContextPicker>,
context_store: WeakEntity<context_store::ContextStore>,
confirm_behavior: ConfirmBehavior,
) -> Self {
ThreadContextPickerDelegate {
thread_store,
context_picker,
context_store,
confirm_behavior,
matches: Vec::new(),
selected_index: 0,
}
@@ -107,11 +114,11 @@ impl PickerDelegate for ThreadContextPickerDelegate {
return Task::ready(());
};

let search_task = search_threads(query, Arc::new(AtomicBool::default()), threads, cx);
let search_task = search_threads(query, threads, cx);
cx.spawn_in(window, async move |this, cx| {
let matches = search_task.await;
this.update(cx, |this, cx| {
this.delegate.matches = matches.into_iter().map(|mat| mat.thread).collect();
this.delegate.matches = matches;
this.delegate.selected_index = 0;
cx.notify();
})
@@ -119,7 +126,7 @@ impl PickerDelegate for ThreadContextPickerDelegate {
})
}

fn confirm(&mut self, _secondary: bool, _window: &mut Window, cx: &mut Context<Picker<Self>>) {
fn confirm(&mut self, _secondary: bool, window: &mut Window, cx: &mut Context<Picker<Self>>) {
let Some(entry) = self.matches.get(self.selected_index) else {
return;
};
@@ -130,15 +137,20 @@ impl PickerDelegate for ThreadContextPickerDelegate {

let open_thread_task = thread_store.update(cx, |this, cx| this.open_thread(&entry.id, cx));

cx.spawn(async move |this, cx| {
cx.spawn_in(window, async move |this, cx| {
let thread = open_thread_task.await?;
this.update(cx, |this, cx| {
this.update_in(cx, |this, window, cx| {
this.delegate
.context_store
.update(cx, |context_store, cx| {
context_store.add_thread(thread, true, cx)
})
.ok();

match this.delegate.confirm_behavior {
ConfirmBehavior::KeepOpen => {}
ConfirmBehavior::Close => this.delegate.dismissed(window, cx),
}
})
})
.detach_and_log_err(cx);
@@ -205,18 +217,11 @@ pub fn render_thread_context_entry(
})
}

#[derive(Clone)]
pub struct ThreadMatch {
pub thread: ThreadContextEntry,
pub is_recent: bool,
}

pub(crate) fn search_threads(
query: String,
cancellation_flag: Arc<AtomicBool>,
thread_store: Entity<ThreadStore>,
cx: &mut App,
) -> Task<Vec<ThreadMatch>> {
) -> Task<Vec<ThreadContextEntry>> {
let threads = thread_store.update(cx, |this, _cx| {
this.threads()
.into_iter()
@@ -231,12 +236,6 @@ pub(crate) fn search_threads(
cx.background_spawn(async move {
if query.is_empty() {
threads
.into_iter()
.map(|thread| ThreadMatch {
thread,
is_recent: false,
})
.collect()
} else {
let candidates = threads
.iter()
@@ -248,17 +247,14 @@ pub(crate) fn search_threads(
&query,
false,
100,
&cancellation_flag,
&Default::default(),
executor,
)
.await;

matches
.into_iter()
.map(|mat| ThreadMatch {
thread: threads[mat.candidate_id].clone(),
is_recent: false,
})
.map(|mat| threads[mat.candidate_id].clone())
.collect()
}
})

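The hunks above drop the cancellation flag and the ThreadMatch wrapper, so the picker stores ThreadContextEntry values directly. A minimal sketch of the remaining mapping step, with a generic stand-in type rather than the crate's actual API: each fuzzy match carries the index of the candidate it refers to, and the result list is built by cloning those candidates.

// Minimal sketch (generic stand-in types): what `.map(|mat| threads[mat.candidate_id].clone())` does above.
fn matches_to_entries<T: Clone>(candidates: &[T], match_indices: &[usize]) -> Vec<T> {
    match_indices.iter().map(|&i| candidates[i].clone()).collect()
}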
@@ -1,5 +1,5 @@
use std::ops::Range;
use std::path::Path;
use std::path::{Path, PathBuf};
use std::sync::Arc;

use anyhow::{Context as _, Result, anyhow};
@@ -8,10 +8,11 @@ use futures::future::join_all;
use futures::{self, Future, FutureExt, future};
use gpui::{App, AppContext as _, Context, Entity, SharedString, Task, WeakEntity};
use language::{Buffer, File};
use project::{Project, ProjectItem, ProjectPath, Worktree};
use project::{ProjectItem, ProjectPath, Worktree};
use rope::Rope;
use text::{Anchor, BufferId, OffsetRangeExt};
use util::{ResultExt as _, maybe};
use workspace::Workspace;

use crate::ThreadStore;
use crate::context::{
@@ -22,13 +23,13 @@ use crate::context_strip::SuggestedContext;
use crate::thread::{Thread, ThreadId};

pub struct ContextStore {
project: WeakEntity<Project>,
workspace: WeakEntity<Workspace>,
context: Vec<AssistantContext>,
thread_store: Option<WeakEntity<ThreadStore>>,
// TODO: If an EntityId is used for all context types (like BufferId), can remove ContextId.
next_context_id: ContextId,
files: BTreeMap<BufferId, ContextId>,
directories: HashMap<ProjectPath, ContextId>,
directories: HashMap<PathBuf, ContextId>,
symbols: HashMap<ContextSymbolId, ContextId>,
symbol_buffers: HashMap<ContextSymbolId, Entity<Buffer>>,
symbols_by_path: HashMap<ProjectPath, Vec<ContextSymbolId>>,
@@ -39,11 +40,11 @@ pub struct ContextStore {

impl ContextStore {
pub fn new(
project: WeakEntity<Project>,
workspace: WeakEntity<Workspace>,
thread_store: Option<WeakEntity<ThreadStore>>,
) -> Self {
Self {
project,
workspace,
thread_store,
context: Vec::new(),
next_context_id: ContextId(0),
@@ -80,7 +81,12 @@ impl ContextStore {
remove_if_exists: bool,
cx: &mut Context<Self>,
) -> Task<Result<()>> {
let Some(project) = self.project.upgrade() else {
let workspace = self.workspace.clone();

let Some(project) = workspace
.upgrade()
.map(|workspace| workspace.read(cx).project().clone())
else {
return Task::ready(Err(anyhow!("failed to read project")));
};

@@ -93,7 +99,7 @@ impl ContextStore {
let buffer_id = this.update(cx, |_, cx| buffer.read(cx).remote_id())?;

let already_included = this.update(cx, |this, cx| {
match this.will_include_buffer(buffer_id, &project_path) {
match this.will_include_buffer(buffer_id, &project_path.path) {
Some(FileInclusion::Direct(context_id)) => {
if remove_if_exists {
this.remove_context(context_id, cx);
@@ -155,11 +161,15 @@ impl ContextStore {
remove_if_exists: bool,
cx: &mut Context<Self>,
) -> Task<Result<()>> {
let Some(project) = self.project.upgrade() else {
let workspace = self.workspace.clone();
let Some(project) = workspace
.upgrade()
.map(|workspace| workspace.read(cx).project().clone())
else {
return Task::ready(Err(anyhow!("failed to read project")));
};

let already_included = match self.includes_directory(&project_path) {
let already_included = match self.includes_directory(&project_path.path) {
Some(FileInclusion::Direct(context_id)) => {
if remove_if_exists {
self.remove_context(context_id, cx);
@@ -223,12 +233,14 @@ impl ContextStore {
.collect::<Vec<_>>();

if context_buffers.is_empty() {
let full_path = cx.update(|cx| worktree.read(cx).full_path(&project_path.path))?;
return Err(anyhow!("No text files found in {}", &full_path.display()));
return Err(anyhow!(
"No text files found in {}",
&project_path.path.display()
));
}

this.update(cx, |this, cx| {
this.insert_directory(worktree, project_path, context_buffers, cx);
this.insert_directory(project_path, context_buffers, cx);
})?;

anyhow::Ok(())
@@ -237,20 +249,17 @@ impl ContextStore {

fn insert_directory(
&mut self,
worktree: Entity<Worktree>,
project_path: ProjectPath,
context_buffers: Vec<ContextBuffer>,
cx: &mut Context<Self>,
) {
let id = self.next_context_id.post_inc();
let path = project_path.path.clone();
self.directories.insert(project_path, id);
self.directories.insert(project_path.path.to_path_buf(), id);

self.context
.push(AssistantContext::Directory(DirectoryContext {
id,
worktree,
path,
project_path,
context_buffers,
}));
cx.notify();
@@ -479,31 +488,23 @@ impl ContextStore {
/// Returns whether the buffer is already included directly in the context, or if it will be
/// included in the context via a directory. Directory inclusion is based on paths rather than
/// buffer IDs as the directory will be re-scanned.
pub fn will_include_buffer(
&self,
buffer_id: BufferId,
project_path: &ProjectPath,
) -> Option<FileInclusion> {
pub fn will_include_buffer(&self, buffer_id: BufferId, path: &Path) -> Option<FileInclusion> {
if let Some(context_id) = self.files.get(&buffer_id) {
return Some(FileInclusion::Direct(*context_id));
}

self.will_include_file_path_via_directory(project_path)
self.will_include_file_path_via_directory(path)
}

/// Returns whether this file path is already included directly in the context, or if it will be
/// included in the context via a directory.
pub fn will_include_file_path(
&self,
project_path: &ProjectPath,
cx: &App,
) -> Option<FileInclusion> {
pub fn will_include_file_path(&self, path: &Path, cx: &App) -> Option<FileInclusion> {
if !self.files.is_empty() {
let found_file_context = self.context.iter().find(|context| match &context {
AssistantContext::File(file_context) => {
let buffer = file_context.context_buffer.buffer.read(cx);
if let Some(context_path) = buffer.project_path(cx) {
&context_path == project_path
if let Some(file_path) = buffer_path_log_err(buffer, cx) {
*file_path == *path
} else {
false
}
@@ -515,40 +516,31 @@ impl ContextStore {
}
}

self.will_include_file_path_via_directory(project_path)
self.will_include_file_path_via_directory(path)
}

fn will_include_file_path_via_directory(
&self,
project_path: &ProjectPath,
) -> Option<FileInclusion> {
fn will_include_file_path_via_directory(&self, path: &Path) -> Option<FileInclusion> {
if self.directories.is_empty() {
return None;
}

let mut path_buf = project_path.path.to_path_buf();
let mut buf = path.to_path_buf();

while path_buf.pop() {
// TODO: This isn't very efficient. Consider using a better representation of the
// directories map.
let directory_project_path = ProjectPath {
worktree_id: project_path.worktree_id,
path: path_buf.clone().into(),
};
if let Some(_) = self.directories.get(&directory_project_path) {
return Some(FileInclusion::InDirectory(directory_project_path));
while buf.pop() {
if let Some(_) = self.directories.get(&buf) {
return Some(FileInclusion::InDirectory(buf));
}
}

None
}

pub fn includes_directory(&self, project_path: &ProjectPath) -> Option<FileInclusion> {
if let Some(context_id) = self.directories.get(project_path) {
pub fn includes_directory(&self, path: &Path) -> Option<FileInclusion> {
if let Some(context_id) = self.directories.get(path) {
return Some(FileInclusion::Direct(*context_id));
}

self.will_include_file_path_via_directory(project_path)
self.will_include_file_path_via_directory(path)
}

pub fn included_symbol(&self, symbol_id: &ContextSymbolId) -> Option<ContextId> {
@@ -582,13 +574,13 @@ impl ContextStore {
}
}

pub fn file_paths(&self, cx: &App) -> HashSet<ProjectPath> {
pub fn file_paths(&self, cx: &App) -> HashSet<PathBuf> {
self.context
.iter()
.filter_map(|context| match context {
AssistantContext::File(file) => {
let buffer = file.context_buffer.buffer.read(cx);
buffer.project_path(cx)
buffer_path_log_err(buffer, cx).map(|p| p.to_path_buf())
}
AssistantContext::Directory(_)
| AssistantContext::Symbol(_)
@@ -605,7 +597,7 @@ impl ContextStore {

pub enum FileInclusion {
Direct(ContextId),
InDirectory(ProjectPath),
InDirectory(PathBuf),
}

// ContextBuffer without text.
@@ -672,6 +664,19 @@ fn collect_buffer_info_and_text(
Ok((buffer_info, text_task))
}

pub fn buffer_path_log_err(buffer: &Buffer, cx: &App) -> Option<Arc<Path>> {
if let Some(file) = buffer.file() {
let mut path = file.path().clone();
if path.as_os_str().is_empty() {
path = file.full_path(cx).into();
}
Some(path)
} else {
log::error!("Buffer that had a path unexpectedly no longer has a path.");
None
}
}

fn to_fenced_codeblock(path: &Path, content: Rope) -> SharedString {
let path_extension = path.extension().and_then(|ext| ext.to_str());
let path_string = path.to_string_lossy();
@@ -747,13 +752,13 @@ pub fn refresh_context_store_text(
}
}
AssistantContext::Directory(directory_context) => {
let directory_path = directory_context.project_path(cx);
let should_refresh = changed_buffers.is_empty()
|| changed_buffers.iter().any(|buffer| {
let Some(buffer_path) = buffer.read(cx).project_path(cx) else {
return false;
};
buffer_path.starts_with(&directory_path)
let buffer = buffer.read(cx);

buffer_path_log_err(&buffer, cx).map_or(false, |path| {
path.starts_with(&directory_context.project_path.path)
})
});

if should_refresh {
@@ -840,16 +845,14 @@ fn refresh_directory_text(
let context_buffers = future::join_all(futures);

let id = directory_context.id;
let worktree = directory_context.worktree.clone();
let path = directory_context.path.clone();
let project_path = directory_context.project_path.clone();
Some(cx.spawn(async move |cx| {
let context_buffers = context_buffers.await;
context_store
.update(cx, |context_store, _| {
let new_directory_context = DirectoryContext {
id,
worktree,
path,
project_path,
context_buffers,
};
context_store.replace_context(AssistantContext::Directory(new_directory_context));

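In the hunks above the directories map becomes keyed by a plain PathBuf, and directory inclusion is decided by walking a file path's ancestors until one of them is found in that map. A minimal standalone sketch of that ancestor walk, assuming only std types and a stand-in id type rather than the crate's ContextId:

use std::collections::HashMap;
use std::path::{Path, PathBuf};

// A file is covered by a directory context when any ancestor of its path
// is already a key in the directories map (u64 stands in for the context id).
fn covered_by_directory(directories: &HashMap<PathBuf, u64>, path: &Path) -> Option<PathBuf> {
    let mut buf = path.to_path_buf();
    while buf.pop() {
        if directories.contains_key(&buf) {
            return Some(buf);
        }
    }
    None
}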
@@ -1,4 +1,3 @@
use std::path::Path;
use std::rc::Rc;

use collections::HashSet;
@@ -10,12 +9,11 @@ use gpui::{
};
use itertools::Itertools;
use language::Buffer;
use project::ProjectItem;
use ui::{KeyBinding, PopoverMenu, PopoverMenuHandle, Tooltip, prelude::*};
use workspace::{Workspace, notifications::NotifyResultExt};

use crate::context::{ContextId, ContextKind};
use crate::context_picker::ContextPicker;
use crate::context_picker::{ConfirmBehavior, ContextPicker};
use crate::context_store::ContextStore;
use crate::thread::Thread;
use crate::thread_store::ThreadStore;
@@ -52,6 +50,7 @@ impl ContextStrip {
workspace.clone(),
thread_store.clone(),
context_store.downgrade(),
ConfirmBehavior::KeepOpen,
window,
cx,
)
@@ -94,23 +93,26 @@ impl ContextStrip {
let active_buffer_entity = editor.buffer().read(cx).as_singleton()?;
let active_buffer = active_buffer_entity.read(cx);

let project_path = active_buffer.project_path(cx)?;
let path = active_buffer.file()?.full_path(cx);

if self
.context_store
.read(cx)
.will_include_buffer(active_buffer.remote_id(), &project_path)
.will_include_buffer(active_buffer.remote_id(), &path)
.is_some()
{
return None;
}

let file_name = active_buffer.file()?.file_name(cx);
let name = match path.file_name() {
Some(name) => name.to_string_lossy().into_owned().into(),
None => path.to_string_lossy().into_owned().into(),
};

let icon_path = FileIcons::get_icon(&Path::new(&file_name), cx);
let icon_path = FileIcons::get_icon(&path, cx);

Some(SuggestedContext::File {
name: file_name.to_string_lossy().into_owned().into(),
name,
buffer: active_buffer_entity.downgrade(),
icon_path,
})

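The suggested-context name above now comes from the buffer's full path rather than a separate file-name lookup, with the whole path as a fallback when there is no final component. A minimal sketch using std types only (not the crate's SuggestedContext API):

use std::path::Path;

// Prefer the file name; fall back to the full path when file_name() is None.
fn suggested_name(path: &Path) -> String {
    match path.file_name() {
        Some(name) => name.to_string_lossy().into_owned(),
        None => path.to_string_lossy().into_owned(),
    }
}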
@@ -28,11 +28,10 @@ use language_model::{LanguageModelRegistry, report_assistant_event};
use multi_buffer::MultiBufferRow;
use parking_lot::Mutex;
use project::LspAction;
use project::Project;
use project::{CodeAction, ProjectTransaction};
use prompt_store::PromptBuilder;
use settings::{Settings, SettingsStore};
use telemetry_events::{AssistantEventData, AssistantKind, AssistantPhase};
use telemetry_events::{AssistantEvent, AssistantKind, AssistantPhase};
use terminal_view::{TerminalView, terminal_panel::TerminalPanel};
use text::{OffsetRangeExt, ToPoint as _};
use ui::prelude::*;
@@ -255,7 +254,6 @@ impl InlineAssistant {
assistant.assist(
&active_editor,
cx.entity().downgrade(),
workspace.project().downgrade(),
thread_store,
window,
cx,
@@ -267,7 +265,6 @@ impl InlineAssistant {
assistant.assist(
&active_terminal,
cx.entity().downgrade(),
workspace.project().downgrade(),
thread_store,
window,
cx,
@@ -321,7 +318,6 @@ impl InlineAssistant {
&mut self,
editor: &Entity<Editor>,
workspace: WeakEntity<Workspace>,
project: WeakEntity<Project>,
thread_store: Option<WeakEntity<ThreadStore>>,
window: &mut Window,
cx: &mut App,
@@ -406,7 +402,7 @@ impl InlineAssistant {
codegen_ranges.push(anchor_range);

if let Some(model) = LanguageModelRegistry::read_global(cx).inline_assistant_model() {
self.telemetry.report_assistant_event(AssistantEventData {
self.telemetry.report_assistant_event(AssistantEvent {
conversation_id: None,
kind: AssistantKind::Inline,
phase: AssistantPhase::Invoked,
@@ -429,7 +425,7 @@ impl InlineAssistant {
for range in codegen_ranges {
let assist_id = self.next_assist_id.post_inc();
let context_store =
cx.new(|_cx| ContextStore::new(project.clone(), thread_store.clone()));
cx.new(|_cx| ContextStore::new(workspace.clone(), thread_store.clone()));
let codegen = cx.new(|cx| {
BufferCodegen::new(
editor.read(cx).buffer().clone(),
@@ -523,7 +519,7 @@ impl InlineAssistant {
initial_prompt: String,
initial_transaction_id: Option<TransactionId>,
focus: bool,
workspace: Entity<Workspace>,
workspace: WeakEntity<Workspace>,
thread_store: Option<WeakEntity<ThreadStore>>,
window: &mut Window,
cx: &mut App,
@@ -541,8 +537,8 @@ impl InlineAssistant {
range.end = range.end.bias_right(&snapshot);
}

let project = workspace.read(cx).project().downgrade();
let context_store = cx.new(|_cx| ContextStore::new(project, thread_store.clone()));
let context_store =
cx.new(|_cx| ContextStore::new(workspace.clone(), thread_store.clone()));

let codegen = cx.new(|cx| {
BufferCodegen::new(
@@ -566,7 +562,7 @@ impl InlineAssistant {
codegen.clone(),
self.fs.clone(),
context_store,
workspace.downgrade(),
workspace.clone(),
thread_store,
window,
cx,
@@ -593,7 +589,7 @@ impl InlineAssistant {
end_block_id,
range,
codegen.clone(),
workspace.downgrade(),
workspace.clone(),
window,
cx,
),
@@ -991,7 +987,7 @@ impl InlineAssistant {
.map(|language| language.name())
});
report_assistant_event(
AssistantEventData {
AssistantEvent {
conversation_id: None,
kind: AssistantKind::Inline,
message_id,
@@ -1783,7 +1779,6 @@ impl CodeActionProvider for AssistantCodeActionProvider {
let workspace = self.workspace.clone();
let thread_store = self.thread_store.clone();
window.spawn(cx, async move |cx| {
let workspace = workspace.upgrade().context("workspace was released")?;
let editor = editor.upgrade().context("editor was released")?;
let range = editor
.update(cx, |editor, cx| {

@@ -86,7 +86,7 @@ impl ProfileSelector {

thread_store
.update(cx, |this, cx| {
this.load_profile_by_id(profile_id.clone(), cx);
this.load_profile_by_id(&profile_id, cx);
})
.log_err();
}

@@ -6,7 +6,7 @@ use language_model::{
ConfiguredModel, LanguageModelRegistry, LanguageModelRequest, report_assistant_event,
};
use std::{sync::Arc, time::Instant};
use telemetry_events::{AssistantEventData, AssistantKind, AssistantPhase};
use telemetry_events::{AssistantEvent, AssistantKind, AssistantPhase};
use terminal::Terminal;

pub struct TerminalCodegen {
@@ -79,7 +79,7 @@ impl TerminalCodegen {

let error_message = result.as_ref().err().map(|error| error.to_string());
report_assistant_event(
AssistantEventData {
AssistantEvent {
conversation_id: None,
kind: AssistantKind::InlineTerminal,
message_id,

@@ -16,10 +16,9 @@ use language_model::{
ConfiguredModel, LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage,
Role, report_assistant_event,
};
use project::Project;
use prompt_store::PromptBuilder;
use std::sync::Arc;
use telemetry_events::{AssistantEventData, AssistantKind, AssistantPhase};
use telemetry_events::{AssistantEvent, AssistantKind, AssistantPhase};
use terminal_view::TerminalView;
use ui::prelude::*;
use util::ResultExt;
@@ -68,7 +67,6 @@ impl TerminalInlineAssistant {
&mut self,
terminal_view: &Entity<TerminalView>,
workspace: WeakEntity<Workspace>,
project: WeakEntity<Project>,
thread_store: Option<WeakEntity<ThreadStore>>,
window: &mut Window,
cx: &mut App,
@@ -77,7 +75,8 @@ impl TerminalInlineAssistant {
let assist_id = self.next_assist_id.post_inc();
let prompt_buffer =
cx.new(|cx| MultiBuffer::singleton(cx.new(|cx| Buffer::local(String::new(), cx)), cx));
let context_store = cx.new(|_cx| ContextStore::new(project, thread_store.clone()));
let context_store =
cx.new(|_cx| ContextStore::new(workspace.clone(), thread_store.clone()));
let codegen = cx.new(|_| TerminalCodegen::new(terminal, self.telemetry.clone()));

let prompt_editor = cx.new(|cx| {
@@ -293,7 +292,7 @@ impl TerminalInlineAssistant {
let codegen = assist.codegen.read(cx);
let executor = cx.background_executor().clone();
report_assistant_event(
AssistantEventData {
AssistantEvent {
conversation_id: None,
kind: AssistantKind::InlineTerminal,
message_id: codegen.message_id.clone(),

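Across the hunks above, ContextStore::new takes a weak workspace handle instead of a weak project handle and resolves the project on demand. A minimal sketch of that ownership pattern using std Rc/Weak as stand-ins for the gpui entity handles (names and the String payload are placeholders, not the crate's types):

use std::rc::{Rc, Weak};

// Stand-ins: the workspace owns a project; the store holds only a weak handle
// so it does not keep the workspace alive, and fails gracefully if it is gone.
struct Workspace { project: Rc<String> }
struct Store { workspace: Weak<Workspace> }

impl Store {
    fn project(&self) -> Result<Rc<String>, &'static str> {
        self.workspace
            .upgrade()
            .map(|workspace| workspace.project.clone())
            .ok_or("failed to read project")
    }
}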
@@ -2,39 +2,38 @@ use std::fmt::Write as _;
|
||||
use std::io::Write;
|
||||
use std::ops::Range;
|
||||
use std::sync::Arc;
|
||||
use std::time::Instant;
|
||||
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use assistant_settings::AssistantSettings;
|
||||
use assistant_tool::{ActionLog, Tool, ToolWorkingSet};
|
||||
use chrono::{DateTime, Utc};
|
||||
use collections::{BTreeMap, HashMap};
|
||||
use feature_flags::{self, FeatureFlagAppExt};
|
||||
use fs::Fs;
|
||||
use futures::future::Shared;
|
||||
use futures::{FutureExt, StreamExt as _};
|
||||
use git::repository::DiffType;
|
||||
use gpui::{App, AppContext, Context, Entity, EventEmitter, SharedString, Task, WeakEntity};
|
||||
use language_model::{
|
||||
ConfiguredModel, LanguageModel, LanguageModelCompletionEvent, LanguageModelId,
|
||||
LanguageModelKnownError, LanguageModelRegistry, LanguageModelRequest,
|
||||
LanguageModelRequestMessage, LanguageModelRequestTool, LanguageModelToolResult,
|
||||
LanguageModelToolUseId, MaxMonthlySpendReachedError, MessageContent, PaymentRequiredError,
|
||||
Role, StopReason, TokenUsage,
|
||||
ConfiguredModel, LanguageModel, LanguageModelCompletionEvent, LanguageModelRegistry,
|
||||
LanguageModelRequest, LanguageModelRequestMessage, LanguageModelRequestTool,
|
||||
LanguageModelToolResult, LanguageModelToolUseId, MaxMonthlySpendReachedError, MessageContent,
|
||||
PaymentRequiredError, Role, StopReason, TokenUsage,
|
||||
};
|
||||
use project::Project;
|
||||
use project::git_store::{GitStore, GitStoreCheckpoint, RepositoryState};
|
||||
use prompt_store::PromptBuilder;
|
||||
use project::{Project, Worktree};
|
||||
use prompt_store::{
|
||||
AssistantSystemPromptContext, PromptBuilder, RulesFile, WorktreeInfoForSystemPrompt,
|
||||
};
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::Settings;
|
||||
use thiserror::Error;
|
||||
use util::{ResultExt as _, TryFutureExt as _, post_inc};
|
||||
use util::{ResultExt as _, TryFutureExt as _, maybe, post_inc};
|
||||
use uuid::Uuid;
|
||||
|
||||
use crate::context::{AssistantContext, ContextId, format_context_as_string};
|
||||
use crate::thread_store::{
|
||||
SerializedMessage, SerializedMessageSegment, SerializedThread, SerializedToolResult,
|
||||
SerializedToolUse, SharedProjectContext,
|
||||
SerializedToolUse,
|
||||
};
|
||||
use crate::tool_use::{PendingToolUse, ToolUse, ToolUseState, USING_TOOL_MARKER};
|
||||
|
||||
@@ -184,7 +183,7 @@ pub struct ThreadCheckpoint {
|
||||
git_checkpoint: GitStoreCheckpoint,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
|
||||
#[derive(Copy, Clone, Debug)]
|
||||
pub enum ThreadFeedback {
|
||||
Positive,
|
||||
Negative,
|
||||
@@ -229,7 +228,7 @@ pub struct TotalTokenUsage {
|
||||
pub ratio: TokenUsageRatio,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, PartialEq, Eq)]
|
||||
#[derive(Default, PartialEq, Eq)]
|
||||
pub enum TokenUsageRatio {
|
||||
#[default]
|
||||
Normal,
|
||||
@@ -248,39 +247,27 @@ pub struct Thread {
|
||||
next_message_id: MessageId,
|
||||
context: BTreeMap<ContextId, AssistantContext>,
|
||||
context_by_message: HashMap<MessageId, Vec<ContextId>>,
|
||||
project_context: SharedProjectContext,
|
||||
system_prompt_context: Option<AssistantSystemPromptContext>,
|
||||
checkpoints_by_message: HashMap<MessageId, ThreadCheckpoint>,
|
||||
completion_count: usize,
|
||||
pending_completions: Vec<PendingCompletion>,
|
||||
project: Entity<Project>,
|
||||
prompt_builder: Arc<PromptBuilder>,
|
||||
tools: Entity<ToolWorkingSet>,
|
||||
tools: Arc<ToolWorkingSet>,
|
||||
tool_use: ToolUseState,
|
||||
action_log: Entity<ActionLog>,
|
||||
last_restore_checkpoint: Option<LastRestoreCheckpoint>,
|
||||
pending_checkpoint: Option<ThreadCheckpoint>,
|
||||
initial_project_snapshot: Shared<Task<Option<Arc<ProjectSnapshot>>>>,
|
||||
cumulative_token_usage: TokenUsage,
|
||||
exceeded_window_error: Option<ExceededWindowError>,
|
||||
feedback: Option<ThreadFeedback>,
|
||||
message_feedback: HashMap<MessageId, ThreadFeedback>,
|
||||
last_auto_capture_at: Option<Instant>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct ExceededWindowError {
|
||||
/// Model used when last message exceeded context window
|
||||
model_id: LanguageModelId,
|
||||
/// Token count including last message
|
||||
token_count: usize,
|
||||
}
|
||||
|
||||
impl Thread {
|
||||
pub fn new(
|
||||
project: Entity<Project>,
|
||||
tools: Entity<ToolWorkingSet>,
|
||||
tools: Arc<ToolWorkingSet>,
|
||||
prompt_builder: Arc<PromptBuilder>,
|
||||
system_prompt: SharedProjectContext,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
Self {
|
||||
@@ -293,7 +280,7 @@ impl Thread {
|
||||
next_message_id: MessageId(0),
|
||||
context: BTreeMap::default(),
|
||||
context_by_message: HashMap::default(),
|
||||
project_context: system_prompt,
|
||||
system_prompt_context: None,
|
||||
checkpoints_by_message: HashMap::default(),
|
||||
completion_count: 0,
|
||||
pending_completions: Vec::new(),
|
||||
@@ -311,10 +298,7 @@ impl Thread {
|
||||
.shared()
|
||||
},
|
||||
cumulative_token_usage: TokenUsage::default(),
|
||||
exceeded_window_error: None,
|
||||
feedback: None,
|
||||
message_feedback: HashMap::default(),
|
||||
last_auto_capture_at: None,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -322,9 +306,8 @@ impl Thread {
|
||||
id: ThreadId,
|
||||
serialized: SerializedThread,
|
||||
project: Entity<Project>,
|
||||
tools: Entity<ToolWorkingSet>,
|
||||
tools: Arc<ToolWorkingSet>,
|
||||
prompt_builder: Arc<PromptBuilder>,
|
||||
project_context: SharedProjectContext,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
let next_message_id = MessageId(
|
||||
@@ -365,7 +348,7 @@ impl Thread {
|
||||
next_message_id,
|
||||
context: BTreeMap::default(),
|
||||
context_by_message: HashMap::default(),
|
||||
project_context,
|
||||
system_prompt_context: None,
|
||||
checkpoints_by_message: HashMap::default(),
|
||||
completion_count: 0,
|
||||
pending_completions: Vec::new(),
|
||||
@@ -378,10 +361,7 @@ impl Thread {
|
||||
action_log: cx.new(|_| ActionLog::new(project)),
|
||||
initial_project_snapshot: Task::ready(serialized.initial_project_snapshot).shared(),
|
||||
cumulative_token_usage: serialized.cumulative_token_usage,
|
||||
exceeded_window_error: None,
|
||||
feedback: None,
|
||||
message_feedback: HashMap::default(),
|
||||
last_auto_capture_at: None,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -405,10 +385,6 @@ impl Thread {
|
||||
self.summary.clone()
|
||||
}
|
||||
|
||||
pub fn project_context(&self) -> SharedProjectContext {
|
||||
self.project_context.clone()
|
||||
}
|
||||
|
||||
pub const DEFAULT_SUMMARY: SharedString = SharedString::new_static("New Thread");
|
||||
|
||||
pub fn summary_or_default(&self) -> SharedString {
|
||||
@@ -458,7 +434,7 @@ impl Thread {
|
||||
!self.pending_completions.is_empty() || !self.all_tools_finished()
|
||||
}
|
||||
|
||||
pub fn tools(&self) -> &Entity<ToolWorkingSet> {
|
||||
pub fn tools(&self) -> &Arc<ToolWorkingSet> {
|
||||
&self.tools
|
||||
}
|
||||
|
||||
@@ -495,11 +471,11 @@ impl Thread {
|
||||
cx.emit(ThreadEvent::CheckpointChanged);
|
||||
cx.notify();
|
||||
|
||||
let git_store = self.project().read(cx).git_store().clone();
|
||||
let restore = git_store.update(cx, |git_store, cx| {
|
||||
git_store.restore_checkpoint(checkpoint.git_checkpoint.clone(), cx)
|
||||
});
|
||||
|
||||
let project = self.project.read(cx);
|
||||
let restore = project
|
||||
.git_store()
|
||||
.read(cx)
|
||||
.restore_checkpoint(checkpoint.git_checkpoint.clone(), cx);
|
||||
cx.spawn(async move |this, cx| {
|
||||
let result = restore.await;
|
||||
this.update(cx, |this, cx| {
|
||||
@@ -530,11 +506,11 @@ impl Thread {
|
||||
};
|
||||
|
||||
let git_store = self.project.read(cx).git_store().clone();
|
||||
let final_checkpoint = git_store.update(cx, |git_store, cx| git_store.checkpoint(cx));
|
||||
let final_checkpoint = git_store.read(cx).checkpoint(cx);
|
||||
cx.spawn(async move |this, cx| match final_checkpoint.await {
|
||||
Ok(final_checkpoint) => {
|
||||
let equal = git_store
|
||||
.update(cx, |store, cx| {
|
||||
.read_with(cx, |store, cx| {
|
||||
store.compare_checkpoints(
|
||||
pending_checkpoint.git_checkpoint.clone(),
|
||||
final_checkpoint.clone(),
|
||||
@@ -546,7 +522,7 @@ impl Thread {
|
||||
|
||||
if equal {
|
||||
git_store
|
||||
.update(cx, |store, cx| {
|
||||
.read_with(cx, |store, cx| {
|
||||
store.delete_checkpoint(pending_checkpoint.git_checkpoint, cx)
|
||||
})?
|
||||
.detach();
|
||||
@@ -557,7 +533,7 @@ impl Thread {
|
||||
}
|
||||
|
||||
git_store
|
||||
.update(cx, |store, cx| {
|
||||
.read_with(cx, |store, cx| {
|
||||
store.delete_checkpoint(final_checkpoint, cx)
|
||||
})?
|
||||
.detach();
|
||||
@@ -699,9 +675,6 @@ impl Thread {
|
||||
git_checkpoint,
|
||||
});
|
||||
}
|
||||
|
||||
self.auto_capture_telemetry(cx);
|
||||
|
||||
message_id
|
||||
}
|
||||
|
||||
@@ -827,13 +800,123 @@ impl Thread {
|
||||
})
|
||||
.collect(),
|
||||
initial_project_snapshot,
|
||||
cumulative_token_usage: this.cumulative_token_usage,
|
||||
cumulative_token_usage: this.cumulative_token_usage.clone(),
|
||||
detailed_summary_state: this.detailed_summary_state.clone(),
|
||||
exceeded_window_error: this.exceeded_window_error.clone(),
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
pub fn set_system_prompt_context(&mut self, context: AssistantSystemPromptContext) {
|
||||
self.system_prompt_context = Some(context);
|
||||
}
|
||||
|
||||
pub fn system_prompt_context(&self) -> &Option<AssistantSystemPromptContext> {
|
||||
&self.system_prompt_context
|
||||
}
|
||||
|
||||
pub fn load_system_prompt_context(
|
||||
&self,
|
||||
cx: &App,
|
||||
) -> Task<(AssistantSystemPromptContext, Option<ThreadError>)> {
|
||||
let project = self.project.read(cx);
|
||||
let tasks = project
|
||||
.visible_worktrees(cx)
|
||||
.map(|worktree| {
|
||||
Self::load_worktree_info_for_system_prompt(
|
||||
project.fs().clone(),
|
||||
worktree.read(cx),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
cx.spawn(async |_cx| {
|
||||
let results = futures::future::join_all(tasks).await;
|
||||
let mut first_err = None;
|
||||
let worktrees = results
|
||||
.into_iter()
|
||||
.map(|(worktree, err)| {
|
||||
if first_err.is_none() && err.is_some() {
|
||||
first_err = err;
|
||||
}
|
||||
worktree
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
(AssistantSystemPromptContext::new(worktrees), first_err)
|
||||
})
|
||||
}
|
||||
|
||||
fn load_worktree_info_for_system_prompt(
|
||||
fs: Arc<dyn Fs>,
|
||||
worktree: &Worktree,
|
||||
cx: &App,
|
||||
) -> Task<(WorktreeInfoForSystemPrompt, Option<ThreadError>)> {
|
||||
let root_name = worktree.root_name().into();
|
||||
let abs_path = worktree.abs_path();
|
||||
|
||||
// Note that Cline supports `.clinerules` being a directory, but that is not currently
|
||||
// supported. This doesn't seem to occur often in GitHub repositories.
|
||||
const RULES_FILE_NAMES: [&'static str; 6] = [
|
||||
".rules",
|
||||
".cursorrules",
|
||||
".windsurfrules",
|
||||
".clinerules",
|
||||
".github/copilot-instructions.md",
|
||||
"CLAUDE.md",
|
||||
];
|
||||
let selected_rules_file = RULES_FILE_NAMES
|
||||
.into_iter()
|
||||
.filter_map(|name| {
|
||||
worktree
|
||||
.entry_for_path(name)
|
||||
.filter(|entry| entry.is_file())
|
||||
.map(|entry| (entry.path.clone(), worktree.absolutize(&entry.path)))
|
||||
})
|
||||
.next();
|
||||
|
||||
if let Some((rel_rules_path, abs_rules_path)) = selected_rules_file {
|
||||
cx.spawn(async move |_| {
|
||||
let rules_file_result = maybe!(async move {
|
||||
let abs_rules_path = abs_rules_path?;
|
||||
let text = fs.load(&abs_rules_path).await.with_context(|| {
|
||||
format!("Failed to load assistant rules file {:?}", abs_rules_path)
|
||||
})?;
|
||||
anyhow::Ok(RulesFile {
|
||||
rel_path: rel_rules_path,
|
||||
abs_path: abs_rules_path.into(),
|
||||
text: text.trim().to_string(),
|
||||
})
|
||||
})
|
||||
.await;
|
||||
let (rules_file, rules_file_error) = match rules_file_result {
|
||||
Ok(rules_file) => (Some(rules_file), None),
|
||||
Err(err) => (
|
||||
None,
|
||||
Some(ThreadError::Message {
|
||||
header: "Error loading rules file".into(),
|
||||
message: format!("{err}").into(),
|
||||
}),
|
||||
),
|
||||
};
|
||||
let worktree_info = WorktreeInfoForSystemPrompt {
|
||||
root_name,
|
||||
abs_path,
|
||||
rules_file,
|
||||
};
|
||||
(worktree_info, rules_file_error)
|
||||
})
|
||||
} else {
|
||||
Task::ready((
|
||||
WorktreeInfoForSystemPrompt {
|
||||
root_name,
|
||||
abs_path,
|
||||
rules_file: None,
|
||||
},
|
||||
None,
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
pub fn send_to_model(
|
||||
&mut self,
|
||||
model: Arc<dyn LanguageModel>,
|
||||
@@ -844,21 +927,13 @@ impl Thread {
|
||||
if model.supports_tools() {
|
||||
request.tools = {
|
||||
let mut tools = Vec::new();
|
||||
tools.extend(
|
||||
self.tools()
|
||||
.read(cx)
|
||||
.enabled_tools(cx)
|
||||
.into_iter()
|
||||
.filter_map(|tool| {
|
||||
// Skip tools that cannot be supported
|
||||
let input_schema = tool.input_schema(model.tool_input_format()).ok()?;
|
||||
Some(LanguageModelRequestTool {
|
||||
name: tool.name(),
|
||||
description: tool.description(),
|
||||
input_schema,
|
||||
})
|
||||
}),
|
||||
);
|
||||
tools.extend(self.tools().enabled_tools(cx).into_iter().map(|tool| {
|
||||
LanguageModelRequestTool {
|
||||
name: tool.name(),
|
||||
description: tool.description(),
|
||||
input_schema: tool.input_schema(model.tool_input_format()),
|
||||
}
|
||||
}));
|
||||
|
||||
tools
|
||||
};
|
||||
@@ -891,10 +966,10 @@ impl Thread {
|
||||
temperature: None,
|
||||
};
|
||||
|
||||
if let Some(project_context) = self.project_context.borrow().as_ref() {
|
||||
if let Some(system_prompt_context) = self.system_prompt_context.as_ref() {
|
||||
if let Some(system_prompt) = self
|
||||
.prompt_builder
|
||||
.generate_assistant_system_prompt(project_context)
|
||||
.generate_assistant_system_prompt(system_prompt_context)
|
||||
.context("failed to generate assistant system prompt")
|
||||
.log_err()
|
||||
{
|
||||
@@ -905,7 +980,7 @@ impl Thread {
|
||||
});
|
||||
}
|
||||
} else {
|
||||
log::error!("project_context not set.")
|
||||
log::error!("system_prompt_context not set.")
|
||||
}
|
||||
|
||||
for message in &self.messages {
|
||||
@@ -1017,7 +1092,7 @@ impl Thread {
|
||||
let task = cx.spawn(async move |thread, cx| {
|
||||
let stream = model.stream_completion(request, &cx);
|
||||
let initial_token_usage =
|
||||
thread.read_with(cx, |thread, _cx| thread.cumulative_token_usage);
|
||||
thread.read_with(cx, |thread, _cx| thread.cumulative_token_usage.clone());
|
||||
let stream_completion = async {
|
||||
let mut events = stream.await?;
|
||||
let mut stop_reason = StopReason::EndTurn;
|
||||
@@ -1039,9 +1114,9 @@ impl Thread {
|
||||
stop_reason = reason;
|
||||
}
|
||||
LanguageModelCompletionEvent::UsageUpdate(token_usage) => {
|
||||
thread.cumulative_token_usage = thread.cumulative_token_usage
|
||||
+ token_usage
|
||||
- current_token_usage;
|
||||
thread.cumulative_token_usage =
|
||||
thread.cumulative_token_usage.clone() + token_usage.clone()
|
||||
- current_token_usage.clone();
|
||||
current_token_usage = token_usage;
|
||||
}
|
||||
LanguageModelCompletionEvent::Text(chunk) => {
|
||||
@@ -1109,8 +1184,6 @@ impl Thread {
|
||||
thread.touch_updated_at();
|
||||
cx.emit(ThreadEvent::StreamedCompletion);
|
||||
cx.notify();
|
||||
|
||||
thread.auto_capture_telemetry(cx);
|
||||
})?;
|
||||
|
||||
smol::future::yield_now().await;
|
||||
@@ -1137,8 +1210,7 @@ impl Thread {
|
||||
match result.as_ref() {
|
||||
Ok(stop_reason) => match stop_reason {
|
||||
StopReason::ToolUse => {
|
||||
let tool_uses = thread.use_pending_tools(cx);
|
||||
cx.emit(ThreadEvent::UsePendingTools { tool_uses });
|
||||
cx.emit(ThreadEvent::UsePendingTools);
|
||||
}
|
||||
StopReason::EndTurn => {}
|
||||
StopReason::MaxTokens => {}
|
||||
@@ -1150,20 +1222,6 @@ impl Thread {
|
||||
cx.emit(ThreadEvent::ShowError(
|
||||
ThreadError::MaxMonthlySpendReached,
|
||||
));
|
||||
} else if let Some(known_error) =
|
||||
error.downcast_ref::<LanguageModelKnownError>()
|
||||
{
|
||||
match known_error {
|
||||
LanguageModelKnownError::ContextWindowLimitExceeded {
|
||||
tokens,
|
||||
} => {
|
||||
thread.exceeded_window_error = Some(ExceededWindowError {
|
||||
model_id: model.id(),
|
||||
token_count: *tokens,
|
||||
});
|
||||
cx.notify();
|
||||
}
|
||||
}
|
||||
} else {
|
||||
let error_message = error
|
||||
.chain()
|
||||
@@ -1179,12 +1237,10 @@ impl Thread {
|
||||
thread.cancel_last_completion(cx);
|
||||
}
|
||||
}
|
||||
cx.emit(ThreadEvent::Stopped(result.map_err(Arc::new)));
|
||||
|
||||
thread.auto_capture_telemetry(cx);
|
||||
cx.emit(ThreadEvent::DoneStreaming);
|
||||
|
||||
if let Ok(initial_usage) = initial_token_usage {
|
||||
let usage = thread.cumulative_token_usage - initial_usage;
|
||||
let usage = thread.cumulative_token_usage.clone() - initial_usage;
|
||||
|
||||
telemetry::event!(
|
||||
"Assistant Thread Completion",
|
||||
@@ -1342,8 +1398,10 @@ impl Thread {
|
||||
)
|
||||
}
|
||||
|
||||
pub fn use_pending_tools(&mut self, cx: &mut Context<Self>) -> Vec<PendingToolUse> {
|
||||
self.auto_capture_telemetry(cx);
|
||||
pub fn use_pending_tools(
|
||||
&mut self,
|
||||
cx: &mut Context<Self>,
|
||||
) -> impl IntoIterator<Item = PendingToolUse> + use<> {
|
||||
let request = self.to_completion_request(RequestKind::Chat, cx);
|
||||
let messages = Arc::new(request.messages);
|
||||
let pending_tool_uses = self
|
||||
@@ -1355,8 +1413,8 @@ impl Thread {
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
for tool_use in pending_tool_uses.iter() {
|
||||
if let Some(tool) = self.tools.read(cx).tool(&tool_use.name, cx) {
|
||||
if tool.needs_confirmation(&tool_use.input, cx)
|
||||
if let Some(tool) = self.tools.tool(&tool_use.name, cx) {
|
||||
if tool.needs_confirmation()
|
||||
&& !AssistantSettings::get_global(cx).always_allow_tool_actions
|
||||
{
|
||||
self.tool_use.confirm_tool_use(
|
||||
@@ -1407,8 +1465,8 @@ impl Thread {
|
||||
) -> Task<()> {
|
||||
let tool_name: Arc<str> = tool.name().into();
|
||||
|
||||
let tool_result = if self.tools.read(cx).is_disabled(&tool.source(), &tool_name) {
|
||||
Task::ready(Err(anyhow!("tool is disabled: {tool_name}"))).into()
|
||||
let run_tool = if self.tools.is_disabled(&tool.source(), &tool_name) {
|
||||
Task::ready(Err(anyhow!("tool is disabled: {tool_name}")))
|
||||
} else {
|
||||
tool.run(
|
||||
input,
|
||||
@@ -1421,7 +1479,7 @@ impl Thread {
|
||||
|
||||
cx.spawn({
|
||||
async move |thread: WeakEntity<Thread>, cx| {
|
||||
let output = tool_result.output.await;
|
||||
let output = run_tool.await;
|
||||
|
||||
thread
|
||||
.update(cx, |thread, cx| {
|
||||
@@ -1429,38 +1487,19 @@ impl Thread {
|
||||
tool_use_id.clone(),
|
||||
tool_name,
|
||||
output,
|
||||
cx,
|
||||
);
|
||||
thread.tool_finished(tool_use_id, pending_tool_use, false, cx);
|
||||
|
||||
cx.emit(ThreadEvent::ToolFinished {
|
||||
tool_use_id,
|
||||
pending_tool_use,
|
||||
canceled: false,
|
||||
});
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
fn tool_finished(
|
||||
&mut self,
|
||||
tool_use_id: LanguageModelToolUseId,
|
||||
pending_tool_use: Option<PendingToolUse>,
|
||||
canceled: bool,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
if self.all_tools_finished() {
|
||||
let model_registry = LanguageModelRegistry::read_global(cx);
|
||||
if let Some(ConfiguredModel { model, .. }) = model_registry.default_model() {
|
||||
self.attach_tool_results(cx);
|
||||
if !canceled {
|
||||
self.send_to_model(model, RequestKind::Chat, cx);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
cx.emit(ThreadEvent::ToolFinished {
|
||||
tool_use_id,
|
||||
pending_tool_use,
|
||||
});
|
||||
}
|
||||
|
||||
pub fn attach_tool_results(&mut self, cx: &mut Context<Self>) {
|
||||
// Insert a user message to contain the tool results.
|
||||
self.insert_user_message(
|
||||
@@ -1484,12 +1523,11 @@ impl Thread {
|
||||
let mut canceled = false;
|
||||
for pending_tool_use in self.tool_use.cancel_pending() {
|
||||
canceled = true;
|
||||
self.tool_finished(
|
||||
pending_tool_use.id.clone(),
|
||||
Some(pending_tool_use),
|
||||
true,
|
||||
cx,
|
||||
);
|
||||
cx.emit(ThreadEvent::ToolFinished {
|
||||
tool_use_id: pending_tool_use.id.clone(),
|
||||
pending_tool_use: Some(pending_tool_use),
|
||||
canceled: true,
|
||||
});
|
||||
}
|
||||
canceled
|
||||
};
|
||||
@@ -1497,46 +1535,24 @@ impl Thread {
|
||||
canceled
|
||||
}
|
||||
|
||||
/// Returns the feedback given to the thread, if any.
|
||||
pub fn feedback(&self) -> Option<ThreadFeedback> {
|
||||
self.feedback
|
||||
}
|
||||
|
||||
pub fn message_feedback(&self, message_id: MessageId) -> Option<ThreadFeedback> {
|
||||
self.message_feedback.get(&message_id).copied()
|
||||
}
|
||||
|
||||
pub fn report_message_feedback(
|
||||
/// Reports feedback about the thread and stores it in our telemetry backend.
|
||||
pub fn report_feedback(
|
||||
&mut self,
|
||||
message_id: MessageId,
|
||||
feedback: ThreadFeedback,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Task<Result<()>> {
|
||||
if self.message_feedback.get(&message_id) == Some(&feedback) {
|
||||
return Task::ready(Ok(()));
|
||||
}
|
||||
|
||||
let final_project_snapshot = Self::project_snapshot(self.project.clone(), cx);
|
||||
let serialized_thread = self.serialize(cx);
|
||||
let thread_id = self.id().clone();
|
||||
let client = self.project.read(cx).client();
|
||||
|
||||
let enabled_tool_names: Vec<String> = self
|
||||
.tools()
|
||||
.read(cx)
|
||||
.enabled_tools(cx)
|
||||
.iter()
|
||||
.map(|tool| tool.name().to_string())
|
||||
.collect();
|
||||
|
||||
self.message_feedback.insert(message_id, feedback);
|
||||
|
||||
self.feedback = Some(feedback);
|
||||
cx.notify();
|
||||
|
||||
let message_content = self
|
||||
.message(message_id)
|
||||
.map(|msg| msg.to_string())
|
||||
.unwrap_or_default();
|
||||
|
||||
cx.background_spawn(async move {
|
||||
let final_project_snapshot = final_project_snapshot.await;
|
||||
let serialized_thread = serialized_thread.await?;
|
||||
@@ -1551,9 +1567,6 @@ impl Thread {
|
||||
"Assistant Thread Rated",
|
||||
rating,
|
||||
thread_id,
|
||||
enabled_tool_names,
|
||||
message_id = message_id.0,
|
||||
message_content,
|
||||
thread_data,
|
||||
final_project_snapshot
|
||||
);
|
||||
@@ -1563,52 +1576,6 @@ impl Thread {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn report_feedback(
|
||||
&mut self,
|
||||
feedback: ThreadFeedback,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Task<Result<()>> {
|
||||
let last_assistant_message_id = self
|
||||
.messages
|
||||
.iter()
|
||||
.rev()
|
||||
.find(|msg| msg.role == Role::Assistant)
|
||||
.map(|msg| msg.id);
|
||||
|
||||
if let Some(message_id) = last_assistant_message_id {
|
||||
self.report_message_feedback(message_id, feedback, cx)
|
||||
} else {
|
||||
let final_project_snapshot = Self::project_snapshot(self.project.clone(), cx);
|
||||
let serialized_thread = self.serialize(cx);
|
||||
let thread_id = self.id().clone();
|
||||
let client = self.project.read(cx).client();
|
||||
self.feedback = Some(feedback);
|
||||
cx.notify();
|
||||
|
||||
cx.background_spawn(async move {
|
||||
let final_project_snapshot = final_project_snapshot.await;
|
||||
let serialized_thread = serialized_thread.await?;
|
||||
let thread_data = serde_json::to_value(serialized_thread)
|
||||
.unwrap_or_else(|_| serde_json::Value::Null);
|
||||
|
||||
let rating = match feedback {
|
||||
ThreadFeedback::Positive => "positive",
|
||||
ThreadFeedback::Negative => "negative",
|
||||
};
|
||||
telemetry::event!(
|
||||
"Assistant Thread Rated",
|
||||
rating,
|
||||
thread_id,
|
||||
thread_data,
|
||||
final_project_snapshot
|
||||
);
|
||||
client.telemetry().flush_events();
|
||||
|
||||
Ok(())
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/// Create a snapshot of the current project state including git information and unsaved buffers.
|
||||
fn project_snapshot(
|
||||
project: Entity<Project>,
|
||||
@@ -1683,10 +1650,10 @@ impl Thread {
|
||||
.ok()
|
||||
.flatten()
|
||||
.map(|repo| {
|
||||
repo.update(cx, |repo, _| {
|
||||
repo.read_with(cx, |repo, _| {
|
||||
let current_branch =
|
||||
repo.branch.as_ref().map(|branch| branch.name.to_string());
|
||||
repo.send_job(None, |state, _| async move {
|
||||
repo.send_job(|state, _| async move {
|
||||
let RepositoryState::Local { backend, .. } = state else {
|
||||
return GitState {
|
||||
remote_url: None,
|
||||
@@ -1820,52 +1787,8 @@ impl Thread {
|
||||
&self.project
|
||||
}
|
||||
|
||||
pub fn auto_capture_telemetry(&mut self, cx: &mut Context<Self>) {
|
||||
if !cx.has_flag::<feature_flags::ThreadAutoCapture>() {
|
||||
return;
|
||||
}
|
||||
|
||||
let now = Instant::now();
|
||||
if let Some(last) = self.last_auto_capture_at {
|
||||
if now.duration_since(last).as_secs() < 10 {
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
self.last_auto_capture_at = Some(now);
|
||||
|
||||
let thread_id = self.id().clone();
|
||||
let github_login = self
|
||||
.project
|
||||
.read(cx)
|
||||
.user_store()
|
||||
.read(cx)
|
||||
.current_user()
|
||||
.map(|user| user.github_login.clone());
|
||||
let client = self.project.read(cx).client().clone();
|
||||
let serialize_task = self.serialize(cx);
|
||||
|
||||
cx.background_executor()
|
||||
.spawn(async move {
|
||||
if let Ok(serialized_thread) = serialize_task.await {
|
||||
if let Ok(thread_data) = serde_json::to_value(serialized_thread) {
|
||||
telemetry::event!(
|
||||
"Agent Thread Auto-Captured",
|
||||
thread_id = thread_id.to_string(),
|
||||
thread_data = thread_data,
|
||||
auto_capture_reason = "tracked_user",
|
||||
github_login = github_login
|
||||
);
|
||||
|
||||
client.telemetry().flush_events();
|
||||
}
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
|
||||
pub fn cumulative_token_usage(&self) -> TokenUsage {
|
||||
self.cumulative_token_usage
|
||||
self.cumulative_token_usage.clone()
|
||||
}
|
||||
|
||||
pub fn total_token_usage(&self, cx: &App) -> TotalTokenUsage {
|
||||
@@ -1876,16 +1799,6 @@ impl Thread {
|
||||
|
||||
let max = model.model.max_token_count();
|
||||
|
||||
if let Some(exceeded_error) = &self.exceeded_window_error {
|
||||
if model.model.id() == exceeded_error.model_id {
|
||||
return TotalTokenUsage {
|
||||
total: exceeded_error.token_count,
|
||||
max,
|
||||
ratio: TokenUsageRatio::Exceeded,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
let warning_threshold: f32 = std::env::var("ZED_THREAD_WARNING_THRESHOLD")
|
||||
.unwrap_or("0.8".to_string())
|
||||
@@ -1918,18 +1831,20 @@ impl Thread {
|
||||
));
|
||||
|
||||
self.tool_use
|
||||
.insert_tool_output(tool_use_id.clone(), tool_name, err, cx);
|
||||
self.tool_finished(tool_use_id.clone(), None, true, cx);
|
||||
.insert_tool_output(tool_use_id.clone(), tool_name, err);
|
||||
|
||||
cx.emit(ThreadEvent::ToolFinished {
|
||||
tool_use_id,
|
||||
pending_tool_use: None,
|
||||
canceled: true,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Error)]
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum ThreadError {
|
||||
#[error("Payment required")]
|
||||
PaymentRequired,
|
||||
#[error("Max monthly spend reached")]
|
||||
MaxMonthlySpendReached,
|
||||
#[error("Message {header}: {message}")]
|
||||
Message {
|
||||
header: SharedString,
|
||||
message: SharedString,
|
||||
@@ -1942,20 +1857,20 @@ pub enum ThreadEvent {
|
||||
StreamedCompletion,
|
||||
StreamedAssistantText(MessageId, String),
|
||||
StreamedAssistantThinking(MessageId, String),
|
||||
Stopped(Result<StopReason, Arc<anyhow::Error>>),
|
||||
DoneStreaming,
|
||||
MessageAdded(MessageId),
|
||||
MessageEdited(MessageId),
|
||||
MessageDeleted(MessageId),
|
||||
SummaryGenerated,
|
||||
SummaryChanged,
|
||||
UsePendingTools {
|
||||
tool_uses: Vec<PendingToolUse>,
|
||||
},
|
||||
UsePendingTools,
|
||||
ToolFinished {
|
||||
#[allow(unused)]
|
||||
tool_use_id: LanguageModelToolUseId,
|
||||
/// The pending tool use that corresponds to this tool.
|
||||
pending_tool_use: Option<PendingToolUse>,
|
||||
/// Whether the tool was canceled by the user.
|
||||
canceled: bool,
|
||||
},
|
||||
CheckpointChanged,
|
||||
ToolConfirmationNeeded,
|
||||
@@ -2048,9 +1963,9 @@ fn main() {{
|
||||
thread.to_completion_request(RequestKind::Chat, cx)
|
||||
});
|
||||
|
||||
assert_eq!(request.messages.len(), 2);
|
||||
assert_eq!(request.messages.len(), 1);
|
||||
let expected_full_message = format!("{}Please explain this code", expected_context);
|
||||
assert_eq!(request.messages[1].string_contents(), expected_full_message);
|
||||
assert_eq!(request.messages[0].string_contents(), expected_full_message);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
@@ -2141,20 +2056,20 @@ fn main() {{
|
||||
});
|
||||
|
||||
// The request should contain all 3 messages
|
||||
assert_eq!(request.messages.len(), 4);
|
||||
assert_eq!(request.messages.len(), 3);
|
||||
|
||||
// Check that the contexts are properly formatted in each message
|
||||
assert!(request.messages[1].string_contents().contains("file1.rs"));
|
||||
assert!(!request.messages[1].string_contents().contains("file2.rs"));
|
||||
assert!(request.messages[0].string_contents().contains("file1.rs"));
|
||||
assert!(!request.messages[0].string_contents().contains("file2.rs"));
|
||||
assert!(!request.messages[0].string_contents().contains("file3.rs"));
|
||||
|
||||
assert!(!request.messages[1].string_contents().contains("file1.rs"));
|
||||
assert!(request.messages[1].string_contents().contains("file2.rs"));
|
||||
assert!(!request.messages[1].string_contents().contains("file3.rs"));
|
||||
|
||||
assert!(!request.messages[2].string_contents().contains("file1.rs"));
|
||||
assert!(request.messages[2].string_contents().contains("file2.rs"));
|
||||
assert!(!request.messages[2].string_contents().contains("file3.rs"));
|
||||
|
||||
assert!(!request.messages[3].string_contents().contains("file1.rs"));
|
||||
assert!(!request.messages[3].string_contents().contains("file2.rs"));
|
||||
assert!(request.messages[3].string_contents().contains("file3.rs"));
|
||||
assert!(!request.messages[2].string_contents().contains("file2.rs"));
|
||||
assert!(request.messages[2].string_contents().contains("file3.rs"));
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
@@ -2192,9 +2107,9 @@ fn main() {{
|
||||
thread.to_completion_request(RequestKind::Chat, cx)
|
||||
});
|
||||
|
||||
assert_eq!(request.messages.len(), 2);
|
||||
assert_eq!(request.messages.len(), 1);
|
||||
assert_eq!(
|
||||
request.messages[1].string_contents(),
|
||||
request.messages[0].string_contents(),
|
||||
"What is the best way to learn Rust?"
|
||||
);
|
||||
|
||||
@@ -2212,13 +2127,13 @@ fn main() {{
|
||||
thread.to_completion_request(RequestKind::Chat, cx)
|
||||
});
|
||||
|
||||
assert_eq!(request.messages.len(), 3);
|
||||
assert_eq!(request.messages.len(), 2);
|
||||
assert_eq!(
|
||||
request.messages[1].string_contents(),
|
||||
request.messages[0].string_contents(),
|
||||
"What is the best way to learn Rust?"
|
||||
);
|
||||
assert_eq!(
|
||||
request.messages[2].string_contents(),
|
||||
request.messages[1].string_contents(),
|
||||
"Are there any good books?"
|
||||
);
|
||||
}
|
||||
@@ -2339,19 +2254,18 @@ fn main() {{
|
||||
let (workspace, cx) =
|
||||
cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx));
|
||||
|
||||
let thread_store = cx
|
||||
.update(|_, cx| {
|
||||
ThreadStore::load(
|
||||
project.clone(),
|
||||
cx.new(|_| ToolWorkingSet::default()),
|
||||
Arc::new(PromptBuilder::new(None).unwrap()),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await;
|
||||
let thread_store = cx.update(|_, cx| {
|
||||
ThreadStore::new(
|
||||
project.clone(),
|
||||
Arc::default(),
|
||||
Arc::new(PromptBuilder::new(None).unwrap()),
|
||||
cx,
|
||||
)
|
||||
.unwrap()
|
||||
});
|
||||
|
||||
let thread = thread_store.update(cx, |store, cx| store.create_thread(cx));
|
||||
let context_store = cx.new(|_cx| ContextStore::new(project.downgrade(), None));
|
||||
let context_store = cx.new(|_cx| ContextStore::new(workspace.downgrade(), None));
|
||||
|
||||
(workspace, thread_store, thread, context_store)
|
||||
}
|
||||
|
||||
@@ -4,14 +4,11 @@ use assistant_context_editor::SavedContextMetadata;
use editor::{Editor, EditorEvent};
use fuzzy::{StringMatch, StringMatchCandidate};
use gpui::{
App, Entity, FocusHandle, Focusable, ScrollStrategy, Stateful, Task, UniformListScrollHandle,
WeakEntity, Window, uniform_list,
App, Entity, FocusHandle, Focusable, ScrollStrategy, Task, UniformListScrollHandle, WeakEntity,
Window, uniform_list,
};
use time::{OffsetDateTime, UtcOffset};
use ui::{
HighlightedLabel, IconButtonShape, ListItem, ListItemSpacing, Scrollbar, ScrollbarState,
Tooltip, prelude::*,
};
use ui::{HighlightedLabel, IconButtonShape, ListItem, ListItemSpacing, Tooltip, prelude::*};
use util::ResultExt;

use crate::history_store::{HistoryEntry, HistoryStore};
@@ -29,8 +26,6 @@ pub struct ThreadHistory {
matches: Vec<StringMatch>,
_subscriptions: Vec<gpui::Subscription>,
_search_task: Option<Task<()>>,
scrollbar_visibility: bool,
scrollbar_state: ScrollbarState,
}

impl ThreadHistory {
|
||||
@@ -63,13 +58,10 @@ impl ThreadHistory {
|
||||
this.update_all_entries(cx);
|
||||
});
|
||||
|
||||
let scroll_handle = UniformListScrollHandle::default();
|
||||
let scrollbar_state = ScrollbarState::new(scroll_handle.clone());
|
||||
|
||||
Self {
|
||||
assistant_panel,
|
||||
history_store,
|
||||
scroll_handle,
|
||||
scroll_handle: UniformListScrollHandle::default(),
|
||||
selected_index: 0,
|
||||
search_query: SharedString::new_static(""),
|
||||
all_entries: entries,
|
||||
@@ -77,8 +69,6 @@ impl ThreadHistory {
|
||||
search_editor,
|
||||
_subscriptions: vec![search_editor_subscription, history_store_subscription],
|
||||
_search_task: None,
|
||||
scrollbar_visibility: true,
|
||||
scrollbar_state,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -230,43 +220,6 @@ impl ThreadHistory {
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
fn render_scrollbar(&self, cx: &mut Context<Self>) -> Option<Stateful<Div>> {
|
||||
if !(self.scrollbar_visibility || self.scrollbar_state.is_dragging()) {
|
||||
return None;
|
||||
}
|
||||
|
||||
Some(
|
||||
div()
|
||||
.occlude()
|
||||
.id("thread-history-scroll")
|
||||
.h_full()
|
||||
.bg(cx.theme().colors().panel_background.opacity(0.8))
|
||||
.border_l_1()
|
||||
.border_color(cx.theme().colors().border_variant)
|
||||
.absolute()
|
||||
.right_1()
|
||||
.top_0()
|
||||
.bottom_0()
|
||||
.w_4()
|
||||
.pl_1()
|
||||
.cursor_default()
|
||||
.on_mouse_move(cx.listener(|_, _, _window, cx| {
|
||||
cx.notify();
|
||||
cx.stop_propagation()
|
||||
}))
|
||||
.on_hover(|_, _window, cx| {
|
||||
cx.stop_propagation();
|
||||
})
|
||||
.on_any_mouse_down(|_, _window, cx| {
|
||||
cx.stop_propagation();
|
||||
})
|
||||
.on_scroll_wheel(cx.listener(|_, _, _window, cx| {
|
||||
cx.notify();
|
||||
}))
|
||||
.children(Scrollbar::vertical(self.scrollbar_state.clone())),
|
||||
)
|
||||
}
|
||||
|
||||
fn confirm(&mut self, _: &menu::Confirm, window: &mut Window, cx: &mut Context<Self>) {
|
||||
if let Some(entry) = self.get_match(self.selected_index) {
|
||||
let task_result = match entry {
|
||||
@@ -352,11 +305,7 @@ impl Render for ThreadHistory {
|
||||
)
|
||||
})
|
||||
.child({
|
||||
let view = v_flex()
|
||||
.id("list-container")
|
||||
.relative()
|
||||
.overflow_hidden()
|
||||
.flex_grow();
|
||||
let view = v_flex().overflow_hidden().flex_grow();
|
||||
|
||||
if self.all_entries.is_empty() {
|
||||
view.justify_center()
|
||||
@@ -373,70 +322,59 @@ impl Render for ThreadHistory {
|
||||
),
|
||||
)
|
||||
} else {
|
||||
view.pr_5()
|
||||
.child(
|
||||
uniform_list(
|
||||
cx.entity().clone(),
|
||||
"thread-history",
|
||||
self.matched_count(),
|
||||
move |history, range, _window, _cx| {
|
||||
let range_start = range.start;
|
||||
let assistant_panel = history.assistant_panel.clone();
|
||||
view.p_1().child(
|
||||
uniform_list(
|
||||
cx.entity().clone(),
|
||||
"thread-history",
|
||||
self.matched_count(),
|
||||
move |history, range, _window, _cx| {
|
||||
let range_start = range.start;
|
||||
let assistant_panel = history.assistant_panel.clone();
|
||||
|
||||
let render_item = |index: usize,
|
||||
entry: &HistoryEntry,
|
||||
highlight_positions: Vec<usize>|
|
||||
-> Div {
|
||||
h_flex().w_full().pb_1().child(match entry {
|
||||
HistoryEntry::Thread(thread) => PastThread::new(
|
||||
thread.clone(),
|
||||
assistant_panel.clone(),
|
||||
selected_index == index + range_start,
|
||||
highlight_positions,
|
||||
)
|
||||
.into_any_element(),
|
||||
HistoryEntry::Context(context) => PastContext::new(
|
||||
context.clone(),
|
||||
assistant_panel.clone(),
|
||||
selected_index == index + range_start,
|
||||
highlight_positions,
|
||||
)
|
||||
.into_any_element(),
|
||||
})
|
||||
};
|
||||
let render_item = |index: usize,
|
||||
entry: &HistoryEntry,
|
||||
highlight_positions: Vec<usize>|
|
||||
-> Div {
|
||||
h_flex().w_full().pb_1().child(match entry {
|
||||
HistoryEntry::Thread(thread) => PastThread::new(
|
||||
thread.clone(),
|
||||
assistant_panel.clone(),
|
||||
selected_index == index + range_start,
|
||||
highlight_positions,
|
||||
)
|
||||
.into_any_element(),
|
||||
HistoryEntry::Context(context) => PastContext::new(
|
||||
context.clone(),
|
||||
assistant_panel.clone(),
|
||||
selected_index == index + range_start,
|
||||
highlight_positions,
|
||||
)
|
||||
.into_any_element(),
|
||||
})
|
||||
};
|
||||
|
||||
if history.has_search_query() {
|
||||
history.matches[range]
|
||||
.iter()
|
||||
.enumerate()
|
||||
.filter_map(|(index, m)| {
|
||||
history.all_entries.get(m.candidate_id).map(
|
||||
|entry| {
|
||||
render_item(
|
||||
index,
|
||||
entry,
|
||||
m.positions.clone(),
|
||||
)
|
||||
},
|
||||
)
|
||||
if history.has_search_query() {
|
||||
history.matches[range]
|
||||
.iter()
|
||||
.enumerate()
|
||||
.filter_map(|(index, m)| {
|
||||
history.all_entries.get(m.candidate_id).map(|entry| {
|
||||
render_item(index, entry, m.positions.clone())
|
||||
})
|
||||
.collect()
|
||||
} else {
|
||||
history.all_entries[range]
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(index, entry)| render_item(index, entry, vec![]))
|
||||
.collect()
|
||||
}
|
||||
},
|
||||
)
|
||||
.p_1()
|
||||
.track_scroll(self.scroll_handle.clone())
|
||||
.flex_grow(),
|
||||
})
|
||||
.collect()
|
||||
} else {
|
||||
history.all_entries[range]
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(index, entry)| render_item(index, entry, vec![]))
|
||||
.collect()
|
||||
}
|
||||
},
|
||||
)
|
||||
.when_some(self.render_scrollbar(cx), |div, scrollbar| {
|
||||
div.child(scrollbar)
|
||||
})
|
||||
.track_scroll(self.scroll_handle.clone())
|
||||
.flex_grow(),
|
||||
)
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -493,6 +431,17 @@ impl RenderOnce for PastThread {
|
||||
.end_slot(
|
||||
h_flex()
|
||||
.gap_1p5()
|
||||
.child(
|
||||
Label::new("Thread")
|
||||
.color(Color::Muted)
|
||||
.size(LabelSize::XSmall),
|
||||
)
|
||||
.child(
|
||||
div()
|
||||
.size(px(3.))
|
||||
.rounded_full()
|
||||
.bg(cx.theme().colors().text_disabled),
|
||||
)
|
||||
.child(
|
||||
Label::new(thread_timestamp)
|
||||
.color(Color::Muted)
|
||||
@@ -502,9 +451,13 @@ impl RenderOnce for PastThread {
|
||||
IconButton::new("delete", IconName::TrashAlt)
|
||||
.shape(IconButtonShape::Square)
|
||||
.icon_size(IconSize::XSmall)
|
||||
.icon_color(Color::Muted)
|
||||
.tooltip(move |window, cx| {
|
||||
Tooltip::for_action("Delete", &RemoveSelectedThread, window, cx)
|
||||
Tooltip::for_action(
|
||||
"Delete Thread",
|
||||
&RemoveSelectedThread,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.on_click({
|
||||
let assistant_panel = self.assistant_panel.clone();
|
||||
@@ -585,6 +538,17 @@ impl RenderOnce for PastContext {
|
||||
.end_slot(
|
||||
h_flex()
|
||||
.gap_1p5()
|
||||
.child(
|
||||
Label::new("Prompt Editor")
|
||||
.color(Color::Muted)
|
||||
.size(LabelSize::XSmall),
|
||||
)
|
||||
.child(
|
||||
div()
|
||||
.size(px(3.))
|
||||
.rounded_full()
|
||||
.bg(cx.theme().colors().text_disabled),
|
||||
)
|
||||
.child(
|
||||
Label::new(context_timestamp)
|
||||
.color(Color::Muted)
|
||||
@@ -594,10 +558,7 @@ impl RenderOnce for PastContext {
|
||||
IconButton::new("delete", IconName::TrashAlt)
|
||||
.shape(IconButtonShape::Square)
|
||||
.icon_size(IconSize::XSmall)
|
||||
.icon_color(Color::Muted)
|
||||
.tooltip(move |window, cx| {
|
||||
Tooltip::for_action("Delete", &RemoveSelectedThread, window, cx)
|
||||
})
|
||||
.tooltip(Tooltip::text("Delete Prompt Editor"))
|
||||
.on_click({
|
||||
let assistant_panel = self.assistant_panel.clone();
|
||||
let path = self.context.path.clone();
|
||||
|
||||
@@ -1,254 +1,88 @@
|
||||
use std::borrow::Cow;
|
||||
use std::cell::{Ref, RefCell};
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::rc::Rc;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use anyhow::{Result, anyhow};
|
||||
use assistant_settings::{AgentProfile, AgentProfileId, AssistantSettings};
|
||||
use assistant_tool::{ToolId, ToolSource, ToolWorkingSet};
|
||||
use chrono::{DateTime, Utc};
|
||||
use collections::HashMap;
|
||||
use context_server::manager::ContextServerManager;
|
||||
use context_server::{ContextServerFactoryRegistry, ContextServerTool};
|
||||
use fs::Fs;
|
||||
use futures::FutureExt as _;
|
||||
use futures::future::{self, BoxFuture, Shared};
|
||||
use gpui::{
|
||||
App, BackgroundExecutor, Context, Entity, EventEmitter, Global, ReadGlobal, SharedString,
|
||||
Subscription, Task, prelude::*,
|
||||
App, BackgroundExecutor, Context, Entity, Global, ReadGlobal, SharedString, Subscription, Task,
|
||||
prelude::*,
|
||||
};
|
||||
use heed::Database;
|
||||
use heed::types::SerdeBincode;
|
||||
use language_model::{LanguageModelToolUseId, Role, TokenUsage};
|
||||
use project::{Project, Worktree};
|
||||
use prompt_store::{ProjectContext, PromptBuilder, RulesFileContext, WorktreeContext};
|
||||
use project::Project;
|
||||
use prompt_store::PromptBuilder;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::{Settings as _, SettingsStore};
|
||||
use util::ResultExt as _;
|
||||
|
||||
use crate::thread::{
|
||||
DetailedSummaryState, ExceededWindowError, MessageId, ProjectSnapshot, Thread, ThreadId,
|
||||
DetailedSummaryState, MessageId, ProjectSnapshot, Thread, ThreadEvent, ThreadId,
|
||||
};
|
||||
|
||||
const RULES_FILE_NAMES: [&'static str; 6] = [
|
||||
".rules",
|
||||
".cursorrules",
|
||||
".windsurfrules",
|
||||
".clinerules",
|
||||
".github/copilot-instructions.md",
|
||||
"CLAUDE.md",
|
||||
];
|
||||
|
||||
pub fn init(cx: &mut App) {
|
||||
ThreadsDatabase::init(cx);
|
||||
}
|
||||
|
||||
/// A system prompt shared by all threads created by this ThreadStore
|
||||
#[derive(Clone, Default)]
|
||||
pub struct SharedProjectContext(Rc<RefCell<Option<ProjectContext>>>);
|
||||
|
||||
impl SharedProjectContext {
|
||||
pub fn borrow(&self) -> Ref<Option<ProjectContext>> {
|
||||
self.0.borrow()
|
||||
}
|
||||
}
|
||||
|
||||
pub struct ThreadStore {
|
||||
project: Entity<Project>,
|
||||
tools: Entity<ToolWorkingSet>,
|
||||
tools: Arc<ToolWorkingSet>,
|
||||
prompt_builder: Arc<PromptBuilder>,
|
||||
context_server_manager: Entity<ContextServerManager>,
|
||||
context_server_tool_ids: HashMap<Arc<str>, Vec<ToolId>>,
|
||||
threads: Vec<SerializedThreadMetadata>,
|
||||
project_context: SharedProjectContext,
|
||||
_subscriptions: Vec<Subscription>,
|
||||
}
|
||||
|
||||
pub struct RulesLoadingError {
|
||||
pub message: SharedString,
|
||||
}
|
||||
|
||||
impl EventEmitter<RulesLoadingError> for ThreadStore {}
|
||||
|
||||
impl ThreadStore {
|
||||
pub fn load(
|
||||
pub fn new(
|
||||
project: Entity<Project>,
|
||||
tools: Entity<ToolWorkingSet>,
|
||||
tools: Arc<ToolWorkingSet>,
|
||||
prompt_builder: Arc<PromptBuilder>,
|
||||
cx: &mut App,
|
||||
) -> Task<Entity<Self>> {
|
||||
let thread_store = cx.new(|cx| Self::new(project, tools, prompt_builder, cx));
|
||||
let reload = thread_store.update(cx, |store, cx| store.reload_system_prompt(cx));
|
||||
cx.foreground_executor().spawn(async move {
|
||||
reload.await;
|
||||
thread_store
|
||||
})
|
||||
}
|
||||
|
||||
fn new(
|
||||
project: Entity<Project>,
|
||||
tools: Entity<ToolWorkingSet>,
|
||||
prompt_builder: Arc<PromptBuilder>,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
let context_server_factory_registry = ContextServerFactoryRegistry::default_global(cx);
|
||||
let context_server_manager = cx.new(|cx| {
|
||||
ContextServerManager::new(context_server_factory_registry, project.clone(), cx)
|
||||
});
|
||||
let settings_subscription =
|
||||
cx.observe_global::<SettingsStore>(move |this: &mut Self, cx| {
|
||||
this.load_default_profile(cx);
|
||||
) -> Result<Entity<Self>> {
|
||||
let this = cx.new(|cx| {
|
||||
let context_server_factory_registry = ContextServerFactoryRegistry::default_global(cx);
|
||||
let context_server_manager = cx.new(|cx| {
|
||||
ContextServerManager::new(context_server_factory_registry, project.clone(), cx)
|
||||
});
|
||||
let project_subscription = cx.subscribe(&project, Self::handle_project_event);
|
||||
let settings_subscription =
|
||||
cx.observe_global::<SettingsStore>(move |this: &mut Self, cx| {
|
||||
this.load_default_profile(cx);
|
||||
});
|
||||
|
||||
let this = Self {
|
||||
project,
|
||||
tools,
|
||||
prompt_builder,
|
||||
context_server_manager,
|
||||
context_server_tool_ids: HashMap::default(),
|
||||
threads: Vec::new(),
|
||||
project_context: SharedProjectContext::default(),
|
||||
_subscriptions: vec![settings_subscription, project_subscription],
|
||||
};
|
||||
this.load_default_profile(cx);
|
||||
this.register_context_server_handlers(cx);
|
||||
this.reload(cx).detach_and_log_err(cx);
|
||||
this
|
||||
}
|
||||
|
||||
fn handle_project_event(
|
||||
&mut self,
|
||||
_project: Entity<Project>,
|
||||
event: &project::Event,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
match event {
|
||||
project::Event::WorktreeAdded(_) | project::Event::WorktreeRemoved(_) => {
|
||||
self.reload_system_prompt(cx).detach();
|
||||
}
|
||||
project::Event::WorktreeUpdatedEntries(_, items) => {
|
||||
if items.iter().any(|(path, _, _)| {
|
||||
RULES_FILE_NAMES
|
||||
.iter()
|
||||
.any(|name| path.as_ref() == Path::new(name))
|
||||
}) {
|
||||
self.reload_system_prompt(cx).detach();
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn reload_system_prompt(&self, cx: &mut Context<Self>) -> Task<()> {
|
||||
let project = self.project.read(cx);
|
||||
let tasks = project
|
||||
.visible_worktrees(cx)
|
||||
.map(|worktree| {
|
||||
Self::load_worktree_info_for_system_prompt(
|
||||
project.fs().clone(),
|
||||
worktree.read(cx),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
cx.spawn(async move |this, cx| {
|
||||
let results = futures::future::join_all(tasks).await;
|
||||
let worktrees = results
|
||||
.into_iter()
|
||||
.map(|(worktree, rules_error)| {
|
||||
if let Some(rules_error) = rules_error {
|
||||
this.update(cx, |_, cx| cx.emit(rules_error)).ok();
|
||||
}
|
||||
worktree
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
this.update(cx, |this, _cx| {
|
||||
*this.project_context.0.borrow_mut() = Some(ProjectContext::new(worktrees));
|
||||
})
|
||||
.ok();
|
||||
})
|
||||
}
|
||||
|
||||
fn load_worktree_info_for_system_prompt(
|
||||
fs: Arc<dyn Fs>,
|
||||
worktree: &Worktree,
|
||||
cx: &App,
|
||||
) -> Task<(WorktreeContext, Option<RulesLoadingError>)> {
|
||||
let root_name = worktree.root_name().into();
|
||||
let abs_path = worktree.abs_path();
|
||||
|
||||
let rules_task = Self::load_worktree_rules_file(fs, worktree, cx);
|
||||
let Some(rules_task) = rules_task else {
|
||||
return Task::ready((
|
||||
WorktreeContext {
|
||||
root_name,
|
||||
abs_path,
|
||||
rules_file: None,
|
||||
},
|
||||
None,
|
||||
));
|
||||
};
|
||||
|
||||
cx.spawn(async move |_| {
|
||||
let (rules_file, rules_file_error) = match rules_task.await {
|
||||
Ok(rules_file) => (Some(rules_file), None),
|
||||
Err(err) => (
|
||||
None,
|
||||
Some(RulesLoadingError {
|
||||
message: format!("{err}").into(),
|
||||
}),
|
||||
),
|
||||
let this = Self {
|
||||
project,
|
||||
tools,
|
||||
prompt_builder,
|
||||
context_server_manager,
|
||||
context_server_tool_ids: HashMap::default(),
|
||||
threads: Vec::new(),
|
||||
_subscriptions: vec![settings_subscription],
|
||||
};
|
||||
let worktree_info = WorktreeContext {
|
||||
root_name,
|
||||
abs_path,
|
||||
rules_file,
|
||||
};
|
||||
(worktree_info, rules_file_error)
|
||||
})
|
||||
}
|
||||
this.load_default_profile(cx);
|
||||
this.register_context_server_handlers(cx);
|
||||
this.reload(cx).detach_and_log_err(cx);
|
||||
|
||||
fn load_worktree_rules_file(
|
||||
fs: Arc<dyn Fs>,
|
||||
worktree: &Worktree,
|
||||
cx: &App,
|
||||
) -> Option<Task<Result<RulesFileContext>>> {
|
||||
let selected_rules_file = RULES_FILE_NAMES
|
||||
.into_iter()
|
||||
.filter_map(|name| {
|
||||
worktree
|
||||
.entry_for_path(name)
|
||||
.filter(|entry| entry.is_file())
|
||||
.map(|entry| (entry.path.clone(), worktree.absolutize(&entry.path)))
|
||||
})
|
||||
.next();
|
||||
this
|
||||
});
|
||||
|
||||
// Note that Cline supports `.clinerules` being a directory, but that is not currently
|
||||
// supported. This doesn't seem to occur often in GitHub repositories.
|
||||
selected_rules_file.map(|(path_in_worktree, abs_path)| {
|
||||
let fs = fs.clone();
|
||||
cx.background_spawn(async move {
|
||||
let abs_path = abs_path?;
|
||||
let text = fs.load(&abs_path).await.with_context(|| {
|
||||
format!("Failed to load assistant rules file {:?}", abs_path)
|
||||
})?;
|
||||
anyhow::Ok(RulesFileContext {
|
||||
path_in_worktree,
|
||||
abs_path: abs_path.into(),
|
||||
text: text.trim().to_string(),
|
||||
})
|
||||
})
|
||||
})
|
||||
Ok(this)
|
||||
}
|
||||
|
||||
pub fn context_server_manager(&self) -> Entity<ContextServerManager> {
|
||||
self.context_server_manager.clone()
|
||||
}
|
||||
|
||||
pub fn tools(&self) -> Entity<ToolWorkingSet> {
|
||||
pub fn tools(&self) -> Arc<ToolWorkingSet> {
|
||||
self.tools.clone()
|
||||
}
|
||||
|
||||
@@ -273,7 +107,6 @@ impl ThreadStore {
|
||||
self.project.clone(),
|
||||
self.tools.clone(),
|
||||
self.prompt_builder.clone(),
|
||||
self.project_context.clone(),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
@@ -301,12 +134,21 @@ impl ThreadStore {
|
||||
this.project.clone(),
|
||||
this.tools.clone(),
|
||||
this.prompt_builder.clone(),
|
||||
this.project_context.clone(),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
})?;
|
||||
|
||||
let (system_prompt_context, load_error) = thread
|
||||
.update(cx, |thread, cx| thread.load_system_prompt_context(cx))?
|
||||
.await;
|
||||
thread.update(cx, |thread, cx| {
|
||||
thread.set_system_prompt_context(system_prompt_context);
|
||||
if let Some(load_error) = load_error {
|
||||
cx.emit(ThreadEvent::ShowError(load_error));
|
||||
}
|
||||
})?;
|
||||
|
||||
Ok(thread)
|
||||
})
|
||||
}
|
||||
@@ -355,60 +197,52 @@ impl ThreadStore {
|
||||
})
|
||||
}
|
||||
|
||||
fn load_default_profile(&self, cx: &mut Context<Self>) {
|
||||
fn load_default_profile(&self, cx: &Context<Self>) {
|
||||
let assistant_settings = AssistantSettings::get_global(cx);
|
||||
|
||||
self.load_profile_by_id(assistant_settings.default_profile.clone(), cx);
|
||||
self.load_profile_by_id(&assistant_settings.default_profile, cx);
|
||||
}
|
||||
|
||||
pub fn load_profile_by_id(&self, profile_id: AgentProfileId, cx: &mut Context<Self>) {
|
||||
pub fn load_profile_by_id(&self, profile_id: &AgentProfileId, cx: &Context<Self>) {
|
||||
let assistant_settings = AssistantSettings::get_global(cx);
|
||||
|
||||
if let Some(profile) = assistant_settings.profiles.get(&profile_id) {
|
||||
self.load_profile(profile.clone(), cx);
|
||||
if let Some(profile) = assistant_settings.profiles.get(profile_id) {
|
||||
self.load_profile(profile, cx);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn load_profile(&self, profile: AgentProfile, cx: &mut Context<Self>) {
|
||||
self.tools.update(cx, |tools, cx| {
|
||||
tools.disable_all_tools(cx);
|
||||
tools.enable(
|
||||
ToolSource::Native,
|
||||
&profile
|
||||
.tools
|
||||
.iter()
|
||||
.filter_map(|(tool, enabled)| enabled.then(|| tool.clone()))
|
||||
.collect::<Vec<_>>(),
|
||||
cx,
|
||||
);
|
||||
});
|
||||
pub fn load_profile(&self, profile: &AgentProfile, cx: &Context<Self>) {
|
||||
self.tools.disable_all_tools();
|
||||
self.tools.enable(
|
||||
ToolSource::Native,
|
||||
&profile
|
||||
.tools
|
||||
.iter()
|
||||
.filter_map(|(tool, enabled)| enabled.then(|| tool.clone()))
|
||||
.collect::<Vec<_>>(),
|
||||
);
|
||||
|
||||
if profile.enable_all_context_servers {
|
||||
for context_server in self.context_server_manager.read(cx).all_servers() {
|
||||
self.tools.update(cx, |tools, cx| {
|
||||
tools.enable_source(
|
||||
ToolSource::ContextServer {
|
||||
id: context_server.id().into(),
|
||||
},
|
||||
cx,
|
||||
);
|
||||
});
|
||||
self.tools.enable_source(
|
||||
ToolSource::ContextServer {
|
||||
id: context_server.id().into(),
|
||||
},
|
||||
cx,
|
||||
);
|
||||
}
|
||||
} else {
|
||||
for (context_server_id, preset) in &profile.context_servers {
|
||||
self.tools.update(cx, |tools, cx| {
|
||||
tools.enable(
|
||||
ToolSource::ContextServer {
|
||||
id: context_server_id.clone().into(),
|
||||
},
|
||||
&preset
|
||||
.tools
|
||||
.iter()
|
||||
.filter_map(|(tool, enabled)| enabled.then(|| tool.clone()))
|
||||
.collect::<Vec<_>>(),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
self.tools.enable(
|
||||
ToolSource::ContextServer {
|
||||
id: context_server_id.clone().into(),
|
||||
},
|
||||
&preset
|
||||
.tools
|
||||
.iter()
|
||||
.filter_map(|(tool, enabled)| enabled.then(|| tool.clone()))
|
||||
.collect::<Vec<_>>(),
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -442,36 +276,29 @@ impl ThreadStore {
|
||||
|
||||
if protocol.capable(context_server::protocol::ServerCapability::Tools) {
|
||||
if let Some(tools) = protocol.list_tools().await.log_err() {
|
||||
let tool_ids = tool_working_set
|
||||
.update(cx, |tool_working_set, _| {
|
||||
tools
|
||||
.tools
|
||||
.into_iter()
|
||||
.map(|tool| {
|
||||
log::info!(
|
||||
"registering context server tool: {:?}",
|
||||
tool.name
|
||||
);
|
||||
tool_working_set.insert(Arc::new(
|
||||
ContextServerTool::new(
|
||||
context_server_manager.clone(),
|
||||
server.id(),
|
||||
tool,
|
||||
),
|
||||
))
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
let tool_ids = tools
|
||||
.tools
|
||||
.into_iter()
|
||||
.map(|tool| {
|
||||
log::info!(
|
||||
"registering context server tool: {:?}",
|
||||
tool.name
|
||||
);
|
||||
tool_working_set.insert(Arc::new(
|
||||
ContextServerTool::new(
|
||||
context_server_manager.clone(),
|
||||
server.id(),
|
||||
tool,
|
||||
),
|
||||
))
|
||||
})
|
||||
.log_err();
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
if let Some(tool_ids) = tool_ids {
|
||||
this.update(cx, |this, cx| {
|
||||
this.context_server_tool_ids
|
||||
.insert(server_id, tool_ids);
|
||||
this.load_default_profile(cx);
|
||||
})
|
||||
.log_err();
|
||||
}
|
||||
this.update(cx, |this, cx| {
|
||||
this.context_server_tool_ids.insert(server_id, tool_ids);
|
||||
this.load_default_profile(cx);
|
||||
})
|
||||
.log_err();
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -481,9 +308,7 @@ impl ThreadStore {
|
||||
}
|
||||
context_server::manager::Event::ServerStopped { server_id } => {
|
||||
if let Some(tool_ids) = self.context_server_tool_ids.remove(server_id) {
|
||||
tool_working_set.update(cx, |tool_working_set, _| {
|
||||
tool_working_set.remove(&tool_ids);
|
||||
});
|
||||
tool_working_set.remove(&tool_ids);
|
||||
self.load_default_profile(cx);
|
||||
}
|
||||
}
|
||||
@@ -510,8 +335,6 @@ pub struct SerializedThread {
|
||||
pub cumulative_token_usage: TokenUsage,
|
||||
#[serde(default)]
|
||||
pub detailed_summary_state: DetailedSummaryState,
|
||||
#[serde(default)]
|
||||
pub exceeded_window_error: Option<ExceededWindowError>,
|
||||
}
|
||||
|
||||
impl SerializedThread {
|
||||
@@ -598,7 +421,6 @@ impl LegacySerializedThread {
|
||||
initial_project_snapshot: self.initial_project_snapshot,
|
||||
cumulative_token_usage: TokenUsage::default(),
|
||||
detailed_summary_state: DetailedSummaryState::default(),
|
||||
exceeded_window_error: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -669,7 +491,7 @@ impl ThreadsDatabase {
|
||||
let database_future = executor
|
||||
.spawn({
|
||||
let executor = executor.clone();
|
||||
let database_path = paths::data_dir().join("threads/threads-db.1.mdb");
|
||||
let database_path = paths::support_dir().join("threads/threads-db.1.mdb");
|
||||
async move { ThreadsDatabase::new(database_path, executor) }
|
||||
})
|
||||
.then(|result| future::ready(result.map(Arc::new).map_err(Arc::new)))
|
||||
|
||||
@@ -1,89 +0,0 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use assistant_tool::{Tool, ToolWorkingSet, ToolWorkingSetEvent};
|
||||
use collections::HashMap;
|
||||
use gpui::{App, Context, Entity, IntoElement, Render, Subscription, Window};
|
||||
use language_model::{LanguageModel, LanguageModelToolSchemaFormat};
|
||||
use ui::prelude::*;
|
||||
|
||||
pub struct IncompatibleToolsState {
|
||||
cache: HashMap<LanguageModelToolSchemaFormat, Vec<Arc<dyn Tool>>>,
|
||||
tool_working_set: Entity<ToolWorkingSet>,
|
||||
_tool_working_set_subscription: Subscription,
|
||||
}
|
||||
|
||||
impl IncompatibleToolsState {
|
||||
pub fn new(tool_working_set: Entity<ToolWorkingSet>, cx: &mut Context<Self>) -> Self {
|
||||
let _tool_working_set_subscription =
|
||||
cx.subscribe(&tool_working_set, |this, _, event, _| match event {
|
||||
ToolWorkingSetEvent::EnabledToolsChanged => {
|
||||
this.cache.clear();
|
||||
}
|
||||
});
|
||||
|
||||
Self {
|
||||
cache: HashMap::default(),
|
||||
tool_working_set,
|
||||
_tool_working_set_subscription,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn incompatible_tools(
|
||||
&mut self,
|
||||
model: &Arc<dyn LanguageModel>,
|
||||
cx: &App,
|
||||
) -> &[Arc<dyn Tool>] {
|
||||
self.cache
|
||||
.entry(model.tool_input_format())
|
||||
.or_insert_with(|| {
|
||||
self.tool_working_set
|
||||
.read(cx)
|
||||
.enabled_tools(cx)
|
||||
.iter()
|
||||
.filter(|tool| tool.input_schema(model.tool_input_format()).is_err())
|
||||
.cloned()
|
||||
.collect()
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub struct IncompatibleToolsTooltip {
|
||||
pub incompatible_tools: Vec<Arc<dyn Tool>>,
|
||||
}
|
||||
|
||||
impl Render for IncompatibleToolsTooltip {
|
||||
fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
ui::tooltip_container(window, cx, |container, _, cx| {
|
||||
container
|
||||
.w_72()
|
||||
.child(Label::new("Incompatible Tools").size(LabelSize::Small))
|
||||
.child(
|
||||
Label::new(
|
||||
"This model is incompatible with the following tools from your MCPs:",
|
||||
)
|
||||
.size(LabelSize::Small)
|
||||
.color(Color::Muted),
|
||||
)
|
||||
.child(
|
||||
v_flex()
|
||||
.my_1p5()
|
||||
.py_0p5()
|
||||
.border_b_1()
|
||||
.border_color(cx.theme().colors().border_variant)
|
||||
.children(
|
||||
self.incompatible_tools
|
||||
.iter()
|
||||
.map(|tool| Label::new(tool.name()).size(LabelSize::Small).buffer_font(cx)),
|
||||
),
|
||||
)
|
||||
.child(Label::new("What To Do Instead").size(LabelSize::Small))
|
||||
.child(
|
||||
Label::new(
|
||||
"Every other tool continues to work with this model, but to specifically use those, switch to another model.",
|
||||
)
|
||||
.size(LabelSize::Small)
|
||||
.color(Color::Muted),
|
||||
)
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -5,13 +5,12 @@ use assistant_tool::{Tool, ToolWorkingSet};
|
||||
use collections::HashMap;
|
||||
use futures::FutureExt as _;
|
||||
use futures::future::Shared;
|
||||
use gpui::{App, Entity, SharedString, Task};
|
||||
use gpui::{App, SharedString, Task};
|
||||
use language_model::{
|
||||
LanguageModelRegistry, LanguageModelRequestMessage, LanguageModelToolResult,
|
||||
LanguageModelToolUse, LanguageModelToolUseId, MessageContent, Role,
|
||||
LanguageModelRequestMessage, LanguageModelToolResult, LanguageModelToolUse,
|
||||
LanguageModelToolUseId, MessageContent, Role,
|
||||
};
|
||||
use ui::IconName;
|
||||
use util::truncate_lines_to_byte_limit;
|
||||
|
||||
use crate::thread::MessageId;
|
||||
use crate::thread_store::SerializedMessage;
|
||||
@@ -49,7 +48,7 @@ impl ToolUseStatus {
|
||||
}
|
||||
|
||||
pub struct ToolUseState {
|
||||
tools: Entity<ToolWorkingSet>,
|
||||
tools: Arc<ToolWorkingSet>,
|
||||
tool_uses_by_assistant_message: HashMap<MessageId, Vec<LanguageModelToolUse>>,
|
||||
tool_uses_by_user_message: HashMap<MessageId, Vec<LanguageModelToolUseId>>,
|
||||
tool_results: HashMap<LanguageModelToolUseId, LanguageModelToolResult>,
|
||||
@@ -59,7 +58,7 @@ pub struct ToolUseState {
|
||||
pub const USING_TOOL_MARKER: &str = "<using_tool>";
|
||||
|
||||
impl ToolUseState {
|
||||
pub fn new(tools: Entity<ToolWorkingSet>) -> Self {
|
||||
pub fn new(tools: Arc<ToolWorkingSet>) -> Self {
|
||||
Self {
|
||||
tools,
|
||||
tool_uses_by_assistant_message: HashMap::default(),
|
||||
@@ -73,7 +72,7 @@ impl ToolUseState {
|
||||
///
|
||||
/// Accepts a function to filter the tools that should be used to populate the state.
|
||||
pub fn from_serialized_messages(
|
||||
tools: Entity<ToolWorkingSet>,
|
||||
tools: Arc<ToolWorkingSet>,
|
||||
messages: &[SerializedMessage],
|
||||
mut filter_by_tool_name: impl FnMut(&str) -> bool,
|
||||
) -> Self {
|
||||
@@ -199,12 +198,12 @@ impl ToolUseState {
|
||||
}
|
||||
})();
|
||||
|
||||
let (icon, needs_confirmation) =
|
||||
if let Some(tool) = self.tools.read(cx).tool(&tool_use.name, cx) {
|
||||
(tool.icon(), tool.needs_confirmation(&tool_use.input, cx))
|
||||
} else {
|
||||
(IconName::Cog, false)
|
||||
};
|
||||
let (icon, needs_confirmation) = if let Some(tool) = self.tools.tool(&tool_use.name, cx)
|
||||
{
|
||||
(tool.icon(), tool.needs_confirmation())
|
||||
} else {
|
||||
(IconName::Cog, false)
|
||||
};
|
||||
|
||||
tool_uses.push(ToolUse {
|
||||
id: tool_use.id.clone(),
|
||||
@@ -226,7 +225,7 @@ impl ToolUseState {
|
||||
input: &serde_json::Value,
|
||||
cx: &App,
|
||||
) -> SharedString {
|
||||
if let Some(tool) = self.tools.read(cx).tool(tool_name, cx) {
|
||||
if let Some(tool) = self.tools.tool(tool_name, cx) {
|
||||
tool.ui_text(input).into()
|
||||
} else {
|
||||
format!("Unknown tool {tool_name:?}").into()
|
||||
@@ -332,34 +331,9 @@ impl ToolUseState {
|
||||
tool_use_id: LanguageModelToolUseId,
|
||||
tool_name: Arc<str>,
|
||||
output: Result<String>,
|
||||
cx: &App,
|
||||
) -> Option<PendingToolUse> {
|
||||
telemetry::event!("Agent Tool Finished", tool_name, success = output.is_ok());
|
||||
|
||||
match output {
|
||||
Ok(tool_result) => {
|
||||
let model_registry = LanguageModelRegistry::read_global(cx);
|
||||
|
||||
const BYTES_PER_TOKEN_ESTIMATE: usize = 3;
|
||||
|
||||
// Protect from clearly large output
|
||||
let tool_output_limit = model_registry
|
||||
.default_model()
|
||||
.map(|model| model.model.max_token_count() * BYTES_PER_TOKEN_ESTIMATE)
|
||||
.unwrap_or(usize::MAX);
|
||||
|
||||
let tool_result = if tool_result.len() <= tool_output_limit {
|
||||
tool_result
|
||||
} else {
|
||||
let truncated = truncate_lines_to_byte_limit(&tool_result, tool_output_limit);
|
||||
|
||||
format!(
|
||||
"Tool result too long. The first {} bytes:\n\n{}",
|
||||
truncated.len(),
|
||||
truncated
|
||||
)
|
||||
};
|
||||
|
||||
self.tool_results.insert(
|
||||
tool_use_id.clone(),
|
||||
LanguageModelToolResult {
|
||||
|
||||
@@ -1,7 +1,5 @@
mod agent_notification;
mod context_pill;
mod user_spending;

pub use agent_notification::*;
pub use context_pill::*;
// pub use user_spending::*;

@@ -280,10 +280,9 @@ impl AddedContext {
            }

            AssistantContext::Directory(directory_context) => {
                let full_path = directory_context
                    .worktree
                    .read(cx)
                    .full_path(&directory_context.path);
                // TODO: handle worktree disambiguation. Maybe by storing an `Arc<dyn File>` to also
                // handle renames?
                let full_path = &directory_context.project_path.path;
                let full_path_string: SharedString =
                    full_path.to_string_lossy().into_owned().into();
                let name = full_path

@@ -1,186 +0,0 @@
|
||||
use gpui::{Entity, Render};
|
||||
use ui::{ProgressBar, prelude::*};
|
||||
|
||||
#[derive(RegisterComponent)]
|
||||
pub struct UserSpending {
|
||||
free_tier_current: u32,
|
||||
free_tier_cap: u32,
|
||||
over_tier_current: u32,
|
||||
over_tier_cap: u32,
|
||||
free_tier_progress: Entity<ProgressBar>,
|
||||
over_tier_progress: Entity<ProgressBar>,
|
||||
}
|
||||
|
||||
impl UserSpending {
|
||||
pub fn new(
|
||||
free_tier_current: u32,
|
||||
free_tier_cap: u32,
|
||||
over_tier_current: u32,
|
||||
over_tier_cap: u32,
|
||||
cx: &mut App,
|
||||
) -> Self {
|
||||
let free_tier_capped = free_tier_current == free_tier_cap;
|
||||
let free_tier_near_capped =
|
||||
free_tier_current as f32 / 100.0 >= free_tier_cap as f32 / 100.0 * 0.9;
|
||||
let over_tier_capped = over_tier_current == over_tier_cap;
|
||||
let over_tier_near_capped =
|
||||
over_tier_current as f32 / 100.0 >= over_tier_cap as f32 / 100.0 * 0.9;
|
||||
|
||||
let free_tier_progress = cx.new(|cx| {
|
||||
ProgressBar::new(
|
||||
"free_tier",
|
||||
free_tier_current as f32,
|
||||
free_tier_cap as f32,
|
||||
cx,
|
||||
)
|
||||
});
|
||||
let over_tier_progress = cx.new(|cx| {
|
||||
ProgressBar::new(
|
||||
"over_tier",
|
||||
over_tier_current as f32,
|
||||
over_tier_cap as f32,
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
if free_tier_capped {
|
||||
free_tier_progress.update(cx, |progress_bar, cx| {
|
||||
progress_bar.fg_color(cx.theme().status().error);
|
||||
});
|
||||
} else if free_tier_near_capped {
|
||||
free_tier_progress.update(cx, |progress_bar, cx| {
|
||||
progress_bar.fg_color(cx.theme().status().warning);
|
||||
});
|
||||
}
|
||||
|
||||
if over_tier_capped {
|
||||
over_tier_progress.update(cx, |progress_bar, cx| {
|
||||
progress_bar.fg_color(cx.theme().status().error);
|
||||
});
|
||||
} else if over_tier_near_capped {
|
||||
over_tier_progress.update(cx, |progress_bar, cx| {
|
||||
progress_bar.fg_color(cx.theme().status().warning);
|
||||
});
|
||||
}
|
||||
|
||||
Self {
|
||||
free_tier_current,
|
||||
free_tier_cap,
|
||||
over_tier_current,
|
||||
over_tier_cap,
|
||||
free_tier_progress,
|
||||
over_tier_progress,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Render for UserSpending {
|
||||
fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
let formatted_free_tier = format!(
|
||||
"${} / ${}",
|
||||
self.free_tier_current as f32 / 100.0,
|
||||
self.free_tier_cap as f32 / 100.0
|
||||
);
|
||||
let formatted_over_tier = format!(
|
||||
"${} / ${}",
|
||||
self.over_tier_current as f32 / 100.0,
|
||||
self.over_tier_cap as f32 / 100.0
|
||||
);
|
||||
|
||||
v_group()
|
||||
.elevation_2(cx)
|
||||
.py_1p5()
|
||||
.px_2p5()
|
||||
.w(px(360.))
|
||||
.child(
|
||||
v_flex()
|
||||
.child(
|
||||
v_flex()
|
||||
.p_1p5()
|
||||
.gap_0p5()
|
||||
.child(
|
||||
h_flex()
|
||||
.justify_between()
|
||||
.child(Label::new("Free Tier Usage").size(LabelSize::Small))
|
||||
.child(
|
||||
Label::new(formatted_free_tier)
|
||||
.size(LabelSize::Small)
|
||||
.color(Color::Muted),
|
||||
),
|
||||
)
|
||||
.child(self.free_tier_progress.clone()),
|
||||
)
|
||||
.child(
|
||||
v_flex()
|
||||
.p_1p5()
|
||||
.gap_0p5()
|
||||
.child(
|
||||
h_flex()
|
||||
.justify_between()
|
||||
.child(Label::new("Current Spending").size(LabelSize::Small))
|
||||
.child(
|
||||
Label::new(formatted_over_tier)
|
||||
.size(LabelSize::Small)
|
||||
.color(Color::Muted),
|
||||
),
|
||||
)
|
||||
.child(self.over_tier_progress.clone()),
|
||||
),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl Component for UserSpending {
|
||||
fn scope() -> ComponentScope {
|
||||
ComponentScope::None
|
||||
}
|
||||
|
||||
fn preview(_window: &mut Window, cx: &mut App) -> Option<AnyElement> {
|
||||
let new_user = cx.new(|cx| UserSpending::new(0, 2000, 0, 2000, cx));
|
||||
let free_capped = cx.new(|cx| UserSpending::new(2000, 2000, 0, 2000, cx));
|
||||
let free_near_capped = cx.new(|cx| UserSpending::new(1800, 2000, 0, 2000, cx));
|
||||
let over_near_capped = cx.new(|cx| UserSpending::new(2000, 2000, 1800, 2000, cx));
|
||||
let over_capped = cx.new(|cx| UserSpending::new(1000, 2000, 2000, 2000, cx));
|
||||
|
||||
Some(
|
||||
v_flex()
|
||||
.gap_6()
|
||||
.p_4()
|
||||
.children(vec![example_group(vec![
|
||||
single_example(
|
||||
"New User",
|
||||
div().size_full().child(new_user.clone()).into_any_element(),
|
||||
),
|
||||
single_example(
|
||||
"Free Tier Capped",
|
||||
div()
|
||||
.size_full()
|
||||
.child(free_capped.clone())
|
||||
.into_any_element(),
|
||||
),
|
||||
single_example(
|
||||
"Free Tier Near Capped",
|
||||
div()
|
||||
.size_full()
|
||||
.child(free_near_capped.clone())
|
||||
.into_any_element(),
|
||||
),
|
||||
single_example(
|
||||
"Over Tier Near Capped",
|
||||
div()
|
||||
.size_full()
|
||||
.child(over_near_capped.clone())
|
||||
.into_any_element(),
|
||||
),
|
||||
single_example(
|
||||
"Over Tier Capped",
|
||||
div()
|
||||
.size_full()
|
||||
.child(over_capped.clone())
|
||||
.into_any_element(),
|
||||
),
|
||||
])])
|
||||
.into_any_element(),
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -25,4 +25,5 @@ serde.workspace = true
serde_json.workspace = true
strum.workspace = true
thiserror.workspace = true
util.workspace = true
workspace-hack.workspace = true

@@ -10,6 +10,7 @@ use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest};
use serde::{Deserialize, Serialize};
use strum::{EnumIter, EnumString};
use thiserror::Error;
use util::ResultExt as _;

pub use supported_countries::*;

@@ -36,9 +37,9 @@ pub enum AnthropicModelMode {
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, EnumIter)]
pub enum Model {
    #[default]
    #[serde(rename = "claude-3-5-sonnet", alias = "claude-3-5-sonnet-latest")]
    Claude3_5Sonnet,
    #[default]
    #[serde(rename = "claude-3-7-sonnet", alias = "claude-3-7-sonnet-latest")]
    Claude3_7Sonnet,
    #[serde(
@@ -320,68 +321,38 @@ pub async fn stream_completion(
        .map(|output| output.0)
}

/// An individual rate limit.
#[derive(Debug)]
pub struct RateLimit {
    pub limit: usize,
    pub remaining: usize,
    pub reset: DateTime<Utc>,
}

impl RateLimit {
    fn from_headers(resource: &str, headers: &HeaderMap<HeaderValue>) -> Result<Self> {
        let limit =
            get_header(&format!("anthropic-ratelimit-{resource}-limit"), headers)?.parse()?;
        let remaining = get_header(
            &format!("anthropic-ratelimit-{resource}-remaining"),
            headers,
        )?
        .parse()?;
        let reset = DateTime::parse_from_rfc3339(get_header(
            &format!("anthropic-ratelimit-{resource}-reset"),
            headers,
        )?)?
        .to_utc();

        Ok(Self {
            limit,
            remaining,
            reset,
        })
    }
}

/// <https://docs.anthropic.com/en/api/rate-limits#response-headers>
#[derive(Debug)]
pub struct RateLimitInfo {
    pub requests: Option<RateLimit>,
    pub tokens: Option<RateLimit>,
    pub input_tokens: Option<RateLimit>,
    pub output_tokens: Option<RateLimit>,
    pub requests_limit: usize,
    pub requests_remaining: usize,
    pub requests_reset: DateTime<Utc>,
    pub tokens_limit: usize,
    pub tokens_remaining: usize,
    pub tokens_reset: DateTime<Utc>,
}

impl RateLimitInfo {
    fn from_headers(headers: &HeaderMap<HeaderValue>) -> Self {
        // Check if any rate limit headers exist
        let has_rate_limit_headers = headers
            .keys()
            .any(|k| k.as_str().starts_with("anthropic-ratelimit-"));
    fn from_headers(headers: &HeaderMap<HeaderValue>) -> Result<Self> {
        let tokens_limit = get_header("anthropic-ratelimit-tokens-limit", headers)?.parse()?;
        let requests_limit = get_header("anthropic-ratelimit-requests-limit", headers)?.parse()?;
        let tokens_remaining =
            get_header("anthropic-ratelimit-tokens-remaining", headers)?.parse()?;
        let requests_remaining =
            get_header("anthropic-ratelimit-requests-remaining", headers)?.parse()?;
        let requests_reset = get_header("anthropic-ratelimit-requests-reset", headers)?;
        let tokens_reset = get_header("anthropic-ratelimit-tokens-reset", headers)?;
        let requests_reset = DateTime::parse_from_rfc3339(requests_reset)?.to_utc();
        let tokens_reset = DateTime::parse_from_rfc3339(tokens_reset)?.to_utc();

        if !has_rate_limit_headers {
            return Self {
                requests: None,
                tokens: None,
                input_tokens: None,
                output_tokens: None,
            };
        }

        Self {
            requests: RateLimit::from_headers("requests", headers).ok(),
            tokens: RateLimit::from_headers("tokens", headers).ok(),
            input_tokens: RateLimit::from_headers("input-tokens", headers).ok(),
            output_tokens: RateLimit::from_headers("output-tokens", headers).ok(),
        }
        Ok(Self {
            requests_limit,
            tokens_limit,
            requests_remaining,
            tokens_remaining,
            requests_reset,
            tokens_reset,
        })
    }
}

@@ -447,7 +418,7 @@ pub async fn stream_completion_with_rate_limit_info(
            }
        })
        .boxed();
        Ok((stream, Some(rate_limits)))
        Ok((stream, rate_limits.log_err()))
    } else {
        let mut body = Vec::new();
        response
@@ -737,54 +708,4 @@ impl ApiError {
    pub fn is_rate_limit_error(&self) -> bool {
        matches!(self.error_type.as_str(), "rate_limit_error")
    }

    pub fn match_window_exceeded(&self) -> Option<usize> {
        let Some(ApiErrorCode::InvalidRequestError) = self.code() else {
            return None;
        };

        parse_prompt_too_long(&self.message)
    }
}

pub fn parse_prompt_too_long(message: &str) -> Option<usize> {
    message
        .strip_prefix("prompt is too long: ")?
        .split_once(" tokens")?
        .0
        .parse::<usize>()
        .ok()
}

#[test]
fn test_match_window_exceeded() {
    let error = ApiError {
        error_type: "invalid_request_error".to_string(),
        message: "prompt is too long: 220000 tokens > 200000".to_string(),
    };
    assert_eq!(error.match_window_exceeded(), Some(220_000));

    let error = ApiError {
        error_type: "invalid_request_error".to_string(),
        message: "prompt is too long: 1234953 tokens".to_string(),
    };
    assert_eq!(error.match_window_exceeded(), Some(1234953));

    let error = ApiError {
        error_type: "invalid_request_error".to_string(),
        message: "not a prompt length error".to_string(),
    };
    assert_eq!(error.match_window_exceeded(), None);

    let error = ApiError {
        error_type: "rate_limit_error".to_string(),
        message: "prompt is too long: 12345 tokens".to_string(),
    };
    assert_eq!(error.match_window_exceeded(), None);

    let error = ApiError {
        error_type: "invalid_request_error".to_string(),
        message: "prompt is too long: invalid tokens".to_string(),
    };
    assert_eq!(error.match_window_exceeded(), None);
}

@@ -57,7 +57,7 @@ use std::{
|
||||
time::{Duration, Instant},
|
||||
};
|
||||
use streaming_diff::{CharOperation, LineDiff, LineOperation, StreamingDiff};
|
||||
use telemetry_events::{AssistantEventData, AssistantKind, AssistantPhase};
|
||||
use telemetry_events::{AssistantEvent, AssistantKind, AssistantPhase};
|
||||
use terminal_view::terminal_panel::TerminalPanel;
|
||||
use text::{OffsetRangeExt, ToPoint as _};
|
||||
use theme::ThemeSettings;
|
||||
@@ -315,7 +315,7 @@ impl InlineAssistant {
|
||||
if let Some(ConfiguredModel { model, .. }) =
|
||||
LanguageModelRegistry::read_global(cx).default_model()
|
||||
{
|
||||
self.telemetry.report_assistant_event(AssistantEventData {
|
||||
self.telemetry.report_assistant_event(AssistantEvent {
|
||||
conversation_id: None,
|
||||
kind: AssistantKind::Inline,
|
||||
phase: AssistantPhase::Invoked,
|
||||
@@ -892,7 +892,7 @@ impl InlineAssistant {
|
||||
.map(|language| language.name())
|
||||
});
|
||||
report_assistant_event(
|
||||
AssistantEventData {
|
||||
AssistantEvent {
|
||||
conversation_id: None,
|
||||
kind: AssistantKind::Inline,
|
||||
message_id,
|
||||
@@ -3148,7 +3148,7 @@ impl CodegenAlternative {
|
||||
|
||||
let error_message = result.as_ref().err().map(|error| error.to_string());
|
||||
report_assistant_event(
|
||||
AssistantEventData {
|
||||
AssistantEvent {
|
||||
conversation_id: None,
|
||||
message_id,
|
||||
kind: AssistantKind::Inline,
|
||||
|
||||
@@ -27,7 +27,7 @@ use std::{
|
||||
sync::Arc,
|
||||
time::{Duration, Instant},
|
||||
};
|
||||
use telemetry_events::{AssistantEventData, AssistantKind, AssistantPhase};
|
||||
use telemetry_events::{AssistantEvent, AssistantKind, AssistantPhase};
|
||||
use terminal::Terminal;
|
||||
use terminal_view::TerminalView;
|
||||
use theme::ThemeSettings;
|
||||
@@ -324,7 +324,7 @@ impl TerminalInlineAssistant {
|
||||
let codegen = assist.codegen.read(cx);
|
||||
let executor = cx.background_executor().clone();
|
||||
report_assistant_event(
|
||||
AssistantEventData {
|
||||
AssistantEvent {
|
||||
conversation_id: None,
|
||||
kind: AssistantKind::InlineTerminal,
|
||||
message_id: codegen.message_id.clone(),
|
||||
@@ -1183,7 +1183,7 @@ impl Codegen {
|
||||
|
||||
let error_message = result.as_ref().err().map(|error| error.to_string());
|
||||
report_assistant_event(
|
||||
AssistantEventData {
|
||||
AssistantEvent {
|
||||
conversation_id: None,
|
||||
kind: AssistantKind::InlineTerminal,
|
||||
message_id,
|
||||
|
||||
@@ -40,7 +40,7 @@ use std::{
|
||||
sync::Arc,
|
||||
time::{Duration, Instant},
|
||||
};
|
||||
use telemetry_events::{AssistantEventData, AssistantKind, AssistantPhase};
|
||||
use telemetry_events::{AssistantEvent, AssistantKind, AssistantPhase};
|
||||
use text::{BufferSnapshot, ToPoint};
|
||||
use ui::IconName;
|
||||
use util::{ResultExt, TryFutureExt, post_inc};
|
||||
@@ -2498,7 +2498,7 @@ impl AssistantContext {
|
||||
.language()
|
||||
.map(|language| language.name());
|
||||
report_assistant_event(
|
||||
AssistantEventData {
|
||||
AssistantEvent {
|
||||
conversation_id: Some(this.id.0.clone()),
|
||||
kind: AssistantKind::Panel,
|
||||
phase: AssistantPhase::Response,
|
||||
|
||||
@@ -10,7 +10,7 @@ use collections::{BTreeSet, HashMap, HashSet, hash_map};
|
||||
use editor::{
|
||||
Anchor, Editor, EditorEvent, MenuInlineCompletionsPolicy, ProposedChangeLocation,
|
||||
ProposedChangesEditor, RowExt, ToOffset as _, ToPoint,
|
||||
actions::{MoveToEndOfLine, Newline, ShowCompletions},
|
||||
actions::{FoldAt, MoveToEndOfLine, Newline, ShowCompletions, UnfoldAt},
|
||||
display_map::{
|
||||
BlockContext, BlockId, BlockPlacement, BlockProperties, BlockStyle, Crease, CreaseMetadata,
|
||||
CustomBlockId, FoldId, RenderBlock, ToDisplayPoint,
|
||||
@@ -1053,7 +1053,7 @@ impl ContextEditor {
|
||||
let creases = editor.insert_creases(creases, cx);
|
||||
|
||||
for buffer_row in buffer_rows_to_fold.into_iter().rev() {
|
||||
editor.fold_at(buffer_row, window, cx);
|
||||
editor.fold_at(&FoldAt { buffer_row }, window, cx);
|
||||
}
|
||||
|
||||
creases
|
||||
@@ -1109,7 +1109,7 @@ impl ContextEditor {
|
||||
buffer_rows_to_fold.clear();
|
||||
}
|
||||
for buffer_row in buffer_rows_to_fold.into_iter().rev() {
|
||||
editor.fold_at(buffer_row, window, cx);
|
||||
editor.fold_at(&FoldAt { buffer_row }, window, cx);
|
||||
}
|
||||
});
|
||||
}
|
||||
@@ -1844,7 +1844,13 @@ impl ContextEditor {
|
||||
|_, _, _, _| Empty.into_any(),
|
||||
);
|
||||
editor.insert_creases(vec![crease], cx);
|
||||
editor.fold_at(start_row, window, cx);
|
||||
editor.fold_at(
|
||||
&FoldAt {
|
||||
buffer_row: start_row,
|
||||
},
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -2036,7 +2042,7 @@ impl ContextEditor {
|
||||
cx,
|
||||
);
|
||||
for buffer_row in buffer_rows_to_fold.into_iter().rev() {
|
||||
editor.fold_at(buffer_row, window, cx);
|
||||
editor.fold_at(&FoldAt { buffer_row }, window, cx);
|
||||
}
|
||||
}
|
||||
});
|
||||
@@ -2787,7 +2793,7 @@ fn render_thought_process_fold_icon_button(
|
||||
let button = match status {
|
||||
ThoughtProcessStatus::Pending => button
|
||||
.child(
|
||||
Icon::new(IconName::LightBulb)
|
||||
Icon::new(IconName::Brain)
|
||||
.size(IconSize::Small)
|
||||
.color(Color::Muted),
|
||||
)
|
||||
@@ -2802,7 +2808,7 @@ fn render_thought_process_fold_icon_button(
|
||||
),
|
||||
ThoughtProcessStatus::Completed => button
|
||||
.style(ButtonStyle::Filled)
|
||||
.child(Icon::new(IconName::LightBulb).size(IconSize::Small))
|
||||
.child(Icon::new(IconName::Brain).size(IconSize::Small))
|
||||
.child(Label::new("Thought Process").single_line()),
|
||||
};
|
||||
|
||||
@@ -2814,7 +2820,7 @@ fn render_thought_process_fold_icon_button(
|
||||
.start
|
||||
.to_point(&editor.buffer().read(cx).read(cx));
|
||||
let buffer_row = MultiBufferRow(buffer_start.row);
|
||||
editor.unfold_at(buffer_row, window, cx);
|
||||
editor.unfold_at(&UnfoldAt { buffer_row }, window, cx);
|
||||
})
|
||||
.ok();
|
||||
})
|
||||
@@ -2841,7 +2847,7 @@ fn render_fold_icon_button(
|
||||
.start
|
||||
.to_point(&editor.buffer().read(cx).read(cx));
|
||||
let buffer_row = MultiBufferRow(buffer_start.row);
|
||||
editor.unfold_at(buffer_row, window, cx);
|
||||
editor.unfold_at(&UnfoldAt { buffer_row }, window, cx);
|
||||
})
|
||||
.ok();
|
||||
})
|
||||
@@ -2901,7 +2907,7 @@ fn quote_selection_fold_placeholder(title: String, editor: WeakEntity<Editor>) -
|
||||
.start
|
||||
.to_point(&editor.buffer().read(cx).read(cx));
|
||||
let buffer_row = MultiBufferRow(buffer_start.row);
|
||||
editor.unfold_at(buffer_row, window, cx);
|
||||
editor.unfold_at(&UnfoldAt { buffer_row }, window, cx);
|
||||
})
|
||||
.ok();
|
||||
})
|
||||
|
||||
@@ -120,14 +120,13 @@ impl SlashCommandCompletionProvider {
|
||||
) as Arc<_>
|
||||
});
|
||||
Some(project::Completion {
|
||||
replace_range: name_range.clone(),
|
||||
old_range: name_range.clone(),
|
||||
documentation: Some(CompletionDocumentation::SingleLine(
|
||||
command.description().into(),
|
||||
)),
|
||||
new_text,
|
||||
label: command.label(cx),
|
||||
icon_path: None,
|
||||
insert_text_mode: None,
|
||||
confirm,
|
||||
source: CompletionSource::Custom,
|
||||
})
|
||||
@@ -219,7 +218,7 @@ impl SlashCommandCompletionProvider {
|
||||
}
|
||||
|
||||
project::Completion {
|
||||
replace_range: if new_argument.replace_previous_arguments {
|
||||
old_range: if new_argument.replace_previous_arguments {
|
||||
argument_range.clone()
|
||||
} else {
|
||||
last_argument_range.clone()
|
||||
@@ -229,7 +228,6 @@ impl SlashCommandCompletionProvider {
|
||||
new_text,
|
||||
documentation: None,
|
||||
confirm,
|
||||
insert_text_mode: None,
|
||||
source: CompletionSource::Custom,
|
||||
}
|
||||
})
|
||||
|
||||
@@ -1,50 +1,46 @@
[package]
name = "eval"
name = "assistant_eval"
version = "0.1.0"
publish.workspace = true
edition.workspace = true
publish.workspace = true
license = "GPL-3.0-or-later"

[lints]
workspace = true

[[bin]]
name = "assistant_eval"
path = "src/main.rs"

[dependencies]
agent.workspace = true
anyhow.workspace = true
async-watch.workspace = true
assistant_tool.workspace = true
assistant_tools.workspace = true
chrono.workspace = true
clap.workspace = true
client.workspace = true
collections.workspace = true
context_server.workspace = true
dap.workspace = true
env_logger.workspace = true
extension.workspace = true
fs.workspace = true
futures.workspace = true
gpui.workspace = true
gpui_tokio.workspace = true
handlebars.workspace = true
language.workspace = true
language_extension.workspace = true
language_model.workspace = true
language_models.workspace = true
languages.workspace = true
node_runtime.workspace = true
paths.workspace = true
project.workspace = true
prompt_store.workspace = true
release_channel.workspace = true
reqwest_client.workspace = true
serde.workspace = true
serde_json.workspace = true
serde_json_lenient.workspace = true
settings.workspace = true
shellexpand.workspace = true
toml.workspace = true
unindent.workspace = true
smol.workspace = true
tempfile.workspace = true
util.workspace = true
walkdir.workspace = true
workspace-hack.workspace = true

[[bin]]
name = "eval"
path = "src/eval.rs"

[lints]
workspace = true

crates/assistant_eval/README.md (new file, 68 lines)
@@ -0,0 +1,68 @@
# Tool Evals

A framework for evaluating and benchmarking agent panel generations.

## Overview

Tool Evals provides a headless environment for running assistant evaluations on code repositories. It automates the process of:

1. Setting up test code and repositories
2. Sending prompts to language models
3. Allowing the assistant to use tools to modify code
4. Collecting metrics on performance and tool usage
5. Evaluating results against known good solutions

## How It Works

The system consists of several key components:

- **Eval**: Loads exercises from the zed-ace-framework repository, creates temporary repos, and executes evaluations
- **HeadlessAssistant**: Provides a headless environment for running the AI assistant
- **Judge**: Evaluates AI-generated solutions against reference implementations and assigns scores
- **Templates**: Defines evaluation frameworks for different tasks (Project Creation, Code Modification, Conversational Guidance)

## Setup Requirements

### Prerequisites

- Rust and Cargo
- Git
- Python (for report generation)
- Network access to clone repositories
- Appropriate API keys for language models and git services (Anthropic, GitHub, etc.)

### Environment Variables

Ensure you have the required API keys set, either from a dev run of Zed or via these environment variables (an example export is shown below):

- `ZED_ANTHROPIC_API_KEY` for Claude models
- `ZED_GITHUB_API_KEY` for GitHub API (or similar)

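For instance, in a shell they might be exported like so (the values here are placeholders, not real credentials):

```bash
# Hypothetical keys — substitute your own values.
export ZED_ANTHROPIC_API_KEY="sk-ant-placeholder"
export ZED_GITHUB_API_KEY="ghp_placeholder"
```
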
## Usage

### Running Evaluations

```bash
# Run all tests
cargo run -p assistant_eval -- --all

# Run only specific languages
cargo run -p assistant_eval -- --all --languages python,rust

# Limit concurrent evaluations
cargo run -p assistant_eval -- --all --concurrency 5

# Limit number of exercises per language
cargo run -p assistant_eval -- --all --max-exercises-per-language 3
```

### Evaluation Template Types

The system supports three types of evaluation templates:

1. **ProjectCreation**: Tests the model's ability to create new implementations from scratch
2. **CodeModification**: Tests the model's ability to modify existing code to meet new requirements
3. **ConversationalGuidance**: Tests the model's ability to provide guidance without writing code

### Support Repo

The [zed-industries/zed-ace-framework](https://github.com/zed-industries/zed-ace-framework) repo contains the analytics and reporting scripts; it can be cloned as shown below.

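If you want those scripts locally, cloning the repository is enough (standard git access is assumed):

```bash
git clone https://github.com/zed-industries/zed-ace-framework.git
```
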
crates/assistant_eval/build.rs (new file, 52 lines)
@@ -0,0 +1,52 @@
// Copied from `crates/zed/build.rs`, with removal of code for including the zed icon on windows.

use std::process::Command;

fn main() {
    if cfg!(target_os = "macos") {
        println!("cargo:rustc-env=MACOSX_DEPLOYMENT_TARGET=10.15.7");

        // Weakly link ReplayKit to ensure Zed can be used on macOS 10.15+.
        println!("cargo:rustc-link-arg=-Wl,-weak_framework,ReplayKit");

        // Seems to be required to enable Swift concurrency
        println!("cargo:rustc-link-arg=-Wl,-rpath,/usr/lib/swift");

        // Register exported Objective-C selectors, protocols, etc
        println!("cargo:rustc-link-arg=-Wl,-ObjC");
    }

    // Populate git sha environment variable if git is available
    println!("cargo:rerun-if-changed=../../.git/logs/HEAD");
    println!(
        "cargo:rustc-env=TARGET={}",
        std::env::var("TARGET").unwrap()
    );
    if let Ok(output) = Command::new("git").args(["rev-parse", "HEAD"]).output() {
        if output.status.success() {
            let git_sha = String::from_utf8_lossy(&output.stdout);
            let git_sha = git_sha.trim();

            println!("cargo:rustc-env=ZED_COMMIT_SHA={git_sha}");

            if let Ok(build_profile) = std::env::var("PROFILE") {
                if build_profile == "release" {
                    // This is currently the best way to make `cargo build ...`'s build script
                    // to print something to stdout without extra verbosity.
                    println!(
                        "cargo:warning=Info: using '{git_sha}' hash for ZED_COMMIT_SHA env var"
                    );
                }
            }
        }
    }

    #[cfg(target_os = "windows")]
    {
        #[cfg(target_env = "msvc")]
        {
            // todo(windows): This is to avoid stack overflow. Remove it when solved.
            println!("cargo:rustc-link-arg=/stack:{}", 8 * 1024 * 1024);
        }
    }
}

crates/assistant_eval/src/eval.rs (new file, 548 lines)
@@ -0,0 +1,548 @@
use crate::git_commands::{run_git, setup_temp_repo};
use crate::headless_assistant::{HeadlessAppState, HeadlessAssistant};
use crate::{get_exercise_language, get_exercise_name, templates_eval::Template};
use agent::RequestKind;
use anyhow::{Result, anyhow};
use collections::HashMap;
use gpui::{App, Task};
use language_model::{LanguageModel, TokenUsage};
use serde::{Deserialize, Serialize};
use std::{
    fs,
    io::Write,
    path::{Path, PathBuf},
    sync::Arc,
    time::{Duration, SystemTime},
};

#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct EvalResult {
    pub exercise_name: String,
    pub template_name: String,
    pub score: String,
    pub diff: String,
    pub assistant_response: String,
    pub elapsed_time_ms: u128,
    pub timestamp: u128,
    // Token usage fields
    pub input_tokens: usize,
    pub output_tokens: usize,
    pub total_tokens: usize,
    pub tool_use_counts: usize,
    pub judge_model_name: String, // Added field for judge model name
}

pub struct EvalOutput {
    pub diff: String,
    pub last_message: String,
    pub elapsed_time: Duration,
    pub assistant_response_count: usize,
    pub tool_use_counts: HashMap<Arc<str>, u32>,
    pub token_usage: TokenUsage,
}

#[derive(Deserialize)]
pub struct EvalSetup {
    pub url: String,
    pub base_sha: String,
}

pub struct Eval {
    pub repo_path: PathBuf,
    pub eval_setup: EvalSetup,
    pub user_prompt: String,
}

impl Eval {
    // Keep this method for potential future use, but mark it as intentionally unused
    #[allow(dead_code)]
    pub async fn load(_name: String, path: PathBuf, repos_dir: &Path) -> Result<Self> {
        let prompt_path = path.join("prompt.txt");
        let user_prompt = smol::unblock(|| std::fs::read_to_string(prompt_path)).await?;
        let setup_path = path.join("setup.json");
        let setup_contents = smol::unblock(|| std::fs::read_to_string(setup_path)).await?;
        let eval_setup = serde_json_lenient::from_str_lenient::<EvalSetup>(&setup_contents)?;

        // Move this internal function inside the load method since it's only used here
        fn repo_dir_name(url: &str) -> String {
            url.trim_start_matches("https://")
                .replace(|c: char| !c.is_alphanumeric(), "_")
        }

        let repo_path = repos_dir.join(repo_dir_name(&eval_setup.url));

        Ok(Eval {
            repo_path,
            eval_setup,
            user_prompt,
        })
    }

    pub fn run(
        self,
        app_state: Arc<HeadlessAppState>,
        model: Arc<dyn LanguageModel>,
        cx: &mut App,
    ) -> Task<Result<EvalOutput>> {
        cx.spawn(async move |cx| {
            run_git(&self.repo_path, &["checkout", &self.eval_setup.base_sha]).await?;

            let (assistant, done_rx) =
                cx.update(|cx| HeadlessAssistant::new(app_state.clone(), cx))??;

            let _worktree = assistant
                .update(cx, |assistant, cx| {
                    assistant.project.update(cx, |project, cx| {
                        project.create_worktree(&self.repo_path, true, cx)
                    })
                })?
                .await?;

            let start_time = std::time::SystemTime::now();

            let (system_prompt_context, load_error) = cx
                .update(|cx| {
                    assistant
                        .read(cx)
                        .thread
                        .read(cx)
                        .load_system_prompt_context(cx)
                })?
                .await;

            if let Some(load_error) = load_error {
                return Err(anyhow!("{:?}", load_error));
            };

            assistant.update(cx, |assistant, cx| {
                assistant.thread.update(cx, |thread, cx| {
                    let context = vec![];
                    thread.insert_user_message(self.user_prompt.clone(), context, None, cx);
                    thread.set_system_prompt_context(system_prompt_context);
                    thread.send_to_model(model, RequestKind::Chat, cx);
                });
            })?;

            done_rx.recv().await??;

            // Add this section to check untracked files
            println!("Checking for untracked files:");
            let untracked = run_git(
                &self.repo_path,
                &["ls-files", "--others", "--exclude-standard"],
            )
            .await?;
            if untracked.is_empty() {
                println!("No untracked files found");
            } else {
                // Add all files to git so they appear in the diff
                println!("Adding untracked files to git");
                run_git(&self.repo_path, &["add", "."]).await?;
            }

            // get git status
            let _status = run_git(&self.repo_path, &["status", "--short"]).await?;

            let elapsed_time = start_time.elapsed()?;

            // Get diff of staged changes (the files we just added)
            let staged_diff = run_git(&self.repo_path, &["diff", "--staged"]).await?;

            // Get diff of unstaged changes
            let unstaged_diff = run_git(&self.repo_path, &["diff"]).await?;

            // Combine both diffs
            let diff = if unstaged_diff.is_empty() {
                staged_diff
            } else if staged_diff.is_empty() {
                unstaged_diff
|
||||
} else {
|
||||
format!(
|
||||
"# Staged changes\n{}\n\n# Unstaged changes\n{}",
|
||||
staged_diff, unstaged_diff
|
||||
)
|
||||
};
|
||||
|
||||
assistant.update(cx, |assistant, cx| {
|
||||
let thread = assistant.thread.read(cx);
|
||||
let last_message = thread.messages().last().unwrap();
|
||||
if last_message.role != language_model::Role::Assistant {
|
||||
return Err(anyhow!("Last message is not from assistant"));
|
||||
}
|
||||
let assistant_response_count = thread
|
||||
.messages()
|
||||
.filter(|message| message.role == language_model::Role::Assistant)
|
||||
.count();
|
||||
Ok(EvalOutput {
|
||||
diff,
|
||||
last_message: last_message.to_string(),
|
||||
elapsed_time,
|
||||
assistant_response_count,
|
||||
tool_use_counts: assistant.tool_use_counts.clone(),
|
||||
token_usage: thread.cumulative_token_usage(),
|
||||
})
|
||||
})?
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl EvalOutput {
|
||||
// Keep this method for potential future use, but mark it as intentionally unused
|
||||
#[allow(dead_code)]
|
||||
pub fn save_to_directory(&self, output_dir: &Path, eval_output_value: String) -> Result<()> {
|
||||
// Create the output directory if it doesn't exist
|
||||
fs::create_dir_all(&output_dir)?;
|
||||
|
||||
// Save the diff to a file
|
||||
let diff_path = output_dir.join("diff.patch");
|
||||
let mut diff_file = fs::File::create(&diff_path)?;
|
||||
diff_file.write_all(self.diff.as_bytes())?;
|
||||
|
||||
// Save the last message to a file
|
||||
let message_path = output_dir.join("assistant_response.txt");
|
||||
let mut message_file = fs::File::create(&message_path)?;
|
||||
message_file.write_all(self.last_message.as_bytes())?;
|
||||
|
||||
// Current metrics for this run
|
||||
let current_metrics = serde_json::json!({
|
||||
"elapsed_time_ms": self.elapsed_time.as_millis(),
|
||||
"assistant_response_count": self.assistant_response_count,
|
||||
"tool_use_counts": self.tool_use_counts,
|
||||
"token_usage": self.token_usage,
|
||||
"eval_output_value": eval_output_value,
|
||||
});
|
||||
|
||||
// Get current timestamp in milliseconds
|
||||
let timestamp = std::time::SystemTime::now()
|
||||
.duration_since(std::time::UNIX_EPOCH)?
|
||||
.as_millis()
|
||||
.to_string();
|
||||
|
||||
// Path to metrics file
|
||||
let metrics_path = output_dir.join("metrics.json");
|
||||
|
||||
// Load existing metrics if the file exists, or create a new object
|
||||
let mut historical_metrics = if metrics_path.exists() {
|
||||
let metrics_content = fs::read_to_string(&metrics_path)?;
|
||||
serde_json::from_str::<serde_json::Value>(&metrics_content)
|
||||
.unwrap_or_else(|_| serde_json::json!({}))
|
||||
} else {
|
||||
serde_json::json!({})
|
||||
};
|
||||
|
||||
// Add new run with timestamp as key
|
||||
if let serde_json::Value::Object(ref mut map) = historical_metrics {
|
||||
map.insert(timestamp, current_metrics);
|
||||
}
|
||||
|
||||
// Write updated metrics back to file
|
||||
let metrics_json = serde_json::to_string_pretty(&historical_metrics)?;
|
||||
let mut metrics_file = fs::File::create(&metrics_path)?;
|
||||
metrics_file.write_all(metrics_json.as_bytes())?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn read_instructions(exercise_path: &Path) -> Result<String> {
|
||||
let instructions_path = exercise_path.join(".docs").join("instructions.md");
|
||||
println!("Reading instructions from: {}", instructions_path.display());
|
||||
let instructions = smol::unblock(move || std::fs::read_to_string(&instructions_path)).await?;
|
||||
Ok(instructions)
|
||||
}
|
||||
|
||||
pub async fn read_example_solution(exercise_path: &Path, language: &str) -> Result<String> {
|
||||
// Map the language to the file extension
|
||||
let language_extension = match language {
|
||||
"python" => "py",
|
||||
"go" => "go",
|
||||
"rust" => "rs",
|
||||
"typescript" => "ts",
|
||||
"javascript" => "js",
|
||||
"ruby" => "rb",
|
||||
"php" => "php",
|
||||
"bash" => "sh",
|
||||
"multi" => "diff",
|
||||
"internal" => "diff",
|
||||
_ => return Err(anyhow!("Unsupported language: {}", language)),
|
||||
};
|
||||
let example_path = exercise_path
|
||||
.join(".meta")
|
||||
.join(format!("example.{}", language_extension));
|
||||
println!("Reading example solution from: {}", example_path.display());
|
||||
let example = smol::unblock(move || std::fs::read_to_string(&example_path)).await?;
|
||||
Ok(example)
|
||||
}
|
||||
|
||||
pub async fn save_eval_results(exercise_path: &Path, results: Vec<EvalResult>) -> Result<()> {
|
||||
let eval_dir = exercise_path.join("evaluation");
|
||||
fs::create_dir_all(&eval_dir)?;
|
||||
|
||||
let eval_file = eval_dir.join("evals.json");
|
||||
|
||||
println!("Saving evaluation results to: {}", eval_file.display());
|
||||
println!(
|
||||
"Results to save: {} evaluations for exercise path: {}",
|
||||
results.len(),
|
||||
exercise_path.display()
|
||||
);
|
||||
|
||||
// Check file existence before reading/writing
|
||||
if eval_file.exists() {
|
||||
println!("Existing evals.json file found, will update it");
|
||||
} else {
|
||||
println!("No existing evals.json file found, will create new one");
|
||||
}
|
||||
|
||||
// Structure to organize evaluations by test name and timestamp
|
||||
let mut eval_data: serde_json::Value = if eval_file.exists() {
|
||||
let content = fs::read_to_string(&eval_file)?;
|
||||
serde_json::from_str(&content).unwrap_or_else(|_| serde_json::json!({}))
|
||||
} else {
|
||||
serde_json::json!({})
|
||||
};
|
||||
|
||||
// Get current timestamp for this batch of results
|
||||
let timestamp = SystemTime::now()
|
||||
.duration_since(SystemTime::UNIX_EPOCH)?
|
||||
.as_millis()
|
||||
.to_string();
|
||||
|
||||
// Group the new results by test name (exercise name)
|
||||
for result in results {
|
||||
let exercise_name = &result.exercise_name;
|
||||
let template_name = &result.template_name;
|
||||
|
||||
println!(
|
||||
"Adding result: exercise={}, template={}",
|
||||
exercise_name, template_name
|
||||
);
|
||||
|
||||
// Ensure the exercise entry exists
|
||||
if eval_data.get(exercise_name).is_none() {
|
||||
eval_data[exercise_name] = serde_json::json!({});
|
||||
}
|
||||
|
||||
// Ensure the timestamp entry exists as an object
|
||||
if eval_data[exercise_name].get(×tamp).is_none() {
|
||||
eval_data[exercise_name][×tamp] = serde_json::json!({});
|
||||
}
|
||||
|
||||
// Add this result under the timestamp with template name as key
|
||||
eval_data[exercise_name][×tamp][template_name] = serde_json::to_value(&result)?;
|
||||
}
|
||||
|
||||
// Write back to file with pretty formatting
|
||||
let json_content = serde_json::to_string_pretty(&eval_data)?;
|
||||
match fs::write(&eval_file, json_content) {
|
||||
Ok(_) => println!("✓ Successfully saved results to {}", eval_file.display()),
|
||||
Err(e) => println!("✗ Failed to write results file: {}", e),
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn run_exercise_eval(
|
||||
exercise_path: PathBuf,
|
||||
template: Template,
|
||||
model: Arc<dyn LanguageModel>,
|
||||
judge_model: Arc<dyn LanguageModel>,
|
||||
app_state: Arc<HeadlessAppState>,
|
||||
base_sha: String,
|
||||
_framework_path: PathBuf,
|
||||
cx: gpui::AsyncApp,
|
||||
) -> Result<EvalResult> {
|
||||
let exercise_name = get_exercise_name(&exercise_path);
|
||||
let language = get_exercise_language(&exercise_path)?;
|
||||
let mut instructions = read_instructions(&exercise_path).await?;
|
||||
instructions.push_str(&format!(
|
||||
"\n\nWhen writing the code for this prompt, use {} to achieve the goal.",
|
||||
language
|
||||
));
|
||||
let example_solution = read_example_solution(&exercise_path, &language).await?;
|
||||
|
||||
println!(
|
||||
"Running evaluation for exercise: {} with template: {}",
|
||||
exercise_name, template.name
|
||||
);
|
||||
|
||||
// Create temporary directory with exercise files
|
||||
let temp_dir = setup_temp_repo(&exercise_path, &base_sha).await?;
|
||||
let temp_path = temp_dir.path().to_path_buf();
|
||||
|
||||
if template.name == "ProjectCreation" {
|
||||
for entry in fs::read_dir(&temp_path)? {
|
||||
let entry = entry?;
|
||||
let path = entry.path();
|
||||
|
||||
// Skip directories that start with dot (like .docs, .meta, .git)
|
||||
if path.is_dir()
|
||||
&& path
|
||||
.file_name()
|
||||
.and_then(|name| name.to_str())
|
||||
.map(|name| name.starts_with("."))
|
||||
.unwrap_or(false)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
// Delete regular files
|
||||
if path.is_file() {
|
||||
println!(" Deleting file: {}", path.display());
|
||||
fs::remove_file(path)?;
|
||||
}
|
||||
}
|
||||
|
||||
// Commit the deletion so it shows up in the diff
|
||||
run_git(&temp_path, &["add", "."]).await?;
|
||||
run_git(
|
||||
&temp_path,
|
||||
&["commit", "-m", "Remove root files for clean slate"],
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
let local_commit_sha = run_git(&temp_path, &["rev-parse", "HEAD"]).await?;
|
||||
|
||||
// Prepare prompt based on template
|
||||
let prompt = match template.name {
|
||||
"ProjectCreation" => format!(
|
||||
"I need to create a new implementation for this exercise. Please create all the necessary files in the best location.\n\n{}",
|
||||
instructions
|
||||
),
|
||||
"CodeModification" => format!(
|
||||
"I need help updating my code to meet these requirements. Please modify the appropriate files:\n\n{}",
|
||||
instructions
|
||||
),
|
||||
"ConversationalGuidance" => format!(
|
||||
"I'm trying to solve this coding exercise but I'm not sure where to start. Can you help me understand the requirements and guide me through the solution process without writing code for me?\n\n{}",
|
||||
instructions
|
||||
),
|
||||
_ => instructions.clone(),
|
||||
};
|
||||
|
||||
let start_time = SystemTime::now();
|
||||
|
||||
// Create a basic eval struct to work with the existing system
|
||||
let eval = Eval {
|
||||
repo_path: temp_path.clone(),
|
||||
eval_setup: EvalSetup {
|
||||
url: format!("file://{}", temp_path.display()),
|
||||
base_sha: local_commit_sha, // Use the local commit SHA instead of the framework base SHA
|
||||
},
|
||||
user_prompt: prompt,
|
||||
};
|
||||
|
||||
// Run the evaluation
|
||||
let eval_output = cx
|
||||
.update(|cx| eval.run(app_state.clone(), model.clone(), cx))?
|
||||
.await?;
|
||||
|
||||
// Get diff from git
|
||||
let diff = eval_output.diff.clone();
|
||||
|
||||
// For project creation template, we need to compare with reference implementation
|
||||
let judge_output = if template.name == "ProjectCreation" {
|
||||
let project_judge_prompt = template
|
||||
.content
|
||||
.replace(
|
||||
"<!-- ```requirements go here``` -->",
|
||||
&format!("```\n{}\n```", instructions),
|
||||
)
|
||||
.replace(
|
||||
"<!-- ```reference code goes here``` -->",
|
||||
&format!("```{}\n{}\n```", language, example_solution),
|
||||
)
|
||||
.replace(
|
||||
"<!-- ```git diff goes here``` -->",
|
||||
&format!("```\n{}\n```", diff),
|
||||
);
|
||||
|
||||
// Use the run_with_prompt method which we'll add to judge.rs
|
||||
let judge = crate::judge::Judge {
|
||||
original_diff: None,
|
||||
original_message: Some(project_judge_prompt),
|
||||
model: judge_model.clone(),
|
||||
};
|
||||
|
||||
cx.update(|cx| judge.run_with_prompt(cx))?.await?
|
||||
} else if template.name == "CodeModification" {
|
||||
// For CodeModification, we'll compare the example solution with the LLM-generated solution
|
||||
let code_judge_prompt = template
|
||||
.content
|
||||
.replace(
|
||||
"<!-- ```reference code goes here``` -->",
|
||||
&format!("```{}\n{}\n```", language, example_solution),
|
||||
)
|
||||
.replace(
|
||||
"<!-- ```git diff goes here``` -->",
|
||||
&format!("```\n{}\n```", diff),
|
||||
);
|
||||
|
||||
// Use the run_with_prompt method
|
||||
let judge = crate::judge::Judge {
|
||||
original_diff: None,
|
||||
original_message: Some(code_judge_prompt),
|
||||
model: judge_model.clone(),
|
||||
};
|
||||
|
||||
cx.update(|cx| judge.run_with_prompt(cx))?.await?
|
||||
} else {
|
||||
// Conversational template
|
||||
let conv_judge_prompt = template
|
||||
.content
|
||||
.replace(
|
||||
"<!-- ```query goes here``` -->",
|
||||
&format!("```\n{}\n```", instructions),
|
||||
)
|
||||
.replace(
|
||||
"<!-- ```transcript goes here``` -->",
|
||||
&format!("```\n{}\n```", eval_output.last_message),
|
||||
)
|
||||
.replace(
|
||||
"<!-- ```git diff goes here``` -->",
|
||||
&format!("```\n{}\n```", diff),
|
||||
);
|
||||
|
||||
// Use the run_with_prompt method for consistency
|
||||
let judge = crate::judge::Judge {
|
||||
original_diff: None,
|
||||
original_message: Some(conv_judge_prompt),
|
||||
model: judge_model.clone(),
|
||||
};
|
||||
|
||||
cx.update(|cx| judge.run_with_prompt(cx))?.await?
|
||||
};
|
||||
|
||||
let elapsed_time = start_time.elapsed()?;
|
||||
|
||||
// Calculate total tokens as the sum of input and output tokens
|
||||
let input_tokens = eval_output.token_usage.input_tokens;
|
||||
let output_tokens = eval_output.token_usage.output_tokens;
|
||||
let tool_use_counts = eval_output.tool_use_counts.values().sum::<u32>();
|
||||
let total_tokens = input_tokens + output_tokens;
|
||||
|
||||
// Get judge model name
|
||||
let judge_model_name = judge_model.id().0.to_string();
|
||||
|
||||
// Save results to evaluation directory
|
||||
let result = EvalResult {
|
||||
exercise_name: exercise_name.clone(),
|
||||
template_name: template.name.to_string(),
|
||||
score: judge_output.trim().to_string(),
|
||||
diff,
|
||||
assistant_response: eval_output.last_message.clone(),
|
||||
elapsed_time_ms: elapsed_time.as_millis(),
|
||||
timestamp: SystemTime::now()
|
||||
.duration_since(SystemTime::UNIX_EPOCH)?
|
||||
.as_millis(),
|
||||
// Convert u32 token counts to usize
|
||||
input_tokens: input_tokens.try_into().unwrap(),
|
||||
output_tokens: output_tokens.try_into().unwrap(),
|
||||
total_tokens: total_tokens.try_into().unwrap(),
|
||||
tool_use_counts: tool_use_counts.try_into().unwrap(),
|
||||
judge_model_name, // Add judge model name to result
|
||||
};
|
||||
|
||||
Ok(result)
|
||||
}
|
||||
149
crates/assistant_eval/src/get_exercise.rs
Normal file
@@ -0,0 +1,149 @@
|
||||
use anyhow::{Result, anyhow};
|
||||
use std::{
|
||||
fs,
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
|
||||
pub fn get_exercise_name(exercise_path: &Path) -> String {
|
||||
exercise_path
|
||||
.file_name()
|
||||
.unwrap_or_default()
|
||||
.to_string_lossy()
|
||||
.to_string()
|
||||
}
|
||||
|
||||
pub fn get_exercise_language(exercise_path: &Path) -> Result<String> {
|
||||
// Extract the language from path (data/python/exercises/... => python)
|
||||
let parts: Vec<_> = exercise_path.components().collect();
|
||||
|
||||
for (i, part) in parts.iter().enumerate() {
|
||||
if i > 0 && part.as_os_str() == "eval_code" {
|
||||
if i + 1 < parts.len() {
|
||||
let language = parts[i + 1].as_os_str().to_string_lossy().to_string();
|
||||
return Ok(language);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Err(anyhow!(
|
||||
"Could not determine language from path: {:?}",
|
||||
exercise_path
|
||||
))
|
||||
}
|
||||
|
||||
pub fn find_exercises(
|
||||
framework_path: &Path,
|
||||
languages: &[&str],
|
||||
max_per_language: Option<usize>,
|
||||
) -> Result<Vec<PathBuf>> {
|
||||
let mut all_exercises = Vec::new();
|
||||
|
||||
println!("Searching for exercises in languages: {:?}", languages);
|
||||
|
||||
for language in languages {
|
||||
let language_dir = framework_path
|
||||
.join("eval_code")
|
||||
.join(language)
|
||||
.join("exercises")
|
||||
.join("practice");
|
||||
|
||||
println!("Checking language directory: {:?}", language_dir);
|
||||
if !language_dir.exists() {
|
||||
println!("Warning: Language directory not found: {:?}", language_dir);
|
||||
continue;
|
||||
}
|
||||
|
||||
let mut exercises = Vec::new();
|
||||
match fs::read_dir(&language_dir) {
|
||||
Ok(entries) => {
|
||||
for entry_result in entries {
|
||||
match entry_result {
|
||||
Ok(entry) => {
|
||||
let path = entry.path();
|
||||
|
||||
if path.is_dir() {
|
||||
// Special handling for "internal" directory
|
||||
if *language == "internal" {
|
||||
// Check for repo_info.json to validate it's an internal exercise
|
||||
let repo_info_path = path.join(".meta").join("repo_info.json");
|
||||
let instructions_path =
|
||||
path.join(".docs").join("instructions.md");
|
||||
|
||||
if repo_info_path.exists() && instructions_path.exists() {
|
||||
exercises.push(path);
|
||||
}
|
||||
} else {
|
||||
// Map the language to the file extension - original code
|
||||
let language_extension = match *language {
|
||||
"python" => "py",
|
||||
"go" => "go",
|
||||
"rust" => "rs",
|
||||
"typescript" => "ts",
|
||||
"javascript" => "js",
|
||||
"ruby" => "rb",
|
||||
"php" => "php",
|
||||
"bash" => "sh",
|
||||
"multi" => "diff",
|
||||
_ => continue, // Skip unsupported languages
|
||||
};
|
||||
|
||||
// Check if this is a valid exercise with instructions and example
|
||||
let instructions_path =
|
||||
path.join(".docs").join("instructions.md");
|
||||
let has_instructions = instructions_path.exists();
|
||||
let example_path = path
|
||||
.join(".meta")
|
||||
.join(format!("example.{}", language_extension));
|
||||
let has_example = example_path.exists();
|
||||
|
||||
if has_instructions && has_example {
|
||||
exercises.push(path);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(err) => println!("Error reading directory entry: {}", err),
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(err) => println!(
|
||||
"Error reading directory {}: {}",
|
||||
language_dir.display(),
|
||||
err
|
||||
),
|
||||
}
|
||||
|
||||
// Sort exercises by name for consistent selection
|
||||
exercises.sort_by(|a, b| {
|
||||
let a_name = a.file_name().unwrap_or_default().to_string_lossy();
|
||||
let b_name = b.file_name().unwrap_or_default().to_string_lossy();
|
||||
a_name.cmp(&b_name)
|
||||
});
|
||||
|
||||
// Apply the limit if specified
|
||||
if let Some(limit) = max_per_language {
|
||||
if exercises.len() > limit {
|
||||
println!(
|
||||
"Limiting {} exercises to {} for language {}",
|
||||
exercises.len(),
|
||||
limit,
|
||||
language
|
||||
);
|
||||
exercises.truncate(limit);
|
||||
}
|
||||
}
|
||||
|
||||
println!(
|
||||
"Found {} exercises for language {}: {:?}",
|
||||
exercises.len(),
|
||||
language,
|
||||
exercises
|
||||
.iter()
|
||||
.map(|p| p.file_name().unwrap_or_default().to_string_lossy())
|
||||
.collect::<Vec<_>>()
|
||||
);
|
||||
all_exercises.extend(exercises);
|
||||
}
|
||||
|
||||
Ok(all_exercises)
|
||||
}
|
||||
125
crates/assistant_eval/src/git_commands.rs
Normal file
@@ -0,0 +1,125 @@
|
||||
use anyhow::{Result, anyhow};
|
||||
use serde::Deserialize;
|
||||
use std::{fs, path::Path};
|
||||
use tempfile::TempDir;
|
||||
use util::command::new_smol_command;
|
||||
use walkdir::WalkDir;
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
pub struct SetupConfig {
|
||||
#[serde(rename = "base.sha")]
|
||||
pub base_sha: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
pub struct RepoInfo {
|
||||
pub remote_url: String,
|
||||
pub head_sha: String,
|
||||
}
|
||||
|
||||
pub async fn run_git(repo_path: &Path, args: &[&str]) -> Result<String> {
|
||||
let output = new_smol_command("git")
|
||||
.current_dir(repo_path)
|
||||
.args(args)
|
||||
.output()
|
||||
.await?;
|
||||
|
||||
if output.status.success() {
|
||||
Ok(String::from_utf8(output.stdout)?.trim().to_string())
|
||||
} else {
|
||||
Err(anyhow!(
|
||||
"Git command failed: {} with status: {}",
|
||||
args.join(" "),
|
||||
output.status
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn read_base_sha(framework_path: &Path) -> Result<String> {
|
||||
let setup_path = framework_path.join("setup.json");
|
||||
let setup_content = smol::unblock(move || std::fs::read_to_string(&setup_path)).await?;
|
||||
let setup_config: SetupConfig = serde_json_lenient::from_str_lenient(&setup_content)?;
|
||||
Ok(setup_config.base_sha)
|
||||
}
|
||||
|
||||
pub async fn read_repo_info(exercise_path: &Path) -> Result<RepoInfo> {
|
||||
let repo_info_path = exercise_path.join(".meta").join("repo_info.json");
|
||||
println!("Reading repo info from: {}", repo_info_path.display());
|
||||
let repo_info_content = smol::unblock(move || std::fs::read_to_string(&repo_info_path)).await?;
|
||||
let repo_info: RepoInfo = serde_json_lenient::from_str_lenient(&repo_info_content)?;
|
||||
|
||||
// Remove any quotes from the strings
|
||||
let remote_url = repo_info.remote_url.trim_matches('"').to_string();
|
||||
let head_sha = repo_info.head_sha.trim_matches('"').to_string();
|
||||
|
||||
Ok(RepoInfo {
|
||||
remote_url,
|
||||
head_sha,
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn setup_temp_repo(exercise_path: &Path, _base_sha: &str) -> Result<TempDir> {
|
||||
let temp_dir = TempDir::new()?;
|
||||
|
||||
// Check if this is an internal exercise by looking for repo_info.json
|
||||
let repo_info_path = exercise_path.join(".meta").join("repo_info.json");
|
||||
if repo_info_path.exists() {
|
||||
// This is an internal exercise, handle it differently
|
||||
let repo_info = read_repo_info(exercise_path).await?;
|
||||
|
||||
// Clone the repository to the temp directory
|
||||
let url = repo_info.remote_url;
|
||||
let clone_path = temp_dir.path();
|
||||
println!(
|
||||
"Cloning repository from {} to {}",
|
||||
url,
|
||||
clone_path.display()
|
||||
);
|
||||
run_git(
|
||||
&std::env::current_dir()?,
|
||||
&["clone", &url, &clone_path.to_string_lossy()],
|
||||
)
|
||||
.await?;
|
||||
|
||||
// Checkout the specified commit
|
||||
println!("Checking out commit: {}", repo_info.head_sha);
|
||||
run_git(temp_dir.path(), &["checkout", &repo_info.head_sha]).await?;
|
||||
|
||||
println!("Successfully set up internal repository");
|
||||
} else {
|
||||
// Original code for regular exercises
|
||||
// Copy the exercise files to the temp directory, excluding .docs and .meta
|
||||
for entry in WalkDir::new(exercise_path).min_depth(0).max_depth(10) {
|
||||
let entry = entry?;
|
||||
let source_path = entry.path();
|
||||
|
||||
// Skip .docs and .meta directories completely
|
||||
if source_path.starts_with(exercise_path.join(".docs"))
|
||||
|| source_path.starts_with(exercise_path.join(".meta"))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
if source_path.is_file() {
|
||||
let relative_path = source_path.strip_prefix(exercise_path)?;
|
||||
let dest_path = temp_dir.path().join(relative_path);
|
||||
|
||||
// Make sure parent directories exist
|
||||
if let Some(parent) = dest_path.parent() {
|
||||
fs::create_dir_all(parent)?;
|
||||
}
|
||||
|
||||
fs::copy(source_path, dest_path)?;
|
||||
}
|
||||
}
|
||||
|
||||
// Initialize git repo in the temp directory
|
||||
run_git(temp_dir.path(), &["init"]).await?;
|
||||
run_git(temp_dir.path(), &["add", "."]).await?;
|
||||
run_git(temp_dir.path(), &["commit", "-m", "Initial commit"]).await?;
|
||||
|
||||
println!("Created temp repo without .docs and .meta directories");
|
||||
}
|
||||
|
||||
Ok(temp_dir)
|
||||
}
|
||||
279
crates/assistant_eval/src/headless_assistant.rs
Normal file
@@ -0,0 +1,279 @@
|
||||
use agent::{RequestKind, Thread, ThreadEvent, ThreadStore};
|
||||
use anyhow::anyhow;
|
||||
use assistant_tool::ToolWorkingSet;
|
||||
use client::{Client, UserStore};
|
||||
use collections::HashMap;
|
||||
use dap::DapRegistry;
|
||||
use futures::StreamExt;
|
||||
use gpui::{App, AsyncApp, Entity, SemanticVersion, Subscription, Task, prelude::*};
|
||||
use language::LanguageRegistry;
|
||||
use language_model::{
|
||||
AuthenticateError, LanguageModel, LanguageModelProviderId, LanguageModelRegistry,
|
||||
LanguageModelRequest,
|
||||
};
|
||||
use node_runtime::NodeRuntime;
|
||||
use project::{Project, RealFs};
|
||||
use prompt_store::PromptBuilder;
|
||||
use settings::SettingsStore;
|
||||
use smol::channel;
|
||||
use std::sync::Arc;
|
||||
|
||||
/// Subset of `workspace::AppState` needed by `HeadlessAssistant`, with additional fields.
|
||||
pub struct HeadlessAppState {
|
||||
pub languages: Arc<LanguageRegistry>,
|
||||
pub client: Arc<Client>,
|
||||
pub user_store: Entity<UserStore>,
|
||||
pub fs: Arc<dyn fs::Fs>,
|
||||
pub node_runtime: NodeRuntime,
|
||||
|
||||
// Additional fields not present in `workspace::AppState`.
|
||||
pub prompt_builder: Arc<PromptBuilder>,
|
||||
}
|
||||
|
||||
pub struct HeadlessAssistant {
|
||||
pub thread: Entity<Thread>,
|
||||
pub project: Entity<Project>,
|
||||
#[allow(dead_code)]
|
||||
pub thread_store: Entity<ThreadStore>,
|
||||
pub tool_use_counts: HashMap<Arc<str>, u32>,
|
||||
pub done_tx: channel::Sender<anyhow::Result<()>>,
|
||||
_subscription: Subscription,
|
||||
}
|
||||
|
||||
impl HeadlessAssistant {
|
||||
pub fn new(
|
||||
app_state: Arc<HeadlessAppState>,
|
||||
cx: &mut App,
|
||||
) -> anyhow::Result<(Entity<Self>, channel::Receiver<anyhow::Result<()>>)> {
|
||||
let env = None;
|
||||
let project = Project::local(
|
||||
app_state.client.clone(),
|
||||
app_state.node_runtime.clone(),
|
||||
app_state.user_store.clone(),
|
||||
app_state.languages.clone(),
|
||||
Arc::new(DapRegistry::default()),
|
||||
app_state.fs.clone(),
|
||||
env,
|
||||
cx,
|
||||
);
|
||||
|
||||
let tools = Arc::new(ToolWorkingSet::default());
|
||||
let thread_store =
|
||||
ThreadStore::new(project.clone(), tools, app_state.prompt_builder.clone(), cx)?;
|
||||
|
||||
let thread = thread_store.update(cx, |thread_store, cx| thread_store.create_thread(cx));
|
||||
|
||||
let (done_tx, done_rx) = channel::unbounded::<anyhow::Result<()>>();
|
||||
|
||||
let headless_thread = cx.new(move |cx| Self {
|
||||
_subscription: cx.subscribe(&thread, Self::handle_thread_event),
|
||||
thread,
|
||||
project,
|
||||
thread_store,
|
||||
tool_use_counts: HashMap::default(),
|
||||
done_tx,
|
||||
});
|
||||
|
||||
Ok((headless_thread, done_rx))
|
||||
}
|
||||
|
||||
fn handle_thread_event(
|
||||
&mut self,
|
||||
thread: Entity<Thread>,
|
||||
event: &ThreadEvent,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
match event {
|
||||
ThreadEvent::ShowError(err) => self
|
||||
.done_tx
|
||||
.send_blocking(Err(anyhow!("{:?}", err)))
|
||||
.unwrap(),
|
||||
ThreadEvent::DoneStreaming => {
|
||||
let thread = thread.read(cx);
|
||||
if let Some(message) = thread.messages().last() {
|
||||
println!("Message: {}", message.to_string());
|
||||
}
|
||||
if thread.all_tools_finished() {
|
||||
self.done_tx.send_blocking(Ok(())).unwrap()
|
||||
}
|
||||
}
|
||||
ThreadEvent::UsePendingTools => {
|
||||
thread.update(cx, |thread, cx| {
|
||||
thread.use_pending_tools(cx);
|
||||
});
|
||||
}
|
||||
ThreadEvent::ToolConfirmationNeeded => {
|
||||
// Automatically approve all tools that need confirmation in headless mode
|
||||
println!("Tool confirmation needed - automatically approving in headless mode");
|
||||
|
||||
// Get the tools needing confirmation
|
||||
let tools_needing_confirmation: Vec<_> = thread
|
||||
.read(cx)
|
||||
.tools_needing_confirmation()
|
||||
.cloned()
|
||||
.collect();
|
||||
|
||||
// Run each tool that needs confirmation
|
||||
for tool_use in tools_needing_confirmation {
|
||||
if let Some(tool) = thread.read(cx).tools().tool(&tool_use.name, cx) {
|
||||
thread.update(cx, |thread, cx| {
|
||||
println!("Auto-approving tool: {}", tool_use.name);
|
||||
|
||||
// Create a request to send to the tool
|
||||
let request = thread.to_completion_request(RequestKind::Chat, cx);
|
||||
let messages = Arc::new(request.messages);
|
||||
|
||||
// Run the tool
|
||||
thread.run_tool(
|
||||
tool_use.id.clone(),
|
||||
tool_use.ui_text.clone(),
|
||||
tool_use.input.clone(),
|
||||
&messages,
|
||||
tool,
|
||||
cx,
|
||||
);
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
ThreadEvent::ToolFinished {
|
||||
tool_use_id,
|
||||
pending_tool_use,
|
||||
..
|
||||
} => {
|
||||
if let Some(pending_tool_use) = pending_tool_use {
|
||||
println!(
|
||||
"Used tool {} with input: {}",
|
||||
pending_tool_use.name, pending_tool_use.input
|
||||
);
|
||||
*self
|
||||
.tool_use_counts
|
||||
.entry(pending_tool_use.name.clone())
|
||||
.or_insert(0) += 1;
|
||||
}
|
||||
if let Some(tool_result) = thread.read(cx).tool_result(tool_use_id) {
|
||||
println!("Tool result: {:?}", tool_result);
|
||||
}
|
||||
if thread.read(cx).all_tools_finished() {
|
||||
let model_registry = LanguageModelRegistry::read_global(cx);
|
||||
if let Some(model) = model_registry.default_model() {
|
||||
thread.update(cx, |thread, cx| {
|
||||
thread.attach_tool_results(cx);
|
||||
thread.send_to_model(model.model, RequestKind::Chat, cx);
|
||||
});
|
||||
} else {
|
||||
println!(
|
||||
"Warning: No active language model available to continue conversation"
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn init(cx: &mut App) -> Arc<HeadlessAppState> {
|
||||
release_channel::init(SemanticVersion::default(), cx);
|
||||
gpui_tokio::init(cx);
|
||||
|
||||
let mut settings_store = SettingsStore::new(cx);
|
||||
settings_store
|
||||
.set_default_settings(settings::default_settings().as_ref(), cx)
|
||||
.unwrap();
|
||||
cx.set_global(settings_store);
|
||||
client::init_settings(cx);
|
||||
Project::init_settings(cx);
|
||||
|
||||
let client = Client::production(cx);
|
||||
cx.set_http_client(client.http_client().clone());
|
||||
|
||||
let git_binary_path = None;
|
||||
let fs = Arc::new(RealFs::new(
|
||||
git_binary_path,
|
||||
cx.background_executor().clone(),
|
||||
));
|
||||
|
||||
let languages = Arc::new(LanguageRegistry::new(cx.background_executor().clone()));
|
||||
|
||||
let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
|
||||
|
||||
language::init(cx);
|
||||
language_model::init(client.clone(), cx);
|
||||
language_models::init(user_store.clone(), client.clone(), fs.clone(), cx);
|
||||
assistant_tools::init(client.http_client().clone(), cx);
|
||||
context_server::init(cx);
|
||||
let stdout_is_a_pty = false;
|
||||
let prompt_builder = PromptBuilder::load(fs.clone(), stdout_is_a_pty, cx);
|
||||
agent::init(fs.clone(), client.clone(), prompt_builder.clone(), cx);
|
||||
|
||||
Arc::new(HeadlessAppState {
|
||||
languages,
|
||||
client,
|
||||
user_store,
|
||||
fs,
|
||||
node_runtime: NodeRuntime::unavailable(),
|
||||
prompt_builder,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn find_model(model_name: &str, cx: &App) -> anyhow::Result<Arc<dyn LanguageModel>> {
|
||||
let model_registry = LanguageModelRegistry::read_global(cx);
|
||||
let model = model_registry
|
||||
.available_models(cx)
|
||||
.find(|model| model.id().0 == model_name);
|
||||
|
||||
let Some(model) = model else {
|
||||
return Err(anyhow!(
|
||||
"No language model named {} was available. Available models: {}",
|
||||
model_name,
|
||||
model_registry
|
||||
.available_models(cx)
|
||||
.map(|model| model.id().0.clone())
|
||||
.collect::<Vec<_>>()
|
||||
.join(", ")
|
||||
));
|
||||
};
|
||||
|
||||
Ok(model)
|
||||
}
|
||||
|
||||
pub fn authenticate_model_provider(
|
||||
provider_id: LanguageModelProviderId,
|
||||
cx: &mut App,
|
||||
) -> Task<std::result::Result<(), AuthenticateError>> {
|
||||
let model_registry = LanguageModelRegistry::read_global(cx);
|
||||
let model_provider = model_registry.provider(&provider_id).unwrap();
|
||||
model_provider.authenticate(cx)
|
||||
}
|
||||
|
||||
pub async fn send_language_model_request(
|
||||
model: Arc<dyn LanguageModel>,
|
||||
request: LanguageModelRequest,
|
||||
cx: &mut AsyncApp,
|
||||
) -> anyhow::Result<String> {
|
||||
match model.stream_completion_text(request, &cx).await {
|
||||
Ok(mut stream) => {
|
||||
let mut full_response = String::new();
|
||||
|
||||
// Process the response stream
|
||||
while let Some(chunk_result) = stream.stream.next().await {
|
||||
match chunk_result {
|
||||
Ok(chunk_str) => {
|
||||
full_response.push_str(&chunk_str);
|
||||
}
|
||||
Err(err) => {
|
||||
return Err(anyhow!(
|
||||
"Error receiving response from language model: {err}"
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(full_response)
|
||||
}
|
||||
Err(err) => Err(anyhow!(
|
||||
"Failed to get response from language model. Error was: {err}"
|
||||
)),
|
||||
}
|
||||
}
|
||||
37
crates/assistant_eval/src/judge.rs
Normal file
@@ -0,0 +1,37 @@
|
||||
use crate::headless_assistant::send_language_model_request;
|
||||
use anyhow::anyhow;
|
||||
use gpui::{App, Task};
|
||||
use language_model::{
|
||||
LanguageModel, LanguageModelRequest, LanguageModelRequestMessage, MessageContent, Role,
|
||||
};
|
||||
use std::sync::Arc;
|
||||
|
||||
pub struct Judge {
|
||||
#[allow(dead_code)]
|
||||
pub original_diff: Option<String>,
|
||||
pub original_message: Option<String>,
|
||||
pub model: Arc<dyn LanguageModel>,
|
||||
}
|
||||
|
||||
impl Judge {
|
||||
pub fn run_with_prompt(&self, cx: &mut App) -> Task<anyhow::Result<String>> {
|
||||
let Some(prompt) = self.original_message.as_ref() else {
|
||||
return Task::ready(Err(anyhow!("No prompt provided in original_message")));
|
||||
};
|
||||
|
||||
let request = LanguageModelRequest {
|
||||
messages: vec![LanguageModelRequestMessage {
|
||||
role: Role::User,
|
||||
content: vec![MessageContent::Text(prompt.clone())],
|
||||
cache: false,
|
||||
}],
|
||||
temperature: Some(0.0),
|
||||
tools: Vec::new(),
|
||||
stop: Vec::new(),
|
||||
};
|
||||
|
||||
let model = self.model.clone();
|
||||
let request = request.clone();
|
||||
cx.spawn(async move |cx| send_language_model_request(model, request, cx).await)
|
||||
}
|
||||
}
|
||||
258
crates/assistant_eval/src/main.rs
Normal file
@@ -0,0 +1,258 @@
|
||||
mod eval;
|
||||
mod get_exercise;
|
||||
mod git_commands;
|
||||
mod headless_assistant;
|
||||
mod judge;
|
||||
mod templates_eval;
|
||||
|
||||
use clap::Parser;
|
||||
use eval::{run_exercise_eval, save_eval_results};
|
||||
use futures::stream::{self, StreamExt};
|
||||
use get_exercise::{find_exercises, get_exercise_language, get_exercise_name};
|
||||
use git_commands::read_base_sha;
|
||||
use gpui::Application;
|
||||
use headless_assistant::{authenticate_model_provider, find_model};
|
||||
use language_model::LanguageModelRegistry;
|
||||
use reqwest_client::ReqwestClient;
|
||||
use std::{path::PathBuf, sync::Arc};
|
||||
use templates_eval::all_templates;
|
||||
|
||||
#[derive(Parser, Debug)]
|
||||
#[command(
|
||||
name = "assistant_eval",
|
||||
disable_version_flag = true,
|
||||
before_help = "Tool eval runner"
|
||||
)]
|
||||
struct Args {
|
||||
/// Match the names of evals to run.
|
||||
#[arg(long)]
|
||||
exercise_names: Vec<String>,
|
||||
/// Runs all exercises, causes the exercise_names to be ignored.
|
||||
#[arg(long)]
|
||||
all: bool,
|
||||
/// Supported language types to evaluate (default: internal).
|
||||
/// Internal is data generated from the agent panel
|
||||
#[arg(long, default_value = "internal")]
|
||||
languages: String,
|
||||
/// Name of the model (default: "claude-3-7-sonnet-latest")
|
||||
#[arg(long, default_value = "claude-3-7-sonnet-latest")]
|
||||
model_name: String,
|
||||
/// Name of the judge model (default: value of `--model_name`).
|
||||
#[arg(long)]
|
||||
judge_model_name: Option<String>,
|
||||
/// Number of evaluations to run concurrently (default: 3)
|
||||
#[arg(short, long, default_value = "3")]
|
||||
concurrency: usize,
|
||||
/// Maximum number of exercises to evaluate per language
|
||||
#[arg(long)]
|
||||
max_exercises_per_language: Option<usize>,
|
||||
}
|
||||
|
||||
// First, let's define the order in which templates should be executed
|
||||
const TEMPLATE_EXECUTION_ORDER: [&str; 3] = [
|
||||
"ProjectCreation",
|
||||
"CodeModification",
|
||||
"ConversationalGuidance",
|
||||
];
|
||||
|
||||
fn main() {
|
||||
env_logger::init();
|
||||
let args = Args::parse();
|
||||
let http_client = Arc::new(ReqwestClient::new());
|
||||
let app = Application::headless().with_http_client(http_client.clone());
|
||||
|
||||
// Path to the zed-ace-framework repo
|
||||
let framework_path = PathBuf::from("../zed-ace-framework")
|
||||
.canonicalize()
|
||||
.unwrap();
|
||||
|
||||
// Fix the 'languages' lifetime issue by creating owned Strings instead of slices
|
||||
let languages: Vec<String> = args.languages.split(',').map(|s| s.to_string()).collect();
|
||||
|
||||
println!("Using zed-ace-framework at: {:?}", framework_path);
|
||||
println!("Evaluating languages: {:?}", languages);
|
||||
|
||||
app.run(move |cx| {
|
||||
let app_state = headless_assistant::init(cx);
|
||||
|
||||
let model = find_model(&args.model_name, cx).unwrap();
|
||||
let judge_model = if let Some(model_name) = &args.judge_model_name {
|
||||
find_model(model_name, cx).unwrap()
|
||||
} else {
|
||||
model.clone()
|
||||
};
|
||||
|
||||
LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
|
||||
registry.set_default_model(Some(model.clone()), cx);
|
||||
});
|
||||
|
||||
let model_provider_id = model.provider_id();
|
||||
let judge_model_provider_id = judge_model.provider_id();
|
||||
|
||||
let framework_path_clone = framework_path.clone();
|
||||
let languages_clone = languages.clone();
|
||||
let exercise_names = args.exercise_names.clone();
|
||||
let all_flag = args.all;
|
||||
|
||||
cx.spawn(async move |cx| {
|
||||
// Authenticate all model providers first
|
||||
cx.update(|cx| authenticate_model_provider(model_provider_id.clone(), cx))
|
||||
.unwrap()
|
||||
.await
|
||||
.unwrap();
|
||||
cx.update(|cx| authenticate_model_provider(judge_model_provider_id.clone(), cx))
|
||||
.unwrap()
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// Read base SHA from setup.json
|
||||
let base_sha = read_base_sha(&framework_path_clone).await.unwrap();
|
||||
|
||||
// Find all exercises for the specified languages
|
||||
let all_exercises = find_exercises(
|
||||
&framework_path_clone,
|
||||
&languages_clone
|
||||
.iter()
|
||||
.map(|s| s.as_str())
|
||||
.collect::<Vec<_>>(),
|
||||
args.max_exercises_per_language,
|
||||
)
|
||||
.unwrap();
|
||||
println!("Found {} exercises total", all_exercises.len());
|
||||
|
||||
// Filter exercises if specific ones were requested
|
||||
let exercises_to_run = if !exercise_names.is_empty() {
|
||||
// If exercise names are specified, filter by them regardless of --all flag
|
||||
all_exercises
|
||||
.into_iter()
|
||||
.filter(|path| {
|
||||
let name = get_exercise_name(path);
|
||||
exercise_names.iter().any(|filter| name.contains(filter))
|
||||
})
|
||||
.collect()
|
||||
} else if all_flag {
|
||||
// Only use all_flag if no exercise names are specified
|
||||
all_exercises
|
||||
} else {
|
||||
// Default behavior (no filters)
|
||||
all_exercises
|
||||
};
|
||||
|
||||
println!("Will run {} exercises", exercises_to_run.len());
|
||||
|
||||
// Get all templates and sort them according to the execution order
|
||||
let mut templates = all_templates();
|
||||
templates.sort_by_key(|template| {
|
||||
TEMPLATE_EXECUTION_ORDER
|
||||
.iter()
|
||||
.position(|&name| name == template.name)
|
||||
.unwrap_or(usize::MAX)
|
||||
});
|
||||
|
||||
// Create exercise eval tasks - each exercise is a single task that will run templates sequentially
|
||||
let exercise_tasks: Vec<_> = exercises_to_run
|
||||
.into_iter()
|
||||
.map(|exercise_path| {
|
||||
let exercise_name = get_exercise_name(&exercise_path);
|
||||
let templates_clone = templates.clone();
|
||||
let model_clone = model.clone();
|
||||
let judge_model_clone = judge_model.clone();
|
||||
let app_state_clone = app_state.clone();
|
||||
let base_sha_clone = base_sha.clone();
|
||||
let framework_path_clone = framework_path_clone.clone();
|
||||
let cx_clone = cx.clone();
|
||||
|
||||
async move {
|
||||
println!("Processing exercise: {}", exercise_name);
|
||||
let mut exercise_results = Vec::new();
|
||||
|
||||
// Determine the language for this exercise
|
||||
let language = match get_exercise_language(&exercise_path) {
|
||||
Ok(lang) => lang,
|
||||
Err(err) => {
|
||||
println!(
|
||||
"Error determining language for {}: {}",
|
||||
exercise_name, err
|
||||
);
|
||||
return exercise_results;
|
||||
}
|
||||
};
|
||||
|
||||
// Run each template sequentially for this exercise
|
||||
for template in templates_clone {
|
||||
// For "multi" or "internal" language, only run the CodeModification template
|
||||
if (language == "multi" || language == "internal")
|
||||
&& template.name != "CodeModification"
|
||||
{
|
||||
println!(
|
||||
"Skipping {} template for {} language",
|
||||
template.name, language
|
||||
);
|
||||
continue;
|
||||
}
|
||||
|
||||
match run_exercise_eval(
|
||||
exercise_path.clone(),
|
||||
template.clone(),
|
||||
model_clone.clone(),
|
||||
judge_model_clone.clone(),
|
||||
app_state_clone.clone(),
|
||||
base_sha_clone.clone(),
|
||||
framework_path_clone.clone(),
|
||||
cx_clone.clone(),
|
||||
)
|
||||
.await
|
||||
{
|
||||
Ok(result) => {
|
||||
println!(
|
||||
"Completed {} with template {} - score: {}",
|
||||
exercise_name, template.name, result.score
|
||||
);
|
||||
exercise_results.push(result);
|
||||
}
|
||||
Err(err) => {
|
||||
println!(
|
||||
"Error running {} with template {}: {}",
|
||||
exercise_name, template.name, err
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Save results for this exercise
|
||||
if !exercise_results.is_empty() {
|
||||
if let Err(err) =
|
||||
save_eval_results(&exercise_path, exercise_results.clone()).await
|
||||
{
|
||||
println!("Error saving results for {}: {}", exercise_name, err);
|
||||
} else {
|
||||
println!("Saved results for {}", exercise_name);
|
||||
}
|
||||
}
|
||||
|
||||
exercise_results
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
|
||||
println!(
|
||||
"Running {} exercises with concurrency: {}",
|
||||
exercise_tasks.len(),
|
||||
args.concurrency
|
||||
);
|
||||
|
||||
// Run exercises concurrently, with each exercise running its templates sequentially
|
||||
let all_results = stream::iter(exercise_tasks)
|
||||
.buffer_unordered(args.concurrency)
|
||||
.flat_map(stream::iter)
|
||||
.collect::<Vec<_>>()
|
||||
.await;
|
||||
|
||||
println!("Completed {} evaluation runs", all_results.len());
|
||||
cx.update(|cx| cx.quit()).unwrap();
|
||||
})
|
||||
.detach();
|
||||
});
|
||||
|
||||
println!("Done running evals");
|
||||
}
|
||||
210
crates/assistant_eval/src/templates_eval.rs
Normal file
@@ -0,0 +1,210 @@
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct Template {
|
||||
pub name: &'static str,
|
||||
pub content: &'static str,
|
||||
}
|
||||
|
||||
pub fn all_templates() -> Vec<Template> {
|
||||
vec![
|
||||
Template {
|
||||
name: "ProjectCreation",
|
||||
content: r#"
|
||||
# Project Creation Evaluation Template
|
||||
|
||||
## Instructions
|
||||
|
||||
Evaluate how well the AI assistant created a new implementation from scratch. Score it between 0.0 and 1.0 based on quality and fulfillment of requirements.
|
||||
- 1.0 = Perfect implementation that creates all necessary files with correct functionality.
|
||||
- 0.0 = Completely fails to create working files or meet requirements.
|
||||
|
||||
Note: A git diff output is required. If no code changes are provided (i.e., no git diff output), the score must be 0.0.
|
||||
|
||||
## Evaluation Criteria
|
||||
|
||||
Please consider the following aspects in order of importance:
|
||||
|
||||
1. **File Creation (25%)**
|
||||
- Did the assistant create all necessary files?
|
||||
- Are the files appropriately named and organized?
|
||||
- Did the assistant create a complete solution without missing components?
|
||||
|
||||
2. **Functional Correctness (40%)**
|
||||
- Does the implementation fulfill all specified requirements?
|
||||
- Does it handle edge cases properly?
|
||||
- Is it free of logical errors and bugs?
|
||||
- Do all components work together as expected?
|
||||
|
||||
3. **Code Quality (20%)**
|
||||
- Is the code well-structured, readable and well-documented?
|
||||
- Does it follow language-specific best practices?
|
||||
- Is there proper error handling?
|
||||
- Are naming conventions clear and consistent?
|
||||
|
||||
4. **Architecture Design (15%)**
|
||||
- Is the code modular and extensible?
|
||||
- Is there proper separation of concerns?
|
||||
- Are appropriate design patterns used?
|
||||
- Is the overall architecture appropriate for the requirements?
|
||||
|
||||
## Input
|
||||
|
||||
Requirements:
|
||||
<!-- ```requirements go here``` -->
|
||||
|
||||
Reference Implementation:
|
||||
<!-- ```reference code goes here``` -->
|
||||
|
||||
AI-Generated Implementation (git diff output):
|
||||
<!-- ```git diff goes here``` -->
|
||||
|
||||
## Output Format
|
||||
|
||||
THE ONLY OUTPUT SHOULD BE A SCORE BETWEEN 0.0 AND 1.0.
|
||||
|
||||
EXAMPLE ONE:
|
||||
|
||||
0.92
|
||||
|
||||
EXAMPLE TWO:
|
||||
|
||||
0.85
|
||||
|
||||
EXAMPLE THREE:
|
||||
|
||||
0.78
|
||||
"#,
|
||||
},
|
||||
Template {
|
||||
name: "CodeModification",
|
||||
content: r#"
|
||||
# Code Modification Evaluation Template
|
||||
|
||||
## Instructions
|
||||
|
||||
Evaluate how well the AI assistant modified existing code to meet requirements. Score between 0.0 and 1.0 based on quality and appropriateness of changes.
|
||||
- 1.0 = Perfect modifications that correctly implement all requirements.
|
||||
- 0.0 = Failed to make appropriate changes or introduced serious errors.
|
||||
|
||||
## Evaluation Criteria
|
||||
|
||||
Please consider the following aspects in order of importance:
|
||||
|
||||
1. **Functional Correctness (50%)**
|
||||
- Do the modifications correctly implement the requirements?
|
||||
- Did the assistant modify the right files and code sections?
|
||||
- Are the changes free of bugs and logical errors?
|
||||
- Do the modifications maintain compatibility with existing code?
|
||||
|
||||
2. **Modification Approach (25%)**
|
||||
- Are the changes minimal and focused on what needs to be changed?
|
||||
- Did the assistant avoid unnecessary modifications?
|
||||
- Are the changes integrated seamlessly with the existing codebase?
|
||||
- Did the assistant preserve the original code style and patterns?
|
||||
|
||||
3. **Code Quality (15%)**
|
||||
- Are the modifications well-structured and documented?
|
||||
- Do they follow the same conventions as the original code?
|
||||
- Is there proper error handling in the modified code?
|
||||
- Are the changes readable and maintainable?
|
||||
|
||||
4. **Solution Completeness (10%)**
|
||||
- Do the modifications completely address all requirements?
|
||||
- Are there any missing changes or overlooked requirements?
|
||||
- Did the assistant consider all necessary edge cases?
|
||||
|
||||
## Input
|
||||
|
||||
Original:
|
||||
<!-- ```reference code goes here``` -->
|
||||
|
||||
New (git diff output):
|
||||
<!-- ```git diff goes here``` -->
|
||||
|
||||
## Output Format
|
||||
|
||||
THE ONLY OUTPUT SHOULD BE A SCORE BETWEEN 0.0 AND 1.0.
|
||||
|
||||
EXAMPLE ONE:
|
||||
|
||||
0.92
|
||||
|
||||
EXAMPLE TWO:
|
||||
|
||||
0.85
|
||||
|
||||
EXAMPLE THREE:
|
||||
|
||||
0.78
|
||||
"#,
|
||||
},
|
||||
Template {
|
||||
name: "ConversationalGuidance",
|
||||
content: r#"
|
||||
# Conversational Guidance Evaluation Template
|
||||
|
||||
## Instructions
|
||||
|
||||
Evaluate the quality of the AI assistant's conversational guidance and score it between 0.0 and 1.0.
|
||||
- 1.0 = Perfect guidance with ideal information gathering, clarification, and advice without writing code.
|
||||
- 0.0 = Completely unhelpful, inappropriate guidance, or wrote code when it should not have.
|
||||
|
||||
## Evaluation Criteria
|
||||
|
||||
ABSOLUTE REQUIREMENT:
|
||||
- The assistant should NOT generate complete code solutions in conversation mode.
|
||||
- If the git diff shows the assistant wrote complete code, the score should be significantly reduced.
|
||||
|
||||
1. **Information Gathering Effectiveness (30%)**
|
||||
- Did the assistant ask relevant and precise questions?
|
||||
- Did it efficiently narrow down the problem scope?
|
||||
- Did it avoid unnecessary or redundant questions?
|
||||
- Was questioning appropriately paced and contextual?
|
||||
|
||||
2. **Conceptual Guidance (30%)**
|
||||
- Did the assistant provide high-level approaches and strategies?
|
||||
- Did it explain relevant concepts and algorithms?
|
||||
- Did it offer planning advice without implementing the solution?
|
||||
- Did it suggest a structured approach to solving the problem?
|
||||
|
||||
3. **Educational Value (20%)**
|
||||
- Did the assistant help the user understand the problem better?
|
||||
- Did it provide explanations that would help the user learn?
|
||||
- Did it guide without simply giving away answers?
|
||||
- Did it encourage the user to think through parts of the problem?
|
||||
|
||||
4. **Conversation Quality (20%)**
|
||||
- Was the conversation logically structured and easy to follow?
|
||||
- Did the assistant maintain appropriate context throughout?
|
||||
- Was the interaction helpful without being condescending?
|
||||
- Did the conversation reach a satisfactory conclusion with clear next steps?
|
||||
|
||||
## Input
|
||||
|
||||
Initial Query:
|
||||
<!-- ```query goes here``` -->
|
||||
|
||||
Conversation Transcript:
|
||||
<!-- ```transcript goes here``` -->
|
||||
|
||||
Git Diff:
|
||||
<!-- ```git diff goes here``` -->
|
||||
|
||||
## Output Format
|
||||
|
||||
THE ONLY OUTPUT SHOULD BE A SCORE BETWEEN 0.0 AND 1.0.
|
||||
|
||||
EXAMPLE ONE:
|
||||
|
||||
0.92
|
||||
|
||||
EXAMPLE TWO:
|
||||
|
||||
0.85
|
||||
|
||||
EXAMPLE THREE:
|
||||
|
||||
0.78
|
||||
"#,
|
||||
},
|
||||
]
|
||||
}
|
||||
@@ -69,7 +69,7 @@ pub enum AssistantProviderContentV1 {
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Default)]
|
||||
#[derive(Debug, Default)]
|
||||
pub struct AssistantSettings {
|
||||
pub enabled: bool,
|
||||
pub button: bool,
|
||||
@@ -742,7 +742,7 @@ mod tests {
|
||||
AssistantSettings::get_global(cx).default_model,
|
||||
LanguageModelSelection {
|
||||
provider: "zed.dev".into(),
|
||||
model: "claude-3-7-sonnet-latest".into(),
|
||||
model: "claude-3-5-sonnet-latest".into(),
|
||||
}
|
||||
);
|
||||
});
|
||||
|
||||
@@ -4,7 +4,7 @@ use collections::BTreeMap;
|
||||
use futures::{StreamExt, channel::mpsc};
|
||||
use gpui::{App, AppContext, AsyncApp, Context, Entity, Subscription, Task, WeakEntity};
|
||||
use language::{Anchor, Buffer, BufferEvent, DiskState, Point};
|
||||
use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle};
|
||||
use project::{Project, ProjectItem};
|
||||
use std::{cmp, ops::Range, sync::Arc};
|
||||
use text::{Edit, Patch, Rope};
|
||||
use util::RangeExt;
|
||||
@@ -49,10 +49,6 @@ impl ActionLog {
|
||||
.tracked_buffers
|
||||
.entry(buffer.clone())
|
||||
.or_insert_with(|| {
|
||||
let open_lsp_handle = self.project.update(cx, |project, cx| {
|
||||
project.register_buffer_with_language_servers(&buffer, cx)
|
||||
});
|
||||
|
||||
let text_snapshot = buffer.read(cx).text_snapshot();
|
||||
let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
|
||||
let (diff_update_tx, diff_update_rx) = mpsc::unbounded();
|
||||
@@ -80,7 +76,6 @@ impl ActionLog {
|
||||
version: buffer.read(cx).version(),
|
||||
diff,
|
||||
diff_update: diff_update_tx,
|
||||
_open_lsp_handle: open_lsp_handle,
|
||||
_maintain_diff: cx.spawn({
|
||||
let buffer = buffer.clone();
|
||||
async move |this, cx| {
|
||||
@@ -240,7 +235,7 @@ impl ActionLog {
|
||||
.await;
|
||||
|
||||
diff.update(cx, |diff, cx| {
|
||||
diff.set_snapshot(diff_snapshot, &buffer_snapshot, cx)
|
||||
diff.set_snapshot(diff_snapshot, &buffer_snapshot, None, cx)
|
||||
})?;
|
||||
}
|
||||
this.update(cx, |this, cx| {
|
||||
@@ -620,7 +615,6 @@ struct TrackedBuffer {
|
||||
diff: Entity<BufferDiff>,
|
||||
snapshot: text::BufferSnapshot,
|
||||
diff_update: mpsc::UnboundedSender<(ChangeAuthor, text::BufferSnapshot)>,
|
||||
_open_lsp_handle: OpenLspBufferHandle,
|
||||
_maintain_diff: Task<()>,
|
||||
_subscription: Subscription,
|
||||
}
|
||||
|
||||
@@ -1,6 +1,5 @@
mod action_log;
mod tool_registry;
mod tool_schema;
mod tool_working_set;

use std::fmt;
@@ -17,26 +16,12 @@ use project::Project;

pub use crate::action_log::*;
pub use crate::tool_registry::*;
pub use crate::tool_schema::*;
pub use crate::tool_working_set::*;

pub fn init(cx: &mut App) {
    ToolRegistry::default_global(cx);
}

/// The result of running a tool
pub struct ToolResult {
    /// The asynchronous task that will eventually resolve to the tool's output
    pub output: Task<Result<String>>,
}

impl From<Task<Result<String>>> for ToolResult {
    /// Convert from a task to a ToolResult
    fn from(output: Task<Result<String>>) -> Self {
        Self { output }
    }
}

#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone)]
pub enum ToolSource {
    /// A native tool built-in to Zed.
@@ -63,11 +48,11 @@ pub trait Tool: 'static + Send + Sync {

    /// Returns true iff the tool needs the users's confirmation
    /// before having permission to run.
    fn needs_confirmation(&self, input: &serde_json::Value, cx: &App) -> bool;
    fn needs_confirmation(&self) -> bool;

    /// Returns the JSON schema that describes the tool's input.
    fn input_schema(&self, _: LanguageModelToolSchemaFormat) -> Result<serde_json::Value> {
        Ok(serde_json::Value::Object(serde_json::Map::default()))
    fn input_schema(&self, _: LanguageModelToolSchemaFormat) -> serde_json::Value {
        serde_json::Value::Object(serde_json::Map::default())
    }

    /// Returns markdown to be displayed in the UI for this tool.
@@ -81,7 +66,7 @@ pub trait Tool: 'static + Send + Sync {
        project: Entity<Project>,
        action_log: Entity<ActionLog>,
        cx: &mut App,
    ) -> ToolResult;
    ) -> Task<Result<String>>;
}

impl Debug for dyn Tool {

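As a quick illustration of the hunk above: the `From<Task<Result<String>>>` impl lets a tool hand back a finished task with `.into()` wherever a `ToolResult` is expected. The helper below is a hypothetical sketch, not part of this diff; it assumes the `assistant_tool::ToolResult` export and gpui's `Task::ready` constructor.

use anyhow::Result;
use assistant_tool::ToolResult;
use gpui::Task;

// Hypothetical helper (not from this diff): wrap an already-computed string in a
// completed task and convert it via the `From` impl shown above.
fn immediate_tool_result(text: &str) -> ToolResult {
    let task: Task<Result<String>> = Task::ready(Ok(text.to_string()));
    task.into()
}
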
@@ -1,236 +0,0 @@
use anyhow::Result;
use serde_json::Value;

use crate::LanguageModelToolSchemaFormat;

/// Tries to adapt a JSON schema representation to be compatible with the specified format.
///
/// If the json cannot be made compatible with the specified format, an error is returned.
pub fn adapt_schema_to_format(
    json: &mut Value,
    format: LanguageModelToolSchemaFormat,
) -> Result<()> {
    match format {
        LanguageModelToolSchemaFormat::JsonSchema => Ok(()),
        LanguageModelToolSchemaFormat::JsonSchemaSubset => adapt_to_json_schema_subset(json),
    }
}

/// Tries to adapt the json schema so that it is compatible with https://ai.google.dev/api/caching#Schema
fn adapt_to_json_schema_subset(json: &mut Value) -> Result<()> {
    if let Value::Object(obj) = json {
        const UNSUPPORTED_KEYS: [&str; 4] = ["if", "then", "else", "$ref"];

        for key in UNSUPPORTED_KEYS {
            if obj.contains_key(key) {
                return Err(anyhow::anyhow!(
                    "Schema cannot be made compatible because it contains \"{}\" ",
                    key
                ));
            }
        }

        const KEYS_TO_REMOVE: [&str; 2] = ["format", "$schema"];
        for key in KEYS_TO_REMOVE {
            obj.remove(key);
        }

        if let Some(default) = obj.get("default") {
            let is_null = default.is_null();
            // Default is not supported, so we need to remove it
            obj.remove("default");
            if is_null {
                obj.insert("nullable".to_string(), Value::Bool(true));
            }
        }

        // If a type is not specified for an input parameter, add a default type
        if obj.contains_key("description")
            && !obj.contains_key("type")
            && !(obj.contains_key("anyOf")
                || obj.contains_key("oneOf")
                || obj.contains_key("allOf"))
        {
            obj.insert("type".to_string(), Value::String("string".to_string()));
        }

        // Handle oneOf -> anyOf conversion
        if let Some(subschemas) = obj.get_mut("oneOf") {
            if subschemas.is_array() {
                let subschemas_clone = subschemas.clone();
                obj.remove("oneOf");
                obj.insert("anyOf".to_string(), subschemas_clone);
            }
        }

        // Recursively process all nested objects and arrays
        for (_, value) in obj.iter_mut() {
            if let Value::Object(_) | Value::Array(_) = value {
                adapt_to_json_schema_subset(value)?;
            }
        }
    } else if let Value::Array(arr) = json {
        for item in arr.iter_mut() {
            adapt_to_json_schema_subset(item)?;
        }
    }
    Ok(())
}

#[cfg(test)]
mod tests {
    use super::*;
    use serde_json::json;

    #[test]
    fn test_transform_default_null_to_nullable() {
        let mut json = json!({
            "description": "A test field",
            "type": "string",
            "default": null
        });

        adapt_to_json_schema_subset(&mut json).unwrap();

        assert_eq!(
            json,
            json!({
                "description": "A test field",
                "type": "string",
                "nullable": true
            })
        );
    }

    #[test]
    fn test_transform_adds_type_when_missing() {
        let mut json = json!({
            "description": "A test field without type"
        });

        adapt_to_json_schema_subset(&mut json).unwrap();

        assert_eq!(
            json,
            json!({
                "description": "A test field without type",
                "type": "string"
            })
        );
    }

    #[test]
    fn test_transform_removes_format() {
        let mut json = json!({
            "description": "A test field",
            "type": "integer",
            "format": "uint32"
        });

        adapt_to_json_schema_subset(&mut json).unwrap();

        assert_eq!(
            json,
            json!({
                "description": "A test field",
                "type": "integer"
            })
        );
    }

    #[test]
    fn test_transform_one_of_to_any_of() {
        let mut json = json!({
            "description": "A test field",
            "oneOf": [
                { "type": "string" },
                { "type": "integer" }
            ]
        });

        adapt_to_json_schema_subset(&mut json).unwrap();

        assert_eq!(
            json,
            json!({
                "description": "A test field",
                "anyOf": [
                    { "type": "string" },
                    { "type": "integer" }
                ]
            })
        );
    }

    #[test]
    fn test_transform_nested_objects() {
        let mut json = json!({
            "type": "object",
            "properties": {
                "nested": {
                    "oneOf": [
                        { "type": "string" },
                        { "type": "null" }
                    ],
                    "format": "email"
                }
            }
        });

        adapt_to_json_schema_subset(&mut json).unwrap();

        assert_eq!(
            json,
            json!({
                "type": "object",
                "properties": {
                    "nested": {
                        "anyOf": [
                            { "type": "string" },
                            { "type": "null" }
                        ]
                    }
                }
            })
        );
    }

    #[test]
    fn test_transform_fails_if_unsupported_keys_exist() {
        let mut json = json!({
            "type": "object",
            "properties": {
                "$ref": "#/definitions/User",
            }
        });

        assert!(adapt_to_json_schema_subset(&mut json).is_err());

        let mut json = json!({
            "type": "object",
            "properties": {
                "if": "...",
            }
        });

        assert!(adapt_to_json_schema_subset(&mut json).is_err());

        let mut json = json!({
            "type": "object",
            "properties": {
                "then": "...",
            }
        });

        assert!(adapt_to_json_schema_subset(&mut json).is_err());

        let mut json = json!({
            "type": "object",
            "properties": {
                "else": "...",
            }
        });

        assert!(adapt_to_json_schema_subset(&mut json).is_err());
    }
}
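For context, a minimal usage sketch of the schema-adaptation helper above, on the side of the comparison that still has it. It assumes the `pub use crate::tool_schema::*;` re-export and the `language_model::LanguageModelToolSchemaFormat` path that appear elsewhere in this diff; the sample field names are illustrative only.

use anyhow::Result;
use language_model::LanguageModelToolSchemaFormat;
use serde_json::json;

fn main() -> Result<()> {
    // A schema using constructs the Gemini subset does not accept as-is.
    let mut schema = json!({
        "type": "object",
        "properties": {
            "mode": {
                "description": "How to open the file",
                "oneOf": [{ "type": "string" }, { "type": "integer" }],
                "default": null
            }
        }
    });

    // Re-exported from the crate root via `pub use crate::tool_schema::*;`.
    assistant_tool::adapt_schema_to_format(
        &mut schema,
        LanguageModelToolSchemaFormat::JsonSchemaSubset,
    )?;

    // "oneOf" is rewritten to "anyOf" and `"default": null` becomes
    // `"nullable": true`, matching the unit tests above.
    println!("{}", serde_json::to_string_pretty(&schema)?);
    Ok(())
}
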
@@ -1,7 +1,8 @@
use std::sync::Arc;

use collections::{HashMap, HashSet, IndexMap};
use gpui::{App, Context, EventEmitter};
use gpui::App;
use parking_lot::Mutex;

use crate::{Tool, ToolRegistry, ToolSource};

@@ -11,6 +12,11 @@ pub struct ToolId(usize);
/// A working set of tools for use in one instance of the Assistant Panel.
#[derive(Default)]
pub struct ToolWorkingSet {
    state: Mutex<WorkingSetState>,
}

#[derive(Default)]
struct WorkingSetState {
    context_server_tools_by_id: HashMap<ToolId, Arc<dyn Tool>>,
    context_server_tools_by_name: HashMap<String, Arc<dyn Tool>>,
    enabled_sources: HashSet<ToolSource>,
@@ -18,27 +24,99 @@ pub struct ToolWorkingSet {
    next_tool_id: ToolId,
}

pub enum ToolWorkingSetEvent {
    EnabledToolsChanged,
}

impl EventEmitter<ToolWorkingSetEvent> for ToolWorkingSet {}

impl ToolWorkingSet {
    pub fn tool(&self, name: &str, cx: &App) -> Option<Arc<dyn Tool>> {
        self.context_server_tools_by_name
        self.state
            .lock()
            .context_server_tools_by_name
            .get(name)
            .cloned()
            .or_else(|| ToolRegistry::global(cx).tool(name))
    }

    pub fn tools(&self, cx: &App) -> Vec<Arc<dyn Tool>> {
        let mut tools = ToolRegistry::global(cx).tools();
        tools.extend(self.context_server_tools_by_id.values().cloned());
        tools
        self.state.lock().tools(cx)
    }

    pub fn tools_by_source(&self, cx: &App) -> IndexMap<ToolSource, Vec<Arc<dyn Tool>>> {
        self.state.lock().tools_by_source(cx)
    }

    pub fn enabled_tools(&self, cx: &App) -> Vec<Arc<dyn Tool>> {
        self.state.lock().enabled_tools(cx)
    }

    pub fn disable_all_tools(&self) {
        let mut state = self.state.lock();
        state.disable_all_tools();
    }

    pub fn enable_source(&self, source: ToolSource, cx: &App) {
        let mut state = self.state.lock();
        state.enable_source(source, cx);
    }

    pub fn disable_source(&self, source: &ToolSource) {
        let mut state = self.state.lock();
        state.disable_source(source);
    }

    pub fn insert(&self, tool: Arc<dyn Tool>) -> ToolId {
        let mut state = self.state.lock();
        let tool_id = state.next_tool_id;
        state.next_tool_id.0 += 1;
        state
            .context_server_tools_by_id
            .insert(tool_id, tool.clone());
        state.tools_changed();
        tool_id
    }

    pub fn is_enabled(&self, source: &ToolSource, name: &Arc<str>) -> bool {
        self.state.lock().is_enabled(source, name)
    }

    pub fn is_disabled(&self, source: &ToolSource, name: &Arc<str>) -> bool {
        self.state.lock().is_disabled(source, name)
    }

    pub fn enable(&self, source: ToolSource, tools_to_enable: &[Arc<str>]) {
        let mut state = self.state.lock();
        state.enable(source, tools_to_enable);
    }

    pub fn disable(&self, source: ToolSource, tools_to_disable: &[Arc<str>]) {
        let mut state = self.state.lock();
        state.disable(source, tools_to_disable);
    }

    pub fn remove(&self, tool_ids_to_remove: &[ToolId]) {
        let mut state = self.state.lock();
        state
            .context_server_tools_by_id
            .retain(|id, _| !tool_ids_to_remove.contains(id));
        state.tools_changed();
    }
}

impl WorkingSetState {
    fn tools_changed(&mut self) {
        self.context_server_tools_by_name.clear();
        self.context_server_tools_by_name.extend(
            self.context_server_tools_by_id
                .values()
                .map(|tool| (tool.name(), tool.clone())),
        );
    }

    fn tools(&self, cx: &App) -> Vec<Arc<dyn Tool>> {
        let mut tools = ToolRegistry::global(cx).tools();
        tools.extend(self.context_server_tools_by_id.values().cloned());

        tools
    }

    fn tools_by_source(&self, cx: &App) -> IndexMap<ToolSource, Vec<Arc<dyn Tool>>> {
        let mut tools_by_source = IndexMap::default();

        for tool in self.tools(cx) {
@@ -57,7 +135,7 @@ impl ToolWorkingSet {
        tools_by_source
    }

    pub fn enabled_tools(&self, cx: &App) -> Vec<Arc<dyn Tool>> {
    fn enabled_tools(&self, cx: &App) -> Vec<Arc<dyn Tool>> {
        let all_tools = self.tools(cx);

        all_tools
@@ -66,12 +144,31 @@ impl ToolWorkingSet {
            .collect()
    }

    pub fn disable_all_tools(&mut self, cx: &mut Context<Self>) {
        self.enabled_tools_by_source.clear();
        cx.emit(ToolWorkingSetEvent::EnabledToolsChanged);
    fn is_enabled(&self, source: &ToolSource, name: &Arc<str>) -> bool {
        self.enabled_tools_by_source
            .get(source)
            .map_or(false, |enabled_tools| enabled_tools.contains(name))
    }

    pub fn enable_source(&mut self, source: ToolSource, cx: &mut Context<Self>) {
    fn is_disabled(&self, source: &ToolSource, name: &Arc<str>) -> bool {
        !self.is_enabled(source, name)
    }

    fn enable(&mut self, source: ToolSource, tools_to_enable: &[Arc<str>]) {
        self.enabled_tools_by_source
            .entry(source)
            .or_default()
            .extend(tools_to_enable.into_iter().cloned());
    }

    fn disable(&mut self, source: ToolSource, tools_to_disable: &[Arc<str>]) {
        self.enabled_tools_by_source
            .entry(source)
            .or_default()
            .retain(|name| !tools_to_disable.contains(name));
    }

    fn enable_source(&mut self, source: ToolSource, cx: &App) {
        self.enabled_sources.insert(source.clone());

        let tools_by_source = self.tools_by_source(cx);
@@ -84,72 +181,14 @@ impl ToolWorkingSet {
                    .collect::<HashSet<_>>(),
            );
        }
        cx.emit(ToolWorkingSetEvent::EnabledToolsChanged);
    }

    pub fn disable_source(&mut self, source: &ToolSource, cx: &mut Context<Self>) {
    fn disable_source(&mut self, source: &ToolSource) {
        self.enabled_sources.remove(source);
        self.enabled_tools_by_source.remove(source);
        cx.emit(ToolWorkingSetEvent::EnabledToolsChanged);
    }

    pub fn insert(&mut self, tool: Arc<dyn Tool>) -> ToolId {
        let tool_id = self.next_tool_id;
        self.next_tool_id.0 += 1;
        self.context_server_tools_by_id
            .insert(tool_id, tool.clone());
        self.tools_changed();
        tool_id
    }

    pub fn is_enabled(&self, source: &ToolSource, name: &Arc<str>) -> bool {
        self.enabled_tools_by_source
            .get(source)
            .map_or(false, |enabled_tools| enabled_tools.contains(name))
    }

    pub fn is_disabled(&self, source: &ToolSource, name: &Arc<str>) -> bool {
        !self.is_enabled(source, name)
    }

    pub fn enable(
        &mut self,
        source: ToolSource,
        tools_to_enable: &[Arc<str>],
        cx: &mut Context<Self>,
    ) {
        self.enabled_tools_by_source
            .entry(source)
            .or_default()
            .extend(tools_to_enable.into_iter().cloned());
        cx.emit(ToolWorkingSetEvent::EnabledToolsChanged);
    }

    pub fn disable(
        &mut self,
        source: ToolSource,
        tools_to_disable: &[Arc<str>],
        cx: &mut Context<Self>,
    ) {
        self.enabled_tools_by_source
            .entry(source)
            .or_default()
            .retain(|name| !tools_to_disable.contains(name));
        cx.emit(ToolWorkingSetEvent::EnabledToolsChanged);
    }

    pub fn remove(&mut self, tool_ids_to_remove: &[ToolId]) {
        self.context_server_tools_by_id
            .retain(|id, _| !tool_ids_to_remove.contains(id));
        self.tools_changed();
    }

    fn tools_changed(&mut self) {
        self.context_server_tools_by_name.clear();
        self.context_server_tools_by_name.extend(
            self.context_server_tools_by_id
                .values()
                .map(|tool| (tool.name(), tool.clone())),
        );
    fn disable_all_tools(&mut self) {
        self.enabled_tools_by_source.clear();
    }
}

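The preceding hunks replace `&mut self` methods plus gpui event emission with `&self` methods that lock a private state struct. A standalone, std-only analogue of that pattern is sketched below (the real code uses `parking_lot::Mutex` and delegates to `WorkingSetState`; all names here are illustrative).

use std::collections::HashSet;
use std::sync::Mutex;

// Illustrative sketch mirroring the ToolWorkingSet / WorkingSetState split above.
#[derive(Default)]
pub struct WorkingSet {
    state: Mutex<State>,
}

#[derive(Default)]
struct State {
    enabled: HashSet<String>,
}

impl WorkingSet {
    // Public API takes `&self`; interior mutability lives behind the Mutex.
    pub fn enable(&self, names: &[&str]) {
        let mut state = self.state.lock().unwrap();
        state.enabled.extend(names.iter().map(|name| name.to_string()));
    }

    pub fn disable_all(&self) {
        self.state.lock().unwrap().enabled.clear();
    }

    pub fn is_enabled(&self, name: &str) -> bool {
        self.state.lock().unwrap().enabled.contains(name)
    }
}

fn main() {
    let set = WorkingSet::default();
    set.enable(&["read_file", "terminal"]);
    assert!(set.is_enabled("read_file"));
    set.disable_all();
    assert!(!set.is_enabled("terminal"));
}
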
@@ -23,6 +23,7 @@ http_client.workspace = true
itertools.workspace = true
language.workspace = true
language_model.workspace = true
lsp.workspace = true
project.workspace = true
regex.workspace = true
schemars.workspace = true

@@ -1,7 +1,7 @@
mod bash_tool;
mod batch_tool;
mod code_action_tool;
mod code_symbol_iter;
mod code_symbols_tool;
mod contents_tool;
mod copy_path_tool;
mod create_directory_tool;
mod create_file_tool;
@@ -16,11 +16,9 @@ mod open_tool;
mod path_search_tool;
mod read_file_tool;
mod regex_search_tool;
mod rename_tool;
mod replace;
mod schema;
mod symbol_info_tool;
mod terminal_tool;
mod thinking_tool;

use std::sync::Arc;
@@ -31,10 +29,9 @@ use gpui::App;
use http_client::HttpClientWithUrl;
use move_path_tool::MovePathTool;

use crate::bash_tool::BashTool;
use crate::batch_tool::BatchTool;
use crate::code_action_tool::CodeActionTool;
use crate::code_symbols_tool::CodeSymbolsTool;
use crate::contents_tool::ContentsTool;
use crate::create_directory_tool::CreateDirectoryTool;
use crate::create_file_tool::CreateFileTool;
use crate::delete_path_tool::DeletePathTool;
@@ -47,16 +44,14 @@ use crate::open_tool::OpenTool;
use crate::path_search_tool::PathSearchTool;
use crate::read_file_tool::ReadFileTool;
use crate::regex_search_tool::RegexSearchTool;
use crate::rename_tool::RenameTool;
use crate::symbol_info_tool::SymbolInfoTool;
use crate::terminal_tool::TerminalTool;
use crate::thinking_tool::ThinkingTool;

pub fn init(http_client: Arc<HttpClientWithUrl>, cx: &mut App) {
    assistant_tool::init(cx);

    let registry = ToolRegistry::global(cx);
    registry.register_tool(TerminalTool);
    registry.register_tool(BashTool);
    registry.register_tool(BatchTool);
    registry.register_tool(CreateDirectoryTool);
    registry.register_tool(CreateFileTool);
@@ -64,57 +59,15 @@ pub fn init(http_client: Arc<HttpClientWithUrl>, cx: &mut App) {
    registry.register_tool(DeletePathTool);
    registry.register_tool(FindReplaceFileTool);
    registry.register_tool(SymbolInfoTool);
    registry.register_tool(CodeActionTool);
    registry.register_tool(MovePathTool);
    registry.register_tool(DiagnosticsTool);
    registry.register_tool(ListDirectoryTool);
    registry.register_tool(NowTool);
    registry.register_tool(OpenTool);
    registry.register_tool(CodeSymbolsTool);
    registry.register_tool(ContentsTool);
    registry.register_tool(PathSearchTool);
    registry.register_tool(ReadFileTool);
    registry.register_tool(RegexSearchTool);
    registry.register_tool(RenameTool);
    registry.register_tool(ThinkingTool);
    registry.register_tool(FetchTool::new(http_client));
}

#[cfg(test)]
mod tests {
    use http_client::FakeHttpClient;

    use super::*;

    #[gpui::test]
    fn test_builtin_tool_schema_compatibility(cx: &mut App) {
        crate::init(
            Arc::new(http_client::HttpClientWithUrl::new(
                FakeHttpClient::with_200_response(),
                "https://zed.dev",
                None,
            )),
            cx,
        );

        for tool in ToolRegistry::global(cx).tools() {
            let actual_schema = tool
                .input_schema(language_model::LanguageModelToolSchemaFormat::JsonSchemaSubset)
                .unwrap();
            let mut expected_schema = actual_schema.clone();
            assistant_tool::adapt_schema_to_format(
                &mut expected_schema,
                language_model::LanguageModelToolSchemaFormat::JsonSchemaSubset,
            )
            .unwrap();

            let error_message = format!(
                "Tool schema for `{}` is not compatible with `language_model::LanguageModelToolSchemaFormat::JsonSchemaSubset` (Gemini Models).\n\
                Are you using `schema::json_schema_for<T>(format)` to generate the schema?",
                tool.name(),
            );

            assert_eq!(actual_schema, expected_schema, "{}", error_message)
        }
    }
}
