Compare commits
39 Commits
fix-linux-… ... git/panel-…
| Author | SHA1 | Date |
|---|---|---|
| | 637296ce6e | |
| | 90710b91c5 | |
| | 53f1fd5a27 | |
| | e98de8c7ca | |
| | fbd98c1fdc | |
| | 50b491b360 | |
| | 9dab0e5f9b | |
| | b0c8952b0c | |
| | 47bdf9f026 | |
| | 02cc7765f9 | |
| | 07ffb17991 | |
| | b8c644a12e | |
| | f7dac91680 | |
| | 5a8a80a956 | |
| | 0c64f5185f | |
| | 56bf1e09f7 | |
| | 1ce5974917 | |
| | db7d11e879 | |
| | c70617c69e | |
| | 17afd49adb | |
| | 6984c9b459 | |
| | be8038eb81 | |
| | 577467bed8 | |
| | 18ddbc044f | |
| | 5e1da57f25 | |
| | 4bee58f70e | |
| | 7eda0cd996 | |
| | c21c47e8ef | |
| | 92142fc2b2 | |
| | ed0121dc3a | |
| | 3aeeed0b30 | |
| | 7e6cdabb26 | |
| | f91f3f24ef | |
| | 4ebc20b30c | |
| | ac6aa735e4 | |
| | d2cb9a13b8 | |
| | 5956489a47 | |
| | 418d850375 | |
| | 7ad3cf4387 | |
.github/ISSUE_TEMPLATE/01_bug_ai.yml (vendored, new file, 41 lines)
@@ -0,0 +1,41 @@
name: Bug Report (AI)
description: Zed Agent Panel Bugs
type: "Bug"
labels: ["ai"]
title: "AI: <a short description of the AI Related bug>"
body:
  - type: textarea
    attributes:
      label: Summary
      description: Describe the bug with a one line summary, and provide detailed reproduction steps
      value: |
        <!-- Please insert a one line summary of the issue below -->
        SUMMARY_SENTENCE_HERE

        ### Description
        <!-- Describe with sufficient detail to reproduce from a clean Zed install. -->
        Steps to trigger the problem:
        1.
        2.
        3.

        **Expected Behavior**:
        **Actual Behavior**:

        ### Model Provider Details
        - Provider: (Anthropic via ZedPro, Anthropic via API key, Copilot Chat, Mistral, OpenAI, etc)
        - Model Name:
        - Mode: (Agent Panel, Inline Assistant, Terminal Assistant or Text Threads)
        - Other Details (MCPs, other settings, etc):
    validations:
      required: true

  - type: textarea
    id: environment
    attributes:
      label: Zed Version and System Specs
      description: 'Open Zed, and in the command palette select "zed: copy system specs into clipboard"'
      placeholder: |
        Output of "zed: copy system specs into clipboard"
    validations:
      required: true

.github/ISSUE_TEMPLATE/04_bug_debugger.yml (vendored, new file, 35 lines)
@@ -0,0 +1,35 @@
name: Bug Report (Debugger)
description: Zed Debugger-Related Bugs
type: "Bug"
labels: ["debugger"]
title: "Debugger: <a short description of the Debugger bug>"
body:
  - type: textarea
    attributes:
      label: Summary
      description: Describe the bug with a one line summary, and provide detailed reproduction steps
      value: |
        <!-- Please insert a one line summary of the issue below -->
        SUMMARY_SENTENCE_HERE

        ### Description
        <!-- Describe with sufficient detail to reproduce from a clean Zed install. -->
        Steps to trigger the problem:
        1.
        2.
        3.

        **Expected Behavior**:
        **Actual Behavior**:

    validations:
      required: true
  - type: textarea
    id: environment
    attributes:
      label: Zed Version and System Specs
      description: 'Open Zed, and in the command palette select "zed: copy system specs into clipboard"'
      placeholder: |
        Output of "zed: copy system specs into clipboard"
    validations:
      required: true

.github/ISSUE_TEMPLATE/06_bug_git.yml (vendored, new file, 35 lines)
@@ -0,0 +1,35 @@
name: Bug Report (Git)
description: Zed Git Related Bugs
type: "Bug"
labels: ["git"]
title: "Git: <a short description of the Git bug>"
body:
  - type: textarea
    attributes:
      label: Summary
      description: Describe the bug with a one-line summary, and provide detailed reproduction steps
      value: |
        <!-- Please insert a one-line summary of the issue below -->
        SUMMARY_SENTENCE_HERE

        ### Description
        <!-- Describe with sufficient detail to reproduce from a clean Zed install. -->
        Steps to trigger the problem:
        1.
        2.
        3.

        **Expected Behavior**:
        **Actual Behavior**:

    validations:
      required: true
  - type: textarea
    id: environment
    attributes:
      label: Zed Version and System Specs
      description: 'Open Zed, and in the command palette select "zed: copy system specs into clipboard"'
      placeholder: |
        Output of "zed: copy system specs into clipboard"
    validations:
      required: true

.github/ISSUE_TEMPLATE/07_bug_windows.yml (vendored, new file, 35 lines)
@@ -0,0 +1,35 @@
name: Bug Report (Windows)
description: Zed Windows Related Bugs
type: "Bug"
labels: ["windows"]
title: "Windows: <a short description of the Windows bug>"
body:
  - type: textarea
    attributes:
      label: Summary
      description: Describe the bug with a one-line summary, and provide detailed reproduction steps
      value: |
        <!-- Please insert a one-line summary of the issue below -->
        SUMMARY_SENTENCE_HERE

        ### Description
        <!-- Describe with sufficient detail to reproduce from a clean Zed install. -->
        Steps to trigger the problem:
        1.
        2.
        3.

        **Expected Behavior**:
        **Actual Behavior**:

    validations:
      required: true
  - type: textarea
    id: environment
    attributes:
      label: Zed Version and System Specs
      description: 'Open Zed, and in the command palette select "zed: copy system specs into clipboard"'
      placeholder: |
        Output of "zed: copy system specs into clipboard"
    validations:
      required: true

.github/ISSUE_TEMPLATE/10_bug_report.yml (vendored, 119 lines changed)
@@ -1,99 +1,58 @@
name: Report a bug
description: Report a problem with Zed.
type: Bug
labels: "state:needs triage"
name: Bug Report (Other)
description: |
Something else is broken in Zed (exclude crashing).
type: "Bug"
body:
- type: markdown
- type: textarea
attributes:
label: Summary
description: Provide a one sentence summary and detailed reproduction steps
value: |
Is this bug already reported? Upvote to get it noticed faster. [Here's the search](https://github.com/zed-industries/zed/issues). Upvote means giving it a :+1: reaction.
<!-- Begin your issue with a one sentence summary -->
SUMMARY_SENTENCE_HERE

Feature request? Please open in [discussions](https://github.com/zed-industries/zed/discussions/new/choose) instead.
### Description
<!-- Describe with sufficient detail to reproduce from a clean Zed install.
- Any code must be sufficient to reproduce (include context!)
- Include code as text, not just as a screenshot.
- Issues with insufficient detail may be summarily closed.
-->

Just have a question or need support? Welcome to [Discord Support Forums](https://discord.com/invite/zedindustries).
- type: textarea
attributes:
label: Reproduction steps
description: A step-by-step description of how to reproduce the bug from a **clean Zed install**. The more context you provide, the easier it is to find and fix the problem fast.
placeholder: |
1. Start Zed
2. Click X
validations:
required: true
- type: textarea
attributes:
label: Current vs. Expected behavior
description: |
Current behavior (screenshots, videos, etc. are appreciated), vs. what you expected the behavior to be.
DESCRIPTION_HERE

placeholder: |
Current behavior: <screenshot with an arrow> The icon is blue. Expected behavior: The icon should be red because this is what the setting is documented to do.
validations:
required: true
- type: textarea
id: environment
attributes:
label: Zed version and system specs
description: |
Open the command palette in Zed, then type “zed: copy system specs into clipboard”.
placeholder: |
Zed: v0.215.0 (Zed Nightly bfe141ea79aa4984028934067ba75c48d99136ae)
OS: macOS 15.1
Memory: 36 GiB
Architecture: aarch64
validations:
required: true
- type: textarea
attributes:
label: Attach Zed log file
description: |
Open the command palette in Zed, then type `zed: open log` to see the last 1000 lines. Or type `zed: reveal log in file manager` in the command palette to reveal the log file itself.
value: |
<details><summary>Zed.log</summary>
Steps to reproduce:
1.
2.
3.
4.

<!-- Paste your log inside the code block. -->
```log
**Expected Behavior**:
**Actual Behavior**:

```
<!-- Before Submitting, did you:
1. Include settings.json, keymap.json, .editorconfig if relevant?
2. Check your Zed.log for relevant errors? (please include!)
3. Click Preview to ensure everything looks right?
4. Hide videos, large images and logs in ``` inside collapsible blocks:

</details>
validations:
required: false
- type: textarea
attributes:
label: Relevant Zed settings
description: |
Open the command palette in Zed, then type “zed: open settings file” and copy/paste any relevant (e.g., LSP-specific) settings.
value: |
<details><summary>settings.json</summary>
<details><summary>click to expand</summary>

<!-- Paste your settings inside the code block. -->
```json

```

</details>
-->

validations:
required: false
required: true

- type: textarea
id: environment
attributes:
label: (for AI issues) Model provider details
label: Zed Version and System Specs
description: |
Open Zed, from the command palette select "zed: copy system specs into clipboard"
placeholder: |
- Provider: (Anthropic via ZedPro, Anthropic via API key, Copilot Chat, Mistral, OpenAI, etc.)
- Model Name: (Claude Sonnet 4.5, Gemini 3 Pro, GPT-5)
- Mode: (Agent Panel, Inline Assistant, Terminal Assistant or Text Threads)
- Other details (ACPs, MCPs, other settings, etc.):
Output of "zed: copy system specs into clipboard"
validations:
required: false
- type: dropdown
attributes:
label: If you are using WSL on Windows, what flavor of Linux are you using?
multiple: false
options:
- Arch Linux
- Ubuntu
- Fedora
- Mint
- Pop!_OS
- NixOS
- Other
required: true

.github/ISSUE_TEMPLATE/11_crash_report.yml (vendored, 53 lines changed)
@@ -1,35 +1,42 @@
name: Report a crash
description: Zed is crashing or freezing or hanging.
type: Crash
labels: "state:needs triage"
name: Crash Report
description: Zed is Crashing or Hanging
type: "Crash"
body:
- type: textarea
attributes:
label: Reproduction steps
description: A step-by-step description of how to reproduce the crash from a **clean Zed install**. The more context you provide, the easier it is to find and fix the problem fast.
label: Summary
description: Summarize the issue with detailed reproduction steps
value: |
<!-- Begin your issue with a one sentence summary -->
SUMMARY_SENTENCE_HERE

### Description
<!-- Include all steps necessary to reproduce from a clean Zed installation. Be verbose -->
Steps to trigger the problem:
1.
2.
3.

Actual Behavior:
Expected Behavior:

validations:
required: true
- type: textarea
id: environment
attributes:
label: Zed Version and System Specs
description: 'Open Zed, and in the command palette select "zed: copy system specs into clipboard"'
placeholder: |
1. Start Zed
2. Perform an action
3. Zed crashes
Output of "zed: copy system specs into clipboard"
validations:
required: true
- type: textarea
attributes:
label: Zed version and system specs
label: If applicable, attach your `Zed.log` file to this issue.
description: |
Open the command palette in Zed, then type “zed: copy system specs into clipboard”.
placeholder: |
Zed: v0.215.0 (Zed Nightly bfe141ea79aa4984028934067ba75c48d99136ae)
OS: macOS 15.1
Memory: 36 GiB
Architecture: aarch64
validations:
required: true
- type: textarea
attributes:
label: Attach Zed log file
description: |
Open the command palette in Zed, then type `zed: open log` to see the last 1000 lines. Or type `zed: reveal log in file manager` in the command palette to reveal the log file itself.
From the command palette, run `zed: open log` to see the last 1000 lines.
Or run `zed: reveal log in file manager` to reveal the log file itself.
value: |
<details><summary>Zed.log</summary>

.github/ISSUE_TEMPLATE/99_other.yml (vendored, new file, 19 lines)
@@ -0,0 +1,19 @@
name: Other [Staff Only]
description: Zed Staff Only
body:
  - type: textarea
    attributes:
      label: Summary
      value: |
        <!-- Please insert a one line summary of the issue below -->
        SUMMARY_SENTENCE_HERE

        ### Description

        IF YOU DO NOT WORK FOR ZED INDUSTRIES DO NOT CREATE ISSUES WITH THIS TEMPLATE.
        THEY WILL BE AUTO-CLOSED AND MAY RESULT IN YOU BEING BANNED FROM THE ZED ISSUE TRACKER.

        FEATURE REQUESTS / SUPPORT REQUESTS SHOULD BE OPENED AS DISCUSSIONS:
        https://github.com/zed-industries/zed/discussions/new/choose
    validations:
      required: true

.github/ISSUE_TEMPLATE/config.yml (vendored, 12 lines changed)
@@ -1,9 +1,9 @@
# yaml-language-server: $schema=https://www.schemastore.org/github-issue-config.json
# yaml-language-server: $schema=https://json.schemastore.org/github-issue-config.json
blank_issues_enabled: false
contact_links:
- name: Feature request
- name: Feature Request
url: https://github.com/zed-industries/zed/discussions/new/choose
about: To request a feature, open a new discussion under one of the appropriate categories.
- name: Our Discord community
url: https://discord.com/invite/zedindustries
about: Join our Discord server for real-time discussion and user support.
about: To request a feature, open a new Discussion in one of the appropriate Discussion categories
- name: "Zed Discord"
url: https://zed.dev/community-links
about: Real-time discussion and user support

.github/actions/run_tests/action.yml (vendored, 6 lines changed)
@@ -4,8 +4,10 @@ description: "Runs the tests"
runs:
using: "composite"
steps:
- name: Install nextest
uses: taiki-e/install-action@nextest
- name: Install Rust
shell: bash -euxo pipefail {0}
run: |
cargo install cargo-nextest --locked

- name: Install Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4

.github/actions/run_tests_windows/action.yml (vendored, 3 lines changed)
@@ -11,8 +11,9 @@ runs:
using: "composite"
steps:
- name: Install test runner
shell: powershell
working-directory: ${{ inputs.working-directory }}
uses: taiki-e/install-action@nextest
run: cargo install cargo-nextest --locked

- name: Install Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4

.github/workflows/after_release.yml (vendored, 50 lines changed)
@@ -6,22 +6,8 @@ on:
types:
- published
jobs:
rebuild_releases_page:
if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions')
runs-on: namespace-profile-2x4-ubuntu-2404
steps:
- name: after_release::rebuild_releases_page::refresh_cloud_releases
run: curl -fX POST https://cloud.zed.dev/releases/refresh?expect_tag=${{ github.event.release.tag_name }}
shell: bash -euxo pipefail {0}
- name: after_release::rebuild_releases_page::redeploy_zed_dev
run: npm exec --yes -- vercel@37 --token="$VERCEL_TOKEN" --scope zed-industries redeploy https://zed.dev
shell: bash -euxo pipefail {0}
env:
VERCEL_TOKEN: ${{ secrets.VERCEL_TOKEN }}
post_to_discord:
needs:
- rebuild_releases_page
if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions')
if: github.repository_owner == 'zed-industries'
runs-on: namespace-profile-2x4-ubuntu-2404
steps:
- id: get-release-url
@@ -51,19 +37,19 @@
webhook-url: ${{ secrets.DISCORD_WEBHOOK_RELEASE_NOTES }}
content: ${{ steps.get-content.outputs.string }}
publish_winget:
runs-on: self-32vcpu-windows-2022
runs-on: namespace-profile-2x4-ubuntu-2404
steps:
- id: set-package-name
name: after_release::publish_winget::set_package_name
run: |
if ("${{ github.event.release.prerelease }}" -eq "true") {
$PACKAGE_NAME = "ZedIndustries.Zed.Preview"
} else {
$PACKAGE_NAME = "ZedIndustries.Zed"
}
if [ "${{ github.event.release.prerelease }}" == "true" ]; then
PACKAGE_NAME=ZedIndustries.Zed.Preview
else
PACKAGE_NAME=ZedIndustries.Zed
fi

echo "PACKAGE_NAME=$PACKAGE_NAME" >> $env:GITHUB_OUTPUT
shell: pwsh
echo "PACKAGE_NAME=$PACKAGE_NAME" >> "$GITHUB_OUTPUT"
shell: bash -euxo pipefail {0}
- name: after_release::publish_winget::winget_releaser
uses: vedantmgoyal9/winget-releaser@19e706d4c9121098010096f9c495a70a7518b30f
with:
@@ -71,7 +57,7 @@
max-versions-to-keep: 5
token: ${{ secrets.WINGET_TOKEN }}
create_sentry_release:
if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions')
if: github.repository_owner == 'zed-industries'
runs-on: namespace-profile-2x4-ubuntu-2404
steps:
- name: steps::checkout_repo
@@ -86,19 +72,3 @@
SENTRY_ORG: zed-dev
SENTRY_PROJECT: zed
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
notify_on_failure:
needs:
- rebuild_releases_page
- post_to_discord
- publish_winget
- create_sentry_release
if: failure()
runs-on: namespace-profile-2x4-ubuntu-2404
steps:
- name: release::notify_on_failure::notify_slack
run: |-
curl -X POST -H 'Content-type: application/json'\
--data '{"text":"${{ github.workflow }} failed: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"}' "$SLACK_WEBHOOK"
shell: bash -euxo pipefail {0}
env:
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_WORKFLOW_FAILURES }}

.github/workflows/bump_patch_version.yml (vendored, 2 lines changed)
@@ -42,7 +42,7 @@ jobs:
exit 1
;;
esac
which cargo-set-version > /dev/null || cargo install cargo-edit -f --no-default-features --features "set-version"
which cargo-set-version > /dev/null || cargo install cargo-edit
output="$(cargo set-version -p zed --bump patch 2>&1 | sed 's/.* //')"
export GIT_COMMITTER_NAME="Zed Bot"
export GIT_COMMITTER_EMAIL="hi@zed.dev"

.github/workflows/cherry_pick.yml (vendored, 5 lines changed)
@@ -1,7 +1,6 @@
# Generated from xtask::workflows::cherry_pick
# Rebuild with `cargo xtask workflows`.
name: cherry_pick
run-name: 'cherry_pick to ${{ inputs.channel }} #${{ inputs.pr_number }}'
on:
workflow_dispatch:
inputs:
@@ -17,10 +16,6 @@
description: channel
required: true
type: string
pr_number:
description: pr_number
required: true
type: string
jobs:
run_cherry_pick:
runs-on: namespace-profile-2x4-ubuntu-2404

@@ -13,72 +13,13 @@
steps:
- name: Check if author is a community champion and apply label
uses: actions/github-script@v7
env:
COMMUNITY_CHAMPIONS: |
0x2CA
5brian
5herlocked
abdelq
afgomez
AidanV
akbxr
AlvaroParker
amtoaer
artemevsevev
bajrangCoder
bcomnes
Be-ing
blopker
bnjjj
bobbymannino
CharlesChen0823
chbk
cppcoffee
davewa
ddoemonn
djsauble
errmayank
fantacell
findrakecil
FloppyDisco
gko
huacnlee
imumesh18
jacobtread
jansol
jeffreyguenther
jenslys
jongretar
lemorage
lnay
marcocondrache
marius851000
mikebronner
ognevny
playdohface
RemcoSmitsDev
romaninsh
Simek
someone13574
sourcefrog
suxiaoshao
Takk8IS
thedadams
tidely
timvermeulen
valentinegb
versecafe
vitallium
warrenjokinen
WhySoBad
ya7010
Zertsov
with:
script: |
const communityChampions = process.env.COMMUNITY_CHAMPIONS
const communityChampionBody = `${{ secrets.COMMUNITY_CHAMPIONS }}`;

const communityChampions = communityChampionBody
.split('\n')
.map(handle => handle.trim().toLowerCase())
.filter(handle => handle.length > 0);
.map(handle => handle.trim().toLowerCase());

let author;
if (context.eventName === 'issues') {

@@ -1,7 +1,7 @@
name: "Close Stale Issues"
on:
schedule:
- cron: "0 8 31 DEC *"
- cron: "0 7,9,11 * * 3"
workflow_dispatch:

jobs:
@@ -15,15 +15,14 @@
stale-issue-message: >
Hi there! 👋

We're working to clean up our issue tracker by closing older bugs that might not be relevant anymore. If you are able to reproduce this issue in the latest version of Zed, please let us know by commenting on this issue, and it will be kept open. If you can't reproduce it, feel free to close the issue yourself. Otherwise, it will close automatically in 14 days.
We're working to clean up our issue tracker by closing older issues that might not be relevant anymore. If you are able to reproduce this issue in the latest version of Zed, please let us know by commenting on this issue, and we will keep it open. If you can't reproduce it, feel free to close the issue yourself. Otherwise, we'll close it in 7 days.

Thanks for your help!
close-issue-message: "This issue was closed due to inactivity. If you're still experiencing this problem, please open a new issue with a link to this issue."
days-before-stale: 60
days-before-close: 14
only-issue-types: "Bug,Crash"
days-before-stale: 120
days-before-close: 7
any-of-issue-labels: "bug,panic / crash"
operations-per-run: 1000
ascending: true
enable-statistics: true
stale-issue-label: "stale"
exempt-issue-labels: "never stale"

.github/workflows/compare_perf.yml (vendored, 6 lines changed)
@@ -35,11 +35,9 @@ jobs:
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- name: steps::download_wasi_sdk
run: ./script/download-wasi-sdk
shell: bash -euxo pipefail {0}
- name: compare_perf::run_perf::install_hyperfine
uses: taiki-e/install-action@hyperfine
run: cargo install hyperfine
shell: bash -euxo pipefail {0}
- name: steps::git_checkout
run: git fetch origin ${{ inputs.base }} && git checkout ${{ inputs.base }}
shell: bash -euxo pipefail {0}

.github/workflows/danger.yml (vendored, 2 lines changed)
@@ -12,7 +12,7 @@ on:
- main
jobs:
danger:
if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions')
if: github.repository_owner == 'zed-industries'
runs-on: namespace-profile-2x4-ubuntu-2404
steps:
- name: steps::checkout_repo

.github/workflows/deploy_collab.yml (vendored, 4 lines changed)
@@ -43,7 +43,9 @@ jobs:
fetch-depth: 0

- name: Install cargo nextest
uses: taiki-e/install-action@nextest
shell: bash -euxo pipefail {0}
run: |
cargo install cargo-nextest --locked

- name: Limit target directory size
shell: bash -euxo pipefail {0}

.github/workflows/extension_bump.yml (vendored, 147 lines, file removed)
@@ -1,147 +0,0 @@
# Generated from xtask::workflows::extension_bump
# Rebuild with `cargo xtask workflows`.
name: extension_bump
env:
CARGO_TERM_COLOR: always
RUST_BACKTRACE: '1'
CARGO_INCREMENTAL: '0'
ZED_EXTENSION_CLI_SHA: 7cfce605704d41ca247e3f84804bf323f6c6caaf
on:
workflow_call:
inputs:
bump-type:
description: bump-type
type: string
default: patch
force-bump:
description: force-bump
required: true
type: boolean
secrets:
app-id:
description: The app ID used to create the PR
required: true
app-secret:
description: The app secret for the corresponding app ID
required: true
jobs:
check_bump_needed:
if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions')
runs-on: namespace-profile-2x4-ubuntu-2404
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
fetch-depth: 0
- id: compare-versions-check
name: extension_bump::compare_versions
run: |
CURRENT_VERSION="$(sed -n 's/version = \"\(.*\)\"/\1/p' < extension.toml)"
PR_PARENT_SHA="${{ github.event.pull_request.head.sha }}"

if [[ -n "$PR_PARENT_SHA" ]]; then
git checkout "$PR_PARENT_SHA"
elif BRANCH_PARENT_SHA="$(git merge-base origin/main origin/zed-zippy-autobump)"; then
git checkout "$BRANCH_PARENT_SHA"
else
git checkout "$(git log -1 --format=%H)"~1
fi

PARENT_COMMIT_VERSION="$(sed -n 's/version = \"\(.*\)\"/\1/p' < extension.toml)"

[[ "$CURRENT_VERSION" == "$PARENT_COMMIT_VERSION" ]] && \
echo "needs_bump=true" >> "$GITHUB_OUTPUT" || \
echo "needs_bump=false" >> "$GITHUB_OUTPUT"

echo "current_version=${CURRENT_VERSION}" >> "$GITHUB_OUTPUT"
shell: bash -euxo pipefail {0}
outputs:
needs_bump: ${{ steps.compare-versions-check.outputs.needs_bump }}
current_version: ${{ steps.compare-versions-check.outputs.current_version }}
timeout-minutes: 1
bump_extension_version:
needs:
- check_bump_needed
if: |-
(github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') &&
(inputs.force-bump == 'true' || needs.check_bump_needed.outputs.needs_bump == 'true')
runs-on: namespace-profile-8x16-ubuntu-2204
steps:
- id: generate-token
name: extension_bump::generate_token
uses: actions/create-github-app-token@v2
with:
app-id: ${{ secrets.app-id }}
private-key: ${{ secrets.app-secret }}
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: extension_bump::install_bump_2_version
run: pip install bump2version
shell: bash -euxo pipefail {0}
- id: bump-version
name: extension_bump::bump_version
run: |
OLD_VERSION="${{ needs.check_bump_needed.outputs.current_version }}"

BUMP_FILES=("extension.toml")
if [[ -f "Cargo.toml" ]]; then
BUMP_FILES+=("Cargo.toml")
fi

bump2version --verbose --current-version "$OLD_VERSION" --no-configured-files ${{ inputs.bump-type }} "${BUMP_FILES[@]}"

if [[ -f "Cargo.toml" ]]; then
cargo update --workspace
fi

NEW_VERSION="$(sed -n 's/version = \"\(.*\)\"/\1/p' < extension.toml)"

echo "new_version=${NEW_VERSION}" >> "$GITHUB_OUTPUT"
shell: bash -euxo pipefail {0}
- name: extension_bump::create_pull_request
uses: peter-evans/create-pull-request@v7
with:
title: Bump version to ${{ steps.bump-version.outputs.new_version }}
body: This PR bumps the version of this extension to v${{ steps.bump-version.outputs.new_version }}
commit-message: Bump version to v${{ steps.bump-version.outputs.new_version }}
branch: zed-zippy-autobump
committer: zed-zippy[bot] <234243425+zed-zippy[bot]@users.noreply.github.com>
base: main
delete-branch: true
token: ${{ steps.generate-token.outputs.token }}
sign-commits: true
timeout-minutes: 1
create_version_label:
needs:
- check_bump_needed
if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') && github.event_name == 'push' && github.ref == 'refs/heads/main' && needs.check_bump_needed.outputs.needs_bump == 'false'
runs-on: namespace-profile-8x16-ubuntu-2204
steps:
- id: generate-token
name: extension_bump::generate_token
uses: actions/create-github-app-token@v2
with:
app-id: ${{ secrets.app-id }}
private-key: ${{ secrets.app-secret }}
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: extension_bump::create_version_tag
uses: actions/github-script@v7
with:
script: |-
github.rest.git.createRef({
owner: context.repo.owner,
repo: context.repo.repo,
ref: 'refs/tags/v${{ needs.check_bump_needed.outputs.current_version }}',
sha: context.sha
})
github-token: ${{ steps.generate-token.outputs.token }}
timeout-minutes: 1
concurrency:
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
cancel-in-progress: true

.github/workflows/extension_release.yml (vendored, 43 lines, file removed)
@@ -1,43 +0,0 @@
# Generated from xtask::workflows::extension_release
# Rebuild with `cargo xtask workflows`.
name: extension_release
on:
workflow_call:
secrets:
app-id:
description: The app ID used to create the PR
required: true
app-secret:
description: The app secret for the corresponding app ID
required: true
jobs:
create_release:
if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions')
runs-on: namespace-profile-8x16-ubuntu-2204
steps:
- id: generate-token
name: extension_bump::generate_token
uses: actions/create-github-app-token@v2
with:
app-id: ${{ secrets.app-id }}
private-key: ${{ secrets.app-secret }}
owner: zed-industries
repositories: extensions
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- id: get-extension-id
name: extension_release::get_extension_id
run: |
EXTENSION_ID="$(sed -n 's/id = \"\(.*\)\"/\1/p' < extension.toml)"

echo "extension_id=${EXTENSION_ID}" >> "$GITHUB_OUTPUT"
shell: bash -euxo pipefail {0}
- name: extension_release::release_action
uses: huacnlee/zed-extension-action@v2
with:
extension-name: ${{ steps.get-extension-id.outputs.extension_id }}
push-to: zed-industries/extensions
env:
COMMITTER_TOKEN: ${{ steps.generate-token.outputs.token }}

.github/workflows/extension_tests.yml (vendored, 133 lines, file removed)
@@ -1,133 +0,0 @@
# Generated from xtask::workflows::extension_tests
# Rebuild with `cargo xtask workflows`.
name: extension_tests
env:
CARGO_TERM_COLOR: always
RUST_BACKTRACE: '1'
CARGO_INCREMENTAL: '0'
ZED_EXTENSION_CLI_SHA: 7cfce605704d41ca247e3f84804bf323f6c6caaf
on:
workflow_call: {}
jobs:
orchestrate:
if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions')
runs-on: namespace-profile-2x4-ubuntu-2404
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
fetch-depth: ${{ github.ref == 'refs/heads/main' && 2 || 350 }}
- id: filter
name: filter
run: |
if [ -z "$GITHUB_BASE_REF" ]; then
echo "Not in a PR context (i.e., push to main/stable/preview)"
COMPARE_REV="$(git rev-parse HEAD~1)"
else
echo "In a PR context comparing to pull_request.base.ref"
git fetch origin "$GITHUB_BASE_REF" --depth=350
COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)"
fi
CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" ${{ github.sha }})"

check_pattern() {
local output_name="$1"
local pattern="$2"
local grep_arg="$3"

echo "$CHANGED_FILES" | grep "$grep_arg" "$pattern" && \
echo "${output_name}=true" >> "$GITHUB_OUTPUT" || \
echo "${output_name}=false" >> "$GITHUB_OUTPUT"
}

check_pattern "check_rust" '^(Cargo.lock|Cargo.toml|.*\.rs)$' -qP
check_pattern "check_extension" '^.*\.scm$' -qP
shell: bash -euxo pipefail {0}
outputs:
check_rust: ${{ steps.filter.outputs.check_rust }}
check_extension: ${{ steps.filter.outputs.check_extension }}
check_rust:
needs:
- orchestrate
if: needs.orchestrate.outputs.check_rust == 'true'
runs-on: namespace-profile-16x32-ubuntu-2204
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: steps::cache_rust_dependencies_namespace
uses: namespacelabs/nscloud-cache-action@v1
with:
cache: rust
- name: steps::cargo_fmt
run: cargo fmt --all -- --check
shell: bash -euxo pipefail {0}
- name: extension_tests::run_clippy
run: cargo clippy --release --all-targets --all-features -- --deny warnings
shell: bash -euxo pipefail {0}
- name: steps::cargo_install_nextest
uses: taiki-e/install-action@nextest
- name: steps::cargo_nextest
run: cargo nextest run --workspace --no-fail-fast
shell: bash -euxo pipefail {0}
env:
NEXTEST_NO_TESTS: warn
timeout-minutes: 3
check_extension:
needs:
- orchestrate
if: needs.orchestrate.outputs.check_extension == 'true'
runs-on: namespace-profile-2x4-ubuntu-2404
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- id: cache-zed-extension-cli
name: extension_tests::cache_zed_extension_cli
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830
with:
path: zed-extension
key: zed-extension-${{ env.ZED_EXTENSION_CLI_SHA }}
- name: extension_tests::download_zed_extension_cli
if: steps.cache-zed-extension-cli.outputs.cache-hit != 'true'
run: |
wget --quiet "https://zed-extension-cli.nyc3.digitaloceanspaces.com/$ZED_EXTENSION_CLI_SHA/x86_64-unknown-linux-gnu/zed-extension"
chmod +x zed-extension
shell: bash -euxo pipefail {0}
- name: extension_tests::check
run: |
mkdir -p /tmp/ext-scratch
mkdir -p /tmp/ext-output
./zed-extension --source-dir . --scratch-dir /tmp/ext-scratch --output-dir /tmp/ext-output
shell: bash -euxo pipefail {0}
timeout-minutes: 2
tests_pass:
needs:
- orchestrate
- check_rust
- check_extension
if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') && always()
runs-on: namespace-profile-2x4-ubuntu-2404
steps:
- name: run_tests::tests_pass
run: |
set +x
EXIT_CODE=0

check_result() {
echo "* $1: $2"
if [[ "$2" != "skipped" && "$2" != "success" ]]; then EXIT_CODE=1; fi
}

check_result "orchestrate" "${{ needs.orchestrate.result }}"
check_result "check_rust" "${{ needs.check_rust.result }}"
check_result "check_extension" "${{ needs.check_extension.result }}"

exit $EXIT_CODE
shell: bash -euxo pipefail {0}
concurrency:
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
cancel-in-progress: true

.github/workflows/release.yml (vendored, 56 lines changed)
@@ -10,7 +10,7 @@ on:
- v*
jobs:
run_tests_mac:
if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions')
if: github.repository_owner == 'zed-industries'
runs-on: self-mini-macos
steps:
- name: steps::checkout_repo
@@ -29,11 +29,14 @@
- name: steps::clippy
run: ./script/clippy
shell: bash -euxo pipefail {0}
- name: steps::cargo_install_nextest
run: cargo install cargo-nextest --locked
shell: bash -euxo pipefail {0}
- name: steps::clear_target_dir_if_large
run: ./script/clear-target-dir-if-larger-than 300
shell: bash -euxo pipefail {0}
- name: steps::cargo_nextest
run: cargo nextest run --workspace --no-fail-fast
run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final
shell: bash -euxo pipefail {0}
- name: steps::cleanup_cargo_config
if: always()
@@ -42,7 +45,7 @@
shell: bash -euxo pipefail {0}
timeout-minutes: 60
run_tests_linux:
if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions')
if: github.repository_owner == 'zed-industries'
runs-on: namespace-profile-16x32-ubuntu-2204
steps:
- name: steps::checkout_repo
@@ -54,19 +57,16 @@
mkdir -p ./../.cargo
cp ./.cargo/ci-config.toml ./../.cargo/config.toml
shell: bash -euxo pipefail {0}
- name: steps::cache_rust_dependencies_namespace
uses: namespacelabs/nscloud-cache-action@v1
with:
cache: rust
- name: steps::setup_linux
run: ./script/linux
shell: bash -euxo pipefail {0}
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- name: steps::download_wasi_sdk
run: ./script/download-wasi-sdk
shell: bash -euxo pipefail {0}
- name: steps::cache_rust_dependencies_namespace
uses: namespacelabs/nscloud-cache-action@v1
with:
cache: rust
- name: steps::setup_node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
with:
@@ -75,12 +75,13 @@
run: ./script/clippy
shell: bash -euxo pipefail {0}
- name: steps::cargo_install_nextest
uses: taiki-e/install-action@nextest
run: cargo install cargo-nextest --locked
shell: bash -euxo pipefail {0}
- name: steps::clear_target_dir_if_large
run: ./script/clear-target-dir-if-larger-than 250
shell: bash -euxo pipefail {0}
- name: steps::cargo_nextest
run: cargo nextest run --workspace --no-fail-fast
run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final
shell: bash -euxo pipefail {0}
- name: steps::cleanup_cargo_config
if: always()
@@ -89,7 +90,7 @@
shell: bash -euxo pipefail {0}
timeout-minutes: 60
run_tests_windows:
if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions')
if: github.repository_owner == 'zed-industries'
runs-on: self-32vcpu-windows-2022
steps:
- name: steps::checkout_repo
@@ -108,11 +109,14 @@
- name: steps::clippy
run: ./script/clippy.ps1
shell: pwsh
- name: steps::cargo_install_nextest
run: cargo install cargo-nextest --locked
shell: pwsh
- name: steps::clear_target_dir_if_large
run: ./script/clear-target-dir-if-larger-than.ps1 250
shell: pwsh
- name: steps::cargo_nextest
run: cargo nextest run --workspace --no-fail-fast
run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final
shell: pwsh
- name: steps::cleanup_cargo_config
if: always()
@@ -121,7 +125,7 @@
shell: pwsh
timeout-minutes: 60
check_scripts:
if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions')
if: github.repository_owner == 'zed-industries'
runs-on: namespace-profile-2x4-ubuntu-2404
steps:
- name: steps::checkout_repo
@@ -150,7 +154,7 @@
shell: bash -euxo pipefail {0}
timeout-minutes: 60
create_draft_release:
if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions')
if: github.repository_owner == 'zed-industries'
runs-on: namespace-profile-2x4-ubuntu-2404
steps:
- name: steps::checkout_repo
@@ -198,9 +202,6 @@
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- name: steps::download_wasi_sdk
run: ./script/download-wasi-sdk
shell: bash -euxo pipefail {0}
- name: ./script/bundle-linux
run: ./script/bundle-linux
shell: bash -euxo pipefail {0}
@@ -241,9 +242,6 @@
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- name: steps::download_wasi_sdk
run: ./script/download-wasi-sdk
shell: bash -euxo pipefail {0}
- name: ./script/bundle-linux
run: ./script/bundle-linux
shell: bash -euxo pipefail {0}
@@ -477,20 +475,6 @@
shell: bash -euxo pipefail {0}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
notify_on_failure:
needs:
- upload_release_assets
- auto_release_preview
if: failure()
runs-on: namespace-profile-2x4-ubuntu-2404
steps:
- name: release::notify_on_failure::notify_slack
run: |-
curl -X POST -H 'Content-type: application/json'\
--data '{"text":"${{ github.workflow }} failed: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"}' "$SLACK_WEBHOOK"
shell: bash -euxo pipefail {0}
env:
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_WORKFLOW_FAILURES }}
concurrency:
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
cancel-in-progress: true

.github/workflows/release_nightly.yml (vendored, 39 lines changed)
@@ -12,7 +12,7 @@ on:
- cron: 0 7 * * *
jobs:
check_style:
if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions')
if: github.repository_owner == 'zed-industries'
runs-on: self-mini-macos
steps:
- name: steps::checkout_repo
@@ -28,7 +28,7 @@
shell: bash -euxo pipefail {0}
timeout-minutes: 60
run_tests_windows:
if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions')
if: github.repository_owner == 'zed-industries'
runs-on: self-32vcpu-windows-2022
steps:
- name: steps::checkout_repo
@@ -47,11 +47,14 @@
- name: steps::clippy
run: ./script/clippy.ps1
shell: pwsh
- name: steps::cargo_install_nextest
run: cargo install cargo-nextest --locked
shell: pwsh
- name: steps::clear_target_dir_if_large
run: ./script/clear-target-dir-if-larger-than.ps1 250
shell: pwsh
- name: steps::cargo_nextest
run: cargo nextest run --workspace --no-fail-fast
run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final
shell: pwsh
- name: steps::cleanup_cargo_config
if: always()
@@ -90,9 +93,6 @@
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- name: steps::download_wasi_sdk
run: ./script/download-wasi-sdk
shell: bash -euxo pipefail {0}
- name: ./script/bundle-linux
run: ./script/bundle-linux
shell: bash -euxo pipefail {0}
@@ -140,9 +140,6 @@
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- name: steps::download_wasi_sdk
run: ./script/download-wasi-sdk
shell: bash -euxo pipefail {0}
- name: ./script/bundle-linux
run: ./script/bundle-linux
shell: bash -euxo pipefail {0}
@@ -361,7 +358,7 @@
needs:
- check_style
- run_tests_windows
if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions')
if: github.repository_owner == 'zed-industries'
runs-on: namespace-profile-32x64-ubuntu-2004
env:
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
@@ -392,7 +389,7 @@
needs:
- check_style
- run_tests_windows
if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions')
if: github.repository_owner == 'zed-industries'
runs-on: self-mini-macos
env:
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
@@ -434,7 +431,7 @@
- bundle_mac_x86_64
- bundle_windows_aarch64
- bundle_windows_x86_64
if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions')
if: github.repository_owner == 'zed-industries'
runs-on: namespace-profile-4x8-ubuntu-2204
steps:
- name: steps::checkout_repo
@@ -490,21 +487,3 @@
SENTRY_PROJECT: zed
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
timeout-minutes: 60
notify_on_failure:
needs:
- bundle_linux_aarch64
- bundle_linux_x86_64
- bundle_mac_aarch64
- bundle_mac_x86_64
- bundle_windows_aarch64
- bundle_windows_x86_64
if: failure()
runs-on: namespace-profile-2x4-ubuntu-2404
steps:
- name: release::notify_on_failure::notify_slack
run: |-
curl -X POST -H 'Content-type: application/json'\
--data '{"text":"${{ github.workflow }} failed: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"}' "$SLACK_WEBHOOK"
shell: bash -euxo pipefail {0}
env:
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_WORKFLOW_FAILURES }}

.github/workflows/run_agent_evals.yml (vendored, 35 lines changed)
@@ -6,21 +6,24 @@ env:
CARGO_INCREMENTAL: '0'
RUST_BACKTRACE: '1'
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
GOOGLE_AI_API_KEY: ${{ secrets.GOOGLE_AI_API_KEY }}
GOOGLE_CLOUD_PROJECT: ${{ secrets.GOOGLE_CLOUD_PROJECT }}
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
ZED_EVAL_TELEMETRY: '1'
MODEL_NAME: ${{ inputs.model_name }}
on:
workflow_dispatch:
inputs:
model_name:
description: model_name
required: true
type: string
pull_request:
types:
- synchronize
- reopened
- labeled
branches:
- '**'
schedule:
- cron: 0 0 * * *
workflow_dispatch: {}
jobs:
agent_evals:
if: |
github.repository_owner == 'zed-industries' &&
(github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'run-eval'))
runs-on: namespace-profile-16x32-ubuntu-2204
steps:
- name: steps::checkout_repo
@@ -37,9 +40,6 @@
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- name: steps::download_wasi_sdk
run: ./script/download-wasi-sdk
shell: bash -euxo pipefail {0}
- name: steps::setup_cargo_config
run: |
mkdir -p ./../.cargo
@@ -49,19 +49,14 @@
run: cargo build --package=eval
shell: bash -euxo pipefail {0}
- name: run_agent_evals::agent_evals::run_eval
run: cargo run --package=eval -- --repetitions=8 --concurrency=1 --model "${MODEL_NAME}"
run: cargo run --package=eval -- --repetitions=8 --concurrency=1
shell: bash -euxo pipefail {0}
env:
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
GOOGLE_AI_API_KEY: ${{ secrets.GOOGLE_AI_API_KEY }}
GOOGLE_CLOUD_PROJECT: ${{ secrets.GOOGLE_CLOUD_PROJECT }}
- name: steps::cleanup_cargo_config
if: always()
run: |
rm -rf ./../.cargo
shell: bash -euxo pipefail {0}
timeout-minutes: 600
timeout-minutes: 60
concurrency:
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
cancel-in-progress: true

.github/workflows/run_bundling.yml (vendored, 18 lines changed)
@@ -13,7 +13,7 @@ jobs:
bundle_linux_aarch64:
if: |-
(github.event.action == 'labeled' && github.event.label.name == 'run-bundling') ||
(github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling'))
(github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling'))
runs-on: namespace-profile-8x32-ubuntu-2004-arm-m4
env:
CARGO_INCREMENTAL: 0
@@ -34,9 +34,6 @@
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- name: steps::download_wasi_sdk
run: ./script/download-wasi-sdk
shell: bash -euxo pipefail {0}
- name: ./script/bundle-linux
run: ./script/bundle-linux
shell: bash -euxo pipefail {0}
@@ -56,7 +53,7 @@
bundle_linux_x86_64:
if: |-
(github.event.action == 'labeled' && github.event.label.name == 'run-bundling') ||
(github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling'))
(github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling'))
runs-on: namespace-profile-32x64-ubuntu-2004
env:
CARGO_INCREMENTAL: 0
@@ -77,9 +74,6 @@
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- name: steps::download_wasi_sdk
run: ./script/download-wasi-sdk
shell: bash -euxo pipefail {0}
- name: ./script/bundle-linux
run: ./script/bundle-linux
shell: bash -euxo pipefail {0}
@@ -99,7 +93,7 @@
bundle_mac_aarch64:
if: |-
(github.event.action == 'labeled' && github.event.label.name == 'run-bundling') ||
(github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling'))
(github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling'))
runs-on: self-mini-macos
env:
CARGO_INCREMENTAL: 0
@@ -145,7 +139,7 @@
bundle_mac_x86_64:
if: |-
(github.event.action == 'labeled' && github.event.label.name == 'run-bundling') ||
(github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling'))
(github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling'))
runs-on: self-mini-macos
env:
CARGO_INCREMENTAL: 0
@@ -191,7 +185,7 @@
bundle_windows_aarch64:
if: |-
(github.event.action == 'labeled' && github.event.label.name == 'run-bundling') ||
(github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling'))
(github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling'))
runs-on: self-32vcpu-windows-2022
env:
CARGO_INCREMENTAL: 0
@@ -229,7 +223,7 @@
bundle_windows_x86_64:
if: |-
(github.event.action == 'labeled' && github.event.label.name == 'run-bundling') ||
(github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling'))
(github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling'))
runs-on: self-32vcpu-windows-2022
env:
CARGO_INCREMENTAL: 0

.github/workflows/run_cron_unit_evals.yml (vendored, 77 lines, file removed)
@@ -1,77 +0,0 @@
# Generated from xtask::workflows::run_cron_unit_evals
# Rebuild with `cargo xtask workflows`.
name: run_cron_unit_evals
env:
CARGO_TERM_COLOR: always
CARGO_INCREMENTAL: '0'
RUST_BACKTRACE: '1'
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
on:
schedule:
- cron: 47 1 * * 2
workflow_dispatch: {}
jobs:
cron_unit_evals:
runs-on: namespace-profile-16x32-ubuntu-2204
strategy:
matrix:
model:
- anthropic/claude-sonnet-4-5-latest
- anthropic/claude-opus-4-5-latest
- google/gemini-3-pro
- openai/gpt-5
fail-fast: false
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: steps::setup_cargo_config
run: |
mkdir -p ./../.cargo
cp ./.cargo/ci-config.toml ./../.cargo/config.toml
shell: bash -euxo pipefail {0}
- name: steps::cache_rust_dependencies_namespace
uses: namespacelabs/nscloud-cache-action@v1
with:
cache: rust
- name: steps::setup_linux
run: ./script/linux
shell: bash -euxo pipefail {0}
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- name: steps::download_wasi_sdk
run: ./script/download-wasi-sdk
shell: bash -euxo pipefail {0}
- name: steps::cargo_install_nextest
uses: taiki-e/install-action@nextest
- name: steps::clear_target_dir_if_large
run: ./script/clear-target-dir-if-larger-than 250
shell: bash -euxo pipefail {0}
- name: ./script/run-unit-evals
run: ./script/run-unit-evals
shell: bash -euxo pipefail {0}
env:
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
GOOGLE_AI_API_KEY: ${{ secrets.GOOGLE_AI_API_KEY }}
GOOGLE_CLOUD_PROJECT: ${{ secrets.GOOGLE_CLOUD_PROJECT }}
ZED_AGENT_MODEL: ${{ matrix.model }}
- name: steps::cleanup_cargo_config
if: always()
run: |
rm -rf ./../.cargo
shell: bash -euxo pipefail {0}
- name: run_agent_evals::cron_unit_evals::send_failure_to_slack
if: ${{ failure() }}
uses: slackapi/slack-github-action@b0fa283ad8fea605de13dc3f449259339835fc52
with:
method: chat.postMessage
token: ${{ secrets.SLACK_APP_ZED_UNIT_EVALS_BOT_TOKEN }}
payload: |
channel: C04UDRNNJFQ
text: "Unit Evals Failed: https://github.com/zed-industries/zed/actions/runs/${{ github.run_id }}"
concurrency:
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
cancel-in-progress: true

127
.github/workflows/run_tests.yml
vendored
@@ -15,7 +15,7 @@ on:
- v[0-9]+.[0-9]+.x
jobs:
orchestrate:
if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions')
if: github.repository_owner == 'zed-industries'
runs-on: namespace-profile-2x4-ubuntu-2404
steps:
- name: steps::checkout_repo
@@ -47,7 +47,7 @@ jobs:
}

check_pattern "run_action_checks" '^\.github/(workflows/|actions/|actionlint.yml)|tooling/xtask|script/' -qP
check_pattern "run_docs" '^(docs/|crates/.*\.rs)' -qP
check_pattern "run_docs" '^docs/' -qP
check_pattern "run_licenses" '^(Cargo.lock|script/.*licenses)' -qP
check_pattern "run_nix" '^(nix/|flake\.|Cargo\.|rust-toolchain.toml|\.cargo/config.toml)' -qP
check_pattern "run_tests" '^(docs/|script/update_top_ranking_issues/|\.github/(ISSUE_TEMPLATE|workflows/(?!run_tests)))' -qvP
@@ -59,7 +59,7 @@ jobs:
run_nix: ${{ steps.filter.outputs.run_nix }}
run_tests: ${{ steps.filter.outputs.run_tests }}
check_style:
if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions')
if: github.repository_owner == 'zed-industries'
runs-on: namespace-profile-4x8-ubuntu-2204
steps:
- name: steps::checkout_repo
@@ -84,7 +84,7 @@ jobs:
run: ./script/check-keymaps
shell: bash -euxo pipefail {0}
- name: run_tests::check_style::check_for_typos
uses: crate-ci/typos@2d0ce569feab1f8752f1dde43cc2f2aa53236e06
uses: crate-ci/typos@80c8a4945eec0f6d464eaf9e65ed98ef085283d1
with:
config: ./typos.toml
- name: steps::cargo_fmt
@@ -113,11 +113,14 @@ jobs:
- name: steps::clippy
run: ./script/clippy.ps1
shell: pwsh
- name: steps::cargo_install_nextest
run: cargo install cargo-nextest --locked
shell: pwsh
- name: steps::clear_target_dir_if_large
run: ./script/clear-target-dir-if-larger-than.ps1 250
shell: pwsh
- name: steps::cargo_nextest
run: cargo nextest run --workspace --no-fail-fast
run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final
shell: pwsh
- name: steps::cleanup_cargo_config
if: always()
@@ -140,19 +143,16 @@ jobs:
|
||||
mkdir -p ./../.cargo
|
||||
cp ./.cargo/ci-config.toml ./../.cargo/config.toml
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::cache_rust_dependencies_namespace
|
||||
uses: namespacelabs/nscloud-cache-action@v1
|
||||
with:
|
||||
cache: rust
|
||||
- name: steps::setup_linux
|
||||
run: ./script/linux
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::install_mold
|
||||
run: ./script/install-mold
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::download_wasi_sdk
|
||||
run: ./script/download-wasi-sdk
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::cache_rust_dependencies_namespace
|
||||
uses: namespacelabs/nscloud-cache-action@v1
|
||||
with:
|
||||
cache: rust
|
||||
- name: steps::setup_node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
|
||||
with:
|
||||
@@ -161,12 +161,13 @@ jobs:
|
||||
run: ./script/clippy
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::cargo_install_nextest
|
||||
uses: taiki-e/install-action@nextest
|
||||
run: cargo install cargo-nextest --locked
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::clear_target_dir_if_large
|
||||
run: ./script/clear-target-dir-if-larger-than 250
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::cargo_nextest
|
||||
run: cargo nextest run --workspace --no-fail-fast
|
||||
run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::cleanup_cargo_config
|
||||
if: always()
|
||||
@@ -196,11 +197,14 @@ jobs:
|
||||
- name: steps::clippy
|
||||
run: ./script/clippy
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::cargo_install_nextest
|
||||
run: cargo install cargo-nextest --locked
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::clear_target_dir_if_large
|
||||
run: ./script/clear-target-dir-if-larger-than 300
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::cargo_nextest
|
||||
run: cargo nextest run --workspace --no-fail-fast
|
||||
run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::cleanup_cargo_config
|
||||
if: always()
|
||||
@@ -228,9 +232,6 @@ jobs:
|
||||
- name: steps::install_mold
|
||||
run: ./script/install-mold
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::download_wasi_sdk
|
||||
run: ./script/download-wasi-sdk
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::setup_cargo_config
|
||||
run: |
|
||||
mkdir -p ./../.cargo
|
||||
@@ -262,19 +263,16 @@ jobs:
|
||||
mkdir -p ./../.cargo
|
||||
cp ./.cargo/ci-config.toml ./../.cargo/config.toml
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::cache_rust_dependencies_namespace
|
||||
uses: namespacelabs/nscloud-cache-action@v1
|
||||
with:
|
||||
cache: rust
|
||||
- name: steps::setup_linux
|
||||
run: ./script/linux
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::install_mold
|
||||
run: ./script/install-mold
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::download_wasi_sdk
|
||||
run: ./script/download-wasi-sdk
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::cache_rust_dependencies_namespace
|
||||
uses: namespacelabs/nscloud-cache-action@v1
|
||||
with:
|
||||
cache: rust
|
||||
- name: cargo build -p collab
|
||||
run: cargo build -p collab
|
||||
shell: bash -euxo pipefail {0}
|
||||
@@ -287,6 +285,40 @@ jobs:
|
||||
rm -rf ./../.cargo
|
||||
shell: bash -euxo pipefail {0}
|
||||
timeout-minutes: 60
|
||||
check_postgres_and_protobuf_migrations:
|
||||
needs:
|
||||
- orchestrate
|
||||
if: needs.orchestrate.outputs.run_tests == 'true'
|
||||
runs-on: self-mini-macos
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: run_tests::check_postgres_and_protobuf_migrations::remove_untracked_files
|
||||
run: git clean -df
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: run_tests::check_postgres_and_protobuf_migrations::ensure_fresh_merge
|
||||
run: |
|
||||
if [ -z "$GITHUB_BASE_REF" ];
|
||||
then
|
||||
echo "BUF_BASE_BRANCH=$(git merge-base origin/main HEAD)" >> "$GITHUB_ENV"
|
||||
else
|
||||
git checkout -B temp
|
||||
git merge -q "origin/$GITHUB_BASE_REF" -m "merge main into temp"
|
||||
echo "BUF_BASE_BRANCH=$GITHUB_BASE_REF" >> "$GITHUB_ENV"
|
||||
fi
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: run_tests::check_postgres_and_protobuf_migrations::bufbuild_setup_action
|
||||
uses: bufbuild/buf-setup-action@v1
|
||||
with:
|
||||
version: v1.29.0
|
||||
- name: run_tests::check_postgres_and_protobuf_migrations::bufbuild_breaking_action
|
||||
uses: bufbuild/buf-breaking-action@v1
|
||||
with:
|
||||
input: crates/proto/proto/
|
||||
against: https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/
|
||||
timeout-minutes: 60
|
||||
check_dependencies:
|
||||
needs:
|
||||
- orchestrate
|
||||
@@ -350,9 +382,6 @@ jobs:
|
||||
- name: steps::install_mold
|
||||
run: ./script/install-mold
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::download_wasi_sdk
|
||||
run: ./script/download-wasi-sdk
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: run_tests::check_docs::install_mdbook
|
||||
uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08
|
||||
with:
|
||||
@@ -489,44 +518,6 @@ jobs:
|
||||
shell: bash -euxo pipefail {0}
|
||||
timeout-minutes: 60
|
||||
continue-on-error: true
|
||||
check_postgres_and_protobuf_migrations:
|
||||
needs:
|
||||
- orchestrate
|
||||
if: needs.orchestrate.outputs.run_tests == 'true'
|
||||
runs-on: namespace-profile-16x32-ubuntu-2204
|
||||
env:
|
||||
GIT_AUTHOR_NAME: Protobuf Action
|
||||
GIT_AUTHOR_EMAIL: ci@zed.dev
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: run_tests::check_postgres_and_protobuf_migrations::remove_untracked_files
|
||||
run: git clean -df
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: run_tests::check_postgres_and_protobuf_migrations::ensure_fresh_merge
|
||||
run: |
|
||||
if [ -z "$GITHUB_BASE_REF" ];
|
||||
then
|
||||
echo "BUF_BASE_BRANCH=$(git merge-base origin/main HEAD)" >> "$GITHUB_ENV"
|
||||
else
|
||||
git checkout -B temp
|
||||
git merge -q "origin/$GITHUB_BASE_REF" -m "merge main into temp"
|
||||
echo "BUF_BASE_BRANCH=$GITHUB_BASE_REF" >> "$GITHUB_ENV"
|
||||
fi
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: run_tests::check_postgres_and_protobuf_migrations::bufbuild_setup_action
|
||||
uses: bufbuild/buf-setup-action@v1
|
||||
with:
|
||||
version: v1.29.0
|
||||
github_token: ${{ secrets.GITHUB_TOKEN }}
|
||||
- name: run_tests::check_postgres_and_protobuf_migrations::bufbuild_breaking_action
|
||||
uses: bufbuild/buf-breaking-action@v1
|
||||
with:
|
||||
input: crates/proto/proto/
|
||||
against: https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/
|
||||
timeout-minutes: 60
|
||||
tests_pass:
needs:
- orchestrate
@@ -536,13 +527,14 @@ jobs:
- run_tests_mac
- doctests
- check_workspace_binaries
- check_postgres_and_protobuf_migrations
- check_dependencies
- check_docs
- check_licenses
- check_scripts
- build_nix_linux_x86_64
- build_nix_mac_aarch64
if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') && always()
if: github.repository_owner == 'zed-industries' && always()
runs-on: namespace-profile-2x4-ubuntu-2404
steps:
- name: run_tests::tests_pass
@@ -562,6 +554,7 @@ jobs:
check_result "run_tests_mac" "${{ needs.run_tests_mac.result }}"
check_result "doctests" "${{ needs.doctests.result }}"
check_result "check_workspace_binaries" "${{ needs.check_workspace_binaries.result }}"
check_result "check_postgres_and_protobuf_migrations" "${{ needs.check_postgres_and_protobuf_migrations.result }}"
check_result "check_dependencies" "${{ needs.check_dependencies.result }}"
check_result "check_docs" "${{ needs.check_docs.result }}"
check_result "check_licenses" "${{ needs.check_licenses.result }}"
42
.github/workflows/run_unit_evals.yml
vendored
@@ -1,26 +1,17 @@
# Generated from xtask::workflows::run_unit_evals
# Generated from xtask::workflows::run_agent_evals
# Rebuild with `cargo xtask workflows`.
name: run_unit_evals
name: run_agent_evals
env:
CARGO_TERM_COLOR: always
CARGO_INCREMENTAL: '0'
RUST_BACKTRACE: '1'
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
ZED_EVAL_TELEMETRY: '1'
MODEL_NAME: ${{ inputs.model_name }}
on:
workflow_dispatch:
inputs:
model_name:
description: model_name
required: true
type: string
commit_sha:
description: commit_sha
required: true
type: string
schedule:
- cron: 47 1 * * 2
workflow_dispatch: {}
jobs:
run_unit_evals:
unit_evals:
runs-on: namespace-profile-16x32-ubuntu-2204
steps:
- name: steps::checkout_repo
@@ -42,11 +33,9 @@ jobs:
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- name: steps::download_wasi_sdk
run: ./script/download-wasi-sdk
shell: bash -euxo pipefail {0}
- name: steps::cargo_install_nextest
uses: taiki-e/install-action@nextest
run: cargo install cargo-nextest --locked
shell: bash -euxo pipefail {0}
- name: steps::clear_target_dir_if_large
run: ./script/clear-target-dir-if-larger-than 250
shell: bash -euxo pipefail {0}
@@ -55,15 +44,20 @@ jobs:
shell: bash -euxo pipefail {0}
env:
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
GOOGLE_AI_API_KEY: ${{ secrets.GOOGLE_AI_API_KEY }}
GOOGLE_CLOUD_PROJECT: ${{ secrets.GOOGLE_CLOUD_PROJECT }}
UNIT_EVAL_COMMIT: ${{ inputs.commit_sha }}
- name: run_agent_evals::unit_evals::send_failure_to_slack
if: ${{ failure() }}
uses: slackapi/slack-github-action@b0fa283ad8fea605de13dc3f449259339835fc52
with:
method: chat.postMessage
token: ${{ secrets.SLACK_APP_ZED_UNIT_EVALS_BOT_TOKEN }}
payload: |
channel: C04UDRNNJFQ
text: "Unit Evals Failed: https://github.com/zed-industries/zed/actions/runs/${{ github.run_id }}"
- name: steps::cleanup_cargo_config
if: always()
run: |
rm -rf ./../.cargo
shell: bash -euxo pipefail {0}
concurrency:
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.run_id }}
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
cancel-in-progress: true
832
Cargo.lock
generated
83
Cargo.toml
@@ -59,7 +59,6 @@ members = [
|
||||
"crates/zeta2_tools",
|
||||
"crates/editor",
|
||||
"crates/eval",
|
||||
"crates/eval_utils",
|
||||
"crates/explorer_command_injector",
|
||||
"crates/extension",
|
||||
"crates/extension_api",
|
||||
@@ -111,7 +110,6 @@ members = [
|
||||
"crates/menu",
|
||||
"crates/migrator",
|
||||
"crates/mistral",
|
||||
"crates/miniprofiler_ui",
|
||||
"crates/multi_buffer",
|
||||
"crates/nc",
|
||||
"crates/net",
|
||||
@@ -128,7 +126,6 @@ members = [
|
||||
"crates/picker",
|
||||
"crates/prettier",
|
||||
"crates/project",
|
||||
"crates/project_benchmarks",
|
||||
"crates/project_panel",
|
||||
"crates/project_symbols",
|
||||
"crates/prompt_store",
|
||||
@@ -148,6 +145,7 @@ members = [
|
||||
"crates/rules_library",
|
||||
"crates/schema_generator",
|
||||
"crates/search",
|
||||
"crates/semantic_version",
|
||||
"crates/session",
|
||||
"crates/settings",
|
||||
"crates/settings_json",
|
||||
@@ -202,6 +200,7 @@ members = [
|
||||
"crates/zed_actions",
|
||||
"crates/zed_env_vars",
|
||||
"crates/zeta",
|
||||
"crates/zeta2",
|
||||
"crates/zeta_cli",
|
||||
"crates/zlog",
|
||||
"crates/zlog_settings",
|
||||
@@ -289,7 +288,6 @@ deepseek = { path = "crates/deepseek" }
|
||||
derive_refineable = { path = "crates/refineable/derive_refineable" }
|
||||
diagnostics = { path = "crates/diagnostics" }
|
||||
editor = { path = "crates/editor" }
|
||||
eval_utils = { path = "crates/eval_utils" }
|
||||
extension = { path = "crates/extension" }
|
||||
extension_host = { path = "crates/extension_host" }
|
||||
extensions_ui = { path = "crates/extensions_ui" }
|
||||
@@ -343,7 +341,6 @@ menu = { path = "crates/menu" }
|
||||
migrator = { path = "crates/migrator" }
|
||||
mistral = { path = "crates/mistral" }
|
||||
multi_buffer = { path = "crates/multi_buffer" }
|
||||
miniprofiler_ui = { path = "crates/miniprofiler_ui" }
|
||||
nc = { path = "crates/nc" }
|
||||
net = { path = "crates/net" }
|
||||
node_runtime = { path = "crates/node_runtime" }
|
||||
@@ -381,6 +378,7 @@ rope = { path = "crates/rope" }
|
||||
rpc = { path = "crates/rpc" }
|
||||
rules_library = { path = "crates/rules_library" }
|
||||
search = { path = "crates/search" }
|
||||
semantic_version = { path = "crates/semantic_version" }
|
||||
session = { path = "crates/session" }
|
||||
settings = { path = "crates/settings" }
|
||||
settings_json = { path = "crates/settings_json" }
|
||||
@@ -434,6 +432,7 @@ zed = { path = "crates/zed" }
|
||||
zed_actions = { path = "crates/zed_actions" }
|
||||
zed_env_vars = { path = "crates/zed_env_vars" }
|
||||
zeta = { path = "crates/zeta" }
|
||||
zeta2 = { path = "crates/zeta2" }
|
||||
zlog = { path = "crates/zlog" }
|
||||
zlog_settings = { path = "crates/zlog_settings" }
|
||||
|
||||
@@ -441,7 +440,7 @@ zlog_settings = { path = "crates/zlog_settings" }
# External crates
#

agent-client-protocol = { version = "=0.8.0", features = ["unstable"] }
agent-client-protocol = { version = "0.7.0", features = ["unstable"] }
aho-corasick = "1.1"
alacritty_terminal = "0.25.1-rc1"
any_vec = "0.14"
@@ -459,7 +458,7 @@ async-tar = "0.5.1"
async-task = "4.7"
async-trait = "0.1"
async-tungstenite = "0.31.0"
async_zip = { version = "0.0.18", features = ["deflate", "deflate64"] }
async_zip = { version = "0.0.17", features = ["deflate", "deflate64"] }
aws-config = { version = "1.6.1", features = ["behavior-version-latest"] }
aws-credential-types = { version = "1.2.2", features = [
"hardcoded-credentials",
@@ -476,7 +475,6 @@ bitflags = "2.6.0"
|
||||
blade-graphics = { version = "0.7.0" }
|
||||
blade-macros = { version = "0.3.0" }
|
||||
blade-util = { version = "0.3.0" }
|
||||
brotli = "8.0.2"
|
||||
bytes = "1.0"
|
||||
cargo_metadata = "0.19"
|
||||
cargo_toml = "0.21"
|
||||
@@ -484,7 +482,7 @@ cfg-if = "1.0.3"
|
||||
chrono = { version = "0.4", features = ["serde"] }
|
||||
ciborium = "0.2"
|
||||
circular-buffer = "1.0"
|
||||
clap = { version = "4.4", features = ["derive", "wrap_help"] }
|
||||
clap = { version = "4.4", features = ["derive"] }
|
||||
cocoa = "=0.26.0"
|
||||
cocoa-foundation = "=0.2.0"
|
||||
convert_case = "0.8.0"
|
||||
@@ -505,12 +503,12 @@ ec4rs = "1.1"
|
||||
emojis = "0.6.1"
|
||||
env_logger = "0.11"
|
||||
exec = "0.3.1"
|
||||
fancy-regex = "0.16.0"
|
||||
fork = "0.4.0"
|
||||
fancy-regex = "0.14.0"
|
||||
fork = "0.2.0"
|
||||
futures = "0.3"
|
||||
futures-batch = "0.6.1"
|
||||
futures-lite = "1.13"
|
||||
gh-workflow = { git = "https://github.com/zed-industries/gh-workflow", rev = "09acfdf2bd5c1d6254abefd609c808ff73547b2c" }
|
||||
gh-workflow = { git = "https://github.com/zed-industries/gh-workflow", rev = "3eaa84abca0778eb54272f45a312cb24f9a0b435" }
|
||||
git2 = { version = "0.20.1", default-features = false }
|
||||
globset = "0.4"
|
||||
handlebars = "4.3"
|
||||
@@ -531,10 +529,10 @@ indoc = "2"
|
||||
inventory = "0.3.19"
|
||||
itertools = "0.14.0"
|
||||
json_dotpath = "1.1"
|
||||
jsonschema = "0.37.0"
|
||||
jsonschema = "0.30.0"
|
||||
jsonwebtoken = "9.3"
|
||||
jupyter-protocol = "0.10.0"
|
||||
jupyter-websocket-client = "0.15.0"
|
||||
jupyter-protocol = { git = "https://github.com/ConradIrwin/runtimed", rev = "7130c804216b6914355d15d0b91ea91f6babd734" }
|
||||
jupyter-websocket-client = { git = "https://github.com/ConradIrwin/runtimed" ,rev = "7130c804216b6914355d15d0b91ea91f6babd734" }
|
||||
libc = "0.2"
|
||||
libsqlite3-sys = { version = "0.30.1", features = ["bundled"] }
|
||||
linkify = "0.10.0"
|
||||
@@ -547,7 +545,7 @@ minidumper = "0.8"
|
||||
moka = { version = "0.12.10", features = ["sync"] }
|
||||
naga = { version = "25.0", features = ["wgsl-in"] }
|
||||
nanoid = "0.4"
|
||||
nbformat = "0.15.0"
|
||||
nbformat = { git = "https://github.com/ConradIrwin/runtimed", rev = "7130c804216b6914355d15d0b91ea91f6babd734" }
|
||||
nix = "0.29"
|
||||
num-format = "0.4.4"
|
||||
num-traits = "0.2"
|
||||
@@ -585,14 +583,14 @@ partial-json-fixer = "0.5.3"
|
||||
parse_int = "0.9"
|
||||
pciid-parser = "0.8.0"
|
||||
pathdiff = "0.2"
|
||||
pet = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da" }
|
||||
pet-conda = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da" }
|
||||
pet-core = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da" }
|
||||
pet-fs = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da" }
|
||||
pet-pixi = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da" }
|
||||
pet-poetry = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da" }
|
||||
pet-reporter = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da" }
|
||||
pet-virtualenv = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da" }
|
||||
pet = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "e97b9508befa0062929da65a01054d25c4be861c" }
|
||||
pet-conda = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "e97b9508befa0062929da65a01054d25c4be861c" }
|
||||
pet-core = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "e97b9508befa0062929da65a01054d25c4be861c" }
|
||||
pet-fs = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "e97b9508befa0062929da65a01054d25c4be861c" }
|
||||
pet-pixi = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "e97b9508befa0062929da65a01054d25c4be861c" }
|
||||
pet-poetry = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "e97b9508befa0062929da65a01054d25c4be861c" }
|
||||
pet-reporter = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "e97b9508befa0062929da65a01054d25c4be861c" }
|
||||
pet-virtualenv = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "e97b9508befa0062929da65a01054d25c4be861c" }
|
||||
portable-pty = "0.9.0"
|
||||
postage = { version = "0.5", features = ["futures-traits"] }
|
||||
pretty_assertions = { version = "1.3.0", features = ["unstable"] }
|
||||
@@ -605,6 +603,7 @@ pulldown-cmark = { version = "0.12.0", default-features = false }
|
||||
quote = "1.0.9"
|
||||
rand = "0.9"
|
||||
rayon = "1.8"
|
||||
ref-cast = "1.0.24"
|
||||
regex = "1.5"
|
||||
# WARNING: If you change this, you must also publish a new version of zed-reqwest to crates.io
|
||||
reqwest = { git = "https://github.com/zed-industries/reqwest.git", rev = "c15662463bda39148ba154100dd44d3fba5873a4", default-features = false, features = [
|
||||
@@ -617,8 +616,8 @@ reqwest = { git = "https://github.com/zed-industries/reqwest.git", rev = "c15662
|
||||
"stream",
|
||||
], package = "zed-reqwest", version = "0.12.15-zed" }
|
||||
rsa = "0.9.6"
|
||||
runtimelib = { version = "0.30.0", default-features = false, features = [
|
||||
"async-dispatcher-runtime", "aws-lc-rs"
|
||||
runtimelib = { git = "https://github.com/ConradIrwin/runtimed", rev = "7130c804216b6914355d15d0b91ea91f6babd734", default-features = false, features = [
|
||||
"async-dispatcher-runtime",
|
||||
] }
|
||||
rust-embed = { version = "8.4", features = ["include-exclude"] }
|
||||
rustc-hash = "2.1.0"
|
||||
@@ -627,9 +626,8 @@ rustls-platform-verifier = "0.5.0"
|
||||
# WARNING: If you change this, you must also publish a new version of zed-scap to crates.io
|
||||
scap = { git = "https://github.com/zed-industries/scap", rev = "4afea48c3b002197176fb19cd0f9b180dd36eaac", default-features = false, package = "zed-scap", version = "0.0.8-zed" }
|
||||
schemars = { version = "1.0", features = ["indexmap2"] }
|
||||
semver = { version = "1.0", features = ["serde"] }
|
||||
semver = "1.0"
|
||||
serde = { version = "1.0.221", features = ["derive", "rc"] }
|
||||
serde_derive = "1.0.221"
|
||||
serde_json = { version = "1.0.144", features = ["preserve_order", "raw_value"] }
|
||||
serde_json_lenient = { version = "0.2", features = [
|
||||
"preserve_order",
|
||||
@@ -638,10 +636,10 @@ serde_json_lenient = { version = "0.2", features = [
|
||||
serde_path_to_error = "0.1.17"
|
||||
serde_repr = "0.1"
|
||||
serde_urlencoded = "0.7"
|
||||
serde_with = "3.4.0"
|
||||
sha2 = "0.10"
|
||||
shellexpand = "2.1.0"
|
||||
shlex = "1.3.0"
|
||||
similar = "2.6"
|
||||
simplelog = "0.12.2"
|
||||
slotmap = "1.0.6"
|
||||
smallvec = { version = "1.6", features = ["union"] }
|
||||
@@ -658,14 +656,13 @@ sysinfo = "0.37.0"
|
||||
take-until = "0.2.0"
|
||||
tempfile = "3.20.0"
|
||||
thiserror = "2.0.12"
|
||||
tiktoken-rs = { git = "https://github.com/zed-industries/tiktoken-rs", rev = "2570c4387a8505fb8f1d3f3557454b474f1e8271" }
|
||||
tiktoken-rs = { git = "https://github.com/zed-industries/tiktoken-rs", rev = "30c32a4522751699adeda0d5840c71c3b75ae73d" }
|
||||
time = { version = "0.3", features = [
|
||||
"macros",
|
||||
"parsing",
|
||||
"serde",
|
||||
"serde-well-known",
|
||||
"formatting",
|
||||
"local-offset",
|
||||
] }
|
||||
tiny_http = "0.8"
|
||||
tokio = { version = "1" }
|
||||
@@ -674,7 +671,7 @@ toml = "0.8"
|
||||
toml_edit = { version = "0.22", default-features = false, features = ["display", "parse", "serde"] }
|
||||
tower-http = "0.4.4"
|
||||
tree-sitter = { version = "0.25.10", features = ["wasm"] }
|
||||
tree-sitter-bash = "0.25.1"
|
||||
tree-sitter-bash = "0.25.0"
|
||||
tree-sitter-c = "0.23"
|
||||
tree-sitter-cpp = { git = "https://github.com/tree-sitter/tree-sitter-cpp", rev = "5cb9b693cfd7bfacab1d9ff4acac1a4150700609" }
|
||||
tree-sitter-css = "0.23"
|
||||
@@ -716,7 +713,6 @@ wasmtime = { version = "29", default-features = false, features = [
|
||||
"parallel-compilation",
|
||||
] }
|
||||
wasmtime-wasi = "29"
|
||||
wax = "0.6"
|
||||
which = "6.0.0"
|
||||
windows-core = "0.61"
|
||||
wit-component = "0.221"
|
||||
@@ -724,7 +720,6 @@ yawc = "0.2.5"
|
||||
zeroize = "1.8"
|
||||
zstd = "0.11"
|
||||
|
||||
|
||||
[workspace.dependencies.windows]
|
||||
version = "0.61"
|
||||
features = [
|
||||
@@ -777,10 +772,9 @@ features = [
|
||||
]
|
||||
|
||||
[patch.crates-io]
|
||||
notify = { git = "https://github.com/zed-industries/notify.git", rev = "b4588b2e5aee68f4c0e100f140e808cbce7b1419" }
|
||||
notify-types = { git = "https://github.com/zed-industries/notify.git", rev = "b4588b2e5aee68f4c0e100f140e808cbce7b1419" }
|
||||
notify = { git = "https://github.com/zed-industries/notify.git", rev = "bbb9ea5ae52b253e095737847e367c30653a2e96" }
|
||||
notify-types = { git = "https://github.com/zed-industries/notify.git", rev = "bbb9ea5ae52b253e095737847e367c30653a2e96" }
|
||||
windows-capture = { git = "https://github.com/zed-industries/windows-capture.git", rev = "f0d6c1b6691db75461b732f6d5ff56eed002eeb9" }
|
||||
calloop = { git = "https://github.com/zed-industries/calloop" }
|
||||
|
||||
[profile.dev]
|
||||
split-debuginfo = "unpacked"
|
||||
@@ -794,19 +788,6 @@ codegen-units = 16
|
||||
codegen-units = 16
|
||||
|
||||
[profile.dev.package]
|
||||
# proc-macros start
|
||||
gpui_macros = { opt-level = 3 }
|
||||
derive_refineable = { opt-level = 3 }
|
||||
settings_macros = { opt-level = 3 }
|
||||
sqlez_macros = { opt-level = 3, codegen-units = 1 }
|
||||
ui_macros = { opt-level = 3 }
|
||||
util_macros = { opt-level = 3 }
|
||||
serde_derive = { opt-level = 3 }
|
||||
quote = { opt-level = 3 }
|
||||
syn = { opt-level = 3 }
|
||||
proc-macro2 = { opt-level = 3 }
|
||||
# proc-macros end
|
||||
|
||||
taffy = { opt-level = 3 }
|
||||
cranelift-codegen = { opt-level = 3 }
|
||||
cranelift-codegen-meta = { opt-level = 3 }
|
||||
@@ -844,9 +825,11 @@ refineable = { codegen-units = 1 }
|
||||
release_channel = { codegen-units = 1 }
|
||||
reqwest_client = { codegen-units = 1 }
|
||||
rich_text = { codegen-units = 1 }
|
||||
semantic_version = { codegen-units = 1 }
|
||||
session = { codegen-units = 1 }
|
||||
snippet = { codegen-units = 1 }
|
||||
snippets_ui = { codegen-units = 1 }
|
||||
sqlez_macros = { codegen-units = 1 }
|
||||
story = { codegen-units = 1 }
|
||||
supermaven_api = { codegen-units = 1 }
|
||||
telemetry_events = { codegen-units = 1 }
|
||||
|
||||
@@ -1,6 +1,6 @@
# syntax = docker/dockerfile:1.2

FROM rust:1.91.1-bookworm as builder
FROM rust:1.90-bookworm as builder
WORKDIR app
COPY . .
@@ -1,7 +1,7 @@
# Zed

[](https://zed.dev)
[](https://github.com/zed-industries/zed/actions/workflows/run_tests.yml)
[](https://github.com/zed-industries/zed/actions/workflows/ci.yml)

Welcome to Zed, a high-performance, multiplayer code editor from the creators of [Atom](https://github.com/atom/atom) and [Tree-sitter](https://github.com/tree-sitter/tree-sitter).
@@ -43,9 +43,7 @@ design
|
||||
= @danilo-leal
|
||||
|
||||
docs
|
||||
= @miguelraz
|
||||
= @probably-neb
|
||||
= @yeskunall
|
||||
|
||||
extension
|
||||
= @kubkon
|
||||
@@ -53,10 +51,6 @@ extension
|
||||
git
|
||||
= @cole-miller
|
||||
= @danilo-leal
|
||||
= @dvdsk
|
||||
= @kubkon
|
||||
= @Anthony-Eid
|
||||
= @cameron1024
|
||||
|
||||
gpui
|
||||
= @Anthony-Eid
|
||||
@@ -104,12 +98,6 @@ settings_ui
|
||||
= @danilo-leal
|
||||
= @probably-neb
|
||||
|
||||
sum_tree
|
||||
= @Veykril
|
||||
|
||||
support
|
||||
= @miguelraz
|
||||
|
||||
tasks
|
||||
= @SomeoneToIgnore
|
||||
= @Veykril
|
||||
@@ -118,9 +106,6 @@ terminal
|
||||
= @kubkon
|
||||
= @Veykril
|
||||
|
||||
text
|
||||
= @Veykril
|
||||
|
||||
vim
|
||||
= @ConradIrwin
|
||||
= @dinocosta
|
||||
@@ -130,4 +115,3 @@ vim
|
||||
windows
|
||||
= @localcc
|
||||
= @reflectronic
|
||||
= @Veykril
|
||||
|
||||
@@ -1,4 +0,0 @@
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M8.00156 10.3996C9.32705 10.3996 10.4016 9.32509 10.4016 7.99961C10.4016 6.67413 9.32705 5.59961 8.00156 5.59961C6.67608 5.59961 5.60156 6.67413 5.60156 7.99961C5.60156 9.32509 6.67608 10.3996 8.00156 10.3996Z" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M10.4 5.6V8.6C10.4 9.07739 10.5896 9.53523 10.9272 9.8728C11.2648 10.2104 11.7226 10.4 12.2 10.4C12.6774 10.4 13.1352 10.2104 13.4728 9.8728C13.8104 9.53523 14 9.07739 14 8.6V8C14 6.64839 13.5436 5.33636 12.7048 4.27651C11.8661 3.21665 10.694 2.47105 9.37852 2.16051C8.06306 1.84997 6.68129 1.99269 5.45707 2.56554C4.23285 3.13838 3.23791 4.1078 2.63344 5.31672C2.02898 6.52565 1.85041 7.90325 2.12667 9.22633C2.40292 10.5494 3.11782 11.7405 4.15552 12.6065C5.19323 13.4726 6.49295 13.9629 7.84411 13.998C9.19527 14.0331 10.5187 13.611 11.6 12.8" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
</svg>
|
||||
|
Before: 1.0 KiB
1
assets/icons/debug_step_back.svg
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="none"><path stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.2" d="M14 11.333A6 6 0 0 0 4 6.867l-1 .9"/><path stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.333" d="M2 4.667v4h4"/><path fill="#000" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.2" d="M8 12a.667.667 0 1 0 0-1.333A.667.667 0 0 0 8 12Z"/></svg>
|
||||
|
After: 467 B
@@ -1,5 +1 @@
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M2 13H5" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M11 13H14" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M11.5 8.5L8 12M8 12L4.5 8.5M8 12L8 3" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
</svg>
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="none"><path stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.2" d="M3.333 10 8 14.667 12.667 10M8 5.333v9.334"/><path fill="#000" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.2" d="M8 2.667a.667.667 0 1 0 0-1.334.667.667 0 0 0 0 1.334Z"/></svg>
|
||||
|
||||
|
Before: 443 B, After: 374 B
@@ -1,5 +1 @@
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M4.5 6.5L8 3M8 3L11.5 6.5M8 3V12" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M2 13H5" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M11 13H14" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
</svg>
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="none"><path stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.2" d="M3.333 6 8 1.333 12.667 6M8 10.667V1.333"/><path fill="#000" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.2" d="M8 13.333a.667.667 0 1 1 0 1.334.667.667 0 0 1 0-1.334Z"/></svg>
|
||||
|
||||
|
Before: 439 B, After: 373 B
@@ -1,5 +1 @@
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M2 11.333C2.00118 10.1752 2.33729 9.04258 2.96777 8.07159C3.59826 7.10059 4.49621 6.33274 5.55331 5.86064C6.61041 5.38853 7.78152 5.23235 8.9254 5.41091C10.0693 5.58947 11.1371 6.09516 12 6.86698L13 7.76698" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M14 4.66699V8.66699H10" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M7 13H10" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
</svg>
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="none"><path stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.2" d="M2 11.333a6 6 0 0 1 10-4.466l1 .9"/><path stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.333" d="M14 4.667v4h-4"/><path fill="#000" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.2" d="M8 12a.667.667 0 1 1 0-1.333A.667.667 0 0 1 8 12Z"/></svg>
|
||||
|
||||
|
Before: 627 B, After: 468 B
@@ -1,10 +0,0 @@
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<g clip-path="url(#clip0_1_2)">
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M4.58747 12.9359C4.35741 12.778 4.17558 12.625 4.17558 12.625L10.092 2.37749C10.092 2.37749 10.3355 2.46782 10.5367 2.56426C10.7903 2.6858 11.0003 2.80429 11.0003 2.80429C13.8681 4.46005 14.8523 8.13267 13.1965 11.0005C11.5407 13.8684 7.8681 14.8525 5.00023 13.1967C5.00023 13.1967 4.79936 13.0812 4.58747 12.9359ZM10.5003 3.67032L5.50023 12.3307C7.89013 13.7105 10.9506 12.8904 12.3305 10.5006C13.7102 8.1106 12.8902 5.05015 10.5003 3.67032ZM3.07664 11.4314C2.87558 11.1403 2.804 11.0006 2.804 11.0006C1.77036 9.20524 1.69456 6.92215 2.80404 5.00046C3.91353 3.07877 5.92859 2.00291 8.0003 2.00036C8.0003 2.00036 8.28 1.99964 8.51289 2.02194C8.86375 2.05556 9.09702 2.10083 9.09702 2.10083L3.43905 11.9007C3.43905 11.9007 3.30482 11.7618 3.07664 11.4314ZM7.40178 3.03702C5.89399 3.22027 4.48727 4.08506 3.67008 5.50052C2.85288 6.9159 2.80733 8.56653 3.40252 9.96401L7.40178 3.03702Z" fill="black" stroke="black" stroke-width="0.1"/>
|
||||
</g>
|
||||
<defs>
|
||||
<clipPath id="clip0_1_2">
|
||||
<rect width="16" height="16" fill="white"/>
|
||||
</clipPath>
|
||||
</defs>
|
||||
</svg>
|
||||
|
Before: 1.2 KiB
@@ -1,32 +0,0 @@
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<g clip-path="url(#clip0_3348_16)">
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M7.97419 6.27207C8.44653 6.29114 8.86622 6.27046 9.23628 6.22425C9.08884 7.48378 8.7346 8.72903 8.16697 9.90688C8.04459 9.83861 7.92582 9.76008 7.81193 9.67108C7.64539 9.54099 7.49799 9.39549 7.37015 9.23818C7.5282 9.54496 7.64901 9.86752 7.73175 10.1986C7.35693 10.6656 6.90663 11.0373 6.412 11.3101C5.01165 10.8075 4.03638 9.63089 4.03638 7.93001C4.03638 6.96185 4.35234 6.07053 4.88281 5.36157C5.34001 5.69449 6.30374 6.20455 7.97419 6.27207ZM8.27511 11.5815C10.3762 11.5349 11.8115 10.7826 12.8347 7.93001C11.6992 7.93001 11.4246 7.10731 11.1188 6.19149C11.0669 6.03596 11.0141 5.87771 10.956 5.72037C10.6733 5.86733 10.2753 6.02782 9.74834 6.13895C9.59658 7.49345 9.20592 8.83238 8.56821 10.0897C8.89933 10.2093 9.24674 10.262 9.5908 10.2502C9.08928 10.4803 8.62468 10.8066 8.22655 11.2255C8.2457 11.3438 8.26186 11.4625 8.27511 11.5815ZM6.62702 7.75422C6.62702 7.50604 6.82821 7.30485 7.07639 7.30485C7.32457 7.30485 7.52576 7.50604 7.52576 7.75422V8.23616C7.52576 8.48435 7.32457 8.68554 7.07639 8.68554C6.82821 8.68554 6.62702 8.48435 6.62702 8.23616V7.75422ZM5.27746 7.30485C5.05086 7.30485 4.86716 7.48854 4.86716 7.71513V8.27525C4.86716 8.50185 5.05086 8.68554 5.27746 8.68554C5.50406 8.68554 5.68776 8.50185 5.68776 8.27525V7.71513C5.68776 7.48854 5.50406 7.30485 5.27746 7.30485Z" fill="white"/>
|
||||
<mask id="mask0_3348_16" style="mask-type:luminance" maskUnits="userSpaceOnUse" x="4" y="5" width="9" height="7">
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M7.97419 6.27207C8.44653 6.29114 8.86622 6.27046 9.23628 6.22425C9.08884 7.48378 8.7346 8.72903 8.16697 9.90688C8.04459 9.83861 7.92582 9.76008 7.81193 9.67108C7.64539 9.54099 7.49799 9.39549 7.37015 9.23818C7.5282 9.54496 7.64901 9.86752 7.73175 10.1986C7.35693 10.6656 6.90663 11.0373 6.412 11.3101C5.01165 10.8075 4.03638 9.63089 4.03638 7.93001C4.03638 6.96185 4.35234 6.07053 4.88281 5.36157C5.34001 5.69449 6.30374 6.20455 7.97419 6.27207ZM8.27511 11.5815C10.3762 11.5349 11.8115 10.7826 12.8347 7.93001C11.6992 7.93001 11.4246 7.10731 11.1188 6.19149C11.0669 6.03596 11.0141 5.87771 10.956 5.72037C10.6733 5.86733 10.2753 6.02782 9.74834 6.13895C9.59658 7.49345 9.20592 8.83238 8.56821 10.0897C8.89933 10.2093 9.24674 10.262 9.5908 10.2502C9.08928 10.4803 8.62468 10.8066 8.22655 11.2255C8.2457 11.3438 8.26186 11.4625 8.27511 11.5815ZM6.62702 7.75422C6.62702 7.50604 6.82821 7.30485 7.07639 7.30485C7.32457 7.30485 7.52576 7.50604 7.52576 7.75422V8.23616C7.52576 8.48435 7.32457 8.68554 7.07639 8.68554C6.82821 8.68554 6.62702 8.48435 6.62702 8.23616V7.75422ZM5.27746 7.30485C5.05086 7.30485 4.86716 7.48854 4.86716 7.71513V8.27525C4.86716 8.50185 5.05086 8.68554 5.27746 8.68554C5.50406 8.68554 5.68776 8.50185 5.68776 8.27525V7.71513C5.68776 7.48854 5.50406 7.30485 5.27746 7.30485Z" fill="white"/>
|
||||
</mask>
|
||||
<g mask="url(#mask0_3348_16)">
|
||||
<path d="M9.23617 6.22425L9.39588 6.24293L9.41971 6.0393L9.21624 6.06471L9.23617 6.22425ZM8.16687 9.90688L8.08857 10.0473L8.23765 10.1305L8.31174 9.97669L8.16687 9.90688ZM7.37005 9.23819L7.49487 9.13676L7.22714 9.3118L7.37005 9.23819ZM7.73165 10.1986L7.85702 10.2993L7.90696 10.2371L7.88761 10.1597L7.73165 10.1986ZM6.41189 11.3101L6.35758 11.4615L6.42594 11.486L6.48954 11.4509L6.41189 11.3101ZM4.88271 5.36157L4.97736 5.23159L4.84905 5.13817L4.75397 5.26525L4.88271 5.36157ZM8.27501 11.5815L8.11523 11.5993L8.13151 11.7456L8.27859 11.7423L8.27501 11.5815ZM12.8346 7.93001L12.986 7.98428L13.0631 7.76921H12.8346V7.93001ZM10.9559 5.72037L11.1067 5.66469L11.0436 5.49354L10.8817 5.5777L10.9559 5.72037ZM9.74824 6.13896L9.71508 5.98161L9.60139 6.0056L9.58846 6.12102L9.74824 6.13896ZM8.56811 10.0897L8.42469 10.017L8.34242 10.1792L8.51348 10.241L8.56811 10.0897ZM9.5907 10.2502L9.65775 10.3964L9.58519 10.0896L9.5907 10.2502ZM8.22644 11.2255L8.10992 11.1147L8.05502 11.1725L8.06773 11.2512L8.22644 11.2255ZM9.21624 6.06471C8.85519 6.10978 8.44439 6.13015 7.98058 6.11139L7.96756 6.43272C8.44852 6.45215 8.87701 6.43111 9.25607 6.3838L9.21624 6.06471ZM8.31174 9.97669C8.88724 8.78244 9.2464 7.51988 9.39588 6.24293L9.07647 6.20557C8.93108 7.44772 8.58175 8.67563 8.02203 9.83708L8.31174 9.97669ZM8.2452 9.76645C8.12998 9.70219 8.01817 9.62826 7.91082 9.54438L7.71285 9.79779C7.8333 9.8919 7.95895 9.97503 8.08857 10.0473L8.2452 9.76645ZM7.91082 9.54438C7.75387 9.4218 7.61512 9.28479 7.49487 9.13676L7.24526 9.33957C7.38066 9.50619 7.53671 9.66023 7.71285 9.79779L7.91082 9.54438ZM7.22714 9.3118C7.37944 9.60746 7.49589 9.91837 7.57564 10.2376L7.88761 10.1597C7.80196 9.81663 7.67679 9.48248 7.513 9.16453L7.22714 9.3118ZM7.60624 10.098C7.24483 10.5482 6.81083 10.9065 6.33425 11.1693L6.48954 11.4509C7.00223 11.1682 7.46887 10.7829 7.85702 10.2993L7.60624 10.098ZM3.87549 7.93001C3.87548 9.7042 4.89861 10.9378 6.35758 11.4615L6.46622 11.1588C5.12449 10.6772 4.19707 9.55763 4.19707 7.93001H3.87549ZM4.75397 5.26525C4.20309 6.00147 3.87549 6.92646 3.87549 7.93001H4.19707C4.19707 6.99724 4.50139 6.13959 5.01145 5.45791L4.75397 5.26525ZM7.98058 6.11139C6.34236 6.04516 5.40922 5.54604 4.97736 5.23159L4.78806 5.49157C5.27058 5.84291 6.26491 6.3639 7.96756 6.43272L7.98058 6.11139ZM8.27859 11.7423C9.34696 11.7185 10.2682 11.515 11.0542 10.9376C11.8388 10.3612 12.4683 9.4273 12.986 7.98428L12.6833 7.8757C12.1776 9.28534 11.5779 10.1539 10.8638 10.6784C10.1511 11.202 9.30417 11.3978 8.27143 11.4208L8.27859 11.7423ZM12.8346 7.76921C12.3148 7.76921 12.0098 7.58516 11.7925 7.30552C11.5639 7.0114 11.4266 6.60587 11.2712 6.14061L10.9662 6.24242C11.1166 6.69294 11.2695 7.15667 11.5385 7.50285C11.8188 7.86347 12.2189 8.09078 12.8346 8.09078V7.76921ZM11.2712 6.14061C11.2195 5.98543 11.1658 5.82478 11.1067 5.66469L10.805 5.77606C10.8621 5.93065 10.9142 6.0865 10.9662 6.24242L11.2712 6.14061ZM10.8817 5.5777C10.6115 5.71821 10.2273 5.87362 9.71508 5.98161L9.78143 6.29626C10.3232 6.18206 10.735 6.0165 11.0301 5.86301L10.8817 5.5777ZM9.58846 6.12102C9.43882 7.45684 9.05355 8.77717 8.42469 10.017L8.71149 10.1625C9.35809 8.88764 9.75417 7.53011 9.90806 6.15685L9.58846 6.12102ZM9.58519 10.0896C9.26119 10.1006 8.93423 10.051 8.62269 9.93854L8.51348 10.241C8.86427 10.3677 9.23205 10.4234 9.5962 10.4109L9.58519 10.0896ZM8.34301 11.3363C8.72675 10.9325 9.17443 10.6181 9.65775 10.3964L9.52365 10.1041C9.00392 10.3425 8.52241 10.6807 8.10992 11.1147L8.34301 11.3363ZM8.43483 11.5638C8.4213 11.4421 8.40475 11.3207 8.3852 11.1998L8.06773 11.2512C8.08644 
11.3668 8.10225 11.4829 8.11523 11.5993L8.43483 11.5638ZM7.07629 7.14405C6.73931 7.14405 6.46613 7.41724 6.46613 7.75423H6.7877C6.7877 7.59484 6.91691 7.46561 7.07629 7.46561V7.14405ZM7.68646 7.75423C7.68646 7.41724 7.41326 7.14405 7.07629 7.14405V7.46561C7.23567 7.46561 7.36489 7.59484 7.36489 7.75423H7.68646ZM7.68646 8.23616V7.75423H7.36489V8.23616H7.68646ZM7.07629 8.84634C7.41326 8.84634 7.68646 8.57315 7.68646 8.23616H7.36489C7.36489 8.39555 7.23567 8.52474 7.07629 8.52474V8.84634ZM6.46613 8.23616C6.46613 8.57315 6.73931 8.84634 7.07629 8.84634V8.52474C6.91691 8.52474 6.7877 8.39555 6.7877 8.23616H6.46613ZM6.46613 7.75423V8.23616H6.7877V7.75423H6.46613ZM5.02785 7.71514C5.02785 7.57734 5.13956 7.46561 5.27736 7.46561V7.14405C4.96196 7.14405 4.70627 7.39974 4.70627 7.71514H5.02785ZM5.02785 8.27525V7.71514H4.70627V8.27525H5.02785ZM5.27736 8.52474C5.13956 8.52474 5.02785 8.41305 5.02785 8.27525H4.70627C4.70627 8.59065 4.96196 8.84634 5.27736 8.84634V8.52474ZM5.52687 8.27525C5.52687 8.41305 5.41516 8.52474 5.27736 8.52474V8.84634C5.59277 8.84634 5.84845 8.59065 5.84845 8.27525H5.52687ZM5.52687 7.71514V8.27525H5.84845V7.71514H5.52687ZM5.27736 7.46561C5.41516 7.46561 5.52687 7.57734 5.52687 7.71514H5.84845C5.84845 7.39974 5.59277 7.14405 5.27736 7.14405V7.46561Z" fill="white"/>
|
||||
</g>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M7.12635 14.5901C7.22369 14.3749 7.3069 14.1501 7.37454 13.9167C7.54132 13.3412 7.5998 12.7599 7.56197 12.1948C7.53665 12.5349 7.47589 12.8775 7.37718 13.2181C7.23926 13.694 7.03667 14.1336 6.78174 14.5301C6.89605 14.5547 7.01101 14.5747 7.12635 14.5901Z" fill="white"/>
|
||||
<path d="M9.71984 7.74796C9.50296 7.74796 9.29496 7.83412 9.14159 7.98745C8.98822 8.14082 8.9021 8.34882 8.9021 8.5657C8.9021 8.78258 8.98822 8.99057 9.14159 9.14394C9.29496 9.29728 9.50296 9.38344 9.71984 9.38344V8.5657V7.74796Z" fill="white"/>
|
||||
<mask id="mask1_3348_16" style="mask-type:luminance" maskUnits="userSpaceOnUse" x="5" y="2" width="8" height="9">
|
||||
<path d="M12.3783 2.9985H5.36792V10.3954H12.3783V2.9985Z" fill="white"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M9.75733 3.61999C9.98577 5.80374 9.60089 8.05373 8.56819 10.0898C8.43122 10.0403 8.29704 9.9794 8.16699 9.90688C9.15325 7.86033 9.49538 5.61026 9.22757 3.43526C9.39923 3.51584 9.57682 3.57729 9.75733 3.61999Z" fill="black"/>
|
||||
</mask>
|
||||
<g mask="url(#mask1_3348_16)">
|
||||
<path d="M8.56815 10.0898L8.67689 10.1449L8.62812 10.241L8.52678 10.2044L8.56815 10.0898ZM9.75728 3.61998L9.78536 3.50136L9.86952 3.52127L9.87853 3.6073L9.75728 3.61998ZM8.16695 9.90687L8.1076 10.0133L8.00732 9.9574L8.05715 9.85398L8.16695 9.90687ZM9.22753 3.43524L9.10656 3.45014L9.07958 3.23116L9.27932 3.32491L9.22753 3.43524ZM8.45945 10.0346C9.48122 8.02009 9.86217 5.79374 9.63608 3.63266L9.87853 3.6073C10.1093 5.81372 9.72048 8.0873 8.67689 10.1449L8.45945 10.0346ZM8.22633 9.80041C8.35056 9.86971 8.47876 9.92791 8.60956 9.97514L8.52678 10.2044C8.38363 10.1527 8.24344 10.0891 8.1076 10.0133L8.22633 9.80041ZM9.34849 3.42035C9.61905 5.61792 9.27346 7.89158 8.27675 9.9598L8.05715 9.85398C9.03298 7.82905 9.37158 5.60258 9.10656 3.45014L9.34849 3.42035ZM9.72925 3.7386C9.54064 3.69399 9.3551 3.62977 9.17573 3.54558L9.27932 3.32491C9.44327 3.40188 9.61288 3.46058 9.78536 3.50136L9.72925 3.7386Z" fill="white"/>
|
||||
</g>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M11.4118 3.46925L11.2416 3.39926L11.1904 3.57611L11.349 3.62202C11.1904 3.57611 11.1904 3.57615 11.1904 3.5762L11.1903 3.57631L11.1902 3.57658L11.19 3.57741L11.1893 3.58009C11.1886 3.58233 11.1878 3.58548 11.1867 3.58949C11.1845 3.5975 11.1814 3.60897 11.1777 3.62359C11.1703 3.6528 11.1603 3.69464 11.1493 3.74656C11.1275 3.85017 11.102 3.99505 11.0869 4.16045C11.0573 4.4847 11.0653 4.91594 11.2489 5.26595C11.2613 5.28944 11.2643 5.31174 11.2625 5.32629C11.261 5.33849 11.2572 5.34226 11.2536 5.3449C11.0412 5.50026 10.5639 5.78997 9.76653 5.96607C9.76095 6.02373 9.75493 6.08134 9.74848 6.13895C10.601 5.95915 11.1161 5.65017 11.3511 5.4782C11.4413 5.41219 11.4471 5.28823 11.3952 5.18922C11.1546 4.73063 11.2477 4.08248 11.3103 3.78401C11.3314 3.68298 11.349 3.62202 11.349 3.62202C11.3745 3.6325 11.4002 3.63983 11.4259 3.64425C11.9083 3.72709 12.4185 2.78249 12.6294 2.33939C12.6852 2.22212 12.6234 2.08843 12.497 2.05837C11.2595 1.76399 5.46936 0.631807 4.57214 4.96989C4.55907 5.03307 4.57607 5.10106 4.62251 5.14584C4.87914 5.39322 5.86138 6.18665 7.9743 6.27207C8.44664 6.29114 8.86633 6.27046 9.23638 6.22425C9.24295 6.16797 9.24912 6.1117 9.25491 6.05534C8.88438 6.10391 8.46092 6.12641 7.98094 6.10702C5.91152 6.02337 4.96693 5.24843 4.73714 5.02692C4.73701 5.02679 4.73545 5.02525 4.73422 5.0208C4.73292 5.01611 4.73254 5.00987 4.73388 5.00334C4.94996 3.95861 5.4573 3.25195 6.11188 2.77714C6.77039 2.29947 7.58745 2.04983 8.42824 1.94075C10.1122 1.72228 11.8454 2.07312 12.4588 2.21906C12.4722 2.22225 12.4787 2.22927 12.4819 2.2362C12.4853 2.24342 12.4869 2.25443 12.4803 2.2684C12.3706 2.49879 12.183 2.85746 11.9656 3.13057C11.8564 3.26783 11.7479 3.37295 11.6469 3.43216C11.5491 3.48956 11.4752 3.49529 11.4118 3.46925Z" fill="white"/>
|
||||
<mask id="mask2_3348_16" style="mask-type:luminance" maskUnits="userSpaceOnUse" x="3" y="9" width="7" height="6">
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M8.22654 11.2255C8.62463 10.8066 9.08923 10.4803 9.59075 10.2502C8.97039 10.2715 8.33933 10.0831 7.81189 9.67109C7.64534 9.541 7.49795 9.39549 7.37014 9.23819C7.52815 9.54497 7.64896 9.86752 7.7317 10.1986C6.70151 11.4821 5.1007 12.0466 3.57739 11.8125C3.85909 12.527 4.32941 13.178 4.97849 13.6851C5.8625 14.3756 6.92544 14.6799 7.96392 14.6227C8.32513 13.5174 8.4085 12.351 8.22654 11.2255Z" fill="white"/>
|
||||
</mask>
|
||||
<g mask="url(#mask2_3348_16)">
|
||||
<path d="M9.59085 10.2502L9.58389 10.0472L9.67556 10.4349L9.59085 10.2502ZM8.22663 11.2255L8.02607 11.258L8.00999 11.1585L8.07936 11.0856L8.22663 11.2255ZM7.37024 9.23819L7.18961 9.33119L7.52789 9.11006L7.37024 9.23819ZM7.7318 10.1986L7.92886 10.1494L7.95328 10.2472L7.8902 10.3258L7.7318 10.1986ZM3.57749 11.8125L3.3885 11.887L3.25879 11.5579L3.60835 11.6117L3.57749 11.8125ZM7.96402 14.6227L8.15711 14.6858L8.11397 14.8179L7.97519 14.8255L7.96402 14.6227ZM9.67556 10.4349C9.19708 10.6544 8.7538 10.9657 8.37387 11.3655L8.07936 11.0856C8.49566 10.6475 8.98161 10.3062 9.50614 10.0656L9.67556 10.4349ZM7.93704 9.51099C8.42551 9.89261 9.00942 10.0669 9.58389 10.0472L9.59781 10.4533C8.93151 10.4761 8.25334 10.2737 7.68693 9.83118L7.93704 9.51099ZM7.52789 9.11006C7.64615 9.25565 7.78261 9.39038 7.93704 9.51099L7.68693 9.83118C7.50827 9.69161 7.34994 9.53537 7.21254 9.36627L7.52789 9.11006ZM7.5347 10.2479C7.45573 9.93178 7.34043 9.62393 7.18961 9.33119L7.55082 9.14514C7.71611 9.466 7.84242 9.80326 7.92886 10.1494L7.5347 10.2479ZM3.60835 11.6117C5.06278 11.8352 6.59038 11.2962 7.57335 10.0715L7.8902 10.3258C6.81284 11.6681 5.1388 12.258 3.54663 12.0133L3.60835 11.6117ZM4.85352 13.8452C4.17512 13.3152 3.68312 12.6343 3.3885 11.887L3.76648 11.738C4.03524 12.4197 4.4839 13.0409 5.10364 13.525L4.85352 13.8452ZM7.97519 14.8255C6.8895 14.8853 5.77774 14.5672 4.85352 13.8452L5.10364 13.525C5.94745 14.1842 6.96157 14.4744 7.95285 14.4198L7.97519 14.8255ZM8.42716 11.1931C8.61419 12.3499 8.52858 13.5491 8.15711 14.6858L7.77093 14.5596C8.12191 13.4857 8.20296 12.352 8.02607 11.258L8.42716 11.1931Z" fill="white"/>
|
||||
</g>
|
||||
</g>
|
||||
<defs>
|
||||
<clipPath id="clip0_3348_16">
|
||||
<rect width="9.63483" height="14" fill="white" transform="translate(3.19995 1.5)"/>
|
||||
</clipPath>
|
||||
</defs>
|
||||
</svg>
|
||||
|
Before: 14 KiB
@@ -43,7 +43,8 @@
|
||||
"f11": "zed::ToggleFullScreen",
|
||||
"ctrl-alt-z": "edit_prediction::RateCompletions",
|
||||
"ctrl-alt-shift-i": "edit_prediction::ToggleMenu",
|
||||
"ctrl-alt-l": "lsp_tool::ToggleMenu"
|
||||
"ctrl-alt-l": "lsp_tool::ToggleMenu",
|
||||
"ctrl-alt-.": "project_panel::ToggleHideHidden"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -239,11 +240,13 @@
|
||||
"ctrl-alt-l": "agent::OpenRulesLibrary",
|
||||
"ctrl-i": "agent::ToggleProfileSelector",
|
||||
"ctrl-alt-/": "agent::ToggleModelSelector",
|
||||
"ctrl-shift-a": "agent::ToggleContextPicker",
|
||||
"ctrl-shift-j": "agent::ToggleNavigationMenu",
|
||||
"ctrl-alt-i": "agent::ToggleOptionsMenu",
|
||||
"ctrl-alt-shift-n": "agent::ToggleNewThreadMenu",
|
||||
"shift-alt-escape": "agent::ExpandMessageEditor",
|
||||
"ctrl->": "agent::AddSelectionToThread",
|
||||
"ctrl-alt-e": "agent::RemoveAllContext",
|
||||
"ctrl-shift-e": "project_panel::ToggleFocus",
|
||||
"ctrl-shift-enter": "agent::ContinueThread",
|
||||
"super-ctrl-b": "agent::ToggleBurnMode",
|
||||
@@ -320,6 +323,17 @@
|
||||
"alt-enter": "editor::Newline"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "ContextStrip",
|
||||
"bindings": {
|
||||
"up": "agent::FocusUp",
|
||||
"right": "agent::FocusRight",
|
||||
"left": "agent::FocusLeft",
|
||||
"down": "agent::FocusDown",
|
||||
"backspace": "agent::RemoveFocusedContext",
|
||||
"enter": "agent::AcceptSuggestedContext"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "AcpThread > ModeSelector",
|
||||
"bindings": {
|
||||
@@ -721,20 +735,6 @@
|
||||
"tab": "editor::ComposeCompletion"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Editor && in_snippet && has_next_tabstop && !showing_completions",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"tab": "editor::NextSnippetTabstop"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Editor && in_snippet && has_previous_tabstop && !showing_completions",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"shift-tab": "editor::PreviousSnippetTabstop"
|
||||
}
|
||||
},
|
||||
// Bindings for accepting edit predictions
|
||||
//
|
||||
// alt-l is provided as an alternative to tab/alt-tab. and will be displayed in the UI. This is
|
||||
@@ -811,7 +811,8 @@
|
||||
"context": "PromptEditor",
|
||||
"bindings": {
|
||||
"ctrl-[": "agent::CyclePreviousInlineAssist",
|
||||
"ctrl-]": "agent::CycleNextInlineAssist"
|
||||
"ctrl-]": "agent::CycleNextInlineAssist",
|
||||
"ctrl-alt-e": "agent::RemoveAllContext"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -851,7 +852,6 @@
|
||||
"context": "ProjectPanel",
|
||||
"bindings": {
|
||||
"left": "project_panel::CollapseSelectedEntry",
|
||||
"ctrl-left": "project_panel::CollapseAllEntries",
|
||||
"right": "project_panel::ExpandSelectedEntry",
|
||||
"new": "project_panel::NewFile",
|
||||
"ctrl-n": "project_panel::NewFile",
|
||||
@@ -1237,25 +1237,11 @@
|
||||
"context": "Onboarding",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"ctrl-=": ["zed::IncreaseUiFontSize", { "persist": false }],
|
||||
"ctrl-+": ["zed::IncreaseUiFontSize", { "persist": false }],
|
||||
"ctrl--": ["zed::DecreaseUiFontSize", { "persist": false }],
|
||||
"ctrl-0": ["zed::ResetUiFontSize", { "persist": false }],
|
||||
"ctrl-enter": "onboarding::Finish",
|
||||
"alt-shift-l": "onboarding::SignIn",
|
||||
"alt-shift-a": "onboarding::OpenAccount"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Welcome",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"ctrl-=": ["zed::IncreaseUiFontSize", { "persist": false }],
|
||||
"ctrl-+": ["zed::IncreaseUiFontSize", { "persist": false }],
|
||||
"ctrl--": ["zed::DecreaseUiFontSize", { "persist": false }],
|
||||
"ctrl-0": ["zed::ResetUiFontSize", { "persist": false }]
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "InvalidBuffer",
|
||||
"use_key_equivalents": true,
|
||||
@@ -1279,7 +1265,6 @@
|
||||
"escape": "workspace::CloseWindow",
|
||||
"ctrl-m": "settings_editor::Minimize",
|
||||
"ctrl-f": "search::FocusSearch",
|
||||
"ctrl-,": "settings_editor::OpenCurrentFile",
|
||||
"left": "settings_editor::ToggleFocusNav",
|
||||
"ctrl-shift-e": "settings_editor::ToggleFocusNav",
|
||||
// todo(settings_ui): cut this down based on the max files and overflow UI
|
||||
@@ -1335,12 +1320,5 @@
|
||||
"alt-left": "dev::Zeta2ContextGoBack",
|
||||
"alt-right": "dev::Zeta2ContextGoForward"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "GitBranchSelector || (GitBranchSelector > Picker > Editor)",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"ctrl-shift-backspace": "branch_picker::DeleteBranch"
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
@@ -50,7 +50,7 @@
|
||||
"ctrl-cmd-z": "edit_prediction::RateCompletions",
|
||||
"ctrl-cmd-i": "edit_prediction::ToggleMenu",
|
||||
"ctrl-cmd-l": "lsp_tool::ToggleMenu",
|
||||
"ctrl-cmd-c": "editor::DisplayCursorNames"
|
||||
"cmd-alt-.": "project_panel::ToggleHideHidden"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -279,11 +279,13 @@
|
||||
"cmd-alt-p": "agent::ManageProfiles",
|
||||
"cmd-i": "agent::ToggleProfileSelector",
|
||||
"cmd-alt-/": "agent::ToggleModelSelector",
|
||||
"cmd-shift-a": "agent::ToggleContextPicker",
|
||||
"cmd-shift-j": "agent::ToggleNavigationMenu",
|
||||
"cmd-alt-m": "agent::ToggleOptionsMenu",
|
||||
"cmd-alt-shift-n": "agent::ToggleNewThreadMenu",
|
||||
"shift-alt-escape": "agent::ExpandMessageEditor",
|
||||
"cmd->": "agent::AddSelectionToThread",
|
||||
"cmd-alt-e": "agent::RemoveAllContext",
|
||||
"cmd-shift-e": "project_panel::ToggleFocus",
|
||||
"cmd-ctrl-b": "agent::ToggleBurnMode",
|
||||
"cmd-shift-enter": "agent::ContinueThread",
|
||||
@@ -311,7 +313,7 @@
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"cmd-n": "agent::NewTextThread",
|
||||
"cmd-alt-n": "agent::NewExternalAgentThread"
|
||||
"cmd-alt-t": "agent::NewThread"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -364,6 +366,18 @@
|
||||
"alt-enter": "editor::Newline"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "ContextStrip",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"up": "agent::FocusUp",
|
||||
"right": "agent::FocusRight",
|
||||
"left": "agent::FocusLeft",
|
||||
"down": "agent::FocusDown",
|
||||
"backspace": "agent::RemoveFocusedContext",
|
||||
"enter": "agent::AcceptSuggestedContext"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "AgentConfiguration",
|
||||
"bindings": {
|
||||
@@ -590,7 +604,8 @@
|
||||
"cmd-.": "editor::ToggleCodeActions",
|
||||
"cmd-k r": "editor::RevealInFileManager",
|
||||
"cmd-k p": "editor::CopyPath",
|
||||
"cmd-\\": "pane::SplitRight"
|
||||
"cmd-\\": "pane::SplitRight",
|
||||
"ctrl-cmd-c": "editor::DisplayCursorNames"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -730,8 +745,7 @@
|
||||
"context": "Workspace && debugger_running",
"use_key_equivalents": true,
"bindings": {
"f5": "zed::NoAction",
"f11": "debugger::StepInto"
"f5": "zed::NoAction"
}
},
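This hunk drops the f11 step-into binding from the "Workspace && debugger_running" block, leaving only the f5 no-op. A hedged sketch of restoring it from a personal keymap, using only the context and action shown above; the enclosing array is an assumption.

[
  {
    "context": "Workspace && debugger_running",
    "bindings": {
      "f11": "debugger::StepInto" // restores the binding removed in this hunk
    }
  }
]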
|
||||
{
|
||||
@@ -791,20 +805,6 @@
|
||||
"tab": "editor::ComposeCompletion"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Editor && in_snippet && has_next_tabstop && !showing_completions",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"tab": "editor::NextSnippetTabstop"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Editor && in_snippet && has_previous_tabstop && !showing_completions",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"shift-tab": "editor::PreviousSnippetTabstop"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Editor && edit_prediction",
|
||||
"bindings": {
|
||||
@@ -876,7 +876,9 @@
|
||||
"context": "PromptEditor",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"cmd-shift-a": "agent::ToggleContextPicker",
|
||||
"cmd-alt-/": "agent::ToggleModelSelector",
|
||||
"cmd-alt-e": "agent::RemoveAllContext",
|
||||
"ctrl-[": "agent::CyclePreviousInlineAssist",
|
||||
"ctrl-]": "agent::CycleNextInlineAssist"
|
||||
}
|
||||
@@ -920,7 +922,6 @@
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"left": "project_panel::CollapseSelectedEntry",
|
||||
"cmd-left": "project_panel::CollapseAllEntries",
|
||||
"right": "project_panel::ExpandSelectedEntry",
|
||||
"cmd-n": "project_panel::NewFile",
|
||||
"cmd-d": "project_panel::Duplicate",
|
||||
@@ -1219,23 +1220,23 @@
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "RatePredictionsModal",
|
||||
"context": "RateCompletionModal",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"cmd-shift-enter": "zeta::ThumbsUpActivePrediction",
|
||||
"cmd-shift-backspace": "zeta::ThumbsDownActivePrediction",
|
||||
"cmd-shift-enter": "zeta::ThumbsUpActiveCompletion",
|
||||
"cmd-shift-backspace": "zeta::ThumbsDownActiveCompletion",
|
||||
"shift-down": "zeta::NextEdit",
|
||||
"shift-up": "zeta::PreviousEdit",
|
||||
"right": "zeta::PreviewPrediction"
|
||||
"right": "zeta::PreviewCompletion"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "RatePredictionsModal > Editor",
|
||||
"context": "RateCompletionModal > Editor",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"escape": "zeta::FocusPredictions",
|
||||
"cmd-shift-enter": "zeta::ThumbsUpActivePrediction",
|
||||
"cmd-shift-backspace": "zeta::ThumbsDownActivePrediction"
|
||||
"escape": "zeta::FocusCompletions",
|
||||
"cmd-shift-enter": "zeta::ThumbsUpActiveCompletion",
|
||||
"cmd-shift-backspace": "zeta::ThumbsDownActiveCompletion"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -1341,25 +1342,11 @@
|
||||
"context": "Onboarding",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"cmd-=": ["zed::IncreaseUiFontSize", { "persist": false }],
|
||||
"cmd-+": ["zed::IncreaseUiFontSize", { "persist": false }],
|
||||
"cmd--": ["zed::DecreaseUiFontSize", { "persist": false }],
|
||||
"cmd-0": ["zed::ResetUiFontSize", { "persist": false }],
|
||||
"cmd-enter": "onboarding::Finish",
|
||||
"alt-tab": "onboarding::SignIn",
|
||||
"alt-shift-a": "onboarding::OpenAccount"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Welcome",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"cmd-=": ["zed::IncreaseUiFontSize", { "persist": false }],
|
||||
"cmd-+": ["zed::IncreaseUiFontSize", { "persist": false }],
|
||||
"cmd--": ["zed::DecreaseUiFontSize", { "persist": false }],
|
||||
"cmd-0": ["zed::ResetUiFontSize", { "persist": false }]
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "InvalidBuffer",
|
||||
"use_key_equivalents": true,
|
||||
@@ -1383,7 +1370,6 @@
|
||||
"escape": "workspace::CloseWindow",
|
||||
"cmd-m": "settings_editor::Minimize",
|
||||
"cmd-f": "search::FocusSearch",
|
||||
"cmd-,": "settings_editor::OpenCurrentFile",
|
||||
"left": "settings_editor::ToggleFocusNav",
|
||||
"cmd-shift-e": "settings_editor::ToggleFocusNav",
|
||||
// todo(settings_ui): cut this down based on the max files and overflow UI
|
||||
@@ -1440,12 +1426,5 @@
|
||||
"alt-left": "dev::Zeta2ContextGoBack",
|
||||
"alt-right": "dev::Zeta2ContextGoForward"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "GitBranchSelector || (GitBranchSelector > Picker > Editor)",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"cmd-shift-backspace": "branch_picker::DeleteBranch"
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
@@ -36,12 +36,13 @@
|
||||
"shift-f5": "debugger::Stop",
|
||||
"ctrl-shift-f5": "debugger::RerunSession",
|
||||
"f6": "debugger::Pause",
|
||||
"f10": "debugger::StepOver",
|
||||
"f7": "debugger::StepOver",
|
||||
"ctrl-f11": "debugger::StepInto",
|
||||
"shift-f11": "debugger::StepOut",
|
||||
"f11": "zed::ToggleFullScreen",
|
||||
"ctrl-shift-i": "edit_prediction::ToggleMenu",
|
||||
"shift-alt-l": "lsp_tool::ToggleMenu",
|
||||
"ctrl-shift-alt-c": "editor::DisplayCursorNames"
|
||||
"ctrl-alt-.": "project_panel::ToggleHideHidden"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -117,7 +118,7 @@
|
||||
"alt-g m": "git::OpenModifiedFiles",
|
||||
"menu": "editor::OpenContextMenu",
|
||||
"shift-f10": "editor::OpenContextMenu",
|
||||
"ctrl-alt-e": "editor::ToggleEditPrediction",
|
||||
"ctrl-shift-e": "editor::ToggleEditPrediction",
|
||||
"f9": "editor::ToggleBreakpoint",
|
||||
"shift-f9": "editor::EditLogBreakpoint"
|
||||
}
|
||||
@@ -215,7 +216,7 @@
|
||||
"context": "ContextEditor > Editor",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"ctrl-i": "assistant::Assist",
|
||||
"ctrl-enter": "assistant::Assist",
|
||||
"ctrl-s": "workspace::Save",
|
||||
"ctrl-shift-,": "assistant::InsertIntoEditor",
|
||||
"shift-enter": "assistant::Split",
|
||||
@@ -240,18 +241,20 @@
|
||||
"shift-alt-p": "agent::ManageProfiles",
|
||||
"ctrl-i": "agent::ToggleProfileSelector",
|
||||
"shift-alt-/": "agent::ToggleModelSelector",
|
||||
"shift-alt-j": "agent::ToggleNavigationMenu",
|
||||
"shift-alt-i": "agent::ToggleOptionsMenu",
|
||||
"ctrl-shift-alt-n": "agent::ToggleNewThreadMenu",
|
||||
"ctrl-shift-a": "agent::ToggleContextPicker",
|
||||
"ctrl-shift-j": "agent::ToggleNavigationMenu",
|
||||
"ctrl-alt-i": "agent::ToggleOptionsMenu",
|
||||
// "ctrl-shift-alt-n": "agent::ToggleNewThreadMenu",
|
||||
"shift-alt-escape": "agent::ExpandMessageEditor",
|
||||
"ctrl-shift-.": "agent::AddSelectionToThread",
|
||||
"shift-alt-e": "agent::RemoveAllContext",
|
||||
"ctrl-shift-e": "project_panel::ToggleFocus",
|
||||
"ctrl-shift-enter": "agent::ContinueThread",
|
||||
"super-ctrl-b": "agent::ToggleBurnMode",
|
||||
"alt-enter": "agent::ContinueWithBurnMode",
|
||||
"shift-alt-a": "agent::AllowOnce",
|
||||
"ctrl-y": "agent::AllowOnce",
|
||||
"ctrl-alt-y": "agent::AllowAlways",
|
||||
"shift-alt-z": "agent::RejectOnce"
|
||||
"ctrl-alt-z": "agent::RejectOnce"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -326,6 +329,18 @@
|
||||
"alt-enter": "editor::Newline"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "ContextStrip",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"up": "agent::FocusUp",
|
||||
"right": "agent::FocusRight",
|
||||
"left": "agent::FocusLeft",
|
||||
"down": "agent::FocusDown",
|
||||
"backspace": "agent::RemoveFocusedContext",
|
||||
"enter": "agent::AcceptSuggestedContext"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "AcpThread > ModeSelector",
|
||||
"bindings": {
|
||||
@@ -500,7 +515,10 @@
|
||||
"ctrl-shift-l": "editor::SelectAllMatches", // Select all occurrences of current selection
|
||||
"ctrl-f2": "editor::SelectAllMatches", // Select all occurrences of current word
|
||||
"ctrl-d": ["editor::SelectNext", { "replace_newest": false }], // editor.action.addSelectionToNextFindMatch / find_under_expand
|
||||
"ctrl-shift-down": ["editor::SelectNext", { "replace_newest": false }], // editor.action.addSelectionToNextFindMatch
|
||||
"ctrl-shift-up": ["editor::SelectPrevious", { "replace_newest": false }], // editor.action.addSelectionToPreviousFindMatch
|
||||
"ctrl-k ctrl-d": ["editor::SelectNext", { "replace_newest": true }], // editor.action.moveSelectionToNextFindMatch / find_under_expand_skip
|
||||
"ctrl-k ctrl-shift-d": ["editor::SelectPrevious", { "replace_newest": true }], // editor.action.moveSelectionToPreviousFindMatch
|
||||
"ctrl-k ctrl-i": "editor::Hover",
|
||||
"ctrl-k ctrl-b": "editor::BlameHover",
|
||||
"ctrl-/": ["editor::ToggleComments", { "advance_downwards": false }],
|
||||
@@ -509,8 +527,12 @@
|
||||
"f2": "editor::Rename",
|
||||
"f12": "editor::GoToDefinition",
|
||||
"alt-f12": "editor::GoToDefinitionSplit",
|
||||
"ctrl-shift-f10": "editor::GoToDefinitionSplit",
|
||||
"ctrl-f12": "editor::GoToImplementation",
|
||||
"shift-f12": "editor::GoToTypeDefinition",
|
||||
"ctrl-alt-f12": "editor::GoToTypeDefinitionSplit",
|
||||
"shift-alt-f12": "editor::FindAllReferences",
|
||||
"ctrl-m": "editor::MoveToEnclosingBracket", // from jetbrains
|
||||
"ctrl-shift-\\": "editor::MoveToEnclosingBracket",
|
||||
"ctrl-shift-[": "editor::Fold",
|
||||
"ctrl-shift-]": "editor::UnfoldLines",
|
||||
@@ -534,6 +556,7 @@
|
||||
"ctrl-k r": "editor::RevealInFileManager",
|
||||
"ctrl-k p": "editor::CopyPath",
|
||||
"ctrl-\\": "pane::SplitRight",
|
||||
"ctrl-shift-alt-c": "editor::DisplayCursorNames",
|
||||
"alt-.": "editor::GoToHunk",
|
||||
"alt-,": "editor::GoToPreviousHunk"
|
||||
}
|
||||
@@ -716,20 +739,6 @@
|
||||
"tab": "editor::ComposeCompletion"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Editor && in_snippet && has_next_tabstop && !showing_completions",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"tab": "editor::NextSnippetTabstop"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Editor && in_snippet && has_previous_tabstop && !showing_completions",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"shift-tab": "editor::PreviousSnippetTabstop"
|
||||
}
|
||||
},
|
||||
// Bindings for accepting edit predictions
|
||||
//
|
||||
// alt-l is provided as an alternative to tab/alt-tab. and will be displayed in the UI. This is
|
||||
@@ -815,7 +824,8 @@
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"ctrl-[": "agent::CyclePreviousInlineAssist",
|
||||
"ctrl-]": "agent::CycleNextInlineAssist"
|
||||
"ctrl-]": "agent::CycleNextInlineAssist",
|
||||
"shift-alt-e": "agent::RemoveAllContext"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -856,7 +866,6 @@
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"left": "project_panel::CollapseSelectedEntry",
|
||||
"ctrl-left": "project_panel::CollapseAllEntries",
|
||||
"right": "project_panel::ExpandSelectedEntry",
|
||||
"ctrl-n": "project_panel::NewFile",
|
||||
"alt-n": "project_panel::NewDirectory",
|
||||
@@ -1116,7 +1125,7 @@
|
||||
"shift-insert": "terminal::Paste",
|
||||
"ctrl-v": "terminal::Paste",
|
||||
"ctrl-shift-v": "terminal::Paste",
|
||||
"ctrl-i": "assistant::InlineAssist",
|
||||
"ctrl-enter": "assistant::InlineAssist",
|
||||
"alt-b": ["terminal::SendText", "\u001bb"],
|
||||
"alt-f": ["terminal::SendText", "\u001bf"],
|
||||
"alt-.": ["terminal::SendText", "\u001b."],
|
||||
@@ -1262,25 +1271,11 @@
|
||||
"context": "Onboarding",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"ctrl-=": ["zed::IncreaseUiFontSize", { "persist": false }],
|
||||
"ctrl-+": ["zed::IncreaseUiFontSize", { "persist": false }],
|
||||
"ctrl--": ["zed::DecreaseUiFontSize", { "persist": false }],
|
||||
"ctrl-0": ["zed::ResetUiFontSize", { "persist": false }],
|
||||
"ctrl-enter": "onboarding::Finish",
|
||||
"alt-shift-l": "onboarding::SignIn",
|
||||
"shift-alt-a": "onboarding::OpenAccount"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Welcome",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"ctrl-=": ["zed::IncreaseUiFontSize", { "persist": false }],
|
||||
"ctrl-+": ["zed::IncreaseUiFontSize", { "persist": false }],
|
||||
"ctrl--": ["zed::DecreaseUiFontSize", { "persist": false }],
|
||||
"ctrl-0": ["zed::ResetUiFontSize", { "persist": false }]
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "GitWorktreeSelector || (GitWorktreeSelector > Picker > Editor)",
|
||||
"use_key_equivalents": true,
|
||||
@@ -1297,7 +1292,6 @@
|
||||
"escape": "workspace::CloseWindow",
|
||||
"ctrl-m": "settings_editor::Minimize",
|
||||
"ctrl-f": "search::FocusSearch",
|
||||
"ctrl-,": "settings_editor::OpenCurrentFile",
|
||||
"left": "settings_editor::ToggleFocusNav",
|
||||
"ctrl-shift-e": "settings_editor::ToggleFocusNav",
|
||||
// todo(settings_ui): cut this down based on the max files and overflow UI
|
||||
@@ -1354,12 +1348,5 @@
|
||||
"alt-left": "dev::Zeta2ContextGoBack",
|
||||
"alt-right": "dev::Zeta2ContextGoForward"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "GitBranchSelector || (GitBranchSelector > Picker > Editor)",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"ctrl-shift-backspace": "branch_picker::DeleteBranch"
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
@@ -1,18 +1,16 @@
|
||||
[
|
||||
{
|
||||
"bindings": {
|
||||
"ctrl-alt-s": "zed::OpenSettings",
|
||||
"ctrl-alt-s": "zed::OpenSettingsFile",
|
||||
"ctrl-{": "pane::ActivatePreviousItem",
|
||||
"ctrl-}": "pane::ActivateNextItem",
|
||||
"shift-escape": null, // Unmap workspace::zoom
|
||||
"ctrl-~": "git::Branch",
|
||||
"ctrl-f2": "debugger::Stop",
|
||||
"f6": "debugger::Pause",
|
||||
"f7": "debugger::StepInto",
|
||||
"f8": "debugger::StepOver",
|
||||
"shift-f8": "debugger::StepOut",
|
||||
"f9": "debugger::Continue",
|
||||
"shift-f9": "debugger::Start",
|
||||
"alt-shift-f9": "debugger::Start"
|
||||
}
|
||||
},
|
||||
@@ -48,7 +46,7 @@
|
||||
"alt-f7": "editor::FindAllReferences",
|
||||
"ctrl-alt-f7": "editor::FindAllReferences",
|
||||
"ctrl-b": "editor::GoToDefinition", // Conflicts with workspace::ToggleLeftDock
|
||||
"ctrl-alt-b": "editor::GoToImplementation", // Conflicts with workspace::ToggleRightDock
|
||||
"ctrl-alt-b": "editor::GoToDefinitionSplit", // Conflicts with workspace::ToggleRightDock
|
||||
"ctrl-shift-b": "editor::GoToTypeDefinition",
|
||||
"ctrl-alt-shift-b": "editor::GoToTypeDefinitionSplit",
|
||||
"f2": "editor::GoToDiagnostic",
|
||||
@@ -72,11 +70,7 @@
|
||||
"ctrl-r": ["buffer_search::Deploy", { "replace_enabled": true }],
|
||||
"ctrl-shift-n": "file_finder::Toggle",
|
||||
"ctrl-g": "go_to_line::Toggle",
|
||||
"alt-enter": "editor::ToggleCodeActions",
|
||||
"ctrl-space": "editor::ShowCompletions",
|
||||
"ctrl-q": "editor::Hover",
|
||||
"ctrl-p": "editor::ShowSignatureHelp",
|
||||
"ctrl-\\": "assistant::InlineAssist"
|
||||
"alt-enter": "editor::ToggleCodeActions"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -100,13 +94,9 @@
|
||||
"ctrl-shift-f12": "workspace::ToggleAllDocks",
|
||||
"ctrl-shift-r": ["pane::DeploySearch", { "replace_enabled": true }],
|
||||
"alt-shift-f10": "task::Spawn",
|
||||
"shift-f10": "task::Spawn",
|
||||
"ctrl-f5": "task::Rerun",
|
||||
"ctrl-e": "file_finder::Toggle",
|
||||
"ctrl-k": "git_panel::ToggleFocus", // bug: This should also focus commit editor
|
||||
// "ctrl-k": "git_panel::ToggleFocus", // bug: This should also focus commit editor
|
||||
"ctrl-shift-n": "file_finder::Toggle",
|
||||
"ctrl-n": "project_symbols::Toggle",
|
||||
"ctrl-alt-n": "file_finder::Toggle",
|
||||
"ctrl-shift-a": "command_palette::Toggle",
|
||||
"shift shift": "command_palette::Toggle",
|
||||
"ctrl-alt-shift-n": "project_symbols::Toggle",
|
||||
@@ -143,9 +133,7 @@
|
||||
"context": "Pane",
"bindings": {
"ctrl-alt-left": "pane::GoBack",
"ctrl-alt-right": "pane::GoForward",
"alt-left": "pane::ActivatePreviousItem",
"alt-right": "pane::ActivateNextItem"
"ctrl-alt-right": "pane::GoForward"
}
},
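In the JetBrains keymap this hunk removes alt-left/alt-right tab cycling from the Pane context (the following hunk re-adds them only alongside the terminal bindings). If that cycling is wanted everywhere, a sketch of a user-level override using only the context and actions shown above:

[
  {
    "context": "Pane",
    "bindings": {
      "alt-left": "pane::ActivatePreviousItem",
      "alt-right": "pane::ActivateNextItem"
    }
  }
]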
|
||||
{
|
||||
@@ -164,6 +152,8 @@
|
||||
"bindings": {
|
||||
"ctrl-shift-t": "workspace::NewTerminal",
|
||||
"alt-f12": "workspace::CloseActiveDock",
|
||||
"alt-left": "pane::ActivatePreviousItem",
|
||||
"alt-right": "pane::ActivateNextItem",
|
||||
"ctrl-up": "terminal::ScrollLineUp",
|
||||
"ctrl-down": "terminal::ScrollLineDown",
|
||||
"shift-pageup": "terminal::ScrollPageUp",
|
||||
|
||||
@@ -5,14 +5,12 @@
|
||||
"cmd-}": "pane::ActivateNextItem",
|
||||
"cmd-0": "git_panel::ToggleFocus", // overrides `cmd-0` zoom reset
|
||||
"shift-escape": null, // Unmap workspace::zoom
|
||||
"cmd-~": "git::Branch",
|
||||
"ctrl-f2": "debugger::Stop",
|
||||
"f6": "debugger::Pause",
|
||||
"f7": "debugger::StepInto",
|
||||
"f8": "debugger::StepOver",
|
||||
"shift-f8": "debugger::StepOut",
|
||||
"f9": "debugger::Continue",
|
||||
"shift-f9": "debugger::Start",
|
||||
"alt-shift-f9": "debugger::Start"
|
||||
}
|
||||
},
|
||||
@@ -47,7 +45,7 @@
|
||||
"alt-f7": "editor::FindAllReferences",
|
||||
"cmd-alt-f7": "editor::FindAllReferences",
|
||||
"cmd-b": "editor::GoToDefinition", // Conflicts with workspace::ToggleLeftDock
|
||||
"cmd-alt-b": "editor::GoToImplementation",
|
||||
"cmd-alt-b": "editor::GoToDefinitionSplit",
|
||||
"cmd-shift-b": "editor::GoToTypeDefinition",
|
||||
"cmd-alt-shift-b": "editor::GoToTypeDefinitionSplit",
|
||||
"f2": "editor::GoToDiagnostic",
|
||||
@@ -70,11 +68,7 @@
|
||||
"cmd-r": ["buffer_search::Deploy", { "replace_enabled": true }],
|
||||
"cmd-shift-o": "file_finder::Toggle",
|
||||
"cmd-l": "go_to_line::Toggle",
|
||||
"alt-enter": "editor::ToggleCodeActions",
|
||||
"ctrl-space": "editor::ShowCompletions",
|
||||
"cmd-j": "editor::Hover",
|
||||
"cmd-p": "editor::ShowSignatureHelp",
|
||||
"cmd-\\": "assistant::InlineAssist"
|
||||
"alt-enter": "editor::ToggleCodeActions"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -102,13 +96,9 @@
|
||||
"cmd-shift-f12": "workspace::ToggleAllDocks",
|
||||
"cmd-shift-r": ["pane::DeploySearch", { "replace_enabled": true }],
|
||||
"ctrl-alt-r": "task::Spawn",
|
||||
"shift-f10": "task::Spawn",
|
||||
"cmd-f5": "task::Rerun",
|
||||
"cmd-e": "file_finder::Toggle",
|
||||
"cmd-k": "git_panel::ToggleFocus", // bug: This should also focus commit editor
|
||||
// "cmd-k": "git_panel::ToggleFocus", // bug: This should also focus commit editor
|
||||
"cmd-shift-o": "file_finder::Toggle",
|
||||
"cmd-shift-n": "file_finder::Toggle",
|
||||
"cmd-n": "project_symbols::Toggle",
|
||||
"cmd-shift-a": "command_palette::Toggle",
|
||||
"shift shift": "command_palette::Toggle",
|
||||
"cmd-alt-o": "project_symbols::Toggle", // JetBrains: Go to Symbol
|
||||
@@ -145,9 +135,7 @@
|
||||
"context": "Pane",
|
||||
"bindings": {
|
||||
"cmd-alt-left": "pane::GoBack",
|
||||
"cmd-alt-right": "pane::GoForward",
|
||||
"alt-left": "pane::ActivatePreviousItem",
|
||||
"alt-right": "pane::ActivateNextItem"
|
||||
"cmd-alt-right": "pane::GoForward"
|
||||
}
|
||||
},
|
||||
{
|
||||
|
||||
@@ -414,9 +414,8 @@
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "VimControl && vim_mode == helix_normal && !menu",
|
||||
"context": "vim_mode == helix_normal && !menu",
|
||||
"bindings": {
|
||||
"escape": "vim::SwitchToHelixNormalMode",
|
||||
"i": "vim::HelixInsert",
|
||||
"a": "vim::HelixAppend",
|
||||
"ctrl-[": "editor::Cancel"
|
||||
@@ -456,7 +455,6 @@
|
||||
"<": "vim::Outdent",
|
||||
"=": "vim::AutoIndent",
|
||||
"d": "vim::HelixDelete",
|
||||
"alt-d": "editor::Delete", // Delete selection, without yanking
|
||||
"c": "vim::HelixSubstitute",
|
||||
"alt-c": "vim::HelixSubstituteNoYank",
|
||||
|
||||
@@ -477,9 +475,6 @@
|
||||
"alt-p": "editor::SelectPreviousSyntaxNode",
|
||||
"alt-n": "editor::SelectNextSyntaxNode",
|
||||
|
||||
"n": "vim::HelixSelectNext",
|
||||
"shift-n": "vim::HelixSelectPrevious",
|
||||
|
||||
// Goto mode
|
||||
"g e": "vim::EndOfDocument",
|
||||
"g h": "vim::StartOfLine",
|
||||
@@ -857,8 +852,6 @@
|
||||
"ctrl-w shift-right": "workspace::SwapPaneRight",
|
||||
"ctrl-w shift-up": "workspace::SwapPaneUp",
|
||||
"ctrl-w shift-down": "workspace::SwapPaneDown",
|
||||
"ctrl-w x": "workspace::SwapPaneAdjacent",
|
||||
"ctrl-w ctrl-x": "workspace::SwapPaneAdjacent",
|
||||
"ctrl-w shift-h": "workspace::MovePaneLeft",
|
||||
"ctrl-w shift-l": "workspace::MovePaneRight",
|
||||
"ctrl-w shift-k": "workspace::MovePaneUp",
|
||||
|
||||
@@ -175,16 +175,6 @@
|
||||
//
|
||||
// Default: true
|
||||
"zoomed_padding": true,
|
||||
// What draws Zed's window decorations (titlebar):
|
||||
// 1. Client application (Zed) draws its own window decorations
|
||||
// "client"
|
||||
// 2. Display server draws the window decorations. Not supported by GNOME Wayland.
|
||||
// "server"
|
||||
//
|
||||
// This requires restarting Zed for changes to take effect.
|
||||
//
|
||||
// Default: "client"
|
||||
"window_decorations": "client",
|
||||
// Whether to use the system provided dialogs for Open and Save As.
|
||||
// When set to false, Zed will use the built-in keyboard-first pickers.
|
||||
"use_system_path_prompts": true,
|
||||
@@ -265,12 +255,6 @@
|
||||
// Whether to display inline and alongside documentation for items in the
|
||||
// completions menu
|
||||
"show_completion_documentation": true,
|
||||
// Whether to colorize brackets in the editor.
|
||||
// (also known as "rainbow brackets")
|
||||
//
|
||||
// The colors that are used for different indentation levels are defined in the theme (theme key: `accents`).
|
||||
// They can be customized by using theme overrides.
|
||||
"colorize_brackets": false,
|
||||
// When to show the scrollbar in the completion menu.
|
||||
// This setting can take four values:
|
||||
//
|
||||
@@ -621,10 +605,6 @@
|
||||
// to both the horizontal and vertical delta values while scrolling. Fast scrolling
// happens when a user holds the alt or option key while scrolling.
"fast_scroll_sensitivity": 4.0,
"sticky_scroll": {
// Whether to stick scopes to the top of the editor.
"enabled": false
},
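Both keys above sit at the top level of the settings file and appear only on one side of this hunk, so availability depends on which branch your build matches. A sketch with illustrative values; the outer braces exist only to keep the fragment valid JSONC.

{
  // Applied to scroll deltas while alt/option is held
  "fast_scroll_sensitivity": 4.0,
  "sticky_scroll": {
    "enabled": true // pin enclosing scopes to the top of the editor
  }
}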
|
||||
"relative_line_numbers": "disabled",
|
||||
// If 'search_wrap' is disabled, search result do not wrap around the end of the file.
|
||||
"search_wrap": true,
|
||||
@@ -632,13 +612,9 @@
|
||||
"search": {
|
||||
// Whether to show the project search button in the status bar.
|
||||
"button": true,
|
||||
// Whether to only match on whole words.
|
||||
"whole_word": false,
|
||||
// Whether to match case sensitively.
|
||||
"case_sensitive": false,
|
||||
// Whether to include gitignored files in search results.
|
||||
"include_ignored": false,
|
||||
// Whether to interpret the search query as a regular expression.
|
||||
"regex": false,
|
||||
// Whether to center the cursor on each search match when navigating.
|
||||
"center_on_match": false
|
||||
@@ -758,31 +734,14 @@
|
||||
// "never"
|
||||
"show": "always"
|
||||
},
|
||||
// Sort order for entries in the project panel.
|
||||
// This setting can take three values:
|
||||
//
|
||||
// 1. Show directories first, then files:
|
||||
// "directories_first"
|
||||
// 2. Mix directories and files together:
|
||||
// "mixed"
|
||||
// 3. Show files first, then directories:
|
||||
// "files_first"
|
||||
"sort_mode": "directories_first",
|
||||
// Whether to enable drag-and-drop operations in the project panel.
|
||||
"drag_and_drop": true,
|
||||
// Whether to hide the root entry when only one folder is open in the window.
|
||||
"hide_root": false,
|
||||
// Whether to hide the hidden entries in the project panel.
|
||||
"hide_hidden": false,
|
||||
// Settings for automatically opening files.
"auto_open": {
// Whether to automatically open newly created files in the editor.
"on_create": true,
// Whether to automatically open files after pasting or duplicating them.
"on_paste": true,
// Whether to automatically open files dropped from external sources.
"on_drop": true
}
// Whether to automatically open files when pasting them in the project panel.
"open_file_on_paste": true
},
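One side of this hunk uses the fine-grained "auto_open" block, the other a single "open_file_on_paste" flag. A sketch of the single-flag form; the "project_panel" wrapper key is an assumption implied by the surrounding comments rather than shown in the hunk.

"project_panel": {
  // Automatically open files that are pasted or duplicated in the panel
  "open_file_on_paste": true
}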
|
||||
"outline_panel": {
|
||||
// Whether to show the outline panel button in the status bar
|
||||
@@ -1100,22 +1059,13 @@
|
||||
"preview_tabs": {
// Whether preview tabs should be enabled.
// Preview tabs allow you to open files in preview mode, where they close automatically
// when you open another preview tab.
// when you switch to another file unless you explicitly pin them.
// This is useful for quickly viewing files without cluttering your workspace.
"enabled": true,
// Whether to open tabs in preview mode when opened from the project panel with a single click.
"enable_preview_from_project_panel": true,
// Whether to open tabs in preview mode when selected from the file finder.
"enable_preview_from_file_finder": false,
// Whether to open tabs in preview mode when opened from a multibuffer.
"enable_preview_from_multibuffer": true,
// Whether to open tabs in preview mode when code navigation is used to open a multibuffer.
"enable_preview_multibuffer_from_code_navigation": false,
// Whether to open tabs in preview mode when code navigation is used to open a single file.
"enable_preview_file_from_code_navigation": true,
// Whether to keep tabs in preview mode when code navigation is used to navigate away from them.
// If `enable_preview_file_from_code_navigation` or `enable_preview_multibuffer_from_code_navigation` is also true, the new tab may replace the existing one.
"enable_keep_preview_on_code_navigation": false
// Whether a preview tab gets replaced when code navigation is used to navigate away from the tab.
"enable_preview_from_code_navigation": false
},
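This hunk trades several per-source code-navigation flags for a single "enable_preview_from_code_navigation" option. A sketch of the simpler shape, using only keys visible above; which keys actually exist depends on which side of the hunk your build matches, and the values are illustrative.

"preview_tabs": {
  "enabled": true,
  // Single flag controlling preview behaviour for code navigation on this side of the hunk
  "enable_preview_from_code_navigation": false
}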
|
||||
// Settings related to the file finder.
|
||||
"file_finder": {
|
||||
@@ -1218,13 +1168,6 @@
|
||||
"tab_size": 4,
|
||||
// What debuggers are preferred by default for all languages.
|
||||
"debuggers": [],
|
||||
// Whether to enable word diff highlighting in the editor.
|
||||
//
|
||||
// When enabled, changed words within modified lines are highlighted
|
||||
// to show exactly what changed.
|
||||
//
|
||||
// Default: true
|
||||
"word_diff_enabled": true,
|
||||
// Control what info is collected by Zed.
|
||||
"telemetry": {
|
||||
// Send debug info like crash reports.
|
||||
@@ -1348,10 +1291,7 @@
// "hunk_style": "staged_hollow"
// 2. Show unstaged hunks hollow and staged hunks filled:
// "hunk_style": "unstaged_hollow"
"hunk_style": "staged_hollow",
// Should the name or path be displayed first in the git view.
// "path_style": "file_name_first" or "file_path_first"
"path_style": "file_name_first"
"hunk_style": "staged_hollow"
},
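A sketch selecting the other value documented in the comments above; the "git" wrapper key is an assumption (it is not visible in this hunk), and "path_style" appears on only one side of the hunk, so it is left out.

"git": {
  // Unstaged hunks hollow, staged hunks filled
  "hunk_style": "unstaged_hollow"
}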
|
||||
// The list of custom Git hosting providers.
|
||||
"git_hosting_providers": [
|
||||
@@ -1366,8 +1306,6 @@
|
||||
// "load_direnv": "direct"
|
||||
// 2. Load direnv configuration through the shell hook, works for POSIX shells and fish.
|
||||
// "load_direnv": "shell_hook"
|
||||
// 3. Don't load direnv configuration at all.
|
||||
// "load_direnv": "disabled"
|
||||
"load_direnv": "direct",
|
||||
"edit_predictions": {
|
||||
// A list of globs representing files that edit predictions should be disabled for.
|
||||
@@ -1459,7 +1397,7 @@
|
||||
"default_height": 320,
|
||||
// What working directory to use when launching the terminal.
|
||||
// May take 4 values:
|
||||
// 1. Use the current file's project directory. Fallback to the
|
||||
// 1. Use the current file's project directory. Will Fallback to the
|
||||
// first project directory strategy if unsuccessful
|
||||
// "working_directory": "current_project_directory"
|
||||
// 2. Use the first project in this workspace's directory
|
||||
@@ -1549,11 +1487,7 @@
// in your project's settings, rather than globally.
"directories": [".env", "env", ".venv", "venv"],
// Can also be `csh`, `fish`, `nushell` and `power_shell`
"activate_script": "default",
// Preferred Conda manager to use when activating Conda environments.
// Values: "auto", "conda", "mamba", "micromamba"
// Default: "auto"
"conda_manager": "auto"
"activate_script": "default"
}
},
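The enclosing venv-detection keys are not visible in this hunk, so only the fragment itself is sketched here; "fish" is one of the alternatives the comment above lists, and the directory list is copied verbatim.

{
  "directories": [".env", "env", ".venv", "venv"],
  // One of "default", "csh", "fish", "nushell", "power_shell" per the comment above
  "activate_script": "fish"
}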
|
||||
"toolbar": {
|
||||
@@ -1597,8 +1531,6 @@
|
||||
// Default: 10_000, maximum: 100_000 (all bigger values set will be treated as 100_000), 0 disables the scrolling.
|
||||
// Existing terminals will not pick up this change until they are recreated.
|
||||
"max_scroll_history_lines": 10000,
|
||||
// The multiplier for scrolling speed in the terminal.
|
||||
"scroll_multiplier": 1.0,
|
||||
// The minimum APCA perceptual contrast between foreground and background colors.
|
||||
// APCA (Accessible Perceptual Contrast Algorithm) is more accurate than WCAG 2.x,
|
||||
// especially for dark mode. Values range from 0 to 106.
|
||||
@@ -1613,59 +1545,7 @@
|
||||
//
|
||||
// Most terminal themes have APCA values of 40-70.
|
||||
// A value of 45 preserves colorful themes while ensuring legibility.
|
||||
"minimum_contrast": 45,
|
||||
// Regexes used to identify paths for hyperlink navigation. Supports optional named capture
|
||||
// groups `path`, `line`, `column`, and `link`. If none of these are present, the entire match
|
||||
// is the hyperlink target. If `path` is present, it is the hyperlink target, along with `line`
|
||||
// and `column` if present. `link` may be used to customize what text in terminal is part of the
|
||||
// hyperlink. If `link` is not present, the text of the entire match is used. If `line` and
|
||||
// `column` are not present, the default built-in line and column suffix processing is used
|
||||
// which parses `line:column` and `(line,column)` variants. The default value handles Python
|
||||
// diagnostics and common path, line, column syntaxes. This can be extended or replaced to
|
||||
// handle specific scenarios. For example, to enable support for hyperlinking paths which
|
||||
// contain spaces in rust output,
|
||||
//
|
||||
// [
|
||||
// "\\s+(-->|:::|at) (?<link>(?<path>.+?))(:$|$)",
|
||||
// "\\s+(Compiling|Checking|Documenting) [^(]+\\((?<link>(?<path>.+))\\)"
|
||||
// ],
|
||||
//
|
||||
// could be used. Processing stops at the first regex with a match, even if no link is
|
||||
// produced which is the case when the cursor is not over the hyperlinked text. For best
|
||||
// performance it is recommended to order regexes from most common to least common. For
|
||||
// readability and documentation, each regex may be an array of strings which are collected
|
||||
// into one multi-line regex string for use in terminal path hyperlink detection.
|
||||
"path_hyperlink_regexes": [
|
||||
// Python-style diagnostics
|
||||
"File \"(?<path>[^\"]+)\", line (?<line>[0-9]+)",
|
||||
// Common path syntax with optional line, column, description, trailing punctuation, or
|
||||
// surrounding symbols or quotes
|
||||
[
|
||||
"(?x)",
|
||||
"# optionally starts with 0-2 opening prefix symbols",
|
||||
"[({\\[<]{0,2}",
|
||||
"# which may be followed by an opening quote",
|
||||
"(?<quote>[\"'`])?",
|
||||
"# `path` is the shortest sequence of any non-space character",
|
||||
"(?<link>(?<path>[^ ]+?",
|
||||
" # which may end with a line and optionally a column,",
|
||||
" (?<line_column>:+[0-9]+(:[0-9]+)?|:?\\([0-9]+([,:][0-9]+)?\\))?",
|
||||
"))",
|
||||
"# which must be followed by a matching quote",
|
||||
"(?(<quote>)\\k<quote>)",
|
||||
"# and optionally a single closing symbol",
|
||||
"[)}\\]>]?",
|
||||
"# if line/column matched, may be followed by a description",
|
||||
"(?(<line_column>):[^ 0-9][^ ]*)?",
|
||||
"# which may be followed by trailing punctuation",
|
||||
"[.,:)}\\]>]*",
|
||||
"# and always includes trailing whitespace or end of line",
|
||||
"([ ]+|$)"
|
||||
]
|
||||
],
// Timeout for hover and Cmd-click path hyperlink discovery in milliseconds. Specifying a
// timeout of `0` will disable path hyperlinking in terminal.
"path_hyperlink_timeout_ms": 1
"minimum_contrast": 45
},
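The long comment above explains the named capture groups (`path`, `line`, `column`, `link`) and embeds an example for Rust compiler output with spaces in paths. A sketch reusing exactly those regexes; the "terminal" wrapper key is an assumption, and "path_hyperlink_regexes" plus its timeout exist only on the side of this hunk that still carries them.

"terminal": {
  "minimum_contrast": 45,
  // Regexes copied from the example in the comment above (Rust output with spaces in paths)
  "path_hyperlink_regexes": [
    "\\s+(-->|:::|at) (?<link>(?<path>.+?))(:$|$)",
    "\\s+(Compiling|Checking|Documenting) [^(]+\\((?<link>(?<path>.+))\\)"
  ],
  "path_hyperlink_timeout_ms": 1
}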
|
||||
"code_actions_on_format": {},
|
||||
// Settings related to running tasks.
|
||||
@@ -1907,7 +1787,7 @@
|
||||
}
|
||||
},
|
||||
"PHP": {
|
||||
"language_servers": ["phpactor", "!intelephense", "!phptools", "..."],
|
||||
"language_servers": ["phpactor", "!intelephense", "..."],
|
||||
"prettier": {
|
||||
"allowed": true,
|
||||
"plugins": ["@prettier/plugin-php"],
|
||||
@@ -2148,18 +2028,6 @@
|
||||
"dev": {
// "theme": "Andromeda"
},
// Settings overrides to use when using Linux.
"linux": {},
// Settings overrides to use when using macOS.
"macos": {},
// Settings overrides to use when using Windows.
"windows": {
"languages": {
"PHP": {
"language_servers": ["intelephense", "!phpactor", "!phptools", "..."]
}
}
},
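The block above shows per-platform override sections ("linux", "macos", "windows") wrapping ordinary settings. A sketch of such an override, using only keys and values visible in this hunk; whether these platform blocks are available depends on which side of the hunk your build matches.

"windows": {
  "languages": {
    "PHP": {
      // Prefer intelephense on Windows; "..." keeps the remaining default servers
      "language_servers": ["intelephense", "!phpactor", "!phptools", "..."]
    }
  }
}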
|
||||
// Whether to show full labels in line indicator or short ones
|
||||
//
|
||||
// Values:
|
||||
|
||||
@@ -45,7 +45,6 @@
|
||||
"tab.inactive_background": "#1f2127ff",
|
||||
"tab.active_background": "#0d1016ff",
|
||||
"search.match_background": "#5ac2fe66",
|
||||
"search.active_match_background": "#ea570166",
|
||||
"panel.background": "#1f2127ff",
|
||||
"panel.focused_border": "#5ac1feff",
|
||||
"pane.focused_border": null,
|
||||
@@ -437,7 +436,6 @@
|
||||
"tab.inactive_background": "#ececedff",
|
||||
"tab.active_background": "#fcfcfcff",
|
||||
"search.match_background": "#3b9ee566",
|
||||
"search.active_match_background": "#f88b3666",
|
||||
"panel.background": "#ececedff",
|
||||
"panel.focused_border": "#3b9ee5ff",
|
||||
"pane.focused_border": null,
|
||||
@@ -829,7 +827,6 @@
|
||||
"tab.inactive_background": "#353944ff",
|
||||
"tab.active_background": "#242835ff",
|
||||
"search.match_background": "#73cffe66",
|
||||
"search.active_match_background": "#fd722b66",
|
||||
"panel.background": "#353944ff",
|
||||
"panel.focused_border": null,
|
||||
"pane.focused_border": null,
|
||||
|
||||
@@ -46,7 +46,6 @@
|
||||
"tab.inactive_background": "#3a3735ff",
|
||||
"tab.active_background": "#282828ff",
|
||||
"search.match_background": "#83a59866",
|
||||
"search.active_match_background": "#c09f3f66",
|
||||
"panel.background": "#3a3735ff",
|
||||
"panel.focused_border": "#83a598ff",
|
||||
"pane.focused_border": null,
|
||||
@@ -453,7 +452,6 @@
|
||||
"tab.inactive_background": "#393634ff",
|
||||
"tab.active_background": "#1d2021ff",
|
||||
"search.match_background": "#83a59866",
|
||||
"search.active_match_background": "#c9653666",
|
||||
"panel.background": "#393634ff",
|
||||
"panel.focused_border": "#83a598ff",
|
||||
"pane.focused_border": null,
|
||||
@@ -860,7 +858,6 @@
|
||||
"tab.inactive_background": "#3b3735ff",
|
||||
"tab.active_background": "#32302fff",
|
||||
"search.match_background": "#83a59866",
|
||||
"search.active_match_background": "#aea85166",
|
||||
"panel.background": "#3b3735ff",
|
||||
"panel.focused_border": null,
|
||||
"pane.focused_border": null,
|
||||
@@ -1267,7 +1264,6 @@
|
||||
"tab.inactive_background": "#ecddb4ff",
|
||||
"tab.active_background": "#fbf1c7ff",
|
||||
"search.match_background": "#0b667866",
|
||||
"search.active_match_background": "#ba2d1166",
|
||||
"panel.background": "#ecddb4ff",
|
||||
"panel.focused_border": null,
|
||||
"pane.focused_border": null,
|
||||
@@ -1674,7 +1670,6 @@
|
||||
"tab.inactive_background": "#ecddb5ff",
|
||||
"tab.active_background": "#f9f5d7ff",
|
||||
"search.match_background": "#0b667866",
|
||||
"search.active_match_background": "#dc351466",
|
||||
"panel.background": "#ecddb5ff",
|
||||
"panel.focused_border": null,
|
||||
"pane.focused_border": null,
|
||||
@@ -2081,7 +2076,6 @@
|
||||
"tab.inactive_background": "#ecdcb3ff",
|
||||
"tab.active_background": "#f2e5bcff",
|
||||
"search.match_background": "#0b667866",
|
||||
"search.active_match_background": "#d7331466",
|
||||
"panel.background": "#ecdcb3ff",
|
||||
"panel.focused_border": null,
|
||||
"pane.focused_border": null,
|
||||
|
||||
@@ -45,7 +45,6 @@
|
||||
"tab.inactive_background": "#2f343eff",
|
||||
"tab.active_background": "#282c33ff",
|
||||
"search.match_background": "#74ade866",
|
||||
"search.active_match_background": "#e8af7466",
|
||||
"panel.background": "#2f343eff",
|
||||
"panel.focused_border": null,
|
||||
"pane.focused_border": null,
|
||||
@@ -99,8 +98,6 @@
|
||||
"link_text.hover": "#74ade8ff",
|
||||
"version_control.added": "#27a657ff",
|
||||
"version_control.modified": "#d3b020ff",
|
||||
"version_control.word_added": "#2EA04859",
|
||||
"version_control.word_deleted": "#78081BCC",
|
||||
"version_control.deleted": "#e06c76ff",
|
||||
"version_control.conflict_marker.ours": "#a1c1811a",
|
||||
"version_control.conflict_marker.theirs": "#74ade81a",
|
||||
@@ -449,7 +446,6 @@
|
||||
"tab.inactive_background": "#ebebecff",
|
||||
"tab.active_background": "#fafafaff",
|
||||
"search.match_background": "#5c79e266",
|
||||
"search.active_match_background": "#d0a92366",
|
||||
"panel.background": "#ebebecff",
|
||||
"panel.focused_border": null,
|
||||
"pane.focused_border": null,
|
||||
@@ -503,8 +499,6 @@
|
||||
"link_text.hover": "#5c78e2ff",
|
||||
"version_control.added": "#27a657ff",
|
||||
"version_control.modified": "#d3b020ff",
|
||||
"version_control.word_added": "#2EA04859",
|
||||
"version_control.word_deleted": "#F85149CC",
|
||||
"version_control.deleted": "#e06c76ff",
|
||||
"conflict": "#a48819ff",
|
||||
"conflict.background": "#faf2e6ff",
|
||||
|
||||
@@ -39,7 +39,6 @@ serde_json.workspace = true
|
||||
settings.workspace = true
|
||||
smol.workspace = true
|
||||
task.workspace = true
|
||||
telemetry.workspace = true
|
||||
terminal.workspace = true
|
||||
ui.workspace = true
|
||||
url.workspace = true
|
||||
@@ -57,4 +56,3 @@ rand.workspace = true
|
||||
tempfile.workspace = true
|
||||
util.workspace = true
|
||||
settings.workspace = true
|
||||
zlog.workspace = true
|
||||
|
||||
@@ -20,8 +20,6 @@ impl UserMessageId {
|
||||
}
|
||||
|
||||
pub trait AgentConnection {
|
||||
fn telemetry_id(&self) -> &'static str;
|
||||
|
||||
fn new_thread(
|
||||
self: Rc<Self>,
|
||||
project: Entity<Project>,
|
||||
@@ -108,6 +106,9 @@ pub trait AgentSessionSetTitle {
|
||||
}
|
||||
|
||||
pub trait AgentTelemetry {
|
||||
/// The name of the agent used for telemetry.
|
||||
fn agent_name(&self) -> String;
|
||||
|
||||
/// A representation of the current thread state that can be serialized for
|
||||
/// storage with telemetry events.
|
||||
fn thread_data(
|
||||
@@ -197,11 +198,6 @@ pub trait AgentModelSelector: 'static {
|
||||
fn watch(&self, _cx: &mut App) -> Option<watch::Receiver<()>> {
|
||||
None
|
||||
}
|
||||
|
||||
/// Returns whether the model picker should render a footer.
|
||||
fn should_render_footer(&self) -> bool {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
@@ -322,10 +318,6 @@ mod test_support {
|
||||
}
|
||||
|
||||
impl AgentConnection for StubAgentConnection {
|
||||
fn telemetry_id(&self) -> &'static str {
|
||||
"stub"
|
||||
}
|
||||
|
||||
fn auth_methods(&self) -> &[acp::AuthMethod] {
|
||||
&[]
|
||||
}
|
||||
@@ -336,7 +328,7 @@ mod test_support {
|
||||
_cwd: &Path,
|
||||
cx: &mut gpui::App,
|
||||
) -> Task<gpui::Result<Entity<AcpThread>>> {
|
||||
let session_id = acp::SessionId::new(self.sessions.lock().len().to_string());
|
||||
let session_id = acp::SessionId(self.sessions.lock().len().to_string().into());
|
||||
let action_log = cx.new(|_| ActionLog::new(project.clone()));
|
||||
let thread = cx.new(|cx| {
|
||||
AcpThread::new(
|
||||
@@ -345,12 +337,12 @@ mod test_support {
|
||||
project,
|
||||
action_log,
|
||||
session_id.clone(),
|
||||
watch::Receiver::constant(
|
||||
acp::PromptCapabilities::new()
|
||||
.image(true)
|
||||
.audio(true)
|
||||
.embedded_context(true),
|
||||
),
|
||||
watch::Receiver::constant(acp::PromptCapabilities {
|
||||
image: true,
|
||||
audio: true,
|
||||
embedded_context: true,
|
||||
meta: None,
|
||||
}),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
@@ -389,7 +381,10 @@ mod test_support {
|
||||
response_tx.replace(tx);
|
||||
cx.spawn(async move |_| {
|
||||
let stop_reason = rx.await?;
|
||||
Ok(acp::PromptResponse::new(stop_reason))
|
||||
Ok(acp::PromptResponse {
|
||||
stop_reason,
|
||||
meta: None,
|
||||
})
|
||||
})
|
||||
} else {
|
||||
for update in self.next_prompt_updates.lock().drain(..) {
|
||||
@@ -397,7 +392,7 @@ mod test_support {
|
||||
let update = update.clone();
|
||||
let permission_request = if let acp::SessionUpdate::ToolCall(tool_call) =
|
||||
&update
|
||||
&& let Some(options) = self.permission_requests.get(&tool_call.tool_call_id)
|
||||
&& let Some(options) = self.permission_requests.get(&tool_call.id)
|
||||
{
|
||||
Some((tool_call.clone(), options.clone()))
|
||||
} else {
|
||||
@@ -426,7 +421,10 @@ mod test_support {
|
||||
|
||||
cx.spawn(async move |_| {
|
||||
try_join_all(tasks).await?;
|
||||
Ok(acp::PromptResponse::new(acp::StopReason::EndTurn))
|
||||
Ok(acp::PromptResponse {
|
||||
stop_reason: acp::StopReason::EndTurn,
|
||||
meta: None,
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -50,14 +50,9 @@ impl Diff {
|
||||
let hunk_ranges = {
|
||||
let buffer = buffer.read(cx);
|
||||
let diff = diff.read(cx);
|
||||
diff.hunks_intersecting_range(
|
||||
Anchor::min_for_buffer(buffer.remote_id())
|
||||
..Anchor::max_for_buffer(buffer.remote_id()),
|
||||
buffer,
|
||||
cx,
|
||||
)
|
||||
.map(|diff_hunk| diff_hunk.buffer_range.to_point(buffer))
|
||||
.collect::<Vec<_>>()
|
||||
diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, buffer, cx)
|
||||
.map(|diff_hunk| diff_hunk.buffer_range.to_point(buffer))
|
||||
.collect::<Vec<_>>()
|
||||
};
|
||||
|
||||
multibuffer.set_excerpts_for_path(
|
||||
@@ -321,12 +316,7 @@ impl PendingDiff {
|
||||
let buffer = self.new_buffer.read(cx);
|
||||
let diff = self.diff.read(cx);
|
||||
let mut ranges = diff
|
||||
.hunks_intersecting_range(
|
||||
Anchor::min_for_buffer(buffer.remote_id())
|
||||
..Anchor::max_for_buffer(buffer.remote_id()),
|
||||
buffer,
|
||||
cx,
|
||||
)
|
||||
.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, buffer, cx)
|
||||
.map(|diff_hunk| diff_hunk.buffer_range.to_point(buffer))
|
||||
.collect::<Vec<_>>();
|
||||
ranges.extend(
|
||||
|
||||
@@ -108,7 +108,7 @@ impl MentionUri {
|
||||
if let Some(thread_id) = path.strip_prefix("/agent/thread/") {
|
||||
let name = single_query_param(&url, "name")?.context("Missing thread name")?;
|
||||
Ok(Self::Thread {
|
||||
id: acp::SessionId::new(thread_id),
|
||||
id: acp::SessionId(thread_id.into()),
|
||||
name,
|
||||
})
|
||||
} else if let Some(path) = path.strip_prefix("/agent/text-thread/") {
|
||||
|
||||
@@ -75,15 +75,11 @@ impl Terminal {
|
||||
|
||||
let exit_status = exit_status.map(portable_pty::ExitStatus::from);
|
||||
|
||||
let mut status = acp::TerminalExitStatus::new();
|
||||
|
||||
if let Some(exit_status) = exit_status.as_ref() {
|
||||
status = status.exit_code(exit_status.exit_code());
|
||||
if let Some(signal) = exit_status.signal() {
|
||||
status = status.signal(signal);
|
||||
}
|
||||
acp::TerminalExitStatus {
|
||||
exit_code: exit_status.as_ref().map(|e| e.exit_code()),
|
||||
signal: exit_status.and_then(|e| e.signal().map(Into::into)),
|
||||
meta: None,
|
||||
}
|
||||
status
|
||||
})
|
||||
.shared(),
|
||||
}
|
||||
@@ -105,23 +101,27 @@ impl Terminal {
|
||||
|
||||
pub fn current_output(&self, cx: &App) -> acp::TerminalOutputResponse {
|
||||
if let Some(output) = self.output.as_ref() {
|
||||
let mut exit_status = acp::TerminalExitStatus::new();
|
||||
if let Some(status) = output.exit_status.map(portable_pty::ExitStatus::from) {
|
||||
exit_status = exit_status.exit_code(status.exit_code());
|
||||
if let Some(signal) = status.signal() {
|
||||
exit_status = exit_status.signal(signal);
|
||||
}
|
||||
}
|
||||
let exit_status = output.exit_status.map(portable_pty::ExitStatus::from);
|
||||
|
||||
acp::TerminalOutputResponse::new(
|
||||
output.content.clone(),
|
||||
output.original_content_len > output.content.len(),
|
||||
)
|
||||
.exit_status(exit_status)
|
||||
acp::TerminalOutputResponse {
|
||||
output: output.content.clone(),
|
||||
truncated: output.original_content_len > output.content.len(),
|
||||
exit_status: Some(acp::TerminalExitStatus {
|
||||
exit_code: exit_status.as_ref().map(|e| e.exit_code()),
|
||||
signal: exit_status.and_then(|e| e.signal().map(Into::into)),
|
||||
meta: None,
|
||||
}),
|
||||
meta: None,
|
||||
}
|
||||
} else {
|
||||
let (current_content, original_len) = self.truncated_output(cx);
|
||||
let truncated = current_content.len() < original_len;
|
||||
acp::TerminalOutputResponse::new(current_content, truncated)
|
||||
|
||||
acp::TerminalOutputResponse {
|
||||
truncated: current_content.len() < original_len,
|
||||
output: current_content,
|
||||
exit_status: None,
|
||||
meta: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -528,7 +528,7 @@ impl Render for AcpTools {
|
||||
.with_sizing_behavior(gpui::ListSizingBehavior::Auto)
|
||||
.size_full(),
|
||||
)
|
||||
.vertical_scrollbar_for(&connection.list_state, window, cx)
|
||||
.vertical_scrollbar_for(connection.list_state.clone(), window, cx)
|
||||
.into_any()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -20,7 +20,6 @@ futures.workspace = true
|
||||
gpui.workspace = true
|
||||
language.workspace = true
|
||||
project.workspace = true
|
||||
telemetry.workspace = true
|
||||
text.workspace = true
|
||||
util.workspace = true
|
||||
watch.workspace = true
|
||||
|
||||
@@ -3,9 +3,7 @@ use buffer_diff::BufferDiff;
|
||||
use clock;
|
||||
use collections::BTreeMap;
|
||||
use futures::{FutureExt, StreamExt, channel::mpsc};
|
||||
use gpui::{
|
||||
App, AppContext, AsyncApp, Context, Entity, SharedString, Subscription, Task, WeakEntity,
|
||||
};
|
||||
use gpui::{App, AppContext, AsyncApp, Context, Entity, Subscription, Task, WeakEntity};
|
||||
use language::{Anchor, Buffer, BufferEvent, DiskState, Point, ToPoint};
|
||||
use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle};
|
||||
use std::{cmp, ops::Range, sync::Arc};
|
||||
@@ -33,6 +31,71 @@ impl ActionLog {
|
||||
&self.project
|
||||
}
|
||||
|
||||
pub fn latest_snapshot(&self, buffer: &Entity<Buffer>) -> Option<text::BufferSnapshot> {
|
||||
Some(self.tracked_buffers.get(buffer)?.snapshot.clone())
|
||||
}
|
||||
|
||||
/// Return a unified diff patch with user edits made since last read or notification
|
||||
pub fn unnotified_user_edits(&self, cx: &Context<Self>) -> Option<String> {
|
||||
let diffs = self
|
||||
.tracked_buffers
|
||||
.values()
|
||||
.filter_map(|tracked| {
|
||||
if !tracked.may_have_unnotified_user_edits {
|
||||
return None;
|
||||
}
|
||||
|
||||
let text_with_latest_user_edits = tracked.diff_base.to_string();
|
||||
let text_with_last_seen_user_edits = tracked.last_seen_base.to_string();
|
||||
if text_with_latest_user_edits == text_with_last_seen_user_edits {
|
||||
return None;
|
||||
}
|
||||
let patch = language::unified_diff(
|
||||
&text_with_last_seen_user_edits,
|
||||
&text_with_latest_user_edits,
|
||||
);
|
||||
|
||||
let buffer = tracked.buffer.clone();
|
||||
let file_path = buffer
|
||||
.read(cx)
|
||||
.file()
|
||||
.map(|file| {
|
||||
let mut path = file.full_path(cx).to_string_lossy().into_owned();
|
||||
if file.path_style(cx).is_windows() {
|
||||
path = path.replace('\\', "/");
|
||||
}
|
||||
path
|
||||
})
|
||||
.unwrap_or_else(|| format!("buffer_{}", buffer.entity_id()));
|
||||
|
||||
let mut result = String::new();
|
||||
result.push_str(&format!("--- a/{}\n", file_path));
|
||||
result.push_str(&format!("+++ b/{}\n", file_path));
|
||||
result.push_str(&patch);
|
||||
|
||||
Some(result)
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
if diffs.is_empty() {
|
||||
return None;
|
||||
}
|
||||
|
||||
let unified_diff = diffs.join("\n\n");
|
||||
Some(unified_diff)
|
||||
}
|
||||
|
||||
/// Return a unified diff patch with user edits made since last read/notification
|
||||
/// and mark them as notified
|
||||
pub fn flush_unnotified_user_edits(&mut self, cx: &Context<Self>) -> Option<String> {
|
||||
let patch = self.unnotified_user_edits(cx);
|
||||
self.tracked_buffers.values_mut().for_each(|tracked| {
|
||||
tracked.may_have_unnotified_user_edits = false;
|
||||
tracked.last_seen_base = tracked.diff_base.clone();
|
||||
});
|
||||
patch
|
||||
}
|
||||
|
||||
fn track_buffer_internal(
|
||||
&mut self,
|
||||
buffer: Entity<Buffer>,
|
||||
@@ -82,26 +145,31 @@ impl ActionLog {
|
||||
let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
|
||||
let (diff_update_tx, diff_update_rx) = mpsc::unbounded();
|
||||
let diff_base;
|
||||
let last_seen_base;
|
||||
let unreviewed_edits;
|
||||
if is_created {
|
||||
diff_base = Rope::default();
|
||||
last_seen_base = Rope::default();
|
||||
unreviewed_edits = Patch::new(vec![Edit {
|
||||
old: 0..1,
|
||||
new: 0..text_snapshot.max_point().row + 1,
|
||||
}])
|
||||
} else {
|
||||
diff_base = buffer.read(cx).as_rope().clone();
|
||||
last_seen_base = diff_base.clone();
|
||||
unreviewed_edits = Patch::default();
|
||||
}
|
||||
TrackedBuffer {
|
||||
buffer: buffer.clone(),
|
||||
diff_base,
|
||||
last_seen_base,
|
||||
unreviewed_edits,
|
||||
snapshot: text_snapshot,
|
||||
status,
|
||||
version: buffer.read(cx).version(),
|
||||
diff,
|
||||
diff_update: diff_update_tx,
|
||||
may_have_unnotified_user_edits: false,
|
||||
_open_lsp_handle: open_lsp_handle,
|
||||
_maintain_diff: cx.spawn({
|
||||
let buffer = buffer.clone();
|
||||
@@ -252,9 +320,10 @@ impl ActionLog {
|
||||
let new_snapshot = buffer_snapshot.clone();
|
||||
let unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
|
||||
let edits = diff_snapshots(&old_snapshot, &new_snapshot);
|
||||
let mut has_user_changes = false;
|
||||
async move {
|
||||
if let ChangeAuthor::User = author {
|
||||
apply_non_conflicting_edits(
|
||||
has_user_changes = apply_non_conflicting_edits(
|
||||
&unreviewed_edits,
|
||||
edits,
|
||||
&mut base_text,
|
||||
@@ -262,13 +331,22 @@ impl ActionLog {
|
||||
);
|
||||
}
|
||||
|
||||
(Arc::new(base_text.to_string()), base_text)
|
||||
(Arc::new(base_text.to_string()), base_text, has_user_changes)
|
||||
}
|
||||
});
|
||||
|
||||
anyhow::Ok(rebase)
|
||||
})??;
|
||||
let (new_base_text, new_diff_base) = rebase.await;
|
||||
let (new_base_text, new_diff_base, has_user_changes) = rebase.await;
|
||||
|
||||
this.update(cx, |this, _| {
|
||||
let tracked_buffer = this
|
||||
.tracked_buffers
|
||||
.get_mut(buffer)
|
||||
.context("buffer not tracked")
|
||||
.unwrap();
|
||||
tracked_buffer.may_have_unnotified_user_edits |= has_user_changes;
|
||||
})?;
|
||||
|
||||
Self::update_diff(
|
||||
this,
|
||||
@@ -409,11 +487,9 @@ impl ActionLog {
|
||||
let new_diff_base = new_diff_base.clone();
|
||||
async move {
|
||||
let mut unreviewed_edits = Patch::default();
|
||||
for hunk in diff_snapshot.hunks_intersecting_range(
|
||||
Anchor::min_for_buffer(buffer_snapshot.remote_id())
|
||||
..Anchor::max_for_buffer(buffer_snapshot.remote_id()),
|
||||
&buffer_snapshot,
|
||||
) {
|
||||
for hunk in diff_snapshot
|
||||
.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &buffer_snapshot)
|
||||
{
|
||||
let old_range = new_diff_base
|
||||
.offset_to_point(hunk.diff_base_byte_range.start)
|
||||
..new_diff_base.offset_to_point(hunk.diff_base_byte_range.end);
|
||||
@@ -489,17 +565,14 @@ impl ActionLog {
|
||||
&mut self,
|
||||
buffer: Entity<Buffer>,
|
||||
buffer_range: Range<impl language::ToPoint>,
|
||||
telemetry: Option<ActionLogTelemetry>,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
|
||||
return;
|
||||
};
|
||||
|
||||
let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
|
||||
match tracked_buffer.status {
|
||||
TrackedBufferStatus::Deleted => {
|
||||
metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
|
||||
self.tracked_buffers.remove(&buffer);
|
||||
cx.notify();
|
||||
}
|
||||
@@ -508,6 +581,7 @@ impl ActionLog {
|
||||
let buffer_range =
|
||||
buffer_range.start.to_point(buffer)..buffer_range.end.to_point(buffer);
|
||||
let mut delta = 0i32;
|
||||
|
||||
tracked_buffer.unreviewed_edits.retain_mut(|edit| {
|
||||
edit.old.start = (edit.old.start as i32 + delta) as u32;
|
||||
edit.old.end = (edit.old.end as i32 + delta) as u32;
|
||||
@@ -539,7 +613,6 @@ impl ActionLog {
|
||||
.collect::<String>(),
|
||||
);
|
||||
delta += edit.new_len() as i32 - edit.old_len() as i32;
|
||||
metrics.add_edit(edit);
|
||||
false
|
||||
}
|
||||
});
|
||||
@@ -551,24 +624,19 @@ impl ActionLog {
|
||||
tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
|
||||
}
|
||||
}
|
||||
if let Some(telemetry) = telemetry {
|
||||
telemetry_report_accepted_edits(&telemetry, metrics);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn reject_edits_in_ranges(
|
||||
&mut self,
|
||||
buffer: Entity<Buffer>,
|
||||
buffer_ranges: Vec<Range<impl language::ToPoint>>,
|
||||
telemetry: Option<ActionLogTelemetry>,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Task<Result<()>> {
|
||||
let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
|
||||
return Task::ready(Ok(()));
|
||||
};
|
||||
|
||||
let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
|
||||
let task = match &tracked_buffer.status {
|
||||
match &tracked_buffer.status {
|
||||
TrackedBufferStatus::Created {
|
||||
existing_file_content,
|
||||
} => {
|
||||
@@ -618,7 +686,6 @@ impl ActionLog {
|
||||
}
|
||||
};
|
||||
|
||||
metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
|
||||
self.tracked_buffers.remove(&buffer);
|
||||
cx.notify();
|
||||
task
|
||||
@@ -632,7 +699,6 @@ impl ActionLog {
|
||||
.update(cx, |project, cx| project.save_buffer(buffer.clone(), cx));
|
||||
|
||||
// Clear all tracked edits for this buffer and start over as if we just read it.
|
||||
metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
|
||||
self.tracked_buffers.remove(&buffer);
|
||||
self.buffer_read(buffer.clone(), cx);
|
||||
cx.notify();
|
||||
@@ -672,7 +738,6 @@ impl ActionLog {
|
||||
}
|
||||
|
||||
if revert {
|
||||
metrics.add_edit(edit);
|
||||
let old_range = tracked_buffer
|
||||
.diff_base
|
||||
.point_to_offset(Point::new(edit.old.start, 0))
|
||||
@@ -693,25 +758,12 @@ impl ActionLog {
|
||||
self.project
|
||||
.update(cx, |project, cx| project.save_buffer(buffer, cx))
|
||||
}
|
||||
};
|
||||
if let Some(telemetry) = telemetry {
|
||||
telemetry_report_rejected_edits(&telemetry, metrics);
|
||||
}
|
||||
task
|
||||
}
|
||||

pub fn keep_all_edits(
&mut self,
telemetry: Option<ActionLogTelemetry>,
cx: &mut Context<Self>,
) {
self.tracked_buffers.retain(|buffer, tracked_buffer| {
let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
if let Some(telemetry) = telemetry.as_ref() {
telemetry_report_accepted_edits(telemetry, metrics);
}
match tracked_buffer.status {
pub fn keep_all_edits(&mut self, cx: &mut Context<Self>) {
self.tracked_buffers
.retain(|_buffer, tracked_buffer| match tracked_buffer.status {
TrackedBufferStatus::Deleted => false,
_ => {
if let TrackedBufferStatus::Created { .. } = &mut tracked_buffer.status {
@@ -722,22 +774,13 @@ impl ActionLog {
tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
true
}
}
});

});
cx.notify();
}

pub fn reject_all_edits(
&mut self,
telemetry: Option<ActionLogTelemetry>,
cx: &mut Context<Self>,
) -> Task<()> {
pub fn reject_all_edits(&mut self, cx: &mut Context<Self>) -> Task<()> {
let futures = self.changed_buffers(cx).into_keys().map(|buffer| {
let buffer_ranges = vec![Anchor::min_max_range_for_buffer(
buffer.read(cx).remote_id(),
)];
let reject = self.reject_edits_in_ranges(buffer, buffer_ranges, telemetry.clone(), cx);
let reject = self.reject_edits_in_ranges(buffer, vec![Anchor::MIN..Anchor::MAX], cx);

async move {
reject.await.log_err();
@@ -745,7 +788,8 @@ impl ActionLog {
});

let task = futures::future::join_all(futures);
cx.background_spawn(async move {

cx.spawn(async move |_, _| {
task.await;
})
}
@@ -775,61 +819,6 @@ impl ActionLog {
}
}

#[derive(Clone)]
pub struct ActionLogTelemetry {
pub agent_telemetry_id: &'static str,
pub session_id: Arc<str>,
}

struct ActionLogMetrics {
lines_removed: u32,
lines_added: u32,
language: Option<SharedString>,
}

impl ActionLogMetrics {
fn for_buffer(buffer: &Buffer) -> Self {
Self {
language: buffer.language().map(|l| l.name().0),
lines_removed: 0,
lines_added: 0,
}
}

fn add_edits(&mut self, edits: &[Edit<u32>]) {
for edit in edits {
self.add_edit(edit);
}
}

fn add_edit(&mut self, edit: &Edit<u32>) {
self.lines_added += edit.new_len();
self.lines_removed += edit.old_len();
}
}

fn telemetry_report_accepted_edits(telemetry: &ActionLogTelemetry, metrics: ActionLogMetrics) {
telemetry::event!(
"Agent Edits Accepted",
agent = telemetry.agent_telemetry_id,
session = telemetry.session_id,
language = metrics.language,
lines_added = metrics.lines_added,
lines_removed = metrics.lines_removed
);
}

fn telemetry_report_rejected_edits(telemetry: &ActionLogTelemetry, metrics: ActionLogMetrics) {
telemetry::event!(
"Agent Edits Rejected",
agent = telemetry.agent_telemetry_id,
session = telemetry.session_id,
language = metrics.language,
lines_added = metrics.lines_added,
lines_removed = metrics.lines_removed
);
}

fn apply_non_conflicting_edits(
patch: &Patch<u32>,
edits: Vec<Edit<u32>>,
@@ -960,12 +949,14 @@ enum TrackedBufferStatus {
struct TrackedBuffer {
buffer: Entity<Buffer>,
diff_base: Rope,
last_seen_base: Rope,
unreviewed_edits: Patch<u32>,
status: TrackedBufferStatus,
version: clock::Global,
diff: Entity<BufferDiff>,
snapshot: text::BufferSnapshot,
diff_update: mpsc::UnboundedSender<(ChangeAuthor, text::BufferSnapshot)>,
may_have_unnotified_user_edits: bool,
_open_lsp_handle: OpenLspBufferHandle,
_maintain_diff: Task<()>,
_subscription: Subscription,
@@ -996,6 +987,7 @@ mod tests {
use super::*;
use buffer_diff::DiffHunkStatusKind;
use gpui::TestAppContext;
use indoc::indoc;
use language::Point;
use project::{FakeFs, Fs, Project, RemoveOptions};
use rand::prelude::*;
@@ -1013,6 +1005,8 @@ mod tests {
cx.update(|cx| {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
language::init(cx);
Project::init_settings(cx);
});
}

@@ -1072,7 +1066,7 @@ mod tests {
);

action_log.update(cx, |log, cx| {
log.keep_edits_in_range(buffer.clone(), Point::new(3, 0)..Point::new(4, 3), None, cx)
log.keep_edits_in_range(buffer.clone(), Point::new(3, 0)..Point::new(4, 3), cx)
});
cx.run_until_parked();
assert_eq!(
@@ -1088,7 +1082,7 @@ mod tests {
);

action_log.update(cx, |log, cx| {
log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(4, 3), None, cx)
log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(4, 3), cx)
});
cx.run_until_parked();
assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
@@ -1173,7 +1167,7 @@ mod tests {
);

action_log.update(cx, |log, cx| {
log.keep_edits_in_range(buffer.clone(), Point::new(1, 0)..Point::new(1, 0), None, cx)
log.keep_edits_in_range(buffer.clone(), Point::new(1, 0)..Point::new(1, 0), cx)
});
cx.run_until_parked();
assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
@@ -1270,7 +1264,111 @@ mod tests {
|
||||
);
|
||||
|
||||
action_log.update(cx, |log, cx| {
|
||||
log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(1, 0), None, cx)
|
||||
log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(1, 0), cx)
|
||||
});
|
||||
cx.run_until_parked();
|
||||
assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 10)]
|
||||
async fn test_user_edits_notifications(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
fs.insert_tree(
|
||||
path!("/dir"),
|
||||
json!({"file": indoc! {"
|
||||
abc
|
||||
def
|
||||
ghi
|
||||
jkl
|
||||
mno"}}),
|
||||
)
|
||||
.await;
|
||||
let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
|
||||
let action_log = cx.new(|_| ActionLog::new(project.clone()));
|
||||
let file_path = project
|
||||
.read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
|
||||
.unwrap();
|
||||
let buffer = project
|
||||
.update(cx, |project, cx| project.open_buffer(file_path, cx))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// Agent edits
|
||||
cx.update(|cx| {
|
||||
action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
|
||||
buffer.update(cx, |buffer, cx| {
|
||||
buffer
|
||||
.edit([(Point::new(1, 2)..Point::new(2, 3), "F\nGHI")], None, cx)
|
||||
.unwrap()
|
||||
});
|
||||
action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
|
||||
});
|
||||
cx.run_until_parked();
|
||||
assert_eq!(
|
||||
buffer.read_with(cx, |buffer, _| buffer.text()),
|
||||
indoc! {"
|
||||
abc
|
||||
deF
|
||||
GHI
|
||||
jkl
|
||||
mno"}
|
||||
);
|
||||
assert_eq!(
|
||||
unreviewed_hunks(&action_log, cx),
|
||||
vec![(
|
||||
buffer.clone(),
|
||||
vec![HunkStatus {
|
||||
range: Point::new(1, 0)..Point::new(3, 0),
|
||||
diff_status: DiffHunkStatusKind::Modified,
|
||||
old_text: "def\nghi\n".into(),
|
||||
}],
|
||||
)]
|
||||
);
|
||||
|
||||
// User edits
|
||||
buffer.update(cx, |buffer, cx| {
|
||||
buffer.edit(
|
||||
[
|
||||
(Point::new(0, 2)..Point::new(0, 2), "X"),
|
||||
(Point::new(3, 0)..Point::new(3, 0), "Y"),
|
||||
],
|
||||
None,
|
||||
cx,
|
||||
)
|
||||
});
|
||||
cx.run_until_parked();
|
||||
assert_eq!(
|
||||
buffer.read_with(cx, |buffer, _| buffer.text()),
|
||||
indoc! {"
|
||||
abXc
|
||||
deF
|
||||
GHI
|
||||
Yjkl
|
||||
mno"}
|
||||
);
|
||||
|
||||
// User edits should be stored separately from agent's
|
||||
let user_edits = action_log.update(cx, |log, cx| log.unnotified_user_edits(cx));
|
||||
assert_eq!(
|
||||
user_edits.expect("should have some user edits"),
|
||||
indoc! {"
|
||||
--- a/dir/file
|
||||
+++ b/dir/file
|
||||
@@ -1,5 +1,5 @@
|
||||
-abc
|
||||
+abXc
|
||||
def
|
||||
ghi
|
||||
-jkl
|
||||
+Yjkl
|
||||
mno
|
||||
"}
|
||||
);
|
||||
|
||||
action_log.update(cx, |log, cx| {
|
||||
log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(1, 0), cx)
|
||||
});
|
||||
cx.run_until_parked();
|
||||
assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
|
||||
@@ -1329,7 +1427,7 @@ mod tests {
|
||||
);
|
||||
|
||||
action_log.update(cx, |log, cx| {
|
||||
log.keep_edits_in_range(buffer.clone(), 0..5, None, cx)
|
||||
log.keep_edits_in_range(buffer.clone(), 0..5, cx)
|
||||
});
|
||||
cx.run_until_parked();
|
||||
assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
|
||||
@@ -1381,7 +1479,7 @@ mod tests {
|
||||
|
||||
action_log
|
||||
.update(cx, |log, cx| {
|
||||
log.reject_edits_in_ranges(buffer.clone(), vec![2..5], None, cx)
|
||||
log.reject_edits_in_ranges(buffer.clone(), vec![2..5], cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
@@ -1461,7 +1559,7 @@ mod tests {
|
||||
|
||||
action_log
|
||||
.update(cx, |log, cx| {
|
||||
log.reject_edits_in_ranges(buffer.clone(), vec![2..5], None, cx)
|
||||
log.reject_edits_in_ranges(buffer.clone(), vec![2..5], cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
@@ -1644,7 +1742,6 @@ mod tests {
|
||||
log.reject_edits_in_ranges(
|
||||
buffer.clone(),
|
||||
vec![Point::new(4, 0)..Point::new(4, 0)],
|
||||
None,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
@@ -1679,7 +1776,6 @@ mod tests {
|
||||
log.reject_edits_in_ranges(
|
||||
buffer.clone(),
|
||||
vec![Point::new(0, 0)..Point::new(1, 0)],
|
||||
None,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
@@ -1707,7 +1803,6 @@ mod tests {
|
||||
log.reject_edits_in_ranges(
|
||||
buffer.clone(),
|
||||
vec![Point::new(4, 0)..Point::new(4, 0)],
|
||||
None,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
@@ -1782,7 +1877,7 @@ mod tests {
|
||||
let range_2 = buffer.read(cx).anchor_before(Point::new(5, 0))
|
||||
..buffer.read(cx).anchor_before(Point::new(5, 3));
|
||||
|
||||
log.reject_edits_in_ranges(buffer.clone(), vec![range_1, range_2], None, cx)
|
||||
log.reject_edits_in_ranges(buffer.clone(), vec![range_1, range_2], cx)
|
||||
.detach();
|
||||
assert_eq!(
|
||||
buffer.read_with(cx, |buffer, _| buffer.text()),
|
||||
@@ -1843,7 +1938,6 @@ mod tests {
|
||||
log.reject_edits_in_ranges(
|
||||
buffer.clone(),
|
||||
vec![Point::new(0, 0)..Point::new(0, 0)],
|
||||
None,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
@@ -1899,7 +1993,6 @@ mod tests {
|
||||
log.reject_edits_in_ranges(
|
||||
buffer.clone(),
|
||||
vec![Point::new(0, 0)..Point::new(0, 11)],
|
||||
None,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
@@ -1962,7 +2055,6 @@ mod tests {
|
||||
log.reject_edits_in_ranges(
|
||||
buffer.clone(),
|
||||
vec![Point::new(0, 0)..Point::new(100, 0)],
|
||||
None,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
@@ -2010,8 +2102,7 @@ mod tests {
|
||||
|
||||
// User accepts the single hunk
|
||||
action_log.update(cx, |log, cx| {
|
||||
let buffer_range = Anchor::min_max_range_for_buffer(buffer.read(cx).remote_id());
|
||||
log.keep_edits_in_range(buffer.clone(), buffer_range, None, cx)
|
||||
log.keep_edits_in_range(buffer.clone(), Anchor::MIN..Anchor::MAX, cx)
|
||||
});
|
||||
cx.run_until_parked();
|
||||
assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
|
||||
@@ -2032,14 +2123,7 @@ mod tests {
|
||||
// User rejects the hunk
|
||||
action_log
|
||||
.update(cx, |log, cx| {
|
||||
log.reject_edits_in_ranges(
|
||||
buffer.clone(),
|
||||
vec![Anchor::min_max_range_for_buffer(
|
||||
buffer.read(cx).remote_id(),
|
||||
)],
|
||||
None,
|
||||
cx,
|
||||
)
|
||||
log.reject_edits_in_ranges(buffer.clone(), vec![Anchor::MIN..Anchor::MAX], cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
@@ -2083,7 +2167,7 @@ mod tests {
|
||||
cx.run_until_parked();
|
||||
|
||||
// User clicks "Accept All"
|
||||
action_log.update(cx, |log, cx| log.keep_all_edits(None, cx));
|
||||
action_log.update(cx, |log, cx| log.keep_all_edits(cx));
|
||||
cx.run_until_parked();
|
||||
assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
|
||||
assert_eq!(unreviewed_hunks(&action_log, cx), vec![]); // Hunks are cleared
|
||||
@@ -2102,7 +2186,7 @@ mod tests {
|
||||
|
||||
// User clicks "Reject All"
|
||||
action_log
|
||||
.update(cx, |log, cx| log.reject_all_edits(None, cx))
|
||||
.update(cx, |log, cx| log.reject_all_edits(cx))
|
||||
.await;
|
||||
cx.run_until_parked();
|
||||
assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
|
||||
@@ -2142,7 +2226,7 @@ mod tests {
|
||||
action_log.update(cx, |log, cx| {
|
||||
let range = buffer.read(cx).random_byte_range(0, &mut rng);
|
||||
log::info!("keeping edits in range {:?}", range);
|
||||
log.keep_edits_in_range(buffer.clone(), range, None, cx)
|
||||
log.keep_edits_in_range(buffer.clone(), range, cx)
|
||||
});
|
||||
}
|
||||
25..50 => {
|
||||
@@ -2150,7 +2234,7 @@ mod tests {
|
||||
.update(cx, |log, cx| {
|
||||
let range = buffer.read(cx).random_byte_range(0, &mut rng);
|
||||
log::info!("rejecting edits in range {:?}", range);
|
||||
log.reject_edits_in_ranges(buffer.clone(), vec![range], None, cx)
|
||||
log.reject_edits_in_ranges(buffer.clone(), vec![range], cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
@@ -2404,4 +2488,61 @@ mod tests {
.collect()
})
}

#[gpui::test]
async fn test_format_patch(cx: &mut TestAppContext) {
init_test(cx);

let fs = FakeFs::new(cx.executor());
fs.insert_tree(
path!("/dir"),
json!({"test.txt": "line 1\nline 2\nline 3\n"}),
)
.await;
let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
let action_log = cx.new(|_| ActionLog::new(project.clone()));

let file_path = project
.read_with(cx, |project, cx| {
project.find_project_path("dir/test.txt", cx)
})
.unwrap();
let buffer = project
.update(cx, |project, cx| project.open_buffer(file_path, cx))
.await
.unwrap();

cx.update(|cx| {
// Track the buffer and mark it as read first
action_log.update(cx, |log, cx| {
log.buffer_read(buffer.clone(), cx);
});

// Make some edits to create a patch
buffer.update(cx, |buffer, cx| {
buffer
.edit([(Point::new(1, 0)..Point::new(1, 6), "CHANGED")], None, cx)
.unwrap(); // Replace "line2" with "CHANGED"
});
});

cx.run_until_parked();

// Get the patch
let patch = action_log.update(cx, |log, cx| log.unnotified_user_edits(cx));

// Verify the patch format contains expected unified diff elements
assert_eq!(
patch.unwrap(),
indoc! {"
--- a/dir/test.txt
+++ b/dir/test.txt
@@ -1,3 +1,3 @@
line 1
-line 2
+CHANGED
line 3
"}
);
}
}

@@ -17,13 +17,11 @@ anyhow.workspace = true
auto_update.workspace = true
editor.workspace = true
extension_host.workspace = true
fs.workspace = true
futures.workspace = true
gpui.workspace = true
language.workspace = true
project.workspace = true
proto.workspace = true
semver.workspace = true
smallvec.workspace = true
ui.workspace = true
util.workspace = true

@@ -51,7 +51,6 @@ pub struct ActivityIndicator {
|
||||
project: Entity<Project>,
|
||||
auto_updater: Option<Entity<AutoUpdater>>,
|
||||
context_menu_handle: PopoverMenuHandle<ContextMenu>,
|
||||
fs_jobs: Vec<fs::JobInfo>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
@@ -100,27 +99,6 @@ impl ActivityIndicator {
|
||||
})
|
||||
.detach();
|
||||
|
||||
let fs = project.read(cx).fs().clone();
|
||||
let mut job_events = fs.subscribe_to_jobs();
|
||||
cx.spawn(async move |this, cx| {
|
||||
while let Some(job_event) = job_events.next().await {
|
||||
this.update(cx, |this: &mut ActivityIndicator, cx| {
|
||||
match job_event {
|
||||
fs::JobEvent::Started { info } => {
|
||||
this.fs_jobs.retain(|j| j.id != info.id);
|
||||
this.fs_jobs.push(info);
|
||||
}
|
||||
fs::JobEvent::Completed { id } => {
|
||||
this.fs_jobs.retain(|j| j.id != id);
|
||||
}
|
||||
}
|
||||
cx.notify();
|
||||
})?;
|
||||
}
|
||||
anyhow::Ok(())
|
||||
})
|
||||
.detach();
|
||||
|
||||
cx.subscribe(
|
||||
&project.read(cx).lsp_store(),
|
||||
|activity_indicator, _, event, cx| {
|
||||
@@ -223,8 +201,7 @@ impl ActivityIndicator {
|
||||
statuses: Vec::new(),
|
||||
project: project.clone(),
|
||||
auto_updater,
|
||||
context_menu_handle: PopoverMenuHandle::default(),
|
||||
fs_jobs: Vec::new(),
|
||||
context_menu_handle: Default::default(),
|
||||
}
|
||||
});
|
||||
|
||||
@@ -455,23 +432,6 @@ impl ActivityIndicator {
|
||||
});
|
||||
}
|
||||
|
||||
// Show any long-running fs command
|
||||
for fs_job in &self.fs_jobs {
|
||||
if Instant::now().duration_since(fs_job.start) >= GIT_OPERATION_DELAY {
|
||||
return Some(Content {
|
||||
icon: Some(
|
||||
Icon::new(IconName::ArrowCircle)
|
||||
.size(IconSize::Small)
|
||||
.with_rotate_animation(2)
|
||||
.into_any_element(),
|
||||
),
|
||||
message: fs_job.message.clone().into(),
|
||||
on_click: None,
|
||||
tooltip_message: None,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Show any language server installation info.
|
||||
let mut downloading = SmallVec::<[_; 3]>::new();
|
||||
let mut checking_for_update = SmallVec::<[_; 3]>::new();
|
||||
@@ -925,15 +885,15 @@ impl StatusItemView for ActivityIndicator {
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use gpui::SemanticVersion;
|
||||
use release_channel::AppCommitSha;
|
||||
use semver::Version;
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_version_tooltip_message() {
|
||||
let message = ActivityIndicator::version_tooltip_message(&VersionCheckType::Semantic(
|
||||
Version::new(1, 0, 0),
|
||||
SemanticVersion::new(1, 0, 0),
|
||||
));
|
||||
|
||||
assert_eq!(message, "Version: 1.0.0");
|
||||
|
||||
@@ -63,6 +63,7 @@ streaming_diff.workspace = true
|
||||
strsim.workspace = true
|
||||
task.workspace = true
|
||||
telemetry.workspace = true
|
||||
terminal.workspace = true
|
||||
text.workspace = true
|
||||
thiserror.workspace = true
|
||||
ui.workspace = true
|
||||
@@ -83,7 +84,6 @@ ctor.workspace = true
|
||||
db = { workspace = true, "features" = ["test-support"] }
|
||||
editor = { workspace = true, "features" = ["test-support"] }
|
||||
env_logger.workspace = true
|
||||
eval_utils.workspace = true
|
||||
fs = { workspace = true, "features" = ["test-support"] }
|
||||
git = { workspace = true, "features" = ["test-support"] }
|
||||
gpui = { workspace = true, "features" = ["test-support"] }
|
||||
|
||||
@@ -6,6 +6,7 @@ mod native_agent_server;
|
||||
pub mod outline;
|
||||
mod templates;
|
||||
mod thread;
|
||||
mod tool_schema;
|
||||
mod tools;
|
||||
|
||||
#[cfg(test)]
|
||||
@@ -133,7 +134,9 @@ impl LanguageModels {
|
||||
for model in provider.provided_models(cx) {
|
||||
let model_info = Self::map_language_model_to_info(&model, &provider);
|
||||
let model_id = model_info.id.clone();
|
||||
provider_models.push(model_info);
|
||||
if !recommended_models.contains(&(model.provider_id(), model.id())) {
|
||||
provider_models.push(model_info);
|
||||
}
|
||||
models.insert(model_id, model);
|
||||
}
|
||||
if !provider_models.is_empty() {
|
||||
@@ -170,7 +173,7 @@ impl LanguageModels {
|
||||
}
|
||||
|
||||
fn model_id(model: &Arc<dyn LanguageModel>) -> acp::ModelId {
|
||||
acp::ModelId::new(format!("{}/{}", model.provider_id().0, model.id().0))
|
||||
acp::ModelId(format!("{}/{}", model.provider_id().0, model.id().0).into())
|
||||
}
|
||||
|
||||
fn authenticate_all_language_model_providers(cx: &mut App) -> Task<()> {
|
||||
@@ -215,7 +218,7 @@ impl LanguageModels {
|
||||
}
|
||||
_ => {
|
||||
log::error!(
|
||||
"Failed to authenticate provider: {}: {err:#}",
|
||||
"Failed to authenticate provider: {}: {err}",
|
||||
provider_name.0
|
||||
);
|
||||
}
|
||||
@@ -789,12 +792,28 @@ impl NativeAgentConnection {
|
||||
}
|
||||
ThreadEvent::AgentText(text) => {
|
||||
acp_thread.update(cx, |thread, cx| {
|
||||
thread.push_assistant_content_block(text.into(), false, cx)
|
||||
thread.push_assistant_content_block(
|
||||
acp::ContentBlock::Text(acp::TextContent {
|
||||
text,
|
||||
annotations: None,
|
||||
meta: None,
|
||||
}),
|
||||
false,
|
||||
cx,
|
||||
)
|
||||
})?;
|
||||
}
|
||||
ThreadEvent::AgentThinking(text) => {
|
||||
acp_thread.update(cx, |thread, cx| {
|
||||
thread.push_assistant_content_block(text.into(), true, cx)
|
||||
thread.push_assistant_content_block(
|
||||
acp::ContentBlock::Text(acp::TextContent {
|
||||
text,
|
||||
annotations: None,
|
||||
meta: None,
|
||||
}),
|
||||
true,
|
||||
cx,
|
||||
)
|
||||
})?;
|
||||
}
|
||||
ThreadEvent::ToolCallAuthorization(ToolCallAuthorization {
|
||||
@@ -808,9 +827,8 @@ impl NativeAgentConnection {
|
||||
)
|
||||
})??;
|
||||
cx.background_spawn(async move {
|
||||
if let acp::RequestPermissionOutcome::Selected(
|
||||
acp::SelectedPermissionOutcome { option_id, .. },
|
||||
) = outcome_task.await
|
||||
if let acp::RequestPermissionOutcome::Selected { option_id } =
|
||||
outcome_task.await
|
||||
{
|
||||
response
|
||||
.send(option_id)
|
||||
@@ -837,7 +855,10 @@ impl NativeAgentConnection {
|
||||
}
|
||||
ThreadEvent::Stop(stop_reason) => {
|
||||
log::debug!("Assistant message complete: {:?}", stop_reason);
|
||||
return Ok(acp::PromptResponse::new(stop_reason));
|
||||
return Ok(acp::PromptResponse {
|
||||
stop_reason,
|
||||
meta: None,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -849,7 +870,10 @@ impl NativeAgentConnection {
|
||||
}
|
||||
|
||||
log::debug!("Response stream completed");
|
||||
anyhow::Ok(acp::PromptResponse::new(acp::StopReason::EndTurn))
|
||||
anyhow::Ok(acp::PromptResponse {
|
||||
stop_reason: acp::StopReason::EndTurn,
|
||||
meta: None,
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -940,17 +964,9 @@ impl acp_thread::AgentModelSelector for NativeAgentModelSelector {
|
||||
fn watch(&self, cx: &mut App) -> Option<watch::Receiver<()>> {
|
||||
Some(self.connection.0.read(cx).models.watch())
|
||||
}
|
||||
|
||||
fn should_render_footer(&self) -> bool {
|
||||
true
|
||||
}
|
||||
}
|
||||
|
||||
impl acp_thread::AgentConnection for NativeAgentConnection {
|
||||
fn telemetry_id(&self) -> &'static str {
|
||||
"zed"
|
||||
}
|
||||
|
||||
fn new_thread(
|
||||
self: Rc<Self>,
|
||||
project: Entity<Project>,
|
||||
@@ -1091,6 +1107,10 @@ impl acp_thread::AgentConnection for NativeAgentConnection {
|
||||
}
|
||||
|
||||
impl acp_thread::AgentTelemetry for NativeAgentConnection {
|
||||
fn agent_name(&self) -> String {
|
||||
"Zed".into()
|
||||
}
|
||||
|
||||
fn thread_data(
|
||||
&self,
|
||||
session_id: &acp::SessionId,
|
||||
@@ -1353,7 +1373,7 @@ mod internal_tests {
|
||||
IndexMap::from_iter([(
|
||||
AgentModelGroupName("Fake".into()),
|
||||
vec![AgentModelInfo {
|
||||
id: acp::ModelId::new("fake/fake"),
|
||||
id: acp::ModelId("fake/fake".into()),
|
||||
name: "Fake".into(),
|
||||
description: None,
|
||||
icon: Some(ui::IconName::ZedAssistant),
|
||||
@@ -1414,7 +1434,7 @@ mod internal_tests {
|
||||
|
||||
// Select a model
|
||||
let selector = connection.model_selector(&session_id).unwrap();
|
||||
let model_id = acp::ModelId::new("fake/fake");
|
||||
let model_id = acp::ModelId("fake/fake".into());
|
||||
cx.update(|cx| selector.select_model(model_id.clone(), cx))
|
||||
.await
|
||||
.unwrap();
|
||||
@@ -1500,14 +1520,20 @@ mod internal_tests {
|
||||
thread.send(
|
||||
vec![
|
||||
"What does ".into(),
|
||||
acp::ContentBlock::ResourceLink(acp::ResourceLink::new(
|
||||
"b.md",
|
||||
MentionUri::File {
|
||||
acp::ContentBlock::ResourceLink(acp::ResourceLink {
|
||||
name: "b.md".into(),
|
||||
uri: MentionUri::File {
|
||||
abs_path: path!("/a/b.md").into(),
|
||||
}
|
||||
.to_uri()
|
||||
.to_string(),
|
||||
)),
|
||||
annotations: None,
|
||||
description: None,
|
||||
mime_type: None,
|
||||
size: None,
|
||||
title: None,
|
||||
meta: None,
|
||||
}),
|
||||
" mean?".into(),
|
||||
],
|
||||
cx,
|
||||
@@ -1601,7 +1627,9 @@ mod internal_tests {
|
||||
cx.update(|cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
|
||||
Project::init_settings(cx);
|
||||
agent_settings::init(cx);
|
||||
language::init(cx);
|
||||
LanguageModelRegistry::test(cx);
|
||||
});
|
||||
}
|
||||

@@ -150,7 +150,6 @@ impl DbThread {
.unwrap_or_default(),
input: tool_use.input,
is_input_complete: true,
thought_signature: None,
},
));
}
@@ -182,7 +181,6 @@ impl DbThread {
crate::Message::Agent(AgentMessage {
content,
tool_results,
reasoning_details: None,
})
}
language_model::Role::System => {
@@ -366,7 +364,7 @@ impl ThreadsDatabase {

for (id, summary, updated_at) in rows {
threads.push(DbThreadMetadata {
id: acp::SessionId::new(id),
id: acp::SessionId(id),
title: summary.into(),
updated_at: DateTime::parse_from_rfc3339(&updated_at)?.with_timezone(&Utc),
});
@@ -424,20 +422,4 @@ impl ThreadsDatabase {
Ok(())
})
}

pub fn delete_threads(&self) -> Task<Result<()>> {
let connection = self.connection.clone();

self.executor.spawn(async move {
let connection = connection.lock();

let mut delete = connection.exec_bound::<()>(indoc! {"
DELETE FROM threads
"})?;

delete(())?;

Ok(())
})
}
}

@@ -172,14 +172,14 @@ impl EditAgent {
|
||||
project.set_agent_location(
|
||||
Some(AgentLocation {
|
||||
buffer: buffer.downgrade(),
|
||||
position: language::Anchor::max_for_buffer(buffer.read(cx).remote_id()),
|
||||
position: language::Anchor::MAX,
|
||||
}),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
output_events_tx
|
||||
.unbounded_send(EditAgentOutputEvent::Edited(
|
||||
Anchor::min_max_range_for_buffer(buffer.read(cx).remote_id()),
|
||||
language::Anchor::MIN..language::Anchor::MAX,
|
||||
))
|
||||
.ok();
|
||||
})?;
|
||||
@@ -187,7 +187,7 @@ impl EditAgent {
|
||||
while let Some(event) = parse_rx.next().await {
|
||||
match event? {
|
||||
CreateFileParserEvent::NewTextChunk { chunk } => {
|
||||
let buffer_id = cx.update(|cx| {
|
||||
cx.update(|cx| {
|
||||
buffer.update(cx, |buffer, cx| buffer.append(chunk, cx));
|
||||
self.action_log
|
||||
.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
|
||||
@@ -195,18 +195,15 @@ impl EditAgent {
|
||||
project.set_agent_location(
|
||||
Some(AgentLocation {
|
||||
buffer: buffer.downgrade(),
|
||||
position: language::Anchor::max_for_buffer(
|
||||
buffer.read(cx).remote_id(),
|
||||
),
|
||||
position: language::Anchor::MAX,
|
||||
}),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
buffer.read(cx).remote_id()
|
||||
})?;
|
||||
output_events_tx
|
||||
.unbounded_send(EditAgentOutputEvent::Edited(
|
||||
Anchor::min_max_range_for_buffer(buffer_id),
|
||||
language::Anchor::MIN..language::Anchor::MAX,
|
||||
))
|
||||
.ok();
|
||||
}
|
||||
@@ -706,7 +703,6 @@ impl EditAgent {
|
||||
role: Role::User,
|
||||
content: vec![MessageContent::Text(prompt)],
|
||||
cache: false,
|
||||
reasoning_details: None,
|
||||
});
|
||||
|
||||
// Include tools in the request so that we can take advantage of
|
||||
@@ -1203,9 +1199,7 @@ mod tests {
|
||||
project.read_with(cx, |project, _| project.agent_location()),
|
||||
Some(AgentLocation {
|
||||
buffer: buffer.downgrade(),
|
||||
position: language::Anchor::max_for_buffer(
|
||||
cx.update(|cx| buffer.read(cx).remote_id())
|
||||
),
|
||||
position: language::Anchor::MAX
|
||||
})
|
||||
);
|
||||
|
||||
@@ -1223,9 +1217,7 @@ mod tests {
|
||||
project.read_with(cx, |project, _| project.agent_location()),
|
||||
Some(AgentLocation {
|
||||
buffer: buffer.downgrade(),
|
||||
position: language::Anchor::max_for_buffer(
|
||||
cx.update(|cx| buffer.read(cx).remote_id())
|
||||
),
|
||||
position: language::Anchor::MAX
|
||||
})
|
||||
);
|
||||
|
||||
@@ -1243,9 +1235,7 @@ mod tests {
|
||||
project.read_with(cx, |project, _| project.agent_location()),
|
||||
Some(AgentLocation {
|
||||
buffer: buffer.downgrade(),
|
||||
position: language::Anchor::max_for_buffer(
|
||||
cx.update(|cx| buffer.read(cx).remote_id())
|
||||
),
|
||||
position: language::Anchor::MAX
|
||||
})
|
||||
);
|
||||
|
||||
@@ -1263,9 +1253,7 @@ mod tests {
|
||||
project.read_with(cx, |project, _| project.agent_location()),
|
||||
Some(AgentLocation {
|
||||
buffer: buffer.downgrade(),
|
||||
position: language::Anchor::max_for_buffer(
|
||||
cx.update(|cx| buffer.read(cx).remote_id())
|
||||
),
|
||||
position: language::Anchor::MAX
|
||||
})
|
||||
);
|
||||
|
||||
@@ -1280,9 +1268,7 @@ mod tests {
|
||||
project.read_with(cx, |project, _| project.agent_location()),
|
||||
Some(AgentLocation {
|
||||
buffer: buffer.downgrade(),
|
||||
position: language::Anchor::max_for_buffer(
|
||||
cx.update(|cx| buffer.read(cx).remote_id())
|
||||
),
|
||||
position: language::Anchor::MAX
|
||||
})
|
||||
);
|
||||
}
|
||||
@@ -1408,7 +1394,7 @@ mod tests {
|
||||
|
||||
async fn init_test(cx: &mut TestAppContext) -> EditAgent {
|
||||
cx.update(settings::init);
|
||||
|
||||
cx.update(Project::init_settings);
|
||||
let project = Project::test(FakeFs::new(cx.executor()), [], cx).await;
|
||||
let model = Arc::new(FakeLanguageModel::default());
|
||||
let action_log = cx.new(|_| ActionLog::new(project.clone()));
|
||||
|
||||
@@ -15,14 +15,12 @@ const SEPARATOR_MARKER: &str = "=======";
const REPLACE_MARKER: &str = ">>>>>>> REPLACE";
const SONNET_PARAMETER_INVOKE_1: &str = "</parameter>\n</invoke>";
const SONNET_PARAMETER_INVOKE_2: &str = "</parameter></invoke>";
const SONNET_PARAMETER_INVOKE_3: &str = "</parameter>";
const END_TAGS: [&str; 6] = [
const END_TAGS: [&str; 5] = [
OLD_TEXT_END_TAG,
NEW_TEXT_END_TAG,
EDITS_END_TAG,
SONNET_PARAMETER_INVOKE_1, // Remove these after switching to streaming tool call
SONNET_PARAMETER_INVOKE_1, // Remove this after switching to streaming tool call
SONNET_PARAMETER_INVOKE_2,
SONNET_PARAMETER_INVOKE_3,
];

#[derive(Debug)]
@@ -569,29 +567,21 @@ mod tests {
|
||||
parse_random_chunks(
|
||||
indoc! {"
|
||||
<old_text>some text</old_text><new_text>updated text</parameter></invoke>
|
||||
<old_text>more text</old_text><new_text>upd</parameter></new_text>
|
||||
"},
|
||||
&mut parser,
|
||||
&mut rng
|
||||
),
|
||||
vec![
|
||||
Edit {
|
||||
old_text: "some text".to_string(),
|
||||
new_text: "updated text".to_string(),
|
||||
line_hint: None,
|
||||
},
|
||||
Edit {
|
||||
old_text: "more text".to_string(),
|
||||
new_text: "upd".to_string(),
|
||||
line_hint: None,
|
||||
},
|
||||
]
|
||||
vec![Edit {
|
||||
old_text: "some text".to_string(),
|
||||
new_text: "updated text".to_string(),
|
||||
line_hint: None,
|
||||
},]
|
||||
);
|
||||
assert_eq!(
|
||||
parser.finish(),
|
||||
EditParserMetrics {
|
||||
tags: 4,
|
||||
mismatched_tags: 2
|
||||
tags: 2,
|
||||
mismatched_tags: 1
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
@@ -188,15 +188,6 @@ impl HistoryStore {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn delete_threads(&mut self, cx: &mut Context<Self>) -> Task<Result<()>> {
|
||||
let database_future = ThreadsDatabase::connect(cx);
|
||||
cx.spawn(async move |this, cx| {
|
||||
let database = database_future.await.map_err(|err| anyhow!(err))?;
|
||||
database.delete_threads().await?;
|
||||
this.update(cx, |this, cx| this.reload(cx))
|
||||
})
|
||||
}
|
||||
|
||||
pub fn delete_text_thread(
|
||||
&mut self,
|
||||
path: Arc<Path>,
|
||||
@@ -354,9 +345,9 @@ impl HistoryStore {
|
||||
.into_iter()
|
||||
.take(MAX_RECENTLY_OPENED_ENTRIES)
|
||||
.flat_map(|entry| match entry {
|
||||
SerializedRecentOpen::AcpThread(id) => {
|
||||
Some(HistoryEntryId::AcpThread(acp::SessionId::new(id.as_str())))
|
||||
}
|
||||
SerializedRecentOpen::AcpThread(id) => Some(HistoryEntryId::AcpThread(
|
||||
acp::SessionId(id.as_str().into()),
|
||||
)),
|
||||
SerializedRecentOpen::TextThread(file_name) => Some(
|
||||
HistoryEntryId::TextThread(text_threads_dir().join(file_name).into()),
|
||||
),
|
||||
|
||||
@@ -88,6 +88,8 @@ mod tests {
|
||||
async |fs, project, cx| {
|
||||
let auth = cx.update(|cx| {
|
||||
prompt_store::init(cx);
|
||||
terminal::init(cx);
|
||||
|
||||
let registry = language_model::LanguageModelRegistry::read_global(cx);
|
||||
let auth = registry
|
||||
.provider(&language_model::ANTHROPIC_PROVIDER_ID)
|
||||
|
||||
@@ -1,6 +1,6 @@
use anyhow::Result;
use gpui::{AsyncApp, Entity};
use language::{Buffer, OutlineItem};
use language::{Buffer, OutlineItem, ParseStatus};
use regex::Regex;
use std::fmt::Write;
use text::Point;
@@ -30,9 +30,10 @@ pub async fn get_buffer_content_or_outline(
if file_size > AUTO_OUTLINE_SIZE {
// For large files, use outline instead of full content
// Wait until the buffer has been fully parsed, so we can read its outline
buffer
.read_with(cx, |buffer, _| buffer.parsing_idle())?
.await;
let mut parse_status = buffer.read_with(cx, |buffer, _| buffer.parse_status())?;
while *parse_status.borrow() != ParseStatus::Idle {
parse_status.changed().await?;
}

let outline_items = buffer.read_with(cx, |buffer, _| {
let snapshot = buffer.snapshot();
@@ -44,31 +45,14 @@ pub async fn get_buffer_content_or_outline(
.collect::<Vec<_>>()
})?;

// If no outline exists, fall back to first 1KB so the agent has some context
if outline_items.is_empty() {
let text = buffer.read_with(cx, |buffer, _| {
let snapshot = buffer.snapshot();
let len = snapshot.len().min(snapshot.as_rope().floor_char_boundary(1024));
let content = snapshot.text_for_range(0..len).collect::<String>();
if let Some(path) = path {
format!("# First 1KB of {path} (file too large to show full content, and no outline available)\n\n{content}")
} else {
format!("# First 1KB of file (file too large to show full content, and no outline available)\n\n{content}")
}
})?;

return Ok(BufferContent {
text,
is_outline: false,
});
}

let outline_text = render_outline(outline_items, None, 0, usize::MAX).await?;

let text = if let Some(path) = path {
format!("# File outline for {path}\n\n{outline_text}",)
format!(
"# File outline for {path} (file too large to show full content)\n\n{outline_text}",
)
} else {
format!("# File outline\n\n{outline_text}",)
format!("# File outline (file too large to show full content)\n\n{outline_text}",)
};
Ok(BufferContent {
text,
@@ -157,62 +141,3 @@ fn render_entries(

entries_rendered
}
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use fs::FakeFs;
|
||||
use gpui::TestAppContext;
|
||||
use project::Project;
|
||||
use settings::SettingsStore;
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_large_file_fallback_to_subset(cx: &mut TestAppContext) {
|
||||
cx.update(|cx| {
|
||||
let settings = SettingsStore::test(cx);
|
||||
cx.set_global(settings);
|
||||
});
|
||||
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
let project = Project::test(fs, [], cx).await;
|
||||
|
||||
let content = "⚡".repeat(100 * 1024); // 100KB
|
||||
let content_len = content.len();
|
||||
let buffer = project
|
||||
.update(cx, |project, cx| project.create_buffer(true, cx))
|
||||
.await
|
||||
.expect("failed to create buffer");
|
||||
|
||||
buffer.update(cx, |buffer, cx| buffer.set_text(content, cx));
|
||||
|
||||
let result = cx
|
||||
.spawn(|cx| async move { get_buffer_content_or_outline(buffer, None, &cx).await })
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// Should contain some of the actual file content
|
||||
assert!(
|
||||
result.text.contains("⚡⚡⚡⚡⚡⚡⚡"),
|
||||
"Result did not contain content subset"
|
||||
);
|
||||
|
||||
// Should be marked as not an outline (it's truncated content)
|
||||
assert!(
|
||||
!result.is_outline,
|
||||
"Large file without outline should not be marked as outline"
|
||||
);
|
||||
|
||||
// Should be reasonably sized (much smaller than original)
|
||||
assert!(
|
||||
result.text.len() < 50 * 1024,
|
||||
"Result size {} should be smaller than 50KB",
|
||||
result.text.len()
|
||||
);
|
||||
|
||||
// Should be significantly smaller than the original content
|
||||
assert!(
|
||||
result.text.len() < content_len / 10,
|
||||
"Result should be much smaller than original content"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -215,8 +215,7 @@ async fn test_prompt_caching(cx: &mut TestAppContext) {
|
||||
vec![LanguageModelRequestMessage {
|
||||
role: Role::User,
|
||||
content: vec!["Message 1".into()],
|
||||
cache: true,
|
||||
reasoning_details: None,
|
||||
cache: true
|
||||
}]
|
||||
);
|
||||
fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::Text(
|
||||
@@ -240,20 +239,17 @@ async fn test_prompt_caching(cx: &mut TestAppContext) {
|
||||
LanguageModelRequestMessage {
|
||||
role: Role::User,
|
||||
content: vec!["Message 1".into()],
|
||||
cache: false,
|
||||
reasoning_details: None,
|
||||
cache: false
|
||||
},
|
||||
LanguageModelRequestMessage {
|
||||
role: Role::Assistant,
|
||||
content: vec!["Response to Message 1".into()],
|
||||
cache: false,
|
||||
reasoning_details: None,
|
||||
cache: false
|
||||
},
|
||||
LanguageModelRequestMessage {
|
||||
role: Role::User,
|
||||
content: vec!["Message 2".into()],
|
||||
cache: true,
|
||||
reasoning_details: None,
|
||||
cache: true
|
||||
}
|
||||
]
|
||||
);
|
||||
@@ -278,7 +274,6 @@ async fn test_prompt_caching(cx: &mut TestAppContext) {
|
||||
raw_input: json!({"text": "test"}).to_string(),
|
||||
input: json!({"text": "test"}),
|
||||
is_input_complete: true,
|
||||
thought_signature: None,
|
||||
};
|
||||
fake_model
|
||||
.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(tool_use.clone()));
|
||||
@@ -299,44 +294,37 @@ async fn test_prompt_caching(cx: &mut TestAppContext) {
|
||||
LanguageModelRequestMessage {
|
||||
role: Role::User,
|
||||
content: vec!["Message 1".into()],
|
||||
cache: false,
|
||||
reasoning_details: None,
|
||||
cache: false
|
||||
},
|
||||
LanguageModelRequestMessage {
|
||||
role: Role::Assistant,
|
||||
content: vec!["Response to Message 1".into()],
|
||||
cache: false,
|
||||
reasoning_details: None,
|
||||
cache: false
|
||||
},
|
||||
LanguageModelRequestMessage {
|
||||
role: Role::User,
|
||||
content: vec!["Message 2".into()],
|
||||
cache: false,
|
||||
reasoning_details: None,
|
||||
cache: false
|
||||
},
|
||||
LanguageModelRequestMessage {
|
||||
role: Role::Assistant,
|
||||
content: vec!["Response to Message 2".into()],
|
||||
cache: false,
|
||||
reasoning_details: None,
|
||||
cache: false
|
||||
},
|
||||
LanguageModelRequestMessage {
|
||||
role: Role::User,
|
||||
content: vec!["Use the echo tool".into()],
|
||||
cache: false,
|
||||
reasoning_details: None,
|
||||
cache: false
|
||||
},
|
||||
LanguageModelRequestMessage {
|
||||
role: Role::Assistant,
|
||||
content: vec![MessageContent::ToolUse(tool_use)],
|
||||
cache: false,
|
||||
reasoning_details: None,
|
||||
cache: false
|
||||
},
|
||||
LanguageModelRequestMessage {
|
||||
role: Role::User,
|
||||
content: vec![MessageContent::ToolResult(tool_result)],
|
||||
cache: true,
|
||||
reasoning_details: None,
|
||||
cache: true
|
||||
}
|
||||
]
|
||||
);
|
||||
@@ -473,7 +461,6 @@ async fn test_tool_authorization(cx: &mut TestAppContext) {
|
||||
raw_input: "{}".into(),
|
||||
input: json!({}),
|
||||
is_input_complete: true,
|
||||
thought_signature: None,
|
||||
},
|
||||
));
|
||||
fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(
|
||||
@@ -483,7 +470,6 @@ async fn test_tool_authorization(cx: &mut TestAppContext) {
|
||||
raw_input: "{}".into(),
|
||||
input: json!({}),
|
||||
is_input_complete: true,
|
||||
thought_signature: None,
|
||||
},
|
||||
));
|
||||
fake_model.end_last_completion_stream();
|
||||
@@ -493,14 +479,14 @@ async fn test_tool_authorization(cx: &mut TestAppContext) {
|
||||
// Approve the first
|
||||
tool_call_auth_1
|
||||
.response
|
||||
.send(tool_call_auth_1.options[1].option_id.clone())
|
||||
.send(tool_call_auth_1.options[1].id.clone())
|
||||
.unwrap();
|
||||
cx.run_until_parked();
|
||||
|
||||
// Reject the second
|
||||
tool_call_auth_2
|
||||
.response
|
||||
.send(tool_call_auth_1.options[2].option_id.clone())
|
||||
.send(tool_call_auth_1.options[2].id.clone())
|
||||
.unwrap();
|
||||
cx.run_until_parked();
|
||||
|
||||
@@ -510,14 +496,14 @@ async fn test_tool_authorization(cx: &mut TestAppContext) {
|
||||
message.content,
|
||||
vec![
|
||||
language_model::MessageContent::ToolResult(LanguageModelToolResult {
|
||||
tool_use_id: tool_call_auth_1.tool_call.tool_call_id.0.to_string().into(),
|
||||
tool_use_id: tool_call_auth_1.tool_call.id.0.to_string().into(),
|
||||
tool_name: ToolRequiringPermission::name().into(),
|
||||
is_error: false,
|
||||
content: "Allowed".into(),
|
||||
output: Some("Allowed".into())
|
||||
}),
|
||||
language_model::MessageContent::ToolResult(LanguageModelToolResult {
|
||||
tool_use_id: tool_call_auth_2.tool_call.tool_call_id.0.to_string().into(),
|
||||
tool_use_id: tool_call_auth_2.tool_call.id.0.to_string().into(),
|
||||
tool_name: ToolRequiringPermission::name().into(),
|
||||
is_error: true,
|
||||
content: "Permission to run tool denied by user".into(),
|
||||
@@ -534,7 +520,6 @@ async fn test_tool_authorization(cx: &mut TestAppContext) {
|
||||
raw_input: "{}".into(),
|
||||
input: json!({}),
|
||||
is_input_complete: true,
|
||||
thought_signature: None,
|
||||
},
|
||||
));
|
||||
fake_model.end_last_completion_stream();
|
||||
@@ -543,7 +528,7 @@ async fn test_tool_authorization(cx: &mut TestAppContext) {
|
||||
let tool_call_auth_3 = next_tool_call_authorization(&mut events).await;
|
||||
tool_call_auth_3
|
||||
.response
|
||||
.send(tool_call_auth_3.options[0].option_id.clone())
|
||||
.send(tool_call_auth_3.options[0].id.clone())
|
||||
.unwrap();
|
||||
cx.run_until_parked();
|
||||
let completion = fake_model.pending_completions().pop().unwrap();
|
||||
@@ -552,7 +537,7 @@ async fn test_tool_authorization(cx: &mut TestAppContext) {
|
||||
message.content,
|
||||
vec![language_model::MessageContent::ToolResult(
|
||||
LanguageModelToolResult {
|
||||
tool_use_id: tool_call_auth_3.tool_call.tool_call_id.0.to_string().into(),
|
||||
tool_use_id: tool_call_auth_3.tool_call.id.0.to_string().into(),
|
||||
tool_name: ToolRequiringPermission::name().into(),
|
||||
is_error: false,
|
||||
content: "Allowed".into(),
|
||||
@@ -569,7 +554,6 @@ async fn test_tool_authorization(cx: &mut TestAppContext) {
|
||||
raw_input: "{}".into(),
|
||||
input: json!({}),
|
||||
is_input_complete: true,
|
||||
thought_signature: None,
|
||||
},
|
||||
));
|
||||
fake_model.end_last_completion_stream();
|
||||
@@ -608,7 +592,6 @@ async fn test_tool_hallucination(cx: &mut TestAppContext) {
|
||||
raw_input: "{}".into(),
|
||||
input: json!({}),
|
||||
is_input_complete: true,
|
||||
thought_signature: None,
|
||||
},
|
||||
));
|
||||
fake_model.end_last_completion_stream();
|
||||
@@ -638,7 +621,6 @@ async fn test_resume_after_tool_use_limit(cx: &mut TestAppContext) {
|
||||
raw_input: "{}".into(),
|
||||
input: serde_json::to_value(&EchoToolInput { text: "def".into() }).unwrap(),
|
||||
is_input_complete: true,
|
||||
thought_signature: None,
|
||||
};
|
||||
fake_model
|
||||
.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(tool_use.clone()));
|
||||
@@ -659,26 +641,25 @@ async fn test_resume_after_tool_use_limit(cx: &mut TestAppContext) {
|
||||
LanguageModelRequestMessage {
|
||||
role: Role::User,
|
||||
content: vec!["abc".into()],
|
||||
cache: false,
|
||||
reasoning_details: None,
|
||||
cache: false
|
||||
},
|
||||
LanguageModelRequestMessage {
|
||||
role: Role::Assistant,
|
||||
content: vec![MessageContent::ToolUse(tool_use.clone())],
|
||||
cache: false,
|
||||
reasoning_details: None,
|
||||
cache: false
|
||||
},
|
||||
LanguageModelRequestMessage {
|
||||
role: Role::User,
|
||||
content: vec![MessageContent::ToolResult(tool_result.clone())],
|
||||
cache: true,
|
||||
reasoning_details: None,
|
||||
cache: true
|
||||
},
|
||||
]
|
||||
);
|
||||
|
||||
// Simulate reaching tool use limit.
|
||||
fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUseLimitReached);
|
||||
fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::StatusUpdate(
|
||||
cloud_llm_client::CompletionRequestStatus::ToolUseLimitReached,
|
||||
));
|
||||
fake_model.end_last_completion_stream();
|
||||
let last_event = events.collect::<Vec<_>>().await.pop().unwrap();
|
||||
assert!(
|
||||
@@ -696,26 +677,22 @@ async fn test_resume_after_tool_use_limit(cx: &mut TestAppContext) {
|
||||
LanguageModelRequestMessage {
|
||||
role: Role::User,
|
||||
content: vec!["abc".into()],
|
||||
cache: false,
|
||||
reasoning_details: None,
|
||||
cache: false
|
||||
},
|
||||
LanguageModelRequestMessage {
|
||||
role: Role::Assistant,
|
||||
content: vec![MessageContent::ToolUse(tool_use)],
|
||||
cache: false,
|
||||
reasoning_details: None,
|
||||
cache: false
|
||||
},
|
||||
LanguageModelRequestMessage {
|
||||
role: Role::User,
|
||||
content: vec![MessageContent::ToolResult(tool_result)],
|
||||
cache: false,
|
||||
reasoning_details: None,
|
||||
cache: false
|
||||
},
|
||||
LanguageModelRequestMessage {
|
||||
role: Role::User,
|
||||
content: vec!["Continue where you left off".into()],
|
||||
cache: true,
|
||||
reasoning_details: None,
|
||||
cache: true
|
||||
}
|
||||
]
|
||||
);
|
||||
@@ -754,7 +731,6 @@ async fn test_send_after_tool_use_limit(cx: &mut TestAppContext) {
|
||||
raw_input: "{}".into(),
|
||||
input: serde_json::to_value(&EchoToolInput { text: "def".into() }).unwrap(),
|
||||
is_input_complete: true,
|
||||
thought_signature: None,
|
||||
};
|
||||
let tool_result = LanguageModelToolResult {
|
||||
tool_use_id: "tool_id_1".into(),
|
||||
@@ -765,7 +741,9 @@ async fn test_send_after_tool_use_limit(cx: &mut TestAppContext) {
|
||||
};
|
||||
fake_model
|
||||
.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(tool_use.clone()));
|
||||
fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUseLimitReached);
|
||||
fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::StatusUpdate(
|
||||
cloud_llm_client::CompletionRequestStatus::ToolUseLimitReached,
|
||||
));
|
||||
fake_model.end_last_completion_stream();
|
||||
let last_event = events.collect::<Vec<_>>().await.pop().unwrap();
|
||||
assert!(
|
||||
@@ -787,26 +765,22 @@ async fn test_send_after_tool_use_limit(cx: &mut TestAppContext) {
|
||||
LanguageModelRequestMessage {
|
||||
role: Role::User,
|
||||
content: vec!["abc".into()],
|
||||
cache: false,
|
||||
reasoning_details: None,
|
||||
cache: false
|
||||
},
|
||||
LanguageModelRequestMessage {
|
||||
role: Role::Assistant,
|
||||
content: vec![MessageContent::ToolUse(tool_use)],
|
||||
cache: false,
|
||||
reasoning_details: None,
|
||||
cache: false
|
||||
},
|
||||
LanguageModelRequestMessage {
|
||||
role: Role::User,
|
||||
content: vec![MessageContent::ToolResult(tool_result)],
|
||||
cache: false,
|
||||
reasoning_details: None,
|
||||
cache: false
|
||||
},
|
||||
LanguageModelRequestMessage {
|
||||
role: Role::User,
|
||||
content: vec!["ghi".into()],
|
||||
cache: true,
|
||||
reasoning_details: None,
|
||||
cache: true
|
||||
}
|
||||
]
|
||||
);
|
||||
@@ -959,7 +933,7 @@ async fn test_profiles(cx: &mut TestAppContext) {
|
||||
// Test that test-1 profile (default) has echo and delay tools
|
||||
thread
|
||||
.update(cx, |thread, cx| {
|
||||
thread.set_profile(AgentProfileId("test-1".into()), cx);
|
||||
thread.set_profile(AgentProfileId("test-1".into()));
|
||||
thread.send(UserMessageId::new(), ["test"], cx)
|
||||
})
|
||||
.unwrap();
|
||||
@@ -979,7 +953,7 @@ async fn test_profiles(cx: &mut TestAppContext) {
|
||||
// Switch to test-2 profile, and verify that it has only the infinite tool.
|
||||
thread
|
||||
.update(cx, |thread, cx| {
|
||||
thread.set_profile(AgentProfileId("test-2".into()), cx);
|
||||
thread.set_profile(AgentProfileId("test-2".into()));
|
||||
thread.send(UserMessageId::new(), ["test2"], cx)
|
||||
})
|
||||
.unwrap();
|
||||
@@ -1028,8 +1002,8 @@ async fn test_mcp_tools(cx: &mut TestAppContext) {
|
||||
)
|
||||
.await;
|
||||
cx.run_until_parked();
|
||||
thread.update(cx, |thread, cx| {
|
||||
thread.set_profile(AgentProfileId("test".into()), cx)
|
||||
thread.update(cx, |thread, _| {
|
||||
thread.set_profile(AgentProfileId("test".into()))
|
||||
});
|
||||
|
||||
let mut mcp_tool_calls = setup_context_server(
|
||||
@@ -1063,7 +1037,6 @@ async fn test_mcp_tools(cx: &mut TestAppContext) {
|
||||
raw_input: json!({"text": "test"}).to_string(),
|
||||
input: json!({"text": "test"}),
|
||||
is_input_complete: true,
|
||||
thought_signature: None,
|
||||
},
|
||||
));
|
||||
fake_model.end_last_completion_stream();
|
||||
@@ -1107,7 +1080,6 @@ async fn test_mcp_tools(cx: &mut TestAppContext) {
|
||||
raw_input: json!({"text": "mcp"}).to_string(),
|
||||
input: json!({"text": "mcp"}),
|
||||
is_input_complete: true,
|
||||
thought_signature: None,
|
||||
},
|
||||
));
|
||||
fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(
|
||||
@@ -1117,7 +1089,6 @@ async fn test_mcp_tools(cx: &mut TestAppContext) {
|
||||
raw_input: json!({"text": "native"}).to_string(),
|
||||
input: json!({"text": "native"}),
|
||||
is_input_complete: true,
|
||||
thought_signature: None,
|
||||
},
|
||||
));
|
||||
fake_model.end_last_completion_stream();
|
||||
@@ -1198,8 +1169,8 @@ async fn test_mcp_tool_truncation(cx: &mut TestAppContext) {
|
||||
.await;
|
||||
cx.run_until_parked();
|
||||
|
||||
thread.update(cx, |thread, cx| {
|
||||
thread.set_profile(AgentProfileId("test".into()), cx);
|
||||
thread.update(cx, |thread, _| {
|
||||
thread.set_profile(AgentProfileId("test".into()));
|
||||
thread.add_tool(EchoTool);
|
||||
thread.add_tool(DelayTool);
|
||||
thread.add_tool(WordListTool);
|
||||
@@ -1353,20 +1324,20 @@ async fn test_cancellation(cx: &mut TestAppContext) {
|
||||
ThreadEvent::ToolCall(tool_call) => {
|
||||
assert_eq!(tool_call.title, expected_tools.remove(0));
|
||||
if tool_call.title == "Echo" {
|
||||
echo_id = Some(tool_call.tool_call_id);
|
||||
echo_id = Some(tool_call.id);
|
||||
}
|
||||
}
|
||||
ThreadEvent::ToolCallUpdate(acp_thread::ToolCallUpdate::UpdateFields(
|
||||
acp::ToolCallUpdate {
|
||||
tool_call_id,
|
||||
id,
|
||||
fields:
|
||||
acp::ToolCallUpdateFields {
|
||||
status: Some(acp::ToolCallStatus::Completed),
|
||||
..
|
||||
},
|
||||
..
|
||||
meta: None,
|
||||
},
|
||||
)) if Some(&tool_call_id) == echo_id.as_ref() => {
|
||||
)) if Some(&id) == echo_id.as_ref() => {
|
||||
echo_completed = true;
|
||||
}
|
||||
_ => {}
|
||||
@@ -1817,7 +1788,6 @@ async fn test_building_request_with_pending_tools(cx: &mut TestAppContext) {
|
||||
raw_input: "{}".into(),
|
||||
input: json!({}),
|
||||
is_input_complete: true,
|
||||
thought_signature: None,
|
||||
};
|
||||
let echo_tool_use = LanguageModelToolUse {
|
||||
id: "tool_id_2".into(),
|
||||
@@ -1825,7 +1795,6 @@ async fn test_building_request_with_pending_tools(cx: &mut TestAppContext) {
|
||||
raw_input: json!({"text": "test"}).to_string(),
|
||||
input: json!({"text": "test"}),
|
||||
is_input_complete: true,
|
||||
thought_signature: None,
|
||||
};
|
||||
fake_model.send_last_completion_stream_text_chunk("Hi!");
|
||||
fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(
|
||||
@@ -1849,8 +1818,7 @@ async fn test_building_request_with_pending_tools(cx: &mut TestAppContext) {
|
||||
LanguageModelRequestMessage {
|
||||
role: Role::User,
|
||||
content: vec!["Hey!".into()],
|
||||
cache: true,
|
||||
reasoning_details: None,
|
||||
cache: true
|
||||
},
|
||||
LanguageModelRequestMessage {
|
||||
role: Role::Assistant,
|
||||
@@ -1858,8 +1826,7 @@ async fn test_building_request_with_pending_tools(cx: &mut TestAppContext) {
|
||||
MessageContent::Text("Hi!".into()),
|
||||
MessageContent::ToolUse(echo_tool_use.clone())
|
||||
],
|
||||
cache: false,
|
||||
reasoning_details: None,
|
||||
cache: false
|
||||
},
|
||||
LanguageModelRequestMessage {
|
||||
role: Role::User,
|
||||
@@ -1870,8 +1837,7 @@ async fn test_building_request_with_pending_tools(cx: &mut TestAppContext) {
|
||||
content: "test".into(),
|
||||
output: Some("test".into())
|
||||
})],
|
||||
cache: false,
|
||||
reasoning_details: None,
|
||||
cache: false
|
||||
},
|
||||
],
|
||||
);
|
||||
@@ -1885,6 +1851,7 @@ async fn test_agent_connection(cx: &mut TestAppContext) {
|
||||
// Initialize language model system with test provider
|
||||
cx.update(|cx| {
|
||||
gpui_tokio::init(cx);
|
||||
client::init_settings(cx);
|
||||
|
||||
let http_client = FakeHttpClient::with_404_response();
|
||||
let clock = Arc::new(clock::FakeSystemClock::new());
|
||||
@@ -1892,7 +1859,9 @@ async fn test_agent_connection(cx: &mut TestAppContext) {
|
||||
let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
|
||||
language_model::init(client.clone(), cx);
|
||||
language_models::init(user_store, client.clone(), cx);
|
||||
Project::init_settings(cx);
|
||||
LanguageModelRegistry::test(cx);
|
||||
agent_settings::init(cx);
|
||||
});
|
||||
cx.executor().forbid_parking();
|
||||
|
||||
@@ -1995,7 +1964,11 @@ async fn test_agent_connection(cx: &mut TestAppContext) {
|
||||
.update(|cx| {
|
||||
connection.prompt(
|
||||
Some(acp_thread::UserMessageId::new()),
|
||||
acp::PromptRequest::new(session_id.clone(), vec!["ghi".into()]),
|
||||
acp::PromptRequest {
|
||||
session_id: session_id.clone(),
|
||||
prompt: vec!["ghi".into()],
|
||||
meta: None,
|
||||
},
|
||||
cx,
|
||||
)
|
||||
})
|
||||
@@ -2030,7 +2003,6 @@ async fn test_tool_updates_to_completion(cx: &mut TestAppContext) {
|
||||
raw_input: input.to_string(),
|
||||
input,
|
||||
is_input_complete: false,
|
||||
thought_signature: None,
|
||||
},
|
||||
));
|
||||
|
||||
@@ -2043,7 +2015,6 @@ async fn test_tool_updates_to_completion(cx: &mut TestAppContext) {
|
||||
raw_input: input.to_string(),
|
||||
input,
|
||||
is_input_complete: true,
|
||||
thought_signature: None,
|
||||
},
|
||||
));
|
||||
fake_model.end_last_completion_stream();
|
||||
@@ -2052,50 +2023,68 @@ async fn test_tool_updates_to_completion(cx: &mut TestAppContext) {
let tool_call = expect_tool_call(&mut events).await;
assert_eq!(
tool_call,
acp::ToolCall::new("1", "Thinking")
.kind(acp::ToolKind::Think)
.raw_input(json!({}))
.meta(acp::Meta::from_iter([(
"tool_name".into(),
"thinking".into()
)]))
acp::ToolCall {
id: acp::ToolCallId("1".into()),
title: "Thinking".into(),
kind: acp::ToolKind::Think,
status: acp::ToolCallStatus::Pending,
content: vec![],
locations: vec![],
raw_input: Some(json!({})),
raw_output: None,
meta: Some(json!({ "tool_name": "thinking" })),
}
);
let update = expect_tool_call_update_fields(&mut events).await;
assert_eq!(
update,
acp::ToolCallUpdate::new(
"1",
acp::ToolCallUpdateFields::new()
.title("Thinking")
.kind(acp::ToolKind::Think)
.raw_input(json!({ "content": "Thinking hard!"}))
)
acp::ToolCallUpdate {
id: acp::ToolCallId("1".into()),
fields: acp::ToolCallUpdateFields {
title: Some("Thinking".into()),
kind: Some(acp::ToolKind::Think),
raw_input: Some(json!({ "content": "Thinking hard!" })),
..Default::default()
},
meta: None,
}
);
let update = expect_tool_call_update_fields(&mut events).await;
assert_eq!(
update,
acp::ToolCallUpdate::new(
"1",
acp::ToolCallUpdateFields::new().status(acp::ToolCallStatus::InProgress)
)
acp::ToolCallUpdate {
id: acp::ToolCallId("1".into()),
fields: acp::ToolCallUpdateFields {
status: Some(acp::ToolCallStatus::InProgress),
..Default::default()
},
meta: None,
}
);
let update = expect_tool_call_update_fields(&mut events).await;
assert_eq!(
update,
acp::ToolCallUpdate::new(
"1",
acp::ToolCallUpdateFields::new().content(vec!["Thinking hard!".into()])
)
acp::ToolCallUpdate {
id: acp::ToolCallId("1".into()),
fields: acp::ToolCallUpdateFields {
content: Some(vec!["Thinking hard!".into()]),
..Default::default()
},
meta: None,
}
);
let update = expect_tool_call_update_fields(&mut events).await;
assert_eq!(
update,
acp::ToolCallUpdate::new(
"1",
acp::ToolCallUpdateFields::new()
.status(acp::ToolCallStatus::Completed)
.raw_output("Finished thinking.".into())
)
acp::ToolCallUpdate {
id: acp::ToolCallId("1".into()),
fields: acp::ToolCallUpdateFields {
status: Some(acp::ToolCallStatus::Completed),
raw_output: Some("Finished thinking.".into()),
..Default::default()
},
meta: None,
}
);
}
@@ -2228,7 +2217,6 @@ async fn test_send_retry_finishes_tool_calls_on_error(cx: &mut TestAppContext) {
raw_input: json!({"text": "test"}).to_string(),
input: json!({"text": "test"}),
is_input_complete: true,
thought_signature: None,
};
fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(
tool_use_1.clone(),
@@ -2247,14 +2235,12 @@ async fn test_send_retry_finishes_tool_calls_on_error(cx: &mut TestAppContext) {
LanguageModelRequestMessage {
role: Role::User,
content: vec!["Call the echo tool!".into()],
cache: false,
reasoning_details: None,
cache: false
},
LanguageModelRequestMessage {
role: Role::Assistant,
content: vec![language_model::MessageContent::ToolUse(tool_use_1.clone())],
cache: false,
reasoning_details: None,
cache: false
},
LanguageModelRequestMessage {
role: Role::User,
@@ -2267,8 +2253,7 @@ async fn test_send_retry_finishes_tool_calls_on_error(cx: &mut TestAppContext) {
output: Some("test".into())
}
)],
cache: true,
reasoning_details: None,
cache: true
},
]
);
@@ -2282,8 +2267,7 @@ async fn test_send_retry_finishes_tool_calls_on_error(cx: &mut TestAppContext) {
thread.last_message(),
Some(Message::Agent(AgentMessage {
content: vec![AgentMessageContent::Text("Done".into())],
tool_results: IndexMap::default(),
reasoning_details: None,
tool_results: IndexMap::default()
}))
);
})
@@ -2411,6 +2395,8 @@ async fn setup(cx: &mut TestAppContext, model: TestModel) -> ThreadTest {
cx.update(|cx| {
settings::init(cx);
Project::init_settings(cx);
agent_settings::init(cx);

match model {
TestModel::Fake => {}
@@ -2418,6 +2404,7 @@ async fn setup(cx: &mut TestAppContext, model: TestModel) -> ThreadTest {
gpui_tokio::init(cx);
let http_client = ReqwestClient::user_agent("agent tests").unwrap();
cx.set_http_client(Arc::new(http_client));
client::init_settings(cx);
let client = Client::production(cx);
let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
language_model::init(client.clone(), cx);
@@ -2531,7 +2518,7 @@ fn setup_context_server(
let mut settings = ProjectSettings::get_global(cx).clone();
settings.context_servers.insert(
name.into(),
project::project_settings::ContextServerSettings::Stdio {
project::project_settings::ContextServerSettings::Custom {
enabled: true,
command: ContextServerCommand {
path: "somebinary".into(),
@@ -15,7 +15,7 @@ use agent_settings::{
use anyhow::{Context as _, Result, anyhow};
use chrono::{DateTime, Utc};
use client::{ModelRequestUsage, RequestUsage, UserStore};
use cloud_llm_client::{CompletionIntent, Plan, UsageLimit};
use cloud_llm_client::{CompletionIntent, CompletionRequestStatus, Plan, UsageLimit};
use collections::{HashMap, HashSet, IndexMap};
use fs::Fs;
use futures::stream;
@@ -30,17 +30,16 @@ use gpui::{
};
use language_model::{
LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent, LanguageModelExt,
LanguageModelId, LanguageModelImage, LanguageModelProviderId, LanguageModelRegistry,
LanguageModelRequest, LanguageModelRequestMessage, LanguageModelRequestTool,
LanguageModelToolResult, LanguageModelToolResultContent, LanguageModelToolSchemaFormat,
LanguageModelToolUse, LanguageModelToolUseId, Role, SelectedModel, StopReason, TokenUsage,
ZED_CLOUD_PROVIDER_ID,
LanguageModelImage, LanguageModelProviderId, LanguageModelRegistry, LanguageModelRequest,
LanguageModelRequestMessage, LanguageModelRequestTool, LanguageModelToolResult,
LanguageModelToolResultContent, LanguageModelToolSchemaFormat, LanguageModelToolUse,
LanguageModelToolUseId, Role, SelectedModel, StopReason, TokenUsage, ZED_CLOUD_PROVIDER_ID,
};
use project::Project;
use prompt_store::ProjectContext;
use schemars::{JsonSchema, Schema};
use serde::{Deserialize, Serialize};
use settings::{LanguageModelSelection, Settings, update_settings_file};
use settings::{Settings, update_settings_file};
use smol::stream::StreamExt;
use std::{
collections::BTreeMap,
@@ -113,7 +112,6 @@ impl Message {
role: Role::User,
content: vec!["Continue where you left off".into()],
cache: false,
reasoning_details: None,
}],
}
}
@@ -178,7 +176,6 @@ impl UserMessage {
role: Role::User,
content: Vec::with_capacity(self.content.len()),
cache: false,
reasoning_details: None,
};

const OPEN_CONTEXT: &str = "<context>\n\
@@ -446,7 +443,6 @@ impl AgentMessage {
role: Role::Assistant,
content: Vec::with_capacity(self.content.len()),
cache: false,
reasoning_details: self.reasoning_details.clone(),
};
for chunk in &self.content {
match chunk {
@@ -482,7 +478,6 @@ impl AgentMessage {
role: Role::User,
content: Vec::new(),
cache: false,
reasoning_details: None,
};

for tool_result in self.tool_results.values() {
@@ -512,7 +507,6 @@ impl AgentMessage {
pub struct AgentMessage {
pub content: Vec<AgentMessageContent>,
pub tool_results: IndexMap<LanguageModelToolUseId, LanguageModelToolResult>,
pub reasoning_details: Option<serde_json::Value>,
}

#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
@@ -612,16 +606,17 @@ pub struct Thread {
pub(crate) prompt_capabilities_rx: watch::Receiver<acp::PromptCapabilities>,
pub(crate) project: Entity<Project>,
pub(crate) action_log: Entity<ActionLog>,
/// Tracks the last time files were read by the agent, to detect external modifications
pub(crate) file_read_times: HashMap<PathBuf, fs::MTime>,
}

impl Thread {
fn prompt_capabilities(model: Option<&dyn LanguageModel>) -> acp::PromptCapabilities {
let image = model.map_or(true, |model| model.supports_images());
acp::PromptCapabilities::new()
.image(image)
.embedded_context(true)
acp::PromptCapabilities {
meta: None,
image,
audio: false,
embedded_context: true,
}
}

pub fn new(
@@ -637,7 +632,7 @@ impl Thread {
let (prompt_capabilities_tx, prompt_capabilities_rx) =
watch::channel(Self::prompt_capabilities(model.as_deref()));
Self {
id: acp::SessionId::new(uuid::Uuid::new_v4().to_string()),
id: acp::SessionId(uuid::Uuid::new_v4().to_string().into()),
prompt_id: PromptId::new(),
updated_at: Utc::now(),
title: None,
@@ -669,7 +664,6 @@ impl Thread {
prompt_capabilities_rx,
project,
action_log,
file_read_times: HashMap::default(),
}
}
@@ -734,11 +728,17 @@ impl Thread {
let Some(tool) = tool else {
stream
.0
.unbounded_send(Ok(ThreadEvent::ToolCall(
acp::ToolCall::new(tool_use.id.to_string(), tool_use.name.to_string())
.status(acp::ToolCallStatus::Failed)
.raw_input(tool_use.input.clone()),
)))
.unbounded_send(Ok(ThreadEvent::ToolCall(acp::ToolCall {
meta: None,
id: acp::ToolCallId(tool_use.id.to_string().into()),
title: tool_use.name.to_string(),
kind: acp::ToolKind::Other,
status: acp::ToolCallStatus::Failed,
content: Vec::new(),
locations: Vec::new(),
raw_input: Some(tool_use.input.clone()),
raw_output: None,
})))
.ok();
return;
};
@@ -766,20 +766,24 @@ impl Thread {
.log_err();
}

let mut fields = acp::ToolCallUpdateFields::new().status(tool_result.as_ref().map_or(
acp::ToolCallStatus::Failed,
|result| {
if result.is_error {
acp::ToolCallStatus::Failed
} else {
acp::ToolCallStatus::Completed
}
stream.update_tool_call_fields(
&tool_use.id,
acp::ToolCallUpdateFields {
status: Some(
tool_result
.as_ref()
.map_or(acp::ToolCallStatus::Failed, |result| {
if result.is_error {
acp::ToolCallStatus::Failed
} else {
acp::ToolCallStatus::Completed
}
}),
),
raw_output: output,
..Default::default()
},
));
if let Some(output) = output {
fields = fields.raw_output(output);
}
stream.update_tool_call_fields(&tool_use.id, fields);
);
}

pub fn from_db(
@@ -794,8 +798,7 @@ impl Thread {
let profile_id = db_thread
.profile
.unwrap_or_else(|| AgentSettings::get_global(cx).default_profile.clone());

let mut model = LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
let model = LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
db_thread
.model
.and_then(|model| {
@@ -808,16 +811,6 @@ impl Thread {
.or_else(|| registry.default_model())
.map(|model| model.model)
});

if model.is_none() {
model = Self::resolve_profile_model(&profile_id, cx);
}
if model.is_none() {
model = LanguageModelRegistry::global(cx).update(cx, |registry, _cx| {
registry.default_model().map(|model| model.model)
});
}

let (prompt_capabilities_tx, prompt_capabilities_rx) =
watch::channel(Self::prompt_capabilities(model.as_deref()));
@@ -855,7 +848,6 @@ impl Thread {
updated_at: db_thread.updated_at,
prompt_capabilities_tx,
prompt_capabilities_rx,
file_read_times: HashMap::default(),
}
}
@@ -995,7 +987,6 @@ impl Thread {
self.add_tool(NowTool);
self.add_tool(OpenTool::new(self.project.clone()));
self.add_tool(ReadFileTool::new(
cx.weak_entity(),
self.project.clone(),
self.action_log.clone(),
));
@@ -1016,17 +1007,8 @@ impl Thread {
&self.profile_id
}

pub fn set_profile(&mut self, profile_id: AgentProfileId, cx: &mut Context<Self>) {
if self.profile_id == profile_id {
return;
}

pub fn set_profile(&mut self, profile_id: AgentProfileId) {
self.profile_id = profile_id;

// Swap to the profile's preferred model when available.
if let Some(model) = Self::resolve_profile_model(&self.profile_id, cx) {
self.set_model(model, cx);
}
}

pub fn cancel(&mut self, cx: &mut Context<Self>) {
@@ -1083,35 +1065,6 @@ impl Thread {
})
}

/// Look up the active profile and resolve its preferred model if one is configured.
fn resolve_profile_model(
profile_id: &AgentProfileId,
cx: &mut Context<Self>,
) -> Option<Arc<dyn LanguageModel>> {
let selection = AgentSettings::get_global(cx)
.profiles
.get(profile_id)?
.default_model
.clone()?;
Self::resolve_model_from_selection(&selection, cx)
}

/// Translate a stored model selection into the configured model from the registry.
fn resolve_model_from_selection(
selection: &LanguageModelSelection,
cx: &mut Context<Self>,
) -> Option<Arc<dyn LanguageModel>> {
let selected = SelectedModel {
provider: LanguageModelProviderId::from(selection.provider.0.clone()),
model: LanguageModelId::from(selection.model.clone()),
};
LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
registry
.select_model(&selected, cx)
.map(|configured| configured.model)
})
}

pub fn resume(
&mut self,
cx: &mut Context<Self>,
@@ -1259,15 +1212,18 @@ impl Thread {
|
||||
while let Some(tool_result) = tool_results.next().await {
|
||||
log::debug!("Tool finished {:?}", tool_result);
|
||||
|
||||
let mut fields = acp::ToolCallUpdateFields::new().status(if tool_result.is_error {
|
||||
acp::ToolCallStatus::Failed
|
||||
} else {
|
||||
acp::ToolCallStatus::Completed
|
||||
});
|
||||
if let Some(output) = &tool_result.output {
|
||||
fields = fields.raw_output(output.clone());
|
||||
}
|
||||
event_stream.update_tool_call_fields(&tool_result.tool_use_id, fields);
|
||||
event_stream.update_tool_call_fields(
|
||||
&tool_result.tool_use_id,
|
||||
acp::ToolCallUpdateFields {
|
||||
status: Some(if tool_result.is_error {
|
||||
acp::ToolCallStatus::Failed
|
||||
} else {
|
||||
acp::ToolCallStatus::Completed
|
||||
}),
|
||||
raw_output: tool_result.output.clone(),
|
||||
..Default::default()
|
||||
},
|
||||
);
|
||||
this.update(cx, |this, _cx| {
|
||||
this.pending_message()
|
||||
.tool_results
|
||||
@@ -1387,18 +1343,6 @@ impl Thread {
|
||||
self.handle_thinking_event(text, signature, event_stream, cx)
|
||||
}
|
||||
RedactedThinking { data } => self.handle_redacted_thinking_event(data, cx),
|
||||
ReasoningDetails(details) => {
|
||||
let last_message = self.pending_message();
|
||||
// Store the last non-empty reasoning_details (overwrites earlier ones)
|
||||
// This ensures we keep the encrypted reasoning with signatures, not the early text reasoning
|
||||
if let serde_json::Value::Array(ref arr) = details {
|
||||
if !arr.is_empty() {
|
||||
last_message.reasoning_details = Some(details);
|
||||
}
|
||||
} else {
|
||||
last_message.reasoning_details = Some(details);
|
||||
}
|
||||
}
|
||||
ToolUse(tool_use) => {
|
||||
return Ok(self.handle_tool_use_event(tool_use, event_stream, cx));
|
||||
}
|
||||
@@ -1431,16 +1375,20 @@ impl Thread {
|
||||
);
|
||||
self.update_token_usage(usage, cx);
|
||||
}
|
||||
UsageUpdated { amount, limit } => {
|
||||
StatusUpdate(CompletionRequestStatus::UsageUpdated { amount, limit }) => {
|
||||
self.update_model_request_usage(amount, limit, cx);
|
||||
}
|
||||
ToolUseLimitReached => {
|
||||
StatusUpdate(
|
||||
CompletionRequestStatus::Started
|
||||
| CompletionRequestStatus::Queued { .. }
|
||||
| CompletionRequestStatus::Failed { .. },
|
||||
) => {}
|
||||
StatusUpdate(CompletionRequestStatus::ToolUseLimitReached) => {
|
||||
self.tool_use_limit_reached = true;
|
||||
}
|
||||
Stop(StopReason::Refusal) => return Err(CompletionError::Refusal.into()),
|
||||
Stop(StopReason::MaxTokens) => return Err(CompletionError::MaxTokens.into()),
|
||||
Stop(StopReason::ToolUse | StopReason::EndTurn) => {}
|
||||
Started | Queued { .. } => {}
|
||||
}
|
||||
|
||||
Ok(None)
|
||||
@@ -1544,10 +1492,12 @@ impl Thread {
|
||||
} else {
|
||||
event_stream.update_tool_call_fields(
|
||||
&tool_use.id,
|
||||
acp::ToolCallUpdateFields::new()
|
||||
.title(title)
|
||||
.kind(kind)
|
||||
.raw_input(tool_use.input.clone()),
|
||||
acp::ToolCallUpdateFields {
|
||||
title: Some(title.into()),
|
||||
kind: Some(kind),
|
||||
raw_input: Some(tool_use.input.clone()),
|
||||
..Default::default()
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1569,9 +1519,10 @@ impl Thread {
|
||||
let fs = self.project.read(cx).fs().clone();
|
||||
let tool_event_stream =
|
||||
ToolCallEventStream::new(tool_use.id.clone(), event_stream.clone(), Some(fs));
|
||||
tool_event_stream.update_fields(
|
||||
acp::ToolCallUpdateFields::new().status(acp::ToolCallStatus::InProgress),
|
||||
);
|
||||
tool_event_stream.update_fields(acp::ToolCallUpdateFields {
|
||||
status: Some(acp::ToolCallStatus::InProgress),
|
||||
..Default::default()
|
||||
});
|
||||
let supports_images = self.model().is_some_and(|model| model.supports_images());
|
||||
let tool_result = tool.run(tool_use.input, tool_event_stream, cx);
|
||||
log::debug!("Running tool {}", tool_use.name);
|
||||
@@ -1671,7 +1622,6 @@ impl Thread {
|
||||
role: Role::User,
|
||||
content: vec![SUMMARIZE_THREAD_DETAILED_PROMPT.into()],
|
||||
cache: false,
|
||||
reasoning_details: None,
|
||||
});
|
||||
|
||||
let task = cx
|
||||
@@ -1682,7 +1632,9 @@ impl Thread {
|
||||
let event = event.log_err()?;
|
||||
let text = match event {
|
||||
LanguageModelCompletionEvent::Text(text) => text,
|
||||
LanguageModelCompletionEvent::UsageUpdated { amount, limit } => {
|
||||
LanguageModelCompletionEvent::StatusUpdate(
|
||||
CompletionRequestStatus::UsageUpdated { amount, limit },
|
||||
) => {
|
||||
this.update(cx, |thread, cx| {
|
||||
thread.update_model_request_usage(amount, limit, cx);
|
||||
})
|
||||
@@ -1736,7 +1688,6 @@ impl Thread {
|
||||
role: Role::User,
|
||||
content: vec![SUMMARIZE_THREAD_PROMPT.into()],
|
||||
cache: false,
|
||||
reasoning_details: None,
|
||||
});
|
||||
self.pending_title_generation = Some(cx.spawn(async move |this, cx| {
|
||||
let mut title = String::new();
|
||||
@@ -1747,7 +1698,9 @@ impl Thread {
|
||||
let event = event?;
|
||||
let text = match event {
|
||||
LanguageModelCompletionEvent::Text(text) => text,
|
||||
LanguageModelCompletionEvent::UsageUpdated { amount, limit } => {
|
||||
LanguageModelCompletionEvent::StatusUpdate(
|
||||
CompletionRequestStatus::UsageUpdated { amount, limit },
|
||||
) => {
|
||||
this.update(cx, |thread, cx| {
|
||||
thread.update_model_request_usage(amount, limit, cx);
|
||||
})?;
|
||||
@@ -1984,7 +1937,6 @@ impl Thread {
|
||||
role: Role::System,
|
||||
content: vec![system_prompt.into()],
|
||||
cache: false,
|
||||
reasoning_details: None,
|
||||
}];
|
||||
for message in &self.messages {
|
||||
messages.extend(message.to_request());
|
||||
@@ -2187,7 +2139,7 @@ where
|
||||
|
||||
/// Returns the JSON schema that describes the tool's input.
|
||||
fn input_schema(format: LanguageModelToolSchemaFormat) -> Schema {
|
||||
language_model::tool_schema::root_schema_for::<Self::Input>(format)
|
||||
crate::tool_schema::root_schema_for::<Self::Input>(format)
|
||||
}
|
||||
|
||||
/// Some tools rely on a provider for the underlying billing or other reasons.
|
||||
@@ -2274,7 +2226,7 @@ where
|
||||
|
||||
fn input_schema(&self, format: LanguageModelToolSchemaFormat) -> Result<serde_json::Value> {
|
||||
let mut json = serde_json::to_value(T::input_schema(format))?;
|
||||
language_model::tool_schema::adapt_schema_to_format(&mut json, format)?;
|
||||
crate::tool_schema::adapt_schema_to_format(&mut json, format)?;
|
||||
Ok(json)
|
||||
}
|
||||
|
||||
@@ -2362,13 +2314,19 @@ impl ThreadEventStream {
|
||||
kind: acp::ToolKind,
|
||||
input: serde_json::Value,
|
||||
) -> acp::ToolCall {
|
||||
acp::ToolCall::new(id.to_string(), title)
|
||||
.kind(kind)
|
||||
.raw_input(input)
|
||||
.meta(acp::Meta::from_iter([(
|
||||
"tool_name".into(),
|
||||
tool_name.into(),
|
||||
)]))
|
||||
acp::ToolCall {
|
||||
meta: Some(serde_json::json!({
|
||||
"tool_name": tool_name
|
||||
})),
|
||||
id: acp::ToolCallId(id.to_string().into()),
|
||||
title,
|
||||
kind,
|
||||
status: acp::ToolCallStatus::Pending,
|
||||
content: vec![],
|
||||
locations: vec![],
|
||||
raw_input: Some(input),
|
||||
raw_output: None,
|
||||
}
|
||||
}
|
||||
|
||||
fn update_tool_call_fields(
|
||||
@@ -2378,7 +2336,12 @@ impl ThreadEventStream {
|
||||
) {
|
||||
self.0
|
||||
.unbounded_send(Ok(ThreadEvent::ToolCallUpdate(
|
||||
acp::ToolCallUpdate::new(tool_use_id.to_string(), fields).into(),
|
||||
acp::ToolCallUpdate {
|
||||
meta: None,
|
||||
id: acp::ToolCallId(tool_use_id.to_string().into()),
|
||||
fields,
|
||||
}
|
||||
.into(),
|
||||
)))
|
||||
.ok();
|
||||
}
|
||||
@@ -2441,7 +2404,7 @@ impl ToolCallEventStream {
|
||||
.0
|
||||
.unbounded_send(Ok(ThreadEvent::ToolCallUpdate(
|
||||
acp_thread::ToolCallUpdateDiff {
|
||||
id: acp::ToolCallId::new(self.tool_use_id.to_string()),
|
||||
id: acp::ToolCallId(self.tool_use_id.to_string().into()),
|
||||
diff,
|
||||
}
|
||||
.into(),
|
||||
@@ -2459,26 +2422,33 @@ impl ToolCallEventStream {
|
||||
.0
|
||||
.unbounded_send(Ok(ThreadEvent::ToolCallAuthorization(
|
||||
ToolCallAuthorization {
|
||||
tool_call: acp::ToolCallUpdate::new(
|
||||
self.tool_use_id.to_string(),
|
||||
acp::ToolCallUpdateFields::new().title(title),
|
||||
),
|
||||
tool_call: acp::ToolCallUpdate {
|
||||
meta: None,
|
||||
id: acp::ToolCallId(self.tool_use_id.to_string().into()),
|
||||
fields: acp::ToolCallUpdateFields {
|
||||
title: Some(title.into()),
|
||||
..Default::default()
|
||||
},
|
||||
},
|
||||
options: vec![
|
||||
acp::PermissionOption::new(
|
||||
acp::PermissionOptionId::new("always_allow"),
|
||||
"Always Allow",
|
||||
acp::PermissionOptionKind::AllowAlways,
|
||||
),
|
||||
acp::PermissionOption::new(
|
||||
acp::PermissionOptionId::new("allow"),
|
||||
"Allow",
|
||||
acp::PermissionOptionKind::AllowOnce,
|
||||
),
|
||||
acp::PermissionOption::new(
|
||||
acp::PermissionOptionId::new("deny"),
|
||||
"Deny",
|
||||
acp::PermissionOptionKind::RejectOnce,
|
||||
),
|
||||
acp::PermissionOption {
|
||||
id: acp::PermissionOptionId("always_allow".into()),
|
||||
name: "Always Allow".into(),
|
||||
kind: acp::PermissionOptionKind::AllowAlways,
|
||||
meta: None,
|
||||
},
|
||||
acp::PermissionOption {
|
||||
id: acp::PermissionOptionId("allow".into()),
|
||||
name: "Allow".into(),
|
||||
kind: acp::PermissionOptionKind::AllowOnce,
|
||||
meta: None,
|
||||
},
|
||||
acp::PermissionOption {
|
||||
id: acp::PermissionOptionId("deny".into()),
|
||||
name: "Deny".into(),
|
||||
kind: acp::PermissionOptionKind::RejectOnce,
|
||||
meta: None,
|
||||
},
|
||||
],
|
||||
response: response_tx,
|
||||
},
|
||||
@@ -2623,15 +2593,7 @@ impl UserMessageContent {
|
||||
// TODO
|
||||
Self::Text("[blob]".to_string())
|
||||
}
|
||||
other => {
|
||||
log::warn!("Unexpected content type: {:?}", other);
|
||||
Self::Text("[unknown]".to_string())
|
||||
}
|
||||
},
|
||||
other => {
|
||||
log::warn!("Unexpected content type: {:?}", other);
|
||||
Self::Text("[unknown]".to_string())
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -2639,15 +2601,32 @@ impl UserMessageContent {
|
||||
impl From<UserMessageContent> for acp::ContentBlock {
|
||||
fn from(content: UserMessageContent) -> Self {
|
||||
match content {
|
||||
UserMessageContent::Text(text) => text.into(),
|
||||
UserMessageContent::Image(image) => {
|
||||
acp::ContentBlock::Image(acp::ImageContent::new(image.source, "image/png"))
|
||||
UserMessageContent::Text(text) => acp::ContentBlock::Text(acp::TextContent {
|
||||
text,
|
||||
annotations: None,
|
||||
meta: None,
|
||||
}),
|
||||
UserMessageContent::Image(image) => acp::ContentBlock::Image(acp::ImageContent {
|
||||
data: image.source.to_string(),
|
||||
mime_type: "image/png".to_string(),
|
||||
meta: None,
|
||||
annotations: None,
|
||||
uri: None,
|
||||
}),
|
||||
UserMessageContent::Mention { uri, content } => {
|
||||
acp::ContentBlock::Resource(acp::EmbeddedResource {
|
||||
meta: None,
|
||||
resource: acp::EmbeddedResourceResource::TextResourceContents(
|
||||
acp::TextResourceContents {
|
||||
meta: None,
|
||||
mime_type: None,
|
||||
text: content,
|
||||
uri: uri.to_uri().to_string(),
|
||||
},
|
||||
),
|
||||
annotations: None,
|
||||
})
|
||||
}
|
||||
UserMessageContent::Mention { uri, content } => acp::ContentBlock::Resource(
|
||||
acp::EmbeddedResource::new(acp::EmbeddedResourceResource::TextResourceContents(
|
||||
acp::TextResourceContents::new(content, uri.to_uri().to_string()),
|
||||
)),
|
||||
),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
use anyhow::Result;
|
||||
use language_model::LanguageModelToolSchemaFormat;
|
||||
use schemars::{
|
||||
JsonSchema, Schema,
|
||||
generate::SchemaSettings,
|
||||
@@ -6,16 +7,7 @@ use schemars::{
|
||||
};
|
||||
use serde_json::Value;
|
||||
|
||||
/// Indicates the format used to define the input schema for a language model tool.
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
|
||||
pub enum LanguageModelToolSchemaFormat {
|
||||
/// A JSON schema, see https://json-schema.org
|
||||
JsonSchema,
|
||||
/// A subset of an OpenAPI 3.0 schema object supported by Google AI, see https://ai.google.dev/api/caching#Schema
|
||||
JsonSchemaSubset,
|
||||
}
|
||||
|
||||
pub fn root_schema_for<T: JsonSchema>(format: LanguageModelToolSchemaFormat) -> Schema {
|
||||
pub(crate) fn root_schema_for<T: JsonSchema>(format: LanguageModelToolSchemaFormat) -> Schema {
|
||||
let mut generator = match format {
|
||||
LanguageModelToolSchemaFormat::JsonSchema => SchemaSettings::draft07().into_generator(),
|
||||
LanguageModelToolSchemaFormat::JsonSchemaSubset => SchemaSettings::openapi3()
|
||||
@@ -165,7 +165,7 @@ impl AnyAgentTool for ContextServerTool {
|
||||
format: language_model::LanguageModelToolSchemaFormat,
|
||||
) -> Result<serde_json::Value> {
|
||||
let mut schema = self.tool.input_schema.clone();
|
||||
language_model::tool_schema::adapt_schema_to_format(&mut schema, format)?;
|
||||
crate::tool_schema::adapt_schema_to_format(&mut schema, format)?;
|
||||
Ok(match schema {
|
||||
serde_json::Value::Null => {
|
||||
serde_json::json!({ "type": "object", "properties": [] })
|
||||
|
||||
@@ -273,9 +273,14 @@ impl AgentTool for EditFileTool {
|
||||
};
|
||||
let abs_path = project.read(cx).absolute_path(&project_path, cx);
|
||||
if let Some(abs_path) = abs_path.clone() {
|
||||
event_stream.update_fields(
|
||||
ToolCallUpdateFields::new().locations(vec![acp::ToolCallLocation::new(abs_path)]),
|
||||
);
|
||||
event_stream.update_fields(ToolCallUpdateFields {
|
||||
locations: Some(vec![acp::ToolCallLocation {
|
||||
path: abs_path,
|
||||
line: None,
|
||||
meta: None,
|
||||
}]),
|
||||
..Default::default()
|
||||
});
|
||||
}
|
||||
|
||||
let authorize = self.authorize(&input, &event_stream, cx);
|
||||
@@ -304,40 +309,6 @@ impl AgentTool for EditFileTool {
|
||||
})?
|
||||
.await?;
|
||||
|
||||
// Check if the file has been modified since the agent last read it
|
||||
if let Some(abs_path) = abs_path.as_ref() {
|
||||
let (last_read_mtime, current_mtime, is_dirty) = self.thread.update(cx, |thread, cx| {
|
||||
let last_read = thread.file_read_times.get(abs_path).copied();
|
||||
let current = buffer.read(cx).file().and_then(|file| file.disk_state().mtime());
|
||||
let dirty = buffer.read(cx).is_dirty();
|
||||
(last_read, current, dirty)
|
||||
})?;
|
||||
|
||||
// Check for unsaved changes first - these indicate modifications we don't know about
|
||||
if is_dirty {
|
||||
anyhow::bail!(
|
||||
"This file cannot be written to because it has unsaved changes. \
|
||||
Please end the current conversation immediately by telling the user you want to write to this file (mention its path explicitly) but you can't write to it because it has unsaved changes. \
|
||||
Ask the user to save that buffer's changes and to inform you when it's ok to proceed."
|
||||
);
|
||||
}
|
||||
|
||||
// Check if the file was modified on disk since we last read it
|
||||
if let (Some(last_read), Some(current)) = (last_read_mtime, current_mtime) {
|
||||
// MTime can be unreliable for comparisons, so our newtype intentionally
|
||||
// doesn't support comparing them. If the mtime at all different
|
||||
// (which could be because of a modification or because e.g. system clock changed),
|
||||
// we pessimistically assume it was modified.
|
||||
if current != last_read {
|
||||
anyhow::bail!(
|
||||
"The file {} has been modified since you last read it. \
|
||||
Please read the file again to get the current state before editing it.",
|
||||
input.path.display()
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let diff = cx.new(|cx| Diff::new(buffer.clone(), cx))?;
|
||||
event_stream.update_diff(diff.clone());
|
||||
let _finalize_diff = util::defer({
|
||||
@@ -384,11 +355,10 @@ impl AgentTool for EditFileTool {
|
||||
range.start.to_point(&buffer.snapshot()).row
|
||||
}).ok();
|
||||
if let Some(abs_path) = abs_path.clone() {
|
||||
let mut location = ToolCallLocation::new(abs_path);
|
||||
if let Some(line) = line {
|
||||
location = location.line(line);
|
||||
}
|
||||
event_stream.update_fields(ToolCallUpdateFields::new().locations(vec![location]));
|
||||
event_stream.update_fields(ToolCallUpdateFields {
|
||||
locations: Some(vec![ToolCallLocation { path: abs_path, line, meta: None }]),
|
||||
..Default::default()
|
||||
});
|
||||
}
|
||||
emitted_location = true;
|
||||
}
|
||||
@@ -451,17 +421,6 @@ impl AgentTool for EditFileTool {
|
||||
log.buffer_edited(buffer.clone(), cx);
|
||||
})?;
|
||||
|
||||
// Update the recorded read time after a successful edit so consecutive edits work
|
||||
if let Some(abs_path) = abs_path.as_ref() {
|
||||
if let Some(new_mtime) = buffer.read_with(cx, |buffer, _| {
|
||||
buffer.file().and_then(|file| file.disk_state().mtime())
|
||||
})? {
|
||||
self.thread.update(cx, |thread, _| {
|
||||
thread.file_read_times.insert(abs_path.to_path_buf(), new_mtime);
|
||||
})?;
|
||||
}
|
||||
}
|
||||
|
||||
let new_snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?;
|
||||
let (new_text, unified_diff) = cx
|
||||
.background_spawn({
|
||||
@@ -603,6 +562,7 @@ fn resolve_path(
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::{ContextServerRegistry, Templates};
|
||||
use client::TelemetrySettings;
|
||||
use fs::Fs;
|
||||
use gpui::{TestAppContext, UpdateGlobal};
|
||||
use language_model::fake_provider::FakeLanguageModel;
|
||||
@@ -1789,426 +1749,14 @@ mod tests {
|
||||
}
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_file_read_times_tracking(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let fs = project::FakeFs::new(cx.executor());
|
||||
fs.insert_tree(
|
||||
"/root",
|
||||
json!({
|
||||
"test.txt": "original content"
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
|
||||
let context_server_registry =
|
||||
cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
|
||||
let model = Arc::new(FakeLanguageModel::default());
|
||||
let thread = cx.new(|cx| {
|
||||
Thread::new(
|
||||
project.clone(),
|
||||
cx.new(|_cx| ProjectContext::default()),
|
||||
context_server_registry,
|
||||
Templates::new(),
|
||||
Some(model.clone()),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone());
|
||||
|
||||
// Initially, file_read_times should be empty
|
||||
let is_empty = thread.read_with(cx, |thread, _| thread.file_read_times.is_empty());
|
||||
assert!(is_empty, "file_read_times should start empty");
|
||||
|
||||
// Create read tool
|
||||
let read_tool = Arc::new(crate::ReadFileTool::new(
|
||||
thread.downgrade(),
|
||||
project.clone(),
|
||||
action_log,
|
||||
));
|
||||
|
||||
// Read the file to record the read time
|
||||
cx.update(|cx| {
|
||||
read_tool.clone().run(
|
||||
crate::ReadFileToolInput {
|
||||
path: "root/test.txt".to_string(),
|
||||
start_line: None,
|
||||
end_line: None,
|
||||
},
|
||||
ToolCallEventStream::test().0,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// Verify that file_read_times now contains an entry for the file
|
||||
let has_entry = thread.read_with(cx, |thread, _| {
|
||||
thread.file_read_times.len() == 1
|
||||
&& thread
|
||||
.file_read_times
|
||||
.keys()
|
||||
.any(|path| path.ends_with("test.txt"))
|
||||
});
|
||||
assert!(
|
||||
has_entry,
|
||||
"file_read_times should contain an entry after reading the file"
|
||||
);
|
||||
|
||||
// Read the file again - should update the entry
|
||||
cx.update(|cx| {
|
||||
read_tool.clone().run(
|
||||
crate::ReadFileToolInput {
|
||||
path: "root/test.txt".to_string(),
|
||||
start_line: None,
|
||||
end_line: None,
|
||||
},
|
||||
ToolCallEventStream::test().0,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// Should still have exactly one entry
|
||||
let has_one_entry = thread.read_with(cx, |thread, _| thread.file_read_times.len() == 1);
|
||||
assert!(
|
||||
has_one_entry,
|
||||
"file_read_times should still have one entry after re-reading"
|
||||
);
|
||||
}
|
||||
|
||||
fn init_test(cx: &mut TestAppContext) {
|
||||
cx.update(|cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
language::init(cx);
|
||||
TelemetrySettings::register(cx);
|
||||
agent_settings::AgentSettings::register(cx);
|
||||
Project::init_settings(cx);
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_consecutive_edits_work(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let fs = project::FakeFs::new(cx.executor());
|
||||
fs.insert_tree(
|
||||
"/root",
|
||||
json!({
|
||||
"test.txt": "original content"
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
|
||||
let context_server_registry =
|
||||
cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
|
||||
let model = Arc::new(FakeLanguageModel::default());
|
||||
let thread = cx.new(|cx| {
|
||||
Thread::new(
|
||||
project.clone(),
|
||||
cx.new(|_cx| ProjectContext::default()),
|
||||
context_server_registry,
|
||||
Templates::new(),
|
||||
Some(model.clone()),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
let languages = project.read_with(cx, |project, _| project.languages().clone());
|
||||
let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone());
|
||||
|
||||
let read_tool = Arc::new(crate::ReadFileTool::new(
|
||||
thread.downgrade(),
|
||||
project.clone(),
|
||||
action_log,
|
||||
));
|
||||
let edit_tool = Arc::new(EditFileTool::new(
|
||||
project.clone(),
|
||||
thread.downgrade(),
|
||||
languages,
|
||||
Templates::new(),
|
||||
));
|
||||
|
||||
// Read the file first
|
||||
cx.update(|cx| {
|
||||
read_tool.clone().run(
|
||||
crate::ReadFileToolInput {
|
||||
path: "root/test.txt".to_string(),
|
||||
start_line: None,
|
||||
end_line: None,
|
||||
},
|
||||
ToolCallEventStream::test().0,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// First edit should work
|
||||
let edit_result = {
|
||||
let edit_task = cx.update(|cx| {
|
||||
edit_tool.clone().run(
|
||||
EditFileToolInput {
|
||||
display_description: "First edit".into(),
|
||||
path: "root/test.txt".into(),
|
||||
mode: EditFileMode::Edit,
|
||||
},
|
||||
ToolCallEventStream::test().0,
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
cx.executor().run_until_parked();
|
||||
model.send_last_completion_stream_text_chunk(
|
||||
"<old_text>original content</old_text><new_text>modified content</new_text>"
|
||||
.to_string(),
|
||||
);
|
||||
model.end_last_completion_stream();
|
||||
|
||||
edit_task.await
|
||||
};
|
||||
assert!(
|
||||
edit_result.is_ok(),
|
||||
"First edit should succeed, got error: {:?}",
|
||||
edit_result.as_ref().err()
|
||||
);
|
||||
|
||||
// Second edit should also work because the edit updated the recorded read time
|
||||
let edit_result = {
|
||||
let edit_task = cx.update(|cx| {
|
||||
edit_tool.clone().run(
|
||||
EditFileToolInput {
|
||||
display_description: "Second edit".into(),
|
||||
path: "root/test.txt".into(),
|
||||
mode: EditFileMode::Edit,
|
||||
},
|
||||
ToolCallEventStream::test().0,
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
cx.executor().run_until_parked();
|
||||
model.send_last_completion_stream_text_chunk(
|
||||
"<old_text>modified content</old_text><new_text>further modified content</new_text>".to_string(),
|
||||
);
|
||||
model.end_last_completion_stream();
|
||||
|
||||
edit_task.await
|
||||
};
|
||||
assert!(
|
||||
edit_result.is_ok(),
|
||||
"Second consecutive edit should succeed, got error: {:?}",
|
||||
edit_result.as_ref().err()
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_external_modification_detected(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let fs = project::FakeFs::new(cx.executor());
|
||||
fs.insert_tree(
|
||||
"/root",
|
||||
json!({
|
||||
"test.txt": "original content"
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
|
||||
let context_server_registry =
|
||||
cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
|
||||
let model = Arc::new(FakeLanguageModel::default());
|
||||
let thread = cx.new(|cx| {
|
||||
Thread::new(
|
||||
project.clone(),
|
||||
cx.new(|_cx| ProjectContext::default()),
|
||||
context_server_registry,
|
||||
Templates::new(),
|
||||
Some(model.clone()),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
let languages = project.read_with(cx, |project, _| project.languages().clone());
|
||||
let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone());
|
||||
|
||||
let read_tool = Arc::new(crate::ReadFileTool::new(
|
||||
thread.downgrade(),
|
||||
project.clone(),
|
||||
action_log,
|
||||
));
|
||||
let edit_tool = Arc::new(EditFileTool::new(
|
||||
project.clone(),
|
||||
thread.downgrade(),
|
||||
languages,
|
||||
Templates::new(),
|
||||
));
|
||||
|
||||
// Read the file first
|
||||
cx.update(|cx| {
|
||||
read_tool.clone().run(
|
||||
crate::ReadFileToolInput {
|
||||
path: "root/test.txt".to_string(),
|
||||
start_line: None,
|
||||
end_line: None,
|
||||
},
|
||||
ToolCallEventStream::test().0,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// Simulate external modification - advance time and save file
|
||||
cx.background_executor
|
||||
.advance_clock(std::time::Duration::from_secs(2));
|
||||
fs.save(
|
||||
path!("/root/test.txt").as_ref(),
|
||||
&"externally modified content".into(),
|
||||
language::LineEnding::Unix,
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// Reload the buffer to pick up the new mtime
|
||||
let project_path = project
|
||||
.read_with(cx, |project, cx| {
|
||||
project.find_project_path("root/test.txt", cx)
|
||||
})
|
||||
.expect("Should find project path");
|
||||
let buffer = project
|
||||
.update(cx, |project, cx| project.open_buffer(project_path, cx))
|
||||
.await
|
||||
.unwrap();
|
||||
buffer
|
||||
.update(cx, |buffer, cx| buffer.reload(cx))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
cx.executor().run_until_parked();
|
||||
|
||||
// Try to edit - should fail because file was modified externally
|
||||
let result = cx
|
||||
.update(|cx| {
|
||||
edit_tool.clone().run(
|
||||
EditFileToolInput {
|
||||
display_description: "Edit after external change".into(),
|
||||
path: "root/test.txt".into(),
|
||||
mode: EditFileMode::Edit,
|
||||
},
|
||||
ToolCallEventStream::test().0,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await;
|
||||
|
||||
assert!(
|
||||
result.is_err(),
|
||||
"Edit should fail after external modification"
|
||||
);
|
||||
let error_msg = result.unwrap_err().to_string();
|
||||
assert!(
|
||||
error_msg.contains("has been modified since you last read it"),
|
||||
"Error should mention file modification, got: {}",
|
||||
error_msg
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_dirty_buffer_detected(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let fs = project::FakeFs::new(cx.executor());
|
||||
fs.insert_tree(
|
||||
"/root",
|
||||
json!({
|
||||
"test.txt": "original content"
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
|
||||
let context_server_registry =
|
||||
cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
|
||||
let model = Arc::new(FakeLanguageModel::default());
|
||||
let thread = cx.new(|cx| {
|
||||
Thread::new(
|
||||
project.clone(),
|
||||
cx.new(|_cx| ProjectContext::default()),
|
||||
context_server_registry,
|
||||
Templates::new(),
|
||||
Some(model.clone()),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
let languages = project.read_with(cx, |project, _| project.languages().clone());
|
||||
let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone());
|
||||
|
||||
let read_tool = Arc::new(crate::ReadFileTool::new(
|
||||
thread.downgrade(),
|
||||
project.clone(),
|
||||
action_log,
|
||||
));
|
||||
let edit_tool = Arc::new(EditFileTool::new(
|
||||
project.clone(),
|
||||
thread.downgrade(),
|
||||
languages,
|
||||
Templates::new(),
|
||||
));
|
||||
|
||||
// Read the file first
|
||||
cx.update(|cx| {
|
||||
read_tool.clone().run(
|
||||
crate::ReadFileToolInput {
|
||||
path: "root/test.txt".to_string(),
|
||||
start_line: None,
|
||||
end_line: None,
|
||||
},
|
||||
ToolCallEventStream::test().0,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// Open the buffer and make it dirty by editing without saving
|
||||
let project_path = project
|
||||
.read_with(cx, |project, cx| {
|
||||
project.find_project_path("root/test.txt", cx)
|
||||
})
|
||||
.expect("Should find project path");
|
||||
let buffer = project
|
||||
.update(cx, |project, cx| project.open_buffer(project_path, cx))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// Make an in-memory edit to the buffer (making it dirty)
|
||||
buffer.update(cx, |buffer, cx| {
|
||||
let end_point = buffer.max_point();
|
||||
buffer.edit([(end_point..end_point, " added text")], None, cx);
|
||||
});
|
||||
|
||||
// Verify buffer is dirty
|
||||
let is_dirty = buffer.read_with(cx, |buffer, _| buffer.is_dirty());
|
||||
assert!(is_dirty, "Buffer should be dirty after in-memory edit");
|
||||
|
||||
// Try to edit - should fail because buffer has unsaved changes
|
||||
let result = cx
|
||||
.update(|cx| {
|
||||
edit_tool.clone().run(
|
||||
EditFileToolInput {
|
||||
display_description: "Edit with dirty buffer".into(),
|
||||
path: "root/test.txt".into(),
|
||||
mode: EditFileMode::Edit,
|
||||
},
|
||||
ToolCallEventStream::test().0,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await;
|
||||
|
||||
assert!(result.is_err(), "Edit should fail when buffer is dirty");
|
||||
let error_msg = result.unwrap_err().to_string();
|
||||
assert!(
|
||||
error_msg.contains("cannot be written to because it has unsaved changes"),
|
||||
"Error should mention unsaved changes, got: {}",
|
||||
error_msg
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -118,29 +118,33 @@ impl AgentTool for FindPathTool {
|
||||
let paginated_matches: &[PathBuf] = &matches[cmp::min(input.offset, matches.len())
|
||||
..cmp::min(input.offset + RESULTS_PER_PAGE, matches.len())];
|
||||
|
||||
event_stream.update_fields(
|
||||
acp::ToolCallUpdateFields::new()
|
||||
.title(if paginated_matches.is_empty() {
|
||||
"No matches".into()
|
||||
} else if paginated_matches.len() == 1 {
|
||||
"1 match".into()
|
||||
} else {
|
||||
format!("{} matches", paginated_matches.len())
|
||||
})
|
||||
.content(
|
||||
paginated_matches
|
||||
.iter()
|
||||
.map(|path| {
|
||||
acp::ToolCallContent::Content(acp::Content::new(
|
||||
acp::ContentBlock::ResourceLink(acp::ResourceLink::new(
|
||||
path.to_string_lossy(),
|
||||
format!("file://{}", path.display()),
|
||||
)),
|
||||
))
|
||||
})
|
||||
.collect(),
|
||||
),
|
||||
);
|
||||
event_stream.update_fields(acp::ToolCallUpdateFields {
|
||||
title: Some(if paginated_matches.is_empty() {
|
||||
"No matches".into()
|
||||
} else if paginated_matches.len() == 1 {
|
||||
"1 match".into()
|
||||
} else {
|
||||
format!("{} matches", paginated_matches.len())
|
||||
}),
|
||||
content: Some(
|
||||
paginated_matches
|
||||
.iter()
|
||||
.map(|path| acp::ToolCallContent::Content {
|
||||
content: acp::ContentBlock::ResourceLink(acp::ResourceLink {
|
||||
uri: format!("file://{}", path.display()),
|
||||
name: path.to_string_lossy().into(),
|
||||
annotations: None,
|
||||
description: None,
|
||||
mime_type: None,
|
||||
size: None,
|
||||
title: None,
|
||||
meta: None,
|
||||
}),
|
||||
})
|
||||
.collect(),
|
||||
),
|
||||
..Default::default()
|
||||
});
|
||||
|
||||
Ok(FindPathToolOutput {
|
||||
offset: input.offset,
|
||||
@@ -173,7 +177,7 @@ fn search_paths(glob: &str, project: Entity<Project>, cx: &mut App) -> Task<Resu
|
||||
let mut results = Vec::new();
|
||||
for snapshot in snapshots {
|
||||
for entry in snapshot.entries(false, 0) {
|
||||
if path_matcher.is_match(&snapshot.root_name().join(&entry.path)) {
|
||||
if path_matcher.is_match(snapshot.root_name().join(&entry.path).as_std_path()) {
|
||||
results.push(snapshot.absolutize(&entry.path));
|
||||
}
|
||||
}
|
||||
@@ -242,6 +246,8 @@ mod test {
|
||||
cx.update(|cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
language::init(cx);
|
||||
Project::init_settings(cx);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -32,21 +32,8 @@ pub struct GrepToolInput {
|
||||
/// Do NOT specify a path here! This will only be matched against the code **content**.
|
||||
pub regex: String,
|
||||
/// A glob pattern for the paths of files to include in the search.
|
||||
/// Supports standard glob patterns like "**/*.rs" or "frontend/src/**/*.ts".
|
||||
/// Supports standard glob patterns like "**/*.rs" or "src/**/*.ts".
|
||||
/// If omitted, all files in the project will be searched.
|
||||
///
|
||||
/// The glob pattern is matched against the full path including the project root directory.
|
||||
///
|
||||
/// <example>
|
||||
/// If the project has the following root directories:
|
||||
///
|
||||
/// - /a/b/backend
|
||||
/// - /c/d/frontend
|
||||
///
|
||||
/// Use "backend/**/*.rs" to search only Rust files in the backend root directory.
|
||||
/// Use "frontend/src/**/*.ts" to search TypeScript files only in the frontend root directory (sub-directory "src").
|
||||
/// Use "**/*.rs" to search Rust files across all root directories.
|
||||
/// </example>
|
||||
pub include_pattern: Option<String>,
|
||||
/// Optional starting position for paginated results (0-based).
|
||||
/// When not provided, starts from the beginning.
|
||||
@@ -145,7 +132,8 @@ impl AgentTool for GrepTool {
|
||||
let exclude_patterns = global_settings
|
||||
.file_scan_exclusions
|
||||
.sources()
|
||||
.chain(global_settings.private_files.sources());
|
||||
.iter()
|
||||
.chain(global_settings.private_files.sources().iter());
|
||||
|
||||
match PathMatcher::new(exclude_patterns, path_style) {
|
||||
Ok(matcher) => matcher,
|
||||
@@ -790,6 +778,8 @@ mod tests {
|
||||
cx.update(|cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
language::init(cx);
|
||||
Project::init_settings(cx);
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@@ -223,6 +223,8 @@ mod tests {
|
||||
cx.update(|cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
language::init(cx);
|
||||
Project::init_settings(cx);
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@@ -163,6 +163,8 @@ mod tests {
|
||||
cx.update(|cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
language::init(cx);
|
||||
Project::init_settings(cx);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
use action_log::ActionLog;
|
||||
use agent_client_protocol::{self as acp, ToolCallUpdateFields};
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use gpui::{App, Entity, SharedString, Task, WeakEntity};
|
||||
use gpui::{App, Entity, SharedString, Task};
|
||||
use indoc::formatdoc;
|
||||
use language::Point;
|
||||
use language_model::{LanguageModelImage, LanguageModelToolResultContent};
|
||||
@@ -12,14 +12,11 @@ use settings::Settings;
|
||||
use std::sync::Arc;
|
||||
use util::markdown::MarkdownCodeBlock;
|
||||
|
||||
use crate::{AgentTool, Thread, ToolCallEventStream, outline};
|
||||
use crate::{AgentTool, ToolCallEventStream, outline};
|
||||
|
||||
/// Reads the content of the given file in the project.
|
||||
///
|
||||
/// - Never attempt to read a path that hasn't been previously mentioned.
|
||||
/// - For large files, this tool returns a file outline with symbol names and line numbers instead of the full content.
|
||||
/// This outline IS a successful response - use the line numbers to read specific sections with start_line/end_line.
|
||||
/// Do NOT retry reading the same file without line numbers if you receive an outline.
|
||||
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct ReadFileToolInput {
|
||||
/// The relative path of the file to read.
|
||||
@@ -45,19 +42,13 @@ pub struct ReadFileToolInput {
|
||||
}
|
||||
|
||||
pub struct ReadFileTool {
|
||||
thread: WeakEntity<Thread>,
|
||||
project: Entity<Project>,
|
||||
action_log: Entity<ActionLog>,
|
||||
}
|
||||
|
||||
impl ReadFileTool {
|
||||
pub fn new(
|
||||
thread: WeakEntity<Thread>,
|
||||
project: Entity<Project>,
|
||||
action_log: Entity<ActionLog>,
|
||||
) -> Self {
|
||||
pub fn new(project: Entity<Project>, action_log: Entity<ActionLog>) -> Self {
|
||||
Self {
|
||||
thread,
|
||||
project,
|
||||
action_log,
|
||||
}
|
||||
@@ -152,12 +143,15 @@ impl AgentTool for ReadFileTool {
|
||||
}
|
||||
|
||||
let file_path = input.path.clone();
|
||||
let mut location = acp::ToolCallLocation::new(&abs_path);
|
||||
if let Some(line) = input.start_line {
|
||||
location = location.line(line.saturating_sub(1));
|
||||
}
|
||||
|
||||
event_stream.update_fields(ToolCallUpdateFields::new().locations(vec![location]));
|
||||
event_stream.update_fields(ToolCallUpdateFields {
|
||||
locations: Some(vec![acp::ToolCallLocation {
|
||||
path: abs_path.clone(),
|
||||
line: input.start_line.map(|line| line.saturating_sub(1)),
|
||||
meta: None,
|
||||
}]),
|
||||
..Default::default()
|
||||
});
|
||||
|
||||
if image_store::is_image_file(&self.project, &project_path, cx) {
|
||||
return cx.spawn(async move |cx| {
|
||||
@@ -201,17 +195,6 @@ impl AgentTool for ReadFileTool {
|
||||
anyhow::bail!("{file_path} not found");
|
||||
}
|
||||
|
||||
// Record the file read time and mtime
|
||||
if let Some(mtime) = buffer.read_with(cx, |buffer, _| {
|
||||
buffer.file().and_then(|file| file.disk_state().mtime())
|
||||
})? {
|
||||
self.thread
|
||||
.update(cx, |thread, _| {
|
||||
thread.file_read_times.insert(abs_path.to_path_buf(), mtime);
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
|
||||
let mut anchor = None;
|
||||
|
||||
// Check if specific line ranges are provided
|
||||
@@ -254,15 +237,16 @@ impl AgentTool for ReadFileTool {
|
||||
|
||||
if buffer_content.is_outline {
|
||||
Ok(formatdoc! {"
|
||||
SUCCESS: File outline retrieved. This file is too large to read all at once, so the outline below shows the file's structure with line numbers.
|
||||
|
||||
IMPORTANT: Do NOT retry this call without line numbers - you will get the same outline.
|
||||
Instead, use the line numbers below to read specific sections by calling this tool again with start_line and end_line parameters.
|
||||
This file was too big to read all at once.
|
||||
|
||||
{}
|
||||
|
||||
NEXT STEPS: To read a specific symbol's implementation, call read_file with the same path plus start_line and end_line from the outline above.
|
||||
For example, to read a function shown as [L100-150], use start_line: 100 and end_line: 150.", buffer_content.text
|
||||
Using the line numbers in this outline, you can call this tool again
|
||||
while specifying the start_line and end_line fields to see the
|
||||
implementations of symbols in the outline.
|
||||
|
||||
Alternatively, you can fall back to the `grep` tool (if available)
|
||||
to search the file for specific content.", buffer_content.text
|
||||
}
|
||||
.into())
|
||||
} else {
|
||||
@@ -274,9 +258,7 @@ impl AgentTool for ReadFileTool {
|
||||
project.set_agent_location(
|
||||
Some(AgentLocation {
|
||||
buffer: buffer.downgrade(),
|
||||
position: anchor.unwrap_or_else(|| {
|
||||
text::Anchor::min_for_buffer(buffer.read(cx).remote_id())
|
||||
}),
|
||||
position: anchor.unwrap_or(text::Anchor::MIN),
|
||||
}),
|
||||
cx,
|
||||
);
|
||||
@@ -286,9 +268,12 @@ impl AgentTool for ReadFileTool {
|
||||
text,
|
||||
}
|
||||
.to_string();
|
||||
event_stream.update_fields(ToolCallUpdateFields::new().content(vec![
|
||||
acp::ToolCallContent::Content(acp::Content::new(markdown)),
|
||||
]));
|
||||
event_stream.update_fields(ToolCallUpdateFields {
|
||||
content: Some(vec![acp::ToolCallContent::Content {
|
||||
content: markdown.into(),
|
||||
}]),
|
||||
..Default::default()
|
||||
})
|
||||
}
|
||||
})?;
|
||||
|
||||
@@ -300,15 +285,11 @@ impl AgentTool for ReadFileTool {
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
use crate::{ContextServerRegistry, Templates, Thread};
|
||||
use gpui::{AppContext, TestAppContext, UpdateGlobal as _};
|
||||
use language::{Language, LanguageConfig, LanguageMatcher, tree_sitter_rust};
|
||||
use language_model::fake_provider::FakeLanguageModel;
|
||||
use project::{FakeFs, Project};
|
||||
use prompt_store::ProjectContext;
|
||||
use serde_json::json;
|
||||
use settings::SettingsStore;
|
||||
use std::sync::Arc;
|
||||
use util::path;
|
||||
|
||||
#[gpui::test]
|
||||
@@ -319,20 +300,7 @@ mod test {
|
||||
fs.insert_tree(path!("/root"), json!({})).await;
|
||||
let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
|
||||
let action_log = cx.new(|_| ActionLog::new(project.clone()));
|
||||
let context_server_registry =
|
||||
cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
|
||||
let model = Arc::new(FakeLanguageModel::default());
|
||||
let thread = cx.new(|cx| {
|
||||
Thread::new(
|
||||
project.clone(),
|
||||
cx.new(|_cx| ProjectContext::default()),
|
||||
context_server_registry,
|
||||
Templates::new(),
|
||||
Some(model),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
let tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log));
|
||||
let tool = Arc::new(ReadFileTool::new(project, action_log));
|
||||
let (event_stream, _) = ToolCallEventStream::test();
|
||||
|
||||
let result = cx
|
||||
@@ -365,20 +333,7 @@ mod test {
|
||||
.await;
|
||||
let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
|
||||
let action_log = cx.new(|_| ActionLog::new(project.clone()));
|
||||
let context_server_registry =
|
||||
cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
|
||||
let model = Arc::new(FakeLanguageModel::default());
|
||||
let thread = cx.new(|cx| {
|
||||
Thread::new(
|
||||
project.clone(),
|
||||
cx.new(|_cx| ProjectContext::default()),
|
||||
context_server_registry,
|
||||
Templates::new(),
|
||||
Some(model),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
let tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log));
|
||||
let tool = Arc::new(ReadFileTool::new(project, action_log));
|
||||
let result = cx
|
||||
.update(|cx| {
|
||||
let input = ReadFileToolInput {
|
||||
@@ -408,20 +363,7 @@ mod test {
|
||||
let language_registry = project.read_with(cx, |project, _| project.languages().clone());
|
||||
language_registry.add(Arc::new(rust_lang()));
|
||||
let action_log = cx.new(|_| ActionLog::new(project.clone()));
|
||||
let context_server_registry =
|
||||
cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
|
||||
let model = Arc::new(FakeLanguageModel::default());
|
||||
let thread = cx.new(|cx| {
|
||||
Thread::new(
|
||||
project.clone(),
|
||||
cx.new(|_cx| ProjectContext::default()),
|
||||
context_server_registry,
|
||||
Templates::new(),
|
||||
Some(model),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
let tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log));
|
||||
let tool = Arc::new(ReadFileTool::new(project, action_log));
|
||||
let result = cx
|
||||
.update(|cx| {
|
||||
let input = ReadFileToolInput {
|
||||
@@ -436,7 +378,7 @@ mod test {
let content = result.to_str().unwrap();

assert_eq!(
content.lines().skip(7).take(6).collect::<Vec<_>>(),
content.lines().skip(4).take(6).collect::<Vec<_>>(),
vec![
"struct Test0 [L1-4]",
" a [L2]",
@@ -471,7 +413,7 @@ mod test {
pretty_assertions::assert_eq!(
content
.lines()
.skip(7)
.skip(4)
.take(expected_content.len())
.collect::<Vec<_>>(),
expected_content
@@ -493,20 +435,7 @@ mod test {
let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;

let action_log = cx.new(|_| ActionLog::new(project.clone()));
let context_server_registry =
cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
let model = Arc::new(FakeLanguageModel::default());
let thread = cx.new(|cx| {
Thread::new(
project.clone(),
cx.new(|_cx| ProjectContext::default()),
context_server_registry,
Templates::new(),
Some(model),
cx,
)
});
let tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log));
let tool = Arc::new(ReadFileTool::new(project, action_log));
let result = cx
.update(|cx| {
let input = ReadFileToolInput {
@@ -534,20 +463,7 @@ mod test {
.await;
let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
let action_log = cx.new(|_| ActionLog::new(project.clone()));
let context_server_registry =
cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
let model = Arc::new(FakeLanguageModel::default());
let thread = cx.new(|cx| {
Thread::new(
project.clone(),
cx.new(|_cx| ProjectContext::default()),
context_server_registry,
Templates::new(),
Some(model),
cx,
)
});
let tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log));
let tool = Arc::new(ReadFileTool::new(project, action_log));

// start_line of 0 should be treated as 1
let result = cx
@@ -593,6 +509,8 @@ mod test {
cx.update(|cx| {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
language::init(cx);
Project::init_settings(cx);
});
}

@@ -691,20 +609,7 @@ mod test {

let project = Project::test(fs.clone(), [path!("/project_root").as_ref()], cx).await;
let action_log = cx.new(|_| ActionLog::new(project.clone()));
let context_server_registry =
cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
let model = Arc::new(FakeLanguageModel::default());
let thread = cx.new(|cx| {
Thread::new(
project.clone(),
cx.new(|_cx| ProjectContext::default()),
context_server_registry,
Templates::new(),
Some(model),
cx,
)
});
let tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log));
let tool = Arc::new(ReadFileTool::new(project, action_log));

// Reading a file outside the project worktree should fail
let result = cx
@@ -918,24 +823,7 @@ mod test {
.await;

let action_log = cx.new(|_| ActionLog::new(project.clone()));
let context_server_registry =
cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
let model = Arc::new(FakeLanguageModel::default());
let thread = cx.new(|cx| {
Thread::new(
project.clone(),
cx.new(|_cx| ProjectContext::default()),
context_server_registry,
Templates::new(),
Some(model),
cx,
)
});
let tool = Arc::new(ReadFileTool::new(
thread.downgrade(),
project.clone(),
action_log.clone(),
));
let tool = Arc::new(ReadFileTool::new(project.clone(), action_log.clone()));

// Test reading allowed files in worktree1
let result = cx

@@ -112,9 +112,10 @@ impl AgentTool for TerminalTool {
.await?;

let terminal_id = terminal.id(cx)?;
event_stream.update_fields(acp::ToolCallUpdateFields::new().content(vec![
acp::ToolCallContent::Terminal(acp::Terminal::new(terminal_id)),
]));
event_stream.update_fields(acp::ToolCallUpdateFields {
content: Some(vec![acp::ToolCallContent::Terminal { terminal_id }]),
..Default::default()
});

let exit_status = terminal.wait_for_exit(cx)?.await;
let output = terminal.current_output(cx)?;

@@ -43,8 +43,10 @@ impl AgentTool for ThinkingTool {
event_stream: ToolCallEventStream,
_cx: &mut App,
) -> Task<Result<String>> {
event_stream
.update_fields(acp::ToolCallUpdateFields::new().content(vec![input.content.into()]));
event_stream.update_fields(acp::ToolCallUpdateFields {
content: Some(vec![input.content.into()]),
..Default::default()
});
Task::ready(Ok("Finished thinking.".to_string()))
}
}

@@ -76,8 +76,10 @@ impl AgentTool for WebSearchTool {
let response = match search_task.await {
Ok(response) => response,
Err(err) => {
event_stream
.update_fields(acp::ToolCallUpdateFields::new().title("Web Search Failed"));
event_stream.update_fields(acp::ToolCallUpdateFields {
title: Some("Web Search Failed".to_string()),
..Default::default()
});
return Err(err);
}
};
@@ -105,23 +107,26 @@ fn emit_update(response: &WebSearchResponse, event_stream: &ToolCallEventStream)
} else {
format!("{} results", response.results.len())
};
event_stream.update_fields(
acp::ToolCallUpdateFields::new()
.title(format!("Searched the web: {result_text}"))
.content(
response
.results
.iter()
.map(|result| {
acp::ToolCallContent::Content(acp::Content::new(
acp::ContentBlock::ResourceLink(
acp::ResourceLink::new(result.title.clone(), result.url.clone())
.title(result.title.clone())
.description(result.text.clone()),
),
))
})
.collect(),
),
);
event_stream.update_fields(acp::ToolCallUpdateFields {
title: Some(format!("Searched the web: {result_text}")),
content: Some(
response
.results
.iter()
.map(|result| acp::ToolCallContent::Content {
content: acp::ContentBlock::ResourceLink(acp::ResourceLink {
name: result.title.clone(),
uri: result.url.clone(),
title: Some(result.title.clone()),
description: Some(result.text.clone()),
mime_type: None,
annotations: None,
size: None,
meta: None,
}),
})
.collect(),
),
..Default::default()
});
}

@@ -21,6 +21,7 @@ acp_tools.workspace = true
acp_thread.workspace = true
action_log.workspace = true
agent-client-protocol.workspace = true
agent_settings.workspace = true
anyhow.workspace = true
async-trait.workspace = true
client.workspace = true
@@ -32,6 +33,7 @@ gpui.workspace = true
gpui_tokio = { workspace = true, optional = true }
http_client.workspace = true
indoc.workspace = true
language.workspace = true
language_model.workspace = true
language_models.workspace = true
log.workspace = true

@@ -29,13 +29,11 @@ pub struct UnsupportedVersion;

pub struct AcpConnection {
server_name: SharedString,
telemetry_id: &'static str,
connection: Rc<acp::ClientSideConnection>,
sessions: Rc<RefCell<HashMap<acp::SessionId, AcpSession>>>,
auth_methods: Vec<acp::AuthMethod>,
agent_capabilities: acp::AgentCapabilities,
default_mode: Option<acp::SessionModeId>,
default_model: Option<acp::ModelId>,
root_dir: PathBuf,
// NB: Don't move this into the wait_task, since we need to ensure the process is
// killed on drop (setting kill_on_drop on the command seems to not always work).
@@ -54,21 +52,17 @@ pub struct AcpSession {

pub async fn connect(
server_name: SharedString,
telemetry_id: &'static str,
command: AgentServerCommand,
root_dir: &Path,
default_mode: Option<acp::SessionModeId>,
default_model: Option<acp::ModelId>,
is_remote: bool,
cx: &mut AsyncApp,
) -> Result<Rc<dyn AgentConnection>> {
let conn = AcpConnection::stdio(
server_name,
telemetry_id,
command.clone(),
root_dir,
default_mode,
default_model,
is_remote,
cx,
)
@@ -76,16 +70,14 @@ pub async fn connect(
Ok(Rc::new(conn) as _)
}

const MINIMUM_SUPPORTED_VERSION: acp::ProtocolVersion = acp::ProtocolVersion::V1;
const MINIMUM_SUPPORTED_VERSION: acp::ProtocolVersion = acp::V1;

impl AcpConnection {
pub async fn stdio(
server_name: SharedString,
telemetry_id: &'static str,
command: AgentServerCommand,
root_dir: &Path,
default_mode: Option<acp::SessionModeId>,
default_model: Option<acp::ModelId>,
is_remote: bool,
cx: &mut AsyncApp,
) -> Result<Self> {
@@ -140,7 +132,7 @@ impl AcpConnection {
while let Ok(n) = stderr.read_line(&mut line).await
&& n > 0
{
log::warn!("agent stderr: {}", line.trim());
log::warn!("agent stderr: {}", &line);
line.clear();
}
Ok(())
@@ -173,27 +165,29 @@ impl AcpConnection {
});
})?;

let mut client_info = acp::Implementation::new("zed", version);
if let Some(release_channel) = release_channel {
client_info = client_info.title(release_channel);
}
let response = connection
.initialize(
acp::InitializeRequest::new(acp::ProtocolVersion::V1)
.client_capabilities(
acp::ClientCapabilities::new()
.fs(acp::FileSystemCapability::new()
.read_text_file(true)
.write_text_file(true))
.terminal(true)
// Experimental: Allow for rendering terminal output from the agents
.meta(acp::Meta::from_iter([
("terminal_output".into(), true.into()),
("terminal-auth".into(), true.into()),
])),
)
.client_info(client_info),
)
.initialize(acp::InitializeRequest {
protocol_version: acp::VERSION,
client_capabilities: acp::ClientCapabilities {
fs: acp::FileSystemCapability {
read_text_file: true,
write_text_file: true,
meta: None,
},
terminal: true,
meta: Some(serde_json::json!({
// Experimental: Allow for rendering terminal output from the agents
"terminal_output": true,
"terminal-auth": true,
})),
},
client_info: Some(acp::Implementation {
name: "zed".to_owned(),
title: release_channel.map(|c| c.to_owned()),
version,
}),
meta: None,
})
.await?;

if response.protocol_version < MINIMUM_SUPPORTED_VERSION {
|
||||
@@ -205,11 +199,9 @@ impl AcpConnection {
|
||||
root_dir: root_dir.to_owned(),
|
||||
connection,
|
||||
server_name,
|
||||
telemetry_id,
|
||||
sessions,
|
||||
agent_capabilities: response.agent_capabilities,
|
||||
default_mode,
|
||||
default_model,
|
||||
_io_task: io_task,
|
||||
_wait_task: wait_task,
|
||||
_stderr_task: stderr_task,
|
||||
@@ -234,10 +226,6 @@ impl Drop for AcpConnection {
|
||||
}
|
||||
|
||||
impl AgentConnection for AcpConnection {
|
||||
fn telemetry_id(&self) -> &'static str {
|
||||
self.telemetry_id
|
||||
}
|
||||
|
||||
fn new_thread(
|
||||
self: Rc<Self>,
|
||||
project: Entity<Project>,
|
||||
@@ -248,7 +236,6 @@ impl AgentConnection for AcpConnection {
|
||||
let conn = self.connection.clone();
|
||||
let sessions = self.sessions.clone();
|
||||
let default_mode = self.default_mode.clone();
|
||||
let default_model = self.default_model.clone();
|
||||
let cwd = cwd.to_path_buf();
|
||||
let context_server_store = project.read(cx).context_server_store().read(cx);
|
||||
let mcp_servers = if project.read(cx).is_local() {
|
||||
@@ -257,37 +244,23 @@ impl AgentConnection for AcpConnection {
|
||||
.iter()
|
||||
.filter_map(|id| {
|
||||
let configuration = context_server_store.configuration_for_server(id)?;
|
||||
match &*configuration {
|
||||
project::context_server_store::ContextServerConfiguration::Custom {
|
||||
command,
|
||||
..
|
||||
}
|
||||
| project::context_server_store::ContextServerConfiguration::Extension {
|
||||
command,
|
||||
..
|
||||
} => Some(acp::McpServer::Stdio(
|
||||
acp::McpServerStdio::new(id.0.to_string(), &command.path)
|
||||
.args(command.args.clone())
|
||||
.env(if let Some(env) = command.env.as_ref() {
|
||||
env.iter()
|
||||
.map(|(name, value)| acp::EnvVariable::new(name, value))
|
||||
.collect()
|
||||
} else {
|
||||
vec![]
|
||||
}),
|
||||
)),
|
||||
project::context_server_store::ContextServerConfiguration::Http {
|
||||
url,
|
||||
headers,
|
||||
} => Some(acp::McpServer::Http(
|
||||
acp::McpServerHttp::new(id.0.to_string(), url.to_string()).headers(
|
||||
headers
|
||||
.iter()
|
||||
.map(|(name, value)| acp::HttpHeader::new(name, value))
|
||||
.collect(),
|
||||
),
|
||||
)),
|
||||
}
|
||||
let command = configuration.command();
|
||||
Some(acp::McpServer::Stdio {
|
||||
name: id.0.to_string(),
|
||||
command: command.path.clone(),
|
||||
args: command.args.clone(),
|
||||
env: if let Some(env) = command.env.as_ref() {
|
||||
env.iter()
|
||||
.map(|(name, value)| acp::EnvVariable {
|
||||
name: name.clone(),
|
||||
value: value.clone(),
|
||||
meta: None,
|
||||
})
|
||||
.collect()
|
||||
} else {
|
||||
vec![]
|
||||
},
|
||||
})
|
||||
})
|
||||
.collect()
|
||||
} else {
|
||||
@@ -299,7 +272,7 @@ impl AgentConnection for AcpConnection {
|
||||
|
||||
cx.spawn(async move |cx| {
|
||||
let response = conn
|
||||
.new_session(acp::NewSessionRequest::new(cwd).mcp_servers(mcp_servers))
|
||||
.new_session(acp::NewSessionRequest { mcp_servers, cwd, meta: None })
|
||||
.await
|
||||
.map_err(|err| {
|
||||
if err.code == acp::ErrorCode::AUTH_REQUIRED.code {
|
||||
@@ -330,9 +303,12 @@ impl AgentConnection for AcpConnection {
|
||||
let default_mode = default_mode.clone();
|
||||
let session_id = response.session_id.clone();
|
||||
let modes = modes.clone();
|
||||
let conn = conn.clone();
|
||||
async move |_| {
|
||||
let result = conn.set_session_mode(acp::SetSessionModeRequest::new(session_id, default_mode))
|
||||
let result = conn.set_session_mode(acp::SetSessionModeRequest {
|
||||
session_id,
|
||||
mode_id: default_mode,
|
||||
meta: None,
|
||||
})
|
||||
.await.log_err();
|
||||
|
||||
if result.is_none() {
|
||||
@@ -361,49 +337,6 @@ impl AgentConnection for AcpConnection {
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(default_model) = default_model {
|
||||
if let Some(models) = models.as_ref() {
|
||||
let mut models_ref = models.borrow_mut();
|
||||
let has_model = models_ref.available_models.iter().any(|model| model.model_id == default_model);
|
||||
|
||||
if has_model {
|
||||
let initial_model_id = models_ref.current_model_id.clone();
|
||||
|
||||
cx.spawn({
|
||||
let default_model = default_model.clone();
|
||||
let session_id = response.session_id.clone();
|
||||
let models = models.clone();
|
||||
let conn = conn.clone();
|
||||
async move |_| {
|
||||
let result = conn.set_session_model(acp::SetSessionModelRequest::new(session_id, default_model))
|
||||
.await.log_err();
|
||||
|
||||
if result.is_none() {
|
||||
models.borrow_mut().current_model_id = initial_model_id;
|
||||
}
|
||||
}
|
||||
}).detach();
|
||||
|
||||
models_ref.current_model_id = default_model;
|
||||
} else {
|
||||
let available_models = models_ref
|
||||
.available_models
|
||||
.iter()
|
||||
.map(|model| format!("- `{}`: {}", model.model_id, model.name))
|
||||
.collect::<Vec<_>>()
|
||||
.join("\n");
|
||||
|
||||
log::warn!(
|
||||
"`{default_model}` is not a valid {name} model. Available options:\n{available_models}",
|
||||
);
|
||||
}
|
||||
} else {
|
||||
log::warn!(
|
||||
"`{name}` does not support model selection, but `default_model` was set in settings.",
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
let session_id = response.session_id;
|
||||
let action_log = cx.new(|_| ActionLog::new(project.clone()))?;
|
||||
let thread = cx.new(|cx| {
|
||||
@@ -439,8 +372,12 @@ impl AgentConnection for AcpConnection {
|
||||
fn authenticate(&self, method_id: acp::AuthMethodId, cx: &mut App) -> Task<Result<()>> {
|
||||
let conn = self.connection.clone();
|
||||
cx.foreground_executor().spawn(async move {
|
||||
conn.authenticate(acp::AuthenticateRequest::new(method_id))
|
||||
.await?;
|
||||
conn.authenticate(acp::AuthenticateRequest {
|
||||
method_id: method_id.clone(),
|
||||
meta: None,
|
||||
})
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
})
|
||||
}
|
||||
@@ -494,7 +431,10 @@ impl AgentConnection for AcpConnection {
|
||||
&& (details.contains("This operation was aborted")
|
||||
|| details.contains("The user aborted a request"))
|
||||
{
|
||||
Ok(acp::PromptResponse::new(acp::StopReason::Cancelled))
|
||||
Ok(acp::PromptResponse {
|
||||
stop_reason: acp::StopReason::Cancelled,
|
||||
meta: None,
|
||||
})
|
||||
} else {
|
||||
Err(anyhow!(details))
|
||||
}
|
||||
@@ -511,7 +451,10 @@ impl AgentConnection for AcpConnection {
|
||||
session.suppress_abort_err = true;
|
||||
}
|
||||
let conn = self.connection.clone();
|
||||
let params = acp::CancelNotification::new(session_id.clone());
|
||||
let params = acp::CancelNotification {
|
||||
session_id: session_id.clone(),
|
||||
meta: None,
|
||||
};
|
||||
cx.foreground_executor()
|
||||
.spawn(async move { conn.cancel(params).await })
|
||||
.detach();
|
||||
@@ -592,7 +535,11 @@ impl acp_thread::AgentSessionModes for AcpSessionModes {
|
||||
let state = self.state.clone();
|
||||
cx.foreground_executor().spawn(async move {
|
||||
let result = connection
|
||||
.set_session_mode(acp::SetSessionModeRequest::new(session_id, mode_id))
|
||||
.set_session_mode(acp::SetSessionModeRequest {
|
||||
session_id,
|
||||
mode_id,
|
||||
meta: None,
|
||||
})
|
||||
.await;
|
||||
|
||||
if result.is_err() {
|
||||
@@ -651,7 +598,11 @@ impl acp_thread::AgentModelSelector for AcpModelSelector {
|
||||
let state = self.state.clone();
|
||||
cx.foreground_executor().spawn(async move {
|
||||
let result = connection
|
||||
.set_session_model(acp::SetSessionModelRequest::new(session_id, model_id))
|
||||
.set_session_model(acp::SetSessionModelRequest {
|
||||
session_id,
|
||||
model_id,
|
||||
meta: None,
|
||||
})
|
||||
.await;
|
||||
|
||||
if result.is_err() {
|
||||
@@ -713,7 +664,10 @@ impl acp::Client for ClientDelegate {
|
||||
|
||||
let outcome = task.await;
|
||||
|
||||
Ok(acp::RequestPermissionResponse::new(outcome))
|
||||
Ok(acp::RequestPermissionResponse {
|
||||
outcome,
|
||||
meta: None,
|
||||
})
|
||||
}
|
||||
|
||||
async fn write_text_file(
|
||||
@@ -745,7 +699,10 @@ impl acp::Client for ClientDelegate {
|
||||
|
||||
let content = task.await?;
|
||||
|
||||
Ok(acp::ReadTextFileResponse::new(content))
|
||||
Ok(acp::ReadTextFileResponse {
|
||||
content,
|
||||
meta: None,
|
||||
})
|
||||
}
|
||||
|
||||
async fn session_notification(
|
||||
@@ -780,7 +737,7 @@ impl acp::Client for ClientDelegate {
|
||||
if let Some(terminal_info) = meta.get("terminal_info") {
|
||||
if let Some(id_str) = terminal_info.get("terminal_id").and_then(|v| v.as_str())
|
||||
{
|
||||
let terminal_id = acp::TerminalId::new(id_str);
|
||||
let terminal_id = acp::TerminalId(id_str.into());
|
||||
let cwd = terminal_info
|
||||
.get("cwd")
|
||||
.and_then(|v| v.as_str().map(PathBuf::from));
|
||||
@@ -796,7 +753,7 @@ impl acp::Client for ClientDelegate {
|
||||
let lower = cx.new(|cx| builder.subscribe(cx));
|
||||
thread.on_terminal_provider_event(
|
||||
TerminalProviderEvent::Created {
|
||||
terminal_id,
|
||||
terminal_id: terminal_id.clone(),
|
||||
label: tc.title.clone(),
|
||||
cwd,
|
||||
output_byte_limit: None,
|
||||
@@ -821,12 +778,15 @@ impl acp::Client for ClientDelegate {
|
||||
if let Some(meta) = &tcu.meta {
|
||||
if let Some(term_out) = meta.get("terminal_output") {
|
||||
if let Some(id_str) = term_out.get("terminal_id").and_then(|v| v.as_str()) {
|
||||
let terminal_id = acp::TerminalId::new(id_str);
|
||||
let terminal_id = acp::TerminalId(id_str.into());
|
||||
if let Some(s) = term_out.get("data").and_then(|v| v.as_str()) {
|
||||
let data = s.as_bytes().to_vec();
|
||||
let _ = session.thread.update(&mut self.cx.clone(), |thread, cx| {
|
||||
thread.on_terminal_provider_event(
|
||||
TerminalProviderEvent::Output { terminal_id, data },
|
||||
TerminalProviderEvent::Output {
|
||||
terminal_id: terminal_id.clone(),
|
||||
data,
|
||||
},
|
||||
cx,
|
||||
);
|
||||
});
|
||||
@@ -837,19 +797,21 @@ impl acp::Client for ClientDelegate {
|
||||
// terminal_exit
|
||||
if let Some(term_exit) = meta.get("terminal_exit") {
|
||||
if let Some(id_str) = term_exit.get("terminal_id").and_then(|v| v.as_str()) {
|
||||
let terminal_id = acp::TerminalId::new(id_str);
|
||||
let mut status = acp::TerminalExitStatus::new();
|
||||
if let Some(code) = term_exit.get("exit_code").and_then(|v| v.as_u64()) {
|
||||
status = status.exit_code(code as u32)
|
||||
}
|
||||
if let Some(signal) = term_exit.get("signal").and_then(|v| v.as_str()) {
|
||||
status = status.signal(signal);
|
||||
}
|
||||
|
||||
let terminal_id = acp::TerminalId(id_str.into());
|
||||
let status = acp::TerminalExitStatus {
|
||||
exit_code: term_exit
|
||||
.get("exit_code")
|
||||
.and_then(|v| v.as_u64())
|
||||
.map(|i| i as u32),
|
||||
signal: term_exit
|
||||
.get("signal")
|
||||
.and_then(|v| v.as_str().map(|s| s.to_string())),
|
||||
meta: None,
|
||||
};
|
||||
let _ = session.thread.update(&mut self.cx.clone(), |thread, cx| {
|
||||
thread.on_terminal_provider_event(
|
||||
TerminalProviderEvent::Exit {
|
||||
terminal_id,
|
||||
terminal_id: terminal_id.clone(),
|
||||
status,
|
||||
},
|
||||
cx,
|
||||
@@ -886,7 +848,7 @@ impl acp::Client for ClientDelegate {
|
||||
// Register with renderer
|
||||
let terminal_entity = thread.update(&mut self.cx.clone(), |thread, cx| {
|
||||
thread.register_terminal_created(
|
||||
acp::TerminalId::new(uuid::Uuid::new_v4().to_string()),
|
||||
acp::TerminalId(uuid::Uuid::new_v4().to_string().into()),
|
||||
format!("{} {}", args.command, args.args.join(" ")),
|
||||
args.cwd.clone(),
|
||||
args.output_byte_limit,
|
||||
@@ -896,7 +858,10 @@ impl acp::Client for ClientDelegate {
|
||||
})?;
|
||||
let terminal_id =
|
||||
terminal_entity.read_with(&self.cx, |terminal, _| terminal.id().clone())?;
|
||||
Ok(acp::CreateTerminalResponse::new(terminal_id))
|
||||
Ok(acp::CreateTerminalResponse {
|
||||
terminal_id,
|
||||
meta: None,
|
||||
})
|
||||
}
|
||||
|
||||
async fn kill_terminal_command(
|
||||
@@ -957,7 +922,10 @@ impl acp::Client for ClientDelegate {
|
||||
})??
|
||||
.await;
|
||||
|
||||
Ok(acp::WaitForTerminalExitResponse::new(exit_status))
|
||||
Ok(acp::WaitForTerminalExitResponse {
|
||||
exit_status,
|
||||
meta: None,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -68,18 +68,6 @@ pub trait AgentServer: Send {
|
||||
) {
|
||||
}
|
||||
|
||||
fn default_model(&self, _cx: &mut App) -> Option<agent_client_protocol::ModelId> {
|
||||
None
|
||||
}
|
||||
|
||||
fn set_default_model(
|
||||
&self,
|
||||
_model_id: Option<agent_client_protocol::ModelId>,
|
||||
_fs: Arc<dyn Fs>,
|
||||
_cx: &mut App,
|
||||
) {
|
||||
}
|
||||
|
||||
fn connect(
|
||||
&self,
|
||||
root_dir: Option<&Path>,
|
||||
|
||||
@@ -41,7 +41,7 @@ impl AgentServer for ClaudeCode {
|
||||
|
||||
settings
|
||||
.as_ref()
|
||||
.and_then(|s| s.default_mode.clone().map(acp::SessionModeId::new))
|
||||
.and_then(|s| s.default_mode.clone().map(|m| acp::SessionModeId(m.into())))
|
||||
}
|
||||
|
||||
fn set_default_mode(&self, mode_id: Option<acp::SessionModeId>, fs: Arc<dyn Fs>, cx: &mut App) {
|
||||
@@ -55,27 +55,6 @@ impl AgentServer for ClaudeCode {
|
||||
});
|
||||
}
|
||||
|
||||
fn default_model(&self, cx: &mut App) -> Option<acp::ModelId> {
|
||||
let settings = cx.read_global(|settings: &SettingsStore, _| {
|
||||
settings.get::<AllAgentServersSettings>(None).claude.clone()
|
||||
});
|
||||
|
||||
settings
|
||||
.as_ref()
|
||||
.and_then(|s| s.default_model.clone().map(acp::ModelId::new))
|
||||
}
|
||||
|
||||
fn set_default_model(&self, model_id: Option<acp::ModelId>, fs: Arc<dyn Fs>, cx: &mut App) {
|
||||
update_settings_file(fs, cx, |settings, _| {
|
||||
settings
|
||||
.agent_servers
|
||||
.get_or_insert_default()
|
||||
.claude
|
||||
.get_or_insert_default()
|
||||
.default_model = model_id.map(|m| m.to_string())
|
||||
});
|
||||
}
|
||||
|
||||
fn connect(
|
||||
&self,
|
||||
root_dir: Option<&Path>,
|
||||
@@ -83,13 +62,11 @@ impl AgentServer for ClaudeCode {
|
||||
cx: &mut App,
|
||||
) -> Task<Result<(Rc<dyn AgentConnection>, Option<task::SpawnInTerminal>)>> {
|
||||
let name = self.name();
|
||||
let telemetry_id = self.telemetry_id();
|
||||
let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().into_owned());
|
||||
let is_remote = delegate.project.read(cx).is_via_remote_server();
|
||||
let store = delegate.store.downgrade();
|
||||
let extra_env = load_proxy_env(cx);
|
||||
let default_mode = self.default_mode(cx);
|
||||
let default_model = self.default_model(cx);
|
||||
|
||||
cx.spawn(async move |cx| {
|
||||
let (command, root_dir, login) = store
|
||||
@@ -108,11 +85,9 @@ impl AgentServer for ClaudeCode {
|
||||
.await?;
|
||||
let connection = crate::acp::connect(
|
||||
name,
|
||||
telemetry_id,
|
||||
command,
|
||||
root_dir.as_ref(),
|
||||
default_mode,
|
||||
default_model,
|
||||
is_remote,
|
||||
cx,
|
||||
)
|
||||
|
||||
@@ -42,7 +42,7 @@ impl AgentServer for Codex {
|
||||
|
||||
settings
|
||||
.as_ref()
|
||||
.and_then(|s| s.default_mode.clone().map(acp::SessionModeId::new))
|
||||
.and_then(|s| s.default_mode.clone().map(|m| acp::SessionModeId(m.into())))
|
||||
}
|
||||
|
||||
fn set_default_mode(&self, mode_id: Option<acp::SessionModeId>, fs: Arc<dyn Fs>, cx: &mut App) {
|
||||
@@ -56,27 +56,6 @@ impl AgentServer for Codex {
|
||||
});
|
||||
}
|
||||
|
||||
fn default_model(&self, cx: &mut App) -> Option<acp::ModelId> {
|
||||
let settings = cx.read_global(|settings: &SettingsStore, _| {
|
||||
settings.get::<AllAgentServersSettings>(None).codex.clone()
|
||||
});
|
||||
|
||||
settings
|
||||
.as_ref()
|
||||
.and_then(|s| s.default_model.clone().map(acp::ModelId::new))
|
||||
}
|
||||
|
||||
fn set_default_model(&self, model_id: Option<acp::ModelId>, fs: Arc<dyn Fs>, cx: &mut App) {
|
||||
update_settings_file(fs, cx, |settings, _| {
|
||||
settings
|
||||
.agent_servers
|
||||
.get_or_insert_default()
|
||||
.codex
|
||||
.get_or_insert_default()
|
||||
.default_model = model_id.map(|m| m.to_string())
|
||||
});
|
||||
}
|
||||
|
||||
fn connect(
|
||||
&self,
|
||||
root_dir: Option<&Path>,
|
||||
@@ -84,13 +63,11 @@ impl AgentServer for Codex {
|
||||
cx: &mut App,
|
||||
) -> Task<Result<(Rc<dyn AgentConnection>, Option<task::SpawnInTerminal>)>> {
|
||||
let name = self.name();
|
||||
let telemetry_id = self.telemetry_id();
|
||||
let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().into_owned());
|
||||
let is_remote = delegate.project.read(cx).is_via_remote_server();
|
||||
let store = delegate.store.downgrade();
|
||||
let extra_env = load_proxy_env(cx);
|
||||
let default_mode = self.default_mode(cx);
|
||||
let default_model = self.default_model(cx);
|
||||
|
||||
cx.spawn(async move |cx| {
|
||||
let (command, root_dir, login) = store
|
||||
@@ -110,11 +87,9 @@ impl AgentServer for Codex {
|
||||
|
||||
let connection = crate::acp::connect(
|
||||
name,
|
||||
telemetry_id,
|
||||
command,
|
||||
root_dir.as_ref(),
|
||||
default_mode,
|
||||
default_model,
|
||||
is_remote,
|
||||
cx,
|
||||
)
|
||||
|
||||
@@ -44,63 +44,19 @@ impl crate::AgentServer for CustomAgentServer {
|
||||
|
||||
settings
|
||||
.as_ref()
|
||||
.and_then(|s| s.default_mode().map(acp::SessionModeId::new))
|
||||
.and_then(|s| s.default_mode.clone().map(|m| acp::SessionModeId(m.into())))
|
||||
}
|
||||
|
||||
fn set_default_mode(&self, mode_id: Option<acp::SessionModeId>, fs: Arc<dyn Fs>, cx: &mut App) {
|
||||
let name = self.name();
|
||||
update_settings_file(fs, cx, move |settings, _| {
|
||||
let settings = settings
|
||||
if let Some(settings) = settings
|
||||
.agent_servers
|
||||
.get_or_insert_default()
|
||||
.custom
|
||||
.entry(name.clone())
|
||||
.or_insert_with(|| settings::CustomAgentServerSettings::Extension {
|
||||
default_model: None,
|
||||
default_mode: None,
|
||||
});
|
||||
|
||||
match settings {
|
||||
settings::CustomAgentServerSettings::Custom { default_mode, .. }
|
||||
| settings::CustomAgentServerSettings::Extension { default_mode, .. } => {
|
||||
*default_mode = mode_id.map(|m| m.to_string());
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
fn default_model(&self, cx: &mut App) -> Option<acp::ModelId> {
|
||||
let settings = cx.read_global(|settings: &SettingsStore, _| {
|
||||
settings
|
||||
.get::<AllAgentServersSettings>(None)
|
||||
.custom
|
||||
.get(&self.name())
|
||||
.cloned()
|
||||
});
|
||||
|
||||
settings
|
||||
.as_ref()
|
||||
.and_then(|s| s.default_model().map(acp::ModelId::new))
|
||||
}
|
||||
|
||||
fn set_default_model(&self, model_id: Option<acp::ModelId>, fs: Arc<dyn Fs>, cx: &mut App) {
|
||||
let name = self.name();
|
||||
update_settings_file(fs, cx, move |settings, _| {
|
||||
let settings = settings
|
||||
.agent_servers
|
||||
.get_or_insert_default()
|
||||
.custom
|
||||
.entry(name.clone())
|
||||
.or_insert_with(|| settings::CustomAgentServerSettings::Extension {
|
||||
default_model: None,
|
||||
default_mode: None,
|
||||
});
|
||||
|
||||
match settings {
|
||||
settings::CustomAgentServerSettings::Custom { default_model, .. }
|
||||
| settings::CustomAgentServerSettings::Extension { default_model, .. } => {
|
||||
*default_model = model_id.map(|m| m.to_string());
|
||||
}
|
||||
.get_mut(&name)
|
||||
{
|
||||
settings.default_mode = mode_id.map(|m| m.to_string())
|
||||
}
|
||||
});
|
||||
}
|
||||
@@ -112,11 +68,9 @@ impl crate::AgentServer for CustomAgentServer {
|
||||
cx: &mut App,
|
||||
) -> Task<Result<(Rc<dyn AgentConnection>, Option<task::SpawnInTerminal>)>> {
|
||||
let name = self.name();
|
||||
let telemetry_id = self.telemetry_id();
|
||||
let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().into_owned());
|
||||
let is_remote = delegate.project.read(cx).is_via_remote_server();
|
||||
let default_mode = self.default_mode(cx);
|
||||
let default_model = self.default_model(cx);
|
||||
let store = delegate.store.downgrade();
|
||||
let extra_env = load_proxy_env(cx);
|
||||
|
||||
@@ -139,11 +93,9 @@ impl crate::AgentServer for CustomAgentServer {
|
||||
.await?;
|
||||
let connection = crate::acp::connect(
|
||||
name,
|
||||
telemetry_id,
|
||||
command,
|
||||
root_dir.as_ref(),
|
||||
default_mode,
|
||||
default_model,
|
||||
is_remote,
|
||||
cx,
|
||||
)
|
||||
|
||||
@@ -6,9 +6,7 @@ use gpui::{AppContext, Entity, TestAppContext};
|
||||
use indoc::indoc;
|
||||
#[cfg(test)]
|
||||
use project::agent_server_store::BuiltinAgentServerSettings;
|
||||
use project::{FakeFs, Project};
|
||||
#[cfg(test)]
|
||||
use settings::Settings;
|
||||
use project::{FakeFs, Project, agent_server_store::AllAgentServersSettings};
|
||||
use std::{
|
||||
path::{Path, PathBuf},
|
||||
sync::Arc,
|
||||
@@ -82,9 +80,26 @@ where
|
||||
.update(cx, |thread, cx| {
|
||||
thread.send(
|
||||
vec![
|
||||
"Read the file ".into(),
|
||||
acp::ContentBlock::ResourceLink(acp::ResourceLink::new("foo.rs", "foo.rs")),
|
||||
" and tell me what the content of the println! is".into(),
|
||||
acp::ContentBlock::Text(acp::TextContent {
|
||||
text: "Read the file ".into(),
|
||||
annotations: None,
|
||||
meta: None,
|
||||
}),
|
||||
acp::ContentBlock::ResourceLink(acp::ResourceLink {
|
||||
uri: "foo.rs".into(),
|
||||
name: "foo.rs".into(),
|
||||
annotations: None,
|
||||
description: None,
|
||||
mime_type: None,
|
||||
size: None,
|
||||
title: None,
|
||||
meta: None,
|
||||
}),
|
||||
acp::ContentBlock::Text(acp::TextContent {
|
||||
text: " and tell me what the content of the println! is".into(),
|
||||
annotations: None,
|
||||
meta: None,
|
||||
}),
|
||||
],
|
||||
cx,
|
||||
)
|
||||
@@ -412,7 +427,7 @@ macro_rules! common_e2e_tests {
|
||||
async fn tool_call_with_permission(cx: &mut ::gpui::TestAppContext) {
|
||||
$crate::e2e_tests::test_tool_call_with_permission(
|
||||
$server,
|
||||
::agent_client_protocol::PermissionOptionId::new($allow_option_id),
|
||||
::agent_client_protocol::PermissionOptionId($allow_option_id.into()),
|
||||
cx,
|
||||
)
|
||||
.await;
|
||||
@@ -437,29 +452,35 @@ pub use common_e2e_tests;
|
||||
// Helpers
|
||||
|
||||
pub async fn init_test(cx: &mut TestAppContext) -> Arc<FakeFs> {
|
||||
use settings::Settings;
|
||||
|
||||
env_logger::try_init().ok();
|
||||
|
||||
cx.update(|cx| {
|
||||
let settings_store = settings::SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
Project::init_settings(cx);
|
||||
language::init(cx);
|
||||
gpui_tokio::init(cx);
|
||||
let http_client = reqwest_client::ReqwestClient::user_agent("agent tests").unwrap();
|
||||
cx.set_http_client(Arc::new(http_client));
|
||||
client::init_settings(cx);
|
||||
let client = client::Client::production(cx);
|
||||
let user_store = cx.new(|cx| client::UserStore::new(client.clone(), cx));
|
||||
language_model::init(client.clone(), cx);
|
||||
language_models::init(user_store, client, cx);
|
||||
agent_settings::init(cx);
|
||||
AllAgentServersSettings::register(cx);
|
||||
|
||||
#[cfg(test)]
|
||||
project::agent_server_store::AllAgentServersSettings::override_global(
|
||||
project::agent_server_store::AllAgentServersSettings {
|
||||
AllAgentServersSettings::override_global(
|
||||
AllAgentServersSettings {
|
||||
claude: Some(BuiltinAgentServerSettings {
|
||||
path: Some("claude-code-acp".into()),
|
||||
args: None,
|
||||
env: None,
|
||||
ignore_system_version: None,
|
||||
default_mode: None,
|
||||
default_model: None,
|
||||
}),
|
||||
gemini: Some(crate::gemini::tests::local_command().into()),
|
||||
codex: Some(BuiltinAgentServerSettings {
|
||||
@@ -468,7 +489,6 @@ pub async fn init_test(cx: &mut TestAppContext) -> Arc<FakeFs> {
|
||||
env: None,
|
||||
ignore_system_version: None,
|
||||
default_mode: None,
|
||||
default_model: None,
|
||||
}),
|
||||
custom: collections::HashMap::default(),
|
||||
},
|
||||
|
||||
@@ -31,13 +31,11 @@ impl AgentServer for Gemini {
|
||||
cx: &mut App,
|
||||
) -> Task<Result<(Rc<dyn AgentConnection>, Option<task::SpawnInTerminal>)>> {
|
||||
let name = self.name();
|
||||
let telemetry_id = self.telemetry_id();
|
||||
let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().into_owned());
|
||||
let is_remote = delegate.project.read(cx).is_via_remote_server();
|
||||
let store = delegate.store.downgrade();
|
||||
let mut extra_env = load_proxy_env(cx);
|
||||
let default_mode = self.default_mode(cx);
|
||||
let default_model = self.default_model(cx);
|
||||
|
||||
cx.spawn(async move |cx| {
|
||||
extra_env.insert("SURFACE".to_owned(), "zed".to_owned());
|
||||
@@ -66,11 +64,9 @@ impl AgentServer for Gemini {
|
||||
|
||||
let connection = crate::acp::connect(
|
||||
name,
|
||||
telemetry_id,
|
||||
command,
|
||||
root_dir.as_ref(),
|
||||
default_mode,
|
||||
default_model,
|
||||
is_remote,
|
||||
cx,
|
||||
)
|
||||
|
||||
@@ -6,8 +6,8 @@ use convert_case::{Case, Casing as _};
|
||||
use fs::Fs;
|
||||
use gpui::{App, SharedString};
|
||||
use settings::{
|
||||
AgentProfileContent, ContextServerPresetContent, LanguageModelSelection, Settings as _,
|
||||
SettingsContent, update_settings_file,
|
||||
AgentProfileContent, ContextServerPresetContent, Settings as _, SettingsContent,
|
||||
update_settings_file,
|
||||
};
|
||||
use util::ResultExt as _;
|
||||
|
||||
@@ -53,30 +53,19 @@ impl AgentProfile {
|
||||
let base_profile =
|
||||
base_profile_id.and_then(|id| AgentSettings::get_global(cx).profiles.get(&id).cloned());
|
||||
|
||||
// Copy toggles from the base profile so the new profile starts with familiar defaults.
|
||||
let tools = base_profile
|
||||
.as_ref()
|
||||
.map(|profile| profile.tools.clone())
|
||||
.unwrap_or_default();
|
||||
let enable_all_context_servers = base_profile
|
||||
.as_ref()
|
||||
.map(|profile| profile.enable_all_context_servers)
|
||||
.unwrap_or_default();
|
||||
let context_servers = base_profile
|
||||
.as_ref()
|
||||
.map(|profile| profile.context_servers.clone())
|
||||
.unwrap_or_default();
|
||||
// Preserve the base profile's model preference when cloning into a new profile.
|
||||
let default_model = base_profile
|
||||
.as_ref()
|
||||
.and_then(|profile| profile.default_model.clone());
|
||||
|
||||
let profile_settings = AgentProfileSettings {
|
||||
name: name.into(),
|
||||
tools,
|
||||
enable_all_context_servers,
|
||||
context_servers,
|
||||
default_model,
|
||||
tools: base_profile
|
||||
.as_ref()
|
||||
.map(|profile| profile.tools.clone())
|
||||
.unwrap_or_default(),
|
||||
enable_all_context_servers: base_profile
|
||||
.as_ref()
|
||||
.map(|profile| profile.enable_all_context_servers)
|
||||
.unwrap_or_default(),
|
||||
context_servers: base_profile
|
||||
.map(|profile| profile.context_servers)
|
||||
.unwrap_or_default(),
|
||||
};
|
||||
|
||||
update_settings_file(fs, cx, {
|
||||
@@ -107,8 +96,6 @@ pub struct AgentProfileSettings {
|
||||
pub tools: IndexMap<Arc<str>, bool>,
|
||||
pub enable_all_context_servers: bool,
|
||||
pub context_servers: IndexMap<Arc<str>, ContextServerPreset>,
|
||||
/// Default language model to apply when this profile becomes active.
|
||||
pub default_model: Option<LanguageModelSelection>,
|
||||
}
|
||||
|
||||
impl AgentProfileSettings {
|
||||
@@ -157,7 +144,6 @@ impl AgentProfileSettings {
|
||||
)
|
||||
})
|
||||
.collect(),
|
||||
default_model: self.default_model.clone(),
|
||||
},
|
||||
);
|
||||
|
||||
@@ -167,23 +153,15 @@ impl AgentProfileSettings {
|
||||
|
||||
impl From<AgentProfileContent> for AgentProfileSettings {
|
||||
fn from(content: AgentProfileContent) -> Self {
|
||||
let AgentProfileContent {
|
||||
name,
|
||||
tools,
|
||||
enable_all_context_servers,
|
||||
context_servers,
|
||||
default_model,
|
||||
} = content;
|
||||
|
||||
Self {
|
||||
name: name.into(),
|
||||
tools,
|
||||
enable_all_context_servers: enable_all_context_servers.unwrap_or_default(),
|
||||
context_servers: context_servers
|
||||
name: content.name.into(),
|
||||
tools: content.tools,
|
||||
enable_all_context_servers: content.enable_all_context_servers.unwrap_or_default(),
|
||||
context_servers: content
|
||||
.context_servers
|
||||
.into_iter()
|
||||
.map(|(server_id, preset)| (server_id, preset.into()))
|
||||
.collect(),
|
||||
default_model,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -10,7 +10,7 @@ use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::{
|
||||
DefaultAgentView, DockPosition, LanguageModelParameters, LanguageModelSelection,
|
||||
NotifyWhenAgentWaiting, RegisterSetting, Settings,
|
||||
NotifyWhenAgentWaiting, Settings,
|
||||
};
|
||||
|
||||
pub use crate::agent_profile::*;
|
||||
@@ -19,7 +19,11 @@ pub const SUMMARIZE_THREAD_PROMPT: &str = include_str!("prompts/summarize_thread
|
||||
pub const SUMMARIZE_THREAD_DETAILED_PROMPT: &str =
|
||||
include_str!("prompts/summarize_thread_detailed_prompt.txt");
|
||||
|
||||
#[derive(Clone, Debug, RegisterSetting)]
|
||||
pub fn init(cx: &mut App) {
|
||||
AgentSettings::register(cx);
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct AgentSettings {
|
||||
pub enabled: bool,
|
||||
pub button: bool,
|
||||
|
||||
@@ -13,8 +13,7 @@ path = "src/agent_ui.rs"
|
||||
doctest = false
|
||||
|
||||
[features]
|
||||
test-support = ["gpui/test-support", "language/test-support", "reqwest_client"]
|
||||
unit-eval = []
|
||||
test-support = ["gpui/test-support", "language/test-support"]
|
||||
|
||||
[dependencies]
|
||||
acp_thread.workspace = true
|
||||
@@ -48,7 +47,6 @@ fs.workspace = true
|
||||
futures.workspace = true
|
||||
fuzzy.workspace = true
|
||||
gpui.workspace = true
|
||||
gpui_tokio.workspace = true
|
||||
html_to_markdown.workspace = true
|
||||
http_client.workspace = true
|
||||
indoc.workspace = true
|
||||
@@ -71,6 +69,7 @@ postage.workspace = true
|
||||
project.workspace = true
|
||||
prompt_store.workspace = true
|
||||
proto.workspace = true
|
||||
ref-cast.workspace = true
|
||||
release_channel.workspace = true
|
||||
rope.workspace = true
|
||||
rules_library.workspace = true
|
||||
@@ -94,23 +93,19 @@ time_format.workspace = true
|
||||
ui.workspace = true
|
||||
ui_input.workspace = true
|
||||
url.workspace = true
|
||||
urlencoding.workspace = true
|
||||
util.workspace = true
|
||||
watch.workspace = true
|
||||
workspace.workspace = true
|
||||
zed_actions.workspace = true
|
||||
image.workspace = true
|
||||
async-fs.workspace = true
|
||||
reqwest_client = { workspace = true, optional = true }
|
||||
|
||||
[dev-dependencies]
|
||||
acp_thread = { workspace = true, features = ["test-support"] }
|
||||
agent = { workspace = true, features = ["test-support"] }
|
||||
assistant_text_thread = { workspace = true, features = ["test-support"] }
|
||||
buffer_diff = { workspace = true, features = ["test-support"] }
|
||||
clock.workspace = true
|
||||
db = { workspace = true, features = ["test-support"] }
|
||||
editor = { workspace = true, features = ["test-support"] }
|
||||
eval_utils.workspace = true
|
||||
gpui = { workspace = true, "features" = ["test-support"] }
|
||||
indoc.workspace = true
|
||||
language = { workspace = true, "features" = ["test-support"] }
|
||||
@@ -118,8 +113,6 @@ languages = { workspace = true, features = ["test-support"] }
|
||||
language_model = { workspace = true, "features" = ["test-support"] }
|
||||
pretty_assertions.workspace = true
|
||||
project = { workspace = true, features = ["test-support"] }
|
||||
semver.workspace = true
|
||||
rand.workspace = true
|
||||
reqwest_client.workspace = true
|
||||
tree-sitter-md.workspace = true
|
||||
unindent.workspace = true
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
mod completion_provider;
|
||||
mod entry_view_state;
|
||||
mod message_editor;
|
||||
mod mode_selector;
|
||||
|
||||
@@ -401,18 +401,19 @@ mod tests {
|
||||
use acp_thread::{AgentConnection, StubAgentConnection};
|
||||
use agent::HistoryStore;
|
||||
use agent_client_protocol as acp;
|
||||
use agent_settings::AgentSettings;
|
||||
use assistant_text_thread::TextThreadStore;
|
||||
use buffer_diff::{DiffHunkStatus, DiffHunkStatusKind};
|
||||
use editor::RowInfo;
|
||||
use editor::{EditorSettings, RowInfo};
|
||||
use fs::FakeFs;
|
||||
use gpui::{AppContext as _, TestAppContext};
|
||||
use gpui::{AppContext as _, SemanticVersion, TestAppContext};
|
||||
|
||||
use crate::acp::entry_view_state::EntryViewState;
|
||||
use multi_buffer::MultiBufferRow;
|
||||
use pretty_assertions::assert_matches;
|
||||
use project::Project;
|
||||
use serde_json::json;
|
||||
use settings::SettingsStore;
|
||||
use settings::{Settings as _, SettingsStore};
|
||||
use util::path;
|
||||
use workspace::Workspace;
|
||||
|
||||
@@ -432,11 +433,24 @@ mod tests {
|
||||
let (workspace, cx) =
|
||||
cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx));
|
||||
|
||||
let tool_call = acp::ToolCall::new("tool", "Tool call")
|
||||
.status(acp::ToolCallStatus::InProgress)
|
||||
.content(vec![acp::ToolCallContent::Diff(
|
||||
acp::Diff::new("/project/hello.txt", "hello world").old_text("hi world"),
|
||||
)]);
|
||||
let tool_call = acp::ToolCall {
|
||||
id: acp::ToolCallId("tool".into()),
|
||||
title: "Tool call".into(),
|
||||
kind: acp::ToolKind::Other,
|
||||
status: acp::ToolCallStatus::InProgress,
|
||||
content: vec![acp::ToolCallContent::Diff {
|
||||
diff: acp::Diff {
|
||||
path: "/project/hello.txt".into(),
|
||||
old_text: Some("hi world".into()),
|
||||
new_text: "hello world".into(),
|
||||
meta: None,
|
||||
},
|
||||
}],
|
||||
locations: vec![],
|
||||
raw_input: None,
|
||||
raw_output: None,
|
||||
meta: None,
|
||||
};
|
||||
let connection = Rc::new(StubAgentConnection::new());
|
||||
let thread = cx
|
||||
.update(|_, cx| {
|
||||
@@ -525,8 +539,13 @@ mod tests {
|
||||
cx.update(|cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
language::init(cx);
|
||||
Project::init_settings(cx);
|
||||
AgentSettings::register(cx);
|
||||
workspace::init_settings(cx);
|
||||
theme::init(theme::LoadThemes::JustBase, cx);
|
||||
release_channel::init(semver::Version::new(0, 0, 0), cx);
|
||||
release_channel::init(SemanticVersion::default(), cx);
|
||||
EditorSettings::register(cx);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -11,7 +11,7 @@ use ui::{
|
||||
PopoverMenu, PopoverMenuHandle, Tooltip, prelude::*,
|
||||
};
|
||||
|
||||
use crate::{CycleModeSelector, ToggleProfileSelector, ui::HoldForDefault};
|
||||
use crate::{CycleModeSelector, ToggleProfileSelector};
|
||||
|
||||
pub struct ModeSelector {
|
||||
connection: Rc<dyn AgentSessionModes>,
|
||||
@@ -56,10 +56,6 @@ impl ModeSelector {
|
||||
self.set_mode(all_modes[next_index].id.clone(), cx);
|
||||
}
|
||||
|
||||
pub fn mode(&self) -> acp::SessionModeId {
|
||||
self.connection.current_mode()
|
||||
}
|
||||
|
||||
pub fn set_mode(&mut self, mode: acp::SessionModeId, cx: &mut Context<Self>) {
|
||||
let task = self.connection.set_mode(mode, cx);
|
||||
self.setting_mode = true;
|
||||
@@ -108,11 +104,36 @@ impl ModeSelector {
|
||||
entry.documentation_aside(side, DocumentationEdge::Bottom, {
|
||||
let description = description.clone();
|
||||
|
||||
move |_| {
|
||||
move |cx| {
|
||||
v_flex()
|
||||
.gap_1()
|
||||
.child(Label::new(description.clone()))
|
||||
.child(HoldForDefault::new(is_default))
|
||||
.child(
|
||||
h_flex()
|
||||
.pt_1()
|
||||
.border_t_1()
|
||||
.border_color(cx.theme().colors().border_variant)
|
||||
.gap_0p5()
|
||||
.text_sm()
|
||||
.text_color(Color::Muted.color(cx))
|
||||
.child("Hold")
|
||||
.child(h_flex().flex_shrink_0().children(
|
||||
ui::render_modifiers(
|
||||
&gpui::Modifiers::secondary_key(),
|
||||
PlatformStyle::platform(),
|
||||
None,
|
||||
Some(ui::TextSize::Default.rems(cx).into()),
|
||||
true,
|
||||
),
|
||||
))
|
||||
.child(div().map(|this| {
|
||||
if is_default {
|
||||
this.child("to also unset as default")
|
||||
} else {
|
||||
this.child("to also set as default")
|
||||
}
|
||||
})),
|
||||
)
|
||||
.into_any_element()
|
||||
}
|
||||
})
|
||||
@@ -161,7 +182,7 @@ impl Render for ModeSelector {
|
||||
.map(|mode| mode.name.clone())
|
||||
.unwrap_or_else(|| "Unknown".into());
|
||||
|
||||
let this = cx.weak_entity();
|
||||
let this = cx.entity();
|
||||
|
||||
let icon = if self.menu_handle.is_deployed() {
|
||||
IconName::ChevronUp
|
||||
@@ -222,8 +243,7 @@ impl Render for ModeSelector {
|
||||
y: px(-2.0),
|
||||
})
|
||||
.menu(move |window, cx| {
|
||||
this.update(cx, |this, cx| this.build_context_menu(window, cx))
|
||||
.ok()
|
||||
Some(this.update(cx, |this, cx| this.build_context_menu(window, cx)))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,38 +1,27 @@
|
||||
use std::{cmp::Reverse, rc::Rc, sync::Arc};
|
||||
|
||||
use acp_thread::{AgentModelInfo, AgentModelList, AgentModelSelector};
|
||||
use agent_servers::AgentServer;
|
||||
use anyhow::Result;
|
||||
use collections::IndexMap;
|
||||
use fs::Fs;
|
||||
use futures::FutureExt;
|
||||
use fuzzy::{StringMatchCandidate, match_strings};
|
||||
use gpui::{
|
||||
Action, AsyncWindowContext, BackgroundExecutor, DismissEvent, FocusHandle, Task, WeakEntity,
|
||||
};
|
||||
use gpui::{AsyncWindowContext, BackgroundExecutor, DismissEvent, Task, WeakEntity};
|
||||
use ordered_float::OrderedFloat;
|
||||
use picker::{Picker, PickerDelegate};
|
||||
use ui::{
|
||||
DocumentationAside, DocumentationEdge, DocumentationSide, IntoElement, KeyBinding, ListItem,
|
||||
DocumentationAside, DocumentationEdge, DocumentationSide, IntoElement, ListItem,
|
||||
ListItemSpacing, prelude::*,
|
||||
};
|
||||
use util::ResultExt;
|
||||
use zed_actions::agent::OpenSettings;
|
||||
|
||||
use crate::ui::HoldForDefault;
|
||||
|
||||
pub type AcpModelSelector = Picker<AcpModelPickerDelegate>;
|
||||
|
||||
pub fn acp_model_selector(
|
||||
selector: Rc<dyn AgentModelSelector>,
|
||||
agent_server: Rc<dyn AgentServer>,
|
||||
fs: Arc<dyn Fs>,
|
||||
focus_handle: FocusHandle,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<AcpModelSelector>,
|
||||
) -> AcpModelSelector {
|
||||
let delegate =
|
||||
AcpModelPickerDelegate::new(selector, agent_server, fs, focus_handle, window, cx);
|
||||
let delegate = AcpModelPickerDelegate::new(selector, window, cx);
|
||||
Picker::list(delegate, window, cx)
|
||||
.show_scrollbar(true)
|
||||
.width(rems(20.))
|
||||
@@ -46,23 +35,17 @@ enum AcpModelPickerEntry {
|
||||
|
||||
pub struct AcpModelPickerDelegate {
|
||||
selector: Rc<dyn AgentModelSelector>,
|
||||
agent_server: Rc<dyn AgentServer>,
|
||||
fs: Arc<dyn Fs>,
|
||||
filtered_entries: Vec<AcpModelPickerEntry>,
|
||||
models: Option<AgentModelList>,
|
||||
selected_index: usize,
|
||||
selected_description: Option<(usize, SharedString, bool)>,
|
||||
selected_description: Option<(usize, SharedString)>,
|
||||
selected_model: Option<AgentModelInfo>,
|
||||
_refresh_models_task: Task<()>,
|
||||
focus_handle: FocusHandle,
|
||||
}
|
||||
|
||||
impl AcpModelPickerDelegate {
|
||||
fn new(
|
||||
selector: Rc<dyn AgentModelSelector>,
|
||||
agent_server: Rc<dyn AgentServer>,
|
||||
fs: Arc<dyn Fs>,
|
||||
focus_handle: FocusHandle,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<AcpModelSelector>,
|
||||
) -> Self {
|
||||
@@ -103,15 +86,12 @@ impl AcpModelPickerDelegate {
|
||||
|
||||
Self {
|
||||
selector,
|
||||
agent_server,
|
||||
fs,
|
||||
filtered_entries: Vec::new(),
|
||||
models: None,
|
||||
selected_model: None,
|
||||
selected_index: 0,
|
||||
selected_description: None,
|
||||
_refresh_models_task: refresh_models_task,
|
||||
focus_handle,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -201,21 +181,6 @@ impl PickerDelegate for AcpModelPickerDelegate {
|
||||
if let Some(AcpModelPickerEntry::Model(model_info)) =
|
||||
self.filtered_entries.get(self.selected_index)
|
||||
{
|
||||
if window.modifiers().secondary() {
|
||||
let default_model = self.agent_server.default_model(cx);
|
||||
let is_default = default_model.as_ref() == Some(&model_info.id);
|
||||
|
||||
self.agent_server.set_default_model(
|
||||
if is_default {
|
||||
None
|
||||
} else {
|
||||
Some(model_info.id.clone())
|
||||
},
|
||||
self.fs.clone(),
|
||||
cx,
|
||||
);
|
||||
}
|
||||
|
||||
self.selector
|
||||
.select_model(model_info.id.clone(), cx)
|
||||
.detach_and_log_err(cx);
|
||||
@@ -260,8 +225,6 @@ impl PickerDelegate for AcpModelPickerDelegate {
|
||||
),
|
||||
AcpModelPickerEntry::Model(model_info) => {
|
||||
let is_selected = Some(model_info) == self.selected_model.as_ref();
|
||||
let default_model = self.agent_server.default_model(cx);
|
||||
let is_default = default_model.as_ref() == Some(&model_info.id);
|
||||
|
||||
let model_icon_color = if is_selected {
|
||||
Color::Accent
|
||||
@@ -276,8 +239,8 @@ impl PickerDelegate for AcpModelPickerDelegate {
|
||||
this
|
||||
.on_hover(cx.listener(move |menu, hovered, _, cx| {
|
||||
if *hovered {
|
||||
menu.delegate.selected_description = Some((ix, description.clone(), is_default));
|
||||
} else if matches!(menu.delegate.selected_description, Some((id, _, _)) if id == ix) {
|
||||
menu.delegate.selected_description = Some((ix, description.clone()));
|
||||
} else if matches!(menu.delegate.selected_description, Some((id, _)) if id == ix) {
|
||||
menu.delegate.selected_description = None;
|
||||
}
|
||||
cx.notify();
|
||||
@@ -288,17 +251,17 @@ impl PickerDelegate for AcpModelPickerDelegate {
|
||||
.inset(true)
|
||||
.spacing(ListItemSpacing::Sparse)
|
||||
.toggle_state(selected)
|
||||
.start_slot::<Icon>(model_info.icon.map(|icon| {
|
||||
Icon::new(icon)
|
||||
.color(model_icon_color)
|
||||
.size(IconSize::Small)
|
||||
}))
|
||||
.child(
|
||||
h_flex()
|
||||
.w_full()
|
||||
.pl_0p5()
|
||||
.gap_1p5()
|
||||
.when_some(model_info.icon, |this, icon| {
|
||||
this.child(
|
||||
Icon::new(icon)
|
||||
.color(model_icon_color)
|
||||
.size(IconSize::Small)
|
||||
)
|
||||
})
|
||||
.w(px(240.))
|
||||
.child(Label::new(model_info.name.clone()).truncate()),
|
||||
)
|
||||
.end_slot(div().pr_3().when(is_selected, |this| {
|
||||
@@ -320,57 +283,14 @@ impl PickerDelegate for AcpModelPickerDelegate {
|
||||
_window: &mut Window,
|
||||
_cx: &mut Context<Picker<Self>>,
|
||||
) -> Option<ui::DocumentationAside> {
|
||||
self.selected_description
|
||||
.as_ref()
|
||||
.map(|(_, description, is_default)| {
|
||||
let description = description.clone();
|
||||
let is_default = *is_default;
|
||||
|
||||
DocumentationAside::new(
|
||||
DocumentationSide::Left,
|
||||
DocumentationEdge::Top,
|
||||
Rc::new(move |_| {
|
||||
v_flex()
|
||||
.gap_1()
|
||||
.child(Label::new(description.clone()))
|
||||
.child(HoldForDefault::new(is_default))
|
||||
.into_any_element()
|
||||
}),
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
fn render_footer(
|
||||
&self,
|
||||
_window: &mut Window,
|
||||
cx: &mut Context<Picker<Self>>,
|
||||
) -> Option<AnyElement> {
|
||||
let focus_handle = self.focus_handle.clone();
|
||||
|
||||
if !self.selector.should_render_footer() {
|
||||
return None;
|
||||
}
|
||||
|
||||
Some(
|
||||
h_flex()
|
||||
.w_full()
|
||||
.p_1p5()
|
||||
.border_t_1()
|
||||
.border_color(cx.theme().colors().border_variant)
|
||||
.child(
|
||||
Button::new("configure", "Configure")
|
||||
.full_width()
|
||||
.style(ButtonStyle::Outlined)
|
||||
.key_binding(
|
||||
KeyBinding::for_action_in(&OpenSettings, &focus_handle, cx)
|
||||
.map(|kb| kb.size(rems_from_px(12.))),
|
||||
)
|
||||
.on_click(|_, window, cx| {
|
||||
window.dispatch_action(OpenSettings.boxed_clone(), cx);
|
||||
}),
|
||||
)
|
||||
.into_any(),
|
||||
)
|
||||
self.selected_description.as_ref().map(|(_, description)| {
|
||||
let description = description.clone();
|
||||
DocumentationAside::new(
|
||||
DocumentationSide::Left,
|
||||
DocumentationEdge::Top,
|
||||
Rc::new(move |_| Label::new(description.clone()).into_any_element()),
|
||||
)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -464,7 +384,7 @@ mod tests {
|
||||
models
|
||||
.into_iter()
|
||||
.map(|model| acp_thread::AgentModelInfo {
|
||||
id: acp::ModelId::new(model.to_string()),
|
||||
id: acp::ModelId(model.to_string().into()),
|
||||
name: model.to_string().into(),
|
||||
description: None,
|
||||
icon: None,
|
||||
|
||||