Compare commits

1 commit

perf/proje ... thread-edi

| Author | SHA1 | Date |
|---|---|---|
|  | 257d10f324 |  |

18  .github/ISSUE_TEMPLATE/01_bug_ai.yml  (vendored)
@@ -39,21 +39,3 @@ body:
        Output of "zed: copy system specs into clipboard"
    validations:
      required: true

  - type: textarea
    attributes:
      label: If applicable, attach your `Zed.log` file to this issue.
      description: |
        From the command palette, run `zed: open log` to see the last 1000 lines.
        Or run `zed: reveal log in file manager` to reveal the log file itself.
      value: |
        <details><summary>Zed.log</summary>

        <!-- Paste your log inside the code block. -->
        ```log

        ```

        </details>
    validations:
      required: false
18  .github/ISSUE_TEMPLATE/04_bug_debugger.yml  (vendored)

@@ -33,21 +33,3 @@ body:
(Identical change to 01_bug_ai.yml above: the optional `Zed.log` textarea block is removed.)
18  .github/ISSUE_TEMPLATE/06_bug_git.yml  (vendored)

@@ -33,21 +33,3 @@ body:
(Identical change to 01_bug_ai.yml above: the optional `Zed.log` textarea block is removed.)
18  .github/ISSUE_TEMPLATE/07_bug_windows.yml  (vendored)

@@ -33,21 +33,3 @@ body:
(Identical change to 01_bug_ai.yml above: the optional `Zed.log` textarea block is removed.)
17  .github/ISSUE_TEMPLATE/10_bug_report.yml  (vendored)

@@ -56,20 +56,3 @@ body:
(Identical change to 01_bug_ai.yml above: the optional `Zed.log` textarea block is removed.)
88  .github/workflows/after_release.yml  (vendored)

@@ -1,88 +0,0 @@
The generated workflow (header comments: "Generated from xtask::workflows::after_release", "Rebuild with `cargo xtask workflows`") is deleted in full. It triggered on `release: published` and defined four jobs: `rebuild_releases_page` (a curl POST to https://cloud.zed.dev/releases/refresh?expect_tag=${{ github.event.release.tag_name }} plus a Vercel redeploy of https://zed.dev), `post_to_discord` (gh-truncate-string-action and tsickert/discord-webhook posting the release notes to `DISCORD_WEBHOOK_RELEASE_NOTES`), `publish_winget` (vedantmgoyal9/winget-releaser publishing `ZedIndustries.Zed` or `ZedIndustries.Zed.Preview`, keeping 5 versions), and `create_sentry_release` (getsentry/action-release for the zed-dev/zed Sentry project). The Discord and winget jobs mirror those in the `community_release_actions.yml` workflow added below, and the Sentry step also appears in the `release.yml` hunk further down.
93  .github/workflows/community_release_actions.yml  (vendored, new file)

@@ -0,0 +1,93 @@
# IF YOU UPDATE THE NAME OF ANY GITHUB SECRET, YOU MUST CHERRY PICK THE COMMIT
# TO BOTH STABLE AND PREVIEW CHANNELS

name: Release Actions

on:
  release:
    types: [published]

jobs:
  discord_release:
    if: github.repository_owner == 'zed-industries'
    runs-on: ubuntu-latest
    steps:
      - name: Get release URL
        id: get-release-url
        run: |
          if [ "${{ github.event.release.prerelease }}" == "true" ]; then
            URL="https://zed.dev/releases/preview"
          else
            URL="https://zed.dev/releases/stable"
          fi

          echo "URL=$URL" >> "$GITHUB_OUTPUT"
      - name: Get content
        uses: 2428392/gh-truncate-string-action@b3ff790d21cf42af3ca7579146eedb93c8fb0757 # v1.4.1
        id: get-content
        with:
          stringToTruncate: |
            📣 Zed [${{ github.event.release.tag_name }}](<${{ steps.get-release-url.outputs.URL }}>) was just released!

            ${{ github.event.release.body }}
          maxLength: 2000
          truncationSymbol: "..."
      - name: Discord Webhook Action
        uses: tsickert/discord-webhook@c840d45a03a323fbc3f7507ac7769dbd91bfb164 # v5.3.0
        with:
          webhook-url: ${{ secrets.DISCORD_WEBHOOK_RELEASE_NOTES }}
          content: ${{ steps.get-content.outputs.string }}

  publish-winget:
    runs-on:
      - ubuntu-latest
    steps:
      - name: Set Package Name
        id: set-package-name
        run: |
          if [ "${{ github.event.release.prerelease }}" == "true" ]; then
            PACKAGE_NAME=ZedIndustries.Zed.Preview
          else
            PACKAGE_NAME=ZedIndustries.Zed
          fi

          echo "PACKAGE_NAME=$PACKAGE_NAME" >> "$GITHUB_OUTPUT"
      - uses: vedantmgoyal9/winget-releaser@19e706d4c9121098010096f9c495a70a7518b30f # v2
        with:
          identifier: ${{ steps.set-package-name.outputs.PACKAGE_NAME }}
          max-versions-to-keep: 5
          token: ${{ secrets.WINGET_TOKEN }}

  send_release_notes_email:
    if: false && github.repository_owner == 'zed-industries' && !github.event.release.prerelease
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
        with:
          fetch-depth: 0

      - name: Check if release was promoted from preview
        id: check-promotion-from-preview
        run: |
          VERSION="${{ github.event.release.tag_name }}"
          PREVIEW_TAG="${VERSION}-pre"

          if git rev-parse "$PREVIEW_TAG" > /dev/null 2>&1; then
            echo "was_promoted_from_preview=true" >> "$GITHUB_OUTPUT"
          else
            echo "was_promoted_from_preview=false" >> "$GITHUB_OUTPUT"
          fi

      - name: Send release notes email
        if: steps.check-promotion-from-preview.outputs.was_promoted_from_preview == 'true'
        run: |
          TAG="${{ github.event.release.tag_name }}"
          cat << 'EOF' > release_body.txt
          ${{ github.event.release.body }}
          EOF
          jq -n --arg tag "$TAG" --rawfile body release_body.txt '{version: $tag, markdown_body: $body}' \
            > release_data.json
          curl -X POST "https://zed.dev/api/send_release_notes_email" \
            -H "Authorization: Bearer ${{ secrets.RELEASE_NOTES_API_TOKEN }}" \
            -H "Content-Type: application/json" \
            -d @release_data.json
8  .github/workflows/release.yml  (vendored)

@@ -475,6 +475,14 @@ jobs:
        shell: bash -euxo pipefail {0}
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      - name: release::create_sentry_release
        uses: getsentry/action-release@526942b68292201ac6bbb99b9a0747d4abee354c
        with:
          environment: production
        env:
          SENTRY_ORG: zed-dev
          SENTRY_PROJECT: zed
          SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
concurrency:
  group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
  cancel-in-progress: true
70  .github/workflows/run_tests.yml  (vendored)

@@ -285,6 +285,40 @@ jobs:
        rm -rf ./../.cargo
        shell: bash -euxo pipefail {0}
        timeout-minutes: 60
  check_postgres_and_protobuf_migrations:
    needs:
      - orchestrate
    if: needs.orchestrate.outputs.run_tests == 'true'
    runs-on: self-mini-macos
    steps:
      - name: steps::checkout_repo
        uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
        with:
          fetch-depth: 0
      - name: run_tests::check_postgres_and_protobuf_migrations::remove_untracked_files
        run: git clean -df
        shell: bash -euxo pipefail {0}
      - name: run_tests::check_postgres_and_protobuf_migrations::ensure_fresh_merge
        run: |
          if [ -z "$GITHUB_BASE_REF" ];
          then
            echo "BUF_BASE_BRANCH=$(git merge-base origin/main HEAD)" >> "$GITHUB_ENV"
          else
            git checkout -B temp
            git merge -q "origin/$GITHUB_BASE_REF" -m "merge main into temp"
            echo "BUF_BASE_BRANCH=$GITHUB_BASE_REF" >> "$GITHUB_ENV"
          fi
        shell: bash -euxo pipefail {0}
      - name: run_tests::check_postgres_and_protobuf_migrations::bufbuild_setup_action
        uses: bufbuild/buf-setup-action@v1
        with:
          version: v1.29.0
      - name: run_tests::check_postgres_and_protobuf_migrations::bufbuild_breaking_action
        uses: bufbuild/buf-breaking-action@v1
        with:
          input: crates/proto/proto/
          against: https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/
    timeout-minutes: 60
  check_dependencies:
    needs:
      - orchestrate

@@ -484,40 +518,6 @@ jobs:
        shell: bash -euxo pipefail {0}
        timeout-minutes: 60
        continue-on-error: true
(The same `check_postgres_and_protobuf_migrations` job block as above, removed from its old position; the job moves earlier in the file.)
  tests_pass:
    needs:
      - orchestrate

@@ -527,6 +527,7 @@ jobs:
      - run_tests_mac
      - doctests
      - check_workspace_binaries
      - check_postgres_and_protobuf_migrations
      - check_dependencies
      - check_docs
      - check_licenses

@@ -553,6 +554,7 @@ jobs:
          check_result "run_tests_mac" "${{ needs.run_tests_mac.result }}"
          check_result "doctests" "${{ needs.doctests.result }}"
          check_result "check_workspace_binaries" "${{ needs.check_workspace_binaries.result }}"
          check_result "check_postgres_and_protobuf_migrations" "${{ needs.check_postgres_and_protobuf_migrations.result }}"
          check_result "check_dependencies" "${{ needs.check_dependencies.result }}"
          check_result "check_docs" "${{ needs.check_docs.result }}"
          check_result "check_licenses" "${{ needs.check_licenses.result }}"
37  Cargo.lock  (generated)

Generated lockfile churn across roughly two dozen small hunks. Several workspace packages add or drop one or two entries in their dependency lists (the affected lists include entries such as `telemetry`, `tempfile`, `text`, `settings`, `schemars 1.0.4`, `proto`, `json_schema_store`, `reqwest_client`, `title_bar`, `release_channel`, and `cloud_zeta2_prompt`). The `agent-client-protocol` 0.7.0 entry loses its crates.io source and checksum, and `agent-client-protocol-schema` moves from the 0.6.2 registry release to a source-less 0.6.3 entry, matching the workspace switch to a path dependency below. The `notify` and `notify-types` git dependencies switch between the zed-industries revisions b4588b2e (notify 8.2.0, windows-sys 0.60.2) and bbb9ea5a (notify 8.0.0, windows-sys 0.59.0), matching the `[patch.crates-io]` change below. The `vim_mode_setting` 0.1.0 package picks up an extra dependency (its list shows `gpui` and `settings`).
Cargo.toml (workspace manifest)

@@ -440,7 +440,7 @@ zlog_settings = { path = "crates/zlog_settings" }
# External crates
#

agent-client-protocol = { version = "0.7.0", features = ["unstable"] }
agent-client-protocol = { path = "../agent-client-protocol", features = ["unstable"] }
aho-corasick = "1.1"
alacritty_terminal = "0.25.1-rc1"
any_vec = "0.14"

@@ -663,7 +663,6 @@ time = { version = "0.3", features = [
    "serde",
    "serde-well-known",
    "formatting",
    "local-offset",
] }
tiny_http = "0.8"
tokio = { version = "1" }

@@ -773,8 +772,8 @@ features = [
]

[patch.crates-io]
notify = { git = "https://github.com/zed-industries/notify.git", rev = "b4588b2e5aee68f4c0e100f140e808cbce7b1419" }
notify-types = { git = "https://github.com/zed-industries/notify.git", rev = "b4588b2e5aee68f4c0e100f140e808cbce7b1419" }
notify = { git = "https://github.com/zed-industries/notify.git", rev = "bbb9ea5ae52b253e095737847e367c30653a2e96" }
notify-types = { git = "https://github.com/zed-industries/notify.git", rev = "bbb9ea5ae52b253e095737847e367c30653a2e96" }
windows-capture = { git = "https://github.com/zed-industries/windows-capture.git", rev = "f0d6c1b6691db75461b732f6d5ff56eed002eeb9" }

[profile.dev]

@@ -840,7 +839,7 @@ ui_input = { codegen-units = 1 }
zed_actions = { codegen-units = 1 }

[profile.release]
debug = "full"
debug = "limited"
lto = "thin"
codegen-units = 1
Keymap hunks:

@@ -735,20 +735,6 @@
          "tab": "editor::ComposeCompletion"
        }
      },
      {
        "context": "Editor && in_snippet && has_next_tabstop && !showing_completions",
        "use_key_equivalents": true,
        "bindings": {
          "tab": "editor::NextSnippetTabstop"
        }
      },
      {
        "context": "Editor && in_snippet && has_previous_tabstop && !showing_completions",
        "use_key_equivalents": true,
        "bindings": {
          "shift-tab": "editor::PreviousSnippetTabstop"
        }
      },
      // Bindings for accepting edit predictions
      //
      // alt-l is provided as an alternative to tab/alt-tab. and will be displayed in the UI. This is

@@ -805,20 +805,6 @@
(Same two `in_snippet` tab-stop binding blocks removed in a second default keymap; its trailing context is the "Editor && edit_prediction" section.)

@@ -739,20 +739,6 @@
(Same two `in_snippet` tab-stop binding blocks removed in a third default keymap.)

@@ -455,7 +455,6 @@
    "<": "vim::Outdent",
    "=": "vim::AutoIndent",
    "d": "vim::HelixDelete",
    "alt-d": "editor::Delete", // Delete selection, without yanking
    "c": "vim::HelixSubstitute",
    "alt-c": "vim::HelixSubstituteNoYank",
Default settings hunk:

@@ -1487,11 +1487,7 @@
          // in your project's settings, rather than globally.
          "directories": [".env", "env", ".venv", "venv"],
          // Can also be `csh`, `fish`, `nushell` and `power_shell`
          "activate_script": "default",
          // Preferred Conda manager to use when activating Conda environments.
          // Values: "auto", "conda", "mamba", "micromamba"
          // Default: "auto"
          "conda_manager": "auto"
          "activate_script": "default"
        }
      },
      "toolbar": {
Crate manifest (Cargo.toml) dependency hunks:

@@ -39,7 +39,6 @@ serde_json.workspace = true
settings.workspace = true
smol.workspace = true
task.workspace = true
telemetry.workspace = true
terminal.workspace = true
ui.workspace = true
url.workspace = true

@@ -57,4 +56,3 @@ rand.workspace = true
tempfile.workspace = true
util.workspace = true
settings.workspace = true
zlog.workspace = true
Rust source hunks: AcpThread and its AgentConnection trait (hunks @@ -15,7 +15,7 @@ through @@ -317,10 +318,6 @@)

Two intertwined changes run through these hunks:
- `UserMessage`, `AgentThreadEntry`, and `AcpThread` gain a type parameter: `pub struct UserMessage<T>` with `chunks: Vec<acp::ContentBlock<T>>`, `pub enum AgentThreadEntry<T>`, a new `pub struct AnchoredText`, and `pub struct AcpThread<T = SharedString>` whose entries become `Vec<AgentThreadEntry<AnchoredText>>`; the matching impls and the `push_user_content_block`, `push_entry`, `user_message`, and `last_user_message` signatures pick up `<T>` accordingly.
- The action-log telemetry plumbing is stripped out: `use action_log::{ActionLog, ActionLogTelemetry}` shrinks to `use action_log::ActionLog`; the `impl From<&AcpThread> for ActionLogTelemetry` block, the `telemetry::event!("Agent Tool Call Completed", agent, session, status)` reporting in tool-call updates, and the `ActionLogTelemetry` value threaded into `reject_all_edits` are removed; and `fn telemetry_id(&self) -> &'static str` disappears from the `AgentConnection` trait and from the fake/stub test connections, while an `AgentTelemetry` trait with `agent_name()` and `thread_data(...)` remains.
- A test-setup hunk adds initialization calls so the block runs `SettingsStore::test`, `Project::init_settings(cx)`, and `language::init(cx)`.
@@ -20,7 +20,6 @@ futures.workspace = true
gpui.workspace = true
language.workspace = true
project.workspace = true
telemetry.workspace = true
text.workspace = true
util.workspace = true
watch.workspace = true
Rust source hunks: ActionLog (hunks @@ -3,9 +3,7 @@ through @@ -2396,4 +2488,61 @@)

The largest group of hunks reworks the action log:
- User-edit tracking is added: `TrackedBuffer` gains `last_seen_base: Rope` and `may_have_unnotified_user_edits: bool`, `track_buffer_internal` initializes them, and the diff-rebase task now also returns a `has_user_changes` flag that sets `may_have_unnotified_user_edits`. New methods appear: `latest_snapshot(&self, buffer)`, `unnotified_user_edits(&self, cx) -> Option<String>` (builds a unified diff of user edits made since the last read or notification, with `--- a/<path>` and `+++ b/<path>` headers and Windows paths normalized to forward slashes), and `flush_unnotified_user_edits(&mut self, cx)` (returns that patch and marks the tracked buffers as notified).
- Telemetry is removed: the `ActionLogTelemetry` struct, the `ActionLogMetrics` helper (lines added/removed plus buffer language), and the `telemetry_report_accepted_edits` / `telemetry_report_rejected_edits` functions are deleted, and `keep_edits_in_range`, `reject_edits_in_ranges`, `keep_all_edits`, and `reject_all_edits` lose their `telemetry: Option<ActionLogTelemetry>` parameter, with every test call site dropping the corresponding `None` argument. `reject_all_edits` also switches from `cx.background_spawn` to `cx.spawn`.
- New tests `test_user_edits_notifications` and `test_format_patch` exercise the patch output, asserting unified diffs such as:
    --- a/dir/test.txt
    +++ b/dir/test.txt
    @@ -1,3 +1,3 @@
     line 1
    -line 2
    +CHANGED
     line 3
@@ -63,6 +63,7 @@ streaming_diff.workspace = true
strsim.workspace = true
task.workspace = true
telemetry.workspace = true
terminal.workspace = true
text.workspace = true
thiserror.workspace = true
ui.workspace = true

@@ -6,6 +6,7 @@ mod native_agent_server;
pub mod outline;
mod templates;
mod thread;
mod tool_schema;
mod tools;

#[cfg(test)]
@@ -217,7 +218,7 @@ impl LanguageModels {
}
_ => {
log::error!(
"Failed to authenticate provider: {}: {err:#}",
"Failed to authenticate provider: {}: {err}",
provider_name.0
);
}
@@ -966,10 +967,6 @@ impl acp_thread::AgentModelSelector for NativeAgentModelSelector {
|
||||
}
|
||||
|
||||
impl acp_thread::AgentConnection for NativeAgentConnection {
|
||||
fn telemetry_id(&self) -> &'static str {
|
||||
"zed"
|
||||
}
|
||||
|
||||
fn new_thread(
|
||||
self: Rc<Self>,
|
||||
project: Entity<Project>,
|
||||
@@ -1110,6 +1107,10 @@ impl acp_thread::AgentConnection for NativeAgentConnection {
|
||||
}
|
||||
|
||||
impl acp_thread::AgentTelemetry for NativeAgentConnection {
|
||||
fn agent_name(&self) -> String {
|
||||
"Zed".into()
|
||||
}
|
||||
|
||||
fn thread_data(
|
||||
&self,
|
||||
session_id: &acp::SessionId,
|
||||
@@ -1626,7 +1627,9 @@ mod internal_tests {
|
||||
cx.update(|cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
|
||||
Project::init_settings(cx);
|
||||
agent_settings::init(cx);
|
||||
language::init(cx);
|
||||
LanguageModelRegistry::test(cx);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -1394,7 +1394,7 @@ mod tests {
|
||||
|
||||
async fn init_test(cx: &mut TestAppContext) -> EditAgent {
|
||||
cx.update(settings::init);
|
||||
|
||||
cx.update(Project::init_settings);
|
||||
let project = Project::test(FakeFs::new(cx.executor()), [], cx).await;
|
||||
let model = Arc::new(FakeLanguageModel::default());
|
||||
let action_log = cx.new(|_| ActionLog::new(project.clone()));
|
||||
|
||||
@@ -1468,9 +1468,14 @@ impl EditAgentTest {
|
||||
gpui_tokio::init(cx);
|
||||
let http_client = Arc::new(ReqwestClient::user_agent("agent tests").unwrap());
|
||||
cx.set_http_client(http_client);
|
||||
|
||||
client::init_settings(cx);
|
||||
let client = Client::production(cx);
|
||||
let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
|
||||
|
||||
settings::init(cx);
|
||||
Project::init_settings(cx);
|
||||
language::init(cx);
|
||||
language_model::init(client.clone(), cx);
|
||||
language_models::init(user_store, client.clone(), cx);
|
||||
});
|
||||
|
||||
@@ -88,6 +88,8 @@ mod tests {
|
||||
async |fs, project, cx| {
|
||||
let auth = cx.update(|cx| {
|
||||
prompt_store::init(cx);
|
||||
terminal::init(cx);
|
||||
|
||||
let registry = language_model::LanguageModelRegistry::read_global(cx);
|
||||
let auth = registry
|
||||
.provider(&language_model::ANTHROPIC_PROVIDER_ID)
|
||||
|
||||
@@ -1,6 +1,6 @@
use anyhow::Result;
use gpui::{AsyncApp, Entity};
use language::{Buffer, OutlineItem};
use language::{Buffer, OutlineItem, ParseStatus};
use regex::Regex;
use std::fmt::Write;
use text::Point;
@@ -30,9 +30,10 @@ pub async fn get_buffer_content_or_outline(
if file_size > AUTO_OUTLINE_SIZE {
// For large files, use outline instead of full content
// Wait until the buffer has been fully parsed, so we can read its outline
buffer
.read_with(cx, |buffer, _| buffer.parsing_idle())?
.await;
let mut parse_status = buffer.read_with(cx, |buffer, _| buffer.parse_status())?;
while *parse_status.borrow() != ParseStatus::Idle {
parse_status.changed().await?;
}

let outline_items = buffer.read_with(cx, |buffer, _| {
let snapshot = buffer.snapshot();

@@ -1851,6 +1851,7 @@ async fn test_agent_connection(cx: &mut TestAppContext) {
|
||||
// Initialize language model system with test provider
|
||||
cx.update(|cx| {
|
||||
gpui_tokio::init(cx);
|
||||
client::init_settings(cx);
|
||||
|
||||
let http_client = FakeHttpClient::with_404_response();
|
||||
let clock = Arc::new(clock::FakeSystemClock::new());
|
||||
@@ -1858,7 +1859,9 @@ async fn test_agent_connection(cx: &mut TestAppContext) {
|
||||
let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
|
||||
language_model::init(client.clone(), cx);
|
||||
language_models::init(user_store, client.clone(), cx);
|
||||
Project::init_settings(cx);
|
||||
LanguageModelRegistry::test(cx);
|
||||
agent_settings::init(cx);
|
||||
});
|
||||
cx.executor().forbid_parking();
|
||||
|
||||
@@ -2392,6 +2395,8 @@ async fn setup(cx: &mut TestAppContext, model: TestModel) -> ThreadTest {
|
||||
|
||||
cx.update(|cx| {
|
||||
settings::init(cx);
|
||||
Project::init_settings(cx);
|
||||
agent_settings::init(cx);
|
||||
|
||||
match model {
|
||||
TestModel::Fake => {}
|
||||
@@ -2399,6 +2404,7 @@ async fn setup(cx: &mut TestAppContext, model: TestModel) -> ThreadTest {
|
||||
gpui_tokio::init(cx);
|
||||
let http_client = ReqwestClient::user_agent("agent tests").unwrap();
|
||||
cx.set_http_client(Arc::new(http_client));
|
||||
client::init_settings(cx);
|
||||
let client = Client::production(cx);
|
||||
let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
|
||||
language_model::init(client.clone(), cx);
|
||||
|
||||
@@ -2139,7 +2139,7 @@ where
|
||||
|
||||
/// Returns the JSON schema that describes the tool's input.
|
||||
fn input_schema(format: LanguageModelToolSchemaFormat) -> Schema {
|
||||
language_model::tool_schema::root_schema_for::<Self::Input>(format)
|
||||
crate::tool_schema::root_schema_for::<Self::Input>(format)
|
||||
}
|
||||
|
||||
/// Some tools rely on a provider for the underlying billing or other reasons.
|
||||
@@ -2226,7 +2226,7 @@ where
|
||||
|
||||
fn input_schema(&self, format: LanguageModelToolSchemaFormat) -> Result<serde_json::Value> {
|
||||
let mut json = serde_json::to_value(T::input_schema(format))?;
|
||||
language_model::tool_schema::adapt_schema_to_format(&mut json, format)?;
|
||||
crate::tool_schema::adapt_schema_to_format(&mut json, format)?;
|
||||
Ok(json)
|
||||
}
|
||||
|
||||
|
||||
@@ -1,4 +1,5 @@
use anyhow::Result;
use language_model::LanguageModelToolSchemaFormat;
use schemars::{
JsonSchema, Schema,
generate::SchemaSettings,
@@ -6,16 +7,7 @@ use schemars::{
};
use serde_json::Value;

/// Indicates the format used to define the input schema for a language model tool.
#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash)]
pub enum LanguageModelToolSchemaFormat {
/// A JSON schema, see https://json-schema.org
JsonSchema,
/// A subset of an OpenAPI 3.0 schema object supported by Google AI, see https://ai.google.dev/api/caching#Schema
JsonSchemaSubset,
}

pub fn root_schema_for<T: JsonSchema>(format: LanguageModelToolSchemaFormat) -> Schema {
pub(crate) fn root_schema_for<T: JsonSchema>(format: LanguageModelToolSchemaFormat) -> Schema {
let mut generator = match format {
LanguageModelToolSchemaFormat::JsonSchema => SchemaSettings::draft07().into_generator(),
LanguageModelToolSchemaFormat::JsonSchemaSubset => SchemaSettings::openapi3()
@@ -165,7 +165,7 @@ impl AnyAgentTool for ContextServerTool {
|
||||
format: language_model::LanguageModelToolSchemaFormat,
|
||||
) -> Result<serde_json::Value> {
|
||||
let mut schema = self.tool.input_schema.clone();
|
||||
language_model::tool_schema::adapt_schema_to_format(&mut schema, format)?;
|
||||
crate::tool_schema::adapt_schema_to_format(&mut schema, format)?;
|
||||
Ok(match schema {
|
||||
serde_json::Value::Null => {
|
||||
serde_json::json!({ "type": "object", "properties": [] })
|
||||
|
||||
@@ -562,6 +562,7 @@ fn resolve_path(
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::{ContextServerRegistry, Templates};
|
||||
use client::TelemetrySettings;
|
||||
use fs::Fs;
|
||||
use gpui::{TestAppContext, UpdateGlobal};
|
||||
use language_model::fake_provider::FakeLanguageModel;
|
||||
@@ -1752,6 +1753,10 @@ mod tests {
|
||||
cx.update(|cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
language::init(cx);
|
||||
TelemetrySettings::register(cx);
|
||||
agent_settings::AgentSettings::register(cx);
|
||||
Project::init_settings(cx);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -246,6 +246,8 @@ mod test {
|
||||
cx.update(|cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
language::init(cx);
|
||||
Project::init_settings(cx);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -778,6 +778,8 @@ mod tests {
|
||||
cx.update(|cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
language::init(cx);
|
||||
Project::init_settings(cx);
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@@ -223,6 +223,8 @@ mod tests {
|
||||
cx.update(|cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
language::init(cx);
|
||||
Project::init_settings(cx);
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@@ -163,6 +163,8 @@ mod tests {
|
||||
cx.update(|cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
language::init(cx);
|
||||
Project::init_settings(cx);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -509,6 +509,8 @@ mod test {
|
||||
cx.update(|cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
language::init(cx);
|
||||
Project::init_settings(cx);
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@@ -21,6 +21,7 @@ acp_tools.workspace = true
|
||||
acp_thread.workspace = true
|
||||
action_log.workspace = true
|
||||
agent-client-protocol.workspace = true
|
||||
agent_settings.workspace = true
|
||||
anyhow.workspace = true
|
||||
async-trait.workspace = true
|
||||
client.workspace = true
|
||||
@@ -32,6 +33,7 @@ gpui.workspace = true
|
||||
gpui_tokio = { workspace = true, optional = true }
|
||||
http_client.workspace = true
|
||||
indoc.workspace = true
|
||||
language.workspace = true
|
||||
language_model.workspace = true
|
||||
language_models.workspace = true
|
||||
log.workspace = true
|
||||
|
||||
@@ -29,7 +29,6 @@ pub struct UnsupportedVersion;
|
||||
|
||||
pub struct AcpConnection {
|
||||
server_name: SharedString,
|
||||
telemetry_id: &'static str,
|
||||
connection: Rc<acp::ClientSideConnection>,
|
||||
sessions: Rc<RefCell<HashMap<acp::SessionId, AcpSession>>>,
|
||||
auth_methods: Vec<acp::AuthMethod>,
|
||||
@@ -53,7 +52,6 @@ pub struct AcpSession {
|
||||
|
||||
pub async fn connect(
|
||||
server_name: SharedString,
|
||||
telemetry_id: &'static str,
|
||||
command: AgentServerCommand,
|
||||
root_dir: &Path,
|
||||
default_mode: Option<acp::SessionModeId>,
|
||||
@@ -62,7 +60,6 @@ pub async fn connect(
|
||||
) -> Result<Rc<dyn AgentConnection>> {
|
||||
let conn = AcpConnection::stdio(
|
||||
server_name,
|
||||
telemetry_id,
|
||||
command.clone(),
|
||||
root_dir,
|
||||
default_mode,
|
||||
@@ -78,7 +75,6 @@ const MINIMUM_SUPPORTED_VERSION: acp::ProtocolVersion = acp::V1;
|
||||
impl AcpConnection {
|
||||
pub async fn stdio(
|
||||
server_name: SharedString,
|
||||
telemetry_id: &'static str,
|
||||
command: AgentServerCommand,
|
||||
root_dir: &Path,
|
||||
default_mode: Option<acp::SessionModeId>,
|
||||
@@ -203,7 +199,6 @@ impl AcpConnection {
|
||||
root_dir: root_dir.to_owned(),
|
||||
connection,
|
||||
server_name,
|
||||
telemetry_id,
|
||||
sessions,
|
||||
agent_capabilities: response.agent_capabilities,
|
||||
default_mode,
|
||||
@@ -231,10 +226,6 @@ impl Drop for AcpConnection {
|
||||
}
|
||||
|
||||
impl AgentConnection for AcpConnection {
|
||||
fn telemetry_id(&self) -> &'static str {
|
||||
self.telemetry_id
|
||||
}
|
||||
|
||||
fn new_thread(
|
||||
self: Rc<Self>,
|
||||
project: Entity<Project>,
|
||||
|
||||
@@ -62,7 +62,6 @@ impl AgentServer for ClaudeCode {
|
||||
cx: &mut App,
|
||||
) -> Task<Result<(Rc<dyn AgentConnection>, Option<task::SpawnInTerminal>)>> {
|
||||
let name = self.name();
|
||||
let telemetry_id = self.telemetry_id();
|
||||
let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().into_owned());
|
||||
let is_remote = delegate.project.read(cx).is_via_remote_server();
|
||||
let store = delegate.store.downgrade();
|
||||
@@ -86,7 +85,6 @@ impl AgentServer for ClaudeCode {
|
||||
.await?;
|
||||
let connection = crate::acp::connect(
|
||||
name,
|
||||
telemetry_id,
|
||||
command,
|
||||
root_dir.as_ref(),
|
||||
default_mode,
|
||||
|
||||
@@ -63,7 +63,6 @@ impl AgentServer for Codex {
|
||||
cx: &mut App,
|
||||
) -> Task<Result<(Rc<dyn AgentConnection>, Option<task::SpawnInTerminal>)>> {
|
||||
let name = self.name();
|
||||
let telemetry_id = self.telemetry_id();
|
||||
let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().into_owned());
|
||||
let is_remote = delegate.project.read(cx).is_via_remote_server();
|
||||
let store = delegate.store.downgrade();
|
||||
@@ -88,7 +87,6 @@ impl AgentServer for Codex {
|
||||
|
||||
let connection = crate::acp::connect(
|
||||
name,
|
||||
telemetry_id,
|
||||
command,
|
||||
root_dir.as_ref(),
|
||||
default_mode,
|
||||
|
||||
@@ -67,7 +67,6 @@ impl crate::AgentServer for CustomAgentServer {
|
||||
cx: &mut App,
|
||||
) -> Task<Result<(Rc<dyn AgentConnection>, Option<task::SpawnInTerminal>)>> {
|
||||
let name = self.name();
|
||||
let telemetry_id = self.telemetry_id();
|
||||
let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().into_owned());
|
||||
let is_remote = delegate.project.read(cx).is_via_remote_server();
|
||||
let default_mode = self.default_mode(cx);
|
||||
@@ -93,7 +92,6 @@ impl crate::AgentServer for CustomAgentServer {
|
||||
.await?;
|
||||
let connection = crate::acp::connect(
|
||||
name,
|
||||
telemetry_id,
|
||||
command,
|
||||
root_dir.as_ref(),
|
||||
default_mode,
|
||||
|
||||
@@ -6,9 +6,7 @@ use gpui::{AppContext, Entity, TestAppContext};
|
||||
use indoc::indoc;
|
||||
#[cfg(test)]
|
||||
use project::agent_server_store::BuiltinAgentServerSettings;
|
||||
use project::{FakeFs, Project};
|
||||
#[cfg(test)]
|
||||
use settings::Settings;
|
||||
use project::{FakeFs, Project, agent_server_store::AllAgentServersSettings};
|
||||
use std::{
|
||||
path::{Path, PathBuf},
|
||||
sync::Arc,
|
||||
@@ -454,22 +452,29 @@ pub use common_e2e_tests;
|
||||
// Helpers
|
||||
|
||||
pub async fn init_test(cx: &mut TestAppContext) -> Arc<FakeFs> {
|
||||
use settings::Settings;
|
||||
|
||||
env_logger::try_init().ok();
|
||||
|
||||
cx.update(|cx| {
|
||||
let settings_store = settings::SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
Project::init_settings(cx);
|
||||
language::init(cx);
|
||||
gpui_tokio::init(cx);
|
||||
let http_client = reqwest_client::ReqwestClient::user_agent("agent tests").unwrap();
|
||||
cx.set_http_client(Arc::new(http_client));
|
||||
client::init_settings(cx);
|
||||
let client = client::Client::production(cx);
|
||||
let user_store = cx.new(|cx| client::UserStore::new(client.clone(), cx));
|
||||
language_model::init(client.clone(), cx);
|
||||
language_models::init(user_store, client, cx);
|
||||
agent_settings::init(cx);
|
||||
AllAgentServersSettings::register(cx);
|
||||
|
||||
#[cfg(test)]
|
||||
project::agent_server_store::AllAgentServersSettings::override_global(
|
||||
project::agent_server_store::AllAgentServersSettings {
|
||||
AllAgentServersSettings::override_global(
|
||||
AllAgentServersSettings {
|
||||
claude: Some(BuiltinAgentServerSettings {
|
||||
path: Some("claude-code-acp".into()),
|
||||
args: None,
|
||||
|
||||
@@ -31,7 +31,6 @@ impl AgentServer for Gemini {
|
||||
cx: &mut App,
|
||||
) -> Task<Result<(Rc<dyn AgentConnection>, Option<task::SpawnInTerminal>)>> {
|
||||
let name = self.name();
|
||||
let telemetry_id = self.telemetry_id();
|
||||
let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().into_owned());
|
||||
let is_remote = delegate.project.read(cx).is_via_remote_server();
|
||||
let store = delegate.store.downgrade();
|
||||
@@ -65,7 +64,6 @@ impl AgentServer for Gemini {
|
||||
|
||||
let connection = crate::acp::connect(
|
||||
name,
|
||||
telemetry_id,
|
||||
command,
|
||||
root_dir.as_ref(),
|
||||
default_mode,
|
||||
|
||||
@@ -10,7 +10,7 @@ use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::{
|
||||
DefaultAgentView, DockPosition, LanguageModelParameters, LanguageModelSelection,
|
||||
NotifyWhenAgentWaiting, RegisterSetting, Settings,
|
||||
NotifyWhenAgentWaiting, Settings,
|
||||
};
|
||||
|
||||
pub use crate::agent_profile::*;
|
||||
@@ -19,7 +19,11 @@ pub const SUMMARIZE_THREAD_PROMPT: &str = include_str!("prompts/summarize_thread
|
||||
pub const SUMMARIZE_THREAD_DETAILED_PROMPT: &str =
|
||||
include_str!("prompts/summarize_thread_detailed_prompt.txt");
|
||||
|
||||
#[derive(Clone, Debug, RegisterSetting)]
|
||||
pub fn init(cx: &mut App) {
|
||||
AgentSettings::register(cx);
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct AgentSettings {
|
||||
pub enabled: bool,
|
||||
pub button: bool,
|
||||
|
||||
@@ -4,6 +4,7 @@ mod message_editor;
|
||||
mod mode_selector;
|
||||
mod model_selector;
|
||||
mod model_selector_popover;
|
||||
mod thread_editor;
|
||||
mod thread_history;
|
||||
mod thread_view;
|
||||
|
||||
|
||||
@@ -646,14 +646,16 @@ impl ContextPickerCompletionProvider {
|
||||
cx: &mut App,
|
||||
) -> Vec<ContextPickerEntry> {
|
||||
let embedded_context = self.prompt_capabilities.borrow().embedded_context;
|
||||
let mut entries = vec![
|
||||
ContextPickerEntry::Mode(ContextPickerMode::File),
|
||||
ContextPickerEntry::Mode(ContextPickerMode::Symbol),
|
||||
];
|
||||
|
||||
if embedded_context {
|
||||
entries.push(ContextPickerEntry::Mode(ContextPickerMode::Thread));
|
||||
}
|
||||
let mut entries = if embedded_context {
|
||||
vec![
|
||||
ContextPickerEntry::Mode(ContextPickerMode::File),
|
||||
ContextPickerEntry::Mode(ContextPickerMode::Symbol),
|
||||
ContextPickerEntry::Mode(ContextPickerMode::Thread),
|
||||
]
|
||||
} else {
|
||||
// File is always available, but we don't need a mode entry
|
||||
vec![]
|
||||
};
|
||||
|
||||
let has_selection = workspace
|
||||
.read(cx)
|
||||
|
||||
@@ -401,9 +401,10 @@ mod tests {
|
||||
use acp_thread::{AgentConnection, StubAgentConnection};
|
||||
use agent::HistoryStore;
|
||||
use agent_client_protocol as acp;
|
||||
use agent_settings::AgentSettings;
|
||||
use assistant_text_thread::TextThreadStore;
|
||||
use buffer_diff::{DiffHunkStatus, DiffHunkStatusKind};
|
||||
use editor::RowInfo;
|
||||
use editor::{EditorSettings, RowInfo};
|
||||
use fs::FakeFs;
|
||||
use gpui::{AppContext as _, SemanticVersion, TestAppContext};
|
||||
|
||||
@@ -412,7 +413,7 @@ mod tests {
|
||||
use pretty_assertions::assert_matches;
|
||||
use project::Project;
|
||||
use serde_json::json;
|
||||
use settings::SettingsStore;
|
||||
use settings::{Settings as _, SettingsStore};
|
||||
use util::path;
|
||||
use workspace::Workspace;
|
||||
|
||||
@@ -538,8 +539,13 @@ mod tests {
|
||||
cx.update(|cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
language::init(cx);
|
||||
Project::init_settings(cx);
|
||||
AgentSettings::register(cx);
|
||||
workspace::init_settings(cx);
|
||||
theme::init(theme::LoadThemes::JustBase, cx);
|
||||
release_channel::init(SemanticVersion::default(), cx);
|
||||
EditorSettings::register(cx);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -356,7 +356,7 @@ impl MessageEditor {
|
||||
|
||||
let task = match mention_uri.clone() {
|
||||
MentionUri::Fetch { url } => self.confirm_mention_for_fetch(url, cx),
|
||||
MentionUri::Directory { .. } => Task::ready(Ok(Mention::Link)),
|
||||
MentionUri::Directory { .. } => Task::ready(Ok(Mention::UriOnly)),
|
||||
MentionUri::Thread { id, .. } => self.confirm_mention_for_thread(id, cx),
|
||||
MentionUri::TextThread { path, .. } => self.confirm_mention_for_text_thread(path, cx),
|
||||
MentionUri::File { abs_path } => self.confirm_mention_for_file(abs_path, cx),
|
||||
@@ -373,6 +373,7 @@ impl MessageEditor {
|
||||
)))
|
||||
}
|
||||
MentionUri::Selection { .. } => {
|
||||
// Handled elsewhere
|
||||
debug_panic!("unexpected selection URI");
|
||||
Task::ready(Err(anyhow!("unexpected selection URI")))
|
||||
}
|
||||
@@ -703,21 +704,20 @@ impl MessageEditor {
|
||||
return Task::ready(Err(err));
|
||||
}
|
||||
|
||||
let contents = self
|
||||
.mention_set
|
||||
.contents(full_mention_content, self.project.clone(), cx);
|
||||
let contents = self.mention_set.contents(
|
||||
&self.prompt_capabilities.borrow(),
|
||||
full_mention_content,
|
||||
self.project.clone(),
|
||||
cx,
|
||||
);
|
||||
let editor = self.editor.clone();
|
||||
let supports_embedded_context = self.prompt_capabilities.borrow().embedded_context;
|
||||
|
||||
cx.spawn(async move |_, cx| {
|
||||
let contents = contents.await?;
|
||||
let mut all_tracked_buffers = Vec::new();
|
||||
|
||||
let result = editor.update(cx, |editor, cx| {
|
||||
let (mut ix, _) = text
|
||||
.char_indices()
|
||||
.find(|(_, c)| !c.is_whitespace())
|
||||
.unwrap_or((0, '\0'));
|
||||
let mut ix = text.chars().position(|c| !c.is_whitespace()).unwrap_or(0);
|
||||
let mut chunks: Vec<acp::ContentBlock> = Vec::new();
|
||||
let text = editor.text(cx);
|
||||
editor.display_map.update(cx, |map, cx| {
|
||||
@@ -738,32 +738,18 @@ impl MessageEditor {
|
||||
tracked_buffers,
|
||||
} => {
|
||||
all_tracked_buffers.extend(tracked_buffers.iter().cloned());
|
||||
if supports_embedded_context {
|
||||
acp::ContentBlock::Resource(acp::EmbeddedResource {
|
||||
annotations: None,
|
||||
resource:
|
||||
acp::EmbeddedResourceResource::TextResourceContents(
|
||||
acp::TextResourceContents {
|
||||
mime_type: None,
|
||||
text: content.clone(),
|
||||
uri: uri.to_uri().to_string(),
|
||||
meta: None,
|
||||
},
|
||||
),
|
||||
meta: None,
|
||||
})
|
||||
} else {
|
||||
acp::ContentBlock::ResourceLink(acp::ResourceLink {
|
||||
name: uri.name(),
|
||||
uri: uri.to_uri().to_string(),
|
||||
annotations: None,
|
||||
description: None,
|
||||
mime_type: None,
|
||||
size: None,
|
||||
title: None,
|
||||
meta: None,
|
||||
})
|
||||
}
|
||||
acp::ContentBlock::Resource(acp::EmbeddedResource {
|
||||
annotations: None,
|
||||
resource: acp::EmbeddedResourceResource::TextResourceContents(
|
||||
acp::TextResourceContents {
|
||||
mime_type: None,
|
||||
text: content.clone(),
|
||||
uri: uri.to_uri().to_string(),
|
||||
meta: None,
|
||||
},
|
||||
),
|
||||
meta: None,
|
||||
})
|
||||
}
|
||||
Mention::Image(mention_image) => {
|
||||
let uri = match uri {
|
||||
@@ -785,16 +771,18 @@ impl MessageEditor {
|
||||
meta: None,
|
||||
})
|
||||
}
|
||||
Mention::Link => acp::ContentBlock::ResourceLink(acp::ResourceLink {
|
||||
name: uri.name(),
|
||||
uri: uri.to_uri().to_string(),
|
||||
annotations: None,
|
||||
description: None,
|
||||
mime_type: None,
|
||||
size: None,
|
||||
title: None,
|
||||
meta: None,
|
||||
}),
|
||||
Mention::UriOnly => {
|
||||
acp::ContentBlock::ResourceLink(acp::ResourceLink {
|
||||
name: uri.name(),
|
||||
uri: uri.to_uri().to_string(),
|
||||
annotations: None,
|
||||
description: None,
|
||||
mime_type: None,
|
||||
size: None,
|
||||
title: None,
|
||||
meta: None,
|
||||
})
|
||||
}
|
||||
};
|
||||
chunks.push(chunk);
|
||||
ix = crease_range.end;
|
||||
@@ -1123,7 +1111,7 @@ impl MessageEditor {
|
||||
let start = text.len();
|
||||
write!(&mut text, "{}", mention_uri.as_link()).ok();
|
||||
let end = text.len();
|
||||
mentions.push((start..end, mention_uri, Mention::Link));
|
||||
mentions.push((start..end, mention_uri, Mention::UriOnly));
|
||||
}
|
||||
}
|
||||
acp::ContentBlock::Image(acp::ImageContent {
|
||||
@@ -1529,7 +1517,7 @@ pub enum Mention {
|
||||
tracked_buffers: Vec<Entity<Buffer>>,
|
||||
},
|
||||
Image(MentionImage),
|
||||
Link,
|
||||
UriOnly,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Eq, PartialEq)]
|
||||
@@ -1546,10 +1534,21 @@ pub struct MentionSet {
|
||||
impl MentionSet {
|
||||
fn contents(
|
||||
&self,
|
||||
prompt_capabilities: &acp::PromptCapabilities,
|
||||
full_mention_content: bool,
|
||||
project: Entity<Project>,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<HashMap<CreaseId, (MentionUri, Mention)>>> {
|
||||
if !prompt_capabilities.embedded_context {
|
||||
let mentions = self
|
||||
.mentions
|
||||
.iter()
|
||||
.map(|(crease_id, (uri, _))| (*crease_id, (uri.clone(), Mention::UriOnly)))
|
||||
.collect();
|
||||
|
||||
return Task::ready(Ok(mentions));
|
||||
}
|
||||
|
||||
let mentions = self.mentions.clone();
|
||||
cx.spawn(async move |cx| {
|
||||
let mut contents = HashMap::default();
|
||||
@@ -1899,8 +1898,10 @@ mod tests {
|
||||
let app_state = cx.update(AppState::test);
|
||||
|
||||
cx.update(|cx| {
|
||||
language::init(cx);
|
||||
editor::init(cx);
|
||||
workspace::init(app_state.clone(), cx);
|
||||
Project::init_settings(cx);
|
||||
});
|
||||
|
||||
let project = Project::test(app_state.fs.clone(), [path!("/dir").as_ref()], cx).await;
|
||||
@@ -2073,8 +2074,10 @@ mod tests {
|
||||
let app_state = cx.update(AppState::test);
|
||||
|
||||
cx.update(|cx| {
|
||||
language::init(cx);
|
||||
editor::init(cx);
|
||||
workspace::init(app_state.clone(), cx);
|
||||
Project::init_settings(cx);
|
||||
});
|
||||
|
||||
app_state
|
||||
@@ -2199,8 +2202,6 @@ mod tests {
|
||||
format!("seven.txt b{slash}"),
|
||||
format!("six.txt b{slash}"),
|
||||
format!("five.txt b{slash}"),
|
||||
"Files & Directories".into(),
|
||||
"Symbols".into()
|
||||
]
|
||||
);
|
||||
editor.set_text("", window, cx);
|
||||
@@ -2285,11 +2286,21 @@ mod tests {
|
||||
assert_eq!(fold_ranges(editor, cx).len(), 1);
|
||||
});
|
||||
|
||||
let all_prompt_capabilities = acp::PromptCapabilities {
|
||||
image: true,
|
||||
audio: true,
|
||||
embedded_context: true,
|
||||
meta: None,
|
||||
};
|
||||
|
||||
let contents = message_editor
|
||||
.update(&mut cx, |message_editor, cx| {
|
||||
message_editor
|
||||
.mention_set()
|
||||
.contents(false, project.clone(), cx)
|
||||
message_editor.mention_set().contents(
|
||||
&all_prompt_capabilities,
|
||||
false,
|
||||
project.clone(),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
@@ -2307,6 +2318,30 @@ mod tests {
|
||||
);
|
||||
}
|
||||
|
||||
let contents = message_editor
|
||||
.update(&mut cx, |message_editor, cx| {
|
||||
message_editor.mention_set().contents(
|
||||
&acp::PromptCapabilities::default(),
|
||||
false,
|
||||
project.clone(),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
.into_values()
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
{
|
||||
let [(uri, Mention::UriOnly)] = contents.as_slice() else {
|
||||
panic!("Unexpected mentions");
|
||||
};
|
||||
pretty_assertions::assert_eq!(
|
||||
uri,
|
||||
&MentionUri::parse(&url_one, PathStyle::local()).unwrap()
|
||||
);
|
||||
}
|
||||
|
||||
cx.simulate_input(" ");
|
||||
|
||||
editor.update(&mut cx, |editor, cx| {
|
||||
@@ -2342,9 +2377,12 @@ mod tests {
|
||||
|
||||
let contents = message_editor
|
||||
.update(&mut cx, |message_editor, cx| {
|
||||
message_editor
|
||||
.mention_set()
|
||||
.contents(false, project.clone(), cx)
|
||||
message_editor.mention_set().contents(
|
||||
&all_prompt_capabilities,
|
||||
false,
|
||||
project.clone(),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
@@ -2465,9 +2503,12 @@ mod tests {
|
||||
|
||||
let contents = message_editor
|
||||
.update(&mut cx, |message_editor, cx| {
|
||||
message_editor
|
||||
.mention_set()
|
||||
.contents(false, project.clone(), cx)
|
||||
message_editor.mention_set().contents(
|
||||
&all_prompt_capabilities,
|
||||
false,
|
||||
project.clone(),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
@@ -2513,9 +2554,12 @@ mod tests {
|
||||
// Getting the message contents fails
|
||||
message_editor
|
||||
.update(&mut cx, |message_editor, cx| {
|
||||
message_editor
|
||||
.mention_set()
|
||||
.contents(false, project.clone(), cx)
|
||||
message_editor.mention_set().contents(
|
||||
&all_prompt_capabilities,
|
||||
false,
|
||||
project.clone(),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.expect_err("Should fail to load x.png");
|
||||
@@ -2566,9 +2610,12 @@ mod tests {
|
||||
// Now getting the contents succeeds, because the invalid mention was removed
|
||||
let contents = message_editor
|
||||
.update(&mut cx, |message_editor, cx| {
|
||||
message_editor
|
||||
.mention_set()
|
||||
.contents(false, project.clone(), cx)
|
||||
message_editor.mention_set().contents(
|
||||
&all_prompt_capabilities,
|
||||
false,
|
||||
project.clone(),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
@@ -2832,7 +2879,7 @@ mod tests {
|
||||
cx.run_until_parked();
|
||||
|
||||
editor.update_in(cx, |editor, window, cx| {
|
||||
editor.set_text(" \u{A0}してhello world ", window, cx);
|
||||
editor.set_text(" hello world ", window, cx);
|
||||
});
|
||||
|
||||
let (content, _) = message_editor
|
||||
@@ -2843,154 +2890,13 @@ mod tests {
|
||||
assert_eq!(
|
||||
content,
|
||||
vec![acp::ContentBlock::Text(acp::TextContent {
|
||||
text: "してhello world".into(),
|
||||
text: "hello world".into(),
|
||||
annotations: None,
|
||||
meta: None
|
||||
})]
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_editor_respects_embedded_context_capability(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
|
||||
let file_content = "fn main() { println!(\"Hello, world!\"); }\n";
|
||||
|
||||
fs.insert_tree(
|
||||
"/project",
|
||||
json!({
|
||||
"src": {
|
||||
"main.rs": file_content,
|
||||
}
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
|
||||
let project = Project::test(fs, [Path::new(path!("/project"))], cx).await;
|
||||
|
||||
let (workspace, cx) =
|
||||
cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx));
|
||||
|
||||
let text_thread_store = cx.new(|cx| TextThreadStore::fake(project.clone(), cx));
|
||||
let history_store = cx.new(|cx| HistoryStore::new(text_thread_store, cx));
|
||||
|
||||
let (message_editor, editor) = workspace.update_in(cx, |workspace, window, cx| {
|
||||
let workspace_handle = cx.weak_entity();
|
||||
let message_editor = cx.new(|cx| {
|
||||
MessageEditor::new(
|
||||
workspace_handle,
|
||||
project.clone(),
|
||||
history_store.clone(),
|
||||
None,
|
||||
Default::default(),
|
||||
Default::default(),
|
||||
"Test Agent".into(),
|
||||
"Test",
|
||||
EditorMode::AutoHeight {
|
||||
max_lines: None,
|
||||
min_lines: 1,
|
||||
},
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
});
|
||||
workspace.active_pane().update(cx, |pane, cx| {
|
||||
pane.add_item(
|
||||
Box::new(cx.new(|_| MessageEditorItem(message_editor.clone()))),
|
||||
true,
|
||||
true,
|
||||
None,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
});
|
||||
message_editor.read(cx).focus_handle(cx).focus(window);
|
||||
let editor = message_editor.read(cx).editor().clone();
|
||||
(message_editor, editor)
|
||||
});
|
||||
|
||||
cx.simulate_input("What is in @file main");
|
||||
|
||||
editor.update_in(cx, |editor, window, cx| {
|
||||
assert!(editor.has_visible_completions_menu());
|
||||
assert_eq!(editor.text(cx), "What is in @file main");
|
||||
editor.confirm_completion(&editor::actions::ConfirmCompletion::default(), window, cx);
|
||||
});
|
||||
|
||||
let content = message_editor
|
||||
.update(cx, |editor, cx| editor.contents(false, cx))
|
||||
.await
|
||||
.unwrap()
|
||||
.0;
|
||||
|
||||
let main_rs_uri = if cfg!(windows) {
|
||||
"file:///C:/project/src/main.rs".to_string()
|
||||
} else {
|
||||
"file:///project/src/main.rs".to_string()
|
||||
};
|
||||
|
||||
// When embedded context is `false` we should get a resource link
|
||||
pretty_assertions::assert_eq!(
|
||||
content,
|
||||
vec![
|
||||
acp::ContentBlock::Text(acp::TextContent {
|
||||
text: "What is in ".to_string(),
|
||||
annotations: None,
|
||||
meta: None
|
||||
}),
|
||||
acp::ContentBlock::ResourceLink(acp::ResourceLink {
|
||||
uri: main_rs_uri.clone(),
|
||||
name: "main.rs".to_string(),
|
||||
annotations: None,
|
||||
meta: None,
|
||||
description: None,
|
||||
mime_type: None,
|
||||
size: None,
|
||||
title: None,
|
||||
})
|
||||
]
|
||||
);
|
||||
|
||||
message_editor.update(cx, |editor, _cx| {
|
||||
editor.prompt_capabilities.replace(acp::PromptCapabilities {
|
||||
embedded_context: true,
|
||||
..Default::default()
|
||||
})
|
||||
});
|
||||
|
||||
let content = message_editor
|
||||
.update(cx, |editor, cx| editor.contents(false, cx))
|
||||
.await
|
||||
.unwrap()
|
||||
.0;
|
||||
|
||||
// When embedded context is `true` we should get a resource
|
||||
pretty_assertions::assert_eq!(
|
||||
content,
|
||||
vec![
|
||||
acp::ContentBlock::Text(acp::TextContent {
|
||||
text: "What is in ".to_string(),
|
||||
annotations: None,
|
||||
meta: None
|
||||
}),
|
||||
acp::ContentBlock::Resource(acp::EmbeddedResource {
|
||||
resource: acp::EmbeddedResourceResource::TextResourceContents(
|
||||
acp::TextResourceContents {
|
||||
text: file_content.to_string(),
|
||||
uri: main_rs_uri,
|
||||
mime_type: None,
|
||||
meta: None
|
||||
}
|
||||
),
|
||||
annotations: None,
|
||||
meta: None
|
||||
})
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_autoscroll_after_insert_selections(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
@@ -2998,8 +2904,10 @@ mod tests {
|
||||
let app_state = cx.update(AppState::test);
|
||||
|
||||
cx.update(|cx| {
|
||||
language::init(cx);
|
||||
editor::init(cx);
|
||||
workspace::init(app_state.clone(), cx);
|
||||
Project::init_settings(cx);
|
||||
});
|
||||
|
||||
app_state
|
||||
|
||||
@@ -1,6 +1,6 @@
use std::rc::Rc;

use acp_thread::{AgentModelInfo, AgentModelSelector};
use acp_thread::AgentModelSelector;
use gpui::{Entity, FocusHandle};
use picker::popover_menu::PickerPopoverMenu;
use ui::{
@@ -36,8 +36,12 @@ impl AcpModelSelectorPopover {
self.menu_handle.toggle(window, cx);
}

pub fn active_model<'a>(&self, cx: &'a App) -> Option<&'a AgentModelInfo> {
self.selector.read(cx).delegate.active_model()
pub fn active_model_name(&self, cx: &App) -> Option<SharedString> {
self.selector
.read(cx)
.delegate
.active_model()
.map(|model| model.name.clone())
}
}

|
||||
6
crates/agent_ui/src/acp/thread_editor.rs
Normal file
6
crates/agent_ui/src/acp/thread_editor.rs
Normal file
@@ -0,0 +1,6 @@
|
||||
use acp_thread::AcpThread;
|
||||
use gpui::Entity;
|
||||
|
||||
pub struct ThreadEditor {
|
||||
thread: Entity<AcpThread>,
|
||||
}
|
||||
@@ -4,12 +4,12 @@ use acp_thread::{
|
||||
ToolCallStatus, UserMessageId,
|
||||
};
|
||||
use acp_thread::{AgentConnection, Plan};
|
||||
use action_log::{ActionLog, ActionLogTelemetry};
|
||||
use action_log::ActionLog;
|
||||
use agent::{DbThreadMetadata, HistoryEntry, HistoryEntryId, HistoryStore, NativeAgentServer};
|
||||
use agent_client_protocol::{self as acp, PromptCapabilities};
|
||||
use agent_servers::{AgentServer, AgentServerDelegate};
|
||||
use agent_settings::{AgentProfileId, AgentSettings, CompletionMode};
|
||||
use anyhow::{Result, anyhow};
|
||||
use anyhow::{Result, anyhow, bail};
|
||||
use arrayvec::ArrayVec;
|
||||
use audio::{Audio, Sound};
|
||||
use buffer_diff::BufferDiff;
|
||||
@@ -169,7 +169,7 @@ impl ThreadFeedbackState {
|
||||
}
|
||||
}
|
||||
let session_id = thread.read(cx).session_id().clone();
|
||||
let agent = thread.read(cx).connection().telemetry_id();
|
||||
let agent_name = telemetry.agent_name();
|
||||
let task = telemetry.thread_data(&session_id, cx);
|
||||
let rating = match feedback {
|
||||
ThreadFeedback::Positive => "positive",
|
||||
@@ -179,9 +179,9 @@ impl ThreadFeedbackState {
|
||||
let thread = task.await?;
|
||||
telemetry::event!(
|
||||
"Agent Thread Rated",
|
||||
agent = agent,
|
||||
session_id = session_id,
|
||||
rating = rating,
|
||||
agent = agent_name,
|
||||
thread = thread
|
||||
);
|
||||
anyhow::Ok(())
|
||||
@@ -206,15 +206,15 @@ impl ThreadFeedbackState {
|
||||
self.comments_editor.take();
|
||||
|
||||
let session_id = thread.read(cx).session_id().clone();
|
||||
let agent = thread.read(cx).connection().telemetry_id();
|
||||
let agent_name = telemetry.agent_name();
|
||||
let task = telemetry.thread_data(&session_id, cx);
|
||||
cx.background_spawn(async move {
|
||||
let thread = task.await?;
|
||||
telemetry::event!(
|
||||
"Agent Thread Feedback Comments",
|
||||
agent = agent,
|
||||
session_id = session_id,
|
||||
comments = comments,
|
||||
agent = agent_name,
|
||||
thread = thread
|
||||
);
|
||||
anyhow::Ok(())
|
||||
@@ -294,6 +294,7 @@ pub struct AcpThreadView {
|
||||
resume_thread_metadata: Option<DbThreadMetadata>,
|
||||
_cancel_task: Option<Task<()>>,
|
||||
_subscriptions: [Subscription; 5],
|
||||
#[cfg(target_os = "windows")]
|
||||
show_codex_windows_warning: bool,
|
||||
}
|
||||
|
||||
@@ -400,6 +401,7 @@ impl AcpThreadView {
|
||||
),
|
||||
];
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
let show_codex_windows_warning = crate::ExternalAgent::parse_built_in(agent.as_ref())
|
||||
== Some(crate::ExternalAgent::Codex);
|
||||
|
||||
@@ -445,6 +447,7 @@ impl AcpThreadView {
|
||||
focus_handle: cx.focus_handle(),
|
||||
new_server_version_available: None,
|
||||
resume_thread_metadata: resume_thread,
|
||||
#[cfg(target_os = "windows")]
|
||||
show_codex_windows_warning,
|
||||
}
|
||||
}
|
||||
@@ -538,7 +541,14 @@ impl AcpThreadView {
|
||||
})
|
||||
.log_err()
|
||||
} else {
|
||||
let root_dir = root_dir.unwrap_or(paths::home_dir().as_path().into());
|
||||
let root_dir = if let Some(acp_agent) = connection
|
||||
.clone()
|
||||
.downcast::<agent_servers::AcpConnection>()
|
||||
{
|
||||
acp_agent.root_dir().into()
|
||||
} else {
|
||||
root_dir.unwrap_or(paths::home_dir().as_path().into())
|
||||
};
|
||||
cx.update(|_, cx| {
|
||||
connection
|
||||
.clone()
|
||||
@@ -1123,6 +1133,8 @@ impl AcpThreadView {
|
||||
message_editor.contents(full_mention_content, cx)
|
||||
});
|
||||
|
||||
let agent_telemetry_id = self.agent.telemetry_id();
|
||||
|
||||
self.thread_error.take();
|
||||
self.editing_message.take();
|
||||
self.thread_feedback.clear();
|
||||
@@ -1130,8 +1142,6 @@ impl AcpThreadView {
|
||||
let Some(thread) = self.thread() else {
|
||||
return;
|
||||
};
|
||||
let agent_telemetry_id = self.agent.telemetry_id();
|
||||
let session_id = thread.read(cx).session_id().clone();
|
||||
let thread = thread.downgrade();
|
||||
if self.should_be_following {
|
||||
self.workspace
|
||||
@@ -1142,7 +1152,6 @@ impl AcpThreadView {
|
||||
}
|
||||
|
||||
self.is_loading_contents = true;
|
||||
let model_id = self.current_model_id(cx);
|
||||
let guard = cx.new(|_| ());
|
||||
cx.observe_release(&guard, |this, _guard, cx| {
|
||||
this.is_loading_contents = false;
|
||||
@@ -1164,7 +1173,6 @@ impl AcpThreadView {
|
||||
message_editor.clear(window, cx);
|
||||
});
|
||||
})?;
|
||||
let turn_start_time = Instant::now();
|
||||
let send = thread.update(cx, |thread, cx| {
|
||||
thread.action_log().update(cx, |action_log, cx| {
|
||||
for buffer in tracked_buffers {
|
||||
@@ -1173,27 +1181,11 @@ impl AcpThreadView {
|
||||
});
|
||||
drop(guard);
|
||||
|
||||
telemetry::event!(
|
||||
"Agent Message Sent",
|
||||
agent = agent_telemetry_id,
|
||||
session = session_id,
|
||||
model = model_id
|
||||
);
|
||||
telemetry::event!("Agent Message Sent", agent = agent_telemetry_id);
|
||||
|
||||
thread.send(contents, cx)
|
||||
})?;
|
||||
let res = send.await;
|
||||
let turn_time_ms = turn_start_time.elapsed().as_millis();
|
||||
let status = if res.is_ok() { "success" } else { "failure" };
|
||||
telemetry::event!(
|
||||
"Agent Turn Completed",
|
||||
agent = agent_telemetry_id,
|
||||
session = session_id,
|
||||
model = model_id,
|
||||
status,
|
||||
turn_time_ms,
|
||||
);
|
||||
res
|
||||
send.await
|
||||
});
|
||||
|
||||
cx.spawn(async move |this, cx| {
|
||||
@@ -1395,7 +1387,7 @@ impl AcpThreadView {
|
||||
AcpThreadEvent::Refusal => {
|
||||
self.thread_retry_status.take();
|
||||
self.thread_error = Some(ThreadError::Refusal);
|
||||
let model_or_agent_name = self.current_model_name(cx);
|
||||
let model_or_agent_name = self.get_current_model_name(cx);
|
||||
let notification_message =
|
||||
format!("{} refused to respond to this request", model_or_agent_name);
|
||||
self.notify_with_sound(¬ification_message, IconName::Warning, window, cx);
|
||||
@@ -1864,14 +1856,6 @@ impl AcpThreadView {
|
||||
let Some(thread) = self.thread() else {
|
||||
return;
|
||||
};
|
||||
|
||||
telemetry::event!(
|
||||
"Agent Tool Call Authorized",
|
||||
agent = self.agent.telemetry_id(),
|
||||
session = thread.read(cx).session_id(),
|
||||
option = option_kind
|
||||
);
|
||||
|
||||
thread.update(cx, |thread, cx| {
|
||||
thread.authorize_tool_call(tool_call_id, option_id, option_kind, cx);
|
||||
});
|
||||
@@ -3604,7 +3588,6 @@ impl AcpThreadView {
|
||||
) -> Option<AnyElement> {
|
||||
let thread = thread_entity.read(cx);
|
||||
let action_log = thread.action_log();
|
||||
let telemetry = ActionLogTelemetry::from(thread);
|
||||
let changed_buffers = action_log.read(cx).changed_buffers(cx);
|
||||
let plan = thread.plan();
|
||||
|
||||
@@ -3652,7 +3635,6 @@ impl AcpThreadView {
|
||||
.when(self.edits_expanded, |parent| {
|
||||
parent.child(self.render_edited_files(
|
||||
action_log,
|
||||
telemetry,
|
||||
&changed_buffers,
|
||||
pending_edits,
|
||||
cx,
|
||||
@@ -3933,7 +3915,6 @@ impl AcpThreadView {
|
||||
fn render_edited_files(
|
||||
&self,
|
||||
action_log: &Entity<ActionLog>,
|
||||
telemetry: ActionLogTelemetry,
|
||||
changed_buffers: &BTreeMap<Entity<Buffer>, Entity<BufferDiff>>,
|
||||
pending_edits: bool,
|
||||
cx: &Context<Self>,
|
||||
@@ -4053,14 +4034,12 @@ impl AcpThreadView {
|
||||
.on_click({
|
||||
let buffer = buffer.clone();
|
||||
let action_log = action_log.clone();
|
||||
let telemetry = telemetry.clone();
|
||||
move |_, _, cx| {
|
||||
action_log.update(cx, |action_log, cx| {
|
||||
action_log
|
||||
.reject_edits_in_ranges(
|
||||
buffer.clone(),
|
||||
vec![Anchor::MIN..Anchor::MAX],
|
||||
Some(telemetry.clone()),
|
||||
cx,
|
||||
)
|
||||
.detach_and_log_err(cx);
|
||||
@@ -4075,13 +4054,11 @@ impl AcpThreadView {
|
||||
.on_click({
|
||||
let buffer = buffer.clone();
|
||||
let action_log = action_log.clone();
|
||||
let telemetry = telemetry.clone();
|
||||
move |_, _, cx| {
|
||||
action_log.update(cx, |action_log, cx| {
|
||||
action_log.keep_edits_in_range(
|
||||
buffer.clone(),
|
||||
Anchor::MIN..Anchor::MAX,
|
||||
Some(telemetry.clone()),
|
||||
cx,
|
||||
);
|
||||
})
|
||||
@@ -4297,23 +4274,17 @@ impl AcpThreadView {
|
||||
let Some(thread) = self.thread() else {
|
||||
return;
|
||||
};
|
||||
let telemetry = ActionLogTelemetry::from(thread.read(cx));
|
||||
let action_log = thread.read(cx).action_log().clone();
|
||||
action_log.update(cx, |action_log, cx| {
|
||||
action_log.keep_all_edits(Some(telemetry), cx)
|
||||
});
|
||||
action_log.update(cx, |action_log, cx| action_log.keep_all_edits(cx));
|
||||
}
|
||||
|
||||
fn reject_all(&mut self, _: &RejectAll, _window: &mut Window, cx: &mut Context<Self>) {
|
||||
let Some(thread) = self.thread() else {
|
||||
return;
|
||||
};
|
||||
let telemetry = ActionLogTelemetry::from(thread.read(cx));
|
||||
let action_log = thread.read(cx).action_log().clone();
|
||||
action_log
|
||||
.update(cx, |action_log, cx| {
|
||||
action_log.reject_all_edits(Some(telemetry), cx)
|
||||
})
|
||||
.update(cx, |action_log, cx| action_log.reject_all_edits(cx))
|
||||
.detach();
|
||||
}
|
||||
|
||||
@@ -4709,36 +4680,35 @@ impl AcpThreadView {
|
||||
.languages
|
||||
.language_for_name("Markdown");
|
||||
|
||||
let (thread_title, markdown) = if let Some(thread) = self.thread() {
|
||||
let (thread_summary, markdown) = if let Some(thread) = self.thread() {
|
||||
let thread = thread.read(cx);
|
||||
(thread.title().to_string(), thread.to_markdown(cx))
|
||||
} else {
|
||||
return Task::ready(Ok(()));
|
||||
};
|
||||
|
||||
let project = workspace.read(cx).project().clone();
|
||||
window.spawn(cx, async move |cx| {
|
||||
let markdown_language = markdown_language_task.await?;
|
||||
|
||||
let buffer = project
|
||||
.update(cx, |project, cx| project.create_buffer(false, cx))?
|
||||
.await?;
|
||||
|
||||
buffer.update(cx, |buffer, cx| {
|
||||
buffer.set_text(markdown, cx);
|
||||
buffer.set_language(Some(markdown_language), cx);
|
||||
buffer.set_capability(language::Capability::ReadOnly, cx);
|
||||
})?;
|
||||
|
||||
workspace.update_in(cx, |workspace, window, cx| {
|
||||
let buffer = cx
|
||||
.new(|cx| MultiBuffer::singleton(buffer, cx).with_title(thread_title.clone()));
|
||||
let project = workspace.project().clone();
|
||||
|
||||
if !project.read(cx).is_local() {
|
||||
bail!("failed to open active thread as markdown in remote project");
|
||||
}
|
||||
|
||||
let buffer = project.update(cx, |project, cx| {
|
||||
project.create_local_buffer(&markdown, Some(markdown_language), true, cx)
|
||||
});
|
||||
let buffer = cx.new(|cx| {
|
||||
MultiBuffer::singleton(buffer, cx).with_title(thread_summary.clone())
|
||||
});
|
||||
|
||||
workspace.add_item_to_active_pane(
|
||||
Box::new(cx.new(|cx| {
|
||||
let mut editor =
|
||||
Editor::for_multibuffer(buffer, Some(project.clone()), window, cx);
|
||||
editor.set_breadcrumb_header(thread_title);
|
||||
editor.set_breadcrumb_header(thread_summary);
|
||||
editor
|
||||
})),
|
||||
None,
|
||||
@@ -4746,7 +4716,9 @@ impl AcpThreadView {
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
})?;
|
||||
|
||||
anyhow::Ok(())
|
||||
})??;
|
||||
anyhow::Ok(())
|
||||
})
|
||||
}
|
||||
@@ -5280,6 +5252,7 @@ impl AcpThreadView {
|
||||
)
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
fn render_codex_windows_warning(&self, cx: &mut Context<Self>) -> Option<Callout> {
|
||||
if self.show_codex_windows_warning {
|
||||
Some(
|
||||
@@ -5295,9 +5268,8 @@ impl AcpThreadView {
|
||||
.icon_size(IconSize::Small)
|
||||
.icon_color(Color::Muted)
|
||||
.on_click(cx.listener({
|
||||
move |_, _, _window, cx| {
|
||||
#[cfg(windows)]
|
||||
_window.dispatch_action(
|
||||
move |_, _, window, cx| {
|
||||
window.dispatch_action(
|
||||
zed_actions::wsl_actions::OpenWsl::default().boxed_clone(),
|
||||
cx,
|
||||
);
|
||||
@@ -5372,21 +5344,20 @@ impl AcpThreadView {
|
||||
)
|
||||
}
|
||||
|
||||
fn current_model_id(&self, cx: &App) -> Option<String> {
|
||||
self.model_selector
|
||||
.as_ref()
|
||||
.and_then(|selector| selector.read(cx).active_model(cx).map(|m| m.id.to_string()))
|
||||
}
|
||||
|
||||
fn current_model_name(&self, cx: &App) -> SharedString {
|
||||
fn get_current_model_name(&self, cx: &App) -> SharedString {
|
||||
// For native agent (Zed Agent), use the specific model name (e.g., "Claude 3.5 Sonnet")
|
||||
// For ACP agents, use the agent name (e.g., "Claude Code", "Gemini CLI")
|
||||
// This provides better clarity about what refused the request
|
||||
if self.as_native_connection(cx).is_some() {
|
||||
if self
|
||||
.agent
|
||||
.clone()
|
||||
.downcast::<agent::NativeAgentServer>()
|
||||
.is_some()
|
||||
{
|
||||
// Native agent - use the model name
|
||||
self.model_selector
|
||||
.as_ref()
|
||||
.and_then(|selector| selector.read(cx).active_model(cx))
|
||||
.map(|model| model.name.clone())
|
||||
.and_then(|selector| selector.read(cx).active_model_name(cx))
|
||||
.unwrap_or_else(|| SharedString::from("The model"))
|
||||
} else {
|
||||
// ACP agent - use the agent name (e.g., "Claude Code", "Gemini CLI")
|
||||
@@ -5395,7 +5366,7 @@ impl AcpThreadView {
|
||||
}
|
||||
|
||||
fn render_refusal_error(&self, cx: &mut Context<'_, Self>) -> Callout {
|
||||
let model_or_agent_name = self.current_model_name(cx);
|
||||
let model_or_agent_name = self.get_current_model_name(cx);
|
||||
let refusal_message = format!(
|
||||
"{} refused to respond to this prompt. This can happen when a model believes the prompt violates its content policy or safety guidelines, so rephrasing it can sometimes address the issue.",
|
||||
model_or_agent_name
|
||||
@@ -5801,10 +5772,13 @@ impl Render for AcpThreadView {
|
||||
})
|
||||
.children(self.render_thread_retry_status_callout(window, cx))
|
||||
.children({
|
||||
if cfg!(windows) && self.project.read(cx).is_local() {
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
self.render_codex_windows_warning(cx)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
{
|
||||
Vec::<Empty>::new()
|
||||
}
|
||||
})
|
||||
.children(self.render_thread_error(cx))
|
||||
@@ -5993,6 +5967,7 @@ pub(crate) mod tests {
|
||||
use acp_thread::StubAgentConnection;
|
||||
use agent_client_protocol::SessionId;
|
||||
use assistant_text_thread::TextThreadStore;
|
||||
use editor::EditorSettings;
|
||||
use fs::FakeFs;
|
||||
use gpui::{EventEmitter, SemanticVersion, TestAppContext, VisualTestContext};
|
||||
use project::Project;
|
||||
@@ -6380,10 +6355,6 @@ pub(crate) mod tests {
|
||||
struct SaboteurAgentConnection;
|
||||
|
||||
impl AgentConnection for SaboteurAgentConnection {
|
||||
fn telemetry_id(&self) -> &'static str {
|
||||
"saboteur"
|
||||
}
|
||||
|
||||
fn new_thread(
|
||||
self: Rc<Self>,
|
||||
project: Entity<Project>,
|
||||
@@ -6444,10 +6415,6 @@ pub(crate) mod tests {
|
||||
struct RefusalAgentConnection;
|
||||
|
||||
impl AgentConnection for RefusalAgentConnection {
|
||||
fn telemetry_id(&self) -> &'static str {
|
||||
"refusal"
|
||||
}
|
||||
|
||||
fn new_thread(
|
||||
self: Rc<Self>,
|
||||
project: Entity<Project>,
|
||||
@@ -6510,8 +6477,13 @@ pub(crate) mod tests {
|
||||
cx.update(|cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
language::init(cx);
|
||||
Project::init_settings(cx);
|
||||
AgentSettings::register(cx);
|
||||
workspace::init_settings(cx);
|
||||
theme::init(theme::LoadThemes::JustBase, cx);
|
||||
release_channel::init(SemanticVersion::default(), cx);
|
||||
EditorSettings::register(cx);
|
||||
prompt_store::init(cx)
|
||||
});
|
||||
}
|
||||
|
||||
@@ -1047,7 +1047,7 @@ impl AgentConfiguration {
|
||||
AgentIcon::Name(icon_name) => Icon::new(icon_name)
|
||||
.size(IconSize::Small)
|
||||
.color(Color::Muted),
|
||||
AgentIcon::Path(icon_path) => Icon::from_external_svg(icon_path)
|
||||
AgentIcon::Path(icon_path) => Icon::from_path(icon_path)
|
||||
.size(IconSize::Small)
|
||||
.color(Color::Muted),
|
||||
};
|
||||
|
||||
@@ -515,14 +515,16 @@ impl Render for AddLlmProviderModal {
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use editor::EditorSettings;
|
||||
use fs::FakeFs;
|
||||
use gpui::{TestAppContext, VisualTestContext};
|
||||
use language::language_settings;
|
||||
use language_model::{
|
||||
LanguageModelProviderId, LanguageModelProviderName,
|
||||
fake_provider::FakeLanguageModelProvider,
|
||||
};
|
||||
use project::Project;
|
||||
use settings::SettingsStore;
|
||||
use settings::{Settings as _, SettingsStore};
|
||||
use util::path;
|
||||
|
||||
#[gpui::test]
|
||||
@@ -728,9 +730,13 @@ mod tests {
|
||||
cx.update(|cx| {
|
||||
let store = SettingsStore::test(cx);
|
||||
cx.set_global(store);
|
||||
workspace::init_settings(cx);
|
||||
Project::init_settings(cx);
|
||||
theme::init(theme::LoadThemes::JustBase, cx);
|
||||
|
||||
language_settings::init(cx);
|
||||
EditorSettings::register(cx);
|
||||
language_model::init_settings(cx);
|
||||
language_models::init_settings(cx);
|
||||
});
|
||||
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
use crate::{Keep, KeepAll, OpenAgentDiff, Reject, RejectAll};
|
||||
use acp_thread::{AcpThread, AcpThreadEvent};
|
||||
use action_log::ActionLogTelemetry;
|
||||
use action_log::ActionLog;
|
||||
use agent_settings::AgentSettings;
|
||||
use anyhow::Result;
|
||||
use buffer_diff::DiffHunkStatus;
|
||||
@@ -40,16 +40,79 @@ use zed_actions::assistant::ToggleFocus;
|
||||
pub struct AgentDiffPane {
|
||||
multibuffer: Entity<MultiBuffer>,
|
||||
editor: Entity<Editor>,
|
||||
thread: Entity<AcpThread>,
|
||||
thread: AgentDiffThread,
|
||||
focus_handle: FocusHandle,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
title: SharedString,
|
||||
_subscriptions: Vec<Subscription>,
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Eq, Clone)]
|
||||
pub enum AgentDiffThread {
|
||||
AcpThread(Entity<AcpThread>),
|
||||
}
|
||||
|
||||
impl AgentDiffThread {
|
||||
fn project(&self, cx: &App) -> Entity<Project> {
|
||||
match self {
|
||||
AgentDiffThread::AcpThread(thread) => thread.read(cx).project().clone(),
|
||||
}
|
||||
}
|
||||
fn action_log(&self, cx: &App) -> Entity<ActionLog> {
|
||||
match self {
|
||||
AgentDiffThread::AcpThread(thread) => thread.read(cx).action_log().clone(),
|
||||
}
|
||||
}
|
||||
|
||||
fn title(&self, cx: &App) -> SharedString {
|
||||
match self {
|
||||
AgentDiffThread::AcpThread(thread) => thread.read(cx).title(),
|
||||
}
|
||||
}
|
||||
|
||||
fn has_pending_edit_tool_uses(&self, cx: &App) -> bool {
|
||||
match self {
|
||||
AgentDiffThread::AcpThread(thread) => thread.read(cx).has_pending_edit_tool_calls(),
|
||||
}
|
||||
}
|
||||
|
||||
fn downgrade(&self) -> WeakAgentDiffThread {
|
||||
match self {
|
||||
AgentDiffThread::AcpThread(thread) => {
|
||||
WeakAgentDiffThread::AcpThread(thread.downgrade())
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Entity<AcpThread>> for AgentDiffThread {
|
||||
fn from(entity: Entity<AcpThread>) -> Self {
|
||||
AgentDiffThread::AcpThread(entity)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Eq, Clone)]
|
||||
pub enum WeakAgentDiffThread {
|
||||
AcpThread(WeakEntity<AcpThread>),
|
||||
}
|
||||
|
||||
impl WeakAgentDiffThread {
|
||||
pub fn upgrade(&self) -> Option<AgentDiffThread> {
|
||||
match self {
|
||||
WeakAgentDiffThread::AcpThread(weak) => weak.upgrade().map(AgentDiffThread::AcpThread),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<WeakEntity<AcpThread>> for WeakAgentDiffThread {
|
||||
fn from(entity: WeakEntity<AcpThread>) -> Self {
|
||||
WeakAgentDiffThread::AcpThread(entity)
|
||||
}
|
||||
}
|
||||
|
||||
impl AgentDiffPane {
|
||||
pub fn deploy(
|
||||
thread: Entity<AcpThread>,
|
||||
thread: impl Into<AgentDiffThread>,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
@@ -60,11 +123,12 @@ impl AgentDiffPane {
|
||||
}
|
||||
|
||||
pub fn deploy_in_workspace(
|
||||
thread: Entity<AcpThread>,
|
||||
thread: impl Into<AgentDiffThread>,
|
||||
workspace: &mut Workspace,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Workspace>,
|
||||
) -> Entity<Self> {
|
||||
let thread = thread.into();
|
||||
let existing_diff = workspace
|
||||
.items_of_type::<AgentDiffPane>(cx)
|
||||
.find(|diff| diff.read(cx).thread == thread);
|
||||
@@ -81,7 +145,7 @@ impl AgentDiffPane {
|
||||
}
|
||||
|
||||
pub fn new(
|
||||
thread: Entity<AcpThread>,
|
||||
thread: AgentDiffThread,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
@@ -89,7 +153,7 @@ impl AgentDiffPane {
|
||||
let focus_handle = cx.focus_handle();
|
||||
let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite));
|
||||
|
||||
let project = thread.read(cx).project().clone();
|
||||
let project = thread.project(cx);
|
||||
let editor = cx.new(|cx| {
|
||||
let mut editor =
|
||||
Editor::for_multibuffer(multibuffer.clone(), Some(project.clone()), window, cx);
|
||||
@@ -100,16 +164,19 @@ impl AgentDiffPane {
|
||||
editor
|
||||
});
|
||||
|
||||
let action_log = thread.read(cx).action_log().clone();
|
||||
let action_log = thread.action_log(cx);
|
||||
|
||||
let mut this = Self {
|
||||
_subscriptions: vec![
|
||||
cx.observe_in(&action_log, window, |this, _action_log, window, cx| {
|
||||
this.update_excerpts(window, cx)
|
||||
}),
|
||||
cx.subscribe(&thread, |this, _thread, event, cx| {
|
||||
this.handle_acp_thread_event(event, cx)
|
||||
}),
|
||||
match &thread {
|
||||
AgentDiffThread::AcpThread(thread) => cx
|
||||
.subscribe(thread, |this, _thread, event, cx| {
|
||||
this.handle_acp_thread_event(event, cx)
|
||||
}),
|
||||
},
|
||||
],
|
||||
title: SharedString::default(),
|
||||
multibuffer,
|
||||
@@ -124,12 +191,7 @@ impl AgentDiffPane {
|
||||
}
|
||||
|
||||
fn update_excerpts(&mut self, window: &mut Window, cx: &mut Context<Self>) {
|
||||
let changed_buffers = self
|
||||
.thread
|
||||
.read(cx)
|
||||
.action_log()
|
||||
.read(cx)
|
||||
.changed_buffers(cx);
|
||||
let changed_buffers = self.thread.action_log(cx).read(cx).changed_buffers(cx);
|
||||
let mut paths_to_delete = self.multibuffer.read(cx).paths().collect::<HashSet<_>>();
|
||||
|
||||
for (buffer, diff_handle) in changed_buffers {
|
||||
@@ -216,7 +278,7 @@ impl AgentDiffPane {
|
||||
}
|
||||
|
||||
fn update_title(&mut self, cx: &mut Context<Self>) {
|
||||
let new_title = self.thread.read(cx).title();
|
||||
let new_title = self.thread.title(cx);
|
||||
if new_title != self.title {
|
||||
self.title = new_title;
|
||||
cx.emit(EditorEvent::TitleChanged);
|
||||
@@ -278,18 +340,16 @@ impl AgentDiffPane {
|
||||
}
|
||||
|
||||
fn keep_all(&mut self, _: &KeepAll, _window: &mut Window, cx: &mut Context<Self>) {
|
||||
let telemetry = ActionLogTelemetry::from(self.thread.read(cx));
|
||||
let action_log = self.thread.read(cx).action_log().clone();
|
||||
action_log.update(cx, |action_log, cx| {
|
||||
action_log.keep_all_edits(Some(telemetry), cx)
|
||||
});
|
||||
self.thread
|
||||
.action_log(cx)
|
||||
.update(cx, |action_log, cx| action_log.keep_all_edits(cx))
|
||||
}
|
||||
}
|
||||
|
||||
fn keep_edits_in_selection(
|
||||
editor: &mut Editor,
|
||||
buffer_snapshot: &MultiBufferSnapshot,
|
||||
thread: &Entity<AcpThread>,
|
||||
thread: &AgentDiffThread,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Editor>,
|
||||
) {
|
||||
@@ -304,7 +364,7 @@ fn keep_edits_in_selection(
|
||||
fn reject_edits_in_selection(
|
||||
editor: &mut Editor,
|
||||
buffer_snapshot: &MultiBufferSnapshot,
|
||||
thread: &Entity<AcpThread>,
|
||||
thread: &AgentDiffThread,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Editor>,
|
||||
) {
|
||||
@@ -318,7 +378,7 @@ fn reject_edits_in_selection(
|
||||
fn keep_edits_in_ranges(
|
||||
editor: &mut Editor,
|
||||
buffer_snapshot: &MultiBufferSnapshot,
|
||||
thread: &Entity<AcpThread>,
|
||||
thread: &AgentDiffThread,
|
||||
ranges: Vec<Range<editor::Anchor>>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Editor>,
|
||||
@@ -333,15 +393,8 @@ fn keep_edits_in_ranges(
|
||||
for hunk in &diff_hunks_in_ranges {
|
||||
let buffer = multibuffer.read(cx).buffer(hunk.buffer_id);
|
||||
if let Some(buffer) = buffer {
|
||||
let action_log = thread.read(cx).action_log().clone();
|
||||
let telemetry = ActionLogTelemetry::from(thread.read(cx));
|
||||
action_log.update(cx, |action_log, cx| {
|
||||
action_log.keep_edits_in_range(
|
||||
buffer,
|
||||
hunk.buffer_range.clone(),
|
||||
Some(telemetry),
|
||||
cx,
|
||||
)
|
||||
thread.action_log(cx).update(cx, |action_log, cx| {
|
||||
action_log.keep_edits_in_range(buffer, hunk.buffer_range.clone(), cx)
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -350,7 +403,7 @@ fn keep_edits_in_ranges(
|
||||
fn reject_edits_in_ranges(
|
||||
editor: &mut Editor,
|
||||
buffer_snapshot: &MultiBufferSnapshot,
|
||||
thread: &Entity<AcpThread>,
|
||||
thread: &AgentDiffThread,
|
||||
ranges: Vec<Range<editor::Anchor>>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Editor>,
|
||||
@@ -374,12 +427,11 @@ fn reject_edits_in_ranges(
|
||||
}
|
||||
}
|
||||
|
||||
let action_log = thread.read(cx).action_log().clone();
|
||||
let telemetry = ActionLogTelemetry::from(thread.read(cx));
|
||||
for (buffer, ranges) in ranges_by_buffer {
|
||||
action_log
|
||||
thread
|
||||
.action_log(cx)
|
||||
.update(cx, |action_log, cx| {
|
||||
action_log.reject_edits_in_ranges(buffer, ranges, Some(telemetry.clone()), cx)
|
||||
action_log.reject_edits_in_ranges(buffer, ranges, cx)
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
}
|
||||
@@ -479,7 +531,7 @@ impl Item for AgentDiffPane {
|
||||
}
|
||||
|
||||
fn tab_content(&self, params: TabContentParams, _window: &Window, cx: &App) -> AnyElement {
|
||||
let title = self.thread.read(cx).title();
|
||||
let title = self.thread.title(cx);
|
||||
Label::new(format!("Review: {}", title))
|
||||
.color(if params.selected {
|
||||
Color::Default
|
||||
@@ -660,7 +712,7 @@ impl Render for AgentDiffPane {
|
||||
}
|
||||
}
|
||||
|
||||
fn diff_hunk_controls(thread: &Entity<AcpThread>) -> editor::RenderDiffHunkControlsFn {
|
||||
fn diff_hunk_controls(thread: &AgentDiffThread) -> editor::RenderDiffHunkControlsFn {
|
||||
let thread = thread.clone();
|
||||
|
||||
Arc::new(
|
||||
@@ -687,7 +739,7 @@ fn render_diff_hunk_controls(
|
||||
hunk_range: Range<editor::Anchor>,
|
||||
is_created_file: bool,
|
||||
line_height: Pixels,
|
||||
thread: &Entity<AcpThread>,
|
||||
thread: &AgentDiffThread,
|
||||
editor: &Entity<Editor>,
|
||||
cx: &mut App,
|
||||
) -> AnyElement {
|
||||
@@ -1101,11 +1153,8 @@ impl Render for AgentDiffToolbar {
|
||||
return Empty.into_any();
|
||||
};
|
||||
|
||||
let has_pending_edit_tool_use = agent_diff
|
||||
.read(cx)
|
||||
.thread
|
||||
.read(cx)
|
||||
.has_pending_edit_tool_calls();
|
||||
let has_pending_edit_tool_use =
|
||||
agent_diff.read(cx).thread.has_pending_edit_tool_uses(cx);
|
||||
|
||||
if has_pending_edit_tool_use {
|
||||
return div().px_2().child(spinner_icon).into_any();
|
||||
@@ -1165,7 +1214,7 @@ pub enum EditorState {
|
||||
}
|
||||
|
||||
struct WorkspaceThread {
|
||||
thread: WeakEntity<AcpThread>,
|
||||
thread: WeakAgentDiffThread,
|
||||
_thread_subscriptions: (Subscription, Subscription),
|
||||
singleton_editors: HashMap<WeakEntity<Buffer>, HashMap<WeakEntity<Editor>, Subscription>>,
|
||||
_settings_subscription: Subscription,
|
||||
@@ -1190,23 +1239,23 @@ impl AgentDiff {
|
||||
|
||||
pub fn set_active_thread(
|
||||
workspace: &WeakEntity<Workspace>,
|
||||
thread: Entity<AcpThread>,
|
||||
thread: impl Into<AgentDiffThread>,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) {
|
||||
Self::global(cx).update(cx, |this, cx| {
|
||||
this.register_active_thread_impl(workspace, thread, window, cx);
|
||||
this.register_active_thread_impl(workspace, thread.into(), window, cx);
|
||||
});
|
||||
}
|
||||
|
||||
fn register_active_thread_impl(
|
||||
&mut self,
|
||||
workspace: &WeakEntity<Workspace>,
|
||||
thread: Entity<AcpThread>,
|
||||
thread: AgentDiffThread,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let action_log = thread.read(cx).action_log().clone();
|
||||
let action_log = thread.action_log(cx);
|
||||
|
||||
let action_log_subscription = cx.observe_in(&action_log, window, {
|
||||
let workspace = workspace.clone();
|
||||
@@ -1215,12 +1264,14 @@ impl AgentDiff {
|
||||
}
|
||||
});
|
||||
|
||||
let thread_subscription = cx.subscribe_in(&thread, window, {
|
||||
let workspace = workspace.clone();
|
||||
move |this, thread, event, window, cx| {
|
||||
this.handle_acp_thread_event(&workspace, thread, event, window, cx)
|
||||
}
|
||||
});
|
||||
let thread_subscription = match &thread {
|
||||
AgentDiffThread::AcpThread(thread) => cx.subscribe_in(thread, window, {
|
||||
let workspace = workspace.clone();
|
||||
move |this, thread, event, window, cx| {
|
||||
this.handle_acp_thread_event(&workspace, thread, event, window, cx)
|
||||
}
|
||||
}),
|
||||
};
|
||||
|
||||
if let Some(workspace_thread) = self.workspace_threads.get_mut(workspace) {
|
||||
// replace thread and action log subscription, but keep editors
|
||||
@@ -1297,7 +1348,7 @@ impl AgentDiff {
|
||||
|
||||
fn register_review_action<T: Action>(
|
||||
workspace: &mut Workspace,
|
||||
review: impl Fn(&Entity<Editor>, &Entity<AcpThread>, &mut Window, &mut App) -> PostReviewState
|
||||
review: impl Fn(&Entity<Editor>, &AgentDiffThread, &mut Window, &mut App) -> PostReviewState
|
||||
+ 'static,
|
||||
this: &Entity<AgentDiff>,
|
||||
) {
|
||||
@@ -1457,7 +1508,7 @@ impl AgentDiff {
|
||||
return;
|
||||
};
|
||||
|
||||
let action_log = thread.read(cx).action_log();
|
||||
let action_log = thread.action_log(cx);
|
||||
let changed_buffers = action_log.read(cx).changed_buffers(cx);
|
||||
|
||||
let mut unaffected = self.reviewing_editors.clone();
|
||||
@@ -1576,7 +1627,7 @@ impl AgentDiff {
|
||||
|
||||
fn keep_all(
|
||||
editor: &Entity<Editor>,
|
||||
thread: &Entity<AcpThread>,
|
||||
thread: &AgentDiffThread,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) -> PostReviewState {
|
||||
@@ -1596,7 +1647,7 @@ impl AgentDiff {
|
||||
|
||||
fn reject_all(
|
||||
editor: &Entity<Editor>,
|
||||
thread: &Entity<AcpThread>,
|
||||
thread: &AgentDiffThread,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) -> PostReviewState {
|
||||
@@ -1616,7 +1667,7 @@ impl AgentDiff {
|
||||
|
||||
fn keep(
|
||||
editor: &Entity<Editor>,
|
||||
thread: &Entity<AcpThread>,
|
||||
thread: &AgentDiffThread,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) -> PostReviewState {
|
||||
@@ -1629,7 +1680,7 @@ impl AgentDiff {
|
||||
|
||||
fn reject(
|
||||
editor: &Entity<Editor>,
|
||||
thread: &Entity<AcpThread>,
|
||||
thread: &AgentDiffThread,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) -> PostReviewState {
|
||||
@@ -1652,7 +1703,7 @@ impl AgentDiff {
|
||||
fn review_in_active_editor(
|
||||
&mut self,
|
||||
workspace: &mut Workspace,
|
||||
review: impl Fn(&Entity<Editor>, &Entity<AcpThread>, &mut Window, &mut App) -> PostReviewState,
|
||||
review: impl Fn(&Entity<Editor>, &AgentDiffThread, &mut Window, &mut App) -> PostReviewState,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Option<Task<Result<()>>> {
|
||||
@@ -1674,7 +1725,7 @@ impl AgentDiff {
|
||||
if let PostReviewState::AllReviewed = review(&editor, &thread, window, cx)
|
||||
&& let Some(curr_buffer) = editor.read(cx).buffer().read(cx).as_singleton()
|
||||
{
|
||||
let changed_buffers = thread.read(cx).action_log().read(cx).changed_buffers(cx);
|
||||
let changed_buffers = thread.action_log(cx).read(cx).changed_buffers(cx);
|
||||
|
||||
let mut keys = changed_buffers.keys().cycle();
|
||||
keys.find(|k| *k == &curr_buffer);
|
||||
@@ -1717,11 +1768,12 @@ mod tests {
|
||||
use super::*;
|
||||
use crate::Keep;
|
||||
use acp_thread::AgentConnection as _;
|
||||
use agent_settings::AgentSettings;
|
||||
use editor::EditorSettings;
|
||||
use gpui::{TestAppContext, UpdateGlobal, VisualTestContext};
|
||||
use project::{FakeFs, Project};
|
||||
use serde_json::json;
|
||||
use settings::SettingsStore;
|
||||
use settings::{Settings, SettingsStore};
|
||||
use std::{path::Path, rc::Rc};
|
||||
use util::path;
|
||||
|
||||
@@ -1730,8 +1782,13 @@ mod tests {
|
||||
cx.update(|cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
language::init(cx);
|
||||
Project::init_settings(cx);
|
||||
AgentSettings::register(cx);
|
||||
prompt_store::init(cx);
|
||||
workspace::init_settings(cx);
|
||||
theme::init(theme::LoadThemes::JustBase, cx);
|
||||
EditorSettings::register(cx);
|
||||
language_model::init_settings(cx);
|
||||
});
|
||||
|
||||
@@ -1758,7 +1815,8 @@ mod tests {
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let action_log = cx.read(|cx| thread.read(cx).action_log().clone());
|
||||
let thread = AgentDiffThread::AcpThread(thread);
|
||||
let action_log = cx.read(|cx| thread.action_log(cx));
|
||||
|
||||
let (workspace, cx) =
|
||||
cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx));
|
||||
@@ -1884,8 +1942,13 @@ mod tests {
|
||||
cx.update(|cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
language::init(cx);
|
||||
Project::init_settings(cx);
|
||||
AgentSettings::register(cx);
|
||||
prompt_store::init(cx);
|
||||
workspace::init_settings(cx);
|
||||
theme::init(theme::LoadThemes::JustBase, cx);
|
||||
EditorSettings::register(cx);
|
||||
language_model::init_settings(cx);
|
||||
workspace::register_project_item::<Editor>(cx);
|
||||
});
|
||||
@@ -1941,6 +2004,7 @@ mod tests {
|
||||
let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone());
|
||||
|
||||
// Set the active thread
|
||||
let thread = AgentDiffThread::AcpThread(thread);
|
||||
cx.update(|window, cx| {
|
||||
AgentDiff::set_active_thread(&workspace.downgrade(), thread.clone(), window, cx)
|
||||
});
|
||||
|
||||
@@ -16,7 +16,7 @@ use serde::{Deserialize, Serialize};
|
||||
use settings::{
|
||||
DefaultAgentView as DefaultView, LanguageModelProviderSetting, LanguageModelSelection,
|
||||
};
|
||||
|
||||
use zed_actions::OpenBrowser;
|
||||
use zed_actions::agent::{OpenClaudeCodeOnboardingModal, ReauthenticateAgent};
|
||||
|
||||
use crate::ui::{AcpOnboardingModal, ClaudeCodeOnboardingModal};
|
||||
@@ -2131,20 +2131,12 @@ impl AgentPanel {
|
||||
menu
|
||||
})
|
||||
.separator()
|
||||
.item(
|
||||
ContextMenuEntry::new("Add More Agents")
|
||||
.icon(IconName::Plus)
|
||||
.icon_color(Color::Muted)
|
||||
.handler({
|
||||
move |window, cx| {
|
||||
window.dispatch_action(Box::new(zed_actions::Extensions {
|
||||
category_filter: Some(
|
||||
zed_actions::ExtensionCategoryFilter::AgentServers,
|
||||
),
|
||||
id: None,
|
||||
}), cx)
|
||||
}
|
||||
}),
|
||||
.link(
|
||||
"Add Other Agents",
|
||||
OpenBrowser {
|
||||
url: zed_urls::external_agents_docs(cx),
|
||||
}
|
||||
.boxed_clone(),
|
||||
)
|
||||
}))
|
||||
}
|
||||
|
||||
@@ -12,6 +12,7 @@ mod context_strip;
|
||||
mod inline_assistant;
|
||||
mod inline_prompt_editor;
|
||||
mod language_model_selector;
|
||||
mod message_editor;
|
||||
mod profile_selector;
|
||||
mod slash_command;
|
||||
mod slash_command_picker;
|
||||
@@ -247,6 +248,8 @@ pub fn init(
|
||||
is_eval: bool,
|
||||
cx: &mut App,
|
||||
) {
|
||||
AgentSettings::register(cx);
|
||||
|
||||
assistant_text_thread::init(client.clone(), cx);
|
||||
rules_library::init(cx);
|
||||
if !is_eval {
|
||||
|
||||
@@ -1082,7 +1082,10 @@ mod tests {
|
||||
};
|
||||
use gpui::TestAppContext;
|
||||
use indoc::indoc;
|
||||
use language::{Buffer, Language, LanguageConfig, LanguageMatcher, Point, tree_sitter_rust};
|
||||
use language::{
|
||||
Buffer, Language, LanguageConfig, LanguageMatcher, Point, language_settings,
|
||||
tree_sitter_rust,
|
||||
};
|
||||
use language_model::{LanguageModelRegistry, TokenUsage};
|
||||
use rand::prelude::*;
|
||||
use settings::SettingsStore;
|
||||
@@ -1462,6 +1465,8 @@ mod tests {
|
||||
fn init_test(cx: &mut TestAppContext) {
|
||||
cx.update(LanguageModelRegistry::test);
|
||||
cx.set_global(cx.update(SettingsStore::test));
|
||||
cx.update(Project::init_settings);
|
||||
cx.update(language_settings::init);
|
||||
}
|
||||
|
||||
fn simulate_response_stream(
|
||||
|
||||
@@ -1075,6 +1075,8 @@ mod tests {
|
||||
cx.update(|cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
language::init(cx);
|
||||
Project::init_settings(cx);
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@@ -42,7 +42,7 @@ use super::{
|
||||
ContextPickerAction, ContextPickerEntry, ContextPickerMode, MentionLink, RecentEntry,
|
||||
available_context_picker_entries, recent_context_picker_entries_with_store, selection_ranges,
|
||||
};
|
||||
use crate::inline_prompt_editor::ContextCreasesAddon;
|
||||
use crate::message_editor::ContextCreasesAddon;
|
||||
|
||||
pub(crate) enum Match {
|
||||
File(FileMatch),
|
||||
@@ -1182,8 +1182,10 @@ mod tests {
|
||||
let app_state = cx.update(AppState::test);
|
||||
|
||||
cx.update(|cx| {
|
||||
language::init(cx);
|
||||
editor::init(cx);
|
||||
workspace::init(app_state.clone(), cx);
|
||||
Project::init_settings(cx);
|
||||
});
|
||||
|
||||
app_state
|
||||
@@ -1484,8 +1486,10 @@ mod tests {
|
||||
let app_state = cx.update(AppState::test);
|
||||
|
||||
cx.update(|cx| {
|
||||
language::init(cx);
|
||||
editor::init(cx);
|
||||
workspace::init(app_state.clone(), cx);
|
||||
Project::init_settings(cx);
|
||||
});
|
||||
|
||||
app_state
|
||||
@@ -1682,6 +1686,11 @@ mod tests {
|
||||
let store = SettingsStore::test(cx);
|
||||
cx.set_global(store);
|
||||
theme::init(theme::LoadThemes::JustBase, cx);
|
||||
client::init_settings(cx);
|
||||
language::init(cx);
|
||||
Project::init_settings(cx);
|
||||
workspace::init_settings(cx);
|
||||
editor::init_settings(cx);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
use crate::context_store::ContextStore;
|
||||
use agent::HistoryStore;
|
||||
use collections::{HashMap, VecDeque};
|
||||
use collections::VecDeque;
|
||||
use editor::actions::Paste;
|
||||
use editor::display_map::{CreaseId, EditorMargins};
|
||||
use editor::{Addon, AnchorRangeExt as _};
|
||||
use editor::display_map::EditorMargins;
|
||||
use editor::{
|
||||
ContextMenuOptions, Editor, EditorElement, EditorEvent, EditorMode, EditorStyle, MultiBuffer,
|
||||
actions::{MoveDown, MoveUp},
|
||||
@@ -17,7 +17,6 @@ use parking_lot::Mutex;
|
||||
use prompt_store::PromptStore;
|
||||
use settings::Settings;
|
||||
use std::cmp;
|
||||
use std::ops::Range;
|
||||
use std::rc::Rc;
|
||||
use std::sync::Arc;
|
||||
use theme::ThemeSettings;
|
||||
@@ -28,15 +27,12 @@ use zed_actions::agent::ToggleModelSelector;
|
||||
|
||||
use crate::agent_model_selector::AgentModelSelector;
|
||||
use crate::buffer_codegen::BufferCodegen;
|
||||
use crate::context::{AgentContextHandle, AgentContextKey};
|
||||
use crate::context_picker::{ContextPicker, ContextPickerCompletionProvider, crease_for_mention};
|
||||
use crate::context_store::{ContextStore, ContextStoreEvent};
|
||||
use crate::context_picker::{ContextPicker, ContextPickerCompletionProvider};
|
||||
use crate::context_strip::{ContextStrip, ContextStripEvent, SuggestContextKind};
|
||||
use crate::message_editor::{ContextCreasesAddon, extract_message_creases, insert_message_creases};
|
||||
use crate::terminal_codegen::TerminalCodegen;
|
||||
use crate::{
|
||||
CycleNextInlineAssist, CyclePreviousInlineAssist, ModelUsageContext, RemoveAllContext,
|
||||
ToggleContextPicker,
|
||||
};
|
||||
use crate::{CycleNextInlineAssist, CyclePreviousInlineAssist, ModelUsageContext};
|
||||
use crate::{RemoveAllContext, ToggleContextPicker};
|
||||
|
||||
pub struct PromptEditor<T> {
|
||||
pub editor: Entity<Editor>,
|
||||
@@ -1161,156 +1157,3 @@ impl GenerationMode {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Stored information that can be used to resurrect a context crease when creating an editor for a past message.
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct MessageCrease {
|
||||
pub range: Range<usize>,
|
||||
pub icon_path: SharedString,
|
||||
pub label: SharedString,
|
||||
/// None for a deserialized message, Some otherwise.
|
||||
pub context: Option<AgentContextHandle>,
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct ContextCreasesAddon {
|
||||
creases: HashMap<AgentContextKey, Vec<(CreaseId, SharedString)>>,
|
||||
_subscription: Option<Subscription>,
|
||||
}
|
||||
|
||||
impl Addon for ContextCreasesAddon {
|
||||
fn to_any(&self) -> &dyn std::any::Any {
|
||||
self
|
||||
}
|
||||
|
||||
fn to_any_mut(&mut self) -> Option<&mut dyn std::any::Any> {
|
||||
Some(self)
|
||||
}
|
||||
}
|
||||
|
||||
impl ContextCreasesAddon {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
creases: HashMap::default(),
|
||||
_subscription: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn add_creases(
|
||||
&mut self,
|
||||
context_store: &Entity<ContextStore>,
|
||||
key: AgentContextKey,
|
||||
creases: impl IntoIterator<Item = (CreaseId, SharedString)>,
|
||||
cx: &mut Context<Editor>,
|
||||
) {
|
||||
self.creases.entry(key).or_default().extend(creases);
|
||||
self._subscription = Some(
|
||||
cx.subscribe(context_store, |editor, _, event, cx| match event {
|
||||
ContextStoreEvent::ContextRemoved(key) => {
|
||||
let Some(this) = editor.addon_mut::<Self>() else {
|
||||
return;
|
||||
};
|
||||
let (crease_ids, replacement_texts): (Vec<_>, Vec<_>) = this
|
||||
.creases
|
||||
.remove(key)
|
||||
.unwrap_or_default()
|
||||
.into_iter()
|
||||
.unzip();
|
||||
let ranges = editor
|
||||
.remove_creases(crease_ids, cx)
|
||||
.into_iter()
|
||||
.map(|(_, range)| range)
|
||||
.collect::<Vec<_>>();
|
||||
editor.unfold_ranges(&ranges, false, false, cx);
|
||||
editor.edit(ranges.into_iter().zip(replacement_texts), cx);
|
||||
cx.notify();
|
||||
}
|
||||
}),
|
||||
)
|
||||
}
|
||||
|
||||
pub fn into_inner(self) -> HashMap<AgentContextKey, Vec<(CreaseId, SharedString)>> {
|
||||
self.creases
|
||||
}
|
||||
}
|
||||
|
||||
pub fn extract_message_creases(
|
||||
editor: &mut Editor,
|
||||
cx: &mut Context<'_, Editor>,
|
||||
) -> Vec<MessageCrease> {
|
||||
let buffer_snapshot = editor.buffer().read(cx).snapshot(cx);
|
||||
let mut contexts_by_crease_id = editor
|
||||
.addon_mut::<ContextCreasesAddon>()
|
||||
.map(std::mem::take)
|
||||
.unwrap_or_default()
|
||||
.into_inner()
|
||||
.into_iter()
|
||||
.flat_map(|(key, creases)| {
|
||||
let context = key.0;
|
||||
creases
|
||||
.into_iter()
|
||||
.map(move |(id, _)| (id, context.clone()))
|
||||
})
|
||||
.collect::<HashMap<_, _>>();
|
||||
// Filter the addon's list of creases based on what the editor reports,
|
||||
// since the addon might have removed creases in it.
|
||||
|
||||
editor.display_map.update(cx, |display_map, cx| {
|
||||
display_map
|
||||
.snapshot(cx)
|
||||
.crease_snapshot
|
||||
.creases()
|
||||
.filter_map(|(id, crease)| {
|
||||
Some((
|
||||
id,
|
||||
(
|
||||
crease.range().to_offset(&buffer_snapshot),
|
||||
crease.metadata()?.clone(),
|
||||
),
|
||||
))
|
||||
})
|
||||
.map(|(id, (range, metadata))| {
|
||||
let context = contexts_by_crease_id.remove(&id);
|
||||
MessageCrease {
|
||||
range,
|
||||
context,
|
||||
label: metadata.label,
|
||||
icon_path: metadata.icon_path,
|
||||
}
|
||||
})
|
||||
.collect()
|
||||
})
|
||||
}
|
||||
|
||||
pub fn insert_message_creases(
|
||||
editor: &mut Editor,
|
||||
message_creases: &[MessageCrease],
|
||||
context_store: &Entity<ContextStore>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<'_, Editor>,
|
||||
) {
|
||||
let buffer_snapshot = editor.buffer().read(cx).snapshot(cx);
|
||||
let creases = message_creases
|
||||
.iter()
|
||||
.map(|crease| {
|
||||
let start = buffer_snapshot.anchor_after(crease.range.start);
|
||||
let end = buffer_snapshot.anchor_before(crease.range.end);
|
||||
crease_for_mention(
|
||||
crease.label.clone(),
|
||||
crease.icon_path.clone(),
|
||||
start..end,
|
||||
cx.weak_entity(),
|
||||
)
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
let ids = editor.insert_creases(creases.clone(), cx);
|
||||
editor.fold_creases(creases, false, window, cx);
|
||||
if let Some(addon) = editor.addon_mut::<ContextCreasesAddon>() {
|
||||
for (crease, id) in message_creases.iter().zip(ids) {
|
||||
if let Some(context) = crease.context.as_ref() {
|
||||
let key = AgentContextKey(context.clone());
|
||||
addon.add_creases(context_store, key, vec![(id, crease.label.clone())], cx);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -177,7 +177,7 @@ impl LanguageModelPickerDelegate {
|
||||
}
|
||||
_ => {
|
||||
log::error!(
|
||||
"Failed to authenticate provider: {}: {err:#}",
|
||||
"Failed to authenticate provider: {}: {err}",
|
||||
provider_name.0
|
||||
);
|
||||
}
|
||||
|
||||
crates/agent_ui/src/message_editor.rs (new file, 166 lines)
@@ -0,0 +1,166 @@
|
||||
use std::ops::Range;

use collections::HashMap;
use editor::display_map::CreaseId;
use editor::{Addon, AnchorRangeExt, Editor};
use gpui::{Entity, Subscription};
use ui::prelude::*;

use crate::{
    context::{AgentContextHandle, AgentContextKey},
    context_picker::crease_for_mention,
    context_store::{ContextStore, ContextStoreEvent},
};

/// Stored information that can be used to resurrect a context crease when creating an editor for a past message.
#[derive(Clone, Debug)]
pub struct MessageCrease {
    pub range: Range<usize>,
    pub icon_path: SharedString,
    pub label: SharedString,
    /// None for a deserialized message, Some otherwise.
    pub context: Option<AgentContextHandle>,
}

#[derive(Default)]
pub struct ContextCreasesAddon {
    creases: HashMap<AgentContextKey, Vec<(CreaseId, SharedString)>>,
    _subscription: Option<Subscription>,
}

impl Addon for ContextCreasesAddon {
    fn to_any(&self) -> &dyn std::any::Any {
        self
    }

    fn to_any_mut(&mut self) -> Option<&mut dyn std::any::Any> {
        Some(self)
    }
}
|
||||
|
||||
impl ContextCreasesAddon {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
creases: HashMap::default(),
|
||||
_subscription: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn add_creases(
|
||||
&mut self,
|
||||
context_store: &Entity<ContextStore>,
|
||||
key: AgentContextKey,
|
||||
creases: impl IntoIterator<Item = (CreaseId, SharedString)>,
|
||||
cx: &mut Context<Editor>,
|
||||
) {
|
||||
self.creases.entry(key).or_default().extend(creases);
|
||||
self._subscription = Some(
|
||||
cx.subscribe(context_store, |editor, _, event, cx| match event {
|
||||
ContextStoreEvent::ContextRemoved(key) => {
|
||||
let Some(this) = editor.addon_mut::<Self>() else {
|
||||
return;
|
||||
};
|
||||
let (crease_ids, replacement_texts): (Vec<_>, Vec<_>) = this
|
||||
.creases
|
||||
.remove(key)
|
||||
.unwrap_or_default()
|
||||
.into_iter()
|
||||
.unzip();
|
||||
let ranges = editor
|
||||
.remove_creases(crease_ids, cx)
|
||||
.into_iter()
|
||||
.map(|(_, range)| range)
|
||||
.collect::<Vec<_>>();
|
||||
editor.unfold_ranges(&ranges, false, false, cx);
|
||||
editor.edit(ranges.into_iter().zip(replacement_texts), cx);
|
||||
cx.notify();
|
||||
}
|
||||
}),
|
||||
)
|
||||
}
|
||||
|
||||
pub fn into_inner(self) -> HashMap<AgentContextKey, Vec<(CreaseId, SharedString)>> {
|
||||
self.creases
|
||||
}
|
||||
}
|
||||
|
||||
pub fn extract_message_creases(
|
||||
editor: &mut Editor,
|
||||
cx: &mut Context<'_, Editor>,
|
||||
) -> Vec<MessageCrease> {
|
||||
let buffer_snapshot = editor.buffer().read(cx).snapshot(cx);
|
||||
let mut contexts_by_crease_id = editor
|
||||
.addon_mut::<ContextCreasesAddon>()
|
||||
.map(std::mem::take)
|
||||
.unwrap_or_default()
|
||||
.into_inner()
|
||||
.into_iter()
|
||||
.flat_map(|(key, creases)| {
|
||||
let context = key.0;
|
||||
creases
|
||||
.into_iter()
|
||||
.map(move |(id, _)| (id, context.clone()))
|
||||
})
|
||||
.collect::<HashMap<_, _>>();
|
||||
// Filter the addon's list of creases based on what the editor reports,
|
||||
// since the addon might have removed creases in it.
|
||||
|
||||
editor.display_map.update(cx, |display_map, cx| {
|
||||
display_map
|
||||
.snapshot(cx)
|
||||
.crease_snapshot
|
||||
.creases()
|
||||
.filter_map(|(id, crease)| {
|
||||
Some((
|
||||
id,
|
||||
(
|
||||
crease.range().to_offset(&buffer_snapshot),
|
||||
crease.metadata()?.clone(),
|
||||
),
|
||||
))
|
||||
})
|
||||
.map(|(id, (range, metadata))| {
|
||||
let context = contexts_by_crease_id.remove(&id);
|
||||
MessageCrease {
|
||||
range,
|
||||
context,
|
||||
label: metadata.label,
|
||||
icon_path: metadata.icon_path,
|
||||
}
|
||||
})
|
||||
.collect()
|
||||
})
|
||||
}
|
||||
|
||||
pub fn insert_message_creases(
|
||||
editor: &mut Editor,
|
||||
message_creases: &[MessageCrease],
|
||||
context_store: &Entity<ContextStore>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<'_, Editor>,
|
||||
) {
|
||||
let buffer_snapshot = editor.buffer().read(cx).snapshot(cx);
|
||||
let creases = message_creases
|
||||
.iter()
|
||||
.map(|crease| {
|
||||
let start = buffer_snapshot.anchor_after(crease.range.start);
|
||||
let end = buffer_snapshot.anchor_before(crease.range.end);
|
||||
crease_for_mention(
|
||||
crease.label.clone(),
|
||||
crease.icon_path.clone(),
|
||||
start..end,
|
||||
cx.weak_entity(),
|
||||
)
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
let ids = editor.insert_creases(creases.clone(), cx);
|
||||
editor.fold_creases(creases, false, window, cx);
|
||||
if let Some(addon) = editor.addon_mut::<ContextCreasesAddon>() {
|
||||
for (crease, id) in message_creases.iter().zip(ids) {
|
||||
if let Some(context) = crease.context.as_ref() {
|
||||
let key = AgentContextKey(context.clone());
|
||||
addon.add_creases(context_store, key, vec![(id, crease.label.clone())], cx);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
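`ContextCreasesAddon` plugs into the editor through the `Addon` trait's `to_any`/`to_any_mut` hooks, which let the editor keep addons type-erased and hand them back by downcasting (as in the `editor.addon_mut::<ContextCreasesAddon>()` calls above). Below is a hedged, self-contained sketch of that pattern; the `Host` type and its `register`/`addon_mut` methods are hypothetical stand-ins for the editor's addon storage, not the real `Editor` API.

```rust
use std::any::{Any, TypeId};
use std::collections::HashMap;

// Same shape as the editor's Addon trait: expose the addon as `dyn Any`
// so the host can recover the concrete type later.
trait Addon {
    fn to_any(&self) -> &dyn Any;
    fn to_any_mut(&mut self) -> Option<&mut dyn Any>;
}

#[derive(Default)]
struct CreaseAddon {
    creases: Vec<String>,
}

impl Addon for CreaseAddon {
    fn to_any(&self) -> &dyn Any {
        self
    }
    fn to_any_mut(&mut self) -> Option<&mut dyn Any> {
        Some(self)
    }
}

// Hypothetical host: stores addons keyed by their concrete TypeId.
#[derive(Default)]
struct Host {
    addons: HashMap<TypeId, Box<dyn Addon>>,
}

impl Host {
    fn register<A: Addon + 'static>(&mut self, addon: A) {
        self.addons.insert(TypeId::of::<A>(), Box::new(addon));
    }

    // Mirrors `editor.addon_mut::<T>()`: look up by type, then downcast.
    fn addon_mut<A: Addon + 'static>(&mut self) -> Option<&mut A> {
        self.addons
            .get_mut(&TypeId::of::<A>())?
            .to_any_mut()?
            .downcast_mut::<A>()
    }
}

fn main() {
    let mut host = Host::default();
    host.register(CreaseAddon::default());
    if let Some(addon) = host.addon_mut::<CreaseAddon>() {
        addon.creases.push("mention".to_string());
    }
    assert_eq!(host.addon_mut::<CreaseAddon>().unwrap().creases.len(), 1);
}
```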
|
||||
@@ -3223,7 +3223,11 @@ mod tests {
|
||||
prompt_store::init(cx);
|
||||
LanguageModelRegistry::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
|
||||
language::init(cx);
|
||||
agent_settings::init(cx);
|
||||
Project::init_settings(cx);
|
||||
theme::init(theme::LoadThemes::JustBase, cx);
|
||||
workspace::init_settings(cx);
|
||||
editor::init_settings(cx);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -577,6 +577,8 @@ mod test {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
// release_channel::init(SemanticVersion::default(), cx);
|
||||
language::init(cx);
|
||||
Project::init_settings(cx);
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@@ -22,6 +22,7 @@ use language_model::{
|
||||
};
|
||||
use parking_lot::Mutex;
|
||||
use pretty_assertions::assert_eq;
|
||||
use project::Project;
|
||||
use prompt_store::PromptBuilder;
|
||||
use rand::prelude::*;
|
||||
use serde_json::json;
|
||||
@@ -1410,6 +1411,9 @@ fn init_test(cx: &mut App) {
|
||||
prompt_store::init(cx);
|
||||
LanguageModelRegistry::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
language::init(cx);
|
||||
agent_settings::init(cx);
|
||||
Project::init_settings(cx);
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
|
||||
@@ -48,6 +48,7 @@ pub const LEGACY_CHANNEL_COUNT: NonZero<u16> = nz!(2);
|
||||
pub const REPLAY_DURATION: Duration = Duration::from_secs(30);
|
||||
|
||||
pub fn init(cx: &mut App) {
|
||||
AudioSettings::register(cx);
|
||||
LIVE_SETTINGS.initialize(cx);
|
||||
}
|
||||
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
use std::sync::atomic::{AtomicBool, Ordering};
|
||||
|
||||
use gpui::App;
|
||||
use settings::{RegisterSetting, Settings, SettingsStore};
|
||||
use settings::{Settings, SettingsStore};
|
||||
|
||||
#[derive(Clone, Debug, RegisterSetting)]
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct AudioSettings {
|
||||
/// Opt into the new audio system.
|
||||
///
|
||||
|
||||
@@ -10,7 +10,7 @@ use http_client::{AsyncBody, HttpClient, HttpClientWithUrl};
|
||||
use paths::remote_servers_dir;
|
||||
use release_channel::{AppCommitSha, ReleaseChannel};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::{RegisterSetting, Settings, SettingsStore};
|
||||
use settings::{Settings, SettingsStore};
|
||||
use smol::{fs, io::AsyncReadExt};
|
||||
use smol::{fs::File, process::Command};
|
||||
use std::mem;
|
||||
@@ -120,7 +120,7 @@ impl Drop for MacOsUnmounter<'_> {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, RegisterSetting)]
|
||||
#[derive(Clone, Copy, Debug)]
|
||||
struct AutoUpdateSetting(bool);
|
||||
|
||||
/// Whether or not to automatically check for updates.
|
||||
@@ -138,6 +138,8 @@ struct GlobalAutoUpdate(Option<Entity<AutoUpdater>>);
|
||||
impl Global for GlobalAutoUpdate {}
|
||||
|
||||
pub fn init(http_client: Arc<HttpClientWithUrl>, cx: &mut App) {
|
||||
AutoUpdateSetting::register(cx);
|
||||
|
||||
cx.observe_new(|workspace: &mut Workspace, _window, _cx| {
|
||||
workspace.register_action(|_, action, window, cx| check(action, window, cx));
|
||||
|
||||
@@ -404,7 +406,6 @@ impl AutoUpdater {
|
||||
arch: &str,
|
||||
release_channel: ReleaseChannel,
|
||||
version: Option<SemanticVersion>,
|
||||
set_status: impl Fn(&str, &mut AsyncApp) + Send + 'static,
|
||||
cx: &mut AsyncApp,
|
||||
) -> Result<PathBuf> {
|
||||
let this = cx.update(|cx| {
|
||||
@@ -414,7 +415,6 @@ impl AutoUpdater {
|
||||
.context("auto-update not initialized")
|
||||
})??;
|
||||
|
||||
set_status("Fetching remote server release", cx);
|
||||
let release = Self::get_release(
|
||||
&this,
|
||||
"zed-remote-server",
|
||||
@@ -439,7 +439,6 @@ impl AutoUpdater {
|
||||
"downloading zed-remote-server {os} {arch} version {}",
|
||||
release.version
|
||||
);
|
||||
set_status("Downloading remote server", cx);
|
||||
download_remote_server_binary(&version_path, release, client, cx).await?;
|
||||
}
|
||||
|
||||
@@ -1026,6 +1025,7 @@ mod tests {
|
||||
.set_user_settings("{}", cx)
|
||||
.expect("Unable to set user settings");
|
||||
cx.set_global(store);
|
||||
AutoUpdateSetting::register(cx);
|
||||
assert!(AutoUpdateSetting::get_global(cx).0);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
pub mod participant;
|
||||
pub mod room;
|
||||
|
||||
use crate::call_settings::CallSettings;
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use audio::Audio;
|
||||
use client::{ChannelId, Client, TypedEnvelope, User, UserStore, ZED_ALWAYS_ACTIVE, proto};
|
||||
@@ -13,6 +14,7 @@ use gpui::{
|
||||
use postage::watch;
|
||||
use project::Project;
|
||||
use room::Event;
|
||||
use settings::Settings;
|
||||
use std::sync::Arc;
|
||||
|
||||
pub use livekit_client::{RemoteVideoTrack, RemoteVideoTrackView, RemoteVideoTrackViewEvent};
|
||||
@@ -24,6 +26,8 @@ struct GlobalActiveCall(Entity<ActiveCall>);
|
||||
impl Global for GlobalActiveCall {}
|
||||
|
||||
pub fn init(client: Arc<Client>, user_store: Entity<UserStore>, cx: &mut App) {
|
||||
CallSettings::register(cx);
|
||||
|
||||
let active_call = cx.new(|cx| ActiveCall::new(client, user_store, cx));
|
||||
cx.set_global(GlobalActiveCall(active_call));
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
use settings::{RegisterSetting, Settings};
|
||||
use settings::Settings;
|
||||
|
||||
#[derive(Debug, RegisterSetting)]
|
||||
#[derive(Debug)]
|
||||
pub struct CallSettings {
|
||||
pub mute_on_join: bool,
|
||||
pub share_on_join: bool,
|
||||
|
||||
@@ -237,6 +237,7 @@ fn init_test(cx: &mut App) -> Entity<ChannelStore> {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
release_channel::init(SemanticVersion::default(), cx);
|
||||
client::init_settings(cx);
|
||||
|
||||
let clock = Arc::new(FakeSystemClock::new());
|
||||
let http = FakeHttpClient::with_404_response();
|
||||
|
||||
@@ -30,7 +30,7 @@ use rand::prelude::*;
|
||||
use release_channel::{AppVersion, ReleaseChannel};
|
||||
use rpc::proto::{AnyTypedEnvelope, EnvelopedMessage, PeerId, RequestMessage};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::{RegisterSetting, Settings, SettingsContent};
|
||||
use settings::{Settings, SettingsContent};
|
||||
use std::{
|
||||
any::TypeId,
|
||||
convert::TryFrom,
|
||||
@@ -95,7 +95,7 @@ actions!(
|
||||
]
|
||||
);
|
||||
|
||||
#[derive(Deserialize, RegisterSetting)]
|
||||
#[derive(Deserialize)]
|
||||
pub struct ClientSettings {
|
||||
pub server_url: String,
|
||||
}
|
||||
@@ -113,7 +113,7 @@ impl Settings for ClientSettings {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Default, RegisterSetting)]
|
||||
#[derive(Deserialize, Default)]
|
||||
pub struct ProxySettings {
|
||||
pub proxy: Option<String>,
|
||||
}
|
||||
@@ -140,6 +140,12 @@ impl Settings for ProxySettings {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn init_settings(cx: &mut App) {
|
||||
TelemetrySettings::register(cx);
|
||||
ClientSettings::register(cx);
|
||||
ProxySettings::register(cx);
|
||||
}
|
||||
|
||||
pub fn init(client: &Arc<Client>, cx: &mut App) {
|
||||
let client = Arc::downgrade(client);
|
||||
cx.on_action({
|
||||
@@ -502,7 +508,7 @@ impl<T: 'static> Drop for PendingEntitySubscription<T> {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Deserialize, Debug, RegisterSetting)]
|
||||
#[derive(Copy, Clone, Deserialize, Debug)]
|
||||
pub struct TelemetrySettings {
|
||||
pub diagnostics: bool,
|
||||
pub metrics: bool,
|
||||
@@ -2171,6 +2177,7 @@ mod tests {
|
||||
cx.update(|cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
init_settings(cx);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -179,6 +179,8 @@ impl Telemetry {
|
||||
let release_channel =
|
||||
ReleaseChannel::try_global(cx).map(|release_channel| release_channel.display_name());
|
||||
|
||||
TelemetrySettings::register(cx);
|
||||
|
||||
let state = Arc::new(Mutex::new(TelemetryState {
|
||||
settings: *TelemetrySettings::get_global(cx),
|
||||
architecture: env::consts::ARCH,
|
||||
|
||||
@@ -260,7 +260,7 @@ impl fmt::Debug for Lamport {
|
||||
impl fmt::Debug for Global {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "Global {{")?;
|
||||
for timestamp in self.iter().filter(|t| t.value > 0) {
|
||||
for timestamp in self.iter() {
|
||||
if timestamp.replica_id.0 > 0 {
|
||||
write!(f, ", ")?;
|
||||
}
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
pub mod predict_edits_v3;
|
||||
pub mod udiff;
|
||||
|
||||
use std::str::FromStr;
|
||||
use std::sync::Arc;
|
||||
@@ -183,13 +184,13 @@ pub struct PredictEditsGitInfo {
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct PredictEditsResponse {
|
||||
pub request_id: String,
|
||||
pub request_id: Uuid,
|
||||
pub output_excerpt: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct AcceptEditPredictionBody {
|
||||
pub request_id: String,
|
||||
pub request_id: Uuid,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, Serialize, Deserialize)]
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
use chrono::Duration;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{
|
||||
fmt::{Display, Write as _},
|
||||
fmt::Display,
|
||||
ops::{Add, Range, Sub},
|
||||
path::{Path, PathBuf},
|
||||
sync::Arc,
|
||||
@@ -11,14 +11,7 @@ use uuid::Uuid;
|
||||
|
||||
use crate::PredictEditsGitInfo;
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct PlanContextRetrievalRequest {
|
||||
pub excerpt: String,
|
||||
pub excerpt_path: Arc<Path>,
|
||||
pub excerpt_line_range: Range<Line>,
|
||||
pub cursor_file_max_row: Line,
|
||||
pub events: Vec<Event>,
|
||||
}
|
||||
// TODO: snippet ordering within file / relative to excerpt
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct PredictEditsRequest {
|
||||
@@ -132,15 +125,15 @@ impl Display for Event {
|
||||
write!(
|
||||
f,
|
||||
"// User accepted prediction:\n--- a/{}\n+++ b/{}\n{diff}",
|
||||
DiffPathFmt(old_path),
|
||||
DiffPathFmt(new_path)
|
||||
old_path.display(),
|
||||
new_path.display()
|
||||
)
|
||||
} else {
|
||||
write!(
|
||||
f,
|
||||
"--- a/{}\n+++ b/{}\n{diff}",
|
||||
DiffPathFmt(old_path),
|
||||
DiffPathFmt(new_path)
|
||||
old_path.display(),
|
||||
new_path.display()
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -148,24 +141,6 @@ impl Display for Event {
|
||||
}
|
||||
}
|
||||
|
||||
/// always format the Path as a unix path with `/` as the path sep in Diffs
|
||||
pub struct DiffPathFmt<'a>(pub &'a Path);
|
||||
|
||||
impl<'a> std::fmt::Display for DiffPathFmt<'a> {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
let mut is_first = true;
|
||||
for component in self.0.components() {
|
||||
if !is_first {
|
||||
f.write_char('/')?;
|
||||
} else {
|
||||
is_first = false;
|
||||
}
|
||||
write!(f, "{}", component.as_os_str().display())?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct Signature {
|
||||
pub text: String,
|
||||
|
||||
crates/cloud_llm_client/src/udiff.rs (new file, 294 lines)
@@ -0,0 +1,294 @@
|
||||
use std::{borrow::Cow, fmt::Display};

#[derive(Debug, PartialEq)]
pub enum DiffLine<'a> {
    OldPath { path: Cow<'a, str> },
    NewPath { path: Cow<'a, str> },
    HunkHeader(Option<HunkLocation>),
    Context(&'a str),
    Deletion(&'a str),
    Addition(&'a str),
    Garbage(&'a str),
}

#[derive(Debug, PartialEq)]
pub struct HunkLocation {
    start_line_old: u32,
    count_old: u32,
    start_line_new: u32,
    count_new: u32,
}

impl<'a> DiffLine<'a> {
    pub fn parse(line: &'a str) -> Self {
        Self::try_parse(line).unwrap_or(Self::Garbage(line))
    }

    fn try_parse(line: &'a str) -> Option<Self> {
        if let Some(header) = line.strip_prefix("---").and_then(eat_required_whitespace) {
            let path = parse_header_path("a/", header);
            Some(Self::OldPath { path })
        } else if let Some(header) = line.strip_prefix("+++").and_then(eat_required_whitespace) {
            Some(Self::NewPath {
                path: parse_header_path("b/", header),
            })
        } else if let Some(header) = line.strip_prefix("@@").and_then(eat_required_whitespace) {
            if header.starts_with("...") {
                return Some(Self::HunkHeader(None));
            }

            let (start_line_old, header) = header.strip_prefix('-')?.split_once(',')?;
            let mut parts = header.split_ascii_whitespace();
            let count_old = parts.next()?;
            let (start_line_new, count_new) = parts.next()?.strip_prefix('+')?.split_once(',')?;

            Some(Self::HunkHeader(Some(HunkLocation {
                start_line_old: start_line_old.parse::<u32>().ok()?.saturating_sub(1),
                count_old: count_old.parse().ok()?,
                start_line_new: start_line_new.parse::<u32>().ok()?.saturating_sub(1),
                count_new: count_new.parse().ok()?,
            })))
        } else if let Some(deleted_header) = line.strip_prefix("-") {
            Some(Self::Deletion(deleted_header))
        } else if line.is_empty() {
            Some(Self::Context(""))
        } else if let Some(context) = line.strip_prefix(" ") {
            Some(Self::Context(context))
        } else {
            Some(Self::Addition(line.strip_prefix("+")?))
        }
    }
}

impl<'a> Display for DiffLine<'a> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            DiffLine::OldPath { path } => write!(f, "--- {path}"),
            DiffLine::NewPath { path } => write!(f, "+++ {path}"),
            DiffLine::HunkHeader(Some(hunk_location)) => {
                write!(
                    f,
                    "@@ -{},{} +{},{} @@",
                    hunk_location.start_line_old + 1,
                    hunk_location.count_old,
                    hunk_location.start_line_new + 1,
                    hunk_location.count_new
                )
            }
            DiffLine::HunkHeader(None) => write!(f, "@@ ... @@"),
            DiffLine::Context(content) => write!(f, " {content}"),
            DiffLine::Deletion(content) => write!(f, "-{content}"),
            DiffLine::Addition(content) => write!(f, "+{content}"),
            DiffLine::Garbage(line) => write!(f, "{line}"),
        }
    }
}

fn parse_header_path<'a>(strip_prefix: &'static str, header: &'a str) -> Cow<'a, str> {
    if !header.contains(['"', '\\']) {
        let path = header.split_ascii_whitespace().next().unwrap_or(header);
        return Cow::Borrowed(path.strip_prefix(strip_prefix).unwrap_or(path));
    }

    let mut path = String::with_capacity(header.len());
    let mut in_quote = false;
    let mut chars = header.chars().peekable();
    let mut strip_prefix = Some(strip_prefix);

    while let Some(char) = chars.next() {
        if char == '"' {
            in_quote = !in_quote;
        } else if char == '\\' {
            let Some(&next_char) = chars.peek() else {
                break;
            };
            chars.next();
            path.push(next_char);
        } else if char.is_ascii_whitespace() && !in_quote {
            break;
        } else {
            path.push(char);
        }

        if let Some(prefix) = strip_prefix
            && path == prefix
        {
            strip_prefix.take();
            path.clear();
        }
    }

    Cow::Owned(path)
}

fn eat_required_whitespace(header: &str) -> Option<&str> {
    let trimmed = header.trim_ascii_start();

    if trimmed.len() == header.len() {
        None
    } else {
        Some(trimmed)
    }
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use indoc::indoc;
|
||||
|
||||
#[test]
|
||||
fn parse_lines_simple() {
|
||||
let input = indoc! {"
|
||||
diff --git a/text.txt b/text.txt
|
||||
index 86c770d..a1fd855 100644
|
||||
--- a/file.txt
|
||||
+++ b/file.txt
|
||||
@@ -1,2 +1,3 @@
|
||||
context
|
||||
-deleted
|
||||
+inserted
|
||||
garbage
|
||||
|
||||
--- b/file.txt
|
||||
+++ a/file.txt
|
||||
"};
|
||||
|
||||
let lines = input.lines().map(DiffLine::parse).collect::<Vec<_>>();
|
||||
|
||||
pretty_assertions::assert_eq!(
|
||||
lines,
|
||||
&[
|
||||
DiffLine::Garbage("diff --git a/text.txt b/text.txt"),
|
||||
DiffLine::Garbage("index 86c770d..a1fd855 100644"),
|
||||
DiffLine::OldPath {
|
||||
path: "file.txt".into()
|
||||
},
|
||||
DiffLine::NewPath {
|
||||
path: "file.txt".into()
|
||||
},
|
||||
DiffLine::HunkHeader(Some(HunkLocation {
|
||||
start_line_old: 0,
|
||||
count_old: 2,
|
||||
start_line_new: 0,
|
||||
count_new: 3
|
||||
})),
|
||||
DiffLine::Context("context"),
|
||||
DiffLine::Deletion("deleted"),
|
||||
DiffLine::Addition("inserted"),
|
||||
DiffLine::Garbage("garbage"),
|
||||
DiffLine::Context(""),
|
||||
DiffLine::OldPath {
|
||||
path: "b/file.txt".into()
|
||||
},
|
||||
DiffLine::NewPath {
|
||||
path: "a/file.txt".into()
|
||||
},
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn file_header_extra_space() {
|
||||
let options = ["--- file", "--- file", "---\tfile"];
|
||||
|
||||
for option in options {
|
||||
pretty_assertions::assert_eq!(
|
||||
DiffLine::parse(option),
|
||||
DiffLine::OldPath {
|
||||
path: "file".into()
|
||||
},
|
||||
"{option}",
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hunk_header_extra_space() {
|
||||
let options = [
|
||||
"@@ -1,2 +1,3 @@",
|
||||
"@@ -1,2 +1,3 @@",
|
||||
"@@\t-1,2\t+1,3\t@@",
|
||||
"@@ -1,2 +1,3 @@",
|
||||
"@@ -1,2 +1,3 @@",
|
||||
"@@ -1,2 +1,3 @@",
|
||||
"@@ -1,2 +1,3 @@ garbage",
|
||||
];
|
||||
|
||||
for option in options {
|
||||
pretty_assertions::assert_eq!(
|
||||
DiffLine::parse(option),
|
||||
DiffLine::HunkHeader(Some(HunkLocation {
|
||||
start_line_old: 0,
|
||||
count_old: 2,
|
||||
start_line_new: 0,
|
||||
count_new: 3
|
||||
})),
|
||||
"{option}",
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hunk_header_without_location() {
|
||||
pretty_assertions::assert_eq!(DiffLine::parse("@@ ... @@"), DiffLine::HunkHeader(None));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_path() {
|
||||
assert_eq!(parse_header_path("a/", "foo.txt"), "foo.txt");
|
||||
assert_eq!(
|
||||
parse_header_path("a/", "foo/bar/baz.txt"),
|
||||
"foo/bar/baz.txt"
|
||||
);
|
||||
assert_eq!(parse_header_path("a/", "a/foo.txt"), "foo.txt");
|
||||
assert_eq!(
|
||||
parse_header_path("a/", "a/foo/bar/baz.txt"),
|
||||
"foo/bar/baz.txt"
|
||||
);
|
||||
|
||||
// Extra
|
||||
assert_eq!(
|
||||
parse_header_path("a/", "a/foo/bar/baz.txt 2025"),
|
||||
"foo/bar/baz.txt"
|
||||
);
|
||||
assert_eq!(
|
||||
parse_header_path("a/", "a/foo/bar/baz.txt\t2025"),
|
||||
"foo/bar/baz.txt"
|
||||
);
|
||||
assert_eq!(
|
||||
parse_header_path("a/", "a/foo/bar/baz.txt \""),
|
||||
"foo/bar/baz.txt"
|
||||
);
|
||||
|
||||
// Quoted
|
||||
assert_eq!(
|
||||
parse_header_path("a/", "a/foo/bar/\"baz quox.txt\""),
|
||||
"foo/bar/baz quox.txt"
|
||||
);
|
||||
assert_eq!(
|
||||
parse_header_path("a/", "\"a/foo/bar/baz quox.txt\""),
|
||||
"foo/bar/baz quox.txt"
|
||||
);
|
||||
assert_eq!(
|
||||
parse_header_path("a/", "\"foo/bar/baz quox.txt\""),
|
||||
"foo/bar/baz quox.txt"
|
||||
);
|
||||
assert_eq!(parse_header_path("a/", "\"whatever 🤷\""), "whatever 🤷");
|
||||
assert_eq!(
|
||||
parse_header_path("a/", "\"foo/bar/baz quox.txt\" 2025"),
|
||||
"foo/bar/baz quox.txt"
|
||||
);
|
||||
// unescaped quotes are dropped
|
||||
assert_eq!(parse_header_path("a/", "foo/\"bar\""), "foo/bar");
|
||||
|
||||
// Escaped
|
||||
assert_eq!(
|
||||
parse_header_path("a/", "\"foo/\\\"bar\\\"/baz.txt\""),
|
||||
"foo/\"bar\"/baz.txt"
|
||||
);
|
||||
assert_eq!(
|
||||
parse_header_path("a/", "\"C:\\\\Projects\\\\My App\\\\old file.txt\""),
|
||||
"C:\\Projects\\My App\\old file.txt"
|
||||
);
|
||||
}
|
||||
}
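For context, here is a short usage sketch of the parser added above. It assumes a consumer crate in the same workspace that depends on `cloud_llm_client`; the `count_changed_lines` helper is illustrative and not part of this diff.

```rust
use cloud_llm_client::udiff::DiffLine;

// Classify each line of a unified diff and tally additions/deletions.
fn count_changed_lines(diff: &str) -> (usize, usize) {
    let mut additions = 0;
    let mut deletions = 0;
    for line in diff.lines() {
        match DiffLine::parse(line) {
            DiffLine::Addition(_) => additions += 1,
            DiffLine::Deletion(_) => deletions += 1,
            // Headers, context, and unrecognized ("garbage") lines are ignored.
            _ => {}
        }
    }
    (additions, deletions)
}

fn main() {
    let diff = "--- a/file.txt\n+++ b/file.txt\n@@ -1,2 +1,3 @@\n context\n-deleted\n+inserted\n";
    assert_eq!(count_changed_lines(diff), (1, 1));
}
```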
|
||||
@@ -17,6 +17,5 @@ cloud_llm_client.workspace = true
|
||||
indoc.workspace = true
|
||||
ordered-float.workspace = true
|
||||
rustc-hash.workspace = true
|
||||
schemars.workspace = true
|
||||
serde.workspace = true
|
||||
strum.workspace = true
|
||||
|
||||
@@ -1,9 +1,8 @@
|
||||
//! Zeta2 prompt planning and generation code shared with cloud.
|
||||
pub mod retrieval_prompt;
|
||||
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use cloud_llm_client::predict_edits_v3::{
|
||||
self, DiffPathFmt, Excerpt, Line, Point, PromptFormat, ReferencedDeclaration,
|
||||
self, Excerpt, Line, Point, PromptFormat, ReferencedDeclaration,
|
||||
};
|
||||
use indoc::indoc;
|
||||
use ordered_float::OrderedFloat;
|
||||
@@ -213,7 +212,7 @@ pub fn write_codeblock<'a>(
|
||||
include_line_numbers: bool,
|
||||
output: &'a mut String,
|
||||
) {
|
||||
writeln!(output, "`````{}", DiffPathFmt(path)).unwrap();
|
||||
writeln!(output, "`````{}", path.display()).unwrap();
|
||||
write_excerpts(
|
||||
excerpts,
|
||||
sorted_insertions,
|
||||
@@ -276,7 +275,7 @@ pub fn write_excerpts<'a>(
|
||||
}
|
||||
}
|
||||
|
||||
pub fn push_events(output: &mut String, events: &[predict_edits_v3::Event]) {
|
||||
fn push_events(output: &mut String, events: &[predict_edits_v3::Event]) {
|
||||
if events.is_empty() {
|
||||
return;
|
||||
};
|
||||
|
||||
@@ -1,94 +0,0 @@
|
||||
use anyhow::Result;
|
||||
use cloud_llm_client::predict_edits_v3::{self, Excerpt};
|
||||
use indoc::indoc;
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::fmt::Write;
|
||||
|
||||
use crate::{push_events, write_codeblock};
|
||||
|
||||
pub fn build_prompt(request: predict_edits_v3::PlanContextRetrievalRequest) -> Result<String> {
|
||||
let mut prompt = SEARCH_INSTRUCTIONS.to_string();
|
||||
|
||||
if !request.events.is_empty() {
|
||||
writeln!(&mut prompt, "## User Edits\n")?;
|
||||
push_events(&mut prompt, &request.events);
|
||||
}
|
||||
|
||||
writeln!(&mut prompt, "## Cursor context")?;
|
||||
write_codeblock(
|
||||
&request.excerpt_path,
|
||||
&[Excerpt {
|
||||
start_line: request.excerpt_line_range.start,
|
||||
text: request.excerpt.into(),
|
||||
}],
|
||||
&[],
|
||||
request.cursor_file_max_row,
|
||||
true,
|
||||
&mut prompt,
|
||||
);
|
||||
|
||||
writeln!(&mut prompt, "{TOOL_USE_REMINDER}")?;
|
||||
|
||||
Ok(prompt)
|
||||
}
|
||||
|
||||
/// Search for relevant code
|
||||
///
|
||||
/// For the best results, run multiple queries at once with a single invocation of this tool.
|
||||
#[derive(Clone, Deserialize, Serialize, JsonSchema)]
|
||||
pub struct SearchToolInput {
|
||||
/// An array of queries to run for gathering context relevant to the next prediction
|
||||
#[schemars(length(max = 3))]
|
||||
pub queries: Box<[SearchToolQuery]>,
|
||||
}
|
||||
|
||||
/// Search for relevant code by path, syntax hierarchy, and content.
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct SearchToolQuery {
|
||||
/// 1. A glob pattern to match file paths in the codebase to search in.
|
||||
pub glob: String,
|
||||
/// 2. Regular expressions to match syntax nodes **by their first line** and hierarchy.
|
||||
///
|
||||
/// Subsequent regexes match nodes within the full content of the nodes matched by the previous regexes.
|
||||
///
|
||||
/// Example: Searching for a `User` class
|
||||
/// ["class\s+User"]
|
||||
///
|
||||
/// Example: Searching for a `get_full_name` method under a `User` class
|
||||
/// ["class\s+User", "def\sget_full_name"]
|
||||
///
|
||||
/// Skip this field to match on content alone.
|
||||
#[schemars(length(max = 3))]
|
||||
#[serde(default)]
|
||||
pub syntax_node: Vec<String>,
|
||||
/// 3. An optional regular expression to match the final content that should appear in the results.
|
||||
///
|
||||
/// - Content will be matched within all lines of the matched syntax nodes.
|
||||
/// - If syntax node regexes are provided, this field can be skipped to include as much of the node itself as possible.
|
||||
/// - If no syntax node regexes are provided, the content will be matched within the entire file.
|
||||
pub content: Option<String>,
|
||||
}
|
||||
|
||||
pub const TOOL_NAME: &str = "search";
|
||||
|
||||
const SEARCH_INSTRUCTIONS: &str = indoc! {r#"
|
||||
You are part of an edit prediction system in a code editor.
|
||||
Your role is to search for code that will serve as context for predicting the next edit.
|
||||
|
||||
- Analyze the user's recent edits and current cursor context
|
||||
- Use the `search` tool to find code that is relevant for predicting the next edit
|
||||
- Focus on finding:
|
||||
- Code patterns that might need similar changes based on the recent edits
|
||||
- Functions, variables, types, and constants referenced in the current cursor context
|
||||
- Related implementations, usages, or dependencies that may require consistent updates
|
||||
- How items defined in the cursor excerpt are used or altered
|
||||
- You will not be able to filter results or perform subsequent queries, so keep searches as targeted as possible
|
||||
- Use `syntax_node` parameter whenever you're looking for a particular type, class, or function
|
||||
- Avoid using wildcard globs if you already know the file path of the content you're looking for
|
||||
"#};
|
||||
|
||||
const TOOL_USE_REMINDER: &str = indoc! {"
|
||||
--
|
||||
Analyze the user's intent in one to two sentences, then call the `search` tool.
|
||||
"};
|
||||
@@ -34,7 +34,7 @@ struct CurrentCompletion {
|
||||
snapshot: BufferSnapshot,
|
||||
/// The edits that should be applied to transform the original text into the predicted text.
|
||||
/// Each edit is a range in the buffer and the text to replace it with.
|
||||
edits: Arc<[(Range<Anchor>, Arc<str>)]>,
|
||||
edits: Arc<[(Range<Anchor>, String)]>,
|
||||
/// Preview of how the buffer will look after applying the edits.
|
||||
edit_preview: EditPreview,
|
||||
}
|
||||
@@ -42,7 +42,7 @@ struct CurrentCompletion {
|
||||
impl CurrentCompletion {
|
||||
/// Attempts to adjust the edits based on changes made to the buffer since the completion was generated.
|
||||
/// Returns None if the user's edits conflict with the predicted edits.
|
||||
fn interpolate(&self, new_snapshot: &BufferSnapshot) -> Option<Vec<(Range<Anchor>, Arc<str>)>> {
|
||||
fn interpolate(&self, new_snapshot: &BufferSnapshot) -> Option<Vec<(Range<Anchor>, String)>> {
|
||||
edit_prediction::interpolate_edits(&self.snapshot, new_snapshot, &self.edits)
|
||||
}
|
||||
}

@@ -281,8 +281,8 @@ impl EditPredictionProvider for CodestralCompletionProvider {
return Ok(());
}

let edits: Arc<[(Range<Anchor>, Arc<str>)]> =
vec![(cursor_position..cursor_position, completion_text.into())].into();
let edits: Arc<[(Range<Anchor>, String)]> =
vec![(cursor_position..cursor_position, completion_text)].into();
let edit_preview = buffer
.read_with(cx, |buffer, cx| buffer.preview_edits(edits.clone(), cx))?
.await;

@@ -346,7 +346,6 @@ impl Server {
.add_request_handler(forward_read_only_project_request::<proto::ResolveInlayHint>)
.add_request_handler(forward_read_only_project_request::<proto::GetColorPresentation>)
.add_request_handler(forward_read_only_project_request::<proto::OpenBufferByPath>)
.add_request_handler(forward_read_only_project_request::<proto::OpenImageByPath>)
.add_request_handler(forward_read_only_project_request::<proto::GitGetBranches>)
.add_request_handler(forward_read_only_project_request::<proto::GetDefaultBranch>)
.add_request_handler(forward_read_only_project_request::<proto::OpenUnstagedDiff>)

@@ -396,7 +395,6 @@ impl Server {
.add_request_handler(forward_mutating_project_request::<proto::StopLanguageServers>)
.add_request_handler(forward_mutating_project_request::<proto::LinkedEditingRange>)
.add_message_handler(create_buffer_for_peer)
.add_message_handler(create_image_for_peer)
.add_request_handler(update_buffer)
.add_message_handler(broadcast_project_message_from_host::<proto::RefreshInlayHints>)
.add_message_handler(broadcast_project_message_from_host::<proto::RefreshCodeLens>)

@@ -2391,26 +2389,6 @@ async fn create_buffer_for_peer(
Ok(())
}

/// Notify other participants that a new image has been created
async fn create_image_for_peer(
request: proto::CreateImageForPeer,
session: MessageContext,
) -> Result<()> {
session
.db()
.await
.check_user_is_project_host(
ProjectId::from_proto(request.project_id),
session.connection_id,
)
.await?;
let peer_id = request.peer_id.context("invalid peer id")?;
session
.peer
.forward_send(session.connection_id, peer_id.into(), request)?;
Ok(())
}

/// Notify other participants that a buffer has been updated. This is
/// allowed for guests as long as the update is limited to selections.
async fn update_buffer(

@@ -23,6 +23,9 @@ pub fn init_test(cx: &mut gpui::TestAppContext) {
cx.update(|cx| {
theme::init(theme::LoadThemes::JustBase, cx);
command_palette_hooks::init(cx);
language::init(cx);
workspace::init_settings(cx);
project::Project::init_settings(cx);
debugger_ui::init(cx);
editor::init(cx);
});

@@ -7065,7 +7065,7 @@ async fn test_remote_git_branches(
// Also try creating a new branch
cx_b.update(|cx| {
repo_b.update(cx, |repository, _cx| {
repository.create_branch("totally-new-branch".to_string(), None)
repository.create_branch("totally-new-branch".to_string())
})
})
.await

@@ -84,6 +84,7 @@ async fn test_sharing_an_ssh_remote_project(
let node = NodeRuntime::unavailable();
let languages = Arc::new(LanguageRegistry::new(server_cx.executor()));
let _headless_project = server_cx.new(|cx| {
client::init_settings(cx);
HeadlessProject::new(
HeadlessAppState {
session: server_ssh,

@@ -244,6 +245,7 @@ async fn test_ssh_collaboration_git_branches(
let node = NodeRuntime::unavailable();
let languages = Arc::new(LanguageRegistry::new(server_cx.executor()));
let headless_project = server_cx.new(|cx| {
client::init_settings(cx);
HeadlessProject::new(
HeadlessAppState {
session: server_ssh,

@@ -326,7 +328,7 @@ async fn test_ssh_collaboration_git_branches(
// Also try creating a new branch
cx_b.update(|cx| {
repo_b.update(cx, |repo_b, _cx| {
repo_b.create_branch("totally-new-branch".to_string(), None)
repo_b.create_branch("totally-new-branch".to_string())
})
})
.await

@@ -448,6 +450,7 @@ async fn test_ssh_collaboration_formatting_with_prettier(
server_cx.update(HeadlessProject::init);
let remote_http_client = Arc::new(BlockedHttpClient);
let _headless_project = server_cx.new(|cx| {
client::init_settings(cx);
HeadlessProject::new(
HeadlessAppState {
session: server_ssh,

@@ -609,6 +612,7 @@ async fn test_remote_server_debugger(
let node = NodeRuntime::unavailable();
let languages = Arc::new(LanguageRegistry::new(server_cx.executor()));
let _headless_project = server_cx.new(|cx| {
client::init_settings(cx);
HeadlessProject::new(
HeadlessAppState {
session: server_ssh,

@@ -717,6 +721,7 @@ async fn test_slow_adapter_startup_retries(
let node = NodeRuntime::unavailable();
let languages = Arc::new(LanguageRegistry::new(server_cx.executor()));
let _headless_project = server_cx.new(|cx| {
client::init_settings(cx);
HeadlessProject::new(
HeadlessAppState {
session: server_ssh,

@@ -174,6 +174,7 @@ impl TestServer {
cx.set_global(settings);
theme::init(theme::LoadThemes::JustBase, cx);
release_channel::init(SemanticVersion::default(), cx);
client::init_settings(cx);
});

let clock = Arc::new(FakeSystemClock::new());

@@ -344,6 +345,7 @@ impl TestServer {
theme::init(theme::LoadThemes::JustBase, cx);
Project::init(&client, cx);
client::init(&client, cx);
language::init(cx);
editor::init(cx);
workspace::init(app_state.clone(), cx);
call::init(client.clone(), user_store.clone(), cx);

@@ -357,6 +359,7 @@ impl TestServer {
);
language_model::LanguageModelRegistry::test(cx);
assistant_text_thread::init(client.clone(), cx);
agent_settings::init(cx);
});

client

@@ -13,10 +13,14 @@ use gpui::{
};
pub use panel_settings::{CollaborationPanelSettings, NotificationPanelSettings};
use release_channel::ReleaseChannel;
use settings::Settings;
use ui::px;
use workspace::AppState;

pub fn init(app_state: &Arc<AppState>, cx: &mut App) {
CollaborationPanelSettings::register(cx);
NotificationPanelSettings::register(cx);

channel_view::init(cx);
collab_panel::init(cx);
notification_panel::init(cx);

@@ -1,16 +1,16 @@
use gpui::Pixels;
use settings::{RegisterSetting, Settings};
use settings::Settings;
use ui::px;
use workspace::dock::DockPosition;

#[derive(Debug, RegisterSetting)]
#[derive(Debug)]
pub struct CollaborationPanelSettings {
pub button: bool,
pub dock: DockPosition,
pub default_width: Pixels,
}

#[derive(Debug, RegisterSetting)]
#[derive(Debug)]
pub struct NotificationPanelSettings {
pub button: bool,
pub dock: DockPosition,

@@ -28,6 +28,7 @@ use workspace::{ModalView, Workspace, WorkspaceSettings};
use zed_actions::{OpenZedUrl, command_palette::Toggle};

pub fn init(cx: &mut App) {
client::init_settings(cx);
command_palette_hooks::init(cx);
cx.observe_new(CommandPalette::register).detach();
}

@@ -788,11 +789,13 @@ mod tests {
cx.update(|cx| {
let app_state = AppState::test(cx);
theme::init(theme::LoadThemes::JustBase, cx);
language::init(cx);
editor::init(cx);
menu::init();
go_to_line::init(cx);
workspace::init(app_state.clone(), cx);
init(cx);
Project::init_settings(cx);
cx.bind_keys(KeymapFile::load_panic_on_failure(
r#"[
{

@@ -26,6 +26,7 @@ test-support = [
[dependencies]
anyhow.workspace = true
chrono.workspace = true
client.workspace = true
collections.workspace = true
command_palette_hooks.workspace = true
dirs.workspace = true

@@ -1115,6 +1115,11 @@ mod tests {
let store = SettingsStore::test(cx);
cx.set_global(store);
theme::init(theme::LoadThemes::JustBase, cx);
client::init_settings(cx);
language::init(cx);
editor::init_settings(cx);
Project::init_settings(cx);
workspace::init_settings(cx);
SettingsStore::update_global(cx, |store: &mut SettingsStore, cx| {
store.update_user_settings(cx, |settings| f(&mut settings.project.all_languages));
});

@@ -256,7 +256,7 @@ impl DebugAdapterClient {
#[cfg(test)]
mod tests {
use super::*;
use crate::client::DebugAdapterClient;
use crate::{client::DebugAdapterClient, debugger_settings::DebuggerSettings};
use dap_types::{
Capabilities, InitializeRequestArguments, InitializeRequestArgumentsPathFormat,
RunInTerminalRequestArguments, StartDebuggingRequestArguments,

@@ -265,7 +265,7 @@ mod tests {
};
use gpui::TestAppContext;
use serde_json::json;
use settings::SettingsStore;
use settings::{Settings, SettingsStore};
use std::sync::{
Arc,
atomic::{AtomicBool, Ordering},

@@ -277,6 +277,7 @@ mod tests {
cx.update(|cx| {
let settings = SettingsStore::test(cx);
cx.set_global(settings);
DebuggerSettings::register(cx);
});
}

@@ -1,7 +1,6 @@
use dap_types::SteppingGranularity;
use settings::{RegisterSetting, Settings, SettingsContent};
use settings::{Settings, SettingsContent};

#[derive(Debug, RegisterSetting)]
pub struct DebuggerSettings {
/// Determines the stepping granularity.
///

@@ -1,9 +1,10 @@
use std::ffi::OsStr;

use anyhow::{Context as _, Result, bail};
use async_trait::async_trait;
use collections::HashMap;
use dap::{StartDebuggingRequestArguments, adapters::DebugTaskDefinition};
use gpui::AsyncApp;
use std::ffi::OsStr;
use task::{DebugScenario, ZedDebugConfig};

use crate::*;

@@ -15,14 +16,6 @@ impl GdbDebugAdapter {
const ADAPTER_NAME: &'static str = "GDB";
}

/// Ensures that "-i=dap" is present in the GDB argument list.
fn ensure_dap_interface(mut gdb_args: Vec<String>) -> Vec<String> {
if !gdb_args.iter().any(|arg| arg.trim() == "-i=dap") {
gdb_args.insert(0, "-i=dap".to_string());
}
gdb_args
}
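
The removed helper above has a small, self-describing contract: prepend `-i=dap` unless some argument already trims to exactly that flag. A quick illustrative check of that behavior (assumes `ensure_dap_interface` is in scope; the test itself is not part of this diff):

```rust
#[test]
fn dap_interface_flag_is_ensured() {
    // "-i=dap" is inserted at the front only when it is missing.
    assert_eq!(
        ensure_dap_interface(vec!["--quiet".to_string()]),
        vec!["-i=dap".to_string(), "--quiet".to_string()]
    );
    // An existing "-i=dap" (even with surrounding whitespace) is left alone.
    assert_eq!(
        ensure_dap_interface(vec![" -i=dap ".to_string()]),
        vec![" -i=dap ".to_string()]
    );
}
```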

#[async_trait(?Send)]
impl DebugAdapter for GdbDebugAdapter {
fn name(&self) -> DebugAdapterName {

@@ -106,18 +99,6 @@ impl DebugAdapter for GdbDebugAdapter {
"type": "string",
"description": "Working directory for the debugged program. GDB will change its working directory to this directory."
},
"gdb_path": {
"type": "string",
"description": "Alternative path to the GDB executable, if the one in standard path is not desirable"
},
"gdb_args": {
"type": "array",
"items": {
"type":"string"
},
"description": "additional arguments given to GDB at startup, not the program debugged",
"default": []
},
"env": {
"type": "object",
"description": "Environment variables for the debugged program. Each key is the name of an environment variable; each value is the value of that variable."

@@ -183,49 +164,21 @@ impl DebugAdapter for GdbDebugAdapter {
user_env: Option<HashMap<String, String>>,
_: &mut AsyncApp,
) -> Result<DebugAdapterBinary> {
// Try to get gdb_path from config
let gdb_path_from_config = config
.config
.get("gdb_path")
.and_then(|v| v.as_str())
.map(|s| s.to_string());
let user_setting_path = user_installed_path
.filter(|p| p.exists())
.and_then(|p| p.to_str().map(|s| s.to_string()));

let gdb_path = if let Some(path) = gdb_path_from_config {
path
} else {
// Original logic: use user_installed_path or search in system path
let user_setting_path = user_installed_path
.filter(|p| p.exists())
.and_then(|p| p.to_str().map(|s| s.to_string()));
let gdb_path = delegate
.which(OsStr::new("gdb"))
.await
.and_then(|p| p.to_str().map(|s| s.to_string()))
.context("Could not find gdb in path");

let gdb_path_result = delegate
.which(OsStr::new("gdb"))
.await
.and_then(|p| p.to_str().map(|s| s.to_string()))
.context("Could not find gdb in path");
if gdb_path.is_err() && user_setting_path.is_none() {
bail!("Could not find gdb path or it's not installed");
}

if gdb_path_result.is_err() && user_setting_path.is_none() {
bail!("Could not find gdb path or it's not installed");
}

user_setting_path.unwrap_or_else(|| gdb_path_result.unwrap())
};

// Arguments: use gdb_args from config if present, else user_args, else default
let gdb_args = {
let args = config
.config
.get("gdb_args")
.and_then(|v| v.as_array())
.map(|arr| {
arr.iter()
.filter_map(|v| v.as_str().map(|s| s.to_string()))
.collect::<Vec<_>>()
})
.or(user_args.clone())
.unwrap_or_else(|| vec!["-i=dap".into()]);
ensure_dap_interface(args)
};
let gdb_path = user_setting_path.unwrap_or(gdb_path?);

let mut configuration = config.config.clone();
if let Some(configuration) = configuration.as_object_mut() {

@@ -234,26 +187,10 @@ impl DebugAdapter for GdbDebugAdapter {
.or_insert_with(|| delegate.worktree_root_path().to_string_lossy().into());
}

let mut base_env = delegate.shell_env().await;
base_env.extend(user_env.unwrap_or_default());

let config_env: HashMap<String, String> = config
.config
.get("env")
.and_then(|v| v.as_object())
.map(|obj| {
obj.iter()
.filter_map(|(k, v)| v.as_str().map(|s| (k.clone(), s.to_string())))
.collect::<HashMap<String, String>>()
})
.unwrap_or_else(HashMap::default);

base_env.extend(config_env);

Ok(DebugAdapterBinary {
command: Some(gdb_path),
arguments: gdb_args,
envs: base_env,
arguments: user_args.unwrap_or_else(|| vec!["-i=dap".into()]),
envs: user_env.unwrap_or_default(),
cwd: Some(delegate.worktree_root_path().to_path_buf()),
connection: None,
request_args: StartDebuggingRequestArguments {

@@ -1,5 +1,6 @@
use std::any::TypeId;

use dap::debugger_settings::DebuggerSettings;
use debugger_panel::DebugPanel;
use editor::Editor;
use gpui::{Action, App, DispatchPhase, EntityInputHandler, actions};

@@ -9,6 +10,7 @@ use project::debugger::{self, breakpoint_store::SourceBreakpoint, session::Threa
use schemars::JsonSchema;
use serde::Deserialize;
use session::DebugSession;
use settings::Settings;
use stack_trace_view::StackTraceView;
use tasks_ui::{Spawn, TaskOverrides};
use ui::{FluentBuilder, InteractiveElement};

@@ -113,6 +115,7 @@ actions!(
);

pub fn init(cx: &mut App) {
DebuggerSettings::register(cx);
workspace::FollowableViewRegistry::register::<DebugSession>(cx);

cx.observe_new(|workspace: &mut Workspace, _, _| {

@@ -1404,7 +1404,6 @@ impl VariableList {
div()
.text_ui(cx)
.w_full()
.truncate()
.when(self.disabled, |this| {
this.text_color(Color::Disabled.color(cx))
})

@@ -43,6 +43,9 @@ pub fn init_test(cx: &mut gpui::TestAppContext) {
terminal_view::init(cx);
theme::init(theme::LoadThemes::JustBase, cx);
command_palette_hooks::init(cx);
language::init(cx);
workspace::init_settings(cx);
Project::init_settings(cx);
editor::init(cx);
crate::init(cx);
dap_adapters::init(cx);

@@ -23,7 +23,7 @@ use project::{
use settings::Settings;
use std::{
any::{Any, TypeId},
cmp::{self, Ordering},
cmp::Ordering,
sync::Arc,
};
use text::{Anchor, BufferSnapshot, OffsetRangeExt};

@@ -410,7 +410,7 @@ impl BufferDiagnosticsEditor {
// in the editor.
// This is done by iterating over the list of diagnostic blocks and
// determine what range does the diagnostic block span.
let mut excerpt_ranges: Vec<ExcerptRange<_>> = Vec::new();
let mut excerpt_ranges: Vec<ExcerptRange<Point>> = Vec::new();

for diagnostic_block in blocks.iter() {
let excerpt_range = context_range_for_entry(

@@ -420,43 +420,30 @@ impl BufferDiagnosticsEditor {
&mut cx,
)
.await;
let initial_range = buffer_snapshot
.anchor_after(diagnostic_block.initial_range.start)
..buffer_snapshot.anchor_before(diagnostic_block.initial_range.end);

let bin_search = |probe: &ExcerptRange<text::Anchor>| {
let context_start = || {
let index = excerpt_ranges
.binary_search_by(|probe| {
probe
.context
.start
.cmp(&excerpt_range.start, &buffer_snapshot)
};
let context_end =
|| probe.context.end.cmp(&excerpt_range.end, &buffer_snapshot);
let primary_start = || {
probe
.primary
.start
.cmp(&initial_range.start, &buffer_snapshot)
};
let primary_end =
|| probe.primary.end.cmp(&initial_range.end, &buffer_snapshot);
context_start()
.then_with(context_end)
.then_with(primary_start)
.then_with(primary_end)
.then(cmp::Ordering::Greater)
};

let index = excerpt_ranges
.binary_search_by(bin_search)
.unwrap_or_else(|i| i);
.cmp(&excerpt_range.start)
.then(probe.context.end.cmp(&excerpt_range.end))
.then(
probe
.primary
.start
.cmp(&diagnostic_block.initial_range.start),
)
.then(probe.primary.end.cmp(&diagnostic_block.initial_range.end))
.then(Ordering::Greater)
})
.unwrap_or_else(|index| index);

excerpt_ranges.insert(
index,
ExcerptRange {
context: excerpt_range,
primary: initial_range,
primary: diagnostic_block.initial_range.clone(),
},
)
}
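
Both the old and new versions of this hunk lean on the same standard-library idiom: keep the `Vec` sorted by inserting at the index that `binary_search_by` reports, with a comparator that never returns `Equal` so the search always yields an insertion point. A generic sketch of that pattern, independent of the editor types used above:

```rust
use std::cmp::Ordering;

// Insert `item` into an already-sorted Vec while keeping it sorted.
// Mapping Equal to Greater means the search never finds an exact match,
// so Err(i) is returned and points at the first element that does not
// compare Less -- the new item lands just before any equal entries.
fn insert_sorted<T, F>(items: &mut Vec<T>, item: T, mut cmp: F)
where
    F: FnMut(&T, &T) -> Ordering,
{
    let index = items
        .binary_search_by(|probe| cmp(probe, &item).then(Ordering::Greater))
        .unwrap_or_else(|i| i);
    items.insert(index, item);
}
```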

@@ -479,13 +466,6 @@ impl BufferDiagnosticsEditor {
buffer_diagnostics_editor
.multibuffer
.update(cx, |multibuffer, cx| {
let excerpt_ranges = excerpt_ranges
.into_iter()
.map(|range| ExcerptRange {
context: range.context.to_point(&buffer_snapshot),
primary: range.primary.to_point(&buffer_snapshot),
})
.collect();
multibuffer.set_excerpt_ranges_for_path(
PathKey::for_buffer(&buffer, cx),
buffer.clone(),

@@ -39,8 +39,8 @@ impl DiagnosticRenderer {
let group_id = primary.diagnostic.group_id;
let mut results = vec![];
for entry in diagnostic_group.iter() {
let mut markdown = Self::markdown(&entry.diagnostic);
if entry.diagnostic.is_primary {
let mut markdown = Self::markdown(&entry.diagnostic);
let diagnostic = &primary.diagnostic;
if diagnostic.source.is_some() || diagnostic.code.is_some() {
markdown.push_str(" (");

@@ -81,12 +81,21 @@ impl DiagnosticRenderer {
diagnostics_editor: diagnostics_editor.clone(),
markdown: cx.new(|cx| Markdown::new(markdown.into(), None, None, cx)),
});
} else if entry.range.start.row.abs_diff(primary.range.start.row) < 5 {
let markdown = Self::markdown(&entry.diagnostic);

results.push(DiagnosticBlock {
initial_range: entry.range.clone(),
severity: entry.diagnostic.severity,
diagnostics_editor: diagnostics_editor.clone(),
markdown: cx.new(|cx| Markdown::new(markdown.into(), None, None, cx)),
});
} else {
if entry.range.start.row.abs_diff(primary.range.start.row) >= 5 {
markdown.push_str(&format!(
" ([back](file://#diagnostic-{buffer_id}-{group_id}-{primary_ix}))"
));
}
let mut markdown = Self::markdown(&entry.diagnostic);
markdown.push_str(&format!(
" ([back](file://#diagnostic-{buffer_id}-{group_id}-{primary_ix}))"
));

results.push(DiagnosticBlock {
initial_range: entry.range.clone(),
severity: entry.diagnostic.severity,