Compare commits
270 Commits
perf/proje...inline-ass
a7cf3c306a, 829be71061, 2a2f5a9c7a, 97b429953e, 404ee53812, 17c30565fc, f05eef58c4, 52716bacef, 79be5cbfe2, a42676b6bb,
39f8aefa8c, 1c1dfba7e3, 2c4fd24d37, 3125e78904, 74d61aad7f, 40dd4e2270, 9feb260216, 5ccbe945a6, a910c594d6, 19d2532cf8,
785b81aa3a, 24c1617e74, 1e2f15a3d7, 7c0663b825, 94a43dc73a, e8e0707256, d7c340c739, 16b24e892e, 917148c5ce, 951132fc13,
bf0dd4057c, 3c4ca3f372, 03132921c7, c0fadae881, 1c66c3991d, 7e591a7e9a, c44d93745a, b4e4e0d3ac, 097024d46f, f1c2afdee0,
ea120dfe18, d2988ffc77, f17d2c92b6, c1d9dc369c, 696fdd8fed, 980f8bff2a, 2a3bcbfe0f, 5225a84aff, 5c70f8391f, 10efbd5eb4,
0386f240a9, a39ba03bcc, 2c7bcfcb7b, 6bea23e990, 98da1ea169, 98a83b47e6, 5f356d04ff, 73d3f9611e, d9cfc2c883, ee420d530e,
d801d0950e, 3f25d36b3c, f015368586, 4bf3b9d62e, 599a217ea5, b0a7defd09, 57e3bcfcf8, b2f561165f, fd1494c31a, faa1136651,
6bf5e92a25, 46ad6c0bbb, 671500de1b, 0519c645fb, 23872b0523, 4b050b651a, bb46bc167a, b274f80dd9, d77ab99ab1, 97792f7fb9,
9bebf314e0, 4092e81ada, e0b64773d9, f1bebd79d1, a66a539a09, a2d3e3baf9, 175162af4f, cdcc068906, 86484aaded, d32934a893,
b463266fa1, b0525a26a6, 1683052e6c, 07cc87b288, 1277f328c4, b3097cfc8a, 305206fd48, c387203ac8, a260ba6428, a8e0de37ac,
a1a599dac5, 524b97d729, 8772727034, aaa116d129, c1096d8b63, 092071a2f0, 723f9b1371, 37523b0007, b4167caaf1, 020f518231,
ead4f26b52, 3de3a369f5, 28a0b82618, e2c95a8d84, 3da4d3aac3, 6f99eeffa8, 15ab96af6b, e80b490ac0, 3c577ba019, e1d295a6b4,
84f24e4b62, 03fad4b951, c626e770a0, fa0c7500c1, e91be9e98e, 46eb9e5223, cb7bd5fe19, b900ac2ac7, b709996ec6, b6972d70a5,
ec1664f61a, c2c5fceb5b, eadc2301e0, b500470391, 55e4258147, 8467a1b08b, fb90b12073, 92e64f9cf0, f318bb5fd7, 430b55405a,
27f700e2b2, b5633f5bc7, b9ce52dc95, 34a7cfb2e5, 99016e3a85, dea3c8c949, 7eac6d242c, b92b28314f, 1fc0642de1, 045ac6d1b6,
1936f16c62, b32559f07d, 28adedf1fa, c9e231043a, ede3b1dae6, b0700a4625, f2a1eb9963, 0c1ca2a45a, 8fd8b989a6, fd837b348f,
6b239c3a9a, 73e5df6445, b403c199df, cb4067723b, 1c625f8783, 4adec27a3d, e8daab15ab, 6501b0c311, 6c0069ca98, c8930e07a3,
ab352f669e, e79188261b, ab62739605, cfbde91833, 80b32ddaad, 53652cdb3f, 1d75a9c4b2, c5ab1d4679, 1fdd95a9b3, 49634f6041,
2119ac42d7, e833d1af8d, 7be76c74d6, c2980cba18, a0be53a190, 70feff3c7a, f46990bac8, 78f466559a, 4f158c1983, ddf762e368,
f2cadad49a, 231d1b1d58, 2bcfc12951, cf6ae01d07, 2ad7ecbcf0, 854c6873c7, da94f898e6, f62bfe1dfa, a56693d9e8, b4b7a23c39,
0d56ed7d91, e01e0b83c4, 908ef03502, 5f4d0dbaab, c50f821613, 7e491ac500, 9e1e732db8, 83351283e4, 03acbb7de3, 0268b17096,
993919d360, 8467a3dbd6, ee2e690657, 28d019be2e, a19d11184d, 38e2c7aa66, 10d5d78ded, dfd7e85d5d, b8fcd3ea04, 9be5e31aca,
58db38722b, f2ad0d716f, 777b46533f, b3dd51560b, 25489c2b7a, dc372e8a84, 1c4bb60209, 97100ce52f, dcf56144b5, 46db753f79,
1a807a7a6a, f90d0789fb, 9e717c7711, 823844ef18, 70bcf93355, 378b30eba5, 83e7c21b2c, e488b6cd0b, f52549c1c4, 359521e91d,
b607077c08, e5fce424b3, a8b04369ae, 11b38db3e3, 112b5c16b7, 32ec1037e1, a44fc9a1de, efcd7f7d10, aaf2f9d309, 62e3a49212,
87d0401e64, 2c375e2e0a, c24f9e47b4, 3fbfea491d, 2b369d7532, ed61a79cc5, aa6270e658, d896af2f15, c748b177c4, ddf5937899,
6e1d86f311, a3f04e8b36, 3c81ee6ba6, 35ae2f5b2b, d420dd63ed, 42ed032f12, 2d84af91bf, 7aacc7566c, 8d632958db, 0149de4b54

2  .github/ISSUE_TEMPLATE/config.yml  vendored
@@ -1,4 +1,4 @@
# yaml-language-server: $schema=https://json.schemastore.org/github-issue-config.json
# yaml-language-server: $schema=https://www.schemastore.org/github-issue-config.json
blank_issues_enabled: false
contact_links:
- name: Feature Request

6  .github/actions/run_tests/action.yml  vendored
@@ -4,10 +4,8 @@ description: "Runs the tests"
runs:
using: "composite"
steps:
- name: Install Rust
shell: bash -euxo pipefail {0}
run: |
cargo install cargo-nextest --locked
- name: Install nextest
uses: taiki-e/install-action@nextest

- name: Install Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
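
Several hunks in this compare replace a from-source `cargo install cargo-nextest --locked` step with the prebuilt-binary installer action. A minimal sketch of the new-style step pair, assuming a composite action like the one above (step names are illustrative, not taken from this diff):

```yaml
steps:
  # Download a prebuilt cargo-nextest binary instead of compiling it
  # with `cargo install cargo-nextest --locked`.
  - name: Install nextest
    uses: taiki-e/install-action@nextest

  # Later steps can then call the runner directly.
  - name: Run tests
    shell: bash -euxo pipefail {0}
    run: cargo nextest run --workspace --no-fail-fast
```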

3  .github/actions/run_tests_windows/action.yml  vendored
@@ -11,9 +11,8 @@ runs:
using: "composite"
steps:
- name: Install test runner
shell: powershell
working-directory: ${{ inputs.working-directory }}
run: cargo install cargo-nextest --locked
uses: taiki-e/install-action@nextest

- name: Install Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4

36  .github/workflows/after_release.yml  vendored
@@ -7,7 +7,7 @@ on:
- published
jobs:
rebuild_releases_page:
if: github.repository_owner == 'zed-industries'
if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions')
runs-on: namespace-profile-2x4-ubuntu-2404
steps:
- name: after_release::rebuild_releases_page::refresh_cloud_releases
@@ -21,7 +21,7 @@ jobs:
post_to_discord:
needs:
- rebuild_releases_page
if: github.repository_owner == 'zed-industries'
if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions')
runs-on: namespace-profile-2x4-ubuntu-2404
steps:
- id: get-release-url
@@ -56,14 +56,14 @@ jobs:
- id: set-package-name
name: after_release::publish_winget::set_package_name
run: |
if [ "${{ github.event.release.prerelease }}" == "true" ]; then
PACKAGE_NAME=ZedIndustries.Zed.Preview
else
PACKAGE_NAME=ZedIndustries.Zed
fi
if ("${{ github.event.release.prerelease }}" -eq "true") {
$PACKAGE_NAME = "ZedIndustries.Zed.Preview"
} else {
$PACKAGE_NAME = "ZedIndustries.Zed"
}

echo "PACKAGE_NAME=$PACKAGE_NAME" >> "$GITHUB_OUTPUT"
shell: bash -euxo pipefail {0}
echo "PACKAGE_NAME=$PACKAGE_NAME" >> $env:GITHUB_OUTPUT
shell: pwsh
- name: after_release::publish_winget::winget_releaser
uses: vedantmgoyal9/winget-releaser@19e706d4c9121098010096f9c495a70a7518b30f
with:
@@ -71,7 +71,7 @@ jobs:
max-versions-to-keep: 5
token: ${{ secrets.WINGET_TOKEN }}
create_sentry_release:
if: github.repository_owner == 'zed-industries'
if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions')
runs-on: namespace-profile-2x4-ubuntu-2404
steps:
- name: steps::checkout_repo
@@ -86,3 +86,19 @@ jobs:
SENTRY_ORG: zed-dev
SENTRY_PROJECT: zed
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
notify_on_failure:
needs:
- rebuild_releases_page
- post_to_discord
- publish_winget
- create_sentry_release
if: failure()
runs-on: namespace-profile-2x4-ubuntu-2404
steps:
- name: release::notify_on_failure::notify_slack
run: |-
curl -X POST -H 'Content-type: application/json'\
--data '{"text":"${{ github.workflow }} failed: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"}' "$SLACK_WEBHOOK"
shell: bash -euxo pipefail {0}
env:
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_WORKFLOW_FAILURES }}
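
Pieced together from the added lines above, the winget `set-package-name` step now runs under PowerShell rather than Bash; a sketch of the resulting step, assembled here for readability rather than copied verbatim from the final file:

```yaml
- id: set-package-name
  name: after_release::publish_winget::set_package_name
  shell: pwsh
  run: |
    # Pick the winget package id based on whether the release is a pre-release.
    if ("${{ github.event.release.prerelease }}" -eq "true") {
      $PACKAGE_NAME = "ZedIndustries.Zed.Preview"
    } else {
      $PACKAGE_NAME = "ZedIndustries.Zed"
    }

    # Publish the choice as a step output for the winget-releaser step.
    echo "PACKAGE_NAME=$PACKAGE_NAME" >> $env:GITHUB_OUTPUT
```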

2  .github/workflows/bump_patch_version.yml  vendored
@@ -42,7 +42,7 @@ jobs:
exit 1
;;
esac
which cargo-set-version > /dev/null || cargo install cargo-edit
which cargo-set-version > /dev/null || cargo install cargo-edit -f --no-default-features --features "set-version"
output="$(cargo set-version -p zed --bump patch 2>&1 | sed 's/.* //')"
export GIT_COMMITTER_NAME="Zed Bot"
export GIT_COMMITTER_EMAIL="hi@zed.dev"

5  .github/workflows/cherry_pick.yml  vendored
@@ -1,6 +1,7 @@
# Generated from xtask::workflows::cherry_pick
# Rebuild with `cargo xtask workflows`.
name: cherry_pick
run-name: 'cherry_pick to ${{ inputs.channel }} #${{ inputs.pr_number }}'
on:
workflow_dispatch:
inputs:
@@ -16,6 +17,10 @@ on:
description: channel
required: true
type: string
pr_number:
description: pr_number
required: true
type: string
jobs:
run_cherry_pick:
runs-on: namespace-profile-2x4-ubuntu-2404

@@ -1,7 +1,7 @@
name: "Close Stale Issues"
on:
schedule:
- cron: "0 7,9,11 * * 3"
- cron: "0 8 31 DEC *"
workflow_dispatch:

jobs:
@@ -15,14 +15,15 @@ jobs:
stale-issue-message: >
Hi there! 👋

We're working to clean up our issue tracker by closing older issues that might not be relevant anymore. If you are able to reproduce this issue in the latest version of Zed, please let us know by commenting on this issue, and we will keep it open. If you can't reproduce it, feel free to close the issue yourself. Otherwise, we'll close it in 7 days.
We're working to clean up our issue tracker by closing older bugs that might not be relevant anymore. If you are able to reproduce this issue in the latest version of Zed, please let us know by commenting on this issue, and it will be kept open. If you can't reproduce it, feel free to close the issue yourself. Otherwise, it will close automatically in 14 days.

Thanks for your help!
close-issue-message: "This issue was closed due to inactivity. If you're still experiencing this problem, please open a new issue with a link to this issue."
days-before-stale: 120
days-before-close: 7
any-of-issue-labels: "bug,panic / crash"
days-before-stale: 60
days-before-close: 14
only-issue-types: "Bug,Crash"
operations-per-run: 1000
ascending: true
enable-statistics: true
stale-issue-label: "stale"
exempt-issue-labels: "never stale"
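
Taken together, these hunks tighten the stale-issue policy from 120/7 days keyed on labels to 60/14 days keyed on issue types. A sketch of how the changed options would read in the action's `with:` block after this diff (unchanged options repeated for context):

```yaml
with:
  # Mark Bug/Crash issues stale after 60 days of inactivity,
  days-before-stale: 60
  # and close them 14 days later unless someone replies.
  days-before-close: 14
  # Select issues by type instead of by label.
  only-issue-types: "Bug,Crash"
  operations-per-run: 1000
  ascending: true
  enable-statistics: true
  stale-issue-label: "stale"
  exempt-issue-labels: "never stale"
```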

6  .github/workflows/compare_perf.yml  vendored
@@ -35,9 +35,11 @@ jobs:
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- name: compare_perf::run_perf::install_hyperfine
run: cargo install hyperfine
- name: steps::download_wasi_sdk
run: ./script/download-wasi-sdk
shell: bash -euxo pipefail {0}
- name: compare_perf::run_perf::install_hyperfine
uses: taiki-e/install-action@hyperfine
- name: steps::git_checkout
run: git fetch origin ${{ inputs.base }} && git checkout ${{ inputs.base }}
shell: bash -euxo pipefail {0}

2  .github/workflows/danger.yml  vendored
@@ -12,7 +12,7 @@ on:
- main
jobs:
danger:
if: github.repository_owner == 'zed-industries'
if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions')
runs-on: namespace-profile-2x4-ubuntu-2404
steps:
- name: steps::checkout_repo

4  .github/workflows/deploy_collab.yml  vendored
@@ -43,9 +43,7 @@ jobs:
fetch-depth: 0

- name: Install cargo nextest
shell: bash -euxo pipefail {0}
run: |
cargo install cargo-nextest --locked
uses: taiki-e/install-action@nextest

- name: Limit target directory size
shell: bash -euxo pipefail {0}

138  .github/workflows/extension_tests.yml  vendored (new file)
@@ -0,0 +1,138 @@
# Generated from xtask::workflows::extension_tests
# Rebuild with `cargo xtask workflows`.
name: extension_tests
env:
CARGO_TERM_COLOR: always
RUST_BACKTRACE: '1'
CARGO_INCREMENTAL: '0'
ZED_EXTENSION_CLI_SHA: 7cfce605704d41ca247e3f84804bf323f6c6caaf
on:
workflow_call:
inputs:
run_tests:
description: Whether the workflow should run rust tests
required: true
type: boolean
jobs:
orchestrate:
if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions')
runs-on: namespace-profile-2x4-ubuntu-2404
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
fetch-depth: ${{ github.ref == 'refs/heads/main' && 2 || 350 }}
- id: filter
name: filter
run: |
if [ -z "$GITHUB_BASE_REF" ]; then
echo "Not in a PR context (i.e., push to main/stable/preview)"
COMPARE_REV="$(git rev-parse HEAD~1)"
else
echo "In a PR context comparing to pull_request.base.ref"
git fetch origin "$GITHUB_BASE_REF" --depth=350
COMPARE_REV="$(git merge-base "origin/${GITHUB_BASE_REF}" HEAD)"
fi
CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" ${{ github.sha }})"

check_pattern() {
local output_name="$1"
local pattern="$2"
local grep_arg="$3"

echo "$CHANGED_FILES" | grep "$grep_arg" "$pattern" && \
echo "${output_name}=true" >> "$GITHUB_OUTPUT" || \
echo "${output_name}=false" >> "$GITHUB_OUTPUT"
}

check_pattern "check_rust" '^(Cargo.lock|Cargo.toml|.*\.rs)$' -qP
check_pattern "check_extension" '^.*\.scm$' -qP
shell: bash -euxo pipefail {0}
outputs:
check_rust: ${{ steps.filter.outputs.check_rust }}
check_extension: ${{ steps.filter.outputs.check_extension }}
check_rust:
needs:
- orchestrate
if: needs.orchestrate.outputs.check_rust == 'true'
runs-on: namespace-profile-16x32-ubuntu-2204
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: steps::cache_rust_dependencies_namespace
uses: namespacelabs/nscloud-cache-action@v1
with:
cache: rust
- name: steps::cargo_fmt
run: cargo fmt --all -- --check
shell: bash -euxo pipefail {0}
- name: extension_tests::run_clippy
run: cargo clippy --release --all-targets --all-features -- --deny warnings
shell: bash -euxo pipefail {0}
- name: steps::cargo_install_nextest
if: inputs.run_tests
uses: taiki-e/install-action@nextest
- name: steps::cargo_nextest
if: inputs.run_tests
run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final
shell: bash -euxo pipefail {0}
timeout-minutes: 3
check_extension:
needs:
- orchestrate
if: needs.orchestrate.outputs.check_extension == 'true'
runs-on: namespace-profile-2x4-ubuntu-2404
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- id: cache-zed-extension-cli
name: extension_tests::cache_zed_extension_cli
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830
with:
path: zed-extension
key: zed-extension-${{ env.ZED_EXTENSION_CLI_SHA }}
- name: extension_tests::download_zed_extension_cli
if: steps.cache-zed-extension-cli.outputs.cache-hit != 'true'
run: |
wget --quiet "https://zed-extension-cli.nyc3.digitaloceanspaces.com/$ZED_EXTENSION_CLI_SHA/x86_64-unknown-linux-gnu/zed-extension"
chmod +x zed-extension
shell: bash -euxo pipefail {0}
- name: extension_tests::check
run: |
mkdir -p /tmp/ext-scratch
mkdir -p /tmp/ext-output
./zed-extension --source-dir . --scratch-dir /tmp/ext-scratch --output-dir /tmp/ext-output
shell: bash -euxo pipefail {0}
timeout-minutes: 1
tests_pass:
needs:
- orchestrate
- check_rust
- check_extension
if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') && always()
runs-on: namespace-profile-2x4-ubuntu-2404
steps:
- name: run_tests::tests_pass
run: |
set +x
EXIT_CODE=0

check_result() {
echo "* $1: $2"
if [[ "$2" != "skipped" && "$2" != "success" ]]; then EXIT_CODE=1; fi
}

check_result "orchestrate" "${{ needs.orchestrate.result }}"
check_result "check_rust" "${{ needs.check_rust.result }}"
check_result "check_extension" "${{ needs.check_extension.result }}"

exit $EXIT_CODE
shell: bash -euxo pipefail {0}
concurrency:
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
cancel-in-progress: true
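
The `orchestrate` job above gates the downstream jobs by grepping the list of changed files. A condensed sketch of that pattern on its own, assuming the same output names and path patterns; the checkout and job wiring are omitted, and `$GITHUB_SHA` stands in for `${{ github.sha }}`:

```yaml
- id: filter
  shell: bash -euxo pipefail {0}
  run: |
    # COMPARE_REV is the PR merge-base, or HEAD~1 on direct pushes.
    CHANGED_FILES="$(git diff --name-only "$COMPARE_REV" "$GITHUB_SHA")"

    check_pattern() {
      local output_name="$1" pattern="$2" grep_arg="$3"
      # Write "<name>=true" or "<name>=false" so later jobs can gate on it.
      echo "$CHANGED_FILES" | grep "$grep_arg" "$pattern" && \
        echo "${output_name}=true" >> "$GITHUB_OUTPUT" || \
        echo "${output_name}=false" >> "$GITHUB_OUTPUT"
    }

    # Rust sources and manifests trigger check_rust; Tree-sitter query
    # files (*.scm) trigger check_extension.
    check_pattern "check_rust" '^(Cargo.lock|Cargo.toml|.*\.rs)$' -qP
    check_pattern "check_extension" '^.*\.scm$' -qP
```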
50
.github/workflows/release.yml
vendored
50
.github/workflows/release.yml
vendored
@@ -10,7 +10,7 @@ on:
|
||||
- v*
|
||||
jobs:
|
||||
run_tests_mac:
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions')
|
||||
runs-on: self-mini-macos
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
@@ -29,9 +29,6 @@ jobs:
|
||||
- name: steps::clippy
|
||||
run: ./script/clippy
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::cargo_install_nextest
|
||||
run: cargo install cargo-nextest --locked
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::clear_target_dir_if_large
|
||||
run: ./script/clear-target-dir-if-larger-than 300
|
||||
shell: bash -euxo pipefail {0}
|
||||
@@ -45,7 +42,7 @@ jobs:
|
||||
shell: bash -euxo pipefail {0}
|
||||
timeout-minutes: 60
|
||||
run_tests_linux:
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions')
|
||||
runs-on: namespace-profile-16x32-ubuntu-2204
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
@@ -57,16 +54,19 @@ jobs:
|
||||
mkdir -p ./../.cargo
|
||||
cp ./.cargo/ci-config.toml ./../.cargo/config.toml
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::cache_rust_dependencies_namespace
|
||||
uses: namespacelabs/nscloud-cache-action@v1
|
||||
with:
|
||||
cache: rust
|
||||
- name: steps::setup_linux
|
||||
run: ./script/linux
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::install_mold
|
||||
run: ./script/install-mold
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::cache_rust_dependencies_namespace
|
||||
uses: namespacelabs/nscloud-cache-action@v1
|
||||
with:
|
||||
cache: rust
|
||||
- name: steps::download_wasi_sdk
|
||||
run: ./script/download-wasi-sdk
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::setup_node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
|
||||
with:
|
||||
@@ -75,8 +75,7 @@ jobs:
|
||||
run: ./script/clippy
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::cargo_install_nextest
|
||||
run: cargo install cargo-nextest --locked
|
||||
shell: bash -euxo pipefail {0}
|
||||
uses: taiki-e/install-action@nextest
|
||||
- name: steps::clear_target_dir_if_large
|
||||
run: ./script/clear-target-dir-if-larger-than 250
|
||||
shell: bash -euxo pipefail {0}
|
||||
@@ -90,7 +89,7 @@ jobs:
|
||||
shell: bash -euxo pipefail {0}
|
||||
timeout-minutes: 60
|
||||
run_tests_windows:
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions')
|
||||
runs-on: self-32vcpu-windows-2022
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
@@ -109,9 +108,6 @@ jobs:
|
||||
- name: steps::clippy
|
||||
run: ./script/clippy.ps1
|
||||
shell: pwsh
|
||||
- name: steps::cargo_install_nextest
|
||||
run: cargo install cargo-nextest --locked
|
||||
shell: pwsh
|
||||
- name: steps::clear_target_dir_if_large
|
||||
run: ./script/clear-target-dir-if-larger-than.ps1 250
|
||||
shell: pwsh
|
||||
@@ -125,7 +121,7 @@ jobs:
|
||||
shell: pwsh
|
||||
timeout-minutes: 60
|
||||
check_scripts:
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions')
|
||||
runs-on: namespace-profile-2x4-ubuntu-2404
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
@@ -154,7 +150,7 @@ jobs:
|
||||
shell: bash -euxo pipefail {0}
|
||||
timeout-minutes: 60
|
||||
create_draft_release:
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions')
|
||||
runs-on: namespace-profile-2x4-ubuntu-2404
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
@@ -202,6 +198,9 @@ jobs:
|
||||
- name: steps::install_mold
|
||||
run: ./script/install-mold
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::download_wasi_sdk
|
||||
run: ./script/download-wasi-sdk
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: ./script/bundle-linux
|
||||
run: ./script/bundle-linux
|
||||
shell: bash -euxo pipefail {0}
|
||||
@@ -242,6 +241,9 @@ jobs:
|
||||
- name: steps::install_mold
|
||||
run: ./script/install-mold
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::download_wasi_sdk
|
||||
run: ./script/download-wasi-sdk
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: ./script/bundle-linux
|
||||
run: ./script/bundle-linux
|
||||
shell: bash -euxo pipefail {0}
|
||||
@@ -475,6 +477,20 @@ jobs:
|
||||
shell: bash -euxo pipefail {0}
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
notify_on_failure:
|
||||
needs:
|
||||
- upload_release_assets
|
||||
- auto_release_preview
|
||||
if: failure()
|
||||
runs-on: namespace-profile-2x4-ubuntu-2404
|
||||
steps:
|
||||
- name: release::notify_on_failure::notify_slack
|
||||
run: |-
|
||||
curl -X POST -H 'Content-type: application/json'\
|
||||
--data '{"text":"${{ github.workflow }} failed: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"}' "$SLACK_WEBHOOK"
|
||||
shell: bash -euxo pipefail {0}
|
||||
env:
|
||||
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_WORKFLOW_FAILURES }}
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
|
||||
cancel-in-progress: true
|
||||
|
||||
37
.github/workflows/release_nightly.yml
vendored
37
.github/workflows/release_nightly.yml
vendored
@@ -12,7 +12,7 @@ on:
|
||||
- cron: 0 7 * * *
|
||||
jobs:
|
||||
check_style:
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions')
|
||||
runs-on: self-mini-macos
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
@@ -28,7 +28,7 @@ jobs:
|
||||
shell: bash -euxo pipefail {0}
|
||||
timeout-minutes: 60
|
||||
run_tests_windows:
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions')
|
||||
runs-on: self-32vcpu-windows-2022
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
@@ -47,9 +47,6 @@ jobs:
|
||||
- name: steps::clippy
|
||||
run: ./script/clippy.ps1
|
||||
shell: pwsh
|
||||
- name: steps::cargo_install_nextest
|
||||
run: cargo install cargo-nextest --locked
|
||||
shell: pwsh
|
||||
- name: steps::clear_target_dir_if_large
|
||||
run: ./script/clear-target-dir-if-larger-than.ps1 250
|
||||
shell: pwsh
|
||||
@@ -93,6 +90,9 @@ jobs:
|
||||
- name: steps::install_mold
|
||||
run: ./script/install-mold
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::download_wasi_sdk
|
||||
run: ./script/download-wasi-sdk
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: ./script/bundle-linux
|
||||
run: ./script/bundle-linux
|
||||
shell: bash -euxo pipefail {0}
|
||||
@@ -140,6 +140,9 @@ jobs:
|
||||
- name: steps::install_mold
|
||||
run: ./script/install-mold
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::download_wasi_sdk
|
||||
run: ./script/download-wasi-sdk
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: ./script/bundle-linux
|
||||
run: ./script/bundle-linux
|
||||
shell: bash -euxo pipefail {0}
|
||||
@@ -358,7 +361,7 @@ jobs:
|
||||
needs:
|
||||
- check_style
|
||||
- run_tests_windows
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions')
|
||||
runs-on: namespace-profile-32x64-ubuntu-2004
|
||||
env:
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
@@ -389,7 +392,7 @@ jobs:
|
||||
needs:
|
||||
- check_style
|
||||
- run_tests_windows
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions')
|
||||
runs-on: self-mini-macos
|
||||
env:
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
@@ -431,7 +434,7 @@ jobs:
|
||||
- bundle_mac_x86_64
|
||||
- bundle_windows_aarch64
|
||||
- bundle_windows_x86_64
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions')
|
||||
runs-on: namespace-profile-4x8-ubuntu-2204
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
@@ -487,3 +490,21 @@ jobs:
|
||||
SENTRY_PROJECT: zed
|
||||
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
|
||||
timeout-minutes: 60
|
||||
notify_on_failure:
|
||||
needs:
|
||||
- bundle_linux_aarch64
|
||||
- bundle_linux_x86_64
|
||||
- bundle_mac_aarch64
|
||||
- bundle_mac_x86_64
|
||||
- bundle_windows_aarch64
|
||||
- bundle_windows_x86_64
|
||||
if: failure()
|
||||
runs-on: namespace-profile-2x4-ubuntu-2404
|
||||
steps:
|
||||
- name: release::notify_on_failure::notify_slack
|
||||
run: |-
|
||||
curl -X POST -H 'Content-type: application/json'\
|
||||
--data '{"text":"${{ github.workflow }} failed: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"}' "$SLACK_WEBHOOK"
|
||||
shell: bash -euxo pipefail {0}
|
||||
env:
|
||||
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_WORKFLOW_FAILURES }}
|
||||
|
||||

35  .github/workflows/run_agent_evals.yml  vendored
@@ -6,24 +6,21 @@ env:
CARGO_INCREMENTAL: '0'
RUST_BACKTRACE: '1'
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
GOOGLE_AI_API_KEY: ${{ secrets.GOOGLE_AI_API_KEY }}
GOOGLE_CLOUD_PROJECT: ${{ secrets.GOOGLE_CLOUD_PROJECT }}
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
ZED_EVAL_TELEMETRY: '1'
MODEL_NAME: ${{ inputs.model_name }}
on:
pull_request:
types:
- synchronize
- reopened
- labeled
branches:
- '**'
schedule:
- cron: 0 0 * * *
workflow_dispatch: {}
workflow_dispatch:
inputs:
model_name:
description: model_name
required: true
type: string
jobs:
agent_evals:
if: |
github.repository_owner == 'zed-industries' &&
(github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'run-eval'))
runs-on: namespace-profile-16x32-ubuntu-2204
steps:
- name: steps::checkout_repo
@@ -40,6 +37,9 @@ jobs:
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- name: steps::download_wasi_sdk
run: ./script/download-wasi-sdk
shell: bash -euxo pipefail {0}
- name: steps::setup_cargo_config
run: |
mkdir -p ./../.cargo
@@ -49,14 +49,19 @@ jobs:
run: cargo build --package=eval
shell: bash -euxo pipefail {0}
- name: run_agent_evals::agent_evals::run_eval
run: cargo run --package=eval -- --repetitions=8 --concurrency=1
run: cargo run --package=eval -- --repetitions=8 --concurrency=1 --model "${MODEL_NAME}"
shell: bash -euxo pipefail {0}
env:
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
GOOGLE_AI_API_KEY: ${{ secrets.GOOGLE_AI_API_KEY }}
GOOGLE_CLOUD_PROJECT: ${{ secrets.GOOGLE_CLOUD_PROJECT }}
- name: steps::cleanup_cargo_config
if: always()
run: |
rm -rf ./../.cargo
shell: bash -euxo pipefail {0}
timeout-minutes: 60
timeout-minutes: 600
concurrency:
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
cancel-in-progress: true
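
The eval job runs on the schedule and on manual dispatch, but for pull requests only when the `run-eval` label is present. A minimal sketch of that gate with the model passed through from the dispatch input (most steps elided):

```yaml
jobs:
  agent_evals:
    if: |
      github.repository_owner == 'zed-industries' &&
      (github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'run-eval'))
    runs-on: namespace-profile-16x32-ubuntu-2204
    steps:
      # MODEL_NAME comes from the workflow_dispatch input via the env block.
      - name: run eval
        shell: bash -euxo pipefail {0}
        run: cargo run --package=eval -- --repetitions=8 --concurrency=1 --model "${MODEL_NAME}"
```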

6  .github/workflows/run_bundling.yml  vendored
@@ -34,6 +34,9 @@ jobs:
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- name: steps::download_wasi_sdk
run: ./script/download-wasi-sdk
shell: bash -euxo pipefail {0}
- name: ./script/bundle-linux
run: ./script/bundle-linux
shell: bash -euxo pipefail {0}
@@ -74,6 +77,9 @@ jobs:
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- name: steps::download_wasi_sdk
run: ./script/download-wasi-sdk
shell: bash -euxo pipefail {0}
- name: ./script/bundle-linux
run: ./script/bundle-linux
shell: bash -euxo pipefail {0}

68  .github/workflows/run_cron_unit_evals.yml  vendored (new file)
@@ -0,0 +1,68 @@
# Generated from xtask::workflows::run_cron_unit_evals
# Rebuild with `cargo xtask workflows`.
name: run_cron_unit_evals
env:
CARGO_TERM_COLOR: always
CARGO_INCREMENTAL: '0'
RUST_BACKTRACE: '1'
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
on:
schedule:
- cron: 47 1 * * 2
workflow_dispatch: {}
jobs:
cron_unit_evals:
runs-on: namespace-profile-16x32-ubuntu-2204
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: steps::setup_cargo_config
run: |
mkdir -p ./../.cargo
cp ./.cargo/ci-config.toml ./../.cargo/config.toml
shell: bash -euxo pipefail {0}
- name: steps::cache_rust_dependencies_namespace
uses: namespacelabs/nscloud-cache-action@v1
with:
cache: rust
- name: steps::setup_linux
run: ./script/linux
shell: bash -euxo pipefail {0}
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- name: steps::download_wasi_sdk
run: ./script/download-wasi-sdk
shell: bash -euxo pipefail {0}
- name: steps::cargo_install_nextest
uses: taiki-e/install-action@nextest
- name: steps::clear_target_dir_if_large
run: ./script/clear-target-dir-if-larger-than 250
shell: bash -euxo pipefail {0}
- name: ./script/run-unit-evals
run: ./script/run-unit-evals
shell: bash -euxo pipefail {0}
env:
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
GOOGLE_AI_API_KEY: ${{ secrets.GOOGLE_AI_API_KEY }}
GOOGLE_CLOUD_PROJECT: ${{ secrets.GOOGLE_CLOUD_PROJECT }}
- name: steps::cleanup_cargo_config
if: always()
run: |
rm -rf ./../.cargo
shell: bash -euxo pipefail {0}
- name: run_agent_evals::cron_unit_evals::send_failure_to_slack
if: ${{ failure() }}
uses: slackapi/slack-github-action@b0fa283ad8fea605de13dc3f449259339835fc52
with:
method: chat.postMessage
token: ${{ secrets.SLACK_APP_ZED_UNIT_EVALS_BOT_TOKEN }}
payload: |
channel: C04UDRNNJFQ
text: "Unit Evals Failed: https://github.com/zed-industries/zed/actions/runs/${{ github.run_id }}"
concurrency:
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
cancel-in-progress: true
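
Both eval workflows report failures to Slack with the same step; shown here in isolation for reference, with the channel id, action pin, and secret name as in the workflow above:

```yaml
- name: send_failure_to_slack
  if: ${{ failure() }}
  uses: slackapi/slack-github-action@b0fa283ad8fea605de13dc3f449259339835fc52
  with:
    method: chat.postMessage
    token: ${{ secrets.SLACK_APP_ZED_UNIT_EVALS_BOT_TOKEN }}
    # The payload is the chat.postMessage body: a channel id and message text.
    payload: |
      channel: C04UDRNNJFQ
      text: "Unit Evals Failed: https://github.com/zed-industries/zed/actions/runs/${{ github.run_id }}"
```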
43
.github/workflows/run_tests.yml
vendored
43
.github/workflows/run_tests.yml
vendored
@@ -15,7 +15,7 @@ on:
|
||||
- v[0-9]+.[0-9]+.x
|
||||
jobs:
|
||||
orchestrate:
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions')
|
||||
runs-on: namespace-profile-2x4-ubuntu-2404
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
@@ -59,7 +59,7 @@ jobs:
|
||||
run_nix: ${{ steps.filter.outputs.run_nix }}
|
||||
run_tests: ${{ steps.filter.outputs.run_tests }}
|
||||
check_style:
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions')
|
||||
runs-on: namespace-profile-4x8-ubuntu-2204
|
||||
steps:
|
||||
- name: steps::checkout_repo
|
||||
@@ -113,9 +113,6 @@ jobs:
|
||||
- name: steps::clippy
|
||||
run: ./script/clippy.ps1
|
||||
shell: pwsh
|
||||
- name: steps::cargo_install_nextest
|
||||
run: cargo install cargo-nextest --locked
|
||||
shell: pwsh
|
||||
- name: steps::clear_target_dir_if_large
|
||||
run: ./script/clear-target-dir-if-larger-than.ps1 250
|
||||
shell: pwsh
|
||||
@@ -143,16 +140,19 @@ jobs:
|
||||
mkdir -p ./../.cargo
|
||||
cp ./.cargo/ci-config.toml ./../.cargo/config.toml
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::cache_rust_dependencies_namespace
|
||||
uses: namespacelabs/nscloud-cache-action@v1
|
||||
with:
|
||||
cache: rust
|
||||
- name: steps::setup_linux
|
||||
run: ./script/linux
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::install_mold
|
||||
run: ./script/install-mold
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::cache_rust_dependencies_namespace
|
||||
uses: namespacelabs/nscloud-cache-action@v1
|
||||
with:
|
||||
cache: rust
|
||||
- name: steps::download_wasi_sdk
|
||||
run: ./script/download-wasi-sdk
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::setup_node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
|
||||
with:
|
||||
@@ -161,8 +161,7 @@ jobs:
|
||||
run: ./script/clippy
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::cargo_install_nextest
|
||||
run: cargo install cargo-nextest --locked
|
||||
shell: bash -euxo pipefail {0}
|
||||
uses: taiki-e/install-action@nextest
|
||||
- name: steps::clear_target_dir_if_large
|
||||
run: ./script/clear-target-dir-if-larger-than 250
|
||||
shell: bash -euxo pipefail {0}
|
||||
@@ -197,9 +196,6 @@ jobs:
|
||||
- name: steps::clippy
|
||||
run: ./script/clippy
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::cargo_install_nextest
|
||||
run: cargo install cargo-nextest --locked
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::clear_target_dir_if_large
|
||||
run: ./script/clear-target-dir-if-larger-than 300
|
||||
shell: bash -euxo pipefail {0}
|
||||
@@ -232,6 +228,9 @@ jobs:
|
||||
- name: steps::install_mold
|
||||
run: ./script/install-mold
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::download_wasi_sdk
|
||||
run: ./script/download-wasi-sdk
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::setup_cargo_config
|
||||
run: |
|
||||
mkdir -p ./../.cargo
|
||||
@@ -263,16 +262,19 @@ jobs:
|
||||
mkdir -p ./../.cargo
|
||||
cp ./.cargo/ci-config.toml ./../.cargo/config.toml
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::cache_rust_dependencies_namespace
|
||||
uses: namespacelabs/nscloud-cache-action@v1
|
||||
with:
|
||||
cache: rust
|
||||
- name: steps::setup_linux
|
||||
run: ./script/linux
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::install_mold
|
||||
run: ./script/install-mold
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::cache_rust_dependencies_namespace
|
||||
uses: namespacelabs/nscloud-cache-action@v1
|
||||
with:
|
||||
cache: rust
|
||||
- name: steps::download_wasi_sdk
|
||||
run: ./script/download-wasi-sdk
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: cargo build -p collab
|
||||
run: cargo build -p collab
|
||||
shell: bash -euxo pipefail {0}
|
||||
@@ -348,6 +350,9 @@ jobs:
|
||||
- name: steps::install_mold
|
||||
run: ./script/install-mold
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: steps::download_wasi_sdk
|
||||
run: ./script/download-wasi-sdk
|
||||
shell: bash -euxo pipefail {0}
|
||||
- name: run_tests::check_docs::install_mdbook
|
||||
uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08
|
||||
with:
|
||||
@@ -533,7 +538,7 @@ jobs:
|
||||
- check_scripts
|
||||
- build_nix_linux_x86_64
|
||||
- build_nix_mac_aarch64
|
||||
if: github.repository_owner == 'zed-industries' && always()
|
||||
if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') && always()
|
||||
runs-on: namespace-profile-2x4-ubuntu-2404
|
||||
steps:
|
||||
- name: run_tests::tests_pass
|
||||
|
||||

42  .github/workflows/run_unit_evals.yml  vendored
@@ -1,17 +1,26 @@
# Generated from xtask::workflows::run_agent_evals
# Generated from xtask::workflows::run_unit_evals
# Rebuild with `cargo xtask workflows`.
name: run_agent_evals
name: run_unit_evals
env:
CARGO_TERM_COLOR: always
CARGO_INCREMENTAL: '0'
RUST_BACKTRACE: '1'
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
ZED_EVAL_TELEMETRY: '1'
MODEL_NAME: ${{ inputs.model_name }}
on:
schedule:
- cron: 47 1 * * 2
workflow_dispatch: {}
workflow_dispatch:
inputs:
model_name:
description: model_name
required: true
type: string
commit_sha:
description: commit_sha
required: true
type: string
jobs:
unit_evals:
run_unit_evals:
runs-on: namespace-profile-16x32-ubuntu-2204
steps:
- name: steps::checkout_repo
@@ -33,9 +42,11 @@ jobs:
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- name: steps::cargo_install_nextest
run: cargo install cargo-nextest --locked
- name: steps::download_wasi_sdk
run: ./script/download-wasi-sdk
shell: bash -euxo pipefail {0}
- name: steps::cargo_install_nextest
uses: taiki-e/install-action@nextest
- name: steps::clear_target_dir_if_large
run: ./script/clear-target-dir-if-larger-than 250
shell: bash -euxo pipefail {0}
@@ -44,20 +55,15 @@ jobs:
shell: bash -euxo pipefail {0}
env:
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
- name: run_agent_evals::unit_evals::send_failure_to_slack
if: ${{ failure() }}
uses: slackapi/slack-github-action@b0fa283ad8fea605de13dc3f449259339835fc52
with:
method: chat.postMessage
token: ${{ secrets.SLACK_APP_ZED_UNIT_EVALS_BOT_TOKEN }}
payload: |
channel: C04UDRNNJFQ
text: "Unit Evals Failed: https://github.com/zed-industries/zed/actions/runs/${{ github.run_id }}"
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
GOOGLE_AI_API_KEY: ${{ secrets.GOOGLE_AI_API_KEY }}
GOOGLE_CLOUD_PROJECT: ${{ secrets.GOOGLE_CLOUD_PROJECT }}
UNIT_EVAL_COMMIT: ${{ inputs.commit_sha }}
- name: steps::cleanup_cargo_config
if: always()
run: |
rm -rf ./../.cargo
shell: bash -euxo pipefail {0}
concurrency:
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.run_id }}
cancel-in-progress: true
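
The renamed workflow now accepts `model_name` and `commit_sha` as dispatch inputs and threads them to the eval run through environment variables. A trimmed sketch of just that plumbing; the eval command itself is not visible in this hunk, so `./script/run-unit-evals` is borrowed from the cron workflow above as a stand-in:

```yaml
on:
  workflow_dispatch:
    inputs:
      model_name:
        required: true
        type: string
      commit_sha:
        required: true
        type: string

env:
  # Consumed by the eval run, mirroring MODEL_NAME above.
  MODEL_NAME: ${{ inputs.model_name }}

jobs:
  run_unit_evals:
    runs-on: namespace-profile-16x32-ubuntu-2204
    steps:
      - name: run unit evals
        shell: bash -euxo pipefail {0}
        run: ./script/run-unit-evals   # stand-in; see run_cron_unit_evals.yml
        env:
          # Pins which commit the unit evals are scored against.
          UNIT_EVAL_COMMIT: ${{ inputs.commit_sha }}
```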
163
Cargo.lock
generated
163
Cargo.lock
generated
@@ -96,6 +96,7 @@ dependencies = [
|
||||
"auto_update",
|
||||
"editor",
|
||||
"extension_host",
|
||||
"fs",
|
||||
"futures 0.3.31",
|
||||
"gpui",
|
||||
"language",
|
||||
@@ -321,10 +322,12 @@ dependencies = [
|
||||
"assistant_slash_command",
|
||||
"assistant_slash_commands",
|
||||
"assistant_text_thread",
|
||||
"async-fs",
|
||||
"audio",
|
||||
"buffer_diff",
|
||||
"chrono",
|
||||
"client",
|
||||
"clock",
|
||||
"cloud_llm_client",
|
||||
"collections",
|
||||
"command_palette_hooks",
|
||||
@@ -340,8 +343,10 @@ dependencies = [
|
||||
"futures 0.3.31",
|
||||
"fuzzy",
|
||||
"gpui",
|
||||
"gpui_tokio",
|
||||
"html_to_markdown",
|
||||
"http_client",
|
||||
"image",
|
||||
"indoc",
|
||||
"itertools 0.14.0",
|
||||
"jsonschema",
|
||||
@@ -1237,15 +1242,15 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "async_zip"
|
||||
version = "0.0.17"
|
||||
version = "0.0.18"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "00b9f7252833d5ed4b00aa9604b563529dd5e11de9c23615de2dcdf91eb87b52"
|
||||
checksum = "0d8c50d65ce1b0e0cb65a785ff615f78860d7754290647d3b983208daa4f85e6"
|
||||
dependencies = [
|
||||
"async-compression",
|
||||
"crc32fast",
|
||||
"futures-lite 2.6.1",
|
||||
"pin-project",
|
||||
"thiserror 1.0.69",
|
||||
"thiserror 2.0.17",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1330,10 +1335,14 @@ version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"client",
|
||||
"clock",
|
||||
"ctor",
|
||||
"db",
|
||||
"futures 0.3.31",
|
||||
"gpui",
|
||||
"http_client",
|
||||
"log",
|
||||
"parking_lot",
|
||||
"paths",
|
||||
"release_channel",
|
||||
"serde",
|
||||
@@ -1344,6 +1353,7 @@ dependencies = [
|
||||
"util",
|
||||
"which 6.0.3",
|
||||
"workspace",
|
||||
"zlog",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1455,6 +1465,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "879b6c89592deb404ba4dc0ae6b58ffd1795c78991cbb5b8bc441c48a070440d"
|
||||
dependencies = [
|
||||
"aws-lc-sys",
|
||||
"untrusted 0.7.1",
|
||||
"zeroize",
|
||||
]
|
||||
|
||||
@@ -3202,6 +3213,7 @@ dependencies = [
|
||||
"rustc-hash 2.1.1",
|
||||
"schemars 1.0.4",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"strum 0.27.2",
|
||||
]
|
||||
|
||||
@@ -3681,6 +3693,7 @@ dependencies = [
|
||||
"collections",
|
||||
"futures 0.3.31",
|
||||
"gpui",
|
||||
"http_client",
|
||||
"log",
|
||||
"net",
|
||||
"parking_lot",
|
||||
@@ -5305,6 +5318,7 @@ dependencies = [
|
||||
"serde_json",
|
||||
"settings",
|
||||
"supermaven",
|
||||
"sweep_ai",
|
||||
"telemetry",
|
||||
"theme",
|
||||
"ui",
|
||||
@@ -5854,6 +5868,7 @@ dependencies = [
|
||||
"lsp",
|
||||
"parking_lot",
|
||||
"pretty_assertions",
|
||||
"proto",
|
||||
"semantic_version",
|
||||
"serde",
|
||||
"serde_json",
|
||||
@@ -6242,7 +6257,7 @@ dependencies = [
|
||||
"futures-core",
|
||||
"futures-sink",
|
||||
"nanorand",
|
||||
"spin",
|
||||
"spin 0.9.8",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -6353,9 +6368,9 @@ checksum = "aa9a19cbb55df58761df49b23516a86d432839add4af60fc256da840f66ed35b"
|
||||
|
||||
[[package]]
|
||||
name = "fork"
|
||||
version = "0.2.0"
|
||||
version = "0.4.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "05dc8b302e04a1c27f4fe694439ef0f29779ca4edc205b7b58f00db04e29656d"
|
||||
checksum = "30268f1eefccc9d72f43692e8b89e659aeb52e84016c3b32b6e7e9f1c8f38f94"
|
||||
dependencies = [
|
||||
"libc",
|
||||
]
|
||||
@@ -7281,6 +7296,7 @@ dependencies = [
|
||||
"calloop",
|
||||
"calloop-wayland-source",
|
||||
"cbindgen",
|
||||
"circular-buffer",
|
||||
"cocoa 0.26.0",
|
||||
"cocoa-foundation 0.2.0",
|
||||
"collections",
|
||||
@@ -7336,6 +7352,7 @@ dependencies = [
|
||||
"slotmap",
|
||||
"smallvec",
|
||||
"smol",
|
||||
"spin 0.10.0",
|
||||
"stacksafe",
|
||||
"strum 0.27.2",
|
||||
"sum_tree",
|
||||
@@ -7799,6 +7816,7 @@ dependencies = [
|
||||
"parking_lot",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"serde_urlencoded",
|
||||
"sha2",
|
||||
"tempfile",
|
||||
"url",
|
||||
@@ -8649,23 +8667,25 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "jupyter-protocol"
|
||||
version = "0.6.0"
|
||||
source = "git+https://github.com/ConradIrwin/runtimed?rev=7130c804216b6914355d15d0b91ea91f6babd734#7130c804216b6914355d15d0b91ea91f6babd734"
|
||||
version = "0.10.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d9c047f6b5e551563af2ddb13dafed833f0ec5a5b0f9621d5ad740a9ff1e1095"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"async-trait",
|
||||
"bytes 1.10.1",
|
||||
"chrono",
|
||||
"futures 0.3.31",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"thiserror 2.0.17",
|
||||
"uuid",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "jupyter-websocket-client"
|
||||
version = "0.9.0"
|
||||
source = "git+https://github.com/ConradIrwin/runtimed?rev=7130c804216b6914355d15d0b91ea91f6babd734#7130c804216b6914355d15d0b91ea91f6babd734"
|
||||
version = "0.15.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4197fa926a6b0bddfed7377d9fed3d00a0dec44a1501e020097bd26604699cae"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"async-trait",
|
||||
@@ -8674,6 +8694,7 @@ dependencies = [
|
||||
"jupyter-protocol",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"tokio",
|
||||
"url",
|
||||
"uuid",
|
||||
]
|
||||
@@ -8711,7 +8732,6 @@ dependencies = [
|
||||
"ui",
|
||||
"ui_input",
|
||||
"util",
|
||||
"vim",
|
||||
"workspace",
|
||||
"zed_actions",
|
||||
]
|
||||
@@ -8863,6 +8883,7 @@ dependencies = [
|
||||
"icons",
|
||||
"image",
|
||||
"log",
|
||||
"open_ai",
|
||||
"open_router",
|
||||
"parking_lot",
|
||||
"proto",
|
||||
@@ -9065,7 +9086,7 @@ version = "1.5.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
|
||||
dependencies = [
|
||||
"spin",
|
||||
"spin 0.9.8",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -10007,6 +10028,18 @@ version = "0.2.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
|
||||
|
||||
[[package]]
|
||||
name = "miniprofiler_ui"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"gpui",
|
||||
"serde_json",
|
||||
"smol",
|
||||
"util",
|
||||
"workspace",
|
||||
"zed_actions",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "miniz_oxide"
|
||||
version = "0.8.9"
|
||||
@@ -10211,8 +10244,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "nbformat"
|
||||
version = "0.10.0"
|
||||
source = "git+https://github.com/ConradIrwin/runtimed?rev=7130c804216b6914355d15d0b91ea91f6babd734#7130c804216b6914355d15d0b91ea91f6babd734"
|
||||
version = "0.15.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "89c7229d604d847227002715e1235cd84e81919285d904ccb290a42ecc409348"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"chrono",
|
||||
@@ -10498,11 +10532,10 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "num-bigint-dig"
|
||||
version = "0.8.4"
|
||||
version = "0.8.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "dc84195820f291c7697304f3cbdadd1cb7199c0efc917ff5eafd71225c136151"
|
||||
checksum = "e661dda6640fad38e827a6d4a310ff4763082116fe217f279885c97f511bb0b7"
|
||||
dependencies = [
|
||||
"byteorder",
|
||||
"lazy_static",
|
||||
"libm",
|
||||
"num-integer",
|
||||
@@ -11005,6 +11038,7 @@ dependencies = [
|
||||
"serde_json",
|
||||
"settings",
|
||||
"strum 0.27.2",
|
||||
"thiserror 2.0.17",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -13044,6 +13078,23 @@ dependencies = [
|
||||
"zlog",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "project_benchmarks"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"clap",
|
||||
"client",
|
||||
"futures 0.3.31",
|
||||
"gpui",
|
||||
"http_client",
|
||||
"language",
|
||||
"node_runtime",
|
||||
"project",
|
||||
"settings",
|
||||
"watch",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "project_panel"
|
||||
version = "0.1.0"
|
||||
@@ -13071,6 +13122,7 @@ dependencies = [
|
||||
"settings",
|
||||
"smallvec",
|
||||
"telemetry",
|
||||
"tempfile",
|
||||
"theme",
|
||||
"ui",
|
||||
"util",
|
||||
@@ -13989,6 +14041,7 @@ dependencies = [
|
||||
"paths",
|
||||
"pretty_assertions",
|
||||
"project",
|
||||
"prompt_store",
|
||||
"proto",
|
||||
"rayon",
|
||||
"release_channel",
|
||||
@@ -14234,7 +14287,7 @@ dependencies = [
|
||||
"cfg-if",
|
||||
"getrandom 0.2.16",
|
||||
"libc",
|
||||
"untrusted",
|
||||
"untrusted 0.9.0",
|
||||
"windows-sys 0.52.0",
|
||||
]
|
||||
|
||||
@@ -14363,9 +14416,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "rsa"
|
||||
version = "0.9.8"
|
||||
version = "0.9.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "78928ac1ed176a5ca1d17e578a1825f3d81ca54cf41053a592584b020cfd691b"
|
||||
checksum = "40a0376c50d0358279d9d643e4bf7b7be212f1f4ff1da9070a7b54d22ef75c88"
|
||||
dependencies = [
|
||||
"const-oid",
|
||||
"digest",
|
||||
@@ -14415,25 +14468,26 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "runtimelib"
|
||||
version = "0.25.0"
|
||||
source = "git+https://github.com/ConradIrwin/runtimed?rev=7130c804216b6914355d15d0b91ea91f6babd734#7130c804216b6914355d15d0b91ea91f6babd734"
|
||||
version = "0.30.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "481b48894073a0096f28cbe9860af01fc1b861e55b3bc96afafc645ee3de62dc"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"async-dispatcher",
|
||||
"async-std",
|
||||
"aws-lc-rs",
|
||||
"base64 0.22.1",
|
||||
"bytes 1.10.1",
|
||||
"chrono",
|
||||
"data-encoding",
|
||||
"dirs 5.0.1",
|
||||
"dirs 6.0.0",
|
||||
"futures 0.3.31",
|
||||
"glob",
|
||||
"jupyter-protocol",
|
||||
"ring",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"shellexpand 3.1.1",
|
||||
"smol",
|
||||
"thiserror 2.0.17",
|
||||
"uuid",
|
||||
"zeromq",
|
||||
]
|
||||
@@ -14701,7 +14755,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765"
|
||||
dependencies = [
|
||||
"ring",
|
||||
"untrusted",
|
||||
"untrusted 0.9.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -14713,7 +14767,7 @@ dependencies = [
|
||||
"aws-lc-rs",
|
||||
"ring",
|
||||
"rustls-pki-types",
|
||||
"untrusted",
|
||||
"untrusted 0.9.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -14943,7 +14997,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414"
|
||||
dependencies = [
|
||||
"ring",
|
||||
"untrusted",
|
||||
"untrusted 0.9.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -15846,6 +15900,15 @@ dependencies = [
|
||||
"lock_api",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "spin"
|
||||
version = "0.10.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d5fe4ccb98d9c292d56fec89a5e07da7fc4cf0dc11e156b41793132775d3e591"
|
||||
dependencies = [
|
||||
"lock_api",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "spirv"
|
||||
version = "0.3.0+sdk-1.3.268.0"
|
||||
@@ -16531,6 +16594,33 @@ dependencies = [
|
||||
"zeno",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "sweep_ai"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"arrayvec",
|
||||
"brotli",
|
||||
"client",
|
||||
"collections",
|
||||
"edit_prediction",
|
||||
"feature_flags",
|
||||
"futures 0.3.31",
|
||||
"gpui",
|
||||
"http_client",
|
||||
"indoc",
|
||||
"language",
|
||||
"project",
|
||||
"release_channel",
|
||||
"reqwest_client",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"tree-sitter-rust",
|
||||
"util",
|
||||
"workspace",
|
||||
"zlog",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "symphonia"
|
||||
version = "0.5.5"
|
||||
@@ -18527,6 +18617,12 @@ version = "0.2.11"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "673aac59facbab8a9007c7f6108d11f63b603f7cabff99fabf650fea5c32b861"
|
||||
|
||||
[[package]]
|
||||
name = "untrusted"
|
||||
version = "0.7.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a"
|
||||
|
||||
[[package]]
|
||||
name = "untrusted"
|
||||
version = "0.9.0"
|
||||
@@ -21139,7 +21235,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "zed"
|
||||
version = "0.213.0"
|
||||
version = "0.215.0"
|
||||
dependencies = [
|
||||
"acp_tools",
|
||||
"activity_indicator",
|
||||
@@ -21152,11 +21248,11 @@ dependencies = [
|
||||
"audio",
|
||||
"auto_update",
|
||||
"auto_update_ui",
|
||||
"backtrace",
|
||||
"bincode 1.3.3",
|
||||
"breadcrumbs",
|
||||
"call",
|
||||
"channel",
|
||||
"chrono",
|
||||
"clap",
|
||||
"cli",
|
||||
"client",
|
||||
@@ -21214,8 +21310,8 @@ dependencies = [
|
||||
"menu",
|
||||
"migrator",
|
||||
"mimalloc",
|
||||
"miniprofiler_ui",
|
||||
"nc",
|
||||
"nix 0.29.0",
|
||||
"node_runtime",
|
||||
"notifications",
|
||||
"onboarding",
|
||||
@@ -21251,13 +21347,13 @@ dependencies = [
|
||||
"snippets_ui",
|
||||
"supermaven",
|
||||
"svg_preview",
|
||||
"sweep_ai",
|
||||
"sysinfo 0.37.2",
|
||||
"system_specs",
|
||||
"tab_switcher",
|
||||
"task",
|
||||
"tasks_ui",
|
||||
"telemetry",
|
||||
"telemetry_events",
|
||||
"terminal_view",
|
||||
"theme",
|
||||
"theme_extension",
|
||||
@@ -21686,6 +21782,7 @@ dependencies = [
|
||||
"serde_json",
|
||||
"settings",
|
||||
"smol",
|
||||
"strsim",
|
||||
"thiserror 2.0.17",
|
||||
"util",
|
||||
"uuid",
|
||||
|
||||
40
Cargo.toml
40
Cargo.toml
@@ -110,6 +110,7 @@ members = [
|
||||
"crates/menu",
|
||||
"crates/migrator",
|
||||
"crates/mistral",
|
||||
"crates/miniprofiler_ui",
|
||||
"crates/multi_buffer",
|
||||
"crates/nc",
|
||||
"crates/net",
|
||||
@@ -126,6 +127,7 @@ members = [
|
||||
"crates/picker",
|
||||
"crates/prettier",
|
||||
"crates/project",
|
||||
"crates/project_benchmarks",
|
||||
"crates/project_panel",
|
||||
"crates/project_symbols",
|
||||
"crates/prompt_store",
|
||||
@@ -163,6 +165,7 @@ members = [
|
||||
"crates/sum_tree",
|
||||
"crates/supermaven",
|
||||
"crates/supermaven_api",
|
||||
"crates/sweep_ai",
|
||||
"crates/codestral",
|
||||
"crates/svg_preview",
|
||||
"crates/system_specs",
|
||||
@@ -341,6 +344,7 @@ menu = { path = "crates/menu" }
|
||||
migrator = { path = "crates/migrator" }
|
||||
mistral = { path = "crates/mistral" }
|
||||
multi_buffer = { path = "crates/multi_buffer" }
|
||||
miniprofiler_ui = { path = "crates/miniprofiler_ui" }
|
||||
nc = { path = "crates/nc" }
|
||||
net = { path = "crates/net" }
|
||||
node_runtime = { path = "crates/node_runtime" }
|
||||
@@ -395,6 +399,7 @@ streaming_diff = { path = "crates/streaming_diff" }
|
||||
sum_tree = { path = "crates/sum_tree" }
|
||||
supermaven = { path = "crates/supermaven" }
|
||||
supermaven_api = { path = "crates/supermaven_api" }
|
||||
sweep_ai = { path = "crates/sweep_ai" }
|
||||
codestral = { path = "crates/codestral" }
|
||||
system_specs = { path = "crates/system_specs" }
|
||||
tab_switcher = { path = "crates/tab_switcher" }
|
||||
@@ -458,7 +463,7 @@ async-tar = "0.5.1"
|
||||
async-task = "4.7"
|
||||
async-trait = "0.1"
|
||||
async-tungstenite = "0.31.0"
|
||||
async_zip = { version = "0.0.17", features = ["deflate", "deflate64"] }
|
||||
async_zip = { version = "0.0.18", features = ["deflate", "deflate64"] }
|
||||
aws-config = { version = "1.6.1", features = ["behavior-version-latest"] }
|
||||
aws-credential-types = { version = "1.2.2", features = [
|
||||
"hardcoded-credentials",
|
||||
@@ -475,6 +480,7 @@ bitflags = "2.6.0"
|
||||
blade-graphics = { version = "0.7.0" }
|
||||
blade-macros = { version = "0.3.0" }
|
||||
blade-util = { version = "0.3.0" }
|
||||
brotli = "8.0.2"
|
||||
bytes = "1.0"
|
||||
cargo_metadata = "0.19"
|
||||
cargo_toml = "0.21"
|
||||
@@ -482,7 +488,7 @@ cfg-if = "1.0.3"
|
||||
chrono = { version = "0.4", features = ["serde"] }
|
||||
ciborium = "0.2"
|
||||
circular-buffer = "1.0"
|
||||
clap = { version = "4.4", features = ["derive"] }
|
||||
clap = { version = "4.4", features = ["derive", "wrap_help"] }
|
||||
cocoa = "=0.26.0"
|
||||
cocoa-foundation = "=0.2.0"
|
||||
convert_case = "0.8.0"
|
||||
@@ -504,7 +510,7 @@ emojis = "0.6.1"
|
||||
env_logger = "0.11"
|
||||
exec = "0.3.1"
|
||||
fancy-regex = "0.14.0"
|
||||
fork = "0.2.0"
|
||||
fork = "0.4.0"
|
||||
futures = "0.3"
|
||||
futures-batch = "0.6.1"
|
||||
futures-lite = "1.13"
|
||||
@@ -531,8 +537,8 @@ itertools = "0.14.0"
|
||||
json_dotpath = "1.1"
|
||||
jsonschema = "0.30.0"
|
||||
jsonwebtoken = "9.3"
|
||||
jupyter-protocol = { git = "https://github.com/ConradIrwin/runtimed", rev = "7130c804216b6914355d15d0b91ea91f6babd734" }
|
||||
jupyter-websocket-client = { git = "https://github.com/ConradIrwin/runtimed" ,rev = "7130c804216b6914355d15d0b91ea91f6babd734" }
|
||||
jupyter-protocol = "0.10.0"
|
||||
jupyter-websocket-client = "0.15.0"
|
||||
libc = "0.2"
|
||||
libsqlite3-sys = { version = "0.30.1", features = ["bundled"] }
|
||||
linkify = "0.10.0"
|
||||
@@ -545,7 +551,7 @@ minidumper = "0.8"
|
||||
moka = { version = "0.12.10", features = ["sync"] }
|
||||
naga = { version = "25.0", features = ["wgsl-in"] }
|
||||
nanoid = "0.4"
|
||||
nbformat = { git = "https://github.com/ConradIrwin/runtimed", rev = "7130c804216b6914355d15d0b91ea91f6babd734" }
|
||||
nbformat = "0.15.0"
|
||||
nix = "0.29"
|
||||
num-format = "0.4.4"
|
||||
num-traits = "0.2"
|
||||
@@ -616,8 +622,8 @@ reqwest = { git = "https://github.com/zed-industries/reqwest.git", rev = "c15662
|
||||
"stream",
|
||||
], package = "zed-reqwest", version = "0.12.15-zed" }
|
||||
rsa = "0.9.6"
|
||||
runtimelib = { git = "https://github.com/ConradIrwin/runtimed", rev = "7130c804216b6914355d15d0b91ea91f6babd734", default-features = false, features = [
|
||||
"async-dispatcher-runtime",
|
||||
runtimelib = { version = "0.30.0", default-features = false, features = [
|
||||
"async-dispatcher-runtime", "aws-lc-rs"
|
||||
] }
|
||||
rust-embed = { version = "8.4", features = ["include-exclude"] }
|
||||
rustc-hash = "2.1.0"
|
||||
@@ -628,6 +634,7 @@ scap = { git = "https://github.com/zed-industries/scap", rev = "4afea48c3b002197
|
||||
schemars = { version = "1.0", features = ["indexmap2"] }
|
||||
semver = "1.0"
|
||||
serde = { version = "1.0.221", features = ["derive", "rc"] }
|
||||
serde_derive = "1.0.221"
|
||||
serde_json = { version = "1.0.144", features = ["preserve_order", "raw_value"] }
|
||||
serde_json_lenient = { version = "0.2", features = [
|
||||
"preserve_order",
|
||||
@@ -721,6 +728,7 @@ yawc = "0.2.5"
|
||||
zeroize = "1.8"
|
||||
zstd = "0.11"
|
||||
|
||||
|
||||
[workspace.dependencies.windows]
|
||||
version = "0.61"
|
||||
features = [
|
||||
@@ -789,6 +797,19 @@ codegen-units = 16
|
||||
codegen-units = 16
|
||||
|
||||
[profile.dev.package]
|
||||
# proc-macros start
|
||||
gpui_macros = { opt-level = 3 }
|
||||
derive_refineable = { opt-level = 3 }
|
||||
settings_macros = { opt-level = 3 }
|
||||
sqlez_macros = { opt-level = 3, codegen-units = 1 }
|
||||
ui_macros = { opt-level = 3 }
|
||||
util_macros = { opt-level = 3 }
|
||||
serde_derive = { opt-level = 3 }
|
||||
quote = { opt-level = 3 }
|
||||
syn = { opt-level = 3 }
|
||||
proc-macro2 = { opt-level = 3 }
|
||||
# proc-macros end
|
||||
|
||||
taffy = { opt-level = 3 }
|
||||
cranelift-codegen = { opt-level = 3 }
|
||||
cranelift-codegen-meta = { opt-level = 3 }
|
||||
@@ -830,7 +851,6 @@ semantic_version = { codegen-units = 1 }
|
||||
session = { codegen-units = 1 }
|
||||
snippet = { codegen-units = 1 }
|
||||
snippets_ui = { codegen-units = 1 }
|
||||
sqlez_macros = { codegen-units = 1 }
|
||||
story = { codegen-units = 1 }
|
||||
supermaven_api = { codegen-units = 1 }
|
||||
telemetry_events = { codegen-units = 1 }
|
||||
@@ -840,7 +860,7 @@ ui_input = { codegen-units = 1 }
|
||||
zed_actions = { codegen-units = 1 }
|
||||
|
||||
[profile.release]
|
||||
debug = "full"
|
||||
debug = "limited"
|
||||
lto = "thin"
|
||||
codegen-units = 1
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
# syntax = docker/dockerfile:1.2
|
||||
|
||||
FROM rust:1.90-bookworm as builder
|
||||
FROM rust:1.91.1-bookworm as builder
|
||||
WORKDIR app
|
||||
COPY . .
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
# Zed
|
||||
|
||||
[](https://zed.dev)
|
||||
[](https://github.com/zed-industries/zed/actions/workflows/ci.yml)
|
||||
[](https://github.com/zed-industries/zed/actions/workflows/run_tests.yml)
|
||||
|
||||
Welcome to Zed, a high-performance, multiplayer code editor from the creators of [Atom](https://github.com/atom/atom) and [Tree-sitter](https://github.com/tree-sitter/tree-sitter).
|
||||
|
||||
|
||||
@@ -44,6 +44,7 @@ design
|
||||
|
||||
docs
|
||||
= @probably-neb
|
||||
= @miguelraz
|
||||
|
||||
extension
|
||||
= @kubkon
|
||||
@@ -98,6 +99,9 @@ settings_ui
|
||||
= @danilo-leal
|
||||
= @probably-neb
|
||||
|
||||
support
|
||||
= @miguelraz
|
||||
|
||||
tasks
|
||||
= @SomeoneToIgnore
|
||||
= @Veykril
|
||||
|
||||
4
assets/icons/at_sign.svg
Normal file
4
assets/icons/at_sign.svg
Normal file
@@ -0,0 +1,4 @@
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M8.00156 10.3996C9.32705 10.3996 10.4016 9.32509 10.4016 7.99961C10.4016 6.67413 9.32705 5.59961 8.00156 5.59961C6.67608 5.59961 5.60156 6.67413 5.60156 7.99961C5.60156 9.32509 6.67608 10.3996 8.00156 10.3996Z" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M10.4 5.6V8.6C10.4 9.07739 10.5896 9.53523 10.9272 9.8728C11.2648 10.2104 11.7226 10.4 12.2 10.4C12.6774 10.4 13.1352 10.2104 13.4728 9.8728C13.8104 9.53523 14 9.07739 14 8.6V8C14 6.64839 13.5436 5.33636 12.7048 4.27651C11.8661 3.21665 10.694 2.47105 9.37852 2.16051C8.06306 1.84997 6.68129 1.99269 5.45707 2.56554C4.23285 3.13838 3.23791 4.1078 2.63344 5.31672C2.02898 6.52565 1.85041 7.90325 2.12667 9.22633C2.40292 10.5494 3.11782 11.7405 4.15552 12.6065C5.19323 13.4726 6.49295 13.9629 7.84411 13.998C9.19527 14.0331 10.5187 13.611 11.6 12.8" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 1.0 KiB |
32
assets/icons/sweep_ai.svg
Normal file
32
assets/icons/sweep_ai.svg
Normal file
@@ -0,0 +1,32 @@
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<g clip-path="url(#clip0_3348_16)">
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M7.97419 6.27207C8.44653 6.29114 8.86622 6.27046 9.23628 6.22425C9.08884 7.48378 8.7346 8.72903 8.16697 9.90688C8.04459 9.83861 7.92582 9.76008 7.81193 9.67108C7.64539 9.54099 7.49799 9.39549 7.37015 9.23818C7.5282 9.54496 7.64901 9.86752 7.73175 10.1986C7.35693 10.6656 6.90663 11.0373 6.412 11.3101C5.01165 10.8075 4.03638 9.63089 4.03638 7.93001C4.03638 6.96185 4.35234 6.07053 4.88281 5.36157C5.34001 5.69449 6.30374 6.20455 7.97419 6.27207ZM8.27511 11.5815C10.3762 11.5349 11.8115 10.7826 12.8347 7.93001C11.6992 7.93001 11.4246 7.10731 11.1188 6.19149C11.0669 6.03596 11.0141 5.87771 10.956 5.72037C10.6733 5.86733 10.2753 6.02782 9.74834 6.13895C9.59658 7.49345 9.20592 8.83238 8.56821 10.0897C8.89933 10.2093 9.24674 10.262 9.5908 10.2502C9.08928 10.4803 8.62468 10.8066 8.22655 11.2255C8.2457 11.3438 8.26186 11.4625 8.27511 11.5815ZM6.62702 7.75422C6.62702 7.50604 6.82821 7.30485 7.07639 7.30485C7.32457 7.30485 7.52576 7.50604 7.52576 7.75422V8.23616C7.52576 8.48435 7.32457 8.68554 7.07639 8.68554C6.82821 8.68554 6.62702 8.48435 6.62702 8.23616V7.75422ZM5.27746 7.30485C5.05086 7.30485 4.86716 7.48854 4.86716 7.71513V8.27525C4.86716 8.50185 5.05086 8.68554 5.27746 8.68554C5.50406 8.68554 5.68776 8.50185 5.68776 8.27525V7.71513C5.68776 7.48854 5.50406 7.30485 5.27746 7.30485Z" fill="white"/>
|
||||
<mask id="mask0_3348_16" style="mask-type:luminance" maskUnits="userSpaceOnUse" x="4" y="5" width="9" height="7">
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M7.97419 6.27207C8.44653 6.29114 8.86622 6.27046 9.23628 6.22425C9.08884 7.48378 8.7346 8.72903 8.16697 9.90688C8.04459 9.83861 7.92582 9.76008 7.81193 9.67108C7.64539 9.54099 7.49799 9.39549 7.37015 9.23818C7.5282 9.54496 7.64901 9.86752 7.73175 10.1986C7.35693 10.6656 6.90663 11.0373 6.412 11.3101C5.01165 10.8075 4.03638 9.63089 4.03638 7.93001C4.03638 6.96185 4.35234 6.07053 4.88281 5.36157C5.34001 5.69449 6.30374 6.20455 7.97419 6.27207ZM8.27511 11.5815C10.3762 11.5349 11.8115 10.7826 12.8347 7.93001C11.6992 7.93001 11.4246 7.10731 11.1188 6.19149C11.0669 6.03596 11.0141 5.87771 10.956 5.72037C10.6733 5.86733 10.2753 6.02782 9.74834 6.13895C9.59658 7.49345 9.20592 8.83238 8.56821 10.0897C8.89933 10.2093 9.24674 10.262 9.5908 10.2502C9.08928 10.4803 8.62468 10.8066 8.22655 11.2255C8.2457 11.3438 8.26186 11.4625 8.27511 11.5815ZM6.62702 7.75422C6.62702 7.50604 6.82821 7.30485 7.07639 7.30485C7.32457 7.30485 7.52576 7.50604 7.52576 7.75422V8.23616C7.52576 8.48435 7.32457 8.68554 7.07639 8.68554C6.82821 8.68554 6.62702 8.48435 6.62702 8.23616V7.75422ZM5.27746 7.30485C5.05086 7.30485 4.86716 7.48854 4.86716 7.71513V8.27525C4.86716 8.50185 5.05086 8.68554 5.27746 8.68554C5.50406 8.68554 5.68776 8.50185 5.68776 8.27525V7.71513C5.68776 7.48854 5.50406 7.30485 5.27746 7.30485Z" fill="white"/>
|
||||
</mask>
|
||||
<g mask="url(#mask0_3348_16)">
|
||||
<path d="M9.23617 6.22425L9.39588 6.24293L9.41971 6.0393L9.21624 6.06471L9.23617 6.22425ZM8.16687 9.90688L8.08857 10.0473L8.23765 10.1305L8.31174 9.97669L8.16687 9.90688ZM7.37005 9.23819L7.49487 9.13676L7.22714 9.3118L7.37005 9.23819ZM7.73165 10.1986L7.85702 10.2993L7.90696 10.2371L7.88761 10.1597L7.73165 10.1986ZM6.41189 11.3101L6.35758 11.4615L6.42594 11.486L6.48954 11.4509L6.41189 11.3101ZM4.88271 5.36157L4.97736 5.23159L4.84905 5.13817L4.75397 5.26525L4.88271 5.36157ZM8.27501 11.5815L8.11523 11.5993L8.13151 11.7456L8.27859 11.7423L8.27501 11.5815ZM12.8346 7.93001L12.986 7.98428L13.0631 7.76921H12.8346V7.93001ZM10.9559 5.72037L11.1067 5.66469L11.0436 5.49354L10.8817 5.5777L10.9559 5.72037ZM9.74824 6.13896L9.71508 5.98161L9.60139 6.0056L9.58846 6.12102L9.74824 6.13896ZM8.56811 10.0897L8.42469 10.017L8.34242 10.1792L8.51348 10.241L8.56811 10.0897ZM9.5907 10.2502L9.65775 10.3964L9.58519 10.0896L9.5907 10.2502ZM8.22644 11.2255L8.10992 11.1147L8.05502 11.1725L8.06773 11.2512L8.22644 11.2255ZM9.21624 6.06471C8.85519 6.10978 8.44439 6.13015 7.98058 6.11139L7.96756 6.43272C8.44852 6.45215 8.87701 6.43111 9.25607 6.3838L9.21624 6.06471ZM8.31174 9.97669C8.88724 8.78244 9.2464 7.51988 9.39588 6.24293L9.07647 6.20557C8.93108 7.44772 8.58175 8.67563 8.02203 9.83708L8.31174 9.97669ZM8.2452 9.76645C8.12998 9.70219 8.01817 9.62826 7.91082 9.54438L7.71285 9.79779C7.8333 9.8919 7.95895 9.97503 8.08857 10.0473L8.2452 9.76645ZM7.91082 9.54438C7.75387 9.4218 7.61512 9.28479 7.49487 9.13676L7.24526 9.33957C7.38066 9.50619 7.53671 9.66023 7.71285 9.79779L7.91082 9.54438ZM7.22714 9.3118C7.37944 9.60746 7.49589 9.91837 7.57564 10.2376L7.88761 10.1597C7.80196 9.81663 7.67679 9.48248 7.513 9.16453L7.22714 9.3118ZM7.60624 10.098C7.24483 10.5482 6.81083 10.9065 6.33425 11.1693L6.48954 11.4509C7.00223 11.1682 7.46887 10.7829 7.85702 10.2993L7.60624 10.098ZM3.87549 7.93001C3.87548 9.7042 4.89861 10.9378 6.35758 11.4615L6.46622 11.1588C5.12449 10.6772 4.19707 9.55763 4.19707 7.93001H3.87549ZM4.75397 5.26525C4.20309 6.00147 3.87549 6.92646 3.87549 7.93001H4.19707C4.19707 6.99724 4.50139 6.13959 5.01145 5.45791L4.75397 5.26525ZM7.98058 6.11139C6.34236 6.04516 5.40922 5.54604 4.97736 5.23159L4.78806 5.49157C5.27058 5.84291 6.26491 6.3639 7.96756 6.43272L7.98058 6.11139ZM8.27859 11.7423C9.34696 11.7185 10.2682 11.515 11.0542 10.9376C11.8388 10.3612 12.4683 9.4273 12.986 7.98428L12.6833 7.8757C12.1776 9.28534 11.5779 10.1539 10.8638 10.6784C10.1511 11.202 9.30417 11.3978 8.27143 11.4208L8.27859 11.7423ZM12.8346 7.76921C12.3148 7.76921 12.0098 7.58516 11.7925 7.30552C11.5639 7.0114 11.4266 6.60587 11.2712 6.14061L10.9662 6.24242C11.1166 6.69294 11.2695 7.15667 11.5385 7.50285C11.8188 7.86347 12.2189 8.09078 12.8346 8.09078V7.76921ZM11.2712 6.14061C11.2195 5.98543 11.1658 5.82478 11.1067 5.66469L10.805 5.77606C10.8621 5.93065 10.9142 6.0865 10.9662 6.24242L11.2712 6.14061ZM10.8817 5.5777C10.6115 5.71821 10.2273 5.87362 9.71508 5.98161L9.78143 6.29626C10.3232 6.18206 10.735 6.0165 11.0301 5.86301L10.8817 5.5777ZM9.58846 6.12102C9.43882 7.45684 9.05355 8.77717 8.42469 10.017L8.71149 10.1625C9.35809 8.88764 9.75417 7.53011 9.90806 6.15685L9.58846 6.12102ZM9.58519 10.0896C9.26119 10.1006 8.93423 10.051 8.62269 9.93854L8.51348 10.241C8.86427 10.3677 9.23205 10.4234 9.5962 10.4109L9.58519 10.0896ZM8.34301 11.3363C8.72675 10.9325 9.17443 10.6181 9.65775 10.3964L9.52365 10.1041C9.00392 10.3425 8.52241 10.6807 8.10992 11.1147L8.34301 11.3363ZM8.43483 11.5638C8.4213 11.4421 8.40475 11.3207 8.3852 11.1998L8.06773 11.2512C8.08644 
11.3668 8.10225 11.4829 8.11523 11.5993L8.43483 11.5638ZM7.07629 7.14405C6.73931 7.14405 6.46613 7.41724 6.46613 7.75423H6.7877C6.7877 7.59484 6.91691 7.46561 7.07629 7.46561V7.14405ZM7.68646 7.75423C7.68646 7.41724 7.41326 7.14405 7.07629 7.14405V7.46561C7.23567 7.46561 7.36489 7.59484 7.36489 7.75423H7.68646ZM7.68646 8.23616V7.75423H7.36489V8.23616H7.68646ZM7.07629 8.84634C7.41326 8.84634 7.68646 8.57315 7.68646 8.23616H7.36489C7.36489 8.39555 7.23567 8.52474 7.07629 8.52474V8.84634ZM6.46613 8.23616C6.46613 8.57315 6.73931 8.84634 7.07629 8.84634V8.52474C6.91691 8.52474 6.7877 8.39555 6.7877 8.23616H6.46613ZM6.46613 7.75423V8.23616H6.7877V7.75423H6.46613ZM5.02785 7.71514C5.02785 7.57734 5.13956 7.46561 5.27736 7.46561V7.14405C4.96196 7.14405 4.70627 7.39974 4.70627 7.71514H5.02785ZM5.02785 8.27525V7.71514H4.70627V8.27525H5.02785ZM5.27736 8.52474C5.13956 8.52474 5.02785 8.41305 5.02785 8.27525H4.70627C4.70627 8.59065 4.96196 8.84634 5.27736 8.84634V8.52474ZM5.52687 8.27525C5.52687 8.41305 5.41516 8.52474 5.27736 8.52474V8.84634C5.59277 8.84634 5.84845 8.59065 5.84845 8.27525H5.52687ZM5.52687 7.71514V8.27525H5.84845V7.71514H5.52687ZM5.27736 7.46561C5.41516 7.46561 5.52687 7.57734 5.52687 7.71514H5.84845C5.84845 7.39974 5.59277 7.14405 5.27736 7.14405V7.46561Z" fill="white"/>
|
||||
</g>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M7.12635 14.5901C7.22369 14.3749 7.3069 14.1501 7.37454 13.9167C7.54132 13.3412 7.5998 12.7599 7.56197 12.1948C7.53665 12.5349 7.47589 12.8775 7.37718 13.2181C7.23926 13.694 7.03667 14.1336 6.78174 14.5301C6.89605 14.5547 7.01101 14.5747 7.12635 14.5901Z" fill="white"/>
|
||||
<path d="M9.71984 7.74796C9.50296 7.74796 9.29496 7.83412 9.14159 7.98745C8.98822 8.14082 8.9021 8.34882 8.9021 8.5657C8.9021 8.78258 8.98822 8.99057 9.14159 9.14394C9.29496 9.29728 9.50296 9.38344 9.71984 9.38344V8.5657V7.74796Z" fill="white"/>
|
||||
<mask id="mask1_3348_16" style="mask-type:luminance" maskUnits="userSpaceOnUse" x="5" y="2" width="8" height="9">
|
||||
<path d="M12.3783 2.9985H5.36792V10.3954H12.3783V2.9985Z" fill="white"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M9.75733 3.61999C9.98577 5.80374 9.60089 8.05373 8.56819 10.0898C8.43122 10.0403 8.29704 9.9794 8.16699 9.90688C9.15325 7.86033 9.49538 5.61026 9.22757 3.43526C9.39923 3.51584 9.57682 3.57729 9.75733 3.61999Z" fill="black"/>
|
||||
</mask>
|
||||
<g mask="url(#mask1_3348_16)">
|
||||
<path d="M8.56815 10.0898L8.67689 10.1449L8.62812 10.241L8.52678 10.2044L8.56815 10.0898ZM9.75728 3.61998L9.78536 3.50136L9.86952 3.52127L9.87853 3.6073L9.75728 3.61998ZM8.16695 9.90687L8.1076 10.0133L8.00732 9.9574L8.05715 9.85398L8.16695 9.90687ZM9.22753 3.43524L9.10656 3.45014L9.07958 3.23116L9.27932 3.32491L9.22753 3.43524ZM8.45945 10.0346C9.48122 8.02009 9.86217 5.79374 9.63608 3.63266L9.87853 3.6073C10.1093 5.81372 9.72048 8.0873 8.67689 10.1449L8.45945 10.0346ZM8.22633 9.80041C8.35056 9.86971 8.47876 9.92791 8.60956 9.97514L8.52678 10.2044C8.38363 10.1527 8.24344 10.0891 8.1076 10.0133L8.22633 9.80041ZM9.34849 3.42035C9.61905 5.61792 9.27346 7.89158 8.27675 9.9598L8.05715 9.85398C9.03298 7.82905 9.37158 5.60258 9.10656 3.45014L9.34849 3.42035ZM9.72925 3.7386C9.54064 3.69399 9.3551 3.62977 9.17573 3.54558L9.27932 3.32491C9.44327 3.40188 9.61288 3.46058 9.78536 3.50136L9.72925 3.7386Z" fill="white"/>
|
||||
</g>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M11.4118 3.46925L11.2416 3.39926L11.1904 3.57611L11.349 3.62202C11.1904 3.57611 11.1904 3.57615 11.1904 3.5762L11.1903 3.57631L11.1902 3.57658L11.19 3.57741L11.1893 3.58009C11.1886 3.58233 11.1878 3.58548 11.1867 3.58949C11.1845 3.5975 11.1814 3.60897 11.1777 3.62359C11.1703 3.6528 11.1603 3.69464 11.1493 3.74656C11.1275 3.85017 11.102 3.99505 11.0869 4.16045C11.0573 4.4847 11.0653 4.91594 11.2489 5.26595C11.2613 5.28944 11.2643 5.31174 11.2625 5.32629C11.261 5.33849 11.2572 5.34226 11.2536 5.3449C11.0412 5.50026 10.5639 5.78997 9.76653 5.96607C9.76095 6.02373 9.75493 6.08134 9.74848 6.13895C10.601 5.95915 11.1161 5.65017 11.3511 5.4782C11.4413 5.41219 11.4471 5.28823 11.3952 5.18922C11.1546 4.73063 11.2477 4.08248 11.3103 3.78401C11.3314 3.68298 11.349 3.62202 11.349 3.62202C11.3745 3.6325 11.4002 3.63983 11.4259 3.64425C11.9083 3.72709 12.4185 2.78249 12.6294 2.33939C12.6852 2.22212 12.6234 2.08843 12.497 2.05837C11.2595 1.76399 5.46936 0.631807 4.57214 4.96989C4.55907 5.03307 4.57607 5.10106 4.62251 5.14584C4.87914 5.39322 5.86138 6.18665 7.9743 6.27207C8.44664 6.29114 8.86633 6.27046 9.23638 6.22425C9.24295 6.16797 9.24912 6.1117 9.25491 6.05534C8.88438 6.10391 8.46092 6.12641 7.98094 6.10702C5.91152 6.02337 4.96693 5.24843 4.73714 5.02692C4.73701 5.02679 4.73545 5.02525 4.73422 5.0208C4.73292 5.01611 4.73254 5.00987 4.73388 5.00334C4.94996 3.95861 5.4573 3.25195 6.11188 2.77714C6.77039 2.29947 7.58745 2.04983 8.42824 1.94075C10.1122 1.72228 11.8454 2.07312 12.4588 2.21906C12.4722 2.22225 12.4787 2.22927 12.4819 2.2362C12.4853 2.24342 12.4869 2.25443 12.4803 2.2684C12.3706 2.49879 12.183 2.85746 11.9656 3.13057C11.8564 3.26783 11.7479 3.37295 11.6469 3.43216C11.5491 3.48956 11.4752 3.49529 11.4118 3.46925Z" fill="white"/>
|
||||
<mask id="mask2_3348_16" style="mask-type:luminance" maskUnits="userSpaceOnUse" x="3" y="9" width="7" height="6">
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M8.22654 11.2255C8.62463 10.8066 9.08923 10.4803 9.59075 10.2502C8.97039 10.2715 8.33933 10.0831 7.81189 9.67109C7.64534 9.541 7.49795 9.39549 7.37014 9.23819C7.52815 9.54497 7.64896 9.86752 7.7317 10.1986C6.70151 11.4821 5.1007 12.0466 3.57739 11.8125C3.85909 12.527 4.32941 13.178 4.97849 13.6851C5.8625 14.3756 6.92544 14.6799 7.96392 14.6227C8.32513 13.5174 8.4085 12.351 8.22654 11.2255Z" fill="white"/>
|
||||
</mask>
|
||||
<g mask="url(#mask2_3348_16)">
|
||||
<path d="M9.59085 10.2502L9.58389 10.0472L9.67556 10.4349L9.59085 10.2502ZM8.22663 11.2255L8.02607 11.258L8.00999 11.1585L8.07936 11.0856L8.22663 11.2255ZM7.37024 9.23819L7.18961 9.33119L7.52789 9.11006L7.37024 9.23819ZM7.7318 10.1986L7.92886 10.1494L7.95328 10.2472L7.8902 10.3258L7.7318 10.1986ZM3.57749 11.8125L3.3885 11.887L3.25879 11.5579L3.60835 11.6117L3.57749 11.8125ZM7.96402 14.6227L8.15711 14.6858L8.11397 14.8179L7.97519 14.8255L7.96402 14.6227ZM9.67556 10.4349C9.19708 10.6544 8.7538 10.9657 8.37387 11.3655L8.07936 11.0856C8.49566 10.6475 8.98161 10.3062 9.50614 10.0656L9.67556 10.4349ZM7.93704 9.51099C8.42551 9.89261 9.00942 10.0669 9.58389 10.0472L9.59781 10.4533C8.93151 10.4761 8.25334 10.2737 7.68693 9.83118L7.93704 9.51099ZM7.52789 9.11006C7.64615 9.25565 7.78261 9.39038 7.93704 9.51099L7.68693 9.83118C7.50827 9.69161 7.34994 9.53537 7.21254 9.36627L7.52789 9.11006ZM7.5347 10.2479C7.45573 9.93178 7.34043 9.62393 7.18961 9.33119L7.55082 9.14514C7.71611 9.466 7.84242 9.80326 7.92886 10.1494L7.5347 10.2479ZM3.60835 11.6117C5.06278 11.8352 6.59038 11.2962 7.57335 10.0715L7.8902 10.3258C6.81284 11.6681 5.1388 12.258 3.54663 12.0133L3.60835 11.6117ZM4.85352 13.8452C4.17512 13.3152 3.68312 12.6343 3.3885 11.887L3.76648 11.738C4.03524 12.4197 4.4839 13.0409 5.10364 13.525L4.85352 13.8452ZM7.97519 14.8255C6.8895 14.8853 5.77774 14.5672 4.85352 13.8452L5.10364 13.525C5.94745 14.1842 6.96157 14.4744 7.95285 14.4198L7.97519 14.8255ZM8.42716 11.1931C8.61419 12.3499 8.52858 13.5491 8.15711 14.6858L7.77093 14.5596C8.12191 13.4857 8.20296 12.352 8.02607 11.258L8.42716 11.1931Z" fill="white"/>
|
||||
</g>
|
||||
</g>
|
||||
<defs>
|
||||
<clipPath id="clip0_3348_16">
|
||||
<rect width="9.63483" height="14" fill="white" transform="translate(3.19995 1.5)"/>
|
||||
</clipPath>
|
||||
</defs>
|
||||
</svg>
|
||||
|
After Width: | Height: | Size: 14 KiB |
@@ -313,7 +313,7 @@
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"cmd-n": "agent::NewTextThread",
|
||||
"cmd-alt-t": "agent::NewThread"
|
||||
"cmd-alt-n": "agent::NewExternalAgentThread"
|
||||
}
|
||||
},
|
||||
{
|
||||
|
||||
@@ -421,12 +421,6 @@
|
||||
"ctrl-[": "editor::Cancel"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "vim_mode == helix_select && !menu",
|
||||
"bindings": {
|
||||
"escape": "vim::SwitchToHelixNormalMode"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "(vim_mode == helix_normal || vim_mode == helix_select) && !menu",
|
||||
"bindings": {
|
||||
|
||||
@@ -605,6 +605,10 @@
|
||||
// to both the horizontal and vertical delta values while scrolling. Fast scrolling
|
||||
// happens when a user holds the alt or option key while scrolling.
|
||||
"fast_scroll_sensitivity": 4.0,
|
||||
"sticky_scroll": {
|
||||
// Whether to stick scopes to the top of the editor.
|
||||
"enabled": false
|
||||
},
|
||||
"relative_line_numbers": "disabled",
|
||||
// If 'search_wrap' is disabled, search result do not wrap around the end of the file.
|
||||
"search_wrap": true,
|
||||
@@ -612,9 +616,13 @@
|
||||
"search": {
|
||||
// Whether to show the project search button in the status bar.
|
||||
"button": true,
|
||||
// Whether to only match on whole words.
|
||||
"whole_word": false,
|
||||
// Whether to match case sensitively.
|
||||
"case_sensitive": false,
|
||||
// Whether to include gitignored files in search results.
|
||||
"include_ignored": false,
|
||||
// Whether to interpret the search query as a regular expression.
|
||||
"regex": false,
|
||||
// Whether to center the cursor on each search match when navigating.
|
||||
"center_on_match": false
|
||||
@@ -734,14 +742,31 @@
|
||||
// "never"
|
||||
"show": "always"
|
||||
},
|
||||
// Sort order for entries in the project panel.
|
||||
// This setting can take three values:
|
||||
//
|
||||
// 1. Show directories first, then files:
|
||||
// "directories_first"
|
||||
// 2. Mix directories and files together:
|
||||
// "mixed"
|
||||
// 3. Show files first, then directories:
|
||||
// "files_first"
|
||||
"sort_mode": "directories_first",
|
||||
// Whether to enable drag-and-drop operations in the project panel.
|
||||
"drag_and_drop": true,
|
||||
// Whether to hide the root entry when only one folder is open in the window.
|
||||
"hide_root": false,
|
||||
// Whether to hide the hidden entries in the project panel.
|
||||
"hide_hidden": false,
|
||||
// Whether to automatically open files when pasting them in the project panel.
|
||||
"open_file_on_paste": true
|
||||
// Settings for automatically opening files.
|
||||
"auto_open": {
|
||||
// Whether to automatically open newly created files in the editor.
|
||||
"on_create": true,
|
||||
// Whether to automatically open files after pasting or duplicating them.
|
||||
"on_paste": true,
|
||||
// Whether to automatically open files dropped from external sources.
|
||||
"on_drop": true
|
||||
}
|
||||
},
|
||||
"outline_panel": {
|
||||
// Whether to show the outline panel button in the status bar
|
||||
@@ -1535,6 +1560,8 @@
|
||||
// Default: 10_000, maximum: 100_000 (all bigger values set will be treated as 100_000), 0 disables the scrolling.
|
||||
// Existing terminals will not pick up this change until they are recreated.
|
||||
"max_scroll_history_lines": 10000,
|
||||
// The multiplier for scrolling speed in the terminal.
|
||||
"scroll_multiplier": 1.0,
|
||||
// The minimum APCA perceptual contrast between foreground and background colors.
|
||||
// APCA (Accessible Perceptual Contrast Algorithm) is more accurate than WCAG 2.x,
|
||||
// especially for dark mode. Values range from 0 to 106.
|
||||
@@ -2032,6 +2059,18 @@
|
||||
"dev": {
|
||||
// "theme": "Andromeda"
|
||||
},
|
||||
// Settings overrides to use when using linux
|
||||
"linux": {},
|
||||
// Settings overrides to use when using macos
|
||||
"macos": {},
|
||||
// Settings overrides to use when using windows
|
||||
"windows": {
|
||||
"languages": {
|
||||
"PHP": {
|
||||
"language_servers": ["intelephense", "!phpactor", "..."]
|
||||
}
|
||||
}
|
||||
},
|
||||
// Whether to show full labels in line indicator or short ones
|
||||
//
|
||||
// Values:
|
||||
|
||||
@@ -1866,10 +1866,14 @@ impl AcpThread {
|
||||
.checkpoint
|
||||
.as_ref()
|
||||
.map(|c| c.git_checkpoint.clone());
|
||||
|
||||
// Cancel any in-progress generation before restoring
|
||||
let cancel_task = self.cancel(cx);
|
||||
let rewind = self.rewind(id.clone(), cx);
|
||||
let git_store = self.project.read(cx).git_store().clone();
|
||||
|
||||
cx.spawn(async move |_, cx| {
|
||||
cancel_task.await;
|
||||
rewind.await?;
|
||||
if let Some(checkpoint) = checkpoint {
|
||||
git_store
|
||||
@@ -1894,9 +1898,25 @@ impl AcpThread {
|
||||
cx.update(|cx| truncate.run(id.clone(), cx))?.await?;
|
||||
this.update(cx, |this, cx| {
|
||||
if let Some((ix, _)) = this.user_message_mut(&id) {
|
||||
// Collect all terminals from entries that will be removed
|
||||
let terminals_to_remove: Vec<acp::TerminalId> = this.entries[ix..]
|
||||
.iter()
|
||||
.flat_map(|entry| entry.terminals())
|
||||
.filter_map(|terminal| terminal.read(cx).id().clone().into())
|
||||
.collect();
|
||||
|
||||
let range = ix..this.entries.len();
|
||||
this.entries.truncate(ix);
|
||||
cx.emit(AcpThreadEvent::EntriesRemoved(range));
|
||||
|
||||
// Kill and remove the terminals
|
||||
for terminal_id in terminals_to_remove {
|
||||
if let Some(terminal) = this.terminals.remove(&terminal_id) {
|
||||
terminal.update(cx, |terminal, cx| {
|
||||
terminal.kill(cx);
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
this.action_log().update(cx, |action_log, cx| {
|
||||
action_log.reject_all_edits(Some(telemetry), cx)
|
||||
@@ -3803,4 +3823,314 @@ mod tests {
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/// Tests that restoring a checkpoint properly cleans up terminals that were
|
||||
/// created after that checkpoint, and cancels any in-progress generation.
|
||||
///
|
||||
/// Reproduces issue #35142: When a checkpoint is restored, any terminal processes
|
||||
/// that were started after that checkpoint should be terminated, and any in-progress
|
||||
/// AI generation should be canceled.
|
||||
#[gpui::test]
|
||||
async fn test_restore_checkpoint_kills_terminal(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
let project = Project::test(fs, [], cx).await;
|
||||
let connection = Rc::new(FakeAgentConnection::new());
|
||||
let thread = cx
|
||||
.update(|cx| connection.new_thread(project, Path::new(path!("/test")), cx))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// Send first user message to create a checkpoint
|
||||
cx.update(|cx| {
|
||||
thread.update(cx, |thread, cx| {
|
||||
thread.send(vec!["first message".into()], cx)
|
||||
})
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// Send second message (creates another checkpoint) - we'll restore to this one
|
||||
cx.update(|cx| {
|
||||
thread.update(cx, |thread, cx| {
|
||||
thread.send(vec!["second message".into()], cx)
|
||||
})
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// Create 2 terminals BEFORE the checkpoint that have completed running
|
||||
let terminal_id_1 = acp::TerminalId(uuid::Uuid::new_v4().to_string().into());
|
||||
let mock_terminal_1 = cx.new(|cx| {
|
||||
let builder = ::terminal::TerminalBuilder::new_display_only(
|
||||
::terminal::terminal_settings::CursorShape::default(),
|
||||
::terminal::terminal_settings::AlternateScroll::On,
|
||||
None,
|
||||
0,
|
||||
)
|
||||
.unwrap();
|
||||
builder.subscribe(cx)
|
||||
});
|
||||
|
||||
thread.update(cx, |thread, cx| {
|
||||
thread.on_terminal_provider_event(
|
||||
TerminalProviderEvent::Created {
|
||||
terminal_id: terminal_id_1.clone(),
|
||||
label: "echo 'first'".to_string(),
|
||||
cwd: Some(PathBuf::from("/test")),
|
||||
output_byte_limit: None,
|
||||
terminal: mock_terminal_1.clone(),
|
||||
},
|
||||
cx,
|
||||
);
|
||||
});
|
||||
|
||||
thread.update(cx, |thread, cx| {
|
||||
thread.on_terminal_provider_event(
|
||||
TerminalProviderEvent::Output {
|
||||
terminal_id: terminal_id_1.clone(),
|
||||
data: b"first\n".to_vec(),
|
||||
},
|
||||
cx,
|
||||
);
|
||||
});
|
||||
|
||||
thread.update(cx, |thread, cx| {
|
||||
thread.on_terminal_provider_event(
|
||||
TerminalProviderEvent::Exit {
|
||||
terminal_id: terminal_id_1.clone(),
|
||||
status: acp::TerminalExitStatus {
|
||||
exit_code: Some(0),
|
||||
signal: None,
|
||||
meta: None,
|
||||
},
|
||||
},
|
||||
cx,
|
||||
);
|
||||
});
|
||||
|
||||
let terminal_id_2 = acp::TerminalId(uuid::Uuid::new_v4().to_string().into());
|
||||
let mock_terminal_2 = cx.new(|cx| {
|
||||
let builder = ::terminal::TerminalBuilder::new_display_only(
|
||||
::terminal::terminal_settings::CursorShape::default(),
|
||||
::terminal::terminal_settings::AlternateScroll::On,
|
||||
None,
|
||||
0,
|
||||
)
|
||||
.unwrap();
|
||||
builder.subscribe(cx)
|
||||
});
|
||||
|
||||
thread.update(cx, |thread, cx| {
|
||||
thread.on_terminal_provider_event(
|
||||
TerminalProviderEvent::Created {
|
||||
terminal_id: terminal_id_2.clone(),
|
||||
label: "echo 'second'".to_string(),
|
||||
cwd: Some(PathBuf::from("/test")),
|
||||
output_byte_limit: None,
|
||||
terminal: mock_terminal_2.clone(),
|
||||
},
|
||||
cx,
|
||||
);
|
||||
});
|
||||
|
||||
thread.update(cx, |thread, cx| {
|
||||
thread.on_terminal_provider_event(
|
||||
TerminalProviderEvent::Output {
|
||||
terminal_id: terminal_id_2.clone(),
|
||||
data: b"second\n".to_vec(),
|
||||
},
|
||||
cx,
|
||||
);
|
||||
});
|
||||
|
||||
thread.update(cx, |thread, cx| {
|
||||
thread.on_terminal_provider_event(
|
||||
TerminalProviderEvent::Exit {
|
||||
terminal_id: terminal_id_2.clone(),
|
||||
status: acp::TerminalExitStatus {
|
||||
exit_code: Some(0),
|
||||
signal: None,
|
||||
meta: None,
|
||||
},
|
||||
},
|
||||
cx,
|
||||
);
|
||||
});
|
||||
|
||||
// Get the second message ID to restore to
|
||||
let second_message_id = thread.read_with(cx, |thread, _| {
|
||||
// At this point we have:
|
||||
// - Index 0: First user message (with checkpoint)
|
||||
// - Index 1: Second user message (with checkpoint)
|
||||
// No assistant responses because FakeAgentConnection just returns EndTurn
|
||||
let AgentThreadEntry::UserMessage(message) = &thread.entries[1] else {
|
||||
panic!("expected user message at index 1");
|
||||
};
|
||||
message.id.clone().unwrap()
|
||||
});
|
||||
|
||||
// Create a terminal AFTER the checkpoint we'll restore to.
|
||||
// This simulates the AI agent starting a long-running terminal command.
|
||||
let terminal_id = acp::TerminalId(uuid::Uuid::new_v4().to_string().into());
|
||||
let mock_terminal = cx.new(|cx| {
|
||||
let builder = ::terminal::TerminalBuilder::new_display_only(
|
||||
::terminal::terminal_settings::CursorShape::default(),
|
||||
::terminal::terminal_settings::AlternateScroll::On,
|
||||
None,
|
||||
0,
|
||||
)
|
||||
.unwrap();
|
||||
builder.subscribe(cx)
|
||||
});
|
||||
|
||||
// Register the terminal as created
|
||||
thread.update(cx, |thread, cx| {
|
||||
thread.on_terminal_provider_event(
|
||||
TerminalProviderEvent::Created {
|
||||
terminal_id: terminal_id.clone(),
|
||||
label: "sleep 1000".to_string(),
|
||||
cwd: Some(PathBuf::from("/test")),
|
||||
output_byte_limit: None,
|
||||
terminal: mock_terminal.clone(),
|
||||
},
|
||||
cx,
|
||||
);
|
||||
});
|
||||
|
||||
// Simulate the terminal producing output (still running)
|
||||
thread.update(cx, |thread, cx| {
|
||||
thread.on_terminal_provider_event(
|
||||
TerminalProviderEvent::Output {
|
||||
terminal_id: terminal_id.clone(),
|
||||
data: b"terminal is running...\n".to_vec(),
|
||||
},
|
||||
cx,
|
||||
);
|
||||
});
|
||||
|
||||
// Create a tool call entry that references this terminal
|
||||
// This represents the agent requesting a terminal command
|
||||
thread.update(cx, |thread, cx| {
|
||||
thread
|
||||
.handle_session_update(
|
||||
acp::SessionUpdate::ToolCall(acp::ToolCall {
|
||||
id: acp::ToolCallId("terminal-tool-1".into()),
|
||||
title: "Running command".into(),
|
||||
kind: acp::ToolKind::Execute,
|
||||
status: acp::ToolCallStatus::InProgress,
|
||||
content: vec![acp::ToolCallContent::Terminal {
|
||||
terminal_id: terminal_id.clone(),
|
||||
}],
|
||||
locations: vec![],
|
||||
raw_input: Some(
|
||||
serde_json::json!({"command": "sleep 1000", "cd": "/test"}),
|
||||
),
|
||||
raw_output: None,
|
||||
meta: None,
|
||||
}),
|
||||
cx,
|
||||
)
|
||||
.unwrap();
|
||||
});
|
||||
|
||||
// Verify terminal exists and is in the thread
|
||||
let terminal_exists_before =
|
||||
thread.read_with(cx, |thread, _| thread.terminals.contains_key(&terminal_id));
|
||||
assert!(
|
||||
terminal_exists_before,
|
||||
"Terminal should exist before checkpoint restore"
|
||||
);
|
||||
|
||||
// Verify the terminal's underlying task is still running (not completed)
|
||||
let terminal_running_before = thread.read_with(cx, |thread, _cx| {
|
||||
let terminal_entity = thread.terminals.get(&terminal_id).unwrap();
|
||||
terminal_entity.read_with(cx, |term, _cx| {
|
||||
term.output().is_none() // output is None means it's still running
|
||||
})
|
||||
});
|
||||
assert!(
|
||||
terminal_running_before,
|
||||
"Terminal should be running before checkpoint restore"
|
||||
);
|
||||
|
||||
// Verify we have the expected entries before restore
|
||||
let entry_count_before = thread.read_with(cx, |thread, _| thread.entries.len());
|
||||
assert!(
|
||||
entry_count_before > 1,
|
||||
"Should have multiple entries before restore"
|
||||
);
|
||||
|
||||
// Restore the checkpoint to the second message.
|
||||
// This should:
|
||||
// 1. Cancel any in-progress generation (via the cancel() call)
|
||||
// 2. Remove the terminal that was created after that point
|
||||
thread
|
||||
.update(cx, |thread, cx| {
|
||||
thread.restore_checkpoint(second_message_id, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// Verify that no send_task is in progress after restore
|
||||
// (cancel() clears the send_task)
|
||||
let has_send_task_after = thread.read_with(cx, |thread, _| thread.send_task.is_some());
|
||||
assert!(
|
||||
!has_send_task_after,
|
||||
"Should not have a send_task after restore (cancel should have cleared it)"
|
||||
);
|
||||
|
||||
// Verify the entries were truncated (restoring to index 1 truncates at 1, keeping only index 0)
|
||||
let entry_count = thread.read_with(cx, |thread, _| thread.entries.len());
|
||||
assert_eq!(
|
||||
entry_count, 1,
|
||||
"Should have 1 entry after restore (only the first user message)"
|
||||
);
|
||||
|
||||
// Verify the 2 completed terminals from before the checkpoint still exist
|
||||
let terminal_1_exists = thread.read_with(cx, |thread, _| {
|
||||
thread.terminals.contains_key(&terminal_id_1)
|
||||
});
|
||||
assert!(
|
||||
terminal_1_exists,
|
||||
"Terminal 1 (from before checkpoint) should still exist"
|
||||
);
|
||||
|
||||
let terminal_2_exists = thread.read_with(cx, |thread, _| {
|
||||
thread.terminals.contains_key(&terminal_id_2)
|
||||
});
|
||||
assert!(
|
||||
terminal_2_exists,
|
||||
"Terminal 2 (from before checkpoint) should still exist"
|
||||
);
|
||||
|
||||
// Verify they're still in completed state
|
||||
let terminal_1_completed = thread.read_with(cx, |thread, _cx| {
|
||||
let terminal_entity = thread.terminals.get(&terminal_id_1).unwrap();
|
||||
terminal_entity.read_with(cx, |term, _cx| term.output().is_some())
|
||||
});
|
||||
assert!(terminal_1_completed, "Terminal 1 should still be completed");
|
||||
|
||||
let terminal_2_completed = thread.read_with(cx, |thread, _cx| {
|
||||
let terminal_entity = thread.terminals.get(&terminal_id_2).unwrap();
|
||||
terminal_entity.read_with(cx, |term, _cx| term.output().is_some())
|
||||
});
|
||||
assert!(terminal_2_completed, "Terminal 2 should still be completed");
|
||||
|
||||
// Verify the running terminal (created after checkpoint) was removed
|
||||
let terminal_3_exists =
|
||||
thread.read_with(cx, |thread, _| thread.terminals.contains_key(&terminal_id));
|
||||
assert!(
|
||||
!terminal_3_exists,
|
||||
"Terminal 3 (created after checkpoint) should have been removed"
|
||||
);
|
||||
|
||||
// Verify total count is 2 (the two from before the checkpoint)
|
||||
let terminal_count = thread.read_with(cx, |thread, _| thread.terminals.len());
|
||||
assert_eq!(
|
||||
terminal_count, 2,
|
||||
"Should have exactly 2 terminals (the completed ones from before checkpoint)"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -17,6 +17,7 @@ anyhow.workspace = true
|
||||
auto_update.workspace = true
|
||||
editor.workspace = true
|
||||
extension_host.workspace = true
|
||||
fs.workspace = true
|
||||
futures.workspace = true
|
||||
gpui.workspace = true
|
||||
language.workspace = true
|
||||
|
||||
@@ -51,6 +51,7 @@ pub struct ActivityIndicator {
|
||||
project: Entity<Project>,
|
||||
auto_updater: Option<Entity<AutoUpdater>>,
|
||||
context_menu_handle: PopoverMenuHandle<ContextMenu>,
|
||||
fs_jobs: Vec<fs::JobInfo>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
@@ -99,6 +100,27 @@ impl ActivityIndicator {
|
||||
})
|
||||
.detach();
|
||||
|
||||
let fs = project.read(cx).fs().clone();
|
||||
let mut job_events = fs.subscribe_to_jobs();
|
||||
cx.spawn(async move |this, cx| {
|
||||
while let Some(job_event) = job_events.next().await {
|
||||
this.update(cx, |this: &mut ActivityIndicator, cx| {
|
||||
match job_event {
|
||||
fs::JobEvent::Started { info } => {
|
||||
this.fs_jobs.retain(|j| j.id != info.id);
|
||||
this.fs_jobs.push(info);
|
||||
}
|
||||
fs::JobEvent::Completed { id } => {
|
||||
this.fs_jobs.retain(|j| j.id != id);
|
||||
}
|
||||
}
|
||||
cx.notify();
|
||||
})?;
|
||||
}
|
||||
anyhow::Ok(())
|
||||
})
|
||||
.detach();
|
||||
|
||||
cx.subscribe(
|
||||
&project.read(cx).lsp_store(),
|
||||
|activity_indicator, _, event, cx| {
|
||||
@@ -201,7 +223,8 @@ impl ActivityIndicator {
|
||||
statuses: Vec::new(),
|
||||
project: project.clone(),
|
||||
auto_updater,
|
||||
context_menu_handle: Default::default(),
|
||||
context_menu_handle: PopoverMenuHandle::default(),
|
||||
fs_jobs: Vec::new(),
|
||||
}
|
||||
});
|
||||
|
||||
@@ -432,6 +455,23 @@ impl ActivityIndicator {
|
||||
});
|
||||
}
|
||||
|
||||
// Show any long-running fs command
|
||||
for fs_job in &self.fs_jobs {
|
||||
if Instant::now().duration_since(fs_job.start) >= GIT_OPERATION_DELAY {
|
||||
return Some(Content {
|
||||
icon: Some(
|
||||
Icon::new(IconName::ArrowCircle)
|
||||
.size(IconSize::Small)
|
||||
.with_rotate_animation(2)
|
||||
.into_any_element(),
|
||||
),
|
||||
message: fs_job.message.clone().into(),
|
||||
on_click: None,
|
||||
tooltip_message: None,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Show any language server installation info.
|
||||
let mut downloading = SmallVec::<[_; 3]>::new();
|
||||
let mut checking_for_update = SmallVec::<[_; 3]>::new();
|
||||
|
||||
@@ -133,9 +133,7 @@ impl LanguageModels {
|
||||
for model in provider.provided_models(cx) {
|
||||
let model_info = Self::map_language_model_to_info(&model, &provider);
|
||||
let model_id = model_info.id.clone();
|
||||
if !recommended_models.contains(&(model.provider_id(), model.id())) {
|
||||
provider_models.push(model_info);
|
||||
}
|
||||
provider_models.push(model_info);
|
||||
models.insert(model_id, model);
|
||||
}
|
||||
if !provider_models.is_empty() {
|
||||
|
||||
@@ -150,6 +150,7 @@ impl DbThread {
|
||||
.unwrap_or_default(),
|
||||
input: tool_use.input,
|
||||
is_input_complete: true,
|
||||
thought_signature: None,
|
||||
},
|
||||
));
|
||||
}
|
||||
|
||||
@@ -15,12 +15,14 @@ const SEPARATOR_MARKER: &str = "=======";
|
||||
const REPLACE_MARKER: &str = ">>>>>>> REPLACE";
|
||||
const SONNET_PARAMETER_INVOKE_1: &str = "</parameter>\n</invoke>";
|
||||
const SONNET_PARAMETER_INVOKE_2: &str = "</parameter></invoke>";
|
||||
const END_TAGS: [&str; 5] = [
|
||||
const SONNET_PARAMETER_INVOKE_3: &str = "</parameter>";
|
||||
const END_TAGS: [&str; 6] = [
|
||||
OLD_TEXT_END_TAG,
|
||||
NEW_TEXT_END_TAG,
|
||||
EDITS_END_TAG,
|
||||
SONNET_PARAMETER_INVOKE_1, // Remove this after switching to streaming tool call
|
||||
SONNET_PARAMETER_INVOKE_1, // Remove these after switching to streaming tool call
|
||||
SONNET_PARAMETER_INVOKE_2,
|
||||
SONNET_PARAMETER_INVOKE_3,
|
||||
];
|
||||
|
||||
#[derive(Debug)]
|
||||
@@ -567,21 +569,29 @@ mod tests {
|
||||
parse_random_chunks(
|
||||
indoc! {"
|
||||
<old_text>some text</old_text><new_text>updated text</parameter></invoke>
|
||||
<old_text>more text</old_text><new_text>upd</parameter></new_text>
|
||||
"},
|
||||
&mut parser,
|
||||
&mut rng
|
||||
),
|
||||
vec![Edit {
|
||||
old_text: "some text".to_string(),
|
||||
new_text: "updated text".to_string(),
|
||||
line_hint: None,
|
||||
},]
|
||||
vec![
|
||||
Edit {
|
||||
old_text: "some text".to_string(),
|
||||
new_text: "updated text".to_string(),
|
||||
line_hint: None,
|
||||
},
|
||||
Edit {
|
||||
old_text: "more text".to_string(),
|
||||
new_text: "upd".to_string(),
|
||||
line_hint: None,
|
||||
},
|
||||
]
|
||||
);
|
||||
assert_eq!(
|
||||
parser.finish(),
|
||||
EditParserMetrics {
|
||||
tags: 2,
|
||||
mismatched_tags: 1
|
||||
tags: 4,
|
||||
mismatched_tags: 2
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1108,6 +1108,7 @@ fn tool_use(
|
||||
raw_input: serde_json::to_string_pretty(&input).unwrap(),
|
||||
input: serde_json::to_value(input).unwrap(),
|
||||
is_input_complete: true,
|
||||
thought_signature: None,
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
@@ -44,6 +44,25 @@ pub async fn get_buffer_content_or_outline(
|
||||
.collect::<Vec<_>>()
|
||||
})?;
|
||||
|
||||
// If no outline exists, fall back to first 1KB so the agent has some context
|
||||
if outline_items.is_empty() {
|
||||
let text = buffer.read_with(cx, |buffer, _| {
|
||||
let snapshot = buffer.snapshot();
|
||||
let len = snapshot.len().min(1024);
|
||||
let content = snapshot.text_for_range(0..len).collect::<String>();
|
||||
if let Some(path) = path {
|
||||
format!("# First 1KB of {path} (file too large to show full content, and no outline available)\n\n{content}")
|
||||
} else {
|
||||
format!("# First 1KB of file (file too large to show full content, and no outline available)\n\n{content}")
|
||||
}
|
||||
})?;
|
||||
|
||||
return Ok(BufferContent {
|
||||
text,
|
||||
is_outline: false,
|
||||
});
|
||||
}
|
||||
|
||||
let outline_text = render_outline(outline_items, None, 0, usize::MAX).await?;
|
||||
|
||||
let text = if let Some(path) = path {
|
||||
@@ -140,3 +159,62 @@ fn render_entries(
|
||||
|
||||
entries_rendered
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use fs::FakeFs;
|
||||
use gpui::TestAppContext;
|
||||
use project::Project;
|
||||
use settings::SettingsStore;
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_large_file_fallback_to_subset(cx: &mut TestAppContext) {
|
||||
cx.update(|cx| {
|
||||
let settings = SettingsStore::test(cx);
|
||||
cx.set_global(settings);
|
||||
});
|
||||
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
let project = Project::test(fs, [], cx).await;
|
||||
|
||||
let content = "A".repeat(100 * 1024); // 100KB
|
||||
let content_len = content.len();
|
||||
let buffer = project
|
||||
.update(cx, |project, cx| project.create_buffer(true, cx))
|
||||
.await
|
||||
.expect("failed to create buffer");
|
||||
|
||||
buffer.update(cx, |buffer, cx| buffer.set_text(content, cx));
|
||||
|
||||
let result = cx
|
||||
.spawn(|cx| async move { get_buffer_content_or_outline(buffer, None, &cx).await })
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// Should contain some of the actual file content
|
||||
assert!(
|
||||
result.text.contains("AAAAAAAAAA"),
|
||||
"Result did not contain content subset"
|
||||
);
|
||||
|
||||
// Should be marked as not an outline (it's truncated content)
|
||||
assert!(
|
||||
!result.is_outline,
|
||||
"Large file without outline should not be marked as outline"
|
||||
);
|
||||
|
||||
// Should be reasonably sized (much smaller than original)
|
||||
assert!(
|
||||
result.text.len() < 50 * 1024,
|
||||
"Result size {} should be smaller than 50KB",
|
||||
result.text.len()
|
||||
);
|
||||
|
||||
// Should be significantly smaller than the original content
|
||||
assert!(
|
||||
result.text.len() < content_len / 10,
|
||||
"Result should be much smaller than original content"
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -274,6 +274,7 @@ async fn test_prompt_caching(cx: &mut TestAppContext) {
|
||||
raw_input: json!({"text": "test"}).to_string(),
|
||||
input: json!({"text": "test"}),
|
||||
is_input_complete: true,
|
||||
thought_signature: None,
|
||||
};
|
||||
fake_model
|
||||
.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(tool_use.clone()));
|
||||
@@ -461,6 +462,7 @@ async fn test_tool_authorization(cx: &mut TestAppContext) {
|
||||
raw_input: "{}".into(),
|
||||
input: json!({}),
|
||||
is_input_complete: true,
|
||||
thought_signature: None,
|
||||
},
|
||||
));
|
||||
fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(
|
||||
@@ -470,6 +472,7 @@ async fn test_tool_authorization(cx: &mut TestAppContext) {
|
||||
raw_input: "{}".into(),
|
||||
input: json!({}),
|
||||
is_input_complete: true,
|
||||
thought_signature: None,
|
||||
},
|
||||
));
|
||||
fake_model.end_last_completion_stream();
|
||||
@@ -520,6 +523,7 @@ async fn test_tool_authorization(cx: &mut TestAppContext) {
|
||||
raw_input: "{}".into(),
|
||||
input: json!({}),
|
||||
is_input_complete: true,
|
||||
thought_signature: None,
|
||||
},
|
||||
));
|
||||
fake_model.end_last_completion_stream();
|
||||
@@ -554,6 +558,7 @@ async fn test_tool_authorization(cx: &mut TestAppContext) {
|
||||
raw_input: "{}".into(),
|
||||
input: json!({}),
|
||||
is_input_complete: true,
|
||||
thought_signature: None,
|
||||
},
|
||||
));
|
||||
fake_model.end_last_completion_stream();
|
||||
@@ -592,6 +597,7 @@ async fn test_tool_hallucination(cx: &mut TestAppContext) {
|
||||
raw_input: "{}".into(),
|
||||
input: json!({}),
|
||||
is_input_complete: true,
|
||||
thought_signature: None,
|
||||
},
|
||||
));
|
||||
fake_model.end_last_completion_stream();
|
||||
@@ -621,6 +627,7 @@ async fn test_resume_after_tool_use_limit(cx: &mut TestAppContext) {
|
||||
raw_input: "{}".into(),
|
||||
input: serde_json::to_value(&EchoToolInput { text: "def".into() }).unwrap(),
|
||||
is_input_complete: true,
|
||||
thought_signature: None,
|
||||
};
|
||||
fake_model
|
||||
.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(tool_use.clone()));
|
||||
@@ -731,6 +738,7 @@ async fn test_send_after_tool_use_limit(cx: &mut TestAppContext) {
|
||||
raw_input: "{}".into(),
|
||||
input: serde_json::to_value(&EchoToolInput { text: "def".into() }).unwrap(),
|
||||
is_input_complete: true,
|
||||
thought_signature: None,
|
||||
};
|
||||
let tool_result = LanguageModelToolResult {
|
||||
tool_use_id: "tool_id_1".into(),
|
||||
@@ -933,7 +941,7 @@ async fn test_profiles(cx: &mut TestAppContext) {
|
||||
// Test that test-1 profile (default) has echo and delay tools
|
||||
thread
|
||||
.update(cx, |thread, cx| {
|
||||
thread.set_profile(AgentProfileId("test-1".into()));
|
||||
thread.set_profile(AgentProfileId("test-1".into()), cx);
|
||||
thread.send(UserMessageId::new(), ["test"], cx)
|
||||
})
|
||||
.unwrap();
|
||||
@@ -953,7 +961,7 @@ async fn test_profiles(cx: &mut TestAppContext) {
|
||||
// Switch to test-2 profile, and verify that it has only the infinite tool.
|
||||
thread
|
||||
.update(cx, |thread, cx| {
|
||||
thread.set_profile(AgentProfileId("test-2".into()));
|
||||
thread.set_profile(AgentProfileId("test-2".into()), cx);
|
||||
thread.send(UserMessageId::new(), ["test2"], cx)
|
||||
})
|
||||
.unwrap();
|
||||
@@ -1002,8 +1010,8 @@ async fn test_mcp_tools(cx: &mut TestAppContext) {
|
||||
)
|
||||
.await;
|
||||
cx.run_until_parked();
|
||||
thread.update(cx, |thread, _| {
|
||||
thread.set_profile(AgentProfileId("test".into()))
|
||||
thread.update(cx, |thread, cx| {
|
||||
thread.set_profile(AgentProfileId("test".into()), cx)
|
||||
});
|
||||
|
||||
let mut mcp_tool_calls = setup_context_server(
|
||||
@@ -1037,6 +1045,7 @@ async fn test_mcp_tools(cx: &mut TestAppContext) {
|
||||
raw_input: json!({"text": "test"}).to_string(),
|
||||
input: json!({"text": "test"}),
|
||||
is_input_complete: true,
|
||||
thought_signature: None,
|
||||
},
|
||||
));
|
||||
fake_model.end_last_completion_stream();
|
||||
@@ -1080,6 +1089,7 @@ async fn test_mcp_tools(cx: &mut TestAppContext) {
|
||||
raw_input: json!({"text": "mcp"}).to_string(),
|
||||
input: json!({"text": "mcp"}),
|
||||
is_input_complete: true,
|
||||
thought_signature: None,
|
||||
},
|
||||
));
|
||||
fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(
|
||||
@@ -1089,6 +1099,7 @@ async fn test_mcp_tools(cx: &mut TestAppContext) {
|
||||
raw_input: json!({"text": "native"}).to_string(),
|
||||
input: json!({"text": "native"}),
|
||||
is_input_complete: true,
|
||||
thought_signature: None,
|
||||
},
|
||||
));
|
||||
fake_model.end_last_completion_stream();
|
||||
@@ -1169,8 +1180,8 @@ async fn test_mcp_tool_truncation(cx: &mut TestAppContext) {
|
||||
.await;
|
||||
cx.run_until_parked();
|
||||
|
||||
thread.update(cx, |thread, _| {
|
||||
thread.set_profile(AgentProfileId("test".into()));
|
||||
thread.update(cx, |thread, cx| {
|
||||
thread.set_profile(AgentProfileId("test".into()), cx);
|
||||
thread.add_tool(EchoTool);
|
||||
thread.add_tool(DelayTool);
|
||||
thread.add_tool(WordListTool);
|
||||
@@ -1788,6 +1799,7 @@ async fn test_building_request_with_pending_tools(cx: &mut TestAppContext) {
|
||||
raw_input: "{}".into(),
|
||||
input: json!({}),
|
||||
is_input_complete: true,
|
||||
thought_signature: None,
|
||||
};
|
||||
let echo_tool_use = LanguageModelToolUse {
|
||||
id: "tool_id_2".into(),
|
||||
@@ -1795,6 +1807,7 @@ async fn test_building_request_with_pending_tools(cx: &mut TestAppContext) {
|
||||
raw_input: json!({"text": "test"}).to_string(),
|
||||
input: json!({"text": "test"}),
|
||||
is_input_complete: true,
|
||||
thought_signature: None,
|
||||
};
|
||||
fake_model.send_last_completion_stream_text_chunk("Hi!");
|
||||
fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(
|
||||
@@ -2000,6 +2013,7 @@ async fn test_tool_updates_to_completion(cx: &mut TestAppContext) {
|
||||
raw_input: input.to_string(),
|
||||
input,
|
||||
is_input_complete: false,
|
||||
thought_signature: None,
|
||||
},
|
||||
));
|
||||
|
||||
@@ -2012,6 +2026,7 @@ async fn test_tool_updates_to_completion(cx: &mut TestAppContext) {
|
||||
raw_input: input.to_string(),
|
||||
input,
|
||||
is_input_complete: true,
|
||||
thought_signature: None,
|
||||
},
|
||||
));
|
||||
fake_model.end_last_completion_stream();
|
||||
@@ -2214,6 +2229,7 @@ async fn test_send_retry_finishes_tool_calls_on_error(cx: &mut TestAppContext) {
|
||||
raw_input: json!({"text": "test"}).to_string(),
|
||||
input: json!({"text": "test"}),
|
||||
is_input_complete: true,
|
||||
thought_signature: None,
|
||||
};
|
||||
fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(
|
||||
tool_use_1.clone(),
|
||||
|
||||
@@ -30,16 +30,17 @@ use gpui::{
|
||||
};
|
||||
use language_model::{
|
||||
LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent, LanguageModelExt,
|
||||
LanguageModelImage, LanguageModelProviderId, LanguageModelRegistry, LanguageModelRequest,
|
||||
LanguageModelRequestMessage, LanguageModelRequestTool, LanguageModelToolResult,
|
||||
LanguageModelToolResultContent, LanguageModelToolSchemaFormat, LanguageModelToolUse,
|
||||
LanguageModelToolUseId, Role, SelectedModel, StopReason, TokenUsage, ZED_CLOUD_PROVIDER_ID,
|
||||
LanguageModelId, LanguageModelImage, LanguageModelProviderId, LanguageModelRegistry,
|
||||
LanguageModelRequest, LanguageModelRequestMessage, LanguageModelRequestTool,
|
||||
LanguageModelToolResult, LanguageModelToolResultContent, LanguageModelToolSchemaFormat,
|
||||
LanguageModelToolUse, LanguageModelToolUseId, Role, SelectedModel, StopReason, TokenUsage,
|
||||
ZED_CLOUD_PROVIDER_ID,
|
||||
};
|
||||
use project::Project;
|
||||
use prompt_store::ProjectContext;
|
||||
use schemars::{JsonSchema, Schema};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::{Settings, update_settings_file};
|
||||
use settings::{LanguageModelSelection, Settings, update_settings_file};
|
||||
use smol::stream::StreamExt;
|
||||
use std::{
|
||||
collections::BTreeMap,
|
||||
@@ -606,6 +607,8 @@ pub struct Thread {
|
||||
pub(crate) prompt_capabilities_rx: watch::Receiver<acp::PromptCapabilities>,
|
||||
pub(crate) project: Entity<Project>,
|
||||
pub(crate) action_log: Entity<ActionLog>,
|
||||
/// Tracks the last time files were read by the agent, to detect external modifications
|
||||
pub(crate) file_read_times: HashMap<PathBuf, fs::MTime>,
|
||||
}
|
||||
|
||||
impl Thread {
|
||||
@@ -664,6 +667,7 @@ impl Thread {
|
||||
prompt_capabilities_rx,
|
||||
project,
|
||||
action_log,
|
||||
file_read_times: HashMap::default(),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -798,7 +802,8 @@ impl Thread {
|
||||
let profile_id = db_thread
|
||||
.profile
|
||||
.unwrap_or_else(|| AgentSettings::get_global(cx).default_profile.clone());
|
||||
let model = LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
|
||||
|
||||
let mut model = LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
|
||||
db_thread
|
||||
.model
|
||||
.and_then(|model| {
|
||||
@@ -811,6 +816,16 @@ impl Thread {
|
||||
.or_else(|| registry.default_model())
|
||||
.map(|model| model.model)
|
||||
});
|
||||
|
||||
if model.is_none() {
|
||||
model = Self::resolve_profile_model(&profile_id, cx);
|
||||
}
|
||||
if model.is_none() {
|
||||
model = LanguageModelRegistry::global(cx).update(cx, |registry, _cx| {
|
||||
registry.default_model().map(|model| model.model)
|
||||
});
|
||||
}
|
||||
|
||||
let (prompt_capabilities_tx, prompt_capabilities_rx) =
|
||||
watch::channel(Self::prompt_capabilities(model.as_deref()));
|
||||
|
||||
@@ -848,6 +863,7 @@ impl Thread {
|
||||
updated_at: db_thread.updated_at,
|
||||
prompt_capabilities_tx,
|
||||
prompt_capabilities_rx,
|
||||
file_read_times: HashMap::default(),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -987,6 +1003,7 @@ impl Thread {
|
||||
self.add_tool(NowTool);
|
||||
self.add_tool(OpenTool::new(self.project.clone()));
|
||||
self.add_tool(ReadFileTool::new(
|
||||
cx.weak_entity(),
|
||||
self.project.clone(),
|
||||
self.action_log.clone(),
|
||||
));
|
||||
@@ -1007,8 +1024,17 @@ impl Thread {
|
||||
&self.profile_id
|
||||
}
|
||||
|
||||
pub fn set_profile(&mut self, profile_id: AgentProfileId) {
|
||||
pub fn set_profile(&mut self, profile_id: AgentProfileId, cx: &mut Context<Self>) {
|
||||
if self.profile_id == profile_id {
|
||||
return;
|
||||
}
|
||||
|
||||
self.profile_id = profile_id;
|
||||
|
||||
// Swap to the profile's preferred model when available.
|
||||
if let Some(model) = Self::resolve_profile_model(&self.profile_id, cx) {
|
||||
self.set_model(model, cx);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn cancel(&mut self, cx: &mut Context<Self>) {
|
||||
@@ -1065,6 +1091,35 @@ impl Thread {
|
||||
})
|
||||
}
|
||||
|
||||
/// Look up the active profile and resolve its preferred model if one is configured.
|
||||
fn resolve_profile_model(
|
||||
profile_id: &AgentProfileId,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Option<Arc<dyn LanguageModel>> {
|
||||
let selection = AgentSettings::get_global(cx)
|
||||
.profiles
|
||||
.get(profile_id)?
|
||||
.default_model
|
||||
.clone()?;
|
||||
Self::resolve_model_from_selection(&selection, cx)
|
||||
}
|
||||
|
||||
/// Translate a stored model selection into the configured model from the registry.
|
||||
fn resolve_model_from_selection(
|
||||
selection: &LanguageModelSelection,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Option<Arc<dyn LanguageModel>> {
|
||||
let selected = SelectedModel {
|
||||
provider: LanguageModelProviderId::from(selection.provider.0.clone()),
|
||||
model: LanguageModelId::from(selection.model.clone()),
|
||||
};
|
||||
LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
|
||||
registry
|
||||
.select_model(&selected, cx)
|
||||
.map(|configured| configured.model)
|
||||
})
|
||||
}
|
||||
|
||||
pub fn resume(
|
||||
&mut self,
|
||||
cx: &mut Context<Self>,
|
||||
|
||||
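The hunks above give a restored thread a model fallback chain: the model stored with the thread, then the active profile's default_model, then the registry default. Below is a condensed sketch of that chain, not part of the diff; it assumes it lives in the same module as Thread (so the private helper is visible) and flattens the order slightly, since the diff itself consults the registry default twice.

// Sketch only: stored thread model -> profile default_model -> registry default.
fn pick_model(
    stored: Option<Arc<dyn LanguageModel>>,
    profile_id: &AgentProfileId,
    cx: &mut Context<Thread>,
) -> Option<Arc<dyn LanguageModel>> {
    if let Some(model) = stored {
        return Some(model);
    }
    if let Some(model) = Thread::resolve_profile_model(profile_id, cx) {
        return Some(model);
    }
    // Fall back to whatever the registry considers the default model.
    LanguageModelRegistry::global(cx)
        .update(cx, |registry, _cx| registry.default_model().map(|m| m.model))
}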
@@ -309,6 +309,40 @@ impl AgentTool for EditFileTool {
|
||||
})?
|
||||
.await?;
|
||||
|
||||
// Check if the file has been modified since the agent last read it
|
||||
if let Some(abs_path) = abs_path.as_ref() {
|
||||
let (last_read_mtime, current_mtime, is_dirty) = self.thread.update(cx, |thread, cx| {
|
||||
let last_read = thread.file_read_times.get(abs_path).copied();
|
||||
let current = buffer.read(cx).file().and_then(|file| file.disk_state().mtime());
|
||||
let dirty = buffer.read(cx).is_dirty();
|
||||
(last_read, current, dirty)
|
||||
})?;
|
||||
|
||||
// Check for unsaved changes first - these indicate modifications we don't know about
|
||||
if is_dirty {
|
||||
anyhow::bail!(
|
||||
"This file cannot be written to because it has unsaved changes. \
|
||||
Please end the current conversation immediately by telling the user you want to write to this file (mention its path explicitly) but you can't write to it because it has unsaved changes. \
|
||||
Ask the user to save that buffer's changes and to inform you when it's ok to proceed."
|
||||
);
|
||||
}
|
||||
|
||||
// Check if the file was modified on disk since we last read it
if let (Some(last_read), Some(current)) = (last_read_mtime, current_mtime) {
// MTime can be unreliable for comparisons, so our newtype intentionally
// doesn't support comparing them. If the mtime is at all different
// (which could be because of a modification or because e.g. the system clock changed),
// we pessimistically assume it was modified.
if current != last_read {
anyhow::bail!(
"The file {} has been modified since you last read it. \
Please read the file again to get the current state before editing it.",
input.path.display()
);
}
}
}
|
||||
|
||||
let diff = cx.new(|cx| Diff::new(buffer.clone(), cx))?;
|
||||
event_stream.update_diff(diff.clone());
|
||||
let _finalize_diff = util::defer({
|
||||
@@ -421,6 +455,17 @@ impl AgentTool for EditFileTool {
|
||||
log.buffer_edited(buffer.clone(), cx);
|
||||
})?;
|
||||
|
||||
// Update the recorded read time after a successful edit so consecutive edits work
|
||||
if let Some(abs_path) = abs_path.as_ref() {
|
||||
if let Some(new_mtime) = buffer.read_with(cx, |buffer, _| {
|
||||
buffer.file().and_then(|file| file.disk_state().mtime())
|
||||
})? {
|
||||
self.thread.update(cx, |thread, _| {
|
||||
thread.file_read_times.insert(abs_path.to_path_buf(), new_mtime);
|
||||
})?;
|
||||
}
|
||||
}
|
||||
|
||||
let new_snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?;
|
||||
let (new_text, unified_diff) = cx
|
||||
.background_spawn({
|
||||
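Together with the ReadFileTool change later in this diff, these hunks implement a small staleness protocol: reading a file records its on-disk mtime, editing refuses a dirty buffer or a changed mtime, and a successful edit re-records the new mtime. A standalone sketch of the comparison itself, using a stand-in Mtime type since fs::MTime only needs Copy and PartialEq here:

use std::collections::HashMap;
use std::path::{Path, PathBuf};

// Stand-in for the fs::MTime newtype used in the diff: Copy + PartialEq,
// deliberately without Ord.
#[derive(Copy, Clone, PartialEq, Eq)]
struct Mtime(u64);

/// Returns true when an edit should be refused because the file changed on
/// disk since it was last read. Any difference counts, not just "newer".
fn file_is_stale(
    read_times: &HashMap<PathBuf, Mtime>,
    path: &Path,
    current: Option<Mtime>,
) -> bool {
    match (read_times.get(path).copied(), current) {
        (Some(recorded), Some(current)) => recorded != current,
        // Nothing recorded or nothing on disk: no basis for refusing the edit.
        _ => false,
    }
}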
@@ -1748,10 +1793,426 @@ mod tests {
|
||||
}
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_file_read_times_tracking(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let fs = project::FakeFs::new(cx.executor());
|
||||
fs.insert_tree(
|
||||
"/root",
|
||||
json!({
|
||||
"test.txt": "original content"
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
|
||||
let context_server_registry =
|
||||
cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
|
||||
let model = Arc::new(FakeLanguageModel::default());
|
||||
let thread = cx.new(|cx| {
|
||||
Thread::new(
|
||||
project.clone(),
|
||||
cx.new(|_cx| ProjectContext::default()),
|
||||
context_server_registry,
|
||||
Templates::new(),
|
||||
Some(model.clone()),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone());
|
||||
|
||||
// Initially, file_read_times should be empty
|
||||
let is_empty = thread.read_with(cx, |thread, _| thread.file_read_times.is_empty());
|
||||
assert!(is_empty, "file_read_times should start empty");
|
||||
|
||||
// Create read tool
|
||||
let read_tool = Arc::new(crate::ReadFileTool::new(
|
||||
thread.downgrade(),
|
||||
project.clone(),
|
||||
action_log,
|
||||
));
|
||||
|
||||
// Read the file to record the read time
|
||||
cx.update(|cx| {
|
||||
read_tool.clone().run(
|
||||
crate::ReadFileToolInput {
|
||||
path: "root/test.txt".to_string(),
|
||||
start_line: None,
|
||||
end_line: None,
|
||||
},
|
||||
ToolCallEventStream::test().0,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// Verify that file_read_times now contains an entry for the file
|
||||
let has_entry = thread.read_with(cx, |thread, _| {
|
||||
thread.file_read_times.len() == 1
|
||||
&& thread
|
||||
.file_read_times
|
||||
.keys()
|
||||
.any(|path| path.ends_with("test.txt"))
|
||||
});
|
||||
assert!(
|
||||
has_entry,
|
||||
"file_read_times should contain an entry after reading the file"
|
||||
);
|
||||
|
||||
// Read the file again - should update the entry
|
||||
cx.update(|cx| {
|
||||
read_tool.clone().run(
|
||||
crate::ReadFileToolInput {
|
||||
path: "root/test.txt".to_string(),
|
||||
start_line: None,
|
||||
end_line: None,
|
||||
},
|
||||
ToolCallEventStream::test().0,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// Should still have exactly one entry
|
||||
let has_one_entry = thread.read_with(cx, |thread, _| thread.file_read_times.len() == 1);
|
||||
assert!(
|
||||
has_one_entry,
|
||||
"file_read_times should still have one entry after re-reading"
|
||||
);
|
||||
}
|
||||
|
||||
fn init_test(cx: &mut TestAppContext) {
|
||||
cx.update(|cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_consecutive_edits_work(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let fs = project::FakeFs::new(cx.executor());
|
||||
fs.insert_tree(
|
||||
"/root",
|
||||
json!({
|
||||
"test.txt": "original content"
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
|
||||
let context_server_registry =
|
||||
cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
|
||||
let model = Arc::new(FakeLanguageModel::default());
|
||||
let thread = cx.new(|cx| {
|
||||
Thread::new(
|
||||
project.clone(),
|
||||
cx.new(|_cx| ProjectContext::default()),
|
||||
context_server_registry,
|
||||
Templates::new(),
|
||||
Some(model.clone()),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
let languages = project.read_with(cx, |project, _| project.languages().clone());
|
||||
let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone());
|
||||
|
||||
let read_tool = Arc::new(crate::ReadFileTool::new(
|
||||
thread.downgrade(),
|
||||
project.clone(),
|
||||
action_log,
|
||||
));
|
||||
let edit_tool = Arc::new(EditFileTool::new(
|
||||
project.clone(),
|
||||
thread.downgrade(),
|
||||
languages,
|
||||
Templates::new(),
|
||||
));
|
||||
|
||||
// Read the file first
|
||||
cx.update(|cx| {
|
||||
read_tool.clone().run(
|
||||
crate::ReadFileToolInput {
|
||||
path: "root/test.txt".to_string(),
|
||||
start_line: None,
|
||||
end_line: None,
|
||||
},
|
||||
ToolCallEventStream::test().0,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// First edit should work
|
||||
let edit_result = {
|
||||
let edit_task = cx.update(|cx| {
|
||||
edit_tool.clone().run(
|
||||
EditFileToolInput {
|
||||
display_description: "First edit".into(),
|
||||
path: "root/test.txt".into(),
|
||||
mode: EditFileMode::Edit,
|
||||
},
|
||||
ToolCallEventStream::test().0,
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
cx.executor().run_until_parked();
|
||||
model.send_last_completion_stream_text_chunk(
|
||||
"<old_text>original content</old_text><new_text>modified content</new_text>"
|
||||
.to_string(),
|
||||
);
|
||||
model.end_last_completion_stream();
|
||||
|
||||
edit_task.await
|
||||
};
|
||||
assert!(
|
||||
edit_result.is_ok(),
|
||||
"First edit should succeed, got error: {:?}",
|
||||
edit_result.as_ref().err()
|
||||
);
|
||||
|
||||
// Second edit should also work because the edit updated the recorded read time
|
||||
let edit_result = {
|
||||
let edit_task = cx.update(|cx| {
|
||||
edit_tool.clone().run(
|
||||
EditFileToolInput {
|
||||
display_description: "Second edit".into(),
|
||||
path: "root/test.txt".into(),
|
||||
mode: EditFileMode::Edit,
|
||||
},
|
||||
ToolCallEventStream::test().0,
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
cx.executor().run_until_parked();
|
||||
model.send_last_completion_stream_text_chunk(
|
||||
"<old_text>modified content</old_text><new_text>further modified content</new_text>".to_string(),
|
||||
);
|
||||
model.end_last_completion_stream();
|
||||
|
||||
edit_task.await
|
||||
};
|
||||
assert!(
|
||||
edit_result.is_ok(),
|
||||
"Second consecutive edit should succeed, got error: {:?}",
|
||||
edit_result.as_ref().err()
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_external_modification_detected(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let fs = project::FakeFs::new(cx.executor());
|
||||
fs.insert_tree(
|
||||
"/root",
|
||||
json!({
|
||||
"test.txt": "original content"
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
|
||||
let context_server_registry =
|
||||
cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
|
||||
let model = Arc::new(FakeLanguageModel::default());
|
||||
let thread = cx.new(|cx| {
|
||||
Thread::new(
|
||||
project.clone(),
|
||||
cx.new(|_cx| ProjectContext::default()),
|
||||
context_server_registry,
|
||||
Templates::new(),
|
||||
Some(model.clone()),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
let languages = project.read_with(cx, |project, _| project.languages().clone());
|
||||
let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone());
|
||||
|
||||
let read_tool = Arc::new(crate::ReadFileTool::new(
|
||||
thread.downgrade(),
|
||||
project.clone(),
|
||||
action_log,
|
||||
));
|
||||
let edit_tool = Arc::new(EditFileTool::new(
|
||||
project.clone(),
|
||||
thread.downgrade(),
|
||||
languages,
|
||||
Templates::new(),
|
||||
));
|
||||
|
||||
// Read the file first
|
||||
cx.update(|cx| {
|
||||
read_tool.clone().run(
|
||||
crate::ReadFileToolInput {
|
||||
path: "root/test.txt".to_string(),
|
||||
start_line: None,
|
||||
end_line: None,
|
||||
},
|
||||
ToolCallEventStream::test().0,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// Simulate external modification - advance time and save file
|
||||
cx.background_executor
|
||||
.advance_clock(std::time::Duration::from_secs(2));
|
||||
fs.save(
|
||||
path!("/root/test.txt").as_ref(),
|
||||
&"externally modified content".into(),
|
||||
language::LineEnding::Unix,
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// Reload the buffer to pick up the new mtime
|
||||
let project_path = project
|
||||
.read_with(cx, |project, cx| {
|
||||
project.find_project_path("root/test.txt", cx)
|
||||
})
|
||||
.expect("Should find project path");
|
||||
let buffer = project
|
||||
.update(cx, |project, cx| project.open_buffer(project_path, cx))
|
||||
.await
|
||||
.unwrap();
|
||||
buffer
|
||||
.update(cx, |buffer, cx| buffer.reload(cx))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
cx.executor().run_until_parked();
|
||||
|
||||
// Try to edit - should fail because file was modified externally
|
||||
let result = cx
|
||||
.update(|cx| {
|
||||
edit_tool.clone().run(
|
||||
EditFileToolInput {
|
||||
display_description: "Edit after external change".into(),
|
||||
path: "root/test.txt".into(),
|
||||
mode: EditFileMode::Edit,
|
||||
},
|
||||
ToolCallEventStream::test().0,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await;
|
||||
|
||||
assert!(
|
||||
result.is_err(),
|
||||
"Edit should fail after external modification"
|
||||
);
|
||||
let error_msg = result.unwrap_err().to_string();
|
||||
assert!(
|
||||
error_msg.contains("has been modified since you last read it"),
|
||||
"Error should mention file modification, got: {}",
|
||||
error_msg
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_dirty_buffer_detected(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let fs = project::FakeFs::new(cx.executor());
|
||||
fs.insert_tree(
|
||||
"/root",
|
||||
json!({
|
||||
"test.txt": "original content"
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
|
||||
let context_server_registry =
|
||||
cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
|
||||
let model = Arc::new(FakeLanguageModel::default());
|
||||
let thread = cx.new(|cx| {
|
||||
Thread::new(
|
||||
project.clone(),
|
||||
cx.new(|_cx| ProjectContext::default()),
|
||||
context_server_registry,
|
||||
Templates::new(),
|
||||
Some(model.clone()),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
let languages = project.read_with(cx, |project, _| project.languages().clone());
|
||||
let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone());
|
||||
|
||||
let read_tool = Arc::new(crate::ReadFileTool::new(
|
||||
thread.downgrade(),
|
||||
project.clone(),
|
||||
action_log,
|
||||
));
|
||||
let edit_tool = Arc::new(EditFileTool::new(
|
||||
project.clone(),
|
||||
thread.downgrade(),
|
||||
languages,
|
||||
Templates::new(),
|
||||
));
|
||||
|
||||
// Read the file first
|
||||
cx.update(|cx| {
|
||||
read_tool.clone().run(
|
||||
crate::ReadFileToolInput {
|
||||
path: "root/test.txt".to_string(),
|
||||
start_line: None,
|
||||
end_line: None,
|
||||
},
|
||||
ToolCallEventStream::test().0,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// Open the buffer and make it dirty by editing without saving
|
||||
let project_path = project
|
||||
.read_with(cx, |project, cx| {
|
||||
project.find_project_path("root/test.txt", cx)
|
||||
})
|
||||
.expect("Should find project path");
|
||||
let buffer = project
|
||||
.update(cx, |project, cx| project.open_buffer(project_path, cx))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// Make an in-memory edit to the buffer (making it dirty)
|
||||
buffer.update(cx, |buffer, cx| {
|
||||
let end_point = buffer.max_point();
|
||||
buffer.edit([(end_point..end_point, " added text")], None, cx);
|
||||
});
|
||||
|
||||
// Verify buffer is dirty
|
||||
let is_dirty = buffer.read_with(cx, |buffer, _| buffer.is_dirty());
|
||||
assert!(is_dirty, "Buffer should be dirty after in-memory edit");
|
||||
|
||||
// Try to edit - should fail because buffer has unsaved changes
|
||||
let result = cx
|
||||
.update(|cx| {
|
||||
edit_tool.clone().run(
|
||||
EditFileToolInput {
|
||||
display_description: "Edit with dirty buffer".into(),
|
||||
path: "root/test.txt".into(),
|
||||
mode: EditFileMode::Edit,
|
||||
},
|
||||
ToolCallEventStream::test().0,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await;
|
||||
|
||||
assert!(result.is_err(), "Edit should fail when buffer is dirty");
|
||||
let error_msg = result.unwrap_err().to_string();
|
||||
assert!(
|
||||
error_msg.contains("cannot be written to because it has unsaved changes"),
|
||||
"Error should mention unsaved changes, got: {}",
|
||||
error_msg
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
use action_log::ActionLog;
|
||||
use agent_client_protocol::{self as acp, ToolCallUpdateFields};
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use gpui::{App, Entity, SharedString, Task};
|
||||
use gpui::{App, Entity, SharedString, Task, WeakEntity};
|
||||
use indoc::formatdoc;
|
||||
use language::Point;
|
||||
use language_model::{LanguageModelImage, LanguageModelToolResultContent};
|
||||
@@ -12,7 +12,7 @@ use settings::Settings;
|
||||
use std::sync::Arc;
|
||||
use util::markdown::MarkdownCodeBlock;
|
||||
|
||||
use crate::{AgentTool, ToolCallEventStream, outline};
|
||||
use crate::{AgentTool, Thread, ToolCallEventStream, outline};
|
||||
|
||||
/// Reads the content of the given file in the project.
|
||||
///
|
||||
@@ -42,13 +42,19 @@ pub struct ReadFileToolInput {
|
||||
}
|
||||
|
||||
pub struct ReadFileTool {
|
||||
thread: WeakEntity<Thread>,
|
||||
project: Entity<Project>,
|
||||
action_log: Entity<ActionLog>,
|
||||
}
|
||||
|
||||
impl ReadFileTool {
|
||||
pub fn new(project: Entity<Project>, action_log: Entity<ActionLog>) -> Self {
|
||||
pub fn new(
|
||||
thread: WeakEntity<Thread>,
|
||||
project: Entity<Project>,
|
||||
action_log: Entity<ActionLog>,
|
||||
) -> Self {
|
||||
Self {
|
||||
thread,
|
||||
project,
|
||||
action_log,
|
||||
}
|
||||
@@ -195,6 +201,17 @@ impl AgentTool for ReadFileTool {
|
||||
anyhow::bail!("{file_path} not found");
|
||||
}
|
||||
|
||||
// Record the file read time and mtime
|
||||
if let Some(mtime) = buffer.read_with(cx, |buffer, _| {
|
||||
buffer.file().and_then(|file| file.disk_state().mtime())
|
||||
})? {
|
||||
self.thread
|
||||
.update(cx, |thread, _| {
|
||||
thread.file_read_times.insert(abs_path.to_path_buf(), mtime);
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
|
||||
let mut anchor = None;
|
||||
|
||||
// Check if specific line ranges are provided
|
||||
@@ -285,11 +302,15 @@ impl AgentTool for ReadFileTool {
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
use crate::{ContextServerRegistry, Templates, Thread};
|
||||
use gpui::{AppContext, TestAppContext, UpdateGlobal as _};
|
||||
use language::{Language, LanguageConfig, LanguageMatcher, tree_sitter_rust};
|
||||
use language_model::fake_provider::FakeLanguageModel;
|
||||
use project::{FakeFs, Project};
|
||||
use prompt_store::ProjectContext;
|
||||
use serde_json::json;
|
||||
use settings::SettingsStore;
|
||||
use std::sync::Arc;
|
||||
use util::path;
|
||||
|
||||
#[gpui::test]
|
||||
@@ -300,7 +321,20 @@ mod test {
|
||||
fs.insert_tree(path!("/root"), json!({})).await;
|
||||
let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
|
||||
let action_log = cx.new(|_| ActionLog::new(project.clone()));
|
||||
let tool = Arc::new(ReadFileTool::new(project, action_log));
|
||||
let context_server_registry =
|
||||
cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
|
||||
let model = Arc::new(FakeLanguageModel::default());
|
||||
let thread = cx.new(|cx| {
|
||||
Thread::new(
|
||||
project.clone(),
|
||||
cx.new(|_cx| ProjectContext::default()),
|
||||
context_server_registry,
|
||||
Templates::new(),
|
||||
Some(model),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
let tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log));
|
||||
let (event_stream, _) = ToolCallEventStream::test();
|
||||
|
||||
let result = cx
|
||||
@@ -333,7 +367,20 @@ mod test {
|
||||
.await;
|
||||
let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
|
||||
let action_log = cx.new(|_| ActionLog::new(project.clone()));
|
||||
let tool = Arc::new(ReadFileTool::new(project, action_log));
|
||||
let context_server_registry =
|
||||
cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
|
||||
let model = Arc::new(FakeLanguageModel::default());
|
||||
let thread = cx.new(|cx| {
|
||||
Thread::new(
|
||||
project.clone(),
|
||||
cx.new(|_cx| ProjectContext::default()),
|
||||
context_server_registry,
|
||||
Templates::new(),
|
||||
Some(model),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
let tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log));
|
||||
let result = cx
|
||||
.update(|cx| {
|
||||
let input = ReadFileToolInput {
|
||||
@@ -363,7 +410,20 @@ mod test {
|
||||
let language_registry = project.read_with(cx, |project, _| project.languages().clone());
|
||||
language_registry.add(Arc::new(rust_lang()));
|
||||
let action_log = cx.new(|_| ActionLog::new(project.clone()));
|
||||
let tool = Arc::new(ReadFileTool::new(project, action_log));
|
||||
let context_server_registry =
|
||||
cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
|
||||
let model = Arc::new(FakeLanguageModel::default());
|
||||
let thread = cx.new(|cx| {
|
||||
Thread::new(
|
||||
project.clone(),
|
||||
cx.new(|_cx| ProjectContext::default()),
|
||||
context_server_registry,
|
||||
Templates::new(),
|
||||
Some(model),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
let tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log));
|
||||
let result = cx
|
||||
.update(|cx| {
|
||||
let input = ReadFileToolInput {
|
||||
@@ -435,7 +495,20 @@ mod test {
|
||||
let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
|
||||
|
||||
let action_log = cx.new(|_| ActionLog::new(project.clone()));
|
||||
let tool = Arc::new(ReadFileTool::new(project, action_log));
|
||||
let context_server_registry =
|
||||
cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
|
||||
let model = Arc::new(FakeLanguageModel::default());
|
||||
let thread = cx.new(|cx| {
|
||||
Thread::new(
|
||||
project.clone(),
|
||||
cx.new(|_cx| ProjectContext::default()),
|
||||
context_server_registry,
|
||||
Templates::new(),
|
||||
Some(model),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
let tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log));
|
||||
let result = cx
|
||||
.update(|cx| {
|
||||
let input = ReadFileToolInput {
|
||||
@@ -463,7 +536,20 @@ mod test {
|
||||
.await;
|
||||
let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
|
||||
let action_log = cx.new(|_| ActionLog::new(project.clone()));
|
||||
let tool = Arc::new(ReadFileTool::new(project, action_log));
|
||||
let context_server_registry =
|
||||
cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
|
||||
let model = Arc::new(FakeLanguageModel::default());
|
||||
let thread = cx.new(|cx| {
|
||||
Thread::new(
|
||||
project.clone(),
|
||||
cx.new(|_cx| ProjectContext::default()),
|
||||
context_server_registry,
|
||||
Templates::new(),
|
||||
Some(model),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
let tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log));
|
||||
|
||||
// start_line of 0 should be treated as 1
|
||||
let result = cx
|
||||
@@ -607,7 +693,20 @@ mod test {
|
||||
|
||||
let project = Project::test(fs.clone(), [path!("/project_root").as_ref()], cx).await;
|
||||
let action_log = cx.new(|_| ActionLog::new(project.clone()));
|
||||
let tool = Arc::new(ReadFileTool::new(project, action_log));
|
||||
let context_server_registry =
|
||||
cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
|
||||
let model = Arc::new(FakeLanguageModel::default());
|
||||
let thread = cx.new(|cx| {
|
||||
Thread::new(
|
||||
project.clone(),
|
||||
cx.new(|_cx| ProjectContext::default()),
|
||||
context_server_registry,
|
||||
Templates::new(),
|
||||
Some(model),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
let tool = Arc::new(ReadFileTool::new(thread.downgrade(), project, action_log));
|
||||
|
||||
// Reading a file outside the project worktree should fail
|
||||
let result = cx
|
||||
@@ -821,7 +920,24 @@ mod test {
|
||||
.await;
|
||||
|
||||
let action_log = cx.new(|_| ActionLog::new(project.clone()));
|
||||
let tool = Arc::new(ReadFileTool::new(project.clone(), action_log.clone()));
|
||||
let context_server_registry =
|
||||
cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
|
||||
let model = Arc::new(FakeLanguageModel::default());
|
||||
let thread = cx.new(|cx| {
|
||||
Thread::new(
|
||||
project.clone(),
|
||||
cx.new(|_cx| ProjectContext::default()),
|
||||
context_server_registry,
|
||||
Templates::new(),
|
||||
Some(model),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
let tool = Arc::new(ReadFileTool::new(
|
||||
thread.downgrade(),
|
||||
project.clone(),
|
||||
action_log.clone(),
|
||||
));
|
||||
|
||||
// Test reading allowed files in worktree1
|
||||
let result = cx
|
||||
|
||||
@@ -136,7 +136,7 @@ impl AcpConnection {
|
||||
while let Ok(n) = stderr.read_line(&mut line).await
|
||||
&& n > 0
|
||||
{
|
||||
log::warn!("agent stderr: {}", &line);
|
||||
log::warn!("agent stderr: {}", line.trim());
|
||||
line.clear();
|
||||
}
|
||||
Ok(())
|
||||
@@ -247,37 +247,58 @@ impl AgentConnection for AcpConnection {
|
||||
let default_mode = self.default_mode.clone();
|
||||
let cwd = cwd.to_path_buf();
|
||||
let context_server_store = project.read(cx).context_server_store().read(cx);
|
||||
let mcp_servers = if project.read(cx).is_local() {
|
||||
context_server_store
|
||||
.configured_server_ids()
|
||||
.iter()
|
||||
.filter_map(|id| {
|
||||
let configuration = context_server_store.configuration_for_server(id)?;
|
||||
let command = configuration.command();
|
||||
Some(acp::McpServer::Stdio {
|
||||
name: id.0.to_string(),
|
||||
command: command.path.clone(),
|
||||
args: command.args.clone(),
|
||||
env: if let Some(env) = command.env.as_ref() {
|
||||
env.iter()
|
||||
.map(|(name, value)| acp::EnvVariable {
|
||||
name: name.clone(),
|
||||
value: value.clone(),
|
||||
meta: None,
|
||||
})
|
||||
.collect()
|
||||
} else {
|
||||
vec![]
|
||||
},
|
||||
let mcp_servers =
|
||||
if project.read(cx).is_local() {
|
||||
context_server_store
|
||||
.configured_server_ids()
|
||||
.iter()
|
||||
.filter_map(|id| {
|
||||
let configuration = context_server_store.configuration_for_server(id)?;
|
||||
match &*configuration {
|
||||
project::context_server_store::ContextServerConfiguration::Custom {
|
||||
command,
|
||||
..
|
||||
}
|
||||
| project::context_server_store::ContextServerConfiguration::Extension {
|
||||
command,
|
||||
..
|
||||
} => Some(acp::McpServer::Stdio {
|
||||
name: id.0.to_string(),
|
||||
command: command.path.clone(),
|
||||
args: command.args.clone(),
|
||||
env: if let Some(env) = command.env.as_ref() {
|
||||
env.iter()
|
||||
.map(|(name, value)| acp::EnvVariable {
|
||||
name: name.clone(),
|
||||
value: value.clone(),
|
||||
meta: None,
|
||||
})
|
||||
.collect()
|
||||
} else {
|
||||
vec![]
|
||||
},
|
||||
}),
|
||||
project::context_server_store::ContextServerConfiguration::Http {
|
||||
url,
|
||||
headers,
|
||||
} => Some(acp::McpServer::Http {
|
||||
name: id.0.to_string(),
|
||||
url: url.to_string(),
|
||||
headers: headers.iter().map(|(name, value)| acp::HttpHeader {
|
||||
name: name.clone(),
|
||||
value: value.clone(),
|
||||
meta: None,
|
||||
}).collect(),
|
||||
}),
|
||||
}
|
||||
})
|
||||
})
|
||||
.collect()
|
||||
} else {
|
||||
// In SSH projects, the external agent is running on the remote
|
||||
// machine, and currently we only run MCP servers on the local
|
||||
// machine. So don't pass any MCP servers to the agent in that case.
|
||||
Vec::new()
|
||||
};
|
||||
.collect()
|
||||
} else {
|
||||
// In SSH projects, the external agent is running on the remote
|
||||
// machine, and currently we only run MCP servers on the local
|
||||
// machine. So don't pass any MCP servers to the agent in that case.
|
||||
Vec::new()
|
||||
};
|
||||
|
||||
cx.spawn(async move |cx| {
|
||||
let response = conn
|
||||
|
||||
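For reference, the new Http arm builds values shaped like the hand-written example below; the name, URL, and header are made up, the field names follow the hunk above, and it assumes the headers iterator collects into a Vec.

let server = acp::McpServer::Http {
    name: "docs-server".to_string(),
    url: "http://localhost:3000/mcp".to_string(),
    headers: vec![acp::HttpHeader {
        name: "Authorization".to_string(),
        value: "Bearer <token>".to_string(),
        meta: None,
    }],
};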
@@ -50,13 +50,14 @@ impl crate::AgentServer for CustomAgentServer {
|
||||
fn set_default_mode(&self, mode_id: Option<acp::SessionModeId>, fs: Arc<dyn Fs>, cx: &mut App) {
|
||||
let name = self.name();
|
||||
update_settings_file(fs, cx, move |settings, _| {
|
||||
settings
|
||||
if let Some(settings) = settings
|
||||
.agent_servers
|
||||
.get_or_insert_default()
|
||||
.custom
|
||||
.get_mut(&name)
|
||||
.unwrap()
|
||||
.default_mode = mode_id.map(|m| m.to_string())
|
||||
{
|
||||
settings.default_mode = mode_id.map(|m| m.to_string())
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@@ -6,8 +6,8 @@ use convert_case::{Case, Casing as _};
|
||||
use fs::Fs;
|
||||
use gpui::{App, SharedString};
|
||||
use settings::{
|
||||
AgentProfileContent, ContextServerPresetContent, Settings as _, SettingsContent,
|
||||
update_settings_file,
|
||||
AgentProfileContent, ContextServerPresetContent, LanguageModelSelection, Settings as _,
|
||||
SettingsContent, update_settings_file,
|
||||
};
|
||||
use util::ResultExt as _;
|
||||
|
||||
@@ -53,19 +53,30 @@ impl AgentProfile {
|
||||
let base_profile =
|
||||
base_profile_id.and_then(|id| AgentSettings::get_global(cx).profiles.get(&id).cloned());
|
||||
|
||||
// Copy toggles from the base profile so the new profile starts with familiar defaults.
|
||||
let tools = base_profile
|
||||
.as_ref()
|
||||
.map(|profile| profile.tools.clone())
|
||||
.unwrap_or_default();
|
||||
let enable_all_context_servers = base_profile
|
||||
.as_ref()
|
||||
.map(|profile| profile.enable_all_context_servers)
|
||||
.unwrap_or_default();
|
||||
let context_servers = base_profile
|
||||
.as_ref()
|
||||
.map(|profile| profile.context_servers.clone())
|
||||
.unwrap_or_default();
|
||||
// Preserve the base profile's model preference when cloning into a new profile.
|
||||
let default_model = base_profile
|
||||
.as_ref()
|
||||
.and_then(|profile| profile.default_model.clone());
|
||||
|
||||
let profile_settings = AgentProfileSettings {
|
||||
name: name.into(),
|
||||
tools: base_profile
|
||||
.as_ref()
|
||||
.map(|profile| profile.tools.clone())
|
||||
.unwrap_or_default(),
|
||||
enable_all_context_servers: base_profile
|
||||
.as_ref()
|
||||
.map(|profile| profile.enable_all_context_servers)
|
||||
.unwrap_or_default(),
|
||||
context_servers: base_profile
|
||||
.map(|profile| profile.context_servers)
|
||||
.unwrap_or_default(),
|
||||
tools,
|
||||
enable_all_context_servers,
|
||||
context_servers,
|
||||
default_model,
|
||||
};
|
||||
|
||||
update_settings_file(fs, cx, {
|
||||
@@ -96,6 +107,8 @@ pub struct AgentProfileSettings {
|
||||
pub tools: IndexMap<Arc<str>, bool>,
|
||||
pub enable_all_context_servers: bool,
|
||||
pub context_servers: IndexMap<Arc<str>, ContextServerPreset>,
|
||||
/// Default language model to apply when this profile becomes active.
|
||||
pub default_model: Option<LanguageModelSelection>,
|
||||
}
|
||||
|
||||
impl AgentProfileSettings {
|
||||
@@ -144,6 +157,7 @@ impl AgentProfileSettings {
|
||||
)
|
||||
})
|
||||
.collect(),
|
||||
default_model: self.default_model.clone(),
|
||||
},
|
||||
);
|
||||
|
||||
@@ -153,15 +167,23 @@ impl AgentProfileSettings {
|
||||
|
||||
impl From<AgentProfileContent> for AgentProfileSettings {
|
||||
fn from(content: AgentProfileContent) -> Self {
|
||||
let AgentProfileContent {
|
||||
name,
|
||||
tools,
|
||||
enable_all_context_servers,
|
||||
context_servers,
|
||||
default_model,
|
||||
} = content;
|
||||
|
||||
Self {
|
||||
name: content.name.into(),
|
||||
tools: content.tools,
|
||||
enable_all_context_servers: content.enable_all_context_servers.unwrap_or_default(),
|
||||
context_servers: content
|
||||
.context_servers
|
||||
name: name.into(),
|
||||
tools,
|
||||
enable_all_context_servers: enable_all_context_servers.unwrap_or_default(),
|
||||
context_servers: context_servers
|
||||
.into_iter()
|
||||
.map(|(server_id, preset)| (server_id, preset.into()))
|
||||
.collect(),
|
||||
default_model,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -14,6 +14,7 @@ doctest = false
|
||||
|
||||
[features]
|
||||
test-support = ["gpui/test-support", "language/test-support"]
|
||||
unit-eval = []
|
||||
|
||||
[dependencies]
|
||||
acp_thread.workspace = true
|
||||
@@ -47,6 +48,7 @@ fs.workspace = true
|
||||
futures.workspace = true
|
||||
fuzzy.workspace = true
|
||||
gpui.workspace = true
|
||||
gpui_tokio.workspace = true
|
||||
html_to_markdown.workspace = true
|
||||
http_client.workspace = true
|
||||
indoc.workspace = true
|
||||
@@ -98,12 +100,15 @@ util.workspace = true
|
||||
watch.workspace = true
|
||||
workspace.workspace = true
|
||||
zed_actions.workspace = true
|
||||
image.workspace = true
|
||||
async-fs.workspace = true
|
||||
|
||||
[dev-dependencies]
|
||||
acp_thread = { workspace = true, features = ["test-support"] }
|
||||
agent = { workspace = true, features = ["test-support"] }
|
||||
assistant_text_thread = { workspace = true, features = ["test-support"] }
|
||||
buffer_diff = { workspace = true, features = ["test-support"] }
|
||||
clock.workspace = true
|
||||
db = { workspace = true, features = ["test-support"] }
|
||||
editor = { workspace = true, features = ["test-support"] }
|
||||
gpui = { workspace = true, "features" = ["test-support"] }
|
||||
|
||||
@@ -109,6 +109,8 @@ impl ContextPickerCompletionProvider {
|
||||
icon_path: Some(mode.icon().path().into()),
|
||||
documentation: None,
|
||||
source: project::CompletionSource::Custom,
|
||||
match_start: None,
|
||||
snippet_deduplication_key: None,
|
||||
insert_text_mode: None,
|
||||
// This ensures that when a user accepts this completion, the
|
||||
// completion menu will still be shown after "@category " is
|
||||
@@ -146,6 +148,8 @@ impl ContextPickerCompletionProvider {
|
||||
documentation: None,
|
||||
insert_text_mode: None,
|
||||
source: project::CompletionSource::Custom,
|
||||
match_start: None,
|
||||
snippet_deduplication_key: None,
|
||||
icon_path: Some(icon_for_completion),
|
||||
confirm: Some(confirm_completion_callback(
|
||||
thread_entry.title().clone(),
|
||||
@@ -177,6 +181,8 @@ impl ContextPickerCompletionProvider {
|
||||
documentation: None,
|
||||
insert_text_mode: None,
|
||||
source: project::CompletionSource::Custom,
|
||||
match_start: None,
|
||||
snippet_deduplication_key: None,
|
||||
icon_path: Some(icon_path),
|
||||
confirm: Some(confirm_completion_callback(
|
||||
rule.title,
|
||||
@@ -233,6 +239,8 @@ impl ContextPickerCompletionProvider {
|
||||
documentation: None,
|
||||
source: project::CompletionSource::Custom,
|
||||
icon_path: Some(completion_icon_path),
|
||||
match_start: None,
|
||||
snippet_deduplication_key: None,
|
||||
insert_text_mode: None,
|
||||
confirm: Some(confirm_completion_callback(
|
||||
file_name,
|
||||
@@ -284,6 +292,8 @@ impl ContextPickerCompletionProvider {
|
||||
documentation: None,
|
||||
source: project::CompletionSource::Custom,
|
||||
icon_path: Some(icon_path),
|
||||
match_start: None,
|
||||
snippet_deduplication_key: None,
|
||||
insert_text_mode: None,
|
||||
confirm: Some(confirm_completion_callback(
|
||||
symbol.name.into(),
|
||||
@@ -316,6 +326,8 @@ impl ContextPickerCompletionProvider {
|
||||
documentation: None,
|
||||
source: project::CompletionSource::Custom,
|
||||
icon_path: Some(icon_path),
|
||||
match_start: None,
|
||||
snippet_deduplication_key: None,
|
||||
insert_text_mode: None,
|
||||
confirm: Some(confirm_completion_callback(
|
||||
url_to_fetch.to_string().into(),
|
||||
@@ -384,6 +396,8 @@ impl ContextPickerCompletionProvider {
|
||||
icon_path: Some(action.icon().path().into()),
|
||||
documentation: None,
|
||||
source: project::CompletionSource::Custom,
|
||||
match_start: None,
|
||||
snippet_deduplication_key: None,
|
||||
insert_text_mode: None,
|
||||
// This ensures that when a user accepts this completion, the
|
||||
// completion menu will still be shown after "@category " is
|
||||
@@ -694,14 +708,18 @@ fn build_symbol_label(symbol_name: &str, file_name: &str, line: u32, cx: &App) -
|
||||
}
|
||||
|
||||
fn build_code_label_for_full_path(file_name: &str, directory: Option<&str>, cx: &App) -> CodeLabel {
|
||||
let comment_id = cx.theme().syntax().highlight_id("comment").map(HighlightId);
|
||||
let path = cx
|
||||
.theme()
|
||||
.syntax()
|
||||
.highlight_id("variable")
|
||||
.map(HighlightId);
|
||||
let mut label = CodeLabelBuilder::default();
|
||||
|
||||
label.push_str(file_name, None);
|
||||
label.push_str(" ", None);
|
||||
|
||||
if let Some(directory) = directory {
|
||||
label.push_str(directory, comment_id);
|
||||
label.push_str(directory, path);
|
||||
}
|
||||
|
||||
label.build()
|
||||
@@ -770,6 +788,8 @@ impl CompletionProvider for ContextPickerCompletionProvider {
|
||||
)),
|
||||
source: project::CompletionSource::Custom,
|
||||
icon_path: None,
|
||||
match_start: None,
|
||||
snippet_deduplication_key: None,
|
||||
insert_text_mode: None,
|
||||
confirm: Some(Arc::new({
|
||||
let editor = editor.clone();
|
||||
|
||||
@@ -15,6 +15,7 @@ use editor::{
|
||||
EditorEvent, EditorMode, EditorSnapshot, EditorStyle, ExcerptId, FoldPlaceholder, Inlay,
|
||||
MultiBuffer, ToOffset,
|
||||
actions::Paste,
|
||||
code_context_menus::CodeContextMenu,
|
||||
display_map::{Crease, CreaseId, FoldId},
|
||||
scroll::Autoscroll,
|
||||
};
|
||||
@@ -27,6 +28,7 @@ use gpui::{
|
||||
EventEmitter, FocusHandle, Focusable, Image, ImageFormat, Img, KeyContext, SharedString,
|
||||
Subscription, Task, TextStyle, WeakEntity, pulsating_between,
|
||||
};
|
||||
use itertools::Either;
|
||||
use language::{Buffer, Language, language_settings::InlayHintKind};
|
||||
use language_model::LanguageModelImage;
|
||||
use postage::stream::Stream as _;
|
||||
@@ -272,6 +274,15 @@ impl MessageEditor {
|
||||
self.editor.read(cx).is_empty(cx)
|
||||
}
|
||||
|
||||
pub fn is_completions_menu_visible(&self, cx: &App) -> bool {
|
||||
self.editor
|
||||
.read(cx)
|
||||
.context_menu()
|
||||
.borrow()
|
||||
.as_ref()
|
||||
.is_some_and(|menu| matches!(menu, CodeContextMenu::Completions(_)) && menu.visible())
|
||||
}
|
||||
|
||||
pub fn mentions(&self) -> HashSet<MentionUri> {
|
||||
self.mention_set
|
||||
.mentions
|
||||
@@ -836,6 +847,45 @@ impl MessageEditor {
|
||||
cx.emit(MessageEditorEvent::Send)
|
||||
}
|
||||
|
||||
pub fn trigger_completion_menu(&mut self, window: &mut Window, cx: &mut Context<Self>) {
|
||||
let editor = self.editor.clone();
|
||||
|
||||
cx.spawn_in(window, async move |_, cx| {
|
||||
editor
|
||||
.update_in(cx, |editor, window, cx| {
|
||||
let menu_is_open =
|
||||
editor.context_menu().borrow().as_ref().is_some_and(|menu| {
|
||||
matches!(menu, CodeContextMenu::Completions(_)) && menu.visible()
|
||||
});
|
||||
|
||||
let has_at_sign = {
|
||||
let snapshot = editor.display_snapshot(cx);
|
||||
let cursor = editor.selections.newest::<text::Point>(&snapshot).head();
|
||||
let offset = cursor.to_offset(&snapshot);
|
||||
if offset > 0 {
|
||||
snapshot
|
||||
.buffer_snapshot()
|
||||
.reversed_chars_at(offset)
|
||||
.next()
|
||||
.map(|sign| sign == '@')
|
||||
.unwrap_or(false)
|
||||
} else {
|
||||
false
|
||||
}
|
||||
};
|
||||
|
||||
if menu_is_open && has_at_sign {
|
||||
return;
|
||||
}
|
||||
|
||||
editor.insert("@", window, cx);
|
||||
editor.show_completions(&editor::actions::ShowCompletions, window, cx);
|
||||
})
|
||||
.log_err();
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
|
||||
fn chat(&mut self, _: &Chat, _: &mut Window, cx: &mut Context<Self>) {
|
||||
self.send(cx);
|
||||
}
|
||||
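trigger_completion_menu above only inserts "@" when the completion menu is not already open right after one. The character check boils down to the standalone sketch below, written over a plain string and byte offset instead of the editor snapshot types:

/// True when the character immediately before `cursor` is '@'
/// (assumes `cursor` lies on a char boundary).
fn cursor_follows_at_sign(text: &str, cursor: usize) -> bool {
    text[..cursor].chars().next_back() == Some('@')
}

// cursor_follows_at_sign("mention @", 9) == true
// cursor_follows_at_sign("mention", 7)   == false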
@@ -863,74 +913,114 @@ impl MessageEditor {
|
||||
if !self.prompt_capabilities.borrow().image {
|
||||
return;
|
||||
}
|
||||
|
||||
let images = cx
|
||||
.read_from_clipboard()
|
||||
.map(|item| {
|
||||
item.into_entries()
|
||||
.filter_map(|entry| {
|
||||
if let ClipboardEntry::Image(image) = entry {
|
||||
Some(image)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
})
|
||||
.unwrap_or_default();
|
||||
|
||||
if images.is_empty() {
|
||||
let Some(clipboard) = cx.read_from_clipboard() else {
|
||||
return;
|
||||
}
|
||||
cx.stop_propagation();
|
||||
};
|
||||
cx.spawn_in(window, async move |this, cx| {
|
||||
use itertools::Itertools;
|
||||
let (mut images, paths) = clipboard
|
||||
.into_entries()
|
||||
.filter_map(|entry| match entry {
|
||||
ClipboardEntry::Image(image) => Some(Either::Left(image)),
|
||||
ClipboardEntry::ExternalPaths(paths) => Some(Either::Right(paths)),
|
||||
_ => None,
|
||||
})
|
||||
.partition_map::<Vec<_>, Vec<_>, _, _, _>(std::convert::identity);
|
||||
|
||||
let replacement_text = MentionUri::PastedImage.as_link().to_string();
|
||||
for image in images {
|
||||
let (excerpt_id, text_anchor, multibuffer_anchor) =
|
||||
self.editor.update(cx, |message_editor, cx| {
|
||||
let snapshot = message_editor.snapshot(window, cx);
|
||||
let (excerpt_id, _, buffer_snapshot) =
|
||||
snapshot.buffer_snapshot().as_singleton().unwrap();
|
||||
if !paths.is_empty() {
|
||||
images.extend(
|
||||
cx.background_spawn(async move {
|
||||
let mut images = vec![];
|
||||
for path in paths.into_iter().flat_map(|paths| paths.paths().to_owned()) {
|
||||
let Ok(content) = async_fs::read(path).await else {
|
||||
continue;
|
||||
};
|
||||
let Ok(format) = image::guess_format(&content) else {
|
||||
continue;
|
||||
};
|
||||
images.push(gpui::Image::from_bytes(
|
||||
match format {
|
||||
image::ImageFormat::Png => gpui::ImageFormat::Png,
|
||||
image::ImageFormat::Jpeg => gpui::ImageFormat::Jpeg,
|
||||
image::ImageFormat::WebP => gpui::ImageFormat::Webp,
|
||||
image::ImageFormat::Gif => gpui::ImageFormat::Gif,
|
||||
image::ImageFormat::Bmp => gpui::ImageFormat::Bmp,
|
||||
image::ImageFormat::Tiff => gpui::ImageFormat::Tiff,
|
||||
image::ImageFormat::Ico => gpui::ImageFormat::Ico,
|
||||
_ => continue,
|
||||
},
|
||||
content,
|
||||
));
|
||||
}
|
||||
images
|
||||
})
|
||||
.await,
|
||||
);
|
||||
}
|
||||
|
||||
let text_anchor = buffer_snapshot.anchor_before(buffer_snapshot.len());
|
||||
let multibuffer_anchor = snapshot
|
||||
.buffer_snapshot()
|
||||
.anchor_in_excerpt(*excerpt_id, text_anchor);
|
||||
message_editor.edit(
|
||||
[(
|
||||
multi_buffer::Anchor::max()..multi_buffer::Anchor::max(),
|
||||
format!("{replacement_text} "),
|
||||
)],
|
||||
if images.is_empty() {
|
||||
return;
|
||||
}
|
||||
|
||||
let replacement_text = MentionUri::PastedImage.as_link().to_string();
|
||||
let Ok(editor) = this.update(cx, |this, cx| {
|
||||
cx.stop_propagation();
|
||||
this.editor.clone()
|
||||
}) else {
|
||||
return;
|
||||
};
|
||||
for image in images {
|
||||
let Ok((excerpt_id, text_anchor, multibuffer_anchor)) =
|
||||
editor.update_in(cx, |message_editor, window, cx| {
|
||||
let snapshot = message_editor.snapshot(window, cx);
|
||||
let (excerpt_id, _, buffer_snapshot) =
|
||||
snapshot.buffer_snapshot().as_singleton().unwrap();
|
||||
|
||||
let text_anchor = buffer_snapshot.anchor_before(buffer_snapshot.len());
|
||||
let multibuffer_anchor = snapshot
|
||||
.buffer_snapshot()
|
||||
.anchor_in_excerpt(*excerpt_id, text_anchor);
|
||||
message_editor.edit(
|
||||
[(
|
||||
multi_buffer::Anchor::max()..multi_buffer::Anchor::max(),
|
||||
format!("{replacement_text} "),
|
||||
)],
|
||||
cx,
|
||||
);
|
||||
(*excerpt_id, text_anchor, multibuffer_anchor)
|
||||
})
|
||||
else {
|
||||
break;
|
||||
};
|
||||
|
||||
let content_len = replacement_text.len();
|
||||
let Some(start_anchor) = multibuffer_anchor else {
|
||||
continue;
|
||||
};
|
||||
let Ok(end_anchor) = editor.update(cx, |editor, cx| {
|
||||
let snapshot = editor.buffer().read(cx).snapshot(cx);
|
||||
snapshot.anchor_before(start_anchor.to_offset(&snapshot) + content_len)
|
||||
}) else {
|
||||
continue;
|
||||
};
|
||||
let image = Arc::new(image);
|
||||
let Ok(Some((crease_id, tx))) = cx.update(|window, cx| {
|
||||
insert_crease_for_mention(
|
||||
excerpt_id,
|
||||
text_anchor,
|
||||
content_len,
|
||||
MentionUri::PastedImage.name().into(),
|
||||
IconName::Image.path().into(),
|
||||
Some(Task::ready(Ok(image.clone())).shared()),
|
||||
editor.clone(),
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
(*excerpt_id, text_anchor, multibuffer_anchor)
|
||||
});
|
||||
|
||||
let content_len = replacement_text.len();
|
||||
let Some(start_anchor) = multibuffer_anchor else {
|
||||
continue;
|
||||
};
|
||||
let end_anchor = self.editor.update(cx, |editor, cx| {
|
||||
let snapshot = editor.buffer().read(cx).snapshot(cx);
|
||||
snapshot.anchor_before(start_anchor.to_offset(&snapshot) + content_len)
|
||||
});
|
||||
let image = Arc::new(image);
|
||||
let Some((crease_id, tx)) = insert_crease_for_mention(
|
||||
excerpt_id,
|
||||
text_anchor,
|
||||
content_len,
|
||||
MentionUri::PastedImage.name().into(),
|
||||
IconName::Image.path().into(),
|
||||
Some(Task::ready(Ok(image.clone())).shared()),
|
||||
self.editor.clone(),
|
||||
window,
|
||||
cx,
|
||||
) else {
|
||||
continue;
|
||||
};
|
||||
let task = cx
|
||||
.spawn_in(window, {
|
||||
async move |_, cx| {
|
||||
)
|
||||
}) else {
|
||||
continue;
|
||||
};
|
||||
let task = cx
|
||||
.spawn(async move |cx| {
|
||||
let format = image.format;
|
||||
let image = cx
|
||||
.update(|_, cx| LanguageModelImage::from_image(image, cx))
|
||||
@@ -945,15 +1035,16 @@ impl MessageEditor {
|
||||
} else {
|
||||
Err("Failed to convert image".into())
|
||||
}
|
||||
}
|
||||
})
|
||||
.shared();
|
||||
|
||||
this.update(cx, |this, _| {
|
||||
this.mention_set
|
||||
.mentions
|
||||
.insert(crease_id, (MentionUri::PastedImage, task.clone()))
|
||||
})
|
||||
.shared();
|
||||
.ok();
|
||||
|
||||
self.mention_set
|
||||
.mentions
|
||||
.insert(crease_id, (MentionUri::PastedImage, task.clone()));
|
||||
|
||||
cx.spawn_in(window, async move |this, cx| {
|
||||
if task.await.notify_async_err(cx).is_none() {
|
||||
this.update(cx, |this, cx| {
|
||||
this.editor.update(cx, |editor, cx| {
|
||||
@@ -963,9 +1054,9 @@ impl MessageEditor {
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
|
||||
pub fn insert_dragged_files(
|
||||
@@ -1195,6 +1286,17 @@ impl MessageEditor {
|
||||
self.editor.read(cx).text(cx)
|
||||
}
|
||||
|
||||
pub fn set_placeholder_text(
|
||||
&mut self,
|
||||
placeholder: &str,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.editor.update(cx, |editor, cx| {
|
||||
editor.set_placeholder_text(placeholder, window, cx);
|
||||
});
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub fn set_text(&mut self, text: &str, window: &mut Window, cx: &mut Context<Self>) {
|
||||
self.editor.update(cx, |editor, cx| {
|
||||
@@ -2611,13 +2713,14 @@ mod tests {
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_large_file_mention_uses_outline(cx: &mut TestAppContext) {
|
||||
async fn test_large_file_mention_fallback(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
|
||||
// Create a large file that exceeds AUTO_OUTLINE_SIZE
|
||||
const LINE: &str = "fn example_function() { /* some code */ }\n";
|
||||
// Using plain text without a configured language, so no outline is available
|
||||
const LINE: &str = "This is a line of text in the file\n";
|
||||
let large_content = LINE.repeat(2 * (outline::AUTO_OUTLINE_SIZE / LINE.len()));
|
||||
assert!(large_content.len() > outline::AUTO_OUTLINE_SIZE);
|
||||
|
||||
@@ -2628,8 +2731,8 @@ mod tests {
|
||||
fs.insert_tree(
|
||||
"/project",
|
||||
json!({
|
||||
"large_file.rs": large_content.clone(),
|
||||
"small_file.rs": small_content,
|
||||
"large_file.txt": large_content.clone(),
|
||||
"small_file.txt": small_content,
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
@@ -2675,7 +2778,7 @@ mod tests {
|
||||
let large_file_abs_path = project.read_with(cx, |project, cx| {
|
||||
let worktree = project.worktrees(cx).next().unwrap();
|
||||
let worktree_root = worktree.read(cx).abs_path();
|
||||
worktree_root.join("large_file.rs")
|
||||
worktree_root.join("large_file.txt")
|
||||
});
|
||||
let large_file_task = message_editor.update(cx, |editor, cx| {
|
||||
editor.confirm_mention_for_file(large_file_abs_path, cx)
|
||||
@@ -2684,11 +2787,20 @@ mod tests {
|
||||
let large_file_mention = large_file_task.await.unwrap();
|
||||
match large_file_mention {
|
||||
Mention::Text { content, .. } => {
|
||||
// Should contain outline header for large files
|
||||
assert!(content.contains("File outline for"));
|
||||
assert!(content.contains("file too large to show full content"));
|
||||
// Should not contain the full repeated content
|
||||
assert!(!content.contains(&LINE.repeat(100)));
|
||||
// Should contain some of the content but not all of it
|
||||
assert!(
|
||||
content.contains(LINE),
|
||||
"Should contain some of the file content"
|
||||
);
|
||||
assert!(
|
||||
!content.contains(&LINE.repeat(100)),
|
||||
"Should not contain the full file"
|
||||
);
|
||||
// Should be much smaller than the original
|
||||
assert!(
|
||||
content.len() < large_content.len() / 10,
|
||||
"Should be significantly truncated"
|
||||
);
|
||||
}
|
||||
_ => panic!("Expected Text mention for large file"),
|
||||
}
|
||||
@@ -2698,7 +2810,7 @@ mod tests {
|
||||
let small_file_abs_path = project.read_with(cx, |project, cx| {
|
||||
let worktree = project.worktrees(cx).next().unwrap();
|
||||
let worktree_root = worktree.read(cx).abs_path();
|
||||
worktree_root.join("small_file.rs")
|
||||
worktree_root.join("small_file.txt")
|
||||
});
|
||||
let small_file_task = message_editor.update(cx, |editor, cx| {
|
||||
editor.confirm_mention_for_file(small_file_abs_path, cx)
|
||||
@@ -2707,10 +2819,8 @@ mod tests {
|
||||
let small_file_mention = small_file_task.await.unwrap();
|
||||
match small_file_mention {
|
||||
Mention::Text { content, .. } => {
|
||||
// Should contain the actual content
|
||||
// Should contain the full actual content
|
||||
assert_eq!(content, small_content);
|
||||
// Should not contain outline header
|
||||
assert!(!content.contains("File outline for"));
|
||||
}
|
||||
_ => panic!("Expected Text mention for small file"),
|
||||
}
|
||||
|
||||
@@ -56,6 +56,10 @@ impl ModeSelector {
|
||||
self.set_mode(all_modes[next_index].id.clone(), cx);
|
||||
}
|
||||
|
||||
pub fn mode(&self) -> acp::SessionModeId {
|
||||
self.connection.current_mode()
|
||||
}
|
||||
|
||||
pub fn set_mode(&mut self, mode: acp::SessionModeId, cx: &mut Context<Self>) {
|
||||
let task = self.connection.set_mode(mode, cx);
|
||||
self.setting_mode = true;
|
||||
|
||||
@@ -251,17 +251,17 @@ impl PickerDelegate for AcpModelPickerDelegate {
|
||||
.inset(true)
|
||||
.spacing(ListItemSpacing::Sparse)
|
||||
.toggle_state(selected)
|
||||
.start_slot::<Icon>(model_info.icon.map(|icon| {
|
||||
Icon::new(icon)
|
||||
.color(model_icon_color)
|
||||
.size(IconSize::Small)
|
||||
}))
|
||||
.child(
|
||||
h_flex()
|
||||
.w_full()
|
||||
.pl_0p5()
|
||||
.gap_1p5()
|
||||
.w(px(240.))
|
||||
.when_some(model_info.icon, |this, icon| {
|
||||
this.child(
|
||||
Icon::new(icon)
|
||||
.color(model_icon_color)
|
||||
.size(IconSize::Small)
|
||||
)
|
||||
})
|
||||
.child(Label::new(model_info.name.clone()).truncate()),
|
||||
)
|
||||
.end_slot(div().pr_3().when(is_selected, |this| {
|
||||
|
||||
@@ -457,25 +457,23 @@ impl Render for AcpThreadHistory {
|
||||
.on_action(cx.listener(Self::select_last))
|
||||
.on_action(cx.listener(Self::confirm))
|
||||
.on_action(cx.listener(Self::remove_selected_thread))
|
||||
.when(!self.history_store.read(cx).is_empty(cx), |parent| {
|
||||
parent.child(
|
||||
h_flex()
|
||||
.h(px(41.)) // Match the toolbar perfectly
|
||||
.w_full()
|
||||
.py_1()
|
||||
.px_2()
|
||||
.gap_2()
|
||||
.justify_between()
|
||||
.border_b_1()
|
||||
.border_color(cx.theme().colors().border)
|
||||
.child(
|
||||
Icon::new(IconName::MagnifyingGlass)
|
||||
.color(Color::Muted)
|
||||
.size(IconSize::Small),
|
||||
)
|
||||
.child(self.search_editor.clone()),
|
||||
)
|
||||
})
|
||||
.child(
|
||||
h_flex()
|
||||
.h(px(41.)) // Match the toolbar perfectly
|
||||
.w_full()
|
||||
.py_1()
|
||||
.px_2()
|
||||
.gap_2()
|
||||
.justify_between()
|
||||
.border_b_1()
|
||||
.border_color(cx.theme().colors().border)
|
||||
.child(
|
||||
Icon::new(IconName::MagnifyingGlass)
|
||||
.color(Color::Muted)
|
||||
.size(IconSize::Small),
|
||||
)
|
||||
.child(self.search_editor.clone()),
|
||||
)
|
||||
.child({
|
||||
let view = v_flex()
|
||||
.id("list-container")
|
||||
@@ -484,19 +482,15 @@ impl Render for AcpThreadHistory {
|
||||
.flex_grow();
|
||||
|
||||
if self.history_store.read(cx).is_empty(cx) {
|
||||
view.justify_center()
|
||||
.child(
|
||||
h_flex().w_full().justify_center().child(
|
||||
Label::new("You don't have any past threads yet.")
|
||||
.size(LabelSize::Small),
|
||||
),
|
||||
)
|
||||
} else if self.search_produced_no_matches() {
|
||||
view.justify_center().child(
|
||||
h_flex().w_full().justify_center().child(
|
||||
Label::new("No threads match your search.").size(LabelSize::Small),
|
||||
),
|
||||
view.justify_center().items_center().child(
|
||||
Label::new("You don't have any past threads yet.")
|
||||
.size(LabelSize::Small)
|
||||
.color(Color::Muted),
|
||||
)
|
||||
} else if self.search_produced_no_matches() {
|
||||
view.justify_center()
|
||||
.items_center()
|
||||
.child(Label::new("No threads match your search.").size(LabelSize::Small))
|
||||
} else {
|
||||
view.child(
|
||||
uniform_list(
|
||||
@@ -673,7 +667,7 @@ impl EntryTimeFormat {
timezone,
time_format::TimestampFormat::EnhancedAbsolute,
),
EntryTimeFormat::TimeOnly => time_format::format_time(timestamp),
EntryTimeFormat::TimeOnly => time_format::format_time(timestamp.to_offset(timezone)),
}
}
}
|
||||
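The TimeOnly fix above matters because the stored timestamp carries its own offset (the types look like the time crate's): formatting it directly prints that offset's wall-clock time, not the viewer's. A minimal sketch of the conversion, assuming those time-crate types:

use time::{OffsetDateTime, UtcOffset};

// Re-express the same instant in the viewer's offset so hour/minute render
// as local wall-clock time; the instant itself is unchanged.
fn in_viewer_timezone(timestamp: OffsetDateTime, timezone: UtcOffset) -> OffsetDateTime {
    timestamp.to_offset(timezone)
}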
@@ -51,7 +51,7 @@ use ui::{
|
||||
PopoverMenuHandle, SpinnerLabel, TintColor, Tooltip, WithScrollbar, prelude::*,
|
||||
};
|
||||
use util::{ResultExt, size::format_file_size, time::duration_alt_display};
|
||||
use workspace::{CollaboratorId, Workspace};
|
||||
use workspace::{CollaboratorId, NewTerminal, Workspace};
|
||||
use zed_actions::agent::{Chat, ToggleModelSelector};
|
||||
use zed_actions::assistant::OpenRulesLibrary;
|
||||
|
||||
@@ -69,8 +69,8 @@ use crate::ui::{
|
||||
};
|
||||
use crate::{
|
||||
AgentDiffPane, AgentPanel, AllowAlways, AllowOnce, ContinueThread, ContinueWithBurnMode,
|
||||
CycleModeSelector, ExpandMessageEditor, Follow, KeepAll, OpenAgentDiff, OpenHistory, RejectAll,
|
||||
RejectOnce, ToggleBurnMode, ToggleProfileSelector,
|
||||
CycleModeSelector, ExpandMessageEditor, Follow, KeepAll, NewThread, OpenAgentDiff, OpenHistory,
|
||||
RejectAll, RejectOnce, ToggleBurnMode, ToggleProfileSelector,
|
||||
};
|
||||
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
|
||||
@@ -125,8 +125,9 @@ impl ProfileProvider for Entity<agent::Thread> {
|
||||
}
|
||||
|
||||
fn set_profile(&self, profile_id: AgentProfileId, cx: &mut App) {
|
||||
self.update(cx, |thread, _cx| {
|
||||
thread.set_profile(profile_id);
|
||||
self.update(cx, |thread, cx| {
|
||||
// Apply the profile and let the thread swap to its default model.
|
||||
thread.set_profile(profile_id, cx);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -277,6 +278,7 @@ pub struct AcpThreadView {
|
||||
notification_subscriptions: HashMap<WindowHandle<AgentNotification>, Vec<Subscription>>,
|
||||
thread_retry_status: Option<RetryStatus>,
|
||||
thread_error: Option<ThreadError>,
|
||||
thread_error_markdown: Option<Entity<Markdown>>,
|
||||
thread_feedback: ThreadFeedbackState,
|
||||
list_state: ListState,
|
||||
auth_task: Option<Task<()>>,
|
||||
@@ -336,19 +338,7 @@ impl AcpThreadView {
|
||||
let prompt_capabilities = Rc::new(RefCell::new(acp::PromptCapabilities::default()));
|
||||
let available_commands = Rc::new(RefCell::new(vec![]));
|
||||
|
||||
let placeholder = if agent.name() == "Zed Agent" {
|
||||
format!("Message the {} — @ to include context", agent.name())
|
||||
} else if agent.name() == "Claude Code"
|
||||
|| agent.name() == "Codex"
|
||||
|| !available_commands.borrow().is_empty()
|
||||
{
|
||||
format!(
|
||||
"Message {} — @ to include context, / for commands",
|
||||
agent.name()
|
||||
)
|
||||
} else {
|
||||
format!("Message {} — @ to include context", agent.name())
|
||||
};
|
||||
let placeholder = placeholder_text(agent.name().as_ref(), false);
|
||||
|
||||
let message_editor = cx.new(|cx| {
|
||||
let mut editor = MessageEditor::new(
|
||||
@@ -426,6 +416,7 @@ impl AcpThreadView {
|
||||
list_state: list_state,
|
||||
thread_retry_status: None,
|
||||
thread_error: None,
|
||||
thread_error_markdown: None,
|
||||
thread_feedback: Default::default(),
|
||||
auth_task: None,
|
||||
expanded_tool_calls: HashSet::default(),
|
||||
@@ -809,6 +800,7 @@ impl AcpThreadView {
|
||||
|
||||
if should_retry {
|
||||
self.thread_error = None;
|
||||
self.thread_error_markdown = None;
|
||||
self.reset(window, cx);
|
||||
}
|
||||
}
|
||||
@@ -1143,6 +1135,7 @@ impl AcpThreadView {
|
||||
|
||||
self.is_loading_contents = true;
|
||||
let model_id = self.current_model_id(cx);
|
||||
let mode_id = self.current_mode_id(cx);
|
||||
let guard = cx.new(|_| ());
|
||||
cx.observe_release(&guard, |this, _guard, cx| {
|
||||
this.is_loading_contents = false;
|
||||
@@ -1177,7 +1170,8 @@ impl AcpThreadView {
|
||||
"Agent Message Sent",
|
||||
agent = agent_telemetry_id,
|
||||
session = session_id,
|
||||
model = model_id
|
||||
model = model_id,
|
||||
mode = mode_id
|
||||
);
|
||||
|
||||
thread.send(contents, cx)
|
||||
@@ -1190,6 +1184,7 @@ impl AcpThreadView {
|
||||
agent = agent_telemetry_id,
|
||||
session = session_id,
|
||||
model = model_id,
|
||||
mode = mode_id,
|
||||
status,
|
||||
turn_time_ms,
|
||||
);
|
||||
@@ -1338,6 +1333,7 @@ impl AcpThreadView {
|
||||
|
||||
fn clear_thread_error(&mut self, cx: &mut Context<Self>) {
|
||||
self.thread_error = None;
|
||||
self.thread_error_markdown = None;
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
@@ -1455,7 +1451,14 @@ impl AcpThreadView {
|
||||
});
|
||||
}
|
||||
|
||||
let has_commands = !available_commands.is_empty();
|
||||
self.available_commands.replace(available_commands);
|
||||
|
||||
let new_placeholder = placeholder_text(self.agent.name().as_ref(), has_commands);
|
||||
|
||||
self.message_editor.update(cx, |editor, cx| {
|
||||
editor.set_placeholder_text(&new_placeholder, window, cx);
|
||||
});
|
||||
}
|
||||
AcpThreadEvent::ModeUpdated(_mode) => {
|
||||
// The connection keeps track of the mode
|
||||
@@ -3144,7 +3147,7 @@ impl AcpThreadView {
|
||||
.text_ui_sm(cx)
|
||||
.h_full()
|
||||
.children(terminal_view.map(|terminal_view| {
|
||||
if terminal_view
|
||||
let element = if terminal_view
|
||||
.read(cx)
|
||||
.content_mode(window, cx)
|
||||
.is_scrollable()
|
||||
@@ -3152,7 +3155,15 @@ impl AcpThreadView {
|
||||
div().h_72().child(terminal_view).into_any_element()
|
||||
} else {
|
||||
terminal_view.into_any_element()
|
||||
}
|
||||
};
|
||||
|
||||
div()
|
||||
.on_action(cx.listener(|_this, _: &NewTerminal, window, cx| {
|
||||
window.dispatch_action(NewThread.boxed_clone(), cx);
|
||||
cx.stop_propagation();
|
||||
}))
|
||||
.child(element)
|
||||
.into_any_element()
|
||||
})),
|
||||
)
|
||||
})
|
||||
@@ -4192,6 +4203,8 @@ impl AcpThreadView {
|
||||
.justify_between()
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_0p5()
|
||||
.child(self.render_add_context_button(cx))
|
||||
.child(self.render_follow_toggle(cx))
|
||||
.children(self.render_burn_mode_toggle(cx)),
|
||||
)
|
||||
@@ -4506,6 +4519,29 @@ impl AcpThreadView {
|
||||
}))
|
||||
}
|
||||
|
||||
fn render_add_context_button(&self, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
let message_editor = self.message_editor.clone();
|
||||
let menu_visible = message_editor.read(cx).is_completions_menu_visible(cx);
|
||||
|
||||
IconButton::new("add-context", IconName::AtSign)
|
||||
.icon_size(IconSize::Small)
|
||||
.icon_color(Color::Muted)
|
||||
.when(!menu_visible, |this| {
|
||||
this.tooltip(move |_window, cx| {
|
||||
Tooltip::with_meta("Add Context", None, "Or type @ to include context", cx)
|
||||
})
|
||||
})
|
||||
.on_click(cx.listener(move |_this, _, window, cx| {
|
||||
let message_editor_clone = message_editor.clone();
|
||||
|
||||
window.defer(cx, move |window, cx| {
|
||||
message_editor_clone.update(cx, |message_editor, cx| {
|
||||
message_editor.trigger_completion_menu(window, cx);
|
||||
});
|
||||
});
|
||||
}))
|
||||
}
|
||||
|
||||
fn render_markdown(&self, markdown: Entity<Markdown>, style: MarkdownStyle) -> MarkdownElement {
|
||||
let workspace = self.workspace.clone();
|
||||
MarkdownElement::new(markdown, style).on_url_click(move |text, window, cx| {
|
||||
@@ -5323,9 +5359,9 @@ impl AcpThreadView {
|
||||
}
|
||||
}
|
||||
|
||||
fn render_thread_error(&self, cx: &mut Context<Self>) -> Option<Div> {
|
||||
fn render_thread_error(&mut self, window: &mut Window, cx: &mut Context<Self>) -> Option<Div> {
|
||||
let content = match self.thread_error.as_ref()? {
|
||||
ThreadError::Other(error) => self.render_any_thread_error(error.clone(), cx),
|
||||
ThreadError::Other(error) => self.render_any_thread_error(error.clone(), window, cx),
|
||||
ThreadError::Refusal => self.render_refusal_error(cx),
|
||||
ThreadError::AuthenticationRequired(error) => {
|
||||
self.render_authentication_required_error(error.clone(), cx)
|
||||
@@ -5372,6 +5408,16 @@ impl AcpThreadView {
|
||||
)
|
||||
}
|
||||
|
||||
fn current_mode_id(&self, cx: &App) -> Option<Arc<str>> {
|
||||
if let Some(thread) = self.as_native_thread(cx) {
|
||||
Some(thread.read(cx).profile().0.clone())
|
||||
} else if let Some(mode_selector) = self.mode_selector() {
|
||||
Some(mode_selector.read(cx).mode().0)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
fn current_model_id(&self, cx: &App) -> Option<String> {
|
||||
self.model_selector
|
||||
.as_ref()
|
||||
@@ -5410,7 +5456,12 @@ impl AcpThreadView {
|
||||
.dismiss_action(self.dismiss_error_button(cx))
|
||||
}
|
||||
|
||||
fn render_any_thread_error(&self, error: SharedString, cx: &mut Context<'_, Self>) -> Callout {
|
||||
fn render_any_thread_error(
|
||||
&mut self,
|
||||
error: SharedString,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<'_, Self>,
|
||||
) -> Callout {
|
||||
let can_resume = self
|
||||
.thread()
|
||||
.map_or(false, |thread| thread.read(cx).can_resume(cx));
|
||||
@@ -5423,11 +5474,24 @@ impl AcpThreadView {
|
||||
supports_burn_mode && thread.completion_mode() == CompletionMode::Normal
|
||||
});
|
||||
|
||||
let markdown = if let Some(markdown) = &self.thread_error_markdown {
|
||||
markdown.clone()
|
||||
} else {
|
||||
let markdown = cx.new(|cx| Markdown::new(error.clone(), None, None, cx));
|
||||
self.thread_error_markdown = Some(markdown.clone());
|
||||
markdown
|
||||
};
|
||||
|
||||
let markdown_style = default_markdown_style(false, true, window, cx);
|
||||
let description = self
|
||||
.render_markdown(markdown, markdown_style)
|
||||
.into_any_element();
|
||||
|
||||
Callout::new()
|
||||
.severity(Severity::Error)
|
||||
.title("Error")
|
||||
.icon(IconName::XCircle)
|
||||
.description(error.clone())
|
||||
.title("An Error Happened")
|
||||
.description_slot(description)
|
||||
.actions_slot(
|
||||
h_flex()
|
||||
.gap_0p5()
|
||||
@@ -5446,11 +5510,9 @@ impl AcpThreadView {
|
||||
})
|
||||
.when(can_resume, |this| {
|
||||
this.child(
|
||||
Button::new("retry", "Retry")
|
||||
.icon(IconName::RotateCw)
|
||||
.icon_position(IconPosition::Start)
|
||||
IconButton::new("retry", IconName::RotateCw)
|
||||
.icon_size(IconSize::Small)
|
||||
.label_size(LabelSize::Small)
|
||||
.tooltip(Tooltip::text("Retry Generation"))
|
||||
.on_click(cx.listener(|this, _, _window, cx| {
|
||||
this.resume_chat(cx);
|
||||
})),
|
||||
@@ -5592,7 +5654,6 @@ impl AcpThreadView {
|
||||
|
||||
IconButton::new("copy", IconName::Copy)
|
||||
.icon_size(IconSize::Small)
|
||||
.icon_color(Color::Muted)
|
||||
.tooltip(Tooltip::text("Copy Error Message"))
|
||||
.on_click(move |_, _, cx| {
|
||||
cx.write_to_clipboard(ClipboardItem::new_string(message.clone()))
|
||||
@@ -5602,7 +5663,6 @@ impl AcpThreadView {
|
||||
fn dismiss_error_button(&self, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
IconButton::new("dismiss", IconName::Close)
|
||||
.icon_size(IconSize::Small)
|
||||
.icon_color(Color::Muted)
|
||||
.tooltip(Tooltip::text("Dismiss Error"))
|
||||
.on_click(cx.listener({
|
||||
move |this, _, _, cx| {
|
||||
@@ -5707,6 +5767,19 @@ fn loading_contents_spinner(size: IconSize) -> AnyElement {
|
||||
.into_any_element()
|
||||
}
|
||||
|
||||
fn placeholder_text(agent_name: &str, has_commands: bool) -> String {
|
||||
if agent_name == "Zed Agent" {
|
||||
format!("Message the {} — @ to include context", agent_name)
|
||||
} else if has_commands {
|
||||
format!(
|
||||
"Message {} — @ to include context, / for commands",
|
||||
agent_name
|
||||
)
|
||||
} else {
|
||||
format!("Message {} — @ to include context", agent_name)
|
||||
}
|
||||
}
|
||||
|
||||
impl Focusable for AcpThreadView {
|
||||
fn focus_handle(&self, cx: &App) -> FocusHandle {
|
||||
match self.thread_state {
|
||||
@@ -5807,7 +5880,7 @@ impl Render for AcpThreadView {
|
||||
None
|
||||
}
|
||||
})
|
||||
.children(self.render_thread_error(cx))
|
||||
.children(self.render_thread_error(window, cx))
|
||||
.when_some(
|
||||
self.new_server_version_available.as_ref().filter(|_| {
|
||||
!has_messages || !matches!(self.thread_state, ThreadState::Ready { .. })
|
||||
@@ -5873,7 +5946,6 @@ fn default_markdown_style(
|
||||
syntax: cx.theme().syntax().clone(),
|
||||
selection_background_color: colors.element_selection_background,
|
||||
code_block_overflow_x_scroll: true,
|
||||
table_overflow_x_scroll: true,
|
||||
heading_level_styles: Some(HeadingLevelStyles {
|
||||
h1: Some(TextStyleRefinement {
|
||||
font_size: Some(rems(1.15).into()),
|
||||
@@ -5941,6 +6013,7 @@ fn default_markdown_style(
|
||||
},
|
||||
link: TextStyleRefinement {
|
||||
background_color: Some(colors.editor_foreground.opacity(0.025)),
|
||||
color: Some(colors.text_accent),
|
||||
underline: Some(UnderlineStyle {
|
||||
color: Some(colors.text_accent.opacity(0.5)),
|
||||
thickness: px(1.),
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
mod add_llm_provider_modal;
|
||||
mod configure_context_server_modal;
|
||||
pub mod configure_context_server_modal;
|
||||
mod configure_context_server_tools_modal;
|
||||
mod manage_profiles_modal;
|
||||
mod tool_picker;
|
||||
@@ -8,6 +8,7 @@ use std::{ops::Range, sync::Arc};
|
||||
|
||||
use agent::ContextServerRegistry;
|
||||
use anyhow::Result;
|
||||
use client::zed_urls;
|
||||
use cloud_llm_client::{Plan, PlanV1, PlanV2};
|
||||
use collections::HashMap;
|
||||
use context_server::ContextServerId;
|
||||
@@ -26,26 +27,27 @@ use language_model::{
|
||||
use language_models::AllLanguageModelSettings;
|
||||
use notifications::status_toast::{StatusToast, ToastIcon};
|
||||
use project::{
|
||||
agent_server_store::{AgentServerStore, CLAUDE_CODE_NAME, CODEX_NAME, GEMINI_NAME},
|
||||
agent_server_store::{
|
||||
AgentServerStore, CLAUDE_CODE_NAME, CODEX_NAME, ExternalAgentServerName, GEMINI_NAME,
|
||||
},
|
||||
context_server_store::{ContextServerConfiguration, ContextServerStatus, ContextServerStore},
|
||||
};
|
||||
use settings::{Settings, SettingsStore, update_settings_file};
|
||||
use ui::{
|
||||
Button, ButtonStyle, Chip, CommonAnimationExt, ContextMenu, Disclosure, Divider, DividerColor,
|
||||
ElevationIndex, IconName, IconPosition, IconSize, Indicator, LabelSize, PopoverMenu, Switch,
|
||||
SwitchColor, Tooltip, WithScrollbar, prelude::*,
|
||||
Button, ButtonStyle, Chip, CommonAnimationExt, ContextMenu, ContextMenuEntry, Disclosure,
|
||||
Divider, DividerColor, ElevationIndex, IconName, IconPosition, IconSize, Indicator, LabelSize,
|
||||
PopoverMenu, Switch, SwitchColor, Tooltip, WithScrollbar, prelude::*,
|
||||
};
|
||||
use util::ResultExt as _;
|
||||
use workspace::{Workspace, create_and_open_local_file};
|
||||
use zed_actions::ExtensionCategoryFilter;
|
||||
use zed_actions::{ExtensionCategoryFilter, OpenBrowser};
|
||||
|
||||
pub(crate) use configure_context_server_modal::ConfigureContextServerModal;
|
||||
pub(crate) use configure_context_server_tools_modal::ConfigureContextServerToolsModal;
|
||||
pub(crate) use manage_profiles_modal::ManageProfilesModal;
|
||||
|
||||
use crate::{
|
||||
AddContextServer,
|
||||
agent_configuration::add_llm_provider_modal::{AddLlmProviderModal, LlmCompatibleProvider},
|
||||
use crate::agent_configuration::add_llm_provider_modal::{
|
||||
AddLlmProviderModal, LlmCompatibleProvider,
|
||||
};
|
||||
|
||||
pub struct AgentConfiguration {
|
||||
@@ -415,6 +417,7 @@ impl AgentConfiguration {
|
||||
cx: &mut Context<Self>,
|
||||
) -> impl IntoElement {
|
||||
let providers = LanguageModelRegistry::read_global(cx).providers();
|
||||
|
||||
let popover_menu = PopoverMenu::new("add-provider-popover")
|
||||
.trigger(
|
||||
Button::new("add-provider", "Add Provider")
|
||||
@@ -425,7 +428,6 @@ impl AgentConfiguration {
|
||||
.icon_color(Color::Muted)
|
||||
.label_size(LabelSize::Small),
|
||||
)
|
||||
.anchor(gpui::Corner::TopRight)
|
||||
.menu({
|
||||
let workspace = self.workspace.clone();
|
||||
move |window, cx| {
|
||||
@@ -447,6 +449,11 @@ impl AgentConfiguration {
|
||||
})
|
||||
}))
|
||||
}
|
||||
})
|
||||
.anchor(gpui::Corner::TopRight)
|
||||
.offset(gpui::Point {
|
||||
x: px(0.0),
|
||||
y: px(2.0),
|
||||
});
|
||||
|
||||
v_flex()
|
||||
@@ -541,12 +548,13 @@ impl AgentConfiguration {
|
||||
.icon_color(Color::Muted)
|
||||
.label_size(LabelSize::Small),
|
||||
)
|
||||
.anchor(gpui::Corner::TopRight)
|
||||
.menu({
|
||||
move |window, cx| {
|
||||
Some(ContextMenu::build(window, cx, |menu, _window, _cx| {
|
||||
menu.entry("Add Custom Server", None, {
|
||||
|window, cx| window.dispatch_action(AddContextServer.boxed_clone(), cx)
|
||||
|window, cx| {
|
||||
window.dispatch_action(crate::AddContextServer.boxed_clone(), cx)
|
||||
}
|
||||
})
|
||||
.entry("Install from Extensions", None, {
|
||||
|window, cx| {
|
||||
@@ -564,6 +572,11 @@ impl AgentConfiguration {
|
||||
})
|
||||
}))
|
||||
}
|
||||
})
|
||||
.anchor(gpui::Corner::TopRight)
|
||||
.offset(gpui::Point {
|
||||
x: px(0.0),
|
||||
y: px(2.0),
|
||||
});
|
||||
|
||||
v_flex()
|
||||
@@ -638,15 +651,13 @@ impl AgentConfiguration {
|
||||
|
||||
let is_running = matches!(server_status, ContextServerStatus::Running);
|
||||
let item_id = SharedString::from(context_server_id.0.clone());
|
||||
let is_from_extension = server_configuration
|
||||
.as_ref()
|
||||
.map(|config| {
|
||||
matches!(
|
||||
config.as_ref(),
|
||||
ContextServerConfiguration::Extension { .. }
|
||||
)
|
||||
})
|
||||
.unwrap_or(false);
|
||||
// Servers without a configuration can only be provided by extensions.
|
||||
let provided_by_extension = server_configuration.as_ref().is_none_or(|config| {
|
||||
matches!(
|
||||
config.as_ref(),
|
||||
ContextServerConfiguration::Extension { .. }
|
||||
)
|
||||
});
|
||||
|
||||
let error = if let ContextServerStatus::Error(error) = server_status.clone() {
|
||||
Some(error)
|
||||
@@ -660,7 +671,7 @@ impl AgentConfiguration {
|
||||
.tools_for_server(&context_server_id)
|
||||
.count();
|
||||
|
||||
let (source_icon, source_tooltip) = if is_from_extension {
|
||||
let (source_icon, source_tooltip) = if provided_by_extension {
|
||||
(
|
||||
IconName::ZedSrcExtension,
|
||||
"This MCP server was installed from an extension.",
|
||||
@@ -697,7 +708,10 @@ impl AgentConfiguration {
|
||||
"Server is stopped.",
|
||||
),
|
||||
};
|
||||
|
||||
let is_remote = server_configuration
|
||||
.as_ref()
|
||||
.map(|config| matches!(config.as_ref(), ContextServerConfiguration::Http { .. }))
|
||||
.unwrap_or(false);
|
||||
let context_server_configuration_menu = PopoverMenu::new("context-server-config-menu")
|
||||
.trigger_with_tooltip(
|
||||
IconButton::new("context-server-config-menu", IconName::Settings)
|
||||
@@ -710,7 +724,6 @@ impl AgentConfiguration {
|
||||
let fs = self.fs.clone();
|
||||
let context_server_id = context_server_id.clone();
|
||||
let language_registry = self.language_registry.clone();
|
||||
let context_server_store = self.context_server_store.clone();
|
||||
let workspace = self.workspace.clone();
|
||||
let context_server_registry = self.context_server_registry.clone();
|
||||
|
||||
@@ -721,14 +734,25 @@ impl AgentConfiguration {
|
||||
let language_registry = language_registry.clone();
|
||||
let workspace = workspace.clone();
|
||||
move |window, cx| {
|
||||
ConfigureContextServerModal::show_modal_for_existing_server(
|
||||
context_server_id.clone(),
|
||||
language_registry.clone(),
|
||||
workspace.clone(),
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
.detach_and_log_err(cx);
|
||||
if is_remote {
|
||||
crate::agent_configuration::configure_context_server_modal::ConfigureContextServerModal::show_modal_for_existing_server(
|
||||
context_server_id.clone(),
|
||||
language_registry.clone(),
|
||||
workspace.clone(),
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
.detach();
|
||||
} else {
|
||||
ConfigureContextServerModal::show_modal_for_existing_server(
|
||||
context_server_id.clone(),
|
||||
language_registry.clone(),
|
||||
workspace.clone(),
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
.detach();
|
||||
}
|
||||
}
|
||||
}).when(tool_count > 0, |this| this.entry("View Tools", None, {
|
||||
let context_server_id = context_server_id.clone();
|
||||
@@ -752,23 +776,10 @@ impl AgentConfiguration {
|
||||
.entry("Uninstall", None, {
|
||||
let fs = fs.clone();
|
||||
let context_server_id = context_server_id.clone();
|
||||
let context_server_store = context_server_store.clone();
|
||||
let workspace = workspace.clone();
|
||||
move |_, cx| {
|
||||
let is_provided_by_extension = context_server_store
|
||||
.read(cx)
|
||||
.configuration_for_server(&context_server_id)
|
||||
.as_ref()
|
||||
.map(|config| {
|
||||
matches!(
|
||||
config.as_ref(),
|
||||
ContextServerConfiguration::Extension { .. }
|
||||
)
|
||||
})
|
||||
.unwrap_or(false);
|
||||
|
||||
let uninstall_extension_task = match (
|
||||
is_provided_by_extension,
|
||||
provided_by_extension,
|
||||
resolve_extension_for_context_server(&context_server_id, cx),
|
||||
) {
|
||||
(true, Some((id, manifest))) => {
|
||||
@@ -959,7 +970,7 @@ impl AgentConfiguration {
|
||||
.cloned()
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let user_defined_agents = user_defined_agents
|
||||
let user_defined_agents: Vec<_> = user_defined_agents
|
||||
.into_iter()
|
||||
.map(|name| {
|
||||
let icon = if let Some(icon_path) = agent_server_store.agent_icon(&name) {
|
||||
@@ -967,27 +978,93 @@ impl AgentConfiguration {
|
||||
} else {
|
||||
AgentIcon::Name(IconName::Ai)
|
||||
};
|
||||
self.render_agent_server(icon, name, true)
|
||||
.into_any_element()
|
||||
(name, icon)
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
.collect();
|
||||
|
||||
let add_agens_button = Button::new("add-agent", "Add Agent")
|
||||
.style(ButtonStyle::Outlined)
|
||||
.icon_position(IconPosition::Start)
|
||||
.icon(IconName::Plus)
|
||||
.icon_size(IconSize::Small)
|
||||
.icon_color(Color::Muted)
|
||||
.label_size(LabelSize::Small)
|
||||
.on_click(move |_, window, cx| {
|
||||
if let Some(workspace) = window.root().flatten() {
|
||||
let workspace = workspace.downgrade();
|
||||
window
|
||||
.spawn(cx, async |cx| {
|
||||
open_new_agent_servers_entry_in_settings_editor(workspace, cx).await
|
||||
let add_agent_popover = PopoverMenu::new("add-agent-server-popover")
|
||||
.trigger(
|
||||
Button::new("add-agent", "Add Agent")
|
||||
.style(ButtonStyle::Outlined)
|
||||
.icon_position(IconPosition::Start)
|
||||
.icon(IconName::Plus)
|
||||
.icon_size(IconSize::Small)
|
||||
.icon_color(Color::Muted)
|
||||
.label_size(LabelSize::Small),
|
||||
)
|
||||
.menu({
|
||||
move |window, cx| {
|
||||
Some(ContextMenu::build(window, cx, |menu, _window, _cx| {
|
||||
menu.entry("Install from Extensions", None, {
|
||||
|window, cx| {
|
||||
window.dispatch_action(
|
||||
zed_actions::Extensions {
|
||||
category_filter: Some(
|
||||
ExtensionCategoryFilter::AgentServers,
|
||||
),
|
||||
id: None,
|
||||
}
|
||||
.boxed_clone(),
|
||||
cx,
|
||||
)
|
||||
}
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
.entry("Add Custom Agent", None, {
|
||||
move |window, cx| {
|
||||
if let Some(workspace) = window.root().flatten() {
|
||||
let workspace = workspace.downgrade();
|
||||
window
|
||||
.spawn(cx, async |cx| {
|
||||
open_new_agent_servers_entry_in_settings_editor(
|
||||
workspace, cx,
|
||||
)
|
||||
.await
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
}
|
||||
}
|
||||
})
|
||||
.separator()
|
||||
.header("Learn More")
|
||||
.item(
|
||||
ContextMenuEntry::new("Agent Servers Docs")
|
||||
.icon(IconName::ArrowUpRight)
|
||||
.icon_color(Color::Muted)
|
||||
.icon_position(IconPosition::End)
|
||||
.handler({
|
||||
move |window, cx| {
|
||||
window.dispatch_action(
|
||||
Box::new(OpenBrowser {
|
||||
url: zed_urls::agent_server_docs(cx),
|
||||
}),
|
||||
cx,
|
||||
);
|
||||
}
|
||||
}),
|
||||
)
|
||||
.item(
|
||||
ContextMenuEntry::new("ACP Docs")
|
||||
.icon(IconName::ArrowUpRight)
|
||||
.icon_color(Color::Muted)
|
||||
.icon_position(IconPosition::End)
|
||||
.handler({
|
||||
move |window, cx| {
|
||||
window.dispatch_action(
|
||||
Box::new(OpenBrowser {
|
||||
url: "https://agentclientprotocol.com/".into(),
|
||||
}),
|
||||
cx,
|
||||
);
|
||||
}
|
||||
}),
|
||||
)
|
||||
}))
|
||||
}
|
||||
})
|
||||
.anchor(gpui::Corner::TopRight)
|
||||
.offset(gpui::Point {
|
||||
x: px(0.0),
|
||||
y: px(2.0),
|
||||
});
|
||||
|
||||
v_flex()
|
||||
@@ -998,7 +1075,7 @@ impl AgentConfiguration {
|
||||
.child(self.render_section_title(
|
||||
"External Agents",
|
||||
"All agents connected through the Agent Client Protocol.",
|
||||
add_agens_button.into_any_element(),
|
||||
add_agent_popover.into_any_element(),
|
||||
))
|
||||
.child(
|
||||
v_flex()
|
||||
@@ -1009,26 +1086,29 @@ impl AgentConfiguration {
|
||||
AgentIcon::Name(IconName::AiClaude),
|
||||
"Claude Code",
|
||||
false,
|
||||
cx,
|
||||
))
|
||||
.child(Divider::horizontal().color(DividerColor::BorderFaded))
|
||||
.child(self.render_agent_server(
|
||||
AgentIcon::Name(IconName::AiOpenAi),
|
||||
"Codex CLI",
|
||||
false,
|
||||
cx,
|
||||
))
|
||||
.child(Divider::horizontal().color(DividerColor::BorderFaded))
|
||||
.child(self.render_agent_server(
|
||||
AgentIcon::Name(IconName::AiGemini),
|
||||
"Gemini CLI",
|
||||
false,
|
||||
cx,
|
||||
))
|
||||
.map(|mut parent| {
|
||||
for agent in user_defined_agents {
|
||||
for (name, icon) in user_defined_agents {
|
||||
parent = parent
|
||||
.child(
|
||||
Divider::horizontal().color(DividerColor::BorderFaded),
|
||||
)
|
||||
.child(agent);
|
||||
.child(self.render_agent_server(icon, name, true, cx));
|
||||
}
|
||||
parent
|
||||
}),
|
||||
@@ -1041,6 +1121,7 @@ impl AgentConfiguration {
|
||||
icon: AgentIcon,
|
||||
name: impl Into<SharedString>,
|
||||
external: bool,
|
||||
cx: &mut Context<Self>,
|
||||
) -> impl IntoElement {
|
||||
let name = name.into();
|
||||
let icon = match icon {
|
||||
@@ -1055,28 +1136,53 @@ impl AgentConfiguration {
|
||||
let tooltip_id = SharedString::new(format!("agent-source-{}", name));
|
||||
let tooltip_message = format!("The {} agent was installed from an extension.", name);
|
||||
|
||||
let agent_server_name = ExternalAgentServerName(name.clone());
|
||||
|
||||
let uninstall_btn_id = SharedString::from(format!("uninstall-{}", name));
|
||||
let uninstall_button = IconButton::new(uninstall_btn_id, IconName::Trash)
|
||||
.icon_color(Color::Muted)
|
||||
.icon_size(IconSize::Small)
|
||||
.tooltip(Tooltip::text("Uninstall Agent Extension"))
|
||||
.on_click(cx.listener(move |this, _, _window, cx| {
|
||||
let agent_name = agent_server_name.clone();
|
||||
|
||||
if let Some(ext_id) = this.agent_server_store.update(cx, |store, _cx| {
|
||||
store.get_extension_id_for_agent(&agent_name)
|
||||
}) {
|
||||
ExtensionStore::global(cx)
|
||||
.update(cx, |store, cx| store.uninstall_extension(ext_id, cx))
|
||||
.detach_and_log_err(cx);
|
||||
}
|
||||
}));
|
||||
|
||||
h_flex()
|
||||
.gap_1p5()
|
||||
.child(icon)
|
||||
.child(Label::new(name))
|
||||
.when(external, |this| {
|
||||
this.child(
|
||||
div()
|
||||
.id(tooltip_id)
|
||||
.flex_none()
|
||||
.tooltip(Tooltip::text(tooltip_message))
|
||||
.child(
|
||||
Icon::new(IconName::ZedSrcExtension)
|
||||
.size(IconSize::Small)
|
||||
.color(Color::Muted),
|
||||
),
|
||||
)
|
||||
})
|
||||
.gap_1()
|
||||
.justify_between()
|
||||
.child(
|
||||
Icon::new(IconName::Check)
|
||||
.color(Color::Success)
|
||||
.size(IconSize::Small),
|
||||
h_flex()
|
||||
.gap_1p5()
|
||||
.child(icon)
|
||||
.child(Label::new(name))
|
||||
.when(external, |this| {
|
||||
this.child(
|
||||
div()
|
||||
.id(tooltip_id)
|
||||
.flex_none()
|
||||
.tooltip(Tooltip::text(tooltip_message))
|
||||
.child(
|
||||
Icon::new(IconName::ZedSrcExtension)
|
||||
.size(IconSize::Small)
|
||||
.color(Color::Muted),
|
||||
),
|
||||
)
|
||||
})
|
||||
.child(
|
||||
Icon::new(IconName::Check)
|
||||
.color(Color::Success)
|
||||
.size(IconSize::Small),
|
||||
),
|
||||
)
|
||||
.when(external, |this| this.child(uninstall_button))
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -3,16 +3,42 @@ use std::sync::Arc;
|
||||
use anyhow::Result;
|
||||
use collections::HashSet;
|
||||
use fs::Fs;
|
||||
use gpui::{DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Render, Task};
|
||||
use gpui::{
|
||||
DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Render, ScrollHandle, Task,
|
||||
};
|
||||
use language_model::LanguageModelRegistry;
|
||||
use language_models::provider::open_ai_compatible::{AvailableModel, ModelCapabilities};
|
||||
use settings::{OpenAiCompatibleSettingsContent, update_settings_file};
|
||||
use ui::{
|
||||
Banner, Checkbox, KeyBinding, Modal, ModalFooter, ModalHeader, Section, ToggleState, prelude::*,
|
||||
Banner, Checkbox, KeyBinding, Modal, ModalFooter, ModalHeader, Section, ToggleState,
|
||||
WithScrollbar, prelude::*,
|
||||
};
|
||||
use ui_input::InputField;
|
||||
use workspace::{ModalView, Workspace};
|
||||
|
||||
fn single_line_input(
|
||||
label: impl Into<SharedString>,
|
||||
placeholder: impl Into<SharedString>,
|
||||
text: Option<&str>,
|
||||
tab_index: isize,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) -> Entity<InputField> {
|
||||
cx.new(|cx| {
|
||||
let input = InputField::new(window, cx, placeholder)
|
||||
.label(label)
|
||||
.tab_index(tab_index)
|
||||
.tab_stop(true);
|
||||
|
||||
if let Some(text) = text {
|
||||
input
|
||||
.editor()
|
||||
.update(cx, |editor, cx| editor.set_text(text, window, cx));
|
||||
}
|
||||
input
|
||||
})
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy)]
|
||||
pub enum LlmCompatibleProvider {
|
||||
OpenAi,
|
||||
@@ -41,12 +67,14 @@ struct AddLlmProviderInput {
|
||||
|
||||
impl AddLlmProviderInput {
|
||||
fn new(provider: LlmCompatibleProvider, window: &mut Window, cx: &mut App) -> Self {
|
||||
let provider_name = single_line_input("Provider Name", provider.name(), None, window, cx);
|
||||
let api_url = single_line_input("API URL", provider.api_url(), None, window, cx);
|
||||
let provider_name =
|
||||
single_line_input("Provider Name", provider.name(), None, 1, window, cx);
|
||||
let api_url = single_line_input("API URL", provider.api_url(), None, 2, window, cx);
|
||||
let api_key = single_line_input(
|
||||
"API Key",
|
||||
"000000000000000000000000000000000000000000000000",
|
||||
None,
|
||||
3,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
@@ -55,12 +83,13 @@ impl AddLlmProviderInput {
|
||||
provider_name,
|
||||
api_url,
|
||||
api_key,
|
||||
models: vec![ModelInput::new(window, cx)],
|
||||
models: vec![ModelInput::new(0, window, cx)],
|
||||
}
|
||||
}
|
||||
|
||||
fn add_model(&mut self, window: &mut Window, cx: &mut App) {
|
||||
self.models.push(ModelInput::new(window, cx));
|
||||
let model_index = self.models.len();
|
||||
self.models.push(ModelInput::new(model_index, window, cx));
|
||||
}
|
||||
|
||||
fn remove_model(&mut self, index: usize) {
|
||||
@@ -84,11 +113,14 @@ struct ModelInput {
|
||||
}
|
||||
|
||||
impl ModelInput {
|
||||
fn new(window: &mut Window, cx: &mut App) -> Self {
|
||||
fn new(model_index: usize, window: &mut Window, cx: &mut App) -> Self {
|
||||
let base_tab_index = (3 + (model_index * 4)) as isize;
|
||||
|
||||
let model_name = single_line_input(
|
||||
"Model Name",
|
||||
"e.g. gpt-4o, claude-opus-4, gemini-2.5-pro",
|
||||
None,
|
||||
base_tab_index + 1,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
@@ -96,6 +128,7 @@ impl ModelInput {
|
||||
"Max Completion Tokens",
|
||||
"200000",
|
||||
Some("200000"),
|
||||
base_tab_index + 2,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
@@ -103,16 +136,26 @@ impl ModelInput {
|
||||
"Max Output Tokens",
|
||||
"Max Output Tokens",
|
||||
Some("32000"),
|
||||
base_tab_index + 3,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
let max_tokens = single_line_input("Max Tokens", "Max Tokens", Some("200000"), window, cx);
|
||||
let max_tokens = single_line_input(
|
||||
"Max Tokens",
|
||||
"Max Tokens",
|
||||
Some("200000"),
|
||||
base_tab_index + 4,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
|
||||
let ModelCapabilities {
|
||||
tools,
|
||||
images,
|
||||
parallel_tool_calls,
|
||||
prompt_cache_key,
|
||||
} = ModelCapabilities::default();
|
||||
|
||||
Self {
|
||||
name: model_name,
|
||||
max_completion_tokens,
|
||||
@@ -165,24 +208,6 @@ impl ModelInput {
|
||||
}
|
||||
}
|
||||
|
||||
fn single_line_input(
|
||||
label: impl Into<SharedString>,
|
||||
placeholder: impl Into<SharedString>,
|
||||
text: Option<&str>,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) -> Entity<InputField> {
|
||||
cx.new(|cx| {
|
||||
let input = InputField::new(window, cx, placeholder).label(label);
|
||||
if let Some(text) = text {
|
||||
input
|
||||
.editor()
|
||||
.update(cx, |editor, cx| editor.set_text(text, window, cx));
|
||||
}
|
||||
input
|
||||
})
|
||||
}
|
||||
|
||||
fn save_provider_to_settings(
|
||||
input: &AddLlmProviderInput,
|
||||
cx: &mut App,
|
||||
@@ -258,6 +283,7 @@ fn save_provider_to_settings(
|
||||
pub struct AddLlmProviderModal {
|
||||
provider: LlmCompatibleProvider,
|
||||
input: AddLlmProviderInput,
|
||||
scroll_handle: ScrollHandle,
|
||||
focus_handle: FocusHandle,
|
||||
last_error: Option<SharedString>,
|
||||
}
|
||||
@@ -278,6 +304,7 @@ impl AddLlmProviderModal {
|
||||
provider,
|
||||
last_error: None,
|
||||
focus_handle: cx.focus_handle(),
|
||||
scroll_handle: ScrollHandle::new(),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -418,6 +445,19 @@ impl AddLlmProviderModal {
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
fn on_tab(&mut self, _: &menu::SelectNext, window: &mut Window, _: &mut Context<Self>) {
|
||||
window.focus_next();
|
||||
}
|
||||
|
||||
fn on_tab_prev(
|
||||
&mut self,
|
||||
_: &menu::SelectPrevious,
|
||||
window: &mut Window,
|
||||
_: &mut Context<Self>,
|
||||
) {
|
||||
window.focus_prev();
|
||||
}
|
||||
}
|
||||
|
||||
impl EventEmitter<DismissEvent> for AddLlmProviderModal {}
|
||||
@@ -431,15 +471,27 @@ impl Focusable for AddLlmProviderModal {
|
||||
impl ModalView for AddLlmProviderModal {}
|
||||
|
||||
impl Render for AddLlmProviderModal {
|
||||
fn render(&mut self, _window: &mut ui::Window, cx: &mut ui::Context<Self>) -> impl IntoElement {
|
||||
fn render(&mut self, window: &mut ui::Window, cx: &mut ui::Context<Self>) -> impl IntoElement {
|
||||
let focus_handle = self.focus_handle(cx);
|
||||
|
||||
div()
|
||||
let window_size = window.viewport_size();
|
||||
let rem_size = window.rem_size();
|
||||
let is_large_window = window_size.height / rem_size > rems_from_px(600.).0;
|
||||
|
||||
let modal_max_height = if is_large_window {
|
||||
rems_from_px(450.)
|
||||
} else {
|
||||
rems_from_px(200.)
|
||||
};
|
||||
|
||||
v_flex()
|
||||
.id("add-llm-provider-modal")
|
||||
.key_context("AddLlmProviderModal")
|
||||
.w(rems(34.))
|
||||
.elevation_3(cx)
|
||||
.on_action(cx.listener(Self::cancel))
|
||||
.on_action(cx.listener(Self::on_tab))
|
||||
.on_action(cx.listener(Self::on_tab_prev))
|
||||
.capture_any_mouse_down(cx.listener(|this, _, window, cx| {
|
||||
this.focus_handle(cx).focus(window);
|
||||
}))
|
||||
@@ -462,17 +514,25 @@ impl Render for AddLlmProviderModal {
|
||||
)
|
||||
})
|
||||
.child(
|
||||
v_flex()
|
||||
.id("modal_content")
|
||||
div()
|
||||
.size_full()
|
||||
.max_h_128()
|
||||
.overflow_y_scroll()
|
||||
.px(DynamicSpacing::Base12.rems(cx))
|
||||
.gap(DynamicSpacing::Base04.rems(cx))
|
||||
.child(self.input.provider_name.clone())
|
||||
.child(self.input.api_url.clone())
|
||||
.child(self.input.api_key.clone())
|
||||
.child(self.render_model_section(cx)),
|
||||
.vertical_scrollbar_for(self.scroll_handle.clone(), window, cx)
|
||||
.child(
|
||||
v_flex()
|
||||
.id("modal_content")
|
||||
.size_full()
|
||||
.tab_group()
|
||||
.max_h(modal_max_height)
|
||||
.pl_3()
|
||||
.pr_4()
|
||||
.gap_2()
|
||||
.overflow_y_scroll()
|
||||
.track_scroll(&self.scroll_handle)
|
||||
.child(self.input.provider_name.clone())
|
||||
.child(self.input.api_url.clone())
|
||||
.child(self.input.api_key.clone())
|
||||
.child(self.render_model_section(cx)),
|
||||
),
|
||||
)
|
||||
.footer(
|
||||
ModalFooter::new().end_slot(
|
||||
@@ -642,7 +702,7 @@ mod tests {
|
||||
let cx = setup_test(cx).await;
|
||||
|
||||
cx.update(|window, cx| {
|
||||
let model_input = ModelInput::new(window, cx);
|
||||
let model_input = ModelInput::new(0, window, cx);
|
||||
model_input.name.update(cx, |input, cx| {
|
||||
input.editor().update(cx, |editor, cx| {
|
||||
editor.set_text("somemodel", window, cx);
|
||||
@@ -678,7 +738,7 @@ mod tests {
|
||||
let cx = setup_test(cx).await;
|
||||
|
||||
cx.update(|window, cx| {
|
||||
let mut model_input = ModelInput::new(window, cx);
|
||||
let mut model_input = ModelInput::new(0, window, cx);
|
||||
model_input.name.update(cx, |input, cx| {
|
||||
input.editor().update(cx, |editor, cx| {
|
||||
editor.set_text("somemodel", window, cx);
|
||||
@@ -703,7 +763,7 @@ mod tests {
|
||||
let cx = setup_test(cx).await;
|
||||
|
||||
cx.update(|window, cx| {
|
||||
let mut model_input = ModelInput::new(window, cx);
|
||||
let mut model_input = ModelInput::new(0, window, cx);
|
||||
model_input.name.update(cx, |input, cx| {
|
||||
input.editor().update(cx, |editor, cx| {
|
||||
editor.set_text("somemodel", window, cx);
|
||||
@@ -767,7 +827,7 @@ mod tests {
|
||||
models.iter().enumerate()
|
||||
{
|
||||
if i >= input.models.len() {
|
||||
input.models.push(ModelInput::new(window, cx));
|
||||
input.models.push(ModelInput::new(i, window, cx));
|
||||
}
|
||||
let model = &mut input.models[i];
|
||||
set_text(&model.name, name, window, cx);
|
||||
|
||||
@@ -4,11 +4,12 @@ use std::{
|
||||
};
|
||||
|
||||
use anyhow::{Context as _, Result};
|
||||
use collections::HashMap;
|
||||
use context_server::{ContextServerCommand, ContextServerId};
|
||||
use editor::{Editor, EditorElement, EditorStyle};
|
||||
use gpui::{
|
||||
AsyncWindowContext, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Task,
|
||||
TextStyle, TextStyleRefinement, UnderlineStyle, WeakEntity, prelude::*,
|
||||
AsyncWindowContext, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, ScrollHandle,
|
||||
Task, TextStyle, TextStyleRefinement, UnderlineStyle, WeakEntity, prelude::*,
|
||||
};
|
||||
use language::{Language, LanguageRegistry};
|
||||
use markdown::{Markdown, MarkdownElement, MarkdownStyle};
|
||||
@@ -20,10 +21,12 @@ use project::{
|
||||
project_settings::{ContextServerSettings, ProjectSettings},
|
||||
worktree_store::WorktreeStore,
|
||||
};
|
||||
use serde::Deserialize;
|
||||
use settings::{Settings as _, update_settings_file};
|
||||
use theme::ThemeSettings;
|
||||
use ui::{
|
||||
CommonAnimationExt, KeyBinding, Modal, ModalFooter, ModalHeader, Section, Tooltip, prelude::*,
|
||||
CommonAnimationExt, KeyBinding, Modal, ModalFooter, ModalHeader, Section, Tooltip,
|
||||
WithScrollbar, prelude::*,
|
||||
};
|
||||
use util::ResultExt as _;
|
||||
use workspace::{ModalView, Workspace};
|
||||
@@ -36,6 +39,11 @@ enum ConfigurationTarget {
|
||||
id: ContextServerId,
|
||||
command: ContextServerCommand,
|
||||
},
|
||||
ExistingHttp {
|
||||
id: ContextServerId,
|
||||
url: String,
|
||||
headers: HashMap<String, String>,
|
||||
},
|
||||
Extension {
|
||||
id: ContextServerId,
|
||||
repository_url: Option<SharedString>,
|
||||
@@ -46,9 +54,11 @@ enum ConfigurationTarget {
|
||||
enum ConfigurationSource {
|
||||
New {
|
||||
editor: Entity<Editor>,
|
||||
is_http: bool,
|
||||
},
|
||||
Existing {
|
||||
editor: Entity<Editor>,
|
||||
is_http: bool,
|
||||
},
|
||||
Extension {
|
||||
id: ContextServerId,
|
||||
@@ -96,6 +106,7 @@ impl ConfigurationSource {
|
||||
match target {
|
||||
ConfigurationTarget::New => ConfigurationSource::New {
|
||||
editor: create_editor(context_server_input(None), jsonc_language, window, cx),
|
||||
is_http: false,
|
||||
},
|
||||
ConfigurationTarget::Existing { id, command } => ConfigurationSource::Existing {
|
||||
editor: create_editor(
|
||||
@@ -104,6 +115,20 @@ impl ConfigurationSource {
|
||||
window,
|
||||
cx,
|
||||
),
|
||||
is_http: false,
|
||||
},
|
||||
ConfigurationTarget::ExistingHttp {
|
||||
id,
|
||||
url,
|
||||
headers: auth,
|
||||
} => ConfigurationSource::Existing {
|
||||
editor: create_editor(
|
||||
context_server_http_input(Some((id, url, auth))),
|
||||
jsonc_language,
|
||||
window,
|
||||
cx,
|
||||
),
|
||||
is_http: true,
|
||||
},
|
||||
ConfigurationTarget::Extension {
|
||||
id,
|
||||
@@ -140,16 +165,30 @@ impl ConfigurationSource {
|
||||
|
||||
fn output(&self, cx: &mut App) -> Result<(ContextServerId, ContextServerSettings)> {
|
||||
match self {
|
||||
ConfigurationSource::New { editor } | ConfigurationSource::Existing { editor } => {
|
||||
parse_input(&editor.read(cx).text(cx)).map(|(id, command)| {
|
||||
(
|
||||
id,
|
||||
ContextServerSettings::Custom {
|
||||
enabled: true,
|
||||
command,
|
||||
},
|
||||
)
|
||||
})
|
||||
ConfigurationSource::New { editor, is_http }
|
||||
| ConfigurationSource::Existing { editor, is_http } => {
|
||||
if *is_http {
|
||||
parse_http_input(&editor.read(cx).text(cx)).map(|(id, url, auth)| {
|
||||
(
|
||||
id,
|
||||
ContextServerSettings::Http {
|
||||
enabled: true,
|
||||
url,
|
||||
headers: auth,
|
||||
},
|
||||
)
|
||||
})
|
||||
} else {
|
||||
parse_input(&editor.read(cx).text(cx)).map(|(id, command)| {
|
||||
(
|
||||
id,
|
||||
ContextServerSettings::Custom {
|
||||
enabled: true,
|
||||
command,
|
||||
},
|
||||
)
|
||||
})
|
||||
}
|
||||
}
|
||||
ConfigurationSource::Extension {
|
||||
id,
|
||||
@@ -211,6 +250,66 @@ fn context_server_input(existing: Option<(ContextServerId, ContextServerCommand)
|
||||
)
|
||||
}
|
||||
|
||||
fn context_server_http_input(
|
||||
existing: Option<(ContextServerId, String, HashMap<String, String>)>,
|
||||
) -> String {
|
||||
let (name, url, headers) = match existing {
|
||||
Some((id, url, headers)) => {
|
||||
let header = if headers.is_empty() {
|
||||
r#"// "Authorization": "Bearer <token>"#.to_string()
|
||||
} else {
|
||||
let json = serde_json::to_string_pretty(&headers).unwrap();
|
||||
let mut lines = json.split("\n").collect::<Vec<_>>();
|
||||
if lines.len() > 1 {
|
||||
lines.remove(0);
|
||||
lines.pop();
|
||||
}
|
||||
lines
|
||||
.into_iter()
|
||||
.map(|line| format!(" {}", line))
|
||||
.collect::<String>()
|
||||
};
|
||||
(id.0.to_string(), url, header)
|
||||
}
|
||||
None => (
|
||||
"some-remote-server".to_string(),
|
||||
"https://example.com/mcp".to_string(),
|
||||
r#"// "Authorization": "Bearer <token>"#.to_string(),
|
||||
),
|
||||
};
|
||||
|
||||
format!(
|
||||
r#"{{
|
||||
/// The name of your remote MCP server
|
||||
"{name}": {{
|
||||
/// The URL of the remote MCP server
|
||||
"url": "{url}",
|
||||
"headers": {{
|
||||
/// Any headers to send along
|
||||
{headers}
|
||||
}}
|
||||
}}
|
||||
}}"#
|
||||
)
|
||||
}
|
||||
|
||||
fn parse_http_input(text: &str) -> Result<(ContextServerId, String, HashMap<String, String>)> {
|
||||
#[derive(Deserialize)]
|
||||
struct Temp {
|
||||
url: String,
|
||||
#[serde(default)]
|
||||
headers: HashMap<String, String>,
|
||||
}
|
||||
let value: HashMap<String, Temp> = serde_json_lenient::from_str(text)?;
|
||||
if value.len() != 1 {
|
||||
anyhow::bail!("Expected exactly one context server configuration");
|
||||
}
|
||||
|
||||
let (key, value) = value.into_iter().next().unwrap();
|
||||
|
||||
Ok((ContextServerId(key.into()), value.url, value.headers))
|
||||
}
|
||||
|
||||
fn resolve_context_server_extension(
|
||||
id: ContextServerId,
|
||||
worktree_store: Entity<WorktreeStore>,
|
||||
@@ -252,6 +351,7 @@ pub struct ConfigureContextServerModal {
|
||||
source: ConfigurationSource,
|
||||
state: State,
|
||||
original_server_id: Option<ContextServerId>,
|
||||
scroll_handle: ScrollHandle,
|
||||
}
|
||||
|
||||
impl ConfigureContextServerModal {
|
||||
@@ -310,6 +410,15 @@ impl ConfigureContextServerModal {
|
||||
id: server_id,
|
||||
command,
|
||||
}),
|
||||
ContextServerSettings::Http {
|
||||
enabled: _,
|
||||
url,
|
||||
headers,
|
||||
} => Some(ConfigurationTarget::ExistingHttp {
|
||||
id: server_id,
|
||||
url,
|
||||
headers,
|
||||
}),
|
||||
ContextServerSettings::Extension { .. } => {
|
||||
match workspace
|
||||
.update(cx, |workspace, cx| {
|
||||
@@ -351,6 +460,7 @@ impl ConfigureContextServerModal {
|
||||
state: State::Idle,
|
||||
original_server_id: match &target {
|
||||
ConfigurationTarget::Existing { id, .. } => Some(id.clone()),
|
||||
ConfigurationTarget::ExistingHttp { id, .. } => Some(id.clone()),
|
||||
ConfigurationTarget::Extension { id, .. } => Some(id.clone()),
|
||||
ConfigurationTarget::New => None,
|
||||
},
|
||||
@@ -361,6 +471,7 @@ impl ConfigureContextServerModal {
|
||||
window,
|
||||
cx,
|
||||
),
|
||||
scroll_handle: ScrollHandle::new(),
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -478,7 +589,7 @@ impl ModalView for ConfigureContextServerModal {}
|
||||
impl Focusable for ConfigureContextServerModal {
|
||||
fn focus_handle(&self, cx: &App) -> FocusHandle {
|
||||
match &self.source {
|
||||
ConfigurationSource::New { editor } => editor.focus_handle(cx),
|
||||
ConfigurationSource::New { editor, .. } => editor.focus_handle(cx),
|
||||
ConfigurationSource::Existing { editor, .. } => editor.focus_handle(cx),
|
||||
ConfigurationSource::Extension { editor, .. } => editor
|
||||
.as_ref()
|
||||
@@ -524,9 +635,10 @@ impl ConfigureContextServerModal {
|
||||
}
|
||||
|
||||
fn render_modal_content(&self, cx: &App) -> AnyElement {
|
||||
// All variants now use single editor approach
|
||||
let editor = match &self.source {
|
||||
ConfigurationSource::New { editor } => editor,
|
||||
ConfigurationSource::Existing { editor } => editor,
|
||||
ConfigurationSource::New { editor, .. } => editor,
|
||||
ConfigurationSource::Existing { editor, .. } => editor,
|
||||
ConfigurationSource::Extension { editor, .. } => {
|
||||
let Some(editor) = editor else {
|
||||
return div().into_any_element();
|
||||
@@ -598,6 +710,36 @@ impl ConfigureContextServerModal {
|
||||
move |_, _, cx| cx.open_url(&repository_url)
|
||||
}),
|
||||
)
|
||||
} else if let ConfigurationSource::New { is_http, .. } = &self.source {
|
||||
let label = if *is_http {
|
||||
"Run command"
|
||||
} else {
|
||||
"Connect via HTTP"
|
||||
};
|
||||
let tooltip = if *is_http {
|
||||
"Configure an MCP serevr that runs on stdin/stdout."
|
||||
} else {
|
||||
"Configure an MCP server that you connect to over HTTP"
|
||||
};
|
||||
|
||||
Some(
|
||||
Button::new("toggle-kind", label)
|
||||
.tooltip(Tooltip::text(tooltip))
|
||||
.on_click(cx.listener(|this, _, window, cx| match &mut this.source {
|
||||
ConfigurationSource::New { editor, is_http } => {
|
||||
*is_http = !*is_http;
|
||||
let new_text = if *is_http {
|
||||
context_server_http_input(None)
|
||||
} else {
|
||||
context_server_input(None)
|
||||
};
|
||||
editor.update(cx, |editor, cx| {
|
||||
editor.set_text(new_text, window, cx);
|
||||
})
|
||||
}
|
||||
_ => {}
|
||||
})),
|
||||
)
|
||||
} else {
|
||||
None
|
||||
},
|
||||
@@ -680,6 +822,7 @@ impl ConfigureContextServerModal {
|
||||
|
||||
impl Render for ConfigureContextServerModal {
|
||||
fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
let scroll_handle = self.scroll_handle.clone();
|
||||
div()
|
||||
.elevation_3(cx)
|
||||
.w(rems(34.))
|
||||
@@ -699,14 +842,29 @@ impl Render for ConfigureContextServerModal {
|
||||
Modal::new("configure-context-server", None)
|
||||
.header(self.render_modal_header())
|
||||
.section(
|
||||
Section::new()
|
||||
.child(self.render_modal_description(window, cx))
|
||||
.child(self.render_modal_content(cx))
|
||||
.child(match &self.state {
|
||||
State::Idle => div(),
|
||||
State::Waiting => Self::render_waiting_for_context_server(),
|
||||
State::Error(error) => Self::render_modal_error(error.clone()),
|
||||
}),
|
||||
Section::new().child(
|
||||
div()
|
||||
.size_full()
|
||||
.child(
|
||||
div()
|
||||
.id("modal-content")
|
||||
.max_h(vh(0.7, window))
|
||||
.overflow_y_scroll()
|
||||
.track_scroll(&scroll_handle)
|
||||
.child(self.render_modal_description(window, cx))
|
||||
.child(self.render_modal_content(cx))
|
||||
.child(match &self.state {
|
||||
State::Idle => div(),
|
||||
State::Waiting => {
|
||||
Self::render_waiting_for_context_server()
|
||||
}
|
||||
State::Error(error) => {
|
||||
Self::render_modal_error(error.clone())
|
||||
}
|
||||
}),
|
||||
)
|
||||
.vertical_scrollbar_for(scroll_handle, window, cx),
|
||||
),
|
||||
)
|
||||
.footer(self.render_modal_footer(cx)),
|
||||
)
|
||||
|
||||
@@ -7,8 +7,10 @@ use agent_settings::{AgentProfile, AgentProfileId, AgentSettings, builtin_profil
|
||||
use editor::Editor;
|
||||
use fs::Fs;
|
||||
use gpui::{DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Subscription, prelude::*};
|
||||
use language_model::LanguageModel;
|
||||
use settings::Settings as _;
|
||||
use language_model::{LanguageModel, LanguageModelRegistry};
|
||||
use settings::{
|
||||
LanguageModelProviderSetting, LanguageModelSelection, Settings as _, update_settings_file,
|
||||
};
|
||||
use ui::{
|
||||
KeyBinding, ListItem, ListItemSpacing, ListSeparator, Navigable, NavigableEntry, prelude::*,
|
||||
};
|
||||
@@ -16,6 +18,7 @@ use workspace::{ModalView, Workspace};
|
||||
|
||||
use crate::agent_configuration::manage_profiles_modal::profile_modal_header::ProfileModalHeader;
|
||||
use crate::agent_configuration::tool_picker::{ToolPicker, ToolPickerDelegate};
|
||||
use crate::language_model_selector::{LanguageModelSelector, language_model_selector};
|
||||
use crate::{AgentPanel, ManageProfiles};
|
||||
|
||||
enum Mode {
|
||||
@@ -32,6 +35,11 @@ enum Mode {
|
||||
tool_picker: Entity<ToolPicker>,
|
||||
_subscription: Subscription,
|
||||
},
|
||||
ConfigureDefaultModel {
|
||||
profile_id: AgentProfileId,
|
||||
model_picker: Entity<LanguageModelSelector>,
|
||||
_subscription: Subscription,
|
||||
},
|
||||
}
|
||||
|
||||
impl Mode {
|
||||
@@ -83,6 +91,7 @@ pub struct ChooseProfileMode {
|
||||
pub struct ViewProfileMode {
|
||||
profile_id: AgentProfileId,
|
||||
fork_profile: NavigableEntry,
|
||||
configure_default_model: NavigableEntry,
|
||||
configure_tools: NavigableEntry,
|
||||
configure_mcps: NavigableEntry,
|
||||
cancel_item: NavigableEntry,
|
||||
@@ -180,6 +189,7 @@ impl ManageProfilesModal {
|
||||
self.mode = Mode::ViewProfile(ViewProfileMode {
|
||||
profile_id,
|
||||
fork_profile: NavigableEntry::focusable(cx),
|
||||
configure_default_model: NavigableEntry::focusable(cx),
|
||||
configure_tools: NavigableEntry::focusable(cx),
|
||||
configure_mcps: NavigableEntry::focusable(cx),
|
||||
cancel_item: NavigableEntry::focusable(cx),
|
||||
@@ -187,6 +197,83 @@ impl ManageProfilesModal {
|
||||
self.focus_handle(cx).focus(window);
|
||||
}
|
||||
|
||||
fn configure_default_model(
|
||||
&mut self,
|
||||
profile_id: AgentProfileId,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let fs = self.fs.clone();
|
||||
let profile_id_for_closure = profile_id.clone();
|
||||
|
||||
let model_picker = cx.new(|cx| {
|
||||
let fs = fs.clone();
|
||||
let profile_id = profile_id_for_closure.clone();
|
||||
|
||||
language_model_selector(
|
||||
{
|
||||
let profile_id = profile_id.clone();
|
||||
move |cx| {
|
||||
let settings = AgentSettings::get_global(cx);
|
||||
|
||||
settings
|
||||
.profiles
|
||||
.get(&profile_id)
|
||||
.and_then(|profile| profile.default_model.as_ref())
|
||||
.and_then(|selection| {
|
||||
let registry = LanguageModelRegistry::read_global(cx);
|
||||
let provider_id = language_model::LanguageModelProviderId(
|
||||
gpui::SharedString::from(selection.provider.0.clone()),
|
||||
);
|
||||
let provider = registry.provider(&provider_id)?;
|
||||
let model = provider
|
||||
.provided_models(cx)
|
||||
.iter()
|
||||
.find(|m| m.id().0 == selection.model.as_str())?
|
||||
.clone();
|
||||
Some(language_model::ConfiguredModel { provider, model })
|
||||
})
|
||||
}
|
||||
},
|
||||
move |model, cx| {
|
||||
let provider = model.provider_id().0.to_string();
|
||||
let model_id = model.id().0.to_string();
|
||||
let profile_id = profile_id.clone();
|
||||
|
||||
update_settings_file(fs.clone(), cx, move |settings, _cx| {
|
||||
let agent_settings = settings.agent.get_or_insert_default();
|
||||
if let Some(profiles) = agent_settings.profiles.as_mut() {
|
||||
if let Some(profile) = profiles.get_mut(profile_id.0.as_ref()) {
|
||||
profile.default_model = Some(LanguageModelSelection {
|
||||
provider: LanguageModelProviderSetting(provider.clone()),
|
||||
model: model_id.clone(),
|
||||
});
|
||||
}
|
||||
}
|
||||
});
|
||||
},
|
||||
false, // Do not use popover styles for the model picker
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
.modal(false)
|
||||
});
|
||||
|
||||
let dismiss_subscription = cx.subscribe_in(&model_picker, window, {
|
||||
let profile_id = profile_id.clone();
|
||||
move |this, _picker, _: &DismissEvent, window, cx| {
|
||||
this.view_profile(profile_id.clone(), window, cx);
|
||||
}
|
||||
});
|
||||
|
||||
self.mode = Mode::ConfigureDefaultModel {
|
||||
profile_id,
|
||||
model_picker,
|
||||
_subscription: dismiss_subscription,
|
||||
};
|
||||
self.focus_handle(cx).focus(window);
|
||||
}
|
||||
|
||||
fn configure_mcp_tools(
|
||||
&mut self,
|
||||
profile_id: AgentProfileId,
|
||||
@@ -277,6 +364,7 @@ impl ManageProfilesModal {
|
||||
Mode::ViewProfile(_) => {}
|
||||
Mode::ConfigureTools { .. } => {}
|
||||
Mode::ConfigureMcps { .. } => {}
|
||||
Mode::ConfigureDefaultModel { .. } => {}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -299,6 +387,9 @@ impl ManageProfilesModal {
|
||||
Mode::ConfigureMcps { profile_id, .. } => {
|
||||
self.view_profile(profile_id.clone(), window, cx)
|
||||
}
|
||||
Mode::ConfigureDefaultModel { profile_id, .. } => {
|
||||
self.view_profile(profile_id.clone(), window, cx)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -313,6 +404,7 @@ impl Focusable for ManageProfilesModal {
|
||||
Mode::ViewProfile(_) => self.focus_handle.clone(),
|
||||
Mode::ConfigureTools { tool_picker, .. } => tool_picker.focus_handle(cx),
|
||||
Mode::ConfigureMcps { tool_picker, .. } => tool_picker.focus_handle(cx),
|
||||
Mode::ConfigureDefaultModel { model_picker, .. } => model_picker.focus_handle(cx),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -544,6 +636,47 @@ impl ManageProfilesModal {
|
||||
}),
|
||||
),
|
||||
)
|
||||
.child(
|
||||
div()
|
||||
.id("configure-default-model")
|
||||
.track_focus(&mode.configure_default_model.focus_handle)
|
||||
.on_action({
|
||||
let profile_id = mode.profile_id.clone();
|
||||
cx.listener(move |this, _: &menu::Confirm, window, cx| {
|
||||
this.configure_default_model(
|
||||
profile_id.clone(),
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
})
|
||||
})
|
||||
.child(
|
||||
ListItem::new("model-item")
|
||||
.toggle_state(
|
||||
mode.configure_default_model
|
||||
.focus_handle
|
||||
.contains_focused(window, cx),
|
||||
)
|
||||
.inset(true)
|
||||
.spacing(ListItemSpacing::Sparse)
|
||||
.start_slot(
|
||||
Icon::new(IconName::ZedAssistant)
|
||||
.size(IconSize::Small)
|
||||
.color(Color::Muted),
|
||||
)
|
||||
.child(Label::new("Configure Default Model"))
|
||||
.on_click({
|
||||
let profile_id = mode.profile_id.clone();
|
||||
cx.listener(move |this, _, window, cx| {
|
||||
this.configure_default_model(
|
||||
profile_id.clone(),
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
})
|
||||
}),
|
||||
),
|
||||
)
|
||||
.child(
|
||||
div()
|
||||
.id("configure-builtin-tools")
|
||||
@@ -668,6 +801,7 @@ impl ManageProfilesModal {
|
||||
.into_any_element(),
|
||||
)
|
||||
.entry(mode.fork_profile)
|
||||
.entry(mode.configure_default_model)
|
||||
.entry(mode.configure_tools)
|
||||
.entry(mode.configure_mcps)
|
||||
.entry(mode.cancel_item)
|
||||
@@ -753,6 +887,29 @@ impl Render for ManageProfilesModal {
|
||||
.child(go_back_item)
|
||||
.into_any_element()
|
||||
}
|
||||
Mode::ConfigureDefaultModel {
|
||||
profile_id,
|
||||
model_picker,
|
||||
..
|
||||
} => {
|
||||
let profile_name = settings
|
||||
.profiles
|
||||
.get(profile_id)
|
||||
.map(|profile| profile.name.clone())
|
||||
.unwrap_or_else(|| "Unknown".into());
|
||||
|
||||
v_flex()
|
||||
.pb_1()
|
||||
.child(ProfileModalHeader::new(
|
||||
format!("{profile_name} — Configure Default Model"),
|
||||
Some(IconName::Ai),
|
||||
))
|
||||
.child(ListSeparator)
|
||||
.child(v_flex().w(rems(34.)).child(model_picker.clone()))
|
||||
.child(ListSeparator)
|
||||
.child(go_back_item)
|
||||
.into_any_element()
|
||||
}
|
||||
Mode::ConfigureMcps {
|
||||
profile_id,
|
||||
tool_picker,
|
||||
|
||||
@@ -314,6 +314,7 @@ impl PickerDelegate for ToolPickerDelegate {
|
||||
)
|
||||
})
|
||||
.collect(),
|
||||
default_model: default_profile.default_model.clone(),
|
||||
});
|
||||
|
||||
if let Some(server_id) = server_id {
|
||||
|
||||
@@ -47,6 +47,7 @@ impl AgentModelSelector {
|
||||
}
|
||||
}
|
||||
},
|
||||
true, // Use popover styles for picker
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
|
||||
@@ -19,7 +19,6 @@ use settings::{
|
||||
|
||||
use zed_actions::agent::{OpenClaudeCodeOnboardingModal, ReauthenticateAgent};
|
||||
|
||||
use crate::ui::{AcpOnboardingModal, ClaudeCodeOnboardingModal};
|
||||
use crate::{
|
||||
AddContextServer, AgentDiffPane, DeleteRecentlyOpenThread, Follow, InlineAssistant,
|
||||
NewTextThread, NewThread, OpenActiveThreadAsMarkdown, OpenAgentDiff, OpenHistory,
|
||||
@@ -39,6 +38,10 @@ use crate::{
|
||||
ExternalAgent, NewExternalAgentThread, NewNativeAgentThreadFromSummary, placeholder_command,
|
||||
};
|
||||
use crate::{ManageProfiles, context_store::ContextStore};
|
||||
use crate::{
|
||||
inline_assistant::ContextProviders,
|
||||
ui::{AcpOnboardingModal, ClaudeCodeOnboardingModal},
|
||||
};
|
||||
use agent_settings::AgentSettings;
|
||||
use ai_onboarding::AgentPanelOnboarding;
|
||||
use anyhow::{Result, anyhow};
|
||||
@@ -455,6 +458,10 @@ pub struct AgentPanel {
}

impl AgentPanel {
pub(crate) fn workspace(&self) -> WeakEntity<Workspace> {
self.workspace.clone()
}

fn serialize(&mut self, cx: &mut Context<Self>) {
let width = self.width;
let selected_agent = self.selected_agent.clone();
@@ -1892,6 +1899,9 @@ impl AgentPanel {
|
||||
.anchor(Corner::TopRight)
|
||||
.with_handle(self.new_thread_menu_handle.clone())
|
||||
.menu({
|
||||
let selected_agent = self.selected_agent.clone();
|
||||
let is_agent_selected = move |agent_type: AgentType| selected_agent == agent_type;
|
||||
|
||||
let workspace = self.workspace.clone();
|
||||
let is_via_collab = workspace
|
||||
.update(cx, |workspace, cx| {
|
||||
@@ -1905,7 +1915,6 @@ impl AgentPanel {
|
||||
let active_thread = active_thread.clone();
|
||||
Some(ContextMenu::build(window, cx, |menu, _window, cx| {
|
||||
menu.context(focus_handle.clone())
|
||||
.header("Zed Agent")
|
||||
.when_some(active_thread, |this, active_thread| {
|
||||
let thread = active_thread.read(cx);
|
||||
|
||||
@@ -1929,9 +1938,11 @@ impl AgentPanel {
|
||||
}
|
||||
})
|
||||
.item(
|
||||
ContextMenuEntry::new("New Thread")
|
||||
.action(NewThread.boxed_clone())
|
||||
.icon(IconName::Thread)
|
||||
ContextMenuEntry::new("Zed Agent")
|
||||
.when(is_agent_selected(AgentType::NativeAgent) | is_agent_selected(AgentType::TextThread) , |this| {
|
||||
this.action(Box::new(NewExternalAgentThread { agent: None }))
|
||||
})
|
||||
.icon(IconName::ZedAgent)
|
||||
.icon_color(Color::Muted)
|
||||
.handler({
|
||||
let workspace = workspace.clone();
|
||||
@@ -1955,10 +1966,10 @@ impl AgentPanel {
|
||||
}),
|
||||
)
|
||||
.item(
|
||||
ContextMenuEntry::new("New Text Thread")
|
||||
ContextMenuEntry::new("Text Thread")
|
||||
.action(NewTextThread.boxed_clone())
|
||||
.icon(IconName::TextThread)
|
||||
.icon_color(Color::Muted)
|
||||
.action(NewTextThread.boxed_clone())
|
||||
.handler({
|
||||
let workspace = workspace.clone();
|
||||
move |window, cx| {
|
||||
@@ -1983,7 +1994,10 @@ impl AgentPanel {
|
||||
.separator()
|
||||
.header("External Agents")
|
||||
.item(
|
||||
ContextMenuEntry::new("New Claude Code")
|
||||
ContextMenuEntry::new("Claude Code")
|
||||
.when(is_agent_selected(AgentType::ClaudeCode), |this| {
|
||||
this.action(Box::new(NewExternalAgentThread { agent: None }))
|
||||
})
|
||||
.icon(IconName::AiClaude)
|
||||
.disabled(is_via_collab)
|
||||
.icon_color(Color::Muted)
|
||||
@@ -2009,7 +2023,10 @@ impl AgentPanel {
|
||||
}),
|
||||
)
|
||||
.item(
|
||||
ContextMenuEntry::new("New Codex CLI")
|
||||
ContextMenuEntry::new("Codex CLI")
|
||||
.when(is_agent_selected(AgentType::Codex), |this| {
|
||||
this.action(Box::new(NewExternalAgentThread { agent: None }))
|
||||
})
|
||||
.icon(IconName::AiOpenAi)
|
||||
.disabled(is_via_collab)
|
||||
.icon_color(Color::Muted)
|
||||
@@ -2035,7 +2052,10 @@ impl AgentPanel {
|
||||
}),
|
||||
)
|
||||
.item(
|
||||
ContextMenuEntry::new("New Gemini CLI")
|
||||
ContextMenuEntry::new("Gemini CLI")
|
||||
.when(is_agent_selected(AgentType::Gemini), |this| {
|
||||
this.action(Box::new(NewExternalAgentThread { agent: None }))
|
||||
})
|
||||
.icon(IconName::AiGemini)
|
||||
.icon_color(Color::Muted)
|
||||
.disabled(is_via_collab)
|
||||
@@ -2061,8 +2081,8 @@ impl AgentPanel {
|
||||
}),
|
||||
)
|
||||
.map(|mut menu| {
|
||||
let agent_server_store_read = agent_server_store.read(cx);
|
||||
let agent_names = agent_server_store_read
|
||||
let agent_server_store = agent_server_store.read(cx);
|
||||
let agent_names = agent_server_store
|
||||
.external_agents()
|
||||
.filter(|name| {
|
||||
name.0 != GEMINI_NAME
|
||||
@@ -2071,21 +2091,38 @@ impl AgentPanel {
|
||||
})
|
||||
.cloned()
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let custom_settings = cx
|
||||
.global::<SettingsStore>()
|
||||
.get::<AllAgentServersSettings>(None)
|
||||
.custom
|
||||
.clone();
|
||||
|
||||
for agent_name in agent_names {
|
||||
let icon_path = agent_server_store_read.agent_icon(&agent_name);
|
||||
let mut entry =
|
||||
ContextMenuEntry::new(format!("New {}", agent_name));
|
||||
let icon_path = agent_server_store.agent_icon(&agent_name);
|
||||
|
||||
let mut entry = ContextMenuEntry::new(agent_name.clone());
|
||||
|
||||
let command = custom_settings
|
||||
.get(&agent_name.0)
|
||||
.map(|settings| settings.command.clone())
|
||||
.unwrap_or(placeholder_command());
|
||||
|
||||
if let Some(icon_path) = icon_path {
|
||||
entry = entry.custom_icon_svg(icon_path);
|
||||
} else {
|
||||
entry = entry.icon(IconName::Terminal);
|
||||
}
|
||||
entry = entry
|
||||
.when(
|
||||
is_agent_selected(AgentType::Custom {
|
||||
name: agent_name.0.clone(),
|
||||
command: command.clone(),
|
||||
}),
|
||||
|this| {
|
||||
this.action(Box::new(NewExternalAgentThread { agent: None }))
|
||||
},
|
||||
)
|
||||
.icon_color(Color::Muted)
|
||||
.disabled(is_via_collab)
|
||||
.handler({
|
||||
@@ -2125,6 +2162,7 @@ impl AgentPanel {
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
menu = menu.item(entry);
|
||||
}
|
||||
|
||||
@@ -2157,7 +2195,7 @@ impl AgentPanel {
|
||||
.id("selected_agent_icon")
|
||||
.when_some(selected_agent_custom_icon, |this, icon_path| {
|
||||
let label = selected_agent_label.clone();
|
||||
this.px(DynamicSpacing::Base02.rems(cx))
|
||||
this.px_1()
|
||||
.child(Icon::from_external_svg(icon_path).color(Color::Muted))
|
||||
.tooltip(move |_window, cx| {
|
||||
Tooltip::with_meta(label.clone(), None, "Selected Agent", cx)
|
||||
@@ -2166,7 +2204,7 @@ impl AgentPanel {
|
||||
.when(!has_custom_icon, |this| {
|
||||
this.when_some(self.selected_agent.icon(), |this, icon| {
|
||||
let label = selected_agent_label.clone();
|
||||
this.px(DynamicSpacing::Base02.rems(cx))
|
||||
this.px_1()
|
||||
.child(Icon::new(icon).color(Color::Muted))
|
||||
.tooltip(move |_window, cx| {
|
||||
Tooltip::with_meta(label.clone(), None, "Selected Agent", cx)
|
||||
@@ -2669,17 +2707,12 @@ impl rules_library::InlineAssistDelegate for PromptLibraryInlineAssist {
else {
return;
};
let prompt_store = None;
let thread_store = None;
let context_store = cx.new(|_| ContextStore::new(project.clone()));
let context_providers = ContextProviders::empty(project, cx);
assistant.assist(
prompt_editor,
self.workspace.clone(),
context_store,
project,
prompt_store,
thread_store,
initial_prompt,
context_providers,
self.workspace.clone(),
window,
cx,
)
@@ -9,6 +9,8 @@ mod context_picker;
|
||||
mod context_server_configuration;
|
||||
mod context_store;
|
||||
mod context_strip;
|
||||
#[cfg(test)]
|
||||
mod evals;
|
||||
mod inline_assistant;
|
||||
mod inline_prompt_editor;
|
||||
mod language_model_selector;
|
||||
@@ -30,7 +32,10 @@ use command_palette_hooks::CommandPaletteFilter;
|
||||
use feature_flags::FeatureFlagAppExt as _;
|
||||
use fs::Fs;
|
||||
use gpui::{Action, App, Entity, SharedString, actions};
|
||||
use language::LanguageRegistry;
|
||||
use language::{
|
||||
LanguageRegistry,
|
||||
language_settings::{AllLanguageSettings, EditPredictionProvider},
|
||||
};
|
||||
use language_model::{
|
||||
ConfiguredModel, LanguageModel, LanguageModelId, LanguageModelProviderId, LanguageModelRegistry,
|
||||
};
|
||||
@@ -286,7 +291,25 @@ pub fn init(
|
||||
|
||||
fn update_command_palette_filter(cx: &mut App) {
|
||||
let disable_ai = DisableAiSettings::get_global(cx).disable_ai;
|
||||
let agent_enabled = AgentSettings::get_global(cx).enabled;
|
||||
let edit_prediction_provider = AllLanguageSettings::get_global(cx)
|
||||
.edit_predictions
|
||||
.provider;
|
||||
|
||||
CommandPaletteFilter::update_global(cx, |filter, _| {
|
||||
use editor::actions::{
|
||||
AcceptEditPrediction, AcceptPartialEditPrediction, NextEditPrediction,
|
||||
PreviousEditPrediction, ShowEditPrediction, ToggleEditPrediction,
|
||||
};
|
||||
let edit_prediction_actions = [
|
||||
TypeId::of::<AcceptEditPrediction>(),
|
||||
TypeId::of::<AcceptPartialEditPrediction>(),
|
||||
TypeId::of::<ShowEditPrediction>(),
|
||||
TypeId::of::<NextEditPrediction>(),
|
||||
TypeId::of::<PreviousEditPrediction>(),
|
||||
TypeId::of::<ToggleEditPrediction>(),
|
||||
];
|
||||
|
||||
if disable_ai {
|
||||
filter.hide_namespace("agent");
|
||||
filter.hide_namespace("assistant");
|
||||
@@ -295,42 +318,47 @@ fn update_command_palette_filter(cx: &mut App) {
|
||||
filter.hide_namespace("zed_predict_onboarding");
|
||||
filter.hide_namespace("edit_prediction");
|
||||
|
||||
use editor::actions::{
|
||||
AcceptEditPrediction, AcceptPartialEditPrediction, NextEditPrediction,
|
||||
PreviousEditPrediction, ShowEditPrediction, ToggleEditPrediction,
|
||||
};
|
||||
let edit_prediction_actions = [
|
||||
TypeId::of::<AcceptEditPrediction>(),
|
||||
TypeId::of::<AcceptPartialEditPrediction>(),
|
||||
TypeId::of::<ShowEditPrediction>(),
|
||||
TypeId::of::<NextEditPrediction>(),
|
||||
TypeId::of::<PreviousEditPrediction>(),
|
||||
TypeId::of::<ToggleEditPrediction>(),
|
||||
];
|
||||
filter.hide_action_types(&edit_prediction_actions);
|
||||
filter.hide_action_types(&[TypeId::of::<zed_actions::OpenZedPredictOnboarding>()]);
|
||||
} else {
|
||||
filter.show_namespace("agent");
|
||||
if agent_enabled {
|
||||
filter.show_namespace("agent");
|
||||
} else {
|
||||
filter.hide_namespace("agent");
|
||||
}
|
||||
|
||||
filter.show_namespace("assistant");
|
||||
filter.show_namespace("copilot");
|
||||
|
||||
match edit_prediction_provider {
|
||||
EditPredictionProvider::None => {
|
||||
filter.hide_namespace("edit_prediction");
|
||||
filter.hide_namespace("copilot");
|
||||
filter.hide_namespace("supermaven");
|
||||
filter.hide_action_types(&edit_prediction_actions);
|
||||
}
|
||||
EditPredictionProvider::Copilot => {
|
||||
filter.show_namespace("edit_prediction");
|
||||
filter.show_namespace("copilot");
|
||||
filter.hide_namespace("supermaven");
|
||||
filter.show_action_types(edit_prediction_actions.iter());
|
||||
}
|
||||
EditPredictionProvider::Supermaven => {
|
||||
filter.show_namespace("edit_prediction");
|
||||
filter.hide_namespace("copilot");
|
||||
filter.show_namespace("supermaven");
|
||||
filter.show_action_types(edit_prediction_actions.iter());
|
||||
}
|
||||
EditPredictionProvider::Zed
|
||||
| EditPredictionProvider::Codestral
|
||||
| EditPredictionProvider::Experimental(_) => {
|
||||
filter.show_namespace("edit_prediction");
|
||||
filter.hide_namespace("copilot");
|
||||
filter.hide_namespace("supermaven");
|
||||
filter.show_action_types(edit_prediction_actions.iter());
|
||||
}
|
||||
}
|
||||
|
||||
filter.show_namespace("zed_predict_onboarding");
|
||||
|
||||
filter.show_namespace("edit_prediction");
|
||||
|
||||
use editor::actions::{
|
||||
AcceptEditPrediction, AcceptPartialEditPrediction, NextEditPrediction,
|
||||
PreviousEditPrediction, ShowEditPrediction, ToggleEditPrediction,
|
||||
};
|
||||
let edit_prediction_actions = [
|
||||
TypeId::of::<AcceptEditPrediction>(),
|
||||
TypeId::of::<AcceptPartialEditPrediction>(),
|
||||
TypeId::of::<ShowEditPrediction>(),
|
||||
TypeId::of::<NextEditPrediction>(),
|
||||
TypeId::of::<PreviousEditPrediction>(),
|
||||
TypeId::of::<ToggleEditPrediction>(),
|
||||
];
|
||||
filter.show_action_types(edit_prediction_actions.iter());
|
||||
|
||||
filter.show_action_types(&[TypeId::of::<zed_actions::OpenZedPredictOnboarding>()]);
|
||||
}
|
||||
});
|
||||
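The rewritten branch above now keys off three inputs: `DisableAiSettings`, `AgentSettings::enabled`, and the configured edit-prediction provider. A minimal sketch of driving it from settings, modeled on the test added further down in this diff (the `Supermaven` value is only an example):

    cx.update_global::<SettingsStore, _>(|store, cx| {
        store.update_user_settings(cx, |settings| {
            settings
                .project
                .all_languages
                .features
                .get_or_insert(Default::default())
                .edit_prediction_provider = Some(EditPredictionProvider::Supermaven);
        });
    });
    update_command_palette_filter(cx); // supermaven namespace shown, copilot hidden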
@@ -420,3 +448,137 @@ fn register_slash_commands(cx: &mut App) {
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use agent_settings::{AgentProfileId, AgentSettings, CompletionMode};
|
||||
use command_palette_hooks::CommandPaletteFilter;
|
||||
use editor::actions::AcceptEditPrediction;
|
||||
use gpui::{BorrowAppContext, TestAppContext, px};
|
||||
use project::DisableAiSettings;
|
||||
use settings::{
|
||||
DefaultAgentView, DockPosition, NotifyWhenAgentWaiting, Settings, SettingsStore,
|
||||
};
|
||||
|
||||
#[gpui::test]
|
||||
fn test_agent_command_palette_visibility(cx: &mut TestAppContext) {
|
||||
// Init settings
|
||||
cx.update(|cx| {
|
||||
let store = SettingsStore::test(cx);
|
||||
cx.set_global(store);
|
||||
command_palette_hooks::init(cx);
|
||||
AgentSettings::register(cx);
|
||||
DisableAiSettings::register(cx);
|
||||
AllLanguageSettings::register(cx);
|
||||
});
|
||||
|
||||
let agent_settings = AgentSettings {
|
||||
enabled: true,
|
||||
button: true,
|
||||
dock: DockPosition::Right,
|
||||
default_width: px(300.),
|
||||
default_height: px(600.),
|
||||
default_model: None,
|
||||
inline_assistant_model: None,
|
||||
commit_message_model: None,
|
||||
thread_summary_model: None,
|
||||
inline_alternatives: vec![],
|
||||
default_profile: AgentProfileId::default(),
|
||||
default_view: DefaultAgentView::Thread,
|
||||
profiles: Default::default(),
|
||||
always_allow_tool_actions: false,
|
||||
notify_when_agent_waiting: NotifyWhenAgentWaiting::default(),
|
||||
play_sound_when_agent_done: false,
|
||||
single_file_review: false,
|
||||
model_parameters: vec![],
|
||||
preferred_completion_mode: CompletionMode::Normal,
|
||||
enable_feedback: false,
|
||||
expand_edit_card: true,
|
||||
expand_terminal_card: true,
|
||||
use_modifier_to_send: true,
|
||||
message_editor_min_lines: 1,
|
||||
};
|
||||
|
||||
cx.update(|cx| {
|
||||
AgentSettings::override_global(agent_settings.clone(), cx);
|
||||
DisableAiSettings::override_global(DisableAiSettings { disable_ai: false }, cx);
|
||||
|
||||
// Initial update
|
||||
update_command_palette_filter(cx);
|
||||
});
|
||||
|
||||
// Assert visible
|
||||
cx.update(|cx| {
|
||||
let filter = CommandPaletteFilter::try_global(cx).unwrap();
|
||||
assert!(
|
||||
!filter.is_hidden(&NewThread),
|
||||
"NewThread should be visible by default"
|
||||
);
|
||||
});
|
||||
|
||||
// Disable agent
|
||||
cx.update(|cx| {
|
||||
let mut new_settings = agent_settings.clone();
|
||||
new_settings.enabled = false;
|
||||
AgentSettings::override_global(new_settings, cx);
|
||||
|
||||
// Trigger update
|
||||
update_command_palette_filter(cx);
|
||||
});
|
||||
|
||||
// Assert hidden
|
||||
cx.update(|cx| {
|
||||
let filter = CommandPaletteFilter::try_global(cx).unwrap();
|
||||
assert!(
|
||||
filter.is_hidden(&NewThread),
|
||||
"NewThread should be hidden when agent is disabled"
|
||||
);
|
||||
});
|
||||
|
||||
// Test EditPredictionProvider
|
||||
// Enable EditPredictionProvider::Copilot
|
||||
cx.update(|cx| {
|
||||
cx.update_global::<SettingsStore, _>(|store, cx| {
|
||||
store.update_user_settings(cx, |s| {
|
||||
s.project
|
||||
.all_languages
|
||||
.features
|
||||
.get_or_insert(Default::default())
|
||||
.edit_prediction_provider = Some(EditPredictionProvider::Copilot);
|
||||
});
|
||||
});
|
||||
update_command_palette_filter(cx);
|
||||
});
|
||||
|
||||
cx.update(|cx| {
|
||||
let filter = CommandPaletteFilter::try_global(cx).unwrap();
|
||||
assert!(
|
||||
!filter.is_hidden(&AcceptEditPrediction),
|
||||
"EditPrediction should be visible when provider is Copilot"
|
||||
);
|
||||
});
|
||||
|
||||
// Disable EditPredictionProvider (None)
|
||||
cx.update(|cx| {
|
||||
cx.update_global::<SettingsStore, _>(|store, cx| {
|
||||
store.update_user_settings(cx, |s| {
|
||||
s.project
|
||||
.all_languages
|
||||
.features
|
||||
.get_or_insert(Default::default())
|
||||
.edit_prediction_provider = Some(EditPredictionProvider::None);
|
||||
});
|
||||
});
|
||||
update_command_palette_filter(cx);
|
||||
});
|
||||
|
||||
cx.update(|cx| {
|
||||
let filter = CommandPaletteFilter::try_global(cx).unwrap();
|
||||
assert!(
|
||||
filter.is_hidden(&AcceptEditPrediction),
|
||||
"EditPrediction should be hidden when provider is None"
|
||||
);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1089,7 +1089,7 @@ mod tests {
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_large_file_uses_outline(cx: &mut TestAppContext) {
|
||||
async fn test_large_file_uses_fallback(cx: &mut TestAppContext) {
|
||||
init_test_settings(cx);
|
||||
|
||||
// Create a large file that exceeds AUTO_OUTLINE_SIZE
|
||||
@@ -1101,16 +1101,16 @@ mod tests {
|
||||
|
||||
let file_context = load_context_for("file.txt", large_content, cx).await;
|
||||
|
||||
// Should contain some of the actual file content
|
||||
assert!(
|
||||
file_context
|
||||
.text
|
||||
.contains(&format!("# File outline for {}", path!("test/file.txt"))),
|
||||
"Large files should not get an outline"
|
||||
file_context.text.contains(LINE),
|
||||
"Should contain some of the file content"
|
||||
);
|
||||
|
||||
// Should be much smaller than original
|
||||
assert!(
|
||||
file_context.text.len() < content_len,
|
||||
"Outline should be smaller than original content"
|
||||
file_context.text.len() < content_len / 10,
|
||||
"Should be significantly smaller than original content"
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
@@ -38,6 +38,7 @@ use util::rel_path::RelPath;
|
||||
use workspace::{Workspace, notifications::NotifyResultExt};
|
||||
|
||||
use crate::context_picker::thread_context_picker::ThreadContextPicker;
|
||||
use crate::inline_assistant::ContextProviders;
|
||||
use crate::{context::RULES_ICON, context_store::ContextStore};
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
|
||||
@@ -172,12 +173,18 @@ pub(super) struct ContextPicker {
|
||||
impl ContextPicker {
|
||||
pub fn new(
|
||||
workspace: WeakEntity<Workspace>,
|
||||
thread_store: Option<WeakEntity<HistoryStore>>,
|
||||
prompt_store: Option<WeakEntity<PromptStore>>,
|
||||
context_store: WeakEntity<ContextStore>,
|
||||
context_stores: ContextProviders,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
let context_store = context_stores.context_store.downgrade();
|
||||
let thread_store = context_stores.thread_store.clone();
|
||||
let prompt_store = context_stores
|
||||
.prompt_store
|
||||
.as_ref()
|
||||
.map(Entity::downgrade)
|
||||
.clone();
|
||||
|
||||
let subscriptions = context_store
|
||||
.upgrade()
|
||||
.map(|context_store| {
|
||||
|
||||
@@ -27,6 +27,7 @@ use util::paths::PathStyle;
|
||||
use util::rel_path::RelPath;
|
||||
use workspace::Workspace;
|
||||
|
||||
use crate::inline_assistant::ContextProviders;
|
||||
use crate::{
|
||||
context::{AgentContextHandle, AgentContextKey, RULES_ICON},
|
||||
context_store::ContextStore,
|
||||
@@ -245,17 +246,15 @@ pub struct ContextPickerCompletionProvider {
|
||||
impl ContextPickerCompletionProvider {
|
||||
pub fn new(
|
||||
workspace: WeakEntity<Workspace>,
|
||||
context_store: WeakEntity<ContextStore>,
|
||||
thread_store: Option<WeakEntity<HistoryStore>>,
|
||||
prompt_store: Option<WeakEntity<PromptStore>>,
|
||||
context_stores: ContextProviders,
|
||||
editor: WeakEntity<Editor>,
|
||||
exclude_buffer: Option<WeakEntity<Buffer>>,
|
||||
) -> Self {
|
||||
Self {
|
||||
workspace,
|
||||
context_store,
|
||||
thread_store,
|
||||
prompt_store,
|
||||
context_store: context_stores.context_store.downgrade(),
|
||||
thread_store: context_stores.thread_store,
|
||||
prompt_store: context_stores.prompt_store.as_ref().map(Entity::downgrade),
|
||||
editor,
|
||||
excluded_buffer: exclude_buffer,
|
||||
}
|
||||
@@ -278,6 +277,8 @@ impl ContextPickerCompletionProvider {
|
||||
icon_path: Some(mode.icon().path().into()),
|
||||
documentation: None,
|
||||
source: project::CompletionSource::Custom,
|
||||
match_start: None,
|
||||
snippet_deduplication_key: None,
|
||||
insert_text_mode: None,
|
||||
// This ensures that when a user accepts this completion, the
|
||||
// completion menu will still be shown after "@category " is
|
||||
@@ -386,6 +387,8 @@ impl ContextPickerCompletionProvider {
|
||||
icon_path: Some(action.icon().path().into()),
|
||||
documentation: None,
|
||||
source: project::CompletionSource::Custom,
|
||||
match_start: None,
|
||||
snippet_deduplication_key: None,
|
||||
insert_text_mode: None,
|
||||
// This ensures that when a user accepts this completion, the
|
||||
// completion menu will still be shown after "@category " is
|
||||
@@ -417,6 +420,8 @@ impl ContextPickerCompletionProvider {
|
||||
replace_range: source_range.clone(),
|
||||
new_text,
|
||||
label: CodeLabel::plain(thread_entry.title().to_string(), None),
|
||||
match_start: None,
|
||||
snippet_deduplication_key: None,
|
||||
documentation: None,
|
||||
insert_text_mode: None,
|
||||
source: project::CompletionSource::Custom,
|
||||
@@ -484,6 +489,8 @@ impl ContextPickerCompletionProvider {
|
||||
replace_range: source_range.clone(),
|
||||
new_text,
|
||||
label: CodeLabel::plain(rules.title.to_string(), None),
|
||||
match_start: None,
|
||||
snippet_deduplication_key: None,
|
||||
documentation: None,
|
||||
insert_text_mode: None,
|
||||
source: project::CompletionSource::Custom,
|
||||
@@ -524,6 +531,8 @@ impl ContextPickerCompletionProvider {
|
||||
documentation: None,
|
||||
source: project::CompletionSource::Custom,
|
||||
icon_path: Some(IconName::ToolWeb.path().into()),
|
||||
match_start: None,
|
||||
snippet_deduplication_key: None,
|
||||
insert_text_mode: None,
|
||||
confirm: Some(confirm_completion_callback(
|
||||
IconName::ToolWeb.path().into(),
|
||||
@@ -612,6 +621,8 @@ impl ContextPickerCompletionProvider {
|
||||
documentation: None,
|
||||
source: project::CompletionSource::Custom,
|
||||
icon_path: Some(completion_icon_path),
|
||||
match_start: None,
|
||||
snippet_deduplication_key: None,
|
||||
insert_text_mode: None,
|
||||
confirm: Some(confirm_completion_callback(
|
||||
crease_icon_path,
|
||||
@@ -689,6 +700,8 @@ impl ContextPickerCompletionProvider {
|
||||
documentation: None,
|
||||
source: project::CompletionSource::Custom,
|
||||
icon_path: Some(IconName::Code.path().into()),
|
||||
match_start: None,
|
||||
snippet_deduplication_key: None,
|
||||
insert_text_mode: None,
|
||||
confirm: Some(confirm_completion_callback(
|
||||
IconName::Code.path().into(),
|
||||
@@ -1278,7 +1291,7 @@ mod tests {
|
||||
editor
|
||||
});
|
||||
|
||||
let context_store = cx.new(|_| ContextStore::new(project.downgrade()));
|
||||
let context_providers = ContextProviders::empty(project.downgrade(), cx);
|
||||
|
||||
let editor_entity = editor.downgrade();
|
||||
editor.update_in(&mut cx, |editor, window, cx| {
|
||||
@@ -1293,11 +1306,10 @@ mod tests {
|
||||
.map(Entity::downgrade)
|
||||
});
|
||||
window.focus(&editor.focus_handle(cx));
|
||||
|
||||
editor.set_completion_provider(Some(Rc::new(ContextPickerCompletionProvider::new(
|
||||
workspace.downgrade(),
|
||||
context_store.downgrade(),
|
||||
None,
|
||||
None,
|
||||
context_providers,
|
||||
editor_entity,
|
||||
last_opened_buffer,
|
||||
))));
|
||||
|
||||
@@ -39,6 +39,10 @@ pub enum ContextStoreEvent {
|
||||
impl EventEmitter<ContextStoreEvent> for ContextStore {}
|
||||
|
||||
impl ContextStore {
|
||||
pub fn project(&self) -> WeakEntity<Project> {
|
||||
self.project.clone()
|
||||
}
|
||||
|
||||
pub fn new(project: WeakEntity<Project>) -> Self {
|
||||
Self {
|
||||
project,
|
||||
|
||||
@@ -2,13 +2,13 @@ use crate::{
|
||||
AcceptSuggestedContext, AgentPanel, FocusDown, FocusLeft, FocusRight, FocusUp,
|
||||
ModelUsageContext, RemoveAllContext, RemoveFocusedContext, ToggleContextPicker,
|
||||
context_picker::ContextPicker,
|
||||
inline_assistant::ContextProviders,
|
||||
ui::{AddedContext, ContextPill},
|
||||
};
|
||||
use crate::{
|
||||
context::AgentContextHandle,
|
||||
context_store::{ContextStore, SuggestedContext},
|
||||
};
|
||||
use agent::HistoryStore;
|
||||
use collections::HashSet;
|
||||
use editor::Editor;
|
||||
use gpui::{
|
||||
@@ -42,10 +42,8 @@ pub struct ContextStrip {
|
||||
|
||||
impl ContextStrip {
|
||||
pub fn new(
|
||||
context_store: Entity<ContextStore>,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
thread_store: Option<WeakEntity<HistoryStore>>,
|
||||
prompt_store: Option<WeakEntity<PromptStore>>,
|
||||
inline_services: ContextProviders,
|
||||
context_picker_menu_handle: PopoverMenuHandle<ContextPicker>,
|
||||
suggest_context_kind: SuggestContextKind,
|
||||
model_usage_context: ModelUsageContext,
|
||||
|
||||
crates/agent_ui/src/evals.rs: new file, 1869 lines (diff suppressed because it is too large)
@@ -16,6 +16,7 @@ use agent_settings::AgentSettings;
|
||||
use anyhow::{Context as _, Result};
|
||||
use client::telemetry::Telemetry;
|
||||
use collections::{HashMap, HashSet, VecDeque, hash_map};
|
||||
use editor::EditorSnapshot;
|
||||
use editor::RowExt;
|
||||
use editor::SelectionEffects;
|
||||
use editor::scroll::ScrollOffset;
|
||||
@@ -87,6 +88,38 @@ enum InlineAssistTarget {
Terminal(Entity<TerminalView>),
}

#[derive(Clone)]
pub(crate) struct ContextProviders {
pub(crate) workspace: Option<WeakEntity<Workspace>>,
pub(crate) prompt_store: Option<Entity<PromptStore>>,
pub(crate) thread_store: Option<WeakEntity<HistoryStore>>,
pub(crate) context_store: Entity<ContextStore>,
}

impl ContextProviders {
pub(crate) fn project(&self, cx: &mut App) -> WeakEntity<Project> {
self.context_store.read(cx).project()
}

pub(crate) fn from_panel(agent_panel: &AgentPanel) -> Self {
Self {
workspace: Some(agent_panel.workspace()),
prompt_store: agent_panel.prompt_store().as_ref().cloned(),
thread_store: Some(agent_panel.thread_store().downgrade()),
context_store: agent_panel.inline_assist_context_store().clone(),
}
}

pub(crate) fn empty(project: WeakEntity<Project>, cx: &mut App) -> Self {
Self {
workspace: None,
prompt_store: None,
thread_store: None,
context_store: cx.new(|_| ContextStore::new(project.clone())),
}
}
}

pub struct InlineAssistant {
next_assist_id: InlineAssistId,
next_assist_group_id: InlineAssistGroupId,
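The `ContextProviders` bundle introduced in the hunk above replaces the separate `context_store`, `workspace`, `thread_store`, and `prompt_store` arguments that inline-assist call sites used to pass individually. A minimal usage sketch, assuming the reworked `assist` signature shown later in this diff (the editor, prompt text, and project handle are illustrative):

    // Bundle the providers once, then hand the whole set to the assistant.
    let services = ContextProviders::empty(project.downgrade(), cx);
    assistant.assist(
        &editor,
        Some("Add error handling".to_string()), // initial_prompt, illustrative
        services,
        window,
        cx,
    );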
@@ -274,11 +307,8 @@ impl InlineAssistant {
|
||||
let Some(agent_panel) = workspace.panel::<AgentPanel>(cx) else {
|
||||
return;
|
||||
};
|
||||
let agent_panel = agent_panel.read(cx);
|
||||
|
||||
let prompt_store = agent_panel.prompt_store().as_ref().cloned();
|
||||
let thread_store = Some(agent_panel.thread_store().downgrade());
|
||||
let context_store = agent_panel.inline_assist_context_store().clone();
|
||||
let services = ContextProviders::from_panel(agent_panel.read(cx));
|
||||
|
||||
let handle_assist =
|
||||
|window: &mut Window, cx: &mut Context<Workspace>| match inline_assist_target {
|
||||
@@ -287,10 +317,6 @@ impl InlineAssistant {
|
||||
assistant.assist(
|
||||
&active_editor,
|
||||
cx.entity().downgrade(),
|
||||
context_store,
|
||||
workspace.project().downgrade(),
|
||||
prompt_store,
|
||||
thread_store,
|
||||
action.prompt.clone(),
|
||||
window,
|
||||
cx,
|
||||
@@ -302,10 +328,8 @@ impl InlineAssistant {
|
||||
assistant.assist(
|
||||
&active_terminal,
|
||||
cx.entity().downgrade(),
|
||||
workspace.project().downgrade(),
|
||||
prompt_store,
|
||||
thread_store,
|
||||
action.prompt.clone(),
|
||||
services,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
@@ -350,25 +374,20 @@ impl InlineAssistant {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn assist(
|
||||
fn codegen_ranges(
|
||||
&mut self,
|
||||
editor: &Entity<Editor>,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
context_store: Entity<ContextStore>,
|
||||
project: WeakEntity<Project>,
|
||||
prompt_store: Option<Entity<PromptStore>>,
|
||||
thread_store: Option<WeakEntity<HistoryStore>>,
|
||||
initial_prompt: Option<String>,
|
||||
snapshot: &EditorSnapshot,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) {
|
||||
let (snapshot, initial_selections, newest_selection) = editor.update(cx, |editor, cx| {
|
||||
let snapshot = editor.snapshot(window, cx);
|
||||
let selections = editor.selections.all::<Point>(&snapshot.display_snapshot);
|
||||
let newest_selection = editor
|
||||
.selections
|
||||
.newest::<Point>(&snapshot.display_snapshot);
|
||||
(snapshot, selections, newest_selection)
|
||||
) -> Option<(Vec<Range<Anchor>>, Selection<Point>)> {
|
||||
let (initial_selections, newest_selection) = editor.update(cx, |editor, _| {
|
||||
(
|
||||
editor.selections.all::<Point>(&snapshot.display_snapshot),
|
||||
editor
|
||||
.selections
|
||||
.newest::<Point>(&snapshot.display_snapshot),
|
||||
)
|
||||
});
|
||||
|
||||
// Check if there is already an inline assistant that contains the
|
||||
@@ -381,7 +400,7 @@ impl InlineAssistant {
|
||||
&& newest_selection.end.row <= range.end.row
|
||||
{
|
||||
self.focus_assist(*assist_id, window, cx);
|
||||
return;
|
||||
return None;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -473,6 +492,25 @@ impl InlineAssistant {
|
||||
}
|
||||
}
|
||||
|
||||
Some((codegen_ranges, newest_selection))
|
||||
}
|
||||
|
||||
pub fn assist(
|
||||
&mut self,
|
||||
editor: &Entity<Editor>,
|
||||
initial_prompt: Option<String>,
|
||||
services: ContextProviders,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) {
|
||||
let snapshot = editor.update(cx, |editor, cx| editor.snapshot(window, cx));
|
||||
|
||||
let Some((codegen_ranges, newest_selection)) =
|
||||
self.codegen_ranges(editor, &snapshot, window, cx)
|
||||
else {
|
||||
return;
|
||||
};
|
||||
|
||||
let assist_group_id = self.next_assist_group_id.post_inc();
|
||||
let prompt_buffer = cx.new(|cx| {
|
||||
MultiBuffer::singleton(
|
||||
@@ -484,53 +522,26 @@ impl InlineAssistant {
|
||||
let mut assists = Vec::new();
|
||||
let mut assist_to_focus = None;
|
||||
for range in codegen_ranges {
|
||||
let assist_id = self.next_assist_id.post_inc();
|
||||
let codegen = cx.new(|cx| {
|
||||
BufferCodegen::new(
|
||||
editor.read(cx).buffer().clone(),
|
||||
range.clone(),
|
||||
None,
|
||||
context_store.clone(),
|
||||
project.clone(),
|
||||
prompt_store.clone(),
|
||||
self.telemetry.clone(),
|
||||
self.prompt_builder.clone(),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
let editor_margins = Arc::new(Mutex::new(EditorMargins::default()));
|
||||
let prompt_editor = cx.new(|cx| {
|
||||
PromptEditor::new_buffer(
|
||||
assist_id,
|
||||
editor_margins,
|
||||
self.prompt_history.clone(),
|
||||
prompt_buffer.clone(),
|
||||
codegen.clone(),
|
||||
self.fs.clone(),
|
||||
context_store.clone(),
|
||||
workspace.clone(),
|
||||
thread_store.clone(),
|
||||
prompt_store.as_ref().map(|s| s.downgrade()),
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
});
|
||||
let (assist_id, prompt_block_id, end_block_id, prompt_editor) = self.single_assist(
|
||||
range.clone(),
|
||||
editor,
|
||||
prompt_buffer.clone(),
|
||||
services.clone(),
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
|
||||
if assist_to_focus.is_none() {
|
||||
let focus_assist = if newest_selection.reversed {
|
||||
range.start.to_point(snapshot) == newest_selection.start
|
||||
range.start.to_point(&snapshot) == newest_selection.start
|
||||
} else {
|
||||
range.end.to_point(snapshot) == newest_selection.end
|
||||
range.end.to_point(&snapshot) == newest_selection.end
|
||||
};
|
||||
if focus_assist {
|
||||
assist_to_focus = Some(assist_id);
|
||||
}
|
||||
}
|
||||
|
||||
let [prompt_block_id, end_block_id] =
|
||||
self.insert_assist_blocks(editor, &range, &prompt_editor, cx);
|
||||
|
||||
assists.push((
|
||||
assist_id,
|
||||
range,
|
||||
@@ -574,11 +585,66 @@ impl InlineAssistant {
|
||||
}
|
||||
}
|
||||
|
||||
fn single_assist(
|
||||
&mut self,
|
||||
range: Range<Anchor>,
|
||||
editor: &Entity<Editor>,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
prompt_buffer: Entity<MultiBuffer>,
|
||||
services: ContextProviders,
|
||||
project: &WeakEntity<Project>,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) -> (
|
||||
InlineAssistId,
|
||||
CustomBlockId,
|
||||
CustomBlockId,
|
||||
Entity<PromptEditor<BufferCodegen>>,
|
||||
) {
|
||||
let assist_id = self.next_assist_id.post_inc();
|
||||
let codegen = cx.new(|cx| {
|
||||
BufferCodegen::new(
|
||||
editor.read(cx).buffer().clone(),
|
||||
range.clone(),
|
||||
None,
|
||||
context_store.clone(),
|
||||
project.clone(),
|
||||
prompt_store.clone(),
|
||||
self.telemetry.clone(),
|
||||
self.prompt_builder.clone(),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
let editor_margins = Arc::new(Mutex::new(EditorMargins::default()));
|
||||
let prompt_editor = cx.new(|cx| {
|
||||
PromptEditor::new_buffer(
|
||||
assist_id,
|
||||
editor_margins,
|
||||
self.prompt_history.clone(),
|
||||
prompt_buffer.clone(),
|
||||
codegen.clone(),
|
||||
self.fs.clone(),
|
||||
context_store.clone(),
|
||||
workspace.clone(),
|
||||
thread_store.clone(),
|
||||
prompt_store.as_ref().map(|s| s.downgrade()),
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
let [prompt_block_id, end_block_id] =
|
||||
self.insert_assist_blocks(editor, &range, &prompt_editor, cx);
|
||||
|
||||
(assist_id, prompt_block_id, end_block_id, prompt_editor)
|
||||
}
|
||||
|
||||
pub fn suggest_assist(
|
||||
&mut self,
|
||||
editor: &Entity<Editor>,
|
||||
mut range: Range<Anchor>,
|
||||
initial_prompt: String,
|
||||
prompt_buffer: Entity<MultiBuffer>,
|
||||
initial_transaction_id: Option<TransactionId>,
|
||||
focus: bool,
|
||||
workspace: Entity<Workspace>,
|
||||
@@ -587,9 +653,8 @@ impl InlineAssistant {
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) -> InlineAssistId {
|
||||
let assist_group_id = self.next_assist_group_id.post_inc();
|
||||
let prompt_buffer = cx.new(|cx| Buffer::local(&initial_prompt, cx));
|
||||
let prompt_buffer = cx.new(|cx| MultiBuffer::singleton(prompt_buffer, cx));
|
||||
|
||||
let assist_id = self.next_assist_id.post_inc();
|
||||
|
||||
@@ -1879,11 +1944,15 @@ impl CodeActionProvider for AssistantCodeActionProvider {
|
||||
.context("invalid range")?;
|
||||
|
||||
let prompt_store = prompt_store.await.ok();
|
||||
|
||||
const PROMPT: &'static str = "Fix Diagnostics";
|
||||
cx.update_global(|assistant: &mut InlineAssistant, window, cx| {
|
||||
let prompt_buffer = cx.new(|cx| Buffer::local(PROMPT, cx));
|
||||
let prompt_buffer = cx.new(|cx| MultiBuffer::singleton(prompt_buffer, cx));
|
||||
let assist_id = assistant.suggest_assist(
|
||||
&editor,
|
||||
range,
|
||||
"Fix Diagnostics".into(),
|
||||
prompt_buffer,
|
||||
None,
|
||||
true,
|
||||
workspace,
|
||||
|
||||
@@ -32,6 +32,7 @@ use crate::context::{AgentContextHandle, AgentContextKey};
|
||||
use crate::context_picker::{ContextPicker, ContextPickerCompletionProvider, crease_for_mention};
|
||||
use crate::context_store::{ContextStore, ContextStoreEvent};
|
||||
use crate::context_strip::{ContextStrip, ContextStripEvent, SuggestContextKind};
|
||||
use crate::inline_assistant::ContextProviders;
|
||||
use crate::terminal_codegen::TerminalCodegen;
|
||||
use crate::{
|
||||
CycleNextInlineAssist, CyclePreviousInlineAssist, ModelUsageContext, RemoveAllContext,
|
||||
@@ -773,10 +774,8 @@ impl PromptEditor<BufferCodegen> {
|
||||
prompt_buffer: Entity<MultiBuffer>,
|
||||
codegen: Entity<BufferCodegen>,
|
||||
fs: Arc<dyn Fs>,
|
||||
context_store: Entity<ContextStore>,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
thread_store: Option<WeakEntity<HistoryStore>>,
|
||||
prompt_store: Option<WeakEntity<PromptStore>>,
|
||||
services: ContextProviders,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<PromptEditor<BufferCodegen>>,
|
||||
) -> PromptEditor<BufferCodegen> {
|
||||
@@ -945,10 +944,8 @@ impl PromptEditor<TerminalCodegen> {
|
||||
prompt_buffer: Entity<MultiBuffer>,
|
||||
codegen: Entity<TerminalCodegen>,
|
||||
fs: Arc<dyn Fs>,
|
||||
context_store: Entity<ContextStore>,
|
||||
context_providers: ContextProviders,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
thread_store: Option<WeakEntity<HistoryStore>>,
|
||||
prompt_store: Option<WeakEntity<PromptStore>>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
use std::{cmp::Reverse, sync::Arc};
|
||||
|
||||
use collections::{HashSet, IndexMap};
|
||||
use collections::IndexMap;
|
||||
use fuzzy::{StringMatch, StringMatchCandidate, match_strings};
|
||||
use gpui::{Action, AnyElement, App, BackgroundExecutor, DismissEvent, Subscription, Task};
|
||||
use language_model::{
|
||||
@@ -19,14 +19,26 @@ pub type LanguageModelSelector = Picker<LanguageModelPickerDelegate>;
|
||||
pub fn language_model_selector(
|
||||
get_active_model: impl Fn(&App) -> Option<ConfiguredModel> + 'static,
|
||||
on_model_changed: impl Fn(Arc<dyn LanguageModel>, &mut App) + 'static,
|
||||
popover_styles: bool,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<LanguageModelSelector>,
|
||||
) -> LanguageModelSelector {
|
||||
let delegate = LanguageModelPickerDelegate::new(get_active_model, on_model_changed, window, cx);
|
||||
Picker::list(delegate, window, cx)
|
||||
.show_scrollbar(true)
|
||||
.width(rems(20.))
|
||||
.max_height(Some(rems(20.).into()))
|
||||
let delegate = LanguageModelPickerDelegate::new(
|
||||
get_active_model,
|
||||
on_model_changed,
|
||||
popover_styles,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
|
||||
if popover_styles {
|
||||
Picker::list(delegate, window, cx)
|
||||
.show_scrollbar(true)
|
||||
.width(rems(20.))
|
||||
.max_height(Some(rems(20.).into()))
|
||||
} else {
|
||||
Picker::list(delegate, window, cx).show_scrollbar(true)
|
||||
}
|
||||
}
|
||||
|
||||
fn all_models(cx: &App) -> GroupedModels {
|
||||
@@ -45,7 +57,7 @@ fn all_models(cx: &App) -> GroupedModels {
|
||||
})
|
||||
.collect();
|
||||
|
||||
let other = providers
|
||||
let all = providers
|
||||
.iter()
|
||||
.flat_map(|provider| {
|
||||
provider
|
||||
@@ -58,7 +70,7 @@ fn all_models(cx: &App) -> GroupedModels {
|
||||
})
|
||||
.collect();
|
||||
|
||||
GroupedModels::new(other, recommended)
|
||||
GroupedModels::new(all, recommended)
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
@@ -75,12 +87,14 @@ pub struct LanguageModelPickerDelegate {
|
||||
selected_index: usize,
|
||||
_authenticate_all_providers_task: Task<()>,
|
||||
_subscriptions: Vec<Subscription>,
|
||||
popover_styles: bool,
|
||||
}
|
||||
|
||||
impl LanguageModelPickerDelegate {
|
||||
fn new(
|
||||
get_active_model: impl Fn(&App) -> Option<ConfiguredModel> + 'static,
|
||||
on_model_changed: impl Fn(Arc<dyn LanguageModel>, &mut App) + 'static,
|
||||
popover_styles: bool,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Picker<Self>>,
|
||||
) -> Self {
|
||||
@@ -113,6 +127,7 @@ impl LanguageModelPickerDelegate {
|
||||
}
|
||||
},
|
||||
)],
|
||||
popover_styles,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -195,33 +210,24 @@ impl LanguageModelPickerDelegate {
|
||||
|
||||
struct GroupedModels {
|
||||
recommended: Vec<ModelInfo>,
|
||||
other: IndexMap<LanguageModelProviderId, Vec<ModelInfo>>,
|
||||
all: IndexMap<LanguageModelProviderId, Vec<ModelInfo>>,
|
||||
}
|
||||
|
||||
impl GroupedModels {
|
||||
pub fn new(other: Vec<ModelInfo>, recommended: Vec<ModelInfo>) -> Self {
|
||||
let recommended_ids = recommended
|
||||
.iter()
|
||||
.map(|info| (info.model.provider_id(), info.model.id()))
|
||||
.collect::<HashSet<_>>();
|
||||
|
||||
let mut other_by_provider: IndexMap<_, Vec<ModelInfo>> = IndexMap::default();
|
||||
for model in other {
|
||||
if recommended_ids.contains(&(model.model.provider_id(), model.model.id())) {
|
||||
continue;
|
||||
}
|
||||
|
||||
pub fn new(all: Vec<ModelInfo>, recommended: Vec<ModelInfo>) -> Self {
|
||||
let mut all_by_provider: IndexMap<_, Vec<ModelInfo>> = IndexMap::default();
|
||||
for model in all {
|
||||
let provider = model.model.provider_id();
|
||||
if let Some(models) = other_by_provider.get_mut(&provider) {
|
||||
if let Some(models) = all_by_provider.get_mut(&provider) {
|
||||
models.push(model);
|
||||
} else {
|
||||
other_by_provider.insert(provider, vec![model]);
|
||||
all_by_provider.insert(provider, vec![model]);
|
||||
}
|
||||
}
|
||||
|
||||
Self {
|
||||
recommended,
|
||||
other: other_by_provider,
|
||||
all: all_by_provider,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -237,7 +243,7 @@ impl GroupedModels {
|
||||
);
|
||||
}
|
||||
|
||||
for models in self.other.values() {
|
||||
for models in self.all.values() {
|
||||
if models.is_empty() {
|
||||
continue;
|
||||
}
|
||||
@@ -252,20 +258,6 @@ impl GroupedModels {
|
||||
}
|
||||
entries
|
||||
}
|
||||
|
||||
fn model_infos(&self) -> Vec<ModelInfo> {
|
||||
let other = self
|
||||
.other
|
||||
.values()
|
||||
.flat_map(|model| model.iter())
|
||||
.cloned()
|
||||
.collect::<Vec<_>>();
|
||||
self.recommended
|
||||
.iter()
|
||||
.chain(&other)
|
||||
.cloned()
|
||||
.collect::<Vec<_>>()
|
||||
}
|
||||
}
|
||||
|
||||
enum LanguageModelPickerEntry {
|
||||
@@ -410,8 +402,9 @@ impl PickerDelegate for LanguageModelPickerDelegate {
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let available_models = all_models
|
||||
.model_infos()
|
||||
.iter()
|
||||
.all
|
||||
.values()
|
||||
.flat_map(|models| models.iter())
|
||||
.filter(|m| configured_provider_ids.contains(&m.model.provider_id()))
|
||||
.cloned()
|
||||
.collect::<Vec<_>>();
|
||||
@@ -499,17 +492,15 @@ impl PickerDelegate for LanguageModelPickerDelegate {
|
||||
.inset(true)
|
||||
.spacing(ListItemSpacing::Sparse)
|
||||
.toggle_state(selected)
|
||||
.start_slot(
|
||||
Icon::new(model_info.icon)
|
||||
.color(model_icon_color)
|
||||
.size(IconSize::Small),
|
||||
)
|
||||
.child(
|
||||
h_flex()
|
||||
.w_full()
|
||||
.pl_0p5()
|
||||
.gap_1p5()
|
||||
.w(px(240.))
|
||||
.child(
|
||||
Icon::new(model_info.icon)
|
||||
.color(model_icon_color)
|
||||
.size(IconSize::Small),
|
||||
)
|
||||
.child(Label::new(model_info.model.name().0).truncate()),
|
||||
)
|
||||
.end_slot(div().pr_3().when(is_selected, |this| {
|
||||
@@ -530,6 +521,10 @@ impl PickerDelegate for LanguageModelPickerDelegate {
|
||||
_window: &mut Window,
|
||||
cx: &mut Context<Picker<Self>>,
|
||||
) -> Option<gpui::AnyElement> {
|
||||
if !self.popover_styles {
|
||||
return None;
|
||||
}
|
||||
|
||||
Some(
|
||||
h_flex()
|
||||
.w_full()
|
||||
@@ -745,46 +740,52 @@ mod tests {
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_exclude_recommended_models(_cx: &mut TestAppContext) {
|
||||
fn test_recommended_models_also_appear_in_other(_cx: &mut TestAppContext) {
|
||||
let recommended_models = create_models(vec![("zed", "claude")]);
|
||||
let all_models = create_models(vec![
|
||||
("zed", "claude"), // Should be filtered out from "other"
|
||||
("zed", "claude"), // Should also appear in "other"
|
||||
("zed", "gemini"),
|
||||
("copilot", "o3"),
|
||||
]);
|
||||
|
||||
let grouped_models = GroupedModels::new(all_models, recommended_models);
|
||||
|
||||
let actual_other_models = grouped_models
|
||||
.other
|
||||
let actual_all_models = grouped_models
|
||||
.all
|
||||
.values()
|
||||
.flatten()
|
||||
.cloned()
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
// Recommended models should not appear in "other"
|
||||
assert_models_eq(actual_other_models, vec!["zed/gemini", "copilot/o3"]);
|
||||
// Recommended models should also appear in "all"
|
||||
assert_models_eq(
|
||||
actual_all_models,
|
||||
vec!["zed/claude", "zed/gemini", "copilot/o3"],
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_dont_exclude_models_from_other_providers(_cx: &mut TestAppContext) {
|
||||
fn test_models_from_different_providers(_cx: &mut TestAppContext) {
|
||||
let recommended_models = create_models(vec![("zed", "claude")]);
|
||||
let all_models = create_models(vec![
|
||||
("zed", "claude"), // Should be filtered out from "other"
|
||||
("zed", "claude"), // Should also appear in "other"
|
||||
("zed", "gemini"),
|
||||
("copilot", "claude"), // Should not be filtered out from "other"
|
||||
("copilot", "claude"), // Different provider, should appear in "other"
|
||||
]);
|
||||
|
||||
let grouped_models = GroupedModels::new(all_models, recommended_models);
|
||||
|
||||
let actual_other_models = grouped_models
|
||||
.other
|
||||
let actual_all_models = grouped_models
|
||||
.all
|
||||
.values()
|
||||
.flatten()
|
||||
.cloned()
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
// Recommended models should not appear in "other"
|
||||
assert_models_eq(actual_other_models, vec!["zed/gemini", "copilot/claude"]);
|
||||
// All models should appear in "all" regardless of recommended status
|
||||
assert_models_eq(
|
||||
actual_all_models,
|
||||
vec!["zed/claude", "zed/gemini", "copilot/claude"],
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -15,8 +15,8 @@ use std::{
|
||||
sync::{Arc, atomic::AtomicBool},
|
||||
};
|
||||
use ui::{
|
||||
DocumentationAside, DocumentationEdge, DocumentationSide, HighlightedLabel, LabelSize,
|
||||
ListItem, ListItemSpacing, PopoverMenuHandle, TintColor, Tooltip, prelude::*,
|
||||
DocumentationAside, DocumentationEdge, DocumentationSide, HighlightedLabel, KeyBinding,
|
||||
LabelSize, ListItem, ListItemSpacing, PopoverMenuHandle, TintColor, Tooltip, prelude::*,
|
||||
};
|
||||
|
||||
/// Trait for types that can provide and manage agent profiles
|
||||
@@ -81,6 +81,7 @@ impl ProfileSelector {
|
||||
self.provider.clone(),
|
||||
self.profiles.clone(),
|
||||
cx.background_executor().clone(),
|
||||
self.focus_handle.clone(),
|
||||
cx,
|
||||
);
|
||||
|
||||
@@ -207,6 +208,7 @@ pub(crate) struct ProfilePickerDelegate {
|
||||
selected_index: usize,
|
||||
query: String,
|
||||
cancel: Option<Arc<AtomicBool>>,
|
||||
focus_handle: FocusHandle,
|
||||
}
|
||||
|
||||
impl ProfilePickerDelegate {
|
||||
@@ -215,6 +217,7 @@ impl ProfilePickerDelegate {
|
||||
provider: Arc<dyn ProfileProvider>,
|
||||
profiles: AvailableProfiles,
|
||||
background: BackgroundExecutor,
|
||||
focus_handle: FocusHandle,
|
||||
cx: &mut Context<ProfileSelector>,
|
||||
) -> Self {
|
||||
let candidates = Self::candidates_from(profiles);
|
||||
@@ -231,6 +234,7 @@ impl ProfilePickerDelegate {
|
||||
selected_index: 0,
|
||||
query: String::new(),
|
||||
cancel: None,
|
||||
focus_handle,
|
||||
};
|
||||
|
||||
this.selected_index = this
|
||||
@@ -594,20 +598,26 @@ impl PickerDelegate for ProfilePickerDelegate {
|
||||
_: &mut Window,
|
||||
cx: &mut Context<Picker<Self>>,
|
||||
) -> Option<gpui::AnyElement> {
|
||||
let focus_handle = self.focus_handle.clone();
|
||||
|
||||
Some(
|
||||
h_flex()
|
||||
.w_full()
|
||||
.border_t_1()
|
||||
.border_color(cx.theme().colors().border_variant)
|
||||
.p_1()
|
||||
.gap_4()
|
||||
.justify_between()
|
||||
.p_1p5()
|
||||
.child(
|
||||
Button::new("configure", "Configure")
|
||||
.icon(IconName::Settings)
|
||||
.icon_size(IconSize::Small)
|
||||
.icon_color(Color::Muted)
|
||||
.icon_position(IconPosition::Start)
|
||||
.full_width()
|
||||
.style(ButtonStyle::Outlined)
|
||||
.key_binding(
|
||||
KeyBinding::for_action_in(
|
||||
&ManageProfiles::default(),
|
||||
&focus_handle,
|
||||
cx,
|
||||
)
|
||||
.map(|kb| kb.size(rems_from_px(12.))),
|
||||
)
|
||||
.on_click(|_, window, cx| {
|
||||
window.dispatch_action(ManageProfiles::default().boxed_clone(), cx);
|
||||
}),
|
||||
@@ -659,20 +669,25 @@ mod tests {
|
||||
is_builtin: true,
|
||||
}];
|
||||
|
||||
let delegate = ProfilePickerDelegate {
|
||||
fs: FakeFs::new(cx.executor()),
|
||||
provider: Arc::new(TestProfileProvider::new(AgentProfileId("write".into()))),
|
||||
background: cx.executor(),
|
||||
candidates,
|
||||
string_candidates: Arc::new(Vec::new()),
|
||||
filtered_entries: Vec::new(),
|
||||
selected_index: 0,
|
||||
query: String::new(),
|
||||
cancel: None,
|
||||
};
|
||||
cx.update(|cx| {
|
||||
let focus_handle = cx.focus_handle();
|
||||
|
||||
let matches = Vec::new(); // No matches
|
||||
let _entries = delegate.entries_from_matches(matches);
|
||||
let delegate = ProfilePickerDelegate {
|
||||
fs: FakeFs::new(cx.background_executor().clone()),
|
||||
provider: Arc::new(TestProfileProvider::new(AgentProfileId("write".into()))),
|
||||
background: cx.background_executor().clone(),
|
||||
candidates,
|
||||
string_candidates: Arc::new(Vec::new()),
|
||||
filtered_entries: Vec::new(),
|
||||
selected_index: 0,
|
||||
query: String::new(),
|
||||
cancel: None,
|
||||
focus_handle,
|
||||
};
|
||||
|
||||
let matches = Vec::new(); // No matches
|
||||
let _entries = delegate.entries_from_matches(matches);
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
@@ -690,30 +705,35 @@ mod tests {
|
||||
},
|
||||
];
|
||||
|
||||
let delegate = ProfilePickerDelegate {
|
||||
fs: FakeFs::new(cx.executor()),
|
||||
provider: Arc::new(TestProfileProvider::new(AgentProfileId("write".into()))),
|
||||
background: cx.executor(),
|
||||
candidates,
|
||||
string_candidates: Arc::new(Vec::new()),
|
||||
filtered_entries: vec![
|
||||
ProfilePickerEntry::Profile(ProfileMatchEntry {
|
||||
candidate_index: 0,
|
||||
positions: Vec::new(),
|
||||
}),
|
||||
ProfilePickerEntry::Profile(ProfileMatchEntry {
|
||||
candidate_index: 1,
|
||||
positions: Vec::new(),
|
||||
}),
|
||||
],
|
||||
selected_index: 0,
|
||||
query: String::new(),
|
||||
cancel: None,
|
||||
};
|
||||
cx.update(|cx| {
|
||||
let focus_handle = cx.focus_handle();
|
||||
|
||||
// Active profile should be found at index 0
|
||||
let active_index = delegate.index_of_profile(&AgentProfileId("write".into()));
|
||||
assert_eq!(active_index, Some(0));
|
||||
let delegate = ProfilePickerDelegate {
|
||||
fs: FakeFs::new(cx.background_executor().clone()),
|
||||
provider: Arc::new(TestProfileProvider::new(AgentProfileId("write".into()))),
|
||||
background: cx.background_executor().clone(),
|
||||
candidates,
|
||||
string_candidates: Arc::new(Vec::new()),
|
||||
filtered_entries: vec![
|
||||
ProfilePickerEntry::Profile(ProfileMatchEntry {
|
||||
candidate_index: 0,
|
||||
positions: Vec::new(),
|
||||
}),
|
||||
ProfilePickerEntry::Profile(ProfileMatchEntry {
|
||||
candidate_index: 1,
|
||||
positions: Vec::new(),
|
||||
}),
|
||||
],
|
||||
selected_index: 0,
|
||||
query: String::new(),
|
||||
cancel: None,
|
||||
focus_handle,
|
||||
};
|
||||
|
||||
// Active profile should be found at index 0
|
||||
let active_index = delegate.index_of_profile(&AgentProfileId("write".into()));
|
||||
assert_eq!(active_index, Some(0));
|
||||
});
|
||||
}
|
||||
|
||||
struct TestProfileProvider {
|
||||
|
||||
@@ -127,6 +127,8 @@ impl SlashCommandCompletionProvider {
|
||||
new_text,
|
||||
label: command.label(cx),
|
||||
icon_path: None,
|
||||
match_start: None,
|
||||
snippet_deduplication_key: None,
|
||||
insert_text_mode: None,
|
||||
confirm,
|
||||
source: CompletionSource::Custom,
|
||||
@@ -232,6 +234,8 @@ impl SlashCommandCompletionProvider {
|
||||
icon_path: None,
|
||||
new_text,
|
||||
documentation: None,
|
||||
match_start: None,
|
||||
snippet_deduplication_key: None,
|
||||
confirm,
|
||||
insert_text_mode: None,
|
||||
source: CompletionSource::Custom,
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
use crate::{
|
||||
context::load_context,
|
||||
context_store::ContextStore,
|
||||
inline_assistant::ContextProviders,
|
||||
inline_prompt_editor::{
|
||||
CodegenStatus, PromptEditor, PromptEditorEvent, TerminalInlineAssistId,
|
||||
},
|
||||
@@ -72,10 +73,8 @@ impl TerminalInlineAssistant {
|
||||
&mut self,
|
||||
terminal_view: &Entity<TerminalView>,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
project: WeakEntity<Project>,
|
||||
prompt_store: Option<Entity<PromptStore>>,
|
||||
thread_store: Option<WeakEntity<HistoryStore>>,
|
||||
initial_prompt: Option<String>,
|
||||
context_providers: ContextProviders,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) {
|
||||
@@ -87,7 +86,7 @@ impl TerminalInlineAssistant {
|
||||
cx,
|
||||
)
|
||||
});
|
||||
let context_store = cx.new(|_cx| ContextStore::new(project));
|
||||
|
||||
let codegen = cx.new(|_| TerminalCodegen::new(terminal, self.telemetry.clone()));
|
||||
|
||||
let prompt_editor = cx.new(|cx| {
|
||||
@@ -97,10 +96,8 @@ impl TerminalInlineAssistant {
|
||||
prompt_buffer.clone(),
|
||||
codegen,
|
||||
self.fs.clone(),
|
||||
context_store.clone(),
|
||||
context_providers,
|
||||
workspace.clone(),
|
||||
thread_store.clone(),
|
||||
prompt_store.as_ref().map(|s| s.downgrade()),
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
@@ -420,8 +417,7 @@ impl TerminalInlineAssist {
|
||||
terminal: &Entity<TerminalView>,
|
||||
prompt_editor: Entity<PromptEditor<TerminalCodegen>>,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
context_store: Entity<ContextStore>,
|
||||
prompt_store: Option<Entity<PromptStore>>,
|
||||
context_providers: ContextProviders,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) -> Self {
|
||||
|
||||
@@ -314,6 +314,7 @@ impl TextThreadEditor {
|
||||
)
|
||||
});
|
||||
},
|
||||
true, // Use popover styles for picker
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
@@ -477,7 +478,7 @@ impl TextThreadEditor {
|
||||
editor.insert(&format!("/{name}"), window, cx);
|
||||
if command.accepts_arguments() {
|
||||
editor.insert(" ", window, cx);
|
||||
editor.show_completions(&ShowCompletions::default(), window, cx);
|
||||
editor.show_completions(&ShowCompletions, window, cx);
|
||||
}
|
||||
});
|
||||
});
|
||||
@@ -1678,7 +1679,7 @@ impl TextThreadEditor {
|
||||
) {
|
||||
cx.stop_propagation();
|
||||
|
||||
let images = if let Some(item) = cx.read_from_clipboard() {
|
||||
let mut images = if let Some(item) = cx.read_from_clipboard() {
|
||||
item.into_entries()
|
||||
.filter_map(|entry| {
|
||||
if let ClipboardEntry::Image(image) = entry {
|
||||
@@ -1692,6 +1693,40 @@ impl TextThreadEditor {
|
||||
Vec::new()
|
||||
};
|
||||
|
||||
if let Some(paths) = cx.read_from_clipboard() {
|
||||
for path in paths
|
||||
.into_entries()
|
||||
.filter_map(|entry| {
|
||||
if let ClipboardEntry::ExternalPaths(paths) = entry {
|
||||
Some(paths.paths().to_owned())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.flatten()
|
||||
{
|
||||
let Ok(content) = std::fs::read(path) else {
|
||||
continue;
|
||||
};
|
||||
let Ok(format) = image::guess_format(&content) else {
|
||||
continue;
|
||||
};
|
||||
images.push(gpui::Image::from_bytes(
|
||||
match format {
|
||||
image::ImageFormat::Png => gpui::ImageFormat::Png,
|
||||
image::ImageFormat::Jpeg => gpui::ImageFormat::Jpeg,
|
||||
image::ImageFormat::WebP => gpui::ImageFormat::Webp,
|
||||
image::ImageFormat::Gif => gpui::ImageFormat::Gif,
|
||||
image::ImageFormat::Bmp => gpui::ImageFormat::Bmp,
|
||||
image::ImageFormat::Tiff => gpui::ImageFormat::Tiff,
|
||||
image::ImageFormat::Ico => gpui::ImageFormat::Ico,
|
||||
_ => continue,
|
||||
},
|
||||
content,
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
let metadata = if let Some(item) = cx.read_from_clipboard() {
|
||||
item.entries().first().and_then(|entry| {
|
||||
if let ClipboardEntry::String(text) = entry {
|
||||
@@ -2591,12 +2626,11 @@ impl SearchableItem for TextThreadEditor {
|
||||
&mut self,
|
||||
index: usize,
|
||||
matches: &[Self::Match],
|
||||
collapse: bool,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.editor.update(cx, |editor, cx| {
|
||||
editor.activate_match(index, matches, collapse, window, cx);
|
||||
editor.activate_match(index, matches, window, cx);
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@@ -205,13 +205,9 @@ impl PasswordProxy {
|
||||
} else {
|
||||
ShellKind::Posix
|
||||
};
|
||||
let askpass_program = ASKPASS_PROGRAM
|
||||
.get_or_init(|| current_exec)
|
||||
.try_shell_safe(shell_kind)
|
||||
.context("Failed to shell-escape Askpass program path.")?
|
||||
.to_string();
|
||||
let askpass_program = ASKPASS_PROGRAM.get_or_init(|| current_exec);
|
||||
// Create an askpass script that communicates back to this process.
|
||||
let askpass_script = generate_askpass_script(&askpass_program, &askpass_socket);
|
||||
let askpass_script = generate_askpass_script(shell_kind, askpass_program, &askpass_socket)?;
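// For a POSIX shell the generated wrapper script comes out roughly as follows
// (paths illustrative; see the reworked generate_askpass_script below):
//
//   #!/bin/sh
//   printf '%s\0' "$@" | /path/to/zed --askpass=/tmp/askpass.sock 2> /dev/null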
|
||||
let _task = executor.spawn(async move {
|
||||
maybe!(async move {
|
||||
let listener =
|
||||
@@ -254,6 +250,7 @@ impl PasswordProxy {
|
||||
.await
|
||||
.with_context(|| format!("creating askpass script at {askpass_script_path:?}"))?;
|
||||
make_file_executable(&askpass_script_path).await?;
|
||||
// todo(shell): There might be no powershell on the system
|
||||
#[cfg(target_os = "windows")]
|
||||
let askpass_helper = format!(
|
||||
"powershell.exe -ExecutionPolicy Bypass -File {}",
|
||||
@@ -334,23 +331,51 @@ pub fn set_askpass_program(path: std::path::PathBuf) {
|
||||
|
||||
#[inline]
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
fn generate_askpass_script(askpass_program: &str, askpass_socket: &std::path::Path) -> String {
|
||||
format!(
|
||||
fn generate_askpass_script(
|
||||
shell_kind: ShellKind,
|
||||
askpass_program: &std::path::Path,
|
||||
askpass_socket: &std::path::Path,
|
||||
) -> Result<String> {
|
||||
let askpass_program = shell_kind.prepend_command_prefix(
|
||||
askpass_program
|
||||
.to_str()
|
||||
.context("Askpass program is on a non-utf8 path")?,
|
||||
);
|
||||
let askpass_program = shell_kind
|
||||
.try_quote_prefix_aware(&askpass_program)
|
||||
.context("Failed to shell-escape Askpass program path")?;
|
||||
let askpass_socket = askpass_socket
|
||||
.try_shell_safe(shell_kind)
|
||||
.context("Failed to shell-escape Askpass socket path")?;
|
||||
let print_args = "printf '%s\\0' \"$@\"";
|
||||
let shebang = "#!/bin/sh";
|
||||
Ok(format!(
|
||||
"{shebang}\n{print_args} | {askpass_program} --askpass={askpass_socket} 2> /dev/null \n",
|
||||
askpass_socket = askpass_socket.display(),
|
||||
print_args = "printf '%s\\0' \"$@\"",
|
||||
shebang = "#!/bin/sh",
|
||||
)
|
||||
))
|
||||
}
|
||||
|
||||
#[inline]
|
||||
#[cfg(target_os = "windows")]
|
||||
fn generate_askpass_script(askpass_program: &str, askpass_socket: &std::path::Path) -> String {
|
||||
format!(
|
||||
fn generate_askpass_script(
|
||||
shell_kind: ShellKind,
|
||||
askpass_program: &std::path::Path,
|
||||
askpass_socket: &std::path::Path,
|
||||
) -> Result<String> {
|
||||
let askpass_program = shell_kind.prepend_command_prefix(
|
||||
askpass_program
|
||||
.to_str()
|
||||
.context("Askpass program is on a non-utf8 path")?,
|
||||
);
|
||||
let askpass_program = shell_kind
|
||||
.try_quote_prefix_aware(&askpass_program)
|
||||
.context("Failed to shell-escape Askpass program path")?;
|
||||
let askpass_socket = askpass_socket
|
||||
.try_shell_safe(shell_kind)
|
||||
.context("Failed to shell-escape Askpass socket path")?;
|
||||
Ok(format!(
|
||||
r#"
|
||||
$ErrorActionPreference = 'Stop';
|
||||
($args -join [char]0) | & {askpass_program} --askpass={askpass_socket} 2> $null
|
||||
"#,
|
||||
askpass_socket = askpass_socket.display(),
|
||||
)
|
||||
))
|
||||
}
|
||||
|
||||
@@ -33,4 +33,9 @@ workspace.workspace = true
|
||||
which.workspace = true
|
||||
|
||||
[dev-dependencies]
|
||||
ctor.workspace = true
|
||||
clock= { workspace = true, "features" = ["test-support"] }
|
||||
futures.workspace = true
|
||||
gpui = { workspace = true, "features" = ["test-support"] }
|
||||
parking_lot.workspace = true
|
||||
zlog.workspace = true
|
||||
|
||||
@@ -1,18 +1,17 @@
|
||||
use anyhow::{Context as _, Result};
|
||||
use client::{Client, TelemetrySettings};
|
||||
use db::RELEASE_CHANNEL;
|
||||
use client::Client;
|
||||
use db::kvp::KEY_VALUE_STORE;
|
||||
use gpui::{
|
||||
App, AppContext as _, AsyncApp, BackgroundExecutor, Context, Entity, Global, SemanticVersion,
|
||||
Task, Window, actions,
|
||||
};
|
||||
use http_client::{AsyncBody, HttpClient, HttpClientWithUrl};
|
||||
use http_client::{HttpClient, HttpClientWithUrl};
|
||||
use paths::remote_servers_dir;
|
||||
use release_channel::{AppCommitSha, ReleaseChannel};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::{RegisterSetting, Settings, SettingsStore};
|
||||
use smol::fs::File;
|
||||
use smol::{fs, io::AsyncReadExt};
|
||||
use smol::{fs::File, process::Command};
|
||||
use std::mem;
|
||||
use std::{
|
||||
env::{
|
||||
@@ -24,6 +23,7 @@ use std::{
|
||||
sync::Arc,
|
||||
time::Duration,
|
||||
};
|
||||
use util::command::new_smol_command;
|
||||
use workspace::Workspace;
|
||||
|
||||
const SHOULD_SHOW_UPDATE_NOTIFICATION_KEY: &str = "auto-updater-should-show-updated-notification";
|
||||
@@ -41,22 +41,23 @@ actions!(
|
||||
]
|
||||
);
|
||||
|
||||
#[derive(Serialize)]
|
||||
struct UpdateRequestBody {
|
||||
installation_id: Option<Arc<str>>,
|
||||
release_channel: Option<&'static str>,
|
||||
telemetry: bool,
|
||||
is_staff: Option<bool>,
|
||||
destination: &'static str,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub enum VersionCheckType {
|
||||
Sha(AppCommitSha),
|
||||
Semantic(SemanticVersion),
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
#[derive(Serialize, Debug)]
|
||||
pub struct AssetQuery<'a> {
|
||||
asset: &'a str,
|
||||
os: &'a str,
|
||||
arch: &'a str,
|
||||
metrics_id: Option<&'a str>,
|
||||
system_id: Option<&'a str>,
|
||||
is_staff: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub enum AutoUpdateStatus {
|
||||
Idle,
|
||||
Checking,
|
||||
@@ -66,6 +67,31 @@ pub enum AutoUpdateStatus {
|
||||
Errored { error: Arc<anyhow::Error> },
|
||||
}
|
||||
|
||||
impl PartialEq for AutoUpdateStatus {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
match (self, other) {
|
||||
(AutoUpdateStatus::Idle, AutoUpdateStatus::Idle) => true,
|
||||
(AutoUpdateStatus::Checking, AutoUpdateStatus::Checking) => true,
|
||||
(
|
||||
AutoUpdateStatus::Downloading { version: v1 },
|
||||
AutoUpdateStatus::Downloading { version: v2 },
|
||||
) => v1 == v2,
|
||||
(
|
||||
AutoUpdateStatus::Installing { version: v1 },
|
||||
AutoUpdateStatus::Installing { version: v2 },
|
||||
) => v1 == v2,
|
||||
(
|
||||
AutoUpdateStatus::Updated { version: v1 },
|
||||
AutoUpdateStatus::Updated { version: v2 },
|
||||
) => v1 == v2,
|
||||
(AutoUpdateStatus::Errored { error: e1 }, AutoUpdateStatus::Errored { error: e2 }) => {
|
||||
e1.to_string() == e2.to_string()
|
||||
}
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl AutoUpdateStatus {
|
||||
pub fn is_updated(&self) -> bool {
|
||||
matches!(self, Self::Updated { .. })
|
||||
@@ -75,13 +101,13 @@ impl AutoUpdateStatus {
|
||||
pub struct AutoUpdater {
|
||||
status: AutoUpdateStatus,
|
||||
current_version: SemanticVersion,
|
||||
http_client: Arc<HttpClientWithUrl>,
|
||||
client: Arc<Client>,
|
||||
pending_poll: Option<Task<Option<()>>>,
|
||||
quit_subscription: Option<gpui::Subscription>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Clone, Debug)]
|
||||
pub struct JsonRelease {
|
||||
#[derive(Deserialize, Serialize, Clone, Debug)]
|
||||
pub struct ReleaseAsset {
|
||||
pub version: String,
|
||||
pub url: String,
|
||||
}
|
||||
@@ -96,7 +122,7 @@ impl Drop for MacOsUnmounter<'_> {
|
||||
let mount_path = mem::take(&mut self.mount_path);
|
||||
self.background_executor
|
||||
.spawn(async move {
|
||||
let unmount_output = Command::new("hdiutil")
|
||||
let unmount_output = new_smol_command("hdiutil")
|
||||
.args(["detach", "-force"])
|
||||
.arg(&mount_path)
|
||||
.output()
|
||||
@@ -137,7 +163,7 @@ struct GlobalAutoUpdate(Option<Entity<AutoUpdater>>);
|
||||
|
||||
impl Global for GlobalAutoUpdate {}
|
||||
|
||||
pub fn init(http_client: Arc<HttpClientWithUrl>, cx: &mut App) {
|
||||
pub fn init(client: Arc<Client>, cx: &mut App) {
|
||||
cx.observe_new(|workspace: &mut Workspace, _window, _cx| {
|
||||
workspace.register_action(|_, action, window, cx| check(action, window, cx));
|
||||
|
||||
@@ -149,7 +175,7 @@ pub fn init(http_client: Arc<HttpClientWithUrl>, cx: &mut App) {
|
||||
|
||||
let version = release_channel::AppVersion::global(cx);
|
||||
let auto_updater = cx.new(|cx| {
|
||||
let updater = AutoUpdater::new(version, http_client, cx);
|
||||
let updater = AutoUpdater::new(version, client, cx);
|
||||
|
||||
let poll_for_updates = ReleaseChannel::try_global(cx)
|
||||
.map(|channel| channel.poll_for_updates())
|
||||
@@ -233,7 +259,7 @@ pub fn view_release_notes(_: &ViewReleaseNotes, cx: &mut App) -> Option<()> {
|
||||
let current_version = auto_updater.current_version;
|
||||
let release_channel = release_channel.dev_name();
|
||||
let path = format!("/releases/{release_channel}/{current_version}");
|
||||
let url = &auto_updater.http_client.build_url(&path);
|
||||
let url = &auto_updater.client.http_client().build_url(&path);
|
||||
cx.open_url(url);
|
||||
}
|
||||
ReleaseChannel::Nightly => {
|
||||
@@ -296,11 +322,7 @@ impl AutoUpdater {
|
||||
cx.default_global::<GlobalAutoUpdate>().0.clone()
|
||||
}
|
||||
|
||||
fn new(
|
||||
current_version: SemanticVersion,
|
||||
http_client: Arc<HttpClientWithUrl>,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
fn new(current_version: SemanticVersion, client: Arc<Client>, cx: &mut Context<Self>) -> Self {
|
||||
// On windows, executable files cannot be overwritten while they are
|
||||
// running, so we must wait to overwrite the application until quitting
|
||||
// or restarting. When quitting the app, we spawn the auto update helper
|
||||
@@ -321,7 +343,7 @@ impl AutoUpdater {
|
||||
Self {
|
||||
status: AutoUpdateStatus::Idle,
|
||||
current_version,
|
||||
http_client,
|
||||
client,
|
||||
pending_poll: None,
|
||||
quit_subscription,
|
||||
}
|
||||
@@ -329,8 +351,7 @@ impl AutoUpdater {
|
||||
|
||||
pub fn start_polling(&self, cx: &mut Context<Self>) -> Task<Result<()>> {
|
||||
cx.spawn(async move |this, cx| {
|
||||
#[cfg(target_os = "windows")]
|
||||
{
|
||||
if cfg!(target_os = "windows") {
|
||||
use util::ResultExt;
|
||||
|
||||
cleanup_windows()
|
||||
@@ -354,7 +375,7 @@ impl AutoUpdater {
|
||||
cx.notify();
|
||||
|
||||
self.pending_poll = Some(cx.spawn(async move |this, cx| {
|
||||
let result = Self::update(this.upgrade()?, cx.clone()).await;
|
||||
let result = Self::update(this.upgrade()?, cx).await;
|
||||
this.update(cx, |this, cx| {
|
||||
this.pending_poll = None;
|
||||
if let Err(error) = result {
|
||||
@@ -400,10 +421,10 @@ impl AutoUpdater {
|
||||
// you can override this function. You should also update get_remote_server_release_url to return
|
||||
// Ok(None).
|
||||
pub async fn download_remote_server_release(
|
||||
os: &str,
|
||||
arch: &str,
|
||||
release_channel: ReleaseChannel,
|
||||
version: Option<SemanticVersion>,
|
||||
os: &str,
|
||||
arch: &str,
|
||||
set_status: impl Fn(&str, &mut AsyncApp) + Send + 'static,
|
||||
cx: &mut AsyncApp,
|
||||
) -> Result<PathBuf> {
|
||||
@@ -415,13 +436,13 @@ impl AutoUpdater {
|
||||
})??;
|
||||
|
||||
set_status("Fetching remote server release", cx);
|
||||
let release = Self::get_release(
|
||||
let release = Self::get_release_asset(
|
||||
&this,
|
||||
release_channel,
|
||||
version,
|
||||
"zed-remote-server",
|
||||
os,
|
||||
arch,
|
||||
version,
|
||||
Some(release_channel),
|
||||
cx,
|
||||
)
|
||||
.await?;
|
||||
@@ -432,7 +453,7 @@ impl AutoUpdater {
|
||||
let version_path = platform_dir.join(format!("{}.gz", release.version));
|
||||
smol::fs::create_dir_all(&platform_dir).await.ok();
|
||||
|
||||
let client = this.read_with(cx, |this, _| this.http_client.clone())?;
|
||||
let client = this.read_with(cx, |this, _| this.client.http_client())?;
|
||||
|
||||
if smol::fs::metadata(&version_path).await.is_err() {
|
||||
log::info!(
|
||||
@@ -440,19 +461,19 @@ impl AutoUpdater {
|
||||
release.version
|
||||
);
|
||||
set_status("Downloading remote server", cx);
|
||||
download_remote_server_binary(&version_path, release, client, cx).await?;
|
||||
download_remote_server_binary(&version_path, release, client).await?;
|
||||
}
|
||||
|
||||
Ok(version_path)
|
||||
}
|
||||
|
||||
pub async fn get_remote_server_release_url(
|
||||
channel: ReleaseChannel,
|
||||
version: Option<SemanticVersion>,
|
||||
os: &str,
|
||||
arch: &str,
|
||||
release_channel: ReleaseChannel,
|
||||
version: Option<SemanticVersion>,
|
||||
cx: &mut AsyncApp,
|
||||
) -> Result<Option<(String, String)>> {
|
||||
) -> Result<Option<String>> {
|
||||
let this = cx.update(|cx| {
|
||||
cx.default_global::<GlobalAutoUpdate>()
|
||||
.0
|
||||
@@ -460,108 +481,99 @@ impl AutoUpdater {
|
||||
.context("auto-update not initialized")
|
||||
})??;
|
||||
|
||||
let release = Self::get_release(
|
||||
&this,
|
||||
"zed-remote-server",
|
||||
os,
|
||||
arch,
|
||||
version,
|
||||
Some(release_channel),
|
||||
cx,
|
||||
)
|
||||
.await?;
|
||||
let release =
|
||||
Self::get_release_asset(&this, channel, version, "zed-remote-server", os, arch, cx)
|
||||
.await?;
|
||||
|
||||
let update_request_body = build_remote_server_update_request_body(cx)?;
|
||||
let body = serde_json::to_string(&update_request_body)?;
|
||||
|
||||
Ok(Some((release.url, body)))
|
||||
Ok(Some(release.url))
|
||||
}
|
||||
|
||||
async fn get_release(
|
||||
async fn get_release_asset(
|
||||
this: &Entity<Self>,
|
||||
asset: &str,
|
||||
os: &str,
|
||||
arch: &str,
|
||||
release_channel: ReleaseChannel,
|
||||
version: Option<SemanticVersion>,
|
||||
release_channel: Option<ReleaseChannel>,
|
||||
cx: &mut AsyncApp,
|
||||
) -> Result<JsonRelease> {
|
||||
let client = this.read_with(cx, |this, _| this.http_client.clone())?;
|
||||
|
||||
if let Some(version) = version {
|
||||
let channel = release_channel.map(|c| c.dev_name()).unwrap_or("stable");
|
||||
|
||||
let url = format!("/api/releases/{channel}/{version}/{asset}-{os}-{arch}.gz?update=1",);
|
||||
|
||||
Ok(JsonRelease {
|
||||
version: version.to_string(),
|
||||
url: client.build_url(&url),
|
||||
})
|
||||
} else {
|
||||
let mut url_string = client.build_url(&format!(
|
||||
"/api/releases/latest?asset={}&os={}&arch={}",
|
||||
asset, os, arch
|
||||
));
|
||||
if let Some(param) = release_channel.and_then(|c| c.release_query_param()) {
|
||||
url_string += "&";
|
||||
url_string += param;
|
||||
}
|
||||
|
||||
let mut response = client.get(&url_string, Default::default(), true).await?;
|
||||
let mut body = Vec::new();
|
||||
response.body_mut().read_to_end(&mut body).await?;
|
||||
|
||||
anyhow::ensure!(
|
||||
response.status().is_success(),
|
||||
"failed to fetch release: {:?}",
|
||||
String::from_utf8_lossy(&body),
|
||||
);
|
||||
|
||||
serde_json::from_slice(body.as_slice()).with_context(|| {
|
||||
format!(
|
||||
"error deserializing release {:?}",
|
||||
String::from_utf8_lossy(&body),
|
||||
)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
async fn get_latest_release(
|
||||
this: &Entity<Self>,
|
||||
asset: &str,
|
||||
os: &str,
|
||||
arch: &str,
|
||||
release_channel: Option<ReleaseChannel>,
|
||||
cx: &mut AsyncApp,
|
||||
) -> Result<JsonRelease> {
|
||||
Self::get_release(this, asset, os, arch, None, release_channel, cx).await
|
||||
) -> Result<ReleaseAsset> {
|
||||
let client = this.read_with(cx, |this, _| this.client.clone())?;
|
||||
|
||||
let (system_id, metrics_id, is_staff) = if client.telemetry().metrics_enabled() {
|
||||
(
|
||||
client.telemetry().system_id(),
|
||||
client.telemetry().metrics_id(),
|
||||
client.telemetry().is_staff(),
|
||||
)
|
||||
} else {
|
||||
(None, None, None)
|
||||
};
|
||||
|
||||
let version = if let Some(version) = version {
|
||||
version.to_string()
|
||||
} else {
|
||||
"latest".to_string()
|
||||
};
|
||||
let http_client = client.http_client();
|
||||
|
||||
let path = format!("/releases/{}/{}/asset", release_channel.dev_name(), version,);
|
||||
let url = http_client.build_zed_cloud_url_with_query(
|
||||
&path,
|
||||
AssetQuery {
|
||||
os,
|
||||
arch,
|
||||
asset,
|
||||
metrics_id: metrics_id.as_deref(),
|
||||
system_id: system_id.as_deref(),
|
||||
is_staff: is_staff,
|
||||
},
|
||||
)?;
|
||||
|
||||
let mut response = http_client
|
||||
.get(url.as_str(), Default::default(), true)
|
||||
.await?;
|
||||
let mut body = Vec::new();
|
||||
response.body_mut().read_to_end(&mut body).await?;
|
||||
|
||||
anyhow::ensure!(
|
||||
response.status().is_success(),
|
||||
"failed to fetch release: {:?}",
|
||||
String::from_utf8_lossy(&body),
|
||||
);
|
||||
|
||||
serde_json::from_slice(body.as_slice()).with_context(|| {
|
||||
format!(
|
||||
"error deserializing release {:?}",
|
||||
String::from_utf8_lossy(&body),
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
async fn update(this: Entity<Self>, mut cx: AsyncApp) -> Result<()> {
|
||||
async fn update(this: Entity<Self>, cx: &mut AsyncApp) -> Result<()> {
|
||||
let (client, installed_version, previous_status, release_channel) =
|
||||
this.read_with(&cx, |this, cx| {
|
||||
this.read_with(cx, |this, cx| {
|
||||
(
|
||||
this.http_client.clone(),
|
||||
this.client.http_client(),
|
||||
this.current_version,
|
||||
this.status.clone(),
|
||||
ReleaseChannel::try_global(cx),
|
||||
ReleaseChannel::try_global(cx).unwrap_or(ReleaseChannel::Stable),
|
||||
)
|
||||
})?;
|
||||
|
||||
Self::check_dependencies()?;
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
this.status = AutoUpdateStatus::Checking;
|
||||
log::info!("Auto Update: checking for updates");
|
||||
cx.notify();
|
||||
})?;
|
||||
|
||||
let fetched_release_data =
|
||||
Self::get_latest_release(&this, "zed", OS, ARCH, release_channel, &mut cx).await?;
|
||||
Self::get_release_asset(&this, release_channel, None, "zed", OS, ARCH, cx).await?;
|
||||
let fetched_version = fetched_release_data.clone().version;
|
||||
let app_commit_sha = cx.update(|cx| AppCommitSha::try_global(cx).map(|sha| sha.full()));
|
||||
let newer_version = Self::check_if_fetched_version_is_newer(
|
||||
*RELEASE_CHANNEL,
|
||||
release_channel,
|
||||
app_commit_sha,
|
||||
installed_version,
|
||||
fetched_version,
|
||||
@@ -569,7 +581,7 @@ impl AutoUpdater {
|
||||
)?;
|
||||
|
||||
let Some(newer_version) = newer_version else {
|
||||
return this.update(&mut cx, |this, cx| {
|
||||
return this.update(cx, |this, cx| {
|
||||
let status = match previous_status {
|
||||
AutoUpdateStatus::Updated { .. } => previous_status,
|
||||
_ => AutoUpdateStatus::Idle,
|
||||
@@ -579,7 +591,7 @@ impl AutoUpdater {
|
||||
});
|
||||
};
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
this.status = AutoUpdateStatus::Downloading {
|
||||
version: newer_version.clone(),
|
||||
};
|
||||
@@ -588,21 +600,21 @@ impl AutoUpdater {
|
||||
|
||||
let installer_dir = InstallerDir::new().await?;
|
||||
let target_path = Self::target_path(&installer_dir).await?;
|
||||
download_release(&target_path, fetched_release_data, client, &cx).await?;
|
||||
download_release(&target_path, fetched_release_data, client).await?;
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
this.status = AutoUpdateStatus::Installing {
|
||||
version: newer_version.clone(),
|
||||
};
|
||||
cx.notify();
|
||||
})?;
|
||||
|
||||
let new_binary_path = Self::install_release(installer_dir, target_path, &cx).await?;
|
||||
let new_binary_path = Self::install_release(installer_dir, target_path, cx).await?;
|
||||
if let Some(new_binary_path) = new_binary_path {
|
||||
cx.update(|cx| cx.set_restart_path(new_binary_path))?;
|
||||
}
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
this.set_should_show_update_notification(true, cx)
|
||||
.detach_and_log_err(cx);
|
||||
this.status = AutoUpdateStatus::Updated {
|
||||
@@ -681,6 +693,12 @@ impl AutoUpdater {
|
||||
target_path: PathBuf,
|
||||
cx: &AsyncApp,
|
||||
) -> Result<Option<PathBuf>> {
|
||||
#[cfg(test)]
|
||||
if let Some(test_install) =
|
||||
cx.try_read_global::<tests::InstallOverride, _>(|g, _| g.0.clone())
|
||||
{
|
||||
return test_install(target_path, cx);
|
||||
}
|
||||
match OS {
|
||||
"macos" => install_release_macos(&installer_dir, target_path, cx).await,
|
||||
"linux" => install_release_linux(&installer_dir, target_path, cx).await,
|
||||
@@ -731,16 +749,13 @@ impl AutoUpdater {
|
||||
|
||||
async fn download_remote_server_binary(
|
||||
target_path: &PathBuf,
|
||||
release: JsonRelease,
|
||||
release: ReleaseAsset,
|
||||
client: Arc<HttpClientWithUrl>,
|
||||
cx: &AsyncApp,
|
||||
) -> Result<()> {
|
||||
let temp = tempfile::Builder::new().tempfile_in(remote_servers_dir())?;
|
||||
let mut temp_file = File::create(&temp).await?;
|
||||
let update_request_body = build_remote_server_update_request_body(cx)?;
|
||||
let request_body = AsyncBody::from(serde_json::to_string(&update_request_body)?);
|
||||
|
||||
let mut response = client.get(&release.url, request_body, true).await?;
|
||||
let mut response = client.get(&release.url, Default::default(), true).await?;
|
||||
anyhow::ensure!(
|
||||
response.status().is_success(),
|
||||
"failed to download remote server release: {:?}",
|
||||
@@ -752,65 +767,19 @@ async fn download_remote_server_binary(
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn build_remote_server_update_request_body(cx: &AsyncApp) -> Result<UpdateRequestBody> {
|
||||
let (installation_id, release_channel, telemetry_enabled, is_staff) = cx.update(|cx| {
|
||||
let telemetry = Client::global(cx).telemetry().clone();
|
||||
let is_staff = telemetry.is_staff();
|
||||
let installation_id = telemetry.installation_id();
|
||||
let release_channel =
|
||||
ReleaseChannel::try_global(cx).map(|release_channel| release_channel.display_name());
|
||||
let telemetry_enabled = TelemetrySettings::get_global(cx).metrics;
|
||||
|
||||
(
|
||||
installation_id,
|
||||
release_channel,
|
||||
telemetry_enabled,
|
||||
is_staff,
|
||||
)
|
||||
})?;
|
||||
|
||||
Ok(UpdateRequestBody {
|
||||
installation_id,
|
||||
release_channel,
|
||||
telemetry: telemetry_enabled,
|
||||
is_staff,
|
||||
destination: "remote",
|
||||
})
|
||||
}
|
||||
|
||||
async fn download_release(
|
||||
target_path: &Path,
|
||||
release: JsonRelease,
|
||||
release: ReleaseAsset,
|
||||
client: Arc<HttpClientWithUrl>,
|
||||
cx: &AsyncApp,
|
||||
) -> Result<()> {
|
||||
let mut target_file = File::create(&target_path).await?;
|
||||
|
||||
let (installation_id, release_channel, telemetry_enabled, is_staff) = cx.update(|cx| {
|
||||
let telemetry = Client::global(cx).telemetry().clone();
|
||||
let is_staff = telemetry.is_staff();
|
||||
let installation_id = telemetry.installation_id();
|
||||
let release_channel =
|
||||
ReleaseChannel::try_global(cx).map(|release_channel| release_channel.display_name());
|
||||
let telemetry_enabled = TelemetrySettings::get_global(cx).metrics;
|
||||
|
||||
(
|
||||
installation_id,
|
||||
release_channel,
|
||||
telemetry_enabled,
|
||||
is_staff,
|
||||
)
|
||||
})?;
|
||||
|
||||
let request_body = AsyncBody::from(serde_json::to_string(&UpdateRequestBody {
|
||||
installation_id,
|
||||
release_channel,
|
||||
telemetry: telemetry_enabled,
|
||||
is_staff,
|
||||
destination: "local",
|
||||
})?);
|
||||
|
||||
let mut response = client.get(&release.url, request_body, true).await?;
|
||||
let mut response = client.get(&release.url, Default::default(), true).await?;
|
||||
anyhow::ensure!(
|
||||
response.status().is_success(),
|
||||
"failed to download update: {:?}",
|
||||
response.status()
|
||||
);
|
||||
smol::io::copy(response.body_mut(), &mut target_file).await?;
|
||||
log::info!("downloaded update. path:{:?}", target_path);
|
||||
|
||||
@@ -831,7 +800,7 @@ async fn install_release_linux(
|
||||
.await
|
||||
.context("failed to create directory into which to extract update")?;
|
||||
|
||||
let output = Command::new("tar")
|
||||
let output = new_smol_command("tar")
|
||||
.arg("-xzf")
|
||||
.arg(&downloaded_tar_gz)
|
||||
.arg("-C")
|
||||
@@ -866,7 +835,7 @@ async fn install_release_linux(
|
||||
to = PathBuf::from(prefix);
|
||||
}
|
||||
|
||||
let output = Command::new("rsync")
|
||||
let output = new_smol_command("rsync")
|
||||
.args(["-av", "--delete"])
|
||||
.arg(&from)
|
||||
.arg(&to)
|
||||
@@ -898,7 +867,7 @@ async fn install_release_macos(
|
||||
let mut mounted_app_path: OsString = mount_path.join(running_app_filename).into();
|
||||
|
||||
mounted_app_path.push("/");
|
||||
let output = Command::new("hdiutil")
|
||||
let output = new_smol_command("hdiutil")
|
||||
.args(["attach", "-nobrowse"])
|
||||
.arg(&downloaded_dmg)
|
||||
.arg("-mountroot")
|
||||
@@ -918,7 +887,7 @@ async fn install_release_macos(
|
||||
background_executor: cx.background_executor(),
|
||||
};
|
||||
|
||||
let output = Command::new("rsync")
|
||||
let output = new_smol_command("rsync")
|
||||
.args(["-av", "--delete"])
|
||||
.arg(&mounted_app_path)
|
||||
.arg(&running_app_path)
|
||||
@@ -934,34 +903,22 @@ async fn install_release_macos(
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
async fn cleanup_windows() -> Result<()> {
|
||||
use util::ResultExt;
|
||||
|
||||
let parent = std::env::current_exe()?
|
||||
.parent()
|
||||
.context("No parent dir for Zed.exe")?
|
||||
.to_owned();
|
||||
|
||||
// keep in sync with crates/auto_update_helper/src/updater.rs
|
||||
smol::fs::remove_dir(parent.join("updates"))
|
||||
.await
|
||||
.context("failed to remove updates dir")
|
||||
.log_err();
|
||||
smol::fs::remove_dir(parent.join("install"))
|
||||
.await
|
||||
.context("failed to remove install dir")
|
||||
.log_err();
|
||||
smol::fs::remove_dir(parent.join("old"))
|
||||
.await
|
||||
.context("failed to remove old version dir")
|
||||
.log_err();
|
||||
_ = smol::fs::remove_dir(parent.join("updates")).await;
|
||||
_ = smol::fs::remove_dir(parent.join("install")).await;
|
||||
_ = smol::fs::remove_dir(parent.join("old")).await;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn install_release_windows(downloaded_installer: PathBuf) -> Result<Option<PathBuf>> {
|
||||
let output = Command::new(downloaded_installer)
|
||||
let output = new_smol_command(downloaded_installer)
|
||||
.arg("/verysilent")
|
||||
.arg("/update=true")
|
||||
.arg("!desktopicon")
|
||||
@@ -1010,11 +967,33 @@ pub async fn finalize_auto_update_on_quit() {
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use client::Client;
|
||||
use clock::FakeSystemClock;
|
||||
use futures::channel::oneshot;
|
||||
use gpui::TestAppContext;
|
||||
use http_client::{FakeHttpClient, Response};
|
||||
use settings::default_settings;
|
||||
use std::{
|
||||
rc::Rc,
|
||||
sync::{
|
||||
Arc,
|
||||
atomic::{self, AtomicBool},
|
||||
},
|
||||
};
|
||||
use tempfile::tempdir;
|
||||
|
||||
#[ctor::ctor]
|
||||
fn init_logger() {
|
||||
zlog::init_test();
|
||||
}
|
||||
|
||||
use super::*;
|
||||
|
||||
pub(super) struct InstallOverride(
|
||||
pub Rc<dyn Fn(PathBuf, &AsyncApp) -> Result<Option<PathBuf>>>,
|
||||
);
|
||||
impl Global for InstallOverride {}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_auto_update_defaults_to_true(cx: &mut TestAppContext) {
|
||||
cx.update(|cx| {
|
||||
@@ -1030,6 +1009,115 @@ mod tests {
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_auto_update_downloads(cx: &mut TestAppContext) {
|
||||
cx.background_executor.allow_parking();
|
||||
zlog::init_test();
|
||||
let release_available = Arc::new(AtomicBool::new(false));
|
||||
|
||||
let (dmg_tx, dmg_rx) = oneshot::channel::<String>();
|
||||
|
||||
cx.update(|cx| {
|
||||
settings::init(cx);
|
||||
|
||||
let current_version = SemanticVersion::new(0, 100, 0);
|
||||
release_channel::init_test(current_version, ReleaseChannel::Stable, cx);
|
||||
|
||||
let clock = Arc::new(FakeSystemClock::new());
|
||||
let release_available = Arc::clone(&release_available);
|
||||
let dmg_rx = Arc::new(parking_lot::Mutex::new(Some(dmg_rx)));
|
||||
let fake_client_http = FakeHttpClient::create(move |req| {
|
||||
let release_available = release_available.load(atomic::Ordering::Relaxed);
|
||||
let dmg_rx = dmg_rx.clone();
|
||||
async move {
|
||||
if req.uri().path() == "/releases/stable/latest/asset" {
|
||||
if release_available {
|
||||
return Ok(Response::builder().status(200).body(
|
||||
r#"{"version":"0.100.1","url":"https://test.example/new-download"}"#.into()
|
||||
).unwrap());
|
||||
} else {
|
||||
return Ok(Response::builder().status(200).body(
|
||||
r#"{"version":"0.100.0","url":"https://test.example/old-download"}"#.into()
|
||||
).unwrap());
|
||||
}
|
||||
} else if req.uri().path() == "/new-download" {
|
||||
return Ok(Response::builder().status(200).body({
|
||||
let dmg_rx = dmg_rx.lock().take().unwrap();
|
||||
dmg_rx.await.unwrap().into()
|
||||
}).unwrap());
|
||||
}
|
||||
Ok(Response::builder().status(404).body("".into()).unwrap())
|
||||
}
|
||||
});
|
||||
let client = Client::new(clock, fake_client_http, cx);
|
||||
crate::init(client, cx);
|
||||
});
|
||||
|
||||
let auto_updater = cx.update(|cx| AutoUpdater::get(cx).expect("auto updater should exist"));
|
||||
|
||||
cx.background_executor.run_until_parked();
|
||||
|
||||
auto_updater.read_with(cx, |updater, _| {
|
||||
assert_eq!(updater.status(), AutoUpdateStatus::Idle);
|
||||
assert_eq!(updater.current_version(), SemanticVersion::new(0, 100, 0));
|
||||
});
|
||||
|
||||
release_available.store(true, atomic::Ordering::SeqCst);
|
||||
cx.background_executor.advance_clock(POLL_INTERVAL);
|
||||
cx.background_executor.run_until_parked();
|
||||
|
||||
loop {
|
||||
cx.background_executor.timer(Duration::from_millis(0)).await;
|
||||
cx.run_until_parked();
|
||||
let status = auto_updater.read_with(cx, |updater, _| updater.status());
|
||||
if !matches!(status, AutoUpdateStatus::Idle) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
let status = auto_updater.read_with(cx, |updater, _| updater.status());
|
||||
assert_eq!(
|
||||
status,
|
||||
AutoUpdateStatus::Downloading {
|
||||
version: VersionCheckType::Semantic(SemanticVersion::new(0, 100, 1))
|
||||
}
|
||||
);
|
||||
|
||||
dmg_tx.send("<fake-zed-update>".to_owned()).unwrap();
|
||||
|
||||
let tmp_dir = Arc::new(tempdir().unwrap());
|
||||
|
||||
cx.update(|cx| {
|
||||
let tmp_dir = tmp_dir.clone();
|
||||
cx.set_global(InstallOverride(Rc::new(move |target_path, _cx| {
|
||||
let tmp_dir = tmp_dir.clone();
|
||||
let dest_path = tmp_dir.path().join("zed");
|
||||
std::fs::copy(&target_path, &dest_path)?;
|
||||
Ok(Some(dest_path))
|
||||
})));
|
||||
});
|
||||
|
||||
loop {
|
||||
cx.background_executor.timer(Duration::from_millis(0)).await;
|
||||
cx.run_until_parked();
|
||||
let status = auto_updater.read_with(cx, |updater, _| updater.status());
|
||||
if !matches!(status, AutoUpdateStatus::Downloading { .. }) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
let status = auto_updater.read_with(cx, |updater, _| updater.status());
|
||||
assert_eq!(
|
||||
status,
|
||||
AutoUpdateStatus::Updated {
|
||||
version: VersionCheckType::Semantic(SemanticVersion::new(0, 100, 1))
|
||||
}
|
||||
);
|
||||
let will_restart = cx.expect_restart();
|
||||
cx.update(|cx| cx.restart());
|
||||
let path = will_restart.await.unwrap().unwrap();
|
||||
assert_eq!(path, tmp_dir.path().join("zed"));
|
||||
assert_eq!(std::fs::read_to_string(path).unwrap(), "<fake-zed-update>");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_stable_does_not_update_when_fetched_version_is_not_higher() {
|
||||
let release_channel = ReleaseChannel::Stable;
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
use std::{
|
||||
cell::LazyCell,
|
||||
path::Path,
|
||||
sync::LazyLock,
|
||||
time::{Duration, Instant},
|
||||
};
|
||||
|
||||
@@ -13,8 +13,8 @@ use windows::Win32::{
|
||||
use crate::windows_impl::WM_JOB_UPDATED;
|
||||
|
||||
pub(crate) struct Job {
|
||||
pub apply: Box<dyn Fn(&Path) -> Result<()>>,
|
||||
pub rollback: Box<dyn Fn(&Path) -> Result<()>>,
|
||||
pub apply: Box<dyn Fn(&Path) -> Result<()> + Send + Sync>,
|
||||
pub rollback: Box<dyn Fn(&Path) -> Result<()> + Send + Sync>,
|
||||
}
|
||||
|
||||
impl Job {
|
||||
@@ -154,10 +154,8 @@ impl Job {
|
||||
}
|
||||
}
|
||||
|
||||
// app is single threaded
|
||||
#[cfg(not(test))]
|
||||
#[allow(clippy::declare_interior_mutable_const)]
|
||||
pub(crate) const JOBS: LazyCell<[Job; 22]> = LazyCell::new(|| {
|
||||
pub(crate) static JOBS: LazyLock<[Job; 22]> = LazyLock::new(|| {
|
||||
fn p(value: &str) -> &Path {
|
||||
Path::new(value)
|
||||
}
|
||||
@@ -206,10 +204,8 @@ pub(crate) const JOBS: LazyCell<[Job; 22]> = LazyCell::new(|| {
|
||||
]
|
||||
});
|
||||
|
||||
// app is single threaded
|
||||
#[cfg(test)]
|
||||
#[allow(clippy::declare_interior_mutable_const)]
|
||||
pub(crate) const JOBS: LazyCell<[Job; 9]> = LazyCell::new(|| {
|
||||
pub(crate) static JOBS: LazyLock<[Job; 9]> = LazyLock::new(|| {
|
||||
fn p(value: &str) -> &Path {
|
||||
Path::new(value)
|
||||
}
|
||||
|
||||
@@ -1683,7 +1683,9 @@ impl LiveKitRoom {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
enum LocalTrack<Stream: ?Sized> {
|
||||
#[default]
|
||||
None,
|
||||
Pending {
|
||||
publish_id: usize,
|
||||
@@ -1694,12 +1696,6 @@ enum LocalTrack<Stream: ?Sized> {
|
||||
},
|
||||
}
|
||||
|
||||
impl<T: ?Sized> Default for LocalTrack<T> {
|
||||
fn default() -> Self {
|
||||
Self::None
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, PartialEq, Eq)]
|
||||
pub enum RoomStatus {
|
||||
Online,
|
||||
|
||||
@@ -1487,7 +1487,7 @@ impl Client {
|
||||
|
||||
let url = self
|
||||
.http
|
||||
.build_zed_cloud_url("/internal/users/impersonate", &[])?;
|
||||
.build_zed_cloud_url("/internal/users/impersonate")?;
|
||||
let request = Request::post(url.as_str())
|
||||
.header("Content-Type", "application/json")
|
||||
.header("Authorization", format!("Bearer {api_token}"))
|
||||
|
||||
@@ -293,10 +293,11 @@ impl Telemetry {
|
||||
}
|
||||
|
||||
pub fn metrics_enabled(self: &Arc<Self>) -> bool {
|
||||
let state = self.state.lock();
|
||||
let enabled = state.settings.metrics;
|
||||
drop(state);
|
||||
enabled
|
||||
self.state.lock().settings.metrics
|
||||
}
|
||||
|
||||
pub fn diagnostics_enabled(self: &Arc<Self>) -> bool {
|
||||
self.state.lock().settings.diagnostics
|
||||
}
|
||||
|
||||
pub fn set_authenticated_user_info(
|
||||
@@ -435,7 +436,7 @@ impl Telemetry {
|
||||
Some(project_types)
|
||||
}
|
||||
|
||||
fn report_event(self: &Arc<Self>, event: Event) {
|
||||
fn report_event(self: &Arc<Self>, mut event: Event) {
|
||||
let mut state = self.state.lock();
|
||||
// RUST_LOG=telemetry=trace to debug telemetry events
|
||||
log::trace!(target: "telemetry", "{:?}", event);
|
||||
@@ -444,6 +445,12 @@ impl Telemetry {
|
||||
return;
|
||||
}
|
||||
|
||||
match &mut event {
|
||||
Event::Flexible(event) => event
|
||||
.event_properties
|
||||
.insert("event_source".into(), "zed".into()),
|
||||
};
|
||||
|
||||
if state.flush_events_task.is_none() {
|
||||
let this = self.clone();
|
||||
state.flush_events_task = Some(self.executor.spawn(async move {
|
||||
|
||||
@@ -267,6 +267,7 @@ impl UserStore {
|
||||
Status::SignedOut => {
|
||||
current_user_tx.send(None).await.ok();
|
||||
this.update(cx, |this, cx| {
|
||||
this.clear_plan_and_usage();
|
||||
cx.emit(Event::PrivateUserInfoUpdated);
|
||||
cx.notify();
|
||||
this.clear_contacts()
|
||||
@@ -779,6 +780,12 @@ impl UserStore {
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
pub fn clear_plan_and_usage(&mut self) {
|
||||
self.plan_info = None;
|
||||
self.model_request_usage = None;
|
||||
self.edit_prediction_usage = None;
|
||||
}
|
||||
|
||||
fn update_authenticated_user(
|
||||
&mut self,
|
||||
response: GetAuthenticatedUserResponse,
|
||||
|
||||
@@ -51,3 +51,11 @@ pub fn external_agents_docs(cx: &App) -> String {
|
||||
server_url = server_url(cx)
|
||||
)
|
||||
}
|
||||
|
||||
/// Returns the URL to Zed agent servers documentation.
|
||||
pub fn agent_server_docs(cx: &App) -> String {
|
||||
format!(
|
||||
"{server_url}/docs/extensions/agent-servers",
|
||||
server_url = server_url(cx)
|
||||
)
|
||||
}
|
||||
|
||||
@@ -62,7 +62,7 @@ impl CloudApiClient {
|
||||
let request = self.build_request(
|
||||
Request::builder().method(Method::GET).uri(
|
||||
self.http_client
|
||||
.build_zed_cloud_url("/client/users/me", &[])?
|
||||
.build_zed_cloud_url("/client/users/me")?
|
||||
.as_ref(),
|
||||
),
|
||||
AsyncBody::default(),
|
||||
@@ -89,7 +89,7 @@ impl CloudApiClient {
|
||||
pub fn connect(&self, cx: &App) -> Result<Task<Result<Connection>>> {
|
||||
let mut connect_url = self
|
||||
.http_client
|
||||
.build_zed_cloud_url("/client/users/connect", &[])?;
|
||||
.build_zed_cloud_url("/client/users/connect")?;
|
||||
connect_url
|
||||
.set_scheme(match connect_url.scheme() {
|
||||
"https" => "wss",
|
||||
@@ -123,7 +123,7 @@ impl CloudApiClient {
|
||||
.method(Method::POST)
|
||||
.uri(
|
||||
self.http_client
|
||||
.build_zed_cloud_url("/client/llm_tokens", &[])?
|
||||
.build_zed_cloud_url("/client/llm_tokens")?
|
||||
.as_ref(),
|
||||
)
|
||||
.when_some(system_id, |builder, system_id| {
|
||||
@@ -154,7 +154,7 @@ impl CloudApiClient {
|
||||
let request = build_request(
|
||||
Request::builder().method(Method::GET).uri(
|
||||
self.http_client
|
||||
.build_zed_cloud_url("/client/users/me", &[])?
|
||||
.build_zed_cloud_url("/client/users/me")?
|
||||
.as_ref(),
|
||||
),
|
||||
AsyncBody::default(),
|
||||
|
||||
@@ -58,6 +58,9 @@ pub const SERVER_SUPPORTS_STATUS_MESSAGES_HEADER_NAME: &str =
|
||||
/// The name of the header used by the client to indicate that it supports receiving xAI models.
|
||||
pub const CLIENT_SUPPORTS_X_AI_HEADER_NAME: &str = "x-zed-client-supports-x-ai";
|
||||
|
||||
/// The maximum number of edit predictions that can be rejected per request.
|
||||
pub const MAX_EDIT_PREDICTION_REJECTIONS_PER_REQUEST: usize = 100;
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Copy, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
pub enum UsageLimit {
|
||||
@@ -192,6 +195,17 @@ pub struct AcceptEditPredictionBody {
|
||||
pub request_id: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct RejectEditPredictionsBody {
|
||||
pub rejections: Vec<EditPredictionRejection>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct EditPredictionRejection {
|
||||
pub request_id: String,
|
||||
pub was_shown: bool,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
pub enum CompletionMode {
|
||||
|
||||
@@ -73,8 +73,13 @@ pub enum PromptFormat {
|
||||
MarkedExcerpt,
|
||||
LabeledSections,
|
||||
NumLinesUniDiff,
|
||||
OldTextNewText,
|
||||
/// Prompt format intended for use via zeta_cli
|
||||
OnlySnippets,
|
||||
/// One-sentence instructions used in fine-tuned models
|
||||
Minimal,
|
||||
/// One-sentence instructions + FIM-like template
|
||||
MinimalQwen,
|
||||
}
|
||||
|
||||
impl PromptFormat {
|
||||
@@ -100,6 +105,9 @@ impl std::fmt::Display for PromptFormat {
|
||||
PromptFormat::LabeledSections => write!(f, "Labeled Sections"),
|
||||
PromptFormat::OnlySnippets => write!(f, "Only Snippets"),
|
||||
PromptFormat::NumLinesUniDiff => write!(f, "Numbered Lines / Unified Diff"),
|
||||
PromptFormat::OldTextNewText => write!(f, "Old Text / New Text"),
|
||||
PromptFormat::Minimal => write!(f, "Minimal"),
|
||||
PromptFormat::MinimalQwen => write!(f, "Minimal + Qwen FIM"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -19,4 +19,5 @@ ordered-float.workspace = true
|
||||
rustc-hash.workspace = true
|
||||
schemars.workspace = true
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
strum.workspace = true
|
||||
|
||||
@@ -3,7 +3,8 @@ pub mod retrieval_prompt;
|
||||
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use cloud_llm_client::predict_edits_v3::{
|
||||
self, DiffPathFmt, Excerpt, Line, Point, PromptFormat, ReferencedDeclaration,
|
||||
self, DiffPathFmt, Event, Excerpt, IncludedFile, Line, Point, PromptFormat,
|
||||
ReferencedDeclaration,
|
||||
};
|
||||
use indoc::indoc;
|
||||
use ordered_float::OrderedFloat;
|
||||
@@ -31,7 +32,7 @@ const MARKED_EXCERPT_INSTRUCTIONS: &str = indoc! {"
|
||||
|
||||
Other code is provided for context, and `…` indicates when code has been skipped.
|
||||
|
||||
# Edit History:
|
||||
## Edit History
|
||||
|
||||
"};
|
||||
|
||||
@@ -49,60 +50,138 @@ const LABELED_SECTIONS_INSTRUCTIONS: &str = indoc! {r#"
|
||||
println!("{i}");
|
||||
}
|
||||
|
||||
# Edit History:
|
||||
## Edit History
|
||||
|
||||
"#};
|
||||
|
||||
const NUMBERED_LINES_INSTRUCTIONS: &str = indoc! {r#"
|
||||
# Instructions
|
||||
|
||||
You are a code completion assistant helping a programmer finish their work. Your task is to:
|
||||
You are an edit prediction agent in a code editor.
|
||||
Your job is to predict the next edit that the user will make,
|
||||
based on their last few edits and their current cursor location.
|
||||
|
||||
1. Analyze the edit history to understand what the programmer is trying to achieve
|
||||
2. Identify any incomplete refactoring or changes that need to be finished
|
||||
3. Make the remaining edits that a human programmer would logically make next
|
||||
4. Apply systematic changes consistently across the entire codebase - if you see a pattern starting, complete it everywhere.
|
||||
## Output Format
|
||||
|
||||
Focus on:
|
||||
- Understanding the intent behind the changes (e.g., improving error handling, refactoring APIs, fixing bugs)
|
||||
- Completing any partially-applied changes across the codebase
|
||||
- Ensuring consistency with the programming style and patterns already established
|
||||
- Making edits that maintain or improve code quality
|
||||
- If the programmer started refactoring one instance of a pattern, find and update ALL similar instances
|
||||
- Don't write a lot of code if you're not sure what to do
|
||||
|
||||
Rules:
|
||||
- Do not just mechanically apply patterns - reason about what changes make sense given the context and the programmer's apparent goals.
|
||||
- Do not just fix syntax errors - look for the broader refactoring pattern and apply it systematically throughout the code.
|
||||
- Write the edits in the unified diff format as shown in the example.
|
||||
|
||||
# Example output:
|
||||
You must briefly explain your understanding of the user's goal, in one
|
||||
or two sentences, and then specify their next edit in the form of a
|
||||
unified diff, like this:
|
||||
|
||||
```
|
||||
--- a/src/myapp/cli.py
|
||||
+++ b/src/myapp/cli.py
|
||||
@@ -1,3 +1,3 @@
|
||||
-
|
||||
-
|
||||
-import sys
|
||||
+import json
|
||||
@@ ... @@
|
||||
import os
|
||||
import time
|
||||
import sys
|
||||
+from constants import LOG_LEVEL_WARNING
|
||||
@@ ... @@
|
||||
config.headless()
|
||||
config.set_interactive(false)
|
||||
-config.set_log_level(LOG_L)
|
||||
+config.set_log_level(LOG_LEVEL_WARNING)
|
||||
config.set_use_color(True)
|
||||
```
|
||||
|
||||
# Edit History:
|
||||
## Edit History
|
||||
|
||||
"#};
|
||||
|
||||
const STUDENT_MODEL_INSTRUCTIONS: &str = indoc! {r#"
|
||||
You are a code completion assistant that analyzes edit history to identify and systematically complete incomplete refactorings or patterns across the entire codebase.
|
||||
|
||||
## Edit History
|
||||
|
||||
"#};
|
||||
|
||||
const UNIFIED_DIFF_REMINDER: &str = indoc! {"
|
||||
---
|
||||
|
||||
Analyze the edit history and the files, then provide the unified diff for your predicted edits.
|
||||
Do not include the cursor marker in your output.
|
||||
Your diff should include edited file paths in its file headers (lines beginning with `---` and `+++`).
|
||||
Do not include line numbers in the hunk headers, use `@@ ... @@`.
|
||||
Removed lines begin with `-`.
|
||||
Added lines begin with `+`.
|
||||
Context lines begin with an extra space.
|
||||
Context and removed lines are used to match the target edit location, so make sure to include enough of them
|
||||
to uniquely identify it amongst all excerpts of code provided.
|
||||
"};
|
||||
|
||||
const MINIMAL_PROMPT_REMINDER: &str = indoc! {"
|
||||
---
|
||||
|
||||
Please analyze the edit history and the files, then provide the unified diff for your predicted edits.
|
||||
Do not include the cursor marker in your output.
|
||||
If you're editing multiple files, be sure to reflect filename in the hunk's header.
|
||||
"};
|
||||
"};
|
||||
|
||||
const XML_TAGS_INSTRUCTIONS: &str = indoc! {r#"
|
||||
# Instructions
|
||||
|
||||
You are an edit prediction agent in a code editor.
|
||||
|
||||
Analyze the history of edits made by the user in order to infer what they are currently trying to accomplish.
|
||||
Then complete the remainder of the current change if it is incomplete, or predict the next edit the user intends to make.
|
||||
Always continue along the user's current trajectory, rather than changing course.
|
||||
|
||||
## Output Format
|
||||
|
||||
You should briefly explain your understanding of the user's overall goal in one sentence, then explain what the next change
|
||||
along the users current trajectory will be in another, and finally specify the next edit using the following XML-like format:
|
||||
|
||||
<edits path="my-project/src/myapp/cli.py">
|
||||
<old_text>
|
||||
OLD TEXT 1 HERE
|
||||
</old_text>
|
||||
<new_text>
|
||||
NEW TEXT 1 HERE
|
||||
</new_text>
|
||||
|
||||
<old_text>
|
||||
OLD TEXT 1 HERE
|
||||
</old_text>
|
||||
<new_text>
|
||||
NEW TEXT 1 HERE
|
||||
</new_text>
|
||||
</edits>
|
||||
|
||||
- Specify the file to edit using the `path` attribute.
|
||||
- Use `<old_text>` and `<new_text>` tags to replace content
|
||||
- `<old_text>` must exactly match existing file content, including indentation
|
||||
- `<old_text>` cannot be empty
|
||||
- Do not escape quotes, newlines, or other characters within tags
|
||||
- Always close all tags properly
|
||||
- Don't include the <|user_cursor|> marker in your output.
|
||||
|
||||
## Edit History
|
||||
|
||||
"#};
|
||||
|
||||
const OLD_TEXT_NEW_TEXT_REMINDER: &str = indoc! {r#"
|
||||
---
|
||||
|
||||
Remember that the edits in the edit history have already been applied.
|
||||
"#};
|
||||
|
||||
pub fn build_prompt(
|
||||
request: &predict_edits_v3::PredictEditsRequest,
|
||||
) -> Result<(String, SectionLabels)> {
|
||||
let mut section_labels = Default::default();
|
||||
|
||||
match request.prompt_format {
|
||||
PromptFormat::MinimalQwen => {
|
||||
let prompt = MinimalQwenPrompt {
|
||||
events: request.events.clone(),
|
||||
cursor_point: request.cursor_point,
|
||||
cursor_path: request.excerpt_path.clone(),
|
||||
included_files: request.included_files.clone(),
|
||||
};
|
||||
return Ok((prompt.render(), section_labels));
|
||||
}
|
||||
_ => (),
|
||||
};
|
||||
|
||||
let mut insertions = match request.prompt_format {
|
||||
PromptFormat::MarkedExcerpt => vec![
|
||||
(
|
||||
@@ -121,58 +200,74 @@ pub fn build_prompt(
|
||||
EDITABLE_REGION_END_MARKER_WITH_NEWLINE,
|
||||
),
|
||||
],
|
||||
PromptFormat::LabeledSections => vec![(request.cursor_point, CURSOR_MARKER)],
|
||||
PromptFormat::NumLinesUniDiff => {
|
||||
PromptFormat::LabeledSections
|
||||
| PromptFormat::NumLinesUniDiff
|
||||
| PromptFormat::Minimal
|
||||
| PromptFormat::OldTextNewText => {
|
||||
vec![(request.cursor_point, CURSOR_MARKER)]
|
||||
}
|
||||
PromptFormat::OnlySnippets => vec![],
|
||||
PromptFormat::MinimalQwen => unreachable!(),
|
||||
};
|
||||
|
||||
let mut prompt = match request.prompt_format {
|
||||
PromptFormat::MarkedExcerpt => MARKED_EXCERPT_INSTRUCTIONS.to_string(),
|
||||
PromptFormat::LabeledSections => LABELED_SECTIONS_INSTRUCTIONS.to_string(),
|
||||
PromptFormat::NumLinesUniDiff => NUMBERED_LINES_INSTRUCTIONS.to_string(),
|
||||
// only intended for use via zeta_cli
|
||||
PromptFormat::OldTextNewText => XML_TAGS_INSTRUCTIONS.to_string(),
|
||||
PromptFormat::OnlySnippets => String::new(),
|
||||
PromptFormat::Minimal => STUDENT_MODEL_INSTRUCTIONS.to_string(),
|
||||
PromptFormat::MinimalQwen => unreachable!(),
|
||||
};
|
||||
|
||||
if request.events.is_empty() {
|
||||
prompt.push_str("(No edit history)\n\n");
|
||||
} else {
|
||||
prompt.push_str(
|
||||
"The following are the latest edits made by the user, from earlier to later.\n\n",
|
||||
);
|
||||
let edit_preamble = if request.prompt_format == PromptFormat::Minimal {
|
||||
"The following are the latest edits made by the user, from earlier to later.\n\n"
|
||||
} else {
|
||||
"Here are the latest edits made by the user, from earlier to later.\n\n"
|
||||
};
|
||||
prompt.push_str(edit_preamble);
|
||||
push_events(&mut prompt, &request.events);
|
||||
}
|
||||
|
||||
if request.prompt_format == PromptFormat::NumLinesUniDiff {
|
||||
if request.referenced_declarations.is_empty() {
|
||||
prompt.push_str(indoc! {"
|
||||
# File under the cursor:
|
||||
let excerpts_preamble = match request.prompt_format {
|
||||
PromptFormat::Minimal => indoc! {"
|
||||
## Part of the file under the cursor
|
||||
|
||||
The cursor marker <|user_cursor|> indicates the current user cursor position.
|
||||
The file is in current state, edits from edit history have been applied.
|
||||
We prepend line numbers (e.g., `123|<actual line>`); they are not part of the file.
|
||||
(The cursor marker <|user_cursor|> indicates the current user cursor position.
|
||||
The file is in current state, edits from edit history has been applied.
|
||||
We only show part of the file around the cursor.
|
||||
You can only edit exactly this part of the file.
|
||||
We prepend line numbers (e.g., `123|<actual line>`); they are not part of the file.)
|
||||
"},
|
||||
PromptFormat::NumLinesUniDiff | PromptFormat::OldTextNewText => indoc! {"
|
||||
## Code Excerpts
|
||||
|
||||
"});
|
||||
} else {
|
||||
// Note: This hasn't been trained on yet
|
||||
prompt.push_str(indoc! {"
|
||||
# Code Excerpts:
|
||||
Here is some excerpts of code that you should take into account to predict the next edit.
|
||||
|
||||
The cursor marker <|user_cursor|> indicates the current user cursor position.
|
||||
Other excerpts of code from the project have been included as context based on their similarity to the code under the cursor.
|
||||
Context excerpts are not guaranteed to be relevant, so use your own judgement.
|
||||
Files are in their current state, edits from edit history have been applied.
|
||||
We prepend line numbers (e.g., `123|<actual line>`); they are not part of the file.
|
||||
The cursor position is marked by `<|user_cursor|>` as it stands after the last edit in the history.
|
||||
|
||||
"});
|
||||
}
|
||||
} else {
|
||||
prompt.push_str("\n## Code\n\n");
|
||||
}
|
||||
In addition other excerpts are included to better understand what the edit will be, including the declaration
|
||||
or references of symbols around the cursor, or other similar code snippets that may need to be updated
|
||||
following patterns that appear in the edit history.
|
||||
|
||||
let mut section_labels = Default::default();
|
||||
Consider each of them carefully in relation to the edit history, and that the user may not have navigated
|
||||
to the next place they want to edit yet.
|
||||
|
||||
Lines starting with `…` indicate omitted line ranges. These may appear inside multi-line code constructs.
|
||||
"},
|
||||
_ => indoc! {"
|
||||
## Code Excerpts
|
||||
|
||||
The cursor marker <|user_cursor|> indicates the current user cursor position.
|
||||
The file is in current state, edits from edit history have been applied.
|
||||
"},
|
||||
};
|
||||
|
||||
prompt.push_str(excerpts_preamble);
|
||||
prompt.push('\n');
|
||||
|
||||
if !request.referenced_declarations.is_empty() || !request.signatures.is_empty() {
|
||||
let syntax_based_prompt = SyntaxBasedPrompt::populate(request)?;
|
||||
@@ -182,24 +277,52 @@ pub fn build_prompt(
|
||||
anyhow::bail!("PromptFormat::LabeledSections cannot be used with ContextMode::Llm");
|
||||
}
|
||||
|
||||
let include_line_numbers = matches!(
|
||||
request.prompt_format,
|
||||
PromptFormat::NumLinesUniDiff | PromptFormat::Minimal
|
||||
);
|
||||
for related_file in &request.included_files {
|
||||
write_codeblock(
|
||||
&related_file.path,
|
||||
&related_file.excerpts,
|
||||
if related_file.path == request.excerpt_path {
|
||||
&insertions
|
||||
} else {
|
||||
&[]
|
||||
},
|
||||
related_file.max_row,
|
||||
request.prompt_format == PromptFormat::NumLinesUniDiff,
|
||||
&mut prompt,
|
||||
);
|
||||
if request.prompt_format == PromptFormat::Minimal {
|
||||
write_codeblock_with_filename(
|
||||
&related_file.path,
|
||||
&related_file.excerpts,
|
||||
if related_file.path == request.excerpt_path {
|
||||
&insertions
|
||||
} else {
|
||||
&[]
|
||||
},
|
||||
related_file.max_row,
|
||||
include_line_numbers,
|
||||
&mut prompt,
|
||||
);
|
||||
} else {
|
||||
write_codeblock(
|
||||
&related_file.path,
|
||||
&related_file.excerpts,
|
||||
if related_file.path == request.excerpt_path {
|
||||
&insertions
|
||||
} else {
|
||||
&[]
|
||||
},
|
||||
related_file.max_row,
|
||||
include_line_numbers,
|
||||
&mut prompt,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if request.prompt_format == PromptFormat::NumLinesUniDiff {
|
||||
prompt.push_str(UNIFIED_DIFF_REMINDER);
|
||||
match request.prompt_format {
|
||||
PromptFormat::NumLinesUniDiff => {
|
||||
prompt.push_str(UNIFIED_DIFF_REMINDER);
|
||||
}
|
||||
PromptFormat::OldTextNewText => {
|
||||
prompt.push_str(OLD_TEXT_NEW_TEXT_REMINDER);
|
||||
}
|
||||
PromptFormat::Minimal => {
|
||||
prompt.push_str(MINIMAL_PROMPT_REMINDER);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
Ok((prompt, section_labels))
|
||||
@@ -214,6 +337,27 @@ pub fn write_codeblock<'a>(
|
||||
output: &'a mut String,
|
||||
) {
|
||||
writeln!(output, "`````{}", DiffPathFmt(path)).unwrap();
|
||||
|
||||
write_excerpts(
|
||||
excerpts,
|
||||
sorted_insertions,
|
||||
file_line_count,
|
||||
include_line_numbers,
|
||||
output,
|
||||
);
|
||||
write!(output, "`````\n\n").unwrap();
|
||||
}
|
||||
|
||||
fn write_codeblock_with_filename<'a>(
|
||||
path: &Path,
|
||||
excerpts: impl IntoIterator<Item = &'a Excerpt>,
|
||||
sorted_insertions: &[(Point, &str)],
|
||||
file_line_count: Line,
|
||||
include_line_numbers: bool,
|
||||
output: &'a mut String,
|
||||
) {
|
||||
writeln!(output, "`````filename={}", DiffPathFmt(path)).unwrap();
|
||||
|
||||
write_excerpts(
|
||||
excerpts,
|
||||
sorted_insertions,
|
||||
@@ -624,6 +768,8 @@ impl<'a> SyntaxBasedPrompt<'a> {
|
||||
match self.request.prompt_format {
|
||||
PromptFormat::MarkedExcerpt
|
||||
| PromptFormat::OnlySnippets
|
||||
| PromptFormat::OldTextNewText
|
||||
| PromptFormat::Minimal
|
||||
| PromptFormat::NumLinesUniDiff => {
|
||||
if range.start.0 > 0 && !skipped_last_snippet {
|
||||
output.push_str("…\n");
|
||||
@@ -639,6 +785,7 @@ impl<'a> SyntaxBasedPrompt<'a> {
|
||||
writeln!(output, "<|section_{}|>", section_index).ok();
|
||||
}
|
||||
}
|
||||
PromptFormat::MinimalQwen => unreachable!(),
|
||||
}
|
||||
|
||||
let push_full_snippet = |output: &mut String| {
|
||||
@@ -748,3 +895,69 @@ fn declaration_size(declaration: &ReferencedDeclaration, style: DeclarationStyle
|
||||
DeclarationStyle::Declaration => declaration.text.len(),
|
||||
}
|
||||
}
|
||||
|
||||
struct MinimalQwenPrompt {
|
||||
events: Vec<Event>,
|
||||
cursor_point: Point,
|
||||
cursor_path: Arc<Path>, // TODO: make a common struct with cursor_point
|
||||
included_files: Vec<IncludedFile>,
|
||||
}
|
||||
|
||||
impl MinimalQwenPrompt {
|
||||
const INSTRUCTIONS: &str = "You are a code completion assistant that analyzes edit history to identify and systematically complete incomplete refactorings or patterns across the entire codebase.\n";
|
||||
|
||||
fn render(&self) -> String {
|
||||
let edit_history = self.fmt_edit_history();
|
||||
let context = self.fmt_context();
|
||||
|
||||
format!(
|
||||
"{instructions}\n\n{edit_history}\n\n{context}",
|
||||
instructions = MinimalQwenPrompt::INSTRUCTIONS,
|
||||
edit_history = edit_history,
|
||||
context = context
|
||||
)
|
||||
}
|
||||
|
||||
fn fmt_edit_history(&self) -> String {
|
||||
if self.events.is_empty() {
|
||||
"(No edit history)\n\n".to_string()
|
||||
} else {
|
||||
let mut events_str = String::new();
|
||||
push_events(&mut events_str, &self.events);
|
||||
format!(
|
||||
"The following are the latest edits made by the user, from earlier to later.\n\n{}",
|
||||
events_str
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
fn fmt_context(&self) -> String {
|
||||
let mut context = String::new();
|
||||
let include_line_numbers = true;
|
||||
|
||||
for related_file in &self.included_files {
|
||||
writeln!(context, "<|file_sep|>{}", DiffPathFmt(&related_file.path)).unwrap();
|
||||
|
||||
if related_file.path == self.cursor_path {
|
||||
write!(context, "<|fim_prefix|>").unwrap();
|
||||
write_excerpts(
|
||||
&related_file.excerpts,
|
||||
&[(self.cursor_point, "<|fim_suffix|>")],
|
||||
related_file.max_row,
|
||||
include_line_numbers,
|
||||
&mut context,
|
||||
);
|
||||
writeln!(context, "<|fim_middle|>").unwrap();
|
||||
} else {
|
||||
write_excerpts(
|
||||
&related_file.excerpts,
|
||||
&[],
|
||||
related_file.max_row,
|
||||
include_line_numbers,
|
||||
&mut context,
|
||||
);
|
||||
}
|
||||
}
|
||||
context
|
||||
}
|
||||
}
|
||||
|
||||
@@ -11,11 +11,11 @@ pub fn build_prompt(request: predict_edits_v3::PlanContextRetrievalRequest) -> R
    let mut prompt = SEARCH_INSTRUCTIONS.to_string();

    if !request.events.is_empty() {
        writeln!(&mut prompt, "## User Edits\n")?;
        writeln!(&mut prompt, "\n## User Edits\n\n")?;
        push_events(&mut prompt, &request.events);
    }

    writeln!(&mut prompt, "## Cursor context")?;
    writeln!(&mut prompt, "## Cursor context\n")?;
    write_codeblock(
        &request.excerpt_path,
        &[Excerpt {
@@ -40,11 +40,49 @@ pub fn build_prompt(request: predict_edits_v3::PlanContextRetrievalRequest) -> R
pub struct SearchToolInput {
    /// An array of queries to run for gathering context relevant to the next prediction
    #[schemars(length(max = 3))]
    #[serde(deserialize_with = "deserialize_queries")]
    pub queries: Box<[SearchToolQuery]>,
}

fn deserialize_queries<'de, D>(deserializer: D) -> Result<Box<[SearchToolQuery]>, D::Error>
where
    D: serde::Deserializer<'de>,
{
    use serde::de::Error;

    #[derive(Deserialize)]
    #[serde(untagged)]
    enum QueryCollection {
        Array(Box<[SearchToolQuery]>),
        DoubleArray(Box<[Box<[SearchToolQuery]>]>),
        Single(SearchToolQuery),
    }

    #[derive(Deserialize)]
    #[serde(untagged)]
    enum MaybeDoubleEncoded {
        SingleEncoded(QueryCollection),
        DoubleEncoded(String),
    }

    let result = MaybeDoubleEncoded::deserialize(deserializer)?;

    let normalized = match result {
        MaybeDoubleEncoded::SingleEncoded(value) => value,
        MaybeDoubleEncoded::DoubleEncoded(value) => {
            serde_json::from_str(&value).map_err(D::Error::custom)?
        }
    };

    Ok(match normalized {
        QueryCollection::Array(items) => items,
        QueryCollection::Single(search_tool_query) => Box::new([search_tool_query]),
        QueryCollection::DoubleArray(double_array) => double_array.into_iter().flatten().collect(),
    })
}

/// Search for relevant code by path, syntax hierarchy, and content.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)]
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, Hash)]
pub struct SearchToolQuery {
    /// 1. A glob pattern to match file paths in the codebase to search in.
    pub glob: String,
@@ -92,3 +130,115 @@ const TOOL_USE_REMINDER: &str = indoc! {"
    --
    Analyze the user's intent in one to two sentences, then call the `search` tool.
"};

#[cfg(test)]
mod tests {
    use serde_json::json;

    use super::*;

    #[test]
    fn test_deserialize_queries() {
        let single_query_json = indoc! {r#"{
            "queries": {
                "glob": "**/*.rs",
                "syntax_node": ["fn test"],
                "content": "assert"
            }
        }"#};

        let flat_input: SearchToolInput = serde_json::from_str(single_query_json).unwrap();
        assert_eq!(flat_input.queries.len(), 1);
        assert_eq!(flat_input.queries[0].glob, "**/*.rs");
        assert_eq!(flat_input.queries[0].syntax_node, vec!["fn test"]);
        assert_eq!(flat_input.queries[0].content, Some("assert".to_string()));

        let flat_json = indoc! {r#"{
            "queries": [
                {
                    "glob": "**/*.rs",
                    "syntax_node": ["fn test"],
                    "content": "assert"
                },
                {
                    "glob": "**/*.ts",
                    "syntax_node": [],
                    "content": null
                }
            ]
        }"#};

        let flat_input: SearchToolInput = serde_json::from_str(flat_json).unwrap();
        assert_eq!(flat_input.queries.len(), 2);
        assert_eq!(flat_input.queries[0].glob, "**/*.rs");
        assert_eq!(flat_input.queries[0].syntax_node, vec!["fn test"]);
        assert_eq!(flat_input.queries[0].content, Some("assert".to_string()));
        assert_eq!(flat_input.queries[1].glob, "**/*.ts");
        assert_eq!(flat_input.queries[1].syntax_node.len(), 0);
        assert_eq!(flat_input.queries[1].content, None);

        let nested_json = indoc! {r#"{
            "queries": [
                [
                    {
                        "glob": "**/*.rs",
                        "syntax_node": ["fn test"],
                        "content": "assert"
                    }
                ],
                [
                    {
                        "glob": "**/*.ts",
                        "syntax_node": [],
                        "content": null
                    }
                ]
            ]
        }"#};

        let nested_input: SearchToolInput = serde_json::from_str(nested_json).unwrap();

        assert_eq!(nested_input.queries.len(), 2);

        assert_eq!(nested_input.queries[0].glob, "**/*.rs");
        assert_eq!(nested_input.queries[0].syntax_node, vec!["fn test"]);
        assert_eq!(nested_input.queries[0].content, Some("assert".to_string()));
        assert_eq!(nested_input.queries[1].glob, "**/*.ts");
        assert_eq!(nested_input.queries[1].syntax_node.len(), 0);
        assert_eq!(nested_input.queries[1].content, None);

        let double_encoded_queries = serde_json::to_string(&json!({
            "queries": serde_json::to_string(&json!([
                {
                    "glob": "**/*.rs",
                    "syntax_node": ["fn test"],
                    "content": "assert"
                },
                {
                    "glob": "**/*.ts",
                    "syntax_node": [],
                    "content": null
                }
            ])).unwrap()
        }))
        .unwrap();

        let double_encoded_input: SearchToolInput =
            serde_json::from_str(&double_encoded_queries).unwrap();

        assert_eq!(double_encoded_input.queries.len(), 2);

        assert_eq!(double_encoded_input.queries[0].glob, "**/*.rs");
        assert_eq!(double_encoded_input.queries[0].syntax_node, vec!["fn test"]);
        assert_eq!(
            double_encoded_input.queries[0].content,
            Some("assert".to_string())
        );
        assert_eq!(double_encoded_input.queries[1].glob, "**/*.ts");
        assert_eq!(double_encoded_input.queries[1].syntax_node.len(), 0);
        assert_eq!(double_encoded_input.queries[1].content, None);

        // ### ERROR Switching from var declarations to lexical declarations [RUN 073]
        // invalid search json {"queries": ["express/lib/response.js", "var\\s+[a-zA-Z_][a-zA-Z0-9_]*\\s*=.*;", "function.*\\(.*\\).*\\{.*\\}"]}
    }
}
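The tests above exercise the three accepted shapes (single object, flat array, nested array) plus the double-encoded string case. The core fallback used by `deserialize_queries` is reusable on its own; a minimal self-contained sketch follows, where `Item` and `items_or_encoded_string` are placeholder names, not part of this change:

use serde::{Deserialize, Deserializer};

// Placeholder payload type for the sketch; stands in for SearchToolQuery.
#[derive(Debug, Deserialize)]
struct Item {
    name: String,
}

// Accept either a properly typed array or the same array serialized again as a
// JSON string (e.g. "[{\"name\":\"a\"}]"), which some models emit.
fn items_or_encoded_string<'de, D>(deserializer: D) -> Result<Vec<Item>, D::Error>
where
    D: Deserializer<'de>,
{
    use serde::de::Error;

    #[derive(Deserialize)]
    #[serde(untagged)]
    enum MaybeEncoded {
        Items(Vec<Item>),
        Encoded(String),
    }

    match MaybeEncoded::deserialize(deserializer)? {
        MaybeEncoded::Items(items) => Ok(items),
        MaybeEncoded::Encoded(raw) => serde_json::from_str(&raw).map_err(D::Error::custom),
    }
}

Because `#[serde(untagged)]` tries variants in declaration order, the well-typed shape is attempted first and the string fallback only runs when that fails.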
@@ -291,29 +291,6 @@ CREATE TABLE IF NOT EXISTS "channel_chat_participants" (

CREATE INDEX "index_channel_chat_participants_on_channel_id" ON "channel_chat_participants" ("channel_id");

CREATE TABLE IF NOT EXISTS "channel_messages" (
    "id" INTEGER PRIMARY KEY AUTOINCREMENT,
    "channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE,
    "sender_id" INTEGER NOT NULL REFERENCES users (id),
    "body" TEXT NOT NULL,
    "sent_at" TIMESTAMP,
    "edited_at" TIMESTAMP,
    "nonce" BLOB NOT NULL,
    "reply_to_message_id" INTEGER DEFAULT NULL
);

CREATE INDEX "index_channel_messages_on_channel_id" ON "channel_messages" ("channel_id");

CREATE UNIQUE INDEX "index_channel_messages_on_sender_id_nonce" ON "channel_messages" ("sender_id", "nonce");

CREATE TABLE "channel_message_mentions" (
    "message_id" INTEGER NOT NULL REFERENCES channel_messages (id) ON DELETE CASCADE,
    "start_offset" INTEGER NOT NULL,
    "end_offset" INTEGER NOT NULL,
    "user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE,
    PRIMARY KEY (message_id, start_offset)
);

CREATE TABLE "channel_members" (
    "id" INTEGER PRIMARY KEY AUTOINCREMENT,
    "channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE,
@@ -408,15 +385,6 @@ CREATE TABLE "observed_buffer_edits" (

CREATE UNIQUE INDEX "index_observed_buffers_user_and_buffer_id" ON "observed_buffer_edits" ("user_id", "buffer_id");

CREATE TABLE IF NOT EXISTS "observed_channel_messages" (
    "user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE,
    "channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE,
    "channel_message_id" INTEGER NOT NULL,
    PRIMARY KEY (user_id, channel_id)
);

CREATE UNIQUE INDEX "index_observed_channel_messages_user_and_channel_id" ON "observed_channel_messages" ("user_id", "channel_id");

CREATE TABLE "notification_kinds" (
    "id" INTEGER PRIMARY KEY AUTOINCREMENT,
    "name" VARCHAR NOT NULL
@@ -0,0 +1,3 @@
drop table observed_channel_messages;
drop table channel_message_mentions;
drop table channel_messages;
@@ -0,0 +1 @@
drop table embeddings;
@@ -0,0 +1,4 @@
alter table billing_customers
add column external_id text;

create unique index uix_billing_customers_on_external_id on billing_customers (external_id);
@@ -5,7 +5,6 @@ pub mod buffers;
pub mod channels;
pub mod contacts;
pub mod contributors;
pub mod embeddings;
pub mod extensions;
pub mod notifications;
pub mod projects;
@@ -1,94 +0,0 @@
use super::*;
use time::Duration;
use time::OffsetDateTime;

impl Database {
    pub async fn get_embeddings(
        &self,
        model: &str,
        digests: &[Vec<u8>],
    ) -> Result<HashMap<Vec<u8>, Vec<f32>>> {
        self.transaction(|tx| async move {
            let embeddings = {
                let mut db_embeddings = embedding::Entity::find()
                    .filter(
                        embedding::Column::Model.eq(model).and(
                            embedding::Column::Digest
                                .is_in(digests.iter().map(|digest| digest.as_slice())),
                        ),
                    )
                    .stream(&*tx)
                    .await?;

                let mut embeddings = HashMap::default();
                while let Some(db_embedding) = db_embeddings.next().await {
                    let db_embedding = db_embedding?;
                    embeddings.insert(db_embedding.digest, db_embedding.dimensions);
                }
                embeddings
            };

            if !embeddings.is_empty() {
                let now = OffsetDateTime::now_utc();
                let retrieved_at = PrimitiveDateTime::new(now.date(), now.time());

                embedding::Entity::update_many()
                    .filter(
                        embedding::Column::Digest
                            .is_in(embeddings.keys().map(|digest| digest.as_slice())),
                    )
                    .col_expr(embedding::Column::RetrievedAt, Expr::value(retrieved_at))
                    .exec(&*tx)
                    .await?;
            }

            Ok(embeddings)
        })
        .await
    }

    pub async fn save_embeddings(
        &self,
        model: &str,
        embeddings: &HashMap<Vec<u8>, Vec<f32>>,
    ) -> Result<()> {
        self.transaction(|tx| async move {
            embedding::Entity::insert_many(embeddings.iter().map(|(digest, dimensions)| {
                let now_offset_datetime = OffsetDateTime::now_utc();
                let retrieved_at =
                    PrimitiveDateTime::new(now_offset_datetime.date(), now_offset_datetime.time());

                embedding::ActiveModel {
                    model: ActiveValue::set(model.to_string()),
                    digest: ActiveValue::set(digest.clone()),
                    dimensions: ActiveValue::set(dimensions.clone()),
                    retrieved_at: ActiveValue::set(retrieved_at),
                }
            }))
            .on_conflict(
                OnConflict::columns([embedding::Column::Model, embedding::Column::Digest])
                    .do_nothing()
                    .to_owned(),
            )
            .exec_without_returning(&*tx)
            .await?;
            Ok(())
        })
        .await
    }

    pub async fn purge_old_embeddings(&self) -> Result<()> {
        self.transaction(|tx| async move {
            embedding::Entity::delete_many()
                .filter(
                    embedding::Column::RetrievedAt
                        .lte(OffsetDateTime::now_utc() - Duration::days(60)),
                )
                .exec(&*tx)
                .await?;

            Ok(())
        })
        .await
    }
}
@@ -66,40 +66,6 @@ impl Database {
        .await
    }

    /// Returns all users flagged as staff.
    pub async fn get_staff_users(&self) -> Result<Vec<user::Model>> {
        self.transaction(|tx| async {
            let tx = tx;
            Ok(user::Entity::find()
                .filter(user::Column::Admin.eq(true))
                .all(&*tx)
                .await?)
        })
        .await
    }

    /// Returns a user by email address. There are no access checks here, so this should only be used internally.
    pub async fn get_user_by_email(&self, email: &str) -> Result<Option<User>> {
        self.transaction(|tx| async move {
            Ok(user::Entity::find()
                .filter(user::Column::EmailAddress.eq(email))
                .one(&*tx)
                .await?)
        })
        .await
    }

    /// Returns a user by GitHub user ID. There are no access checks here, so this should only be used internally.
    pub async fn get_user_by_github_user_id(&self, github_user_id: i32) -> Result<Option<User>> {
        self.transaction(|tx| async move {
            Ok(user::Entity::find()
                .filter(user::Column::GithubUserId.eq(github_user_id))
                .one(&*tx)
                .await?)
        })
        .await
    }

    /// Returns a user by GitHub login. There are no access checks here, so this should only be used internally.
    pub async fn get_user_by_github_login(&self, github_login: &str) -> Result<Option<User>> {
        self.transaction(|tx| async move {
@@ -270,39 +236,6 @@ impl Database {
        .await
    }

    /// Sets "accepted_tos_at" on the user to the given timestamp.
    pub async fn set_user_accepted_tos_at(
        &self,
        id: UserId,
        accepted_tos_at: Option<DateTime>,
    ) -> Result<()> {
        self.transaction(|tx| async move {
            user::Entity::update_many()
                .filter(user::Column::Id.eq(id))
                .set(user::ActiveModel {
                    accepted_tos_at: ActiveValue::set(accepted_tos_at),
                    ..Default::default()
                })
                .exec(&*tx)
                .await?;
            Ok(())
        })
        .await
    }

    /// hard delete the user.
    pub async fn destroy_user(&self, id: UserId) -> Result<()> {
        self.transaction(|tx| async move {
            access_token::Entity::delete_many()
                .filter(access_token::Column::UserId.eq(id))
                .exec(&*tx)
                .await?;
            user::Entity::delete_by_id(id).exec(&*tx).await?;
            Ok(())
        })
        .await
    }

    /// Find users where github_login ILIKE name_query.
    pub async fn fuzzy_search_users(&self, name_query: &str, limit: u32) -> Result<Vec<User>> {
        self.transaction(|tx| async {
@@ -341,14 +274,4 @@ impl Database {
        result.push('%');
        result
    }

    pub async fn get_users_missing_github_user_created_at(&self) -> Result<Vec<user::Model>> {
        self.transaction(|tx| async move {
            Ok(user::Entity::find()
                .filter(user::Column::GithubUserCreatedAt.is_null())
                .all(&*tx)
                .await?)
        })
        .await
    }
}
@@ -6,11 +6,8 @@ pub mod channel;
pub mod channel_buffer_collaborator;
pub mod channel_chat_participant;
pub mod channel_member;
pub mod channel_message;
pub mod channel_message_mention;
pub mod contact;
pub mod contributor;
pub mod embedding;
pub mod extension;
pub mod extension_version;
pub mod follower;
@@ -18,7 +15,6 @@ pub mod language_server;
pub mod notification;
pub mod notification_kind;
pub mod observed_buffer_edits;
pub mod observed_channel_messages;
pub mod project;
pub mod project_collaborator;
pub mod project_repository;
@@ -1,47 +0,0 @@
use crate::db::{ChannelId, MessageId, UserId};
use sea_orm::entity::prelude::*;
use time::PrimitiveDateTime;

#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "channel_messages")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: MessageId,
    pub channel_id: ChannelId,
    pub sender_id: UserId,
    pub body: String,
    pub sent_at: PrimitiveDateTime,
    pub edited_at: Option<PrimitiveDateTime>,
    pub nonce: Uuid,
    pub reply_to_message_id: Option<MessageId>,
}

impl ActiveModelBehavior for ActiveModel {}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::channel::Entity",
        from = "Column::ChannelId",
        to = "super::channel::Column::Id"
    )]
    Channel,
    #[sea_orm(
        belongs_to = "super::user::Entity",
        from = "Column::SenderId",
        to = "super::user::Column::Id"
    )]
    Sender,
}

impl Related<super::channel::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Channel.def()
    }
}

impl Related<super::user::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Sender.def()
    }
}
@@ -1,43 +0,0 @@
use crate::db::{MessageId, UserId};
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "channel_message_mentions")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub message_id: MessageId,
    #[sea_orm(primary_key)]
    pub start_offset: i32,
    pub end_offset: i32,
    pub user_id: UserId,
}

impl ActiveModelBehavior for ActiveModel {}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::channel_message::Entity",
        from = "Column::MessageId",
        to = "super::channel_message::Column::Id"
    )]
    Message,
    #[sea_orm(
        belongs_to = "super::user::Entity",
        from = "Column::UserId",
        to = "super::user::Column::Id"
    )]
    MentionedUser,
}

impl Related<super::channel::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Message.def()
    }
}

impl Related<super::user::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::MentionedUser.def()
    }
}
@@ -1,18 +0,0 @@
use sea_orm::entity::prelude::*;
use time::PrimitiveDateTime;

#[derive(Clone, Debug, PartialEq, DeriveEntityModel)]
#[sea_orm(table_name = "embeddings")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub model: String,
    #[sea_orm(primary_key)]
    pub digest: Vec<u8>,
    pub dimensions: Vec<f32>,
    pub retrieved_at: PrimitiveDateTime,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}

impl ActiveModelBehavior for ActiveModel {}
@@ -1,41 +0,0 @@
use crate::db::{ChannelId, MessageId, UserId};
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "observed_channel_messages")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub user_id: UserId,
    pub channel_id: ChannelId,
    pub channel_message_id: MessageId,
}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::channel::Entity",
        from = "Column::ChannelId",
        to = "super::channel::Column::Id"
    )]
    Channel,
    #[sea_orm(
        belongs_to = "super::user::Entity",
        from = "Column::UserId",
        to = "super::user::Column::Id"
    )]
    User,
}

impl Related<super::channel::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Channel.def()
    }
}

impl Related<super::user::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::User.def()
    }
}

impl ActiveModelBehavior for ActiveModel {}
@@ -39,25 +39,6 @@ pub enum Relation {
    Contributor,
}

impl Model {
    /// Returns the timestamp of when the user's account was created.
    ///
    /// This will be the earlier of the `created_at` and `github_user_created_at` timestamps.
    pub fn account_created_at(&self) -> NaiveDateTime {
        let mut account_created_at = self.created_at;
        if let Some(github_created_at) = self.github_user_created_at {
            account_created_at = account_created_at.min(github_created_at);
        }

        account_created_at
    }

    /// Returns the age of the user's account.
    pub fn account_age(&self) -> chrono::Duration {
        chrono::Utc::now().naive_utc() - self.account_created_at()
    }
}

impl Related<super::access_token::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::AccessToken.def()
@@ -2,11 +2,7 @@ mod buffer_tests;
mod channel_tests;
mod contributor_tests;
mod db_tests;
// we only run postgres tests on macos right now
#[cfg(target_os = "macos")]
mod embedding_tests;
mod extension_tests;
mod user_tests;

use crate::migrations::run_database_migrations;
Some files were not shown because too many files have changed in this diff.