Compare commits
181 Commits
git/panel-... ... feat/file-...
| SHA1 |
|---|
| 5840484ef2 |
| d7efc388e9 |
| f2ad0d716f |
| 777b46533f |
| b3dd51560b |
| 25489c2b7a |
| dc372e8a84 |
| 1c4bb60209 |
| 97100ce52f |
| dcf56144b5 |
| 46db753f79 |
| 1a807a7a6a |
| f90d0789fb |
| 9e717c7711 |
| 823844ef18 |
| 70bcf93355 |
| 378b30eba5 |
| 83e7c21b2c |
| e488b6cd0b |
| f52549c1c4 |
| 359521e91d |
| b607077c08 |
| e5fce424b3 |
| a8b04369ae |
| 11b38db3e3 |
| 112b5c16b7 |
| 32ec1037e1 |
| a44fc9a1de |
| efcd7f7d10 |
| aaf2f9d309 |
| 62e3a49212 |
| 87d0401e64 |
| 2c375e2e0a |
| c24f9e47b4 |
| 3fbfea491d |
| 2b369d7532 |
| ed61a79cc5 |
| aa6270e658 |
| d896af2f15 |
| c748b177c4 |
| ddf5937899 |
| 6e1d86f311 |
| a3f04e8b36 |
| 3c81ee6ba6 |
| 34bd8201aa |
| 35ae2f5b2b |
| d420dd63ed |
| 42ed032f12 |
| 2d84af91bf |
| 7aacc7566c |
| 8d632958db |
| 0149de4b54 |
| 359160c8b1 |
| b8081ad7a6 |
| 35c58151eb |
| e025ee6a11 |
| c60d31a726 |
| 0bcf607a28 |
| 431a195c32 |
| 6db6251484 |
| 2fb3d593bc |
| cc1d66b530 |
| 5d08c1b35f |
| b7d4d1791a |
| 81d38d9872 |
| 21f73d9c02 |
| 12857a7207 |
| f6be16da3b |
| 94aa643484 |
| 28a85158c7 |
| a2c2c617b5 |
| 77667f4844 |
| b01a6fbdea |
| 44d91c1709 |
| d187cbb188 |
| c241eadbc3 |
| 5f8226457e |
| 309947aa53 |
| 0881e548de |
| 4511d11a11 |
| 19d2fdb6c6 |
| 20953ecb9d |
| 7475bdaf20 |
| 8e4c807c6a |
| a66dac7b3a |
| 8a903f9c10 |
| 9f9575d100 |
| 00898d46c0 |
| bcc3307a7e |
| 8ba33ad270 |
| 1e6344899d |
| 93f9cff876 |
| 6cafe4a9c5 |
| 585c440e6e |
| 083bd147ef |
| 9591790d8d |
| 74bf1a170d |
| e72c3bf20d |
| 160bf915aa |
| aff4c25a47 |
| 146e754f73 |
| 278fe91a9a |
| 39fb89e031 |
| 9d52b6c538 |
| 082b80ec89 |
| 483e31e42a |
| 61c263fcf0 |
| 3c19174f7b |
| 7e93c171b5 |
| 88a8e53696 |
| c2416d6bab |
| 29cc3d0e18 |
| 760747f127 |
| d4ec55b183 |
| f89bb2f0d2 |
| e43c436cb6 |
| de1bf64f41 |
| 00eafe63d9 |
| 5044e6ac1d |
| 784fdcaee3 |
| fb87972f44 |
| 8cccb5d4f5 |
| 2895d31d83 |
| a112153a2e |
| d21184b1d3 |
| ba136abf6c |
| 2d45c23fb0 |
| b78f19982f |
| a7fac65d62 |
| bf8864d106 |
| 08ee4f7966 |
| 6f6f652cf2 |
| 9c8e37a156 |
| d54c64f35a |
| 0b53da18d5 |
| 5ced3ef0fd |
| 7a37dd9433 |
| a7163623e7 |
| f08068680d |
| a951e414d8 |
| e75c6b1aa5 |
| 149eedb73d |
| fb46bae3ed |
| 28d7c37b0d |
| f6da987d4c |
| efc71f35a5 |
| 3b7ee58cfa |
| 32047bef93 |
| 4003287cc3 |
| 001a47c8b7 |
| 113f0780b3 |
| 273321608f |
| 92cfce568b |
| 2b6cf31ace |
| 58cec41932 |
| 2ec5ca0e05 |
| f8da550867 |
| 0b1d3d78a4 |
| 930b489d90 |
| 121cee8045 |
| 5360dc1504 |
| 69862790cb |
| 284d8f790a |
| f824e93eeb |
| e71bc4821c |
| 64c8c19e1b |
| 622d626a29 |
| 714481073d |
| eccdfed32b |
| 2664596a34 |
| 23f2fb6089 |
| fb2c2c55dc |
| 8315fde1ff |
| fc87440682 |
| c996eadaf5 |
| e8c6c1ba04 |
| b8364d7c33 |
| 7c23ef89ec |
| 2f463370cc |
| feed34cafe |
| 4724aa5cb8 |
.github/ISSUE_TEMPLATE/01_bug_ai.yml (18 changes, vendored)

@@ -39,3 +39,21 @@ body:
Output of "zed: copy system specs into clipboard"
validations:
required: true

- type: textarea
attributes:
label: If applicable, attach your `Zed.log` file to this issue.
description: |
From the command palette, run `zed: open log` to see the last 1000 lines.
Or run `zed: reveal log in file manager` to reveal the log file itself.
value: |
<details><summary>Zed.log</summary>

<!-- Paste your log inside the code block. -->
```log

```

</details>
validations:
required: false
.github/ISSUE_TEMPLATE/04_bug_debugger.yml (18 changes, vendored)

@@ -33,3 +33,21 @@ body:
Output of "zed: copy system specs into clipboard"
validations:
required: true

- type: textarea
attributes:
label: If applicable, attach your `Zed.log` file to this issue.
description: |
From the command palette, run `zed: open log` to see the last 1000 lines.
Or run `zed: reveal log in file manager` to reveal the log file itself.
value: |
<details><summary>Zed.log</summary>

<!-- Paste your log inside the code block. -->
```log

```

</details>
validations:
required: false
.github/ISSUE_TEMPLATE/06_bug_git.yml (18 changes, vendored)

@@ -33,3 +33,21 @@ body:
Output of "zed: copy system specs into clipboard"
validations:
required: true

- type: textarea
attributes:
label: If applicable, attach your `Zed.log` file to this issue.
description: |
From the command palette, run `zed: open log` to see the last 1000 lines.
Or run `zed: reveal log in file manager` to reveal the log file itself.
value: |
<details><summary>Zed.log</summary>

<!-- Paste your log inside the code block. -->
```log

```

</details>
validations:
required: false
.github/ISSUE_TEMPLATE/07_bug_windows.yml (18 changes, vendored)

@@ -33,3 +33,21 @@ body:
Output of "zed: copy system specs into clipboard"
validations:
required: true

- type: textarea
attributes:
label: If applicable, attach your `Zed.log` file to this issue.
description: |
From the command palette, run `zed: open log` to see the last 1000 lines.
Or run `zed: reveal log in file manager` to reveal the log file itself.
value: |
<details><summary>Zed.log</summary>

<!-- Paste your log inside the code block. -->
```log

```

</details>
validations:
required: false
.github/ISSUE_TEMPLATE/10_bug_report.yml (17 changes, vendored)

@@ -56,3 +56,20 @@ body:
Output of "zed: copy system specs into clipboard"
validations:
required: true
- type: textarea
attributes:
label: If applicable, attach your `Zed.log` file to this issue.
description: |
From the command palette, run `zed: open log` to see the last 1000 lines.
Or run `zed: reveal log in file manager` to reveal the log file itself.
value: |
<details><summary>Zed.log</summary>

<!-- Paste your log inside the code block. -->
```log

```

</details>
validations:
required: false
.github/workflows/after_release.yml (16 changes, vendored)

@@ -6,7 +6,21 @@ on:
types:
- published
jobs:
rebuild_releases_page:
if: github.repository_owner == 'zed-industries'
runs-on: namespace-profile-2x4-ubuntu-2404
steps:
- name: after_release::rebuild_releases_page::refresh_cloud_releases
run: curl -fX POST https://cloud.zed.dev/releases/refresh?expect_tag=${{ github.event.release.tag_name }}
shell: bash -euxo pipefail {0}
- name: after_release::rebuild_releases_page::redeploy_zed_dev
run: npm exec --yes -- vercel@37 --token="$VERCEL_TOKEN" --scope zed-industries redeploy https://zed.dev
shell: bash -euxo pipefail {0}
env:
VERCEL_TOKEN: ${{ secrets.VERCEL_TOKEN }}
post_to_discord:
needs:
- rebuild_releases_page
if: github.repository_owner == 'zed-industries'
runs-on: namespace-profile-2x4-ubuntu-2404
steps:
@@ -37,7 +51,7 @@ jobs:
webhook-url: ${{ secrets.DISCORD_WEBHOOK_RELEASE_NOTES }}
content: ${{ steps.get-content.outputs.string }}
publish_winget:
runs-on: namespace-profile-2x4-ubuntu-2404
runs-on: self-32vcpu-windows-2022
steps:
- id: set-package-name
name: after_release::publish_winget::set_package_name
.github/workflows/compare_perf.yml (3 changes, vendored)

@@ -35,6 +35,9 @@ jobs:
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- name: steps::download_wasi_sdk
run: ./script/download-wasi-sdk
shell: bash -euxo pipefail {0}
- name: compare_perf::run_perf::install_hyperfine
run: cargo install hyperfine
shell: bash -euxo pipefail {0}
.github/workflows/release.yml (17 changes, vendored)

@@ -57,16 +57,19 @@ jobs:
mkdir -p ./../.cargo
cp ./.cargo/ci-config.toml ./../.cargo/config.toml
shell: bash -euxo pipefail {0}
- name: steps::cache_rust_dependencies_namespace
uses: namespacelabs/nscloud-cache-action@v1
with:
cache: rust
- name: steps::setup_linux
run: ./script/linux
shell: bash -euxo pipefail {0}
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- name: steps::cache_rust_dependencies_namespace
uses: namespacelabs/nscloud-cache-action@v1
with:
cache: rust
- name: steps::download_wasi_sdk
run: ./script/download-wasi-sdk
shell: bash -euxo pipefail {0}
- name: steps::setup_node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
with:
@@ -202,6 +205,9 @@ jobs:
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- name: steps::download_wasi_sdk
run: ./script/download-wasi-sdk
shell: bash -euxo pipefail {0}
- name: ./script/bundle-linux
run: ./script/bundle-linux
shell: bash -euxo pipefail {0}
@@ -242,6 +248,9 @@ jobs:
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- name: steps::download_wasi_sdk
run: ./script/download-wasi-sdk
shell: bash -euxo pipefail {0}
- name: ./script/bundle-linux
run: ./script/bundle-linux
shell: bash -euxo pipefail {0}
.github/workflows/release_nightly.yml (6 changes, vendored)

@@ -93,6 +93,9 @@ jobs:
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- name: steps::download_wasi_sdk
run: ./script/download-wasi-sdk
shell: bash -euxo pipefail {0}
- name: ./script/bundle-linux
run: ./script/bundle-linux
shell: bash -euxo pipefail {0}
@@ -140,6 +143,9 @@ jobs:
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- name: steps::download_wasi_sdk
run: ./script/download-wasi-sdk
shell: bash -euxo pipefail {0}
- name: ./script/bundle-linux
run: ./script/bundle-linux
shell: bash -euxo pipefail {0}
.github/workflows/run_agent_evals.yml (27 changes, vendored)

@@ -8,22 +8,16 @@ env:
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
ZED_EVAL_TELEMETRY: '1'
MODEL_NAME: ${{ inputs.model_name }}
on:
pull_request:
types:
- synchronize
- reopened
- labeled
branches:
- '**'
schedule:
- cron: 0 0 * * *
workflow_dispatch: {}
workflow_dispatch:
inputs:
model_name:
description: model_name
required: true
type: string
jobs:
agent_evals:
if: |
github.repository_owner == 'zed-industries' &&
(github.event_name != 'pull_request' || contains(github.event.pull_request.labels.*.name, 'run-eval'))
runs-on: namespace-profile-16x32-ubuntu-2204
steps:
- name: steps::checkout_repo
@@ -40,6 +34,9 @@ jobs:
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- name: steps::download_wasi_sdk
run: ./script/download-wasi-sdk
shell: bash -euxo pipefail {0}
- name: steps::setup_cargo_config
run: |
mkdir -p ./../.cargo
@@ -49,14 +46,14 @@ jobs:
run: cargo build --package=eval
shell: bash -euxo pipefail {0}
- name: run_agent_evals::agent_evals::run_eval
run: cargo run --package=eval -- --repetitions=8 --concurrency=1
run: cargo run --package=eval -- --repetitions=8 --concurrency=1 --model "${MODEL_NAME}"
shell: bash -euxo pipefail {0}
- name: steps::cleanup_cargo_config
if: always()
run: |
rm -rf ./../.cargo
shell: bash -euxo pipefail {0}
timeout-minutes: 60
timeout-minutes: 600
concurrency:
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
cancel-in-progress: true
.github/workflows/run_bundling.yml (6 changes, vendored)

@@ -34,6 +34,9 @@ jobs:
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- name: steps::download_wasi_sdk
run: ./script/download-wasi-sdk
shell: bash -euxo pipefail {0}
- name: ./script/bundle-linux
run: ./script/bundle-linux
shell: bash -euxo pipefail {0}
@@ -74,6 +77,9 @@ jobs:
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- name: steps::download_wasi_sdk
run: ./script/download-wasi-sdk
shell: bash -euxo pipefail {0}
- name: ./script/bundle-linux
run: ./script/bundle-linux
shell: bash -euxo pipefail {0}
.github/workflows/run_tests.yml (98 changes, vendored)

@@ -143,16 +143,19 @@ jobs:
mkdir -p ./../.cargo
cp ./.cargo/ci-config.toml ./../.cargo/config.toml
shell: bash -euxo pipefail {0}
- name: steps::cache_rust_dependencies_namespace
uses: namespacelabs/nscloud-cache-action@v1
with:
cache: rust
- name: steps::setup_linux
run: ./script/linux
shell: bash -euxo pipefail {0}
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- name: steps::cache_rust_dependencies_namespace
uses: namespacelabs/nscloud-cache-action@v1
with:
cache: rust
- name: steps::download_wasi_sdk
run: ./script/download-wasi-sdk
shell: bash -euxo pipefail {0}
- name: steps::setup_node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
with:
@@ -232,6 +235,9 @@ jobs:
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- name: steps::download_wasi_sdk
run: ./script/download-wasi-sdk
shell: bash -euxo pipefail {0}
- name: steps::setup_cargo_config
run: |
mkdir -p ./../.cargo
@@ -263,16 +269,19 @@ jobs:
mkdir -p ./../.cargo
cp ./.cargo/ci-config.toml ./../.cargo/config.toml
shell: bash -euxo pipefail {0}
- name: steps::cache_rust_dependencies_namespace
uses: namespacelabs/nscloud-cache-action@v1
with:
cache: rust
- name: steps::setup_linux
run: ./script/linux
shell: bash -euxo pipefail {0}
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- name: steps::cache_rust_dependencies_namespace
uses: namespacelabs/nscloud-cache-action@v1
with:
cache: rust
- name: steps::download_wasi_sdk
run: ./script/download-wasi-sdk
shell: bash -euxo pipefail {0}
- name: cargo build -p collab
run: cargo build -p collab
shell: bash -euxo pipefail {0}
@@ -285,40 +294,6 @@ jobs:
rm -rf ./../.cargo
shell: bash -euxo pipefail {0}
timeout-minutes: 60
check_postgres_and_protobuf_migrations:
needs:
- orchestrate
if: needs.orchestrate.outputs.run_tests == 'true'
runs-on: self-mini-macos
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
fetch-depth: 0
- name: run_tests::check_postgres_and_protobuf_migrations::remove_untracked_files
run: git clean -df
shell: bash -euxo pipefail {0}
- name: run_tests::check_postgres_and_protobuf_migrations::ensure_fresh_merge
run: |
if [ -z "$GITHUB_BASE_REF" ];
then
echo "BUF_BASE_BRANCH=$(git merge-base origin/main HEAD)" >> "$GITHUB_ENV"
else
git checkout -B temp
git merge -q "origin/$GITHUB_BASE_REF" -m "merge main into temp"
echo "BUF_BASE_BRANCH=$GITHUB_BASE_REF" >> "$GITHUB_ENV"
fi
shell: bash -euxo pipefail {0}
- name: run_tests::check_postgres_and_protobuf_migrations::bufbuild_setup_action
uses: bufbuild/buf-setup-action@v1
with:
version: v1.29.0
- name: run_tests::check_postgres_and_protobuf_migrations::bufbuild_breaking_action
uses: bufbuild/buf-breaking-action@v1
with:
input: crates/proto/proto/
against: https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/
timeout-minutes: 60
check_dependencies:
needs:
- orchestrate
@@ -382,6 +357,9 @@ jobs:
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- name: steps::download_wasi_sdk
run: ./script/download-wasi-sdk
shell: bash -euxo pipefail {0}
- name: run_tests::check_docs::install_mdbook
uses: peaceiris/actions-mdbook@ee69d230fe19748b7abf22df32acaa93833fad08
with:
@@ -518,6 +496,40 @@ jobs:
shell: bash -euxo pipefail {0}
timeout-minutes: 60
continue-on-error: true
check_postgres_and_protobuf_migrations:
needs:
- orchestrate
if: needs.orchestrate.outputs.run_tests == 'true'
runs-on: self-mini-macos
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
fetch-depth: 0
- name: run_tests::check_postgres_and_protobuf_migrations::remove_untracked_files
run: git clean -df
shell: bash -euxo pipefail {0}
- name: run_tests::check_postgres_and_protobuf_migrations::ensure_fresh_merge
run: |
if [ -z "$GITHUB_BASE_REF" ];
then
echo "BUF_BASE_BRANCH=$(git merge-base origin/main HEAD)" >> "$GITHUB_ENV"
else
git checkout -B temp
git merge -q "origin/$GITHUB_BASE_REF" -m "merge main into temp"
echo "BUF_BASE_BRANCH=$GITHUB_BASE_REF" >> "$GITHUB_ENV"
fi
shell: bash -euxo pipefail {0}
- name: run_tests::check_postgres_and_protobuf_migrations::bufbuild_setup_action
uses: bufbuild/buf-setup-action@v1
with:
version: v1.29.0
- name: run_tests::check_postgres_and_protobuf_migrations::bufbuild_breaking_action
uses: bufbuild/buf-breaking-action@v1
with:
input: crates/proto/proto/
against: https://github.com/${GITHUB_REPOSITORY}.git#branch=${BUF_BASE_BRANCH},subdir=crates/proto/proto/
timeout-minutes: 60
tests_pass:
needs:
- orchestrate
@@ -527,7 +539,6 @@ jobs:
- run_tests_mac
- doctests
- check_workspace_binaries
- check_postgres_and_protobuf_migrations
- check_dependencies
- check_docs
- check_licenses
@@ -554,7 +565,6 @@ jobs:
check_result "run_tests_mac" "${{ needs.run_tests_mac.result }}"
check_result "doctests" "${{ needs.doctests.result }}"
check_result "check_workspace_binaries" "${{ needs.check_workspace_binaries.result }}"
check_result "check_postgres_and_protobuf_migrations" "${{ needs.check_postgres_and_protobuf_migrations.result }}"
check_result "check_dependencies" "${{ needs.check_dependencies.result }}"
check_result "check_docs" "${{ needs.check_docs.result }}"
check_result "check_licenses" "${{ needs.check_licenses.result }}"
.github/workflows/run_unit_evals.yml (3 changes, vendored)

@@ -33,6 +33,9 @@ jobs:
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- name: steps::download_wasi_sdk
run: ./script/download-wasi-sdk
shell: bash -euxo pipefail {0}
- name: steps::cargo_install_nextest
run: cargo install cargo-nextest --locked
shell: bash -euxo pipefail {0}
40
Cargo.lock
generated
40
Cargo.lock
generated
@@ -32,6 +32,7 @@ dependencies = [
|
||||
"settings",
|
||||
"smol",
|
||||
"task",
|
||||
"telemetry",
|
||||
"tempfile",
|
||||
"terminal",
|
||||
"ui",
|
||||
@@ -39,6 +40,7 @@ dependencies = [
|
||||
"util",
|
||||
"uuid",
|
||||
"watch",
|
||||
"zlog",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -79,6 +81,7 @@ dependencies = [
|
||||
"rand 0.9.2",
|
||||
"serde_json",
|
||||
"settings",
|
||||
"telemetry",
|
||||
"text",
|
||||
"util",
|
||||
"watch",
|
||||
@@ -93,6 +96,7 @@ dependencies = [
|
||||
"auto_update",
|
||||
"editor",
|
||||
"extension_host",
|
||||
"fs",
|
||||
"futures 0.3.31",
|
||||
"gpui",
|
||||
"language",
|
||||
@@ -247,7 +251,6 @@ dependencies = [
|
||||
"acp_tools",
|
||||
"action_log",
|
||||
"agent-client-protocol",
|
||||
"agent_settings",
|
||||
"anyhow",
|
||||
"async-trait",
|
||||
"client",
|
||||
@@ -1328,10 +1331,14 @@ version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"client",
|
||||
"clock",
|
||||
"ctor",
|
||||
"db",
|
||||
"futures 0.3.31",
|
||||
"gpui",
|
||||
"http_client",
|
||||
"log",
|
||||
"parking_lot",
|
||||
"paths",
|
||||
"release_channel",
|
||||
"serde",
|
||||
@@ -1342,6 +1349,7 @@ dependencies = [
|
||||
"util",
|
||||
"which 6.0.3",
|
||||
"workspace",
|
||||
"zlog",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3198,6 +3206,7 @@ dependencies = [
|
||||
"indoc",
|
||||
"ordered-float 2.10.1",
|
||||
"rustc-hash 2.1.1",
|
||||
"schemars 1.0.4",
|
||||
"serde",
|
||||
"strum 0.27.2",
|
||||
]
|
||||
@@ -6404,7 +6413,7 @@ dependencies = [
|
||||
"ignore",
|
||||
"libc",
|
||||
"log",
|
||||
"notify 8.0.0",
|
||||
"notify 8.2.0",
|
||||
"objc",
|
||||
"parking_lot",
|
||||
"paths",
|
||||
@@ -7091,7 +7100,6 @@ dependencies = [
|
||||
"askpass",
|
||||
"buffer_diff",
|
||||
"call",
|
||||
"chrono",
|
||||
"cloud_llm_client",
|
||||
"collections",
|
||||
"command_palette_hooks",
|
||||
@@ -7797,6 +7805,7 @@ dependencies = [
|
||||
"parking_lot",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"serde_urlencoded",
|
||||
"sha2",
|
||||
"tempfile",
|
||||
"url",
|
||||
@@ -8864,6 +8873,7 @@ dependencies = [
|
||||
"open_router",
|
||||
"parking_lot",
|
||||
"proto",
|
||||
"schemars 1.0.4",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"settings",
|
||||
@@ -9028,6 +9038,7 @@ dependencies = [
|
||||
"settings",
|
||||
"smol",
|
||||
"task",
|
||||
"terminal",
|
||||
"text",
|
||||
"theme",
|
||||
"toml 0.8.23",
|
||||
@@ -9675,6 +9686,7 @@ dependencies = [
|
||||
"settings",
|
||||
"theme",
|
||||
"ui",
|
||||
"urlencoding",
|
||||
"util",
|
||||
"workspace",
|
||||
]
|
||||
@@ -10409,11 +10421,10 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "notify"
|
||||
version = "8.0.0"
|
||||
source = "git+https://github.com/zed-industries/notify.git?rev=bbb9ea5ae52b253e095737847e367c30653a2e96#bbb9ea5ae52b253e095737847e367c30653a2e96"
|
||||
version = "8.2.0"
|
||||
source = "git+https://github.com/zed-industries/notify.git?rev=b4588b2e5aee68f4c0e100f140e808cbce7b1419#b4588b2e5aee68f4c0e100f140e808cbce7b1419"
|
||||
dependencies = [
|
||||
"bitflags 2.9.4",
|
||||
"filetime",
|
||||
"fsevent-sys 4.1.0",
|
||||
"inotify 0.11.0",
|
||||
"kqueue",
|
||||
@@ -10422,7 +10433,7 @@ dependencies = [
|
||||
"mio 1.1.0",
|
||||
"notify-types",
|
||||
"walkdir",
|
||||
"windows-sys 0.59.0",
|
||||
"windows-sys 0.60.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -10439,7 +10450,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "notify-types"
|
||||
version = "2.0.0"
|
||||
source = "git+https://github.com/zed-industries/notify.git?rev=bbb9ea5ae52b253e095737847e367c30653a2e96#bbb9ea5ae52b253e095737847e367c30653a2e96"
|
||||
source = "git+https://github.com/zed-industries/notify.git?rev=b4588b2e5aee68f4c0e100f140e808cbce7b1419#b4588b2e5aee68f4c0e100f140e808cbce7b1419"
|
||||
|
||||
[[package]]
|
||||
name = "now"
|
||||
@@ -13971,6 +13982,7 @@ dependencies = [
|
||||
"gpui",
|
||||
"gpui_tokio",
|
||||
"http_client",
|
||||
"image",
|
||||
"json_schema_store",
|
||||
"language",
|
||||
"language_extension",
|
||||
@@ -16212,7 +16224,6 @@ dependencies = [
|
||||
"log",
|
||||
"menu",
|
||||
"picker",
|
||||
"project",
|
||||
"reqwest_client",
|
||||
"rust-embed",
|
||||
"settings",
|
||||
@@ -16222,7 +16233,6 @@ dependencies = [
|
||||
"theme",
|
||||
"title_bar",
|
||||
"ui",
|
||||
"workspace",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -18618,6 +18628,7 @@ dependencies = [
|
||||
"itertools 0.14.0",
|
||||
"libc",
|
||||
"log",
|
||||
"mach2 0.5.0",
|
||||
"nix 0.29.0",
|
||||
"pretty_assertions",
|
||||
"rand 0.9.2",
|
||||
@@ -18807,7 +18818,6 @@ dependencies = [
|
||||
name = "vim_mode_setting"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"gpui",
|
||||
"settings",
|
||||
]
|
||||
|
||||
@@ -21136,7 +21146,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "zed"
|
||||
version = "0.212.3"
|
||||
version = "0.213.0"
|
||||
dependencies = [
|
||||
"acp_tools",
|
||||
"activity_indicator",
|
||||
@@ -21675,18 +21685,20 @@ dependencies = [
|
||||
"language_model",
|
||||
"log",
|
||||
"lsp",
|
||||
"open_ai",
|
||||
"pretty_assertions",
|
||||
"project",
|
||||
"release_channel",
|
||||
"schemars 1.0.4",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"settings",
|
||||
"smol",
|
||||
"thiserror 2.0.17",
|
||||
"util",
|
||||
"uuid",
|
||||
"workspace",
|
||||
"worktree",
|
||||
"zlog",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -21698,6 +21710,7 @@ dependencies = [
|
||||
"clap",
|
||||
"client",
|
||||
"cloud_llm_client",
|
||||
"cloud_zeta2_prompt",
|
||||
"collections",
|
||||
"edit_prediction_context",
|
||||
"editor",
|
||||
@@ -21711,7 +21724,6 @@ dependencies = [
|
||||
"ordered-float 2.10.1",
|
||||
"pretty_assertions",
|
||||
"project",
|
||||
"regex-syntax",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"settings",
|
||||
|
||||
@@ -663,6 +663,7 @@ time = { version = "0.3", features = [
"serde",
"serde-well-known",
"formatting",
"local-offset",
] }
tiny_http = "0.8"
tokio = { version = "1" }
@@ -772,8 +773,8 @@ features = [
]

[patch.crates-io]
notify = { git = "https://github.com/zed-industries/notify.git", rev = "bbb9ea5ae52b253e095737847e367c30653a2e96" }
notify-types = { git = "https://github.com/zed-industries/notify.git", rev = "bbb9ea5ae52b253e095737847e367c30653a2e96" }
notify = { git = "https://github.com/zed-industries/notify.git", rev = "b4588b2e5aee68f4c0e100f140e808cbce7b1419" }
notify-types = { git = "https://github.com/zed-industries/notify.git", rev = "b4588b2e5aee68f4c0e100f140e808cbce7b1419" }
windows-capture = { git = "https://github.com/zed-industries/windows-capture.git", rev = "f0d6c1b6691db75461b732f6d5ff56eed002eeb9" }

[profile.dev]
@@ -1,7 +1,7 @@
# Zed

[](https://zed.dev)
[](https://github.com/zed-industries/zed/actions/workflows/ci.yml)
[](https://github.com/zed-industries/zed/actions/workflows/run_tests.yml)

Welcome to Zed, a high-performance, multiplayer code editor from the creators of [Atom](https://github.com/atom/atom) and [Tree-sitter](https://github.com/tree-sitter/tree-sitter).
@@ -735,6 +735,20 @@
"tab": "editor::ComposeCompletion"
}
},
{
"context": "Editor && in_snippet && has_next_tabstop && !showing_completions",
"use_key_equivalents": true,
"bindings": {
"tab": "editor::NextSnippetTabstop"
}
},
{
"context": "Editor && in_snippet && has_previous_tabstop && !showing_completions",
"use_key_equivalents": true,
"bindings": {
"shift-tab": "editor::PreviousSnippetTabstop"
}
},
// Bindings for accepting edit predictions
//
// alt-l is provided as an alternative to tab/alt-tab. and will be displayed in the UI. This is
@@ -805,6 +805,20 @@
"tab": "editor::ComposeCompletion"
}
},
{
"context": "Editor && in_snippet && has_next_tabstop && !showing_completions",
"use_key_equivalents": true,
"bindings": {
"tab": "editor::NextSnippetTabstop"
}
},
{
"context": "Editor && in_snippet && has_previous_tabstop && !showing_completions",
"use_key_equivalents": true,
"bindings": {
"shift-tab": "editor::PreviousSnippetTabstop"
}
},
{
"context": "Editor && edit_prediction",
"bindings": {
@@ -739,6 +739,20 @@
"tab": "editor::ComposeCompletion"
}
},
{
"context": "Editor && in_snippet && has_next_tabstop && !showing_completions",
"use_key_equivalents": true,
"bindings": {
"tab": "editor::NextSnippetTabstop"
}
},
{
"context": "Editor && in_snippet && has_previous_tabstop && !showing_completions",
"use_key_equivalents": true,
"bindings": {
"shift-tab": "editor::PreviousSnippetTabstop"
}
},
// Bindings for accepting edit predictions
//
// alt-l is provided as an alternative to tab/alt-tab. and will be displayed in the UI. This is
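For reference, the same two bindings can be lifted into a user keymap. The sketch below is illustrative and not part of the diff: the context strings and action names are copied verbatim from the hunks above, while the surrounding top-level array is just the usual shape of a Zed user `keymap.json`.

```jsonc
// Sketch of a user keymap using the snippet-tabstop actions added above.
// Context strings and actions are taken from this diff; the rest is illustrative.
[
  {
    "context": "Editor && in_snippet && has_next_tabstop && !showing_completions",
    "use_key_equivalents": true,
    "bindings": {
      "tab": "editor::NextSnippetTabstop"
    }
  },
  {
    "context": "Editor && in_snippet && has_previous_tabstop && !showing_completions",
    "use_key_equivalents": true,
    "bindings": {
      "shift-tab": "editor::PreviousSnippetTabstop"
    }
  }
]
```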
@@ -455,6 +455,7 @@
"<": "vim::Outdent",
"=": "vim::AutoIndent",
"d": "vim::HelixDelete",
"alt-d": "editor::Delete", // Delete selection, without yanking
"c": "vim::HelixSubstitute",
"alt-c": "vim::HelixSubstituteNoYank",
@@ -605,6 +605,10 @@
// to both the horizontal and vertical delta values while scrolling. Fast scrolling
// happens when a user holds the alt or option key while scrolling.
"fast_scroll_sensitivity": 4.0,
"sticky_scroll": {
// Whether to stick scopes to the top of the editor.
"enabled": false
},
"relative_line_numbers": "disabled",
// If 'search_wrap' is disabled, search result do not wrap around the end of the file.
"search_wrap": true,
@@ -612,9 +616,13 @@
"search": {
// Whether to show the project search button in the status bar.
"button": true,
// Whether to only match on whole words.
"whole_word": false,
// Whether to match case sensitively.
"case_sensitive": false,
// Whether to include gitignored files in search results.
"include_ignored": false,
// Whether to interpret the search query as a regular expression.
"regex": false,
// Whether to center the cursor on each search match when navigating.
"center_on_match": false
@@ -1487,7 +1495,11 @@
// in your project's settings, rather than globally.
"directories": [".env", "env", ".venv", "venv"],
// Can also be `csh`, `fish`, `nushell` and `power_shell`
"activate_script": "default"
"activate_script": "default",
// Preferred Conda manager to use when activating Conda environments.
// Values: "auto", "conda", "mamba", "micromamba"
// Default: "auto"
"conda_manager": "auto"
}
},
"toolbar": {
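As a usage sketch (not part of the diff), a user settings file opting into the two options introduced above might look like the following. The keys and accepted values are taken from the defaults shown in this hunk; placing `conda_manager` under `terminal.detect_venv.on` is an assumption based on the surrounding context lines (`directories`, `activate_script`).

```jsonc
// Illustrative user settings; keys and values come from the defaults above.
{
  "sticky_scroll": {
    // Stick enclosing scopes to the top of the editor.
    "enabled": true
  },
  "terminal": {
    "detect_venv": {
      "on": {
        // Assumed location, based on the neighboring `activate_script` setting.
        // Accepted values per the defaults above: "auto", "conda", "mamba", "micromamba".
        "conda_manager": "micromamba"
      }
    }
  }
}
```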
@@ -39,6 +39,7 @@ serde_json.workspace = true
|
||||
settings.workspace = true
|
||||
smol.workspace = true
|
||||
task.workspace = true
|
||||
telemetry.workspace = true
|
||||
terminal.workspace = true
|
||||
ui.workspace = true
|
||||
url.workspace = true
|
||||
@@ -56,3 +57,4 @@ rand.workspace = true
|
||||
tempfile.workspace = true
|
||||
util.workspace = true
|
||||
settings.workspace = true
|
||||
zlog.workspace = true
|
||||
|
||||
@@ -15,7 +15,7 @@ use settings::Settings as _;
|
||||
use task::{Shell, ShellBuilder};
|
||||
pub use terminal::*;
|
||||
|
||||
use action_log::ActionLog;
|
||||
use action_log::{ActionLog, ActionLogTelemetry};
|
||||
use agent_client_protocol::{self as acp};
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use editor::Bias;
|
||||
@@ -820,6 +820,15 @@ pub struct AcpThread {
|
||||
pending_terminal_exit: HashMap<acp::TerminalId, acp::TerminalExitStatus>,
|
||||
}
|
||||
|
||||
impl From<&AcpThread> for ActionLogTelemetry {
|
||||
fn from(value: &AcpThread) -> Self {
|
||||
Self {
|
||||
agent_telemetry_id: value.connection().telemetry_id(),
|
||||
session_id: value.session_id.0.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum AcpThreadEvent {
|
||||
NewEntry,
|
||||
@@ -1346,6 +1355,17 @@ impl AcpThread {
|
||||
let path_style = self.project.read(cx).path_style(cx);
|
||||
let id = update.id.clone();
|
||||
|
||||
let agent = self.connection().telemetry_id();
|
||||
let session = self.session_id();
|
||||
if let ToolCallStatus::Completed | ToolCallStatus::Failed = status {
|
||||
let status = if matches!(status, ToolCallStatus::Completed) {
|
||||
"completed"
|
||||
} else {
|
||||
"failed"
|
||||
};
|
||||
telemetry::event!("Agent Tool Call Completed", agent, session, status);
|
||||
}
|
||||
|
||||
if let Some(ix) = self.index_for_tool_call(&id) {
|
||||
let AgentThreadEntry::ToolCall(call) = &mut self.entries[ix] else {
|
||||
unreachable!()
|
||||
@@ -1869,6 +1889,7 @@ impl AcpThread {
|
||||
return Task::ready(Err(anyhow!("not supported")));
|
||||
};
|
||||
|
||||
let telemetry = ActionLogTelemetry::from(&*self);
|
||||
cx.spawn(async move |this, cx| {
|
||||
cx.update(|cx| truncate.run(id.clone(), cx))?.await?;
|
||||
this.update(cx, |this, cx| {
|
||||
@@ -1877,8 +1898,9 @@ impl AcpThread {
|
||||
this.entries.truncate(ix);
|
||||
cx.emit(AcpThreadEvent::EntriesRemoved(range));
|
||||
}
|
||||
this.action_log()
|
||||
.update(cx, |action_log, cx| action_log.reject_all_edits(cx))
|
||||
this.action_log().update(cx, |action_log, cx| {
|
||||
action_log.reject_all_edits(Some(telemetry), cx)
|
||||
})
|
||||
})?
|
||||
.await;
|
||||
Ok(())
|
||||
@@ -2355,8 +2377,6 @@ mod tests {
|
||||
cx.update(|cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
Project::init_settings(cx);
|
||||
language::init(cx);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -3614,6 +3634,10 @@ mod tests {
|
||||
}
|
||||
|
||||
impl AgentConnection for FakeAgentConnection {
|
||||
fn telemetry_id(&self) -> &'static str {
|
||||
"fake"
|
||||
}
|
||||
|
||||
fn auth_methods(&self) -> &[acp::AuthMethod] {
|
||||
&self.auth_methods
|
||||
}
|
||||
|
||||
@@ -20,6 +20,8 @@ impl UserMessageId {
|
||||
}
|
||||
|
||||
pub trait AgentConnection {
|
||||
fn telemetry_id(&self) -> &'static str;
|
||||
|
||||
fn new_thread(
|
||||
self: Rc<Self>,
|
||||
project: Entity<Project>,
|
||||
@@ -106,9 +108,6 @@ pub trait AgentSessionSetTitle {
|
||||
}
|
||||
|
||||
pub trait AgentTelemetry {
|
||||
/// The name of the agent used for telemetry.
|
||||
fn agent_name(&self) -> String;
|
||||
|
||||
/// A representation of the current thread state that can be serialized for
|
||||
/// storage with telemetry events.
|
||||
fn thread_data(
|
||||
@@ -318,6 +317,10 @@ mod test_support {
|
||||
}
|
||||
|
||||
impl AgentConnection for StubAgentConnection {
|
||||
fn telemetry_id(&self) -> &'static str {
|
||||
"stub"
|
||||
}
|
||||
|
||||
fn auth_methods(&self) -> &[acp::AuthMethod] {
|
||||
&[]
|
||||
}
|
||||
|
||||
@@ -20,6 +20,7 @@ futures.workspace = true
|
||||
gpui.workspace = true
|
||||
language.workspace = true
|
||||
project.workspace = true
|
||||
telemetry.workspace = true
|
||||
text.workspace = true
|
||||
util.workspace = true
|
||||
watch.workspace = true
|
||||
|
||||
@@ -3,7 +3,9 @@ use buffer_diff::BufferDiff;
|
||||
use clock;
|
||||
use collections::BTreeMap;
|
||||
use futures::{FutureExt, StreamExt, channel::mpsc};
|
||||
use gpui::{App, AppContext, AsyncApp, Context, Entity, Subscription, Task, WeakEntity};
|
||||
use gpui::{
|
||||
App, AppContext, AsyncApp, Context, Entity, SharedString, Subscription, Task, WeakEntity,
|
||||
};
|
||||
use language::{Anchor, Buffer, BufferEvent, DiskState, Point, ToPoint};
|
||||
use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle};
|
||||
use std::{cmp, ops::Range, sync::Arc};
|
||||
@@ -31,71 +33,6 @@ impl ActionLog {
|
||||
&self.project
|
||||
}
|
||||
|
||||
pub fn latest_snapshot(&self, buffer: &Entity<Buffer>) -> Option<text::BufferSnapshot> {
|
||||
Some(self.tracked_buffers.get(buffer)?.snapshot.clone())
|
||||
}
|
||||
|
||||
/// Return a unified diff patch with user edits made since last read or notification
|
||||
pub fn unnotified_user_edits(&self, cx: &Context<Self>) -> Option<String> {
|
||||
let diffs = self
|
||||
.tracked_buffers
|
||||
.values()
|
||||
.filter_map(|tracked| {
|
||||
if !tracked.may_have_unnotified_user_edits {
|
||||
return None;
|
||||
}
|
||||
|
||||
let text_with_latest_user_edits = tracked.diff_base.to_string();
|
||||
let text_with_last_seen_user_edits = tracked.last_seen_base.to_string();
|
||||
if text_with_latest_user_edits == text_with_last_seen_user_edits {
|
||||
return None;
|
||||
}
|
||||
let patch = language::unified_diff(
|
||||
&text_with_last_seen_user_edits,
|
||||
&text_with_latest_user_edits,
|
||||
);
|
||||
|
||||
let buffer = tracked.buffer.clone();
|
||||
let file_path = buffer
|
||||
.read(cx)
|
||||
.file()
|
||||
.map(|file| {
|
||||
let mut path = file.full_path(cx).to_string_lossy().into_owned();
|
||||
if file.path_style(cx).is_windows() {
|
||||
path = path.replace('\\', "/");
|
||||
}
|
||||
path
|
||||
})
|
||||
.unwrap_or_else(|| format!("buffer_{}", buffer.entity_id()));
|
||||
|
||||
let mut result = String::new();
|
||||
result.push_str(&format!("--- a/{}\n", file_path));
|
||||
result.push_str(&format!("+++ b/{}\n", file_path));
|
||||
result.push_str(&patch);
|
||||
|
||||
Some(result)
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
if diffs.is_empty() {
|
||||
return None;
|
||||
}
|
||||
|
||||
let unified_diff = diffs.join("\n\n");
|
||||
Some(unified_diff)
|
||||
}
|
||||
|
||||
/// Return a unified diff patch with user edits made since last read/notification
|
||||
/// and mark them as notified
|
||||
pub fn flush_unnotified_user_edits(&mut self, cx: &Context<Self>) -> Option<String> {
|
||||
let patch = self.unnotified_user_edits(cx);
|
||||
self.tracked_buffers.values_mut().for_each(|tracked| {
|
||||
tracked.may_have_unnotified_user_edits = false;
|
||||
tracked.last_seen_base = tracked.diff_base.clone();
|
||||
});
|
||||
patch
|
||||
}
|
||||
|
||||
fn track_buffer_internal(
|
||||
&mut self,
|
||||
buffer: Entity<Buffer>,
|
||||
@@ -145,31 +82,26 @@ impl ActionLog {
|
||||
let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
|
||||
let (diff_update_tx, diff_update_rx) = mpsc::unbounded();
|
||||
let diff_base;
|
||||
let last_seen_base;
|
||||
let unreviewed_edits;
|
||||
if is_created {
|
||||
diff_base = Rope::default();
|
||||
last_seen_base = Rope::default();
|
||||
unreviewed_edits = Patch::new(vec![Edit {
|
||||
old: 0..1,
|
||||
new: 0..text_snapshot.max_point().row + 1,
|
||||
}])
|
||||
} else {
|
||||
diff_base = buffer.read(cx).as_rope().clone();
|
||||
last_seen_base = diff_base.clone();
|
||||
unreviewed_edits = Patch::default();
|
||||
}
|
||||
TrackedBuffer {
|
||||
buffer: buffer.clone(),
|
||||
diff_base,
|
||||
last_seen_base,
|
||||
unreviewed_edits,
|
||||
snapshot: text_snapshot,
|
||||
status,
|
||||
version: buffer.read(cx).version(),
|
||||
diff,
|
||||
diff_update: diff_update_tx,
|
||||
may_have_unnotified_user_edits: false,
|
||||
_open_lsp_handle: open_lsp_handle,
|
||||
_maintain_diff: cx.spawn({
|
||||
let buffer = buffer.clone();
|
||||
@@ -320,10 +252,9 @@ impl ActionLog {
|
||||
let new_snapshot = buffer_snapshot.clone();
|
||||
let unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
|
||||
let edits = diff_snapshots(&old_snapshot, &new_snapshot);
|
||||
let mut has_user_changes = false;
|
||||
async move {
|
||||
if let ChangeAuthor::User = author {
|
||||
has_user_changes = apply_non_conflicting_edits(
|
||||
apply_non_conflicting_edits(
|
||||
&unreviewed_edits,
|
||||
edits,
|
||||
&mut base_text,
|
||||
@@ -331,22 +262,13 @@ impl ActionLog {
|
||||
);
|
||||
}
|
||||
|
||||
(Arc::new(base_text.to_string()), base_text, has_user_changes)
|
||||
(Arc::new(base_text.to_string()), base_text)
|
||||
}
|
||||
});
|
||||
|
||||
anyhow::Ok(rebase)
|
||||
})??;
|
||||
let (new_base_text, new_diff_base, has_user_changes) = rebase.await;
|
||||
|
||||
this.update(cx, |this, _| {
|
||||
let tracked_buffer = this
|
||||
.tracked_buffers
|
||||
.get_mut(buffer)
|
||||
.context("buffer not tracked")
|
||||
.unwrap();
|
||||
tracked_buffer.may_have_unnotified_user_edits |= has_user_changes;
|
||||
})?;
|
||||
let (new_base_text, new_diff_base) = rebase.await;
|
||||
|
||||
Self::update_diff(
|
||||
this,
|
||||
@@ -565,14 +487,17 @@ impl ActionLog {
|
||||
&mut self,
|
||||
buffer: Entity<Buffer>,
|
||||
buffer_range: Range<impl language::ToPoint>,
|
||||
telemetry: Option<ActionLogTelemetry>,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
|
||||
return;
|
||||
};
|
||||
|
||||
let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
|
||||
match tracked_buffer.status {
|
||||
TrackedBufferStatus::Deleted => {
|
||||
metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
|
||||
self.tracked_buffers.remove(&buffer);
|
||||
cx.notify();
|
||||
}
|
||||
@@ -581,7 +506,6 @@ impl ActionLog {
|
||||
let buffer_range =
|
||||
buffer_range.start.to_point(buffer)..buffer_range.end.to_point(buffer);
|
||||
let mut delta = 0i32;
|
||||
|
||||
tracked_buffer.unreviewed_edits.retain_mut(|edit| {
|
||||
edit.old.start = (edit.old.start as i32 + delta) as u32;
|
||||
edit.old.end = (edit.old.end as i32 + delta) as u32;
|
||||
@@ -613,6 +537,7 @@ impl ActionLog {
|
||||
.collect::<String>(),
|
||||
);
|
||||
delta += edit.new_len() as i32 - edit.old_len() as i32;
|
||||
metrics.add_edit(edit);
|
||||
false
|
||||
}
|
||||
});
|
||||
@@ -624,19 +549,24 @@ impl ActionLog {
|
||||
tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
|
||||
}
|
||||
}
|
||||
if let Some(telemetry) = telemetry {
|
||||
telemetry_report_accepted_edits(&telemetry, metrics);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn reject_edits_in_ranges(
|
||||
&mut self,
|
||||
buffer: Entity<Buffer>,
|
||||
buffer_ranges: Vec<Range<impl language::ToPoint>>,
|
||||
telemetry: Option<ActionLogTelemetry>,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Task<Result<()>> {
|
||||
let Some(tracked_buffer) = self.tracked_buffers.get_mut(&buffer) else {
|
||||
return Task::ready(Ok(()));
|
||||
};
|
||||
|
||||
match &tracked_buffer.status {
|
||||
let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
|
||||
let task = match &tracked_buffer.status {
|
||||
TrackedBufferStatus::Created {
|
||||
existing_file_content,
|
||||
} => {
|
||||
@@ -686,6 +616,7 @@ impl ActionLog {
|
||||
}
|
||||
};
|
||||
|
||||
metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
|
||||
self.tracked_buffers.remove(&buffer);
|
||||
cx.notify();
|
||||
task
|
||||
@@ -699,6 +630,7 @@ impl ActionLog {
|
||||
.update(cx, |project, cx| project.save_buffer(buffer.clone(), cx));
|
||||
|
||||
// Clear all tracked edits for this buffer and start over as if we just read it.
|
||||
metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
|
||||
self.tracked_buffers.remove(&buffer);
|
||||
self.buffer_read(buffer.clone(), cx);
|
||||
cx.notify();
|
||||
@@ -738,6 +670,7 @@ impl ActionLog {
|
||||
}
|
||||
|
||||
if revert {
|
||||
metrics.add_edit(edit);
|
||||
let old_range = tracked_buffer
|
||||
.diff_base
|
||||
.point_to_offset(Point::new(edit.old.start, 0))
|
||||
@@ -758,12 +691,25 @@ impl ActionLog {
|
||||
self.project
|
||||
.update(cx, |project, cx| project.save_buffer(buffer, cx))
|
||||
}
|
||||
};
|
||||
if let Some(telemetry) = telemetry {
|
||||
telemetry_report_rejected_edits(&telemetry, metrics);
|
||||
}
|
||||
task
|
||||
}
|
||||
|
||||
pub fn keep_all_edits(&mut self, cx: &mut Context<Self>) {
|
||||
self.tracked_buffers
|
||||
.retain(|_buffer, tracked_buffer| match tracked_buffer.status {
|
||||
pub fn keep_all_edits(
|
||||
&mut self,
|
||||
telemetry: Option<ActionLogTelemetry>,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.tracked_buffers.retain(|buffer, tracked_buffer| {
|
||||
let mut metrics = ActionLogMetrics::for_buffer(buffer.read(cx));
|
||||
metrics.add_edits(tracked_buffer.unreviewed_edits.edits());
|
||||
if let Some(telemetry) = telemetry.as_ref() {
|
||||
telemetry_report_accepted_edits(telemetry, metrics);
|
||||
}
|
||||
match tracked_buffer.status {
|
||||
TrackedBufferStatus::Deleted => false,
|
||||
_ => {
|
||||
if let TrackedBufferStatus::Created { .. } = &mut tracked_buffer.status {
|
||||
@@ -774,13 +720,24 @@ impl ActionLog {
|
||||
tracked_buffer.schedule_diff_update(ChangeAuthor::User, cx);
|
||||
true
|
||||
}
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
pub fn reject_all_edits(&mut self, cx: &mut Context<Self>) -> Task<()> {
|
||||
pub fn reject_all_edits(
|
||||
&mut self,
|
||||
telemetry: Option<ActionLogTelemetry>,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Task<()> {
|
||||
let futures = self.changed_buffers(cx).into_keys().map(|buffer| {
|
||||
let reject = self.reject_edits_in_ranges(buffer, vec![Anchor::MIN..Anchor::MAX], cx);
|
||||
let reject = self.reject_edits_in_ranges(
|
||||
buffer,
|
||||
vec![Anchor::MIN..Anchor::MAX],
|
||||
telemetry.clone(),
|
||||
cx,
|
||||
);
|
||||
|
||||
async move {
|
||||
reject.await.log_err();
|
||||
@@ -788,8 +745,7 @@ impl ActionLog {
|
||||
});
|
||||
|
||||
let task = futures::future::join_all(futures);
|
||||
|
||||
cx.spawn(async move |_, _| {
|
||||
cx.background_spawn(async move {
|
||||
task.await;
|
||||
})
|
||||
}
|
||||
@@ -819,6 +775,61 @@ impl ActionLog {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct ActionLogTelemetry {
|
||||
pub agent_telemetry_id: &'static str,
|
||||
pub session_id: Arc<str>,
|
||||
}
|
||||
|
||||
struct ActionLogMetrics {
|
||||
lines_removed: u32,
|
||||
lines_added: u32,
|
||||
language: Option<SharedString>,
|
||||
}
|
||||
|
||||
impl ActionLogMetrics {
|
||||
fn for_buffer(buffer: &Buffer) -> Self {
|
||||
Self {
|
||||
language: buffer.language().map(|l| l.name().0),
|
||||
lines_removed: 0,
|
||||
lines_added: 0,
|
||||
}
|
||||
}
|
||||
|
||||
fn add_edits(&mut self, edits: &[Edit<u32>]) {
|
||||
for edit in edits {
|
||||
self.add_edit(edit);
|
||||
}
|
||||
}
|
||||
|
||||
fn add_edit(&mut self, edit: &Edit<u32>) {
|
||||
self.lines_added += edit.new_len();
|
||||
self.lines_removed += edit.old_len();
|
||||
}
|
||||
}
|
||||
|
||||
fn telemetry_report_accepted_edits(telemetry: &ActionLogTelemetry, metrics: ActionLogMetrics) {
|
||||
telemetry::event!(
|
||||
"Agent Edits Accepted",
|
||||
agent = telemetry.agent_telemetry_id,
|
||||
session = telemetry.session_id,
|
||||
language = metrics.language,
|
||||
lines_added = metrics.lines_added,
|
||||
lines_removed = metrics.lines_removed
|
||||
);
|
||||
}
|
||||
|
||||
fn telemetry_report_rejected_edits(telemetry: &ActionLogTelemetry, metrics: ActionLogMetrics) {
|
||||
telemetry::event!(
|
||||
"Agent Edits Rejected",
|
||||
agent = telemetry.agent_telemetry_id,
|
||||
session = telemetry.session_id,
|
||||
language = metrics.language,
|
||||
lines_added = metrics.lines_added,
|
||||
lines_removed = metrics.lines_removed
|
||||
);
|
||||
}
|
||||
|
||||
fn apply_non_conflicting_edits(
|
||||
patch: &Patch<u32>,
|
||||
edits: Vec<Edit<u32>>,
|
||||
@@ -949,14 +960,12 @@ enum TrackedBufferStatus {
|
||||
struct TrackedBuffer {
|
||||
buffer: Entity<Buffer>,
|
||||
diff_base: Rope,
|
||||
last_seen_base: Rope,
|
||||
unreviewed_edits: Patch<u32>,
|
||||
status: TrackedBufferStatus,
|
||||
version: clock::Global,
|
||||
diff: Entity<BufferDiff>,
|
||||
snapshot: text::BufferSnapshot,
|
||||
diff_update: mpsc::UnboundedSender<(ChangeAuthor, text::BufferSnapshot)>,
|
||||
may_have_unnotified_user_edits: bool,
|
||||
_open_lsp_handle: OpenLspBufferHandle,
|
||||
_maintain_diff: Task<()>,
|
||||
_subscription: Subscription,
|
||||
@@ -987,7 +996,6 @@ mod tests {
|
||||
use super::*;
|
||||
use buffer_diff::DiffHunkStatusKind;
|
||||
use gpui::TestAppContext;
|
||||
use indoc::indoc;
|
||||
use language::Point;
|
||||
use project::{FakeFs, Fs, Project, RemoveOptions};
|
||||
use rand::prelude::*;
|
||||
@@ -1005,8 +1013,6 @@ mod tests {
|
||||
cx.update(|cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
language::init(cx);
|
||||
Project::init_settings(cx);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -1066,7 +1072,7 @@ mod tests {
|
||||
);
|
||||
|
||||
action_log.update(cx, |log, cx| {
|
||||
log.keep_edits_in_range(buffer.clone(), Point::new(3, 0)..Point::new(4, 3), cx)
|
||||
log.keep_edits_in_range(buffer.clone(), Point::new(3, 0)..Point::new(4, 3), None, cx)
|
||||
});
|
||||
cx.run_until_parked();
|
||||
assert_eq!(
|
||||
@@ -1082,7 +1088,7 @@ mod tests {
|
||||
);
|
||||
|
||||
action_log.update(cx, |log, cx| {
|
||||
log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(4, 3), cx)
|
||||
log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(4, 3), None, cx)
|
||||
});
|
||||
cx.run_until_parked();
|
||||
assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
|
||||
@@ -1167,7 +1173,7 @@ mod tests {
|
||||
);
|
||||
|
||||
action_log.update(cx, |log, cx| {
|
||||
log.keep_edits_in_range(buffer.clone(), Point::new(1, 0)..Point::new(1, 0), cx)
|
||||
log.keep_edits_in_range(buffer.clone(), Point::new(1, 0)..Point::new(1, 0), None, cx)
|
||||
});
|
||||
cx.run_until_parked();
|
||||
assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
|
||||
@@ -1264,111 +1270,7 @@ mod tests {
|
||||
);
|
||||
|
||||
action_log.update(cx, |log, cx| {
|
||||
log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(1, 0), cx)
|
||||
});
|
||||
cx.run_until_parked();
|
||||
assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 10)]
|
||||
async fn test_user_edits_notifications(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
fs.insert_tree(
|
||||
path!("/dir"),
|
||||
json!({"file": indoc! {"
|
||||
abc
|
||||
def
|
||||
ghi
|
||||
jkl
|
||||
mno"}}),
|
||||
)
|
||||
.await;
|
||||
let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
|
||||
let action_log = cx.new(|_| ActionLog::new(project.clone()));
|
||||
let file_path = project
|
||||
.read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
|
||||
.unwrap();
|
||||
let buffer = project
|
||||
.update(cx, |project, cx| project.open_buffer(file_path, cx))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// Agent edits
|
||||
cx.update(|cx| {
|
||||
action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
|
||||
buffer.update(cx, |buffer, cx| {
|
||||
buffer
|
||||
.edit([(Point::new(1, 2)..Point::new(2, 3), "F\nGHI")], None, cx)
|
||||
.unwrap()
|
||||
});
|
||||
action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
|
||||
});
|
||||
cx.run_until_parked();
|
||||
assert_eq!(
|
||||
buffer.read_with(cx, |buffer, _| buffer.text()),
|
||||
indoc! {"
|
||||
abc
|
||||
deF
|
||||
GHI
|
||||
jkl
|
||||
mno"}
|
||||
);
|
||||
assert_eq!(
|
||||
unreviewed_hunks(&action_log, cx),
|
||||
vec![(
|
||||
buffer.clone(),
|
||||
vec![HunkStatus {
|
||||
range: Point::new(1, 0)..Point::new(3, 0),
|
||||
diff_status: DiffHunkStatusKind::Modified,
|
||||
old_text: "def\nghi\n".into(),
|
||||
}],
|
||||
)]
|
||||
);
|
||||
|
||||
// User edits
|
||||
buffer.update(cx, |buffer, cx| {
|
||||
buffer.edit(
|
||||
[
|
||||
(Point::new(0, 2)..Point::new(0, 2), "X"),
|
||||
(Point::new(3, 0)..Point::new(3, 0), "Y"),
|
||||
],
|
||||
None,
|
||||
cx,
|
||||
)
|
||||
});
|
||||
cx.run_until_parked();
|
||||
assert_eq!(
|
||||
buffer.read_with(cx, |buffer, _| buffer.text()),
|
||||
indoc! {"
|
||||
abXc
|
||||
deF
|
||||
GHI
|
||||
Yjkl
|
||||
mno"}
|
||||
);
|
||||
|
||||
// User edits should be stored separately from agent's
|
||||
let user_edits = action_log.update(cx, |log, cx| log.unnotified_user_edits(cx));
|
||||
assert_eq!(
|
||||
user_edits.expect("should have some user edits"),
|
||||
indoc! {"
|
||||
--- a/dir/file
|
||||
+++ b/dir/file
|
||||
@@ -1,5 +1,5 @@
|
||||
-abc
|
||||
+abXc
|
||||
def
|
||||
ghi
|
||||
-jkl
|
||||
+Yjkl
|
||||
mno
|
||||
"}
|
||||
);
|
||||
|
||||
action_log.update(cx, |log, cx| {
|
||||
log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(1, 0), cx)
|
||||
log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(1, 0), None, cx)
|
||||
});
|
||||
cx.run_until_parked();
|
||||
assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
|
||||
@@ -1427,7 +1329,7 @@ mod tests {
|
||||
);
|
||||
|
||||
action_log.update(cx, |log, cx| {
|
||||
log.keep_edits_in_range(buffer.clone(), 0..5, cx)
|
||||
log.keep_edits_in_range(buffer.clone(), 0..5, None, cx)
|
||||
});
|
||||
cx.run_until_parked();
|
||||
assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
|
||||
@@ -1479,7 +1381,7 @@ mod tests {
|
||||
|
||||
action_log
|
||||
.update(cx, |log, cx| {
|
||||
log.reject_edits_in_ranges(buffer.clone(), vec![2..5], cx)
|
||||
log.reject_edits_in_ranges(buffer.clone(), vec![2..5], None, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
@@ -1559,7 +1461,7 @@ mod tests {
|
||||
|
||||
action_log
|
||||
.update(cx, |log, cx| {
|
||||
log.reject_edits_in_ranges(buffer.clone(), vec![2..5], cx)
|
||||
log.reject_edits_in_ranges(buffer.clone(), vec![2..5], None, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
@@ -1742,6 +1644,7 @@ mod tests {
|
||||
log.reject_edits_in_ranges(
|
||||
buffer.clone(),
|
||||
vec![Point::new(4, 0)..Point::new(4, 0)],
|
||||
None,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
@@ -1776,6 +1679,7 @@ mod tests {
|
||||
log.reject_edits_in_ranges(
|
||||
buffer.clone(),
|
||||
vec![Point::new(0, 0)..Point::new(1, 0)],
|
||||
None,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
@@ -1803,6 +1707,7 @@ mod tests {
|
||||
log.reject_edits_in_ranges(
|
||||
buffer.clone(),
|
||||
vec![Point::new(4, 0)..Point::new(4, 0)],
|
||||
None,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
@@ -1877,7 +1782,7 @@ mod tests {
|
||||
let range_2 = buffer.read(cx).anchor_before(Point::new(5, 0))
|
||||
..buffer.read(cx).anchor_before(Point::new(5, 3));
|
||||
|
||||
log.reject_edits_in_ranges(buffer.clone(), vec![range_1, range_2], cx)
|
||||
log.reject_edits_in_ranges(buffer.clone(), vec![range_1, range_2], None, cx)
|
||||
.detach();
|
||||
assert_eq!(
|
||||
buffer.read_with(cx, |buffer, _| buffer.text()),
|
||||
@@ -1938,6 +1843,7 @@ mod tests {
|
||||
log.reject_edits_in_ranges(
|
||||
buffer.clone(),
|
||||
vec![Point::new(0, 0)..Point::new(0, 0)],
|
||||
None,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
@@ -1993,6 +1899,7 @@ mod tests {
|
||||
log.reject_edits_in_ranges(
|
||||
buffer.clone(),
|
||||
vec![Point::new(0, 0)..Point::new(0, 11)],
|
||||
None,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
@@ -2055,6 +1962,7 @@ mod tests {
|
||||
log.reject_edits_in_ranges(
|
||||
buffer.clone(),
|
||||
vec![Point::new(0, 0)..Point::new(100, 0)],
|
||||
None,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
@@ -2102,7 +2010,7 @@ mod tests {
|
||||
|
||||
// User accepts the single hunk
|
||||
action_log.update(cx, |log, cx| {
|
||||
log.keep_edits_in_range(buffer.clone(), Anchor::MIN..Anchor::MAX, cx)
|
||||
log.keep_edits_in_range(buffer.clone(), Anchor::MIN..Anchor::MAX, None, cx)
|
||||
});
|
||||
cx.run_until_parked();
|
||||
assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
|
||||
@@ -2123,7 +2031,7 @@ mod tests {
|
||||
// User rejects the hunk
|
||||
action_log
|
||||
.update(cx, |log, cx| {
|
||||
log.reject_edits_in_ranges(buffer.clone(), vec![Anchor::MIN..Anchor::MAX], cx)
|
||||
log.reject_edits_in_ranges(buffer.clone(), vec![Anchor::MIN..Anchor::MAX], None, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
@@ -2167,7 +2075,7 @@ mod tests {
|
||||
cx.run_until_parked();
|
||||
|
||||
// User clicks "Accept All"
|
||||
action_log.update(cx, |log, cx| log.keep_all_edits(cx));
|
||||
action_log.update(cx, |log, cx| log.keep_all_edits(None, cx));
|
||||
cx.run_until_parked();
|
||||
assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
|
||||
assert_eq!(unreviewed_hunks(&action_log, cx), vec![]); // Hunks are cleared
|
||||
@@ -2186,7 +2094,7 @@ mod tests {
|
||||
|
||||
// User clicks "Reject All"
|
||||
action_log
|
||||
.update(cx, |log, cx| log.reject_all_edits(cx))
|
||||
.update(cx, |log, cx| log.reject_all_edits(None, cx))
|
||||
.await;
|
||||
cx.run_until_parked();
|
||||
assert!(fs.is_file(path!("/dir/new_file").as_ref()).await);
|
||||
@@ -2226,7 +2134,7 @@ mod tests {
|
||||
action_log.update(cx, |log, cx| {
|
||||
let range = buffer.read(cx).random_byte_range(0, &mut rng);
|
||||
log::info!("keeping edits in range {:?}", range);
|
||||
log.keep_edits_in_range(buffer.clone(), range, cx)
|
||||
log.keep_edits_in_range(buffer.clone(), range, None, cx)
|
||||
});
|
||||
}
|
||||
25..50 => {
|
||||
@@ -2234,7 +2142,7 @@ mod tests {
|
||||
.update(cx, |log, cx| {
|
||||
let range = buffer.read(cx).random_byte_range(0, &mut rng);
|
||||
log::info!("rejecting edits in range {:?}", range);
|
||||
log.reject_edits_in_ranges(buffer.clone(), vec![range], cx)
|
||||
log.reject_edits_in_ranges(buffer.clone(), vec![range], None, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
@@ -2488,61 +2396,4 @@ mod tests {
.collect()
})
}

#[gpui::test]
async fn test_format_patch(cx: &mut TestAppContext) {
init_test(cx);

let fs = FakeFs::new(cx.executor());
fs.insert_tree(
path!("/dir"),
json!({"test.txt": "line 1\nline 2\nline 3\n"}),
)
.await;
let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
let action_log = cx.new(|_| ActionLog::new(project.clone()));

let file_path = project
.read_with(cx, |project, cx| {
project.find_project_path("dir/test.txt", cx)
})
.unwrap();
let buffer = project
.update(cx, |project, cx| project.open_buffer(file_path, cx))
.await
.unwrap();

cx.update(|cx| {
// Track the buffer and mark it as read first
action_log.update(cx, |log, cx| {
log.buffer_read(buffer.clone(), cx);
});

// Make some edits to create a patch
buffer.update(cx, |buffer, cx| {
buffer
.edit([(Point::new(1, 0)..Point::new(1, 6), "CHANGED")], None, cx)
.unwrap(); // Replace "line2" with "CHANGED"
});
});

cx.run_until_parked();

// Get the patch
let patch = action_log.update(cx, |log, cx| log.unnotified_user_edits(cx));

// Verify the patch format contains expected unified diff elements
assert_eq!(
patch.unwrap(),
indoc! {"
--- a/dir/test.txt
+++ b/dir/test.txt
@@ -1,3 +1,3 @@
line 1
-line 2
+CHANGED
line 3
"}
);
}
}

@@ -17,6 +17,7 @@ anyhow.workspace = true
|
||||
auto_update.workspace = true
|
||||
editor.workspace = true
|
||||
extension_host.workspace = true
|
||||
fs.workspace = true
|
||||
futures.workspace = true
|
||||
gpui.workspace = true
|
||||
language.workspace = true
|
||||
|
||||
@@ -51,6 +51,7 @@ pub struct ActivityIndicator {
|
||||
project: Entity<Project>,
|
||||
auto_updater: Option<Entity<AutoUpdater>>,
|
||||
context_menu_handle: PopoverMenuHandle<ContextMenu>,
|
||||
fs_jobs: Vec<fs::JobInfo>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
@@ -99,6 +100,27 @@ impl ActivityIndicator {
|
||||
})
|
||||
.detach();
|
||||
|
||||
let fs = project.read(cx).fs().clone();
|
||||
let mut job_events = fs.subscribe_to_jobs();
|
||||
cx.spawn(async move |this, cx| {
|
||||
while let Some(job_event) = job_events.next().await {
|
||||
this.update(cx, |this: &mut ActivityIndicator, cx| {
|
||||
match job_event {
|
||||
fs::JobEvent::Started { info } => {
|
||||
this.fs_jobs.retain(|j| j.id != info.id);
|
||||
this.fs_jobs.push(info);
|
||||
}
|
||||
fs::JobEvent::Completed { id } => {
|
||||
this.fs_jobs.retain(|j| j.id != id);
|
||||
}
|
||||
}
|
||||
cx.notify();
|
||||
})?;
|
||||
}
|
||||
anyhow::Ok(())
|
||||
})
|
||||
.detach();
|
||||
|
||||
cx.subscribe(
|
||||
&project.read(cx).lsp_store(),
|
||||
|activity_indicator, _, event, cx| {
|
||||
@@ -201,7 +223,8 @@ impl ActivityIndicator {
|
||||
statuses: Vec::new(),
|
||||
project: project.clone(),
|
||||
auto_updater,
|
||||
context_menu_handle: Default::default(),
|
||||
context_menu_handle: PopoverMenuHandle::default(),
|
||||
fs_jobs: Vec::new(),
|
||||
}
|
||||
});
|
||||
|
||||
@@ -432,6 +455,23 @@ impl ActivityIndicator {
|
||||
});
|
||||
}
|
||||
|
||||
// Show any long-running fs command
|
||||
for fs_job in &self.fs_jobs {
|
||||
if Instant::now().duration_since(fs_job.start) >= GIT_OPERATION_DELAY {
|
||||
return Some(Content {
|
||||
icon: Some(
|
||||
Icon::new(IconName::ArrowCircle)
|
||||
.size(IconSize::Small)
|
||||
.with_rotate_animation(2)
|
||||
.into_any_element(),
|
||||
),
|
||||
message: fs_job.message.clone().into(),
|
||||
on_click: None,
|
||||
tooltip_message: None,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Show any language server installation info.
|
||||
let mut downloading = SmallVec::<[_; 3]>::new();
|
||||
let mut checking_for_update = SmallVec::<[_; 3]>::new();
|
||||
|
||||
@@ -63,7 +63,6 @@ streaming_diff.workspace = true
|
||||
strsim.workspace = true
|
||||
task.workspace = true
|
||||
telemetry.workspace = true
|
||||
terminal.workspace = true
|
||||
text.workspace = true
|
||||
thiserror.workspace = true
|
||||
ui.workspace = true
|
||||
|
||||
@@ -6,7 +6,6 @@ mod native_agent_server;
|
||||
pub mod outline;
|
||||
mod templates;
|
||||
mod thread;
|
||||
mod tool_schema;
|
||||
mod tools;
|
||||
|
||||
#[cfg(test)]
|
||||
@@ -218,7 +217,7 @@ impl LanguageModels {
|
||||
}
|
||||
_ => {
|
||||
log::error!(
|
||||
"Failed to authenticate provider: {}: {err}",
|
||||
"Failed to authenticate provider: {}: {err:#}",
|
||||
provider_name.0
|
||||
);
|
||||
}
|
||||
@@ -967,6 +966,10 @@ impl acp_thread::AgentModelSelector for NativeAgentModelSelector {
|
||||
}
|
||||
|
||||
impl acp_thread::AgentConnection for NativeAgentConnection {
|
||||
fn telemetry_id(&self) -> &'static str {
|
||||
"zed"
|
||||
}
|
||||
|
||||
fn new_thread(
|
||||
self: Rc<Self>,
|
||||
project: Entity<Project>,
|
||||
@@ -1107,10 +1110,6 @@ impl acp_thread::AgentConnection for NativeAgentConnection {
|
||||
}
|
||||
|
||||
impl acp_thread::AgentTelemetry for NativeAgentConnection {
|
||||
fn agent_name(&self) -> String {
|
||||
"Zed".into()
|
||||
}
|
||||
|
||||
fn thread_data(
|
||||
&self,
|
||||
session_id: &acp::SessionId,
|
||||
@@ -1627,9 +1626,7 @@ mod internal_tests {
|
||||
cx.update(|cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
Project::init_settings(cx);
|
||||
agent_settings::init(cx);
|
||||
language::init(cx);
|
||||
|
||||
LanguageModelRegistry::test(cx);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -1394,7 +1394,7 @@ mod tests {
|
||||
|
||||
async fn init_test(cx: &mut TestAppContext) -> EditAgent {
|
||||
cx.update(settings::init);
|
||||
cx.update(Project::init_settings);
|
||||
|
||||
let project = Project::test(FakeFs::new(cx.executor()), [], cx).await;
|
||||
let model = Arc::new(FakeLanguageModel::default());
|
||||
let action_log = cx.new(|_| ActionLog::new(project.clone()));
|
||||
|
||||
@@ -1468,14 +1468,9 @@ impl EditAgentTest {
|
||||
gpui_tokio::init(cx);
|
||||
let http_client = Arc::new(ReqwestClient::user_agent("agent tests").unwrap());
|
||||
cx.set_http_client(http_client);
|
||||
|
||||
client::init_settings(cx);
|
||||
let client = Client::production(cx);
|
||||
let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
|
||||
|
||||
settings::init(cx);
|
||||
Project::init_settings(cx);
|
||||
language::init(cx);
|
||||
language_model::init(client.clone(), cx);
|
||||
language_models::init(user_store, client.clone(), cx);
|
||||
});
|
||||
|
||||
@@ -88,8 +88,6 @@ mod tests {
|
||||
async |fs, project, cx| {
|
||||
let auth = cx.update(|cx| {
|
||||
prompt_store::init(cx);
|
||||
terminal::init(cx);
|
||||
|
||||
let registry = language_model::LanguageModelRegistry::read_global(cx);
|
||||
let auth = registry
|
||||
.provider(&language_model::ANTHROPIC_PROVIDER_ID)
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
use anyhow::Result;
|
||||
use gpui::{AsyncApp, Entity};
|
||||
use language::{Buffer, OutlineItem, ParseStatus};
|
||||
use language::{Buffer, OutlineItem};
|
||||
use regex::Regex;
|
||||
use std::fmt::Write;
|
||||
use text::Point;
|
||||
@@ -30,10 +30,9 @@ pub async fn get_buffer_content_or_outline(
|
||||
if file_size > AUTO_OUTLINE_SIZE {
|
||||
// For large files, use outline instead of full content
|
||||
// Wait until the buffer has been fully parsed, so we can read its outline
|
||||
let mut parse_status = buffer.read_with(cx, |buffer, _| buffer.parse_status())?;
|
||||
while *parse_status.borrow() != ParseStatus::Idle {
|
||||
parse_status.changed().await?;
|
||||
}
|
||||
buffer
|
||||
.read_with(cx, |buffer, _| buffer.parsing_idle())?
|
||||
.await;
|
||||
|
||||
let outline_items = buffer.read_with(cx, |buffer, _| {
|
||||
let snapshot = buffer.snapshot();
|
||||
|
||||
@@ -933,7 +933,7 @@ async fn test_profiles(cx: &mut TestAppContext) {
|
||||
// Test that test-1 profile (default) has echo and delay tools
|
||||
thread
|
||||
.update(cx, |thread, cx| {
|
||||
thread.set_profile(AgentProfileId("test-1".into()));
|
||||
thread.set_profile(AgentProfileId("test-1".into()), cx);
|
||||
thread.send(UserMessageId::new(), ["test"], cx)
|
||||
})
|
||||
.unwrap();
|
||||
@@ -953,7 +953,7 @@ async fn test_profiles(cx: &mut TestAppContext) {
|
||||
// Switch to test-2 profile, and verify that it has only the infinite tool.
|
||||
thread
|
||||
.update(cx, |thread, cx| {
|
||||
thread.set_profile(AgentProfileId("test-2".into()));
|
||||
thread.set_profile(AgentProfileId("test-2".into()), cx);
|
||||
thread.send(UserMessageId::new(), ["test2"], cx)
|
||||
})
|
||||
.unwrap();
|
||||
@@ -1002,8 +1002,8 @@ async fn test_mcp_tools(cx: &mut TestAppContext) {
|
||||
)
|
||||
.await;
|
||||
cx.run_until_parked();
|
||||
thread.update(cx, |thread, _| {
|
||||
thread.set_profile(AgentProfileId("test".into()))
|
||||
thread.update(cx, |thread, cx| {
|
||||
thread.set_profile(AgentProfileId("test".into()), cx)
|
||||
});
|
||||
|
||||
let mut mcp_tool_calls = setup_context_server(
|
||||
@@ -1169,8 +1169,8 @@ async fn test_mcp_tool_truncation(cx: &mut TestAppContext) {
|
||||
.await;
|
||||
cx.run_until_parked();
|
||||
|
||||
thread.update(cx, |thread, _| {
|
||||
thread.set_profile(AgentProfileId("test".into()));
|
||||
thread.update(cx, |thread, cx| {
|
||||
thread.set_profile(AgentProfileId("test".into()), cx);
|
||||
thread.add_tool(EchoTool);
|
||||
thread.add_tool(DelayTool);
|
||||
thread.add_tool(WordListTool);
|
||||
@@ -1851,7 +1851,6 @@ async fn test_agent_connection(cx: &mut TestAppContext) {
|
||||
// Initialize language model system with test provider
|
||||
cx.update(|cx| {
|
||||
gpui_tokio::init(cx);
|
||||
client::init_settings(cx);
|
||||
|
||||
let http_client = FakeHttpClient::with_404_response();
|
||||
let clock = Arc::new(clock::FakeSystemClock::new());
|
||||
@@ -1859,9 +1858,7 @@ async fn test_agent_connection(cx: &mut TestAppContext) {
|
||||
let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
|
||||
language_model::init(client.clone(), cx);
|
||||
language_models::init(user_store, client.clone(), cx);
|
||||
Project::init_settings(cx);
|
||||
LanguageModelRegistry::test(cx);
|
||||
agent_settings::init(cx);
|
||||
});
|
||||
cx.executor().forbid_parking();
|
||||
|
||||
@@ -2395,8 +2392,6 @@ async fn setup(cx: &mut TestAppContext, model: TestModel) -> ThreadTest {
|
||||
|
||||
cx.update(|cx| {
|
||||
settings::init(cx);
|
||||
Project::init_settings(cx);
|
||||
agent_settings::init(cx);
|
||||
|
||||
match model {
|
||||
TestModel::Fake => {}
|
||||
@@ -2404,7 +2399,6 @@ async fn setup(cx: &mut TestAppContext, model: TestModel) -> ThreadTest {
|
||||
gpui_tokio::init(cx);
|
||||
let http_client = ReqwestClient::user_agent("agent tests").unwrap();
|
||||
cx.set_http_client(Arc::new(http_client));
|
||||
client::init_settings(cx);
|
||||
let client = Client::production(cx);
|
||||
let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
|
||||
language_model::init(client.clone(), cx);
|
||||
|
||||
@@ -30,16 +30,17 @@ use gpui::{
|
||||
};
|
||||
use language_model::{
|
||||
LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent, LanguageModelExt,
|
||||
LanguageModelImage, LanguageModelProviderId, LanguageModelRegistry, LanguageModelRequest,
|
||||
LanguageModelRequestMessage, LanguageModelRequestTool, LanguageModelToolResult,
|
||||
LanguageModelToolResultContent, LanguageModelToolSchemaFormat, LanguageModelToolUse,
|
||||
LanguageModelToolUseId, Role, SelectedModel, StopReason, TokenUsage, ZED_CLOUD_PROVIDER_ID,
|
||||
LanguageModelId, LanguageModelImage, LanguageModelProviderId, LanguageModelRegistry,
|
||||
LanguageModelRequest, LanguageModelRequestMessage, LanguageModelRequestTool,
|
||||
LanguageModelToolResult, LanguageModelToolResultContent, LanguageModelToolSchemaFormat,
|
||||
LanguageModelToolUse, LanguageModelToolUseId, Role, SelectedModel, StopReason, TokenUsage,
|
||||
ZED_CLOUD_PROVIDER_ID,
|
||||
};
|
||||
use project::Project;
|
||||
use prompt_store::ProjectContext;
|
||||
use schemars::{JsonSchema, Schema};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::{Settings, update_settings_file};
|
||||
use settings::{LanguageModelSelection, Settings, update_settings_file};
|
||||
use smol::stream::StreamExt;
|
||||
use std::{
|
||||
collections::BTreeMap,
@@ -798,7 +799,8 @@ impl Thread {
let profile_id = db_thread
.profile
.unwrap_or_else(|| AgentSettings::get_global(cx).default_profile.clone());
let model = LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
let mut model = LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
db_thread
.model
.and_then(|model| {
@@ -811,6 +813,16 @@ impl Thread {
.or_else(|| registry.default_model())
.map(|model| model.model)
});

if model.is_none() {
model = Self::resolve_profile_model(&profile_id, cx);
}
if model.is_none() {
model = LanguageModelRegistry::global(cx).update(cx, |registry, _cx| {
registry.default_model().map(|model| model.model)
});
}

let (prompt_capabilities_tx, prompt_capabilities_rx) =
watch::channel(Self::prompt_capabilities(model.as_deref()));

@@ -1007,8 +1019,17 @@ impl Thread {
&self.profile_id
}

pub fn set_profile(&mut self, profile_id: AgentProfileId) {
pub fn set_profile(&mut self, profile_id: AgentProfileId, cx: &mut Context<Self>) {
if self.profile_id == profile_id {
return;
}

self.profile_id = profile_id;

// Swap to the profile's preferred model when available.
if let Some(model) = Self::resolve_profile_model(&self.profile_id, cx) {
self.set_model(model, cx);
}
}

pub fn cancel(&mut self, cx: &mut Context<Self>) {
@@ -1065,6 +1086,35 @@ impl Thread {
})
}

/// Look up the active profile and resolve its preferred model if one is configured.
fn resolve_profile_model(
profile_id: &AgentProfileId,
cx: &mut Context<Self>,
) -> Option<Arc<dyn LanguageModel>> {
let selection = AgentSettings::get_global(cx)
.profiles
.get(profile_id)?
.default_model
.clone()?;
Self::resolve_model_from_selection(&selection, cx)
}

/// Translate a stored model selection into the configured model from the registry.
fn resolve_model_from_selection(
selection: &LanguageModelSelection,
cx: &mut Context<Self>,
) -> Option<Arc<dyn LanguageModel>> {
let selected = SelectedModel {
provider: LanguageModelProviderId::from(selection.provider.0.clone()),
model: LanguageModelId::from(selection.model.clone()),
};
LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
registry
.select_model(&selected, cx)
.map(|configured| configured.model)
})
}

pub fn resume(
&mut self,
cx: &mut Context<Self>,
@@ -2139,7 +2189,7 @@ where

/// Returns the JSON schema that describes the tool's input.
fn input_schema(format: LanguageModelToolSchemaFormat) -> Schema {
crate::tool_schema::root_schema_for::<Self::Input>(format)
language_model::tool_schema::root_schema_for::<Self::Input>(format)
}

/// Some tools rely on a provider for the underlying billing or other reasons.
@@ -2226,7 +2276,7 @@ where

fn input_schema(&self, format: LanguageModelToolSchemaFormat) -> Result<serde_json::Value> {
let mut json = serde_json::to_value(T::input_schema(format))?;
crate::tool_schema::adapt_schema_to_format(&mut json, format)?;
language_model::tool_schema::adapt_schema_to_format(&mut json, format)?;
Ok(json)
}

@@ -165,7 +165,7 @@ impl AnyAgentTool for ContextServerTool {
|
||||
format: language_model::LanguageModelToolSchemaFormat,
|
||||
) -> Result<serde_json::Value> {
|
||||
let mut schema = self.tool.input_schema.clone();
|
||||
crate::tool_schema::adapt_schema_to_format(&mut schema, format)?;
|
||||
language_model::tool_schema::adapt_schema_to_format(&mut schema, format)?;
|
||||
Ok(match schema {
|
||||
serde_json::Value::Null => {
|
||||
serde_json::json!({ "type": "object", "properties": [] })
|
||||
|
||||
@@ -562,7 +562,6 @@ fn resolve_path(
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::{ContextServerRegistry, Templates};
|
||||
use client::TelemetrySettings;
|
||||
use fs::Fs;
|
||||
use gpui::{TestAppContext, UpdateGlobal};
|
||||
use language_model::fake_provider::FakeLanguageModel;
|
||||
@@ -1753,10 +1752,6 @@ mod tests {
|
||||
cx.update(|cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
language::init(cx);
|
||||
TelemetrySettings::register(cx);
|
||||
agent_settings::AgentSettings::register(cx);
|
||||
Project::init_settings(cx);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -246,8 +246,6 @@ mod test {
|
||||
cx.update(|cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
language::init(cx);
|
||||
Project::init_settings(cx);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -778,8 +778,6 @@ mod tests {
|
||||
cx.update(|cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
language::init(cx);
|
||||
Project::init_settings(cx);
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@@ -223,8 +223,6 @@ mod tests {
|
||||
cx.update(|cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
language::init(cx);
|
||||
Project::init_settings(cx);
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@@ -163,8 +163,6 @@ mod tests {
|
||||
cx.update(|cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
language::init(cx);
|
||||
Project::init_settings(cx);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -509,8 +509,6 @@ mod test {
|
||||
cx.update(|cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
language::init(cx);
|
||||
Project::init_settings(cx);
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@@ -21,7 +21,6 @@ acp_tools.workspace = true
|
||||
acp_thread.workspace = true
|
||||
action_log.workspace = true
|
||||
agent-client-protocol.workspace = true
|
||||
agent_settings.workspace = true
|
||||
anyhow.workspace = true
|
||||
async-trait.workspace = true
|
||||
client.workspace = true
|
||||
@@ -33,7 +32,6 @@ gpui.workspace = true
|
||||
gpui_tokio = { workspace = true, optional = true }
|
||||
http_client.workspace = true
|
||||
indoc.workspace = true
|
||||
language.workspace = true
|
||||
language_model.workspace = true
|
||||
language_models.workspace = true
|
||||
log.workspace = true
|
||||
|
||||
@@ -29,6 +29,7 @@ pub struct UnsupportedVersion;
|
||||
|
||||
pub struct AcpConnection {
|
||||
server_name: SharedString,
|
||||
telemetry_id: &'static str,
|
||||
connection: Rc<acp::ClientSideConnection>,
|
||||
sessions: Rc<RefCell<HashMap<acp::SessionId, AcpSession>>>,
|
||||
auth_methods: Vec<acp::AuthMethod>,
|
||||
@@ -52,6 +53,7 @@ pub struct AcpSession {
|
||||
|
||||
pub async fn connect(
|
||||
server_name: SharedString,
|
||||
telemetry_id: &'static str,
|
||||
command: AgentServerCommand,
|
||||
root_dir: &Path,
|
||||
default_mode: Option<acp::SessionModeId>,
|
||||
@@ -60,6 +62,7 @@ pub async fn connect(
|
||||
) -> Result<Rc<dyn AgentConnection>> {
|
||||
let conn = AcpConnection::stdio(
|
||||
server_name,
|
||||
telemetry_id,
|
||||
command.clone(),
|
||||
root_dir,
|
||||
default_mode,
|
||||
@@ -75,6 +78,7 @@ const MINIMUM_SUPPORTED_VERSION: acp::ProtocolVersion = acp::V1;
|
||||
impl AcpConnection {
|
||||
pub async fn stdio(
|
||||
server_name: SharedString,
|
||||
telemetry_id: &'static str,
|
||||
command: AgentServerCommand,
|
||||
root_dir: &Path,
|
||||
default_mode: Option<acp::SessionModeId>,
|
||||
@@ -132,7 +136,7 @@ impl AcpConnection {
|
||||
while let Ok(n) = stderr.read_line(&mut line).await
|
||||
&& n > 0
|
||||
{
|
||||
log::warn!("agent stderr: {}", &line);
|
||||
log::warn!("agent stderr: {}", line.trim());
|
||||
line.clear();
|
||||
}
|
||||
Ok(())
|
||||
@@ -199,6 +203,7 @@ impl AcpConnection {
|
||||
root_dir: root_dir.to_owned(),
|
||||
connection,
|
||||
server_name,
|
||||
telemetry_id,
|
||||
sessions,
|
||||
agent_capabilities: response.agent_capabilities,
|
||||
default_mode,
|
||||
@@ -226,6 +231,10 @@ impl Drop for AcpConnection {
|
||||
}
|
||||
|
||||
impl AgentConnection for AcpConnection {
|
||||
fn telemetry_id(&self) -> &'static str {
|
||||
self.telemetry_id
|
||||
}
|
||||
|
||||
fn new_thread(
|
||||
self: Rc<Self>,
|
||||
project: Entity<Project>,
|
||||
|
||||
@@ -62,6 +62,7 @@ impl AgentServer for ClaudeCode {
|
||||
cx: &mut App,
|
||||
) -> Task<Result<(Rc<dyn AgentConnection>, Option<task::SpawnInTerminal>)>> {
|
||||
let name = self.name();
|
||||
let telemetry_id = self.telemetry_id();
|
||||
let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().into_owned());
|
||||
let is_remote = delegate.project.read(cx).is_via_remote_server();
|
||||
let store = delegate.store.downgrade();
|
||||
@@ -85,6 +86,7 @@ impl AgentServer for ClaudeCode {
|
||||
.await?;
|
||||
let connection = crate::acp::connect(
|
||||
name,
|
||||
telemetry_id,
|
||||
command,
|
||||
root_dir.as_ref(),
|
||||
default_mode,
|
||||
|
||||
@@ -63,6 +63,7 @@ impl AgentServer for Codex {
|
||||
cx: &mut App,
|
||||
) -> Task<Result<(Rc<dyn AgentConnection>, Option<task::SpawnInTerminal>)>> {
|
||||
let name = self.name();
|
||||
let telemetry_id = self.telemetry_id();
|
||||
let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().into_owned());
|
||||
let is_remote = delegate.project.read(cx).is_via_remote_server();
|
||||
let store = delegate.store.downgrade();
|
||||
@@ -87,6 +88,7 @@ impl AgentServer for Codex {
|
||||
|
||||
let connection = crate::acp::connect(
|
||||
name,
|
||||
telemetry_id,
|
||||
command,
|
||||
root_dir.as_ref(),
|
||||
default_mode,
|
||||
|
||||
@@ -50,14 +50,13 @@ impl crate::AgentServer for CustomAgentServer {
|
||||
fn set_default_mode(&self, mode_id: Option<acp::SessionModeId>, fs: Arc<dyn Fs>, cx: &mut App) {
|
||||
let name = self.name();
|
||||
update_settings_file(fs, cx, move |settings, _| {
|
||||
if let Some(settings) = settings
|
||||
settings
|
||||
.agent_servers
|
||||
.get_or_insert_default()
|
||||
.custom
|
||||
.get_mut(&name)
|
||||
{
|
||||
settings.default_mode = mode_id.map(|m| m.to_string())
|
||||
}
|
||||
.unwrap()
|
||||
.default_mode = mode_id.map(|m| m.to_string())
|
||||
});
|
||||
}
|
||||
|
||||
@@ -68,6 +67,7 @@ impl crate::AgentServer for CustomAgentServer {
|
||||
cx: &mut App,
|
||||
) -> Task<Result<(Rc<dyn AgentConnection>, Option<task::SpawnInTerminal>)>> {
|
||||
let name = self.name();
|
||||
let telemetry_id = self.telemetry_id();
|
||||
let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().into_owned());
|
||||
let is_remote = delegate.project.read(cx).is_via_remote_server();
|
||||
let default_mode = self.default_mode(cx);
|
||||
@@ -93,6 +93,7 @@ impl crate::AgentServer for CustomAgentServer {
|
||||
.await?;
|
||||
let connection = crate::acp::connect(
|
||||
name,
|
||||
telemetry_id,
|
||||
command,
|
||||
root_dir.as_ref(),
|
||||
default_mode,
|
||||
|
||||
@@ -6,7 +6,9 @@ use gpui::{AppContext, Entity, TestAppContext};
|
||||
use indoc::indoc;
|
||||
#[cfg(test)]
|
||||
use project::agent_server_store::BuiltinAgentServerSettings;
|
||||
use project::{FakeFs, Project, agent_server_store::AllAgentServersSettings};
|
||||
use project::{FakeFs, Project};
|
||||
#[cfg(test)]
|
||||
use settings::Settings;
|
||||
use std::{
|
||||
path::{Path, PathBuf},
|
||||
sync::Arc,
|
||||
@@ -452,29 +454,22 @@ pub use common_e2e_tests;
|
||||
// Helpers
|
||||
|
||||
pub async fn init_test(cx: &mut TestAppContext) -> Arc<FakeFs> {
|
||||
use settings::Settings;
|
||||
|
||||
env_logger::try_init().ok();
|
||||
|
||||
cx.update(|cx| {
|
||||
let settings_store = settings::SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
Project::init_settings(cx);
|
||||
language::init(cx);
|
||||
gpui_tokio::init(cx);
|
||||
let http_client = reqwest_client::ReqwestClient::user_agent("agent tests").unwrap();
|
||||
cx.set_http_client(Arc::new(http_client));
|
||||
client::init_settings(cx);
|
||||
let client = client::Client::production(cx);
|
||||
let user_store = cx.new(|cx| client::UserStore::new(client.clone(), cx));
|
||||
language_model::init(client.clone(), cx);
|
||||
language_models::init(user_store, client, cx);
|
||||
agent_settings::init(cx);
|
||||
AllAgentServersSettings::register(cx);
|
||||
|
||||
#[cfg(test)]
|
||||
AllAgentServersSettings::override_global(
|
||||
AllAgentServersSettings {
|
||||
project::agent_server_store::AllAgentServersSettings::override_global(
|
||||
project::agent_server_store::AllAgentServersSettings {
|
||||
claude: Some(BuiltinAgentServerSettings {
|
||||
path: Some("claude-code-acp".into()),
|
||||
args: None,
|
||||
|
||||
@@ -31,6 +31,7 @@ impl AgentServer for Gemini {
|
||||
cx: &mut App,
|
||||
) -> Task<Result<(Rc<dyn AgentConnection>, Option<task::SpawnInTerminal>)>> {
|
||||
let name = self.name();
|
||||
let telemetry_id = self.telemetry_id();
|
||||
let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().into_owned());
|
||||
let is_remote = delegate.project.read(cx).is_via_remote_server();
|
||||
let store = delegate.store.downgrade();
|
||||
@@ -64,6 +65,7 @@ impl AgentServer for Gemini {
|
||||
|
||||
let connection = crate::acp::connect(
|
||||
name,
|
||||
telemetry_id,
|
||||
command,
|
||||
root_dir.as_ref(),
|
||||
default_mode,
|
||||
|
||||
@@ -6,8 +6,8 @@ use convert_case::{Case, Casing as _};
|
||||
use fs::Fs;
|
||||
use gpui::{App, SharedString};
|
||||
use settings::{
|
||||
AgentProfileContent, ContextServerPresetContent, Settings as _, SettingsContent,
|
||||
update_settings_file,
|
||||
AgentProfileContent, ContextServerPresetContent, LanguageModelSelection, Settings as _,
|
||||
SettingsContent, update_settings_file,
|
||||
};
|
||||
use util::ResultExt as _;
|
||||
|
||||
@@ -53,19 +53,30 @@ impl AgentProfile {
let base_profile =
base_profile_id.and_then(|id| AgentSettings::get_global(cx).profiles.get(&id).cloned());

// Copy toggles from the base profile so the new profile starts with familiar defaults.
let tools = base_profile
.as_ref()
.map(|profile| profile.tools.clone())
.unwrap_or_default();
let enable_all_context_servers = base_profile
.as_ref()
.map(|profile| profile.enable_all_context_servers)
.unwrap_or_default();
let context_servers = base_profile
.as_ref()
.map(|profile| profile.context_servers.clone())
.unwrap_or_default();
// Preserve the base profile's model preference when cloning into a new profile.
let default_model = base_profile
.as_ref()
.and_then(|profile| profile.default_model.clone());

let profile_settings = AgentProfileSettings {
name: name.into(),
tools: base_profile
.as_ref()
.map(|profile| profile.tools.clone())
.unwrap_or_default(),
enable_all_context_servers: base_profile
.as_ref()
.map(|profile| profile.enable_all_context_servers)
.unwrap_or_default(),
context_servers: base_profile
.map(|profile| profile.context_servers)
.unwrap_or_default(),
tools,
enable_all_context_servers,
context_servers,
default_model,
};

update_settings_file(fs, cx, {
@@ -96,6 +107,8 @@ pub struct AgentProfileSettings {
pub tools: IndexMap<Arc<str>, bool>,
pub enable_all_context_servers: bool,
pub context_servers: IndexMap<Arc<str>, ContextServerPreset>,
/// Default language model to apply when this profile becomes active.
pub default_model: Option<LanguageModelSelection>,
}

impl AgentProfileSettings {
@@ -144,6 +157,7 @@ impl AgentProfileSettings {
)
})
.collect(),
default_model: self.default_model.clone(),
},
);

@@ -153,15 +167,23 @@ impl AgentProfileSettings {

impl From<AgentProfileContent> for AgentProfileSettings {
fn from(content: AgentProfileContent) -> Self {
let AgentProfileContent {
name,
tools,
enable_all_context_servers,
context_servers,
default_model,
} = content;

Self {
name: content.name.into(),
tools: content.tools,
enable_all_context_servers: content.enable_all_context_servers.unwrap_or_default(),
context_servers: content
.context_servers
name: name.into(),
tools,
enable_all_context_servers: enable_all_context_servers.unwrap_or_default(),
context_servers: context_servers
.into_iter()
.map(|(server_id, preset)| (server_id, preset.into()))
.collect(),
default_model,
}
}
}

@@ -10,7 +10,7 @@ use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::{
|
||||
DefaultAgentView, DockPosition, LanguageModelParameters, LanguageModelSelection,
|
||||
NotifyWhenAgentWaiting, Settings,
|
||||
NotifyWhenAgentWaiting, RegisterSetting, Settings,
|
||||
};
|
||||
|
||||
pub use crate::agent_profile::*;
|
||||
@@ -19,11 +19,7 @@ pub const SUMMARIZE_THREAD_PROMPT: &str = include_str!("prompts/summarize_thread
|
||||
pub const SUMMARIZE_THREAD_DETAILED_PROMPT: &str =
|
||||
include_str!("prompts/summarize_thread_detailed_prompt.txt");
|
||||
|
||||
pub fn init(cx: &mut App) {
|
||||
AgentSettings::register(cx);
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
#[derive(Clone, Debug, RegisterSetting)]
|
||||
pub struct AgentSettings {
|
||||
pub enabled: bool,
|
||||
pub button: bool,
|
||||
|
||||
@@ -401,10 +401,9 @@ mod tests {
|
||||
use acp_thread::{AgentConnection, StubAgentConnection};
|
||||
use agent::HistoryStore;
|
||||
use agent_client_protocol as acp;
|
||||
use agent_settings::AgentSettings;
|
||||
use assistant_text_thread::TextThreadStore;
|
||||
use buffer_diff::{DiffHunkStatus, DiffHunkStatusKind};
|
||||
use editor::{EditorSettings, RowInfo};
|
||||
use editor::RowInfo;
|
||||
use fs::FakeFs;
|
||||
use gpui::{AppContext as _, SemanticVersion, TestAppContext};
|
||||
|
||||
@@ -413,7 +412,7 @@ mod tests {
|
||||
use pretty_assertions::assert_matches;
|
||||
use project::Project;
|
||||
use serde_json::json;
|
||||
use settings::{Settings as _, SettingsStore};
|
||||
use settings::SettingsStore;
|
||||
use util::path;
|
||||
use workspace::Workspace;
|
||||
|
||||
@@ -539,13 +538,8 @@ mod tests {
|
||||
cx.update(|cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
language::init(cx);
|
||||
Project::init_settings(cx);
|
||||
AgentSettings::register(cx);
|
||||
workspace::init_settings(cx);
|
||||
theme::init(theme::LoadThemes::JustBase, cx);
|
||||
release_channel::init(SemanticVersion::default(), cx);
|
||||
EditorSettings::register(cx);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -714,7 +714,10 @@ impl MessageEditor {
|
||||
let mut all_tracked_buffers = Vec::new();
|
||||
|
||||
let result = editor.update(cx, |editor, cx| {
|
||||
let mut ix = text.chars().position(|c| !c.is_whitespace()).unwrap_or(0);
|
||||
let (mut ix, _) = text
|
||||
.char_indices()
|
||||
.find(|(_, c)| !c.is_whitespace())
|
||||
.unwrap_or((0, '\0'));
|
||||
let mut chunks: Vec<acp::ContentBlock> = Vec::new();
|
||||
let text = editor.text(cx);
|
||||
editor.display_map.update(cx, |map, cx| {
|
||||
@@ -1192,6 +1195,17 @@ impl MessageEditor {
|
||||
self.editor.read(cx).text(cx)
|
||||
}
|
||||
|
||||
pub fn set_placeholder_text(
|
||||
&mut self,
|
||||
placeholder: &str,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.editor.update(cx, |editor, cx| {
|
||||
editor.set_placeholder_text(placeholder, window, cx);
|
||||
});
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub fn set_text(&mut self, text: &str, window: &mut Window, cx: &mut Context<Self>) {
|
||||
self.editor.update(cx, |editor, cx| {
|
||||
@@ -1896,10 +1910,8 @@ mod tests {
|
||||
let app_state = cx.update(AppState::test);
|
||||
|
||||
cx.update(|cx| {
|
||||
language::init(cx);
|
||||
editor::init(cx);
|
||||
workspace::init(app_state.clone(), cx);
|
||||
Project::init_settings(cx);
|
||||
});
|
||||
|
||||
let project = Project::test(app_state.fs.clone(), [path!("/dir").as_ref()], cx).await;
|
||||
@@ -2072,10 +2084,8 @@ mod tests {
|
||||
let app_state = cx.update(AppState::test);
|
||||
|
||||
cx.update(|cx| {
|
||||
language::init(cx);
|
||||
editor::init(cx);
|
||||
workspace::init(app_state.clone(), cx);
|
||||
Project::init_settings(cx);
|
||||
});
|
||||
|
||||
app_state
|
||||
@@ -2833,7 +2843,7 @@ mod tests {
|
||||
cx.run_until_parked();
|
||||
|
||||
editor.update_in(cx, |editor, window, cx| {
|
||||
editor.set_text(" hello world ", window, cx);
|
||||
editor.set_text(" \u{A0}してhello world ", window, cx);
|
||||
});
|
||||
|
||||
let (content, _) = message_editor
|
||||
@@ -2844,7 +2854,7 @@ mod tests {
|
||||
assert_eq!(
|
||||
content,
|
||||
vec![acp::ContentBlock::Text(acp::TextContent {
|
||||
text: "hello world".into(),
|
||||
text: "してhello world".into(),
|
||||
annotations: None,
|
||||
meta: None
|
||||
})]
|
||||
@@ -2999,10 +3009,8 @@ mod tests {
|
||||
let app_state = cx.update(AppState::test);
|
||||
|
||||
cx.update(|cx| {
|
||||
language::init(cx);
|
||||
editor::init(cx);
|
||||
workspace::init(app_state.clone(), cx);
|
||||
Project::init_settings(cx);
|
||||
});
|
||||
|
||||
app_state
|
||||
|
||||
@@ -1,6 +1,6 @@
use std::rc::Rc;

use acp_thread::AgentModelSelector;
use acp_thread::{AgentModelInfo, AgentModelSelector};
use gpui::{Entity, FocusHandle};
use picker::popover_menu::PickerPopoverMenu;
use ui::{
@@ -36,12 +36,8 @@ impl AcpModelSelectorPopover {
self.menu_handle.toggle(window, cx);
}

pub fn active_model_name(&self, cx: &App) -> Option<SharedString> {
self.selector
.read(cx)
.delegate
.active_model()
.map(|model| model.name.clone())
pub fn active_model<'a>(&self, cx: &'a App) -> Option<&'a AgentModelInfo> {
self.selector.read(cx).delegate.active_model()
}
}

@@ -457,25 +457,23 @@ impl Render for AcpThreadHistory {
|
||||
.on_action(cx.listener(Self::select_last))
|
||||
.on_action(cx.listener(Self::confirm))
|
||||
.on_action(cx.listener(Self::remove_selected_thread))
|
||||
.when(!self.history_store.read(cx).is_empty(cx), |parent| {
|
||||
parent.child(
|
||||
h_flex()
|
||||
.h(px(41.)) // Match the toolbar perfectly
|
||||
.w_full()
|
||||
.py_1()
|
||||
.px_2()
|
||||
.gap_2()
|
||||
.justify_between()
|
||||
.border_b_1()
|
||||
.border_color(cx.theme().colors().border)
|
||||
.child(
|
||||
Icon::new(IconName::MagnifyingGlass)
|
||||
.color(Color::Muted)
|
||||
.size(IconSize::Small),
|
||||
)
|
||||
.child(self.search_editor.clone()),
|
||||
)
|
||||
})
|
||||
.child(
|
||||
h_flex()
|
||||
.h(px(41.)) // Match the toolbar perfectly
|
||||
.w_full()
|
||||
.py_1()
|
||||
.px_2()
|
||||
.gap_2()
|
||||
.justify_between()
|
||||
.border_b_1()
|
||||
.border_color(cx.theme().colors().border)
|
||||
.child(
|
||||
Icon::new(IconName::MagnifyingGlass)
|
||||
.color(Color::Muted)
|
||||
.size(IconSize::Small),
|
||||
)
|
||||
.child(self.search_editor.clone()),
|
||||
)
|
||||
.child({
|
||||
let view = v_flex()
|
||||
.id("list-container")
|
||||
@@ -484,19 +482,15 @@ impl Render for AcpThreadHistory {
|
||||
.flex_grow();
|
||||
|
||||
if self.history_store.read(cx).is_empty(cx) {
|
||||
view.justify_center()
|
||||
.child(
|
||||
h_flex().w_full().justify_center().child(
|
||||
Label::new("You don't have any past threads yet.")
|
||||
.size(LabelSize::Small),
|
||||
),
|
||||
)
|
||||
} else if self.search_produced_no_matches() {
|
||||
view.justify_center().child(
|
||||
h_flex().w_full().justify_center().child(
|
||||
Label::new("No threads match your search.").size(LabelSize::Small),
|
||||
),
|
||||
view.justify_center().items_center().child(
|
||||
Label::new("You don't have any past threads yet.")
|
||||
.size(LabelSize::Small)
|
||||
.color(Color::Muted),
|
||||
)
|
||||
} else if self.search_produced_no_matches() {
|
||||
view.justify_center()
|
||||
.items_center()
|
||||
.child(Label::new("No threads match your search.").size(LabelSize::Small))
|
||||
} else {
|
||||
view.child(
|
||||
uniform_list(
|
||||
@@ -673,7 +667,7 @@ impl EntryTimeFormat {
timezone,
time_format::TimestampFormat::EnhancedAbsolute,
),
EntryTimeFormat::TimeOnly => time_format::format_time(timestamp),
EntryTimeFormat::TimeOnly => time_format::format_time(timestamp.to_offset(timezone)),
}
}
}

@@ -4,12 +4,12 @@ use acp_thread::{
|
||||
ToolCallStatus, UserMessageId,
|
||||
};
|
||||
use acp_thread::{AgentConnection, Plan};
|
||||
use action_log::ActionLog;
|
||||
use action_log::{ActionLog, ActionLogTelemetry};
|
||||
use agent::{DbThreadMetadata, HistoryEntry, HistoryEntryId, HistoryStore, NativeAgentServer};
|
||||
use agent_client_protocol::{self as acp, PromptCapabilities};
|
||||
use agent_servers::{AgentServer, AgentServerDelegate};
|
||||
use agent_settings::{AgentProfileId, AgentSettings, CompletionMode};
|
||||
use anyhow::{Result, anyhow, bail};
|
||||
use anyhow::{Result, anyhow};
|
||||
use arrayvec::ArrayVec;
|
||||
use audio::{Audio, Sound};
|
||||
use buffer_diff::BufferDiff;
|
||||
@@ -125,8 +125,9 @@ impl ProfileProvider for Entity<agent::Thread> {
|
||||
}
|
||||
|
||||
fn set_profile(&self, profile_id: AgentProfileId, cx: &mut App) {
|
||||
self.update(cx, |thread, _cx| {
|
||||
thread.set_profile(profile_id);
|
||||
self.update(cx, |thread, cx| {
|
||||
// Apply the profile and let the thread swap to its default model.
|
||||
thread.set_profile(profile_id, cx);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -169,7 +170,7 @@ impl ThreadFeedbackState {
|
||||
}
|
||||
}
|
||||
let session_id = thread.read(cx).session_id().clone();
|
||||
let agent_name = telemetry.agent_name();
|
||||
let agent = thread.read(cx).connection().telemetry_id();
|
||||
let task = telemetry.thread_data(&session_id, cx);
|
||||
let rating = match feedback {
|
||||
ThreadFeedback::Positive => "positive",
|
||||
@@ -179,9 +180,9 @@ impl ThreadFeedbackState {
|
||||
let thread = task.await?;
|
||||
telemetry::event!(
|
||||
"Agent Thread Rated",
|
||||
agent = agent,
|
||||
session_id = session_id,
|
||||
rating = rating,
|
||||
agent = agent_name,
|
||||
thread = thread
|
||||
);
|
||||
anyhow::Ok(())
|
||||
@@ -206,15 +207,15 @@ impl ThreadFeedbackState {
|
||||
self.comments_editor.take();
|
||||
|
||||
let session_id = thread.read(cx).session_id().clone();
|
||||
let agent_name = telemetry.agent_name();
|
||||
let agent = thread.read(cx).connection().telemetry_id();
|
||||
let task = telemetry.thread_data(&session_id, cx);
|
||||
cx.background_spawn(async move {
|
||||
let thread = task.await?;
|
||||
telemetry::event!(
|
||||
"Agent Thread Feedback Comments",
|
||||
agent = agent,
|
||||
session_id = session_id,
|
||||
comments = comments,
|
||||
agent = agent_name,
|
||||
thread = thread
|
||||
);
|
||||
anyhow::Ok(())
|
||||
@@ -336,19 +337,7 @@ impl AcpThreadView {
|
||||
let prompt_capabilities = Rc::new(RefCell::new(acp::PromptCapabilities::default()));
|
||||
let available_commands = Rc::new(RefCell::new(vec![]));
|
||||
|
||||
let placeholder = if agent.name() == "Zed Agent" {
|
||||
format!("Message the {} — @ to include context", agent.name())
|
||||
} else if agent.name() == "Claude Code"
|
||||
|| agent.name() == "Codex"
|
||||
|| !available_commands.borrow().is_empty()
|
||||
{
|
||||
format!(
|
||||
"Message {} — @ to include context, / for commands",
|
||||
agent.name()
|
||||
)
|
||||
} else {
|
||||
format!("Message {} — @ to include context", agent.name())
|
||||
};
|
||||
let placeholder = placeholder_text(agent.name().as_ref(), false);
|
||||
|
||||
let message_editor = cx.new(|cx| {
|
||||
let mut editor = MessageEditor::new(
|
||||
@@ -538,14 +527,7 @@ impl AcpThreadView {
|
||||
})
|
||||
.log_err()
|
||||
} else {
|
||||
let root_dir = if let Some(acp_agent) = connection
|
||||
.clone()
|
||||
.downcast::<agent_servers::AcpConnection>()
|
||||
{
|
||||
acp_agent.root_dir().into()
|
||||
} else {
|
||||
root_dir.unwrap_or(paths::home_dir().as_path().into())
|
||||
};
|
||||
let root_dir = root_dir.unwrap_or(paths::home_dir().as_path().into());
|
||||
cx.update(|_, cx| {
|
||||
connection
|
||||
.clone()
|
||||
@@ -1130,8 +1112,6 @@ impl AcpThreadView {
|
||||
message_editor.contents(full_mention_content, cx)
|
||||
});
|
||||
|
||||
let agent_telemetry_id = self.agent.telemetry_id();
|
||||
|
||||
self.thread_error.take();
|
||||
self.editing_message.take();
|
||||
self.thread_feedback.clear();
|
||||
@@ -1139,6 +1119,8 @@ impl AcpThreadView {
|
||||
let Some(thread) = self.thread() else {
|
||||
return;
|
||||
};
|
||||
let agent_telemetry_id = self.agent.telemetry_id();
|
||||
let session_id = thread.read(cx).session_id().clone();
|
||||
let thread = thread.downgrade();
|
||||
if self.should_be_following {
|
||||
self.workspace
|
||||
@@ -1149,6 +1131,7 @@ impl AcpThreadView {
|
||||
}
|
||||
|
||||
self.is_loading_contents = true;
|
||||
let model_id = self.current_model_id(cx);
|
||||
let guard = cx.new(|_| ());
|
||||
cx.observe_release(&guard, |this, _guard, cx| {
|
||||
this.is_loading_contents = false;
|
||||
@@ -1170,6 +1153,7 @@ impl AcpThreadView {
|
||||
message_editor.clear(window, cx);
|
||||
});
|
||||
})?;
|
||||
let turn_start_time = Instant::now();
|
||||
let send = thread.update(cx, |thread, cx| {
|
||||
thread.action_log().update(cx, |action_log, cx| {
|
||||
for buffer in tracked_buffers {
|
||||
@@ -1178,11 +1162,27 @@ impl AcpThreadView {
|
||||
});
|
||||
drop(guard);
|
||||
|
||||
telemetry::event!("Agent Message Sent", agent = agent_telemetry_id);
|
||||
telemetry::event!(
|
||||
"Agent Message Sent",
|
||||
agent = agent_telemetry_id,
|
||||
session = session_id,
|
||||
model = model_id
|
||||
);
|
||||
|
||||
thread.send(contents, cx)
|
||||
})?;
|
||||
send.await
|
||||
let res = send.await;
|
||||
let turn_time_ms = turn_start_time.elapsed().as_millis();
|
||||
let status = if res.is_ok() { "success" } else { "failure" };
|
||||
telemetry::event!(
|
||||
"Agent Turn Completed",
|
||||
agent = agent_telemetry_id,
|
||||
session = session_id,
|
||||
model = model_id,
|
||||
status,
|
||||
turn_time_ms,
|
||||
);
|
||||
res
|
||||
});
|
||||
|
||||
cx.spawn(async move |this, cx| {
|
||||
@@ -1384,7 +1384,7 @@ impl AcpThreadView {
|
||||
AcpThreadEvent::Refusal => {
|
||||
self.thread_retry_status.take();
|
||||
self.thread_error = Some(ThreadError::Refusal);
|
||||
let model_or_agent_name = self.get_current_model_name(cx);
|
||||
let model_or_agent_name = self.current_model_name(cx);
|
||||
let notification_message =
|
||||
format!("{} refused to respond to this request", model_or_agent_name);
|
||||
self.notify_with_sound(¬ification_message, IconName::Warning, window, cx);
|
||||
@@ -1444,7 +1444,14 @@ impl AcpThreadView {
|
||||
});
|
||||
}
|
||||
|
||||
let has_commands = !available_commands.is_empty();
|
||||
self.available_commands.replace(available_commands);
|
||||
|
||||
let new_placeholder = placeholder_text(self.agent.name().as_ref(), has_commands);
|
||||
|
||||
self.message_editor.update(cx, |editor, cx| {
|
||||
editor.set_placeholder_text(&new_placeholder, window, cx);
|
||||
});
|
||||
}
|
||||
AcpThreadEvent::ModeUpdated(_mode) => {
|
||||
// The connection keeps track of the mode
|
||||
@@ -1853,6 +1860,14 @@ impl AcpThreadView {
|
||||
let Some(thread) = self.thread() else {
|
||||
return;
|
||||
};
|
||||
|
||||
telemetry::event!(
|
||||
"Agent Tool Call Authorized",
|
||||
agent = self.agent.telemetry_id(),
|
||||
session = thread.read(cx).session_id(),
|
||||
option = option_kind
|
||||
);
|
||||
|
||||
thread.update(cx, |thread, cx| {
|
||||
thread.authorize_tool_call(tool_call_id, option_id, option_kind, cx);
|
||||
});
|
||||
@@ -3585,6 +3600,7 @@ impl AcpThreadView {
|
||||
) -> Option<AnyElement> {
|
||||
let thread = thread_entity.read(cx);
|
||||
let action_log = thread.action_log();
|
||||
let telemetry = ActionLogTelemetry::from(thread);
|
||||
let changed_buffers = action_log.read(cx).changed_buffers(cx);
|
||||
let plan = thread.plan();
|
||||
|
||||
@@ -3632,6 +3648,7 @@ impl AcpThreadView {
|
||||
.when(self.edits_expanded, |parent| {
|
||||
parent.child(self.render_edited_files(
|
||||
action_log,
|
||||
telemetry,
|
||||
&changed_buffers,
|
||||
pending_edits,
|
||||
cx,
|
||||
@@ -3912,6 +3929,7 @@ impl AcpThreadView {
|
||||
fn render_edited_files(
|
||||
&self,
|
||||
action_log: &Entity<ActionLog>,
|
||||
telemetry: ActionLogTelemetry,
|
||||
changed_buffers: &BTreeMap<Entity<Buffer>, Entity<BufferDiff>>,
|
||||
pending_edits: bool,
|
||||
cx: &Context<Self>,
|
||||
@@ -4031,12 +4049,14 @@ impl AcpThreadView {
|
||||
.on_click({
|
||||
let buffer = buffer.clone();
|
||||
let action_log = action_log.clone();
|
||||
let telemetry = telemetry.clone();
|
||||
move |_, _, cx| {
|
||||
action_log.update(cx, |action_log, cx| {
|
||||
action_log
|
||||
.reject_edits_in_ranges(
|
||||
buffer.clone(),
|
||||
vec![Anchor::MIN..Anchor::MAX],
|
||||
Some(telemetry.clone()),
|
||||
cx,
|
||||
)
|
||||
.detach_and_log_err(cx);
|
||||
@@ -4051,11 +4071,13 @@ impl AcpThreadView {
|
||||
.on_click({
|
||||
let buffer = buffer.clone();
|
||||
let action_log = action_log.clone();
|
||||
let telemetry = telemetry.clone();
|
||||
move |_, _, cx| {
|
||||
action_log.update(cx, |action_log, cx| {
|
||||
action_log.keep_edits_in_range(
|
||||
buffer.clone(),
|
||||
Anchor::MIN..Anchor::MAX,
|
||||
Some(telemetry.clone()),
|
||||
cx,
|
||||
);
|
||||
})
|
||||
@@ -4271,17 +4293,23 @@ impl AcpThreadView {
|
||||
let Some(thread) = self.thread() else {
|
||||
return;
|
||||
};
|
||||
let telemetry = ActionLogTelemetry::from(thread.read(cx));
|
||||
let action_log = thread.read(cx).action_log().clone();
|
||||
action_log.update(cx, |action_log, cx| action_log.keep_all_edits(cx));
|
||||
action_log.update(cx, |action_log, cx| {
|
||||
action_log.keep_all_edits(Some(telemetry), cx)
|
||||
});
|
||||
}
|
||||
|
||||
fn reject_all(&mut self, _: &RejectAll, _window: &mut Window, cx: &mut Context<Self>) {
|
||||
let Some(thread) = self.thread() else {
|
||||
return;
|
||||
};
|
||||
let telemetry = ActionLogTelemetry::from(thread.read(cx));
|
||||
let action_log = thread.read(cx).action_log().clone();
|
||||
action_log
|
||||
.update(cx, |action_log, cx| action_log.reject_all_edits(cx))
|
||||
.update(cx, |action_log, cx| {
|
||||
action_log.reject_all_edits(Some(telemetry), cx)
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
|
||||
@@ -4677,35 +4705,36 @@ impl AcpThreadView {
|
||||
.languages
|
||||
.language_for_name("Markdown");
|
||||
|
||||
let (thread_summary, markdown) = if let Some(thread) = self.thread() {
|
||||
let (thread_title, markdown) = if let Some(thread) = self.thread() {
|
||||
let thread = thread.read(cx);
|
||||
(thread.title().to_string(), thread.to_markdown(cx))
|
||||
} else {
|
||||
return Task::ready(Ok(()));
|
||||
};
|
||||
|
||||
let project = workspace.read(cx).project().clone();
|
||||
window.spawn(cx, async move |cx| {
|
||||
let markdown_language = markdown_language_task.await?;
|
||||
|
||||
let buffer = project
|
||||
.update(cx, |project, cx| project.create_buffer(false, cx))?
|
||||
.await?;
|
||||
|
||||
buffer.update(cx, |buffer, cx| {
|
||||
buffer.set_text(markdown, cx);
|
||||
buffer.set_language(Some(markdown_language), cx);
|
||||
buffer.set_capability(language::Capability::ReadOnly, cx);
|
||||
})?;
|
||||
|
||||
workspace.update_in(cx, |workspace, window, cx| {
|
||||
let project = workspace.project().clone();
|
||||
|
||||
if !project.read(cx).is_local() {
|
||||
bail!("failed to open active thread as markdown in remote project");
|
||||
}
|
||||
|
||||
let buffer = project.update(cx, |project, cx| {
|
||||
project.create_local_buffer(&markdown, Some(markdown_language), true, cx)
|
||||
});
|
||||
let buffer = cx.new(|cx| {
|
||||
MultiBuffer::singleton(buffer, cx).with_title(thread_summary.clone())
|
||||
});
|
||||
let buffer = cx
|
||||
.new(|cx| MultiBuffer::singleton(buffer, cx).with_title(thread_title.clone()));
|
||||
|
||||
workspace.add_item_to_active_pane(
|
||||
Box::new(cx.new(|cx| {
|
||||
let mut editor =
|
||||
Editor::for_multibuffer(buffer, Some(project.clone()), window, cx);
|
||||
editor.set_breadcrumb_header(thread_summary);
|
||||
editor.set_breadcrumb_header(thread_title);
|
||||
editor
|
||||
})),
|
||||
None,
|
||||
@@ -4713,9 +4742,7 @@ impl AcpThreadView {
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
|
||||
anyhow::Ok(())
|
||||
})??;
|
||||
})?;
|
||||
anyhow::Ok(())
|
||||
})
|
||||
}
|
||||
@@ -5341,20 +5368,21 @@ impl AcpThreadView {
|
||||
)
|
||||
}
|
||||
|
||||
fn get_current_model_name(&self, cx: &App) -> SharedString {
|
||||
fn current_model_id(&self, cx: &App) -> Option<String> {
|
||||
self.model_selector
|
||||
.as_ref()
|
||||
.and_then(|selector| selector.read(cx).active_model(cx).map(|m| m.id.to_string()))
|
||||
}
|
||||
|
||||
fn current_model_name(&self, cx: &App) -> SharedString {
|
||||
// For native agent (Zed Agent), use the specific model name (e.g., "Claude 3.5 Sonnet")
|
||||
// For ACP agents, use the agent name (e.g., "Claude Code", "Gemini CLI")
|
||||
// This provides better clarity about what refused the request
|
||||
if self
|
||||
.agent
|
||||
.clone()
|
||||
.downcast::<agent::NativeAgentServer>()
|
||||
.is_some()
|
||||
{
|
||||
// Native agent - use the model name
|
||||
if self.as_native_connection(cx).is_some() {
|
||||
self.model_selector
|
||||
.as_ref()
|
||||
.and_then(|selector| selector.read(cx).active_model_name(cx))
|
||||
.and_then(|selector| selector.read(cx).active_model(cx))
|
||||
.map(|model| model.name.clone())
|
||||
.unwrap_or_else(|| SharedString::from("The model"))
|
||||
} else {
|
||||
// ACP agent - use the agent name (e.g., "Claude Code", "Gemini CLI")
|
||||
@@ -5363,7 +5391,7 @@ impl AcpThreadView {
|
||||
}
|
||||
|
||||
fn render_refusal_error(&self, cx: &mut Context<'_, Self>) -> Callout {
|
||||
let model_or_agent_name = self.get_current_model_name(cx);
|
||||
let model_or_agent_name = self.current_model_name(cx);
|
||||
let refusal_message = format!(
|
||||
"{} refused to respond to this prompt. This can happen when a model believes the prompt violates its content policy or safety guidelines, so rephrasing it can sometimes address the issue.",
|
||||
model_or_agent_name
|
||||
@@ -5675,6 +5703,19 @@ fn loading_contents_spinner(size: IconSize) -> AnyElement {
|
||||
.into_any_element()
|
||||
}

fn placeholder_text(agent_name: &str, has_commands: bool) -> String {
if agent_name == "Zed Agent" {
format!("Message the {} — @ to include context", agent_name)
} else if has_commands {
format!(
"Message {} — @ to include context, / for commands",
agent_name
)
} else {
format!("Message {} — @ to include context", agent_name)
}
}

impl Focusable for AcpThreadView {
fn focus_handle(&self, cx: &App) -> FocusHandle {
match self.thread_state {
@@ -5961,7 +6002,6 @@ pub(crate) mod tests {
|
||||
use acp_thread::StubAgentConnection;
|
||||
use agent_client_protocol::SessionId;
|
||||
use assistant_text_thread::TextThreadStore;
|
||||
use editor::EditorSettings;
|
||||
use fs::FakeFs;
|
||||
use gpui::{EventEmitter, SemanticVersion, TestAppContext, VisualTestContext};
|
||||
use project::Project;
|
||||
@@ -6349,6 +6389,10 @@ pub(crate) mod tests {
|
||||
struct SaboteurAgentConnection;
|
||||
|
||||
impl AgentConnection for SaboteurAgentConnection {
|
||||
fn telemetry_id(&self) -> &'static str {
|
||||
"saboteur"
|
||||
}
|
||||
|
||||
fn new_thread(
|
||||
self: Rc<Self>,
|
||||
project: Entity<Project>,
|
||||
@@ -6409,6 +6453,10 @@ pub(crate) mod tests {
|
||||
struct RefusalAgentConnection;
|
||||
|
||||
impl AgentConnection for RefusalAgentConnection {
|
||||
fn telemetry_id(&self) -> &'static str {
|
||||
"refusal"
|
||||
}
|
||||
|
||||
fn new_thread(
|
||||
self: Rc<Self>,
|
||||
project: Entity<Project>,
|
||||
@@ -6471,13 +6519,8 @@ pub(crate) mod tests {
|
||||
cx.update(|cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
language::init(cx);
|
||||
Project::init_settings(cx);
|
||||
AgentSettings::register(cx);
|
||||
workspace::init_settings(cx);
|
||||
theme::init(theme::LoadThemes::JustBase, cx);
|
||||
release_channel::init(SemanticVersion::default(), cx);
|
||||
EditorSettings::register(cx);
|
||||
prompt_store::init(cx)
|
||||
});
|
||||
}
|
||||
|
||||
@@ -638,15 +638,13 @@ impl AgentConfiguration {

let is_running = matches!(server_status, ContextServerStatus::Running);
let item_id = SharedString::from(context_server_id.0.clone());
let is_from_extension = server_configuration
.as_ref()
.map(|config| {
matches!(
config.as_ref(),
ContextServerConfiguration::Extension { .. }
)
})
.unwrap_or(false);
// Servers without a configuration can only be provided by extensions.
let provided_by_extension = server_configuration.is_none_or(|config| {
matches!(
config.as_ref(),
ContextServerConfiguration::Extension { .. }
)
});

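Note the behavioral difference between the removed and added computations when no configuration is present: `.map(...).unwrap_or(false)` yields `false` for `None`, while `is_none_or(...)` yields `true`, which is exactly what the new comment calls out (servers without a configuration can only come from extensions). A small self-contained illustration of just that `Option` behavior, with an illustrative stand-in for the configuration value:

```rust
// Illustrative only; `config` stands in for the optional server configuration.
let config: Option<&str> = None;

let old_style = config.map(|c| c == "extension").unwrap_or(false); // false when there is no configuration
let new_style = config.is_none_or(|c| c == "extension"); // true when there is no configuration

assert!(!old_style);
assert!(new_style);
```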
let error = if let ContextServerStatus::Error(error) = server_status.clone() {
Some(error)
@@ -660,7 +658,7 @@ impl AgentConfiguration {
.tools_for_server(&context_server_id)
.count();

let (source_icon, source_tooltip) = if is_from_extension {
let (source_icon, source_tooltip) = if provided_by_extension {
(
IconName::ZedSrcExtension,
"This MCP server was installed from an extension.",
@@ -710,7 +708,6 @@ impl AgentConfiguration {
let fs = self.fs.clone();
let context_server_id = context_server_id.clone();
let language_registry = self.language_registry.clone();
let context_server_store = self.context_server_store.clone();
let workspace = self.workspace.clone();
let context_server_registry = self.context_server_registry.clone();

@@ -752,23 +749,10 @@ impl AgentConfiguration {
.entry("Uninstall", None, {
let fs = fs.clone();
let context_server_id = context_server_id.clone();
let context_server_store = context_server_store.clone();
let workspace = workspace.clone();
move |_, cx| {
let is_provided_by_extension = context_server_store
.read(cx)
.configuration_for_server(&context_server_id)
.as_ref()
.map(|config| {
matches!(
config.as_ref(),
ContextServerConfiguration::Extension { .. }
)
})
.unwrap_or(false);

let uninstall_extension_task = match (
is_provided_by_extension,
provided_by_extension,
resolve_extension_for_context_server(&context_server_id, cx),
) {
(true, Some((id, manifest))) => {
@@ -1047,7 +1031,7 @@ impl AgentConfiguration {
AgentIcon::Name(icon_name) => Icon::new(icon_name)
.size(IconSize::Small)
.color(Color::Muted),
AgentIcon::Path(icon_path) => Icon::from_path(icon_path)
AgentIcon::Path(icon_path) => Icon::from_external_svg(icon_path)
.size(IconSize::Small)
.color(Color::Muted),
};

@@ -515,16 +515,14 @@ impl Render for AddLlmProviderModal {
#[cfg(test)]
mod tests {
use super::*;
use editor::EditorSettings;
use fs::FakeFs;
use gpui::{TestAppContext, VisualTestContext};
use language::language_settings;
use language_model::{
LanguageModelProviderId, LanguageModelProviderName,
fake_provider::FakeLanguageModelProvider,
};
use project::Project;
use settings::{Settings as _, SettingsStore};
use settings::SettingsStore;
use util::path;

#[gpui::test]
@@ -730,13 +728,9 @@ mod tests {
cx.update(|cx| {
let store = SettingsStore::test(cx);
cx.set_global(store);
workspace::init_settings(cx);
Project::init_settings(cx);
theme::init(theme::LoadThemes::JustBase, cx);
language_settings::init(cx);
EditorSettings::register(cx);

language_model::init_settings(cx);
language_models::init_settings(cx);
});

let fs = FakeFs::new(cx.executor());

@@ -7,8 +7,10 @@ use agent_settings::{AgentProfile, AgentProfileId, AgentSettings, builtin_profil
|
||||
use editor::Editor;
|
||||
use fs::Fs;
|
||||
use gpui::{DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Subscription, prelude::*};
|
||||
use language_model::LanguageModel;
|
||||
use settings::Settings as _;
|
||||
use language_model::{LanguageModel, LanguageModelRegistry};
|
||||
use settings::{
|
||||
LanguageModelProviderSetting, LanguageModelSelection, Settings as _, update_settings_file,
|
||||
};
|
||||
use ui::{
|
||||
KeyBinding, ListItem, ListItemSpacing, ListSeparator, Navigable, NavigableEntry, prelude::*,
|
||||
};
|
||||
@@ -16,6 +18,7 @@ use workspace::{ModalView, Workspace};
|
||||
|
||||
use crate::agent_configuration::manage_profiles_modal::profile_modal_header::ProfileModalHeader;
|
||||
use crate::agent_configuration::tool_picker::{ToolPicker, ToolPickerDelegate};
|
||||
use crate::language_model_selector::{LanguageModelSelector, language_model_selector};
|
||||
use crate::{AgentPanel, ManageProfiles};
|
||||
|
||||
enum Mode {
|
||||
@@ -32,6 +35,11 @@ enum Mode {
|
||||
tool_picker: Entity<ToolPicker>,
|
||||
_subscription: Subscription,
|
||||
},
|
||||
ConfigureDefaultModel {
|
||||
profile_id: AgentProfileId,
|
||||
model_picker: Entity<LanguageModelSelector>,
|
||||
_subscription: Subscription,
|
||||
},
|
||||
}
|
||||
|
||||
impl Mode {
|
||||
@@ -83,6 +91,7 @@ pub struct ChooseProfileMode {
|
||||
pub struct ViewProfileMode {
|
||||
profile_id: AgentProfileId,
|
||||
fork_profile: NavigableEntry,
|
||||
configure_default_model: NavigableEntry,
|
||||
configure_tools: NavigableEntry,
|
||||
configure_mcps: NavigableEntry,
|
||||
cancel_item: NavigableEntry,
|
||||
@@ -180,6 +189,7 @@ impl ManageProfilesModal {
|
||||
self.mode = Mode::ViewProfile(ViewProfileMode {
|
||||
profile_id,
|
||||
fork_profile: NavigableEntry::focusable(cx),
|
||||
configure_default_model: NavigableEntry::focusable(cx),
|
||||
configure_tools: NavigableEntry::focusable(cx),
|
||||
configure_mcps: NavigableEntry::focusable(cx),
|
||||
cancel_item: NavigableEntry::focusable(cx),
|
||||
@@ -187,6 +197,83 @@ impl ManageProfilesModal {
|
||||
self.focus_handle(cx).focus(window);
|
||||
}
|
||||
|
||||
fn configure_default_model(
|
||||
&mut self,
|
||||
profile_id: AgentProfileId,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let fs = self.fs.clone();
|
||||
let profile_id_for_closure = profile_id.clone();
|
||||
|
||||
let model_picker = cx.new(|cx| {
|
||||
let fs = fs.clone();
|
||||
let profile_id = profile_id_for_closure.clone();
|
||||
|
||||
language_model_selector(
|
||||
{
|
||||
let profile_id = profile_id.clone();
|
||||
move |cx| {
|
||||
let settings = AgentSettings::get_global(cx);
|
||||
|
||||
settings
|
||||
.profiles
|
||||
.get(&profile_id)
|
||||
.and_then(|profile| profile.default_model.as_ref())
|
||||
.and_then(|selection| {
|
||||
let registry = LanguageModelRegistry::read_global(cx);
|
||||
let provider_id = language_model::LanguageModelProviderId(
|
||||
gpui::SharedString::from(selection.provider.0.clone()),
|
||||
);
|
||||
let provider = registry.provider(&provider_id)?;
|
||||
let model = provider
|
||||
.provided_models(cx)
|
||||
.iter()
|
||||
.find(|m| m.id().0 == selection.model.as_str())?
|
||||
.clone();
|
||||
Some(language_model::ConfiguredModel { provider, model })
|
||||
})
|
||||
}
|
||||
},
|
||||
move |model, cx| {
|
||||
let provider = model.provider_id().0.to_string();
|
||||
let model_id = model.id().0.to_string();
|
||||
let profile_id = profile_id.clone();
|
||||
|
||||
update_settings_file(fs.clone(), cx, move |settings, _cx| {
|
||||
let agent_settings = settings.agent.get_or_insert_default();
|
||||
if let Some(profiles) = agent_settings.profiles.as_mut() {
|
||||
if let Some(profile) = profiles.get_mut(profile_id.0.as_ref()) {
|
||||
profile.default_model = Some(LanguageModelSelection {
|
||||
provider: LanguageModelProviderSetting(provider.clone()),
|
||||
model: model_id.clone(),
|
||||
});
|
||||
}
|
||||
}
|
||||
});
|
||||
},
|
||||
false, // Do not use popover styles for the model picker
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
.modal(false)
|
||||
});
|
||||
|
||||
let dismiss_subscription = cx.subscribe_in(&model_picker, window, {
|
||||
let profile_id = profile_id.clone();
|
||||
move |this, _picker, _: &DismissEvent, window, cx| {
|
||||
this.view_profile(profile_id.clone(), window, cx);
|
||||
}
|
||||
});
|
||||
|
||||
self.mode = Mode::ConfigureDefaultModel {
|
||||
profile_id,
|
||||
model_picker,
|
||||
_subscription: dismiss_subscription,
|
||||
};
|
||||
self.focus_handle(cx).focus(window);
|
||||
}
|
||||
|
||||
fn configure_mcp_tools(
|
||||
&mut self,
|
||||
profile_id: AgentProfileId,
|
||||
@@ -277,6 +364,7 @@ impl ManageProfilesModal {
|
||||
Mode::ViewProfile(_) => {}
|
||||
Mode::ConfigureTools { .. } => {}
|
||||
Mode::ConfigureMcps { .. } => {}
|
||||
Mode::ConfigureDefaultModel { .. } => {}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -299,6 +387,9 @@ impl ManageProfilesModal {
|
||||
Mode::ConfigureMcps { profile_id, .. } => {
|
||||
self.view_profile(profile_id.clone(), window, cx)
|
||||
}
|
||||
Mode::ConfigureDefaultModel { profile_id, .. } => {
|
||||
self.view_profile(profile_id.clone(), window, cx)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -313,6 +404,7 @@ impl Focusable for ManageProfilesModal {
|
||||
Mode::ViewProfile(_) => self.focus_handle.clone(),
|
||||
Mode::ConfigureTools { tool_picker, .. } => tool_picker.focus_handle(cx),
|
||||
Mode::ConfigureMcps { tool_picker, .. } => tool_picker.focus_handle(cx),
|
||||
Mode::ConfigureDefaultModel { model_picker, .. } => model_picker.focus_handle(cx),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -544,6 +636,47 @@ impl ManageProfilesModal {
|
||||
}),
|
||||
),
|
||||
)
|
||||
.child(
|
||||
div()
|
||||
.id("configure-default-model")
|
||||
.track_focus(&mode.configure_default_model.focus_handle)
|
||||
.on_action({
|
||||
let profile_id = mode.profile_id.clone();
|
||||
cx.listener(move |this, _: &menu::Confirm, window, cx| {
|
||||
this.configure_default_model(
|
||||
profile_id.clone(),
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
})
|
||||
})
|
||||
.child(
|
||||
ListItem::new("model-item")
|
||||
.toggle_state(
|
||||
mode.configure_default_model
|
||||
.focus_handle
|
||||
.contains_focused(window, cx),
|
||||
)
|
||||
.inset(true)
|
||||
.spacing(ListItemSpacing::Sparse)
|
||||
.start_slot(
|
||||
Icon::new(IconName::ZedAssistant)
|
||||
.size(IconSize::Small)
|
||||
.color(Color::Muted),
|
||||
)
|
||||
.child(Label::new("Configure Default Model"))
|
||||
.on_click({
|
||||
let profile_id = mode.profile_id.clone();
|
||||
cx.listener(move |this, _, window, cx| {
|
||||
this.configure_default_model(
|
||||
profile_id.clone(),
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
})
|
||||
}),
|
||||
),
|
||||
)
|
||||
.child(
|
||||
div()
|
||||
.id("configure-builtin-tools")
|
||||
@@ -668,6 +801,7 @@ impl ManageProfilesModal {
|
||||
.into_any_element(),
|
||||
)
|
||||
.entry(mode.fork_profile)
|
||||
.entry(mode.configure_default_model)
|
||||
.entry(mode.configure_tools)
|
||||
.entry(mode.configure_mcps)
|
||||
.entry(mode.cancel_item)
|
||||
@@ -753,6 +887,29 @@ impl Render for ManageProfilesModal {
|
||||
.child(go_back_item)
|
||||
.into_any_element()
|
||||
}
|
||||
Mode::ConfigureDefaultModel {
|
||||
profile_id,
|
||||
model_picker,
|
||||
..
|
||||
} => {
|
||||
let profile_name = settings
|
||||
.profiles
|
||||
.get(profile_id)
|
||||
.map(|profile| profile.name.clone())
|
||||
.unwrap_or_else(|| "Unknown".into());
|
||||
|
||||
v_flex()
|
||||
.pb_1()
|
||||
.child(ProfileModalHeader::new(
|
||||
format!("{profile_name} — Configure Default Model"),
|
||||
Some(IconName::Ai),
|
||||
))
|
||||
.child(ListSeparator)
|
||||
.child(v_flex().w(rems(34.)).child(model_picker.clone()))
|
||||
.child(ListSeparator)
|
||||
.child(go_back_item)
|
||||
.into_any_element()
|
||||
}
|
||||
Mode::ConfigureMcps {
|
||||
profile_id,
|
||||
tool_picker,
|
||||
|
||||
@@ -314,6 +314,7 @@ impl PickerDelegate for ToolPickerDelegate {
)
})
.collect(),
default_model: default_profile.default_model.clone(),
});

if let Some(server_id) = server_id {

@@ -1,6 +1,6 @@
|
||||
use crate::{Keep, KeepAll, OpenAgentDiff, Reject, RejectAll};
|
||||
use acp_thread::{AcpThread, AcpThreadEvent};
|
||||
use action_log::ActionLog;
|
||||
use action_log::ActionLogTelemetry;
|
||||
use agent_settings::AgentSettings;
|
||||
use anyhow::Result;
|
||||
use buffer_diff::DiffHunkStatus;
|
||||
@@ -40,79 +40,16 @@ use zed_actions::assistant::ToggleFocus;
|
||||
pub struct AgentDiffPane {
|
||||
multibuffer: Entity<MultiBuffer>,
|
||||
editor: Entity<Editor>,
|
||||
thread: AgentDiffThread,
|
||||
thread: Entity<AcpThread>,
|
||||
focus_handle: FocusHandle,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
title: SharedString,
|
||||
_subscriptions: Vec<Subscription>,
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Eq, Clone)]
|
||||
pub enum AgentDiffThread {
|
||||
AcpThread(Entity<AcpThread>),
|
||||
}
|
||||
|
||||
impl AgentDiffThread {
|
||||
fn project(&self, cx: &App) -> Entity<Project> {
|
||||
match self {
|
||||
AgentDiffThread::AcpThread(thread) => thread.read(cx).project().clone(),
|
||||
}
|
||||
}
|
||||
fn action_log(&self, cx: &App) -> Entity<ActionLog> {
|
||||
match self {
|
||||
AgentDiffThread::AcpThread(thread) => thread.read(cx).action_log().clone(),
|
||||
}
|
||||
}
|
||||
|
||||
fn title(&self, cx: &App) -> SharedString {
|
||||
match self {
|
||||
AgentDiffThread::AcpThread(thread) => thread.read(cx).title(),
|
||||
}
|
||||
}
|
||||
|
||||
fn has_pending_edit_tool_uses(&self, cx: &App) -> bool {
|
||||
match self {
|
||||
AgentDiffThread::AcpThread(thread) => thread.read(cx).has_pending_edit_tool_calls(),
|
||||
}
|
||||
}
|
||||
|
||||
fn downgrade(&self) -> WeakAgentDiffThread {
|
||||
match self {
|
||||
AgentDiffThread::AcpThread(thread) => {
|
||||
WeakAgentDiffThread::AcpThread(thread.downgrade())
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Entity<AcpThread>> for AgentDiffThread {
|
||||
fn from(entity: Entity<AcpThread>) -> Self {
|
||||
AgentDiffThread::AcpThread(entity)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Eq, Clone)]
|
||||
pub enum WeakAgentDiffThread {
|
||||
AcpThread(WeakEntity<AcpThread>),
|
||||
}
|
||||
|
||||
impl WeakAgentDiffThread {
|
||||
pub fn upgrade(&self) -> Option<AgentDiffThread> {
|
||||
match self {
|
||||
WeakAgentDiffThread::AcpThread(weak) => weak.upgrade().map(AgentDiffThread::AcpThread),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<WeakEntity<AcpThread>> for WeakAgentDiffThread {
|
||||
fn from(entity: WeakEntity<AcpThread>) -> Self {
|
||||
WeakAgentDiffThread::AcpThread(entity)
|
||||
}
|
||||
}
|
||||
|
||||
impl AgentDiffPane {
|
||||
pub fn deploy(
|
||||
thread: impl Into<AgentDiffThread>,
|
||||
thread: Entity<AcpThread>,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
@@ -123,12 +60,11 @@ impl AgentDiffPane {
|
||||
}
|
||||
|
||||
pub fn deploy_in_workspace(
|
||||
thread: impl Into<AgentDiffThread>,
|
||||
thread: Entity<AcpThread>,
|
||||
workspace: &mut Workspace,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Workspace>,
|
||||
) -> Entity<Self> {
|
||||
let thread = thread.into();
|
||||
let existing_diff = workspace
|
||||
.items_of_type::<AgentDiffPane>(cx)
|
||||
.find(|diff| diff.read(cx).thread == thread);
|
||||
@@ -145,7 +81,7 @@ impl AgentDiffPane {
|
||||
}
|
||||
|
||||
pub fn new(
|
||||
thread: AgentDiffThread,
|
||||
thread: Entity<AcpThread>,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
@@ -153,7 +89,7 @@ impl AgentDiffPane {
|
||||
let focus_handle = cx.focus_handle();
|
||||
let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite));
|
||||
|
||||
let project = thread.project(cx);
|
||||
let project = thread.read(cx).project().clone();
|
||||
let editor = cx.new(|cx| {
|
||||
let mut editor =
|
||||
Editor::for_multibuffer(multibuffer.clone(), Some(project.clone()), window, cx);
|
||||
@@ -164,19 +100,16 @@ impl AgentDiffPane {
|
||||
editor
|
||||
});
|
||||
|
||||
let action_log = thread.action_log(cx);
|
||||
let action_log = thread.read(cx).action_log().clone();
|
||||
|
||||
let mut this = Self {
|
||||
_subscriptions: vec![
|
||||
cx.observe_in(&action_log, window, |this, _action_log, window, cx| {
|
||||
this.update_excerpts(window, cx)
|
||||
}),
|
||||
match &thread {
|
||||
AgentDiffThread::AcpThread(thread) => cx
|
||||
.subscribe(thread, |this, _thread, event, cx| {
|
||||
this.handle_acp_thread_event(event, cx)
|
||||
}),
|
||||
},
|
||||
cx.subscribe(&thread, |this, _thread, event, cx| {
|
||||
this.handle_acp_thread_event(event, cx)
|
||||
}),
|
||||
],
|
||||
title: SharedString::default(),
|
||||
multibuffer,
|
||||
@@ -191,7 +124,12 @@ impl AgentDiffPane {
|
||||
}
|
||||
|
||||
fn update_excerpts(&mut self, window: &mut Window, cx: &mut Context<Self>) {
|
||||
let changed_buffers = self.thread.action_log(cx).read(cx).changed_buffers(cx);
|
||||
let changed_buffers = self
|
||||
.thread
|
||||
.read(cx)
|
||||
.action_log()
|
||||
.read(cx)
|
||||
.changed_buffers(cx);
|
||||
let mut paths_to_delete = self.multibuffer.read(cx).paths().collect::<HashSet<_>>();
|
||||
|
||||
for (buffer, diff_handle) in changed_buffers {
|
||||
@@ -278,7 +216,7 @@ impl AgentDiffPane {
|
||||
}
|
||||
|
||||
fn update_title(&mut self, cx: &mut Context<Self>) {
|
||||
let new_title = self.thread.title(cx);
|
||||
let new_title = self.thread.read(cx).title();
|
||||
if new_title != self.title {
|
||||
self.title = new_title;
|
||||
cx.emit(EditorEvent::TitleChanged);
|
||||
@@ -340,16 +278,18 @@ impl AgentDiffPane {
|
||||
}
|
||||
|
||||
fn keep_all(&mut self, _: &KeepAll, _window: &mut Window, cx: &mut Context<Self>) {
|
||||
self.thread
|
||||
.action_log(cx)
|
||||
.update(cx, |action_log, cx| action_log.keep_all_edits(cx))
|
||||
let telemetry = ActionLogTelemetry::from(self.thread.read(cx));
|
||||
let action_log = self.thread.read(cx).action_log().clone();
|
||||
action_log.update(cx, |action_log, cx| {
|
||||
action_log.keep_all_edits(Some(telemetry), cx)
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
fn keep_edits_in_selection(
|
||||
editor: &mut Editor,
|
||||
buffer_snapshot: &MultiBufferSnapshot,
|
||||
thread: &AgentDiffThread,
|
||||
thread: &Entity<AcpThread>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Editor>,
|
||||
) {
|
||||
@@ -364,7 +304,7 @@ fn keep_edits_in_selection(
|
||||
fn reject_edits_in_selection(
|
||||
editor: &mut Editor,
|
||||
buffer_snapshot: &MultiBufferSnapshot,
|
||||
thread: &AgentDiffThread,
|
||||
thread: &Entity<AcpThread>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Editor>,
|
||||
) {
|
||||
@@ -378,7 +318,7 @@ fn reject_edits_in_selection(
|
||||
fn keep_edits_in_ranges(
|
||||
editor: &mut Editor,
|
||||
buffer_snapshot: &MultiBufferSnapshot,
|
||||
thread: &AgentDiffThread,
|
||||
thread: &Entity<AcpThread>,
|
||||
ranges: Vec<Range<editor::Anchor>>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Editor>,
|
||||
@@ -393,8 +333,15 @@ fn keep_edits_in_ranges(
|
||||
for hunk in &diff_hunks_in_ranges {
|
||||
let buffer = multibuffer.read(cx).buffer(hunk.buffer_id);
|
||||
if let Some(buffer) = buffer {
|
||||
thread.action_log(cx).update(cx, |action_log, cx| {
|
||||
action_log.keep_edits_in_range(buffer, hunk.buffer_range.clone(), cx)
|
||||
let action_log = thread.read(cx).action_log().clone();
|
||||
let telemetry = ActionLogTelemetry::from(thread.read(cx));
|
||||
action_log.update(cx, |action_log, cx| {
|
||||
action_log.keep_edits_in_range(
|
||||
buffer,
|
||||
hunk.buffer_range.clone(),
|
||||
Some(telemetry),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -403,7 +350,7 @@ fn keep_edits_in_ranges(
|
||||
fn reject_edits_in_ranges(
|
||||
editor: &mut Editor,
|
||||
buffer_snapshot: &MultiBufferSnapshot,
|
||||
thread: &AgentDiffThread,
|
||||
thread: &Entity<AcpThread>,
|
||||
ranges: Vec<Range<editor::Anchor>>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Editor>,
|
||||
@@ -427,11 +374,12 @@ fn reject_edits_in_ranges(
|
||||
}
|
||||
}
|
||||
|
||||
let action_log = thread.read(cx).action_log().clone();
|
||||
let telemetry = ActionLogTelemetry::from(thread.read(cx));
|
||||
for (buffer, ranges) in ranges_by_buffer {
|
||||
thread
|
||||
.action_log(cx)
|
||||
action_log
|
||||
.update(cx, |action_log, cx| {
|
||||
action_log.reject_edits_in_ranges(buffer, ranges, cx)
|
||||
action_log.reject_edits_in_ranges(buffer, ranges, Some(telemetry.clone()), cx)
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
}
|
||||
@@ -531,7 +479,7 @@ impl Item for AgentDiffPane {
|
||||
}
|
||||
|
||||
fn tab_content(&self, params: TabContentParams, _window: &Window, cx: &App) -> AnyElement {
|
||||
let title = self.thread.title(cx);
|
||||
let title = self.thread.read(cx).title();
|
||||
Label::new(format!("Review: {}", title))
|
||||
.color(if params.selected {
|
||||
Color::Default
|
||||
@@ -712,7 +660,7 @@ impl Render for AgentDiffPane {
|
||||
}
|
||||
}
|
||||
|
||||
fn diff_hunk_controls(thread: &AgentDiffThread) -> editor::RenderDiffHunkControlsFn {
|
||||
fn diff_hunk_controls(thread: &Entity<AcpThread>) -> editor::RenderDiffHunkControlsFn {
|
||||
let thread = thread.clone();
|
||||
|
||||
Arc::new(
|
||||
@@ -739,7 +687,7 @@ fn render_diff_hunk_controls(
|
||||
hunk_range: Range<editor::Anchor>,
|
||||
is_created_file: bool,
|
||||
line_height: Pixels,
|
||||
thread: &AgentDiffThread,
|
||||
thread: &Entity<AcpThread>,
|
||||
editor: &Entity<Editor>,
|
||||
cx: &mut App,
|
||||
) -> AnyElement {
|
||||
@@ -1153,8 +1101,11 @@ impl Render for AgentDiffToolbar {
|
||||
return Empty.into_any();
|
||||
};
|
||||
|
||||
let has_pending_edit_tool_use =
|
||||
agent_diff.read(cx).thread.has_pending_edit_tool_uses(cx);
|
||||
let has_pending_edit_tool_use = agent_diff
|
||||
.read(cx)
|
||||
.thread
|
||||
.read(cx)
|
||||
.has_pending_edit_tool_calls();
|
||||
|
||||
if has_pending_edit_tool_use {
|
||||
return div().px_2().child(spinner_icon).into_any();
|
||||
@@ -1214,7 +1165,7 @@ pub enum EditorState {
|
||||
}
|
||||
|
||||
struct WorkspaceThread {
|
||||
thread: WeakAgentDiffThread,
|
||||
thread: WeakEntity<AcpThread>,
|
||||
_thread_subscriptions: (Subscription, Subscription),
|
||||
singleton_editors: HashMap<WeakEntity<Buffer>, HashMap<WeakEntity<Editor>, Subscription>>,
|
||||
_settings_subscription: Subscription,
|
||||
@@ -1239,23 +1190,23 @@ impl AgentDiff {
|
||||
|
||||
pub fn set_active_thread(
|
||||
workspace: &WeakEntity<Workspace>,
|
||||
thread: impl Into<AgentDiffThread>,
|
||||
thread: Entity<AcpThread>,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) {
|
||||
Self::global(cx).update(cx, |this, cx| {
|
||||
this.register_active_thread_impl(workspace, thread.into(), window, cx);
|
||||
this.register_active_thread_impl(workspace, thread, window, cx);
|
||||
});
|
||||
}
|
||||
|
||||
fn register_active_thread_impl(
|
||||
&mut self,
|
||||
workspace: &WeakEntity<Workspace>,
|
||||
thread: AgentDiffThread,
|
||||
thread: Entity<AcpThread>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let action_log = thread.action_log(cx);
|
||||
let action_log = thread.read(cx).action_log().clone();
|
||||
|
||||
let action_log_subscription = cx.observe_in(&action_log, window, {
|
||||
let workspace = workspace.clone();
|
||||
@@ -1264,14 +1215,12 @@ impl AgentDiff {
|
||||
}
|
||||
});
|
||||
|
||||
let thread_subscription = match &thread {
|
||||
AgentDiffThread::AcpThread(thread) => cx.subscribe_in(thread, window, {
|
||||
let workspace = workspace.clone();
|
||||
move |this, thread, event, window, cx| {
|
||||
this.handle_acp_thread_event(&workspace, thread, event, window, cx)
|
||||
}
|
||||
}),
|
||||
};
|
||||
let thread_subscription = cx.subscribe_in(&thread, window, {
|
||||
let workspace = workspace.clone();
|
||||
move |this, thread, event, window, cx| {
|
||||
this.handle_acp_thread_event(&workspace, thread, event, window, cx)
|
||||
}
|
||||
});
|
||||
|
||||
if let Some(workspace_thread) = self.workspace_threads.get_mut(workspace) {
|
||||
// replace thread and action log subscription, but keep editors
|
||||
@@ -1348,7 +1297,7 @@ impl AgentDiff {
|
||||
|
||||
fn register_review_action<T: Action>(
|
||||
workspace: &mut Workspace,
|
||||
review: impl Fn(&Entity<Editor>, &AgentDiffThread, &mut Window, &mut App) -> PostReviewState
|
||||
review: impl Fn(&Entity<Editor>, &Entity<AcpThread>, &mut Window, &mut App) -> PostReviewState
|
||||
+ 'static,
|
||||
this: &Entity<AgentDiff>,
|
||||
) {
|
||||
@@ -1508,7 +1457,7 @@ impl AgentDiff {
|
||||
return;
|
||||
};
|
||||
|
||||
let action_log = thread.action_log(cx);
|
||||
let action_log = thread.read(cx).action_log();
|
||||
let changed_buffers = action_log.read(cx).changed_buffers(cx);
|
||||
|
||||
let mut unaffected = self.reviewing_editors.clone();
|
||||
@@ -1627,7 +1576,7 @@ impl AgentDiff {
|
||||
|
||||
fn keep_all(
|
||||
editor: &Entity<Editor>,
|
||||
thread: &AgentDiffThread,
|
||||
thread: &Entity<AcpThread>,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) -> PostReviewState {
|
||||
@@ -1647,7 +1596,7 @@ impl AgentDiff {
|
||||
|
||||
fn reject_all(
|
||||
editor: &Entity<Editor>,
|
||||
thread: &AgentDiffThread,
|
||||
thread: &Entity<AcpThread>,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) -> PostReviewState {
|
||||
@@ -1667,7 +1616,7 @@ impl AgentDiff {
|
||||
|
||||
fn keep(
|
||||
editor: &Entity<Editor>,
|
||||
thread: &AgentDiffThread,
|
||||
thread: &Entity<AcpThread>,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) -> PostReviewState {
|
||||
@@ -1680,7 +1629,7 @@ impl AgentDiff {
|
||||
|
||||
fn reject(
|
||||
editor: &Entity<Editor>,
|
||||
thread: &AgentDiffThread,
|
||||
thread: &Entity<AcpThread>,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) -> PostReviewState {
|
||||
@@ -1703,7 +1652,7 @@ impl AgentDiff {
|
||||
fn review_in_active_editor(
|
||||
&mut self,
|
||||
workspace: &mut Workspace,
|
||||
review: impl Fn(&Entity<Editor>, &AgentDiffThread, &mut Window, &mut App) -> PostReviewState,
|
||||
review: impl Fn(&Entity<Editor>, &Entity<AcpThread>, &mut Window, &mut App) -> PostReviewState,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Option<Task<Result<()>>> {
|
||||
@@ -1725,7 +1674,7 @@ impl AgentDiff {
|
||||
if let PostReviewState::AllReviewed = review(&editor, &thread, window, cx)
|
||||
&& let Some(curr_buffer) = editor.read(cx).buffer().read(cx).as_singleton()
|
||||
{
|
||||
let changed_buffers = thread.action_log(cx).read(cx).changed_buffers(cx);
|
||||
let changed_buffers = thread.read(cx).action_log().read(cx).changed_buffers(cx);
|
||||
|
||||
let mut keys = changed_buffers.keys().cycle();
|
||||
keys.find(|k| *k == &curr_buffer);
|
||||
@@ -1768,12 +1717,11 @@ mod tests {
|
||||
use super::*;
|
||||
use crate::Keep;
|
||||
use acp_thread::AgentConnection as _;
|
||||
use agent_settings::AgentSettings;
|
||||
use editor::EditorSettings;
|
||||
use gpui::{TestAppContext, UpdateGlobal, VisualTestContext};
|
||||
use project::{FakeFs, Project};
|
||||
use serde_json::json;
|
||||
use settings::{Settings, SettingsStore};
|
||||
use settings::SettingsStore;
|
||||
use std::{path::Path, rc::Rc};
|
||||
use util::path;
|
||||
|
||||
@@ -1782,13 +1730,8 @@ mod tests {
|
||||
cx.update(|cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
language::init(cx);
|
||||
Project::init_settings(cx);
|
||||
AgentSettings::register(cx);
|
||||
prompt_store::init(cx);
|
||||
workspace::init_settings(cx);
|
||||
theme::init(theme::LoadThemes::JustBase, cx);
|
||||
EditorSettings::register(cx);
|
||||
language_model::init_settings(cx);
|
||||
});
|
||||
|
||||
@@ -1815,8 +1758,7 @@ mod tests {
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let thread = AgentDiffThread::AcpThread(thread);
|
||||
let action_log = cx.read(|cx| thread.action_log(cx));
|
||||
let action_log = cx.read(|cx| thread.read(cx).action_log().clone());
|
||||
|
||||
let (workspace, cx) =
|
||||
cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx));
|
||||
@@ -1942,13 +1884,8 @@ mod tests {
|
||||
cx.update(|cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
language::init(cx);
|
||||
Project::init_settings(cx);
|
||||
AgentSettings::register(cx);
|
||||
prompt_store::init(cx);
|
||||
workspace::init_settings(cx);
|
||||
theme::init(theme::LoadThemes::JustBase, cx);
|
||||
EditorSettings::register(cx);
|
||||
language_model::init_settings(cx);
|
||||
workspace::register_project_item::<Editor>(cx);
|
||||
});
|
||||
@@ -2004,7 +1941,6 @@ mod tests {
|
||||
let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone());
|
||||
|
||||
// Set the active thread
|
||||
let thread = AgentDiffThread::AcpThread(thread);
|
||||
cx.update(|window, cx| {
|
||||
AgentDiff::set_active_thread(&workspace.downgrade(), thread.clone(), window, cx)
|
||||
});
|
||||
|
||||
@@ -47,6 +47,7 @@ impl AgentModelSelector {
}
}
},
true, // Use popover styles for picker
window,
cx,
)

@@ -16,7 +16,7 @@ use serde::{Deserialize, Serialize};
use settings::{
DefaultAgentView as DefaultView, LanguageModelProviderSetting, LanguageModelSelection,
};
use zed_actions::OpenBrowser;

use zed_actions::agent::{OpenClaudeCodeOnboardingModal, ReauthenticateAgent};

use crate::ui::{AcpOnboardingModal, ClaudeCodeOnboardingModal};
@@ -2131,12 +2131,20 @@ impl AgentPanel {
menu
})
.separator()
.link(
"Add Other Agents",
OpenBrowser {
url: zed_urls::external_agents_docs(cx),
}
.boxed_clone(),
.item(
ContextMenuEntry::new("Add More Agents")
.icon(IconName::Plus)
.icon_color(Color::Muted)
.handler({
move |window, cx| {
window.dispatch_action(Box::new(zed_actions::Extensions {
category_filter: Some(
zed_actions::ExtensionCategoryFilter::AgentServers,
),
id: None,
}), cx)
}
}),
)
}))
}

@@ -12,7 +12,6 @@ mod context_strip;
mod inline_assistant;
mod inline_prompt_editor;
mod language_model_selector;
mod message_editor;
mod profile_selector;
mod slash_command;
mod slash_command_picker;
@@ -248,8 +247,6 @@ pub fn init(
is_eval: bool,
cx: &mut App,
) {
AgentSettings::register(cx);

assistant_text_thread::init(client.clone(), cx);
rules_library::init(cx);
if !is_eval {

@@ -1082,10 +1082,7 @@ mod tests {
};
use gpui::TestAppContext;
use indoc::indoc;
use language::{
Buffer, Language, LanguageConfig, LanguageMatcher, Point, language_settings,
tree_sitter_rust,
};
use language::{Buffer, Language, LanguageConfig, LanguageMatcher, Point, tree_sitter_rust};
use language_model::{LanguageModelRegistry, TokenUsage};
use rand::prelude::*;
use settings::SettingsStore;
@@ -1465,8 +1462,6 @@ mod tests {
fn init_test(cx: &mut TestAppContext) {
cx.update(LanguageModelRegistry::test);
cx.set_global(cx.update(SettingsStore::test));
cx.update(Project::init_settings);
cx.update(language_settings::init);
}

fn simulate_response_stream(

@@ -1075,8 +1075,6 @@ mod tests {
cx.update(|cx| {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
language::init(cx);
Project::init_settings(cx);
});
}


@@ -42,7 +42,7 @@ use super::{
|
||||
ContextPickerAction, ContextPickerEntry, ContextPickerMode, MentionLink, RecentEntry,
|
||||
available_context_picker_entries, recent_context_picker_entries_with_store, selection_ranges,
|
||||
};
|
||||
use crate::message_editor::ContextCreasesAddon;
|
||||
use crate::inline_prompt_editor::ContextCreasesAddon;
|
||||
|
||||
pub(crate) enum Match {
|
||||
File(FileMatch),
|
||||
@@ -1182,10 +1182,8 @@ mod tests {
|
||||
let app_state = cx.update(AppState::test);
|
||||
|
||||
cx.update(|cx| {
|
||||
language::init(cx);
|
||||
editor::init(cx);
|
||||
workspace::init(app_state.clone(), cx);
|
||||
Project::init_settings(cx);
|
||||
});
|
||||
|
||||
app_state
|
||||
@@ -1486,10 +1484,8 @@ mod tests {
|
||||
let app_state = cx.update(AppState::test);
|
||||
|
||||
cx.update(|cx| {
|
||||
language::init(cx);
|
||||
editor::init(cx);
|
||||
workspace::init(app_state.clone(), cx);
|
||||
Project::init_settings(cx);
|
||||
});
|
||||
|
||||
app_state
|
||||
@@ -1686,11 +1682,6 @@ mod tests {
|
||||
let store = SettingsStore::test(cx);
|
||||
cx.set_global(store);
|
||||
theme::init(theme::LoadThemes::JustBase, cx);
|
||||
client::init_settings(cx);
|
||||
language::init(cx);
|
||||
Project::init_settings(cx);
|
||||
workspace::init_settings(cx);
|
||||
editor::init_settings(cx);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
use crate::context_store::ContextStore;
|
||||
use agent::HistoryStore;
|
||||
use collections::VecDeque;
|
||||
use collections::{HashMap, VecDeque};
|
||||
use editor::actions::Paste;
|
||||
use editor::display_map::EditorMargins;
|
||||
use editor::display_map::{CreaseId, EditorMargins};
|
||||
use editor::{Addon, AnchorRangeExt as _};
|
||||
use editor::{
|
||||
ContextMenuOptions, Editor, EditorElement, EditorEvent, EditorMode, EditorStyle, MultiBuffer,
|
||||
actions::{MoveDown, MoveUp},
|
||||
@@ -17,6 +17,7 @@ use parking_lot::Mutex;
|
||||
use prompt_store::PromptStore;
|
||||
use settings::Settings;
|
||||
use std::cmp;
|
||||
use std::ops::Range;
|
||||
use std::rc::Rc;
|
||||
use std::sync::Arc;
|
||||
use theme::ThemeSettings;
|
||||
@@ -27,12 +28,15 @@ use zed_actions::agent::ToggleModelSelector;
|
||||
|
||||
use crate::agent_model_selector::AgentModelSelector;
|
||||
use crate::buffer_codegen::BufferCodegen;
|
||||
use crate::context_picker::{ContextPicker, ContextPickerCompletionProvider};
|
||||
use crate::context::{AgentContextHandle, AgentContextKey};
|
||||
use crate::context_picker::{ContextPicker, ContextPickerCompletionProvider, crease_for_mention};
|
||||
use crate::context_store::{ContextStore, ContextStoreEvent};
|
||||
use crate::context_strip::{ContextStrip, ContextStripEvent, SuggestContextKind};
|
||||
use crate::message_editor::{ContextCreasesAddon, extract_message_creases, insert_message_creases};
|
||||
use crate::terminal_codegen::TerminalCodegen;
|
||||
use crate::{CycleNextInlineAssist, CyclePreviousInlineAssist, ModelUsageContext};
|
||||
use crate::{RemoveAllContext, ToggleContextPicker};
|
||||
use crate::{
|
||||
CycleNextInlineAssist, CyclePreviousInlineAssist, ModelUsageContext, RemoveAllContext,
|
||||
ToggleContextPicker,
|
||||
};
|
||||
|
||||
pub struct PromptEditor<T> {
|
||||
pub editor: Entity<Editor>,
|
||||
@@ -1157,3 +1161,156 @@ impl GenerationMode {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Stored information that can be used to resurrect a context crease when creating an editor for a past message.
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct MessageCrease {
|
||||
pub range: Range<usize>,
|
||||
pub icon_path: SharedString,
|
||||
pub label: SharedString,
|
||||
/// None for a deserialized message, Some otherwise.
|
||||
pub context: Option<AgentContextHandle>,
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct ContextCreasesAddon {
|
||||
creases: HashMap<AgentContextKey, Vec<(CreaseId, SharedString)>>,
|
||||
_subscription: Option<Subscription>,
|
||||
}
|
||||
|
||||
impl Addon for ContextCreasesAddon {
|
||||
fn to_any(&self) -> &dyn std::any::Any {
|
||||
self
|
||||
}
|
||||
|
||||
fn to_any_mut(&mut self) -> Option<&mut dyn std::any::Any> {
|
||||
Some(self)
|
||||
}
|
||||
}
|
||||
|
||||
impl ContextCreasesAddon {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
creases: HashMap::default(),
|
||||
_subscription: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn add_creases(
|
||||
&mut self,
|
||||
context_store: &Entity<ContextStore>,
|
||||
key: AgentContextKey,
|
||||
creases: impl IntoIterator<Item = (CreaseId, SharedString)>,
|
||||
cx: &mut Context<Editor>,
|
||||
) {
|
||||
self.creases.entry(key).or_default().extend(creases);
|
||||
self._subscription = Some(
|
||||
cx.subscribe(context_store, |editor, _, event, cx| match event {
|
||||
ContextStoreEvent::ContextRemoved(key) => {
|
||||
let Some(this) = editor.addon_mut::<Self>() else {
|
||||
return;
|
||||
};
|
||||
let (crease_ids, replacement_texts): (Vec<_>, Vec<_>) = this
|
||||
.creases
|
||||
.remove(key)
|
||||
.unwrap_or_default()
|
||||
.into_iter()
|
||||
.unzip();
|
||||
let ranges = editor
|
||||
.remove_creases(crease_ids, cx)
|
||||
.into_iter()
|
||||
.map(|(_, range)| range)
|
||||
.collect::<Vec<_>>();
|
||||
editor.unfold_ranges(&ranges, false, false, cx);
|
||||
editor.edit(ranges.into_iter().zip(replacement_texts), cx);
|
||||
cx.notify();
|
||||
}
|
||||
}),
|
||||
)
|
||||
}
|
||||
|
||||
pub fn into_inner(self) -> HashMap<AgentContextKey, Vec<(CreaseId, SharedString)>> {
|
||||
self.creases
|
||||
}
|
||||
}
|
||||
|
||||
pub fn extract_message_creases(
|
||||
editor: &mut Editor,
|
||||
cx: &mut Context<'_, Editor>,
|
||||
) -> Vec<MessageCrease> {
|
||||
let buffer_snapshot = editor.buffer().read(cx).snapshot(cx);
|
||||
let mut contexts_by_crease_id = editor
|
||||
.addon_mut::<ContextCreasesAddon>()
|
||||
.map(std::mem::take)
|
||||
.unwrap_or_default()
|
||||
.into_inner()
|
||||
.into_iter()
|
||||
.flat_map(|(key, creases)| {
|
||||
let context = key.0;
|
||||
creases
|
||||
.into_iter()
|
||||
.map(move |(id, _)| (id, context.clone()))
|
||||
})
|
||||
.collect::<HashMap<_, _>>();
|
||||
// Filter the addon's list of creases based on what the editor reports,
|
||||
// since the addon might have removed creases in it.
|
||||
|
||||
editor.display_map.update(cx, |display_map, cx| {
|
||||
display_map
|
||||
.snapshot(cx)
|
||||
.crease_snapshot
|
||||
.creases()
|
||||
.filter_map(|(id, crease)| {
|
||||
Some((
|
||||
id,
|
||||
(
|
||||
crease.range().to_offset(&buffer_snapshot),
|
||||
crease.metadata()?.clone(),
|
||||
),
|
||||
))
|
||||
})
|
||||
.map(|(id, (range, metadata))| {
|
||||
let context = contexts_by_crease_id.remove(&id);
|
||||
MessageCrease {
|
||||
range,
|
||||
context,
|
||||
label: metadata.label,
|
||||
icon_path: metadata.icon_path,
|
||||
}
|
||||
})
|
||||
.collect()
|
||||
})
|
||||
}
|
||||
|
||||
pub fn insert_message_creases(
|
||||
editor: &mut Editor,
|
||||
message_creases: &[MessageCrease],
|
||||
context_store: &Entity<ContextStore>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<'_, Editor>,
|
||||
) {
|
||||
let buffer_snapshot = editor.buffer().read(cx).snapshot(cx);
|
||||
let creases = message_creases
|
||||
.iter()
|
||||
.map(|crease| {
|
||||
let start = buffer_snapshot.anchor_after(crease.range.start);
|
||||
let end = buffer_snapshot.anchor_before(crease.range.end);
|
||||
crease_for_mention(
|
||||
crease.label.clone(),
|
||||
crease.icon_path.clone(),
|
||||
start..end,
|
||||
cx.weak_entity(),
|
||||
)
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
let ids = editor.insert_creases(creases.clone(), cx);
|
||||
editor.fold_creases(creases, false, window, cx);
|
||||
if let Some(addon) = editor.addon_mut::<ContextCreasesAddon>() {
|
||||
for (crease, id) in message_creases.iter().zip(ids) {
|
||||
if let Some(context) = crease.context.as_ref() {
|
||||
let key = AgentContextKey(context.clone());
|
||||
addon.add_creases(context_store, key, vec![(id, crease.label.clone())], cx);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -19,14 +19,26 @@ pub type LanguageModelSelector = Picker<LanguageModelPickerDelegate>;
pub fn language_model_selector(
get_active_model: impl Fn(&App) -> Option<ConfiguredModel> + 'static,
on_model_changed: impl Fn(Arc<dyn LanguageModel>, &mut App) + 'static,
popover_styles: bool,
window: &mut Window,
cx: &mut Context<LanguageModelSelector>,
) -> LanguageModelSelector {
let delegate = LanguageModelPickerDelegate::new(get_active_model, on_model_changed, window, cx);
Picker::list(delegate, window, cx)
.show_scrollbar(true)
.width(rems(20.))
.max_height(Some(rems(20.).into()))
let delegate = LanguageModelPickerDelegate::new(
get_active_model,
on_model_changed,
popover_styles,
window,
cx,
);

if popover_styles {
Picker::list(delegate, window, cx)
.show_scrollbar(true)
.width(rems(20.))
.max_height(Some(rems(20.).into()))
} else {
Picker::list(delegate, window, cx).show_scrollbar(true)
}
}

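For context, the new `popover_styles` flag corresponds to the two call sites visible elsewhere in this diff: the agent panel's model dropdown passes `true` ("Use popover styles for picker"), while the profile "Configure Default Model" modal passes `false`. A hedged sketch of the two call shapes, with the surrounding setup (the `get_active_model` and `on_model_changed` closures, `window`, `cx`) elided:

```rust
// Popover-style picker with fixed width and max height, as used by the model dropdown.
let popover_picker = language_model_selector(get_active_model, on_model_changed, true, window, cx);

// Embedded picker without the popover sizing, as used inside the profiles modal.
let embedded_picker = language_model_selector(get_active_model, on_model_changed, false, window, cx);
```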
fn all_models(cx: &App) -> GroupedModels {
|
||||
@@ -75,12 +87,14 @@ pub struct LanguageModelPickerDelegate {
|
||||
selected_index: usize,
|
||||
_authenticate_all_providers_task: Task<()>,
|
||||
_subscriptions: Vec<Subscription>,
|
||||
popover_styles: bool,
|
||||
}
|
||||
|
||||
impl LanguageModelPickerDelegate {
|
||||
fn new(
|
||||
get_active_model: impl Fn(&App) -> Option<ConfiguredModel> + 'static,
|
||||
on_model_changed: impl Fn(Arc<dyn LanguageModel>, &mut App) + 'static,
|
||||
popover_styles: bool,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Picker<Self>>,
|
||||
) -> Self {
|
||||
@@ -113,6 +127,7 @@ impl LanguageModelPickerDelegate {
|
||||
}
|
||||
},
|
||||
)],
|
||||
popover_styles,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -177,7 +192,7 @@ impl LanguageModelPickerDelegate {
|
||||
}
|
||||
_ => {
|
||||
log::error!(
|
||||
"Failed to authenticate provider: {}: {err}",
|
||||
"Failed to authenticate provider: {}: {err:#}",
|
||||
provider_name.0
|
||||
);
|
||||
}
|
||||
@@ -530,6 +545,10 @@ impl PickerDelegate for LanguageModelPickerDelegate {
|
||||
_window: &mut Window,
|
||||
cx: &mut Context<Picker<Self>>,
|
||||
) -> Option<gpui::AnyElement> {
|
||||
if !self.popover_styles {
|
||||
return None;
|
||||
}
|
||||
|
||||
Some(
|
||||
h_flex()
|
||||
.w_full()
|
||||
|
||||
@@ -1,166 +0,0 @@
use std::ops::Range;

use collections::HashMap;
use editor::display_map::CreaseId;
use editor::{Addon, AnchorRangeExt, Editor};
use gpui::{Entity, Subscription};
use ui::prelude::*;

use crate::{
context::{AgentContextHandle, AgentContextKey},
context_picker::crease_for_mention,
context_store::{ContextStore, ContextStoreEvent},
};

[The remaining lines of this removed file are identical to the MessageCrease, ContextCreasesAddon, extract_message_creases, and insert_message_creases block added to inline_prompt_editor earlier in this diff; the code was moved there verbatim.]

@@ -15,8 +15,8 @@ use std::{
|
||||
sync::{Arc, atomic::AtomicBool},
|
||||
};
|
||||
use ui::{
|
||||
DocumentationAside, DocumentationEdge, DocumentationSide, HighlightedLabel, LabelSize,
|
||||
ListItem, ListItemSpacing, PopoverMenuHandle, TintColor, Tooltip, prelude::*,
|
||||
DocumentationAside, DocumentationEdge, DocumentationSide, HighlightedLabel, KeyBinding,
|
||||
LabelSize, ListItem, ListItemSpacing, PopoverMenuHandle, TintColor, Tooltip, prelude::*,
|
||||
};
|
||||
|
||||
/// Trait for types that can provide and manage agent profiles
|
||||
@@ -81,6 +81,7 @@ impl ProfileSelector {
|
||||
self.provider.clone(),
|
||||
self.profiles.clone(),
|
||||
cx.background_executor().clone(),
|
||||
self.focus_handle.clone(),
|
||||
cx,
|
||||
);
|
||||
|
||||
@@ -207,6 +208,7 @@ pub(crate) struct ProfilePickerDelegate {
|
||||
selected_index: usize,
|
||||
query: String,
|
||||
cancel: Option<Arc<AtomicBool>>,
|
||||
focus_handle: FocusHandle,
|
||||
}
|
||||
|
||||
impl ProfilePickerDelegate {
|
||||
@@ -215,6 +217,7 @@ impl ProfilePickerDelegate {
|
||||
provider: Arc<dyn ProfileProvider>,
|
||||
profiles: AvailableProfiles,
|
||||
background: BackgroundExecutor,
|
||||
focus_handle: FocusHandle,
|
||||
cx: &mut Context<ProfileSelector>,
|
||||
) -> Self {
|
||||
let candidates = Self::candidates_from(profiles);
|
||||
@@ -231,6 +234,7 @@ impl ProfilePickerDelegate {
|
||||
selected_index: 0,
|
||||
query: String::new(),
|
||||
cancel: None,
|
||||
focus_handle,
|
||||
};
|
||||
|
||||
this.selected_index = this
|
||||
@@ -594,20 +598,26 @@ impl PickerDelegate for ProfilePickerDelegate {
|
||||
_: &mut Window,
|
||||
cx: &mut Context<Picker<Self>>,
|
||||
) -> Option<gpui::AnyElement> {
|
||||
let focus_handle = self.focus_handle.clone();
|
||||
|
||||
Some(
|
||||
h_flex()
|
||||
.w_full()
|
||||
.border_t_1()
|
||||
.border_color(cx.theme().colors().border_variant)
|
||||
.p_1()
|
||||
.gap_4()
|
||||
.justify_between()
|
||||
.p_1p5()
|
||||
.child(
|
||||
Button::new("configure", "Configure")
|
||||
.icon(IconName::Settings)
|
||||
.icon_size(IconSize::Small)
|
||||
.icon_color(Color::Muted)
|
||||
.icon_position(IconPosition::Start)
|
||||
.full_width()
|
||||
.style(ButtonStyle::Outlined)
|
||||
.key_binding(
|
||||
KeyBinding::for_action_in(
|
||||
&ManageProfiles::default(),
|
||||
&focus_handle,
|
||||
cx,
|
||||
)
|
||||
.map(|kb| kb.size(rems_from_px(12.))),
|
||||
)
|
||||
.on_click(|_, window, cx| {
|
||||
window.dispatch_action(ManageProfiles::default().boxed_clone(), cx);
|
||||
}),
|
||||
@@ -659,20 +669,25 @@ mod tests {
|
||||
is_builtin: true,
|
||||
}];
|
||||
|
||||
let delegate = ProfilePickerDelegate {
|
||||
fs: FakeFs::new(cx.executor()),
|
||||
provider: Arc::new(TestProfileProvider::new(AgentProfileId("write".into()))),
|
||||
background: cx.executor(),
|
||||
candidates,
|
||||
string_candidates: Arc::new(Vec::new()),
|
||||
filtered_entries: Vec::new(),
|
||||
selected_index: 0,
|
||||
query: String::new(),
|
||||
cancel: None,
|
||||
};
|
||||
cx.update(|cx| {
|
||||
let focus_handle = cx.focus_handle();
|
||||
|
||||
let matches = Vec::new(); // No matches
|
||||
let _entries = delegate.entries_from_matches(matches);
|
||||
let delegate = ProfilePickerDelegate {
|
||||
fs: FakeFs::new(cx.background_executor().clone()),
|
||||
provider: Arc::new(TestProfileProvider::new(AgentProfileId("write".into()))),
|
||||
background: cx.background_executor().clone(),
|
||||
candidates,
|
||||
string_candidates: Arc::new(Vec::new()),
|
||||
filtered_entries: Vec::new(),
|
||||
selected_index: 0,
|
||||
query: String::new(),
|
||||
cancel: None,
|
||||
focus_handle,
|
||||
};
|
||||
|
||||
let matches = Vec::new(); // No matches
|
||||
let _entries = delegate.entries_from_matches(matches);
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
@@ -690,30 +705,35 @@ mod tests {
|
||||
},
|
||||
];
|
||||
|
||||
let delegate = ProfilePickerDelegate {
|
||||
fs: FakeFs::new(cx.executor()),
|
||||
provider: Arc::new(TestProfileProvider::new(AgentProfileId("write".into()))),
|
||||
background: cx.executor(),
|
||||
candidates,
|
||||
string_candidates: Arc::new(Vec::new()),
|
||||
filtered_entries: vec![
|
||||
ProfilePickerEntry::Profile(ProfileMatchEntry {
|
||||
candidate_index: 0,
|
||||
positions: Vec::new(),
|
||||
}),
|
||||
ProfilePickerEntry::Profile(ProfileMatchEntry {
|
||||
candidate_index: 1,
|
||||
positions: Vec::new(),
|
||||
}),
|
||||
],
|
||||
selected_index: 0,
|
||||
query: String::new(),
|
||||
cancel: None,
|
||||
};
|
||||
cx.update(|cx| {
|
||||
let focus_handle = cx.focus_handle();
|
||||
|
||||
// Active profile should be found at index 0
|
||||
let active_index = delegate.index_of_profile(&AgentProfileId("write".into()));
|
||||
assert_eq!(active_index, Some(0));
|
||||
let delegate = ProfilePickerDelegate {
|
||||
fs: FakeFs::new(cx.background_executor().clone()),
|
||||
provider: Arc::new(TestProfileProvider::new(AgentProfileId("write".into()))),
|
||||
background: cx.background_executor().clone(),
|
||||
candidates,
|
||||
string_candidates: Arc::new(Vec::new()),
|
||||
filtered_entries: vec![
|
||||
ProfilePickerEntry::Profile(ProfileMatchEntry {
|
||||
candidate_index: 0,
|
||||
positions: Vec::new(),
|
||||
}),
|
||||
ProfilePickerEntry::Profile(ProfileMatchEntry {
|
||||
candidate_index: 1,
|
||||
positions: Vec::new(),
|
||||
}),
|
||||
],
|
||||
selected_index: 0,
|
||||
query: String::new(),
|
||||
cancel: None,
|
||||
focus_handle,
|
||||
};
|
||||
|
||||
// Active profile should be found at index 0
|
||||
let active_index = delegate.index_of_profile(&AgentProfileId("write".into()));
|
||||
assert_eq!(active_index, Some(0));
|
||||
});
|
||||
}
|
||||
|
||||
struct TestProfileProvider {
|
||||
|
||||
@@ -314,6 +314,7 @@ impl TextThreadEditor {
|
||||
)
|
||||
});
|
||||
},
|
||||
true, // Use popover styles for picker
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
@@ -477,7 +478,7 @@ impl TextThreadEditor {
|
||||
editor.insert(&format!("/{name}"), window, cx);
|
||||
if command.accepts_arguments() {
|
||||
editor.insert(" ", window, cx);
|
||||
editor.show_completions(&ShowCompletions::default(), window, cx);
|
||||
editor.show_completions(&ShowCompletions, window, cx);
|
||||
}
|
||||
});
|
||||
});
|
||||
@@ -3223,11 +3224,7 @@ mod tests {
|
||||
prompt_store::init(cx);
|
||||
LanguageModelRegistry::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
language::init(cx);
|
||||
agent_settings::init(cx);
|
||||
Project::init_settings(cx);
|
||||
|
||||
theme::init(theme::LoadThemes::JustBase, cx);
|
||||
workspace::init_settings(cx);
|
||||
editor::init_settings(cx);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -577,8 +577,6 @@ mod test {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
// release_channel::init(SemanticVersion::default(), cx);
|
||||
language::init(cx);
|
||||
Project::init_settings(cx);
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@@ -22,7 +22,6 @@ use language_model::{
|
||||
};
|
||||
use parking_lot::Mutex;
|
||||
use pretty_assertions::assert_eq;
|
||||
use project::Project;
|
||||
use prompt_store::PromptBuilder;
|
||||
use rand::prelude::*;
|
||||
use serde_json::json;
|
||||
@@ -1411,9 +1410,6 @@ fn init_test(cx: &mut App) {
|
||||
prompt_store::init(cx);
|
||||
LanguageModelRegistry::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
language::init(cx);
|
||||
agent_settings::init(cx);
|
||||
Project::init_settings(cx);
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
|
||||
@@ -48,7 +48,6 @@ pub const LEGACY_CHANNEL_COUNT: NonZero<u16> = nz!(2);
pub const REPLAY_DURATION: Duration = Duration::from_secs(30);

pub fn init(cx: &mut App) {
AudioSettings::register(cx);
LIVE_SETTINGS.initialize(cx);
}


@@ -1,9 +1,9 @@
use std::sync::atomic::{AtomicBool, Ordering};

use gpui::App;
use settings::{Settings, SettingsStore};
use settings::{RegisterSetting, Settings, SettingsStore};

#[derive(Clone, Debug)]
#[derive(Clone, Debug, RegisterSetting)]
pub struct AudioSettings {
/// Opt into the new audio system.
///

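Judging from the hunk counts (`-48,7 +48,6` drops one line) together with the new `RegisterSetting` derive, the net effect here appears to be that the explicit registration call falls out of `init`. A sketch of the before and after under that assumption:

```rust
// Before: the setting type had to be registered explicitly at startup.
pub fn init(cx: &mut App) {
    AudioSettings::register(cx);
    LIVE_SETTINGS.initialize(cx);
}

// After (assumed): #[derive(RegisterSetting)] on AudioSettings handles registration,
// so init only initializes the live settings.
pub fn init(cx: &mut App) {
    LIVE_SETTINGS.initialize(cx);
}
```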
@@ -33,4 +33,9 @@ workspace.workspace = true
which.workspace = true

[dev-dependencies]
ctor.workspace = true
clock = { workspace = true, features = ["test-support"] }
futures.workspace = true
gpui = { workspace = true, features = ["test-support"] }
parking_lot.workspace = true
zlog.workspace = true

@@ -1,16 +1,15 @@
|
||||
use anyhow::{Context as _, Result};
|
||||
use client::{Client, TelemetrySettings};
|
||||
use db::RELEASE_CHANNEL;
|
||||
use client::Client;
|
||||
use db::kvp::KEY_VALUE_STORE;
|
||||
use gpui::{
|
||||
App, AppContext as _, AsyncApp, BackgroundExecutor, Context, Entity, Global, SemanticVersion,
|
||||
Task, Window, actions,
|
||||
};
|
||||
use http_client::{AsyncBody, HttpClient, HttpClientWithUrl};
|
||||
use http_client::{HttpClient, HttpClientWithUrl};
|
||||
use paths::remote_servers_dir;
|
||||
use release_channel::{AppCommitSha, ReleaseChannel};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::{Settings, SettingsStore};
|
||||
use settings::{RegisterSetting, Settings, SettingsStore};
|
||||
use smol::{fs, io::AsyncReadExt};
|
||||
use smol::{fs::File, process::Command};
|
||||
use std::mem;
|
||||
@@ -41,22 +40,23 @@ actions!(
|
||||
]
|
||||
);
|
||||
|
||||
#[derive(Serialize)]
|
||||
struct UpdateRequestBody {
|
||||
installation_id: Option<Arc<str>>,
|
||||
release_channel: Option<&'static str>,
|
||||
telemetry: bool,
|
||||
is_staff: Option<bool>,
|
||||
destination: &'static str,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub enum VersionCheckType {
|
||||
Sha(AppCommitSha),
|
||||
Semantic(SemanticVersion),
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
#[derive(Serialize, Debug)]
|
||||
pub struct AssetQuery<'a> {
|
||||
asset: &'a str,
|
||||
os: &'a str,
|
||||
arch: &'a str,
|
||||
metrics_id: Option<&'a str>,
|
||||
system_id: Option<&'a str>,
|
||||
is_staff: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub enum AutoUpdateStatus {
|
||||
Idle,
|
||||
Checking,
|
||||
@@ -66,6 +66,31 @@ pub enum AutoUpdateStatus {
|
||||
Errored { error: Arc<anyhow::Error> },
|
||||
}
|
||||
|
||||
impl PartialEq for AutoUpdateStatus {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
match (self, other) {
|
||||
(AutoUpdateStatus::Idle, AutoUpdateStatus::Idle) => true,
|
||||
(AutoUpdateStatus::Checking, AutoUpdateStatus::Checking) => true,
|
||||
(
|
||||
AutoUpdateStatus::Downloading { version: v1 },
|
||||
AutoUpdateStatus::Downloading { version: v2 },
|
||||
) => v1 == v2,
|
||||
(
|
||||
AutoUpdateStatus::Installing { version: v1 },
|
||||
AutoUpdateStatus::Installing { version: v2 },
|
||||
) => v1 == v2,
|
||||
(
|
||||
AutoUpdateStatus::Updated { version: v1 },
|
||||
AutoUpdateStatus::Updated { version: v2 },
|
||||
) => v1 == v2,
|
||||
(AutoUpdateStatus::Errored { error: e1 }, AutoUpdateStatus::Errored { error: e2 }) => {
|
||||
e1.to_string() == e2.to_string()
|
||||
}
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl AutoUpdateStatus {
|
||||
pub fn is_updated(&self) -> bool {
|
||||
matches!(self, Self::Updated { .. })
|
||||
@@ -75,13 +100,13 @@ impl AutoUpdateStatus {
|
||||
pub struct AutoUpdater {
|
||||
status: AutoUpdateStatus,
|
||||
current_version: SemanticVersion,
|
||||
http_client: Arc<HttpClientWithUrl>,
|
||||
client: Arc<Client>,
|
||||
pending_poll: Option<Task<Option<()>>>,
|
||||
quit_subscription: Option<gpui::Subscription>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Clone, Debug)]
|
||||
pub struct JsonRelease {
|
||||
#[derive(Deserialize, Serialize, Clone, Debug)]
|
||||
pub struct ReleaseAsset {
|
||||
pub version: String,
|
||||
pub url: String,
|
||||
}
|
||||
@@ -120,7 +145,7 @@ impl Drop for MacOsUnmounter<'_> {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug)]
|
||||
#[derive(Clone, Copy, Debug, RegisterSetting)]
|
||||
struct AutoUpdateSetting(bool);
|
||||
|
||||
/// Whether or not to automatically check for updates.
|
||||
@@ -137,9 +162,7 @@ struct GlobalAutoUpdate(Option<Entity<AutoUpdater>>);
|
||||
|
||||
impl Global for GlobalAutoUpdate {}
|
||||
|
||||
pub fn init(http_client: Arc<HttpClientWithUrl>, cx: &mut App) {
|
||||
AutoUpdateSetting::register(cx);
|
||||
|
||||
pub fn init(client: Arc<Client>, cx: &mut App) {
|
||||
cx.observe_new(|workspace: &mut Workspace, _window, _cx| {
|
||||
workspace.register_action(|_, action, window, cx| check(action, window, cx));
|
||||
|
||||
@@ -151,7 +174,7 @@ pub fn init(http_client: Arc<HttpClientWithUrl>, cx: &mut App) {
|
||||
|
||||
let version = release_channel::AppVersion::global(cx);
|
||||
let auto_updater = cx.new(|cx| {
|
||||
let updater = AutoUpdater::new(version, http_client, cx);
|
||||
let updater = AutoUpdater::new(version, client, cx);
|
||||
|
||||
let poll_for_updates = ReleaseChannel::try_global(cx)
|
||||
.map(|channel| channel.poll_for_updates())
|
||||
@@ -235,7 +258,7 @@ pub fn view_release_notes(_: &ViewReleaseNotes, cx: &mut App) -> Option<()> {
|
||||
let current_version = auto_updater.current_version;
|
||||
let release_channel = release_channel.dev_name();
|
||||
let path = format!("/releases/{release_channel}/{current_version}");
|
||||
let url = &auto_updater.http_client.build_url(&path);
|
||||
let url = &auto_updater.client.http_client().build_url(&path);
|
||||
cx.open_url(url);
|
||||
}
|
||||
ReleaseChannel::Nightly => {
|
||||
@@ -298,11 +321,7 @@ impl AutoUpdater {
|
||||
cx.default_global::<GlobalAutoUpdate>().0.clone()
|
||||
}
|
||||
|
||||
fn new(
|
||||
current_version: SemanticVersion,
|
||||
http_client: Arc<HttpClientWithUrl>,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
fn new(current_version: SemanticVersion, client: Arc<Client>, cx: &mut Context<Self>) -> Self {
|
||||
// On windows, executable files cannot be overwritten while they are
|
||||
// running, so we must wait to overwrite the application until quitting
|
||||
// or restarting. When quitting the app, we spawn the auto update helper
|
||||
@@ -323,7 +342,7 @@ impl AutoUpdater {
|
||||
Self {
|
||||
status: AutoUpdateStatus::Idle,
|
||||
current_version,
|
||||
http_client,
|
||||
client,
|
||||
pending_poll: None,
|
||||
quit_subscription,
|
||||
}
|
||||
@@ -356,7 +375,7 @@ impl AutoUpdater {
|
||||
cx.notify();
|
||||
|
||||
self.pending_poll = Some(cx.spawn(async move |this, cx| {
|
||||
let result = Self::update(this.upgrade()?, cx.clone()).await;
|
||||
let result = Self::update(this.upgrade()?, cx).await;
|
||||
this.update(cx, |this, cx| {
|
||||
this.pending_poll = None;
|
||||
if let Err(error) = result {
|
||||
@@ -402,10 +421,11 @@ impl AutoUpdater {
|
||||
// you can override this function. You should also update get_remote_server_release_url to return
|
||||
// Ok(None).
|
||||
pub async fn download_remote_server_release(
|
||||
os: &str,
|
||||
arch: &str,
|
||||
release_channel: ReleaseChannel,
|
||||
version: Option<SemanticVersion>,
|
||||
os: &str,
|
||||
arch: &str,
|
||||
set_status: impl Fn(&str, &mut AsyncApp) + Send + 'static,
|
||||
cx: &mut AsyncApp,
|
||||
) -> Result<PathBuf> {
|
||||
let this = cx.update(|cx| {
|
||||
@@ -415,13 +435,14 @@ impl AutoUpdater {
|
||||
.context("auto-update not initialized")
|
||||
})??;
|
||||
|
||||
let release = Self::get_release(
|
||||
set_status("Fetching remote server release", cx);
|
||||
let release = Self::get_release_asset(
|
||||
&this,
|
||||
release_channel,
|
||||
version,
|
||||
"zed-remote-server",
|
||||
os,
|
||||
arch,
|
||||
version,
|
||||
Some(release_channel),
|
||||
cx,
|
||||
)
|
||||
.await?;
|
||||
@@ -432,26 +453,27 @@ impl AutoUpdater {
|
||||
let version_path = platform_dir.join(format!("{}.gz", release.version));
|
||||
smol::fs::create_dir_all(&platform_dir).await.ok();
|
||||
|
||||
let client = this.read_with(cx, |this, _| this.http_client.clone())?;
|
||||
let client = this.read_with(cx, |this, _| this.client.http_client())?;
|
||||
|
||||
if smol::fs::metadata(&version_path).await.is_err() {
|
||||
log::info!(
|
||||
"downloading zed-remote-server {os} {arch} version {}",
|
||||
release.version
|
||||
);
|
||||
download_remote_server_binary(&version_path, release, client, cx).await?;
|
||||
set_status("Downloading remote server", cx);
|
||||
download_remote_server_binary(&version_path, release, client).await?;
|
||||
}
|
||||
|
||||
Ok(version_path)
|
||||
}
|
||||
|
||||
pub async fn get_remote_server_release_url(
|
||||
channel: ReleaseChannel,
|
||||
version: Option<SemanticVersion>,
|
||||
os: &str,
|
||||
arch: &str,
|
||||
release_channel: ReleaseChannel,
|
||||
version: Option<SemanticVersion>,
|
||||
cx: &mut AsyncApp,
|
||||
) -> Result<Option<(String, String)>> {
|
||||
) -> Result<Option<String>> {
|
||||
let this = cx.update(|cx| {
|
||||
cx.default_global::<GlobalAutoUpdate>()
|
||||
.0
|
||||
@@ -459,108 +481,99 @@ impl AutoUpdater {
|
||||
.context("auto-update not initialized")
|
||||
})??;
|
||||
|
||||
let release = Self::get_release(
|
||||
&this,
|
||||
"zed-remote-server",
|
||||
os,
|
||||
arch,
|
||||
version,
|
||||
Some(release_channel),
|
||||
cx,
|
||||
)
|
||||
.await?;
|
||||
let release =
|
||||
Self::get_release_asset(&this, channel, version, "zed-remote-server", os, arch, cx)
|
||||
.await?;
|
||||
|
||||
let update_request_body = build_remote_server_update_request_body(cx)?;
|
||||
let body = serde_json::to_string(&update_request_body)?;
|
||||
|
||||
Ok(Some((release.url, body)))
|
||||
Ok(Some(release.url))
|
||||
}
|
||||
|
||||
async fn get_release(
|
||||
async fn get_release_asset(
|
||||
this: &Entity<Self>,
|
||||
asset: &str,
|
||||
os: &str,
|
||||
arch: &str,
|
||||
release_channel: ReleaseChannel,
|
||||
version: Option<SemanticVersion>,
|
||||
release_channel: Option<ReleaseChannel>,
|
||||
cx: &mut AsyncApp,
|
||||
) -> Result<JsonRelease> {
|
||||
let client = this.read_with(cx, |this, _| this.http_client.clone())?;
|
||||
|
||||
if let Some(version) = version {
|
||||
let channel = release_channel.map(|c| c.dev_name()).unwrap_or("stable");
|
||||
|
||||
let url = format!("/api/releases/{channel}/{version}/{asset}-{os}-{arch}.gz?update=1",);
|
||||
|
||||
Ok(JsonRelease {
|
||||
version: version.to_string(),
|
||||
url: client.build_url(&url),
|
||||
})
|
||||
} else {
|
||||
let mut url_string = client.build_url(&format!(
|
||||
"/api/releases/latest?asset={}&os={}&arch={}",
|
||||
asset, os, arch
|
||||
));
|
||||
if let Some(param) = release_channel.and_then(|c| c.release_query_param()) {
|
||||
url_string += "&";
|
||||
url_string += param;
|
||||
}
|
||||
|
||||
let mut response = client.get(&url_string, Default::default(), true).await?;
|
||||
let mut body = Vec::new();
|
||||
response.body_mut().read_to_end(&mut body).await?;
|
||||
|
||||
anyhow::ensure!(
|
||||
response.status().is_success(),
|
||||
"failed to fetch release: {:?}",
|
||||
String::from_utf8_lossy(&body),
|
||||
);
|
||||
|
||||
serde_json::from_slice(body.as_slice()).with_context(|| {
|
||||
format!(
|
||||
"error deserializing release {:?}",
|
||||
String::from_utf8_lossy(&body),
|
||||
)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
async fn get_latest_release(
|
||||
this: &Entity<Self>,
|
||||
asset: &str,
|
||||
os: &str,
|
||||
arch: &str,
|
||||
release_channel: Option<ReleaseChannel>,
|
||||
cx: &mut AsyncApp,
|
||||
) -> Result<JsonRelease> {
|
||||
Self::get_release(this, asset, os, arch, None, release_channel, cx).await
|
||||
) -> Result<ReleaseAsset> {
|
||||
let client = this.read_with(cx, |this, _| this.client.clone())?;
|
||||
|
||||
let (system_id, metrics_id, is_staff) = if client.telemetry().metrics_enabled() {
|
||||
(
|
||||
client.telemetry().system_id(),
|
||||
client.telemetry().metrics_id(),
|
||||
client.telemetry().is_staff(),
|
||||
)
|
||||
} else {
|
||||
(None, None, None)
|
||||
};
|
||||
|
||||
let version = if let Some(version) = version {
|
||||
version.to_string()
|
||||
} else {
|
||||
"latest".to_string()
|
||||
};
|
||||
let http_client = client.http_client();
|
||||
|
||||
let path = format!("/releases/{}/{}/asset", release_channel.dev_name(), version,);
|
||||
let url = http_client.build_zed_cloud_url_with_query(
|
||||
&path,
|
||||
AssetQuery {
|
||||
os,
|
||||
arch,
|
||||
asset,
|
||||
metrics_id: metrics_id.as_deref(),
|
||||
system_id: system_id.as_deref(),
|
||||
is_staff: is_staff,
|
||||
},
|
||||
)?;
|
||||
|
||||
let mut response = http_client
|
||||
.get(url.as_str(), Default::default(), true)
|
||||
.await?;
|
||||
let mut body = Vec::new();
|
||||
response.body_mut().read_to_end(&mut body).await?;
|
||||
|
||||
anyhow::ensure!(
|
||||
response.status().is_success(),
|
||||
"failed to fetch release: {:?}",
|
||||
String::from_utf8_lossy(&body),
|
||||
);
|
||||
|
||||
serde_json::from_slice(body.as_slice()).with_context(|| {
|
||||
format!(
|
||||
"error deserializing release {:?}",
|
||||
String::from_utf8_lossy(&body),
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
async fn update(this: Entity<Self>, mut cx: AsyncApp) -> Result<()> {
|
||||
async fn update(this: Entity<Self>, cx: &mut AsyncApp) -> Result<()> {
|
||||
let (client, installed_version, previous_status, release_channel) =
|
||||
this.read_with(&cx, |this, cx| {
|
||||
this.read_with(cx, |this, cx| {
|
||||
(
|
||||
this.http_client.clone(),
|
||||
this.client.http_client(),
|
||||
this.current_version,
|
||||
this.status.clone(),
|
||||
ReleaseChannel::try_global(cx),
|
||||
ReleaseChannel::try_global(cx).unwrap_or(ReleaseChannel::Stable),
|
||||
)
|
||||
})?;
|
||||
|
||||
Self::check_dependencies()?;
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
this.status = AutoUpdateStatus::Checking;
|
||||
log::info!("Auto Update: checking for updates");
|
||||
cx.notify();
|
||||
})?;
|
||||
|
||||
let fetched_release_data =
|
||||
Self::get_latest_release(&this, "zed", OS, ARCH, release_channel, &mut cx).await?;
|
||||
Self::get_release_asset(&this, release_channel, None, "zed", OS, ARCH, cx).await?;
|
||||
let fetched_version = fetched_release_data.clone().version;
|
||||
let app_commit_sha = cx.update(|cx| AppCommitSha::try_global(cx).map(|sha| sha.full()));
|
||||
let newer_version = Self::check_if_fetched_version_is_newer(
|
||||
*RELEASE_CHANNEL,
|
||||
release_channel,
|
||||
app_commit_sha,
|
||||
installed_version,
|
||||
fetched_version,
|
||||
@@ -568,7 +581,7 @@ impl AutoUpdater {
|
||||
)?;
|
||||
|
||||
let Some(newer_version) = newer_version else {
|
||||
return this.update(&mut cx, |this, cx| {
|
||||
return this.update(cx, |this, cx| {
|
||||
let status = match previous_status {
|
||||
AutoUpdateStatus::Updated { .. } => previous_status,
|
||||
_ => AutoUpdateStatus::Idle,
|
||||
@@ -578,7 +591,7 @@ impl AutoUpdater {
|
||||
});
|
||||
};
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
this.status = AutoUpdateStatus::Downloading {
|
||||
version: newer_version.clone(),
|
||||
};
|
||||
@@ -587,21 +600,21 @@ impl AutoUpdater {
|
||||
|
||||
let installer_dir = InstallerDir::new().await?;
|
||||
let target_path = Self::target_path(&installer_dir).await?;
|
||||
download_release(&target_path, fetched_release_data, client, &cx).await?;
|
||||
download_release(&target_path, fetched_release_data, client).await?;
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
this.status = AutoUpdateStatus::Installing {
|
||||
version: newer_version.clone(),
|
||||
};
|
||||
cx.notify();
|
||||
})?;
|
||||
|
||||
let new_binary_path = Self::install_release(installer_dir, target_path, &cx).await?;
|
||||
let new_binary_path = Self::install_release(installer_dir, target_path, cx).await?;
|
||||
if let Some(new_binary_path) = new_binary_path {
|
||||
cx.update(|cx| cx.set_restart_path(new_binary_path))?;
|
||||
}
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
this.set_should_show_update_notification(true, cx)
|
||||
.detach_and_log_err(cx);
|
||||
this.status = AutoUpdateStatus::Updated {
|
||||
@@ -680,6 +693,12 @@ impl AutoUpdater {
|
||||
target_path: PathBuf,
|
||||
cx: &AsyncApp,
|
||||
) -> Result<Option<PathBuf>> {
|
||||
#[cfg(test)]
|
||||
if let Some(test_install) =
|
||||
cx.try_read_global::<tests::InstallOverride, _>(|g, _| g.0.clone())
|
||||
{
|
||||
return test_install(target_path, cx);
|
||||
}
|
||||
match OS {
|
||||
"macos" => install_release_macos(&installer_dir, target_path, cx).await,
|
||||
"linux" => install_release_linux(&installer_dir, target_path, cx).await,
|
||||
@@ -730,16 +749,13 @@ impl AutoUpdater {
|
||||
|
||||
async fn download_remote_server_binary(
|
||||
target_path: &PathBuf,
|
||||
release: JsonRelease,
|
||||
release: ReleaseAsset,
|
||||
client: Arc<HttpClientWithUrl>,
|
||||
cx: &AsyncApp,
|
||||
) -> Result<()> {
|
||||
let temp = tempfile::Builder::new().tempfile_in(remote_servers_dir())?;
|
||||
let mut temp_file = File::create(&temp).await?;
|
||||
let update_request_body = build_remote_server_update_request_body(cx)?;
|
||||
let request_body = AsyncBody::from(serde_json::to_string(&update_request_body)?);
|
||||
|
||||
let mut response = client.get(&release.url, request_body, true).await?;
|
||||
let mut response = client.get(&release.url, Default::default(), true).await?;
|
||||
anyhow::ensure!(
|
||||
response.status().is_success(),
|
||||
"failed to download remote server release: {:?}",
|
||||
@@ -751,65 +767,19 @@ async fn download_remote_server_binary(
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn build_remote_server_update_request_body(cx: &AsyncApp) -> Result<UpdateRequestBody> {
|
||||
let (installation_id, release_channel, telemetry_enabled, is_staff) = cx.update(|cx| {
|
||||
let telemetry = Client::global(cx).telemetry().clone();
|
||||
let is_staff = telemetry.is_staff();
|
||||
let installation_id = telemetry.installation_id();
|
||||
let release_channel =
|
||||
ReleaseChannel::try_global(cx).map(|release_channel| release_channel.display_name());
|
||||
let telemetry_enabled = TelemetrySettings::get_global(cx).metrics;
|
||||
|
||||
(
|
||||
installation_id,
|
||||
release_channel,
|
||||
telemetry_enabled,
|
||||
is_staff,
|
||||
)
|
||||
})?;
|
||||
|
||||
Ok(UpdateRequestBody {
|
||||
installation_id,
|
||||
release_channel,
|
||||
telemetry: telemetry_enabled,
|
||||
is_staff,
|
||||
destination: "remote",
|
||||
})
|
||||
}
|
||||
|
||||
async fn download_release(
|
||||
target_path: &Path,
|
||||
release: JsonRelease,
|
||||
release: ReleaseAsset,
|
||||
client: Arc<HttpClientWithUrl>,
|
||||
cx: &AsyncApp,
|
||||
) -> Result<()> {
|
||||
let mut target_file = File::create(&target_path).await?;
|
||||
|
||||
let (installation_id, release_channel, telemetry_enabled, is_staff) = cx.update(|cx| {
|
||||
let telemetry = Client::global(cx).telemetry().clone();
|
||||
let is_staff = telemetry.is_staff();
|
||||
let installation_id = telemetry.installation_id();
|
||||
let release_channel =
|
||||
ReleaseChannel::try_global(cx).map(|release_channel| release_channel.display_name());
|
||||
let telemetry_enabled = TelemetrySettings::get_global(cx).metrics;
|
||||
|
||||
(
|
||||
installation_id,
|
||||
release_channel,
|
||||
telemetry_enabled,
|
||||
is_staff,
|
||||
)
|
||||
})?;
|
||||
|
||||
let request_body = AsyncBody::from(serde_json::to_string(&UpdateRequestBody {
|
||||
installation_id,
|
||||
release_channel,
|
||||
telemetry: telemetry_enabled,
|
||||
is_staff,
|
||||
destination: "local",
|
||||
})?);
|
||||
|
||||
let mut response = client.get(&release.url, request_body, true).await?;
|
||||
let mut response = client.get(&release.url, Default::default(), true).await?;
|
||||
anyhow::ensure!(
|
||||
response.status().is_success(),
|
||||
"failed to download update: {:?}",
|
||||
response.status()
|
||||
);
|
||||
smol::io::copy(response.body_mut(), &mut target_file).await?;
|
||||
log::info!("downloaded update. path:{:?}", target_path);
|
||||
|
||||
@@ -935,26 +905,15 @@ async fn install_release_macos(
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
async fn cleanup_windows() -> Result<()> {
|
||||
use util::ResultExt;
|
||||
|
||||
let parent = std::env::current_exe()?
|
||||
.parent()
|
||||
.context("No parent dir for Zed.exe")?
|
||||
.to_owned();
|
||||
|
||||
// keep in sync with crates/auto_update_helper/src/updater.rs
|
||||
smol::fs::remove_dir(parent.join("updates"))
|
||||
.await
|
||||
.context("failed to remove updates dir")
|
||||
.log_err();
|
||||
smol::fs::remove_dir(parent.join("install"))
|
||||
.await
|
||||
.context("failed to remove install dir")
|
||||
.log_err();
|
||||
smol::fs::remove_dir(parent.join("old"))
|
||||
.await
|
||||
.context("failed to remove old version dir")
|
||||
.log_err();
|
||||
_ = smol::fs::remove_dir(parent.join("updates")).await;
|
||||
_ = smol::fs::remove_dir(parent.join("install")).await;
|
||||
_ = smol::fs::remove_dir(parent.join("old")).await;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
@@ -1009,11 +968,33 @@ pub async fn finalize_auto_update_on_quit() {
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use client::Client;
|
||||
use clock::FakeSystemClock;
|
||||
use futures::channel::oneshot;
|
||||
use gpui::TestAppContext;
|
||||
use http_client::{FakeHttpClient, Response};
|
||||
use settings::default_settings;
|
||||
use std::{
|
||||
rc::Rc,
|
||||
sync::{
|
||||
Arc,
|
||||
atomic::{self, AtomicBool},
|
||||
},
|
||||
};
|
||||
use tempfile::tempdir;
|
||||
|
||||
#[ctor::ctor]
|
||||
fn init_logger() {
|
||||
zlog::init_test();
|
||||
}
|
||||
|
||||
use super::*;
|
||||
|
||||
pub(super) struct InstallOverride(
|
||||
pub Rc<dyn Fn(PathBuf, &AsyncApp) -> Result<Option<PathBuf>>>,
|
||||
);
|
||||
impl Global for InstallOverride {}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_auto_update_defaults_to_true(cx: &mut TestAppContext) {
|
||||
cx.update(|cx| {
|
||||
@@ -1025,11 +1006,119 @@ mod tests {
|
||||
.set_user_settings("{}", cx)
|
||||
.expect("Unable to set user settings");
|
||||
cx.set_global(store);
|
||||
AutoUpdateSetting::register(cx);
|
||||
assert!(AutoUpdateSetting::get_global(cx).0);
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_auto_update_downloads(cx: &mut TestAppContext) {
|
||||
cx.background_executor.allow_parking();
|
||||
zlog::init_test();
|
||||
let release_available = Arc::new(AtomicBool::new(false));
|
||||
|
||||
let (dmg_tx, dmg_rx) = oneshot::channel::<String>();
|
||||
|
||||
cx.update(|cx| {
|
||||
settings::init(cx);
|
||||
|
||||
let current_version = SemanticVersion::new(0, 100, 0);
|
||||
release_channel::init_test(current_version, ReleaseChannel::Stable, cx);
|
||||
|
||||
let clock = Arc::new(FakeSystemClock::new());
|
||||
let release_available = Arc::clone(&release_available);
|
||||
let dmg_rx = Arc::new(parking_lot::Mutex::new(Some(dmg_rx)));
|
||||
let fake_client_http = FakeHttpClient::create(move |req| {
|
||||
let release_available = release_available.load(atomic::Ordering::Relaxed);
|
||||
let dmg_rx = dmg_rx.clone();
|
||||
async move {
|
||||
if req.uri().path() == "/releases/stable/latest/asset" {
|
||||
if release_available {
|
||||
return Ok(Response::builder().status(200).body(
|
||||
r#"{"version":"0.100.1","url":"https://test.example/new-download"}"#.into()
|
||||
).unwrap());
|
||||
} else {
|
||||
return Ok(Response::builder().status(200).body(
|
||||
r#"{"version":"0.100.0","url":"https://test.example/old-download"}"#.into()
|
||||
).unwrap());
|
||||
}
|
||||
} else if req.uri().path() == "/new-download" {
|
||||
return Ok(Response::builder().status(200).body({
|
||||
let dmg_rx = dmg_rx.lock().take().unwrap();
|
||||
dmg_rx.await.unwrap().into()
|
||||
}).unwrap());
|
||||
}
|
||||
Ok(Response::builder().status(404).body("".into()).unwrap())
|
||||
}
|
||||
});
|
||||
let client = Client::new(clock, fake_client_http, cx);
|
||||
crate::init(client, cx);
|
||||
});
|
||||
|
||||
let auto_updater = cx.update(|cx| AutoUpdater::get(cx).expect("auto updater should exist"));
|
||||
|
||||
cx.background_executor.run_until_parked();
|
||||
|
||||
auto_updater.read_with(cx, |updater, _| {
|
||||
assert_eq!(updater.status(), AutoUpdateStatus::Idle);
|
||||
assert_eq!(updater.current_version(), SemanticVersion::new(0, 100, 0));
|
||||
});
|
||||
|
||||
release_available.store(true, atomic::Ordering::SeqCst);
|
||||
cx.background_executor.advance_clock(POLL_INTERVAL);
|
||||
cx.background_executor.run_until_parked();
|
||||
|
||||
loop {
|
||||
cx.background_executor.timer(Duration::from_millis(0)).await;
|
||||
cx.run_until_parked();
|
||||
let status = auto_updater.read_with(cx, |updater, _| updater.status());
|
||||
if !matches!(status, AutoUpdateStatus::Idle) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
let status = auto_updater.read_with(cx, |updater, _| updater.status());
|
||||
assert_eq!(
|
||||
status,
|
||||
AutoUpdateStatus::Downloading {
|
||||
version: VersionCheckType::Semantic(SemanticVersion::new(0, 100, 1))
|
||||
}
|
||||
);
|
||||
|
||||
dmg_tx.send("<fake-zed-update>".to_owned()).unwrap();
|
||||
|
||||
let tmp_dir = Arc::new(tempdir().unwrap());
|
||||
|
||||
cx.update(|cx| {
|
||||
let tmp_dir = tmp_dir.clone();
|
||||
cx.set_global(InstallOverride(Rc::new(move |target_path, _cx| {
|
||||
let tmp_dir = tmp_dir.clone();
|
||||
let dest_path = tmp_dir.path().join("zed");
|
||||
std::fs::copy(&target_path, &dest_path)?;
|
||||
Ok(Some(dest_path))
|
||||
})));
|
||||
});
|
||||
|
||||
loop {
|
||||
cx.background_executor.timer(Duration::from_millis(0)).await;
|
||||
cx.run_until_parked();
|
||||
let status = auto_updater.read_with(cx, |updater, _| updater.status());
|
||||
if !matches!(status, AutoUpdateStatus::Downloading { .. }) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
let status = auto_updater.read_with(cx, |updater, _| updater.status());
|
||||
assert_eq!(
|
||||
status,
|
||||
AutoUpdateStatus::Updated {
|
||||
version: VersionCheckType::Semantic(SemanticVersion::new(0, 100, 1))
|
||||
}
|
||||
);
|
||||
let will_restart = cx.expect_restart();
|
||||
cx.update(|cx| cx.restart());
|
||||
let path = will_restart.await.unwrap().unwrap();
|
||||
assert_eq!(path, tmp_dir.path().join("zed"));
|
||||
assert_eq!(std::fs::read_to_string(path).unwrap(), "<fake-zed-update>");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_stable_does_not_update_when_fetched_version_is_not_higher() {
|
||||
let release_channel = ReleaseChannel::Stable;
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
use std::{
|
||||
cell::LazyCell,
|
||||
path::Path,
|
||||
sync::LazyLock,
|
||||
time::{Duration, Instant},
|
||||
};
|
||||
|
||||
@@ -13,8 +13,8 @@ use windows::Win32::{
|
||||
use crate::windows_impl::WM_JOB_UPDATED;
|
||||
|
||||
pub(crate) struct Job {
|
||||
pub apply: Box<dyn Fn(&Path) -> Result<()>>,
|
||||
pub rollback: Box<dyn Fn(&Path) -> Result<()>>,
|
||||
pub apply: Box<dyn Fn(&Path) -> Result<()> + Send + Sync>,
|
||||
pub rollback: Box<dyn Fn(&Path) -> Result<()> + Send + Sync>,
|
||||
}
|
||||
|
||||
impl Job {
|
||||
@@ -154,10 +154,8 @@ impl Job {
|
||||
}
|
||||
}
|
||||
|
||||
// app is single threaded
|
||||
#[cfg(not(test))]
|
||||
#[allow(clippy::declare_interior_mutable_const)]
|
||||
pub(crate) const JOBS: LazyCell<[Job; 22]> = LazyCell::new(|| {
|
||||
pub(crate) static JOBS: LazyLock<[Job; 22]> = LazyLock::new(|| {
|
||||
fn p(value: &str) -> &Path {
|
||||
Path::new(value)
|
||||
}
|
||||
@@ -206,10 +204,8 @@ pub(crate) const JOBS: LazyCell<[Job; 22]> = LazyCell::new(|| {
|
||||
]
|
||||
});
|
||||
|
||||
// app is single threaded
|
||||
#[cfg(test)]
|
||||
#[allow(clippy::declare_interior_mutable_const)]
|
||||
pub(crate) const JOBS: LazyCell<[Job; 9]> = LazyCell::new(|| {
|
||||
pub(crate) static JOBS: LazyLock<[Job; 9]> = LazyLock::new(|| {
|
||||
fn p(value: &str) -> &Path {
|
||||
Path::new(value)
|
||||
}
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
pub mod participant;
|
||||
pub mod room;
|
||||
|
||||
use crate::call_settings::CallSettings;
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use audio::Audio;
|
||||
use client::{ChannelId, Client, TypedEnvelope, User, UserStore, ZED_ALWAYS_ACTIVE, proto};
|
||||
@@ -14,7 +13,6 @@ use gpui::{
|
||||
use postage::watch;
|
||||
use project::Project;
|
||||
use room::Event;
|
||||
use settings::Settings;
|
||||
use std::sync::Arc;
|
||||
|
||||
pub use livekit_client::{RemoteVideoTrack, RemoteVideoTrackView, RemoteVideoTrackViewEvent};
|
||||
@@ -26,8 +24,6 @@ struct GlobalActiveCall(Entity<ActiveCall>);
|
||||
impl Global for GlobalActiveCall {}
|
||||
|
||||
pub fn init(client: Arc<Client>, user_store: Entity<UserStore>, cx: &mut App) {
|
||||
CallSettings::register(cx);
|
||||
|
||||
let active_call = cx.new(|cx| ActiveCall::new(client, user_store, cx));
|
||||
cx.set_global(GlobalActiveCall(active_call));
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
use settings::Settings;
|
||||
use settings::{RegisterSetting, Settings};
|
||||
|
||||
#[derive(Debug)]
|
||||
#[derive(Debug, RegisterSetting)]
|
||||
pub struct CallSettings {
|
||||
pub mute_on_join: bool,
|
||||
pub share_on_join: bool,
|
||||
|
||||
@@ -237,7 +237,6 @@ fn init_test(cx: &mut App) -> Entity<ChannelStore> {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
release_channel::init(SemanticVersion::default(), cx);
|
||||
client::init_settings(cx);
|
||||
|
||||
let clock = Arc::new(FakeSystemClock::new());
|
||||
let http = FakeHttpClient::with_404_response();
|
||||
|
||||
@@ -30,7 +30,7 @@ use rand::prelude::*;
|
||||
use release_channel::{AppVersion, ReleaseChannel};
|
||||
use rpc::proto::{AnyTypedEnvelope, EnvelopedMessage, PeerId, RequestMessage};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::{Settings, SettingsContent};
|
||||
use settings::{RegisterSetting, Settings, SettingsContent};
|
||||
use std::{
|
||||
any::TypeId,
|
||||
convert::TryFrom,
|
||||
@@ -95,7 +95,7 @@ actions!(
|
||||
]
|
||||
);
|
||||
|
||||
#[derive(Deserialize)]
|
||||
#[derive(Deserialize, RegisterSetting)]
|
||||
pub struct ClientSettings {
|
||||
pub server_url: String,
|
||||
}
|
||||
@@ -113,7 +113,7 @@ impl Settings for ClientSettings {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Default)]
|
||||
#[derive(Deserialize, Default, RegisterSetting)]
|
||||
pub struct ProxySettings {
|
||||
pub proxy: Option<String>,
|
||||
}
|
||||
@@ -140,12 +140,6 @@ impl Settings for ProxySettings {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn init_settings(cx: &mut App) {
|
||||
TelemetrySettings::register(cx);
|
||||
ClientSettings::register(cx);
|
||||
ProxySettings::register(cx);
|
||||
}
|
||||
|
||||
pub fn init(client: &Arc<Client>, cx: &mut App) {
|
||||
let client = Arc::downgrade(client);
|
||||
cx.on_action({
|
||||
@@ -508,7 +502,7 @@ impl<T: 'static> Drop for PendingEntitySubscription<T> {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Deserialize, Debug)]
|
||||
#[derive(Copy, Clone, Deserialize, Debug, RegisterSetting)]
|
||||
pub struct TelemetrySettings {
|
||||
pub diagnostics: bool,
|
||||
pub metrics: bool,
|
||||
@@ -1493,7 +1487,7 @@ impl Client {
|
||||
|
||||
let url = self
|
||||
.http
|
||||
.build_zed_cloud_url("/internal/users/impersonate", &[])?;
|
||||
.build_zed_cloud_url("/internal/users/impersonate")?;
|
||||
let request = Request::post(url.as_str())
|
||||
.header("Content-Type", "application/json")
|
||||
.header("Authorization", format!("Bearer {api_token}"))
|
||||
@@ -2177,7 +2171,6 @@ mod tests {
|
||||
cx.update(|cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
cx.set_global(settings_store);
|
||||
init_settings(cx);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -179,8 +179,6 @@ impl Telemetry {
|
||||
let release_channel =
|
||||
ReleaseChannel::try_global(cx).map(|release_channel| release_channel.display_name());
|
||||
|
||||
TelemetrySettings::register(cx);
|
||||
|
||||
let state = Arc::new(Mutex::new(TelemetryState {
|
||||
settings: *TelemetrySettings::get_global(cx),
|
||||
architecture: env::consts::ARCH,
|
||||
@@ -437,7 +435,7 @@ impl Telemetry {
|
||||
Some(project_types)
|
||||
}
|
||||
|
||||
fn report_event(self: &Arc<Self>, event: Event) {
|
||||
fn report_event(self: &Arc<Self>, mut event: Event) {
|
||||
let mut state = self.state.lock();
|
||||
// RUST_LOG=telemetry=trace to debug telemetry events
|
||||
log::trace!(target: "telemetry", "{:?}", event);
|
||||
@@ -446,6 +444,12 @@ impl Telemetry {
|
||||
return;
|
||||
}
|
||||
|
||||
match &mut event {
|
||||
Event::Flexible(event) => event
|
||||
.event_properties
|
||||
.insert("event_source".into(), "zed".into()),
|
||||
};
|
||||
|
||||
if state.flush_events_task.is_none() {
|
||||
let this = self.clone();
|
||||
state.flush_events_task = Some(self.executor.spawn(async move {
|
||||
|
||||
@@ -260,7 +260,7 @@ impl fmt::Debug for Lamport {
|
||||
impl fmt::Debug for Global {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
write!(f, "Global {{")?;
|
||||
for timestamp in self.iter() {
|
||||
for timestamp in self.iter().filter(|t| t.value > 0) {
|
||||
if timestamp.replica_id.0 > 0 {
|
||||
write!(f, ", ")?;
|
||||
}
|
||||
|
||||
@@ -62,7 +62,7 @@ impl CloudApiClient {
|
||||
let request = self.build_request(
|
||||
Request::builder().method(Method::GET).uri(
|
||||
self.http_client
|
||||
.build_zed_cloud_url("/client/users/me", &[])?
|
||||
.build_zed_cloud_url("/client/users/me")?
|
||||
.as_ref(),
|
||||
),
|
||||
AsyncBody::default(),
|
||||
@@ -89,7 +89,7 @@ impl CloudApiClient {
|
||||
pub fn connect(&self, cx: &App) -> Result<Task<Result<Connection>>> {
|
||||
let mut connect_url = self
|
||||
.http_client
|
||||
.build_zed_cloud_url("/client/users/connect", &[])?;
|
||||
.build_zed_cloud_url("/client/users/connect")?;
|
||||
connect_url
|
||||
.set_scheme(match connect_url.scheme() {
|
||||
"https" => "wss",
|
||||
@@ -123,7 +123,7 @@ impl CloudApiClient {
|
||||
.method(Method::POST)
|
||||
.uri(
|
||||
self.http_client
|
||||
.build_zed_cloud_url("/client/llm_tokens", &[])?
|
||||
.build_zed_cloud_url("/client/llm_tokens")?
|
||||
.as_ref(),
|
||||
)
|
||||
.when_some(system_id, |builder, system_id| {
|
||||
@@ -154,7 +154,7 @@ impl CloudApiClient {
|
||||
let request = build_request(
|
||||
Request::builder().method(Method::GET).uri(
|
||||
self.http_client
|
||||
.build_zed_cloud_url("/client/users/me", &[])?
|
||||
.build_zed_cloud_url("/client/users/me")?
|
||||
.as_ref(),
|
||||
),
|
||||
AsyncBody::default(),
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
pub mod predict_edits_v3;
|
||||
pub mod udiff;
|
||||
|
||||
use std::str::FromStr;
|
||||
use std::sync::Arc;
|
||||
@@ -184,13 +183,13 @@ pub struct PredictEditsGitInfo {
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct PredictEditsResponse {
|
||||
pub request_id: Uuid,
|
||||
pub request_id: String,
|
||||
pub output_excerpt: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct AcceptEditPredictionBody {
|
||||
pub request_id: Uuid,
|
||||
pub request_id: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, Serialize, Deserialize)]
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
use chrono::Duration;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{
|
||||
fmt::Display,
|
||||
fmt::{Display, Write as _},
|
||||
ops::{Add, Range, Sub},
|
||||
path::{Path, PathBuf},
|
||||
sync::Arc,
|
||||
@@ -11,7 +11,14 @@ use uuid::Uuid;
|
||||
|
||||
use crate::PredictEditsGitInfo;
|
||||
|
||||
// TODO: snippet ordering within file / relative to excerpt
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct PlanContextRetrievalRequest {
|
||||
pub excerpt: String,
|
||||
pub excerpt_path: Arc<Path>,
|
||||
pub excerpt_line_range: Range<Line>,
|
||||
pub cursor_file_max_row: Line,
|
||||
pub events: Vec<Event>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct PredictEditsRequest {
|
||||
@@ -66,6 +73,7 @@ pub enum PromptFormat {
|
||||
MarkedExcerpt,
|
||||
LabeledSections,
|
||||
NumLinesUniDiff,
|
||||
OldTextNewText,
|
||||
/// Prompt format intended for use via zeta_cli
|
||||
OnlySnippets,
|
||||
}
|
||||
@@ -93,6 +101,7 @@ impl std::fmt::Display for PromptFormat {
|
||||
PromptFormat::LabeledSections => write!(f, "Labeled Sections"),
|
||||
PromptFormat::OnlySnippets => write!(f, "Only Snippets"),
|
||||
PromptFormat::NumLinesUniDiff => write!(f, "Numbered Lines / Unified Diff"),
|
||||
PromptFormat::OldTextNewText => write!(f, "Old Text / New Text"),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -125,15 +134,15 @@ impl Display for Event {
|
||||
write!(
|
||||
f,
|
||||
"// User accepted prediction:\n--- a/{}\n+++ b/{}\n{diff}",
|
||||
old_path.display(),
|
||||
new_path.display()
|
||||
DiffPathFmt(old_path),
|
||||
DiffPathFmt(new_path)
|
||||
)
|
||||
} else {
|
||||
write!(
|
||||
f,
|
||||
"--- a/{}\n+++ b/{}\n{diff}",
|
||||
old_path.display(),
|
||||
new_path.display()
|
||||
DiffPathFmt(old_path),
|
||||
DiffPathFmt(new_path)
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -141,6 +150,24 @@ impl Display for Event {
|
||||
}
|
||||
}
|
||||
|
||||
/// always format the Path as a unix path with `/` as the path sep in Diffs
|
||||
pub struct DiffPathFmt<'a>(pub &'a Path);
|
||||
|
||||
impl<'a> std::fmt::Display for DiffPathFmt<'a> {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
let mut is_first = true;
|
||||
for component in self.0.components() {
|
||||
if !is_first {
|
||||
f.write_char('/')?;
|
||||
} else {
|
||||
is_first = false;
|
||||
}
|
||||
write!(f, "{}", component.as_os_str().display())?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct Signature {
|
||||
pub text: String,
|
||||
|
||||
@@ -1,294 +0,0 @@
|
||||
use std::{borrow::Cow, fmt::Display};
|
||||
|
||||
#[derive(Debug, PartialEq)]
|
||||
pub enum DiffLine<'a> {
|
||||
OldPath { path: Cow<'a, str> },
|
||||
NewPath { path: Cow<'a, str> },
|
||||
HunkHeader(Option<HunkLocation>),
|
||||
Context(&'a str),
|
||||
Deletion(&'a str),
|
||||
Addition(&'a str),
|
||||
Garbage(&'a str),
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq)]
|
||||
pub struct HunkLocation {
|
||||
start_line_old: u32,
|
||||
count_old: u32,
|
||||
start_line_new: u32,
|
||||
count_new: u32,
|
||||
}
|
||||
|
||||
impl<'a> DiffLine<'a> {
|
||||
pub fn parse(line: &'a str) -> Self {
|
||||
Self::try_parse(line).unwrap_or(Self::Garbage(line))
|
||||
}
|
||||
|
||||
fn try_parse(line: &'a str) -> Option<Self> {
|
||||
if let Some(header) = line.strip_prefix("---").and_then(eat_required_whitespace) {
|
||||
let path = parse_header_path("a/", header);
|
||||
Some(Self::OldPath { path })
|
||||
} else if let Some(header) = line.strip_prefix("+++").and_then(eat_required_whitespace) {
|
||||
Some(Self::NewPath {
|
||||
path: parse_header_path("b/", header),
|
||||
})
|
||||
} else if let Some(header) = line.strip_prefix("@@").and_then(eat_required_whitespace) {
|
||||
if header.starts_with("...") {
|
||||
return Some(Self::HunkHeader(None));
|
||||
}
|
||||
|
||||
let (start_line_old, header) = header.strip_prefix('-')?.split_once(',')?;
|
||||
let mut parts = header.split_ascii_whitespace();
|
||||
let count_old = parts.next()?;
|
||||
let (start_line_new, count_new) = parts.next()?.strip_prefix('+')?.split_once(',')?;
|
||||
|
||||
Some(Self::HunkHeader(Some(HunkLocation {
|
||||
start_line_old: start_line_old.parse::<u32>().ok()?.saturating_sub(1),
|
||||
count_old: count_old.parse().ok()?,
|
||||
start_line_new: start_line_new.parse::<u32>().ok()?.saturating_sub(1),
|
||||
count_new: count_new.parse().ok()?,
|
||||
})))
|
||||
} else if let Some(deleted_header) = line.strip_prefix("-") {
|
||||
Some(Self::Deletion(deleted_header))
|
||||
} else if line.is_empty() {
|
||||
Some(Self::Context(""))
|
||||
} else if let Some(context) = line.strip_prefix(" ") {
|
||||
Some(Self::Context(context))
|
||||
} else {
|
||||
Some(Self::Addition(line.strip_prefix("+")?))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Display for DiffLine<'a> {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
DiffLine::OldPath { path } => write!(f, "--- {path}"),
|
||||
DiffLine::NewPath { path } => write!(f, "+++ {path}"),
|
||||
DiffLine::HunkHeader(Some(hunk_location)) => {
|
||||
write!(
|
||||
f,
|
||||
"@@ -{},{} +{},{} @@",
|
||||
hunk_location.start_line_old + 1,
|
||||
hunk_location.count_old,
|
||||
hunk_location.start_line_new + 1,
|
||||
hunk_location.count_new
|
||||
)
|
||||
}
|
||||
DiffLine::HunkHeader(None) => write!(f, "@@ ... @@"),
|
||||
DiffLine::Context(content) => write!(f, " {content}"),
|
||||
DiffLine::Deletion(content) => write!(f, "-{content}"),
|
||||
DiffLine::Addition(content) => write!(f, "+{content}"),
|
||||
DiffLine::Garbage(line) => write!(f, "{line}"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_header_path<'a>(strip_prefix: &'static str, header: &'a str) -> Cow<'a, str> {
|
||||
if !header.contains(['"', '\\']) {
|
||||
let path = header.split_ascii_whitespace().next().unwrap_or(header);
|
||||
return Cow::Borrowed(path.strip_prefix(strip_prefix).unwrap_or(path));
|
||||
}
|
||||
|
||||
let mut path = String::with_capacity(header.len());
|
||||
let mut in_quote = false;
|
||||
let mut chars = header.chars().peekable();
|
||||
let mut strip_prefix = Some(strip_prefix);
|
||||
|
||||
while let Some(char) = chars.next() {
|
||||
if char == '"' {
|
||||
in_quote = !in_quote;
|
||||
} else if char == '\\' {
|
||||
let Some(&next_char) = chars.peek() else {
|
||||
break;
|
||||
};
|
||||
chars.next();
|
||||
path.push(next_char);
|
||||
} else if char.is_ascii_whitespace() && !in_quote {
|
||||
break;
|
||||
} else {
|
||||
path.push(char);
|
||||
}
|
||||
|
||||
if let Some(prefix) = strip_prefix
|
||||
&& path == prefix
|
||||
{
|
||||
strip_prefix.take();
|
||||
path.clear();
|
||||
}
|
||||
}
|
||||
|
||||
Cow::Owned(path)
|
||||
}
|
||||
|
||||
fn eat_required_whitespace(header: &str) -> Option<&str> {
|
||||
let trimmed = header.trim_ascii_start();
|
||||
|
||||
if trimmed.len() == header.len() {
|
||||
None
|
||||
} else {
|
||||
Some(trimmed)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use indoc::indoc;
|
||||
|
||||
#[test]
|
||||
fn parse_lines_simple() {
|
||||
let input = indoc! {"
|
||||
diff --git a/text.txt b/text.txt
|
||||
index 86c770d..a1fd855 100644
|
||||
--- a/file.txt
|
||||
+++ b/file.txt
|
||||
@@ -1,2 +1,3 @@
|
||||
context
|
||||
-deleted
|
||||
+inserted
|
||||
garbage
|
||||
|
||||
--- b/file.txt
|
||||
+++ a/file.txt
|
||||
"};
|
||||
|
||||
let lines = input.lines().map(DiffLine::parse).collect::<Vec<_>>();
|
||||
|
||||
pretty_assertions::assert_eq!(
|
||||
lines,
|
||||
&[
|
||||
DiffLine::Garbage("diff --git a/text.txt b/text.txt"),
|
||||
DiffLine::Garbage("index 86c770d..a1fd855 100644"),
|
||||
DiffLine::OldPath {
|
||||
path: "file.txt".into()
|
||||
},
|
||||
DiffLine::NewPath {
|
||||
path: "file.txt".into()
|
||||
},
|
||||
DiffLine::HunkHeader(Some(HunkLocation {
|
||||
start_line_old: 0,
|
||||
count_old: 2,
|
||||
start_line_new: 0,
|
||||
count_new: 3
|
||||
})),
|
||||
DiffLine::Context("context"),
|
||||
DiffLine::Deletion("deleted"),
|
||||
DiffLine::Addition("inserted"),
|
||||
DiffLine::Garbage("garbage"),
|
||||
DiffLine::Context(""),
|
||||
DiffLine::OldPath {
|
||||
path: "b/file.txt".into()
|
||||
},
|
||||
DiffLine::NewPath {
|
||||
path: "a/file.txt".into()
|
||||
},
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn file_header_extra_space() {
|
||||
let options = ["--- file", "--- file", "---\tfile"];
|
||||
|
||||
for option in options {
|
||||
pretty_assertions::assert_eq!(
|
||||
DiffLine::parse(option),
|
||||
DiffLine::OldPath {
|
||||
path: "file".into()
|
||||
},
|
||||
"{option}",
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hunk_header_extra_space() {
|
||||
let options = [
|
||||
"@@ -1,2 +1,3 @@",
|
||||
"@@ -1,2 +1,3 @@",
|
||||
"@@\t-1,2\t+1,3\t@@",
|
||||
"@@ -1,2 +1,3 @@",
|
||||
"@@ -1,2 +1,3 @@",
|
||||
"@@ -1,2 +1,3 @@",
|
||||
"@@ -1,2 +1,3 @@ garbage",
|
||||
];
|
||||
|
||||
for option in options {
|
||||
pretty_assertions::assert_eq!(
|
||||
DiffLine::parse(option),
|
||||
DiffLine::HunkHeader(Some(HunkLocation {
|
||||
start_line_old: 0,
|
||||
count_old: 2,
|
||||
start_line_new: 0,
|
||||
count_new: 3
|
||||
})),
|
||||
"{option}",
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn hunk_header_without_location() {
|
||||
pretty_assertions::assert_eq!(DiffLine::parse("@@ ... @@"), DiffLine::HunkHeader(None));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_parse_path() {
|
||||
assert_eq!(parse_header_path("a/", "foo.txt"), "foo.txt");
|
||||
assert_eq!(
|
||||
parse_header_path("a/", "foo/bar/baz.txt"),
|
||||
"foo/bar/baz.txt"
|
||||
);
|
||||
assert_eq!(parse_header_path("a/", "a/foo.txt"), "foo.txt");
|
||||
assert_eq!(
|
||||
parse_header_path("a/", "a/foo/bar/baz.txt"),
|
||||
"foo/bar/baz.txt"
|
||||
);
|
||||
|
||||
// Extra
|
||||
assert_eq!(
|
||||
parse_header_path("a/", "a/foo/bar/baz.txt 2025"),
|
||||
"foo/bar/baz.txt"
|
||||
);
|
||||
assert_eq!(
|
||||
parse_header_path("a/", "a/foo/bar/baz.txt\t2025"),
|
||||
"foo/bar/baz.txt"
|
||||
);
|
||||
assert_eq!(
|
||||
parse_header_path("a/", "a/foo/bar/baz.txt \""),
|
||||
"foo/bar/baz.txt"
|
||||
);
|
||||
|
||||
// Quoted
|
||||
assert_eq!(
|
||||
parse_header_path("a/", "a/foo/bar/\"baz quox.txt\""),
|
||||
"foo/bar/baz quox.txt"
|
||||
);
|
||||
assert_eq!(
|
||||
parse_header_path("a/", "\"a/foo/bar/baz quox.txt\""),
|
||||
"foo/bar/baz quox.txt"
|
||||
);
|
||||
assert_eq!(
|
||||
parse_header_path("a/", "\"foo/bar/baz quox.txt\""),
|
||||
"foo/bar/baz quox.txt"
|
||||
);
|
||||
assert_eq!(parse_header_path("a/", "\"whatever 🤷\""), "whatever 🤷");
|
||||
assert_eq!(
|
||||
parse_header_path("a/", "\"foo/bar/baz quox.txt\" 2025"),
|
||||
"foo/bar/baz quox.txt"
|
||||
);
|
||||
// unescaped quotes are dropped
|
||||
assert_eq!(parse_header_path("a/", "foo/\"bar\""), "foo/bar");
|
||||
|
||||
// Escaped
|
||||
assert_eq!(
|
||||
parse_header_path("a/", "\"foo/\\\"bar\\\"/baz.txt\""),
|
||||
"foo/\"bar\"/baz.txt"
|
||||
);
|
||||
assert_eq!(
|
||||
parse_header_path("a/", "\"C:\\\\Projects\\\\My App\\\\old file.txt\""),
|
||||
"C:\\Projects\\My App\\old file.txt"
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -17,5 +17,6 @@ cloud_llm_client.workspace = true
|
||||
indoc.workspace = true
|
||||
ordered-float.workspace = true
|
||||
rustc-hash.workspace = true
|
||||
schemars.workspace = true
|
||||
serde.workspace = true
|
||||
strum.workspace = true
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
//! Zeta2 prompt planning and generation code shared with cloud.
|
||||
pub mod retrieval_prompt;
|
||||
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use cloud_llm_client::predict_edits_v3::{
|
||||
self, Excerpt, Line, Point, PromptFormat, ReferencedDeclaration,
|
||||
self, DiffPathFmt, Excerpt, Line, Point, PromptFormat, ReferencedDeclaration,
|
||||
};
|
||||
use indoc::indoc;
|
||||
use ordered_float::OrderedFloat;
|
||||
@@ -55,50 +56,98 @@ const LABELED_SECTIONS_INSTRUCTIONS: &str = indoc! {r#"
|
||||
const NUMBERED_LINES_INSTRUCTIONS: &str = indoc! {r#"
|
||||
# Instructions
|
||||
|
||||
You are a code completion assistant helping a programmer finish their work. Your task is to:
|
||||
You are an edit prediction agent in a code editor.
|
||||
Your job is to predict the next edit that the user will make,
|
||||
based on their last few edits and their current cursor location.
|
||||
|
||||
1. Analyze the edit history to understand what the programmer is trying to achieve
|
||||
2. Identify any incomplete refactoring or changes that need to be finished
|
||||
3. Make the remaining edits that a human programmer would logically make next
|
||||
4. Apply systematic changes consistently across the entire codebase - if you see a pattern starting, complete it everywhere.
|
||||
## Output Format
|
||||
|
||||
Focus on:
|
||||
- Understanding the intent behind the changes (e.g., improving error handling, refactoring APIs, fixing bugs)
|
||||
- Completing any partially-applied changes across the codebase
|
||||
- Ensuring consistency with the programming style and patterns already established
|
||||
- Making edits that maintain or improve code quality
|
||||
- If the programmer started refactoring one instance of a pattern, find and update ALL similar instances
|
||||
- Don't write a lot of code if you're not sure what to do
|
||||
|
||||
Rules:
|
||||
- Do not just mechanically apply patterns - reason about what changes make sense given the context and the programmer's apparent goals.
|
||||
- Do not just fix syntax errors - look for the broader refactoring pattern and apply it systematically throughout the code.
|
||||
- Write the edits in the unified diff format as shown in the example.
|
||||
|
||||
# Example output:
|
||||
You must briefly explain your understanding of the user's goal, in one
|
||||
or two sentences, and then specify their next edit in the form of a
|
||||
unified diff, like this:
|
||||
|
||||
```
|
||||
--- a/src/myapp/cli.py
|
||||
+++ b/src/myapp/cli.py
|
||||
@@ -1,3 +1,3 @@
|
||||
-
|
||||
-
|
||||
-import sys
|
||||
+import json
|
||||
@@ ... @@
|
||||
import os
|
||||
import time
|
||||
import sys
|
||||
+from constants import LOG_LEVEL_WARNING
|
||||
@@ ... @@
|
||||
config.headless()
|
||||
config.set_interactive(false)
|
||||
-config.set_log_level(LOG_L)
|
||||
+config.set_log_level(LOG_LEVEL_WARNING)
|
||||
config.set_use_color(True)
|
||||
```
|
||||
|
||||
# Edit History:
|
||||
## Edit History
|
||||
|
||||
"#};
|
||||
|
||||
const UNIFIED_DIFF_REMINDER: &str = indoc! {"
|
||||
---
|
||||
|
||||
Please analyze the edit history and the files, then provide the unified diff for your predicted edits.
|
||||
Analyze the edit history and the files, then provide the unified diff for your predicted edits.
|
||||
Do not include the cursor marker in your output.
|
||||
If you're editing multiple files, be sure to reflect filename in the hunk's header.
|
||||
Your diff should include edited file paths in its file headers (lines beginning with `---` and `+++`).
|
||||
Do not include line numbers in the hunk headers, use `@@ ... @@`.
|
||||
Removed lines begin with `-`.
|
||||
Added lines begin with `+`.
|
||||
Context lines begin with an extra space.
|
||||
Context and removed lines are used to match the target edit location, so make sure to include enough of them
|
||||
to uniquely identify it amongst all excerpts of code provided.
|
||||
"};
|
||||
|
||||
const XML_TAGS_INSTRUCTIONS: &str = indoc! {r#"
|
||||
# Instructions
|
||||
|
||||
You are an edit prediction agent in a code editor.
|
||||
Your job is to predict the next edit that the user will make,
|
||||
based on their last few edits and their current cursor location.
|
||||
|
||||
# Output Format
|
||||
|
||||
You must briefly explain your understanding of the user's goal, in one
|
||||
or two sentences, and then specify their next edit, using the following
|
||||
XML format:
|
||||
|
||||
<edits path="my-project/src/myapp/cli.py">
|
||||
<old_text>
|
||||
OLD TEXT 1 HERE
|
||||
</old_text>
|
||||
<new_text>
|
||||
NEW TEXT 1 HERE
|
||||
</new_text>
|
||||
|
||||
<old_text>
|
||||
OLD TEXT 1 HERE
|
||||
</old_text>
|
||||
<new_text>
|
||||
NEW TEXT 1 HERE
|
||||
</new_text>
|
||||
</edits>
|
||||
|
||||
- Specify the file to edit using the `path` attribute.
|
||||
- Use `<old_text>` and `<new_text>` tags to replace content
|
||||
- `<old_text>` must exactly match existing file content, including indentation
|
||||
- `<old_text>` cannot be empty
|
||||
- Do not escape quotes, newlines, or other characters within tags
|
||||
- Always close all tags properly
|
||||
- Don't include the <|user_cursor|> marker in your output.
|
||||
|
||||
# Edit History:
|
||||
|
||||
"#};
|
||||
|
||||
const OLD_TEXT_NEW_TEXT_REMINDER: &str = indoc! {r#"
|
||||
---
|
||||
|
||||
Remember that the edits in the edit history have already been deployed.
|
||||
The files are currently as shown in the Code Excerpts section.
|
||||
"#};
|
||||
|
||||
pub fn build_prompt(
|
||||
request: &predict_edits_v3::PredictEditsRequest,
|
||||
) -> Result<(String, SectionLabels)> {
|
||||
@@ -120,8 +169,9 @@ pub fn build_prompt(
|
||||
EDITABLE_REGION_END_MARKER_WITH_NEWLINE,
|
||||
),
|
||||
],
|
||||
PromptFormat::LabeledSections => vec![(request.cursor_point, CURSOR_MARKER)],
|
||||
PromptFormat::NumLinesUniDiff => {
|
||||
PromptFormat::LabeledSections
|
||||
| PromptFormat::NumLinesUniDiff
|
||||
| PromptFormat::OldTextNewText => {
|
||||
vec![(request.cursor_point, CURSOR_MARKER)]
|
||||
}
|
||||
PromptFormat::OnlySnippets => vec![],
|
||||
@@ -131,46 +181,32 @@ pub fn build_prompt(
|
||||
PromptFormat::MarkedExcerpt => MARKED_EXCERPT_INSTRUCTIONS.to_string(),
|
||||
PromptFormat::LabeledSections => LABELED_SECTIONS_INSTRUCTIONS.to_string(),
|
||||
PromptFormat::NumLinesUniDiff => NUMBERED_LINES_INSTRUCTIONS.to_string(),
|
||||
// only intended for use via zeta_cli
|
||||
PromptFormat::OldTextNewText => XML_TAGS_INSTRUCTIONS.to_string(),
|
||||
PromptFormat::OnlySnippets => String::new(),
|
||||
};
|
||||
|
||||
if request.events.is_empty() {
|
||||
prompt.push_str("(No edit history)\n\n");
|
||||
} else {
|
||||
prompt.push_str(
|
||||
"The following are the latest edits made by the user, from earlier to later.\n\n",
|
||||
);
|
||||
prompt.push_str("Here are the latest edits made by the user, from earlier to later.\n\n");
|
||||
push_events(&mut prompt, &request.events);
|
||||
}
|
||||
|
||||
prompt.push_str(indoc! {"
|
||||
# Code Excerpts
|
||||
|
||||
The cursor marker <|user_cursor|> indicates the current user cursor position.
|
||||
The file is in current state, edits from edit history have been applied.
|
||||
"});
|
||||
|
||||
if request.prompt_format == PromptFormat::NumLinesUniDiff {
|
||||
if request.referenced_declarations.is_empty() {
|
||||
prompt.push_str(indoc! {"
|
||||
# File under the cursor:
|
||||
|
||||
The cursor marker <|user_cursor|> indicates the current user cursor position.
|
||||
The file is in current state, edits from edit history have been applied.
|
||||
We prepend line numbers (e.g., `123|<actual line>`); they are not part of the file.
|
||||
|
||||
"});
|
||||
} else {
|
||||
// Note: This hasn't been trained on yet
|
||||
prompt.push_str(indoc! {"
|
||||
# Code Excerpts:
|
||||
|
||||
The cursor marker <|user_cursor|> indicates the current user cursor position.
|
||||
Other excerpts of code from the project have been included as context based on their similarity to the code under the cursor.
|
||||
Context excerpts are not guaranteed to be relevant, so use your own judgement.
|
||||
Files are in their current state, edits from edit history have been applied.
|
||||
We prepend line numbers (e.g., `123|<actual line>`); they are not part of the file.
|
||||
|
||||
"});
|
||||
}
|
||||
} else {
|
||||
prompt.push_str("\n## Code\n\n");
|
||||
prompt.push_str(indoc! {"
|
||||
We prepend line numbers (e.g., `123|<actual line>`); they are not part of the file.
|
||||
"});
|
||||
}
|
||||
|
||||
prompt.push('\n');
|
||||
|
||||
let mut section_labels = Default::default();
|
||||
|
||||
if !request.referenced_declarations.is_empty() || !request.signatures.is_empty() {
|
||||
@@ -197,8 +233,14 @@ pub fn build_prompt(
|
||||
}
|
||||
}
|
||||
|
||||
if request.prompt_format == PromptFormat::NumLinesUniDiff {
|
||||
prompt.push_str(UNIFIED_DIFF_REMINDER);
|
||||
match request.prompt_format {
|
||||
PromptFormat::NumLinesUniDiff => {
|
||||
prompt.push_str(UNIFIED_DIFF_REMINDER);
|
||||
}
|
||||
PromptFormat::OldTextNewText => {
|
||||
prompt.push_str(OLD_TEXT_NEW_TEXT_REMINDER);
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
|
||||
Ok((prompt, section_labels))
|
||||
@@ -212,7 +254,7 @@ pub fn write_codeblock<'a>(
|
||||
include_line_numbers: bool,
|
||||
output: &'a mut String,
|
||||
) {
|
||||
writeln!(output, "`````{}", path.display()).unwrap();
|
||||
writeln!(output, "`````{}", DiffPathFmt(path)).unwrap();
|
||||
write_excerpts(
|
||||
excerpts,
|
||||
sorted_insertions,
|
||||
@@ -275,7 +317,7 @@ pub fn write_excerpts<'a>(
|
||||
}
|
||||
}
|
||||
|
||||
fn push_events(output: &mut String, events: &[predict_edits_v3::Event]) {
|
||||
pub fn push_events(output: &mut String, events: &[predict_edits_v3::Event]) {
|
||||
if events.is_empty() {
|
||||
return;
|
||||
};
|
||||
@@ -623,6 +665,7 @@ impl<'a> SyntaxBasedPrompt<'a> {
|
||||
match self.request.prompt_format {
|
||||
PromptFormat::MarkedExcerpt
|
||||
| PromptFormat::OnlySnippets
|
||||
| PromptFormat::OldTextNewText
|
||||
| PromptFormat::NumLinesUniDiff => {
|
||||
if range.start.0 > 0 && !skipped_last_snippet {
|
||||
output.push_str("…\n");
|
||||
|
||||
94
crates/cloud_zeta2_prompt/src/retrieval_prompt.rs
Normal file
94
crates/cloud_zeta2_prompt/src/retrieval_prompt.rs
Normal file
@@ -0,0 +1,94 @@
|
||||
use anyhow::Result;
use cloud_llm_client::predict_edits_v3::{self, Excerpt};
use indoc::indoc;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use std::fmt::Write;

use crate::{push_events, write_codeblock};

pub fn build_prompt(request: predict_edits_v3::PlanContextRetrievalRequest) -> Result<String> {
    let mut prompt = SEARCH_INSTRUCTIONS.to_string();

    if !request.events.is_empty() {
        writeln!(&mut prompt, "## User Edits\n")?;
        push_events(&mut prompt, &request.events);
    }

    writeln!(&mut prompt, "## Cursor context")?;
    write_codeblock(
        &request.excerpt_path,
        &[Excerpt {
            start_line: request.excerpt_line_range.start,
            text: request.excerpt.into(),
        }],
        &[],
        request.cursor_file_max_row,
        true,
        &mut prompt,
    );

    writeln!(&mut prompt, "{TOOL_USE_REMINDER}")?;

    Ok(prompt)
}

/// Search for relevant code
///
/// For the best results, run multiple queries at once with a single invocation of this tool.
#[derive(Clone, Deserialize, Serialize, JsonSchema)]
pub struct SearchToolInput {
    /// An array of queries to run for gathering context relevant to the next prediction
    #[schemars(length(max = 3))]
    pub queries: Box<[SearchToolQuery]>,
}

/// Search for relevant code by path, syntax hierarchy, and content.
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema)]
pub struct SearchToolQuery {
    /// 1. A glob pattern to match file paths in the codebase to search in.
    pub glob: String,
    /// 2. Regular expressions to match syntax nodes **by their first line** and hierarchy.
    ///
    /// Subsequent regexes match nodes within the full content of the nodes matched by the previous regexes.
    ///
    /// Example: Searching for a `User` class
    /// ["class\s+User"]
    ///
    /// Example: Searching for a `get_full_name` method under a `User` class
    /// ["class\s+User", "def\sget_full_name"]
    ///
    /// Skip this field to match on content alone.
    #[schemars(length(max = 3))]
    #[serde(default)]
    pub syntax_node: Vec<String>,
    /// 3. An optional regular expression to match the final content that should appear in the results.
    ///
    /// - Content will be matched within all lines of the matched syntax nodes.
    /// - If syntax node regexes are provided, this field can be skipped to include as much of the node itself as possible.
    /// - If no syntax node regexes are provided, the content will be matched within the entire file.
    pub content: Option<String>,
}

pub const TOOL_NAME: &str = "search";

const SEARCH_INSTRUCTIONS: &str = indoc! {r#"
    You are part of an edit prediction system in a code editor.
    Your role is to search for code that will serve as context for predicting the next edit.

    - Analyze the user's recent edits and current cursor context
    - Use the `search` tool to find code that is relevant for predicting the next edit
    - Focus on finding:
      - Code patterns that might need similar changes based on the recent edits
      - Functions, variables, types, and constants referenced in the current cursor context
      - Related implementations, usages, or dependencies that may require consistent updates
      - How items defined in the cursor excerpt are used or altered
    - You will not be able to filter results or perform subsequent queries, so keep searches as targeted as possible
    - Use `syntax_node` parameter whenever you're looking for a particular type, class, or function
    - Avoid using wildcard globs if you already know the file path of the content you're looking for
    "#};

const TOOL_USE_REMINDER: &str = indoc! {"
    --
    Analyze the user's intent in one to two sentences, then call the `search` tool.
    "};
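The `SearchToolQuery` doc comments above describe the query format in prose. As a hedged illustration (not part of this diff), the sketch below builds a `SearchToolInput` matching the doc-comment example and serializes it; the module path, the glob value, and the `serde_json` dependency are assumptions:

```rust
// Hypothetical usage sketch: one targeted query, mirroring the
// "`get_full_name` method under a `User` class" example above.
use crate::retrieval_prompt::{SearchToolInput, SearchToolQuery};

fn example_search_input() -> anyhow::Result<String> {
    let query = SearchToolQuery {
        // Assumed glob; per SEARCH_INSTRUCTIONS, prefer a known path over wildcards.
        glob: "src/models/*.py".to_string(),
        syntax_node: vec![r"class\s+User".to_string(), r"def\sget_full_name".to_string()],
        // With syntax_node regexes given, content can be omitted.
        content: None,
    };
    let input = SearchToolInput {
        queries: vec![query].into_boxed_slice(),
    };
    // The model-facing tool call is the JSON form of this struct.
    Ok(serde_json::to_string_pretty(&input)?)
}
```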
@@ -34,7 +34,7 @@ struct CurrentCompletion
    snapshot: BufferSnapshot,
    /// The edits that should be applied to transform the original text into the predicted text.
    /// Each edit is a range in the buffer and the text to replace it with.
    edits: Arc<[(Range<Anchor>, String)]>,
    edits: Arc<[(Range<Anchor>, Arc<str>)]>,
    /// Preview of how the buffer will look after applying the edits.
    edit_preview: EditPreview,
}
@@ -42,7 +42,7 @@ struct CurrentCompletion
impl CurrentCompletion {
    /// Attempts to adjust the edits based on changes made to the buffer since the completion was generated.
    /// Returns None if the user's edits conflict with the predicted edits.
    fn interpolate(&self, new_snapshot: &BufferSnapshot) -> Option<Vec<(Range<Anchor>, String)>> {
    fn interpolate(&self, new_snapshot: &BufferSnapshot) -> Option<Vec<(Range<Anchor>, Arc<str>)>> {
        edit_prediction::interpolate_edits(&self.snapshot, new_snapshot, &self.edits)
    }
}
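The `interpolate` doc comment above describes remapping predicted edits onto the buffer's new state and discarding them on conflict. The real logic lives in `edit_prediction::interpolate_edits` over `Anchor` ranges; the following is only a conceptual sketch of that shift-or-discard idea using plain byte offsets, with every name invented for illustration:

```rust
use std::ops::Range;

// Conceptual sketch only, not the real edit_prediction::interpolate_edits.
// A predicted edit survives a user edit that doesn't overlap it; a user edit
// made earlier in the buffer shifts the prediction by the net length change,
// and any overlap is treated as a conflict.
fn interpolate_offset_edit(
    predicted: (Range<usize>, &str),
    user_edit: (Range<usize>, &str),
) -> Option<(Range<usize>, String)> {
    let (pred_range, pred_text) = predicted;
    let (user_range, user_text) = user_edit;
    if user_range.end <= pred_range.start {
        // User edited earlier in the buffer: shift the prediction by the size delta.
        let delta = user_text.len() as isize - (user_range.end - user_range.start) as isize;
        let start = (pred_range.start as isize + delta) as usize;
        let end = (pred_range.end as isize + delta) as usize;
        Some((start..end, pred_text.to_string()))
    } else if user_range.start >= pred_range.end {
        // User edited later in the buffer: the prediction is unaffected.
        Some((pred_range, pred_text.to_string()))
    } else {
        // The user's edit overlaps the predicted range: treat it as a conflict.
        None
    }
}
```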
@@ -281,8 +281,8 @@ impl EditPredictionProvider for CodestralCompletionProvider {
            return Ok(());
        }

        let edits: Arc<[(Range<Anchor>, String)]> =
            vec![(cursor_position..cursor_position, completion_text)].into();
        let edits: Arc<[(Range<Anchor>, Arc<str>)]> =
            vec![(cursor_position..cursor_position, completion_text.into())].into();
        let edit_preview = buffer
            .read_with(cx, |buffer, cx| buffer.preview_edits(edits.clone(), cx))?
            .await;
@@ -291,29 +291,6 @@ CREATE TABLE IF NOT EXISTS "channel_chat_participants" (

CREATE INDEX "index_channel_chat_participants_on_channel_id" ON "channel_chat_participants" ("channel_id");

CREATE TABLE IF NOT EXISTS "channel_messages" (
    "id" INTEGER PRIMARY KEY AUTOINCREMENT,
    "channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE,
    "sender_id" INTEGER NOT NULL REFERENCES users (id),
    "body" TEXT NOT NULL,
    "sent_at" TIMESTAMP,
    "edited_at" TIMESTAMP,
    "nonce" BLOB NOT NULL,
    "reply_to_message_id" INTEGER DEFAULT NULL
);

CREATE INDEX "index_channel_messages_on_channel_id" ON "channel_messages" ("channel_id");

CREATE UNIQUE INDEX "index_channel_messages_on_sender_id_nonce" ON "channel_messages" ("sender_id", "nonce");

CREATE TABLE "channel_message_mentions" (
    "message_id" INTEGER NOT NULL REFERENCES channel_messages (id) ON DELETE CASCADE,
    "start_offset" INTEGER NOT NULL,
    "end_offset" INTEGER NOT NULL,
    "user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE,
    PRIMARY KEY (message_id, start_offset)
);

CREATE TABLE "channel_members" (
    "id" INTEGER PRIMARY KEY AUTOINCREMENT,
    "channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE,
@@ -408,15 +385,6 @@ CREATE TABLE "observed_buffer_edits" (

CREATE UNIQUE INDEX "index_observed_buffers_user_and_buffer_id" ON "observed_buffer_edits" ("user_id", "buffer_id");

CREATE TABLE IF NOT EXISTS "observed_channel_messages" (
    "user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE,
    "channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE,
    "channel_message_id" INTEGER NOT NULL,
    PRIMARY KEY (user_id, channel_id)
);

CREATE UNIQUE INDEX "index_observed_channel_messages_user_and_channel_id" ON "observed_channel_messages" ("user_id", "channel_id");

CREATE TABLE "notification_kinds" (
    "id" INTEGER PRIMARY KEY AUTOINCREMENT,
    "name" VARCHAR NOT NULL

@@ -0,0 +1,3 @@
drop table observed_channel_messages;
drop table channel_message_mentions;
drop table channel_messages;
@@ -66,40 +66,6 @@ impl Database {
        .await
    }

    /// Returns all users flagged as staff.
    pub async fn get_staff_users(&self) -> Result<Vec<user::Model>> {
        self.transaction(|tx| async {
            let tx = tx;
            Ok(user::Entity::find()
                .filter(user::Column::Admin.eq(true))
                .all(&*tx)
                .await?)
        })
        .await
    }

    /// Returns a user by email address. There are no access checks here, so this should only be used internally.
    pub async fn get_user_by_email(&self, email: &str) -> Result<Option<User>> {
        self.transaction(|tx| async move {
            Ok(user::Entity::find()
                .filter(user::Column::EmailAddress.eq(email))
                .one(&*tx)
                .await?)
        })
        .await
    }

    /// Returns a user by GitHub user ID. There are no access checks here, so this should only be used internally.
    pub async fn get_user_by_github_user_id(&self, github_user_id: i32) -> Result<Option<User>> {
        self.transaction(|tx| async move {
            Ok(user::Entity::find()
                .filter(user::Column::GithubUserId.eq(github_user_id))
                .one(&*tx)
                .await?)
        })
        .await
    }

    /// Returns a user by GitHub login. There are no access checks here, so this should only be used internally.
    pub async fn get_user_by_github_login(&self, github_login: &str) -> Result<Option<User>> {
        self.transaction(|tx| async move {
@@ -270,39 +236,6 @@ impl Database {
        .await
    }

    /// Sets "accepted_tos_at" on the user to the given timestamp.
    pub async fn set_user_accepted_tos_at(
        &self,
        id: UserId,
        accepted_tos_at: Option<DateTime>,
    ) -> Result<()> {
        self.transaction(|tx| async move {
            user::Entity::update_many()
                .filter(user::Column::Id.eq(id))
                .set(user::ActiveModel {
                    accepted_tos_at: ActiveValue::set(accepted_tos_at),
                    ..Default::default()
                })
                .exec(&*tx)
                .await?;
            Ok(())
        })
        .await
    }

    /// hard delete the user.
    pub async fn destroy_user(&self, id: UserId) -> Result<()> {
        self.transaction(|tx| async move {
            access_token::Entity::delete_many()
                .filter(access_token::Column::UserId.eq(id))
                .exec(&*tx)
                .await?;
            user::Entity::delete_by_id(id).exec(&*tx).await?;
            Ok(())
        })
        .await
    }

    /// Find users where github_login ILIKE name_query.
    pub async fn fuzzy_search_users(&self, name_query: &str, limit: u32) -> Result<Vec<User>> {
        self.transaction(|tx| async {
@@ -341,14 +274,4 @@ impl Database {
        result.push('%');
        result
    }

    pub async fn get_users_missing_github_user_created_at(&self) -> Result<Vec<user::Model>> {
        self.transaction(|tx| async move {
            Ok(user::Entity::find()
                .filter(user::Column::GithubUserCreatedAt.is_null())
                .all(&*tx)
                .await?)
        })
        .await
    }
}

@@ -6,8 +6,6 @@ pub mod channel;
pub mod channel_buffer_collaborator;
pub mod channel_chat_participant;
pub mod channel_member;
pub mod channel_message;
pub mod channel_message_mention;
pub mod contact;
pub mod contributor;
pub mod embedding;
@@ -18,7 +16,6 @@ pub mod language_server;
pub mod notification;
pub mod notification_kind;
pub mod observed_buffer_edits;
pub mod observed_channel_messages;
pub mod project;
pub mod project_collaborator;
pub mod project_repository;
@@ -1,47 +0,0 @@
use crate::db::{ChannelId, MessageId, UserId};
use sea_orm::entity::prelude::*;
use time::PrimitiveDateTime;

#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "channel_messages")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub id: MessageId,
    pub channel_id: ChannelId,
    pub sender_id: UserId,
    pub body: String,
    pub sent_at: PrimitiveDateTime,
    pub edited_at: Option<PrimitiveDateTime>,
    pub nonce: Uuid,
    pub reply_to_message_id: Option<MessageId>,
}

impl ActiveModelBehavior for ActiveModel {}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::channel::Entity",
        from = "Column::ChannelId",
        to = "super::channel::Column::Id"
    )]
    Channel,
    #[sea_orm(
        belongs_to = "super::user::Entity",
        from = "Column::SenderId",
        to = "super::user::Column::Id"
    )]
    Sender,
}

impl Related<super::channel::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Channel.def()
    }
}

impl Related<super::user::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Sender.def()
    }
}
@@ -1,43 +0,0 @@
use crate::db::{MessageId, UserId};
use sea_orm::entity::prelude::*;

#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
#[sea_orm(table_name = "channel_message_mentions")]
pub struct Model {
    #[sea_orm(primary_key)]
    pub message_id: MessageId,
    #[sea_orm(primary_key)]
    pub start_offset: i32,
    pub end_offset: i32,
    pub user_id: UserId,
}

impl ActiveModelBehavior for ActiveModel {}

#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::channel_message::Entity",
        from = "Column::MessageId",
        to = "super::channel_message::Column::Id"
    )]
    Message,
    #[sea_orm(
        belongs_to = "super::user::Entity",
        from = "Column::UserId",
        to = "super::user::Column::Id"
    )]
    MentionedUser,
}

impl Related<super::channel::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Message.def()
    }
}

impl Related<super::user::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::MentionedUser.def()
    }
}
Some files were not shown because too many files have changed in this diff.