Compare commits
231 Commits
fix-34466...commit-vie
Commit SHA1s in this comparison:

f3ffbcc5ab 21f7e6a9e6 dd431631b4 511e51c80e 0a816cbc87 b1333b53ad
30597a0cba a8e2dc2f25 fd2094fa19 22f1655f8f 7cbe25fda5 728f09f3f4
4353b8ecd5 736a712387 3180f44477 5dd8561b06 bfab0b71e0 04d920016f
20fa9983ad dd57d97bb6 d5a437d22f a524071dd9 1471105643 f05ee8a24d
4d0cada8f4 abf90cc274 b79d92d1c6 660234fed2 2b02b60317 9d49c1ffda
6253b1d220 4e75f0f3ab 0b4f72e549 dc5f54eaf9 ba807a3c46 45829b3380
631e3dd272 8d44bcd4f9 1888106664 c005adb09c 6b2d1f153d 22e1bcccad
bb591f1e65 3d6cc3dc79 464d4f72eb f4892559f0 387059c6b2 4a382b2797
b948d8b9e7 bc17491527 f6a6630171 18421845eb 21439426a0 044f7b5583
12dba5edbe 216a3a60f5 66789607c9 62c312b35f 066dd5c9d5 bdba6fd069
2260b87ea8 e6214b2b71 7ef45914e8 00e6cbc4fc 9e0a4c2a9c a8b61c5ffa
d312d59ace b4083ec47b e75a7aecd6 03cf7ddb53 9364d39487 f16913400a
5bfc0baa4c d7b99a5b12 7691cf341c 63cc90cd2c d1e45e27de 9da0d40694
9f344f093e ef76f07b1e 4577e1bf8f a574ae8779 16666f5357 b2e35b5f99
9e33243015 a0848daab4 d72746773f 0565992d7a e1d8c1a6a1 f08fd732a7
51b7d06a27 66c7bdf037 363fbbf0d4 9860884217 4cef8eb47b e5f87735d3
f4b8b0f471 5cd30e5106 a350438a21 bd6ca841ad f9cea5af29 3bb6c2546a
37b0cdf94b d76dd86272 b558be7ec6 07fe8e9bb1 b776178b52 1d0aef6b22
c7ef3025e4 822fc7ef16 126d708fa1 a5ab5c7d5d 35da6d000a d6241b17d3
42583c1141 76167109db cd8679e81a 43f977c6b9 bdb8caa42e 9ae77ec3c9
d5ed9d3e3a 74a1b5d14d 07af011eb4 c357dc25fc 93bc6616c6 a33e881906
c978db8626 2dad46c5c0 4c51fffbb5 0d80b452fb bad6bde03a 4ec2d04ad9
0f0017dc8e 9db0d66251 b07389d9f3 db2e26f67b 391c92b07a 1e4d80a21f
f90d9d26a5 40a611bf34 8ad3a150c8 87976e91cf 290a1550aa 92dcfdef76
4ef8433396 a51e975b81 493cfadb42 0818cedded 6b46a71dd0 575ea49aad
85ccd7c98b b168679c18 621ac16e35 c248a956e0 7e177c496c e39dd2af67
904d90bee7 1e09cbfefa 8ca2571367 95a553ea94 bf878e9a95 a688239113
4e8f6ddae9 0f67f08795 fe6fa1bbdc 50d0f29624 9857fd233d ad51017f20
2bf47879de 65b4e9b10a 98dec9246e 39536cae83 b4e1d86a16 8a12ecf849
22bf449b9e bcf9142bbc a2d57fc7b6 96a917091a a2ddb0f1cb 23e5477a4c
4e043cd56b d283338885 b1af02ca71 59b5de5532 efa98a12fd 7bea1ba555
7c95834b7b 3d58738548 2db237aa52 305e73ebbb ec6e7b84b8 4f5cc0a24b
a2f69cd5bd 6a097298b0 0df86e406a a74aac88c9 e5105ccdbe 876b258088
19aba43f3e 8d09610748 5b6663ef97 f445f22fe6 6216af9b5a 464c0be2b7
2df5993eb0 04e92fb2d2 e27590432f a675eb1667 b27ad98520 9c4e16088c
34a2bfd6b7 99d8d34d48 bd79edee71 0bb1c6ad3e fd146757cf 6eb9f9add7
3d3d124e01 de392cda39 33513292af 1535e95066 26f77032a2 efff602909
58c9cbae40 7881551dda ff6bd7d82e bfb876c782 64b432e4ac 33ecb0a68f
7b7ddbd1e8 ed81ef0442 88fffae9dd
.github/ISSUE_TEMPLATE/11_crash_report.yml (vendored, 16 changes)

@@ -6,28 +6,18 @@ body:
- type: textarea
attributes:
label: Reproduction steps
description: A step-by-step description of how to reproduce the crash from a **clean Zed install**. **Be verbose**. **Issues with insufficient detail may be summarily closed**.
description: A step-by-step description of how to reproduce the crash from a **clean Zed install**. The more context you provide, the easier it is to find and fix the problem fast.
placeholder: |
1. Start Zed
2. Perform an action
3. Zed crashes
validations:
required: true
- type: textarea
attributes:
label: Current vs. Expected behavior
description: |
Go into depth about what actions you’re performing in Zed to trigger the crash. If Zed crashes before it loads any windows, make sure to mention that. Again, **be verbose**.

**Skipping this/failure to provide complete information will result in the issue being closed.**
placeholder: "Based on my reproduction steps above, when I perform said action, I expect this to happen, but instead Zed crashes."
validations:
required: true
- type: textarea
attributes:
label: Zed version and system specs
description: |
Open the command palette in Zed, then type “zed: copy system specs into clipboard”. **Skipping this/failure to provide complete information will result in the issue being closed**.
Open the command palette in Zed, then type “zed: copy system specs into clipboard”.
placeholder: |
Zed: v0.215.0 (Zed Nightly bfe141ea79aa4984028934067ba75c48d99136ae)
OS: macOS 15.1
@@ -37,7 +27,7 @@ body:
required: true
- type: textarea
attributes:
label: If applicable, attach your Zed log file to this issue
label: Attach Zed log file
description: |
Open the command palette in Zed, then type `zed: open log` to see the last 1000 lines. Or type `zed: reveal log in file manager` in the command palette to reveal the log file itself.
value: |
.github/workflows/extension_bump.yml (vendored, 48 changes)

@@ -25,33 +25,6 @@ on:
description: The app secret for the corresponding app ID
required: true
jobs:
check_extension:
if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions')
runs-on: namespace-profile-2x4-ubuntu-2404
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- id: cache-zed-extension-cli
name: extension_tests::cache_zed_extension_cli
uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830
with:
path: zed-extension
key: zed-extension-${{ env.ZED_EXTENSION_CLI_SHA }}
- name: extension_tests::download_zed_extension_cli
if: steps.cache-zed-extension-cli.outputs.cache-hit != 'true'
run: |
wget --quiet "https://zed-extension-cli.nyc3.digitaloceanspaces.com/$ZED_EXTENSION_CLI_SHA/x86_64-unknown-linux-gnu/zed-extension"
chmod +x zed-extension
shell: bash -euxo pipefail {0}
- name: extension_tests::check
run: |
mkdir -p /tmp/ext-scratch
mkdir -p /tmp/ext-output
./zed-extension --source-dir . --scratch-dir /tmp/ext-scratch --output-dir /tmp/ext-output
shell: bash -euxo pipefail {0}
timeout-minutes: 1
check_bump_needed:
if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions')
runs-on: namespace-profile-2x4-ubuntu-2404
@@ -89,7 +62,6 @@ jobs:
timeout-minutes: 1
bump_extension_version:
needs:
- check_extension
- check_bump_needed
if: |-
(github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') &&
@@ -114,21 +86,18 @@ jobs:
run: |
OLD_VERSION="${{ needs.check_bump_needed.outputs.current_version }}"

cat <<EOF > .bumpversion.cfg
[bumpversion]
current_version = "$OLD_VERSION"
BUMP_FILES=("extension.toml")
if [[ -f "Cargo.toml" ]]; then
BUMP_FILES+=("Cargo.toml")
fi

[bumpversion:file:Cargo.toml]
bump2version --verbose --current-version "$OLD_VERSION" --no-configured-files ${{ inputs.bump-type }} "${BUMP_FILES[@]}"

[bumpversion:file:extension.toml]
if [[ -f "Cargo.toml" ]]; then
cargo update --workspace
fi

EOF

bump2version --verbose ${{ inputs.bump-type }}
NEW_VERSION="$(sed -n 's/version = \"\(.*\)\"/\1/p' < extension.toml)"
cargo update --workspace

rm .bumpversion.cfg

echo "new_version=${NEW_VERSION}" >> "$GITHUB_OUTPUT"
shell: bash -euxo pipefail {0}
@@ -147,7 +116,6 @@ jobs:
timeout-minutes: 1
create_version_label:
needs:
- check_extension
- check_bump_needed
if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions') && github.event_name == 'push' && github.ref == 'refs/heads/main' && needs.check_bump_needed.outputs.needs_bump == 'false'
runs-on: namespace-profile-8x16-ubuntu-2204
.github/workflows/extension_release.yml (vendored, 2 changes)

@@ -21,6 +21,8 @@ jobs:
with:
app-id: ${{ secrets.app-id }}
private-key: ${{ secrets.app-secret }}
owner: zed-industries
repositories: extensions
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
.github/workflows/extension_tests.yml (vendored, 13 changes)

@@ -7,12 +7,7 @@ env:
CARGO_INCREMENTAL: '0'
ZED_EXTENSION_CLI_SHA: 7cfce605704d41ca247e3f84804bf323f6c6caaf
on:
workflow_call:
inputs:
run_tests:
description: Whether the workflow should run rust tests
required: true
type: boolean
workflow_call: {}
jobs:
orchestrate:
if: (github.repository_owner == 'zed-industries' || github.repository_owner == 'zed-extensions')
@@ -73,12 +68,12 @@ jobs:
run: cargo clippy --release --all-targets --all-features -- --deny warnings
shell: bash -euxo pipefail {0}
- name: steps::cargo_install_nextest
if: inputs.run_tests
uses: taiki-e/install-action@nextest
- name: steps::cargo_nextest
if: inputs.run_tests
run: cargo nextest run --workspace --no-fail-fast
shell: bash -euxo pipefail {0}
env:
NEXTEST_NO_TESTS: warn
timeout-minutes: 3
check_extension:
needs:
@@ -108,7 +103,7 @@ jobs:
mkdir -p /tmp/ext-output
./zed-extension --source-dir . --scratch-dir /tmp/ext-scratch --output-dir /tmp/ext-output
shell: bash -euxo pipefail {0}
timeout-minutes: 1
timeout-minutes: 2
tests_pass:
needs:
- orchestrate
.github/workflows/run_tests.yml (vendored, 3 changes)

@@ -84,7 +84,7 @@ jobs:
run: ./script/check-keymaps
shell: bash -euxo pipefail {0}
- name: run_tests::check_style::check_for_typos
uses: crate-ci/typos@80c8a4945eec0f6d464eaf9e65ed98ef085283d1
uses: crate-ci/typos@2d0ce569feab1f8752f1dde43cc2f2aa53236e06
with:
config: ./typos.toml
- name: steps::cargo_fmt
@@ -520,6 +520,7 @@ jobs:
uses: bufbuild/buf-setup-action@v1
with:
version: v1.29.0
github_token: ${{ secrets.GITHUB_TOKEN }}
- name: run_tests::check_postgres_and_protobuf_migrations::bufbuild_breaking_action
uses: bufbuild/buf-breaking-action@v1
with:
.gitignore (vendored, 3 changes)

@@ -39,3 +39,6 @@ xcuserdata/
# Don't commit any secrets to the repo.
.env
.env.secret.toml

# `nix build` output
/result
Cargo.lock (generated, 1556 changes)
Cargo.toml (59 changes)

@@ -54,11 +54,12 @@ members = [
"crates/diagnostics",
"crates/docs_preprocessor",
"crates/edit_prediction",
"crates/edit_prediction_button",
"crates/edit_prediction_types",
"crates/edit_prediction_ui",
"crates/edit_prediction_context",
"crates/zeta2_tools",
"crates/editor",
"crates/eval",
"crates/eval_utils",
"crates/explorer_command_injector",
"crates/extension",
"crates/extension_api",
@@ -200,10 +201,11 @@ members = [
"crates/zed",
"crates/zed_actions",
"crates/zed_env_vars",
"crates/zeta",
"crates/zeta_cli",
"crates/edit_prediction_cli",
"crates/zlog",
"crates/zlog_settings",
"crates/ztracing",
"crates/ztracing_macro",

#
# Extensions
@@ -242,7 +244,6 @@ activity_indicator = { path = "crates/activity_indicator" }
agent_ui = { path = "crates/agent_ui" }
agent_settings = { path = "crates/agent_settings" }
agent_servers = { path = "crates/agent_servers" }
ai = { path = "crates/ai" }
ai_onboarding = { path = "crates/ai_onboarding" }
anthropic = { path = "crates/anthropic" }
askpass = { path = "crates/askpass" }
@@ -252,7 +253,6 @@ assistant_slash_command = { path = "crates/assistant_slash_command" }
assistant_slash_commands = { path = "crates/assistant_slash_commands" }
audio = { path = "crates/audio" }
auto_update = { path = "crates/auto_update" }
auto_update_helper = { path = "crates/auto_update_helper" }
auto_update_ui = { path = "crates/auto_update_ui" }
aws_http_client = { path = "crates/aws_http_client" }
bedrock = { path = "crates/bedrock" }
@@ -267,7 +267,6 @@ cloud_api_client = { path = "crates/cloud_api_client" }
cloud_api_types = { path = "crates/cloud_api_types" }
cloud_llm_client = { path = "crates/cloud_llm_client" }
cloud_zeta2_prompt = { path = "crates/cloud_zeta2_prompt" }
collab = { path = "crates/collab" }
collab_ui = { path = "crates/collab_ui" }
collections = { path = "crates/collections", version = "0.1.0" }
command_palette = { path = "crates/command_palette" }
@@ -288,6 +287,7 @@ deepseek = { path = "crates/deepseek" }
derive_refineable = { path = "crates/refineable/derive_refineable" }
diagnostics = { path = "crates/diagnostics" }
editor = { path = "crates/editor" }
eval_utils = { path = "crates/eval_utils" }
extension = { path = "crates/extension" }
extension_host = { path = "crates/extension_host" }
extensions_ui = { path = "crates/extensions_ui" }
@@ -311,10 +311,9 @@ http_client = { path = "crates/http_client" }
http_client_tls = { path = "crates/http_client_tls" }
icons = { path = "crates/icons" }
image_viewer = { path = "crates/image_viewer" }
edit_prediction = { path = "crates/edit_prediction" }
edit_prediction_button = { path = "crates/edit_prediction_button" }
edit_prediction_types = { path = "crates/edit_prediction_types" }
edit_prediction_ui = { path = "crates/edit_prediction_ui" }
edit_prediction_context = { path = "crates/edit_prediction_context" }
zeta2_tools = { path = "crates/zeta2_tools" }
inspector_ui = { path = "crates/inspector_ui" }
install_cli = { path = "crates/install_cli" }
journal = { path = "crates/journal" }
@@ -356,8 +355,6 @@ panel = { path = "crates/panel" }
paths = { path = "crates/paths" }
perf = { path = "tooling/perf" }
picker = { path = "crates/picker" }
plugin = { path = "crates/plugin" }
plugin_macros = { path = "crates/plugin_macros" }
prettier = { path = "crates/prettier" }
settings_profile_selector = { path = "crates/settings_profile_selector" }
project = { path = "crates/project" }
@@ -368,12 +365,10 @@ proto = { path = "crates/proto" }
recent_projects = { path = "crates/recent_projects" }
refineable = { path = "crates/refineable" }
release_channel = { path = "crates/release_channel" }
scheduler = { path = "crates/scheduler" }
remote = { path = "crates/remote" }
remote_server = { path = "crates/remote_server" }
repl = { path = "crates/repl" }
reqwest_client = { path = "crates/reqwest_client" }
rich_text = { path = "crates/rich_text" }
rodio = { git = "https://github.com/RustAudio/rodio", rev ="e2074c6c2acf07b57cf717e076bdda7a9ac6e70b", features = ["wav", "playback", "wav_output", "recording"] }
rope = { path = "crates/rope" }
rpc = { path = "crates/rpc" }
@@ -390,7 +385,6 @@ snippets_ui = { path = "crates/snippets_ui" }
sqlez = { path = "crates/sqlez" }
sqlez_macros = { path = "crates/sqlez_macros" }
story = { path = "crates/story" }
storybook = { path = "crates/storybook" }
streaming_diff = { path = "crates/streaming_diff" }
sum_tree = { path = "crates/sum_tree" }
supermaven = { path = "crates/supermaven" }
@@ -407,7 +401,6 @@ terminal_view = { path = "crates/terminal_view" }
text = { path = "crates/text" }
theme = { path = "crates/theme" }
theme_extension = { path = "crates/theme_extension" }
theme_importer = { path = "crates/theme_importer" }
theme_selector = { path = "crates/theme_selector" }
time_format = { path = "crates/time_format" }
title_bar = { path = "crates/title_bar" }
@@ -431,15 +424,17 @@ x_ai = { path = "crates/x_ai" }
zed = { path = "crates/zed" }
zed_actions = { path = "crates/zed_actions" }
zed_env_vars = { path = "crates/zed_env_vars" }
zeta = { path = "crates/zeta" }
edit_prediction = { path = "crates/edit_prediction" }
zlog = { path = "crates/zlog" }
zlog_settings = { path = "crates/zlog_settings" }
ztracing = { path = "crates/ztracing" }
ztracing_macro = { path = "crates/ztracing_macro" }

#
# External crates
#

agent-client-protocol = { version = "0.7.0", features = ["unstable"] }
agent-client-protocol = { version = "=0.9.0", features = ["unstable"] }
aho-corasick = "1.1"
alacritty_terminal = "0.25.1-rc1"
any_vec = "0.14"
@@ -503,16 +498,14 @@ ec4rs = "1.1"
emojis = "0.6.1"
env_logger = "0.11"
exec = "0.3.1"
fancy-regex = "0.14.0"
fancy-regex = "0.16.0"
fork = "0.4.0"
futures = "0.3"
futures-batch = "0.6.1"
futures-lite = "1.13"
gh-workflow = { git = "https://github.com/zed-industries/gh-workflow", rev = "e5f883040530b4df36437f140084ee5cc7c1c9be" }
gh-workflow = { git = "https://github.com/zed-industries/gh-workflow", rev = "09acfdf2bd5c1d6254abefd609c808ff73547b2c" }
git2 = { version = "0.20.1", default-features = false }
globset = "0.4"
handlebars = "4.3"
hashbrown = "0.15.3"
heck = "0.5"
heed = { version = "0.21.0", features = ["read-txn-no-tls"] }
hex = "0.4.3"
@@ -529,7 +522,7 @@ indoc = "2"
inventory = "0.3.19"
itertools = "0.14.0"
json_dotpath = "1.1"
jsonschema = "0.30.0"
jsonschema = "0.37.0"
jsonwebtoken = "9.3"
jupyter-protocol = "0.10.0"
jupyter-websocket-client = "0.15.0"
@@ -548,7 +541,6 @@ nanoid = "0.4"
nbformat = "0.15.0"
nix = "0.29"
num-format = "0.4.4"
num-traits = "0.2"
objc = "0.2"
objc2-foundation = { version = "=0.3.1", default-features = false, features = [
"NSArray",
@@ -587,7 +579,6 @@ pet = { git = "https://github.com/microsoft/python-environment-tools.git", rev =
pet-conda = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da" }
pet-core = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da" }
pet-fs = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da" }
pet-pixi = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da" }
pet-poetry = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da" }
pet-reporter = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da" }
pet-virtualenv = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da" }
@@ -627,7 +618,6 @@ scap = { git = "https://github.com/zed-industries/scap", rev = "4afea48c3b002197
schemars = { version = "1.0", features = ["indexmap2"] }
semver = { version = "1.0", features = ["serde"] }
serde = { version = "1.0.221", features = ["derive", "rc"] }
serde_derive = "1.0.221"
serde_json = { version = "1.0.144", features = ["preserve_order", "raw_value"] }
serde_json_lenient = { version = "0.2", features = [
"preserve_order",
@@ -655,7 +645,7 @@ sysinfo = "0.37.0"
take-until = "0.2.0"
tempfile = "3.20.0"
thiserror = "2.0.12"
tiktoken-rs = { git = "https://github.com/zed-industries/tiktoken-rs", rev = "7249f999c5fdf9bf3cc5c288c964454e4dac0c00" }
tiktoken-rs = { git = "https://github.com/zed-industries/tiktoken-rs", rev = "2570c4387a8505fb8f1d3f3557454b474f1e8271" }
time = { version = "0.3", features = [
"macros",
"parsing",
@@ -671,7 +661,7 @@ toml = "0.8"
toml_edit = { version = "0.22", default-features = false, features = ["display", "parse", "serde"] }
tower-http = "0.4.4"
tree-sitter = { version = "0.25.10", features = ["wasm"] }
tree-sitter-bash = "0.25.0"
tree-sitter-bash = "0.25.1"
tree-sitter-c = "0.23"
tree-sitter-cpp = { git = "https://github.com/tree-sitter/tree-sitter-cpp", rev = "5cb9b693cfd7bfacab1d9ff4acac1a4150700609" }
tree-sitter-css = "0.23"
@@ -693,6 +683,7 @@ tree-sitter-ruby = "0.23"
tree-sitter-rust = "0.24"
tree-sitter-typescript = { git = "https://github.com/zed-industries/tree-sitter-typescript", rev = "e2c53597d6a5d9cf7bbe8dccde576fe1e46c5899" } # https://github.com/tree-sitter/tree-sitter-typescript/pull/347
tree-sitter-yaml = { git = "https://github.com/zed-industries/tree-sitter-yaml", rev = "baff0b51c64ef6a1fb1f8390f3ad6015b83ec13a" }
tracing = "0.1.40"
unicase = "2.6"
unicode-script = "0.5.7"
unicode-segmentation = "1.10"
@@ -716,7 +707,6 @@ wasmtime-wasi = "29"
wax = "0.6"
which = "6.0.0"
windows-core = "0.61"
wit-component = "0.221"
yawc = "0.2.5"
zeroize = "1.8"
zstd = "0.11"
@@ -798,20 +788,13 @@ settings_macros = { opt-level = 3 }
sqlez_macros = { opt-level = 3, codegen-units = 1 }
ui_macros = { opt-level = 3 }
util_macros = { opt-level = 3 }
serde_derive = { opt-level = 3 }
quote = { opt-level = 3 }
syn = { opt-level = 3 }
proc-macro2 = { opt-level = 3 }
# proc-macros end

taffy = { opt-level = 3 }
cranelift-codegen = { opt-level = 3 }
cranelift-codegen-meta = { opt-level = 3 }
cranelift-codegen-shared = { opt-level = 3 }
resvg = { opt-level = 3 }
rustybuzz = { opt-level = 3 }
ttf-parser = { opt-level = 3 }
wasmtime-cranelift = { opt-level = 3 }
wasmtime = { opt-level = 3 }
# Build single-source-file crates with cg=1 as it helps make `cargo build` of a whole workspace a bit faster
activity_indicator = { codegen-units = 1 }
@@ -820,12 +803,11 @@ breadcrumbs = { codegen-units = 1 }
collections = { codegen-units = 1 }
command_palette = { codegen-units = 1 }
command_palette_hooks = { codegen-units = 1 }
extension_cli = { codegen-units = 1 }
feature_flags = { codegen-units = 1 }
file_icons = { codegen-units = 1 }
fsevent = { codegen-units = 1 }
image_viewer = { codegen-units = 1 }
edit_prediction_button = { codegen-units = 1 }
edit_prediction_ui = { codegen-units = 1 }
install_cli = { codegen-units = 1 }
journal = { codegen-units = 1 }
json_schema_store = { codegen-units = 1 }
@@ -840,7 +822,6 @@ project_symbols = { codegen-units = 1 }
refineable = { codegen-units = 1 }
release_channel = { codegen-units = 1 }
reqwest_client = { codegen-units = 1 }
rich_text = { codegen-units = 1 }
session = { codegen-units = 1 }
snippet = { codegen-units = 1 }
snippets_ui = { codegen-units = 1 }
@@ -34,8 +34,4 @@ RUN apt-get update; \
linux-perf binutils
WORKDIR app
COPY --from=builder /app/collab /app/collab
COPY --from=builder /app/crates/collab/migrations /app/migrations
COPY --from=builder /app/crates/collab/migrations_llm /app/migrations_llm
ENV MIGRATIONS_PATH=/app/migrations
ENV LLM_DATABASE_MIGRATIONS_PATH=/app/migrations_llm
ENTRYPOINT ["/app/collab"]
@@ -53,6 +53,10 @@ extension
git
= @cole-miller
= @danilo-leal
= @dvdsk
= @kubkon
= @Anthony-Eid
= @cameron1024

gpui
= @Anthony-Eid
@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="none"><path stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.2" d="M14 11.333A6 6 0 0 0 4 6.867l-1 .9"/><path stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.333" d="M2 4.667v4h4"/><path fill="#000" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.2" d="M8 12a.667.667 0 1 0 0-1.333A.667.667 0 0 0 8 12Z"/></svg>
Before: 467 B
@@ -1 +1,5 @@
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="none"><path stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.2" d="M3.333 10 8 14.667 12.667 10M8 5.333v9.334"/><path fill="#000" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.2" d="M8 2.667a.667.667 0 1 0 0-1.334.667.667 0 0 0 0 1.334Z"/></svg>
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M2 13H5" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M11 13H14" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M11.5 8.5L8 12M8 12L4.5 8.5M8 12L8 3" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
</svg>
Before: 374 B, After: 443 B
@@ -1 +1,5 @@
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="none"><path stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.2" d="M3.333 6 8 1.333 12.667 6M8 10.667V1.333"/><path fill="#000" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.2" d="M8 13.333a.667.667 0 1 1 0 1.334.667.667 0 0 1 0-1.334Z"/></svg>
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M4.5 6.5L8 3M8 3L11.5 6.5M8 3V12" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M2 13H5" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M11 13H14" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
</svg>
Before: 373 B, After: 439 B
@@ -1 +1,5 @@
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="none"><path stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.2" d="M2 11.333a6 6 0 0 1 10-4.466l1 .9"/><path stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.333" d="M14 4.667v4h-4"/><path fill="#000" stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.2" d="M8 12a.667.667 0 1 1 0-1.333A.667.667 0 0 1 8 12Z"/></svg>
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M2 11.333C2.00118 10.1752 2.33729 9.04258 2.96777 8.07159C3.59826 7.10059 4.49621 6.33274 5.55331 5.86064C6.61041 5.38853 7.78152 5.23235 8.9254 5.41091C10.0693 5.58947 11.1371 6.09516 12 6.86698L13 7.76698" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M14 4.66699V8.66699H10" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M7 13H10" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
</svg>
Before: 468 B, After: 627 B
assets/icons/git_branch_plus.svg (new file, 8 lines)

@@ -0,0 +1,8 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M4 2V10" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M12 6C12.5304 6 13.0391 5.78929 13.4142 5.41421C13.7893 5.03914 14 4.53043 14 4C14 3.46957 13.7893 2.96086 13.4142 2.58579C13.0391 2.21071 12.5304 2 12 2C11.4696 2 10.9609 2.21071 10.5858 2.58579C10.2107 2.96086 10 3.46957 10 4C10 4.53043 10.2107 5.03914 10.5858 5.41421C10.9609 5.78929 11.4696 6 12 6Z" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M4 14C4.53043 14 5.03914 13.7893 5.41421 13.4142C5.78929 13.0391 6 12.5304 6 12C6 11.4696 5.78929 10.9609 5.41421 10.5858C5.03914 10.2107 4.53043 10 4 10C3.46957 10 2.96086 10.2107 2.58579 10.5858C2.21071 10.9609 2 11.4696 2 12C2 12.5304 2.21071 13.0391 2.58579 13.4142C2.96086 13.7893 3.46957 14 4 14Z" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M10 4C8.4087 4 6.88258 4.63214 5.75736 5.75736C4.63214 6.88258 4 8.4087 4 10" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M12 10V14" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M14 12H10" stroke="#C6CAD0" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
</svg>
After: 1.4 KiB
assets/icons/inception.svg (new file, 11 lines)

@@ -0,0 +1,11 @@
<svg width="28" height="28" viewBox="0 0 28 28" fill="none" id="svg1378540956_510">
<g clip-path="url(#svg1378540956_510_clip0_1_1506)" transform="translate(4, 4) scale(0.857)">
<path d="M17.0547 0.372066H8.52652L-0.00165176 8.90024V17.4284H8.52652V8.90024H17.0547V0.372066Z" fill="#1A1C20"></path>
<path d="M10.1992 27.6279H18.7274L27.2556 19.0998V10.5716H18.7274V19.0998H10.1992V27.6279Z" fill="#1A1C20"></path>
</g>
<defs>
<clipPath id="svg1378540956_510_clip0_1_1506">
<rect width="27.2559" height="27.2559" fill="white" transform="translate(0 0.37207)"></rect>
</clipPath>
</defs>
</svg>
After: 593 B
@@ -41,7 +41,7 @@
"ctrl-f11": "debugger::StepInto",
"shift-f11": "debugger::StepOut",
"f11": "zed::ToggleFullScreen",
"ctrl-alt-z": "edit_prediction::RateCompletions",
"ctrl-alt-z": "edit_prediction::RatePredictions",
"ctrl-alt-shift-i": "edit_prediction::ToggleMenu",
"ctrl-alt-l": "lsp_tool::ToggleMenu"
}
@@ -616,8 +616,8 @@
"ctrl-alt-super-p": "settings_profile_selector::Toggle",
"ctrl-t": "project_symbols::Toggle",
"ctrl-p": "file_finder::Toggle",
"ctrl-tab": "tab_switcher::Toggle",
"ctrl-shift-tab": ["tab_switcher::Toggle", { "select_last": true }],
"ctrl-tab": "tab_switcher::Toggle",
"ctrl-e": "file_finder::Toggle",
"f1": "command_palette::Toggle",
"ctrl-shift-p": "command_palette::Toggle",
@@ -811,7 +811,10 @@
"context": "PromptEditor",
"bindings": {
"ctrl-[": "agent::CyclePreviousInlineAssist",
"ctrl-]": "agent::CycleNextInlineAssist"
"ctrl-]": "agent::CycleNextInlineAssist",
"ctrl-shift-enter": "inline_assistant::ThumbsUpResult",
"ctrl-shift-backspace": "inline_assistant::ThumbsDownResult"

}
},
{
@@ -1322,25 +1325,18 @@
}
},
{
"context": "Zeta2Feedback > Editor",
"context": "EditPredictionContext > Editor",
"bindings": {
"enter": "editor::Newline",
"ctrl-enter up": "dev::Zeta2RatePredictionPositive",
"ctrl-enter down": "dev::Zeta2RatePredictionNegative"
}
},
{
"context": "Zeta2Context > Editor",
"bindings": {
"alt-left": "dev::Zeta2ContextGoBack",
"alt-right": "dev::Zeta2ContextGoForward"
"alt-left": "dev::EditPredictionContextGoBack",
"alt-right": "dev::EditPredictionContextGoForward"
}
},
{
"context": "GitBranchSelector || (GitBranchSelector > Picker > Editor)",
"use_key_equivalents": true,
"bindings": {
"ctrl-shift-backspace": "branch_picker::DeleteBranch"
"ctrl-shift-backspace": "branch_picker::DeleteBranch",
"ctrl-shift-i": "branch_picker::FilterRemotes"
}
}
]
@@ -47,9 +47,10 @@
"cmd-m": "zed::Minimize",
"fn-f": "zed::ToggleFullScreen",
"ctrl-cmd-f": "zed::ToggleFullScreen",
"ctrl-cmd-z": "edit_prediction::RateCompletions",
"ctrl-cmd-z": "edit_prediction::RatePredictions",
"ctrl-cmd-i": "edit_prediction::ToggleMenu",
"ctrl-cmd-l": "lsp_tool::ToggleMenu"
"ctrl-cmd-l": "lsp_tool::ToggleMenu",
"ctrl-cmd-c": "editor::DisplayCursorNames"
}
},
{
@@ -589,8 +590,7 @@
"cmd-.": "editor::ToggleCodeActions",
"cmd-k r": "editor::RevealInFileManager",
"cmd-k p": "editor::CopyPath",
"cmd-\\": "pane::SplitRight",
"ctrl-cmd-c": "editor::DisplayCursorNames"
"cmd-\\": "pane::SplitRight"
}
},
{
@@ -684,8 +684,8 @@
"ctrl-alt-cmd-p": "settings_profile_selector::Toggle",
"cmd-t": "project_symbols::Toggle",
"cmd-p": "file_finder::Toggle",
"ctrl-tab": "tab_switcher::Toggle",
"ctrl-shift-tab": ["tab_switcher::Toggle", { "select_last": true }],
"ctrl-tab": "tab_switcher::Toggle",
"cmd-shift-p": "command_palette::Toggle",
"cmd-shift-m": "diagnostics::Deploy",
"cmd-shift-e": "project_panel::ToggleFocus",
@@ -730,7 +730,8 @@
"context": "Workspace && debugger_running",
"use_key_equivalents": true,
"bindings": {
"f5": "zed::NoAction"
"f5": "zed::NoAction",
"f11": "debugger::StepInto"
}
},
{
@@ -877,7 +878,9 @@
"bindings": {
"cmd-alt-/": "agent::ToggleModelSelector",
"ctrl-[": "agent::CyclePreviousInlineAssist",
"ctrl-]": "agent::CycleNextInlineAssist"
"ctrl-]": "agent::CycleNextInlineAssist",
"cmd-shift-enter": "inline_assistant::ThumbsUpResult",
"cmd-shift-backspace": "inline_assistant::ThumbsDownResult"
}
},
{
@@ -1426,25 +1429,18 @@
}
},
{
"context": "Zeta2Feedback > Editor",
"context": "EditPredictionContext > Editor",
"bindings": {
"enter": "editor::Newline",
"cmd-enter up": "dev::Zeta2RatePredictionPositive",
"cmd-enter down": "dev::Zeta2RatePredictionNegative"
}
},
{
"context": "Zeta2Context > Editor",
"bindings": {
"alt-left": "dev::Zeta2ContextGoBack",
"alt-right": "dev::Zeta2ContextGoForward"
"alt-left": "dev::EditPredictionContextGoBack",
"alt-right": "dev::EditPredictionContextGoForward"
}
},
{
"context": "GitBranchSelector || (GitBranchSelector > Picker > Editor)",
"use_key_equivalents": true,
"bindings": {
"cmd-shift-backspace": "branch_picker::DeleteBranch"
"cmd-shift-backspace": "branch_picker::DeleteBranch",
"cmd-shift-i": "branch_picker::FilterRemotes"
}
}
]
@@ -24,7 +24,8 @@
"ctrl-alt-enter": ["picker::ConfirmInput", { "secondary": true }],
"ctrl-shift-w": "workspace::CloseWindow",
"shift-escape": "workspace::ToggleZoom",
"ctrl-o": "workspace::Open",
"ctrl-o": "workspace::OpenFiles",
"ctrl-k ctrl-o": "workspace::Open",
"ctrl-=": ["zed::IncreaseBufferFontSize", { "persist": false }],
"ctrl-shift-=": ["zed::IncreaseBufferFontSize", { "persist": false }],
"ctrl--": ["zed::DecreaseBufferFontSize", { "persist": false }],
@@ -36,12 +37,12 @@
"shift-f5": "debugger::Stop",
"ctrl-shift-f5": "debugger::RerunSession",
"f6": "debugger::Pause",
"f7": "debugger::StepOver",
"ctrl-f11": "debugger::StepInto",
"f10": "debugger::StepOver",
"shift-f11": "debugger::StepOut",
"f11": "zed::ToggleFullScreen",
"ctrl-shift-i": "edit_prediction::ToggleMenu",
"shift-alt-l": "lsp_tool::ToggleMenu"
"shift-alt-l": "lsp_tool::ToggleMenu",
"ctrl-shift-alt-c": "editor::DisplayCursorNames"
}
},
{
@@ -117,7 +118,7 @@
"alt-g m": "git::OpenModifiedFiles",
"menu": "editor::OpenContextMenu",
"shift-f10": "editor::OpenContextMenu",
"ctrl-shift-e": "editor::ToggleEditPrediction",
"ctrl-alt-e": "editor::ToggleEditPrediction",
"f9": "editor::ToggleBreakpoint",
"shift-f9": "editor::EditLogBreakpoint"
}
@@ -215,7 +216,7 @@
"context": "ContextEditor > Editor",
"use_key_equivalents": true,
"bindings": {
"ctrl-enter": "assistant::Assist",
"ctrl-i": "assistant::Assist",
"ctrl-s": "workspace::Save",
"ctrl-shift-,": "assistant::InsertIntoEditor",
"shift-enter": "assistant::Split",
@@ -500,10 +501,7 @@
"ctrl-shift-l": "editor::SelectAllMatches", // Select all occurrences of current selection
"ctrl-f2": "editor::SelectAllMatches", // Select all occurrences of current word
"ctrl-d": ["editor::SelectNext", { "replace_newest": false }], // editor.action.addSelectionToNextFindMatch / find_under_expand
"ctrl-shift-down": ["editor::SelectNext", { "replace_newest": false }], // editor.action.addSelectionToNextFindMatch
"ctrl-shift-up": ["editor::SelectPrevious", { "replace_newest": false }], // editor.action.addSelectionToPreviousFindMatch
"ctrl-k ctrl-d": ["editor::SelectNext", { "replace_newest": true }], // editor.action.moveSelectionToNextFindMatch / find_under_expand_skip
"ctrl-k ctrl-shift-d": ["editor::SelectPrevious", { "replace_newest": true }], // editor.action.moveSelectionToPreviousFindMatch
"ctrl-k ctrl-i": "editor::Hover",
"ctrl-k ctrl-b": "editor::BlameHover",
"ctrl-/": ["editor::ToggleComments", { "advance_downwards": false }],
@@ -512,12 +510,8 @@
"f2": "editor::Rename",
"f12": "editor::GoToDefinition",
"alt-f12": "editor::GoToDefinitionSplit",
"ctrl-shift-f10": "editor::GoToDefinitionSplit",
"ctrl-f12": "editor::GoToImplementation",
"shift-f12": "editor::GoToTypeDefinition",
"ctrl-alt-f12": "editor::GoToTypeDefinitionSplit",
"shift-alt-f12": "editor::FindAllReferences",
"ctrl-m": "editor::MoveToEnclosingBracket", // from jetbrains
"ctrl-shift-\\": "editor::MoveToEnclosingBracket",
"ctrl-shift-[": "editor::Fold",
"ctrl-shift-]": "editor::UnfoldLines",
@@ -541,7 +535,6 @@
"ctrl-k r": "editor::RevealInFileManager",
"ctrl-k p": "editor::CopyPath",
"ctrl-\\": "pane::SplitRight",
"ctrl-shift-alt-c": "editor::DisplayCursorNames",
"alt-.": "editor::GoToHunk",
"alt-,": "editor::GoToPreviousHunk"
}
@@ -616,8 +609,8 @@
"ctrl-alt-super-p": "settings_profile_selector::Toggle",
"ctrl-t": "project_symbols::Toggle",
"ctrl-p": "file_finder::Toggle",
"ctrl-tab": "tab_switcher::Toggle",
"ctrl-shift-tab": ["tab_switcher::Toggle", { "select_last": true }],
"ctrl-tab": "tab_switcher::Toggle",
"ctrl-e": "file_finder::Toggle",
"f1": "command_palette::Toggle",
"ctrl-shift-p": "command_palette::Toggle",
@@ -823,7 +816,9 @@
"use_key_equivalents": true,
"bindings": {
"ctrl-[": "agent::CyclePreviousInlineAssist",
"ctrl-]": "agent::CycleNextInlineAssist"
"ctrl-]": "agent::CycleNextInlineAssist",
"ctrl-shift-enter": "inline_assistant::ThumbsUpResult",
"ctrl-shift-delete": "inline_assistant::ThumbsDownResult"
}
},
{
@@ -1124,7 +1119,7 @@
"shift-insert": "terminal::Paste",
"ctrl-v": "terminal::Paste",
"ctrl-shift-v": "terminal::Paste",
"ctrl-enter": "assistant::InlineAssist",
"ctrl-i": "assistant::InlineAssist",
"alt-b": ["terminal::SendText", "\u001bb"],
"alt-f": ["terminal::SendText", "\u001bf"],
"alt-.": ["terminal::SendText", "\u001b."],
@@ -1136,6 +1131,8 @@
"ctrl-e": ["terminal::SendKeystroke", "ctrl-e"],
"ctrl-o": ["terminal::SendKeystroke", "ctrl-o"],
"ctrl-w": ["terminal::SendKeystroke", "ctrl-w"],
"ctrl-q": ["terminal::SendKeystroke", "ctrl-q"],
"ctrl-r": ["terminal::SendKeystroke", "ctrl-r"],
"ctrl-backspace": ["terminal::SendKeystroke", "ctrl-w"],
"ctrl-shift-a": "editor::SelectAll",
"ctrl-shift-f": "buffer_search::Deploy",
@@ -1349,25 +1346,18 @@
}
},
{
"context": "Zeta2Feedback > Editor",
"context": "EditPredictionContext > Editor",
"bindings": {
"enter": "editor::Newline",
"ctrl-enter up": "dev::Zeta2RatePredictionPositive",
"ctrl-enter down": "dev::Zeta2RatePredictionNegative"
}
},
{
"context": "Zeta2Context > Editor",
"bindings": {
"alt-left": "dev::Zeta2ContextGoBack",
"alt-right": "dev::Zeta2ContextGoForward"
"alt-left": "dev::EditPredictionContextGoBack",
"alt-right": "dev::EditPredictionContextGoForward"
}
},
{
"context": "GitBranchSelector || (GitBranchSelector > Picker > Editor)",
"use_key_equivalents": true,
"bindings": {
"ctrl-shift-backspace": "branch_picker::DeleteBranch"
"ctrl-shift-backspace": "branch_picker::DeleteBranch",
"ctrl-shift-i": "branch_picker::FilterRemotes"
}
}
]

@@ -857,6 +857,8 @@
"ctrl-w shift-right": "workspace::SwapPaneRight",
"ctrl-w shift-up": "workspace::SwapPaneUp",
"ctrl-w shift-down": "workspace::SwapPaneDown",
"ctrl-w x": "workspace::SwapPaneAdjacent",
"ctrl-w ctrl-x": "workspace::SwapPaneAdjacent",
"ctrl-w shift-h": "workspace::MovePaneLeft",
"ctrl-w shift-l": "workspace::MovePaneRight",
"ctrl-w shift-k": "workspace::MovePaneUp",
@@ -900,7 +902,11 @@
"context": "!Editor && !Terminal",
"bindings": {
":": "command_palette::Toggle",
"g /": "pane::DeploySearch"
"g /": "pane::DeploySearch",
"] b": "pane::ActivateNextItem",
"[ b": "pane::ActivatePreviousItem",
"] shift-b": "pane::ActivateLastItem",
"[ shift-b": ["pane::ActivateItem", 0]
}
},
{
assets/prompts/content_prompt_v2.hbs (new file, 44 lines)

@@ -0,0 +1,44 @@
{{#if language_name}}
Here's a file of {{language_name}} that the user is going to ask you to make an edit to.
{{else}}
Here's a file of text that the user is going to ask you to make an edit to.
{{/if}}

The section you'll need to rewrite is marked with <rewrite_this></rewrite_this> tags.

<document>
{{{document_content}}}
</document>

{{#if is_truncated}}
The context around the relevant section has been truncated (possibly in the middle of a line) for brevity.
{{/if}}

{{#if rewrite_section}}
And here's the section to rewrite based on that prompt again for reference:

<rewrite_this>
{{{rewrite_section}}}
</rewrite_this>

{{#if diagnostic_errors}}
Below are the diagnostic errors visible to the user. If the user requests problems to be fixed, use this information, but do not try to fix these errors if the user hasn't asked you to.

{{#each diagnostic_errors}}
<diagnostic_error>
<line_number>{{line_number}}</line_number>
<error_message>{{error_message}}</error_message>
<code_content>{{code_content}}</code_content>
</diagnostic_error>
{{/each}}
{{/if}}

{{/if}}

Only make changes that are necessary to fulfill the prompt, leave everything else as-is. All surrounding {{content_type}} will be preserved.

Start at the indentation level in the original file in the rewritten {{content_type}}.

You must use one of the provided tools to make the rewrite or to provide an explanation as to why the user's request cannot be fulfilled. It is an error if
you simply send back unstructured text. If you need to make a statement or ask a question you must use one of the tools to do so.
It is an error if you try to make a change that cannot be made simply by editing the rewrite_section.
@@ -1100,13 +1100,22 @@
"preview_tabs": {
// Whether preview tabs should be enabled.
// Preview tabs allow you to open files in preview mode, where they close automatically
// when you switch to another file unless you explicitly pin them.
// when you open another preview tab.
// This is useful for quickly viewing files without cluttering your workspace.
"enabled": true,
// Whether to open tabs in preview mode when opened from the project panel with a single click.
"enable_preview_from_project_panel": true,
// Whether to open tabs in preview mode when selected from the file finder.
"enable_preview_from_file_finder": false,
// Whether a preview tab gets replaced when code navigation is used to navigate away from the tab.
"enable_preview_from_code_navigation": false
// Whether to open tabs in preview mode when opened from a multibuffer.
"enable_preview_from_multibuffer": true,
// Whether to open tabs in preview mode when code navigation is used to open a multibuffer.
"enable_preview_multibuffer_from_code_navigation": false,
// Whether to open tabs in preview mode when code navigation is used to open a single file.
"enable_preview_file_from_code_navigation": true,
// Whether to keep tabs in preview mode when code navigation is used to navigate away from them.
// If `enable_preview_file_from_code_navigation` or `enable_preview_multibuffer_from_code_navigation` is also true, the new tab may replace the existing one.
"enable_keep_preview_on_code_navigation": false
},
// Settings related to the file finder.
"file_finder": {
@@ -1209,6 +1218,13 @@
"tab_size": 4,
// What debuggers are preferred by default for all languages.
"debuggers": [],
// Whether to enable word diff highlighting in the editor.
//
// When enabled, changed words within modified lines are highlighted
// to show exactly what changed.
//
// Default: true
"word_diff_enabled": true,
// Control what info is collected by Zed.
"telemetry": {
// Send debug info like crash reports.
@@ -45,6 +45,7 @@
"tab.inactive_background": "#1f2127ff",
"tab.active_background": "#0d1016ff",
"search.match_background": "#5ac2fe66",
"search.active_match_background": "#ea570166",
"panel.background": "#1f2127ff",
"panel.focused_border": "#5ac1feff",
"pane.focused_border": null,
@@ -436,6 +437,7 @@
"tab.inactive_background": "#ececedff",
"tab.active_background": "#fcfcfcff",
"search.match_background": "#3b9ee566",
"search.active_match_background": "#f88b3666",
"panel.background": "#ececedff",
"panel.focused_border": "#3b9ee5ff",
"pane.focused_border": null,
@@ -827,6 +829,7 @@
"tab.inactive_background": "#353944ff",
"tab.active_background": "#242835ff",
"search.match_background": "#73cffe66",
"search.active_match_background": "#fd722b66",
"panel.background": "#353944ff",
"panel.focused_border": null,
"pane.focused_border": null,
@@ -46,6 +46,7 @@
"tab.inactive_background": "#3a3735ff",
"tab.active_background": "#282828ff",
"search.match_background": "#83a59866",
"search.active_match_background": "#c09f3f66",
"panel.background": "#3a3735ff",
"panel.focused_border": "#83a598ff",
"pane.focused_border": null,
@@ -452,6 +453,7 @@
"tab.inactive_background": "#393634ff",
"tab.active_background": "#1d2021ff",
"search.match_background": "#83a59866",
"search.active_match_background": "#c9653666",
"panel.background": "#393634ff",
"panel.focused_border": "#83a598ff",
"pane.focused_border": null,
@@ -858,6 +860,7 @@
"tab.inactive_background": "#3b3735ff",
"tab.active_background": "#32302fff",
"search.match_background": "#83a59866",
"search.active_match_background": "#aea85166",
"panel.background": "#3b3735ff",
"panel.focused_border": null,
"pane.focused_border": null,
@@ -1264,6 +1267,7 @@
"tab.inactive_background": "#ecddb4ff",
"tab.active_background": "#fbf1c7ff",
"search.match_background": "#0b667866",
"search.active_match_background": "#ba2d1166",
"panel.background": "#ecddb4ff",
"panel.focused_border": null,
"pane.focused_border": null,
@@ -1670,6 +1674,7 @@
"tab.inactive_background": "#ecddb5ff",
"tab.active_background": "#f9f5d7ff",
"search.match_background": "#0b667866",
"search.active_match_background": "#dc351466",
"panel.background": "#ecddb5ff",
"panel.focused_border": null,
"pane.focused_border": null,
@@ -2076,6 +2081,7 @@
"tab.inactive_background": "#ecdcb3ff",
"tab.active_background": "#f2e5bcff",
"search.match_background": "#0b667866",
"search.active_match_background": "#d7331466",
"panel.background": "#ecdcb3ff",
"panel.focused_border": null,
"pane.focused_border": null,
@@ -45,6 +45,7 @@
"tab.inactive_background": "#2f343eff",
"tab.active_background": "#282c33ff",
"search.match_background": "#74ade866",
"search.active_match_background": "#e8af7466",
"panel.background": "#2f343eff",
"panel.focused_border": null,
"pane.focused_border": null,
@@ -98,6 +99,8 @@
"link_text.hover": "#74ade8ff",
"version_control.added": "#27a657ff",
"version_control.modified": "#d3b020ff",
"version_control.word_added": "#2EA04859",
"version_control.word_deleted": "#78081BCC",
"version_control.deleted": "#e06c76ff",
"version_control.conflict_marker.ours": "#a1c1811a",
"version_control.conflict_marker.theirs": "#74ade81a",
@@ -446,6 +449,7 @@
"tab.inactive_background": "#ebebecff",
"tab.active_background": "#fafafaff",
"search.match_background": "#5c79e266",
"search.active_match_background": "#d0a92366",
"panel.background": "#ebebecff",
"panel.focused_border": null,
"pane.focused_border": null,
@@ -499,6 +503,8 @@
"link_text.hover": "#5c78e2ff",
"version_control.added": "#27a657ff",
"version_control.modified": "#d3b020ff",
"version_control.word_added": "#2EA04859",
"version_control.word_deleted": "#F85149CC",
"version_control.deleted": "#e06c76ff",
"conflict": "#a48819ff",
"conflict.background": "#faf2e6ff",
@@ -201,17 +201,19 @@ impl ToolCall {
};
let mut content = Vec::with_capacity(tool_call.content.len());
for item in tool_call.content {
content.push(ToolCallContent::from_acp(
if let Some(item) = ToolCallContent::from_acp(
item,
language_registry.clone(),
path_style,
terminals,
cx,
)?);
)? {
content.push(item);
}
}

let result = Self {
id: tool_call.id,
id: tool_call.tool_call_id,
label: cx
.new(|cx| Markdown::new(title.into(), Some(language_registry.clone()), None, cx)),
kind: tool_call.kind,
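The loop above now pushes only conversions that return `Some`, instead of treating every incoming item as convertible. A minimal, self-contained sketch of that shape, using hypothetical `IncomingContent` and `Content` types rather than the crate's real ones, might look like this:

```rust
// Sketch of a fallible converter that can also decline unsupported variants.
// The types here are stand-ins, not the real acp_thread types.
#[derive(Debug)]
enum IncomingContent {
    Text(String),
    Unsupported,
}

#[derive(Debug)]
struct Content(String);

fn from_incoming(item: IncomingContent) -> Result<Option<Content>, String> {
    match item {
        IncomingContent::Text(text) => Ok(Some(Content(text))),
        // Unknown or unsupported variants are skipped rather than treated as errors.
        IncomingContent::Unsupported => Ok(None),
    }
}

fn collect(items: Vec<IncomingContent>) -> Result<Vec<Content>, String> {
    let mut out = Vec::with_capacity(items.len());
    for item in items {
        // `?` still propagates real failures; `if let Some` drops skipped items.
        if let Some(content) = from_incoming(item)? {
            out.push(content);
        }
    }
    Ok(out)
}

fn main() {
    let items = vec![
        IncomingContent::Text("hello".into()),
        IncomingContent::Unsupported,
    ];
    // Only the supported item survives the conversion.
    assert_eq!(collect(items).unwrap().len(), 1);
}
```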
@@ -241,6 +243,7 @@ impl ToolCall {
locations,
raw_input,
raw_output,
..
} = fields;

if let Some(kind) = kind {
@@ -262,21 +265,29 @@ impl ToolCall {
}

if let Some(content) = content {
let new_content_len = content.len();
let mut new_content_len = content.len();
let mut content = content.into_iter();

// Reuse existing content if we can
for (old, new) in self.content.iter_mut().zip(content.by_ref()) {
old.update_from_acp(new, language_registry.clone(), path_style, terminals, cx)?;
let valid_content =
old.update_from_acp(new, language_registry.clone(), path_style, terminals, cx)?;
if !valid_content {
new_content_len -= 1;
}
}
for new in content {
self.content.push(ToolCallContent::from_acp(
if let Some(new) = ToolCallContent::from_acp(
new,
language_registry.clone(),
path_style,
terminals,
cx,
)?)
)? {
self.content.push(new);
} else {
new_content_len -= 1;
}
}
self.content.truncate(new_content_len);
}
@@ -425,6 +436,7 @@ impl From<acp::ToolCallStatus> for ToolCallStatus {
acp::ToolCallStatus::InProgress => Self::InProgress,
acp::ToolCallStatus::Completed => Self::Completed,
acp::ToolCallStatus::Failed => Self::Failed,
_ => Self::Pending,
}
}
}
@@ -537,7 +549,7 @@ impl ContentBlock {
..
}) => Self::resource_link_md(&uri, path_style),
acp::ContentBlock::Image(image) => Self::image_md(&image),
acp::ContentBlock::Audio(_) | acp::ContentBlock::Resource(_) => String::new(),
_ => String::new(),
}
}
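The new `_` arms above give these matches a safe default when the upstream enum gains variants this code does not know about, which appears to be the motivation behind the agent-client-protocol version bump elsewhere in this comparison. A hedged sketch of the same pattern with stand-in enums (not the real acp types):

```rust
// When converting from an enum that may grow new variants (for example, one
// marked #[non_exhaustive] in an upstream crate), a trailing `_` arm maps
// anything unknown to a safe default instead of failing to match.
#[allow(dead_code)]
enum UpstreamStatus {
    Pending,
    InProgress,
    Completed,
    Failed,
    // Future releases of the upstream crate may add more variants here.
}

#[allow(dead_code)]
#[derive(Debug, PartialEq)]
enum LocalStatus {
    Pending,
    InProgress,
    Completed,
    Failed,
}

impl From<UpstreamStatus> for LocalStatus {
    fn from(status: UpstreamStatus) -> Self {
        match status {
            UpstreamStatus::InProgress => Self::InProgress,
            UpstreamStatus::Completed => Self::Completed,
            UpstreamStatus::Failed => Self::Failed,
            // Anything unrecognized is treated as still pending.
            _ => Self::Pending,
        }
    }
}

fn main() {
    assert_eq!(LocalStatus::from(UpstreamStatus::Pending), LocalStatus::Pending);
}
```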
@@ -591,15 +603,17 @@ impl ToolCallContent {
path_style: PathStyle,
terminals: &HashMap<acp::TerminalId, Entity<Terminal>>,
cx: &mut App,
) -> Result<Self> {
) -> Result<Option<Self>> {
match content {
acp::ToolCallContent::Content { content } => Ok(Self::ContentBlock(ContentBlock::new(
content,
&language_registry,
path_style,
cx,
))),
acp::ToolCallContent::Diff { diff } => Ok(Self::Diff(cx.new(|cx| {
acp::ToolCallContent::Content(acp::Content { content, .. }) => {
Ok(Some(Self::ContentBlock(ContentBlock::new(
content,
&language_registry,
path_style,
cx,
))))
}
acp::ToolCallContent::Diff(diff) => Ok(Some(Self::Diff(cx.new(|cx| {
Diff::finalized(
diff.path.to_string_lossy().into_owned(),
diff.old_text,
@@ -607,12 +621,13 @@ impl ToolCallContent {
language_registry,
cx,
)
}))),
acp::ToolCallContent::Terminal { terminal_id } => terminals
})))),
acp::ToolCallContent::Terminal(acp::Terminal { terminal_id, .. }) => terminals
.get(&terminal_id)
.cloned()
.map(Self::Terminal)
.map(|terminal| Some(Self::Terminal(terminal)))
.ok_or_else(|| anyhow::anyhow!("Terminal with id `{}` not found", terminal_id)),
_ => Ok(None),
}
}

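The signature change from `Result<Self>` to `Result<Option<Self>>` lets the caller distinguish three cases: a convertible item, an item to be silently ignored (the catch-all `_ => Ok(None)` arm, which keeps the client working when the protocol enum gains variants it does not render), and a genuine error such as an unknown terminal id. A small sketch of that shape, using toy types rather than the real `acp` ones:

```rust
use std::collections::HashMap;

enum WireContent {
    Text(String),
    Terminal(u32),
    Unknown, // stands in for variants this client does not render
}

enum RenderedContent {
    Text(String),
    Terminal(u32),
}

fn from_wire(
    content: WireContent,
    terminals: &HashMap<u32, String>,
) -> Result<Option<RenderedContent>, String> {
    match content {
        WireContent::Text(text) => Ok(Some(RenderedContent::Text(text))),
        // A missing terminal is a real error, not something to ignore.
        WireContent::Terminal(id) => terminals
            .get(&id)
            .map(|_| Some(RenderedContent::Terminal(id)))
            .ok_or_else(|| format!("Terminal with id `{id}` not found")),
        // Anything unrecognized is skipped rather than failing the whole tool call.
        _ => Ok(None),
    }
}

fn main() {
    let terminals = HashMap::from([(7, "cargo test".to_string())]);
    if let Ok(Some(RenderedContent::Text(text))) = from_wire(WireContent::Text("hi".into()), &terminals) {
        assert_eq!(text, "hi");
    } else {
        panic!("text content should convert");
    }
    assert!(matches!(from_wire(WireContent::Unknown, &terminals), Ok(None)));
    assert!(from_wire(WireContent::Terminal(3), &terminals).is_err());
}
```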
@@ -623,9 +638,9 @@ impl ToolCallContent {
path_style: PathStyle,
terminals: &HashMap<acp::TerminalId, Entity<Terminal>>,
cx: &mut App,
) -> Result<()> {
) -> Result<bool> {
let needs_update = match (&self, &new) {
(Self::Diff(old_diff), acp::ToolCallContent::Diff { diff: new_diff }) => {
(Self::Diff(old_diff), acp::ToolCallContent::Diff(new_diff)) => {
old_diff.read(cx).needs_update(
new_diff.old_text.as_deref().unwrap_or(""),
&new_diff.new_text,
@@ -635,10 +650,14 @@ impl ToolCallContent {
_ => true,
};

if needs_update {
*self = Self::from_acp(new, language_registry, path_style, terminals, cx)?;
if let Some(update) = Self::from_acp(new, language_registry, path_style, terminals, cx)? {
if needs_update {
*self = update;
}
Ok(true)
} else {
Ok(false)
}
Ok(())
}

pub fn to_markdown(&self, cx: &App) -> String {
@@ -660,7 +679,7 @@ pub enum ToolCallUpdate {
impl ToolCallUpdate {
fn id(&self) -> &acp::ToolCallId {
match self {
Self::UpdateFields(update) => &update.id,
Self::UpdateFields(update) => &update.tool_call_id,
Self::UpdateDiff(diff) => &diff.id,
Self::UpdateTerminal(terminal) => &terminal.id,
}
@@ -732,6 +751,7 @@ impl Plan {
|
||||
acp::PlanEntryStatus::Completed => {
|
||||
stats.completed += 1;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1154,6 +1174,7 @@ impl AcpThread {
|
||||
current_mode_id,
|
||||
..
|
||||
}) => cx.emit(AcpThreadEvent::ModeUpdated(current_mode_id)),
|
||||
_ => {}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
@@ -1287,11 +1308,7 @@ impl AcpThread {
|
||||
label: cx.new(|cx| Markdown::new("Tool call not found".into(), None, None, cx)),
|
||||
kind: acp::ToolKind::Fetch,
|
||||
content: vec![ToolCallContent::ContentBlock(ContentBlock::new(
|
||||
acp::ContentBlock::Text(acp::TextContent {
|
||||
text: "Tool call not found".to_string(),
|
||||
annotations: None,
|
||||
meta: None,
|
||||
}),
|
||||
"Tool call not found".into(),
|
||||
&languages,
|
||||
path_style,
|
||||
cx,
|
||||
@@ -1315,7 +1332,7 @@ impl AcpThread {
|
||||
let location_updated = update.fields.locations.is_some();
|
||||
call.update_fields(update.fields, languages, path_style, &self.terminals, cx)?;
|
||||
if location_updated {
|
||||
self.resolve_locations(update.id, cx);
|
||||
self.resolve_locations(update.tool_call_id, cx);
|
||||
}
|
||||
}
|
||||
ToolCallUpdate::UpdateDiff(update) => {
|
||||
@@ -1353,7 +1370,7 @@ impl AcpThread {
|
||||
) -> Result<(), acp::Error> {
|
||||
let language_registry = self.project.read(cx).languages().clone();
|
||||
let path_style = self.project.read(cx).path_style(cx);
|
||||
let id = update.id.clone();
|
||||
let id = update.tool_call_id.clone();
|
||||
|
||||
let agent = self.connection().telemetry_id();
|
||||
let session = self.session_id();
|
||||
@@ -1518,16 +1535,16 @@ impl AcpThread {
|
||||
// some tools would (incorrectly) continue to auto-accept.
|
||||
if let Some(allow_once_option) = options.iter().find_map(|option| {
|
||||
if matches!(option.kind, acp::PermissionOptionKind::AllowOnce) {
|
||||
Some(option.id.clone())
|
||||
Some(option.option_id.clone())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}) {
|
||||
self.upsert_tool_call_inner(tool_call, ToolCallStatus::Pending, cx)?;
|
||||
return Ok(async {
|
||||
acp::RequestPermissionOutcome::Selected {
|
||||
option_id: allow_once_option,
|
||||
}
|
||||
acp::RequestPermissionOutcome::Selected(acp::SelectedPermissionOutcome::new(
|
||||
allow_once_option,
|
||||
))
|
||||
}
|
||||
.boxed());
|
||||
}
|
||||
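The hunk above keeps the existing auto-accept behaviour while adapting to the newer protocol API: the option id now lives in `option.option_id`, and the chosen outcome is wrapped in `acp::SelectedPermissionOutcome::new(...)`. A condensed, standalone sketch of the lookup itself, with toy types standing in for the `acp` ones (only the field accesses mirror the diff):

```rust
#[derive(Clone, Copy, PartialEq)]
enum PermissionKind {
    AllowOnce,
    RejectOnce,
}

struct PermissionOption {
    id: String,
    kind: PermissionKind,
}

// Pick the first AllowOnce option's id, mirroring the `find_map` in the hunk above.
fn auto_allow_once(options: &[PermissionOption]) -> Option<String> {
    options.iter().find_map(|option| {
        (option.kind == PermissionKind::AllowOnce).then(|| option.id.clone())
    })
}

fn main() {
    let options = vec![
        PermissionOption { id: "reject".into(), kind: PermissionKind::RejectOnce },
        PermissionOption { id: "allow-once".into(), kind: PermissionKind::AllowOnce },
    ];
    assert_eq!(auto_allow_once(&options), Some("allow-once".to_string()));
}
```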
@@ -1543,7 +1560,9 @@ impl AcpThread {
|
||||
|
||||
let fut = async {
|
||||
match rx.await {
|
||||
Ok(option) => acp::RequestPermissionOutcome::Selected { option_id: option },
|
||||
Ok(option) => acp::RequestPermissionOutcome::Selected(
|
||||
acp::SelectedPermissionOutcome::new(option),
|
||||
),
|
||||
Err(oneshot::Canceled) => acp::RequestPermissionOutcome::Cancelled,
|
||||
}
|
||||
}
|
||||
@@ -1570,6 +1589,7 @@ impl AcpThread {
|
||||
acp::PermissionOptionKind::AllowOnce | acp::PermissionOptionKind::AllowAlways => {
|
||||
ToolCallStatus::InProgress
|
||||
}
|
||||
_ => ToolCallStatus::InProgress,
|
||||
};
|
||||
|
||||
let curr_status = mem::replace(&mut call.status, new_status);
|
||||
@@ -1648,14 +1668,7 @@ impl AcpThread {
|
||||
message: &str,
|
||||
cx: &mut Context<Self>,
|
||||
) -> BoxFuture<'static, Result<()>> {
|
||||
self.send(
|
||||
vec![acp::ContentBlock::Text(acp::TextContent {
|
||||
text: message.to_string(),
|
||||
annotations: None,
|
||||
meta: None,
|
||||
})],
|
||||
cx,
|
||||
)
|
||||
self.send(vec![message.into()], cx)
|
||||
}
|
||||
|
||||
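Collapsing the hand-built `acp::ContentBlock::Text(acp::TextContent { ... })` into `self.send(vec![message.into()], cx)` presumably relies on a `From<&str>` (or `From<String>`) impl for `acp::ContentBlock`; the diff only shows the call sites, so treat that as an assumption. The ergonomic trick itself looks like this, with toy types:

```rust
struct TextContent {
    text: String,
}

enum ContentBlock {
    Text(TextContent),
}

// A conversion like this is what makes `vec![message.into()]` read so tersely.
impl From<&str> for ContentBlock {
    fn from(text: &str) -> Self {
        ContentBlock::Text(TextContent { text: text.to_string() })
    }
}

fn main() {
    let message = "Tool call not found";
    let blocks: Vec<ContentBlock> = vec![message.into()];
    match &blocks[0] {
        ContentBlock::Text(t) => assert_eq!(t.text, "Tool call not found"),
    }
}
```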
pub fn send(
|
||||
@@ -1669,11 +1682,7 @@ impl AcpThread {
|
||||
self.project.read(cx).path_style(cx),
|
||||
cx,
|
||||
);
|
||||
let request = acp::PromptRequest {
|
||||
prompt: message.clone(),
|
||||
session_id: self.session_id.clone(),
|
||||
meta: None,
|
||||
};
|
||||
let request = acp::PromptRequest::new(self.session_id.clone(), message.clone());
|
||||
let git_store = self.project.read(cx).git_store().clone();
|
||||
|
||||
let message_id = if self.connection.truncate(&self.session_id, cx).is_some() {
|
||||
@@ -1765,7 +1774,7 @@ impl AcpThread {
|
||||
result,
|
||||
Ok(Ok(acp::PromptResponse {
|
||||
stop_reason: acp::StopReason::Cancelled,
|
||||
meta: None,
|
||||
..
|
||||
}))
|
||||
);
|
||||
|
||||
@@ -1781,7 +1790,7 @@ impl AcpThread {
|
||||
// Handle refusal - distinguish between user prompt and tool call refusals
|
||||
if let Ok(Ok(acp::PromptResponse {
|
||||
stop_reason: acp::StopReason::Refusal,
|
||||
meta: _,
|
||||
..
|
||||
})) = result
|
||||
{
|
||||
if let Some((user_msg_ix, _)) = this.last_user_message() {
|
||||
@@ -2017,7 +2026,7 @@ impl AcpThread {
|
||||
})?;
|
||||
Ok(project.open_buffer(path, cx))
|
||||
})
|
||||
.map_err(|e| acp::Error::internal_error().with_data(e.to_string()))
|
||||
.map_err(|e| acp::Error::internal_error().data(e.to_string()))
|
||||
.flatten()?;
|
||||
|
||||
let buffer = load.await?;
|
||||
@@ -2050,7 +2059,7 @@ impl AcpThread {
|
||||
let start_position = Point::new(line, 0);
|
||||
|
||||
if start_position > max_point {
|
||||
return Err(acp::Error::invalid_params().with_data(format!(
|
||||
return Err(acp::Error::invalid_params().data(format!(
|
||||
"Attempting to read beyond the end of the file, line {}:{}",
|
||||
max_point.row + 1,
|
||||
max_point.column
|
||||
@@ -2202,7 +2211,7 @@ impl AcpThread {
|
||||
let language_registry = project.read(cx).languages().clone();
|
||||
let is_windows = project.read(cx).path_style(cx).is_windows();
|
||||
|
||||
let terminal_id = acp::TerminalId(Uuid::new_v4().to_string().into());
|
||||
let terminal_id = acp::TerminalId::new(Uuid::new_v4().to_string());
|
||||
let terminal_task = cx.spawn({
|
||||
let terminal_id = terminal_id.clone();
|
||||
async move |_this, cx| {
|
||||
@@ -2412,7 +2421,7 @@ mod tests {
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let terminal_id = acp::TerminalId(uuid::Uuid::new_v4().to_string().into());
|
||||
let terminal_id = acp::TerminalId::new(uuid::Uuid::new_v4().to_string());
|
||||
|
||||
// Send Output BEFORE Created - should be buffered by acp_thread
|
||||
thread.update(cx, |thread, cx| {
|
||||
@@ -2474,7 +2483,7 @@ mod tests {
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let terminal_id = acp::TerminalId(uuid::Uuid::new_v4().to_string().into());
|
||||
let terminal_id = acp::TerminalId::new(uuid::Uuid::new_v4().to_string());
|
||||
|
||||
// Send Output BEFORE Created
|
||||
thread.update(cx, |thread, cx| {
|
||||
@@ -2492,11 +2501,7 @@ mod tests {
|
||||
thread.on_terminal_provider_event(
|
||||
TerminalProviderEvent::Exit {
|
||||
terminal_id: terminal_id.clone(),
|
||||
status: acp::TerminalExitStatus {
|
||||
exit_code: Some(0),
|
||||
signal: None,
|
||||
meta: None,
|
||||
},
|
||||
status: acp::TerminalExitStatus::new().exit_code(0),
|
||||
},
|
||||
cx,
|
||||
);
|
||||
@@ -2553,15 +2558,7 @@ mod tests {
|
||||
|
||||
// Test creating a new user message
|
||||
thread.update(cx, |thread, cx| {
|
||||
thread.push_user_content_block(
|
||||
None,
|
||||
acp::ContentBlock::Text(acp::TextContent {
|
||||
annotations: None,
|
||||
text: "Hello, ".to_string(),
|
||||
meta: None,
|
||||
}),
|
||||
cx,
|
||||
);
|
||||
thread.push_user_content_block(None, "Hello, ".into(), cx);
|
||||
});
|
||||
|
||||
thread.update(cx, |thread, cx| {
|
||||
@@ -2577,15 +2574,7 @@ mod tests {
|
||||
// Test appending to existing user message
|
||||
let message_1_id = UserMessageId::new();
|
||||
thread.update(cx, |thread, cx| {
|
||||
thread.push_user_content_block(
|
||||
Some(message_1_id.clone()),
|
||||
acp::ContentBlock::Text(acp::TextContent {
|
||||
annotations: None,
|
||||
text: "world!".to_string(),
|
||||
meta: None,
|
||||
}),
|
||||
cx,
|
||||
);
|
||||
thread.push_user_content_block(Some(message_1_id.clone()), "world!".into(), cx);
|
||||
});
|
||||
|
||||
thread.update(cx, |thread, cx| {
|
||||
@@ -2600,26 +2589,14 @@ mod tests {
|
||||
|
||||
// Test creating new user message after assistant message
|
||||
thread.update(cx, |thread, cx| {
|
||||
thread.push_assistant_content_block(
|
||||
acp::ContentBlock::Text(acp::TextContent {
|
||||
annotations: None,
|
||||
text: "Assistant response".to_string(),
|
||||
meta: None,
|
||||
}),
|
||||
false,
|
||||
cx,
|
||||
);
|
||||
thread.push_assistant_content_block("Assistant response".into(), false, cx);
|
||||
});
|
||||
|
||||
let message_2_id = UserMessageId::new();
|
||||
thread.update(cx, |thread, cx| {
|
||||
thread.push_user_content_block(
|
||||
Some(message_2_id.clone()),
|
||||
acp::ContentBlock::Text(acp::TextContent {
|
||||
annotations: None,
|
||||
text: "New user message".to_string(),
|
||||
meta: None,
|
||||
}),
|
||||
"New user message".into(),
|
||||
cx,
|
||||
);
|
||||
});
|
||||
@@ -2647,27 +2624,22 @@ mod tests {
|
||||
thread.update(&mut cx, |thread, cx| {
|
||||
thread
|
||||
.handle_session_update(
|
||||
acp::SessionUpdate::AgentThoughtChunk(acp::ContentChunk {
|
||||
content: "Thinking ".into(),
|
||||
meta: None,
|
||||
}),
|
||||
acp::SessionUpdate::AgentThoughtChunk(acp::ContentChunk::new(
|
||||
"Thinking ".into(),
|
||||
)),
|
||||
cx,
|
||||
)
|
||||
.unwrap();
|
||||
thread
|
||||
.handle_session_update(
|
||||
acp::SessionUpdate::AgentThoughtChunk(acp::ContentChunk {
|
||||
content: "hard!".into(),
|
||||
meta: None,
|
||||
}),
|
||||
acp::SessionUpdate::AgentThoughtChunk(acp::ContentChunk::new(
|
||||
"hard!".into(),
|
||||
)),
|
||||
cx,
|
||||
)
|
||||
.unwrap();
|
||||
})?;
|
||||
Ok(acp::PromptResponse {
|
||||
stop_reason: acp::StopReason::EndTurn,
|
||||
meta: None,
|
||||
})
|
||||
Ok(acp::PromptResponse::new(acp::StopReason::EndTurn))
|
||||
}
|
||||
.boxed_local()
|
||||
},
|
||||
@@ -2735,10 +2707,7 @@ mod tests {
|
||||
.unwrap()
|
||||
.await
|
||||
.unwrap();
|
||||
Ok(acp::PromptResponse {
|
||||
stop_reason: acp::StopReason::EndTurn,
|
||||
meta: None,
|
||||
})
|
||||
Ok(acp::PromptResponse::new(acp::StopReason::EndTurn))
|
||||
}
|
||||
.boxed_local()
|
||||
},
|
||||
@@ -2960,7 +2929,7 @@ mod tests {
|
||||
.await
|
||||
.unwrap_err();
|
||||
|
||||
assert_eq!(err.code, acp::ErrorCode::RESOURCE_NOT_FOUND.code);
|
||||
assert_eq!(err.code, acp::ErrorCode::ResourceNotFound);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
@@ -2969,7 +2938,7 @@ mod tests {
|
||||
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
let project = Project::test(fs, [], cx).await;
|
||||
let id = acp::ToolCallId("test".into());
|
||||
let id = acp::ToolCallId::new("test");
|
||||
|
||||
let connection = Rc::new(FakeAgentConnection::new().on_user_message({
|
||||
let id = id.clone();
|
||||
@@ -2979,26 +2948,17 @@ mod tests {
|
||||
thread
|
||||
.update(&mut cx, |thread, cx| {
|
||||
thread.handle_session_update(
|
||||
acp::SessionUpdate::ToolCall(acp::ToolCall {
|
||||
id: id.clone(),
|
||||
title: "Label".into(),
|
||||
kind: acp::ToolKind::Fetch,
|
||||
status: acp::ToolCallStatus::InProgress,
|
||||
content: vec![],
|
||||
locations: vec![],
|
||||
raw_input: None,
|
||||
raw_output: None,
|
||||
meta: None,
|
||||
}),
|
||||
acp::SessionUpdate::ToolCall(
|
||||
acp::ToolCall::new(id.clone(), "Label")
|
||||
.kind(acp::ToolKind::Fetch)
|
||||
.status(acp::ToolCallStatus::InProgress),
|
||||
),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
Ok(acp::PromptResponse {
|
||||
stop_reason: acp::StopReason::EndTurn,
|
||||
meta: None,
|
||||
})
|
||||
Ok(acp::PromptResponse::new(acp::StopReason::EndTurn))
|
||||
}
|
||||
.boxed_local()
|
||||
}
|
||||
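Throughout the tests, exhaustive struct literals such as `acp::ToolCall { id, title, kind, status, content, locations, raw_input, raw_output, meta }` are replaced by `acp::ToolCall::new(id, title)` plus chained setters, so newly added optional fields (like `meta`) no longer break every call site. A generic sketch of that constructor-plus-setters pattern with a toy type (not the real `acp::ToolCall`):

```rust
#[derive(Debug, Default, Clone, PartialEq)]
enum Status {
    #[default]
    Pending,
    InProgress,
}

#[derive(Debug, Default)]
struct ToolCall {
    id: String,
    title: String,
    status: Status,
    raw_input: Option<String>,
    // New optional fields can be added here without touching existing call sites.
}

impl ToolCall {
    // Required fields go through the constructor; everything else has a default.
    fn new(id: impl Into<String>, title: impl Into<String>) -> Self {
        Self { id: id.into(), title: title.into(), ..Default::default() }
    }

    fn status(mut self, status: Status) -> Self {
        self.status = status;
        self
    }

    fn raw_input(mut self, raw_input: impl Into<String>) -> Self {
        self.raw_input = Some(raw_input.into());
        self
    }
}

fn main() {
    let call = ToolCall::new("tool1", "Test Tool")
        .status(Status::InProgress)
        .raw_input(r#"{"query": "test"}"#);
    assert_eq!(call.id, "tool1");
    assert_eq!(call.title, "Test Tool");
    assert_eq!(call.status, Status::InProgress);
    assert!(call.raw_input.is_some());
}
```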
@@ -3040,14 +3000,10 @@ mod tests {
|
||||
thread
|
||||
.update(cx, |thread, cx| {
|
||||
thread.handle_session_update(
|
||||
acp::SessionUpdate::ToolCallUpdate(acp::ToolCallUpdate {
|
||||
acp::SessionUpdate::ToolCallUpdate(acp::ToolCallUpdate::new(
|
||||
id,
|
||||
fields: acp::ToolCallUpdateFields {
|
||||
status: Some(acp::ToolCallStatus::Completed),
|
||||
..Default::default()
|
||||
},
|
||||
meta: None,
|
||||
}),
|
||||
acp::ToolCallUpdateFields::new().status(acp::ToolCallStatus::Completed),
|
||||
)),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
@@ -3079,33 +3035,21 @@ mod tests {
|
||||
thread
|
||||
.update(&mut cx, |thread, cx| {
|
||||
thread.handle_session_update(
|
||||
acp::SessionUpdate::ToolCall(acp::ToolCall {
|
||||
id: acp::ToolCallId("test".into()),
|
||||
title: "Label".into(),
|
||||
kind: acp::ToolKind::Edit,
|
||||
status: acp::ToolCallStatus::Completed,
|
||||
content: vec![acp::ToolCallContent::Diff {
|
||||
diff: acp::Diff {
|
||||
path: "/test/test.txt".into(),
|
||||
old_text: None,
|
||||
new_text: "foo".into(),
|
||||
meta: None,
|
||||
},
|
||||
}],
|
||||
locations: vec![],
|
||||
raw_input: None,
|
||||
raw_output: None,
|
||||
meta: None,
|
||||
}),
|
||||
acp::SessionUpdate::ToolCall(
|
||||
acp::ToolCall::new("test", "Label")
|
||||
.kind(acp::ToolKind::Edit)
|
||||
.status(acp::ToolCallStatus::Completed)
|
||||
.content(vec![acp::ToolCallContent::Diff(acp::Diff::new(
|
||||
"/test/test.txt",
|
||||
"foo",
|
||||
))]),
|
||||
),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
Ok(acp::PromptResponse {
|
||||
stop_reason: acp::StopReason::EndTurn,
|
||||
meta: None,
|
||||
})
|
||||
Ok(acp::PromptResponse::new(acp::StopReason::EndTurn))
|
||||
}
|
||||
.boxed_local()
|
||||
}
|
||||
@@ -3158,18 +3102,14 @@ mod tests {
|
||||
thread.update(&mut cx, |thread, cx| {
|
||||
thread
|
||||
.handle_session_update(
|
||||
acp::SessionUpdate::AgentMessageChunk(acp::ContentChunk {
|
||||
content: content.text.to_uppercase().into(),
|
||||
meta: None,
|
||||
}),
|
||||
acp::SessionUpdate::AgentMessageChunk(acp::ContentChunk::new(
|
||||
content.text.to_uppercase().into(),
|
||||
)),
|
||||
cx,
|
||||
)
|
||||
.unwrap();
|
||||
})?;
|
||||
Ok(acp::PromptResponse {
|
||||
stop_reason: acp::StopReason::EndTurn,
|
||||
meta: None,
|
||||
})
|
||||
Ok(acp::PromptResponse::new(acp::StopReason::EndTurn))
|
||||
}
|
||||
.boxed_local()
|
||||
}
|
||||
@@ -3325,34 +3265,22 @@ mod tests {
|
||||
thread.update(&mut cx, |thread, cx| {
|
||||
thread
|
||||
.handle_session_update(
|
||||
acp::SessionUpdate::ToolCall(acp::ToolCall {
|
||||
id: acp::ToolCallId("tool1".into()),
|
||||
title: "Test Tool".into(),
|
||||
kind: acp::ToolKind::Fetch,
|
||||
status: acp::ToolCallStatus::Completed,
|
||||
content: vec![],
|
||||
locations: vec![],
|
||||
raw_input: Some(serde_json::json!({"query": "test"})),
|
||||
raw_output: Some(
|
||||
serde_json::json!({"result": "inappropriate content"}),
|
||||
),
|
||||
meta: None,
|
||||
}),
|
||||
acp::SessionUpdate::ToolCall(
|
||||
acp::ToolCall::new("tool1", "Test Tool")
|
||||
.kind(acp::ToolKind::Fetch)
|
||||
.status(acp::ToolCallStatus::Completed)
|
||||
.raw_input(serde_json::json!({"query": "test"}))
|
||||
.raw_output(serde_json::json!({"result": "inappropriate content"})),
|
||||
),
|
||||
cx,
|
||||
)
|
||||
.unwrap();
|
||||
})?;
|
||||
|
||||
// Now return refusal because of the tool result
|
||||
Ok(acp::PromptResponse {
|
||||
stop_reason: acp::StopReason::Refusal,
|
||||
meta: None,
|
||||
})
|
||||
Ok(acp::PromptResponse::new(acp::StopReason::Refusal))
|
||||
} else {
|
||||
Ok(acp::PromptResponse {
|
||||
stop_reason: acp::StopReason::EndTurn,
|
||||
meta: None,
|
||||
})
|
||||
Ok(acp::PromptResponse::new(acp::StopReason::EndTurn))
|
||||
}
|
||||
}
|
||||
.boxed_local()
|
||||
@@ -3380,16 +3308,7 @@ mod tests {
|
||||
});
|
||||
|
||||
// Send a user message - this will trigger tool call and then refusal
|
||||
let send_task = thread.update(cx, |thread, cx| {
|
||||
thread.send(
|
||||
vec![acp::ContentBlock::Text(acp::TextContent {
|
||||
text: "Hello".into(),
|
||||
annotations: None,
|
||||
meta: None,
|
||||
})],
|
||||
cx,
|
||||
)
|
||||
});
|
||||
let send_task = thread.update(cx, |thread, cx| thread.send(vec!["Hello".into()], cx));
|
||||
cx.background_executor.spawn(send_task).detach();
|
||||
cx.run_until_parked();
|
||||
|
||||
@@ -3435,21 +3354,11 @@ mod tests {
|
||||
let refuse_next = refuse_next.clone();
|
||||
move |_request, _thread, _cx| {
|
||||
if refuse_next.load(SeqCst) {
|
||||
async move {
|
||||
Ok(acp::PromptResponse {
|
||||
stop_reason: acp::StopReason::Refusal,
|
||||
meta: None,
|
||||
})
|
||||
}
|
||||
.boxed_local()
|
||||
async move { Ok(acp::PromptResponse::new(acp::StopReason::Refusal)) }
|
||||
.boxed_local()
|
||||
} else {
|
||||
async move {
|
||||
Ok(acp::PromptResponse {
|
||||
stop_reason: acp::StopReason::EndTurn,
|
||||
meta: None,
|
||||
})
|
||||
}
|
||||
.boxed_local()
|
||||
async move { Ok(acp::PromptResponse::new(acp::StopReason::EndTurn)) }
|
||||
.boxed_local()
|
||||
}
|
||||
}
|
||||
}));
|
||||
@@ -3506,10 +3415,7 @@ mod tests {
|
||||
let refuse_next = refuse_next.clone();
|
||||
async move {
|
||||
if refuse_next.load(SeqCst) {
|
||||
return Ok(acp::PromptResponse {
|
||||
stop_reason: acp::StopReason::Refusal,
|
||||
meta: None,
|
||||
});
|
||||
return Ok(acp::PromptResponse::new(acp::StopReason::Refusal));
|
||||
}
|
||||
|
||||
let acp::ContentBlock::Text(content) = &request.prompt[0] else {
|
||||
@@ -3518,18 +3424,14 @@ mod tests {
|
||||
thread.update(&mut cx, |thread, cx| {
|
||||
thread
|
||||
.handle_session_update(
|
||||
acp::SessionUpdate::AgentMessageChunk(acp::ContentChunk {
|
||||
content: content.text.to_uppercase().into(),
|
||||
meta: None,
|
||||
}),
|
||||
acp::SessionUpdate::AgentMessageChunk(acp::ContentChunk::new(
|
||||
content.text.to_uppercase().into(),
|
||||
)),
|
||||
cx,
|
||||
)
|
||||
.unwrap();
|
||||
})?;
|
||||
Ok(acp::PromptResponse {
|
||||
stop_reason: acp::StopReason::EndTurn,
|
||||
meta: None,
|
||||
})
|
||||
Ok(acp::PromptResponse::new(acp::StopReason::EndTurn))
|
||||
}
|
||||
.boxed_local()
|
||||
}
|
||||
@@ -3668,13 +3570,12 @@ mod tests {
|
||||
_cwd: &Path,
|
||||
cx: &mut App,
|
||||
) -> Task<gpui::Result<Entity<AcpThread>>> {
|
||||
let session_id = acp::SessionId(
|
||||
let session_id = acp::SessionId::new(
|
||||
rand::rng()
|
||||
.sample_iter(&distr::Alphanumeric)
|
||||
.take(7)
|
||||
.map(char::from)
|
||||
.collect::<String>()
|
||||
.into(),
|
||||
.collect::<String>(),
|
||||
);
|
||||
let action_log = cx.new(|_| ActionLog::new(project.clone()));
|
||||
let thread = cx.new(|cx| {
|
||||
@@ -3684,12 +3585,12 @@ mod tests {
|
||||
project,
|
||||
action_log,
|
||||
session_id.clone(),
|
||||
watch::Receiver::constant(acp::PromptCapabilities {
|
||||
image: true,
|
||||
audio: true,
|
||||
embedded_context: true,
|
||||
meta: None,
|
||||
}),
|
||||
watch::Receiver::constant(
|
||||
acp::PromptCapabilities::new()
|
||||
.image(true)
|
||||
.audio(true)
|
||||
.embedded_context(true),
|
||||
),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
@@ -3718,10 +3619,7 @@ mod tests {
|
||||
let thread = thread.clone();
|
||||
cx.spawn(async move |cx| handler(params, thread, cx.clone()).await)
|
||||
} else {
|
||||
Task::ready(Ok(acp::PromptResponse {
|
||||
stop_reason: acp::StopReason::EndTurn,
|
||||
meta: None,
|
||||
}))
|
||||
Task::ready(Ok(acp::PromptResponse::new(acp::StopReason::EndTurn)))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3776,17 +3674,13 @@ mod tests {
|
||||
.unwrap();
|
||||
|
||||
// Try to update a tool call that doesn't exist
|
||||
let nonexistent_id = acp::ToolCallId("nonexistent-tool-call".into());
|
||||
let nonexistent_id = acp::ToolCallId::new("nonexistent-tool-call");
|
||||
thread.update(cx, |thread, cx| {
|
||||
let result = thread.handle_session_update(
|
||||
acp::SessionUpdate::ToolCallUpdate(acp::ToolCallUpdate {
|
||||
id: nonexistent_id.clone(),
|
||||
fields: acp::ToolCallUpdateFields {
|
||||
status: Some(acp::ToolCallStatus::Completed),
|
||||
..Default::default()
|
||||
},
|
||||
meta: None,
|
||||
}),
|
||||
acp::SessionUpdate::ToolCallUpdate(acp::ToolCallUpdate::new(
|
||||
nonexistent_id.clone(),
|
||||
acp::ToolCallUpdateFields::new().status(acp::ToolCallStatus::Completed),
|
||||
)),
|
||||
cx,
|
||||
);
|
||||
|
||||
@@ -3861,7 +3755,7 @@ mod tests {
|
||||
.unwrap();
|
||||
|
||||
// Create 2 terminals BEFORE the checkpoint that have completed running
|
||||
let terminal_id_1 = acp::TerminalId(uuid::Uuid::new_v4().to_string().into());
|
||||
let terminal_id_1 = acp::TerminalId::new(uuid::Uuid::new_v4().to_string());
|
||||
let mock_terminal_1 = cx.new(|cx| {
|
||||
let builder = ::terminal::TerminalBuilder::new_display_only(
|
||||
::terminal::terminal_settings::CursorShape::default(),
|
||||
@@ -3900,17 +3794,13 @@ mod tests {
|
||||
thread.on_terminal_provider_event(
|
||||
TerminalProviderEvent::Exit {
|
||||
terminal_id: terminal_id_1.clone(),
|
||||
status: acp::TerminalExitStatus {
|
||||
exit_code: Some(0),
|
||||
signal: None,
|
||||
meta: None,
|
||||
},
|
||||
status: acp::TerminalExitStatus::new().exit_code(0),
|
||||
},
|
||||
cx,
|
||||
);
|
||||
});
|
||||
|
||||
let terminal_id_2 = acp::TerminalId(uuid::Uuid::new_v4().to_string().into());
|
||||
let terminal_id_2 = acp::TerminalId::new(uuid::Uuid::new_v4().to_string());
|
||||
let mock_terminal_2 = cx.new(|cx| {
|
||||
let builder = ::terminal::TerminalBuilder::new_display_only(
|
||||
::terminal::terminal_settings::CursorShape::default(),
|
||||
@@ -3949,11 +3839,7 @@ mod tests {
|
||||
thread.on_terminal_provider_event(
|
||||
TerminalProviderEvent::Exit {
|
||||
terminal_id: terminal_id_2.clone(),
|
||||
status: acp::TerminalExitStatus {
|
||||
exit_code: Some(0),
|
||||
signal: None,
|
||||
meta: None,
|
||||
},
|
||||
status: acp::TerminalExitStatus::new().exit_code(0),
|
||||
},
|
||||
cx,
|
||||
);
|
||||
@@ -3973,7 +3859,7 @@ mod tests {
|
||||
|
||||
// Create a terminal AFTER the checkpoint we'll restore to.
|
||||
// This simulates the AI agent starting a long-running terminal command.
|
||||
let terminal_id = acp::TerminalId(uuid::Uuid::new_v4().to_string().into());
|
||||
let terminal_id = acp::TerminalId::new(uuid::Uuid::new_v4().to_string());
|
||||
let mock_terminal = cx.new(|cx| {
|
||||
let builder = ::terminal::TerminalBuilder::new_display_only(
|
||||
::terminal::terminal_settings::CursorShape::default(),
|
||||
@@ -4015,21 +3901,15 @@ mod tests {
|
||||
thread.update(cx, |thread, cx| {
|
||||
thread
|
||||
.handle_session_update(
|
||||
acp::SessionUpdate::ToolCall(acp::ToolCall {
|
||||
id: acp::ToolCallId("terminal-tool-1".into()),
|
||||
title: "Running command".into(),
|
||||
kind: acp::ToolKind::Execute,
|
||||
status: acp::ToolCallStatus::InProgress,
|
||||
content: vec![acp::ToolCallContent::Terminal {
|
||||
terminal_id: terminal_id.clone(),
|
||||
}],
|
||||
locations: vec![],
|
||||
raw_input: Some(
|
||||
serde_json::json!({"command": "sleep 1000", "cd": "/test"}),
|
||||
),
|
||||
raw_output: None,
|
||||
meta: None,
|
||||
}),
|
||||
acp::SessionUpdate::ToolCall(
|
||||
acp::ToolCall::new("terminal-tool-1", "Running command")
|
||||
.kind(acp::ToolKind::Execute)
|
||||
.status(acp::ToolCallStatus::InProgress)
|
||||
.content(vec![acp::ToolCallContent::Terminal(acp::Terminal::new(
|
||||
terminal_id.clone(),
|
||||
))])
|
||||
.raw_input(serde_json::json!({"command": "sleep 1000", "cd": "/test"})),
|
||||
),
|
||||
cx,
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
@@ -336,7 +336,7 @@ mod test_support {
|
||||
_cwd: &Path,
|
||||
cx: &mut gpui::App,
|
||||
) -> Task<gpui::Result<Entity<AcpThread>>> {
|
||||
let session_id = acp::SessionId(self.sessions.lock().len().to_string().into());
|
||||
let session_id = acp::SessionId::new(self.sessions.lock().len().to_string());
|
||||
let action_log = cx.new(|_| ActionLog::new(project.clone()));
|
||||
let thread = cx.new(|cx| {
|
||||
AcpThread::new(
|
||||
@@ -345,12 +345,12 @@ mod test_support {
|
||||
project,
|
||||
action_log,
|
||||
session_id.clone(),
|
||||
watch::Receiver::constant(acp::PromptCapabilities {
|
||||
image: true,
|
||||
audio: true,
|
||||
embedded_context: true,
|
||||
meta: None,
|
||||
}),
|
||||
watch::Receiver::constant(
|
||||
acp::PromptCapabilities::new()
|
||||
.image(true)
|
||||
.audio(true)
|
||||
.embedded_context(true),
|
||||
),
|
||||
cx,
|
||||
)
|
||||
});
|
||||
@@ -389,10 +389,7 @@ mod test_support {
|
||||
response_tx.replace(tx);
|
||||
cx.spawn(async move |_| {
|
||||
let stop_reason = rx.await?;
|
||||
Ok(acp::PromptResponse {
|
||||
stop_reason,
|
||||
meta: None,
|
||||
})
|
||||
Ok(acp::PromptResponse::new(stop_reason))
|
||||
})
|
||||
} else {
|
||||
for update in self.next_prompt_updates.lock().drain(..) {
|
||||
@@ -400,7 +397,7 @@ mod test_support {
|
||||
let update = update.clone();
|
||||
let permission_request = if let acp::SessionUpdate::ToolCall(tool_call) =
|
||||
&update
|
||||
&& let Some(options) = self.permission_requests.get(&tool_call.id)
|
||||
&& let Some(options) = self.permission_requests.get(&tool_call.tool_call_id)
|
||||
{
|
||||
Some((tool_call.clone(), options.clone()))
|
||||
} else {
|
||||
@@ -429,10 +426,7 @@ mod test_support {
|
||||
|
||||
cx.spawn(async move |_| {
|
||||
try_join_all(tasks).await?;
|
||||
Ok(acp::PromptResponse {
|
||||
stop_reason: acp::StopReason::EndTurn,
|
||||
meta: None,
|
||||
})
|
||||
Ok(acp::PromptResponse::new(acp::StopReason::EndTurn))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -108,7 +108,7 @@ impl MentionUri {
|
||||
if let Some(thread_id) = path.strip_prefix("/agent/thread/") {
|
||||
let name = single_query_param(&url, "name")?.context("Missing thread name")?;
|
||||
Ok(Self::Thread {
|
||||
id: acp::SessionId(thread_id.into()),
|
||||
id: acp::SessionId::new(thread_id),
|
||||
name,
|
||||
})
|
||||
} else if let Some(path) = path.strip_prefix("/agent/text-thread/") {
|
||||
|
||||
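Several id newtypes (`acp::SessionId`, `acp::ToolCallId`, `acp::TerminalId`, `acp::ModelId`) switch from tuple-struct construction like `acp::SessionId(thread_id.into())` to `acp::SessionId::new(thread_id)`, which suggests the inner field is no longer public. A toy illustration of why such a constructor keeps call sites stable (types and names here are illustrative, not the crate's):

```rust
use std::sync::Arc;

// A newtype id whose representation is private; callers go through `new`.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
struct SessionId(Arc<str>);

impl SessionId {
    fn new(id: impl Into<Arc<str>>) -> Self {
        Self(id.into())
    }

    fn as_str(&self) -> &str {
        &self.0
    }
}

fn main() {
    // Both borrowed and owned strings work; the representation can change later
    // (e.g. to an interned id) without touching these call sites.
    let a = SessionId::new("thread-42");
    let b = SessionId::new(String::from("thread-42"));
    assert_eq!(a, b);
    assert_eq!(a.as_str(), "thread-42");
}
```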
@@ -75,11 +75,9 @@ impl Terminal {
|
||||
|
||||
let exit_status = exit_status.map(portable_pty::ExitStatus::from);
|
||||
|
||||
acp::TerminalExitStatus {
|
||||
exit_code: exit_status.as_ref().map(|e| e.exit_code()),
|
||||
signal: exit_status.and_then(|e| e.signal().map(Into::into)),
|
||||
meta: None,
|
||||
}
|
||||
acp::TerminalExitStatus::new()
|
||||
.exit_code(exit_status.as_ref().map(|e| e.exit_code()))
|
||||
.signal(exit_status.and_then(|e| e.signal().map(ToOwned::to_owned)))
|
||||
})
|
||||
.shared(),
|
||||
}
|
||||
@@ -103,25 +101,19 @@ impl Terminal {
|
||||
if let Some(output) = self.output.as_ref() {
|
||||
let exit_status = output.exit_status.map(portable_pty::ExitStatus::from);
|
||||
|
||||
acp::TerminalOutputResponse {
|
||||
output: output.content.clone(),
|
||||
truncated: output.original_content_len > output.content.len(),
|
||||
exit_status: Some(acp::TerminalExitStatus {
|
||||
exit_code: exit_status.as_ref().map(|e| e.exit_code()),
|
||||
signal: exit_status.and_then(|e| e.signal().map(Into::into)),
|
||||
meta: None,
|
||||
}),
|
||||
meta: None,
|
||||
}
|
||||
acp::TerminalOutputResponse::new(
|
||||
output.content.clone(),
|
||||
output.original_content_len > output.content.len(),
|
||||
)
|
||||
.exit_status(
|
||||
acp::TerminalExitStatus::new()
|
||||
.exit_code(exit_status.as_ref().map(|e| e.exit_code()))
|
||||
.signal(exit_status.and_then(|e| e.signal().map(ToOwned::to_owned))),
|
||||
)
|
||||
} else {
|
||||
let (current_content, original_len) = self.truncated_output(cx);
|
||||
|
||||
acp::TerminalOutputResponse {
|
||||
truncated: current_content.len() < original_len,
|
||||
output: current_content,
|
||||
exit_status: None,
|
||||
meta: None,
|
||||
}
|
||||
let truncated = current_content.len() < original_len;
|
||||
acp::TerminalOutputResponse::new(current_content, truncated)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -83,6 +83,7 @@ ctor.workspace = true
|
||||
db = { workspace = true, "features" = ["test-support"] }
|
||||
editor = { workspace = true, "features" = ["test-support"] }
|
||||
env_logger.workspace = true
|
||||
eval_utils.workspace = true
|
||||
fs = { workspace = true, "features" = ["test-support"] }
|
||||
git = { workspace = true, "features" = ["test-support"] }
|
||||
gpui = { workspace = true, "features" = ["test-support"] }
|
||||
|
||||
@@ -170,7 +170,7 @@ impl LanguageModels {
|
||||
}
|
||||
|
||||
fn model_id(model: &Arc<dyn LanguageModel>) -> acp::ModelId {
|
||||
acp::ModelId(format!("{}/{}", model.provider_id().0, model.id().0).into())
|
||||
acp::ModelId::new(format!("{}/{}", model.provider_id().0, model.id().0))
|
||||
}
|
||||
|
||||
fn authenticate_all_language_model_providers(cx: &mut App) -> Task<()> {
|
||||
@@ -789,28 +789,12 @@ impl NativeAgentConnection {
|
||||
}
|
||||
ThreadEvent::AgentText(text) => {
|
||||
acp_thread.update(cx, |thread, cx| {
|
||||
thread.push_assistant_content_block(
|
||||
acp::ContentBlock::Text(acp::TextContent {
|
||||
text,
|
||||
annotations: None,
|
||||
meta: None,
|
||||
}),
|
||||
false,
|
||||
cx,
|
||||
)
|
||||
thread.push_assistant_content_block(text.into(), false, cx)
|
||||
})?;
|
||||
}
|
||||
ThreadEvent::AgentThinking(text) => {
|
||||
acp_thread.update(cx, |thread, cx| {
|
||||
thread.push_assistant_content_block(
|
||||
acp::ContentBlock::Text(acp::TextContent {
|
||||
text,
|
||||
annotations: None,
|
||||
meta: None,
|
||||
}),
|
||||
true,
|
||||
cx,
|
||||
)
|
||||
thread.push_assistant_content_block(text.into(), true, cx)
|
||||
})?;
|
||||
}
|
||||
ThreadEvent::ToolCallAuthorization(ToolCallAuthorization {
|
||||
@@ -824,8 +808,9 @@ impl NativeAgentConnection {
|
||||
)
|
||||
})??;
|
||||
cx.background_spawn(async move {
|
||||
if let acp::RequestPermissionOutcome::Selected { option_id } =
|
||||
outcome_task.await
|
||||
if let acp::RequestPermissionOutcome::Selected(
|
||||
acp::SelectedPermissionOutcome { option_id, .. },
|
||||
) = outcome_task.await
|
||||
{
|
||||
response
|
||||
.send(option_id)
|
||||
@@ -852,10 +837,7 @@ impl NativeAgentConnection {
|
||||
}
|
||||
ThreadEvent::Stop(stop_reason) => {
|
||||
log::debug!("Assistant message complete: {:?}", stop_reason);
|
||||
return Ok(acp::PromptResponse {
|
||||
stop_reason,
|
||||
meta: None,
|
||||
});
|
||||
return Ok(acp::PromptResponse::new(stop_reason));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -867,10 +849,7 @@ impl NativeAgentConnection {
|
||||
}
|
||||
|
||||
log::debug!("Response stream completed");
|
||||
anyhow::Ok(acp::PromptResponse {
|
||||
stop_reason: acp::StopReason::EndTurn,
|
||||
meta: None,
|
||||
})
|
||||
anyhow::Ok(acp::PromptResponse::new(acp::StopReason::EndTurn))
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -1374,7 +1353,7 @@ mod internal_tests {
|
||||
IndexMap::from_iter([(
|
||||
AgentModelGroupName("Fake".into()),
|
||||
vec![AgentModelInfo {
|
||||
id: acp::ModelId("fake/fake".into()),
|
||||
id: acp::ModelId::new("fake/fake"),
|
||||
name: "Fake".into(),
|
||||
description: None,
|
||||
icon: Some(ui::IconName::ZedAssistant),
|
||||
@@ -1435,7 +1414,7 @@ mod internal_tests {
|
||||
|
||||
// Select a model
|
||||
let selector = connection.model_selector(&session_id).unwrap();
|
||||
let model_id = acp::ModelId("fake/fake".into());
|
||||
let model_id = acp::ModelId::new("fake/fake");
|
||||
cx.update(|cx| selector.select_model(model_id.clone(), cx))
|
||||
.await
|
||||
.unwrap();
|
||||
@@ -1521,20 +1500,14 @@ mod internal_tests {
|
||||
thread.send(
|
||||
vec![
|
||||
"What does ".into(),
|
||||
acp::ContentBlock::ResourceLink(acp::ResourceLink {
|
||||
name: "b.md".into(),
|
||||
uri: MentionUri::File {
|
||||
acp::ContentBlock::ResourceLink(acp::ResourceLink::new(
|
||||
"b.md",
|
||||
MentionUri::File {
|
||||
abs_path: path!("/a/b.md").into(),
|
||||
}
|
||||
.to_uri()
|
||||
.to_string(),
|
||||
annotations: None,
|
||||
description: None,
|
||||
mime_type: None,
|
||||
size: None,
|
||||
title: None,
|
||||
meta: None,
|
||||
}),
|
||||
)),
|
||||
" mean?".into(),
|
||||
],
|
||||
cx,
|
||||
|
||||
@@ -366,7 +366,7 @@ impl ThreadsDatabase {
|
||||
|
||||
for (id, summary, updated_at) in rows {
|
||||
threads.push(DbThreadMetadata {
|
||||
id: acp::SessionId(id),
|
||||
id: acp::SessionId::new(id),
|
||||
title: summary.into(),
|
||||
updated_at: DateTime::parse_from_rfc3339(&updated_at)?.with_timezone(&Utc),
|
||||
});
|
||||
|
||||
@@ -4,7 +4,7 @@ use crate::{
|
||||
};
|
||||
use Role::*;
|
||||
use client::{Client, UserStore};
|
||||
use collections::HashMap;
|
||||
use eval_utils::{EvalOutput, EvalOutputProcessor, OutcomeKind};
|
||||
use fs::FakeFs;
|
||||
use futures::{FutureExt, future::LocalBoxFuture};
|
||||
use gpui::{AppContext, TestAppContext, Timer};
|
||||
@@ -20,16 +20,62 @@ use rand::prelude::*;
|
||||
use reqwest_client::ReqwestClient;
|
||||
use serde_json::json;
|
||||
use std::{
|
||||
cmp::Reverse,
|
||||
fmt::{self, Display},
|
||||
io::Write as _,
|
||||
path::Path,
|
||||
str::FromStr,
|
||||
sync::mpsc,
|
||||
time::Duration,
|
||||
};
|
||||
use util::path;
|
||||
|
||||
#[derive(Default, Clone, Debug)]
|
||||
struct EditAgentOutputProcessor {
|
||||
mismatched_tag_threshold: f32,
|
||||
cumulative_tags: usize,
|
||||
cumulative_mismatched_tags: usize,
|
||||
eval_outputs: Vec<EvalOutput<EditEvalMetadata>>,
|
||||
}
|
||||
|
||||
fn mismatched_tag_threshold(mismatched_tag_threshold: f32) -> EditAgentOutputProcessor {
|
||||
EditAgentOutputProcessor {
|
||||
mismatched_tag_threshold,
|
||||
cumulative_tags: 0,
|
||||
cumulative_mismatched_tags: 0,
|
||||
eval_outputs: Vec::new(),
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
struct EditEvalMetadata {
|
||||
tags: usize,
|
||||
mismatched_tags: usize,
|
||||
}
|
||||
|
||||
impl EvalOutputProcessor for EditAgentOutputProcessor {
|
||||
type Metadata = EditEvalMetadata;
|
||||
|
||||
fn process(&mut self, output: &EvalOutput<Self::Metadata>) {
|
||||
if matches!(output.outcome, OutcomeKind::Passed | OutcomeKind::Failed) {
|
||||
self.cumulative_mismatched_tags += output.metadata.mismatched_tags;
|
||||
self.cumulative_tags += output.metadata.tags;
|
||||
self.eval_outputs.push(output.clone());
|
||||
}
|
||||
}
|
||||
|
||||
fn assert(&mut self) {
|
||||
let mismatched_tag_ratio =
|
||||
self.cumulative_mismatched_tags as f32 / self.cumulative_tags as f32;
|
||||
if mismatched_tag_ratio > self.mismatched_tag_threshold {
|
||||
for eval_output in &self.eval_outputs {
|
||||
println!("{}", eval_output.data);
|
||||
}
|
||||
panic!(
|
||||
"Too many mismatched tags: {:?}",
|
||||
self.cumulative_mismatched_tags
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
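The evals below move from a bespoke `eval(iterations, pass_ratio, threshold, input)` helper to `eval_utils::eval(iterations, pass_ratio, mismatched_tag_threshold(0.05), move || run_eval(...))`, where the third argument is the `EditAgentOutputProcessor` defined above. The exact signature of `eval_utils::eval` is not shown in this diff; a plausible shape, inferred only from these call sites and from the `process`/`assert` methods, would be a driver that runs the closure repeatedly, enforces the pass ratio, and then lets the processor run its own accumulated check. A self-contained sketch of that shape (not the real `eval_utils` API):

```rust
// Inferred shape only -- the real trait lives in the `eval_utils` crate.
trait OutputProcessor {
    type Metadata;
    fn process(&mut self, output: &(bool, Self::Metadata)); // (passed, metadata)
    fn assert(&mut self);
}

/// Run `iterations` samples, require `min_pass_ratio` of them to pass, and let the
/// processor accumulate per-sample metadata (e.g. mismatched-tag counts) for its own check.
fn eval<P, M>(
    iterations: usize,
    min_pass_ratio: f32,
    mut processor: P,
    mut run: impl FnMut() -> (bool, M),
) where
    P: OutputProcessor<Metadata = M>,
{
    let mut passed = 0usize;
    for _ in 0..iterations {
        let output = run();
        if output.0 {
            passed += 1;
        }
        processor.process(&output);
    }
    let ratio = passed as f32 / iterations as f32;
    assert!(ratio >= min_pass_ratio, "pass ratio {ratio} below {min_pass_ratio}");
    processor.assert();
}

struct CountingProcessor {
    failures: usize,
}

impl OutputProcessor for CountingProcessor {
    type Metadata = ();
    fn process(&mut self, output: &(bool, ())) {
        if !output.0 {
            self.failures += 1;
        }
    }
    fn assert(&mut self) {
        assert!(self.failures <= 1, "too many failures: {}", self.failures);
    }
}

fn main() {
    let mut i = 0;
    eval(10, 0.8, CountingProcessor { failures: 0 }, move || {
        i += 1;
        (i % 10 != 0, ()) // one simulated failure out of ten
    });
}
```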
#[test]
|
||||
#[cfg_attr(not(feature = "unit-eval"), ignore)]
|
||||
fn eval_extract_handle_command_output() {
|
||||
@@ -55,22 +101,19 @@ fn eval_extract_handle_command_output() {
|
||||
include_str!("evals/fixtures/extract_handle_command_output/possible-07.diff"),
|
||||
];
|
||||
let edit_description = "Extract `handle_command_output` method from `run_git_blame`.";
|
||||
eval(
|
||||
100,
|
||||
0.95,
|
||||
0.05,
|
||||
EvalInput::from_conversation(
|
||||
eval_utils::eval(100, 0.95, mismatched_tag_threshold(0.05), move || {
|
||||
run_eval(EvalInput::from_conversation(
|
||||
vec![
|
||||
message(
|
||||
User,
|
||||
[text(formatdoc! {"
|
||||
Read the `{input_file_path}` file and extract a method in
|
||||
the final stanza of `run_git_blame` to deal with command failures,
|
||||
call it `handle_command_output` and take the std::process::Output as the only parameter.
|
||||
Do not document the method and do not add any comments.
|
||||
Read the `{input_file_path}` file and extract a method in
|
||||
the final stanza of `run_git_blame` to deal with command failures,
|
||||
call it `handle_command_output` and take the std::process::Output as the only parameter.
|
||||
Do not document the method and do not add any comments.
|
||||
|
||||
Add it right next to `run_git_blame` and copy it verbatim from `run_git_blame`.
|
||||
"})],
|
||||
Add it right next to `run_git_blame` and copy it verbatim from `run_git_blame`.
|
||||
"})],
|
||||
),
|
||||
message(
|
||||
Assistant,
|
||||
@@ -102,9 +145,9 @@ fn eval_extract_handle_command_output() {
|
||||
),
|
||||
],
|
||||
Some(input_file_content.into()),
|
||||
EvalAssertion::assert_diff_any(possible_diffs),
|
||||
),
|
||||
);
|
||||
EvalAssertion::assert_diff_any(possible_diffs.clone()),
|
||||
))
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -122,18 +165,16 @@ fn eval_delete_run_git_blame() {
|
||||
let input_file_content = include_str!("evals/fixtures/delete_run_git_blame/before.rs");
|
||||
let output_file_content = include_str!("evals/fixtures/delete_run_git_blame/after.rs");
|
||||
let edit_description = "Delete the `run_git_blame` function.";
|
||||
eval(
|
||||
100,
|
||||
0.95,
|
||||
0.05,
|
||||
EvalInput::from_conversation(
|
||||
|
||||
eval_utils::eval(100, 0.95, mismatched_tag_threshold(0.05), move || {
|
||||
run_eval(EvalInput::from_conversation(
|
||||
vec![
|
||||
message(
|
||||
User,
|
||||
[text(formatdoc! {"
|
||||
Read the `{input_file_path}` file and delete `run_git_blame`. Just that
|
||||
one function, not its usages.
|
||||
"})],
|
||||
Read the `{input_file_path}` file and delete `run_git_blame`. Just that
|
||||
one function, not its usages.
|
||||
"})],
|
||||
),
|
||||
message(
|
||||
Assistant,
|
||||
@@ -166,8 +207,8 @@ fn eval_delete_run_git_blame() {
|
||||
],
|
||||
Some(input_file_content.into()),
|
||||
EvalAssertion::assert_eq(output_file_content),
|
||||
),
|
||||
);
|
||||
))
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -185,18 +226,16 @@ fn eval_translate_doc_comments() {
|
||||
let input_file_path = "root/canvas.rs";
|
||||
let input_file_content = include_str!("evals/fixtures/translate_doc_comments/before.rs");
|
||||
let edit_description = "Translate all doc comments to Italian";
|
||||
eval(
|
||||
200,
|
||||
1.,
|
||||
0.05,
|
||||
EvalInput::from_conversation(
|
||||
|
||||
eval_utils::eval(200, 1., mismatched_tag_threshold(0.05), move || {
|
||||
run_eval(EvalInput::from_conversation(
|
||||
vec![
|
||||
message(
|
||||
User,
|
||||
[text(formatdoc! {"
|
||||
Read the {input_file_path} file and edit it (without overwriting it),
|
||||
translating all the doc comments to italian.
|
||||
"})],
|
||||
Read the {input_file_path} file and edit it (without overwriting it),
|
||||
translating all the doc comments to italian.
|
||||
"})],
|
||||
),
|
||||
message(
|
||||
Assistant,
|
||||
@@ -229,8 +268,8 @@ fn eval_translate_doc_comments() {
|
||||
],
|
||||
Some(input_file_content.into()),
|
||||
EvalAssertion::judge_diff("Doc comments were translated to Italian"),
|
||||
),
|
||||
);
|
||||
))
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -249,33 +288,31 @@ fn eval_use_wasi_sdk_in_compile_parser_to_wasm() {
|
||||
let input_file_content =
|
||||
include_str!("evals/fixtures/use_wasi_sdk_in_compile_parser_to_wasm/before.rs");
|
||||
let edit_description = "Update compile_parser_to_wasm to use wasi-sdk instead of emscripten";
|
||||
eval(
|
||||
100,
|
||||
0.95,
|
||||
0.05,
|
||||
EvalInput::from_conversation(
|
||||
|
||||
eval_utils::eval(100, 0.95, mismatched_tag_threshold(0.05), move || {
|
||||
run_eval(EvalInput::from_conversation(
|
||||
vec![
|
||||
message(
|
||||
User,
|
||||
[text(formatdoc! {"
|
||||
Read the `{input_file_path}` file and change `compile_parser_to_wasm` to use `wasi-sdk` instead of emscripten.
|
||||
Use `ureq` to download the SDK for the current platform and architecture.
|
||||
Extract the archive into a sibling of `lib` inside the `tree-sitter` directory in the cache_dir.
|
||||
Compile the parser to wasm using the `bin/clang` executable (or `bin/clang.exe` on windows)
|
||||
that's inside of the archive.
|
||||
Don't re-download the SDK if that executable already exists.
|
||||
Read the `{input_file_path}` file and change `compile_parser_to_wasm` to use `wasi-sdk` instead of emscripten.
|
||||
Use `ureq` to download the SDK for the current platform and architecture.
|
||||
Extract the archive into a sibling of `lib` inside the `tree-sitter` directory in the cache_dir.
|
||||
Compile the parser to wasm using the `bin/clang` executable (or `bin/clang.exe` on windows)
|
||||
that's inside of the archive.
|
||||
Don't re-download the SDK if that executable already exists.
|
||||
|
||||
Use these clang flags: -fPIC -shared -Os -Wl,--export=tree_sitter_{{language_name}}
|
||||
Use these clang flags: -fPIC -shared -Os -Wl,--export=tree_sitter_{{language_name}}
|
||||
|
||||
Here are the available wasi-sdk assets:
|
||||
- wasi-sdk-25.0-x86_64-macos.tar.gz
|
||||
- wasi-sdk-25.0-arm64-macos.tar.gz
|
||||
- wasi-sdk-25.0-x86_64-linux.tar.gz
|
||||
- wasi-sdk-25.0-arm64-linux.tar.gz
|
||||
- wasi-sdk-25.0-x86_64-linux.tar.gz
|
||||
- wasi-sdk-25.0-arm64-linux.tar.gz
|
||||
- wasi-sdk-25.0-x86_64-windows.tar.gz
|
||||
"})],
|
||||
Here are the available wasi-sdk assets:
|
||||
- wasi-sdk-25.0-x86_64-macos.tar.gz
|
||||
- wasi-sdk-25.0-arm64-macos.tar.gz
|
||||
- wasi-sdk-25.0-x86_64-linux.tar.gz
|
||||
- wasi-sdk-25.0-arm64-linux.tar.gz
|
||||
- wasi-sdk-25.0-x86_64-linux.tar.gz
|
||||
- wasi-sdk-25.0-arm64-linux.tar.gz
|
||||
- wasi-sdk-25.0-x86_64-windows.tar.gz
|
||||
"})],
|
||||
),
|
||||
message(
|
||||
Assistant,
|
||||
@@ -352,11 +389,11 @@ fn eval_use_wasi_sdk_in_compile_parser_to_wasm() {
|
||||
],
|
||||
Some(input_file_content.into()),
|
||||
EvalAssertion::judge_diff(indoc! {"
|
||||
- The compile_parser_to_wasm method has been changed to use wasi-sdk
|
||||
- ureq is used to download the SDK for current platform and architecture
|
||||
"}),
|
||||
),
|
||||
);
|
||||
- The compile_parser_to_wasm method has been changed to use wasi-sdk
|
||||
- ureq is used to download the SDK for current platform and architecture
|
||||
"}),
|
||||
))
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -380,11 +417,8 @@ fn eval_disable_cursor_blinking() {
|
||||
include_str!("evals/fixtures/disable_cursor_blinking/possible-03.diff"),
|
||||
include_str!("evals/fixtures/disable_cursor_blinking/possible-04.diff"),
|
||||
];
|
||||
eval(
|
||||
100,
|
||||
0.51,
|
||||
0.05,
|
||||
EvalInput::from_conversation(
|
||||
eval_utils::eval(100, 0.51, mismatched_tag_threshold(0.05), move || {
|
||||
run_eval(EvalInput::from_conversation(
|
||||
vec![
|
||||
message(User, [text("Let's research how to cursor blinking works.")]),
|
||||
message(
|
||||
@@ -421,10 +455,10 @@ fn eval_disable_cursor_blinking() {
|
||||
message(
|
||||
User,
|
||||
[text(indoc! {"
|
||||
Comment out the lines that interact with the BlinkManager.
|
||||
Keep the outer `update` blocks, but comments everything that's inside (including if statements).
|
||||
Don't add additional comments.
|
||||
"})],
|
||||
Comment out the lines that interact with the BlinkManager.
|
||||
Keep the outer `update` blocks, but comments everything that's inside (including if statements).
|
||||
Don't add additional comments.
|
||||
"})],
|
||||
),
|
||||
message(
|
||||
Assistant,
|
||||
@@ -440,9 +474,9 @@ fn eval_disable_cursor_blinking() {
|
||||
),
|
||||
],
|
||||
Some(input_file_content.into()),
|
||||
EvalAssertion::assert_diff_any(possible_diffs),
|
||||
),
|
||||
);
|
||||
EvalAssertion::assert_diff_any(possible_diffs.clone()),
|
||||
))
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -467,20 +501,16 @@ fn eval_from_pixels_constructor() {
|
||||
let input_file_path = "root/canvas.rs";
|
||||
let input_file_content = include_str!("evals/fixtures/from_pixels_constructor/before.rs");
|
||||
let edit_description = "Implement from_pixels constructor and add tests.";
|
||||
eval(
|
||||
100,
|
||||
0.95,
|
||||
// For whatever reason, this eval produces more mismatched tags.
|
||||
// Increasing for now, let's see if we can bring this down.
|
||||
0.25,
|
||||
EvalInput::from_conversation(
|
||||
|
||||
eval_utils::eval(100, 0.95, mismatched_tag_threshold(0.25), move || {
|
||||
run_eval(EvalInput::from_conversation(
|
||||
vec![
|
||||
message(
|
||||
User,
|
||||
[text(indoc! {"
|
||||
Introduce a new `from_pixels` constructor in Canvas and
|
||||
also add tests for it in the same file.
|
||||
"})],
|
||||
Introduce a new `from_pixels` constructor in Canvas and
|
||||
also add tests for it in the same file.
|
||||
"})],
|
||||
),
|
||||
message(
|
||||
Assistant,
|
||||
@@ -545,92 +575,92 @@ fn eval_from_pixels_constructor() {
|
||||
"tool_4",
|
||||
"grep",
|
||||
indoc! {"
|
||||
Found 6 matches:
|
||||
Found 6 matches:
|
||||
|
||||
## Matches in font-kit/src/loaders/core_text.rs
|
||||
## Matches in font-kit/src/loaders/core_text.rs
|
||||
|
||||
### mod test › L926-936
|
||||
```
|
||||
mod test {
|
||||
use super::Font;
|
||||
use crate::properties::{Stretch, Weight};
|
||||
### mod test › L926-936
|
||||
```
|
||||
mod test {
|
||||
use super::Font;
|
||||
use crate::properties::{Stretch, Weight};
|
||||
|
||||
#[cfg(feature = \"source\")]
|
||||
use crate::source::SystemSource;
|
||||
#[cfg(feature = \"source\")]
|
||||
use crate::source::SystemSource;
|
||||
|
||||
static TEST_FONT_POSTSCRIPT_NAME: &'static str = \"ArialMT\";
|
||||
static TEST_FONT_POSTSCRIPT_NAME: &'static str = \"ArialMT\";
|
||||
|
||||
#[cfg(feature = \"source\")]
|
||||
#[test]
|
||||
```
|
||||
#[cfg(feature = \"source\")]
|
||||
#[test]
|
||||
```
|
||||
|
||||
55 lines remaining in ancestor node. Read the file to see all.
|
||||
55 lines remaining in ancestor node. Read the file to see all.
|
||||
|
||||
### mod test › L947-951
|
||||
```
|
||||
}
|
||||
### mod test › L947-951
|
||||
```
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_core_text_to_css_font_weight() {
|
||||
// Exact matches
|
||||
```
|
||||
#[test]
|
||||
fn test_core_text_to_css_font_weight() {
|
||||
// Exact matches
|
||||
```
|
||||
|
||||
### mod test › L959-963
|
||||
```
|
||||
}
|
||||
### mod test › L959-963
|
||||
```
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_core_text_to_css_font_stretch() {
|
||||
// Exact matches
|
||||
```
|
||||
#[test]
|
||||
fn test_core_text_to_css_font_stretch() {
|
||||
// Exact matches
|
||||
```
|
||||
|
||||
## Matches in font-kit/src/loaders/freetype.rs
|
||||
## Matches in font-kit/src/loaders/freetype.rs
|
||||
|
||||
### mod test › L1238-1248
|
||||
```
|
||||
mod test {
|
||||
use crate::loaders::freetype::Font;
|
||||
### mod test › L1238-1248
|
||||
```
|
||||
mod test {
|
||||
use crate::loaders::freetype::Font;
|
||||
|
||||
static PCF_FONT_PATH: &str = \"resources/tests/times-roman-pcf/timR12.pcf\";
|
||||
static PCF_FONT_POSTSCRIPT_NAME: &str = \"Times-Roman\";
|
||||
static PCF_FONT_PATH: &str = \"resources/tests/times-roman-pcf/timR12.pcf\";
|
||||
static PCF_FONT_POSTSCRIPT_NAME: &str = \"Times-Roman\";
|
||||
|
||||
#[test]
|
||||
fn get_pcf_postscript_name() {
|
||||
let font = Font::from_path(PCF_FONT_PATH, 0).unwrap();
|
||||
assert_eq!(font.postscript_name().unwrap(), PCF_FONT_POSTSCRIPT_NAME);
|
||||
}
|
||||
```
|
||||
#[test]
|
||||
fn get_pcf_postscript_name() {
|
||||
let font = Font::from_path(PCF_FONT_PATH, 0).unwrap();
|
||||
assert_eq!(font.postscript_name().unwrap(), PCF_FONT_POSTSCRIPT_NAME);
|
||||
}
|
||||
```
|
||||
|
||||
1 lines remaining in ancestor node. Read the file to see all.
|
||||
1 lines remaining in ancestor node. Read the file to see all.
|
||||
|
||||
## Matches in font-kit/src/sources/core_text.rs
|
||||
## Matches in font-kit/src/sources/core_text.rs
|
||||
|
||||
### mod test › L265-275
|
||||
```
|
||||
mod test {
|
||||
use crate::properties::{Stretch, Weight};
|
||||
### mod test › L265-275
|
||||
```
|
||||
mod test {
|
||||
use crate::properties::{Stretch, Weight};
|
||||
|
||||
#[test]
|
||||
fn test_css_to_core_text_font_weight() {
|
||||
// Exact matches
|
||||
assert_eq!(super::css_to_core_text_font_weight(Weight(100.0)), -0.7);
|
||||
assert_eq!(super::css_to_core_text_font_weight(Weight(400.0)), 0.0);
|
||||
assert_eq!(super::css_to_core_text_font_weight(Weight(700.0)), 0.4);
|
||||
assert_eq!(super::css_to_core_text_font_weight(Weight(900.0)), 0.8);
|
||||
#[test]
|
||||
fn test_css_to_core_text_font_weight() {
|
||||
// Exact matches
|
||||
assert_eq!(super::css_to_core_text_font_weight(Weight(100.0)), -0.7);
|
||||
assert_eq!(super::css_to_core_text_font_weight(Weight(400.0)), 0.0);
|
||||
assert_eq!(super::css_to_core_text_font_weight(Weight(700.0)), 0.4);
|
||||
assert_eq!(super::css_to_core_text_font_weight(Weight(900.0)), 0.8);
|
||||
|
||||
```
|
||||
```
|
||||
|
||||
27 lines remaining in ancestor node. Read the file to see all.
|
||||
27 lines remaining in ancestor node. Read the file to see all.
|
||||
|
||||
### mod test › L278-282
|
||||
```
|
||||
}
|
||||
### mod test › L278-282
|
||||
```
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_css_to_core_text_font_stretch() {
|
||||
// Exact matches
|
||||
```
|
||||
"},
|
||||
#[test]
|
||||
fn test_css_to_core_text_font_stretch() {
|
||||
// Exact matches
|
||||
```
|
||||
"},
|
||||
)],
|
||||
),
|
||||
message(
|
||||
@@ -648,11 +678,11 @@ fn eval_from_pixels_constructor() {
|
||||
],
|
||||
Some(input_file_content.into()),
|
||||
EvalAssertion::judge_diff(indoc! {"
|
||||
- The diff contains a new `from_pixels` constructor
|
||||
- The diff contains new tests for the `from_pixels` constructor
|
||||
"}),
|
||||
),
|
||||
);
|
||||
- The diff contains a new `from_pixels` constructor
|
||||
- The diff contains new tests for the `from_pixels` constructor
|
||||
"}),
|
||||
))
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -670,11 +700,9 @@ fn eval_zode() {
|
||||
let input_file_path = "root/zode.py";
|
||||
let input_content = None;
|
||||
let edit_description = "Create the main Zode CLI script";
|
||||
eval(
|
||||
50,
|
||||
1.,
|
||||
0.05,
|
||||
EvalInput::from_conversation(
|
||||
|
||||
eval_utils::eval(50, 1., mismatched_tag_threshold(0.05), move || {
|
||||
run_eval(EvalInput::from_conversation(
|
||||
vec![
|
||||
message(User, [text(include_str!("evals/fixtures/zode/prompt.md"))]),
|
||||
message(
|
||||
@@ -733,7 +761,7 @@ fn eval_zode() {
|
||||
],
|
||||
),
|
||||
],
|
||||
input_content,
|
||||
input_content.clone(),
|
||||
EvalAssertion::new(async move |sample, _, _cx| {
|
||||
let invalid_starts = [' ', '`', '\n'];
|
||||
let mut message = String::new();
|
||||
@@ -758,8 +786,8 @@ fn eval_zode() {
|
||||
})
|
||||
}
|
||||
}),
|
||||
),
|
||||
);
|
||||
))
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -777,19 +805,17 @@ fn eval_add_overwrite_test() {
|
||||
let input_file_path = "root/action_log.rs";
|
||||
let input_file_content = include_str!("evals/fixtures/add_overwrite_test/before.rs");
|
||||
let edit_description = "Add a new test for overwriting a file in action_log.rs";
|
||||
eval(
|
||||
200,
|
||||
0.5, // TODO: make this eval better
|
||||
0.05,
|
||||
EvalInput::from_conversation(
|
||||
|
||||
eval_utils::eval(200, 0.5, mismatched_tag_threshold(0.05), move || {
|
||||
run_eval(EvalInput::from_conversation(
|
||||
vec![
|
||||
message(
|
||||
User,
|
||||
[text(indoc! {"
|
||||
Introduce a new test in `action_log.rs` to test overwriting a file.
|
||||
That is, a file already exists, but we call `buffer_created` as if the file were new.
|
||||
Take inspiration from all the other tests in the file.
|
||||
"})],
|
||||
Introduce a new test in `action_log.rs` to test overwriting a file.
|
||||
That is, a file already exists, but we call `buffer_created` as if the file were new.
|
||||
Take inspiration from all the other tests in the file.
|
||||
"})],
|
||||
),
|
||||
message(
|
||||
Assistant,
|
||||
@@ -809,81 +835,81 @@ fn eval_add_overwrite_test() {
|
||||
"tool_1",
|
||||
"read_file",
|
||||
indoc! {"
|
||||
pub struct ActionLog [L13-20]
|
||||
tracked_buffers [L15]
|
||||
edited_since_project_diagnostics_check [L17]
|
||||
project [L19]
|
||||
impl ActionLog [L22-498]
|
||||
pub fn new [L24-30]
|
||||
pub fn project [L32-34]
|
||||
pub fn checked_project_diagnostics [L37-39]
|
||||
pub fn has_edited_files_since_project_diagnostics_check [L42-44]
|
||||
fn track_buffer_internal [L46-101]
|
||||
fn handle_buffer_event [L103-116]
|
||||
fn handle_buffer_edited [L118-123]
|
||||
fn handle_buffer_file_changed [L125-158]
|
||||
async fn maintain_diff [L160-264]
|
||||
pub fn buffer_read [L267-269]
|
||||
pub fn buffer_created [L272-276]
|
||||
pub fn buffer_edited [L279-287]
|
||||
pub fn will_delete_buffer [L289-304]
|
||||
pub fn keep_edits_in_range [L306-364]
|
||||
pub fn reject_edits_in_ranges [L366-459]
|
||||
pub fn keep_all_edits [L461-473]
|
||||
pub fn changed_buffers [L476-482]
|
||||
pub fn stale_buffers [L485-497]
|
||||
fn apply_non_conflicting_edits [L500-561]
|
||||
fn diff_snapshots [L563-585]
|
||||
fn point_to_row_edit [L587-614]
|
||||
enum ChangeAuthor [L617-620]
|
||||
User [L618]
|
||||
Agent [L619]
|
||||
enum TrackedBufferStatus [L623-627]
|
||||
Created [L624]
|
||||
Modified [L625]
|
||||
Deleted [L626]
|
||||
struct TrackedBuffer [L629-641]
|
||||
buffer [L630]
|
||||
base_text [L631]
|
||||
unreviewed_changes [L632]
|
||||
status [L633]
|
||||
version [L634]
|
||||
diff [L635]
|
||||
snapshot [L636]
|
||||
diff_update [L637]
|
||||
_open_lsp_handle [L638]
|
||||
_maintain_diff [L639]
|
||||
_subscription [L640]
|
||||
impl TrackedBuffer [L643-657]
|
||||
fn has_changes [L644-650]
|
||||
fn schedule_diff_update [L652-656]
|
||||
pub struct ChangedBuffer [L659-661]
|
||||
pub diff [L660]
|
||||
mod tests [L664-1574]
|
||||
fn init_logger [L678-682]
|
||||
fn init_test [L684-691]
|
||||
async fn test_keep_edits [L694-769]
|
||||
async fn test_deletions [L772-854]
|
||||
async fn test_overlapping_user_edits [L857-951]
|
||||
async fn test_creating_files [L954-1010]
|
||||
async fn test_deleting_files [L1013-1120]
|
||||
async fn test_reject_edits [L1123-1255]
|
||||
async fn test_reject_multiple_edits [L1258-1331]
|
||||
async fn test_reject_deleted_file [L1334-1388]
|
||||
async fn test_reject_created_file [L1391-1443]
|
||||
async fn test_random_diffs [L1446-1535]
|
||||
fn quiesce [L1510-1534]
|
||||
struct HunkStatus [L1538-1542]
|
||||
range [L1539]
|
||||
diff_status [L1540]
|
||||
old_text [L1541]
|
||||
fn unreviewed_hunks [L1544-1573]
|
||||
|
||||
Showing symbols 1-69 (total symbols: 69)
|
||||
|
||||
Using the line numbers in this outline, you can call this tool again while specifying
|
||||
the start_line and end_line fields to see the implementations of symbols in the outline.
|
||||
"},
|
||||
)],
|
||||
),
|
||||
message(
|
||||
@@ -963,17 +989,17 @@ fn eval_add_overwrite_test() {
|
||||
Assistant,
|
||||
[
|
||||
text(indoc! {"
|
||||
Based on my understanding of the existing tests and code, I'll now create a new test for overwriting a file. In this test, we'll:
|
||||
|
||||
1. Create a file
|
||||
2. Open a buffer for it
|
||||
3. Track it in the action log
|
||||
4. Modify the buffer
|
||||
5. Call `buffer_created` again as if the file were new
|
||||
6. Check that the changes are properly tracked
|
||||
|
||||
Let's write the test:
|
||||
"}),
|
||||
tool_use(
|
||||
"tool_5",
|
||||
"edit_file",
|
||||
@@ -990,8 +1016,8 @@ fn eval_add_overwrite_test() {
|
||||
EvalAssertion::judge_diff(
|
||||
"A new test for overwritten files was created, without changing any previous test",
|
||||
),
|
||||
),
|
||||
);
|
||||
))
|
||||
});
|
||||
}
|
||||
|
||||
#[test]
|
||||
@@ -1016,20 +1042,18 @@ fn eval_create_empty_file() {
|
||||
|
||||
let input_file_content = None;
|
||||
let expected_output_content = String::new();
|
||||
eval(
|
||||
100,
|
||||
0.99,
|
||||
0.05,
|
||||
EvalInput::from_conversation(
|
||||
|
||||
eval_utils::eval(100, 0.99, mismatched_tag_threshold(0.05), move || {
|
||||
run_eval(EvalInput::from_conversation(
|
||||
vec![
|
||||
message(User, [text("Create a second empty todo file ")]),
|
||||
message(
|
||||
Assistant,
|
||||
[
|
||||
text(formatdoc! {"
|
||||
I'll help you create a second empty todo file.
|
||||
First, let me examine the project structure to see if there's already a todo file, which will help me determine the appropriate name and location for the second one.
|
||||
"}),
|
||||
tool_use(
|
||||
"toolu_01GAF8TtsgpjKxCr8fgQLDgR",
|
||||
"list_directory",
|
||||
@@ -1051,8 +1075,8 @@ fn eval_create_empty_file() {
|
||||
Assistant,
|
||||
[
|
||||
text(formatdoc! {"
|
||||
I can see there's already a `TODO` file in the `root` directory. Let me create a second empty todo file called `TODO3` in the same directory:
|
||||
"}),
|
||||
tool_use(
|
||||
"toolu_01Tb3iQ9griqSYMmVuykQPWU",
|
||||
"edit_file",
|
||||
@@ -1065,12 +1089,12 @@ fn eval_create_empty_file() {
|
||||
],
|
||||
),
|
||||
],
|
||||
input_file_content,
|
||||
input_file_content.clone(),
|
||||
// Bad behavior is to write something like
|
||||
// "I'll create an empty TODO3 file as requested."
|
||||
EvalAssertion::assert_eq(expected_output_content),
|
||||
),
|
||||
);
|
||||
EvalAssertion::assert_eq(expected_output_content.clone()),
|
||||
))
|
||||
});
|
||||
}
|
||||
|
||||
fn message(
|
||||
@@ -1312,115 +1336,44 @@ impl EvalAssertion {
|
||||
}
|
||||
}
|
||||
|
||||
fn eval(
|
||||
iterations: usize,
|
||||
expected_pass_ratio: f32,
|
||||
mismatched_tag_threshold: f32,
|
||||
mut eval: EvalInput,
|
||||
) {
|
||||
let mut evaluated_count = 0;
|
||||
let mut failed_count = 0;
|
||||
report_progress(evaluated_count, failed_count, iterations);
|
||||
|
||||
let (tx, rx) = mpsc::channel();
|
||||
|
||||
// Cache the last message in the conversation, and run one instance of the eval so that
|
||||
// all the next ones are cached.
|
||||
eval.conversation.last_mut().unwrap().cache = true;
|
||||
run_eval(eval.clone(), tx.clone());
|
||||
|
||||
let executor = gpui::background_executor();
|
||||
let semaphore = Arc::new(smol::lock::Semaphore::new(32));
|
||||
for _ in 1..iterations {
|
||||
let eval = eval.clone();
|
||||
let tx = tx.clone();
|
||||
let semaphore = semaphore.clone();
|
||||
executor
|
||||
.spawn(async move {
|
||||
let _guard = semaphore.acquire().await;
|
||||
run_eval(eval, tx)
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
drop(tx);
|
||||
|
||||
let mut failed_evals = HashMap::default();
|
||||
let mut errored_evals = HashMap::default();
|
||||
let mut eval_outputs = Vec::new();
|
||||
let mut cumulative_parser_metrics = EditParserMetrics::default();
|
||||
while let Ok(output) = rx.recv() {
|
||||
match output {
|
||||
Ok(output) => {
|
||||
cumulative_parser_metrics += output.sample.edit_output.parser_metrics.clone();
|
||||
eval_outputs.push(output.clone());
|
||||
if output.assertion.score < 80 {
|
||||
failed_count += 1;
|
||||
failed_evals
|
||||
.entry(output.sample.text_after.clone())
|
||||
.or_insert(Vec::new())
|
||||
.push(output);
|
||||
}
|
||||
}
|
||||
Err(error) => {
|
||||
failed_count += 1;
|
||||
*errored_evals.entry(format!("{:?}", error)).or_insert(0) += 1;
|
||||
}
|
||||
}
|
||||
|
||||
evaluated_count += 1;
|
||||
report_progress(evaluated_count, failed_count, iterations);
|
||||
}
|
||||
|
||||
let actual_pass_ratio = (iterations - failed_count) as f32 / iterations as f32;
|
||||
println!("Actual pass ratio: {}\n", actual_pass_ratio);
|
||||
if actual_pass_ratio < expected_pass_ratio {
|
||||
let mut errored_evals = errored_evals.into_iter().collect::<Vec<_>>();
|
||||
errored_evals.sort_by_key(|(_, count)| Reverse(*count));
|
||||
for (error, count) in errored_evals {
|
||||
println!("Eval errored {} times. Error: {}", count, error);
|
||||
}
|
||||
|
||||
let mut failed_evals = failed_evals.into_iter().collect::<Vec<_>>();
|
||||
failed_evals.sort_by_key(|(_, evals)| Reverse(evals.len()));
|
||||
for (_buffer_output, failed_evals) in failed_evals {
|
||||
let eval_output = failed_evals.first().unwrap();
|
||||
println!("Eval failed {} times", failed_evals.len());
|
||||
println!("{}", eval_output);
|
||||
}
|
||||
|
||||
panic!(
|
||||
"Actual pass ratio: {}\nExpected pass ratio: {}",
|
||||
actual_pass_ratio, expected_pass_ratio
|
||||
);
|
||||
}
|
||||
|
||||
let mismatched_tag_ratio =
|
||||
cumulative_parser_metrics.mismatched_tags as f32 / cumulative_parser_metrics.tags as f32;
|
||||
if mismatched_tag_ratio > mismatched_tag_threshold {
|
||||
for eval_output in eval_outputs {
|
||||
println!("{}", eval_output);
|
||||
}
|
||||
panic!("Too many mismatched tags: {:?}", cumulative_parser_metrics);
|
||||
}
|
||||
}
|
||||
|
||||
fn run_eval(eval: EvalInput, tx: mpsc::Sender<Result<EvalOutput>>) {
|
||||
fn run_eval(eval: EvalInput) -> eval_utils::EvalOutput<EditEvalMetadata> {
|
||||
let dispatcher = gpui::TestDispatcher::new(StdRng::from_os_rng());
|
||||
let mut cx = TestAppContext::build(dispatcher, None);
|
||||
let output = cx.executor().block_test(async {
|
||||
let result = cx.executor().block_test(async {
|
||||
let test = EditAgentTest::new(&mut cx).await;
|
||||
test.eval(eval, &mut cx).await
|
||||
});
|
||||
tx.send(output).unwrap();
|
||||
match result {
|
||||
Ok(output) => eval_utils::EvalOutput {
|
||||
data: output.to_string(),
|
||||
outcome: if output.assertion.score < 80 {
|
||||
eval_utils::OutcomeKind::Failed
|
||||
} else {
|
||||
eval_utils::OutcomeKind::Passed
|
||||
},
|
||||
metadata: EditEvalMetadata {
|
||||
tags: output.sample.edit_output.parser_metrics.tags,
|
||||
mismatched_tags: output.sample.edit_output.parser_metrics.mismatched_tags,
|
||||
},
|
||||
},
|
||||
Err(e) => eval_utils::EvalOutput {
|
||||
data: format!("{e:?}"),
|
||||
outcome: eval_utils::OutcomeKind::Error,
|
||||
metadata: EditEvalMetadata {
|
||||
tags: 0,
|
||||
mismatched_tags: 0,
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
struct EvalOutput {
|
||||
struct EditEvalOutput {
|
||||
sample: EvalSample,
|
||||
assertion: EvalAssertionOutcome,
|
||||
}
|
||||
|
||||
impl Display for EvalOutput {
|
||||
impl Display for EditEvalOutput {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
writeln!(f, "Score: {:?}", self.assertion.score)?;
|
||||
if let Some(message) = self.assertion.message.as_ref() {
|
||||
@@ -1439,22 +1392,6 @@ impl Display for EvalOutput {
|
||||
}
|
||||
}
|
||||
|
||||
fn report_progress(evaluated_count: usize, failed_count: usize, iterations: usize) {
|
||||
let passed_count = evaluated_count - failed_count;
|
||||
let passed_ratio = if evaluated_count == 0 {
|
||||
0.0
|
||||
} else {
|
||||
passed_count as f64 / evaluated_count as f64
|
||||
};
|
||||
print!(
|
||||
"\r\x1b[KEvaluated {}/{} ({:.2}% passed)",
|
||||
evaluated_count,
|
||||
iterations,
|
||||
passed_ratio * 100.0
|
||||
);
|
||||
std::io::stdout().flush().unwrap();
|
||||
}
|
||||
|
||||
struct EditAgentTest {
|
||||
agent: EditAgent,
|
||||
project: Entity<Project>,
|
||||
@@ -1550,7 +1487,10 @@ impl EditAgentTest {
|
||||
})
|
||||
}
|
||||
|
||||
async fn eval(&self, eval: EvalInput, cx: &mut TestAppContext) -> Result<EvalOutput> {
|
||||
async fn eval(&self, mut eval: EvalInput, cx: &mut TestAppContext) -> Result<EditEvalOutput> {
|
||||
// Make sure the last message in the conversation is cached.
|
||||
eval.conversation.last_mut().unwrap().cache = true;
|
||||
|
||||
let path = self
|
||||
.project
|
||||
.read_with(cx, |project, cx| {
|
||||
@@ -1656,7 +1596,7 @@ impl EditAgentTest {
|
||||
.run(&sample, self.judge_model.clone(), cx)
|
||||
.await?;
|
||||
|
||||
Ok(EvalOutput { assertion, sample })
|
||||
Ok(EditEvalOutput { assertion, sample })
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -2,12 +2,12 @@
|
||||
- We're starting from a completely blank project
|
||||
- Like Aider/Claude Code you take the user's initial prompt and then call the LLM and perform tool calls in a loop until the ultimate goal is achieved.
|
||||
- Unlike Aider or Claude code, it's not intended to be interactive. Once the initial prompt is passed in, there will be no further input from the user.
|
||||
- The system you will build must reach the stated goal just by performing too calls and calling the LLM
|
||||
- The system you will build must reach the stated goal just by performing tool calls and calling the LLM
|
||||
- I want you to build this in python. Use the anthropic python sdk and the model context protocol sdk. Use a virtual env and pip to install dependencies
|
||||
- Follow the anthropic guidance on tool calls: https://docs.anthropic.com/en/docs/build-with-claude/tool-use/overview
|
||||
- Use this Anthropic model: `claude-3-7-sonnet-20250219`
|
||||
- Use this Anthropic API Key: `sk-ant-api03-qweeryiofdjsncmxquywefidopsugus`
|
||||
- One of the most important pieces to this is having good too calls. We will be using the tools provided by the Claude MCP server. You can start this server using `claude mcp serve` and then you will need to write code that acts as an MCP **client** to connect to this mcp server via MCP. Likely you want to start this using a subprocess. The JSON schema showing the tools available via this sdk are available below. Via this MCP server you have access to all the tools that zode needs: Bash, GlobTool, GrepTool, LS, View, Edit, Replace, WebFetchTool
|
||||
- One of the most important pieces to this is having good tool calls. We will be using the tools provided by the Claude MCP server. You can start this server using `claude mcp serve` and then you will need to write code that acts as an MCP **client** to connect to this mcp server via MCP. Likely you want to start this using a subprocess. The JSON schema showing the tools available via this sdk are available below. Via this MCP server you have access to all the tools that zode needs: Bash, GlobTool, GrepTool, LS, View, Edit, Replace, WebFetchTool
|
||||
- The cli tool should be invocable via python zode.py file.md where file.md is any possible file that contains the users prompt. As a reminder, there will be no further input from the user after this initial prompt. Zode must take it from there and call the LLM and tools until the user goal is accomplished
|
||||
- Try and keep all code in zode.py and make heavy use of the sdks I mentioned
|
||||
- Once you’ve implemented this, you must run python zode.py eval/instructions.md to see how well our new agent tool does!
|
||||
|
||||
@@ -354,9 +354,9 @@ impl HistoryStore {
|
||||
.into_iter()
|
||||
.take(MAX_RECENTLY_OPENED_ENTRIES)
|
||||
.flat_map(|entry| match entry {
|
||||
SerializedRecentOpen::AcpThread(id) => Some(HistoryEntryId::AcpThread(
|
||||
acp::SessionId(id.as_str().into()),
|
||||
)),
|
||||
SerializedRecentOpen::AcpThread(id) => {
|
||||
Some(HistoryEntryId::AcpThread(acp::SessionId::new(id.as_str())))
|
||||
}
|
||||
SerializedRecentOpen::TextThread(file_name) => Some(
|
||||
HistoryEntryId::TextThread(text_threads_dir().join(file_name).into()),
|
||||
),
|
||||
|
||||
@@ -66,11 +66,9 @@ pub async fn get_buffer_content_or_outline(
|
||||
let outline_text = render_outline(outline_items, None, 0, usize::MAX).await?;
|
||||
|
||||
let text = if let Some(path) = path {
|
||||
format!(
|
||||
"# File outline for {path} (file too large to show full content)\n\n{outline_text}",
|
||||
)
|
||||
format!("# File outline for {path}\n\n{outline_text}",)
|
||||
} else {
|
||||
format!("# File outline (file too large to show full content)\n\n{outline_text}",)
|
||||
format!("# File outline\n\n{outline_text}",)
|
||||
};
|
||||
Ok(BufferContent {
|
||||
text,
|
||||
|
||||
@@ -493,14 +493,14 @@ async fn test_tool_authorization(cx: &mut TestAppContext) {
|
||||
// Approve the first
|
||||
tool_call_auth_1
|
||||
.response
|
||||
.send(tool_call_auth_1.options[1].id.clone())
|
||||
.send(tool_call_auth_1.options[1].option_id.clone())
|
||||
.unwrap();
|
||||
cx.run_until_parked();
|
||||
|
||||
// Reject the second
|
||||
tool_call_auth_2
|
||||
.response
|
||||
.send(tool_call_auth_1.options[2].id.clone())
|
||||
.send(tool_call_auth_1.options[2].option_id.clone())
|
||||
.unwrap();
|
||||
cx.run_until_parked();
|
||||
|
||||
@@ -510,14 +510,14 @@ async fn test_tool_authorization(cx: &mut TestAppContext) {
|
||||
message.content,
|
||||
vec![
|
||||
language_model::MessageContent::ToolResult(LanguageModelToolResult {
|
||||
tool_use_id: tool_call_auth_1.tool_call.id.0.to_string().into(),
|
||||
tool_use_id: tool_call_auth_1.tool_call.tool_call_id.0.to_string().into(),
|
||||
tool_name: ToolRequiringPermission::name().into(),
|
||||
is_error: false,
|
||||
content: "Allowed".into(),
|
||||
output: Some("Allowed".into())
|
||||
}),
|
||||
language_model::MessageContent::ToolResult(LanguageModelToolResult {
|
||||
tool_use_id: tool_call_auth_2.tool_call.id.0.to_string().into(),
|
||||
tool_use_id: tool_call_auth_2.tool_call.tool_call_id.0.to_string().into(),
|
||||
tool_name: ToolRequiringPermission::name().into(),
|
||||
is_error: true,
|
||||
content: "Permission to run tool denied by user".into(),
|
||||
@@ -543,7 +543,7 @@ async fn test_tool_authorization(cx: &mut TestAppContext) {
|
||||
let tool_call_auth_3 = next_tool_call_authorization(&mut events).await;
|
||||
tool_call_auth_3
|
||||
.response
|
||||
.send(tool_call_auth_3.options[0].id.clone())
|
||||
.send(tool_call_auth_3.options[0].option_id.clone())
|
||||
.unwrap();
|
||||
cx.run_until_parked();
|
||||
let completion = fake_model.pending_completions().pop().unwrap();
|
||||
@@ -552,7 +552,7 @@ async fn test_tool_authorization(cx: &mut TestAppContext) {
|
||||
message.content,
|
||||
vec![language_model::MessageContent::ToolResult(
|
||||
LanguageModelToolResult {
|
||||
tool_use_id: tool_call_auth_3.tool_call.id.0.to_string().into(),
|
||||
tool_use_id: tool_call_auth_3.tool_call.tool_call_id.0.to_string().into(),
|
||||
tool_name: ToolRequiringPermission::name().into(),
|
||||
is_error: false,
|
||||
content: "Allowed".into(),
|
||||
@@ -1353,20 +1353,20 @@ async fn test_cancellation(cx: &mut TestAppContext) {
|
||||
ThreadEvent::ToolCall(tool_call) => {
|
||||
assert_eq!(tool_call.title, expected_tools.remove(0));
|
||||
if tool_call.title == "Echo" {
|
||||
echo_id = Some(tool_call.id);
|
||||
echo_id = Some(tool_call.tool_call_id);
|
||||
}
|
||||
}
|
||||
ThreadEvent::ToolCallUpdate(acp_thread::ToolCallUpdate::UpdateFields(
|
||||
acp::ToolCallUpdate {
|
||||
id,
|
||||
tool_call_id,
|
||||
fields:
|
||||
acp::ToolCallUpdateFields {
|
||||
status: Some(acp::ToolCallStatus::Completed),
|
||||
..
|
||||
},
|
||||
meta: None,
|
||||
..
|
||||
},
|
||||
)) if Some(&id) == echo_id.as_ref() => {
|
||||
)) if Some(&tool_call_id) == echo_id.as_ref() => {
|
||||
echo_completed = true;
|
||||
}
|
||||
_ => {}
|
||||
@@ -1995,11 +1995,7 @@ async fn test_agent_connection(cx: &mut TestAppContext) {
|
||||
.update(|cx| {
|
||||
connection.prompt(
|
||||
Some(acp_thread::UserMessageId::new()),
|
||||
acp::PromptRequest {
|
||||
session_id: session_id.clone(),
|
||||
prompt: vec!["ghi".into()],
|
||||
meta: None,
|
||||
},
|
||||
acp::PromptRequest::new(session_id.clone(), vec!["ghi".into()]),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
@@ -2056,68 +2052,50 @@ async fn test_tool_updates_to_completion(cx: &mut TestAppContext) {
|
||||
let tool_call = expect_tool_call(&mut events).await;
|
||||
assert_eq!(
|
||||
tool_call,
|
||||
acp::ToolCall {
|
||||
id: acp::ToolCallId("1".into()),
|
||||
title: "Thinking".into(),
|
||||
kind: acp::ToolKind::Think,
|
||||
status: acp::ToolCallStatus::Pending,
|
||||
content: vec![],
|
||||
locations: vec![],
|
||||
raw_input: Some(json!({})),
|
||||
raw_output: None,
|
||||
meta: Some(json!({ "tool_name": "thinking" })),
|
||||
}
|
||||
acp::ToolCall::new("1", "Thinking")
|
||||
.kind(acp::ToolKind::Think)
|
||||
.raw_input(json!({}))
|
||||
.meta(acp::Meta::from_iter([(
|
||||
"tool_name".into(),
|
||||
"thinking".into()
|
||||
)]))
|
||||
);
|
||||
let update = expect_tool_call_update_fields(&mut events).await;
|
||||
assert_eq!(
|
||||
update,
|
||||
acp::ToolCallUpdate {
|
||||
id: acp::ToolCallId("1".into()),
|
||||
fields: acp::ToolCallUpdateFields {
|
||||
title: Some("Thinking".into()),
|
||||
kind: Some(acp::ToolKind::Think),
|
||||
raw_input: Some(json!({ "content": "Thinking hard!" })),
|
||||
..Default::default()
|
||||
},
|
||||
meta: None,
|
||||
}
|
||||
acp::ToolCallUpdate::new(
|
||||
"1",
|
||||
acp::ToolCallUpdateFields::new()
|
||||
.title("Thinking")
|
||||
.kind(acp::ToolKind::Think)
|
||||
.raw_input(json!({ "content": "Thinking hard!"}))
|
||||
)
|
||||
);
|
||||
let update = expect_tool_call_update_fields(&mut events).await;
|
||||
assert_eq!(
|
||||
update,
|
||||
acp::ToolCallUpdate {
|
||||
id: acp::ToolCallId("1".into()),
|
||||
fields: acp::ToolCallUpdateFields {
|
||||
status: Some(acp::ToolCallStatus::InProgress),
|
||||
..Default::default()
|
||||
},
|
||||
meta: None,
|
||||
}
|
||||
acp::ToolCallUpdate::new(
|
||||
"1",
|
||||
acp::ToolCallUpdateFields::new().status(acp::ToolCallStatus::InProgress)
|
||||
)
|
||||
);
|
||||
let update = expect_tool_call_update_fields(&mut events).await;
|
||||
assert_eq!(
|
||||
update,
|
||||
acp::ToolCallUpdate {
|
||||
id: acp::ToolCallId("1".into()),
|
||||
fields: acp::ToolCallUpdateFields {
|
||||
content: Some(vec!["Thinking hard!".into()]),
|
||||
..Default::default()
|
||||
},
|
||||
meta: None,
|
||||
}
|
||||
acp::ToolCallUpdate::new(
|
||||
"1",
|
||||
acp::ToolCallUpdateFields::new().content(vec!["Thinking hard!".into()])
|
||||
)
|
||||
);
|
||||
let update = expect_tool_call_update_fields(&mut events).await;
|
||||
assert_eq!(
|
||||
update,
|
||||
acp::ToolCallUpdate {
|
||||
id: acp::ToolCallId("1".into()),
|
||||
fields: acp::ToolCallUpdateFields {
|
||||
status: Some(acp::ToolCallStatus::Completed),
|
||||
raw_output: Some("Finished thinking.".into()),
|
||||
..Default::default()
|
||||
},
|
||||
meta: None,
|
||||
}
|
||||
acp::ToolCallUpdate::new(
|
||||
"1",
|
||||
acp::ToolCallUpdateFields::new()
|
||||
.status(acp::ToolCallStatus::Completed)
|
||||
.raw_output("Finished thinking.")
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
|
||||
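The hunk above is representative of a change applied throughout this comparison: `acp` values are no longer built as exhaustive struct literals (with explicit `meta: None` and other default fields) but via `new(...)` constructors and chained setters. A minimal sketch of that pattern, using only the builder calls that appear in this diff and assuming `acp` is the `agent_client_protocol` crate aliased as in this codebase:

use agent_client_protocol as acp;
use serde_json::json;

// Sketch only: construct a pending tool call and a follow-up status update in the
// builder style this changeset migrates to (constructors and setters as shown above).
fn example_tool_call_events() -> (acp::ToolCall, acp::ToolCallUpdate) {
    let call = acp::ToolCall::new("tool_1", "Thinking")
        .kind(acp::ToolKind::Think)
        .raw_input(json!({ "content": "Thinking hard!" }));
    let update = acp::ToolCallUpdate::new(
        "tool_1",
        acp::ToolCallUpdateFields::new()
            .status(acp::ToolCallStatus::Completed)
            .raw_output("Finished thinking."),
    );
    (call, update)
}

The same shape recurs below for PermissionOption, InitializeRequest, and the other request and response types touched by this changeset.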
@@ -619,12 +619,9 @@ pub struct Thread {
|
||||
impl Thread {
|
||||
fn prompt_capabilities(model: Option<&dyn LanguageModel>) -> acp::PromptCapabilities {
|
||||
let image = model.map_or(true, |model| model.supports_images());
|
||||
acp::PromptCapabilities {
|
||||
meta: None,
|
||||
image,
|
||||
audio: false,
|
||||
embedded_context: true,
|
||||
}
|
||||
acp::PromptCapabilities::new()
|
||||
.image(image)
|
||||
.embedded_context(true)
|
||||
}
|
||||
|
||||
pub fn new(
|
||||
@@ -640,7 +637,7 @@ impl Thread {
|
||||
let (prompt_capabilities_tx, prompt_capabilities_rx) =
|
||||
watch::channel(Self::prompt_capabilities(model.as_deref()));
|
||||
Self {
|
||||
id: acp::SessionId(uuid::Uuid::new_v4().to_string().into()),
|
||||
id: acp::SessionId::new(uuid::Uuid::new_v4().to_string()),
|
||||
prompt_id: PromptId::new(),
|
||||
updated_at: Utc::now(),
|
||||
title: None,
|
||||
@@ -737,17 +734,11 @@ impl Thread {
|
||||
let Some(tool) = tool else {
|
||||
stream
|
||||
.0
|
||||
.unbounded_send(Ok(ThreadEvent::ToolCall(acp::ToolCall {
|
||||
meta: None,
|
||||
id: acp::ToolCallId(tool_use.id.to_string().into()),
|
||||
title: tool_use.name.to_string(),
|
||||
kind: acp::ToolKind::Other,
|
||||
status: acp::ToolCallStatus::Failed,
|
||||
content: Vec::new(),
|
||||
locations: Vec::new(),
|
||||
raw_input: Some(tool_use.input.clone()),
|
||||
raw_output: None,
|
||||
})))
|
||||
.unbounded_send(Ok(ThreadEvent::ToolCall(
|
||||
acp::ToolCall::new(tool_use.id.to_string(), tool_use.name.to_string())
|
||||
.status(acp::ToolCallStatus::Failed)
|
||||
.raw_input(tool_use.input.clone()),
|
||||
)))
|
||||
.ok();
|
||||
return;
|
||||
};
|
||||
@@ -777,8 +768,8 @@ impl Thread {
|
||||
|
||||
stream.update_tool_call_fields(
|
||||
&tool_use.id,
|
||||
acp::ToolCallUpdateFields {
|
||||
status: Some(
|
||||
acp::ToolCallUpdateFields::new()
|
||||
.status(
|
||||
tool_result
|
||||
.as_ref()
|
||||
.map_or(acp::ToolCallStatus::Failed, |result| {
|
||||
@@ -788,10 +779,8 @@ impl Thread {
|
||||
acp::ToolCallStatus::Completed
|
||||
}
|
||||
}),
|
||||
),
|
||||
raw_output: output,
|
||||
..Default::default()
|
||||
},
|
||||
)
|
||||
.raw_output(output),
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1274,15 +1263,13 @@ impl Thread {
|
||||
|
||||
event_stream.update_tool_call_fields(
|
||||
&tool_result.tool_use_id,
|
||||
acp::ToolCallUpdateFields {
|
||||
status: Some(if tool_result.is_error {
|
||||
acp::ToolCallUpdateFields::new()
|
||||
.status(if tool_result.is_error {
|
||||
acp::ToolCallStatus::Failed
|
||||
} else {
|
||||
acp::ToolCallStatus::Completed
|
||||
}),
|
||||
raw_output: tool_result.output.clone(),
|
||||
..Default::default()
|
||||
},
|
||||
})
|
||||
.raw_output(tool_result.output.clone()),
|
||||
);
|
||||
this.update(cx, |this, _cx| {
|
||||
this.pending_message()
|
||||
@@ -1560,12 +1547,10 @@ impl Thread {
|
||||
} else {
|
||||
event_stream.update_tool_call_fields(
|
||||
&tool_use.id,
|
||||
acp::ToolCallUpdateFields {
|
||||
title: Some(title.into()),
|
||||
kind: Some(kind),
|
||||
raw_input: Some(tool_use.input.clone()),
|
||||
..Default::default()
|
||||
},
|
||||
acp::ToolCallUpdateFields::new()
|
||||
.title(title.as_str())
|
||||
.kind(kind)
|
||||
.raw_input(tool_use.input.clone()),
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1587,10 +1572,9 @@ impl Thread {
|
||||
let fs = self.project.read(cx).fs().clone();
|
||||
let tool_event_stream =
|
||||
ToolCallEventStream::new(tool_use.id.clone(), event_stream.clone(), Some(fs));
|
||||
tool_event_stream.update_fields(acp::ToolCallUpdateFields {
|
||||
status: Some(acp::ToolCallStatus::InProgress),
|
||||
..Default::default()
|
||||
});
|
||||
tool_event_stream.update_fields(
|
||||
acp::ToolCallUpdateFields::new().status(acp::ToolCallStatus::InProgress),
|
||||
);
|
||||
let supports_images = self.model().is_some_and(|model| model.supports_images());
|
||||
let tool_result = tool.run(tool_use.input, tool_event_stream, cx);
|
||||
log::debug!("Running tool {}", tool_use.name);
|
||||
@@ -2381,19 +2365,13 @@ impl ThreadEventStream {
|
||||
kind: acp::ToolKind,
|
||||
input: serde_json::Value,
|
||||
) -> acp::ToolCall {
|
||||
acp::ToolCall {
|
||||
meta: Some(serde_json::json!({
|
||||
"tool_name": tool_name
|
||||
})),
|
||||
id: acp::ToolCallId(id.to_string().into()),
|
||||
title,
|
||||
kind,
|
||||
status: acp::ToolCallStatus::Pending,
|
||||
content: vec![],
|
||||
locations: vec![],
|
||||
raw_input: Some(input),
|
||||
raw_output: None,
|
||||
}
|
||||
acp::ToolCall::new(id.to_string(), title)
|
||||
.kind(kind)
|
||||
.raw_input(input)
|
||||
.meta(acp::Meta::from_iter([(
|
||||
"tool_name".into(),
|
||||
tool_name.into(),
|
||||
)]))
|
||||
}
|
||||
|
||||
fn update_tool_call_fields(
|
||||
@@ -2403,12 +2381,7 @@ impl ThreadEventStream {
|
||||
) {
|
||||
self.0
|
||||
.unbounded_send(Ok(ThreadEvent::ToolCallUpdate(
|
||||
acp::ToolCallUpdate {
|
||||
meta: None,
|
||||
id: acp::ToolCallId(tool_use_id.to_string().into()),
|
||||
fields,
|
||||
}
|
||||
.into(),
|
||||
acp::ToolCallUpdate::new(tool_use_id.to_string(), fields).into(),
|
||||
)))
|
||||
.ok();
|
||||
}
|
||||
@@ -2471,7 +2444,7 @@ impl ToolCallEventStream {
|
||||
.0
|
||||
.unbounded_send(Ok(ThreadEvent::ToolCallUpdate(
|
||||
acp_thread::ToolCallUpdateDiff {
|
||||
id: acp::ToolCallId(self.tool_use_id.to_string().into()),
|
||||
id: acp::ToolCallId::new(self.tool_use_id.to_string()),
|
||||
diff,
|
||||
}
|
||||
.into(),
|
||||
@@ -2489,33 +2462,26 @@ impl ToolCallEventStream {
|
||||
.0
|
||||
.unbounded_send(Ok(ThreadEvent::ToolCallAuthorization(
|
||||
ToolCallAuthorization {
|
||||
tool_call: acp::ToolCallUpdate {
|
||||
meta: None,
|
||||
id: acp::ToolCallId(self.tool_use_id.to_string().into()),
|
||||
fields: acp::ToolCallUpdateFields {
|
||||
title: Some(title.into()),
|
||||
..Default::default()
|
||||
},
|
||||
},
|
||||
tool_call: acp::ToolCallUpdate::new(
|
||||
self.tool_use_id.to_string(),
|
||||
acp::ToolCallUpdateFields::new().title(title.into()),
|
||||
),
|
||||
options: vec![
|
||||
acp::PermissionOption {
|
||||
id: acp::PermissionOptionId("always_allow".into()),
|
||||
name: "Always Allow".into(),
|
||||
kind: acp::PermissionOptionKind::AllowAlways,
|
||||
meta: None,
|
||||
},
|
||||
acp::PermissionOption {
|
||||
id: acp::PermissionOptionId("allow".into()),
|
||||
name: "Allow".into(),
|
||||
kind: acp::PermissionOptionKind::AllowOnce,
|
||||
meta: None,
|
||||
},
|
||||
acp::PermissionOption {
|
||||
id: acp::PermissionOptionId("deny".into()),
|
||||
name: "Deny".into(),
|
||||
kind: acp::PermissionOptionKind::RejectOnce,
|
||||
meta: None,
|
||||
},
|
||||
acp::PermissionOption::new(
|
||||
acp::PermissionOptionId::new("always_allow"),
|
||||
"Always Allow",
|
||||
acp::PermissionOptionKind::AllowAlways,
|
||||
),
|
||||
acp::PermissionOption::new(
|
||||
acp::PermissionOptionId::new("allow"),
|
||||
"Allow",
|
||||
acp::PermissionOptionKind::AllowOnce,
|
||||
),
|
||||
acp::PermissionOption::new(
|
||||
acp::PermissionOptionId::new("deny"),
|
||||
"Deny",
|
||||
acp::PermissionOptionKind::RejectOnce,
|
||||
),
|
||||
],
|
||||
response: response_tx,
|
||||
},
|
||||
@@ -2660,7 +2626,15 @@ impl UserMessageContent {
|
||||
// TODO
|
||||
Self::Text("[blob]".to_string())
|
||||
}
|
||||
other => {
|
||||
log::warn!("Unexpected content type: {:?}", other);
|
||||
Self::Text("[unknown]".to_string())
|
||||
}
|
||||
},
|
||||
other => {
|
||||
log::warn!("Unexpected content type: {:?}", other);
|
||||
Self::Text("[unknown]".to_string())
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -2668,32 +2642,15 @@ impl UserMessageContent {
|
||||
impl From<UserMessageContent> for acp::ContentBlock {
|
||||
fn from(content: UserMessageContent) -> Self {
|
||||
match content {
|
||||
UserMessageContent::Text(text) => acp::ContentBlock::Text(acp::TextContent {
|
||||
text,
|
||||
annotations: None,
|
||||
meta: None,
|
||||
}),
|
||||
UserMessageContent::Image(image) => acp::ContentBlock::Image(acp::ImageContent {
|
||||
data: image.source.to_string(),
|
||||
mime_type: "image/png".to_string(),
|
||||
meta: None,
|
||||
annotations: None,
|
||||
uri: None,
|
||||
}),
|
||||
UserMessageContent::Mention { uri, content } => {
|
||||
acp::ContentBlock::Resource(acp::EmbeddedResource {
|
||||
meta: None,
|
||||
resource: acp::EmbeddedResourceResource::TextResourceContents(
|
||||
acp::TextResourceContents {
|
||||
meta: None,
|
||||
mime_type: None,
|
||||
text: content,
|
||||
uri: uri.to_uri().to_string(),
|
||||
},
|
||||
),
|
||||
annotations: None,
|
||||
})
|
||||
UserMessageContent::Text(text) => text.into(),
|
||||
UserMessageContent::Image(image) => {
|
||||
acp::ContentBlock::Image(acp::ImageContent::new(image.source, "image/png"))
|
||||
}
|
||||
UserMessageContent::Mention { uri, content } => acp::ContentBlock::Resource(
|
||||
acp::EmbeddedResource::new(acp::EmbeddedResourceResource::TextResourceContents(
|
||||
acp::TextResourceContents::new(content, uri.to_uri().to_string()),
|
||||
)),
|
||||
),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -4,6 +4,7 @@ mod create_directory_tool;
|
||||
mod delete_path_tool;
|
||||
mod diagnostics_tool;
|
||||
mod edit_file_tool;
|
||||
|
||||
mod fetch_tool;
|
||||
mod find_path_tool;
|
||||
mod grep_tool;
|
||||
@@ -12,6 +13,7 @@ mod move_path_tool;
|
||||
mod now_tool;
|
||||
mod open_tool;
|
||||
mod read_file_tool;
|
||||
|
||||
mod terminal_tool;
|
||||
mod thinking_tool;
|
||||
mod web_search_tool;
|
||||
@@ -25,6 +27,7 @@ pub use create_directory_tool::*;
|
||||
pub use delete_path_tool::*;
|
||||
pub use diagnostics_tool::*;
|
||||
pub use edit_file_tool::*;
|
||||
|
||||
pub use fetch_tool::*;
|
||||
pub use find_path_tool::*;
|
||||
pub use grep_tool::*;
|
||||
@@ -33,6 +36,7 @@ pub use move_path_tool::*;
|
||||
pub use now_tool::*;
|
||||
pub use open_tool::*;
|
||||
pub use read_file_tool::*;
|
||||
|
||||
pub use terminal_tool::*;
|
||||
pub use thinking_tool::*;
|
||||
pub use web_search_tool::*;
|
||||
|
||||
@@ -273,14 +273,9 @@ impl AgentTool for EditFileTool {
|
||||
};
|
||||
let abs_path = project.read(cx).absolute_path(&project_path, cx);
|
||||
if let Some(abs_path) = abs_path.clone() {
|
||||
event_stream.update_fields(ToolCallUpdateFields {
|
||||
locations: Some(vec![acp::ToolCallLocation {
|
||||
path: abs_path,
|
||||
line: None,
|
||||
meta: None,
|
||||
}]),
|
||||
..Default::default()
|
||||
});
|
||||
event_stream.update_fields(
|
||||
ToolCallUpdateFields::new().locations(vec![acp::ToolCallLocation::new(abs_path)]),
|
||||
);
|
||||
}
|
||||
|
||||
let authorize = self.authorize(&input, &event_stream, cx);
|
||||
@@ -389,10 +384,7 @@ impl AgentTool for EditFileTool {
|
||||
range.start.to_point(&buffer.snapshot()).row
|
||||
}).ok();
|
||||
if let Some(abs_path) = abs_path.clone() {
|
||||
event_stream.update_fields(ToolCallUpdateFields {
|
||||
locations: Some(vec![ToolCallLocation { path: abs_path, line, meta: None }]),
|
||||
..Default::default()
|
||||
});
|
||||
event_stream.update_fields(ToolCallUpdateFields::new().locations(vec![ToolCallLocation::new(abs_path).line(line)]));
|
||||
}
|
||||
emitted_location = true;
|
||||
}
|
||||
|
||||
@@ -118,33 +118,29 @@ impl AgentTool for FindPathTool {
|
||||
let paginated_matches: &[PathBuf] = &matches[cmp::min(input.offset, matches.len())
|
||||
..cmp::min(input.offset + RESULTS_PER_PAGE, matches.len())];
|
||||
|
||||
event_stream.update_fields(acp::ToolCallUpdateFields {
|
||||
title: Some(if paginated_matches.is_empty() {
|
||||
"No matches".into()
|
||||
} else if paginated_matches.len() == 1 {
|
||||
"1 match".into()
|
||||
} else {
|
||||
format!("{} matches", paginated_matches.len())
|
||||
}),
|
||||
content: Some(
|
||||
paginated_matches
|
||||
.iter()
|
||||
.map(|path| acp::ToolCallContent::Content {
|
||||
content: acp::ContentBlock::ResourceLink(acp::ResourceLink {
|
||||
uri: format!("file://{}", path.display()),
|
||||
name: path.to_string_lossy().into(),
|
||||
annotations: None,
|
||||
description: None,
|
||||
mime_type: None,
|
||||
size: None,
|
||||
title: None,
|
||||
meta: None,
|
||||
}),
|
||||
})
|
||||
.collect(),
|
||||
),
|
||||
..Default::default()
|
||||
});
|
||||
event_stream.update_fields(
|
||||
acp::ToolCallUpdateFields::new()
|
||||
.title(if paginated_matches.is_empty() {
|
||||
"No matches".into()
|
||||
} else if paginated_matches.len() == 1 {
|
||||
"1 match".into()
|
||||
} else {
|
||||
format!("{} matches", paginated_matches.len())
|
||||
})
|
||||
.content(
|
||||
paginated_matches
|
||||
.iter()
|
||||
.map(|path| {
|
||||
acp::ToolCallContent::Content(acp::Content::new(
|
||||
acp::ContentBlock::ResourceLink(acp::ResourceLink::new(
|
||||
path.to_string_lossy(),
|
||||
format!("file://{}", path.display()),
|
||||
)),
|
||||
))
|
||||
})
|
||||
.collect::<Vec<_>>(),
|
||||
),
|
||||
);
|
||||
|
||||
Ok(FindPathToolOutput {
|
||||
offset: input.offset,
|
||||
|
||||
@@ -322,7 +322,6 @@ mod tests {
|
||||
|
||||
use super::*;
|
||||
use gpui::{TestAppContext, UpdateGlobal};
|
||||
use language::{Language, LanguageConfig, LanguageMatcher};
|
||||
use project::{FakeFs, Project};
|
||||
use serde_json::json;
|
||||
use settings::SettingsStore;
|
||||
@@ -564,7 +563,7 @@ mod tests {
|
||||
let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
|
||||
|
||||
project.update(cx, |project, _cx| {
|
||||
project.languages().add(rust_lang().into())
|
||||
project.languages().add(language::rust_lang())
|
||||
});
|
||||
|
||||
project
|
||||
@@ -793,22 +792,6 @@ mod tests {
|
||||
});
|
||||
}
|
||||
|
||||
fn rust_lang() -> Language {
|
||||
Language::new(
|
||||
LanguageConfig {
|
||||
name: "Rust".into(),
|
||||
matcher: LanguageMatcher {
|
||||
path_suffixes: vec!["rs".to_string()],
|
||||
..Default::default()
|
||||
},
|
||||
..Default::default()
|
||||
},
|
||||
Some(tree_sitter_rust::LANGUAGE.into()),
|
||||
)
|
||||
.with_outline_query(include_str!("../../../languages/src/rust/outline.scm"))
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_grep_security_boundaries(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
@@ -17,6 +17,9 @@ use crate::{AgentTool, Thread, ToolCallEventStream, outline};
|
||||
/// Reads the content of the given file in the project.
|
||||
///
|
||||
/// - Never attempt to read a path that hasn't been previously mentioned.
|
||||
/// - For large files, this tool returns a file outline with symbol names and line numbers instead of the full content.
|
||||
/// This outline IS a successful response - use the line numbers to read specific sections with start_line/end_line.
|
||||
/// Do NOT retry reading the same file without line numbers if you receive an outline.
|
||||
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct ReadFileToolInput {
|
||||
/// The relative path of the file to read.
|
||||
@@ -150,14 +153,10 @@ impl AgentTool for ReadFileTool {
|
||||
|
||||
let file_path = input.path.clone();
|
||||
|
||||
event_stream.update_fields(ToolCallUpdateFields {
|
||||
locations: Some(vec![acp::ToolCallLocation {
|
||||
path: abs_path.clone(),
|
||||
line: input.start_line.map(|line| line.saturating_sub(1)),
|
||||
meta: None,
|
||||
}]),
|
||||
..Default::default()
|
||||
});
|
||||
event_stream.update_fields(ToolCallUpdateFields::new().locations(vec![
|
||||
acp::ToolCallLocation::new(&abs_path)
|
||||
.line(input.start_line.map(|line| line.saturating_sub(1))),
|
||||
]));
|
||||
|
||||
if image_store::is_image_file(&self.project, &project_path, cx) {
|
||||
return cx.spawn(async move |cx| {
|
||||
@@ -254,16 +253,15 @@ impl AgentTool for ReadFileTool {
|
||||
|
||||
if buffer_content.is_outline {
|
||||
Ok(formatdoc! {"
|
||||
This file was too big to read all at once.
|
||||
SUCCESS: File outline retrieved. This file is too large to read all at once, so the outline below shows the file's structure with line numbers.
|
||||
|
||||
IMPORTANT: Do NOT retry this call without line numbers - you will get the same outline.
|
||||
Instead, use the line numbers below to read specific sections by calling this tool again with start_line and end_line parameters.
|
||||
|
||||
{}
|
||||
|
||||
Using the line numbers in this outline, you can call this tool again
|
||||
while specifying the start_line and end_line fields to see the
|
||||
implementations of symbols in the outline.
|
||||
|
||||
Alternatively, you can fall back to the `grep` tool (if available)
|
||||
to search the file for specific content.", buffer_content.text
|
||||
NEXT STEPS: To read a specific symbol's implementation, call read_file with the same path plus start_line and end_line from the outline above.
|
||||
For example, to read a function shown as [L100-150], use start_line: 100 and end_line: 150.", buffer_content.text
|
||||
}
|
||||
.into())
|
||||
} else {
|
||||
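The reworded outline message above directs the model to issue a second, range-limited read. As a purely illustrative sketch (the path and line numbers below are hypothetical, not taken from this diff), the follow-up arguments the read_file tool would receive look like:

use serde_json::json;

fn main() {
    // Hypothetical follow-up read_file call: re-read only the symbol whose outline
    // entry was shown as "[L306-364]". The path and line numbers are examples,
    // not values from this changeset.
    let follow_up_args = json!({
        "path": "crates/agent/src/action_log.rs",
        "start_line": 306,
        "end_line": 364
    });
    println!("{follow_up_args}");
}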
@@ -287,12 +285,9 @@ impl AgentTool for ReadFileTool {
|
||||
text,
|
||||
}
|
||||
.to_string();
|
||||
event_stream.update_fields(ToolCallUpdateFields {
|
||||
content: Some(vec![acp::ToolCallContent::Content {
|
||||
content: markdown.into(),
|
||||
}]),
|
||||
..Default::default()
|
||||
})
|
||||
event_stream.update_fields(ToolCallUpdateFields::new().content(vec![
|
||||
acp::ToolCallContent::Content(acp::Content::new(markdown)),
|
||||
]));
|
||||
}
|
||||
})?;
|
||||
|
||||
@@ -306,7 +301,6 @@ mod test {
|
||||
use super::*;
|
||||
use crate::{ContextServerRegistry, Templates, Thread};
|
||||
use gpui::{AppContext, TestAppContext, UpdateGlobal as _};
|
||||
use language::{Language, LanguageConfig, LanguageMatcher, tree_sitter_rust};
|
||||
use language_model::fake_provider::FakeLanguageModel;
|
||||
use project::{FakeFs, Project};
|
||||
use prompt_store::ProjectContext;
|
||||
@@ -410,7 +404,7 @@ mod test {
|
||||
.await;
|
||||
let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
|
||||
let language_registry = project.read_with(cx, |project, _| project.languages().clone());
|
||||
language_registry.add(Arc::new(rust_lang()));
|
||||
language_registry.add(language::rust_lang());
|
||||
let action_log = cx.new(|_| ActionLog::new(project.clone()));
|
||||
let context_server_registry =
|
||||
cx.new(|cx| ContextServerRegistry::new(project.read(cx).context_server_store(), cx));
|
||||
@@ -440,7 +434,7 @@ mod test {
|
||||
let content = result.to_str().unwrap();
|
||||
|
||||
assert_eq!(
|
||||
content.lines().skip(4).take(6).collect::<Vec<_>>(),
|
||||
content.lines().skip(7).take(6).collect::<Vec<_>>(),
|
||||
vec![
|
||||
"struct Test0 [L1-4]",
|
||||
" a [L2]",
|
||||
@@ -475,7 +469,7 @@ mod test {
|
||||
pretty_assertions::assert_eq!(
|
||||
content
|
||||
.lines()
|
||||
.skip(4)
|
||||
.skip(7)
|
||||
.take(expected_content.len())
|
||||
.collect::<Vec<_>>(),
|
||||
expected_content
|
||||
@@ -600,49 +594,6 @@ mod test {
|
||||
});
|
||||
}
|
||||
|
||||
fn rust_lang() -> Language {
|
||||
Language::new(
|
||||
LanguageConfig {
|
||||
name: "Rust".into(),
|
||||
matcher: LanguageMatcher {
|
||||
path_suffixes: vec!["rs".to_string()],
|
||||
..Default::default()
|
||||
},
|
||||
..Default::default()
|
||||
},
|
||||
Some(tree_sitter_rust::LANGUAGE.into()),
|
||||
)
|
||||
.with_outline_query(
|
||||
r#"
|
||||
(line_comment) @annotation
|
||||
|
||||
(struct_item
|
||||
"struct" @context
|
||||
name: (_) @name) @item
|
||||
(enum_item
|
||||
"enum" @context
|
||||
name: (_) @name) @item
|
||||
(enum_variant
|
||||
name: (_) @name) @item
|
||||
(field_declaration
|
||||
name: (_) @name) @item
|
||||
(impl_item
|
||||
"impl" @context
|
||||
trait: (_)? @name
|
||||
"for"? @context
|
||||
type: (_) @name
|
||||
body: (_ "{" (_)* "}")) @item
|
||||
(function_item
|
||||
"fn" @context
|
||||
name: (_) @name) @item
|
||||
(mod_item
|
||||
"mod" @context
|
||||
name: (_) @name) @item
|
||||
"#,
|
||||
)
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_read_file_security(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
@@ -112,10 +112,9 @@ impl AgentTool for TerminalTool {
|
||||
.await?;
|
||||
|
||||
let terminal_id = terminal.id(cx)?;
|
||||
event_stream.update_fields(acp::ToolCallUpdateFields {
|
||||
content: Some(vec![acp::ToolCallContent::Terminal { terminal_id }]),
|
||||
..Default::default()
|
||||
});
|
||||
event_stream.update_fields(acp::ToolCallUpdateFields::new().content(vec![
|
||||
acp::ToolCallContent::Terminal(acp::Terminal::new(terminal_id)),
|
||||
]));
|
||||
|
||||
let exit_status = terminal.wait_for_exit(cx)?.await;
|
||||
let output = terminal.current_output(cx)?;
|
||||
|
||||
@@ -43,10 +43,8 @@ impl AgentTool for ThinkingTool {
|
||||
event_stream: ToolCallEventStream,
|
||||
_cx: &mut App,
|
||||
) -> Task<Result<String>> {
|
||||
event_stream.update_fields(acp::ToolCallUpdateFields {
|
||||
content: Some(vec![input.content.into()]),
|
||||
..Default::default()
|
||||
});
|
||||
event_stream
|
||||
.update_fields(acp::ToolCallUpdateFields::new().content(vec![input.content.into()]));
|
||||
Task::ready(Ok("Finished thinking.".to_string()))
|
||||
}
|
||||
}
|
||||
|
||||
@@ -76,10 +76,8 @@ impl AgentTool for WebSearchTool {
|
||||
let response = match search_task.await {
|
||||
Ok(response) => response,
|
||||
Err(err) => {
|
||||
event_stream.update_fields(acp::ToolCallUpdateFields {
|
||||
title: Some("Web Search Failed".to_string()),
|
||||
..Default::default()
|
||||
});
|
||||
event_stream
|
||||
.update_fields(acp::ToolCallUpdateFields::new().title("Web Search Failed"));
|
||||
return Err(err);
|
||||
}
|
||||
};
|
||||
@@ -107,26 +105,23 @@ fn emit_update(response: &WebSearchResponse, event_stream: &ToolCallEventStream)
|
||||
} else {
|
||||
format!("{} results", response.results.len())
|
||||
};
|
||||
event_stream.update_fields(acp::ToolCallUpdateFields {
|
||||
title: Some(format!("Searched the web: {result_text}")),
|
||||
content: Some(
|
||||
response
|
||||
.results
|
||||
.iter()
|
||||
.map(|result| acp::ToolCallContent::Content {
|
||||
content: acp::ContentBlock::ResourceLink(acp::ResourceLink {
|
||||
name: result.title.clone(),
|
||||
uri: result.url.clone(),
|
||||
title: Some(result.title.clone()),
|
||||
description: Some(result.text.clone()),
|
||||
mime_type: None,
|
||||
annotations: None,
|
||||
size: None,
|
||||
meta: None,
|
||||
}),
|
||||
})
|
||||
.collect(),
|
||||
),
|
||||
..Default::default()
|
||||
});
|
||||
event_stream.update_fields(
|
||||
acp::ToolCallUpdateFields::new()
|
||||
.title(format!("Searched the web: {result_text}"))
|
||||
.content(
|
||||
response
|
||||
.results
|
||||
.iter()
|
||||
.map(|result| {
|
||||
acp::ToolCallContent::Content(acp::Content::new(
|
||||
acp::ContentBlock::ResourceLink(
|
||||
acp::ResourceLink::new(result.title.clone(), result.url.clone())
|
||||
.title(result.title.clone())
|
||||
.description(result.text.clone()),
|
||||
),
|
||||
))
|
||||
})
|
||||
.collect::<Vec<_>>(),
|
||||
),
|
||||
);
|
||||
}
|
||||
|
||||
@@ -76,7 +76,7 @@ pub async fn connect(
|
||||
Ok(Rc::new(conn) as _)
|
||||
}
|
||||
|
||||
const MINIMUM_SUPPORTED_VERSION: acp::ProtocolVersion = acp::V1;
|
||||
const MINIMUM_SUPPORTED_VERSION: acp::ProtocolVersion = acp::ProtocolVersion::V1;
|
||||
|
||||
impl AcpConnection {
|
||||
pub async fn stdio(
|
||||
@@ -174,28 +174,25 @@ impl AcpConnection {
|
||||
})?;
|
||||
|
||||
let response = connection
|
||||
.initialize(acp::InitializeRequest {
|
||||
protocol_version: acp::VERSION,
|
||||
client_capabilities: acp::ClientCapabilities {
|
||||
fs: acp::FileSystemCapability {
|
||||
read_text_file: true,
|
||||
write_text_file: true,
|
||||
meta: None,
|
||||
},
|
||||
terminal: true,
|
||||
meta: Some(serde_json::json!({
|
||||
// Experimental: Allow for rendering terminal output from the agents
|
||||
"terminal_output": true,
|
||||
"terminal-auth": true,
|
||||
})),
|
||||
},
|
||||
client_info: Some(acp::Implementation {
|
||||
name: "zed".to_owned(),
|
||||
title: release_channel.map(|c| c.to_owned()),
|
||||
version,
|
||||
}),
|
||||
meta: None,
|
||||
})
|
||||
.initialize(
|
||||
acp::InitializeRequest::new(acp::ProtocolVersion::V1)
|
||||
.client_capabilities(
|
||||
acp::ClientCapabilities::new()
|
||||
.fs(acp::FileSystemCapability::new()
|
||||
.read_text_file(true)
|
||||
.write_text_file(true))
|
||||
.terminal(true)
|
||||
// Experimental: Allow for rendering terminal output from the agents
|
||||
.meta(acp::Meta::from_iter([
|
||||
("terminal_output".into(), true.into()),
|
||||
("terminal-auth".into(), true.into()),
|
||||
])),
|
||||
)
|
||||
.client_info(
|
||||
acp::Implementation::new("zed", version)
|
||||
.title(release_channel.map(ToOwned::to_owned)),
|
||||
),
|
||||
)
|
||||
.await?;
|
||||
|
||||
if response.protocol_version < MINIMUM_SUPPORTED_VERSION {
|
||||
@@ -253,14 +250,13 @@ impl AgentConnection for AcpConnection {
|
||||
let default_model = self.default_model.clone();
|
||||
let cwd = cwd.to_path_buf();
|
||||
let context_server_store = project.read(cx).context_server_store().read(cx);
|
||||
let mcp_servers =
|
||||
if project.read(cx).is_local() {
|
||||
context_server_store
|
||||
.configured_server_ids()
|
||||
.iter()
|
||||
.filter_map(|id| {
|
||||
let configuration = context_server_store.configuration_for_server(id)?;
|
||||
match &*configuration {
|
||||
let mcp_servers = if project.read(cx).is_local() {
|
||||
context_server_store
|
||||
.configured_server_ids()
|
||||
.iter()
|
||||
.filter_map(|id| {
|
||||
let configuration = context_server_store.configuration_for_server(id)?;
|
||||
match &*configuration {
|
||||
project::context_server_store::ContextServerConfiguration::Custom {
|
||||
command,
|
||||
..
|
||||
@@ -268,53 +264,47 @@ impl AgentConnection for AcpConnection {
|
||||
| project::context_server_store::ContextServerConfiguration::Extension {
|
||||
command,
|
||||
..
|
||||
} => Some(acp::McpServer::Stdio {
|
||||
name: id.0.to_string(),
|
||||
command: command.path.clone(),
|
||||
args: command.args.clone(),
|
||||
env: if let Some(env) = command.env.as_ref() {
|
||||
env.iter()
|
||||
.map(|(name, value)| acp::EnvVariable {
|
||||
name: name.clone(),
|
||||
value: value.clone(),
|
||||
meta: None,
|
||||
})
|
||||
.collect()
|
||||
} else {
|
||||
vec![]
|
||||
},
|
||||
}),
|
||||
} => Some(acp::McpServer::Stdio(
|
||||
acp::McpServerStdio::new(id.0.to_string(), &command.path)
|
||||
.args(command.args.clone())
|
||||
.env(if let Some(env) = command.env.as_ref() {
|
||||
env.iter()
|
||||
.map(|(name, value)| acp::EnvVariable::new(name, value))
|
||||
.collect()
|
||||
} else {
|
||||
vec![]
|
||||
}),
|
||||
)),
|
||||
project::context_server_store::ContextServerConfiguration::Http {
|
||||
url,
|
||||
headers,
|
||||
} => Some(acp::McpServer::Http {
|
||||
name: id.0.to_string(),
|
||||
url: url.to_string(),
|
||||
headers: headers.iter().map(|(name, value)| acp::HttpHeader {
|
||||
name: name.clone(),
|
||||
value: value.clone(),
|
||||
meta: None,
|
||||
}).collect(),
|
||||
}),
|
||||
} => Some(acp::McpServer::Http(
|
||||
acp::McpServerHttp::new(id.0.to_string(), url.to_string()).headers(
|
||||
headers
|
||||
.iter()
|
||||
.map(|(name, value)| acp::HttpHeader::new(name, value))
|
||||
.collect(),
|
||||
),
|
||||
)),
|
||||
}
|
||||
})
|
||||
.collect()
|
||||
} else {
|
||||
// In SSH projects, the external agent is running on the remote
|
||||
// machine, and currently we only run MCP servers on the local
|
||||
// machine. So don't pass any MCP servers to the agent in that case.
|
||||
Vec::new()
|
||||
};
|
||||
|
||||
cx.spawn(async move |cx| {
|
||||
let response = conn
|
||||
.new_session(acp::NewSessionRequest { mcp_servers, cwd, meta: None })
|
||||
.new_session(acp::NewSessionRequest::new(cwd).mcp_servers(mcp_servers))
|
||||
.await
|
||||
.map_err(|err| {
|
||||
if err.code == acp::ErrorCode::AUTH_REQUIRED.code {
|
||||
if err.code == acp::ErrorCode::AuthRequired {
|
||||
let mut error = AuthRequired::new();
|
||||
|
||||
if err.message != acp::ErrorCode::AUTH_REQUIRED.message {
|
||||
if err.message != acp::ErrorCode::AuthRequired.to_string() {
|
||||
error = error.with_description(err.message);
|
||||
}
|
||||
|
||||
@@ -341,11 +331,7 @@ impl AgentConnection for AcpConnection {
|
||||
let modes = modes.clone();
|
||||
let conn = conn.clone();
|
||||
async move |_| {
|
||||
let result = conn.set_session_mode(acp::SetSessionModeRequest {
|
||||
session_id,
|
||||
mode_id: default_mode,
|
||||
meta: None,
|
||||
})
|
||||
let result = conn.set_session_mode(acp::SetSessionModeRequest::new(session_id, default_mode))
|
||||
.await.log_err();
|
||||
|
||||
if result.is_none() {
|
||||
@@ -388,11 +374,7 @@ impl AgentConnection for AcpConnection {
|
||||
let models = models.clone();
|
||||
let conn = conn.clone();
|
||||
async move |_| {
|
||||
let result = conn.set_session_model(acp::SetSessionModelRequest {
|
||||
session_id,
|
||||
model_id: default_model,
|
||||
meta: None,
|
||||
})
|
||||
let result = conn.set_session_model(acp::SetSessionModelRequest::new(session_id, default_model))
|
||||
.await.log_err();
|
||||
|
||||
if result.is_none() {
|
||||
@@ -456,12 +438,8 @@ impl AgentConnection for AcpConnection {
|
||||
fn authenticate(&self, method_id: acp::AuthMethodId, cx: &mut App) -> Task<Result<()>> {
|
||||
let conn = self.connection.clone();
|
||||
cx.foreground_executor().spawn(async move {
|
||||
conn.authenticate(acp::AuthenticateRequest {
|
||||
method_id: method_id.clone(),
|
||||
meta: None,
|
||||
})
|
||||
.await?;
|
||||
|
||||
conn.authenticate(acp::AuthenticateRequest::new(method_id))
|
||||
.await?;
|
||||
Ok(())
|
||||
})
|
||||
}
|
||||
@@ -488,11 +466,11 @@ impl AgentConnection for AcpConnection {
|
||||
match result {
|
||||
Ok(response) => Ok(response),
|
||||
Err(err) => {
|
||||
if err.code == acp::ErrorCode::AUTH_REQUIRED.code {
|
||||
if err.code == acp::ErrorCode::AuthRequired {
|
||||
return Err(anyhow!(acp::Error::auth_required()));
|
||||
}
|
||||
|
||||
if err.code != ErrorCode::INTERNAL_ERROR.code {
|
||||
if err.code != ErrorCode::InternalError {
|
||||
anyhow::bail!(err)
|
||||
}
|
||||
|
||||
@@ -515,10 +493,7 @@ impl AgentConnection for AcpConnection {
|
||||
&& (details.contains("This operation was aborted")
|
||||
|| details.contains("The user aborted a request"))
|
||||
{
|
||||
Ok(acp::PromptResponse {
|
||||
stop_reason: acp::StopReason::Cancelled,
|
||||
meta: None,
|
||||
})
|
||||
Ok(acp::PromptResponse::new(acp::StopReason::Cancelled))
|
||||
} else {
|
||||
Err(anyhow!(details))
|
||||
}
|
||||
@@ -535,10 +510,7 @@ impl AgentConnection for AcpConnection {
|
||||
session.suppress_abort_err = true;
|
||||
}
|
||||
let conn = self.connection.clone();
|
||||
let params = acp::CancelNotification {
|
||||
session_id: session_id.clone(),
|
||||
meta: None,
|
||||
};
|
||||
let params = acp::CancelNotification::new(session_id.clone());
|
||||
cx.foreground_executor()
|
||||
.spawn(async move { conn.cancel(params).await })
|
||||
.detach();
|
||||
@@ -619,11 +591,7 @@ impl acp_thread::AgentSessionModes for AcpSessionModes {
|
||||
let state = self.state.clone();
|
||||
cx.foreground_executor().spawn(async move {
|
||||
let result = connection
|
||||
.set_session_mode(acp::SetSessionModeRequest {
|
||||
session_id,
|
||||
mode_id,
|
||||
meta: None,
|
||||
})
|
||||
.set_session_mode(acp::SetSessionModeRequest::new(session_id, mode_id))
|
||||
.await;
|
||||
|
||||
if result.is_err() {
|
||||
@@ -682,11 +650,7 @@ impl acp_thread::AgentModelSelector for AcpModelSelector {
|
||||
let state = self.state.clone();
|
||||
cx.foreground_executor().spawn(async move {
|
||||
let result = connection
|
||||
.set_session_model(acp::SetSessionModelRequest {
|
||||
session_id,
|
||||
model_id,
|
||||
meta: None,
|
||||
})
|
||||
.set_session_model(acp::SetSessionModelRequest::new(session_id, model_id))
|
||||
.await;
|
||||
|
||||
if result.is_err() {
|
||||
@@ -748,10 +712,7 @@ impl acp::Client for ClientDelegate {

let outcome = task.await;

Ok(acp::RequestPermissionResponse {
outcome,
meta: None,
})
Ok(acp::RequestPermissionResponse::new(outcome))
}

async fn write_text_file(
@@ -783,10 +744,7 @@ impl acp::Client for ClientDelegate {

let content = task.await?;

Ok(acp::ReadTextFileResponse {
content,
meta: None,
})
Ok(acp::ReadTextFileResponse::new(content))
}

async fn session_notification(
@@ -821,7 +779,7 @@ impl acp::Client for ClientDelegate {
if let Some(terminal_info) = meta.get("terminal_info") {
if let Some(id_str) = terminal_info.get("terminal_id").and_then(|v| v.as_str())
{
let terminal_id = acp::TerminalId(id_str.into());
let terminal_id = acp::TerminalId::new(id_str);
let cwd = terminal_info
.get("cwd")
.and_then(|v| v.as_str().map(PathBuf::from));
@@ -837,7 +795,7 @@ impl acp::Client for ClientDelegate {
let lower = cx.new(|cx| builder.subscribe(cx));
thread.on_terminal_provider_event(
TerminalProviderEvent::Created {
terminal_id: terminal_id.clone(),
terminal_id,
label: tc.title.clone(),
cwd,
output_byte_limit: None,
@@ -862,15 +820,12 @@ impl acp::Client for ClientDelegate {
if let Some(meta) = &tcu.meta {
if let Some(term_out) = meta.get("terminal_output") {
if let Some(id_str) = term_out.get("terminal_id").and_then(|v| v.as_str()) {
let terminal_id = acp::TerminalId(id_str.into());
let terminal_id = acp::TerminalId::new(id_str);
if let Some(s) = term_out.get("data").and_then(|v| v.as_str()) {
let data = s.as_bytes().to_vec();
let _ = session.thread.update(&mut self.cx.clone(), |thread, cx| {
thread.on_terminal_provider_event(
TerminalProviderEvent::Output {
terminal_id: terminal_id.clone(),
data,
},
TerminalProviderEvent::Output { terminal_id, data },
cx,
);
});
@@ -881,21 +836,24 @@ impl acp::Client for ClientDelegate {
// terminal_exit
if let Some(term_exit) = meta.get("terminal_exit") {
if let Some(id_str) = term_exit.get("terminal_id").and_then(|v| v.as_str()) {
let terminal_id = acp::TerminalId(id_str.into());
let status = acp::TerminalExitStatus {
exit_code: term_exit
.get("exit_code")
.and_then(|v| v.as_u64())
.map(|i| i as u32),
signal: term_exit
.get("signal")
.and_then(|v| v.as_str().map(|s| s.to_string())),
meta: None,
};
let terminal_id = acp::TerminalId::new(id_str);
let status = acp::TerminalExitStatus::new()
.exit_code(
term_exit
.get("exit_code")
.and_then(|v| v.as_u64())
.map(|i| i as u32),
)
.signal(
term_exit
.get("signal")
.and_then(|v| v.as_str().map(|s| s.to_string())),
);

let _ = session.thread.update(&mut self.cx.clone(), |thread, cx| {
thread.on_terminal_provider_event(
TerminalProviderEvent::Exit {
terminal_id: terminal_id.clone(),
terminal_id,
status,
},
cx,
@@ -932,7 +890,7 @@ impl acp::Client for ClientDelegate {
// Register with renderer
let terminal_entity = thread.update(&mut self.cx.clone(), |thread, cx| {
thread.register_terminal_created(
acp::TerminalId(uuid::Uuid::new_v4().to_string().into()),
acp::TerminalId::new(uuid::Uuid::new_v4().to_string()),
format!("{} {}", args.command, args.args.join(" ")),
args.cwd.clone(),
args.output_byte_limit,
@@ -942,10 +900,7 @@ impl acp::Client for ClientDelegate {
})?;
let terminal_id =
terminal_entity.read_with(&self.cx, |terminal, _| terminal.id().clone())?;
Ok(acp::CreateTerminalResponse {
terminal_id,
meta: None,
})
Ok(acp::CreateTerminalResponse::new(terminal_id))
}

async fn kill_terminal_command(
@@ -1006,10 +961,7 @@ impl acp::Client for ClientDelegate {
})??
.await;

Ok(acp::WaitForTerminalExitResponse {
exit_status,
meta: None,
})
Ok(acp::WaitForTerminalExitResponse::new(exit_status))
}
}

@@ -41,7 +41,7 @@ impl AgentServer for ClaudeCode {

settings
.as_ref()
.and_then(|s| s.default_mode.clone().map(|m| acp::SessionModeId(m.into())))
.and_then(|s| s.default_mode.clone().map(acp::SessionModeId::new))
}

fn set_default_mode(&self, mode_id: Option<acp::SessionModeId>, fs: Arc<dyn Fs>, cx: &mut App) {
@@ -62,7 +62,7 @@ impl AgentServer for ClaudeCode {

settings
.as_ref()
.and_then(|s| s.default_model.clone().map(|m| acp::ModelId(m.into())))
.and_then(|s| s.default_model.clone().map(acp::ModelId::new))
}

fn set_default_model(&self, model_id: Option<acp::ModelId>, fs: Arc<dyn Fs>, cx: &mut App) {

@@ -42,7 +42,7 @@ impl AgentServer for Codex {

settings
.as_ref()
.and_then(|s| s.default_mode.clone().map(|m| acp::SessionModeId(m.into())))
.and_then(|s| s.default_mode.clone().map(acp::SessionModeId::new))
}

fn set_default_mode(&self, mode_id: Option<acp::SessionModeId>, fs: Arc<dyn Fs>, cx: &mut App) {
@@ -63,7 +63,7 @@ impl AgentServer for Codex {

settings
.as_ref()
.and_then(|s| s.default_model.clone().map(|m| acp::ModelId(m.into())))
.and_then(|s| s.default_model.clone().map(acp::ModelId::new))
}

fn set_default_model(&self, model_id: Option<acp::ModelId>, fs: Arc<dyn Fs>, cx: &mut App) {

@@ -44,7 +44,7 @@ impl crate::AgentServer for CustomAgentServer {

settings
.as_ref()
.and_then(|s| s.default_mode().map(|m| acp::SessionModeId(m.into())))
.and_then(|s| s.default_mode().map(acp::SessionModeId::new))
}

fn set_default_mode(&self, mode_id: Option<acp::SessionModeId>, fs: Arc<dyn Fs>, cx: &mut App) {
@@ -80,7 +80,7 @@ impl crate::AgentServer for CustomAgentServer {

settings
.as_ref()
.and_then(|s| s.default_model().map(|m| acp::ModelId(m.into())))
.and_then(|s| s.default_model().map(acp::ModelId::new))
}

fn set_default_model(&self, model_id: Option<acp::ModelId>, fs: Arc<dyn Fs>, cx: &mut App) {

@@ -82,26 +82,9 @@ where
.update(cx, |thread, cx| {
thread.send(
vec![
acp::ContentBlock::Text(acp::TextContent {
text: "Read the file ".into(),
annotations: None,
meta: None,
}),
acp::ContentBlock::ResourceLink(acp::ResourceLink {
uri: "foo.rs".into(),
name: "foo.rs".into(),
annotations: None,
description: None,
mime_type: None,
size: None,
title: None,
meta: None,
}),
acp::ContentBlock::Text(acp::TextContent {
text: " and tell me what the content of the println! is".into(),
annotations: None,
meta: None,
}),
"Read the file ".into(),
acp::ContentBlock::ResourceLink(acp::ResourceLink::new("foo.rs", "foo.rs")),
" and tell me what the content of the println! is".into(),
],
cx,
)
@@ -429,7 +412,7 @@ macro_rules! common_e2e_tests {
async fn tool_call_with_permission(cx: &mut ::gpui::TestAppContext) {
$crate::e2e_tests::test_tool_call_with_permission(
$server,
::agent_client_protocol::PermissionOptionId($allow_option_id.into()),
::agent_client_protocol::PermissionOptionId::new($allow_option_id),
cx,
)
.await;

@@ -13,7 +13,8 @@ path = "src/agent_ui.rs"
doctest = false

[features]
test-support = ["gpui/test-support", "language/test-support"]
test-support = ["gpui/test-support", "language/test-support", "reqwest_client"]
unit-eval = []

[dependencies]
acp_thread.workspace = true
@@ -47,6 +48,7 @@ fs.workspace = true
futures.workspace = true
fuzzy.workspace = true
gpui.workspace = true
gpui_tokio.workspace = true
html_to_markdown.workspace = true
http_client.workspace = true
indoc.workspace = true
@@ -93,19 +95,23 @@ ui.workspace = true
ui_input.workspace = true
url.workspace = true
util.workspace = true
uuid.workspace = true
watch.workspace = true
workspace.workspace = true
zed_actions.workspace = true
image.workspace = true
async-fs.workspace = true
reqwest_client = { workspace = true, optional = true }

[dev-dependencies]
acp_thread = { workspace = true, features = ["test-support"] }
agent = { workspace = true, features = ["test-support"] }
assistant_text_thread = { workspace = true, features = ["test-support"] }
buffer_diff = { workspace = true, features = ["test-support"] }
clock.workspace = true
db = { workspace = true, features = ["test-support"] }
editor = { workspace = true, features = ["test-support"] }
eval_utils.workspace = true
gpui = { workspace = true, "features" = ["test-support"] }
indoc.workspace = true
language = { workspace = true, "features" = ["test-support"] }
@@ -115,5 +121,6 @@ pretty_assertions.workspace = true
project = { workspace = true, features = ["test-support"] }
semver.workspace = true
rand.workspace = true
reqwest_client.workspace = true
tree-sitter-md.workspace = true
unindent.workspace = true

@@ -22,7 +22,7 @@ use crate::acp::message_editor::{MessageEditor, MessageEditorEvent};

pub struct EntryViewState {
workspace: WeakEntity<Workspace>,
project: Entity<Project>,
project: WeakEntity<Project>,
history_store: Entity<HistoryStore>,
prompt_store: Option<Entity<PromptStore>>,
entries: Vec<Entry>,
@@ -34,7 +34,7 @@ pub struct EntryViewState {
impl EntryViewState {
pub fn new(
workspace: WeakEntity<Workspace>,
project: Entity<Project>,
project: WeakEntity<Project>,
history_store: Entity<HistoryStore>,
prompt_store: Option<Entity<PromptStore>>,
prompt_capabilities: Rc<RefCell<acp::PromptCapabilities>>,
@@ -328,7 +328,7 @@ impl Entry {

fn create_terminal(
workspace: WeakEntity<Workspace>,
project: Entity<Project>,
project: WeakEntity<Project>,
terminal: Entity<acp_thread::Terminal>,
window: &mut Window,
cx: &mut App,
@@ -336,9 +336,9 @@ fn create_terminal(
cx.new(|cx| {
let mut view = TerminalView::new(
terminal.read(cx).inner().clone(),
workspace.clone(),
workspace,
None,
project.downgrade(),
project,
window,
cx,
);
@@ -432,24 +432,11 @@ mod tests {
let (workspace, cx) =
cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx));

let tool_call = acp::ToolCall {
id: acp::ToolCallId("tool".into()),
title: "Tool call".into(),
kind: acp::ToolKind::Other,
status: acp::ToolCallStatus::InProgress,
content: vec![acp::ToolCallContent::Diff {
diff: acp::Diff {
path: "/project/hello.txt".into(),
old_text: Some("hi world".into()),
new_text: "hello world".into(),
meta: None,
},
}],
locations: vec![],
raw_input: None,
raw_output: None,
meta: None,
};
let tool_call = acp::ToolCall::new("tool", "Tool call")
.status(acp::ToolCallStatus::InProgress)
.content(vec![acp::ToolCallContent::Diff(
acp::Diff::new("/project/hello.txt", "hello world").old_text("hi world"),
)]);
let connection = Rc::new(StubAgentConnection::new());
let thread = cx
.update(|_, cx| {
@@ -471,7 +458,7 @@ mod tests {
let view_state = cx.new(|_cx| {
EntryViewState::new(
workspace.downgrade(),
project.clone(),
project.downgrade(),
history_store,
None,
Default::default(),

@@ -21,8 +21,8 @@ use editor::{
};
use futures::{FutureExt as _, future::join_all};
use gpui::{
AppContext, Context, Entity, EventEmitter, FocusHandle, Focusable, ImageFormat, KeyContext,
SharedString, Subscription, Task, TextStyle, WeakEntity,
AppContext, ClipboardEntry, Context, Entity, EventEmitter, FocusHandle, Focusable, ImageFormat,
KeyContext, SharedString, Subscription, Task, TextStyle, WeakEntity,
};
use language::{Buffer, Language, language_settings::InlayHintKind};
use project::{CompletionIntent, InlayHint, InlayHintLabel, InlayId, Project, Worktree};
@@ -39,7 +39,6 @@ use zed_actions::agent::Chat;
pub struct MessageEditor {
mention_set: Entity<MentionSet>,
editor: Entity<Editor>,
project: Entity<Project>,
workspace: WeakEntity<Workspace>,
prompt_capabilities: Rc<RefCell<acp::PromptCapabilities>>,
available_commands: Rc<RefCell<Vec<acp::AvailableCommand>>>,
@@ -98,7 +97,7 @@ impl PromptCompletionProviderDelegate for Entity<MessageEditor> {
impl MessageEditor {
pub fn new(
workspace: WeakEntity<Workspace>,
project: Entity<Project>,
project: WeakEntity<Project>,
history_store: Entity<HistoryStore>,
prompt_store: Option<Entity<PromptStore>>,
prompt_capabilities: Rc<RefCell<acp::PromptCapabilities>>,
@@ -124,6 +123,7 @@ impl MessageEditor {
let mut editor = Editor::new(mode, buffer, None, window, cx);
editor.set_placeholder_text(placeholder, window, cx);
editor.set_show_indent_guides(false, cx);
editor.set_show_completions_on_input(Some(true));
editor.set_soft_wrap();
editor.set_use_modal_editing(true);
editor.set_context_menu_options(ContextMenuOptions {
@@ -134,13 +134,8 @@ impl MessageEditor {
editor.register_addon(MessageEditorAddon::new());
editor
});
let mention_set = cx.new(|_cx| {
MentionSet::new(
project.downgrade(),
history_store.clone(),
prompt_store.clone(),
)
});
let mention_set =
cx.new(|_cx| MentionSet::new(project, history_store.clone(), prompt_store.clone()));
let completion_provider = Rc::new(PromptCompletionProvider::new(
cx.entity(),
editor.downgrade(),
@@ -198,7 +193,6 @@ impl MessageEditor {

Self {
editor,
project,
mention_set,
workspace,
prompt_capabilities,
@@ -225,8 +219,13 @@ impl MessageEditor {
.iter()
.find(|command| command.name == command_name)?;

let acp::AvailableCommandInput::Unstructured { mut hint } =
available_command.input.clone()?;
let acp::AvailableCommandInput::Unstructured(acp::UnstructuredCommandInput {
mut hint,
..
}) = available_command.input.clone()?
else {
return None;
};

let mut hint_pos = MultiBufferOffset(parsed_command.source_range.end) + 1usize;
if hint_pos > snapshot.len() {
@@ -403,34 +402,27 @@ impl MessageEditor {
} => {
all_tracked_buffers.extend(tracked_buffers.iter().cloned());
if supports_embedded_context {
acp::ContentBlock::Resource(acp::EmbeddedResource {
annotations: None,
resource:
acp::EmbeddedResourceResource::TextResourceContents(
acp::TextResourceContents {
mime_type: None,
text: content.clone(),
uri: uri.to_uri().to_string(),
meta: None,
},
acp::ContentBlock::Resource(acp::EmbeddedResource::new(
acp::EmbeddedResourceResource::TextResourceContents(
acp::TextResourceContents::new(
content.clone(),
uri.to_uri().to_string(),
),
meta: None,
})
),
))
} else {
acp::ContentBlock::ResourceLink(acp::ResourceLink {
name: uri.name(),
uri: uri.to_uri().to_string(),
annotations: None,
description: None,
mime_type: None,
size: None,
title: None,
meta: None,
})
acp::ContentBlock::ResourceLink(acp::ResourceLink::new(
uri.name(),
uri.to_uri().to_string(),
))
}
}
Mention::Image(mention_image) => {
let uri = match uri {
Mention::Image(mention_image) => acp::ContentBlock::Image(
acp::ImageContent::new(
mention_image.data.clone(),
mention_image.format.mime_type(),
)
.uri(match uri {
MentionUri::File { .. } => Some(uri.to_uri().to_string()),
MentionUri::PastedImage => None,
other => {
@@ -440,25 +432,11 @@ impl MessageEditor {
);
None
}
};
acp::ContentBlock::Image(acp::ImageContent {
annotations: None,
data: mention_image.data.to_string(),
mime_type: mention_image.format.mime_type().into(),
uri,
meta: None,
})
}
Mention::Link => acp::ContentBlock::ResourceLink(acp::ResourceLink {
name: uri.name(),
uri: uri.to_uri().to_string(),
annotations: None,
description: None,
mime_type: None,
size: None,
title: None,
meta: None,
}),
}),
),
Mention::Link => acp::ContentBlock::ResourceLink(
acp::ResourceLink::new(uri.name(), uri.to_uri().to_string()),
),
};
chunks.push(chunk);
ix = crease_range.end.0;
@@ -565,6 +543,127 @@ impl MessageEditor {
}

fn paste(&mut self, _: &Paste, window: &mut Window, cx: &mut Context<Self>) {
|
||||
let editor_clipboard_selections = cx
|
||||
.read_from_clipboard()
|
||||
.and_then(|item| item.entries().first().cloned())
|
||||
.and_then(|entry| match entry {
|
||||
ClipboardEntry::String(text) => {
|
||||
text.metadata_json::<Vec<editor::ClipboardSelection>>()
|
||||
}
|
||||
_ => None,
|
||||
});
|
||||
|
||||
let has_file_context = editor_clipboard_selections
|
||||
.as_ref()
|
||||
.is_some_and(|selections| {
|
||||
selections
|
||||
.iter()
|
||||
.any(|sel| sel.file_path.is_some() && sel.line_range.is_some())
|
||||
});
|
||||
|
||||
if has_file_context {
|
||||
if let Some((workspace, selections)) =
|
||||
self.workspace.upgrade().zip(editor_clipboard_selections)
|
||||
{
|
||||
cx.stop_propagation();
|
||||
|
||||
let insertion_target = self
|
||||
.editor
|
||||
.read(cx)
|
||||
.selections
|
||||
.newest_anchor()
|
||||
.start
|
||||
.text_anchor;
|
||||
|
||||
let project = workspace.read(cx).project().clone();
|
||||
for selection in selections {
|
||||
if let (Some(file_path), Some(line_range)) =
|
||||
(selection.file_path, selection.line_range)
|
||||
{
|
||||
let crease_text =
|
||||
acp_thread::selection_name(Some(file_path.as_ref()), &line_range);
|
||||
|
||||
let mention_uri = MentionUri::Selection {
|
||||
abs_path: Some(file_path.clone()),
|
||||
line_range: line_range.clone(),
|
||||
};
|
||||
|
||||
let mention_text = mention_uri.as_link().to_string();
|
||||
let (excerpt_id, text_anchor, content_len) =
|
||||
self.editor.update(cx, |editor, cx| {
|
||||
let buffer = editor.buffer().read(cx);
|
||||
let snapshot = buffer.snapshot(cx);
|
||||
let (excerpt_id, _, buffer_snapshot) =
|
||||
snapshot.as_singleton().unwrap();
|
||||
let text_anchor = insertion_target.bias_left(&buffer_snapshot);
|
||||
|
||||
editor.insert(&mention_text, window, cx);
|
||||
editor.insert(" ", window, cx);
|
||||
|
||||
(*excerpt_id, text_anchor, mention_text.len())
|
||||
});
|
||||
|
||||
let Some((crease_id, tx)) = insert_crease_for_mention(
|
||||
excerpt_id,
|
||||
text_anchor,
|
||||
content_len,
|
||||
crease_text.into(),
|
||||
mention_uri.icon_path(cx),
|
||||
None,
|
||||
self.editor.clone(),
|
||||
window,
|
||||
cx,
|
||||
) else {
|
||||
continue;
|
||||
};
|
||||
drop(tx);
|
||||
|
||||
let mention_task = cx
|
||||
.spawn({
|
||||
let project = project.clone();
|
||||
async move |_, cx| {
|
||||
let project_path = project
|
||||
.update(cx, |project, cx| {
|
||||
project.project_path_for_absolute_path(&file_path, cx)
|
||||
})
|
||||
.map_err(|e| e.to_string())?
|
||||
.ok_or_else(|| "project path not found".to_string())?;
|
||||
|
||||
let buffer = project
|
||||
.update(cx, |project, cx| {
|
||||
project.open_buffer(project_path, cx)
|
||||
})
|
||||
.map_err(|e| e.to_string())?
|
||||
.await
|
||||
.map_err(|e| e.to_string())?;
|
||||
|
||||
buffer
|
||||
.update(cx, |buffer, cx| {
|
||||
let start = Point::new(*line_range.start(), 0)
|
||||
.min(buffer.max_point());
|
||||
let end = Point::new(*line_range.end() + 1, 0)
|
||||
.min(buffer.max_point());
|
||||
let content =
|
||||
buffer.text_for_range(start..end).collect();
|
||||
Mention::Text {
|
||||
content,
|
||||
tracked_buffers: vec![cx.entity()],
|
||||
}
|
||||
})
|
||||
.map_err(|e| e.to_string())
|
||||
}
|
||||
})
|
||||
.shared();
|
||||
|
||||
self.mention_set.update(cx, |mention_set, _cx| {
|
||||
mention_set.insert_mention(crease_id, mention_uri.clone(), mention_task)
|
||||
});
|
||||
}
|
||||
}
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
if self.prompt_capabilities.borrow().image
|
||||
&& let Some(task) =
|
||||
paste_images_as_context(self.editor.clone(), self.mention_set.clone(), window, cx)
|
||||
@@ -583,17 +682,18 @@ impl MessageEditor {
|
||||
let Some(workspace) = self.workspace.upgrade() else {
|
||||
return;
|
||||
};
|
||||
let path_style = self.project.read(cx).path_style(cx);
|
||||
let project = workspace.read(cx).project().clone();
|
||||
let path_style = project.read(cx).path_style(cx);
|
||||
let buffer = self.editor.read(cx).buffer().clone();
|
||||
let Some(buffer) = buffer.read(cx).as_singleton() else {
|
||||
return;
|
||||
};
|
||||
let mut tasks = Vec::new();
|
||||
for path in paths {
|
||||
let Some(entry) = self.project.read(cx).entry_for_path(&path, cx) else {
|
||||
let Some(entry) = project.read(cx).entry_for_path(&path, cx) else {
|
||||
continue;
|
||||
};
|
||||
let Some(worktree) = self.project.read(cx).worktree_for_id(path.worktree_id, cx) else {
|
||||
let Some(worktree) = project.read(cx).worktree_for_id(path.worktree_id, cx) else {
|
||||
continue;
|
||||
};
|
||||
let abs_path = worktree.read(cx).absolutize(&path.path);
|
||||
@@ -701,9 +801,13 @@ impl MessageEditor {
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let Some(workspace) = self.workspace.upgrade() else {
|
||||
return;
|
||||
};
|
||||
|
||||
self.clear(window, cx);
|
||||
|
||||
let path_style = self.project.read(cx).path_style(cx);
|
||||
let path_style = workspace.read(cx).project().read(cx).path_style(cx);
|
||||
let mut text = String::new();
|
||||
let mut mentions = Vec::new();
|
||||
|
||||
@@ -746,8 +850,7 @@ impl MessageEditor {
|
||||
uri,
|
||||
data,
|
||||
mime_type,
|
||||
annotations: _,
|
||||
meta: _,
|
||||
..
|
||||
}) => {
|
||||
let mention_uri = if let Some(uri) = uri {
|
||||
MentionUri::parse(&uri, path_style)
|
||||
@@ -773,7 +876,7 @@ impl MessageEditor {
|
||||
}),
|
||||
));
|
||||
}
|
||||
acp::ContentBlock::Audio(_) | acp::ContentBlock::Resource(_) => {}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -947,7 +1050,7 @@ mod tests {
|
||||
cx.new(|cx| {
|
||||
MessageEditor::new(
|
||||
workspace.downgrade(),
|
||||
project.clone(),
|
||||
project.downgrade(),
|
||||
history_store.clone(),
|
||||
None,
|
||||
Default::default(),
|
||||
@@ -1058,7 +1161,7 @@ mod tests {
|
||||
cx.new(|cx| {
|
||||
MessageEditor::new(
|
||||
workspace_handle.clone(),
|
||||
project.clone(),
|
||||
project.downgrade(),
|
||||
history_store.clone(),
|
||||
None,
|
||||
prompt_capabilities.clone(),
|
||||
@@ -1092,12 +1195,7 @@ mod tests {
|
||||
assert!(error_message.contains("Available commands: none"));
|
||||
|
||||
// Now simulate Claude providing its list of available commands (which doesn't include file)
|
||||
available_commands.replace(vec![acp::AvailableCommand {
|
||||
name: "help".to_string(),
|
||||
description: "Get help".to_string(),
|
||||
input: None,
|
||||
meta: None,
|
||||
}]);
|
||||
available_commands.replace(vec![acp::AvailableCommand::new("help", "Get help")]);
|
||||
|
||||
// Test that unsupported slash commands trigger an error when we have a list of available commands
|
||||
editor.update_in(cx, |editor, window, cx| {
|
||||
@@ -1211,20 +1309,12 @@ mod tests {
|
||||
let history_store = cx.new(|cx| HistoryStore::new(text_thread_store, cx));
|
||||
let prompt_capabilities = Rc::new(RefCell::new(acp::PromptCapabilities::default()));
|
||||
let available_commands = Rc::new(RefCell::new(vec![
|
||||
acp::AvailableCommand {
|
||||
name: "quick-math".to_string(),
|
||||
description: "2 + 2 = 4 - 1 = 3".to_string(),
|
||||
input: None,
|
||||
meta: None,
|
||||
},
|
||||
acp::AvailableCommand {
|
||||
name: "say-hello".to_string(),
|
||||
description: "Say hello to whoever you want".to_string(),
|
||||
input: Some(acp::AvailableCommandInput::Unstructured {
|
||||
hint: "<name>".to_string(),
|
||||
}),
|
||||
meta: None,
|
||||
},
|
||||
acp::AvailableCommand::new("quick-math", "2 + 2 = 4 - 1 = 3"),
|
||||
acp::AvailableCommand::new("say-hello", "Say hello to whoever you want").input(
|
||||
acp::AvailableCommandInput::Unstructured(acp::UnstructuredCommandInput::new(
|
||||
"<name>",
|
||||
)),
|
||||
),
|
||||
]));
|
||||
|
||||
let editor = workspace.update_in(&mut cx, |workspace, window, cx| {
|
||||
@@ -1232,7 +1322,7 @@ mod tests {
|
||||
let message_editor = cx.new(|cx| {
|
||||
MessageEditor::new(
|
||||
workspace_handle,
|
||||
project.clone(),
|
||||
project.downgrade(),
|
||||
history_store.clone(),
|
||||
None,
|
||||
prompt_capabilities.clone(),
|
||||
@@ -1454,7 +1544,7 @@ mod tests {
|
||||
let message_editor = cx.new(|cx| {
|
||||
MessageEditor::new(
|
||||
workspace_handle,
|
||||
project.clone(),
|
||||
project.downgrade(),
|
||||
history_store.clone(),
|
||||
None,
|
||||
prompt_capabilities.clone(),
|
||||
@@ -1504,12 +1594,12 @@ mod tests {
|
||||
editor.set_text("", window, cx);
|
||||
});
|
||||
|
||||
prompt_capabilities.replace(acp::PromptCapabilities {
|
||||
image: true,
|
||||
audio: true,
|
||||
embedded_context: true,
|
||||
meta: None,
|
||||
});
|
||||
prompt_capabilities.replace(
|
||||
acp::PromptCapabilities::new()
|
||||
.image(true)
|
||||
.audio(true)
|
||||
.embedded_context(true),
|
||||
);
|
||||
|
||||
cx.simulate_input("Lorem ");
|
||||
|
||||
@@ -1945,7 +2035,7 @@ mod tests {
|
||||
cx.new(|cx| {
|
||||
let editor = MessageEditor::new(
|
||||
workspace.downgrade(),
|
||||
project.clone(),
|
||||
project.downgrade(),
|
||||
history_store.clone(),
|
||||
None,
|
||||
Default::default(),
|
||||
@@ -1960,11 +2050,9 @@ mod tests {
|
||||
cx,
|
||||
);
|
||||
// Enable embedded context so files are actually included
|
||||
editor.prompt_capabilities.replace(acp::PromptCapabilities {
|
||||
embedded_context: true,
|
||||
meta: None,
|
||||
..Default::default()
|
||||
});
|
||||
editor
|
||||
.prompt_capabilities
|
||||
.replace(acp::PromptCapabilities::new().embedded_context(true));
|
||||
editor
|
||||
})
|
||||
});
|
||||
@@ -2043,7 +2131,7 @@ mod tests {
|
||||
|
||||
// Create a thread metadata to insert as summary
|
||||
let thread_metadata = agent::DbThreadMetadata {
|
||||
id: acp::SessionId("thread-123".into()),
|
||||
id: acp::SessionId::new("thread-123"),
|
||||
title: "Previous Conversation".into(),
|
||||
updated_at: chrono::Utc::now(),
|
||||
};
|
||||
@@ -2052,7 +2140,7 @@ mod tests {
|
||||
cx.new(|cx| {
|
||||
let mut editor = MessageEditor::new(
|
||||
workspace.downgrade(),
|
||||
project.clone(),
|
||||
project.downgrade(),
|
||||
history_store.clone(),
|
||||
None,
|
||||
Default::default(),
|
||||
@@ -2121,7 +2209,7 @@ mod tests {
|
||||
cx.new(|cx| {
|
||||
MessageEditor::new(
|
||||
workspace.downgrade(),
|
||||
project.clone(),
|
||||
project.downgrade(),
|
||||
history_store.clone(),
|
||||
None,
|
||||
Default::default(),
|
||||
@@ -2150,14 +2238,7 @@ mod tests {
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
assert_eq!(
|
||||
content,
|
||||
vec![acp::ContentBlock::Text(acp::TextContent {
|
||||
text: "してhello world".into(),
|
||||
annotations: None,
|
||||
meta: None
|
||||
})]
|
||||
);
|
||||
assert_eq!(content, vec!["してhello world".into()]);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
@@ -2191,7 +2272,7 @@ mod tests {
|
||||
let message_editor = cx.new(|cx| {
|
||||
MessageEditor::new(
|
||||
workspace_handle,
|
||||
project.clone(),
|
||||
project.downgrade(),
|
||||
history_store.clone(),
|
||||
None,
|
||||
Default::default(),
|
||||
@@ -2236,38 +2317,24 @@ mod tests {
|
||||
.0;
|
||||
|
||||
let main_rs_uri = if cfg!(windows) {
|
||||
"file:///C:/project/src/main.rs".to_string()
|
||||
"file:///C:/project/src/main.rs"
|
||||
} else {
|
||||
"file:///project/src/main.rs".to_string()
|
||||
"file:///project/src/main.rs"
|
||||
};
|
||||
|
||||
// When embedded context is `false` we should get a resource link
|
||||
pretty_assertions::assert_eq!(
|
||||
content,
|
||||
vec![
|
||||
acp::ContentBlock::Text(acp::TextContent {
|
||||
text: "What is in ".to_string(),
|
||||
annotations: None,
|
||||
meta: None
|
||||
}),
|
||||
acp::ContentBlock::ResourceLink(acp::ResourceLink {
|
||||
uri: main_rs_uri.clone(),
|
||||
name: "main.rs".to_string(),
|
||||
annotations: None,
|
||||
meta: None,
|
||||
description: None,
|
||||
mime_type: None,
|
||||
size: None,
|
||||
title: None,
|
||||
})
|
||||
"What is in ".into(),
|
||||
acp::ContentBlock::ResourceLink(acp::ResourceLink::new("main.rs", main_rs_uri))
|
||||
]
|
||||
);
|
||||
|
||||
message_editor.update(cx, |editor, _cx| {
|
||||
editor.prompt_capabilities.replace(acp::PromptCapabilities {
|
||||
embedded_context: true,
|
||||
..Default::default()
|
||||
})
|
||||
editor
|
||||
.prompt_capabilities
|
||||
.replace(acp::PromptCapabilities::new().embedded_context(true))
|
||||
});
|
||||
|
||||
let content = message_editor
|
||||
@@ -2280,23 +2347,12 @@ mod tests {
|
||||
pretty_assertions::assert_eq!(
|
||||
content,
|
||||
vec![
|
||||
acp::ContentBlock::Text(acp::TextContent {
|
||||
text: "What is in ".to_string(),
|
||||
annotations: None,
|
||||
meta: None
|
||||
}),
|
||||
acp::ContentBlock::Resource(acp::EmbeddedResource {
|
||||
resource: acp::EmbeddedResourceResource::TextResourceContents(
|
||||
acp::TextResourceContents {
|
||||
text: file_content.to_string(),
|
||||
uri: main_rs_uri,
|
||||
mime_type: None,
|
||||
meta: None
|
||||
}
|
||||
),
|
||||
annotations: None,
|
||||
meta: None
|
||||
})
|
||||
"What is in ".into(),
|
||||
acp::ContentBlock::Resource(acp::EmbeddedResource::new(
|
||||
acp::EmbeddedResourceResource::TextResourceContents(
|
||||
acp::TextResourceContents::new(file_content, main_rs_uri)
|
||||
)
|
||||
))
|
||||
]
|
||||
);
|
||||
}
|
||||
@@ -2374,7 +2430,7 @@ mod tests {
|
||||
let message_editor = cx.new(|cx| {
|
||||
MessageEditor::new(
|
||||
workspace_handle,
|
||||
project.clone(),
|
||||
project.downgrade(),
|
||||
history_store.clone(),
|
||||
None,
|
||||
Default::default(),
|
||||
|
||||
@@ -161,7 +161,7 @@ impl Render for ModeSelector {
|
||||
.map(|mode| mode.name.clone())
|
||||
.unwrap_or_else(|| "Unknown".into());
|
||||
|
||||
let this = cx.entity();
|
||||
let this = cx.weak_entity();
|
||||
|
||||
let icon = if self.menu_handle.is_deployed() {
|
||||
IconName::ChevronUp
|
||||
@@ -222,7 +222,8 @@ impl Render for ModeSelector {
|
||||
y: px(-2.0),
|
||||
})
|
||||
.menu(move |window, cx| {
|
||||
Some(this.update(cx, |this, cx| this.build_context_menu(window, cx)))
|
||||
this.update(cx, |this, cx| this.build_context_menu(window, cx))
|
||||
.ok()
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -464,7 +464,7 @@ mod tests {
|
||||
models
|
||||
.into_iter()
|
||||
.map(|model| acp_thread::AgentModelInfo {
|
||||
id: acp::ModelId(model.to_string().into()),
|
||||
id: acp::ModelId::new(model.to_string()),
|
||||
name: model.to_string().into(),
|
||||
description: None,
|
||||
icon: None,
|
||||
|
||||
@@ -100,7 +100,7 @@ impl ThreadError {
|
||||
{
|
||||
Self::ModelRequestLimitReached(error.plan)
|
||||
} else if let Some(acp_error) = error.downcast_ref::<acp::Error>()
|
||||
&& acp_error.code == acp::ErrorCode::AUTH_REQUIRED.code
|
||||
&& acp_error.code == acp::ErrorCode::AuthRequired
|
||||
{
|
||||
Self::AuthenticationRequired(acp_error.message.clone().into())
|
||||
} else {
|
||||
@@ -344,7 +344,7 @@ impl AcpThreadView {
|
||||
let message_editor = cx.new(|cx| {
|
||||
let mut editor = MessageEditor::new(
|
||||
workspace.clone(),
|
||||
project.clone(),
|
||||
project.downgrade(),
|
||||
history_store.clone(),
|
||||
prompt_store.clone(),
|
||||
prompt_capabilities.clone(),
|
||||
@@ -369,7 +369,7 @@ impl AcpThreadView {
|
||||
let entry_view_state = cx.new(|_| {
|
||||
EntryViewState::new(
|
||||
workspace.clone(),
|
||||
project.clone(),
|
||||
project.downgrade(),
|
||||
history_store.clone(),
|
||||
prompt_store.clone(),
|
||||
prompt_capabilities.clone(),
|
||||
@@ -498,17 +498,7 @@ impl AcpThreadView {
|
||||
Some(new_version_available_tx),
|
||||
);
|
||||
|
||||
let agent_name = agent.name();
|
||||
let timeout = cx.background_executor().timer(Duration::from_secs(30));
|
||||
let connect_task = smol::future::or(
|
||||
agent.connect(root_dir.as_deref(), delegate, cx),
|
||||
async move {
|
||||
timeout.await;
|
||||
Err(anyhow::Error::new(LoadError::Other(
|
||||
format!("{agent_name} is unable to initialize after 30 seconds.").into(),
|
||||
)))
|
||||
},
|
||||
);
|
||||
let connect_task = agent.connect(root_dir.as_deref(), delegate, cx);
|
||||
let load_task = cx.spawn_in(window, async move |this, cx| {
|
||||
let connection = match connect_task.await {
|
||||
Ok((connection, login)) => {
|
||||
@@ -1486,18 +1476,8 @@ impl AcpThreadView {
|
||||
.iter()
|
||||
.any(|method| method.id.0.as_ref() == "claude-login")
|
||||
{
|
||||
available_commands.push(acp::AvailableCommand {
|
||||
name: "login".to_owned(),
|
||||
description: "Authenticate".to_owned(),
|
||||
input: None,
|
||||
meta: None,
|
||||
});
|
||||
available_commands.push(acp::AvailableCommand {
|
||||
name: "logout".to_owned(),
|
||||
description: "Authenticate".to_owned(),
|
||||
input: None,
|
||||
meta: None,
|
||||
});
|
||||
available_commands.push(acp::AvailableCommand::new("login", "Authenticate"));
|
||||
available_commands.push(acp::AvailableCommand::new("logout", "Authenticate"));
|
||||
}
|
||||
|
||||
let has_commands = !available_commands.is_empty();
|
||||
@@ -2572,7 +2552,7 @@ impl AcpThreadView {
|
||||
acp::ToolKind::Think => IconName::ToolThink,
|
||||
acp::ToolKind::Fetch => IconName::ToolWeb,
|
||||
acp::ToolKind::SwitchMode => IconName::ArrowRightLeft,
|
||||
acp::ToolKind::Other => IconName::ToolHammer,
|
||||
acp::ToolKind::Other | _ => IconName::ToolHammer,
|
||||
})
|
||||
}
|
||||
.size(IconSize::Small)
|
||||
@@ -2824,7 +2804,7 @@ impl AcpThreadView {
|
||||
})
|
||||
.gap_0p5()
|
||||
.children(options.iter().map(move |option| {
|
||||
let option_id = SharedString::from(option.id.0.clone());
|
||||
let option_id = SharedString::from(option.option_id.0.clone());
|
||||
Button::new((option_id, entry_ix), option.name.clone())
|
||||
.map(|this| {
|
||||
let (this, action) = match option.kind {
|
||||
@@ -2840,7 +2820,7 @@ impl AcpThreadView {
|
||||
this.icon(IconName::Close).icon_color(Color::Error),
|
||||
Some(&RejectOnce as &dyn Action),
|
||||
),
|
||||
acp::PermissionOptionKind::RejectAlways => {
|
||||
acp::PermissionOptionKind::RejectAlways | _ => {
|
||||
(this.icon(IconName::Close).icon_color(Color::Error), None)
|
||||
}
|
||||
};
|
||||
@@ -2865,7 +2845,7 @@ impl AcpThreadView {
|
||||
.label_size(LabelSize::Small)
|
||||
.on_click(cx.listener({
|
||||
let tool_call_id = tool_call_id.clone();
|
||||
let option_id = option.id.clone();
|
||||
let option_id = option.option_id.clone();
|
||||
let option_kind = option.kind;
|
||||
move |this, _, window, cx| {
|
||||
this.authorize_tool_call(
|
||||
@@ -3529,7 +3509,7 @@ impl AcpThreadView {
|
||||
(method.id.0.clone(), method.name.clone())
|
||||
};
|
||||
|
||||
Button::new(SharedString::from(method_id.clone()), name)
|
||||
Button::new(method_id.clone(), name)
|
||||
.label_size(LabelSize::Small)
|
||||
.map(|this| {
|
||||
if ix == 0 {
|
||||
@@ -3553,7 +3533,7 @@ impl AcpThreadView {
|
||||
);
|
||||
|
||||
this.authenticate(
|
||||
acp::AuthMethodId(method_id.clone()),
|
||||
acp::AuthMethodId::new(method_id.clone()),
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
@@ -3847,10 +3827,6 @@ impl AcpThreadView {
|
||||
.text_xs()
|
||||
.text_color(cx.theme().colors().text_muted)
|
||||
.child(match entry.status {
|
||||
acp::PlanEntryStatus::Pending => Icon::new(IconName::TodoPending)
|
||||
.size(IconSize::Small)
|
||||
.color(Color::Muted)
|
||||
.into_any_element(),
|
||||
acp::PlanEntryStatus::InProgress => {
|
||||
Icon::new(IconName::TodoProgress)
|
||||
.size(IconSize::Small)
|
||||
@@ -3864,6 +3840,12 @@ impl AcpThreadView {
|
||||
.color(Color::Success)
|
||||
.into_any_element()
|
||||
}
|
||||
acp::PlanEntryStatus::Pending | _ => {
|
||||
Icon::new(IconName::TodoPending)
|
||||
.size(IconSize::Small)
|
||||
.color(Color::Muted)
|
||||
.into_any_element()
|
||||
}
|
||||
})
|
||||
.child(MarkdownElement::new(
|
||||
entry.content.clone(),
|
||||
@@ -4437,7 +4419,7 @@ impl AcpThreadView {
|
||||
|
||||
self.authorize_tool_call(
|
||||
tool_call.id.clone(),
|
||||
option.id.clone(),
|
||||
option.option_id.clone(),
|
||||
option.kind,
|
||||
window,
|
||||
cx,
|
||||
@@ -6253,27 +6235,18 @@ pub(crate) mod tests {
|
||||
async fn test_notification_for_tool_authorization(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let tool_call_id = acp::ToolCallId("1".into());
|
||||
let tool_call = acp::ToolCall {
|
||||
id: tool_call_id.clone(),
|
||||
title: "Label".into(),
|
||||
kind: acp::ToolKind::Edit,
|
||||
status: acp::ToolCallStatus::Pending,
|
||||
content: vec!["hi".into()],
|
||||
locations: vec![],
|
||||
raw_input: None,
|
||||
raw_output: None,
|
||||
meta: None,
|
||||
};
|
||||
let tool_call_id = acp::ToolCallId::new("1");
|
||||
let tool_call = acp::ToolCall::new(tool_call_id.clone(), "Label")
|
||||
.kind(acp::ToolKind::Edit)
|
||||
.content(vec!["hi".into()]);
|
||||
let connection =
|
||||
StubAgentConnection::new().with_permission_requests(HashMap::from_iter([(
|
||||
tool_call_id,
|
||||
vec![acp::PermissionOption {
|
||||
id: acp::PermissionOptionId("1".into()),
|
||||
name: "Allow".into(),
|
||||
kind: acp::PermissionOptionKind::AllowOnce,
|
||||
meta: None,
|
||||
}],
|
||||
vec![acp::PermissionOption::new(
|
||||
"1",
|
||||
"Allow",
|
||||
acp::PermissionOptionKind::AllowOnce,
|
||||
)],
|
||||
)]));
|
||||
|
||||
connection.set_next_prompt_updates(vec![acp::SessionUpdate::ToolCall(tool_call)]);
|
||||
@@ -6492,10 +6465,7 @@ pub(crate) mod tests {
|
||||
fn default_response() -> Self {
|
||||
let conn = StubAgentConnection::new();
|
||||
conn.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk(
|
||||
acp::ContentChunk {
|
||||
content: "Default response".into(),
|
||||
meta: None,
|
||||
},
|
||||
acp::ContentChunk::new("Default response".into()),
|
||||
)]);
|
||||
Self::new(conn)
|
||||
}
|
||||
@@ -6552,13 +6522,13 @@ pub(crate) mod tests {
|
||||
self,
|
||||
project,
|
||||
action_log,
|
||||
SessionId("test".into()),
|
||||
watch::Receiver::constant(acp::PromptCapabilities {
|
||||
image: true,
|
||||
audio: true,
|
||||
embedded_context: true,
|
||||
meta: None,
|
||||
}),
|
||||
SessionId::new("test"),
|
||||
watch::Receiver::constant(
|
||||
acp::PromptCapabilities::new()
|
||||
.image(true)
|
||||
.audio(true)
|
||||
.embedded_context(true),
|
||||
),
|
||||
cx,
|
||||
)
|
||||
})))
|
||||
@@ -6616,13 +6586,13 @@ pub(crate) mod tests {
|
||||
self,
|
||||
project,
|
||||
action_log,
|
||||
SessionId("test".into()),
|
||||
watch::Receiver::constant(acp::PromptCapabilities {
|
||||
image: true,
|
||||
audio: true,
|
||||
embedded_context: true,
|
||||
meta: None,
|
||||
}),
|
||||
SessionId::new("test"),
|
||||
watch::Receiver::constant(
|
||||
acp::PromptCapabilities::new()
|
||||
.image(true)
|
||||
.audio(true)
|
||||
.embedded_context(true),
|
||||
),
|
||||
cx,
|
||||
)
|
||||
})))
|
||||
@@ -6646,10 +6616,7 @@ pub(crate) mod tests {
|
||||
_params: acp::PromptRequest,
|
||||
_cx: &mut App,
|
||||
) -> Task<gpui::Result<acp::PromptResponse>> {
|
||||
Task::ready(Ok(acp::PromptResponse {
|
||||
stop_reason: acp::StopReason::Refusal,
|
||||
meta: None,
|
||||
}))
|
||||
Task::ready(Ok(acp::PromptResponse::new(acp::StopReason::Refusal)))
|
||||
}
|
||||
|
||||
fn cancel(&self, _session_id: &acp::SessionId, _cx: &mut App) {
|
||||
@@ -6717,24 +6684,14 @@ pub(crate) mod tests {
|
||||
.unwrap();
|
||||
|
||||
// First user message
|
||||
connection.set_next_prompt_updates(vec![acp::SessionUpdate::ToolCall(acp::ToolCall {
|
||||
id: acp::ToolCallId("tool1".into()),
|
||||
title: "Edit file 1".into(),
|
||||
kind: acp::ToolKind::Edit,
|
||||
status: acp::ToolCallStatus::Completed,
|
||||
content: vec![acp::ToolCallContent::Diff {
|
||||
diff: acp::Diff {
|
||||
path: "/project/test1.txt".into(),
|
||||
old_text: Some("old content 1".into()),
|
||||
new_text: "new content 1".into(),
|
||||
meta: None,
|
||||
},
|
||||
}],
|
||||
locations: vec![],
|
||||
raw_input: None,
|
||||
raw_output: None,
|
||||
meta: None,
|
||||
})]);
|
||||
connection.set_next_prompt_updates(vec![acp::SessionUpdate::ToolCall(
|
||||
acp::ToolCall::new("tool1", "Edit file 1")
|
||||
.kind(acp::ToolKind::Edit)
|
||||
.status(acp::ToolCallStatus::Completed)
|
||||
.content(vec![acp::ToolCallContent::Diff(
|
||||
acp::Diff::new("/project/test1.txt", "new content 1").old_text("old content 1"),
|
||||
)]),
|
||||
)]);
|
||||
|
||||
thread
|
||||
.update(cx, |thread, cx| thread.send_raw("Give me a diff", cx))
|
||||
@@ -6760,24 +6717,14 @@ pub(crate) mod tests {
|
||||
});
|
||||
|
||||
// Second user message
|
||||
connection.set_next_prompt_updates(vec![acp::SessionUpdate::ToolCall(acp::ToolCall {
|
||||
id: acp::ToolCallId("tool2".into()),
|
||||
title: "Edit file 2".into(),
|
||||
kind: acp::ToolKind::Edit,
|
||||
status: acp::ToolCallStatus::Completed,
|
||||
content: vec![acp::ToolCallContent::Diff {
|
||||
diff: acp::Diff {
|
||||
path: "/project/test2.txt".into(),
|
||||
old_text: Some("old content 2".into()),
|
||||
new_text: "new content 2".into(),
|
||||
meta: None,
|
||||
},
|
||||
}],
|
||||
locations: vec![],
|
||||
raw_input: None,
|
||||
raw_output: None,
|
||||
meta: None,
|
||||
})]);
|
||||
connection.set_next_prompt_updates(vec![acp::SessionUpdate::ToolCall(
|
||||
acp::ToolCall::new("tool2", "Edit file 2")
|
||||
.kind(acp::ToolKind::Edit)
|
||||
.status(acp::ToolCallStatus::Completed)
|
||||
.content(vec![acp::ToolCallContent::Diff(
|
||||
acp::Diff::new("/project/test2.txt", "new content 2").old_text("old content 2"),
|
||||
)]),
|
||||
)]);
|
||||
|
||||
thread
|
||||
.update(cx, |thread, cx| thread.send_raw("Another one", cx))
|
||||
@@ -6851,14 +6798,7 @@ pub(crate) mod tests {
|
||||
let connection = StubAgentConnection::new();
|
||||
|
||||
connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk(
|
||||
acp::ContentChunk {
|
||||
content: acp::ContentBlock::Text(acp::TextContent {
|
||||
text: "Response".into(),
|
||||
annotations: None,
|
||||
meta: None,
|
||||
}),
|
||||
meta: None,
|
||||
},
|
||||
acp::ContentChunk::new("Response".into()),
|
||||
)]);
|
||||
|
||||
let (thread_view, cx) = setup_thread_view(StubAgentServer::new(connection), cx).await;
|
||||
@@ -6944,14 +6884,7 @@ pub(crate) mod tests {
|
||||
let connection = StubAgentConnection::new();
|
||||
|
||||
connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk(
|
||||
acp::ContentChunk {
|
||||
content: acp::ContentBlock::Text(acp::TextContent {
|
||||
text: "Response".into(),
|
||||
annotations: None,
|
||||
meta: None,
|
||||
}),
|
||||
meta: None,
|
||||
},
|
||||
acp::ContentChunk::new("Response".into()),
|
||||
)]);
|
||||
|
||||
let (thread_view, cx) =
|
||||
@@ -6991,14 +6924,7 @@ pub(crate) mod tests {
|
||||
|
||||
// Send
|
||||
connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk(
|
||||
acp::ContentChunk {
|
||||
content: acp::ContentBlock::Text(acp::TextContent {
|
||||
text: "New Response".into(),
|
||||
annotations: None,
|
||||
meta: None,
|
||||
}),
|
||||
meta: None,
|
||||
},
|
||||
acp::ContentChunk::new("New Response".into()),
|
||||
)]);
|
||||
|
||||
user_message_editor.update_in(cx, |_editor, window, cx| {
|
||||
@@ -7086,14 +7012,7 @@ pub(crate) mod tests {
|
||||
cx.update(|_, cx| {
|
||||
connection.send_update(
|
||||
session_id.clone(),
|
||||
acp::SessionUpdate::AgentMessageChunk(acp::ContentChunk {
|
||||
content: acp::ContentBlock::Text(acp::TextContent {
|
||||
text: "Response".into(),
|
||||
annotations: None,
|
||||
meta: None,
|
||||
}),
|
||||
meta: None,
|
||||
}),
|
||||
acp::SessionUpdate::AgentMessageChunk(acp::ContentChunk::new("Response".into())),
|
||||
cx,
|
||||
);
|
||||
connection.end_turn(session_id, acp::StopReason::EndTurn);
|
||||
@@ -7145,10 +7064,9 @@ pub(crate) mod tests {
|
||||
cx.update(|_, cx| {
|
||||
connection.send_update(
|
||||
session_id.clone(),
|
||||
acp::SessionUpdate::AgentMessageChunk(acp::ContentChunk {
|
||||
content: "Message 1 resp".into(),
|
||||
meta: None,
|
||||
}),
|
||||
acp::SessionUpdate::AgentMessageChunk(acp::ContentChunk::new(
|
||||
"Message 1 resp".into(),
|
||||
)),
|
||||
cx,
|
||||
);
|
||||
});
|
||||
@@ -7182,10 +7100,7 @@ pub(crate) mod tests {
|
||||
// Simulate a response sent after beginning to cancel
|
||||
connection.send_update(
|
||||
session_id.clone(),
|
||||
acp::SessionUpdate::AgentMessageChunk(acp::ContentChunk {
|
||||
content: "onse".into(),
|
||||
meta: None,
|
||||
}),
|
||||
acp::SessionUpdate::AgentMessageChunk(acp::ContentChunk::new("onse".into())),
|
||||
cx,
|
||||
);
|
||||
});
|
||||
@@ -7216,10 +7131,9 @@ pub(crate) mod tests {
|
||||
cx.update(|_, cx| {
|
||||
connection.send_update(
|
||||
session_id.clone(),
|
||||
acp::SessionUpdate::AgentMessageChunk(acp::ContentChunk {
|
||||
content: "Message 2 response".into(),
|
||||
meta: None,
|
||||
}),
|
||||
acp::SessionUpdate::AgentMessageChunk(acp::ContentChunk::new(
|
||||
"Message 2 response".into(),
|
||||
)),
|
||||
cx,
|
||||
);
|
||||
connection.end_turn(session_id.clone(), acp::StopReason::EndTurn);
|
||||
@@ -7258,14 +7172,7 @@ pub(crate) mod tests {
|
||||
|
||||
let connection = StubAgentConnection::new();
|
||||
connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk(
|
||||
acp::ContentChunk {
|
||||
content: acp::ContentBlock::Text(acp::TextContent {
|
||||
text: "Response".into(),
|
||||
annotations: None,
|
||||
meta: None,
|
||||
}),
|
||||
meta: None,
|
||||
},
|
||||
acp::ContentChunk::new("Response".into()),
|
||||
)]);
|
||||
|
||||
let (thread_view, cx) = setup_thread_view(StubAgentServer::new(connection), cx).await;
|
||||
@@ -7344,14 +7251,7 @@ pub(crate) mod tests {
|
||||
|
||||
let connection = StubAgentConnection::new();
|
||||
connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk(
|
||||
acp::ContentChunk {
|
||||
content: acp::ContentBlock::Text(acp::TextContent {
|
||||
text: "Response".into(),
|
||||
annotations: None,
|
||||
meta: None,
|
||||
}),
|
||||
meta: None,
|
||||
},
|
||||
acp::ContentChunk::new("Response".into()),
|
||||
)]);
|
||||
|
||||
let (thread_view, cx) = setup_thread_view(StubAgentServer::new(connection), cx).await;
|
||||
@@ -7399,54 +7299,4 @@ pub(crate) mod tests {
|
||||
assert_eq!(text, expected_txt);
|
||||
})
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_initialize_timeout(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
struct InfiniteInitialize;
|
||||
|
||||
impl AgentServer for InfiniteInitialize {
|
||||
fn telemetry_id(&self) -> &'static str {
|
||||
"test"
|
||||
}
|
||||
|
||||
fn logo(&self) -> ui::IconName {
|
||||
ui::IconName::Ai
|
||||
}
|
||||
|
||||
fn name(&self) -> SharedString {
|
||||
"Test".into()
|
||||
}
|
||||
|
||||
fn connect(
|
||||
&self,
|
||||
_root_dir: Option<&Path>,
|
||||
_delegate: AgentServerDelegate,
|
||||
cx: &mut App,
|
||||
) -> Task<gpui::Result<(Rc<dyn AgentConnection>, Option<task::SpawnInTerminal>)>>
|
||||
{
|
||||
cx.spawn(async |_| futures::future::pending().await)
|
||||
}
|
||||
|
||||
fn into_any(self: Rc<Self>) -> Rc<dyn Any> {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
let (thread_view, cx) = setup_thread_view(InfiniteInitialize, cx).await;
|
||||
|
||||
cx.executor().advance_clock(Duration::from_secs(31));
|
||||
cx.run_until_parked();
|
||||
|
||||
let error = thread_view.read_with(cx, |thread_view, _| match &thread_view.thread_state {
|
||||
ThreadState::LoadError(err) => err.clone(),
|
||||
_ => panic!("Incorrect thread state"),
|
||||
});
|
||||
|
||||
match error {
|
||||
LoadError::Other(str) => assert!(str.contains("initialize")),
|
||||
_ => panic!("Unexpected load error"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -36,7 +36,7 @@ use settings::{Settings, SettingsStore, update_settings_file};
|
||||
use ui::{
|
||||
Button, ButtonStyle, Chip, CommonAnimationExt, ContextMenu, ContextMenuEntry, Disclosure,
|
||||
Divider, DividerColor, ElevationIndex, IconName, IconPosition, IconSize, Indicator, LabelSize,
|
||||
PopoverMenu, Switch, SwitchColor, Tooltip, WithScrollbar, prelude::*,
|
||||
PopoverMenu, Switch, Tooltip, WithScrollbar, prelude::*,
|
||||
};
|
||||
use util::ResultExt as _;
|
||||
use workspace::{Workspace, create_and_open_local_file};
|
||||
@@ -838,7 +838,7 @@ impl AgentConfiguration {
|
||||
.min_w_0()
|
||||
.child(
|
||||
h_flex()
|
||||
.id(SharedString::from(format!("tooltip-{}", item_id)))
|
||||
.id(format!("tooltip-{}", item_id))
|
||||
.h_full()
|
||||
.w_3()
|
||||
.mr_2()
|
||||
@@ -879,7 +879,6 @@ impl AgentConfiguration {
|
||||
.child(context_server_configuration_menu)
|
||||
.child(
|
||||
Switch::new("context-server-switch", is_running.into())
|
||||
.color(SwitchColor::Accent)
|
||||
.on_click({
|
||||
let context_server_manager = self.context_server_store.clone();
|
||||
let fs = self.fs.clone();
|
||||
@@ -978,7 +977,10 @@ impl AgentConfiguration {
|
||||
} else {
|
||||
AgentIcon::Name(IconName::Ai)
|
||||
};
|
||||
(name, icon)
|
||||
let display_name = agent_server_store
|
||||
.agent_display_name(&name)
|
||||
.unwrap_or_else(|| name.0.clone());
|
||||
(name, icon, display_name)
|
||||
})
|
||||
.collect();
|
||||
|
||||
@@ -1085,6 +1087,7 @@ impl AgentConfiguration {
|
||||
.child(self.render_agent_server(
|
||||
AgentIcon::Name(IconName::AiClaude),
|
||||
"Claude Code",
|
||||
"Claude Code",
|
||||
false,
|
||||
cx,
|
||||
))
|
||||
@@ -1092,6 +1095,7 @@ impl AgentConfiguration {
|
||||
.child(self.render_agent_server(
|
||||
AgentIcon::Name(IconName::AiOpenAi),
|
||||
"Codex CLI",
|
||||
"Codex CLI",
|
||||
false,
|
||||
cx,
|
||||
))
|
||||
@@ -1099,16 +1103,23 @@ impl AgentConfiguration {
|
||||
.child(self.render_agent_server(
|
||||
AgentIcon::Name(IconName::AiGemini),
|
||||
"Gemini CLI",
|
||||
"Gemini CLI",
|
||||
false,
|
||||
cx,
|
||||
))
|
||||
.map(|mut parent| {
|
||||
for (name, icon) in user_defined_agents {
|
||||
for (name, icon, display_name) in user_defined_agents {
|
||||
parent = parent
|
||||
.child(
|
||||
Divider::horizontal().color(DividerColor::BorderFaded),
|
||||
)
|
||||
.child(self.render_agent_server(icon, name, true, cx));
|
||||
.child(self.render_agent_server(
|
||||
icon,
|
||||
name,
|
||||
display_name,
|
||||
true,
|
||||
cx,
|
||||
));
|
||||
}
|
||||
parent
|
||||
}),
|
||||
@@ -1119,11 +1130,13 @@ impl AgentConfiguration {
|
||||
fn render_agent_server(
|
||||
&self,
|
||||
icon: AgentIcon,
|
||||
name: impl Into<SharedString>,
|
||||
id: impl Into<SharedString>,
|
||||
display_name: impl Into<SharedString>,
|
||||
external: bool,
|
||||
cx: &mut Context<Self>,
|
||||
) -> impl IntoElement {
|
||||
let name = name.into();
|
||||
let id = id.into();
|
||||
let display_name = display_name.into();
|
||||
let icon = match icon {
|
||||
AgentIcon::Name(icon_name) => Icon::new(icon_name)
|
||||
.size(IconSize::Small)
|
||||
@@ -1133,12 +1146,15 @@ impl AgentConfiguration {
|
||||
.color(Color::Muted),
|
||||
};
|
||||
|
||||
let tooltip_id = SharedString::new(format!("agent-source-{}", name));
|
||||
let tooltip_message = format!("The {} agent was installed from an extension.", name);
|
||||
let tooltip_id = SharedString::new(format!("agent-source-{}", id));
|
||||
let tooltip_message = format!(
|
||||
"The {} agent was installed from an extension.",
|
||||
display_name
|
||||
);
|
||||
|
||||
let agent_server_name = ExternalAgentServerName(name.clone());
|
||||
let agent_server_name = ExternalAgentServerName(id.clone());
|
||||
|
||||
let uninstall_btn_id = SharedString::from(format!("uninstall-{}", name));
|
||||
let uninstall_btn_id = SharedString::from(format!("uninstall-{}", id));
|
||||
let uninstall_button = IconButton::new(uninstall_btn_id, IconName::Trash)
|
||||
.icon_color(Color::Muted)
|
||||
.icon_size(IconSize::Small)
|
||||
@@ -1162,7 +1178,7 @@ impl AgentConfiguration {
|
||||
h_flex()
|
||||
.gap_1p5()
|
||||
.child(icon)
|
||||
.child(Label::new(name))
|
||||
.child(Label::new(display_name))
|
||||
.when(external, |this| {
|
||||
this.child(
|
||||
div()
|
||||
|
||||
@@ -87,7 +87,7 @@ impl ConfigureContextServerToolsModal {
|
||||
v_flex()
|
||||
.child(
|
||||
h_flex()
|
||||
.id(SharedString::from(format!("tool-header-{}", index)))
|
||||
.id(format!("tool-header-{}", index))
|
||||
.py_1()
|
||||
.pl_1()
|
||||
.pr_2()
|
||||
|
||||
@@ -422,7 +422,7 @@ impl ManageProfilesModal {
|
||||
let is_focused = profile.navigation.focus_handle.contains_focused(window, cx);
|
||||
|
||||
div()
|
||||
.id(SharedString::from(format!("profile-{}", profile.id)))
|
||||
.id(format!("profile-{}", profile.id))
|
||||
.track_focus(&profile.navigation.focus_handle)
|
||||
.on_action({
|
||||
let profile_id = profile.id.clone();
|
||||
@@ -431,7 +431,7 @@ impl ManageProfilesModal {
|
||||
})
|
||||
})
|
||||
.child(
|
||||
ListItem::new(SharedString::from(format!("profile-{}", profile.id)))
|
||||
ListItem::new(format!("profile-{}", profile.id))
|
||||
.toggle_state(is_focused)
|
||||
.inset(true)
|
||||
.spacing(ListItemSpacing::Sparse)
|
||||
|
||||
@@ -63,6 +63,10 @@ impl AgentModelSelector {
|
||||
pub fn toggle(&self, window: &mut Window, cx: &mut Context<Self>) {
|
||||
self.menu_handle.toggle(window, cx);
|
||||
}
|
||||
|
||||
pub fn active_model(&self, cx: &App) -> Option<language_model::ConfiguredModel> {
|
||||
self.selector.read(cx).delegate.active_model(cx)
|
||||
}
|
||||
}
|
||||
|
||||
impl Render for AgentModelSelector {
|
||||
@@ -98,7 +102,7 @@ impl Render for AgentModelSelector {
|
||||
.child(
|
||||
Icon::new(IconName::ChevronDown)
|
||||
.color(color)
|
||||
.size(IconSize::XSmall),
|
||||
.size(IconSize::Small),
|
||||
),
|
||||
move |_window, cx| {
|
||||
Tooltip::for_action_in("Change Model", &ToggleModelSelector, &focus_handle, cx)
|
||||
|
||||
@@ -2083,8 +2083,11 @@ impl AgentPanel {
|
||||
|
||||
for agent_name in agent_names {
|
||||
let icon_path = agent_server_store.agent_icon(&agent_name);
|
||||
let display_name = agent_server_store
|
||||
.agent_display_name(&agent_name)
|
||||
.unwrap_or_else(|| agent_name.0.clone());
|
||||
|
||||
let mut entry = ContextMenuEntry::new(agent_name.clone());
|
||||
let mut entry = ContextMenuEntry::new(display_name);
|
||||
|
||||
if let Some(icon_path) = icon_path {
|
||||
entry = entry.custom_icon_svg(icon_path);
|
||||
@@ -2685,16 +2688,17 @@ impl rules_library::InlineAssistDelegate for PromptLibraryInlineAssist {
|
||||
return;
|
||||
};
|
||||
let project = workspace.read(cx).project().downgrade();
|
||||
let thread_store = panel.read(cx).thread_store().clone();
|
||||
assistant.assist(
|
||||
prompt_editor,
|
||||
self.workspace.clone(),
|
||||
project,
|
||||
panel.read(cx).thread_store().clone(),
|
||||
thread_store,
|
||||
None,
|
||||
initial_prompt,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
);
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
@@ -7,6 +7,8 @@ mod buffer_codegen;
|
||||
mod completion_provider;
|
||||
mod context;
|
||||
mod context_server_configuration;
|
||||
#[cfg(test)]
|
||||
mod evals;
|
||||
mod inline_assistant;
|
||||
mod inline_prompt_editor;
|
||||
mod language_model_selector;
|
||||
|
||||
@@ -5,22 +5,26 @@ use client::telemetry::Telemetry;
|
||||
use cloud_llm_client::CompletionIntent;
|
||||
use collections::HashSet;
|
||||
use editor::{Anchor, AnchorRangeExt, MultiBuffer, MultiBufferSnapshot, ToOffset as _, ToPoint};
|
||||
use feature_flags::{FeatureFlagAppExt as _, InlineAssistantV2FeatureFlag};
|
||||
use futures::{
|
||||
SinkExt, Stream, StreamExt, TryStreamExt as _,
|
||||
channel::mpsc,
|
||||
future::{LocalBoxFuture, Shared},
|
||||
join,
|
||||
};
|
||||
use gpui::{App, AppContext as _, Context, Entity, EventEmitter, Subscription, Task};
|
||||
use gpui::{App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Subscription, Task};
|
||||
use language::{Buffer, IndentKind, Point, TransactionId, line_diff};
|
||||
use language_model::{
|
||||
LanguageModel, LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage,
|
||||
LanguageModelTextStream, Role, report_assistant_event,
|
||||
LanguageModel, LanguageModelCompletionError, LanguageModelRegistry, LanguageModelRequest,
|
||||
LanguageModelRequestMessage, LanguageModelRequestTool, LanguageModelTextStream, Role,
|
||||
report_assistant_event,
|
||||
};
|
||||
use multi_buffer::MultiBufferRow;
|
||||
use parking_lot::Mutex;
|
||||
use prompt_store::PromptBuilder;
|
||||
use rope::Rope;
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use smol::future::FutureExt;
|
||||
use std::{
|
||||
cmp,
|
||||
@@ -34,6 +38,29 @@ use std::{
|
||||
};
|
||||
use streaming_diff::{CharOperation, LineDiff, LineOperation, StreamingDiff};
|
||||
use telemetry_events::{AssistantEventData, AssistantKind, AssistantPhase};
|
||||
use ui::SharedString;
|
||||
|
||||
/// Use this tool to provide a message to the user when you're unable to complete a task.
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
pub struct FailureMessageInput {
/// A brief message to the user explaining why you're unable to fulfill the request or to ask a question about the request.
///
/// The message may use markdown formatting if you wish.
pub message: String,
}

/// Replaces text in <rewrite_this></rewrite_this> tags with your replacement_text.
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
pub struct RewriteSectionInput {
/// A brief description of the edit you have made.
///
/// The description may use markdown formatting if you wish.
/// This is optional - if the edit is simple or obvious, you should leave it empty.
pub description: String,

/// The text to replace the section with.
pub replacement_text: String,
}

pub struct BufferCodegen {
alternatives: Vec<Entity<CodegenAlternative>>,
@@ -92,6 +119,10 @@ impl BufferCodegen {
.push(cx.subscribe(&codegen, |_, _, event, cx| cx.emit(*event)));
}

pub fn active_completion(&self, cx: &App) -> Option<String> {
self.active_alternative().read(cx).current_completion()
}

pub fn active_alternative(&self) -> &Entity<CodegenAlternative> {
&self.alternatives[self.active_alternative]
}
@@ -214,6 +245,10 @@ impl BufferCodegen {
pub fn last_equal_ranges<'a>(&self, cx: &'a App) -> &'a [Range<Anchor>] {
self.active_alternative().read(cx).last_equal_ranges()
}

pub fn selected_text<'a>(&self, cx: &'a App) -> Option<&'a str> {
self.active_alternative().read(cx).selected_text()
}
}

impl EventEmitter<CodegenEvent> for BufferCodegen {}
@@ -237,7 +272,9 @@ pub struct CodegenAlternative {
line_operations: Vec<LineOperation>,
elapsed_time: Option<f64>,
completion: Option<String>,
selected_text: Option<String>,
pub message_id: Option<String>,
pub model_explanation: Option<SharedString>,
}

impl EventEmitter<CodegenEvent> for CodegenAlternative {}
@@ -288,14 +325,16 @@ impl CodegenAlternative {
generation: Task::ready(()),
diff: Diff::default(),
telemetry,
_subscription: cx.subscribe(&buffer, Self::handle_buffer_event),
builder,
active,
active: active,
edits: Vec::new(),
line_operations: Vec::new(),
range,
elapsed_time: None,
completion: None,
selected_text: None,
model_explanation: None,
_subscription: cx.subscribe(&buffer, Self::handle_buffer_event),
}
}

@@ -358,20 +397,126 @@ impl CodegenAlternative {
let api_key = model.api_key(cx);
let telemetry_id = model.telemetry_id();
let provider_id = model.provider_id();
let stream: LocalBoxFuture<Result<LanguageModelTextStream>> =
if user_prompt.trim().to_lowercase() == "delete" {
async { Ok(LanguageModelTextStream::default()) }.boxed_local()
} else {
let request = self.build_request(&model, user_prompt, context_task, cx)?;
cx.spawn(async move |_, cx| {
Ok(model.stream_completion_text(request.await, cx).await?)
})
.boxed_local()
};
self.handle_stream(telemetry_id, provider_id.to_string(), api_key, stream, cx);

if cx.has_flag::<InlineAssistantV2FeatureFlag>() {
let request = self.build_request(&model, user_prompt, context_task, cx)?;
let tool_use =
cx.spawn(async move |_, cx| model.stream_completion_tool(request.await, cx).await);
self.handle_tool_use(telemetry_id, provider_id.to_string(), api_key, tool_use, cx);
} else {
let stream: LocalBoxFuture<Result<LanguageModelTextStream>> =
if user_prompt.trim().to_lowercase() == "delete" {
async { Ok(LanguageModelTextStream::default()) }.boxed_local()
} else {
let request = self.build_request(&model, user_prompt, context_task, cx)?;
cx.spawn(async move |_, cx| {
Ok(model.stream_completion_text(request.await, cx).await?)
})
.boxed_local()
};
self.handle_stream(telemetry_id, provider_id.to_string(), api_key, stream, cx);
}

Ok(())
}

fn build_request_v2(
&self,
model: &Arc<dyn LanguageModel>,
user_prompt: String,
context_task: Shared<Task<Option<LoadedContext>>>,
cx: &mut App,
) -> Result<Task<LanguageModelRequest>> {
let buffer = self.buffer.read(cx).snapshot(cx);
let language = buffer.language_at(self.range.start);
let language_name = if let Some(language) = language.as_ref() {
if Arc::ptr_eq(language, &language::PLAIN_TEXT) {
None
} else {
Some(language.name())
}
} else {
None
};

let language_name = language_name.as_ref();
let start = buffer.point_to_buffer_offset(self.range.start);
let end = buffer.point_to_buffer_offset(self.range.end);
let (buffer, range) = if let Some((start, end)) = start.zip(end) {
let (start_buffer, start_buffer_offset) = start;
let (end_buffer, end_buffer_offset) = end;
if start_buffer.remote_id() == end_buffer.remote_id() {
(start_buffer.clone(), start_buffer_offset..end_buffer_offset)
} else {
anyhow::bail!("invalid transformation range");
}
} else {
anyhow::bail!("invalid transformation range");
};

let system_prompt = self
.builder
.generate_inline_transformation_prompt_v2(
language_name,
buffer,
range.start.0..range.end.0,
)
.context("generating content prompt")?;

let temperature = AgentSettings::temperature_for_model(model, cx);

let tool_input_format = model.tool_input_format();

Ok(cx.spawn(async move |_cx| {
let mut messages = vec![LanguageModelRequestMessage {
role: Role::System,
content: vec![system_prompt.into()],
cache: false,
reasoning_details: None,
}];

let mut user_message = LanguageModelRequestMessage {
role: Role::User,
content: Vec::new(),
cache: false,
reasoning_details: None,
};

if let Some(context) = context_task.await {
context.add_to_request_message(&mut user_message);
}

user_message.content.push(user_prompt.into());
messages.push(user_message);

let tools = vec![
LanguageModelRequestTool {
name: "rewrite_section".to_string(),
description: "Replaces text in <rewrite_this></rewrite_this> tags with your replacement_text.".to_string(),
input_schema: language_model::tool_schema::root_schema_for::<RewriteSectionInput>(tool_input_format).to_value(),
},
LanguageModelRequestTool {
name: "failure_message".to_string(),
description: "Use this tool to provide a message to the user when you're unable to complete a task.".to_string(),
input_schema: language_model::tool_schema::root_schema_for::<FailureMessageInput>(tool_input_format).to_value(),
},
];

LanguageModelRequest {
thread_id: None,
prompt_id: None,
intent: Some(CompletionIntent::InlineAssist),
mode: None,
tools,
tool_choice: None,
stop: Vec::new(),
temperature,
messages,
thinking_allowed: false,
}
}))
}

fn build_request(
&self,
model: &Arc<dyn LanguageModel>,
@@ -379,6 +524,10 @@ impl CodegenAlternative {
context_task: Shared<Task<Option<LoadedContext>>>,
cx: &mut App,
) -> Result<Task<LanguageModelRequest>> {
if cx.has_flag::<InlineAssistantV2FeatureFlag>() {
return self.build_request_v2(model, user_prompt, context_task, cx);
}

let buffer = self.buffer.read(cx).snapshot(cx);
let language = buffer.language_at(self.range.start);
let language_name = if let Some(language) = language.as_ref() {
@@ -469,6 +618,8 @@ impl CodegenAlternative {
.text_for_range(self.range.start..self.range.end)
.collect::<Rope>();

self.selected_text = Some(selected_text.to_string());

let selection_start = self.range.start.to_point(&snapshot);

// Start with the indentation of the first line in the selection
@@ -510,6 +661,7 @@ impl CodegenAlternative {

self.generation = cx.spawn(async move |codegen, cx| {
let stream = stream.await;

let token_usage = stream
.as_ref()
.ok()
@@ -719,6 +871,7 @@ impl CodegenAlternative {
output_tokens = usage.output_tokens,
)
}

cx.emit(CodegenEvent::Finished);
cx.notify();
})
@@ -727,6 +880,14 @@ impl CodegenAlternative {
cx.notify();
}

pub fn current_completion(&self) -> Option<String> {
self.completion.clone()
}

pub fn selected_text(&self) -> Option<&str> {
self.selected_text.as_deref()
}

pub fn stop(&mut self, cx: &mut Context<Self>) {
self.last_equal_ranges.clear();
if self.diff.is_empty() {
@@ -898,6 +1059,101 @@ impl CodegenAlternative {
.ok();
})
}

fn handle_tool_use(
&mut self,
_telemetry_id: String,
_provider_id: String,
_api_key: Option<String>,
tool_use: impl 'static
+ Future<
Output = Result<language_model::LanguageModelToolUse, LanguageModelCompletionError>,
>,
cx: &mut Context<Self>,
) {
self.diff = Diff::default();
self.status = CodegenStatus::Pending;

self.generation = cx.spawn(async move |codegen, cx| {
let finish_with_status = |status: CodegenStatus, cx: &mut AsyncApp| {
let _ = codegen.update(cx, |this, cx| {
this.status = status;
cx.emit(CodegenEvent::Finished);
cx.notify();
});
};

let tool_use = tool_use.await;

match tool_use {
Ok(tool_use) if tool_use.name.as_ref() == "rewrite_section" => {
// Parse the input JSON into RewriteSectionInput
match serde_json::from_value::<RewriteSectionInput>(tool_use.input) {
Ok(input) => {
// Store the description if non-empty
let description = if !input.description.trim().is_empty() {
Some(input.description.clone())
} else {
None
};

// Apply the replacement text to the buffer and compute diff
let batch_diff_task = codegen
.update(cx, |this, cx| {
this.model_explanation = description.map(Into::into);
let range = this.range.clone();
this.apply_edits(
std::iter::once((range, input.replacement_text)),
cx,
);
this.reapply_batch_diff(cx)
})
.ok();

// Wait for the diff computation to complete
if let Some(diff_task) = batch_diff_task {
diff_task.await;
}

finish_with_status(CodegenStatus::Done, cx);
return;
}
Err(e) => {
finish_with_status(CodegenStatus::Error(e.into()), cx);
return;
}
}
}
Ok(tool_use) if tool_use.name.as_ref() == "failure_message" => {
// Handle failure message tool use
match serde_json::from_value::<FailureMessageInput>(tool_use.input) {
Ok(input) => {
let _ = codegen.update(cx, |this, _cx| {
// Store the failure message as the tool description
this.model_explanation = Some(input.message.into());
});
finish_with_status(CodegenStatus::Done, cx);
return;
}
Err(e) => {
finish_with_status(CodegenStatus::Error(e.into()), cx);
return;
}
}
}
Ok(_tool_use) => {
// Unexpected tool.
finish_with_status(CodegenStatus::Done, cx);
return;
}
Err(e) => {
finish_with_status(CodegenStatus::Error(e.into()), cx);
return;
}
}
});
cx.notify();
}
}

#[derive(Copy, Clone, Debug)]
|
||||
@@ -1059,8 +1315,9 @@ mod tests {
|
||||
};
|
||||
use gpui::TestAppContext;
|
||||
use indoc::indoc;
|
||||
use language::{Buffer, Language, LanguageConfig, LanguageMatcher, Point, tree_sitter_rust};
|
||||
use language::{Buffer, Point};
|
||||
use language_model::{LanguageModelRegistry, TokenUsage};
|
||||
use languages::rust_lang;
|
||||
use rand::prelude::*;
|
||||
use settings::SettingsStore;
|
||||
use std::{future, sync::Arc};
|
||||
@@ -1077,7 +1334,7 @@ mod tests {
|
||||
}
|
||||
}
|
||||
"};
|
||||
let buffer = cx.new(|cx| Buffer::local(text, cx).with_language(Arc::new(rust_lang()), cx));
|
||||
let buffer = cx.new(|cx| Buffer::local(text, cx).with_language(rust_lang(), cx));
|
||||
let buffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx));
|
||||
let range = buffer.read_with(cx, |buffer, cx| {
|
||||
let snapshot = buffer.snapshot(cx);
|
||||
@@ -1139,7 +1396,7 @@ mod tests {
|
||||
le
|
||||
}
|
||||
"};
|
||||
let buffer = cx.new(|cx| Buffer::local(text, cx).with_language(Arc::new(rust_lang()), cx));
|
||||
let buffer = cx.new(|cx| Buffer::local(text, cx).with_language(rust_lang(), cx));
|
||||
let buffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx));
|
||||
let range = buffer.read_with(cx, |buffer, cx| {
|
||||
let snapshot = buffer.snapshot(cx);
|
||||
@@ -1203,7 +1460,7 @@ mod tests {
|
||||
" \n",
|
||||
"}\n" //
|
||||
);
|
||||
let buffer = cx.new(|cx| Buffer::local(text, cx).with_language(Arc::new(rust_lang()), cx));
|
||||
let buffer = cx.new(|cx| Buffer::local(text, cx).with_language(rust_lang(), cx));
|
||||
let buffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx));
|
||||
let range = buffer.read_with(cx, |buffer, cx| {
|
||||
let snapshot = buffer.snapshot(cx);
|
||||
@@ -1319,7 +1576,7 @@ mod tests {
|
||||
let x = 0;
|
||||
}
|
||||
"};
|
||||
let buffer = cx.new(|cx| Buffer::local(text, cx).with_language(Arc::new(rust_lang()), cx));
|
||||
let buffer = cx.new(|cx| Buffer::local(text, cx).with_language(rust_lang(), cx));
|
||||
let buffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx));
|
||||
let range = buffer.read_with(cx, |buffer, cx| {
|
||||
let snapshot = buffer.snapshot(cx);
|
||||
@@ -1436,27 +1693,4 @@ mod tests {
|
||||
});
|
||||
chunks_tx
|
||||
}
|
||||
|
||||
fn rust_lang() -> Language {
|
||||
Language::new(
|
||||
LanguageConfig {
|
||||
name: "Rust".into(),
|
||||
matcher: LanguageMatcher {
|
||||
path_suffixes: vec!["rs".to_string()],
|
||||
..Default::default()
|
||||
},
|
||||
..Default::default()
|
||||
},
|
||||
Some(tree_sitter_rust::LANGUAGE.into()),
|
||||
)
|
||||
.with_indents_query(
|
||||
r#"
|
||||
(call_expression) @indent
|
||||
(field_expression) @indent
|
||||
(_ "(" ")" @end) @indent
|
||||
(_ "{" "}" @end) @indent
|
||||
"#,
|
||||
)
|
||||
.unwrap()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1114,7 +1114,6 @@ impl<T: PromptCompletionProviderDelegate> CompletionProvider for PromptCompletio
|
||||
position: language::Anchor,
|
||||
_text: &str,
|
||||
_trigger_in_words: bool,
|
||||
_menu_is_open: bool,
|
||||
cx: &mut Context<Editor>,
|
||||
) -> bool {
|
||||
let buffer = buffer.read(cx);
|
||||
|
||||
crates/agent_ui/src/evals.rs (new file, 89 lines)
@@ -0,0 +1,89 @@
use std::str::FromStr;

use crate::inline_assistant::test::run_inline_assistant_test;

use eval_utils::{EvalOutput, NoProcessor};
use gpui::TestAppContext;
use language_model::{LanguageModelRegistry, SelectedModel};
use rand::{SeedableRng as _, rngs::StdRng};

#[test]
#[cfg_attr(not(feature = "unit-eval"), ignore)]
fn eval_single_cursor_edit() {
eval_utils::eval(20, 1.0, NoProcessor, move || {
run_eval(
&EvalInput {
prompt: "Rename this variable to buffer_text".to_string(),
buffer: indoc::indoc! {"
struct EvalExampleStruct {
text: Strˇing,
prompt: String,
}
"}
.to_string(),
},
&|_, output| {
let expected = indoc::indoc! {"
struct EvalExampleStruct {
buffer_text: String,
prompt: String,
}
"};
if output == expected {
EvalOutput {
outcome: eval_utils::OutcomeKind::Passed,
data: "Passed!".to_string(),
metadata: (),
}
} else {
EvalOutput {
outcome: eval_utils::OutcomeKind::Failed,
data: format!("Failed to rename variable, output: {}", output),
metadata: (),
}
}
},
)
});
}

struct EvalInput {
buffer: String,
prompt: String,
}

fn run_eval(
input: &EvalInput,
judge: &dyn Fn(&EvalInput, &str) -> eval_utils::EvalOutput<()>,
) -> eval_utils::EvalOutput<()> {
let dispatcher = gpui::TestDispatcher::new(StdRng::from_os_rng());
let mut cx = TestAppContext::build(dispatcher, None);
cx.skip_drawing();

let buffer_text = run_inline_assistant_test(
input.buffer.clone(),
input.prompt.clone(),
|cx| {
// Reconfigure to use a real model instead of the fake one
let model_name = std::env::var("ZED_AGENT_MODEL")
.unwrap_or("anthropic/claude-sonnet-4-latest".into());

let selected_model = SelectedModel::from_str(&model_name)
.expect("Invalid model format. Use 'provider/model-id'");

log::info!("Selected model: {selected_model:?}");

cx.update(|_, cx| {
LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
registry.select_inline_assistant_model(Some(&selected_model), cx);
});
});
},
|_cx| {
log::info!("Waiting for actual response from the LLM...");
},
&mut cx,
);

judge(input, &buffer_text)
}
@@ -32,7 +32,7 @@ use editor::{
|
||||
},
|
||||
};
|
||||
use fs::Fs;
|
||||
use futures::FutureExt;
|
||||
use futures::{FutureExt, channel::mpsc};
|
||||
use gpui::{
|
||||
App, Context, Entity, Focusable, Global, HighlightStyle, Subscription, Task, UpdateGlobal,
|
||||
WeakEntity, Window, point,
|
||||
@@ -102,6 +102,7 @@ pub struct InlineAssistant {
|
||||
prompt_builder: Arc<PromptBuilder>,
|
||||
telemetry: Arc<Telemetry>,
|
||||
fs: Arc<dyn Fs>,
|
||||
_inline_assistant_completions: Option<mpsc::UnboundedSender<anyhow::Result<InlineAssistId>>>,
|
||||
}
|
||||
|
||||
impl Global for InlineAssistant {}
|
||||
@@ -123,9 +124,18 @@ impl InlineAssistant {
|
||||
prompt_builder,
|
||||
telemetry,
|
||||
fs,
|
||||
_inline_assistant_completions: None,
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
pub fn set_completion_receiver(
|
||||
&mut self,
|
||||
sender: mpsc::UnboundedSender<anyhow::Result<InlineAssistId>>,
|
||||
) {
|
||||
self._inline_assistant_completions = Some(sender);
|
||||
}
|
||||
|
||||
pub fn register_workspace(
|
||||
&mut self,
|
||||
workspace: &Entity<Workspace>,
|
||||
@@ -287,7 +297,7 @@ impl InlineAssistant {
|
||||
action.prompt.clone(),
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
);
|
||||
})
|
||||
}
|
||||
InlineAssistTarget::Terminal(active_terminal) => {
|
||||
@@ -301,8 +311,8 @@ impl InlineAssistant {
|
||||
action.prompt.clone(),
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
);
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
@@ -377,17 +387,9 @@ impl InlineAssistant {
|
||||
let mut selections = Vec::<Selection<Point>>::new();
|
||||
let mut newest_selection = None;
|
||||
for mut selection in initial_selections {
|
||||
if selection.end > selection.start {
|
||||
selection.start.column = 0;
|
||||
// If the selection ends at the start of the line, we don't want to include it.
|
||||
if selection.end.column == 0 {
|
||||
selection.end.row -= 1;
|
||||
}
|
||||
selection.end.column = snapshot
|
||||
.buffer_snapshot()
|
||||
.line_len(MultiBufferRow(selection.end.row));
|
||||
} else if let Some(fold) =
|
||||
snapshot.crease_for_buffer_row(MultiBufferRow(selection.end.row))
|
||||
if selection.end == selection.start
|
||||
&& let Some(fold) =
|
||||
snapshot.crease_for_buffer_row(MultiBufferRow(selection.end.row))
|
||||
{
|
||||
selection.start = fold.range().start;
|
||||
selection.end = fold.range().end;
|
||||
@@ -414,6 +416,15 @@ impl InlineAssistant {
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
selection.start.column = 0;
|
||||
// If the selection ends at the start of the line, we don't want to include it.
|
||||
if selection.end.column == 0 && selection.start.row != selection.end.row {
|
||||
selection.end.row -= 1;
|
||||
}
|
||||
selection.end.column = snapshot
|
||||
.buffer_snapshot()
|
||||
.line_len(MultiBufferRow(selection.end.row));
|
||||
}
|
||||
|
||||
if let Some(prev_selection) = selections.last_mut()
|
||||
@@ -534,14 +545,15 @@ impl InlineAssistant {
|
||||
}
|
||||
}
|
||||
|
||||
let [prompt_block_id, end_block_id] =
|
||||
self.insert_assist_blocks(editor, &range, &prompt_editor, cx);
|
||||
let [prompt_block_id, tool_description_block_id, end_block_id] =
|
||||
self.insert_assist_blocks(&editor, &range, &prompt_editor, cx);
|
||||
|
||||
assists.push((
|
||||
assist_id,
|
||||
range.clone(),
|
||||
prompt_editor,
|
||||
prompt_block_id,
|
||||
tool_description_block_id,
|
||||
end_block_id,
|
||||
));
|
||||
}
|
||||
@@ -560,7 +572,15 @@ impl InlineAssistant {
|
||||
};
|
||||
|
||||
let mut assist_group = InlineAssistGroup::new();
|
||||
for (assist_id, range, prompt_editor, prompt_block_id, end_block_id) in assists {
|
||||
for (
|
||||
assist_id,
|
||||
range,
|
||||
prompt_editor,
|
||||
prompt_block_id,
|
||||
tool_description_block_id,
|
||||
end_block_id,
|
||||
) in assists
|
||||
{
|
||||
let codegen = prompt_editor.read(cx).codegen().clone();
|
||||
|
||||
self.assists.insert(
|
||||
@@ -571,6 +591,7 @@ impl InlineAssistant {
|
||||
editor,
|
||||
&prompt_editor,
|
||||
prompt_block_id,
|
||||
tool_description_block_id,
|
||||
end_block_id,
|
||||
range,
|
||||
codegen,
|
||||
@@ -598,13 +619,13 @@ impl InlineAssistant {
|
||||
initial_prompt: Option<String>,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) {
|
||||
) -> Option<InlineAssistId> {
|
||||
let snapshot = editor.update(cx, |editor, cx| editor.snapshot(window, cx));
|
||||
|
||||
let Some((codegen_ranges, newest_selection)) =
|
||||
self.codegen_ranges(editor, &snapshot, window, cx)
|
||||
else {
|
||||
return;
|
||||
return None;
|
||||
};
|
||||
|
||||
let assist_to_focus = self.batch_assist(
|
||||
@@ -624,6 +645,8 @@ impl InlineAssistant {
|
||||
if let Some(assist_id) = assist_to_focus {
|
||||
self.focus_assist(assist_id, window, cx);
|
||||
}
|
||||
|
||||
assist_to_focus
|
||||
}
|
||||
|
||||
pub fn suggest_assist(
|
||||
@@ -677,7 +700,7 @@ impl InlineAssistant {
|
||||
range: &Range<Anchor>,
|
||||
prompt_editor: &Entity<PromptEditor<BufferCodegen>>,
|
||||
cx: &mut App,
|
||||
) -> [CustomBlockId; 2] {
|
||||
) -> [CustomBlockId; 3] {
|
||||
let prompt_editor_height = prompt_editor.update(cx, |prompt_editor, cx| {
|
||||
prompt_editor
|
||||
.editor
|
||||
@@ -691,6 +714,14 @@ impl InlineAssistant {
|
||||
render: build_assist_editor_renderer(prompt_editor),
|
||||
priority: 0,
|
||||
},
|
||||
// Placeholder for tool description - will be updated dynamically
|
||||
BlockProperties {
|
||||
style: BlockStyle::Flex,
|
||||
placement: BlockPlacement::Below(range.end),
|
||||
height: Some(0),
|
||||
render: Arc::new(|_cx| div().into_any_element()),
|
||||
priority: 0,
|
||||
},
|
||||
BlockProperties {
|
||||
style: BlockStyle::Sticky,
|
||||
placement: BlockPlacement::Below(range.end),
|
||||
@@ -709,7 +740,7 @@ impl InlineAssistant {
|
||||
|
||||
editor.update(cx, |editor, cx| {
|
||||
let block_ids = editor.insert_blocks(assist_blocks, None, cx);
|
||||
[block_ids[0], block_ids[1]]
|
||||
[block_ids[0], block_ids[1], block_ids[2]]
|
||||
})
|
||||
}
|
||||
|
||||
@@ -1101,6 +1132,9 @@ impl InlineAssistant {
|
||||
let mut to_remove = decorations.removed_line_block_ids;
|
||||
to_remove.insert(decorations.prompt_block_id);
|
||||
to_remove.insert(decorations.end_block_id);
|
||||
if let Some(tool_description_block_id) = decorations.model_explanation {
|
||||
to_remove.insert(tool_description_block_id);
|
||||
}
|
||||
editor.remove_blocks(to_remove, None, cx);
|
||||
});
|
||||
|
||||
@@ -1421,8 +1455,60 @@ impl InlineAssistant {
|
||||
let old_snapshot = codegen.snapshot(cx);
|
||||
let old_buffer = codegen.old_buffer(cx);
|
||||
let deleted_row_ranges = codegen.diff(cx).deleted_row_ranges.clone();
|
||||
// let model_explanation = codegen.model_explanation(cx);
|
||||
|
||||
editor.update(cx, |editor, cx| {
|
||||
// Update tool description block
|
||||
// if let Some(description) = model_explanation {
|
||||
// if let Some(block_id) = decorations.model_explanation {
|
||||
// editor.remove_blocks(HashSet::from_iter([block_id]), None, cx);
|
||||
// let new_block_id = editor.insert_blocks(
|
||||
// [BlockProperties {
|
||||
// style: BlockStyle::Flex,
|
||||
// placement: BlockPlacement::Below(assist.range.end),
|
||||
// height: Some(1),
|
||||
// render: Arc::new({
|
||||
// let description = description.clone();
|
||||
// move |cx| {
|
||||
// div()
|
||||
// .w_full()
|
||||
// .py_1()
|
||||
// .px_2()
|
||||
// .bg(cx.theme().colors().editor_background)
|
||||
// .border_y_1()
|
||||
// .border_color(cx.theme().status().info_border)
|
||||
// .child(
|
||||
// Label::new(description.clone())
|
||||
// .color(Color::Muted)
|
||||
// .size(LabelSize::Small),
|
||||
// )
|
||||
// .into_any_element()
|
||||
// }
|
||||
// }),
|
||||
// priority: 0,
|
||||
// }],
|
||||
// None,
|
||||
// cx,
|
||||
// );
|
||||
// decorations.model_explanation = new_block_id.into_iter().next();
|
||||
// }
|
||||
// } else if let Some(block_id) = decorations.model_explanation {
|
||||
// // Hide the block if there's no description
|
||||
// editor.remove_blocks(HashSet::from_iter([block_id]), None, cx);
|
||||
// let new_block_id = editor.insert_blocks(
|
||||
// [BlockProperties {
|
||||
// style: BlockStyle::Flex,
|
||||
// placement: BlockPlacement::Below(assist.range.end),
|
||||
// height: Some(0),
|
||||
// render: Arc::new(|_cx| div().into_any_element()),
|
||||
// priority: 0,
|
||||
// }],
|
||||
// None,
|
||||
// cx,
|
||||
// );
|
||||
// decorations.model_explanation = new_block_id.into_iter().next();
|
||||
// }
|
||||
|
||||
let old_blocks = mem::take(&mut decorations.removed_line_block_ids);
|
||||
editor.remove_blocks(old_blocks, None, cx);
|
||||
|
||||
@@ -1674,6 +1760,7 @@ impl InlineAssist {
|
||||
editor: &Entity<Editor>,
|
||||
prompt_editor: &Entity<PromptEditor<BufferCodegen>>,
|
||||
prompt_block_id: CustomBlockId,
|
||||
tool_description_block_id: CustomBlockId,
|
||||
end_block_id: CustomBlockId,
|
||||
range: Range<Anchor>,
|
||||
codegen: Entity<BufferCodegen>,
|
||||
@@ -1688,7 +1775,8 @@ impl InlineAssist {
|
||||
decorations: Some(InlineAssistDecorations {
|
||||
prompt_block_id,
|
||||
prompt_editor: prompt_editor.clone(),
|
||||
removed_line_block_ids: HashSet::default(),
|
||||
removed_line_block_ids: Default::default(),
|
||||
model_explanation: Some(tool_description_block_id),
|
||||
end_block_id,
|
||||
}),
|
||||
range,
|
||||
@@ -1740,6 +1828,16 @@ impl InlineAssist {
|
||||
&& assist.decorations.is_none()
|
||||
&& let Some(workspace) = assist.workspace.upgrade()
|
||||
{
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
if let Some(sender) = &mut this._inline_assistant_completions {
|
||||
sender
|
||||
.unbounded_send(Err(anyhow::anyhow!(
|
||||
"Inline assistant error: {}",
|
||||
error
|
||||
)))
|
||||
.ok();
|
||||
}
|
||||
|
||||
let error = format!("Inline assistant error: {}", error);
|
||||
workspace.update(cx, |workspace, cx| {
|
||||
struct InlineAssistantError;
|
||||
@@ -1750,6 +1848,11 @@ impl InlineAssist {
|
||||
|
||||
workspace.show_toast(Toast::new(id, error), cx);
|
||||
})
|
||||
} else {
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
if let Some(sender) = &mut this._inline_assistant_completions {
|
||||
sender.unbounded_send(Ok(assist_id)).ok();
|
||||
}
|
||||
}
|
||||
|
||||
if assist.decorations.is_none() {
|
||||
@@ -1777,6 +1880,7 @@ struct InlineAssistDecorations {
|
||||
prompt_block_id: CustomBlockId,
|
||||
prompt_editor: Entity<PromptEditor<BufferCodegen>>,
|
||||
removed_line_block_ids: HashSet<CustomBlockId>,
|
||||
model_explanation: Option<CustomBlockId>,
|
||||
end_block_id: CustomBlockId,
|
||||
}
|
||||
|
||||
@@ -1943,3 +2047,160 @@ fn merge_ranges(ranges: &mut Vec<Range<Anchor>>, buffer: &MultiBufferSnapshot) {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
pub mod test {
|
||||
use std::sync::Arc;
|
||||
|
||||
use agent::HistoryStore;
|
||||
use assistant_text_thread::TextThreadStore;
|
||||
use client::{Client, UserStore};
|
||||
use editor::{Editor, MultiBuffer, MultiBufferOffset};
|
||||
use fs::FakeFs;
|
||||
use futures::channel::mpsc;
|
||||
use gpui::{AppContext, TestAppContext, UpdateGlobal as _};
|
||||
use language::Buffer;
|
||||
use language_model::LanguageModelRegistry;
|
||||
use project::Project;
|
||||
use prompt_store::PromptBuilder;
|
||||
use smol::stream::StreamExt as _;
|
||||
use util::test::marked_text_ranges;
|
||||
use workspace::Workspace;
|
||||
|
||||
use crate::InlineAssistant;
|
||||
|
||||
pub fn run_inline_assistant_test<SetupF, TestF>(
|
||||
base_buffer: String,
|
||||
prompt: String,
|
||||
setup: SetupF,
|
||||
test: TestF,
|
||||
cx: &mut TestAppContext,
|
||||
) -> String
|
||||
where
|
||||
SetupF: FnOnce(&mut gpui::VisualTestContext),
|
||||
TestF: FnOnce(&mut gpui::VisualTestContext),
|
||||
{
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
let app_state = cx.update(|cx| workspace::AppState::test(cx));
|
||||
let prompt_builder = Arc::new(PromptBuilder::new(None).unwrap());
|
||||
let http = Arc::new(reqwest_client::ReqwestClient::user_agent("agent tests").unwrap());
|
||||
let client = cx.update(|cx| {
|
||||
cx.set_http_client(http);
|
||||
Client::production(cx)
|
||||
});
|
||||
let mut inline_assistant =
|
||||
InlineAssistant::new(fs.clone(), prompt_builder, client.telemetry().clone());
|
||||
|
||||
let (tx, mut completion_rx) = mpsc::unbounded();
|
||||
inline_assistant.set_completion_receiver(tx);
|
||||
|
||||
// Initialize settings and client
|
||||
cx.update(|cx| {
|
||||
gpui_tokio::init(cx);
|
||||
settings::init(cx);
|
||||
client::init(&client, cx);
|
||||
workspace::init(app_state.clone(), cx);
|
||||
let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
|
||||
language_model::init(client.clone(), cx);
|
||||
language_models::init(user_store, client.clone(), cx);
|
||||
|
||||
cx.set_global(inline_assistant);
|
||||
});
|
||||
|
||||
let project = cx
|
||||
.executor()
|
||||
.block_test(async { Project::test(fs.clone(), [], cx).await });
|
||||
|
||||
// Create workspace with window
|
||||
let (workspace, cx) = cx.add_window_view(|window, cx| {
|
||||
window.activate_window();
|
||||
Workspace::new(None, project.clone(), app_state.clone(), window, cx)
|
||||
});
|
||||
|
||||
setup(cx);
|
||||
|
||||
let (_editor, buffer) = cx.update(|window, cx| {
|
||||
let buffer = cx.new(|cx| Buffer::local("", cx));
|
||||
let multibuffer = cx.new(|cx| MultiBuffer::singleton(buffer.clone(), cx));
|
||||
let editor = cx.new(|cx| Editor::for_multibuffer(multibuffer, None, window, cx));
|
||||
editor.update(cx, |editor, cx| {
|
||||
let (unmarked_text, selection_ranges) = marked_text_ranges(&base_buffer, true);
|
||||
editor.set_text(unmarked_text, window, cx);
|
||||
editor.change_selections(Default::default(), window, cx, |s| {
|
||||
s.select_ranges(
|
||||
selection_ranges.into_iter().map(|range| {
|
||||
MultiBufferOffset(range.start)..MultiBufferOffset(range.end)
|
||||
}),
|
||||
)
|
||||
})
|
||||
});
|
||||
|
||||
let text_thread_store = cx.new(|cx| TextThreadStore::fake(project.clone(), cx));
|
||||
let history_store = cx.new(|cx| HistoryStore::new(text_thread_store, cx));
|
||||
|
||||
// Add editor to workspace
|
||||
workspace.update(cx, |workspace, cx| {
|
||||
workspace.add_item_to_active_pane(Box::new(editor.clone()), None, true, window, cx);
|
||||
});
|
||||
|
||||
// Call assist method
|
||||
InlineAssistant::update_global(cx, |inline_assistant, cx| {
|
||||
let assist_id = inline_assistant
|
||||
.assist(
|
||||
&editor,
|
||||
workspace.downgrade(),
|
||||
project.downgrade(),
|
||||
history_store, // thread_store
|
||||
None, // prompt_store
|
||||
Some(prompt),
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
inline_assistant.start_assist(assist_id, window, cx);
|
||||
});
|
||||
|
||||
(editor, buffer)
|
||||
});
|
||||
|
||||
cx.run_until_parked();
|
||||
|
||||
test(cx);
|
||||
|
||||
cx.executor()
|
||||
.block_test(async { completion_rx.next().await });
|
||||
|
||||
buffer.read_with(cx, |buffer, _| buffer.text())
|
||||
}
|
||||
|
||||
#[allow(unused)]
|
||||
pub fn test_inline_assistant(
|
||||
base_buffer: &'static str,
|
||||
llm_output: &'static str,
|
||||
cx: &mut TestAppContext,
|
||||
) -> String {
|
||||
run_inline_assistant_test(
|
||||
base_buffer.to_string(),
|
||||
"Prompt doesn't matter because we're using a fake model".to_string(),
|
||||
|cx| {
|
||||
cx.update(|_, cx| LanguageModelRegistry::test(cx));
|
||||
},
|
||||
|cx| {
|
||||
let fake_model = cx.update(|_, cx| {
|
||||
LanguageModelRegistry::global(cx)
|
||||
.update(cx, |registry, _| registry.fake_model())
|
||||
});
|
||||
let fake = fake_model.as_fake();
|
||||
|
||||
// let fake = fake_model;
|
||||
fake.send_last_completion_stream_text_chunk(llm_output.to_string());
|
||||
fake.end_last_completion_stream();
|
||||
|
||||
// Run again to process the model's response
|
||||
cx.run_until_parked();
|
||||
},
|
||||
cx,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -8,24 +8,28 @@ use editor::{
|
||||
ContextMenuOptions, Editor, EditorElement, EditorEvent, EditorMode, EditorStyle, MultiBuffer,
|
||||
actions::{MoveDown, MoveUp},
|
||||
};
|
||||
use feature_flags::{FeatureFlag, FeatureFlagAppExt};
|
||||
use fs::Fs;
|
||||
use gpui::{
|
||||
AnyElement, App, Context, CursorStyle, Entity, EventEmitter, FocusHandle, Focusable,
|
||||
Subscription, TextStyle, WeakEntity, Window,
|
||||
AnyElement, App, ClipboardItem, Context, Entity, EventEmitter, FocusHandle, Focusable,
|
||||
Subscription, TextStyle, TextStyleRefinement, WeakEntity, Window, actions,
|
||||
};
|
||||
use language_model::{LanguageModel, LanguageModelRegistry};
|
||||
use markdown::{HeadingLevelStyles, Markdown, MarkdownElement, MarkdownStyle};
|
||||
use parking_lot::Mutex;
|
||||
use project::Project;
|
||||
use prompt_store::PromptStore;
|
||||
use settings::Settings;
|
||||
use std::cmp;
|
||||
use std::ops::Range;
|
||||
use std::rc::Rc;
|
||||
use std::sync::Arc;
|
||||
use std::{cmp, mem};
|
||||
use theme::ThemeSettings;
|
||||
use ui::utils::WithRemSize;
|
||||
use ui::{IconButtonShape, KeyBinding, PopoverMenuHandle, Tooltip, prelude::*};
|
||||
use workspace::Workspace;
|
||||
use uuid::Uuid;
|
||||
use workspace::notifications::NotificationId;
|
||||
use workspace::{Toast, Workspace};
|
||||
use zed_actions::agent::ToggleModelSelector;
|
||||
|
||||
use crate::agent_model_selector::AgentModelSelector;
|
||||
@@ -38,6 +42,58 @@ use crate::mention_set::{MentionSet, crease_for_mention};
|
||||
use crate::terminal_codegen::TerminalCodegen;
|
||||
use crate::{CycleNextInlineAssist, CyclePreviousInlineAssist, ModelUsageContext};
|
||||
|
||||
actions!(inline_assistant, [ThumbsUpResult, ThumbsDownResult]);
|
||||
|
||||
pub struct InlineAssistRatingFeatureFlag;
|
||||
|
||||
impl FeatureFlag for InlineAssistRatingFeatureFlag {
|
||||
const NAME: &'static str = "inline-assist-rating";
|
||||
|
||||
fn enabled_for_staff() -> bool {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
enum RatingState {
|
||||
Pending,
|
||||
GeneratedCompletion(Option<String>),
|
||||
Rated(Uuid),
|
||||
}
|
||||
|
||||
impl RatingState {
|
||||
fn is_pending(&self) -> bool {
|
||||
matches!(self, RatingState::Pending)
|
||||
}
|
||||
|
||||
fn rating_id(&self) -> Option<Uuid> {
|
||||
match self {
|
||||
RatingState::Pending => None,
|
||||
RatingState::GeneratedCompletion(_) => None,
|
||||
RatingState::Rated(id) => Some(*id),
|
||||
}
|
||||
}
|
||||
|
||||
fn rate(&mut self) -> (Uuid, Option<String>) {
|
||||
let id = Uuid::new_v4();
|
||||
let old_state = mem::replace(self, RatingState::Rated(id));
|
||||
let completion = match old_state {
|
||||
RatingState::Pending => None,
|
||||
RatingState::GeneratedCompletion(completion) => completion,
|
||||
RatingState::Rated(_) => None,
|
||||
};
|
||||
|
||||
(id, completion)
|
||||
}
|
||||
|
||||
fn reset(&mut self) {
|
||||
*self = RatingState::Pending;
|
||||
}
|
||||
|
||||
fn generated_completion(&mut self, generated_completion: Option<String>) {
|
||||
*self = RatingState::GeneratedCompletion(generated_completion);
|
||||
}
|
||||
}
|
||||
|
||||
pub struct PromptEditor<T> {
|
||||
pub editor: Entity<Editor>,
|
||||
mode: PromptEditorMode,
|
||||
@@ -53,6 +109,7 @@ pub struct PromptEditor<T> {
|
||||
_codegen_subscription: Subscription,
|
||||
editor_subscriptions: Vec<Subscription>,
|
||||
show_rate_limit_notice: bool,
|
||||
rated: RatingState,
|
||||
_phantom: std::marker::PhantomData<T>,
|
||||
}
|
||||
|
||||
@@ -65,7 +122,7 @@ impl<T: 'static> Render for PromptEditor<T> {
|
||||
|
||||
const RIGHT_PADDING: Pixels = px(9.);
|
||||
|
||||
let (left_gutter_width, right_padding) = match &self.mode {
|
||||
let (left_gutter_width, right_padding, explanation) = match &self.mode {
|
||||
PromptEditorMode::Buffer {
|
||||
id: _,
|
||||
codegen,
|
||||
@@ -83,17 +140,23 @@ impl<T: 'static> Render for PromptEditor<T> {
|
||||
let left_gutter_width = gutter.full_width() + (gutter.margin / 2.0);
|
||||
let right_padding = editor_margins.right + RIGHT_PADDING;
|
||||
|
||||
(left_gutter_width, right_padding)
|
||||
let explanation = codegen
|
||||
.active_alternative()
|
||||
.read(cx)
|
||||
.model_explanation
|
||||
.clone();
|
||||
|
||||
(left_gutter_width, right_padding, explanation)
|
||||
}
|
||||
PromptEditorMode::Terminal { .. } => {
|
||||
// Give the equivalent of the same left-padding that we're using on the right
|
||||
(Pixels::from(40.0), Pixels::from(24.))
|
||||
(Pixels::from(40.0), Pixels::from(24.), None)
|
||||
}
|
||||
};
|
||||
|
||||
let bottom_padding = match &self.mode {
|
||||
PromptEditorMode::Buffer { .. } => rems_from_px(2.0),
|
||||
PromptEditorMode::Terminal { .. } => rems_from_px(8.0),
|
||||
PromptEditorMode::Terminal { .. } => rems_from_px(4.0),
|
||||
};
|
||||
|
||||
buttons.extend(self.render_buttons(window, cx));
|
||||
@@ -111,22 +174,33 @@ impl<T: 'static> Render for PromptEditor<T> {
|
||||
this.trigger_completion_menu(window, cx);
|
||||
}));
|
||||
|
||||
let markdown = window.use_state(cx, |_, cx| Markdown::new("".into(), None, None, cx));
|
||||
|
||||
if let Some(explanation) = &explanation {
|
||||
markdown.update(cx, |markdown, cx| {
|
||||
markdown.reset(explanation.clone(), cx);
|
||||
});
|
||||
}
|
||||
|
||||
let explanation_label = self
|
||||
.render_markdown(markdown, markdown_style(window, cx))
|
||||
.into_any_element();
|
||||
|
||||
v_flex()
|
||||
.key_context("PromptEditor")
|
||||
.capture_action(cx.listener(Self::paste))
|
||||
.bg(cx.theme().colors().editor_background)
|
||||
.block_mouse_except_scroll()
|
||||
.gap_0p5()
|
||||
.border_y_1()
|
||||
.border_color(cx.theme().status().info_border)
|
||||
.size_full()
|
||||
.pt_0p5()
|
||||
.pb(bottom_padding)
|
||||
.pr(right_padding)
|
||||
.gap_0p5()
|
||||
.justify_center()
|
||||
.border_y_1()
|
||||
.border_color(cx.theme().colors().border)
|
||||
.bg(cx.theme().colors().editor_background)
|
||||
.child(
|
||||
h_flex()
|
||||
.items_start()
|
||||
.cursor(CursorStyle::Arrow)
|
||||
.on_action(cx.listener(|this, _: &ToggleModelSelector, window, cx| {
|
||||
this.model_selector
|
||||
.update(cx, |model_selector, cx| model_selector.toggle(window, cx));
|
||||
@@ -135,18 +209,20 @@ impl<T: 'static> Render for PromptEditor<T> {
|
||||
.on_action(cx.listener(Self::cancel))
|
||||
.on_action(cx.listener(Self::move_up))
|
||||
.on_action(cx.listener(Self::move_down))
|
||||
.on_action(cx.listener(Self::thumbs_up))
|
||||
.on_action(cx.listener(Self::thumbs_down))
|
||||
.capture_action(cx.listener(Self::cycle_prev))
|
||||
.capture_action(cx.listener(Self::cycle_next))
|
||||
.child(
|
||||
WithRemSize::new(ui_font_size)
|
||||
.h_full()
|
||||
.w(left_gutter_width)
|
||||
.flex()
|
||||
.flex_row()
|
||||
.flex_shrink_0()
|
||||
.items_center()
|
||||
.h_full()
|
||||
.w(left_gutter_width)
|
||||
.justify_center()
|
||||
.gap_2()
|
||||
.gap_1()
|
||||
.child(self.render_close_button(cx))
|
||||
.map(|el| {
|
||||
let CodegenStatus::Error(error) = self.codegen_status(cx) else {
|
||||
@@ -177,26 +253,83 @@ impl<T: 'static> Render for PromptEditor<T> {
|
||||
.flex_row()
|
||||
.items_center()
|
||||
.gap_1()
|
||||
.child(add_context_button)
|
||||
.child(self.model_selector.clone())
|
||||
.children(buttons),
|
||||
),
|
||||
),
|
||||
)
|
||||
.child(
|
||||
WithRemSize::new(ui_font_size)
|
||||
.flex()
|
||||
.flex_row()
|
||||
.items_center()
|
||||
.child(h_flex().flex_shrink_0().w(left_gutter_width))
|
||||
.child(
|
||||
h_flex()
|
||||
.w_full()
|
||||
.pl_1()
|
||||
.items_start()
|
||||
.justify_between()
|
||||
.child(add_context_button)
|
||||
.child(self.model_selector.clone()),
|
||||
),
|
||||
)
|
||||
.when_some(explanation, |this, _| {
|
||||
this.child(
|
||||
h_flex()
|
||||
.size_full()
|
||||
.justify_center()
|
||||
.child(div().w(left_gutter_width + px(6.)))
|
||||
.child(
|
||||
div()
|
||||
.size_full()
|
||||
.min_w_0()
|
||||
.pt(rems_from_px(3.))
|
||||
.pl_0p5()
|
||||
.flex_1()
|
||||
.border_t_1()
|
||||
.border_color(cx.theme().colors().border_variant)
|
||||
.child(explanation_label),
|
||||
),
|
||||
)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
fn markdown_style(window: &Window, cx: &App) -> MarkdownStyle {
|
||||
let theme_settings = ThemeSettings::get_global(cx);
|
||||
let colors = cx.theme().colors();
|
||||
let mut text_style = window.text_style();
|
||||
|
||||
text_style.refine(&TextStyleRefinement {
|
||||
font_family: Some(theme_settings.ui_font.family.clone()),
|
||||
color: Some(colors.text),
|
||||
..Default::default()
|
||||
});
|
||||
|
||||
MarkdownStyle {
|
||||
base_text_style: text_style.clone(),
|
||||
syntax: cx.theme().syntax().clone(),
|
||||
selection_background_color: colors.element_selection_background,
|
||||
heading_level_styles: Some(HeadingLevelStyles {
|
||||
h1: Some(TextStyleRefinement {
|
||||
font_size: Some(rems(1.15).into()),
|
||||
..Default::default()
|
||||
}),
|
||||
h2: Some(TextStyleRefinement {
|
||||
font_size: Some(rems(1.1).into()),
|
||||
..Default::default()
|
||||
}),
|
||||
h3: Some(TextStyleRefinement {
|
||||
font_size: Some(rems(1.05).into()),
|
||||
..Default::default()
|
||||
}),
|
||||
h4: Some(TextStyleRefinement {
|
||||
font_size: Some(rems(1.).into()),
|
||||
..Default::default()
|
||||
}),
|
||||
h5: Some(TextStyleRefinement {
|
||||
font_size: Some(rems(0.95).into()),
|
||||
..Default::default()
|
||||
}),
|
||||
h6: Some(TextStyleRefinement {
|
||||
font_size: Some(rems(0.875).into()),
|
||||
..Default::default()
|
||||
}),
|
||||
}),
|
||||
inline_code: TextStyleRefinement {
|
||||
font_family: Some(theme_settings.buffer_font.family.clone()),
|
||||
font_fallbacks: theme_settings.buffer_font.fallbacks.clone(),
|
||||
font_features: Some(theme_settings.buffer_font.features.clone()),
|
||||
background_color: Some(colors.editor_foreground.opacity(0.08)),
|
||||
..Default::default()
|
||||
},
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -354,6 +487,7 @@ impl<T: 'static> PromptEditor<T> {
|
||||
}
|
||||
|
||||
self.edited_since_done = true;
|
||||
self.rated.reset();
|
||||
cx.notify();
|
||||
}
|
||||
EditorEvent::Blurred => {
|
||||
@@ -441,6 +575,121 @@ impl<T: 'static> PromptEditor<T> {
|
||||
}
|
||||
}
|
||||
|
||||
fn thumbs_up(&mut self, _: &ThumbsUpResult, _window: &mut Window, cx: &mut Context<Self>) {
|
||||
if self.rated.is_pending() {
|
||||
self.toast("Still generating...", None, cx);
|
||||
return;
|
||||
}
|
||||
|
||||
if let Some(rating_id) = self.rated.rating_id() {
|
||||
self.toast("Already rated this completion", Some(rating_id), cx);
|
||||
return;
|
||||
}
|
||||
|
||||
let (rating_id, completion) = self.rated.rate();
|
||||
|
||||
let selected_text = match &self.mode {
|
||||
PromptEditorMode::Buffer { codegen, .. } => {
|
||||
codegen.read(cx).selected_text(cx).map(|s| s.to_string())
|
||||
}
|
||||
PromptEditorMode::Terminal { .. } => None,
|
||||
};
|
||||
|
||||
let model_info = self.model_selector.read(cx).active_model(cx);
|
||||
let model_id = {
|
||||
let Some(configured_model) = model_info else {
|
||||
self.toast("No configured model", None, cx);
|
||||
return;
|
||||
};
|
||||
|
||||
configured_model.model.telemetry_id()
|
||||
};
|
||||
|
||||
let prompt = self.editor.read(cx).text(cx);
|
||||
|
||||
telemetry::event!(
|
||||
"Inline Assistant Rated",
|
||||
rating = "positive",
|
||||
model = model_id,
|
||||
prompt = prompt,
|
||||
completion = completion,
|
||||
selected_text = selected_text,
|
||||
rating_id = rating_id.to_string()
|
||||
);
|
||||
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
fn thumbs_down(&mut self, _: &ThumbsDownResult, _window: &mut Window, cx: &mut Context<Self>) {
|
||||
if self.rated.is_pending() {
|
||||
self.toast("Still generating...", None, cx);
|
||||
return;
|
||||
}
|
||||
if let Some(rating_id) = self.rated.rating_id() {
|
||||
self.toast("Already rated this completion", Some(rating_id), cx);
|
||||
return;
|
||||
}
|
||||
|
||||
let (rating_id, completion) = self.rated.rate();
|
||||
|
||||
let selected_text = match &self.mode {
|
||||
PromptEditorMode::Buffer { codegen, .. } => {
|
||||
codegen.read(cx).selected_text(cx).map(|s| s.to_string())
|
||||
}
|
||||
PromptEditorMode::Terminal { .. } => None,
|
||||
};
|
||||
|
||||
let model_info = self.model_selector.read(cx).active_model(cx);
|
||||
let model_telemetry_id = {
|
||||
let Some(configured_model) = model_info else {
|
||||
self.toast("No configured model", None, cx);
|
||||
return;
|
||||
};
|
||||
|
||||
configured_model.model.telemetry_id()
|
||||
};
|
||||
|
||||
let prompt = self.editor.read(cx).text(cx);
|
||||
|
||||
telemetry::event!(
|
||||
"Inline Assistant Rated",
|
||||
rating = "negative",
|
||||
model = model_telemetry_id,
|
||||
prompt = prompt,
|
||||
completion = completion,
|
||||
selected_text = selected_text,
|
||||
rating_id = rating_id.to_string()
|
||||
);
|
||||
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
fn toast(&mut self, msg: &str, uuid: Option<Uuid>, cx: &mut Context<'_, PromptEditor<T>>) {
|
||||
self.workspace
|
||||
.update(cx, |workspace, cx| {
|
||||
enum InlinePromptRating {}
|
||||
workspace.show_toast(
|
||||
{
|
||||
let mut toast = Toast::new(
|
||||
NotificationId::unique::<InlinePromptRating>(),
|
||||
msg.to_string(),
|
||||
)
|
||||
.autohide();
|
||||
|
||||
if let Some(uuid) = uuid {
|
||||
toast = toast.on_click("Click to copy rating ID", move |_, cx| {
|
||||
cx.write_to_clipboard(ClipboardItem::new_string(uuid.to_string()));
|
||||
});
|
||||
};
|
||||
|
||||
toast
|
||||
},
|
||||
cx,
|
||||
);
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
|
||||
fn move_up(&mut self, _: &MoveUp, window: &mut Window, cx: &mut Context<Self>) {
|
||||
if let Some(ix) = self.prompt_history_ix {
|
||||
if ix > 0 {
|
||||
@@ -546,6 +795,9 @@ impl<T: 'static> PromptEditor<T> {
|
||||
.into_any_element(),
|
||||
]
|
||||
} else {
|
||||
let show_rating_buttons = cx.has_flag::<InlineAssistRatingFeatureFlag>();
|
||||
let rated = self.rated.rating_id().is_some();
|
||||
|
||||
let accept = IconButton::new("accept", IconName::Check)
|
||||
.icon_color(Color::Info)
|
||||
.shape(IconButtonShape::Square)
|
||||
@@ -557,25 +809,59 @@ impl<T: 'static> PromptEditor<T> {
|
||||
}))
|
||||
.into_any_element();
|
||||
|
||||
match &self.mode {
|
||||
PromptEditorMode::Terminal { .. } => vec![
|
||||
accept,
|
||||
IconButton::new("confirm", IconName::PlayFilled)
|
||||
.icon_color(Color::Info)
|
||||
let mut buttons = Vec::new();
|
||||
|
||||
if show_rating_buttons {
|
||||
buttons.push(
|
||||
IconButton::new("thumbs-down", IconName::ThumbsDown)
|
||||
.icon_color(if rated { Color::Muted } else { Color::Default })
|
||||
.shape(IconButtonShape::Square)
|
||||
.tooltip(|_window, cx| {
|
||||
Tooltip::for_action(
|
||||
"Execute Generated Command",
|
||||
&menu::SecondaryConfirm,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.on_click(cx.listener(|_, _, _, cx| {
|
||||
cx.emit(PromptEditorEvent::ConfirmRequested { execute: true });
|
||||
.disabled(rated)
|
||||
.tooltip(Tooltip::text("Bad result"))
|
||||
.on_click(cx.listener(|this, _, window, cx| {
|
||||
this.thumbs_down(&ThumbsDownResult, window, cx);
|
||||
}))
|
||||
.into_any_element(),
|
||||
],
|
||||
PromptEditorMode::Buffer { .. } => vec![accept],
|
||||
);
|
||||
|
||||
buttons.push(
|
||||
IconButton::new("thumbs-up", IconName::ThumbsUp)
|
||||
.icon_color(if rated { Color::Muted } else { Color::Default })
|
||||
.shape(IconButtonShape::Square)
|
||||
.disabled(rated)
|
||||
.tooltip(Tooltip::text("Good result"))
|
||||
.on_click(cx.listener(|this, _, window, cx| {
|
||||
this.thumbs_up(&ThumbsUpResult, window, cx);
|
||||
}))
|
||||
.into_any_element(),
|
||||
);
|
||||
}
|
||||
|
||||
buttons.push(accept);
|
||||
|
||||
match &self.mode {
|
||||
PromptEditorMode::Terminal { .. } => {
|
||||
buttons.push(
|
||||
IconButton::new("confirm", IconName::PlayFilled)
|
||||
.icon_color(Color::Info)
|
||||
.shape(IconButtonShape::Square)
|
||||
.tooltip(|_window, cx| {
|
||||
Tooltip::for_action(
|
||||
"Execute Generated Command",
|
||||
&menu::SecondaryConfirm,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.on_click(cx.listener(|_, _, _, cx| {
|
||||
cx.emit(PromptEditorEvent::ConfirmRequested {
|
||||
execute: true,
|
||||
});
|
||||
}))
|
||||
.into_any_element(),
|
||||
);
|
||||
buttons
|
||||
}
|
||||
PromptEditorMode::Buffer { .. } => buttons,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -759,6 +1045,10 @@ impl<T: 'static> PromptEditor<T> {
|
||||
})
|
||||
.into_any_element()
|
||||
}
|
||||
|
||||
fn render_markdown(&self, markdown: Entity<Markdown>, style: MarkdownStyle) -> MarkdownElement {
|
||||
MarkdownElement::new(markdown, style)
|
||||
}
|
||||
}
|
||||
|
||||
pub enum PromptEditorMode {
|
||||
@@ -900,6 +1190,7 @@ impl PromptEditor<BufferCodegen> {
|
||||
editor_subscriptions: Vec::new(),
|
||||
show_rate_limit_notice: false,
|
||||
mode,
|
||||
rated: RatingState::Pending,
|
||||
_phantom: Default::default(),
|
||||
};
|
||||
|
||||
@@ -910,7 +1201,7 @@ impl PromptEditor<BufferCodegen> {
|
||||
|
||||
fn handle_codegen_changed(
|
||||
&mut self,
|
||||
_: Entity<BufferCodegen>,
|
||||
codegen: Entity<BufferCodegen>,
|
||||
cx: &mut Context<PromptEditor<BufferCodegen>>,
|
||||
) {
|
||||
match self.codegen_status(cx) {
|
||||
@@ -919,10 +1210,13 @@ impl PromptEditor<BufferCodegen> {
|
||||
.update(cx, |editor, _| editor.set_read_only(false));
|
||||
}
|
||||
CodegenStatus::Pending => {
|
||||
self.rated.reset();
|
||||
self.editor
|
||||
.update(cx, |editor, _| editor.set_read_only(true));
|
||||
}
|
||||
CodegenStatus::Done => {
|
||||
let completion = codegen.read(cx).active_completion(cx);
|
||||
self.rated.generated_completion(completion);
|
||||
self.edited_since_done = false;
|
||||
self.editor
|
||||
.update(cx, |editor, _| editor.set_read_only(false));
|
||||
@@ -1043,6 +1337,7 @@ impl PromptEditor<TerminalCodegen> {
|
||||
editor_subscriptions: Vec::new(),
|
||||
mode,
|
||||
show_rate_limit_notice: false,
|
||||
rated: RatingState::Pending,
|
||||
_phantom: Default::default(),
|
||||
};
|
||||
this.count_lines(cx);
|
||||
@@ -1075,17 +1370,20 @@ impl PromptEditor<TerminalCodegen> {
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_codegen_changed(&mut self, _: Entity<TerminalCodegen>, cx: &mut Context<Self>) {
|
||||
fn handle_codegen_changed(&mut self, codegen: Entity<TerminalCodegen>, cx: &mut Context<Self>) {
|
||||
match &self.codegen().read(cx).status {
|
||||
CodegenStatus::Idle => {
|
||||
self.editor
|
||||
.update(cx, |editor, _| editor.set_read_only(false));
|
||||
}
|
||||
CodegenStatus::Pending => {
|
||||
self.rated = RatingState::Pending;
|
||||
self.editor
|
||||
.update(cx, |editor, _| editor.set_read_only(true));
|
||||
}
|
||||
CodegenStatus::Done | CodegenStatus::Error(_) => {
|
||||
self.rated
|
||||
.generated_completion(codegen.read(cx).completion());
|
||||
self.edited_since_done = false;
|
||||
self.editor
|
||||
.update(cx, |editor, _| editor.set_read_only(false));
|
||||
|
||||
@@ -542,7 +542,7 @@ impl PickerDelegate for ProfilePickerDelegate {
|
||||
let is_active = active_id == candidate.id;
|
||||
|
||||
Some(
|
||||
ListItem::new(SharedString::from(candidate.id.0.clone()))
|
||||
ListItem::new(candidate.id.0.clone())
|
||||
.inset(true)
|
||||
.spacing(ListItemSpacing::Sparse)
|
||||
.toggle_state(selected)
|
||||
|
||||
@@ -341,7 +341,6 @@ impl CompletionProvider for SlashCommandCompletionProvider {
|
||||
position: language::Anchor,
|
||||
_text: &str,
|
||||
_trigger_in_words: bool,
|
||||
_menu_is_open: bool,
|
||||
cx: &mut Context<Editor>,
|
||||
) -> bool {
|
||||
let buffer = buffer.read(cx);
|
||||
|
||||
@@ -135,6 +135,12 @@ impl TerminalCodegen {
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
pub fn completion(&self) -> Option<String> {
|
||||
self.transaction
|
||||
.as_ref()
|
||||
.map(|transaction| transaction.completion.clone())
|
||||
}
|
||||
|
||||
pub fn stop(&mut self, cx: &mut Context<Self>) {
|
||||
self.status = CodegenStatus::Done;
|
||||
self.generation = Task::ready(());
|
||||
@@ -167,27 +173,32 @@ pub const CLEAR_INPUT: &str = "\x03";
|
||||
const CARRIAGE_RETURN: &str = "\x0d";
|
||||
|
||||
struct TerminalTransaction {
|
||||
completion: String,
|
||||
terminal: Entity<Terminal>,
|
||||
}
|
||||
|
||||
impl TerminalTransaction {
|
||||
pub fn start(terminal: Entity<Terminal>) -> Self {
|
||||
Self { terminal }
|
||||
Self {
|
||||
completion: String::new(),
|
||||
terminal,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn push(&mut self, hunk: String, cx: &mut App) {
|
||||
// Ensure that the assistant cannot accidentally execute commands that are streamed into the terminal
|
||||
let input = Self::sanitize_input(hunk);
|
||||
self.completion.push_str(&input);
|
||||
self.terminal
|
||||
.update(cx, |terminal, _| terminal.input(input.into_bytes()));
|
||||
}
|
||||
|
||||
pub fn undo(&self, cx: &mut App) {
|
||||
pub fn undo(self, cx: &mut App) {
|
||||
self.terminal
|
||||
.update(cx, |terminal, _| terminal.input(CLEAR_INPUT.as_bytes()));
|
||||
}
|
||||
|
||||
pub fn complete(&self, cx: &mut App) {
|
||||
pub fn complete(self, cx: &mut App) {
|
||||
self.terminal
|
||||
.update(cx, |terminal, _| terminal.input(CARRIAGE_RETURN.as_bytes()));
|
||||
}
|
||||
|
||||
@@ -1682,6 +1682,98 @@ impl TextThreadEditor {
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let editor_clipboard_selections = cx
|
||||
.read_from_clipboard()
|
||||
.and_then(|item| item.entries().first().cloned())
|
||||
.and_then(|entry| match entry {
|
||||
ClipboardEntry::String(text) => {
|
||||
text.metadata_json::<Vec<editor::ClipboardSelection>>()
|
||||
}
|
||||
_ => None,
|
||||
});
|
||||
|
||||
let has_file_context = editor_clipboard_selections
|
||||
.as_ref()
|
||||
.is_some_and(|selections| {
|
||||
selections
|
||||
.iter()
|
||||
.any(|sel| sel.file_path.is_some() && sel.line_range.is_some())
|
||||
});
|
||||
|
||||
if has_file_context {
|
||||
if let Some(clipboard_item) = cx.read_from_clipboard() {
|
||||
if let Some(ClipboardEntry::String(clipboard_text)) =
|
||||
clipboard_item.entries().first()
|
||||
{
|
||||
if let Some(selections) = editor_clipboard_selections {
|
||||
cx.stop_propagation();
|
||||
|
||||
let text = clipboard_text.text();
|
||||
self.editor.update(cx, |editor, cx| {
|
||||
let mut current_offset = 0;
|
||||
let weak_editor = cx.entity().downgrade();
|
||||
|
||||
for selection in selections {
|
||||
if let (Some(file_path), Some(line_range)) =
|
||||
(selection.file_path, selection.line_range)
|
||||
{
|
||||
let selected_text =
|
||||
&text[current_offset..current_offset + selection.len];
|
||||
let fence = assistant_slash_commands::codeblock_fence_for_path(
|
||||
file_path.to_str(),
|
||||
Some(line_range.clone()),
|
||||
);
|
||||
let formatted_text = format!("{fence}{selected_text}\n```");
|
||||
|
||||
let insert_point = editor
|
||||
.selections
|
||||
.newest::<Point>(&editor.display_snapshot(cx))
|
||||
.head();
|
||||
let start_row = MultiBufferRow(insert_point.row);
|
||||
|
||||
editor.insert(&formatted_text, window, cx);
|
||||
|
||||
let snapshot = editor.buffer().read(cx).snapshot(cx);
|
||||
let anchor_before = snapshot.anchor_after(insert_point);
|
||||
let anchor_after = editor
|
||||
.selections
|
||||
.newest_anchor()
|
||||
.head()
|
||||
.bias_left(&snapshot);
|
||||
|
||||
editor.insert("\n", window, cx);
|
||||
|
||||
let crease_text = acp_thread::selection_name(
|
||||
Some(file_path.as_ref()),
|
||||
&line_range,
|
||||
);
|
||||
|
||||
let fold_placeholder = quote_selection_fold_placeholder(
|
||||
crease_text,
|
||||
weak_editor.clone(),
|
||||
);
|
||||
let crease = Crease::inline(
|
||||
anchor_before..anchor_after,
|
||||
fold_placeholder,
|
||||
render_quote_selection_output_toggle,
|
||||
|_, _, _, _| Empty.into_any(),
|
||||
);
|
||||
editor.insert_creases(vec![crease], cx);
|
||||
editor.fold_at(start_row, window, cx);
|
||||
|
||||
current_offset += selection.len;
|
||||
if !selection.is_entire_line && current_offset < text.len() {
|
||||
current_offset += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
cx.stop_propagation();
|
||||
|
||||
let mut images = if let Some(item) = cx.read_from_clipboard() {
|
||||
@@ -2622,11 +2714,13 @@ impl SearchableItem for TextThreadEditor {
|
||||
fn update_matches(
|
||||
&mut self,
|
||||
matches: &[Self::Match],
|
||||
active_match_index: Option<usize>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.editor
|
||||
.update(cx, |editor, cx| editor.update_matches(matches, window, cx));
|
||||
self.editor.update(cx, |editor, cx| {
|
||||
editor.update_matches(matches, active_match_index, window, cx)
|
||||
});
|
||||
}
|
||||
|
||||
fn query_suggestion(&mut self, window: &mut Window, cx: &mut Context<Self>) -> String {
|
||||
|
||||
@@ -106,9 +106,6 @@ impl Render for AgentNotification {
|
||||
.font(ui_font)
|
||||
.border_color(cx.theme().colors().border)
|
||||
.rounded_xl()
|
||||
.on_click(cx.listener(|_, _, _, cx| {
|
||||
cx.emit(AgentNotificationEvent::Accepted);
|
||||
}))
|
||||
.child(
|
||||
h_flex()
|
||||
.items_start()
|
||||
|
||||
@@ -12,6 +12,8 @@ pub use settings::{AnthropicAvailableModel as AvailableModel, ModelMode};
|
||||
use strum::{EnumIter, EnumString};
|
||||
use thiserror::Error;
|
||||
|
||||
pub mod batches;
|
||||
|
||||
pub const ANTHROPIC_API_URL: &str = "https://api.anthropic.com";
|
||||
|
||||
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
|
||||
@@ -465,6 +467,7 @@ impl Model {
|
||||
}
|
||||
}
|
||||
|
||||
/// Generate completion with streaming.
|
||||
pub async fn stream_completion(
|
||||
client: &dyn HttpClient,
|
||||
api_url: &str,
|
||||
@@ -477,6 +480,101 @@ pub async fn stream_completion(
|
||||
.map(|output| output.0)
|
||||
}
|
||||
|
||||
/// Generate completion without streaming.
pub async fn non_streaming_completion(
client: &dyn HttpClient,
api_url: &str,
api_key: &str,
request: Request,
beta_headers: Option<String>,
) -> Result<Response, AnthropicError> {
let (mut response, rate_limits) =
send_request(client, api_url, api_key, &request, beta_headers).await?;

if response.status().is_success() {
let mut body = String::new();
response
.body_mut()
.read_to_string(&mut body)
.await
.map_err(AnthropicError::ReadResponse)?;

serde_json::from_str(&body).map_err(AnthropicError::DeserializeResponse)
} else {
Err(handle_error_response(response, rate_limits).await)
}
}
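A hypothetical call to the new non-streaming helper; `http_client`, `api_key`, and `request` are assumed to exist in the caller, and no beta headers are sent:

```rust
// Illustrative only: one-shot request/response through the helper above.
let response = non_streaming_completion(
    http_client.as_ref(),
    ANTHROPIC_API_URL,
    &api_key,
    request,
    None, // beta_headers
)
.await?;
// On success `response` is the deserialized `Response`; rate limits, 529
// overloads, and malformed bodies all surface as `AnthropicError` variants.
```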
async fn send_request(
|
||||
client: &dyn HttpClient,
|
||||
api_url: &str,
|
||||
api_key: &str,
|
||||
request: impl Serialize,
|
||||
beta_headers: Option<String>,
|
||||
) -> Result<(http::Response<AsyncBody>, RateLimitInfo), AnthropicError> {
|
||||
let uri = format!("{api_url}/v1/messages");
|
||||
|
||||
let mut request_builder = HttpRequest::builder()
|
||||
.method(Method::POST)
|
||||
.uri(uri)
|
||||
.header("Anthropic-Version", "2023-06-01")
|
||||
.header("X-Api-Key", api_key.trim())
|
||||
.header("Content-Type", "application/json");
|
||||
|
||||
if let Some(beta_headers) = beta_headers {
|
||||
request_builder = request_builder.header("Anthropic-Beta", beta_headers);
|
||||
}
|
||||
|
||||
let serialized_request =
|
||||
serde_json::to_string(&request).map_err(AnthropicError::SerializeRequest)?;
|
||||
let request = request_builder
|
||||
.body(AsyncBody::from(serialized_request))
|
||||
.map_err(AnthropicError::BuildRequestBody)?;
|
||||
|
||||
let response = client
|
||||
.send(request)
|
||||
.await
|
||||
.map_err(AnthropicError::HttpSend)?;
|
||||
|
||||
let rate_limits = RateLimitInfo::from_headers(response.headers());
|
||||
|
||||
Ok((response, rate_limits))
|
||||
}
|
||||
|
||||
async fn handle_error_response(
|
||||
mut response: http::Response<AsyncBody>,
|
||||
rate_limits: RateLimitInfo,
|
||||
) -> AnthropicError {
|
||||
if response.status().as_u16() == 529 {
|
||||
return AnthropicError::ServerOverloaded {
|
||||
retry_after: rate_limits.retry_after,
|
||||
};
|
||||
}
|
||||
|
||||
if let Some(retry_after) = rate_limits.retry_after {
|
||||
return AnthropicError::RateLimit { retry_after };
|
||||
}
|
||||
|
||||
let mut body = String::new();
|
||||
let read_result = response
|
||||
.body_mut()
|
||||
.read_to_string(&mut body)
|
||||
.await
|
||||
.map_err(AnthropicError::ReadResponse);
|
||||
|
||||
if let Err(err) = read_result {
|
||||
return err;
|
||||
}
|
||||
|
||||
match serde_json::from_str::<Event>(&body) {
|
||||
Ok(Event::Error { error }) => AnthropicError::ApiError(error),
|
||||
Ok(_) | Err(_) => AnthropicError::HttpResponseError {
|
||||
status_code: response.status(),
|
||||
message: body,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
/// An individual rate limit.
|
||||
#[derive(Debug)]
|
||||
pub struct RateLimit {
|
||||
@@ -580,30 +678,10 @@ pub async fn stream_completion_with_rate_limit_info(
|
||||
base: request,
|
||||
stream: true,
|
||||
};
|
||||
let uri = format!("{api_url}/v1/messages");
|
||||
|
||||
let mut request_builder = HttpRequest::builder()
|
||||
.method(Method::POST)
|
||||
.uri(uri)
|
||||
.header("Anthropic-Version", "2023-06-01")
|
||||
.header("X-Api-Key", api_key.trim())
|
||||
.header("Content-Type", "application/json");
|
||||
let (response, rate_limits) =
|
||||
send_request(client, api_url, api_key, &request, beta_headers).await?;
|
||||
|
||||
if let Some(beta_headers) = beta_headers {
|
||||
request_builder = request_builder.header("Anthropic-Beta", beta_headers);
|
||||
}
|
||||
|
||||
let serialized_request =
|
||||
serde_json::to_string(&request).map_err(AnthropicError::SerializeRequest)?;
|
||||
let request = request_builder
|
||||
.body(AsyncBody::from(serialized_request))
|
||||
.map_err(AnthropicError::BuildRequestBody)?;
|
||||
|
||||
let mut response = client
|
||||
.send(request)
|
||||
.await
|
||||
.map_err(AnthropicError::HttpSend)?;
|
||||
let rate_limits = RateLimitInfo::from_headers(response.headers());
|
||||
if response.status().is_success() {
|
||||
let reader = BufReader::new(response.into_body());
|
||||
let stream = reader
|
||||
@@ -622,27 +700,8 @@ pub async fn stream_completion_with_rate_limit_info(
|
||||
})
|
||||
.boxed();
|
||||
Ok((stream, Some(rate_limits)))
|
||||
} else if response.status().as_u16() == 529 {
|
||||
Err(AnthropicError::ServerOverloaded {
|
||||
retry_after: rate_limits.retry_after,
|
||||
})
|
||||
} else if let Some(retry_after) = rate_limits.retry_after {
|
||||
Err(AnthropicError::RateLimit { retry_after })
|
||||
} else {
|
||||
let mut body = String::new();
|
||||
response
|
||||
.body_mut()
|
||||
.read_to_string(&mut body)
|
||||
.await
|
||||
.map_err(AnthropicError::ReadResponse)?;
|
||||
|
||||
match serde_json::from_str::<Event>(&body) {
|
||||
Ok(Event::Error { error }) => Err(AnthropicError::ApiError(error)),
|
||||
Ok(_) | Err(_) => Err(AnthropicError::HttpResponseError {
|
||||
status_code: response.status(),
|
||||
message: body,
|
||||
}),
|
||||
}
|
||||
Err(handle_error_response(response, rate_limits).await)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
crates/anthropic/src/batches.rs (new file, 190 lines)
@@ -0,0 +1,190 @@
|
||||
use anyhow::Result;
|
||||
use futures::AsyncReadExt;
|
||||
use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::{AnthropicError, ApiError, RateLimitInfo, Request, Response};
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct BatchRequest {
|
||||
pub custom_id: String,
|
||||
pub params: Request,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct CreateBatchRequest {
|
||||
pub requests: Vec<BatchRequest>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct MessageBatchRequestCounts {
|
||||
pub processing: u64,
|
||||
pub succeeded: u64,
|
||||
pub errored: u64,
|
||||
pub canceled: u64,
|
||||
pub expired: u64,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct MessageBatch {
|
||||
pub id: String,
|
||||
#[serde(rename = "type")]
|
||||
pub batch_type: String,
|
||||
pub processing_status: String,
|
||||
pub request_counts: MessageBatchRequestCounts,
|
||||
pub ended_at: Option<String>,
|
||||
pub created_at: String,
|
||||
pub expires_at: String,
|
||||
pub archived_at: Option<String>,
|
||||
pub cancel_initiated_at: Option<String>,
|
||||
pub results_url: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(tag = "type")]
|
||||
pub enum BatchResult {
|
||||
#[serde(rename = "succeeded")]
|
||||
Succeeded { message: Response },
|
||||
#[serde(rename = "errored")]
|
||||
Errored { error: ApiError },
|
||||
#[serde(rename = "canceled")]
|
||||
Canceled,
|
||||
#[serde(rename = "expired")]
|
||||
Expired,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct BatchIndividualResponse {
|
||||
pub custom_id: String,
|
||||
pub result: BatchResult,
|
||||
}
|
||||
|
||||
pub async fn create_batch(
|
||||
client: &dyn HttpClient,
|
||||
api_url: &str,
|
||||
api_key: &str,
|
||||
request: CreateBatchRequest,
|
||||
) -> Result<MessageBatch, AnthropicError> {
|
||||
let uri = format!("{api_url}/v1/messages/batches");
|
||||
|
||||
let request_builder = HttpRequest::builder()
|
||||
.method(Method::POST)
|
||||
.uri(uri)
|
||||
.header("Anthropic-Version", "2023-06-01")
|
||||
.header("X-Api-Key", api_key.trim())
|
||||
.header("Content-Type", "application/json");
|
||||
|
||||
let serialized_request =
|
||||
serde_json::to_string(&request).map_err(AnthropicError::SerializeRequest)?;
|
||||
let http_request = request_builder
|
||||
.body(AsyncBody::from(serialized_request))
|
||||
.map_err(AnthropicError::BuildRequestBody)?;
|
||||
|
||||
let mut response = client
|
||||
.send(http_request)
|
||||
.await
|
||||
.map_err(AnthropicError::HttpSend)?;
|
||||
|
||||
let rate_limits = RateLimitInfo::from_headers(response.headers());
|
||||
|
||||
if response.status().is_success() {
|
||||
let mut body = String::new();
|
||||
response
|
||||
.body_mut()
|
||||
.read_to_string(&mut body)
|
||||
.await
|
||||
.map_err(AnthropicError::ReadResponse)?;
|
||||
|
||||
serde_json::from_str(&body).map_err(AnthropicError::DeserializeResponse)
|
||||
} else {
|
||||
Err(crate::handle_error_response(response, rate_limits).await)
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn retrieve_batch(
|
||||
client: &dyn HttpClient,
|
||||
api_url: &str,
|
||||
api_key: &str,
|
||||
message_batch_id: &str,
|
||||
) -> Result<MessageBatch, AnthropicError> {
|
||||
let uri = format!("{api_url}/v1/messages/batches/{message_batch_id}");
|
||||
|
||||
let request_builder = HttpRequest::builder()
|
||||
.method(Method::GET)
|
||||
.uri(uri)
|
||||
.header("Anthropic-Version", "2023-06-01")
|
||||
.header("X-Api-Key", api_key.trim());
|
||||
|
||||
let http_request = request_builder
|
||||
.body(AsyncBody::default())
|
||||
.map_err(AnthropicError::BuildRequestBody)?;
|
||||
|
||||
let mut response = client
|
||||
.send(http_request)
|
||||
.await
|
||||
.map_err(AnthropicError::HttpSend)?;
|
||||
|
||||
let rate_limits = RateLimitInfo::from_headers(response.headers());
|
||||
|
||||
if response.status().is_success() {
|
||||
let mut body = String::new();
|
||||
response
|
||||
.body_mut()
|
||||
.read_to_string(&mut body)
|
||||
.await
|
||||
.map_err(AnthropicError::ReadResponse)?;
|
||||
|
||||
serde_json::from_str(&body).map_err(AnthropicError::DeserializeResponse)
|
||||
} else {
|
||||
Err(crate::handle_error_response(response, rate_limits).await)
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn retrieve_batch_results(
|
||||
client: &dyn HttpClient,
|
||||
api_url: &str,
|
||||
api_key: &str,
|
||||
message_batch_id: &str,
|
||||
) -> Result<Vec<BatchIndividualResponse>, AnthropicError> {
|
||||
let uri = format!("{api_url}/v1/messages/batches/{message_batch_id}/results");
|
||||
|
||||
let request_builder = HttpRequest::builder()
|
||||
.method(Method::GET)
|
||||
.uri(uri)
|
||||
.header("Anthropic-Version", "2023-06-01")
|
||||
.header("X-Api-Key", api_key.trim());
|
||||
|
||||
let http_request = request_builder
|
||||
.body(AsyncBody::default())
|
||||
.map_err(AnthropicError::BuildRequestBody)?;
|
||||
|
||||
let mut response = client
|
||||
.send(http_request)
|
||||
.await
|
||||
.map_err(AnthropicError::HttpSend)?;
|
||||
|
||||
let rate_limits = RateLimitInfo::from_headers(response.headers());
|
||||
|
||||
if response.status().is_success() {
|
||||
let mut body = String::new();
|
||||
response
|
||||
.body_mut()
|
||||
.read_to_string(&mut body)
|
||||
.await
|
||||
.map_err(AnthropicError::ReadResponse)?;
|
||||
|
||||
let mut results = Vec::new();
|
||||
for line in body.lines() {
|
||||
if line.trim().is_empty() {
|
||||
continue;
|
||||
}
|
||||
let result: BatchIndividualResponse =
|
||||
serde_json::from_str(line).map_err(AnthropicError::DeserializeResponse)?;
|
||||
results.push(result);
|
||||
}
|
||||
|
||||
Ok(results)
|
||||
} else {
|
||||
Err(crate::handle_error_response(response, rate_limits).await)
|
||||
}
|
||||
}
|
||||
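Taken together, the new `batches` module supports a create → poll → fetch-results flow. A rough sketch of a caller; `http_client`, `api_key`, and the per-item `request` are assumptions, and the `"ended"` status string and 30-second poll interval are illustrative rather than taken from this diff:

```rust
use anthropic::batches::{
    BatchRequest, BatchResult, CreateBatchRequest, create_batch, retrieve_batch,
    retrieve_batch_results,
};

let batch = create_batch(
    http_client.as_ref(),
    ANTHROPIC_API_URL,
    &api_key,
    CreateBatchRequest {
        requests: vec![BatchRequest {
            custom_id: "req-1".into(),
            params: request,
        }],
    },
)
.await?;

// Poll until the batch stops processing (terminal status value assumed).
loop {
    let status =
        retrieve_batch(http_client.as_ref(), ANTHROPIC_API_URL, &api_key, &batch.id).await?;
    if status.processing_status == "ended" {
        break;
    }
    smol::Timer::after(std::time::Duration::from_secs(30)).await;
}

// Each line of the results file becomes one `BatchIndividualResponse`.
for item in
    retrieve_batch_results(http_client.as_ref(), ANTHROPIC_API_URL, &api_key, &batch.id).await?
{
    match item.result {
        BatchResult::Succeeded { message } => { /* use `message` (a `Response`) */ }
        BatchResult::Errored { error } => { /* inspect `error` for this `custom_id` */ }
        BatchResult::Canceled | BatchResult::Expired => {}
    }
}
```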
@@ -14,7 +14,7 @@ use fs::{Fs, RenameOptions};
|
||||
use futures::{FutureExt, StreamExt, future::Shared};
|
||||
use gpui::{
|
||||
App, AppContext as _, Context, Entity, EventEmitter, RenderImage, SharedString, Subscription,
|
||||
Task,
|
||||
Task, WeakEntity,
|
||||
};
|
||||
use itertools::Itertools as _;
|
||||
use language::{AnchorRangeExt, Bias, Buffer, LanguageRegistry, OffsetRangeExt, Point, ToOffset};
|
||||
@@ -688,7 +688,7 @@ pub struct TextThread {
|
||||
_subscriptions: Vec<Subscription>,
|
||||
telemetry: Option<Arc<Telemetry>>,
|
||||
language_registry: Arc<LanguageRegistry>,
|
||||
project: Option<Entity<Project>>,
|
||||
project: Option<WeakEntity<Project>>,
|
||||
prompt_builder: Arc<PromptBuilder>,
|
||||
completion_mode: agent_settings::CompletionMode,
|
||||
}
|
||||
@@ -708,7 +708,7 @@ impl EventEmitter<TextThreadEvent> for TextThread {}
|
||||
impl TextThread {
|
||||
pub fn local(
|
||||
language_registry: Arc<LanguageRegistry>,
|
||||
project: Option<Entity<Project>>,
|
||||
project: Option<WeakEntity<Project>>,
|
||||
telemetry: Option<Arc<Telemetry>>,
|
||||
prompt_builder: Arc<PromptBuilder>,
|
||||
slash_commands: Arc<SlashCommandWorkingSet>,
|
||||
@@ -742,7 +742,7 @@ impl TextThread {
|
||||
language_registry: Arc<LanguageRegistry>,
|
||||
prompt_builder: Arc<PromptBuilder>,
|
||||
slash_commands: Arc<SlashCommandWorkingSet>,
|
||||
project: Option<Entity<Project>>,
|
||||
project: Option<WeakEntity<Project>>,
|
||||
telemetry: Option<Arc<Telemetry>>,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
@@ -873,7 +873,7 @@ impl TextThread {
|
||||
language_registry: Arc<LanguageRegistry>,
|
||||
prompt_builder: Arc<PromptBuilder>,
|
||||
slash_commands: Arc<SlashCommandWorkingSet>,
|
||||
project: Option<Entity<Project>>,
|
||||
project: Option<WeakEntity<Project>>,
|
||||
telemetry: Option<Arc<Telemetry>>,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
@@ -1167,10 +1167,6 @@ impl TextThread {
|
||||
self.language_registry.clone()
|
||||
}
|
||||
|
||||
pub fn project(&self) -> Option<Entity<Project>> {
|
||||
self.project.clone()
|
||||
}
|
||||
|
||||
pub fn prompt_builder(&self) -> Arc<PromptBuilder> {
|
||||
self.prompt_builder.clone()
|
||||
}
|
||||
@@ -2967,7 +2963,7 @@ impl TextThread {
|
||||
}
|
||||
|
||||
fn update_model_request_usage(&self, amount: u32, limit: UsageLimit, cx: &mut App) {
|
||||
let Some(project) = &self.project else {
|
||||
let Some(project) = self.project.as_ref().and_then(|project| project.upgrade()) else {
|
||||
return;
|
||||
};
|
||||
project.read(cx).user_store().update(cx, |user_store, cx| {
|
||||
|
||||
@@ -51,7 +51,7 @@ pub struct TextThreadStore {
|
||||
telemetry: Arc<Telemetry>,
|
||||
_watch_updates: Task<Option<()>>,
|
||||
client: Arc<Client>,
|
||||
project: Entity<Project>,
|
||||
project: WeakEntity<Project>,
|
||||
project_is_shared: bool,
|
||||
client_subscription: Option<client::Subscription>,
|
||||
_project_subscriptions: Vec<gpui::Subscription>,
|
||||
@@ -119,10 +119,10 @@ impl TextThreadStore {
|
||||
],
|
||||
project_is_shared: false,
|
||||
client: project.read(cx).client(),
|
||||
project: project.clone(),
|
||||
project: project.downgrade(),
|
||||
prompt_builder,
|
||||
};
|
||||
this.handle_project_shared(project.clone(), cx);
|
||||
this.handle_project_shared(cx);
|
||||
this.synchronize_contexts(cx);
|
||||
this.register_context_server_handlers(cx);
|
||||
this.reload(cx).detach_and_log_err(cx);
|
||||
@@ -146,7 +146,7 @@ impl TextThreadStore {
|
||||
telemetry: project.read(cx).client().telemetry().clone(),
|
||||
_watch_updates: Task::ready(None),
|
||||
client: project.read(cx).client(),
|
||||
project,
|
||||
project: project.downgrade(),
|
||||
project_is_shared: false,
|
||||
client_subscription: None,
|
||||
_project_subscriptions: Default::default(),
|
||||
@@ -180,8 +180,10 @@ impl TextThreadStore {
|
||||
) -> Result<proto::OpenContextResponse> {
|
||||
let context_id = TextThreadId::from_proto(envelope.payload.context_id);
|
||||
let operations = this.update(&mut cx, |this, cx| {
|
||||
let project = this.project.upgrade().context("project not found")?;
|
||||
|
||||
anyhow::ensure!(
|
||||
!this.project.read(cx).is_via_collab(),
|
||||
!project.read(cx).is_via_collab(),
|
||||
"only the host contexts can be opened"
|
||||
);
|
||||
|
||||
@@ -211,8 +213,9 @@ impl TextThreadStore {
|
||||
mut cx: AsyncApp,
|
||||
) -> Result<proto::CreateContextResponse> {
|
||||
let (context_id, operations) = this.update(&mut cx, |this, cx| {
|
||||
let project = this.project.upgrade().context("project not found")?;
|
||||
anyhow::ensure!(
|
||||
!this.project.read(cx).is_via_collab(),
|
||||
!project.read(cx).is_via_collab(),
|
||||
"can only create contexts as the host"
|
||||
);
|
||||
|
||||
@@ -255,8 +258,9 @@ impl TextThreadStore {
|
||||
mut cx: AsyncApp,
|
||||
) -> Result<proto::SynchronizeContextsResponse> {
|
||||
this.update(&mut cx, |this, cx| {
|
||||
let project = this.project.upgrade().context("project not found")?;
|
||||
anyhow::ensure!(
|
||||
!this.project.read(cx).is_via_collab(),
|
||||
!project.read(cx).is_via_collab(),
|
||||
"only the host can synchronize contexts"
|
||||
);
|
||||
|
||||
@@ -293,8 +297,12 @@ impl TextThreadStore {
|
||||
})?
|
||||
}
|
||||
|
||||
fn handle_project_shared(&mut self, _: Entity<Project>, cx: &mut Context<Self>) {
|
||||
let is_shared = self.project.read(cx).is_shared();
|
||||
fn handle_project_shared(&mut self, cx: &mut Context<Self>) {
|
||||
let Some(project) = self.project.upgrade() else {
|
||||
return;
|
||||
};
|
||||
|
||||
let is_shared = project.read(cx).is_shared();
|
||||
let was_shared = mem::replace(&mut self.project_is_shared, is_shared);
|
||||
if is_shared == was_shared {
|
||||
return;
|
||||
@@ -309,7 +317,7 @@ impl TextThreadStore {
|
||||
false
|
||||
}
|
||||
});
|
||||
let remote_id = self.project.read(cx).remote_id().unwrap();
|
||||
let remote_id = project.read(cx).remote_id().unwrap();
|
||||
self.client_subscription = self
|
||||
.client
|
||||
.subscribe_to_entity(remote_id)
|
||||
@@ -323,13 +331,13 @@ impl TextThreadStore {
|
||||
|
||||
fn handle_project_event(
|
||||
&mut self,
|
||||
project: Entity<Project>,
|
||||
_project: Entity<Project>,
|
||||
event: &project::Event,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
match event {
|
||||
project::Event::RemoteIdChanged(_) => {
|
||||
self.handle_project_shared(project, cx);
|
||||
self.handle_project_shared(cx);
|
||||
}
|
||||
project::Event::Reshared => {
|
||||
self.advertise_contexts(cx);
|
||||
@@ -382,7 +390,10 @@ impl TextThreadStore {
|
||||
}
|
||||
|
||||
pub fn create_remote(&mut self, cx: &mut Context<Self>) -> Task<Result<Entity<TextThread>>> {
|
||||
let project = self.project.read(cx);
|
||||
let Some(project) = self.project.upgrade() else {
|
||||
return Task::ready(Err(anyhow::anyhow!("project was dropped")));
|
||||
};
|
||||
let project = project.read(cx);
|
||||
let Some(project_id) = project.remote_id() else {
|
||||
return Task::ready(Err(anyhow::anyhow!("project was not remote")));
|
||||
};
|
||||
@@ -541,7 +552,10 @@ impl TextThreadStore {
|
||||
text_thread_id: TextThreadId,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Task<Result<Entity<TextThread>>> {
|
||||
let project = self.project.read(cx);
|
||||
let Some(project) = self.project.upgrade() else {
|
||||
return Task::ready(Err(anyhow::anyhow!("project was dropped")));
|
||||
};
|
||||
let project = project.read(cx);
|
||||
let Some(project_id) = project.remote_id() else {
|
||||
return Task::ready(Err(anyhow::anyhow!("project was not remote")));
|
||||
};
|
||||
@@ -618,7 +632,10 @@ impl TextThreadStore {
|
||||
event: &TextThreadEvent,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let Some(project_id) = self.project.read(cx).remote_id() else {
|
||||
let Some(project) = self.project.upgrade() else {
|
||||
return;
|
||||
};
|
||||
let Some(project_id) = project.read(cx).remote_id() else {
|
||||
return;
|
||||
};
|
||||
|
||||
@@ -652,12 +669,14 @@ impl TextThreadStore {
|
||||
}
|
||||
|
||||
fn advertise_contexts(&self, cx: &App) {
|
||||
let Some(project_id) = self.project.read(cx).remote_id() else {
|
||||
let Some(project) = self.project.upgrade() else {
|
||||
return;
|
||||
};
|
||||
let Some(project_id) = project.read(cx).remote_id() else {
|
||||
return;
|
||||
};
|
||||
|
||||
// For now, only the host can advertise their open contexts.
|
||||
if self.project.read(cx).is_via_collab() {
|
||||
if project.read(cx).is_via_collab() {
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -689,7 +708,10 @@ impl TextThreadStore {
|
||||
}
|
||||
|
||||
fn synchronize_contexts(&mut self, cx: &mut Context<Self>) {
|
||||
let Some(project_id) = self.project.read(cx).remote_id() else {
|
||||
let Some(project) = self.project.upgrade() else {
|
||||
return;
|
||||
};
|
||||
let Some(project_id) = project.read(cx).remote_id() else {
|
||||
return;
|
||||
};
|
||||
|
||||
@@ -828,7 +850,10 @@ impl TextThreadStore {
|
||||
}
|
||||
|
||||
fn register_context_server_handlers(&self, cx: &mut Context<Self>) {
|
||||
let context_server_store = self.project.read(cx).context_server_store();
|
||||
let Some(project) = self.project.upgrade() else {
|
||||
return;
|
||||
};
|
||||
let context_server_store = project.read(cx).context_server_store();
|
||||
cx.subscribe(&context_server_store, Self::handle_context_server_event)
|
||||
.detach();
|
||||
|
||||
|
||||
@@ -584,41 +584,100 @@ impl Model {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn cross_region_inference_id(&self, region: &str) -> anyhow::Result<String> {
|
||||
pub fn cross_region_inference_id(
|
||||
&self,
|
||||
region: &str,
|
||||
allow_global: bool,
|
||||
) -> anyhow::Result<String> {
|
||||
// List derived from here:
|
||||
// https://docs.aws.amazon.com/bedrock/latest/userguide/inference-profiles-support.html#inference-profiles-support-system
|
||||
let model_id = self.request_id();
|
||||
|
||||
let supports_global = matches!(
|
||||
self,
|
||||
Model::ClaudeOpus4_5
|
||||
| Model::ClaudeOpus4_5Thinking
|
||||
| Model::ClaudeHaiku4_5
|
||||
| Model::ClaudeSonnet4
|
||||
| Model::ClaudeSonnet4Thinking
|
||||
| Model::ClaudeSonnet4_5
|
||||
| Model::ClaudeSonnet4_5Thinking
|
||||
);
|
||||
|
||||
let region_group = if region.starts_with("us-gov-") {
|
||||
"us-gov"
|
||||
} else if region.starts_with("us-") {
|
||||
"us"
|
||||
} else if region.starts_with("us-")
|
||||
|| region.starts_with("ca-")
|
||||
|| region.starts_with("sa-")
|
||||
{
|
||||
if allow_global && supports_global {
|
||||
"global"
|
||||
} else {
|
||||
"us"
|
||||
}
|
||||
} else if region.starts_with("eu-") {
|
||||
"eu"
|
||||
if allow_global && supports_global {
|
||||
"global"
|
||||
} else {
|
||||
"eu"
|
||||
}
|
||||
} else if region.starts_with("ap-") || region == "me-central-1" || region == "me-south-1" {
|
||||
"apac"
|
||||
} else if region.starts_with("ca-") || region.starts_with("sa-") {
|
||||
// Canada and South America regions - default to US profiles
|
||||
"us"
|
||||
if allow_global && supports_global {
|
||||
"global"
|
||||
} else {
|
||||
"apac"
|
||||
}
|
||||
} else {
|
||||
anyhow::bail!("Unsupported Region {region}");
|
||||
};
|
||||
|
||||
let model_id = self.request_id();
|
||||
match (self, region_group, region) {
|
||||
(Model::Custom { .. }, _, _) => Ok(self.request_id().into()),
|
||||
|
||||
match (self, region_group) {
|
||||
// Custom models can't have CRI IDs
|
||||
(Model::Custom { .. }, _) => Ok(self.request_id().into()),
|
||||
(
|
||||
Model::ClaudeOpus4_5
|
||||
| Model::ClaudeOpus4_5Thinking
|
||||
| Model::ClaudeHaiku4_5
|
||||
| Model::ClaudeSonnet4
|
||||
| Model::ClaudeSonnet4Thinking
|
||||
| Model::ClaudeSonnet4_5
|
||||
| Model::ClaudeSonnet4_5Thinking,
|
||||
"global",
|
||||
_,
|
||||
) => Ok(format!("{}.{}", region_group, model_id)),
|
||||
|
||||
// Models with US Gov only
|
||||
(Model::Claude3_5Sonnet, "us-gov") | (Model::Claude3Haiku, "us-gov") => {
|
||||
Ok(format!("{}.{}", region_group, model_id))
|
||||
(
|
||||
Model::Claude3Haiku
|
||||
| Model::Claude3_5Sonnet
|
||||
| Model::Claude3_7Sonnet
|
||||
| Model::Claude3_7SonnetThinking
|
||||
| Model::ClaudeSonnet4_5
|
||||
| Model::ClaudeSonnet4_5Thinking,
|
||||
"us-gov",
|
||||
_,
|
||||
) => Ok(format!("{}.{}", region_group, model_id)),
|
||||
|
||||
(
|
||||
Model::ClaudeHaiku4_5 | Model::ClaudeSonnet4_5 | Model::ClaudeSonnet4_5Thinking,
|
||||
"apac",
|
||||
"ap-southeast-2" | "ap-southeast-4",
|
||||
) => Ok(format!("au.{}", model_id)),
|
||||
|
||||
(
|
||||
Model::ClaudeHaiku4_5 | Model::ClaudeSonnet4_5 | Model::ClaudeSonnet4_5Thinking,
|
||||
"apac",
|
||||
"ap-northeast-1" | "ap-northeast-3",
|
||||
) => Ok(format!("jp.{}", model_id)),
|
||||
|
||||
(Model::AmazonNovaLite, "us", r) if r.starts_with("ca-") => {
|
||||
Ok(format!("ca.{}", model_id))
|
||||
}
|
||||
|
||||
// Available everywhere
|
||||
(Model::AmazonNovaLite | Model::AmazonNovaMicro | Model::AmazonNovaPro, _) => {
|
||||
Ok(format!("{}.{}", region_group, model_id))
|
||||
}
|
||||
|
||||
// Models in US
|
||||
(
|
||||
Model::AmazonNovaPremier
|
||||
| Model::AmazonNovaLite
|
||||
| Model::AmazonNovaMicro
|
||||
| Model::AmazonNovaPro
|
||||
| Model::Claude3_5Haiku
|
||||
| Model::ClaudeHaiku4_5
|
||||
| Model::Claude3_5Sonnet
|
||||
@@ -655,16 +714,18 @@ impl Model {
|
||||
| Model::PalmyraWriterX4
|
||||
| Model::PalmyraWriterX5,
|
||||
"us",
|
||||
_,
|
||||
) => Ok(format!("{}.{}", region_group, model_id)),
|
||||
|
||||
// Models available in EU
|
||||
(
|
||||
Model::Claude3_5Sonnet
|
||||
Model::AmazonNovaLite
|
||||
| Model::AmazonNovaMicro
|
||||
| Model::AmazonNovaPro
|
||||
| Model::Claude3_5Sonnet
|
||||
| Model::ClaudeHaiku4_5
|
||||
| Model::Claude3_7Sonnet
|
||||
| Model::Claude3_7SonnetThinking
|
||||
| Model::ClaudeSonnet4
|
||||
| Model::ClaudeSonnet4Thinking
|
||||
| Model::ClaudeSonnet4_5
|
||||
| Model::ClaudeSonnet4_5Thinking
|
||||
| Model::Claude3Haiku
|
||||
@@ -673,26 +734,26 @@ impl Model {
|
||||
| Model::MetaLlama323BInstructV1
|
||||
| Model::MistralPixtralLarge2502V1,
|
||||
"eu",
|
||||
_,
|
||||
) => Ok(format!("{}.{}", region_group, model_id)),
|
||||
|
||||
// Models available in APAC
|
||||
(
|
||||
Model::Claude3_5Sonnet
|
||||
Model::AmazonNovaLite
|
||||
| Model::AmazonNovaMicro
|
||||
| Model::AmazonNovaPro
|
||||
| Model::Claude3_5Sonnet
|
||||
| Model::Claude3_5SonnetV2
|
||||
| Model::ClaudeHaiku4_5
|
||||
| Model::Claude3Haiku
|
||||
| Model::Claude3Sonnet
|
||||
| Model::Claude3_7Sonnet
|
||||
| Model::Claude3_7SonnetThinking
|
||||
| Model::ClaudeSonnet4
|
||||
| Model::ClaudeSonnet4Thinking
|
||||
| Model::ClaudeSonnet4_5
|
||||
| Model::ClaudeSonnet4_5Thinking,
|
||||
| Model::Claude3Haiku
|
||||
| Model::Claude3Sonnet,
|
||||
"apac",
|
||||
_,
|
||||
) => Ok(format!("{}.{}", region_group, model_id)),
|
||||
|
||||
// Any other combination is not supported
|
||||
_ => Ok(self.request_id().into()),
|
||||
_ => Ok(model_id.into()),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -705,15 +766,15 @@ mod tests {
|
||||
fn test_us_region_inference_ids() -> anyhow::Result<()> {
|
||||
// Test US regions
|
||||
assert_eq!(
|
||||
Model::Claude3_5SonnetV2.cross_region_inference_id("us-east-1")?,
|
||||
Model::Claude3_5SonnetV2.cross_region_inference_id("us-east-1", false)?,
|
||||
"us.anthropic.claude-3-5-sonnet-20241022-v2:0"
|
||||
);
|
||||
assert_eq!(
|
||||
Model::Claude3_5SonnetV2.cross_region_inference_id("us-west-2")?,
|
||||
Model::Claude3_5SonnetV2.cross_region_inference_id("us-west-2", false)?,
|
||||
"us.anthropic.claude-3-5-sonnet-20241022-v2:0"
|
||||
);
|
||||
assert_eq!(
|
||||
Model::AmazonNovaPro.cross_region_inference_id("us-east-2")?,
|
||||
Model::AmazonNovaPro.cross_region_inference_id("us-east-2", false)?,
|
||||
"us.amazon.nova-pro-v1:0"
|
||||
);
|
||||
Ok(())
|
||||
@@ -723,19 +784,19 @@ mod tests {
|
||||
fn test_eu_region_inference_ids() -> anyhow::Result<()> {
|
||||
// Test European regions
|
||||
assert_eq!(
|
||||
Model::ClaudeSonnet4.cross_region_inference_id("eu-west-1")?,
|
||||
Model::ClaudeSonnet4.cross_region_inference_id("eu-west-1", false)?,
|
||||
"eu.anthropic.claude-sonnet-4-20250514-v1:0"
|
||||
);
|
||||
assert_eq!(
|
||||
Model::ClaudeSonnet4_5.cross_region_inference_id("eu-west-1")?,
|
||||
Model::ClaudeSonnet4_5.cross_region_inference_id("eu-west-1", false)?,
|
||||
"eu.anthropic.claude-sonnet-4-5-20250929-v1:0"
|
||||
);
|
||||
assert_eq!(
|
||||
Model::Claude3Sonnet.cross_region_inference_id("eu-west-1")?,
|
||||
Model::Claude3Sonnet.cross_region_inference_id("eu-west-1", false)?,
|
||||
"eu.anthropic.claude-3-sonnet-20240229-v1:0"
|
||||
);
|
||||
assert_eq!(
|
||||
Model::AmazonNovaMicro.cross_region_inference_id("eu-north-1")?,
|
||||
Model::AmazonNovaMicro.cross_region_inference_id("eu-north-1", false)?,
|
||||
"eu.amazon.nova-micro-v1:0"
|
||||
);
|
||||
Ok(())
|
||||
@@ -745,15 +806,15 @@ mod tests {
|
||||
fn test_apac_region_inference_ids() -> anyhow::Result<()> {
|
||||
// Test Asia-Pacific regions
|
||||
assert_eq!(
|
||||
Model::Claude3_5SonnetV2.cross_region_inference_id("ap-northeast-1")?,
|
||||
Model::Claude3_5SonnetV2.cross_region_inference_id("ap-northeast-1", false)?,
|
||||
"apac.anthropic.claude-3-5-sonnet-20241022-v2:0"
|
||||
);
|
||||
assert_eq!(
|
||||
Model::Claude3_5SonnetV2.cross_region_inference_id("ap-southeast-2")?,
|
||||
Model::Claude3_5SonnetV2.cross_region_inference_id("ap-southeast-2", false)?,
|
||||
"apac.anthropic.claude-3-5-sonnet-20241022-v2:0"
|
||||
);
|
||||
assert_eq!(
|
||||
Model::AmazonNovaLite.cross_region_inference_id("ap-south-1")?,
|
||||
Model::AmazonNovaLite.cross_region_inference_id("ap-south-1", false)?,
|
||||
"apac.amazon.nova-lite-v1:0"
|
||||
);
|
||||
Ok(())
|
||||
@@ -763,11 +824,11 @@ mod tests {
|
||||
fn test_gov_region_inference_ids() -> anyhow::Result<()> {
|
||||
// Test Government regions
|
||||
assert_eq!(
|
||||
Model::Claude3_5Sonnet.cross_region_inference_id("us-gov-east-1")?,
|
||||
Model::Claude3_5Sonnet.cross_region_inference_id("us-gov-east-1", false)?,
|
||||
"us-gov.anthropic.claude-3-5-sonnet-20240620-v1:0"
|
||||
);
|
||||
assert_eq!(
|
||||
Model::Claude3Haiku.cross_region_inference_id("us-gov-west-1")?,
|
||||
Model::Claude3Haiku.cross_region_inference_id("us-gov-west-1", false)?,
|
||||
"us-gov.anthropic.claude-3-haiku-20240307-v1:0"
|
||||
);
|
||||
Ok(())
|
||||
@@ -777,15 +838,15 @@ mod tests {
|
||||
fn test_meta_models_inference_ids() -> anyhow::Result<()> {
|
||||
// Test Meta models
|
||||
assert_eq!(
|
||||
Model::MetaLlama370BInstructV1.cross_region_inference_id("us-east-1")?,
|
||||
Model::MetaLlama370BInstructV1.cross_region_inference_id("us-east-1", false)?,
|
||||
"meta.llama3-70b-instruct-v1:0"
|
||||
);
|
||||
assert_eq!(
|
||||
Model::MetaLlama3170BInstructV1.cross_region_inference_id("us-east-1")?,
|
||||
Model::MetaLlama3170BInstructV1.cross_region_inference_id("us-east-1", false)?,
|
||||
"us.meta.llama3-1-70b-instruct-v1:0"
|
||||
);
|
||||
assert_eq!(
|
||||
Model::MetaLlama321BInstructV1.cross_region_inference_id("eu-west-1")?,
|
||||
Model::MetaLlama321BInstructV1.cross_region_inference_id("eu-west-1", false)?,
|
||||
"eu.meta.llama3-2-1b-instruct-v1:0"
|
||||
);
|
||||
Ok(())
|
||||
@@ -796,11 +857,11 @@ mod tests {
|
||||
// Mistral models don't follow the regional prefix pattern,
|
||||
// so they should return their original IDs
|
||||
assert_eq!(
|
||||
Model::MistralMistralLarge2402V1.cross_region_inference_id("us-east-1")?,
|
||||
Model::MistralMistralLarge2402V1.cross_region_inference_id("us-east-1", false)?,
|
||||
"mistral.mistral-large-2402-v1:0"
|
||||
);
|
||||
assert_eq!(
|
||||
Model::MistralMixtral8x7BInstructV0.cross_region_inference_id("eu-west-1")?,
|
||||
Model::MistralMixtral8x7BInstructV0.cross_region_inference_id("eu-west-1", false)?,
|
||||
"mistral.mixtral-8x7b-instruct-v0:1"
|
||||
);
|
||||
Ok(())
|
||||
@@ -811,11 +872,11 @@ mod tests {
|
||||
// AI21 models don't follow the regional prefix pattern,
|
||||
// so they should return their original IDs
|
||||
assert_eq!(
|
||||
Model::AI21J2UltraV1.cross_region_inference_id("us-east-1")?,
|
||||
Model::AI21J2UltraV1.cross_region_inference_id("us-east-1", false)?,
|
||||
"ai21.j2-ultra-v1"
|
||||
);
|
||||
assert_eq!(
|
||||
Model::AI21JambaInstructV1.cross_region_inference_id("eu-west-1")?,
|
||||
Model::AI21JambaInstructV1.cross_region_inference_id("eu-west-1", false)?,
|
||||
"ai21.jamba-instruct-v1:0"
|
||||
);
|
||||
Ok(())
|
||||
@@ -826,11 +887,11 @@ mod tests {
|
||||
// Cohere models don't follow the regional prefix pattern,
|
||||
// so they should return their original IDs
|
||||
assert_eq!(
|
||||
Model::CohereCommandRV1.cross_region_inference_id("us-east-1")?,
|
||||
Model::CohereCommandRV1.cross_region_inference_id("us-east-1", false)?,
|
||||
"cohere.command-r-v1:0"
|
||||
);
|
||||
assert_eq!(
|
||||
Model::CohereCommandTextV14_4k.cross_region_inference_id("ap-southeast-1")?,
|
||||
Model::CohereCommandTextV14_4k.cross_region_inference_id("ap-southeast-1", false)?,
|
||||
"cohere.command-text-v14:7:4k"
|
||||
);
|
||||
Ok(())
|
||||
@@ -850,10 +911,17 @@ mod tests {
|
||||
|
||||
// Custom model should return its name unchanged
|
||||
assert_eq!(
|
||||
custom_model.cross_region_inference_id("us-east-1")?,
|
||||
custom_model.cross_region_inference_id("us-east-1", false)?,
|
||||
"custom.my-model-v1:0"
|
||||
);
|
||||
|
||||
// Test that models without global support fall back to regional when allow_global is true
|
||||
assert_eq!(
|
||||
Model::AmazonNovaPro.cross_region_inference_id("us-east-1", true)?,
|
||||
"us.amazon.nova-pro-v1:0",
|
||||
"Nova Pro should fall back to regional profile even when allow_global is true"
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -892,3 +960,28 @@ mod tests {
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_global_inference_ids() -> anyhow::Result<()> {
|
||||
// Test global inference for models that support it when allow_global is true
|
||||
assert_eq!(
|
||||
Model::ClaudeSonnet4.cross_region_inference_id("us-east-1", true)?,
|
||||
"global.anthropic.claude-sonnet-4-20250514-v1:0"
|
||||
);
|
||||
assert_eq!(
|
||||
Model::ClaudeSonnet4_5.cross_region_inference_id("eu-west-1", true)?,
|
||||
"global.anthropic.claude-sonnet-4-5-20250929-v1:0"
|
||||
);
|
||||
assert_eq!(
|
||||
Model::ClaudeHaiku4_5.cross_region_inference_id("ap-south-1", true)?,
|
||||
"global.anthropic.claude-haiku-4-5-20251001-v1:0"
|
||||
);
|
||||
|
||||
// Test that regional prefix is used when allow_global is false
|
||||
assert_eq!(
|
||||
Model::ClaudeSonnet4.cross_region_inference_id("us-east-1", false)?,
|
||||
"us.anthropic.claude-sonnet-4-20250514-v1:0"
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -12,7 +12,7 @@ workspace = true
|
||||
path = "src/buffer_diff.rs"
|
||||
|
||||
[features]
|
||||
test-support = []
|
||||
test-support = ["settings"]
|
||||
|
||||
[dependencies]
|
||||
anyhow.workspace = true
|
||||
@@ -24,6 +24,7 @@ language.workspace = true
|
||||
log.workspace = true
|
||||
pretty_assertions.workspace = true
|
||||
rope.workspace = true
|
||||
settings = { workspace = true, optional = true }
|
||||
sum_tree.workspace = true
|
||||
text.workspace = true
|
||||
util.workspace = true
|
||||
@@ -33,6 +34,7 @@ ctor.workspace = true
|
||||
gpui = { workspace = true, features = ["test-support"] }
|
||||
rand.workspace = true
|
||||
serde_json.workspace = true
|
||||
settings.workspace = true
|
||||
text = { workspace = true, features = ["test-support"] }
|
||||
unindent.workspace = true
|
||||
zlog.workspace = true
|
||||
|
||||
@@ -1,7 +1,10 @@
|
||||
use futures::channel::oneshot;
|
||||
use git2::{DiffLineType as GitDiffLineType, DiffOptions as GitOptions, Patch as GitPatch};
|
||||
use gpui::{App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Task, TaskLabel};
|
||||
use language::{BufferRow, Language, LanguageRegistry};
|
||||
use language::{
|
||||
BufferRow, DiffOptions, File, Language, LanguageName, LanguageRegistry,
|
||||
language_settings::language_settings, word_diff_ranges,
|
||||
};
|
||||
use rope::Rope;
|
||||
use std::{
|
||||
cmp::Ordering,
|
||||
@@ -15,10 +18,12 @@ use text::{Anchor, Bias, BufferId, OffsetRangeExt, Point, ToOffset as _, ToPoint
|
||||
use util::ResultExt;
|
||||
|
||||
pub static CALCULATE_DIFF_TASK: LazyLock<TaskLabel> = LazyLock::new(TaskLabel::new);
|
||||
pub const MAX_WORD_DIFF_LINE_COUNT: usize = 5;
|
||||
|
||||
pub struct BufferDiff {
|
||||
pub buffer_id: BufferId,
|
||||
inner: BufferDiffInner,
|
||||
// diff of the index vs head
|
||||
secondary_diff: Option<Entity<BufferDiff>>,
|
||||
}
|
||||
|
||||
@@ -31,6 +36,7 @@ pub struct BufferDiffSnapshot {
|
||||
#[derive(Clone)]
|
||||
struct BufferDiffInner {
|
||||
hunks: SumTree<InternalDiffHunk>,
|
||||
// Used for making staging mo
|
||||
pending_hunks: SumTree<PendingHunk>,
|
||||
base_text: language::BufferSnapshot,
|
||||
base_text_exists: bool,
|
||||
@@ -50,11 +56,18 @@ pub enum DiffHunkStatusKind {
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
/// Diff of Working Copy vs Index
/// aka 'is this hunk staged or not'
pub enum DiffHunkSecondaryStatus {
/// Unstaged
HasSecondaryHunk,
/// Partially staged
OverlapsWithSecondaryHunk,
/// Staged
NoSecondaryHunk,
/// We are unstaging
SecondaryHunkAdditionPending,
/// We are staging
SecondaryHunkRemovalPending,
}
|
||||
|
||||
@@ -68,6 +81,10 @@ pub struct DiffHunk {
|
||||
/// The range in the buffer's diff base text to which this hunk corresponds.
|
||||
pub diff_base_byte_range: Range<usize>,
|
||||
pub secondary_status: DiffHunkSecondaryStatus,
|
||||
// Anchors representing the word diff locations in the active buffer
|
||||
pub buffer_word_diffs: Vec<Range<Anchor>>,
|
||||
// Offsets relative to the start of the deleted diff that represent word diff locations
|
||||
pub base_word_diffs: Vec<Range<usize>>,
|
||||
}
|
||||
|
||||
/// We store [`InternalDiffHunk`]s internally so we don't need to store the additional row range.
|
||||
@@ -75,6 +92,8 @@ pub struct DiffHunk {
|
||||
struct InternalDiffHunk {
|
||||
buffer_range: Range<Anchor>,
|
||||
diff_base_byte_range: Range<usize>,
|
||||
base_word_diffs: Vec<Range<usize>>,
|
||||
buffer_word_diffs: Vec<Range<Anchor>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
@@ -208,6 +227,13 @@ impl BufferDiffSnapshot {
|
||||
let base_text_pair;
|
||||
let base_text_exists;
|
||||
let base_text_snapshot;
|
||||
let diff_options = build_diff_options(
|
||||
None,
|
||||
language.as_ref().map(|l| l.name()),
|
||||
language.as_ref().map(|l| l.default_scope()),
|
||||
cx,
|
||||
);
|
||||
|
||||
if let Some(text) = &base_text {
|
||||
let base_text_rope = Rope::from(text.as_str());
|
||||
base_text_pair = Some((text.clone(), base_text_rope.clone()));
|
||||
@@ -225,7 +251,7 @@ impl BufferDiffSnapshot {
|
||||
.background_executor()
|
||||
.spawn_labeled(*CALCULATE_DIFF_TASK, {
|
||||
let buffer = buffer.clone();
|
||||
async move { compute_hunks(base_text_pair, buffer) }
|
||||
async move { compute_hunks(base_text_pair, buffer, diff_options) }
|
||||
});
|
||||
|
||||
async move {
|
||||
@@ -248,6 +274,12 @@ impl BufferDiffSnapshot {
|
||||
base_text_snapshot: language::BufferSnapshot,
|
||||
cx: &App,
|
||||
) -> impl Future<Output = Self> + use<> {
|
||||
let diff_options = build_diff_options(
|
||||
base_text_snapshot.file(),
|
||||
base_text_snapshot.language().map(|l| l.name()),
|
||||
base_text_snapshot.language().map(|l| l.default_scope()),
|
||||
cx,
|
||||
);
|
||||
let base_text_exists = base_text.is_some();
|
||||
let base_text_pair = base_text.map(|text| {
|
||||
debug_assert_eq!(&*text, &base_text_snapshot.text());
|
||||
@@ -259,7 +291,7 @@ impl BufferDiffSnapshot {
|
||||
inner: BufferDiffInner {
|
||||
base_text: base_text_snapshot,
|
||||
pending_hunks: SumTree::new(&buffer),
|
||||
hunks: compute_hunks(base_text_pair, buffer),
|
||||
hunks: compute_hunks(base_text_pair, buffer, diff_options),
|
||||
base_text_exists,
|
||||
},
|
||||
secondary_diff: None,
|
||||
@@ -602,11 +634,15 @@ impl BufferDiffInner {
|
||||
[
|
||||
(
|
||||
&hunk.buffer_range.start,
|
||||
(hunk.buffer_range.start, hunk.diff_base_byte_range.start),
|
||||
(
|
||||
hunk.buffer_range.start,
|
||||
hunk.diff_base_byte_range.start,
|
||||
hunk,
|
||||
),
|
||||
),
|
||||
(
|
||||
&hunk.buffer_range.end,
|
||||
(hunk.buffer_range.end, hunk.diff_base_byte_range.end),
|
||||
(hunk.buffer_range.end, hunk.diff_base_byte_range.end, hunk),
|
||||
),
|
||||
]
|
||||
});
|
||||
@@ -625,8 +661,11 @@ impl BufferDiffInner {
|
||||
let mut summaries = buffer.summaries_for_anchors_with_payload::<Point, _, _>(anchor_iter);
|
||||
iter::from_fn(move || {
|
||||
loop {
|
||||
let (start_point, (start_anchor, start_base)) = summaries.next()?;
|
||||
let (mut end_point, (mut end_anchor, end_base)) = summaries.next()?;
|
||||
let (start_point, (start_anchor, start_base, hunk)) = summaries.next()?;
|
||||
let (mut end_point, (mut end_anchor, end_base, _)) = summaries.next()?;
|
||||
|
||||
let base_word_diffs = hunk.base_word_diffs.clone();
|
||||
let buffer_word_diffs = hunk.buffer_word_diffs.clone();
|
||||
|
||||
if !start_anchor.is_valid(buffer) {
|
||||
continue;
|
||||
@@ -696,6 +735,8 @@ impl BufferDiffInner {
|
||||
range: start_point..end_point,
|
||||
diff_base_byte_range: start_base..end_base,
|
||||
buffer_range: start_anchor..end_anchor,
|
||||
base_word_diffs,
|
||||
buffer_word_diffs,
|
||||
secondary_status,
|
||||
});
|
||||
}
|
||||
@@ -727,6 +768,8 @@ impl BufferDiffInner {
|
||||
buffer_range: hunk.buffer_range.clone(),
|
||||
// The secondary status is not used by callers of this method.
|
||||
secondary_status: DiffHunkSecondaryStatus::NoSecondaryHunk,
|
||||
base_word_diffs: hunk.base_word_diffs.clone(),
|
||||
buffer_word_diffs: hunk.buffer_word_diffs.clone(),
|
||||
})
|
||||
})
|
||||
}
|
||||
@@ -795,9 +838,36 @@ impl BufferDiffInner {
|
||||
}
|
||||
}
|
||||
|
||||
fn build_diff_options(
|
||||
file: Option<&Arc<dyn File>>,
|
||||
language: Option<LanguageName>,
|
||||
language_scope: Option<language::LanguageScope>,
|
||||
cx: &App,
|
||||
) -> Option<DiffOptions> {
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
{
|
||||
if !cx.has_global::<settings::SettingsStore>() {
|
||||
return Some(DiffOptions {
|
||||
language_scope,
|
||||
max_word_diff_line_count: MAX_WORD_DIFF_LINE_COUNT,
|
||||
..Default::default()
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
language_settings(language, file, cx)
|
||||
.word_diff_enabled
|
||||
.then_some(DiffOptions {
|
||||
language_scope,
|
||||
max_word_diff_line_count: MAX_WORD_DIFF_LINE_COUNT,
|
||||
..Default::default()
|
||||
})
|
||||
}
|
||||
|
||||
fn compute_hunks(
|
||||
diff_base: Option<(Arc<String>, Rope)>,
|
||||
buffer: text::BufferSnapshot,
|
||||
diff_options: Option<DiffOptions>,
|
||||
) -> SumTree<InternalDiffHunk> {
|
||||
let mut tree = SumTree::new(&buffer);
|
||||
|
||||
@@ -823,6 +893,8 @@ fn compute_hunks(
|
||||
InternalDiffHunk {
|
||||
buffer_range: buffer.anchor_before(0)..buffer.anchor_before(0),
|
||||
diff_base_byte_range: 0..diff_base.len() - 1,
|
||||
base_word_diffs: Vec::default(),
|
||||
buffer_word_diffs: Vec::default(),
|
||||
},
|
||||
&buffer,
|
||||
);
|
||||
@@ -838,6 +910,7 @@ fn compute_hunks(
|
||||
&diff_base_rope,
|
||||
&buffer,
|
||||
&mut divergence,
|
||||
diff_options.as_ref(),
|
||||
);
|
||||
tree.push(hunk, &buffer);
|
||||
}
|
||||
@@ -847,6 +920,8 @@ fn compute_hunks(
|
||||
InternalDiffHunk {
|
||||
buffer_range: Anchor::min_max_range_for_buffer(buffer.remote_id()),
|
||||
diff_base_byte_range: 0..0,
|
||||
base_word_diffs: Vec::default(),
|
||||
buffer_word_diffs: Vec::default(),
|
||||
},
|
||||
&buffer,
|
||||
);
|
||||
@@ -861,6 +936,7 @@ fn process_patch_hunk(
|
||||
diff_base: &Rope,
|
||||
buffer: &text::BufferSnapshot,
|
||||
buffer_row_divergence: &mut i64,
|
||||
diff_options: Option<&DiffOptions>,
|
||||
) -> InternalDiffHunk {
|
||||
let line_item_count = patch.num_lines_in_hunk(hunk_index).unwrap();
|
||||
assert!(line_item_count > 0);
|
||||
@@ -925,9 +1001,49 @@ fn process_patch_hunk(
|
||||
let start = Point::new(buffer_row_range.start, 0);
|
||||
let end = Point::new(buffer_row_range.end, 0);
|
||||
let buffer_range = buffer.anchor_before(start)..buffer.anchor_before(end);
|
||||
|
||||
let base_line_count = line_item_count.saturating_sub(buffer_row_range.len());
|
||||
|
||||
let (base_word_diffs, buffer_word_diffs) = if let Some(diff_options) = diff_options
|
||||
&& !buffer_row_range.is_empty()
|
||||
&& base_line_count == buffer_row_range.len()
|
||||
&& diff_options.max_word_diff_line_count >= base_line_count
|
||||
{
|
||||
let base_text: String = diff_base
|
||||
.chunks_in_range(diff_base_byte_range.clone())
|
||||
.collect();
|
||||
|
||||
let buffer_text: String = buffer.text_for_range(buffer_range.clone()).collect();
|
||||
|
||||
let (base_word_diffs, buffer_word_diffs_relative) = word_diff_ranges(
|
||||
&base_text,
|
||||
&buffer_text,
|
||||
DiffOptions {
|
||||
language_scope: diff_options.language_scope.clone(),
|
||||
..*diff_options
|
||||
},
|
||||
);
|
||||
|
||||
let buffer_start_offset = buffer_range.start.to_offset(buffer);
|
||||
let buffer_word_diffs = buffer_word_diffs_relative
|
||||
.into_iter()
|
||||
.map(|range| {
|
||||
let start = buffer.anchor_after(buffer_start_offset + range.start);
|
||||
let end = buffer.anchor_after(buffer_start_offset + range.end);
|
||||
start..end
|
||||
})
|
||||
.collect();
|
||||
|
||||
(base_word_diffs, buffer_word_diffs)
|
||||
} else {
|
||||
(Vec::default(), Vec::default())
|
||||
};
|
||||
|
||||
InternalDiffHunk {
|
||||
buffer_range,
|
||||
diff_base_byte_range,
|
||||
base_word_diffs,
|
||||
buffer_word_diffs,
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
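For orientation, `word_diff_ranges` (imported from `language` above) compares the two sides of a hunk as plain strings and returns byte ranges into each; the option defaults used here are an assumption:

```rust
// Hedged sketch of the word-level pass: ranges are byte offsets relative to
// each input string and still need converting to buffer anchors, as done above.
let (base_ranges, buffer_ranges) = word_diff_ranges(
    "let value = 1;\n",
    "let value = 42;\n",
    DiffOptions::default(),
);
// Expect both range lists to cover only the differing token ("1" vs "42").
```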
@@ -524,6 +524,16 @@ impl Room {
self.id
}

pub fn room_id(&self) -> impl Future<Output = Option<String>> + 'static {
let room = self.live_kit.as_ref().map(|lk| lk.room.clone());
async move {
let room = room?;
let sid = room.sid().await;
let name = room.name();
Some(format!("{} (sid: {sid})", name))
}
}

pub fn status(&self) -> RoomStatus {
self.status
}
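Because `room_id` clones the LiveKit room handle up front, the returned future is `'static` and can be awaited after the `Room` borrow ends. A hedged sketch of a call site; the executor handle and logging are assumptions:

```rust
// Illustrative only.
let room_id = room.room_id();
background_executor
    .spawn(async move {
        if let Some(id) = room_id.await {
            log::info!("joined LiveKit room {id}");
        }
    })
    .detach();
```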
@@ -32,7 +32,7 @@ struct Detect;
|
||||
|
||||
trait InstalledApp {
|
||||
fn zed_version_string(&self) -> String;
|
||||
fn launch(&self, ipc_url: String) -> anyhow::Result<()>;
|
||||
fn launch(&self, ipc_url: String, user_data_dir: Option<&str>) -> anyhow::Result<()>;
|
||||
fn run_foreground(
|
||||
&self,
|
||||
ipc_url: String,
|
||||
@@ -588,7 +588,7 @@ fn main() -> Result<()> {
|
||||
if args.foreground {
|
||||
app.run_foreground(url, user_data_dir.as_deref())?;
|
||||
} else {
|
||||
app.launch(url)?;
|
||||
app.launch(url, user_data_dir.as_deref())?;
|
||||
sender.join().unwrap()?;
|
||||
if let Some(handle) = stdin_pipe_handle {
|
||||
handle.join().unwrap()?;
|
||||
@@ -709,14 +709,18 @@ mod linux {
|
||||
)
|
||||
}
|
||||
|
||||
fn launch(&self, ipc_url: String) -> anyhow::Result<()> {
|
||||
let sock_path = paths::data_dir().join(format!(
|
||||
fn launch(&self, ipc_url: String, user_data_dir: Option<&str>) -> anyhow::Result<()> {
|
||||
let data_dir = user_data_dir
|
||||
.map(PathBuf::from)
|
||||
.unwrap_or_else(|| paths::data_dir().clone());
|
||||
|
||||
let sock_path = data_dir.join(format!(
|
||||
"zed-{}.sock",
|
||||
*release_channel::RELEASE_CHANNEL_NAME
|
||||
));
|
||||
let sock = UnixDatagram::unbound()?;
|
||||
if sock.connect(&sock_path).is_err() {
|
||||
self.boot_background(ipc_url)?;
|
||||
self.boot_background(ipc_url, user_data_dir)?;
|
||||
} else {
|
||||
sock.send(ipc_url.as_bytes())?;
|
||||
}
|
||||
@@ -742,7 +746,11 @@ mod linux {
|
||||
}
|
||||
|
||||
impl App {
|
||||
fn boot_background(&self, ipc_url: String) -> anyhow::Result<()> {
|
||||
fn boot_background(
|
||||
&self,
|
||||
ipc_url: String,
|
||||
user_data_dir: Option<&str>,
|
||||
) -> anyhow::Result<()> {
|
||||
let path = &self.0;
|
||||
|
||||
match fork::fork() {
|
||||
@@ -756,8 +764,13 @@ mod linux {
|
||||
if fork::close_fd().is_err() {
|
||||
eprintln!("failed to close_fd: {}", std::io::Error::last_os_error());
|
||||
}
|
||||
let error =
|
||||
exec::execvp(path.clone(), &[path.as_os_str(), &OsString::from(ipc_url)]);
|
||||
let mut args: Vec<OsString> =
|
||||
vec![path.as_os_str().to_owned(), OsString::from(ipc_url)];
|
||||
if let Some(dir) = user_data_dir {
|
||||
args.push(OsString::from("--user-data-dir"));
|
||||
args.push(OsString::from(dir));
|
||||
}
|
||||
let error = exec::execvp(path.clone(), &args);
|
||||
// if exec succeeded, we never get here.
|
||||
eprintln!("failed to exec {:?}: {}", path, error);
|
||||
process::exit(1)
|
||||
@@ -943,11 +956,14 @@ mod windows {
|
||||
)
|
||||
}
|
||||
|
||||
fn launch(&self, ipc_url: String) -> anyhow::Result<()> {
|
||||
fn launch(&self, ipc_url: String, user_data_dir: Option<&str>) -> anyhow::Result<()> {
|
||||
if check_single_instance() {
|
||||
std::process::Command::new(self.0.clone())
|
||||
.arg(ipc_url)
|
||||
.spawn()?;
|
||||
let mut cmd = std::process::Command::new(self.0.clone());
|
||||
cmd.arg(ipc_url);
|
||||
if let Some(dir) = user_data_dir {
|
||||
cmd.arg("--user-data-dir").arg(dir);
|
||||
}
|
||||
cmd.spawn()?;
|
||||
} else {
|
||||
unsafe {
|
||||
let pipe = CreateFileW(
|
||||
@@ -1096,7 +1112,7 @@ mod mac_os {
|
||||
format!("Zed {} – {}", self.version(), self.path().display(),)
|
||||
}
|
||||
|
||||
fn launch(&self, url: String) -> anyhow::Result<()> {
|
||||
fn launch(&self, url: String, user_data_dir: Option<&str>) -> anyhow::Result<()> {
|
||||
match self {
|
||||
Self::App { app_bundle, .. } => {
|
||||
let app_path = app_bundle;
|
||||
@@ -1146,8 +1162,11 @@ mod mac_os {
|
||||
format!("Cloning descriptor for file {subprocess_stdout_file:?}")
|
||||
})?;
|
||||
let mut command = std::process::Command::new(executable);
|
||||
let command = command
|
||||
.env(FORCE_CLI_MODE_ENV_VAR_NAME, "")
|
||||
command.env(FORCE_CLI_MODE_ENV_VAR_NAME, "");
|
||||
if let Some(dir) = user_data_dir {
|
||||
command.arg("--user-data-dir").arg(dir);
|
||||
}
|
||||
command
|
||||
.stderr(subprocess_stdout_file)
|
||||
.stdout(subprocess_stdin_file)
|
||||
.arg(url);
|
||||
|
||||
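Across the Linux, Windows, and macOS launchers above, the change follows one pattern: forward the optional user data dir to the spawned Zed process as a `--user-data-dir` argument. A condensed sketch of that shared shape (variable names assumed):

```rust
// Sketch of the shared launcher pattern; mirrors the Windows branch above.
let mut cmd = std::process::Command::new(&zed_binary_path);
cmd.arg(&ipc_url);
if let Some(dir) = user_data_dir {
    cmd.arg("--user-data-dir").arg(dir);
}
cmd.spawn()?;
```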
@@ -1723,6 +1723,10 @@ impl ProtoClient for Client {
|
||||
fn is_via_collab(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn has_wsl_interop(&self) -> bool {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
/// prefix for the zed:// url scheme
|
||||
|
||||
@@ -206,11 +206,16 @@ pub struct AcceptEditPredictionBody {
pub request_id: String,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
#[derive(Debug, Clone, Deserialize)]
pub struct RejectEditPredictionsBody {
pub rejections: Vec<EditPredictionRejection>,
}

#[derive(Debug, Clone, Serialize)]
pub struct RejectEditPredictionsBodyRef<'a> {
pub rejections: &'a [EditPredictionRejection],
}

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct EditPredictionRejection {
pub request_id: String,

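Splitting the body into an owned `Deserialize` type and a borrowed `Serialize` type lets the client build the request without cloning its rejection list. A small round-trip sketch; the `pending` vector is hypothetical:

```rust
// Client side: serialize from borrowed data.
let body = serde_json::to_string(&RejectEditPredictionsBodyRef { rejections: &pending })?;
// Server side: deserialize into the owned form.
let parsed: RejectEditPredictionsBody = serde_json::from_str(&body)?;
assert_eq!(parsed.rejections, pending);
```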
@@ -31,18 +31,10 @@ pub struct PredictEditsRequest {
|
||||
/// Within `signatures`
|
||||
pub excerpt_parent: Option<usize>,
|
||||
#[serde(skip_serializing_if = "Vec::is_empty", default)]
|
||||
pub included_files: Vec<IncludedFile>,
|
||||
#[serde(skip_serializing_if = "Vec::is_empty", default)]
|
||||
pub signatures: Vec<Signature>,
|
||||
#[serde(skip_serializing_if = "Vec::is_empty", default)]
|
||||
pub referenced_declarations: Vec<ReferencedDeclaration>,
|
||||
pub related_files: Vec<RelatedFile>,
|
||||
pub events: Vec<Arc<Event>>,
|
||||
#[serde(default)]
|
||||
pub can_collect_data: bool,
|
||||
#[serde(skip_serializing_if = "Vec::is_empty", default)]
|
||||
pub diagnostic_groups: Vec<DiagnosticGroup>,
|
||||
#[serde(skip_serializing_if = "is_default", default)]
|
||||
pub diagnostic_groups_truncated: bool,
|
||||
/// Info about the git repository state, only present when can_collect_data is true.
|
||||
#[serde(skip_serializing_if = "Option::is_none", default)]
|
||||
pub git_info: Option<PredictEditsGitInfo>,
|
||||
@@ -58,7 +50,7 @@ pub struct PredictEditsRequest {
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct IncludedFile {
|
||||
pub struct RelatedFile {
|
||||
pub path: Arc<Path>,
|
||||
pub max_row: Line,
|
||||
pub excerpts: Vec<Excerpt>,
|
||||
@@ -72,11 +64,9 @@ pub struct Excerpt {
|
||||
|
||||
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, EnumIter)]
|
||||
pub enum PromptFormat {
|
||||
MarkedExcerpt,
|
||||
LabeledSections,
|
||||
NumLinesUniDiff,
|
||||
/// XML old_text/new_text
|
||||
OldTextNewText,
|
||||
/// Prompt format intended for use via zeta_cli
|
||||
/// Prompt format intended for use via edit_prediction_cli
|
||||
OnlySnippets,
|
||||
/// One-sentence instructions used in fine-tuned models
|
||||
Minimal,
|
||||
@@ -87,7 +77,7 @@ pub enum PromptFormat {
|
||||
}
|
||||
|
||||
impl PromptFormat {
|
||||
pub const DEFAULT: PromptFormat = PromptFormat::NumLinesUniDiff;
|
||||
pub const DEFAULT: PromptFormat = PromptFormat::Minimal;
|
||||
}
|
||||
|
||||
impl Default for PromptFormat {
|
||||
@@ -105,10 +95,7 @@ impl PromptFormat {
|
||||
impl std::fmt::Display for PromptFormat {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
PromptFormat::MarkedExcerpt => write!(f, "Marked Excerpt"),
|
||||
PromptFormat::LabeledSections => write!(f, "Labeled Sections"),
|
||||
PromptFormat::OnlySnippets => write!(f, "Only Snippets"),
|
||||
PromptFormat::NumLinesUniDiff => write!(f, "Numbered Lines / Unified Diff"),
|
||||
PromptFormat::OldTextNewText => write!(f, "Old Text / New Text"),
|
||||
PromptFormat::Minimal => write!(f, "Minimal"),
|
||||
PromptFormat::MinimalQwen => write!(f, "Minimal + Qwen FIM"),
|
||||
@@ -178,67 +165,6 @@ impl<'a> std::fmt::Display for DiffPathFmt<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct Signature {
|
||||
pub text: String,
|
||||
pub text_is_truncated: bool,
|
||||
#[serde(skip_serializing_if = "Option::is_none", default)]
|
||||
pub parent_index: Option<usize>,
|
||||
/// Range of `text` within the file, possibly truncated according to `text_is_truncated`. The
|
||||
/// file is implicitly the file that contains the descendant declaration or excerpt.
|
||||
pub range: Range<Line>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct ReferencedDeclaration {
|
||||
pub path: Arc<Path>,
|
||||
pub text: String,
|
||||
pub text_is_truncated: bool,
|
||||
/// Range of `text` within file, possibly truncated according to `text_is_truncated`
|
||||
pub range: Range<Line>,
|
||||
/// Range within `text`
|
||||
pub signature_range: Range<usize>,
|
||||
/// Index within `signatures`.
|
||||
#[serde(skip_serializing_if = "Option::is_none", default)]
|
||||
pub parent_index: Option<usize>,
|
||||
pub score_components: DeclarationScoreComponents,
|
||||
pub signature_score: f32,
|
||||
pub declaration_score: f32,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct DeclarationScoreComponents {
|
||||
pub is_same_file: bool,
|
||||
pub is_referenced_nearby: bool,
|
||||
pub is_referenced_in_breadcrumb: bool,
|
||||
pub reference_count: usize,
|
||||
pub same_file_declaration_count: usize,
|
||||
pub declaration_count: usize,
|
||||
pub reference_line_distance: u32,
|
||||
pub declaration_line_distance: u32,
|
||||
pub excerpt_vs_item_jaccard: f32,
|
||||
pub excerpt_vs_signature_jaccard: f32,
|
||||
pub adjacent_vs_item_jaccard: f32,
|
||||
pub adjacent_vs_signature_jaccard: f32,
|
||||
pub excerpt_vs_item_weighted_overlap: f32,
|
||||
pub excerpt_vs_signature_weighted_overlap: f32,
|
||||
pub adjacent_vs_item_weighted_overlap: f32,
|
||||
pub adjacent_vs_signature_weighted_overlap: f32,
|
||||
pub path_import_match_count: usize,
|
||||
pub wildcard_path_import_match_count: usize,
|
||||
pub import_similarity: f32,
|
||||
pub max_import_similarity: f32,
|
||||
pub normalized_import_similarity: f32,
|
||||
pub wildcard_import_similarity: f32,
|
||||
pub normalized_wildcard_import_similarity: f32,
|
||||
pub included_by_others: usize,
|
||||
pub includes_others: usize,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(transparent)]
pub struct DiagnosticGroup(pub Box<serde_json::value::RawValue>);
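
// Editor's sketch (not part of this diff): with `#[serde(transparent)]`, the
// wrapper (de)serializes exactly like the inner `Box<RawValue>`, so arbitrary
// diagnostic JSON passes through untouched and can be forwarded without being
// parsed into a concrete structure. Assumes serde_json.
fn parse_diagnostic_group(json: &str) -> serde_json::Result<DiagnosticGroup> {
    // The returned value still holds the raw text; `group.0.get()` yields the original JSON.
    serde_json::from_str(json)
}
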
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct PredictEditsResponse {
|
||||
pub request_id: Uuid,
|
||||
@@ -262,10 +188,6 @@ pub struct Edit {
|
||||
pub content: String,
|
||||
}
|
||||
|
||||
fn is_default<T: Default + PartialEq>(value: &T) -> bool {
*value == T::default()
}
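
// Editor's sketch (not part of this diff) of how `is_default` pairs with the
// `#[serde(skip_serializing_if = "is_default", default)]` attribute used on
// `diagnostic_groups_truncated` above. `ExampleFlags` is illustrative only.
#[derive(Debug, Default, PartialEq, Serialize, Deserialize)]
struct ExampleFlags {
    #[serde(skip_serializing_if = "is_default", default)]
    truncated: bool,
}

fn example_flags_round_trip() -> serde_json::Result<()> {
    // A default-valued field is omitted from the payload...
    assert_eq!(serde_json::to_string(&ExampleFlags { truncated: false })?, "{}");
    // ...and restored to its default when the field is absent.
    let parsed: ExampleFlags = serde_json::from_str("{}")?;
    assert_eq!(parsed, ExampleFlags::default());
    Ok(())
}
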
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, PartialOrd, Eq, Ord)]
|
||||
pub struct Point {
|
||||
pub line: Line,
|
||||
|
||||
@@ -15,9 +15,4 @@ path = "src/cloud_zeta2_prompt.rs"
|
||||
anyhow.workspace = true
|
||||
cloud_llm_client.workspace = true
|
||||
indoc.workspace = true
|
||||
ordered-float.workspace = true
|
||||
rustc-hash.workspace = true
|
||||
schemars.workspace = true
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
strum.workspace = true
|
||||
|
||||
@@ -1,20 +1,12 @@
|
||||
//! Zeta2 prompt planning and generation code shared with cloud.
|
||||
pub mod retrieval_prompt;
|
||||
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use anyhow::Result;
|
||||
use cloud_llm_client::predict_edits_v3::{
|
||||
self, DiffPathFmt, Event, Excerpt, IncludedFile, Line, Point, PromptFormat,
|
||||
ReferencedDeclaration,
|
||||
self, DiffPathFmt, Event, Excerpt, Line, Point, PromptFormat, RelatedFile,
|
||||
};
|
||||
use indoc::indoc;
|
||||
use ordered_float::OrderedFloat;
|
||||
use rustc_hash::{FxHashMap, FxHashSet};
|
||||
use serde::Serialize;
|
||||
use std::cmp;
|
||||
use std::fmt::Write;
|
||||
use std::path::Path;
|
||||
use std::sync::Arc;
|
||||
use std::{cmp::Reverse, collections::BinaryHeap, ops::Range, path::Path};
|
||||
use strum::{EnumIter, IntoEnumIterator};
|
||||
|
||||
pub const DEFAULT_MAX_PROMPT_BYTES: usize = 10 * 1024;
|
||||
|
||||
@@ -24,69 +16,6 @@ pub const EDITABLE_REGION_START_MARKER_WITH_NEWLINE: &str = "<|editable_region_s
|
||||
/// NOTE: Differs from zed version of constant - includes a newline
|
||||
pub const EDITABLE_REGION_END_MARKER_WITH_NEWLINE: &str = "<|editable_region_end|>\n";
|
||||
|
||||
// TODO: use constants for markers?
|
||||
const MARKED_EXCERPT_INSTRUCTIONS: &str = indoc! {"
|
||||
You are a code completion assistant and your task is to analyze user edits and then rewrite an excerpt that the user provides, suggesting the appropriate edits within the excerpt, taking into account the cursor location.
|
||||
|
||||
The excerpt to edit will be wrapped in markers <|editable_region_start|> and <|editable_region_end|>. The cursor position is marked with <|user_cursor|>. Please respond with edited code for that region.
|
||||
|
||||
Other code is provided for context, and `…` indicates when code has been skipped.
|
||||
|
||||
## Edit History
"};
const LABELED_SECTIONS_INSTRUCTIONS: &str = indoc! {r#"
|
||||
You are a code completion assistant and your task is to analyze user edits, and suggest an edit to one of the provided sections of code.

Sections of code are grouped by file and then labeled by `<|section_N|>` (e.g. `<|section_8|>`).

The cursor position is marked with `<|user_cursor|>` and it will appear within a special section labeled `<|current_section|>`. Prefer editing the current section until no more changes are needed within it.
|
||||
|
||||
Respond ONLY with the name of the section to edit on a single line, followed by all of the code that should replace that section. For example:
|
||||
|
||||
<|current_section|>
|
||||
for i in 0..16 {
|
||||
println!("{i}");
|
||||
}
|
||||
|
||||
## Edit History
|
||||
|
||||
"#};
|
||||
|
||||
const NUMBERED_LINES_INSTRUCTIONS: &str = indoc! {r#"
|
||||
# Instructions
|
||||
|
||||
You are an edit prediction agent in a code editor.
|
||||
Your job is to predict the next edit that the user will make,
|
||||
based on their last few edits and their current cursor location.
|
||||
|
||||
## Output Format
|
||||
|
||||
You must briefly explain your understanding of the user's goal, in one
|
||||
or two sentences, and then specify their next edit in the form of a
|
||||
unified diff, like this:
|
||||
|
||||
```
|
||||
--- a/src/myapp/cli.py
|
||||
+++ b/src/myapp/cli.py
|
||||
@@ ... @@
|
||||
import os
|
||||
import time
|
||||
import sys
|
||||
+from constants import LOG_LEVEL_WARNING
|
||||
@@ ... @@
|
||||
config.headless()
|
||||
config.set_interactive(false)
|
||||
-config.set_log_level(LOG_L)
|
||||
+config.set_log_level(LOG_LEVEL_WARNING)
|
||||
config.set_use_color(True)
|
||||
```
|
||||
|
||||
## Edit History
|
||||
|
||||
"#};
|
||||
|
||||
const STUDENT_MODEL_INSTRUCTIONS: &str = indoc! {r#"
|
||||
You are a code completion assistant that analyzes edit history to identify and systematically complete incomplete refactorings or patterns across the entire codebase.
|
||||
|
||||
@@ -94,20 +23,6 @@ const STUDENT_MODEL_INSTRUCTIONS: &str = indoc! {r#"
|
||||
|
||||
"#};
|
||||
|
||||
const UNIFIED_DIFF_REMINDER: &str = indoc! {"
|
||||
---
|
||||
|
||||
Analyze the edit history and the files, then provide the unified diff for your predicted edits.
|
||||
Do not include the cursor marker in your output.
|
||||
Your diff should include edited file paths in its file headers (lines beginning with `---` and `+++`).
|
||||
Do not include line numbers in the hunk headers, use `@@ ... @@`.
|
||||
Removed lines begin with `-`.
|
||||
Added lines begin with `+`.
|
||||
Context lines begin with an extra space.
|
||||
Context and removed lines are used to match the target edit location, so make sure to include enough of them
|
||||
to uniquely identify it amongst all excerpts of code provided.
|
||||
"};
|
||||
|
||||
const MINIMAL_PROMPT_REMINDER: &str = indoc! {"
|
||||
---
|
||||
|
||||
@@ -164,49 +79,25 @@ const OLD_TEXT_NEW_TEXT_REMINDER: &str = indoc! {r#"
|
||||
Remember that the edits in the edit history have already been applied.
|
||||
"#};
|
||||
|
||||
pub fn build_prompt(
|
||||
request: &predict_edits_v3::PredictEditsRequest,
|
||||
) -> Result<(String, SectionLabels)> {
|
||||
let mut section_labels = Default::default();
|
||||
|
||||
pub fn build_prompt(request: &predict_edits_v3::PredictEditsRequest) -> Result<String> {
|
||||
let prompt_data = PromptData {
|
||||
events: request.events.clone(),
|
||||
cursor_point: request.cursor_point,
|
||||
cursor_path: request.excerpt_path.clone(),
|
||||
included_files: request.included_files.clone(),
|
||||
included_files: request.related_files.clone(),
|
||||
};
|
||||
match request.prompt_format {
|
||||
PromptFormat::MinimalQwen => {
|
||||
return Ok((MinimalQwenPrompt.render(&prompt_data), section_labels));
|
||||
return Ok(MinimalQwenPrompt.render(&prompt_data));
|
||||
}
|
||||
PromptFormat::SeedCoder1120 => {
|
||||
return Ok((SeedCoder1120Prompt.render(&prompt_data), section_labels));
|
||||
return Ok(SeedCoder1120Prompt.render(&prompt_data));
|
||||
}
|
||||
_ => (),
|
||||
};
|
||||
|
||||
let mut insertions = match request.prompt_format {
|
||||
PromptFormat::MarkedExcerpt => vec![
|
||||
(
|
||||
Point {
|
||||
line: request.excerpt_line_range.start,
|
||||
column: 0,
|
||||
},
|
||||
EDITABLE_REGION_START_MARKER_WITH_NEWLINE,
|
||||
),
|
||||
(request.cursor_point, CURSOR_MARKER),
|
||||
(
|
||||
Point {
|
||||
line: request.excerpt_line_range.end,
|
||||
column: 0,
|
||||
},
|
||||
EDITABLE_REGION_END_MARKER_WITH_NEWLINE,
|
||||
),
|
||||
],
|
||||
PromptFormat::LabeledSections
|
||||
| PromptFormat::NumLinesUniDiff
|
||||
| PromptFormat::Minimal
|
||||
| PromptFormat::OldTextNewText => {
|
||||
let insertions = match request.prompt_format {
|
||||
PromptFormat::Minimal | PromptFormat::OldTextNewText => {
|
||||
vec![(request.cursor_point, CURSOR_MARKER)]
|
||||
}
|
||||
PromptFormat::OnlySnippets => vec![],
|
||||
@@ -215,9 +106,6 @@ pub fn build_prompt(
|
||||
};
|
||||
|
||||
let mut prompt = match request.prompt_format {
|
||||
PromptFormat::MarkedExcerpt => MARKED_EXCERPT_INSTRUCTIONS.to_string(),
|
||||
PromptFormat::LabeledSections => LABELED_SECTIONS_INSTRUCTIONS.to_string(),
|
||||
PromptFormat::NumLinesUniDiff => NUMBERED_LINES_INSTRUCTIONS.to_string(),
|
||||
PromptFormat::OldTextNewText => XML_TAGS_INSTRUCTIONS.to_string(),
|
||||
PromptFormat::OnlySnippets => String::new(),
|
||||
PromptFormat::Minimal => STUDENT_MODEL_INSTRUCTIONS.to_string(),
|
||||
@@ -247,7 +135,7 @@ pub fn build_prompt(
|
||||
You can only edit exactly this part of the file.
|
||||
We prepend line numbers (e.g., `123|<actual line>`); they are not part of the file.)
|
||||
"},
|
||||
PromptFormat::NumLinesUniDiff | PromptFormat::OldTextNewText => indoc! {"
|
||||
PromptFormat::OldTextNewText => indoc! {"
|
||||
## Code Excerpts

Here are some excerpts of code that you should take into account to predict the next edit.
@@ -263,64 +151,51 @@ pub fn build_prompt(
|
||||
|
||||
Lines starting with `…` indicate omitted line ranges. These may appear inside multi-line code constructs.
|
||||
"},
|
||||
_ => indoc! {"
|
||||
PromptFormat::OnlySnippets | PromptFormat::MinimalQwen | PromptFormat::SeedCoder1120 => {
|
||||
indoc! {"
|
||||
## Code Excerpts
|
||||
|
||||
The cursor marker <|user_cursor|> indicates the current user cursor position.
The file is in its current state; edits from the edit history have already been applied.
"},
|
||||
"}
|
||||
}
|
||||
};
|
||||
|
||||
prompt.push_str(excerpts_preamble);
|
||||
prompt.push('\n');
|
||||
|
||||
if !request.referenced_declarations.is_empty() || !request.signatures.is_empty() {
|
||||
let syntax_based_prompt = SyntaxBasedPrompt::populate(request)?;
|
||||
section_labels = syntax_based_prompt.write(&mut insertions, &mut prompt)?;
|
||||
} else {
|
||||
if request.prompt_format == PromptFormat::LabeledSections {
|
||||
anyhow::bail!("PromptFormat::LabeledSections cannot be used with ContextMode::Llm");
|
||||
}
|
||||
|
||||
let include_line_numbers = matches!(
|
||||
request.prompt_format,
|
||||
PromptFormat::NumLinesUniDiff | PromptFormat::Minimal
|
||||
);
|
||||
for related_file in &request.included_files {
|
||||
if request.prompt_format == PromptFormat::Minimal {
|
||||
write_codeblock_with_filename(
|
||||
&related_file.path,
|
||||
&related_file.excerpts,
|
||||
if related_file.path == request.excerpt_path {
|
||||
&insertions
|
||||
} else {
|
||||
&[]
|
||||
},
|
||||
related_file.max_row,
|
||||
include_line_numbers,
|
||||
&mut prompt,
|
||||
);
|
||||
} else {
|
||||
write_codeblock(
|
||||
&related_file.path,
|
||||
&related_file.excerpts,
|
||||
if related_file.path == request.excerpt_path {
|
||||
&insertions
|
||||
} else {
|
||||
&[]
|
||||
},
|
||||
related_file.max_row,
|
||||
include_line_numbers,
|
||||
&mut prompt,
|
||||
);
|
||||
}
|
||||
let include_line_numbers = matches!(request.prompt_format, PromptFormat::Minimal);
|
||||
for related_file in &request.related_files {
|
||||
if request.prompt_format == PromptFormat::Minimal {
|
||||
write_codeblock_with_filename(
|
||||
&related_file.path,
|
||||
&related_file.excerpts,
|
||||
if related_file.path == request.excerpt_path {
|
||||
&insertions
|
||||
} else {
|
||||
&[]
|
||||
},
|
||||
related_file.max_row,
|
||||
include_line_numbers,
|
||||
&mut prompt,
|
||||
);
|
||||
} else {
|
||||
write_codeblock(
|
||||
&related_file.path,
|
||||
&related_file.excerpts,
|
||||
if related_file.path == request.excerpt_path {
|
||||
&insertions
|
||||
} else {
|
||||
&[]
|
||||
},
|
||||
related_file.max_row,
|
||||
include_line_numbers,
|
||||
&mut prompt,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
match request.prompt_format {
|
||||
PromptFormat::NumLinesUniDiff => {
|
||||
prompt.push_str(UNIFIED_DIFF_REMINDER);
|
||||
}
|
||||
PromptFormat::OldTextNewText => {
|
||||
prompt.push_str(OLD_TEXT_NEW_TEXT_REMINDER);
|
||||
}
|
||||
@@ -330,7 +205,7 @@ pub fn build_prompt(
|
||||
_ => {}
|
||||
}
|
||||
|
||||
Ok((prompt, section_labels))
|
||||
Ok(prompt)
|
||||
}
|
||||
|
||||
pub fn generation_params(prompt_format: PromptFormat) -> GenerationParams {
|
||||
@@ -444,476 +319,11 @@ pub fn push_events(output: &mut String, events: &[Arc<predict_edits_v3::Event>])
|
||||
writeln!(output, "`````\n").unwrap();
|
||||
}
|
||||
|
||||
pub struct SyntaxBasedPrompt<'a> {
|
||||
request: &'a predict_edits_v3::PredictEditsRequest,
|
||||
/// Snippets to include in the prompt. These may overlap - they are merged / deduplicated in
|
||||
/// `to_prompt_string`.
|
||||
snippets: Vec<PlannedSnippet<'a>>,
|
||||
budget_used: usize,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct PlannedSnippet<'a> {
|
||||
path: Arc<Path>,
|
||||
range: Range<Line>,
|
||||
text: &'a str,
|
||||
// TODO: Indicate this in the output
|
||||
#[allow(dead_code)]
|
||||
text_is_truncated: bool,
|
||||
}
|
||||
|
||||
#[derive(EnumIter, Clone, Copy, PartialEq, Eq, Hash, Debug, PartialOrd, Ord)]
|
||||
pub enum DeclarationStyle {
|
||||
Signature,
|
||||
Declaration,
|
||||
}
|
||||
|
||||
#[derive(Default, Clone, Debug, Serialize)]
|
||||
pub struct SectionLabels {
|
||||
pub excerpt_index: usize,
|
||||
pub section_ranges: Vec<(Arc<Path>, Range<Line>)>,
|
||||
}
|
||||
|
||||
impl<'a> SyntaxBasedPrompt<'a> {
|
||||
/// Greedy one-pass knapsack algorithm to populate the prompt plan. Does the following:
|
||||
///
|
||||
/// Initializes a priority queue by populating it with each snippet, finding the
|
||||
/// DeclarationStyle that minimizes `score_density = score / snippet.range(style).len()`. When a
|
||||
/// "signature" snippet is popped, insert an entry for the "declaration" variant that reflects
|
||||
/// the cost of upgrade.
|
||||
///
|
||||
/// TODO: Implement an early halting condition. One option might be to have another priority
|
||||
/// queue where the score is the size, and update it accordingly. Another option might be to
|
||||
/// have some simpler heuristic like bailing after N failed insertions, or based on how much
|
||||
/// budget is left.
|
||||
///
|
||||
/// TODO: Has the current known sources of imprecision:
|
||||
///
|
||||
/// * Does not consider snippet overlap when ranking. For example, it might add a field to the
|
||||
/// plan even though the containing struct is already included.
|
||||
///
|
||||
/// * Does not consider cost of signatures when ranking snippets - this is tricky since
|
||||
/// signatures may be shared by multiple snippets.
|
||||
///
|
||||
/// * Does not include file paths / other text when considering max_bytes.
|
||||
pub fn populate(request: &'a predict_edits_v3::PredictEditsRequest) -> Result<Self> {
|
||||
let mut this = Self {
|
||||
request,
|
||||
snippets: Vec::new(),
|
||||
budget_used: request.excerpt.len(),
|
||||
};
|
||||
let mut included_parents = FxHashSet::default();
|
||||
let additional_parents = this.additional_parent_signatures(
|
||||
&request.excerpt_path,
|
||||
request.excerpt_parent,
|
||||
&included_parents,
|
||||
)?;
|
||||
this.add_parents(&mut included_parents, additional_parents);
|
||||
|
||||
let max_bytes = request.prompt_max_bytes.unwrap_or(DEFAULT_MAX_PROMPT_BYTES);
|
||||
|
||||
if this.budget_used > max_bytes {
|
||||
return Err(anyhow!(
|
||||
"Excerpt + signatures size of {} already exceeds budget of {}",
|
||||
this.budget_used,
|
||||
max_bytes
|
||||
));
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
|
||||
struct QueueEntry {
|
||||
score_density: OrderedFloat<f32>,
|
||||
declaration_index: usize,
|
||||
style: DeclarationStyle,
|
||||
}
|
||||
|
||||
// Initialize priority queue with the best score for each snippet.
|
||||
let mut queue: BinaryHeap<QueueEntry> = BinaryHeap::new();
|
||||
for (declaration_index, declaration) in request.referenced_declarations.iter().enumerate() {
|
||||
let (style, score_density) = DeclarationStyle::iter()
|
||||
.map(|style| {
|
||||
(
|
||||
style,
|
||||
OrderedFloat(declaration_score_density(&declaration, style)),
|
||||
)
|
||||
})
|
||||
.max_by_key(|(_, score_density)| *score_density)
|
||||
.unwrap();
|
||||
queue.push(QueueEntry {
|
||||
score_density,
|
||||
declaration_index,
|
||||
style,
|
||||
});
|
||||
}
|
||||
|
||||
// Knapsack selection loop
|
||||
while let Some(queue_entry) = queue.pop() {
|
||||
let Some(declaration) = request
|
||||
.referenced_declarations
|
||||
.get(queue_entry.declaration_index)
|
||||
else {
|
||||
return Err(anyhow!(
|
||||
"Invalid declaration index {}",
|
||||
queue_entry.declaration_index
|
||||
));
|
||||
};
|
||||
|
||||
let mut additional_bytes = declaration_size(declaration, queue_entry.style);
|
||||
if this.budget_used + additional_bytes > max_bytes {
|
||||
continue;
|
||||
}
|
||||
|
||||
let additional_parents = this.additional_parent_signatures(
|
||||
&declaration.path,
|
||||
declaration.parent_index,
|
||||
&mut included_parents,
|
||||
)?;
|
||||
additional_bytes += additional_parents
|
||||
.iter()
|
||||
.map(|(_, snippet)| snippet.text.len())
|
||||
.sum::<usize>();
|
||||
if this.budget_used + additional_bytes > max_bytes {
|
||||
continue;
|
||||
}
|
||||
|
||||
this.budget_used += additional_bytes;
|
||||
this.add_parents(&mut included_parents, additional_parents);
|
||||
let planned_snippet = match queue_entry.style {
|
||||
DeclarationStyle::Signature => {
|
||||
let Some(text) = declaration.text.get(declaration.signature_range.clone())
|
||||
else {
|
||||
return Err(anyhow!(
|
||||
"Invalid declaration signature_range {:?} with text.len() = {}",
|
||||
declaration.signature_range,
|
||||
declaration.text.len()
|
||||
));
|
||||
};
|
||||
let signature_start_line = declaration.range.start
|
||||
+ Line(
|
||||
declaration.text[..declaration.signature_range.start]
|
||||
.lines()
|
||||
.count() as u32,
|
||||
);
|
||||
let signature_end_line = signature_start_line
|
||||
+ Line(
|
||||
declaration.text
|
||||
[declaration.signature_range.start..declaration.signature_range.end]
|
||||
.lines()
|
||||
.count() as u32,
|
||||
);
|
||||
let range = signature_start_line..signature_end_line;
|
||||
|
||||
PlannedSnippet {
|
||||
path: declaration.path.clone(),
|
||||
range,
|
||||
text,
|
||||
text_is_truncated: declaration.text_is_truncated,
|
||||
}
|
||||
}
|
||||
DeclarationStyle::Declaration => PlannedSnippet {
|
||||
path: declaration.path.clone(),
|
||||
range: declaration.range.clone(),
|
||||
text: &declaration.text,
|
||||
text_is_truncated: declaration.text_is_truncated,
|
||||
},
|
||||
};
|
||||
this.snippets.push(planned_snippet);

// When a Signature is consumed, insert an entry for Declaration style.
if queue_entry.style == DeclarationStyle::Signature {
|
||||
let signature_size = declaration_size(&declaration, DeclarationStyle::Signature);
|
||||
let declaration_size =
|
||||
declaration_size(&declaration, DeclarationStyle::Declaration);
|
||||
let signature_score = declaration_score(&declaration, DeclarationStyle::Signature);
|
||||
let declaration_score =
|
||||
declaration_score(&declaration, DeclarationStyle::Declaration);
|
||||
|
||||
let score_diff = declaration_score - signature_score;
|
||||
let size_diff = declaration_size.saturating_sub(signature_size);
|
||||
if score_diff > 0.0001 && size_diff > 0 {
|
||||
queue.push(QueueEntry {
|
||||
declaration_index: queue_entry.declaration_index,
|
||||
score_density: OrderedFloat(score_diff / (size_diff as f32)),
|
||||
style: DeclarationStyle::Declaration,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
anyhow::Ok(this)
|
||||
}
|
||||
|
||||
fn add_parents(
|
||||
&mut self,
|
||||
included_parents: &mut FxHashSet<usize>,
|
||||
snippets: Vec<(usize, PlannedSnippet<'a>)>,
|
||||
) {
|
||||
for (parent_index, snippet) in snippets {
|
||||
included_parents.insert(parent_index);
|
||||
self.budget_used += snippet.text.len();
|
||||
self.snippets.push(snippet);
|
||||
}
|
||||
}
|
||||
|
||||
fn additional_parent_signatures(
|
||||
&self,
|
||||
path: &Arc<Path>,
|
||||
parent_index: Option<usize>,
|
||||
included_parents: &FxHashSet<usize>,
|
||||
) -> Result<Vec<(usize, PlannedSnippet<'a>)>> {
|
||||
let mut results = Vec::new();
|
||||
self.additional_parent_signatures_impl(path, parent_index, included_parents, &mut results)?;
|
||||
Ok(results)
|
||||
}
|
||||
|
||||
fn additional_parent_signatures_impl(
|
||||
&self,
|
||||
path: &Arc<Path>,
|
||||
parent_index: Option<usize>,
|
||||
included_parents: &FxHashSet<usize>,
|
||||
results: &mut Vec<(usize, PlannedSnippet<'a>)>,
|
||||
) -> Result<()> {
|
||||
let Some(parent_index) = parent_index else {
|
||||
return Ok(());
|
||||
};
|
||||
if included_parents.contains(&parent_index) {
|
||||
return Ok(());
|
||||
}
|
||||
let Some(parent_signature) = self.request.signatures.get(parent_index) else {
|
||||
return Err(anyhow!("Invalid parent index {}", parent_index));
|
||||
};
|
||||
results.push((
|
||||
parent_index,
|
||||
PlannedSnippet {
|
||||
path: path.clone(),
|
||||
range: parent_signature.range.clone(),
|
||||
text: &parent_signature.text,
|
||||
text_is_truncated: parent_signature.text_is_truncated,
|
||||
},
|
||||
));
|
||||
self.additional_parent_signatures_impl(
|
||||
path,
|
||||
parent_signature.parent_index,
|
||||
included_parents,
|
||||
results,
|
||||
)
|
||||
}
|
||||
|
||||
/// Renders the planned context. Each file starts with "```FILE_PATH\n" and ends with triple
|
||||
/// backticks, with a newline after each file. Outputs a line with "..." between nonconsecutive
|
||||
/// chunks.
|
||||
pub fn write(
|
||||
&'a self,
|
||||
excerpt_file_insertions: &mut Vec<(Point, &'static str)>,
|
||||
prompt: &mut String,
|
||||
) -> Result<SectionLabels> {
|
||||
let mut file_to_snippets: FxHashMap<&'a std::path::Path, Vec<&PlannedSnippet<'a>>> =
|
||||
FxHashMap::default();
|
||||
for snippet in &self.snippets {
|
||||
file_to_snippets
|
||||
.entry(&snippet.path)
|
||||
.or_default()
|
||||
.push(snippet);
|
||||
}
|
||||
|
||||
// Reorder so that file with cursor comes last
|
||||
let mut file_snippets = Vec::new();
|
||||
let mut excerpt_file_snippets = Vec::new();
|
||||
for (file_path, snippets) in file_to_snippets {
|
||||
if file_path == self.request.excerpt_path.as_ref() {
|
||||
excerpt_file_snippets = snippets;
|
||||
} else {
|
||||
file_snippets.push((file_path, snippets, false));
|
||||
}
|
||||
}
|
||||
let excerpt_snippet = PlannedSnippet {
|
||||
path: self.request.excerpt_path.clone(),
|
||||
range: self.request.excerpt_line_range.clone(),
|
||||
text: &self.request.excerpt,
|
||||
text_is_truncated: false,
|
||||
};
|
||||
excerpt_file_snippets.push(&excerpt_snippet);
|
||||
file_snippets.push((&self.request.excerpt_path, excerpt_file_snippets, true));
|
||||
|
||||
let section_labels =
|
||||
self.push_file_snippets(prompt, excerpt_file_insertions, file_snippets)?;
|
||||
|
||||
Ok(section_labels)
|
||||
}
|
||||
|
||||
fn push_file_snippets(
|
||||
&self,
|
||||
output: &mut String,
|
||||
excerpt_file_insertions: &mut Vec<(Point, &'static str)>,
|
||||
file_snippets: Vec<(&'a Path, Vec<&'a PlannedSnippet>, bool)>,
|
||||
) -> Result<SectionLabels> {
|
||||
let mut section_ranges = Vec::new();
|
||||
let mut excerpt_index = None;
|
||||
|
||||
for (file_path, mut snippets, is_excerpt_file) in file_snippets {
|
||||
snippets.sort_by_key(|s| (s.range.start, Reverse(s.range.end)));
|
||||
|
||||
// TODO: What if the snippets get expanded too large to be editable?
|
||||
let mut current_snippet: Option<(&PlannedSnippet, Range<Line>)> = None;
|
||||
let mut disjoint_snippets: Vec<(&PlannedSnippet, Range<Line>)> = Vec::new();
|
||||
for snippet in snippets {
|
||||
if let Some((_, current_snippet_range)) = current_snippet.as_mut()
|
||||
&& snippet.range.start <= current_snippet_range.end
|
||||
{
|
||||
current_snippet_range.end = current_snippet_range.end.max(snippet.range.end);
|
||||
continue;
|
||||
}
|
||||
if let Some(current_snippet) = current_snippet.take() {
|
||||
disjoint_snippets.push(current_snippet);
|
||||
}
|
||||
current_snippet = Some((snippet, snippet.range.clone()));
|
||||
}
|
||||
if let Some(current_snippet) = current_snippet.take() {
|
||||
disjoint_snippets.push(current_snippet);
|
||||
}
|
||||
|
||||
writeln!(output, "`````path={}", file_path.display()).ok();
|
||||
let mut skipped_last_snippet = false;
|
||||
for (snippet, range) in disjoint_snippets {
|
||||
let section_index = section_ranges.len();
|
||||
|
||||
match self.request.prompt_format {
|
||||
PromptFormat::MarkedExcerpt
|
||||
| PromptFormat::OnlySnippets
|
||||
| PromptFormat::OldTextNewText
|
||||
| PromptFormat::Minimal
|
||||
| PromptFormat::NumLinesUniDiff => {
|
||||
if range.start.0 > 0 && !skipped_last_snippet {
|
||||
output.push_str("…\n");
|
||||
}
|
||||
}
|
||||
PromptFormat::LabeledSections => {
|
||||
if is_excerpt_file
|
||||
&& range.start <= self.request.excerpt_line_range.start
|
||||
&& range.end >= self.request.excerpt_line_range.end
|
||||
{
|
||||
writeln!(output, "<|current_section|>").ok();
|
||||
} else {
|
||||
writeln!(output, "<|section_{}|>", section_index).ok();
|
||||
}
|
||||
}
|
||||
PromptFormat::MinimalQwen => unreachable!(),
|
||||
PromptFormat::SeedCoder1120 => unreachable!(),
|
||||
}
|
||||
|
||||
let push_full_snippet = |output: &mut String| {
|
||||
if self.request.prompt_format == PromptFormat::NumLinesUniDiff {
|
||||
for (i, line) in snippet.text.lines().enumerate() {
|
||||
writeln!(output, "{}|{}", i as u32 + range.start.0 + 1, line)?;
|
||||
}
|
||||
} else {
|
||||
output.push_str(&snippet.text);
|
||||
}
|
||||
anyhow::Ok(())
|
||||
};
|
||||
|
||||
if is_excerpt_file {
|
||||
if self.request.prompt_format == PromptFormat::OnlySnippets {
|
||||
if range.start >= self.request.excerpt_line_range.start
|
||||
&& range.end <= self.request.excerpt_line_range.end
|
||||
{
|
||||
skipped_last_snippet = true;
|
||||
} else {
|
||||
skipped_last_snippet = false;
|
||||
output.push_str(snippet.text);
|
||||
}
|
||||
} else if !excerpt_file_insertions.is_empty() {
|
||||
let lines = snippet.text.lines().collect::<Vec<_>>();
|
||||
let push_line = |output: &mut String, line_ix: usize| {
|
||||
if self.request.prompt_format == PromptFormat::NumLinesUniDiff {
|
||||
write!(output, "{}|", line_ix as u32 + range.start.0 + 1)?;
|
||||
}
|
||||
anyhow::Ok(writeln!(output, "{}", lines[line_ix])?)
|
||||
};
|
||||
let mut last_line_ix = 0;
|
||||
let mut insertion_ix = 0;
|
||||
while insertion_ix < excerpt_file_insertions.len() {
|
||||
let (point, insertion) = &excerpt_file_insertions[insertion_ix];
|
||||
let found = point.line >= range.start && point.line <= range.end;
|
||||
if found {
|
||||
excerpt_index = Some(section_index);
|
||||
let insertion_line_ix = (point.line.0 - range.start.0) as usize;
|
||||
for line_ix in last_line_ix..insertion_line_ix {
|
||||
push_line(output, line_ix)?;
|
||||
}
|
||||
if let Some(next_line) = lines.get(insertion_line_ix) {
|
||||
if self.request.prompt_format == PromptFormat::NumLinesUniDiff {
|
||||
write!(
|
||||
output,
|
||||
"{}|",
|
||||
insertion_line_ix as u32 + range.start.0 + 1
|
||||
)?
|
||||
}
|
||||
output.push_str(&next_line[..point.column as usize]);
|
||||
output.push_str(insertion);
|
||||
writeln!(output, "{}", &next_line[point.column as usize..])?;
|
||||
} else {
|
||||
writeln!(output, "{}", insertion)?;
|
||||
}
|
||||
last_line_ix = insertion_line_ix + 1;
|
||||
excerpt_file_insertions.remove(insertion_ix);
|
||||
continue;
|
||||
}
|
||||
insertion_ix += 1;
|
||||
}
|
||||
skipped_last_snippet = false;
|
||||
for line_ix in last_line_ix..lines.len() {
|
||||
push_line(output, line_ix)?;
|
||||
}
|
||||
} else {
|
||||
skipped_last_snippet = false;
|
||||
push_full_snippet(output)?;
|
||||
}
|
||||
} else {
|
||||
skipped_last_snippet = false;
|
||||
push_full_snippet(output)?;
|
||||
}
|
||||
|
||||
section_ranges.push((snippet.path.clone(), range));
|
||||
}
|
||||
|
||||
output.push_str("`````\n\n");
|
||||
}
|
||||
|
||||
Ok(SectionLabels {
|
||||
// TODO: Clean this up
|
||||
excerpt_index: match self.request.prompt_format {
|
||||
PromptFormat::OnlySnippets => 0,
|
||||
_ => excerpt_index.context("bug: no snippet found for excerpt")?,
|
||||
},
|
||||
section_ranges,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
fn declaration_score_density(declaration: &ReferencedDeclaration, style: DeclarationStyle) -> f32 {
|
||||
declaration_score(declaration, style) / declaration_size(declaration, style) as f32
|
||||
}
|
||||
|
||||
fn declaration_score(declaration: &ReferencedDeclaration, style: DeclarationStyle) -> f32 {
|
||||
match style {
|
||||
DeclarationStyle::Signature => declaration.signature_score,
|
||||
DeclarationStyle::Declaration => declaration.declaration_score,
|
||||
}
|
||||
}
|
||||
|
||||
fn declaration_size(declaration: &ReferencedDeclaration, style: DeclarationStyle) -> usize {
|
||||
match style {
|
||||
DeclarationStyle::Signature => declaration.signature_range.len(),
|
||||
DeclarationStyle::Declaration => declaration.text.len(),
|
||||
}
}
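
// Editor's sketch (not part of this diff) of the greedy selection that
// `SyntaxBasedPrompt::populate` documents above: rank declarations by score
// density (score / size) and take them while they still fit in the byte budget.
// The real implementation also handles signature-to-declaration upgrades and
// parent-signature costs, which are omitted here.
fn select_by_score_density(
    declarations: &[ReferencedDeclaration],
    max_bytes: usize,
) -> Vec<usize> {
    let mut ranked: Vec<(OrderedFloat<f32>, usize)> = declarations
        .iter()
        .enumerate()
        .map(|(index, declaration)| {
            (
                OrderedFloat(declaration_score_density(
                    declaration,
                    DeclarationStyle::Declaration,
                )),
                index,
            )
        })
        .collect();
    ranked.sort_by_key(|&(density, _)| Reverse(density));

    let mut budget_used = 0;
    let mut selected = Vec::new();
    for (_, index) in ranked {
        let size = declaration_size(&declarations[index], DeclarationStyle::Declaration);
        if budget_used + size <= max_bytes {
            budget_used += size;
            selected.push(index);
        }
    }
    selected
}
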
struct PromptData {
|
||||
events: Vec<Arc<Event>>,
|
||||
cursor_point: Point,
|
||||
cursor_path: Arc<Path>, // TODO: make a common struct with cursor_point
|
||||
included_files: Vec<IncludedFile>,
|
||||
included_files: Vec<RelatedFile>,
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
@@ -1051,7 +461,7 @@ impl SeedCoder1120Prompt {
|
||||
context
|
||||
}
|
||||
|
||||
fn fmt_fim(&self, file: &IncludedFile, cursor_point: Point) -> String {
|
||||
fn fmt_fim(&self, file: &RelatedFile, cursor_point: Point) -> String {
|
||||
let mut buf = String::new();
|
||||
const FIM_SUFFIX: &str = "<[fim-suffix]>";
|
||||
const FIM_PREFIX: &str = "<[fim-prefix]>";
|
||||
|
||||
@@ -1,244 +0,0 @@
|
||||
use anyhow::Result;
|
||||
use cloud_llm_client::predict_edits_v3::{self, Excerpt};
|
||||
use indoc::indoc;
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::fmt::Write;
|
||||
|
||||
use crate::{push_events, write_codeblock};
|
||||
|
||||
pub fn build_prompt(request: predict_edits_v3::PlanContextRetrievalRequest) -> Result<String> {
|
||||
let mut prompt = SEARCH_INSTRUCTIONS.to_string();
|
||||
|
||||
if !request.events.is_empty() {
|
||||
writeln!(&mut prompt, "\n## User Edits\n\n")?;
|
||||
push_events(&mut prompt, &request.events);
|
||||
}
|
||||
|
||||
writeln!(&mut prompt, "## Cursor context\n")?;
|
||||
write_codeblock(
|
||||
&request.excerpt_path,
|
||||
&[Excerpt {
|
||||
start_line: request.excerpt_line_range.start,
|
||||
text: request.excerpt.into(),
|
||||
}],
|
||||
&[],
|
||||
request.cursor_file_max_row,
|
||||
true,
|
||||
&mut prompt,
|
||||
);
|
||||
|
||||
writeln!(&mut prompt, "{TOOL_USE_REMINDER}")?;
|
||||
|
||||
Ok(prompt)
|
||||
}
|
||||
|
||||
/// Search for relevant code
|
||||
///
|
||||
/// For the best results, run multiple queries at once with a single invocation of this tool.
|
||||
#[derive(Clone, Deserialize, Serialize, JsonSchema)]
|
||||
pub struct SearchToolInput {
|
||||
/// An array of queries to run for gathering context relevant to the next prediction
|
||||
#[schemars(length(max = 3))]
|
||||
#[serde(deserialize_with = "deserialize_queries")]
|
||||
pub queries: Box<[SearchToolQuery]>,
|
||||
}
|
||||
|
||||
fn deserialize_queries<'de, D>(deserializer: D) -> Result<Box<[SearchToolQuery]>, D::Error>
|
||||
where
|
||||
D: serde::Deserializer<'de>,
|
||||
{
|
||||
use serde::de::Error;
|
||||
|
||||
#[derive(Deserialize)]
|
||||
#[serde(untagged)]
|
||||
enum QueryCollection {
|
||||
Array(Box<[SearchToolQuery]>),
|
||||
DoubleArray(Box<[Box<[SearchToolQuery]>]>),
|
||||
Single(SearchToolQuery),
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
#[serde(untagged)]
|
||||
enum MaybeDoubleEncoded {
|
||||
SingleEncoded(QueryCollection),
|
||||
DoubleEncoded(String),
|
||||
}
|
||||
|
||||
let result = MaybeDoubleEncoded::deserialize(deserializer)?;
|
||||
|
||||
let normalized = match result {
|
||||
MaybeDoubleEncoded::SingleEncoded(value) => value,
|
||||
MaybeDoubleEncoded::DoubleEncoded(value) => {
|
||||
serde_json::from_str(&value).map_err(D::Error::custom)?
|
||||
}
|
||||
};
|
||||
|
||||
Ok(match normalized {
|
||||
QueryCollection::Array(items) => items,
|
||||
QueryCollection::Single(search_tool_query) => Box::new([search_tool_query]),
|
||||
QueryCollection::DoubleArray(double_array) => double_array.into_iter().flatten().collect(),
|
||||
})
|
||||
}
|
||||
|
||||
/// Search for relevant code by path, syntax hierarchy, and content.
|
||||
#[derive(Debug, Clone, Serialize, Deserialize, JsonSchema, Hash)]
|
||||
pub struct SearchToolQuery {
|
||||
/// 1. A glob pattern to match file paths in the codebase to search in.
|
||||
pub glob: String,
|
||||
/// 2. Regular expressions to match syntax nodes **by their first line** and hierarchy.
|
||||
///
|
||||
/// Subsequent regexes match nodes within the full content of the nodes matched by the previous regexes.
|
||||
///
|
||||
/// Example: Searching for a `User` class
|
||||
/// ["class\s+User"]
|
||||
///
|
||||
/// Example: Searching for a `get_full_name` method under a `User` class
|
||||
/// ["class\s+User", "def\sget_full_name"]
|
||||
///
|
||||
/// Skip this field to match on content alone.
|
||||
#[schemars(length(max = 3))]
|
||||
#[serde(default)]
|
||||
pub syntax_node: Vec<String>,
|
||||
/// 3. An optional regular expression to match the final content that should appear in the results.
|
||||
///
|
||||
/// - Content will be matched within all lines of the matched syntax nodes.
|
||||
/// - If syntax node regexes are provided, this field can be skipped to include as much of the node itself as possible.
|
||||
/// - If no syntax node regexes are provided, the content will be matched within the entire file.
|
||||
pub content: Option<String>,
}
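
// Editor's sketch (not part of this diff): the `get_full_name` example from the
// doc comments above, expressed as the JSON payload the `search` tool receives.
// The `**/*.py` glob is illustrative.
fn example_user_method_query() -> serde_json::Result<SearchToolInput> {
    serde_json::from_str(
        r#"{
            "queries": [{
                "glob": "**/*.py",
                "syntax_node": ["class\\s+User", "def\\sget_full_name"],
                "content": null
            }]
        }"#,
    )
}
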
pub const TOOL_NAME: &str = "search";
|
||||
|
||||
const SEARCH_INSTRUCTIONS: &str = indoc! {r#"
|
||||
You are part of an edit prediction system in a code editor.
|
||||
Your role is to search for code that will serve as context for predicting the next edit.
|
||||
|
||||
- Analyze the user's recent edits and current cursor context
|
||||
- Use the `search` tool to find code that is relevant for predicting the next edit
|
||||
- Focus on finding:
|
||||
- Code patterns that might need similar changes based on the recent edits
|
||||
- Functions, variables, types, and constants referenced in the current cursor context
|
||||
- Related implementations, usages, or dependencies that may require consistent updates
|
||||
- How items defined in the cursor excerpt are used or altered
|
||||
- You will not be able to filter results or perform subsequent queries, so keep searches as targeted as possible
|
||||
- Use `syntax_node` parameter whenever you're looking for a particular type, class, or function
|
||||
- Avoid using wildcard globs if you already know the file path of the content you're looking for
|
||||
"#};
|
||||
|
||||
const TOOL_USE_REMINDER: &str = indoc! {"
|
||||
--
|
||||
Analyze the user's intent in one to two sentences, then call the `search` tool.
|
||||
"};
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use serde_json::json;
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_deserialize_queries() {
|
||||
let single_query_json = indoc! {r#"{
|
||||
"queries": {
|
||||
"glob": "**/*.rs",
|
||||
"syntax_node": ["fn test"],
|
||||
"content": "assert"
|
||||
}
|
||||
}"#};
|
||||
|
||||
let flat_input: SearchToolInput = serde_json::from_str(single_query_json).unwrap();
|
||||
assert_eq!(flat_input.queries.len(), 1);
|
||||
assert_eq!(flat_input.queries[0].glob, "**/*.rs");
|
||||
assert_eq!(flat_input.queries[0].syntax_node, vec!["fn test"]);
|
||||
assert_eq!(flat_input.queries[0].content, Some("assert".to_string()));
|
||||
|
||||
let flat_json = indoc! {r#"{
|
||||
"queries": [
|
||||
{
|
||||
"glob": "**/*.rs",
|
||||
"syntax_node": ["fn test"],
|
||||
"content": "assert"
|
||||
},
|
||||
{
|
||||
"glob": "**/*.ts",
|
||||
"syntax_node": [],
|
||||
"content": null
|
||||
}
|
||||
]
|
||||
}"#};
|
||||
|
||||
let flat_input: SearchToolInput = serde_json::from_str(flat_json).unwrap();
|
||||
assert_eq!(flat_input.queries.len(), 2);
|
||||
assert_eq!(flat_input.queries[0].glob, "**/*.rs");
|
||||
assert_eq!(flat_input.queries[0].syntax_node, vec!["fn test"]);
|
||||
assert_eq!(flat_input.queries[0].content, Some("assert".to_string()));
|
||||
assert_eq!(flat_input.queries[1].glob, "**/*.ts");
|
||||
assert_eq!(flat_input.queries[1].syntax_node.len(), 0);
|
||||
assert_eq!(flat_input.queries[1].content, None);
|
||||
|
||||
let nested_json = indoc! {r#"{
|
||||
"queries": [
|
||||
[
|
||||
{
|
||||
"glob": "**/*.rs",
|
||||
"syntax_node": ["fn test"],
|
||||
"content": "assert"
|
||||
}
|
||||
],
|
||||
[
|
||||
{
|
||||
"glob": "**/*.ts",
|
||||
"syntax_node": [],
|
||||
"content": null
|
||||
}
|
||||
]
|
||||
]
|
||||
}"#};
|
||||
|
||||
let nested_input: SearchToolInput = serde_json::from_str(nested_json).unwrap();
|
||||
|
||||
assert_eq!(nested_input.queries.len(), 2);
|
||||
|
||||
assert_eq!(nested_input.queries[0].glob, "**/*.rs");
|
||||
assert_eq!(nested_input.queries[0].syntax_node, vec!["fn test"]);
|
||||
assert_eq!(nested_input.queries[0].content, Some("assert".to_string()));
|
||||
assert_eq!(nested_input.queries[1].glob, "**/*.ts");
|
||||
assert_eq!(nested_input.queries[1].syntax_node.len(), 0);
|
||||
assert_eq!(nested_input.queries[1].content, None);
|
||||
|
||||
let double_encoded_queries = serde_json::to_string(&json!({
|
||||
"queries": serde_json::to_string(&json!([
|
||||
{
|
||||
"glob": "**/*.rs",
|
||||
"syntax_node": ["fn test"],
|
||||
"content": "assert"
|
||||
},
|
||||
{
|
||||
"glob": "**/*.ts",
|
||||
"syntax_node": [],
|
||||
"content": null
|
||||
}
|
||||
])).unwrap()
|
||||
}))
|
||||
.unwrap();
|
||||
|
||||
let double_encoded_input: SearchToolInput =
|
||||
serde_json::from_str(&double_encoded_queries).unwrap();
|
||||
|
||||
assert_eq!(double_encoded_input.queries.len(), 2);
|
||||
|
||||
assert_eq!(double_encoded_input.queries[0].glob, "**/*.rs");
|
||||
assert_eq!(double_encoded_input.queries[0].syntax_node, vec!["fn test"]);
|
||||
assert_eq!(
|
||||
double_encoded_input.queries[0].content,
|
||||
Some("assert".to_string())
|
||||
);
|
||||
assert_eq!(double_encoded_input.queries[1].glob, "**/*.ts");
|
||||
assert_eq!(double_encoded_input.queries[1].syntax_node.len(), 0);
|
||||
assert_eq!(double_encoded_input.queries[1].content, None);
|
||||
|
||||
// ### ERROR Switching from var declarations to lexical declarations [RUN 073]
|
||||
// invalid search json {"queries": ["express/lib/response.js", "var\\s+[a-zA-Z_][a-zA-Z0-9_]*\\s*=.*;", "function.*\\(.*\\).*\\{.*\\}"]}
|
||||
}
|
||||
}
|
||||
@@ -10,7 +10,7 @@ path = "src/codestral.rs"
|
||||
|
||||
[dependencies]
|
||||
anyhow.workspace = true
|
||||
edit_prediction.workspace = true
|
||||
edit_prediction_types.workspace = true
|
||||
edit_prediction_context.workspace = true
|
||||
futures.workspace = true
|
||||
gpui.workspace = true
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
use anyhow::{Context as _, Result};
|
||||
use edit_prediction::{Direction, EditPrediction, EditPredictionProvider};
|
||||
use edit_prediction_context::{EditPredictionExcerpt, EditPredictionExcerptOptions};
|
||||
use edit_prediction_types::{Direction, EditPrediction, EditPredictionDelegate};
|
||||
use futures::AsyncReadExt;
|
||||
use gpui::{App, Context, Entity, Task};
|
||||
use http_client::HttpClient;
|
||||
@@ -43,17 +43,17 @@ impl CurrentCompletion {
|
||||
/// Attempts to adjust the edits based on changes made to the buffer since the completion was generated.
|
||||
/// Returns None if the user's edits conflict with the predicted edits.
|
||||
fn interpolate(&self, new_snapshot: &BufferSnapshot) -> Option<Vec<(Range<Anchor>, Arc<str>)>> {
|
||||
edit_prediction::interpolate_edits(&self.snapshot, new_snapshot, &self.edits)
|
||||
edit_prediction_types::interpolate_edits(&self.snapshot, new_snapshot, &self.edits)
|
||||
}
|
||||
}
|
||||
|
||||
pub struct CodestralCompletionProvider {
|
||||
pub struct CodestralEditPredictionDelegate {
|
||||
http_client: Arc<dyn HttpClient>,
|
||||
pending_request: Option<Task<Result<()>>>,
|
||||
current_completion: Option<CurrentCompletion>,
|
||||
}
|
||||
|
||||
impl CodestralCompletionProvider {
|
||||
impl CodestralEditPredictionDelegate {
|
||||
pub fn new(http_client: Arc<dyn HttpClient>) -> Self {
|
||||
Self {
|
||||
http_client,
|
||||
@@ -165,7 +165,7 @@ impl CodestralCompletionProvider {
|
||||
}
|
||||
}
|
||||
|
||||
impl EditPredictionProvider for CodestralCompletionProvider {
|
||||
impl EditPredictionDelegate for CodestralEditPredictionDelegate {
|
||||
fn name() -> &'static str {
|
||||
"codestral"
|
||||
}
|
||||
@@ -174,7 +174,7 @@ impl EditPredictionProvider for CodestralCompletionProvider {
|
||||
"Codestral"
|
||||
}
|
||||
|
||||
fn show_completions_in_menu() -> bool {
|
||||
fn show_predictions_in_menu() -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
@@ -239,7 +239,6 @@ impl EditPredictionProvider for CodestralCompletionProvider {
|
||||
cursor_point,
|
||||
&snapshot,
|
||||
&EXCERPT_OPTIONS,
|
||||
None,
|
||||
)
|
||||
.context("Line containing cursor doesn't fit in excerpt max bytes")?;
|
||||
|
||||
|
||||
@@ -65,7 +65,7 @@ tokio = { workspace = true, features = ["full"] }
|
||||
toml.workspace = true
|
||||
tower = "0.4"
|
||||
tower-http = { workspace = true, features = ["trace"] }
|
||||
tracing = "0.1.40"
|
||||
tracing.workspace = true
|
||||
tracing-subscriber = { version = "0.3.18", features = ["env-filter", "json", "registry", "tracing-log"] } # workaround for https://github.com/tokio-rs/tracing/issues/2927
|
||||
util.workspace = true
|
||||
uuid.workspace = true
|
||||
|
||||
@@ -63,15 +63,3 @@ Deployment is triggered by pushing to the `collab-staging` (or `collab-productio
|
||||
- `./script/deploy-collab production`
|
||||
|
||||
You can tell what is currently deployed with `./script/what-is-deployed`.
|
||||
|
||||
# Database Migrations
|
||||
|
||||
To create a new migration:
|
||||
|
||||
```sh
|
||||
./script/create-migration <name>
|
||||
```
|
||||
|
||||
Migrations are run automatically on service start, so run `foreman start` again. The service will crash if the migrations fail.
|
||||
|
||||
When you create a new migration, you also need to update the [SQLite schema](./migrations.sqlite/20221109000000_test_schema.sql) that is used for testing.
|
||||
|
||||
@@ -1,21 +0,0 @@
|
||||
apiVersion: batch/v1
|
||||
kind: Job
|
||||
metadata:
|
||||
namespace: ${ZED_KUBE_NAMESPACE}
|
||||
name: ${ZED_MIGRATE_JOB_NAME}
|
||||
spec:
|
||||
template:
|
||||
spec:
|
||||
restartPolicy: Never
|
||||
containers:
|
||||
- name: migrator
|
||||
imagePullPolicy: Always
|
||||
image: ${ZED_IMAGE_ID}
|
||||
args:
|
||||
- migrate
|
||||
env:
|
||||
- name: DATABASE_URL
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: database
|
||||
key: url
|
||||
@@ -121,6 +121,8 @@ CREATE TABLE "project_repositories" (
|
||||
"merge_message" VARCHAR,
|
||||
"branch_summary" VARCHAR,
|
||||
"head_commit_details" VARCHAR,
|
||||
"remote_upstream_url" VARCHAR,
|
||||
"remote_origin_url" VARCHAR,
|
||||
PRIMARY KEY (project_id, id)
|
||||
);
|
||||
|
||||
|
||||
@@ -1,20 +0,0 @@
|
||||
CREATE TABLE IF NOT EXISTS "sessions" (
|
||||
"id" VARCHAR NOT NULL PRIMARY KEY,
|
||||
"expires" TIMESTAMP WITH TIME ZONE NULL,
|
||||
"session" TEXT NOT NULL
|
||||
);
|
||||
|
||||
CREATE TABLE IF NOT EXISTS "users" (
|
||||
"id" SERIAL PRIMARY KEY,
|
||||
"github_login" VARCHAR,
|
||||
"admin" BOOLEAN
|
||||
);
|
||||
|
||||
CREATE UNIQUE INDEX "index_users_github_login" ON "users" ("github_login");
|
||||
|
||||
CREATE TABLE IF NOT EXISTS "signups" (
|
||||
"id" SERIAL PRIMARY KEY,
|
||||
"github_login" VARCHAR,
|
||||
"email_address" VARCHAR,
|
||||
"about" TEXT
|
||||
);
|
||||
@@ -1,7 +0,0 @@
|
||||
CREATE TABLE IF NOT EXISTS "access_tokens" (
|
||||
"id" SERIAL PRIMARY KEY,
|
||||
"user_id" INTEGER REFERENCES users (id),
|
||||
"hash" VARCHAR(128)
|
||||
);
|
||||
|
||||
CREATE INDEX "index_access_tokens_user_id" ON "access_tokens" ("user_id");
|
||||
@@ -1,46 +0,0 @@
|
||||
CREATE TABLE IF NOT EXISTS "orgs" (
|
||||
"id" SERIAL PRIMARY KEY,
|
||||
"name" VARCHAR NOT NULL,
|
||||
"slug" VARCHAR NOT NULL
|
||||
);
|
||||
|
||||
CREATE UNIQUE INDEX "index_orgs_slug" ON "orgs" ("slug");
|
||||
|
||||
CREATE TABLE IF NOT EXISTS "org_memberships" (
|
||||
"id" SERIAL PRIMARY KEY,
|
||||
"org_id" INTEGER REFERENCES orgs (id) NOT NULL,
|
||||
"user_id" INTEGER REFERENCES users (id) NOT NULL,
|
||||
"admin" BOOLEAN NOT NULL
|
||||
);
|
||||
|
||||
CREATE INDEX "index_org_memberships_user_id" ON "org_memberships" ("user_id");
|
||||
CREATE UNIQUE INDEX "index_org_memberships_org_id_and_user_id" ON "org_memberships" ("org_id", "user_id");
|
||||
|
||||
CREATE TABLE IF NOT EXISTS "channels" (
|
||||
"id" SERIAL PRIMARY KEY,
|
||||
"owner_id" INTEGER NOT NULL,
|
||||
"owner_is_user" BOOLEAN NOT NULL,
|
||||
"name" VARCHAR NOT NULL
|
||||
);
|
||||
|
||||
CREATE UNIQUE INDEX "index_channels_owner_and_name" ON "channels" ("owner_is_user", "owner_id", "name");
|
||||
|
||||
CREATE TABLE IF NOT EXISTS "channel_memberships" (
|
||||
"id" SERIAL PRIMARY KEY,
|
||||
"channel_id" INTEGER REFERENCES channels (id) NOT NULL,
|
||||
"user_id" INTEGER REFERENCES users (id) NOT NULL,
|
||||
"admin" BOOLEAN NOT NULL
|
||||
);
|
||||
|
||||
CREATE INDEX "index_channel_memberships_user_id" ON "channel_memberships" ("user_id");
|
||||
CREATE UNIQUE INDEX "index_channel_memberships_channel_id_and_user_id" ON "channel_memberships" ("channel_id", "user_id");
|
||||
|
||||
CREATE TABLE IF NOT EXISTS "channel_messages" (
|
||||
"id" SERIAL PRIMARY KEY,
|
||||
"channel_id" INTEGER REFERENCES channels (id) NOT NULL,
|
||||
"sender_id" INTEGER REFERENCES users (id) NOT NULL,
|
||||
"body" TEXT NOT NULL,
|
||||
"sent_at" TIMESTAMP
|
||||
);
|
||||
|
||||
CREATE INDEX "index_channel_messages_channel_id" ON "channel_messages" ("channel_id");
|
||||
@@ -1,4 +0,0 @@
|
||||
ALTER TABLE "channel_messages"
|
||||
ADD "nonce" UUID NOT NULL DEFAULT gen_random_uuid();
|
||||
|
||||
CREATE UNIQUE INDEX "index_channel_messages_nonce" ON "channel_messages" ("nonce");
|
||||
@@ -1,4 +0,0 @@
|
||||
ALTER TABLE "signups"
|
||||
ADD "wants_releases" BOOLEAN,
|
||||
ADD "wants_updates" BOOLEAN,
|
||||
ADD "wants_community" BOOLEAN;
|
||||
@@ -1 +0,0 @@
|
||||
DROP TABLE IF EXISTS "signups";
|
||||