Compare commits
186 commits: linux/keys...acp-codex
| Author | SHA1 | Date |
|---|---|---|
| | 4cb928630e | |
| | cd0263bb85 | |
| | 6d775db481 | |
| | dab0b3509d | |
| | 0f72d7ed52 | |
| | 1ceda2babd | |
| | ae0d4f6a0d | |
| | 8980526a85 | |
| | b4dc7f8a8a | |
| | b94649ce1e | |
| | 948c1f22bb | |
| | b0eac4267d | |
| | 8e4555455c | |
| | 5b97cd1900 | |
| | ceab139f54 | |
| | 4df7f52bf3 | |
| | 1e67e30034 | |
| | 758c5fb955 | |
| | acb3ecef0c | |
| | ad2bfa3edd | |
| | 1d72fa8e9e | |
| | 1ce384bbda | |
| | 9f302df6d6 | |
| | ebad5ca50e | |
| | b9ff538747 | |
| | c0261a1ea9 | |
| | f43bcc1492 | |
| | e23a4564cc | |
| | f82ef1f76f | |
| | b4c2ae5196 | |
| | 0023773c68 | |
| | 0bde929d54 | |
| | 6f60939d30 | |
| | a6a7a1cc28 | |
| | 13f4a093c8 | |
| | 573836a654 | |
| | 048dc47d87 | |
| | ffc69b07e5 | |
| | dc8d0868ec | |
| | 58807f0dd2 | |
| | 313f5968eb | |
| | 9ab3d55211 | |
| | e339566dab | |
| | 8ee5bf2c38 | |
| | b0e0485b32 | |
| | 2a49f40cf5 | |
| | 21b4a2ecdd | |
| | 2a9a82d757 | |
| | 6e147b3b91 | |
| | 875c86e3ef | |
| | 406ffb1e20 | |
| | 257bedf09b | |
| | 37927a5dc8 | |
| | d4110fd2ab | |
| | 3d160a6e26 | |
| | c29c46d3b6 | |
| | 312369c84f | |
| | 42b2b65241 | |
| | a529103825 | |
| | 1ed3f9eb42 | |
| | 59d524427e | |
| | ee4b9a27a2 | |
| | ae65ff95a6 | |
| | fc24102491 | |
| | 7ca3d969e0 | |
| | afbd2b760f | |
| | 0a3ef40c2f | |
| | 0ebbeec11c | |
| | 0ada4ce900 | |
| | 572d3d637a | |
| | 3751737621 | |
| | ec52e9281a | |
| | af0031ae8b | |
| | b398935081 | |
| | 78b7737368 | |
| | 57e8f5c5b9 | |
| | 729cde33f1 | |
| | ebbf02e25b | |
| | 3ecdfc9b5a | |
| | f9561da673 | |
| | b3747d9a21 | |
| | 95de2bfc74 | |
| | d7bb1c1d0e | |
| | 5f3e7a5f91 | |
| | 0671a4d5ae | |
| | bd78f2c493 | |
| | d1abba0d33 | |
| | 05065985e7 | |
| | 7ab8f431a7 | |
| | 050ed85d71 | |
| | 858e176a1c | |
| | a65c0b2bff | |
| | 848a86a385 | |
| | 52f2b32557 | |
| | 8dca4d150e | |
| | 440beb8a90 | |
| | ce63a6ddd8 | |
| | 26ba6e7e00 | |
| | 363a265051 | |
| | 37e73e3277 | |
| | 32f5132bde | |
| | fd5650d4ed | |
| | 8b6b039b63 | |
| | 4848bd705e | |
| | 45d0686129 | |
| | eca36c502e | |
| | 6673c7cd4c | |
| | a2f5c47e2d | |
| | c6a6db9754 | |
| | 6f9e052edb | |
| | 2edf85f054 | |
| | 00ec243771 | |
| | 84124c60db | |
| | cf1ce1beed | |
| | e4effa5e01 | |
| | f50041779d | |
| | 51df8a17ef | |
| | 85d12548a1 | |
| | 0af7d32b7d | |
| | 1cadff9311 | |
| | 8f6b9f0d65 | |
| | 970a1066f5 | |
| | 833bc6979a | |
| | a8cc927303 | |
| | 13ddd5e4cb | |
| | 1b6e212eba | |
| | 46834d31f1 | |
| | e070c81687 | |
| | 5b61b8c8ed | |
| | 625ce12a3e | |
| | 12bc8907d9 | |
| | 67c765a99a | |
| | 206cce6783 | |
| | c3edc2cfc1 | |
| | 625a4b90a5 | |
| | fbead09c30 | |
| | 0797f7b66e | |
| | 6f6c2915b2 | |
| | 0bd65829f7 | |
| | 90bf602ceb | |
| | cd024b8870 | |
| | af71e15ea0 | |
| | d0e01dbd8f | |
| | d65855c4a1 | |
| | 70351360d7 | |
| | 993e0f55ec | |
| | 496bf0ec43 | |
| | c09f484ec4 | |
| | a58a75c0f6 | |
| | d1a6c5d494 | |
| | 10028aaae8 | |
| | 3b9bb521f4 | |
| | 7eb739d489 | |
| | b4cbea50bb | |
| | 153840199e | |
| | 8812e7cd14 | |
| | 56d0ae6782 | |
| | d52f07b77c | |
| | 089ce8f6aa | |
| | 842ac984d5 | |
| | 87362c602f | |
| | 94916cd3b6 | |
| | 7915b9f93f | |
| | 33f1ac8b34 | |
| | a1188848ef | |
| | 7588280915 | |
| | f82fdaa0a4 | |
| | 41085f8f55 | |
| | 8e1d341d09 | |
| | 9d2b7c8033 | |
| | c6603e4fba | |
| | c549b712fd | |
| | d6bff274eb | |
| | cfc9cfa4ab | |
| | e2e529bd94 | |
| | e6c41b577b | |
| | f8f827583d | |
| | 36c325bc60 | |
| | f4106ad404 | |
| | 1583dd2d6f | |
| | d7fd9245cd | |
| | 5f21a9bd32 | |
| | c30e28179a | |
| | 8bc1396a55 | |
| | 51c24e2010 | |
| | 3169f06404 | |
@@ -23,6 +23,8 @@ workspace-members = [
]
third-party = [
{ name = "reqwest", version = "0.11.27" },
# build of remote_server should not include scap / its x11 dependency
{ name = "scap", git = "https://github.com/zed-industries/scap", rev = "270538dc780f5240723233ff901e1054641ed318" },
]

[final-excludes]
@@ -1,64 +0,0 @@
name: "Trusted Signing on Windows"
description: "Install trusted signing on Windows."

# Modified from https://github.com/Azure/trusted-signing-action
runs:
using: "composite"
steps:
- name: Set variables
id: set-variables
shell: "pwsh"
run: |
$defaultPath = $env:PSModulePath -split ';' | Select-Object -First 1
"PSMODULEPATH=$defaultPath" | Out-File -FilePath $env:GITHUB_OUTPUT -Append

"TRUSTED_SIGNING_MODULE_VERSION=0.5.3" | Out-File -FilePath $env:GITHUB_OUTPUT -Append
"BUILD_TOOLS_NUGET_VERSION=10.0.22621.3233" | Out-File -FilePath $env:GITHUB_OUTPUT -Append
"TRUSTED_SIGNING_NUGET_VERSION=1.0.53" | Out-File -FilePath $env:GITHUB_OUTPUT -Append
"DOTNET_SIGNCLI_NUGET_VERSION=0.9.1-beta.24469.1" | Out-File -FilePath $env:GITHUB_OUTPUT -Append

- name: Cache TrustedSigning PowerShell module
id: cache-module
uses: actions/cache@v4
env:
cache-name: cache-module
with:
path: ${{ steps.set-variables.outputs.PSMODULEPATH }}\TrustedSigning\${{ steps.set-variables.outputs.TRUSTED_SIGNING_MODULE_VERSION }}
key: TrustedSigning-${{ steps.set-variables.outputs.TRUSTED_SIGNING_MODULE_VERSION }}
if: ${{ inputs.cache-dependencies == 'true' }}

- name: Cache Microsoft.Windows.SDK.BuildTools NuGet package
id: cache-buildtools
uses: actions/cache@v4
env:
cache-name: cache-buildtools
with:
path: ~\AppData\Local\TrustedSigning\Microsoft.Windows.SDK.BuildTools\Microsoft.Windows.SDK.BuildTools.${{ steps.set-variables.outputs.BUILD_TOOLS_NUGET_VERSION }}
key: Microsoft.Windows.SDK.BuildTools-${{ steps.set-variables.outputs.BUILD_TOOLS_NUGET_VERSION }}
if: ${{ inputs.cache-dependencies == 'true' }}

- name: Cache Microsoft.Trusted.Signing.Client NuGet package
id: cache-tsclient
uses: actions/cache@v4
env:
cache-name: cache-tsclient
with:
path: ~\AppData\Local\TrustedSigning\Microsoft.Trusted.Signing.Client\Microsoft.Trusted.Signing.Client.${{ steps.set-variables.outputs.TRUSTED_SIGNING_NUGET_VERSION }}
key: Microsoft.Trusted.Signing.Client-${{ steps.set-variables.outputs.TRUSTED_SIGNING_NUGET_VERSION }}
if: ${{ inputs.cache-dependencies == 'true' }}

- name: Cache SignCli NuGet package
id: cache-signcli
uses: actions/cache@v4
env:
cache-name: cache-signcli
with:
path: ~\AppData\Local\TrustedSigning\sign\sign.${{ steps.set-variables.outputs.DOTNET_SIGNCLI_NUGET_VERSION }}
key: SignCli-${{ steps.set-variables.outputs.DOTNET_SIGNCLI_NUGET_VERSION }}
if: ${{ inputs.cache-dependencies == 'true' }}

- name: Install Trusted Signing module
shell: "pwsh"
run: |
Install-Module -Name TrustedSigning -RequiredVersion ${{ steps.set-variables.outputs.TRUSTED_SIGNING_MODULE_VERSION }} -Force -Repository PSGallery
if: ${{ inputs.cache-dependencies != 'true' || steps.cache-module.outputs.cache-hit != 'true' }}
.github/workflows/ci.yml (32 changes, vendored)
@@ -21,6 +21,9 @@ env:
CARGO_TERM_COLOR: always
CARGO_INCREMENTAL: 0
RUST_BACKTRACE: 1
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}

jobs:
job_spec:
@@ -52,9 +55,10 @@ jobs:
fi
# Specify anything which should skip full CI in this regex:
# - docs/
# - script/update_top_ranking_issues/
# - .github/ISSUE_TEMPLATE/
# - .github/workflows/ (except .github/workflows/ci.yml)
SKIP_REGEX='^(docs/|\.github/(ISSUE_TEMPLATE|workflows/(?!ci)))'
SKIP_REGEX='^(docs/|script/update_top_ranking_issues/|\.github/(ISSUE_TEMPLATE|workflows/(?!ci)))'
if [[ $(git diff --name-only $COMPARE_REV ${{ github.sha }} | grep -vP "$SKIP_REGEX") ]]; then
echo "run_tests=true" >> $GITHUB_OUTPUT
else
@@ -71,7 +75,7 @@ jobs:
echo "run_license=false" >> $GITHUB_OUTPUT
fi
NIX_REGEX='^(nix/|flake\.|Cargo\.|rust-toolchain.toml|\.cargo/config.toml)'
if [[ $(git diff --name-only $COMPARE_REV ${{ github.sha }} | grep "$NIX_REGEX") ]]; then
if [[ $(git diff --name-only $COMPARE_REV ${{ github.sha }} | grep -P "$NIX_REGEX") ]]; then
echo "run_nix=true" >> $GITHUB_OUTPUT
else
echo "run_nix=false" >> $GITHUB_OUTPUT
@@ -492,9 +496,6 @@ jobs:
APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }}
APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }}
APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }}
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
steps:
- name: Install Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
@@ -577,10 +578,6 @@ jobs:
startsWith(github.ref, 'refs/tags/v')
|| contains(github.event.pull_request.labels.*.name, 'run-bundling')
needs: [linux_tests]
env:
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
@@ -634,10 +631,6 @@ jobs:
startsWith(github.ref, 'refs/tags/v')
|| contains(github.event.pull_request.labels.*.name, 'run-bundling')
needs: [linux_tests]
env:
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
@@ -686,20 +679,18 @@ jobs:
timeout-minutes: 60
runs-on: github-8vcpu-ubuntu-2404
if: |
false && (
startsWith(github.ref, 'refs/tags/v')
|| contains(github.event.pull_request.labels.*.name, 'run-bundling')
)
needs: [linux_tests]
name: Build Zed on FreeBSD
# env:
# MYTOKEN : ${{ secrets.MYTOKEN }}
# MYTOKEN2: "value2"
steps:
- uses: actions/checkout@v4
- name: Build FreeBSD remote-server
id: freebsd-build
uses: vmactions/freebsd-vm@c3ae29a132c8ef1924775414107a97cac042aad5 # v1.2.0
with:
# envs: "MYTOKEN MYTOKEN2"
usesh: true
release: 13.5
copyback: true
@@ -766,8 +757,6 @@ jobs:
ACCOUNT_NAME: ${{ vars.AZURE_SIGNING_ACCOUNT_NAME }}
CERT_PROFILE_NAME: ${{ vars.AZURE_SIGNING_CERT_PROFILE_NAME }}
ENDPOINT: ${{ vars.AZURE_SIGNING_ENDPOINT }}
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
FILE_DIGEST: SHA256
TIMESTAMP_DIGEST: SHA256
TIMESTAMP_SERVER: "http://timestamp.acs.microsoft.com"
@@ -784,9 +773,6 @@ jobs:
# This exports RELEASE_CHANNEL into env (GITHUB_ENV)
script/determine-release-channel.ps1

- name: Install trusted signing
uses: ./.github/actions/install_trusted_signing

- name: Build Zed installer
working-directory: ${{ env.ZED_WORKSPACE }}
run: script/bundle-windows.ps1
@@ -814,7 +800,7 @@ jobs:
if: |
startsWith(github.ref, 'refs/tags/v')
&& endsWith(github.ref, '-pre') && !endsWith(github.ref, '.0-pre')
needs: [bundle-mac, bundle-linux-x86_x64, bundle-linux-aarch64, bundle-windows-x64, freebsd]
needs: [bundle-mac, bundle-linux-x86_x64, bundle-linux-aarch64, bundle-windows-x64]
runs-on:
- self-hosted
- bundle
.github/workflows/release_nightly.yml (24 changes, vendored)
@@ -12,6 +12,9 @@ env:
CARGO_TERM_COLOR: always
CARGO_INCREMENTAL: 0
RUST_BACKTRACE: 1
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}

jobs:
style:
@@ -91,9 +94,6 @@ jobs:
APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }}
APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }}
APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }}
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
steps:
- name: Install Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
@@ -125,10 +125,6 @@ jobs:
runs-on:
- buildjet-16vcpu-ubuntu-2004
needs: tests
env:
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
@@ -164,10 +160,6 @@ jobs:
runs-on:
- buildjet-16vcpu-ubuntu-2204-arm
needs: tests
env:
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
@@ -195,12 +187,9 @@ jobs:

freebsd:
timeout-minutes: 60
if: github.repository_owner == 'zed-industries'
if: false && github.repository_owner == 'zed-industries'
runs-on: github-8vcpu-ubuntu-2404
needs: tests
env:
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
name: Build Zed on FreeBSD
# env:
# MYTOKEN : ${{ secrets.MYTOKEN }}
@@ -257,8 +246,6 @@ jobs:
ACCOUNT_NAME: ${{ vars.AZURE_SIGNING_ACCOUNT_NAME }}
CERT_PROFILE_NAME: ${{ vars.AZURE_SIGNING_CERT_PROFILE_NAME }}
ENDPOINT: ${{ vars.AZURE_SIGNING_ENDPOINT }}
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
FILE_DIGEST: SHA256
TIMESTAMP_DIGEST: SHA256
TIMESTAMP_SERVER: "http://timestamp.acs.microsoft.com"
@@ -276,9 +263,6 @@ jobs:
Write-Host "Publishing version: $version on release channel nightly"
"nightly" | Set-Content -Path "crates/zed/RELEASE_CHANNEL"

- name: Install trusted signing
uses: ./.github/actions/install_trusted_signing

- name: Build Zed installer
working-directory: ${{ env.ZED_WORKSPACE }}
run: script/bundle-windows.ps1
@@ -40,7 +40,7 @@
},
"file_types": {
"Dockerfile": ["Dockerfile*[!dockerignore]"],
"JSONC": ["assets/**/*.json", "renovate.json"],
"JSONC": ["**/assets/**/*.json", "renovate.json"],
"Git Ignore": ["dockerignore"]
},
"hard_tabs": false,
Cargo.lock (132 changes, generated)
@@ -3,12 +3,12 @@
version = 4

[[package]]
name = "acp"
name = "acp_thread"
version = "0.1.0"
dependencies = [
"agent_servers",
"agentic-coding-protocol",
"anyhow",
"assistant_tool",
"async-pipe",
"buffer_diff",
"editor",
@@ -20,6 +20,7 @@ dependencies = [
"language",
"markdown",
"project",
"serde",
"serde_json",
"settings",
"smol",
@@ -138,16 +139,29 @@ dependencies = [
name = "agent_servers"
version = "0.1.0"
dependencies = [
"acp_thread",
"agentic-coding-protocol",
"anyhow",
"collections",
"context_server",
"env_logger 0.11.8",
"futures 0.3.31",
"gpui",
"indoc",
"itertools 0.14.0",
"language",
"log",
"paths",
"project",
"schemars",
"serde",
"serde_json",
"settings",
"smol",
"tempfile",
"ui",
"util",
"watch",
"which 6.0.3",
"workspace-hack",
]
@@ -175,7 +189,7 @@ dependencies = [
name = "agent_ui"
version = "0.1.0"
dependencies = [
"acp",
"acp_thread",
"agent",
"agent_servers",
"agent_settings",
@@ -263,16 +277,18 @@ dependencies = [

[[package]]
name = "agentic-coding-protocol"
version = "0.0.6"
version = "0.0.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d1ac0351749af7bf53c65042ef69fefb9351aa8b7efa0a813d6281377605c37d"
checksum = "0e276b798eddd02562a339340a96919d90bbfcf78de118fdddc932524646fac7"
dependencies = [
"anyhow",
"chrono",
"derive_more 2.0.1",
"futures 0.3.31",
"log",
"parking_lot",
"schemars",
"semver",
"serde",
"serde_json",
]
@@ -675,7 +691,7 @@ dependencies = [
"anyhow",
"async-trait",
"collections",
"derive_more",
"derive_more 0.99.19",
"extension",
"futures 0.3.31",
"gpui",
@@ -738,10 +754,11 @@ dependencies = [
"clock",
"collections",
"ctor",
"derive_more",
"derive_more 0.99.19",
"futures 0.3.31",
"gpui",
"icons",
"indoc",
"language",
"language_model",
"log",
@@ -774,7 +791,8 @@ dependencies = [
"clock",
"collections",
"component",
"derive_more",
"derive_more 0.99.19",
"diffy",
"editor",
"feature_flags",
"fs",
@@ -1231,7 +1249,7 @@ version = "0.1.0"
dependencies = [
"anyhow",
"collections",
"derive_more",
"derive_more 0.99.19",
"gpui",
"parking_lot",
"rodio",
@@ -2924,7 +2942,7 @@ dependencies = [
"cocoa 0.26.0",
"collections",
"credentials_provider",
"derive_more",
"derive_more 0.99.19",
"feature_flags",
"fs",
"futures 0.3.31",
@@ -3108,10 +3126,11 @@ dependencies = [
"context_server",
"ctor",
"dap",
"dap-types",
"dap_adapters",
"dashmap 6.1.0",
"debugger_ui",
"derive_more",
"derive_more 0.99.19",
"editor",
"envy",
"extension",
@@ -3164,6 +3183,7 @@ dependencies = [
"session",
"settings",
"sha2",
"smol",
"sqlx",
"strum 0.27.1",
"subtle",
@@ -3316,7 +3336,7 @@ name = "command_palette_hooks"
version = "0.1.0"
dependencies = [
"collections",
"derive_more",
"derive_more 0.99.19",
"gpui",
"workspace-hack",
]
@@ -3404,12 +3424,14 @@ dependencies = [
"futures 0.3.31",
"gpui",
"log",
"net",
"parking_lot",
"postage",
"schemars",
"serde",
"serde_json",
"smol",
"tempfile",
"url",
"util",
"workspace-hack",
@@ -4391,17 +4413,21 @@ dependencies = [
"futures 0.3.31",
"fuzzy",
"gpui",
"hex",
"indoc",
"itertools 0.14.0",
"language",
"log",
"menu",
"notifications",
"parking_lot",
"parse_int",
"paths",
"picker",
"pretty_assertions",
"project",
"rpc",
"schemars",
"serde",
"serde_json",
"serde_json_lenient",
@@ -4520,6 +4546,27 @@ dependencies = [
"syn 2.0.101",
]

[[package]]
name = "derive_more"
version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "093242cf7570c207c83073cf82f79706fe7b8317e98620a47d5be7c3d8497678"
dependencies = [
"derive_more-impl",
]

[[package]]
name = "derive_more-impl"
version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bda628edc44c4bb645fbe0f758797143e4e07926f7ebf4e9bdfbd3d2ce621df3"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.101",
"unicode-xid",
]

[[package]]
name = "derive_refineable"
version = "0.1.0"
@@ -6218,7 +6265,7 @@ dependencies = [
"askpass",
"async-trait",
"collections",
"derive_more",
"derive_more 0.99.19",
"futures 0.3.31",
"git2",
"gpui",
@@ -7235,7 +7282,7 @@ dependencies = [
"core-video",
"cosmic-text",
"ctor",
"derive_more",
"derive_more 0.99.19",
"embed-resource",
"env_logger 0.11.8",
"etagere",
@@ -7781,7 +7828,7 @@ version = "0.1.0"
dependencies = [
"anyhow",
"bytes 1.10.1",
"derive_more",
"derive_more 0.99.19",
"futures 0.3.31",
"http 1.3.1",
"log",
@@ -8219,7 +8266,7 @@ dependencies = [
"async-trait",
"cargo_metadata",
"collections",
"derive_more",
"derive_more 0.99.19",
"extension",
"fs",
"futures 0.3.31",
@@ -8978,6 +9025,7 @@ dependencies = [
"gpui",
"language",
"lsp",
"project",
"serde",
"serde_json",
"util",
@@ -9030,7 +9078,6 @@ dependencies = [
"credentials_provider",
"deepseek",
"editor",
"feature_flags",
"fs",
"futures 0.3.31",
"google_ai",
@@ -9065,6 +9112,7 @@ dependencies = [
"util",
"vercel",
"workspace-hack",
"x_ai",
"zed_llm_client",
]

@@ -9657,12 +9705,11 @@ dependencies = [
[[package]]
name = "lsp-types"
version = "0.95.1"
source = "git+https://github.com/zed-industries/lsp-types?rev=c9c189f1c5dd53c624a419ce35bc77ad6a908d18#c9c189f1c5dd53c624a419ce35bc77ad6a908d18"
source = "git+https://github.com/zed-industries/lsp-types?rev=39f629bdd03d59abd786ed9fc27e8bca02c0c0ec#39f629bdd03d59abd786ed9fc27e8bca02c0c0ec"
dependencies = [
"bitflags 1.3.2",
"serde",
"serde_json",
"serde_repr",
"url",
]

@@ -10257,6 +10304,17 @@ dependencies = [
"uuid",
]

[[package]]
name = "nc"
version = "0.1.0"
dependencies = [
"anyhow",
"futures 0.3.31",
"net",
"smol",
"workspace-hack",
]

[[package]]
name = "ndk"
version = "0.8.0"
@@ -11274,6 +11332,15 @@ dependencies = [
"windows-targets 0.52.6",
]

[[package]]
name = "parse_int"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1c464266693329dd5a8715098c7f86e6c5fd5d985018b8318f53d9c6c2b21a31"
dependencies = [
"num-traits",
]

[[package]]
name = "partial-json-fixer"
version = "0.5.3"
@@ -12317,6 +12384,7 @@ dependencies = [
"anyhow",
"askpass",
"async-trait",
"base64 0.22.1",
"buffer_diff",
"circular-buffer",
"client",
@@ -12362,6 +12430,7 @@ dependencies = [
"sha2",
"shellexpand 2.1.2",
"shlex",
"smallvec",
"smol",
"snippet",
"snippet_provider",
@@ -14096,7 +14165,7 @@ dependencies = [
[[package]]
name = "scap"
version = "0.0.8"
source = "git+https://github.com/zed-industries/scap?rev=08f0a01417505cc0990b9931a37e5120db92e0d0#08f0a01417505cc0990b9931a37e5120db92e0d0"
source = "git+https://github.com/zed-industries/scap?rev=270538dc780f5240723233ff901e1054641ed318#270538dc780f5240723233ff901e1054641ed318"
dependencies = [
"anyhow",
"cocoa 0.25.0",
@@ -14148,6 +14217,7 @@ dependencies = [
"indexmap",
"ref-cast",
"schemars_derive",
"semver",
"serde",
"serde_json",
]
@@ -14669,16 +14739,20 @@ dependencies = [
"language",
"log",
"menu",
"notifications",
"paths",
"project",
"schemars",
"search",
"serde",
"serde_json",
"settings",
"telemetry",
"theme",
"tree-sitter-json",
"tree-sitter-rust",
"ui",
"ui_input",
"util",
"workspace",
"workspace-hack",
@@ -16106,7 +16180,7 @@ version = "0.1.0"
dependencies = [
"anyhow",
"collections",
"derive_more",
"derive_more 0.99.19",
"fs",
"futures 0.3.31",
"gpui",
@@ -16405,6 +16479,7 @@ dependencies = [
"schemars",
"serde",
"settings",
"settings_ui",
"smallvec",
"story",
"telemetry",
@@ -18373,7 +18448,6 @@ version = "0.1.0"
dependencies = [
"anyhow",
"client",
"feature_flags",
"futures 0.3.31",
"gpui",
"http_client",
@@ -19798,6 +19872,17 @@ version = "0.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ec107c4503ea0b4a98ef47356329af139c0a4f7750e621cf2973cd3385ebcb3d"

[[package]]
name = "x_ai"
version = "0.1.0"
dependencies = [
"anyhow",
"schemars",
"serde",
"strum 0.27.1",
"workspace-hack",
]

[[package]]
name = "xattr"
version = "0.2.3"
@@ -20039,7 +20124,7 @@ dependencies = [

[[package]]
name = "zed"
version = "0.196.0"
version = "0.197.0"
dependencies = [
"activity_indicator",
"agent",
@@ -20113,6 +20198,7 @@ dependencies = [
"menu",
"migrator",
"mimalloc",
"nc",
"nix 0.29.0",
"node_runtime",
"notifications",
Cargo.toml (16 changes)
@@ -2,7 +2,7 @@
resolver = "2"
members = [
"crates/activity_indicator",
"crates/acp",
"crates/acp_thread",
"crates/agent_ui",
"crates/agent",
"crates/agent_settings",
@@ -102,6 +102,7 @@ members = [
"crates/migrator",
"crates/mistral",
"crates/multi_buffer",
"crates/nc",
"crates/net",
"crates/node_runtime",
"crates/notifications",
@@ -179,6 +180,7 @@ members = [
"crates/welcome",
"crates/workspace",
"crates/worktree",
"crates/x_ai",
"crates/zed",
"crates/zed_actions",
"crates/zeta",
@@ -218,7 +220,7 @@ edition = "2024"
# Workspace member crates
#

acp = { path = "crates/acp" }
acp_thread = { path = "crates/acp_thread" }
agent = { path = "crates/agent" }
activity_indicator = { path = "crates/activity_indicator" }
agent_ui = { path = "crates/agent_ui" }
@@ -316,6 +318,7 @@ menu = { path = "crates/menu" }
migrator = { path = "crates/migrator" }
mistral = { path = "crates/mistral" }
multi_buffer = { path = "crates/multi_buffer" }
nc = { path = "crates/nc" }
net = { path = "crates/net" }
node_runtime = { path = "crates/node_runtime" }
notifications = { path = "crates/notifications" }
@@ -394,6 +397,7 @@ web_search_providers = { path = "crates/web_search_providers" }
welcome = { path = "crates/welcome" }
workspace = { path = "crates/workspace" }
worktree = { path = "crates/worktree" }
x_ai = { path = "crates/x_ai" }
zed = { path = "crates/zed" }
zed_actions = { path = "crates/zed_actions" }
zeta = { path = "crates/zeta" }
@@ -404,7 +408,7 @@ zlog_settings = { path = "crates/zlog_settings" }
# External crates
#

agentic-coding-protocol = "0.0.6"
agentic-coding-protocol = "0.0.9"
aho-corasick = "1.1"
alacritty_terminal = { git = "https://github.com/zed-industries/alacritty.git", branch = "add-hush-login-flag" }
any_vec = "0.14"
@@ -492,7 +496,7 @@ libc = "0.2"
libsqlite3-sys = { version = "0.30.1", features = ["bundled"] }
linkify = "0.10.0"
log = { version = "0.4.16", features = ["kv_unstable_serde", "serde"] }
lsp-types = { git = "https://github.com/zed-industries/lsp-types", rev = "c9c189f1c5dd53c624a419ce35bc77ad6a908d18" }
lsp-types = { git = "https://github.com/zed-industries/lsp-types", rev = "39f629bdd03d59abd786ed9fc27e8bca02c0c0ec" }
markup5ever_rcdom = "0.3.0"
metal = "0.29"
moka = { version = "0.12.10", features = ["sync"] }
@@ -507,6 +511,7 @@ ordered-float = "2.1.1"
palette = { version = "0.7.5", default-features = false, features = ["std"] }
parking_lot = "0.12.1"
partial-json-fixer = "0.5.3"
parse_int = "0.9"
pathdiff = "0.2"
pet = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "845945b830297a50de0e24020b980a65e4820559" }
pet-conda = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "845945b830297a50de0e24020b980a65e4820559" }
@@ -546,7 +551,8 @@ rustc-demangle = "0.1.23"
rustc-hash = "2.1.0"
rustls = { version = "0.23.26" }
rustls-platform-verifier = "0.5.0"
scap = { git = "https://github.com/zed-industries/scap", rev = "08f0a01417505cc0990b9931a37e5120db92e0d0", default-features = false }
# When updating scap rev, also update it in .config/hakari.toml
scap = { git = "https://github.com/zed-industries/scap", rev = "270538dc780f5240723233ff901e1054641ed318", default-features = false }
schemars = { version = "1.0", features = ["indexmap2"] }
semver = "1.0"
serde = { version = "1.0", features = ["derive", "rc"] }
assets/icons/ai_claude.svg (new file, 3 lines)
@@ -0,0 +1,3 @@
<svg width="40" height="40" viewBox="0 0 40 40" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M7.8481 26.5925L15.7165 22.1806L15.8481 21.7961L15.7165 21.5836H15.3316L14.0152 21.5027L9.51899 21.3812L5.62025 21.2193L1.84304 21.0169L0.891139 20.8146L0 19.6408L0.0911392 19.0539L0.891139 18.5176L2.03544 18.6188L4.56709 18.7908L8.36456 19.0539L11.119 19.2158L15.2 19.6408H15.8481L15.9392 19.3777L15.7165 19.2158L15.5443 19.0539L11.6152 16.3926L7.36203 13.5796L5.13418 11.9605L3.92911 11.1409L3.32152 10.3719L3.05823 8.69213L4.1519 7.48798L5.62025 7.58917L5.99494 7.69036L7.48354 8.8338L10.6633 11.2927L14.8152 14.3486L15.4228 14.8545L15.6658 14.6825L15.6962 14.5611L15.4228 14.1057L13.1646 10.0278L10.7544 5.87908L9.68101 4.15887L9.39747 3.12674C9.2962 2.70175 9.22532 2.34758 9.22532 1.91247L10.4709 0.222616L11.1595 0L12.8203 0.222616L13.519 0.82975L14.5519 3.18745L16.2228 6.90109L18.8152 11.9504L19.5747 13.448L19.9797 14.8343L20.1316 15.2593H20.3949V15.0164L20.6076 12.173L21.0025 8.68201L21.3873 4.18922L21.519 2.92436L22.1468 1.40653L23.3924 0.586896L24.3646 1.05237L25.1646 2.1958L25.0532 2.93448L24.5772 6.02074L23.6456 10.8576L23.038 14.0956H23.3924L23.7975 13.6909L25.438 11.5153L28.1924 8.07488L29.4076 6.70883L30.8253 5.20111L31.7367 4.48267H33.4582L34.7241 6.36479L34.157 8.30761L32.3848 10.554L30.9165 12.4564L28.8101 15.2897L27.4937 17.5563L27.6152 17.7384L27.9291 17.7081L32.6886 16.6962L35.2608 16.2307L38.3291 15.7045L39.7165 16.3521L39.8684 17.0099L39.3215 18.3557L36.0405 19.1652L32.1924 19.9342L26.4608 21.2902L26.3899 21.3408L26.4709 21.4419L29.0532 21.6848L30.157 21.7455H32.8608L37.8937 22.1199L39.2101 22.9901L40 24.0526L39.8684 24.8621L37.843 25.8943L35.1089 25.2466L28.7291 23.7288L26.5418 23.1824H26.238V23.3645L28.0608 25.1455L31.4025 28.1609L35.5848 32.0465L35.7975 33.0078L35.2608 33.7668L34.6937 33.6858L31.0177 30.9233L29.6 29.6787L26.3899 26.977H26.1772V27.2603L26.9165 28.343L30.8253 34.212L31.0278 36.0132L30.7443 36.6L29.7316 36.9542L28.6177 36.7518L26.3291 33.5441L23.9696 29.9317L22.0658 26.6937L21.8329 26.8252L20.7089 38.9173L20.1823 39.5345L18.9671 40L17.9544 39.231L17.4177 37.9863L17.9544 35.5274L18.6025 32.3198L19.1291 29.7698L19.6051 26.6026L19.8886 25.5502L19.8684 25.4794L19.6354 25.5097L17.2456 28.7883L13.6101 33.6959L10.7342 36.7721L10.0456 37.0453L8.85063 36.428L8.96203 35.3251L9.63038 34.3435L13.6101 29.2841L16.0101 26.1472L17.5595 24.3359L17.5494 24.0729H17.4582L6.88608 30.9335L5.00253 31.1763L4.1924 30.4174L4.29367 29.1728L4.67848 28.768L7.85823 26.5823L7.8481 26.5925Z" fill="black"/>
</svg>
assets/icons/ai_x_ai.svg (new file, 3 lines)
@@ -0,0 +1,3 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="m12.414 5.47.27 9.641h2.157l.27-13.15zM15.11.889h-3.293L6.651 7.613l1.647 2.142zM.889 15.11H4.18l1.647-2.142-1.647-2.143zm0-9.641 7.409 9.641h3.292L4.181 5.47z" fill="#000"/>
</svg>
@@ -1 +1,12 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-bug"><path d="m8 2 1.88 1.88"/><path d="M14.12 3.88 16 2"/><path d="M9 7.13v-1a3.003 3.003 0 1 1 6 0v1"/><path d="M12 20c-3.3 0-6-2.7-6-6v-3a4 4 0 0 1 4-4h4a4 4 0 0 1 4 4v3c0 3.3-2.7 6-6 6"/><path d="M12 20v-9"/><path d="M6.53 9C4.6 8.8 3 7.1 3 5"/><path d="M6 13H2"/><path d="M3 21c0-2.1 1.7-3.9 3.8-4"/><path d="M20.97 5c0 2.1-1.6 3.8-3.5 4"/><path d="M22 13h-4"/><path d="M17.2 17c2.1.1 3.8 1.9 3.8 4"/></svg>
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M5.49219 2.29071L6.41455 3.1933" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M9.61816 3.1933L10.508 2.29071" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M5.7042 5.89221V5.15749C5.69033 4.85975 5.73943 4.56239 5.84856 4.28336C5.95768 4.00434 6.12456 3.74943 6.33913 3.53402C6.55369 3.31862 6.81149 3.14718 7.09697 3.03005C7.38245 2.91292 7.68969 2.85254 8.00014 2.85254C8.3106 2.85254 8.61784 2.91292 8.90332 3.03005C9.18879 3.14718 9.44659 3.31862 9.66116 3.53402C9.87572 3.74943 10.0426 4.00434 10.1517 4.28336C10.2609 4.56239 10.31 4.85975 10.2961 5.15749V5.89221" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M8.00006 13.0426C6.13263 13.0426 4.60474 11.6005 4.60474 9.83792V8.23558C4.60474 7.66895 4.84322 7.12554 5.26772 6.72487C5.69221 6.32421 6.26796 6.09912 6.86829 6.09912H9.13184C9.73217 6.09912 10.3079 6.32421 10.7324 6.72487C11.1569 7.12554 11.3954 7.66895 11.3954 8.23558V9.83792C11.3954 11.6005 9.86749 13.0426 8.00006 13.0426Z" fill="black" fill-opacity="0.15" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M4.60452 6.25196C3.51235 6.13878 2.60693 5.17677 2.60693 3.9884" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M4.60462 8.81659H2.34106" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M2.4541 13.3186C2.4541 12.1302 3.41611 11.1116 4.60448 11.0551" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M13.0761 3.9884C13.0761 5.17677 12.1706 6.13878 11.0955 6.25196" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M13.6591 8.81659H11.3955" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M11.3955 11.0551C12.5839 11.1116 13.5459 12.1302 13.5459 13.3186" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
</svg>
assets/icons/equal.svg (new file, 1 line)
@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-equal-icon lucide-equal"><line x1="5" x2="19" y1="9" y2="9"/><line x1="5" x2="19" y1="15" y2="15"/></svg>
@@ -1,5 +1,5 @@
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M3.03125 3V3.03125M3.03125 3.03125V9M3.03125 3.03125C3.03125 5 6 5 6 5M3.03125 9C3.03125 11 6 11 6 11M3.03125 9V12" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<rect x="8" y="2.5" width="6" height="5" rx="1.5" fill="black"/>
|
||||
<rect x="8" y="8.46875" width="6" height="5.0625" rx="1.5" fill="black"/>
|
||||
<path d="M3 3V3.03125M3 3.03125V9M3 3.03125C3 5 5.96875 5 5.96875 5M3 9C3 11 5.96875 11 5.96875 11M3 9V12" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<rect x="8" y="3" width="5.5" height="4" rx="1.5" fill="black"/>
|
||||
<rect x="8" y="9" width="5.5" height="4" rx="1.5" fill="black"/>
@@ -1,6 +1,7 @@
|
||||
<svg width="12" height="12" viewBox="0 0 12 12" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M3.75 3.25C4.02614 3.25 4.25 3.02614 4.25 2.75C4.25 2.47386 4.02614 2.25 3.75 2.25C3.47386 2.25 3.25 2.47386 3.25 2.75C3.25 3.02614 3.47386 3.25 3.75 3.25ZM3.75 4.25C4.57843 4.25 5.25 3.57843 5.25 2.75C5.25 1.92157 4.57843 1.25 3.75 1.25C2.92157 1.25 2.25 1.92157 2.25 2.75C2.25 3.57843 2.92157 4.25 3.75 4.25Z" fill="black"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M8.25 3.25C8.52614 3.25 8.75 3.02614 8.75 2.75C8.75 2.47386 8.52614 2.25 8.25 2.25C7.97386 2.25 7.75 2.47386 7.75 2.75C7.75 3.02614 7.97386 3.25 8.25 3.25ZM8.25 4.25C9.07843 4.25 9.75 3.57843 9.75 2.75C9.75 1.92157 9.07843 1.25 8.25 1.25C7.42157 1.25 6.75 1.92157 6.75 2.75C6.75 3.57843 7.42157 4.25 8.25 4.25Z" fill="black"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M3.75 9.75C4.02614 9.75 4.25 9.52614 4.25 9.25C4.25 8.97386 4.02614 8.75 3.75 8.75C3.47386 8.75 3.25 8.97386 3.25 9.25C3.25 9.52614 3.47386 9.75 3.75 9.75ZM3.75 10.75C4.57843 10.75 5.25 10.0784 5.25 9.25C5.25 8.42157 4.57843 7.75 3.75 7.75C2.92157 7.75 2.25 8.42157 2.25 9.25C2.25 10.0784 2.92157 10.75 3.75 10.75Z" fill="black"/>
|
||||
<path fill-rule="evenodd" clip-rule="evenodd" d="M3.25 3.75H4.25V5.59609C4.67823 5.35824 5.24991 5.25 6 5.25H7.25017C7.5262 5.25 7.75 5.02625 7.75 4.75V3.75H8.75V4.75C8.75 5.57832 8.07871 6.25 7.25017 6.25H6C5.14559 6.25 4.77639 6.41132 4.59684 6.56615C4.42571 6.71373 4.33877 6.92604 4.25 7.30651V8.25H3.25V3.75Z" fill="black"/>
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<circle cx="5" cy="12" r="1.25" stroke="black" stroke-width="1.5"/>
|
||||
<path d="M5 11V5" stroke="black" stroke-width="1.5"/>
|
||||
<path d="M5 10C5 10 5.5 8 7 8C7.73103 8 8.69957 8 9.50049 8C10.3289 8 11 7.32843 11 6.5V5" stroke="black" stroke-width="1.5"/>
|
||||
<circle cx="5" cy="4" r="1.25" stroke="black" stroke-width="1.5"/>
|
||||
<circle cx="11" cy="4" r="1.25" stroke="black" stroke-width="1.5"/>
|
||||
</svg>
@@ -1 +1,7 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-list-tree"><path d="M21 12h-8"/><path d="M21 6H8"/><path d="M21 18h-8"/><path d="M3 6v4c0 1.1.9 2 2 2h3"/><path d="M3 10v6c0 1.1.9 2 2 2h3"/></svg>
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M13.5 8H9.5" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M13.5 4L6.5 4" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M13.5 12H9.5" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M3 3.5V6.33333C3 7.25 3.72 8 4.6 8H7" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M3 6V10.5C3 11.325 3.72 12 4.6 12H7" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
</svg>
assets/icons/location_edit.svg (new file, 1 line)
@@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-location-edit-icon lucide-location-edit"><path d="M17.97 9.304A8 8 0 0 0 2 10c0 4.69 4.887 9.562 7.022 11.468"/><path d="M21.378 16.626a1 1 0 0 0-3.004-3.004l-4.01 4.012a2 2 0 0 0-.506.854l-.837 2.87a.5.5 0 0 0 .62.62l2.87-.837a2 2 0 0 0 .854-.506z"/><circle cx="10" cy="10" r="3"/></svg>
assets/icons/play_filled.svg (new file, 3 lines)
@@ -0,0 +1,3 @@
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M5 4L10 7L5 10V4Z" fill="black" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
</svg>
assets/icons/terminal_alt.svg (new file, 5 lines)
@@ -0,0 +1,5 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M11.8889 3H4.11111C3.49746 3 3 3.49746 3 4.11111V11.8889C3 12.5025 3.49746 13 4.11111 13H11.8889C12.5025 13 13 12.5025 13 11.8889V4.11111C13 3.49746 12.5025 3 11.8889 3Z" fill="black" fill-opacity="0.15" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M8.37939 10.3243H10.3794" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M5.64966 9.32837L7.64966 7.32837L5.64966 5.32837" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
</svg>
@@ -1,3 +1,5 @@
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M5.9 8.00002C7.44656 8.00002 8.7 6.74637 8.7 5.20002C8.7 3.65368 7.44656 2.40002 5.9 2.40002C4.35344 2.40002 3.1 3.65368 3.1 5.20002C3.1 6.74637 4.35344 8.00002 5.9 8.00002ZM7.00906 9.05002H4.79094C2.69684 9.05002 1 10.7475 1 12.841C1 13.261 1.3395 13.6 1.75819 13.6H10.0409C10.4609 13.6 10.8 13.261 10.8 12.841C10.8 10.7475 9.1025 9.05002 7.00906 9.05002ZM11.4803 9.40002H9.86484C10.87 10.2247 11.5 11.4585 11.5 12.841C11.5 13.121 11.4169 13.3791 11.2812 13.6H14.3C14.6872 13.6 15 13.285 15 12.8803C15 10.9663 13.4338 9.40002 11.4803 9.40002ZM10.45 8.00002C11.8041 8.00002 12.9 6.90409 12.9 5.55002C12.9 4.19596 11.8041 3.10002 10.45 3.10002C9.90072 3.10002 9.39913 3.28716 8.9905 3.59243C9.2425 4.07631 9.4 4.61815 9.4 5.20002C9.4 5.97702 9.13903 6.69059 8.70897 7.27181C9.15281 7.72002 9.7675 8.00002 10.45 8.00002Z" fill="white"/>
|
||||
<path d="M6.79118 8.27005C8.27568 8.27005 9.4791 7.06663 9.4791 5.58214C9.4791 4.09765 8.27568 2.89423 6.79118 2.89423C5.30669 2.89423 4.10327 4.09765 4.10327 5.58214C4.10327 7.06663 5.30669 8.27005 6.79118 8.27005Z" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M6.79112 8.60443C4.19441 8.60443 2.08936 10.7095 2.08936 13.3062H11.4929C11.4929 10.7095 9.38784 8.60443 6.79112 8.60443Z" fill="black" fill-opacity="0.15" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M14.6984 12.9263C14.6984 10.8893 13.4895 8.99736 12.2806 8.09067C12.6779 7.79254 12.9957 7.40104 13.2057 6.95083C13.4157 6.50062 13.5115 6.00558 13.4846 5.50952C13.4577 5.01346 13.309 4.53168 13.0515 4.10681C12.7941 3.68194 12.4358 3.3271 12.0085 3.07367" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
</svg>
@@ -1,5 +1,5 @@
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M8 2L6.72534 5.87534C6.6601 6.07367 6.5492 6.25392 6.40155 6.40155C6.25392 6.5492 6.07367 6.6601 5.87534 6.72534L2 8L5.87534 9.27466C6.07367 9.3399 6.25392 9.4508 6.40155 9.59845C6.5492 9.74608 6.6601 9.92633 6.72534 10.1247L8 14L9.27466 10.1247C9.3399 9.92633 9.4508 9.74608 9.59845 9.59845C9.74608 9.4508 9.92633 9.3399 10.1247 9.27466L14 8L10.1247 6.72534C9.92633 6.6601 9.74608 6.5492 9.59845 6.40155C9.4508 6.25392 9.3399 6.07367 9.27466 5.87534L8 2Z" fill="black" fill-opacity="0.15" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M8 2.93652L6.9243 6.20697C6.86924 6.37435 6.77565 6.52646 6.65105 6.65105C6.52646 6.77565 6.37435 6.86924 6.20697 6.9243L2.93652 8L6.20697 9.0757C6.37435 9.13076 6.52646 9.22435 6.65105 9.34895C6.77565 9.47354 6.86924 9.62565 6.9243 9.79306L8 13.0635L9.0757 9.79306C9.13076 9.62565 9.22435 9.47354 9.34895 9.34895C9.47354 9.22435 9.62565 9.13076 9.79306 9.0757L13.0635 8L9.79306 6.9243C9.62565 6.86924 9.47354 6.77565 9.34895 6.65105C9.22435 6.52646 9.13076 6.37435 9.0757 6.20697L8 2.93652Z" fill="black" fill-opacity="0.15" stroke="black" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M3.33334 2V4.66666M2 3.33334H4.66666" stroke="black" stroke-opacity="0.75" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M12.6665 11.3333V14M11.3333 12.6666H13.9999" stroke="black" stroke-opacity="0.75" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
</svg>
@@ -269,10 +269,10 @@
}
},
{
"context": "AgentPanel && acp_thread",
"context": "AgentPanel && external_agent_thread",
"use_key_equivalents": true,
"bindings": {
"ctrl-n": "agent::NewAcpThread",
"ctrl-n": "agent::NewExternalAgentThread",
"ctrl-alt-t": "agent::NewThread"
}
},
@@ -320,7 +320,8 @@
"bindings": {
"enter": "agent::Chat",
"up": "agent::PreviousHistoryMessage",
"down": "agent::NextHistoryMessage"
"down": "agent::NextHistoryMessage",
"shift-ctrl-r": "agent::OpenAgentDiff"
}
},
{
@@ -474,8 +475,8 @@
"ctrl-/": ["editor::ToggleComments", { "advance_downwards": false }],
"ctrl-u": "editor::UndoSelection",
"ctrl-shift-u": "editor::RedoSelection",
"f8": "editor::GoToDiagnostic",
"shift-f8": "editor::GoToPreviousDiagnostic",
"f8": ["editor::GoToDiagnostic", { "severity": { "min": "hint", "max": "error" } }],
"shift-f8": ["editor::GoToPreviousDiagnostic", { "severity": { "min": "hint", "max": "error" } }],
"f2": "editor::Rename",
"f12": "editor::GoToDefinition",
"alt-f12": "editor::GoToDefinitionSplit",
@@ -585,7 +586,7 @@
"ctrl-shift-f": "pane::DeploySearch",
"ctrl-shift-h": ["pane::DeploySearch", { "replace_enabled": true }],
"ctrl-shift-t": "pane::ReopenClosedItem",
"ctrl-k ctrl-s": "zed::OpenKeymap",
"ctrl-k ctrl-s": "zed::OpenKeymapEditor",
"ctrl-k ctrl-t": "theme_selector::Toggle",
"ctrl-t": "project_symbols::Toggle",
"ctrl-p": "file_finder::Toggle",
@@ -855,6 +856,7 @@
"alt-shift-y": "git::UnstageFile",
"ctrl-alt-y": "git::ToggleStaged",
"space": "git::ToggleStaged",
"shift-space": "git::StageRange",
"tab": "git_panel::FocusEditor",
"shift-tab": "git_panel::FocusEditor",
"escape": "git_panel::ToggleFocus",
@@ -997,6 +999,7 @@
{
"context": "FileFinder || (FileFinder > Picker > Editor)",
"bindings": {
"ctrl-p": "file_finder::Toggle",
"ctrl-shift-a": "file_finder::ToggleSplitMenu",
"ctrl-shift-i": "file_finder::ToggleFilterMenu"
}
@@ -1112,7 +1115,37 @@
"context": "KeymapEditor",
"use_key_equivalents": true,
"bindings": {
"ctrl-f": "search::FocusSearch"
"ctrl-f": "search::FocusSearch",
"alt-find": "keymap_editor::ToggleKeystrokeSearch",
"alt-ctrl-f": "keymap_editor::ToggleKeystrokeSearch",
"alt-c": "keymap_editor::ToggleConflictFilter",
"enter": "keymap_editor::EditBinding",
"alt-enter": "keymap_editor::CreateBinding"
}
},
{
"context": "KeystrokeInput",
"use_key_equivalents": true,
"bindings": {
"enter": "keystroke_input::StartRecording",
"escape escape escape": "keystroke_input::StopRecording",
"delete": "keystroke_input::ClearKeystrokes"
}
},
{
"context": "KeybindEditorModal",
"use_key_equivalents": true,
"bindings": {
"ctrl-enter": "menu::Confirm",
"escape": "menu::Cancel"
}
},
{
"context": "KeybindEditorModal > Editor",
"use_key_equivalents": true,
"bindings": {
"up": "menu::SelectPrevious",
"down": "menu::SelectNext"
}
}
]
@@ -310,10 +310,10 @@
}
},
{
"context": "AgentPanel && acp_thread",
"context": "AgentPanel && external_agent_thread",
"use_key_equivalents": true,
"bindings": {
"cmd-n": "agent::NewAcpThread",
"cmd-n": "agent::NewExternalAgentThread",
"cmd-alt-t": "agent::NewThread"
}
},
@@ -371,7 +371,8 @@
"bindings": {
"enter": "agent::Chat",
"up": "agent::PreviousHistoryMessage",
"down": "agent::NextHistoryMessage"
"down": "agent::NextHistoryMessage",
"shift-ctrl-r": "agent::OpenAgentDiff"
}
},
{
@@ -527,8 +528,8 @@
"cmd-/": ["editor::ToggleComments", { "advance_downwards": false }],
"cmd-u": "editor::UndoSelection",
"cmd-shift-u": "editor::RedoSelection",
"f8": "editor::GoToDiagnostic",
"shift-f8": "editor::GoToPreviousDiagnostic",
"f8": ["editor::GoToDiagnostic", { "severity": { "min": "hint", "max": "error" } }],
"shift-f8": ["editor::GoToPreviousDiagnostic", { "severity": { "min": "hint", "max": "error" } }],
"f2": "editor::Rename",
"f12": "editor::GoToDefinition",
"alt-f12": "editor::GoToDefinitionSplit",
@@ -651,7 +652,7 @@
"cmd-shift-f": "pane::DeploySearch",
"cmd-shift-h": ["pane::DeploySearch", { "replace_enabled": true }],
"cmd-shift-t": "pane::ReopenClosedItem",
"cmd-k cmd-s": "zed::OpenKeymap",
"cmd-k cmd-s": "zed::OpenKeymapEditor",
"cmd-k cmd-t": "theme_selector::Toggle",
"cmd-t": "project_symbols::Toggle",
"cmd-p": "file_finder::Toggle",
@@ -929,6 +930,7 @@
"enter": "menu::Confirm",
"cmd-alt-y": "git::ToggleStaged",
"space": "git::ToggleStaged",
"shift-space": "git::StageRange",
"cmd-y": "git::StageFile",
"cmd-shift-y": "git::UnstageFile",
"alt-down": "git_panel::FocusEditor",
@@ -1096,13 +1098,16 @@
"ctrl-cmd-space": "terminal::ShowCharacterPalette",
"cmd-c": "terminal::Copy",
"cmd-v": "terminal::Paste",
"cmd-f": "buffer_search::Deploy",
"cmd-a": "editor::SelectAll",
"cmd-k": "terminal::Clear",
"cmd-n": "workspace::NewTerminal",
"ctrl-enter": "assistant::InlineAssist",
"ctrl-_": null, // emacs undo
// Some nice conveniences
"cmd-backspace": ["terminal::SendText", "\u0015"],
"cmd-backspace": ["terminal::SendText", "\u0015"], // ctrl-u: clear line
"alt-delete": ["terminal::SendText", "\u001bd"], // alt-d: delete word forward
"cmd-delete": ["terminal::SendText", "\u000b"], // ctrl-k: delete to end of line
"cmd-right": ["terminal::SendText", "\u0005"],
"cmd-left": ["terminal::SendText", "\u0001"],
// Terminal.app compatibility
@@ -1211,7 +1216,35 @@
"context": "KeymapEditor",
"use_key_equivalents": true,
"bindings": {
"cmd-f": "search::FocusSearch"
"cmd-alt-f": "keymap_editor::ToggleKeystrokeSearch",
"cmd-alt-c": "keymap_editor::ToggleConflictFilter",
"enter": "keymap_editor::EditBinding",
"alt-enter": "keymap_editor::CreateBinding"
}
},
{
"context": "KeystrokeInput",
"use_key_equivalents": true,
"bindings": {
"enter": "keystroke_input::StartRecording",
"escape escape escape": "keystroke_input::StopRecording",
"delete": "keystroke_input::ClearKeystrokes"
}
},
{
"context": "KeybindEditorModal",
"use_key_equivalents": true,
"bindings": {
"cmd-enter": "menu::Confirm",
"escape": "menu::Cancel"
}
},
{
"context": "KeybindEditorModal > Editor",
"use_key_equivalents": true,
"bindings": {
"up": "menu::SelectPrevious",
"down": "menu::SelectNext"
}
}
]
@@ -66,22 +66,46 @@
"context": "Editor && mode == full",
"bindings": {
"ctrl-f12": "outline::Toggle",
"alt-7": "outline::Toggle",
"ctrl-r": ["buffer_search::Deploy", { "replace_enabled": true }],
"ctrl-shift-n": "file_finder::Toggle",
"ctrl-g": "go_to_line::Toggle",
"alt-enter": "editor::ToggleCodeActions"
}
},
{
"context": "BufferSearchBar || ProjectSearchBar",
"bindings": {
"shift-enter": "search::SelectPreviousMatch",
"ctrl-alt-c": "search::ToggleCaseSensitive",
"ctrl-alt-e": "search::ToggleSelection",
"ctrl-alt-w": "search::ToggleWholeWord",
"ctrl-alt-x": "search::ToggleRegex"
}
},
{
"context": "Workspace",
"bindings": {
"ctrl-shift-f12": "workspace::CloseAllDocks",
"ctrl-shift-r": ["pane::DeploySearch", { "replace_enabled": true }],
"alt-shift-f10": "task::Spawn",
"ctrl-e": "file_finder::Toggle",
"ctrl-k": "git_panel::ToggleFocus", // bug: This should also focus commit editor
"ctrl-shift-n": "file_finder::Toggle",
"ctrl-shift-a": "command_palette::Toggle",
"shift shift": "command_palette::Toggle",
"ctrl-alt-shift-n": "project_symbols::Toggle",
"alt-0": "git_panel::ToggleFocus",
"alt-1": "workspace::ToggleLeftDock",
"ctrl-e": "tab_switcher::Toggle",
"alt-6": "diagnostics::Deploy"
"alt-5": "debug_panel::ToggleFocus",
"alt-6": "diagnostics::Deploy",
"alt-7": "outline_panel::ToggleFocus"
}
},
{
"context": "Workspace || Editor",
"bindings": {
"alt-f12": "terminal_panel::ToggleFocus",
"ctrl-shift-k": "git::Push"
}
},
{
@@ -95,10 +119,33 @@
"context": "ProjectPanel",
"bindings": {
"enter": "project_panel::Open",
"ctrl-shift-f": "project_panel::NewSearchInDirectory",
"backspace": ["project_panel::Trash", { "skip_prompt": false }],
"delete": ["project_panel::Trash", { "skip_prompt": false }],
"shift-delete": ["project_panel::Delete", { "skip_prompt": false }],
"shift-f6": "project_panel::Rename"
}
},
{
"context": "Terminal",
"bindings": {
"ctrl-shift-t": "workspace::NewTerminal",
"alt-f12": "workspace::CloseActiveDock",
"alt-left": "pane::ActivatePreviousItem",
"alt-right": "pane::ActivateNextItem",
"ctrl-up": "terminal::ScrollLineUp",
"ctrl-down": "terminal::ScrollLineDown",
"shift-pageup": "terminal::ScrollPageUp",
"shift-pagedown": "terminal::ScrollPageDown"
}
},
{ "context": "GitPanel", "bindings": { "alt-0": "workspace::CloseActiveDock" } },
{ "context": "ProjectPanel", "bindings": { "alt-1": "workspace::CloseActiveDock" } },
{ "context": "DebugPanel", "bindings": { "alt-5": "workspace::CloseActiveDock" } },
{ "context": "Diagnostics > Editor", "bindings": { "alt-6": "pane::CloseActiveItem" } },
{ "context": "OutlinePanel", "bindings": { "alt-7": "workspace::CloseActiveDock" } },
{
"context": "Dock || Workspace || Terminal || OutlinePanel || ProjectPanel || CollabPanel || (Editor && mode == auto_height)",
"bindings": { "escape": "editor::ToggleFocus" }
}
]
@@ -3,6 +3,7 @@
|
||||
"bindings": {
|
||||
"cmd-{": "pane::ActivatePreviousItem",
|
||||
"cmd-}": "pane::ActivateNextItem",
|
||||
"cmd-0": "git_panel::ToggleFocus", // overrides `cmd-0` zoom reset
|
||||
"ctrl-f2": "debugger::Stop",
|
||||
"f6": "debugger::Pause",
|
||||
"f7": "debugger::StepInto",
|
||||
@@ -63,28 +64,50 @@
|
||||
"context": "Editor && mode == full",
|
||||
"bindings": {
|
||||
"cmd-f12": "outline::Toggle",
|
||||
"cmd-7": "outline::Toggle",
|
||||
"cmd-r": ["buffer_search::Deploy", { "replace_enabled": true }],
|
||||
"cmd-shift-o": "file_finder::Toggle",
|
||||
"cmd-l": "go_to_line::Toggle",
|
||||
"alt-enter": "editor::ToggleCodeActions"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "BufferSearchBar > Editor",
|
||||
"context": "BufferSearchBar || ProjectSearchBar",
|
||||
"bindings": {
|
||||
"shift-enter": "search::SelectPreviousMatch"
|
||||
"shift-enter": "search::SelectPreviousMatch",
|
||||
"alt-c": "search::ToggleCaseSensitive",
|
||||
"alt-e": "search::ToggleSelection",
|
||||
"alt-x": "search::ToggleRegex",
|
||||
"alt-w": "search::ToggleWholeWord",
|
||||
"ctrl-alt-c": "search::ToggleCaseSensitive",
|
||||
"ctrl-alt-e": "search::ToggleSelection",
|
||||
"ctrl-alt-w": "search::ToggleWholeWord",
|
||||
"ctrl-alt-x": "search::ToggleRegex"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Workspace",
|
||||
"bindings": {
|
||||
"cmd-shift-f12": "workspace::CloseAllDocks",
|
||||
"cmd-shift-r": ["pane::DeploySearch", { "replace_enabled": true }],
|
||||
"ctrl-alt-r": "task::Spawn",
|
||||
"cmd-e": "file_finder::Toggle",
|
||||
"cmd-k": "git_panel::ToggleFocus", // bug: This should also focus commit editor
|
||||
"cmd-shift-o": "file_finder::Toggle",
|
||||
"cmd-shift-a": "command_palette::Toggle",
|
||||
"shift shift": "command_palette::Toggle",
|
||||
"cmd-alt-o": "project_symbols::Toggle", // JetBrains: Go to Symbol
|
||||
"cmd-o": "project_symbols::Toggle", // JetBrains: Go to Class
|
||||
"cmd-1": "workspace::ToggleLeftDock",
|
||||
"cmd-6": "diagnostics::Deploy"
|
||||
"cmd-1": "project_panel::ToggleFocus",
|
||||
"cmd-5": "debug_panel::ToggleFocus",
|
||||
"cmd-6": "diagnostics::Deploy",
|
||||
"cmd-7": "outline_panel::ToggleFocus"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Workspace || Editor",
|
||||
"bindings": {
|
||||
"alt-f12": "terminal_panel::ToggleFocus",
|
||||
"cmd-shift-k": "git::Push"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -98,11 +121,31 @@
|
||||
"context": "ProjectPanel",
|
||||
"bindings": {
|
||||
"enter": "project_panel::Open",
|
||||
"cmd-shift-f": "project_panel::NewSearchInDirectory",
|
||||
"cmd-backspace": ["project_panel::Trash", { "skip_prompt": false }],
|
||||
"backspace": ["project_panel::Trash", { "skip_prompt": false }],
|
||||
"delete": ["project_panel::Trash", { "skip_prompt": false }],
|
||||
"shift-delete": ["project_panel::Delete", { "skip_prompt": false }],
|
||||
"shift-f6": "project_panel::Rename"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "Terminal",
|
||||
"bindings": {
|
||||
"cmd-t": "workspace::NewTerminal",
|
||||
"alt-f12": "workspace::CloseActiveDock",
|
||||
"cmd-up": "terminal::ScrollLineUp",
|
||||
"cmd-down": "terminal::ScrollLineDown",
|
||||
"shift-pageup": "terminal::ScrollPageUp",
|
||||
"shift-pagedown": "terminal::ScrollPageDown"
|
||||
}
|
||||
},
|
||||
{ "context": "GitPanel", "bindings": { "cmd-0": "workspace::CloseActiveDock" } },
|
||||
{ "context": "DebugPanel", "bindings": { "cmd-5": "workspace::CloseActiveDock" } },
|
||||
{ "context": "Diagnostics > Editor", "bindings": { "cmd-6": "pane::CloseActiveItem" } },
|
||||
{ "context": "OutlinePanel", "bindings": { "cmd-7": "workspace::CloseActiveDock" } },
|
||||
{
|
||||
"context": "Dock || Workspace || Terminal || OutlinePanel || ProjectPanel || CollabPanel || (Editor && mode == auto_height)",
|
||||
"bindings": { "escape": "editor::ToggleFocus" }
|
||||
}
|
||||
]
|
||||
|
||||
@@ -466,7 +466,7 @@
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "vim_mode == insert && showing_signature_help && !showing_completions",
|
||||
"context": "(vim_mode == insert || vim_mode == normal) && showing_signature_help && !showing_completions",
|
||||
"bindings": {
|
||||
"ctrl-p": "editor::SignatureHelpPrevious",
|
||||
"ctrl-n": "editor::SignatureHelpNext"
|
||||
@@ -841,6 +841,7 @@
|
||||
"i": "git_panel::FocusEditor",
|
||||
"x": "git::ToggleStaged",
|
||||
"shift-x": "git::StageAll",
|
||||
"g x": "git::StageRange",
|
||||
"shift-u": "git::UnstageAll"
|
||||
}
|
||||
},
|
||||
|
||||
@@ -84,7 +84,7 @@
|
||||
"bottom_dock_layout": "contained",
|
||||
// The direction that you want to split panes horizontally. Defaults to "up"
|
||||
"pane_split_direction_horizontal": "up",
|
||||
// The direction that you want to split panes horizontally. Defaults to "left"
|
||||
// The direction that you want to split panes vertically. Defaults to "left"
|
||||
"pane_split_direction_vertical": "left",
|
||||
// Centered layout related settings.
|
||||
"centered_layout": {
|
||||
@@ -197,6 +197,8 @@
|
||||
// "inline"
|
||||
// 3. Place snippets at the bottom of the completion list:
|
||||
// "bottom"
|
||||
// 4. Do not show snippets in the completion list:
|
||||
// "none"
|
||||
"snippet_sort_order": "inline",
|
||||
// How to highlight the current line in the editor.
|
||||
//
|
||||
@@ -817,7 +819,7 @@
|
||||
"edit_file": true,
|
||||
"fetch": true,
|
||||
"list_directory": true,
|
||||
"project_notifications": true,
|
||||
"project_notifications": false,
|
||||
"move_path": true,
|
||||
"now": true,
|
||||
"find_path": true,
|
||||
@@ -837,7 +839,7 @@
|
||||
"diagnostics": true,
|
||||
"fetch": true,
|
||||
"list_directory": true,
|
||||
"project_notifications": true,
|
||||
"project_notifications": false,
|
||||
"now": true,
|
||||
"find_path": true,
|
||||
"read_file": true,
|
||||
@@ -1135,6 +1137,7 @@
|
||||
"**/.svn",
|
||||
"**/.hg",
|
||||
"**/.jj",
|
||||
"**/.repo",
|
||||
"**/CVS",
|
||||
"**/.DS_Store",
|
||||
"**/Thumbs.db",
|
||||
@@ -1157,16 +1160,14 @@
|
||||
// Control whether the git blame information is shown inline,
|
||||
// in the currently focused line.
|
||||
"inline_blame": {
|
||||
"enabled": true
|
||||
"enabled": true,
|
||||
// Sets a delay after which the inline blame information is shown.
|
||||
// Delay is restarted with every cursor movement.
|
||||
// "delay_ms": 600
|
||||
//
|
||||
"delay_ms": 0,
|
||||
// Whether or not to display the git commit summary on the same line.
|
||||
// "show_commit_summary": false
|
||||
//
|
||||
"show_commit_summary": false,
|
||||
// The minimum column number to show the inline blame information at
|
||||
// "min_column": 0
|
||||
"min_column": 0
|
||||
},
|
||||
// How git hunks are displayed visually in the editor.
|
||||
// This setting can take two values:
|
||||
@@ -1379,11 +1380,11 @@
|
||||
// This will be merged with the platform's default font fallbacks
|
||||
// "font_fallbacks": ["FiraCode Nerd Fonts"],
|
||||
// The weight of the editor font in standard CSS units from 100 to 900.
|
||||
// "font_weight": 400
|
||||
"font_weight": 400,
|
||||
// Sets the maximum number of lines in the terminal's scrollback buffer.
|
||||
// Default: 10_000, maximum: 100_000 (all bigger values set will be treated as 100_000), 0 disables the scrolling.
|
||||
// Existing terminals will not pick up this change until they are recreated.
|
||||
// "max_scroll_history_lines": 10000,
|
||||
"max_scroll_history_lines": 10000,
|
||||
// The minimum APCA perceptual contrast between foreground and background colors.
|
||||
// APCA (Accessible Perceptual Contrast Algorithm) is more accurate than WCAG 2.x,
|
||||
// especially for dark mode. Values range from 0 to 106.
|
||||
@@ -1672,6 +1673,10 @@
|
||||
"allowed": true
|
||||
}
|
||||
},
|
||||
"SystemVerilog": {
|
||||
"format_on_save": "off",
|
||||
"use_on_type_format": false
|
||||
},
|
||||
"Vue.js": {
|
||||
"language_servers": ["vue-language-server", "..."],
|
||||
"prettier": {
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
[package]
|
||||
name = "acp"
|
||||
name = "acp_thread"
|
||||
version = "0.1.0"
|
||||
edition.workspace = true
|
||||
publish.workspace = true
|
||||
@@ -9,17 +9,16 @@ license = "GPL-3.0-or-later"
|
||||
workspace = true
|
||||
|
||||
[lib]
|
||||
path = "src/acp.rs"
|
||||
path = "src/acp_thread.rs"
|
||||
doctest = false
|
||||
|
||||
[features]
|
||||
test-support = ["gpui/test-support", "project/test-support"]
|
||||
gemini = []
|
||||
|
||||
[dependencies]
|
||||
agent_servers.workspace = true
|
||||
agentic-coding-protocol.workspace = true
|
||||
anyhow.workspace = true
|
||||
assistant_tool.workspace = true
|
||||
buffer_diff.workspace = true
|
||||
editor.workspace = true
|
||||
futures.workspace = true
|
||||
@@ -28,6 +27,8 @@ itertools.workspace = true
|
||||
language.workspace = true
|
||||
markdown.workspace = true
|
||||
project.workspace = true
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
settings.workspace = true
|
||||
smol.workspace = true
|
||||
ui.workspace = true
|
||||
@@ -40,7 +41,6 @@ env_logger.workspace = true
|
||||
gpui = { workspace = true, "features" = ["test-support"] }
|
||||
indoc.workspace = true
|
||||
project = { workspace = true, "features" = ["test-support"] }
|
||||
serde_json.workspace = true
|
||||
tempfile.workspace = true
|
||||
util.workspace = true
|
||||
settings.workspace = true
|
||||
crates/acp_thread/src/connection.rs (new file, 20 lines added)
@@ -0,0 +1,20 @@
|
||||
use agentic_coding_protocol as acp;
|
||||
use anyhow::Result;
|
||||
use futures::future::{FutureExt as _, LocalBoxFuture};
|
||||
|
||||
pub trait AgentConnection {
|
||||
fn request_any(
|
||||
&self,
|
||||
params: acp::AnyAgentRequest,
|
||||
) -> LocalBoxFuture<'static, Result<acp::AnyAgentResult>>;
|
||||
}
|
||||
|
||||
impl AgentConnection for acp::AgentConnection {
|
||||
fn request_any(
|
||||
&self,
|
||||
params: acp::AnyAgentRequest,
|
||||
) -> LocalBoxFuture<'static, Result<acp::AnyAgentResult>> {
|
||||
let task = self.request_any(params);
|
||||
async move { Ok(task.await?) }.boxed_local()
|
||||
}
|
||||
}
|
||||
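The `AgentConnection` trait above is the seam between `AcpThread` and a concrete agent backend: one object-safe method that takes any request and returns a boxed local future. As a rough illustration only (not part of this change; the name `StubConnection` is made up), a test double could implement it by rejecting every request:

use agentic_coding_protocol as acp;
use anyhow::{Result, anyhow};
use futures::future::{FutureExt as _, LocalBoxFuture};

struct StubConnection;

impl AgentConnection for StubConnection {
    fn request_any(
        &self,
        params: acp::AnyAgentRequest,
    ) -> LocalBoxFuture<'static, Result<acp::AnyAgentResult>> {
        // A real double would match on `params` and answer with canned
        // `AnyAgentResult` values; this stub simply fails every request.
        let _ = params;
        async move { Err(anyhow!("stub connection: request not handled")) }.boxed_local()
    }
}

The blanket impl above shows the production path: the real `acp::AgentConnection` just forwards to its own `request_any` and wraps the result.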
@@ -448,7 +448,7 @@ impl ActivityIndicator {
|
||||
.into_any_element(),
|
||||
),
|
||||
message: format!("Debug: {}", session.read(cx).adapter()),
|
||||
tooltip_message: Some(session.read(cx).label().to_string()),
|
||||
tooltip_message: session.read(cx).label().map(|label| label.to_string()),
|
||||
on_click: None,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -21,6 +21,7 @@ use gpui::{
|
||||
AnyWindowHandle, App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Task,
|
||||
WeakEntity, Window,
|
||||
};
|
||||
use http_client::StatusCode;
|
||||
use language_model::{
|
||||
ConfiguredModel, LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent,
|
||||
LanguageModelExt as _, LanguageModelId, LanguageModelRegistry, LanguageModelRequest,
|
||||
@@ -51,7 +52,19 @@ use uuid::Uuid;
|
||||
use zed_llm_client::{CompletionIntent, CompletionRequestStatus, UsageLimit};
|
||||
|
||||
const MAX_RETRY_ATTEMPTS: u8 = 3;
|
||||
const BASE_RETRY_DELAY_SECS: u64 = 5;
|
||||
const BASE_RETRY_DELAY: Duration = Duration::from_secs(5);
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
enum RetryStrategy {
|
||||
ExponentialBackoff {
|
||||
initial_delay: Duration,
|
||||
max_attempts: u8,
|
||||
},
|
||||
Fixed {
|
||||
delay: Duration,
|
||||
max_attempts: u8,
|
||||
},
|
||||
}
|
||||
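For reference, the `ExponentialBackoff` variant doubles its initial delay on each attempt; the same expression appears in `handle_retryable_error_with_delay` further down. A minimal sketch of that schedule (illustration only; `backoff_delay` is a made-up helper, not part of this change):

use std::time::Duration;

// Hedged sketch: same arithmetic as the ExponentialBackoff branch below.
fn backoff_delay(initial_delay: Duration, attempt: u8) -> Duration {
    let attempt = attempt.max(1) as u32;
    Duration::from_secs(initial_delay.as_secs() * 2u64.pow(attempt - 1))
}

With the 5-second `BASE_RETRY_DELAY` above, attempts 1 through 3 wait 5, 10, and 20 seconds.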
|
||||
#[derive(
|
||||
Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Serialize, Deserialize, JsonSchema,
|
||||
@@ -1519,7 +1532,9 @@ impl Thread {
|
||||
) -> Option<PendingToolUse> {
|
||||
let action_log = self.action_log.read(cx);
|
||||
|
||||
action_log.unnotified_stale_buffers(cx).next()?;
|
||||
if !action_log.has_unnotified_user_edits() {
|
||||
return None;
|
||||
}
|
||||
|
||||
// Represent notification as a simulated `project_notifications` tool call
|
||||
let tool_name = Arc::from("project_notifications");
|
||||
@@ -1933,18 +1948,6 @@ impl Thread {
|
||||
project.set_agent_location(None, cx);
|
||||
});
|
||||
|
||||
fn emit_generic_error(error: &anyhow::Error, cx: &mut Context<Thread>) {
|
||||
let error_message = error
|
||||
.chain()
|
||||
.map(|err| err.to_string())
|
||||
.collect::<Vec<_>>()
|
||||
.join("\n");
|
||||
cx.emit(ThreadEvent::ShowError(ThreadError::Message {
|
||||
header: "Error interacting with language model".into(),
|
||||
message: SharedString::from(error_message.clone()),
|
||||
}));
|
||||
}
|
||||
|
||||
if error.is::<PaymentRequiredError>() {
|
||||
cx.emit(ThreadEvent::ShowError(ThreadError::PaymentRequired));
|
||||
} else if let Some(error) =
|
||||
@@ -1956,9 +1959,10 @@ impl Thread {
|
||||
} else if let Some(completion_error) =
|
||||
error.downcast_ref::<LanguageModelCompletionError>()
|
||||
{
|
||||
use LanguageModelCompletionError::*;
|
||||
match &completion_error {
|
||||
PromptTooLarge { tokens, .. } => {
|
||||
LanguageModelCompletionError::PromptTooLarge {
|
||||
tokens, ..
|
||||
} => {
|
||||
let tokens = tokens.unwrap_or_else(|| {
|
||||
// We didn't get an exact token count from the API, so fall back on our estimate.
|
||||
thread
|
||||
@@ -1979,63 +1983,22 @@ impl Thread {
|
||||
});
|
||||
cx.notify();
|
||||
}
|
||||
RateLimitExceeded {
|
||||
retry_after: Some(retry_after),
|
||||
..
|
||||
}
|
||||
| ServerOverloaded {
|
||||
retry_after: Some(retry_after),
|
||||
..
|
||||
} => {
|
||||
thread.handle_rate_limit_error(
|
||||
&completion_error,
|
||||
*retry_after,
|
||||
model.clone(),
|
||||
intent,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
retry_scheduled = true;
|
||||
}
|
||||
RateLimitExceeded { .. } | ServerOverloaded { .. } => {
|
||||
retry_scheduled = thread.handle_retryable_error(
|
||||
&completion_error,
|
||||
model.clone(),
|
||||
intent,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
if !retry_scheduled {
|
||||
emit_generic_error(error, cx);
|
||||
_ => {
|
||||
if let Some(retry_strategy) =
|
||||
Thread::get_retry_strategy(completion_error)
|
||||
{
|
||||
retry_scheduled = thread
|
||||
.handle_retryable_error_with_delay(
|
||||
&completion_error,
|
||||
Some(retry_strategy),
|
||||
model.clone(),
|
||||
intent,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
}
|
||||
}
|
||||
ApiInternalServerError { .. }
|
||||
| ApiReadResponseError { .. }
|
||||
| HttpSend { .. } => {
|
||||
retry_scheduled = thread.handle_retryable_error(
|
||||
&completion_error,
|
||||
model.clone(),
|
||||
intent,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
if !retry_scheduled {
|
||||
emit_generic_error(error, cx);
|
||||
}
|
||||
}
|
||||
NoApiKey { .. }
|
||||
| HttpResponseError { .. }
|
||||
| BadRequestFormat { .. }
|
||||
| AuthenticationError { .. }
|
||||
| PermissionError { .. }
|
||||
| ApiEndpointNotFound { .. }
|
||||
| SerializeRequest { .. }
|
||||
| BuildRequestBody { .. }
|
||||
| DeserializeResponse { .. }
|
||||
| Other { .. } => emit_generic_error(error, cx),
|
||||
}
|
||||
} else {
|
||||
emit_generic_error(error, cx);
|
||||
}
|
||||
|
||||
if !retry_scheduled {
|
||||
@@ -2162,73 +2125,86 @@ impl Thread {
|
||||
});
|
||||
}
|
||||
|
||||
fn handle_rate_limit_error(
|
||||
&mut self,
|
||||
error: &LanguageModelCompletionError,
|
||||
retry_after: Duration,
|
||||
model: Arc<dyn LanguageModel>,
|
||||
intent: CompletionIntent,
|
||||
window: Option<AnyWindowHandle>,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
// For rate limit errors, we only retry once with the specified duration
|
||||
let retry_message = format!("{error}. Retrying in {} seconds…", retry_after.as_secs());
|
||||
log::warn!(
|
||||
"Retrying completion request in {} seconds: {error:?}",
|
||||
retry_after.as_secs(),
|
||||
);
|
||||
fn get_retry_strategy(error: &LanguageModelCompletionError) -> Option<RetryStrategy> {
|
||||
use LanguageModelCompletionError::*;
|
||||
|
||||
// Add a UI-only message instead of a regular message
|
||||
let id = self.next_message_id.post_inc();
|
||||
self.messages.push(Message {
|
||||
id,
|
||||
role: Role::System,
|
||||
segments: vec![MessageSegment::Text(retry_message)],
|
||||
loaded_context: LoadedContext::default(),
|
||||
creases: Vec::new(),
|
||||
is_hidden: false,
|
||||
ui_only: true,
|
||||
});
|
||||
cx.emit(ThreadEvent::MessageAdded(id));
|
||||
// Schedule the retry
|
||||
let thread_handle = cx.entity().downgrade();
|
||||
|
||||
cx.spawn(async move |_thread, cx| {
|
||||
cx.background_executor().timer(retry_after).await;
|
||||
|
||||
thread_handle
|
||||
.update(cx, |thread, cx| {
|
||||
// Retry the completion
|
||||
thread.send_to_model(model, intent, window, cx);
|
||||
// General strategy here:
|
||||
// - If retrying won't help (e.g. invalid API key or payload too large), return None so we don't retry at all.
|
||||
// - If it's a time-based issue (e.g. server overloaded, rate limit exceeded), try multiple times with exponential backoff.
|
||||
// - If it's an issue that *might* be fixed by retrying (e.g. internal server error), just retry once.
|
||||
match error {
|
||||
HttpResponseError {
|
||||
status_code: StatusCode::TOO_MANY_REQUESTS,
|
||||
..
|
||||
} => Some(RetryStrategy::ExponentialBackoff {
|
||||
initial_delay: BASE_RETRY_DELAY,
|
||||
max_attempts: MAX_RETRY_ATTEMPTS,
|
||||
}),
|
||||
ServerOverloaded { retry_after, .. } | RateLimitExceeded { retry_after, .. } => {
|
||||
Some(RetryStrategy::Fixed {
|
||||
delay: retry_after.unwrap_or(BASE_RETRY_DELAY),
|
||||
max_attempts: MAX_RETRY_ATTEMPTS,
|
||||
})
|
||||
.log_err();
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
|
||||
fn handle_retryable_error(
|
||||
&mut self,
|
||||
error: &LanguageModelCompletionError,
|
||||
model: Arc<dyn LanguageModel>,
|
||||
intent: CompletionIntent,
|
||||
window: Option<AnyWindowHandle>,
|
||||
cx: &mut Context<Self>,
|
||||
) -> bool {
|
||||
self.handle_retryable_error_with_delay(error, None, model, intent, window, cx)
|
||||
}
|
||||
ApiInternalServerError { .. } => Some(RetryStrategy::Fixed {
|
||||
delay: BASE_RETRY_DELAY,
|
||||
max_attempts: 1,
|
||||
}),
|
||||
ApiReadResponseError { .. }
|
||||
| HttpSend { .. }
|
||||
| DeserializeResponse { .. }
|
||||
| BadRequestFormat { .. } => Some(RetryStrategy::Fixed {
|
||||
delay: BASE_RETRY_DELAY,
|
||||
max_attempts: 1,
|
||||
}),
|
||||
// Retrying these errors definitely shouldn't help.
|
||||
HttpResponseError {
|
||||
status_code:
|
||||
StatusCode::PAYLOAD_TOO_LARGE | StatusCode::FORBIDDEN | StatusCode::UNAUTHORIZED,
|
||||
..
|
||||
}
|
||||
| SerializeRequest { .. }
|
||||
| BuildRequestBody { .. }
|
||||
| PromptTooLarge { .. }
|
||||
| AuthenticationError { .. }
|
||||
| PermissionError { .. }
|
||||
| ApiEndpointNotFound { .. }
|
||||
| NoApiKey { .. } => None,
|
||||
// Retry all other 4xx and 5xx errors once.
|
||||
HttpResponseError { status_code, .. }
|
||||
if status_code.is_client_error() || status_code.is_server_error() =>
|
||||
{
|
||||
Some(RetryStrategy::Fixed {
|
||||
delay: BASE_RETRY_DELAY,
|
||||
max_attempts: 1,
|
||||
})
|
||||
}
|
||||
// Conservatively assume that any other errors are non-retryable
|
||||
HttpResponseError { .. } | Other(..) => None,
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_retryable_error_with_delay(
|
||||
&mut self,
|
||||
error: &LanguageModelCompletionError,
|
||||
custom_delay: Option<Duration>,
|
||||
strategy: Option<RetryStrategy>,
|
||||
model: Arc<dyn LanguageModel>,
|
||||
intent: CompletionIntent,
|
||||
window: Option<AnyWindowHandle>,
|
||||
cx: &mut Context<Self>,
|
||||
) -> bool {
|
||||
let Some(strategy) = strategy.or_else(|| Self::get_retry_strategy(error)) else {
|
||||
return false;
|
||||
};
|
||||
|
||||
let max_attempts = match &strategy {
|
||||
RetryStrategy::ExponentialBackoff { max_attempts, .. } => *max_attempts,
|
||||
RetryStrategy::Fixed { max_attempts, .. } => *max_attempts,
|
||||
};
|
||||
|
||||
let retry_state = self.retry_state.get_or_insert(RetryState {
|
||||
attempt: 0,
|
||||
max_attempts: MAX_RETRY_ATTEMPTS,
|
||||
max_attempts,
|
||||
intent,
|
||||
});
|
||||
|
||||
@@ -2238,20 +2214,24 @@ impl Thread {
|
||||
let intent = retry_state.intent;
|
||||
|
||||
if attempt <= max_attempts {
|
||||
// Use custom delay if provided (e.g., from rate limit), otherwise exponential backoff
|
||||
let delay = if let Some(custom_delay) = custom_delay {
|
||||
custom_delay
|
||||
} else {
|
||||
let delay_secs = BASE_RETRY_DELAY_SECS * 2u64.pow((attempt - 1) as u32);
|
||||
Duration::from_secs(delay_secs)
|
||||
let delay = match &strategy {
|
||||
RetryStrategy::ExponentialBackoff { initial_delay, .. } => {
|
||||
let delay_secs = initial_delay.as_secs() * 2u64.pow((attempt - 1) as u32);
|
||||
Duration::from_secs(delay_secs)
|
||||
}
|
||||
RetryStrategy::Fixed { delay, .. } => *delay,
|
||||
};
|
||||
|
||||
// Add a transient message to inform the user
|
||||
let delay_secs = delay.as_secs();
|
||||
let retry_message = format!(
|
||||
"{error}. Retrying (attempt {attempt} of {max_attempts}) \
|
||||
in {delay_secs} seconds..."
|
||||
);
|
||||
let retry_message = if max_attempts == 1 {
|
||||
format!("{error}. Retrying in {delay_secs} seconds...")
|
||||
} else {
|
||||
format!(
|
||||
"{error}. Retrying (attempt {attempt} of {max_attempts}) \
|
||||
in {delay_secs} seconds..."
|
||||
)
|
||||
};
|
||||
log::warn!(
|
||||
"Retrying completion request (attempt {attempt} of {max_attempts}) \
|
||||
in {delay_secs} seconds: {error:?}",
|
||||
@@ -2290,19 +2270,9 @@ impl Thread {
|
||||
// Max retries exceeded
|
||||
self.retry_state = None;
|
||||
|
||||
let notification_text = if max_attempts == 1 {
|
||||
"Failed after retrying.".into()
|
||||
} else {
|
||||
format!("Failed after retrying {} times.", max_attempts).into()
|
||||
};
|
||||
|
||||
// Stop generating since we're giving up on retrying.
|
||||
self.pending_completions.clear();
|
||||
|
||||
cx.emit(ThreadEvent::RetriesFailed {
|
||||
message: notification_text,
|
||||
});
|
||||
|
||||
false
|
||||
}
|
||||
}
|
||||
@@ -3258,9 +3228,6 @@ pub enum ThreadEvent {
|
||||
CancelEditing,
|
||||
CompletionCanceled,
|
||||
ProfileChanged,
|
||||
RetriesFailed {
|
||||
message: SharedString,
|
||||
},
|
||||
}
|
||||
|
||||
impl EventEmitter<ThreadEvent> for Thread {}
|
||||
@@ -3288,7 +3255,6 @@ mod tests {
|
||||
use futures::stream::BoxStream;
|
||||
use gpui::TestAppContext;
|
||||
use http_client;
|
||||
use indoc::indoc;
|
||||
use language_model::fake_provider::{FakeLanguageModel, FakeLanguageModelProvider};
|
||||
use language_model::{
|
||||
LanguageModelCompletionError, LanguageModelName, LanguageModelProviderId,
|
||||
@@ -3617,6 +3583,7 @@ fn main() {{
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
#[ignore] // turn this test on when project_notifications tool is re-enabled
|
||||
async fn test_stale_buffer_notification(cx: &mut TestAppContext) {
|
||||
init_test_settings(cx);
|
||||
|
||||
@@ -3649,6 +3616,7 @@ fn main() {{
|
||||
cx,
|
||||
);
|
||||
});
|
||||
cx.run_until_parked();
|
||||
|
||||
// We shouldn't have a stale buffer notification yet
|
||||
let notifications = thread.read_with(cx, |thread, _| {
|
||||
@@ -3678,11 +3646,13 @@ fn main() {{
|
||||
cx,
|
||||
)
|
||||
});
|
||||
cx.run_until_parked();
|
||||
|
||||
// Check for the stale buffer warning
|
||||
thread.update(cx, |thread, cx| {
|
||||
thread.flush_notifications(model.clone(), CompletionIntent::UserPrompt, cx)
|
||||
});
|
||||
cx.run_until_parked();
|
||||
|
||||
let notifications = thread.read_with(cx, |thread, _cx| {
|
||||
find_tool_uses(thread, "project_notifications")
|
||||
@@ -3696,12 +3666,8 @@ fn main() {{
|
||||
panic!("`project_notifications` should return text");
|
||||
};
|
||||
|
||||
let expected_content = indoc! {"[The following is an auto-generated notification; do not reply]
|
||||
|
||||
These files have changed since the last read:
|
||||
- code.rs
|
||||
"};
|
||||
assert_eq!(notification_content, expected_content);
|
||||
assert!(notification_content.contains("These files have changed since the last read:"));
|
||||
assert!(notification_content.contains("code.rs"));
|
||||
|
||||
// Insert another user message and flush notifications again
|
||||
thread.update(cx, |thread, cx| {
|
||||
@@ -3717,6 +3683,7 @@ fn main() {{
|
||||
thread.update(cx, |thread, cx| {
|
||||
thread.flush_notifications(model.clone(), CompletionIntent::UserPrompt, cx)
|
||||
});
|
||||
cx.run_until_parked();
|
||||
|
||||
// There should be no new notifications (we already flushed one)
|
||||
let notifications = thread.read_with(cx, |thread, _cx| {
|
||||
@@ -4192,7 +4159,7 @@ fn main() {{
|
||||
assert_eq!(retry_state.attempt, 1, "Should be first retry attempt");
|
||||
assert_eq!(
|
||||
retry_state.max_attempts, MAX_RETRY_ATTEMPTS,
|
||||
"Should have default max attempts"
|
||||
"Should retry MAX_RETRY_ATTEMPTS times for overloaded errors"
|
||||
);
|
||||
});
|
||||
|
||||
@@ -4265,7 +4232,7 @@ fn main() {{
|
||||
let retry_state = thread.retry_state.as_ref().unwrap();
|
||||
assert_eq!(retry_state.attempt, 1, "Should be first retry attempt");
|
||||
assert_eq!(
|
||||
retry_state.max_attempts, MAX_RETRY_ATTEMPTS,
|
||||
retry_state.max_attempts, 1,
|
||||
"Should have correct max attempts"
|
||||
);
|
||||
});
|
||||
@@ -4281,8 +4248,8 @@ fn main() {{
|
||||
if let MessageSegment::Text(text) = seg {
|
||||
text.contains("internal")
|
||||
&& text.contains("Fake")
|
||||
&& text
|
||||
.contains(&format!("attempt 1 of {}", MAX_RETRY_ATTEMPTS))
|
||||
&& text.contains("Retrying in")
|
||||
&& !text.contains("attempt")
|
||||
} else {
|
||||
false
|
||||
}
|
||||
@@ -4320,8 +4287,8 @@ fn main() {{
|
||||
let project = create_test_project(cx, json!({})).await;
|
||||
let (_, _, thread, _, _base_model) = setup_test_environment(cx, project.clone()).await;
|
||||
|
||||
// Create model that returns overloaded error
|
||||
let model = Arc::new(ErrorInjector::new(TestError::Overloaded));
|
||||
// Create model that returns internal server error
|
||||
let model = Arc::new(ErrorInjector::new(TestError::InternalServerError));
|
||||
|
||||
// Insert a user message
|
||||
thread.update(cx, |thread, cx| {
|
||||
@@ -4371,11 +4338,14 @@ fn main() {{
|
||||
assert!(thread.retry_state.is_some(), "Should have retry state");
|
||||
let retry_state = thread.retry_state.as_ref().unwrap();
|
||||
assert_eq!(retry_state.attempt, 1, "Should be first retry attempt");
|
||||
assert_eq!(
|
||||
retry_state.max_attempts, 1,
|
||||
"Internal server errors should only retry once"
|
||||
);
|
||||
});
|
||||
|
||||
// Advance clock for first retry
|
||||
cx.executor()
|
||||
.advance_clock(Duration::from_secs(BASE_RETRY_DELAY_SECS));
|
||||
cx.executor().advance_clock(BASE_RETRY_DELAY);
|
||||
cx.run_until_parked();
|
||||
|
||||
// Should have scheduled second retry - count retry messages
|
||||
@@ -4395,93 +4365,25 @@ fn main() {{
|
||||
})
|
||||
.count()
|
||||
});
|
||||
assert_eq!(retry_count, 2, "Should have scheduled second retry");
|
||||
|
||||
// Check retry state updated
|
||||
thread.read_with(cx, |thread, _| {
|
||||
assert!(thread.retry_state.is_some(), "Should have retry state");
|
||||
let retry_state = thread.retry_state.as_ref().unwrap();
|
||||
assert_eq!(retry_state.attempt, 2, "Should be second retry attempt");
|
||||
assert_eq!(
|
||||
retry_state.max_attempts, MAX_RETRY_ATTEMPTS,
|
||||
"Should have correct max attempts"
|
||||
);
|
||||
});
|
||||
|
||||
// Advance clock for second retry (exponential backoff)
|
||||
cx.executor()
|
||||
.advance_clock(Duration::from_secs(BASE_RETRY_DELAY_SECS * 2));
|
||||
cx.run_until_parked();
|
||||
|
||||
// Should have scheduled third retry
|
||||
// Count all retry messages now
|
||||
let retry_count = thread.update(cx, |thread, _| {
|
||||
thread
|
||||
.messages
|
||||
.iter()
|
||||
.filter(|m| {
|
||||
m.ui_only
|
||||
&& m.segments.iter().any(|s| {
|
||||
if let MessageSegment::Text(text) = s {
|
||||
text.contains("Retrying") && text.contains("seconds")
|
||||
} else {
|
||||
false
|
||||
}
|
||||
})
|
||||
})
|
||||
.count()
|
||||
});
|
||||
assert_eq!(
|
||||
retry_count, MAX_RETRY_ATTEMPTS as usize,
|
||||
"Should have scheduled third retry"
|
||||
retry_count, 1,
|
||||
"Should have only one retry for internal server errors"
|
||||
);
|
||||
|
||||
// Check retry state updated
|
||||
// For internal server errors, we only retry once and then give up
|
||||
// Check that retry_state is cleared after the single retry
|
||||
thread.read_with(cx, |thread, _| {
|
||||
assert!(thread.retry_state.is_some(), "Should have retry state");
|
||||
let retry_state = thread.retry_state.as_ref().unwrap();
|
||||
assert_eq!(
|
||||
retry_state.attempt, MAX_RETRY_ATTEMPTS,
|
||||
"Should be at max retry attempt"
|
||||
);
|
||||
assert_eq!(
|
||||
retry_state.max_attempts, MAX_RETRY_ATTEMPTS,
|
||||
"Should have correct max attempts"
|
||||
assert!(
|
||||
thread.retry_state.is_none(),
|
||||
"Retry state should be cleared after single retry"
|
||||
);
|
||||
});
|
||||
|
||||
// Advance clock for third retry (exponential backoff)
|
||||
cx.executor()
|
||||
.advance_clock(Duration::from_secs(BASE_RETRY_DELAY_SECS * 4));
|
||||
cx.run_until_parked();
|
||||
|
||||
// No more retries should be scheduled after clock was advanced.
|
||||
let retry_count = thread.update(cx, |thread, _| {
|
||||
thread
|
||||
.messages
|
||||
.iter()
|
||||
.filter(|m| {
|
||||
m.ui_only
|
||||
&& m.segments.iter().any(|s| {
|
||||
if let MessageSegment::Text(text) = s {
|
||||
text.contains("Retrying") && text.contains("seconds")
|
||||
} else {
|
||||
false
|
||||
}
|
||||
})
|
||||
})
|
||||
.count()
|
||||
});
|
||||
assert_eq!(
|
||||
retry_count, MAX_RETRY_ATTEMPTS as usize,
|
||||
"Should not exceed max retries"
|
||||
);
|
||||
|
||||
// Final completion count should be initial + max retries
|
||||
// Verify total attempts (1 initial + 1 retry)
|
||||
assert_eq!(
|
||||
*completion_count.lock(),
|
||||
(MAX_RETRY_ATTEMPTS + 1) as usize,
|
||||
"Should have made initial + max retry attempts"
|
||||
2,
|
||||
"Should have attempted once plus 1 retry"
|
||||
);
|
||||
}
|
||||
|
||||
@@ -4501,13 +4403,13 @@ fn main() {{
|
||||
});
|
||||
|
||||
// Track events
|
||||
let retries_failed = Arc::new(Mutex::new(false));
|
||||
let retries_failed_clone = retries_failed.clone();
|
||||
let stopped_with_error = Arc::new(Mutex::new(false));
|
||||
let stopped_with_error_clone = stopped_with_error.clone();
|
||||
|
||||
let _subscription = thread.update(cx, |_, cx| {
|
||||
cx.subscribe(&thread, move |_, _, event: &ThreadEvent, _| {
|
||||
if let ThreadEvent::RetriesFailed { .. } = event {
|
||||
*retries_failed_clone.lock() = true;
|
||||
if let ThreadEvent::Stopped(Err(_)) = event {
|
||||
*stopped_with_error_clone.lock() = true;
|
||||
}
|
||||
})
|
||||
});
|
||||
@@ -4519,23 +4421,11 @@ fn main() {{
|
||||
cx.run_until_parked();
|
||||
|
||||
// Advance through all retries
|
||||
for i in 0..MAX_RETRY_ATTEMPTS {
|
||||
let delay = if i == 0 {
|
||||
BASE_RETRY_DELAY_SECS
|
||||
} else {
|
||||
BASE_RETRY_DELAY_SECS * 2u64.pow(i as u32 - 1)
|
||||
};
|
||||
cx.executor().advance_clock(Duration::from_secs(delay));
|
||||
for _ in 0..MAX_RETRY_ATTEMPTS {
|
||||
cx.executor().advance_clock(BASE_RETRY_DELAY);
|
||||
cx.run_until_parked();
|
||||
}
|
||||
|
||||
// After the 3rd retry is scheduled, we need to wait for it to execute and fail
|
||||
// The 3rd retry has a delay of BASE_RETRY_DELAY_SECS * 4 (20 seconds)
|
||||
let final_delay = BASE_RETRY_DELAY_SECS * 2u64.pow((MAX_RETRY_ATTEMPTS - 1) as u32);
|
||||
cx.executor()
|
||||
.advance_clock(Duration::from_secs(final_delay));
|
||||
cx.run_until_parked();
|
||||
|
||||
let retry_count = thread.update(cx, |thread, _| {
|
||||
thread
|
||||
.messages
|
||||
@@ -4553,14 +4443,14 @@ fn main() {{
|
||||
.count()
|
||||
});
|
||||
|
||||
// After max retries, should emit RetriesFailed event
|
||||
// After max retries, should emit Stopped(Err(...)) event
|
||||
assert_eq!(
|
||||
retry_count, MAX_RETRY_ATTEMPTS as usize,
|
||||
"Should have attempted max retries"
|
||||
"Should have attempted MAX_RETRY_ATTEMPTS retries for overloaded errors"
|
||||
);
|
||||
assert!(
|
||||
*retries_failed.lock(),
|
||||
"Should emit RetriesFailed event after max retries exceeded"
|
||||
*stopped_with_error.lock(),
|
||||
"Should emit Stopped(Err(...)) event after max retries exceeded"
|
||||
);
|
||||
|
||||
// Retry state should be cleared
|
||||
@@ -4578,7 +4468,7 @@ fn main() {{
|
||||
.count();
|
||||
assert_eq!(
|
||||
retry_messages, MAX_RETRY_ATTEMPTS as usize,
|
||||
"Should have one retry message per attempt"
|
||||
"Should have MAX_RETRY_ATTEMPTS retry messages for overloaded errors"
|
||||
);
|
||||
});
|
||||
}
|
||||
@@ -4716,8 +4606,7 @@ fn main() {{
|
||||
});
|
||||
|
||||
// Wait for retry
|
||||
cx.executor()
|
||||
.advance_clock(Duration::from_secs(BASE_RETRY_DELAY_SECS));
|
||||
cx.executor().advance_clock(BASE_RETRY_DELAY);
|
||||
cx.run_until_parked();
|
||||
|
||||
// Stream some successful content
|
||||
@@ -4879,8 +4768,7 @@ fn main() {{
|
||||
});
|
||||
|
||||
// Wait for retry delay
|
||||
cx.executor()
|
||||
.advance_clock(Duration::from_secs(BASE_RETRY_DELAY_SECS));
|
||||
cx.executor().advance_clock(BASE_RETRY_DELAY);
|
||||
cx.run_until_parked();
|
||||
|
||||
// The retry should now use our FailOnceModel which should succeed
|
||||
@@ -5039,9 +4927,15 @@ fn main() {{
|
||||
|
||||
thread.read_with(cx, |thread, _| {
|
||||
assert!(
|
||||
thread.retry_state.is_none(),
|
||||
"Rate limit errors should not set retry_state"
|
||||
thread.retry_state.is_some(),
|
||||
"Rate limit errors should set retry_state"
|
||||
);
|
||||
if let Some(retry_state) = &thread.retry_state {
|
||||
assert_eq!(
|
||||
retry_state.max_attempts, MAX_RETRY_ATTEMPTS,
|
||||
"Rate limit errors should use MAX_RETRY_ATTEMPTS"
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
// Verify we have one retry message
|
||||
@@ -5074,18 +4968,15 @@ fn main() {{
|
||||
.find(|msg| msg.role == Role::System && msg.ui_only)
|
||||
.expect("Should have a retry message");
|
||||
|
||||
// Check that the message doesn't contain attempt count
|
||||
// Check that the message contains attempt count since we use retry_state
|
||||
if let Some(MessageSegment::Text(text)) = retry_message.segments.first() {
|
||||
assert!(
|
||||
!text.contains("attempt"),
|
||||
"Rate limit retry message should not contain attempt count"
|
||||
text.contains(&format!("attempt 1 of {}", MAX_RETRY_ATTEMPTS)),
|
||||
"Rate limit retry message should contain attempt count with MAX_RETRY_ATTEMPTS"
|
||||
);
|
||||
assert!(
|
||||
text.contains(&format!(
|
||||
"Retrying in {} seconds",
|
||||
TEST_RATE_LIMIT_RETRY_SECS
|
||||
)),
|
||||
"Rate limit retry message should contain retry delay"
|
||||
text.contains("Retrying"),
|
||||
"Rate limit retry message should contain retry text"
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
@@ -5,6 +5,10 @@ edition.workspace = true
|
||||
publish.workspace = true
|
||||
license = "GPL-3.0-or-later"
|
||||
|
||||
[features]
|
||||
test-support = ["acp_thread/test-support", "gpui/test-support", "project/test-support"]
|
||||
e2e = []
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
@@ -13,15 +17,32 @@ path = "src/agent_servers.rs"
|
||||
doctest = false
|
||||
|
||||
[dependencies]
|
||||
acp_thread.workspace = true
|
||||
agentic-coding-protocol.workspace = true
|
||||
anyhow.workspace = true
|
||||
collections.workspace = true
|
||||
context_server.workspace = true
|
||||
futures.workspace = true
|
||||
gpui.workspace = true
|
||||
itertools.workspace = true
|
||||
log.workspace = true
|
||||
paths.workspace = true
|
||||
project.workspace = true
|
||||
schemars.workspace = true
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
settings.workspace = true
|
||||
smol.workspace = true
|
||||
tempfile.workspace = true
|
||||
ui.workspace = true
|
||||
util.workspace = true
|
||||
watch.workspace = true
|
||||
which.workspace = true
|
||||
workspace-hack.workspace = true
|
||||
|
||||
[dev-dependencies]
|
||||
env_logger.workspace = true
|
||||
language.workspace = true
|
||||
indoc.workspace = true
|
||||
acp_thread = { workspace = true, features = ["test-support"] }
|
||||
gpui = { workspace = true, features = ["test-support"] }
|
||||
|
||||
@@ -1,30 +1,82 @@
|
||||
mod claude;
|
||||
mod codex;
|
||||
mod gemini;
|
||||
mod settings;
|
||||
mod stdio_agent_server;
|
||||
|
||||
#[cfg(test)]
|
||||
mod e2e_tests;
|
||||
|
||||
pub use claude::*;
|
||||
pub use codex::*;
|
||||
pub use gemini::*;
|
||||
pub use settings::*;
|
||||
pub use stdio_agent_server::*;
|
||||
|
||||
use acp_thread::AcpThread;
|
||||
use anyhow::Result;
|
||||
use collections::HashMap;
|
||||
use gpui::{App, AsyncApp, Entity, SharedString, Task};
|
||||
use project::Project;
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{
|
||||
path::{Path, PathBuf},
|
||||
sync::Arc,
|
||||
};
|
||||
|
||||
use anyhow::{Context as _, Result};
|
||||
use collections::HashMap;
|
||||
use gpui::{App, AsyncApp, Entity, SharedString};
|
||||
use project::Project;
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::{Settings, SettingsSources, SettingsStore};
|
||||
use util::{ResultExt, paths};
|
||||
use util::ResultExt as _;
|
||||
|
||||
pub fn init(cx: &mut App) {
|
||||
AllAgentServersSettings::register(cx);
|
||||
settings::init(cx);
|
||||
}
|
||||
|
||||
#[derive(Default, Deserialize, Serialize, Clone, JsonSchema, Debug)]
|
||||
pub struct AllAgentServersSettings {
|
||||
gemini: Option<AgentServerSettings>,
|
||||
pub trait AgentServer: Send {
|
||||
fn logo(&self) -> ui::IconName;
|
||||
fn name(&self) -> &'static str;
|
||||
fn empty_state_headline(&self) -> &'static str;
|
||||
fn empty_state_message(&self) -> &'static str;
|
||||
fn supports_always_allow(&self) -> bool;
|
||||
|
||||
fn new_thread(
|
||||
&self,
|
||||
root_dir: &Path,
|
||||
project: &Entity<Project>,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<Entity<AcpThread>>>;
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Serialize, Clone, JsonSchema, Debug)]
|
||||
pub struct AgentServerSettings {
|
||||
#[serde(flatten)]
|
||||
command: AgentServerCommand,
|
||||
impl std::fmt::Debug for AgentServerCommand {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
let filtered_env = self.env.as_ref().map(|env| {
|
||||
env.iter()
|
||||
.map(|(k, v)| {
|
||||
(
|
||||
k,
|
||||
if util::redact::should_redact(k) {
|
||||
"[REDACTED]"
|
||||
} else {
|
||||
v
|
||||
},
|
||||
)
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
});
|
||||
|
||||
f.debug_struct("AgentServerCommand")
|
||||
.field("path", &self.path)
|
||||
.field("args", &self.args)
|
||||
.field("env", &filtered_env)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
pub enum AgentServerVersion {
|
||||
Supported,
|
||||
Unsupported {
|
||||
error_message: SharedString,
|
||||
upgrade_message: SharedString,
|
||||
upgrade_command: String,
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Serialize, Clone, PartialEq, Eq, JsonSchema)]
|
||||
@@ -36,105 +88,34 @@ pub struct AgentServerCommand {
|
||||
pub env: Option<HashMap<String, String>>,
|
||||
}
|
||||
|
||||
pub struct Gemini;
|
||||
|
||||
pub struct AgentServerVersion {
|
||||
pub current_version: SharedString,
|
||||
pub supported: bool,
|
||||
}
|
||||
|
||||
pub trait AgentServer: Send {
|
||||
fn command(
|
||||
&self,
|
||||
impl AgentServerCommand {
|
||||
pub(crate) async fn resolve(
|
||||
path_bin_name: &'static str,
|
||||
extra_args: &[&'static str],
|
||||
settings: Option<AgentServerSettings>,
|
||||
project: &Entity<Project>,
|
||||
cx: &mut AsyncApp,
|
||||
) -> impl Future<Output = Result<AgentServerCommand>>;
|
||||
|
||||
fn version(
|
||||
&self,
|
||||
command: &AgentServerCommand,
|
||||
) -> impl Future<Output = Result<AgentServerVersion>> + Send;
|
||||
}
|
||||
|
||||
const GEMINI_ACP_ARG: &str = "--acp";
|
||||
|
||||
impl AgentServer for Gemini {
|
||||
async fn command(
|
||||
&self,
|
||||
project: &Entity<Project>,
|
||||
cx: &mut AsyncApp,
|
||||
) -> Result<AgentServerCommand> {
|
||||
let custom_command = cx.read_global(|settings: &SettingsStore, _| {
|
||||
let settings = settings.get::<AllAgentServersSettings>(None);
|
||||
settings
|
||||
.gemini
|
||||
.as_ref()
|
||||
.map(|gemini_settings| AgentServerCommand {
|
||||
path: gemini_settings.command.path.clone(),
|
||||
args: gemini_settings
|
||||
.command
|
||||
.args
|
||||
.iter()
|
||||
.cloned()
|
||||
.chain(std::iter::once(GEMINI_ACP_ARG.into()))
|
||||
.collect(),
|
||||
env: gemini_settings.command.env.clone(),
|
||||
})
|
||||
})?;
|
||||
|
||||
if let Some(custom_command) = custom_command {
|
||||
return Ok(custom_command);
|
||||
}
|
||||
|
||||
if let Some(path) = find_bin_in_path("gemini", project, cx).await {
|
||||
return Ok(AgentServerCommand {
|
||||
path,
|
||||
args: vec![GEMINI_ACP_ARG.into()],
|
||||
env: None,
|
||||
) -> Option<Self> {
|
||||
if let Some(agent_settings) = settings {
|
||||
return Some(Self {
|
||||
path: agent_settings.command.path,
|
||||
args: agent_settings
|
||||
.command
|
||||
.args
|
||||
.into_iter()
|
||||
.chain(extra_args.iter().map(|arg| arg.to_string()))
|
||||
.collect(),
|
||||
env: agent_settings.command.env,
|
||||
});
|
||||
} else {
|
||||
find_bin_in_path(path_bin_name, project, cx)
|
||||
.await
|
||||
.map(|path| Self {
|
||||
path,
|
||||
args: extra_args.iter().map(|arg| arg.to_string()).collect(),
|
||||
env: None,
|
||||
})
|
||||
}
|
||||
|
||||
let (fs, node_runtime) = project.update(cx, |project, _| {
|
||||
(project.fs().clone(), project.node_runtime().cloned())
|
||||
})?;
|
||||
let node_runtime = node_runtime.context("gemini not found on path")?;
|
||||
|
||||
let directory = ::paths::agent_servers_dir().join("gemini");
|
||||
fs.create_dir(&directory).await?;
|
||||
node_runtime
|
||||
.npm_install_packages(&directory, &[("@google/gemini-cli", "latest")])
|
||||
.await?;
|
||||
let path = directory.join("node_modules/.bin/gemini");
|
||||
|
||||
Ok(AgentServerCommand {
|
||||
path,
|
||||
args: vec![GEMINI_ACP_ARG.into()],
|
||||
env: None,
|
||||
})
|
||||
}
|
||||
|
||||
async fn version(&self, command: &AgentServerCommand) -> Result<AgentServerVersion> {
|
||||
let version_fut = util::command::new_smol_command(&command.path)
|
||||
.args(command.args.iter())
|
||||
.arg("--version")
|
||||
.kill_on_drop(true)
|
||||
.output();
|
||||
|
||||
let help_fut = util::command::new_smol_command(&command.path)
|
||||
.args(command.args.iter())
|
||||
.arg("--help")
|
||||
.kill_on_drop(true)
|
||||
.output();
|
||||
|
||||
let (version_output, help_output) = futures::future::join(version_fut, help_fut).await;
|
||||
|
||||
let current_version = String::from_utf8(version_output?.stdout)?.into();
|
||||
let supported = String::from_utf8(help_output?.stdout)?.contains(GEMINI_ACP_ARG);
|
||||
|
||||
Ok(AgentServerVersion {
|
||||
current_version,
|
||||
supported,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -184,48 +165,3 @@ async fn find_bin_in_path(
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
impl std::fmt::Debug for AgentServerCommand {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
let filtered_env = self.env.as_ref().map(|env| {
|
||||
env.iter()
|
||||
.map(|(k, v)| {
|
||||
(
|
||||
k,
|
||||
if util::redact::should_redact(k) {
|
||||
"[REDACTED]"
|
||||
} else {
|
||||
v
|
||||
},
|
||||
)
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
});
|
||||
|
||||
f.debug_struct("AgentServerCommand")
|
||||
.field("path", &self.path)
|
||||
.field("args", &self.args)
|
||||
.field("env", &filtered_env)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl settings::Settings for AllAgentServersSettings {
|
||||
const KEY: Option<&'static str> = Some("agent_servers");
|
||||
|
||||
type FileContent = Self;
|
||||
|
||||
fn load(sources: SettingsSources<Self::FileContent>, _: &mut App) -> Result<Self> {
|
||||
let mut settings = AllAgentServersSettings::default();
|
||||
|
||||
for value in sources.defaults_and_customizations() {
|
||||
if value.gemini.is_some() {
|
||||
settings.gemini = value.gemini.clone();
|
||||
}
|
||||
}
|
||||
|
||||
Ok(settings)
|
||||
}
|
||||
|
||||
fn import_from_vscode(_vscode: &settings::VsCodeSettings, _current: &mut Self::FileContent) {}
|
||||
}
|
||||
|
||||
crates/agent_servers/src/claude.rs (new file, 701 lines added)

@@ -0,0 +1,701 @@
|
||||
mod mcp_server;
|
||||
mod tools;
|
||||
|
||||
use collections::HashMap;
|
||||
use project::Project;
|
||||
use settings::SettingsStore;
|
||||
use std::cell::RefCell;
|
||||
use std::fmt::Display;
|
||||
use std::path::Path;
|
||||
use std::rc::Rc;
|
||||
|
||||
use agentic_coding_protocol::{
|
||||
self as acp, AnyAgentRequest, AnyAgentResult, Client, ProtocolVersion,
|
||||
StreamAssistantMessageChunkParams, ToolCallContent, UpdateToolCallParams,
|
||||
};
|
||||
use anyhow::{Result, anyhow};
|
||||
use futures::channel::oneshot;
|
||||
use futures::future::LocalBoxFuture;
|
||||
use futures::{AsyncBufReadExt, AsyncWriteExt};
|
||||
use futures::{
|
||||
AsyncRead, AsyncWrite, FutureExt, StreamExt,
|
||||
channel::mpsc::{self, UnboundedReceiver, UnboundedSender},
|
||||
io::BufReader,
|
||||
select_biased,
|
||||
};
|
||||
use gpui::{App, AppContext, Entity, Task};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use util::ResultExt;
|
||||
|
||||
use crate::claude::mcp_server::ClaudeMcpServer;
|
||||
use crate::claude::tools::ClaudeTool;
|
||||
use crate::{AgentServer, AgentServerCommand, AllAgentServersSettings};
|
||||
use acp_thread::{AcpClientDelegate, AcpThread, AgentConnection};
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct ClaudeCode;
|
||||
|
||||
impl AgentServer for ClaudeCode {
|
||||
fn name(&self) -> &'static str {
|
||||
"Claude Code"
|
||||
}
|
||||
|
||||
fn empty_state_headline(&self) -> &'static str {
|
||||
self.name()
|
||||
}
|
||||
|
||||
fn empty_state_message(&self) -> &'static str {
|
||||
""
|
||||
}
|
||||
|
||||
fn logo(&self) -> ui::IconName {
|
||||
ui::IconName::AiClaude
|
||||
}
|
||||
|
||||
fn supports_always_allow(&self) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
fn new_thread(
|
||||
&self,
|
||||
root_dir: &Path,
|
||||
project: &Entity<Project>,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<Entity<AcpThread>>> {
|
||||
let project = project.clone();
|
||||
let root_dir = root_dir.to_path_buf();
|
||||
let title = self.name().into();
|
||||
cx.spawn(async move |cx| {
|
||||
let (mut delegate_tx, delegate_rx) = watch::channel(None);
|
||||
let tool_id_map = Rc::new(RefCell::new(HashMap::default()));
|
||||
|
||||
let permission_mcp_server =
|
||||
ClaudeMcpServer::new(delegate_rx, tool_id_map.clone(), cx).await?;
|
||||
|
||||
let mut mcp_servers = HashMap::default();
|
||||
mcp_servers.insert(
|
||||
mcp_server::SERVER_NAME.to_string(),
|
||||
permission_mcp_server.server_config()?,
|
||||
);
|
||||
let mcp_config = McpConfig { mcp_servers };
|
||||
|
||||
let mcp_config_file = tempfile::NamedTempFile::new()?;
|
||||
let (mcp_config_file, mcp_config_path) = mcp_config_file.into_parts();
|
||||
|
||||
let mut mcp_config_file = smol::fs::File::from(mcp_config_file);
|
||||
mcp_config_file
|
||||
.write_all(serde_json::to_string(&mcp_config)?.as_bytes())
|
||||
.await?;
|
||||
mcp_config_file.flush().await?;
|
||||
|
||||
let settings = cx.read_global(|settings: &SettingsStore, _| {
|
||||
settings.get::<AllAgentServersSettings>(None).claude.clone()
|
||||
})?;
|
||||
|
||||
let Some(command) =
|
||||
AgentServerCommand::resolve("claude", &[], settings, &project, cx).await
|
||||
else {
|
||||
anyhow::bail!("Failed to find claude binary");
|
||||
};
|
||||
|
||||
let mut child = util::command::new_smol_command(&command.path)
|
||||
.args(
|
||||
[
|
||||
"--input-format",
|
||||
"stream-json",
|
||||
"--output-format",
|
||||
"stream-json",
|
||||
"--print",
|
||||
"--verbose",
|
||||
"--mcp-config",
|
||||
mcp_config_path.to_string_lossy().as_ref(),
|
||||
"--permission-prompt-tool",
|
||||
&format!(
|
||||
"mcp__{}__{}",
|
||||
mcp_server::SERVER_NAME,
|
||||
mcp_server::PERMISSION_TOOL
|
||||
),
|
||||
"--allowedTools",
|
||||
"mcp__zed__Read,mcp__zed__Edit",
|
||||
"--disallowedTools",
|
||||
"Read,Edit",
|
||||
]
|
||||
.into_iter()
|
||||
.chain(command.args.iter().map(|arg| arg.as_str())),
|
||||
)
|
||||
.current_dir(root_dir)
|
||||
.stdin(std::process::Stdio::piped())
|
||||
.stdout(std::process::Stdio::piped())
|
||||
.stderr(std::process::Stdio::inherit())
|
||||
.kill_on_drop(true)
|
||||
.spawn()?;
|
||||
|
||||
let stdin = child.stdin.take().unwrap();
|
||||
let stdout = child.stdout.take().unwrap();
|
||||
|
||||
let (incoming_message_tx, mut incoming_message_rx) = mpsc::unbounded();
|
||||
let (outgoing_tx, outgoing_rx) = mpsc::unbounded();
|
||||
|
||||
let io_task =
|
||||
ClaudeAgentConnection::handle_io(outgoing_rx, incoming_message_tx, stdin, stdout);
|
||||
cx.background_spawn(async move {
|
||||
io_task.await.log_err();
|
||||
drop(mcp_config_path);
|
||||
drop(child);
|
||||
})
|
||||
.detach();
|
||||
|
||||
cx.new(|cx| {
|
||||
let end_turn_tx = Rc::new(RefCell::new(None));
|
||||
let delegate = AcpClientDelegate::new(cx.entity().downgrade(), cx.to_async());
|
||||
delegate_tx.send(Some(delegate.clone())).log_err();
|
||||
|
||||
let handler_task = cx.foreground_executor().spawn({
|
||||
let end_turn_tx = end_turn_tx.clone();
|
||||
let tool_id_map = tool_id_map.clone();
|
||||
async move {
|
||||
while let Some(message) = incoming_message_rx.next().await {
|
||||
ClaudeAgentConnection::handle_message(
|
||||
delegate.clone(),
|
||||
message,
|
||||
end_turn_tx.clone(),
|
||||
tool_id_map.clone(),
|
||||
)
|
||||
.await
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
let mut connection = ClaudeAgentConnection {
|
||||
outgoing_tx,
|
||||
end_turn_tx,
|
||||
_handler_task: handler_task,
|
||||
_mcp_server: None,
|
||||
};
|
||||
|
||||
connection._mcp_server = Some(permission_mcp_server);
|
||||
acp_thread::AcpThread::new(connection, title, None, project.clone(), cx)
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl AgentConnection for ClaudeAgentConnection {
|
||||
/// Send a request to the agent and wait for a response.
|
||||
fn request_any(
|
||||
&self,
|
||||
params: AnyAgentRequest,
|
||||
) -> LocalBoxFuture<'static, Result<acp::AnyAgentResult>> {
|
||||
let end_turn_tx = self.end_turn_tx.clone();
|
||||
let outgoing_tx = self.outgoing_tx.clone();
|
||||
async move {
|
||||
match params {
|
||||
// todo: consider sending an empty request so we get the init response?
|
||||
AnyAgentRequest::InitializeParams(_) => Ok(AnyAgentResult::InitializeResponse(
|
||||
acp::InitializeResponse {
|
||||
is_authenticated: true,
|
||||
protocol_version: ProtocolVersion::latest(),
|
||||
},
|
||||
)),
|
||||
AnyAgentRequest::AuthenticateParams(_) => {
|
||||
Err(anyhow!("Authentication not supported"))
|
||||
}
|
||||
AnyAgentRequest::SendUserMessageParams(message) => {
|
||||
let (tx, rx) = oneshot::channel();
|
||||
end_turn_tx.borrow_mut().replace(tx);
|
||||
let mut content = String::new();
|
||||
for chunk in message.chunks {
|
||||
match chunk {
|
||||
agentic_coding_protocol::UserMessageChunk::Text { text } => {
|
||||
content.push_str(&text)
|
||||
}
|
||||
agentic_coding_protocol::UserMessageChunk::Path { path } => {
|
||||
content.push_str(&format!("@{path:?}"))
|
||||
}
|
||||
}
|
||||
}
|
||||
outgoing_tx.unbounded_send(SdkMessage::User {
|
||||
message: Message {
|
||||
role: Role::User,
|
||||
content: Content::UntaggedText(content),
|
||||
id: None,
|
||||
model: None,
|
||||
stop_reason: None,
|
||||
stop_sequence: None,
|
||||
usage: None,
|
||||
},
|
||||
session_id: None,
|
||||
})?;
|
||||
rx.await??;
|
||||
Ok(AnyAgentResult::SendUserMessageResponse(
|
||||
acp::SendUserMessageResponse,
|
||||
))
|
||||
}
|
||||
AnyAgentRequest::CancelSendMessageParams(_) => Ok(
|
||||
AnyAgentResult::CancelSendMessageResponse(acp::CancelSendMessageResponse),
|
||||
),
|
||||
}
|
||||
}
|
||||
.boxed_local()
|
||||
}
|
||||
}

struct ClaudeAgentConnection {
    outgoing_tx: UnboundedSender<SdkMessage>,
    end_turn_tx: Rc<RefCell<Option<oneshot::Sender<Result<()>>>>>,
    _mcp_server: Option<ClaudeMcpServer>,
    _handler_task: Task<()>,
}

impl ClaudeAgentConnection {
    async fn handle_message(
        delegate: AcpClientDelegate,
        message: SdkMessage,
        end_turn_tx: Rc<RefCell<Option<oneshot::Sender<Result<()>>>>>,
        tool_id_map: Rc<RefCell<HashMap<String, acp::ToolCallId>>>,
    ) {
        match message {
            SdkMessage::Assistant { message, .. } | SdkMessage::User { message, .. } => {
                for chunk in message.content.chunks() {
                    match chunk {
                        ContentChunk::Text { text } | ContentChunk::UntaggedText(text) => {
                            delegate
                                .stream_assistant_message_chunk(StreamAssistantMessageChunkParams {
                                    chunk: acp::AssistantMessageChunk::Text { text },
                                })
                                .await
                                .log_err();
                        }
                        ContentChunk::ToolUse { id, name, input } => {
                            if let Some(resp) = delegate
                                .push_tool_call(ClaudeTool::infer(&name, input).as_acp())
                                .await
                                .log_err()
                            {
                                tool_id_map.borrow_mut().insert(id, resp.id);
                            }
                        }
                        ContentChunk::ToolResult {
                            content,
                            tool_use_id,
                        } => {
                            let id = tool_id_map.borrow_mut().remove(&tool_use_id);
                            if let Some(id) = id {
                                delegate
                                    .update_tool_call(UpdateToolCallParams {
                                        tool_call_id: id,
                                        status: acp::ToolCallStatus::Finished,
                                        content: Some(ToolCallContent::Markdown {
                                            // For now we only include text content
                                            markdown: content.to_string(),
                                        }),
                                    })
                                    .await
                                    .log_err();
                            }
                        }
                        ContentChunk::Image
                        | ContentChunk::Document
                        | ContentChunk::Thinking
                        | ContentChunk::RedactedThinking
                        | ContentChunk::WebSearchToolResult => {
                            delegate
                                .stream_assistant_message_chunk(StreamAssistantMessageChunkParams {
                                    chunk: acp::AssistantMessageChunk::Text {
                                        text: format!("Unsupported content: {:?}", chunk),
                                    },
                                })
                                .await
                                .log_err();
                        }
                    }
                }
            }
            SdkMessage::Result {
                is_error, subtype, ..
            } => {
                if let Some(end_turn_tx) = end_turn_tx.borrow_mut().take() {
                    if is_error {
                        end_turn_tx.send(Err(anyhow!("Error: {subtype}"))).ok();
                    } else {
                        end_turn_tx.send(Ok(())).ok();
                    }
                }
            }
            SdkMessage::System { .. } => {}
        }
    }

    async fn handle_io(
        mut outgoing_rx: UnboundedReceiver<SdkMessage>,
        incoming_tx: UnboundedSender<SdkMessage>,
        mut outgoing_bytes: impl Unpin + AsyncWrite,
        incoming_bytes: impl Unpin + AsyncRead,
    ) -> Result<()> {
        let mut output_reader = BufReader::new(incoming_bytes);
        let mut outgoing_line = Vec::new();
        let mut incoming_line = String::new();
        loop {
            select_biased! {
                message = outgoing_rx.next() => {
                    if let Some(message) = message {
                        outgoing_line.clear();
                        serde_json::to_writer(&mut outgoing_line, &message)?;
                        log::trace!("send: {}", String::from_utf8_lossy(&outgoing_line));
                        outgoing_line.push(b'\n');
                        outgoing_bytes.write_all(&outgoing_line).await.ok();
                    } else {
                        break;
                    }
                }
                bytes_read = output_reader.read_line(&mut incoming_line).fuse() => {
                    if bytes_read? == 0 {
                        break
                    }
                    log::trace!("recv: {}", &incoming_line);
                    match serde_json::from_str::<SdkMessage>(&incoming_line) {
                        Ok(message) => {
                            incoming_tx.unbounded_send(message).log_err();
                        }
                        Err(error) => {
                            log::error!("failed to parse incoming message: {error}. Raw: {incoming_line}");
                        }
                    }
                    incoming_line.clear();
                }
            }
        }
        Ok(())
    }
}
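
// Descriptive note: `handle_io` is intended to be spawned once per connection (the
// `_handler_task` field above holds it), with its byte streams wired to the Claude CLI
// child process's stdin/stdout; framing is one `SdkMessage` JSON object per line each way.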

#[derive(Debug, Clone, Serialize, Deserialize)]
struct Message {
    role: Role,
    content: Content,
    #[serde(skip_serializing_if = "Option::is_none")]
    id: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    model: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    stop_reason: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    stop_sequence: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    usage: Option<Usage>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
enum Content {
    UntaggedText(String),
    Chunks(Vec<ContentChunk>),
}

impl Content {
    pub fn chunks(self) -> impl Iterator<Item = ContentChunk> {
        match self {
            Self::Chunks(chunks) => chunks.into_iter(),
            Self::UntaggedText(text) => vec![ContentChunk::Text { text: text.clone() }].into_iter(),
        }
    }
}

impl Display for Content {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Content::UntaggedText(txt) => write!(f, "{}", txt),
            Content::Chunks(chunks) => {
                for chunk in chunks {
                    write!(f, "{}", chunk)?;
                }
                Ok(())
            }
        }
    }
}

#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "snake_case")]
enum ContentChunk {
    Text {
        text: String,
    },
    ToolUse {
        id: String,
        name: String,
        input: serde_json::Value,
    },
    ToolResult {
        content: Content,
        tool_use_id: String,
    },
    // TODO
    Image,
    Document,
    Thinking,
    RedactedThinking,
    WebSearchToolResult,
    #[serde(untagged)]
    UntaggedText(String),
}

impl Display for ContentChunk {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            ContentChunk::Text { text } => write!(f, "{}", text),
            ContentChunk::UntaggedText(text) => write!(f, "{}", text),
            ContentChunk::ToolResult { content, .. } => write!(f, "{}", content),
            ContentChunk::Image
            | ContentChunk::Document
            | ContentChunk::Thinking
            | ContentChunk::RedactedThinking
            | ContentChunk::ToolUse { .. }
            | ContentChunk::WebSearchToolResult => {
                write!(f, "\n{:?}\n", &self)
            }
        }
    }
}

#[derive(Debug, Clone, Serialize, Deserialize)]
struct Usage {
    input_tokens: u32,
    cache_creation_input_tokens: u32,
    cache_read_input_tokens: u32,
    output_tokens: u32,
    service_tier: String,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
enum Role {
    System,
    Assistant,
    User,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
struct MessageParam {
    role: Role,
    content: String,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type", rename_all = "snake_case")]
enum SdkMessage {
    // An assistant message
    Assistant {
        message: Message, // from Anthropic SDK
        #[serde(skip_serializing_if = "Option::is_none")]
        session_id: Option<String>,
    },

    // A user message
    User {
        message: Message, // from Anthropic SDK
        #[serde(skip_serializing_if = "Option::is_none")]
        session_id: Option<String>,
    },

    // Emitted as the last message in a conversation
    Result {
        subtype: ResultErrorType,
        duration_ms: f64,
        duration_api_ms: f64,
        is_error: bool,
        num_turns: i32,
        #[serde(skip_serializing_if = "Option::is_none")]
        result: Option<String>,
        session_id: String,
        total_cost_usd: f64,
    },
    // Emitted as the first message at the start of a conversation
    System {
        cwd: String,
        session_id: String,
        tools: Vec<String>,
        model: String,
        mcp_servers: Vec<McpServer>,
        #[serde(rename = "apiKeySource")]
        api_key_source: String,
        #[serde(rename = "permissionMode")]
        permission_mode: PermissionMode,
    },
}
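
// Wire-format sketch (illustrative): with `#[serde(tag = "type", rename_all = "snake_case")]`,
// each variant serializes to a single JSON object keyed by "type", e.g.
//   {"type":"user","message":{"role":"user","content":"Hello"},"session_id":"abc"}
// `handle_io` above writes and reads exactly one such object per line.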

#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
enum ResultErrorType {
    Success,
    ErrorMaxTurns,
    ErrorDuringExecution,
}

impl Display for ResultErrorType {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            ResultErrorType::Success => write!(f, "success"),
            ResultErrorType::ErrorMaxTurns => write!(f, "error_max_turns"),
            ResultErrorType::ErrorDuringExecution => write!(f, "error_during_execution"),
        }
    }
}

#[derive(Debug, Clone, Serialize, Deserialize)]
struct McpServer {
    name: String,
    status: String,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
enum PermissionMode {
    Default,
    AcceptEdits,
    BypassPermissions,
    Plan,
}

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct McpConfig {
    mcp_servers: HashMap<String, McpServerConfig>,
}

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct McpServerConfig {
    command: String,
    args: Vec<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    env: Option<HashMap<String, String>>,
}
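
// Illustrative serialization of `McpConfig` with a single `McpServerConfig` entry
// (camelCase per the serde attributes above); the command and socket paths are placeholders:
//   {"mcpServers":{"zed":{"command":"/path/to/zed","args":["--nc","/tmp/zed-mcp.sock"]}}}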

#[cfg(test)]
pub(crate) mod tests {
    use super::*;
    use serde_json::json;

    // crate::common_e2e_tests!(ClaudeCode);

    pub fn local_command() -> AgentServerCommand {
        AgentServerCommand {
            path: "claude".into(),
            args: vec![],
            env: None,
        }
    }

    #[test]
    fn test_deserialize_content_untagged_text() {
        let json = json!("Hello, world!");
        let content: Content = serde_json::from_value(json).unwrap();
        match content {
            Content::UntaggedText(text) => assert_eq!(text, "Hello, world!"),
            _ => panic!("Expected UntaggedText variant"),
        }
    }

    #[test]
    fn test_deserialize_content_chunks() {
        let json = json!([
            {
                "type": "text",
                "text": "Hello"
            },
            {
                "type": "tool_use",
                "id": "tool_123",
                "name": "calculator",
                "input": {"operation": "add", "a": 1, "b": 2}
            }
        ]);
        let content: Content = serde_json::from_value(json).unwrap();
        match content {
            Content::Chunks(chunks) => {
                assert_eq!(chunks.len(), 2);
                match &chunks[0] {
                    ContentChunk::Text { text } => assert_eq!(text, "Hello"),
                    _ => panic!("Expected Text chunk"),
                }
                match &chunks[1] {
                    ContentChunk::ToolUse { id, name, input } => {
                        assert_eq!(id, "tool_123");
                        assert_eq!(name, "calculator");
                        assert_eq!(input["operation"], "add");
                        assert_eq!(input["a"], 1);
                        assert_eq!(input["b"], 2);
                    }
                    _ => panic!("Expected ToolUse chunk"),
                }
            }
            _ => panic!("Expected Chunks variant"),
        }
    }

    #[test]
    fn test_deserialize_tool_result_untagged_text() {
        let json = json!({
            "type": "tool_result",
            "content": "Result content",
            "tool_use_id": "tool_456"
        });
        let chunk: ContentChunk = serde_json::from_value(json).unwrap();
        match chunk {
            ContentChunk::ToolResult {
                content,
                tool_use_id,
            } => {
                match content {
                    Content::UntaggedText(text) => assert_eq!(text, "Result content"),
                    _ => panic!("Expected UntaggedText content"),
                }
                assert_eq!(tool_use_id, "tool_456");
            }
            _ => panic!("Expected ToolResult variant"),
        }
    }

    #[test]
    fn test_deserialize_tool_result_chunks() {
        let json = json!({
            "type": "tool_result",
            "content": [
                {
                    "type": "text",
                    "text": "Processing complete"
                },
                {
                    "type": "text",
                    "text": "Result: 42"
                }
            ],
            "tool_use_id": "tool_789"
        });
        let chunk: ContentChunk = serde_json::from_value(json).unwrap();
        match chunk {
            ContentChunk::ToolResult {
                content,
                tool_use_id,
            } => {
                match content {
                    Content::Chunks(chunks) => {
                        assert_eq!(chunks.len(), 2);
                        match &chunks[0] {
                            ContentChunk::Text { text } => assert_eq!(text, "Processing complete"),
                            _ => panic!("Expected Text chunk"),
                        }
                        match &chunks[1] {
                            ContentChunk::Text { text } => assert_eq!(text, "Result: 42"),
                            _ => panic!("Expected Text chunk"),
                        }
                    }
                    _ => panic!("Expected Chunks content"),
                }
                assert_eq!(tool_use_id, "tool_789");
            }
            _ => panic!("Expected ToolResult variant"),
        }
    }
}

crates/agent_servers/src/claude/mcp_server.rs (new file, 303 lines)
@@ -0,0 +1,303 @@
use std::{cell::RefCell, rc::Rc};
|
||||
|
||||
use acp_thread::AcpClientDelegate;
|
||||
use agentic_coding_protocol::{self as acp, Client, ReadTextFileParams, WriteTextFileParams};
|
||||
use anyhow::{Context, Result};
|
||||
use collections::HashMap;
|
||||
use context_server::{
|
||||
listener::McpServer,
|
||||
types::{
|
||||
CallToolParams, CallToolResponse, Implementation, InitializeParams, InitializeResponse,
|
||||
ListToolsResponse, ProtocolVersion, ServerCapabilities, Tool, ToolAnnotations,
|
||||
ToolResponseContent, ToolsCapabilities, requests,
|
||||
},
|
||||
};
|
||||
use gpui::{App, AsyncApp, Task};
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use util::debug_panic;
|
||||
|
||||
use crate::claude::{
|
||||
McpServerConfig,
|
||||
tools::{ClaudeTool, EditToolParams, EditToolResponse, ReadToolParams, ReadToolResponse},
|
||||
};
|
||||
|
||||
pub struct ClaudeMcpServer {
|
||||
server: McpServer,
|
||||
}
|
||||
|
||||
pub const SERVER_NAME: &str = "zed";
|
||||
pub const READ_TOOL: &str = "Read";
|
||||
pub const EDIT_TOOL: &str = "Edit";
|
||||
pub const PERMISSION_TOOL: &str = "Confirmation";
|
||||
|
||||
#[derive(Deserialize, JsonSchema, Debug)]
|
||||
struct PermissionToolParams {
|
||||
tool_name: String,
|
||||
input: serde_json::Value,
|
||||
tool_use_id: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
struct PermissionToolResponse {
|
||||
behavior: PermissionToolBehavior,
|
||||
updated_input: serde_json::Value,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
enum PermissionToolBehavior {
|
||||
Allow,
|
||||
Deny,
|
||||
}
|
||||
|
||||
impl ClaudeMcpServer {
|
||||
pub async fn new(
|
||||
delegate: watch::Receiver<Option<AcpClientDelegate>>,
|
||||
tool_id_map: Rc<RefCell<HashMap<String, acp::ToolCallId>>>,
|
||||
cx: &AsyncApp,
|
||||
) -> Result<Self> {
|
||||
let mut mcp_server = McpServer::new(cx).await?;
|
||||
mcp_server.handle_request::<requests::Initialize>(Self::handle_initialize);
|
||||
mcp_server.handle_request::<requests::ListTools>(Self::handle_list_tools);
|
||||
mcp_server.handle_request::<requests::CallTool>(move |request, cx| {
|
||||
Self::handle_call_tool(request, delegate.clone(), tool_id_map.clone(), cx)
|
||||
});
|
||||
|
||||
Ok(Self { server: mcp_server })
|
||||
}
|
||||
|
||||
pub fn server_config(&self) -> Result<McpServerConfig> {
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
let zed_path = util::get_shell_safe_zed_path()?;
|
||||
#[cfg(target_os = "windows")]
|
||||
let zed_path = std::env::current_exe()
|
||||
.context("finding current executable path for use in mcp_server")?
|
||||
.to_string_lossy()
|
||||
.to_string();
|
||||
|
||||
Ok(McpServerConfig {
|
||||
command: zed_path,
|
||||
args: vec![
|
||||
"--nc".into(),
|
||||
self.server.socket_path().display().to_string(),
|
||||
],
|
||||
env: None,
|
||||
})
|
||||
}
|
||||
|
||||
fn handle_initialize(_: InitializeParams, cx: &App) -> Task<Result<InitializeResponse>> {
|
||||
cx.foreground_executor().spawn(async move {
|
||||
Ok(InitializeResponse {
|
||||
protocol_version: ProtocolVersion("2025-06-18".into()),
|
||||
capabilities: ServerCapabilities {
|
||||
experimental: None,
|
||||
logging: None,
|
||||
completions: None,
|
||||
prompts: None,
|
||||
resources: None,
|
||||
tools: Some(ToolsCapabilities {
|
||||
list_changed: Some(false),
|
||||
}),
|
||||
},
|
||||
server_info: Implementation {
|
||||
name: SERVER_NAME.into(),
|
||||
version: "0.1.0".into(),
|
||||
},
|
||||
meta: None,
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
fn handle_list_tools(_: (), cx: &App) -> Task<Result<ListToolsResponse>> {
|
||||
cx.foreground_executor().spawn(async move {
|
||||
Ok(ListToolsResponse {
|
||||
tools: vec![
|
||||
Tool {
|
||||
name: PERMISSION_TOOL.into(),
|
||||
input_schema: schemars::schema_for!(PermissionToolParams).into(),
|
||||
description: None,
|
||||
annotations: None,
|
||||
},
|
||||
Tool {
|
||||
name: READ_TOOL.into(),
|
||||
input_schema: schemars::schema_for!(ReadToolParams).into(),
|
||||
description: Some("Read the contents of a file. In sessions with mcp__zed__Read always use it instead of Read as it contains the most up-to-date contents.".to_string()),
|
||||
annotations: Some(ToolAnnotations {
|
||||
title: Some("Read file".to_string()),
|
||||
read_only_hint: Some(true),
|
||||
destructive_hint: Some(false),
|
||||
open_world_hint: Some(false),
|
||||
// if time passes the contents might change, but it's not going to do anything different
|
||||
// true or false seem too strong, let's try a none.
|
||||
idempotent_hint: None,
|
||||
}),
|
||||
},
|
||||
Tool {
|
||||
name: EDIT_TOOL.into(),
|
||||
input_schema: schemars::schema_for!(EditToolParams).into(),
|
||||
description: Some("Edits a file. In sessions with mcp__zed__Edit always use it instead of Edit as it will show the diff to the user better.".to_string()),
|
||||
annotations: Some(ToolAnnotations {
|
||||
title: Some("Edit file".to_string()),
|
||||
read_only_hint: Some(false),
|
||||
destructive_hint: Some(false),
|
||||
open_world_hint: Some(false),
|
||||
idempotent_hint: Some(false),
|
||||
}),
|
||||
},
|
||||
],
|
||||
next_cursor: None,
|
||||
meta: None,
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
fn handle_call_tool(
|
||||
request: CallToolParams,
|
||||
mut delegate_watch: watch::Receiver<Option<AcpClientDelegate>>,
|
||||
tool_id_map: Rc<RefCell<HashMap<String, acp::ToolCallId>>>,
|
||||
cx: &App,
|
||||
) -> Task<Result<CallToolResponse>> {
|
||||
cx.spawn(async move |cx| {
|
||||
let Some(delegate) = delegate_watch.recv().await? else {
|
||||
debug_panic!("Sent None delegate");
|
||||
anyhow::bail!("Server not available");
|
||||
};
|
||||
|
||||
if request.name.as_str() == PERMISSION_TOOL {
|
||||
let input =
|
||||
serde_json::from_value(request.arguments.context("Arguments required")?)?;
|
||||
|
||||
let result =
|
||||
Self::handle_permissions_tool_call(input, delegate, tool_id_map, cx).await?;
|
||||
Ok(CallToolResponse {
|
||||
content: vec![ToolResponseContent::Text {
|
||||
text: serde_json::to_string(&result)?,
|
||||
}],
|
||||
is_error: None,
|
||||
meta: None,
|
||||
})
|
||||
} else if request.name.as_str() == READ_TOOL {
|
||||
let input =
|
||||
serde_json::from_value(request.arguments.context("Arguments required")?)?;
|
||||
|
||||
let result = Self::handle_read_tool_call(input, delegate, cx).await?;
|
||||
Ok(CallToolResponse {
|
||||
content: vec![ToolResponseContent::Text {
|
||||
text: serde_json::to_string(&result)?,
|
||||
}],
|
||||
is_error: None,
|
||||
meta: None,
|
||||
})
|
||||
} else if request.name.as_str() == EDIT_TOOL {
|
||||
let input =
|
||||
serde_json::from_value(request.arguments.context("Arguments required")?)?;
|
||||
|
||||
let result = Self::handle_edit_tool_call(input, delegate, cx).await?;
|
||||
Ok(CallToolResponse {
|
||||
content: vec![ToolResponseContent::Text {
|
||||
text: serde_json::to_string(&result)?,
|
||||
}],
|
||||
is_error: None,
|
||||
meta: None,
|
||||
})
|
||||
} else {
|
||||
anyhow::bail!("Unsupported tool");
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
fn handle_read_tool_call(
|
||||
params: ReadToolParams,
|
||||
delegate: AcpClientDelegate,
|
||||
cx: &AsyncApp,
|
||||
) -> Task<Result<ReadToolResponse>> {
|
||||
cx.foreground_executor().spawn(async move {
|
||||
let response = delegate
|
||||
.read_text_file(ReadTextFileParams {
|
||||
path: params.abs_path,
|
||||
line: params.offset,
|
||||
limit: params.limit,
|
||||
})
|
||||
.await?;
|
||||
|
||||
Ok(ReadToolResponse {
|
||||
content: response.content,
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
fn handle_edit_tool_call(
|
||||
params: EditToolParams,
|
||||
delegate: AcpClientDelegate,
|
||||
cx: &AsyncApp,
|
||||
) -> Task<Result<EditToolResponse>> {
|
||||
cx.foreground_executor().spawn(async move {
|
||||
let response = delegate
|
||||
.read_text_file_reusing_snapshot(ReadTextFileParams {
|
||||
path: params.abs_path.clone(),
|
||||
line: None,
|
||||
limit: None,
|
||||
})
|
||||
.await?;
|
||||
|
||||
let new_content = response.content.replace(¶ms.old_text, ¶ms.new_text);
|
||||
if new_content == response.content {
|
||||
return Err(anyhow::anyhow!("The old_text was not found in the content"));
|
||||
}
|
||||
|
||||
delegate
|
||||
.write_text_file(WriteTextFileParams {
|
||||
path: params.abs_path,
|
||||
content: new_content,
|
||||
})
|
||||
.await?;
|
||||
|
||||
Ok(EditToolResponse)
|
||||
})
|
||||
}
|
||||
|
||||
fn handle_permissions_tool_call(
|
||||
params: PermissionToolParams,
|
||||
delegate: AcpClientDelegate,
|
||||
tool_id_map: Rc<RefCell<HashMap<String, acp::ToolCallId>>>,
|
||||
cx: &AsyncApp,
|
||||
) -> Task<Result<PermissionToolResponse>> {
|
||||
cx.foreground_executor().spawn(async move {
|
||||
let claude_tool = ClaudeTool::infer(¶ms.tool_name, params.input.clone());
|
||||
|
||||
let tool_call_id = match params.tool_use_id {
|
||||
Some(tool_use_id) => tool_id_map
|
||||
.borrow()
|
||||
.get(&tool_use_id)
|
||||
.cloned()
|
||||
.context("Tool call ID not found")?,
|
||||
|
||||
None => delegate.push_tool_call(claude_tool.as_acp()).await?.id,
|
||||
};
|
||||
|
||||
let outcome = delegate
|
||||
.request_existing_tool_call_confirmation(
|
||||
tool_call_id,
|
||||
claude_tool.confirmation(None),
|
||||
)
|
||||
.await?;
|
||||
|
||||
match outcome {
|
||||
acp::ToolCallConfirmationOutcome::Allow
|
||||
| acp::ToolCallConfirmationOutcome::AlwaysAllow
|
||||
| acp::ToolCallConfirmationOutcome::AlwaysAllowMcpServer
|
||||
| acp::ToolCallConfirmationOutcome::AlwaysAllowTool => Ok(PermissionToolResponse {
|
||||
behavior: PermissionToolBehavior::Allow,
|
||||
updated_input: params.input,
|
||||
}),
|
||||
acp::ToolCallConfirmationOutcome::Reject
|
||||
| acp::ToolCallConfirmationOutcome::Cancel => Ok(PermissionToolResponse {
|
||||
behavior: PermissionToolBehavior::Deny,
|
||||
updated_input: params.input,
|
||||
}),
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
crates/agent_servers/src/claude/tools.rs (new file, 670 lines)
@@ -0,0 +1,670 @@
|
||||
use std::path::PathBuf;
|
||||
|
||||
use agentic_coding_protocol::{self as acp, PushToolCallParams, ToolCallLocation};
|
||||
use itertools::Itertools;
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use util::ResultExt;
|
||||
|
||||
pub enum ClaudeTool {
|
||||
Task(Option<TaskToolParams>),
|
||||
NotebookRead(Option<NotebookReadToolParams>),
|
||||
NotebookEdit(Option<NotebookEditToolParams>),
|
||||
Edit(Option<EditToolParams>),
|
||||
MultiEdit(Option<MultiEditToolParams>),
|
||||
ReadFile(Option<ReadToolParams>),
|
||||
Write(Option<WriteToolParams>),
|
||||
Ls(Option<LsToolParams>),
|
||||
Glob(Option<GlobToolParams>),
|
||||
Grep(Option<GrepToolParams>),
|
||||
Terminal(Option<BashToolParams>),
|
||||
WebFetch(Option<WebFetchToolParams>),
|
||||
WebSearch(Option<WebSearchToolParams>),
|
||||
TodoWrite(Option<TodoWriteToolParams>),
|
||||
ExitPlanMode(Option<ExitPlanModeToolParams>),
|
||||
Other {
|
||||
name: String,
|
||||
input: serde_json::Value,
|
||||
},
|
||||
}
|
||||
|
||||
impl ClaudeTool {
|
||||
pub fn infer(tool_name: &str, input: serde_json::Value) -> Self {
|
||||
match tool_name {
|
||||
// Known tools
|
||||
"mcp__zed__Read" => Self::ReadFile(serde_json::from_value(input).log_err()),
|
||||
"mcp__zed__Edit" => Self::Edit(serde_json::from_value(input).log_err()),
|
||||
"MultiEdit" => Self::MultiEdit(serde_json::from_value(input).log_err()),
|
||||
"Write" => Self::Write(serde_json::from_value(input).log_err()),
|
||||
"LS" => Self::Ls(serde_json::from_value(input).log_err()),
|
||||
"Glob" => Self::Glob(serde_json::from_value(input).log_err()),
|
||||
"Grep" => Self::Grep(serde_json::from_value(input).log_err()),
|
||||
"Bash" => Self::Terminal(serde_json::from_value(input).log_err()),
|
||||
"WebFetch" => Self::WebFetch(serde_json::from_value(input).log_err()),
|
||||
"WebSearch" => Self::WebSearch(serde_json::from_value(input).log_err()),
|
||||
"TodoWrite" => Self::TodoWrite(serde_json::from_value(input).log_err()),
|
||||
"exit_plan_mode" => Self::ExitPlanMode(serde_json::from_value(input).log_err()),
|
||||
"Task" => Self::Task(serde_json::from_value(input).log_err()),
|
||||
"NotebookRead" => Self::NotebookRead(serde_json::from_value(input).log_err()),
|
||||
"NotebookEdit" => Self::NotebookEdit(serde_json::from_value(input).log_err()),
|
||||
// Inferred from name
|
||||
_ => {
|
||||
let tool_name = tool_name.to_lowercase();
|
||||
|
||||
if tool_name.contains("edit") || tool_name.contains("write") {
|
||||
Self::Edit(None)
|
||||
} else if tool_name.contains("terminal") {
|
||||
Self::Terminal(None)
|
||||
} else {
|
||||
Self::Other {
|
||||
name: tool_name.to_string(),
|
||||
input,
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
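    // Illustrative: `ClaudeTool::infer("Bash", json!({"command": "ls"}))` yields
    // `Self::Terminal(Some(..))` via the known-tool table above, while an unrecognized name
    // that doesn't contain "edit", "write", or "terminal" falls through to `Self::Other`
    // with the raw input preserved.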
|
||||
|
||||
pub fn label(&self) -> String {
|
||||
match &self {
|
||||
Self::Task(Some(params)) => params.description.clone(),
|
||||
Self::Task(None) => "Task".into(),
|
||||
Self::NotebookRead(Some(params)) => {
|
||||
format!("Read Notebook {}", params.notebook_path.display())
|
||||
}
|
||||
Self::NotebookRead(None) => "Read Notebook".into(),
|
||||
Self::NotebookEdit(Some(params)) => {
|
||||
format!("Edit Notebook {}", params.notebook_path.display())
|
||||
}
|
||||
Self::NotebookEdit(None) => "Edit Notebook".into(),
|
||||
Self::Terminal(Some(params)) => format!("`{}`", params.command),
|
||||
Self::Terminal(None) => "Terminal".into(),
|
||||
Self::ReadFile(_) => "Read File".into(),
|
||||
Self::Ls(Some(params)) => {
|
||||
format!("List Directory {}", params.path.display())
|
||||
}
|
||||
Self::Ls(None) => "List Directory".into(),
|
||||
Self::Edit(Some(params)) => {
|
||||
format!("Edit {}", params.abs_path.display())
|
||||
}
|
||||
Self::Edit(None) => "Edit".into(),
|
||||
Self::MultiEdit(Some(params)) => {
|
||||
format!("Multi Edit {}", params.file_path.display())
|
||||
}
|
||||
Self::MultiEdit(None) => "Multi Edit".into(),
|
||||
Self::Write(Some(params)) => {
|
||||
format!("Write {}", params.file_path.display())
|
||||
}
|
||||
Self::Write(None) => "Write".into(),
|
||||
Self::Glob(Some(params)) => {
|
||||
format!("Glob `{params}`")
|
||||
}
|
||||
Self::Glob(None) => "Glob".into(),
|
||||
Self::Grep(Some(params)) => format!("`{params}`"),
|
||||
Self::Grep(None) => "Grep".into(),
|
||||
Self::WebFetch(Some(params)) => format!("Fetch {}", params.url),
|
||||
Self::WebFetch(None) => "Fetch".into(),
|
||||
Self::WebSearch(Some(params)) => format!("Web Search: {}", params),
|
||||
Self::WebSearch(None) => "Web Search".into(),
|
||||
Self::TodoWrite(Some(params)) => format!(
|
||||
"Update TODOs: {}",
|
||||
params.todos.iter().map(|todo| &todo.content).join(", ")
|
||||
),
|
||||
Self::TodoWrite(None) => "Update TODOs".into(),
|
||||
Self::ExitPlanMode(_) => "Exit Plan Mode".into(),
|
||||
Self::Other { name, .. } => name.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn content(&self) -> Option<acp::ToolCallContent> {
|
||||
match &self {
|
||||
ClaudeTool::Other { input, .. } => Some(acp::ToolCallContent::Markdown {
|
||||
markdown: format!(
|
||||
"```json\n{}```",
|
||||
serde_json::to_string_pretty(&input).unwrap_or("{}".to_string())
|
||||
),
|
||||
}),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn icon(&self) -> acp::Icon {
|
||||
match self {
|
||||
Self::Task(_) => acp::Icon::Hammer,
|
||||
Self::NotebookRead(_) => acp::Icon::FileSearch,
|
||||
Self::NotebookEdit(_) => acp::Icon::Pencil,
|
||||
Self::Edit(_) => acp::Icon::Pencil,
|
||||
Self::MultiEdit(_) => acp::Icon::Pencil,
|
||||
Self::Write(_) => acp::Icon::Pencil,
|
||||
Self::ReadFile(_) => acp::Icon::FileSearch,
|
||||
Self::Ls(_) => acp::Icon::Folder,
|
||||
Self::Glob(_) => acp::Icon::FileSearch,
|
||||
Self::Grep(_) => acp::Icon::Regex,
|
||||
Self::Terminal(_) => acp::Icon::Terminal,
|
||||
Self::WebSearch(_) => acp::Icon::Globe,
|
||||
Self::WebFetch(_) => acp::Icon::Globe,
|
||||
Self::TodoWrite(_) => acp::Icon::LightBulb,
|
||||
Self::ExitPlanMode(_) => acp::Icon::Hammer,
|
||||
Self::Other { .. } => acp::Icon::Hammer,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn confirmation(&self, description: Option<String>) -> acp::ToolCallConfirmation {
|
||||
match &self {
|
||||
Self::Edit(_) | Self::Write(_) | Self::NotebookEdit(_) | Self::MultiEdit(_) => {
|
||||
acp::ToolCallConfirmation::Edit { description }
|
||||
}
|
||||
Self::WebFetch(params) => acp::ToolCallConfirmation::Fetch {
|
||||
urls: params
|
||||
.as_ref()
|
||||
.map(|p| vec![p.url.clone()])
|
||||
.unwrap_or_default(),
|
||||
description,
|
||||
},
|
||||
Self::Terminal(Some(BashToolParams {
|
||||
description,
|
||||
command,
|
||||
..
|
||||
})) => acp::ToolCallConfirmation::Execute {
|
||||
command: command.clone(),
|
||||
root_command: command.clone(),
|
||||
description: description.clone(),
|
||||
},
|
||||
Self::ExitPlanMode(Some(params)) => acp::ToolCallConfirmation::Other {
|
||||
description: if let Some(description) = description {
|
||||
format!("{description} {}", params.plan)
|
||||
} else {
|
||||
params.plan.clone()
|
||||
},
|
||||
},
|
||||
Self::Task(Some(params)) => acp::ToolCallConfirmation::Other {
|
||||
description: if let Some(description) = description {
|
||||
format!("{description} {}", params.description)
|
||||
} else {
|
||||
params.description.clone()
|
||||
},
|
||||
},
|
||||
Self::Ls(Some(LsToolParams { path, .. }))
|
||||
| Self::ReadFile(Some(ReadToolParams { abs_path: path, .. })) => {
|
||||
let path = path.display();
|
||||
acp::ToolCallConfirmation::Other {
|
||||
description: if let Some(description) = description {
|
||||
format!("{description} {path}")
|
||||
} else {
|
||||
path.to_string()
|
||||
},
|
||||
}
|
||||
}
|
||||
Self::NotebookRead(Some(NotebookReadToolParams { notebook_path, .. })) => {
|
||||
let path = notebook_path.display();
|
||||
acp::ToolCallConfirmation::Other {
|
||||
description: if let Some(description) = description {
|
||||
format!("{description} {path}")
|
||||
} else {
|
||||
path.to_string()
|
||||
},
|
||||
}
|
||||
}
|
||||
Self::Glob(Some(params)) => acp::ToolCallConfirmation::Other {
|
||||
description: if let Some(description) = description {
|
||||
format!("{description} {params}")
|
||||
} else {
|
||||
params.to_string()
|
||||
},
|
||||
},
|
||||
Self::Grep(Some(params)) => acp::ToolCallConfirmation::Other {
|
||||
description: if let Some(description) = description {
|
||||
format!("{description} {params}")
|
||||
} else {
|
||||
params.to_string()
|
||||
},
|
||||
},
|
||||
Self::WebSearch(Some(params)) => acp::ToolCallConfirmation::Other {
|
||||
description: if let Some(description) = description {
|
||||
format!("{description} {params}")
|
||||
} else {
|
||||
params.to_string()
|
||||
},
|
||||
},
|
||||
Self::TodoWrite(Some(params)) => {
|
||||
let params = params.todos.iter().map(|todo| &todo.content).join(", ");
|
||||
acp::ToolCallConfirmation::Other {
|
||||
description: if let Some(description) = description {
|
||||
format!("{description} {params}")
|
||||
} else {
|
||||
params
|
||||
},
|
||||
}
|
||||
}
|
||||
Self::Terminal(None)
|
||||
| Self::Task(None)
|
||||
| Self::NotebookRead(None)
|
||||
| Self::ExitPlanMode(None)
|
||||
| Self::Ls(None)
|
||||
| Self::Glob(None)
|
||||
| Self::Grep(None)
|
||||
| Self::ReadFile(None)
|
||||
| Self::WebSearch(None)
|
||||
| Self::TodoWrite(None)
|
||||
| Self::Other { .. } => acp::ToolCallConfirmation::Other {
|
||||
description: description.unwrap_or("".to_string()),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
pub fn locations(&self) -> Vec<acp::ToolCallLocation> {
|
||||
match &self {
|
||||
Self::Edit(Some(EditToolParams { abs_path, .. })) => vec![ToolCallLocation {
|
||||
path: abs_path.clone(),
|
||||
line: None,
|
||||
}],
|
||||
Self::MultiEdit(Some(MultiEditToolParams { file_path, .. })) => {
|
||||
vec![ToolCallLocation {
|
||||
path: file_path.clone(),
|
||||
line: None,
|
||||
}]
|
||||
}
|
||||
Self::Write(Some(WriteToolParams { file_path, .. })) => vec![ToolCallLocation {
|
||||
path: file_path.clone(),
|
||||
line: None,
|
||||
}],
|
||||
Self::ReadFile(Some(ReadToolParams {
|
||||
abs_path, offset, ..
|
||||
})) => vec![ToolCallLocation {
|
||||
path: abs_path.clone(),
|
||||
line: *offset,
|
||||
}],
|
||||
Self::NotebookRead(Some(NotebookReadToolParams { notebook_path, .. })) => {
|
||||
vec![ToolCallLocation {
|
||||
path: notebook_path.clone(),
|
||||
line: None,
|
||||
}]
|
||||
}
|
||||
Self::NotebookEdit(Some(NotebookEditToolParams { notebook_path, .. })) => {
|
||||
vec![ToolCallLocation {
|
||||
path: notebook_path.clone(),
|
||||
line: None,
|
||||
}]
|
||||
}
|
||||
Self::Glob(Some(GlobToolParams {
|
||||
path: Some(path), ..
|
||||
})) => vec![ToolCallLocation {
|
||||
path: path.clone(),
|
||||
line: None,
|
||||
}],
|
||||
Self::Ls(Some(LsToolParams { path, .. })) => vec![ToolCallLocation {
|
||||
path: path.clone(),
|
||||
line: None,
|
||||
}],
|
||||
Self::Grep(Some(GrepToolParams {
|
||||
path: Some(path), ..
|
||||
})) => vec![ToolCallLocation {
|
||||
path: PathBuf::from(path),
|
||||
line: None,
|
||||
}],
|
||||
Self::Task(_)
|
||||
| Self::NotebookRead(None)
|
||||
| Self::NotebookEdit(None)
|
||||
| Self::Edit(None)
|
||||
| Self::MultiEdit(None)
|
||||
| Self::Write(None)
|
||||
| Self::ReadFile(None)
|
||||
| Self::Ls(None)
|
||||
| Self::Glob(_)
|
||||
| Self::Grep(_)
|
||||
| Self::Terminal(_)
|
||||
| Self::WebFetch(_)
|
||||
| Self::WebSearch(_)
|
||||
| Self::TodoWrite(_)
|
||||
| Self::ExitPlanMode(_)
|
||||
| Self::Other { .. } => vec![],
|
||||
}
|
||||
}
|
||||
|
||||
pub fn as_acp(&self) -> PushToolCallParams {
|
||||
PushToolCallParams {
|
||||
label: self.label(),
|
||||
content: self.content(),
|
||||
icon: self.icon(),
|
||||
locations: self.locations(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Deserialize, JsonSchema, Debug)]
|
||||
pub struct EditToolParams {
|
||||
/// The absolute path to the file to read.
|
||||
pub abs_path: PathBuf,
|
||||
/// The old text to replace (must be unique in the file)
|
||||
pub old_text: String,
|
||||
/// The new text.
|
||||
pub new_text: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct EditToolResponse;
|
||||
|
||||
#[derive(Deserialize, JsonSchema, Debug)]
|
||||
pub struct ReadToolParams {
|
||||
/// The absolute path to the file to read.
|
||||
pub abs_path: PathBuf,
|
||||
/// Which line to start reading from. Omit to start from the beginning.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub offset: Option<u32>,
|
||||
/// How many lines to read. Omit for the whole file.
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub limit: Option<u32>,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ReadToolResponse {
|
||||
pub content: String,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, JsonSchema, Debug)]
|
||||
pub struct WriteToolParams {
|
||||
/// Absolute path for new file
|
||||
pub file_path: PathBuf,
|
||||
/// File content
|
||||
pub content: String,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, JsonSchema, Debug)]
|
||||
pub struct BashToolParams {
|
||||
/// Shell command to execute
|
||||
pub command: String,
|
||||
/// 5-10 word description of what command does
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub description: Option<String>,
|
||||
/// Timeout in ms (max 600000ms/10min, default 120000ms)
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub timeout: Option<u32>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, JsonSchema, Debug)]
|
||||
pub struct GlobToolParams {
|
||||
/// Glob pattern like **/*.js or src/**/*.ts
|
||||
pub pattern: String,
|
||||
/// Directory to search in (omit for current directory)
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub path: Option<PathBuf>,
|
||||
}
|
||||
|
||||
impl std::fmt::Display for GlobToolParams {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
if let Some(path) = &self.path {
|
||||
write!(f, "{}", path.display())?;
|
||||
}
|
||||
write!(f, "{}", self.pattern)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Deserialize, JsonSchema, Debug)]
|
||||
pub struct LsToolParams {
|
||||
/// Absolute path to directory
|
||||
pub path: PathBuf,
|
||||
/// Array of glob patterns to ignore
|
||||
#[serde(default, skip_serializing_if = "Vec::is_empty")]
|
||||
pub ignore: Vec<String>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, JsonSchema, Debug)]
|
||||
pub struct GrepToolParams {
|
||||
/// Regex pattern to search for
|
||||
pub pattern: String,
|
||||
/// File/directory to search (defaults to current directory)
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub path: Option<String>,
|
||||
/// "content" (shows lines), "files_with_matches" (default), "count"
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub output_mode: Option<GrepOutputMode>,
|
||||
/// Filter files with glob pattern like "*.js"
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub glob: Option<String>,
|
||||
/// File type filter like "js", "py", "rust"
|
||||
#[serde(rename = "type", skip_serializing_if = "Option::is_none")]
|
||||
pub file_type: Option<String>,
|
||||
/// Case insensitive search
|
||||
#[serde(rename = "-i", default, skip_serializing_if = "is_false")]
|
||||
pub case_insensitive: bool,
|
||||
/// Show line numbers (content mode only)
|
||||
#[serde(rename = "-n", default, skip_serializing_if = "is_false")]
|
||||
pub line_numbers: bool,
|
||||
/// Lines after match (content mode only)
|
||||
#[serde(rename = "-A", skip_serializing_if = "Option::is_none")]
|
||||
pub after_context: Option<u32>,
|
||||
/// Lines before match (content mode only)
|
||||
#[serde(rename = "-B", skip_serializing_if = "Option::is_none")]
|
||||
pub before_context: Option<u32>,
|
||||
/// Lines before and after match (content mode only)
|
||||
#[serde(rename = "-C", skip_serializing_if = "Option::is_none")]
|
||||
pub context: Option<u32>,
|
||||
/// Enable multiline/cross-line matching
|
||||
#[serde(default, skip_serializing_if = "is_false")]
|
||||
pub multiline: bool,
|
||||
/// Limit output to first N results
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub head_limit: Option<u32>,
|
||||
}
|
||||
|
||||
impl std::fmt::Display for GrepToolParams {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "grep")?;
|
||||
|
||||
// Boolean flags
|
||||
if self.case_insensitive {
|
||||
write!(f, " -i")?;
|
||||
}
|
||||
if self.line_numbers {
|
||||
write!(f, " -n")?;
|
||||
}
|
||||
|
||||
// Context options
|
||||
if let Some(after) = self.after_context {
|
||||
write!(f, " -A {}", after)?;
|
||||
}
|
||||
if let Some(before) = self.before_context {
|
||||
write!(f, " -B {}", before)?;
|
||||
}
|
||||
if let Some(context) = self.context {
|
||||
write!(f, " -C {}", context)?;
|
||||
}
|
||||
|
||||
// Output mode
|
||||
if let Some(mode) = &self.output_mode {
|
||||
match mode {
|
||||
GrepOutputMode::FilesWithMatches => write!(f, " -l")?,
|
||||
GrepOutputMode::Count => write!(f, " -c")?,
|
||||
GrepOutputMode::Content => {} // Default mode
|
||||
}
|
||||
}
|
||||
|
||||
// Head limit
|
||||
if let Some(limit) = self.head_limit {
|
||||
write!(f, " | head -{}", limit)?;
|
||||
}
|
||||
|
||||
// Glob pattern
|
||||
if let Some(glob) = &self.glob {
|
||||
write!(f, " --include=\"{}\"", glob)?;
|
||||
}
|
||||
|
||||
// File type
|
||||
if let Some(file_type) = &self.file_type {
|
||||
write!(f, " --type={}", file_type)?;
|
||||
}
|
||||
|
||||
// Multiline
|
||||
if self.multiline {
|
||||
write!(f, " -P")?; // Perl-compatible regex for multiline
|
||||
}
|
||||
|
||||
// Pattern (escaped if contains special characters)
|
||||
write!(f, " \"{}\"", self.pattern)?;
|
||||
|
||||
// Path
|
||||
if let Some(path) = &self.path {
|
||||
write!(f, " {}", path)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
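
// Illustrative rendering: a `GrepToolParams` with pattern "TODO", path `Some("src")`,
// `case_insensitive: true`, `output_mode: Some(GrepOutputMode::Count)`, and all other
// fields unset displays as:
//   grep -i -c "TODO" src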
|
||||
|
||||
#[derive(Deserialize, Serialize, JsonSchema, Debug)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
pub enum TodoPriority {
|
||||
High,
|
||||
Medium,
|
||||
Low,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Serialize, JsonSchema, Debug)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
pub enum TodoStatus {
|
||||
Pending,
|
||||
InProgress,
|
||||
Completed,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Serialize, JsonSchema, Debug)]
|
||||
pub struct Todo {
|
||||
/// Unique identifier
|
||||
pub id: String,
|
||||
/// Task description
|
||||
pub content: String,
|
||||
/// Priority level of the todo
|
||||
pub priority: TodoPriority,
|
||||
/// Current status of the todo
|
||||
pub status: TodoStatus,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, JsonSchema, Debug)]
|
||||
pub struct TodoWriteToolParams {
|
||||
pub todos: Vec<Todo>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, JsonSchema, Debug)]
|
||||
pub struct ExitPlanModeToolParams {
|
||||
/// Implementation plan in markdown format
|
||||
pub plan: String,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, JsonSchema, Debug)]
|
||||
pub struct TaskToolParams {
|
||||
/// Short 3-5 word description of task
|
||||
pub description: String,
|
||||
/// Detailed task for agent to perform
|
||||
pub prompt: String,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, JsonSchema, Debug)]
|
||||
pub struct NotebookReadToolParams {
|
||||
/// Absolute path to .ipynb file
|
||||
pub notebook_path: PathBuf,
|
||||
/// Specific cell ID to read
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub cell_id: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Serialize, JsonSchema, Debug)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
pub enum CellType {
|
||||
Code,
|
||||
Markdown,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Serialize, JsonSchema, Debug)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
pub enum EditMode {
|
||||
Replace,
|
||||
Insert,
|
||||
Delete,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, JsonSchema, Debug)]
|
||||
pub struct NotebookEditToolParams {
|
||||
/// Absolute path to .ipynb file
|
||||
pub notebook_path: PathBuf,
|
||||
/// New cell content
|
||||
pub new_source: String,
|
||||
/// Cell ID to edit
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub cell_id: Option<String>,
|
||||
/// Type of cell (code or markdown)
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub cell_type: Option<CellType>,
|
||||
/// Edit operation mode
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub edit_mode: Option<EditMode>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Serialize, JsonSchema, Debug)]
|
||||
pub struct MultiEditItem {
|
||||
/// The text to search for and replace
|
||||
pub old_string: String,
|
||||
/// The replacement text
|
||||
pub new_string: String,
|
||||
/// Whether to replace all occurrences or just the first
|
||||
#[serde(default, skip_serializing_if = "is_false")]
|
||||
pub replace_all: bool,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, JsonSchema, Debug)]
|
||||
pub struct MultiEditToolParams {
|
||||
/// Absolute path to file
|
||||
pub file_path: PathBuf,
|
||||
/// List of edits to apply
|
||||
pub edits: Vec<MultiEditItem>,
|
||||
}
|
||||
|
||||
fn is_false(v: &bool) -> bool {
|
||||
!*v
|
||||
}
|
||||
|
||||
#[derive(Deserialize, JsonSchema, Debug)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
pub enum GrepOutputMode {
|
||||
Content,
|
||||
FilesWithMatches,
|
||||
Count,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, JsonSchema, Debug)]
|
||||
pub struct WebFetchToolParams {
|
||||
/// Valid URL to fetch
|
||||
#[serde(rename = "url")]
|
||||
pub url: String,
|
||||
/// What to extract from content
|
||||
pub prompt: String,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, JsonSchema, Debug)]
|
||||
pub struct WebSearchToolParams {
|
||||
/// Search query (min 2 chars)
|
||||
pub query: String,
|
||||
/// Only include these domains
|
||||
#[serde(default, skip_serializing_if = "Vec::is_empty")]
|
||||
pub allowed_domains: Vec<String>,
|
||||
/// Exclude these domains
|
||||
#[serde(default, skip_serializing_if = "Vec::is_empty")]
|
||||
pub blocked_domains: Vec<String>,
|
||||
}
|
||||
|
||||
impl std::fmt::Display for WebSearchToolParams {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "\"{}\"", self.query)?;
|
||||
|
||||
if !self.allowed_domains.is_empty() {
|
||||
write!(f, " (allowed: {})", self.allowed_domains.join(", "))?;
|
||||
}
|
||||
|
||||
if !self.blocked_domains.is_empty() {
|
||||
write!(f, " (blocked: {})", self.blocked_domains.join(", "))?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
crates/agent_servers/src/codex.rs (new file, 121 lines)
@@ -0,0 +1,121 @@
|
||||
use crate::stdio_agent_server::StdioAgentServer;
|
||||
use crate::{AgentServerCommand, AgentServerVersion};
|
||||
use anyhow::{Context as _, Result};
|
||||
use gpui::{AsyncApp, Entity};
|
||||
use project::Project;
|
||||
use settings::SettingsStore;
|
||||
|
||||
use crate::AllAgentServersSettings;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct Codex;
|
||||
|
||||
const ACP_ARG: &str = "experimental-acp";
|
||||
|
||||
impl StdioAgentServer for Codex {
|
||||
fn name(&self) -> &'static str {
|
||||
"Codex"
|
||||
}
|
||||
|
||||
fn empty_state_headline(&self) -> &'static str {
|
||||
"Welcome to Codex"
|
||||
}
|
||||
|
||||
fn empty_state_message(&self) -> &'static str {
|
||||
""
|
||||
}
|
||||
|
||||
fn supports_always_allow(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn logo(&self) -> ui::IconName {
|
||||
ui::IconName::AiOpenAi
|
||||
}
|
||||
|
||||
async fn command(
|
||||
&self,
|
||||
project: &Entity<Project>,
|
||||
cx: &mut AsyncApp,
|
||||
) -> Result<AgentServerCommand> {
|
||||
let settings = cx.read_global(|settings: &SettingsStore, _| {
|
||||
settings.get::<AllAgentServersSettings>(None).codex.clone()
|
||||
})?;
|
||||
|
||||
if let Some(command) =
|
||||
AgentServerCommand::resolve("codex", &[ACP_ARG], settings, &project, cx).await
|
||||
{
|
||||
return Ok(command);
|
||||
};
|
||||
|
||||
let (fs, node_runtime) = project.update(cx, |project, _| {
|
||||
(project.fs().clone(), project.node_runtime().cloned())
|
||||
})?;
|
||||
let node_runtime = node_runtime.context("codex not found on path")?;
|
||||
|
||||
let directory = ::paths::agent_servers_dir().join("codex");
|
||||
fs.create_dir(&directory).await?;
|
||||
node_runtime
|
||||
.npm_install_packages(&directory, &[("@openai/codex", "latest")])
|
||||
.await?;
|
||||
let path = directory.join("node_modules/.bin/codex");
|
||||
|
||||
Ok(AgentServerCommand {
|
||||
path,
|
||||
args: vec![ACP_ARG.into()],
|
||||
env: None,
|
||||
})
|
||||
}
|
||||
|
||||
async fn version(&self, command: &AgentServerCommand) -> Result<AgentServerVersion> {
|
||||
let version_fut = util::command::new_smol_command(&command.path)
|
||||
.args(command.args.iter())
|
||||
.arg("--version")
|
||||
.kill_on_drop(true)
|
||||
.output();
|
||||
|
||||
let help_fut = util::command::new_smol_command(&command.path)
|
||||
.args(command.args.iter())
|
||||
.arg("--help")
|
||||
.kill_on_drop(true)
|
||||
.output();
|
||||
|
||||
let (version_output, help_output) = futures::future::join(version_fut, help_fut).await;
|
||||
|
||||
let current_version = String::from_utf8(version_output?.stdout)?;
|
||||
let supported = String::from_utf8(help_output?.stdout)?.contains(ACP_ARG);
|
||||
|
||||
if supported {
|
||||
Ok(AgentServerVersion::Supported)
|
||||
} else {
|
||||
Ok(AgentServerVersion::Unsupported {
|
||||
error_message: format!(
|
||||
"Your installed version of Codex {} doesn't support the Agentic Coding Protocol (ACP).",
|
||||
current_version
|
||||
).into(),
|
||||
upgrade_message: "Upgrade Codex to Latest".into(),
|
||||
upgrade_command: "npm install -g @openai/codex@latest".into(),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
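
// Note: ACP support is detected by checking whether `codex --help` mentions the
// `experimental-acp` flag; builds whose help text lacks it take the `Unsupported` branch above.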
|
||||
|
||||
#[cfg(test)]
|
||||
pub(crate) mod tests {
|
||||
use super::*;
|
||||
use crate::AgentServerCommand;
|
||||
use std::path::Path;
|
||||
|
||||
crate::common_e2e_tests!(Codex);
|
||||
|
||||
pub fn local_command() -> AgentServerCommand {
|
||||
let cli_path = Path::new(env!("CARGO_MANIFEST_DIR"))
|
||||
.join("../../../codex/codex-rs/target/debug/codex");
|
||||
|
||||
AgentServerCommand {
|
||||
path: cli_path,
|
||||
args: vec![],
|
||||
env: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
crates/agent_servers/src/e2e_tests.rs (new file, 371 lines)
@@ -0,0 +1,371 @@
|
||||
use std::{path::Path, sync::Arc, time::Duration};
|
||||
|
||||
use crate::{AgentServer, AgentServerSettings, AllAgentServersSettings};
|
||||
use acp_thread::{
|
||||
AcpThread, AgentThreadEntry, ToolCall, ToolCallConfirmation, ToolCallContent, ToolCallStatus,
|
||||
};
|
||||
use agentic_coding_protocol as acp;
|
||||
use futures::{FutureExt, StreamExt, channel::mpsc, select};
|
||||
use gpui::{Entity, TestAppContext};
|
||||
use indoc::indoc;
|
||||
use project::{FakeFs, Project};
|
||||
use serde_json::json;
|
||||
use settings::{Settings, SettingsStore};
|
||||
use util::path;
|
||||
|
||||
pub async fn test_basic(server: impl AgentServer + 'static, cx: &mut TestAppContext) {
|
||||
let fs = init_test(cx).await;
|
||||
let project = Project::test(fs, [], cx).await;
|
||||
let thread = new_test_thread(server, project.clone(), "/private/tmp", cx).await;
|
||||
|
||||
thread
|
||||
.update(cx, |thread, cx| thread.send_raw("Hello from Zed!", cx))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
thread.read_with(cx, |thread, _| {
|
||||
assert_eq!(thread.entries().len(), 2);
|
||||
assert!(matches!(
|
||||
thread.entries()[0],
|
||||
AgentThreadEntry::UserMessage(_)
|
||||
));
|
||||
assert!(matches!(
|
||||
thread.entries()[1],
|
||||
AgentThreadEntry::AssistantMessage(_)
|
||||
));
|
||||
});
|
||||
}
|
||||
|
||||
pub async fn test_path_mentions(server: impl AgentServer + 'static, cx: &mut TestAppContext) {
|
||||
let _fs = init_test(cx).await;
|
||||
|
||||
let tempdir = tempfile::tempdir().unwrap();
|
||||
std::fs::write(
|
||||
tempdir.path().join("foo.rs"),
|
||||
indoc! {"
|
||||
fn main() {
|
||||
println!(\"Hello, world!\");
|
||||
}
|
||||
"},
|
||||
)
|
||||
.expect("failed to write file");
|
||||
let project = Project::example([tempdir.path()], &mut cx.to_async()).await;
|
||||
let thread = new_test_thread(server, project.clone(), tempdir.path(), cx).await;
|
||||
thread
|
||||
.update(cx, |thread, cx| {
|
||||
thread.send(
|
||||
acp::SendUserMessageParams {
|
||||
chunks: vec![
|
||||
acp::UserMessageChunk::Text {
|
||||
text: "Read the file ".into(),
|
||||
},
|
||||
acp::UserMessageChunk::Path {
|
||||
path: Path::new("foo.rs").into(),
|
||||
},
|
||||
acp::UserMessageChunk::Text {
|
||||
text: " and tell me what the content of the println! is".into(),
|
||||
},
|
||||
],
|
||||
},
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
thread.read_with(cx, |thread, cx| {
|
||||
assert_eq!(thread.entries().len(), 3);
|
||||
assert!(matches!(
|
||||
thread.entries()[0],
|
||||
AgentThreadEntry::UserMessage(_)
|
||||
));
|
||||
assert!(matches!(thread.entries()[1], AgentThreadEntry::ToolCall(_)));
|
||||
let AgentThreadEntry::AssistantMessage(assistant_message) = &thread.entries()[2] else {
|
||||
panic!("Expected AssistantMessage")
|
||||
};
|
||||
assert!(
|
||||
assistant_message.to_markdown(cx).contains("Hello, world!"),
|
||||
"unexpected assistant message: {:?}",
|
||||
assistant_message.to_markdown(cx)
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
pub async fn test_tool_call(server: impl AgentServer + 'static, cx: &mut TestAppContext) {
|
||||
let fs = init_test(cx).await;
|
||||
fs.insert_tree(
|
||||
path!("/private/tmp"),
|
||||
json!({"foo": "Lorem ipsum dolor", "bar": "bar", "baz": "baz"}),
|
||||
)
|
||||
.await;
|
||||
let project = Project::test(fs, [path!("/private/tmp").as_ref()], cx).await;
|
||||
let thread = new_test_thread(server, project.clone(), "/private/tmp", cx).await;
|
||||
|
||||
thread
|
||||
.update(cx, |thread, cx| {
|
||||
thread.send_raw(
|
||||
"Read the '/private/tmp/foo' file and tell me what you see.",
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
thread.read_with(cx, |thread, _cx| {
|
||||
assert!(matches!(
|
||||
&thread.entries()[2],
|
||||
AgentThreadEntry::ToolCall(ToolCall {
|
||||
status: ToolCallStatus::Allowed { .. },
|
||||
..
|
||||
})
|
||||
));
|
||||
|
||||
assert!(matches!(
|
||||
thread.entries()[3],
|
||||
AgentThreadEntry::AssistantMessage(_)
|
||||
));
|
||||
});
|
||||
}
|
||||
|
||||
pub async fn test_tool_call_with_confirmation(
|
||||
server: impl AgentServer + 'static,
|
||||
cx: &mut TestAppContext,
|
||||
) {
|
||||
let fs = init_test(cx).await;
|
||||
let project = Project::test(fs, [path!("/private/tmp").as_ref()], cx).await;
|
||||
let thread = new_test_thread(server, project.clone(), "/private/tmp", cx).await;
|
||||
let full_turn = thread.update(cx, |thread, cx| {
|
||||
thread.send_raw(r#"Run `echo "Hello, world!"`"#, cx)
|
||||
});
|
||||
|
||||
run_until_first_tool_call(&thread, cx).await;
|
||||
|
||||
let tool_call_id = thread.read_with(cx, |thread, _cx| {
|
||||
let AgentThreadEntry::ToolCall(ToolCall {
|
||||
id,
|
||||
status:
|
||||
ToolCallStatus::WaitingForConfirmation {
|
||||
confirmation: ToolCallConfirmation::Execute { root_command, .. },
|
||||
..
|
||||
},
|
||||
..
|
||||
}) = &thread.entries()[2]
|
||||
else {
|
||||
panic!();
|
||||
};
|
||||
|
||||
assert_eq!(root_command, "echo");
|
||||
|
||||
*id
|
||||
});
|
||||
|
||||
thread.update(cx, |thread, cx| {
|
||||
thread.authorize_tool_call(tool_call_id, acp::ToolCallConfirmationOutcome::Allow, cx);
|
||||
|
||||
assert!(matches!(
|
||||
&thread.entries()[2],
|
||||
AgentThreadEntry::ToolCall(ToolCall {
|
||||
status: ToolCallStatus::Allowed { .. },
|
||||
..
|
||||
})
|
||||
));
|
||||
});
|
||||
|
||||
full_turn.await.unwrap();
|
||||
|
||||
thread.read_with(cx, |thread, cx| {
|
||||
let AgentThreadEntry::ToolCall(ToolCall {
|
||||
content: Some(ToolCallContent::Markdown { markdown }),
|
||||
status: ToolCallStatus::Allowed { .. },
|
||||
..
|
||||
}) = &thread.entries()[2]
|
||||
else {
|
||||
panic!();
|
||||
};
|
||||
|
||||
markdown.read_with(cx, |md, _cx| {
|
||||
assert!(
|
||||
md.source().contains("Hello, world!"),
|
||||
r#"Expected '{}' to contain "Hello, world!""#,
|
||||
md.source()
|
||||
);
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
pub async fn test_cancel(server: impl AgentServer + 'static, cx: &mut TestAppContext) {
|
||||
let fs = init_test(cx).await;
|
||||
|
||||
    let project = Project::test(fs, [path!("/private/tmp").as_ref()], cx).await;
    let thread = new_test_thread(server, project.clone(), "/private/tmp", cx).await;
    let full_turn = thread.update(cx, |thread, cx| {
        thread.send_raw(r#"Run `echo "Hello, world!"`"#, cx)
    });

    let first_tool_call_ix = run_until_first_tool_call(&thread, cx).await;

    thread.read_with(cx, |thread, _cx| {
        let AgentThreadEntry::ToolCall(ToolCall {
            id,
            status:
                ToolCallStatus::WaitingForConfirmation {
                    confirmation: ToolCallConfirmation::Execute { root_command, .. },
                    ..
                },
            ..
        }) = &thread.entries()[first_tool_call_ix]
        else {
            panic!("{:?}", thread.entries()[1]);
        };

        assert_eq!(root_command, "echo");

        *id
    });

    thread
        .update(cx, |thread, cx| thread.cancel(cx))
        .await
        .unwrap();
    full_turn.await.unwrap();
    thread.read_with(cx, |thread, _| {
        let AgentThreadEntry::ToolCall(ToolCall {
            status: ToolCallStatus::Canceled,
            ..
        }) = &thread.entries()[first_tool_call_ix]
        else {
            panic!();
        };
    });

    thread
        .update(cx, |thread, cx| {
            thread.send_raw(r#"Stop running and say goodbye to me."#, cx)
        })
        .await
        .unwrap();
    thread.read_with(cx, |thread, _| {
        assert!(matches!(
            &thread.entries().last().unwrap(),
            AgentThreadEntry::AssistantMessage(..),
        ))
    });
}

#[macro_export]
macro_rules! common_e2e_tests {
    ($server:expr) => {
        mod common_e2e {
            use super::*;

            #[::gpui::test]
            #[cfg_attr(not(feature = "e2e"), ignore)]
            async fn basic(cx: &mut ::gpui::TestAppContext) {
                $crate::e2e_tests::test_basic($server, cx).await;
            }

            #[::gpui::test]
            #[cfg_attr(not(feature = "e2e"), ignore)]
            async fn path_mentions(cx: &mut ::gpui::TestAppContext) {
                $crate::e2e_tests::test_path_mentions($server, cx).await;
            }

            #[::gpui::test]
            #[cfg_attr(not(feature = "e2e"), ignore)]
            async fn tool_call(cx: &mut ::gpui::TestAppContext) {
                $crate::e2e_tests::test_tool_call($server, cx).await;
            }

            #[::gpui::test]
            #[cfg_attr(not(feature = "e2e"), ignore)]
            async fn tool_call_with_confirmation(cx: &mut ::gpui::TestAppContext) {
                $crate::e2e_tests::test_tool_call_with_confirmation($server, cx).await;
            }

            #[::gpui::test]
            #[cfg_attr(not(feature = "e2e"), ignore)]
            async fn cancel(cx: &mut ::gpui::TestAppContext) {
                $crate::e2e_tests::test_cancel($server, cx).await;
            }
        }
    };
}

// Helpers

pub async fn init_test(cx: &mut TestAppContext) -> Arc<FakeFs> {
    env_logger::try_init().ok();

    cx.update(|cx| {
        let settings_store = SettingsStore::test(cx);
        cx.set_global(settings_store);
        Project::init_settings(cx);
        language::init(cx);
        crate::settings::init(cx);

        crate::AllAgentServersSettings::override_global(
            AllAgentServersSettings {
                claude: Some(AgentServerSettings {
                    command: crate::claude::tests::local_command(),
                }),
                codex: Some(AgentServerSettings {
                    command: crate::codex::tests::local_command(),
                }),
                gemini: Some(AgentServerSettings {
                    command: crate::gemini::tests::local_command(),
                }),
            },
            cx,
        );
    });

    cx.executor().allow_parking();

    FakeFs::new(cx.executor())
}

pub async fn new_test_thread(
    server: impl AgentServer + 'static,
    project: Entity<Project>,
    current_dir: impl AsRef<Path>,
    cx: &mut TestAppContext,
) -> Entity<AcpThread> {
    let thread = cx
        .update(|cx| server.new_thread(current_dir.as_ref(), &project, cx))
        .await
        .unwrap();

    thread
        .update(cx, |thread, _| thread.initialize())
        .await
        .unwrap();
    thread
}

pub async fn run_until_first_tool_call(
    thread: &Entity<AcpThread>,
    cx: &mut TestAppContext,
) -> usize {
    let (mut tx, mut rx) = mpsc::channel::<usize>(1);

    let subscription = cx.update(|cx| {
        cx.subscribe(thread, move |thread, _, cx| {
            for (ix, entry) in thread.read(cx).entries().iter().enumerate() {
                if matches!(entry, AgentThreadEntry::ToolCall(_)) {
                    return tx.try_send(ix).unwrap();
                }
            }
        })
    });

    select! {
        // We have to use a smol timer here because
        // cx.background_executor().timer isn't real in the test context
        _ = futures::FutureExt::fuse(smol::Timer::after(Duration::from_secs(10))) => {
            panic!("Timeout waiting for tool call")
        }
        ix = rx.next().fuse() => {
            drop(subscription);
            ix.unwrap()
        }
    }
}
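For orientation, here is a minimal sketch (not part of the diff) of how these helpers compose in a new end-to-end test. The prompt string is illustrative, and it assumes `Gemini` from gemini.rs below is in scope in this module.

#[::gpui::test]
#[cfg_attr(not(feature = "e2e"), ignore)]
async fn example(cx: &mut ::gpui::TestAppContext) {
    let fs = init_test(cx).await;
    let project = Project::test(fs, [path!("/private/tmp").as_ref()], cx).await;
    let thread = new_test_thread(Gemini, project.clone(), "/private/tmp", cx).await;

    // Start a turn, then block until the agent proposes its first tool call.
    let full_turn = thread.update(cx, |thread, cx| thread.send_raw("List the files here", cx));
    let first_tool_call_ix = run_until_first_tool_call(&thread, cx).await;

    thread.read_with(cx, |thread, _| {
        assert!(matches!(
            &thread.entries()[first_tool_call_ix],
            AgentThreadEntry::ToolCall(_)
        ));
    });
    full_turn.await.unwrap();
}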
crates/agent_servers/src/gemini.rs (new file, 123 lines)
@@ -0,0 +1,123 @@
use crate::stdio_agent_server::StdioAgentServer;
use crate::{AgentServerCommand, AgentServerVersion};
use anyhow::{Context as _, Result};
use gpui::{AsyncApp, Entity};
use project::Project;
use settings::SettingsStore;

use crate::AllAgentServersSettings;

#[derive(Clone)]
pub struct Gemini;

const ACP_ARG: &str = "--experimental-acp";

impl StdioAgentServer for Gemini {
    fn name(&self) -> &'static str {
        "Gemini"
    }

    fn empty_state_headline(&self) -> &'static str {
        "Welcome to Gemini"
    }

    fn empty_state_message(&self) -> &'static str {
        "Ask questions, edit files, run commands.\nBe specific for the best results."
    }

    fn supports_always_allow(&self) -> bool {
        true
    }

    fn logo(&self) -> ui::IconName {
        ui::IconName::AiGemini
    }

    async fn command(
        &self,
        project: &Entity<Project>,
        cx: &mut AsyncApp,
    ) -> Result<AgentServerCommand> {
        let settings = cx.read_global(|settings: &SettingsStore, _| {
            settings.get::<AllAgentServersSettings>(None).gemini.clone()
        })?;

        if let Some(command) =
            AgentServerCommand::resolve("gemini", &[ACP_ARG], settings, &project, cx).await
        {
            return Ok(command);
        };

        let (fs, node_runtime) = project.update(cx, |project, _| {
            (project.fs().clone(), project.node_runtime().cloned())
        })?;
        let node_runtime = node_runtime.context("gemini not found on path")?;

        let directory = ::paths::agent_servers_dir().join("gemini");
        fs.create_dir(&directory).await?;
        node_runtime
            .npm_install_packages(&directory, &[("@google/gemini-cli", "latest")])
            .await?;
        let path = directory.join("node_modules/.bin/gemini");

        Ok(AgentServerCommand {
            path,
            args: vec![ACP_ARG.into()],
            env: None,
        })
    }

    async fn version(&self, command: &AgentServerCommand) -> Result<AgentServerVersion> {
        let version_fut = util::command::new_smol_command(&command.path)
            .args(command.args.iter())
            .arg("--version")
            .kill_on_drop(true)
            .output();

        let help_fut = util::command::new_smol_command(&command.path)
            .args(command.args.iter())
            .arg("--help")
            .kill_on_drop(true)
            .output();

        let (version_output, help_output) = futures::future::join(version_fut, help_fut).await;

        let current_version = String::from_utf8(version_output?.stdout)?;
        let supported = String::from_utf8(help_output?.stdout)?.contains(ACP_ARG);

        if supported {
            Ok(AgentServerVersion::Supported)
        } else {
            Ok(AgentServerVersion::Unsupported {
                error_message: format!(
                    "Your installed version of Gemini {} doesn't support the Agentic Coding Protocol (ACP).",
                    current_version
                ).into(),
                upgrade_message: "Upgrade Gemini to Latest".into(),
                upgrade_command: "npm install -g @google/gemini-cli@latest".into(),
            })
        }
    }
}

#[cfg(test)]
pub(crate) mod tests {
    use super::*;
    use crate::AgentServerCommand;
    use std::path::Path;

    crate::common_e2e_tests!(Gemini);

    pub fn local_command() -> AgentServerCommand {
        let cli_path = Path::new(env!("CARGO_MANIFEST_DIR"))
            .join("../../../gemini-cli/packages/cli")
            .to_string_lossy()
            .to_string();

        AgentServerCommand {
            path: "node".into(),
            args: vec![cli_path, ACP_ARG.into()],
            env: None,
        }
    }
}
crates/agent_servers/src/settings.rs (new file, 54 lines)
@@ -0,0 +1,54 @@
use crate::AgentServerCommand;
use anyhow::Result;
use gpui::App;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsSources};

pub fn init(cx: &mut App) {
    AllAgentServersSettings::register(cx);
}

#[derive(Default, Deserialize, Serialize, Clone, JsonSchema, Debug)]
pub struct AllAgentServersSettings {
    pub gemini: Option<AgentServerSettings>,
    pub claude: Option<AgentServerSettings>,
    pub codex: Option<AgentServerSettings>,
}

#[derive(Deserialize, Serialize, Clone, JsonSchema, Debug)]
pub struct AgentServerSettings {
    #[serde(flatten)]
    pub command: AgentServerCommand,
}

impl settings::Settings for AllAgentServersSettings {
    const KEY: Option<&'static str> = Some("agent_servers");

    type FileContent = Self;

    fn load(sources: SettingsSources<Self::FileContent>, _: &mut App) -> Result<Self> {
        let mut settings = AllAgentServersSettings::default();

        for AllAgentServersSettings {
            gemini,
            claude,
            codex,
        } in sources.defaults_and_customizations()
        {
            if gemini.is_some() {
                settings.gemini = gemini.clone();
            }
            if claude.is_some() {
                settings.claude = claude.clone();
            }
            if codex.is_some() {
                settings.codex = codex.clone();
            }
        }

        Ok(settings)
    }

    fn import_from_vscode(_vscode: &settings::VsCodeSettings, _current: &mut Self::FileContent) {}
}
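The `load` merge above is per-field under the `"agent_servers"` key: for each of `gemini`, `claude`, and `codex`, the last source that sets the field wins, and unset fields fall through to earlier sources. Below is a sketch (not part of the diff) of an override that only replaces the Gemini command, assuming from the usages elsewhere in this diff that `AgentServerCommand` has `path`, `args`, and `env` fields convertible from string literals.

// Hypothetical override: only `gemini` is set, so `claude`/`codex` keep
// whatever earlier sources (e.g. defaults) provided.
let user_overrides = AllAgentServersSettings {
    gemini: Some(AgentServerSettings {
        command: AgentServerCommand {
            path: "/usr/local/bin/gemini".into(),
            args: vec!["--experimental-acp".into()],
            env: None,
        },
    }),
    claude: None,
    codex: None,
};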
crates/agent_servers/src/stdio_agent_server.rs (new file, 119 lines)
@@ -0,0 +1,119 @@
use crate::{AgentServer, AgentServerCommand, AgentServerVersion};
use acp_thread::{AcpClientDelegate, AcpThread, LoadError};
use agentic_coding_protocol as acp;
use anyhow::{Result, anyhow};
use gpui::{App, AsyncApp, Entity, Task, prelude::*};
use project::Project;
use std::path::Path;
use util::ResultExt;

pub trait StdioAgentServer: Send + Clone {
    fn logo(&self) -> ui::IconName;
    fn name(&self) -> &'static str;
    fn empty_state_headline(&self) -> &'static str;
    fn empty_state_message(&self) -> &'static str;
    fn supports_always_allow(&self) -> bool;

    fn command(
        &self,
        project: &Entity<Project>,
        cx: &mut AsyncApp,
    ) -> impl Future<Output = Result<AgentServerCommand>>;

    fn version(
        &self,
        command: &AgentServerCommand,
    ) -> impl Future<Output = Result<AgentServerVersion>> + Send;
}

impl<T: StdioAgentServer + 'static> AgentServer for T {
    fn name(&self) -> &'static str {
        self.name()
    }

    fn empty_state_headline(&self) -> &'static str {
        self.empty_state_headline()
    }

    fn empty_state_message(&self) -> &'static str {
        self.empty_state_message()
    }

    fn logo(&self) -> ui::IconName {
        self.logo()
    }

    fn supports_always_allow(&self) -> bool {
        self.supports_always_allow()
    }

    fn new_thread(
        &self,
        root_dir: &Path,
        project: &Entity<Project>,
        cx: &mut App,
    ) -> Task<Result<Entity<AcpThread>>> {
        let root_dir = root_dir.to_path_buf();
        let project = project.clone();
        let this = self.clone();
        let title = self.name().into();

        cx.spawn(async move |cx| {
            let command = this.command(&project, cx).await?;

            let mut child = util::command::new_smol_command(&command.path)
                .args(command.args.iter())
                .current_dir(root_dir)
                .stdin(std::process::Stdio::piped())
                .stdout(std::process::Stdio::piped())
                .stderr(std::process::Stdio::inherit())
                .kill_on_drop(true)
                .spawn()?;

            let stdin = child.stdin.take().unwrap();
            let stdout = child.stdout.take().unwrap();

            cx.new(|cx| {
                let foreground_executor = cx.foreground_executor().clone();

                let (connection, io_fut) = acp::AgentConnection::connect_to_agent(
                    AcpClientDelegate::new(cx.entity().downgrade(), cx.to_async()),
                    stdin,
                    stdout,
                    move |fut| foreground_executor.spawn(fut).detach(),
                );

                let io_task = cx.background_spawn(async move {
                    io_fut.await.log_err();
                });

                let child_status = cx.background_spawn(async move {
                    let result = match child.status().await {
                        Err(e) => Err(anyhow!(e)),
                        Ok(result) if result.success() => Ok(()),
                        Ok(result) => {
                            if let Some(AgentServerVersion::Unsupported {
                                error_message,
                                upgrade_message,
                                upgrade_command,
                            }) = this.version(&command).await.log_err()
                            {
                                Err(anyhow!(LoadError::Unsupported {
                                    error_message,
                                    upgrade_message,
                                    upgrade_command
                                }))
                            } else {
                                Err(anyhow!(LoadError::Exited(result.code().unwrap_or(-127))))
                            }
                        }
                    };
                    drop(io_task);
                    result
                });

                AcpThread::new(connection, title, Some(child_status), project.clone(), cx)
            })
        })
    }
}
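Because of the blanket `impl<T: StdioAgentServer + 'static> AgentServer for T` above, adding a new stdio-based agent only requires implementing the trait; process spawning and the ACP connection come for free. A hypothetical minimal implementation follows (the binary name and messages are invented, the icon reuses a variant that appears elsewhere in this diff, and the same imports as stdio_agent_server.rs are assumed):

#[derive(Clone)]
struct MyAgent;

impl StdioAgentServer for MyAgent {
    fn name(&self) -> &'static str {
        "My Agent"
    }

    fn empty_state_headline(&self) -> &'static str {
        "Welcome to My Agent"
    }

    fn empty_state_message(&self) -> &'static str {
        "Ask a question to get started."
    }

    fn supports_always_allow(&self) -> bool {
        false
    }

    fn logo(&self) -> ui::IconName {
        // Reusing an existing icon variant for the sketch.
        ui::IconName::AiGemini
    }

    async fn command(
        &self,
        _project: &Entity<Project>,
        _cx: &mut AsyncApp,
    ) -> Result<AgentServerCommand> {
        // Assume the agent binary is already on PATH; a real implementation
        // would consult settings and fall back to an install step, as
        // gemini.rs does above.
        Ok(AgentServerCommand {
            path: "my-agent".into(),
            args: vec!["--acp".into()],
            env: None,
        })
    }

    async fn version(&self, _command: &AgentServerCommand) -> Result<AgentServerVersion> {
        Ok(AgentServerVersion::Supported)
    }
}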
@@ -16,7 +16,7 @@ doctest = false
test-support = ["gpui/test-support", "language/test-support"]

[dependencies]
acp.workspace = true
acp_thread.workspace = true
agent.workspace = true
agentic-coding-protocol.workspace = true
agent_settings.workspace = true
@@ -2,4 +2,5 @@ mod completion_provider
mod message_history;
mod thread_view;

pub use message_history::MessageHistory;
pub use thread_view::AcpThreadView;
@@ -3,19 +3,25 @@ pub struct MessageHistory<T> {
    current: Option<usize>,
}

impl<T> MessageHistory<T> {
    pub fn new() -> Self {
impl<T> Default for MessageHistory<T> {
    fn default() -> Self {
        MessageHistory {
            items: Vec::new(),
            current: None,
        }
    }
}

impl<T> MessageHistory<T> {
    pub fn push(&mut self, message: T) {
        self.current.take();
        self.items.push(message);
    }

    pub fn reset_position(&mut self) {
        self.current.take();
    }

    pub fn prev(&mut self) -> Option<&T> {
        if self.items.is_empty() {
            return None;
@@ -46,7 +52,7 @@ mod tests {

    #[test]
    fn test_prev_next() {
        let mut history = MessageHistory::new();
        let mut history = MessageHistory::default();

        // Test empty history
        assert_eq!(history.prev(), None);
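A short usage sketch (not part of the diff) of the refactored MessageHistory: construction now goes through `Default` instead of `new()`. The only behavior the truncated test above confirms is that `prev()` on an empty history returns `None`; the backwards-walk comments below are assumptions.

let mut history: MessageHistory<String> = MessageHistory::default();
assert_eq!(history.prev(), None); // empty history, as in test_prev_next

history.push("first prompt".into());
history.push("second prompt".into());

// Presumably walks back from the most recent entry...
let _latest = history.prev();
// ...and reset_position() moves the cursor back past the newest entry,
// which the panel does when switching views.
history.reset_position();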
@@ -787,6 +787,15 @@ impl ActiveThread {
|
||||
.unwrap()
|
||||
}
|
||||
});
|
||||
|
||||
let workspace_subscription = if let Some(workspace) = workspace.upgrade() {
|
||||
Some(cx.observe_release(&workspace, |this, _, cx| {
|
||||
this.dismiss_notifications(cx);
|
||||
}))
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let mut this = Self {
|
||||
language_registry,
|
||||
thread_store,
|
||||
@@ -834,6 +843,10 @@ impl ActiveThread {
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(subscription) = workspace_subscription {
|
||||
this._subscriptions.push(subscription);
|
||||
}
|
||||
|
||||
this
|
||||
}
|
||||
|
||||
@@ -983,30 +996,57 @@ impl ActiveThread {
|
||||
| ThreadEvent::SummaryChanged => {
|
||||
self.save_thread(cx);
|
||||
}
|
||||
ThreadEvent::Stopped(reason) => match reason {
|
||||
Ok(StopReason::EndTurn | StopReason::MaxTokens) => {
|
||||
let used_tools = self.thread.read(cx).used_tools_since_last_user_message();
|
||||
self.play_notification_sound(window, cx);
|
||||
self.show_notification(
|
||||
if used_tools {
|
||||
"Finished running tools"
|
||||
} else {
|
||||
"New message"
|
||||
},
|
||||
IconName::ZedAssistant,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
ThreadEvent::Stopped(reason) => {
|
||||
match reason {
|
||||
Ok(StopReason::EndTurn | StopReason::MaxTokens) => {
|
||||
let used_tools = self.thread.read(cx).used_tools_since_last_user_message();
|
||||
self.notify_with_sound(
|
||||
if used_tools {
|
||||
"Finished running tools"
|
||||
} else {
|
||||
"New message"
|
||||
},
|
||||
IconName::ZedAssistant,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
}
|
||||
Ok(StopReason::ToolUse) => {
|
||||
// Don't notify for intermediate tool use
|
||||
}
|
||||
Ok(StopReason::Refusal) => {
|
||||
self.notify_with_sound(
|
||||
"Language model refused to respond",
|
||||
IconName::Warning,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
}
|
||||
Err(error) => {
|
||||
self.notify_with_sound(
|
||||
"Agent stopped due to an error",
|
||||
IconName::Warning,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
|
||||
let error_message = error
|
||||
.chain()
|
||||
.map(|err| err.to_string())
|
||||
.collect::<Vec<_>>()
|
||||
.join("\n");
|
||||
self.last_error = Some(ThreadError::Message {
|
||||
header: "Error interacting with language model".into(),
|
||||
message: error_message.into(),
|
||||
});
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
},
|
||||
}
|
||||
ThreadEvent::ToolConfirmationNeeded => {
|
||||
self.play_notification_sound(window, cx);
|
||||
self.show_notification("Waiting for tool confirmation", IconName::Info, window, cx);
|
||||
self.notify_with_sound("Waiting for tool confirmation", IconName::Info, window, cx);
|
||||
}
|
||||
ThreadEvent::ToolUseLimitReached => {
|
||||
self.play_notification_sound(window, cx);
|
||||
self.show_notification(
|
||||
self.notify_with_sound(
|
||||
"Consecutive tool use limit reached.",
|
||||
IconName::Warning,
|
||||
window,
|
||||
@@ -1149,9 +1189,6 @@ impl ActiveThread {
|
||||
self.save_thread(cx);
|
||||
cx.notify();
|
||||
}
|
||||
ThreadEvent::RetriesFailed { message } => {
|
||||
self.show_notification(message, ui::IconName::Warning, window, cx);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1206,6 +1243,17 @@ impl ActiveThread {
|
||||
}
|
||||
}
|
||||
|
||||
fn notify_with_sound(
|
||||
&mut self,
|
||||
caption: impl Into<SharedString>,
|
||||
icon: IconName,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<ActiveThread>,
|
||||
) {
|
||||
self.play_notification_sound(window, cx);
|
||||
self.show_notification(caption, icon, window, cx);
|
||||
}
|
||||
|
||||
fn pop_up(
|
||||
&mut self,
|
||||
icon: IconName,
|
||||
|
||||
@@ -24,9 +24,10 @@ use project::{
|
||||
context_server_store::{ContextServerConfiguration, ContextServerStatus, ContextServerStore},
|
||||
project_settings::{ContextServerSettings, ProjectSettings},
|
||||
};
|
||||
use proto::Plan;
|
||||
use settings::{Settings, update_settings_file};
|
||||
use ui::{
|
||||
ContextMenu, Disclosure, Divider, DividerColor, ElevationIndex, Indicator, PopoverMenu,
|
||||
Chip, ContextMenu, Disclosure, Divider, DividerColor, ElevationIndex, Indicator, PopoverMenu,
|
||||
Scrollbar, ScrollbarState, Switch, SwitchColor, Tooltip, prelude::*,
|
||||
};
|
||||
use util::ResultExt as _;
|
||||
@@ -171,6 +172,15 @@ impl AgentConfiguration {
|
||||
.copied()
|
||||
.unwrap_or(false);
|
||||
|
||||
let is_zed_provider = provider.id() == ZED_CLOUD_PROVIDER_ID;
|
||||
let current_plan = if is_zed_provider {
|
||||
self.workspace
|
||||
.upgrade()
|
||||
.and_then(|workspace| workspace.read(cx).user_store().read(cx).current_plan())
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
v_flex()
|
||||
.when(is_expanded, |this| this.mb_2())
|
||||
.child(
|
||||
@@ -208,14 +218,31 @@ impl AgentConfiguration {
|
||||
.size(IconSize::Small)
|
||||
.color(Color::Muted),
|
||||
)
|
||||
.child(Label::new(provider_name.clone()).size(LabelSize::Large))
|
||||
.when(
|
||||
provider.is_authenticated(cx) && !is_expanded,
|
||||
|parent| {
|
||||
parent.child(
|
||||
Icon::new(IconName::Check).color(Color::Success),
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_1()
|
||||
.child(
|
||||
Label::new(provider_name.clone())
|
||||
.size(LabelSize::Large),
|
||||
)
|
||||
},
|
||||
.map(|this| {
|
||||
if is_zed_provider {
|
||||
this.gap_2().child(
|
||||
self.render_zed_plan_info(current_plan, cx),
|
||||
)
|
||||
} else {
|
||||
this.when(
|
||||
provider.is_authenticated(cx)
|
||||
&& !is_expanded,
|
||||
|parent| {
|
||||
parent.child(
|
||||
Icon::new(IconName::Check)
|
||||
.color(Color::Success),
|
||||
)
|
||||
},
|
||||
)
|
||||
}
|
||||
}),
|
||||
),
|
||||
)
|
||||
.child(
|
||||
@@ -431,6 +458,37 @@ impl AgentConfiguration {
|
||||
.child(self.render_sound_notification(cx))
|
||||
}
|
||||
|
||||
fn render_zed_plan_info(&self, plan: Option<Plan>, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
if let Some(plan) = plan {
|
||||
let free_chip_bg = cx
|
||||
.theme()
|
||||
.colors()
|
||||
.editor_background
|
||||
.opacity(0.5)
|
||||
.blend(cx.theme().colors().text_accent.opacity(0.05));
|
||||
|
||||
let pro_chip_bg = cx
|
||||
.theme()
|
||||
.colors()
|
||||
.editor_background
|
||||
.opacity(0.5)
|
||||
.blend(cx.theme().colors().text_accent.opacity(0.2));
|
||||
|
||||
let (plan_name, label_color, bg_color) = match plan {
|
||||
Plan::Free => ("Free", Color::Default, free_chip_bg),
|
||||
Plan::ZedProTrial => ("Pro Trial", Color::Accent, pro_chip_bg),
|
||||
Plan::ZedPro => ("Pro", Color::Accent, pro_chip_bg),
|
||||
};
|
||||
|
||||
Chip::new(plan_name.to_string())
|
||||
.bg_color(bg_color)
|
||||
.label_color(label_color)
|
||||
.into_any_element()
|
||||
} else {
|
||||
div().into_any_element()
|
||||
}
|
||||
}
|
||||
|
||||
fn render_context_servers_section(
|
||||
&mut self,
|
||||
window: &mut Window,
|
||||
@@ -491,6 +549,7 @@ impl AgentConfiguration {
|
||||
category_filter: Some(
|
||||
ExtensionCategoryFilter::ContextServers,
|
||||
),
|
||||
id: None,
|
||||
}
|
||||
.boxed_clone(),
|
||||
cx,
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
use crate::{Keep, KeepAll, OpenAgentDiff, Reject, RejectAll};
|
||||
use agent::{Thread, ThreadEvent};
|
||||
use acp_thread::{AcpThread, AcpThreadEvent};
|
||||
use agent::{Thread, ThreadEvent, ThreadSummary};
|
||||
use agent_settings::AgentSettings;
|
||||
use anyhow::Result;
|
||||
use assistant_tool::ActionLog;
|
||||
use buffer_diff::DiffHunkStatus;
|
||||
use collections::{HashMap, HashSet};
|
||||
use editor::{
|
||||
@@ -41,16 +43,108 @@ use zed_actions::assistant::ToggleFocus;
|
||||
pub struct AgentDiffPane {
|
||||
multibuffer: Entity<MultiBuffer>,
|
||||
editor: Entity<Editor>,
|
||||
thread: Entity<Thread>,
|
||||
thread: AgentDiffThread,
|
||||
focus_handle: FocusHandle,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
title: SharedString,
|
||||
_subscriptions: Vec<Subscription>,
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Eq, Clone)]
|
||||
pub enum AgentDiffThread {
|
||||
Native(Entity<Thread>),
|
||||
AcpThread(Entity<AcpThread>),
|
||||
}
|
||||
|
||||
impl AgentDiffThread {
|
||||
fn project(&self, cx: &App) -> Entity<Project> {
|
||||
match self {
|
||||
AgentDiffThread::Native(thread) => thread.read(cx).project().clone(),
|
||||
AgentDiffThread::AcpThread(thread) => thread.read(cx).project().clone(),
|
||||
}
|
||||
}
|
||||
fn action_log(&self, cx: &App) -> Entity<ActionLog> {
|
||||
match self {
|
||||
AgentDiffThread::Native(thread) => thread.read(cx).action_log().clone(),
|
||||
AgentDiffThread::AcpThread(thread) => thread.read(cx).action_log().clone(),
|
||||
}
|
||||
}
|
||||
|
||||
fn summary(&self, cx: &App) -> ThreadSummary {
|
||||
match self {
|
||||
AgentDiffThread::Native(thread) => thread.read(cx).summary().clone(),
|
||||
AgentDiffThread::AcpThread(thread) => ThreadSummary::Ready(thread.read(cx).title()),
|
||||
}
|
||||
}
|
||||
|
||||
fn is_generating(&self, cx: &App) -> bool {
|
||||
match self {
|
||||
AgentDiffThread::Native(thread) => thread.read(cx).is_generating(),
|
||||
AgentDiffThread::AcpThread(thread) => {
|
||||
thread.read(cx).status() == acp_thread::ThreadStatus::Generating
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn has_pending_edit_tool_uses(&self, cx: &App) -> bool {
|
||||
match self {
|
||||
AgentDiffThread::Native(thread) => thread.read(cx).has_pending_edit_tool_uses(),
|
||||
AgentDiffThread::AcpThread(thread) => thread.read(cx).has_pending_edit_tool_calls(),
|
||||
}
|
||||
}
|
||||
|
||||
fn downgrade(&self) -> WeakAgentDiffThread {
|
||||
match self {
|
||||
AgentDiffThread::Native(thread) => WeakAgentDiffThread::Native(thread.downgrade()),
|
||||
AgentDiffThread::AcpThread(thread) => {
|
||||
WeakAgentDiffThread::AcpThread(thread.downgrade())
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Entity<Thread>> for AgentDiffThread {
|
||||
fn from(entity: Entity<Thread>) -> Self {
|
||||
AgentDiffThread::Native(entity)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Entity<AcpThread>> for AgentDiffThread {
|
||||
fn from(entity: Entity<AcpThread>) -> Self {
|
||||
AgentDiffThread::AcpThread(entity)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Eq, Clone)]
|
||||
pub enum WeakAgentDiffThread {
|
||||
Native(WeakEntity<Thread>),
|
||||
AcpThread(WeakEntity<AcpThread>),
|
||||
}
|
||||
|
||||
impl WeakAgentDiffThread {
|
||||
pub fn upgrade(&self) -> Option<AgentDiffThread> {
|
||||
match self {
|
||||
WeakAgentDiffThread::Native(weak) => weak.upgrade().map(AgentDiffThread::Native),
|
||||
WeakAgentDiffThread::AcpThread(weak) => weak.upgrade().map(AgentDiffThread::AcpThread),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<WeakEntity<Thread>> for WeakAgentDiffThread {
|
||||
fn from(entity: WeakEntity<Thread>) -> Self {
|
||||
WeakAgentDiffThread::Native(entity)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<WeakEntity<AcpThread>> for WeakAgentDiffThread {
|
||||
fn from(entity: WeakEntity<AcpThread>) -> Self {
|
||||
WeakAgentDiffThread::AcpThread(entity)
|
||||
}
|
||||
}
|
||||
|
||||
impl AgentDiffPane {
|
||||
pub fn deploy(
|
||||
thread: Entity<Thread>,
|
||||
thread: impl Into<AgentDiffThread>,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
@@ -61,14 +155,16 @@ impl AgentDiffPane {
|
||||
}
|
||||
|
||||
pub fn deploy_in_workspace(
|
||||
thread: Entity<Thread>,
|
||||
thread: impl Into<AgentDiffThread>,
|
||||
workspace: &mut Workspace,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Workspace>,
|
||||
) -> Entity<Self> {
|
||||
let thread = thread.into();
|
||||
let existing_diff = workspace
|
||||
.items_of_type::<AgentDiffPane>(cx)
|
||||
.find(|diff| diff.read(cx).thread == thread);
|
||||
|
||||
if let Some(existing_diff) = existing_diff {
|
||||
workspace.activate_item(&existing_diff, true, true, window, cx);
|
||||
existing_diff
|
||||
@@ -81,7 +177,7 @@ impl AgentDiffPane {
|
||||
}
|
||||
|
||||
pub fn new(
|
||||
thread: Entity<Thread>,
|
||||
thread: AgentDiffThread,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
@@ -89,7 +185,7 @@ impl AgentDiffPane {
|
||||
let focus_handle = cx.focus_handle();
|
||||
let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite));
|
||||
|
||||
let project = thread.read(cx).project().clone();
|
||||
let project = thread.project(cx).clone();
|
||||
let editor = cx.new(|cx| {
|
||||
let mut editor =
|
||||
Editor::for_multibuffer(multibuffer.clone(), Some(project.clone()), window, cx);
|
||||
@@ -100,16 +196,27 @@ impl AgentDiffPane {
|
||||
editor
|
||||
});
|
||||
|
||||
let action_log = thread.read(cx).action_log().clone();
|
||||
let action_log = thread.action_log(cx).clone();
|
||||
|
||||
let mut this = Self {
|
||||
_subscriptions: vec![
|
||||
cx.observe_in(&action_log, window, |this, _action_log, window, cx| {
|
||||
this.update_excerpts(window, cx)
|
||||
}),
|
||||
cx.subscribe(&thread, |this, _thread, event, cx| {
|
||||
this.handle_thread_event(event, cx)
|
||||
}),
|
||||
],
|
||||
_subscriptions: [
|
||||
Some(
|
||||
cx.observe_in(&action_log, window, |this, _action_log, window, cx| {
|
||||
this.update_excerpts(window, cx)
|
||||
}),
|
||||
),
|
||||
match &thread {
|
||||
AgentDiffThread::Native(thread) => {
|
||||
Some(cx.subscribe(&thread, |this, _thread, event, cx| {
|
||||
this.handle_thread_event(event, cx)
|
||||
}))
|
||||
}
|
||||
AgentDiffThread::AcpThread(_) => None,
|
||||
},
|
||||
]
|
||||
.into_iter()
|
||||
.flatten()
|
||||
.collect(),
|
||||
title: SharedString::default(),
|
||||
multibuffer,
|
||||
editor,
|
||||
@@ -123,8 +230,7 @@ impl AgentDiffPane {
|
||||
}
|
||||
|
||||
fn update_excerpts(&mut self, window: &mut Window, cx: &mut Context<Self>) {
|
||||
let thread = self.thread.read(cx);
|
||||
let changed_buffers = thread.action_log().read(cx).changed_buffers(cx);
|
||||
let changed_buffers = self.thread.action_log(cx).read(cx).changed_buffers(cx);
|
||||
let mut paths_to_delete = self.multibuffer.read(cx).paths().collect::<HashSet<_>>();
|
||||
|
||||
for (buffer, diff_handle) in changed_buffers {
|
||||
@@ -211,7 +317,7 @@ impl AgentDiffPane {
|
||||
}
|
||||
|
||||
fn update_title(&mut self, cx: &mut Context<Self>) {
|
||||
let new_title = self.thread.read(cx).summary().unwrap_or("Agent Changes");
|
||||
let new_title = self.thread.summary(cx).unwrap_or("Agent Changes");
|
||||
if new_title != self.title {
|
||||
self.title = new_title;
|
||||
cx.emit(EditorEvent::TitleChanged);
|
||||
@@ -275,14 +381,15 @@ impl AgentDiffPane {
|
||||
|
||||
fn keep_all(&mut self, _: &KeepAll, _window: &mut Window, cx: &mut Context<Self>) {
|
||||
self.thread
|
||||
.update(cx, |thread, cx| thread.keep_all_edits(cx));
|
||||
.action_log(cx)
|
||||
.update(cx, |action_log, cx| action_log.keep_all_edits(cx))
|
||||
}
|
||||
}
|
||||
|
||||
fn keep_edits_in_selection(
|
||||
editor: &mut Editor,
|
||||
buffer_snapshot: &MultiBufferSnapshot,
|
||||
thread: &Entity<Thread>,
|
||||
thread: &AgentDiffThread,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Editor>,
|
||||
) {
|
||||
@@ -297,7 +404,7 @@ fn keep_edits_in_selection(
|
||||
fn reject_edits_in_selection(
|
||||
editor: &mut Editor,
|
||||
buffer_snapshot: &MultiBufferSnapshot,
|
||||
thread: &Entity<Thread>,
|
||||
thread: &AgentDiffThread,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Editor>,
|
||||
) {
|
||||
@@ -311,7 +418,7 @@ fn reject_edits_in_selection(
|
||||
fn keep_edits_in_ranges(
|
||||
editor: &mut Editor,
|
||||
buffer_snapshot: &MultiBufferSnapshot,
|
||||
thread: &Entity<Thread>,
|
||||
thread: &AgentDiffThread,
|
||||
ranges: Vec<Range<editor::Anchor>>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Editor>,
|
||||
@@ -326,8 +433,8 @@ fn keep_edits_in_ranges(
|
||||
for hunk in &diff_hunks_in_ranges {
|
||||
let buffer = multibuffer.read(cx).buffer(hunk.buffer_id);
|
||||
if let Some(buffer) = buffer {
|
||||
thread.update(cx, |thread, cx| {
|
||||
thread.keep_edits_in_range(buffer, hunk.buffer_range.clone(), cx)
|
||||
thread.action_log(cx).update(cx, |action_log, cx| {
|
||||
action_log.keep_edits_in_range(buffer, hunk.buffer_range.clone(), cx)
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -336,7 +443,7 @@ fn keep_edits_in_ranges(
|
||||
fn reject_edits_in_ranges(
|
||||
editor: &mut Editor,
|
||||
buffer_snapshot: &MultiBufferSnapshot,
|
||||
thread: &Entity<Thread>,
|
||||
thread: &AgentDiffThread,
|
||||
ranges: Vec<Range<editor::Anchor>>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Editor>,
|
||||
@@ -362,8 +469,9 @@ fn reject_edits_in_ranges(
|
||||
|
||||
for (buffer, ranges) in ranges_by_buffer {
|
||||
thread
|
||||
.update(cx, |thread, cx| {
|
||||
thread.reject_edits_in_ranges(buffer, ranges, cx)
|
||||
.action_log(cx)
|
||||
.update(cx, |action_log, cx| {
|
||||
action_log.reject_edits_in_ranges(buffer, ranges, cx)
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
}
|
||||
@@ -461,7 +569,7 @@ impl Item for AgentDiffPane {
|
||||
}
|
||||
|
||||
fn tab_content(&self, params: TabContentParams, _window: &Window, cx: &App) -> AnyElement {
|
||||
let summary = self.thread.read(cx).summary().unwrap_or("Agent Changes");
|
||||
let summary = self.thread.summary(cx).unwrap_or("Agent Changes");
|
||||
Label::new(format!("Review: {}", summary))
|
||||
.color(if params.selected {
|
||||
Color::Default
|
||||
@@ -641,7 +749,7 @@ impl Render for AgentDiffPane {
|
||||
}
|
||||
}
|
||||
|
||||
fn diff_hunk_controls(thread: &Entity<Thread>) -> editor::RenderDiffHunkControlsFn {
|
||||
fn diff_hunk_controls(thread: &AgentDiffThread) -> editor::RenderDiffHunkControlsFn {
|
||||
let thread = thread.clone();
|
||||
|
||||
Arc::new(
|
||||
@@ -676,7 +784,7 @@ fn render_diff_hunk_controls(
|
||||
hunk_range: Range<editor::Anchor>,
|
||||
is_created_file: bool,
|
||||
line_height: Pixels,
|
||||
thread: &Entity<Thread>,
|
||||
thread: &AgentDiffThread,
|
||||
editor: &Entity<Editor>,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
@@ -1112,11 +1220,8 @@ impl Render for AgentDiffToolbar {
|
||||
return Empty.into_any();
|
||||
};
|
||||
|
||||
let has_pending_edit_tool_use = agent_diff
|
||||
.read(cx)
|
||||
.thread
|
||||
.read(cx)
|
||||
.has_pending_edit_tool_uses();
|
||||
let has_pending_edit_tool_use =
|
||||
agent_diff.read(cx).thread.has_pending_edit_tool_uses(cx);
|
||||
|
||||
if has_pending_edit_tool_use {
|
||||
return div().px_2().child(spinner_icon).into_any();
|
||||
@@ -1187,8 +1292,8 @@ pub enum EditorState {
|
||||
}
|
||||
|
||||
struct WorkspaceThread {
|
||||
thread: WeakEntity<Thread>,
|
||||
_thread_subscriptions: [Subscription; 2],
|
||||
thread: WeakAgentDiffThread,
|
||||
_thread_subscriptions: (Subscription, Subscription),
|
||||
singleton_editors: HashMap<WeakEntity<Buffer>, HashMap<WeakEntity<Editor>, Subscription>>,
|
||||
_settings_subscription: Subscription,
|
||||
_workspace_subscription: Option<Subscription>,
|
||||
@@ -1212,23 +1317,23 @@ impl AgentDiff {
|
||||
|
||||
pub fn set_active_thread(
|
||||
workspace: &WeakEntity<Workspace>,
|
||||
thread: &Entity<Thread>,
|
||||
thread: impl Into<AgentDiffThread>,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) {
|
||||
Self::global(cx).update(cx, |this, cx| {
|
||||
this.register_active_thread_impl(workspace, thread, window, cx);
|
||||
this.register_active_thread_impl(workspace, thread.into(), window, cx);
|
||||
});
|
||||
}
|
||||
|
||||
fn register_active_thread_impl(
|
||||
&mut self,
|
||||
workspace: &WeakEntity<Workspace>,
|
||||
thread: &Entity<Thread>,
|
||||
thread: AgentDiffThread,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let action_log = thread.read(cx).action_log().clone();
|
||||
let action_log = thread.action_log(cx).clone();
|
||||
|
||||
let action_log_subscription = cx.observe_in(&action_log, window, {
|
||||
let workspace = workspace.clone();
|
||||
@@ -1237,17 +1342,25 @@ impl AgentDiff {
|
||||
}
|
||||
});
|
||||
|
||||
let thread_subscription = cx.subscribe_in(&thread, window, {
|
||||
let workspace = workspace.clone();
|
||||
move |this, _thread, event, window, cx| {
|
||||
this.handle_thread_event(&workspace, event, window, cx)
|
||||
}
|
||||
});
|
||||
let thread_subscription = match &thread {
|
||||
AgentDiffThread::Native(thread) => cx.subscribe_in(&thread, window, {
|
||||
let workspace = workspace.clone();
|
||||
move |this, _thread, event, window, cx| {
|
||||
this.handle_native_thread_event(&workspace, event, window, cx)
|
||||
}
|
||||
}),
|
||||
AgentDiffThread::AcpThread(thread) => cx.subscribe_in(&thread, window, {
|
||||
let workspace = workspace.clone();
|
||||
move |this, thread, event, window, cx| {
|
||||
this.handle_acp_thread_event(&workspace, thread, event, window, cx)
|
||||
}
|
||||
}),
|
||||
};
|
||||
|
||||
if let Some(workspace_thread) = self.workspace_threads.get_mut(&workspace) {
|
||||
// replace thread and action log subscription, but keep editors
|
||||
workspace_thread.thread = thread.downgrade();
|
||||
workspace_thread._thread_subscriptions = [action_log_subscription, thread_subscription];
|
||||
workspace_thread._thread_subscriptions = (action_log_subscription, thread_subscription);
|
||||
self.update_reviewing_editors(&workspace, window, cx);
|
||||
return;
|
||||
}
|
||||
@@ -1272,7 +1385,7 @@ impl AgentDiff {
|
||||
workspace.clone(),
|
||||
WorkspaceThread {
|
||||
thread: thread.downgrade(),
|
||||
_thread_subscriptions: [action_log_subscription, thread_subscription],
|
||||
_thread_subscriptions: (action_log_subscription, thread_subscription),
|
||||
singleton_editors: HashMap::default(),
|
||||
_settings_subscription: settings_subscription,
|
||||
_workspace_subscription: workspace_subscription,
|
||||
@@ -1319,7 +1432,7 @@ impl AgentDiff {
|
||||
|
||||
fn register_review_action<T: Action>(
|
||||
workspace: &mut Workspace,
|
||||
review: impl Fn(&Entity<Editor>, &Entity<Thread>, &mut Window, &mut App) -> PostReviewState
|
||||
review: impl Fn(&Entity<Editor>, &AgentDiffThread, &mut Window, &mut App) -> PostReviewState
|
||||
+ 'static,
|
||||
this: &Entity<AgentDiff>,
|
||||
) {
|
||||
@@ -1338,7 +1451,7 @@ impl AgentDiff {
|
||||
});
|
||||
}
|
||||
|
||||
fn handle_thread_event(
|
||||
fn handle_native_thread_event(
|
||||
&mut self,
|
||||
workspace: &WeakEntity<Workspace>,
|
||||
event: &ThreadEvent,
|
||||
@@ -1375,11 +1488,44 @@ impl AgentDiff {
|
||||
| ThreadEvent::ToolConfirmationNeeded
|
||||
| ThreadEvent::ToolUseLimitReached
|
||||
| ThreadEvent::CancelEditing
|
||||
| ThreadEvent::RetriesFailed { .. }
|
||||
| ThreadEvent::ProfileChanged => {}
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_acp_thread_event(
|
||||
&mut self,
|
||||
workspace: &WeakEntity<Workspace>,
|
||||
thread: &Entity<AcpThread>,
|
||||
event: &AcpThreadEvent,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
match event {
|
||||
AcpThreadEvent::NewEntry => {
|
||||
if thread
|
||||
.read(cx)
|
||||
.entries()
|
||||
.last()
|
||||
.and_then(|entry| entry.diff())
|
||||
.is_some()
|
||||
{
|
||||
self.update_reviewing_editors(workspace, window, cx);
|
||||
}
|
||||
}
|
||||
AcpThreadEvent::EntryUpdated(ix) => {
|
||||
if thread
|
||||
.read(cx)
|
||||
.entries()
|
||||
.get(*ix)
|
||||
.and_then(|entry| entry.diff())
|
||||
.is_some()
|
||||
{
|
||||
self.update_reviewing_editors(workspace, window, cx);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_workspace_event(
|
||||
&mut self,
|
||||
workspace: &Entity<Workspace>,
|
||||
@@ -1485,7 +1631,7 @@ impl AgentDiff {
|
||||
return;
|
||||
};
|
||||
|
||||
let action_log = thread.read(cx).action_log();
|
||||
let action_log = thread.action_log(cx);
|
||||
let changed_buffers = action_log.read(cx).changed_buffers(cx);
|
||||
|
||||
let mut unaffected = self.reviewing_editors.clone();
|
||||
@@ -1510,7 +1656,7 @@ impl AgentDiff {
|
||||
multibuffer.add_diff(diff_handle.clone(), cx);
|
||||
});
|
||||
|
||||
let new_state = if thread.read(cx).is_generating() {
|
||||
let new_state = if thread.is_generating(cx) {
|
||||
EditorState::Generating
|
||||
} else {
|
||||
EditorState::Reviewing
|
||||
@@ -1606,7 +1752,7 @@ impl AgentDiff {
|
||||
|
||||
fn keep_all(
|
||||
editor: &Entity<Editor>,
|
||||
thread: &Entity<Thread>,
|
||||
thread: &AgentDiffThread,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) -> PostReviewState {
|
||||
@@ -1626,7 +1772,7 @@ impl AgentDiff {
|
||||
|
||||
fn reject_all(
|
||||
editor: &Entity<Editor>,
|
||||
thread: &Entity<Thread>,
|
||||
thread: &AgentDiffThread,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) -> PostReviewState {
|
||||
@@ -1646,7 +1792,7 @@ impl AgentDiff {
|
||||
|
||||
fn keep(
|
||||
editor: &Entity<Editor>,
|
||||
thread: &Entity<Thread>,
|
||||
thread: &AgentDiffThread,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) -> PostReviewState {
|
||||
@@ -1659,7 +1805,7 @@ impl AgentDiff {
|
||||
|
||||
fn reject(
|
||||
editor: &Entity<Editor>,
|
||||
thread: &Entity<Thread>,
|
||||
thread: &AgentDiffThread,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) -> PostReviewState {
|
||||
@@ -1682,7 +1828,7 @@ impl AgentDiff {
|
||||
fn review_in_active_editor(
|
||||
&mut self,
|
||||
workspace: &mut Workspace,
|
||||
review: impl Fn(&Entity<Editor>, &Entity<Thread>, &mut Window, &mut App) -> PostReviewState,
|
||||
review: impl Fn(&Entity<Editor>, &AgentDiffThread, &mut Window, &mut App) -> PostReviewState,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Option<Task<Result<()>>> {
|
||||
@@ -1703,7 +1849,7 @@ impl AgentDiff {
|
||||
|
||||
if let PostReviewState::AllReviewed = review(&editor, &thread, window, cx) {
|
||||
if let Some(curr_buffer) = editor.read(cx).buffer().read(cx).as_singleton() {
|
||||
let changed_buffers = thread.read(cx).action_log().read(cx).changed_buffers(cx);
|
||||
let changed_buffers = thread.action_log(cx).read(cx).changed_buffers(cx);
|
||||
|
||||
let mut keys = changed_buffers.keys().cycle();
|
||||
keys.find(|k| *k == &curr_buffer);
|
||||
@@ -1801,8 +1947,9 @@ mod tests {
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
let thread = thread_store.update(cx, |store, cx| store.create_thread(cx));
|
||||
let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone());
|
||||
let thread =
|
||||
AgentDiffThread::Native(thread_store.update(cx, |store, cx| store.create_thread(cx)));
|
||||
let action_log = cx.read(|cx| thread.action_log(cx));
|
||||
|
||||
let (workspace, cx) =
|
||||
cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx));
|
||||
@@ -1988,8 +2135,9 @@ mod tests {
|
||||
});
|
||||
|
||||
// Set the active thread
|
||||
let thread = AgentDiffThread::Native(thread);
|
||||
cx.update(|window, cx| {
|
||||
AgentDiff::set_active_thread(&workspace.downgrade(), &thread, window, cx)
|
||||
AgentDiff::set_active_thread(&workspace.downgrade(), thread.clone(), window, cx)
|
||||
});
|
||||
|
||||
let buffer1 = project
|
||||
|
||||
@@ -1,13 +1,16 @@
|
||||
use std::cell::RefCell;
|
||||
use std::ops::Range;
|
||||
use std::path::Path;
|
||||
use std::rc::Rc;
|
||||
use std::sync::Arc;
|
||||
use std::time::Duration;
|
||||
|
||||
use agent_servers::AgentServer;
|
||||
use db::kvp::{Dismissable, KEY_VALUE_STORE};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::NewAcpThread;
|
||||
use crate::NewExternalAgentThread;
|
||||
use crate::agent_diff::AgentDiffThread;
|
||||
use crate::language_model_selector::ToggleModelSelector;
|
||||
use crate::{
|
||||
AddContextServer, AgentDiffPane, ContinueThread, ContinueWithBurnMode,
|
||||
@@ -112,10 +115,12 @@ pub fn init(cx: &mut App) {
|
||||
panel.update(cx, |panel, cx| panel.new_prompt_editor(window, cx));
|
||||
}
|
||||
})
|
||||
.register_action(|workspace, _: &NewAcpThread, window, cx| {
|
||||
.register_action(|workspace, action: &NewExternalAgentThread, window, cx| {
|
||||
if let Some(panel) = workspace.panel::<AgentPanel>(cx) {
|
||||
workspace.focus_panel::<AgentPanel>(window, cx);
|
||||
panel.update(cx, |panel, cx| panel.new_gemini_thread(window, cx));
|
||||
panel.update(cx, |panel, cx| {
|
||||
panel.new_external_thread(action.agent, window, cx)
|
||||
});
|
||||
}
|
||||
})
|
||||
.register_action(|workspace, action: &OpenRulesLibrary, window, cx| {
|
||||
@@ -134,7 +139,7 @@ pub fn init(cx: &mut App) {
|
||||
let thread = thread.read(cx).thread().clone();
|
||||
AgentDiffPane::deploy_in_workspace(thread, workspace, window, cx);
|
||||
}
|
||||
ActiveView::AcpThread { .. }
|
||||
ActiveView::ExternalAgentThread { .. }
|
||||
| ActiveView::TextThread { .. }
|
||||
| ActiveView::History
|
||||
| ActiveView::Configuration => {}
|
||||
@@ -198,7 +203,7 @@ enum ActiveView {
|
||||
message_editor: Entity<MessageEditor>,
|
||||
_subscriptions: Vec<gpui::Subscription>,
|
||||
},
|
||||
AcpThread {
|
||||
ExternalAgentThread {
|
||||
thread_view: Entity<AcpThreadView>,
|
||||
},
|
||||
TextThread {
|
||||
@@ -220,9 +225,9 @@ enum WhichFontSize {
|
||||
impl ActiveView {
|
||||
pub fn which_font_size_used(&self) -> WhichFontSize {
|
||||
match self {
|
||||
ActiveView::Thread { .. } | ActiveView::AcpThread { .. } | ActiveView::History => {
|
||||
WhichFontSize::AgentFont
|
||||
}
|
||||
ActiveView::Thread { .. }
|
||||
| ActiveView::ExternalAgentThread { .. }
|
||||
| ActiveView::History => WhichFontSize::AgentFont,
|
||||
ActiveView::TextThread { .. } => WhichFontSize::BufferFont,
|
||||
ActiveView::Configuration => WhichFontSize::None,
|
||||
}
|
||||
@@ -253,7 +258,7 @@ impl ActiveView {
|
||||
thread.scroll_to_bottom(cx);
|
||||
});
|
||||
}
|
||||
ActiveView::AcpThread { .. } => {}
|
||||
ActiveView::ExternalAgentThread { .. } => {}
|
||||
ActiveView::TextThread { .. }
|
||||
| ActiveView::History
|
||||
| ActiveView::Configuration => {}
|
||||
@@ -432,6 +437,8 @@ pub struct AgentPanel {
|
||||
configuration_subscription: Option<Subscription>,
|
||||
local_timezone: UtcOffset,
|
||||
active_view: ActiveView,
|
||||
acp_message_history:
|
||||
Rc<RefCell<crate::acp::MessageHistory<agentic_coding_protocol::SendUserMessageParams>>>,
|
||||
previous_view: Option<ActiveView>,
|
||||
history_store: Entity<HistoryStore>,
|
||||
history: Entity<ThreadHistory>,
|
||||
@@ -624,7 +631,7 @@ impl AgentPanel {
|
||||
}
|
||||
};
|
||||
|
||||
AgentDiff::set_active_thread(&workspace, &thread, window, cx);
|
||||
AgentDiff::set_active_thread(&workspace, thread.clone(), window, cx);
|
||||
|
||||
let weak_panel = weak_self.clone();
|
||||
|
||||
@@ -670,7 +677,7 @@ impl AgentPanel {
|
||||
.clone()
|
||||
.update(cx, |thread, cx| thread.get_or_init_configured_model(cx));
|
||||
}
|
||||
ActiveView::AcpThread { .. }
|
||||
ActiveView::ExternalAgentThread { .. }
|
||||
| ActiveView::TextThread { .. }
|
||||
| ActiveView::History
|
||||
| ActiveView::Configuration => {}
|
||||
@@ -698,6 +705,7 @@ impl AgentPanel {
|
||||
.unwrap(),
|
||||
inline_assist_context_store,
|
||||
previous_view: None,
|
||||
acp_message_history: Default::default(),
|
||||
history_store: history_store.clone(),
|
||||
history: cx.new(|cx| ThreadHistory::new(weak_self, history_store, window, cx)),
|
||||
hovered_recent_history_item: None,
|
||||
@@ -752,7 +760,7 @@ impl AgentPanel {
|
||||
ActiveView::Thread { thread, .. } => {
|
||||
thread.update(cx, |thread, cx| thread.cancel_last_completion(window, cx));
|
||||
}
|
||||
ActiveView::AcpThread { thread_view, .. } => {
|
||||
ActiveView::ExternalAgentThread { thread_view, .. } => {
|
||||
thread_view.update(cx, |thread_element, cx| thread_element.cancel(cx));
|
||||
}
|
||||
ActiveView::TextThread { .. } | ActiveView::History | ActiveView::Configuration => {}
|
||||
@@ -762,7 +770,7 @@ impl AgentPanel {
|
||||
fn active_message_editor(&self) -> Option<&Entity<MessageEditor>> {
|
||||
match &self.active_view {
|
||||
ActiveView::Thread { message_editor, .. } => Some(message_editor),
|
||||
ActiveView::AcpThread { .. }
|
||||
ActiveView::ExternalAgentThread { .. }
|
||||
| ActiveView::TextThread { .. }
|
||||
| ActiveView::History
|
||||
| ActiveView::Configuration => None,
|
||||
@@ -770,13 +778,10 @@ impl AgentPanel {
|
||||
}
|
||||
|
||||
fn new_thread(&mut self, action: &NewThread, window: &mut Window, cx: &mut Context<Self>) {
|
||||
// Preserve chat box text when using creating new thread from summary'
|
||||
let preserved_text = if action.from_thread_id.is_some() {
|
||||
self.active_message_editor()
|
||||
.map(|editor| editor.read(cx).get_text(cx).trim().to_string())
|
||||
} else {
|
||||
None
|
||||
};
|
||||
// Preserve chat box text when using creating new thread
|
||||
let preserved_text = self
|
||||
.active_message_editor()
|
||||
.map(|editor| editor.read(cx).get_text(cx).trim().to_string());
|
||||
|
||||
let thread = self
|
||||
.thread_store
|
||||
@@ -848,7 +853,7 @@ impl AgentPanel {
|
||||
let thread_view = ActiveView::thread(active_thread.clone(), message_editor, window, cx);
|
||||
self.set_active_view(thread_view, window, cx);
|
||||
|
||||
AgentDiff::set_active_thread(&self.workspace, &thread, window, cx);
|
||||
AgentDiff::set_active_thread(&self.workspace, thread.clone(), window, cx);
|
||||
}
|
||||
|
||||
fn new_prompt_editor(&mut self, window: &mut Window, cx: &mut Context<Self>) {
|
||||
@@ -887,19 +892,77 @@ impl AgentPanel {
|
||||
context_editor.focus_handle(cx).focus(window);
|
||||
}
|
||||
|
||||
fn new_gemini_thread(&mut self, window: &mut Window, cx: &mut Context<Self>) {
|
||||
fn new_external_thread(
|
||||
&mut self,
|
||||
agent_choice: Option<crate::ExternalAgent>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let workspace = self.workspace.clone();
|
||||
let project = self.project.clone();
|
||||
let message_history = self.acp_message_history.clone();
|
||||
|
||||
const LAST_USED_EXTERNAL_AGENT_KEY: &str = "agent_panel__last_used_external_agent";
|
||||
|
||||
#[derive(Default, Serialize, Deserialize)]
|
||||
struct LastUsedExternalAgent {
|
||||
agent: crate::ExternalAgent,
|
||||
}
|
||||
|
||||
cx.spawn_in(window, async move |this, cx| {
|
||||
let thread_view = cx.new_window_entity(|window, cx| {
|
||||
crate::acp::AcpThreadView::new(workspace, project, window, cx)
|
||||
})?;
|
||||
let server: Rc<dyn AgentServer> = match agent_choice {
|
||||
Some(agent) => {
|
||||
cx.background_spawn(async move {
|
||||
if let Some(serialized) =
|
||||
serde_json::to_string(&LastUsedExternalAgent { agent }).log_err()
|
||||
{
|
||||
KEY_VALUE_STORE
|
||||
.write_kvp(LAST_USED_EXTERNAL_AGENT_KEY.to_string(), serialized)
|
||||
.await
|
||||
.log_err();
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
|
||||
agent.server()
|
||||
}
|
||||
None => cx
|
||||
.background_spawn(async move {
|
||||
KEY_VALUE_STORE.read_kvp(LAST_USED_EXTERNAL_AGENT_KEY)
|
||||
})
|
||||
.await
|
||||
.log_err()
|
||||
.flatten()
|
||||
.and_then(|value| {
|
||||
serde_json::from_str::<LastUsedExternalAgent>(&value).log_err()
|
||||
})
|
||||
.unwrap_or_default()
|
||||
.agent
|
||||
.server(),
|
||||
};
|
||||
|
||||
this.update_in(cx, |this, window, cx| {
|
||||
this.set_active_view(ActiveView::AcpThread { thread_view }, window, cx);
|
||||
let thread_view = cx.new(|cx| {
|
||||
crate::acp::AcpThreadView::new(
|
||||
server,
|
||||
workspace.clone(),
|
||||
project,
|
||||
message_history,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
this.set_active_view(
|
||||
ActiveView::ExternalAgentThread {
|
||||
thread_view: thread_view.clone(),
|
||||
},
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
})
|
||||
})
|
||||
.detach();
|
||||
.detach_and_log_err(cx);
|
||||
}
|
||||
|
||||
fn deploy_rules_library(
|
||||
@@ -1053,7 +1116,7 @@ impl AgentPanel {
|
||||
|
||||
let thread_view = ActiveView::thread(active_thread.clone(), message_editor, window, cx);
|
||||
self.set_active_view(thread_view, window, cx);
|
||||
AgentDiff::set_active_thread(&self.workspace, &thread, window, cx);
|
||||
AgentDiff::set_active_thread(&self.workspace, thread.clone(), window, cx);
|
||||
}
|
||||
|
||||
pub fn go_back(&mut self, _: &workspace::GoBack, window: &mut Window, cx: &mut Context<Self>) {
|
||||
@@ -1066,7 +1129,7 @@ impl AgentPanel {
|
||||
ActiveView::Thread { message_editor, .. } => {
|
||||
message_editor.focus_handle(cx).focus(window);
|
||||
}
|
||||
ActiveView::AcpThread { thread_view } => {
|
||||
ActiveView::ExternalAgentThread { thread_view } => {
|
||||
thread_view.focus_handle(cx).focus(window);
|
||||
}
|
||||
ActiveView::TextThread { context_editor, .. } => {
|
||||
@@ -1184,11 +1247,16 @@ impl AgentPanel {
|
||||
let thread = thread.read(cx).thread().clone();
|
||||
self.workspace
|
||||
.update(cx, |workspace, cx| {
|
||||
AgentDiffPane::deploy_in_workspace(thread, workspace, window, cx)
|
||||
AgentDiffPane::deploy_in_workspace(
|
||||
AgentDiffThread::Native(thread),
|
||||
workspace,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.log_err();
|
||||
}
|
||||
ActiveView::AcpThread { .. }
|
||||
ActiveView::ExternalAgentThread { .. }
|
||||
| ActiveView::TextThread { .. }
|
||||
| ActiveView::History
|
||||
| ActiveView::Configuration => {}
|
||||
@@ -1244,7 +1312,7 @@ impl AgentPanel {
|
||||
)
|
||||
.detach_and_log_err(cx);
|
||||
}
|
||||
ActiveView::AcpThread { thread_view } => {
|
||||
ActiveView::ExternalAgentThread { thread_view } => {
|
||||
thread_view
|
||||
.update(cx, |thread_view, cx| {
|
||||
thread_view.open_thread_as_markdown(workspace, window, cx)
|
||||
@@ -1405,7 +1473,7 @@ impl AgentPanel {
|
||||
}
|
||||
})
|
||||
}
|
||||
ActiveView::AcpThread { .. } => {}
|
||||
ActiveView::ExternalAgentThread { .. } => {}
|
||||
ActiveView::History | ActiveView::Configuration => {}
|
||||
}
|
||||
|
||||
@@ -1420,6 +1488,8 @@ impl AgentPanel {
|
||||
self.active_view = new_view;
|
||||
}
|
||||
|
||||
self.acp_message_history.borrow_mut().reset_position();
|
||||
|
||||
self.focus_handle(cx).focus(window);
|
||||
}
|
||||
|
||||
@@ -1492,7 +1562,7 @@ impl Focusable for AgentPanel {
|
||||
fn focus_handle(&self, cx: &App) -> FocusHandle {
|
||||
match &self.active_view {
|
||||
ActiveView::Thread { message_editor, .. } => message_editor.focus_handle(cx),
|
||||
ActiveView::AcpThread { thread_view, .. } => thread_view.focus_handle(cx),
|
||||
ActiveView::ExternalAgentThread { thread_view, .. } => thread_view.focus_handle(cx),
|
||||
ActiveView::History => self.history.focus_handle(cx),
|
||||
ActiveView::TextThread { context_editor, .. } => context_editor.focus_handle(cx),
|
||||
ActiveView::Configuration => {
|
||||
@@ -1649,9 +1719,11 @@ impl AgentPanel {
|
||||
.into_any_element(),
|
||||
}
|
||||
}
|
||||
ActiveView::AcpThread { thread_view } => Label::new(thread_view.read(cx).title(cx))
|
||||
.truncate()
|
||||
.into_any_element(),
|
||||
ActiveView::ExternalAgentThread { thread_view } => {
|
||||
Label::new(thread_view.read(cx).title(cx))
|
||||
.truncate()
|
||||
.into_any_element()
|
||||
}
|
||||
ActiveView::TextThread {
|
||||
title_editor,
|
||||
context_editor,
|
||||
@@ -1786,7 +1858,7 @@ impl AgentPanel {
|
||||
|
||||
let active_thread = match &self.active_view {
|
||||
ActiveView::Thread { thread, .. } => Some(thread.read(cx).thread().clone()),
|
||||
ActiveView::AcpThread { .. }
|
||||
ActiveView::ExternalAgentThread { .. }
|
||||
| ActiveView::TextThread { .. }
|
||||
| ActiveView::History
|
||||
| ActiveView::Configuration => None,
|
||||
@@ -1824,7 +1896,27 @@ impl AgentPanel {
|
||||
.when(cx.has_flag::<feature_flags::AcpFeatureFlag>(), |this| {
|
||||
this.separator()
|
||||
.header("External Agents")
|
||||
.action("New Gemini Thread", NewAcpThread.boxed_clone())
|
||||
.action(
|
||||
"New Gemini Thread",
|
||||
NewExternalAgentThread {
|
||||
agent: Some(crate::ExternalAgent::Gemini),
|
||||
}
|
||||
.boxed_clone(),
|
||||
)
|
||||
.action(
|
||||
"New Claude Code Thread",
|
||||
NewExternalAgentThread {
|
||||
agent: Some(crate::ExternalAgent::ClaudeCode),
|
||||
}
|
||||
.boxed_clone(),
|
||||
)
|
||||
.action(
|
||||
"New Codex Thread",
|
||||
NewExternalAgentThread {
|
||||
agent: Some(crate::ExternalAgent::Codex),
|
||||
}
|
||||
.boxed_clone(),
|
||||
)
|
||||
});
|
||||
menu
|
||||
}))
|
||||
@@ -1896,6 +1988,7 @@ impl AgentPanel {
|
||||
category_filter: Some(
|
||||
zed_actions::ExtensionCategoryFilter::ContextServers,
|
||||
),
|
||||
id: None,
|
||||
}),
|
||||
)
|
||||
.action("Add Custom Server…", Box::new(AddContextServer))
|
||||
@@ -1949,48 +2042,45 @@ impl AgentPanel {
|
||||
}
|
||||
|
||||
fn render_token_count(&self, cx: &App) -> Option<AnyElement> {
|
||||
let (active_thread, message_editor) = match &self.active_view {
|
||||
match &self.active_view {
|
||||
ActiveView::Thread {
|
||||
thread,
|
||||
message_editor,
|
||||
..
|
||||
} => (thread.read(cx), message_editor.read(cx)),
|
||||
ActiveView::AcpThread { .. } => {
|
||||
return None;
|
||||
}
|
||||
ActiveView::TextThread { .. } | ActiveView::History | ActiveView::Configuration => {
|
||||
return None;
|
||||
}
|
||||
};
|
||||
} => {
|
||||
let active_thread = thread.read(cx);
|
||||
let message_editor = message_editor.read(cx);
|
||||
|
||||
let editor_empty = message_editor.is_editor_fully_empty(cx);
|
||||
let editor_empty = message_editor.is_editor_fully_empty(cx);
|
||||
|
||||
if active_thread.is_empty() && editor_empty {
|
||||
return None;
|
||||
}
|
||||
if active_thread.is_empty() && editor_empty {
|
||||
return None;
|
||||
}
|
||||
|
||||
let thread = active_thread.thread().read(cx);
|
||||
let is_generating = thread.is_generating();
|
||||
let conversation_token_usage = thread.total_token_usage()?;
|
||||
let thread = active_thread.thread().read(cx);
|
||||
let is_generating = thread.is_generating();
|
||||
let conversation_token_usage = thread.total_token_usage()?;
|
||||
|
||||
let (total_token_usage, is_estimating) =
|
||||
if let Some((editing_message_id, unsent_tokens)) = active_thread.editing_message_id() {
|
||||
let combined = thread
|
||||
.token_usage_up_to_message(editing_message_id)
|
||||
.add(unsent_tokens);
|
||||
let (total_token_usage, is_estimating) =
|
||||
if let Some((editing_message_id, unsent_tokens)) =
|
||||
active_thread.editing_message_id()
|
||||
{
|
||||
let combined = thread
|
||||
.token_usage_up_to_message(editing_message_id)
|
||||
.add(unsent_tokens);
|
||||
|
||||
(combined, unsent_tokens > 0)
|
||||
} else {
|
||||
let unsent_tokens = message_editor.last_estimated_token_count().unwrap_or(0);
|
||||
let combined = conversation_token_usage.add(unsent_tokens);
|
||||
(combined, unsent_tokens > 0)
|
||||
} else {
|
||||
let unsent_tokens =
|
||||
message_editor.last_estimated_token_count().unwrap_or(0);
|
||||
let combined = conversation_token_usage.add(unsent_tokens);
|
||||
|
||||
(combined, unsent_tokens > 0)
|
||||
};
|
||||
(combined, unsent_tokens > 0)
|
||||
};
|
||||
|
||||
let is_waiting_to_update_token_count = message_editor.is_waiting_to_update_token_count();
|
||||
let is_waiting_to_update_token_count =
|
||||
message_editor.is_waiting_to_update_token_count();
|
||||
|
||||
match &self.active_view {
|
||||
ActiveView::Thread { .. } => {
|
||||
if total_token_usage.total == 0 {
|
||||
return None;
|
||||
}
|
||||
@@ -2067,7 +2157,11 @@ impl AgentPanel {
|
||||
|
||||
Some(element.into_any_element())
|
||||
}
|
||||
_ => None,
|
||||
ActiveView::ExternalAgentThread { .. }
|
||||
| ActiveView::History
|
||||
| ActiveView::Configuration => {
|
||||
return None;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2096,7 +2190,7 @@ impl AgentPanel {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
ActiveView::AcpThread { .. } => {
|
||||
ActiveView::ExternalAgentThread { .. } => {
|
||||
return false;
|
||||
}
|
||||
ActiveView::TextThread { .. } | ActiveView::History | ActiveView::Configuration => {
|
||||
@@ -2683,7 +2777,7 @@ impl AgentPanel {
|
||||
) -> Option<AnyElement> {
|
||||
let active_thread = match &self.active_view {
|
||||
ActiveView::Thread { thread, .. } => thread,
|
||||
ActiveView::AcpThread { .. } => {
|
||||
ActiveView::ExternalAgentThread { .. } => {
|
||||
return None;
|
||||
}
|
||||
ActiveView::TextThread { .. } | ActiveView::History | ActiveView::Configuration => {
|
||||
@@ -3032,7 +3126,7 @@ impl AgentPanel {
|
||||
.detach();
|
||||
});
|
||||
}
|
||||
ActiveView::AcpThread { .. } => {
|
||||
ActiveView::ExternalAgentThread { .. } => {
|
||||
unimplemented!()
|
||||
}
|
||||
ActiveView::TextThread { context_editor, .. } => {
|
||||
@@ -3054,7 +3148,7 @@ impl AgentPanel {
        let mut key_context = KeyContext::new_with_defaults();
        key_context.add("AgentPanel");
        match &self.active_view {
-           ActiveView::AcpThread { .. } => key_context.add("acp_thread"),
+           ActiveView::ExternalAgentThread { .. } => key_context.add("external_agent_thread"),
            ActiveView::TextThread { .. } => key_context.add("prompt_editor"),
            ActiveView::Thread { .. } | ActiveView::History | ActiveView::Configuration => {}
        }
|
||||
@@ -3110,7 +3204,7 @@ impl Render for AgentPanel {
                });
                this.continue_conversation(window, cx);
            }
-           ActiveView::AcpThread { .. } => {}
+           ActiveView::ExternalAgentThread { .. } => {}
            ActiveView::TextThread { .. }
            | ActiveView::History
            | ActiveView::Configuration => {}
|
||||
@@ -3152,7 +3246,7 @@ impl Render for AgentPanel {
                })
                .child(h_flex().child(message_editor.clone()))
                .child(self.render_drag_target(cx)),
-           ActiveView::AcpThread { thread_view, .. } => parent
+           ActiveView::ExternalAgentThread { thread_view, .. } => parent
                .relative()
                .child(thread_view.clone())
                .child(self.render_drag_target(cx)),
|
||||
|
||||
@@ -25,6 +25,7 @@ mod thread_history;
|
||||
mod tool_compatibility;
|
||||
mod ui;
|
||||
|
||||
use std::rc::Rc;
|
||||
use std::sync::Arc;
|
||||
|
||||
use agent::{Thread, ThreadId};
|
||||
@@ -40,7 +41,7 @@ use language_model::{
};
use prompt_store::PromptBuilder;
use schemars::JsonSchema;
-use serde::Deserialize;
+use serde::{Deserialize, Serialize};
use settings::{Settings as _, SettingsStore};

pub use crate::active_thread::ActiveThread;
|
||||
@@ -57,8 +58,6 @@ actions!(
    [
        /// Creates a new text-based conversation thread.
        NewTextThread,
-       /// Creates a new external agent conversation thread.
-       NewAcpThread,
        /// Toggles the context picker interface for adding files, symbols, or other context.
        ToggleContextPicker,
        /// Toggles the navigation menu for switching between threads and views.
|
||||
@@ -133,6 +132,34 @@ pub struct NewThread {
|
||||
from_thread_id: Option<ThreadId>,
|
||||
}
|
||||
|
||||
/// Creates a new external agent conversation thread.
|
||||
#[derive(Default, Clone, PartialEq, Deserialize, JsonSchema, Action)]
|
||||
#[action(namespace = agent)]
|
||||
#[serde(deny_unknown_fields)]
|
||||
pub struct NewExternalAgentThread {
|
||||
/// Which agent to use for the conversation.
|
||||
agent: Option<ExternalAgent>,
|
||||
}
|
||||
|
||||
#[derive(Default, Clone, Copy, PartialEq, Serialize, Deserialize, JsonSchema)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
enum ExternalAgent {
|
||||
#[default]
|
||||
Gemini,
|
||||
ClaudeCode,
|
||||
Codex,
|
||||
}
|
||||
|
||||
impl ExternalAgent {
|
||||
pub fn server(&self) -> Rc<dyn agent_servers::AgentServer> {
|
||||
match self {
|
||||
ExternalAgent::Gemini => Rc::new(agent_servers::Gemini),
|
||||
ExternalAgent::ClaudeCode => Rc::new(agent_servers::ClaudeCode),
|
||||
ExternalAgent::Codex => Rc::new(agent_servers::Codex),
|
||||
}
|
||||
}
|
||||
}
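For reference, a hedged sketch of how the `NewExternalAgentThread { agent }` action added above might be routed to a concrete server. Only `NewExternalAgentThread`, `ExternalAgent`, and `ExternalAgent::server()` come from the diff; the handler and `spawn_external_thread` are hypothetical placeholders:

```rust
// Hypothetical handler: illustrates the dispatch only, not the panel's real wiring.
fn handle_new_external_agent_thread(action: &NewExternalAgentThread) {
    // `ExternalAgent` is Copy and defaults to Gemini when no agent is specified.
    let agent = action.agent.unwrap_or_default();
    let server: Rc<dyn agent_servers::AgentServer> = agent.server();
    spawn_external_thread(server);
}

// Placeholder for the real plumbing (workspace, window, thread state).
fn spawn_external_thread(_server: Rc<dyn agent_servers::AgentServer>) {}
```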
|
||||
|
||||
/// Opens the profile management interface for configuring agent tools and settings.
|
||||
#[derive(PartialEq, Clone, Default, Debug, Deserialize, JsonSchema, Action)]
|
||||
#[action(namespace = agent)]
|
||||
|
||||
@@ -660,7 +660,6 @@ impl InlineAssistant {
|
||||
height: Some(prompt_editor_height),
|
||||
render: build_assist_editor_renderer(prompt_editor),
|
||||
priority: 0,
|
||||
render_in_minimap: false,
|
||||
},
|
||||
BlockProperties {
|
||||
style: BlockStyle::Sticky,
|
||||
@@ -675,7 +674,6 @@ impl InlineAssistant {
|
||||
.into_any_element()
|
||||
}),
|
||||
priority: 0,
|
||||
render_in_minimap: false,
|
||||
},
|
||||
];
|
||||
|
||||
@@ -1451,7 +1449,6 @@ impl InlineAssistant {
|
||||
.into_any_element()
|
||||
}),
|
||||
priority: 0,
|
||||
render_in_minimap: false,
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@@ -2,6 +2,7 @@ use std::collections::BTreeMap;
|
||||
use std::rc::Rc;
|
||||
use std::sync::Arc;
|
||||
|
||||
use crate::agent_diff::AgentDiffThread;
|
||||
use crate::agent_model_selector::AgentModelSelector;
|
||||
use crate::language_model_selector::ToggleModelSelector;
|
||||
use crate::tool_compatibility::{IncompatibleToolsState, IncompatibleToolsTooltip};
|
||||
@@ -475,9 +476,12 @@ impl MessageEditor {
        window: &mut Window,
        cx: &mut Context<Self>,
    ) {
-       if let Ok(diff) =
-           AgentDiffPane::deploy(self.thread.clone(), self.workspace.clone(), window, cx)
-       {
+       if let Ok(diff) = AgentDiffPane::deploy(
+           AgentDiffThread::Native(self.thread.clone()),
+           self.workspace.clone(),
+           window,
+           cx,
+       ) {
            let path_key = multi_buffer::PathKey::for_buffer(&buffer, cx);
            diff.update(cx, |diff, cx| diff.move_to_path(path_key, window, cx));
        }
|
||||
|
||||
@@ -1256,7 +1256,6 @@ impl TextThreadEditor {
|
||||
),
|
||||
priority: usize::MAX,
|
||||
render: render_block(MessageMetadata::from(message)),
|
||||
render_in_minimap: false,
|
||||
};
|
||||
let mut new_blocks = vec![];
|
||||
let mut block_index_to_message = vec![];
|
||||
@@ -1858,7 +1857,6 @@ impl TextThreadEditor {
|
||||
.into_any_element()
|
||||
}),
|
||||
priority: 0,
|
||||
render_in_minimap: false,
|
||||
})
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
@@ -34,6 +34,11 @@ impl ExtensionSlashCommandProxy for SlashCommandRegistryProxy {
|
||||
self.slash_command_registry
|
||||
.register_command(ExtensionSlashCommand::new(extension, command), false)
|
||||
}
|
||||
|
||||
fn unregister_slash_command(&self, command_name: Arc<str>) {
|
||||
self.slash_command_registry
|
||||
.unregister_command_by_name(&command_name)
|
||||
}
|
||||
}
|
||||
|
||||
/// An adapter that allows an [`LspAdapterDelegate`] to be used as a [`WorktreeDelegate`].
|
||||
|
||||
@@ -40,6 +40,7 @@ collections = { workspace = true, features = ["test-support"] }
|
||||
clock = { workspace = true, features = ["test-support"] }
|
||||
ctor.workspace = true
|
||||
gpui = { workspace = true, features = ["test-support"] }
|
||||
indoc.workspace = true
|
||||
language = { workspace = true, features = ["test-support"] }
|
||||
language_model = { workspace = true, features = ["test-support"] }
|
||||
log.workspace = true
|
||||
|
||||
@@ -8,7 +8,10 @@ use language::{Anchor, Buffer, BufferEvent, DiskState, Point, ToPoint};
|
||||
use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle};
|
||||
use std::{cmp, ops::Range, sync::Arc};
|
||||
use text::{Edit, Patch, Rope};
|
||||
use util::RangeExt;
|
||||
use util::{
|
||||
RangeExt, ResultExt as _,
|
||||
paths::{PathStyle, RemotePathBuf},
|
||||
};
|
||||
|
||||
/// Tracks actions performed by tools in a thread
|
||||
pub struct ActionLog {
|
||||
@@ -18,8 +21,6 @@ pub struct ActionLog {
|
||||
edited_since_project_diagnostics_check: bool,
|
||||
/// The project this action log is associated with
|
||||
project: Entity<Project>,
|
||||
/// Tracks which buffer versions have already been notified as changed externally
|
||||
notified_versions: BTreeMap<Entity<Buffer>, clock::Global>,
|
||||
}
|
||||
|
||||
impl ActionLog {
|
||||
@@ -29,7 +30,6 @@ impl ActionLog {
|
||||
tracked_buffers: BTreeMap::default(),
|
||||
edited_since_project_diagnostics_check: false,
|
||||
project,
|
||||
notified_versions: BTreeMap::default(),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -47,6 +47,71 @@ impl ActionLog {
|
||||
self.edited_since_project_diagnostics_check
|
||||
}
|
||||
|
||||
pub fn latest_snapshot(&self, buffer: &Entity<Buffer>) -> Option<text::BufferSnapshot> {
|
||||
Some(self.tracked_buffers.get(buffer)?.snapshot.clone())
|
||||
}
|
||||
|
||||
pub fn has_unnotified_user_edits(&self) -> bool {
|
||||
self.tracked_buffers
|
||||
.values()
|
||||
.any(|tracked| tracked.has_unnotified_user_edits)
|
||||
}
|
||||
|
||||
/// Return a unified diff patch with user edits made since last read or notification
|
||||
pub fn unnotified_user_edits(&self, cx: &Context<Self>) -> Option<String> {
|
||||
if !self.has_unnotified_user_edits() {
|
||||
return None;
|
||||
}
|
||||
|
||||
let unified_diff = self
|
||||
.tracked_buffers
|
||||
.values()
|
||||
.filter_map(|tracked| {
|
||||
if !tracked.has_unnotified_user_edits {
|
||||
return None;
|
||||
}
|
||||
|
||||
let text_with_latest_user_edits = tracked.diff_base.to_string();
|
||||
let text_with_last_seen_user_edits = tracked.last_seen_base.to_string();
|
||||
if text_with_latest_user_edits == text_with_last_seen_user_edits {
|
||||
return None;
|
||||
}
|
||||
let patch = language::unified_diff(
|
||||
&text_with_last_seen_user_edits,
|
||||
&text_with_latest_user_edits,
|
||||
);
|
||||
|
||||
let buffer = tracked.buffer.clone();
|
||||
let file_path = buffer
|
||||
.read(cx)
|
||||
.file()
|
||||
.map(|file| RemotePathBuf::new(file.full_path(cx), PathStyle::Posix).to_proto())
|
||||
.unwrap_or_else(|| format!("buffer_{}", buffer.entity_id()));
|
||||
|
||||
let mut result = String::new();
|
||||
result.push_str(&format!("--- a/{}\n", file_path));
|
||||
result.push_str(&format!("+++ b/{}\n", file_path));
|
||||
result.push_str(&patch);
|
||||
|
||||
Some(result)
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
.join("\n\n");
|
||||
|
||||
Some(unified_diff)
|
||||
}
|
||||
|
||||
/// Return a unified diff patch with user edits made since last read/notification
|
||||
/// and mark them as notified
|
||||
pub fn flush_unnotified_user_edits(&mut self, cx: &Context<Self>) -> Option<String> {
|
||||
let patch = self.unnotified_user_edits(cx);
|
||||
self.tracked_buffers.values_mut().for_each(|tracked| {
|
||||
tracked.has_unnotified_user_edits = false;
|
||||
tracked.last_seen_base = tracked.diff_base.clone();
|
||||
});
|
||||
patch
|
||||
}
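The two methods above form a peek/flush pair: `unnotified_user_edits` only reads, while `flush_unnotified_user_edits` returns the same patch and then advances `last_seen_base` so later calls report only newer edits. A simplified, gpui-free sketch of that pattern, with stand-in types:

```rust
// Hedged sketch of the peek/flush pattern on a simplified tracker.
struct EditTracker {
    pending_patch: Option<String>,
}

impl EditTracker {
    // Peek: report pending user edits without consuming them.
    fn unnotified_user_edits(&self) -> Option<String> {
        self.pending_patch.clone()
    }

    // Flush: return the same data, but reset the "last seen" state so the
    // next notification only covers edits made after this point.
    fn flush_unnotified_user_edits(&mut self) -> Option<String> {
        self.pending_patch.take()
    }
}
```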
|
||||
|
||||
fn track_buffer_internal(
|
||||
&mut self,
|
||||
buffer: Entity<Buffer>,
|
||||
@@ -55,7 +120,6 @@ impl ActionLog {
|
||||
) -> &mut TrackedBuffer {
|
||||
let status = if is_created {
|
||||
if let Some(tracked) = self.tracked_buffers.remove(&buffer) {
|
||||
self.notified_versions.remove(&buffer);
|
||||
match tracked.status {
|
||||
TrackedBufferStatus::Created {
|
||||
existing_file_content,
|
||||
@@ -97,26 +161,31 @@ impl ActionLog {
|
||||
let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
|
||||
let (diff_update_tx, diff_update_rx) = mpsc::unbounded();
|
||||
let diff_base;
|
||||
let last_seen_base;
|
||||
let unreviewed_edits;
|
||||
if is_created {
|
||||
diff_base = Rope::default();
|
||||
last_seen_base = Rope::default();
|
||||
unreviewed_edits = Patch::new(vec![Edit {
|
||||
old: 0..1,
|
||||
new: 0..text_snapshot.max_point().row + 1,
|
||||
}])
|
||||
} else {
|
||||
diff_base = buffer.read(cx).as_rope().clone();
|
||||
last_seen_base = diff_base.clone();
|
||||
unreviewed_edits = Patch::default();
|
||||
}
|
||||
TrackedBuffer {
|
||||
buffer: buffer.clone(),
|
||||
diff_base,
|
||||
last_seen_base,
|
||||
unreviewed_edits,
|
||||
snapshot: text_snapshot.clone(),
|
||||
status,
|
||||
version: buffer.read(cx).version(),
|
||||
diff,
|
||||
diff_update: diff_update_tx,
|
||||
has_unnotified_user_edits: false,
|
||||
_open_lsp_handle: open_lsp_handle,
|
||||
_maintain_diff: cx.spawn({
|
||||
let buffer = buffer.clone();
|
||||
@@ -170,7 +239,6 @@ impl ActionLog {
|
||||
// If the buffer had been edited by a tool, but it got
|
||||
// deleted externally, we want to stop tracking it.
|
||||
self.tracked_buffers.remove(&buffer);
|
||||
self.notified_versions.remove(&buffer);
|
||||
}
|
||||
cx.notify();
|
||||
}
|
||||
@@ -184,7 +252,6 @@ impl ActionLog {
|
||||
// resurrected externally, we want to clear the edits we
|
||||
// were tracking and reset the buffer's state.
|
||||
self.tracked_buffers.remove(&buffer);
|
||||
self.notified_versions.remove(&buffer);
|
||||
self.track_buffer_internal(buffer, false, cx);
|
||||
}
|
||||
cx.notify();
|
||||
@@ -258,10 +325,10 @@ impl ActionLog {
        buffer_snapshot: text::BufferSnapshot,
        cx: &mut AsyncApp,
    ) -> Result<()> {
-       let rebase = this.read_with(cx, |this, cx| {
+       let rebase = this.update(cx, |this, cx| {
            let tracked_buffer = this
                .tracked_buffers
-               .get(buffer)
+               .get_mut(buffer)
                .context("buffer not tracked")?;

            let rebase = cx.background_spawn({
|
||||
@@ -269,8 +336,13 @@ impl ActionLog {
                let old_snapshot = tracked_buffer.snapshot.clone();
                let new_snapshot = buffer_snapshot.clone();
                let unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
+               let edits = diff_snapshots(&old_snapshot, &new_snapshot);
+               if let ChangeAuthor::User = author
+                   && !edits.is_empty()
+               {
+                   tracked_buffer.has_unnotified_user_edits = true;
+               }
                async move {
-                   let edits = diff_snapshots(&old_snapshot, &new_snapshot);
                    if let ChangeAuthor::User = author {
                        apply_non_conflicting_edits(
                            &unreviewed_edits,
|
||||
@@ -490,7 +562,6 @@ impl ActionLog {
|
||||
match tracked_buffer.status {
|
||||
TrackedBufferStatus::Created { .. } => {
|
||||
self.tracked_buffers.remove(&buffer);
|
||||
self.notified_versions.remove(&buffer);
|
||||
cx.notify();
|
||||
}
|
||||
TrackedBufferStatus::Modified => {
|
||||
@@ -516,7 +587,6 @@ impl ActionLog {
|
||||
match tracked_buffer.status {
|
||||
TrackedBufferStatus::Deleted => {
|
||||
self.tracked_buffers.remove(&buffer);
|
||||
self.notified_versions.remove(&buffer);
|
||||
cx.notify();
|
||||
}
|
||||
_ => {
|
||||
@@ -625,7 +695,6 @@ impl ActionLog {
|
||||
};
|
||||
|
||||
self.tracked_buffers.remove(&buffer);
|
||||
self.notified_versions.remove(&buffer);
|
||||
cx.notify();
|
||||
task
|
||||
}
|
||||
@@ -639,7 +708,6 @@ impl ActionLog {
|
||||
|
||||
// Clear all tracked edits for this buffer and start over as if we just read it.
|
||||
self.tracked_buffers.remove(&buffer);
|
||||
self.notified_versions.remove(&buffer);
|
||||
self.buffer_read(buffer.clone(), cx);
|
||||
cx.notify();
|
||||
save
|
||||
@@ -715,6 +783,22 @@ impl ActionLog {
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
pub fn reject_all_edits(&mut self, cx: &mut Context<Self>) -> Task<()> {
|
||||
let futures = self.changed_buffers(cx).into_keys().map(|buffer| {
|
||||
let reject = self.reject_edits_in_ranges(buffer, vec![Anchor::MIN..Anchor::MAX], cx);
|
||||
|
||||
async move {
|
||||
reject.await.log_err();
|
||||
}
|
||||
});
|
||||
|
||||
let task = futures::future::join_all(futures);
|
||||
|
||||
cx.spawn(async move |_, _| {
|
||||
task.await;
|
||||
})
|
||||
}
|
||||
|
||||
/// Returns the set of buffers that contain edits that haven't been reviewed by the user.
|
||||
pub fn changed_buffers(&self, cx: &App) -> BTreeMap<Entity<Buffer>, Entity<BufferDiff>> {
|
||||
self.tracked_buffers
|
||||
@@ -724,33 +808,6 @@ impl ActionLog {
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Returns stale buffers that haven't been notified yet
|
||||
pub fn unnotified_stale_buffers<'a>(
|
||||
&'a self,
|
||||
cx: &'a App,
|
||||
) -> impl Iterator<Item = &'a Entity<Buffer>> {
|
||||
self.stale_buffers(cx).filter(|buffer| {
|
||||
let buffer_entity = buffer.read(cx);
|
||||
self.notified_versions
|
||||
.get(buffer)
|
||||
.map_or(true, |notified_version| {
|
||||
*notified_version != buffer_entity.version
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
/// Marks the given buffers as notified at their current versions
|
||||
pub fn mark_buffers_as_notified(
|
||||
&mut self,
|
||||
buffers: impl IntoIterator<Item = Entity<Buffer>>,
|
||||
cx: &App,
|
||||
) {
|
||||
for buffer in buffers {
|
||||
let version = buffer.read(cx).version.clone();
|
||||
self.notified_versions.insert(buffer, version);
|
||||
}
|
||||
}
|
||||
|
||||
/// Iterate over buffers changed since last read or edited by the model
|
||||
pub fn stale_buffers<'a>(&'a self, cx: &'a App) -> impl Iterator<Item = &'a Entity<Buffer>> {
|
||||
self.tracked_buffers
|
||||
@@ -894,12 +951,14 @@ enum TrackedBufferStatus {
|
||||
struct TrackedBuffer {
|
||||
buffer: Entity<Buffer>,
|
||||
diff_base: Rope,
|
||||
last_seen_base: Rope,
|
||||
unreviewed_edits: Patch<u32>,
|
||||
status: TrackedBufferStatus,
|
||||
version: clock::Global,
|
||||
diff: Entity<BufferDiff>,
|
||||
snapshot: text::BufferSnapshot,
|
||||
diff_update: mpsc::UnboundedSender<(ChangeAuthor, text::BufferSnapshot)>,
|
||||
has_unnotified_user_edits: bool,
|
||||
_open_lsp_handle: OpenLspBufferHandle,
|
||||
_maintain_diff: Task<()>,
|
||||
_subscription: Subscription,
|
||||
@@ -930,6 +989,7 @@ mod tests {
|
||||
use super::*;
|
||||
use buffer_diff::DiffHunkStatusKind;
|
||||
use gpui::TestAppContext;
|
||||
use indoc::indoc;
|
||||
use language::Point;
|
||||
use project::{FakeFs, Fs, Project, RemoveOptions};
|
||||
use rand::prelude::*;
|
||||
@@ -1212,6 +1272,110 @@ mod tests {
|
||||
assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 10)]
|
||||
async fn test_user_edits_notifications(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
fs.insert_tree(
|
||||
path!("/dir"),
|
||||
json!({"file": indoc! {"
|
||||
abc
|
||||
def
|
||||
ghi
|
||||
jkl
|
||||
mno"}}),
|
||||
)
|
||||
.await;
|
||||
let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
|
||||
let action_log = cx.new(|_| ActionLog::new(project.clone()));
|
||||
let file_path = project
|
||||
.read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
|
||||
.unwrap();
|
||||
let buffer = project
|
||||
.update(cx, |project, cx| project.open_buffer(file_path, cx))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// Agent edits
|
||||
cx.update(|cx| {
|
||||
action_log.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx));
|
||||
buffer.update(cx, |buffer, cx| {
|
||||
buffer
|
||||
.edit([(Point::new(1, 2)..Point::new(2, 3), "F\nGHI")], None, cx)
|
||||
.unwrap()
|
||||
});
|
||||
action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
|
||||
});
|
||||
cx.run_until_parked();
|
||||
assert_eq!(
|
||||
buffer.read_with(cx, |buffer, _| buffer.text()),
|
||||
indoc! {"
|
||||
abc
|
||||
deF
|
||||
GHI
|
||||
jkl
|
||||
mno"}
|
||||
);
|
||||
assert_eq!(
|
||||
unreviewed_hunks(&action_log, cx),
|
||||
vec![(
|
||||
buffer.clone(),
|
||||
vec![HunkStatus {
|
||||
range: Point::new(1, 0)..Point::new(3, 0),
|
||||
diff_status: DiffHunkStatusKind::Modified,
|
||||
old_text: "def\nghi\n".into(),
|
||||
}],
|
||||
)]
|
||||
);
|
||||
|
||||
// User edits
|
||||
buffer.update(cx, |buffer, cx| {
|
||||
buffer.edit(
|
||||
[
|
||||
(Point::new(0, 2)..Point::new(0, 2), "X"),
|
||||
(Point::new(3, 0)..Point::new(3, 0), "Y"),
|
||||
],
|
||||
None,
|
||||
cx,
|
||||
)
|
||||
});
|
||||
cx.run_until_parked();
|
||||
assert_eq!(
|
||||
buffer.read_with(cx, |buffer, _| buffer.text()),
|
||||
indoc! {"
|
||||
abXc
|
||||
deF
|
||||
GHI
|
||||
Yjkl
|
||||
mno"}
|
||||
);
|
||||
|
||||
// User edits should be stored separately from agent's
|
||||
let user_edits = action_log.update(cx, |log, cx| log.unnotified_user_edits(cx));
|
||||
assert_eq!(
|
||||
user_edits.expect("should have some user edits"),
|
||||
indoc! {"
|
||||
--- a/dir/file
|
||||
+++ b/dir/file
|
||||
@@ -1,5 +1,5 @@
|
||||
-abc
|
||||
+abXc
|
||||
def
|
||||
ghi
|
||||
-jkl
|
||||
+Yjkl
|
||||
mno
|
||||
"}
|
||||
);
|
||||
|
||||
action_log.update(cx, |log, cx| {
|
||||
log.keep_edits_in_range(buffer.clone(), Point::new(0, 0)..Point::new(1, 0), cx)
|
||||
});
|
||||
cx.run_until_parked();
|
||||
assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 10)]
|
||||
async fn test_creating_files(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
@@ -2201,4 +2365,61 @@ mod tests {
|
||||
.collect()
|
||||
})
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_format_patch(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
fs.insert_tree(
|
||||
path!("/dir"),
|
||||
json!({"test.txt": "line 1\nline 2\nline 3\n"}),
|
||||
)
|
||||
.await;
|
||||
let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
|
||||
let action_log = cx.new(|_| ActionLog::new(project.clone()));
|
||||
|
||||
let file_path = project
|
||||
.read_with(cx, |project, cx| {
|
||||
project.find_project_path("dir/test.txt", cx)
|
||||
})
|
||||
.unwrap();
|
||||
let buffer = project
|
||||
.update(cx, |project, cx| project.open_buffer(file_path, cx))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
cx.update(|cx| {
|
||||
// Track the buffer and mark it as read first
|
||||
action_log.update(cx, |log, cx| {
|
||||
log.buffer_read(buffer.clone(), cx);
|
||||
});
|
||||
|
||||
// Make some edits to create a patch
|
||||
buffer.update(cx, |buffer, cx| {
|
||||
buffer
|
||||
.edit([(Point::new(1, 0)..Point::new(1, 6), "CHANGED")], None, cx)
|
||||
.unwrap(); // Replace "line2" with "CHANGED"
|
||||
});
|
||||
});
|
||||
|
||||
cx.run_until_parked();
|
||||
|
||||
// Get the patch
|
||||
let patch = action_log.update(cx, |log, cx| log.unnotified_user_edits(cx));
|
||||
|
||||
// Verify the patch format contains expected unified diff elements
|
||||
assert_eq!(
|
||||
patch.unwrap(),
|
||||
indoc! {"
|
||||
--- a/dir/test.txt
|
||||
+++ b/dir/test.txt
|
||||
@@ -1,3 +1,3 @@
|
||||
line 1
|
||||
-line 2
|
||||
+CHANGED
|
||||
line 3
|
||||
"}
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -63,6 +63,7 @@ which.workspace = true
|
||||
workspace-hack.workspace = true
|
||||
workspace.workspace = true
|
||||
zed_llm_client.workspace = true
|
||||
diffy = "0.4.2"
|
||||
|
||||
[dev-dependencies]
|
||||
lsp = { workspace = true, features = ["test-support"] }
|
||||
|
||||
@@ -365,17 +365,23 @@ fn eval_disable_cursor_blinking() {
    // Model                          | Pass rate
    // ============================================
    //
-   // claude-3.7-sonnet              | 0.99 (2025-06-14)
-   // claude-sonnet-4                | 0.85 (2025-06-14)
-   // gemini-2.5-pro-preview-latest  | 0.97 (2025-06-16)
-   // gemini-2.5-flash-preview-04-17 |
-   // gpt-4.1                        |
+   // claude-3.7-sonnet              | 0.59 (2025-07-14)
+   // claude-sonnet-4                | 0.81 (2025-07-14)
+   // gemini-2.5-pro                 | 0.95 (2025-07-14)
+   // gemini-2.5-flash-preview-04-17 | 0.78 (2025-07-14)
+   // gpt-4.1                        | 0.00 (2025-07-14) (follows edit_description too literally)
|
||||
let input_file_path = "root/editor.rs";
|
||||
let input_file_content = include_str!("evals/fixtures/disable_cursor_blinking/before.rs");
|
||||
let edit_description = "Comment out the call to `BlinkManager::enable`";
|
||||
let possible_diffs = vec![
|
||||
include_str!("evals/fixtures/disable_cursor_blinking/possible-01.diff"),
|
||||
include_str!("evals/fixtures/disable_cursor_blinking/possible-02.diff"),
|
||||
include_str!("evals/fixtures/disable_cursor_blinking/possible-03.diff"),
|
||||
include_str!("evals/fixtures/disable_cursor_blinking/possible-04.diff"),
|
||||
];
|
||||
eval(
|
||||
100,
|
||||
0.95,
|
||||
0.51,
|
||||
0.05,
|
||||
EvalInput::from_conversation(
|
||||
vec![
|
||||
@@ -433,11 +439,7 @@ fn eval_disable_cursor_blinking() {
            ),
        ],
        Some(input_file_content.into()),
-       EvalAssertion::judge_diff(indoc! {"
-           - Calls to BlinkManager in `observe_window_activation` were commented out
-           - The call to `blink_manager.enable` above the call to show_cursor_names was commented out
-           - All the edits have valid indentation
-       "}),
+       EvalAssertion::assert_diff_any(possible_diffs),
    ),
);
|
||||
}
|
||||
|
||||
@@ -0,0 +1,28 @@
|
||||
--- before.rs 2025-07-07 11:37:48.434629001 +0300
|
||||
+++ expected.rs 2025-07-14 10:33:53.346906775 +0300
|
||||
@@ -1780,11 +1780,11 @@
|
||||
cx.observe_window_activation(window, |editor, window, cx| {
|
||||
let active = window.is_window_active();
|
||||
editor.blink_manager.update(cx, |blink_manager, cx| {
|
||||
- if active {
|
||||
- blink_manager.enable(cx);
|
||||
- } else {
|
||||
- blink_manager.disable(cx);
|
||||
- }
|
||||
+ // if active {
|
||||
+ // blink_manager.enable(cx);
|
||||
+ // } else {
|
||||
+ // blink_manager.disable(cx);
|
||||
+ // }
|
||||
});
|
||||
}),
|
||||
],
|
||||
@@ -18463,7 +18463,7 @@
|
||||
}
|
||||
|
||||
self.blink_manager.update(cx, |blink_manager, cx| {
|
||||
- blink_manager.enable(cx);
|
||||
+ // blink_manager.enable(cx);
|
||||
});
|
||||
self.show_cursor_names(window, cx);
|
||||
self.buffer.update(cx, |buffer, cx| {
|
||||
@@ -0,0 +1,29 @@
|
||||
@@ -1778,13 +1778,13 @@
|
||||
cx.observe_global_in::<SettingsStore>(window, Self::settings_changed),
|
||||
observe_buffer_font_size_adjustment(cx, |_, cx| cx.notify()),
|
||||
cx.observe_window_activation(window, |editor, window, cx| {
|
||||
- let active = window.is_window_active();
|
||||
+ // let active = window.is_window_active();
|
||||
editor.blink_manager.update(cx, |blink_manager, cx| {
|
||||
- if active {
|
||||
- blink_manager.enable(cx);
|
||||
- } else {
|
||||
- blink_manager.disable(cx);
|
||||
- }
|
||||
+ // if active {
|
||||
+ // blink_manager.enable(cx);
|
||||
+ // } else {
|
||||
+ // blink_manager.disable(cx);
|
||||
+ // }
|
||||
});
|
||||
}),
|
||||
],
|
||||
@@ -18463,7 +18463,7 @@
|
||||
}
|
||||
|
||||
self.blink_manager.update(cx, |blink_manager, cx| {
|
||||
- blink_manager.enable(cx);
|
||||
+ // blink_manager.enable(cx);
|
||||
});
|
||||
self.show_cursor_names(window, cx);
|
||||
self.buffer.update(cx, |buffer, cx| {
|
||||
@@ -0,0 +1,34 @@
|
||||
@@ -1774,17 +1774,17 @@
|
||||
cx.observe(&buffer, Self::on_buffer_changed),
|
||||
cx.subscribe_in(&buffer, window, Self::on_buffer_event),
|
||||
cx.observe_in(&display_map, window, Self::on_display_map_changed),
|
||||
- cx.observe(&blink_manager, |_, _, cx| cx.notify()),
|
||||
+ // cx.observe(&blink_manager, |_, _, cx| cx.notify()),
|
||||
cx.observe_global_in::<SettingsStore>(window, Self::settings_changed),
|
||||
observe_buffer_font_size_adjustment(cx, |_, cx| cx.notify()),
|
||||
cx.observe_window_activation(window, |editor, window, cx| {
|
||||
- let active = window.is_window_active();
|
||||
+ // let active = window.is_window_active();
|
||||
editor.blink_manager.update(cx, |blink_manager, cx| {
|
||||
- if active {
|
||||
- blink_manager.enable(cx);
|
||||
- } else {
|
||||
- blink_manager.disable(cx);
|
||||
- }
|
||||
+ // if active {
|
||||
+ // blink_manager.enable(cx);
|
||||
+ // } else {
|
||||
+ // blink_manager.disable(cx);
|
||||
+ // }
|
||||
});
|
||||
}),
|
||||
],
|
||||
@@ -18463,7 +18463,7 @@
|
||||
}
|
||||
|
||||
self.blink_manager.update(cx, |blink_manager, cx| {
|
||||
- blink_manager.enable(cx);
|
||||
+ // blink_manager.enable(cx);
|
||||
});
|
||||
self.show_cursor_names(window, cx);
|
||||
self.buffer.update(cx, |buffer, cx| {
|
||||
@@ -0,0 +1,33 @@
|
||||
@@ -1774,17 +1774,17 @@
|
||||
cx.observe(&buffer, Self::on_buffer_changed),
|
||||
cx.subscribe_in(&buffer, window, Self::on_buffer_event),
|
||||
cx.observe_in(&display_map, window, Self::on_display_map_changed),
|
||||
- cx.observe(&blink_manager, |_, _, cx| cx.notify()),
|
||||
+ // cx.observe(&blink_manager, |_, _, cx| cx.notify()),
|
||||
cx.observe_global_in::<SettingsStore>(window, Self::settings_changed),
|
||||
observe_buffer_font_size_adjustment(cx, |_, cx| cx.notify()),
|
||||
cx.observe_window_activation(window, |editor, window, cx| {
|
||||
let active = window.is_window_active();
|
||||
editor.blink_manager.update(cx, |blink_manager, cx| {
|
||||
- if active {
|
||||
- blink_manager.enable(cx);
|
||||
- } else {
|
||||
- blink_manager.disable(cx);
|
||||
- }
|
||||
+ // if active {
|
||||
+ // blink_manager.enable(cx);
|
||||
+ // } else {
|
||||
+ // blink_manager.disable(cx);
|
||||
+ // }
|
||||
});
|
||||
}),
|
||||
],
|
||||
@@ -18463,7 +18463,7 @@
|
||||
}
|
||||
|
||||
self.blink_manager.update(cx, |blink_manager, cx| {
|
||||
- blink_manager.enable(cx);
|
||||
+ // blink_manager.enable(cx);
|
||||
});
|
||||
self.show_cursor_names(window, cx);
|
||||
self.buffer.update(cx, |buffer, cx| {
|
||||
@@ -69,10 +69,9 @@ impl FetchTool {
            .to_str()
            .context("invalid Content-Type header")?;
        let content_type = match content_type {
-           "text/html" => ContentType::Html,
-           "text/plain" => ContentType::Plaintext,
+           "text/html" | "application/xhtml+xml" => ContentType::Html,
            "application/json" => ContentType::Json,
-           _ => ContentType::Html,
+           _ => ContentType::Plaintext,
        };

        match content_type {
|
||||
|
||||
@@ -6,8 +6,7 @@ use language_model::{LanguageModel, LanguageModelRequest, LanguageModelToolSchem
use project::Project;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
-use std::fmt::Write as _;
-use std::sync::Arc;
+use std::{fmt::Write, sync::Arc};
use ui::IconName;
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
|
||||
@@ -52,39 +51,113 @@ impl Tool for ProjectNotificationsTool {
|
||||
_window: Option<AnyWindowHandle>,
|
||||
cx: &mut App,
|
||||
) -> ToolResult {
|
||||
let mut stale_files = String::new();
|
||||
let mut notified_buffers = Vec::new();
|
||||
|
||||
for stale_file in action_log.read(cx).unnotified_stale_buffers(cx) {
|
||||
if let Some(file) = stale_file.read(cx).file() {
|
||||
writeln!(&mut stale_files, "- {}", file.path().display()).ok();
|
||||
notified_buffers.push(stale_file.clone());
|
||||
}
|
||||
}
|
||||
|
||||
if !notified_buffers.is_empty() {
|
||||
action_log.update(cx, |log, cx| {
|
||||
log.mark_buffers_as_notified(notified_buffers, cx);
|
||||
});
|
||||
}
|
||||
|
||||
let response = if stale_files.is_empty() {
|
||||
"No new notifications".to_string()
|
||||
} else {
|
||||
// NOTE: Changes to this prompt require a symmetric update in the LLM Worker
|
||||
const HEADER: &str = include_str!("./project_notifications_tool/prompt_header.txt");
|
||||
format!("{HEADER}{stale_files}").replace("\r\n", "\n")
|
||||
let Some(user_edits_diff) =
|
||||
action_log.update(cx, |log, cx| log.flush_unnotified_user_edits(cx))
|
||||
else {
|
||||
return result("No new notifications");
|
||||
};
|
||||
|
||||
Task::ready(Ok(response.into())).into()
|
||||
// NOTE: Changes to this prompt require a symmetric update in the LLM Worker
|
||||
const HEADER: &str = include_str!("./project_notifications_tool/prompt_header.txt");
|
||||
const MAX_BYTES: usize = 8000;
|
||||
let diff = fit_patch_to_size(&user_edits_diff, MAX_BYTES);
|
||||
result(&format!("{HEADER}\n\n```diff\n{diff}\n```\n").replace("\r\n", "\n"))
|
||||
}
|
||||
}
|
||||
|
||||
fn result(response: &str) -> ToolResult {
|
||||
Task::ready(Ok(response.to_string().into())).into()
|
||||
}
|
||||
|
||||
/// Make sure that the patch fits into the size limit (in bytes).
|
||||
/// Compress the patch by omitting some parts if needed.
|
||||
/// Unified diff format is assumed.
|
||||
fn fit_patch_to_size(patch: &str, max_size: usize) -> String {
|
||||
if patch.len() <= max_size {
|
||||
return patch.to_string();
|
||||
}
|
||||
|
||||
// Compression level 1: remove context lines in diff bodies, but
|
||||
// leave the counts and positions of inserted/deleted lines
|
||||
let mut current_size = patch.len();
|
||||
let mut file_patches = split_patch(&patch);
|
||||
file_patches.sort_by_key(|patch| patch.len());
|
||||
let compressed_patches = file_patches
|
||||
.iter()
|
||||
.rev()
|
||||
.map(|patch| {
|
||||
if current_size > max_size {
|
||||
let compressed = compress_patch(patch).unwrap_or_else(|_| patch.to_string());
|
||||
current_size -= patch.len() - compressed.len();
|
||||
compressed
|
||||
} else {
|
||||
patch.to_string()
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
if current_size <= max_size {
|
||||
return compressed_patches.join("\n\n");
|
||||
}
|
||||
|
||||
// Compression level 2: list paths of the changed files only
|
||||
let filenames = file_patches
|
||||
.iter()
|
||||
.map(|patch| {
|
||||
let patch = diffy::Patch::from_str(patch).unwrap();
|
||||
let path = patch
|
||||
.modified()
|
||||
.and_then(|path| path.strip_prefix("b/"))
|
||||
.unwrap_or_default();
|
||||
format!("- {path}\n")
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
filenames.join("")
|
||||
}
|
||||
|
||||
/// Split a potentially multi-file patch into multiple single-file patches
|
||||
fn split_patch(patch: &str) -> Vec<String> {
|
||||
let mut result = Vec::new();
|
||||
let mut current_patch = String::new();
|
||||
|
||||
for line in patch.lines() {
|
||||
if line.starts_with("---") && !current_patch.is_empty() {
|
||||
result.push(current_patch.trim_end_matches('\n').into());
|
||||
current_patch = String::new();
|
||||
}
|
||||
current_patch.push_str(line);
|
||||
current_patch.push('\n');
|
||||
}
|
||||
|
||||
if !current_patch.is_empty() {
|
||||
result.push(current_patch.trim_end_matches('\n').into());
|
||||
}
|
||||
|
||||
result
|
||||
}
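A small, self-contained example of how `split_patch` behaves on a two-file unified diff; the file names are illustrative, and only the leading `---` markers drive the split:

```rust
// Hedged usage sketch for split_patch; inputs are made up for illustration.
fn example_split_patch() {
    let patch = "--- a/foo\n+++ b/foo\n@@ -1 +1 @@\n-x\n+y\n\
                 --- a/bar\n+++ b/bar\n@@ -1 +1 @@\n-p\n+q\n";
    let parts = split_patch(patch);
    // Each "---" header starts a new single-file patch.
    assert_eq!(parts.len(), 2);
    assert!(parts[0].starts_with("--- a/foo"));
    assert!(parts[1].starts_with("--- a/bar"));
}
```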
|
||||
|
||||
fn compress_patch(patch: &str) -> anyhow::Result<String> {
|
||||
let patch = diffy::Patch::from_str(patch)?;
|
||||
let mut out = String::new();
|
||||
|
||||
writeln!(out, "--- {}", patch.original().unwrap_or("a"))?;
|
||||
writeln!(out, "+++ {}", patch.modified().unwrap_or("b"))?;
|
||||
|
||||
for hunk in patch.hunks() {
|
||||
writeln!(out, "@@ -{} +{} @@", hunk.old_range(), hunk.new_range())?;
|
||||
writeln!(out, "[...skipped...]")?;
|
||||
}
|
||||
|
||||
Ok(out)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use assistant_tool::ToolResultContent;
|
||||
use gpui::{AppContext, TestAppContext};
|
||||
use indoc::indoc;
|
||||
use language_model::{LanguageModelRequest, fake_provider::FakeLanguageModelProvider};
|
||||
use project::{FakeFs, Project};
|
||||
use serde_json::json;
|
||||
@@ -123,6 +196,7 @@ mod tests {
|
||||
action_log.update(cx, |log, cx| {
|
||||
log.buffer_read(buffer.clone(), cx);
|
||||
});
|
||||
cx.run_until_parked();
|
||||
|
||||
// Run the tool before any changes
|
||||
let tool = Arc::new(ProjectNotificationsTool);
|
||||
@@ -142,6 +216,7 @@ mod tests {
|
||||
cx,
|
||||
)
|
||||
});
|
||||
cx.run_until_parked();
|
||||
|
||||
let response = result.output.await.unwrap();
|
||||
let response_text = match &response.content {
|
||||
@@ -158,6 +233,7 @@ mod tests {
|
||||
buffer.update(cx, |buffer, cx| {
|
||||
buffer.edit([(1..1, "\nChange!\n")], None, cx);
|
||||
});
|
||||
cx.run_until_parked();
|
||||
|
||||
// Run the tool again
|
||||
let result = cx.update(|cx| {
|
||||
@@ -171,6 +247,7 @@ mod tests {
|
||||
cx,
|
||||
)
|
||||
});
|
||||
cx.run_until_parked();
|
||||
|
||||
// This time the buffer is stale, so the tool should return a notification
|
||||
let response = result.output.await.unwrap();
|
||||
@@ -179,10 +256,12 @@ mod tests {
            _ => panic!("Expected text response"),
        };

-       let expected_content = "[The following is an auto-generated notification; do not reply]\n\nThese files have changed since the last read:\n- code.rs\n";
-       assert_eq!(
-           response_text.as_str(),
-           expected_content,
+       assert!(
+           response_text.contains("These files have changed"),
            "Tool should return the stale buffer notification"
        );
+       assert!(
+           response_text.contains("test/code.rs"),
+           "Tool should return the stale buffer notification"
+       );
|
||||
|
||||
@@ -198,6 +277,7 @@ mod tests {
|
||||
cx,
|
||||
)
|
||||
});
|
||||
cx.run_until_parked();
|
||||
|
||||
let response = result.output.await.unwrap();
|
||||
let response_text = match &response.content {
|
||||
@@ -212,6 +292,61 @@ mod tests {
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_patch_compression() {
|
||||
// Given a patch that doesn't fit into the size budget
|
||||
let patch = indoc! {"
|
||||
--- a/dir/test.txt
|
||||
+++ b/dir/test.txt
|
||||
@@ -1,3 +1,3 @@
|
||||
line 1
|
||||
-line 2
|
||||
+CHANGED
|
||||
line 3
|
||||
@@ -10,2 +10,2 @@
|
||||
line 10
|
||||
-line 11
|
||||
+line eleven
|
||||
|
||||
|
||||
--- a/dir/another.txt
|
||||
+++ b/dir/another.txt
|
||||
@@ -100,1 +1,1 @@
|
||||
-before
|
||||
+after
|
||||
"};
|
||||
|
||||
// When the size deficit can be compensated by dropping the body,
|
||||
// then the body should be trimmed for larger files first
|
||||
let limit = patch.len() - 10;
|
||||
let compressed = fit_patch_to_size(patch, limit);
|
||||
let expected = indoc! {"
|
||||
--- a/dir/test.txt
|
||||
+++ b/dir/test.txt
|
||||
@@ -1,3 +1,3 @@
|
||||
[...skipped...]
|
||||
@@ -10,2 +10,2 @@
|
||||
[...skipped...]
|
||||
|
||||
|
||||
--- a/dir/another.txt
|
||||
+++ b/dir/another.txt
|
||||
@@ -100,1 +1,1 @@
|
||||
-before
|
||||
+after"};
|
||||
assert_eq!(compressed, expected);
|
||||
|
||||
// When the size deficit is too large, then only file paths
|
||||
// should be returned
|
||||
let limit = 10;
|
||||
let compressed = fit_patch_to_size(patch, limit);
|
||||
let expected = indoc! {"
|
||||
- dir/another.txt
|
||||
- dir/test.txt
|
||||
"};
|
||||
assert_eq!(compressed, expected);
|
||||
}
|
||||
|
||||
fn init_test(cx: &mut TestAppContext) {
|
||||
cx.update(|cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
|
||||
@@ -18,7 +18,6 @@ use serde::{Deserialize, Serialize};
|
||||
use settings::Settings;
|
||||
use std::sync::Arc;
|
||||
use ui::IconName;
|
||||
use util::markdown::MarkdownInlineCode;
|
||||
|
||||
/// If the model requests to read a file whose size exceeds this, then
|
||||
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
|
||||
@@ -78,11 +77,21 @@ impl Tool for ReadFileTool {
    fn ui_text(&self, input: &serde_json::Value) -> String {
        match serde_json::from_value::<ReadFileToolInput>(input.clone()) {
            Ok(input) => {
-               let path = MarkdownInlineCode(&input.path);
+               let path = &input.path;
                match (input.start_line, input.end_line) {
-                   (Some(start), None) => format!("Read file {path} (from line {start})"),
-                   (Some(start), Some(end)) => format!("Read file {path} (lines {start}-{end})"),
-                   _ => format!("Read file {path}"),
+                   (Some(start), Some(end)) => {
+                       format!(
+                           "[Read file `{}` (lines {}-{})](@selection:{}:({}-{}))",
+                           path, start, end, path, start, end
+                       )
+                   }
+                   (Some(start), None) => {
+                       format!(
+                           "[Read file `{}` (from line {})](@selection:{}:({}-{}))",
+                           path, start, path, start, start
+                       )
+                   }
+                   _ => format!("[Read file `{}`](@file:{})", path, path),
                }
            }
            Err(_) => "Read file".to_string(),
|
||||
@@ -276,7 +285,10 @@ impl Tool for ReadFileTool {

                    Using the line numbers in this outline, you can call this tool again
                    while specifying the start_line and end_line fields to see the
-                   implementations of symbols in the outline."
+                   implementations of symbols in the outline.
+
+                   Alternatively, you can fall back to the `grep` tool (if available)
+                   to search the file for specific content."
                }
                .into())
            }
|
||||
|
||||
@@ -1389,10 +1389,17 @@ impl Room {
        let sources = cx.screen_capture_sources();

        cx.spawn(async move |this, cx| {
-           let sources = sources.await??;
-           let source = sources.first().context("no display found")?;
+           let sources = sources
+               .await
+               .map_err(|error| error.into())
+               .and_then(|sources| sources);
+           let source =
+               sources.and_then(|sources| sources.into_iter().next().context("no display found"));

-           let publication = participant.publish_screenshare_track(&**source, cx).await;
+           let publication = match source {
+               Ok(source) => participant.publish_screenshare_track(&*source, cx).await,
+               Err(error) => Err(error),
+           };

            this.update(cx, |this, cx| {
                let live_kit = this
|
||||
|
||||
@@ -315,19 +315,19 @@ fn main() -> Result<()> {
        });

        let stdin_pipe_handle: Option<JoinHandle<anyhow::Result<()>>> =
-           stdin_tmp_file.map(|tmp_file| {
+           stdin_tmp_file.map(|mut tmp_file| {
                thread::spawn(move || {
-                   let stdin = std::io::stdin().lock();
-                   if io::IsTerminal::is_terminal(&stdin) {
-                       return Ok(());
+                   let mut stdin = std::io::stdin().lock();
+                   if !io::IsTerminal::is_terminal(&stdin) {
+                       io::copy(&mut stdin, &mut tmp_file)?;
                    }
-                   return pipe_to_tmp(stdin, tmp_file);
+                   Ok(())
                })
            });

-       let anonymous_fd_pipe_handles: Vec<JoinHandle<anyhow::Result<()>>> = anonymous_fd_tmp_files
+       let anonymous_fd_pipe_handles: Vec<_> = anonymous_fd_tmp_files
            .into_iter()
-           .map(|(file, tmp_file)| thread::spawn(move || pipe_to_tmp(file, tmp_file)))
+           .map(|(mut file, mut tmp_file)| thread::spawn(move || io::copy(&mut file, &mut tmp_file)))
            .collect();

        if args.foreground {
|
||||
@@ -349,22 +349,6 @@ fn main() -> Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn pipe_to_tmp(mut src: impl io::Read, mut dest: fs::File) -> Result<()> {
|
||||
let mut buffer = [0; 8 * 1024];
|
||||
loop {
|
||||
let bytes_read = match src.read(&mut buffer) {
|
||||
Err(err) if err.kind() == io::ErrorKind::Interrupted => continue,
|
||||
res => res?,
|
||||
};
|
||||
if bytes_read == 0 {
|
||||
break;
|
||||
}
|
||||
io::Write::write_all(&mut dest, &buffer[..bytes_read])?;
|
||||
}
|
||||
io::Write::flush(&mut dest)?;
|
||||
Ok(())
|
||||
}
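The hand-rolled `pipe_to_tmp` helper removed above is superseded by `std::io::copy`, which already buffers internally and retries reads interrupted by `ErrorKind::Interrupted`. A hedged sketch of the equivalent call:

```rust
use std::{fs, io};

// Minimal sketch: same observable behavior as the removed helper, assuming
// the destination is a plain temp file.
fn pipe_with_std(mut src: impl io::Read, mut dest: fs::File) -> io::Result<u64> {
    // io::copy loops until EOF and transparently retries interrupted reads.
    let copied = io::copy(&mut src, &mut dest)?;
    io::Write::flush(&mut dest)?;
    Ok(copied)
}
```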
|
||||
|
||||
fn anonymous_fd(path: &str) -> Option<fs::File> {
|
||||
#[cfg(target_os = "linux")]
|
||||
{
|
||||
|
||||
@@ -94,6 +94,7 @@ context_server.workspace = true
|
||||
ctor.workspace = true
|
||||
dap = { workspace = true, features = ["test-support"] }
|
||||
dap_adapters = { workspace = true, features = ["test-support"] }
|
||||
dap-types.workspace = true
|
||||
debugger_ui = { workspace = true, features = ["test-support"] }
|
||||
editor = { workspace = true, features = ["test-support"] }
|
||||
extension.workspace = true
|
||||
@@ -126,6 +127,7 @@ sea-orm = { version = "1.1.0-rc.1", features = ["sqlx-sqlite"] }
|
||||
serde_json.workspace = true
|
||||
session = { workspace = true, features = ["test-support"] }
|
||||
settings = { workspace = true, features = ["test-support"] }
|
||||
smol.workspace = true
|
||||
sqlx = { version = "0.8", features = ["sqlite"] }
|
||||
task.workspace = true
|
||||
theme.workspace = true
|
||||
|
||||
@@ -1,12 +1,33 @@
|
||||
{
|
||||
"admins": [
|
||||
"nathansobo",
|
||||
"as-cii",
|
||||
"maxbrunsfeld",
|
||||
"iamnbutler",
|
||||
"mikayla-maki",
|
||||
"as-cii",
|
||||
"JosephTLyons",
|
||||
"rgbkrk"
|
||||
"maxdeviant",
|
||||
"SomeoneToIgnore",
|
||||
"mikayla-maki",
|
||||
"agu-z",
|
||||
"osiewicz",
|
||||
"ConradIrwin",
|
||||
"benbrandt",
|
||||
"bennetbo",
|
||||
"smitbarmase",
|
||||
"notpeter",
|
||||
"rgbkrk",
|
||||
"JunkuiZhang",
|
||||
"Anthony-Eid",
|
||||
"rtfeldman",
|
||||
"danilo-leal",
|
||||
"MrSubidubi",
|
||||
"cole-miller",
|
||||
"osyvokon",
|
||||
"probably-neb",
|
||||
"mgsloan",
|
||||
"P1n3appl3",
|
||||
"mslzed",
|
||||
"franciskafyi",
|
||||
"katie-z-geer"
|
||||
],
|
||||
"channels": ["zed"]
|
||||
}
|
||||
|
||||
@@ -1,11 +1,12 @@
use anyhow::{Context as _, bail};
+use axum::routing::put;
use axum::{
    Extension, Json, Router,
    extract::{self, Query},
    routing::{get, post},
};
use chrono::{DateTime, SecondsFormat, Utc};
-use collections::HashSet;
+use collections::{HashMap, HashSet};
use reqwest::StatusCode;
use sea_orm::ActiveValue;
use serde::{Deserialize, Serialize};
|
||||
@@ -21,13 +22,14 @@ use stripe::{
|
||||
PaymentMethod, Subscription, SubscriptionId, SubscriptionStatus,
|
||||
};
|
||||
use util::{ResultExt, maybe};
|
||||
use zed_llm_client::LanguageModelProvider;
|
||||
|
||||
use crate::api::events::SnowflakeRow;
|
||||
use crate::db::billing_subscription::{
|
||||
StripeCancellationReason, StripeSubscriptionStatus, SubscriptionKind,
|
||||
};
|
||||
use crate::llm::db::subscription_usage_meter::CompletionMode;
|
||||
use crate::llm::{AGENT_EXTENDED_TRIAL_FEATURE_FLAG, DEFAULT_MAX_MONTHLY_SPEND};
|
||||
use crate::llm::AGENT_EXTENDED_TRIAL_FEATURE_FLAG;
|
||||
use crate::llm::db::subscription_usage_meter::{self, CompletionMode};
|
||||
use crate::rpc::{ResultExt as _, Server};
|
||||
use crate::stripe_client::{
|
||||
StripeCancellationDetailsReason, StripeClient, StripeCustomerId, StripeSubscription,
|
||||
@@ -46,14 +48,8 @@ use crate::{
|
||||
|
||||
pub fn router() -> Router {
|
||||
Router::new()
|
||||
.route(
|
||||
"/billing/preferences",
|
||||
get(get_billing_preferences).put(update_billing_preferences),
|
||||
)
|
||||
.route(
|
||||
"/billing/subscriptions",
|
||||
get(list_billing_subscriptions).post(create_billing_subscription),
|
||||
)
|
||||
.route("/billing/preferences", put(update_billing_preferences))
|
||||
.route("/billing/subscriptions", post(create_billing_subscription))
|
||||
.route(
|
||||
"/billing/subscriptions/manage",
|
||||
post(manage_billing_subscription),
|
||||
@@ -65,11 +61,6 @@ pub fn router() -> Router {
|
||||
.route("/billing/usage", get(get_current_usage))
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct GetBillingPreferencesParams {
|
||||
github_user_id: i32,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
struct BillingPreferencesResponse {
|
||||
trial_started_at: Option<String>,
|
||||
@@ -78,43 +69,6 @@ struct BillingPreferencesResponse {
|
||||
model_request_overages_spend_limit_in_cents: i32,
|
||||
}
|
||||
|
||||
async fn get_billing_preferences(
|
||||
Extension(app): Extension<Arc<AppState>>,
|
||||
Query(params): Query<GetBillingPreferencesParams>,
|
||||
) -> Result<Json<BillingPreferencesResponse>> {
|
||||
let user = app
|
||||
.db
|
||||
.get_user_by_github_user_id(params.github_user_id)
|
||||
.await?
|
||||
.context("user not found")?;
|
||||
|
||||
let billing_customer = app.db.get_billing_customer_by_user_id(user.id).await?;
|
||||
let preferences = app.db.get_billing_preferences(user.id).await?;
|
||||
|
||||
Ok(Json(BillingPreferencesResponse {
|
||||
trial_started_at: billing_customer
|
||||
.and_then(|billing_customer| billing_customer.trial_started_at)
|
||||
.map(|trial_started_at| {
|
||||
trial_started_at
|
||||
.and_utc()
|
||||
.to_rfc3339_opts(SecondsFormat::Millis, true)
|
||||
}),
|
||||
max_monthly_llm_usage_spending_in_cents: preferences
|
||||
.as_ref()
|
||||
.map_or(DEFAULT_MAX_MONTHLY_SPEND.0 as i32, |preferences| {
|
||||
preferences.max_monthly_llm_usage_spending_in_cents
|
||||
}),
|
||||
model_request_overages_enabled: preferences.as_ref().map_or(false, |preferences| {
|
||||
preferences.model_request_overages_enabled
|
||||
}),
|
||||
model_request_overages_spend_limit_in_cents: preferences
|
||||
.as_ref()
|
||||
.map_or(0, |preferences| {
|
||||
preferences.model_request_overages_spend_limit_in_cents
|
||||
}),
|
||||
}))
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct UpdateBillingPreferencesBody {
|
||||
github_user_id: i32,
|
||||
@@ -209,90 +163,6 @@ async fn update_billing_preferences(
|
||||
}))
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct ListBillingSubscriptionsParams {
|
||||
github_user_id: i32,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
struct BillingSubscriptionJson {
|
||||
id: BillingSubscriptionId,
|
||||
name: String,
|
||||
status: StripeSubscriptionStatus,
|
||||
period: Option<BillingSubscriptionPeriodJson>,
|
||||
trial_end_at: Option<String>,
|
||||
cancel_at: Option<String>,
|
||||
/// Whether this subscription can be canceled.
|
||||
is_cancelable: bool,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
struct BillingSubscriptionPeriodJson {
|
||||
start_at: String,
|
||||
end_at: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
struct ListBillingSubscriptionsResponse {
|
||||
subscriptions: Vec<BillingSubscriptionJson>,
|
||||
}
|
||||
|
||||
async fn list_billing_subscriptions(
|
||||
Extension(app): Extension<Arc<AppState>>,
|
||||
Query(params): Query<ListBillingSubscriptionsParams>,
|
||||
) -> Result<Json<ListBillingSubscriptionsResponse>> {
|
||||
let user = app
|
||||
.db
|
||||
.get_user_by_github_user_id(params.github_user_id)
|
||||
.await?
|
||||
.context("user not found")?;
|
||||
|
||||
let subscriptions = app.db.get_billing_subscriptions(user.id).await?;
|
||||
|
||||
Ok(Json(ListBillingSubscriptionsResponse {
|
||||
subscriptions: subscriptions
|
||||
.into_iter()
|
||||
.map(|subscription| BillingSubscriptionJson {
|
||||
id: subscription.id,
|
||||
name: match subscription.kind {
|
||||
Some(SubscriptionKind::ZedPro) => "Zed Pro".to_string(),
|
||||
Some(SubscriptionKind::ZedProTrial) => "Zed Pro (Trial)".to_string(),
|
||||
Some(SubscriptionKind::ZedFree) => "Zed Free".to_string(),
|
||||
None => "Zed LLM Usage".to_string(),
|
||||
},
|
||||
status: subscription.stripe_subscription_status,
|
||||
period: maybe!({
|
||||
let start_at = subscription.current_period_start_at()?;
|
||||
let end_at = subscription.current_period_end_at()?;
|
||||
|
||||
Some(BillingSubscriptionPeriodJson {
|
||||
start_at: start_at.to_rfc3339_opts(SecondsFormat::Millis, true),
|
||||
end_at: end_at.to_rfc3339_opts(SecondsFormat::Millis, true),
|
||||
})
|
||||
}),
|
||||
trial_end_at: if subscription.kind == Some(SubscriptionKind::ZedProTrial) {
|
||||
maybe!({
|
||||
let end_at = subscription.stripe_current_period_end?;
|
||||
let end_at = DateTime::from_timestamp(end_at, 0)?;
|
||||
|
||||
Some(end_at.to_rfc3339_opts(SecondsFormat::Millis, true))
|
||||
})
|
||||
} else {
|
||||
None
|
||||
},
|
||||
cancel_at: subscription.stripe_cancel_at.map(|cancel_at| {
|
||||
cancel_at
|
||||
.and_utc()
|
||||
.to_rfc3339_opts(SecondsFormat::Millis, true)
|
||||
}),
|
||||
is_cancelable: subscription.kind != Some(SubscriptionKind::ZedFree)
|
||||
&& subscription.stripe_subscription_status.is_cancelable()
|
||||
&& subscription.stripe_cancel_at.is_none(),
|
||||
})
|
||||
.collect(),
|
||||
}))
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Copy, Deserialize)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
enum ProductCode {
|
||||
@@ -1416,18 +1286,21 @@ async fn sync_model_request_usage_with_stripe(
|
||||
let usage_meters = llm_db
|
||||
.get_current_subscription_usage_meters(Utc::now())
|
||||
.await?;
|
||||
let usage_meters = usage_meters
|
||||
.into_iter()
|
||||
.filter(|(_, usage)| !staff_user_ids.contains(&usage.user_id))
|
||||
.collect::<Vec<_>>();
|
||||
let user_ids = usage_meters
|
||||
.iter()
|
||||
.map(|(_, usage)| usage.user_id)
|
||||
.collect::<HashSet<UserId>>();
|
||||
let billing_subscriptions = app
|
||||
.db
|
||||
.get_active_zed_pro_billing_subscriptions(user_ids)
|
||||
.await?;
|
||||
let mut usage_meters_by_user_id =
|
||||
HashMap::<UserId, Vec<subscription_usage_meter::Model>>::default();
|
||||
for (usage_meter, usage) in usage_meters {
|
||||
let meters = usage_meters_by_user_id.entry(usage.user_id).or_default();
|
||||
meters.push(usage_meter);
|
||||
}
|
||||
|
||||
log::info!("Stripe usage sync: Retrieving Zed Pro subscriptions");
|
||||
let get_zed_pro_subscriptions_started_at = Utc::now();
|
||||
let billing_subscriptions = app.db.get_active_zed_pro_billing_subscriptions().await?;
|
||||
log::info!(
|
||||
"Stripe usage sync: Retrieved {} Zed Pro subscriptions in {}",
|
||||
billing_subscriptions.len(),
|
||||
Utc::now() - get_zed_pro_subscriptions_started_at
|
||||
);
|
||||
|
||||
let claude_sonnet_4 = stripe_billing
|
||||
.find_price_by_lookup_key("claude-sonnet-4-requests")
|
||||
@@ -1451,59 +1324,90 @@ async fn sync_model_request_usage_with_stripe(
|
||||
.find_price_by_lookup_key("claude-3-7-sonnet-requests-max")
|
||||
.await?;
|
||||
|
||||
let usage_meter_count = usage_meters.len();
|
||||
let model_mode_combinations = [
|
||||
("claude-opus-4", CompletionMode::Max),
|
||||
("claude-opus-4", CompletionMode::Normal),
|
||||
("claude-sonnet-4", CompletionMode::Max),
|
||||
("claude-sonnet-4", CompletionMode::Normal),
|
||||
("claude-3-7-sonnet", CompletionMode::Max),
|
||||
("claude-3-7-sonnet", CompletionMode::Normal),
|
||||
("claude-3-5-sonnet", CompletionMode::Normal),
|
||||
];
|
||||
|
||||
log::info!("Stripe usage sync: Syncing {usage_meter_count} usage meters");
|
||||
let billing_subscription_count = billing_subscriptions.len();
|
||||
|
||||
for (usage_meter, usage) in usage_meters {
|
||||
log::info!("Stripe usage sync: Syncing {billing_subscription_count} Zed Pro subscriptions");
|
||||
|
||||
for (user_id, (billing_customer, billing_subscription)) in billing_subscriptions {
|
||||
maybe!(async {
|
||||
let Some((billing_customer, billing_subscription)) =
|
||||
billing_subscriptions.get(&usage.user_id)
|
||||
else {
|
||||
bail!(
|
||||
"Attempted to sync usage meter for user who is not a Stripe customer: {}",
|
||||
usage.user_id
|
||||
);
|
||||
};
|
||||
if staff_user_ids.contains(&user_id) {
|
||||
return anyhow::Ok(());
|
||||
}
|
||||
|
||||
let stripe_customer_id =
|
||||
StripeCustomerId(billing_customer.stripe_customer_id.clone().into());
|
||||
let stripe_subscription_id =
|
||||
StripeSubscriptionId(billing_subscription.stripe_subscription_id.clone().into());
|
||||
|
||||
let model = llm_db.model_by_id(usage_meter.model_id)?;
|
||||
let usage_meters = usage_meters_by_user_id.get(&user_id);
|
||||
|
||||
let (price, meter_event_name) = match model.name.as_str() {
|
||||
"claude-opus-4" => match usage_meter.mode {
|
||||
CompletionMode::Normal => (&claude_opus_4, "claude_opus_4/requests"),
|
||||
CompletionMode::Max => (&claude_opus_4_max, "claude_opus_4/requests/max"),
|
||||
},
|
||||
"claude-sonnet-4" => match usage_meter.mode {
|
||||
CompletionMode::Normal => (&claude_sonnet_4, "claude_sonnet_4/requests"),
|
||||
CompletionMode::Max => (&claude_sonnet_4_max, "claude_sonnet_4/requests/max"),
|
||||
},
|
||||
"claude-3-5-sonnet" => (&claude_3_5_sonnet, "claude_3_5_sonnet/requests"),
|
||||
"claude-3-7-sonnet" => match usage_meter.mode {
|
||||
CompletionMode::Normal => (&claude_3_7_sonnet, "claude_3_7_sonnet/requests"),
|
||||
CompletionMode::Max => {
|
||||
(&claude_3_7_sonnet_max, "claude_3_7_sonnet/requests/max")
|
||||
for (model, mode) in &model_mode_combinations {
|
||||
let Ok(model) =
|
||||
llm_db.model(LanguageModelProvider::Anthropic, model)
|
||||
else {
|
||||
log::warn!("Failed to load model for user {user_id}: {model}");
|
||||
continue;
|
||||
};
|
||||
|
||||
let (price, meter_event_name) = match model.name.as_str() {
|
||||
"claude-opus-4" => match mode {
|
||||
CompletionMode::Normal => (&claude_opus_4, "claude_opus_4/requests"),
|
||||
CompletionMode::Max => (&claude_opus_4_max, "claude_opus_4/requests/max"),
|
||||
},
|
||||
"claude-sonnet-4" => match mode {
|
||||
CompletionMode::Normal => (&claude_sonnet_4, "claude_sonnet_4/requests"),
|
||||
CompletionMode::Max => {
|
||||
(&claude_sonnet_4_max, "claude_sonnet_4/requests/max")
|
||||
}
|
||||
},
|
||||
"claude-3-5-sonnet" => (&claude_3_5_sonnet, "claude_3_5_sonnet/requests"),
|
||||
"claude-3-7-sonnet" => match mode {
|
||||
CompletionMode::Normal => {
|
||||
(&claude_3_7_sonnet, "claude_3_7_sonnet/requests")
|
||||
}
|
||||
CompletionMode::Max => {
|
||||
(&claude_3_7_sonnet_max, "claude_3_7_sonnet/requests/max")
|
||||
}
|
||||
},
|
||||
model_name => {
|
||||
bail!("Attempted to sync usage meter for unsupported model: {model_name:?}")
|
||||
}
|
||||
},
|
||||
model_name => {
|
||||
bail!("Attempted to sync usage meter for unsupported model: {model_name:?}")
|
||||
}
|
||||
};
|
||||
};
|
||||
|
||||
stripe_billing
|
||||
.subscribe_to_price(&stripe_subscription_id, price)
|
||||
.await?;
|
||||
stripe_billing
|
||||
.bill_model_request_usage(
|
||||
&stripe_customer_id,
|
||||
meter_event_name,
|
||||
usage_meter.requests,
|
||||
)
|
||||
.await?;
|
||||
let model_requests = usage_meters
|
||||
.and_then(|usage_meters| {
|
||||
usage_meters
|
||||
.iter()
|
||||
.find(|meter| meter.model_id == model.id && meter.mode == *mode)
|
||||
})
|
||||
.map(|usage_meter| usage_meter.requests)
|
||||
.unwrap_or(0);
|
||||
|
||||
if model_requests > 0 {
|
||||
stripe_billing
|
||||
.subscribe_to_price(&stripe_subscription_id, price)
|
||||
.await?;
|
||||
}
|
||||
|
||||
stripe_billing
|
||||
.bill_model_request_usage(&stripe_customer_id, meter_event_name, model_requests)
|
||||
.await
|
||||
.with_context(|| {
|
||||
format!(
|
||||
"Failed to bill model request usage of {model_requests} for {stripe_customer_id}: {meter_event_name}",
|
||||
)
|
||||
})?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
})
|
||||
@@ -1512,7 +1416,7 @@ async fn sync_model_request_usage_with_stripe(
    }

    log::info!(
        "Stripe usage sync: Synced {usage_meter_count} usage meters in {:?}",
        "Stripe usage sync: Synced {billing_subscription_count} Zed Pro subscriptions in {}",
        Utc::now() - started_at
    );

@@ -1,83 +0,0 @@
use serde::Serialize;

/// A number of cents.
#[derive(
    Debug,
    PartialEq,
    Eq,
    PartialOrd,
    Ord,
    Hash,
    Clone,
    Copy,
    derive_more::Add,
    derive_more::AddAssign,
    derive_more::Sub,
    derive_more::SubAssign,
    Serialize,
)]
pub struct Cents(pub u32);

impl Cents {
    pub const ZERO: Self = Self(0);

    pub const fn new(cents: u32) -> Self {
        Self(cents)
    }

    pub const fn from_dollars(dollars: u32) -> Self {
        Self(dollars * 100)
    }

    pub fn saturating_sub(self, other: Cents) -> Self {
        Self(self.0.saturating_sub(other.0))
    }
}

#[cfg(test)]
mod tests {
    use pretty_assertions::assert_eq;

    use super::*;

    #[test]
    fn test_cents_new() {
        assert_eq!(Cents::new(50), Cents(50));
    }

    #[test]
    fn test_cents_from_dollars() {
        assert_eq!(Cents::from_dollars(1), Cents(100));
        assert_eq!(Cents::from_dollars(5), Cents(500));
    }

    #[test]
    fn test_cents_zero() {
        assert_eq!(Cents::ZERO, Cents(0));
    }

    #[test]
    fn test_cents_add() {
        assert_eq!(Cents(50) + Cents(30), Cents(80));
    }

    #[test]
    fn test_cents_add_assign() {
        let mut cents = Cents(50);
        cents += Cents(30);
        assert_eq!(cents, Cents(80));
    }

    #[test]
    fn test_cents_saturating_sub() {
        assert_eq!(Cents(50).saturating_sub(Cents(30)), Cents(20));
        assert_eq!(Cents(30).saturating_sub(Cents(50)), Cents(0));
    }

    #[test]
    fn test_cents_ordering() {
        assert!(Cents(50) > Cents(30));
        assert!(Cents(30) < Cents(50));
        assert_eq!(Cents(50), Cents(50));
    }
}
@@ -199,6 +199,33 @@ impl Database {

    pub async fn get_active_zed_pro_billing_subscriptions(
        &self,
    ) -> Result<HashMap<UserId, (billing_customer::Model, billing_subscription::Model)>> {
        self.transaction(|tx| async move {
            let mut rows = billing_subscription::Entity::find()
                .inner_join(billing_customer::Entity)
                .select_also(billing_customer::Entity)
                .filter(
                    billing_subscription::Column::StripeSubscriptionStatus
                        .eq(StripeSubscriptionStatus::Active),
                )
                .filter(billing_subscription::Column::Kind.eq(SubscriptionKind::ZedPro))
                .order_by_asc(billing_subscription::Column::Id)
                .stream(&*tx)
                .await?;

            let mut subscriptions = HashMap::default();
            while let Some(row) = rows.next().await {
                if let (subscription, Some(customer)) = row? {
                    subscriptions.insert(customer.user_id, (customer, subscription));
                }
            }
            Ok(subscriptions)
        })
        .await
    }

    pub async fn get_active_zed_pro_billing_subscriptions_for_users(
        &self,
        user_ids: HashSet<UserId>,
    ) -> Result<HashMap<UserId, (billing_customer::Model, billing_subscription::Model)>> {
        self.transaction(|tx| {
@@ -1,6 +1,5 @@
pub mod api;
pub mod auth;
mod cents;
pub mod db;
pub mod env;
pub mod executor;
@@ -21,7 +20,6 @@ use axum::{
    http::{HeaderMap, StatusCode},
    response::IntoResponse,
};
pub use cents::*;
use db::{ChannelId, Database};
use executor::Executor;
use llm::db::LlmDatabase;
@@ -1,8 +1,6 @@
pub mod db;
mod token;

use crate::Cents;

pub use token::*;

pub const AGENT_EXTENDED_TRIAL_FEATURE_FLAG: &str = "agent-extended-trial";
@@ -12,9 +10,3 @@ pub const BYPASS_ACCOUNT_AGE_CHECK_FEATURE_FLAG: &str = "bypass-account-age-chec

/// The minimum account age an account must have in order to use the LLM service.
pub const MIN_ACCOUNT_AGE_FOR_LLM_USE: chrono::Duration = chrono::Duration::days(30);

/// The default value to use for maximum spend per month if the user did not
/// explicitly set a maximum spend.
///
/// Used to prevent surprise bills.
pub const DEFAULT_MAX_MONTHLY_SPEND: Cents = Cents::from_dollars(10);
@@ -2836,62 +2836,117 @@ async fn make_update_user_plan_message(
|
||||
account_too_young: Some(account_too_young),
|
||||
has_overdue_invoices: billing_customer
|
||||
.map(|billing_customer| billing_customer.has_overdue_invoices),
|
||||
usage: usage.map(|usage| {
|
||||
let plan = match plan {
|
||||
proto::Plan::Free => zed_llm_client::Plan::ZedFree,
|
||||
proto::Plan::ZedPro => zed_llm_client::Plan::ZedPro,
|
||||
proto::Plan::ZedProTrial => zed_llm_client::Plan::ZedProTrial,
|
||||
};
|
||||
|
||||
let model_requests_limit = match plan.model_requests_limit() {
|
||||
zed_llm_client::UsageLimit::Limited(limit) => {
|
||||
let limit = if plan == zed_llm_client::Plan::ZedProTrial
|
||||
&& feature_flags
|
||||
.iter()
|
||||
.any(|flag| flag == AGENT_EXTENDED_TRIAL_FEATURE_FLAG)
|
||||
{
|
||||
1_000
|
||||
} else {
|
||||
limit
|
||||
};
|
||||
|
||||
zed_llm_client::UsageLimit::Limited(limit)
|
||||
}
|
||||
zed_llm_client::UsageLimit::Unlimited => zed_llm_client::UsageLimit::Unlimited,
|
||||
};
|
||||
|
||||
proto::SubscriptionUsage {
|
||||
model_requests_usage_amount: usage.model_requests as u32,
|
||||
model_requests_usage_limit: Some(proto::UsageLimit {
|
||||
variant: Some(match model_requests_limit {
|
||||
zed_llm_client::UsageLimit::Limited(limit) => {
|
||||
proto::usage_limit::Variant::Limited(proto::usage_limit::Limited {
|
||||
limit: limit as u32,
|
||||
})
|
||||
}
|
||||
zed_llm_client::UsageLimit::Unlimited => {
|
||||
proto::usage_limit::Variant::Unlimited(proto::usage_limit::Unlimited {})
|
||||
}
|
||||
}),
|
||||
}),
|
||||
edit_predictions_usage_amount: usage.edit_predictions as u32,
|
||||
edit_predictions_usage_limit: Some(proto::UsageLimit {
|
||||
variant: Some(match plan.edit_predictions_limit() {
|
||||
zed_llm_client::UsageLimit::Limited(limit) => {
|
||||
proto::usage_limit::Variant::Limited(proto::usage_limit::Limited {
|
||||
limit: limit as u32,
|
||||
})
|
||||
}
|
||||
zed_llm_client::UsageLimit::Unlimited => {
|
||||
proto::usage_limit::Variant::Unlimited(proto::usage_limit::Unlimited {})
|
||||
}
|
||||
}),
|
||||
}),
|
||||
}
|
||||
}),
|
||||
usage: Some(
|
||||
usage
|
||||
.map(|usage| subscription_usage_to_proto(plan, usage, &feature_flags))
|
||||
.unwrap_or_else(|| make_default_subscription_usage(plan, &feature_flags)),
|
||||
),
|
||||
})
|
||||
}
|
||||
|
||||
fn model_requests_limit(
|
||||
plan: zed_llm_client::Plan,
|
||||
feature_flags: &Vec<String>,
|
||||
) -> zed_llm_client::UsageLimit {
|
||||
match plan.model_requests_limit() {
|
||||
zed_llm_client::UsageLimit::Limited(limit) => {
|
||||
let limit = if plan == zed_llm_client::Plan::ZedProTrial
|
||||
&& feature_flags
|
||||
.iter()
|
||||
.any(|flag| flag == AGENT_EXTENDED_TRIAL_FEATURE_FLAG)
|
||||
{
|
||||
1_000
|
||||
} else {
|
||||
limit
|
||||
};
|
||||
|
||||
zed_llm_client::UsageLimit::Limited(limit)
|
||||
}
|
||||
zed_llm_client::UsageLimit::Unlimited => zed_llm_client::UsageLimit::Unlimited,
|
||||
}
|
||||
}
|
||||
|
||||
fn subscription_usage_to_proto(
|
||||
plan: proto::Plan,
|
||||
usage: crate::llm::db::subscription_usage::Model,
|
||||
feature_flags: &Vec<String>,
|
||||
) -> proto::SubscriptionUsage {
|
||||
let plan = match plan {
|
||||
proto::Plan::Free => zed_llm_client::Plan::ZedFree,
|
||||
proto::Plan::ZedPro => zed_llm_client::Plan::ZedPro,
|
||||
proto::Plan::ZedProTrial => zed_llm_client::Plan::ZedProTrial,
|
||||
};
|
||||
|
||||
proto::SubscriptionUsage {
|
||||
model_requests_usage_amount: usage.model_requests as u32,
|
||||
model_requests_usage_limit: Some(proto::UsageLimit {
|
||||
variant: Some(match model_requests_limit(plan, feature_flags) {
|
||||
zed_llm_client::UsageLimit::Limited(limit) => {
|
||||
proto::usage_limit::Variant::Limited(proto::usage_limit::Limited {
|
||||
limit: limit as u32,
|
||||
})
|
||||
}
|
||||
zed_llm_client::UsageLimit::Unlimited => {
|
||||
proto::usage_limit::Variant::Unlimited(proto::usage_limit::Unlimited {})
|
||||
}
|
||||
}),
|
||||
}),
|
||||
edit_predictions_usage_amount: usage.edit_predictions as u32,
|
||||
edit_predictions_usage_limit: Some(proto::UsageLimit {
|
||||
variant: Some(match plan.edit_predictions_limit() {
|
||||
zed_llm_client::UsageLimit::Limited(limit) => {
|
||||
proto::usage_limit::Variant::Limited(proto::usage_limit::Limited {
|
||||
limit: limit as u32,
|
||||
})
|
||||
}
|
||||
zed_llm_client::UsageLimit::Unlimited => {
|
||||
proto::usage_limit::Variant::Unlimited(proto::usage_limit::Unlimited {})
|
||||
}
|
||||
}),
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
fn make_default_subscription_usage(
|
||||
plan: proto::Plan,
|
||||
feature_flags: &Vec<String>,
|
||||
) -> proto::SubscriptionUsage {
|
||||
let plan = match plan {
|
||||
proto::Plan::Free => zed_llm_client::Plan::ZedFree,
|
||||
proto::Plan::ZedPro => zed_llm_client::Plan::ZedPro,
|
||||
proto::Plan::ZedProTrial => zed_llm_client::Plan::ZedProTrial,
|
||||
};
|
||||
|
||||
proto::SubscriptionUsage {
|
||||
model_requests_usage_amount: 0,
|
||||
model_requests_usage_limit: Some(proto::UsageLimit {
|
||||
variant: Some(match model_requests_limit(plan, feature_flags) {
|
||||
zed_llm_client::UsageLimit::Limited(limit) => {
|
||||
proto::usage_limit::Variant::Limited(proto::usage_limit::Limited {
|
||||
limit: limit as u32,
|
||||
})
|
||||
}
|
||||
zed_llm_client::UsageLimit::Unlimited => {
|
||||
proto::usage_limit::Variant::Unlimited(proto::usage_limit::Unlimited {})
|
||||
}
|
||||
}),
|
||||
}),
|
||||
edit_predictions_usage_amount: 0,
|
||||
edit_predictions_usage_limit: Some(proto::UsageLimit {
|
||||
variant: Some(match plan.edit_predictions_limit() {
|
||||
zed_llm_client::UsageLimit::Limited(limit) => {
|
||||
proto::usage_limit::Variant::Limited(proto::usage_limit::Limited {
|
||||
limit: limit as u32,
|
||||
})
|
||||
}
|
||||
zed_llm_client::UsageLimit::Unlimited => {
|
||||
proto::usage_limit::Variant::Unlimited(proto::usage_limit::Unlimited {})
|
||||
}
|
||||
}),
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
async fn update_user_plan(session: &Session) -> Result<()> {
|
||||
let db = session.db().await;
|
||||
|
||||
|
||||
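The hunk above extracts the trial-limit bump into a reusable model_requests_limit helper. A minimal, self-contained sketch of that rule, using local stand-in Plan and UsageLimit enums rather than the real zed_llm_client types (the 150-request base below is an arbitrary placeholder):

// Sketch only: Plan and UsageLimit are local stand-ins, not zed_llm_client's types.
#[allow(dead_code)]
#[derive(PartialEq, Clone, Copy)]
enum Plan { ZedFree, ZedPro, ZedProTrial }

#[derive(Debug, PartialEq, Clone, Copy)]
enum UsageLimit { Limited(i32), Unlimited }

const AGENT_EXTENDED_TRIAL_FEATURE_FLAG: &str = "agent-extended-trial";

fn model_requests_limit(plan: Plan, base: UsageLimit, feature_flags: &[String]) -> UsageLimit {
    match base {
        UsageLimit::Limited(limit) => {
            // Trial users with the extended-trial flag get 1,000 requests
            // instead of the plan's default trial limit.
            let extended = plan == Plan::ZedProTrial
                && feature_flags.iter().any(|flag| flag == AGENT_EXTENDED_TRIAL_FEATURE_FLAG);
            UsageLimit::Limited(if extended { 1_000 } else { limit })
        }
        UsageLimit::Unlimited => UsageLimit::Unlimited,
    }
}

fn main() {
    let flags = vec![AGENT_EXTENDED_TRIAL_FEATURE_FLAG.to_string()];
    // 150 is an arbitrary placeholder for the plan's default trial limit.
    assert_eq!(
        model_requests_limit(Plan::ZedProTrial, UsageLimit::Limited(150), &flags),
        UsageLimit::Limited(1_000)
    );
}
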
@@ -19,8 +19,8 @@ use crate::stripe_client::{
    StripeCustomerId, StripeCustomerUpdate, StripeCustomerUpdateAddress, StripeCustomerUpdateName,
    StripeMeter, StripePrice, StripePriceId, StripeSubscription, StripeSubscriptionId,
    StripeSubscriptionTrialSettings, StripeSubscriptionTrialSettingsEndBehavior,
    StripeSubscriptionTrialSettingsEndBehaviorMissingPaymentMethod, UpdateSubscriptionItems,
    UpdateSubscriptionParams,
    StripeSubscriptionTrialSettingsEndBehaviorMissingPaymentMethod, StripeTaxIdCollection,
    UpdateSubscriptionItems, UpdateSubscriptionParams,
};

pub struct StripeBilling {
@@ -252,6 +252,7 @@ impl StripeBilling {
            name: Some(StripeCustomerUpdateName::Auto),
            shipping: None,
        });
        params.tax_id_collection = Some(StripeTaxIdCollection { enabled: true });

        let session = self.client.create_checkout_session(params).await?;
        Ok(session.url.context("no checkout session URL")?)
@@ -311,6 +312,7 @@ impl StripeBilling {
            name: Some(StripeCustomerUpdateName::Auto),
            shipping: None,
        });
        params.tax_id_collection = Some(StripeTaxIdCollection { enabled: true });

        let session = self.client.create_checkout_session(params).await?;
        Ok(session.url.context("no checkout session URL")?)

@@ -190,6 +190,7 @@ pub struct StripeCreateCheckoutSessionParams<'a> {
    pub success_url: Option<&'a str>,
    pub billing_address_collection: Option<StripeBillingAddressCollection>,
    pub customer_update: Option<StripeCustomerUpdate>,
    pub tax_id_collection: Option<StripeTaxIdCollection>,
}

#[derive(Debug, PartialEq, Eq, Clone, Copy)]
@@ -218,6 +219,11 @@ pub struct StripeCreateCheckoutSessionSubscriptionData {
    pub trial_settings: Option<StripeSubscriptionTrialSettings>,
}

#[derive(Debug, PartialEq, Clone)]
pub struct StripeTaxIdCollection {
    pub enabled: bool,
}

#[derive(Debug)]
pub struct StripeCheckoutSession {
    pub url: Option<String>,
@@ -14,8 +14,8 @@ use crate::stripe_client::{
|
||||
StripeCreateCheckoutSessionSubscriptionData, StripeCreateMeterEventParams,
|
||||
StripeCreateSubscriptionParams, StripeCustomer, StripeCustomerId, StripeCustomerUpdate,
|
||||
StripeMeter, StripeMeterId, StripePrice, StripePriceId, StripeSubscription,
|
||||
StripeSubscriptionId, StripeSubscriptionItem, StripeSubscriptionItemId, UpdateCustomerParams,
|
||||
UpdateSubscriptionParams,
|
||||
StripeSubscriptionId, StripeSubscriptionItem, StripeSubscriptionItemId, StripeTaxIdCollection,
|
||||
UpdateCustomerParams, UpdateSubscriptionParams,
|
||||
};
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
@@ -38,6 +38,7 @@ pub struct StripeCreateCheckoutSessionCall {
|
||||
pub success_url: Option<String>,
|
||||
pub billing_address_collection: Option<StripeBillingAddressCollection>,
|
||||
pub customer_update: Option<StripeCustomerUpdate>,
|
||||
pub tax_id_collection: Option<StripeTaxIdCollection>,
|
||||
}
|
||||
|
||||
pub struct FakeStripeClient {
|
||||
@@ -236,6 +237,7 @@ impl StripeClient for FakeStripeClient {
|
||||
success_url: params.success_url.map(|url| url.to_string()),
|
||||
billing_address_collection: params.billing_address_collection,
|
||||
customer_update: params.customer_update,
|
||||
tax_id_collection: params.tax_id_collection,
|
||||
});
|
||||
|
||||
Ok(StripeCheckoutSession {
|
||||
|
||||
@@ -27,8 +27,8 @@ use crate::stripe_client::{
|
||||
StripeMeter, StripePrice, StripePriceId, StripePriceRecurring, StripeSubscription,
|
||||
StripeSubscriptionId, StripeSubscriptionItem, StripeSubscriptionItemId,
|
||||
StripeSubscriptionTrialSettings, StripeSubscriptionTrialSettingsEndBehavior,
|
||||
StripeSubscriptionTrialSettingsEndBehaviorMissingPaymentMethod, UpdateCustomerParams,
|
||||
UpdateSubscriptionParams,
|
||||
StripeSubscriptionTrialSettingsEndBehaviorMissingPaymentMethod, StripeTaxIdCollection,
|
||||
UpdateCustomerParams, UpdateSubscriptionParams,
|
||||
};
|
||||
|
||||
pub struct RealStripeClient {
|
||||
@@ -448,6 +448,7 @@ impl<'a> TryFrom<StripeCreateCheckoutSessionParams<'a>> for CreateCheckoutSessio
|
||||
success_url: value.success_url,
|
||||
billing_address_collection: value.billing_address_collection.map(Into::into),
|
||||
customer_update: value.customer_update.map(Into::into),
|
||||
tax_id_collection: value.tax_id_collection.map(Into::into),
|
||||
..Default::default()
|
||||
})
|
||||
}
|
||||
@@ -590,3 +591,11 @@ impl From<StripeCustomerUpdate> for stripe::CreateCheckoutSessionCustomerUpdate
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<StripeTaxIdCollection> for stripe::CreateCheckoutSessionTaxIdCollection {
|
||||
fn from(value: StripeTaxIdCollection) -> Self {
|
||||
stripe::CreateCheckoutSessionTaxIdCollection {
|
||||
enabled: value.enabled,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2246,8 +2246,11 @@ async fn test_lsp_document_color(cx_a: &mut TestAppContext, cx_b: &mut TestAppCo
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 10)]
|
||||
async fn test_lsp_pull_diagnostics(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) {
|
||||
async fn test_lsp_pull_diagnostics(
|
||||
should_stream_workspace_diagnostic: bool,
|
||||
cx_a: &mut TestAppContext,
|
||||
cx_b: &mut TestAppContext,
|
||||
) {
|
||||
let mut server = TestServer::start(cx_a.executor()).await;
|
||||
let executor = cx_a.executor();
|
||||
let client_a = server.create_client(cx_a, "user_a").await;
|
||||
@@ -2396,12 +2399,25 @@ async fn test_lsp_pull_diagnostics(cx_a: &mut TestAppContext, cx_b: &mut TestApp
|
||||
let closure_workspace_diagnostics_pulls_made = workspace_diagnostics_pulls_made.clone();
|
||||
let closure_workspace_diagnostics_pulls_result_ids =
|
||||
workspace_diagnostics_pulls_result_ids.clone();
|
||||
let (workspace_diagnostic_cancel_tx, closure_workspace_diagnostic_cancel_rx) =
|
||||
smol::channel::bounded::<()>(1);
|
||||
let (closure_workspace_diagnostic_received_tx, workspace_diagnostic_received_rx) =
|
||||
smol::channel::bounded::<()>(1);
|
||||
let expected_workspace_diagnostic_token = lsp::ProgressToken::String(format!(
|
||||
"workspace/diagnostic-{}-1",
|
||||
fake_language_server.server.server_id()
|
||||
));
|
||||
let closure_expected_workspace_diagnostic_token = expected_workspace_diagnostic_token.clone();
|
||||
let mut workspace_diagnostics_pulls_handle = fake_language_server
|
||||
.set_request_handler::<lsp::request::WorkspaceDiagnosticRequest, _, _>(
|
||||
move |params, _| {
|
||||
let workspace_requests_made = closure_workspace_diagnostics_pulls_made.clone();
|
||||
let workspace_diagnostics_pulls_result_ids =
|
||||
closure_workspace_diagnostics_pulls_result_ids.clone();
|
||||
let workspace_diagnostic_cancel_rx = closure_workspace_diagnostic_cancel_rx.clone();
|
||||
let workspace_diagnostic_received_tx = closure_workspace_diagnostic_received_tx.clone();
|
||||
let expected_workspace_diagnostic_token =
|
||||
closure_expected_workspace_diagnostic_token.clone();
|
||||
async move {
|
||||
let workspace_request_count =
|
||||
workspace_requests_made.fetch_add(1, atomic::Ordering::Release) + 1;
|
||||
@@ -2411,6 +2427,21 @@ async fn test_lsp_pull_diagnostics(cx_a: &mut TestAppContext, cx_b: &mut TestApp
|
||||
.await
|
||||
.extend(params.previous_result_ids.into_iter().map(|id| id.value));
|
||||
}
|
||||
if should_stream_workspace_diagnostic && !workspace_diagnostic_cancel_rx.is_closed()
|
||||
{
|
||||
assert_eq!(
|
||||
params.partial_result_params.partial_result_token,
|
||||
Some(expected_workspace_diagnostic_token)
|
||||
);
|
||||
workspace_diagnostic_received_tx.send(()).await.unwrap();
|
||||
workspace_diagnostic_cancel_rx.recv().await.unwrap();
|
||||
workspace_diagnostic_cancel_rx.close();
|
||||
// https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#partialResults
|
||||
// > The final response has to be empty in terms of result values.
|
||||
return Ok(lsp::WorkspaceDiagnosticReportResult::Report(
|
||||
lsp::WorkspaceDiagnosticReport { items: Vec::new() },
|
||||
));
|
||||
}
|
||||
Ok(lsp::WorkspaceDiagnosticReportResult::Report(
|
||||
lsp::WorkspaceDiagnosticReport {
|
||||
items: vec![
|
||||
@@ -2479,7 +2510,11 @@ async fn test_lsp_pull_diagnostics(cx_a: &mut TestAppContext, cx_b: &mut TestApp
|
||||
},
|
||||
);
|
||||
|
||||
workspace_diagnostics_pulls_handle.next().await.unwrap();
|
||||
if should_stream_workspace_diagnostic {
|
||||
workspace_diagnostic_received_rx.recv().await.unwrap();
|
||||
} else {
|
||||
workspace_diagnostics_pulls_handle.next().await.unwrap();
|
||||
}
|
||||
assert_eq!(
|
||||
1,
|
||||
workspace_diagnostics_pulls_made.load(atomic::Ordering::Acquire),
|
||||
@@ -2503,10 +2538,10 @@ async fn test_lsp_pull_diagnostics(cx_a: &mut TestAppContext, cx_b: &mut TestApp
|
||||
"Expected single diagnostic, but got: {all_diagnostics:?}"
|
||||
);
|
||||
let diagnostic = &all_diagnostics[0];
|
||||
let expected_messages = [
|
||||
expected_workspace_pull_diagnostics_main_message,
|
||||
expected_pull_diagnostic_main_message,
|
||||
];
|
||||
let mut expected_messages = vec![expected_pull_diagnostic_main_message];
|
||||
if !should_stream_workspace_diagnostic {
|
||||
expected_messages.push(expected_workspace_pull_diagnostics_main_message);
|
||||
}
|
||||
assert!(
|
||||
expected_messages.contains(&diagnostic.diagnostic.message.as_str()),
|
||||
"Expected {expected_messages:?} on the host, but got: {}",
|
||||
@@ -2556,6 +2591,70 @@ async fn test_lsp_pull_diagnostics(cx_a: &mut TestAppContext, cx_b: &mut TestApp
|
||||
version: None,
|
||||
},
|
||||
);
|
||||
|
||||
if should_stream_workspace_diagnostic {
|
||||
fake_language_server.notify::<lsp::notification::Progress>(&lsp::ProgressParams {
|
||||
token: expected_workspace_diagnostic_token.clone(),
|
||||
value: lsp::ProgressParamsValue::WorkspaceDiagnostic(
|
||||
lsp::WorkspaceDiagnosticReportResult::Report(lsp::WorkspaceDiagnosticReport {
|
||||
items: vec![
|
||||
lsp::WorkspaceDocumentDiagnosticReport::Full(
|
||||
lsp::WorkspaceFullDocumentDiagnosticReport {
|
||||
uri: lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
|
||||
version: None,
|
||||
full_document_diagnostic_report:
|
||||
lsp::FullDocumentDiagnosticReport {
|
||||
result_id: Some(format!(
|
||||
"workspace_{}",
|
||||
workspace_diagnostics_pulls_made
|
||||
.fetch_add(1, atomic::Ordering::Release)
|
||||
+ 1
|
||||
)),
|
||||
items: vec![lsp::Diagnostic {
|
||||
range: lsp::Range {
|
||||
start: lsp::Position {
|
||||
line: 0,
|
||||
character: 1,
|
||||
},
|
||||
end: lsp::Position {
|
||||
line: 0,
|
||||
character: 2,
|
||||
},
|
||||
},
|
||||
severity: Some(lsp::DiagnosticSeverity::ERROR),
|
||||
message:
|
||||
expected_workspace_pull_diagnostics_main_message
|
||||
.to_string(),
|
||||
..lsp::Diagnostic::default()
|
||||
}],
|
||||
},
|
||||
},
|
||||
),
|
||||
lsp::WorkspaceDocumentDiagnosticReport::Full(
|
||||
lsp::WorkspaceFullDocumentDiagnosticReport {
|
||||
uri: lsp::Url::from_file_path(path!("/a/lib.rs")).unwrap(),
|
||||
version: None,
|
||||
full_document_diagnostic_report:
|
||||
lsp::FullDocumentDiagnosticReport {
|
||||
result_id: Some(format!(
|
||||
"workspace_{}",
|
||||
workspace_diagnostics_pulls_made
|
||||
.fetch_add(1, atomic::Ordering::Release)
|
||||
+ 1
|
||||
)),
|
||||
items: Vec::new(),
|
||||
},
|
||||
},
|
||||
),
|
||||
],
|
||||
}),
|
||||
),
|
||||
});
|
||||
};
|
||||
|
||||
let mut workspace_diagnostic_start_count =
|
||||
workspace_diagnostics_pulls_made.load(atomic::Ordering::Acquire);
|
||||
|
||||
executor.run_until_parked();
|
||||
editor_a_main.update(cx_a, |editor, cx| {
|
||||
let snapshot = editor.buffer().read(cx).snapshot(cx);
|
||||
@@ -2599,7 +2698,7 @@ async fn test_lsp_pull_diagnostics(cx_a: &mut TestAppContext, cx_b: &mut TestApp
|
||||
);
|
||||
executor.run_until_parked();
|
||||
assert_eq!(
|
||||
1,
|
||||
workspace_diagnostic_start_count,
|
||||
workspace_diagnostics_pulls_made.load(atomic::Ordering::Acquire),
|
||||
"Workspace diagnostics should not be changed as the remote client does not initialize the workspace diagnostics pull"
|
||||
);
|
||||
@@ -2646,7 +2745,7 @@ async fn test_lsp_pull_diagnostics(cx_a: &mut TestAppContext, cx_b: &mut TestApp
|
||||
);
|
||||
executor.run_until_parked();
|
||||
assert_eq!(
|
||||
1,
|
||||
workspace_diagnostic_start_count,
|
||||
workspace_diagnostics_pulls_made.load(atomic::Ordering::Acquire),
|
||||
"The remote client still did not anything to trigger the workspace diagnostics pull"
|
||||
);
|
||||
@@ -2673,6 +2772,75 @@ async fn test_lsp_pull_diagnostics(cx_a: &mut TestAppContext, cx_b: &mut TestApp
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
if should_stream_workspace_diagnostic {
|
||||
fake_language_server.notify::<lsp::notification::Progress>(&lsp::ProgressParams {
|
||||
token: expected_workspace_diagnostic_token.clone(),
|
||||
value: lsp::ProgressParamsValue::WorkspaceDiagnostic(
|
||||
lsp::WorkspaceDiagnosticReportResult::Report(lsp::WorkspaceDiagnosticReport {
|
||||
items: vec![lsp::WorkspaceDocumentDiagnosticReport::Full(
|
||||
lsp::WorkspaceFullDocumentDiagnosticReport {
|
||||
uri: lsp::Url::from_file_path(path!("/a/lib.rs")).unwrap(),
|
||||
version: None,
|
||||
full_document_diagnostic_report: lsp::FullDocumentDiagnosticReport {
|
||||
result_id: Some(format!(
|
||||
"workspace_{}",
|
||||
workspace_diagnostics_pulls_made
|
||||
.fetch_add(1, atomic::Ordering::Release)
|
||||
+ 1
|
||||
)),
|
||||
items: vec![lsp::Diagnostic {
|
||||
range: lsp::Range {
|
||||
start: lsp::Position {
|
||||
line: 0,
|
||||
character: 1,
|
||||
},
|
||||
end: lsp::Position {
|
||||
line: 0,
|
||||
character: 2,
|
||||
},
|
||||
},
|
||||
severity: Some(lsp::DiagnosticSeverity::ERROR),
|
||||
message: expected_workspace_pull_diagnostics_lib_message
|
||||
.to_string(),
|
||||
..lsp::Diagnostic::default()
|
||||
}],
|
||||
},
|
||||
},
|
||||
)],
|
||||
}),
|
||||
),
|
||||
});
|
||||
workspace_diagnostic_start_count =
|
||||
workspace_diagnostics_pulls_made.load(atomic::Ordering::Acquire);
|
||||
workspace_diagnostic_cancel_tx.send(()).await.unwrap();
|
||||
workspace_diagnostics_pulls_handle.next().await.unwrap();
|
||||
executor.run_until_parked();
|
||||
editor_b_lib.update(cx_b, |editor, cx| {
|
||||
let snapshot = editor.buffer().read(cx).snapshot(cx);
|
||||
let all_diagnostics = snapshot
|
||||
.diagnostics_in_range(0..snapshot.len())
|
||||
.collect::<Vec<_>>();
|
||||
let expected_messages = [
|
||||
expected_workspace_pull_diagnostics_lib_message,
|
||||
// TODO bug: the pushed diagnostics are not being sent to the client when they open the corresponding buffer.
|
||||
// expected_push_diagnostic_lib_message,
|
||||
];
|
||||
assert_eq!(
|
||||
all_diagnostics.len(),
|
||||
1,
|
||||
"Expected pull diagnostics, but got: {all_diagnostics:?}"
|
||||
);
|
||||
for diagnostic in all_diagnostics {
|
||||
assert!(
|
||||
expected_messages.contains(&diagnostic.diagnostic.message.as_str()),
|
||||
"The client should get both push and pull messages: {expected_messages:?}, but got: {}",
|
||||
diagnostic.diagnostic.message
|
||||
);
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
{
|
||||
assert!(
|
||||
diagnostics_pulls_result_ids.lock().await.len() > 0,
|
||||
@@ -2701,7 +2869,7 @@ async fn test_lsp_pull_diagnostics(cx_a: &mut TestAppContext, cx_b: &mut TestApp
|
||||
);
|
||||
workspace_diagnostics_pulls_handle.next().await.unwrap();
|
||||
assert_eq!(
|
||||
2,
|
||||
workspace_diagnostic_start_count + 1,
|
||||
workspace_diagnostics_pulls_made.load(atomic::Ordering::Acquire),
|
||||
"After client lib.rs edits, the workspace diagnostics request should follow"
|
||||
);
|
||||
@@ -2720,7 +2888,7 @@ async fn test_lsp_pull_diagnostics(cx_a: &mut TestAppContext, cx_b: &mut TestApp
|
||||
);
|
||||
workspace_diagnostics_pulls_handle.next().await.unwrap();
|
||||
assert_eq!(
|
||||
3,
|
||||
workspace_diagnostic_start_count + 2,
|
||||
workspace_diagnostics_pulls_made.load(atomic::Ordering::Acquire),
|
||||
"After client main.rs edits, the workspace diagnostics pull should follow"
|
||||
);
|
||||
@@ -2739,7 +2907,7 @@ async fn test_lsp_pull_diagnostics(cx_a: &mut TestAppContext, cx_b: &mut TestApp
|
||||
);
|
||||
workspace_diagnostics_pulls_handle.next().await.unwrap();
|
||||
assert_eq!(
|
||||
4,
|
||||
workspace_diagnostic_start_count + 3,
|
||||
workspace_diagnostics_pulls_made.load(atomic::Ordering::Acquire),
|
||||
"After host main.rs edits, the workspace diagnostics pull should follow"
|
||||
);
|
||||
@@ -2769,7 +2937,7 @@ async fn test_lsp_pull_diagnostics(cx_a: &mut TestAppContext, cx_b: &mut TestApp
|
||||
);
|
||||
workspace_diagnostics_pulls_handle.next().await.unwrap();
|
||||
assert_eq!(
|
||||
5,
|
||||
workspace_diagnostic_start_count + 4,
|
||||
workspace_diagnostics_pulls_made.load(atomic::Ordering::Acquire),
|
||||
"Another workspace diagnostics pull should happen after the diagnostics refresh server request"
|
||||
);
|
||||
@@ -2840,6 +3008,19 @@ async fn test_lsp_pull_diagnostics(cx_a: &mut TestAppContext, cx_b: &mut TestApp
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 10)]
|
||||
async fn test_non_streamed_lsp_pull_diagnostics(
|
||||
cx_a: &mut TestAppContext,
|
||||
cx_b: &mut TestAppContext,
|
||||
) {
|
||||
test_lsp_pull_diagnostics(false, cx_a, cx_b).await;
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 10)]
|
||||
async fn test_streamed_lsp_pull_diagnostics(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) {
|
||||
test_lsp_pull_diagnostics(true, cx_a, cx_b).await;
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 10)]
|
||||
async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) {
|
||||
let mut server = TestServer::start(cx_a.executor()).await;
|
||||
|
||||
@@ -1013,7 +1013,7 @@ async fn test_peers_following_each_other(cx_a: &mut TestAppContext, cx_b: &mut T
    // and some of which were originally opened by client B.
    workspace_b.update_in(cx_b, |workspace, window, cx| {
        workspace.active_pane().update(cx, |pane, cx| {
            pane.close_inactive_items(&Default::default(), window, cx)
            pane.close_inactive_items(&Default::default(), None, window, cx)
                .detach();
        });
    });
@@ -2,6 +2,7 @@ use crate::tests::TestServer;
|
||||
use call::ActiveCall;
|
||||
use collections::{HashMap, HashSet};
|
||||
|
||||
use dap::{Capabilities, adapters::DebugTaskDefinition, transport::RequestHandling};
|
||||
use debugger_ui::debugger_panel::DebugPanel;
|
||||
use extension::ExtensionHostProxy;
|
||||
use fs::{FakeFs, Fs as _, RemoveOptions};
|
||||
@@ -22,6 +23,7 @@ use language::{
|
||||
use node_runtime::NodeRuntime;
|
||||
use project::{
|
||||
ProjectPath,
|
||||
debugger::session::ThreadId,
|
||||
lsp_store::{FormatTrigger, LspFormatTarget},
|
||||
};
|
||||
use remote::SshRemoteClient;
|
||||
@@ -29,7 +31,11 @@ use remote_server::{HeadlessAppState, HeadlessProject};
|
||||
use rpc::proto;
|
||||
use serde_json::json;
|
||||
use settings::SettingsStore;
|
||||
use std::{path::Path, sync::Arc};
|
||||
use std::{
|
||||
path::Path,
|
||||
sync::{Arc, atomic::AtomicUsize},
|
||||
};
|
||||
use task::TcpArgumentsTemplate;
|
||||
use util::path;
|
||||
|
||||
#[gpui::test(iterations = 10)]
|
||||
@@ -688,3 +694,162 @@ async fn test_remote_server_debugger(
|
||||
|
||||
shutdown_session.await.unwrap();
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_slow_adapter_startup_retries(
|
||||
cx_a: &mut TestAppContext,
|
||||
server_cx: &mut TestAppContext,
|
||||
executor: BackgroundExecutor,
|
||||
) {
|
||||
cx_a.update(|cx| {
|
||||
release_channel::init(SemanticVersion::default(), cx);
|
||||
command_palette_hooks::init(cx);
|
||||
zlog::init_test();
|
||||
dap_adapters::init(cx);
|
||||
});
|
||||
server_cx.update(|cx| {
|
||||
release_channel::init(SemanticVersion::default(), cx);
|
||||
dap_adapters::init(cx);
|
||||
});
|
||||
let (opts, server_ssh) = SshRemoteClient::fake_server(cx_a, server_cx);
|
||||
let remote_fs = FakeFs::new(server_cx.executor());
|
||||
remote_fs
|
||||
.insert_tree(
|
||||
path!("/code"),
|
||||
json!({
|
||||
"lib.rs": "fn one() -> usize { 1 }"
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
|
||||
// User A connects to the remote project via SSH.
|
||||
server_cx.update(HeadlessProject::init);
|
||||
let remote_http_client = Arc::new(BlockedHttpClient);
|
||||
let node = NodeRuntime::unavailable();
|
||||
let languages = Arc::new(LanguageRegistry::new(server_cx.executor()));
|
||||
let _headless_project = server_cx.new(|cx| {
|
||||
client::init_settings(cx);
|
||||
HeadlessProject::new(
|
||||
HeadlessAppState {
|
||||
session: server_ssh,
|
||||
fs: remote_fs.clone(),
|
||||
http_client: remote_http_client,
|
||||
node_runtime: node,
|
||||
languages,
|
||||
extension_host_proxy: Arc::new(ExtensionHostProxy::new()),
|
||||
},
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
let client_ssh = SshRemoteClient::fake_client(opts, cx_a).await;
|
||||
let mut server = TestServer::start(server_cx.executor()).await;
|
||||
let client_a = server.create_client(cx_a, "user_a").await;
|
||||
cx_a.update(|cx| {
|
||||
debugger_ui::init(cx);
|
||||
command_palette_hooks::init(cx);
|
||||
});
|
||||
let (project_a, _) = client_a
|
||||
.build_ssh_project(path!("/code"), client_ssh.clone(), cx_a)
|
||||
.await;
|
||||
|
||||
let (workspace, cx_a) = client_a.build_workspace(&project_a, cx_a);
|
||||
|
||||
let debugger_panel = workspace
|
||||
.update_in(cx_a, |_workspace, window, cx| {
|
||||
cx.spawn_in(window, DebugPanel::load)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
workspace.update_in(cx_a, |workspace, window, cx| {
|
||||
workspace.add_panel(debugger_panel, window, cx);
|
||||
});
|
||||
|
||||
cx_a.run_until_parked();
|
||||
let debug_panel = workspace
|
||||
.update(cx_a, |workspace, cx| workspace.panel::<DebugPanel>(cx))
|
||||
.unwrap();
|
||||
|
||||
let workspace_window = cx_a
|
||||
.window_handle()
|
||||
.downcast::<workspace::Workspace>()
|
||||
.unwrap();
|
||||
|
||||
let count = Arc::new(AtomicUsize::new(0));
|
||||
let session = debugger_ui::tests::start_debug_session_with(
|
||||
&workspace_window,
|
||||
cx_a,
|
||||
DebugTaskDefinition {
|
||||
adapter: "fake-adapter".into(),
|
||||
label: "test".into(),
|
||||
config: json!({
|
||||
"request": "launch"
|
||||
}),
|
||||
tcp_connection: Some(TcpArgumentsTemplate {
|
||||
port: None,
|
||||
host: None,
|
||||
timeout: None,
|
||||
}),
|
||||
},
|
||||
move |client| {
|
||||
let count = count.clone();
|
||||
client.on_request_ext::<dap::requests::Initialize, _>(move |_seq, _request| {
|
||||
if count.fetch_add(1, std::sync::atomic::Ordering::SeqCst) < 5 {
|
||||
return RequestHandling::Exit;
|
||||
}
|
||||
RequestHandling::Respond(Ok(Capabilities::default()))
|
||||
});
|
||||
},
|
||||
)
|
||||
.unwrap();
|
||||
cx_a.run_until_parked();
|
||||
|
||||
let client = session.update(cx_a, |session, _| session.adapter_client().unwrap());
|
||||
client
|
||||
.fake_event(dap::messages::Events::Stopped(dap::StoppedEvent {
|
||||
reason: dap::StoppedEventReason::Pause,
|
||||
description: None,
|
||||
thread_id: Some(1),
|
||||
preserve_focus_hint: None,
|
||||
text: None,
|
||||
all_threads_stopped: None,
|
||||
hit_breakpoint_ids: None,
|
||||
}))
|
||||
.await;
|
||||
|
||||
cx_a.run_until_parked();
|
||||
|
||||
let active_session = debug_panel
|
||||
.update(cx_a, |this, _| this.active_session())
|
||||
.unwrap();
|
||||
|
||||
let running_state = active_session.update(cx_a, |active_session, _| {
|
||||
active_session.running_state().clone()
|
||||
});
|
||||
|
||||
assert_eq!(
|
||||
client.id(),
|
||||
running_state.read_with(cx_a, |running_state, _| running_state.session_id())
|
||||
);
|
||||
assert_eq!(
|
||||
ThreadId(1),
|
||||
running_state.read_with(cx_a, |running_state, _| running_state
|
||||
.selected_thread_id()
|
||||
.unwrap())
|
||||
);
|
||||
|
||||
let shutdown_session = workspace.update(cx_a, |workspace, cx| {
|
||||
workspace.project().update(cx, |project, cx| {
|
||||
project.dap_store().update(cx, |dap_store, cx| {
|
||||
dap_store.shutdown_session(session.read(cx).session_id(), cx)
|
||||
})
|
||||
})
|
||||
});
|
||||
|
||||
client_ssh.update(cx_a, |a, _| {
|
||||
a.shutdown_processes(Some(proto::ShutdownRemoteServer {}), executor)
|
||||
});
|
||||
|
||||
shutdown_session.await.unwrap();
|
||||
}
|
||||
|
||||
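The test above exercises slow adapter startup by having the fake Initialize handler exit for the first five attempts before responding. A stand-alone sketch of that counting pattern, with a local Outcome enum standing in for RequestHandling:

// Sketch: fail the first N attempts, then succeed; Outcome mirrors the shape of
// RequestHandling but is a local stand-in defined only for this example.
use std::sync::{
    Arc,
    atomic::{AtomicUsize, Ordering},
};

enum Outcome {
    Exit,
    Respond(&'static str),
}

fn main() {
    let count = Arc::new(AtomicUsize::new(0));
    let handler = {
        let count = count.clone();
        move || {
            if count.fetch_add(1, Ordering::SeqCst) < 5 {
                // Simulate the adapter process dying before it can answer.
                Outcome::Exit
            } else {
                Outcome::Respond("capabilities")
            }
        }
    };

    let failures = (0..6)
        .map(|_| handler())
        .filter(|outcome| matches!(outcome, Outcome::Exit))
        .count();
    assert_eq!(failures, 5); // five failed startups, the sixth attempt responds
}
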
@@ -48,20 +48,20 @@ impl RenderOnce for ComponentExample {
            )
            .child(
                div()
                    .flex()
                    .w_full()
                    .rounded_xl()
                    .min_h(px(100.))
                    .justify_center()
                    .w_full()
                    .p_8()
                    .flex()
                    .items_center()
                    .justify_center()
                    .rounded_xl()
                    .border_1()
                    .border_color(cx.theme().colors().border.opacity(0.5))
                    .bg(pattern_slash(
                        cx.theme().colors().surface_background.opacity(0.5),
                        cx.theme().colors().surface_background.opacity(0.25),
                        12.0,
                        12.0,
                    ))
                    .shadow_xs()
                    .child(self.element),
            )
            .into_any_element()
@@ -21,12 +21,14 @@ collections.workspace = true
futures.workspace = true
gpui.workspace = true
log.workspace = true
net.workspace = true
parking_lot.workspace = true
postage.workspace = true
schemars.workspace = true
serde.workspace = true
serde_json.workspace = true
smol.workspace = true
tempfile.workspace = true
url = { workspace = true, features = ["serde"] }
util.workspace = true
workspace-hack.workspace = true
@@ -70,12 +70,12 @@ fn is_null_value<T: Serialize>(value: &T) -> bool {
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
struct Request<'a, T> {
|
||||
jsonrpc: &'static str,
|
||||
id: RequestId,
|
||||
method: &'a str,
|
||||
pub struct Request<'a, T> {
|
||||
pub jsonrpc: &'static str,
|
||||
pub id: RequestId,
|
||||
pub method: &'a str,
|
||||
#[serde(skip_serializing_if = "is_null_value")]
|
||||
params: T,
|
||||
pub params: T,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
@@ -88,18 +88,18 @@ struct AnyResponse<'a> {
|
||||
result: Option<&'a RawValue>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
#[derive(Serialize, Deserialize)]
|
||||
#[allow(dead_code)]
|
||||
struct Response<T> {
|
||||
jsonrpc: &'static str,
|
||||
id: RequestId,
|
||||
pub(crate) struct Response<T> {
|
||||
pub jsonrpc: &'static str,
|
||||
pub id: RequestId,
|
||||
#[serde(flatten)]
|
||||
value: CspResult<T>,
|
||||
pub value: CspResult<T>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
#[derive(Serialize, Deserialize)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
enum CspResult<T> {
|
||||
pub(crate) enum CspResult<T> {
|
||||
#[serde(rename = "result")]
|
||||
Ok(Option<T>),
|
||||
#[allow(dead_code)]
|
||||
@@ -123,8 +123,9 @@ struct AnyNotification<'a> {
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
struct Error {
|
||||
message: String,
|
||||
pub(crate) struct Error {
|
||||
pub message: String,
|
||||
pub code: i32,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Deserialize)]
|
||||
|
||||
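The Request struct made pub above is the JSON-RPC 2.0 envelope the context server writes to the wire. A hedged sketch of that framing using a local mirror of the struct (RequestId is simplified to an integer, and the real struct also skips null params via is_null_value, which this sketch omits):

// Local mirror of the Request shape, serialized with serde_json to show the wire format.
use serde::Serialize;

#[derive(Serialize)]
struct Request<'a, T> {
    jsonrpc: &'static str,
    id: u64,
    method: &'a str,
    params: T,
}

fn main() {
    let request = Request {
        jsonrpc: "2.0",
        id: 1,
        method: "tools/call",
        params: serde_json::json!({ "name": "example" }),
    };
    // Prints: {"jsonrpc":"2.0","id":1,"method":"tools/call","params":{"name":"example"}}
    println!("{}", serde_json::to_string(&request).unwrap());
}
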
@@ -1,4 +1,5 @@
|
||||
pub mod client;
|
||||
pub mod listener;
|
||||
pub mod protocol;
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
pub mod test;
|
||||
|
||||
crates/context_server/src/listener.rs (new file, 236 lines added)
@@ -0,0 +1,236 @@
|
||||
use ::serde::{Deserialize, Serialize};
|
||||
use anyhow::{Context as _, Result};
|
||||
use collections::HashMap;
|
||||
use futures::{
|
||||
AsyncBufReadExt, AsyncRead, AsyncWrite, AsyncWriteExt, FutureExt,
|
||||
channel::mpsc::{UnboundedReceiver, UnboundedSender, unbounded},
|
||||
io::BufReader,
|
||||
select_biased,
|
||||
};
|
||||
use gpui::{App, AppContext, AsyncApp, Task};
|
||||
use net::async_net::{UnixListener, UnixStream};
|
||||
use serde_json::{json, value::RawValue};
|
||||
use smol::stream::StreamExt;
|
||||
use std::{
|
||||
cell::RefCell,
|
||||
path::{Path, PathBuf},
|
||||
rc::Rc,
|
||||
};
|
||||
use util::ResultExt;
|
||||
|
||||
use crate::{
|
||||
client::{CspResult, RequestId, Response},
|
||||
types::Request,
|
||||
};
|
||||
|
||||
pub struct McpServer {
|
||||
socket_path: PathBuf,
|
||||
handlers: Rc<RefCell<HashMap<&'static str, McpHandler>>>,
|
||||
_server_task: Task<()>,
|
||||
}
|
||||
|
||||
type McpHandler = Box<dyn Fn(RequestId, Option<Box<RawValue>>, &App) -> Task<String>>;
|
||||
|
||||
impl McpServer {
|
||||
pub fn new(cx: &AsyncApp) -> Task<Result<Self>> {
|
||||
let task = cx.background_spawn(async move {
|
||||
let temp_dir = tempfile::Builder::new().prefix("zed-mcp").tempdir()?;
|
||||
let socket_path = temp_dir.path().join("mcp.sock");
|
||||
let listener = UnixListener::bind(&socket_path).context("creating mcp socket")?;
|
||||
|
||||
anyhow::Ok((temp_dir, socket_path, listener))
|
||||
});
|
||||
|
||||
cx.spawn(async move |cx| {
|
||||
let (temp_dir, socket_path, listener) = task.await?;
|
||||
let handlers = Rc::new(RefCell::new(HashMap::default()));
|
||||
let server_task = cx.spawn({
|
||||
let handlers = handlers.clone();
|
||||
async move |cx| {
|
||||
while let Ok((stream, _)) = listener.accept().await {
|
||||
Self::serve_connection(stream, handlers.clone(), cx);
|
||||
}
|
||||
drop(temp_dir)
|
||||
}
|
||||
});
|
||||
Ok(Self {
|
||||
socket_path,
|
||||
_server_task: server_task,
|
||||
handlers: handlers.clone(),
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
pub fn handle_request<R: Request>(
|
||||
&mut self,
|
||||
f: impl Fn(R::Params, &App) -> Task<Result<R::Response>> + 'static,
|
||||
) {
|
||||
let f = Box::new(f);
|
||||
self.handlers.borrow_mut().insert(
|
||||
R::METHOD,
|
||||
Box::new(move |req_id, opt_params, cx| {
|
||||
let result = match opt_params {
|
||||
Some(params) => serde_json::from_str(params.get()),
|
||||
None => serde_json::from_value(serde_json::Value::Null),
|
||||
};
|
||||
|
||||
let params: R::Params = match result {
|
||||
Ok(params) => params,
|
||||
Err(e) => {
|
||||
return Task::ready(
|
||||
serde_json::to_string(&Response::<R::Response> {
|
||||
jsonrpc: "2.0",
|
||||
id: req_id,
|
||||
value: CspResult::Error(Some(crate::client::Error {
|
||||
message: format!("{e}"),
|
||||
code: -32700,
|
||||
})),
|
||||
})
|
||||
.unwrap(),
|
||||
);
|
||||
}
|
||||
};
|
||||
let task = f(params, cx);
|
||||
cx.background_spawn(async move {
|
||||
match task.await {
|
||||
Ok(result) => serde_json::to_string(&Response {
|
||||
jsonrpc: "2.0",
|
||||
id: req_id,
|
||||
value: CspResult::Ok(Some(result)),
|
||||
})
|
||||
.unwrap(),
|
||||
Err(e) => serde_json::to_string(&Response {
|
||||
jsonrpc: "2.0",
|
||||
id: req_id,
|
||||
value: CspResult::Error::<R::Response>(Some(crate::client::Error {
|
||||
message: format!("{e}"),
|
||||
code: -32603,
|
||||
})),
|
||||
})
|
||||
.unwrap(),
|
||||
}
|
||||
})
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
pub fn socket_path(&self) -> &Path {
|
||||
&self.socket_path
|
||||
}
|
||||
|
||||
fn serve_connection(
|
||||
stream: UnixStream,
|
||||
handlers: Rc<RefCell<HashMap<&'static str, McpHandler>>>,
|
||||
cx: &mut AsyncApp,
|
||||
) {
|
||||
let (read, write) = smol::io::split(stream);
|
||||
let (incoming_tx, mut incoming_rx) = unbounded();
|
||||
let (outgoing_tx, outgoing_rx) = unbounded();
|
||||
|
||||
cx.background_spawn(Self::handle_io(outgoing_rx, incoming_tx, write, read))
|
||||
.detach();
|
||||
|
||||
cx.spawn(async move |cx| {
|
||||
while let Some(request) = incoming_rx.next().await {
|
||||
let Some(request_id) = request.id.clone() else {
|
||||
continue;
|
||||
};
|
||||
if let Some(handler) = handlers.borrow().get(&request.method.as_ref()) {
|
||||
let outgoing_tx = outgoing_tx.clone();
|
||||
|
||||
if let Some(task) = cx
|
||||
.update(|cx| handler(request_id, request.params, cx))
|
||||
.log_err()
|
||||
{
|
||||
cx.spawn(async move |_| {
|
||||
let response = task.await;
|
||||
outgoing_tx.unbounded_send(response).ok();
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
} else {
|
||||
outgoing_tx
|
||||
.unbounded_send(
|
||||
serde_json::to_string(&Response::<()> {
|
||||
jsonrpc: "2.0",
|
||||
id: request.id.unwrap(),
|
||||
value: CspResult::Error(Some(crate::client::Error {
|
||||
message: format!("unhandled method {}", request.method),
|
||||
code: -32601,
|
||||
})),
|
||||
})
|
||||
.unwrap(),
|
||||
)
|
||||
.ok();
|
||||
}
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
|
||||
async fn handle_io(
|
||||
mut outgoing_rx: UnboundedReceiver<String>,
|
||||
incoming_tx: UnboundedSender<RawRequest>,
|
||||
mut outgoing_bytes: impl Unpin + AsyncWrite,
|
||||
incoming_bytes: impl Unpin + AsyncRead,
|
||||
) -> Result<()> {
|
||||
let mut output_reader = BufReader::new(incoming_bytes);
|
||||
let mut incoming_line = String::new();
|
||||
loop {
|
||||
select_biased! {
|
||||
message = outgoing_rx.next().fuse() => {
|
||||
if let Some(message) = message {
|
||||
log::trace!("send: {}", &message);
|
||||
outgoing_bytes.write_all(message.as_bytes()).await?;
|
||||
outgoing_bytes.write_all(&[b'\n']).await?;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
bytes_read = output_reader.read_line(&mut incoming_line).fuse() => {
|
||||
if bytes_read? == 0 {
|
||||
break
|
||||
}
|
||||
log::trace!("recv: {}", &incoming_line);
|
||||
match serde_json::from_str(&incoming_line) {
|
||||
Ok(message) => {
|
||||
incoming_tx.unbounded_send(message).log_err();
|
||||
}
|
||||
Err(error) => {
|
||||
outgoing_bytes.write_all(serde_json::to_string(&json!({
|
||||
"jsonrpc": "2.0",
|
||||
"error": json!({
|
||||
"code": -32603,
|
||||
"message": format!("Failed to parse: {error}"),
|
||||
}),
|
||||
}))?.as_bytes()).await?;
|
||||
outgoing_bytes.write_all(&[b'\n']).await?;
|
||||
log::error!("failed to parse incoming message: {error}. Raw: {incoming_line}");
|
||||
}
|
||||
}
|
||||
incoming_line.clear();
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
struct RawRequest {
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
id: Option<RequestId>,
|
||||
method: String,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
params: Option<Box<serde_json::value::RawValue>>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
struct RawResponse {
|
||||
jsonrpc: &'static str,
|
||||
id: RequestId,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
error: Option<crate::client::Error>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
result: Option<Box<serde_json::value::RawValue>>,
|
||||
}
|
||||
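The new listener above speaks newline-delimited JSON-RPC over a Unix domain socket (see handle_io, which writes each message followed by '\n' and reads replies line by line). A hedged client-side sketch of that framing, using only std and serde_json; the socket path and the "initialize" params are placeholders, not values taken from the code above:

// Sketch of a client for a newline-delimited JSON-RPC Unix socket server.
use std::io::{BufRead, BufReader, Write};
use std::os::unix::net::UnixStream;

fn main() -> std::io::Result<()> {
    // Placeholder path; the real server creates a temp dir and exposes it via socket_path().
    let mut stream = UnixStream::connect("/tmp/zed-mcp/mcp.sock")?;

    // One request per line, terminated by '\n'.
    let request = serde_json::json!({
        "jsonrpc": "2.0",
        "id": 1,
        "method": "initialize",
        "params": { "protocolVersion": "2025-03-26" }
    });
    stream.write_all(request.to_string().as_bytes())?;
    stream.write_all(b"\n")?;

    // The server answers with a single JSON line as well.
    let mut reader = BufReader::new(stream);
    let mut line = String::new();
    reader.read_line(&mut line)?;
    println!("response: {line}");
    Ok(())
}
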
@@ -153,7 +153,7 @@ pub struct InitializeParams {
pub struct CallToolParams {
    pub name: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub arguments: Option<HashMap<String, serde_json::Value>>,
    pub arguments: Option<serde_json::Value>,
    #[serde(rename = "_meta", skip_serializing_if = "Option::is_none")]
    pub meta: Option<HashMap<String, serde_json::Value>>,
}
@@ -378,6 +378,14 @@ pub trait DebugAdapter: 'static + Send + Sync {
|
||||
fn label_for_child_session(&self, _args: &StartDebuggingRequestArguments) -> Option<String> {
|
||||
None
|
||||
}
|
||||
|
||||
fn compact_child_session(&self) -> bool {
|
||||
false
|
||||
}
|
||||
|
||||
fn prefer_thread_name(&self) -> bool {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
@@ -442,10 +450,18 @@ impl DebugAdapter for FakeAdapter {
|
||||
_: Option<Vec<String>>,
|
||||
_: &mut AsyncApp,
|
||||
) -> Result<DebugAdapterBinary> {
|
||||
let connection = task_definition
|
||||
.tcp_connection
|
||||
.as_ref()
|
||||
.map(|connection| TcpArguments {
|
||||
host: connection.host(),
|
||||
port: connection.port.unwrap_or(17),
|
||||
timeout: connection.timeout,
|
||||
});
|
||||
Ok(DebugAdapterBinary {
|
||||
command: Some("command".into()),
|
||||
arguments: vec![],
|
||||
connection: None,
|
||||
connection,
|
||||
envs: HashMap::default(),
|
||||
cwd: None,
|
||||
request_args: StartDebuggingRequestArguments {
|
||||
|
||||
@@ -2,7 +2,7 @@ use crate::{
|
||||
adapters::DebugAdapterBinary,
|
||||
transport::{IoKind, LogKind, TransportDelegate},
|
||||
};
|
||||
use anyhow::{Context as _, Result};
|
||||
use anyhow::Result;
|
||||
use dap_types::{
|
||||
messages::{Message, Response},
|
||||
requests::Request,
|
||||
@@ -110,9 +110,7 @@ impl DebugAdapterClient {
|
||||
self.transport_delegate
|
||||
.pending_requests
|
||||
.lock()
|
||||
.as_mut()
|
||||
.context("client is closed")?
|
||||
.insert(sequence_id, callback_tx);
|
||||
.insert(sequence_id, callback_tx)?;
|
||||
|
||||
log::debug!(
|
||||
"Client {} send `{}` request with sequence_id: {}",
|
||||
@@ -170,6 +168,7 @@ impl DebugAdapterClient {
|
||||
pub fn kill(&self) {
|
||||
log::debug!("Killing DAP process");
|
||||
self.transport_delegate.transport.lock().kill();
|
||||
self.transport_delegate.pending_requests.lock().shutdown();
|
||||
}
|
||||
|
||||
pub fn has_adapter_logs(&self) -> bool {
|
||||
@@ -184,11 +183,34 @@ impl DebugAdapterClient {
|
||||
}
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
pub fn on_request<R: dap_types::requests::Request, F>(&self, handler: F)
|
||||
pub fn on_request<R: dap_types::requests::Request, F>(&self, mut handler: F)
|
||||
where
|
||||
F: 'static
|
||||
+ Send
|
||||
+ FnMut(u64, R::Arguments) -> Result<R::Response, dap_types::ErrorResponse>,
|
||||
{
|
||||
use crate::transport::RequestHandling;
|
||||
|
||||
self.transport_delegate
|
||||
.transport
|
||||
.lock()
|
||||
.as_fake()
|
||||
.on_request::<R, _>(move |seq, request| {
|
||||
RequestHandling::Respond(handler(seq, request))
|
||||
});
|
||||
}
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
pub fn on_request_ext<R: dap_types::requests::Request, F>(&self, handler: F)
|
||||
where
|
||||
F: 'static
|
||||
+ Send
|
||||
+ FnMut(
|
||||
u64,
|
||||
R::Arguments,
|
||||
) -> crate::transport::RequestHandling<
|
||||
Result<R::Response, dap_types::ErrorResponse>,
|
||||
>,
|
||||
{
|
||||
self.transport_delegate
|
||||
.transport
|
||||
|
||||
@@ -49,6 +49,12 @@ pub enum IoKind {
|
||||
StdErr,
|
||||
}
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
pub enum RequestHandling<T> {
|
||||
Respond(T),
|
||||
Exit,
|
||||
}
|
||||
|
||||
type LogHandlers = Arc<Mutex<SmallVec<[(LogKind, IoHandler); 2]>>>;
|
||||
|
||||
pub trait Transport: Send + Sync {
|
||||
@@ -76,7 +82,11 @@ async fn start(
|
||||
) -> Result<Box<dyn Transport>> {
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
if cfg!(any(test, feature = "test-support")) {
|
||||
return Ok(Box::new(FakeTransport::start(cx).await?));
|
||||
if let Some(connection) = binary.connection.clone() {
|
||||
return Ok(Box::new(FakeTransport::start_tcp(connection, cx).await?));
|
||||
} else {
|
||||
return Ok(Box::new(FakeTransport::start_stdio(cx).await?));
|
||||
}
|
||||
}
|
||||
|
||||
if binary.connection.is_some() {
|
||||
@@ -90,11 +100,57 @@ async fn start(
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) struct PendingRequests {
|
||||
inner: Option<HashMap<u64, oneshot::Sender<Result<Response>>>>,
|
||||
}
|
||||
|
||||
impl PendingRequests {
|
||||
fn new() -> Self {
|
||||
Self {
|
||||
inner: Some(HashMap::default()),
|
||||
}
|
||||
}
|
||||
|
||||
fn flush(&mut self, e: anyhow::Error) {
|
||||
let Some(inner) = self.inner.as_mut() else {
|
||||
return;
|
||||
};
|
||||
for (_, sender) in inner.drain() {
|
||||
sender.send(Err(e.cloned())).ok();
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn insert(
|
||||
&mut self,
|
||||
sequence_id: u64,
|
||||
callback_tx: oneshot::Sender<Result<Response>>,
|
||||
) -> anyhow::Result<()> {
|
||||
let Some(inner) = self.inner.as_mut() else {
|
||||
bail!("client is closed")
|
||||
};
|
||||
inner.insert(sequence_id, callback_tx);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub(crate) fn remove(
|
||||
&mut self,
|
||||
sequence_id: u64,
|
||||
) -> anyhow::Result<Option<oneshot::Sender<Result<Response>>>> {
|
||||
let Some(inner) = self.inner.as_mut() else {
|
||||
bail!("client is closed");
|
||||
};
|
||||
Ok(inner.remove(&sequence_id))
|
||||
}
|
||||
|
||||
pub(crate) fn shutdown(&mut self) {
|
||||
self.flush(anyhow!("transport shutdown"));
|
||||
self.inner = None;
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) struct TransportDelegate {
|
||||
log_handlers: LogHandlers,
|
||||
// TODO this should really be some kind of associative channel
|
||||
pub(crate) pending_requests:
|
||||
Arc<Mutex<Option<HashMap<u64, oneshot::Sender<Result<Response>>>>>>,
|
||||
pub(crate) pending_requests: Arc<Mutex<PendingRequests>>,
|
||||
pub(crate) transport: Mutex<Box<dyn Transport>>,
|
||||
pub(crate) server_tx: smol::lock::Mutex<Option<Sender<Message>>>,
|
||||
tasks: Mutex<Vec<Task<()>>>,
|
||||
@@ -108,7 +164,7 @@ impl TransportDelegate {
|
||||
transport: Mutex::new(transport),
|
||||
log_handlers,
|
||||
server_tx: Default::default(),
|
||||
pending_requests: Arc::new(Mutex::new(Some(HashMap::default()))),
|
||||
pending_requests: Arc::new(Mutex::new(PendingRequests::new())),
|
||||
tasks: Default::default(),
|
||||
})
|
||||
}
|
||||
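PendingRequests above wraps what used to be a bare Option<HashMap<..>>, so a closed client rejects new requests and shutdown fails every in-flight one instead of leaving it hanging. A self-contained sketch of that pattern, with futures' oneshot channels and String errors standing in for anyhow::Error:

// Sketch of the pending-request map: resolve a oneshot per sequence id,
// and on shutdown fail all waiters and refuse further inserts.
use std::collections::HashMap;

use futures::channel::oneshot;

struct Pending {
    inner: Option<HashMap<u64, oneshot::Sender<Result<String, String>>>>,
}

impl Pending {
    fn new() -> Self {
        Self { inner: Some(HashMap::new()) }
    }

    fn insert(&mut self, seq: u64, tx: oneshot::Sender<Result<String, String>>) -> Result<(), String> {
        self.inner.as_mut().ok_or("client is closed".to_string())?.insert(seq, tx);
        Ok(())
    }

    fn shutdown(&mut self) {
        if let Some(mut inner) = self.inner.take() {
            for (_, tx) in inner.drain() {
                // Every in-flight request learns the transport is gone.
                tx.send(Err("transport shutdown".to_string())).ok();
            }
        }
    }
}

fn main() {
    let mut pending = Pending::new();
    let (tx, mut rx) = oneshot::channel();
    pending.insert(1, tx).unwrap();
    pending.shutdown();
    assert_eq!(rx.try_recv().unwrap().unwrap(), Err("transport shutdown".to_string()));
    assert!(pending.insert(2, oneshot::channel().0).is_err());
}
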
@@ -151,24 +207,10 @@ impl TransportDelegate {
|
||||
Ok(()) => {
|
||||
pending_requests
|
||||
.lock()
|
||||
.take()
|
||||
.into_iter()
|
||||
.flatten()
|
||||
.for_each(|(_, request)| {
|
||||
request
|
||||
.send(Err(anyhow!("debugger shutdown unexpectedly")))
|
||||
.ok();
|
||||
});
|
||||
.flush(anyhow!("debugger shutdown unexpectedly"));
|
||||
}
|
||||
Err(e) => {
|
||||
pending_requests
|
||||
.lock()
|
||||
.take()
|
||||
.into_iter()
|
||||
.flatten()
|
||||
.for_each(|(_, request)| {
|
||||
request.send(Err(e.cloned())).ok();
|
||||
});
|
||||
pending_requests.lock().flush(e);
|
||||
}
|
||||
}
|
||||
}));
|
||||
@@ -286,7 +328,7 @@ impl TransportDelegate {
|
||||
async fn recv_from_server<Stdout>(
|
||||
server_stdout: Stdout,
|
||||
mut message_handler: DapMessageHandler,
|
||||
pending_requests: Arc<Mutex<Option<HashMap<u64, oneshot::Sender<Result<Response>>>>>>,
|
||||
pending_requests: Arc<Mutex<PendingRequests>>,
|
||||
log_handlers: Option<LogHandlers>,
|
||||
) -> Result<()>
|
||||
where
|
||||
@@ -303,14 +345,10 @@ impl TransportDelegate {
|
||||
ConnectionResult::Timeout => anyhow::bail!("Timed out when connecting to debugger"),
|
||||
ConnectionResult::ConnectionReset => {
|
||||
log::info!("Debugger closed the connection");
|
||||
break Ok(());
|
||||
return Ok(());
|
||||
}
|
||||
ConnectionResult::Result(Ok(Message::Response(res))) => {
|
||||
let tx = pending_requests
|
||||
.lock()
|
||||
.as_mut()
|
||||
.context("client is closed")?
|
||||
.remove(&res.request_seq);
|
||||
let tx = pending_requests.lock().remove(res.request_seq)?;
|
||||
if let Some(tx) = tx {
|
||||
if let Err(e) = tx.send(Self::process_response(res)) {
|
||||
log::trace!("Did not send response `{:?}` for a cancelled", e);
|
||||
@@ -704,8 +742,7 @@ impl Drop for StdioTransport {
|
||||
}
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
type RequestHandler =
|
||||
Box<dyn Send + FnMut(u64, serde_json::Value) -> dap_types::messages::Response>;
|
||||
type RequestHandler = Box<dyn Send + FnMut(u64, serde_json::Value) -> RequestHandling<Response>>;
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
type ResponseHandler = Box<dyn Send + Fn(Response)>;
|
||||
@@ -716,23 +753,38 @@ pub struct FakeTransport {
|
||||
request_handlers: Arc<Mutex<HashMap<&'static str, RequestHandler>>>,
|
||||
// for reverse request responses
|
||||
response_handlers: Arc<Mutex<HashMap<&'static str, ResponseHandler>>>,
|
||||
|
||||
stdin_writer: Option<PipeWriter>,
|
||||
stdout_reader: Option<PipeReader>,
|
||||
message_handler: Option<Task<Result<()>>>,
|
||||
kind: FakeTransportKind,
|
||||
}
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
pub enum FakeTransportKind {
|
||||
Stdio {
|
||||
stdin_writer: Option<PipeWriter>,
|
||||
stdout_reader: Option<PipeReader>,
|
||||
},
|
||||
Tcp {
|
||||
connection: TcpArguments,
|
||||
executor: BackgroundExecutor,
|
||||
},
|
||||
}
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
impl FakeTransport {
|
||||
pub fn on_request<R: dap_types::requests::Request, F>(&self, mut handler: F)
|
||||
where
|
||||
F: 'static + Send + FnMut(u64, R::Arguments) -> Result<R::Response, ErrorResponse>,
|
||||
F: 'static
|
||||
+ Send
|
||||
+ FnMut(u64, R::Arguments) -> RequestHandling<Result<R::Response, ErrorResponse>>,
|
||||
{
|
||||
self.request_handlers.lock().insert(
|
||||
R::COMMAND,
|
||||
Box::new(move |seq, args| {
|
||||
let result = handler(seq, serde_json::from_value(args).unwrap());
|
||||
let response = match result {
|
||||
let RequestHandling::Respond(response) = result else {
|
||||
return RequestHandling::Exit;
|
||||
};
|
||||
let response = match response {
|
||||
Ok(response) => Response {
|
||||
seq: seq + 1,
|
||||
request_seq: seq,
|
||||
@@ -750,7 +802,7 @@ impl FakeTransport {
|
||||
message: None,
|
||||
},
|
||||
};
|
||||
response
|
||||
RequestHandling::Respond(response)
|
||||
}),
|
||||
);
|
||||
}
|
||||
@@ -764,86 +816,75 @@ impl FakeTransport {
.insert(R::COMMAND, Box::new(handler));
}

async fn start(cx: &mut AsyncApp) -> Result<Self> {
async fn start_tcp(connection: TcpArguments, cx: &mut AsyncApp) -> Result<Self> {
Ok(Self {
request_handlers: Arc::new(Mutex::new(HashMap::default())),
response_handlers: Arc::new(Mutex::new(HashMap::default())),
message_handler: None,
kind: FakeTransportKind::Tcp {
connection,
executor: cx.background_executor().clone(),
},
})
}

async fn handle_messages(
request_handlers: Arc<Mutex<HashMap<&'static str, RequestHandler>>>,
response_handlers: Arc<Mutex<HashMap<&'static str, ResponseHandler>>>,
stdin_reader: PipeReader,
stdout_writer: PipeWriter,
) -> Result<()> {
use dap_types::requests::{Request, RunInTerminal, StartDebugging};
use serde_json::json;

let (stdin_writer, stdin_reader) = async_pipe::pipe();
let (stdout_writer, stdout_reader) = async_pipe::pipe();

let mut this = Self {
request_handlers: Arc::new(Mutex::new(HashMap::default())),
response_handlers: Arc::new(Mutex::new(HashMap::default())),
stdin_writer: Some(stdin_writer),
stdout_reader: Some(stdout_reader),
message_handler: None,
};

let request_handlers = this.request_handlers.clone();
let response_handlers = this.response_handlers.clone();
let mut reader = BufReader::new(stdin_reader);
let stdout_writer = Arc::new(smol::lock::Mutex::new(stdout_writer));
let mut buffer = String::new();

this.message_handler = Some(cx.background_spawn(async move {
let mut reader = BufReader::new(stdin_reader);
let mut buffer = String::new();

loop {
match TransportDelegate::receive_server_message(&mut reader, &mut buffer, None)
.await
{
ConnectionResult::Timeout => {
anyhow::bail!("Timed out when connecting to debugger");
}
ConnectionResult::ConnectionReset => {
log::info!("Debugger closed the connection");
break Ok(());
}
ConnectionResult::Result(Err(e)) => break Err(e),
ConnectionResult::Result(Ok(message)) => {
match message {
Message::Request(request) => {
// redirect reverse requests to stdout writer/reader
if request.command == RunInTerminal::COMMAND
|| request.command == StartDebugging::COMMAND
{
let message =
serde_json::to_string(&Message::Request(request)).unwrap();

let mut writer = stdout_writer.lock().await;
writer
.write_all(
TransportDelegate::build_rpc_message(message)
.as_bytes(),
)
.await
.unwrap();
writer.flush().await.unwrap();
} else {
let response = if let Some(handle) =
request_handlers.lock().get_mut(request.command.as_str())
{
handle(request.seq, request.arguments.unwrap_or(json!({})))
} else {
panic!("No request handler for {}", request.command);
};
let message =
serde_json::to_string(&Message::Response(response))
.unwrap();

let mut writer = stdout_writer.lock().await;
writer
.write_all(
TransportDelegate::build_rpc_message(message)
.as_bytes(),
)
.await
.unwrap();
writer.flush().await.unwrap();
}
}
Message::Event(event) => {
loop {
match TransportDelegate::receive_server_message(&mut reader, &mut buffer, None).await {
ConnectionResult::Timeout => {
anyhow::bail!("Timed out when connecting to debugger");
}
ConnectionResult::ConnectionReset => {
log::info!("Debugger closed the connection");
break Ok(());
}
ConnectionResult::Result(Err(e)) => break Err(e),
ConnectionResult::Result(Ok(message)) => {
match message {
Message::Request(request) => {
// redirect reverse requests to stdout writer/reader
if request.command == RunInTerminal::COMMAND
|| request.command == StartDebugging::COMMAND
{
let message =
serde_json::to_string(&Message::Event(event)).unwrap();
serde_json::to_string(&Message::Request(request)).unwrap();

let mut writer = stdout_writer.lock().await;
writer
.write_all(
TransportDelegate::build_rpc_message(message).as_bytes(),
)
.await
.unwrap();
writer.flush().await.unwrap();
} else {
let response = if let Some(handle) =
request_handlers.lock().get_mut(request.command.as_str())
{
handle(request.seq, request.arguments.unwrap_or(json!({})))
} else {
panic!("No request handler for {}", request.command);
};
let response = match response {
RequestHandling::Respond(response) => response,
RequestHandling::Exit => {
break Err(anyhow!("exit in response to request"));
}
};
let message =
serde_json::to_string(&Message::Response(response)).unwrap();

let mut writer = stdout_writer.lock().await;
writer
@@ -854,20 +895,56 @@ impl FakeTransport {
.unwrap();
writer.flush().await.unwrap();
}
Message::Response(response) => {
if let Some(handle) =
response_handlers.lock().get(response.command.as_str())
{
handle(response);
} else {
log::error!("No response handler for {}", response.command);
}
}
Message::Event(event) => {
let message = serde_json::to_string(&Message::Event(event)).unwrap();

let mut writer = stdout_writer.lock().await;
writer
.write_all(TransportDelegate::build_rpc_message(message).as_bytes())
.await
.unwrap();
writer.flush().await.unwrap();
}
Message::Response(response) => {
if let Some(handle) =
response_handlers.lock().get(response.command.as_str())
{
handle(response);
} else {
log::error!("No response handler for {}", response.command);
}
}
}
}
}
}));
}
}

async fn start_stdio(cx: &mut AsyncApp) -> Result<Self> {
let (stdin_writer, stdin_reader) = async_pipe::pipe();
let (stdout_writer, stdout_reader) = async_pipe::pipe();
let kind = FakeTransportKind::Stdio {
stdin_writer: Some(stdin_writer),
stdout_reader: Some(stdout_reader),
};

let mut this = Self {
request_handlers: Arc::new(Mutex::new(HashMap::default())),
response_handlers: Arc::new(Mutex::new(HashMap::default())),
message_handler: None,
kind,
};

let request_handlers = this.request_handlers.clone();
let response_handlers = this.response_handlers.clone();

this.message_handler = Some(cx.background_spawn(Self::handle_messages(
request_handlers,
response_handlers,
stdin_reader,
stdout_writer,
)));

Ok(this)
}
@@ -876,7 +953,10 @@ impl FakeTransport {
#[cfg(any(test, feature = "test-support"))]
impl Transport for FakeTransport {
fn tcp_arguments(&self) -> Option<TcpArguments> {
None
match &self.kind {
FakeTransportKind::Stdio { .. } => None,
FakeTransportKind::Tcp { connection, .. } => Some(connection.clone()),
}
}

fn connect(
@@ -887,12 +967,33 @@ impl Transport for FakeTransport {
Box<dyn AsyncRead + Unpin + Send + 'static>,
)>,
> {
let result = util::maybe!({
Ok((
Box::new(self.stdin_writer.take().context("Cannot reconnect")?) as _,
Box::new(self.stdout_reader.take().context("Cannot reconnect")?) as _,
))
});
let result = match &mut self.kind {
FakeTransportKind::Stdio {
stdin_writer,
stdout_reader,
} => util::maybe!({
Ok((
Box::new(stdin_writer.take().context("Cannot reconnect")?) as _,
Box::new(stdout_reader.take().context("Cannot reconnect")?) as _,
))
}),
FakeTransportKind::Tcp { executor, .. } => {
let (stdin_writer, stdin_reader) = async_pipe::pipe();
let (stdout_writer, stdout_reader) = async_pipe::pipe();

let request_handlers = self.request_handlers.clone();
let response_handlers = self.response_handlers.clone();

self.message_handler = Some(executor.spawn(Self::handle_messages(
request_handlers,
response_handlers,
stdin_reader,
stdout_writer,
)));

Ok((Box::new(stdin_writer) as _, Box::new(stdout_reader) as _))
}
};
Task::ready(result)
}

@@ -547,6 +547,7 @@ async fn handle_envs(
}
};

let mut env_vars = HashMap::default();
for path in env_files {
let Some(path) = path
.and_then(|s| PathBuf::from_str(s).ok())
@@ -556,13 +557,33 @@ async fn handle_envs(
};

if let Ok(file) = fs.open_sync(&path).await {
envs.extend(dotenvy::from_read_iter(file).filter_map(Result::ok))
let file_envs: HashMap<String, String> = dotenvy::from_read_iter(file)
.filter_map(Result::ok)
.collect();
envs.extend(file_envs.iter().map(|(k, v)| (k.clone(), v.clone())));
env_vars.extend(file_envs);
} else {
warn!("While starting Go debug session: failed to read env file {path:?}");
};
}

let mut env_obj: serde_json::Map<String, Value> = serde_json::Map::new();

for (k, v) in env_vars {
env_obj.insert(k, Value::String(v));
}

if let Some(existing_env) = config.get("env").and_then(|v| v.as_object()) {
for (k, v) in existing_env {
env_obj.insert(k.clone(), v.clone());
}
}

if !env_obj.is_empty() {
config.insert("env".to_string(), Value::Object(env_obj));
}

// remove envFile now that it's been handled
config.remove("entry");
config.remove("envFile");
Some(())
}

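Note: the Go hunk above collects variables from each `envFile` into a JSON `"env"` object and lets values already present in the debug configuration win over file-provided ones. A small standalone sketch of that merge order; the function and variable names here are illustrative, not the adapter's API.

```rust
use serde_json::{Map, Value};
use std::collections::HashMap;

// Merge .env-file variables into the scenario's "env" object; values already
// present in `config` take precedence over values read from files.
fn merge_env(config: &mut Map<String, Value>, file_vars: HashMap<String, String>) {
    let mut env_obj = Map::new();
    for (k, v) in file_vars {
        env_obj.insert(k, Value::String(v));
    }
    if let Some(existing) = config.get("env").and_then(|v| v.as_object()) {
        for (k, v) in existing {
            env_obj.insert(k.clone(), v.clone()); // existing config wins
        }
    }
    if !env_obj.is_empty() {
        config.insert("env".to_string(), Value::Object(env_obj));
    }
    config.remove("envFile");
}
```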
@@ -54,20 +54,6 @@ impl JsDebugAdapter {
user_args: Option<Vec<String>>,
_: &mut AsyncApp,
) -> Result<DebugAdapterBinary> {
let adapter_path = if let Some(user_installed_path) = user_installed_path {
user_installed_path
} else {
let adapter_path = paths::debug_adapters_dir().join(self.name().as_ref());

let file_name_prefix = format!("{}_", self.name());

util::fs::find_file_name_in_dir(adapter_path.as_path(), |file_name| {
file_name.starts_with(&file_name_prefix)
})
.await
.context("Couldn't find JavaScript dap directory")?
};

let tcp_connection = task_definition.tcp_connection.clone().unwrap_or_default();
let (host, port, timeout) = crate::configure_tcp_connection(tcp_connection).await?;

@@ -136,21 +122,27 @@ impl JsDebugAdapter {
.or_insert(true.into());
}

let adapter_path = if let Some(user_installed_path) = user_installed_path {
user_installed_path
} else {
let adapter_path = paths::debug_adapters_dir().join(self.name().as_ref());

let file_name_prefix = format!("{}_", self.name());

util::fs::find_file_name_in_dir(adapter_path.as_path(), |file_name| {
file_name.starts_with(&file_name_prefix)
})
.await
.context("Couldn't find JavaScript dap directory")?
.join(Self::ADAPTER_PATH)
};

let arguments = if let Some(mut args) = user_args {
args.insert(
0,
adapter_path
.join(Self::ADAPTER_PATH)
.to_string_lossy()
.to_string(),
);
args.insert(0, adapter_path.to_string_lossy().to_string());
args
} else {
vec![
adapter_path
.join(Self::ADAPTER_PATH)
.to_string_lossy()
.to_string(),
adapter_path.to_string_lossy().to_string(),
port.to_string(),
host.to_string(),
]
@@ -534,6 +526,14 @@ impl DebugAdapter for JsDebugAdapter {
.filter(|name| !name.is_empty())?;
Some(label.to_owned())
}

fn compact_child_session(&self) -> bool {
true
}

fn prefer_thread_name(&self) -> bool {
true
}
}

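Note: the two overrides added above (`compact_child_session`, `prefer_thread_name`) read like trait methods with default implementations that adapters opt into. The defaults are not visible in this diff, so the sketch below is an assumption about the shape rather than the crate's real `DebugAdapter` trait.

```rust
// Assumed shape of the adapter-quirk hooks; the actual DebugAdapter trait has
// many more methods and may define different defaults.
trait AdapterQuirks {
    /// Collapse a lone child session into its parent in the session list.
    fn compact_child_session(&self) -> bool {
        false
    }
    /// Prefer the thread name over the session label when labelling sessions.
    fn prefer_thread_name(&self) -> bool {
        false
    }
}

struct JsAdapter;

impl AdapterQuirks for JsAdapter {
    fn compact_child_session(&self) -> bool {
        true
    }
    fn prefer_thread_name(&self) -> bool {
        true
    }
}
```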
fn normalize_task_type(task_type: &mut Value) {

@@ -40,12 +40,7 @@ impl PythonDebugAdapter {
"Using user-installed debugpy adapter from: {}",
user_installed_path.display()
);
vec![
user_installed_path
.join(Self::ADAPTER_PATH)
.to_string_lossy()
.to_string(),
]
vec![user_installed_path.to_string_lossy().to_string()]
} else if installed_in_venv {
log::debug!("Using venv-installed debugpy");
vec!["-m".to_string(), "debugpy.adapter".to_string()]
@@ -700,7 +695,7 @@ mod tests {
let port = 5678;

// Case 1: User-defined debugpy path (highest precedence)
let user_path = PathBuf::from("/custom/path/to/debugpy");
let user_path = PathBuf::from("/custom/path/to/debugpy/src/debugpy/adapter");
let user_args = PythonDebugAdapter::generate_debugpy_arguments(
&host,
port,
@@ -717,7 +712,7 @@ mod tests {
.await
.unwrap();

assert!(user_args[0].ends_with("src/debugpy/adapter"));
assert_eq!(user_args[0], "/custom/path/to/debugpy/src/debugpy/adapter");
assert_eq!(user_args[1], "--host=127.0.0.1");
assert_eq!(user_args[2], "--port=5678");

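Note: the updated test asserts that a user-installed debugpy path is passed through verbatim as the first argument, followed by the host and port flags. A tiny standalone check of that argument order; `debugpy_args` here is a stand-in for `generate_debugpy_arguments`, not the real function.

```rust
// Illustrative stand-in for the argument builder exercised by the test above.
fn debugpy_args(adapter_path: &str, host: &str, port: u16) -> Vec<String> {
    vec![
        adapter_path.to_string(),
        format!("--host={host}"),
        format!("--port={port}"),
    ]
}

fn main() {
    let args = debugpy_args("/custom/path/to/debugpy/src/debugpy/adapter", "127.0.0.1", 5678);
    assert_eq!(args[0], "/custom/path/to/debugpy/src/debugpy/adapter");
    assert_eq!(args[1], "--host=127.0.0.1");
    assert_eq!(args[2], "--port=5678");
}
```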
@@ -32,12 +32,19 @@ use workspace::{
ui::{Button, Clickable, ContextMenu, Label, LabelCommon, PopoverMenu, h_flex},
};

#[derive(Debug, Copy, Clone, PartialEq, Eq)]
enum View {
AdapterLogs,
RpcMessages,
InitializationSequence,
}

struct DapLogView {
editor: Entity<Editor>,
focus_handle: FocusHandle,
log_store: Entity<LogStore>,
editor_subscriptions: Vec<Subscription>,
current_view: Option<(SessionId, LogKind)>,
current_view: Option<(SessionId, View)>,
project: Entity<Project>,
_subscriptions: Vec<Subscription>,
}
@@ -77,6 +84,7 @@ struct DebugAdapterState {
id: SessionId,
log_messages: VecDeque<SharedString>,
rpc_messages: RpcMessages,
session_label: SharedString,
adapter_name: DebugAdapterName,
has_adapter_logs: bool,
is_terminated: bool,
@@ -121,12 +129,18 @@ impl MessageKind {
}

impl DebugAdapterState {
fn new(id: SessionId, adapter_name: DebugAdapterName, has_adapter_logs: bool) -> Self {
fn new(
id: SessionId,
adapter_name: DebugAdapterName,
session_label: SharedString,
has_adapter_logs: bool,
) -> Self {
Self {
id,
log_messages: VecDeque::new(),
rpc_messages: RpcMessages::new(),
adapter_name,
session_label,
has_adapter_logs,
is_terminated: false,
}
@@ -371,18 +385,22 @@ impl LogStore {
return None;
};

let (adapter_name, has_adapter_logs) = session.read_with(cx, |session, _| {
(
session.adapter(),
session
.adapter_client()
.map_or(false, |client| client.has_adapter_logs()),
)
});
let (adapter_name, session_label, has_adapter_logs) =
session.read_with(cx, |session, _| {
(
session.adapter(),
session.label(),
session
.adapter_client()
.map_or(false, |client| client.has_adapter_logs()),
)
});

state.insert(DebugAdapterState::new(
id.session_id,
adapter_name,
session_label
.unwrap_or_else(|| format!("Session {} (child)", id.session_id.0).into()),
has_adapter_logs,
));

@@ -506,12 +524,13 @@ impl Render for DapLogToolbarItemView {
current_client
.map(|sub_item| {
Cow::Owned(format!(
"{} ({}) - {}",
"{} - {} - {}",
sub_item.adapter_name,
sub_item.session_id.0,
sub_item.session_label,
match sub_item.selected_entry {
LogKind::Adapter => ADAPTER_LOGS,
LogKind::Rpc => RPC_MESSAGES,
View::AdapterLogs => ADAPTER_LOGS,
View::RpcMessages => RPC_MESSAGES,
View::InitializationSequence => INITIALIZATION_SEQUENCE,
}
))
})
@@ -529,8 +548,8 @@ impl Render for DapLogToolbarItemView {
.pl_2()
.child(
Label::new(format!(
"{}. {}",
row.session_id.0, row.adapter_name,
"{} - {}",
row.adapter_name, row.session_label
))
.color(workspace::ui::Color::Muted),
)
@@ -669,9 +688,16 @@ impl DapLogView {

let events_subscriptions = cx.subscribe(&log_store, |log_view, _, event, cx| match event {
Event::NewLogEntry { id, entry, kind } => {
if log_view.current_view == Some((id.session_id, *kind))
&& log_view.project == *id.project
{
let is_current_view = match (log_view.current_view, *kind) {
(Some((i, View::AdapterLogs)), LogKind::Adapter)
| (Some((i, View::RpcMessages)), LogKind::Rpc)
if i == id.session_id =>
{
log_view.project == *id.project
}
_ => false,
};
if is_current_view {
log_view.editor.update(cx, |editor, cx| {
editor.set_read_only(false);
let last_point = editor.buffer().read(cx).len(cx);
@@ -768,10 +794,11 @@ impl DapLogView {
.map(|state| DapMenuItem {
session_id: state.id,
adapter_name: state.adapter_name.clone(),
session_label: state.session_label.clone(),
has_adapter_logs: state.has_adapter_logs,
selected_entry: self
.current_view
.map_or(LogKind::Adapter, |(_, kind)| kind),
.map_or(View::AdapterLogs, |(_, kind)| kind),
})
.collect::<Vec<_>>()
})
@@ -789,7 +816,7 @@ impl DapLogView {
.map(|state| log_contents(state.iter().cloned()))
});
if let Some(rpc_log) = rpc_log {
self.current_view = Some((id.session_id, LogKind::Rpc));
self.current_view = Some((id.session_id, View::RpcMessages));
let (editor, editor_subscriptions) = Self::editor_for_logs(rpc_log, window, cx);
let language = self.project.read(cx).languages().language_for_name("JSON");
editor
@@ -830,7 +857,7 @@ impl DapLogView {
.map(|state| log_contents(state.iter().cloned()))
});
if let Some(message_log) = message_log {
self.current_view = Some((id.session_id, LogKind::Adapter));
self.current_view = Some((id.session_id, View::AdapterLogs));
let (editor, editor_subscriptions) = Self::editor_for_logs(message_log, window, cx);
editor
.read(cx)
@@ -859,7 +886,7 @@ impl DapLogView {
.map(|state| log_contents(state.iter().cloned()))
});
if let Some(rpc_log) = rpc_log {
self.current_view = Some((id.session_id, LogKind::Rpc));
self.current_view = Some((id.session_id, View::InitializationSequence));
let (editor, editor_subscriptions) = Self::editor_for_logs(rpc_log, window, cx);
let language = self.project.read(cx).languages().language_for_name("JSON");
editor
@@ -899,11 +926,12 @@ fn log_contents(lines: impl Iterator<Item = SharedString>) -> String {
}

#[derive(Clone, PartialEq)]
pub(crate) struct DapMenuItem {
pub session_id: SessionId,
pub adapter_name: DebugAdapterName,
pub has_adapter_logs: bool,
pub selected_entry: LogKind,
struct DapMenuItem {
session_id: SessionId,
session_label: SharedString,
adapter_name: DebugAdapterName,
has_adapter_logs: bool,
selected_entry: View,
}

const ADAPTER_LOGS: &str = "Adapter Logs";

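Note: the log view now tracks which of three tabs is selected via the `View` enum, while incoming log entries are still tagged with the two-variant `LogKind`; the subscription above maps `LogKind` onto the matching `View` variants and ignores everything else. A compact, self-contained sketch of that mapping (`LogKind` is simplified to its two variants here):

```rust
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
enum View {
    AdapterLogs,
    RpcMessages,
    InitializationSequence,
}

#[derive(Debug, Copy, Clone, PartialEq, Eq)]
enum LogKind {
    Adapter,
    Rpc,
}

// Only the adapter-logs and RPC views append entries as they stream in; the
// initialization-sequence view is rebuilt when it is opened instead.
fn accepts_new_entry(view: View, kind: LogKind) -> bool {
    match (view, kind) {
        (View::AdapterLogs, LogKind::Adapter) => true,
        (View::RpcMessages, LogKind::Rpc) => true,
        _ => false,
    }
}
```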
@@ -35,22 +35,27 @@ command_palette_hooks.workspace = true
dap.workspace = true
dap_adapters = { workspace = true, optional = true }
db.workspace = true
debugger_tools.workspace = true
editor.workspace = true
file_icons.workspace = true
futures.workspace = true
fuzzy.workspace = true
gpui.workspace = true
hex.workspace = true
indoc.workspace = true
itertools.workspace = true
language.workspace = true
log.workspace = true
menu.workspace = true
notifications.workspace = true
parking_lot.workspace = true
parse_int.workspace = true
paths.workspace = true
picker.workspace = true
pretty_assertions.workspace = true
project.workspace = true
rpc.workspace = true
schemars.workspace = true
serde.workspace = true
serde_json.workspace = true
serde_json_lenient.workspace = true
@@ -63,14 +68,13 @@ telemetry.workspace = true
terminal_view.workspace = true
text.workspace = true
theme.workspace = true
tree-sitter.workspace = true
tree-sitter-json.workspace = true
tree-sitter.workspace = true
ui.workspace = true
util.workspace = true
workspace.workspace = true
workspace-hack.workspace = true
debugger_tools.workspace = true
unindent = { workspace = true, optional = true }
util.workspace = true
workspace-hack.workspace = true
workspace.workspace = true
zed_actions.workspace = true

[dev-dependencies]
@@ -80,8 +84,8 @@ debugger_tools = { workspace = true, features = ["test-support"] }
editor = { workspace = true, features = ["test-support"] }
gpui = { workspace = true, features = ["test-support"] }
project = { workspace = true, features = ["test-support"] }
tree-sitter-go.workspace = true
unindent.workspace = true
util = { workspace = true, features = ["test-support"] }
workspace = { workspace = true, features = ["test-support"] }
zlog.workspace = true
tree-sitter-go.workspace = true

@@ -2,6 +2,7 @@ use crate::persistence::DebuggerPaneItem;
use crate::session::DebugSession;
use crate::session::running::RunningState;
use crate::session::running::breakpoint_list::BreakpointList;

use crate::{
ClearAllBreakpoints, Continue, CopyDebugAdapterArguments, Detach, FocusBreakpointList,
FocusConsole, FocusFrames, FocusLoadedSources, FocusModules, FocusTerminal, FocusVariables,
@@ -9,6 +10,7 @@ use crate::{
ToggleExpandItem, ToggleSessionPicker, ToggleThreadPicker, persistence, spawn_task_or_modal,
};
use anyhow::{Context as _, Result, anyhow};
use collections::IndexMap;
use dap::adapters::DebugAdapterName;
use dap::debugger_settings::DebugPanelDockPosition;
use dap::{
@@ -26,7 +28,7 @@ use text::ToPoint as _;

use itertools::Itertools as _;
use language::Buffer;
use project::debugger::session::{Session, SessionStateEvent};
use project::debugger::session::{Session, SessionQuirks, SessionState, SessionStateEvent};
use project::{DebugScenarioContext, Fs, ProjectPath, TaskSourceKind, WorktreeId};
use project::{Project, debugger::session::ThreadStatus};
use rpc::proto::{self};
@@ -35,7 +37,7 @@ use std::sync::{Arc, LazyLock};
use task::{DebugScenario, TaskContext};
use tree_sitter::{Query, StreamingIterator as _};
use ui::{ContextMenu, Divider, PopoverMenuHandle, Tooltip, prelude::*};
use util::{ResultExt, maybe};
use util::{ResultExt, debug_panic, maybe};
use workspace::SplitDirection;
use workspace::item::SaveOptions;
use workspace::{
@@ -63,13 +65,14 @@ pub enum DebugPanelEvent {

pub struct DebugPanel {
size: Pixels,
sessions: Vec<Entity<DebugSession>>,
active_session: Option<Entity<DebugSession>>,
project: Entity<Project>,
workspace: WeakEntity<Workspace>,
focus_handle: FocusHandle,
context_menu: Option<(Entity<ContextMenu>, Point<Pixels>, Subscription)>,
debug_scenario_scheduled_last: bool,
pub(crate) sessions_with_children:
IndexMap<Entity<DebugSession>, Vec<WeakEntity<DebugSession>>>,
pub(crate) thread_picker_menu_handle: PopoverMenuHandle<ContextMenu>,
pub(crate) session_picker_menu_handle: PopoverMenuHandle<ContextMenu>,
fs: Arc<dyn Fs>,
@@ -100,7 +103,7 @@ impl DebugPanel {

Self {
size: px(300.),
sessions: vec![],
sessions_with_children: Default::default(),
active_session: None,
focus_handle,
breakpoint_list: BreakpointList::new(
@@ -138,8 +141,9 @@ impl DebugPanel {
});
}

pub(crate) fn sessions(&self) -> Vec<Entity<DebugSession>> {
self.sessions.clone()
#[cfg(test)]
pub(crate) fn sessions(&self) -> impl Iterator<Item = Entity<DebugSession>> {
self.sessions_with_children.keys().cloned()
}

pub fn active_session(&self) -> Option<Entity<DebugSession>> {
@@ -185,12 +189,20 @@ impl DebugPanel {
cx: &mut Context<Self>,
) {
let dap_store = self.project.read(cx).dap_store();
let Some(adapter) = DapRegistry::global(cx).adapter(&scenario.adapter) else {
return;
};
let quirks = SessionQuirks {
compact: adapter.compact_child_session(),
prefer_thread_name: adapter.prefer_thread_name(),
};
let session = dap_store.update(cx, |dap_store, cx| {
dap_store.new_session(
scenario.label.clone(),
Some(scenario.label.clone()),
DebugAdapterName(scenario.adapter.clone()),
task_context.clone(),
None,
quirks,
cx,
)
});
@@ -267,22 +279,34 @@ impl DebugPanel {
}
});

cx.spawn(async move |_, cx| {
if let Err(error) = task.await {
log::error!("{error}");
session
.update(cx, |session, cx| {
session
.console_output(cx)
.unbounded_send(format!("error: {}", error))
.ok();
session.shutdown(cx)
})?
.await;
let boot_task = cx.spawn({
let session = session.clone();

async move |_, cx| {
if let Err(error) = task.await {
log::error!("{error}");
session
.update(cx, |session, cx| {
session
.console_output(cx)
.unbounded_send(format!("error: {}", error))
.ok();
session.shutdown(cx)
})?
.await;
}
anyhow::Ok(())
}
anyhow::Ok(())
})
.detach_and_log_err(cx);
});

session.update(cx, |session, _| match &mut session.mode {
SessionState::Building(state_task) => {
*state_task = Some(boot_task);
}
SessionState::Running(_) => {
debug_panic!("Session state should be in building because we are just starting it");
}
});
}

pub(crate) fn rerun_last_session(
@@ -363,14 +387,15 @@ impl DebugPanel {
};

let dap_store_handle = self.project.read(cx).dap_store().clone();
let label = curr_session.read(cx).label().clone();
let label = curr_session.read(cx).label();
let quirks = curr_session.read(cx).quirks();
let adapter = curr_session.read(cx).adapter().clone();
let binary = curr_session.read(cx).binary().cloned().unwrap();
let task_context = curr_session.read(cx).task_context().clone();

let curr_session_id = curr_session.read(cx).session_id();
self.sessions
.retain(|session| session.read(cx).session_id(cx) != curr_session_id);
self.sessions_with_children
.retain(|session, _| session.read(cx).session_id(cx) != curr_session_id);
let task = dap_store_handle.update(cx, |dap_store, cx| {
dap_store.shutdown_session(curr_session_id, cx)
});
@@ -379,7 +404,7 @@ impl DebugPanel {
task.await.log_err();

let (session, task) = dap_store_handle.update(cx, |dap_store, cx| {
let session = dap_store.new_session(label, adapter, task_context, None, cx);
let session = dap_store.new_session(label, adapter, task_context, None, quirks, cx);

let task = session.update(cx, |session, cx| {
session.boot(binary, worktree, dap_store_handle.downgrade(), cx)
@@ -425,6 +450,7 @@ impl DebugPanel {
let dap_store_handle = self.project.read(cx).dap_store().clone();
let label = self.label_for_child_session(&parent_session, request, cx);
let adapter = parent_session.read(cx).adapter().clone();
let quirks = parent_session.read(cx).quirks();
let Some(mut binary) = parent_session.read(cx).binary().cloned() else {
log::error!("Attempted to start a child-session without a binary");
return;
@@ -438,6 +464,7 @@ impl DebugPanel {
adapter,
task_context,
Some(parent_session.clone()),
quirks,
cx,
);

@@ -463,8 +490,8 @@ impl DebugPanel {
cx: &mut Context<Self>,
) {
let Some(session) = self
.sessions
.iter()
.sessions_with_children
.keys()
.find(|other| entity_id == other.entity_id())
.cloned()
else {
@@ -498,15 +525,14 @@ impl DebugPanel {
}
session.update(cx, |session, cx| session.shutdown(cx)).ok();
this.update(cx, |this, cx| {
this.sessions.retain(|other| entity_id != other.entity_id());

this.retain_sessions(|other| entity_id != other.entity_id());
if let Some(active_session_id) = this
.active_session
.as_ref()
.map(|session| session.entity_id())
{
if active_session_id == entity_id {
this.active_session = this.sessions.first().cloned();
this.active_session = this.sessions_with_children.keys().next().cloned();
}
}
cx.notify()
@@ -813,13 +839,24 @@ impl DebugPanel {
.on_click(window.listener_for(
&running_state,
|this, _, _window, cx| {
this.stop_thread(cx);
if this.session().read(cx).is_building() {
this.session().update(cx, |session, cx| {
session.shutdown(cx).detach()
});
} else {
this.stop_thread(cx);
}
},
))
.disabled(active_session.as_ref().is_none_or(
|session| {
session
.read(cx)
.session(cx)
.read(cx)
.is_terminated()
},
))
.disabled(
thread_status != ThreadStatus::Stopped
&& thread_status != ThreadStatus::Running,
)
.tooltip({
let focus_handle = focus_handle.clone();
let label = if capabilities
@@ -976,8 +1013,8 @@ impl DebugPanel {
cx: &mut Context<Self>,
) {
if let Some(session) = self
.sessions
.iter()
.sessions_with_children
.keys()
.find(|session| session.read(cx).session_id(cx) == session_id)
{
self.activate_session(session.clone(), window, cx);
@@ -990,7 +1027,7 @@ impl DebugPanel {
window: &mut Window,
cx: &mut Context<Self>,
) {
debug_assert!(self.sessions.contains(&session_item));
debug_assert!(self.sessions_with_children.contains_key(&session_item));
session_item.focus_handle(cx).focus(window);
session_item.update(cx, |this, cx| {
this.running_state().update(cx, |this, cx| {
@@ -1261,18 +1298,27 @@ impl DebugPanel {
parent_session: &Entity<Session>,
request: &StartDebuggingRequestArguments,
cx: &mut Context<'_, Self>,
) -> SharedString {
) -> Option<SharedString> {
let adapter = parent_session.read(cx).adapter();
if let Some(adapter) = DapRegistry::global(cx).adapter(&adapter) {
if let Some(label) = adapter.label_for_child_session(request) {
return label.into();
return Some(label.into());
}
}
let mut label = parent_session.read(cx).label().clone();
if !label.ends_with("(child)") {
label = format!("{label} (child)").into();
None
}

fn retain_sessions(&mut self, keep: impl Fn(&Entity<DebugSession>) -> bool) {
self.sessions_with_children
.retain(|session, _| keep(session));
for children in self.sessions_with_children.values_mut() {
children.retain(|child| {
let Some(child) = child.upgrade() else {
return false;
};
keep(&child)
});
}
label
}
}

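Note: `retain_sessions` above prunes both the top-level map keys and each surviving parent's list of weak child handles in one pass. A standalone sketch of the same shape, using `Rc`/`Weak` in place of gpui entities and assuming the `indexmap` crate stands in for the project's `collections::IndexMap`:

```rust
use indexmap::IndexMap;
use std::rc::{Rc, Weak};

type Session = Rc<String>;

fn retain_sessions(
    sessions: &mut IndexMap<Session, Vec<Weak<String>>>,
    keep: impl Fn(&Session) -> bool,
) {
    // Drop root entries that should go away...
    sessions.retain(|session, _| keep(session));
    // ...then prune dangling or filtered-out children under the survivors.
    for children in sessions.values_mut() {
        children.retain(|child| match child.upgrade() {
            Some(child) => keep(&child),
            None => false,
        });
    }
}
```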
@@ -1302,11 +1348,11 @@ async fn register_session_inner(
let serialized_layout = persistence::get_serialized_layout(adapter_name).await;
let debug_session = this.update_in(cx, |this, window, cx| {
let parent_session = this
.sessions
.iter()
.sessions_with_children
.keys()
.find(|p| Some(p.read(cx).session_id(cx)) == session.read(cx).parent_id(cx))
.cloned();
this.sessions.retain(|session| {
this.retain_sessions(|session| {
!session
.read(cx)
.running_state()
@@ -1337,13 +1383,23 @@ async fn register_session_inner(
)
.detach();
let insert_position = this
.sessions
.iter()
.sessions_with_children
.keys()
.position(|session| Some(session) == parent_session.as_ref())
.map(|position| position + 1)
.unwrap_or(this.sessions.len());
.unwrap_or(this.sessions_with_children.len());
// Maintain topological sort order of sessions
this.sessions.insert(insert_position, debug_session.clone());
let (_, old) = this.sessions_with_children.insert_before(
insert_position,
debug_session.clone(),
Default::default(),
);
debug_assert!(old.is_none());
if let Some(parent_session) = parent_session {
this.sessions_with_children
.entry(parent_session)
.and_modify(|children| children.push(debug_session.downgrade()));
}

debug_session
})?;
@@ -1383,7 +1439,7 @@ impl Panel for DebugPanel {
cx: &mut Context<Self>,
) {
if position.axis() != self.position(window, cx).axis() {
self.sessions.iter().for_each(|session_item| {
self.sessions_with_children.keys().for_each(|session_item| {
session_item.update(cx, |item, cx| {
item.running_state()
.update(cx, |state, _| state.invert_axies())
@@ -1704,6 +1760,7 @@ impl Render for DebugPanel {
category_filter: Some(
zed_actions::ExtensionCategoryFilter::DebugAdapters,
),
id: None,
}
.boxed_clone(),
cx,
@@ -1749,6 +1806,7 @@ impl Render for DebugPanel {
.child(breakpoint_list)
.child(Divider::vertical())
.child(welcome_experience)
.child(Divider::vertical())
} else {
this.items_end()
.child(welcome_experience)

@@ -3,10 +3,12 @@ use std::any::TypeId;
use dap::debugger_settings::DebuggerSettings;
use debugger_panel::DebugPanel;
use editor::Editor;
use gpui::{App, DispatchPhase, EntityInputHandler, actions};
use gpui::{Action, App, DispatchPhase, EntityInputHandler, actions};
use new_process_modal::{NewProcessModal, NewProcessMode};
use onboarding_modal::DebuggerOnboardingModal;
use project::debugger::{self, breakpoint_store::SourceBreakpoint, session::ThreadStatus};
use schemars::JsonSchema;
use serde::Deserialize;
use session::DebugSession;
use settings::Settings;
use stack_trace_view::StackTraceView;
@@ -86,6 +88,20 @@ actions!(
]
);

/// Extends selection down by a specified number of lines.
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema, Action)]
#[action(namespace = debugger)]
#[serde(deny_unknown_fields)]
/// Set a data breakpoint on the selected variable or memory region.
pub struct ToggleDataBreakpoint {
/// The type of data breakpoint
/// Read & Write
/// Read
/// Write
#[serde(default)]
pub access_type: Option<dap::DataBreakpointAccessType>,
}

actions!(
dev,
[

@@ -1,16 +1,82 @@
use std::time::Duration;
use std::{rc::Rc, time::Duration};

use collections::HashMap;
use gpui::{Animation, AnimationExt as _, Entity, Transformation, percentage};
use gpui::{Animation, AnimationExt as _, Entity, Transformation, WeakEntity, percentage};
use project::debugger::session::{ThreadId, ThreadStatus};
use ui::{ContextMenu, DropdownMenu, DropdownStyle, Indicator, prelude::*};
use util::truncate_and_trailoff;
use util::{maybe, truncate_and_trailoff};

use crate::{
debugger_panel::DebugPanel,
session::{DebugSession, running::RunningState},
};

struct SessionListEntry {
ancestors: Vec<Entity<DebugSession>>,
leaf: Entity<DebugSession>,
}

impl SessionListEntry {
pub(crate) fn label_element(&self, depth: usize, cx: &mut App) -> AnyElement {
const MAX_LABEL_CHARS: usize = 150;

let mut label = String::new();
for ancestor in &self.ancestors {
label.push_str(&ancestor.update(cx, |ancestor, cx| {
ancestor.label(cx).unwrap_or("(child)".into())
}));
label.push_str(" » ");
}
label.push_str(
&self
.leaf
.update(cx, |leaf, cx| leaf.label(cx).unwrap_or("(child)".into())),
);
let label = truncate_and_trailoff(&label, MAX_LABEL_CHARS);

let is_terminated = self
.leaf
.read(cx)
.running_state
.read(cx)
.session()
.read(cx)
.is_terminated();
let icon = {
if is_terminated {
Some(Indicator::dot().color(Color::Error))
} else {
match self
.leaf
.read(cx)
.running_state
.read(cx)
.thread_status(cx)
.unwrap_or_default()
{
project::debugger::session::ThreadStatus::Stopped => {
Some(Indicator::dot().color(Color::Conflict))
}
_ => Some(Indicator::dot().color(Color::Success)),
}
}
};

h_flex()
.id("session-label")
.ml(depth * px(16.0))
.gap_2()
.when_some(icon, |this, indicator| this.child(indicator))
.justify_between()
.child(
Label::new(label)
.size(LabelSize::Small)
.when(is_terminated, |this| this.strikethrough()),
)
.into_any_element()
}
}

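Note: `label_element` above renders a child session as a breadcrumb of its ancestors joined with `»` and truncates the result. A stripped-down version of just the string-building part; the truncation behaviour here is a simplified stand-in for `truncate_and_trailoff`, and 150 mirrors the `MAX_LABEL_CHARS` constant above.

```rust
// Join ancestor labels and the leaf label into "parent » child", then clamp
// the result; mirrors the label construction in label_element above.
fn breadcrumb_label(ancestors: &[&str], leaf: &str, max_chars: usize) -> String {
    let mut label = String::new();
    for ancestor in ancestors {
        label.push_str(ancestor);
        label.push_str(" » ");
    }
    label.push_str(leaf);
    if label.chars().count() > max_chars {
        let truncated: String = label.chars().take(max_chars).collect();
        format!("{truncated}…")
    } else {
        label
    }
}

fn main() {
    assert_eq!(
        breadcrumb_label(&["js-debug"], "node (child)", 150),
        "js-debug » node (child)"
    );
}
```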
impl DebugPanel {
fn dropdown_label(label: impl Into<SharedString>) -> Label {
const MAX_LABEL_CHARS: usize = 50;
@@ -25,145 +91,205 @@ impl DebugPanel {
window: &mut Window,
cx: &mut Context<Self>,
) -> Option<impl IntoElement> {
if let Some(running_state) = running_state {
let sessions = self.sessions().clone();
let weak = cx.weak_entity();
let running_state = running_state.read(cx);
let label = if let Some(active_session) = active_session.clone() {
active_session.read(cx).session(cx).read(cx).label()
} else {
SharedString::new_static("Unknown Session")
};
let running_state = running_state?;

let is_terminated = running_state.session().read(cx).is_terminated();
let is_started = active_session
.is_some_and(|session| session.read(cx).session(cx).read(cx).is_started());
let mut session_entries = Vec::with_capacity(self.sessions_with_children.len() * 3);
let mut sessions_with_children = self.sessions_with_children.iter().peekable();

let session_state_indicator = if is_terminated {
Indicator::dot().color(Color::Error).into_any_element()
} else if !is_started {
Icon::new(IconName::ArrowCircle)
.size(IconSize::Small)
.color(Color::Muted)
.with_animation(
"arrow-circle",
Animation::new(Duration::from_secs(2)).repeat(),
|icon, delta| icon.transform(Transformation::rotate(percentage(delta))),
)
.into_any_element()
while let Some((root, children)) = sessions_with_children.next() {
let root_entry = if let Ok([single_child]) = <&[_; 1]>::try_from(children.as_slice())
&& let Some(single_child) = single_child.upgrade()
&& single_child.read(cx).quirks.compact
{
sessions_with_children.next();
SessionListEntry {
leaf: single_child.clone(),
ancestors: vec![root.clone()],
}
} else {
match running_state.thread_status(cx).unwrap_or_default() {
ThreadStatus::Stopped => {
Indicator::dot().color(Color::Conflict).into_any_element()
}
_ => Indicator::dot().color(Color::Success).into_any_element(),
SessionListEntry {
leaf: root.clone(),
ancestors: Vec::new(),
}
};
session_entries.push(root_entry);

let trigger = h_flex()
.gap_2()
.child(session_state_indicator)
.justify_between()
.child(
DebugPanel::dropdown_label(label)
.when(is_terminated, |this| this.strikethrough()),
)
.into_any_element();

Some(
DropdownMenu::new_with_element(
"debugger-session-list",
trigger,
ContextMenu::build(window, cx, move |mut this, _, cx| {
let context_menu = cx.weak_entity();
let mut session_depths = HashMap::default();
for session in sessions.into_iter() {
let weak_session = session.downgrade();
let weak_session_id = weak_session.entity_id();
let session_id = session.read(cx).session_id(cx);
let parent_depth = session
.read(cx)
.session(cx)
.read(cx)
.parent_id(cx)
.and_then(|parent_id| session_depths.get(&parent_id).cloned());
let self_depth =
*session_depths.entry(session_id).or_insert_with(|| {
parent_depth.map(|depth| depth + 1).unwrap_or(0usize)
});
this = this.custom_entry(
{
let weak = weak.clone();
let context_menu = context_menu.clone();
move |_, cx| {
weak_session
.read_with(cx, |session, cx| {
let context_menu = context_menu.clone();

let id: SharedString =
format!("debug-session-{}", session_id.0)
.into();

h_flex()
.w_full()
.group(id.clone())
.justify_between()
.child(session.label_element(self_depth, cx))
.child(
IconButton::new(
"close-debug-session",
IconName::Close,
)
.visible_on_hover(id.clone())
.icon_size(IconSize::Small)
.on_click({
let weak = weak.clone();
move |_, window, cx| {
weak.update(cx, |panel, cx| {
panel.close_session(
weak_session_id,
window,
cx,
);
})
.ok();
context_menu
.update(cx, |this, cx| {
this.cancel(
&Default::default(),
window,
cx,
);
})
.ok();
}
}),
)
.into_any_element()
})
.unwrap_or_else(|_| div().into_any_element())
}
},
{
let weak = weak.clone();
move |window, cx| {
weak.update(cx, |panel, cx| {
panel.activate_session(session.clone(), window, cx);
})
.ok();
}
},
);
}
this
session_entries.extend(
sessions_with_children
.by_ref()
.take_while(|(session, _)| {
session
.read(cx)
.session(cx)
.read(cx)
.parent_id(cx)
.is_some()
})
.map(|(session, _)| SessionListEntry {
leaf: session.clone(),
ancestors: vec![],
}),
)
.style(DropdownStyle::Ghost)
.handle(self.session_picker_menu_handle.clone()),
)
} else {
None
);
}

let weak = cx.weak_entity();
let trigger_label = if let Some(active_session) = active_session.clone() {
active_session.update(cx, |active_session, cx| {
active_session.label(cx).unwrap_or("(child)".into())
})
} else {
SharedString::new_static("Unknown Session")
};
let running_state = running_state.read(cx);

let is_terminated = running_state.session().read(cx).is_terminated();
let is_started = active_session
.is_some_and(|session| session.read(cx).session(cx).read(cx).is_started());

let session_state_indicator = if is_terminated {
Indicator::dot().color(Color::Error).into_any_element()
} else if !is_started {
Icon::new(IconName::ArrowCircle)
.size(IconSize::Small)
.color(Color::Muted)
.with_animation(
"arrow-circle",
Animation::new(Duration::from_secs(2)).repeat(),
|icon, delta| icon.transform(Transformation::rotate(percentage(delta))),
)
.into_any_element()
} else {
match running_state.thread_status(cx).unwrap_or_default() {
ThreadStatus::Stopped => Indicator::dot().color(Color::Conflict).into_any_element(),
_ => Indicator::dot().color(Color::Success).into_any_element(),
}
};

let trigger = h_flex()
.gap_2()
.child(session_state_indicator)
.justify_between()
.child(
DebugPanel::dropdown_label(trigger_label)
.when(is_terminated, |this| this.strikethrough()),
)
.into_any_element();

let menu = DropdownMenu::new_with_element(
"debugger-session-list",
trigger,
ContextMenu::build(window, cx, move |mut this, _, cx| {
let context_menu = cx.weak_entity();
let mut session_depths = HashMap::default();
for session_entry in session_entries {
let session_id = session_entry.leaf.read(cx).session_id(cx);
let parent_depth = session_entry
.ancestors
.first()
.unwrap_or(&session_entry.leaf)
.read(cx)
.session(cx)
.read(cx)
.parent_id(cx)
.and_then(|parent_id| session_depths.get(&parent_id).cloned());
let self_depth = *session_depths
.entry(session_id)
.or_insert_with(|| parent_depth.map(|depth| depth + 1).unwrap_or(0usize));
this = this.custom_entry(
{
let weak = weak.clone();
let context_menu = context_menu.clone();
let ancestors: Rc<[_]> = session_entry
.ancestors
.iter()
.map(|session| session.downgrade())
.collect();
let leaf = session_entry.leaf.downgrade();
move |window, cx| {
Self::render_session_menu_entry(
weak.clone(),
context_menu.clone(),
ancestors.clone(),
leaf.clone(),
self_depth,
window,
cx,
)
}
},
{
let weak = weak.clone();
let leaf = session_entry.leaf.clone();
move |window, cx| {
weak.update(cx, |panel, cx| {
panel.activate_session(leaf.clone(), window, cx);
})
.ok();
}
},
);
}
this
}),
)
.style(DropdownStyle::Ghost)
.handle(self.session_picker_menu_handle.clone());

Some(menu)
}

fn render_session_menu_entry(
weak: WeakEntity<DebugPanel>,
context_menu: WeakEntity<ContextMenu>,
ancestors: Rc<[WeakEntity<DebugSession>]>,
leaf: WeakEntity<DebugSession>,
self_depth: usize,
_window: &mut Window,
cx: &mut App,
) -> AnyElement {
let Some(session_entry) = maybe!({
let ancestors = ancestors
.iter()
.map(|ancestor| ancestor.upgrade())
.collect::<Option<Vec<_>>>()?;
let leaf = leaf.upgrade()?;
Some(SessionListEntry { ancestors, leaf })
}) else {
return div().into_any_element();
};

let id: SharedString = format!(
"debug-session-{}",
session_entry.leaf.read(cx).session_id(cx).0
)
.into();
let session_entity_id = session_entry.leaf.entity_id();

h_flex()
.w_full()
.group(id.clone())
.justify_between()
.child(session_entry.label_element(self_depth, cx))
.child(
IconButton::new("close-debug-session", IconName::Close)
.visible_on_hover(id.clone())
.icon_size(IconSize::Small)
.on_click({
let weak = weak.clone();
move |_, window, cx| {
weak.update(cx, |panel, cx| {
panel.close_session(session_entity_id, window, cx);
})
.ok();
context_menu
.update(cx, |this, cx| {
this.cancel(&Default::default(), window, cx);
})
.ok();
}
}),
)
.into_any_element()
}

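Note: the session dropdown above collapses a root that has exactly one child marked with the compact quirk into a single entry whose breadcrumb keeps the parent as an ancestor. A simplified, self-contained sketch of that collapsing pass over (root, children) pairs; the types are stand-ins for the gpui entities, and the nested data shape here is a simplification of the real `sessions_with_children` map.

```rust
#[derive(Clone)]
struct Session {
    label: String,
    compact: bool,
}

struct Entry {
    ancestors: Vec<Session>,
    leaf: Session,
}

// Collapse `root -> [one compact child]` into a single breadcrumb entry,
// otherwise list the root on its own; mirrors the while-let loop above.
fn build_entries(sessions: &[(Session, Vec<Session>)]) -> Vec<Entry> {
    sessions
        .iter()
        .map(|(root, children)| match children.as_slice() {
            [only] if only.compact => Entry {
                ancestors: vec![root.clone()],
                leaf: only.clone(),
            },
            _ => Entry {
                ancestors: Vec::new(),
                leaf: root.clone(),
            },
        })
        .collect()
}
```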
pub(crate) fn render_thread_dropdown(

@@ -766,14 +766,7 @@ impl Render for NewProcessModal {
))
.child(
h_flex()
.child(div().child(self.adapter_drop_down_menu(window, cx)))
.child(
Button::new("debugger-spawn", "Start")
.on_click(cx.listener(|this, _, window, cx| {
this.start_new_session(window, cx)
}))
.disabled(disabled),
),
.child(div().child(self.adapter_drop_down_menu(window, cx))),
)
}),
NewProcessMode::Debug => el,
||||