Compare commits: update-deb...scan-code (22 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 2eac6a9222 | |
| | 7c3cffdc52 | |
| | 5a3186b659 | |
| | caf54844de | |
| | 745ebe2313 | |
| | 5c95e942e6 | |
| | f979f24bfa | |
| | 411b9abb9e | |
| | 81d4d48ef2 | |
| | cd9284761a | |
| | 34f9eef879 | |
| | 23cf6bf268 | |
| | c97e477eb1 | |
| | 16804a81cc | |
| | 8bf39bf768 | |
| | 75922e8fcd | |
| | 2eb83364ae | |
| | 5d22585ef5 | |
| | 71303fa18b | |
| | 5753b978a0 | |
| | 9cf2490ed7 | |
| | 28ea3ea529 | |
.github/actions/build_docs/action.yml (vendored, 2 changes)
@@ -22,7 +22,7 @@ runs:
- name: Check for broken links
uses: lycheeverse/lychee-action@82202e5e9c2f4ef1a55a3d02563e1cb6041e5332 # v2.4.1
with:
args: --no-progress --exclude '^http' './docs/src/**/*'
args: --no-progress './docs/src/**/*'
fail: true

- name: Build book

.github/actions/run_tests/action.yml (vendored, 13 changes)
@@ -1,6 +1,12 @@
name: "Run tests"
description: "Runs the tests"

inputs:
use-xvfb:
description: "Whether to run tests with xvfb"
required: false
default: "false"

runs:
using: "composite"
steps:
@@ -20,4 +26,9 @@ runs:

- name: Run tests
shell: bash -euxo pipefail {0}
run: cargo nextest run --workspace --no-fail-fast
run: |
if [ "${{ inputs.use-xvfb }}" == "true" ]; then
xvfb-run --auto-servernum --server-args="-screen 0 1024x768x24 -nolisten tcp" cargo nextest run --workspace --no-fail-fast
else
cargo nextest run --workspace --no-fail-fast
fi

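The new `use-xvfb` input lets callers opt into running the test suite under a virtual X server instead of requiring a real display; the ci.yml change further down wires this up for the Linux job. A minimal caller might look like the sketch below; only the action path and the `use-xvfb` input are taken from this diff, while the workflow name, trigger, job name, and runner label are illustrative.

```yaml
# Hypothetical workflow consuming the composite action above. Only the action
# path and the `use-xvfb` input come from this diff; everything else is a
# placeholder.
name: Linux tests (sketch)
on: push

jobs:
  linux_tests:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repo
        uses: actions/checkout@v4

      # The composite action wraps `cargo nextest` in xvfb-run when asked to.
      - name: Run tests
        uses: ./.github/actions/run_tests
        with:
          use-xvfb: true
```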
.github/actions/run_tests_windows/action.yml (vendored, 8 changes)
@@ -10,8 +10,8 @@ inputs:
runs:
using: "composite"
steps:
- name: Install test runner
shell: powershell
- name: Install Rust
shell: pwsh
working-directory: ${{ inputs.working-directory }}
run: cargo install cargo-nextest --locked

@@ -21,6 +21,6 @@ runs:
node-version: "18"

- name: Run tests
shell: powershell
shell: pwsh
working-directory: ${{ inputs.working-directory }}
run: cargo nextest run --workspace --no-fail-fast
run: cargo nextest run --workspace --no-fail-fast --config='profile.dev.debug="limited"'

.github/workflows/ci.yml (vendored, 117 changes)
@@ -319,6 +319,8 @@ jobs:
|
||||
|
||||
- name: Run tests
|
||||
uses: ./.github/actions/run_tests
|
||||
with:
|
||||
use-xvfb: true
|
||||
|
||||
- name: Build other binaries and features
|
||||
run: |
|
||||
@@ -373,6 +375,64 @@ jobs:
|
||||
if: always()
|
||||
run: rm -rf ./../.cargo
|
||||
|
||||
windows_clippy:
|
||||
timeout-minutes: 60
|
||||
name: (Windows) Run Clippy
|
||||
needs: [job_spec]
|
||||
if: |
|
||||
github.repository_owner == 'zed-industries' &&
|
||||
needs.job_spec.outputs.run_tests == 'true'
|
||||
runs-on: windows-2025-16
|
||||
steps:
|
||||
# more info here:- https://github.com/rust-lang/cargo/issues/13020
|
||||
- name: Enable longer pathnames for git
|
||||
run: git config --system core.longpaths true
|
||||
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
clean: false
|
||||
|
||||
- name: Create Dev Drive using ReFS
|
||||
run: ./script/setup-dev-driver.ps1
|
||||
|
||||
# actions/checkout does not let us clone into anywhere outside ${{ github.workspace }}, so we have to copy the clone...
|
||||
- name: Copy Git Repo to Dev Drive
|
||||
run: |
|
||||
Copy-Item -Path "${{ github.workspace }}" -Destination "${{ env.ZED_WORKSPACE }}" -Recurse
|
||||
|
||||
- name: Cache dependencies
|
||||
uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2
|
||||
with:
|
||||
save-if: ${{ github.ref == 'refs/heads/main' }}
|
||||
workspaces: ${{ env.ZED_WORKSPACE }}
|
||||
cache-provider: "github"
|
||||
|
||||
- name: Configure CI
|
||||
run: |
|
||||
mkdir -p ${{ env.CARGO_HOME }} -ErrorAction Ignore
|
||||
cp ./.cargo/ci-config.toml ${{ env.CARGO_HOME }}/config.toml
|
||||
|
||||
- name: cargo clippy
|
||||
working-directory: ${{ env.ZED_WORKSPACE }}
|
||||
run: ./script/clippy.ps1
|
||||
|
||||
- name: Check dev drive space
|
||||
working-directory: ${{ env.ZED_WORKSPACE }}
|
||||
# `setup-dev-driver.ps1` creates a 100GB drive, with CI taking up ~45GB of the drive.
|
||||
run: ./script/exit-ci-if-dev-drive-is-full.ps1 95
|
||||
|
||||
# Since the Windows runners are stateful, so we need to remove the config file to prevent potential bug.
|
||||
- name: Clean CI config file
|
||||
if: always()
|
||||
run: |
|
||||
if (Test-Path "${{ env.CARGO_HOME }}/config.toml") {
|
||||
Remove-Item -Path "${{ env.CARGO_HOME }}/config.toml" -Force
|
||||
}
|
||||
|
||||
# Windows CI takes twice as long as our other platforms and fast github hosted runners are expensive.
|
||||
# But we still want to do CI, so let's only run tests on main and come back to this when we're
|
||||
# ready to self host our Windows CI (e.g. during the push for full Windows support)
|
||||
windows_tests:
|
||||
timeout-minutes: 60
|
||||
name: (Windows) Run Tests
|
||||
@@ -380,45 +440,51 @@ jobs:
|
||||
if: |
|
||||
github.repository_owner == 'zed-industries' &&
|
||||
needs.job_spec.outputs.run_tests == 'true'
|
||||
runs-on: [self-hosted, Windows, X64]
|
||||
# Use bigger runners for PRs (speed); smaller for async (cost)
|
||||
runs-on: ${{ github.event_name == 'pull_request' && 'windows-2025-32' || 'windows-2025-16' }}
|
||||
steps:
|
||||
- name: Environment Setup
|
||||
run: |
|
||||
$RunnerDir = Split-Path -Parent $env:RUNNER_WORKSPACE
|
||||
Write-Output `
|
||||
"RUSTUP_HOME=$RunnerDir\.rustup" `
|
||||
"CARGO_HOME=$RunnerDir\.cargo" `
|
||||
"PATH=$RunnerDir\.cargo\bin;$env:PATH" `
|
||||
>> $env:GITHUB_ENV
|
||||
# more info here:- https://github.com/rust-lang/cargo/issues/13020
|
||||
- name: Enable longer pathnames for git
|
||||
run: git config --system core.longpaths true
|
||||
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
with:
|
||||
clean: false
|
||||
|
||||
- name: Setup Cargo and Rustup
|
||||
- name: Create Dev Drive using ReFS
|
||||
run: ./script/setup-dev-driver.ps1
|
||||
|
||||
# actions/checkout does not let us clone into anywhere outside ${{ github.workspace }}, so we have to copy the clone...
|
||||
- name: Copy Git Repo to Dev Drive
|
||||
run: |
|
||||
Copy-Item -Path "${{ github.workspace }}" -Destination "${{ env.ZED_WORKSPACE }}" -Recurse
|
||||
|
||||
- name: Cache dependencies
|
||||
uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6 # v2
|
||||
with:
|
||||
save-if: ${{ github.ref == 'refs/heads/main' }}
|
||||
workspaces: ${{ env.ZED_WORKSPACE }}
|
||||
cache-provider: "github"
|
||||
|
||||
- name: Configure CI
|
||||
run: |
|
||||
mkdir -p ${{ env.CARGO_HOME }} -ErrorAction Ignore
|
||||
cp ./.cargo/ci-config.toml ${{ env.CARGO_HOME }}/config.toml
|
||||
.\script\install-rustup.ps1
|
||||
|
||||
- name: cargo clippy
|
||||
run: |
|
||||
.\script\clippy.ps1
|
||||
|
||||
- name: Run tests
|
||||
uses: ./.github/actions/run_tests_windows
|
||||
with:
|
||||
working-directory: ${{ env.ZED_WORKSPACE }}
|
||||
|
||||
- name: Build Zed
|
||||
working-directory: ${{ env.ZED_WORKSPACE }}
|
||||
run: cargo build
|
||||
|
||||
- name: Limit target directory size
|
||||
run: ./script/clear-target-dir-if-larger-than.ps1 250
|
||||
|
||||
# - name: Check dev drive space
|
||||
# working-directory: ${{ env.ZED_WORKSPACE }}
|
||||
# # `setup-dev-driver.ps1` creates a 100GB drive, with CI taking up ~45GB of the drive.
|
||||
# run: ./script/exit-ci-if-dev-drive-is-full.ps1 95
|
||||
- name: Check dev drive space
|
||||
working-directory: ${{ env.ZED_WORKSPACE }}
|
||||
# `setup-dev-driver.ps1` creates a 100GB drive, with CI taking up ~45GB of the drive.
|
||||
run: ./script/exit-ci-if-dev-drive-is-full.ps1 95
|
||||
|
||||
# Since the Windows runners are stateful, so we need to remove the config file to prevent potential bug.
|
||||
- name: Clean CI config file
|
||||
@@ -434,13 +500,13 @@ jobs:
|
||||
needs:
|
||||
- job_spec
|
||||
- style
|
||||
- check_docs
|
||||
- migration_checks
|
||||
# run_tests: If adding required tests, add them here and to script below.
|
||||
- workspace_hack
|
||||
- linux_tests
|
||||
- build_remote_server
|
||||
- macos_tests
|
||||
- windows_clippy
|
||||
- windows_tests
|
||||
if: |
|
||||
github.repository_owner == 'zed-industries' &&
|
||||
@@ -451,8 +517,7 @@ jobs:
|
||||
# Check dependent jobs...
|
||||
RET_CODE=0
|
||||
# Always check style
|
||||
[[ "${{ needs.style.result }}" != 'success' ]] && { RET_CODE=1; echo "style tests failed"; }
|
||||
[[ "${{ needs.check_docs.result }}" != 'success' ]] && { RET_CODE=1; echo "docs checks failed"; }
|
||||
[[ "${{ needs.style.result }}" != 'success' ]] && { RET_CODE=1; echo "style tests failed"; }
|
||||
|
||||
# Only check test jobs if they were supposed to run
|
||||
if [[ "${{ needs.job_spec.outputs.run_tests }}" == "true" ]]; then
|
||||
@@ -460,6 +525,7 @@ jobs:
|
||||
[[ "${{ needs.macos_tests.result }}" != 'success' ]] && { RET_CODE=1; echo "macOS tests failed"; }
|
||||
[[ "${{ needs.linux_tests.result }}" != 'success' ]] && { RET_CODE=1; echo "Linux tests failed"; }
|
||||
[[ "${{ needs.windows_tests.result }}" != 'success' ]] && { RET_CODE=1; echo "Windows tests failed"; }
|
||||
[[ "${{ needs.windows_clippy.result }}" != 'success' ]] && { RET_CODE=1; echo "Windows clippy failed"; }
|
||||
[[ "${{ needs.build_remote_server.result }}" != 'success' ]] && { RET_CODE=1; echo "Remote server build failed"; }
|
||||
# This check is intentionally disabled. See: https://github.com/zed-industries/zed/pull/28431
|
||||
# [[ "${{ needs.migration_checks.result }}" != 'success' ]] && { RET_CODE=1; echo "Migration Checks failed"; }
|
||||
@@ -737,7 +803,6 @@ jobs:
|
||||
name: Build with Nix
|
||||
uses: ./.github/workflows/nix.yml
|
||||
if: github.repository_owner == 'zed-industries' && contains(github.event.pull_request.labels.*.name, 'run-nix')
|
||||
secrets: inherit
|
||||
with:
|
||||
flake-output: debug
|
||||
# excludes the final package to only cache dependencies
|
||||
|
||||
.github/workflows/eval.yml (vendored, 1 change)
@@ -30,7 +30,6 @@ jobs:
noop:
name: No-op
runs-on: ubuntu-latest
if: github.repository_owner == 'zed-industries'
steps:
- name: No-op
run: echo "Nothing to do"

.github/workflows/release_nightly.yml (vendored, 1 change)
@@ -214,7 +214,6 @@ jobs:
bundle-nix:
name: Build and cache Nix package
needs: tests
secrets: inherit
uses: ./.github/workflows/nix.yml

update-nightly-tag:

.github/workflows/unit_evals.yml (vendored, 1 change)
@@ -19,7 +19,6 @@ env:

jobs:
unit_evals:
if: github.repository_owner == 'zed-industries'
timeout-minutes: 60
name: Run unit evals
runs-on:

Cargo.lock (generated, 248 changes)
@@ -491,6 +491,7 @@ dependencies = [
|
||||
"anyhow",
|
||||
"futures 0.3.31",
|
||||
"gpui",
|
||||
"shlex",
|
||||
"smol",
|
||||
"tempfile",
|
||||
"util",
|
||||
@@ -2041,7 +2042,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "blade-graphics"
|
||||
version = "0.6.0"
|
||||
source = "git+https://github.com/kvark/blade?rev=e0ec4e720957edd51b945b64dd85605ea54bcfe5#e0ec4e720957edd51b945b64dd85605ea54bcfe5"
|
||||
source = "git+https://github.com/kvark/blade?rev=416375211bb0b5826b3584dccdb6a43369e499ad#416375211bb0b5826b3584dccdb6a43369e499ad"
|
||||
dependencies = [
|
||||
"ash",
|
||||
"ash-window",
|
||||
@@ -2074,7 +2075,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "blade-macros"
|
||||
version = "0.3.0"
|
||||
source = "git+https://github.com/kvark/blade?rev=e0ec4e720957edd51b945b64dd85605ea54bcfe5#e0ec4e720957edd51b945b64dd85605ea54bcfe5"
|
||||
source = "git+https://github.com/kvark/blade?rev=416375211bb0b5826b3584dccdb6a43369e499ad#416375211bb0b5826b3584dccdb6a43369e499ad"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
@@ -2084,7 +2085,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "blade-util"
|
||||
version = "0.2.0"
|
||||
source = "git+https://github.com/kvark/blade?rev=e0ec4e720957edd51b945b64dd85605ea54bcfe5#e0ec4e720957edd51b945b64dd85605ea54bcfe5"
|
||||
source = "git+https://github.com/kvark/blade?rev=416375211bb0b5826b3584dccdb6a43369e499ad#416375211bb0b5826b3584dccdb6a43369e499ad"
|
||||
dependencies = [
|
||||
"blade-graphics",
|
||||
"bytemuck",
|
||||
@@ -2822,11 +2823,9 @@ dependencies = [
|
||||
"collections",
|
||||
"credentials_provider",
|
||||
"feature_flags",
|
||||
"fs",
|
||||
"futures 0.3.31",
|
||||
"gpui",
|
||||
"gpui_tokio",
|
||||
"hickory-resolver",
|
||||
"http_client",
|
||||
"http_client_tls",
|
||||
"httparse",
|
||||
@@ -2835,7 +2834,6 @@ dependencies = [
|
||||
"paths",
|
||||
"postage",
|
||||
"rand 0.8.5",
|
||||
"regex",
|
||||
"release_channel",
|
||||
"rpc",
|
||||
"rustls-pki-types",
|
||||
@@ -3542,20 +3540,6 @@ dependencies = [
|
||||
"coreaudio-sys",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "coreaudio-rs"
|
||||
version = "0.13.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1aae284fbaf7d27aa0e292f7677dfbe26503b0d555026f702940805a630eac17"
|
||||
dependencies = [
|
||||
"bitflags 1.3.2",
|
||||
"libc",
|
||||
"objc2-audio-toolbox",
|
||||
"objc2-core-audio",
|
||||
"objc2-core-audio-types",
|
||||
"objc2-core-foundation",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "coreaudio-sys"
|
||||
version = "0.2.16"
|
||||
@@ -3591,8 +3575,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "cpal"
|
||||
version = "0.15.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "873dab07c8f743075e57f524c583985fbaf745602acbe916a01539364369a779"
|
||||
source = "git+https://github.com/zed-industries/cpal?rev=fd8bc2fd39f1f5fdee5a0690656caff9a26d9d50#fd8bc2fd39f1f5fdee5a0690656caff9a26d9d50"
|
||||
dependencies = [
|
||||
"alsa",
|
||||
"core-foundation-sys",
|
||||
@@ -3602,7 +3585,7 @@ dependencies = [
|
||||
"js-sys",
|
||||
"libc",
|
||||
"mach2",
|
||||
"ndk 0.8.0",
|
||||
"ndk",
|
||||
"ndk-context",
|
||||
"oboe",
|
||||
"wasm-bindgen",
|
||||
@@ -3611,32 +3594,6 @@ dependencies = [
|
||||
"windows 0.54.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cpal"
|
||||
version = "0.16.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cbd307f43cc2a697e2d1f8bc7a1d824b5269e052209e28883e5bc04d095aaa3f"
|
||||
dependencies = [
|
||||
"alsa",
|
||||
"coreaudio-rs 0.13.0",
|
||||
"dasp_sample",
|
||||
"jni",
|
||||
"js-sys",
|
||||
"libc",
|
||||
"mach2",
|
||||
"ndk 0.9.0",
|
||||
"ndk-context",
|
||||
"num-derive",
|
||||
"num-traits",
|
||||
"objc2-audio-toolbox",
|
||||
"objc2-core-audio",
|
||||
"objc2-core-audio-types",
|
||||
"wasm-bindgen",
|
||||
"wasm-bindgen-futures",
|
||||
"web-sys",
|
||||
"windows 0.54.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cpp_demangle"
|
||||
version = "0.4.4"
|
||||
@@ -4070,7 +4027,6 @@ dependencies = [
|
||||
"gpui",
|
||||
"http_client",
|
||||
"language",
|
||||
"libc",
|
||||
"log",
|
||||
"node_runtime",
|
||||
"parking_lot",
|
||||
@@ -4094,7 +4050,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "dap-types"
|
||||
version = "0.0.1"
|
||||
source = "git+https://github.com/zed-industries/dap-types?rev=b40956a7f4d1939da67429d941389ee306a3a308#b40956a7f4d1939da67429d941389ee306a3a308"
|
||||
source = "git+https://github.com/zed-industries/dap-types?rev=68516de327fa1be15214133a0a2e52a12982ce75#68516de327fa1be15214133a0a2e52a12982ce75"
|
||||
dependencies = [
|
||||
"schemars",
|
||||
"serde",
|
||||
@@ -4242,7 +4198,6 @@ dependencies = [
|
||||
"gpui",
|
||||
"serde_json",
|
||||
"task",
|
||||
"util",
|
||||
"workspace-hack",
|
||||
]
|
||||
|
||||
@@ -4268,7 +4223,6 @@ dependencies = [
|
||||
name = "debugger_ui"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"alacritty_terminal",
|
||||
"anyhow",
|
||||
"client",
|
||||
"collections",
|
||||
@@ -4278,6 +4232,7 @@ dependencies = [
|
||||
"db",
|
||||
"debugger_tools",
|
||||
"editor",
|
||||
"feature_flags",
|
||||
"file_icons",
|
||||
"futures 0.3.31",
|
||||
"fuzzy",
|
||||
@@ -4294,7 +4249,6 @@ dependencies = [
|
||||
"rpc",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"serde_json_lenient",
|
||||
"settings",
|
||||
"shlex",
|
||||
"sysinfo",
|
||||
@@ -4302,8 +4256,6 @@ dependencies = [
|
||||
"tasks_ui",
|
||||
"terminal_view",
|
||||
"theme",
|
||||
"tree-sitter",
|
||||
"tree-sitter-json",
|
||||
"ui",
|
||||
"unindent",
|
||||
"util",
|
||||
@@ -4743,11 +4695,13 @@ dependencies = [
|
||||
"client",
|
||||
"clock",
|
||||
"collections",
|
||||
"command_palette_hooks",
|
||||
"convert_case 0.8.0",
|
||||
"ctor",
|
||||
"dap",
|
||||
"db",
|
||||
"emojis",
|
||||
"feature_flags",
|
||||
"file_icons",
|
||||
"fs",
|
||||
"futures 0.3.31",
|
||||
@@ -4910,18 +4864,6 @@ version = "1.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a3d8a32ae18130a3c84dd492d4215c3d913c3b07c6b63c2eb3eb7ff1101ab7bf"
|
||||
|
||||
[[package]]
|
||||
name = "enum-as-inner"
|
||||
version = "0.6.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a1e6a265c649f3f5979b601d26f1d05ada116434c87741c9493cb56218f76cbc"
|
||||
dependencies = [
|
||||
"heck 0.5.0",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.101",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "enumflags2"
|
||||
version = "0.7.11"
|
||||
@@ -6179,7 +6121,6 @@ dependencies = [
|
||||
"anyhow",
|
||||
"askpass",
|
||||
"buffer_diff",
|
||||
"call",
|
||||
"chrono",
|
||||
"collections",
|
||||
"command_palette_hooks",
|
||||
@@ -6218,7 +6159,6 @@ dependencies = [
|
||||
"ui",
|
||||
"unindent",
|
||||
"util",
|
||||
"watch",
|
||||
"windows 0.61.1",
|
||||
"workspace",
|
||||
"workspace-hack",
|
||||
@@ -7501,51 +7441,6 @@ version = "0.2.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "dfa686283ad6dd069f105e5ab091b04c62850d3e4cf5d67debad1933f55023df"
|
||||
|
||||
[[package]]
|
||||
name = "hickory-proto"
|
||||
version = "0.24.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "92652067c9ce6f66ce53cc38d1169daa36e6e7eb7dd3b63b5103bd9d97117248"
|
||||
dependencies = [
|
||||
"async-trait",
|
||||
"cfg-if",
|
||||
"data-encoding",
|
||||
"enum-as-inner",
|
||||
"futures-channel",
|
||||
"futures-io",
|
||||
"futures-util",
|
||||
"idna",
|
||||
"ipnet",
|
||||
"once_cell",
|
||||
"rand 0.8.5",
|
||||
"thiserror 1.0.69",
|
||||
"tinyvec",
|
||||
"tokio",
|
||||
"tracing",
|
||||
"url",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "hickory-resolver"
|
||||
version = "0.24.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cbb117a1ca520e111743ab2f6688eddee69db4e0ea242545a604dce8a66fd22e"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"futures-util",
|
||||
"hickory-proto",
|
||||
"ipconfig",
|
||||
"lru-cache",
|
||||
"once_cell",
|
||||
"parking_lot",
|
||||
"rand 0.8.5",
|
||||
"resolv-conf",
|
||||
"smallvec",
|
||||
"thiserror 1.0.69",
|
||||
"tokio",
|
||||
"tracing",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "hidden-trait"
|
||||
version = "0.1.2"
|
||||
@@ -8415,18 +8310,6 @@ dependencies = [
|
||||
"windows 0.58.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ipconfig"
|
||||
version = "0.3.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b58db92f96b720de98181bbbe63c831e87005ab460c1bf306eb2622b4707997f"
|
||||
dependencies = [
|
||||
"socket2",
|
||||
"widestring",
|
||||
"windows-sys 0.48.0",
|
||||
"winreg 0.50.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ipnet"
|
||||
version = "2.11.0"
|
||||
@@ -8947,7 +8830,6 @@ dependencies = [
|
||||
"http_client",
|
||||
"icons",
|
||||
"image",
|
||||
"log",
|
||||
"parking_lot",
|
||||
"proto",
|
||||
"schemars",
|
||||
@@ -8983,7 +8865,6 @@ dependencies = [
|
||||
"gpui",
|
||||
"gpui_tokio",
|
||||
"http_client",
|
||||
"language",
|
||||
"language_model",
|
||||
"lmstudio",
|
||||
"log",
|
||||
@@ -9122,6 +9003,7 @@ dependencies = [
|
||||
"tree-sitter-yaml",
|
||||
"unindent",
|
||||
"util",
|
||||
"which 6.0.3",
|
||||
"workspace",
|
||||
"workspace-hack",
|
||||
]
|
||||
@@ -9313,12 +9195,6 @@ dependencies = [
|
||||
"cc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "linked-hash-map"
|
||||
version = "0.5.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f"
|
||||
|
||||
[[package]]
|
||||
name = "linkify"
|
||||
version = "0.10.0"
|
||||
@@ -9445,7 +9321,7 @@ dependencies = [
|
||||
"core-foundation 0.10.0",
|
||||
"core-video",
|
||||
"coreaudio-rs 0.12.1",
|
||||
"cpal 0.16.0",
|
||||
"cpal",
|
||||
"futures 0.3.31",
|
||||
"gpui",
|
||||
"gpui_tokio",
|
||||
@@ -9579,15 +9455,6 @@ dependencies = [
|
||||
"hashbrown 0.15.3",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "lru-cache"
|
||||
version = "0.1.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "31e24f1ad8321ca0e8a1e0ac13f23cb668e6f5466c2c57319f6a5cf1cc8e3b1c"
|
||||
dependencies = [
|
||||
"linked-hash-map",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "lsp"
|
||||
version = "0.1.0"
|
||||
@@ -10224,21 +10091,7 @@ dependencies = [
|
||||
"bitflags 2.9.0",
|
||||
"jni-sys",
|
||||
"log",
|
||||
"ndk-sys 0.5.0+25.2.9519653",
|
||||
"num_enum",
|
||||
"thiserror 1.0.69",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ndk"
|
||||
version = "0.9.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c3f42e7bbe13d351b6bead8286a43aac9534b82bd3cc43e47037f012ebfd62d4"
|
||||
dependencies = [
|
||||
"bitflags 2.9.0",
|
||||
"jni-sys",
|
||||
"log",
|
||||
"ndk-sys 0.6.0+11769913",
|
||||
"ndk-sys",
|
||||
"num_enum",
|
||||
"thiserror 1.0.69",
|
||||
]
|
||||
@@ -10258,15 +10111,6 @@ dependencies = [
|
||||
"jni-sys",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ndk-sys"
|
||||
version = "0.6.0+11769913"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ee6cda3051665f1fb8d9e08fc35c96d5a244fb1be711a03b71118828afc9a873"
|
||||
dependencies = [
|
||||
"jni-sys",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "new_debug_unreachable"
|
||||
version = "1.0.6"
|
||||
@@ -10680,43 +10524,6 @@ dependencies = [
|
||||
"objc2-quartz-core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "objc2-audio-toolbox"
|
||||
version = "0.3.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "10cbe18d879e20a4aea544f8befe38bcf52255eb63d3f23eca2842f3319e4c07"
|
||||
dependencies = [
|
||||
"bitflags 2.9.0",
|
||||
"libc",
|
||||
"objc2",
|
||||
"objc2-core-audio",
|
||||
"objc2-core-audio-types",
|
||||
"objc2-core-foundation",
|
||||
"objc2-foundation",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "objc2-core-audio"
|
||||
version = "0.3.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ca44961e888e19313b808f23497073e3f6b3c22bb485056674c8b49f3b025c82"
|
||||
dependencies = [
|
||||
"dispatch2",
|
||||
"objc2",
|
||||
"objc2-core-audio-types",
|
||||
"objc2-core-foundation",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "objc2-core-audio-types"
|
||||
version = "0.3.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c0f1cc99bb07ad2ddb6527ddf83db6a15271bb036b3eb94b801cd44fdc666ee1"
|
||||
dependencies = [
|
||||
"bitflags 2.9.0",
|
||||
"objc2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "objc2-core-foundation"
|
||||
version = "0.3.1"
|
||||
@@ -10822,7 +10629,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e8b61bebd49e5d43f5f8cc7ee2891c16e0f41ec7954d36bcb6c14c5e0de867fb"
|
||||
dependencies = [
|
||||
"jni",
|
||||
"ndk 0.8.0",
|
||||
"ndk",
|
||||
"ndk-context",
|
||||
"num-derive",
|
||||
"num-traits",
|
||||
@@ -13224,7 +13031,6 @@ dependencies = [
|
||||
"dap",
|
||||
"dap_adapters",
|
||||
"debug_adapter_extension",
|
||||
"editor",
|
||||
"env_logger 0.11.8",
|
||||
"extension",
|
||||
"extension_host",
|
||||
@@ -13263,7 +13069,6 @@ dependencies = [
|
||||
"unindent",
|
||||
"util",
|
||||
"watch",
|
||||
"workspace",
|
||||
"worktree",
|
||||
"zlog",
|
||||
]
|
||||
@@ -13421,7 +13226,6 @@ dependencies = [
|
||||
"futures-core",
|
||||
"futures-util",
|
||||
"h2 0.4.9",
|
||||
"hickory-resolver",
|
||||
"http 1.3.1",
|
||||
"http-body 1.0.1",
|
||||
"http-body-util",
|
||||
@@ -13478,12 +13282,6 @@ dependencies = [
|
||||
"workspace-hack",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "resolv-conf"
|
||||
version = "0.7.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "95325155c684b1c89f7765e30bc1c42e4a6da51ca513615660cb8a62ef9a88e3"
|
||||
|
||||
[[package]]
|
||||
name = "resvg"
|
||||
version = "0.45.1"
|
||||
@@ -13603,7 +13401,7 @@ version = "0.20.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e7ceb6607dd738c99bc8cb28eff249b7cd5c8ec88b9db96c0608c1480d140fb1"
|
||||
dependencies = [
|
||||
"cpal 0.15.3",
|
||||
"cpal",
|
||||
"hound",
|
||||
]
|
||||
|
||||
@@ -14615,12 +14413,12 @@ dependencies = [
|
||||
"fs",
|
||||
"gpui",
|
||||
"log",
|
||||
"paths",
|
||||
"schemars",
|
||||
"serde",
|
||||
"settings",
|
||||
"theme",
|
||||
"ui",
|
||||
"util",
|
||||
"workspace",
|
||||
"workspace-hack",
|
||||
]
|
||||
@@ -15481,7 +15279,6 @@ dependencies = [
|
||||
"serde",
|
||||
"serde_json",
|
||||
"smol",
|
||||
"util",
|
||||
"workspace-hack",
|
||||
]
|
||||
|
||||
@@ -15952,7 +15749,6 @@ dependencies = [
|
||||
"theme",
|
||||
"thiserror 2.0.12",
|
||||
"url",
|
||||
"urlencoding",
|
||||
"util",
|
||||
"windows 0.61.1",
|
||||
"workspace-hack",
|
||||
@@ -17360,14 +17156,12 @@ dependencies = [
|
||||
"itertools 0.14.0",
|
||||
"libc",
|
||||
"log",
|
||||
"nix 0.29.0",
|
||||
"rand 0.8.5",
|
||||
"regex",
|
||||
"rust-embed",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"serde_json_lenient",
|
||||
"shlex",
|
||||
"smol",
|
||||
"take-until",
|
||||
"tempfile",
|
||||
@@ -18409,12 +18203,6 @@ dependencies = [
|
||||
"wasite",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "widestring"
|
||||
version = "1.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "dd7cf3379ca1aac9eea11fba24fd7e315d621f8dfe35c8d7d2be8b793726e07d"
|
||||
|
||||
[[package]]
|
||||
name = "wiggle"
|
||||
version = "29.0.1"
|
||||
@@ -19535,7 +19323,6 @@ dependencies = [
|
||||
"num-rational",
|
||||
"num-traits",
|
||||
"objc2",
|
||||
"objc2-core-foundation",
|
||||
"objc2-foundation",
|
||||
"objc2-metal",
|
||||
"object",
|
||||
@@ -19956,7 +19743,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "zed"
|
||||
version = "0.193.0"
|
||||
version = "0.191.0"
|
||||
dependencies = [
|
||||
"activity_indicator",
|
||||
"agent",
|
||||
@@ -19996,6 +19783,7 @@ dependencies = [
|
||||
"extension",
|
||||
"extension_host",
|
||||
"extensions_ui",
|
||||
"feature_flags",
|
||||
"feedback",
|
||||
"file_finder",
|
||||
"fs",
|
||||
|
||||
Cargo.toml (14 changes)
@@ -417,9 +417,9 @@ aws-smithy-runtime-api = { version = "1.7.4", features = ["http-1x", "client"] }
aws-smithy-types = { version = "1.3.0", features = ["http-body-1-x"] }
base64 = "0.22"
bitflags = "2.6.0"
blade-graphics = { git = "https://github.com/kvark/blade", rev = "e0ec4e720957edd51b945b64dd85605ea54bcfe5" }
blade-macros = { git = "https://github.com/kvark/blade", rev = "e0ec4e720957edd51b945b64dd85605ea54bcfe5" }
blade-util = { git = "https://github.com/kvark/blade", rev = "e0ec4e720957edd51b945b64dd85605ea54bcfe5" }
blade-graphics = { git = "https://github.com/kvark/blade", rev = "416375211bb0b5826b3584dccdb6a43369e499ad" }
blade-macros = { git = "https://github.com/kvark/blade", rev = "416375211bb0b5826b3584dccdb6a43369e499ad" }
blade-util = { git = "https://github.com/kvark/blade", rev = "416375211bb0b5826b3584dccdb6a43369e499ad" }
blake3 = "1.5.3"
bytes = "1.0"
cargo_metadata = "0.19"
@@ -433,10 +433,9 @@ convert_case = "0.8.0"
core-foundation = "0.10.0"
core-foundation-sys = "0.8.6"
core-video = { version = "0.4.3", features = ["metal"] }
cpal = "0.16"
criterion = { version = "0.5", features = ["html_reports"] }
ctor = "0.4.0"
dap-types = { git = "https://github.com/zed-industries/dap-types", rev = "b40956a7f4d1939da67429d941389ee306a3a308" }
dap-types = { git = "https://github.com/zed-industries/dap-types", rev = "68516de327fa1be15214133a0a2e52a12982ce75" }
dashmap = "6.0"
derive_more = "0.99.17"
dirs = "4.0"
@@ -524,7 +523,6 @@ reqwest = { git = "https://github.com/zed-industries/reqwest.git", rev = "951c77
"rustls-tls-native-roots",
"socks",
"stream",
"hickory-dns",
] }
rsa = "0.9.6"
runtimelib = { git = "https://github.com/ConradIrwin/runtimed", rev = "7130c804216b6914355d15d0b91ea91f6babd734", default-features = false, features = [
@@ -684,7 +682,9 @@ features = [
"Win32_UI_WindowsAndMessaging",
]

# TODO livekit https://github.com/RustAudio/cpal/pull/891
[patch.crates-io]
cpal = { git = "https://github.com/zed-industries/cpal", rev = "fd8bc2fd39f1f5fdee5a0690656caff9a26d9d50" }
notify = { git = "https://github.com/zed-industries/notify.git", rev = "bbb9ea5ae52b253e095737847e367c30653a2e96" }
notify-types = { git = "https://github.com/zed-industries/notify.git", rev = "bbb9ea5ae52b253e095737847e367c30653a2e96" }

@@ -698,8 +698,6 @@ codegen-units = 16
[profile.dev.package]
taffy = { opt-level = 3 }
cranelift-codegen = { opt-level = 3 }
cranelift-codegen-meta = { opt-level = 3 }
cranelift-codegen-shared = { opt-level = 3 }
resvg = { opt-level = 3 }
rustybuzz = { opt-level = 3 }
ttf-parser = { opt-level = 3 }

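The new `[patch.crates-io]` entry added above is what makes the `cpal = "0.16"` requirement resolve to Zed's fork rather than the crates.io release, pending the upstream cpal PR linked in the TODO. A minimal sketch of the pattern follows; the values are copied from this diff, and placing them under a `[workspace.dependencies]` table is an assumption for illustration.

```toml
# Sketch of a crates.io patch (values taken from this diff): every crate in
# the dependency graph that requests `cpal` from crates.io is transparently
# redirected to the pinned git revision below.
[workspace.dependencies]
cpal = "0.16"

[patch.crates-io]
cpal = { git = "https://github.com/zed-industries/cpal", rev = "fd8bc2fd39f1f5fdee5a0690656caff9a26d9d50" }
```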
@@ -1 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-blocks-icon lucide-blocks"><rect width="7" height="7" x="14" y="3" rx="1"/><path d="M10 21V8a1 1 0 0 0-1-1H4a1 1 0 0 0-1 1v12a1 1 0 0 0 1 1h12a1 1 0 0 0 1-1v-5a1 1 0 0 0-1-1H3"/></svg>
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-blocks"><rect width="7" height="7" x="14" y="3" rx="1"/><path d="M10 21V8a1 1 0 0 0-1-1H4a1 1 0 0 0-1 1v12a1 1 0 0 0 1 1h12a1 1 0 0 0 1-1v-5a1 1 0 0 0-1-1H3"/></svg>
(SVG size before: 386 B, after: 368 B)

@@ -1 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-circle-help-icon lucide-circle-help"><circle cx="12" cy="12" r="10"/><path d="M9.09 9a3 3 0 0 1 5.83 1c0 2-3 3-3 3"/><path d="M12 17h.01"/></svg>
(File deleted; size before: 348 B)

@@ -115,7 +115,6 @@
|
||||
"ctrl-\"": "editor::ExpandAllDiffHunks",
|
||||
"ctrl-i": "editor::ShowSignatureHelp",
|
||||
"alt-g b": "git::Blame",
|
||||
"alt-g m": "git::OpenModifiedFiles",
|
||||
"menu": "editor::OpenContextMenu",
|
||||
"shift-f10": "editor::OpenContextMenu",
|
||||
"ctrl-shift-e": "editor::ToggleEditPrediction",
|
||||
|
||||
@@ -139,7 +139,6 @@
|
||||
"cmd-'": "editor::ToggleSelectedDiffHunks",
|
||||
"cmd-\"": "editor::ExpandAllDiffHunks",
|
||||
"cmd-alt-g b": "git::Blame",
|
||||
"cmd-alt-g m": "git::OpenModifiedFiles",
|
||||
"cmd-i": "editor::ShowSignatureHelp",
|
||||
"f9": "editor::ToggleBreakpoint",
|
||||
"shift-f9": "editor::EditLogBreakpoint",
|
||||
|
||||
@@ -56,9 +56,6 @@
|
||||
"[ shift-b": ["pane::ActivateItem", 0],
|
||||
"] space": "vim::InsertEmptyLineBelow",
|
||||
"[ space": "vim::InsertEmptyLineAbove",
|
||||
"[ e": "editor::MoveLineUp",
|
||||
"] e": "editor::MoveLineDown",
|
||||
|
||||
// Word motions
|
||||
"w": "vim::NextWordStart",
|
||||
"e": "vim::NextWordEnd",
|
||||
@@ -187,8 +184,6 @@
|
||||
"z f": "editor::FoldSelectedRanges",
|
||||
"z shift-m": "editor::FoldAll",
|
||||
"z shift-r": "editor::UnfoldAll",
|
||||
"z l": "vim::ColumnRight",
|
||||
"z h": "vim::ColumnLeft",
|
||||
"shift-z shift-q": ["pane::CloseActiveItem", { "save_intent": "skip" }],
|
||||
"shift-z shift-z": ["pane::CloseActiveItem", { "save_intent": "save_all" }],
|
||||
// Count support
|
||||
@@ -400,8 +395,6 @@
|
||||
"ctrl-pagedown": "pane::ActivateNextItem",
|
||||
"ctrl-pageup": "pane::ActivatePreviousItem",
|
||||
"insert": "vim::InsertBefore",
|
||||
".": "vim::Repeat",
|
||||
"alt-.": "vim::RepeatFind",
|
||||
// tree-sitter related commands
|
||||
"[ x": "editor::SelectLargerSyntaxNode",
|
||||
"] x": "editor::SelectSmallerSyntaxNode",
|
||||
@@ -428,7 +421,6 @@
|
||||
|
||||
"x": "editor::SelectLine",
|
||||
"shift-x": "editor::SelectLine",
|
||||
"%": "editor::SelectAll",
|
||||
// Window mode
|
||||
"space w h": "workspace::ActivatePaneLeft",
|
||||
"space w l": "workspace::ActivatePaneRight",
|
||||
@@ -458,8 +450,7 @@
|
||||
"ctrl-c": "editor::ToggleComments",
|
||||
"d": "vim::HelixDelete",
|
||||
"c": "vim::Substitute",
|
||||
"shift-c": "editor::AddSelectionBelow",
|
||||
"alt-shift-c": "editor::AddSelectionAbove"
|
||||
"shift-c": "editor::AddSelectionBelow"
|
||||
}
|
||||
},
|
||||
{
|
||||
|
||||
@@ -27,11 +27,11 @@ If you are unsure how to fulfill the user's request, gather more information wit
|
||||
If appropriate, use tool calls to explore the current project, which contains the following root directories:
|
||||
|
||||
{{#each worktrees}}
|
||||
- `{{abs_path}}`
|
||||
- `{{root_name}}`
|
||||
{{/each}}
|
||||
|
||||
- Bias towards not asking the user for help if you can find the answer yourself.
|
||||
- When providing paths to tools, the path should always start with the name of a project root directory listed above.
|
||||
- When providing paths to tools, the path should always begin with a path that starts with a project root directory listed above.
|
||||
- Before you read or edit a file, you must first find the full path. DO NOT ever guess a file path!
|
||||
{{# if (has_tool 'grep') }}
|
||||
- When looking for symbols in the project, prefer the `grep` tool.
|
||||
|
||||
@@ -307,8 +307,6 @@
|
||||
// "all"
|
||||
// 4. Draw whitespaces at boundaries only:
|
||||
// "boundary"
|
||||
// 5. Draw whitespaces only after non-whitespace characters:
|
||||
// "trailing"
|
||||
// For a whitespace to be on a boundary, any of the following conditions need to be met:
|
||||
// - It is a tab
|
||||
// - It is adjacent to an edge (start or end)
|
||||
@@ -400,13 +398,6 @@
|
||||
// 3. Never show the minimap:
|
||||
// "never" (default)
|
||||
"show": "never",
|
||||
// Where to show the minimap in the editor.
|
||||
// This setting can take two values:
|
||||
// 1. Show the minimap on the focused editor only:
|
||||
// "active_editor" (default)
|
||||
// 2. Show the minimap on all open editors:
|
||||
// "all_editors"
|
||||
"display_in": "active_editor",
|
||||
// When to show the minimap thumb.
|
||||
// This setting can take two values:
|
||||
// 1. Show the minimap thumb if the mouse is over the minimap:
|
||||
@@ -454,9 +445,7 @@
|
||||
// Whether to show breakpoints in the gutter.
|
||||
"breakpoints": true,
|
||||
// Whether to show fold buttons in the gutter.
|
||||
"folds": true,
|
||||
// Minimum number of characters to reserve space for in the gutter.
|
||||
"min_line_number_digits": 4
|
||||
"folds": true
|
||||
},
|
||||
"indent_guides": {
|
||||
// Whether to show indent guides in the editor.
|
||||
@@ -1045,19 +1034,6 @@
|
||||
// Automatically update Zed. This setting may be ignored on Linux if
|
||||
// installed through a package manager.
|
||||
"auto_update": true,
|
||||
// How to render LSP `textDocument/documentColor` colors in the editor.
|
||||
//
|
||||
// Possible values:
|
||||
//
|
||||
// 1. Do not query and render document colors.
|
||||
// "lsp_document_colors": "none",
|
||||
// 2. Render document colors as inlay hints near the color text (default).
|
||||
// "lsp_document_colors": "inlay",
|
||||
// 3. Draw a border around the color text.
|
||||
// "lsp_document_colors": "border",
|
||||
// 4. Draw a background behind the color text..
|
||||
// "lsp_document_colors": "background",
|
||||
"lsp_document_colors": "inlay",
|
||||
// Diagnostics configuration.
|
||||
"diagnostics": {
|
||||
// Whether to show the project diagnostics button in the status bar.
|
||||
@@ -1502,8 +1478,7 @@
|
||||
"Go": {
|
||||
"code_actions_on_format": {
|
||||
"source.organizeImports": true
|
||||
},
|
||||
"debuggers": ["Delve"]
|
||||
}
|
||||
},
|
||||
"GraphQL": {
|
||||
"prettier": {
|
||||
@@ -1568,15 +1543,9 @@
|
||||
"Plain Text": {
|
||||
"allow_rewrap": "anywhere"
|
||||
},
|
||||
"Python": {
|
||||
"debuggers": ["Debugpy"]
|
||||
},
|
||||
"Ruby": {
|
||||
"language_servers": ["solargraph", "!ruby-lsp", "!rubocop", "!sorbet", "!steep", "..."]
|
||||
},
|
||||
"Rust": {
|
||||
"debuggers": ["CodeLLDB"]
|
||||
},
|
||||
"SCSS": {
|
||||
"prettier": {
|
||||
"allowed": true
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
// Project tasks configuration. See https://zed.dev/docs/tasks for documentation.
|
||||
// Static tasks configuration.
|
||||
//
|
||||
// Example:
|
||||
[
|
||||
|
||||
@@ -7,10 +7,7 @@ use gpui::{
|
||||
InteractiveElement as _, ParentElement as _, Render, SharedString, StatefulInteractiveElement,
|
||||
Styled, Transformation, Window, actions, percentage,
|
||||
};
|
||||
use language::{
|
||||
BinaryStatus, LanguageRegistry, LanguageServerId, LanguageServerName,
|
||||
LanguageServerStatusUpdate, ServerHealth,
|
||||
};
|
||||
use language::{BinaryStatus, LanguageRegistry, LanguageServerId};
|
||||
use project::{
|
||||
EnvironmentErrorMessage, LanguageServerProgress, LspStoreEvent, Project,
|
||||
ProjectEnvironmentEvent,
|
||||
@@ -19,7 +16,6 @@ use project::{
|
||||
use smallvec::SmallVec;
|
||||
use std::{
|
||||
cmp::Reverse,
|
||||
collections::HashSet,
|
||||
fmt::Write,
|
||||
path::Path,
|
||||
sync::Arc,
|
||||
@@ -34,9 +30,9 @@ const GIT_OPERATION_DELAY: Duration = Duration::from_millis(0);
|
||||
actions!(activity_indicator, [ShowErrorMessage]);
|
||||
|
||||
pub enum Event {
|
||||
ShowStatus {
|
||||
server_name: LanguageServerName,
|
||||
status: SharedString,
|
||||
ShowError {
|
||||
server_name: SharedString,
|
||||
error: String,
|
||||
},
|
||||
}
|
||||
|
||||
@@ -49,8 +45,8 @@ pub struct ActivityIndicator {
|
||||
|
||||
#[derive(Debug)]
|
||||
struct ServerStatus {
|
||||
name: LanguageServerName,
|
||||
status: LanguageServerStatusUpdate,
|
||||
name: SharedString,
|
||||
status: BinaryStatus,
|
||||
}
|
||||
|
||||
struct PendingWork<'a> {
|
||||
@@ -149,19 +145,19 @@ impl ActivityIndicator {
|
||||
});
|
||||
|
||||
cx.subscribe_in(&this, window, move |_, _, event, window, cx| match event {
|
||||
Event::ShowStatus {
|
||||
server_name,
|
||||
status,
|
||||
} => {
|
||||
Event::ShowError { server_name, error } => {
|
||||
let create_buffer = project.update(cx, |project, cx| project.create_buffer(cx));
|
||||
let project = project.clone();
|
||||
let status = status.clone();
|
||||
let error = error.clone();
|
||||
let server_name = server_name.clone();
|
||||
cx.spawn_in(window, async move |workspace, cx| {
|
||||
let buffer = create_buffer.await?;
|
||||
buffer.update(cx, |buffer, cx| {
|
||||
buffer.edit(
|
||||
[(0..0, format!("Language server {server_name}:\n\n{status}"))],
|
||||
[(
|
||||
0..0,
|
||||
format!("Language server error: {}\n\n{}", server_name, error),
|
||||
)],
|
||||
None,
|
||||
cx,
|
||||
);
|
||||
@@ -170,10 +166,7 @@ impl ActivityIndicator {
|
||||
workspace.update_in(cx, |workspace, window, cx| {
|
||||
workspace.add_item_to_active_pane(
|
||||
Box::new(cx.new(|cx| {
|
||||
let mut editor =
|
||||
Editor::for_buffer(buffer, Some(project.clone()), window, cx);
|
||||
editor.set_read_only(true);
|
||||
editor
|
||||
Editor::for_buffer(buffer, Some(project.clone()), window, cx)
|
||||
})),
|
||||
None,
|
||||
true,
|
||||
@@ -192,34 +185,19 @@ impl ActivityIndicator {
|
||||
}
|
||||
|
||||
fn show_error_message(&mut self, _: &ShowErrorMessage, _: &mut Window, cx: &mut Context<Self>) {
|
||||
let mut status_message_shown = false;
|
||||
self.statuses.retain(|status| match &status.status {
|
||||
LanguageServerStatusUpdate::Binary(BinaryStatus::Failed { error })
|
||||
if !status_message_shown =>
|
||||
{
|
||||
cx.emit(Event::ShowStatus {
|
||||
self.statuses.retain(|status| {
|
||||
if let BinaryStatus::Failed { error } = &status.status {
|
||||
cx.emit(Event::ShowError {
|
||||
server_name: status.name.clone(),
|
||||
status: SharedString::from(error),
|
||||
error: error.clone(),
|
||||
});
|
||||
status_message_shown = true;
|
||||
false
|
||||
} else {
|
||||
true
|
||||
}
|
||||
LanguageServerStatusUpdate::Health(
|
||||
ServerHealth::Error | ServerHealth::Warning,
|
||||
status_string,
|
||||
) if !status_message_shown => match status_string {
|
||||
Some(error) => {
|
||||
cx.emit(Event::ShowStatus {
|
||||
server_name: status.name.clone(),
|
||||
status: error.clone(),
|
||||
});
|
||||
status_message_shown = true;
|
||||
false
|
||||
}
|
||||
None => false,
|
||||
},
|
||||
_ => true,
|
||||
});
|
||||
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
fn dismiss_error_message(
|
||||
@@ -289,52 +267,48 @@ impl ActivityIndicator {
|
||||
});
|
||||
}
|
||||
// Show any language server has pending activity.
|
||||
let mut pending_work = self.pending_language_server_work(cx);
|
||||
if let Some(PendingWork {
|
||||
progress_token,
|
||||
progress,
|
||||
..
|
||||
}) = pending_work.next()
|
||||
{
|
||||
let mut pending_work = self.pending_language_server_work(cx);
|
||||
if let Some(PendingWork {
|
||||
progress_token,
|
||||
progress,
|
||||
..
|
||||
}) = pending_work.next()
|
||||
{
|
||||
let mut message = progress
|
||||
.title
|
||||
.as_deref()
|
||||
.unwrap_or(progress_token)
|
||||
.to_string();
|
||||
let mut message = progress
|
||||
.title
|
||||
.as_deref()
|
||||
.unwrap_or(progress_token)
|
||||
.to_string();
|
||||
|
||||
if let Some(percentage) = progress.percentage {
|
||||
write!(&mut message, " ({}%)", percentage).unwrap();
|
||||
}
|
||||
|
||||
if let Some(progress_message) = progress.message.as_ref() {
|
||||
message.push_str(": ");
|
||||
message.push_str(progress_message);
|
||||
}
|
||||
|
||||
let additional_work_count = pending_work.count();
|
||||
if additional_work_count > 0 {
|
||||
write!(&mut message, " + {} more", additional_work_count).unwrap();
|
||||
}
|
||||
|
||||
return Some(Content {
|
||||
icon: Some(
|
||||
Icon::new(IconName::ArrowCircle)
|
||||
.size(IconSize::Small)
|
||||
.with_animation(
|
||||
"arrow-circle",
|
||||
Animation::new(Duration::from_secs(2)).repeat(),
|
||||
|icon, delta| {
|
||||
icon.transform(Transformation::rotate(percentage(delta)))
|
||||
},
|
||||
)
|
||||
.into_any_element(),
|
||||
),
|
||||
message,
|
||||
on_click: Some(Arc::new(Self::toggle_language_server_work_context_menu)),
|
||||
tooltip_message: None,
|
||||
});
|
||||
if let Some(percentage) = progress.percentage {
|
||||
write!(&mut message, " ({}%)", percentage).unwrap();
|
||||
}
|
||||
|
||||
if let Some(progress_message) = progress.message.as_ref() {
|
||||
message.push_str(": ");
|
||||
message.push_str(progress_message);
|
||||
}
|
||||
|
||||
let additional_work_count = pending_work.count();
|
||||
if additional_work_count > 0 {
|
||||
write!(&mut message, " + {} more", additional_work_count).unwrap();
|
||||
}
|
||||
|
||||
return Some(Content {
|
||||
icon: Some(
|
||||
Icon::new(IconName::ArrowCircle)
|
||||
.size(IconSize::Small)
|
||||
.with_animation(
|
||||
"arrow-circle",
|
||||
Animation::new(Duration::from_secs(2)).repeat(),
|
||||
|icon, delta| icon.transform(Transformation::rotate(percentage(delta))),
|
||||
)
|
||||
.into_any_element(),
|
||||
),
|
||||
message,
|
||||
on_click: Some(Arc::new(Self::toggle_language_server_work_context_menu)),
|
||||
tooltip_message: None,
|
||||
});
|
||||
}
|
||||
|
||||
if let Some(session) = self
|
||||
@@ -395,38 +369,14 @@ impl ActivityIndicator {
|
||||
let mut downloading = SmallVec::<[_; 3]>::new();
|
||||
let mut checking_for_update = SmallVec::<[_; 3]>::new();
|
||||
let mut failed = SmallVec::<[_; 3]>::new();
|
||||
let mut health_messages = SmallVec::<[_; 3]>::new();
|
||||
let mut servers_to_clear_statuses = HashSet::<LanguageServerName>::default();
|
||||
for status in &self.statuses {
|
||||
match &status.status {
|
||||
LanguageServerStatusUpdate::Binary(BinaryStatus::CheckingForUpdate) => {
|
||||
checking_for_update.push(status.name.clone());
|
||||
}
|
||||
LanguageServerStatusUpdate::Binary(BinaryStatus::Downloading) => {
|
||||
downloading.push(status.name.clone());
|
||||
}
|
||||
LanguageServerStatusUpdate::Binary(BinaryStatus::Failed { .. }) => {
|
||||
failed.push(status.name.clone());
|
||||
}
|
||||
LanguageServerStatusUpdate::Binary(BinaryStatus::None) => {}
|
||||
LanguageServerStatusUpdate::Health(health, server_status) => match server_status {
|
||||
Some(server_status) => {
|
||||
health_messages.push((status.name.clone(), *health, server_status.clone()));
|
||||
}
|
||||
None => {
|
||||
servers_to_clear_statuses.insert(status.name.clone());
|
||||
}
|
||||
},
|
||||
match status.status {
|
||||
BinaryStatus::CheckingForUpdate => checking_for_update.push(status.name.clone()),
|
||||
BinaryStatus::Downloading => downloading.push(status.name.clone()),
|
||||
BinaryStatus::Failed { .. } => failed.push(status.name.clone()),
|
||||
BinaryStatus::None => {}
|
||||
}
|
||||
}
|
||||
self.statuses
|
||||
.retain(|status| !servers_to_clear_statuses.contains(&status.name));
|
||||
|
||||
health_messages.sort_by_key(|(_, health, _)| match health {
|
||||
ServerHealth::Error => 2,
|
||||
ServerHealth::Warning => 1,
|
||||
ServerHealth::Ok => 0,
|
||||
});
|
||||
|
||||
if !downloading.is_empty() {
|
||||
return Some(Content {
|
||||
@@ -507,7 +457,7 @@ impl ActivityIndicator {
|
||||
}),
|
||||
),
|
||||
on_click: Some(Arc::new(|this, window, cx| {
|
||||
this.show_error_message(&ShowErrorMessage, window, cx)
|
||||
this.show_error_message(&Default::default(), window, cx)
|
||||
})),
|
||||
tooltip_message: None,
|
||||
});
|
||||
@@ -521,7 +471,7 @@ impl ActivityIndicator {
|
||||
.size(IconSize::Small)
|
||||
.into_any_element(),
|
||||
),
|
||||
message: format!("Formatting failed: {failure}. Click to see logs."),
|
||||
message: format!("Formatting failed: {}. Click to see logs.", failure),
|
||||
on_click: Some(Arc::new(|indicator, window, cx| {
|
||||
indicator.project.update(cx, |project, cx| {
|
||||
project.reset_last_formatting_failure(cx);
|
||||
@@ -532,56 +482,6 @@ impl ActivityIndicator {
|
||||
});
|
||||
}
|
||||
|
||||
// Show any health messages for the language servers
|
||||
if let Some((server_name, health, message)) = health_messages.pop() {
|
||||
let health_str = match health {
|
||||
ServerHealth::Ok => format!("({server_name}) "),
|
||||
ServerHealth::Warning => format!("({server_name}) Warning: "),
|
||||
ServerHealth::Error => format!("({server_name}) Error: "),
|
||||
};
|
||||
let single_line_message = message
|
||||
.lines()
|
||||
.filter_map(|line| {
|
||||
let line = line.trim();
|
||||
if line.is_empty() { None } else { Some(line) }
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
.join(" ");
|
||||
let mut altered_message = single_line_message != message;
|
||||
let truncated_message = truncate_and_trailoff(
|
||||
&single_line_message,
|
||||
MAX_MESSAGE_LEN.saturating_sub(health_str.len()),
|
||||
);
|
||||
altered_message |= truncated_message != single_line_message;
|
||||
let final_message = format!("{health_str}{truncated_message}");
|
||||
|
||||
let tooltip_message = if altered_message {
|
||||
Some(format!("{health_str}{message}"))
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
return Some(Content {
|
||||
icon: Some(
|
||||
Icon::new(IconName::Warning)
|
||||
.size(IconSize::Small)
|
||||
.into_any_element(),
|
||||
),
|
||||
message: final_message,
|
||||
tooltip_message,
|
||||
on_click: Some(Arc::new(move |activity_indicator, window, cx| {
|
||||
if altered_message {
|
||||
activity_indicator.show_error_message(&ShowErrorMessage, window, cx)
|
||||
} else {
|
||||
activity_indicator
|
||||
.statuses
|
||||
.retain(|status| status.name != server_name);
|
||||
cx.notify();
|
||||
}
|
||||
})),
|
||||
});
|
||||
}
|
||||
|
||||
// Show any application auto-update info.
|
||||
if let Some(updater) = &self.auto_updater {
|
||||
return match &updater.read(cx).status() {
|
||||
|
||||
@@ -750,7 +750,7 @@ struct EditingMessageState {
|
||||
editor: Entity<Editor>,
|
||||
context_strip: Entity<ContextStrip>,
|
||||
context_picker_menu_handle: PopoverMenuHandle<ContextPicker>,
|
||||
last_estimated_token_count: Option<u64>,
|
||||
last_estimated_token_count: Option<usize>,
|
||||
_subscriptions: [Subscription; 2],
|
||||
_update_token_count_task: Option<Task<()>>,
|
||||
}
|
||||
@@ -857,7 +857,7 @@ impl ActiveThread {
|
||||
}
|
||||
|
||||
/// Returns the editing message id and the estimated token count in the content
|
||||
pub fn editing_message_id(&self) -> Option<(MessageId, u64)> {
|
||||
pub fn editing_message_id(&self) -> Option<(MessageId, usize)> {
|
||||
self.editing_message
|
||||
.as_ref()
|
||||
.map(|(id, state)| (*id, state.last_estimated_token_count.unwrap_or(0)))
|
||||
@@ -1605,7 +1605,6 @@ impl ActiveThread {
|
||||
|
||||
this.thread.update(cx, |thread, cx| {
|
||||
thread.advance_prompt_id();
|
||||
thread.cancel_last_completion(Some(window.window_handle()), cx);
|
||||
thread.send_to_model(
|
||||
model.model,
|
||||
CompletionIntent::UserPrompt,
|
||||
@@ -1681,10 +1680,7 @@ impl ActiveThread {
|
||||
|
||||
let editor = cx.new(|cx| {
|
||||
let mut editor = Editor::new(
|
||||
editor::EditorMode::AutoHeight {
|
||||
min_lines: 1,
|
||||
max_lines: 4,
|
||||
},
|
||||
editor::EditorMode::AutoHeight { max_lines: 4 },
|
||||
buffer,
|
||||
None,
|
||||
window,
|
||||
@@ -3710,7 +3706,7 @@ mod tests {
|
||||
use util::path;
|
||||
use workspace::CollaboratorId;
|
||||
|
||||
use crate::{ContextLoadResult, thread::MessageSegment, thread_store};
|
||||
use crate::{ContextLoadResult, thread_store};
|
||||
|
||||
use super::*;
|
||||
|
||||
@@ -3844,114 +3840,6 @@ mod tests {
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_editing_message_cancels_previous_completion(cx: &mut TestAppContext) {
|
||||
init_test_settings(cx);
|
||||
|
||||
let project = create_test_project(cx, json!({})).await;
|
||||
|
||||
let (cx, active_thread, _, thread, model) =
|
||||
setup_test_environment(cx, project.clone()).await;
|
||||
|
||||
cx.update(|_, cx| {
|
||||
LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
|
||||
registry.set_default_model(
|
||||
Some(ConfiguredModel {
|
||||
provider: Arc::new(FakeLanguageModelProvider),
|
||||
model: model.clone(),
|
||||
}),
|
||||
cx,
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
// Track thread events to verify cancellation
|
||||
let cancellation_events = Arc::new(std::sync::Mutex::new(Vec::new()));
|
||||
let new_request_events = Arc::new(std::sync::Mutex::new(Vec::new()));
|
||||
|
||||
let _subscription = cx.update(|_, cx| {
|
||||
let cancellation_events = cancellation_events.clone();
|
||||
let new_request_events = new_request_events.clone();
|
||||
cx.subscribe(
|
||||
&thread,
|
||||
move |_thread, event: &ThreadEvent, _cx| match event {
|
||||
ThreadEvent::CompletionCanceled => {
|
||||
cancellation_events.lock().unwrap().push(());
|
||||
}
|
||||
ThreadEvent::NewRequest => {
|
||||
new_request_events.lock().unwrap().push(());
|
||||
}
|
||||
_ => {}
|
||||
},
|
||||
)
|
||||
});
|
||||
|
||||
// Insert a user message and start streaming a response
|
||||
let message = thread.update(cx, |thread, cx| {
|
||||
let message_id = thread.insert_user_message(
|
||||
"Hello, how are you?",
|
||||
ContextLoadResult::default(),
|
||||
None,
|
||||
vec![],
|
||||
cx,
|
||||
);
|
||||
thread.advance_prompt_id();
|
||||
thread.send_to_model(
|
||||
model.clone(),
|
||||
CompletionIntent::UserPrompt,
|
||||
cx.active_window(),
|
||||
cx,
|
||||
);
|
||||
thread.message(message_id).cloned().unwrap()
|
||||
});
|
||||
|
||||
cx.run_until_parked();
|
||||
|
||||
// Verify that a completion is in progress
|
||||
assert!(cx.read(|cx| thread.read(cx).is_generating()));
|
||||
assert_eq!(new_request_events.lock().unwrap().len(), 1);
|
||||
|
||||
// Edit the message while the completion is still running
|
||||
active_thread.update_in(cx, |active_thread, window, cx| {
|
||||
active_thread.start_editing_message(
|
||||
message.id,
|
||||
message.segments.as_slice(),
|
||||
message.creases.as_slice(),
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
let editor = active_thread
|
||||
.editing_message
|
||||
.as_ref()
|
||||
.unwrap()
|
||||
.1
|
||||
.editor
|
||||
.clone();
|
||||
editor.update(cx, |editor, cx| {
|
||||
editor.set_text("What is the weather like?", window, cx);
|
||||
});
|
||||
active_thread.confirm_editing_message(&Default::default(), window, cx);
|
||||
});
|
||||
|
||||
cx.run_until_parked();
|
||||
|
||||
// Verify that the previous completion was cancelled
|
||||
assert_eq!(cancellation_events.lock().unwrap().len(), 1);
|
||||
|
||||
// Verify that a new request was started after cancellation
|
||||
assert_eq!(new_request_events.lock().unwrap().len(), 2);
|
||||
|
||||
// Verify that the edited message contains the new text
|
||||
let edited_message =
|
||||
thread.update(cx, |thread, _| thread.message(message.id).cloned().unwrap());
|
||||
match &edited_message.segments[0] {
|
||||
MessageSegment::Text(text) => {
|
||||
assert_eq!(text, "What is the weather like?");
|
||||
}
|
||||
_ => panic!("Expected text segment"),
|
||||
}
|
||||
}
|
||||
|
||||
fn init_test_settings(cx: &mut TestAppContext) {
|
||||
cx.update(|cx| {
|
||||
let settings_store = SettingsStore::test(cx);
|
||||
|
||||
@@ -162,7 +162,7 @@ pub fn init(
assistant_slash_command::init(cx);
thread_store::init(cx);
agent_panel::init(cx);
context_server_configuration::init(language_registry, fs.clone(), cx);
context_server_configuration::init(language_registry, cx);

register_slash_commands(cx);
inline_assistant::init(

@@ -586,7 +586,7 @@ impl AgentConfiguration {
if let Some(server) =
this.get_server(&context_server_id)
{
this.start_server(server, cx);
this.start_server(server, cx).log_err();
}
})
}

@@ -1,6 +1,7 @@
use context_server::ContextServerCommand;
use gpui::{DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, WeakEntity, prelude::*};
use project::project_settings::{ContextServerSettings, ProjectSettings};
use project::project_settings::{ContextServerConfiguration, ProjectSettings};
use serde_json::json;
use settings::update_settings_file;
use ui::{KeyBinding, Modal, ModalFooter, ModalHeader, Section, Tooltip, prelude::*};
use ui_input::SingleLineInput;
@@ -80,12 +81,13 @@ impl AddContextServerModal {
update_settings_file::<ProjectSettings>(fs.clone(), cx, |settings, _| {
settings.context_servers.insert(
name.into(),
ContextServerSettings::Custom {
command: ContextServerCommand {
ContextServerConfiguration {
command: Some(ContextServerCommand {
path,
args,
env: None,
},
}),
settings: Some(json!({})),
},
);
});

@@ -15,7 +15,7 @@ use markdown::{Markdown, MarkdownElement, MarkdownStyle};
use notifications::status_toast::{StatusToast, ToastIcon};
use project::{
context_server_store::{ContextServerStatus, ContextServerStore},
project_settings::{ContextServerSettings, ProjectSettings},
project_settings::{ContextServerConfiguration, ProjectSettings},
};
use settings::{Settings as _, update_settings_file};
use theme::ThemeSettings;
@@ -89,7 +89,7 @@ impl ConfigureContextServerModal {
}),
settings_validator,
settings_editor: cx.new(|cx| {
let mut editor = Editor::auto_height(1, 16, window, cx);
let mut editor = Editor::auto_height(16, window, cx);
editor.set_text(config.default_settings.trim(), window, cx);
editor.set_show_gutter(false, cx);
editor.set_soft_wrap_mode(
@@ -175,9 +175,8 @@ impl ConfigureContextServerModal {
let settings_changed = ProjectSettings::get_global(cx)
.context_servers
.get(&id.0)
.map_or(true, |settings| match settings {
ContextServerSettings::Custom { .. } => false,
ContextServerSettings::Extension { settings } => settings != &settings_value,
.map_or(true, |config| {
config.settings.as_ref() != Some(&settings_value)
});

let is_running = self.context_server_store.read(cx).status_for_server(&id)
@@ -222,12 +221,17 @@ impl ConfigureContextServerModal {
update_settings_file::<ProjectSettings>(workspace.read(cx).app_state().fs.clone(), cx, {
let id = id.clone();
|settings, _| {
settings.context_servers.insert(
id.0,
ContextServerSettings::Extension {
settings: settings_value,
},
);
if let Some(server_config) = settings.context_servers.get_mut(&id.0) {
server_config.settings = Some(settings_value);
} else {
settings.context_servers.insert(
id.0,
ContextServerConfiguration {
settings: Some(settings_value),
..Default::default()
},
);
}
}
});
}

@@ -31,7 +31,7 @@ use util::ResultExt;
use workspace::{
Item, ItemHandle, ItemNavHistory, ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView,
Workspace,
item::{BreadcrumbText, ItemEvent, SaveOptions, TabContentParams},
item::{BreadcrumbText, ItemEvent, TabContentParams},
searchable::SearchableItemHandle,
};
use zed_actions::assistant::ToggleFocus;
@@ -532,12 +532,12 @@ impl Item for AgentDiffPane {

fn save(
&mut self,
options: SaveOptions,
format: bool,
project: Entity<Project>,
window: &mut Window,
cx: &mut Context<Self>,
) -> Task<Result<()>> {
self.editor.save(options, project, window, cx)
self.editor.save(format, project, window, cx)
}

fn save_as(
@@ -1513,7 +1513,7 @@ impl AgentDiff {
multibuffer.add_diff(diff_handle.clone(), cx);
});

let new_state = if thread.read(cx).is_generating() {
let new_state = if thread.read(cx).has_pending_edit_tool_uses() {
EditorState::Generating
} else {
EditorState::Reviewing

@@ -91,13 +91,12 @@ impl AgentModelSelector

impl Render for AgentModelSelector {
fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
let focus_handle = self.focus_handle.clone();

let model = self.selector.read(cx).delegate.active_model(cx);
let model_name = model
.map(|model| model.model.name().0)
.unwrap_or_else(|| SharedString::from("No model selected"));

let focus_handle = self.focus_handle.clone();

PickerPopoverMenu::new(
self.selector.clone(),
Button::new("active-model", model_name)

@@ -10,9 +10,9 @@ use serde::{Deserialize, Serialize};
use agent_settings::{AgentDockPosition, AgentSettings, CompletionMode, DefaultView};
use anyhow::{Result, anyhow};
use assistant_context_editor::{
AgentPanelDelegate, AssistantContext, ContextEditor, ContextEvent, ContextSummary,
SlashCommandCompletionProvider, humanize_token_count, make_lsp_adapter_delegate,
render_remaining_tokens,
AgentPanelDelegate, AssistantContext, ConfigurationError, ContextEditor, ContextEvent,
ContextSummary, SlashCommandCompletionProvider, humanize_token_count,
make_lsp_adapter_delegate, render_remaining_tokens,
};
use assistant_slash_command::SlashCommandWorkingSet;
use assistant_tool::ToolWorkingSet;
@@ -29,8 +29,7 @@ use gpui::{
};
use language::LanguageRegistry;
use language_model::{
ConfigurationError, LanguageModelProviderTosView, LanguageModelRegistry, RequestUsage,
ZED_CLOUD_PROVIDER_ID,
LanguageModelProviderTosView, LanguageModelRegistry, RequestUsage, ZED_CLOUD_PROVIDER_ID,
};
use project::{Project, ProjectPath, Worktree};
use prompt_store::{PromptBuilder, PromptStore, UserPromptId};
@@ -520,15 +519,10 @@ impl AgentPanel {
});

let message_editor_subscription =
cx.subscribe(&message_editor, |this, _, event, cx| match event {
cx.subscribe(&message_editor, |_, _, event, cx| match event {
MessageEditorEvent::Changed | MessageEditorEvent::EstimatedTokenCount => {
cx.notify();
}
MessageEditorEvent::ScrollThreadToBottom => {
this.thread.update(cx, |thread, cx| {
thread.scroll_to_bottom(cx);
});
}
});

let thread_id = thread.read(cx).id().clone();
@@ -808,15 +802,10 @@ impl AgentPanel {
self.message_editor.focus_handle(cx).focus(window);

let message_editor_subscription =
cx.subscribe(&self.message_editor, |this, _, event, cx| match event {
cx.subscribe(&self.message_editor, |_, _, event, cx| match event {
MessageEditorEvent::Changed | MessageEditorEvent::EstimatedTokenCount => {
cx.notify();
}
MessageEditorEvent::ScrollThreadToBottom => {
this.thread.update(cx, |thread, cx| {
thread.scroll_to_bottom(cx);
});
}
});

self._active_thread_subscriptions = vec![
@@ -1028,15 +1017,10 @@ impl AgentPanel {
self.message_editor.focus_handle(cx).focus(window);

let message_editor_subscription =
cx.subscribe(&self.message_editor, |this, _, event, cx| match event {
cx.subscribe(&self.message_editor, |_, _, event, cx| match event {
MessageEditorEvent::Changed | MessageEditorEvent::EstimatedTokenCount => {
cx.notify();
}
MessageEditorEvent::ScrollThreadToBottom => {
this.thread.update(cx, |thread, cx| {
thread.scroll_to_bottom(cx);
});
}
});

self._active_thread_subscriptions = vec![
@@ -2369,6 +2353,24 @@ impl AgentPanel {
self.thread.clone().into_any_element()
}

fn configuration_error(&self, cx: &App) -> Option<ConfigurationError> {
let Some(model) = LanguageModelRegistry::read_global(cx).default_model() else {
return Some(ConfigurationError::NoProvider);
};

if !model.provider.is_authenticated(cx) {
return Some(ConfigurationError::ProviderNotAuthenticated);
}

if model.provider.must_accept_terms(cx) {
return Some(ConfigurationError::ProviderPendingTermsAcceptance(
model.provider,
));
}

None
}

fn render_thread_empty_state(
&self,
window: &mut Window,
@@ -2378,9 +2380,7 @@ impl AgentPanel {
.history_store
.update(cx, |this, cx| this.recent_entries(6, cx));

let model_registry = LanguageModelRegistry::read_global(cx);
let configuration_error =
model_registry.configuration_error(model_registry.default_model(), cx);
let configuration_error = self.configuration_error(cx);
let no_error = configuration_error.is_none();
let focus_handle = self.focus_handle(cx);

@@ -2397,7 +2397,11 @@ impl AgentPanel {
.justify_center()
.items_center()
.gap_1()
.child(h_flex().child(Headline::new("Welcome to the Agent Panel")))
.child(
h_flex().child(
Headline::new("Welcome to the Agent Panel")
),
)
.when(no_error, |parent| {
parent
.child(
@@ -2421,10 +2425,7 @@ impl AgentPanel {
cx,
))
.on_click(|_event, window, cx| {
window.dispatch_action(
NewThread::default().boxed_clone(),
cx,
)
window.dispatch_action(NewThread::default().boxed_clone(), cx)
}),
)
.child(
@@ -2441,10 +2442,7 @@ impl AgentPanel {
cx,
))
.on_click(|_event, window, cx| {
window.dispatch_action(
ToggleContextPicker.boxed_clone(),
cx,
)
window.dispatch_action(ToggleContextPicker.boxed_clone(), cx)
}),
)
.child(
@@ -2461,10 +2459,7 @@ impl AgentPanel {
cx,
))
.on_click(|_event, window, cx| {
window.dispatch_action(
ToggleModelSelector.boxed_clone(),
cx,
)
window.dispatch_action(ToggleModelSelector.boxed_clone(), cx)
}),
)
.child(
@@ -2481,50 +2476,51 @@ impl AgentPanel {
cx,
))
.on_click(|_event, window, cx| {
window.dispatch_action(
OpenConfiguration.boxed_clone(),
cx,
)
window.dispatch_action(OpenConfiguration.boxed_clone(), cx)
}),
)
})
.map(|parent| match configuration_error_ref {
Some(
err @ (ConfigurationError::ModelNotFound
| ConfigurationError::ProviderNotAuthenticated(_)
| ConfigurationError::NoProvider),
) => parent
.child(h_flex().child(
Label::new(err.to_string()).color(Color::Muted).mb_2p5(),
))
.child(
Button::new("settings", "Configure a Provider")
.icon(IconName::Settings)
.icon_position(IconPosition::Start)
.icon_size(IconSize::Small)
.icon_color(Color::Muted)
.full_width()
.key_binding(KeyBinding::for_action_in(
&OpenConfiguration,
&focus_handle,
window,
cx,
))
.on_click(|_event, window, cx| {
window.dispatch_action(
OpenConfiguration.boxed_clone(),
cx,
.map(|parent| {
match configuration_error_ref {
Some(ConfigurationError::ProviderNotAuthenticated)
| Some(ConfigurationError::NoProvider) => {
parent
.child(
h_flex().child(
Label::new("To start using the agent, configure at least one LLM provider.")
.color(Color::Muted)
.mb_2p5()
)
}),
),
Some(ConfigurationError::ProviderPendingTermsAcceptance(provider)) => {
parent.children(provider.render_accept_terms(
LanguageModelProviderTosView::ThreadFreshStart,
cx,
))
)
.child(
Button::new("settings", "Configure a Provider")
.icon(IconName::Settings)
.icon_position(IconPosition::Start)
.icon_size(IconSize::Small)
.icon_color(Color::Muted)
.full_width()
.key_binding(KeyBinding::for_action_in(
&OpenConfiguration,
&focus_handle,
window,
cx,
))
.on_click(|_event, window, cx| {
window.dispatch_action(OpenConfiguration.boxed_clone(), cx)
}),
)
}
Some(ConfigurationError::ProviderPendingTermsAcceptance(provider)) => {
parent.children(
provider.render_accept_terms(
LanguageModelProviderTosView::ThreadFreshStart,
cx,
),
)
}
None => parent,
}
None => parent,
}),
})
)
})
.when(!recent_history.is_empty(), |parent| {
@@ -2559,8 +2555,7 @@ impl AgentPanel {
&self.focus_handle(cx),
window,
cx,
)
.map(|kb| kb.size(rems_from_px(12.))),
).map(|kb| kb.size(rems_from_px(12.))),
)
.on_click(move |_event, window, cx| {
window.dispatch_action(OpenHistory.boxed_clone(), cx);
@@ -2570,68 +2565,79 @@ impl AgentPanel {
.child(
v_flex()
.gap_1()
.children(recent_history.into_iter().enumerate().map(
|(index, entry)| {
.children(
recent_history.into_iter().enumerate().map(|(index, entry)| {
// TODO: Add keyboard navigation.
let is_hovered =
self.hovered_recent_history_item == Some(index);
let is_hovered = self.hovered_recent_history_item == Some(index);
HistoryEntryElement::new(entry.clone(), cx.entity().downgrade())
.hovered(is_hovered)
.on_hover(cx.listener(
move |this, is_hovered, _window, cx| {
if *is_hovered {
this.hovered_recent_history_item = Some(index);
} else if this.hovered_recent_history_item
== Some(index)
{
this.hovered_recent_history_item = None;
}
cx.notify();
},
))
.on_hover(cx.listener(move |this, is_hovered, _window, cx| {
if *is_hovered {
this.hovered_recent_history_item = Some(index);
} else if this.hovered_recent_history_item == Some(index) {
this.hovered_recent_history_item = None;
}
cx.notify();
}))
.into_any_element()
},
)),
}),
)
)
.map(|parent| match configuration_error_ref {
Some(
err @ (ConfigurationError::ModelNotFound
| ConfigurationError::ProviderNotAuthenticated(_)
| ConfigurationError::NoProvider),
) => parent.child(
Banner::new()
.severity(ui::Severity::Warning)
.child(Label::new(err.to_string()).size(LabelSize::Small))
.action_slot(
Button::new("settings", "Configure Provider")
.style(ButtonStyle::Tinted(ui::TintColor::Warning))
.label_size(LabelSize::Small)
.key_binding(
KeyBinding::for_action_in(
&OpenConfiguration,
&focus_handle,
window,
cx,
.map(|parent| {
match configuration_error_ref {
Some(ConfigurationError::ProviderNotAuthenticated)
| Some(ConfigurationError::NoProvider) => {
parent
.child(
Banner::new()
.severity(ui::Severity::Warning)
.child(
Label::new(
"Configure at least one LLM provider to start using the panel.",
)
.size(LabelSize::Small),
)
.map(|kb| kb.size(rems_from_px(12.))),
)
.on_click(|_event, window, cx| {
window.dispatch_action(
OpenConfiguration.boxed_clone(),
cx,
)
}),
),
),
Some(ConfigurationError::ProviderPendingTermsAcceptance(provider)) => {
parent.child(Banner::new().severity(ui::Severity::Warning).child(
h_flex().w_full().children(provider.render_accept_terms(
LanguageModelProviderTosView::ThreadtEmptyState,
cx,
)),
))
.action_slot(
Button::new("settings", "Configure Provider")
.style(ButtonStyle::Tinted(ui::TintColor::Warning))
.label_size(LabelSize::Small)
.key_binding(
KeyBinding::for_action_in(
&OpenConfiguration,
&focus_handle,
window,
cx,
)
.map(|kb| kb.size(rems_from_px(12.))),
)
.on_click(|_event, window, cx| {
window.dispatch_action(
OpenConfiguration.boxed_clone(),
cx,
)
}),
),
)
}
Some(ConfigurationError::ProviderPendingTermsAcceptance(provider)) => {
parent
.child(
Banner::new()
.severity(ui::Severity::Warning)
.child(
h_flex()
.w_full()
.children(
provider.render_accept_terms(
LanguageModelProviderTosView::ThreadtEmptyState,
cx,
),
),
),
)
}
None => parent,
}
None => parent,
})
})
}

@@ -214,7 +214,6 @@ fn search(
&entry_candidates,
&query,
false,
true,
100,
&Arc::new(AtomicBool::default()),
executor,
@@ -1067,7 +1066,7 @@ mod tests {
use serde_json::json;
use settings::SettingsStore;
use std::{ops::Deref, rc::Rc};
use util::path;
use util::{path, separator};
use workspace::{AppState, Item};

#[test]
@@ -1218,14 +1217,14 @@ mod tests {
let mut cx = VisualTestContext::from_window(*window.deref(), cx);

let paths = vec![
path!("a/one.txt"),
path!("a/two.txt"),
path!("a/three.txt"),
path!("a/four.txt"),
path!("b/five.txt"),
path!("b/six.txt"),
path!("b/seven.txt"),
path!("b/eight.txt"),
separator!("a/one.txt"),
separator!("a/two.txt"),
separator!("a/three.txt"),
separator!("a/four.txt"),
separator!("b/five.txt"),
separator!("b/six.txt"),
separator!("b/seven.txt"),
separator!("b/eight.txt"),
];

let mut opened_editors = Vec::new();

@@ -307,7 +307,6 @@ pub(crate) fn search_symbols(
&visible_match_candidates,
&query,
false,
true,
MAX_MATCHES,
&cancellation_flag,
cx.background_executor().clone(),
@@ -316,7 +315,6 @@ pub(crate) fn search_symbols(
&external_match_candidates,
&query,
false,
true,
MAX_MATCHES - visible_matches.len().min(MAX_MATCHES),
&cancellation_flag,
cx.background_executor().clone(),

@@ -342,7 +342,6 @@ pub(crate) fn search_threads(
&candidates,
&query,
false,
true,
100,
&cancellation_flag,
executor,

@@ -3,21 +3,16 @@ use std::sync::Arc;
use anyhow::Context as _;
use context_server::ContextServerId;
use extension::{ContextServerConfiguration, ExtensionManifest};
use fs::Fs;
use gpui::Task;
use language::LanguageRegistry;
use project::{
context_server_store::registry::ContextServerDescriptorRegistry,
project_settings::ProjectSettings,
};
use settings::update_settings_file;
use project::context_server_store::registry::ContextServerDescriptorRegistry;
use ui::prelude::*;
use util::ResultExt;
use workspace::Workspace;

use crate::agent_configuration::ConfigureContextServerModal;

pub(crate) fn init(language_registry: Arc<LanguageRegistry>, fs: Arc<dyn Fs>, cx: &mut App) {
pub(crate) fn init(language_registry: Arc<LanguageRegistry>, cx: &mut App) {
cx.observe_new(move |_: &mut Workspace, window, cx| {
let Some(window) = window else {
return;
@@ -26,7 +21,6 @@ pub(crate) fn init(language_registry: Arc<LanguageRegistry>, fs: Arc<dyn Fs>, cx
if let Some(extension_events) = extension::ExtensionEvents::try_global(cx).as_ref() {
cx.subscribe_in(extension_events, window, {
let language_registry = language_registry.clone();
let fs = fs.clone();
move |workspace, _, event, window, cx| match event {
extension::Event::ExtensionInstalled(manifest) => {
show_configure_mcp_modal(
@@ -37,13 +31,6 @@ pub(crate) fn init(language_registry: Arc<LanguageRegistry>, fs: Arc<dyn Fs>, cx
cx,
);
}
extension::Event::ExtensionUninstalled(manifest) => {
remove_context_server_settings(
manifest.context_servers.keys().cloned().collect(),
fs.clone(),
cx,
);
}
extension::Event::ConfigureExtensionRequested(manifest) => {
if !manifest.context_servers.is_empty() {
show_configure_mcp_modal(
@@ -68,18 +55,6 @@ pub(crate) fn init(language_registry: Arc<LanguageRegistry>, fs: Arc<dyn Fs>, cx
.detach();
}

fn remove_context_server_settings(
context_server_ids: Vec<Arc<str>>,
fs: Arc<dyn Fs>,
cx: &mut App,
) {
update_settings_file::<ProjectSettings>(fs, cx, move |settings, _| {
settings
.context_servers
.retain(|server_id, _| !context_server_ids.contains(server_id));
});
}

pub enum Configuration {
NotAvailable(ContextServerId, Option<SharedString>),
Required(
@@ -96,10 +71,6 @@ fn show_configure_mcp_modal(
window: &mut Window,
cx: &mut Context<'_, Workspace>,
) {
if !window.is_window_active() {
return;
}

let context_server_store = workspace.project().read(cx).context_server_store();
let repository: Option<SharedString> = manifest.repository.as_ref().map(|s| s.clone().into());

@@ -24,7 +24,6 @@ use gpui::{
WeakEntity, Window, point,
};
use language::{Buffer, Point, Selection, TransactionId};
use language_model::ConfigurationError;
use language_model::ConfiguredModel;
use language_model::{LanguageModelRegistry, report_assistant_event};
use multi_buffer::MultiBufferRow;
@@ -39,7 +38,8 @@ use telemetry_events::{AssistantEventData, AssistantKind, AssistantPhase};
use terminal_view::{TerminalView, terminal_panel::TerminalPanel};
use text::{OffsetRangeExt, ToPoint as _};
use ui::prelude::*;
use util::{RangeExt, ResultExt, maybe};
use util::RangeExt;
use util::ResultExt;
use workspace::{ItemHandle, Toast, Workspace, dock::Panel, notifications::NotificationId};
use zed_actions::agent::OpenConfiguration;

@@ -233,9 +233,10 @@ impl InlineAssistant {
return;
};

let configuration_error = || {
let model_registry = LanguageModelRegistry::read_global(cx);
model_registry.configuration_error(model_registry.inline_assistant_model(), cx)
let is_authenticated = || {
LanguageModelRegistry::read_global(cx)
.inline_assistant_model()
.map_or(false, |model| model.provider.is_authenticated(cx))
};

let Some(agent_panel) = workspace.panel::<AgentPanel>(cx) else {
@@ -283,23 +284,20 @@ impl InlineAssistant {
}
};

if let Some(error) = configuration_error() {
if let ConfigurationError::ProviderNotAuthenticated(provider) = error {
cx.spawn(async move |_, cx| {
cx.update(|cx| provider.authenticate(cx))?.await?;
anyhow::Ok(())
})
.detach_and_log_err(cx);

if configuration_error().is_none() {
handle_assist(window, cx);
}
} else {
cx.spawn_in(window, async move |_, cx| {
if is_authenticated() {
handle_assist(window, cx);
} else {
cx.spawn_in(window, async move |_workspace, cx| {
let Some(task) = cx.update(|_, cx| {
LanguageModelRegistry::read_global(cx)
.inline_assistant_model()
.map_or(None, |model| Some(model.provider.authenticate(cx)))
})?
else {
let answer = cx
.prompt(
gpui::PromptLevel::Warning,
&error.to_string(),
"No language model provider configured",
None,
&["Configure", "Cancel"],
)
@@ -313,12 +311,17 @@ impl InlineAssistant {
.ok();
}
}
anyhow::Ok(())
})
.detach_and_log_err(cx);
return Ok(());
};
task.await?;

anyhow::Ok(())
})
.detach_and_log_err(cx);

if is_authenticated() {
handle_assist(window, cx);
}
} else {
handle_assist(window, cx);
}
}

@@ -765,6 +768,9 @@ impl InlineAssistant {
PromptEditorEvent::CancelRequested => {
self.finish_assist(assist_id, true, window, cx);
}
PromptEditorEvent::DismissRequested => {
self.dismiss_assist(assist_id, window, cx);
}
PromptEditorEvent::Resized { .. } => {
// This only matters for the terminal inline assistant
}
@@ -1165,31 +1171,27 @@ impl InlineAssistant {
selections.select_anchor_ranges([position..position])
});

let mut scroll_target_range = None;
let mut scroll_target_top;
let mut scroll_target_bottom;
if let Some(decorations) = assist.decorations.as_ref() {
scroll_target_range = maybe!({
let top = editor.row_for_block(decorations.prompt_block_id, cx)?.0 as f32;
let bottom = editor.row_for_block(decorations.end_block_id, cx)?.0 as f32;
Some((top, bottom))
});
if scroll_target_range.is_none() {
log::error!("bug: failed to find blocks for scrolling to inline assist");
}
}
let scroll_target_range = scroll_target_range.unwrap_or_else(|| {
scroll_target_top = editor
.row_for_block(decorations.prompt_block_id, cx)
.unwrap()
.0 as f32;
scroll_target_bottom = editor
.row_for_block(decorations.end_block_id, cx)
.unwrap()
.0 as f32;
} else {
let snapshot = editor.snapshot(window, cx);
let start_row = assist
.range
.start
.to_display_point(&snapshot.display_snapshot)
.row();
let top = start_row.0 as f32;
let bottom = top + 1.0;
(top, bottom)
});
let mut scroll_target_top = scroll_target_range.0;
let mut scroll_target_bottom = scroll_target_range.1;

scroll_target_top = start_row.0 as f32;
scroll_target_bottom = scroll_target_top + 1.;
}
scroll_target_top -= editor.vertical_scroll_margin() as f32;
scroll_target_bottom += editor.vertical_scroll_margin() as f32;

@@ -261,7 +261,7 @@ impl<T: 'static> PromptEditor<T> {

let focus = self.editor.focus_handle(cx).contains_focused(window, cx);
self.editor = cx.new(|cx| {
let mut editor = Editor::auto_height(1, Self::MAX_LINES as usize, window, cx);
let mut editor = Editor::auto_height(Self::MAX_LINES as usize, window, cx);
editor.set_soft_wrap_mode(language::language_settings::SoftWrap::EditorWidth, cx);
editor.set_placeholder_text("Add a prompt…", cx);
editor.set_text(prompt, window, cx);
@@ -403,7 +403,9 @@ impl<T: 'static> PromptEditor<T> {
CodegenStatus::Idle => {
cx.emit(PromptEditorEvent::StartRequested);
}
CodegenStatus::Pending => {}
CodegenStatus::Pending => {
cx.emit(PromptEditorEvent::DismissRequested);
}
CodegenStatus::Done => {
if self.edited_since_done {
cx.emit(PromptEditorEvent::StartRequested);
@@ -829,6 +831,7 @@ pub enum PromptEditorEvent {
StopRequested,
ConfirmRequested { execute: bool },
CancelRequested,
DismissRequested,
Resized { height_in_lines: u8 },
}

@@ -869,7 +872,6 @@ impl PromptEditor<BufferCodegen> {
let prompt_editor = cx.new(|cx| {
let mut editor = Editor::new(
EditorMode::AutoHeight {
min_lines: 1,
max_lines: Self::MAX_LINES as usize,
},
prompt_buffer,
@@ -1048,7 +1050,6 @@ impl PromptEditor<TerminalCodegen> {
let prompt_editor = cx.new(|cx| {
let mut editor = Editor::new(
EditorMode::AutoHeight {
min_lines: 1,
max_lines: Self::MAX_LINES as usize,
},
prompt_buffer,

@@ -39,9 +39,7 @@ use proto::Plan;
use settings::Settings;
use std::time::Duration;
use theme::ThemeSettings;
use ui::{
Callout, Disclosure, Divider, DividerColor, KeyBinding, PopoverMenuHandle, Tooltip, prelude::*,
};
use ui::{Disclosure, KeyBinding, PopoverMenuHandle, Tooltip, prelude::*};
use util::{ResultExt as _, maybe};
use workspace::{CollaboratorId, Workspace};
use zed_llm_client::CompletionIntent;
@@ -76,12 +74,11 @@ pub struct MessageEditor {
profile_selector: Entity<ProfileSelector>,
edits_expanded: bool,
editor_is_expanded: bool,
last_estimated_token_count: Option<u64>,
last_estimated_token_count: Option<usize>,
update_token_count_task: Option<Task<()>>,
_subscriptions: Vec<Subscription>,
}

const MIN_EDITOR_LINES: usize = 4;
const MAX_EDITOR_LINES: usize = 8;

pub(crate) fn create_editor(
@@ -105,7 +102,6 @@ pub(crate) fn create_editor(
let buffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx));
let mut editor = Editor::new(
editor::EditorMode::AutoHeight {
min_lines: MIN_EDITOR_LINES,
max_lines: MAX_EDITOR_LINES,
},
buffer,
@@ -257,7 +253,6 @@ impl MessageEditor {
})
} else {
editor.set_mode(EditorMode::AutoHeight {
min_lines: MIN_EDITOR_LINES,
max_lines: MAX_EDITOR_LINES,
})
}
@@ -301,7 +296,6 @@ impl MessageEditor {
self.set_editor_is_expanded(false, cx);
self.send_to_model(window, cx);

cx.emit(MessageEditorEvent::ScrollThreadToBottom);
cx.notify();
}

@@ -434,6 +428,10 @@ impl MessageEditor {
}

fn handle_review_click(&mut self, window: &mut Window, cx: &mut Context<Self>) {
if self.thread.read(cx).has_pending_edit_tool_uses() {
return;
}

self.edits_expanded = true;
AgentDiffPane::deploy(self.thread.clone(), self.workspace.clone(), window, cx).log_err();
cx.notify();
@@ -508,47 +506,7 @@ impl MessageEditor {
cx.notify();
}

fn handle_reject_file_changes(
&mut self,
buffer: Entity<Buffer>,
_window: &mut Window,
cx: &mut Context<Self>,
) {
if self.thread.read(cx).has_pending_edit_tool_uses() {
return;
}

self.thread.update(cx, |thread, cx| {
let buffer_snapshot = buffer.read(cx);
let start = buffer_snapshot.anchor_before(Point::new(0, 0));
let end = buffer_snapshot.anchor_after(buffer_snapshot.max_point());
thread
.reject_edits_in_ranges(buffer, vec![start..end], cx)
.detach();
});
cx.notify();
}

fn handle_accept_file_changes(
&mut self,
buffer: Entity<Buffer>,
_window: &mut Window,
cx: &mut Context<Self>,
) {
if self.thread.read(cx).has_pending_edit_tool_uses() {
return;
}

self.thread.update(cx, |thread, cx| {
let buffer_snapshot = buffer.read(cx);
let start = buffer_snapshot.anchor_before(Point::new(0, 0));
let end = buffer_snapshot.anchor_after(buffer_snapshot.max_point());
thread.keep_edits_in_range(buffer, start..end, cx);
});
cx.notify();
}

fn render_burn_mode_toggle(&self, cx: &mut Context<Self>) -> Option<AnyElement> {
fn render_max_mode_toggle(&self, cx: &mut Context<Self>) -> Option<AnyElement> {
let thread = self.thread.read(cx);
let model = thread.configured_model();
if !model?.model.supports_max_mode() {
@@ -683,87 +641,96 @@ impl MessageEditor {
.border_color(cx.theme().colors().border)
.child(
h_flex()
.items_start()
.justify_between()
.child(self.context_strip.clone())
.when(focus_handle.is_focused(window), |this| {
this.child(
IconButton::new("toggle-height", expand_icon)
.icon_size(IconSize::XSmall)
.icon_color(Color::Muted)
.tooltip({
let focus_handle = focus_handle.clone();
move |window, cx| {
let expand_label = if is_editor_expanded {
"Minimize Message Editor".to_string()
} else {
"Expand Message Editor".to_string()
};
.child(
h_flex()
.gap_1()
.when(focus_handle.is_focused(window), |this| {
this.child(
IconButton::new("toggle-height", expand_icon)
.icon_size(IconSize::XSmall)
.icon_color(Color::Muted)
.tooltip({
let focus_handle = focus_handle.clone();
move |window, cx| {
let expand_label = if is_editor_expanded {
"Minimize Message Editor".to_string()
} else {
"Expand Message Editor".to_string()
};

Tooltip::for_action_in(
expand_label,
&ExpandMessageEditor,
&focus_handle,
window,
cx,
)
}
})
.on_click(cx.listener(|_, _, window, cx| {
window.dispatch_action(Box::new(ExpandMessageEditor), cx);
})),
)
}),
Tooltip::for_action_in(
expand_label,
&ExpandMessageEditor,
&focus_handle,
window,
cx,
)
}
})
.on_click(cx.listener(|_, _, window, cx| {
window
.dispatch_action(Box::new(ExpandMessageEditor), cx);
})),
)
}),
),
)
.child(
v_flex()
.size_full()
.gap_1()
.gap_4()
.when(is_editor_expanded, |this| {
this.h(vh(0.8, window)).justify_between()
})
.child({
let settings = ThemeSettings::get_global(cx);
let font_size = TextSize::Small
.rems(cx)
.to_pixels(settings.agent_font_size(cx));
let line_height = settings.buffer_line_height.value() * font_size;
.child(
v_flex()
.min_h_16()
.when(is_editor_expanded, |this| this.h_full())
.child({
let settings = ThemeSettings::get_global(cx);
let font_size = TextSize::Small
.rems(cx)
.to_pixels(settings.agent_font_size(cx));
let line_height = settings.buffer_line_height.value() * font_size;

let text_style = TextStyle {
color: cx.theme().colors().text,
font_family: settings.buffer_font.family.clone(),
font_fallbacks: settings.buffer_font.fallbacks.clone(),
font_features: settings.buffer_font.features.clone(),
font_size: font_size.into(),
line_height: line_height.into(),
..Default::default()
};
let text_style = TextStyle {
color: cx.theme().colors().text,
font_family: settings.buffer_font.family.clone(),
font_fallbacks: settings.buffer_font.fallbacks.clone(),
font_features: settings.buffer_font.features.clone(),
font_size: font_size.into(),
line_height: line_height.into(),
..Default::default()
};

EditorElement::new(
&self.editor,
EditorStyle {
background: editor_bg_color,
local_player: cx.theme().players().local(),
text: text_style,
syntax: cx.theme().syntax().clone(),
..Default::default()
},
)
.into_any()
})
EditorElement::new(
&self.editor,
EditorStyle {
background: editor_bg_color,
local_player: cx.theme().players().local(),
text: text_style,
syntax: cx.theme().syntax().clone(),
..Default::default()
},
)
.into_any()
}),
)
.child(
h_flex()
.flex_none()
.flex_wrap()
.justify_between()
.child(
h_flex()
.child(self.render_follow_toggle(cx))
.children(self.render_burn_mode_toggle(cx)),
.children(self.render_max_mode_toggle(cx)),
)
.child(
h_flex()
.gap_1()
.flex_wrap()
.when(!incompatible_tools.is_empty(), |this| {
this.child(
IconButton::new(
@@ -907,7 +874,7 @@ impl MessageEditor {
)
}

fn render_edits_bar(
fn render_changed_buffers(
&self,
changed_buffers: &BTreeMap<Entity<Buffer>, Entity<BufferDiff>>,
window: &mut Window,
@@ -1031,7 +998,7 @@ impl MessageEditor {
this.handle_review_click(window, cx)
})),
)
.child(Divider::vertical().color(DividerColor::Border))
.child(ui::Divider::vertical().color(ui::DividerColor::Border))
.child(
Button::new("reject-all-changes", "Reject All")
.label_size(LabelSize::Small)
@@ -1081,7 +1048,7 @@ impl MessageEditor {
let file = buffer.read(cx).file()?;
let path = file.path();

let file_path = path.parent().and_then(|parent| {
let parent_label = path.parent().and_then(|parent| {
let parent_str = parent.to_string_lossy();

if parent_str.is_empty() {
@@ -1100,7 +1067,7 @@ impl MessageEditor {
}
});

let file_name = path.file_name().map(|name| {
let name_label = path.file_name().map(|name| {
Label::new(name.to_string_lossy().to_string())
.size(LabelSize::XSmall)
.buffer_font(cx)
@@ -1115,22 +1082,36 @@ impl MessageEditor {
.size(IconSize::Small)
});

let hover_color = cx
.theme()
.colors()
.element_background
.blend(cx.theme().colors().editor_foreground.opacity(0.025));

let overlay_gradient = linear_gradient(
90.,
linear_color_stop(editor_bg_color, 1.),
linear_color_stop(editor_bg_color.opacity(0.2), 0.),
);

let overlay_gradient_hover = linear_gradient(
90.,
linear_color_stop(hover_color, 1.),
linear_color_stop(hover_color.opacity(0.2), 0.),
);

let element = h_flex()
.group("edited-code")
.id(("file-container", index))
.cursor_pointer()
.relative()
.py_1()
.pl_2()
.pr_1()
.gap_2()
.justify_between()
.bg(editor_bg_color)
.bg(cx.theme().colors().editor_background)
.hover(|style| style.bg(hover_color))
.when(index < changed_buffers.len() - 1, |parent| {
parent.border_color(border_color).border_b_1()
})
@@ -1145,75 +1126,47 @@ impl MessageEditor {
.child(
h_flex()
.gap_0p5()
.children(file_name)
.children(file_path),
.children(name_label)
.children(parent_label),
), // TODO: Implement line diff
// .child(Label::new("+").color(Color::Created))
// .child(Label::new("-").color(Color::Deleted)),
)
.child(
h_flex()
.gap_1()
.visible_on_hover("edited-code")
.child(
Button::new("review", "Review")
.label_size(LabelSize::Small)
.on_click({
let buffer = buffer.clone();
cx.listener(move |this, _, window, cx| {
this.handle_file_click(
buffer.clone(),
window,
cx,
);
})
}),
)
.child(
Divider::vertical().color(DividerColor::BorderVariant),
)
.child(
Button::new("reject-file", "Reject")
.label_size(LabelSize::Small)
.disabled(pending_edits)
.on_click({
let buffer = buffer.clone();
cx.listener(move |this, _, window, cx| {
this.handle_reject_file_changes(
buffer.clone(),
window,
cx,
);
})
}),
)
.child(
Button::new("accept-file", "Accept")
.label_size(LabelSize::Small)
.disabled(pending_edits)
.on_click({
let buffer = buffer.clone();
cx.listener(move |this, _, window, cx| {
this.handle_accept_file_changes(
buffer.clone(),
window,
cx,
);
})
}),
),
div().visible_on_hover("edited-code").child(
Button::new("review", "Review")
.label_size(LabelSize::Small)
.on_click({
let buffer = buffer.clone();
cx.listener(move |this, _, window, cx| {
this.handle_file_click(
buffer.clone(),
window,
cx,
);
})
}),
),
)
.child(
div()
.id("gradient-overlay")
.absolute()
.h_full()
.h_5_6()
.w_12()
.top_0()
.bottom_0()
.right(px(152.))
.bg(overlay_gradient),
);
.right(px(52.))
.bg(overlay_gradient)
.group_hover("edited-code", |style| {
style.bg(overlay_gradient_hover)
}),
)
.on_click({
let buffer = buffer.clone();
cx.listener(move |this, _, window, cx| {
this.handle_file_click(buffer.clone(), window, cx);
})
});

Some(element)
},
@@ -1230,7 +1183,6 @@ impl MessageEditor {
.map_or(false, |model| {
model.provider.id().0 == ZED_CLOUD_PROVIDER_ID
});

if !is_using_zed_provider {
return None;
}
@@ -1285,6 +1237,14 @@ impl MessageEditor {
token_usage_ratio: TokenUsageRatio,
cx: &mut Context<Self>,
) -> Option<Div> {
let title = if token_usage_ratio == TokenUsageRatio::Exceeded {
"Thread reached the token limit"
} else {
"Thread reaching the token limit soon"
};

let message = "Start a new thread from a summary to continue the conversation.";

let icon = if token_usage_ratio == TokenUsageRatio::Exceeded {
Icon::new(IconName::X)
.color(Color::Error)
@@ -1295,47 +1255,23 @@ impl MessageEditor {
.size(IconSize::XSmall)
};

let title = if token_usage_ratio == TokenUsageRatio::Exceeded {
"Thread reached the token limit"
} else {
"Thread reaching the token limit soon"
};

Some(
div()
.border_t_1()
.border_color(cx.theme().colors().border)
.child(
Callout::new()
.line_height(line_height)
.icon(icon)
.title(title)
.description(
"To continue, start a new thread from a summary or turn burn mode on.",
)
.primary_action(
Button::new("start-new-thread", "Start New Thread")
.label_size(LabelSize::Small)
.on_click(cx.listener(|this, _, window, cx| {
let from_thread_id = Some(this.thread.read(cx).id().clone());
window.dispatch_action(
Box::new(NewThread { from_thread_id }),
cx,
);
})),
)
.secondary_action(
IconButton::new("burn-mode-callout", IconName::ZedBurnMode)
.icon_size(IconSize::XSmall)
.on_click(cx.listener(|this, _event, window, cx| {
this.toggle_burn_mode(&ToggleBurnMode, window, cx);
})),
),
),
.child(ui::Callout::multi_line(
title,
message,
icon,
"Start New Thread",
Box::new(cx.listener(|this, _, window, cx| {
let from_thread_id = Some(this.thread.read(cx).id().clone());
window.dispatch_action(Box::new(NewThread { from_thread_id }), cx);
})),
))
.line_height(line_height),
)
}

pub fn last_estimated_token_count(&self) -> Option<u64> {
pub fn last_estimated_token_count(&self) -> Option<usize> {
self.last_estimated_token_count
}

@@ -1511,7 +1447,6 @@ impl EventEmitter<MessageEditorEvent> for MessageEditor {}
pub enum MessageEditorEvent {
EstimatedTokenCount,
Changed,
ScrollThreadToBottom,
}

impl Focusable for MessageEditor {
@@ -1529,8 +1464,6 @@ impl Render for MessageEditor {
total_token_usage.ratio()
});

let burn_mode_enabled = thread.completion_mode() == CompletionMode::Burn;

let action_log = self.thread.read(cx).action_log();
let changed_buffers = action_log.read(cx).changed_buffers(cx);

@@ -1539,7 +1472,7 @@ impl Render for MessageEditor {
v_flex()
.size_full()
.when(changed_buffers.len() > 0, |parent| {
parent.child(self.render_edits_bar(&changed_buffers, window, cx))
parent.child(self.render_changed_buffers(&changed_buffers, window, cx))
})
.child(self.render_editor(window, cx))
.children({
@@ -1547,7 +1480,7 @@ impl Render for MessageEditor {

if usage_callout.is_some() {
usage_callout
} else if token_usage_ratio != TokenUsageRatio::Normal && !burn_mode_enabled {
} else if token_usage_ratio != TokenUsageRatio::Normal {
self.render_token_limit_callout(line_height, token_usage_ratio, cx)
} else {
None

@@ -1,3 +1 @@
[The following is an auto-generated notification; do not reply]

These files have changed since the last read:
These files changed since last read:

@@ -167,6 +167,9 @@ impl TerminalInlineAssistant {
PromptEditorEvent::CancelRequested => {
self.finish_assist(assist_id, true, false, window, cx);
}
PromptEditorEvent::DismissRequested => {
self.dismiss_assist(assist_id, window, cx);
}
PromptEditorEvent::Resized { height_in_lines } => {
self.insert_prompt_editor_into_terminal(assist_id, *height_in_lines, window, cx);
}

@@ -272,8 +272,8 @@ impl DetailedSummaryState {

#[derive(Default, Debug)]
pub struct TotalTokenUsage {
pub total: u64,
pub max: u64,
pub total: usize,
pub max: usize,
}

impl TotalTokenUsage {
@@ -299,7 +299,7 @@ impl TotalTokenUsage {
}
}

pub fn add(&self, tokens: u64) -> TotalTokenUsage {
pub fn add(&self, tokens: usize) -> TotalTokenUsage {
TotalTokenUsage {
total: self.total + tokens,
max: self.max,
@@ -396,7 +396,7 @@ pub struct ExceededWindowError {
/// Model used when last message exceeded context window
model_id: LanguageModelId,
/// Token count including last message
token_count: u64,
token_count: usize,
}

impl Thread {
@@ -1389,7 +1389,7 @@ impl Thread {
request.messages[message_ix_to_cache].cache = true;
}

self.attach_tracked_files_state(&mut request.messages, cx);
self.attached_tracked_files_state(&mut request.messages, cx);

request.tools = available_tools;
request.mode = if model.supports_max_mode() {
@@ -1453,57 +1453,43 @@ impl Thread {
request
}

fn attach_tracked_files_state(
fn attached_tracked_files_state(
&self,
messages: &mut Vec<LanguageModelRequestMessage>,
cx: &App,
) {
let mut stale_files = String::new();
const STALE_FILES_HEADER: &str = include_str!("./prompts/stale_files_prompt_header.txt");

let mut stale_message = String::new();

let action_log = self.action_log.read(cx);

for stale_file in action_log.stale_buffers(cx) {
if let Some(file) = stale_file.read(cx).file() {
writeln!(&mut stale_files, "- {}", file.path().display()).ok();
let Some(file) = stale_file.read(cx).file() else {
continue;
};

if stale_message.is_empty() {
write!(&mut stale_message, "{}\n", STALE_FILES_HEADER.trim()).ok();
}

writeln!(&mut stale_message, "- {}", file.path().display()).ok();
}

if stale_files.is_empty() {
return;
let mut content = Vec::with_capacity(2);

if !stale_message.is_empty() {
content.push(stale_message.into());
}

// NOTE: Changes to this prompt require a symmetric update in the LLM Worker
const STALE_FILES_HEADER: &str = include_str!("./prompts/stale_files_prompt_header.txt");
let content = MessageContent::Text(
format!("{STALE_FILES_HEADER}{stale_files}").replace("\r\n", "\n"),
);
if !content.is_empty() {
let context_message = LanguageModelRequestMessage {
role: Role::User,
content,
cache: false,
};

// Insert our message before the last Assistant message.
// Inserting it to the tail distracts the agent too much
let insert_position = messages
.iter()
.enumerate()
.rfind(|(_, message)| message.role == Role::Assistant)
.map_or(messages.len(), |(i, _)| i);

let request_message = LanguageModelRequestMessage {
role: Role::User,
content: vec![content],
cache: false,
};

messages.insert(insert_position, request_message);

// It makes no sense to cache messages after this one because
// the cache is invalidated when this message is gone.
// Move the cache marker before this message.
let has_cached_messages_after = messages
.iter()
.skip(insert_position + 1)
.any(|message| message.cache);

if has_cached_messages_after {
messages[insert_position - 1].cache = true;
messages.push(context_message);
}
}

@@ -2769,7 +2755,7 @@ impl Thread {
.unwrap_or_default();

TotalTokenUsage {
total: token_usage.total_tokens(),
total: token_usage.total_tokens() as usize,
max,
}
}
@@ -2791,7 +2777,7 @@ impl Thread {
let total = self
.token_usage_at_last_message()
.unwrap_or_default()
.total_tokens();
.total_tokens() as usize;

Some(TotalTokenUsage { total, max })
}
@@ -3309,24 +3295,12 @@ fn main() {{
assert_eq!(last_message.role, Role::User);

// Check the exact content of the message
let expected_content = "[The following is an auto-generated notification; do not reply]

These files have changed since the last read:
- code.rs
";
let expected_content = "These files changed since last read:\n- code.rs\n";
assert_eq!(
last_message.string_contents(),
expected_content,
"Last message should be exactly the stale buffer notification"
);

// The message before the notification should be cached
let index = new_request.messages.len() - 2;
let previous_message = new_request.messages.get(index).unwrap();
assert!(
previous_message.cache,
"Message before the stale buffer notification should be cached"
);
}

#[gpui::test]

@@ -224,7 +224,6 @@ impl ThreadHistory {
&candidates,
&query,
false,
true,
MAX_MATCHES,
&Default::default(),
executor,
@@ -595,11 +594,10 @@ impl Render for ThreadHistory {
view.pr_5()
.child(
uniform_list(
cx.entity().clone(),
"thread-history",
self.list_item_count(),
cx.processor(|this, range: Range<usize>, window, cx| {
this.list_items(range, window, cx)
}),
Self::list_items,
)
.p_1()
.track_scroll(self.scroll_handle.clone())

@@ -305,19 +305,17 @@ impl ThreadStore {
project: Entity<Project>,
cx: &mut App,
) -> Task<(WorktreeContext, Option<RulesLoadingError>)> {
let tree = worktree.read(cx);
let root_name = tree.root_name().into();
let abs_path = tree.abs_path();

let mut context = WorktreeContext {
root_name,
abs_path,
rules_file: None,
};
let root_name = worktree.read(cx).root_name().into();

let rules_task = Self::load_worktree_rules_file(worktree, project, cx);
let Some(rules_task) = rules_task else {
return Task::ready((context, None));
return Task::ready((
WorktreeContext {
root_name,
rules_file: None,
},
None,
));
};

cx.spawn(async move |_| {
@@ -330,8 +328,11 @@ impl ThreadStore {
}),
),
};
context.rules_file = rules_file;
(context, rules_file_error)
let worktree_info = WorktreeContext {
root_name,
rules_file,
};
(worktree_info, rules_file_error)
})
}

@@ -340,12 +341,12 @@ impl ThreadStore {
project: Entity<Project>,
cx: &mut App,
) -> Option<Task<Result<RulesFileContext>>> {
let worktree = worktree.read(cx);
let worktree_id = worktree.id();
let worktree_ref = worktree.read(cx);
let worktree_id = worktree_ref.id();
let selected_rules_file = RULES_FILE_NAMES
.into_iter()
.filter_map(|name| {
worktree
worktree_ref
.entry_for_path(name)
.filter(|entry| entry.is_file())
.map(|entry| entry.path.clone())

@@ -427,7 +427,7 @@ impl ToolUseState {

// Protect from overly large output
let tool_output_limit = configured_model
.map(|model| model.model.max_token_count() as usize * BYTES_PER_TOKEN_ESTIMATE)
.map(|model| model.model.max_token_count() * BYTES_PER_TOKEN_ESTIMATE)
.unwrap_or(usize::MAX);

let content = match tool_result {

@@ -2,7 +2,7 @@ use client::zed_urls;
use component::{empty_example, example_group_with_title, single_example};
use gpui::{AnyElement, App, IntoElement, RenderOnce, Window};
use language_model::RequestUsage;
use ui::{Callout, prelude::*};
use ui::{Callout, Color, Icon, IconName, IconSize, prelude::*};
use zed_llm_client::{Plan, UsageLimit};

#[derive(IntoElement, RegisterComponent)]
@@ -91,23 +91,16 @@ impl RenderOnce for UsageCallout {
.size(IconSize::XSmall)
};

div()
.border_t_1()
.border_color(cx.theme().colors().border)
.child(
Callout::new()
.icon(icon)
.title(title)
.description(message)
.primary_action(
Button::new("upgrade", button_text)
.label_size(LabelSize::Small)
.on_click(move |_, _, cx| {
cx.open_url(&url);
}),
),
)
.into_any_element()
Callout::multi_line(
title,
message,
icon,
button_text,
Box::new(move |_, _, cx| {
cx.open_url(&url);
}),
)
.into_any_element()
}
}

@@ -196,8 +189,10 @@ impl Component for UsageCallout {
);

Some(
v_flex()
div()
.p_4()
.flex()
.flex_col()
.gap_4()
.child(free_examples)
.child(trial_examples)

@@ -386,9 +386,7 @@ impl AgentSettingsContent {
_ => None,
};
settings.provider = Some(AgentProviderContentV1::LmStudio {
default_model: Some(lmstudio::Model::new(
&model, None, None, false, false,
)),
default_model: Some(lmstudio::Model::new(&model, None, None, false)),
api_url,
});
}

@@ -15,7 +15,7 @@ pub const ANTHROPIC_API_URL: &str = "https://api.anthropic.com";
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq)]
pub struct AnthropicModelCacheConfiguration {
pub min_total_token: u64,
pub min_total_token: usize,
pub should_speculate: bool,
pub max_cache_anchors: usize,
}
@@ -33,6 +33,15 @@ pub enum AnthropicModelMode {
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, EnumIter)]
pub enum Model {
#[serde(rename = "claude-3-5-sonnet", alias = "claude-3-5-sonnet-latest")]
Claude3_5Sonnet,
#[serde(rename = "claude-3-7-sonnet", alias = "claude-3-7-sonnet-latest")]
Claude3_7Sonnet,
#[serde(
rename = "claude-3-7-sonnet-thinking",
alias = "claude-3-7-sonnet-thinking-latest"
)]
Claude3_7SonnetThinking,
#[serde(rename = "claude-opus-4", alias = "claude-opus-4-latest")]
ClaudeOpus4,
#[serde(
@@ -48,15 +57,6 @@ pub enum Model {
alias = "claude-sonnet-4-thinking-latest"
)]
ClaudeSonnet4Thinking,
#[serde(rename = "claude-3-7-sonnet", alias = "claude-3-7-sonnet-latest")]
Claude3_7Sonnet,
#[serde(
rename = "claude-3-7-sonnet-thinking",
alias = "claude-3-7-sonnet-thinking-latest"
)]
Claude3_7SonnetThinking,
#[serde(rename = "claude-3-5-sonnet", alias = "claude-3-5-sonnet-latest")]
Claude3_5Sonnet,
#[serde(rename = "claude-3-5-haiku", alias = "claude-3-5-haiku-latest")]
Claude3_5Haiku,
#[serde(rename = "claude-3-opus", alias = "claude-3-opus-latest")]
@@ -68,14 +68,14 @@ pub enum Model {
#[serde(rename = "custom")]
Custom {
name: String,
max_tokens: u64,
max_tokens: usize,
/// The name displayed in the UI, such as in the assistant panel model dropdown menu.
display_name: Option<String>,
/// Override this model with a different Anthropic model for tool calls.
tool_override: Option<String>,
/// Indicates whether this custom model supports caching.
cache_configuration: Option<AnthropicModelCacheConfiguration>,
max_output_tokens: Option<u64>,
max_output_tokens: Option<u32>,
default_temperature: Option<f32>,
#[serde(default)]
extra_beta_headers: Vec<String>,
@@ -90,66 +90,46 @@ impl Model {
}

pub fn from_id(id: &str) -> Result<Self> {
if id.starts_with("claude-opus-4-thinking") {
return Ok(Self::ClaudeOpus4Thinking);
}

if id.starts_with("claude-opus-4") {
return Ok(Self::ClaudeOpus4);
}

if id.starts_with("claude-sonnet-4-thinking") {
return Ok(Self::ClaudeSonnet4Thinking);
}

if id.starts_with("claude-sonnet-4") {
return Ok(Self::ClaudeSonnet4);
}

if id.starts_with("claude-3-7-sonnet-thinking") {
return Ok(Self::Claude3_7SonnetThinking);
}

if id.starts_with("claude-3-7-sonnet") {
return Ok(Self::Claude3_7Sonnet);
}

if id.starts_with("claude-3-5-sonnet") {
return Ok(Self::Claude3_5Sonnet);
Ok(Self::Claude3_5Sonnet)
} else if id.starts_with("claude-3-7-sonnet-thinking") {
Ok(Self::Claude3_7SonnetThinking)
} else if id.starts_with("claude-3-7-sonnet") {
Ok(Self::Claude3_7Sonnet)
} else if id.starts_with("claude-3-5-haiku") {
Ok(Self::Claude3_5Haiku)
} else if id.starts_with("claude-3-opus") {
Ok(Self::Claude3Opus)
} else if id.starts_with("claude-3-sonnet") {
Ok(Self::Claude3Sonnet)
} else if id.starts_with("claude-3-haiku") {
Ok(Self::Claude3Haiku)
} else if id.starts_with("claude-opus-4-thinking") {
Ok(Self::ClaudeOpus4Thinking)
} else if id.starts_with("claude-opus-4") {
Ok(Self::ClaudeOpus4)
} else if id.starts_with("claude-sonnet-4-thinking") {
Ok(Self::ClaudeSonnet4Thinking)
} else if id.starts_with("claude-sonnet-4") {
Ok(Self::ClaudeSonnet4)
} else {
anyhow::bail!("invalid model id {id}");
}

if id.starts_with("claude-3-5-haiku") {
return Ok(Self::Claude3_5Haiku);
}

if id.starts_with("claude-3-opus") {
return Ok(Self::Claude3Opus);
}

if id.starts_with("claude-3-sonnet") {
return Ok(Self::Claude3Sonnet);
}

if id.starts_with("claude-3-haiku") {
return Ok(Self::Claude3Haiku);
}

Err(anyhow!("invalid model ID: {id}"))
}

pub fn id(&self) -> &str {
match self {
Self::ClaudeOpus4 => "claude-opus-4-latest",
|
||||
Self::ClaudeOpus4Thinking => "claude-opus-4-thinking-latest",
|
||||
Self::ClaudeSonnet4 => "claude-sonnet-4-latest",
|
||||
Self::ClaudeSonnet4Thinking => "claude-sonnet-4-thinking-latest",
|
||||
Self::Claude3_5Sonnet => "claude-3-5-sonnet-latest",
|
||||
Self::Claude3_7Sonnet => "claude-3-7-sonnet-latest",
|
||||
Self::Claude3_7SonnetThinking => "claude-3-7-sonnet-thinking-latest",
|
||||
Self::Claude3_5Haiku => "claude-3-5-haiku-latest",
|
||||
Self::Claude3Opus => "claude-3-opus-latest",
|
||||
Self::Claude3Sonnet => "claude-3-sonnet-20240229",
|
||||
Self::Claude3Haiku => "claude-3-haiku-20240307",
|
||||
Model::ClaudeOpus4 => "claude-opus-4-latest",
|
||||
Model::ClaudeOpus4Thinking => "claude-opus-4-thinking-latest",
|
||||
Model::ClaudeSonnet4 => "claude-sonnet-4-latest",
|
||||
Model::ClaudeSonnet4Thinking => "claude-sonnet-4-thinking-latest",
|
||||
Model::Claude3_5Sonnet => "claude-3-5-sonnet-latest",
|
||||
Model::Claude3_7Sonnet => "claude-3-7-sonnet-latest",
|
||||
Model::Claude3_7SonnetThinking => "claude-3-7-sonnet-thinking-latest",
|
||||
Model::Claude3_5Haiku => "claude-3-5-haiku-latest",
|
||||
Model::Claude3Opus => "claude-3-opus-latest",
|
||||
Model::Claude3Sonnet => "claude-3-sonnet-20240229",
|
||||
Model::Claude3Haiku => "claude-3-haiku-20240307",
|
||||
Self::Custom { name, .. } => name,
|
||||
}
|
||||
}
|
||||
@@ -157,24 +137,24 @@ impl Model {
|
||||
/// The id of the model that should be used for making API requests
|
||||
pub fn request_id(&self) -> &str {
|
||||
match self {
|
||||
Self::ClaudeOpus4 | Self::ClaudeOpus4Thinking => "claude-opus-4-20250514",
|
||||
Self::ClaudeSonnet4 | Self::ClaudeSonnet4Thinking => "claude-sonnet-4-20250514",
|
||||
Self::Claude3_5Sonnet => "claude-3-5-sonnet-latest",
|
||||
Self::Claude3_7Sonnet | Self::Claude3_7SonnetThinking => "claude-3-7-sonnet-latest",
|
||||
Self::Claude3_5Haiku => "claude-3-5-haiku-latest",
|
||||
Self::Claude3Opus => "claude-3-opus-latest",
|
||||
Self::Claude3Sonnet => "claude-3-sonnet-20240229",
|
||||
Self::Claude3Haiku => "claude-3-haiku-20240307",
|
||||
Model::ClaudeOpus4 | Model::ClaudeOpus4Thinking => "claude-opus-4-20250514",
|
||||
Model::ClaudeSonnet4 | Model::ClaudeSonnet4Thinking => "claude-sonnet-4-20250514",
|
||||
Model::Claude3_5Sonnet => "claude-3-5-sonnet-latest",
|
||||
Model::Claude3_7Sonnet | Model::Claude3_7SonnetThinking => "claude-3-7-sonnet-latest",
|
||||
Model::Claude3_5Haiku => "claude-3-5-haiku-latest",
|
||||
Model::Claude3Opus => "claude-3-opus-latest",
|
||||
Model::Claude3Sonnet => "claude-3-sonnet-20240229",
|
||||
Model::Claude3Haiku => "claude-3-haiku-20240307",
|
||||
Self::Custom { name, .. } => name,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn display_name(&self) -> &str {
|
||||
match self {
|
||||
Self::ClaudeOpus4 => "Claude Opus 4",
|
||||
Self::ClaudeOpus4Thinking => "Claude Opus 4 Thinking",
|
||||
Self::ClaudeSonnet4 => "Claude Sonnet 4",
|
||||
Self::ClaudeSonnet4Thinking => "Claude Sonnet 4 Thinking",
|
||||
Model::ClaudeOpus4 => "Claude Opus 4",
|
||||
Model::ClaudeOpus4Thinking => "Claude Opus 4 Thinking",
|
||||
Model::ClaudeSonnet4 => "Claude Sonnet 4",
|
||||
Model::ClaudeSonnet4Thinking => "Claude Sonnet 4 Thinking",
|
||||
Self::Claude3_7Sonnet => "Claude 3.7 Sonnet",
|
||||
Self::Claude3_5Sonnet => "Claude 3.5 Sonnet",
|
||||
Self::Claude3_7SonnetThinking => "Claude 3.7 Sonnet Thinking",
|
||||
@@ -211,7 +191,7 @@ impl Model {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn max_token_count(&self) -> u64 {
|
||||
pub fn max_token_count(&self) -> usize {
|
||||
match self {
|
||||
Self::ClaudeOpus4
|
||||
| Self::ClaudeOpus4Thinking
|
||||
@@ -228,17 +208,17 @@ impl Model {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn max_output_tokens(&self) -> u64 {
|
||||
pub fn max_output_tokens(&self) -> u32 {
|
||||
match self {
|
||||
Self::ClaudeOpus4
|
||||
| Self::ClaudeOpus4Thinking
|
||||
| Self::ClaudeSonnet4
|
||||
| Self::ClaudeSonnet4Thinking
|
||||
| Self::Claude3_5Sonnet
|
||||
Self::Claude3Opus | Self::Claude3Sonnet | Self::Claude3Haiku => 4_096,
|
||||
Self::Claude3_5Sonnet
|
||||
| Self::Claude3_7Sonnet
|
||||
| Self::Claude3_7SonnetThinking
|
||||
| Self::Claude3_5Haiku => 8_192,
|
||||
Self::Claude3Opus | Self::Claude3Sonnet | Self::Claude3Haiku => 4_096,
|
||||
| Self::Claude3_5Haiku
|
||||
| Self::ClaudeOpus4
|
||||
| Self::ClaudeOpus4Thinking
|
||||
| Self::ClaudeSonnet4
|
||||
| Self::ClaudeSonnet4Thinking => 8_192,
|
||||
Self::Custom {
|
||||
max_output_tokens, ..
|
||||
} => max_output_tokens.unwrap_or(4_096),
|
||||
@@ -267,17 +247,17 @@ impl Model {
|
||||
|
||||
pub fn mode(&self) -> AnthropicModelMode {
|
||||
match self {
|
||||
Self::ClaudeOpus4
|
||||
| Self::ClaudeSonnet4
|
||||
| Self::Claude3_5Sonnet
|
||||
Self::Claude3_5Sonnet
|
||||
| Self::Claude3_7Sonnet
|
||||
| Self::Claude3_5Haiku
|
||||
| Self::ClaudeOpus4
|
||||
| Self::ClaudeSonnet4
|
||||
| Self::Claude3Opus
|
||||
| Self::Claude3Sonnet
|
||||
| Self::Claude3Haiku => AnthropicModelMode::Default,
|
||||
Self::ClaudeOpus4Thinking
|
||||
| Self::ClaudeSonnet4Thinking
|
||||
| Self::Claude3_7SonnetThinking => AnthropicModelMode::Thinking {
|
||||
Self::Claude3_7SonnetThinking
|
||||
| Self::ClaudeOpus4Thinking
|
||||
| Self::ClaudeSonnet4Thinking => AnthropicModelMode::Thinking {
|
||||
budget_tokens: Some(4_096),
|
||||
},
|
||||
Self::Custom { mode, .. } => mode.clone(),
|
||||
@@ -288,7 +268,7 @@ impl Model {
|
||||
|
||||
pub fn beta_headers(&self) -> String {
|
||||
let mut headers = Self::DEFAULT_BETA_HEADERS
|
||||
.iter()
|
||||
.into_iter()
|
||||
.map(|header| header.to_string())
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
@@ -693,7 +673,7 @@ pub enum StringOrContents {
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct Request {
|
||||
pub model: String,
|
||||
pub max_tokens: u64,
|
||||
pub max_tokens: u32,
|
||||
pub messages: Vec<Message>,
|
||||
#[serde(default, skip_serializing_if = "Vec::is_empty")]
|
||||
pub tools: Vec<Tool>,
|
||||
@@ -730,13 +710,13 @@ pub struct Metadata {
|
||||
#[derive(Debug, Serialize, Deserialize, Default)]
|
||||
pub struct Usage {
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
pub input_tokens: Option<u64>,
|
||||
pub input_tokens: Option<u32>,
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
pub output_tokens: Option<u64>,
|
||||
pub output_tokens: Option<u32>,
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
pub cache_creation_input_tokens: Option<u64>,
|
||||
pub cache_creation_input_tokens: Option<u32>,
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
pub cache_read_input_tokens: Option<u64>,
|
||||
pub cache_read_input_tokens: Option<u32>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
@@ -846,7 +826,7 @@ impl ApiError {
|
||||
matches!(self.error_type.as_str(), "rate_limit_error")
|
||||
}
|
||||
|
||||
pub fn match_window_exceeded(&self) -> Option<u64> {
|
||||
pub fn match_window_exceeded(&self) -> Option<usize> {
|
||||
let Some(ApiErrorCode::InvalidRequestError) = self.code() else {
|
||||
return None;
|
||||
};
|
||||
@@ -855,12 +835,12 @@ impl ApiError {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn parse_prompt_too_long(message: &str) -> Option<u64> {
|
||||
pub fn parse_prompt_too_long(message: &str) -> Option<usize> {
|
||||
message
|
||||
.strip_prefix("prompt is too long: ")?
|
||||
.split_once(" tokens")?
|
||||
.0
|
||||
.parse()
|
||||
.parse::<usize>()
|
||||
.ok()
|
||||
}
|
||||
|
||||
|
||||
@@ -15,6 +15,7 @@ path = "src/askpass.rs"
|
||||
anyhow.workspace = true
|
||||
futures.workspace = true
|
||||
gpui.workspace = true
|
||||
shlex.workspace = true
|
||||
smol.workspace = true
|
||||
tempfile.workspace = true
|
||||
util.workspace = true
|
||||
|
||||
@@ -13,9 +13,9 @@ use gpui::{AsyncApp, BackgroundExecutor, Task};
|
||||
#[cfg(unix)]
|
||||
use smol::fs;
|
||||
#[cfg(unix)]
|
||||
use smol::net::unix::UnixListener;
|
||||
use smol::{fs::unix::PermissionsExt as _, net::unix::UnixListener};
|
||||
#[cfg(unix)]
|
||||
use util::{ResultExt as _, fs::make_file_executable, get_shell_safe_zed_path};
|
||||
use util::ResultExt as _;
|
||||
|
||||
#[derive(PartialEq, Eq)]
|
||||
pub enum AskPassResult {
|
||||
@@ -120,7 +120,7 @@ impl AskPassSession {
|
||||
shebang = "#!/bin/sh",
|
||||
);
|
||||
fs::write(&askpass_script_path, askpass_script).await?;
|
||||
make_file_executable(&askpass_script_path).await?;
|
||||
fs::set_permissions(&askpass_script_path, std::fs::Permissions::from_mode(0o755)).await?;
|
||||
|
||||
Ok(Self {
|
||||
script_path: askpass_script_path,
|
||||
@@ -160,6 +160,38 @@ impl AskPassSession {
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(unix)]
|
||||
fn get_shell_safe_zed_path() -> anyhow::Result<String> {
|
||||
let zed_path = std::env::current_exe()
|
||||
.context("Failed to determine current executable path for use in askpass")?
|
||||
.to_string_lossy()
|
||||
// see https://github.com/rust-lang/rust/issues/69343
|
||||
.trim_end_matches(" (deleted)")
|
||||
.to_string();
|
||||
|
||||
// NOTE: this was previously enabled, however, it caused errors when it shouldn't have
|
||||
// (see https://github.com/zed-industries/zed/issues/29819)
|
||||
// The zed path failing to execute within the askpass script results in very vague ssh
|
||||
// authentication failed errors, so this was done to try and surface a better error
|
||||
//
|
||||
// use std::os::unix::fs::MetadataExt;
|
||||
// let metadata = std::fs::metadata(&zed_path)
|
||||
// .context("Failed to check metadata of Zed executable path for use in askpass")?;
|
||||
// let is_executable = metadata.is_file() && metadata.mode() & 0o111 != 0;
|
||||
// anyhow::ensure!(
|
||||
// is_executable,
|
||||
// "Failed to verify Zed executable path for use in askpass"
|
||||
// );
|
||||
|
||||
// As of writing, this can only be fail if the path contains a null byte, which shouldn't be possible
|
||||
// but shlex has annotated the error as #[non_exhaustive] so we can't make it a compile error if other
|
||||
// errors are introduced in the future :(
|
||||
let zed_path_escaped = shlex::try_quote(&zed_path)
|
||||
.context("Failed to shell-escape Zed executable path for use in askpass")?;
|
||||
|
||||
return Ok(zed_path_escaped.to_string());
|
||||
}
|
||||
|
||||
/// The main function for when Zed is running in netcat mode for use in askpass.
|
||||
/// Called from both the remote server binary and the zed binary in their respective main functions.
|
||||
#[cfg(unix)]
|
||||
|
||||
@@ -678,7 +678,7 @@ pub struct AssistantContext {
|
||||
summary_task: Task<Option<()>>,
|
||||
completion_count: usize,
|
||||
pending_completions: Vec<PendingCompletion>,
|
||||
token_count: Option<u64>,
|
||||
token_count: Option<usize>,
|
||||
pending_token_count: Task<Option<()>>,
|
||||
pending_save: Task<Result<()>>,
|
||||
pending_cache_warming_task: Task<Option<()>>,
|
||||
@@ -1250,7 +1250,7 @@ impl AssistantContext {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn token_count(&self) -> Option<u64> {
|
||||
pub fn token_count(&self) -> Option<usize> {
|
||||
self.token_count
|
||||
}
|
||||
|
||||
|
||||
@@ -39,7 +39,7 @@ use language::{
|
||||
language_settings::{SoftWrap, all_language_settings},
|
||||
};
|
||||
use language_model::{
|
||||
ConfigurationError, LanguageModelImage, LanguageModelProviderTosView, LanguageModelRegistry,
|
||||
LanguageModelImage, LanguageModelProvider, LanguageModelProviderTosView, LanguageModelRegistry,
|
||||
Role,
|
||||
};
|
||||
use multi_buffer::MultiBufferRow;
|
||||
@@ -1887,8 +1887,6 @@ impl ContextEditor {
|
||||
// value to not show the nudge.
|
||||
let nudge = Some(false);
|
||||
|
||||
let model_registry = LanguageModelRegistry::read_global(cx);
|
||||
|
||||
if nudge.map_or(false, |value| value) {
|
||||
Some(
|
||||
h_flex()
|
||||
@@ -1937,9 +1935,14 @@ impl ContextEditor {
|
||||
)
|
||||
.into_any_element(),
|
||||
)
|
||||
} else if let Some(configuration_error) =
|
||||
model_registry.configuration_error(model_registry.default_model(), cx)
|
||||
{
|
||||
} else if let Some(configuration_error) = configuration_error(cx) {
|
||||
let label = match configuration_error {
|
||||
ConfigurationError::NoProvider => "No LLM provider selected.",
|
||||
ConfigurationError::ProviderNotAuthenticated => "LLM provider is not configured.",
|
||||
ConfigurationError::ProviderPendingTermsAcceptance(_) => {
|
||||
"LLM provider requires accepting the Terms of Service."
|
||||
}
|
||||
};
|
||||
Some(
|
||||
h_flex()
|
||||
.px_3()
|
||||
@@ -1956,7 +1959,7 @@ impl ContextEditor {
|
||||
.size(IconSize::Small)
|
||||
.color(Color::Warning),
|
||||
)
|
||||
.child(Label::new(configuration_error.to_string())),
|
||||
.child(Label::new(label)),
|
||||
)
|
||||
.child(
|
||||
Button::new("open-configuration", "Configure Providers")
|
||||
@@ -2031,19 +2034,14 @@ impl ContextEditor {
|
||||
/// Will return false if the selected provided has a configuration error or
|
||||
/// if the user has not accepted the terms of service for this provider.
|
||||
fn sending_disabled(&self, cx: &mut Context<'_, ContextEditor>) -> bool {
|
||||
let model_registry = LanguageModelRegistry::read_global(cx);
|
||||
let Some(configuration_error) =
|
||||
model_registry.configuration_error(model_registry.default_model(), cx)
|
||||
else {
|
||||
return false;
|
||||
};
|
||||
let model = LanguageModelRegistry::read_global(cx).default_model();
|
||||
|
||||
match configuration_error {
|
||||
ConfigurationError::NoProvider
|
||||
| ConfigurationError::ModelNotFound
|
||||
| ConfigurationError::ProviderNotAuthenticated(_) => true,
|
||||
ConfigurationError::ProviderPendingTermsAcceptance(_) => self.show_accept_terms,
|
||||
}
|
||||
let has_configuration_error = configuration_error(cx).is_some();
|
||||
let needs_to_accept_terms = self.show_accept_terms
|
||||
&& model
|
||||
.as_ref()
|
||||
.map_or(false, |model| model.provider.must_accept_terms(cx));
|
||||
has_configuration_error || needs_to_accept_terms
|
||||
}
|
||||
|
||||
fn render_inject_context_menu(&self, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
@@ -3121,12 +3119,12 @@ fn invoked_slash_command_fold_placeholder(
|
||||
|
||||
enum TokenState {
|
||||
NoTokensLeft {
|
||||
max_token_count: u64,
|
||||
token_count: u64,
|
||||
max_token_count: usize,
|
||||
token_count: usize,
|
||||
},
|
||||
HasMoreTokens {
|
||||
max_token_count: u64,
|
||||
token_count: u64,
|
||||
max_token_count: usize,
|
||||
token_count: usize,
|
||||
over_warn_threshold: bool,
|
||||
},
|
||||
}
|
||||
@@ -3139,7 +3137,9 @@ fn token_state(context: &Entity<AssistantContext>, cx: &App) -> Option<TokenStat
|
||||
.model;
|
||||
let token_count = context.read(cx).token_count()?;
|
||||
let max_token_count = model.max_token_count();
|
||||
let token_state = if max_token_count.saturating_sub(token_count) == 0 {
|
||||
|
||||
let remaining_tokens = max_token_count as isize - token_count as isize;
|
||||
let token_state = if remaining_tokens <= 0 {
|
||||
TokenState::NoTokensLeft {
|
||||
max_token_count,
|
||||
token_count,
|
||||
@@ -3180,7 +3180,34 @@ fn size_for_image(data: &RenderImage, max_size: Size<Pixels>) -> Size<Pixels> {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn humanize_token_count(count: u64) -> String {
|
||||
pub enum ConfigurationError {
|
||||
NoProvider,
|
||||
ProviderNotAuthenticated,
|
||||
ProviderPendingTermsAcceptance(Arc<dyn LanguageModelProvider>),
|
||||
}
|
||||
|
||||
fn configuration_error(cx: &App) -> Option<ConfigurationError> {
|
||||
let model = LanguageModelRegistry::read_global(cx).default_model();
|
||||
let is_authenticated = model
|
||||
.as_ref()
|
||||
.map_or(false, |model| model.provider.is_authenticated(cx));
|
||||
|
||||
if model.is_some() && is_authenticated {
|
||||
return None;
|
||||
}
|
||||
|
||||
if model.is_none() {
|
||||
return Some(ConfigurationError::NoProvider);
|
||||
}
|
||||
|
||||
if !is_authenticated {
|
||||
return Some(ConfigurationError::ProviderNotAuthenticated);
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
pub fn humanize_token_count(count: usize) -> String {
|
||||
match count {
|
||||
0..=999 => count.to_string(),
|
||||
1000..=9999 => {
|
||||
|
||||
@@ -745,7 +745,6 @@ impl ContextStore {
|
||||
&candidates,
|
||||
&query,
|
||||
false,
|
||||
true,
|
||||
100,
|
||||
&Default::default(),
|
||||
executor,
|
||||
|
||||
@@ -310,7 +310,6 @@ impl ModelMatcher {
|
||||
&self.candidates,
|
||||
&query,
|
||||
false,
|
||||
true,
|
||||
100,
|
||||
&Default::default(),
|
||||
self.bg_executor.clone(),
|
||||
@@ -665,7 +664,7 @@ mod tests {
|
||||
format!("{}/{}", self.provider_id.0, self.name.0)
|
||||
}
|
||||
|
||||
fn max_token_count(&self) -> u64 {
|
||||
fn max_token_count(&self) -> usize {
|
||||
1000
|
||||
}
|
||||
|
||||
@@ -673,7 +672,7 @@ mod tests {
|
||||
&self,
|
||||
_: LanguageModelRequest,
|
||||
_: &App,
|
||||
) -> BoxFuture<'static, http_client::Result<u64>> {
|
||||
) -> BoxFuture<'static, http_client::Result<usize>> {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
|
||||
@@ -62,7 +62,6 @@ impl SlashCommandCompletionProvider {
|
||||
&candidates,
|
||||
&command_name,
|
||||
true,
|
||||
true,
|
||||
usize::MAX,
|
||||
&Default::default(),
|
||||
cx.background_executor().clone(),
|
||||
|
||||
@@ -147,7 +147,6 @@ impl SlashCommand for DiagnosticsSlashCommand {
|
||||
&Options::match_candidates_for_args(),
|
||||
&query,
|
||||
false,
|
||||
true,
|
||||
10,
|
||||
&cancellation_flag,
|
||||
executor,
|
||||
|
||||
@@ -582,7 +582,7 @@ mod test {
|
||||
use serde_json::json;
|
||||
use settings::SettingsStore;
|
||||
use smol::stream::StreamExt;
|
||||
use util::path;
|
||||
use util::{path, separator};
|
||||
|
||||
use super::collect_files;
|
||||
|
||||
@@ -627,7 +627,7 @@ mod test {
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
assert!(result_1.text.starts_with(path!("root/dir")));
|
||||
assert!(result_1.text.starts_with(separator!("root/dir")));
|
||||
// 4 files + 2 directories
|
||||
assert_eq!(result_1.sections.len(), 6);
|
||||
|
||||
@@ -643,7 +643,7 @@ mod test {
|
||||
cx.update(|cx| collect_files(project.clone(), &["root/dir*".to_string()], cx).boxed());
|
||||
let result = SlashCommandOutput::from_event_stream(result).await.unwrap();
|
||||
|
||||
assert!(result.text.starts_with(path!("root/dir")));
|
||||
assert!(result.text.starts_with(separator!("root/dir")));
|
||||
// 5 files + 2 directories
|
||||
assert_eq!(result.sections.len(), 7);
|
||||
|
||||
@@ -691,20 +691,24 @@ mod test {
|
||||
.unwrap();
|
||||
|
||||
// Sanity check
|
||||
assert!(result.text.starts_with(path!("zed/assets/themes\n")));
|
||||
assert!(result.text.starts_with(separator!("zed/assets/themes\n")));
|
||||
assert_eq!(result.sections.len(), 7);
|
||||
|
||||
// Ensure that full file paths are included in the real output
|
||||
assert!(
|
||||
result
|
||||
.text
|
||||
.contains(path!("zed/assets/themes/andromeda/LICENSE"))
|
||||
.contains(separator!("zed/assets/themes/andromeda/LICENSE"))
|
||||
);
|
||||
assert!(result.text.contains(path!("zed/assets/themes/ayu/LICENSE")));
|
||||
assert!(
|
||||
result
|
||||
.text
|
||||
.contains(path!("zed/assets/themes/summercamp/LICENSE"))
|
||||
.contains(separator!("zed/assets/themes/ayu/LICENSE"))
|
||||
);
|
||||
assert!(
|
||||
result
|
||||
.text
|
||||
.contains(separator!("zed/assets/themes/summercamp/LICENSE"))
|
||||
);
|
||||
|
||||
assert_eq!(result.sections[5].label, "summercamp");
|
||||
@@ -712,17 +716,17 @@ mod test {
|
||||
// Ensure that things are in descending order, with properly relativized paths
|
||||
assert_eq!(
|
||||
result.sections[0].label,
|
||||
path!("zed/assets/themes/andromeda/LICENSE")
|
||||
separator!("zed/assets/themes/andromeda/LICENSE")
|
||||
);
|
||||
assert_eq!(result.sections[1].label, "andromeda");
|
||||
assert_eq!(
|
||||
result.sections[2].label,
|
||||
path!("zed/assets/themes/ayu/LICENSE")
|
||||
separator!("zed/assets/themes/ayu/LICENSE")
|
||||
);
|
||||
assert_eq!(result.sections[3].label, "ayu");
|
||||
assert_eq!(
|
||||
result.sections[4].label,
|
||||
path!("zed/assets/themes/summercamp/LICENSE")
|
||||
separator!("zed/assets/themes/summercamp/LICENSE")
|
||||
);
|
||||
|
||||
// Ensure that the project lasts until after the last await
|
||||
@@ -763,28 +767,31 @@ mod test {
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
assert!(result.text.starts_with(path!("zed/assets/themes\n")));
|
||||
assert_eq!(result.sections[0].label, path!("zed/assets/themes/LICENSE"));
|
||||
assert!(result.text.starts_with(separator!("zed/assets/themes\n")));
|
||||
assert_eq!(
|
||||
result.sections[0].label,
|
||||
separator!("zed/assets/themes/LICENSE")
|
||||
);
|
||||
assert_eq!(
|
||||
result.sections[1].label,
|
||||
path!("zed/assets/themes/summercamp/LICENSE")
|
||||
separator!("zed/assets/themes/summercamp/LICENSE")
|
||||
);
|
||||
assert_eq!(
|
||||
result.sections[2].label,
|
||||
path!("zed/assets/themes/summercamp/subdir/LICENSE")
|
||||
separator!("zed/assets/themes/summercamp/subdir/LICENSE")
|
||||
);
|
||||
assert_eq!(
|
||||
result.sections[3].label,
|
||||
path!("zed/assets/themes/summercamp/subdir/subsubdir/LICENSE")
|
||||
separator!("zed/assets/themes/summercamp/subdir/subsubdir/LICENSE")
|
||||
);
|
||||
assert_eq!(result.sections[4].label, "subsubdir");
|
||||
assert_eq!(result.sections[5].label, "subdir");
|
||||
assert_eq!(result.sections[6].label, "summercamp");
|
||||
assert_eq!(result.sections[7].label, path!("zed/assets/themes"));
|
||||
assert_eq!(result.sections[7].label, separator!("zed/assets/themes"));
|
||||
|
||||
assert_eq!(
|
||||
result.text,
|
||||
path!(
|
||||
separator!(
|
||||
"zed/assets/themes\n```zed/assets/themes/LICENSE\n1\n```\n\nsummercamp\n```zed/assets/themes/summercamp/LICENSE\n1\n```\n\nsubdir\n```zed/assets/themes/summercamp/subdir/LICENSE\n1\n```\n\nsubsubdir\n```zed/assets/themes/summercamp/subdir/subsubdir/LICENSE\n3\n```\n\n"
|
||||
)
|
||||
);
|
||||
|
||||
@@ -261,7 +261,6 @@ fn tab_items_for_queries(
|
||||
&match_candidates,
|
||||
query,
|
||||
true,
|
||||
true,
|
||||
usize::MAX,
|
||||
&cancel,
|
||||
background_executor.clone(),
|
||||
|
||||
@@ -456,18 +456,18 @@ impl ActionLog {
|
||||
})?
|
||||
}
|
||||
|
||||
/// Track a buffer as read by agent, so we can notify the model about user edits.
|
||||
/// Track a buffer as read, so we can notify the model about user edits.
|
||||
pub fn buffer_read(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
|
||||
self.track_buffer_internal(buffer, false, cx);
|
||||
}
|
||||
|
||||
/// Mark a buffer as created by agent, so we can refresh it in the context
|
||||
/// Mark a buffer as edited, so we can refresh it in the context
|
||||
pub fn buffer_created(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
|
||||
self.edited_since_project_diagnostics_check = true;
|
||||
self.track_buffer_internal(buffer.clone(), true, cx);
|
||||
}
|
||||
|
||||
/// Mark a buffer as edited by agent, so we can refresh it in the context
|
||||
/// Mark a buffer as edited, so we can refresh it in the context
|
||||
pub fn buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
|
||||
self.edited_since_project_diagnostics_check = true;
|
||||
|
||||
|
||||
@@ -8,7 +8,6 @@ use crate::{Template, Templates};
|
||||
use anyhow::Result;
|
||||
use assistant_tool::ActionLog;
|
||||
use create_file_parser::{CreateFileParser, CreateFileParserEvent};
|
||||
pub use edit_parser::EditFormat;
|
||||
use edit_parser::{EditParser, EditParserEvent, EditParserMetrics};
|
||||
use futures::{
|
||||
Stream, StreamExt,
|
||||
@@ -42,23 +41,13 @@ impl Template for CreateFilePromptTemplate {
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
struct EditFileXmlPromptTemplate {
|
||||
struct EditFilePromptTemplate {
|
||||
path: Option<PathBuf>,
|
||||
edit_description: String,
|
||||
}
|
||||
|
||||
impl Template for EditFileXmlPromptTemplate {
|
||||
const TEMPLATE_NAME: &'static str = "edit_file_prompt_xml.hbs";
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
struct EditFileDiffFencedPromptTemplate {
|
||||
path: Option<PathBuf>,
|
||||
edit_description: String,
|
||||
}
|
||||
|
||||
impl Template for EditFileDiffFencedPromptTemplate {
|
||||
const TEMPLATE_NAME: &'static str = "edit_file_prompt_diff_fenced.hbs";
|
||||
impl Template for EditFilePromptTemplate {
|
||||
const TEMPLATE_NAME: &'static str = "edit_file_prompt.hbs";
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
@@ -81,7 +70,6 @@ pub struct EditAgent {
|
||||
action_log: Entity<ActionLog>,
|
||||
project: Entity<Project>,
|
||||
templates: Arc<Templates>,
|
||||
edit_format: EditFormat,
|
||||
}
|
||||
|
||||
impl EditAgent {
|
||||
@@ -90,14 +78,12 @@ impl EditAgent {
|
||||
project: Entity<Project>,
|
||||
action_log: Entity<ActionLog>,
|
||||
templates: Arc<Templates>,
|
||||
edit_format: EditFormat,
|
||||
) -> Self {
|
||||
EditAgent {
|
||||
model,
|
||||
project,
|
||||
action_log,
|
||||
templates,
|
||||
edit_format,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -223,23 +209,14 @@ impl EditAgent {
|
||||
let this = self.clone();
|
||||
let (events_tx, events_rx) = mpsc::unbounded();
|
||||
let conversation = conversation.clone();
|
||||
let edit_format = self.edit_format;
|
||||
let output = cx.spawn(async move |cx| {
|
||||
let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot())?;
|
||||
let path = cx.update(|cx| snapshot.resolve_file_path(cx, true))?;
|
||||
let prompt = match edit_format {
|
||||
EditFormat::XmlTags => EditFileXmlPromptTemplate {
|
||||
path,
|
||||
edit_description,
|
||||
}
|
||||
.render(&this.templates)?,
|
||||
EditFormat::DiffFenced => EditFileDiffFencedPromptTemplate {
|
||||
path,
|
||||
edit_description,
|
||||
}
|
||||
.render(&this.templates)?,
|
||||
};
|
||||
|
||||
let prompt = EditFilePromptTemplate {
|
||||
path,
|
||||
edit_description,
|
||||
}
|
||||
.render(&this.templates)?;
|
||||
let edit_chunks = this
|
||||
.request(conversation, CompletionIntent::EditFile, prompt, cx)
|
||||
.await?;
|
||||
@@ -259,7 +236,7 @@ impl EditAgent {
|
||||
self.action_log
|
||||
.update(cx, |log, cx| log.buffer_read(buffer.clone(), cx))?;
|
||||
|
||||
let (output, edit_events) = Self::parse_edit_chunks(edit_chunks, self.edit_format, cx);
|
||||
let (output, edit_events) = Self::parse_edit_chunks(edit_chunks, cx);
|
||||
let mut edit_events = edit_events.peekable();
|
||||
while let Some(edit_event) = Pin::new(&mut edit_events).peek().await {
|
||||
// Skip events until we're at the start of a new edit.
|
||||
@@ -309,13 +286,7 @@ impl EditAgent {
|
||||
_ => {
|
||||
let ranges = resolved_old_text
|
||||
.into_iter()
|
||||
.map(|text| {
|
||||
let start_line =
|
||||
(snapshot.offset_to_point(text.range.start).row + 1) as usize;
|
||||
let end_line =
|
||||
(snapshot.offset_to_point(text.range.end).row + 1) as usize;
|
||||
start_line..end_line
|
||||
})
|
||||
.map(|text| text.range)
|
||||
.collect();
|
||||
output_events
|
||||
.unbounded_send(EditAgentOutputEvent::AmbiguousEditRange(ranges))
|
||||
@@ -373,7 +344,6 @@ impl EditAgent {
|
||||
|
||||
fn parse_edit_chunks(
|
||||
chunks: impl 'static + Send + Stream<Item = Result<String, LanguageModelCompletionError>>,
|
||||
edit_format: EditFormat,
|
||||
cx: &mut AsyncApp,
|
||||
) -> (
|
||||
Task<Result<EditAgentOutput>>,
|
||||
@@ -383,7 +353,7 @@ impl EditAgent {
|
||||
let output = cx.background_spawn(async move {
|
||||
pin_mut!(chunks);
|
||||
|
||||
let mut parser = EditParser::new(edit_format);
|
||||
let mut parser = EditParser::new();
|
||||
let mut raw_edits = String::new();
|
||||
while let Some(chunk) = chunks.next().await {
|
||||
match chunk {
|
||||
@@ -459,25 +429,25 @@ impl EditAgent {
|
||||
let task = cx.background_spawn(async move {
|
||||
let mut matcher = StreamingFuzzyMatcher::new(snapshot);
|
||||
while let Some(edit_event) = edit_events.next().await {
|
||||
let EditParserEvent::OldTextChunk {
|
||||
chunk,
|
||||
done,
|
||||
line_hint,
|
||||
} = edit_event?
|
||||
else {
|
||||
let EditParserEvent::OldTextChunk { chunk, done } = edit_event? else {
|
||||
break;
|
||||
};
|
||||
|
||||
old_range_tx.send(matcher.push(&chunk, line_hint))?;
|
||||
old_range_tx.send(matcher.push(&chunk))?;
|
||||
if done {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
let matches = matcher.finish();
|
||||
let best_match = matcher.select_best_match();
|
||||
|
||||
old_range_tx.send(best_match.clone())?;
|
||||
let old_range = if matches.len() == 1 {
|
||||
matches.first()
|
||||
} else {
|
||||
// No matches or multiple ambiguous matches
|
||||
None
|
||||
};
|
||||
old_range_tx.send(old_range.cloned())?;
|
||||
|
||||
let indent = LineIndent::from_iter(
|
||||
matcher
|
||||
@@ -486,18 +456,10 @@ impl EditAgent {
|
||||
.unwrap_or(&String::new())
|
||||
.chars(),
|
||||
);
|
||||
|
||||
let resolved_old_texts = if let Some(best_match) = best_match {
|
||||
vec![ResolvedOldText {
|
||||
range: best_match,
|
||||
indent,
|
||||
}]
|
||||
} else {
|
||||
matches
|
||||
.into_iter()
|
||||
.map(|range| ResolvedOldText { range, indent })
|
||||
.collect::<Vec<_>>()
|
||||
};
|
||||
let resolved_old_texts = matches
|
||||
.into_iter()
|
||||
.map(|range| ResolvedOldText { range, indent })
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
Ok((edit_events, resolved_old_texts))
|
||||
});
|
||||
@@ -1379,13 +1341,7 @@ mod tests {
|
||||
let project = Project::test(FakeFs::new(cx.executor()), [], cx).await;
|
||||
let model = Arc::new(FakeLanguageModel::default());
|
||||
let action_log = cx.new(|_| ActionLog::new(project.clone()));
|
||||
EditAgent::new(
|
||||
model,
|
||||
project,
|
||||
action_log,
|
||||
Templates::new(),
|
||||
EditFormat::XmlTags,
|
||||
)
|
||||
EditAgent::new(model, project, action_log, Templates::new())
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 10)]
|
||||
@@ -1418,12 +1374,10 @@ mod tests {
|
||||
&agent,
|
||||
indoc! {"
|
||||
<old_text>
|
||||
return 42;
|
||||
}
|
||||
return 42;
|
||||
</old_text>
|
||||
<new_text>
|
||||
return 100;
|
||||
}
|
||||
return 100;
|
||||
</new_text>
|
||||
"},
|
||||
&mut rng,
|
||||
@@ -1453,7 +1407,7 @@ mod tests {
|
||||
|
||||
// And AmbiguousEditRange even should be emitted
|
||||
let events = drain_events(&mut events);
|
||||
let ambiguous_ranges = vec![2..3, 6..7, 10..11];
|
||||
let ambiguous_ranges = vec![17..31, 52..66, 87..101];
|
||||
assert!(
|
||||
events.contains(&EditAgentOutputEvent::AmbiguousEditRange(ambiguous_ranges)),
|
||||
"Should emit AmbiguousEditRange for non-unique text"
|
||||
|
||||
@@ -1,31 +1,18 @@
|
||||
use anyhow::bail;
|
||||
use derive_more::{Add, AddAssign};
|
||||
use language_model::LanguageModel;
|
||||
use regex::Regex;
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use smallvec::SmallVec;
|
||||
use std::{mem, ops::Range, str::FromStr, sync::Arc};
|
||||
use std::{mem, ops::Range};
|
||||
|
||||
const OLD_TEXT_END_TAG: &str = "</old_text>";
|
||||
const NEW_TEXT_END_TAG: &str = "</new_text>";
|
||||
const EDITS_END_TAG: &str = "</edits>";
|
||||
const SEARCH_MARKER: &str = "<<<<<<< SEARCH";
|
||||
const SEPARATOR_MARKER: &str = "=======";
|
||||
const REPLACE_MARKER: &str = ">>>>>>> REPLACE";
|
||||
const END_TAGS: [&str; 3] = [OLD_TEXT_END_TAG, NEW_TEXT_END_TAG, EDITS_END_TAG];
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum EditParserEvent {
|
||||
OldTextChunk {
|
||||
chunk: String,
|
||||
done: bool,
|
||||
line_hint: Option<u32>,
|
||||
},
|
||||
NewTextChunk {
|
||||
chunk: String,
|
||||
done: bool,
|
||||
},
|
||||
OldTextChunk { chunk: String, done: bool },
|
||||
NewTextChunk { chunk: String, done: bool },
|
||||
}
|
||||
|
||||
#[derive(
|
||||
@@ -36,164 +23,45 @@ pub struct EditParserMetrics {
|
||||
pub mismatched_tags: usize,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "snake_case")]
|
||||
pub enum EditFormat {
|
||||
/// XML-like tags:
|
||||
/// <old_text>...</old_text>
|
||||
/// <new_text>...</new_text>
|
||||
XmlTags,
|
||||
/// Diff-fenced format, in which:
|
||||
/// - Text before the SEARCH marker is ignored
|
||||
/// - Fences are optional
|
||||
/// - Line hint is optional.
|
||||
///
|
||||
/// Example:
|
||||
///
|
||||
/// ```diff
|
||||
/// <<<<<<< SEARCH line=42
|
||||
/// ...
|
||||
/// =======
|
||||
/// ...
|
||||
/// >>>>>>> REPLACE
|
||||
/// ```
|
||||
DiffFenced,
|
||||
}
|
||||
|
||||
impl FromStr for EditFormat {
|
||||
type Err = anyhow::Error;
|
||||
|
||||
fn from_str(s: &str) -> anyhow::Result<Self> {
|
||||
match s.to_lowercase().as_str() {
|
||||
"xml_tags" | "xml" => Ok(EditFormat::XmlTags),
|
||||
"diff_fenced" | "diff-fenced" | "diff" => Ok(EditFormat::DiffFenced),
|
||||
_ => bail!("Unknown EditFormat: {}", s),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl EditFormat {
|
||||
/// Return an optimal edit format for the language model
|
||||
pub fn from_model(model: Arc<dyn LanguageModel>) -> anyhow::Result<Self> {
|
||||
if model.provider_id().0 == "google" || model.id().0.to_lowercase().contains("gemini") {
|
||||
Ok(EditFormat::DiffFenced)
|
||||
} else {
|
||||
Ok(EditFormat::XmlTags)
|
||||
}
|
||||
}
|
||||
|
||||
/// Return an optimal edit format for the language model,
|
||||
/// with the ability to override it by setting the
|
||||
/// `ZED_EDIT_FORMAT` environment variable
|
||||
#[allow(dead_code)]
|
||||
pub fn from_env(model: Arc<dyn LanguageModel>) -> anyhow::Result<Self> {
|
||||
let default = EditFormat::from_model(model)?;
|
||||
std::env::var("ZED_EDIT_FORMAT").map_or(Ok(default), |s| EditFormat::from_str(&s))
|
||||
}
|
||||
}
|
||||
|
||||
pub trait EditFormatParser: Send + std::fmt::Debug {
|
||||
fn push(&mut self, chunk: &str) -> SmallVec<[EditParserEvent; 1]>;
|
||||
fn take_metrics(&mut self) -> EditParserMetrics;
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct XmlEditParser {
|
||||
state: XmlParserState,
|
||||
pub struct EditParser {
|
||||
state: EditParserState,
|
||||
buffer: String,
|
||||
metrics: EditParserMetrics,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq)]
|
||||
enum XmlParserState {
|
||||
enum EditParserState {
|
||||
Pending,
|
||||
WithinOldText { start: bool, line_hint: Option<u32> },
|
||||
WithinOldText { start: bool },
|
||||
AfterOldText,
|
||||
WithinNewText { start: bool },
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct DiffFencedEditParser {
|
||||
state: DiffParserState,
|
||||
buffer: String,
|
||||
metrics: EditParserMetrics,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq)]
|
||||
enum DiffParserState {
|
||||
Pending,
|
||||
WithinSearch { start: bool, line_hint: Option<u32> },
|
||||
WithinReplace { start: bool },
|
||||
}
|
||||
|
||||
/// Main parser that delegates to format-specific parsers
|
||||
pub struct EditParser {
|
||||
parser: Box<dyn EditFormatParser>,
|
||||
}
|
||||
|
||||
impl XmlEditParser {
|
||||
impl EditParser {
|
||||
pub fn new() -> Self {
|
||||
XmlEditParser {
|
||||
state: XmlParserState::Pending,
|
||||
EditParser {
|
||||
state: EditParserState::Pending,
|
||||
buffer: String::new(),
|
||||
metrics: EditParserMetrics::default(),
|
||||
}
|
||||
}
|
||||
|
||||
fn find_end_tag(&self) -> Option<Range<usize>> {
|
||||
let (tag, start_ix) = END_TAGS
|
||||
.iter()
|
||||
.flat_map(|tag| Some((tag, self.buffer.find(tag)?)))
|
||||
.min_by_key(|(_, ix)| *ix)?;
|
||||
Some(start_ix..start_ix + tag.len())
|
||||
}
|
||||
|
||||
fn ends_with_tag_prefix(&self) -> bool {
|
||||
let mut end_prefixes = END_TAGS
|
||||
.iter()
|
||||
.flat_map(|tag| (1..tag.len()).map(move |i| &tag[..i]))
|
||||
.chain(["\n"]);
|
||||
end_prefixes.any(|prefix| self.buffer.ends_with(&prefix))
|
||||
}
|
||||
|
||||
fn parse_line_hint(&self, tag: &str) -> Option<u32> {
|
||||
use std::sync::LazyLock;
|
||||
static LINE_HINT_REGEX: LazyLock<Regex> =
|
||||
LazyLock::new(|| Regex::new(r#"line=(?:"?)(\d+)"#).unwrap());
|
||||
|
||||
LINE_HINT_REGEX
|
||||
.captures(tag)
|
||||
.and_then(|caps| caps.get(1))
|
||||
.and_then(|m| m.as_str().parse::<u32>().ok())
|
||||
}
|
||||
}
|
||||
|
||||
impl EditFormatParser for XmlEditParser {
|
||||
fn push(&mut self, chunk: &str) -> SmallVec<[EditParserEvent; 1]> {
|
||||
pub fn push(&mut self, chunk: &str) -> SmallVec<[EditParserEvent; 1]> {
|
||||
self.buffer.push_str(chunk);
|
||||
|
||||
let mut edit_events = SmallVec::new();
|
||||
loop {
|
||||
match &mut self.state {
|
||||
XmlParserState::Pending => {
|
||||
if let Some(start) = self.buffer.find("<old_text") {
|
||||
if let Some(tag_end) = self.buffer[start..].find('>') {
|
||||
let tag_end = start + tag_end + 1;
|
||||
let tag = &self.buffer[start..tag_end];
|
||||
let line_hint = self.parse_line_hint(tag);
|
||||
self.buffer.drain(..tag_end);
|
||||
self.state = XmlParserState::WithinOldText {
|
||||
start: true,
|
||||
line_hint,
|
||||
};
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
EditParserState::Pending => {
|
||||
if let Some(start) = self.buffer.find("<old_text>") {
|
||||
self.buffer.drain(..start + "<old_text>".len());
|
||||
self.state = EditParserState::WithinOldText { start: true };
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
XmlParserState::WithinOldText { start, line_hint } => {
|
||||
EditParserState::WithinOldText { start } => {
|
||||
if !self.buffer.is_empty() {
|
||||
if *start && self.buffer.starts_with('\n') {
|
||||
self.buffer.remove(0);
|
||||
@@ -201,7 +69,6 @@ impl EditFormatParser for XmlEditParser {
|
||||
*start = false;
|
||||
}
|
||||
|
||||
let line_hint = *line_hint;
|
||||
if let Some(tag_range) = self.find_end_tag() {
|
||||
let mut chunk = self.buffer[..tag_range.start].to_string();
|
||||
if chunk.ends_with('\n') {
|
||||
@@ -214,32 +81,27 @@ impl EditFormatParser for XmlEditParser {
|
||||
}
|
||||
|
||||
self.buffer.drain(..tag_range.end);
|
||||
self.state = XmlParserState::AfterOldText;
|
||||
edit_events.push(EditParserEvent::OldTextChunk {
|
||||
chunk,
|
||||
done: true,
|
||||
line_hint,
|
||||
});
|
||||
self.state = EditParserState::AfterOldText;
|
||||
edit_events.push(EditParserEvent::OldTextChunk { chunk, done: true });
|
||||
} else {
|
||||
if !self.ends_with_tag_prefix() {
|
||||
edit_events.push(EditParserEvent::OldTextChunk {
|
||||
chunk: mem::take(&mut self.buffer),
|
||||
done: false,
|
||||
line_hint,
|
||||
});
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
XmlParserState::AfterOldText => {
|
||||
EditParserState::AfterOldText => {
|
||||
if let Some(start) = self.buffer.find("<new_text>") {
|
||||
self.buffer.drain(..start + "<new_text>".len());
|
||||
self.state = XmlParserState::WithinNewText { start: true };
|
||||
self.state = EditParserState::WithinNewText { start: true };
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
XmlParserState::WithinNewText { start } => {
|
||||
EditParserState::WithinNewText { start } => {
|
||||
if !self.buffer.is_empty() {
|
||||
if *start && self.buffer.starts_with('\n') {
|
||||
self.buffer.remove(0);
|
||||
@@ -259,7 +121,7 @@ impl EditFormatParser for XmlEditParser {
|
||||
}
|
||||
|
||||
self.buffer.drain(..tag_range.end);
|
||||
self.state = XmlParserState::Pending;
|
||||
self.state = EditParserState::Pending;
|
||||
edit_events.push(EditParserEvent::NewTextChunk { chunk, done: true });
|
||||
} else {
|
||||
if !self.ends_with_tag_prefix() {
|
||||
@@ -276,163 +138,24 @@ impl EditFormatParser for XmlEditParser {
|
||||
edit_events
|
||||
}
|
||||
|
||||
fn take_metrics(&mut self) -> EditParserMetrics {
|
||||
std::mem::take(&mut self.metrics)
|
||||
}
|
||||
}
|
||||
|
||||
impl DiffFencedEditParser {
|
||||
pub fn new() -> Self {
|
||||
DiffFencedEditParser {
|
||||
state: DiffParserState::Pending,
|
||||
buffer: String::new(),
|
||||
metrics: EditParserMetrics::default(),
|
||||
}
|
||||
}
|
||||
|
||||
fn ends_with_diff_marker_prefix(&self) -> bool {
|
||||
let diff_markers = [SEPARATOR_MARKER, REPLACE_MARKER];
|
||||
let mut diff_prefixes = diff_markers
|
||||
fn find_end_tag(&self) -> Option<Range<usize>> {
|
||||
let (tag, start_ix) = END_TAGS
|
||||
.iter()
|
||||
.flat_map(|marker| (1..marker.len()).map(move |i| &marker[..i]))
|
||||
.flat_map(|tag| Some((tag, self.buffer.find(tag)?)))
|
||||
.min_by_key(|(_, ix)| *ix)?;
|
||||
Some(start_ix..start_ix + tag.len())
|
||||
}
|
||||
|
||||
fn ends_with_tag_prefix(&self) -> bool {
|
||||
let mut end_prefixes = END_TAGS
|
||||
.iter()
|
||||
.flat_map(|tag| (1..tag.len()).map(move |i| &tag[..i]))
|
||||
.chain(["\n"]);
|
||||
diff_prefixes.any(|prefix| self.buffer.ends_with(&prefix))
|
||||
end_prefixes.any(|prefix| self.buffer.ends_with(&prefix))
|
||||
}
|
||||
|
||||
fn parse_line_hint(&self, search_line: &str) -> Option<u32> {
|
||||
use regex::Regex;
|
||||
use std::sync::LazyLock;
|
||||
static LINE_HINT_REGEX: LazyLock<Regex> =
|
||||
LazyLock::new(|| Regex::new(r#"line=(?:"?)(\d+)"#).unwrap());
|
||||
|
||||
LINE_HINT_REGEX
|
||||
.captures(search_line)
|
||||
.and_then(|caps| caps.get(1))
|
||||
.and_then(|m| m.as_str().parse::<u32>().ok())
|
||||
}
|
||||
}
|
||||
|
||||
impl EditFormatParser for DiffFencedEditParser {
|
||||
fn push(&mut self, chunk: &str) -> SmallVec<[EditParserEvent; 1]> {
|
||||
self.buffer.push_str(chunk);
|
||||
|
||||
let mut edit_events = SmallVec::new();
|
||||
loop {
|
||||
match &mut self.state {
|
||||
DiffParserState::Pending => {
|
||||
if let Some(diff) = self.buffer.find(SEARCH_MARKER) {
|
||||
let search_end = diff + SEARCH_MARKER.len();
|
||||
if let Some(newline_pos) = self.buffer[search_end..].find('\n') {
|
||||
let search_line = &self.buffer[diff..search_end + newline_pos];
|
||||
let line_hint = self.parse_line_hint(search_line);
|
||||
self.buffer.drain(..search_end + newline_pos + 1);
|
||||
self.state = DiffParserState::WithinSearch {
|
||||
start: true,
|
||||
line_hint,
|
||||
};
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
DiffParserState::WithinSearch { start, line_hint } => {
|
||||
if !self.buffer.is_empty() {
|
||||
if *start && self.buffer.starts_with('\n') {
|
||||
self.buffer.remove(0);
|
||||
}
|
||||
*start = false;
|
||||
}
|
||||
|
||||
let line_hint = *line_hint;
|
||||
if let Some(separator_pos) = self.buffer.find(SEPARATOR_MARKER) {
|
||||
let mut chunk = self.buffer[..separator_pos].to_string();
|
||||
if chunk.ends_with('\n') {
|
||||
chunk.pop();
|
||||
}
|
||||
|
||||
let separator_end = separator_pos + SEPARATOR_MARKER.len();
|
||||
if let Some(newline_pos) = self.buffer[separator_end..].find('\n') {
|
||||
self.buffer.drain(..separator_end + newline_pos + 1);
|
||||
self.state = DiffParserState::WithinReplace { start: true };
|
||||
edit_events.push(EditParserEvent::OldTextChunk {
|
||||
chunk,
|
||||
done: true,
|
||||
line_hint,
|
||||
});
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
} else {
|
||||
if !self.ends_with_diff_marker_prefix() {
|
||||
edit_events.push(EditParserEvent::OldTextChunk {
|
||||
chunk: mem::take(&mut self.buffer),
|
||||
done: false,
|
||||
line_hint,
|
||||
});
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
DiffParserState::WithinReplace { start } => {
|
||||
if !self.buffer.is_empty() {
|
||||
if *start && self.buffer.starts_with('\n') {
|
||||
self.buffer.remove(0);
|
||||
}
|
||||
*start = false;
|
||||
}
|
||||
|
||||
if let Some(replace_pos) = self.buffer.find(REPLACE_MARKER) {
|
||||
let mut chunk = self.buffer[..replace_pos].to_string();
|
||||
if chunk.ends_with('\n') {
|
||||
chunk.pop();
|
||||
}
|
||||
|
||||
self.buffer.drain(..replace_pos + REPLACE_MARKER.len());
|
||||
if let Some(newline_pos) = self.buffer.find('\n') {
|
||||
self.buffer.drain(..newline_pos + 1);
|
||||
} else {
|
||||
self.buffer.clear();
|
||||
}
|
||||
|
||||
self.state = DiffParserState::Pending;
|
||||
edit_events.push(EditParserEvent::NewTextChunk { chunk, done: true });
|
||||
} else {
|
||||
if !self.ends_with_diff_marker_prefix() {
|
||||
edit_events.push(EditParserEvent::NewTextChunk {
|
||||
chunk: mem::take(&mut self.buffer),
|
||||
done: false,
|
||||
});
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
edit_events
|
||||
}
|
||||
|
||||
fn take_metrics(&mut self) -> EditParserMetrics {
|
||||
std::mem::take(&mut self.metrics)
|
||||
}
|
||||
}
|
||||
|
||||
impl EditParser {
|
||||
pub fn new(format: EditFormat) -> Self {
|
||||
let parser: Box<dyn EditFormatParser> = match format {
|
||||
EditFormat::XmlTags => Box::new(XmlEditParser::new()),
|
||||
EditFormat::DiffFenced => Box::new(DiffFencedEditParser::new()),
|
||||
};
|
||||
EditParser { parser }
|
||||
}
|
||||
|
||||
pub fn push(&mut self, chunk: &str) -> SmallVec<[EditParserEvent; 1]> {
|
||||
self.parser.push(chunk)
|
||||
}
|
||||
|
||||
pub fn finish(mut self) -> EditParserMetrics {
|
||||
self.parser.take_metrics()
|
||||
pub fn finish(self) -> EditParserMetrics {
|
||||
self.metrics
|
||||
}
|
||||
}
|
||||
|
||||
@@ -444,8 +167,8 @@ mod tests {
|
||||
use std::cmp;
|
||||
|
||||
#[gpui::test(iterations = 1000)]
|
||||
fn test_xml_single_edit(mut rng: StdRng) {
|
||||
let mut parser = EditParser::new(EditFormat::XmlTags);
|
||||
fn test_single_edit(mut rng: StdRng) {
|
||||
let mut parser = EditParser::new();
|
||||
assert_eq!(
|
||||
parse_random_chunks(
|
||||
"<old_text>original</old_text><new_text>updated</new_text>",
|
||||
@@ -455,7 +178,6 @@ mod tests {
|
||||
vec![Edit {
|
||||
old_text: "original".to_string(),
|
||||
new_text: "updated".to_string(),
|
||||
line_hint: None,
|
||||
}]
|
||||
);
|
||||
assert_eq!(
|
||||
@@ -468,8 +190,8 @@ mod tests {
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 1000)]
|
||||
fn test_xml_multiple_edits(mut rng: StdRng) {
|
||||
let mut parser = EditParser::new(EditFormat::XmlTags);
|
||||
fn test_multiple_edits(mut rng: StdRng) {
|
||||
let mut parser = EditParser::new();
|
||||
assert_eq!(
|
||||
parse_random_chunks(
|
||||
indoc! {"
|
||||
@@ -487,12 +209,10 @@ mod tests {
|
||||
Edit {
|
||||
old_text: "first old".to_string(),
|
||||
new_text: "first new".to_string(),
|
||||
line_hint: None,
|
||||
},
|
||||
Edit {
|
||||
old_text: "second old".to_string(),
|
||||
new_text: "second new".to_string(),
|
||||
line_hint: None,
|
||||
},
|
||||
]
|
||||
);
|
||||
@@ -506,8 +226,8 @@ mod tests {
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 1000)]
|
||||
fn test_xml_edits_with_extra_text(mut rng: StdRng) {
|
||||
let mut parser = EditParser::new(EditFormat::XmlTags);
|
||||
fn test_edits_with_extra_text(mut rng: StdRng) {
|
||||
let mut parser = EditParser::new();
|
||||
assert_eq!(
|
||||
parse_random_chunks(
|
||||
indoc! {"
|
||||
@@ -524,17 +244,14 @@ mod tests {
|
||||
Edit {
|
||||
old_text: "content".to_string(),
|
||||
new_text: "updated content".to_string(),
|
||||
line_hint: None,
|
||||
},
|
||||
Edit {
|
||||
old_text: "second item".to_string(),
|
||||
new_text: "modified second item".to_string(),
|
||||
line_hint: None,
|
||||
},
|
||||
Edit {
|
||||
old_text: "third case".to_string(),
|
||||
new_text: "improved third case".to_string(),
|
||||
line_hint: None,
|
||||
},
|
||||
]
|
||||
);
|
||||
@@ -548,8 +265,8 @@ mod tests {
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 1000)]
|
||||
fn test_xml_nested_tags(mut rng: StdRng) {
|
||||
let mut parser = EditParser::new(EditFormat::XmlTags);
|
||||
fn test_nested_tags(mut rng: StdRng) {
|
||||
let mut parser = EditParser::new();
|
||||
assert_eq!(
|
||||
parse_random_chunks(
|
||||
"<old_text>code with <tag>nested</tag> elements</old_text><new_text>new <code>content</code></new_text>",
|
||||
@@ -559,7 +276,6 @@ mod tests {
|
||||
vec![Edit {
|
||||
old_text: "code with <tag>nested</tag> elements".to_string(),
|
||||
new_text: "new <code>content</code>".to_string(),
|
||||
line_hint: None,
|
||||
}]
|
||||
);
|
||||
assert_eq!(
|
||||
@@ -572,8 +288,8 @@ mod tests {
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 1000)]
|
||||
fn test_xml_empty_old_and_new_text(mut rng: StdRng) {
|
||||
let mut parser = EditParser::new(EditFormat::XmlTags);
|
||||
fn test_empty_old_and_new_text(mut rng: StdRng) {
|
||||
let mut parser = EditParser::new();
|
||||
assert_eq!(
|
||||
parse_random_chunks(
|
||||
"<old_text></old_text><new_text></new_text>",
|
||||
@@ -583,7 +299,6 @@ mod tests {
|
||||
vec![Edit {
|
||||
old_text: "".to_string(),
|
||||
new_text: "".to_string(),
|
||||
line_hint: None,
|
||||
}]
|
||||
);
|
||||
assert_eq!(
|
||||
@@ -596,8 +311,8 @@ mod tests {
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 100)]
|
||||
fn test_xml_multiline_content(mut rng: StdRng) {
|
||||
let mut parser = EditParser::new(EditFormat::XmlTags);
|
||||
fn test_multiline_content(mut rng: StdRng) {
|
||||
let mut parser = EditParser::new();
|
||||
assert_eq!(
|
||||
parse_random_chunks(
|
||||
"<old_text>line1\nline2\nline3</old_text><new_text>line1\nmodified line2\nline3</new_text>",
|
||||
@@ -607,7 +322,6 @@ mod tests {
|
||||
vec![Edit {
|
||||
old_text: "line1\nline2\nline3".to_string(),
|
||||
new_text: "line1\nmodified line2\nline3".to_string(),
|
||||
line_hint: None,
|
||||
}]
|
||||
);
|
||||
assert_eq!(
|
||||
@@ -620,8 +334,8 @@ mod tests {
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 1000)]
|
||||
fn test_xml_mismatched_tags(mut rng: StdRng) {
|
||||
let mut parser = EditParser::new(EditFormat::XmlTags);
|
||||
fn test_mismatched_tags(mut rng: StdRng) {
|
||||
let mut parser = EditParser::new();
|
||||
assert_eq!(
|
||||
parse_random_chunks(
|
||||
// Reduced from an actual Sonnet 3.7 output
|
||||
@@ -654,12 +368,10 @@ mod tests {
|
||||
Edit {
|
||||
old_text: "a\nb\nc".to_string(),
|
||||
new_text: "a\nB\nc".to_string(),
|
||||
line_hint: None,
|
||||
},
|
||||
Edit {
|
||||
old_text: "d\ne\nf".to_string(),
|
||||
new_text: "D\ne\nF".to_string(),
|
||||
line_hint: None,
|
||||
}
|
||||
]
|
||||
);
|
||||
@@ -671,7 +383,7 @@ mod tests {
|
||||
}
|
||||
);
|
||||
|
||||
let mut parser = EditParser::new(EditFormat::XmlTags);
|
||||
let mut parser = EditParser::new();
|
||||
assert_eq!(
|
||||
parse_random_chunks(
|
||||
// Reduced from an actual Opus 4 output
|
||||
@@ -690,7 +402,6 @@ mod tests {
|
||||
vec![Edit {
|
||||
old_text: "Lorem".to_string(),
|
||||
new_text: "LOREM".to_string(),
|
||||
line_hint: None,
|
||||
},]
|
||||
);
|
||||
assert_eq!(
|
||||
@@ -702,297 +413,10 @@ mod tests {
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 1000)]
|
||||
fn test_diff_fenced_single_edit(mut rng: StdRng) {
|
||||
let mut parser = EditParser::new(EditFormat::DiffFenced);
|
||||
assert_eq!(
|
||||
parse_random_chunks(
|
||||
indoc! {"
|
||||
<<<<<<< SEARCH
|
||||
original text
|
||||
=======
|
||||
updated text
|
||||
>>>>>>> REPLACE
|
||||
"},
|
||||
&mut parser,
|
||||
&mut rng
|
||||
),
|
||||
vec![Edit {
|
||||
old_text: "original text".to_string(),
|
||||
new_text: "updated text".to_string(),
|
||||
line_hint: None,
|
||||
}]
|
||||
);
|
||||
assert_eq!(
|
||||
parser.finish(),
|
||||
EditParserMetrics {
|
||||
tags: 0,
|
||||
mismatched_tags: 0
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 100)]
|
||||
fn test_diff_fenced_with_markdown_fences(mut rng: StdRng) {
|
||||
let mut parser = EditParser::new(EditFormat::DiffFenced);
|
||||
assert_eq!(
|
||||
parse_random_chunks(
|
||||
indoc! {"
|
||||
```diff
|
||||
<<<<<<< SEARCH
|
||||
from flask import Flask
|
||||
=======
|
||||
import math
|
||||
from flask import Flask
|
||||
>>>>>>> REPLACE
|
||||
```
|
||||
"},
|
||||
&mut parser,
|
||||
&mut rng
|
||||
),
|
||||
vec![Edit {
|
||||
old_text: "from flask import Flask".to_string(),
|
||||
new_text: "import math\nfrom flask import Flask".to_string(),
|
||||
line_hint: None,
|
||||
}]
|
||||
);
|
||||
assert_eq!(
|
||||
parser.finish(),
|
||||
EditParserMetrics {
|
||||
tags: 0,
|
||||
mismatched_tags: 0
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 100)]
|
||||
fn test_diff_fenced_multiple_edits(mut rng: StdRng) {
|
||||
let mut parser = EditParser::new(EditFormat::DiffFenced);
|
||||
assert_eq!(
|
||||
parse_random_chunks(
|
||||
indoc! {"
|
||||
<<<<<<< SEARCH
|
||||
first old
|
||||
=======
|
||||
first new
|
||||
>>>>>>> REPLACE
|
||||
|
||||
<<<<<<< SEARCH
|
||||
second old
|
||||
=======
|
||||
second new
|
||||
>>>>>>> REPLACE
|
||||
"},
|
||||
&mut parser,
|
||||
&mut rng
|
||||
),
|
||||
vec![
|
||||
Edit {
|
||||
old_text: "first old".to_string(),
|
||||
new_text: "first new".to_string(),
|
||||
line_hint: None,
|
||||
},
|
||||
Edit {
|
||||
old_text: "second old".to_string(),
|
||||
new_text: "second new".to_string(),
|
||||
line_hint: None,
|
||||
},
|
||||
]
|
||||
);
|
||||
assert_eq!(
|
||||
parser.finish(),
|
||||
EditParserMetrics {
|
||||
tags: 0,
|
||||
mismatched_tags: 0
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 100)]
|
||||
fn test_mixed_formats(mut rng: StdRng) {
|
||||
// Test XML format parser only parses XML tags
|
||||
let mut xml_parser = EditParser::new(EditFormat::XmlTags);
|
||||
assert_eq!(
|
||||
parse_random_chunks(
|
||||
indoc! {"
|
||||
<old_text>xml style old</old_text><new_text>xml style new</new_text>
|
||||
|
||||
<<<<<<< SEARCH
|
||||
diff style old
|
||||
=======
|
||||
diff style new
|
||||
>>>>>>> REPLACE
|
||||
"},
|
||||
&mut xml_parser,
|
||||
&mut rng
|
||||
),
|
||||
vec![Edit {
|
||||
old_text: "xml style old".to_string(),
|
||||
new_text: "xml style new".to_string(),
|
||||
line_hint: None,
|
||||
},]
|
||||
);
|
||||
assert_eq!(
|
||||
xml_parser.finish(),
|
||||
EditParserMetrics {
|
||||
tags: 2,
|
||||
mismatched_tags: 0
|
||||
}
|
||||
);
|
||||
|
||||
// Test diff-fenced format parser only parses diff markers
|
||||
let mut diff_parser = EditParser::new(EditFormat::DiffFenced);
|
||||
assert_eq!(
|
||||
parse_random_chunks(
|
||||
indoc! {"
|
||||
<old_text>xml style old</old_text><new_text>xml style new</new_text>
|
||||
|
||||
<<<<<<< SEARCH
|
||||
diff style old
|
||||
=======
|
||||
diff style new
|
||||
>>>>>>> REPLACE
|
||||
"},
|
||||
&mut diff_parser,
|
||||
&mut rng
|
||||
),
|
||||
vec![Edit {
|
||||
old_text: "diff style old".to_string(),
|
||||
new_text: "diff style new".to_string(),
|
||||
line_hint: None,
|
||||
},]
|
||||
);
|
||||
assert_eq!(
|
||||
diff_parser.finish(),
|
||||
EditParserMetrics {
|
||||
tags: 0,
|
||||
mismatched_tags: 0
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 100)]
|
||||
fn test_diff_fenced_empty_sections(mut rng: StdRng) {
|
||||
let mut parser = EditParser::new(EditFormat::DiffFenced);
|
||||
assert_eq!(
|
||||
parse_random_chunks(
|
||||
indoc! {"
|
||||
<<<<<<< SEARCH
|
||||
=======
|
||||
>>>>>>> REPLACE
|
||||
"},
|
||||
&mut parser,
|
||||
&mut rng
|
||||
),
|
||||
vec![Edit {
|
||||
old_text: "".to_string(),
|
||||
new_text: "".to_string(),
|
||||
line_hint: None,
|
||||
}]
|
||||
);
|
||||
assert_eq!(
|
||||
parser.finish(),
|
||||
EditParserMetrics {
|
||||
tags: 0,
|
||||
mismatched_tags: 0
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 100)]
|
||||
fn test_diff_fenced_with_line_hint(mut rng: StdRng) {
|
||||
let mut parser = EditParser::new(EditFormat::DiffFenced);
|
||||
let edits = parse_random_chunks(
|
||||
indoc! {"
|
||||
<<<<<<< SEARCH line=42
|
||||
original text
|
||||
=======
|
||||
updated text
|
||||
>>>>>>> REPLACE
|
||||
"},
|
||||
&mut parser,
|
||||
&mut rng,
|
||||
);
|
||||
assert_eq!(
|
||||
edits,
|
||||
vec![Edit {
|
||||
old_text: "original text".to_string(),
|
||||
line_hint: Some(42),
|
||||
new_text: "updated text".to_string(),
|
||||
}]
|
||||
);
|
||||
}
|
||||
#[gpui::test(iterations = 100)]
|
||||
fn test_xml_line_hints(mut rng: StdRng) {
|
||||
// Line hint is a single quoted line number
|
||||
let mut parser = EditParser::new(EditFormat::XmlTags);
|
||||
|
||||
let edits = parse_random_chunks(
|
||||
r#"
|
||||
<old_text line="23">original code</old_text>
|
||||
<new_text>updated code</new_text>"#,
|
||||
&mut parser,
|
||||
&mut rng,
|
||||
);
|
||||
|
||||
assert_eq!(edits.len(), 1);
|
||||
assert_eq!(edits[0].old_text, "original code");
|
||||
assert_eq!(edits[0].line_hint, Some(23));
|
||||
assert_eq!(edits[0].new_text, "updated code");
|
||||
|
||||
// Line hint is a single unquoted line number
|
||||
let mut parser = EditParser::new(EditFormat::XmlTags);
|
||||
|
||||
let edits = parse_random_chunks(
|
||||
r#"
|
||||
<old_text line=45>original code</old_text>
|
||||
<new_text>updated code</new_text>"#,
|
||||
&mut parser,
|
||||
&mut rng,
|
||||
);
|
||||
|
||||
assert_eq!(edits.len(), 1);
|
||||
assert_eq!(edits[0].old_text, "original code");
|
||||
assert_eq!(edits[0].line_hint, Some(45));
|
||||
assert_eq!(edits[0].new_text, "updated code");
|
||||
|
||||
// Line hint is a range
|
||||
let mut parser = EditParser::new(EditFormat::XmlTags);
|
||||
|
||||
let edits = parse_random_chunks(
|
||||
r#"
|
||||
<old_text line="23:50">original code</old_text>
|
||||
<new_text>updated code</new_text>"#,
|
||||
&mut parser,
|
||||
&mut rng,
|
||||
);
|
||||
|
||||
assert_eq!(edits.len(), 1);
|
||||
assert_eq!(edits[0].old_text, "original code");
|
||||
assert_eq!(edits[0].line_hint, Some(23));
|
||||
assert_eq!(edits[0].new_text, "updated code");
|
||||
|
||||
// No line hint
let mut parser = EditParser::new(EditFormat::XmlTags);
let edits = parse_random_chunks(
r#"
<old_text>old</old_text>
<new_text>new</new_text>"#,
&mut parser,
&mut rng,
);

assert_eq!(edits.len(), 1);
assert_eq!(edits[0].old_text, "old");
assert_eq!(edits[0].line_hint, None);
assert_eq!(edits[0].new_text, "new");
}
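For reference, the quoted, unquoted, and range forms exercised above can be handled with a few lines of string slicing. This is a minimal standalone sketch; `parse_line_hint` is an illustrative helper, not the parser's actual implementation.

```rust
/// Extract an optional line hint from an opening tag such as
/// `<old_text line="23">`, `<old_text line=45>`, or `<old_text line="23:50">`.
/// For a range, only the starting line is kept.
fn parse_line_hint(opening_tag: &str) -> Option<u32> {
    let attr = opening_tag.split("line=").nth(1)?;
    // Strip an optional opening quote, then read digits until the first
    // non-digit character (quote, colon, `>`, or whitespace).
    let attr = attr.trim_start_matches('"');
    let digits: String = attr.chars().take_while(|c| c.is_ascii_digit()).collect();
    digits.parse().ok()
}

fn main() {
    assert_eq!(parse_line_hint(r#"<old_text line="23">"#), Some(23));
    assert_eq!(parse_line_hint("<old_text line=45>"), Some(45));
    assert_eq!(parse_line_hint(r#"<old_text line="23:50">"#), Some(23));
    assert_eq!(parse_line_hint("<old_text>"), None);
}
```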
|
||||
|
||||
#[derive(Default, Debug, PartialEq, Eq)]
struct Edit {
old_text: String,
new_text: String,
line_hint: Option<u32>,
}

fn parse_random_chunks(input: &str, parser: &mut EditParser, rng: &mut StdRng) -> Vec<Edit> {
@@ -1009,15 +433,10 @@ mod tests {
|
||||
for chunk_ix in chunk_indices {
|
||||
for event in parser.push(&input[last_ix..chunk_ix]) {
|
||||
match event {
|
||||
EditParserEvent::OldTextChunk {
|
||||
chunk,
|
||||
done,
|
||||
line_hint,
|
||||
} => {
|
||||
EditParserEvent::OldTextChunk { chunk, done } => {
|
||||
old_text.as_mut().unwrap().push_str(&chunk);
|
||||
if done {
|
||||
pending_edit.old_text = old_text.take().unwrap();
|
||||
pending_edit.line_hint = line_hint;
|
||||
new_text = Some(String::new());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -26,7 +26,6 @@ use std::{
|
||||
cmp::Reverse,
|
||||
fmt::{self, Display},
|
||||
io::Write as _,
|
||||
path::Path,
|
||||
str::FromStr,
|
||||
sync::mpsc,
|
||||
};
|
||||
@@ -39,11 +38,10 @@ fn eval_extract_handle_command_output() {
|
||||
//
|
||||
// Model | Pass rate
|
||||
// ----------------------------|----------
|
||||
// claude-3.7-sonnet | 0.99 (2025-06-14)
|
||||
// claude-sonnet-4 | 0.97 (2025-06-14)
|
||||
// gemini-2.5-pro-06-05 | 0.98 (2025-06-16)
|
||||
// gemini-2.5-flash | 0.11 (2025-05-22)
|
||||
// gpt-4.1 | 1.00 (2025-05-22)
|
||||
// claude-3.7-sonnet | 0.98
|
||||
// gemini-2.5-pro-06-05 | 0.77
|
||||
// gemini-2.5-flash | 0.11
|
||||
// gpt-4.1 | 1.00
|
||||
|
||||
let input_file_path = "root/blame.rs";
|
||||
let input_file_content = include_str!("evals/fixtures/extract_handle_command_output/before.rs");
|
||||
@@ -59,7 +57,7 @@ fn eval_extract_handle_command_output() {
|
||||
let edit_description = "Extract `handle_command_output` method from `run_git_blame`.";
|
||||
eval(
|
||||
100,
|
||||
0.95,
|
||||
0.7, // Taking the lower bar for Gemini
|
||||
0.05,
|
||||
EvalInput::from_conversation(
|
||||
vec![
|
||||
@@ -112,13 +110,6 @@ fn eval_extract_handle_command_output() {
|
||||
#[test]
|
||||
#[cfg_attr(not(feature = "eval"), ignore)]
|
||||
fn eval_delete_run_git_blame() {
|
||||
// Model | Pass rate
|
||||
// ----------------------------|----------
|
||||
// claude-3.7-sonnet | 1.0 (2025-06-14)
|
||||
// claude-sonnet-4 | 0.96 (2025-06-14)
|
||||
// gemini-2.5-pro-06-05 | 1.0 (2025-06-16)
|
||||
// gemini-2.5-flash |
|
||||
// gpt-4.1 |
|
||||
let input_file_path = "root/blame.rs";
|
||||
let input_file_content = include_str!("evals/fixtures/delete_run_git_blame/before.rs");
|
||||
let output_file_content = include_str!("evals/fixtures/delete_run_git_blame/after.rs");
|
||||
@@ -174,12 +165,13 @@ fn eval_delete_run_git_blame() {
|
||||
#[test]
|
||||
#[cfg_attr(not(feature = "eval"), ignore)]
|
||||
fn eval_translate_doc_comments() {
|
||||
// Results for 2025-05-22
|
||||
//
|
||||
// Model | Pass rate
|
||||
// ============================================
|
||||
//
|
||||
// claude-3.7-sonnet | 1.0 (2025-06-14)
|
||||
// claude-sonnet-4 | 1.0 (2025-06-14)
|
||||
// gemini-2.5-pro-preview-03-25 | 1.0 (2025-05-22)
|
||||
// claude-3.7-sonnet |
|
||||
// gemini-2.5-pro-preview-03-25 | 1.0
|
||||
// gemini-2.5-flash-preview-04-17 |
|
||||
// gpt-4.1 |
|
||||
let input_file_path = "root/canvas.rs";
|
||||
@@ -236,12 +228,13 @@ fn eval_translate_doc_comments() {
|
||||
#[test]
|
||||
#[cfg_attr(not(feature = "eval"), ignore)]
|
||||
fn eval_use_wasi_sdk_in_compile_parser_to_wasm() {
|
||||
// Results for 2025-05-22
|
||||
//
|
||||
// Model | Pass rate
|
||||
// ============================================
|
||||
//
|
||||
// claude-3.7-sonnet | 0.96 (2025-06-14)
|
||||
// claude-sonnet-4 | 0.11 (2025-06-14)
|
||||
// gemini-2.5-pro-preview-latest | 0.99 (2025-06-16)
|
||||
// claude-3.7-sonnet | 0.98
|
||||
// gemini-2.5-pro-preview-03-25 | 0.99
|
||||
// gemini-2.5-flash-preview-04-17 |
|
||||
// gpt-4.1 |
|
||||
let input_file_path = "root/lib.rs";
|
||||
@@ -361,12 +354,13 @@ fn eval_use_wasi_sdk_in_compile_parser_to_wasm() {
|
||||
#[test]
|
||||
#[cfg_attr(not(feature = "eval"), ignore)]
|
||||
fn eval_disable_cursor_blinking() {
|
||||
// Results for 2025-05-22
|
||||
//
|
||||
// Model | Pass rate
|
||||
// ============================================
|
||||
//
|
||||
// claude-3.7-sonnet | 0.99 (2025-06-14)
|
||||
// claude-sonnet-4 | 0.85 (2025-06-14)
|
||||
// gemini-2.5-pro-preview-latest | 0.97 (2025-06-16)
|
||||
// claude-3.7-sonnet |
|
||||
// gemini-2.5-pro-preview-03-25 | 1.0
|
||||
// gemini-2.5-flash-preview-04-17 |
|
||||
// gpt-4.1 |
|
||||
let input_file_path = "root/editor.rs";
|
||||
@@ -444,20 +438,14 @@ fn eval_disable_cursor_blinking() {
|
||||
#[test]
|
||||
#[cfg_attr(not(feature = "eval"), ignore)]
|
||||
fn eval_from_pixels_constructor() {
|
||||
// Results for 2025-06-13
|
||||
// Results for 2025-05-22
|
||||
//
|
||||
// The outcome of this evaluation depends heavily on the LINE_HINT_TOLERANCE
|
||||
// value. Higher values improve the pass rate but may sometimes cause
|
||||
// edits to be misapplied. In the context of this eval, this means
|
||||
// the agent might add from_pixels tests in incorrect locations
|
||||
// (e.g., at the beginning of the file), yet the evaluation may still
|
||||
// rate it highly.
|
||||
// Model | Pass rate
|
||||
// ============================================
|
||||
//
|
||||
// Model | Date | Pass rate
|
||||
// =========================================================
|
||||
// claude-4.0-sonnet | 2025-06-14 | 0.99
|
||||
// claude-3.7-sonnet | 2025-06-14 | 0.88
|
||||
// gemini-2.5-pro-preview-06-05 | 2025-06-16 | 0.98
|
||||
// claude-3.7-sonnet |
|
||||
// gemini-2.5-pro-preview-03-25 | 0.94
|
||||
// gemini-2.5-flash-preview-04-17 |
|
||||
// gpt-4.1 |
|
||||
let input_file_path = "root/canvas.rs";
|
||||
let input_file_content = include_str!("evals/fixtures/from_pixels_constructor/before.rs");
|
||||
@@ -467,7 +455,7 @@ fn eval_from_pixels_constructor() {
|
||||
0.95,
|
||||
// For whatever reason, this eval produces more mismatched tags.
|
||||
// Increasing for now, let's see if we can bring this down.
|
||||
0.25,
|
||||
0.2,
|
||||
EvalInput::from_conversation(
|
||||
vec![
|
||||
message(
|
||||
@@ -653,14 +641,15 @@ fn eval_from_pixels_constructor() {
|
||||
#[test]
|
||||
#[cfg_attr(not(feature = "eval"), ignore)]
|
||||
fn eval_zode() {
|
||||
// Results for 2025-05-22
|
||||
//
|
||||
// Model | Pass rate
|
||||
// ============================================
|
||||
//
|
||||
// claude-3.7-sonnet | 1.0 (2025-06-14)
|
||||
// claude-sonnet-4 | 1.0 (2025-06-14)
|
||||
// gemini-2.5-pro-preview-03-25 | 1.0 (2025-05-22)
|
||||
// gemini-2.5-flash-preview-04-17 | 1.0 (2025-05-22)
|
||||
// gpt-4.1 | 1.0 (2025-05-22)
|
||||
// claude-3.7-sonnet | 1.0
|
||||
// gemini-2.5-pro-preview-03-25 | 1.0
|
||||
// gemini-2.5-flash-preview-04-17 | 1.0
|
||||
// gpt-4.1 | 1.0
|
||||
let input_file_path = "root/zode.py";
|
||||
let input_content = None;
|
||||
let edit_description = "Create the main Zode CLI script";
|
||||
@@ -759,12 +748,13 @@ fn eval_zode() {
|
||||
#[test]
|
||||
#[cfg_attr(not(feature = "eval"), ignore)]
|
||||
fn eval_add_overwrite_test() {
|
||||
// Results for 2025-05-22
|
||||
//
|
||||
// Model | Pass rate
|
||||
// ============================================
|
||||
//
|
||||
// claude-3.7-sonnet | 0.65 (2025-06-14)
|
||||
// claude-sonnet-4 | 0.07 (2025-06-14)
|
||||
// gemini-2.5-pro-preview-03-25 | 0.35 (2025-05-22)
|
||||
// claude-3.7-sonnet | 0.16
|
||||
// gemini-2.5-pro-preview-03-25 | 0.35
|
||||
// gemini-2.5-flash-preview-04-17 |
|
||||
// gpt-4.1 |
|
||||
let input_file_path = "root/action_log.rs";
|
||||
@@ -994,14 +984,15 @@ fn eval_create_empty_file() {
|
||||
// thoughts into it. This issue is not specific to empty files, but
|
||||
// it's easier to reproduce with them.
|
||||
//
|
||||
// Results for 2025-05-21:
|
||||
//
|
||||
// Model | Pass rate
|
||||
// ============================================
|
||||
//
|
||||
// claude-3.7-sonnet | 1.00 (2025-06-14)
|
||||
// claude-sonnet-4 | 1.00 (2025-06-14)
|
||||
// gemini-2.5-pro-preview-03-25 | 1.00 (2025-05-21)
|
||||
// gemini-2.5-flash-preview-04-17 | 1.00 (2025-05-21)
|
||||
// gpt-4.1 | 1.00 (2025-05-21)
|
||||
// claude-3.7-sonnet | 1.00
|
||||
// gemini-2.5-pro-preview-03-25 | 1.00
|
||||
// gemini-2.5-flash-preview-04-17 | 1.00
|
||||
// gpt-4.1 | 1.00
|
||||
//
|
||||
//
|
||||
// TODO: gpt-4.1-mini errored 38 times:
|
||||
@@ -1497,16 +1488,8 @@ impl EditAgentTest {
|
||||
.await;
|
||||
let action_log = cx.new(|_| ActionLog::new(project.clone()));
|
||||
|
||||
let edit_format = EditFormat::from_env(agent_model.clone()).unwrap();
|
||||
|
||||
Self {
|
||||
agent: EditAgent::new(
|
||||
agent_model,
|
||||
project.clone(),
|
||||
action_log,
|
||||
Templates::new(),
|
||||
edit_format,
|
||||
),
|
||||
agent: EditAgent::new(agent_model, project.clone(), action_log, Templates::new()),
|
||||
project,
|
||||
judge_model,
|
||||
}
|
||||
@@ -1566,7 +1549,6 @@ impl EditAgentTest {
|
||||
.collect::<Vec<_>>();
|
||||
let worktrees = vec![WorktreeContext {
|
||||
root_name: "root".to_string(),
|
||||
abs_path: Path::new("/path/to/root").into(),
|
||||
rules_file: None,
|
||||
}];
|
||||
let prompt_builder = PromptBuilder::new(None)?;
|
||||
@@ -1652,20 +1634,15 @@ impl EditAgentTest {
|
||||
}
|
||||
|
||||
async fn retry_on_rate_limit<R>(mut request: impl AsyncFnMut() -> Result<R>) -> Result<R> {
let mut attempt = 0;
loop {
attempt += 1;
match request().await {
Ok(result) => return Ok(result),
Err(err) => match err.downcast::<LanguageModelCompletionError>() {
Ok(err) => match err {
LanguageModelCompletionError::RateLimit(duration) => {
// Wait for the duration supplied, with some jitter to avoid all requests being made at the same time.
let jitter = duration.mul_f64(rand::thread_rng().gen_range(0.0..1.0));
eprintln!(
"Attempt #{attempt}: Rate limit exceeded. Retry after {duration:?} + jitter of {jitter:?}"
);
Timer::after(duration + jitter).await;
// Wait until after we are allowed to try again
eprintln!("Rate limit exceeded. Waiting for {duration:?}...",);
Timer::after(duration).await;
continue;
}
_ => return Err(err.into()),
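The hunk above swaps the fixed wait for one with random jitter. A minimal sketch of the same backoff-with-jitter idea, assuming only `std` plus the `rand` crate and using a plain `Duration` in place of the crate's rate-limit error type:

```rust
use rand::Rng;
use std::time::Duration;

/// Retry `request` whenever it reports a rate limit, sleeping for the
/// server-supplied duration plus up to 100% random jitter so that
/// concurrent callers don't all retry at the same instant.
fn retry_on_rate_limit<T>(mut request: impl FnMut() -> Result<T, Duration>) -> T {
    let mut attempt = 0;
    loop {
        attempt += 1;
        match request() {
            Ok(value) => return value,
            Err(retry_after) => {
                let jitter = retry_after.mul_f64(rand::thread_rng().gen_range(0.0..1.0));
                eprintln!("Attempt #{attempt}: rate limited, sleeping {:?}", retry_after + jitter);
                std::thread::sleep(retry_after + jitter);
            }
        }
    }
}

fn main() {
    // Simulate two rate-limited attempts before a success.
    let mut failures = 2;
    let value = retry_on_rate_limit(|| {
        if failures > 0 {
            failures -= 1;
            Err(Duration::from_millis(10))
        } else {
            Ok(42)
        }
    });
    assert_eq!(value, 42);
}
```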
|
||||
|
||||
@@ -10,9 +10,8 @@ const DELETION_COST: u32 = 10;
|
||||
pub struct StreamingFuzzyMatcher {
|
||||
snapshot: TextBufferSnapshot,
|
||||
query_lines: Vec<String>,
|
||||
line_hint: Option<u32>,
|
||||
incomplete_line: String,
|
||||
matches: Vec<Range<usize>>,
|
||||
best_matches: Vec<Range<usize>>,
|
||||
matrix: SearchMatrix,
|
||||
}
|
||||
|
||||
@@ -22,9 +21,8 @@ impl StreamingFuzzyMatcher {
|
||||
Self {
|
||||
snapshot,
|
||||
query_lines: Vec::new(),
|
||||
line_hint: None,
|
||||
incomplete_line: String::new(),
|
||||
matches: Vec::new(),
|
||||
best_matches: Vec::new(),
|
||||
matrix: SearchMatrix::new(buffer_line_count + 1),
|
||||
}
|
||||
}
|
||||
@@ -43,14 +41,9 @@ impl StreamingFuzzyMatcher {
|
||||
///
|
||||
/// Returns `Some(range)` if a match has been found with the accumulated
|
||||
/// query so far, or `None` if no suitable match exists yet.
|
||||
pub fn push(&mut self, chunk: &str, line_hint: Option<u32>) -> Option<Range<usize>> {
|
||||
if line_hint.is_some() {
|
||||
self.line_hint = line_hint;
|
||||
}
|
||||
|
||||
pub fn push(&mut self, chunk: &str) -> Option<Range<usize>> {
|
||||
// Add the chunk to our incomplete line buffer
|
||||
self.incomplete_line.push_str(chunk);
|
||||
self.line_hint = line_hint;
|
||||
|
||||
if let Some((last_pos, _)) = self.incomplete_line.match_indices('\n').next_back() {
|
||||
let complete_part = &self.incomplete_line[..=last_pos];
|
||||
@@ -62,11 +55,20 @@ impl StreamingFuzzyMatcher {
|
||||
|
||||
self.incomplete_line.replace_range(..last_pos + 1, "");
|
||||
|
||||
self.matches = self.resolve_location_fuzzy();
|
||||
}
|
||||
self.best_matches = self.resolve_location_fuzzy();
|
||||
|
||||
let best_match = self.select_best_match();
|
||||
best_match.or_else(|| self.matches.first().cloned())
|
||||
if let Some(first_match) = self.best_matches.first() {
|
||||
Some(first_match.clone())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
} else {
|
||||
if let Some(first_match) = self.best_matches.first() {
|
||||
Some(first_match.clone())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
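Both versions of `push` share the same line-buffering step: append the chunk, peel off everything up to the last newline, and keep the partial tail for the next call. A std-only sketch of that step, with `push_chunk` as an illustrative stand-in for the matcher's internals:

```rust
/// Append a streamed chunk to `incomplete_line`, move any complete lines
/// (everything up to and including the last '\n') into `query_lines`, and
/// keep the trailing partial line buffered for the next chunk.
fn push_chunk(incomplete_line: &mut String, query_lines: &mut Vec<String>, chunk: &str) {
    incomplete_line.push_str(chunk);
    if let Some((last_pos, _)) = incomplete_line.match_indices('\n').next_back() {
        let complete_part = &incomplete_line[..=last_pos];
        query_lines.extend(complete_part.lines().map(|line| line.to_string()));
        incomplete_line.replace_range(..last_pos + 1, "");
    }
}

fn main() {
    let mut incomplete = String::new();
    let mut lines = Vec::new();
    push_chunk(&mut incomplete, &mut lines, "first li");
    push_chunk(&mut incomplete, &mut lines, "ne\nsecond");
    assert_eq!(lines, vec!["first line".to_string()]);
    assert_eq!(incomplete, "second");
}
```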
|
||||
|
||||
/// Finish processing and return the final best match(es).
|
||||
@@ -78,9 +80,9 @@ impl StreamingFuzzyMatcher {
|
||||
if !self.incomplete_line.is_empty() {
|
||||
self.query_lines.push(self.incomplete_line.clone());
|
||||
self.incomplete_line.clear();
|
||||
self.matches = self.resolve_location_fuzzy();
|
||||
self.best_matches = self.resolve_location_fuzzy();
|
||||
}
|
||||
self.matches.clone()
|
||||
self.best_matches.clone()
|
||||
}
|
||||
|
||||
fn resolve_location_fuzzy(&mut self) -> Vec<Range<usize>> {
|
||||
@@ -196,43 +198,6 @@ impl StreamingFuzzyMatcher {
|
||||
|
||||
valid_matches.into_iter().map(|(_, range)| range).collect()
|
||||
}
|
||||
|
||||
/// Return the best match with starting position close enough to line_hint.
|
||||
pub fn select_best_match(&self) -> Option<Range<usize>> {
|
||||
// Allow the line hint to be off by this many lines.
// Higher values increase the probability of applying edits in the wrong place,
// lower values increase edit failures and overall conversation length.
const LINE_HINT_TOLERANCE: u32 = 200;
|
||||
if self.matches.is_empty() {
|
||||
return None;
|
||||
}
|
||||
|
||||
if self.matches.len() == 1 {
|
||||
return self.matches.first().cloned();
|
||||
}
|
||||
|
||||
let Some(line_hint) = self.line_hint else {
|
||||
// Multiple ambiguous matches
|
||||
return None;
|
||||
};
|
||||
|
||||
let mut best_match = None;
|
||||
let mut best_distance = u32::MAX;
|
||||
|
||||
for range in &self.matches {
|
||||
let start_point = self.snapshot.offset_to_point(range.start);
|
||||
let start_line = start_point.row;
|
||||
let distance = start_line.abs_diff(line_hint);
|
||||
|
||||
if distance <= LINE_HINT_TOLERANCE && distance < best_distance {
best_distance = distance;
best_match = Some(range.clone());
}
}

best_match
}
}
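A compact sketch of the selection rule `select_best_match` implements, assuming the candidates are already reduced to starting line numbers rather than buffer ranges:

```rust
/// Pick the candidate whose starting line is closest to `line_hint`,
/// ignoring candidates farther away than `tolerance` lines. With no hint,
/// multiple candidates stay ambiguous and nothing is selected.
fn select_closest(candidates: &[u32], line_hint: Option<u32>, tolerance: u32) -> Option<u32> {
    match candidates {
        [] => None,
        [only] => Some(*only),
        _ => {
            let hint = line_hint?;
            candidates
                .iter()
                .copied()
                .filter(|line| line.abs_diff(hint) <= tolerance)
                .min_by_key(|line| line.abs_diff(hint))
        }
    }
}

fn main() {
    // Three matches starting at lines 1, 5, and 9; a hint near line 5 picks the middle one.
    assert_eq!(select_closest(&[1, 5, 9], Some(6), 200), Some(5));
    // Without a hint the ambiguity is left unresolved.
    assert_eq!(select_closest(&[1, 5, 9], None, 200), None);
}
```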
|
||||
|
||||
fn fuzzy_eq(left: &str, right: &str) -> bool {
|
||||
@@ -675,52 +640,6 @@ mod tests {
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_line_hint_selection() {
|
||||
let text = indoc! {r#"
|
||||
fn first_function() {
|
||||
return 42;
|
||||
}
|
||||
|
||||
fn second_function() {
|
||||
return 42;
|
||||
}
|
||||
|
||||
fn third_function() {
|
||||
return 42;
|
||||
}
|
||||
"#};
|
||||
|
||||
let buffer = TextBuffer::new(0, BufferId::new(1).unwrap(), text.to_string());
|
||||
let snapshot = buffer.snapshot();
|
||||
let mut matcher = StreamingFuzzyMatcher::new(snapshot.clone());
|
||||
|
||||
// Given a query that matches all three functions
|
||||
let query = "return 42;\n";
|
||||
|
||||
// Test with line hint pointing to second function (around line 5)
|
||||
let best_match = matcher.push(query, Some(5)).expect("Failed to match query");
|
||||
|
||||
let matched_text = snapshot
|
||||
.text_for_range(best_match.clone())
|
||||
.collect::<String>();
|
||||
assert!(matched_text.contains("return 42;"));
|
||||
assert_eq!(
|
||||
best_match,
|
||||
63..77,
|
||||
"Expected to match `second_function` based on the line hint"
|
||||
);
|
||||
|
||||
let mut matcher = StreamingFuzzyMatcher::new(snapshot.clone());
|
||||
matcher.push(query, None);
|
||||
matcher.finish();
|
||||
let best_match = matcher.select_best_match();
|
||||
assert!(
|
||||
best_match.is_none(),
|
||||
"Best match should be None when query cannot be uniquely resolved"
|
||||
);
|
||||
}
|
||||
|
||||
#[track_caller]
|
||||
fn assert_location_resolution(text_with_expected_range: &str, query: &str, rng: &mut StdRng) {
|
||||
let (text, expected_ranges) = marked_text_ranges(text_with_expected_range, false);
|
||||
@@ -734,7 +653,7 @@ mod tests {
|
||||
|
||||
// Push chunks incrementally
|
||||
for chunk in &chunks {
|
||||
matcher.push(chunk, None);
|
||||
matcher.push(chunk);
|
||||
}
|
||||
|
||||
let actual_ranges = matcher.finish();
|
||||
@@ -787,7 +706,7 @@ mod tests {
|
||||
|
||||
fn push(finder: &mut StreamingFuzzyMatcher, chunk: &str) -> Option<String> {
|
||||
finder
|
||||
.push(chunk, None)
|
||||
.push(chunk)
|
||||
.map(|range| finder.snapshot.text_for_range(range).collect::<String>())
|
||||
}
|
||||
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
use crate::{
|
||||
Templates,
|
||||
edit_agent::{EditAgent, EditAgentOutput, EditAgentOutputEvent, EditFormat},
|
||||
edit_agent::{EditAgent, EditAgentOutput, EditAgentOutputEvent},
|
||||
schema::json_schema_for,
|
||||
ui::{COLLAPSED_LINES, ToolOutputPreview},
|
||||
};
|
||||
@@ -10,7 +10,7 @@ use assistant_tool::{
|
||||
ToolUseStatus,
|
||||
};
|
||||
use buffer_diff::{BufferDiff, BufferDiffSnapshot};
|
||||
use editor::{Editor, EditorMode, MinimapVisibility, MultiBuffer, PathKey, scroll::Autoscroll};
|
||||
use editor::{Editor, EditorMode, MinimapVisibility, MultiBuffer, PathKey};
|
||||
use futures::StreamExt;
|
||||
use gpui::{
|
||||
Animation, AnimationExt, AnyWindowHandle, App, AppContext, AsyncApp, Entity, Task,
|
||||
@@ -69,13 +69,13 @@ pub struct EditFileToolInput {
|
||||
/// start each path with one of the project's root directories.
|
||||
///
|
||||
/// The following examples assume we have two root directories in the project:
|
||||
/// - /a/b/backend
|
||||
/// - /c/d/frontend
|
||||
/// - backend
|
||||
/// - frontend
|
||||
///
|
||||
/// <example>
|
||||
/// `backend/src/main.rs`
|
||||
///
|
||||
/// Notice how the file path starts with `backend`. Without that, the path
|
||||
/// Notice how the file path starts with root-1. Without that, the path
|
||||
/// would be ambiguous and the call would fail!
|
||||
/// </example>
|
||||
///
|
||||
@@ -201,14 +201,8 @@ impl Tool for EditFileTool {
|
||||
let card_clone = card.clone();
|
||||
let action_log_clone = action_log.clone();
|
||||
let task = cx.spawn(async move |cx: &mut AsyncApp| {
|
||||
let edit_format = EditFormat::from_model(model.clone())?;
|
||||
let edit_agent = EditAgent::new(
|
||||
model,
|
||||
project.clone(),
|
||||
action_log_clone,
|
||||
Templates::new(),
|
||||
edit_format,
|
||||
);
|
||||
let edit_agent =
|
||||
EditAgent::new(model, project.clone(), action_log_clone, Templates::new());
|
||||
|
||||
let buffer = project
|
||||
.update(cx, |project, cx| {
|
||||
@@ -339,18 +333,14 @@ impl Tool for EditFileTool {
|
||||
);
|
||||
anyhow::ensure!(
|
||||
ambiguous_ranges.is_empty(),
|
||||
{
|
||||
let line_numbers = ambiguous_ranges
|
||||
.iter()
|
||||
.map(|range| range.start.to_string())
|
||||
.collect::<Vec<_>>()
|
||||
.join(", ");
|
||||
formatdoc! {"
|
||||
<old_text> matches more than one position in the file (lines: {line_numbers}). Read the
|
||||
relevant sections of {input_path} again and extend <old_text> so
|
||||
that I can perform the requested edits.
|
||||
"}
|
||||
}
|
||||
// TODO: Include ambiguous_ranges, converted to line numbers.
|
||||
// This would work best if we add `line_hint` parameter
|
||||
// to edit_file_tool
|
||||
formatdoc! {"
|
||||
<old_text> matches more than one position in the file. Read the
|
||||
relevant sections of {input_path} again and extend <old_text> so
|
||||
that I can perform the requested edits.
|
||||
"}
|
||||
);
|
||||
Ok(ToolResultOutput {
|
||||
content: ToolResultContent::Text("No edits were made.".into()),
|
||||
@@ -810,30 +800,11 @@ impl ToolCard for EditFileToolCard {
|
||||
if let Some(active_editor) = item.downcast::<Editor>() {
|
||||
active_editor
|
||||
.update_in(cx, |editor, window, cx| {
|
||||
let snapshot =
|
||||
editor.buffer().read(cx).snapshot(cx);
|
||||
let first_hunk = editor
|
||||
.diff_hunks_in_ranges(
|
||||
&[editor::Anchor::min()
|
||||
..editor::Anchor::max()],
|
||||
&snapshot,
|
||||
)
|
||||
.next();
|
||||
if let Some(first_hunk) = first_hunk {
|
||||
let first_hunk_start =
|
||||
first_hunk.multi_buffer_range().start;
|
||||
editor.change_selections(
|
||||
Some(Autoscroll::fit()),
|
||||
window,
|
||||
cx,
|
||||
|selections| {
|
||||
selections.select_anchor_ranges([
|
||||
first_hunk_start
|
||||
..first_hunk_start,
|
||||
]);
|
||||
},
|
||||
)
|
||||
}
|
||||
editor.go_to_singleton_buffer_point(
|
||||
language::Point::new(0, 0),
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
})
|
||||
.log_err();
|
||||
}
|
||||
|
||||
@@ -31,8 +31,8 @@ pub struct ReadFileToolInput {
|
||||
/// <example>
|
||||
/// If the project has the following root directories:
|
||||
///
|
||||
/// - /a/b/directory1
|
||||
/// - /c/d/directory2
|
||||
/// - directory1
|
||||
/// - directory2
|
||||
///
|
||||
/// If you want to access `file.txt` in `directory1`, you should use the path `directory1/file.txt`.
|
||||
/// If you want to access `file.txt` in `directory2`, you should use the path `directory2/file.txt`.
|
||||
|
||||
@@ -3,21 +3,21 @@ You MUST respond with a series of edits to a file, using the following format:
|
||||
```
|
||||
<edits>
|
||||
|
||||
<old_text line=10>
|
||||
<old_text>
|
||||
OLD TEXT 1 HERE
|
||||
</old_text>
|
||||
<new_text>
|
||||
NEW TEXT 1 HERE
|
||||
</new_text>
|
||||
|
||||
<old_text line=456>
|
||||
<old_text>
|
||||
OLD TEXT 2 HERE
|
||||
</old_text>
|
||||
<new_text>
|
||||
NEW TEXT 2 HERE
|
||||
</new_text>
|
||||
|
||||
<old_text line=42>
|
||||
<old_text>
|
||||
OLD TEXT 3 HERE
|
||||
</old_text>
|
||||
<new_text>
|
||||
@@ -33,7 +33,6 @@ NEW TEXT 3 HERE
|
||||
- `<old_text>` must exactly match existing file content, including indentation
|
||||
- `<old_text>` must come from the actual file, not an outline
|
||||
- `<old_text>` cannot be empty
|
||||
- `line` should be a starting line number for the text to be replaced
|
||||
- Be minimal with replacements:
|
||||
- For unique lines, include only those lines
|
||||
- For non-unique lines, include enough context to identify them
|
||||
@@ -49,7 +48,7 @@ Claude and gpt-4.1 don't really need it. --}}
|
||||
<example>
|
||||
<edits>
|
||||
|
||||
<old_text line=3>
|
||||
<old_text>
|
||||
struct User {
|
||||
name: String,
|
||||
email: String,
|
||||
@@ -63,7 +62,7 @@ struct User {
|
||||
}
|
||||
</new_text>
|
||||
|
||||
<old_text line=25>
|
||||
<old_text>
|
||||
let user = User {
|
||||
name: String::from("John"),
|
||||
email: String::from("john@example.com"),
|
||||
@@ -1,77 +0,0 @@
|
||||
You MUST respond with a series of edits to a file, using the following diff format:
|
||||
|
||||
```
|
||||
<<<<<<< SEARCH line=1
|
||||
from flask import Flask
|
||||
=======
|
||||
import math
|
||||
from flask import Flask
|
||||
>>>>>>> REPLACE
|
||||
|
||||
<<<<<<< SEARCH line=325
|
||||
return 0
|
||||
=======
|
||||
print("Done")
|
||||
|
||||
return 0
|
||||
>>>>>>> REPLACE
|
||||
|
||||
```
|
||||
|
||||
# File Editing Instructions
|
||||
|
||||
- Use the SEARCH/REPLACE diff format shown above
|
||||
- The SEARCH section must exactly match existing file content, including indentation
|
||||
- The SEARCH section must come from the actual file, not an outline
|
||||
- The SEARCH section cannot be empty
|
||||
- `line` should be a starting line number for the text to be replaced
|
||||
- Be minimal with replacements:
|
||||
- For unique lines, include only those lines
|
||||
- For non-unique lines, include enough context to identify them
|
||||
- Do not escape quotes, newlines, or other characters
|
||||
- For multiple occurrences, repeat the same diff block for each instance
|
||||
- Edits are sequential - each assumes previous edits are already applied
|
||||
- Only edit the specified file
|
||||
|
||||
# Example
|
||||
|
||||
```
|
||||
<<<<<<< SEARCH line=3
|
||||
struct User {
|
||||
name: String,
|
||||
email: String,
|
||||
}
|
||||
=======
|
||||
struct User {
|
||||
name: String,
|
||||
email: String,
|
||||
active: bool,
|
||||
}
|
||||
>>>>>>> REPLACE
|
||||
|
||||
<<<<<<< SEARCH line=25
|
||||
let user = User {
|
||||
name: String::from("John"),
|
||||
email: String::from("john@example.com"),
|
||||
};
|
||||
=======
|
||||
let user = User {
|
||||
name: String::from("John"),
|
||||
email: String::from("john@example.com"),
|
||||
active: true,
|
||||
};
|
||||
>>>>>>> REPLACE
|
||||
```
|
||||
|
||||
|
||||
# Final instructions

Tool calls have been disabled. You MUST respond using the SEARCH/REPLACE diff format only.

<file_to_edit>
{{path}}
</file_to_edit>

<edit_description>
{{edit_description}}
</edit_description>
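Applying one SEARCH/REPLACE block from this format boils down to replacing the first exact occurrence of the SEARCH text. A minimal sketch, with `apply_search_replace` as an illustrative name:

```rust
/// Apply one SEARCH/REPLACE block to `contents`, replacing the first
/// occurrence of `search` with `replace`. Returns `None` when the search
/// text is not found, mirroring the "must exactly match" rule above.
fn apply_search_replace(contents: &str, search: &str, replace: &str) -> Option<String> {
    let start = contents.find(search)?;
    let mut edited = String::with_capacity(contents.len());
    edited.push_str(&contents[..start]);
    edited.push_str(replace);
    edited.push_str(&contents[start + search.len()..]);
    Some(edited)
}

fn main() {
    let before = "from flask import Flask\n\napp = Flask(__name__)\n";
    let after = apply_search_replace(
        before,
        "from flask import Flask\n",
        "import math\nfrom flask import Flask\n",
    )
    .unwrap();
    assert!(after.starts_with("import math\n"));
}
```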
|
||||
@@ -221,7 +221,7 @@ pub fn check(_: &Check, window: &mut Window, cx: &mut App) {
|
||||
}
|
||||
|
||||
if let Some(updater) = AutoUpdater::get(cx) {
|
||||
updater.update(cx, |updater, cx| updater.poll(UpdateCheckType::Manual, cx));
|
||||
updater.update(cx, |updater, cx| updater.poll(cx));
|
||||
} else {
|
||||
drop(window.prompt(
|
||||
gpui::PromptLevel::Info,
|
||||
@@ -296,11 +296,6 @@ impl InstallerDir {
|
||||
}
|
||||
}
|
||||
|
||||
pub enum UpdateCheckType {
|
||||
Automatic,
|
||||
Manual,
|
||||
}
|
||||
|
||||
impl AutoUpdater {
|
||||
pub fn get(cx: &mut App) -> Option<Entity<Self>> {
|
||||
cx.default_global::<GlobalAutoUpdate>().0.clone()
|
||||
@@ -318,13 +313,13 @@ impl AutoUpdater {
|
||||
pub fn start_polling(&self, cx: &mut Context<Self>) -> Task<Result<()>> {
|
||||
cx.spawn(async move |this, cx| {
|
||||
loop {
|
||||
this.update(cx, |this, cx| this.poll(UpdateCheckType::Automatic, cx))?;
|
||||
this.update(cx, |this, cx| this.poll(cx))?;
|
||||
cx.background_executor().timer(POLL_INTERVAL).await;
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
pub fn poll(&mut self, check_type: UpdateCheckType, cx: &mut Context<Self>) {
|
||||
pub fn poll(&mut self, cx: &mut Context<Self>) {
|
||||
if self.pending_poll.is_some() {
|
||||
return;
|
||||
}
|
||||
@@ -336,18 +331,8 @@ impl AutoUpdater {
|
||||
this.update(cx, |this, cx| {
|
||||
this.pending_poll = None;
|
||||
if let Err(error) = result {
|
||||
this.status = match check_type {
|
||||
// Be quiet if the check was automated (e.g. when offline)
|
||||
UpdateCheckType::Automatic => {
|
||||
log::info!("auto-update check failed: error:{:?}", error);
|
||||
AutoUpdateStatus::Idle
|
||||
}
|
||||
UpdateCheckType::Manual => {
|
||||
log::error!("auto-update failed: error:{:?}", error);
|
||||
AutoUpdateStatus::Errored
|
||||
}
|
||||
};
|
||||
|
||||
log::error!("auto-update failed: error:{:?}", error);
|
||||
this.status = AutoUpdateStatus::Errored;
|
||||
cx.notify();
|
||||
}
|
||||
})
|
||||
|
||||
@@ -82,10 +82,7 @@ fn view_release_notes_locally(
|
||||
.update_in(cx, |workspace, window, cx| {
|
||||
let project = workspace.project().clone();
|
||||
let buffer = project.update(cx, |project, cx| {
|
||||
let buffer = project.create_local_buffer("", markdown, cx);
|
||||
project
|
||||
.mark_buffer_as_non_searchable(buffer.read(cx).remote_id(), cx);
|
||||
buffer
|
||||
project.create_local_buffer("", markdown, cx)
|
||||
});
|
||||
buffer.update(cx, |buffer, cx| {
|
||||
buffer.edit([(0..0, body.release_notes)], None, cx)
|
||||
|
||||
@@ -152,7 +152,7 @@ pub enum Thinking {
|
||||
#[derive(Debug)]
|
||||
pub struct Request {
|
||||
pub model: String,
|
||||
pub max_tokens: u64,
|
||||
pub max_tokens: u32,
|
||||
pub messages: Vec<BedrockMessage>,
|
||||
pub tools: Option<BedrockToolConfig>,
|
||||
pub thinking: Option<Thinking>,
|
||||
|
||||
@@ -99,10 +99,10 @@ pub enum Model {
|
||||
#[serde(rename = "custom")]
|
||||
Custom {
|
||||
name: String,
|
||||
max_tokens: u64,
|
||||
max_tokens: usize,
|
||||
/// The name displayed in the UI, such as in the assistant panel model dropdown menu.
|
||||
display_name: Option<String>,
|
||||
max_output_tokens: Option<u64>,
|
||||
max_output_tokens: Option<u32>,
|
||||
default_temperature: Option<f32>,
|
||||
},
|
||||
}
|
||||
@@ -309,7 +309,7 @@ impl Model {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn max_token_count(&self) -> u64 {
|
||||
pub fn max_token_count(&self) -> usize {
|
||||
match self {
|
||||
Self::Claude3_5SonnetV2
|
||||
| Self::Claude3Opus
|
||||
@@ -328,7 +328,7 @@ impl Model {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn max_output_tokens(&self) -> u64 {
|
||||
pub fn max_output_tokens(&self) -> u32 {
|
||||
match self {
|
||||
Self::Claude3Opus | Self::Claude3Sonnet | Self::Claude3_5Haiku => 4_096,
|
||||
Self::Claude3_7Sonnet
|
||||
|
||||
@@ -1028,11 +1028,7 @@ impl BufferDiff {
|
||||
let (base_text_changed, mut changed_range) =
|
||||
match (state.base_text_exists, new_state.base_text_exists) {
|
||||
(false, false) => (true, None),
|
||||
(true, true)
|
||||
if state.base_text.remote_id() == new_state.base_text.remote_id()
|
||||
&& state.base_text.syntax_update_count()
|
||||
== new_state.base_text.syntax_update_count() =>
|
||||
{
|
||||
(true, true) if state.base_text.remote_id() == new_state.base_text.remote_id() => {
|
||||
(false, new_state.compare(&state, buffer))
|
||||
}
|
||||
_ => (true, Some(text::Anchor::MIN..text::Anchor::MAX)),
|
||||
|
||||
@@ -269,6 +269,7 @@ async fn test_channel_messages(cx: &mut TestAppContext) {
|
||||
github_login: "nathansobo".into(),
|
||||
avatar_url: "http://avatar.com/nathansobo".into(),
|
||||
name: None,
|
||||
email: None,
|
||||
}],
|
||||
},
|
||||
);
|
||||
@@ -322,6 +323,7 @@ async fn test_channel_messages(cx: &mut TestAppContext) {
|
||||
github_login: "maxbrunsfeld".into(),
|
||||
avatar_url: "http://avatar.com/maxbrunsfeld".into(),
|
||||
name: None,
|
||||
email: None,
|
||||
}],
|
||||
},
|
||||
);
|
||||
@@ -366,6 +368,7 @@ async fn test_channel_messages(cx: &mut TestAppContext) {
|
||||
github_login: "as-cii".into(),
|
||||
avatar_url: "http://avatar.com/as-cii".into(),
|
||||
name: None,
|
||||
email: None,
|
||||
}],
|
||||
},
|
||||
);
|
||||
|
||||
@@ -13,7 +13,6 @@ pub enum CliRequest {
|
||||
Open {
|
||||
paths: Vec<String>,
|
||||
urls: Vec<String>,
|
||||
diff_paths: Vec<[String; 2]>,
|
||||
wait: bool,
|
||||
open_new_workspace: Option<bool>,
|
||||
env: Option<HashMap<String, String>>,
|
||||
|
||||
@@ -89,9 +89,6 @@ struct Args {
|
||||
/// Will attempt to give the correct command to run
|
||||
#[arg(long)]
|
||||
system_specs: bool,
|
||||
/// Pairs of file paths to diff. Can be specified multiple times.
|
||||
#[arg(long, action = clap::ArgAction::Append, num_args = 2, value_names = ["OLD_PATH", "NEW_PATH"])]
|
||||
diff: Vec<String>,
|
||||
/// Uninstall Zed from user system
|
||||
#[cfg(all(
|
||||
any(target_os = "linux", target_os = "macos"),
|
||||
@@ -130,9 +127,6 @@ fn parse_path_with_position(argument_str: &str) -> anyhow::Result<String> {
|
||||
}
|
||||
|
||||
fn main() -> Result<()> {
|
||||
#[cfg(unix)]
|
||||
util::prevent_root_execution();
|
||||
|
||||
// Exit flatpak sandbox if needed
|
||||
#[cfg(any(target_os = "linux", target_os = "freebsd"))]
|
||||
{
|
||||
@@ -235,17 +229,9 @@ fn main() -> Result<()> {
|
||||
let exit_status = Arc::new(Mutex::new(None));
|
||||
let mut paths = vec![];
|
||||
let mut urls = vec![];
|
||||
let mut diff_paths = vec![];
|
||||
let mut stdin_tmp_file: Option<fs::File> = None;
|
||||
let mut anonymous_fd_tmp_files = vec![];
|
||||
|
||||
for path in args.diff.chunks(2) {
|
||||
diff_paths.push([
|
||||
parse_path_with_position(&path[0])?,
|
||||
parse_path_with_position(&path[1])?,
|
||||
]);
|
||||
}
|
||||
|
||||
for path in args.paths_with_position.iter() {
|
||||
if path.starts_with("zed://")
|
||||
|| path.starts_with("http://")
|
||||
@@ -284,7 +270,6 @@ fn main() -> Result<()> {
|
||||
tx.send(CliRequest::Open {
|
||||
paths,
|
||||
urls,
|
||||
diff_paths,
|
||||
wait: args.wait,
|
||||
open_new_workspace,
|
||||
env,
|
||||
|
||||
@@ -28,9 +28,6 @@ feature_flags.workspace = true
|
||||
futures.workspace = true
|
||||
gpui.workspace = true
|
||||
gpui_tokio.workspace = true
|
||||
# Don't update `hickory-resolver`, it has a bug that causes it to not resolve DNS queries correctly.
|
||||
# See https://github.com/hickory-dns/hickory-dns/issues/3048
|
||||
hickory-resolver = { version = "0.24", features = ["tokio-runtime"] }
|
||||
http_client.workspace = true
|
||||
http_client_tls.workspace = true
|
||||
httparse = "1.10"
|
||||
@@ -39,7 +36,6 @@ paths.workspace = true
|
||||
parking_lot.workspace = true
|
||||
postage.workspace = true
|
||||
rand.workspace = true
|
||||
regex.workspace = true
|
||||
release_channel.workspace = true
|
||||
rpc = { workspace = true, features = ["gpui"] }
|
||||
schemars.workspace = true
|
||||
@@ -52,7 +48,7 @@ telemetry_events.workspace = true
|
||||
text.workspace = true
|
||||
thiserror.workspace = true
|
||||
time.workspace = true
|
||||
tiny_http.workspace = true
|
||||
tiny_http = "0.8"
|
||||
tokio-socks = { version = "0.5.2", default-features = false, features = ["futures-io"] }
|
||||
url.workspace = true
|
||||
util.workspace = true
|
||||
@@ -64,12 +60,11 @@ workspace-hack.workspace = true
|
||||
[dev-dependencies]
|
||||
clock = { workspace = true, features = ["test-support"] }
|
||||
collections = { workspace = true, features = ["test-support"] }
|
||||
fs.workspace = true
|
||||
gpui = { workspace = true, features = ["test-support"] }
|
||||
http_client = { workspace = true, features = ["test-support"] }
|
||||
rpc = { workspace = true, features = ["test-support"] }
|
||||
settings = { workspace = true, features = ["test-support"] }
|
||||
util = { workspace = true, features = ["test-support"] }
|
||||
http_client = { workspace = true, features = ["test-support"] }
|
||||
|
||||
[target.'cfg(target_os = "windows")'.dependencies]
|
||||
windows.workspace = true
|
||||
|
||||
@@ -1887,16 +1887,8 @@ mod tests {
|
||||
.set_entity(&entity3, &mut cx.to_async());
|
||||
drop(subscription3);
|
||||
|
||||
server.send(proto::JoinProject {
|
||||
project_id: 1,
|
||||
committer_name: None,
|
||||
committer_email: None,
|
||||
});
|
||||
server.send(proto::JoinProject {
|
||||
project_id: 2,
|
||||
committer_name: None,
|
||||
committer_email: None,
|
||||
});
|
||||
server.send(proto::JoinProject { project_id: 1 });
|
||||
server.send(proto::JoinProject { project_id: 2 });
|
||||
done_rx1.recv().await.unwrap();
|
||||
done_rx2.recv().await.unwrap();
|
||||
}
|
||||
|
||||
@@ -3,30 +3,20 @@
|
||||
mod http_proxy;
|
||||
mod socks_proxy;
|
||||
|
||||
use std::sync::LazyLock;
|
||||
|
||||
use anyhow::{Context as _, Result};
|
||||
use hickory_resolver::{
|
||||
AsyncResolver, TokioAsyncResolver,
|
||||
config::LookupIpStrategy,
|
||||
name_server::{GenericConnector, TokioRuntimeProvider},
|
||||
system_conf,
|
||||
};
|
||||
use http_client::Url;
|
||||
use http_proxy::{HttpProxyType, connect_http_proxy_stream, parse_http_proxy};
|
||||
use socks_proxy::{SocksVersion, connect_socks_proxy_stream, parse_socks_proxy};
|
||||
use tokio_socks::{IntoTargetAddr, TargetAddr};
|
||||
use util::ResultExt;
|
||||
|
||||
pub(crate) async fn connect_proxy_stream(
|
||||
proxy: &Url,
|
||||
rpc_host: (&str, u16),
|
||||
) -> Result<Box<dyn AsyncReadWrite>> {
|
||||
let Some(((proxy_domain, proxy_port), proxy_type)) = parse_proxy_type(proxy).await else {
|
||||
let Some(((proxy_domain, proxy_port), proxy_type)) = parse_proxy_type(proxy) else {
|
||||
// If parsing the proxy URL fails, we must avoid falling back to an insecure connection.
|
||||
// SOCKS proxies are often used in contexts where security and privacy are critical,
|
||||
// so any fallback could expose users to significant risks.
|
||||
anyhow::bail!("Parsing proxy url type failed");
|
||||
anyhow::bail!("Parsing proxy url failed");
|
||||
};
|
||||
|
||||
// Connect to proxy and wrap protocol later
|
||||
@@ -49,8 +39,10 @@ enum ProxyType<'t> {
|
||||
HttpProxy(HttpProxyType<'t>),
|
||||
}
|
||||
|
||||
async fn parse_proxy_type(proxy: &Url) -> Option<((String, u16), ProxyType<'_>)> {
|
||||
fn parse_proxy_type(proxy: &Url) -> Option<((String, u16), ProxyType<'_>)> {
|
||||
let scheme = proxy.scheme();
|
||||
let host = proxy.host()?.to_string();
|
||||
let port = proxy.port_or_known_default()?;
|
||||
let proxy_type = match scheme {
|
||||
scheme if scheme.starts_with("socks") => {
|
||||
Some(ProxyType::SocksProxy(parse_socks_proxy(scheme, proxy)))
|
||||
@@ -60,38 +52,8 @@ async fn parse_proxy_type(proxy: &Url) -> Option<((String, u16), ProxyType<'_>)>
|
||||
}
|
||||
_ => None,
|
||||
}?;
|
||||
let (ip, port) = {
|
||||
let host = proxy.host()?.to_string();
|
||||
let port = proxy.port_or_known_default()?;
|
||||
resolve_proxy_url_if_needed((host, port)).await.log_err()?
|
||||
};
|
||||
|
||||
Some(((ip, port), proxy_type))
|
||||
}
|
||||
|
||||
static SYSTEM_DNS_RESOLVER: LazyLock<AsyncResolver<GenericConnector<TokioRuntimeProvider>>> =
|
||||
LazyLock::new(|| {
|
||||
let (config, mut opts) = system_conf::read_system_conf().unwrap();
|
||||
opts.ip_strategy = LookupIpStrategy::Ipv4AndIpv6;
|
||||
TokioAsyncResolver::tokio(config, opts)
|
||||
});
|
||||
|
||||
async fn resolve_proxy_url_if_needed(proxy: (String, u16)) -> Result<(String, u16)> {
|
||||
let proxy = proxy
|
||||
.into_target_addr()
|
||||
.context("Failed to parse proxy addr")?;
|
||||
match proxy {
|
||||
TargetAddr::Domain(domain, port) => {
|
||||
let ip = SYSTEM_DNS_RESOLVER
|
||||
.lookup_ip(domain.as_ref())
|
||||
.await?
|
||||
.into_iter()
|
||||
.next()
|
||||
.ok_or_else(|| anyhow::anyhow!("No IP found for proxy domain {domain}"))?;
|
||||
Ok((ip.to_string(), port))
}
TargetAddr::Ip(ip_addr) => Ok((ip_addr.ip().to_string(), ip_addr.port())),
}
Some(((host, port), proxy_type))
}
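The scheme/host/port split performed by `parse_proxy_type` can be sketched with the `url` crate alone; `proxy_endpoint` below is an illustrative helper and ignores the SOCKS/HTTP proxy wrapping:

```rust
use url::Url;

/// Split a proxy URL into (scheme, host, port), using the scheme's default
/// port when none is given. This mirrors the host/port extraction above but
/// stays independent of the surrounding proxy types.
fn proxy_endpoint(proxy: &str) -> Option<(String, String, u16)> {
    let url = Url::parse(proxy).ok()?;
    let scheme = url.scheme().to_string();
    let host = url.host()?.to_string();
    let port = url.port_or_known_default()?;
    Some((scheme, host, port))
}

fn main() {
    let (scheme, host, port) = proxy_endpoint("socks5://proxy.example.com:1080").unwrap();
    assert_eq!((scheme.as_str(), host.as_str(), port), ("socks5", "proxy.example.com", 1080));
}
```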
|
||||
|
||||
pub(crate) trait AsyncReadWrite:
|
||||
|
||||
@@ -1,7 +1,5 @@
|
||||
//! socks proxy
|
||||
|
||||
use std::net::SocketAddr;
|
||||
|
||||
use anyhow::{Context as _, Result};
|
||||
use http_client::Url;
|
||||
use tokio::net::TcpStream;
|
||||
@@ -10,8 +8,6 @@ use tokio_socks::{
|
||||
tcp::{Socks4Stream, Socks5Stream},
|
||||
};
|
||||
|
||||
use crate::proxy::SYSTEM_DNS_RESOLVER;
|
||||
|
||||
use super::AsyncReadWrite;
|
||||
|
||||
/// Identification to a Socks V4 Proxy
|
||||
@@ -77,14 +73,12 @@ pub(super) async fn connect_socks_proxy_stream(
|
||||
};
|
||||
let rpc_host = match (rpc_host, local_dns) {
|
||||
(TargetAddr::Domain(domain, port), true) => {
|
||||
let ip_addr = SYSTEM_DNS_RESOLVER
|
||||
.lookup_ip(domain.as_ref())
|
||||
let ip_addr = tokio::net::lookup_host((domain.as_ref(), port))
|
||||
.await
|
||||
.with_context(|| format!("Failed to lookup domain {}", domain))?
|
||||
.into_iter()
|
||||
.next()
|
||||
.ok_or_else(|| anyhow::anyhow!("Failed to lookup domain {}", domain))?;
|
||||
TargetAddr::Ip(SocketAddr::new(ip_addr, port))
|
||||
TargetAddr::Ip(ip_addr)
|
||||
}
|
||||
(rpc_host, _) => rpc_host,
|
||||
};
|
||||
|
||||
@@ -8,11 +8,10 @@ use futures::{Future, FutureExt, StreamExt};
|
||||
use gpui::{App, AppContext as _, BackgroundExecutor, Task};
|
||||
use http_client::{self, AsyncBody, HttpClient, HttpClientWithUrl, Method, Request};
|
||||
use parking_lot::Mutex;
|
||||
use regex::Regex;
|
||||
use release_channel::ReleaseChannel;
|
||||
use settings::{Settings, SettingsStore};
|
||||
use sha2::{Digest, Sha256};
|
||||
use std::collections::HashSet;
|
||||
use std::collections::{HashMap, HashSet};
|
||||
use std::fs::File;
|
||||
use std::io::Write;
|
||||
use std::sync::LazyLock;
|
||||
@@ -46,13 +45,31 @@ struct TelemetryState {
|
||||
first_event_date_time: Option<Instant>,
|
||||
event_coalescer: EventCoalescer,
|
||||
max_queue_size: usize,
|
||||
worktrees_with_project_type_events_sent: HashSet<WorktreeId>,
|
||||
worktree_id_map: WorktreeIdMap,
|
||||
|
||||
os_name: String,
|
||||
app_version: String,
|
||||
os_version: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
struct WorktreeIdMap(HashMap<String, ProjectCache>);
|
||||
|
||||
#[derive(Debug)]
|
||||
struct ProjectCache {
|
||||
name: String,
|
||||
worktree_ids_reported: HashSet<WorktreeId>,
|
||||
}
|
||||
|
||||
impl ProjectCache {
|
||||
fn new(name: String) -> Self {
|
||||
Self {
|
||||
name,
|
||||
worktree_ids_reported: HashSet::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
const MAX_QUEUE_LEN: usize = 5;
|
||||
|
||||
@@ -74,10 +91,6 @@ static ZED_CLIENT_CHECKSUM_SEED: LazyLock<Option<Vec<u8>>> = LazyLock::new(|| {
|
||||
})
|
||||
});
|
||||
|
||||
static DOTNET_PROJECT_FILES_REGEX: LazyLock<Regex> = LazyLock::new(|| {
|
||||
Regex::new(r"^(global\.json|Directory\.Build\.props|.*\.(csproj|fsproj|vbproj|sln))$").unwrap()
|
||||
});
|
||||
|
||||
pub fn os_name() -> String {
|
||||
#[cfg(target_os = "macos")]
|
||||
{
|
||||
@@ -181,7 +194,20 @@ impl Telemetry {
|
||||
first_event_date_time: None,
|
||||
event_coalescer: EventCoalescer::new(clock.clone()),
|
||||
max_queue_size: MAX_QUEUE_LEN,
|
||||
worktrees_with_project_type_events_sent: HashSet::new(),
|
||||
worktree_id_map: WorktreeIdMap(HashMap::from_iter([
|
||||
(
|
||||
"pnpm-lock.yaml".to_string(),
|
||||
ProjectCache::new("pnpm".to_string()),
|
||||
),
|
||||
(
|
||||
"yarn.lock".to_string(),
|
||||
ProjectCache::new("yarn".to_string()),
|
||||
),
|
||||
(
|
||||
"package.json".to_string(),
|
||||
ProjectCache::new("node".to_string()),
|
||||
),
|
||||
])),
|
||||
|
||||
os_version: None,
|
||||
os_name: os_name(),
|
||||
@@ -345,14 +371,44 @@ impl Telemetry {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn report_discovered_project_type_events(
|
||||
pub fn report_discovered_project_events(
|
||||
self: &Arc<Self>,
|
||||
worktree_id: WorktreeId,
|
||||
updated_entries_set: &UpdatedEntriesSet,
|
||||
) {
|
||||
let Some(project_type_names) = self.detect_project_types(worktree_id, updated_entries_set)
|
||||
else {
|
||||
return;
|
||||
let project_type_names: Vec<String> = {
|
||||
let mut state = self.state.lock();
|
||||
state
|
||||
.worktree_id_map
|
||||
.0
|
||||
.iter_mut()
|
||||
.filter_map(|(project_file_name, project_type_telemetry)| {
|
||||
if project_type_telemetry
|
||||
.worktree_ids_reported
|
||||
.contains(&worktree_id)
|
||||
{
|
||||
return None;
|
||||
}
|
||||
|
||||
let project_file_found = updated_entries_set.iter().any(|(path, _, _)| {
|
||||
path.as_ref()
|
||||
.file_name()
|
||||
.and_then(|name| name.to_str())
|
||||
.map(|name_str| name_str == project_file_name)
|
||||
.unwrap_or(false)
|
||||
});
|
||||
|
||||
if !project_file_found {
|
||||
return None;
|
||||
}
|
||||
|
||||
project_type_telemetry
|
||||
.worktree_ids_reported
|
||||
.insert(worktree_id);
|
||||
|
||||
Some(project_type_telemetry.name.clone())
|
||||
})
|
||||
.collect()
|
||||
};
|
||||
|
||||
for project_type_name in project_type_names {
|
||||
@@ -360,55 +416,6 @@ impl Telemetry {
|
||||
}
|
||||
}
|
||||
|
||||
fn detect_project_types(
|
||||
self: &Arc<Self>,
|
||||
worktree_id: WorktreeId,
|
||||
updated_entries_set: &UpdatedEntriesSet,
|
||||
) -> Option<Vec<String>> {
|
||||
let mut state = self.state.lock();
|
||||
|
||||
if state
|
||||
.worktrees_with_project_type_events_sent
|
||||
.contains(&worktree_id)
|
||||
{
|
||||
return None;
|
||||
}
|
||||
|
||||
let mut project_types: HashSet<&str> = HashSet::new();
|
||||
|
||||
for (path, _, _) in updated_entries_set.iter() {
|
||||
let Some(file_name) = path.file_name().and_then(|f| f.to_str()) else {
|
||||
continue;
|
||||
};
|
||||
|
||||
let project_type = if file_name == "pnpm-lock.yaml" {
|
||||
Some("pnpm")
|
||||
} else if file_name == "yarn.lock" {
|
||||
Some("yarn")
|
||||
} else if file_name == "package.json" {
|
||||
Some("node")
|
||||
} else if DOTNET_PROJECT_FILES_REGEX.is_match(file_name) {
|
||||
Some("dotnet")
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
if let Some(project_type) = project_type {
|
||||
project_types.insert(project_type);
|
||||
};
|
||||
}
|
||||
|
||||
if !project_types.is_empty() {
|
||||
state
|
||||
.worktrees_with_project_type_events_sent
|
||||
.insert(worktree_id);
|
||||
}
|
||||
|
||||
let mut project_types: Vec<_> = project_types.into_iter().map(String::from).collect();
project_types.sort();
Some(project_types)
}
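A sketch of the per-file classification used above, with the .NET pattern taken from `DOTNET_PROJECT_FILES_REGEX`; `project_type_for_file` is an illustrative helper and recompiles the regex on each call for brevity:

```rust
use regex::Regex;

/// Classify a project type from a single file name, following the checks in
/// `detect_project_types` above. Lock files and package manifests map to a
/// package manager; .NET project and solution files map to "dotnet".
fn project_type_for_file(file_name: &str) -> Option<&'static str> {
    let dotnet = Regex::new(
        r"^(global\.json|Directory\.Build\.props|.*\.(csproj|fsproj|vbproj|sln))$",
    )
    .unwrap();
    match file_name {
        "pnpm-lock.yaml" => Some("pnpm"),
        "yarn.lock" => Some("yarn"),
        "package.json" => Some("node"),
        name if dotnet.is_match(name) => Some("dotnet"),
        _ => None,
    }
}

fn main() {
    assert_eq!(project_type_for_file("pnpm-lock.yaml"), Some("pnpm"));
    assert_eq!(project_type_for_file("app.csproj"), Some("dotnet"));
    assert_eq!(project_type_for_file("main.rs"), None);
}
```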
|
||||
|
||||
fn report_event(self: &Arc<Self>, event: Event) {
|
||||
let mut state = self.state.lock();
|
||||
// RUST_LOG=telemetry=trace to debug telemetry events
|
||||
@@ -571,9 +578,7 @@ mod tests {
|
||||
use clock::FakeSystemClock;
|
||||
use gpui::TestAppContext;
|
||||
use http_client::FakeHttpClient;
|
||||
use std::collections::HashMap;
|
||||
use telemetry_events::FlexibleEvent;
|
||||
use worktree::{PathChange, ProjectEntryId, WorktreeId};
|
||||
|
||||
#[gpui::test]
|
||||
fn test_telemetry_flush_on_max_queue_size(cx: &mut TestAppContext) {
|
||||
@@ -691,115 +696,6 @@ mod tests {
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_project_discovery_does_not_double_report(cx: &mut gpui::TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let clock = Arc::new(FakeSystemClock::new());
|
||||
let http = FakeHttpClient::with_200_response();
|
||||
let telemetry = cx.update(|cx| Telemetry::new(clock.clone(), http, cx));
|
||||
let worktree_id = 1;
|
||||
|
||||
// Scan of empty worktree finds nothing
|
||||
test_project_discovery_helper(telemetry.clone(), vec![], Some(vec![]), worktree_id);
|
||||
|
||||
// Files added, second scan of worktree 1 finds project type
|
||||
test_project_discovery_helper(
|
||||
telemetry.clone(),
|
||||
vec!["package.json"],
|
||||
Some(vec!["node"]),
|
||||
worktree_id,
|
||||
);
|
||||
|
||||
// Third scan of worktree does not double report, as we already reported
|
||||
test_project_discovery_helper(telemetry.clone(), vec!["package.json"], None, worktree_id);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_pnpm_project_discovery(cx: &mut gpui::TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let clock = Arc::new(FakeSystemClock::new());
|
||||
let http = FakeHttpClient::with_200_response();
|
||||
let telemetry = cx.update(|cx| Telemetry::new(clock.clone(), http, cx));
|
||||
|
||||
test_project_discovery_helper(
|
||||
telemetry.clone(),
|
||||
vec!["package.json", "pnpm-lock.yaml"],
|
||||
Some(vec!["node", "pnpm"]),
|
||||
1,
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_yarn_project_discovery(cx: &mut gpui::TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let clock = Arc::new(FakeSystemClock::new());
|
||||
let http = FakeHttpClient::with_200_response();
|
||||
let telemetry = cx.update(|cx| Telemetry::new(clock.clone(), http, cx));
|
||||
|
||||
test_project_discovery_helper(
|
||||
telemetry.clone(),
|
||||
vec!["package.json", "yarn.lock"],
|
||||
Some(vec!["node", "yarn"]),
|
||||
1,
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_dotnet_project_discovery(cx: &mut gpui::TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let clock = Arc::new(FakeSystemClock::new());
|
||||
let http = FakeHttpClient::with_200_response();
|
||||
let telemetry = cx.update(|cx| Telemetry::new(clock.clone(), http, cx));
|
||||
|
||||
// Using different worktrees, as production code blocks from reporting a
|
||||
// project type for the same worktree multiple times
|
||||
|
||||
test_project_discovery_helper(
|
||||
telemetry.clone().clone(),
|
||||
vec!["global.json"],
|
||||
Some(vec!["dotnet"]),
|
||||
1,
|
||||
);
|
||||
test_project_discovery_helper(
|
||||
telemetry.clone(),
|
||||
vec!["Directory.Build.props"],
|
||||
Some(vec!["dotnet"]),
|
||||
2,
|
||||
);
|
||||
test_project_discovery_helper(
|
||||
telemetry.clone(),
|
||||
vec!["file.csproj"],
|
||||
Some(vec!["dotnet"]),
|
||||
3,
|
||||
);
|
||||
test_project_discovery_helper(
|
||||
telemetry.clone(),
|
||||
vec!["file.fsproj"],
|
||||
Some(vec!["dotnet"]),
|
||||
4,
|
||||
);
|
||||
test_project_discovery_helper(
|
||||
telemetry.clone(),
|
||||
vec!["file.vbproj"],
|
||||
Some(vec!["dotnet"]),
|
||||
5,
|
||||
);
|
||||
test_project_discovery_helper(telemetry.clone(), vec!["file.sln"], Some(vec!["dotnet"]), 6);
|
||||
|
||||
// Each worktree should only send a single project type event, even when
|
||||
// encountering multiple files associated with that project type
|
||||
test_project_discovery_helper(
|
||||
telemetry,
|
||||
vec!["global.json", "Directory.Build.props"],
|
||||
Some(vec!["dotnet"]),
|
||||
7,
|
||||
);
|
||||
}
|
||||
|
||||
// TODO:
|
||||
// Test settings
|
||||
// Update FakeHTTPClient to keep track of the number of requests and assert on it
|
||||
@@ -816,32 +712,4 @@ mod tests {
|
||||
&& telemetry.state.lock().flush_events_task.is_none()
|
||||
&& telemetry.state.lock().first_event_date_time.is_none()
|
||||
}
|
||||
|
||||
fn test_project_discovery_helper(
|
||||
telemetry: Arc<Telemetry>,
|
||||
file_paths: Vec<&str>,
|
||||
expected_project_types: Option<Vec<&str>>,
|
||||
worktree_id_num: usize,
|
||||
) {
|
||||
let worktree_id = WorktreeId::from_usize(worktree_id_num);
|
||||
let entries: Vec<_> = file_paths
|
||||
.into_iter()
|
||||
.enumerate()
|
||||
.map(|(i, path)| {
|
||||
(
|
||||
Arc::from(std::path::Path::new(path)),
|
||||
ProjectEntryId::from_proto(i as u64 + 1),
|
||||
PathChange::Added,
|
||||
)
|
||||
})
|
||||
.collect();
|
||||
let updated_entries: UpdatedEntriesSet = Arc::from(entries.as_slice());
|
||||
|
||||
let detected_project_types = telemetry.detect_project_types(worktree_id, &updated_entries);
|
||||
|
||||
let expected_project_types =
|
||||
expected_project_types.map(|types| types.iter().map(|&t| t.to_string()).collect());
|
||||
|
||||
assert_eq!(detected_project_types, expected_project_types);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -49,6 +49,7 @@ pub struct User {
|
||||
pub github_login: String,
|
||||
pub avatar_uri: SharedUri,
|
||||
pub name: Option<String>,
|
||||
pub email: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
@@ -57,8 +58,6 @@ pub struct Collaborator {
|
||||
pub replica_id: ReplicaId,
|
||||
pub user_id: UserId,
|
||||
pub is_host: bool,
|
||||
pub committer_name: Option<String>,
|
||||
pub committer_email: Option<String>,
|
||||
}
|
||||
|
||||
impl PartialOrd for User {
|
||||
@@ -882,6 +881,7 @@ impl User {
|
||||
github_login: message.github_login,
|
||||
avatar_uri: message.avatar_url.into(),
|
||||
name: message.name,
|
||||
email: message.email,
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -912,8 +912,6 @@ impl Collaborator {
|
||||
replica_id: message.replica_id as ReplicaId,
|
||||
user_id: message.user_id as UserId,
|
||||
is_host: message.is_host,
|
||||
committer_name: message.committer_name,
|
||||
committer_email: message.committer_email,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -185,9 +185,7 @@ CREATE TABLE "project_collaborators" (
|
||||
"connection_server_id" INTEGER NOT NULL REFERENCES servers (id) ON DELETE CASCADE,
|
||||
"user_id" INTEGER NOT NULL,
|
||||
"replica_id" INTEGER NOT NULL,
|
||||
"is_host" BOOLEAN NOT NULL,
|
||||
"committer_name" VARCHAR,
|
||||
"committer_email" VARCHAR
|
||||
"is_host" BOOLEAN NOT NULL
|
||||
);
|
||||
|
||||
CREATE INDEX "index_project_collaborators_on_project_id" ON "project_collaborators" ("project_id");
|
||||
@@ -465,7 +463,6 @@ CREATE TABLE extension_versions (
|
||||
provides_slash_commands BOOLEAN NOT NULL DEFAULT FALSE,
|
||||
provides_indexed_docs_providers BOOLEAN NOT NULL DEFAULT FALSE,
|
||||
provides_snippets BOOLEAN NOT NULL DEFAULT FALSE,
|
||||
provides_debug_adapters BOOLEAN NOT NULL DEFAULT FALSE,
|
||||
PRIMARY KEY (extension_id, version)
|
||||
);
|
||||
|
||||
|
||||
@@ -1,4 +0,0 @@
alter table project_collaborators
add column committer_name varchar;
alter table project_collaborators
add column committer_email varchar;
@@ -1,2 +0,0 @@
alter table extension_versions
add column provides_debug_adapters bool not null default false
@@ -97,7 +97,7 @@ impl std::fmt::Display for SystemIdHeader {

pub fn routes(rpc_server: Arc<rpc::Server>) -> Router<(), Body> {
Router::new()
.route("/user", get(update_or_create_authenticated_user))
.route("/user", get(get_authenticated_user))
.route("/users/look_up", get(look_up_user))
.route("/users/:id/access_tokens", post(create_access_token))
.route("/rpc_server_snapshot", get(get_rpc_server_snapshot))
@@ -157,7 +157,7 @@ struct AuthenticatedUserResponse {
feature_flags: Vec<String>,
}

async fn update_or_create_authenticated_user(
async fn get_authenticated_user(
Query(params): Query<AuthenticatedUserParams>,
Extension(app): Extension<Arc<AppState>>,
) -> Result<Json<AuthenticatedUserResponse>> {
@@ -165,7 +165,7 @@ async fn update_or_create_authenticated_user(

let user = app
.db
.update_or_create_user_by_github_account(
.get_or_create_user_by_github_account(
&params.github_login,
params.github_user_id,
params.github_email.as_deref(),

@@ -31,7 +31,7 @@ use crate::llm::{AGENT_EXTENDED_TRIAL_FEATURE_FLAG, DEFAULT_MAX_MONTHLY_SPEND};
use crate::rpc::{ResultExt as _, Server};
use crate::stripe_client::{
StripeCancellationDetailsReason, StripeClient, StripeCustomerId, StripeSubscription,
StripeSubscriptionId, UpdateCustomerParams,
StripeSubscriptionId,
};
use crate::{AppState, Error, Result};
use crate::{db::UserId, llm::db::LlmDatabase};
@@ -353,17 +353,7 @@ async fn create_billing_subscription(
}

let customer_id = if let Some(existing_customer) = &existing_billing_customer {
let customer_id = StripeCustomerId(existing_customer.stripe_customer_id.clone().into());
if let Some(email) = user.email_address.as_deref() {
stripe_billing
.client()
.update_customer(&customer_id, UpdateCustomerParams { email: Some(email) })
.await
// Update of email address is best-effort - continue checkout even if it fails
.context("error updating stripe customer email address")
.log_err();
}
customer_id
StripeCustomerId(existing_customer.stripe_customer_id.clone().into())
} else {
stripe_billing
.find_or_create_customer_by_email(user.email_address.as_deref())

@@ -751,8 +751,6 @@ pub struct ProjectCollaborator {
pub user_id: UserId,
pub replica_id: ReplicaId,
pub is_host: bool,
pub committer_name: Option<String>,
pub committer_email: Option<String>,
}

impl ProjectCollaborator {
@@ -762,8 +760,6 @@ impl ProjectCollaborator {
replica_id: self.replica_id.0 as u32,
user_id: self.user_id.to_proto(),
is_host: self.is_host,
committer_name: self.committer_name.clone(),
committer_email: self.committer_email.clone(),
}
}
}

@@ -118,8 +118,6 @@ impl Database {
user_id: collaborator.user_id.to_proto(),
replica_id: collaborator.replica_id.0 as u32,
is_host: false,
committer_name: None,
committer_email: None,
})
.collect(),
})
@@ -227,8 +225,6 @@ impl Database {
user_id: collaborator.user_id.to_proto(),
replica_id: collaborator.replica_id.0 as u32,
is_host: false,
committer_name: None,
committer_email: None,
})
.collect(),
},
@@ -265,8 +261,6 @@ impl Database {
replica_id: db_collaborator.replica_id.0 as u32,
user_id: db_collaborator.user_id.to_proto(),
is_host: false,
committer_name: None,
committer_email: None,
})
} else {
collaborator_ids_to_remove.push(db_collaborator.id);
@@ -396,8 +390,6 @@ impl Database {
replica_id: row.replica_id.0 as u32,
user_id: row.user_id.to_proto(),
is_host: false,
committer_name: None,
committer_email: None,
});
}

@@ -739,6 +739,7 @@ impl Database {
),
github_login: user.github_login,
name: user.name,
email: user.email_address,
})
}
proto::ChannelMember {

@@ -71,7 +71,7 @@ impl Database {
) -> Result<()> {
self.weak_transaction(|tx| async move {
let user = self
.update_or_create_user_by_github_account_tx(
.get_or_create_user_by_github_account_tx(
github_login,
github_user_id,
github_email,

@@ -321,9 +321,6 @@ impl Database {
provides_snippets: ActiveValue::Set(
version.provides.contains(&ExtensionProvides::Snippets),
),
provides_debug_adapters: ActiveValue::Set(
version.provides.contains(&ExtensionProvides::DebugAdapters),
),
download_count: ActiveValue::NotSet,
}
}))
@@ -434,10 +431,6 @@ fn apply_provides_filter(
condition = condition.add(extension_version::Column::ProvidesSnippets.eq(true));
}

if provides_filter.contains(&ExtensionProvides::DebugAdapters) {
condition = condition.add(extension_version::Column::ProvidesDebugAdapters.eq(true));
}

condition
}

@@ -98,9 +98,7 @@ impl Database {
user_id: ActiveValue::set(participant.user_id),
replica_id: ActiveValue::set(ReplicaId(replica_id)),
is_host: ActiveValue::set(true),
id: ActiveValue::NotSet,
committer_name: ActiveValue::Set(None),
committer_email: ActiveValue::Set(None),
..Default::default()
}
.insert(&*tx)
.await?;
@@ -786,27 +784,13 @@ impl Database {
project_id: ProjectId,
connection: ConnectionId,
user_id: UserId,
committer_name: Option<String>,
committer_email: Option<String>,
) -> Result<TransactionGuard<(Project, ReplicaId)>> {
self.project_transaction(project_id, move |tx| {
let committer_name = committer_name.clone();
let committer_email = committer_email.clone();
async move {
let (project, role) = self
.access_project(project_id, connection, Capability::ReadOnly, &tx)
.await?;
self.join_project_internal(
project,
user_id,
committer_name,
committer_email,
connection,
role,
&tx,
)
self.project_transaction(project_id, |tx| async move {
let (project, role) = self
.access_project(project_id, connection, Capability::ReadOnly, &tx)
.await?;
self.join_project_internal(project, user_id, connection, role, &tx)
.await
}
})
.await
}
@@ -815,8 +799,6 @@ impl Database {
&self,
project: project::Model,
user_id: UserId,
committer_name: Option<String>,
committer_email: Option<String>,
connection: ConnectionId,
role: ChannelRole,
tx: &DatabaseTransaction,
@@ -840,9 +822,7 @@ impl Database {
user_id: ActiveValue::set(user_id),
replica_id: ActiveValue::set(replica_id),
is_host: ActiveValue::set(false),
id: ActiveValue::NotSet,
committer_name: ActiveValue::set(committer_name),
committer_email: ActiveValue::set(committer_email),
..Default::default()
}
.insert(tx)
.await?;
@@ -1046,8 +1026,6 @@ impl Database {
user_id: collaborator.user_id,
replica_id: collaborator.replica_id,
is_host: collaborator.is_host,
committer_name: collaborator.committer_name,
committer_email: collaborator.committer_email,
})
.collect(),
worktrees,

@@ -553,8 +553,6 @@ impl Database {
user_id: collaborator.user_id,
replica_id: collaborator.replica_id,
is_host: collaborator.is_host,
committer_name: collaborator.committer_name.clone(),
committer_email: collaborator.committer_email.clone(),
})
.collect(),
worktrees: reshared_project.worktrees.clone(),
@@ -859,8 +857,6 @@ impl Database {
user_id: collaborator.user_id,
replica_id: collaborator.replica_id,
is_host: collaborator.is_host,
committer_name: collaborator.committer_name,
committer_email: collaborator.committer_email,
})
.collect::<Vec<_>>();

@@ -111,7 +111,7 @@ impl Database {
.await
}

pub async fn update_or_create_user_by_github_account(
pub async fn get_or_create_user_by_github_account(
&self,
github_login: &str,
github_user_id: i32,
@@ -121,7 +121,7 @@ impl Database {
initial_channel_id: Option<ChannelId>,
) -> Result<User> {
self.transaction(|tx| async move {
self.update_or_create_user_by_github_account_tx(
self.get_or_create_user_by_github_account_tx(
github_login,
github_user_id,
github_email,
@@ -135,7 +135,7 @@ impl Database {
.await
}

pub async fn update_or_create_user_by_github_account_tx(
pub async fn get_or_create_user_by_github_account_tx(
&self,
github_login: &str,
github_user_id: i32,

@@ -27,7 +27,6 @@ pub struct Model {
pub provides_slash_commands: bool,
pub provides_indexed_docs_providers: bool,
pub provides_snippets: bool,
pub provides_debug_adapters: bool,
}

impl Model {
@@ -69,10 +68,6 @@ impl Model {
provides.insert(ExtensionProvides::Snippets);
}

if self.provides_debug_adapters {
provides.insert(ExtensionProvides::DebugAdapters);
}

provides
}
}

@@ -13,8 +13,6 @@ pub struct Model {
pub user_id: UserId,
pub replica_id: ReplicaId,
pub is_host: bool,
pub committer_name: Option<String>,
pub committer_email: Option<String>,
}

impl Model {

@@ -126,16 +126,12 @@ async fn test_channel_buffers(db: &Arc<Database>) {
peer_id: Some(rpc::proto::PeerId { id: 1, owner_id }),
replica_id: 0,
is_host: false,
committer_name: None,
committer_email: None,
},
rpc::proto::Collaborator {
user_id: b_id.to_proto(),
peer_id: Some(rpc::proto::PeerId { id: 2, owner_id }),
replica_id: 1,
is_host: false,
committer_name: None,
committer_email: None,
}
]
);

@@ -72,12 +72,12 @@ async fn test_get_users(db: &Arc<Database>) {
}

test_both_dbs!(
test_update_or_create_user_by_github_account,
test_update_or_create_user_by_github_account_postgres,
test_update_or_create_user_by_github_account_sqlite
test_get_or_create_user_by_github_account,
test_get_or_create_user_by_github_account_postgres,
test_get_or_create_user_by_github_account_sqlite
);

async fn test_update_or_create_user_by_github_account(db: &Arc<Database>) {
async fn test_get_or_create_user_by_github_account(db: &Arc<Database>) {
db.create_user(
"user1@example.com",
None,
@@ -104,14 +104,7 @@ async fn test_update_or_create_user_by_github_account(db: &Arc<Database>) {
.user_id;

let user = db
.update_or_create_user_by_github_account(
"the-new-login2",
102,
None,
None,
Utc::now(),
None,
)
.get_or_create_user_by_github_account("the-new-login2", 102, None, None, Utc::now(), None)
.await
.unwrap();
assert_eq!(user.id, user_id2);
@@ -119,7 +112,7 @@ async fn test_update_or_create_user_by_github_account(db: &Arc<Database>) {
assert_eq!(user.github_user_id, 102);

let user = db
.update_or_create_user_by_github_account(
.get_or_create_user_by_github_account(
"login3",
103,
Some("user3@example.com"),

@@ -14,7 +14,7 @@ use crate::{
db::{
self, BufferId, Capability, Channel, ChannelId, ChannelRole, ChannelsForUser,
CreatedChannelMessage, Database, InviteMemberResult, MembershipUpdated, MessageId,
NotificationId, ProjectId, RejoinedProject, RemoveChannelMemberResult,
NotificationId, Project, ProjectId, RejoinedProject, RemoveChannelMemberResult, ReplicaId,
RespondToChannelInvite, RoomId, ServerId, UpdatedChannelMessage, User, UserId,
},
executor::Executor,
@@ -323,7 +323,6 @@ impl Server {
.add_request_handler(forward_read_only_project_request::<proto::SynchronizeBuffers>)
.add_request_handler(forward_read_only_project_request::<proto::InlayHints>)
.add_request_handler(forward_read_only_project_request::<proto::ResolveInlayHint>)
.add_request_handler(forward_read_only_project_request::<proto::GetColorPresentation>)
.add_request_handler(forward_mutating_project_request::<proto::GetCodeLens>)
.add_request_handler(forward_read_only_project_request::<proto::OpenBufferByPath>)
.add_request_handler(forward_read_only_project_request::<proto::GitGetBranches>)
@@ -1891,16 +1890,28 @@ async fn join_project(

let db = session.db().await;
let (project, replica_id) = &mut *db
.join_project(
project_id,
session.connection_id,
session.user_id(),
request.committer_name.clone(),
request.committer_email.clone(),
)
.join_project(project_id, session.connection_id, session.user_id())
.await?;
drop(db);
tracing::info!(%project_id, "join remote project");
join_project_internal(response, session, project, replica_id)
}

trait JoinProjectInternalResponse {
fn send(self, result: proto::JoinProjectResponse) -> Result<()>;
}
impl JoinProjectInternalResponse for Response<proto::JoinProject> {
fn send(self, result: proto::JoinProjectResponse) -> Result<()> {
Response::<proto::JoinProject>::send(self, result)
}
}

fn join_project_internal(
response: impl JoinProjectInternalResponse,
session: Session,
project: &mut Project,
replica_id: &ReplicaId,
) -> Result<()> {
let collaborators = project
.collaborators
.iter()
@@ -1928,8 +1939,6 @@ async fn join_project(
replica_id: replica_id.0 as u32,
user_id: guest_user_id.to_proto(),
is_host: false,
committer_name: request.committer_name.clone(),
committer_email: request.committer_email.clone(),
}),
};

@@ -2558,6 +2567,7 @@ async fn get_users(
id: user.id.to_proto(),
avatar_url: format!("https://github.com/{}.png?size=128", user.github_login),
github_login: user.github_login,
email: user.email_address,
name: user.name,
})
.collect();
@@ -2591,6 +2601,7 @@ async fn fuzzy_search_users(
avatar_url: format!("https://github.com/{}.png?size=128", user.github_login),
github_login: user.github_login,
name: user.name,
email: user.email_address,
})
.collect();
response.send(proto::UsersResponse { users })?;

@@ -127,7 +127,7 @@ pub async fn seed(config: &Config, db: &Database, force: bool) -> anyhow::Result
log::info!("Seeding {:?} from GitHub", github_user.login);

let user = db
.update_or_create_user_by_github_account(
.get_or_create_user_by_github_account(
&github_user.login,
github_user.id,
github_user.email.as_deref(),

@@ -50,10 +50,6 @@ impl StripeBilling {
}
}

pub fn client(&self) -> &Arc<dyn StripeClient> {
&self.client
}

pub async fn initialize(&self) -> Result<()> {
log::info!("StripeBilling: initializing");

@@ -27,11 +27,6 @@ pub struct CreateCustomerParams<'a> {
pub email: Option<&'a str>,
}

#[derive(Debug)]
pub struct UpdateCustomerParams<'a> {
pub email: Option<&'a str>,
}

#[derive(Debug, PartialEq, Eq, Hash, Clone, derive_more::Display)]
pub struct StripeSubscriptionId(pub Arc<str>);

@@ -198,12 +193,6 @@ pub trait StripeClient: Send + Sync {

async fn create_customer(&self, params: CreateCustomerParams<'_>) -> Result<StripeCustomer>;

async fn update_customer(
&self,
customer_id: &StripeCustomerId,
params: UpdateCustomerParams<'_>,
) -> Result<StripeCustomer>;

async fn list_subscriptions_for_customer(
&self,
customer_id: &StripeCustomerId,

@@ -14,7 +14,7 @@ use crate::stripe_client::{
StripeCreateCheckoutSessionSubscriptionData, StripeCreateMeterEventParams,
StripeCreateSubscriptionParams, StripeCustomer, StripeCustomerId, StripeMeter, StripeMeterId,
StripePrice, StripePriceId, StripeSubscription, StripeSubscriptionId, StripeSubscriptionItem,
StripeSubscriptionItemId, UpdateCustomerParams, UpdateSubscriptionParams,
StripeSubscriptionItemId, UpdateSubscriptionParams,
};

#[derive(Debug, Clone)]
@@ -95,22 +95,6 @@ impl StripeClient for FakeStripeClient {
Ok(customer)
}

async fn update_customer(
&self,
customer_id: &StripeCustomerId,
params: UpdateCustomerParams<'_>,
) -> Result<StripeCustomer> {
let mut customers = self.customers.lock();
if let Some(customer) = customers.get_mut(customer_id) {
if let Some(email) = params.email {
customer.email = Some(email.to_string());
}
Ok(customer.clone())
} else {
Err(anyhow!("no customer found for {customer_id:?}"))
}
}

async fn list_subscriptions_for_customer(
&self,
customer_id: &StripeCustomerId,

@@ -11,7 +11,7 @@ use stripe::{
CreateCheckoutSessionSubscriptionDataTrialSettingsEndBehavior,
CreateCheckoutSessionSubscriptionDataTrialSettingsEndBehaviorMissingPaymentMethod,
CreateCustomer, Customer, CustomerId, ListCustomers, Price, PriceId, Recurring, Subscription,
SubscriptionId, SubscriptionItem, SubscriptionItemId, UpdateCustomer, UpdateSubscriptionItems,
SubscriptionId, SubscriptionItem, SubscriptionItemId, UpdateSubscriptionItems,
UpdateSubscriptionTrialSettings, UpdateSubscriptionTrialSettingsEndBehavior,
UpdateSubscriptionTrialSettingsEndBehaviorMissingPaymentMethod,
};
@@ -25,8 +25,7 @@ use crate::stripe_client::{
StripePriceId, StripePriceRecurring, StripeSubscription, StripeSubscriptionId,
StripeSubscriptionItem, StripeSubscriptionItemId, StripeSubscriptionTrialSettings,
StripeSubscriptionTrialSettingsEndBehavior,
StripeSubscriptionTrialSettingsEndBehaviorMissingPaymentMethod, UpdateCustomerParams,
UpdateSubscriptionParams,
StripeSubscriptionTrialSettingsEndBehaviorMissingPaymentMethod, UpdateSubscriptionParams,
};

pub struct RealStripeClient {
@@ -79,24 +78,6 @@ impl StripeClient for RealStripeClient {
Ok(StripeCustomer::from(customer))
}

async fn update_customer(
&self,
customer_id: &StripeCustomerId,
params: UpdateCustomerParams<'_>,
) -> Result<StripeCustomer> {
let customer = Customer::update(
&self.client,
&customer_id.try_into()?,
UpdateCustomer {
email: params.email,
..Default::default()
},
)
.await?;

Ok(StripeCustomer::from(customer))
}

async fn list_subscriptions_for_customer(
&self,
customer_id: &StripeCustomerId,

@@ -180,7 +180,7 @@ async fn test_channel_requires_zed_cla(cx_a: &mut TestAppContext, cx_b: &mut Tes
server
.app_state
.db
.update_or_create_user_by_github_account("user_b", 100, None, None, Utc::now(), None)
.get_or_create_user_by_github_account("user_b", 100, None, None, Utc::now(), None)
.await
.unwrap();

@@ -4,7 +4,7 @@ use crate::{
};
use call::ActiveCall;
use editor::{
DocumentColorsRenderMode, Editor, EditorSettings, RowInfo,
Editor, RowInfo,
actions::{
ConfirmCodeAction, ConfirmCompletion, ConfirmRename, ContextMenuFirst,
ExpandMacroRecursively, Redo, Rename, SelectAll, ToggleCodeActions, Undo,
@@ -16,7 +16,7 @@ use editor::{
};
use fs::Fs;
use futures::StreamExt;
use gpui::{App, Rgba, TestAppContext, UpdateGlobal, VisualContext, VisualTestContext};
use gpui::{TestAppContext, UpdateGlobal, VisualContext, VisualTestContext};
use indoc::indoc;
use language::{
FakeLspAdapter,
@@ -1951,283 +1951,6 @@ async fn test_inlay_hint_refresh_is_forwarded(
});
}

#[gpui::test(iterations = 10)]
async fn test_lsp_document_color(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) {
let expected_color = Rgba {
r: 0.33,
g: 0.33,
b: 0.33,
a: 0.33,
};
let mut server = TestServer::start(cx_a.executor()).await;
let executor = cx_a.executor();
let client_a = server.create_client(cx_a, "user_a").await;
let client_b = server.create_client(cx_b, "user_b").await;
server
.create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
.await;
let active_call_a = cx_a.read(ActiveCall::global);
let active_call_b = cx_b.read(ActiveCall::global);

cx_a.update(editor::init);
cx_b.update(editor::init);

cx_a.update(|cx| {
SettingsStore::update_global(cx, |store, cx| {
store.update_user_settings::<EditorSettings>(cx, |settings| {
settings.lsp_document_colors = Some(DocumentColorsRenderMode::None);
});
});
});
cx_b.update(|cx| {
SettingsStore::update_global(cx, |store, cx| {
store.update_user_settings::<EditorSettings>(cx, |settings| {
settings.lsp_document_colors = Some(DocumentColorsRenderMode::Inlay);
});
});
});

client_a.language_registry().add(rust_lang());
client_b.language_registry().add(rust_lang());
let mut fake_language_servers = client_a.language_registry().register_fake_lsp(
"Rust",
FakeLspAdapter {
capabilities: lsp::ServerCapabilities {
color_provider: Some(lsp::ColorProviderCapability::Simple(true)),
..lsp::ServerCapabilities::default()
},
..FakeLspAdapter::default()
},
);

// Client A opens a project.
client_a
.fs()
.insert_tree(
path!("/a"),
json!({
"main.rs": "fn main() { a }",
}),
)
.await;
let (project_a, worktree_id) = client_a.build_local_project(path!("/a"), cx_a).await;
active_call_a
.update(cx_a, |call, cx| call.set_location(Some(&project_a), cx))
.await
.unwrap();
let project_id = active_call_a
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();

// Client B joins the project
let project_b = client_b.join_remote_project(project_id, cx_b).await;
active_call_b
.update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
.await
.unwrap();

let (workspace_a, cx_a) = client_a.build_workspace(&project_a, cx_a);
executor.start_waiting();

// The host opens a rust file.
let _buffer_a = project_a
.update(cx_a, |project, cx| {
project.open_local_buffer(path!("/a/main.rs"), cx)
})
.await
.unwrap();
let editor_a = workspace_a
.update_in(cx_a, |workspace, window, cx| {
workspace.open_path((worktree_id, "main.rs"), None, true, window, cx)
})
.await
.unwrap()
.downcast::<Editor>()
.unwrap();

let fake_language_server = fake_language_servers.next().await.unwrap();

let requests_made = Arc::new(AtomicUsize::new(0));
let closure_requests_made = Arc::clone(&requests_made);
let mut color_request_handle = fake_language_server
.set_request_handler::<lsp::request::DocumentColor, _, _>(move |params, _| {
let requests_made = Arc::clone(&closure_requests_made);
async move {
assert_eq!(
params.text_document.uri,
lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
);
requests_made.fetch_add(1, atomic::Ordering::Release);
Ok(vec![lsp::ColorInformation {
range: lsp::Range {
start: lsp::Position {
line: 0,
character: 0,
},
end: lsp::Position {
line: 0,
character: 1,
},
},
color: lsp::Color {
red: 0.33,
green: 0.33,
blue: 0.33,
alpha: 0.33,
},
}])
}
});
executor.run_until_parked();

assert_eq!(
0,
requests_made.load(atomic::Ordering::Acquire),
"Host did not enable document colors, hence should query for none"
);
editor_a.update(cx_a, |editor, cx| {
assert_eq!(
Vec::<Rgba>::new(),
extract_color_inlays(editor, cx),
"No query colors should result in no hints"
);
});

let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b);
let editor_b = workspace_b
.update_in(cx_b, |workspace, window, cx| {
workspace.open_path((worktree_id, "main.rs"), None, true, window, cx)
})
.await
.unwrap()
.downcast::<Editor>()
.unwrap();

color_request_handle.next().await.unwrap();
executor.run_until_parked();

assert_eq!(
1,
requests_made.load(atomic::Ordering::Acquire),
"The client opened the file and got its first colors back"
);
editor_b.update(cx_b, |editor, cx| {
assert_eq!(
vec![expected_color],
extract_color_inlays(editor, cx),
"With document colors as inlays, color inlays should be pushed"
);
});

editor_a.update_in(cx_a, |editor, window, cx| {
editor.change_selections(None, window, cx, |s| s.select_ranges([13..13].clone()));
editor.handle_input(":", window, cx);
});
color_request_handle.next().await.unwrap();
executor.run_until_parked();
assert_eq!(
2,
requests_made.load(atomic::Ordering::Acquire),
"After the host edits his file, the client should request the colors again"
);
editor_a.update(cx_a, |editor, cx| {
assert_eq!(
Vec::<Rgba>::new(),
extract_color_inlays(editor, cx),
"Host has no colors still"
);
});
editor_b.update(cx_b, |editor, cx| {
assert_eq!(vec![expected_color], extract_color_inlays(editor, cx),);
});

cx_b.update(|_, cx| {
SettingsStore::update_global(cx, |store, cx| {
store.update_user_settings::<EditorSettings>(cx, |settings| {
settings.lsp_document_colors = Some(DocumentColorsRenderMode::Background);
});
});
});
executor.run_until_parked();
assert_eq!(
2,
requests_made.load(atomic::Ordering::Acquire),
"After the client have changed the colors settings, no extra queries should happen"
);
editor_a.update(cx_a, |editor, cx| {
assert_eq!(
Vec::<Rgba>::new(),
extract_color_inlays(editor, cx),
"Host is unaffected by the client's settings changes"
);
});
editor_b.update(cx_b, |editor, cx| {
assert_eq!(
Vec::<Rgba>::new(),
extract_color_inlays(editor, cx),
"Client should have no colors hints, as in the settings"
);
});

cx_b.update(|_, cx| {
SettingsStore::update_global(cx, |store, cx| {
store.update_user_settings::<EditorSettings>(cx, |settings| {
settings.lsp_document_colors = Some(DocumentColorsRenderMode::Inlay);
});
});
});
executor.run_until_parked();
assert_eq!(
2,
requests_made.load(atomic::Ordering::Acquire),
"After falling back to colors as inlays, no extra LSP queries are made"
);
editor_a.update(cx_a, |editor, cx| {
assert_eq!(
Vec::<Rgba>::new(),
extract_color_inlays(editor, cx),
"Host is unaffected by the client's settings changes, again"
);
});
editor_b.update(cx_b, |editor, cx| {
assert_eq!(
vec![expected_color],
extract_color_inlays(editor, cx),
"Client should have its color hints back"
);
});

cx_a.update(|_, cx| {
SettingsStore::update_global(cx, |store, cx| {
store.update_user_settings::<EditorSettings>(cx, |settings| {
settings.lsp_document_colors = Some(DocumentColorsRenderMode::Border);
});
});
});
color_request_handle.next().await.unwrap();
executor.run_until_parked();
assert_eq!(
3,
requests_made.load(atomic::Ordering::Acquire),
"After the host enables document colors, another LSP query should be made"
);
editor_a.update(cx_a, |editor, cx| {
assert_eq!(
Vec::<Rgba>::new(),
extract_color_inlays(editor, cx),
"Host did not configure document colors as hints hence gets nothing"
);
});
editor_b.update(cx_b, |editor, cx| {
assert_eq!(
vec![expected_color],
extract_color_inlays(editor, cx),
"Client should be unaffected by the host's settings changes"
);
});
}

#[gpui::test(iterations = 10)]
async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) {
let mut server = TestServer::start(cx_a.executor()).await;
@@ -3111,16 +2834,6 @@ fn extract_hint_labels(editor: &Editor) -> Vec<String> {
labels
}

#[track_caller]
fn extract_color_inlays(editor: &Editor, cx: &App) -> Vec<Rgba> {
editor
.all_inlays(cx)
.into_iter()
.filter_map(|inlay| inlay.get_color())
.map(Rgba::from)
.collect()
}

fn blame_entry(sha: &str, range: Range<u32>) -> git::blame::BlameEntry {
git::blame::BlameEntry {
sha: sha.parse().unwrap(),

Some files were not shown because too many files have changed in this diff.