Compare commits


1 Commit

Author           SHA1        Message                           Date
David Kleingeld  b5d1eeff60  Add @dvdsk to git reviewers list  2025-11-03 18:50:00 +01:00
214 changed files with 4408 additions and 8678 deletions

View File

@@ -1,39 +0,0 @@
# Generated from xtask::workflows::cherry_pick
# Rebuild with `cargo xtask workflows`.
name: cherry_pick
on:
workflow_dispatch:
inputs:
commit:
description: commit
required: true
type: string
branch:
description: branch
required: true
type: string
channel:
description: channel
required: true
type: string
jobs:
run_cherry_pick:
runs-on: namespace-profile-2x4-ubuntu-2404
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- id: get-app-token
name: cherry_pick::run_cherry_pick::authenticate_as_zippy
uses: actions/create-github-app-token@bef1eaf1c0ac2b148ee2a0a74c65fbe6db0631f1
with:
app-id: ${{ secrets.ZED_ZIPPY_APP_ID }}
private-key: ${{ secrets.ZED_ZIPPY_APP_PRIVATE_KEY }}
- name: cherry_pick::run_cherry_pick::cherry_pick
run: ./script/cherry-pick ${{ inputs.branch }} ${{ inputs.commit }} ${{ inputs.channel }}
shell: bash -euxo pipefail {0}
env:
GIT_COMMITTER_NAME: Zed Zippy
GIT_COMMITTER_EMAIL: hi@zed.dev
GITHUB_TOKEN: ${{ steps.get-app-token.outputs.token }}
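The cherry_pick workflow removed above, like the other files in this diff, carries a header stating that it is generated from Rust definitions in xtask and rebuilt with `cargo xtask workflows`. A minimal regenerate-and-review loop, sketched from that comment (the git commands are ordinary tooling, not something this diff prescribes):

# Regenerate the checked-in YAML from the xtask workflow definitions,
# per the "Rebuild with `cargo xtask workflows`" header comment.
cargo xtask workflows
# Review what the generator changed before committing.
git status --short .github/workflows
git diff .github/workflows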

View File

@@ -2,77 +2,12 @@
# Rebuild with `cargo xtask workflows`.
name: compare_perf
on:
workflow_dispatch:
inputs:
head:
description: head
required: true
type: string
base:
description: base
required: true
type: string
crate_name:
description: crate_name
type: string
default: ''
workflow_dispatch: {}
jobs:
run_perf:
runs-on: namespace-profile-16x32-ubuntu-2204
runs-on: namespace-profile-2x4-ubuntu-2404
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: steps::setup_cargo_config
run: |
mkdir -p ./../.cargo
cp ./.cargo/ci-config.toml ./../.cargo/config.toml
shell: bash -euxo pipefail {0}
- name: steps::setup_linux
run: ./script/linux
shell: bash -euxo pipefail {0}
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- name: compare_perf::run_perf::install_hyperfine
run: cargo install hyperfine
shell: bash -euxo pipefail {0}
- name: steps::git_checkout
run: git fetch origin ${{ inputs.base }} && git checkout ${{ inputs.base }}
shell: bash -euxo pipefail {0}
- name: compare_perf::run_perf::cargo_perf_test
run: |2-
if [ -n "${{ inputs.crate_name }}" ]; then
cargo perf-test -p ${{ inputs.crate_name }} -- --json=${{ inputs.base }};
else
cargo perf-test -p vim -- --json=${{ inputs.base }};
fi
shell: bash -euxo pipefail {0}
- name: steps::git_checkout
run: git fetch origin ${{ inputs.head }} && git checkout ${{ inputs.head }}
shell: bash -euxo pipefail {0}
- name: compare_perf::run_perf::cargo_perf_test
run: |2-
if [ -n "${{ inputs.crate_name }}" ]; then
cargo perf-test -p ${{ inputs.crate_name }} -- --json=${{ inputs.head }};
else
cargo perf-test -p vim -- --json=${{ inputs.head }};
fi
shell: bash -euxo pipefail {0}
- name: compare_perf::run_perf::compare_runs
run: cargo perf-compare --save=results.md ${{ inputs.base }} ${{ inputs.head }}
shell: bash -euxo pipefail {0}
- name: '@actions/upload-artifact results.md'
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
with:
name: results.md
path: results.md
if-no-files-found: error
- name: steps::cleanup_cargo_config
if: always()
run: |
rm -rf ./../.cargo
shell: bash -euxo pipefail {0}
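For context, the perf-comparison steps shown above can be chained into one local run. This is only a sketch: it assumes the repository's `cargo perf-test` and `cargo perf-compare` commands (invoked verbatim in the steps above) behave the same way outside CI, and BASE_REF/HEAD_REF are placeholders for the two revisions being compared.

# Sketch of the perf comparison as a local script; assumes the repo's
# `cargo perf-test` / `cargo perf-compare` commands and hyperfine.
BASE_REF=main        # placeholder for the base revision
HEAD_REF=my-branch   # placeholder for the head revision
cargo install hyperfine
git fetch origin "$BASE_REF" && git checkout "$BASE_REF"
cargo perf-test -p vim -- --json="$BASE_REF"
git fetch origin "$HEAD_REF" && git checkout "$HEAD_REF"
cargo perf-test -p vim -- --json="$HEAD_REF"
cargo perf-compare --save=results.md "$BASE_REF" "$HEAD_REF"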

View File

@@ -3,7 +3,10 @@
name: release
env:
CARGO_TERM_COLOR: always
CARGO_INCREMENTAL: '0'
RUST_BACKTRACE: '1'
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
on:
push:
tags:
@@ -63,10 +66,6 @@ jobs:
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- name: steps::cache_rust_dependencies_namespace
uses: namespacelabs/nscloud-cache-action@v1
with:
cache: rust
- name: steps::setup_node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
with:
@@ -78,7 +77,7 @@ jobs:
run: cargo install cargo-nextest --locked
shell: bash -euxo pipefail {0}
- name: steps::clear_target_dir_if_large
run: ./script/clear-target-dir-if-larger-than 250
run: ./script/clear-target-dir-if-larger-than 100
shell: bash -euxo pipefail {0}
- name: steps::cargo_nextest
run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final
@@ -178,15 +177,11 @@ jobs:
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
timeout-minutes: 60
bundle_linux_aarch64:
bundle_linux_arm64:
needs:
- run_tests_linux
- check_scripts
runs-on: namespace-profile-8x32-ubuntu-2004-arm-m4
env:
CARGO_INCREMENTAL: 0
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
@@ -205,28 +200,27 @@ jobs:
- name: ./script/bundle-linux
run: ./script/bundle-linux
shell: bash -euxo pipefail {0}
- name: '@actions/upload-artifact zed-linux-aarch64.tar.gz'
- name: '@actions/upload-artifact zed-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz'
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
with:
name: zed-linux-aarch64.tar.gz
path: target/release/zed-linux-aarch64.tar.gz
name: zed-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz
path: target/release/zed-*.tar.gz
if-no-files-found: error
- name: '@actions/upload-artifact zed-remote-server-linux-aarch64.gz'
- name: '@actions/upload-artifact zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz'
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
with:
name: zed-remote-server-linux-aarch64.gz
path: target/zed-remote-server-linux-aarch64.gz
name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz
path: target/zed-remote-server-*.gz
if-no-files-found: error
outputs:
zed: zed-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz
remote-server: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz
timeout-minutes: 60
bundle_linux_x86_64:
needs:
- run_tests_linux
- check_scripts
runs-on: namespace-profile-32x64-ubuntu-2004
env:
CARGO_INCREMENTAL: 0
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
@@ -245,28 +239,28 @@ jobs:
- name: ./script/bundle-linux
run: ./script/bundle-linux
shell: bash -euxo pipefail {0}
- name: '@actions/upload-artifact zed-linux-x86_64.tar.gz'
- name: '@actions/upload-artifact zed-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz'
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
with:
name: zed-linux-x86_64.tar.gz
path: target/release/zed-linux-x86_64.tar.gz
name: zed-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz
path: target/release/zed-*.tar.gz
if-no-files-found: error
- name: '@actions/upload-artifact zed-remote-server-linux-x86_64.gz'
- name: '@actions/upload-artifact zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz'
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
with:
name: zed-remote-server-linux-x86_64.gz
path: target/zed-remote-server-linux-x86_64.gz
name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz
path: target/zed-remote-server-*.gz
if-no-files-found: error
outputs:
zed: zed-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz
remote-server: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz
timeout-minutes: 60
bundle_mac_aarch64:
bundle_mac_arm64:
needs:
- run_tests_mac
- check_scripts
runs-on: self-mini-macos
env:
CARGO_INCREMENTAL: 0
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }}
APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }}
@@ -288,21 +282,24 @@ jobs:
- name: steps::clear_target_dir_if_large
run: ./script/clear-target-dir-if-larger-than 300
shell: bash -euxo pipefail {0}
- name: run_bundling::bundle_mac::bundle_mac
- name: run_bundling::bundle_mac
run: ./script/bundle-mac aarch64-apple-darwin
shell: bash -euxo pipefail {0}
- name: '@actions/upload-artifact Zed-aarch64.dmg'
- name: '@actions/upload-artifact Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.dmg'
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
with:
name: Zed-aarch64.dmg
path: target/aarch64-apple-darwin/release/Zed-aarch64.dmg
name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.dmg
path: target/aarch64-apple-darwin/release/Zed.dmg
if-no-files-found: error
- name: '@actions/upload-artifact zed-remote-server-macos-aarch64.gz'
- name: '@actions/upload-artifact zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-aarch64.gz'
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
with:
name: zed-remote-server-macos-aarch64.gz
name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-aarch64.gz
path: target/zed-remote-server-macos-aarch64.gz
if-no-files-found: error
outputs:
zed: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.dmg
remote-server: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-aarch64.gz
timeout-minutes: 60
bundle_mac_x86_64:
needs:
@@ -310,9 +307,6 @@ jobs:
- check_scripts
runs-on: self-mini-macos
env:
CARGO_INCREMENTAL: 0
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }}
APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }}
@@ -334,31 +328,31 @@ jobs:
- name: steps::clear_target_dir_if_large
run: ./script/clear-target-dir-if-larger-than 300
shell: bash -euxo pipefail {0}
- name: run_bundling::bundle_mac::bundle_mac
- name: run_bundling::bundle_mac
run: ./script/bundle-mac x86_64-apple-darwin
shell: bash -euxo pipefail {0}
- name: '@actions/upload-artifact Zed-x86_64.dmg'
- name: '@actions/upload-artifact Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.dmg'
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
with:
name: Zed-x86_64.dmg
path: target/x86_64-apple-darwin/release/Zed-x86_64.dmg
name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.dmg
path: target/x86_64-apple-darwin/release/Zed.dmg
if-no-files-found: error
- name: '@actions/upload-artifact zed-remote-server-macos-x86_64.gz'
- name: '@actions/upload-artifact zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-x86_64.gz'
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
with:
name: zed-remote-server-macos-x86_64.gz
name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-x86_64.gz
path: target/zed-remote-server-macos-x86_64.gz
if-no-files-found: error
outputs:
zed: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.dmg
remote-server: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-x86_64.gz
timeout-minutes: 60
bundle_windows_aarch64:
bundle_windows_arm64:
needs:
- run_tests_windows
- check_scripts
runs-on: self-32vcpu-windows-2022
env:
CARGO_INCREMENTAL: 0
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }}
AZURE_CLIENT_ID: ${{ secrets.AZURE_SIGNING_CLIENT_ID }}
AZURE_CLIENT_SECRET: ${{ secrets.AZURE_SIGNING_CLIENT_SECRET }}
@@ -377,16 +371,18 @@ jobs:
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
with:
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
- name: run_bundling::bundle_windows::bundle_windows
- name: run_bundling::bundle_windows
run: script/bundle-windows.ps1 -Architecture aarch64
shell: pwsh
working-directory: ${{ env.ZED_WORKSPACE }}
- name: '@actions/upload-artifact Zed-aarch64.exe'
- name: '@actions/upload-artifact Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.exe'
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
with:
name: Zed-aarch64.exe
path: target/Zed-aarch64.exe
name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.exe
path: ${{ env.SETUP_PATH }}
if-no-files-found: error
outputs:
zed: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.exe
timeout-minutes: 60
bundle_windows_x86_64:
needs:
@@ -394,9 +390,6 @@ jobs:
- check_scripts
runs-on: self-32vcpu-windows-2022
env:
CARGO_INCREMENTAL: 0
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }}
AZURE_CLIENT_ID: ${{ secrets.AZURE_SIGNING_CLIENT_ID }}
AZURE_CLIENT_SECRET: ${{ secrets.AZURE_SIGNING_CLIENT_SECRET }}
@@ -415,49 +408,51 @@ jobs:
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
with:
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
- name: run_bundling::bundle_windows::bundle_windows
- name: run_bundling::bundle_windows
run: script/bundle-windows.ps1 -Architecture x86_64
shell: pwsh
working-directory: ${{ env.ZED_WORKSPACE }}
- name: '@actions/upload-artifact Zed-x86_64.exe'
- name: '@actions/upload-artifact Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.exe'
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
with:
name: Zed-x86_64.exe
path: target/Zed-x86_64.exe
name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.exe
path: ${{ env.SETUP_PATH }}
if-no-files-found: error
outputs:
zed: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.exe
timeout-minutes: 60
upload_release_assets:
needs:
- create_draft_release
- bundle_linux_aarch64
- bundle_linux_arm64
- bundle_linux_x86_64
- bundle_mac_aarch64
- bundle_mac_arm64
- bundle_mac_x86_64
- bundle_windows_aarch64
- bundle_windows_arm64
- bundle_windows_x86_64
runs-on: namespace-profile-4x8-ubuntu-2204
steps:
- name: release::download_workflow_artifacts
- name: release::upload_release_assets::download_workflow_artifacts
uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53
with:
path: ./artifacts/
- name: ls -lR ./artifacts
run: ls -lR ./artifacts
shell: bash -euxo pipefail {0}
- name: release::prep_release_artifacts
- name: release::upload_release_assets::prep_release_artifacts
run: |-
mkdir -p release-artifacts/
mv ./artifacts/Zed-aarch64.dmg/Zed-aarch64.dmg release-artifacts/Zed-aarch64.dmg
mv ./artifacts/Zed-x86_64.dmg/Zed-x86_64.dmg release-artifacts/Zed-x86_64.dmg
mv ./artifacts/zed-linux-aarch64.tar.gz/zed-linux-aarch64.tar.gz release-artifacts/zed-linux-aarch64.tar.gz
mv ./artifacts/zed-linux-x86_64.tar.gz/zed-linux-x86_64.tar.gz release-artifacts/zed-linux-x86_64.tar.gz
mv ./artifacts/Zed-x86_64.exe/Zed-x86_64.exe release-artifacts/Zed-x86_64.exe
mv ./artifacts/Zed-aarch64.exe/Zed-aarch64.exe release-artifacts/Zed-aarch64.exe
mv ./artifacts/zed-remote-server-macos-aarch64.gz/zed-remote-server-macos-aarch64.gz release-artifacts/zed-remote-server-macos-aarch64.gz
mv ./artifacts/zed-remote-server-macos-x86_64.gz/zed-remote-server-macos-x86_64.gz release-artifacts/zed-remote-server-macos-x86_64.gz
mv ./artifacts/zed-remote-server-linux-aarch64.gz/zed-remote-server-linux-aarch64.gz release-artifacts/zed-remote-server-linux-aarch64.gz
mv ./artifacts/zed-remote-server-linux-x86_64.gz/zed-remote-server-linux-x86_64.gz release-artifacts/zed-remote-server-linux-x86_64.gz
mv ./artifacts/${{ needs.bundle_mac_x86_64.outputs.zed }}/* release-artifacts/Zed-x86_64.dmg
mv ./artifacts/${{ needs.bundle_mac_arm64.outputs.zed }}/* release-artifacts/Zed-aarch64.dmg
mv ./artifacts/${{ needs.bundle_windows_x86_64.outputs.zed }}/* release-artifacts/Zed-x86_64.exe
mv ./artifacts/${{ needs.bundle_windows_arm64.outputs.zed }}/* release-artifacts/Zed-aarch64.exe
mv ./artifacts/${{ needs.bundle_linux_arm64.outputs.zed }}/* release-artifacts/zed-linux-aarch64.tar.gz
mv ./artifacts/${{ needs.bundle_linux_x86_64.outputs.zed }}/* release-artifacts/zed-linux-x86_64.tar.gz
mv ./artifacts/${{ needs.bundle_linux_x86_64.outputs.remote-server }}/* release-artifacts/zed-remote-server-linux-x86_64.gz
mv ./artifacts/${{ needs.bundle_linux_arm64.outputs.remote-server }}/* release-artifacts/zed-remote-server-linux-aarch64.gz
mv ./artifacts/${{ needs.bundle_mac_x86_64.outputs.remote-server }}/* release-artifacts/zed-remote-server-macos-x86_64.gz
mv ./artifacts/${{ needs.bundle_mac_arm64.outputs.remote-server }}/* release-artifacts/zed-remote-server-macos-aarch64.gz
shell: bash -euxo pipefail {0}
- name: gh release upload "$GITHUB_REF_NAME" --repo=zed-industries/zed release-artifacts/*
run: gh release upload "$GITHUB_REF_NAME" --repo=zed-industries/zed release-artifacts/*
@@ -467,7 +462,10 @@ jobs:
auto_release_preview:
needs:
- upload_release_assets
if: startsWith(github.ref, 'refs/tags/v') && endsWith(github.ref, '-pre') && !endsWith(github.ref, '.0-pre')
if: |
false
&& startsWith(github.ref, 'refs/tags/v')
&& endsWith(github.ref, '-pre') && !endsWith(github.ref, '.0-pre')
runs-on: namespace-profile-2x4-ubuntu-2404
steps:
- name: gh release edit "$GITHUB_REF_NAME" --repo=zed-industries/zed --draft=false
@@ -475,7 +473,7 @@ jobs:
shell: bash -euxo pipefail {0}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: release::create_sentry_release
- name: release::auto_release_preview::create_sentry_release
uses: getsentry/action-release@526942b68292201ac6bbb99b9a0747d4abee354c
with:
environment: production
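The new auto_release_preview condition above is worth unpacking: the leading `false &&` disables the job entirely, while the remaining clauses still describe which tags would otherwise qualify (a `v*` tag ending in `-pre` but not in `.0-pre`). A rough bash rendering of those tag checks, for illustration only (the real gating uses GitHub Actions expressions):

# Illustration of the tag checks in the auto_release_preview condition.
# The workflow's new leading `false &&` keeps the job disabled regardless.
ref="${1:-refs/tags/v1.2.3-pre}"   # example ref; pass any value as $1
if [[ "$ref" == refs/tags/v* && "$ref" == *-pre && "$ref" != *.0-pre ]]; then
  echo "would auto-publish preview for $ref"   # e.g. refs/tags/v1.2.3-pre
else
  echo "would stay a draft for $ref"           # e.g. refs/tags/v1.2.0-pre
fi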

View File

@@ -3,7 +3,12 @@
name: release_nightly
env:
CARGO_TERM_COLOR: always
CARGO_INCREMENTAL: '0'
RUST_BACKTRACE: '1'
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
on:
push:
tags:
@@ -27,6 +32,41 @@ jobs:
run: ./script/clippy
shell: bash -euxo pipefail {0}
timeout-minutes: 60
run_tests_mac:
if: github.repository_owner == 'zed-industries'
runs-on: self-mini-macos
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: steps::setup_cargo_config
run: |
mkdir -p ./../.cargo
cp ./.cargo/ci-config.toml ./../.cargo/config.toml
shell: bash -euxo pipefail {0}
- name: steps::setup_node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
with:
node-version: '20'
- name: steps::clippy
run: ./script/clippy
shell: bash -euxo pipefail {0}
- name: steps::cargo_install_nextest
run: cargo install cargo-nextest --locked
shell: bash -euxo pipefail {0}
- name: steps::clear_target_dir_if_large
run: ./script/clear-target-dir-if-larger-than 300
shell: bash -euxo pipefail {0}
- name: steps::cargo_nextest
run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final
shell: bash -euxo pipefail {0}
- name: steps::cleanup_cargo_config
if: always()
run: |
rm -rf ./../.cargo
shell: bash -euxo pipefail {0}
timeout-minutes: 60
run_tests_windows:
if: github.repository_owner == 'zed-industries'
runs-on: self-32vcpu-windows-2022
@@ -62,109 +102,13 @@ jobs:
Remove-Item -Recurse -Path "./../.cargo" -Force -ErrorAction SilentlyContinue
shell: pwsh
timeout-minutes: 60
bundle_linux_aarch64:
bundle_mac_nightly_x86_64:
needs:
- check_style
- run_tests_windows
runs-on: namespace-profile-8x32-ubuntu-2004-arm-m4
env:
CARGO_INCREMENTAL: 0
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: run_bundling::set_release_channel_to_nightly
run: |
set -eu
version=$(git rev-parse --short HEAD)
echo "Publishing version: ${version} on release channel nightly"
echo "nightly" > crates/zed/RELEASE_CHANNEL
shell: bash -euxo pipefail {0}
- name: steps::setup_sentry
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
with:
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
- name: steps::setup_linux
run: ./script/linux
shell: bash -euxo pipefail {0}
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- name: ./script/bundle-linux
run: ./script/bundle-linux
shell: bash -euxo pipefail {0}
- name: '@actions/upload-artifact zed-linux-aarch64.tar.gz'
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
with:
name: zed-linux-aarch64.tar.gz
path: target/release/zed-linux-aarch64.tar.gz
if-no-files-found: error
- name: '@actions/upload-artifact zed-remote-server-linux-aarch64.gz'
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
with:
name: zed-remote-server-linux-aarch64.gz
path: target/zed-remote-server-linux-aarch64.gz
if-no-files-found: error
timeout-minutes: 60
bundle_linux_x86_64:
needs:
- check_style
- run_tests_windows
runs-on: namespace-profile-32x64-ubuntu-2004
env:
CARGO_INCREMENTAL: 0
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: run_bundling::set_release_channel_to_nightly
run: |
set -eu
version=$(git rev-parse --short HEAD)
echo "Publishing version: ${version} on release channel nightly"
echo "nightly" > crates/zed/RELEASE_CHANNEL
shell: bash -euxo pipefail {0}
- name: steps::setup_sentry
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
with:
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
- name: steps::setup_linux
run: ./script/linux
shell: bash -euxo pipefail {0}
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- name: ./script/bundle-linux
run: ./script/bundle-linux
shell: bash -euxo pipefail {0}
- name: '@actions/upload-artifact zed-linux-x86_64.tar.gz'
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
with:
name: zed-linux-x86_64.tar.gz
path: target/release/zed-linux-x86_64.tar.gz
if-no-files-found: error
- name: '@actions/upload-artifact zed-remote-server-linux-x86_64.gz'
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
with:
name: zed-remote-server-linux-x86_64.gz
path: target/zed-remote-server-linux-x86_64.gz
if-no-files-found: error
timeout-minutes: 60
bundle_mac_aarch64:
needs:
- check_style
- run_tests_windows
- run_tests_mac
if: github.repository_owner == 'zed-industries'
runs-on: self-mini-macos
env:
CARGO_INCREMENTAL: 0
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }}
APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }}
@@ -175,13 +119,6 @@ jobs:
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: run_bundling::set_release_channel_to_nightly
run: |
set -eu
version=$(git rev-parse --short HEAD)
echo "Publishing version: ${version} on release channel nightly"
echo "nightly" > crates/zed/RELEASE_CHANNEL
shell: bash -euxo pipefail {0}
- name: steps::setup_node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
with:
@@ -193,84 +130,142 @@ jobs:
- name: steps::clear_target_dir_if_large
run: ./script/clear-target-dir-if-larger-than 300
shell: bash -euxo pipefail {0}
- name: run_bundling::bundle_mac::bundle_mac
run: ./script/bundle-mac aarch64-apple-darwin
shell: bash -euxo pipefail {0}
- name: '@actions/upload-artifact Zed-aarch64.dmg'
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
with:
name: Zed-aarch64.dmg
path: target/aarch64-apple-darwin/release/Zed-aarch64.dmg
if-no-files-found: error
- name: '@actions/upload-artifact zed-remote-server-macos-aarch64.gz'
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
with:
name: zed-remote-server-macos-aarch64.gz
path: target/zed-remote-server-macos-aarch64.gz
if-no-files-found: error
timeout-minutes: 60
bundle_mac_x86_64:
needs:
- check_style
- run_tests_windows
runs-on: self-mini-macos
env:
CARGO_INCREMENTAL: 0
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }}
APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }}
APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }}
APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }}
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: run_bundling::set_release_channel_to_nightly
- name: release_nightly::set_release_channel_to_nightly
run: |
set -eu
version=$(git rev-parse --short HEAD)
echo "Publishing version: ${version} on release channel nightly"
echo "nightly" > crates/zed/RELEASE_CHANNEL
shell: bash -euxo pipefail {0}
- name: steps::setup_node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
with:
node-version: '20'
- name: steps::setup_sentry
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
with:
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
- name: steps::clear_target_dir_if_large
run: ./script/clear-target-dir-if-larger-than 300
shell: bash -euxo pipefail {0}
- name: run_bundling::bundle_mac::bundle_mac
- name: run_bundling::bundle_mac
run: ./script/bundle-mac x86_64-apple-darwin
shell: bash -euxo pipefail {0}
- name: '@actions/upload-artifact Zed-x86_64.dmg'
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
with:
name: Zed-x86_64.dmg
path: target/x86_64-apple-darwin/release/Zed-x86_64.dmg
if-no-files-found: error
- name: '@actions/upload-artifact zed-remote-server-macos-x86_64.gz'
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
with:
name: zed-remote-server-macos-x86_64.gz
path: target/zed-remote-server-macos-x86_64.gz
if-no-files-found: error
- name: release_nightly::upload_zed_nightly
run: script/upload-nightly macos x86_64
shell: bash -euxo pipefail {0}
timeout-minutes: 60
bundle_windows_aarch64:
bundle_mac_nightly_aarch64:
needs:
- check_style
- run_tests_mac
if: github.repository_owner == 'zed-industries'
runs-on: self-mini-macos
env:
MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }}
APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }}
APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }}
APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }}
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: steps::setup_node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
with:
node-version: '20'
- name: steps::setup_sentry
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
with:
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
- name: steps::clear_target_dir_if_large
run: ./script/clear-target-dir-if-larger-than 300
shell: bash -euxo pipefail {0}
- name: release_nightly::set_release_channel_to_nightly
run: |
set -eu
version=$(git rev-parse --short HEAD)
echo "Publishing version: ${version} on release channel nightly"
echo "nightly" > crates/zed/RELEASE_CHANNEL
shell: bash -euxo pipefail {0}
- name: run_bundling::bundle_mac
run: ./script/bundle-mac aarch64-apple-darwin
shell: bash -euxo pipefail {0}
- name: release_nightly::upload_zed_nightly
run: script/upload-nightly macos aarch64
shell: bash -euxo pipefail {0}
timeout-minutes: 60
bundle_linux_nightly_x86_64:
needs:
- check_style
- run_tests_mac
if: github.repository_owner == 'zed-industries'
runs-on: namespace-profile-32x64-ubuntu-2004
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: steps::setup_sentry
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
with:
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
- name: ./script/linux
run: ./script/linux
shell: bash -euxo pipefail {0}
- name: ./script/install-mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- name: steps::clear_target_dir_if_large
run: ./script/clear-target-dir-if-larger-than 100
shell: bash -euxo pipefail {0}
- name: release_nightly::set_release_channel_to_nightly
run: |
set -eu
version=$(git rev-parse --short HEAD)
echo "Publishing version: ${version} on release channel nightly"
echo "nightly" > crates/zed/RELEASE_CHANNEL
shell: bash -euxo pipefail {0}
- name: ./script/bundle-linux
run: ./script/bundle-linux
shell: bash -euxo pipefail {0}
- name: release_nightly::upload_zed_nightly
run: script/upload-nightly linux-targz x86_64
shell: bash -euxo pipefail {0}
timeout-minutes: 60
bundle_linux_nightly_aarch64:
needs:
- check_style
- run_tests_mac
if: github.repository_owner == 'zed-industries'
runs-on: namespace-profile-8x32-ubuntu-2004-arm-m4
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: steps::setup_sentry
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
with:
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
- name: ./script/linux
run: ./script/linux
shell: bash -euxo pipefail {0}
- name: steps::clear_target_dir_if_large
run: ./script/clear-target-dir-if-larger-than 100
shell: bash -euxo pipefail {0}
- name: release_nightly::set_release_channel_to_nightly
run: |
set -eu
version=$(git rev-parse --short HEAD)
echo "Publishing version: ${version} on release channel nightly"
echo "nightly" > crates/zed/RELEASE_CHANNEL
shell: bash -euxo pipefail {0}
- name: ./script/bundle-linux
run: ./script/bundle-linux
shell: bash -euxo pipefail {0}
- name: release_nightly::upload_zed_nightly
run: script/upload-nightly linux-targz aarch64
shell: bash -euxo pipefail {0}
timeout-minutes: 60
bundle_windows_nightly_x86_64:
needs:
- check_style
- run_tests_windows
if: github.repository_owner == 'zed-industries'
runs-on: self-32vcpu-windows-2022
env:
CARGO_INCREMENTAL: 0
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }}
AZURE_CLIENT_ID: ${{ secrets.AZURE_SIGNING_CLIENT_ID }}
AZURE_CLIENT_SECRET: ${{ secrets.AZURE_SIGNING_CLIENT_SECRET }}
@@ -285,7 +280,11 @@ jobs:
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: run_bundling::set_release_channel_to_nightly
- name: steps::setup_sentry
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
with:
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
- name: release_nightly::set_release_channel_to_nightly
run: |
$ErrorActionPreference = "Stop"
$version = git rev-parse --short HEAD
@@ -293,71 +292,61 @@ jobs:
"nightly" | Set-Content -Path "crates/zed/RELEASE_CHANNEL"
shell: pwsh
working-directory: ${{ env.ZED_WORKSPACE }}
- name: steps::setup_sentry
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
with:
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
- name: run_bundling::bundle_windows::bundle_windows
run: script/bundle-windows.ps1 -Architecture aarch64
shell: pwsh
working-directory: ${{ env.ZED_WORKSPACE }}
- name: '@actions/upload-artifact Zed-aarch64.exe'
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
with:
name: Zed-aarch64.exe
path: target/Zed-aarch64.exe
if-no-files-found: error
timeout-minutes: 60
bundle_windows_x86_64:
needs:
- check_style
- run_tests_windows
runs-on: self-32vcpu-windows-2022
env:
CARGO_INCREMENTAL: 0
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }}
AZURE_CLIENT_ID: ${{ secrets.AZURE_SIGNING_CLIENT_ID }}
AZURE_CLIENT_SECRET: ${{ secrets.AZURE_SIGNING_CLIENT_SECRET }}
ACCOUNT_NAME: ${{ vars.AZURE_SIGNING_ACCOUNT_NAME }}
CERT_PROFILE_NAME: ${{ vars.AZURE_SIGNING_CERT_PROFILE_NAME }}
ENDPOINT: ${{ vars.AZURE_SIGNING_ENDPOINT }}
FILE_DIGEST: SHA256
TIMESTAMP_DIGEST: SHA256
TIMESTAMP_SERVER: http://timestamp.acs.microsoft.com
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: run_bundling::set_release_channel_to_nightly
run: |
$ErrorActionPreference = "Stop"
$version = git rev-parse --short HEAD
Write-Host "Publishing version: $version on release channel nightly"
"nightly" | Set-Content -Path "crates/zed/RELEASE_CHANNEL"
shell: pwsh
working-directory: ${{ env.ZED_WORKSPACE }}
- name: steps::setup_sentry
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
with:
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
- name: run_bundling::bundle_windows::bundle_windows
- name: run_bundling::bundle_windows
run: script/bundle-windows.ps1 -Architecture x86_64
shell: pwsh
working-directory: ${{ env.ZED_WORKSPACE }}
- name: '@actions/upload-artifact Zed-x86_64.exe'
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
- name: release_nightly::upload_zed_nightly
run: script/upload-nightly.ps1 -Architecture x86_64
shell: pwsh
working-directory: ${{ env.ZED_WORKSPACE }}
timeout-minutes: 60
bundle_windows_nightly_aarch64:
needs:
- check_style
- run_tests_windows
if: github.repository_owner == 'zed-industries'
runs-on: self-32vcpu-windows-2022
env:
AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }}
AZURE_CLIENT_ID: ${{ secrets.AZURE_SIGNING_CLIENT_ID }}
AZURE_CLIENT_SECRET: ${{ secrets.AZURE_SIGNING_CLIENT_SECRET }}
ACCOUNT_NAME: ${{ vars.AZURE_SIGNING_ACCOUNT_NAME }}
CERT_PROFILE_NAME: ${{ vars.AZURE_SIGNING_CERT_PROFILE_NAME }}
ENDPOINT: ${{ vars.AZURE_SIGNING_ENDPOINT }}
FILE_DIGEST: SHA256
TIMESTAMP_DIGEST: SHA256
TIMESTAMP_SERVER: http://timestamp.acs.microsoft.com
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
name: Zed-x86_64.exe
path: target/Zed-x86_64.exe
if-no-files-found: error
clean: false
- name: steps::setup_sentry
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
with:
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
- name: release_nightly::set_release_channel_to_nightly
run: |
$ErrorActionPreference = "Stop"
$version = git rev-parse --short HEAD
Write-Host "Publishing version: $version on release channel nightly"
"nightly" | Set-Content -Path "crates/zed/RELEASE_CHANNEL"
shell: pwsh
working-directory: ${{ env.ZED_WORKSPACE }}
- name: run_bundling::bundle_windows
run: script/bundle-windows.ps1 -Architecture aarch64
shell: pwsh
working-directory: ${{ env.ZED_WORKSPACE }}
- name: release_nightly::upload_zed_nightly
run: script/upload-nightly.ps1 -Architecture aarch64
shell: pwsh
working-directory: ${{ env.ZED_WORKSPACE }}
timeout-minutes: 60
build_nix_linux_x86_64:
needs:
- check_style
- run_tests_windows
- run_tests_mac
if: github.repository_owner == 'zed-industries'
runs-on: namespace-profile-32x64-ubuntu-2004
env:
@@ -388,7 +377,7 @@ jobs:
build_nix_mac_aarch64:
needs:
- check_style
- run_tests_windows
- run_tests_mac
if: github.repository_owner == 'zed-industries'
runs-on: self-mini-macos
env:
@@ -425,48 +414,20 @@ jobs:
continue-on-error: true
update_nightly_tag:
needs:
- bundle_linux_aarch64
- bundle_linux_x86_64
- bundle_mac_aarch64
- bundle_mac_x86_64
- bundle_windows_aarch64
- bundle_windows_x86_64
- bundle_mac_nightly_x86_64
- bundle_mac_nightly_aarch64
- bundle_linux_nightly_x86_64
- bundle_linux_nightly_aarch64
- bundle_windows_nightly_x86_64
- bundle_windows_nightly_aarch64
if: github.repository_owner == 'zed-industries'
runs-on: namespace-profile-4x8-ubuntu-2204
runs-on: namespace-profile-2x4-ubuntu-2404
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
fetch-depth: 0
- name: release::download_workflow_artifacts
uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53
with:
path: ./artifacts/
- name: ls -lR ./artifacts
run: ls -lR ./artifacts
shell: bash -euxo pipefail {0}
- name: release::prep_release_artifacts
run: |-
mkdir -p release-artifacts/
mv ./artifacts/Zed-aarch64.dmg/Zed-aarch64.dmg release-artifacts/Zed-aarch64.dmg
mv ./artifacts/Zed-x86_64.dmg/Zed-x86_64.dmg release-artifacts/Zed-x86_64.dmg
mv ./artifacts/zed-linux-aarch64.tar.gz/zed-linux-aarch64.tar.gz release-artifacts/zed-linux-aarch64.tar.gz
mv ./artifacts/zed-linux-x86_64.tar.gz/zed-linux-x86_64.tar.gz release-artifacts/zed-linux-x86_64.tar.gz
mv ./artifacts/Zed-x86_64.exe/Zed-x86_64.exe release-artifacts/Zed-x86_64.exe
mv ./artifacts/Zed-aarch64.exe/Zed-aarch64.exe release-artifacts/Zed-aarch64.exe
mv ./artifacts/zed-remote-server-macos-aarch64.gz/zed-remote-server-macos-aarch64.gz release-artifacts/zed-remote-server-macos-aarch64.gz
mv ./artifacts/zed-remote-server-macos-x86_64.gz/zed-remote-server-macos-x86_64.gz release-artifacts/zed-remote-server-macos-x86_64.gz
mv ./artifacts/zed-remote-server-linux-aarch64.gz/zed-remote-server-linux-aarch64.gz release-artifacts/zed-remote-server-linux-aarch64.gz
mv ./artifacts/zed-remote-server-linux-x86_64.gz/zed-remote-server-linux-x86_64.gz release-artifacts/zed-remote-server-linux-x86_64.gz
shell: bash -euxo pipefail {0}
- name: ./script/upload-nightly
run: ./script/upload-nightly
shell: bash -euxo pipefail {0}
env:
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
- name: release_nightly::update_nightly_tag_job::update_nightly_tag
run: |
if [ "$(git rev-parse nightly)" = "$(git rev-parse HEAD)" ]; then
@@ -478,7 +439,7 @@ jobs:
git tag -f nightly
git push origin nightly --force
shell: bash -euxo pipefail {0}
- name: release::create_sentry_release
- name: release_nightly::update_nightly_tag_job::create_sentry_release
uses: getsentry/action-release@526942b68292201ac6bbb99b9a0747d4abee354c
with:
environment: production

View File

@@ -30,10 +30,10 @@ jobs:
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: steps::cache_rust_dependencies_namespace
uses: namespacelabs/nscloud-cache-action@v1
- name: steps::cache_rust_dependencies
uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6
with:
cache: rust
save-if: ${{ github.ref == 'refs/heads/main' }}
- name: steps::setup_linux
run: ./script/linux
shell: bash -euxo pipefail {0}

View File

@@ -3,148 +3,22 @@
name: run_bundling
env:
CARGO_TERM_COLOR: always
CARGO_INCREMENTAL: '0'
RUST_BACKTRACE: '1'
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
on:
pull_request:
types:
- labeled
- synchronize
jobs:
bundle_linux_aarch64:
if: |-
(github.event.action == 'labeled' && github.event.label.name == 'run-bundling') ||
(github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling'))
runs-on: namespace-profile-8x32-ubuntu-2004-arm-m4
env:
CARGO_INCREMENTAL: 0
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: steps::setup_sentry
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
with:
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
- name: steps::setup_linux
run: ./script/linux
shell: bash -euxo pipefail {0}
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- name: ./script/bundle-linux
run: ./script/bundle-linux
shell: bash -euxo pipefail {0}
- name: '@actions/upload-artifact zed-linux-aarch64.tar.gz'
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
with:
name: zed-linux-aarch64.tar.gz
path: target/release/zed-linux-aarch64.tar.gz
if-no-files-found: error
- name: '@actions/upload-artifact zed-remote-server-linux-aarch64.gz'
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
with:
name: zed-remote-server-linux-aarch64.gz
path: target/zed-remote-server-linux-aarch64.gz
if-no-files-found: error
timeout-minutes: 60
bundle_linux_x86_64:
if: |-
(github.event.action == 'labeled' && github.event.label.name == 'run-bundling') ||
(github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling'))
runs-on: namespace-profile-32x64-ubuntu-2004
env:
CARGO_INCREMENTAL: 0
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: steps::setup_sentry
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
with:
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
- name: steps::setup_linux
run: ./script/linux
shell: bash -euxo pipefail {0}
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- name: ./script/bundle-linux
run: ./script/bundle-linux
shell: bash -euxo pipefail {0}
- name: '@actions/upload-artifact zed-linux-x86_64.tar.gz'
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
with:
name: zed-linux-x86_64.tar.gz
path: target/release/zed-linux-x86_64.tar.gz
if-no-files-found: error
- name: '@actions/upload-artifact zed-remote-server-linux-x86_64.gz'
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
with:
name: zed-remote-server-linux-x86_64.gz
path: target/zed-remote-server-linux-x86_64.gz
if-no-files-found: error
timeout-minutes: 60
bundle_mac_aarch64:
if: |-
(github.event.action == 'labeled' && github.event.label.name == 'run-bundling') ||
(github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling'))
runs-on: self-mini-macos
env:
CARGO_INCREMENTAL: 0
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }}
APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }}
APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }}
APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }}
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: steps::setup_node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
with:
node-version: '20'
- name: steps::setup_sentry
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
with:
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
- name: steps::clear_target_dir_if_large
run: ./script/clear-target-dir-if-larger-than 300
shell: bash -euxo pipefail {0}
- name: run_bundling::bundle_mac::bundle_mac
run: ./script/bundle-mac aarch64-apple-darwin
shell: bash -euxo pipefail {0}
- name: '@actions/upload-artifact Zed-aarch64.dmg'
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
with:
name: Zed-aarch64.dmg
path: target/aarch64-apple-darwin/release/Zed-aarch64.dmg
if-no-files-found: error
- name: '@actions/upload-artifact zed-remote-server-macos-aarch64.gz'
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
with:
name: zed-remote-server-macos-aarch64.gz
path: target/zed-remote-server-macos-aarch64.gz
if-no-files-found: error
timeout-minutes: 60
bundle_mac_x86_64:
if: |-
(github.event.action == 'labeled' && github.event.label.name == 'run-bundling') ||
(github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling'))
runs-on: self-mini-macos
env:
CARGO_INCREMENTAL: 0
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }}
APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }}
@@ -166,40 +40,76 @@ jobs:
- name: steps::clear_target_dir_if_large
run: ./script/clear-target-dir-if-larger-than 300
shell: bash -euxo pipefail {0}
- name: run_bundling::bundle_mac::bundle_mac
- name: run_bundling::bundle_mac
run: ./script/bundle-mac x86_64-apple-darwin
shell: bash -euxo pipefail {0}
- name: '@actions/upload-artifact Zed-x86_64.dmg'
- name: '@actions/upload-artifact Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.dmg'
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
with:
name: Zed-x86_64.dmg
path: target/x86_64-apple-darwin/release/Zed-x86_64.dmg
name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.dmg
path: target/x86_64-apple-darwin/release/Zed.dmg
if-no-files-found: error
- name: '@actions/upload-artifact zed-remote-server-macos-x86_64.gz'
- name: '@actions/upload-artifact zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-x86_64.gz'
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
with:
name: zed-remote-server-macos-x86_64.gz
name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-x86_64.gz
path: target/zed-remote-server-macos-x86_64.gz
if-no-files-found: error
outputs:
zed: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.dmg
remote-server: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-x86_64.gz
timeout-minutes: 60
bundle_windows_aarch64:
bundle_mac_arm64:
if: |-
(github.event.action == 'labeled' && github.event.label.name == 'run-bundling') ||
(github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling'))
runs-on: self-32vcpu-windows-2022
runs-on: self-mini-macos
env:
CARGO_INCREMENTAL: 0
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }}
AZURE_CLIENT_ID: ${{ secrets.AZURE_SIGNING_CLIENT_ID }}
AZURE_CLIENT_SECRET: ${{ secrets.AZURE_SIGNING_CLIENT_SECRET }}
ACCOUNT_NAME: ${{ vars.AZURE_SIGNING_ACCOUNT_NAME }}
CERT_PROFILE_NAME: ${{ vars.AZURE_SIGNING_CERT_PROFILE_NAME }}
ENDPOINT: ${{ vars.AZURE_SIGNING_ENDPOINT }}
FILE_DIGEST: SHA256
TIMESTAMP_DIGEST: SHA256
TIMESTAMP_SERVER: http://timestamp.acs.microsoft.com
MACOS_CERTIFICATE: ${{ secrets.MACOS_CERTIFICATE }}
MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }}
APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }}
APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }}
APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }}
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: steps::setup_node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
with:
node-version: '20'
- name: steps::setup_sentry
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
with:
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
- name: steps::clear_target_dir_if_large
run: ./script/clear-target-dir-if-larger-than 300
shell: bash -euxo pipefail {0}
- name: run_bundling::bundle_mac
run: ./script/bundle-mac aarch64-apple-darwin
shell: bash -euxo pipefail {0}
- name: '@actions/upload-artifact Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.dmg'
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
with:
name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.dmg
path: target/aarch64-apple-darwin/release/Zed.dmg
if-no-files-found: error
- name: '@actions/upload-artifact zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-aarch64.gz'
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
with:
name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-aarch64.gz
path: target/zed-remote-server-macos-aarch64.gz
if-no-files-found: error
outputs:
zed: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.dmg
remote-server: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-macos-aarch64.gz
timeout-minutes: 60
bundle_linux_x86_64:
if: |-
(github.event.action == 'labeled' && github.event.label.name == 'run-bundling') ||
(github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling'))
runs-on: namespace-profile-32x64-ubuntu-2004
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
@@ -209,16 +119,69 @@ jobs:
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
with:
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
- name: run_bundling::bundle_windows::bundle_windows
run: script/bundle-windows.ps1 -Architecture aarch64
shell: pwsh
working-directory: ${{ env.ZED_WORKSPACE }}
- name: '@actions/upload-artifact Zed-aarch64.exe'
- name: steps::setup_linux
run: ./script/linux
shell: bash -euxo pipefail {0}
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- name: ./script/bundle-linux
run: ./script/bundle-linux
shell: bash -euxo pipefail {0}
- name: '@actions/upload-artifact zed-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz'
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
with:
name: Zed-aarch64.exe
path: target/Zed-aarch64.exe
name: zed-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz
path: target/release/zed-*.tar.gz
if-no-files-found: error
- name: '@actions/upload-artifact zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz'
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
with:
name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz
path: target/zed-remote-server-*.gz
if-no-files-found: error
outputs:
zed: zed-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz
remote-server: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-x86_64-unknown-linux-gnu.tar.gz
timeout-minutes: 60
bundle_linux_arm64:
if: |-
(github.event.action == 'labeled' && github.event.label.name == 'run-bundling') ||
(github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling'))
runs-on: namespace-profile-8x32-ubuntu-2004-arm-m4
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: steps::setup_sentry
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
with:
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
- name: steps::setup_linux
run: ./script/linux
shell: bash -euxo pipefail {0}
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- name: ./script/bundle-linux
run: ./script/bundle-linux
shell: bash -euxo pipefail {0}
- name: '@actions/upload-artifact zed-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz'
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
with:
name: zed-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz
path: target/release/zed-*.tar.gz
if-no-files-found: error
- name: '@actions/upload-artifact zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz'
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
with:
name: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz
path: target/zed-remote-server-*.gz
if-no-files-found: error
outputs:
zed: zed-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz
remote-server: zed-remote-server-${{ github.event.pull_request.head.sha || github.sha }}-aarch64-unknown-linux-gnu.tar.gz
timeout-minutes: 60
bundle_windows_x86_64:
if: |-
@@ -226,9 +189,6 @@ jobs:
(github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling'))
runs-on: self-32vcpu-windows-2022
env:
CARGO_INCREMENTAL: 0
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
ZED_MINIDUMP_ENDPOINT: ${{ secrets.ZED_SENTRY_MINIDUMP_ENDPOINT }}
AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }}
AZURE_CLIENT_ID: ${{ secrets.AZURE_SIGNING_CLIENT_ID }}
AZURE_CLIENT_SECRET: ${{ secrets.AZURE_SIGNING_CLIENT_SECRET }}
@@ -247,16 +207,55 @@ jobs:
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
with:
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
- name: run_bundling::bundle_windows::bundle_windows
- name: run_bundling::bundle_windows
run: script/bundle-windows.ps1 -Architecture x86_64
shell: pwsh
working-directory: ${{ env.ZED_WORKSPACE }}
- name: '@actions/upload-artifact Zed-x86_64.exe'
- name: '@actions/upload-artifact Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.exe'
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
with:
name: Zed-x86_64.exe
path: target/Zed-x86_64.exe
name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.exe
path: ${{ env.SETUP_PATH }}
if-no-files-found: error
outputs:
zed: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.exe
timeout-minutes: 60
bundle_windows_arm64:
if: |-
(github.event.action == 'labeled' && github.event.label.name == 'run-bundling') ||
(github.event.action == 'synchronize' && contains(github.event.pull_request.labels.*.name, 'run-bundling'))
runs-on: self-32vcpu-windows-2022
env:
AZURE_TENANT_ID: ${{ secrets.AZURE_SIGNING_TENANT_ID }}
AZURE_CLIENT_ID: ${{ secrets.AZURE_SIGNING_CLIENT_ID }}
AZURE_CLIENT_SECRET: ${{ secrets.AZURE_SIGNING_CLIENT_SECRET }}
ACCOUNT_NAME: ${{ vars.AZURE_SIGNING_ACCOUNT_NAME }}
CERT_PROFILE_NAME: ${{ vars.AZURE_SIGNING_CERT_PROFILE_NAME }}
ENDPOINT: ${{ vars.AZURE_SIGNING_ENDPOINT }}
FILE_DIGEST: SHA256
TIMESTAMP_DIGEST: SHA256
TIMESTAMP_SERVER: http://timestamp.acs.microsoft.com
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: steps::setup_sentry
uses: matbour/setup-sentry-cli@3e938c54b3018bdd019973689ef984e033b0454b
with:
token: ${{ secrets.SENTRY_AUTH_TOKEN }}
- name: run_bundling::bundle_windows
run: script/bundle-windows.ps1 -Architecture aarch64
shell: pwsh
working-directory: ${{ env.ZED_WORKSPACE }}
- name: '@actions/upload-artifact Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.exe'
uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4
with:
name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.exe
path: ${{ env.SETUP_PATH }}
if-no-files-found: error
outputs:
zed: Zed_${{ github.event.pull_request.head.sha || github.sha }}-aarch64.exe
timeout-minutes: 60
concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.ref }}
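All of the bundling jobs above gate on the `run-bundling` pull-request label, firing either when the label is added or when the PR is synchronized while it already carries the label. One way to kick them off from the command line is to apply that label with the GitHub CLI; the PR number below is a placeholder:

# Apply the label the `if:` conditions above check for (12345 is a placeholder PR number).
gh pr edit 12345 --add-label run-bundling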

View File

@@ -66,10 +66,6 @@ jobs:
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: steps::cache_rust_dependencies_namespace
uses: namespacelabs/nscloud-cache-action@v1
with:
cache: rust
- name: steps::setup_pnpm
uses: pnpm/action-setup@fe02b34f77f8bc703788d5817da081398fad5dd2
with:
@@ -149,10 +145,6 @@ jobs:
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- name: steps::cache_rust_dependencies_namespace
uses: namespacelabs/nscloud-cache-action@v1
with:
cache: rust
- name: steps::setup_node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020
with:
@@ -164,7 +156,7 @@ jobs:
run: cargo install cargo-nextest --locked
shell: bash -euxo pipefail {0}
- name: steps::clear_target_dir_if_large
run: ./script/clear-target-dir-if-larger-than 250
run: ./script/clear-target-dir-if-larger-than 100
shell: bash -euxo pipefail {0}
- name: steps::cargo_nextest
run: cargo nextest run --workspace --no-fail-fast --failure-output immediate-final
@@ -222,10 +214,10 @@ jobs:
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: steps::cache_rust_dependencies_namespace
uses: namespacelabs/nscloud-cache-action@v1
- name: steps::cache_rust_dependencies
uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6
with:
cache: rust
save-if: ${{ github.ref == 'refs/heads/main' }}
- name: steps::setup_linux
run: ./script/linux
shell: bash -euxo pipefail {0}
@@ -269,10 +261,6 @@ jobs:
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- name: steps::cache_rust_dependencies_namespace
uses: namespacelabs/nscloud-cache-action@v1
with:
cache: rust
- name: cargo build -p collab
run: cargo build -p collab
shell: bash -euxo pipefail {0}
@@ -329,10 +317,6 @@ jobs:
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: steps::cache_rust_dependencies_namespace
uses: namespacelabs/nscloud-cache-action@v1
with:
cache: rust
- name: run_tests::check_dependencies::install_cargo_machete
uses: clechasseur/rs-cargo@8435b10f6e71c2e3d4d3b7573003a8ce4bfc6386
with:
@@ -366,10 +350,10 @@ jobs:
mkdir -p ./../.cargo
cp ./.cargo/ci-config.toml ./../.cargo/config.toml
shell: bash -euxo pipefail {0}
- name: steps::cache_rust_dependencies_namespace
uses: namespacelabs/nscloud-cache-action@v1
- name: steps::cache_rust_dependencies
uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6
with:
cache: rust
save-if: ${{ github.ref == 'refs/heads/main' }}
- name: run_tests::check_docs::lychee_link_check
uses: lycheeverse/lychee-action@82202e5e9c2f4ef1a55a3d02563e1cb6041e5332
with:
@@ -408,10 +392,6 @@ jobs:
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: steps::cache_rust_dependencies_namespace
uses: namespacelabs/nscloud-cache-action@v1
with:
cache: rust
- name: ./script/check-licenses
run: ./script/check-licenses
shell: bash -euxo pipefail {0}

View File

@@ -23,10 +23,10 @@ jobs:
mkdir -p ./../.cargo
cp ./.cargo/ci-config.toml ./../.cargo/config.toml
shell: bash -euxo pipefail {0}
- name: steps::cache_rust_dependencies_namespace
uses: namespacelabs/nscloud-cache-action@v1
- name: steps::cache_rust_dependencies
uses: swatinem/rust-cache@9d47c6ad4b02e050fd481d890b2ea34778fd09d6
with:
cache: rust
save-if: ${{ github.ref == 'refs/heads/main' }}
- name: steps::setup_linux
run: ./script/linux
shell: bash -euxo pipefail {0}
@@ -37,7 +37,7 @@ jobs:
run: cargo install cargo-nextest --locked
shell: bash -euxo pipefail {0}
- name: steps::clear_target_dir_if_large
run: ./script/clear-target-dir-if-larger-than 250
run: ./script/clear-target-dir-if-larger-than 100
shell: bash -euxo pipefail {0}
- name: ./script/run-unit-evals
run: ./script/run-unit-evals

Cargo.lock (generated, 25 changes)
View File

@@ -1351,7 +1351,6 @@ dependencies = [
"anyhow",
"log",
"simplelog",
"tempfile",
"windows 0.61.3",
"winresource",
]
@@ -5838,6 +5837,8 @@ name = "extension"
version = "0.1.0"
dependencies = [
"anyhow",
"async-compression",
"async-tar",
"async-trait",
"collections",
"dap",
@@ -6961,7 +6962,7 @@ dependencies = [
[[package]]
name = "gh-workflow"
version = "0.8.0"
source = "git+https://github.com/zed-industries/gh-workflow?rev=3eaa84abca0778eb54272f45a312cb24f9a0b435#3eaa84abca0778eb54272f45a312cb24f9a0b435"
source = "git+https://github.com/zed-industries/gh-workflow?rev=0090c6b6ef82fff02bc8616645953e778d1acc08#0090c6b6ef82fff02bc8616645953e778d1acc08"
dependencies = [
"async-trait",
"derive_more 2.0.1",
@@ -6978,7 +6979,7 @@ dependencies = [
[[package]]
name = "gh-workflow-macros"
version = "0.8.0"
source = "git+https://github.com/zed-industries/gh-workflow?rev=3eaa84abca0778eb54272f45a312cb24f9a0b435#3eaa84abca0778eb54272f45a312cb24f9a0b435"
source = "git+https://github.com/zed-industries/gh-workflow?rev=0090c6b6ef82fff02bc8616645953e778d1acc08#0090c6b6ef82fff02bc8616645953e778d1acc08"
dependencies = [
"heck 0.5.0",
"quote",
@@ -7117,8 +7118,6 @@ dependencies = [
"picker",
"pretty_assertions",
"project",
"recent_projects",
"remote",
"schemars 1.0.4",
"serde",
"serde_json",
@@ -12718,6 +12717,12 @@ version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5da3b0203fd7ee5720aa0b5e790b591aa5d3f41c3ed2c34a3a393382198af2f7"
[[package]]
name = "pollster"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2f3a9f18d041e6d0e102a0a46750538147e5e8992d3b4873aaafee2520b00ce3"
[[package]]
name = "portable-atomic"
version = "1.11.1"
@@ -12766,7 +12771,7 @@ dependencies = [
"log",
"parking_lot",
"pin-project",
"pollster",
"pollster 0.2.5",
"static_assertions",
"thiserror 1.0.69",
]
@@ -14318,7 +14323,6 @@ dependencies = [
"gpui",
"log",
"rand 0.9.2",
"rayon",
"sum_tree",
"unicode-segmentation",
"util",
@@ -16244,6 +16248,7 @@ checksum = "2b2231b7c3057d5e4ad0156fb3dc807d900806020c5ffa3ee6ff2c8c76fb8520"
name = "streaming_diff"
version = "0.1.0"
dependencies = [
"gpui",
"ordered-float 2.10.1",
"rand 0.9.2",
"rope",
@@ -16362,9 +16367,11 @@ version = "0.1.0"
dependencies = [
"arrayvec",
"ctor",
"futures 0.3.31",
"futures-lite 1.13.0",
"log",
"pollster 0.4.0",
"rand 0.9.2",
"rayon",
"zlog",
]
@@ -21742,7 +21749,6 @@ dependencies = [
"futures 0.3.31",
"gpui",
"gpui_tokio",
"indoc",
"language",
"language_extension",
"language_model",
@@ -21753,7 +21759,6 @@ dependencies = [
"ordered-float 2.10.1",
"paths",
"polars",
"pretty_assertions",
"project",
"prompt_store",
"pulldown-cmark 0.12.2",

View File

@@ -508,7 +508,7 @@ fork = "0.2.0"
futures = "0.3"
futures-batch = "0.6.1"
futures-lite = "1.13"
gh-workflow = { git = "https://github.com/zed-industries/gh-workflow", rev = "3eaa84abca0778eb54272f45a312cb24f9a0b435" }
gh-workflow = { git = "https://github.com/zed-industries/gh-workflow", rev = "0090c6b6ef82fff02bc8616645953e778d1acc08" }
git2 = { version = "0.20.1", default-features = false }
globset = "0.4"
handlebars = "4.3"

View File

@@ -8,110 +8,108 @@
; to other areas too.
<all>
= @cole-miller
= @ConradIrwin
= @danilo-leal
= @dinocosta
= @HactarCE
= @kubkon
= @maxdeviant
= @p1n3appl3
= @probably-neb
= @smitbarmase
= @SomeoneToIgnore
= @Veykril
ai
= @benbrandt
= @bennetbo
= @danilo-leal
= @rtfeldman
audio
= @dvdsk
crashes
= @p1n3appl3
= @Veykril
debugger
= @Anthony-Eid
= @kubkon
= @osiewicz
design
= @danilo-leal
docs
= @probably-neb
extension
= @danilo-leal
= @Veykril
= @kubkon
= @p1n3appl3
= @dinocosta
= @smitbarmase
= @cole-miller
= @HactarCE
vim
= @ConradIrwin
= @probably-neb
= @p1n3appl3
= @dinocosta
gpui
= @mikayla-maki
git
= @cole-miller
= @danilo-leal
= @dvdsk
gpui
= @Anthony-Eid
= @cameron1024
= @mikayla-maki
linux
= @dvdsk
= @smitbarmase
= @p1n3appl3
= @cole-miller
= @probably-neb
windows
= @reflectronic
= @localcc
pickers
= @p1n3appl3
= @dvdsk
= @SomeoneToIgnore
audio
= @dvdsk
helix
= @kubkon
languages
= @osiewicz
= @probably-neb
= @smitbarmase
= @SomeoneToIgnore
= @Veykril
linux
= @cole-miller
= @dvdsk
= @p1n3appl3
= @probably-neb
= @smitbarmase
lsp
= @osiewicz
= @smitbarmase
= @SomeoneToIgnore
= @Veykril
multi_buffer
= @Veykril
= @SomeoneToIgnore
pickers
= @dvdsk
= @p1n3appl3
= @SomeoneToIgnore
project_panel
= @smitbarmase
settings_ui
= @Anthony-Eid
= @danilo-leal
= @probably-neb
tasks
= @SomeoneToIgnore
= @Veykril
terminal
= @kubkon
= @Veykril
vim
= @ConradIrwin
= @dinocosta
debugger
= @kubkon
= @osiewicz
= @Anthony-Eid
extension
= @kubkon
settings_ui
= @probably-neb
= @danilo-leal
= @Anthony-Eid
crashes
= @p1n3appl3
= @Veykril
ai
= @rtfeldman
= @danilo-leal
= @benbrandt
= @bennetbo
design
= @danilo-leal
multi_buffer
= @Veykril
= @SomeoneToIgnore
lsp
= @osiewicz
= @Veykril
= @smitbarmase
= @SomeoneToIgnore
languages
= @osiewicz
= @Veykril
= @smitbarmase
= @SomeoneToIgnore
= @probably-neb
windows
= @localcc
= @reflectronic
project_panel
= @smitbarmase
tasks
= @SomeoneToIgnore
= @Veykril
docs
= @probably-neb

View File

@@ -1,4 +0,0 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M11.3335 13.3333L8.00017 10L4.66685 13.3333" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M11.3335 2.66669L8.00017 6.00002L4.66685 2.66669" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
</svg>

Deleted SVG icon (Before: 382 B)

View File

Binary image changed (Before: 5.5 KiB, After: 5.5 KiB)

View File

Binary image changed (Before: 3.9 KiB, After: 3.9 KiB)

View File

@@ -43,8 +43,7 @@
"f11": "zed::ToggleFullScreen",
"ctrl-alt-z": "edit_prediction::RateCompletions",
"ctrl-alt-shift-i": "edit_prediction::ToggleMenu",
"ctrl-alt-l": "lsp_tool::ToggleMenu",
"ctrl-alt-.": "project_panel::ToggleHideHidden"
"ctrl-alt-l": "lsp_tool::ToggleMenu"
}
},
{
@@ -236,13 +235,12 @@
"ctrl-alt-n": "agent::NewTextThread",
"ctrl-shift-h": "agent::OpenHistory",
"ctrl-alt-c": "agent::OpenSettings",
"ctrl-alt-p": "agent::ManageProfiles",
"ctrl-alt-l": "agent::OpenRulesLibrary",
"ctrl-alt-p": "agent::OpenRulesLibrary",
"ctrl-i": "agent::ToggleProfileSelector",
"ctrl-alt-/": "agent::ToggleModelSelector",
"ctrl-shift-a": "agent::ToggleContextPicker",
"ctrl-shift-j": "agent::ToggleNavigationMenu",
"ctrl-alt-i": "agent::ToggleOptionsMenu",
"ctrl-shift-i": "agent::ToggleOptionsMenu",
"ctrl-alt-shift-n": "agent::ToggleNewThreadMenu",
"shift-alt-escape": "agent::ExpandMessageEditor",
"ctrl->": "agent::AddSelectionToThread",
@@ -409,7 +407,6 @@
"bindings": {
"escape": "project_search::ToggleFocus",
"shift-find": "search::FocusSearch",
"shift-enter": "project_search::ToggleAllSearchResults",
"ctrl-shift-f": "search::FocusSearch",
"ctrl-shift-h": "search::ToggleReplace",
"alt-ctrl-g": "search::ToggleRegex",
@@ -482,7 +479,6 @@
"alt-w": "search::ToggleWholeWord",
"alt-find": "project_search::ToggleFilters",
"alt-ctrl-f": "project_search::ToggleFilters",
"shift-enter": "project_search::ToggleAllSearchResults",
"ctrl-alt-shift-r": "search::ToggleRegex",
"ctrl-alt-shift-x": "search::ToggleRegex",
"alt-r": "search::ToggleRegex",
@@ -945,7 +941,6 @@
"ctrl-g ctrl-g": "git::Fetch",
"ctrl-g up": "git::Push",
"ctrl-g down": "git::Pull",
"ctrl-g shift-down": "git::PullRebase",
"ctrl-g shift-up": "git::ForcePush",
"ctrl-g d": "git::Diff",
"ctrl-g backspace": "git::RestoreTrackedFiles",
@@ -1257,14 +1252,6 @@
"ctrl-shift-enter": "workspace::OpenWithSystem"
}
},
{
"context": "GitWorktreeSelector || (GitWorktreeSelector > Picker > Editor)",
"use_key_equivalents": true,
"bindings": {
"ctrl-shift-space": "git::WorktreeFromDefaultOnWindow",
"ctrl-space": "git::WorktreeFromDefault"
}
},
{
"context": "SettingsWindow",
"use_key_equivalents": true,

View File

@@ -49,8 +49,7 @@
"ctrl-cmd-f": "zed::ToggleFullScreen",
"ctrl-cmd-z": "edit_prediction::RateCompletions",
"ctrl-cmd-i": "edit_prediction::ToggleMenu",
"ctrl-cmd-l": "lsp_tool::ToggleMenu",
"cmd-alt-.": "project_panel::ToggleHideHidden"
"ctrl-cmd-l": "lsp_tool::ToggleMenu"
}
},
{
@@ -275,13 +274,12 @@
"cmd-alt-n": "agent::NewTextThread",
"cmd-shift-h": "agent::OpenHistory",
"cmd-alt-c": "agent::OpenSettings",
"cmd-alt-l": "agent::OpenRulesLibrary",
"cmd-alt-p": "agent::ManageProfiles",
"cmd-alt-p": "agent::OpenRulesLibrary",
"cmd-i": "agent::ToggleProfileSelector",
"cmd-alt-/": "agent::ToggleModelSelector",
"cmd-shift-a": "agent::ToggleContextPicker",
"cmd-shift-j": "agent::ToggleNavigationMenu",
"cmd-alt-m": "agent::ToggleOptionsMenu",
"cmd-shift-i": "agent::ToggleOptionsMenu",
"cmd-alt-shift-n": "agent::ToggleNewThreadMenu",
"shift-alt-escape": "agent::ExpandMessageEditor",
"cmd->": "agent::AddSelectionToThread",
@@ -470,7 +468,6 @@
"bindings": {
"escape": "project_search::ToggleFocus",
"cmd-shift-j": "project_search::ToggleFilters",
"shift-enter": "project_search::ToggleAllSearchResults",
"cmd-shift-f": "search::FocusSearch",
"cmd-shift-h": "search::ToggleReplace",
"alt-cmd-g": "search::ToggleRegex",
@@ -499,7 +496,6 @@
"bindings": {
"escape": "project_search::ToggleFocus",
"cmd-shift-j": "project_search::ToggleFilters",
"shift-enter": "project_search::ToggleAllSearchResults",
"cmd-shift-h": "search::ToggleReplace",
"alt-cmd-g": "search::ToggleRegex",
"alt-cmd-x": "search::ToggleRegex"
@@ -1038,7 +1034,6 @@
"ctrl-g ctrl-g": "git::Fetch",
"ctrl-g up": "git::Push",
"ctrl-g down": "git::Pull",
"ctrl-g shift-down": "git::PullRebase",
"ctrl-g shift-up": "git::ForcePush",
"ctrl-g d": "git::Diff",
"ctrl-g backspace": "git::RestoreTrackedFiles",
@@ -1362,14 +1357,6 @@
"ctrl-shift-enter": "workspace::OpenWithSystem"
}
},
{
"context": "GitWorktreeSelector || (GitWorktreeSelector > Picker > Editor)",
"use_key_equivalents": true,
"bindings": {
"ctrl-shift-space": "git::WorktreeFromDefaultOnWindow",
"ctrl-space": "git::WorktreeFromDefault"
}
},
{
"context": "SettingsWindow",
"use_key_equivalents": true,

View File

@@ -41,8 +41,7 @@
"shift-f11": "debugger::StepOut",
"f11": "zed::ToggleFullScreen",
"ctrl-shift-i": "edit_prediction::ToggleMenu",
"shift-alt-l": "lsp_tool::ToggleMenu",
"ctrl-alt-.": "project_panel::ToggleHideHidden"
"shift-alt-l": "lsp_tool::ToggleMenu"
}
},
{
@@ -237,13 +236,12 @@
"shift-alt-n": "agent::NewTextThread",
"ctrl-shift-h": "agent::OpenHistory",
"shift-alt-c": "agent::OpenSettings",
"shift-alt-l": "agent::OpenRulesLibrary",
"shift-alt-p": "agent::ManageProfiles",
"shift-alt-p": "agent::OpenRulesLibrary",
"ctrl-i": "agent::ToggleProfileSelector",
"shift-alt-/": "agent::ToggleModelSelector",
"ctrl-shift-a": "agent::ToggleContextPicker",
"ctrl-shift-j": "agent::ToggleNavigationMenu",
"ctrl-alt-i": "agent::ToggleOptionsMenu",
"ctrl-shift-i": "agent::ToggleOptionsMenu",
// "ctrl-shift-alt-n": "agent::ToggleNewThreadMenu",
"shift-alt-escape": "agent::ExpandMessageEditor",
"ctrl-shift-.": "agent::AddSelectionToThread",
@@ -490,7 +488,6 @@
"alt-c": "search::ToggleCaseSensitive",
"alt-w": "search::ToggleWholeWord",
"alt-f": "project_search::ToggleFilters",
"shift-enter": "project_search::ToggleAllSearchResults",
"alt-r": "search::ToggleRegex",
// "ctrl-shift-alt-x": "search::ToggleRegex",
"ctrl-k shift-enter": "pane::TogglePinTab"
@@ -954,7 +951,6 @@
"ctrl-g ctrl-g": "git::Fetch",
"ctrl-g up": "git::Push",
"ctrl-g down": "git::Pull",
"ctrl-g shift-down": "git::PullRebase",
"ctrl-g shift-up": "git::ForcePush",
"ctrl-g d": "git::Diff",
"ctrl-g backspace": "git::RestoreTrackedFiles",
@@ -1284,14 +1280,6 @@
"shift-alt-a": "onboarding::OpenAccount"
}
},
{
"context": "GitWorktreeSelector || (GitWorktreeSelector > Picker > Editor)",
"use_key_equivalents": true,
"bindings": {
"ctrl-shift-space": "git::WorktreeFromDefaultOnWindow",
"ctrl-space": "git::WorktreeFromDefault"
}
},
{
"context": "SettingsWindow",
"use_key_equivalents": true,

View File

@@ -255,19 +255,6 @@
// Whether to display inline and alongside documentation for items in the
// completions menu
"show_completion_documentation": true,
// When to show the scrollbar in the completion menu.
// This setting can take four values:
//
// 1. Show the scrollbar if there's important information or
// follow the system's configured behavior
// "auto"
// 2. Match the system's configured behavior:
// "system"
// 3. Always show the scrollbar:
// "always"
// 4. Never show the scrollbar:
// "never" (default)
"completion_menu_scrollbar": "never",
// Show method signatures in the editor, when inside parentheses.
"auto_signature_help": false,
// Whether to show the signature help after completion or a bracket pair inserted.
@@ -605,7 +592,7 @@
// to both the horizontal and vertical delta values while scrolling. Fast scrolling
// happens when a user holds the alt or option key while scrolling.
"fast_scroll_sensitivity": 4.0,
"relative_line_numbers": "disabled",
"relative_line_numbers": false,
// If 'search_wrap' is disabled, search result do not wrap around the end of the file.
"search_wrap": true,
// Search options to enable by default when opening new project and buffer searches.
@@ -1247,9 +1234,6 @@
// that are overly broad can slow down Zed's file scanning. `file_scan_exclusions` takes
// precedence over these inclusions.
"file_scan_inclusions": [".env*"],
// Globs to match files that will be considered "hidden". These files can be hidden from the
// project panel by toggling the "hide_hidden" setting.
"hidden_files": ["**/.*"],
// Git gutter behavior configuration.
"git": {
// Control whether the git gutter is shown. May take 2 values:
@@ -1737,9 +1721,6 @@
"allowed": true
}
},
"HTML+ERB": {
"language_servers": ["herb", "!ruby-lsp", "..."]
},
"Java": {
"prettier": {
"allowed": true,
@@ -1762,9 +1743,6 @@
"allowed": true
}
},
"JS+ERB": {
"language_servers": ["!ruby-lsp", "..."]
},
"Kotlin": {
"language_servers": ["!kotlin-language-server", "kotlin-lsp", "..."]
},
@@ -1779,7 +1757,6 @@
"Markdown": {
"format_on_save": "off",
"use_on_type_format": false,
"remove_trailing_whitespace_on_save": false,
"allow_rewrap": "anywhere",
"soft_wrap": "editor_width",
"prettier": {
@@ -1870,9 +1847,6 @@
"allowed": true
}
},
"YAML+ERB": {
"language_servers": ["!ruby-lsp", "..."]
},
"Zig": {
"language_servers": ["zls", "..."]
}

View File

@@ -361,10 +361,12 @@ async fn build_buffer_diff(
) -> Result<Entity<BufferDiff>> {
let buffer = cx.update(|cx| buffer.read(cx).snapshot())?;
let executor = cx.background_executor().clone();
let old_text_rope = cx
.background_spawn({
let old_text = old_text.clone();
async move { Rope::from(old_text.as_str()) }
let executor = executor.clone();
async move { Rope::from_str(old_text.as_str(), &executor) }
})
.await;
let base_buffer = cx
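For reference, the call shape this hunk moves to: the Rope for the old text is now built on the background executor, and Rope::from_str takes that executor explicitly. A minimal standalone sketch of the pattern, assuming an async gpui context (written here as gpui::AsyncApp, which is an assumption) exposing the background_executor()/background_spawn() calls used above; the function name rope_in_background and the String argument type are illustrative, not part of this change:

// Sketch only: build a Rope off the main thread, mirroring build_buffer_diff above.
// Assumes the Rope::from_str(&str, &BackgroundExecutor) signature shown in this diff.
async fn rope_in_background(cx: &gpui::AsyncApp, old_text: String) -> rope::Rope {
    let executor = cx.background_executor().clone();
    cx.background_spawn(async move { rope::Rope::from_str(old_text.as_str(), &executor) })
        .await
}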

View File

@@ -3,7 +3,9 @@ use buffer_diff::BufferDiff;
use clock;
use collections::BTreeMap;
use futures::{FutureExt, StreamExt, channel::mpsc};
use gpui::{App, AppContext, AsyncApp, Context, Entity, Subscription, Task, WeakEntity};
use gpui::{
App, AppContext, AsyncApp, BackgroundExecutor, Context, Entity, Subscription, Task, WeakEntity,
};
use language::{Anchor, Buffer, BufferEvent, DiskState, Point, ToPoint};
use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle};
use std::{cmp, ops::Range, sync::Arc};
@@ -321,6 +323,7 @@ impl ActionLog {
let unreviewed_edits = tracked_buffer.unreviewed_edits.clone();
let edits = diff_snapshots(&old_snapshot, &new_snapshot);
let mut has_user_changes = false;
let executor = cx.background_executor().clone();
async move {
if let ChangeAuthor::User = author {
has_user_changes = apply_non_conflicting_edits(
@@ -328,6 +331,7 @@ impl ActionLog {
edits,
&mut base_text,
new_snapshot.as_rope(),
&executor,
);
}
@@ -382,6 +386,7 @@ impl ActionLog {
let agent_diff_base = tracked_buffer.diff_base.clone();
let git_diff_base = git_diff.read(cx).base_text().as_rope().clone();
let buffer_text = tracked_buffer.snapshot.as_rope().clone();
let executor = cx.background_executor().clone();
anyhow::Ok(cx.background_spawn(async move {
let mut old_unreviewed_edits = old_unreviewed_edits.into_iter().peekable();
let committed_edits = language::line_diff(
@@ -416,8 +421,11 @@ impl ActionLog {
),
new_agent_diff_base.max_point(),
));
new_agent_diff_base
.replace(old_byte_start..old_byte_end, &unreviewed_new);
new_agent_diff_base.replace(
old_byte_start..old_byte_end,
&unreviewed_new,
&executor,
);
row_delta +=
unreviewed.new_len() as i32 - unreviewed.old_len() as i32;
}
@@ -611,6 +619,7 @@ impl ActionLog {
.snapshot
.text_for_range(new_range)
.collect::<String>(),
cx.background_executor(),
);
delta += edit.new_len() as i32 - edit.old_len() as i32;
false
@@ -824,6 +833,7 @@ fn apply_non_conflicting_edits(
edits: Vec<Edit<u32>>,
old_text: &mut Rope,
new_text: &Rope,
executor: &BackgroundExecutor,
) -> bool {
let mut old_edits = patch.edits().iter().cloned().peekable();
let mut new_edits = edits.into_iter().peekable();
@@ -877,6 +887,7 @@ fn apply_non_conflicting_edits(
old_text.replace(
old_bytes,
&new_text.chunks_in_range(new_bytes).collect::<String>(),
executor,
);
applied_delta += new_edit.new_len() as i32 - new_edit.old_len() as i32;
has_made_changes = true;
@@ -2282,6 +2293,7 @@ mod tests {
old_text.replace(
old_start..old_end,
&new_text.slice_rows(edit.new.clone()).to_string(),
cx.background_executor(),
);
}
pretty_assertions::assert_eq!(old_text.to_string(), new_text.to_string());

View File

@@ -305,18 +305,20 @@ impl SearchMatrix {
#[cfg(test)]
mod tests {
use super::*;
use gpui::TestAppContext;
use indoc::indoc;
use language::{BufferId, TextBuffer};
use rand::prelude::*;
use text::ReplicaId;
use util::test::{generate_marked_text, marked_text_ranges};
#[test]
fn test_empty_query() {
#[gpui::test]
fn test_empty_query(cx: &mut gpui::TestAppContext) {
let buffer = TextBuffer::new(
ReplicaId::LOCAL,
BufferId::new(1).unwrap(),
"Hello world\nThis is a test\nFoo bar baz",
cx.background_executor(),
);
let snapshot = buffer.snapshot();
@@ -325,12 +327,13 @@ mod tests {
assert_eq!(finish(finder), None);
}
#[test]
fn test_streaming_exact_match() {
#[gpui::test]
fn test_streaming_exact_match(cx: &mut gpui::TestAppContext) {
let buffer = TextBuffer::new(
ReplicaId::LOCAL,
BufferId::new(1).unwrap(),
"Hello world\nThis is a test\nFoo bar baz",
cx.background_executor(),
);
let snapshot = buffer.snapshot();
@@ -349,8 +352,8 @@ mod tests {
assert_eq!(finish(finder), Some("This is a test".to_string()));
}
#[test]
fn test_streaming_fuzzy_match() {
#[gpui::test]
fn test_streaming_fuzzy_match(cx: &mut gpui::TestAppContext) {
let buffer = TextBuffer::new(
ReplicaId::LOCAL,
BufferId::new(1).unwrap(),
@@ -363,6 +366,7 @@ mod tests {
return x * y;
}
"},
cx.background_executor(),
);
let snapshot = buffer.snapshot();
@@ -383,12 +387,13 @@ mod tests {
);
}
#[test]
fn test_incremental_improvement() {
#[gpui::test]
fn test_incremental_improvement(cx: &mut gpui::TestAppContext) {
let buffer = TextBuffer::new(
ReplicaId::LOCAL,
BufferId::new(1).unwrap(),
"Line 1\nLine 2\nLine 3\nLine 4\nLine 5",
cx.background_executor(),
);
let snapshot = buffer.snapshot();
@@ -408,8 +413,8 @@ mod tests {
assert_eq!(finish(finder), Some("Line 3\nLine 4".to_string()));
}
#[test]
fn test_incomplete_lines_buffering() {
#[gpui::test]
fn test_incomplete_lines_buffering(cx: &mut gpui::TestAppContext) {
let buffer = TextBuffer::new(
ReplicaId::LOCAL,
BufferId::new(1).unwrap(),
@@ -418,6 +423,7 @@ mod tests {
jumps over the lazy dog
Pack my box with five dozen liquor jugs
"},
cx.background_executor(),
);
let snapshot = buffer.snapshot();
@@ -435,8 +441,8 @@ mod tests {
);
}
#[test]
fn test_multiline_fuzzy_match() {
#[gpui::test]
fn test_multiline_fuzzy_match(cx: &mut gpui::TestAppContext) {
let buffer = TextBuffer::new(
ReplicaId::LOCAL,
BufferId::new(1).unwrap(),
@@ -456,6 +462,7 @@ mod tests {
}
}
"#},
cx.background_executor(),
);
let snapshot = buffer.snapshot();
@@ -509,7 +516,7 @@ mod tests {
}
#[gpui::test(iterations = 100)]
fn test_resolve_location_single_line(mut rng: StdRng) {
fn test_resolve_location_single_line(mut rng: StdRng, cx: &mut TestAppContext) {
assert_location_resolution(
concat!(
" Lorem\n",
@@ -519,11 +526,12 @@ mod tests {
),
"ipsum",
&mut rng,
cx,
);
}
#[gpui::test(iterations = 100)]
fn test_resolve_location_multiline(mut rng: StdRng) {
fn test_resolve_location_multiline(mut rng: StdRng, cx: &mut TestAppContext) {
assert_location_resolution(
concat!(
" Lorem\n",
@@ -533,11 +541,12 @@ mod tests {
),
"ipsum\ndolor sit amet",
&mut rng,
cx,
);
}
#[gpui::test(iterations = 100)]
fn test_resolve_location_function_with_typo(mut rng: StdRng) {
fn test_resolve_location_function_with_typo(mut rng: StdRng, cx: &mut TestAppContext) {
assert_location_resolution(
indoc! {"
«fn foo1(a: usize) -> usize {
@@ -550,11 +559,12 @@ mod tests {
"},
"fn foo1(a: usize) -> u32 {\n40\n}",
&mut rng,
cx,
);
}
#[gpui::test(iterations = 100)]
fn test_resolve_location_class_methods(mut rng: StdRng) {
fn test_resolve_location_class_methods(mut rng: StdRng, cx: &mut TestAppContext) {
assert_location_resolution(
indoc! {"
class Something {
@@ -575,11 +585,12 @@ mod tests {
six() { return 6666; }
"},
&mut rng,
cx,
);
}
#[gpui::test(iterations = 100)]
fn test_resolve_location_imports_no_match(mut rng: StdRng) {
fn test_resolve_location_imports_no_match(mut rng: StdRng, cx: &mut TestAppContext) {
assert_location_resolution(
indoc! {"
use std::ops::Range;
@@ -609,11 +620,12 @@ mod tests {
use std::sync::Arc;
"},
&mut rng,
cx,
);
}
#[gpui::test(iterations = 100)]
fn test_resolve_location_nested_closure(mut rng: StdRng) {
fn test_resolve_location_nested_closure(mut rng: StdRng, cx: &mut TestAppContext) {
assert_location_resolution(
indoc! {"
impl Foo {
@@ -641,11 +653,12 @@ mod tests {
" });",
),
&mut rng,
cx,
);
}
#[gpui::test(iterations = 100)]
fn test_resolve_location_tool_invocation(mut rng: StdRng) {
fn test_resolve_location_tool_invocation(mut rng: StdRng, cx: &mut TestAppContext) {
assert_location_resolution(
indoc! {r#"
let tool = cx
@@ -673,11 +686,12 @@ mod tests {
" .output;",
),
&mut rng,
cx,
);
}
#[gpui::test]
fn test_line_hint_selection() {
fn test_line_hint_selection(cx: &mut TestAppContext) {
let text = indoc! {r#"
fn first_function() {
return 42;
@@ -696,6 +710,7 @@ mod tests {
ReplicaId::LOCAL,
BufferId::new(1).unwrap(),
text.to_string(),
cx.background_executor(),
);
let snapshot = buffer.snapshot();
let mut matcher = StreamingFuzzyMatcher::new(snapshot.clone());
@@ -727,9 +742,19 @@ mod tests {
}
#[track_caller]
fn assert_location_resolution(text_with_expected_range: &str, query: &str, rng: &mut StdRng) {
fn assert_location_resolution(
text_with_expected_range: &str,
query: &str,
rng: &mut StdRng,
cx: &mut TestAppContext,
) {
let (text, expected_ranges) = marked_text_ranges(text_with_expected_range, false);
let buffer = TextBuffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), text.clone());
let buffer = TextBuffer::new(
ReplicaId::LOCAL,
BufferId::new(1).unwrap(),
text.clone(),
cx.background_executor(),
);
let snapshot = buffer.snapshot();
let mut matcher = StreamingFuzzyMatcher::new(snapshot);
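Every test conversion in this hunk follows the same shape: #[test] becomes #[gpui::test], the function gains a &mut gpui::TestAppContext, and that context's executor is passed as the new final argument of TextBuffer::new. A minimal sketch of the shape (the test name and buffer contents are illustrative only):

use language::{BufferId, TextBuffer};
use text::ReplicaId;

#[gpui::test]
fn test_buffer_construction_takes_executor(cx: &mut gpui::TestAppContext) {
    // Per this diff, TextBuffer::new now takes the background executor as its
    // fourth argument.
    let buffer = TextBuffer::new(
        ReplicaId::LOCAL,
        BufferId::new(1).unwrap(),
        "Hello world",
        cx.background_executor(),
    );
    let _snapshot = buffer.snapshot();
}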

View File

@@ -569,6 +569,7 @@ mod tests {
use prompt_store::ProjectContext;
use serde_json::json;
use settings::SettingsStore;
use text::Rope;
use util::{path, rel_path::rel_path};
#[gpui::test]
@@ -741,7 +742,7 @@ mod tests {
// Create the file
fs.save(
path!("/root/src/main.rs").as_ref(),
&"initial content".into(),
&Rope::from_str_small("initial content"),
language::LineEnding::Unix,
)
.await
@@ -908,7 +909,7 @@ mod tests {
// Create a simple file with trailing whitespace
fs.save(
path!("/root/src/main.rs").as_ref(),
&"initial content".into(),
&Rope::from_str_small("initial content"),
language::LineEnding::Unix,
)
.await

View File

@@ -178,7 +178,6 @@ impl AcpConnection {
meta: Some(serde_json::json!({
// Experimental: Allow for rendering terminal output from the agents
"terminal_output": true,
"terminal-auth": true,
})),
},
client_info: Some(acp::Implementation {

View File

@@ -4,7 +4,7 @@ use acp_thread::{AcpThread, AgentThreadEntry};
use agent::HistoryStore;
use agent_client_protocol::{self as acp, ToolCallId};
use collections::HashMap;
use editor::{Editor, EditorMode, MinimapVisibility, SizingBehavior};
use editor::{Editor, EditorMode, MinimapVisibility};
use gpui::{
AnyEntity, App, AppContext as _, Entity, EntityId, EventEmitter, FocusHandle, Focusable,
ScrollHandle, SharedString, TextStyleRefinement, WeakEntity, Window,
@@ -357,7 +357,7 @@ fn create_editor_diff(
EditorMode::Full {
scale_ui_elements_with_buffer_font_size: false,
show_active_line_background: false,
sizing_behavior: SizingBehavior::SizeByContent,
sized_by_content: true,
},
diff.read(cx).multibuffer().clone(),
None,

View File

@@ -17,9 +17,7 @@ use client::zed_urls;
use cloud_llm_client::PlanV1;
use collections::{HashMap, HashSet};
use editor::scroll::Autoscroll;
use editor::{
Editor, EditorEvent, EditorMode, MultiBuffer, PathKey, SelectionEffects, SizingBehavior,
};
use editor::{Editor, EditorEvent, EditorMode, MultiBuffer, PathKey, SelectionEffects};
use file_icons::FileIcons;
use fs::Fs;
use futures::FutureExt as _;
@@ -104,7 +102,7 @@ impl ThreadError {
{
Self::AuthenticationRequired(acp_error.message.clone().into())
} else {
let string = format!("{:#}", error);
let string = error.to_string();
// TODO: we should have Gemini return better errors here.
if agent.clone().downcast::<agent_servers::Gemini>().is_some()
&& string.contains("Could not load the default credentials")
@@ -113,7 +111,7 @@ impl ThreadError {
{
Self::AuthenticationRequired(string.into())
} else {
Self::Other(string.into())
Self::Other(error.to_string().into())
}
}
}
@@ -795,8 +793,7 @@ impl AcpThreadView {
if let Some(load_err) = err.downcast_ref::<LoadError>() {
self.thread_state = ThreadState::LoadError(load_err.clone());
} else {
self.thread_state =
ThreadState::LoadError(LoadError::Other(format!("{:#}", err).into()))
self.thread_state = ThreadState::LoadError(LoadError::Other(err.to_string().into()))
}
if self.message_editor.focus_handle(cx).is_focused(window) {
self.focus_handle.focus(window)
@@ -884,7 +881,6 @@ impl AcpThreadView {
cx: &mut Context<Self>,
) {
self.set_editor_is_expanded(!self.editor_expanded, cx);
cx.stop_propagation();
cx.notify();
}
@@ -896,7 +892,7 @@ impl AcpThreadView {
EditorMode::Full {
scale_ui_elements_with_buffer_font_size: false,
show_active_line_background: false,
sizing_behavior: SizingBehavior::ExcludeOverscrollMargin,
sized_by_content: false,
},
cx,
)
@@ -1473,106 +1469,6 @@ impl AcpThreadView {
return;
};
// Check for the experimental "terminal-auth" _meta field
let auth_method = connection.auth_methods().iter().find(|m| m.id == method);
if let Some(auth_method) = auth_method {
if let Some(meta) = &auth_method.meta {
if let Some(terminal_auth) = meta.get("terminal-auth") {
// Extract terminal auth details from meta
if let (Some(command), Some(label)) = (
terminal_auth.get("command").and_then(|v| v.as_str()),
terminal_auth.get("label").and_then(|v| v.as_str()),
) {
let args = terminal_auth
.get("args")
.and_then(|v| v.as_array())
.map(|arr| {
arr.iter()
.filter_map(|v| v.as_str().map(String::from))
.collect()
})
.unwrap_or_default();
let env = terminal_auth
.get("env")
.and_then(|v| v.as_object())
.map(|obj| {
obj.iter()
.filter_map(|(k, v)| {
v.as_str().map(|val| (k.clone(), val.to_string()))
})
.collect::<HashMap<String, String>>()
})
.unwrap_or_default();
// Build SpawnInTerminal from _meta
let login = task::SpawnInTerminal {
id: task::TaskId(format!("external-agent-{}-login", label)),
full_label: label.to_string(),
label: label.to_string(),
command: Some(command.to_string()),
args,
command_label: label.to_string(),
env,
use_new_terminal: true,
allow_concurrent_runs: true,
hide: task::HideStrategy::Always,
..Default::default()
};
self.thread_error.take();
configuration_view.take();
pending_auth_method.replace(method.clone());
if let Some(workspace) = self.workspace.upgrade() {
let authenticate = Self::spawn_external_agent_login(
login, workspace, false, window, cx,
);
cx.notify();
self.auth_task = Some(cx.spawn_in(window, {
let agent = self.agent.clone();
async move |this, cx| {
let result = authenticate.await;
match &result {
Ok(_) => telemetry::event!(
"Authenticate Agent Succeeded",
agent = agent.telemetry_id()
),
Err(_) => {
telemetry::event!(
"Authenticate Agent Failed",
agent = agent.telemetry_id(),
)
}
}
this.update_in(cx, |this, window, cx| {
if let Err(err) = result {
if let ThreadState::Unauthenticated {
pending_auth_method,
..
} = &mut this.thread_state
{
pending_auth_method.take();
}
this.handle_thread_error(err, cx);
} else {
this.reset(window, cx);
}
this.auth_task.take()
})
.ok();
}
}));
}
return;
}
}
}
}
if method.0.as_ref() == "gemini-api-key" {
let registry = LanguageModelRegistry::global(cx);
let provider = registry
@@ -4082,12 +3978,8 @@ impl AcpThreadView {
)
}
})
.on_click(cx.listener(|this, _, window, cx| {
this.expand_message_editor(
&ExpandMessageEditor,
window,
cx,
);
.on_click(cx.listener(|_, _, window, cx| {
window.dispatch_action(Box::new(ExpandMessageEditor), cx);
})),
),
),

View File

@@ -29,6 +29,7 @@ use project::{
agent_server_store::{AgentServerStore, CLAUDE_CODE_NAME, CODEX_NAME, GEMINI_NAME},
context_server_store::{ContextServerConfiguration, ContextServerStatus, ContextServerStore},
};
use rope::Rope;
use settings::{Settings, SettingsStore, update_settings_file};
use ui::{
Button, ButtonStyle, Chip, CommonAnimationExt, ContextMenu, Disclosure, Divider, DividerColor,
@@ -154,42 +155,7 @@ pub enum AssistantConfigurationEvent {
impl EventEmitter<AssistantConfigurationEvent> for AgentConfiguration {}
enum AgentIcon {
Name(IconName),
Path(SharedString),
}
impl AgentConfiguration {
fn render_section_title(
&mut self,
title: impl Into<SharedString>,
description: impl Into<SharedString>,
menu: AnyElement,
) -> impl IntoElement {
h_flex()
.p_4()
.pb_0()
.mb_2p5()
.items_start()
.justify_between()
.child(
v_flex()
.w_full()
.gap_0p5()
.child(
h_flex()
.pr_1()
.w_full()
.gap_2()
.justify_between()
.flex_wrap()
.child(Headline::new(title.into()))
.child(menu),
)
.child(Label::new(description.into()).color(Color::Muted)),
)
}
fn render_provider_configuration_block(
&mut self,
provider: &Arc<dyn LanguageModelProvider>,
@@ -324,7 +290,7 @@ impl AgentConfiguration {
"Start New Thread",
)
.full_width()
.style(ButtonStyle::Outlined)
.style(ButtonStyle::Filled)
.layer(ElevationIndex::ModalSurface)
.icon_position(IconPosition::Start)
.icon(IconName::Thread)
@@ -415,47 +381,80 @@ impl AgentConfiguration {
cx: &mut Context<Self>,
) -> impl IntoElement {
let providers = LanguageModelRegistry::read_global(cx).providers();
let popover_menu = PopoverMenu::new("add-provider-popover")
.trigger(
Button::new("add-provider", "Add Provider")
.style(ButtonStyle::Outlined)
.icon_position(IconPosition::Start)
.icon(IconName::Plus)
.icon_size(IconSize::Small)
.icon_color(Color::Muted)
.label_size(LabelSize::Small),
)
.anchor(gpui::Corner::TopRight)
.menu({
let workspace = self.workspace.clone();
move |window, cx| {
Some(ContextMenu::build(window, cx, |menu, _window, _cx| {
menu.header("Compatible APIs").entry("OpenAI", None, {
let workspace = workspace.clone();
move |window, cx| {
workspace
.update(cx, |workspace, cx| {
AddLlmProviderModal::toggle(
LlmCompatibleProvider::OpenAi,
workspace,
window,
cx,
);
})
.log_err();
}
})
}))
}
});
v_flex()
.w_full()
.child(self.render_section_title(
"LLM Providers",
"Add at least one provider to use AI-powered features with Zed's native agent.",
popover_menu.into_any_element(),
))
.child(
h_flex()
.p(DynamicSpacing::Base16.rems(cx))
.pr(DynamicSpacing::Base20.rems(cx))
.pb_0()
.mb_2p5()
.items_start()
.justify_between()
.child(
v_flex()
.w_full()
.gap_0p5()
.child(
h_flex()
.pr_1()
.w_full()
.gap_2()
.justify_between()
.child(Headline::new("LLM Providers"))
.child(
PopoverMenu::new("add-provider-popover")
.trigger(
Button::new("add-provider", "Add Provider")
.style(ButtonStyle::Filled)
.layer(ElevationIndex::ModalSurface)
.icon_position(IconPosition::Start)
.icon(IconName::Plus)
.icon_size(IconSize::Small)
.icon_color(Color::Muted)
.label_size(LabelSize::Small),
)
.anchor(gpui::Corner::TopRight)
.menu({
let workspace = self.workspace.clone();
move |window, cx| {
Some(ContextMenu::build(
window,
cx,
|menu, _window, _cx| {
menu.header("Compatible APIs").entry(
"OpenAI",
None,
{
let workspace =
workspace.clone();
move |window, cx| {
workspace
.update(cx, |workspace, cx| {
AddLlmProviderModal::toggle(
LlmCompatibleProvider::OpenAi,
workspace,
window,
cx,
);
})
.log_err();
}
},
)
},
))
}
}),
),
)
.child(
Label::new("Add at least one provider to use AI-powered features with Zed's native agent.")
.color(Color::Muted),
),
),
)
.child(
div()
.w_full()
@@ -534,7 +533,8 @@ impl AgentConfiguration {
let add_server_popover = PopoverMenu::new("add-server-popover")
.trigger(
Button::new("add-server", "Add Server")
.style(ButtonStyle::Outlined)
.style(ButtonStyle::Filled)
.layer(ElevationIndex::ModalSurface)
.icon_position(IconPosition::Start)
.icon(IconName::Plus)
.icon_size(IconSize::Small)
@@ -567,57 +567,61 @@ impl AgentConfiguration {
});
v_flex()
.p(DynamicSpacing::Base16.rems(cx))
.pr(DynamicSpacing::Base20.rems(cx))
.gap_2()
.border_b_1()
.border_color(cx.theme().colors().border)
.child(self.render_section_title(
"Model Context Protocol (MCP) Servers",
"All MCP servers connected directly or via a Zed extension.",
add_server_popover.into_any_element(),
))
.child(
v_flex()
.pl_4()
.pb_4()
.pr_5()
h_flex()
.w_full()
.items_start()
.justify_between()
.gap_1()
.map(|mut parent| {
if context_server_ids.is_empty() {
parent.child(
h_flex()
.p_4()
.justify_center()
.border_1()
.border_dashed()
.border_color(cx.theme().colors().border.opacity(0.6))
.rounded_sm()
.child(
Label::new("No MCP servers added yet.")
.color(Color::Muted)
.size(LabelSize::Small),
),
)
} else {
for (index, context_server_id) in
context_server_ids.into_iter().enumerate()
{
if index > 0 {
parent = parent.child(
Divider::horizontal()
.color(DividerColor::BorderFaded)
.into_any_element(),
);
}
parent = parent.child(self.render_context_server(
context_server_id,
window,
cx,
));
}
parent
}
}),
.child(
v_flex()
.gap_0p5()
.child(Headline::new("Model Context Protocol (MCP) Servers"))
.child(
Label::new(
"All MCP servers connected directly or via a Zed extension.",
)
.color(Color::Muted),
),
)
.child(add_server_popover),
)
.child(v_flex().w_full().gap_1().map(|mut parent| {
if context_server_ids.is_empty() {
parent.child(
h_flex()
.p_4()
.justify_center()
.border_1()
.border_dashed()
.border_color(cx.theme().colors().border.opacity(0.6))
.rounded_sm()
.child(
Label::new("No MCP servers added yet.")
.color(Color::Muted)
.size(LabelSize::Small),
),
)
} else {
for (index, context_server_id) in context_server_ids.into_iter().enumerate() {
if index > 0 {
parent = parent.child(
Divider::horizontal()
.color(DividerColor::BorderFaded)
.into_any_element(),
);
}
parent =
parent.child(self.render_context_server(context_server_id, window, cx));
}
parent
}
}))
}
fn render_context_server(
@@ -662,12 +666,12 @@ impl AgentConfiguration {
let (source_icon, source_tooltip) = if is_from_extension {
(
IconName::ZedSrcExtension,
IconName::ZedMcpExtension,
"This MCP server was installed from an extension.",
)
} else {
(
IconName::ZedSrcCustom,
IconName::ZedMcpCustom,
"This custom MCP server was installed directly.",
)
};
@@ -949,9 +953,9 @@ impl AgentConfiguration {
}
fn render_agent_servers_section(&mut self, cx: &mut Context<Self>) -> impl IntoElement {
let agent_server_store = self.agent_server_store.read(cx);
let user_defined_agents = agent_server_store
let user_defined_agents = self
.agent_server_store
.read(cx)
.external_agents()
.filter(|name| {
name.0 != GEMINI_NAME && name.0 != CLAUDE_CODE_NAME && name.0 != CODEX_NAME
@@ -962,121 +966,102 @@ impl AgentConfiguration {
let user_defined_agents = user_defined_agents
.into_iter()
.map(|name| {
let icon = if let Some(icon_path) = agent_server_store.agent_icon(&name) {
AgentIcon::Path(icon_path)
} else {
AgentIcon::Name(IconName::Ai)
};
self.render_agent_server(icon, name, true)
self.render_agent_server(IconName::Ai, name)
.into_any_element()
})
.collect::<Vec<_>>();
let add_agens_button = Button::new("add-agent", "Add Agent")
.style(ButtonStyle::Outlined)
.icon_position(IconPosition::Start)
.icon(IconName::Plus)
.icon_size(IconSize::Small)
.icon_color(Color::Muted)
.label_size(LabelSize::Small)
.on_click(move |_, window, cx| {
if let Some(workspace) = window.root().flatten() {
let workspace = workspace.downgrade();
window
.spawn(cx, async |cx| {
open_new_agent_servers_entry_in_settings_editor(workspace, cx).await
})
.detach_and_log_err(cx);
}
});
v_flex()
.border_b_1()
.border_color(cx.theme().colors().border)
.child(
v_flex()
.child(self.render_section_title(
"External Agents",
"All agents connected through the Agent Client Protocol.",
add_agens_button.into_any_element(),
))
.p(DynamicSpacing::Base16.rems(cx))
.pr(DynamicSpacing::Base20.rems(cx))
.gap_2()
.child(
v_flex()
.p_4()
.pt_0()
.gap_2()
.child(self.render_agent_server(
AgentIcon::Name(IconName::AiClaude),
"Claude Code",
false,
))
.child(Divider::horizontal().color(DividerColor::BorderFaded))
.child(self.render_agent_server(
AgentIcon::Name(IconName::AiOpenAi),
"Codex",
false,
))
.child(Divider::horizontal().color(DividerColor::BorderFaded))
.child(self.render_agent_server(
AgentIcon::Name(IconName::AiGemini),
"Gemini CLI",
false,
))
.map(|mut parent| {
for agent in user_defined_agents {
parent = parent
.child(
Divider::horizontal().color(DividerColor::BorderFaded),
)
.child(agent);
}
parent
}),
),
.gap_0p5()
.child(
h_flex()
.pr_1()
.w_full()
.gap_2()
.justify_between()
.child(Headline::new("External Agents"))
.child(
Button::new("add-agent", "Add Agent")
.style(ButtonStyle::Filled)
.layer(ElevationIndex::ModalSurface)
.icon_position(IconPosition::Start)
.icon(IconName::Plus)
.icon_size(IconSize::Small)
.icon_color(Color::Muted)
.label_size(LabelSize::Small)
.on_click(
move |_, window, cx| {
if let Some(workspace) = window.root().flatten() {
let workspace = workspace.downgrade();
window
.spawn(cx, async |cx| {
open_new_agent_servers_entry_in_settings_editor(
workspace,
cx,
).await
})
.detach_and_log_err(cx);
}
}
),
)
)
.child(
Label::new(
"All agents connected through the Agent Client Protocol.",
)
.color(Color::Muted),
),
)
.child(self.render_agent_server(
IconName::AiClaude,
"Claude Code",
))
.child(Divider::horizontal().color(DividerColor::BorderFaded))
.child(self.render_agent_server(
IconName::AiOpenAi,
"Codex",
))
.child(Divider::horizontal().color(DividerColor::BorderFaded))
.child(self.render_agent_server(
IconName::AiGemini,
"Gemini CLI",
))
.map(|mut parent| {
for agent in user_defined_agents {
parent = parent.child(Divider::horizontal().color(DividerColor::BorderFaded))
.child(agent);
}
parent
})
)
}
fn render_agent_server(
&self,
icon: AgentIcon,
icon: IconName,
name: impl Into<SharedString>,
external: bool,
) -> impl IntoElement {
let name = name.into();
let icon = match icon {
AgentIcon::Name(icon_name) => Icon::new(icon_name)
.size(IconSize::Small)
.color(Color::Muted),
AgentIcon::Path(icon_path) => Icon::from_path(icon_path)
.size(IconSize::Small)
.color(Color::Muted),
};
let tooltip_id = SharedString::new(format!("agent-source-{}", name));
let tooltip_message = format!("The {} agent was installed from an extension.", name);
h_flex()
.gap_1p5()
.child(icon)
.child(Label::new(name))
.when(external, |this| {
this.child(
div()
.id(tooltip_id)
.flex_none()
.tooltip(Tooltip::text(tooltip_message))
.child(
Icon::new(IconName::ZedSrcExtension)
.size(IconSize::Small)
.color(Color::Muted),
),
)
})
.child(
Icon::new(IconName::Check)
.color(Color::Success)
.size(IconSize::Small),
)
h_flex().gap_1p5().justify_between().child(
h_flex()
.gap_1p5()
.child(Icon::new(icon).size(IconSize::Small).color(Color::Muted))
.child(Label::new(name.into()))
.child(
Icon::new(IconName::Check)
.size(IconSize::Small)
.color(Color::Success),
),
)
}
}
@@ -1198,8 +1183,11 @@ async fn open_new_agent_servers_entry_in_settings_editor(
) -> Result<()> {
let settings_editor = workspace
.update_in(cx, |_, window, cx| {
create_and_open_local_file(paths::settings_file(), window, cx, || {
settings::initial_user_settings_content().as_ref().into()
create_and_open_local_file(paths::settings_file(), window, cx, |cx| {
Rope::from_str(
&settings::initial_user_settings_content(),
cx.background_executor(),
)
})
})?
.await?

View File

@@ -19,6 +19,8 @@ use settings::{
use zed_actions::OpenBrowser;
use zed_actions::agent::{OpenClaudeCodeOnboardingModal, ReauthenticateAgent};
use crate::acp::{AcpThreadHistory, ThreadHistoryEvent};
use crate::context_store::ContextStore;
use crate::ui::{AcpOnboardingModal, ClaudeCodeOnboardingModal};
use crate::{
AddContextServer, AgentDiffPane, DeleteRecentlyOpenThread, Follow, InlineAssistant,
@@ -31,14 +33,9 @@ use crate::{
text_thread_editor::{AgentPanelDelegate, TextThreadEditor, make_lsp_adapter_delegate},
ui::{AgentOnboardingModal, EndTrialUpsell},
};
use crate::{
ExpandMessageEditor,
acp::{AcpThreadHistory, ThreadHistoryEvent},
};
use crate::{
ExternalAgent, NewExternalAgentThread, NewNativeAgentThreadFromSummary, placeholder_command,
};
use crate::{ManageProfiles, context_store::ContextStore};
use agent_settings::AgentSettings;
use ai_onboarding::AgentPanelOnboarding;
use anyhow::{Result, anyhow};
@@ -109,12 +106,6 @@ pub fn init(cx: &mut App) {
}
},
)
.register_action(|workspace, _: &ExpandMessageEditor, window, cx| {
if let Some(panel) = workspace.panel::<AgentPanel>(cx) {
workspace.focus_panel::<AgentPanel>(window, cx);
panel.update(cx, |panel, cx| panel.expand_message_editor(window, cx));
}
})
.register_action(|workspace, _: &OpenHistory, window, cx| {
if let Some(panel) = workspace.panel::<AgentPanel>(cx) {
workspace.focus_panel::<AgentPanel>(window, cx);
@@ -953,15 +944,6 @@ impl AgentPanel {
.detach_and_log_err(cx);
}
fn expand_message_editor(&mut self, window: &mut Window, cx: &mut Context<Self>) {
if let Some(thread_view) = self.active_thread_view() {
thread_view.update(cx, |view, cx| {
view.expand_message_editor(&ExpandMessageEditor, window, cx);
view.focus_handle(cx).focus(window);
});
}
}
fn open_history(&mut self, window: &mut Window, cx: &mut Context<Self>) {
if matches!(self.active_view, ActiveView::History) {
if let Some(previous_view) = self.previous_view.take() {
@@ -1780,9 +1762,10 @@ impl AgentPanel {
}),
)
.action("Add Custom Server…", Box::new(AddContextServer))
.separator()
.separator();
menu = menu
.action("Rules", Box::new(OpenRulesLibrary::default()))
.action("Profiles", Box::new(ManageProfiles::default()))
.action("Settings", Box::new(OpenSettings))
.separator()
.action(full_screen_label, Box::new(ToggleZoom));

View File

@@ -487,9 +487,10 @@ impl CodegenAlternative {
) {
let start_time = Instant::now();
let snapshot = self.snapshot.clone();
let selected_text = snapshot
.text_for_range(self.range.start..self.range.end)
.collect::<Rope>();
let selected_text = Rope::from_iter(
snapshot.text_for_range(self.range.start..self.range.end),
cx.background_executor(),
);
let selection_start = self.range.start.to_point(&snapshot);

View File

@@ -620,18 +620,8 @@ impl TextThreadContextHandle {
impl Display for TextThreadContext {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
write!(f, "<text_thread title=\"")?;
for c in self.title.chars() {
match c {
'&' => write!(f, "&amp;")?,
'<' => write!(f, "&lt;")?,
'>' => write!(f, "&gt;")?,
'"' => write!(f, "&quot;")?,
'\'' => write!(f, "&apos;")?,
_ => write!(f, "{}", c)?,
}
}
writeln!(f, "\">")?;
// TODO: escape title?
writeln!(f, "<text_thread title=\"{}\">", self.title)?;
write!(f, "{}", self.text.trim())?;
write!(f, "\n</text_thread>")
}
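One side of this hunk escapes the title inline; the other replaces that loop with a "TODO: escape title?". For reference, the same five escaping rules as a standalone helper (a sketch; escape_xml_attr is not a function in this change):

// Sketch: escapes the XML-reserved characters handled by the inline loop above.
fn escape_xml_attr(input: &str) -> String {
    let mut out = String::with_capacity(input.len());
    for c in input.chars() {
        match c {
            '&' => out.push_str("&amp;"),
            '<' => out.push_str("&lt;"),
            '>' => out.push_str("&gt;"),
            '"' => out.push_str("&quot;"),
            '\'' => out.push_str("&apos;"),
            _ => out.push(c),
        }
    }
    out
}

With such a helper, the attribute could then be written as writeln!(f, "<text_thread title=\"{}\">", escape_xml_attr(&self.title)).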

View File

@@ -260,10 +260,10 @@ impl<T: 'static> PromptEditor<T> {
let agent_panel_keybinding =
ui::text_for_action(&zed_actions::assistant::ToggleFocus, window, cx)
.map(|keybinding| format!("{keybinding} to chat"))
.map(|keybinding| format!("{keybinding} to chat"))
.unwrap_or_default();
format!("{action}… ({agent_panel_keybinding}↓↑ for history — @ to include context)")
format!("{action}… ({agent_panel_keybinding}↓↑ for history)")
}
pub fn prompt(&self, cx: &App) -> String {

View File

@@ -477,7 +477,7 @@ impl TextThreadEditor {
editor.insert(&format!("/{name}"), window, cx);
if command.accepts_arguments() {
editor.insert(" ", window, cx);
editor.show_completions(&ShowCompletions, window, cx);
editor.show_completions(&ShowCompletions::default(), window, cx);
}
});
});

View File

@@ -744,12 +744,13 @@ impl TextThread {
telemetry: Option<Arc<Telemetry>>,
cx: &mut Context<Self>,
) -> Self {
let buffer = cx.new(|_cx| {
let buffer = cx.new(|cx| {
let buffer = Buffer::remote(
language::BufferId::new(1).unwrap(),
replica_id,
capability,
"",
cx.background_executor(),
);
buffer.set_language_registry(language_registry.clone());
buffer

View File

@@ -331,16 +331,6 @@ impl AutoUpdater {
pub fn start_polling(&self, cx: &mut Context<Self>) -> Task<Result<()>> {
cx.spawn(async move |this, cx| {
#[cfg(target_os = "windows")]
{
use util::ResultExt;
cleanup_windows()
.await
.context("failed to cleanup old directories")
.log_err();
}
loop {
this.update(cx, |this, cx| this.poll(UpdateCheckType::Automatic, cx))?;
cx.background_executor().timer(POLL_INTERVAL).await;
@@ -933,32 +923,6 @@ async fn install_release_macos(
Ok(None)
}
#[cfg(target_os = "windows")]
async fn cleanup_windows() -> Result<()> {
use util::ResultExt;
let parent = std::env::current_exe()?
.parent()
.context("No parent dir for Zed.exe")?
.to_owned();
// keep in sync with crates/auto_update_helper/src/updater.rs
smol::fs::remove_dir(parent.join("updates"))
.await
.context("failed to remove updates dir")
.log_err();
smol::fs::remove_dir(parent.join("install"))
.await
.context("failed to remove install dir")
.log_err();
smol::fs::remove_dir(parent.join("old"))
.await
.context("failed to remove old version dir")
.log_err();
Ok(())
}
async fn install_release_windows(downloaded_installer: PathBuf) -> Result<Option<PathBuf>> {
let output = Command::new(downloaded_installer)
.arg("/verysilent")

View File

@@ -21,9 +21,6 @@ simplelog.workspace = true
[target.'cfg(target_os = "windows")'.dependencies]
windows.workspace = true
[target.'cfg(target_os = "windows")'.dev-dependencies]
tempfile.workspace = true
[target.'cfg(target_os = "windows")'.build-dependencies]
winresource = "0.1"

View File

@@ -1,5 +1,4 @@
use std::{
cell::LazyCell,
path::Path,
time::{Duration, Instant},
};
@@ -12,274 +11,210 @@ use windows::Win32::{
use crate::windows_impl::WM_JOB_UPDATED;
pub(crate) struct Job {
pub apply: Box<dyn Fn(&Path) -> Result<()>>,
pub rollback: Box<dyn Fn(&Path) -> Result<()>>,
}
type Job = fn(&Path) -> Result<()>;
impl Job {
pub fn mkdir(name: &'static Path) -> Self {
Job {
apply: Box::new(move |app_dir| {
let dir = app_dir.join(name);
std::fs::create_dir_all(&dir)
.context(format!("Failed to create directory {}", dir.display()))
}),
rollback: Box::new(move |app_dir| {
let dir = app_dir.join(name);
std::fs::remove_dir_all(&dir)
.context(format!("Failed to remove directory {}", dir.display()))
}),
}
}
pub fn mkdir_if_exists(name: &'static Path, check: &'static Path) -> Self {
Job {
apply: Box::new(move |app_dir| {
let dir = app_dir.join(name);
let check = app_dir.join(check);
if check.exists() {
std::fs::create_dir_all(&dir)
.context(format!("Failed to create directory {}", dir.display()))?
}
Ok(())
}),
rollback: Box::new(move |app_dir| {
let dir = app_dir.join(name);
if dir.exists() {
std::fs::remove_dir_all(&dir)
.context(format!("Failed to remove directory {}", dir.display()))?
}
Ok(())
}),
}
}
pub fn move_file(filename: &'static Path, new_filename: &'static Path) -> Self {
Job {
apply: Box::new(move |app_dir| {
let old_file = app_dir.join(filename);
let new_file = app_dir.join(new_filename);
log::info!(
"Moving file: {}->{}",
old_file.display(),
new_file.display()
);
std::fs::rename(&old_file, new_file)
.context(format!("Failed to move file {}", old_file.display()))
}),
rollback: Box::new(move |app_dir| {
let old_file = app_dir.join(filename);
let new_file = app_dir.join(new_filename);
log::info!(
"Rolling back file move: {}->{}",
old_file.display(),
new_file.display()
);
std::fs::rename(&new_file, &old_file).context(format!(
"Failed to rollback file move {}->{}",
new_file.display(),
old_file.display()
))
}),
}
}
pub fn move_if_exists(filename: &'static Path, new_filename: &'static Path) -> Self {
Job {
apply: Box::new(move |app_dir| {
let old_file = app_dir.join(filename);
let new_file = app_dir.join(new_filename);
if old_file.exists() {
log::info!(
"Moving file: {}->{}",
old_file.display(),
new_file.display()
);
std::fs::rename(&old_file, new_file)
.context(format!("Failed to move file {}", old_file.display()))?;
}
Ok(())
}),
rollback: Box::new(move |app_dir| {
let old_file = app_dir.join(filename);
let new_file = app_dir.join(new_filename);
if new_file.exists() {
log::info!(
"Rolling back file move: {}->{}",
old_file.display(),
new_file.display()
);
std::fs::rename(&new_file, &old_file).context(format!(
"Failed to rollback file move {}->{}",
new_file.display(),
old_file.display()
))?
}
Ok(())
}),
}
}
pub fn rmdir_nofail(filename: &'static Path) -> Self {
Job {
apply: Box::new(move |app_dir| {
let filename = app_dir.join(filename);
log::info!("Removing file: {}", filename.display());
if let Err(e) = std::fs::remove_dir_all(&filename) {
log::warn!("Failed to remove directory: {}", e);
}
Ok(())
}),
rollback: Box::new(move |app_dir| {
let filename = app_dir.join(filename);
anyhow::bail!(
"Delete operations cannot be rolled back, file: {}",
filename.display()
)
}),
}
}
}
// app is single threaded
#[cfg(not(test))]
#[allow(clippy::declare_interior_mutable_const)]
pub(crate) const JOBS: LazyCell<[Job; 22]> = LazyCell::new(|| {
fn p(value: &str) -> &Path {
Path::new(value)
}
[
// Move old files
// Not deleting because installing new files can fail
Job::mkdir(p("old")),
Job::move_file(p("Zed.exe"), p("old\\Zed.exe")),
Job::mkdir(p("old\\bin")),
Job::move_file(p("bin\\Zed.exe"), p("old\\bin\\Zed.exe")),
Job::move_file(p("bin\\zed"), p("old\\bin\\zed")),
//
// TODO: remove after a few weeks once everyone is on the new version and this file never exists
Job::move_if_exists(p("OpenConsole.exe"), p("old\\OpenConsole.exe")),
Job::mkdir(p("old\\x64")),
Job::mkdir(p("old\\arm64")),
Job::move_if_exists(p("x64\\OpenConsole.exe"), p("old\\x64\\OpenConsole.exe")),
Job::move_if_exists(
p("arm64\\OpenConsole.exe"),
p("old\\arm64\\OpenConsole.exe"),
),
//
Job::move_file(p("conpty.dll"), p("old\\conpty.dll")),
// Copy new files
Job::move_file(p("install\\Zed.exe"), p("Zed.exe")),
Job::move_file(p("install\\bin\\Zed.exe"), p("bin\\Zed.exe")),
Job::move_file(p("install\\bin\\zed"), p("bin\\zed")),
//
Job::mkdir_if_exists(p("x64"), p("install\\x64")),
Job::mkdir_if_exists(p("arm64"), p("install\\arm64")),
Job::move_if_exists(
p("install\\x64\\OpenConsole.exe"),
p("x64\\OpenConsole.exe"),
),
Job::move_if_exists(
p("install\\arm64\\OpenConsole.exe"),
p("arm64\\OpenConsole.exe"),
),
//
Job::move_file(p("install\\conpty.dll"), p("conpty.dll")),
// Cleanup installer and updates folder
Job::rmdir_nofail(p("updates")),
Job::rmdir_nofail(p("install")),
// Cleanup old installation
Job::rmdir_nofail(p("old")),
]
});
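Each Job above pairs an apply closure with a rollback closure, and the comments note that delete operations cannot be rolled back, which is why old files are moved aside rather than deleted up front. The driver loop is not part of this hunk; a minimal sketch of how such a list would typically be run, rolling back already-applied jobs in reverse order on failure (run_jobs is illustrative, not the helper's actual code):

// Sketch only: Result is anyhow::Result, as in the surrounding file.
fn run_jobs(jobs: &[Job], app_dir: &Path) -> Result<()> {
    let mut applied = 0;
    for job in jobs {
        if let Err(err) = (job.apply)(app_dir) {
            // Unwind the jobs that already succeeded, newest first.
            for done in jobs[..applied].iter().rev() {
                if let Err(rollback_err) = (done.rollback)(app_dir) {
                    log::warn!("rollback failed: {rollback_err:#}");
                }
            }
            return Err(err);
        }
        applied += 1;
    }
    Ok(())
}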
pub(crate) const JOBS: &[Job] = &[
// Delete old files
|app_dir| {
let zed_executable = app_dir.join("Zed.exe");
log::info!("Removing old file: {}", zed_executable.display());
std::fs::remove_file(&zed_executable).context(format!(
"Failed to remove old file {}",
zed_executable.display()
))
},
|app_dir| {
let zed_cli = app_dir.join("bin\\zed.exe");
log::info!("Removing old file: {}", zed_cli.display());
std::fs::remove_file(&zed_cli)
.context(format!("Failed to remove old file {}", zed_cli.display()))
},
|app_dir| {
let zed_wsl = app_dir.join("bin\\zed");
log::info!("Removing old file: {}", zed_wsl.display());
std::fs::remove_file(&zed_wsl)
.context(format!("Failed to remove old file {}", zed_wsl.display()))
},
// TODO: remove after a few weeks once everyone is on the new version and this file never exists
|app_dir| {
let open_console = app_dir.join("OpenConsole.exe");
if open_console.exists() {
log::info!("Removing old file: {}", open_console.display());
std::fs::remove_file(&open_console).context(format!(
"Failed to remove old file {}",
open_console.display()
))?
}
Ok(())
},
|app_dir| {
let archs = ["x64", "arm64"];
for arch in archs {
let open_console = app_dir.join(format!("{arch}\\OpenConsole.exe"));
if open_console.exists() {
log::info!("Removing old file: {}", open_console.display());
std::fs::remove_file(&open_console).context(format!(
"Failed to remove old file {}",
open_console.display()
))?
}
}
Ok(())
},
|app_dir| {
let conpty = app_dir.join("conpty.dll");
log::info!("Removing old file: {}", conpty.display());
std::fs::remove_file(&conpty)
.context(format!("Failed to remove old file {}", conpty.display()))
},
// Copy new files
|app_dir| {
let zed_executable_source = app_dir.join("install\\Zed.exe");
let zed_executable_dest = app_dir.join("Zed.exe");
log::info!(
"Copying new file {} to {}",
zed_executable_source.display(),
zed_executable_dest.display()
);
std::fs::copy(&zed_executable_source, &zed_executable_dest)
.map(|_| ())
.context(format!(
"Failed to copy new file {} to {}",
zed_executable_source.display(),
zed_executable_dest.display()
))
},
|app_dir| {
let zed_cli_source = app_dir.join("install\\bin\\zed.exe");
let zed_cli_dest = app_dir.join("bin\\zed.exe");
log::info!(
"Copying new file {} to {}",
zed_cli_source.display(),
zed_cli_dest.display()
);
std::fs::copy(&zed_cli_source, &zed_cli_dest)
.map(|_| ())
.context(format!(
"Failed to copy new file {} to {}",
zed_cli_source.display(),
zed_cli_dest.display()
))
},
|app_dir| {
let zed_wsl_source = app_dir.join("install\\bin\\zed");
let zed_wsl_dest = app_dir.join("bin\\zed");
log::info!(
"Copying new file {} to {}",
zed_wsl_source.display(),
zed_wsl_dest.display()
);
std::fs::copy(&zed_wsl_source, &zed_wsl_dest)
.map(|_| ())
.context(format!(
"Failed to copy new file {} to {}",
zed_wsl_source.display(),
zed_wsl_dest.display()
))
},
|app_dir| {
let archs = ["x64", "arm64"];
for arch in archs {
let open_console_source = app_dir.join(format!("install\\{arch}\\OpenConsole.exe"));
let open_console_dest = app_dir.join(format!("{arch}\\OpenConsole.exe"));
if open_console_source.exists() {
log::info!(
"Copying new file {} to {}",
open_console_source.display(),
open_console_dest.display()
);
let parent = open_console_dest.parent().context(format!(
"Failed to get parent directory of {}",
open_console_dest.display()
))?;
std::fs::create_dir_all(parent)
.context(format!("Failed to create directory {}", parent.display()))?;
std::fs::copy(&open_console_source, &open_console_dest)
.map(|_| ())
.context(format!(
"Failed to copy new file {} to {}",
open_console_source.display(),
open_console_dest.display()
))?
}
}
Ok(())
},
|app_dir| {
let conpty_source = app_dir.join("install\\conpty.dll");
let conpty_dest = app_dir.join("conpty.dll");
log::info!(
"Copying new file {} to {}",
conpty_source.display(),
conpty_dest.display()
);
std::fs::copy(&conpty_source, &conpty_dest)
.map(|_| ())
.context(format!(
"Failed to copy new file {} to {}",
conpty_source.display(),
conpty_dest.display()
))
},
// Clean up installer folder and updates folder
|app_dir| {
let updates_folder = app_dir.join("updates");
log::info!("Cleaning up: {}", updates_folder.display());
std::fs::remove_dir_all(&updates_folder).context(format!(
"Failed to remove updates folder {}",
updates_folder.display()
))
},
|app_dir| {
let installer_folder = app_dir.join("install");
log::info!("Cleaning up: {}", installer_folder.display());
std::fs::remove_dir_all(&installer_folder).context(format!(
"Failed to remove installer folder {}",
installer_folder.display()
))
},
];
// app is single threaded
#[cfg(test)]
#[allow(clippy::declare_interior_mutable_const)]
pub(crate) const JOBS: LazyCell<[Job; 9]> = LazyCell::new(|| {
fn p(value: &str) -> &Path {
Path::new(value)
}
[
Job {
apply: Box::new(|_| {
std::thread::sleep(Duration::from_millis(1000));
if let Ok(config) = std::env::var("ZED_AUTO_UPDATE") {
match config.as_str() {
"err1" => Err(std::io::Error::other("Simulated error")).context("Anyhow!"),
"err2" => Ok(()),
_ => panic!("Unknown ZED_AUTO_UPDATE value: {}", config),
}
} else {
Ok(())
}
}),
rollback: Box::new(|_| {
unsafe { std::env::set_var("ZED_AUTO_UPDATE_RB", "rollback1") };
Ok(())
}),
},
Job::mkdir(p("test1")),
Job::mkdir_if_exists(p("test_exists"), p("test1")),
Job::mkdir_if_exists(p("test_missing"), p("dont")),
Job {
apply: Box::new(|folder| {
std::fs::write(folder.join("test1/test"), "test")?;
Ok(())
}),
rollback: Box::new(|folder| {
std::fs::remove_file(folder.join("test1/test"))?;
Ok(())
}),
},
Job::move_file(p("test1/test"), p("test1/moved")),
Job::move_if_exists(p("test1/test"), p("test1/noop")),
Job {
apply: Box::new(|_| {
std::thread::sleep(Duration::from_millis(1000));
if let Ok(config) = std::env::var("ZED_AUTO_UPDATE") {
match config.as_str() {
"err1" => Ok(()),
"err2" => Err(std::io::Error::other("Simulated error")).context("Anyhow!"),
_ => panic!("Unknown ZED_AUTO_UPDATE value: {}", config),
}
} else {
Ok(())
}
}),
rollback: Box::new(|_| Ok(())),
},
Job::rmdir_nofail(p("test1/nofolder")),
]
});
pub(crate) const JOBS: &[Job] = &[
|_| {
std::thread::sleep(Duration::from_millis(1000));
if let Ok(config) = std::env::var("ZED_AUTO_UPDATE") {
match config.as_str() {
"err" => Err(std::io::Error::other("Simulated error")).context("Anyhow!"),
_ => panic!("Unknown ZED_AUTO_UPDATE value: {}", config),
}
} else {
Ok(())
}
},
|_| {
std::thread::sleep(Duration::from_millis(1000));
if let Ok(config) = std::env::var("ZED_AUTO_UPDATE") {
match config.as_str() {
"err" => Err(std::io::Error::other("Simulated error")).context("Anyhow!"),
_ => panic!("Unknown ZED_AUTO_UPDATE value: {}", config),
}
} else {
Ok(())
}
},
];
pub(crate) fn perform_update(app_dir: &Path, hwnd: Option<isize>, launch: bool) -> Result<()> {
let hwnd = hwnd.map(|ptr| HWND(ptr as _));
let mut last_successful_job = None;
'outer: for (i, job) in JOBS.iter().enumerate() {
for job in JOBS.iter() {
let start = Instant::now();
loop {
if start.elapsed().as_secs() > 2 {
log::error!("Timed out, rolling back");
break 'outer;
}
match (job.apply)(app_dir) {
anyhow::ensure!(start.elapsed().as_secs() <= 2, "Timed out");
match (*job)(app_dir) {
Ok(_) => {
last_successful_job = Some(i);
unsafe { PostMessageW(hwnd, WM_JOB_UPDATED, WPARAM(0), LPARAM(0))? };
break;
}
@@ -288,7 +223,6 @@ pub(crate) fn perform_update(app_dir: &Path, hwnd: Option<isize>, launch: bool)
let io_err = err.downcast_ref::<std::io::Error>().unwrap();
if io_err.kind() == std::io::ErrorKind::NotFound {
log::warn!("File or folder not found.");
last_successful_job = Some(i);
unsafe { PostMessageW(hwnd, WM_JOB_UPDATED, WPARAM(0), LPARAM(0))? };
break;
}
@@ -299,28 +233,6 @@ pub(crate) fn perform_update(app_dir: &Path, hwnd: Option<isize>, launch: bool)
}
}
}
if last_successful_job
.map(|job| job != JOBS.len() - 1)
.unwrap_or(true)
{
let Some(last_successful_job) = last_successful_job else {
anyhow::bail!("Autoupdate failed, nothing to rollback");
};
for job in (0..=last_successful_job).rev() {
let job = &JOBS[job];
if let Err(e) = (job.rollback)(app_dir) {
anyhow::bail!(
"Job rollback failed, the app might be left in an inconsistent state: ({:?})",
e
);
}
}
anyhow::bail!("Autoupdate failed, rollback successful");
}
if launch {
#[allow(clippy::disallowed_methods, reason = "doesn't run in the main binary")]
let _ = std::process::Command::new(app_dir.join("Zed.exe")).spawn();
@@ -335,27 +247,12 @@ mod test {
#[test]
fn test_perform_update() {
let app_dir = tempfile::tempdir().unwrap();
let app_dir = app_dir.path();
let app_dir = std::path::Path::new("C:/");
assert!(perform_update(app_dir, None, false).is_ok());
let app_dir = tempfile::tempdir().unwrap();
let app_dir = app_dir.path();
// Simulate a timeout
unsafe { std::env::set_var("ZED_AUTO_UPDATE", "err1") };
unsafe { std::env::set_var("ZED_AUTO_UPDATE", "err") };
let ret = perform_update(app_dir, None, false);
assert!(
ret.is_err_and(|e| e.to_string().as_str() == "Autoupdate failed, nothing to rollback")
);
let app_dir = tempfile::tempdir().unwrap();
let app_dir = app_dir.path();
// Simulate a timeout
unsafe { std::env::set_var("ZED_AUTO_UPDATE", "err2") };
let ret = perform_update(app_dir, None, false);
assert!(
ret.is_err_and(|e| e.to_string().as_str() == "Autoupdate failed, rollback successful")
);
assert!(std::env::var("ZED_AUTO_UPDATE_RB").is_ok_and(|e| e == "rollback1"));
assert!(ret.is_err_and(|e| e.to_string().as_str() == "Timed out"));
}
}
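
The two `JOBS` tables above differ in shape: the closure-based list simply deletes and copies files, while the `Job { apply, rollback }` form lets `perform_update` unwind completed steps in reverse when a later step fails. A minimal sketch of that transactional pattern, with simplified signatures and hypothetical job bodies (not the crate's actual API); it assumes the `anyhow` crate and omits the HWND progress messages and retry-until-timeout loop shown in the diff:

```rust
// Sketch only: simplified signatures, hypothetical job bodies.
use std::path::Path;

struct Job {
    apply: Box<dyn Fn(&Path) -> anyhow::Result<()>>,
    rollback: Box<dyn Fn(&Path) -> anyhow::Result<()>>,
}

fn run_jobs(app_dir: &Path, jobs: &[Job]) -> anyhow::Result<()> {
    let mut completed = 0;
    for job in jobs {
        if let Err(err) = (job.apply)(app_dir) {
            // Unwind every job that already succeeded, newest first, so the
            // install directory is left as it was before the update started.
            for done in jobs[..completed].iter().rev() {
                (done.rollback)(app_dir)?;
            }
            return Err(err.context("update failed, rolled back"));
        }
        completed += 1;
    }
    Ok(())
}

fn main() -> anyhow::Result<()> {
    let app_dir = std::env::temp_dir();
    let jobs = vec![Job {
        apply: Box::new(|dir: &Path| -> anyhow::Result<()> {
            std::fs::create_dir_all(dir.join("zed-update-sketch"))?;
            Ok(())
        }),
        rollback: Box::new(|dir: &Path| -> anyhow::Result<()> {
            std::fs::remove_dir_all(dir.join("zed-update-sketch"))?;
            Ok(())
        }),
    }];
    run_jobs(&app_dir, &jobs)
}
```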

View File

@@ -100,21 +100,13 @@ impl Render for Breadcrumbs {
let breadcrumbs_stack = h_flex().gap_1().children(breadcrumbs);
let prefix_element = active_item.breadcrumb_prefix(window, cx);
let breadcrumbs = if let Some(prefix) = prefix_element {
h_flex().gap_1p5().child(prefix).child(breadcrumbs_stack)
} else {
breadcrumbs_stack
};
match active_item
.downcast::<Editor>()
.map(|editor| editor.downgrade())
{
Some(editor) => element.child(
ButtonLike::new("toggle outline view")
.child(breadcrumbs)
.child(breadcrumbs_stack)
.style(ButtonStyle::Transparent)
.on_click({
let editor = editor.clone();
@@ -149,7 +141,7 @@ impl Render for Breadcrumbs {
// Match the height and padding of the `ButtonLike` in the other arm.
.h(rems_from_px(22.))
.pl_1()
.child(breadcrumbs),
.child(breadcrumbs_stack),
}
}
}

View File

@@ -1,6 +1,9 @@
use futures::channel::oneshot;
use git2::{DiffLineType as GitDiffLineType, DiffOptions as GitOptions, Patch as GitPatch};
use gpui::{App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Task, TaskLabel};
use gpui::{
App, AppContext as _, AsyncApp, BackgroundExecutor, Context, Entity, EventEmitter, Task,
TaskLabel,
};
use language::{Language, LanguageRegistry};
use rope::Rope;
use std::{
@@ -191,7 +194,7 @@ impl BufferDiffSnapshot {
let base_text_exists;
let base_text_snapshot;
if let Some(text) = &base_text {
let base_text_rope = Rope::from(text.as_str());
let base_text_rope = Rope::from_str(text.as_str(), cx.background_executor());
base_text_pair = Some((text.clone(), base_text_rope.clone()));
let snapshot =
language::Buffer::build_snapshot(base_text_rope, language, language_registry, cx);
@@ -311,6 +314,7 @@ impl BufferDiffInner {
hunks: &[DiffHunk],
buffer: &text::BufferSnapshot,
file_exists: bool,
cx: &BackgroundExecutor,
) -> Option<Rope> {
let head_text = self
.base_text_exists
@@ -505,7 +509,7 @@ impl BufferDiffInner {
for (old_range, replacement_text) in edits {
new_index_text.append(index_cursor.slice(old_range.start));
index_cursor.seek_forward(old_range.end);
new_index_text.push(&replacement_text);
new_index_text.push(&replacement_text, cx);
}
new_index_text.append(index_cursor.suffix());
Some(new_index_text)
@@ -962,6 +966,7 @@ impl BufferDiff {
hunks,
buffer,
file_exists,
cx.background_executor(),
);
cx.emit(BufferDiffEvent::HunksStagedOrUnstaged(
@@ -1385,7 +1390,12 @@ mod tests {
"
.unindent();
let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), buffer_text);
let mut buffer = Buffer::new(
ReplicaId::LOCAL,
BufferId::new(1).unwrap(),
buffer_text,
cx.background_executor(),
);
let mut diff = BufferDiffSnapshot::new_sync(buffer.clone(), diff_base.clone(), cx);
assert_hunks(
diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &buffer),
@@ -1394,7 +1404,7 @@ mod tests {
&[(1..2, "two\n", "HELLO\n", DiffHunkStatus::modified_none())],
);
buffer.edit([(0..0, "point five\n")]);
buffer.edit([(0..0, "point five\n")], cx.background_executor());
diff = BufferDiffSnapshot::new_sync(buffer.clone(), diff_base.clone(), cx);
assert_hunks(
diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &buffer),
@@ -1459,7 +1469,12 @@ mod tests {
"
.unindent();
let buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), buffer_text);
let buffer = Buffer::new(
ReplicaId::LOCAL,
BufferId::new(1).unwrap(),
buffer_text,
cx.background_executor(),
);
let unstaged_diff = BufferDiffSnapshot::new_sync(buffer.clone(), index_text, cx);
let mut uncommitted_diff =
BufferDiffSnapshot::new_sync(buffer.clone(), head_text.clone(), cx);
@@ -1528,7 +1543,12 @@ mod tests {
"
.unindent();
let buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), buffer_text);
let buffer = Buffer::new(
ReplicaId::LOCAL,
BufferId::new(1).unwrap(),
buffer_text,
cx.background_executor(),
);
let diff = cx
.update(|cx| {
BufferDiffSnapshot::new_with_base_text(
@@ -1791,7 +1811,12 @@ mod tests {
for example in table {
let (buffer_text, ranges) = marked_text_ranges(&example.buffer_marked_text, false);
let buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), buffer_text);
let buffer = Buffer::new(
ReplicaId::LOCAL,
BufferId::new(1).unwrap(),
buffer_text,
cx.background_executor(),
);
let hunk_range =
buffer.anchor_before(ranges[0].start)..buffer.anchor_before(ranges[0].end);
@@ -1868,6 +1893,7 @@ mod tests {
ReplicaId::LOCAL,
BufferId::new(1).unwrap(),
buffer_text.clone(),
cx.background_executor(),
);
let unstaged = BufferDiffSnapshot::new_sync(buffer.clone(), index_text, cx);
let uncommitted = BufferDiffSnapshot::new_sync(buffer.clone(), head_text.clone(), cx);
@@ -1941,7 +1967,12 @@ mod tests {
"
.unindent();
let mut buffer = Buffer::new(ReplicaId::LOCAL, BufferId::new(1).unwrap(), buffer_text_1);
let mut buffer = Buffer::new(
ReplicaId::LOCAL,
BufferId::new(1).unwrap(),
buffer_text_1,
cx.background_executor(),
);
let empty_diff = cx.update(|cx| BufferDiffSnapshot::empty(&buffer, cx));
let diff_1 = BufferDiffSnapshot::new_sync(buffer.clone(), base_text.clone(), cx);
@@ -1961,6 +1992,7 @@ mod tests {
NINE
"
.unindent(),
cx.background_executor(),
);
let diff_2 = BufferDiffSnapshot::new_sync(buffer.clone(), base_text.clone(), cx);
assert_eq!(None, diff_2.inner.compare(&diff_1.inner, &buffer));
@@ -1978,6 +2010,7 @@ mod tests {
NINE
"
.unindent(),
cx.background_executor(),
);
let diff_3 = BufferDiffSnapshot::new_sync(buffer.clone(), base_text.clone(), cx);
let range = diff_3.inner.compare(&diff_2.inner, &buffer).unwrap();
@@ -1995,6 +2028,7 @@ mod tests {
NINE
"
.unindent(),
cx.background_executor(),
);
let diff_4 = BufferDiffSnapshot::new_sync(buffer.clone(), base_text.clone(), cx);
let range = diff_4.inner.compare(&diff_3.inner, &buffer).unwrap();
@@ -2013,6 +2047,7 @@ mod tests {
NINE
"
.unindent(),
cx.background_executor(),
);
let diff_5 = BufferDiffSnapshot::new_sync(buffer.snapshot(), base_text.clone(), cx);
let range = diff_5.inner.compare(&diff_4.inner, &buffer).unwrap();
@@ -2031,6 +2066,7 @@ mod tests {
«nine»
"
.unindent(),
cx.background_executor(),
);
let diff_6 = BufferDiffSnapshot::new_sync(buffer.snapshot(), base_text, cx);
let range = diff_6.inner.compare(&diff_5.inner, &buffer).unwrap();
@@ -2140,14 +2176,14 @@ mod tests {
let working_copy = gen_working_copy(rng, &head_text);
let working_copy = cx.new(|cx| {
language::Buffer::local_normalized(
Rope::from(working_copy.as_str()),
Rope::from_str(working_copy.as_str(), cx.background_executor()),
text::LineEnding::default(),
cx,
)
});
let working_copy = working_copy.read_with(cx, |working_copy, _| working_copy.snapshot());
let mut index_text = if rng.random() {
Rope::from(head_text.as_str())
Rope::from_str(head_text.as_str(), cx.background_executor())
} else {
working_copy.as_rope().clone()
};

View File

@@ -70,6 +70,7 @@ impl ChannelBuffer {
ReplicaId::new(response.replica_id as u16),
capability,
base_text,
cx.background_executor(),
)
})?;
buffer.update(cx, |buffer, cx| buffer.apply_ops(operations, cx))?;

View File

@@ -1,5 +1,4 @@
pub mod predict_edits_v3;
pub mod udiff;
use std::str::FromStr;
use std::sync::Arc;

View File

@@ -1,294 +0,0 @@
use std::{borrow::Cow, fmt::Display};
#[derive(Debug, PartialEq)]
pub enum DiffLine<'a> {
OldPath { path: Cow<'a, str> },
NewPath { path: Cow<'a, str> },
HunkHeader(Option<HunkLocation>),
Context(&'a str),
Deletion(&'a str),
Addition(&'a str),
Garbage(&'a str),
}
#[derive(Debug, PartialEq)]
pub struct HunkLocation {
start_line_old: u32,
count_old: u32,
start_line_new: u32,
count_new: u32,
}
impl<'a> DiffLine<'a> {
pub fn parse(line: &'a str) -> Self {
Self::try_parse(line).unwrap_or(Self::Garbage(line))
}
fn try_parse(line: &'a str) -> Option<Self> {
if let Some(header) = line.strip_prefix("---").and_then(eat_required_whitespace) {
let path = parse_header_path("a/", header);
Some(Self::OldPath { path })
} else if let Some(header) = line.strip_prefix("+++").and_then(eat_required_whitespace) {
Some(Self::NewPath {
path: parse_header_path("b/", header),
})
} else if let Some(header) = line.strip_prefix("@@").and_then(eat_required_whitespace) {
if header.starts_with("...") {
return Some(Self::HunkHeader(None));
}
let (start_line_old, header) = header.strip_prefix('-')?.split_once(',')?;
let mut parts = header.split_ascii_whitespace();
let count_old = parts.next()?;
let (start_line_new, count_new) = parts.next()?.strip_prefix('+')?.split_once(',')?;
Some(Self::HunkHeader(Some(HunkLocation {
start_line_old: start_line_old.parse::<u32>().ok()?.saturating_sub(1),
count_old: count_old.parse().ok()?,
start_line_new: start_line_new.parse::<u32>().ok()?.saturating_sub(1),
count_new: count_new.parse().ok()?,
})))
} else if let Some(deleted_header) = line.strip_prefix("-") {
Some(Self::Deletion(deleted_header))
} else if line.is_empty() {
Some(Self::Context(""))
} else if let Some(context) = line.strip_prefix(" ") {
Some(Self::Context(context))
} else {
Some(Self::Addition(line.strip_prefix("+")?))
}
}
}
impl<'a> Display for DiffLine<'a> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
DiffLine::OldPath { path } => write!(f, "--- {path}"),
DiffLine::NewPath { path } => write!(f, "+++ {path}"),
DiffLine::HunkHeader(Some(hunk_location)) => {
write!(
f,
"@@ -{},{} +{},{} @@",
hunk_location.start_line_old + 1,
hunk_location.count_old,
hunk_location.start_line_new + 1,
hunk_location.count_new
)
}
DiffLine::HunkHeader(None) => write!(f, "@@ ... @@"),
DiffLine::Context(content) => write!(f, " {content}"),
DiffLine::Deletion(content) => write!(f, "-{content}"),
DiffLine::Addition(content) => write!(f, "+{content}"),
DiffLine::Garbage(line) => write!(f, "{line}"),
}
}
}
fn parse_header_path<'a>(strip_prefix: &'static str, header: &'a str) -> Cow<'a, str> {
if !header.contains(['"', '\\']) {
let path = header.split_ascii_whitespace().next().unwrap_or(header);
return Cow::Borrowed(path.strip_prefix(strip_prefix).unwrap_or(path));
}
let mut path = String::with_capacity(header.len());
let mut in_quote = false;
let mut chars = header.chars().peekable();
let mut strip_prefix = Some(strip_prefix);
while let Some(char) = chars.next() {
if char == '"' {
in_quote = !in_quote;
} else if char == '\\' {
let Some(&next_char) = chars.peek() else {
break;
};
chars.next();
path.push(next_char);
} else if char.is_ascii_whitespace() && !in_quote {
break;
} else {
path.push(char);
}
if let Some(prefix) = strip_prefix
&& path == prefix
{
strip_prefix.take();
path.clear();
}
}
Cow::Owned(path)
}
fn eat_required_whitespace(header: &str) -> Option<&str> {
let trimmed = header.trim_ascii_start();
if trimmed.len() == header.len() {
None
} else {
Some(trimmed)
}
}
#[cfg(test)]
mod tests {
use super::*;
use indoc::indoc;
#[test]
fn parse_lines_simple() {
let input = indoc! {"
diff --git a/text.txt b/text.txt
index 86c770d..a1fd855 100644
--- a/file.txt
+++ b/file.txt
@@ -1,2 +1,3 @@
context
-deleted
+inserted
garbage
--- b/file.txt
+++ a/file.txt
"};
let lines = input.lines().map(DiffLine::parse).collect::<Vec<_>>();
pretty_assertions::assert_eq!(
lines,
&[
DiffLine::Garbage("diff --git a/text.txt b/text.txt"),
DiffLine::Garbage("index 86c770d..a1fd855 100644"),
DiffLine::OldPath {
path: "file.txt".into()
},
DiffLine::NewPath {
path: "file.txt".into()
},
DiffLine::HunkHeader(Some(HunkLocation {
start_line_old: 0,
count_old: 2,
start_line_new: 0,
count_new: 3
})),
DiffLine::Context("context"),
DiffLine::Deletion("deleted"),
DiffLine::Addition("inserted"),
DiffLine::Garbage("garbage"),
DiffLine::Context(""),
DiffLine::OldPath {
path: "b/file.txt".into()
},
DiffLine::NewPath {
path: "a/file.txt".into()
},
]
);
}
#[test]
fn file_header_extra_space() {
let options = ["--- file", "--- file", "---\tfile"];
for option in options {
pretty_assertions::assert_eq!(
DiffLine::parse(option),
DiffLine::OldPath {
path: "file".into()
},
"{option}",
);
}
}
#[test]
fn hunk_header_extra_space() {
let options = [
"@@ -1,2 +1,3 @@",
"@@ -1,2 +1,3 @@",
"@@\t-1,2\t+1,3\t@@",
"@@ -1,2 +1,3 @@",
"@@ -1,2 +1,3 @@",
"@@ -1,2 +1,3 @@",
"@@ -1,2 +1,3 @@ garbage",
];
for option in options {
pretty_assertions::assert_eq!(
DiffLine::parse(option),
DiffLine::HunkHeader(Some(HunkLocation {
start_line_old: 0,
count_old: 2,
start_line_new: 0,
count_new: 3
})),
"{option}",
);
}
}
#[test]
fn hunk_header_without_location() {
pretty_assertions::assert_eq!(DiffLine::parse("@@ ... @@"), DiffLine::HunkHeader(None));
}
#[test]
fn test_parse_path() {
assert_eq!(parse_header_path("a/", "foo.txt"), "foo.txt");
assert_eq!(
parse_header_path("a/", "foo/bar/baz.txt"),
"foo/bar/baz.txt"
);
assert_eq!(parse_header_path("a/", "a/foo.txt"), "foo.txt");
assert_eq!(
parse_header_path("a/", "a/foo/bar/baz.txt"),
"foo/bar/baz.txt"
);
// Extra
assert_eq!(
parse_header_path("a/", "a/foo/bar/baz.txt 2025"),
"foo/bar/baz.txt"
);
assert_eq!(
parse_header_path("a/", "a/foo/bar/baz.txt\t2025"),
"foo/bar/baz.txt"
);
assert_eq!(
parse_header_path("a/", "a/foo/bar/baz.txt \""),
"foo/bar/baz.txt"
);
// Quoted
assert_eq!(
parse_header_path("a/", "a/foo/bar/\"baz quox.txt\""),
"foo/bar/baz quox.txt"
);
assert_eq!(
parse_header_path("a/", "\"a/foo/bar/baz quox.txt\""),
"foo/bar/baz quox.txt"
);
assert_eq!(
parse_header_path("a/", "\"foo/bar/baz quox.txt\""),
"foo/bar/baz quox.txt"
);
assert_eq!(parse_header_path("a/", "\"whatever 🤷\""), "whatever 🤷");
assert_eq!(
parse_header_path("a/", "\"foo/bar/baz quox.txt\" 2025"),
"foo/bar/baz quox.txt"
);
// unescaped quotes are dropped
assert_eq!(parse_header_path("a/", "foo/\"bar\""), "foo/bar");
// Escaped
assert_eq!(
parse_header_path("a/", "\"foo/\\\"bar\\\"/baz.txt\""),
"foo/\"bar\"/baz.txt"
);
assert_eq!(
parse_header_path("a/", "\"C:\\\\Projects\\\\My App\\\\old file.txt\""),
"C:\\Projects\\My App\\old file.txt"
);
}
}
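
For reference, a small usage sketch of the parser being deleted here, assuming the `DiffLine` type from this file is in scope; it tallies added and removed lines the same way the tests above exercise `DiffLine::parse`:

```rust
// Usage sketch for the removed udiff parser; `count_changes` is illustrative,
// not part of the original module.
fn count_changes(diff: &str) -> (usize, usize) {
    let mut added = 0;
    let mut deleted = 0;
    for line in diff.lines() {
        match DiffLine::parse(line) {
            DiffLine::Addition(_) => added += 1,
            DiffLine::Deletion(_) => deleted += 1,
            // Paths, hunk headers, context, and garbage lines are ignored here.
            _ => {}
        }
    }
    (added, deleted)
}
```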

View File

@@ -701,12 +701,12 @@ impl Database {
return Ok(());
}
let mut text_buffer = text::Buffer::new(
let mut text_buffer = text::Buffer::new_slow(
clock::ReplicaId::LOCAL,
text::BufferId::new(1).unwrap(),
base_text,
);
text_buffer.apply_ops(operations.into_iter().filter_map(operation_from_wire));
text_buffer.apply_ops(operations.into_iter().filter_map(operation_from_wire), None);
let base_text = text_buffer.text();
let epoch = buffer.epoch + 1;

View File

@@ -74,11 +74,21 @@ async fn test_channel_buffers(db: &Arc<Database>) {
ReplicaId::new(0),
text::BufferId::new(1).unwrap(),
"".to_string(),
&db.test_options.as_ref().unwrap().executor,
);
let operations = vec![
buffer_a.edit([(0..0, "hello world")]),
buffer_a.edit([(5..5, ", cruel")]),
buffer_a.edit([(0..5, "goodbye")]),
buffer_a.edit(
[(0..0, "hello world")],
&db.test_options.as_ref().unwrap().executor,
),
buffer_a.edit(
[(5..5, ", cruel")],
&db.test_options.as_ref().unwrap().executor,
),
buffer_a.edit(
[(0..5, "goodbye")],
&db.test_options.as_ref().unwrap().executor,
),
buffer_a.undo().unwrap().1,
];
assert_eq!(buffer_a.text(), "hello, cruel world");
@@ -102,15 +112,19 @@ async fn test_channel_buffers(db: &Arc<Database>) {
ReplicaId::new(0),
text::BufferId::new(1).unwrap(),
buffer_response_b.base_text,
&db.test_options.as_ref().unwrap().executor,
);
buffer_b.apply_ops(
buffer_response_b.operations.into_iter().map(|operation| {
let operation = proto::deserialize_operation(operation).unwrap();
if let language::Operation::Buffer(operation) = operation {
operation
} else {
unreachable!()
}
}),
None,
);
buffer_b.apply_ops(buffer_response_b.operations.into_iter().map(|operation| {
let operation = proto::deserialize_operation(operation).unwrap();
if let language::Operation::Buffer(operation) = operation {
operation
} else {
unreachable!()
}
}));
assert_eq!(buffer_b.text(), "hello, cruel world");
@@ -247,6 +261,7 @@ async fn test_channel_buffers_last_operations(db: &Database) {
ReplicaId::new(res.replica_id as u16),
text::BufferId::new(1).unwrap(),
"".to_string(),
&db.test_options.as_ref().unwrap().executor,
));
}
@@ -255,9 +270,9 @@ async fn test_channel_buffers_last_operations(db: &Database) {
user_id,
db,
vec![
text_buffers[0].edit([(0..0, "a")]),
text_buffers[0].edit([(0..0, "b")]),
text_buffers[0].edit([(0..0, "c")]),
text_buffers[0].edit([(0..0, "a")], &db.test_options.as_ref().unwrap().executor),
text_buffers[0].edit([(0..0, "b")], &db.test_options.as_ref().unwrap().executor),
text_buffers[0].edit([(0..0, "c")], &db.test_options.as_ref().unwrap().executor),
],
)
.await;
@@ -267,9 +282,9 @@ async fn test_channel_buffers_last_operations(db: &Database) {
user_id,
db,
vec![
text_buffers[1].edit([(0..0, "d")]),
text_buffers[1].edit([(1..1, "e")]),
text_buffers[1].edit([(2..2, "f")]),
text_buffers[1].edit([(0..0, "d")], &db.test_options.as_ref().unwrap().executor),
text_buffers[1].edit([(1..1, "e")], &db.test_options.as_ref().unwrap().executor),
text_buffers[1].edit([(2..2, "f")], &db.test_options.as_ref().unwrap().executor),
],
)
.await;
@@ -286,14 +301,15 @@ async fn test_channel_buffers_last_operations(db: &Database) {
replica_id,
text::BufferId::new(1).unwrap(),
"def".to_string(),
&db.test_options.as_ref().unwrap().executor,
);
update_buffer(
buffers[1].channel_id,
user_id,
db,
vec![
text_buffers[1].edit([(0..0, "g")]),
text_buffers[1].edit([(0..0, "h")]),
text_buffers[1].edit([(0..0, "g")], &db.test_options.as_ref().unwrap().executor),
text_buffers[1].edit([(0..0, "h")], &db.test_options.as_ref().unwrap().executor),
],
)
.await;
@@ -302,7 +318,7 @@ async fn test_channel_buffers_last_operations(db: &Database) {
buffers[2].channel_id,
user_id,
db,
vec![text_buffers[2].edit([(0..0, "i")])],
vec![text_buffers[2].edit([(0..0, "i")], &db.test_options.as_ref().unwrap().executor)],
)
.await;

View File

@@ -3694,7 +3694,7 @@ async fn test_buffer_reloading(
assert_eq!(buf.line_ending(), LineEnding::Unix);
});
let new_contents = Rope::from("d\ne\nf");
let new_contents = Rope::from_str_small("d\ne\nf");
client_a
.fs()
.save(
@@ -4479,7 +4479,7 @@ async fn test_reloading_buffer_manually(
.fs()
.save(
path!("/a/a.rs").as_ref(),
&Rope::from("let seven = 7;"),
&Rope::from_str_small("let seven = 7;"),
LineEnding::Unix,
)
.await

View File

@@ -27,6 +27,7 @@ use std::{
rc::Rc,
sync::Arc,
};
use text::Rope;
use util::{
ResultExt, path,
paths::PathStyle,
@@ -938,7 +939,11 @@ impl RandomizedTest for ProjectCollaborationTest {
client
.fs()
.save(&path, &content.as_str().into(), text::LineEnding::Unix)
.save(
&path,
&Rope::from_str_small(content.as_str()),
text::LineEnding::Unix,
)
.await
.unwrap();
}

View File

@@ -489,11 +489,7 @@ impl Copilot {
let node_path = node_runtime.binary_path().await?;
ensure_node_version_for_copilot(&node_path).await?;
let arguments: Vec<OsString> = vec![
"--experimental-sqlite".into(),
server_path.into(),
"--stdio".into(),
];
let arguments: Vec<OsString> = vec![server_path.into(), "--stdio".into()];
let binary = LanguageServerBinary {
path: node_path,
arguments,

View File

@@ -697,7 +697,6 @@ impl Render for NewProcessModal {
.justify_between()
.border_t_1()
.border_color(cx.theme().colors().border_variant);
let secondary_action = menu::SecondaryConfirm.boxed_clone();
match self.mode {
NewProcessMode::Launch => el.child(
container
@@ -707,7 +706,6 @@ impl Render for NewProcessModal {
.on_click(cx.listener(|this, _, window, cx| {
this.save_debug_scenario(window, cx);
}))
.key_binding(KeyBinding::for_action(&*secondary_action, cx))
.disabled(
self.debugger.is_none()
|| self
@@ -751,6 +749,7 @@ impl Render for NewProcessModal {
container
.child(div().child({
Button::new("edit-attach-task", "Edit in debug.json")
.label_size(LabelSize::Small)
.key_binding(KeyBinding::for_action(&*secondary_action, cx))
.on_click(move |_, window, cx| {
window.dispatch_action(secondary_action.boxed_clone(), cx)
@@ -1193,7 +1192,7 @@ impl PickerDelegate for DebugDelegate {
}
fn placeholder_text(&self, _window: &mut Window, _cx: &mut App) -> std::sync::Arc<str> {
"Find a debug task, or debug a command".into()
"Find a debug task, or debug a command.".into()
}
fn update_matches(
@@ -1454,17 +1453,18 @@ impl PickerDelegate for DebugDelegate {
.child({
let action = menu::SecondaryConfirm.boxed_clone();
if self.matches.is_empty() {
Button::new("edit-debug-json", "Edit debug.json").on_click(cx.listener(
|_picker, _, window, cx| {
Button::new("edit-debug-json", "Edit debug.json")
.label_size(LabelSize::Small)
.on_click(cx.listener(|_picker, _, window, cx| {
window.dispatch_action(
zed_actions::OpenProjectDebugTasks.boxed_clone(),
cx,
);
cx.emit(DismissEvent);
},
))
}))
} else {
Button::new("edit-debug-task", "Edit in debug.json")
.label_size(LabelSize::Small)
.key_binding(KeyBinding::for_action(&*action, cx))
.on_click(move |_, window, cx| {
window.dispatch_action(action.boxed_clone(), cx)

View File

@@ -6,10 +6,7 @@ use alacritty_terminal::vte::ansi;
use anyhow::Result;
use collections::HashMap;
use dap::{CompletionItem, CompletionItemType, OutputEvent};
use editor::{
Bias, CompletionProvider, Editor, EditorElement, EditorMode, EditorStyle, ExcerptId,
SizingBehavior,
};
use editor::{Bias, CompletionProvider, Editor, EditorElement, EditorStyle, ExcerptId};
use fuzzy::StringMatchCandidate;
use gpui::{
Action as _, AppContext, Context, Corner, Entity, FocusHandle, Focusable, HighlightStyle, Hsla,
@@ -62,11 +59,6 @@ impl Console {
) -> Self {
let console = cx.new(|cx| {
let mut editor = Editor::multi_line(window, cx);
editor.set_mode(EditorMode::Full {
scale_ui_elements_with_buffer_font_size: true,
show_active_line_background: true,
sizing_behavior: SizingBehavior::ExcludeOverscrollMargin,
});
editor.move_to_end(&editor::actions::MoveToEnd, window, cx);
editor.set_read_only(true);
editor.disable_scrollbars_and_minimap(window, cx);

View File

@@ -887,7 +887,7 @@ async fn test_random_diagnostics_with_inlays(cx: &mut TestAppContext, mut rng: S
vec![Inlay::edit_prediction(
post_inc(&mut next_inlay_id),
snapshot.buffer_snapshot().anchor_before(position),
Rope::from_iter(["Test inlay ", "next_inlay_id"]),
Rope::from_iter_small(["Test inlay ", "next_inlay_id"]),
)],
cx,
);
@@ -2080,7 +2080,7 @@ fn random_lsp_diagnostic(
const ERROR_MARGIN: usize = 10;
let file_content = fs.read_file_sync(path).unwrap();
let file_text = Rope::from(String::from_utf8_lossy(&file_content).as_ref());
let file_text = Rope::from_str_small(String::from_utf8_lossy(&file_content).as_ref());
let start = rng.random_range(0..file_text.len().saturating_add(ERROR_MARGIN));
let end = rng.random_range(start..file_text.len().saturating_add(ERROR_MARGIN));

View File

@@ -13,7 +13,7 @@ use gpui::{
};
use indoc::indoc;
use language::{
EditPredictionsMode, File, Language,
EditPredictionsMode, File, Language, Rope,
language_settings::{self, AllLanguageSettings, EditPredictionProvider, all_language_settings},
};
use project::DisableAiSettings;
@@ -1056,8 +1056,11 @@ async fn open_disabled_globs_setting_in_editor(
) -> Result<()> {
let settings_editor = workspace
.update_in(cx, |_, window, cx| {
create_and_open_local_file(paths::settings_file(), window, cx, || {
settings::initial_user_settings_content().as_ref().into()
create_and_open_local_file(paths::settings_file(), window, cx, |cx| {
Rope::from_str(
settings::initial_user_settings_content().as_ref(),
cx.background_executor(),
)
})
})?
.await?

View File

@@ -213,6 +213,15 @@ pub struct ExpandExcerptsDown {
pub(super) lines: u32,
}
/// Shows code completion suggestions at the cursor position.
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema, Action)]
#[action(namespace = editor)]
#[serde(deny_unknown_fields)]
pub struct ShowCompletions {
#[serde(default)]
pub(super) trigger: Option<String>,
}
/// Handles text input in the editor.
#[derive(PartialEq, Clone, Deserialize, Default, JsonSchema, Action)]
#[action(namespace = editor)]
@@ -727,8 +736,6 @@ actions!(
SelectToStartOfParagraph,
/// Extends selection up.
SelectUp,
/// Shows code completion suggestions at the cursor position.
ShowCompletions,
/// Shows the system character palette.
ShowCharacterPalette,
/// Shows edit prediction at cursor.

View File

@@ -28,12 +28,10 @@ use std::{
rc::Rc,
};
use task::ResolvedTask;
use ui::{
Color, IntoElement, ListItem, Pixels, Popover, ScrollAxes, Scrollbars, Styled, WithScrollbar,
prelude::*,
};
use ui::{Color, IntoElement, ListItem, Pixels, Popover, Styled, prelude::*};
use util::ResultExt;
use crate::CodeActionSource;
use crate::hover_popover::{hover_markdown_style, open_markdown_url};
use crate::{
CodeActionProvider, CompletionId, CompletionItemKind, CompletionProvider, DisplayRow, Editor,
@@ -41,8 +39,7 @@ use crate::{
actions::{ConfirmCodeAction, ConfirmCompletion},
split_words, styled_runs_for_code_label,
};
use crate::{CodeActionSource, EditorSettings};
use settings::{Settings, SnippetSortOrder};
use settings::SnippetSortOrder;
pub const MENU_GAP: Pixels = px(4.);
pub const MENU_ASIDE_X_PADDING: Pixels = px(16.);
@@ -252,17 +249,8 @@ enum MarkdownCacheKey {
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum CompletionsMenuSource {
/// Show all completions (words, snippets, LSP)
Normal,
/// Show only snippets (not words or LSP)
///
/// Used after typing a non-word character
SnippetsOnly,
/// Tab stops within a snippet that have a predefined finite set of choices
SnippetChoices,
/// Show only words (not snippets or LSP)
///
/// Used when word completions are explicitly triggered
Words { ignore_threshold: bool },
}
@@ -273,20 +261,6 @@ impl Drop for CompletionsMenu {
}
}
struct CompletionMenuScrollBarSetting;
impl ui::scrollbars::GlobalSetting for CompletionMenuScrollBarSetting {
fn get_value(_cx: &App) -> &Self {
&Self
}
}
impl ui::scrollbars::ScrollbarVisibility for CompletionMenuScrollBarSetting {
fn visibility(&self, cx: &App) -> ui::scrollbars::ShowScrollbar {
EditorSettings::get_global(cx).completion_menu_scrollbar
}
}
impl CompletionsMenu {
pub fn new(
id: CompletionId,
@@ -924,17 +898,7 @@ impl CompletionsMenu {
}
});
Popover::new()
.child(
div().child(list).custom_scrollbars(
Scrollbars::for_settings::<CompletionMenuScrollBarSetting>()
.show_along(ScrollAxes::Vertical)
.tracked_scroll_handle(self.scroll_handle.clone()),
window,
cx,
),
)
.into_any_element()
Popover::new().child(list).into_any_element()
}
fn render_aside(

View File

@@ -17,9 +17,6 @@
//! [Editor]: crate::Editor
//! [EditorElement]: crate::element::EditorElement
#[macro_use]
mod dimensions;
mod block_map;
mod crease_map;
mod custom_highlights;
@@ -170,12 +167,11 @@ impl DisplayMap {
}
pub fn snapshot(&mut self, cx: &mut Context<Self>) -> DisplaySnapshot {
let tab_size = Self::tab_size(&self.buffer, cx);
let buffer_snapshot = self.buffer.read(cx).snapshot(cx);
let edits = self.buffer_subscription.consume().into_inner();
let (inlay_snapshot, edits) = self.inlay_map.sync(buffer_snapshot, edits);
let (fold_snapshot, edits) = self.fold_map.read(inlay_snapshot, edits);
let tab_size = Self::tab_size(&self.buffer, cx);
let (tab_snapshot, edits) = self.tab_map.sync(fold_snapshot, edits, tab_size);
let (wrap_snapshot, edits) = self
.wrap_map
@@ -919,7 +915,7 @@ impl DisplaySnapshot {
pub fn text_chunks(&self, display_row: DisplayRow) -> impl Iterator<Item = &str> {
self.block_snapshot
.chunks(
BlockRow(display_row.0)..BlockRow(self.max_point().row().next_row().0),
display_row.0..self.max_point().row().next_row().0,
false,
self.masked,
Highlights::default(),
@@ -931,12 +927,7 @@ impl DisplaySnapshot {
pub fn reverse_text_chunks(&self, display_row: DisplayRow) -> impl Iterator<Item = &str> {
(0..=display_row.0).rev().flat_map(move |row| {
self.block_snapshot
.chunks(
BlockRow(row)..BlockRow(row + 1),
false,
self.masked,
Highlights::default(),
)
.chunks(row..row + 1, false, self.masked, Highlights::default())
.map(|h| h.text)
.collect::<Vec<_>>()
.into_iter()
@@ -951,7 +942,7 @@ impl DisplaySnapshot {
highlight_styles: HighlightStyles,
) -> DisplayChunks<'_> {
self.block_snapshot.chunks(
BlockRow(display_rows.start.0)..BlockRow(display_rows.end.0),
display_rows.start.0..display_rows.end.0,
language_aware,
self.masked,
Highlights {
@@ -1187,8 +1178,8 @@ impl DisplaySnapshot {
rows: Range<DisplayRow>,
) -> impl Iterator<Item = (DisplayRow, &Block)> {
self.block_snapshot
.blocks_in_range(BlockRow(rows.start.0)..BlockRow(rows.end.0))
.map(|(row, block)| (DisplayRow(row.0), block))
.blocks_in_range(rows.start.0..rows.end.0)
.map(|(row, block)| (DisplayRow(row), block))
}
pub fn sticky_header_excerpt(&self, row: f64) -> Option<StickyHeaderExcerpt<'_>> {
@@ -1220,7 +1211,7 @@ impl DisplaySnapshot {
pub fn soft_wrap_indent(&self, display_row: DisplayRow) -> Option<u32> {
let wrap_row = self
.block_snapshot
.to_wrap_point(BlockPoint::new(BlockRow(display_row.0), 0), Bias::Left)
.to_wrap_point(BlockPoint::new(display_row.0, 0), Bias::Left)
.row();
self.wrap_snapshot().soft_wrap_indent(wrap_row)
}
@@ -1251,7 +1242,7 @@ impl DisplaySnapshot {
}
pub fn longest_row(&self) -> DisplayRow {
DisplayRow(self.block_snapshot.longest_row().0)
DisplayRow(self.block_snapshot.longest_row())
}
pub fn longest_row_in_range(&self, range: Range<DisplayRow>) -> DisplayRow {
@@ -1578,6 +1569,7 @@ pub mod tests {
use lsp::LanguageServerId;
use project::Project;
use rand::{Rng, prelude::*};
use rope::Rope;
use settings::{SettingsContent, SettingsStore};
use smol::stream::StreamExt;
use std::{env, sync::Arc};
@@ -2083,7 +2075,7 @@ pub mod tests {
vec![Inlay::edit_prediction(
0,
buffer_snapshot.anchor_after(0),
"\n",
Rope::from_str_small("\n"),
)],
cx,
);

File diff suppressed because it is too large

View File

@@ -1,96 +0,0 @@
#[derive(Copy, Clone, Debug, Default, Eq, Ord, PartialOrd, PartialEq)]
pub struct RowDelta(pub u32);
impl RowDelta {
pub fn saturating_sub(self, other: RowDelta) -> RowDelta {
RowDelta(self.0.saturating_sub(other.0))
}
}
impl ::std::ops::Add for RowDelta {
type Output = RowDelta;
fn add(self, rhs: RowDelta) -> Self::Output {
RowDelta(self.0 + rhs.0)
}
}
impl ::std::ops::Sub for RowDelta {
type Output = RowDelta;
fn sub(self, rhs: RowDelta) -> Self::Output {
RowDelta(self.0 - rhs.0)
}
}
impl ::std::ops::AddAssign for RowDelta {
fn add_assign(&mut self, rhs: RowDelta) {
self.0 += rhs.0;
}
}
impl ::std::ops::SubAssign for RowDelta {
fn sub_assign(&mut self, rhs: RowDelta) {
self.0 -= rhs.0;
}
}
macro_rules! impl_for_row_types {
($row:ident => $row_delta:ident) => {
impl $row {
pub fn saturating_sub(self, other: $row_delta) -> $row {
$row(self.0.saturating_sub(other.0))
}
}
impl ::std::ops::Add for $row {
type Output = Self;
fn add(self, rhs: Self) -> Self::Output {
Self(self.0 + rhs.0)
}
}
impl ::std::ops::Add<$row_delta> for $row {
type Output = Self;
fn add(self, rhs: $row_delta) -> Self::Output {
Self(self.0 + rhs.0)
}
}
impl ::std::ops::Sub for $row {
type Output = $row_delta;
fn sub(self, rhs: Self) -> Self::Output {
$row_delta(self.0 - rhs.0)
}
}
impl ::std::ops::Sub<$row_delta> for $row {
type Output = $row;
fn sub(self, rhs: $row_delta) -> Self::Output {
$row(self.0 - rhs.0)
}
}
impl ::std::ops::AddAssign for $row {
fn add_assign(&mut self, rhs: Self) {
self.0 += rhs.0;
}
}
impl ::std::ops::AddAssign<$row_delta> for $row {
fn add_assign(&mut self, rhs: $row_delta) {
self.0 += rhs.0;
}
}
impl ::std::ops::SubAssign<$row_delta> for $row {
fn sub_assign(&mut self, rhs: $row_delta) {
self.0 -= rhs.0;
}
}
};
}
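
The `impl_for_row_types!` macro deleted above is what gave a row newtype its arithmetic against `RowDelta`; wrap_map.rs invoked it as `WrapRow => RowDelta` before this change. A minimal, hypothetical usage sketch (the `ExampleRow` name is illustrative, not a type in the codebase):

```rust
// Hypothetical caller of the removed macro, mirroring the `WrapRow => RowDelta`
// invocation that wrap_map.rs drops in this diff.
#[derive(Copy, Clone, Debug, Default, Eq, Ord, PartialOrd, PartialEq)]
pub struct ExampleRow(pub u32);

impl_for_row_types! {
    ExampleRow => RowDelta
}

// After expansion, row arithmetic only type-checks against RowDelta:
// let row = ExampleRow(10) + RowDelta(2);      // ExampleRow(12)
// let delta = ExampleRow(12) - ExampleRow(10); // RowDelta(2)
```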

View File

@@ -700,16 +700,20 @@ impl InlayMap {
.collect::<String>();
let next_inlay = if i % 2 == 0 {
use rope::Rope;
Inlay::mock_hint(
post_inc(next_inlay_id),
snapshot.buffer.anchor_at(position, bias),
&text,
Rope::from_str_small(&text),
)
} else {
use rope::Rope;
Inlay::edit_prediction(
post_inc(next_inlay_id),
snapshot.buffer.anchor_at(position, bias),
&text,
Rope::from_str_small(&text),
)
};
let inlay_id = next_inlay.id;
@@ -1301,7 +1305,7 @@ mod tests {
vec![Inlay::mock_hint(
post_inc(&mut next_inlay_id),
buffer.read(cx).snapshot(cx).anchor_after(3),
"|123|",
Rope::from_str_small("|123|"),
)],
);
assert_eq!(inlay_snapshot.text(), "abc|123|defghi");
@@ -1378,12 +1382,12 @@ mod tests {
Inlay::mock_hint(
post_inc(&mut next_inlay_id),
buffer.read(cx).snapshot(cx).anchor_before(3),
"|123|",
Rope::from_str_small("|123|"),
),
Inlay::edit_prediction(
post_inc(&mut next_inlay_id),
buffer.read(cx).snapshot(cx).anchor_after(3),
"|456|",
Rope::from_str_small("|456|"),
),
],
);
@@ -1593,17 +1597,17 @@ mod tests {
Inlay::mock_hint(
post_inc(&mut next_inlay_id),
buffer.read(cx).snapshot(cx).anchor_before(0),
"|123|\n",
Rope::from_str_small("|123|\n"),
),
Inlay::mock_hint(
post_inc(&mut next_inlay_id),
buffer.read(cx).snapshot(cx).anchor_before(4),
"|456|",
Rope::from_str_small("|456|"),
),
Inlay::edit_prediction(
post_inc(&mut next_inlay_id),
buffer.read(cx).snapshot(cx).anchor_before(7),
"\n|567|\n",
Rope::from_str_small("\n|567|\n"),
),
],
);
@@ -1677,9 +1681,14 @@ mod tests {
(offset, inlay.clone())
})
.collect::<Vec<_>>();
let mut expected_text = Rope::from(&buffer_snapshot.text());
let mut expected_text =
Rope::from_str(&buffer_snapshot.text(), cx.background_executor());
for (offset, inlay) in inlays.iter().rev() {
expected_text.replace(*offset..*offset, &inlay.text().to_string());
expected_text.replace(
*offset..*offset,
&inlay.text().to_string(),
cx.background_executor(),
);
}
assert_eq!(inlay_snapshot.text(), expected_text.to_string());
@@ -2067,7 +2076,7 @@ mod tests {
let inlay = Inlay {
id: InlayId::Hint(0),
position,
content: InlayContent::Text(text::Rope::from(inlay_text)),
content: InlayContent::Text(text::Rope::from_str(inlay_text, cx.background_executor())),
};
let (inlay_snapshot, _) = inlay_map.splice(&[], vec![inlay]);
@@ -2181,7 +2190,10 @@ mod tests {
let inlay = Inlay {
id: InlayId::Hint(0),
position,
content: InlayContent::Text(text::Rope::from(test_case.inlay_text)),
content: InlayContent::Text(text::Rope::from_str(
test_case.inlay_text,
cx.background_executor(),
)),
};
let (inlay_snapshot, _) = inlay_map.splice(&[], vec![inlay]);

View File

@@ -1042,7 +1042,7 @@ mod tests {
let (mut tab_map, _) = TabMap::new(fold_snapshot, tab_size);
let tabs_snapshot = tab_map.set_max_expansion_column(32);
let text = text::Rope::from(tabs_snapshot.text().as_str());
let text = text::Rope::from_str(tabs_snapshot.text().as_str(), cx.background_executor());
log::info!(
"TabMap text (tab size: {}): {:?}",
tab_size,

View File

@@ -1,6 +1,5 @@
use super::{
Highlights,
dimensions::RowDelta,
fold_map::{Chunk, FoldRows},
tab_map::{self, TabEdit, TabPoint, TabSnapshot},
};
@@ -8,20 +7,13 @@ use gpui::{App, AppContext as _, Context, Entity, Font, LineWrapper, Pixels, Tas
use language::Point;
use multi_buffer::{MultiBufferSnapshot, RowInfo};
use smol::future::yield_now;
use std::{cmp, collections::VecDeque, mem, ops::Range, sync::LazyLock, time::Duration};
use std::sync::LazyLock;
use std::{cmp, collections::VecDeque, mem, ops::Range, time::Duration};
use sum_tree::{Bias, Cursor, Dimensions, SumTree};
use text::Patch;
pub use super::tab_map::TextSummary;
pub type WrapEdit = text::Edit<WrapRow>;
pub type WrapPatch = text::Patch<WrapRow>;
#[derive(Copy, Clone, Debug, Default, Eq, Ord, PartialOrd, PartialEq)]
pub struct WrapRow(pub u32);
impl_for_row_types! {
WrapRow => RowDelta
}
pub type WrapEdit = text::Edit<u32>;
/// Handles soft wrapping of text.
///
@@ -29,8 +21,8 @@ impl_for_row_types! {
pub struct WrapMap {
snapshot: WrapSnapshot,
pending_edits: VecDeque<(TabSnapshot, Vec<TabEdit>)>,
interpolated_edits: WrapPatch,
edits_since_sync: WrapPatch,
interpolated_edits: Patch<u32>,
edits_since_sync: Patch<u32>,
wrap_width: Option<Pixels>,
background_task: Option<Task<()>>,
font_with_size: (Font, Pixels),
@@ -62,7 +54,7 @@ pub struct WrapChunks<'a> {
input_chunks: tab_map::TabChunks<'a>,
input_chunk: Chunk<'a>,
output_position: WrapPoint,
max_output_row: WrapRow,
max_output_row: u32,
transforms: Cursor<'a, 'static, Transform, Dimensions<WrapPoint, TabPoint>>,
snapshot: &'a WrapSnapshot,
}
@@ -71,19 +63,19 @@ pub struct WrapChunks<'a> {
pub struct WrapRows<'a> {
input_buffer_rows: FoldRows<'a>,
input_buffer_row: RowInfo,
output_row: WrapRow,
output_row: u32,
soft_wrapped: bool,
max_output_row: WrapRow,
max_output_row: u32,
transforms: Cursor<'a, 'static, Transform, Dimensions<WrapPoint, TabPoint>>,
}
impl WrapRows<'_> {
pub(crate) fn seek(&mut self, start_row: WrapRow) {
pub(crate) fn seek(&mut self, start_row: u32) {
self.transforms
.seek(&WrapPoint::new(start_row, 0), Bias::Left);
let mut input_row = self.transforms.start().1.row();
if self.transforms.item().is_some_and(|t| t.is_isomorphic()) {
input_row += (start_row - self.transforms.start().0.row()).0;
input_row += start_row - self.transforms.start().0.row();
}
self.soft_wrapped = self.transforms.item().is_some_and(|t| !t.is_isomorphic());
self.input_buffer_rows.seek(input_row);
@@ -128,7 +120,7 @@ impl WrapMap {
tab_snapshot: TabSnapshot,
edits: Vec<TabEdit>,
cx: &mut Context<Self>,
) -> (WrapSnapshot, WrapPatch) {
) -> (WrapSnapshot, Patch<u32>) {
if self.wrap_width.is_some() {
self.pending_edits.push_back((tab_snapshot, edits));
self.flush_edits(cx);
@@ -234,8 +226,8 @@ impl WrapMap {
let new_rows = self.snapshot.transforms.summary().output.lines.row + 1;
self.snapshot.interpolated = false;
self.edits_since_sync = self.edits_since_sync.compose(Patch::new(vec![WrapEdit {
old: WrapRow(0)..WrapRow(old_rows),
new: WrapRow(0)..WrapRow(new_rows),
old: 0..old_rows,
new: 0..new_rows,
}]));
}
}
@@ -339,7 +331,7 @@ impl WrapSnapshot {
self.tab_snapshot.buffer_snapshot()
}
fn interpolate(&mut self, new_tab_snapshot: TabSnapshot, tab_edits: &[TabEdit]) -> WrapPatch {
fn interpolate(&mut self, new_tab_snapshot: TabSnapshot, tab_edits: &[TabEdit]) -> Patch<u32> {
let mut new_transforms;
if tab_edits.is_empty() {
new_transforms = self.transforms.clone();
@@ -409,7 +401,7 @@ impl WrapSnapshot {
tab_edits: &[TabEdit],
wrap_width: Pixels,
line_wrapper: &mut LineWrapper,
) -> WrapPatch {
) -> Patch<u32> {
#[derive(Debug)]
struct RowEdit {
old_rows: Range<u32>,
@@ -562,7 +554,7 @@ impl WrapSnapshot {
old_snapshot.compute_edits(tab_edits, self)
}
fn compute_edits(&self, tab_edits: &[TabEdit], new_snapshot: &WrapSnapshot) -> WrapPatch {
fn compute_edits(&self, tab_edits: &[TabEdit], new_snapshot: &WrapSnapshot) -> Patch<u32> {
let mut wrap_edits = Vec::with_capacity(tab_edits.len());
let mut old_cursor = self.transforms.cursor::<TransformSummary>(());
let mut new_cursor = new_snapshot.transforms.cursor::<TransformSummary>(());
@@ -589,8 +581,8 @@ impl WrapSnapshot {
new_end += tab_edit.new.end.0 - new_cursor.start().input.lines;
wrap_edits.push(WrapEdit {
old: WrapRow(old_start.row)..WrapRow(old_end.row),
new: WrapRow(new_start.row)..WrapRow(new_end.row),
old: old_start.row..old_end.row,
new: new_start.row..new_end.row,
});
}
@@ -600,7 +592,7 @@ impl WrapSnapshot {
pub(crate) fn chunks<'a>(
&'a self,
rows: Range<WrapRow>,
rows: Range<u32>,
language_aware: bool,
highlights: Highlights<'a>,
) -> WrapChunks<'a> {
@@ -635,17 +627,17 @@ impl WrapSnapshot {
WrapPoint(self.transforms.summary().output.lines)
}
pub fn line_len(&self, row: WrapRow) -> u32 {
pub fn line_len(&self, row: u32) -> u32 {
let (start, _, item) = self.transforms.find::<Dimensions<WrapPoint, TabPoint>, _>(
(),
&WrapPoint::new(row + WrapRow(1), 0),
&WrapPoint::new(row + 1, 0),
Bias::Left,
);
if item.is_some_and(|transform| transform.is_isomorphic()) {
let overshoot = row - start.0.row();
let tab_row = start.1.row() + overshoot.0;
let tab_row = start.1.row() + overshoot;
let tab_line_len = self.tab_snapshot.line_len(tab_row);
if overshoot.0 == 0 {
if overshoot == 0 {
start.0.column() + (tab_line_len - start.1.column())
} else {
tab_line_len
@@ -655,7 +647,7 @@ impl WrapSnapshot {
}
}
pub fn text_summary_for_range(&self, rows: Range<WrapRow>) -> TextSummary {
pub fn text_summary_for_range(&self, rows: Range<u32>) -> TextSummary {
let mut summary = TextSummary::default();
let start = WrapPoint::new(rows.start, 0);
@@ -716,12 +708,10 @@ impl WrapSnapshot {
summary
}
pub fn soft_wrap_indent(&self, row: WrapRow) -> Option<u32> {
let (.., item) = self.transforms.find::<WrapPoint, _>(
(),
&WrapPoint::new(row + WrapRow(1), 0),
Bias::Right,
);
pub fn soft_wrap_indent(&self, row: u32) -> Option<u32> {
let (.., item) =
self.transforms
.find::<WrapPoint, _>((), &WrapPoint::new(row + 1, 0), Bias::Right);
item.and_then(|transform| {
if transform.is_isomorphic() {
None
@@ -735,14 +725,14 @@ impl WrapSnapshot {
self.transforms.summary().output.longest_row
}
pub fn row_infos(&self, start_row: WrapRow) -> WrapRows<'_> {
pub fn row_infos(&self, start_row: u32) -> WrapRows<'_> {
let mut transforms = self
.transforms
.cursor::<Dimensions<WrapPoint, TabPoint>>(());
transforms.seek(&WrapPoint::new(start_row, 0), Bias::Left);
let mut input_row = transforms.start().1.row();
if transforms.item().is_some_and(|t| t.is_isomorphic()) {
input_row += (start_row - transforms.start().0.row()).0;
input_row += start_row - transforms.start().0.row();
}
let soft_wrapped = transforms.item().is_some_and(|t| !t.is_isomorphic());
let mut input_buffer_rows = self.tab_snapshot.rows(input_row);
@@ -797,9 +787,9 @@ impl WrapSnapshot {
self.tab_point_to_wrap_point(self.tab_snapshot.clip_point(self.to_tab_point(point), bias))
}
pub fn prev_row_boundary(&self, mut point: WrapPoint) -> WrapRow {
pub fn prev_row_boundary(&self, mut point: WrapPoint) -> u32 {
if self.transforms.is_empty() {
return WrapRow(0);
return 0;
}
*point.column_mut() = 0;
@@ -823,7 +813,7 @@ impl WrapSnapshot {
unreachable!()
}
pub fn next_row_boundary(&self, mut point: WrapPoint) -> Option<WrapRow> {
pub fn next_row_boundary(&self, mut point: WrapPoint) -> Option<u32> {
point.0 += Point::new(1, 0);
let mut cursor = self
@@ -843,13 +833,13 @@ impl WrapSnapshot {
#[cfg(test)]
pub fn text(&self) -> String {
self.text_chunks(WrapRow(0)).collect()
self.text_chunks(0).collect()
}
#[cfg(test)]
pub fn text_chunks(&self, wrap_row: WrapRow) -> impl Iterator<Item = &str> {
pub fn text_chunks(&self, wrap_row: u32) -> impl Iterator<Item = &str> {
self.chunks(
wrap_row..self.max_point().row() + WrapRow(1),
wrap_row..self.max_point().row() + 1,
false,
Highlights::default(),
)
@@ -873,26 +863,25 @@ impl WrapSnapshot {
}
}
let text = language::Rope::from(self.text().as_str());
let text = language::Rope::from_str_small(self.text().as_str());
let mut input_buffer_rows = self.tab_snapshot.rows(0);
let mut expected_buffer_rows = Vec::new();
let mut prev_tab_row = 0;
for display_row in 0..=self.max_point().row().0 {
let display_row = WrapRow(display_row);
for display_row in 0..=self.max_point().row() {
let tab_point = self.to_tab_point(WrapPoint::new(display_row, 0));
if tab_point.row() == prev_tab_row && display_row != WrapRow(0) {
if tab_point.row() == prev_tab_row && display_row != 0 {
expected_buffer_rows.push(None);
} else {
expected_buffer_rows.push(input_buffer_rows.next().unwrap().buffer_row);
}
prev_tab_row = tab_point.row();
assert_eq!(self.line_len(display_row), text.line_len(display_row.0));
assert_eq!(self.line_len(display_row), text.line_len(display_row));
}
for start_display_row in 0..expected_buffer_rows.len() {
assert_eq!(
self.row_infos(WrapRow(start_display_row as u32))
self.row_infos(start_display_row as u32)
.map(|row_info| row_info.buffer_row)
.collect::<Vec<_>>(),
&expected_buffer_rows[start_display_row..],
@@ -905,7 +894,7 @@ impl WrapSnapshot {
}
impl WrapChunks<'_> {
pub(crate) fn seek(&mut self, rows: Range<WrapRow>) {
pub(crate) fn seek(&mut self, rows: Range<u32>) {
let output_start = WrapPoint::new(rows.start, 0);
let output_end = WrapPoint::new(rows.end, 0);
self.transforms.seek(&output_start, Bias::Right);
@@ -942,7 +931,7 @@ impl<'a> Iterator for WrapChunks<'a> {
// Exclude newline starting prior to the desired row.
start_ix = 1;
summary.row = 0;
} else if self.output_position.row() + WrapRow(1) >= self.max_output_row {
} else if self.output_position.row() + 1 >= self.max_output_row {
// Exclude soft indentation ending after the desired row.
end_ix = 1;
summary.column = 0;
@@ -1008,7 +997,7 @@ impl Iterator for WrapRows<'_> {
let soft_wrapped = self.soft_wrapped;
let diff_status = self.input_buffer_row.diff_status;
self.output_row += WrapRow(1);
self.output_row += 1;
self.transforms
.seek_forward(&WrapPoint::new(self.output_row, 0), Bias::Left);
if self.transforms.item().is_some_and(|t| t.is_isomorphic()) {
@@ -1025,7 +1014,6 @@ impl Iterator for WrapRows<'_> {
multibuffer_row: None,
diff_status,
expand_info: None,
wrapped_buffer_row: buffer_row.buffer_row,
}
} else {
buffer_row
@@ -1119,12 +1107,12 @@ impl SumTreeExt for SumTree<Transform> {
}
impl WrapPoint {
pub fn new(row: WrapRow, column: u32) -> Self {
Self(Point::new(row.0, column))
pub fn new(row: u32, column: u32) -> Self {
Self(Point::new(row, column))
}
pub fn row(self) -> WrapRow {
WrapRow(self.0.row)
pub fn row(self) -> u32 {
self.0.row
}
pub fn row_mut(&mut self) -> &mut u32 {
@@ -1425,25 +1413,26 @@ mod tests {
}
}
let mut initial_text = Rope::from(initial_snapshot.text().as_str());
let mut initial_text =
Rope::from_str(initial_snapshot.text().as_str(), cx.background_executor());
for (snapshot, patch) in edits {
let snapshot_text = Rope::from(snapshot.text().as_str());
let snapshot_text = Rope::from_str(snapshot.text().as_str(), cx.background_executor());
for edit in &patch {
let old_start = initial_text.point_to_offset(Point::new(edit.new.start.0, 0));
let old_start = initial_text.point_to_offset(Point::new(edit.new.start, 0));
let old_end = initial_text.point_to_offset(cmp::min(
Point::new(edit.new.start.0 + (edit.old.end - edit.old.start).0, 0),
Point::new(edit.new.start + edit.old.len() as u32, 0),
initial_text.max_point(),
));
let new_start = snapshot_text.point_to_offset(Point::new(edit.new.start.0, 0));
let new_start = snapshot_text.point_to_offset(Point::new(edit.new.start, 0));
let new_end = snapshot_text.point_to_offset(cmp::min(
Point::new(edit.new.end.0, 0),
Point::new(edit.new.end, 0),
snapshot_text.max_point(),
));
let new_text = snapshot_text
.chunks_in_range(new_start..new_end)
.collect::<String>();
initial_text.replace(old_start..old_end, &new_text);
initial_text.replace(old_start..old_end, &new_text, cx.background_executor());
}
assert_eq!(initial_text.to_string(), snapshot_text.to_string());
}
@@ -1496,11 +1485,11 @@ mod tests {
impl WrapSnapshot {
fn verify_chunks(&mut self, rng: &mut impl Rng) {
for _ in 0..5 {
let mut end_row = rng.random_range(0..=self.max_point().row().0);
let mut end_row = rng.random_range(0..=self.max_point().row());
let start_row = rng.random_range(0..=end_row);
end_row += 1;
let mut expected_text = self.text_chunks(WrapRow(start_row)).collect::<String>();
let mut expected_text = self.text_chunks(start_row).collect::<String>();
if expected_text.ends_with('\n') {
expected_text.push('\n');
}
@@ -1509,16 +1498,12 @@ mod tests {
.take((end_row - start_row) as usize)
.collect::<Vec<_>>()
.join("\n");
if end_row <= self.max_point().row().0 {
if end_row <= self.max_point().row() {
expected_text.push('\n');
}
let actual_text = self
.chunks(
WrapRow(start_row)..WrapRow(end_row),
true,
Highlights::default(),
)
.chunks(start_row..end_row, true, Highlights::default())
.map(|c| c.text)
.collect::<String>();
assert_eq!(

View File

@@ -1,13 +1,12 @@
use edit_prediction::EditPredictionProvider;
use gpui::{Entity, KeyBinding, Modifiers, prelude::*};
use gpui::{Entity, prelude::*};
use indoc::indoc;
use multi_buffer::{Anchor, MultiBufferSnapshot, ToPoint};
use std::ops::Range;
use text::{Point, ToOffset};
use crate::{
AcceptEditPrediction, EditPrediction, MenuEditPredictionsPolicy, editor_tests::init_test,
test::editor_test_context::EditorTestContext,
EditPrediction, editor_tests::init_test, test::editor_test_context::EditorTestContext,
};
#[gpui::test]
@@ -271,63 +270,6 @@ async fn test_edit_prediction_jump_disabled_for_non_zed_providers(cx: &mut gpui:
});
}
#[gpui::test]
async fn test_edit_prediction_preview_cleanup_on_toggle_off(cx: &mut gpui::TestAppContext) {
init_test(cx, |_| {});
// Bind `ctrl-shift-a` to accept the provided edit prediction. The actual key
// binding here doesn't matter, we simply need to confirm that holding the
// binding's modifiers triggers the edit prediction preview.
cx.update(|cx| cx.bind_keys([KeyBinding::new("ctrl-shift-a", AcceptEditPrediction, None)]));
let mut cx = EditorTestContext::new(cx).await;
let provider = cx.new(|_| FakeEditPredictionProvider::default());
assign_editor_completion_provider(provider.clone(), &mut cx);
cx.set_state("let x = ˇ;");
propose_edits(&provider, vec![(8..8, "42")], &mut cx);
cx.update_editor(|editor, window, cx| {
editor.set_menu_edit_predictions_policy(MenuEditPredictionsPolicy::ByProvider);
editor.update_visible_edit_prediction(window, cx)
});
cx.editor(|editor, _, _| {
assert!(editor.has_active_edit_prediction());
});
// Simulate pressing the modifiers for `AcceptEditPrediction`, namely
// `ctrl-shift`, so that we can confirm that the edit prediction preview is
// activated.
let modifiers = Modifiers::control_shift();
cx.simulate_modifiers_change(modifiers);
cx.run_until_parked();
cx.editor(|editor, _, _| {
assert!(editor.edit_prediction_preview_is_active());
});
// Disable showing edit predictions without issuing a new modifiers changed
// event, to confirm that the edit prediction preview is still active.
cx.update_editor(|editor, window, cx| {
editor.set_show_edit_predictions(Some(false), window, cx);
});
cx.editor(|editor, _, _| {
assert!(!editor.has_active_edit_prediction());
assert!(editor.edit_prediction_preview_is_active());
});
// Now release the modifiers
// Simulate releasing all modifiers, ensuring that even with edit prediction
// disabled, the edit prediction preview is cleaned up.
cx.simulate_modifiers_change(Modifiers::none());
cx.run_until_parked();
cx.editor(|editor, _, _| {
assert!(!editor.edit_prediction_preview_is_active());
});
}
fn assert_editor_active_edit_completion(
cx: &mut EditorTestContext,
assert: impl FnOnce(MultiBufferSnapshot, &Vec<(Range<Anchor>, String)>),
@@ -453,7 +395,7 @@ impl EditPredictionProvider for FakeEditPredictionProvider {
}
fn show_completions_in_menu() -> bool {
true
false
}
fn supports_jump_to_edit() -> bool {

View File

@@ -163,10 +163,7 @@ use rpc::{ErrorCode, ErrorExt, proto::PeerId};
use scroll::{Autoscroll, OngoingScroll, ScrollAnchor, ScrollManager};
use selections_collection::{MutableSelectionsCollection, SelectionsCollection};
use serde::{Deserialize, Serialize};
use settings::{
GitGutterSetting, RelativeLineNumbers, Settings, SettingsLocation, SettingsStore,
update_settings_file,
};
use settings::{GitGutterSetting, Settings, SettingsLocation, SettingsStore, update_settings_file};
use smallvec::{SmallVec, smallvec};
use snippet::Snippet;
use std::{
@@ -455,20 +452,6 @@ pub enum SelectMode {
All,
}
#[derive(Copy, Clone, Default, PartialEq, Eq, Debug)]
pub enum SizingBehavior {
/// The editor will layout itself using `size_full` and will include the vertical
/// scroll margin as requested by user settings.
#[default]
Default,
/// The editor will layout itself using `size_full`, but will not have any
/// vertical overscroll.
ExcludeOverscrollMargin,
/// The editor will request a vertical size according to its content and will be
/// layouted without a vertical scroll margin.
SizeByContent,
}
#[derive(Clone, PartialEq, Eq, Debug)]
pub enum EditorMode {
SingleLine,
@@ -481,8 +464,8 @@ pub enum EditorMode {
scale_ui_elements_with_buffer_font_size: bool,
/// When set to `true`, the editor will render a background for the active line.
show_active_line_background: bool,
/// Determines the sizing behavior for this editor
sizing_behavior: SizingBehavior,
/// When set to `true`, the editor's height will be determined by its content.
sized_by_content: bool,
},
Minimap {
parent: WeakEntity<Editor>,
@@ -494,7 +477,7 @@ impl EditorMode {
Self::Full {
scale_ui_elements_with_buffer_font_size: true,
show_active_line_background: true,
sizing_behavior: SizingBehavior::Default,
sized_by_content: false,
}
}
@@ -3142,7 +3125,7 @@ impl Editor {
};
if continue_showing {
self.open_or_update_completions_menu(None, None, false, window, cx);
self.show_completions(&ShowCompletions { trigger: None }, window, cx);
} else {
self.hide_context_menu(window, cx);
}
@@ -4972,18 +4955,57 @@ impl Editor {
ignore_threshold: false,
}),
None,
trigger_in_words,
window,
cx,
);
}
_ => self.open_or_update_completions_menu(
None,
Some(text.to_owned()).filter(|x| !x.is_empty()),
true,
window,
Some(CompletionsMenuSource::Normal)
| Some(CompletionsMenuSource::SnippetChoices)
| None
if self.is_completion_trigger(
text,
trigger_in_words,
completions_source.is_some(),
cx,
) =>
{
self.show_completions(
&ShowCompletions {
trigger: Some(text.to_owned()).filter(|x| !x.is_empty()),
},
window,
cx,
)
}
_ => {
self.hide_context_menu(window, cx);
}
}
}
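// Returns true when the text just typed should open or refresh the completions menu,
// by asking the completion provider for the buffer under the newest cursor.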
fn is_completion_trigger(
&self,
text: &str,
trigger_in_words: bool,
menu_is_open: bool,
cx: &mut Context<Self>,
) -> bool {
let position = self.selections.newest_anchor().head();
let Some(buffer) = self.buffer.read(cx).buffer_for_anchor(position, cx) else {
return false;
};
if let Some(completion_provider) = &self.completion_provider {
completion_provider.is_completion_trigger(
&buffer,
position.text_anchor,
text,
trigger_in_words,
menu_is_open,
cx,
),
)
} else {
false
}
}
@@ -5261,7 +5283,6 @@ impl Editor {
ignore_threshold: true,
}),
None,
false,
window,
cx,
);
@@ -5269,18 +5290,17 @@ impl Editor {
pub fn show_completions(
&mut self,
_: &ShowCompletions,
options: &ShowCompletions,
window: &mut Window,
cx: &mut Context<Self>,
) {
self.open_or_update_completions_menu(None, None, false, window, cx);
self.open_or_update_completions_menu(None, options.trigger.as_deref(), window, cx);
}
fn open_or_update_completions_menu(
&mut self,
requested_source: Option<CompletionsMenuSource>,
trigger: Option<String>,
trigger_in_words: bool,
trigger: Option<&str>,
window: &mut Window,
cx: &mut Context<Self>,
) {
@@ -5288,15 +5308,6 @@ impl Editor {
return;
}
let completions_source = self
.context_menu
.borrow()
.as_ref()
.and_then(|menu| match menu {
CodeContextMenu::Completions(completions_menu) => Some(completions_menu.source),
CodeContextMenu::CodeActions(_) => None,
});
let multibuffer_snapshot = self.buffer.read(cx).read(cx);
// Typically `start` == `end`, but with snippet tabstop choices the default choice is
@@ -5344,8 +5355,7 @@ impl Editor {
ignore_word_threshold = ignore_threshold;
None
}
Some(CompletionsMenuSource::SnippetChoices)
| Some(CompletionsMenuSource::SnippetsOnly) => {
Some(CompletionsMenuSource::SnippetChoices) => {
log::error!("bug: SnippetChoices requested_source is not handled");
None
}
@@ -5359,19 +5369,13 @@ impl Editor {
.as_ref()
.is_none_or(|provider| provider.filter_completions());
let was_snippets_only = matches!(
completions_source,
Some(CompletionsMenuSource::SnippetsOnly)
);
if let Some(CodeContextMenu::Completions(menu)) = self.context_menu.borrow_mut().as_mut() {
if filter_completions {
menu.filter(query.clone(), provider.clone(), window, cx);
}
// When `is_incomplete` is false, no need to re-query completions when the current query
// is a suffix of the initial query.
let was_complete = !menu.is_incomplete;
if was_complete && !was_snippets_only {
if !menu.is_incomplete {
// If the new query is a suffix of the old query (typing more characters) and
// the previous result was complete, the existing completions can be filtered.
//
@@ -5395,6 +5399,23 @@ impl Editor {
}
};
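// Report TRIGGER_CHARACTER to the provider only when the typed text is one of the
// buffer's registered completion triggers; otherwise treat the request as INVOKED.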
let trigger_kind = match trigger {
Some(trigger) if buffer.read(cx).completion_triggers().contains(trigger) => {
CompletionTriggerKind::TRIGGER_CHARACTER
}
_ => CompletionTriggerKind::INVOKED,
};
let completion_context = CompletionContext {
trigger_character: trigger.and_then(|trigger| {
if trigger_kind == CompletionTriggerKind::TRIGGER_CHARACTER {
Some(String::from(trigger))
} else {
None
}
}),
trigger_kind,
};
let Anchor {
excerpt_id: buffer_excerpt_id,
text_anchor: buffer_position,
@@ -5452,72 +5473,49 @@ impl Editor {
&& match &query {
Some(query) => query.chars().count() < completion_settings.words_min_length,
None => completion_settings.words_min_length != 0,
})
|| (provider.is_some() && completion_settings.words == WordsCompletionMode::Disabled);
});
let mut words = if omit_word_completions {
Task::ready(BTreeMap::default())
} else {
cx.background_spawn(async move {
buffer_snapshot.words_in_range(WordsQuery {
fuzzy_contents: None,
range: word_search_range,
skip_digits,
})
})
};
let load_provider_completions = provider.as_ref().is_some_and(|provider| {
trigger.as_ref().is_none_or(|trigger| {
provider.is_completion_trigger(
let (mut words, provider_responses) = match &provider {
Some(provider) => {
let provider_responses = provider.completions(
buffer_excerpt_id,
&buffer,
position.text_anchor,
trigger,
trigger_in_words,
completions_source.is_some(),
buffer_position,
completion_context,
window,
cx,
)
})
});
);
let provider_responses = if let Some(provider) = &provider
&& load_provider_completions
{
let trigger_character =
trigger.filter(|trigger| buffer.read(cx).completion_triggers().contains(trigger));
let completion_context = CompletionContext {
trigger_kind: match &trigger_character {
Some(_) => CompletionTriggerKind::TRIGGER_CHARACTER,
None => CompletionTriggerKind::INVOKED,
},
trigger_character,
};
let words = match (omit_word_completions, completion_settings.words) {
(true, _) | (_, WordsCompletionMode::Disabled) => {
Task::ready(BTreeMap::default())
}
(false, WordsCompletionMode::Enabled | WordsCompletionMode::Fallback) => cx
.background_spawn(async move {
buffer_snapshot.words_in_range(WordsQuery {
fuzzy_contents: None,
range: word_search_range,
skip_digits,
})
}),
};
provider.completions(
buffer_excerpt_id,
&buffer,
buffer_position,
completion_context,
window,
cx,
)
} else {
Task::ready(Ok(Vec::new()))
};
let snippets = if let Some(provider) = &provider
&& provider.show_snippets()
&& let Some(project) = self.project()
{
project.update(cx, |project, cx| {
snippet_completions(project, &buffer, buffer_position, cx)
})
} else {
Task::ready(Ok(CompletionResponse {
completions: Vec::new(),
display_options: Default::default(),
is_incomplete: false,
}))
(words, provider_responses)
}
None => {
let words = if omit_word_completions {
Task::ready(BTreeMap::default())
} else {
cx.background_spawn(async move {
buffer_snapshot.words_in_range(WordsQuery {
fuzzy_contents: None,
range: word_search_range,
skip_digits,
})
})
};
(words, Task::ready(Ok(Vec::new())))
}
};
let snippet_sort_order = EditorSettings::get_global(cx).snippet_sort_order;
@@ -5575,13 +5573,6 @@ impl Editor {
confirm: None,
}));
completions.extend(
snippets
.await
.into_iter()
.flat_map(|response| response.completions),
);
let menu = if completions.is_empty() {
None
} else {
@@ -5593,11 +5584,7 @@ impl Editor {
.map(|workspace| workspace.read(cx).app_state().languages.clone());
let menu = CompletionsMenu::new(
id,
requested_source.unwrap_or(if load_provider_completions {
CompletionsMenuSource::Normal
} else {
CompletionsMenuSource::SnippetsOnly
}),
requested_source.unwrap_or(CompletionsMenuSource::Normal),
sort_completions,
show_completion_documentation,
position,
@@ -5927,7 +5914,7 @@ impl Editor {
.as_ref()
.is_some_and(|confirm| confirm(intent, window, cx));
if show_new_completions_on_confirm {
self.open_or_update_completions_menu(None, None, false, window, cx);
self.show_completions(&ShowCompletions { trigger: None }, window, cx);
}
let provider = self.completion_provider.as_ref()?;
@@ -7572,14 +7559,7 @@ impl Editor {
window: &mut Window,
cx: &mut Context<Self>,
) {
// Ensure that the edit prediction preview is updated, even when not
// enabled, if there's an active edit prediction preview.
if self.show_edit_predictions_in_menu()
|| matches!(
self.edit_prediction_preview,
EditPredictionPreview::Active { .. }
)
{
if self.show_edit_predictions_in_menu() {
self.update_edit_prediction_preview(&modifiers, window, cx);
}
@@ -7878,7 +7858,7 @@ impl Editor {
let inlay = Inlay::edit_prediction(
post_inc(&mut self.next_inlay_id),
range.start,
new_text.as_str(),
Rope::from_str_small(new_text.as_str()),
);
inlay_ids.push(inlay.id);
inlays.push(inlay);
@@ -12720,10 +12700,6 @@ impl Editor {
});
}
// 🤔 | .. | show_in_menu |
// | .. | true true
// | had_edit_prediction | false true
let trigger_in_words =
this.show_edit_predictions_in_menu() || !had_active_edit_prediction;
@@ -16025,6 +16001,7 @@ impl Editor {
}
fn filtered<'a>(
snapshot: EditorSnapshot,
severity: GoToDiagnosticSeverityFilter,
diagnostics: impl Iterator<Item = DiagnosticEntryRef<'a, usize>>,
) -> impl Iterator<Item = DiagnosticEntryRef<'a, usize>> {
@@ -16032,15 +16009,19 @@ impl Editor {
.filter(move |entry| severity.matches(entry.diagnostic.severity))
.filter(|entry| entry.range.start != entry.range.end)
.filter(|entry| !entry.diagnostic.is_unnecessary)
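// Skip diagnostics whose range starts inside a fold, so diagnostic navigation only targets visible entries.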
.filter(move |entry| !snapshot.intersects_fold(entry.range.start))
}
let snapshot = self.snapshot(window, cx);
let before = filtered(
snapshot.clone(),
severity,
buffer
.diagnostics_in_range(0..selection.start)
.filter(|entry| entry.range.start <= selection.start),
);
let after = filtered(
snapshot,
severity,
buffer
.diagnostics_in_range(selection.start..buffer.len())
@@ -16079,15 +16060,6 @@ impl Editor {
let Some(buffer_id) = buffer.buffer_id_for_anchor(next_diagnostic_start) else {
return;
};
let snapshot = self.snapshot(window, cx);
if snapshot.intersects_fold(next_diagnostic.range.start) {
self.unfold_ranges(
std::slice::from_ref(&next_diagnostic.range),
true,
false,
cx,
);
}
self.change_selections(Default::default(), window, cx, |s| {
s.select_ranges(vec![
next_diagnostic.range.start..next_diagnostic.range.start,
@@ -17861,7 +17833,6 @@ impl Editor {
.unwrap_or(self.diagnostics_max_severity);
if !self.inline_diagnostics_enabled()
|| !self.diagnostics_enabled()
|| !self.show_inline_diagnostics
|| max_severity == DiagnosticSeverity::Off
{
@@ -17940,7 +17911,7 @@ impl Editor {
window: &Window,
cx: &mut Context<Self>,
) -> Option<()> {
if self.ignore_lsp_data() || !self.diagnostics_enabled() {
if self.ignore_lsp_data() {
return None;
}
let pull_diagnostics_settings = ProjectSettings::get_global(cx)
@@ -19556,16 +19527,9 @@ impl Editor {
EditorSettings::get_global(cx).gutter.line_numbers
}
pub fn relative_line_numbers(&self, cx: &mut App) -> RelativeLineNumbers {
match (
self.use_relative_line_numbers,
EditorSettings::get_global(cx).relative_line_numbers,
) {
(None, setting) => setting,
(Some(false), _) => RelativeLineNumbers::Disabled,
(Some(true), RelativeLineNumbers::Wrapped) => RelativeLineNumbers::Wrapped,
(Some(true), _) => RelativeLineNumbers::Enabled,
}
pub fn should_use_relative_line_numbers(&self, cx: &mut App) -> bool {
self.use_relative_line_numbers
.unwrap_or(EditorSettings::get_global(cx).relative_line_numbers)
}
pub fn toggle_relative_line_numbers(
@@ -19574,8 +19538,8 @@ impl Editor {
_: &mut Window,
cx: &mut Context<Self>,
) {
let is_relative = self.relative_line_numbers(cx);
self.set_relative_line_number(Some(!is_relative.enabled()), cx)
let is_relative = self.should_use_relative_line_numbers(cx);
self.set_relative_line_number(Some(!is_relative), cx)
}
pub fn set_relative_line_number(&mut self, is_relative: Option<bool>, cx: &mut Context<Self>) {
@@ -22925,10 +22889,6 @@ pub trait CompletionProvider {
fn filter_completions(&self) -> bool {
true
}
fn show_snippets(&self) -> bool {
false
}
}
pub trait CodeActionProvider {
@@ -23189,8 +23149,16 @@ impl CompletionProvider for Entity<Project> {
cx: &mut Context<Editor>,
) -> Task<Result<Vec<CompletionResponse>>> {
self.update(cx, |project, cx| {
let task = project.completions(buffer, buffer_position, options, cx);
cx.background_spawn(task)
let snippets = snippet_completions(project, buffer, buffer_position, cx);
let project_completions = project.completions(buffer, buffer_position, options, cx);
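// Resolve provider completions and snippet completions together, appending the snippet
// response only when it contains at least one completion.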
cx.background_spawn(async move {
let mut responses = project_completions.await?;
let snippets = snippets.await?;
if !snippets.completions.is_empty() {
responses.push(snippets);
}
Ok(responses)
})
})
}
@@ -23262,10 +23230,6 @@ impl CompletionProvider for Entity<Project> {
buffer.completion_triggers().contains(text)
}
fn show_snippets(&self) -> bool {
true
}
}
impl SemanticsProvider for Entity<Project> {
@@ -24194,10 +24158,6 @@ impl EntityInputHandler for Editor {
let utf16_offset = anchor.to_offset_utf16(&position_map.snapshot.buffer_snapshot());
Some(utf16_offset.0)
}
fn accepts_text_input(&self, _window: &mut Window, _cx: &mut Context<Self>) -> bool {
self.input_enabled
}
}
trait SelectionExt {

View File

@@ -3,12 +3,12 @@ use core::num;
use gpui::App;
use language::CursorShape;
use project::project_settings::DiagnosticSeverity;
use settings::Settings;
pub use settings::{
CurrentLineHighlight, DelayMs, DisplayIn, DocumentColorsRenderMode, DoubleClickInMultibuffer,
GoToDefinitionFallback, HideMouseMode, MinimapThumb, MinimapThumbBorder, MultiCursorModifier,
ScrollBeyondLastLine, ScrollbarDiagnostics, SeedQuerySetting, ShowMinimap, SnippetSortOrder,
};
use settings::{RelativeLineNumbers, Settings};
use ui::scrollbars::{ScrollbarVisibility, ShowScrollbar};
/// Imports from the VSCode settings at
@@ -33,7 +33,7 @@ pub struct EditorSettings {
pub horizontal_scroll_margin: f32,
pub scroll_sensitivity: f32,
pub fast_scroll_sensitivity: f32,
pub relative_line_numbers: RelativeLineNumbers,
pub relative_line_numbers: bool,
pub seed_search_query_from_cursor: SeedQuerySetting,
pub use_smartcase_search: bool,
pub multi_cursor_modifier: MultiCursorModifier,
@@ -55,7 +55,6 @@ pub struct EditorSettings {
pub drag_and_drop_selection: DragAndDropSelection,
pub lsp_document_colors: DocumentColorsRenderMode,
pub minimum_contrast_for_highlights: f32,
pub completion_menu_scrollbar: ShowScrollbar,
}
#[derive(Debug, Clone)]
pub struct Jupyter {
@@ -269,7 +268,6 @@ impl Settings for EditorSettings {
},
lsp_document_colors: editor.lsp_document_colors.unwrap(),
minimum_contrast_for_highlights: editor.minimum_contrast_for_highlights.unwrap().0,
completion_menu_scrollbar: editor.completion_menu_scrollbar.map(Into::into).unwrap(),
}
}
}

View File

@@ -3136,77 +3136,6 @@ fn test_newline(cx: &mut TestAppContext) {
});
}
#[gpui::test]
async fn test_newline_yaml(cx: &mut TestAppContext) {
init_test(cx, |_| {});
let mut cx = EditorTestContext::new(cx).await;
let yaml_language = languages::language("yaml", tree_sitter_yaml::LANGUAGE.into());
cx.update_buffer(|buffer, cx| buffer.set_language(Some(yaml_language), cx));
// Object (between 2 fields)
cx.set_state(indoc! {"
test:ˇ
hello: bye"});
cx.update_editor(|e, window, cx| e.newline(&Newline, window, cx));
cx.assert_editor_state(indoc! {"
test:
ˇ
hello: bye"});
// Object (first and single line)
cx.set_state(indoc! {"
test:ˇ"});
cx.update_editor(|e, window, cx| e.newline(&Newline, window, cx));
cx.assert_editor_state(indoc! {"
test:
ˇ"});
// Array with objects (after first element)
cx.set_state(indoc! {"
test:
- foo: barˇ"});
cx.update_editor(|e, window, cx| e.newline(&Newline, window, cx));
cx.assert_editor_state(indoc! {"
test:
- foo: bar
ˇ"});
// Array with objects and comment
cx.set_state(indoc! {"
test:
- foo: bar
- bar: # testˇ"});
cx.update_editor(|e, window, cx| e.newline(&Newline, window, cx));
cx.assert_editor_state(indoc! {"
test:
- foo: bar
- bar: # test
ˇ"});
// Array with objects (after second element)
cx.set_state(indoc! {"
test:
- foo: bar
- bar: fooˇ"});
cx.update_editor(|e, window, cx| e.newline(&Newline, window, cx));
cx.assert_editor_state(indoc! {"
test:
- foo: bar
- bar: foo
ˇ"});
// Array with strings (after first element)
cx.set_state(indoc! {"
test:
- fooˇ"});
cx.update_editor(|e, window, cx| e.newline(&Newline, window, cx));
cx.assert_editor_state(indoc! {"
test:
- foo
ˇ"});
}
#[gpui::test]
fn test_newline_with_old_selections(cx: &mut TestAppContext) {
init_test(cx, |_| {});
@@ -13883,7 +13812,7 @@ async fn test_completion_mode(cx: &mut TestAppContext) {
cx.set_state(&run.initial_state);
cx.update_editor(|editor, window, cx| {
editor.show_completions(&ShowCompletions, window, cx);
editor.show_completions(&ShowCompletions { trigger: None }, window, cx);
});
let counter = Arc::new(AtomicUsize::new(0));
@@ -13943,7 +13872,7 @@ async fn test_completion_with_mode_specified_by_action(cx: &mut TestAppContext)
cx.set_state(initial_state);
cx.update_editor(|editor, window, cx| {
editor.show_completions(&ShowCompletions, window, cx);
editor.show_completions(&ShowCompletions { trigger: None }, window, cx);
});
let counter = Arc::new(AtomicUsize::new(0));
@@ -13979,7 +13908,7 @@ async fn test_completion_with_mode_specified_by_action(cx: &mut TestAppContext)
cx.set_state(initial_state);
cx.update_editor(|editor, window, cx| {
editor.show_completions(&ShowCompletions, window, cx);
editor.show_completions(&ShowCompletions { trigger: None }, window, cx);
});
handle_completion_request_with_insert_and_replace(
&mut cx,
@@ -14066,7 +13995,7 @@ async fn test_completion_replacing_surrounding_text_with_multicursors(cx: &mut T
"};
cx.set_state(initial_state);
cx.update_editor(|editor, window, cx| {
editor.show_completions(&ShowCompletions, window, cx);
editor.show_completions(&ShowCompletions { trigger: None }, window, cx);
});
handle_completion_request_with_insert_and_replace(
&mut cx,
@@ -14120,7 +14049,7 @@ async fn test_completion_replacing_surrounding_text_with_multicursors(cx: &mut T
"};
cx.set_state(initial_state);
cx.update_editor(|editor, window, cx| {
editor.show_completions(&ShowCompletions, window, cx);
editor.show_completions(&ShowCompletions { trigger: None }, window, cx);
});
handle_completion_request_with_insert_and_replace(
&mut cx,
@@ -14169,7 +14098,7 @@ async fn test_completion_replacing_surrounding_text_with_multicursors(cx: &mut T
"};
cx.set_state(initial_state);
cx.update_editor(|editor, window, cx| {
editor.show_completions(&ShowCompletions, window, cx);
editor.show_completions(&ShowCompletions { trigger: None }, window, cx);
});
handle_completion_request_with_insert_and_replace(
&mut cx,
@@ -14288,7 +14217,7 @@ async fn test_completion_in_multibuffer_with_replace_range(cx: &mut TestAppConte
EditorMode::Full {
scale_ui_elements_with_buffer_font_size: false,
show_active_line_background: false,
sizing_behavior: SizingBehavior::Default,
sized_by_content: false,
},
multi_buffer.clone(),
Some(project.clone()),
@@ -14320,7 +14249,7 @@ async fn test_completion_in_multibuffer_with_replace_range(cx: &mut TestAppConte
});
editor.update_in(cx, |editor, window, cx| {
editor.show_completions(&ShowCompletions, window, cx);
editor.show_completions(&ShowCompletions { trigger: None }, window, cx);
});
fake_server
@@ -14559,7 +14488,7 @@ async fn test_completion(cx: &mut TestAppContext) {
cx.assert_editor_state("editor.cloˇ");
assert!(cx.editor(|e, _, _| e.context_menu.borrow_mut().is_none()));
cx.update_editor(|editor, window, cx| {
editor.show_completions(&ShowCompletions, window, cx);
editor.show_completions(&ShowCompletions { trigger: None }, window, cx);
});
handle_completion_request(
"editor.<clo|>",
@@ -14958,7 +14887,7 @@ async fn test_word_completions_usually_skip_digits(cx: &mut TestAppContext) {
4.5f32
"});
cx.update_editor(|editor, window, cx| {
editor.show_completions(&ShowCompletions, window, cx);
editor.show_completions(&ShowCompletions::default(), window, cx);
});
cx.executor().run_until_parked();
cx.condition(|editor, _| editor.context_menu_visible())
@@ -14984,7 +14913,7 @@ async fn test_word_completions_usually_skip_digits(cx: &mut TestAppContext) {
33.35f32
"});
cx.update_editor(|editor, window, cx| {
editor.show_completions(&ShowCompletions, window, cx);
editor.show_completions(&ShowCompletions::default(), window, cx);
});
cx.executor().run_until_parked();
cx.condition(|editor, _| editor.context_menu_visible())
@@ -15408,7 +15337,13 @@ async fn test_as_is_completions(cx: &mut TestAppContext) {
cx.set_state("fn a() {}\n");
cx.executor().run_until_parked();
cx.update_editor(|editor, window, cx| {
editor.trigger_completion_on_input("n", true, window, cx)
editor.show_completions(
&ShowCompletions {
trigger: Some("\n".into()),
},
window,
cx,
);
});
cx.executor().run_until_parked();
@@ -15506,7 +15441,7 @@ int fn_branch(bool do_branch1, bool do_branch2);
})))
});
cx.update_editor(|editor, window, cx| {
editor.show_completions(&ShowCompletions, window, cx);
editor.show_completions(&ShowCompletions { trigger: None }, window, cx);
});
cx.executor().run_until_parked();
cx.update_editor(|editor, window, cx| {
@@ -15555,7 +15490,7 @@ int fn_branch(bool do_branch1, bool do_branch2);
})))
});
cx.update_editor(|editor, window, cx| {
editor.show_completions(&ShowCompletions, window, cx);
editor.show_completions(&ShowCompletions { trigger: None }, window, cx);
});
cx.executor().run_until_parked();
cx.update_editor(|editor, window, cx| {
@@ -18045,7 +17980,7 @@ async fn test_context_menus_hide_hover_popover(cx: &mut gpui::TestAppContext) {
}
});
cx.update_editor(|editor, window, cx| {
editor.show_completions(&ShowCompletions, window, cx);
editor.show_completions(&ShowCompletions { trigger: None }, window, cx);
});
completion_requests.next().await;
cx.condition(|editor, _| editor.context_menu_visible())
@@ -24441,7 +24376,7 @@ async fn test_html_linked_edits_on_completion(cx: &mut TestAppContext) {
])))
});
editor.update_in(cx, |editor, window, cx| {
editor.show_completions(&ShowCompletions, window, cx);
editor.show_completions(&ShowCompletions { trigger: None }, window, cx);
});
cx.run_until_parked();
completion_handle.next().await.unwrap();

View File

@@ -8,8 +8,8 @@ use crate::{
HandleInput, HoveredCursor, InlayHintRefreshReason, JumpData, LineDown, LineHighlight, LineUp,
MAX_LINE_LEN, MINIMAP_FONT_SIZE, MULTI_BUFFER_EXCERPT_HEADER_HEIGHT, OpenExcerpts, PageDown,
PageUp, PhantomBreakpointIndicator, Point, RowExt, RowRangeExt, SelectPhase,
SelectedTextHighlight, Selection, SelectionDragState, SizingBehavior, SoftWrap,
StickyHeaderExcerpt, ToPoint, ToggleFold, ToggleFoldAll,
SelectedTextHighlight, Selection, SelectionDragState, SoftWrap, StickyHeaderExcerpt, ToPoint,
ToggleFold, ToggleFoldAll,
code_context_menus::{CodeActionsMenu, MENU_ASIDE_MAX_WIDTH, MENU_ASIDE_MIN_WIDTH, MENU_GAP},
display_map::{
Block, BlockContext, BlockStyle, ChunkRendererId, DisplaySnapshot, EditorMargins,
@@ -766,14 +766,8 @@ impl EditorElement {
.row;
if line_numbers
.get(&MultiBufferRow(multi_buffer_row))
.is_some_and(|line_layout| {
line_layout.segments.iter().any(|segment| {
segment
.hitbox
.as_ref()
.is_some_and(|hitbox| hitbox.contains(&event.position))
})
})
.and_then(|line_number| line_number.hitbox.as_ref())
.is_some_and(|hitbox| hitbox.contains(&event.position))
{
let line_offset_from_top = display_row - position_map.scroll_position.y as u32;
@@ -1316,14 +1310,7 @@ impl EditorElement {
hover_at(editor, Some(anchor), window, cx);
Self::update_visible_cursor(editor, point, position_map, window, cx);
} else {
editor.update_inlay_link_and_hover_points(
&position_map.snapshot,
point_for_position,
modifiers.secondary(),
modifiers.shift,
window,
cx,
);
hover_at(editor, None, window, cx);
}
} else {
editor.hide_hovered_link(cx);
@@ -3163,7 +3150,6 @@ impl EditorElement {
snapshot: &EditorSnapshot,
rows: &Range<DisplayRow>,
relative_to: Option<DisplayRow>,
count_wrapped_lines: bool,
) -> HashMap<DisplayRow, DisplayRowDelta> {
let mut relative_rows: HashMap<DisplayRow, DisplayRowDelta> = Default::default();
let Some(relative_to) = relative_to else {
@@ -3181,15 +3167,8 @@ impl EditorElement {
let head_idx = relative_to.minus(start);
let mut delta = 1;
let mut i = head_idx + 1;
let should_count_line = |row_info: &RowInfo| {
if count_wrapped_lines {
row_info.buffer_row.is_some() || row_info.wrapped_buffer_row.is_some()
} else {
row_info.buffer_row.is_some()
}
};
while i < buffer_rows.len() as u32 {
if should_count_line(&buffer_rows[i as usize]) {
if buffer_rows[i as usize].buffer_row.is_some() {
if rows.contains(&DisplayRow(i + start.0)) {
relative_rows.insert(DisplayRow(i + start.0), delta);
}
@@ -3199,13 +3178,13 @@ impl EditorElement {
}
delta = 1;
i = head_idx.min(buffer_rows.len().saturating_sub(1) as u32);
while i > 0 && buffer_rows[i as usize].buffer_row.is_none() && !count_wrapped_lines {
while i > 0 && buffer_rows[i as usize].buffer_row.is_none() {
i -= 1;
}
while i > 0 {
i -= 1;
if should_count_line(&buffer_rows[i as usize]) {
if buffer_rows[i as usize].buffer_row.is_some() {
if rows.contains(&DisplayRow(i + start.0)) {
relative_rows.insert(DisplayRow(i + start.0), delta);
}
@@ -3237,7 +3216,7 @@ impl EditorElement {
return Arc::default();
}
let (newest_selection_head, relative) = self.editor.update(cx, |editor, cx| {
let (newest_selection_head, is_relative) = self.editor.update(cx, |editor, cx| {
let newest_selection_head = newest_selection_head.unwrap_or_else(|| {
let newest = editor
.selections
@@ -3253,93 +3232,79 @@ impl EditorElement {
)
.head
});
let relative = editor.relative_line_numbers(cx);
(newest_selection_head, relative)
let is_relative = editor.should_use_relative_line_numbers(cx);
(newest_selection_head, is_relative)
});
let relative_to = if relative.enabled() {
let relative_to = if is_relative {
Some(newest_selection_head.row())
} else {
None
};
let relative_rows =
self.calculate_relative_line_numbers(snapshot, &rows, relative_to, relative.wrapped());
let relative_rows = self.calculate_relative_line_numbers(snapshot, &rows, relative_to);
let mut line_number = String::new();
let segments = buffer_rows.iter().enumerate().flat_map(|(ix, row_info)| {
let display_row = DisplayRow(rows.start.0 + ix as u32);
line_number.clear();
let non_relative_number = if relative.wrapped() {
row_info.buffer_row.or(row_info.wrapped_buffer_row)? + 1
} else {
row_info.buffer_row? + 1
};
let number = relative_rows
.get(&display_row)
.unwrap_or(&non_relative_number);
write!(&mut line_number, "{number}").unwrap();
if row_info
.diff_status
.is_some_and(|status| status.is_deleted())
{
return None;
}
let line_numbers = buffer_rows
.iter()
.enumerate()
.flat_map(|(ix, row_info)| {
let display_row = DisplayRow(rows.start.0 + ix as u32);
line_number.clear();
let non_relative_number = row_info.buffer_row? + 1;
let number = relative_rows
.get(&display_row)
.unwrap_or(&non_relative_number);
write!(&mut line_number, "{number}").unwrap();
if row_info
.diff_status
.is_some_and(|status| status.is_deleted())
{
return None;
}
let color = active_rows
.get(&display_row)
.map(|spec| {
if spec.breakpoint {
cx.theme().colors().debugger_accent
} else {
cx.theme().colors().editor_active_line_number
}
})
.unwrap_or_else(|| cx.theme().colors().editor_line_number);
let shaped_line =
self.shape_line_number(SharedString::from(&line_number), color, window);
let scroll_top = scroll_position.y * ScrollPixelOffset::from(line_height);
let line_origin = gutter_hitbox.map(|hitbox| {
hitbox.origin
+ point(
hitbox.size.width - shaped_line.width - gutter_dimensions.right_padding,
ix as f32 * line_height
- Pixels::from(scroll_top % ScrollPixelOffset::from(line_height)),
let color = active_rows
.get(&display_row)
.map(|spec| {
if spec.breakpoint {
cx.theme().colors().debugger_accent
} else {
cx.theme().colors().editor_active_line_number
}
})
.unwrap_or_else(|| cx.theme().colors().editor_line_number);
let shaped_line =
self.shape_line_number(SharedString::from(&line_number), color, window);
let scroll_top = scroll_position.y * ScrollPixelOffset::from(line_height);
let line_origin = gutter_hitbox.map(|hitbox| {
hitbox.origin
+ point(
hitbox.size.width - shaped_line.width - gutter_dimensions.right_padding,
ix as f32 * line_height
- Pixels::from(scroll_top % ScrollPixelOffset::from(line_height)),
)
});
#[cfg(not(test))]
let hitbox = line_origin.map(|line_origin| {
window.insert_hitbox(
Bounds::new(line_origin, size(shaped_line.width, line_height)),
HitboxBehavior::Normal,
)
});
});
#[cfg(test)]
let hitbox = {
let _ = line_origin;
None
};
#[cfg(not(test))]
let hitbox = line_origin.map(|line_origin| {
window.insert_hitbox(
Bounds::new(line_origin, size(shaped_line.width, line_height)),
HitboxBehavior::Normal,
)
});
#[cfg(test)]
let hitbox = {
let _ = line_origin;
None
};
let segment = LineNumberSegment {
shaped_line,
hitbox,
};
let buffer_row = DisplayPoint::new(display_row, 0).to_point(snapshot).row;
let multi_buffer_row = MultiBufferRow(buffer_row);
Some((multi_buffer_row, segment))
});
let mut line_numbers: HashMap<MultiBufferRow, LineNumberLayout> = HashMap::default();
for (buffer_row, segment) in segments {
line_numbers
.entry(buffer_row)
.or_insert_with(|| LineNumberLayout {
segments: Default::default(),
})
.segments
.push(segment);
}
let multi_buffer_row = DisplayPoint::new(display_row, 0).to_point(snapshot).row;
let multi_buffer_row = MultiBufferRow(multi_buffer_row);
let line_number = LineNumberLayout {
shaped_line,
hitbox,
};
Some((multi_buffer_row, line_number))
})
.collect();
Arc::new(line_numbers)
}
@@ -5880,36 +5845,34 @@ impl EditorElement {
let line_height = layout.position_map.line_height;
window.set_cursor_style(CursorStyle::Arrow, &layout.gutter_hitbox);
for line_layout in layout.line_numbers.values() {
for LineNumberSegment {
shaped_line,
hitbox,
} in &line_layout.segments
{
let Some(hitbox) = hitbox else {
continue;
};
for LineNumberLayout {
shaped_line,
hitbox,
} in layout.line_numbers.values()
{
let Some(hitbox) = hitbox else {
continue;
};
let Some(()) = (if !is_singleton && hitbox.is_hovered(window) {
let color = cx.theme().colors().editor_hover_line_number;
let Some(()) = (if !is_singleton && hitbox.is_hovered(window) {
let color = cx.theme().colors().editor_hover_line_number;
let line = self.shape_line_number(shaped_line.text.clone(), color, window);
line.paint(hitbox.origin, line_height, window, cx).log_err()
} else {
shaped_line
.paint(hitbox.origin, line_height, window, cx)
.log_err()
}) else {
continue;
};
let line = self.shape_line_number(shaped_line.text.clone(), color, window);
line.paint(hitbox.origin, line_height, window, cx).log_err()
} else {
shaped_line
.paint(hitbox.origin, line_height, window, cx)
.log_err()
}) else {
continue;
};
// In singleton buffers, we select corresponding lines on the line number click, so use | -like cursor.
// In multi buffers, we open file at the line number clicked, so use a pointing hand cursor.
if is_singleton {
window.set_cursor_style(CursorStyle::IBeam, hitbox);
} else {
window.set_cursor_style(CursorStyle::PointingHand, hitbox);
}
// In singleton buffers, we select corresponding lines on the line number click, so use | -like cursor.
// In multi buffers, we open file at the line number clicked, so use a pointing hand cursor.
if is_singleton {
window.set_cursor_style(CursorStyle::IBeam, hitbox);
} else {
window.set_cursor_style(CursorStyle::PointingHand, hitbox);
}
}
}
@@ -8478,11 +8441,11 @@ impl Element for EditorElement {
window.request_layout(style, None, cx)
}
EditorMode::Full {
sizing_behavior, ..
sized_by_content, ..
} => {
let mut style = Style::default();
style.size.width = relative(1.).into();
if sizing_behavior == SizingBehavior::SizeByContent {
if sized_by_content {
let snapshot = editor.snapshot(window, cx);
let line_height =
self.style.text.line_height_in_pixels(window.rem_size());
@@ -8646,8 +8609,7 @@ impl Element for EditorElement {
EditorMode::SingleLine
| EditorMode::AutoHeight { .. }
| EditorMode::Full {
sizing_behavior: SizingBehavior::ExcludeOverscrollMargin
| SizingBehavior::SizeByContent,
sized_by_content: true,
..
}
) {
@@ -9823,17 +9785,11 @@ impl EditorLayout {
}
}
#[derive(Debug)]
struct LineNumberSegment {
struct LineNumberLayout {
shaped_line: ShapedLine,
hitbox: Option<Hitbox>,
}
#[derive(Debug)]
struct LineNumberLayout {
segments: SmallVec<[LineNumberSegment; 1]>,
}
struct ColoredRange<T> {
start: T,
end: T,
@@ -10897,7 +10853,6 @@ mod tests {
&snapshot,
&(DisplayRow(0)..DisplayRow(6)),
Some(DisplayRow(3)),
false,
)
})
.unwrap();
@@ -10916,7 +10871,6 @@ mod tests {
&snapshot,
&(DisplayRow(3)..DisplayRow(6)),
Some(DisplayRow(1)),
false,
)
})
.unwrap();
@@ -10933,7 +10887,6 @@ mod tests {
&snapshot,
&(DisplayRow(0)..DisplayRow(3)),
Some(DisplayRow(6)),
false,
)
})
.unwrap();
@@ -10943,81 +10896,6 @@ mod tests {
assert_eq!(relative_rows[&DisplayRow(2)], 3);
}
#[gpui::test]
fn test_shape_line_numbers_wrapping(cx: &mut TestAppContext) {
init_test(cx, |_| {});
let window = cx.add_window(|window, cx| {
let buffer = MultiBuffer::build_simple(&sample_text(6, 6, 'a'), cx);
Editor::new(EditorMode::full(), buffer, None, window, cx)
});
update_test_language_settings(cx, |s| {
s.defaults.preferred_line_length = Some(5_u32);
s.defaults.soft_wrap = Some(language_settings::SoftWrap::PreferredLineLength);
});
let editor = window.root(cx).unwrap();
let style = cx.update(|cx| editor.read(cx).style().unwrap().clone());
let line_height = window
.update(cx, |_, window, _| {
style.text.line_height_in_pixels(window.rem_size())
})
.unwrap();
let element = EditorElement::new(&editor, style);
let snapshot = window
.update(cx, |editor, window, cx| editor.snapshot(window, cx))
.unwrap();
let layouts = cx
.update_window(*window, |_, window, cx| {
element.layout_line_numbers(
None,
GutterDimensions {
left_padding: Pixels::ZERO,
right_padding: Pixels::ZERO,
width: px(30.0),
margin: Pixels::ZERO,
git_blame_entries_width: None,
},
line_height,
gpui::Point::default(),
DisplayRow(0)..DisplayRow(6),
&(0..6)
.map(|row| RowInfo {
buffer_row: Some(row),
..Default::default()
})
.collect::<Vec<_>>(),
&BTreeMap::default(),
Some(DisplayPoint::new(DisplayRow(0), 0)),
&snapshot,
window,
cx,
)
})
.unwrap();
assert_eq!(layouts.len(), 3);
let relative_rows = window
.update(cx, |editor, window, cx| {
let snapshot = editor.snapshot(window, cx);
element.calculate_relative_line_numbers(
&snapshot,
&(DisplayRow(0)..DisplayRow(6)),
Some(DisplayRow(3)),
true,
)
})
.unwrap();
assert_eq!(relative_rows[&DisplayRow(0)], 3);
assert_eq!(relative_rows[&DisplayRow(1)], 2);
assert_eq!(relative_rows[&DisplayRow(2)], 1);
// current line has no relative number
assert_eq!(relative_rows[&DisplayRow(4)], 1);
assert_eq!(relative_rows[&DisplayRow(5)], 2);
}
#[gpui::test]
async fn test_vim_visual_selections(cx: &mut TestAppContext) {
init_test(cx, |_| {});
@@ -11131,13 +11009,7 @@ mod tests {
state
.line_numbers
.get(&MultiBufferRow(0))
.map(|line_number| line_number
.segments
.first()
.unwrap()
.shaped_line
.text
.as_ref()),
.map(|line_number| line_number.shaped_line.text.as_ref()),
Some("1")
);
}

View File

@@ -1115,18 +1115,19 @@ mod tests {
let fs = FakeFs::new(cx.executor());
let buffer_initial_text_len = rng.random_range(5..15);
let mut buffer_initial_text = Rope::from(
let mut buffer_initial_text = Rope::from_str(
RandomCharIter::new(&mut rng)
.take(buffer_initial_text_len)
.collect::<String>()
.as_str(),
cx.background_executor(),
);
let mut newline_ixs = (0..buffer_initial_text_len).choose_multiple(&mut rng, 5);
newline_ixs.sort_unstable();
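// Splice newlines in at a few random offsets, clipped to char boundaries and applied
// back-to-front so earlier offsets stay valid as the rope grows.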
for newline_ix in newline_ixs.into_iter().rev() {
let newline_ix = buffer_initial_text.clip_offset(newline_ix, Bias::Right);
buffer_initial_text.replace(newline_ix..newline_ix, "\n");
buffer_initial_text.replace(newline_ix..newline_ix, "\n", cx.background_executor());
}
log::info!("initial buffer text: {:?}", buffer_initial_text);

View File

@@ -59,10 +59,10 @@ impl Inlay {
pub fn hint(id: InlayId, position: Anchor, hint: &InlayHint) -> Self {
let mut text = hint.text();
if hint.padding_right && text.reversed_chars_at(text.len()).next() != Some(' ') {
text.push(" ");
text.push_small(" ");
}
if hint.padding_left && text.chars_at(0).next() != Some(' ') {
text.push_front(" ");
text.push_front_small(" ");
}
Self {
id,
@@ -72,11 +72,11 @@ impl Inlay {
}
#[cfg(any(test, feature = "test-support"))]
pub fn mock_hint(id: usize, position: Anchor, text: impl Into<Rope>) -> Self {
pub fn mock_hint(id: usize, position: Anchor, text: Rope) -> Self {
Self {
id: InlayId::Hint(id),
position,
content: InlayContent::Text(text.into()),
content: InlayContent::Text(text),
}
}
@@ -88,19 +88,19 @@ impl Inlay {
}
}
pub fn edit_prediction<T: Into<Rope>>(id: usize, position: Anchor, text: T) -> Self {
pub fn edit_prediction(id: usize, position: Anchor, text: Rope) -> Self {
Self {
id: InlayId::EditPrediction(id),
position,
content: InlayContent::Text(text.into()),
content: InlayContent::Text(text),
}
}
pub fn debugger<T: Into<Rope>>(id: usize, position: Anchor, text: T) -> Self {
pub fn debugger(id: usize, position: Anchor, text: Rope) -> Self {
Self {
id: InlayId::DebuggerValue(id),
position,
content: InlayContent::Text(text.into()),
content: InlayContent::Text(text),
}
}
@@ -108,7 +108,7 @@ impl Inlay {
static COLOR_TEXT: OnceLock<Rope> = OnceLock::new();
match &self.content {
InlayContent::Text(text) => text,
InlayContent::Color(_) => COLOR_TEXT.get_or_init(|| Rope::from("")),
InlayContent::Color(_) => COLOR_TEXT.get_or_init(|| Rope::from_str_small("")),
}
}

View File

@@ -878,6 +878,7 @@ mod tests {
use gpui::{AppContext as _, font, px};
use language::Capability;
use project::{Project, project_settings::DiagnosticSeverity};
use rope::Rope;
use settings::SettingsStore;
use util::post_inc;
@@ -1024,22 +1025,22 @@ mod tests {
Inlay::edit_prediction(
post_inc(&mut id),
buffer_snapshot.anchor_before(offset),
"test",
Rope::from_str_small("test"),
),
Inlay::edit_prediction(
post_inc(&mut id),
buffer_snapshot.anchor_after(offset),
"test",
Rope::from_str_small("test"),
),
Inlay::mock_hint(
post_inc(&mut id),
buffer_snapshot.anchor_before(offset),
"test",
Rope::from_str_small("test"),
),
Inlay::mock_hint(
post_inc(&mut id),
buffer_snapshot.anchor_after(offset),
"test",
Rope::from_str_small("test"),
),
]
})

View File

@@ -193,7 +193,7 @@ impl Editor {
if let Some(language) = language {
for signature in &mut signature_help.signatures {
let text = Rope::from(signature.label.as_ref());
let text = Rope::from_str_small(signature.label.as_ref());
let highlights = language
.highlight_text(&text, 0..signature.label.len())
.into_iter()

View File

@@ -13,6 +13,8 @@ path = "src/extension.rs"
[dependencies]
anyhow.workspace = true
async-compression.workspace = true
async-tar.workspace = true
async-trait.workspace = true
collections.workspace = true
dap.workspace = true

View File

@@ -3,7 +3,9 @@ use crate::{
parse_wasm_extension_version,
};
use anyhow::{Context as _, Result, bail};
use futures::AsyncReadExt;
use async_compression::futures::bufread::GzipDecoder;
use async_tar::Archive;
use futures::{AsyncReadExt, io::Cursor};
use heck::ToSnakeCase;
use http_client::{self, AsyncBody, HttpClient};
use serde::Deserialize;
@@ -415,48 +417,28 @@ impl ExtensionBuilder {
return Ok(clang_path);
}
let tar_out_dir = self.cache_dir.join("wasi-sdk-temp");
let mut tar_out_dir = wasi_sdk_dir.clone();
tar_out_dir.set_extension("archive");
fs::remove_dir_all(&wasi_sdk_dir).ok();
fs::remove_dir_all(&tar_out_dir).ok();
fs::create_dir_all(&tar_out_dir).context("failed to create extraction directory")?;
log::info!("downloading wasi-sdk to {}", wasi_sdk_dir.display());
let mut response = self.http.get(&url, AsyncBody::default(), true).await?;
let body = GzipDecoder::new({
// stream the entire request into memory at once as the artifact is quite big (100MB+)
let mut b = vec![];
response.body_mut().read_to_end(&mut b).await?;
Cursor::new(b)
});
let tar = Archive::new(body);
// Write the response to a temporary file
let tar_gz_path = self.cache_dir.join("wasi-sdk.tar.gz");
let mut tar_gz_file =
fs::File::create(&tar_gz_path).context("failed to create temporary tar.gz file")?;
let response_body = response.body_mut();
let mut body_bytes = Vec::new();
response_body.read_to_end(&mut body_bytes).await?;
std::io::Write::write_all(&mut tar_gz_file, &body_bytes)?;
drop(tar_gz_file);
log::info!("un-tarring wasi-sdk to {}", tar_out_dir.display());
// Shell out to tar to extract the archive
let tar_output = util::command::new_smol_command("tar")
.arg("-xzf")
.arg(&tar_gz_path)
.arg("-C")
.arg(&tar_out_dir)
.output()
log::info!("un-tarring wasi-sdk to {}", wasi_sdk_dir.display());
tar.unpack(&tar_out_dir)
.await
.context("failed to run tar")?;
if !tar_output.status.success() {
bail!(
"failed to extract wasi-sdk archive: {}",
String::from_utf8_lossy(&tar_output.stderr)
);
}
.context("failed to unpack wasi-sdk archive")?;
log::info!("finished downloading wasi-sdk");
// Clean up the temporary tar.gz file
fs::remove_file(&tar_gz_path).ok();
let inner_dir = fs::read_dir(&tar_out_dir)?
.next()
.context("no content")?

View File

@@ -164,15 +164,6 @@ pub struct AgentServerManifestEntry {
/// args = ["--serve"]
/// sha256 = "abc123..." # optional
/// ```
///
/// For Node.js-based agents, you can use "node" as the cmd to automatically
/// use Zed's managed Node.js runtime instead of relying on the user's PATH:
/// ```toml
/// [agent_servers.nodeagent.targets.darwin-aarch64]
/// archive = "https://example.com/nodeagent.zip"
/// cmd = "node"
/// args = ["index.js", "--port", "3000"]
/// ```
pub targets: HashMap<String, TargetConfig>,
}

View File

@@ -1468,6 +1468,7 @@ impl ExtensionStore {
let extensions_dir = self.installed_dir.clone();
let index_path = self.index_path.clone();
let proxy = self.proxy.clone();
let executor = cx.background_executor().clone();
cx.background_spawn(async move {
let start_time = Instant::now();
let mut index = ExtensionIndex::default();
@@ -1501,10 +1502,14 @@ impl ExtensionStore {
}
if let Ok(index_json) = serde_json::to_string_pretty(&index) {
fs.save(&index_path, &index_json.as_str().into(), Default::default())
.await
.context("failed to save extension index")
.log_err();
fs.save(
&index_path,
&Rope::from_str(&index_json, &executor),
Default::default(),
)
.await
.context("failed to save extension index")
.log_err();
}
log::info!("rebuilt extension index in {:?}", start_time.elapsed());
@@ -1671,7 +1676,7 @@ impl ExtensionStore {
let manifest_toml = toml::to_string(&loaded_extension.manifest)?;
fs.save(
&tmp_dir.join(EXTENSION_TOML),
&Rope::from(manifest_toml),
&Rope::from_str_small(&manifest_toml),
language::LineEnding::Unix,
)
.await?;

View File

@@ -7,7 +7,6 @@ use menu::{Confirm, SelectNext, SelectPrevious};
use pretty_assertions::{assert_eq, assert_matches};
use project::{FS_WATCH_LATENCY, RemoveOptions};
use serde_json::json;
use settings::SettingsStore;
use util::{path, rel_path::rel_path};
use workspace::{AppState, CloseActiveItem, OpenOptions, ToggleFileFinder, Workspace, open_paths};
@@ -659,147 +658,6 @@ async fn test_matching_cancellation(cx: &mut TestAppContext) {
});
}
#[gpui::test]
async fn test_ignored_root_with_file_inclusions(cx: &mut TestAppContext) {
let app_state = init_test(cx);
cx.update(|cx| {
cx.update_global::<SettingsStore, _>(|store, cx| {
store.update_user_settings(cx, |settings| {
settings.project.worktree.file_scan_inclusions = Some(vec![
"height_demo/**/hi_bonjour".to_string(),
"**/height_1".to_string(),
]);
});
})
});
app_state
.fs
.as_fake()
.insert_tree(
"/ancestor",
json!({
".gitignore": "ignored-root",
"ignored-root": {
"happiness": "",
"height": "",
"hi": "",
"hiccup": "",
},
"tracked-root": {
".gitignore": "height*",
"happiness": "",
"height": "",
"heights": {
"height_1": "",
"height_2": "",
},
"height_demo": {
"test_1": {
"hi_bonjour": "hi_bonjour",
"hi": "hello",
},
"hihi": "bye",
"test_2": {
"hoi": "nl"
}
},
"height_include": {
"height_1_include": "",
"height_2_include": "",
},
"hi": "",
"hiccup": "",
},
}),
)
.await;
let project = Project::test(
app_state.fs.clone(),
[
Path::new(path!("/ancestor/tracked-root")),
Path::new(path!("/ancestor/ignored-root")),
],
cx,
)
.await;
let (picker, _workspace, cx) = build_find_picker(project, cx);
picker
.update_in(cx, |picker, window, cx| {
picker
.delegate
.spawn_search(test_path_position("hi"), window, cx)
})
.await;
picker.update(cx, |picker, _| {
let matches = collect_search_matches(picker);
assert_eq!(matches.history.len(), 0);
assert_eq!(
matches.search,
vec![
rel_path("ignored-root/hi").into(),
rel_path("tracked-root/hi").into(),
rel_path("ignored-root/hiccup").into(),
rel_path("tracked-root/hiccup").into(),
rel_path("tracked-root/height_demo/test_1/hi_bonjour").into(),
rel_path("ignored-root/height").into(),
rel_path("tracked-root/heights/height_1").into(),
rel_path("ignored-root/happiness").into(),
rel_path("tracked-root/happiness").into(),
],
"All ignored files that were indexed are found for default ignored mode"
);
});
}
#[gpui::test]
async fn test_ignored_root_with_file_inclusions_repro(cx: &mut TestAppContext) {
let app_state = init_test(cx);
cx.update(|cx| {
cx.update_global::<SettingsStore, _>(|store, cx| {
store.update_user_settings(cx, |settings| {
settings.project.worktree.file_scan_inclusions = Some(vec!["**/.env".to_string()]);
});
})
});
app_state
.fs
.as_fake()
.insert_tree(
"/src",
json!({
".gitignore": "node_modules",
"node_modules": {
"package.json": "// package.json",
".env": "BAR=FOO"
},
".env": "FOO=BAR"
}),
)
.await;
let project = Project::test(app_state.fs.clone(), [Path::new(path!("/src"))], cx).await;
let (picker, _workspace, cx) = build_find_picker(project, cx);
picker
.update_in(cx, |picker, window, cx| {
picker
.delegate
.spawn_search(test_path_position("json"), window, cx)
})
.await;
picker.update(cx, |picker, _| {
let matches = collect_search_matches(picker);
assert_eq!(matches.history.len(), 0);
assert_eq!(
matches.search,
vec![],
"All ignored files that were indexed are found for default ignored mode"
);
});
}
#[gpui::test]
async fn test_ignored_root(cx: &mut TestAppContext) {
let app_state = init_test(cx);

View File

@@ -7,7 +7,7 @@ use git::{
blame::Blame,
repository::{
AskPassDelegate, Branch, CommitDetails, CommitOptions, FetchOptions, GitRepository,
GitRepositoryCheckpoint, PushOptions, Remote, RepoPath, ResetMode, Worktree,
GitRepositoryCheckpoint, PushOptions, Remote, RepoPath, ResetMode,
},
status::{
DiffTreeType, FileStatus, GitStatus, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
@@ -387,19 +387,6 @@ impl GitRepository for FakeGitRepository {
})
}
fn worktrees(&self) -> BoxFuture<'_, Result<Vec<Worktree>>> {
unimplemented!()
}
fn create_worktree(
&self,
_: String,
_: PathBuf,
_: Option<String>,
) -> BoxFuture<'_, Result<()>> {
unimplemented!()
}
fn change_branch(&self, name: String) -> BoxFuture<'_, Result<()>> {
self.with_state_async(true, |state| {
state.current_branch_name = Some(name);
@@ -543,7 +530,6 @@ impl GitRepository for FakeGitRepository {
&self,
_branch: String,
_remote: String,
_rebase: bool,
_askpass: AskPassDelegate,
_env: Arc<HashMap<String, String>>,
_cx: AsyncApp,

View File

@@ -72,8 +72,6 @@ actions!(
ForcePush,
/// Pulls changes from the remote repository.
Pull,
/// Pulls changes from the remote repository with rebase.
PullRebase,
/// Fetches changes from the remote repository.
Fetch,
/// Fetches changes from a specific remote.

View File

@@ -72,50 +72,6 @@ impl Branch {
}
}
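/// A linked checkout reported by `git worktree list`: its filesystem path, the ref it
/// has checked out, and the sha of its HEAD commit.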
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
pub struct Worktree {
pub path: PathBuf,
pub ref_name: SharedString,
pub sha: SharedString,
}
impl Worktree {
pub fn branch(&self) -> &str {
self.ref_name
.as_ref()
.strip_prefix("refs/heads/")
.or_else(|| self.ref_name.as_ref().strip_prefix("refs/remotes/"))
.unwrap_or(self.ref_name.as_ref())
}
}
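/// Parses `git worktree list --porcelain` output: entries are separated by blank lines,
/// and each entry carries `worktree <path>`, `HEAD <sha>`, and `branch <ref>` lines.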
pub fn parse_worktrees_from_str<T: AsRef<str>>(raw_worktrees: T) -> Vec<Worktree> {
let mut worktrees = Vec::new();
let entries = raw_worktrees.as_ref().split("\n\n");
for entry in entries {
let mut parts = entry.splitn(3, '\n');
let path = parts
.next()
.and_then(|p| p.split_once(' ').map(|(_, path)| path.to_string()));
let sha = parts
.next()
.and_then(|p| p.split_once(' ').map(|(_, sha)| sha.to_string()));
let ref_name = parts
.next()
.and_then(|p| p.split_once(' ').map(|(_, ref_name)| ref_name.to_string()));
if let (Some(path), Some(sha), Some(ref_name)) = (path, sha, ref_name) {
worktrees.push(Worktree {
path: PathBuf::from(path),
ref_name: ref_name.into(),
sha: sha.into(),
})
}
}
worktrees
}
#[derive(Clone, Debug, Hash, PartialEq, Eq)]
pub struct Upstream {
pub ref_name: SharedString,
@@ -434,15 +390,6 @@ pub trait GitRepository: Send + Sync {
fn create_branch(&self, name: String) -> BoxFuture<'_, Result<()>>;
fn rename_branch(&self, branch: String, new_name: String) -> BoxFuture<'_, Result<()>>;
fn worktrees(&self) -> BoxFuture<'_, Result<Vec<Worktree>>>;
fn create_worktree(
&self,
name: String,
directory: PathBuf,
from_commit: Option<String>,
) -> BoxFuture<'_, Result<()>>;
fn reset(
&self,
commit: String,
@@ -533,7 +480,6 @@ pub trait GitRepository: Send + Sync {
&self,
branch_name: String,
upstream_name: String,
rebase: bool,
askpass: AskPassDelegate,
env: Arc<HashMap<String, String>>,
// This method takes an AsyncApp to ensure it's invoked on the main thread,
@@ -1260,66 +1206,6 @@ impl GitRepository for RealGitRepository {
.boxed()
}
fn worktrees(&self) -> BoxFuture<'_, Result<Vec<Worktree>>> {
let git_binary_path = self.any_git_binary_path.clone();
let working_directory = self.working_directory();
self.executor
.spawn(async move {
let output = new_smol_command(&git_binary_path)
.current_dir(working_directory?)
.args(&["--no-optional-locks", "worktree", "list", "--porcelain"])
.output()
.await?;
if output.status.success() {
let stdout = String::from_utf8_lossy(&output.stdout);
Ok(parse_worktrees_from_str(&stdout))
} else {
let stderr = String::from_utf8_lossy(&output.stderr);
anyhow::bail!("git worktree list failed: {stderr}");
}
})
.boxed()
}
fn create_worktree(
&self,
name: String,
directory: PathBuf,
from_commit: Option<String>,
) -> BoxFuture<'_, Result<()>> {
let git_binary_path = self.any_git_binary_path.clone();
let working_directory = self.working_directory();
let final_path = directory.join(&name);
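// Runs `git worktree add <directory>/<name>`; when a base commit is given, it also
// passes `-b <name> <commit>` to create a new branch from that commit.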
let mut args = vec![
OsString::from("--no-optional-locks"),
OsString::from("worktree"),
OsString::from("add"),
OsString::from(final_path.as_os_str()),
];
if let Some(from_commit) = from_commit {
args.extend([
OsString::from("-b"),
OsString::from(name.as_str()),
OsString::from(from_commit),
]);
}
self.executor
.spawn(async move {
let output = new_smol_command(&git_binary_path)
.current_dir(working_directory?)
.args(args)
.output()
.await?;
if output.status.success() {
Ok(())
} else {
let stderr = String::from_utf8_lossy(&output.stderr);
anyhow::bail!("git worktree list failed: {stderr}");
}
})
.boxed()
}
fn change_branch(&self, name: String) -> BoxFuture<'_, Result<()>> {
let repo = self.repository.clone();
let working_directory = self.working_directory();
@@ -1691,7 +1577,6 @@ impl GitRepository for RealGitRepository {
&self,
branch_name: String,
remote_name: String,
rebase: bool,
ask_pass: AskPassDelegate,
env: Arc<HashMap<String, String>>,
cx: AsyncApp,
@@ -1705,13 +1590,7 @@ impl GitRepository for RealGitRepository {
command
.envs(env.iter())
.current_dir(&working_directory?)
.arg("pull");
if rebase {
command.arg("--rebase");
}
command
.args(["pull"])
.arg(remote_name)
.arg(branch_name)
.stdout(smol::process::Stdio::piped())

View File

@@ -45,8 +45,6 @@ notifications.workspace = true
panel.workspace = true
picker.workspace = true
project.workspace = true
recent_projects.workspace = true
remote.workspace = true
schemars.workspace = true
serde.workspace = true
serde_json.workspace = true

View File

@@ -170,7 +170,10 @@ impl CommitView {
ReplicaId::LOCAL,
cx.entity_id().as_non_zero_u64().into(),
LineEnding::default(),
format_commit(&commit, stash.is_some()).into(),
Rope::from_str(
&format_commit(&commit, stash.is_some()),
cx.background_executor(),
),
);
metadata_buffer_id = Some(buffer.remote_id());
Buffer::build(buffer, Some(file.clone()), Capability::ReadWrite)
@@ -336,7 +339,7 @@ async fn build_buffer(
) -> Result<Entity<Buffer>> {
let line_ending = LineEnding::detect(&text);
LineEnding::normalize(&mut text);
let text = Rope::from(text);
let text = Rope::from_str(&text, cx.background_executor());
let language = cx.update(|cx| language_registry.language_for_file(&blob, Some(&text), cx))?;
let language = if let Some(language) = language {
language_registry
@@ -376,7 +379,7 @@ async fn build_buffer_diff(
let base_buffer = cx
.update(|cx| {
Buffer::build_snapshot(
old_text.as_deref().unwrap_or("").into(),
Rope::from_str(old_text.as_deref().unwrap_or(""), cx.background_executor()),
buffer.language().cloned(),
Some(language_registry.clone()),
cx,

View File

@@ -359,6 +359,7 @@ mod tests {
use super::*;
use editor::test::editor_test_context::assert_state_with_diff;
use gpui::TestAppContext;
use language::Rope;
use project::{FakeFs, Fs, Project};
use settings::SettingsStore;
use std::path::PathBuf;
@@ -429,7 +430,7 @@ mod tests {
// Modify the new file on disk
fs.save(
path!("/test/new_file.txt").as_ref(),
&unindent(
&Rope::from_str_small(&unindent(
"
new line 1
line 2
@@ -437,8 +438,7 @@ mod tests {
line 4
new line 5
",
)
.into(),
)),
Default::default(),
)
.await
@@ -465,15 +465,14 @@ mod tests {
// Modify the old file on disk
fs.save(
path!("/test/old_file.txt").as_ref(),
&unindent(
&Rope::from_str_small(&unindent(
"
new line 1
line 2
old line 3
line 4
",
)
.into(),
)),
Default::default(),
)
.await

View File

@@ -2219,7 +2219,7 @@ impl GitPanel {
.detach();
}
pub(crate) fn pull(&mut self, rebase: bool, window: &mut Window, cx: &mut Context<Self>) {
pub(crate) fn pull(&mut self, window: &mut Window, cx: &mut Context<Self>) {
if !self.can_push_and_pull(cx) {
return;
}
@@ -2254,7 +2254,6 @@ impl GitPanel {
repo.pull(
branch.name().to_owned().into(),
remote.name.clone(),
rebase,
askpass,
cx,
)

View File

@@ -46,7 +46,6 @@ pub(crate) mod remote_output;
pub mod repository_selector;
pub mod stash_picker;
pub mod text_diff_view;
pub mod worktree_picker;
actions!(
git,
@@ -73,7 +72,6 @@ pub fn init(cx: &mut App) {
git_panel::register(workspace);
repository_selector::register(workspace);
branch_picker::register(workspace);
worktree_picker::register(workspace);
stash_picker::register(workspace);
let project = workspace.project().read(cx);
@@ -126,15 +124,7 @@ pub fn init(cx: &mut App) {
return;
};
panel.update(cx, |panel, cx| {
panel.pull(false, window, cx);
});
});
workspace.register_action(|workspace, _: &git::PullRebase, window, cx| {
let Some(panel) = workspace.panel::<git_panel::GitPanel>(cx) else {
return;
};
panel.update(cx, |panel, cx| {
panel.pull(true, window, cx);
panel.pull(window, cx);
});
});
}
@@ -605,7 +595,6 @@ mod remote_button {
.action("Fetch", git::Fetch.boxed_clone())
.action("Fetch From", git::FetchFrom.boxed_clone())
.action("Pull", git::Pull.boxed_clone())
.action("Pull (Rebase)", git::PullRebase.boxed_clone())
.separator()
.action("Push", git::Push.boxed_clone())
.action("Push To", git::PushTo.boxed_clone())

View File

@@ -470,6 +470,11 @@ impl ProjectDiff {
window: &mut Window,
cx: &mut Context<Self>,
) {
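// When diffing against the merge base, add this buffer diff to the multibuffer right away.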
if self.branch_diff.read(cx).diff_base().is_merge_base() {
self.multibuffer.update(cx, |multibuffer, cx| {
multibuffer.add_diff(diff.clone(), cx);
});
}
let subscription = cx.subscribe_in(&diff, window, move |this, _, _, window, cx| {
this._task = window.spawn(cx, {
let this = cx.weak_entity();
@@ -486,8 +491,8 @@ impl ProjectDiff {
.expect("project diff editor should have a conflict addon");
let snapshot = buffer.read(cx).snapshot();
let diff_read = diff.read(cx);
let diff_hunk_ranges = diff_read
let diff = diff.read(cx);
let diff_hunk_ranges = diff
.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &snapshot, cx)
.map(|diff_hunk| diff_hunk.buffer_range);
let conflicts = conflict_addon
@@ -510,9 +515,6 @@ impl ProjectDiff {
multibuffer_context_lines(cx),
cx,
);
if self.branch_diff.read(cx).diff_base().is_merge_base() {
multibuffer.add_diff(diff.clone(), cx);
}
(was_empty, is_newly_added)
});

View File

@@ -1,743 +0,0 @@
use anyhow::Context as _;
use fuzzy::StringMatchCandidate;
use git::repository::Worktree as GitWorktree;
use gpui::{
Action, App, AsyncApp, Context, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable,
InteractiveElement, IntoElement, Modifiers, ModifiersChangedEvent, ParentElement,
PathPromptOptions, Render, SharedString, Styled, Subscription, Task, WeakEntity, Window,
actions, rems,
};
use picker::{Picker, PickerDelegate, PickerEditorPosition};
use project::{DirectoryLister, git_store::Repository};
use recent_projects::{RemoteConnectionModal, connect};
use remote::{RemoteConnectionOptions, remote_client::ConnectionIdentifier};
use std::{path::PathBuf, sync::Arc};
use ui::{HighlightedLabel, KeyBinding, ListItem, ListItemSpacing, Tooltip, prelude::*};
use util::ResultExt;
use workspace::{ModalView, Workspace, notifications::DetachAndPromptErr};
actions!(git, [WorktreeFromDefault, WorktreeFromDefaultOnWindow]);
pub fn register(workspace: &mut Workspace) {
workspace.register_action(open);
}
pub fn open(
workspace: &mut Workspace,
_: &zed_actions::git::Worktree,
window: &mut Window,
cx: &mut Context<Workspace>,
) {
let repository = workspace.project().read(cx).active_repository(cx);
let workspace_handle = workspace.weak_handle();
workspace.toggle_modal(window, cx, |window, cx| {
WorktreeList::new(repository, workspace_handle, rems(34.), window, cx)
})
}
pub struct WorktreeList {
width: Rems,
pub picker: Entity<Picker<WorktreeListDelegate>>,
picker_focus_handle: FocusHandle,
_subscription: Subscription,
}
impl WorktreeList {
fn new(
repository: Option<Entity<Repository>>,
workspace: WeakEntity<Workspace>,
width: Rems,
window: &mut Window,
cx: &mut Context<Self>,
) -> Self {
let all_worktrees_request = repository
.clone()
.map(|repository| repository.update(cx, |repository, _| repository.worktrees()));
let default_branch_request = repository
.clone()
.map(|repository| repository.update(cx, |repository, _| repository.default_branch()));
cx.spawn_in(window, async move |this, cx| {
let all_worktrees = all_worktrees_request
.context("No active repository")?
.await??;
let default_branch = default_branch_request
.context("No active repository")?
.await
.map(Result::ok)
.ok()
.flatten()
.flatten();
this.update_in(cx, |this, window, cx| {
this.picker.update(cx, |picker, cx| {
picker.delegate.all_worktrees = Some(all_worktrees);
picker.delegate.default_branch = default_branch;
picker.refresh(window, cx);
})
})?;
anyhow::Ok(())
})
.detach_and_log_err(cx);
let delegate = WorktreeListDelegate::new(workspace, repository, window, cx);
let picker = cx.new(|cx| Picker::uniform_list(delegate, window, cx));
let picker_focus_handle = picker.focus_handle(cx);
picker.update(cx, |picker, _| {
picker.delegate.focus_handle = picker_focus_handle.clone();
});
let _subscription = cx.subscribe(&picker, |_, _, _, cx| {
cx.emit(DismissEvent);
});
Self {
picker,
picker_focus_handle,
width,
_subscription,
}
}
fn handle_modifiers_changed(
&mut self,
ev: &ModifiersChangedEvent,
_: &mut Window,
cx: &mut Context<Self>,
) {
self.picker
.update(cx, |picker, _| picker.delegate.modifiers = ev.modifiers)
}
fn handle_new_worktree(
&mut self,
replace_current_window: bool,
window: &mut Window,
cx: &mut Context<Self>,
) {
self.picker.update(cx, |picker, cx| {
let ix = picker.delegate.selected_index();
let Some(entry) = picker.delegate.matches.get(ix) else {
return;
};
let Some(default_branch) = picker.delegate.default_branch.clone() else {
return;
};
if !entry.is_new {
return;
}
picker.delegate.create_worktree(
entry.worktree.branch(),
replace_current_window,
Some(default_branch.into()),
window,
cx,
);
})
}
}
impl ModalView for WorktreeList {}
impl EventEmitter<DismissEvent> for WorktreeList {}
impl Focusable for WorktreeList {
fn focus_handle(&self, _: &App) -> FocusHandle {
self.picker_focus_handle.clone()
}
}
impl Render for WorktreeList {
fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
v_flex()
.key_context("GitWorktreeSelector")
.w(self.width)
.on_modifiers_changed(cx.listener(Self::handle_modifiers_changed))
.on_action(cx.listener(|this, _: &WorktreeFromDefault, w, cx| {
this.handle_new_worktree(false, w, cx)
}))
.on_action(cx.listener(|this, _: &WorktreeFromDefaultOnWindow, w, cx| {
this.handle_new_worktree(true, w, cx)
}))
.child(self.picker.clone())
.on_mouse_down_out({
cx.listener(move |this, _, window, cx| {
this.picker.update(cx, |this, cx| {
this.cancel(&Default::default(), window, cx);
})
})
})
}
}
#[derive(Debug, Clone)]
struct WorktreeEntry {
worktree: GitWorktree,
positions: Vec<usize>,
is_new: bool,
}
pub struct WorktreeListDelegate {
matches: Vec<WorktreeEntry>,
all_worktrees: Option<Vec<GitWorktree>>,
workspace: WeakEntity<Workspace>,
repo: Option<Entity<Repository>>,
selected_index: usize,
last_query: String,
modifiers: Modifiers,
focus_handle: FocusHandle,
default_branch: Option<SharedString>,
}
impl WorktreeListDelegate {
fn new(
workspace: WeakEntity<Workspace>,
repo: Option<Entity<Repository>>,
_window: &mut Window,
cx: &mut Context<WorktreeList>,
) -> Self {
Self {
matches: vec![],
all_worktrees: None,
workspace,
selected_index: 0,
repo,
last_query: Default::default(),
modifiers: Default::default(),
focus_handle: cx.focus_handle(),
default_branch: None,
}
}
fn create_worktree(
&self,
worktree_branch: &str,
replace_current_window: bool,
commit: Option<String>,
window: &mut Window,
cx: &mut Context<Picker<Self>>,
) {
let workspace = self.workspace.clone();
let Some(repo) = self.repo.clone() else {
return;
};
let worktree_path = self
.workspace
.clone()
.update(cx, |this, cx| {
this.prompt_for_open_path(
PathPromptOptions {
files: false,
directories: true,
multiple: false,
prompt: Some("Select directory for new worktree".into()),
},
DirectoryLister::Project(this.project().clone()),
window,
cx,
)
})
.log_err();
let Some(worktree_path) = worktree_path else {
return;
};
let branch = worktree_branch.to_string();
let window_handle = window.window_handle();
cx.spawn_in(window, async move |_, cx| {
let Some(paths) = worktree_path.await? else {
return anyhow::Ok(());
};
let path = paths.get(0).cloned().context("No path selected")?;
repo.update(cx, |repo, _| {
repo.create_worktree(branch.clone(), path.clone(), commit)
})?
.await??;
let final_path = path.join(branch);
let (connection_options, app_state, is_local) =
workspace.update(cx, |workspace, cx| {
let project = workspace.project().clone();
let connection_options = project.read(cx).remote_connection_options(cx);
let app_state = workspace.app_state().clone();
let is_local = project.read(cx).is_local();
(connection_options, app_state, is_local)
})?;
if is_local {
workspace
.update_in(cx, |workspace, window, cx| {
workspace.open_workspace_for_paths(
replace_current_window,
vec![final_path],
window,
cx,
)
})?
.await?;
} else if let Some(connection_options) = connection_options {
open_remote_worktree(
connection_options,
vec![final_path],
app_state,
window_handle,
replace_current_window,
cx,
)
.await?;
}
anyhow::Ok(())
})
.detach_and_prompt_err("Failed to create worktree", window, cx, |e, _, _| {
Some(e.to_string())
});
}
fn open_worktree(
&self,
worktree_path: &PathBuf,
replace_current_window: bool,
window: &mut Window,
cx: &mut Context<Picker<Self>>,
) {
let workspace = self.workspace.clone();
let path = worktree_path.clone();
let Some((connection_options, app_state, is_local)) = workspace
.update(cx, |workspace, cx| {
let project = workspace.project().clone();
let connection_options = project.read(cx).remote_connection_options(cx);
let app_state = workspace.app_state().clone();
let is_local = project.read(cx).is_local();
(connection_options, app_state, is_local)
})
.log_err()
else {
return;
};
if is_local {
let open_task = workspace.update(cx, |workspace, cx| {
workspace.open_workspace_for_paths(replace_current_window, vec![path], window, cx)
});
cx.spawn(async move |_, _| {
open_task?.await?;
anyhow::Ok(())
})
.detach_and_prompt_err(
"Failed to open worktree",
window,
cx,
|e, _, _| Some(e.to_string()),
);
} else if let Some(connection_options) = connection_options {
let window_handle = window.window_handle();
cx.spawn_in(window, async move |_, cx| {
open_remote_worktree(
connection_options,
vec![path],
app_state,
window_handle,
replace_current_window,
cx,
)
.await
})
.detach_and_prompt_err(
"Failed to open worktree",
window,
cx,
|e, _, _| Some(e.to_string()),
);
}
cx.emit(DismissEvent);
}
fn base_branch<'a>(&'a self, cx: &'a mut Context<Picker<Self>>) -> Option<&'a str> {
self.repo
.as_ref()
.and_then(|repo| repo.read(cx).branch.as_ref().map(|b| b.name()))
}
}
async fn open_remote_worktree(
connection_options: RemoteConnectionOptions,
paths: Vec<PathBuf>,
app_state: Arc<workspace::AppState>,
window: gpui::AnyWindowHandle,
replace_current_window: bool,
cx: &mut AsyncApp,
) -> anyhow::Result<()> {
let workspace_window = window
.downcast::<Workspace>()
.ok_or_else(|| anyhow::anyhow!("Window is not a Workspace window"))?;
let connect_task = workspace_window.update(cx, |workspace, window, cx| {
workspace.toggle_modal(window, cx, |window, cx| {
RemoteConnectionModal::new(&connection_options, Vec::new(), window, cx)
});
let prompt = workspace
.active_modal::<RemoteConnectionModal>(cx)
.expect("Modal just created")
.read(cx)
.prompt
.clone();
connect(
ConnectionIdentifier::setup(),
connection_options.clone(),
prompt,
window,
cx,
)
.prompt_err("Failed to connect", window, cx, |_, _, _| None)
})?;
let session = connect_task.await;
workspace_window.update(cx, |workspace, _window, cx| {
if let Some(prompt) = workspace.active_modal::<RemoteConnectionModal>(cx) {
prompt.update(cx, |prompt, cx| prompt.finished(cx))
}
})?;
let Some(Some(session)) = session else {
return Ok(());
};
let new_project = cx.update(|cx| {
project::Project::remote(
session,
app_state.client.clone(),
app_state.node_runtime.clone(),
app_state.user_store.clone(),
app_state.languages.clone(),
app_state.fs.clone(),
cx,
)
})?;
let window_to_use = if replace_current_window {
workspace_window
} else {
let workspace_position = cx
.update(|cx| {
workspace::remote_workspace_position_from_db(connection_options.clone(), &paths, cx)
})?
.await
.context("fetching workspace position from db")?;
let mut options =
cx.update(|cx| (app_state.build_window_options)(workspace_position.display, cx))?;
options.window_bounds = workspace_position.window_bounds;
cx.open_window(options, |window, cx| {
cx.new(|cx| {
let mut workspace =
Workspace::new(None, new_project.clone(), app_state.clone(), window, cx);
workspace.centered_layout = workspace_position.centered_layout;
workspace
})
})?
};
workspace::open_remote_project_with_existing_connection(
connection_options,
new_project,
paths,
app_state,
window_to_use,
cx,
)
.await?;
Ok(())
}
impl PickerDelegate for WorktreeListDelegate {
type ListItem = ListItem;
fn placeholder_text(&self, _window: &mut Window, _cx: &mut App) -> Arc<str> {
"Select worktree…".into()
}
fn editor_position(&self) -> PickerEditorPosition {
PickerEditorPosition::Start
}
fn match_count(&self) -> usize {
self.matches.len()
}
fn selected_index(&self) -> usize {
self.selected_index
}
fn set_selected_index(
&mut self,
ix: usize,
_window: &mut Window,
_: &mut Context<Picker<Self>>,
) {
self.selected_index = ix;
}
fn update_matches(
&mut self,
query: String,
window: &mut Window,
cx: &mut Context<Picker<Self>>,
) -> Task<()> {
let Some(all_worktrees) = self.all_worktrees.clone() else {
return Task::ready(());
};
cx.spawn_in(window, async move |picker, cx| {
let mut matches: Vec<WorktreeEntry> = if query.is_empty() {
all_worktrees
.into_iter()
.map(|worktree| WorktreeEntry {
worktree,
positions: Vec::new(),
is_new: false,
})
.collect()
} else {
let candidates = all_worktrees
.iter()
.enumerate()
.map(|(ix, worktree)| StringMatchCandidate::new(ix, worktree.branch()))
.collect::<Vec<StringMatchCandidate>>();
fuzzy::match_strings(
&candidates,
&query,
true,
true,
10000,
&Default::default(),
cx.background_executor().clone(),
)
.await
.into_iter()
.map(|candidate| WorktreeEntry {
worktree: all_worktrees[candidate.candidate_id].clone(),
positions: candidate.positions,
is_new: false,
})
.collect()
};
picker
.update(cx, |picker, _| {
if !query.is_empty()
&& !matches
.first()
.is_some_and(|entry| entry.worktree.branch() == query)
{
let query = query.replace(' ', "-");
matches.push(WorktreeEntry {
worktree: GitWorktree {
path: Default::default(),
ref_name: format!("refs/heads/{query}").into(),
sha: Default::default(),
},
positions: Vec::new(),
is_new: true,
})
}
let delegate = &mut picker.delegate;
delegate.matches = matches;
if delegate.matches.is_empty() {
delegate.selected_index = 0;
} else {
delegate.selected_index =
core::cmp::min(delegate.selected_index, delegate.matches.len() - 1);
}
delegate.last_query = query;
})
.log_err();
})
}
fn confirm(&mut self, secondary: bool, window: &mut Window, cx: &mut Context<Picker<Self>>) {
let Some(entry) = self.matches.get(self.selected_index()) else {
return;
};
if entry.is_new {
self.create_worktree(&entry.worktree.branch(), secondary, None, window, cx);
} else {
self.open_worktree(&entry.worktree.path, secondary, window, cx);
}
cx.emit(DismissEvent);
}
fn dismissed(&mut self, _: &mut Window, cx: &mut Context<Picker<Self>>) {
cx.emit(DismissEvent);
}
fn render_match(
&self,
ix: usize,
selected: bool,
_window: &mut Window,
cx: &mut Context<Picker<Self>>,
) -> Option<Self::ListItem> {
let entry = &self.matches.get(ix)?;
let path = entry.worktree.path.to_string_lossy().to_string();
let sha = entry
.worktree
.sha
.clone()
.chars()
.take(7)
.collect::<String>();
let focus_handle = self.focus_handle.clone();
let icon = if let Some(default_branch) = self.default_branch.clone()
&& entry.is_new
{
Some(
IconButton::new("worktree-from-default", IconName::GitBranchAlt)
.on_click(|_, window, cx| {
window.dispatch_action(WorktreeFromDefault.boxed_clone(), cx)
})
.on_right_click(|_, window, cx| {
window.dispatch_action(WorktreeFromDefaultOnWindow.boxed_clone(), cx)
})
.tooltip(move |_, cx| {
Tooltip::for_action_in(
format!("From default branch {default_branch}"),
&WorktreeFromDefault,
&focus_handle,
cx,
)
}),
)
} else {
None
};
let branch_name = if entry.is_new {
h_flex()
.gap_1()
.child(
Icon::new(IconName::Plus)
.size(IconSize::Small)
.color(Color::Muted),
)
.child(
Label::new(format!("Create worktree \"{}\"", entry.worktree.branch()))
.single_line()
.truncate(),
)
.into_any_element()
} else {
h_flex()
.gap_1()
.child(
Icon::new(IconName::GitBranch)
.size(IconSize::Small)
.color(Color::Muted),
)
.child(HighlightedLabel::new(
entry.worktree.branch().to_owned(),
entry.positions.clone(),
))
.truncate()
.into_any_element()
};
let sublabel = if entry.is_new {
format!(
"based off {}",
self.base_branch(cx).unwrap_or("the current branch")
)
} else {
format!("at {}", path)
};
Some(
ListItem::new(SharedString::from(format!("worktree-menu-{ix}")))
.inset(true)
.spacing(ListItemSpacing::Sparse)
.toggle_state(selected)
.child(
v_flex()
.w_full()
.overflow_hidden()
.child(
h_flex()
.gap_6()
.justify_between()
.overflow_x_hidden()
.child(branch_name)
.when(!entry.is_new, |el| {
el.child(
Label::new(sha)
.size(LabelSize::Small)
.color(Color::Muted)
.into_element(),
)
}),
)
.child(
div().max_w_96().child(
Label::new(sublabel)
.size(LabelSize::Small)
.color(Color::Muted)
.truncate()
.into_any_element(),
),
),
)
.end_slot::<IconButton>(icon),
)
}
fn no_matches_text(&self, _window: &mut Window, _cx: &mut App) -> Option<SharedString> {
Some("No worktrees found".into())
}
fn render_footer(&self, _: &mut Window, cx: &mut Context<Picker<Self>>) -> Option<AnyElement> {
let focus_handle = self.focus_handle.clone();
Some(
h_flex()
.w_full()
.p_1p5()
.gap_0p5()
.justify_end()
.border_t_1()
.border_color(cx.theme().colors().border_variant)
.child(
Button::new("open-in-new-window", "Open in new window")
.key_binding(
KeyBinding::for_action_in(&menu::Confirm, &focus_handle, cx)
.map(|kb| kb.size(rems_from_px(12.))),
)
.on_click(|_, window, cx| {
window.dispatch_action(menu::Confirm.boxed_clone(), cx)
}),
)
.child(
Button::new("open-in-window", "Open")
.key_binding(
KeyBinding::for_action_in(&menu::SecondaryConfirm, &focus_handle, cx)
.map(|kb| kb.size(rems_from_px(12.))),
)
.on_click(|_, window, cx| {
window.dispatch_action(menu::SecondaryConfirm.boxed_clone(), cx)
}),
)
.into_any(),
)
}
}
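
The deleted `worktree_picker.rs` above builds its match list in `update_matches`: filter the known worktree branches against the query, then append a synthetic "create new worktree" entry whenever the query is not an exact match, replacing spaces with dashes. A minimal, self-contained sketch of that pattern follows; plain substring matching stands in for `fuzzy::match_strings`, and the types, data, and `main` are illustrative rather than Zed's.

```rust
// A minimal sketch of the match-or-create pattern from the deleted picker's
// `update_matches`: filter the known branch names, then append a synthetic
// "create new worktree" entry when the query is not an exact match, replacing
// spaces with dashes as the picker did. Plain substring matching stands in for
// `fuzzy::match_strings`; the types and data here are illustrative.
#[derive(Debug)]
struct Entry {
    branch: String,
    is_new: bool,
}

fn build_matches(all_branches: &[&str], query: &str) -> Vec<Entry> {
    let mut matches: Vec<Entry> = all_branches
        .iter()
        .filter(|branch| query.is_empty() || branch.contains(query))
        .map(|branch| Entry {
            branch: branch.to_string(),
            is_new: false,
        })
        .collect();

    // Mirror the original's check: only the top match is compared against the query.
    if !query.is_empty()
        && !matches
            .first()
            .is_some_and(|entry| entry.branch == query)
    {
        matches.push(Entry {
            branch: query.replace(' ', "-"),
            is_new: true,
        });
    }
    matches
}

fn main() {
    for entry in build_matches(&["main", "fix/git-panel"], "fix panel") {
        println!("{} (new: {})", entry.branch, entry.is_new);
    }
}
```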

View File

@@ -253,7 +253,6 @@ util = { workspace = true, features = ["test-support"] }
[target.'cfg(target_os = "windows")'.build-dependencies]
embed-resource = "3.0"
windows-registry = "0.5"
[target.'cfg(target_os = "macos")'.build-dependencies]
bindgen = "0.71"

View File

@@ -248,7 +248,6 @@ mod macos {
#[cfg(target_os = "windows")]
mod windows {
use std::{
ffi::OsString,
fs,
io::Write,
path::{Path, PathBuf},
@@ -326,49 +325,6 @@ mod windows {
}
}
/// Locate `binary` in the newest installed Windows SDK.
pub fn find_latest_windows_sdk_binary(
binary: &str,
) -> Result<Option<PathBuf>, Box<dyn std::error::Error>> {
let key = windows_registry::LOCAL_MACHINE
.open("SOFTWARE\\WOW6432Node\\Microsoft\\Microsoft SDKs\\Windows\\v10.0")?;
let install_folder: String = key.get_string("InstallationFolder")?; // "C:\Program Files (x86)\Windows Kits\10\"
let install_folder_bin = Path::new(&install_folder).join("bin");
let mut versions: Vec<_> = std::fs::read_dir(&install_folder_bin)?
.flatten()
.filter(|entry| entry.path().is_dir())
.filter_map(|entry| entry.file_name().into_string().ok())
.collect();
versions.sort_by_key(|s| {
s.split('.')
.filter_map(|p| p.parse().ok())
.collect::<Vec<u32>>()
});
let arch = match std::env::consts::ARCH {
"x86_64" => "x64",
"aarch64" => "arm64",
_ => Err(format!(
"Unsupported architecture: {}",
std::env::consts::ARCH
))?,
};
if let Some(highest_version) = versions.last() {
return Ok(Some(
install_folder_bin
.join(highest_version)
.join(arch)
.join(binary),
));
}
Ok(None)
}
/// You can set the `GPUI_FXC_PATH` environment variable to specify the path to the fxc.exe compiler.
fn find_fxc_compiler() -> String {
// Check environment variable
@@ -389,8 +345,12 @@ mod windows {
return path.trim().to_string();
}
if let Ok(Some(path)) = find_latest_windows_sdk_binary("fxc.exe") {
return path.to_string_lossy().into_owned();
// Check the default path
if Path::new(r"C:\Program Files (x86)\Windows Kits\10\bin\10.0.26100.0\x64\fxc.exe")
.exists()
{
return r"C:\Program Files (x86)\Windows Kits\10\bin\10.0.26100.0\x64\fxc.exe"
.to_string();
}
panic!("Failed to find fxc.exe");

View File

@@ -260,6 +260,19 @@ impl AsyncApp {
}
}
impl sum_tree::BackgroundSpawn for BackgroundExecutor {
type Task<R>
= Task<R>
where
R: Send + Sync;
fn background_spawn<R>(&self, future: impl Future<Output = R> + Send + 'static) -> Self::Task<R>
where
R: Send + Sync + 'static,
{
self.spawn(future)
}
}
/// A cloneable, owned handle to the application context,
/// composed with the window associated with the current task.
#[derive(Clone, Deref, DerefMut)]
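
The new `sum_tree::BackgroundSpawn` impl above lets gpui's `BackgroundExecutor` hand background work to `sum_tree` behind a generic associated type. Below is a hedged sketch of the same trait shape with a thread-based stand-in executor: the trait signature is copied from the hunk, while everything else (the `ThreadSpawner`, the no-op-waker polling, which needs a recent toolchain for `Waker::noop`) is illustrative, not gpui's implementation.

```rust
use std::future::Future;
use std::pin::pin;
use std::task::{Context, Poll, Waker};
use std::thread::{self, JoinHandle};

// Trait shape copied from the hunk above; gpui implements it for BackgroundExecutor.
trait BackgroundSpawn {
    type Task<R>
    where
        R: Send + Sync;

    fn background_spawn<R>(
        &self,
        future: impl Future<Output = R> + Send + 'static,
    ) -> Self::Task<R>
    where
        R: Send + Sync + 'static;
}

// Illustrative stand-in executor: one OS thread per task, not gpui's scheduler.
struct ThreadSpawner;

impl BackgroundSpawn for ThreadSpawner {
    type Task<R>
        = JoinHandle<R>
    where
        R: Send + Sync;

    fn background_spawn<R>(
        &self,
        future: impl Future<Output = R> + Send + 'static,
    ) -> Self::Task<R>
    where
        R: Send + Sync + 'static,
    {
        thread::spawn(move || {
            // Poll once with a no-op waker (Rust 1.85+); good enough for the
            // immediately-ready futures this sketch uses.
            let mut future = pin!(future);
            match future
                .as_mut()
                .poll(&mut Context::from_waker(Waker::noop()))
            {
                Poll::Ready(value) => value,
                Poll::Pending => panic!("sketch only handles immediately-ready futures"),
            }
        })
    }
}

fn main() {
    let task = ThreadSpawner.background_spawn(async { 2 + 2 });
    assert_eq!(task.join().unwrap(), 4);
}
```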

View File

@@ -393,6 +393,11 @@ impl TestAppContext {
}
}
/// Returns the background executor for this context.
pub fn background_executor(&self) -> &BackgroundExecutor {
&self.background_executor
}
/// Wait until there are no more pending tasks.
pub fn run_until_parked(&mut self) {
self.background_executor.run_until_parked()

View File

@@ -251,6 +251,8 @@ impl Element for UniformList {
None
}
// self.max_found_width = 0.0
//
fn request_layout(
&mut self,
global_id: Option<&GlobalElementId>,

View File

@@ -342,7 +342,7 @@ impl BackgroundExecutor {
/// for all of them to complete before returning.
pub async fn scoped<'scope, F>(&self, scheduler: F)
where
F: FnOnce(&mut Scope<'scope>),
F: for<'a> FnOnce(&'a mut Scope<'scope>),
{
let mut scope = Scope::new(self.clone());
(scheduler)(&mut scope);

View File

@@ -70,11 +70,6 @@ pub trait EntityInputHandler: 'static + Sized {
window: &mut Window,
cx: &mut Context<Self>,
) -> Option<usize>;
/// See [`InputHandler::accepts_text_input`] for details
fn accepts_text_input(&self, _window: &mut Window, _cx: &mut Context<Self>) -> bool {
true
}
}
/// The canonical implementation of [`crate::PlatformInputHandler`]. Call [`Window::handle_input`]
@@ -182,9 +177,4 @@ impl<V: EntityInputHandler> InputHandler for ElementInputHandler<V> {
view.character_index_for_point(point, window, cx)
})
}
fn accepts_text_input(&mut self, window: &mut Window, cx: &mut App) -> bool {
self.view
.update(cx, |view, cx| view.accepts_text_input(window, cx))
}
}

View File

@@ -25,12 +25,6 @@ pub struct KeyDownEvent {
/// Whether the key is currently held down.
pub is_held: bool,
/// Whether the modifiers are excessive for producing this character.
/// When false, the modifiers are essential for character input (e.g., AltGr),
/// and character input should be prioritized over keybindings.
/// When true, the modifiers are for keybindings (e.g., Ctrl+A).
pub prefer_character_input: bool,
}
impl Sealed for KeyDownEvent {}

View File

@@ -1012,11 +1012,6 @@ impl PlatformInputHandler {
.ok()
.flatten()
}
#[allow(dead_code)]
pub(crate) fn accepts_text_input(&mut self, window: &mut Window, cx: &mut App) -> bool {
self.handler.accepts_text_input(window, cx)
}
}
/// A struct representing a selection in a text buffer, in UTF16 characters.
@@ -1125,11 +1120,6 @@ pub trait InputHandler: 'static {
fn apple_press_and_hold_enabled(&mut self) -> bool {
true
}
/// Returns whether this handler is accepting text input to be inserted.
fn accepts_text_input(&mut self, _window: &mut Window, _cx: &mut App) -> bool {
true
}
}
/// The variables that can be configured when creating a new window

View File

@@ -1382,7 +1382,6 @@ impl Dispatch<wl_keyboard::WlKeyboard, ()> for WaylandClientStatePtr {
let input = PlatformInput::KeyDown(KeyDownEvent {
keystroke: keystroke.clone(),
is_held: false,
prefer_character_input: false,
});
state.repeat.current_id += 1;
@@ -1396,7 +1395,6 @@ impl Dispatch<wl_keyboard::WlKeyboard, ()> for WaylandClientStatePtr {
let input = PlatformInput::KeyDown(KeyDownEvent {
keystroke,
is_held: true,
prefer_character_input: false,
});
move |_event, _metadata, this| {
let mut client = this.get_client();
@@ -1481,7 +1479,6 @@ impl Dispatch<zwp_text_input_v3::ZwpTextInputV3, ()> for WaylandClientStatePtr {
key_char: Some(commit_text),
},
is_held: false,
prefer_character_input: false,
}));
} else {
window.handle_ime(ImeInput::InsertText(commit_text));

View File

@@ -1047,7 +1047,6 @@ impl X11Client {
window.handle_input(PlatformInput::KeyDown(crate::KeyDownEvent {
keystroke,
is_held: false,
prefer_character_input: false,
}));
}
Event::KeyRelease(event) => {

View File

@@ -131,7 +131,6 @@ impl PlatformInput {
NSEventType::NSKeyDown => Some(Self::KeyDown(KeyDownEvent {
keystroke: parse_keystroke(native_event),
is_held: native_event.isARepeat() == YES,
prefer_character_input: false,
})),
NSEventType::NSKeyUp => Some(Self::KeyUp(KeyUpEvent {
keystroke: parse_keystroke(native_event),

View File

@@ -2318,7 +2318,6 @@ extern "C" fn do_command_by_selector(this: &Object, _: Sel, _: Sel) {
let handled = (callback)(PlatformInput::KeyDown(KeyDownEvent {
keystroke,
is_held: false,
prefer_character_input: false,
}));
state.as_ref().lock().do_command_handled = Some(!handled.propagate);
}

View File

@@ -171,9 +171,7 @@ fn add_recent_folders(
)?)?;
}
if tasks.GetCount().unwrap_or(0) > 0 {
list.AppendCategory(&HSTRING::from("Recent Folders"), &tasks)?;
}
list.AppendCategory(&HSTRING::from("Recent Folders"), &tasks)?;
Ok(())
}
}

View File

@@ -1,5 +1,4 @@
use std::{
sync::atomic::{AtomicBool, Ordering},
thread::{ThreadId, current},
time::Duration,
};
@@ -22,7 +21,6 @@ use crate::{
};
pub(crate) struct WindowsDispatcher {
pub(crate) wake_posted: AtomicBool,
main_sender: Sender<Runnable>,
main_thread_id: ThreadId,
platform_window_handle: SafeHwnd,
@@ -43,7 +41,6 @@ impl WindowsDispatcher {
main_thread_id,
platform_window_handle,
validation_number,
wake_posted: AtomicBool::new(false),
}
}
@@ -84,19 +81,15 @@ impl PlatformDispatcher for WindowsDispatcher {
fn dispatch_on_main_thread(&self, runnable: Runnable) {
match self.main_sender.send(runnable) {
Ok(_) => {
if !self.wake_posted.swap(true, Ordering::AcqRel) {
unsafe {
PostMessageW(
Some(self.platform_window_handle.as_raw()),
WM_GPUI_TASK_DISPATCHED_ON_MAIN_THREAD,
WPARAM(self.validation_number),
LPARAM(0),
)
.log_err();
}
}
}
Ok(_) => unsafe {
PostMessageW(
Some(self.platform_window_handle.as_raw()),
WM_GPUI_TASK_DISPATCHED_ON_MAIN_THREAD,
WPARAM(self.validation_number),
LPARAM(0),
)
.log_err();
},
Err(runnable) => {
// NOTE: Runnable may wrap a Future that is !Send.
//
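
For context on the removed `wake_posted` flag: it coalesced main-thread wake-ups so that only the first enqueue after a drain posts `WM_GPUI_TASK_DISPATCHED_ON_MAIN_THREAD`, and the drain loop re-checks the queue after clearing the flag so a late enqueue is never lost. A self-contained sketch of that pattern using std channels; `post_wake_message` is a hypothetical stand-in for the `PostMessageW` call, and the types are illustrative.

```rust
// Sketch of the wake-coalescing pattern implemented by the removed wake_posted
// flag: producers post at most one wake per drain cycle, and the consumer
// clears the flag and re-checks the queue so a racing enqueue is not lost.
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::mpsc::{Receiver, Sender};

struct Dispatcher {
    wake_posted: AtomicBool,
    sender: Sender<Box<dyn FnOnce() + Send>>,
}

impl Dispatcher {
    fn dispatch(&self, runnable: Box<dyn FnOnce() + Send>) {
        self.sender.send(runnable).ok();
        // Only the first enqueue after a drain posts a wake-up.
        if !self.wake_posted.swap(true, Ordering::AcqRel) {
            post_wake_message();
        }
    }
}

fn drain(receiver: &Receiver<Box<dyn FnOnce() + Send>>, wake_posted: &AtomicBool) {
    loop {
        for runnable in receiver.try_iter() {
            runnable();
        }
        // A producer could enqueue right here while the flag is still true and
        // therefore skip posting; clear the flag, then look at the queue again.
        wake_posted.store(false, Ordering::Release);
        match receiver.try_recv() {
            Ok(runnable) => {
                wake_posted.swap(true, Ordering::AcqRel);
                runnable();
            }
            Err(_) => break,
        }
    }
}

fn post_wake_message() {
    // Hypothetical stand-in for posting a window message to the platform event loop.
    println!("wake posted");
}

fn main() {
    let (sender, receiver) = std::sync::mpsc::channel();
    let dispatcher = Dispatcher {
        wake_posted: AtomicBool::new(false),
        sender,
    };
    dispatcher.dispatch(Box::new(|| println!("hello from the main-thread queue")));
    drain(&receiver, &dispatcher.wake_posted);
}
```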

View File

@@ -26,7 +26,6 @@ pub(crate) const WM_GPUI_DOCK_MENU_ACTION: u32 = WM_USER + 4;
pub(crate) const WM_GPUI_FORCE_UPDATE_WINDOW: u32 = WM_USER + 5;
pub(crate) const WM_GPUI_KEYBOARD_LAYOUT_CHANGED: u32 = WM_USER + 6;
pub(crate) const WM_GPUI_GPU_DEVICE_LOST: u32 = WM_USER + 7;
pub(crate) const WM_GPUI_KEYDOWN: u32 = WM_USER + 8;
const SIZE_MOVE_LOOP_TIMER_ID: usize = 1;
const AUTO_HIDE_TASKBAR_THICKNESS_PX: i32 = 1;
@@ -93,10 +92,13 @@ impl WindowsWindowInner {
}
WM_MOUSEWHEEL => self.handle_mouse_wheel_msg(handle, wparam, lparam),
WM_MOUSEHWHEEL => self.handle_mouse_horizontal_wheel_msg(handle, wparam, lparam),
WM_SYSKEYUP => self.handle_syskeyup_msg(wparam, lparam),
WM_KEYUP => self.handle_keyup_msg(wparam, lparam),
WM_GPUI_KEYDOWN => self.handle_keydown_msg(wparam, lparam),
WM_SYSKEYDOWN => self.handle_syskeydown_msg(handle, wparam, lparam),
WM_SYSKEYUP => self.handle_syskeyup_msg(handle, wparam, lparam),
WM_SYSCOMMAND => self.handle_system_command(wparam),
WM_KEYDOWN => self.handle_keydown_msg(handle, wparam, lparam),
WM_KEYUP => self.handle_keyup_msg(handle, wparam, lparam),
WM_CHAR => self.handle_char_msg(wparam),
WM_DEADCHAR => self.handle_dead_char_msg(wparam),
WM_IME_STARTCOMPOSITION => self.handle_ime_position(handle),
WM_IME_COMPOSITION => self.handle_ime_composition(handle, lparam),
WM_SETCURSOR => self.handle_set_cursor(handle, lparam),
@@ -325,9 +327,35 @@ impl WindowsWindowInner {
Some(0)
}
fn handle_syskeyup_msg(&self, wparam: WPARAM, lparam: LPARAM) -> Option<isize> {
fn handle_syskeydown_msg(&self, handle: HWND, wparam: WPARAM, lparam: LPARAM) -> Option<isize> {
let mut lock = self.state.borrow_mut();
let input = handle_key_event(wparam, lparam, &mut lock, |keystroke, _| {
let input = handle_key_event(handle, wparam, lparam, &mut lock, |keystroke| {
PlatformInput::KeyDown(KeyDownEvent {
keystroke,
is_held: lparam.0 & (0x1 << 30) > 0,
})
})?;
let mut func = lock.callbacks.input.take()?;
drop(lock);
let handled = !func(input).propagate;
let mut lock = self.state.borrow_mut();
lock.callbacks.input = Some(func);
if handled {
lock.system_key_handled = true;
Some(0)
} else {
// We need to call `DefWindowProcW`, or we lose system-wide shortcuts such as `Alt+F4`
// and other `Alt+{key}` combinations.
None
}
}
fn handle_syskeyup_msg(&self, handle: HWND, wparam: WPARAM, lparam: LPARAM) -> Option<isize> {
let mut lock = self.state.borrow_mut();
let input = handle_key_event(handle, wparam, lparam, &mut lock, |keystroke| {
PlatformInput::KeyUp(KeyUpEvent { keystroke })
})?;
let mut func = lock.callbacks.input.take()?;
@@ -341,24 +369,27 @@ impl WindowsWindowInner {
// It's a known bug that you can't trigger `ctrl-shift-0`. See:
// https://superuser.com/questions/1455762/ctrl-shift-number-key-combination-has-stopped-working-for-a-few-numbers
fn handle_keydown_msg(&self, wparam: WPARAM, lparam: LPARAM) -> Option<isize> {
fn handle_keydown_msg(&self, handle: HWND, wparam: WPARAM, lparam: LPARAM) -> Option<isize> {
let mut lock = self.state.borrow_mut();
let Some(input) = handle_key_event(
wparam,
lparam,
&mut lock,
|keystroke, prefer_character_input| {
PlatformInput::KeyDown(KeyDownEvent {
keystroke,
is_held: lparam.0 & (0x1 << 30) > 0,
prefer_character_input,
})
},
) else {
let Some(input) = handle_key_event(handle, wparam, lparam, &mut lock, |keystroke| {
PlatformInput::KeyDown(KeyDownEvent {
keystroke,
is_held: lparam.0 & (0x1 << 30) > 0,
})
}) else {
return Some(1);
};
drop(lock);
let is_composing = self
.with_input_handler(|input_handler| input_handler.marked_text_range())
.flatten()
.is_some();
if is_composing {
translate_message(handle, wparam, lparam);
return Some(0);
}
let Some(mut func) = self.state.borrow_mut().callbacks.input.take() else {
return Some(1);
};
@@ -367,12 +398,17 @@ impl WindowsWindowInner {
self.state.borrow_mut().callbacks.input = Some(func);
if handled { Some(0) } else { Some(1) }
if handled {
Some(0)
} else {
translate_message(handle, wparam, lparam);
Some(1)
}
}
fn handle_keyup_msg(&self, wparam: WPARAM, lparam: LPARAM) -> Option<isize> {
fn handle_keyup_msg(&self, handle: HWND, wparam: WPARAM, lparam: LPARAM) -> Option<isize> {
let mut lock = self.state.borrow_mut();
let Some(input) = handle_key_event(wparam, lparam, &mut lock, |keystroke, _| {
let Some(input) = handle_key_event(handle, wparam, lparam, &mut lock, |keystroke| {
PlatformInput::KeyUp(KeyUpEvent { keystroke })
}) else {
return Some(1);
@@ -398,6 +434,14 @@ impl WindowsWindowInner {
Some(0)
}
fn handle_dead_char_msg(&self, wparam: WPARAM) -> Option<isize> {
let ch = char::from_u32(wparam.0 as u32)?.to_string();
self.with_input_handler(|input_handler| {
input_handler.replace_and_mark_text_in_range(None, &ch, None);
});
None
}
fn handle_mouse_down_msg(
&self,
handle: HWND,
@@ -1083,6 +1127,17 @@ impl WindowsWindowInner {
Some(0)
}
fn handle_system_command(&self, wparam: WPARAM) -> Option<isize> {
if wparam.0 == SC_KEYMENU as usize {
let mut lock = self.state.borrow_mut();
if lock.system_key_handled {
lock.system_key_handled = false;
return Some(0);
}
}
None
}
fn handle_system_theme_changed(&self, handle: HWND, lparam: LPARAM) -> Option<isize> {
// lParam is a pointer to a string that indicates the area containing the system parameter
// that was changed.
@@ -1226,14 +1281,30 @@ impl WindowsWindowInner {
}
}
#[inline]
fn translate_message(handle: HWND, wparam: WPARAM, lparam: LPARAM) {
let msg = MSG {
hwnd: handle,
message: WM_KEYDOWN,
wParam: wparam,
lParam: lparam,
// Leaving the following two fields zeroed doesn't appear to break key events; they still work as expected.
// If any key-handling bugs pop up after this PR, this is probably the first place to look.
time: 0,
pt: POINT::default(),
};
unsafe { TranslateMessage(&msg).ok().log_err() };
}
fn handle_key_event<F>(
handle: HWND,
wparam: WPARAM,
lparam: LPARAM,
state: &mut WindowsWindowState,
f: F,
) -> Option<PlatformInput>
where
F: FnOnce(Keystroke, bool) -> PlatformInput,
F: FnOnce(Keystroke) -> PlatformInput,
{
let virtual_key = VIRTUAL_KEY(wparam.loword());
let modifiers = current_modifiers();
@@ -1252,7 +1323,10 @@ where
capslock: current_capslock(),
}))
}
VK_PACKET => None,
VK_PACKET => {
translate_message(handle, wparam, lparam);
None
}
VK_CAPITAL => {
let capslock = current_capslock();
if state
@@ -1268,8 +1342,13 @@ where
}))
}
vkey => {
let vkey = if vkey == VK_PROCESSKEY {
VIRTUAL_KEY(unsafe { ImmGetVirtualKey(handle) } as u16)
} else {
vkey
};
let keystroke = parse_normal_key(vkey, lparam, modifiers)?;
Some(f(keystroke.0, keystroke.1))
Some(f(keystroke))
}
}
}
@@ -1329,7 +1408,7 @@ fn parse_normal_key(
vkey: VIRTUAL_KEY,
lparam: LPARAM,
mut modifiers: Modifiers,
) -> Option<(Keystroke, bool)> {
) -> Option<Keystroke> {
let mut key_char = None;
let key = parse_immutable(vkey).or_else(|| {
let scan_code = lparam.hiword() & 0xFF;
@@ -1342,86 +1421,11 @@ fn parse_normal_key(
);
get_keystroke_key(vkey, scan_code as u32, &mut modifiers)
})?;
let prefer_character_input = should_prefer_character_input(vkey, lparam.hiword() & 0xFF);
Some((
Keystroke {
modifiers,
key,
key_char,
},
prefer_character_input,
))
}
fn should_prefer_character_input(vkey: VIRTUAL_KEY, scan_code: u16) -> bool {
let mut keyboard_state = [0u8; 256];
unsafe {
if GetKeyboardState(&mut keyboard_state).is_err() {
return false;
}
}
let mut buffer_c = [0u16; 8];
let result_c = unsafe {
ToUnicode(
vkey.0 as u32,
scan_code as u32,
Some(&keyboard_state),
&mut buffer_c,
0x4,
)
};
if result_c < 0 {
return false;
}
let c = &buffer_c[..result_c as usize];
if char::decode_utf16(c.iter().copied())
.next()
.and_then(|ch| ch.ok())
.map(|ch| ch.is_control())
.unwrap_or(true)
{
return false;
}
let ctrl_down = (keyboard_state[VK_CONTROL.0 as usize] & 0x80) != 0;
let alt_down = (keyboard_state[VK_MENU.0 as usize] & 0x80) != 0;
let win_down = (keyboard_state[VK_LWIN.0 as usize] & 0x80) != 0
|| (keyboard_state[VK_RWIN.0 as usize] & 0x80) != 0;
let has_modifiers = ctrl_down || alt_down || win_down;
if !has_modifiers {
return false;
}
let mut state_no_modifiers = keyboard_state;
state_no_modifiers[VK_CONTROL.0 as usize] = 0;
state_no_modifiers[VK_LCONTROL.0 as usize] = 0;
state_no_modifiers[VK_RCONTROL.0 as usize] = 0;
state_no_modifiers[VK_MENU.0 as usize] = 0;
state_no_modifiers[VK_LMENU.0 as usize] = 0;
state_no_modifiers[VK_RMENU.0 as usize] = 0;
state_no_modifiers[VK_LWIN.0 as usize] = 0;
state_no_modifiers[VK_RWIN.0 as usize] = 0;
let mut buffer_c_no_modifiers = [0u16; 8];
let result_c_no_modifiers = unsafe {
ToUnicode(
vkey.0 as u32,
scan_code as u32,
Some(&state_no_modifiers),
&mut buffer_c_no_modifiers,
0x4,
)
};
if result_c_no_modifiers <= 0 {
return false;
}
let c_no_modifiers = &buffer_c_no_modifiers[..result_c_no_modifiers as usize];
c != c_no_modifiers
Some(Keystroke {
modifiers,
key,
key_char,
})
}
fn parse_ime_composition_string(ctx: HIMC, comp_type: IME_COMPOSITION_STRING) -> Option<String> {
@@ -1456,11 +1460,25 @@ fn is_virtual_key_pressed(vkey: VIRTUAL_KEY) -> bool {
unsafe { GetKeyState(vkey.0 as i32) < 0 }
}
fn keyboard_uses_altgr() -> bool {
use crate::platform::windows::keyboard::WindowsKeyboardLayout;
WindowsKeyboardLayout::new()
.map(|layout| layout.uses_altgr())
.unwrap_or(false)
}
#[inline]
pub(crate) fn current_modifiers() -> Modifiers {
let lmenu_pressed = is_virtual_key_pressed(VK_LMENU);
let rmenu_pressed = is_virtual_key_pressed(VK_RMENU);
let lcontrol_pressed = is_virtual_key_pressed(VK_LCONTROL);
// Only treat right Alt + left Ctrl as AltGr on keyboards that actually use it
let altgr = keyboard_uses_altgr() && rmenu_pressed && lcontrol_pressed;
Modifiers {
control: is_virtual_key_pressed(VK_CONTROL),
alt: is_virtual_key_pressed(VK_MENU),
control: is_virtual_key_pressed(VK_CONTROL) && !altgr,
alt: (lmenu_pressed || rmenu_pressed) && !altgr,
shift: is_virtual_key_pressed(VK_SHIFT),
platform: is_virtual_key_pressed(VK_LWIN) || is_virtual_key_pressed(VK_RWIN),
function: false,

View File

@@ -108,6 +108,39 @@ impl WindowsKeyboardLayout {
name: "unknown".to_string(),
}
}
pub(crate) fn uses_altgr(&self) -> bool {
// Check whether this is a known AltGr layout by examining the layout ID.
// The layout ID is a hex string like "00000409" (US) or "00000407" (German);
// the last four hex digits are the language ID.
let id_bytes = self.id.as_bytes();
if id_bytes.len() >= 4 {
let lang_id = &id_bytes[id_bytes.len() - 4..];
// List of keyboard layouts that use AltGr (non-exhaustive)
matches!(
lang_id,
b"0407" | // German
b"040C" | // French
b"040A" | // Spanish
b"0415" | // Polish
b"0413" | // Dutch
b"0816" | // Portuguese
b"041D" | // Swedish
b"0414" | // Norwegian
b"040B" | // Finnish
b"041F" | // Turkish
b"0419" | // Russian
b"0405" | // Czech
b"040E" | // Hungarian
b"0424" | // Slovenian
b"041A" | // Croatian
b"041B" | // Slovak
b"0418" // Romanian
)
} else {
false
}
}
}
impl WindowsKeyboardMapper {
@@ -225,7 +258,7 @@ pub(crate) fn generate_key_char(
}
let mut buffer = [0; 8];
let len = unsafe { ToUnicode(vkey.0 as u32, scan_code, Some(&state), &mut buffer, 0x5) };
let len = unsafe { ToUnicode(vkey.0 as u32, scan_code, Some(&state), &mut buffer, 1 << 2) };
match len {
len if len > 0 => String::from_utf16(&buffer[..len as usize])
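
The `wFlags` argument to `ToUnicode` changes here from `0x5` to `1 << 2`. As I read the Win32 documentation, bit 0 means "a menu is active" and bit 2 (Windows 10 1607 and newer) means the keyboard state is not modified during translation, so the new call keeps only the state-preserving bit and drops the menu bit. A small sketch making the bits explicit; the constant names below are mine, not Win32's.

```rust
// Named bits for ToUnicode's wFlags, paraphrasing the Win32 docs:
// bit 0 - a menu is active; bit 2 (Windows 10 1607+) - do not modify keyboard
// state while translating. The hunk above keeps only the state-preserving bit.
const TOUNICODE_MENU_ACTIVE: u32 = 1 << 0; // 0x1
const TOUNICODE_DONT_CHANGE_KEYBOARD_STATE: u32 = 1 << 2; // 0x4

fn main() {
    let old_flags = TOUNICODE_MENU_ACTIVE | TOUNICODE_DONT_CHANGE_KEYBOARD_STATE; // 0x5, as before
    let new_flags = TOUNICODE_DONT_CHANGE_KEYBOARD_STATE; // 1 << 2, as in the hunk
    assert_eq!(old_flags, 0x5);
    assert_eq!(new_flags, 0x4);
    println!("old = {old_flags:#x}, new = {new_flags:#x}");
}
```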

View File

@@ -4,7 +4,7 @@ use std::{
mem::ManuallyDrop,
path::{Path, PathBuf},
rc::{Rc, Weak},
sync::{Arc, atomic::Ordering},
sync::Arc,
};
use ::util::{ResultExt, paths::SanitizedPath};
@@ -48,7 +48,6 @@ struct WindowsPlatformInner {
// The below members will never change throughout the entire lifecycle of the app.
validation_number: usize,
main_receiver: flume::Receiver<Runnable>,
dispatcher: Arc<WindowsDispatcher>,
}
pub(crate) struct WindowsPlatformState {
@@ -110,10 +109,8 @@ impl WindowsPlatform {
inner: None,
raw_window_handles: Arc::downgrade(&raw_window_handles),
validation_number,
main_sender: Some(main_sender),
main_receiver: Some(main_receiver),
directx_devices: Some(directx_devices),
dispatcher: None,
};
let result = unsafe {
CreateWindowExW(
@@ -132,9 +129,12 @@ impl WindowsPlatform {
)
};
let inner = context.inner.take().unwrap()?;
let dispatcher = context.dispatcher.take().unwrap();
let handle = result?;
let dispatcher = Arc::new(WindowsDispatcher::new(
main_sender,
handle,
validation_number,
));
let disable_direct_composition = std::env::var(DISABLE_DIRECT_COMPOSITION)
.is_ok_and(|value| value == "true" || value == "1");
let background_executor = BackgroundExecutor::new(dispatcher.clone());
@@ -272,22 +272,6 @@ impl WindowsPlatform {
}
}
fn translate_accelerator(msg: &MSG) -> Option<()> {
if msg.message != WM_KEYDOWN && msg.message != WM_SYSKEYDOWN {
return None;
}
let result = unsafe {
SendMessageW(
msg.hwnd,
WM_GPUI_KEYDOWN,
Some(msg.wParam),
Some(msg.lParam),
)
};
(result.0 == 0).then_some(())
}
impl Platform for WindowsPlatform {
fn background_executor(&self) -> BackgroundExecutor {
self.background_executor.clone()
@@ -328,10 +312,7 @@ impl Platform for WindowsPlatform {
let mut msg = MSG::default();
unsafe {
while GetMessageW(&mut msg, None, 0, 0).as_bool() {
if translate_accelerator(&msg).is_none() {
_ = TranslateMessage(&msg);
DispatchMessageW(&msg);
}
DispatchMessageW(&msg);
}
}
@@ -701,7 +682,6 @@ impl WindowsPlatformInner {
Ok(Rc::new(Self {
state,
raw_window_handles: context.raw_window_handles.clone(),
dispatcher: context.dispatcher.as_ref().unwrap().clone(),
validation_number: context.validation_number,
main_receiver: context.main_receiver.take().unwrap(),
}))
@@ -766,28 +746,9 @@ impl WindowsPlatformInner {
#[inline]
fn run_foreground_task(&self) -> Option<isize> {
loop {
for runnable in self.main_receiver.drain() {
runnable.run();
}
// Someone could enqueue a Runnable here. The flag is still true, so they will not PostMessage.
// We need to check for those Runnables after we clear the flag.
let dispatcher = self.dispatcher.clone();
dispatcher.wake_posted.store(false, Ordering::Release);
match self.main_receiver.try_recv() {
Ok(runnable) => {
let _ = dispatcher.wake_posted.swap(true, Ordering::AcqRel);
runnable.run();
continue;
}
_ => {
break;
}
}
for runnable in self.main_receiver.drain() {
runnable.run();
}
Some(0)
}
@@ -871,10 +832,8 @@ struct PlatformWindowCreateContext {
inner: Option<Result<Rc<WindowsPlatformInner>>>,
raw_window_handles: std::sync::Weak<RwLock<SmallVec<[SafeHwnd; 4]>>>,
validation_number: usize,
main_sender: Option<flume::Sender<Runnable>>,
main_receiver: Option<flume::Receiver<Runnable>>,
directx_devices: Option<DirectXDevices>,
dispatcher: Option<Arc<WindowsDispatcher>>,
}
fn open_target(target: impl AsRef<OsStr>) -> Result<()> {
@@ -1156,13 +1115,6 @@ unsafe extern "system" fn window_procedure(
let params = unsafe { &*params };
let creation_context = params.lpCreateParams as *mut PlatformWindowCreateContext;
let creation_context = unsafe { &mut *creation_context };
creation_context.dispatcher = Some(Arc::new(WindowsDispatcher::new(
creation_context.main_sender.take().unwrap(),
hwnd,
creation_context.validation_number,
)));
return match WindowsPlatformInner::new(creation_context) {
Ok(inner) => {
let weak = Box::new(Rc::downgrade(&inner));

View File

@@ -45,6 +45,7 @@ pub struct WindowsWindowState {
pub pending_surrogate: Option<u16>,
pub last_reported_modifiers: Option<Modifiers>,
pub last_reported_capslock: Option<Capslock>,
pub system_key_handled: bool,
pub hovered: bool,
pub renderer: DirectXRenderer,
@@ -111,6 +112,7 @@ impl WindowsWindowState {
let pending_surrogate = None;
let last_reported_modifiers = None;
let last_reported_capslock = None;
let system_key_handled = false;
let hovered = false;
let click_state = ClickState::new();
let nc_button_pressed = None;
@@ -131,6 +133,7 @@ impl WindowsWindowState {
pending_surrogate,
last_reported_modifiers,
last_reported_capslock,
system_key_handled,
hovered,
renderer,
click_state,

Some files were not shown because too many files have changed in this diff.