Compare commits
48 Commits
pretty-typ
...
linux/keys
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
8276554222 | ||
|
|
2400ed34e0 | ||
|
|
ce23637dc1 | ||
|
|
7bcd6d839f | ||
|
|
20b9989b79 | ||
|
|
3269029a3e | ||
|
|
ea0e908714 | ||
|
|
8b69ccb488 | ||
|
|
2d6d53219b | ||
|
|
de615870cc | ||
|
|
f1f9470a14 | ||
|
|
9433c381eb | ||
|
|
6ede6a1573 | ||
|
|
7e0d74db8a | ||
|
|
e019ce8260 | ||
|
|
7ab7eab54e | ||
|
|
7da2e2af50 | ||
|
|
92a986cffd | ||
|
|
8b6658c650 | ||
|
|
9bc4c0f7f5 | ||
|
|
f829a178f6 | ||
|
|
2df80de880 | ||
|
|
2976b99b14 | ||
|
|
ad3d44119a | ||
|
|
0b5a264f88 | ||
|
|
f22d06cf6e | ||
|
|
26718da9bb | ||
|
|
671a91e69d | ||
|
|
bbda5d4f78 | ||
|
|
2fb674088f | ||
|
|
5c4ea49793 | ||
|
|
21e7cc3fed | ||
|
|
f53168c56c | ||
|
|
3182f14972 | ||
|
|
eca211e0f1 | ||
|
|
d6add799dc | ||
|
|
e9649dc25c | ||
|
|
71daf47ad5 | ||
|
|
66c6f5066e | ||
|
|
be79ccde07 | ||
|
|
061ba9b6d2 | ||
|
|
1ba3c2f589 | ||
|
|
bbee877ca8 | ||
|
|
c6e697fc7f | ||
|
|
b7b83105fc | ||
|
|
7d901c5e47 | ||
|
|
1a8d1944b0 | ||
|
|
35db644beb |
64
.github/actions/install_trusted_signing/action.yml
vendored
Normal file
64
.github/actions/install_trusted_signing/action.yml
vendored
Normal file
@@ -0,0 +1,64 @@
|
||||
name: "Trusted Signing on Windows"
|
||||
description: "Install trusted signing on Windows."
|
||||
|
||||
# Modified from https://github.com/Azure/trusted-signing-action
|
||||
runs:
|
||||
using: "composite"
|
||||
steps:
|
||||
- name: Set variables
|
||||
id: set-variables
|
||||
shell: "pwsh"
|
||||
run: |
|
||||
$defaultPath = $env:PSModulePath -split ';' | Select-Object -First 1
|
||||
"PSMODULEPATH=$defaultPath" | Out-File -FilePath $env:GITHUB_OUTPUT -Append
|
||||
|
||||
"TRUSTED_SIGNING_MODULE_VERSION=0.5.3" | Out-File -FilePath $env:GITHUB_OUTPUT -Append
|
||||
"BUILD_TOOLS_NUGET_VERSION=10.0.22621.3233" | Out-File -FilePath $env:GITHUB_OUTPUT -Append
|
||||
"TRUSTED_SIGNING_NUGET_VERSION=1.0.53" | Out-File -FilePath $env:GITHUB_OUTPUT -Append
|
||||
"DOTNET_SIGNCLI_NUGET_VERSION=0.9.1-beta.24469.1" | Out-File -FilePath $env:GITHUB_OUTPUT -Append
|
||||
|
||||
- name: Cache TrustedSigning PowerShell module
|
||||
id: cache-module
|
||||
uses: actions/cache@v4
|
||||
env:
|
||||
cache-name: cache-module
|
||||
with:
|
||||
path: ${{ steps.set-variables.outputs.PSMODULEPATH }}\TrustedSigning\${{ steps.set-variables.outputs.TRUSTED_SIGNING_MODULE_VERSION }}
|
||||
key: TrustedSigning-${{ steps.set-variables.outputs.TRUSTED_SIGNING_MODULE_VERSION }}
|
||||
if: ${{ inputs.cache-dependencies == 'true' }}
|
||||
|
||||
- name: Cache Microsoft.Windows.SDK.BuildTools NuGet package
|
||||
id: cache-buildtools
|
||||
uses: actions/cache@v4
|
||||
env:
|
||||
cache-name: cache-buildtools
|
||||
with:
|
||||
path: ~\AppData\Local\TrustedSigning\Microsoft.Windows.SDK.BuildTools\Microsoft.Windows.SDK.BuildTools.${{ steps.set-variables.outputs.BUILD_TOOLS_NUGET_VERSION }}
|
||||
key: Microsoft.Windows.SDK.BuildTools-${{ steps.set-variables.outputs.BUILD_TOOLS_NUGET_VERSION }}
|
||||
if: ${{ inputs.cache-dependencies == 'true' }}
|
||||
|
||||
- name: Cache Microsoft.Trusted.Signing.Client NuGet package
|
||||
id: cache-tsclient
|
||||
uses: actions/cache@v4
|
||||
env:
|
||||
cache-name: cache-tsclient
|
||||
with:
|
||||
path: ~\AppData\Local\TrustedSigning\Microsoft.Trusted.Signing.Client\Microsoft.Trusted.Signing.Client.${{ steps.set-variables.outputs.TRUSTED_SIGNING_NUGET_VERSION }}
|
||||
key: Microsoft.Trusted.Signing.Client-${{ steps.set-variables.outputs.TRUSTED_SIGNING_NUGET_VERSION }}
|
||||
if: ${{ inputs.cache-dependencies == 'true' }}
|
||||
|
||||
- name: Cache SignCli NuGet package
|
||||
id: cache-signcli
|
||||
uses: actions/cache@v4
|
||||
env:
|
||||
cache-name: cache-signcli
|
||||
with:
|
||||
path: ~\AppData\Local\TrustedSigning\sign\sign.${{ steps.set-variables.outputs.DOTNET_SIGNCLI_NUGET_VERSION }}
|
||||
key: SignCli-${{ steps.set-variables.outputs.DOTNET_SIGNCLI_NUGET_VERSION }}
|
||||
if: ${{ inputs.cache-dependencies == 'true' }}
|
||||
|
||||
- name: Install Trusted Signing module
|
||||
shell: "pwsh"
|
||||
run: |
|
||||
Install-Module -Name TrustedSigning -RequiredVersion ${{ steps.set-variables.outputs.TRUSTED_SIGNING_MODULE_VERSION }} -Force -Repository PSGallery
|
||||
if: ${{ inputs.cache-dependencies != 'true' || steps.cache-module.outputs.cache-hit != 'true' }}
|
||||
13
.github/actions/run_tests/action.yml
vendored
13
.github/actions/run_tests/action.yml
vendored
@@ -1,6 +1,12 @@
|
||||
name: "Run tests"
|
||||
description: "Runs the tests"
|
||||
|
||||
inputs:
|
||||
use-xvfb:
|
||||
description: "Whether to run tests with xvfb"
|
||||
required: false
|
||||
default: "false"
|
||||
|
||||
runs:
|
||||
using: "composite"
|
||||
steps:
|
||||
@@ -20,4 +26,9 @@ runs:
|
||||
|
||||
- name: Run tests
|
||||
shell: bash -euxo pipefail {0}
|
||||
run: cargo nextest run --workspace --no-fail-fast
|
||||
run: |
|
||||
if [ "${{ inputs.use-xvfb }}" == "true" ]; then
|
||||
xvfb-run --auto-servernum --server-args="-screen 0 1024x768x24 -nolisten tcp" cargo nextest run --workspace --no-fail-fast
|
||||
else
|
||||
cargo nextest run --workspace --no-fail-fast
|
||||
fi
|
||||
|
||||
30
.github/workflows/ci.yml
vendored
30
.github/workflows/ci.yml
vendored
@@ -21,9 +21,6 @@ env:
|
||||
CARGO_TERM_COLOR: always
|
||||
CARGO_INCREMENTAL: 0
|
||||
RUST_BACKTRACE: 1
|
||||
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
|
||||
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
|
||||
jobs:
|
||||
job_spec:
|
||||
@@ -55,10 +52,9 @@ jobs:
|
||||
fi
|
||||
# Specify anything which should skip full CI in this regex:
|
||||
# - docs/
|
||||
# - script/update_top_ranking_issues/
|
||||
# - .github/ISSUE_TEMPLATE/
|
||||
# - .github/workflows/ (except .github/workflows/ci.yml)
|
||||
SKIP_REGEX='^(docs/|script/update_top_ranking_issues/|\.github/(ISSUE_TEMPLATE|workflows/(?!ci)))'
|
||||
SKIP_REGEX='^(docs/|\.github/(ISSUE_TEMPLATE|workflows/(?!ci)))'
|
||||
if [[ $(git diff --name-only $COMPARE_REV ${{ github.sha }} | grep -vP "$SKIP_REGEX") ]]; then
|
||||
echo "run_tests=true" >> $GITHUB_OUTPUT
|
||||
else
|
||||
@@ -75,7 +71,7 @@ jobs:
|
||||
echo "run_license=false" >> $GITHUB_OUTPUT
|
||||
fi
|
||||
NIX_REGEX='^(nix/|flake\.|Cargo\.|rust-toolchain.toml|\.cargo/config.toml)'
|
||||
if [[ $(git diff --name-only $COMPARE_REV ${{ github.sha }} | grep -P "$NIX_REGEX") ]]; then
|
||||
if [[ $(git diff --name-only $COMPARE_REV ${{ github.sha }} | grep "$NIX_REGEX") ]]; then
|
||||
echo "run_nix=true" >> $GITHUB_OUTPUT
|
||||
else
|
||||
echo "run_nix=false" >> $GITHUB_OUTPUT
|
||||
@@ -338,6 +334,8 @@ jobs:
|
||||
|
||||
- name: Run tests
|
||||
uses: ./.github/actions/run_tests
|
||||
with:
|
||||
use-xvfb: true
|
||||
|
||||
- name: Build other binaries and features
|
||||
run: |
|
||||
@@ -496,6 +494,9 @@ jobs:
|
||||
APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }}
|
||||
APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }}
|
||||
APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }}
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
|
||||
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
|
||||
steps:
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
|
||||
@@ -578,6 +579,10 @@ jobs:
|
||||
startsWith(github.ref, 'refs/tags/v')
|
||||
|| contains(github.event.pull_request.labels.*.name, 'run-bundling')
|
||||
needs: [linux_tests]
|
||||
env:
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
|
||||
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
@@ -631,6 +636,10 @@ jobs:
|
||||
startsWith(github.ref, 'refs/tags/v')
|
||||
|| contains(github.event.pull_request.labels.*.name, 'run-bundling')
|
||||
needs: [linux_tests]
|
||||
env:
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
|
||||
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
@@ -683,12 +692,16 @@ jobs:
|
||||
|| contains(github.event.pull_request.labels.*.name, 'run-bundling')
|
||||
needs: [linux_tests]
|
||||
name: Build Zed on FreeBSD
|
||||
# env:
|
||||
# MYTOKEN : ${{ secrets.MYTOKEN }}
|
||||
# MYTOKEN2: "value2"
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Build FreeBSD remote-server
|
||||
id: freebsd-build
|
||||
uses: vmactions/freebsd-vm@c3ae29a132c8ef1924775414107a97cac042aad5 # v1.2.0
|
||||
with:
|
||||
# envs: "MYTOKEN MYTOKEN2"
|
||||
usesh: true
|
||||
release: 13.5
|
||||
copyback: true
|
||||
@@ -755,6 +768,8 @@ jobs:
|
||||
ACCOUNT_NAME: ${{ vars.AZURE_SIGNING_ACCOUNT_NAME }}
|
||||
CERT_PROFILE_NAME: ${{ vars.AZURE_SIGNING_CERT_PROFILE_NAME }}
|
||||
ENDPOINT: ${{ vars.AZURE_SIGNING_ENDPOINT }}
|
||||
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
|
||||
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
|
||||
FILE_DIGEST: SHA256
|
||||
TIMESTAMP_DIGEST: SHA256
|
||||
TIMESTAMP_SERVER: "http://timestamp.acs.microsoft.com"
|
||||
@@ -771,6 +786,9 @@ jobs:
|
||||
# This exports RELEASE_CHANNEL into env (GITHUB_ENV)
|
||||
script/determine-release-channel.ps1
|
||||
|
||||
- name: Install trusted signing
|
||||
uses: ./.github/actions/install_trusted_signing
|
||||
|
||||
- name: Build Zed installer
|
||||
working-directory: ${{ env.ZED_WORKSPACE }}
|
||||
run: script/bundle-windows.ps1
|
||||
|
||||
22
.github/workflows/release_nightly.yml
vendored
22
.github/workflows/release_nightly.yml
vendored
@@ -12,9 +12,6 @@ env:
|
||||
CARGO_TERM_COLOR: always
|
||||
CARGO_INCREMENTAL: 0
|
||||
RUST_BACKTRACE: 1
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
|
||||
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
|
||||
|
||||
jobs:
|
||||
style:
|
||||
@@ -94,6 +91,9 @@ jobs:
|
||||
APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }}
|
||||
APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }}
|
||||
APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }}
|
||||
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
|
||||
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
steps:
|
||||
- name: Install Node
|
||||
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
|
||||
@@ -125,6 +125,10 @@ jobs:
|
||||
runs-on:
|
||||
- buildjet-16vcpu-ubuntu-2004
|
||||
needs: tests
|
||||
env:
|
||||
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
|
||||
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
@@ -160,6 +164,10 @@ jobs:
|
||||
runs-on:
|
||||
- buildjet-16vcpu-ubuntu-2204-arm
|
||||
needs: tests
|
||||
env:
|
||||
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
|
||||
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
|
||||
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
|
||||
steps:
|
||||
- name: Checkout repo
|
||||
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
|
||||
@@ -190,6 +198,9 @@ jobs:
|
||||
if: github.repository_owner == 'zed-industries'
|
||||
runs-on: github-8vcpu-ubuntu-2404
|
||||
needs: tests
|
||||
env:
|
||||
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
|
||||
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
|
||||
name: Build Zed on FreeBSD
|
||||
# env:
|
||||
# MYTOKEN : ${{ secrets.MYTOKEN }}
|
||||
@@ -246,6 +257,8 @@ jobs:
|
||||
ACCOUNT_NAME: ${{ vars.AZURE_SIGNING_ACCOUNT_NAME }}
|
||||
CERT_PROFILE_NAME: ${{ vars.AZURE_SIGNING_CERT_PROFILE_NAME }}
|
||||
ENDPOINT: ${{ vars.AZURE_SIGNING_ENDPOINT }}
|
||||
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
|
||||
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
|
||||
FILE_DIGEST: SHA256
|
||||
TIMESTAMP_DIGEST: SHA256
|
||||
TIMESTAMP_SERVER: "http://timestamp.acs.microsoft.com"
|
||||
@@ -263,6 +276,9 @@ jobs:
|
||||
Write-Host "Publishing version: $version on release channel nightly"
|
||||
"nightly" | Set-Content -Path "crates/zed/RELEASE_CHANNEL"
|
||||
|
||||
- name: Install trusted signing
|
||||
uses: ./.github/actions/install_trusted_signing
|
||||
|
||||
- name: Build Zed installer
|
||||
working-directory: ${{ env.ZED_WORKSPACE }}
|
||||
run: script/bundle-windows.ps1
|
||||
|
||||
@@ -40,7 +40,7 @@
|
||||
},
|
||||
"file_types": {
|
||||
"Dockerfile": ["Dockerfile*[!dockerignore]"],
|
||||
"JSONC": ["**/assets/**/*.json", "renovate.json"],
|
||||
"JSONC": ["assets/**/*.json", "renovate.json"],
|
||||
"Git Ignore": ["dockerignore"]
|
||||
},
|
||||
"hard_tabs": false,
|
||||
|
||||
12
Cargo.lock
generated
12
Cargo.lock
generated
@@ -9,7 +9,6 @@ dependencies = [
|
||||
"agent_servers",
|
||||
"agentic-coding-protocol",
|
||||
"anyhow",
|
||||
"assistant_tool",
|
||||
"async-pipe",
|
||||
"buffer_diff",
|
||||
"editor",
|
||||
@@ -264,9 +263,9 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "agentic-coding-protocol"
|
||||
version = "0.0.7"
|
||||
version = "0.0.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a75f520bcc049ebe40c8c99427aa61b48ad78a01bcc96a13b350b903dcfb9438"
|
||||
checksum = "d1ac0351749af7bf53c65042ef69fefb9351aa8b7efa0a813d6281377605c37d"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"chrono",
|
||||
@@ -608,7 +607,6 @@ dependencies = [
|
||||
"parking_lot",
|
||||
"smol",
|
||||
"tempfile",
|
||||
"unindent",
|
||||
"util",
|
||||
"workspace-hack",
|
||||
]
|
||||
@@ -8980,7 +8978,6 @@ dependencies = [
|
||||
"gpui",
|
||||
"language",
|
||||
"lsp",
|
||||
"project",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"util",
|
||||
@@ -9134,7 +9131,6 @@ dependencies = [
|
||||
"futures 0.3.31",
|
||||
"gpui",
|
||||
"http_client",
|
||||
"indoc",
|
||||
"language",
|
||||
"log",
|
||||
"lsp",
|
||||
@@ -14100,7 +14096,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "scap"
|
||||
version = "0.0.8"
|
||||
source = "git+https://github.com/zed-industries/scap?rev=28dd306ff2e3374404936dec778fc1e975b8dd12#28dd306ff2e3374404936dec778fc1e975b8dd12"
|
||||
source = "git+https://github.com/zed-industries/scap?rev=08f0a01417505cc0990b9931a37e5120db92e0d0#08f0a01417505cc0990b9931a37e5120db92e0d0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"cocoa 0.25.0",
|
||||
@@ -19696,9 +19692,7 @@ dependencies = [
|
||||
"wasmtime-cranelift",
|
||||
"wasmtime-environ",
|
||||
"winapi",
|
||||
"windows 0.61.1",
|
||||
"windows-core 0.61.0",
|
||||
"windows-future",
|
||||
"windows-numerics",
|
||||
"windows-sys 0.48.0",
|
||||
"windows-sys 0.52.0",
|
||||
|
||||
@@ -404,7 +404,7 @@ zlog_settings = { path = "crates/zlog_settings" }
|
||||
# External crates
|
||||
#
|
||||
|
||||
agentic-coding-protocol = "0.0.7"
|
||||
agentic-coding-protocol = "0.0.6"
|
||||
aho-corasick = "1.1"
|
||||
alacritty_terminal = { git = "https://github.com/zed-industries/alacritty.git", branch = "add-hush-login-flag" }
|
||||
any_vec = "0.14"
|
||||
@@ -546,7 +546,7 @@ rustc-demangle = "0.1.23"
|
||||
rustc-hash = "2.1.0"
|
||||
rustls = { version = "0.23.26" }
|
||||
rustls-platform-verifier = "0.5.0"
|
||||
scap = { git = "https://github.com/zed-industries/scap", rev = "28dd306ff2e3374404936dec778fc1e975b8dd12", default-features = false }
|
||||
scap = { git = "https://github.com/zed-industries/scap", rev = "08f0a01417505cc0990b9931a37e5120db92e0d0", default-features = false }
|
||||
schemars = { version = "1.0", features = ["indexmap2"] }
|
||||
semver = "1.0"
|
||||
serde = { version = "1.0", features = ["derive", "rc"] }
|
||||
|
||||
@@ -320,8 +320,7 @@
|
||||
"bindings": {
|
||||
"enter": "agent::Chat",
|
||||
"up": "agent::PreviousHistoryMessage",
|
||||
"down": "agent::NextHistoryMessage",
|
||||
"shift-ctrl-r": "agent::OpenAgentDiff"
|
||||
"down": "agent::NextHistoryMessage"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -1113,10 +1112,7 @@
|
||||
"context": "KeymapEditor",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"ctrl-f": "search::FocusSearch",
|
||||
"alt-find": "keymap_editor::ToggleKeystrokeSearch",
|
||||
"alt-ctrl-f": "keymap_editor::ToggleKeystrokeSearch",
|
||||
"alt-c": "keymap_editor::ToggleConflictFilter"
|
||||
"ctrl-f": "search::FocusSearch"
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
@@ -371,8 +371,7 @@
|
||||
"bindings": {
|
||||
"enter": "agent::Chat",
|
||||
"up": "agent::PreviousHistoryMessage",
|
||||
"down": "agent::NextHistoryMessage",
|
||||
"shift-ctrl-r": "agent::OpenAgentDiff"
|
||||
"down": "agent::NextHistoryMessage"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -1212,8 +1211,7 @@
|
||||
"context": "KeymapEditor",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"cmd-alt-f": "keymap_editor::ToggleKeystrokeSearch",
|
||||
"cmd-alt-c": "keymap_editor::ToggleConflictFilter"
|
||||
"cmd-f": "search::FocusSearch"
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
@@ -1157,14 +1157,16 @@
|
||||
// Control whether the git blame information is shown inline,
|
||||
// in the currently focused line.
|
||||
"inline_blame": {
|
||||
"enabled": true,
|
||||
"enabled": true
|
||||
// Sets a delay after which the inline blame information is shown.
|
||||
// Delay is restarted with every cursor movement.
|
||||
"delay_ms": 0,
|
||||
// "delay_ms": 600
|
||||
//
|
||||
// Whether or not to display the git commit summary on the same line.
|
||||
"show_commit_summary": false,
|
||||
// "show_commit_summary": false
|
||||
//
|
||||
// The minimum column number to show the inline blame information at
|
||||
"min_column": 0
|
||||
// "min_column": 0
|
||||
},
|
||||
// How git hunks are displayed visually in the editor.
|
||||
// This setting can take two values:
|
||||
@@ -1377,11 +1379,11 @@
|
||||
// This will be merged with the platform's default font fallbacks
|
||||
// "font_fallbacks": ["FiraCode Nerd Fonts"],
|
||||
// The weight of the editor font in standard CSS units from 100 to 900.
|
||||
"font_weight": 400,
|
||||
// "font_weight": 400
|
||||
// Sets the maximum number of lines in the terminal's scrollback buffer.
|
||||
// Default: 10_000, maximum: 100_000 (all bigger values set will be treated as 100_000), 0 disables the scrolling.
|
||||
// Existing terminals will not pick up this change until they are recreated.
|
||||
"max_scroll_history_lines": 10000,
|
||||
// "max_scroll_history_lines": 10000,
|
||||
// The minimum APCA perceptual contrast between foreground and background colors.
|
||||
// APCA (Accessible Perceptual Contrast Algorithm) is more accurate than WCAG 2.x,
|
||||
// especially for dark mode. Values range from 0 to 106.
|
||||
|
||||
@@ -20,7 +20,6 @@ gemini = []
|
||||
agent_servers.workspace = true
|
||||
agentic-coding-protocol.workspace = true
|
||||
anyhow.workspace = true
|
||||
assistant_tool.workspace = true
|
||||
buffer_diff.workspace = true
|
||||
editor.workspace = true
|
||||
futures.workspace = true
|
||||
|
||||
@@ -2,19 +2,14 @@ pub use acp::ToolCallId;
|
||||
use agent_servers::AgentServer;
|
||||
use agentic_coding_protocol::{self as acp, UserMessageChunk};
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use assistant_tool::ActionLog;
|
||||
use buffer_diff::BufferDiff;
|
||||
use editor::{MultiBuffer, PathKey};
|
||||
use futures::{FutureExt, channel::oneshot, future::BoxFuture};
|
||||
use gpui::{AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Task, WeakEntity};
|
||||
use itertools::Itertools;
|
||||
use language::{
|
||||
Anchor, Buffer, BufferSnapshot, Capability, LanguageRegistry, OffsetRangeExt as _, Point,
|
||||
text_diff,
|
||||
};
|
||||
use language::{Anchor, Buffer, Capability, LanguageRegistry, OffsetRangeExt as _};
|
||||
use markdown::Markdown;
|
||||
use project::{AgentLocation, Project};
|
||||
use std::collections::HashMap;
|
||||
use project::Project;
|
||||
use std::error::Error;
|
||||
use std::fmt::{Formatter, Write};
|
||||
use std::{
|
||||
@@ -164,26 +159,6 @@ impl AgentThreadEntry {
|
||||
Self::ToolCall(too_call) => too_call.to_markdown(cx),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn diff(&self) -> Option<&Diff> {
|
||||
if let AgentThreadEntry::ToolCall(ToolCall {
|
||||
content: Some(ToolCallContent::Diff { diff }),
|
||||
..
|
||||
}) = self
|
||||
{
|
||||
Some(&diff)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
pub fn locations(&self) -> Option<&[acp::ToolCallLocation]> {
|
||||
if let AgentThreadEntry::ToolCall(ToolCall { locations, .. }) = self {
|
||||
Some(locations)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
@@ -193,7 +168,6 @@ pub struct ToolCall {
|
||||
pub icon: IconName,
|
||||
pub content: Option<ToolCallContent>,
|
||||
pub status: ToolCallStatus,
|
||||
pub locations: Vec<acp::ToolCallLocation>,
|
||||
}
|
||||
|
||||
impl ToolCall {
|
||||
@@ -354,8 +328,6 @@ impl ToolCallContent {
|
||||
pub struct Diff {
|
||||
pub multibuffer: Entity<MultiBuffer>,
|
||||
pub path: PathBuf,
|
||||
pub new_buffer: Entity<Buffer>,
|
||||
pub old_buffer: Entity<Buffer>,
|
||||
_task: Task<Result<()>>,
|
||||
}
|
||||
|
||||
@@ -390,7 +362,6 @@ impl Diff {
|
||||
let task = cx.spawn({
|
||||
let multibuffer = multibuffer.clone();
|
||||
let path = path.clone();
|
||||
let new_buffer = new_buffer.clone();
|
||||
async move |cx| {
|
||||
diff_task.await?;
|
||||
|
||||
@@ -430,8 +401,6 @@ impl Diff {
|
||||
Self {
|
||||
multibuffer,
|
||||
path,
|
||||
new_buffer,
|
||||
old_buffer,
|
||||
_task: task,
|
||||
}
|
||||
}
|
||||
@@ -452,8 +421,6 @@ pub struct AcpThread {
|
||||
entries: Vec<AgentThreadEntry>,
|
||||
title: SharedString,
|
||||
project: Entity<Project>,
|
||||
action_log: Entity<ActionLog>,
|
||||
shared_buffers: HashMap<Entity<Buffer>, BufferSnapshot>,
|
||||
send_task: Option<Task<()>>,
|
||||
connection: Arc<acp::AgentConnection>,
|
||||
child_status: Option<Task<Result<()>>>,
|
||||
@@ -555,11 +522,7 @@ impl AcpThread {
|
||||
}
|
||||
});
|
||||
|
||||
let action_log = cx.new(|_| ActionLog::new(project.clone()));
|
||||
|
||||
Self {
|
||||
action_log,
|
||||
shared_buffers: Default::default(),
|
||||
entries: Default::default(),
|
||||
title: "ACP Thread".into(),
|
||||
project,
|
||||
@@ -571,14 +534,6 @@ impl AcpThread {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn action_log(&self) -> &Entity<ActionLog> {
|
||||
&self.action_log
|
||||
}
|
||||
|
||||
pub fn project(&self) -> &Entity<Project> {
|
||||
&self.project
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub fn fake(
|
||||
stdin: async_pipe::PipeWriter,
|
||||
@@ -603,11 +558,7 @@ impl AcpThread {
|
||||
}
|
||||
});
|
||||
|
||||
let action_log = cx.new(|_| ActionLog::new(project.clone()));
|
||||
|
||||
Self {
|
||||
action_log,
|
||||
shared_buffers: Default::default(),
|
||||
entries: Default::default(),
|
||||
title: "ACP Thread".into(),
|
||||
project,
|
||||
@@ -638,26 +589,6 @@ impl AcpThread {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn has_pending_edit_tool_calls(&self) -> bool {
|
||||
for entry in self.entries.iter().rev() {
|
||||
match entry {
|
||||
AgentThreadEntry::UserMessage(_) => return false,
|
||||
AgentThreadEntry::ToolCall(ToolCall {
|
||||
status:
|
||||
ToolCallStatus::Allowed {
|
||||
status: acp::ToolCallStatus::Running,
|
||||
..
|
||||
},
|
||||
content: Some(ToolCallContent::Diff { .. }),
|
||||
..
|
||||
}) => return true,
|
||||
AgentThreadEntry::ToolCall(_) | AgentThreadEntry::AssistantMessage(_) => {}
|
||||
}
|
||||
}
|
||||
|
||||
false
|
||||
}
|
||||
|
||||
pub fn push_entry(&mut self, entry: AgentThreadEntry, cx: &mut Context<Self>) {
|
||||
self.entries.push(entry);
|
||||
cx.emit(AcpThreadEvent::NewEntry);
|
||||
@@ -713,63 +644,65 @@ impl AcpThread {
|
||||
|
||||
pub fn request_tool_call(
|
||||
&mut self,
|
||||
tool_call: acp::RequestToolCallConfirmationParams,
|
||||
label: String,
|
||||
icon: acp::Icon,
|
||||
content: Option<acp::ToolCallContent>,
|
||||
confirmation: acp::ToolCallConfirmation,
|
||||
cx: &mut Context<Self>,
|
||||
) -> ToolCallRequest {
|
||||
let (tx, rx) = oneshot::channel();
|
||||
|
||||
let status = ToolCallStatus::WaitingForConfirmation {
|
||||
confirmation: ToolCallConfirmation::from_acp(
|
||||
tool_call.confirmation,
|
||||
confirmation,
|
||||
self.project.read(cx).languages().clone(),
|
||||
cx,
|
||||
),
|
||||
respond_tx: tx,
|
||||
};
|
||||
|
||||
let id = self.insert_tool_call(tool_call.tool_call, status, cx);
|
||||
let id = self.insert_tool_call(label, status, icon, content, cx);
|
||||
ToolCallRequest { id, outcome: rx }
|
||||
}
|
||||
|
||||
pub fn push_tool_call(
|
||||
&mut self,
|
||||
request: acp::PushToolCallParams,
|
||||
label: String,
|
||||
icon: acp::Icon,
|
||||
content: Option<acp::ToolCallContent>,
|
||||
cx: &mut Context<Self>,
|
||||
) -> acp::ToolCallId {
|
||||
let status = ToolCallStatus::Allowed {
|
||||
status: acp::ToolCallStatus::Running,
|
||||
};
|
||||
|
||||
self.insert_tool_call(request, status, cx)
|
||||
self.insert_tool_call(label, status, icon, content, cx)
|
||||
}
|
||||
|
||||
fn insert_tool_call(
|
||||
&mut self,
|
||||
tool_call: acp::PushToolCallParams,
|
||||
label: String,
|
||||
status: ToolCallStatus,
|
||||
icon: acp::Icon,
|
||||
content: Option<acp::ToolCallContent>,
|
||||
cx: &mut Context<Self>,
|
||||
) -> acp::ToolCallId {
|
||||
let language_registry = self.project.read(cx).languages().clone();
|
||||
let id = acp::ToolCallId(self.entries.len() as u64);
|
||||
let call = ToolCall {
|
||||
id,
|
||||
label: cx.new(|cx| {
|
||||
Markdown::new(
|
||||
tool_call.label.into(),
|
||||
Some(language_registry.clone()),
|
||||
None,
|
||||
cx,
|
||||
)
|
||||
}),
|
||||
icon: acp_icon_to_ui_icon(tool_call.icon),
|
||||
content: tool_call
|
||||
.content
|
||||
.map(|content| ToolCallContent::from_acp(content, language_registry, cx)),
|
||||
locations: tool_call.locations,
|
||||
status,
|
||||
};
|
||||
|
||||
self.push_entry(AgentThreadEntry::ToolCall(call), cx);
|
||||
self.push_entry(
|
||||
AgentThreadEntry::ToolCall(ToolCall {
|
||||
id,
|
||||
label: cx.new(|cx| {
|
||||
Markdown::new(label.into(), Some(language_registry.clone()), None, cx)
|
||||
}),
|
||||
icon: acp_icon_to_ui_icon(icon),
|
||||
content: content
|
||||
.map(|content| ToolCallContent::from_acp(content, language_registry, cx)),
|
||||
status,
|
||||
}),
|
||||
cx,
|
||||
);
|
||||
|
||||
id
|
||||
}
|
||||
@@ -871,16 +804,14 @@ impl AcpThread {
|
||||
false
|
||||
}
|
||||
|
||||
pub fn initialize(
|
||||
&self,
|
||||
) -> impl use<> + Future<Output = Result<acp::InitializeResponse, acp::Error>> {
|
||||
pub fn initialize(&self) -> impl use<> + Future<Output = Result<acp::InitializeResponse>> {
|
||||
let connection = self.connection.clone();
|
||||
async move { connection.request(acp::InitializeParams).await }
|
||||
async move { Ok(connection.request(acp::InitializeParams).await?) }
|
||||
}
|
||||
|
||||
pub fn authenticate(&self) -> impl use<> + Future<Output = Result<(), acp::Error>> {
|
||||
pub fn authenticate(&self) -> impl use<> + Future<Output = Result<()>> {
|
||||
let connection = self.connection.clone();
|
||||
async move { connection.request(acp::AuthenticateParams).await }
|
||||
async move { Ok(connection.request(acp::AuthenticateParams).await?) }
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
@@ -888,7 +819,7 @@ impl AcpThread {
|
||||
&mut self,
|
||||
message: &str,
|
||||
cx: &mut Context<Self>,
|
||||
) -> BoxFuture<'static, Result<(), acp::Error>> {
|
||||
) -> BoxFuture<'static, Result<()>> {
|
||||
self.send(
|
||||
acp::SendUserMessageParams {
|
||||
chunks: vec![acp::UserMessageChunk::Text {
|
||||
@@ -903,7 +834,7 @@ impl AcpThread {
|
||||
&mut self,
|
||||
message: acp::SendUserMessageParams,
|
||||
cx: &mut Context<Self>,
|
||||
) -> BoxFuture<'static, Result<(), acp::Error>> {
|
||||
) -> BoxFuture<'static, Result<()>> {
|
||||
let agent = self.connection.clone();
|
||||
self.push_entry(
|
||||
AgentThreadEntry::UserMessage(UserMessage::from_acp(
|
||||
@@ -934,7 +865,7 @@ impl AcpThread {
|
||||
.boxed()
|
||||
}
|
||||
|
||||
pub fn cancel(&mut self, cx: &mut Context<Self>) -> Task<Result<(), acp::Error>> {
|
||||
pub fn cancel(&mut self, cx: &mut Context<Self>) -> Task<Result<()>> {
|
||||
let agent = self.connection.clone();
|
||||
|
||||
if self.send_task.take().is_some() {
|
||||
@@ -967,123 +898,13 @@ impl AcpThread {
|
||||
}
|
||||
}
|
||||
}
|
||||
})?;
|
||||
Ok(())
|
||||
})
|
||||
})
|
||||
} else {
|
||||
Task::ready(Ok(()))
|
||||
}
|
||||
}
|
||||
|
||||
pub fn read_text_file(
|
||||
&self,
|
||||
request: acp::ReadTextFileParams,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Task<Result<String>> {
|
||||
let project = self.project.clone();
|
||||
let action_log = self.action_log.clone();
|
||||
cx.spawn(async move |this, cx| {
|
||||
let load = project.update(cx, |project, cx| {
|
||||
let path = project
|
||||
.project_path_for_absolute_path(&request.path, cx)
|
||||
.context("invalid path")?;
|
||||
anyhow::Ok(project.open_buffer(path, cx))
|
||||
});
|
||||
let buffer = load??.await?;
|
||||
|
||||
action_log.update(cx, |action_log, cx| {
|
||||
action_log.buffer_read(buffer.clone(), cx);
|
||||
})?;
|
||||
project.update(cx, |project, cx| {
|
||||
let position = buffer
|
||||
.read(cx)
|
||||
.snapshot()
|
||||
.anchor_before(Point::new(request.line.unwrap_or_default(), 0));
|
||||
project.set_agent_location(
|
||||
Some(AgentLocation {
|
||||
buffer: buffer.downgrade(),
|
||||
position,
|
||||
}),
|
||||
cx,
|
||||
);
|
||||
})?;
|
||||
let snapshot = buffer.update(cx, |buffer, _| buffer.snapshot())?;
|
||||
this.update(cx, |this, _| {
|
||||
let text = snapshot.text();
|
||||
this.shared_buffers.insert(buffer.clone(), snapshot);
|
||||
text
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
pub fn write_text_file(
|
||||
&self,
|
||||
path: PathBuf,
|
||||
content: String,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Task<Result<()>> {
|
||||
let project = self.project.clone();
|
||||
let action_log = self.action_log.clone();
|
||||
cx.spawn(async move |this, cx| {
|
||||
let load = project.update(cx, |project, cx| {
|
||||
let path = project
|
||||
.project_path_for_absolute_path(&path, cx)
|
||||
.context("invalid path")?;
|
||||
anyhow::Ok(project.open_buffer(path, cx))
|
||||
});
|
||||
let buffer = load??.await?;
|
||||
let snapshot = this.update(cx, |this, cx| {
|
||||
this.shared_buffers
|
||||
.get(&buffer)
|
||||
.cloned()
|
||||
.unwrap_or_else(|| buffer.read(cx).snapshot())
|
||||
})?;
|
||||
let edits = cx
|
||||
.background_executor()
|
||||
.spawn(async move {
|
||||
let old_text = snapshot.text();
|
||||
text_diff(old_text.as_str(), &content)
|
||||
.into_iter()
|
||||
.map(|(range, replacement)| {
|
||||
(
|
||||
snapshot.anchor_after(range.start)
|
||||
..snapshot.anchor_before(range.end),
|
||||
replacement,
|
||||
)
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
})
|
||||
.await;
|
||||
cx.update(|cx| {
|
||||
project.update(cx, |project, cx| {
|
||||
project.set_agent_location(
|
||||
Some(AgentLocation {
|
||||
buffer: buffer.downgrade(),
|
||||
position: edits
|
||||
.last()
|
||||
.map(|(range, _)| range.end)
|
||||
.unwrap_or(Anchor::MIN),
|
||||
}),
|
||||
cx,
|
||||
);
|
||||
});
|
||||
|
||||
action_log.update(cx, |action_log, cx| {
|
||||
action_log.buffer_read(buffer.clone(), cx);
|
||||
});
|
||||
buffer.update(cx, |buffer, cx| {
|
||||
buffer.edit(edits, None, cx);
|
||||
});
|
||||
action_log.update(cx, |action_log, cx| {
|
||||
action_log.buffer_edited(buffer.clone(), cx);
|
||||
});
|
||||
})?;
|
||||
project
|
||||
.update(cx, |project, cx| project.save_buffer(buffer, cx))?
|
||||
.await
|
||||
})
|
||||
}
|
||||
|
||||
pub fn child_status(&mut self) -> Option<Task<Result<()>>> {
|
||||
self.child_status.take()
|
||||
}
|
||||
@@ -1109,7 +930,7 @@ impl acp::Client for AcpClientDelegate {
|
||||
async fn stream_assistant_message_chunk(
|
||||
&self,
|
||||
params: acp::StreamAssistantMessageChunkParams,
|
||||
) -> Result<(), acp::Error> {
|
||||
) -> Result<()> {
|
||||
let cx = &mut self.cx.clone();
|
||||
|
||||
cx.update(|cx| {
|
||||
@@ -1126,37 +947,45 @@ impl acp::Client for AcpClientDelegate {
|
||||
async fn request_tool_call_confirmation(
|
||||
&self,
|
||||
request: acp::RequestToolCallConfirmationParams,
|
||||
) -> Result<acp::RequestToolCallConfirmationResponse, acp::Error> {
|
||||
) -> Result<acp::RequestToolCallConfirmationResponse> {
|
||||
let cx = &mut self.cx.clone();
|
||||
let ToolCallRequest { id, outcome } = cx
|
||||
.update(|cx| {
|
||||
self.thread
|
||||
.update(cx, |thread, cx| thread.request_tool_call(request, cx))
|
||||
self.thread.update(cx, |thread, cx| {
|
||||
thread.request_tool_call(
|
||||
request.label,
|
||||
request.icon,
|
||||
request.content,
|
||||
request.confirmation,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
})?
|
||||
.context("Failed to update thread")?;
|
||||
|
||||
Ok(acp::RequestToolCallConfirmationResponse {
|
||||
id,
|
||||
outcome: outcome.await.map_err(acp::Error::into_internal_error)?,
|
||||
outcome: outcome.await?,
|
||||
})
|
||||
}
|
||||
|
||||
async fn push_tool_call(
|
||||
&self,
|
||||
request: acp::PushToolCallParams,
|
||||
) -> Result<acp::PushToolCallResponse, acp::Error> {
|
||||
) -> Result<acp::PushToolCallResponse> {
|
||||
let cx = &mut self.cx.clone();
|
||||
let id = cx
|
||||
.update(|cx| {
|
||||
self.thread
|
||||
.update(cx, |thread, cx| thread.push_tool_call(request, cx))
|
||||
self.thread.update(cx, |thread, cx| {
|
||||
thread.push_tool_call(request.label, request.icon, request.content, cx)
|
||||
})
|
||||
})?
|
||||
.context("Failed to update thread")?;
|
||||
|
||||
Ok(acp::PushToolCallResponse { id })
|
||||
}
|
||||
|
||||
async fn update_tool_call(&self, request: acp::UpdateToolCallParams) -> Result<(), acp::Error> {
|
||||
async fn update_tool_call(&self, request: acp::UpdateToolCallParams) -> Result<()> {
|
||||
let cx = &mut self.cx.clone();
|
||||
|
||||
cx.update(|cx| {
|
||||
@@ -1168,34 +997,6 @@ impl acp::Client for AcpClientDelegate {
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn read_text_file(
|
||||
&self,
|
||||
request: acp::ReadTextFileParams,
|
||||
) -> Result<acp::ReadTextFileResponse, acp::Error> {
|
||||
let content = self
|
||||
.cx
|
||||
.update(|cx| {
|
||||
self.thread
|
||||
.update(cx, |thread, cx| thread.read_text_file(request, cx))
|
||||
})?
|
||||
.context("Failed to update thread")?
|
||||
.await?;
|
||||
Ok(acp::ReadTextFileResponse { content })
|
||||
}
|
||||
|
||||
async fn write_text_file(&self, request: acp::WriteTextFileParams) -> Result<(), acp::Error> {
|
||||
self.cx
|
||||
.update(|cx| {
|
||||
self.thread.update(cx, |thread, cx| {
|
||||
thread.write_text_file(request.path, request.content, cx)
|
||||
})
|
||||
})?
|
||||
.context("Failed to update thread")?
|
||||
.await?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
fn acp_icon_to_ui_icon(icon: acp::Icon) -> IconName {
|
||||
@@ -1299,80 +1100,6 @@ mod tests {
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_edits_concurrently_to_user(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
fs.insert_tree(path!("/tmp"), json!({"foo": "one\ntwo\nthree\n"}))
|
||||
.await;
|
||||
let project = Project::test(fs.clone(), [], cx).await;
|
||||
let (thread, fake_server) = fake_acp_thread(project.clone(), cx);
|
||||
let (worktree, pathbuf) = project
|
||||
.update(cx, |project, cx| {
|
||||
project.find_or_create_worktree(path!("/tmp/foo"), true, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
let buffer = project
|
||||
.update(cx, |project, cx| {
|
||||
project.open_buffer((worktree.read(cx).id(), pathbuf), cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let (read_file_tx, read_file_rx) = oneshot::channel::<()>();
|
||||
let read_file_tx = Rc::new(RefCell::new(Some(read_file_tx)));
|
||||
|
||||
fake_server.update(cx, |fake_server, _| {
|
||||
fake_server.on_user_message(move |_, server, mut cx| {
|
||||
let read_file_tx = read_file_tx.clone();
|
||||
async move {
|
||||
let content = server
|
||||
.update(&mut cx, |server, _| {
|
||||
server.send_to_zed(acp::ReadTextFileParams {
|
||||
path: path!("/tmp/foo").into(),
|
||||
line: None,
|
||||
limit: None,
|
||||
})
|
||||
})?
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(content.content, "one\ntwo\nthree\n");
|
||||
read_file_tx.take().unwrap().send(()).unwrap();
|
||||
server
|
||||
.update(&mut cx, |server, _| {
|
||||
server.send_to_zed(acp::WriteTextFileParams {
|
||||
path: path!("/tmp/foo").into(),
|
||||
content: "one\ntwo\nthree\nfour\nfive\n".to_string(),
|
||||
})
|
||||
})?
|
||||
.await
|
||||
.unwrap();
|
||||
Ok(())
|
||||
}
|
||||
})
|
||||
});
|
||||
|
||||
let request = thread.update(cx, |thread, cx| {
|
||||
thread.send_raw("Extend the count in /tmp/foo", cx)
|
||||
});
|
||||
read_file_rx.await.ok();
|
||||
buffer.update(cx, |buffer, cx| {
|
||||
buffer.edit([(0..0, "zero\n".to_string())], None, cx);
|
||||
});
|
||||
cx.run_until_parked();
|
||||
assert_eq!(
|
||||
buffer.read_with(cx, |buffer, _| buffer.text()),
|
||||
"zero\none\ntwo\nthree\nfour\nfive\n"
|
||||
);
|
||||
assert_eq!(
|
||||
String::from_utf8(fs.read_file_sync(path!("/tmp/foo")).unwrap()).unwrap(),
|
||||
"zero\none\ntwo\nthree\nfour\nfive\n"
|
||||
);
|
||||
request.await.unwrap();
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_succeeding_canceled_toolcall(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
@@ -1397,7 +1124,6 @@ mod tests {
|
||||
label: "Fetch".to_string(),
|
||||
icon: acp::Icon::Globe,
|
||||
content: None,
|
||||
locations: vec![],
|
||||
})
|
||||
})?
|
||||
.await
|
||||
@@ -1827,7 +1553,7 @@ mod tests {
|
||||
acp::SendUserMessageParams,
|
||||
Entity<FakeAcpServer>,
|
||||
AsyncApp,
|
||||
) -> LocalBoxFuture<'static, Result<(), acp::Error>>,
|
||||
) -> LocalBoxFuture<'static, Result<()>>,
|
||||
>,
|
||||
>,
|
||||
}
|
||||
@@ -1839,24 +1565,21 @@ mod tests {
|
||||
}
|
||||
|
||||
impl acp::Agent for FakeAgent {
|
||||
async fn initialize(&self) -> Result<acp::InitializeResponse, acp::Error> {
|
||||
async fn initialize(&self) -> Result<acp::InitializeResponse> {
|
||||
Ok(acp::InitializeResponse {
|
||||
is_authenticated: true,
|
||||
})
|
||||
}
|
||||
|
||||
async fn authenticate(&self) -> Result<(), acp::Error> {
|
||||
async fn authenticate(&self) -> Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn cancel_send_message(&self) -> Result<(), acp::Error> {
|
||||
async fn cancel_send_message(&self) -> Result<()> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn send_user_message(
|
||||
&self,
|
||||
request: acp::SendUserMessageParams,
|
||||
) -> Result<(), acp::Error> {
|
||||
async fn send_user_message(&self, request: acp::SendUserMessageParams) -> Result<()> {
|
||||
let mut cx = self.cx.clone();
|
||||
let handler = self
|
||||
.server
|
||||
@@ -1866,7 +1589,7 @@ mod tests {
|
||||
if let Some(handler) = handler {
|
||||
handler(request, self.server.clone(), self.cx.clone()).await
|
||||
} else {
|
||||
Err(anyhow::anyhow!("No handler for on_user_message").into())
|
||||
anyhow::bail!("No handler for on_user_message")
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1901,7 +1624,7 @@ mod tests {
|
||||
handler: impl for<'a> Fn(acp::SendUserMessageParams, Entity<FakeAcpServer>, AsyncApp) -> F
|
||||
+ 'static,
|
||||
) where
|
||||
F: Future<Output = Result<(), acp::Error>> + 'static,
|
||||
F: Future<Output = Result<()>> + 'static,
|
||||
{
|
||||
self.on_user_message
|
||||
.replace(Rc::new(move |request, server, cx| {
|
||||
|
||||
@@ -2,5 +2,4 @@ mod completion_provider;
|
||||
mod message_history;
|
||||
mod thread_view;
|
||||
|
||||
pub use message_history::MessageHistory;
|
||||
pub use thread_view::AcpThreadView;
|
||||
|
||||
@@ -3,25 +3,19 @@ pub struct MessageHistory<T> {
|
||||
current: Option<usize>,
|
||||
}
|
||||
|
||||
impl<T> Default for MessageHistory<T> {
|
||||
fn default() -> Self {
|
||||
impl<T> MessageHistory<T> {
|
||||
pub fn new() -> Self {
|
||||
MessageHistory {
|
||||
items: Vec::new(),
|
||||
current: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T> MessageHistory<T> {
|
||||
pub fn push(&mut self, message: T) {
|
||||
self.current.take();
|
||||
self.items.push(message);
|
||||
}
|
||||
|
||||
pub fn reset_position(&mut self) {
|
||||
self.current.take();
|
||||
}
|
||||
|
||||
pub fn prev(&mut self) -> Option<&T> {
|
||||
if self.items.is_empty() {
|
||||
return None;
|
||||
@@ -52,7 +46,7 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn test_prev_next() {
|
||||
let mut history = MessageHistory::default();
|
||||
let mut history = MessageHistory::new();
|
||||
|
||||
// Test empty history
|
||||
assert_eq!(history.prev(), None);
|
||||
|
||||
@@ -1,38 +1,33 @@
|
||||
use std::cell::RefCell;
|
||||
use std::collections::BTreeMap;
|
||||
use std::path::Path;
|
||||
use std::rc::Rc;
|
||||
use std::sync::Arc;
|
||||
use std::time::Duration;
|
||||
|
||||
use agentic_coding_protocol::{self as acp};
|
||||
use assistant_tool::ActionLog;
|
||||
use buffer_diff::BufferDiff;
|
||||
use collections::{HashMap, HashSet};
|
||||
use editor::{
|
||||
AnchorRangeExt, ContextMenuOptions, ContextMenuPlacement, Editor, EditorElement, EditorMode,
|
||||
EditorStyle, MinimapVisibility, MultiBuffer, PathKey,
|
||||
EditorStyle, MinimapVisibility, MultiBuffer,
|
||||
};
|
||||
use file_icons::FileIcons;
|
||||
use futures::channel::oneshot;
|
||||
use gpui::{
|
||||
Action, Animation, AnimationExt, App, BorderStyle, EdgesRefinement, Empty, Entity, EntityId,
|
||||
FocusHandle, Focusable, Hsla, Length, ListOffset, ListState, SharedString, StyleRefinement,
|
||||
Subscription, Task, TextStyle, TextStyleRefinement, Transformation, UnderlineStyle, WeakEntity,
|
||||
Window, div, linear_color_stop, linear_gradient, list, percentage, point, prelude::*,
|
||||
pulsating_between,
|
||||
Animation, AnimationExt, App, BorderStyle, EdgesRefinement, Empty, Entity, EntityId, Focusable,
|
||||
Hsla, Length, ListOffset, ListState, SharedString, StyleRefinement, Subscription, TextStyle,
|
||||
TextStyleRefinement, Transformation, UnderlineStyle, WeakEntity, Window, div, list, percentage,
|
||||
prelude::*, pulsating_between,
|
||||
};
|
||||
use gpui::{FocusHandle, Task};
|
||||
use language::language_settings::SoftWrap;
|
||||
use language::{Buffer, Language};
|
||||
use markdown::{HeadingLevelStyles, Markdown, MarkdownElement, MarkdownStyle};
|
||||
use parking_lot::Mutex;
|
||||
use project::Project;
|
||||
use settings::Settings as _;
|
||||
use text::Anchor;
|
||||
use theme::ThemeSettings;
|
||||
use ui::{Disclosure, Divider, DividerColor, KeyBinding, Tooltip, prelude::*};
|
||||
use ui::{Disclosure, Tooltip, prelude::*};
|
||||
use util::ResultExt;
|
||||
use workspace::{CollaboratorId, Workspace};
|
||||
use workspace::Workspace;
|
||||
use zed_actions::agent::{Chat, NextHistoryMessage, PreviousHistoryMessage};
|
||||
|
||||
use ::acp::{
|
||||
@@ -43,8 +38,6 @@ use ::acp::{
|
||||
|
||||
use crate::acp::completion_provider::{ContextPickerCompletionProvider, MentionSet};
|
||||
use crate::acp::message_history::MessageHistory;
|
||||
use crate::agent_diff::AgentDiff;
|
||||
use crate::{AgentDiffPane, Follow, KeepAll, OpenAgentDiff, RejectAll};
|
||||
|
||||
const RESPONSE_PADDING_X: Pixels = px(19.);
|
||||
|
||||
@@ -54,16 +47,13 @@ pub struct AcpThreadView {
|
||||
thread_state: ThreadState,
|
||||
diff_editors: HashMap<EntityId, Entity<Editor>>,
|
||||
message_editor: Entity<Editor>,
|
||||
message_set_from_history: bool,
|
||||
_message_editor_subscription: Subscription,
|
||||
mention_set: Arc<Mutex<MentionSet>>,
|
||||
last_error: Option<Entity<Markdown>>,
|
||||
list_state: ListState,
|
||||
auth_task: Option<Task<()>>,
|
||||
expanded_tool_calls: HashSet<ToolCallId>,
|
||||
expanded_thinking_blocks: HashSet<(usize, usize)>,
|
||||
edits_expanded: bool,
|
||||
message_history: Rc<RefCell<MessageHistory<acp::SendUserMessageParams>>>,
|
||||
message_history: MessageHistory<acp::SendUserMessageParams>,
|
||||
}
|
||||
|
||||
enum ThreadState {
|
||||
@@ -72,7 +62,7 @@ enum ThreadState {
|
||||
},
|
||||
Ready {
|
||||
thread: Entity<AcpThread>,
|
||||
_subscription: [Subscription; 2],
|
||||
_subscription: Subscription,
|
||||
},
|
||||
LoadError(LoadError),
|
||||
Unauthenticated {
|
||||
@@ -84,7 +74,6 @@ impl AcpThreadView {
|
||||
pub fn new(
|
||||
workspace: WeakEntity<Workspace>,
|
||||
project: Entity<Project>,
|
||||
message_history: Rc<RefCell<MessageHistory<acp::SendUserMessageParams>>>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
@@ -129,17 +118,6 @@ impl AcpThreadView {
|
||||
editor
|
||||
});
|
||||
|
||||
let message_editor_subscription = cx.subscribe(&message_editor, |this, _, event, _| {
|
||||
if let editor::EditorEvent::BufferEdited = &event {
|
||||
if !this.message_set_from_history {
|
||||
this.message_history.borrow_mut().reset_position();
|
||||
}
|
||||
this.message_set_from_history = false;
|
||||
}
|
||||
});
|
||||
|
||||
let mention_set = mention_set.clone();
|
||||
|
||||
let list_state = ListState::new(
|
||||
0,
|
||||
gpui::ListAlignment::Bottom,
|
||||
@@ -158,12 +136,10 @@ impl AcpThreadView {
|
||||
);
|
||||
|
||||
Self {
|
||||
workspace: workspace.clone(),
|
||||
workspace,
|
||||
project: project.clone(),
|
||||
thread_state: Self::initial_state(workspace, project, window, cx),
|
||||
thread_state: Self::initial_state(project, window, cx),
|
||||
message_editor,
|
||||
message_set_from_history: false,
|
||||
_message_editor_subscription: message_editor_subscription,
|
||||
mention_set,
|
||||
diff_editors: Default::default(),
|
||||
list_state: list_state,
|
||||
@@ -171,13 +147,11 @@ impl AcpThreadView {
|
||||
auth_task: None,
|
||||
expanded_tool_calls: HashSet::default(),
|
||||
expanded_thinking_blocks: HashSet::default(),
|
||||
edits_expanded: false,
|
||||
message_history,
|
||||
message_history: MessageHistory::new(),
|
||||
}
|
||||
}
|
||||
|
||||
fn initial_state(
|
||||
workspace: WeakEntity<Workspace>,
|
||||
project: Entity<Project>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
@@ -245,23 +219,15 @@ impl AcpThreadView {
|
||||
this.update_in(cx, |this, window, cx| {
|
||||
match result {
|
||||
Ok(()) => {
|
||||
let thread_subscription =
|
||||
let subscription =
|
||||
cx.subscribe_in(&thread, window, Self::handle_thread_event);
|
||||
|
||||
let action_log = thread.read(cx).action_log().clone();
|
||||
let action_log_subscription =
|
||||
cx.observe(&action_log, |_, _, cx| cx.notify());
|
||||
|
||||
this.list_state
|
||||
.splice(0..0, thread.read(cx).entries().len());
|
||||
|
||||
AgentDiff::set_active_thread(&workspace, thread.clone(), window, cx);
|
||||
|
||||
this.thread_state = ThreadState::Ready {
|
||||
thread,
|
||||
_subscription: [thread_subscription, action_log_subscription],
|
||||
_subscription: subscription,
|
||||
};
|
||||
|
||||
cx.notify();
|
||||
}
|
||||
Err(err) => {
|
||||
@@ -284,7 +250,7 @@ impl AcpThreadView {
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
pub fn thread(&self) -> Option<&Entity<AcpThread>> {
|
||||
fn thread(&self) -> Option<&Entity<AcpThread>> {
|
||||
match &self.thread_state {
|
||||
ThreadState::Ready { thread, .. } | ThreadState::Unauthenticated { thread } => {
|
||||
Some(thread)
|
||||
@@ -315,6 +281,7 @@ impl AcpThreadView {
|
||||
|
||||
let mut ix = 0;
|
||||
let mut chunks: Vec<acp::UserMessageChunk> = Vec::new();
|
||||
|
||||
let project = self.project.clone();
|
||||
self.message_editor.update(cx, |editor, cx| {
|
||||
let text = editor.text(cx);
|
||||
@@ -375,7 +342,7 @@ impl AcpThreadView {
|
||||
editor.remove_creases(mention_set.lock().drain(), cx)
|
||||
});
|
||||
|
||||
self.message_history.borrow_mut().push(message);
|
||||
self.message_history.push(message);
|
||||
}
|
||||
|
||||
fn previous_history_message(
|
||||
@@ -384,11 +351,11 @@ impl AcpThreadView {
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.message_set_from_history = Self::set_draft_message(
|
||||
Self::set_draft_message(
|
||||
self.message_editor.clone(),
|
||||
self.mention_set.clone(),
|
||||
self.project.clone(),
|
||||
self.message_history.borrow_mut().prev(),
|
||||
self.message_history.prev(),
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
@@ -400,11 +367,11 @@ impl AcpThreadView {
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.message_set_from_history = Self::set_draft_message(
|
||||
Self::set_draft_message(
|
||||
self.message_editor.clone(),
|
||||
self.mention_set.clone(),
|
||||
self.project.clone(),
|
||||
self.message_history.borrow_mut().next(),
|
||||
self.message_history.next(),
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
@@ -417,11 +384,15 @@ impl AcpThreadView {
|
||||
message: Option<&acp::SendUserMessageParams>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> bool {
|
||||
) {
|
||||
cx.notify();
|
||||
|
||||
let Some(message) = message else {
|
||||
return false;
|
||||
message_editor.update(cx, |editor, cx| {
|
||||
editor.clear(window, cx);
|
||||
editor.remove_creases(mention_set.lock().drain(), cx)
|
||||
});
|
||||
return;
|
||||
};
|
||||
|
||||
let mut text = String::new();
|
||||
@@ -481,35 +452,6 @@ impl AcpThreadView {
|
||||
mention_set.lock().insert(crease_id, project_path);
|
||||
}
|
||||
}
|
||||
|
||||
true
|
||||
}
|
||||
|
||||
fn open_agent_diff(&mut self, _: &OpenAgentDiff, window: &mut Window, cx: &mut Context<Self>) {
|
||||
if let Some(thread) = self.thread() {
|
||||
AgentDiffPane::deploy(thread.clone(), self.workspace.clone(), window, cx).log_err();
|
||||
}
|
||||
}
|
||||
|
||||
fn open_edited_buffer(
|
||||
&mut self,
|
||||
buffer: &Entity<Buffer>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let Some(thread) = self.thread() else {
|
||||
return;
|
||||
};
|
||||
|
||||
let Some(diff) =
|
||||
AgentDiffPane::deploy(thread.clone(), self.workspace.clone(), window, cx).log_err()
|
||||
else {
|
||||
return;
|
||||
};
|
||||
|
||||
diff.update(cx, |diff, cx| {
|
||||
diff.move_to_path(PathKey::for_buffer(&buffer, cx), window, cx)
|
||||
})
|
||||
}
|
||||
|
||||
fn handle_thread_event(
|
||||
@@ -522,8 +464,7 @@ impl AcpThreadView {
|
||||
let count = self.list_state.item_count();
|
||||
match event {
|
||||
AcpThreadEvent::NewEntry => {
|
||||
let index = thread.read(cx).entries().len() - 1;
|
||||
self.sync_thread_entry_view(index, window, cx);
|
||||
self.sync_thread_entry_view(thread.read(cx).entries().len() - 1, window, cx);
|
||||
self.list_state.splice(count..count, 1);
|
||||
}
|
||||
AcpThreadEvent::EntryUpdated(index) => {
|
||||
@@ -596,7 +537,15 @@ impl AcpThreadView {
|
||||
|
||||
fn entry_diff_multibuffer(&self, entry_ix: usize, cx: &App) -> Option<Entity<MultiBuffer>> {
|
||||
let entry = self.thread()?.read(cx).entries().get(entry_ix)?;
|
||||
entry.diff().map(|diff| diff.multibuffer.clone())
|
||||
if let AgentThreadEntry::ToolCall(ToolCall {
|
||||
content: Some(ToolCallContent::Diff { diff }),
|
||||
..
|
||||
}) = &entry
|
||||
{
|
||||
Some(diff.multibuffer.clone())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
fn authenticate(&mut self, window: &mut Window, cx: &mut Context<Self>) {
|
||||
@@ -617,8 +566,7 @@ impl AcpThreadView {
|
||||
Markdown::new(format!("Error: {err}").into(), None, None, cx)
|
||||
}))
|
||||
} else {
|
||||
this.thread_state =
|
||||
Self::initial_state(this.workspace.clone(), project.clone(), window, cx)
|
||||
this.thread_state = Self::initial_state(project.clone(), window, cx)
|
||||
}
|
||||
this.auth_task.take()
|
||||
})
|
||||
@@ -925,43 +873,10 @@ impl AcpThreadView {
|
||||
.size(IconSize::Small)
|
||||
.color(Color::Muted),
|
||||
)
|
||||
.child(if tool_call.locations.len() == 1 {
|
||||
let name = tool_call.locations[0]
|
||||
.path
|
||||
.file_name()
|
||||
.unwrap_or_default()
|
||||
.display()
|
||||
.to_string();
|
||||
|
||||
h_flex()
|
||||
.id(("open-tool-call-location", entry_ix))
|
||||
.child(name)
|
||||
.w_full()
|
||||
.max_w_full()
|
||||
.pr_1()
|
||||
.gap_0p5()
|
||||
.cursor_pointer()
|
||||
.rounded_sm()
|
||||
.opacity(0.8)
|
||||
.hover(|label| {
|
||||
label.opacity(1.).bg(cx
|
||||
.theme()
|
||||
.colors()
|
||||
.element_hover
|
||||
.opacity(0.5))
|
||||
})
|
||||
.tooltip(Tooltip::text("Jump to File"))
|
||||
.on_click(cx.listener(move |this, _, window, cx| {
|
||||
this.open_tool_call_location(entry_ix, 0, window, cx);
|
||||
}))
|
||||
.into_any_element()
|
||||
} else {
|
||||
self.render_markdown(
|
||||
tool_call.label.clone(),
|
||||
default_markdown_style(needs_confirmation, window, cx),
|
||||
)
|
||||
.into_any()
|
||||
}),
|
||||
.child(self.render_markdown(
|
||||
tool_call.label.clone(),
|
||||
default_markdown_style(needs_confirmation, window, cx),
|
||||
)),
|
||||
)
|
||||
.child(
|
||||
h_flex()
|
||||
@@ -1021,19 +936,15 @@ impl AcpThreadView {
|
||||
cx: &Context<Self>,
|
||||
) -> AnyElement {
|
||||
match content {
|
||||
ToolCallContent::Markdown { markdown } => {
|
||||
div()
|
||||
.p_2()
|
||||
.child(self.render_markdown(
|
||||
markdown.clone(),
|
||||
default_markdown_style(false, window, cx),
|
||||
))
|
||||
.into_any_element()
|
||||
}
|
||||
ToolCallContent::Markdown { markdown } => self
|
||||
.render_markdown(markdown.clone(), default_markdown_style(false, window, cx))
|
||||
.into_any_element(),
|
||||
ToolCallContent::Diff {
|
||||
diff: Diff { multibuffer, .. },
|
||||
diff: Diff {
|
||||
path, multibuffer, ..
|
||||
},
|
||||
..
|
||||
} => self.render_diff_editor(multibuffer),
|
||||
} => self.render_diff_editor(multibuffer, path),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1453,9 +1364,10 @@ impl AcpThreadView {
|
||||
}
|
||||
}
|
||||
|
||||
fn render_diff_editor(&self, multibuffer: &Entity<MultiBuffer>) -> AnyElement {
|
||||
fn render_diff_editor(&self, multibuffer: &Entity<MultiBuffer>, path: &Path) -> AnyElement {
|
||||
v_flex()
|
||||
.h_full()
|
||||
.child(path.to_string_lossy().to_string())
|
||||
.child(
|
||||
if let Some(editor) = self.diff_editors.get(&multibuffer.entity_id()) {
|
||||
editor.clone().into_any_element()
|
||||
@@ -1617,357 +1529,6 @@ impl AcpThreadView {
|
||||
container.into_any()
|
||||
}
|
||||
|
||||
fn render_edits_bar(
|
||||
&self,
|
||||
thread_entity: &Entity<AcpThread>,
|
||||
window: &mut Window,
|
||||
cx: &Context<Self>,
|
||||
) -> Option<AnyElement> {
|
||||
let thread = thread_entity.read(cx);
|
||||
let action_log = thread.action_log();
|
||||
let changed_buffers = action_log.read(cx).changed_buffers(cx);
|
||||
|
||||
if changed_buffers.is_empty() {
|
||||
return None;
|
||||
}
|
||||
|
||||
let editor_bg_color = cx.theme().colors().editor_background;
|
||||
let active_color = cx.theme().colors().element_selected;
|
||||
let bg_edit_files_disclosure = editor_bg_color.blend(active_color.opacity(0.3));
|
||||
|
||||
let pending_edits = thread.has_pending_edit_tool_calls();
|
||||
let expanded = self.edits_expanded;
|
||||
|
||||
v_flex()
|
||||
.mt_1()
|
||||
.mx_2()
|
||||
.bg(bg_edit_files_disclosure)
|
||||
.border_1()
|
||||
.border_b_0()
|
||||
.border_color(cx.theme().colors().border)
|
||||
.rounded_t_md()
|
||||
.shadow(vec![gpui::BoxShadow {
|
||||
color: gpui::black().opacity(0.15),
|
||||
offset: point(px(1.), px(-1.)),
|
||||
blur_radius: px(3.),
|
||||
spread_radius: px(0.),
|
||||
}])
|
||||
.child(self.render_edits_bar_summary(
|
||||
action_log,
|
||||
&changed_buffers,
|
||||
expanded,
|
||||
pending_edits,
|
||||
window,
|
||||
cx,
|
||||
))
|
||||
.when(expanded, |parent| {
|
||||
parent.child(self.render_edits_bar_files(
|
||||
action_log,
|
||||
&changed_buffers,
|
||||
pending_edits,
|
||||
cx,
|
||||
))
|
||||
})
|
||||
.into_any()
|
||||
.into()
|
||||
}
|
||||
|
||||
fn render_edits_bar_summary(
|
||||
&self,
|
||||
action_log: &Entity<ActionLog>,
|
||||
changed_buffers: &BTreeMap<Entity<Buffer>, Entity<BufferDiff>>,
|
||||
expanded: bool,
|
||||
pending_edits: bool,
|
||||
window: &mut Window,
|
||||
cx: &Context<Self>,
|
||||
) -> Div {
|
||||
const EDIT_NOT_READY_TOOLTIP_LABEL: &str = "Wait until file edits are complete.";
|
||||
|
||||
let focus_handle = self.focus_handle(cx);
|
||||
|
||||
h_flex()
|
||||
.p_1()
|
||||
.justify_between()
|
||||
.when(expanded, |this| {
|
||||
this.border_b_1().border_color(cx.theme().colors().border)
|
||||
})
|
||||
.child(
|
||||
h_flex()
|
||||
.id("edits-container")
|
||||
.cursor_pointer()
|
||||
.w_full()
|
||||
.gap_1()
|
||||
.child(Disclosure::new("edits-disclosure", expanded))
|
||||
.map(|this| {
|
||||
if pending_edits {
|
||||
this.child(
|
||||
Label::new(format!(
|
||||
"Editing {} {}…",
|
||||
changed_buffers.len(),
|
||||
if changed_buffers.len() == 1 {
|
||||
"file"
|
||||
} else {
|
||||
"files"
|
||||
}
|
||||
))
|
||||
.color(Color::Muted)
|
||||
.size(LabelSize::Small)
|
||||
.with_animation(
|
||||
"edit-label",
|
||||
Animation::new(Duration::from_secs(2))
|
||||
.repeat()
|
||||
.with_easing(pulsating_between(0.3, 0.7)),
|
||||
|label, delta| label.alpha(delta),
|
||||
),
|
||||
)
|
||||
} else {
|
||||
this.child(
|
||||
Label::new("Edits")
|
||||
.size(LabelSize::Small)
|
||||
.color(Color::Muted),
|
||||
)
|
||||
.child(Label::new("•").size(LabelSize::XSmall).color(Color::Muted))
|
||||
.child(
|
||||
Label::new(format!(
|
||||
"{} {}",
|
||||
changed_buffers.len(),
|
||||
if changed_buffers.len() == 1 {
|
||||
"file"
|
||||
} else {
|
||||
"files"
|
||||
}
|
||||
))
|
||||
.size(LabelSize::Small)
|
||||
.color(Color::Muted),
|
||||
)
|
||||
}
|
||||
})
|
||||
.on_click(cx.listener(|this, _, _, cx| {
|
||||
this.edits_expanded = !this.edits_expanded;
|
||||
cx.notify();
|
||||
})),
|
||||
)
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_1()
|
||||
.child(
|
||||
IconButton::new("review-changes", IconName::ListTodo)
|
||||
.icon_size(IconSize::Small)
|
||||
.tooltip({
|
||||
let focus_handle = focus_handle.clone();
|
||||
move |window, cx| {
|
||||
Tooltip::for_action_in(
|
||||
"Review Changes",
|
||||
&OpenAgentDiff,
|
||||
&focus_handle,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
}
|
||||
})
|
||||
.on_click(cx.listener(|_, _, window, cx| {
|
||||
window.dispatch_action(OpenAgentDiff.boxed_clone(), cx);
|
||||
})),
|
||||
)
|
||||
.child(Divider::vertical().color(DividerColor::Border))
|
||||
.child(
|
||||
Button::new("reject-all-changes", "Reject All")
|
||||
.label_size(LabelSize::Small)
|
||||
.disabled(pending_edits)
|
||||
.when(pending_edits, |this| {
|
||||
this.tooltip(Tooltip::text(EDIT_NOT_READY_TOOLTIP_LABEL))
|
||||
})
|
||||
.key_binding(
|
||||
KeyBinding::for_action_in(
|
||||
&RejectAll,
|
||||
&focus_handle.clone(),
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
.map(|kb| kb.size(rems_from_px(10.))),
|
||||
)
|
||||
.on_click({
|
||||
let action_log = action_log.clone();
|
||||
cx.listener(move |_, _, _, cx| {
|
||||
action_log.update(cx, |action_log, cx| {
|
||||
action_log.reject_all_edits(cx).detach();
|
||||
})
|
||||
})
|
||||
}),
|
||||
)
|
||||
.child(
|
||||
Button::new("keep-all-changes", "Keep All")
|
||||
.label_size(LabelSize::Small)
|
||||
.disabled(pending_edits)
|
||||
.when(pending_edits, |this| {
|
||||
this.tooltip(Tooltip::text(EDIT_NOT_READY_TOOLTIP_LABEL))
|
||||
})
|
||||
.key_binding(
|
||||
KeyBinding::for_action_in(&KeepAll, &focus_handle, window, cx)
|
||||
.map(|kb| kb.size(rems_from_px(10.))),
|
||||
)
|
||||
.on_click({
|
||||
let action_log = action_log.clone();
|
||||
cx.listener(move |_, _, _, cx| {
|
||||
action_log.update(cx, |action_log, cx| {
|
||||
action_log.keep_all_edits(cx);
|
||||
})
|
||||
})
|
||||
}),
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
fn render_edits_bar_files(
|
||||
&self,
|
||||
action_log: &Entity<ActionLog>,
|
||||
changed_buffers: &BTreeMap<Entity<Buffer>, Entity<BufferDiff>>,
|
||||
pending_edits: bool,
|
||||
cx: &Context<Self>,
|
||||
) -> Div {
|
||||
let editor_bg_color = cx.theme().colors().editor_background;
|
||||
|
||||
v_flex().children(changed_buffers.into_iter().enumerate().flat_map(
|
||||
|(index, (buffer, _diff))| {
|
||||
let file = buffer.read(cx).file()?;
|
||||
let path = file.path();
|
||||
|
||||
let file_path = path.parent().and_then(|parent| {
|
||||
let parent_str = parent.to_string_lossy();
|
||||
|
||||
if parent_str.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(
|
||||
Label::new(format!("/{}{}", parent_str, std::path::MAIN_SEPARATOR_STR))
|
||||
.color(Color::Muted)
|
||||
.size(LabelSize::XSmall)
|
||||
.buffer_font(cx),
|
||||
)
|
||||
}
|
||||
});
|
||||
|
||||
let file_name = path.file_name().map(|name| {
|
||||
Label::new(name.to_string_lossy().to_string())
|
||||
.size(LabelSize::XSmall)
|
||||
.buffer_font(cx)
|
||||
});
|
||||
|
||||
let file_icon = FileIcons::get_icon(&path, cx)
|
||||
.map(Icon::from_path)
|
||||
.map(|icon| icon.color(Color::Muted).size(IconSize::Small))
|
||||
.unwrap_or_else(|| {
|
||||
Icon::new(IconName::File)
|
||||
.color(Color::Muted)
|
||||
.size(IconSize::Small)
|
||||
});
|
||||
|
||||
let overlay_gradient = linear_gradient(
|
||||
90.,
|
||||
linear_color_stop(editor_bg_color, 1.),
|
||||
linear_color_stop(editor_bg_color.opacity(0.2), 0.),
|
||||
);
|
||||
|
||||
let element = h_flex()
|
||||
.group("edited-code")
|
||||
.id(("file-container", index))
|
||||
.relative()
|
||||
.py_1()
|
||||
.pl_2()
|
||||
.pr_1()
|
||||
.gap_2()
|
||||
.justify_between()
|
||||
.bg(editor_bg_color)
|
||||
.when(index < changed_buffers.len() - 1, |parent| {
|
||||
parent.border_color(cx.theme().colors().border).border_b_1()
|
||||
})
|
||||
.child(
|
||||
h_flex()
|
||||
.id(("file-name", index))
|
||||
.pr_8()
|
||||
.gap_1p5()
|
||||
.max_w_full()
|
||||
.overflow_x_scroll()
|
||||
.child(file_icon)
|
||||
.child(h_flex().gap_0p5().children(file_name).children(file_path))
|
||||
.on_click({
|
||||
let buffer = buffer.clone();
|
||||
cx.listener(move |this, _, window, cx| {
|
||||
this.open_edited_buffer(&buffer, window, cx);
|
||||
})
|
||||
}),
|
||||
)
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_1()
|
||||
.visible_on_hover("edited-code")
|
||||
.child(
|
||||
Button::new("review", "Review")
|
||||
.label_size(LabelSize::Small)
|
||||
.on_click({
|
||||
let buffer = buffer.clone();
|
||||
cx.listener(move |this, _, window, cx| {
|
||||
this.open_edited_buffer(&buffer, window, cx);
|
||||
})
|
||||
}),
|
||||
)
|
||||
.child(Divider::vertical().color(DividerColor::BorderVariant))
|
||||
.child(
|
||||
Button::new("reject-file", "Reject")
|
||||
.label_size(LabelSize::Small)
|
||||
.disabled(pending_edits)
|
||||
.on_click({
|
||||
let buffer = buffer.clone();
|
||||
let action_log = action_log.clone();
|
||||
move |_, _, cx| {
|
||||
action_log.update(cx, |action_log, cx| {
|
||||
action_log
|
||||
.reject_edits_in_ranges(
|
||||
buffer.clone(),
|
||||
vec![Anchor::MIN..Anchor::MAX],
|
||||
cx,
|
||||
)
|
||||
.detach_and_log_err(cx);
|
||||
})
|
||||
}
|
||||
}),
|
||||
)
|
||||
.child(
|
||||
Button::new("keep-file", "Keep")
|
||||
.label_size(LabelSize::Small)
|
||||
.disabled(pending_edits)
|
||||
.on_click({
|
||||
let buffer = buffer.clone();
|
||||
let action_log = action_log.clone();
|
||||
move |_, _, cx| {
|
||||
action_log.update(cx, |action_log, cx| {
|
||||
action_log.keep_edits_in_range(
|
||||
buffer.clone(),
|
||||
Anchor::MIN..Anchor::MAX,
|
||||
cx,
|
||||
);
|
||||
})
|
||||
}
|
||||
}),
|
||||
),
|
||||
)
|
||||
.child(
|
||||
div()
|
||||
.id("gradient-overlay")
|
||||
.absolute()
|
||||
.h_full()
|
||||
.w_12()
|
||||
.top_0()
|
||||
.bottom_0()
|
||||
.right(px(152.))
|
||||
.bg(overlay_gradient),
|
||||
);
|
||||
|
||||
Some(element)
|
||||
},
|
||||
))
|
||||
}
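The per-file rows above split each changed path into a muted directory prefix and a file-name label. The same split, reduced to plain `std::path` calls (the gpui `Label` styling is omitted):

```rust
use std::path::{Path, MAIN_SEPARATOR_STR};

/// Mirrors the `file_path` / `file_name` computation in `render_edits_bar_files`:
/// a muted "/dir/" prefix (when there is one) plus the bare file name.
fn split_for_display(path: &Path) -> (Option<String>, Option<String>) {
    let prefix = path.parent().and_then(|parent| {
        let parent_str = parent.to_string_lossy();
        if parent_str.is_empty() {
            None
        } else {
            Some(format!("/{}{}", parent_str, MAIN_SEPARATOR_STR))
        }
    });
    let name = path
        .file_name()
        .map(|name| name.to_string_lossy().to_string());
    (prefix, name)
}

fn main() {
    let (prefix, name) = split_for_display(Path::new("crates/agent_ui/src/acp.rs"));
    assert_eq!(prefix, Some(format!("/crates/agent_ui/src{MAIN_SEPARATOR_STR}")));
    assert_eq!(name.as_deref(), Some("acp.rs"));
}
```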
|
||||
|
||||
fn render_message_editor(&mut self, cx: &mut Context<Self>) -> AnyElement {
|
||||
let settings = ThemeSettings::get_global(cx);
|
||||
let font_size = TextSize::Small
|
||||
@@ -1998,76 +1559,6 @@ impl AcpThreadView {
|
||||
.into_any()
|
||||
}
|
||||
|
||||
fn render_send_button(&self, cx: &mut Context<Self>) -> AnyElement {
|
||||
if self.thread().map_or(true, |thread| {
|
||||
thread.read(cx).status() == ThreadStatus::Idle
|
||||
}) {
|
||||
let is_editor_empty = self.message_editor.read(cx).is_empty(cx);
|
||||
IconButton::new("send-message", IconName::Send)
|
||||
.icon_color(Color::Accent)
|
||||
.style(ButtonStyle::Filled)
|
||||
.disabled(self.thread().is_none() || is_editor_empty)
|
||||
.on_click(cx.listener(|this, _, window, cx| {
|
||||
this.chat(&Chat, window, cx);
|
||||
}))
|
||||
.when(!is_editor_empty, |button| {
|
||||
button.tooltip(move |window, cx| Tooltip::for_action("Send", &Chat, window, cx))
|
||||
})
|
||||
.when(is_editor_empty, |button| {
|
||||
button.tooltip(Tooltip::text("Type a message to submit"))
|
||||
})
|
||||
.into_any_element()
|
||||
} else {
|
||||
IconButton::new("stop-generation", IconName::StopFilled)
|
||||
.icon_color(Color::Error)
|
||||
.style(ButtonStyle::Tinted(ui::TintColor::Error))
|
||||
.tooltip(move |window, cx| {
|
||||
Tooltip::for_action("Stop Generation", &editor::actions::Cancel, window, cx)
|
||||
})
|
||||
.on_click(cx.listener(|this, _event, _, cx| this.cancel(cx)))
|
||||
.into_any_element()
|
||||
}
|
||||
}
|
||||
|
||||
fn render_follow_toggle(&self, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
let following = self
|
||||
.workspace
|
||||
.read_with(cx, |workspace, _| {
|
||||
workspace.is_being_followed(CollaboratorId::Agent)
|
||||
})
|
||||
.unwrap_or(false);
|
||||
|
||||
IconButton::new("follow-agent", IconName::Crosshair)
|
||||
.icon_size(IconSize::Small)
|
||||
.icon_color(Color::Muted)
|
||||
.toggle_state(following)
|
||||
.selected_icon_color(Some(Color::Custom(cx.theme().players().agent().cursor)))
|
||||
.tooltip(move |window, cx| {
|
||||
if following {
|
||||
Tooltip::for_action("Stop Following Agent", &Follow, window, cx)
|
||||
} else {
|
||||
Tooltip::with_meta(
|
||||
"Follow Agent",
|
||||
Some(&Follow),
|
||||
"Track the agent's location as it reads and edits files.",
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
}
|
||||
})
|
||||
.on_click(cx.listener(move |this, _, window, cx| {
|
||||
this.workspace
|
||||
.update(cx, |workspace, cx| {
|
||||
if following {
|
||||
workspace.unfollow(CollaboratorId::Agent, window, cx);
|
||||
} else {
|
||||
workspace.follow(CollaboratorId::Agent, window, cx);
|
||||
}
|
||||
})
|
||||
.ok();
|
||||
}))
|
||||
}
|
||||
|
||||
fn render_markdown(&self, markdown: Entity<Markdown>, style: MarkdownStyle) -> MarkdownElement {
|
||||
let workspace = self.workspace.clone();
|
||||
MarkdownElement::new(markdown, style).on_url_click(move |text, window, cx| {
|
||||
@@ -2112,64 +1603,6 @@ impl AcpThreadView {
|
||||
}
|
||||
}
|
||||
|
||||
fn open_tool_call_location(
|
||||
&self,
|
||||
entry_ix: usize,
|
||||
location_ix: usize,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Option<()> {
|
||||
let location = self
|
||||
.thread()?
|
||||
.read(cx)
|
||||
.entries()
|
||||
.get(entry_ix)?
|
||||
.locations()?
|
||||
.get(location_ix)?;
|
||||
|
||||
let project_path = self
|
||||
.project
|
||||
.read(cx)
|
||||
.find_project_path(&location.path, cx)?;
|
||||
|
||||
let open_task = self
|
||||
.workspace
|
||||
.update(cx, |worskpace, cx| {
|
||||
worskpace.open_path(project_path, None, true, window, cx)
|
||||
})
|
||||
.log_err()?;
|
||||
|
||||
window
|
||||
.spawn(cx, async move |cx| {
|
||||
let item = open_task.await?;
|
||||
|
||||
let Some(active_editor) = item.downcast::<Editor>() else {
|
||||
return anyhow::Ok(());
|
||||
};
|
||||
|
||||
active_editor.update_in(cx, |editor, window, cx| {
|
||||
let snapshot = editor.buffer().read(cx).snapshot(cx);
|
||||
let first_hunk = editor
|
||||
.diff_hunks_in_ranges(
|
||||
&[editor::Anchor::min()..editor::Anchor::max()],
|
||||
&snapshot,
|
||||
)
|
||||
.next();
|
||||
if let Some(first_hunk) = first_hunk {
|
||||
let first_hunk_start = first_hunk.multi_buffer_range().start;
|
||||
editor.change_selections(Default::default(), window, cx, |selections| {
|
||||
selections.select_anchor_ranges([first_hunk_start..first_hunk_start]);
|
||||
})
|
||||
}
|
||||
})?;
|
||||
|
||||
anyhow::Ok(())
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
pub fn open_thread_as_markdown(
|
||||
&self,
|
||||
workspace: Entity<Workspace>,
|
||||
@@ -2240,6 +1673,10 @@ impl Focusable for AcpThreadView {
|
||||
|
||||
impl Render for AcpThreadView {
|
||||
fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
let text = self.message_editor.read(cx).text(cx);
|
||||
let is_editor_empty = text.is_empty();
|
||||
let focus_handle = self.message_editor.focus_handle(cx);
|
||||
|
||||
let open_as_markdown = IconButton::new("open-as-markdown", IconName::DocumentText)
|
||||
.icon_size(IconSize::XSmall)
|
||||
.icon_color(Color::Ignored)
|
||||
@@ -2265,7 +1702,6 @@ impl Render for AcpThreadView {
|
||||
.on_action(cx.listener(Self::chat))
|
||||
.on_action(cx.listener(Self::previous_history_message))
|
||||
.on_action(cx.listener(Self::next_history_message))
|
||||
.on_action(cx.listener(Self::open_agent_diff))
|
||||
.child(match &self.thread_state {
|
||||
ThreadState::Unauthenticated { .. } => v_flex()
|
||||
.p_2()
|
||||
@@ -2319,7 +1755,6 @@ impl Render for AcpThreadView {
|
||||
.child(LoadingLabel::new("").size(LabelSize::Small))
|
||||
.into(),
|
||||
})
|
||||
.children(self.render_edits_bar(&thread, window, cx))
|
||||
} else {
|
||||
this.child(self.render_empty_state(false, cx))
|
||||
}
|
||||
@@ -2347,12 +1782,47 @@ impl Render for AcpThreadView {
|
||||
.border_t_1()
|
||||
.border_color(cx.theme().colors().border)
|
||||
.child(self.render_message_editor(cx))
|
||||
.child(
|
||||
h_flex()
|
||||
.justify_between()
|
||||
.child(self.render_follow_toggle(cx))
|
||||
.child(self.render_send_button(cx)),
|
||||
),
|
||||
.child({
|
||||
let thread = self.thread();
|
||||
|
||||
h_flex().justify_end().child(
|
||||
if thread.map_or(true, |thread| {
|
||||
thread.read(cx).status() == ThreadStatus::Idle
|
||||
}) {
|
||||
IconButton::new("send-message", IconName::Send)
|
||||
.icon_color(Color::Accent)
|
||||
.style(ButtonStyle::Filled)
|
||||
.disabled(thread.is_none() || is_editor_empty)
|
||||
.on_click({
|
||||
let focus_handle = focus_handle.clone();
|
||||
move |_event, window, cx| {
|
||||
focus_handle.dispatch_action(&Chat, window, cx);
|
||||
}
|
||||
})
|
||||
.when(!is_editor_empty, |button| {
|
||||
button.tooltip(move |window, cx| {
|
||||
Tooltip::for_action("Send", &Chat, window, cx)
|
||||
})
|
||||
})
|
||||
.when(is_editor_empty, |button| {
|
||||
button.tooltip(Tooltip::text("Type a message to submit"))
|
||||
})
|
||||
} else {
|
||||
IconButton::new("stop-generation", IconName::StopFilled)
|
||||
.icon_color(Color::Error)
|
||||
.style(ButtonStyle::Tinted(ui::TintColor::Error))
|
||||
.tooltip(move |window, cx| {
|
||||
Tooltip::for_action(
|
||||
"Stop Generation",
|
||||
&editor::actions::Cancel,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.on_click(cx.listener(|this, _event, _, cx| this.cancel(cx)))
|
||||
},
|
||||
)
|
||||
}),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -787,15 +787,6 @@ impl ActiveThread {
.unwrap()
}
});

let workspace_subscription = if let Some(workspace) = workspace.upgrade() {
Some(cx.observe_release(&workspace, |this, _, cx| {
this.dismiss_notifications(cx);
}))
} else {
None
};

let mut this = Self {
language_registry,
thread_store,
@@ -843,10 +834,6 @@ impl ActiveThread {
}
}

if let Some(subscription) = workspace_subscription {
this._subscriptions.push(subscription);
}

this
}

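The removed `workspace_subscription` lines follow a keep-a-guard-only-if-the-weak-handle-upgrades pattern. A dependency-free sketch of that idea, with a `Drop` guard standing in for gpui's `Subscription`:

```rust
use std::sync::{Arc, Weak};

// Stand-in for a cleanup guard; gpui's `Subscription` plays this role above.
struct Guard(&'static str);

impl Drop for Guard {
    fn drop(&mut self) {
        println!("cleanup: {}", self.0);
    }
}

// Mirror of the removed logic: only hold a guard if the weak handle is still alive.
fn subscribe_if_alive(workspace: &Weak<String>) -> Option<Guard> {
    workspace.upgrade().map(|ws| {
        println!("observing release of {ws}");
        Guard("dismiss notifications")
    })
}

fn main() {
    let workspace = Arc::new("workspace".to_string());
    let weak = Arc::downgrade(&workspace);

    let mut subscriptions: Vec<Guard> = Vec::new();
    if let Some(subscription) = subscribe_if_alive(&weak) {
        subscriptions.push(subscription);
    }
    drop(workspace); // the guard still runs its cleanup when `subscriptions` is dropped
}
```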
@@ -1,9 +1,7 @@
use crate::{Keep, KeepAll, OpenAgentDiff, Reject, RejectAll};
use acp::{AcpThread, AcpThreadEvent};
use agent::{Thread, ThreadEvent, ThreadSummary};
use agent::{Thread, ThreadEvent};
use agent_settings::AgentSettings;
use anyhow::Result;
use assistant_tool::ActionLog;
use buffer_diff::DiffHunkStatus;
use collections::{HashMap, HashSet};
use editor::{
@@ -43,108 +41,16 @@ use zed_actions::assistant::ToggleFocus;
pub struct AgentDiffPane {
multibuffer: Entity<MultiBuffer>,
editor: Entity<Editor>,
thread: AgentDiffThread,
thread: Entity<Thread>,
focus_handle: FocusHandle,
workspace: WeakEntity<Workspace>,
title: SharedString,
_subscriptions: Vec<Subscription>,
}

#[derive(PartialEq, Eq, Clone)]
pub enum AgentDiffThread {
Native(Entity<Thread>),
AcpThread(Entity<AcpThread>),
}

impl AgentDiffThread {
fn project(&self, cx: &App) -> Entity<Project> {
match self {
AgentDiffThread::Native(thread) => thread.read(cx).project().clone(),
AgentDiffThread::AcpThread(thread) => thread.read(cx).project().clone(),
}
}
fn action_log(&self, cx: &App) -> Entity<ActionLog> {
match self {
AgentDiffThread::Native(thread) => thread.read(cx).action_log().clone(),
AgentDiffThread::AcpThread(thread) => thread.read(cx).action_log().clone(),
}
}

fn summary(&self, cx: &App) -> ThreadSummary {
match self {
AgentDiffThread::Native(thread) => thread.read(cx).summary().clone(),
AgentDiffThread::AcpThread(thread) => ThreadSummary::Ready(thread.read(cx).title()),
}
}

fn is_generating(&self, cx: &App) -> bool {
match self {
AgentDiffThread::Native(thread) => thread.read(cx).is_generating(),
AgentDiffThread::AcpThread(thread) => {
thread.read(cx).status() == acp::ThreadStatus::Generating
}
}
}

fn has_pending_edit_tool_uses(&self, cx: &App) -> bool {
match self {
AgentDiffThread::Native(thread) => thread.read(cx).has_pending_edit_tool_uses(),
AgentDiffThread::AcpThread(thread) => thread.read(cx).has_pending_edit_tool_calls(),
}
}

fn downgrade(&self) -> WeakAgentDiffThread {
match self {
AgentDiffThread::Native(thread) => WeakAgentDiffThread::Native(thread.downgrade()),
AgentDiffThread::AcpThread(thread) => {
WeakAgentDiffThread::AcpThread(thread.downgrade())
}
}
}
}
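`AgentDiffThread` lets the diff pane treat a native agent thread and an ACP thread uniformly: every accessor dispatches through a match, and the `From` impls further down provide `.into()` conversions. A stripped-down sketch of the same pattern, using hypothetical stand-in structs rather than the real `Thread`/`AcpThread` entities:

```rust
// Hypothetical stand-ins; the real types live behind gpui `Entity` handles.
struct NativeThread { generating: bool }
struct AcpThread { status: &'static str }

enum AgentDiffThread {
    Native(NativeThread),
    Acp(AcpThread),
}

impl AgentDiffThread {
    // Each accessor dispatches to whichever thread flavor is wrapped.
    fn is_generating(&self) -> bool {
        match self {
            AgentDiffThread::Native(thread) => thread.generating,
            AgentDiffThread::Acp(thread) => thread.status == "generating",
        }
    }
}

impl From<NativeThread> for AgentDiffThread {
    fn from(thread: NativeThread) -> Self {
        AgentDiffThread::Native(thread)
    }
}

impl From<AcpThread> for AgentDiffThread {
    fn from(thread: AcpThread) -> Self {
        AgentDiffThread::Acp(thread)
    }
}

fn main() {
    let thread: AgentDiffThread = AcpThread { status: "generating" }.into();
    assert!(thread.is_generating());
}
```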
|
||||
|
||||
impl From<Entity<Thread>> for AgentDiffThread {
|
||||
fn from(entity: Entity<Thread>) -> Self {
|
||||
AgentDiffThread::Native(entity)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Entity<AcpThread>> for AgentDiffThread {
|
||||
fn from(entity: Entity<AcpThread>) -> Self {
|
||||
AgentDiffThread::AcpThread(entity)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Eq, Clone)]
|
||||
pub enum WeakAgentDiffThread {
|
||||
Native(WeakEntity<Thread>),
|
||||
AcpThread(WeakEntity<AcpThread>),
|
||||
}
|
||||
|
||||
impl WeakAgentDiffThread {
|
||||
pub fn upgrade(&self) -> Option<AgentDiffThread> {
|
||||
match self {
|
||||
WeakAgentDiffThread::Native(weak) => weak.upgrade().map(AgentDiffThread::Native),
|
||||
WeakAgentDiffThread::AcpThread(weak) => weak.upgrade().map(AgentDiffThread::AcpThread),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<WeakEntity<Thread>> for WeakAgentDiffThread {
|
||||
fn from(entity: WeakEntity<Thread>) -> Self {
|
||||
WeakAgentDiffThread::Native(entity)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<WeakEntity<AcpThread>> for WeakAgentDiffThread {
|
||||
fn from(entity: WeakEntity<AcpThread>) -> Self {
|
||||
WeakAgentDiffThread::AcpThread(entity)
|
||||
}
|
||||
}
|
||||
|
||||
impl AgentDiffPane {
|
||||
pub fn deploy(
|
||||
thread: impl Into<AgentDiffThread>,
|
||||
thread: Entity<Thread>,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
@@ -155,16 +61,14 @@ impl AgentDiffPane {
|
||||
}
|
||||
|
||||
pub fn deploy_in_workspace(
|
||||
thread: impl Into<AgentDiffThread>,
|
||||
thread: Entity<Thread>,
|
||||
workspace: &mut Workspace,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Workspace>,
|
||||
) -> Entity<Self> {
|
||||
let thread = thread.into();
|
||||
let existing_diff = workspace
|
||||
.items_of_type::<AgentDiffPane>(cx)
|
||||
.find(|diff| diff.read(cx).thread == thread);
|
||||
|
||||
if let Some(existing_diff) = existing_diff {
|
||||
workspace.activate_item(&existing_diff, true, true, window, cx);
|
||||
existing_diff
|
||||
@@ -177,7 +81,7 @@ impl AgentDiffPane {
|
||||
}
|
||||
|
||||
pub fn new(
|
||||
thread: AgentDiffThread,
|
||||
thread: Entity<Thread>,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
@@ -185,7 +89,7 @@ impl AgentDiffPane {
|
||||
let focus_handle = cx.focus_handle();
|
||||
let multibuffer = cx.new(|_| MultiBuffer::new(Capability::ReadWrite));
|
||||
|
||||
let project = thread.project(cx).clone();
|
||||
let project = thread.read(cx).project().clone();
|
||||
let editor = cx.new(|cx| {
|
||||
let mut editor =
|
||||
Editor::for_multibuffer(multibuffer.clone(), Some(project.clone()), window, cx);
|
||||
@@ -196,27 +100,16 @@ impl AgentDiffPane {
|
||||
editor
|
||||
});
|
||||
|
||||
let action_log = thread.action_log(cx).clone();
|
||||
|
||||
let action_log = thread.read(cx).action_log().clone();
|
||||
let mut this = Self {
|
||||
_subscriptions: [
|
||||
Some(
|
||||
cx.observe_in(&action_log, window, |this, _action_log, window, cx| {
|
||||
this.update_excerpts(window, cx)
|
||||
}),
|
||||
),
|
||||
match &thread {
|
||||
AgentDiffThread::Native(thread) => {
|
||||
Some(cx.subscribe(&thread, |this, _thread, event, cx| {
|
||||
this.handle_thread_event(event, cx)
|
||||
}))
|
||||
}
|
||||
AgentDiffThread::AcpThread(_) => None,
|
||||
},
|
||||
]
|
||||
.into_iter()
|
||||
.flatten()
|
||||
.collect(),
|
||||
_subscriptions: vec![
|
||||
cx.observe_in(&action_log, window, |this, _action_log, window, cx| {
|
||||
this.update_excerpts(window, cx)
|
||||
}),
|
||||
cx.subscribe(&thread, |this, _thread, event, cx| {
|
||||
this.handle_thread_event(event, cx)
|
||||
}),
|
||||
],
|
||||
title: SharedString::default(),
|
||||
multibuffer,
|
||||
editor,
|
||||
@@ -230,7 +123,8 @@ impl AgentDiffPane {
|
||||
}
|
||||
|
||||
fn update_excerpts(&mut self, window: &mut Window, cx: &mut Context<Self>) {
|
||||
let changed_buffers = self.thread.action_log(cx).read(cx).changed_buffers(cx);
|
||||
let thread = self.thread.read(cx);
|
||||
let changed_buffers = thread.action_log().read(cx).changed_buffers(cx);
|
||||
let mut paths_to_delete = self.multibuffer.read(cx).paths().collect::<HashSet<_>>();
|
||||
|
||||
for (buffer, diff_handle) in changed_buffers {
|
||||
@@ -317,7 +211,7 @@ impl AgentDiffPane {
|
||||
}
|
||||
|
||||
fn update_title(&mut self, cx: &mut Context<Self>) {
|
||||
let new_title = self.thread.summary(cx).unwrap_or("Agent Changes");
|
||||
let new_title = self.thread.read(cx).summary().unwrap_or("Agent Changes");
|
||||
if new_title != self.title {
|
||||
self.title = new_title;
|
||||
cx.emit(EditorEvent::TitleChanged);
|
||||
@@ -381,15 +275,14 @@ impl AgentDiffPane {
|
||||
|
||||
fn keep_all(&mut self, _: &KeepAll, _window: &mut Window, cx: &mut Context<Self>) {
|
||||
self.thread
|
||||
.action_log(cx)
|
||||
.update(cx, |action_log, cx| action_log.keep_all_edits(cx))
|
||||
.update(cx, |thread, cx| thread.keep_all_edits(cx));
|
||||
}
|
||||
}
|
||||
|
||||
fn keep_edits_in_selection(
|
||||
editor: &mut Editor,
|
||||
buffer_snapshot: &MultiBufferSnapshot,
|
||||
thread: &AgentDiffThread,
|
||||
thread: &Entity<Thread>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Editor>,
|
||||
) {
|
||||
@@ -404,7 +297,7 @@ fn keep_edits_in_selection(
|
||||
fn reject_edits_in_selection(
|
||||
editor: &mut Editor,
|
||||
buffer_snapshot: &MultiBufferSnapshot,
|
||||
thread: &AgentDiffThread,
|
||||
thread: &Entity<Thread>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Editor>,
|
||||
) {
|
||||
@@ -418,7 +311,7 @@ fn reject_edits_in_selection(
|
||||
fn keep_edits_in_ranges(
|
||||
editor: &mut Editor,
|
||||
buffer_snapshot: &MultiBufferSnapshot,
|
||||
thread: &AgentDiffThread,
|
||||
thread: &Entity<Thread>,
|
||||
ranges: Vec<Range<editor::Anchor>>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Editor>,
|
||||
@@ -433,8 +326,8 @@ fn keep_edits_in_ranges(
|
||||
for hunk in &diff_hunks_in_ranges {
|
||||
let buffer = multibuffer.read(cx).buffer(hunk.buffer_id);
|
||||
if let Some(buffer) = buffer {
|
||||
thread.action_log(cx).update(cx, |action_log, cx| {
|
||||
action_log.keep_edits_in_range(buffer, hunk.buffer_range.clone(), cx)
|
||||
thread.update(cx, |thread, cx| {
|
||||
thread.keep_edits_in_range(buffer, hunk.buffer_range.clone(), cx)
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -443,7 +336,7 @@ fn keep_edits_in_ranges(
|
||||
fn reject_edits_in_ranges(
|
||||
editor: &mut Editor,
|
||||
buffer_snapshot: &MultiBufferSnapshot,
|
||||
thread: &AgentDiffThread,
|
||||
thread: &Entity<Thread>,
|
||||
ranges: Vec<Range<editor::Anchor>>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Editor>,
|
||||
@@ -469,9 +362,8 @@ fn reject_edits_in_ranges(
|
||||
|
||||
for (buffer, ranges) in ranges_by_buffer {
|
||||
thread
|
||||
.action_log(cx)
|
||||
.update(cx, |action_log, cx| {
|
||||
action_log.reject_edits_in_ranges(buffer, ranges, cx)
|
||||
.update(cx, |thread, cx| {
|
||||
thread.reject_edits_in_ranges(buffer, ranges, cx)
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
}
|
||||
@@ -569,7 +461,7 @@ impl Item for AgentDiffPane {
|
||||
}
|
||||
|
||||
fn tab_content(&self, params: TabContentParams, _window: &Window, cx: &App) -> AnyElement {
|
||||
let summary = self.thread.summary(cx).unwrap_or("Agent Changes");
|
||||
let summary = self.thread.read(cx).summary().unwrap_or("Agent Changes");
|
||||
Label::new(format!("Review: {}", summary))
|
||||
.color(if params.selected {
|
||||
Color::Default
|
||||
@@ -749,7 +641,7 @@ impl Render for AgentDiffPane {
|
||||
}
|
||||
}
|
||||
|
||||
fn diff_hunk_controls(thread: &AgentDiffThread) -> editor::RenderDiffHunkControlsFn {
|
||||
fn diff_hunk_controls(thread: &Entity<Thread>) -> editor::RenderDiffHunkControlsFn {
|
||||
let thread = thread.clone();
|
||||
|
||||
Arc::new(
|
||||
@@ -784,7 +676,7 @@ fn render_diff_hunk_controls(
|
||||
hunk_range: Range<editor::Anchor>,
|
||||
is_created_file: bool,
|
||||
line_height: Pixels,
|
||||
thread: &AgentDiffThread,
|
||||
thread: &Entity<Thread>,
|
||||
editor: &Entity<Editor>,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
@@ -1220,8 +1112,11 @@ impl Render for AgentDiffToolbar {
|
||||
return Empty.into_any();
|
||||
};
|
||||
|
||||
let has_pending_edit_tool_use =
|
||||
agent_diff.read(cx).thread.has_pending_edit_tool_uses(cx);
|
||||
let has_pending_edit_tool_use = agent_diff
|
||||
.read(cx)
|
||||
.thread
|
||||
.read(cx)
|
||||
.has_pending_edit_tool_uses();
|
||||
|
||||
if has_pending_edit_tool_use {
|
||||
return div().px_2().child(spinner_icon).into_any();
|
||||
@@ -1292,8 +1187,8 @@ pub enum EditorState {
|
||||
}
|
||||
|
||||
struct WorkspaceThread {
|
||||
thread: WeakAgentDiffThread,
|
||||
_thread_subscriptions: (Subscription, Subscription),
|
||||
thread: WeakEntity<Thread>,
|
||||
_thread_subscriptions: [Subscription; 2],
|
||||
singleton_editors: HashMap<WeakEntity<Buffer>, HashMap<WeakEntity<Editor>, Subscription>>,
|
||||
_settings_subscription: Subscription,
|
||||
_workspace_subscription: Option<Subscription>,
|
||||
@@ -1317,23 +1212,23 @@ impl AgentDiff {
|
||||
|
||||
pub fn set_active_thread(
|
||||
workspace: &WeakEntity<Workspace>,
|
||||
thread: impl Into<AgentDiffThread>,
|
||||
thread: &Entity<Thread>,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) {
|
||||
Self::global(cx).update(cx, |this, cx| {
|
||||
this.register_active_thread_impl(workspace, thread.into(), window, cx);
|
||||
this.register_active_thread_impl(workspace, thread, window, cx);
|
||||
});
|
||||
}
|
||||
|
||||
fn register_active_thread_impl(
|
||||
&mut self,
|
||||
workspace: &WeakEntity<Workspace>,
|
||||
thread: AgentDiffThread,
|
||||
thread: &Entity<Thread>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let action_log = thread.action_log(cx).clone();
|
||||
let action_log = thread.read(cx).action_log().clone();
|
||||
|
||||
let action_log_subscription = cx.observe_in(&action_log, window, {
|
||||
let workspace = workspace.clone();
|
||||
@@ -1342,25 +1237,17 @@ impl AgentDiff {
|
||||
}
|
||||
});
|
||||
|
||||
let thread_subscription = match &thread {
|
||||
AgentDiffThread::Native(thread) => cx.subscribe_in(&thread, window, {
|
||||
let workspace = workspace.clone();
|
||||
move |this, _thread, event, window, cx| {
|
||||
this.handle_native_thread_event(&workspace, event, window, cx)
|
||||
}
|
||||
}),
|
||||
AgentDiffThread::AcpThread(thread) => cx.subscribe_in(&thread, window, {
|
||||
let workspace = workspace.clone();
|
||||
move |this, thread, event, window, cx| {
|
||||
this.handle_acp_thread_event(&workspace, thread, event, window, cx)
|
||||
}
|
||||
}),
|
||||
};
|
||||
let thread_subscription = cx.subscribe_in(&thread, window, {
|
||||
let workspace = workspace.clone();
|
||||
move |this, _thread, event, window, cx| {
|
||||
this.handle_thread_event(&workspace, event, window, cx)
|
||||
}
|
||||
});
|
||||
|
||||
if let Some(workspace_thread) = self.workspace_threads.get_mut(&workspace) {
|
||||
// replace thread and action log subscription, but keep editors
|
||||
workspace_thread.thread = thread.downgrade();
|
||||
workspace_thread._thread_subscriptions = (action_log_subscription, thread_subscription);
|
||||
workspace_thread._thread_subscriptions = [action_log_subscription, thread_subscription];
|
||||
self.update_reviewing_editors(&workspace, window, cx);
|
||||
return;
|
||||
}
|
||||
@@ -1385,7 +1272,7 @@ impl AgentDiff {
|
||||
workspace.clone(),
|
||||
WorkspaceThread {
|
||||
thread: thread.downgrade(),
|
||||
_thread_subscriptions: (action_log_subscription, thread_subscription),
|
||||
_thread_subscriptions: [action_log_subscription, thread_subscription],
|
||||
singleton_editors: HashMap::default(),
|
||||
_settings_subscription: settings_subscription,
|
||||
_workspace_subscription: workspace_subscription,
|
||||
@@ -1432,7 +1319,7 @@ impl AgentDiff {
|
||||
|
||||
fn register_review_action<T: Action>(
|
||||
workspace: &mut Workspace,
|
||||
review: impl Fn(&Entity<Editor>, &AgentDiffThread, &mut Window, &mut App) -> PostReviewState
|
||||
review: impl Fn(&Entity<Editor>, &Entity<Thread>, &mut Window, &mut App) -> PostReviewState
|
||||
+ 'static,
|
||||
this: &Entity<AgentDiff>,
|
||||
) {
|
||||
@@ -1451,7 +1338,7 @@ impl AgentDiff {
|
||||
});
|
||||
}
|
||||
|
||||
fn handle_native_thread_event(
|
||||
fn handle_thread_event(
|
||||
&mut self,
|
||||
workspace: &WeakEntity<Workspace>,
|
||||
event: &ThreadEvent,
|
||||
@@ -1493,40 +1380,6 @@ impl AgentDiff {
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_acp_thread_event(
|
||||
&mut self,
|
||||
workspace: &WeakEntity<Workspace>,
|
||||
thread: &Entity<AcpThread>,
|
||||
event: &AcpThreadEvent,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
match event {
|
||||
AcpThreadEvent::NewEntry => {
|
||||
if thread
|
||||
.read(cx)
|
||||
.entries()
|
||||
.last()
|
||||
.and_then(|entry| entry.diff())
|
||||
.is_some()
|
||||
{
|
||||
self.update_reviewing_editors(workspace, window, cx);
|
||||
}
|
||||
}
|
||||
AcpThreadEvent::EntryUpdated(ix) => {
|
||||
if thread
|
||||
.read(cx)
|
||||
.entries()
|
||||
.get(*ix)
|
||||
.and_then(|entry| entry.diff())
|
||||
.is_some()
|
||||
{
|
||||
self.update_reviewing_editors(workspace, window, cx);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_workspace_event(
|
||||
&mut self,
|
||||
workspace: &Entity<Workspace>,
|
||||
@@ -1632,7 +1485,7 @@ impl AgentDiff {
|
||||
return;
|
||||
};
|
||||
|
||||
let action_log = thread.action_log(cx);
|
||||
let action_log = thread.read(cx).action_log();
|
||||
let changed_buffers = action_log.read(cx).changed_buffers(cx);
|
||||
|
||||
let mut unaffected = self.reviewing_editors.clone();
|
||||
@@ -1657,7 +1510,7 @@ impl AgentDiff {
|
||||
multibuffer.add_diff(diff_handle.clone(), cx);
|
||||
});
|
||||
|
||||
let new_state = if thread.is_generating(cx) {
|
||||
let new_state = if thread.read(cx).is_generating() {
|
||||
EditorState::Generating
|
||||
} else {
|
||||
EditorState::Reviewing
|
||||
@@ -1753,7 +1606,7 @@ impl AgentDiff {
|
||||
|
||||
fn keep_all(
|
||||
editor: &Entity<Editor>,
|
||||
thread: &AgentDiffThread,
|
||||
thread: &Entity<Thread>,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) -> PostReviewState {
|
||||
@@ -1773,7 +1626,7 @@ impl AgentDiff {
|
||||
|
||||
fn reject_all(
|
||||
editor: &Entity<Editor>,
|
||||
thread: &AgentDiffThread,
|
||||
thread: &Entity<Thread>,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) -> PostReviewState {
|
||||
@@ -1793,7 +1646,7 @@ impl AgentDiff {
|
||||
|
||||
fn keep(
|
||||
editor: &Entity<Editor>,
|
||||
thread: &AgentDiffThread,
|
||||
thread: &Entity<Thread>,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) -> PostReviewState {
|
||||
@@ -1806,7 +1659,7 @@ impl AgentDiff {
|
||||
|
||||
fn reject(
|
||||
editor: &Entity<Editor>,
|
||||
thread: &AgentDiffThread,
|
||||
thread: &Entity<Thread>,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) -> PostReviewState {
|
||||
@@ -1829,7 +1682,7 @@ impl AgentDiff {
|
||||
fn review_in_active_editor(
|
||||
&mut self,
|
||||
workspace: &mut Workspace,
|
||||
review: impl Fn(&Entity<Editor>, &AgentDiffThread, &mut Window, &mut App) -> PostReviewState,
|
||||
review: impl Fn(&Entity<Editor>, &Entity<Thread>, &mut Window, &mut App) -> PostReviewState,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Option<Task<Result<()>>> {
|
||||
@@ -1850,7 +1703,7 @@ impl AgentDiff {
|
||||
|
||||
if let PostReviewState::AllReviewed = review(&editor, &thread, window, cx) {
|
||||
if let Some(curr_buffer) = editor.read(cx).buffer().read(cx).as_singleton() {
|
||||
let changed_buffers = thread.action_log(cx).read(cx).changed_buffers(cx);
|
||||
let changed_buffers = thread.read(cx).action_log().read(cx).changed_buffers(cx);
|
||||
|
||||
let mut keys = changed_buffers.keys().cycle();
|
||||
keys.find(|k| *k == &curr_buffer);
|
||||
@@ -1948,9 +1801,8 @@ mod tests {
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
let thread =
|
||||
AgentDiffThread::Native(thread_store.update(cx, |store, cx| store.create_thread(cx)));
|
||||
let action_log = cx.read(|cx| thread.action_log(cx));
|
||||
let thread = thread_store.update(cx, |store, cx| store.create_thread(cx));
|
||||
let action_log = thread.read_with(cx, |thread, _| thread.action_log().clone());
|
||||
|
||||
let (workspace, cx) =
|
||||
cx.add_window_view(|window, cx| Workspace::test_new(project.clone(), window, cx));
|
||||
@@ -2136,9 +1988,8 @@ mod tests {
|
||||
});
|
||||
|
||||
// Set the active thread
|
||||
let thread = AgentDiffThread::Native(thread);
|
||||
cx.update(|window, cx| {
|
||||
AgentDiff::set_active_thread(&workspace.downgrade(), thread.clone(), window, cx)
|
||||
AgentDiff::set_active_thread(&workspace.downgrade(), &thread, window, cx)
|
||||
});
|
||||
|
||||
let buffer1 = project
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
use std::cell::RefCell;
|
||||
use std::ops::Range;
|
||||
use std::path::Path;
|
||||
use std::rc::Rc;
|
||||
@@ -9,7 +8,6 @@ use db::kvp::{Dismissable, KEY_VALUE_STORE};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::NewAcpThread;
|
||||
use crate::agent_diff::AgentDiffThread;
|
||||
use crate::language_model_selector::ToggleModelSelector;
|
||||
use crate::{
|
||||
AddContextServer, AgentDiffPane, ContinueThread, ContinueWithBurnMode,
|
||||
@@ -434,8 +432,6 @@ pub struct AgentPanel {
|
||||
configuration_subscription: Option<Subscription>,
|
||||
local_timezone: UtcOffset,
|
||||
active_view: ActiveView,
|
||||
acp_message_history:
|
||||
Rc<RefCell<crate::acp::MessageHistory<agentic_coding_protocol::SendUserMessageParams>>>,
|
||||
previous_view: Option<ActiveView>,
|
||||
history_store: Entity<HistoryStore>,
|
||||
history: Entity<ThreadHistory>,
|
||||
@@ -628,7 +624,7 @@ impl AgentPanel {
|
||||
}
|
||||
};
|
||||
|
||||
AgentDiff::set_active_thread(&workspace, thread.clone(), window, cx);
|
||||
AgentDiff::set_active_thread(&workspace, &thread, window, cx);
|
||||
|
||||
let weak_panel = weak_self.clone();
|
||||
|
||||
@@ -702,7 +698,6 @@ impl AgentPanel {
|
||||
.unwrap(),
|
||||
inline_assist_context_store,
|
||||
previous_view: None,
|
||||
acp_message_history: Default::default(),
|
||||
history_store: history_store.clone(),
|
||||
history: cx.new(|cx| ThreadHistory::new(weak_self, history_store, window, cx)),
|
||||
hovered_recent_history_item: None,
|
||||
@@ -775,10 +770,13 @@ impl AgentPanel {
|
||||
}
|
||||
|
||||
fn new_thread(&mut self, action: &NewThread, window: &mut Window, cx: &mut Context<Self>) {
|
||||
// Preserve chat box text when using creating new thread
|
||||
let preserved_text = self
|
||||
.active_message_editor()
|
||||
.map(|editor| editor.read(cx).get_text(cx).trim().to_string());
|
||||
// Preserve chat box text when using creating new thread from summary'
|
||||
let preserved_text = if action.from_thread_id.is_some() {
|
||||
self.active_message_editor()
|
||||
.map(|editor| editor.read(cx).get_text(cx).trim().to_string())
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let thread = self
|
||||
.thread_store
|
||||
@@ -850,7 +848,7 @@ impl AgentPanel {
|
||||
let thread_view = ActiveView::thread(active_thread.clone(), message_editor, window, cx);
|
||||
self.set_active_view(thread_view, window, cx);
|
||||
|
||||
AgentDiff::set_active_thread(&self.workspace, thread.clone(), window, cx);
|
||||
AgentDiff::set_active_thread(&self.workspace, &thread, window, cx);
|
||||
}
|
||||
|
||||
fn new_prompt_editor(&mut self, window: &mut Window, cx: &mut Context<Self>) {
|
||||
@@ -892,30 +890,14 @@ impl AgentPanel {
|
||||
fn new_gemini_thread(&mut self, window: &mut Window, cx: &mut Context<Self>) {
|
||||
let workspace = self.workspace.clone();
|
||||
let project = self.project.clone();
|
||||
let message_history = self.acp_message_history.clone();
|
||||
|
||||
cx.spawn_in(window, async move |this, cx| {
|
||||
let thread_view = cx.new_window_entity(|window, cx| {
|
||||
crate::acp::AcpThreadView::new(
|
||||
workspace.clone(),
|
||||
project,
|
||||
message_history,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
crate::acp::AcpThreadView::new(workspace, project, window, cx)
|
||||
})?;
|
||||
this.update_in(cx, |this, window, cx| {
|
||||
this.set_active_view(
|
||||
ActiveView::AcpThread {
|
||||
thread_view: thread_view.clone(),
|
||||
},
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
this.set_active_view(ActiveView::AcpThread { thread_view }, window, cx);
|
||||
})
|
||||
.log_err();
|
||||
|
||||
anyhow::Ok(())
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
@@ -1071,7 +1053,7 @@ impl AgentPanel {
|
||||
|
||||
let thread_view = ActiveView::thread(active_thread.clone(), message_editor, window, cx);
|
||||
self.set_active_view(thread_view, window, cx);
|
||||
AgentDiff::set_active_thread(&self.workspace, thread.clone(), window, cx);
|
||||
AgentDiff::set_active_thread(&self.workspace, &thread, window, cx);
|
||||
}
|
||||
|
||||
pub fn go_back(&mut self, _: &workspace::GoBack, window: &mut Window, cx: &mut Context<Self>) {
|
||||
@@ -1202,12 +1184,7 @@ impl AgentPanel {
|
||||
let thread = thread.read(cx).thread().clone();
|
||||
self.workspace
|
||||
.update(cx, |workspace, cx| {
|
||||
AgentDiffPane::deploy_in_workspace(
|
||||
AgentDiffThread::Native(thread),
|
||||
workspace,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
AgentDiffPane::deploy_in_workspace(thread, workspace, window, cx)
|
||||
})
|
||||
.log_err();
|
||||
}
|
||||
@@ -1443,8 +1420,6 @@ impl AgentPanel {
|
||||
self.active_view = new_view;
|
||||
}
|
||||
|
||||
self.acp_message_history.borrow_mut().reset_position();
|
||||
|
||||
self.focus_handle(cx).focus(window);
|
||||
}
|
||||
|
||||
|
||||
@@ -2,7 +2,6 @@ use std::collections::BTreeMap;
|
||||
use std::rc::Rc;
|
||||
use std::sync::Arc;
|
||||
|
||||
use crate::agent_diff::AgentDiffThread;
|
||||
use crate::agent_model_selector::AgentModelSelector;
|
||||
use crate::language_model_selector::ToggleModelSelector;
|
||||
use crate::tool_compatibility::{IncompatibleToolsState, IncompatibleToolsTooltip};
|
||||
@@ -476,12 +475,9 @@ impl MessageEditor {
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
if let Ok(diff) = AgentDiffPane::deploy(
|
||||
AgentDiffThread::Native(self.thread.clone()),
|
||||
self.workspace.clone(),
|
||||
window,
|
||||
cx,
|
||||
) {
|
||||
if let Ok(diff) =
|
||||
AgentDiffPane::deploy(self.thread.clone(), self.workspace.clone(), window, cx)
|
||||
{
|
||||
let path_key = multi_buffer::PathKey::for_buffer(&buffer, cx);
|
||||
diff.update(cx, |diff, cx| diff.move_to_path(path_key, window, cx));
|
||||
}
|
||||
|
||||
@@ -19,6 +19,5 @@ net.workspace = true
parking_lot.workspace = true
smol.workspace = true
tempfile.workspace = true
unindent.workspace = true
util.workspace = true
workspace-hack.workspace = true

@@ -40,21 +40,11 @@ impl AskPassDelegate {
self.tx.send((prompt, tx)).await?;
Ok(rx.await?)
}

pub fn new_always_failing() -> Self {
let (tx, _rx) = mpsc::unbounded::<(String, oneshot::Sender<String>)>();
Self {
tx,
_task: Task::ready(()),
}
}
}
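The delegate above forwards each prompt as a `(String, oneshot::Sender<String>)` pair and awaits the reply. A self-contained sketch of that round trip using the `futures` crate directly; gpui's executor and the real prompt text are not modeled here:

```rust
use futures::channel::{mpsc, oneshot};
use futures::{executor::block_on, StreamExt};

fn main() {
    let (tx, mut rx) = mpsc::unbounded::<(String, oneshot::Sender<String>)>();

    block_on(async move {
        // Delegate side: answer one prompt, like the task spawned in AskPassDelegate.
        let delegate = async {
            if let Some((prompt, reply)) = rx.next().await {
                assert_eq!(prompt, "Password for 'git@example.com':");
                reply.send("hunter2".into()).ok();
            }
        };

        // Session side: send the prompt and await the secret.
        let session = async {
            let (reply_tx, reply_rx) = oneshot::channel();
            tx.unbounded_send(("Password for 'git@example.com':".into(), reply_tx))
                .unwrap();
            let secret = reply_rx.await.unwrap();
            assert_eq!(secret, "hunter2");
        };

        futures::join!(delegate, session);
    });
}
```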
|
||||
|
||||
pub struct AskPassSession {
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
script_path: std::path::PathBuf,
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
gpg_script_path: std::path::PathBuf,
|
||||
#[cfg(target_os = "windows")]
|
||||
askpass_helper: String,
|
||||
#[cfg(target_os = "windows")]
|
||||
@@ -69,9 +59,6 @@ const ASKPASS_SCRIPT_NAME: &str = "askpass.sh";
|
||||
#[cfg(target_os = "windows")]
|
||||
const ASKPASS_SCRIPT_NAME: &str = "askpass.ps1";
|
||||
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
const GPG_SCRIPT_NAME: &str = "gpg.sh";
|
||||
|
||||
impl AskPassSession {
|
||||
/// This will create a new AskPassSession.
|
||||
/// You must retain this session until the master process exits.
|
||||
@@ -85,8 +72,6 @@ impl AskPassSession {
|
||||
let temp_dir = tempfile::Builder::new().prefix("zed-askpass").tempdir()?;
|
||||
let askpass_socket = temp_dir.path().join("askpass.sock");
|
||||
let askpass_script_path = temp_dir.path().join(ASKPASS_SCRIPT_NAME);
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
let gpg_script_path = temp_dir.path().join(GPG_SCRIPT_NAME);
|
||||
let (askpass_opened_tx, askpass_opened_rx) = oneshot::channel::<()>();
|
||||
let listener = UnixListener::bind(&askpass_socket).context("creating askpass socket")?;
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
@@ -150,20 +135,9 @@ impl AskPassSession {
|
||||
askpass_script_path.display()
|
||||
);
|
||||
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
{
|
||||
let gpg_script = generate_gpg_script();
|
||||
fs::write(&gpg_script_path, gpg_script)
|
||||
.await
|
||||
.with_context(|| format!("creating gpg wrapper script at {gpg_script_path:?}"))?;
|
||||
make_file_executable(&gpg_script_path).await?;
|
||||
}
|
||||
|
||||
Ok(Self {
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
script_path: askpass_script_path,
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
gpg_script_path,
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
secret,
|
||||
@@ -186,19 +160,6 @@ impl AskPassSession {
|
||||
&self.askpass_helper
|
||||
}
|
||||
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
pub fn gpg_script_path(&self) -> Option<impl AsRef<OsStr>> {
|
||||
Some(&self.gpg_script_path)
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
pub fn gpg_script_path(&self) -> Option<impl AsRef<OsStr>> {
|
||||
// TODO implement wrapping GPG on Windows. This is more difficult than on Unix
|
||||
// because we can't use --passphrase-fd with a nonstandard FD, and both --passphrase
|
||||
// and --passphrase-file are insecure.
|
||||
None::<std::path::PathBuf>
|
||||
}
|
||||
|
||||
// This will run the askpass task forever, resolving as many authentication requests as needed.
|
||||
// The caller is responsible for examining the result of their own commands and cancelling this
|
||||
// future when this is no longer needed. Note that this can only be called once, but due to the
|
||||
@@ -302,23 +263,3 @@ fn generate_askpass_script(zed_path: &std::path::Path, askpass_socket: &std::pat
askpass_socket = askpass_socket.display(),
)
}

#[inline]
#[cfg(not(target_os = "windows"))]
fn generate_gpg_script() -> String {
use unindent::Unindent as _;

r#"
#!/bin/sh
set -eu

unset GIT_CONFIG_PARAMETERS
GPG_PROGRAM=$(git config gpg.program || echo 'gpg')
PROMPT="Enter passphrase to unlock GPG key:"
PASSPHRASE=$(${GIT_ASKPASS} "${PROMPT}")

exec "${GPG_PROGRAM}" --batch --no-tty --yes --passphrase-fd 3 --pinentry-mode loopback "$@" 3<<EOF
${PASSPHRASE}
EOF
"#.unindent()
}
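How the generated wrapper is attached to a git invocation is not shown in this hunk; presumably the caller points `gpg.program` at it and sets `GIT_ASKPASS` so the script can fetch the passphrase. A hedged sketch of that wiring, with placeholder paths rather than the ones AskPassSession actually creates:

```rust
use std::path::Path;
use std::process::Command;

// Placeholder paths; the real ones come from AskPassSession's temp directory.
fn git_commit_signed(repo: &Path, askpass: &Path, gpg_wrapper: &Path) -> std::io::Result<std::process::ExitStatus> {
    Command::new("git")
        .current_dir(repo)
        .env("GIT_ASKPASS", askpass) // consumed by the wrapper via ${GIT_ASKPASS}
        .arg("-c")
        .arg(format!("gpg.program={}", gpg_wrapper.display()))
        .args(["commit", "-S", "-m", "signed commit"])
        .status()
}

fn main() -> std::io::Result<()> {
    let status = git_commit_signed(
        Path::new("."),
        Path::new("/tmp/zed-askpass/askpass.sh"),
        Path::new("/tmp/zed-askpass/gpg.sh"),
    )?;
    println!("git exited with {status}");
    Ok(())
}
```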
|
||||
|
||||
@@ -34,11 +34,6 @@ impl ExtensionSlashCommandProxy for SlashCommandRegistryProxy {
|
||||
self.slash_command_registry
|
||||
.register_command(ExtensionSlashCommand::new(extension, command), false)
|
||||
}
|
||||
|
||||
fn unregister_slash_command(&self, command_name: Arc<str>) {
|
||||
self.slash_command_registry
|
||||
.unregister_command_by_name(&command_name)
|
||||
}
|
||||
}
|
||||
|
||||
/// An adapter that allows an [`LspAdapterDelegate`] to be used as a [`WorktreeDelegate`].
|
||||
|
||||
@@ -8,7 +8,7 @@ use language::{Anchor, Buffer, BufferEvent, DiskState, Point, ToPoint};
|
||||
use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle};
|
||||
use std::{cmp, ops::Range, sync::Arc};
|
||||
use text::{Edit, Patch, Rope};
|
||||
use util::{RangeExt, ResultExt as _};
|
||||
use util::RangeExt;
|
||||
|
||||
/// Tracks actions performed by tools in a thread
|
||||
pub struct ActionLog {
|
||||
@@ -47,10 +47,6 @@ impl ActionLog {
|
||||
self.edited_since_project_diagnostics_check
|
||||
}
|
||||
|
||||
pub fn latest_snapshot(&self, buffer: &Entity<Buffer>) -> Option<text::BufferSnapshot> {
|
||||
Some(self.tracked_buffers.get(buffer)?.snapshot.clone())
|
||||
}
|
||||
|
||||
fn track_buffer_internal(
|
||||
&mut self,
|
||||
buffer: Entity<Buffer>,
|
||||
@@ -719,22 +715,6 @@ impl ActionLog {
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
pub fn reject_all_edits(&mut self, cx: &mut Context<Self>) -> Task<()> {
let futures = self.changed_buffers(cx).into_keys().map(|buffer| {
let reject = self.reject_edits_in_ranges(buffer, vec![Anchor::MIN..Anchor::MAX], cx);

async move {
reject.await.log_err();
}
});

let task = futures::future::join_all(futures);

cx.spawn(async move |_, _| {
task.await;
})
}
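The removed method fans out one reject future per changed buffer and joins them into a single task, logging failures instead of aborting the batch. The same shape with plain `futures`:

```rust
use futures::executor::block_on;
use futures::future::join_all;

// Stand-in for rejecting the edits in one buffer; id 2 simulates a failure.
async fn reject_one(id: u32) -> Result<(), String> {
    if id == 2 { Err(format!("buffer {id} already closed")) } else { Ok(()) }
}

fn main() {
    let futures = (1u32..=3).map(|id| async move {
        if let Err(error) = reject_one(id).await {
            eprintln!("reject failed: {error}"); // stand-in for .log_err()
        }
    });
    block_on(join_all(futures));
}
```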
|
||||
|
||||
/// Returns the set of buffers that contain edits that haven't been reviewed by the user.
|
||||
pub fn changed_buffers(&self, cx: &App) -> BTreeMap<Entity<Buffer>, Entity<BufferDiff>> {
|
||||
self.tracked_buffers
|
||||
|
||||
@@ -18,6 +18,7 @@ use serde::{Deserialize, Serialize};
use settings::Settings;
use std::sync::Arc;
use ui::IconName;
use util::markdown::MarkdownInlineCode;

/// If the model requests to read a file whose size exceeds this, then
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
@@ -77,21 +78,11 @@ impl Tool for ReadFileTool {
fn ui_text(&self, input: &serde_json::Value) -> String {
match serde_json::from_value::<ReadFileToolInput>(input.clone()) {
Ok(input) => {
let path = &input.path;
let path = MarkdownInlineCode(&input.path);
match (input.start_line, input.end_line) {
(Some(start), Some(end)) => {
format!(
"[Read file `{}` (lines {}-{})](@selection:{}:({}-{}))",
path, start, end, path, start, end
)
}
(Some(start), None) => {
format!(
"[Read file `{}` (from line {})](@selection:{}:({}-{}))",
path, start, path, start, start
)
}
_ => format!("[Read file `{}`](@file:{})", path, path),
(Some(start), None) => format!("Read file {path} (from line {start})"),
(Some(start), Some(end)) => format!("Read file {path} (lines {start}-{end})"),
_ => format!("Read file {path}"),
}
}
Err(_) => "Read file".to_string(),

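The new `ui_text` labels interpolate the path through `util::markdown::MarkdownInlineCode`. Its exact escaping is not shown in this diff, so the sketch below uses a hypothetical stand-in that simply wraps the text in backticks:

```rust
use std::fmt;

// Hypothetical stand-in for util::markdown::MarkdownInlineCode.
struct InlineCode<'a>(&'a str);

impl fmt::Display for InlineCode<'_> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "`{}`", self.0)
    }
}

// Same label logic as the rewritten match arms above.
fn read_file_label(path: &str, start_line: Option<u32>, end_line: Option<u32>) -> String {
    let path = InlineCode(path);
    match (start_line, end_line) {
        (Some(start), None) => format!("Read file {path} (from line {start})"),
        (Some(start), Some(end)) => format!("Read file {path} (lines {start}-{end})"),
        _ => format!("Read file {path}"),
    }
}

fn main() {
    assert_eq!(
        read_file_label("src/main.rs", Some(10), Some(20)),
        "Read file `src/main.rs` (lines 10-20)"
    );
}
```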
@@ -1389,17 +1389,10 @@ impl Room {
let sources = cx.screen_capture_sources();

cx.spawn(async move |this, cx| {
let sources = sources
.await
.map_err(|error| error.into())
.and_then(|sources| sources);
let source =
sources.and_then(|sources| sources.into_iter().next().context("no display found"));
let sources = sources.await??;
let source = sources.first().context("no display found")?;

let publication = match source {
Ok(source) => participant.publish_screenshare_track(&*source, cx).await,
Err(error) => Err(error),
};
let publication = participant.publish_screenshare_track(&**source, cx).await;

this.update(cx, |this, cx| {
let live_kit = this

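The rewritten screen-share code collapses a nested `Result` with `??` and takes the first capture source with `.first()`. The same flattening in isolation, with a synchronous stand-in for the capture call:

```rust
// Outer error: the channel/task failing; inner error: the capture API itself.
fn fetch_sources() -> Result<Result<Vec<&'static str>, String>, String> {
    Ok(Ok(vec!["display-1", "display-2"]))
}

fn first_source() -> Result<&'static str, String> {
    // `??` unwraps both layers in a single expression.
    let sources = fetch_sources()??;
    sources
        .first()
        .copied()
        .ok_or_else(|| "no display found".to_string())
}

fn main() {
    assert_eq!(first_source().unwrap(), "display-1");
}
```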
@@ -1,33 +1,12 @@
|
||||
{
|
||||
"admins": [
|
||||
"nathansobo",
|
||||
"maxbrunsfeld",
|
||||
"as-cii",
|
||||
"JosephTLyons",
|
||||
"maxdeviant",
|
||||
"SomeoneToIgnore",
|
||||
"maxbrunsfeld",
|
||||
"iamnbutler",
|
||||
"mikayla-maki",
|
||||
"agu-z",
|
||||
"osiewicz",
|
||||
"ConradIrwin",
|
||||
"benbrandt",
|
||||
"bennetbo",
|
||||
"smitbarmase",
|
||||
"notpeter",
|
||||
"rgbkrk",
|
||||
"JunkuiZhang",
|
||||
"Anthony-Eid",
|
||||
"rtfeldman",
|
||||
"danilo-leal",
|
||||
"MrSubidubi",
|
||||
"cole-miller",
|
||||
"osyvokon",
|
||||
"probably-neb",
|
||||
"mgsloan",
|
||||
"P1n3appl3",
|
||||
"mslzed",
|
||||
"franciskafyi",
|
||||
"katie-z-geer"
|
||||
"JosephTLyons",
|
||||
"rgbkrk"
|
||||
],
|
||||
"channels": ["zed"]
|
||||
}
|
||||
|
||||
@@ -2836,115 +2836,60 @@ async fn make_update_user_plan_message(
|
||||
account_too_young: Some(account_too_young),
|
||||
has_overdue_invoices: billing_customer
|
||||
.map(|billing_customer| billing_customer.has_overdue_invoices),
|
||||
usage: Some(
|
||||
usage
|
||||
.map(|usage| subscription_usage_to_proto(plan, usage, &feature_flags))
|
||||
.unwrap_or_else(|| make_default_subscription_usage(plan, &feature_flags)),
|
||||
),
|
||||
})
|
||||
}
|
||||
|
||||
fn model_requests_limit(
|
||||
plan: zed_llm_client::Plan,
|
||||
feature_flags: &Vec<String>,
|
||||
) -> zed_llm_client::UsageLimit {
|
||||
match plan.model_requests_limit() {
|
||||
zed_llm_client::UsageLimit::Limited(limit) => {
|
||||
let limit = if plan == zed_llm_client::Plan::ZedProTrial
|
||||
&& feature_flags
|
||||
.iter()
|
||||
.any(|flag| flag == AGENT_EXTENDED_TRIAL_FEATURE_FLAG)
|
||||
{
|
||||
1_000
|
||||
} else {
|
||||
limit
|
||||
usage: usage.map(|usage| {
|
||||
let plan = match plan {
|
||||
proto::Plan::Free => zed_llm_client::Plan::ZedFree,
|
||||
proto::Plan::ZedPro => zed_llm_client::Plan::ZedPro,
|
||||
proto::Plan::ZedProTrial => zed_llm_client::Plan::ZedProTrial,
|
||||
};
|
||||
|
||||
zed_llm_client::UsageLimit::Limited(limit)
|
||||
}
|
||||
zed_llm_client::UsageLimit::Unlimited => zed_llm_client::UsageLimit::Unlimited,
|
||||
}
|
||||
}
|
||||
|
||||
fn subscription_usage_to_proto(
|
||||
plan: proto::Plan,
|
||||
usage: crate::llm::db::subscription_usage::Model,
|
||||
feature_flags: &Vec<String>,
|
||||
) -> proto::SubscriptionUsage {
|
||||
let plan = match plan {
|
||||
proto::Plan::Free => zed_llm_client::Plan::ZedFree,
|
||||
proto::Plan::ZedPro => zed_llm_client::Plan::ZedPro,
|
||||
proto::Plan::ZedProTrial => zed_llm_client::Plan::ZedProTrial,
|
||||
};
|
||||
|
||||
proto::SubscriptionUsage {
|
||||
model_requests_usage_amount: usage.model_requests as u32,
|
||||
model_requests_usage_limit: Some(proto::UsageLimit {
|
||||
variant: Some(match model_requests_limit(plan, feature_flags) {
|
||||
let model_requests_limit = match plan.model_requests_limit() {
|
||||
zed_llm_client::UsageLimit::Limited(limit) => {
|
||||
proto::usage_limit::Variant::Limited(proto::usage_limit::Limited {
|
||||
limit: limit as u32,
|
||||
})
|
||||
}
|
||||
zed_llm_client::UsageLimit::Unlimited => {
|
||||
proto::usage_limit::Variant::Unlimited(proto::usage_limit::Unlimited {})
|
||||
}
|
||||
}),
|
||||
}),
|
||||
edit_predictions_usage_amount: usage.edit_predictions as u32,
|
||||
edit_predictions_usage_limit: Some(proto::UsageLimit {
|
||||
variant: Some(match plan.edit_predictions_limit() {
|
||||
zed_llm_client::UsageLimit::Limited(limit) => {
|
||||
proto::usage_limit::Variant::Limited(proto::usage_limit::Limited {
|
||||
limit: limit as u32,
|
||||
})
|
||||
}
|
||||
zed_llm_client::UsageLimit::Unlimited => {
|
||||
proto::usage_limit::Variant::Unlimited(proto::usage_limit::Unlimited {})
|
||||
}
|
||||
}),
|
||||
}),
|
||||
}
|
||||
}
|
||||
let limit = if plan == zed_llm_client::Plan::ZedProTrial
|
||||
&& feature_flags
|
||||
.iter()
|
||||
.any(|flag| flag == AGENT_EXTENDED_TRIAL_FEATURE_FLAG)
|
||||
{
|
||||
1_000
|
||||
} else {
|
||||
limit
|
||||
};
|
||||
|
||||
fn make_default_subscription_usage(
|
||||
plan: proto::Plan,
|
||||
feature_flags: &Vec<String>,
|
||||
) -> proto::SubscriptionUsage {
|
||||
let plan = match plan {
|
||||
proto::Plan::Free => zed_llm_client::Plan::ZedFree,
|
||||
proto::Plan::ZedPro => zed_llm_client::Plan::ZedPro,
|
||||
proto::Plan::ZedProTrial => zed_llm_client::Plan::ZedProTrial,
|
||||
};
|
||||
zed_llm_client::UsageLimit::Limited(limit)
|
||||
}
|
||||
zed_llm_client::UsageLimit::Unlimited => zed_llm_client::UsageLimit::Unlimited,
|
||||
};
|
||||
|
||||
proto::SubscriptionUsage {
|
||||
model_requests_usage_amount: 0,
|
||||
model_requests_usage_limit: Some(proto::UsageLimit {
|
||||
variant: Some(match model_requests_limit(plan, feature_flags) {
|
||||
zed_llm_client::UsageLimit::Limited(limit) => {
|
||||
proto::usage_limit::Variant::Limited(proto::usage_limit::Limited {
|
||||
limit: limit as u32,
|
||||
})
|
||||
}
|
||||
zed_llm_client::UsageLimit::Unlimited => {
|
||||
proto::usage_limit::Variant::Unlimited(proto::usage_limit::Unlimited {})
|
||||
}
|
||||
}),
|
||||
proto::SubscriptionUsage {
|
||||
model_requests_usage_amount: usage.model_requests as u32,
|
||||
model_requests_usage_limit: Some(proto::UsageLimit {
|
||||
variant: Some(match model_requests_limit {
|
||||
zed_llm_client::UsageLimit::Limited(limit) => {
|
||||
proto::usage_limit::Variant::Limited(proto::usage_limit::Limited {
|
||||
limit: limit as u32,
|
||||
})
|
||||
}
|
||||
zed_llm_client::UsageLimit::Unlimited => {
|
||||
proto::usage_limit::Variant::Unlimited(proto::usage_limit::Unlimited {})
|
||||
}
|
||||
}),
|
||||
}),
|
||||
edit_predictions_usage_amount: usage.edit_predictions as u32,
|
||||
edit_predictions_usage_limit: Some(proto::UsageLimit {
|
||||
variant: Some(match plan.edit_predictions_limit() {
|
||||
zed_llm_client::UsageLimit::Limited(limit) => {
|
||||
proto::usage_limit::Variant::Limited(proto::usage_limit::Limited {
|
||||
limit: limit as u32,
|
||||
})
|
||||
}
|
||||
zed_llm_client::UsageLimit::Unlimited => {
|
||||
proto::usage_limit::Variant::Unlimited(proto::usage_limit::Unlimited {})
|
||||
}
|
||||
}),
|
||||
}),
|
||||
}
|
||||
}),
|
||||
edit_predictions_usage_amount: 0,
|
||||
edit_predictions_usage_limit: Some(proto::UsageLimit {
|
||||
variant: Some(match plan.edit_predictions_limit() {
|
||||
zed_llm_client::UsageLimit::Limited(limit) => {
|
||||
proto::usage_limit::Variant::Limited(proto::usage_limit::Limited {
|
||||
limit: limit as u32,
|
||||
})
|
||||
}
|
||||
zed_llm_client::UsageLimit::Unlimited => {
|
||||
proto::usage_limit::Variant::Unlimited(proto::usage_limit::Unlimited {})
|
||||
}
|
||||
}),
|
||||
}),
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
async fn update_user_plan(session: &Session) -> Result<()> {

@@ -19,8 +19,8 @@ use crate::stripe_client::{
StripeCustomerId, StripeCustomerUpdate, StripeCustomerUpdateAddress, StripeCustomerUpdateName,
StripeMeter, StripePrice, StripePriceId, StripeSubscription, StripeSubscriptionId,
StripeSubscriptionTrialSettings, StripeSubscriptionTrialSettingsEndBehavior,
StripeSubscriptionTrialSettingsEndBehaviorMissingPaymentMethod, StripeTaxIdCollection,
UpdateSubscriptionItems, UpdateSubscriptionParams,
StripeSubscriptionTrialSettingsEndBehaviorMissingPaymentMethod, UpdateSubscriptionItems,
UpdateSubscriptionParams,
};

pub struct StripeBilling {
@@ -252,7 +252,6 @@ impl StripeBilling {
name: Some(StripeCustomerUpdateName::Auto),
shipping: None,
});
params.tax_id_collection = Some(StripeTaxIdCollection { enabled: true });

let session = self.client.create_checkout_session(params).await?;
Ok(session.url.context("no checkout session URL")?)
@@ -312,7 +311,6 @@ impl StripeBilling {
name: Some(StripeCustomerUpdateName::Auto),
shipping: None,
});
params.tax_id_collection = Some(StripeTaxIdCollection { enabled: true });

let session = self.client.create_checkout_session(params).await?;
Ok(session.url.context("no checkout session URL")?)

@@ -190,7 +190,6 @@ pub struct StripeCreateCheckoutSessionParams<'a> {
pub success_url: Option<&'a str>,
pub billing_address_collection: Option<StripeBillingAddressCollection>,
pub customer_update: Option<StripeCustomerUpdate>,
pub tax_id_collection: Option<StripeTaxIdCollection>,
}

#[derive(Debug, PartialEq, Eq, Clone, Copy)]
@@ -219,11 +218,6 @@ pub struct StripeCreateCheckoutSessionSubscriptionData {
pub trial_settings: Option<StripeSubscriptionTrialSettings>,
}

#[derive(Debug, PartialEq, Clone)]
pub struct StripeTaxIdCollection {
pub enabled: bool,
}

#[derive(Debug)]
pub struct StripeCheckoutSession {
pub url: Option<String>,

@@ -14,8 +14,8 @@ use crate::stripe_client::{
StripeCreateCheckoutSessionSubscriptionData, StripeCreateMeterEventParams,
StripeCreateSubscriptionParams, StripeCustomer, StripeCustomerId, StripeCustomerUpdate,
StripeMeter, StripeMeterId, StripePrice, StripePriceId, StripeSubscription,
StripeSubscriptionId, StripeSubscriptionItem, StripeSubscriptionItemId, StripeTaxIdCollection,
UpdateCustomerParams, UpdateSubscriptionParams,
StripeSubscriptionId, StripeSubscriptionItem, StripeSubscriptionItemId, UpdateCustomerParams,
UpdateSubscriptionParams,
};

#[derive(Debug, Clone)]
@@ -38,7 +38,6 @@ pub struct StripeCreateCheckoutSessionCall {
pub success_url: Option<String>,
pub billing_address_collection: Option<StripeBillingAddressCollection>,
pub customer_update: Option<StripeCustomerUpdate>,
pub tax_id_collection: Option<StripeTaxIdCollection>,
}

pub struct FakeStripeClient {
@@ -237,7 +236,6 @@ impl StripeClient for FakeStripeClient {
success_url: params.success_url.map(|url| url.to_string()),
billing_address_collection: params.billing_address_collection,
customer_update: params.customer_update,
tax_id_collection: params.tax_id_collection,
});

Ok(StripeCheckoutSession {

@@ -27,8 +27,8 @@ use crate::stripe_client::{
StripeMeter, StripePrice, StripePriceId, StripePriceRecurring, StripeSubscription,
StripeSubscriptionId, StripeSubscriptionItem, StripeSubscriptionItemId,
StripeSubscriptionTrialSettings, StripeSubscriptionTrialSettingsEndBehavior,
StripeSubscriptionTrialSettingsEndBehaviorMissingPaymentMethod, StripeTaxIdCollection,
UpdateCustomerParams, UpdateSubscriptionParams,
StripeSubscriptionTrialSettingsEndBehaviorMissingPaymentMethod, UpdateCustomerParams,
UpdateSubscriptionParams,
};

pub struct RealStripeClient {
@@ -448,7 +448,6 @@ impl<'a> TryFrom<StripeCreateCheckoutSessionParams<'a>> for CreateCheckoutSessio
success_url: value.success_url,
billing_address_collection: value.billing_address_collection.map(Into::into),
customer_update: value.customer_update.map(Into::into),
tax_id_collection: value.tax_id_collection.map(Into::into),
..Default::default()
})
}
@@ -591,11 +590,3 @@ impl From<StripeCustomerUpdate> for stripe::CreateCheckoutSessionCustomerUpdate
}
}
}

impl From<StripeTaxIdCollection> for stripe::CreateCheckoutSessionTaxIdCollection {
fn from(value: StripeTaxIdCollection) -> Self {
stripe::CreateCheckoutSessionTaxIdCollection {
enabled: value.enabled,
}
}
}

@@ -547,7 +547,6 @@ async fn handle_envs(
}
};

let mut env_vars = HashMap::default();
for path in env_files {
let Some(path) = path
.and_then(|s| PathBuf::from_str(s).ok())
@@ -557,33 +556,13 @@ async fn handle_envs(
};

if let Ok(file) = fs.open_sync(&path).await {
let file_envs: HashMap<String, String> = dotenvy::from_read_iter(file)
.filter_map(Result::ok)
.collect();
envs.extend(file_envs.iter().map(|(k, v)| (k.clone(), v.clone())));
env_vars.extend(file_envs);
envs.extend(dotenvy::from_read_iter(file).filter_map(Result::ok))
} else {
warn!("While starting Go debug session: failed to read env file {path:?}");
};
}

let mut env_obj: serde_json::Map<String, Value> = serde_json::Map::new();

for (k, v) in env_vars {
env_obj.insert(k, Value::String(v));
}

if let Some(existing_env) = config.get("env").and_then(|v| v.as_object()) {
for (k, v) in existing_env {
env_obj.insert(k.clone(), v.clone());
}
}

if !env_obj.is_empty() {
config.insert("env".to_string(), Value::Object(env_obj));
}

// remove envFile now that it's been handled
config.remove("envFile");
config.remove("entry");
Some(())
}

@@ -32,19 +32,12 @@ use workspace::{
ui::{Button, Clickable, ContextMenu, Label, LabelCommon, PopoverMenu, h_flex},
};

#[derive(Debug, Copy, Clone, PartialEq, Eq)]
enum View {
AdapterLogs,
RpcMessages,
InitializationSequence,
}

struct DapLogView {
editor: Entity<Editor>,
focus_handle: FocusHandle,
log_store: Entity<LogStore>,
editor_subscriptions: Vec<Subscription>,
current_view: Option<(SessionId, View)>,
current_view: Option<(SessionId, LogKind)>,
project: Entity<Project>,
_subscriptions: Vec<Subscription>,
}
@@ -84,7 +77,6 @@ struct DebugAdapterState {
id: SessionId,
log_messages: VecDeque<SharedString>,
rpc_messages: RpcMessages,
session_label: SharedString,
adapter_name: DebugAdapterName,
has_adapter_logs: bool,
is_terminated: bool,
@@ -129,18 +121,12 @@ impl MessageKind {
}

impl DebugAdapterState {
fn new(
id: SessionId,
adapter_name: DebugAdapterName,
session_label: SharedString,
has_adapter_logs: bool,
) -> Self {
fn new(id: SessionId, adapter_name: DebugAdapterName, has_adapter_logs: bool) -> Self {
Self {
id,
log_messages: VecDeque::new(),
rpc_messages: RpcMessages::new(),
adapter_name,
session_label,
has_adapter_logs,
is_terminated: false,
}
@@ -385,21 +371,18 @@ impl LogStore {
return None;
};

let (adapter_name, session_label, has_adapter_logs) =
session.read_with(cx, |session, _| {
(
session.adapter(),
session.label(),
session
.adapter_client()
.map_or(false, |client| client.has_adapter_logs()),
)
});
let (adapter_name, has_adapter_logs) = session.read_with(cx, |session, _| {
(
session.adapter(),
session
.adapter_client()
.map_or(false, |client| client.has_adapter_logs()),
)
});

state.insert(DebugAdapterState::new(
id.session_id,
adapter_name,
session_label,
has_adapter_logs,
));

@@ -523,13 +506,12 @@ impl Render for DapLogToolbarItemView {
current_client
.map(|sub_item| {
Cow::Owned(format!(
"{} - {} - {}",
"{} ({}) - {}",
sub_item.adapter_name,
sub_item.session_label,
sub_item.session_id.0,
match sub_item.selected_entry {
View::AdapterLogs => ADAPTER_LOGS,
View::RpcMessages => RPC_MESSAGES,
View::InitializationSequence => INITIALIZATION_SEQUENCE,
LogKind::Adapter => ADAPTER_LOGS,
LogKind::Rpc => RPC_MESSAGES,
}
))
})
@@ -547,8 +529,8 @@ impl Render for DapLogToolbarItemView {
.pl_2()
.child(
Label::new(format!(
"{} - {}",
row.adapter_name, row.session_label
"{}. {}",
row.session_id.0, row.adapter_name,
))
.color(workspace::ui::Color::Muted),
)
@@ -687,16 +669,9 @@ impl DapLogView {

let events_subscriptions = cx.subscribe(&log_store, |log_view, _, event, cx| match event {
Event::NewLogEntry { id, entry, kind } => {
let is_current_view = match (log_view.current_view, *kind) {
(Some((i, View::AdapterLogs)), LogKind::Adapter)
| (Some((i, View::RpcMessages)), LogKind::Rpc)
if i == id.session_id =>
{
log_view.project == *id.project
}
_ => false,
};
if is_current_view {
if log_view.current_view == Some((id.session_id, *kind))
&& log_view.project == *id.project
{
log_view.editor.update(cx, |editor, cx| {
editor.set_read_only(false);
let last_point = editor.buffer().read(cx).len(cx);
@@ -793,11 +768,10 @@ impl DapLogView {
.map(|state| DapMenuItem {
session_id: state.id,
adapter_name: state.adapter_name.clone(),
session_label: state.session_label.clone(),
has_adapter_logs: state.has_adapter_logs,
selected_entry: self
.current_view
.map_or(View::AdapterLogs, |(_, kind)| kind),
.map_or(LogKind::Adapter, |(_, kind)| kind),
})
.collect::<Vec<_>>()
})
@@ -815,7 +789,7 @@ impl DapLogView {
.map(|state| log_contents(state.iter().cloned()))
});
if let Some(rpc_log) = rpc_log {
self.current_view = Some((id.session_id, View::RpcMessages));
self.current_view = Some((id.session_id, LogKind::Rpc));
let (editor, editor_subscriptions) = Self::editor_for_logs(rpc_log, window, cx);
let language = self.project.read(cx).languages().language_for_name("JSON");
editor
@@ -856,7 +830,7 @@ impl DapLogView {
.map(|state| log_contents(state.iter().cloned()))
});
if let Some(message_log) = message_log {
self.current_view = Some((id.session_id, View::AdapterLogs));
self.current_view = Some((id.session_id, LogKind::Adapter));
let (editor, editor_subscriptions) = Self::editor_for_logs(message_log, window, cx);
editor
.read(cx)
@@ -885,7 +859,7 @@ impl DapLogView {
.map(|state| log_contents(state.iter().cloned()))
});
if let Some(rpc_log) = rpc_log {
self.current_view = Some((id.session_id, View::InitializationSequence));
self.current_view = Some((id.session_id, LogKind::Rpc));
let (editor, editor_subscriptions) = Self::editor_for_logs(rpc_log, window, cx);
let language = self.project.read(cx).languages().language_for_name("JSON");
editor
@@ -925,12 +899,11 @@ fn log_contents(lines: impl Iterator<Item = SharedString>) -> String {
}

#[derive(Clone, PartialEq)]
struct DapMenuItem {
session_id: SessionId,
session_label: SharedString,
adapter_name: DebugAdapterName,
has_adapter_logs: bool,
selected_entry: View,
pub(crate) struct DapMenuItem {
pub session_id: SessionId,
pub adapter_name: DebugAdapterName,
pub has_adapter_logs: bool,
pub selected_entry: LogKind,
}

const ADAPTER_LOGS: &str = "Adapter Logs";

@@ -11,7 +11,7 @@ use project::worktree_store::WorktreeStore;
use rpc::proto;
use running::RunningState;
use std::{cell::OnceCell, sync::OnceLock};
use ui::{Indicator, prelude::*};
use ui::{Indicator, Tooltip, prelude::*};
use util::truncate_and_trailoff;
use workspace::{
CollaboratorId, FollowableItem, ViewId, Workspace,
@@ -158,6 +158,7 @@ impl DebugSession {

h_flex()
.id("session-label")
.tooltip(Tooltip::text(format!("Session {}", self.session_id(cx).0,)))
.ml(depth * px(16.0))
.gap_2()
.when_some(icon, |this, indicator| this.child(indicator))

@@ -2241,34 +2241,3 @@ func main() {
)
.await;
}

#[gpui::test]
async fn test_trim_multi_line_inline_value(executor: BackgroundExecutor, cx: &mut TestAppContext) {
let variables = [("y", "hello\n world")];

let before = r#"
fn main() {
let y = "hello\n world";
}
"#
.unindent();

let after = r#"
fn main() {
let y: hello… = "hello\n world";
}
"#
.unindent();

test_inline_values_util(
&variables,
&[],
&before,
&after,
None,
rust_lang(),
executor,
cx,
)
.await;
}

@@ -6,7 +6,7 @@ use editor::{
hover_popover::diagnostics_markdown_style,
};
use gpui::{AppContext, Entity, Focusable, WeakEntity};
use language::{BufferId, Diagnostic, DiagnosticEntry, LanguageRegistry};
use language::{BufferId, Diagnostic, DiagnosticEntry};
use lsp::DiagnosticSeverity;
use markdown::{Markdown, MarkdownElement};
use settings::Settings;
@@ -27,7 +27,6 @@ impl DiagnosticRenderer {
diagnostic_group: Vec<DiagnosticEntry<Point>>,
buffer_id: BufferId,
diagnostics_editor: Option<WeakEntity<ProjectDiagnosticsEditor>>,
languages: Arc<LanguageRegistry>,
cx: &mut App,
) -> Vec<DiagnosticBlock> {
let Some(primary_ix) = diagnostic_group
@@ -80,9 +79,7 @@ impl DiagnosticRenderer {
initial_range: primary.range.clone(),
severity: primary.diagnostic.severity,
diagnostics_editor: diagnostics_editor.clone(),
markdown: cx.new(|cx| {
Markdown::new(markdown.into(), Some(languages.clone()), None, cx)
}),
markdown: cx.new(|cx| Markdown::new(markdown.into(), None, None, cx)),
});
} else if entry.range.start.row.abs_diff(primary.range.start.row) < 5 {
let markdown = Self::markdown(&entry.diagnostic);
@@ -91,9 +88,7 @@ impl DiagnosticRenderer {
initial_range: entry.range.clone(),
severity: entry.diagnostic.severity,
diagnostics_editor: diagnostics_editor.clone(),
markdown: cx.new(|cx| {
Markdown::new(markdown.into(), Some(languages.clone()), None, cx)
}),
markdown: cx.new(|cx| Markdown::new(markdown.into(), None, None, cx)),
});
} else {
let mut markdown = Self::markdown(&entry.diagnostic);
@@ -105,9 +100,7 @@ impl DiagnosticRenderer {
initial_range: entry.range.clone(),
severity: entry.diagnostic.severity,
diagnostics_editor: diagnostics_editor.clone(),
markdown: cx.new(|cx| {
Markdown::new(markdown.into(), Some(languages.clone()), None, cx)
}),
markdown: cx.new(|cx| Markdown::new(markdown.into(), None, None, cx)),
});
}
}
@@ -134,11 +127,9 @@ impl editor::DiagnosticRenderer for DiagnosticRenderer {
buffer_id: BufferId,
snapshot: EditorSnapshot,
editor: WeakEntity<Editor>,
languages: Arc<LanguageRegistry>,
cx: &mut App,
) -> Vec<BlockProperties<Anchor>> {
let blocks =
Self::diagnostic_blocks_for_group(diagnostic_group, buffer_id, None, languages, cx);
let blocks = Self::diagnostic_blocks_for_group(diagnostic_group, buffer_id, None, cx);
blocks
.into_iter()
.map(|block| {
@@ -164,11 +155,9 @@ impl editor::DiagnosticRenderer for DiagnosticRenderer {
diagnostic_group: Vec<DiagnosticEntry<Point>>,
range: Range<Point>,
buffer_id: BufferId,
languages: Arc<LanguageRegistry>,
cx: &mut App,
) -> Option<Entity<Markdown>> {
let blocks =
Self::diagnostic_blocks_for_group(diagnostic_group, buffer_id, None, languages, cx);
let blocks = Self::diagnostic_blocks_for_group(diagnostic_group, buffer_id, None, cx);
blocks.into_iter().find_map(|block| {
if block.initial_range == range {
Some(block.markdown)

@@ -508,15 +508,6 @@ impl ProjectDiagnosticsEditor {
window: &mut Window,
cx: &mut Context<Self>,
) -> Task<Result<()>> {
let languages = self
.editor
.read(cx)
.project
.as_ref()
.unwrap()
.read(cx)
.languages()
.clone();
let was_empty = self.multibuffer.read(cx).is_empty();
let buffer_snapshot = buffer.read(cx).snapshot();
let buffer_id = buffer_snapshot.remote_id();
@@ -568,7 +559,6 @@ impl ProjectDiagnosticsEditor {
group,
buffer_snapshot.remote_id(),
Some(this.clone()),
languages.clone(),
cx,
)
})?;

@@ -111,9 +111,8 @@ use itertools::Itertools;
use language::{
AutoindentMode, BracketMatch, BracketPair, Buffer, Capability, CharKind, CodeLabel,
CursorShape, DiagnosticEntry, DiffOptions, DocumentationConfig, EditPredictionsMode,
EditPreview, HighlightedText, IndentKind, IndentSize, Language, LanguageRegistry,
OffsetRangeExt, Point, Selection, SelectionGoal, TextObject, TransactionId, TreeSitterOptions,
WordsQuery,
EditPreview, HighlightedText, IndentKind, IndentSize, Language, OffsetRangeExt, Point,
Selection, SelectionGoal, TextObject, TransactionId, TreeSitterOptions, WordsQuery,
language_settings::{
self, InlayHintSettings, LspInsertMode, RewrapBehavior, WordsCompletionMode,
all_language_settings, language_settings,
@@ -403,7 +402,6 @@ pub trait DiagnosticRenderer {
buffer_id: BufferId,
snapshot: EditorSnapshot,
editor: WeakEntity<Editor>,
languages: Arc<LanguageRegistry>,
cx: &mut App,
) -> Vec<BlockProperties<Anchor>>;

@@ -412,7 +410,6 @@ pub trait DiagnosticRenderer {
diagnostic_group: Vec<DiagnosticEntry<Point>>,
range: Range<Point>,
buffer_id: BufferId,
languages: Arc<LanguageRegistry>,
cx: &mut App,
) -> Option<Entity<markdown::Markdown>>;

@@ -2325,10 +2322,7 @@ impl Editor {
editor.update_lsp_data(false, None, window, cx);
}

if editor.mode.is_full() {
editor.report_editor_event("Editor Opened", None, cx);
}

editor.report_editor_event("Editor Opened", None, cx);
editor
}

@@ -16577,20 +16571,13 @@ impl Editor {
let Some(renderer) = GlobalDiagnosticRenderer::global(cx) else {
return;
};
let languages = self.project.as_ref().unwrap().read(cx).languages().clone();

let diagnostic_group = buffer
.diagnostic_group(buffer_id, diagnostic.diagnostic.group_id)
.collect::<Vec<_>>();

let blocks = renderer.render_group(
diagnostic_group,
buffer_id,
snapshot,
cx.weak_entity(),
languages,
cx,
);
let blocks =
renderer.render_group(diagnostic_group, buffer_id, snapshot, cx.weak_entity(), cx);

let blocks = self.display_map.update(cx, |display_map, cx| {
display_map.insert_blocks(blocks, cx).into_iter().collect()
@@ -19668,9 +19655,8 @@ impl Editor {
Anchor::in_buffer(excerpt_id, buffer_id, hint.position),
hint.text(),
);
if !inlay.text.chars().contains(&'\n') {
new_inlays.push(inlay);
}

new_inlays.push(inlay);
});
}

@@ -8035,25 +8035,23 @@ impl Element for EditorElement {
}
};

let (
autoscroll_request,
autoscroll_containing_element,
needs_horizontal_autoscroll,
) = self.editor.update(cx, |editor, cx| {
let autoscroll_request = editor.autoscroll_request();
let autoscroll_containing_element =
// TODO: Autoscrolling for both axes
let mut autoscroll_request = None;
let mut autoscroll_containing_element = false;
let mut autoscroll_horizontally = false;
self.editor.update(cx, |editor, cx| {
autoscroll_request = editor.autoscroll_request();
autoscroll_containing_element =
autoscroll_request.is_some() || editor.has_pending_selection();

let (needs_horizontal_autoscroll, was_scrolled) = editor
.autoscroll_vertically(bounds, line_height, max_scroll_top, window, cx);
if was_scrolled.0 {
snapshot = editor.snapshot(window, cx);
}
(
autoscroll_request,
autoscroll_containing_element,
needs_horizontal_autoscroll,
)
// TODO: Is this horizontal or vertical?!
autoscroll_horizontally = editor.autoscroll_vertically(
bounds,
line_height,
max_scroll_top,
window,
cx,
);
snapshot = editor.snapshot(window, cx);
});

let mut scroll_position = snapshot.scroll_position();
@@ -8462,12 +8460,10 @@ impl Element for EditorElement {
);

self.editor.update(cx, |editor, cx| {
if editor.scroll_manager.clamp_scroll_left(scroll_max.x) {
scroll_position.x = scroll_position.x.min(scroll_max.x);
}
let clamped = editor.scroll_manager.clamp_scroll_left(scroll_max.x);

if needs_horizontal_autoscroll.0
&& let Some(new_scroll_position) = editor.autoscroll_horizontally(
let autoscrolled = if autoscroll_horizontally {
editor.autoscroll_horizontally(
start_row,
editor_content_width,
scroll_width,
@@ -8476,8 +8472,13 @@ impl Element for EditorElement {
window,
cx,
)
{
scroll_position = new_scroll_position;
} else {
false
};

if clamped || autoscrolled {
snapshot = editor.snapshot(window, cx);
scroll_position = snapshot.scroll_position();
}
});

@@ -8592,9 +8593,7 @@ impl Element for EditorElement {
}
} else {
log::error!(
"bug: line_ix {} is out of bounds - row_infos.len(): {}, \
line_layouts.len(): {}, \
crease_trailers.len(): {}",
"bug: line_ix {} is out of bounds - row_infos.len(): {}, line_layouts.len(): {}, crease_trailers.len(): {}",
line_ix,
row_infos.len(),
line_layouts.len(),
@@ -8615,6 +8614,29 @@ impl Element for EditorElement {
cx,
);

self.editor.update(cx, |editor, cx| {
let clamped = editor.scroll_manager.clamp_scroll_left(scroll_max.x);

let autoscrolled = if autoscroll_horizontally {
editor.autoscroll_horizontally(
start_row,
editor_content_width,
scroll_width,
em_advance,
&line_layouts,
window,
cx,
)
} else {
false
};

if clamped || autoscrolled {
snapshot = editor.snapshot(window, cx);
scroll_position = snapshot.scroll_position();
}
});

let line_elements = self.prepaint_lines(
start_row,
&mut line_layouts,
@@ -8840,7 +8862,7 @@ impl Element for EditorElement {
underline: None,
strikethrough: None,
}],
None,
None
);
let space_invisible = window.text_system().shape_line(
"•".into(),
@@ -8853,7 +8875,7 @@ impl Element for EditorElement {
underline: None,
strikethrough: None,
}],
None,
None
);

let mode = snapshot.mode.clone();

@@ -275,13 +275,6 @@ fn show_hover(
return None;
}
}
let languages = editor
.project
.as_ref()
.unwrap()
.read(cx)
.languages()
.clone();

let hover_popover_delay = EditorSettings::get_global(cx).hover_popover_delay;
let all_diagnostics_active = editor.active_diagnostics == ActiveDiagnostic::All;
@@ -347,7 +340,7 @@ fn show_hover(
renderer
.as_ref()
.and_then(|renderer| {
renderer.render_hover(group, point_range, buffer_id, languages, cx)
renderer.render_hover(group, point_range, buffer_id, cx)
})
.context("no rendered diagnostic")
})??;

@@ -813,13 +813,7 @@ impl Item for Editor {
window: &mut Window,
cx: &mut Context<Self>,
) -> Task<Result<()>> {
// Add meta data tracking # of auto saves
if options.autosave {
self.report_editor_event("Editor Autosaved", None, cx);
} else {
self.report_editor_event("Editor Saved", None, cx);
}

self.report_editor_event("Editor Saved", None, cx);
let buffers = self.buffer().clone().read(cx).all_buffers();
let buffers = buffers
.into_iter()

@@ -27,8 +27,6 @@ use workspace::{ItemId, WorkspaceId};
pub const SCROLL_EVENT_SEPARATION: Duration = Duration::from_millis(28);
const SCROLLBAR_SHOW_INTERVAL: Duration = Duration::from_secs(1);

pub struct WasScrolled(pub(crate) bool);

#[derive(Default)]
pub struct ScrollbarAutoHide(pub bool);

@@ -217,56 +215,87 @@ impl ScrollManager {
workspace_id: Option<WorkspaceId>,
window: &mut Window,
cx: &mut Context<Editor>,
) -> WasScrolled {
let scroll_top = scroll_position.y.max(0.);
let scroll_top = match EditorSettings::get_global(cx).scroll_beyond_last_line {
ScrollBeyondLastLine::OnePage => scroll_top,
ScrollBeyondLastLine::Off => {
if let Some(height_in_lines) = self.visible_line_count {
let max_row = map.max_point().row().0 as f32;
scroll_top.min(max_row - height_in_lines + 1.).max(0.)
} else {
scroll_top
) {
let (new_anchor, top_row) = if scroll_position.y <= 0. && scroll_position.x <= 0. {
(
ScrollAnchor {
anchor: Anchor::min(),
offset: scroll_position.max(&gpui::Point::default()),
},
0,
)
} else if scroll_position.y <= 0. {
let buffer_point = map
.clip_point(
DisplayPoint::new(DisplayRow(0), scroll_position.x as u32),
Bias::Left,
)
.to_point(map);
let anchor = map.buffer_snapshot.anchor_at(buffer_point, Bias::Right);

(
ScrollAnchor {
anchor: anchor,
offset: scroll_position.max(&gpui::Point::default()),
},
0,
)
} else {
let scroll_top = scroll_position.y;
let scroll_top = match EditorSettings::get_global(cx).scroll_beyond_last_line {
ScrollBeyondLastLine::OnePage => scroll_top,
ScrollBeyondLastLine::Off => {
if let Some(height_in_lines) = self.visible_line_count {
let max_row = map.max_point().row().0 as f32;
scroll_top.min(max_row - height_in_lines + 1.).max(0.)
} else {
scroll_top
}
}
}
ScrollBeyondLastLine::VerticalScrollMargin => {
if let Some(height_in_lines) = self.visible_line_count {
let max_row = map.max_point().row().0 as f32;
scroll_top
.min(max_row - height_in_lines + 1. + self.vertical_scroll_margin)
.max(0.)
} else {
scroll_top
ScrollBeyondLastLine::VerticalScrollMargin => {
if let Some(height_in_lines) = self.visible_line_count {
let max_row = map.max_point().row().0 as f32;
scroll_top
.min(max_row - height_in_lines + 1. + self.vertical_scroll_margin)
.max(0.)
} else {
scroll_top
}
}
}
};

let scroll_top_row = DisplayRow(scroll_top as u32);
let scroll_top_buffer_point = map
.clip_point(
DisplayPoint::new(scroll_top_row, scroll_position.x as u32),
Bias::Left,
)
.to_point(map);
let top_anchor = map
.buffer_snapshot
.anchor_at(scroll_top_buffer_point, Bias::Right);

(
ScrollAnchor {
anchor: top_anchor,
offset: point(
scroll_position.x.max(0.),
scroll_top - top_anchor.to_display_point(map).row().as_f32(),
),
},
scroll_top_buffer_point.row,
)
};

let scroll_top_row = DisplayRow(scroll_top as u32);
let scroll_top_buffer_point = map
.clip_point(
DisplayPoint::new(scroll_top_row, scroll_position.x as u32),
Bias::Left,
)
.to_point(map);
let top_anchor = map
.buffer_snapshot
.anchor_at(scroll_top_buffer_point, Bias::Right);

self.set_anchor(
ScrollAnchor {
anchor: top_anchor,
offset: point(
scroll_position.x.max(0.),
scroll_top - top_anchor.to_display_point(map).row().as_f32(),
),
},
scroll_top_buffer_point.row,
new_anchor,
top_row,
local,
autoscroll,
workspace_id,
window,
cx,
)
);
}

fn set_anchor(
@@ -278,7 +307,7 @@ impl ScrollManager {
workspace_id: Option<WorkspaceId>,
window: &mut Window,
cx: &mut Context<Editor>,
) -> WasScrolled {
) {
let adjusted_anchor = if self.forbid_vertical_scroll {
ScrollAnchor {
offset: gpui::Point::new(anchor.offset.x, self.anchor.offset.y),
@@ -288,14 +317,10 @@ impl ScrollManager {
anchor
};

self.autoscroll_request.take();
if self.anchor == adjusted_anchor {
return WasScrolled(false);
}

self.anchor = adjusted_anchor;
cx.emit(EditorEvent::ScrollPositionChanged { local, autoscroll });
self.show_scrollbars(window, cx);
self.autoscroll_request.take();
if let Some(workspace_id) = workspace_id {
let item_id = cx.entity().entity_id().as_u64() as ItemId;

@@ -317,8 +342,6 @@ impl ScrollManager {
.detach()
}
cx.notify();

WasScrolled(true)
}

pub fn show_scrollbars(&mut self, window: &mut Window, cx: &mut Context<Editor>) {
@@ -529,13 +552,13 @@ impl Editor {
scroll_position: gpui::Point<f32>,
window: &mut Window,
cx: &mut Context<Self>,
) -> WasScrolled {
) {
let mut position = scroll_position;
if self.scroll_manager.forbid_vertical_scroll {
let current_position = self.scroll_position(cx);
position.y = current_position.y;
}
self.set_scroll_position_internal(position, true, false, window, cx)
self.set_scroll_position_internal(position, true, false, window, cx);
}

/// Scrolls so that `row` is at the top of the editor view.
@@ -567,7 +590,7 @@ impl Editor {
autoscroll: bool,
window: &mut Window,
cx: &mut Context<Self>,
) -> WasScrolled {
) {
let map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
self.set_scroll_position_taking_display_map(
scroll_position,
@@ -576,7 +599,7 @@ impl Editor {
map,
window,
cx,
)
);
}

fn set_scroll_position_taking_display_map(
@@ -587,7 +610,7 @@ impl Editor {
display_map: DisplaySnapshot,
window: &mut Window,
cx: &mut Context<Self>,
) -> WasScrolled {
) {
hide_hover(self, cx);
let workspace_id = self.workspace.as_ref().and_then(|workspace| workspace.1);

@@ -601,7 +624,7 @@ impl Editor {
scroll_position
};

let editor_was_scrolled = self.scroll_manager.set_scroll_position(
self.scroll_manager.set_scroll_position(
adjusted_position,
&display_map,
local,
@@ -613,7 +636,6 @@ impl Editor {

self.refresh_inlay_hints(InlayHintRefreshReason::NewLinesShown, cx);
self.refresh_colors(false, None, window, cx);
editor_was_scrolled
}

pub fn scroll_position(&self, cx: &mut Context<Self>) -> gpui::Point<f32> {

@@ -1,6 +1,6 @@
use crate::{
DisplayRow, Editor, EditorMode, LineWithInvisibles, RowExt, SelectionEffects,
display_map::ToDisplayPoint, scroll::WasScrolled,
display_map::ToDisplayPoint,
};
use gpui::{Bounds, Context, Pixels, Window, px};
use language::Point;
@@ -99,21 +99,19 @@ impl AutoscrollStrategy {
}
}

pub(crate) struct NeedsHorizontalAutoscroll(pub(crate) bool);

impl Editor {
pub fn autoscroll_request(&self) -> Option<Autoscroll> {
self.scroll_manager.autoscroll_request()
}

pub(crate) fn autoscroll_vertically(
pub fn autoscroll_vertically(
&mut self,
bounds: Bounds<Pixels>,
line_height: Pixels,
max_scroll_top: f32,
window: &mut Window,
cx: &mut Context<Editor>,
) -> (NeedsHorizontalAutoscroll, WasScrolled) {
) -> bool {
let viewport_height = bounds.size.height;
let visible_lines = viewport_height / line_height;
let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
@@ -131,14 +129,12 @@ impl Editor {
scroll_position.y = max_scroll_top;
}

let editor_was_scrolled = if original_y != scroll_position.y {
self.set_scroll_position(scroll_position, window, cx)
} else {
WasScrolled(false)
};
if original_y != scroll_position.y {
self.set_scroll_position(scroll_position, window, cx);
}

let Some((autoscroll, local)) = self.scroll_manager.autoscroll_request.take() else {
return (NeedsHorizontalAutoscroll(false), editor_was_scrolled);
return false;
};

let mut target_top;
@@ -216,7 +212,7 @@ impl Editor {
target_bottom = target_top + 1.;
}

let was_autoscrolled = match strategy {
match strategy {
AutoscrollStrategy::Fit | AutoscrollStrategy::Newest => {
let margin = margin.min(self.scroll_manager.vertical_scroll_margin);
let target_top = (target_top - margin).max(0.0);
@@ -229,42 +225,39 @@ impl Editor {

if needs_scroll_up && !needs_scroll_down {
scroll_position.y = target_top;
} else if !needs_scroll_up && needs_scroll_down {
scroll_position.y = target_bottom - visible_lines;
self.set_scroll_position_internal(scroll_position, local, true, window, cx);
}

if needs_scroll_up ^ needs_scroll_down {
self.set_scroll_position_internal(scroll_position, local, true, window, cx)
} else {
WasScrolled(false)
if !needs_scroll_up && needs_scroll_down {
scroll_position.y = target_bottom - visible_lines;
self.set_scroll_position_internal(scroll_position, local, true, window, cx);
}
}
AutoscrollStrategy::Center => {
scroll_position.y = (target_top - margin).max(0.0);
self.set_scroll_position_internal(scroll_position, local, true, window, cx)
self.set_scroll_position_internal(scroll_position, local, true, window, cx);
}
AutoscrollStrategy::Focused => {
let margin = margin.min(self.scroll_manager.vertical_scroll_margin);
scroll_position.y = (target_top - margin).max(0.0);
self.set_scroll_position_internal(scroll_position, local, true, window, cx)
self.set_scroll_position_internal(scroll_position, local, true, window, cx);
}
AutoscrollStrategy::Top => {
scroll_position.y = (target_top).max(0.0);
self.set_scroll_position_internal(scroll_position, local, true, window, cx)
self.set_scroll_position_internal(scroll_position, local, true, window, cx);
}
AutoscrollStrategy::Bottom => {
scroll_position.y = (target_bottom - visible_lines).max(0.0);
self.set_scroll_position_internal(scroll_position, local, true, window, cx)
self.set_scroll_position_internal(scroll_position, local, true, window, cx);
}
AutoscrollStrategy::TopRelative(lines) => {
scroll_position.y = target_top - lines as f32;
self.set_scroll_position_internal(scroll_position, local, true, window, cx)
self.set_scroll_position_internal(scroll_position, local, true, window, cx);
}
AutoscrollStrategy::BottomRelative(lines) => {
scroll_position.y = target_bottom + lines as f32;
self.set_scroll_position_internal(scroll_position, local, true, window, cx)
self.set_scroll_position_internal(scroll_position, local, true, window, cx);
}
};
}

self.scroll_manager.last_autoscroll = Some((
self.scroll_manager.anchor.offset,
@@ -273,8 +266,7 @@ impl Editor {
strategy,
));

let was_scrolled = WasScrolled(editor_was_scrolled.0 || was_autoscrolled.0);
(NeedsHorizontalAutoscroll(true), was_scrolled)
true
}

pub(crate) fn autoscroll_horizontally(
@@ -286,7 +278,7 @@ impl Editor {
layouts: &[LineWithInvisibles],
window: &mut Window,
cx: &mut Context<Self>,
) -> Option<gpui::Point<f32>> {
) -> bool {
let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
let selections = self.selections.all::<Point>(cx);
let mut scroll_position = self.scroll_manager.scroll_position(&display_map);
@@ -327,26 +319,22 @@ impl Editor {
target_right = target_right.min(scroll_width);

if target_right - target_left > viewport_width {
return None;
return false;
}

let scroll_left = self.scroll_manager.anchor.offset.x * em_advance;
let scroll_right = scroll_left + viewport_width;

let was_scrolled = if target_left < scroll_left {
if target_left < scroll_left {
scroll_position.x = target_left / em_advance;
self.set_scroll_position_internal(scroll_position, true, true, window, cx)
self.set_scroll_position_internal(scroll_position, true, true, window, cx);
true
} else if target_right > scroll_right {
scroll_position.x = (target_right - viewport_width) / em_advance;
self.set_scroll_position_internal(scroll_position, true, true, window, cx)
self.set_scroll_position_internal(scroll_position, true, true, window, cx);
true
} else {
WasScrolled(false)
};

if was_scrolled.0 {
Some(scroll_position)
} else {
None
false
}
}

@@ -8,7 +8,6 @@ mod tool_metrics;

use assertions::{AssertionsReport, display_error_row};
use instance::{ExampleInstance, JudgeOutput, RunOutput, run_git};
use language_extension::LspAccess;
pub(crate) use tool_metrics::*;

use ::fs::RealFs;
@@ -416,11 +415,7 @@ pub fn init(cx: &mut App) -> Arc<AgentAppState> {

language::init(cx);
debug_adapter_extension::init(extension_host_proxy.clone(), cx);
language_extension::init(
LspAccess::Noop,
extension_host_proxy.clone(),
languages.clone(),
);
language_extension::init(extension_host_proxy.clone(), languages.clone());
language_model::init(client.clone(), cx);
language_models::init(user_store.clone(), client.clone(), cx);
languages::init(languages.clone(), node_runtime.clone(), cx);

@@ -18,7 +18,7 @@
Publisher="CN=Zed Industries Inc, O=Zed Industries Inc, L=Denver, S=Colorado, C=US"
Version="1.0.0.0" />
<Properties>
<DisplayName>Zed Nightly</DisplayName>
<DisplayName>Zed Editor Nightly</DisplayName>
<PublisherDisplayName>Zed Industries</PublisherDisplayName>
<!-- TODO: Use actual icon here. -->
<Logo>resources\logo_150x150.png</Logo>
@@ -45,8 +45,8 @@
<!-- TODO: Use actual icon here. -->
<uap:VisualElements
AppListEntry="none"
DisplayName="Zed Nightly"
Description="Zed Nightly explorer command injector"
DisplayName="Zed Editor Nightly"
Description="Zed Editor Nightly explorer command injector"
BackgroundColor="transparent"
Square150x150Logo="resources\logo_150x150.png"
Square44x44Logo="resources\logo_70x70.png">
@@ -67,7 +67,7 @@
</desktop4:Extension>
<com:Extension Category="windows.comServer">
<com:ComServer>
<com:SurrogateServer DisplayName="Zed Nightly">
<com:SurrogateServer DisplayName="Zed Editor Nightly">
<com:Class Id="266f2cfe-1653-42af-b55c-fe3590c83871" Path="zed_explorer_command_injector.dll" ThreadingModel="STA"/>
</com:SurrogateServer>
</com:ComServer>

@@ -18,7 +18,7 @@
Publisher="CN=Zed Industries Inc, O=Zed Industries Inc, L=Denver, S=Colorado, C=US"
Version="1.0.0.0" />
<Properties>
<DisplayName>Zed Preview</DisplayName>
<DisplayName>Zed Editor Preview</DisplayName>
<PublisherDisplayName>Zed Industries</PublisherDisplayName>
<!-- TODO: Use actual icon here. -->
<Logo>resources\logo_150x150.png</Logo>
@@ -45,8 +45,8 @@
<!-- TODO: Use actual icon here. -->
<uap:VisualElements
AppListEntry="none"
DisplayName="Zed Preview"
Description="Zed Preview explorer command injector"
DisplayName="Zed Editor Preview"
Description="Zed Editor Preview explorer command injector"
BackgroundColor="transparent"
Square150x150Logo="resources\logo_150x150.png"
Square44x44Logo="resources\logo_70x70.png">
@@ -67,7 +67,7 @@
</desktop4:Extension>
<com:Extension Category="windows.comServer">
<com:ComServer>
<com:SurrogateServer DisplayName="Zed Preview">
<com:SurrogateServer DisplayName="Zed Editor Preview">
<com:Class Id="af8e85ea-fb20-4db2-93cf-56513c1ec697" Path="zed_explorer_command_injector.dll" ThreadingModel="STA"/>
</com:SurrogateServer>
</com:ComServer>

@@ -18,7 +18,7 @@
Publisher="CN=Zed Industries Inc, O=Zed Industries Inc, L=Denver, S=Colorado, C=US"
Version="1.0.0.0" />
<Properties>
<DisplayName>Zed</DisplayName>
<DisplayName>Zed Editor</DisplayName>

<PublisherDisplayName>Zed Industries</PublisherDisplayName>
<!-- TODO: Use actual icon here. -->
@@ -46,8 +46,8 @@
<!-- TODO: Use actual icon here. -->
<uap:VisualElements
AppListEntry="none"
DisplayName="Zed"
Description="Zed explorer command injector"
DisplayName="Zed Editor"
Description="Zed Editor explorer command injector"
BackgroundColor="transparent"
Square150x150Logo="resources\logo_150x150.png"
Square44x44Logo="resources\logo_70x70.png">
@@ -68,7 +68,7 @@
</desktop4:Extension>
<com:Extension Category="windows.comServer">
<com:ComServer>
<com:SurrogateServer DisplayName="Zed">
<com:SurrogateServer DisplayName="Zed Editor">
<com:Class Id="6a1f6b13-3b82-48a1-9e06-7bb0a6d0bffd" Path="zed_explorer_command_injector.dll" ThreadingModel="STA"/>
</com:SurrogateServer>
</com:ComServer>

@@ -286,8 +286,7 @@ pub trait ExtensionLanguageServerProxy: Send + Sync + 'static {
&self,
language: &LanguageName,
language_server_id: &LanguageServerName,
cx: &mut App,
) -> Task<Result<()>>;
);

fn update_language_server_status(
&self,
@@ -314,13 +313,12 @@ impl ExtensionLanguageServerProxy for ExtensionHostProxy {
&self,
language: &LanguageName,
language_server_id: &LanguageServerName,
cx: &mut App,
) -> Task<Result<()>> {
) {
let Some(proxy) = self.language_server_proxy.read().clone() else {
return Task::ready(Ok(()));
return;
};

proxy.remove_language_server(language, language_server_id, cx)
proxy.remove_language_server(language, language_server_id)
}

fn update_language_server_status(
@@ -352,8 +350,6 @@ impl ExtensionSnippetProxy for ExtensionHostProxy {

pub trait ExtensionSlashCommandProxy: Send + Sync + 'static {
fn register_slash_command(&self, extension: Arc<dyn Extension>, command: SlashCommand);

fn unregister_slash_command(&self, command_name: Arc<str>);
}

impl ExtensionSlashCommandProxy for ExtensionHostProxy {
@@ -364,14 +360,6 @@ impl ExtensionSlashCommandProxy for ExtensionHostProxy {

proxy.register_slash_command(extension, command)
}

fn unregister_slash_command(&self, command_name: Arc<str>) {
let Some(proxy) = self.slash_command_proxy.read().clone() else {
return;
};

proxy.unregister_slash_command(command_name)
}
}

pub trait ExtensionContextServerProxy: Send + Sync + 'static {
@@ -410,8 +398,6 @@ impl ExtensionContextServerProxy for ExtensionHostProxy {

pub trait ExtensionIndexedDocsProviderProxy: Send + Sync + 'static {
fn register_indexed_docs_provider(&self, extension: Arc<dyn Extension>, provider_id: Arc<str>);

fn unregister_indexed_docs_provider(&self, provider_id: Arc<str>);
}

impl ExtensionIndexedDocsProviderProxy for ExtensionHostProxy {
@@ -422,14 +408,6 @@ impl ExtensionIndexedDocsProviderProxy for ExtensionHostProxy {

proxy.register_indexed_docs_provider(extension, provider_id)
}

fn unregister_indexed_docs_provider(&self, provider_id: Arc<str>) {
let Some(proxy) = self.indexed_docs_provider_proxy.read().clone() else {
return;
};

proxy.unregister_indexed_docs_provider(provider_id)
}
}

pub trait ExtensionDebugAdapterProviderProxy: Send + Sync + 'static {

@@ -20,7 +20,6 @@ use extension::{
ExtensionSnippetProxy, ExtensionThemeProxy,
};
use fs::{Fs, RemoveOptions};
use futures::future::join_all;
use futures::{
AsyncReadExt as _, Future, FutureExt as _, StreamExt as _,
channel::{
@@ -861,8 +860,8 @@ impl ExtensionStore {
btree_map::Entry::Vacant(e) => e.insert(ExtensionOperation::Remove),
};

cx.spawn(async move |extension_store, cx| {
let _finish = cx.on_drop(&extension_store, {
cx.spawn(async move |this, cx| {
let _finish = cx.on_drop(&this, {
let extension_id = extension_id.clone();
move |this, cx| {
this.outstanding_operations.remove(extension_id.as_ref());
@@ -877,39 +876,22 @@ impl ExtensionStore {
ignore_if_not_exists: true,
},
)
.await
.with_context(|| format!("Removing extension dir {extension_dir:?}"))?;
.await?;

extension_store
.update(cx, |extension_store, cx| extension_store.reload(None, cx))?
.await;
// todo(windows)
// Stop the server here.
this.update(cx, |this, cx| this.reload(None, cx))?.await;

// There's a race between wasm extension fully stopping and the directory removal.
// On Windows, it's impossible to remove a directory that has a process running in it.
for i in 0..3 {
cx.background_executor()
.timer(Duration::from_millis(i * 100))
.await;
let removal_result = fs
.remove_dir(
&work_dir,
RemoveOptions {
recursive: true,
ignore_if_not_exists: true,
},
)
.await;
match removal_result {
Ok(()) => break,
Err(e) => {
if i == 2 {
log::error!("Failed to remove extension work dir {work_dir:?} : {e}");
}
}
}
}
fs.remove_dir(
&work_dir,
RemoveOptions {
recursive: true,
ignore_if_not_exists: true,
},
)
.await?;

extension_store.update(cx, |_, cx| {
this.update(cx, |_, cx| {
cx.emit(Event::ExtensionUninstalled(extension_id.clone()));
if let Some(events) = ExtensionEvents::try_global(cx) {
if let Some(manifest) = extension_manifest {
@@ -1161,38 +1143,27 @@ impl ExtensionStore {
})
.collect::<Vec<_>>();
let mut grammars_to_remove = Vec::new();
let mut server_removal_tasks = Vec::with_capacity(extensions_to_unload.len());
for extension_id in &extensions_to_unload {
let Some(extension) = old_index.extensions.get(extension_id) else {
continue;
};
grammars_to_remove.extend(extension.manifest.grammars.keys().cloned());
for (language_server_name, config) in &extension.manifest.language_servers {
for (language_server_name, config) in extension.manifest.language_servers.iter() {
for language in config.languages() {
server_removal_tasks.push(self.proxy.remove_language_server(
&language,
language_server_name,
cx,
));
self.proxy
.remove_language_server(&language, language_server_name);
}
}

for (server_id, _) in &extension.manifest.context_servers {
for (server_id, _) in extension.manifest.context_servers.iter() {
self.proxy.unregister_context_server(server_id.clone(), cx);
}
for (adapter, _) in &extension.manifest.debug_adapters {
for (adapter, _) in extension.manifest.debug_adapters.iter() {
self.proxy.unregister_debug_adapter(adapter.clone());
}
for (locator, _) in &extension.manifest.debug_locators {
for (locator, _) in extension.manifest.debug_locators.iter() {
self.proxy.unregister_debug_locator(locator.clone());
}
for (command_name, _) in &extension.manifest.slash_commands {
self.proxy.unregister_slash_command(command_name.clone());
}
for (provider_id, _) in &extension.manifest.indexed_docs_providers {
self.proxy
.unregister_indexed_docs_provider(provider_id.clone());
}
}

self.wasm_extensions
@@ -1297,15 +1268,14 @@ impl ExtensionStore {
cx.background_spawn({
let fs = fs.clone();
async move {
let _ = join_all(server_removal_tasks).await;
for theme_path in themes_to_add {
for theme_path in themes_to_add.into_iter() {
proxy
.load_user_theme(theme_path, fs.clone())
.await
.log_err();
}

for (icon_theme_path, icons_root_path) in icon_themes_to_add {
for (icon_theme_path, icons_root_path) in icon_themes_to_add.into_iter() {
proxy
.load_icon_theme(icon_theme_path, icons_root_path, fs.clone())
.await

@@ -11,7 +11,6 @@ use futures::{AsyncReadExt, StreamExt, io::BufReader};
use gpui::{AppContext as _, SemanticVersion, TestAppContext};
use http_client::{FakeHttpClient, Response};
use language::{BinaryStatus, LanguageMatcher, LanguageRegistry};
use language_extension::LspAccess;
use lsp::LanguageServerName;
use node_runtime::NodeRuntime;
use parking_lot::Mutex;
@@ -272,7 +271,7 @@ async fn test_extension_store(cx: &mut TestAppContext) {
let theme_registry = Arc::new(ThemeRegistry::new(Box::new(())));
theme_extension::init(proxy.clone(), theme_registry.clone(), cx.executor());
let language_registry = Arc::new(LanguageRegistry::test(cx.executor()));
language_extension::init(LspAccess::Noop, proxy.clone(), language_registry.clone());
language_extension::init(proxy.clone(), language_registry.clone());
let node_runtime = NodeRuntime::unavailable();

let store = cx.new(|cx| {
@@ -555,11 +554,7 @@ async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) {
let theme_registry = Arc::new(ThemeRegistry::new(Box::new(())));
theme_extension::init(proxy.clone(), theme_registry.clone(), cx.executor());
let language_registry = project.read_with(cx, |project, _cx| project.languages().clone());
language_extension::init(
LspAccess::ViaLspStore(project.update(cx, |project, _| project.lsp_store())),
proxy.clone(),
language_registry.clone(),
);
language_extension::init(proxy.clone(), language_registry.clone());
let node_runtime = NodeRuntime::unavailable();

let mut status_updates = language_registry.language_server_binary_statuses();
@@ -820,6 +815,7 @@ async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) {
extension_store
.update(cx, |store, cx| store.reload(Some("gleam".into()), cx))
.await;

cx.executor().run_until_parked();
project.update(cx, |project, cx| {
project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx)

@@ -11,7 +11,6 @@ use extension::{
|
||||
ExtensionLanguageServerProxy, ExtensionManifest,
|
||||
};
|
||||
use fs::{Fs, RemoveOptions, RenameOptions};
|
||||
use futures::future::join_all;
|
||||
use gpui::{App, AppContext as _, AsyncApp, Context, Entity, Task, WeakEntity};
|
||||
use http_client::HttpClient;
|
||||
use language::{LanguageConfig, LanguageName, LanguageQueries, LoadedLanguage};
|
||||
@@ -231,27 +230,18 @@ impl HeadlessExtensionStore {
|
||||
.unwrap_or_default();
|
||||
self.proxy.remove_languages(&languages_to_remove, &[]);
|
||||
|
||||
let servers_to_remove = self
|
||||
for (language_server_name, language) in self
|
||||
.loaded_language_servers
|
||||
.remove(extension_id)
|
||||
.unwrap_or_default();
|
||||
let proxy = self.proxy.clone();
|
||||
.unwrap_or_default()
|
||||
{
|
||||
self.proxy
|
||||
.remove_language_server(&language, &language_server_name);
|
||||
}
|
||||
|
||||
let path = self.extension_dir.join(&extension_id.to_string());
|
||||
let fs = self.fs.clone();
|
||||
cx.spawn(async move |_, cx| {
|
||||
let mut removal_tasks = Vec::with_capacity(servers_to_remove.len());
|
||||
cx.update(|cx| {
|
||||
for (language_server_name, language) in servers_to_remove {
|
||||
removal_tasks.push(proxy.remove_language_server(
|
||||
&language,
|
||||
&language_server_name,
|
||||
cx,
|
||||
));
|
||||
}
|
||||
})
|
||||
.ok();
|
||||
let _ = join_all(removal_tasks).await;
|
||||
|
||||
cx.spawn(async move |_, _| {
|
||||
fs.remove_dir(
|
||||
&path,
|
||||
RemoveOptions {
|
||||
@@ -260,7 +250,6 @@ impl HeadlessExtensionStore {
|
||||
},
|
||||
)
|
||||
.await
|
||||
.with_context(|| format!("Removing directory {path:?}"))
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
@@ -54,7 +54,7 @@ pub struct WasmHost {
main_thread_message_tx: mpsc::UnboundedSender<MainThreadCall>,
}

#[derive(Clone, Debug)]
#[derive(Clone)]
pub struct WasmExtension {
tx: UnboundedSender<ExtensionCall>,
pub manifest: Arc<ExtensionManifest>,
@@ -63,12 +63,6 @@ pub struct WasmExtension {
pub zed_api_version: SemanticVersion,
}

impl Drop for WasmExtension {
    fn drop(&mut self) {
        self.tx.close_channel();
    }
}

#[async_trait]
impl extension::Extension for WasmExtension {
    fn manifest(&self) -> Arc<ExtensionManifest> {
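The hunk above touches `WasmExtension`'s derives and a `Drop` impl that closes the extension's call channel when the handle goes away. As a rough standalone illustration of that pattern (a toy handle, not Zed's actual types), closing the sender on drop lets the task draining the receiver finish any buffered work and then exit:

```rust
use futures::{StreamExt, channel::mpsc};

// Toy handle: dropping it closes the sender, so the receiver yields the remaining
// items and then `None`, letting the worker loop terminate cleanly.
struct WorkerHandle {
    tx: mpsc::UnboundedSender<String>,
}

impl Drop for WorkerHandle {
    fn drop(&mut self) {
        self.tx.close_channel();
    }
}

fn main() {
    let (tx, mut rx) = mpsc::unbounded::<String>();
    let handle = WorkerHandle { tx };
    handle.tx.unbounded_send("compile extension".to_string()).ok();
    drop(handle); // closes the channel

    futures::executor::block_on(async {
        while let Some(job) = rx.next().await {
            println!("processing: {job}");
        }
        println!("channel closed, worker task finishes");
    });
}
```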
@@ -748,6 +742,7 @@ impl WasmExtension {
|
||||
{
|
||||
let (return_tx, return_rx) = oneshot::channel();
|
||||
self.tx
|
||||
.clone()
|
||||
.unbounded_send(Box::new(move |extension, store| {
|
||||
async {
|
||||
let result = f(extension, store).await;
|
||||
|
||||
@@ -54,7 +54,6 @@ const SUGGESTIONS_BY_EXTENSION_ID: &[(&str, &[&str])] = &[
("nu", &["nu"]),
("ocaml", &["ml", "mli"]),
("php", &["php"]),
("powershell", &["ps1", "psm1"]),
("prisma", &["prisma"]),
("proto", &["proto"]),
("purescript", &["purs"]),

@@ -375,10 +375,8 @@ impl GitRepository for FakeGitRepository {
    _message: gpui::SharedString,
    _name_and_email: Option<(gpui::SharedString, gpui::SharedString)>,
    _options: CommitOptions,
    _ask_pass: AskPassDelegate,
    _env: Arc<HashMap<String, String>>,
    _cx: AsyncApp,
) -> BoxFuture<'static, Result<()>> {
) -> BoxFuture<'_, Result<()>> {
    unimplemented!()
}

@@ -41,9 +41,9 @@ futures.workspace = true
workspace-hack.workspace = true

[dev-dependencies]
gpui = { workspace = true, features = ["test-support"] }
pretty_assertions.workspace = true
serde_json.workspace = true
tempfile.workspace = true
text = { workspace = true, features = ["test-support"] }
unindent.workspace = true
gpui = { workspace = true, features = ["test-support"] }
tempfile.workspace = true

@@ -391,12 +391,8 @@ pub trait GitRepository: Send + Sync {
    message: SharedString,
    name_and_email: Option<(SharedString, SharedString)>,
    options: CommitOptions,
    askpass: AskPassDelegate,
    env: Arc<HashMap<String, String>>,
    // This method takes an AsyncApp to ensure it's invoked on the main thread,
    // otherwise git-credentials-manager won't work.
    cx: AsyncApp,
) -> BoxFuture<'static, Result<()>>;
) -> BoxFuture<'_, Result<()>>;

fn push(
|
||||
&self,
|
||||
@@ -1197,68 +1193,36 @@ impl GitRepository for RealGitRepository {
|
||||
message: SharedString,
|
||||
name_and_email: Option<(SharedString, SharedString)>,
|
||||
options: CommitOptions,
|
||||
ask_pass: AskPassDelegate,
|
||||
env: Arc<HashMap<String, String>>,
|
||||
cx: AsyncApp,
|
||||
) -> BoxFuture<'static, Result<()>> {
|
||||
) -> BoxFuture<'_, Result<()>> {
|
||||
let working_directory = self.working_directory();
|
||||
let executor = cx.background_executor().clone();
|
||||
async move {
|
||||
let working_directory = working_directory?;
|
||||
let have_user_git_askpass = env.contains_key("GIT_ASKPASS");
|
||||
let mut command = new_smol_command("git");
|
||||
command.current_dir(&working_directory).envs(env.iter());
|
||||
self.executor
|
||||
.spawn(async move {
|
||||
let mut cmd = new_smol_command("git");
|
||||
cmd.current_dir(&working_directory?)
|
||||
.envs(env.iter())
|
||||
.args(["commit", "--quiet", "-m"])
|
||||
.arg(&message.to_string())
|
||||
.arg("--cleanup=strip");
|
||||
|
||||
let ask_pass = if have_user_git_askpass {
|
||||
None
|
||||
} else {
|
||||
Some(AskPassSession::new(&executor, ask_pass).await?)
|
||||
};
|
||||
if options.amend {
|
||||
cmd.arg("--amend");
|
||||
}
|
||||
|
||||
if let Some(program) = ask_pass
|
||||
.as_ref()
|
||||
.and_then(|ask_pass| ask_pass.gpg_script_path())
|
||||
{
|
||||
command.arg("-c").arg(format!(
|
||||
"gpg.program={}",
|
||||
program.as_ref().to_string_lossy()
|
||||
));
|
||||
}
|
||||
if let Some((name, email)) = name_and_email {
|
||||
cmd.arg("--author").arg(&format!("{name} <{email}>"));
|
||||
}
|
||||
|
||||
command
|
||||
.args(["commit", "-m"])
|
||||
.arg(message.to_string())
|
||||
.arg("--cleanup=strip")
|
||||
.stdin(smol::process::Stdio::null())
|
||||
.stdout(smol::process::Stdio::piped())
|
||||
.stderr(smol::process::Stdio::piped());
|
||||
let output = cmd.output().await?;
|
||||
|
||||
if options.amend {
|
||||
command.arg("--amend");
|
||||
}
|
||||
|
||||
if let Some((name, email)) = name_and_email {
|
||||
command.arg("--author").arg(&format!("{name} <{email}>"));
|
||||
}
|
||||
|
||||
if let Some(ask_pass) = ask_pass {
|
||||
command.env("GIT_ASKPASS", ask_pass.script_path());
|
||||
let git_process = command.spawn()?;
|
||||
|
||||
run_askpass_command(ask_pass, git_process).await?;
|
||||
Ok(())
|
||||
} else {
|
||||
let git_process = command.spawn()?;
|
||||
let output = git_process.output().await?;
|
||||
anyhow::ensure!(
|
||||
output.status.success(),
|
||||
"{}",
|
||||
"Failed to commit:\n{}",
|
||||
String::from_utf8_lossy(&output.stderr)
|
||||
);
|
||||
Ok(())
}
}
.boxed()
})
.boxed()
}

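Both sides of the commit hunk above assemble essentially the same underlying `git commit` invocation: a quiet commit with `--cleanup=strip`, plus optional `--amend` and `--author` flags. A minimal standalone sketch of that command construction, using std's `Command` instead of Zed's smol wrapper and a toy `CommitOptions` struct (the author values below are purely illustrative):

```rust
use std::collections::HashMap;
use std::path::Path;
use std::process::Command;

// Hypothetical stand-in; Zed's real CommitOptions and command wrapper live elsewhere.
struct CommitOptions {
    amend: bool,
}

fn build_commit_command(
    working_directory: &Path,
    message: &str,
    name_and_email: Option<(&str, &str)>,
    options: &CommitOptions,
    env: &HashMap<String, String>,
) -> Command {
    let mut cmd = Command::new("git");
    cmd.current_dir(working_directory)
        .envs(env)
        .args(["commit", "--quiet", "-m"])
        .arg(message)
        // `--cleanup=strip` trims comment lines and surrounding whitespace from the message.
        .arg("--cleanup=strip");
    if options.amend {
        cmd.arg("--amend");
    }
    if let Some((name, email)) = name_and_email {
        cmd.arg("--author").arg(format!("{name} <{email}>"));
    }
    cmd
}

fn main() {
    let cmd = build_commit_command(
        Path::new("."),
        "Initial commit",
        Some(("Jane Doe", "jane@example.com")),
        &CommitOptions { amend: false },
        &HashMap::new(),
    );
    println!("{cmd:?}"); // inspect the argv that would be spawned
}
```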
fn push(
|
||||
@@ -2082,16 +2046,12 @@ mod tests {
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
cx.spawn(|cx| {
|
||||
repo.commit(
|
||||
"Initial commit".into(),
|
||||
None,
|
||||
CommitOptions::default(),
|
||||
AskPassDelegate::new_always_failing(),
|
||||
Arc::new(checkpoint_author_envs()),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
repo.commit(
|
||||
"Initial commit".into(),
|
||||
None,
|
||||
CommitOptions::default(),
|
||||
Arc::new(checkpoint_author_envs()),
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
@@ -2115,16 +2075,12 @@ mod tests {
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
cx.spawn(|cx| {
|
||||
repo.commit(
|
||||
"Commit after checkpoint".into(),
|
||||
None,
|
||||
CommitOptions::default(),
|
||||
AskPassDelegate::new_always_failing(),
|
||||
Arc::new(checkpoint_author_envs()),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
repo.commit(
|
||||
"Commit after checkpoint".into(),
|
||||
None,
|
||||
CommitOptions::default(),
|
||||
Arc::new(checkpoint_author_envs()),
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
@@ -2257,16 +2213,12 @@ mod tests {
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
cx.spawn(|cx| {
|
||||
repo.commit(
|
||||
"Initial commit".into(),
|
||||
None,
|
||||
CommitOptions::default(),
|
||||
AskPassDelegate::new_always_failing(),
|
||||
Arc::new(checkpoint_author_envs()),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
repo.commit(
|
||||
"Initial commit".into(),
|
||||
None,
|
||||
CommitOptions::default(),
|
||||
Arc::new(checkpoint_author_envs()),
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
|
||||
@@ -11,7 +11,10 @@ use gpui::{
|
||||
use language::{Anchor, Buffer, BufferId};
|
||||
use project::{ConflictRegion, ConflictSet, ConflictSetUpdate, ProjectItem as _};
|
||||
use std::{ops::Range, sync::Arc};
|
||||
use ui::{ActiveTheme, Element as _, Styled, Window, prelude::*};
|
||||
use ui::{
|
||||
ActiveTheme, AnyElement, Element as _, StatefulInteractiveElement, Styled,
|
||||
StyledTypography as _, Window, div, h_flex, rems,
|
||||
};
|
||||
use util::{ResultExt as _, debug_panic, maybe};
|
||||
|
||||
pub(crate) struct ConflictAddon {
|
||||
@@ -388,15 +391,20 @@ fn render_conflict_buttons(
|
||||
cx: &mut BlockContext,
|
||||
) -> AnyElement {
|
||||
h_flex()
|
||||
.id(cx.block_id)
|
||||
.h(cx.line_height)
|
||||
.ml(cx.margins.gutter.width)
|
||||
.items_end()
|
||||
.gap_1()
|
||||
.bg(cx.theme().colors().editor_background)
|
||||
.ml(cx.margins.gutter.width)
|
||||
.id(cx.block_id)
|
||||
.gap_0p5()
|
||||
.child(
|
||||
Button::new("head", "Use HEAD")
|
||||
.label_size(LabelSize::Small)
|
||||
div()
|
||||
.id("ours")
|
||||
.px_1()
|
||||
.child("Take Ours")
|
||||
.rounded_t(rems(0.2))
|
||||
.text_ui_sm(cx)
|
||||
.hover(|this| this.bg(cx.theme().colors().element_background))
|
||||
.cursor_pointer()
|
||||
.on_click({
|
||||
let editor = editor.clone();
|
||||
let conflict = conflict.clone();
|
||||
@@ -415,8 +423,14 @@ fn render_conflict_buttons(
|
||||
}),
|
||||
)
|
||||
.child(
|
||||
Button::new("origin", "Use Origin")
|
||||
.label_size(LabelSize::Small)
|
||||
div()
|
||||
.id("theirs")
|
||||
.px_1()
|
||||
.child("Take Theirs")
|
||||
.rounded_t(rems(0.2))
|
||||
.text_ui_sm(cx)
|
||||
.hover(|this| this.bg(cx.theme().colors().element_background))
|
||||
.cursor_pointer()
|
||||
.on_click({
|
||||
let editor = editor.clone();
|
||||
let conflict = conflict.clone();
|
||||
@@ -435,8 +449,14 @@ fn render_conflict_buttons(
|
||||
}),
|
||||
)
|
||||
.child(
|
||||
Button::new("both", "Use Both")
|
||||
.label_size(LabelSize::Small)
|
||||
div()
|
||||
.id("both")
|
||||
.px_1()
|
||||
.child("Take Both")
|
||||
.rounded_t(rems(0.2))
|
||||
.text_ui_sm(cx)
|
||||
.hover(|this| this.bg(cx.theme().colors().element_background))
|
||||
.cursor_pointer()
|
||||
.on_click({
|
||||
let editor = editor.clone();
|
||||
let conflict = conflict.clone();
|
||||
|
||||
@@ -1574,15 +1574,10 @@ impl GitPanel {
|
||||
|
||||
let task = if self.has_staged_changes() {
|
||||
// Repository serializes all git operations, so we can just send a commit immediately
|
||||
cx.spawn_in(window, async move |this, cx| {
|
||||
let askpass_delegate = this.update_in(cx, |this, window, cx| {
|
||||
this.askpass_delegate("git commit", window, cx)
|
||||
})?;
|
||||
let commit_task = active_repository.update(cx, |repo, cx| {
|
||||
repo.commit(message.into(), None, options, askpass_delegate, cx)
|
||||
})?;
|
||||
commit_task.await?
|
||||
})
|
||||
let commit_task = active_repository.update(cx, |repo, cx| {
|
||||
repo.commit(message.into(), None, options, cx)
|
||||
});
|
||||
cx.background_spawn(async move { commit_task.await? })
|
||||
} else {
|
||||
let changed_files = self
|
||||
.entries
|
||||
@@ -1599,13 +1594,10 @@ impl GitPanel {
|
||||
|
||||
let stage_task =
|
||||
active_repository.update(cx, |repo, cx| repo.stage_entries(changed_files, cx));
|
||||
cx.spawn_in(window, async move |this, cx| {
|
||||
cx.spawn(async move |_, cx| {
|
||||
stage_task.await?;
|
||||
let askpass_delegate = this.update_in(cx, |this, window, cx| {
|
||||
this.askpass_delegate("git commit".to_string(), window, cx)
|
||||
})?;
|
||||
let commit_task = active_repository.update(cx, |repo, cx| {
|
||||
repo.commit(message.into(), None, options, askpass_delegate, cx)
|
||||
repo.commit(message.into(), None, options, cx)
|
||||
})?;
|
||||
commit_task.await?
|
||||
})
|
||||
|
||||
@@ -150,9 +150,6 @@ metal.workspace = true
|
||||
[target.'cfg(any(target_os = "linux", target_os = "freebsd", target_os = "macos"))'.dependencies]
|
||||
pathfinder_geometry = "0.5"
|
||||
|
||||
[target.'cfg(any(target_os = "linux", target_os = "freebsd", target_os = "windows"))'.dependencies]
|
||||
scap = { workspace = true, optional = true }
|
||||
|
||||
[target.'cfg(any(target_os = "linux", target_os = "freebsd"))'.dependencies]
|
||||
# Always used
|
||||
flume = "0.11"
|
||||
@@ -171,6 +168,7 @@ cosmic-text = { version = "0.14.0", optional = true }
|
||||
font-kit = { git = "https://github.com/zed-industries/font-kit", rev = "5474cfad4b719a72ec8ed2cb7327b2b01fd10568", features = [
|
||||
"source-fontconfig-dlopen",
|
||||
], optional = true }
|
||||
scap = { workspace = true, optional = true }
|
||||
|
||||
calloop = { version = "0.13.0" }
|
||||
filedescriptor = { version = "0.8.2", optional = true }
|
||||
|
||||
@@ -26,13 +26,8 @@ mod windows;
|
||||
|
||||
#[cfg(all(
|
||||
feature = "screen-capture",
|
||||
any(
|
||||
target_os = "windows",
|
||||
all(
|
||||
any(target_os = "linux", target_os = "freebsd"),
|
||||
any(feature = "wayland", feature = "x11"),
|
||||
)
|
||||
)
|
||||
any(target_os = "linux", target_os = "freebsd"),
|
||||
any(feature = "wayland", feature = "x11"),
|
||||
))]
|
||||
pub(crate) mod scap_screen_capture;
|
||||
|
||||
|
||||
@@ -1,6 +1,13 @@
|
||||
#[cfg(any(feature = "wayland", feature = "x11"))]
|
||||
use collections::{HashMap, HashSet};
|
||||
#[cfg(any(feature = "wayland", feature = "x11"))]
|
||||
use strum::{EnumIter, IntoEnumIterator as _};
|
||||
#[cfg(any(feature = "wayland", feature = "x11"))]
|
||||
use xkbcommon::xkb::{Keycode, Keymap, Keysym, MOD_NAME_SHIFT, State};
|
||||
|
||||
use crate::{PlatformKeyboardLayout, SharedString};
|
||||
|
||||
#[derive(Clone)]
|
||||
#[derive(Debug, Clone)]
|
||||
pub(crate) struct LinuxKeyboardLayout {
|
||||
name: SharedString,
|
||||
}
|
||||
@@ -20,3 +27,449 @@ impl LinuxKeyboardLayout {
|
||||
Self { name }
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(any(feature = "wayland", feature = "x11"))]
|
||||
pub(crate) struct LinuxKeyboardMapper {
|
||||
letters: HashMap<Keycode, String>,
|
||||
code_to_key: HashMap<Keycode, String>,
|
||||
code_to_shifted_key: HashMap<Keycode, String>,
|
||||
}
|
||||
|
||||
#[cfg(any(feature = "wayland", feature = "x11"))]
|
||||
impl LinuxKeyboardMapper {
|
||||
pub(crate) fn new(
|
||||
keymap: &Keymap,
|
||||
base_group: u32,
|
||||
latched_group: u32,
|
||||
locked_group: u32,
|
||||
) -> Self {
|
||||
let mut xkb_state = State::new(keymap);
|
||||
xkb_state.update_mask(0, 0, 0, base_group, latched_group, locked_group);
|
||||
|
||||
let mut shifted_state = State::new(&keymap);
|
||||
let shift_mod = keymap.mod_get_index(MOD_NAME_SHIFT);
|
||||
let shift_mask = 1 << shift_mod;
|
||||
shifted_state.update_mask(shift_mask, 0, 0, base_group, latched_group, locked_group);
|
||||
|
||||
let mut letters = HashMap::default();
|
||||
let mut code_to_key = HashMap::default();
|
||||
let mut code_to_shifted_key = HashMap::default();
|
||||
let mut inserted_letters = HashSet::default();
|
||||
|
||||
for scan_code in LinuxScanCodes::iter() {
|
||||
let keycode = Keycode::new(scan_code as u32);
|
||||
|
||||
let key = xkb_state.key_get_utf8(keycode);
|
||||
if !key.is_empty() {
|
||||
if key_is_a_letter(&key) {
|
||||
letters.insert(keycode, key.clone());
|
||||
inserted_letters.insert(key);
|
||||
} else {
|
||||
code_to_key.insert(keycode, key.clone());
|
||||
}
|
||||
} else {
|
||||
// keycode might be a dead key
|
||||
let keysym = xkb_state.key_get_one_sym(keycode);
|
||||
if let Some(key) = underlying_dead_key(keysym) {
|
||||
code_to_key.insert(keycode, key.clone());
|
||||
}
|
||||
}
|
||||
|
||||
let shifted_key = shifted_state.key_get_utf8(keycode);
|
||||
if !shifted_key.is_empty() {
|
||||
code_to_shifted_key.insert(keycode, shifted_key);
|
||||
} else {
|
||||
// keycode might be a dead key
|
||||
let shifted_keysym = shifted_state.key_get_one_sym(keycode);
|
||||
if let Some(shifted_key) = underlying_dead_key(shifted_keysym) {
|
||||
code_to_shifted_key.insert(keycode, shifted_key);
|
||||
}
|
||||
}
|
||||
}
|
||||
insert_letters_if_missing(&inserted_letters, &mut letters);
|
||||
|
||||
Self {
|
||||
letters,
|
||||
code_to_key,
|
||||
code_to_shifted_key,
|
||||
}
|
||||
}
|
||||
|
||||
    pub(crate) fn get_key(
        &self,
        keycode: Keycode,
        modifiers: &mut crate::Modifiers,
    ) -> Option<String> {
        if let Some(key) = self.letters.get(&keycode) {
            return Some(key.clone());
        }
        if modifiers.shift {
            modifiers.shift = false;
            self.code_to_shifted_key.get(&keycode).cloned()
        } else {
            self.code_to_key.get(&keycode).cloned()
        }
    }
}

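This lookup is what later feeds `Keystroke::from_xkb`: letters always resolve from the unshifted table so keybindings stay stable across layouts, while for every other key a held Shift is consumed and the shifted symbol is returned. A minimal, self-contained imitation of that rule (plain `u32` keycodes and a bare `bool` standing in for gpui's `Keycode` and `Modifiers`):

```rust
use std::collections::HashMap;

// Toy stand-in for the mapper's lookup rule.
struct Mapper {
    letters: HashMap<u32, String>,
    code_to_key: HashMap<u32, String>,
    code_to_shifted_key: HashMap<u32, String>,
}

impl Mapper {
    fn get_key(&self, keycode: u32, shift: &mut bool) -> Option<String> {
        if let Some(key) = self.letters.get(&keycode) {
            return Some(key.clone()); // e.g. "a", even when Shift is held
        }
        if *shift {
            *shift = false; // shift is folded into the returned symbol
            self.code_to_shifted_key.get(&keycode).cloned()
        } else {
            self.code_to_key.get(&keycode).cloned()
        }
    }
}

fn main() {
    let mapper = Mapper {
        letters: HashMap::from([(0x26, "a".to_string())]),
        code_to_key: HashMap::from([(0x0b, "2".to_string())]),
        code_to_shifted_key: HashMap::from([(0x0b, "@".to_string())]),
    };
    let mut shift = true;
    assert_eq!(mapper.get_key(0x0b, &mut shift).as_deref(), Some("@"));
    assert!(!shift); // shift consumed for the symbol
    let mut shift = true;
    assert_eq!(mapper.get_key(0x26, &mut shift).as_deref(), Some("a"));
    assert!(shift); // shift kept for the letter, so "shift-a" bindings still match
}
```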
#[cfg(any(feature = "wayland", feature = "x11"))]
|
||||
fn key_is_a_letter(key: &str) -> bool {
|
||||
matches!(
|
||||
key,
|
||||
"a" | "b"
|
||||
| "c"
|
||||
| "d"
|
||||
| "e"
|
||||
| "f"
|
||||
| "g"
|
||||
| "h"
|
||||
| "i"
|
||||
| "j"
|
||||
| "k"
|
||||
| "l"
|
||||
| "m"
|
||||
| "n"
|
||||
| "o"
|
||||
| "p"
|
||||
| "q"
|
||||
| "r"
|
||||
| "s"
|
||||
| "t"
|
||||
| "u"
|
||||
| "v"
|
||||
| "w"
|
||||
| "x"
|
||||
| "y"
|
||||
| "z"
|
||||
)
|
||||
}
|
||||
|
||||
/**
 * Returns which symbol the dead key represents
 * <https://developer.mozilla.org/en-US/docs/Web/API/UI_Events/Keyboard_event_key_values#dead_keycodes_for_linux>
 */
#[cfg(any(feature = "wayland", feature = "x11"))]
pub(crate) fn underlying_dead_key(keysym: Keysym) -> Option<String> {
    match keysym {
Keysym::dead_grave => Some("`".to_owned()),
|
||||
Keysym::dead_acute => Some("´".to_owned()),
|
||||
Keysym::dead_circumflex => Some("^".to_owned()),
|
||||
Keysym::dead_tilde => Some("~".to_owned()),
|
||||
Keysym::dead_macron => Some("¯".to_owned()),
|
||||
Keysym::dead_breve => Some("˘".to_owned()),
|
||||
Keysym::dead_abovedot => Some("˙".to_owned()),
|
||||
Keysym::dead_diaeresis => Some("¨".to_owned()),
|
||||
Keysym::dead_abovering => Some("˚".to_owned()),
|
||||
Keysym::dead_doubleacute => Some("˝".to_owned()),
|
||||
Keysym::dead_caron => Some("ˇ".to_owned()),
|
||||
Keysym::dead_cedilla => Some("¸".to_owned()),
|
||||
Keysym::dead_ogonek => Some("˛".to_owned()),
|
||||
Keysym::dead_iota => Some("ͅ".to_owned()),
|
||||
Keysym::dead_voiced_sound => Some("゙".to_owned()),
|
||||
Keysym::dead_semivoiced_sound => Some("゚".to_owned()),
|
||||
Keysym::dead_belowdot => Some("̣̣".to_owned()),
|
||||
Keysym::dead_hook => Some("̡".to_owned()),
|
||||
Keysym::dead_horn => Some("̛".to_owned()),
|
||||
Keysym::dead_stroke => Some("̶̶".to_owned()),
|
||||
Keysym::dead_abovecomma => Some("̓̓".to_owned()),
|
||||
Keysym::dead_abovereversedcomma => Some("ʽ".to_owned()),
|
||||
Keysym::dead_doublegrave => Some("̏".to_owned()),
|
||||
Keysym::dead_belowring => Some("˳".to_owned()),
|
||||
Keysym::dead_belowmacron => Some("̱".to_owned()),
|
||||
Keysym::dead_belowcircumflex => Some("ꞈ".to_owned()),
|
||||
Keysym::dead_belowtilde => Some("̰".to_owned()),
|
||||
Keysym::dead_belowbreve => Some("̮".to_owned()),
|
||||
Keysym::dead_belowdiaeresis => Some("̤".to_owned()),
|
||||
Keysym::dead_invertedbreve => Some("̯".to_owned()),
|
||||
Keysym::dead_belowcomma => Some("̦".to_owned()),
|
||||
Keysym::dead_currency => None,
|
||||
Keysym::dead_lowline => None,
|
||||
Keysym::dead_aboveverticalline => None,
|
||||
Keysym::dead_belowverticalline => None,
|
||||
Keysym::dead_longsolidusoverlay => None,
|
||||
Keysym::dead_a => None,
|
||||
Keysym::dead_A => None,
|
||||
Keysym::dead_e => None,
|
||||
Keysym::dead_E => None,
|
||||
Keysym::dead_i => None,
|
||||
Keysym::dead_I => None,
|
||||
Keysym::dead_o => None,
|
||||
Keysym::dead_O => None,
|
||||
Keysym::dead_u => None,
|
||||
Keysym::dead_U => None,
|
||||
Keysym::dead_small_schwa => Some("ə".to_owned()),
|
||||
Keysym::dead_capital_schwa => Some("Ə".to_owned()),
|
||||
Keysym::dead_greek => None,
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(any(feature = "wayland", feature = "x11"))]
|
||||
fn insert_letters_if_missing(inserted: &HashSet<String>, letters: &mut HashMap<Keycode, String>) {
|
||||
for scan_code in LinuxScanCodes::LETTERS.iter() {
|
||||
let keycode = Keycode::new(*scan_code as u32);
|
||||
let key = scan_code.to_str();
|
||||
if !inserted.contains(key) {
|
||||
letters.insert(keycode, key.to_owned());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(any(feature = "wayland", feature = "x11"))]
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, EnumIter)]
|
||||
enum LinuxScanCodes {
|
||||
A = 0x0026,
|
||||
B = 0x0038,
|
||||
C = 0x0036,
|
||||
D = 0x0028,
|
||||
E = 0x001a,
|
||||
F = 0x0029,
|
||||
G = 0x002a,
|
||||
H = 0x002b,
|
||||
I = 0x001f,
|
||||
J = 0x002c,
|
||||
K = 0x002d,
|
||||
L = 0x002e,
|
||||
M = 0x003a,
|
||||
N = 0x0039,
|
||||
O = 0x0020,
|
||||
P = 0x0021,
|
||||
Q = 0x0018,
|
||||
R = 0x001b,
|
||||
S = 0x0027,
|
||||
T = 0x001c,
|
||||
U = 0x001e,
|
||||
V = 0x0037,
|
||||
W = 0x0019,
|
||||
X = 0x0035,
|
||||
Y = 0x001d,
|
||||
Z = 0x0034,
|
||||
Digit0 = 0x0013,
|
||||
Digit1 = 0x000a,
|
||||
Digit2 = 0x000b,
|
||||
Digit3 = 0x000c,
|
||||
Digit4 = 0x000d,
|
||||
Digit5 = 0x000e,
|
||||
Digit6 = 0x000f,
|
||||
Digit7 = 0x0010,
|
||||
Digit8 = 0x0011,
|
||||
Digit9 = 0x0012,
|
||||
Backquote = 0x0031,
|
||||
Minus = 0x0014,
|
||||
Equal = 0x0015,
|
||||
LeftBracket = 0x0022,
|
||||
RightBracket = 0x0023,
|
||||
Backslash = 0x0033,
|
||||
Semicolon = 0x002f,
|
||||
Quote = 0x0030,
|
||||
Comma = 0x003b,
|
||||
Period = 0x003c,
|
||||
Slash = 0x003d,
|
||||
// This key is typically located near LeftShift key, varies on international keyboards: Dan: <> Dutch: ][ Ger: <> UK: \|
|
||||
IntlBackslash = 0x005e,
|
||||
// Used for Brazilian /? and Japanese _ 'ro'.
|
||||
IntlRo = 0x0061,
|
||||
}
|
||||
|
||||
#[cfg(any(feature = "wayland", feature = "x11"))]
|
||||
impl LinuxScanCodes {
|
||||
const LETTERS: &'static [LinuxScanCodes] = &[
|
||||
LinuxScanCodes::A,
|
||||
LinuxScanCodes::B,
|
||||
LinuxScanCodes::C,
|
||||
LinuxScanCodes::D,
|
||||
LinuxScanCodes::E,
|
||||
LinuxScanCodes::F,
|
||||
LinuxScanCodes::G,
|
||||
LinuxScanCodes::H,
|
||||
LinuxScanCodes::I,
|
||||
LinuxScanCodes::J,
|
||||
LinuxScanCodes::K,
|
||||
LinuxScanCodes::L,
|
||||
LinuxScanCodes::M,
|
||||
LinuxScanCodes::N,
|
||||
LinuxScanCodes::O,
|
||||
LinuxScanCodes::P,
|
||||
LinuxScanCodes::Q,
|
||||
LinuxScanCodes::R,
|
||||
LinuxScanCodes::S,
|
||||
LinuxScanCodes::T,
|
||||
LinuxScanCodes::U,
|
||||
LinuxScanCodes::V,
|
||||
LinuxScanCodes::W,
|
||||
LinuxScanCodes::X,
|
||||
LinuxScanCodes::Y,
|
||||
LinuxScanCodes::Z,
|
||||
];
|
||||
|
||||
fn to_str(&self) -> &str {
|
||||
match self {
|
||||
LinuxScanCodes::A => "a",
|
||||
LinuxScanCodes::B => "b",
|
||||
LinuxScanCodes::C => "c",
|
||||
LinuxScanCodes::D => "d",
|
||||
LinuxScanCodes::E => "e",
|
||||
LinuxScanCodes::F => "f",
|
||||
LinuxScanCodes::G => "g",
|
||||
LinuxScanCodes::H => "h",
|
||||
LinuxScanCodes::I => "i",
|
||||
LinuxScanCodes::J => "j",
|
||||
LinuxScanCodes::K => "k",
|
||||
LinuxScanCodes::L => "l",
|
||||
LinuxScanCodes::M => "m",
|
||||
LinuxScanCodes::N => "n",
|
||||
LinuxScanCodes::O => "o",
|
||||
LinuxScanCodes::P => "p",
|
||||
LinuxScanCodes::Q => "q",
|
||||
LinuxScanCodes::R => "r",
|
||||
LinuxScanCodes::S => "s",
|
||||
LinuxScanCodes::T => "t",
|
||||
LinuxScanCodes::U => "u",
|
||||
LinuxScanCodes::V => "v",
|
||||
LinuxScanCodes::W => "w",
|
||||
LinuxScanCodes::X => "x",
|
||||
LinuxScanCodes::Y => "y",
|
||||
LinuxScanCodes::Z => "z",
|
||||
LinuxScanCodes::Digit0 => "0",
|
||||
LinuxScanCodes::Digit1 => "1",
|
||||
LinuxScanCodes::Digit2 => "2",
|
||||
LinuxScanCodes::Digit3 => "3",
|
||||
LinuxScanCodes::Digit4 => "4",
|
||||
LinuxScanCodes::Digit5 => "5",
|
||||
LinuxScanCodes::Digit6 => "6",
|
||||
LinuxScanCodes::Digit7 => "7",
|
||||
LinuxScanCodes::Digit8 => "8",
|
||||
LinuxScanCodes::Digit9 => "9",
|
||||
LinuxScanCodes::Backquote => "`",
|
||||
LinuxScanCodes::Minus => "-",
|
||||
LinuxScanCodes::Equal => "=",
|
||||
LinuxScanCodes::LeftBracket => "[",
|
||||
LinuxScanCodes::RightBracket => "]",
|
||||
LinuxScanCodes::Backslash => "\\",
|
||||
LinuxScanCodes::Semicolon => ";",
|
||||
LinuxScanCodes::Quote => "'",
|
||||
LinuxScanCodes::Comma => ",",
|
||||
LinuxScanCodes::Period => ".",
|
||||
LinuxScanCodes::Slash => "/",
|
||||
LinuxScanCodes::IntlBackslash => "unknown",
|
||||
LinuxScanCodes::IntlRo => "unknown",
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
fn to_shifted(&self) -> &str {
|
||||
match self {
|
||||
LinuxScanCodes::A => "a",
|
||||
LinuxScanCodes::B => "b",
|
||||
LinuxScanCodes::C => "c",
|
||||
LinuxScanCodes::D => "d",
|
||||
LinuxScanCodes::E => "e",
|
||||
LinuxScanCodes::F => "f",
|
||||
LinuxScanCodes::G => "g",
|
||||
LinuxScanCodes::H => "h",
|
||||
LinuxScanCodes::I => "i",
|
||||
LinuxScanCodes::J => "j",
|
||||
LinuxScanCodes::K => "k",
|
||||
LinuxScanCodes::L => "l",
|
||||
LinuxScanCodes::M => "m",
|
||||
LinuxScanCodes::N => "n",
|
||||
LinuxScanCodes::O => "o",
|
||||
LinuxScanCodes::P => "p",
|
||||
LinuxScanCodes::Q => "q",
|
||||
LinuxScanCodes::R => "r",
|
||||
LinuxScanCodes::S => "s",
|
||||
LinuxScanCodes::T => "t",
|
||||
LinuxScanCodes::U => "u",
|
||||
LinuxScanCodes::V => "v",
|
||||
LinuxScanCodes::W => "w",
|
||||
LinuxScanCodes::X => "x",
|
||||
LinuxScanCodes::Y => "y",
|
||||
LinuxScanCodes::Z => "z",
|
||||
LinuxScanCodes::Digit0 => ")",
|
||||
LinuxScanCodes::Digit1 => "!",
|
||||
LinuxScanCodes::Digit2 => "@",
|
||||
LinuxScanCodes::Digit3 => "#",
|
||||
LinuxScanCodes::Digit4 => "$",
|
||||
LinuxScanCodes::Digit5 => "%",
|
||||
LinuxScanCodes::Digit6 => "^",
|
||||
LinuxScanCodes::Digit7 => "&",
|
||||
LinuxScanCodes::Digit8 => "*",
|
||||
LinuxScanCodes::Digit9 => "(",
|
||||
LinuxScanCodes::Backquote => "~",
|
||||
LinuxScanCodes::Minus => "_",
|
||||
LinuxScanCodes::Equal => "+",
|
||||
LinuxScanCodes::LeftBracket => "{",
|
||||
LinuxScanCodes::RightBracket => "}",
|
||||
LinuxScanCodes::Backslash => "|",
|
||||
LinuxScanCodes::Semicolon => ":",
|
||||
LinuxScanCodes::Quote => "\"",
|
||||
LinuxScanCodes::Comma => "<",
|
||||
LinuxScanCodes::Period => ">",
|
||||
LinuxScanCodes::Slash => "?",
|
||||
LinuxScanCodes::IntlBackslash => "unknown",
|
||||
LinuxScanCodes::IntlRo => "unknown",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(all(test, any(feature = "wayland", feature = "x11")))]
|
||||
mod tests {
|
||||
use std::sync::LazyLock;
|
||||
|
||||
use strum::IntoEnumIterator;
|
||||
use x11rb::{protocol::xkb::ConnectionExt, xcb_ffi::XCBConnection};
|
||||
use xkbcommon::xkb::{
|
||||
CONTEXT_NO_FLAGS, KEYMAP_COMPILE_NO_FLAGS, Keymap,
|
||||
x11::ffi::{XKB_X11_MIN_MAJOR_XKB_VERSION, XKB_X11_MIN_MINOR_XKB_VERSION},
|
||||
};
|
||||
|
||||
use crate::platform::linux::keyboard::LinuxScanCodes;
|
||||
|
||||
use super::LinuxKeyboardMapper;
|
||||
|
||||
fn get_keymap() -> Keymap {
|
||||
static XCB_CONNECTION: LazyLock<XCBConnection> =
|
||||
LazyLock::new(|| XCBConnection::connect(None).unwrap().0);
|
||||
|
||||
let _ = XCB_CONNECTION
|
||||
.xkb_use_extension(XKB_X11_MIN_MAJOR_XKB_VERSION, XKB_X11_MIN_MINOR_XKB_VERSION)
|
||||
.unwrap()
|
||||
.reply()
|
||||
.unwrap();
|
||||
let xkb_context = xkbcommon::xkb::Context::new(CONTEXT_NO_FLAGS);
|
||||
let xkb_device_id = xkbcommon::xkb::x11::get_core_keyboard_device_id(&*XCB_CONNECTION);
|
||||
xkbcommon::xkb::x11::keymap_new_from_device(
|
||||
&xkb_context,
|
||||
&*XCB_CONNECTION,
|
||||
xkb_device_id,
|
||||
KEYMAP_COMPILE_NO_FLAGS,
|
||||
)
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_us_layout_mapper() {
|
||||
let keymap = get_keymap();
|
||||
let mapper = LinuxKeyboardMapper::new(&keymap, 0, 0, 0);
|
||||
for scan_code in super::LinuxScanCodes::iter() {
|
||||
if scan_code == LinuxScanCodes::IntlBackslash || scan_code == LinuxScanCodes::IntlRo {
|
||||
continue;
|
||||
}
|
||||
let keycode = xkbcommon::xkb::Keycode::new(scan_code as u32);
|
||||
let key = mapper
|
||||
.get_key(keycode, &mut crate::Modifiers::default())
|
||||
.unwrap();
|
||||
assert_eq!(key.as_str(), scan_code.to_str());
|
||||
|
||||
let shifted_key = mapper
|
||||
.get_key(
|
||||
keycode,
|
||||
&mut crate::Modifiers {
|
||||
shift: true,
|
||||
..Default::default()
|
||||
},
|
||||
)
|
||||
.unwrap();
|
||||
assert_eq!(shifted_key.as_str(), scan_code.to_shifted());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -29,6 +29,9 @@ use crate::{
|
||||
Point, Result, Task, WindowAppearance, WindowParams, px,
|
||||
};
|
||||
|
||||
#[cfg(any(feature = "wayland", feature = "x11"))]
|
||||
use super::LinuxKeyboardMapper;
|
||||
|
||||
#[cfg(any(feature = "wayland", feature = "x11"))]
|
||||
pub(crate) const SCROLL_LINES: f32 = 3.0;
|
||||
|
||||
@@ -710,6 +713,7 @@ pub(super) fn log_cursor_icon_warning(message: impl std::fmt::Display) {
|
||||
impl crate::Keystroke {
|
||||
pub(super) fn from_xkb(
|
||||
state: &State,
|
||||
keyboard_mapper: &LinuxKeyboardMapper,
|
||||
mut modifiers: crate::Modifiers,
|
||||
keycode: Keycode,
|
||||
) -> Self {
|
||||
@@ -718,76 +722,67 @@ impl crate::Keystroke {
|
||||
let key_sym = state.key_get_one_sym(keycode);
|
||||
|
||||
let key = match key_sym {
|
||||
Keysym::space => "space".to_owned(),
|
||||
Keysym::BackSpace => "backspace".to_owned(),
|
||||
Keysym::Return => "enter".to_owned(),
|
||||
Keysym::Prior => "pageup".to_owned(),
|
||||
Keysym::Next => "pagedown".to_owned(),
|
||||
// Keysym::Tab => "tab".to_owned(),
|
||||
Keysym::ISO_Left_Tab => "tab".to_owned(),
|
||||
Keysym::KP_Prior => "pageup".to_owned(),
|
||||
Keysym::KP_Next => "pagedown".to_owned(),
|
||||
Keysym::uparrow => "up".to_owned(),
|
||||
Keysym::downarrow => "down".to_owned(),
|
||||
Keysym::leftarrow => "left".to_owned(),
|
||||
Keysym::rightarrow => "right".to_owned(),
|
||||
Keysym::Home | Keysym::KP_Home => "home".to_owned(),
|
||||
Keysym::End | Keysym::KP_End => "end".to_owned(),
|
||||
Keysym::Prior | Keysym::KP_Prior => "pageup".to_owned(),
|
||||
Keysym::Next | Keysym::KP_Next => "pagedown".to_owned(),
|
||||
Keysym::XF86_Back => "back".to_owned(),
|
||||
Keysym::XF86_Forward => "forward".to_owned(),
|
||||
Keysym::Escape => "escape".to_owned(),
|
||||
Keysym::Insert | Keysym::KP_Insert => "insert".to_owned(),
|
||||
Keysym::Delete | Keysym::KP_Delete => "delete".to_owned(),
|
||||
Keysym::Menu => "menu".to_owned(),
|
||||
Keysym::XF86_Cut => "cut".to_owned(),
|
||||
Keysym::XF86_Copy => "copy".to_owned(),
|
||||
Keysym::XF86_Paste => "paste".to_owned(),
|
||||
Keysym::XF86_New => "new".to_owned(),
|
||||
Keysym::XF86_Open => "open".to_owned(),
|
||||
Keysym::XF86_Save => "save".to_owned(),
|
||||
|
||||
Keysym::comma => ",".to_owned(),
|
||||
Keysym::period => ".".to_owned(),
|
||||
Keysym::less => "<".to_owned(),
|
||||
Keysym::greater => ">".to_owned(),
|
||||
Keysym::slash => "/".to_owned(),
|
||||
Keysym::question => "?".to_owned(),
|
||||
|
||||
Keysym::semicolon => ";".to_owned(),
|
||||
Keysym::colon => ":".to_owned(),
|
||||
Keysym::apostrophe => "'".to_owned(),
|
||||
Keysym::quotedbl => "\"".to_owned(),
|
||||
|
||||
Keysym::bracketleft => "[".to_owned(),
|
||||
Keysym::braceleft => "{".to_owned(),
|
||||
Keysym::bracketright => "]".to_owned(),
|
||||
Keysym::braceright => "}".to_owned(),
|
||||
Keysym::backslash => "\\".to_owned(),
|
||||
Keysym::bar => "|".to_owned(),
|
||||
|
||||
Keysym::grave => "`".to_owned(),
|
||||
Keysym::asciitilde => "~".to_owned(),
|
||||
Keysym::exclam => "!".to_owned(),
|
||||
Keysym::at => "@".to_owned(),
|
||||
Keysym::numbersign => "#".to_owned(),
|
||||
Keysym::dollar => "$".to_owned(),
|
||||
Keysym::percent => "%".to_owned(),
|
||||
Keysym::asciicircum => "^".to_owned(),
|
||||
Keysym::ampersand => "&".to_owned(),
|
||||
Keysym::asterisk => "*".to_owned(),
|
||||
Keysym::parenleft => "(".to_owned(),
|
||||
Keysym::parenright => ")".to_owned(),
|
||||
Keysym::minus => "-".to_owned(),
|
||||
Keysym::underscore => "_".to_owned(),
|
||||
Keysym::equal => "=".to_owned(),
|
||||
Keysym::plus => "+".to_owned(),
|
||||
|
||||
_ => {
|
||||
let name = xkb::keysym_get_name(key_sym).to_lowercase();
|
||||
if key_sym.is_keypad_key() {
|
||||
name.replace("kp_", "")
|
||||
} else {
|
||||
name
|
||||
}
|
||||
}
|
||||
Keysym::F1 => "f1".to_owned(),
|
||||
Keysym::F2 => "f2".to_owned(),
|
||||
Keysym::F3 => "f3".to_owned(),
|
||||
Keysym::F4 => "f4".to_owned(),
|
||||
Keysym::F5 => "f5".to_owned(),
|
||||
Keysym::F6 => "f6".to_owned(),
|
||||
Keysym::F7 => "f7".to_owned(),
|
||||
Keysym::F8 => "f8".to_owned(),
|
||||
Keysym::F9 => "f9".to_owned(),
|
||||
Keysym::F10 => "f10".to_owned(),
|
||||
Keysym::F11 => "f11".to_owned(),
|
||||
Keysym::F12 => "f12".to_owned(),
|
||||
Keysym::F13 => "f13".to_owned(),
|
||||
Keysym::F14 => "f14".to_owned(),
|
||||
Keysym::F15 => "f15".to_owned(),
|
||||
Keysym::F16 => "f16".to_owned(),
|
||||
Keysym::F17 => "f17".to_owned(),
|
||||
Keysym::F18 => "f18".to_owned(),
|
||||
Keysym::F19 => "f19".to_owned(),
|
||||
Keysym::F20 => "f20".to_owned(),
|
||||
Keysym::F21 => "f21".to_owned(),
|
||||
Keysym::F22 => "f22".to_owned(),
|
||||
Keysym::F23 => "f23".to_owned(),
|
||||
Keysym::F24 => "f24".to_owned(),
|
||||
_ => keyboard_mapper
|
||||
.get_key(keycode, &mut modifiers)
|
||||
.unwrap_or_else(|| {
|
||||
let name = xkb::keysym_get_name(key_sym).to_lowercase();
|
||||
if key_sym.is_keypad_key() {
|
||||
name.replace("kp_", "")
|
||||
} else {
|
||||
name
|
||||
}
|
||||
}),
|
||||
};
|
||||
|
||||
if modifiers.shift {
    // we only include the shift for upper-case letters by convention,
    // so don't include for numbers and symbols, but do include for
    // tab/enter, etc.
    if key.chars().count() == 1 && key.to_lowercase() == key.to_uppercase() {
        modifiers.shift = false;
    }
}

// Ignore control characters (and DEL) for the purposes of key_char
let key_char =
    (key_utf32 >= 32 && key_utf32 != 127 && !key_utf8.is_empty()).then_some(key_utf8);
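The convention in the shift-handling block above can be checked in isolation: a single-character key with no case distinction (digits, punctuation, already-shifted symbols) absorbs the Shift modifier, while cased letters and named keys keep it. A tiny self-contained check of that predicate:

```rust
// Sketch of the rule above, extracted from the condition on `key`.
fn should_drop_shift(key: &str) -> bool {
    key.chars().count() == 1 && key.to_lowercase() == key.to_uppercase()
}

fn main() {
    assert!(should_drop_shift("!"));      // symbol: shift is already encoded in the key
    assert!(should_drop_shift("5"));      // digit
    assert!(!should_drop_shift("a"));     // cased letter: keep shift-a
    assert!(!should_drop_shift("enter")); // named key: keep shift-enter
}
```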
@@ -798,65 +793,6 @@ impl crate::Keystroke {
|
||||
key_char,
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns which symbol the dead key represents
|
||||
* <https://developer.mozilla.org/en-US/docs/Web/API/UI_Events/Keyboard_event_key_values#dead_keycodes_for_linux>
|
||||
*/
|
||||
pub fn underlying_dead_key(keysym: Keysym) -> Option<String> {
|
||||
match keysym {
|
||||
Keysym::dead_grave => Some("`".to_owned()),
|
||||
Keysym::dead_acute => Some("´".to_owned()),
|
||||
Keysym::dead_circumflex => Some("^".to_owned()),
|
||||
Keysym::dead_tilde => Some("~".to_owned()),
|
||||
Keysym::dead_macron => Some("¯".to_owned()),
|
||||
Keysym::dead_breve => Some("˘".to_owned()),
|
||||
Keysym::dead_abovedot => Some("˙".to_owned()),
|
||||
Keysym::dead_diaeresis => Some("¨".to_owned()),
|
||||
Keysym::dead_abovering => Some("˚".to_owned()),
|
||||
Keysym::dead_doubleacute => Some("˝".to_owned()),
|
||||
Keysym::dead_caron => Some("ˇ".to_owned()),
|
||||
Keysym::dead_cedilla => Some("¸".to_owned()),
|
||||
Keysym::dead_ogonek => Some("˛".to_owned()),
|
||||
Keysym::dead_iota => Some("ͅ".to_owned()),
|
||||
Keysym::dead_voiced_sound => Some("゙".to_owned()),
|
||||
Keysym::dead_semivoiced_sound => Some("゚".to_owned()),
|
||||
Keysym::dead_belowdot => Some("̣̣".to_owned()),
|
||||
Keysym::dead_hook => Some("̡".to_owned()),
|
||||
Keysym::dead_horn => Some("̛".to_owned()),
|
||||
Keysym::dead_stroke => Some("̶̶".to_owned()),
|
||||
Keysym::dead_abovecomma => Some("̓̓".to_owned()),
|
||||
Keysym::dead_abovereversedcomma => Some("ʽ".to_owned()),
|
||||
Keysym::dead_doublegrave => Some("̏".to_owned()),
|
||||
Keysym::dead_belowring => Some("˳".to_owned()),
|
||||
Keysym::dead_belowmacron => Some("̱".to_owned()),
|
||||
Keysym::dead_belowcircumflex => Some("ꞈ".to_owned()),
|
||||
Keysym::dead_belowtilde => Some("̰".to_owned()),
|
||||
Keysym::dead_belowbreve => Some("̮".to_owned()),
|
||||
Keysym::dead_belowdiaeresis => Some("̤".to_owned()),
|
||||
Keysym::dead_invertedbreve => Some("̯".to_owned()),
|
||||
Keysym::dead_belowcomma => Some("̦".to_owned()),
|
||||
Keysym::dead_currency => None,
|
||||
Keysym::dead_lowline => None,
|
||||
Keysym::dead_aboveverticalline => None,
|
||||
Keysym::dead_belowverticalline => None,
|
||||
Keysym::dead_longsolidusoverlay => None,
|
||||
Keysym::dead_a => None,
|
||||
Keysym::dead_A => None,
|
||||
Keysym::dead_e => None,
|
||||
Keysym::dead_E => None,
|
||||
Keysym::dead_i => None,
|
||||
Keysym::dead_I => None,
|
||||
Keysym::dead_o => None,
|
||||
Keysym::dead_O => None,
|
||||
Keysym::dead_u => None,
|
||||
Keysym::dead_U => None,
|
||||
Keysym::dead_small_schwa => Some("ə".to_owned()),
|
||||
Keysym::dead_capital_schwa => Some("Ə".to_owned()),
|
||||
Keysym::dead_greek => None,
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(any(feature = "wayland", feature = "x11"))]
|
||||
|
||||
@@ -61,22 +61,20 @@ use wayland_protocols::xdg::decoration::zv1::client::{
|
||||
};
|
||||
use wayland_protocols::xdg::shell::client::{xdg_surface, xdg_toplevel, xdg_wm_base};
|
||||
use wayland_protocols_plasma::blur::client::{org_kde_kwin_blur, org_kde_kwin_blur_manager};
|
||||
use xkbcommon::xkb::ffi::XKB_KEYMAP_FORMAT_TEXT_V1;
|
||||
use xkbcommon::xkb::{self, KEYMAP_COMPILE_NO_FLAGS, Keycode};
|
||||
use xkbcommon::xkb::{self, KEYMAP_COMPILE_NO_FLAGS, ffi::XKB_KEYMAP_FORMAT_TEXT_V1};
|
||||
|
||||
use super::{
|
||||
display::WaylandDisplay,
|
||||
window::{ImeInput, WaylandWindowStatePtr},
|
||||
};
|
||||
|
||||
use crate::platform::{PlatformWindow, blade::BladeContext};
|
||||
use crate::{
|
||||
AnyWindowHandle, Bounds, Capslock, CursorStyle, DOUBLE_CLICK_INTERVAL, DevicePixels, DisplayId,
|
||||
FileDropEvent, ForegroundExecutor, KeyDownEvent, KeyUpEvent, Keystroke, LinuxCommon,
|
||||
LinuxKeyboardLayout, Modifiers, ModifiersChangedEvent, MouseButton, MouseDownEvent,
|
||||
MouseExitEvent, MouseMoveEvent, MouseUpEvent, NavigationDirection, Pixels, PlatformDisplay,
|
||||
PlatformInput, PlatformKeyboardLayout, Point, SCROLL_LINES, ScaledPixels, ScrollDelta,
|
||||
ScrollWheelEvent, Size, TouchPhase, WindowParams, point, px, size,
|
||||
LinuxKeyboardLayout, LinuxKeyboardMapper, Modifiers, ModifiersChangedEvent, MouseButton,
|
||||
MouseDownEvent, MouseExitEvent, MouseMoveEvent, MouseUpEvent, NavigationDirection, Pixels,
|
||||
PlatformDisplay, PlatformInput, PlatformKeyboardLayout, Point, SCROLL_LINES, ScaledPixels,
|
||||
ScrollDelta, ScrollWheelEvent, Size, TouchPhase, WindowParams, point, px, size,
|
||||
};
|
||||
use crate::{
|
||||
SharedString,
|
||||
@@ -92,6 +90,10 @@ use crate::{
|
||||
xdg_desktop_portal::{Event as XDPEvent, XDPEventSource},
|
||||
},
|
||||
};
|
||||
use crate::{
|
||||
platform::{PlatformWindow, blade::BladeContext},
|
||||
underlying_dead_key,
|
||||
};
|
||||
|
||||
/// Used to convert evdev scancode to xkb scancode
const MIN_KEYCODE: u32 = 8;
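The offset exists because X11/xkb keycodes are the kernel's evdev codes shifted up by 8, which is why the Wayland key handler below adds `MIN_KEYCODE` before any lookup. A quick sanity check against the `LinuxScanCodes` values defined earlier (KEY_A and KEY_Q are the kernel's evdev codes 30 and 16):

```rust
const MIN_KEYCODE: u32 = 8;

fn evdev_to_xkb(evdev_code: u32) -> u32 {
    evdev_code + MIN_KEYCODE
}

fn main() {
    assert_eq!(evdev_to_xkb(30), 0x26); // KEY_A -> LinuxScanCodes::A
    assert_eq!(evdev_to_xkb(16), 0x18); // KEY_Q -> LinuxScanCodes::Q
}
```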
@@ -208,9 +210,11 @@ pub(crate) struct WaylandClientState {
|
||||
// Output to scale mapping
|
||||
outputs: HashMap<ObjectId, Output>,
|
||||
in_progress_outputs: HashMap<ObjectId, InProgressOutput>,
|
||||
keyboard_layout: LinuxKeyboardLayout,
|
||||
keymap_state: Option<xkb::State>,
|
||||
compose_state: Option<xkb::compose::State>,
|
||||
keyboard_layout: LinuxKeyboardLayout,
|
||||
keyboard_mapper: Option<Rc<LinuxKeyboardMapper>>,
|
||||
keyboard_mapper_cache: HashMap<String, Rc<LinuxKeyboardMapper>>,
|
||||
drag: DragState,
|
||||
click: ClickState,
|
||||
repeat: KeyRepeat,
|
||||
@@ -340,7 +344,7 @@ impl WaylandClientStatePtr {
text_input.commit();
}

pub fn handle_keyboard_layout_change(&self) {
pub fn handle_keyboard_layout_change(&self, locked_group: u32) {
let client = self.get_client();
let mut state = client.borrow_mut();
let changed = if let Some(keymap_state) = &state.keymap_state {
@@ -350,6 +354,17 @@ impl WaylandClientStatePtr {
let changed = layout_name != state.keyboard_layout.name();
if changed {
state.keyboard_layout = LinuxKeyboardLayout::new(layout_name.to_string().into());
let mapper = state
    .keyboard_mapper_cache
    .entry(layout_name.to_string())
    .or_insert(Rc::new(LinuxKeyboardMapper::new(
        &keymap,
        0,
        0,
        locked_group,
    )))
    .clone();
state.keyboard_mapper = Some(mapper);
}
changed
} else {

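The per-layout cache above keeps one mapper per layout name so repeated layout switches do not rebuild the xkb tables. The same shape in miniature (a toy `KeyboardMapper` and an illustrative layout name; this sketch uses `or_insert_with` so the value is only constructed on a cache miss):

```rust
use std::collections::HashMap;
use std::rc::Rc;

// Illustrative mapper; the real one is built from an xkb keymap.
struct KeyboardMapper {
    layout: String,
}

fn mapper_for_layout(
    cache: &mut HashMap<String, Rc<KeyboardMapper>>,
    layout_name: &str,
) -> Rc<KeyboardMapper> {
    cache
        .entry(layout_name.to_string())
        .or_insert_with(|| {
            // Only constructed on a cache miss.
            Rc::new(KeyboardMapper {
                layout: layout_name.to_string(),
            })
        })
        .clone()
}

fn main() {
    let mut cache = HashMap::new();
    let us = mapper_for_layout(&mut cache, "English (US)");
    let us_again = mapper_for_layout(&mut cache, "English (US)");
    assert!(Rc::ptr_eq(&us, &us_again)); // switching back reuses the cached mapper
    println!("mapper for {} cached ({} entries)", us.layout, cache.len());
}
```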
@@ -447,6 +462,7 @@ impl WaylandClient {
|
||||
pub(crate) fn new() -> Self {
|
||||
let conn = Connection::connect_to_env().unwrap();
|
||||
|
||||
let keyboard_layout = LinuxKeyboardLayout::new(UNKNOWN_KEYBOARD_LAYOUT_NAME);
|
||||
let (globals, mut event_queue) =
|
||||
registry_queue_init::<WaylandClientStatePtr>(&conn).unwrap();
|
||||
let qh = event_queue.handle();
|
||||
@@ -567,9 +583,11 @@ impl WaylandClient {
|
||||
in_progress_outputs,
|
||||
windows: HashMap::default(),
|
||||
common,
|
||||
keyboard_layout: LinuxKeyboardLayout::new(UNKNOWN_KEYBOARD_LAYOUT_NAME),
|
||||
keymap_state: None,
|
||||
compose_state: None,
|
||||
keyboard_layout,
|
||||
keyboard_mapper: None,
|
||||
keyboard_mapper_cache: HashMap::default(),
|
||||
drag: DragState {
|
||||
data_offer: None,
|
||||
window: None,
|
||||
@@ -1214,7 +1232,7 @@ impl Dispatch<wl_keyboard::WlKeyboard, ()> for WaylandClientStatePtr {
|
||||
state.compose_state = get_xkb_compose_state(&xkb_context);
|
||||
drop(state);
|
||||
|
||||
this.handle_keyboard_layout_change();
|
||||
this.handle_keyboard_layout_change(0);
|
||||
}
|
||||
wl_keyboard::Event::Enter { surface, .. } => {
|
||||
state.keyboard_focused_window = get_window(&mut state, &surface.id());
|
||||
@@ -1270,7 +1288,7 @@ impl Dispatch<wl_keyboard::WlKeyboard, ()> for WaylandClientStatePtr {
|
||||
}
|
||||
|
||||
if group != old_layout {
|
||||
this.handle_keyboard_layout_change();
|
||||
this.handle_keyboard_layout_change(group);
|
||||
}
|
||||
}
|
||||
wl_keyboard::Event::Key {
|
||||
@@ -1288,20 +1306,25 @@ impl Dispatch<wl_keyboard::WlKeyboard, ()> for WaylandClientStatePtr {
|
||||
let focused_window = focused_window.clone();
|
||||
|
||||
let keymap_state = state.keymap_state.as_ref().unwrap();
|
||||
let keycode = Keycode::from(key + MIN_KEYCODE);
|
||||
let keyboard_mapper = state.keyboard_mapper.as_ref().unwrap();
|
||||
let keycode = xkb::Keycode::from(key + MIN_KEYCODE);
|
||||
let keysym = keymap_state.key_get_one_sym(keycode);
|
||||
|
||||
match key_state {
|
||||
wl_keyboard::KeyState::Pressed if !keysym.is_modifier_key() => {
|
||||
let mut keystroke =
|
||||
Keystroke::from_xkb(&keymap_state, state.modifiers, keycode);
|
||||
let mut keystroke = Keystroke::from_xkb(
|
||||
keymap_state,
|
||||
keyboard_mapper,
|
||||
state.modifiers,
|
||||
keycode,
|
||||
);
|
||||
if let Some(mut compose) = state.compose_state.take() {
|
||||
compose.feed(keysym);
|
||||
match compose.status() {
|
||||
xkb::Status::Composing => {
|
||||
keystroke.key_char = None;
|
||||
state.pre_edit_text =
|
||||
compose.utf8().or(Keystroke::underlying_dead_key(keysym));
|
||||
compose.utf8().or(underlying_dead_key(keysym));
|
||||
let pre_edit =
|
||||
state.pre_edit_text.clone().unwrap_or(String::default());
|
||||
drop(state);
|
||||
@@ -1318,7 +1341,7 @@ impl Dispatch<wl_keyboard::WlKeyboard, ()> for WaylandClientStatePtr {
|
||||
}
|
||||
xkb::Status::Cancelled => {
|
||||
let pre_edit = state.pre_edit_text.take();
|
||||
let new_pre_edit = Keystroke::underlying_dead_key(keysym);
|
||||
let new_pre_edit = underlying_dead_key(keysym);
|
||||
state.pre_edit_text = new_pre_edit.clone();
|
||||
drop(state);
|
||||
if let Some(pre_edit) = pre_edit {
|
||||
@@ -1379,7 +1402,12 @@ impl Dispatch<wl_keyboard::WlKeyboard, ()> for WaylandClientStatePtr {
|
||||
}
|
||||
wl_keyboard::KeyState::Released if !keysym.is_modifier_key() => {
|
||||
let input = PlatformInput::KeyUp(KeyUpEvent {
|
||||
keystroke: Keystroke::from_xkb(keymap_state, state.modifiers, keycode),
|
||||
keystroke: Keystroke::from_xkb(
|
||||
keymap_state,
|
||||
keyboard_mapper,
|
||||
state.modifiers,
|
||||
keycode,
|
||||
),
|
||||
});
|
||||
|
||||
if state.repeat.current_keycode == Some(keycode) {
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
use crate::{Capslock, xcb_flush};
|
||||
use core::str;
|
||||
use std::{
|
||||
cell::RefCell,
|
||||
@@ -49,7 +48,7 @@ use super::{
|
||||
};
|
||||
|
||||
use crate::platform::{
|
||||
LinuxCommon, PlatformWindow,
|
||||
Capslock, LinuxCommon, PlatformWindow,
|
||||
blade::BladeContext,
|
||||
linux::{
|
||||
DEFAULT_CURSOR_ICON_NAME, LinuxClient, get_xkb_compose_state, is_within_click_distance,
|
||||
@@ -58,13 +57,14 @@ use crate::platform::{
|
||||
reveal_path_internal,
|
||||
xdg_desktop_portal::{Event as XDPEvent, XDPEventSource},
|
||||
},
|
||||
xcb_flush,
|
||||
};
|
||||
use crate::{
|
||||
AnyWindowHandle, Bounds, ClipboardItem, CursorStyle, DisplayId, FileDropEvent, Keystroke,
|
||||
LinuxKeyboardLayout, Modifiers, ModifiersChangedEvent, MouseButton, Pixels, Platform,
|
||||
PlatformDisplay, PlatformInput, PlatformKeyboardLayout, Point, RequestFrameOptions,
|
||||
ScaledPixels, ScrollDelta, Size, TouchPhase, WindowParams, X11Window,
|
||||
modifiers_from_xinput_info, point, px,
|
||||
LinuxKeyboardLayout, LinuxKeyboardMapper, Modifiers, ModifiersChangedEvent, MouseButton,
|
||||
Pixels, Platform, PlatformDisplay, PlatformInput, PlatformKeyboardLayout, Point,
|
||||
RequestFrameOptions, ScaledPixels, ScrollDelta, Size, TouchPhase, WindowParams, X11Window,
|
||||
modifiers_from_xinput_info, point, px, underlying_dead_key,
|
||||
};
|
||||
|
||||
/// Value for DeviceId parameters which selects all devices.
|
||||
@@ -77,8 +77,6 @@ pub(crate) const XINPUT_ALL_DEVICES: xinput::DeviceId = 0;
|
||||
/// terminology is both archaic and unclear.
|
||||
pub(crate) const XINPUT_ALL_DEVICE_GROUPS: xinput::DeviceId = 1;
|
||||
|
||||
const GPUI_X11_SCALE_FACTOR_ENV: &str = "GPUI_X11_SCALE_FACTOR";
|
||||
|
||||
pub(crate) struct WindowRef {
|
||||
window: X11WindowStatePtr,
|
||||
refresh_state: Option<RefreshState>,
|
||||
@@ -202,6 +200,8 @@ pub struct X11ClientState {
|
||||
pub(crate) xkb: xkbc::State,
|
||||
previous_xkb_state: XKBStateNotiy,
|
||||
keyboard_layout: LinuxKeyboardLayout,
|
||||
keyboard_mapper: Rc<LinuxKeyboardMapper>,
|
||||
keyboard_mapper_cache: HashMap<String, Rc<LinuxKeyboardMapper>>,
|
||||
pub(crate) ximc: Option<X11rbClient<Rc<XCBConnection>>>,
|
||||
pub(crate) xim_handler: Option<XimHandler>,
|
||||
pub modifiers: Modifiers,
|
||||
@@ -405,28 +405,35 @@ impl X11Client {
|
||||
|
||||
let xkb_context = xkbc::Context::new(xkbc::CONTEXT_NO_FLAGS);
|
||||
let xkb_device_id = xkbc::x11::get_core_keyboard_device_id(&xcb_connection);
|
||||
let xkb_state = {
|
||||
let xkb_keymap = xkbc::x11::keymap_new_from_device(
|
||||
&xkb_context,
|
||||
&xcb_connection,
|
||||
xkb_device_id,
|
||||
xkbc::KEYMAP_COMPILE_NO_FLAGS,
|
||||
);
|
||||
xkbc::x11::state_new_from_device(&xkb_keymap, &xcb_connection, xkb_device_id)
|
||||
};
|
||||
let xkb_keymap = xkbc::x11::keymap_new_from_device(
|
||||
&xkb_context,
|
||||
&xcb_connection,
|
||||
xkb_device_id,
|
||||
xkbc::KEYMAP_COMPILE_NO_FLAGS,
|
||||
);
|
||||
let xkb_state =
|
||||
xkbc::x11::state_new_from_device(&xkb_keymap, &xcb_connection, xkb_device_id);
|
||||
let compose_state = get_xkb_compose_state(&xkb_context);
|
||||
let layout_idx = xkb_state.serialize_layout(STATE_LAYOUT_EFFECTIVE);
|
||||
let layout_name = xkb_state
|
||||
.get_keymap()
|
||||
.layout_get_name(layout_idx)
|
||||
.to_string();
|
||||
let keyboard_layout = LinuxKeyboardLayout::new(layout_name.into());
|
||||
let keyboard_layout = LinuxKeyboardLayout::new(layout_name.clone().into());
|
||||
let keyboard_mapper = Rc::new(LinuxKeyboardMapper::new(&xkb_keymap, 0, 0, 0));
|
||||
let mut keyboard_mapper_cache = HashMap::default();
|
||||
keyboard_mapper_cache.insert(layout_name, keyboard_mapper.clone());
|
||||
|
||||
let gpu_context = BladeContext::new().context("Unable to init GPU context")?;
|
||||
|
||||
let resource_database = x11rb::resource_manager::new_from_default(&xcb_connection)
|
||||
.context("Failed to create resource database")?;
|
||||
let scale_factor = get_scale_factor(&xcb_connection, &resource_database, x_root_index);
|
||||
let scale_factor = resource_database
|
||||
.get_value("Xft.dpi", "Xft.dpi")
|
||||
.ok()
|
||||
.flatten()
|
||||
.map(|dpi: f32| dpi / 96.0)
|
||||
.unwrap_or(1.0);
|
||||
let cursor_handle = cursor::Handle::new(&xcb_connection, x_root_index, &resource_database)
|
||||
.context("Failed to initialize cursor theme handler")?
|
||||
.reply()
|
||||
@@ -509,6 +516,8 @@ impl X11Client {
|
||||
xkb: xkb_state,
|
||||
previous_xkb_state: XKBStateNotiy::default(),
|
||||
keyboard_layout,
|
||||
keyboard_mapper,
|
||||
keyboard_mapper_cache,
|
||||
ximc,
|
||||
xim_handler,
|
||||
|
||||
@@ -969,24 +978,27 @@ impl X11Client {
|
||||
};
|
||||
state.xkb = xkb_state;
|
||||
drop(state);
|
||||
self.handle_keyboard_layout_change();
|
||||
self.handle_keyboard_layout_change(depressed_layout, latched_layout, locked_layout);
|
||||
}
|
||||
Event::XkbStateNotify(event) => {
|
||||
let mut state = self.0.borrow_mut();
|
||||
let old_layout = state.xkb.serialize_layout(STATE_LAYOUT_EFFECTIVE);
|
||||
let new_layout = u32::from(event.group);
|
||||
let base_group = event.base_group as u32;
|
||||
let latched_group = event.latched_group as u32;
|
||||
let locked_group = event.locked_group.into();
|
||||
state.xkb.update_mask(
|
||||
event.base_mods.into(),
|
||||
event.latched_mods.into(),
|
||||
event.locked_mods.into(),
|
||||
event.base_group as u32,
|
||||
event.latched_group as u32,
|
||||
event.locked_group.into(),
|
||||
base_group,
|
||||
latched_group,
|
||||
locked_group,
|
||||
);
|
||||
state.previous_xkb_state = XKBStateNotiy {
|
||||
depressed_layout: event.base_group as u32,
|
||||
latched_layout: event.latched_group as u32,
|
||||
locked_layout: event.locked_group.into(),
|
||||
depressed_layout: base_group,
|
||||
latched_layout: latched_group,
|
||||
locked_layout: locked_group,
|
||||
};
|
||||
|
||||
let modifiers = Modifiers::from_xkb(&state.xkb);
|
||||
@@ -1013,7 +1025,7 @@ impl X11Client {
|
||||
}
|
||||
|
||||
if new_layout != old_layout {
|
||||
self.handle_keyboard_layout_change();
|
||||
self.handle_keyboard_layout_change(base_group, latched_group, locked_group);
|
||||
}
|
||||
}
|
||||
Event::KeyPress(event) => {
|
||||
@@ -1034,7 +1046,12 @@ impl X11Client {
|
||||
xkb_state.latched_layout,
|
||||
xkb_state.locked_layout,
|
||||
);
|
||||
let mut keystroke = crate::Keystroke::from_xkb(&state.xkb, modifiers, code);
|
||||
let mut keystroke = crate::Keystroke::from_xkb(
|
||||
&state.xkb,
|
||||
&state.keyboard_mapper,
|
||||
modifiers,
|
||||
code,
|
||||
);
|
||||
let keysym = state.xkb.key_get_one_sym(code);
|
||||
if keysym.is_modifier_key() {
|
||||
return Some(());
|
||||
@@ -1051,9 +1068,8 @@ impl X11Client {
|
||||
}
|
||||
xkbc::Status::Composing => {
|
||||
keystroke.key_char = None;
|
||||
state.pre_edit_text = compose_state
|
||||
.utf8()
|
||||
.or(crate::Keystroke::underlying_dead_key(keysym));
|
||||
state.pre_edit_text =
|
||||
compose_state.utf8().or(underlying_dead_key(keysym));
|
||||
let pre_edit =
|
||||
state.pre_edit_text.clone().unwrap_or(String::default());
|
||||
drop(state);
|
||||
@@ -1066,7 +1082,7 @@ impl X11Client {
|
||||
if let Some(pre_edit) = pre_edit {
|
||||
window.handle_ime_commit(pre_edit);
|
||||
}
|
||||
if let Some(current_key) = Keystroke::underlying_dead_key(keysym) {
|
||||
if let Some(current_key) = underlying_dead_key(keysym) {
|
||||
window.handle_ime_preedit(current_key);
|
||||
}
|
||||
state = self.0.borrow_mut();
|
||||
@@ -1102,7 +1118,12 @@ impl X11Client {
|
||||
xkb_state.latched_layout,
|
||||
xkb_state.locked_layout,
|
||||
);
|
||||
let keystroke = crate::Keystroke::from_xkb(&state.xkb, modifiers, code);
|
||||
let keystroke = crate::Keystroke::from_xkb(
|
||||
&state.xkb,
|
||||
&state.keyboard_mapper,
|
||||
modifiers,
|
||||
code,
|
||||
);
|
||||
let keysym = state.xkb.key_get_one_sym(code);
|
||||
if keysym.is_modifier_key() {
|
||||
return Some(());
|
||||
@@ -1324,6 +1345,7 @@ impl X11Client {
|
||||
let mut state = self.0.borrow_mut();
|
||||
state.pre_key_char_down = Some(Keystroke::from_xkb(
|
||||
&state.xkb,
|
||||
&state.keyboard_mapper,
|
||||
state.modifiers,
|
||||
event.detail.into(),
|
||||
));
|
||||
@@ -1409,13 +1431,29 @@ impl X11Client {
|
||||
Some(())
|
||||
}
|
||||
|
||||
fn handle_keyboard_layout_change(&self) {
|
||||
fn handle_keyboard_layout_change(
|
||||
&self,
|
||||
base_group: u32,
|
||||
latched_group: u32,
|
||||
locked_group: u32,
|
||||
) {
|
||||
let mut state = self.0.borrow_mut();
|
||||
let layout_idx = state.xkb.serialize_layout(STATE_LAYOUT_EFFECTIVE);
|
||||
let keymap = state.xkb.get_keymap();
|
||||
let layout_name = keymap.layout_get_name(layout_idx);
|
||||
if layout_name != state.keyboard_layout.name() {
|
||||
state.keyboard_layout = LinuxKeyboardLayout::new(layout_name.to_string().into());
|
||||
let mapper = state
|
||||
.keyboard_mapper_cache
|
||||
.entry(layout_name.to_string())
|
||||
.or_insert(Rc::new(LinuxKeyboardMapper::new(
|
||||
&keymap,
|
||||
base_group,
|
||||
latched_group,
|
||||
locked_group,
|
||||
)))
|
||||
.clone();
|
||||
state.keyboard_mapper = mapper;
|
||||
if let Some(mut callback) = state.common.callbacks.keyboard_layout_change.take() {
|
||||
drop(state);
|
||||
callback();
|
||||
@@ -2269,253 +2307,3 @@ fn create_invisible_cursor(
|
||||
xcb_flush(connection);
|
||||
Ok(cursor)
|
||||
}
|
||||
|
||||
enum DpiMode {
|
||||
Randr,
|
||||
Scale(f32),
|
||||
NotSet,
|
||||
}
|
||||
|
||||
fn get_scale_factor(
|
||||
connection: &XCBConnection,
|
||||
resource_database: &Database,
|
||||
screen_index: usize,
|
||||
) -> f32 {
|
||||
let env_dpi = std::env::var(GPUI_X11_SCALE_FACTOR_ENV)
|
||||
.ok()
|
||||
.map(|var| {
|
||||
if var.to_lowercase() == "randr" {
|
||||
DpiMode::Randr
|
||||
} else if let Ok(scale) = var.parse::<f32>() {
|
||||
if valid_scale_factor(scale) {
|
||||
DpiMode::Scale(scale)
|
||||
} else {
|
||||
panic!(
|
||||
"`{}` must be a positive normal number or `randr`. Got `{}`",
|
||||
GPUI_X11_SCALE_FACTOR_ENV, var
|
||||
);
|
||||
}
|
||||
} else if var.is_empty() {
|
||||
DpiMode::NotSet
|
||||
} else {
|
||||
panic!(
|
||||
"`{}` must be a positive number or `randr`. Got `{}`",
|
||||
GPUI_X11_SCALE_FACTOR_ENV, var
|
||||
);
|
||||
}
|
||||
})
|
||||
.unwrap_or(DpiMode::NotSet);
|
||||
|
||||
match env_dpi {
|
||||
DpiMode::Scale(scale) => {
|
||||
log::info!(
|
||||
"Using scale factor from {}: {}",
|
||||
GPUI_X11_SCALE_FACTOR_ENV,
|
||||
scale
|
||||
);
|
||||
return scale;
|
||||
}
|
||||
        DpiMode::Randr => {
            if let Some(scale) = get_randr_scale_factor(connection, screen_index) {
                log::info!(
                    "Using RandR scale factor from {}=randr: {}",
                    GPUI_X11_SCALE_FACTOR_ENV,
                    scale
                );
                return scale;
            }
            log::warn!("Failed to calculate RandR scale factor, falling back to default");
            return 1.0;
        }
        DpiMode::NotSet => {}
    }

    // TODO: Use scale factor from XSettings here

    if let Some(dpi) = resource_database
        .get_value::<f32>("Xft.dpi", "Xft.dpi")
        .ok()
        .flatten()
    {
        let scale = dpi / 96.0; // base dpi
        log::info!("Using scale factor from Xft.dpi: {}", scale);
        return scale;
    }

    if let Some(scale) = get_randr_scale_factor(connection, screen_index) {
        log::info!("Using RandR scale factor: {}", scale);
        return scale;
    }

    log::info!("Using default scale factor: 1.0");
    1.0
}

fn get_randr_scale_factor(connection: &XCBConnection, screen_index: usize) -> Option<f32> {
    let root = connection.setup().roots.get(screen_index)?.root;

    let version_cookie = connection.randr_query_version(1, 6).ok()?;
    let version_reply = version_cookie.reply().ok()?;
    if version_reply.major_version < 1
        || (version_reply.major_version == 1 && version_reply.minor_version < 5)
    {
        return legacy_get_randr_scale_factor(connection, root); // for randr <1.5
    }

    let monitors_cookie = connection.randr_get_monitors(root, true).ok()?; // true for active only
    let monitors_reply = monitors_cookie.reply().ok()?;

    let mut fallback_scale: Option<f32> = None;
    for monitor in monitors_reply.monitors {
        if monitor.width_in_millimeters == 0 || monitor.height_in_millimeters == 0 {
            continue;
        }
        let scale_factor = get_dpi_factor(
            (monitor.width as u32, monitor.height as u32),
            (
                monitor.width_in_millimeters as u64,
                monitor.height_in_millimeters as u64,
            ),
        );
        if monitor.primary {
            return Some(scale_factor);
        } else if fallback_scale.is_none() {
            fallback_scale = Some(scale_factor);
        }
    }

    fallback_scale
}

fn legacy_get_randr_scale_factor(connection: &XCBConnection, root: u32) -> Option<f32> {
    let primary_cookie = connection.randr_get_output_primary(root).ok()?;
    let primary_reply = primary_cookie.reply().ok()?;
    let primary_output = primary_reply.output;

    let primary_output_cookie = connection
        .randr_get_output_info(primary_output, x11rb::CURRENT_TIME)
        .ok()?;
    let primary_output_info = primary_output_cookie.reply().ok()?;

    // try primary
    if primary_output_info.connection == randr::Connection::CONNECTED
        && primary_output_info.mm_width > 0
        && primary_output_info.mm_height > 0
        && primary_output_info.crtc != 0
    {
        let crtc_cookie = connection
            .randr_get_crtc_info(primary_output_info.crtc, x11rb::CURRENT_TIME)
            .ok()?;
        let crtc_info = crtc_cookie.reply().ok()?;

        if crtc_info.width > 0 && crtc_info.height > 0 {
            let scale_factor = get_dpi_factor(
                (crtc_info.width as u32, crtc_info.height as u32),
                (
                    primary_output_info.mm_width as u64,
                    primary_output_info.mm_height as u64,
                ),
            );
            return Some(scale_factor);
        }
    }

    // fallback: full scan
    let resources_cookie = connection.randr_get_screen_resources_current(root).ok()?;
    let screen_resources = resources_cookie.reply().ok()?;

    let mut crtc_cookies = Vec::with_capacity(screen_resources.crtcs.len());
    for &crtc in &screen_resources.crtcs {
        if let Ok(cookie) = connection.randr_get_crtc_info(crtc, x11rb::CURRENT_TIME) {
            crtc_cookies.push((crtc, cookie));
        }
    }

    let mut crtc_infos: HashMap<randr::Crtc, randr::GetCrtcInfoReply> = HashMap::default();
    let mut valid_outputs: HashSet<randr::Output> = HashSet::new();
    for (crtc, cookie) in crtc_cookies {
        if let Ok(reply) = cookie.reply() {
            if reply.width > 0 && reply.height > 0 && !reply.outputs.is_empty() {
                crtc_infos.insert(crtc, reply.clone());
                valid_outputs.extend(&reply.outputs);
            }
        }
    }

    if valid_outputs.is_empty() {
        return None;
    }

    let mut output_cookies = Vec::with_capacity(valid_outputs.len());
    for &output in &valid_outputs {
        if let Ok(cookie) = connection.randr_get_output_info(output, x11rb::CURRENT_TIME) {
            output_cookies.push((output, cookie));
        }
    }
    let mut output_infos: HashMap<randr::Output, randr::GetOutputInfoReply> = HashMap::default();
    for (output, cookie) in output_cookies {
        if let Ok(reply) = cookie.reply() {
            output_infos.insert(output, reply);
        }
    }

    let mut fallback_scale: Option<f32> = None;
    for crtc_info in crtc_infos.values() {
        for &output in &crtc_info.outputs {
            if let Some(output_info) = output_infos.get(&output) {
                if output_info.connection != randr::Connection::CONNECTED {
                    continue;
                }

                if output_info.mm_width == 0 || output_info.mm_height == 0 {
                    continue;
                }

                let scale_factor = get_dpi_factor(
                    (crtc_info.width as u32, crtc_info.height as u32),
                    (output_info.mm_width as u64, output_info.mm_height as u64),
                );

                if output != primary_output && fallback_scale.is_none() {
                    fallback_scale = Some(scale_factor);
                }
            }
        }
    }

    fallback_scale
}

fn get_dpi_factor((width_px, height_px): (u32, u32), (width_mm, height_mm): (u64, u64)) -> f32 {
    let ppmm = ((width_px as f64 * height_px as f64) / (width_mm as f64 * height_mm as f64)).sqrt(); // pixels per mm

    const MM_PER_INCH: f64 = 25.4;
    const BASE_DPI: f64 = 96.0;
    const QUANTIZE_STEP: f64 = 12.0; // e.g. 1.25 = 15/12, 1.5 = 18/12, 1.75 = 21/12, 2.0 = 24/12
    const MIN_SCALE: f64 = 1.0;
    const MAX_SCALE: f64 = 20.0;

    let dpi_factor =
        ((ppmm * (QUANTIZE_STEP * MM_PER_INCH / BASE_DPI)).round() / QUANTIZE_STEP).max(MIN_SCALE);

    let validated_factor = if dpi_factor <= MAX_SCALE {
        dpi_factor
    } else {
        MIN_SCALE
    };

    if valid_scale_factor(validated_factor as f32) {
        validated_factor as f32
    } else {
        log::warn!(
            "Calculated DPI factor {} is invalid, using 1.0",
            validated_factor
        );
        1.0
    }
}

#[inline]
fn valid_scale_factor(scale_factor: f32) -> bool {
    scale_factor.is_sign_positive() && scale_factor.is_normal()
}
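The quantization step in `get_dpi_factor` above snaps the physical-size estimate to multiples of 1/12, so common fractional scales (1.25 = 15/12, 1.5 = 18/12, 2.0 = 24/12) come out exact. A minimal standalone sketch of just that step, assuming made-up example monitor dimensions and omitting the MIN/MAX clamping and `valid_scale_factor` check from the code above:

fn quantized_scale(width_px: u32, height_px: u32, width_mm: u64, height_mm: u64) -> f64 {
    const MM_PER_INCH: f64 = 25.4;
    const BASE_DPI: f64 = 96.0;
    const QUANTIZE_STEP: f64 = 12.0;

    // Geometric mean of pixels-per-millimeter over both axes.
    let ppmm = ((width_px as f64 * height_px as f64)
        / (width_mm as f64 * height_mm as f64))
        .sqrt();
    // Convert to a scale relative to 96 DPI, then snap to the nearest 1/12.
    ((ppmm * MM_PER_INCH / BASE_DPI) * QUANTIZE_STEP).round() / QUANTIZE_STEP
}

fn main() {
    // A 2560x1440 panel measuring 597x336 mm is roughly 109 DPI,
    // which snaps to 14/12 ≈ 1.17.
    println!("{}", quantized_scale(2560, 1440, 597, 336));
}

For comparison, the Xft.dpi path above does not quantize at all; it simply divides the reported DPI by 96.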
@@ -26,7 +26,4 @@ pub(crate) use wrapper::*;
|
||||
|
||||
pub(crate) use windows::Win32::Foundation::HWND;
|
||||
|
||||
#[cfg(feature = "screen-capture")]
|
||||
pub(crate) type PlatformScreenCaptureFrame = scap::frame::Frame;
|
||||
#[cfg(not(feature = "screen-capture"))]
|
||||
pub(crate) type PlatformScreenCaptureFrame = ();
|
||||
|
||||
@@ -130,13 +130,11 @@ pub(crate) fn generate_key_char(
|
||||
let mut buffer = [0; 8];
|
||||
let len = unsafe { ToUnicode(vkey.0 as u32, scan_code, Some(&state), &mut buffer, 1 << 2) };
|
||||
|
||||
match len {
|
||||
len if len > 0 => String::from_utf16(&buffer[..len as usize])
|
||||
.ok()
|
||||
.filter(|candidate| {
|
||||
!candidate.is_empty() && !candidate.chars().next().unwrap().is_control()
|
||||
}),
|
||||
len if len < 0 => String::from_utf16(&buffer[..(-len as usize)]).ok(),
|
||||
_ => None,
|
||||
if len > 0 {
|
||||
let candidate = String::from_utf16_lossy(&buffer[..len as usize]);
|
||||
if !candidate.is_empty() && !candidate.chars().next().unwrap().is_control() {
|
||||
return Some(candidate);
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
@@ -434,14 +434,16 @@ impl Platform for WindowsPlatform {
|
||||
|
||||
#[cfg(feature = "screen-capture")]
|
||||
fn is_screen_capture_supported(&self) -> bool {
|
||||
true
|
||||
false
|
||||
}
|
||||
|
||||
#[cfg(feature = "screen-capture")]
|
||||
fn screen_capture_sources(
|
||||
&self,
|
||||
) -> oneshot::Receiver<Result<Vec<Box<dyn ScreenCaptureSource>>>> {
|
||||
crate::platform::scap_screen_capture::scap_screen_sources(&self.foreground_executor)
|
||||
let (mut tx, rx) = oneshot::channel();
|
||||
tx.send(Err(anyhow!("screen capture not implemented"))).ok();
|
||||
rx
|
||||
}
|
||||
|
||||
fn active_window(&self) -> Option<AnyWindowHandle> {
|
||||
|
||||
@@ -29,11 +29,6 @@ impl ExtensionIndexedDocsProviderProxy for IndexedDocsRegistryProxy {
|
||||
ProviderId(provider_id),
|
||||
)));
|
||||
}
|
||||
|
||||
fn unregister_indexed_docs_provider(&self, provider_id: Arc<str>) {
|
||||
self.indexed_docs_registry
|
||||
.unregister_provider(&ProviderId(provider_id));
|
||||
}
|
||||
}
|
||||
|
||||
pub struct ExtensionIndexedDocsProvider {
|
||||
|
||||
@@ -52,10 +52,6 @@ impl IndexedDocsRegistry {
|
||||
);
|
||||
}
|
||||
|
||||
pub fn unregister_provider(&self, provider_id: &ProviderId) {
|
||||
self.stores_by_provider.write().remove(provider_id);
|
||||
}
|
||||
|
||||
pub fn get_provider_store(&self, provider_id: ProviderId) -> Option<Arc<IndexedDocsStore>> {
|
||||
self.stores_by_provider.read().get(&provider_id).cloned()
|
||||
}
|
||||
|
||||
@@ -21,7 +21,6 @@ fs.workspace = true
|
||||
gpui.workspace = true
|
||||
language.workspace = true
|
||||
lsp.workspace = true
|
||||
project.workspace = true
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
util.workspace = true
|
||||
|
||||
@@ -6,24 +6,21 @@ use std::sync::Arc;
|
||||
|
||||
use anyhow::{Context as _, Result};
|
||||
use async_trait::async_trait;
|
||||
use collections::{HashMap, HashSet};
|
||||
use collections::HashMap;
|
||||
use extension::{Extension, ExtensionLanguageServerProxy, WorktreeDelegate};
|
||||
use fs::Fs;
|
||||
use futures::{Future, FutureExt, future::join_all};
|
||||
use gpui::{App, AppContext, AsyncApp, Task};
|
||||
use futures::{Future, FutureExt};
|
||||
use gpui::AsyncApp;
|
||||
use language::{
|
||||
BinaryStatus, CodeLabel, HighlightId, Language, LanguageName, LanguageToolchainStore,
|
||||
LspAdapter, LspAdapterDelegate,
|
||||
};
|
||||
use lsp::{
|
||||
CodeActionKind, LanguageServerBinary, LanguageServerBinaryOptions, LanguageServerName,
|
||||
LanguageServerSelector,
|
||||
};
|
||||
use lsp::{CodeActionKind, LanguageServerBinary, LanguageServerBinaryOptions, LanguageServerName};
|
||||
use serde::Serialize;
|
||||
use serde_json::Value;
|
||||
use util::{ResultExt, fs::make_file_executable, maybe};
|
||||
|
||||
use crate::{LanguageServerRegistryProxy, LspAccess};
|
||||
use crate::LanguageServerRegistryProxy;
|
||||
|
||||
/// An adapter that allows an [`LspAdapterDelegate`] to be used as a [`WorktreeDelegate`].
|
||||
struct WorktreeDelegateAdapter(pub Arc<dyn LspAdapterDelegate>);
|
||||
@@ -74,50 +71,10 @@ impl ExtensionLanguageServerProxy for LanguageServerRegistryProxy {
|
||||
fn remove_language_server(
|
||||
&self,
|
||||
language: &LanguageName,
|
||||
language_server_name: &LanguageServerName,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<()>> {
|
||||
language_server_id: &LanguageServerName,
|
||||
) {
|
||||
self.language_registry
|
||||
.remove_lsp_adapter(language, language_server_name);
|
||||
|
||||
let mut tasks = Vec::new();
|
||||
match &self.lsp_access {
|
||||
LspAccess::ViaLspStore(lsp_store) => lsp_store.update(cx, |lsp_store, cx| {
|
||||
let stop_task = lsp_store.stop_language_servers_for_buffers(
|
||||
Vec::new(),
|
||||
HashSet::from_iter([LanguageServerSelector::Name(
|
||||
language_server_name.clone(),
|
||||
)]),
|
||||
cx,
|
||||
);
|
||||
tasks.push(stop_task);
|
||||
}),
|
||||
LspAccess::ViaWorkspaces(lsp_store_provider) => {
|
||||
if let Ok(lsp_stores) = lsp_store_provider(cx) {
|
||||
for lsp_store in lsp_stores {
|
||||
lsp_store.update(cx, |lsp_store, cx| {
|
||||
let stop_task = lsp_store.stop_language_servers_for_buffers(
|
||||
Vec::new(),
|
||||
HashSet::from_iter([LanguageServerSelector::Name(
|
||||
language_server_name.clone(),
|
||||
)]),
|
||||
cx,
|
||||
);
|
||||
tasks.push(stop_task);
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
LspAccess::Noop => {}
|
||||
}
|
||||
|
||||
cx.background_spawn(async move {
|
||||
let results = join_all(tasks).await;
|
||||
for result in results {
|
||||
result?;
|
||||
}
|
||||
Ok(())
|
||||
})
|
||||
.remove_lsp_adapter(language, language_server_id);
|
||||
}
|
||||
|
||||
fn update_language_server_status(
|
||||
|
||||
@@ -5,26 +5,13 @@ use std::sync::Arc;
|
||||
|
||||
use anyhow::Result;
|
||||
use extension::{ExtensionGrammarProxy, ExtensionHostProxy, ExtensionLanguageProxy};
|
||||
use gpui::{App, Entity};
|
||||
use language::{LanguageMatcher, LanguageName, LanguageRegistry, LoadedLanguage};
|
||||
use project::LspStore;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub enum LspAccess {
|
||||
ViaLspStore(Entity<LspStore>),
|
||||
ViaWorkspaces(Arc<dyn Fn(&mut App) -> Result<Vec<Entity<LspStore>>> + Send + Sync + 'static>),
|
||||
Noop,
|
||||
}
|
||||
|
||||
pub fn init(
|
||||
lsp_access: LspAccess,
|
||||
extension_host_proxy: Arc<ExtensionHostProxy>,
|
||||
language_registry: Arc<LanguageRegistry>,
|
||||
) {
|
||||
let language_server_registry_proxy = LanguageServerRegistryProxy {
|
||||
language_registry,
|
||||
lsp_access,
|
||||
};
|
||||
let language_server_registry_proxy = LanguageServerRegistryProxy { language_registry };
|
||||
extension_host_proxy.register_grammar_proxy(language_server_registry_proxy.clone());
|
||||
extension_host_proxy.register_language_proxy(language_server_registry_proxy.clone());
|
||||
extension_host_proxy.register_language_server_proxy(language_server_registry_proxy);
|
||||
@@ -33,7 +20,6 @@ pub fn init(
|
||||
#[derive(Clone)]
|
||||
struct LanguageServerRegistryProxy {
|
||||
language_registry: Arc<LanguageRegistry>,
|
||||
lsp_access: LspAccess,
|
||||
}
|
||||
|
||||
impl ExtensionGrammarProxy for LanguageServerRegistryProxy {
|
||||
|
||||
@@ -166,9 +166,46 @@ impl State {
|
||||
}
|
||||
|
||||
let response = Self::fetch_models(client, llm_api_token, use_cloud).await?;
|
||||
this.update(cx, |this, cx| {
|
||||
this.update_models(response, cx);
|
||||
})
|
||||
cx.update(|cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
let mut models = Vec::new();
|
||||
|
||||
for model in response.models {
|
||||
models.push(Arc::new(model.clone()));
|
||||
|
||||
// Right now we represent thinking variants of models as separate models on the client,
|
||||
// so we need to insert variants for any model that supports thinking.
|
||||
if model.supports_thinking {
|
||||
models.push(Arc::new(zed_llm_client::LanguageModel {
|
||||
id: zed_llm_client::LanguageModelId(
|
||||
format!("{}-thinking", model.id).into(),
|
||||
),
|
||||
display_name: format!("{} Thinking", model.display_name),
|
||||
..model
|
||||
}));
|
||||
}
|
||||
}
|
||||
|
||||
this.default_model = models
|
||||
.iter()
|
||||
.find(|model| model.id == response.default_model)
|
||||
.cloned();
|
||||
this.default_fast_model = models
|
||||
.iter()
|
||||
.find(|model| model.id == response.default_fast_model)
|
||||
.cloned();
|
||||
this.recommended_models = response
|
||||
.recommended_models
|
||||
.iter()
|
||||
.filter_map(|id| models.iter().find(|model| &model.id == id))
|
||||
.cloned()
|
||||
.collect();
|
||||
this.models = models;
|
||||
cx.notify();
|
||||
})
|
||||
})??;
|
||||
|
||||
anyhow::Ok(())
|
||||
})
|
||||
.await
|
||||
.context("failed to fetch Zed models")
|
||||
@@ -179,15 +216,12 @@ impl State {
|
||||
}),
|
||||
_llm_token_subscription: cx.subscribe(
|
||||
&refresh_llm_token_listener,
|
||||
move |this, _listener, _event, cx| {
|
||||
|this, _listener, _event, cx| {
|
||||
let client = this.client.clone();
|
||||
let llm_api_token = this.llm_api_token.clone();
|
||||
cx.spawn(async move |this, cx| {
|
||||
cx.spawn(async move |_this, _cx| {
|
||||
llm_api_token.refresh(&client).await?;
|
||||
let response = Self::fetch_models(client, llm_api_token, use_cloud).await?;
|
||||
this.update(cx, |this, cx| {
|
||||
this.update_models(response, cx);
|
||||
})
|
||||
anyhow::Ok(())
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
},
|
||||
@@ -230,41 +264,6 @@ impl State {
|
||||
}));
|
||||
}
|
||||
|
||||
fn update_models(&mut self, response: ListModelsResponse, cx: &mut Context<Self>) {
|
||||
let mut models = Vec::new();
|
||||
|
||||
for model in response.models {
|
||||
models.push(Arc::new(model.clone()));
|
||||
|
||||
// Right now we represent thinking variants of models as separate models on the client,
|
||||
// so we need to insert variants for any model that supports thinking.
|
||||
if model.supports_thinking {
|
||||
models.push(Arc::new(zed_llm_client::LanguageModel {
|
||||
id: zed_llm_client::LanguageModelId(format!("{}-thinking", model.id).into()),
|
||||
display_name: format!("{} Thinking", model.display_name),
|
||||
..model
|
||||
}));
|
||||
}
|
||||
}
|
||||
|
||||
self.default_model = models
|
||||
.iter()
|
||||
.find(|model| model.id == response.default_model)
|
||||
.cloned();
|
||||
self.default_fast_model = models
|
||||
.iter()
|
||||
.find(|model| model.id == response.default_fast_model)
|
||||
.cloned();
|
||||
self.recommended_models = response
|
||||
.recommended_models
|
||||
.iter()
|
||||
.filter_map(|id| models.iter().find(|model| &model.id == id))
|
||||
.cloned()
|
||||
.collect();
|
||||
self.models = models;
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
async fn fetch_models(
|
||||
client: Arc<Client>,
|
||||
llm_api_token: LlmApiToken,
|
||||
|
||||
@@ -110,7 +110,6 @@ impl LanguageServerHealthStatus {
|
||||
|
||||
impl LanguageServerState {
|
||||
fn fill_menu(&self, mut menu: ContextMenu, cx: &mut Context<Self>) -> ContextMenu {
|
||||
menu = menu.align_popover_bottom();
|
||||
let lsp_logs = cx
|
||||
.try_global::<GlobalLogStore>()
|
||||
.and_then(|lsp_logs| lsp_logs.0.upgrade());
|
||||
@@ -119,7 +118,6 @@ impl LanguageServerState {
|
||||
return menu;
|
||||
};
|
||||
|
||||
let mut first_button_encountered = false;
|
||||
for (i, item) in self.items.iter().enumerate() {
|
||||
if let LspItem::ToggleServersButton { restart } = item {
|
||||
let label = if *restart {
|
||||
@@ -184,11 +182,7 @@ impl LanguageServerState {
|
||||
.ok();
|
||||
}
|
||||
});
|
||||
if !first_button_encountered {
|
||||
menu = menu.separator();
|
||||
first_button_encountered = true;
|
||||
}
|
||||
menu = menu.item(button);
|
||||
menu = menu.separator().item(button);
|
||||
continue;
|
||||
};
|
||||
|
||||
@@ -711,7 +705,6 @@ impl LspTool {
|
||||
new_lsp_items.extend(other_servers.into_iter().map(ServerData::into_lsp_item));
|
||||
if !new_lsp_items.is_empty() {
|
||||
if can_stop_all {
|
||||
new_lsp_items.push(LspItem::ToggleServersButton { restart: true });
|
||||
new_lsp_items.push(LspItem::ToggleServersButton { restart: false });
|
||||
} else if can_restart_all {
|
||||
new_lsp_items.push(LspItem::ToggleServersButton { restart: true });
|
||||
|
||||
@@ -44,7 +44,6 @@ dap.workspace = true
|
||||
futures.workspace = true
|
||||
gpui.workspace = true
|
||||
http_client.workspace = true
|
||||
indoc.workspace = true
|
||||
language.workspace = true
|
||||
log.workspace = true
|
||||
lsp.workspace = true
|
||||
|
||||
@@ -262,7 +262,6 @@ impl LspAdapter for RustLspAdapter {
|
||||
_: LanguageServerId,
|
||||
_: Option<&'_ Buffer>,
|
||||
) {
|
||||
// https://zed.dev/cla
|
||||
static REGEX: LazyLock<Regex> =
|
||||
LazyLock::new(|| Regex::new(r"(?m)`([^`]+)\n`$").expect("Failed to create REGEX"));
|
||||
|
||||
|
||||
@@ -8,10 +8,9 @@ use futures::future::join_all;
|
||||
use gpui::{App, AppContext, AsyncApp, Task};
|
||||
use http_client::github::{AssetKind, GitHubLspBinaryVersion, build_asset_url};
|
||||
use language::{
|
||||
Buffer, ContextLocation, ContextProvider, File, LanguageToolchainStore, LspAdapter,
|
||||
LspAdapterDelegate,
|
||||
ContextLocation, ContextProvider, File, LanguageToolchainStore, LspAdapter, LspAdapterDelegate,
|
||||
};
|
||||
use lsp::{CodeActionKind, LanguageServerBinary, LanguageServerId, LanguageServerName};
|
||||
use lsp::{CodeActionKind, LanguageServerBinary, LanguageServerName};
|
||||
use node_runtime::NodeRuntime;
|
||||
use project::{Fs, lsp_store::language_server_settings};
|
||||
use serde_json::{Value, json};
|
||||
@@ -606,7 +605,6 @@ impl LspAdapter for TypeScriptLspAdapter {
|
||||
}
|
||||
}
|
||||
|
||||
// >>> https://zed.dev/cla <<<
|
||||
async fn fetch_server_binary(
|
||||
&self,
|
||||
latest_version: Box<dyn 'static + Send + Any>,
|
||||
@@ -750,15 +748,6 @@ impl LspAdapter for TypeScriptLspAdapter {
|
||||
("TSX".into(), "typescriptreact".into()),
|
||||
])
|
||||
}
|
||||
|
||||
fn process_diagnostics(
|
||||
&self,
|
||||
d: &mut lsp::PublishDiagnosticsParams,
|
||||
_: LanguageServerId,
|
||||
_: Option<&'_ Buffer>,
|
||||
) {
|
||||
dbg!("called with ", d);
|
||||
}
|
||||
}
|
||||
|
||||
async fn get_cached_ts_server_binary(
|
||||
|
||||
@@ -280,185 +280,6 @@ impl LspAdapter for VtslsLspAdapter {
|
||||
("TSX".into(), "typescriptreact".into()),
|
||||
])
|
||||
}
|
||||
|
||||
fn diagnostic_message_to_markdown(&self, message: &str) -> Option<String> {
|
||||
use regex::{Captures, Regex};
|
||||
dbg!(&message);
|
||||
|
||||
// Helper functions for formatting
|
||||
let format_type_block = |prefix: &str, content: &str| -> String {
|
||||
if prefix.is_empty() {
|
||||
if content.len() > 50 || content.contains('\n') || content.contains('`') {
|
||||
format!("\n```typescript\ntype a ={}\n```\n", dbg!(content))
|
||||
} else {
|
||||
format!("`{}`", dbg!(content))
|
||||
}
|
||||
} else {
|
||||
if content.len() > 50 || content.contains('\n') || content.contains('`') {
|
||||
format!(
|
||||
"{}\n```typescript\ntype a ={}\n```\n",
|
||||
prefix,
|
||||
dbg!(content)
|
||||
)
|
||||
} else {
|
||||
format!("{} `{}`", prefix, dbg!(content))
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let format_typescript_block =
|
||||
|content: &str| -> String { format!("\n\n```typescript\n{}\n```\n", dbg!(content)) };
|
||||
|
||||
let format_simple_type_block = |content: &str| -> String { format!("`{}`", dbg!(content)) };
|
||||
|
||||
let unstyle_code_block = |content: &str| -> String { format!("`{}`", dbg!(content)) };
|
||||
|
||||
let mut result = message.to_string();
|
||||
|
||||
// Format 'key' with "value"
|
||||
let re = Regex::new(r#"(\w+)(\s+)'(.+?)'(\s+)with(\s+)"(.+?)""#).unwrap();
|
||||
result = re
|
||||
.replace_all(&result, |caps: &Captures| {
|
||||
format!(
|
||||
"{}{}`{}`{} with `\"{}\"`",
|
||||
&caps[1], &caps[2], &caps[3], &caps[4], &caps[6]
|
||||
)
|
||||
})
|
||||
.to_string();
|
||||
|
||||
// Format "key"
|
||||
let re = Regex::new(r#"(\s)'"(.*?)"'(\s|:|.|$)"#).unwrap();
|
||||
result = re
|
||||
.replace_all(&result, |caps: &Captures| {
|
||||
format!("{}`\"{}\"`{}", &caps[1], &caps[2], &caps[3])
|
||||
})
|
||||
.to_string();
|
||||
|
||||
// Format declare module snippet
|
||||
let re = Regex::new(r#"['"](declare module )['"](.*)['""];['"']"#).unwrap();
|
||||
result = re
|
||||
.replace_all(&result, |caps: &Captures| {
|
||||
format_typescript_block(&format!("{} \"{}\"", &caps[1], &caps[2]))
|
||||
})
|
||||
.to_string();
|
||||
|
||||
// Format missing props error
|
||||
let re = Regex::new(r#"(is missing the following properties from type\s?)'(.*)': ([^:]+)"#)
|
||||
.unwrap();
|
||||
result = re
|
||||
.replace_all(&result, |caps: &Captures| {
|
||||
let props: Vec<&str> = caps[3].split(", ").filter(|s| !s.is_empty()).collect();
|
||||
let props_html = props
|
||||
.iter()
|
||||
.map(|prop| format!("<li>{}</li>", prop))
|
||||
.collect::<Vec<_>>()
|
||||
.join("");
|
||||
format!("{}`{}`: <ul>{}</ul>", &caps[1], &caps[2], props_html)
|
||||
})
|
||||
.to_string();
|
||||
|
||||
// Format type pairs
|
||||
let re = Regex::new(r#"(?i)(types) ['"](.*?)['"] and ['"](.*?)['"][.]?"#).unwrap();
|
||||
result = re
|
||||
.replace_all(&result, |caps: &Captures| {
|
||||
format!("{} `{}` and `{}`", &caps[1], &caps[2], &caps[3])
|
||||
})
|
||||
.to_string();
|
||||
|
||||
// Format type annotation options
|
||||
let re = Regex::new(r#"(?i)type annotation must be ['"](.*?)['"] or ['"](.*?)['"][.]?"#)
|
||||
.unwrap();
|
||||
result = re
|
||||
.replace_all(&result, |caps: &Captures| {
|
||||
format!("type annotation must be `{}` or `{}`", &caps[1], &caps[2])
|
||||
})
|
||||
.to_string();
|
||||
|
||||
// Format overload
|
||||
let re = Regex::new(r#"(?i)(Overload \d of \d), ['"](.*?)['"], "#).unwrap();
|
||||
result = re
|
||||
.replace_all(&result, |caps: &Captures| {
|
||||
format!("{}, `{}`, ", &caps[1], &caps[2])
|
||||
})
|
||||
.to_string();
|
||||
|
||||
// Format simple strings
|
||||
let re = Regex::new(r#"^['"]"[^"]*"['"]$"#).unwrap();
|
||||
result = re
|
||||
.replace_all(&result, |caps: &Captures| format_typescript_block(&caps[0]))
|
||||
.to_string();
|
||||
|
||||
// Replace module 'x' by module "x" for ts error #2307
|
||||
let re = Regex::new(r#"(?i)(module )'([^"]*?)'"#).unwrap();
|
||||
result = re
|
||||
.replace_all(&result, |caps: &Captures| {
|
||||
format!("{}\"{}\"", &caps[1], &caps[2])
|
||||
})
|
||||
.to_string();
|
||||
|
||||
// Format string types
|
||||
let re = Regex::new(r#"(?i)(module|file|file name|imported via) ['""](.*?)['""]"#).unwrap();
|
||||
result = re
|
||||
.replace_all(&result, |caps: &Captures| {
|
||||
format_type_block(&caps[1], &format!("\"{}\"", &caps[2]))
|
||||
})
|
||||
.to_string();
|
||||
|
||||
// Format types
|
||||
dbg!(&result);
|
||||
let re = Regex::new(r#"(?i)(type|type alias|interface|module|file|file name|class|method's|subtype of constraint) ['"](.*?)['"]"#).unwrap();
|
||||
result = re
|
||||
.replace_all(&result, |caps: &Captures| {
|
||||
dbg!(&caps);
|
||||
format_type_block(&caps[1], &caps[2])
|
||||
})
|
||||
.to_string();
|
||||
|
||||
// Format reversed types
|
||||
let re = Regex::new(r#"(?i)(.*)['"]([^>]*)['"] (type|interface|return type|file|module|is (not )?assignable)"#).unwrap();
|
||||
result = re
|
||||
.replace_all(&result, |caps: &Captures| {
|
||||
format!("{}`{}` {}", &caps[1], &caps[2], &caps[3])
|
||||
})
|
||||
.to_string();
|
||||
|
||||
// Format simple types that weren't captured before
|
||||
let re = Regex::new(
|
||||
r#"['"]((void|null|undefined|any|boolean|string|number|bigint|symbol)(\[\])?)['"']"#,
|
||||
)
|
||||
.unwrap();
|
||||
result = re
|
||||
.replace_all(&result, |caps: &Captures| {
|
||||
format_simple_type_block(&caps[1])
|
||||
})
|
||||
.to_string();
|
||||
|
||||
// Format some typescript keywords
|
||||
let re = Regex::new(r#"['"](import|export|require|in|continue|break|let|false|true|const|new|throw|await|for await|[0-9]+)( ?.*?)['"]"#).unwrap();
|
||||
result = re
|
||||
.replace_all(&result, |caps: &Captures| {
|
||||
format_typescript_block(&format!("{}{}", &caps[1], &caps[2]))
|
||||
})
|
||||
.to_string();
|
||||
|
||||
// Format return values
|
||||
let re = Regex::new(r#"(?i)(return|operator) ['"](.*?)['"']"#).unwrap();
|
||||
result = re
|
||||
.replace_all(&result, |caps: &Captures| {
|
||||
format!("{} {}", &caps[1], format_typescript_block(&caps[2]))
|
||||
})
|
||||
.to_string();
|
||||
|
||||
// Format regular code blocks
|
||||
let re = Regex::new(r#"(\W|^)'([^'"]*?)'(\W|$)"#).unwrap();
|
||||
result = re
|
||||
.replace_all(&result, |caps: &Captures| {
|
||||
format!("{}{}{}", &caps[1], unstyle_code_block(&caps[2]), &caps[3])
|
||||
})
|
||||
.to_string();
|
||||
|
||||
Some(result)
|
||||
}
|
||||
}
|
||||
|
||||
async fn get_cached_ts_server_binary(
|
||||
@@ -480,25 +301,3 @@ async fn get_cached_ts_server_binary(
|
||||
.await
|
||||
.log_err()
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
use indoc::indoc;
|
||||
|
||||
#[test]
|
||||
fn test_diagnostic_message_to_markdown() {
|
||||
let message = "Property 'user' is missing in type '{ person: { username: string; email: string; }; }' but required in type '{ user: { name: string; email: `${string}@${string}.${string}`; age: number; }; }'.";
|
||||
let expected = indoc! { "
|
||||
Property `user` is missing in type `{ person: { username: string; email: string; }; }` but required in type
|
||||
|
||||
```typescript
|
||||
{ user: { name: string; email: `${string}@${string}.${string}`; age: number; }; }
|
||||
```
|
||||
"};
|
||||
let result = VtslsLspAdapter::new(NodeRuntime::unavailable())
|
||||
.diagnostic_message_to_markdown(message)
|
||||
.unwrap();
|
||||
pretty_assertions::assert_eq!(result, expected.to_string());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -25,7 +25,7 @@ async-trait.workspace = true
|
||||
collections.workspace = true
|
||||
cpal.workspace = true
|
||||
futures.workspace = true
|
||||
gpui = { workspace = true, features = ["screen-capture", "x11", "wayland", "windows-manifest"] }
|
||||
gpui = { workspace = true, features = ["screen-capture", "x11", "wayland"] }
|
||||
gpui_tokio.workspace = true
|
||||
http_client_tls.workspace = true
|
||||
image.workspace = true
|
||||
@@ -45,7 +45,7 @@ livekit = { rev = "d2eade7a6b15d6dbdb38ba12a1ff7bf07fcebba4", git = "https://git
|
||||
"__rustls-tls"
|
||||
] }
|
||||
|
||||
[target.'cfg(any(target_os = "linux", target_os = "freebsd", target_os = "windows"))'.dependencies]
|
||||
[target.'cfg(any(target_os = "linux", target_os = "freebsd"))'.dependencies]
|
||||
scap.workspace = true
|
||||
|
||||
[target.'cfg(target_os = "macos")'.dependencies]
|
||||
|
||||
@@ -3,16 +3,36 @@ use collections::HashMap;
|
||||
mod remote_video_track_view;
|
||||
pub use remote_video_track_view::{RemoteVideoTrackView, RemoteVideoTrackViewEvent};
|
||||
|
||||
#[cfg(not(any(test, feature = "test-support", target_os = "freebsd")))]
|
||||
#[cfg(not(any(
|
||||
test,
|
||||
feature = "test-support",
|
||||
any(all(target_os = "windows", target_env = "gnu"), target_os = "freebsd")
|
||||
)))]
|
||||
mod livekit_client;
|
||||
#[cfg(not(any(test, feature = "test-support", target_os = "freebsd")))]
|
||||
#[cfg(not(any(
|
||||
test,
|
||||
feature = "test-support",
|
||||
any(all(target_os = "windows", target_env = "gnu"), target_os = "freebsd")
|
||||
)))]
|
||||
pub use livekit_client::*;
|
||||
|
||||
#[cfg(any(test, feature = "test-support", target_os = "freebsd"))]
|
||||
#[cfg(any(
|
||||
test,
|
||||
feature = "test-support",
|
||||
any(all(target_os = "windows", target_env = "gnu"), target_os = "freebsd")
|
||||
))]
|
||||
mod mock_client;
|
||||
#[cfg(any(test, feature = "test-support", target_os = "freebsd"))]
|
||||
#[cfg(any(
|
||||
test,
|
||||
feature = "test-support",
|
||||
any(all(target_os = "windows", target_env = "gnu"), target_os = "freebsd")
|
||||
))]
|
||||
pub mod test;
|
||||
#[cfg(any(test, feature = "test-support", target_os = "freebsd"))]
|
||||
#[cfg(any(
|
||||
test,
|
||||
feature = "test-support",
|
||||
any(all(target_os = "windows", target_env = "gnu"), target_os = "freebsd")
|
||||
))]
|
||||
pub use mock_client::*;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
|
||||
@@ -585,10 +585,10 @@ fn video_frame_buffer_from_webrtc(buffer: Box<dyn VideoBuffer>) -> Option<Remote
|
||||
if start_ptr.is_null() {
|
||||
return None;
|
||||
}
|
||||
let argb_frame_slice = std::slice::from_raw_parts_mut(start_ptr, byte_len);
|
||||
let bgra_frame_slice = std::slice::from_raw_parts_mut(start_ptr, byte_len);
|
||||
buffer.to_argb(
|
||||
VideoFormatType::ARGB,
|
||||
argb_frame_slice,
|
||||
VideoFormatType::ARGB, // For some reason, this displays correctly while RGBA (the correct format) does not
|
||||
bgra_frame_slice,
|
||||
stride,
|
||||
width as i32,
|
||||
height as i32,
|
||||
@@ -596,13 +596,12 @@ fn video_frame_buffer_from_webrtc(buffer: Box<dyn VideoBuffer>) -> Option<Remote
|
||||
Vec::from_raw_parts(start_ptr, byte_len, byte_len)
|
||||
};
|
||||
|
||||
// TODO: Unclear why providing argb_image to RgbaImage works properly.
|
||||
let image = RgbaImage::from_raw(width, height, argb_image)
|
||||
.with_context(|| "Bug: not enough bytes allocated for image.")
|
||||
.log_err()?;
|
||||
|
||||
Some(Arc::new(RenderImage::new(SmallVec::from_elem(
|
||||
Frame::new(image),
|
||||
Frame::new(
|
||||
RgbaImage::from_raw(width, height, argb_image)
|
||||
.with_context(|| "Bug: not enough bytes allocated for image.")
|
||||
.log_err()?,
|
||||
),
|
||||
1,
|
||||
))))
|
||||
}
|
||||
@@ -618,9 +617,9 @@ fn video_frame_buffer_to_webrtc(frame: ScreenCaptureFrame) -> Option<impl AsRef<
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(target_os = "macos"))]
|
||||
#[cfg(any(target_os = "linux", target_os = "freebsd"))]
|
||||
fn video_frame_buffer_to_webrtc(frame: ScreenCaptureFrame) -> Option<impl AsRef<dyn VideoBuffer>> {
|
||||
use libwebrtc::native::yuv_helper::{abgr_to_nv12, argb_to_nv12};
|
||||
use libwebrtc::native::yuv_helper::argb_to_nv12;
|
||||
use livekit::webrtc::prelude::NV12Buffer;
|
||||
match frame.0 {
|
||||
scap::frame::Frame::BGRx(frame) => {
|
||||
@@ -639,22 +638,6 @@ fn video_frame_buffer_to_webrtc(frame: ScreenCaptureFrame) -> Option<impl AsRef<
|
||||
);
|
||||
Some(buffer)
|
||||
}
|
||||
scap::frame::Frame::RGBx(frame) => {
|
||||
let mut buffer = NV12Buffer::new(frame.width as u32, frame.height as u32);
|
||||
let (stride_y, stride_uv) = buffer.strides();
|
||||
let (data_y, data_uv) = buffer.data_mut();
|
||||
abgr_to_nv12(
|
||||
&frame.data,
|
||||
frame.width as u32 * 4,
|
||||
data_y,
|
||||
stride_y,
|
||||
data_uv,
|
||||
stride_uv,
|
||||
frame.width,
|
||||
frame.height,
|
||||
);
|
||||
Some(buffer)
|
||||
}
|
||||
scap::frame::Frame::YUVFrame(yuvframe) => {
|
||||
let mut buffer = NV12Buffer::with_strides(
|
||||
yuvframe.width as u32,
|
||||
@@ -676,6 +659,11 @@ fn video_frame_buffer_to_webrtc(frame: ScreenCaptureFrame) -> Option<impl AsRef<
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
fn video_frame_buffer_to_webrtc(_frame: ScreenCaptureFrame) -> Option<impl AsRef<dyn VideoBuffer>> {
|
||||
None as Option<Box<dyn VideoBuffer>>
|
||||
}
|
||||
|
||||
trait DeviceChangeListenerApi: Stream<Item = ()> + Sized {
|
||||
fn new(input: bool) -> Result<Self>;
|
||||
}
|
||||
|
||||
@@ -1534,26 +1534,12 @@ impl MarkdownElementBuilder {
|
||||
rendered_index: self.pending_line.text.len(),
|
||||
source_index: source_range.start,
|
||||
});
|
||||
if text.starts_with("type a =") {
|
||||
self.pending_line.text.push_str(&text["type a =".len()..]);
|
||||
} else {
|
||||
self.pending_line.text.push_str(text);
|
||||
}
|
||||
self.pending_line.text.push_str(text);
|
||||
self.current_source_index = source_range.end;
|
||||
|
||||
if let Some(Some(language)) = self.code_block_stack.last() {
|
||||
dbg!(&language);
|
||||
let mut offset = 0;
|
||||
for (mut range, highlight_id) in
|
||||
language.highlight_text(&Rope::from(text), 0..text.len())
|
||||
{
|
||||
if text.starts_with("type a =") {
|
||||
if range.start < "type a =".len() || range.end < "type a =".len() {
|
||||
continue;
|
||||
}
|
||||
range.start -= "type a =".len();
|
||||
range.end -= "type a =".len();
|
||||
};
|
||||
for (range, highlight_id) in language.highlight_text(&Rope::from(text), 0..text.len()) {
|
||||
if range.start > offset {
|
||||
self.pending_line
|
||||
.runs
|
||||
|
||||
@@ -560,11 +560,6 @@ impl DapStore {
|
||||
fn format_value(mut value: String) -> String {
|
||||
const LIMIT: usize = 100;
|
||||
|
||||
if let Some(index) = value.find("\n") {
|
||||
value.truncate(index);
|
||||
value.push_str("…");
|
||||
}
|
||||
|
||||
if value.len() > LIMIT {
|
||||
let mut index = LIMIT;
|
||||
// If index isn't a char boundary truncate will cause a panic
|
||||
@@ -572,7 +567,7 @@ impl DapStore {
|
||||
index -= 1;
|
||||
}
|
||||
value.truncate(index);
|
||||
value.push_str("…");
|
||||
value.push_str("...");
|
||||
}
|
||||
|
||||
format!(": {}", value)
|
||||
|
||||
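The `format_value` change above truncates long debugger values at a fixed byte limit, but walks the cut point back to a char boundary first, because `String::truncate` panics if the index falls inside a multi-byte character. A minimal sketch of that pattern using only the standard library (the limit and sample string here are made up for illustration, not taken from the diff):

fn truncate_display(mut value: String, limit: usize) -> String {
    if value.len() > limit {
        let mut index = limit;
        // Walking back avoids a panic when `limit` lands inside a multi-byte char.
        while !value.is_char_boundary(index) {
            index -= 1;
        }
        value.truncate(index);
        value.push_str("...");
    }
    value
}

fn main() {
    // Each "é" is two bytes in UTF-8, so a naive truncate(3) would panic here.
    println!("{}", truncate_display("ééééé".to_string(), 3)); // prints "é..."
}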
@@ -1726,18 +1726,6 @@ impl GitStore {
|
||||
let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
|
||||
let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
|
||||
|
||||
let askpass = if let Some(askpass_id) = envelope.payload.askpass_id {
|
||||
make_remote_delegate(
|
||||
this,
|
||||
envelope.payload.project_id,
|
||||
repository_id,
|
||||
askpass_id,
|
||||
&mut cx,
|
||||
)
|
||||
} else {
|
||||
AskPassDelegate::new_always_failing()
|
||||
};
|
||||
|
||||
let message = SharedString::from(envelope.payload.message);
|
||||
let name = envelope.payload.name.map(SharedString::from);
|
||||
let email = envelope.payload.email.map(SharedString::from);
|
||||
@@ -1751,7 +1739,6 @@ impl GitStore {
|
||||
CommitOptions {
|
||||
amend: options.amend,
|
||||
},
|
||||
askpass,
|
||||
cx,
|
||||
)
|
||||
})?
|
||||
@@ -3475,14 +3462,11 @@ impl Repository {
|
||||
message: SharedString,
|
||||
name_and_email: Option<(SharedString, SharedString)>,
|
||||
options: CommitOptions,
|
||||
askpass: AskPassDelegate,
|
||||
_cx: &mut App,
|
||||
) -> oneshot::Receiver<Result<()>> {
|
||||
let id = self.id;
|
||||
let askpass_delegates = self.askpass_delegates.clone();
|
||||
let askpass_id = util::post_inc(&mut self.latest_askpass_id);
|
||||
|
||||
self.send_job(Some("git commit".into()), move |git_repo, cx| async move {
|
||||
self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
|
||||
match git_repo {
|
||||
RepositoryState::Local {
|
||||
backend,
|
||||
@@ -3490,16 +3474,10 @@ impl Repository {
|
||||
..
|
||||
} => {
|
||||
backend
|
||||
.commit(message, name_and_email, options, askpass, environment, cx)
|
||||
.commit(message, name_and_email, options, environment)
|
||||
.await
|
||||
}
|
||||
RepositoryState::Remote { project_id, client } => {
|
||||
askpass_delegates.lock().insert(askpass_id, askpass);
|
||||
let _defer = util::defer(|| {
|
||||
let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
|
||||
debug_assert!(askpass_delegate.is_some());
|
||||
});
|
||||
|
||||
let (name, email) = name_and_email.unzip();
|
||||
client
|
||||
.request(proto::Commit {
|
||||
@@ -3511,9 +3489,9 @@ impl Repository {
|
||||
options: Some(proto::commit::CommitOptions {
|
||||
amend: options.amend,
|
||||
}),
|
||||
askpass_id: Some(askpass_id),
|
||||
})
|
||||
.await?;
|
||||
.await
|
||||
.context("sending commit request")?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -9712,31 +9712,29 @@ impl LspStore {
|
||||
} else {
|
||||
let buffers =
|
||||
lsp_store.buffer_ids_to_buffers(envelope.payload.buffer_ids.into_iter(), cx);
|
||||
lsp_store
|
||||
.stop_language_servers_for_buffers(
|
||||
buffers,
|
||||
envelope
|
||||
.payload
|
||||
.also_servers
|
||||
.into_iter()
|
||||
.filter_map(|selector| {
|
||||
Some(match selector.selector? {
|
||||
proto::language_server_selector::Selector::ServerId(
|
||||
lsp_store.stop_language_servers_for_buffers(
|
||||
buffers,
|
||||
envelope
|
||||
.payload
|
||||
.also_servers
|
||||
.into_iter()
|
||||
.filter_map(|selector| {
|
||||
Some(match selector.selector? {
|
||||
proto::language_server_selector::Selector::ServerId(server_id) => {
|
||||
LanguageServerSelector::Id(LanguageServerId::from_proto(
|
||||
server_id,
|
||||
) => LanguageServerSelector::Id(LanguageServerId::from_proto(
|
||||
server_id,
|
||||
)),
|
||||
proto::language_server_selector::Selector::Name(name) => {
|
||||
LanguageServerSelector::Name(LanguageServerName(
|
||||
SharedString::from(name),
|
||||
))
|
||||
}
|
||||
})
|
||||
))
|
||||
}
|
||||
proto::language_server_selector::Selector::Name(name) => {
|
||||
LanguageServerSelector::Name(LanguageServerName(
|
||||
SharedString::from(name),
|
||||
))
|
||||
}
|
||||
})
|
||||
.collect(),
|
||||
cx,
|
||||
)
|
||||
.detach_and_log_err(cx);
|
||||
})
|
||||
.collect(),
|
||||
cx,
|
||||
);
|
||||
}
|
||||
})?;
|
||||
|
||||
@@ -10292,9 +10290,9 @@ impl LspStore {
|
||||
pub fn stop_language_servers_for_buffers(
|
||||
&mut self,
|
||||
buffers: Vec<Entity<Buffer>>,
|
||||
also_stop_servers: HashSet<LanguageServerSelector>,
|
||||
also_restart_servers: HashSet<LanguageServerSelector>,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Task<Result<()>> {
|
||||
) {
|
||||
if let Some((client, project_id)) = self.upstream_client() {
|
||||
let request = client.request(proto::StopLanguageServers {
|
||||
project_id,
|
||||
@@ -10302,7 +10300,7 @@ impl LspStore {
|
||||
.into_iter()
|
||||
.map(|b| b.read(cx).remote_id().to_proto())
|
||||
.collect(),
|
||||
also_servers: also_stop_servers
|
||||
also_servers: also_restart_servers
|
||||
.into_iter()
|
||||
.map(|selector| {
|
||||
let selector = match selector {
|
||||
@@ -10324,31 +10322,24 @@ impl LspStore {
|
||||
.collect(),
|
||||
all: false,
|
||||
});
|
||||
cx.background_spawn(async move {
|
||||
let _ = request.await?;
|
||||
Ok(())
|
||||
})
|
||||
cx.background_spawn(request).detach_and_log_err(cx);
|
||||
} else {
|
||||
let task =
|
||||
self.stop_local_language_servers_for_buffers(&buffers, also_stop_servers, cx);
|
||||
cx.background_spawn(async move {
|
||||
task.await;
|
||||
Ok(())
|
||||
})
|
||||
self.stop_local_language_servers_for_buffers(&buffers, also_restart_servers, cx)
|
||||
.detach();
|
||||
}
|
||||
}
|
||||
|
||||
fn stop_local_language_servers_for_buffers(
|
||||
&mut self,
|
||||
buffers: &[Entity<Buffer>],
|
||||
also_stop_servers: HashSet<LanguageServerSelector>,
|
||||
also_restart_servers: HashSet<LanguageServerSelector>,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Task<()> {
|
||||
let Some(local) = self.as_local_mut() else {
|
||||
return Task::ready(());
|
||||
};
|
||||
let mut language_server_names_to_stop = BTreeSet::default();
|
||||
let mut language_servers_to_stop = also_stop_servers
|
||||
let mut language_servers_to_stop = also_restart_servers
|
||||
.into_iter()
|
||||
.flat_map(|selector| match selector {
|
||||
LanguageServerSelector::Id(id) => Some(id),
|
||||
|
||||
@@ -3217,11 +3217,9 @@ impl Project {
|
||||
also_restart_servers: HashSet<LanguageServerSelector>,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.lsp_store
|
||||
.update(cx, |lsp_store, cx| {
|
||||
lsp_store.stop_language_servers_for_buffers(buffers, also_restart_servers, cx)
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
self.lsp_store.update(cx, |lsp_store, cx| {
|
||||
lsp_store.stop_language_servers_for_buffers(buffers, also_restart_servers, cx)
|
||||
})
|
||||
}
|
||||
|
||||
pub fn cancel_language_server_work_for_buffers(
|
||||
|
||||
@@ -294,7 +294,6 @@ message Commit {
|
||||
optional string email = 5;
|
||||
string message = 6;
|
||||
optional CommitOptions options = 7;
|
||||
optional uint64 askpass_id = 8;
|
||||
|
||||
message CommitOptions {
|
||||
bool amend = 1;
|
||||
|
||||
@@ -1573,7 +1573,7 @@ impl SshRemoteConnection {
|
||||
// https://github.com/PowerShell/Win32-OpenSSH/wiki/Project-Scope
|
||||
#[cfg(target_os = "windows")]
|
||||
let args = ["-N"];
|
||||
let mut master_process = util::command::new_smol_command("ssh");
|
||||
let mut master_process = process::Command::new("ssh");
|
||||
master_process
|
||||
.kill_on_drop(true)
|
||||
.stdin(Stdio::null())
|
||||
|
||||
@@ -77,6 +77,7 @@ impl HeadlessProject {
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
debug_adapter_extension::init(proxy.clone(), cx);
|
||||
language_extension::init(proxy.clone(), languages.clone());
|
||||
languages::init(languages.clone(), node_runtime.clone(), cx);
|
||||
|
||||
let worktree_store = cx.new(|cx| {
|
||||
@@ -184,11 +185,6 @@ impl HeadlessProject {
|
||||
});
|
||||
|
||||
cx.subscribe(&lsp_store, Self::on_lsp_store_event).detach();
|
||||
language_extension::init(
|
||||
language_extension::LspAccess::ViaLspStore(lsp_store.clone()),
|
||||
proxy.clone(),
|
||||
languages.clone(),
|
||||
);
|
||||
|
||||
cx.subscribe(
|
||||
&buffer_store,
|
||||
|
||||
@@ -623,55 +623,49 @@ impl KeymapFile {
|
||||
// We don't want to modify the file if it's invalid.
|
||||
let keymap = Self::parse(&keymap_contents).context("Failed to parse keymap")?;
|
||||
|
||||
if let KeybindUpdateOperation::Remove {
|
||||
target,
|
||||
target_keybind_source,
|
||||
} = operation
|
||||
{
|
||||
if target_keybind_source != KeybindSource::User {
|
||||
anyhow::bail!("Cannot remove non-user created keybinding. Not implemented yet");
|
||||
}
|
||||
let target_action_value = target
|
||||
.action_value()
|
||||
.context("Failed to generate target action JSON value")?;
|
||||
let Some((index, keystrokes_str)) =
|
||||
find_binding(&keymap, &target, &target_action_value)
|
||||
else {
|
||||
anyhow::bail!("Failed to find keybinding to remove");
|
||||
};
|
||||
let is_only_binding = keymap.0[index]
|
||||
.bindings
|
||||
.as_ref()
|
||||
.map_or(true, |bindings| bindings.len() == 1);
|
||||
let key_path: &[&str] = if is_only_binding {
|
||||
&[]
|
||||
} else {
|
||||
&["bindings", keystrokes_str]
|
||||
};
|
||||
let (replace_range, replace_value) = replace_top_level_array_value_in_json_text(
|
||||
&keymap_contents,
|
||||
key_path,
|
||||
None,
|
||||
None,
|
||||
index,
|
||||
tab_size,
|
||||
)
|
||||
.context("Failed to remove keybinding")?;
|
||||
keymap_contents.replace_range(replace_range, &replace_value);
|
||||
return Ok(keymap_contents);
|
||||
}
|
||||
|
||||
if let KeybindUpdateOperation::Replace { source, target, .. } = operation {
|
||||
let mut found_index = None;
|
||||
let target_action_value = target
|
||||
.action_value()
|
||||
.context("Failed to generate target action JSON value")?;
|
||||
let source_action_value = source
|
||||
.action_value()
|
||||
.context("Failed to generate source action JSON value")?;
|
||||
'sections: for (index, section) in keymap.sections().enumerate() {
|
||||
if section.context != target.context.unwrap_or("") {
|
||||
continue;
|
||||
}
|
||||
if section.use_key_equivalents != target.use_key_equivalents {
|
||||
continue;
|
||||
}
|
||||
let Some(bindings) = &section.bindings else {
|
||||
continue;
|
||||
};
|
||||
for (keystrokes, action) in bindings {
|
||||
let Ok(keystrokes) = keystrokes
|
||||
.split_whitespace()
|
||||
.map(Keystroke::parse)
|
||||
.collect::<Result<Vec<_>, _>>()
|
||||
else {
|
||||
continue;
|
||||
};
|
||||
if keystrokes.len() != target.keystrokes.len()
|
||||
|| !keystrokes
|
||||
.iter()
|
||||
.zip(target.keystrokes)
|
||||
.all(|(a, b)| a.should_match(b))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
if action.0 != target_action_value {
|
||||
continue;
|
||||
}
|
||||
found_index = Some(index);
|
||||
break 'sections;
|
||||
}
|
||||
}
|
||||
|
||||
if let Some((index, keystrokes_str)) =
|
||||
find_binding(&keymap, &target, &target_action_value)
|
||||
{
|
||||
if let Some(index) = found_index {
|
||||
if target.context == source.context {
|
||||
// if we are only changing the keybinding (common case)
|
||||
// not the context, etc. Then just update the binding in place
|
||||
@@ -679,7 +673,7 @@ impl KeymapFile {
|
||||
let (replace_range, replace_value) =
|
||||
replace_top_level_array_value_in_json_text(
|
||||
&keymap_contents,
|
||||
&["bindings", keystrokes_str],
|
||||
&["bindings", &target.keystrokes_unparsed()],
|
||||
Some(&source_action_value),
|
||||
Some(&source.keystrokes_unparsed()),
|
||||
index,
|
||||
@@ -701,7 +695,7 @@ impl KeymapFile {
|
||||
let (replace_range, replace_value) =
|
||||
replace_top_level_array_value_in_json_text(
|
||||
&keymap_contents,
|
||||
&["bindings", keystrokes_str],
|
||||
&["bindings", &target.keystrokes_unparsed()],
|
||||
Some(&source_action_value),
|
||||
Some(&source.keystrokes_unparsed()),
|
||||
index,
|
||||
@@ -731,7 +725,7 @@ impl KeymapFile {
|
||||
let (replace_range, replace_value) =
|
||||
replace_top_level_array_value_in_json_text(
|
||||
&keymap_contents,
|
||||
&["bindings", keystrokes_str],
|
||||
&["bindings", &target.keystrokes_unparsed()],
|
||||
None,
|
||||
None,
|
||||
index,
|
||||
@@ -777,50 +771,6 @@ impl KeymapFile {
|
||||
keymap_contents.replace_range(replace_range, &replace_value);
|
||||
}
|
||||
return Ok(keymap_contents);
|
||||
|
||||
fn find_binding<'a, 'b>(
|
||||
keymap: &'b KeymapFile,
|
||||
target: &KeybindUpdateTarget<'a>,
|
||||
target_action_value: &Value,
|
||||
) -> Option<(usize, &'b str)> {
|
||||
let target_context_parsed =
|
||||
KeyBindingContextPredicate::parse(target.context.unwrap_or("")).ok();
|
||||
for (index, section) in keymap.sections().enumerate() {
|
||||
let section_context_parsed =
|
||||
KeyBindingContextPredicate::parse(&section.context).ok();
|
||||
if section_context_parsed != target_context_parsed {
|
||||
continue;
|
||||
}
|
||||
if section.use_key_equivalents != target.use_key_equivalents {
|
||||
continue;
|
||||
}
|
||||
let Some(bindings) = &section.bindings else {
|
||||
continue;
|
||||
};
|
||||
for (keystrokes_str, action) in bindings {
|
||||
let Ok(keystrokes) = keystrokes_str
|
||||
.split_whitespace()
|
||||
.map(Keystroke::parse)
|
||||
.collect::<Result<Vec<_>, _>>()
|
||||
else {
|
||||
continue;
|
||||
};
|
||||
if keystrokes.len() != target.keystrokes.len()
|
||||
|| !keystrokes
|
||||
.iter()
|
||||
.zip(target.keystrokes)
|
||||
.all(|(a, b)| a.should_match(b))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
if &action.0 != target_action_value {
|
||||
continue;
|
||||
}
|
||||
return Some((index, &keystrokes_str));
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -833,13 +783,8 @@ pub enum KeybindUpdateOperation<'a> {
|
||||
target_keybind_source: KeybindSource,
|
||||
},
|
||||
Add(KeybindUpdateTarget<'a>),
|
||||
Remove {
|
||||
target: KeybindUpdateTarget<'a>,
|
||||
target_keybind_source: KeybindSource,
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct KeybindUpdateTarget<'a> {
|
||||
pub context: Option<&'a str>,
|
||||
pub keystrokes: &'a [Keystroke],
|
||||
@@ -1355,118 +1300,5 @@ mod tests {
|
||||
]"#
|
||||
.unindent(),
|
||||
);
|
||||
|
||||
check_keymap_update(
|
||||
r#"[
|
||||
{
|
||||
"context": "SomeContext",
|
||||
"bindings": {
|
||||
"a": "foo::bar",
|
||||
"c": "foo::baz",
|
||||
}
|
||||
},
|
||||
]"#
|
||||
.unindent(),
|
||||
KeybindUpdateOperation::Remove {
|
||||
target: KeybindUpdateTarget {
|
||||
context: Some("SomeContext"),
|
||||
keystrokes: &parse_keystrokes("a"),
|
||||
action_name: "foo::bar",
|
||||
use_key_equivalents: false,
|
||||
input: None,
|
||||
},
|
||||
target_keybind_source: KeybindSource::User,
|
||||
},
|
||||
r#"[
|
||||
{
|
||||
"context": "SomeContext",
|
||||
"bindings": {
|
||||
"c": "foo::baz",
|
||||
}
|
||||
},
|
||||
]"#
|
||||
.unindent(),
|
||||
);
|
||||
|
||||
check_keymap_update(
|
||||
r#"[
|
||||
{
|
||||
"context": "SomeContext",
|
||||
"bindings": {
|
||||
"a": ["foo::bar", true],
|
||||
"c": "foo::baz",
|
||||
}
|
||||
},
|
||||
]"#
|
||||
.unindent(),
|
||||
KeybindUpdateOperation::Remove {
|
||||
target: KeybindUpdateTarget {
|
||||
context: Some("SomeContext"),
|
||||
keystrokes: &parse_keystrokes("a"),
|
||||
action_name: "foo::bar",
|
||||
use_key_equivalents: false,
|
||||
input: Some("true"),
|
||||
},
|
||||
target_keybind_source: KeybindSource::User,
|
||||
},
|
||||
r#"[
|
||||
{
|
||||
"context": "SomeContext",
|
||||
"bindings": {
|
||||
"c": "foo::baz",
|
||||
}
|
||||
},
|
||||
]"#
|
||||
.unindent(),
|
||||
);
|
||||
|
||||
check_keymap_update(
|
||||
r#"[
|
||||
{
|
||||
"context": "SomeContext",
|
||||
"bindings": {
|
||||
"b": "foo::baz",
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "SomeContext",
|
||||
"bindings": {
|
||||
"a": ["foo::bar", true],
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "SomeContext",
|
||||
"bindings": {
|
||||
"c": "foo::baz",
|
||||
}
|
||||
},
|
||||
]"#
|
||||
.unindent(),
|
||||
KeybindUpdateOperation::Remove {
|
||||
target: KeybindUpdateTarget {
|
||||
context: Some("SomeContext"),
|
||||
keystrokes: &parse_keystrokes("a"),
|
||||
action_name: "foo::bar",
|
||||
use_key_equivalents: false,
|
||||
input: Some("true"),
|
||||
},
|
||||
target_keybind_source: KeybindSource::User,
|
||||
},
|
||||
r#"[
|
||||
{
|
||||
"context": "SomeContext",
|
||||
"bindings": {
|
||||
"b": "foo::baz",
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "SomeContext",
|
||||
"bindings": {
|
||||
"c": "foo::baz",
|
||||
}
|
||||
},
|
||||
]"#
|
||||
.unindent(),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -353,58 +353,29 @@ pub fn replace_top_level_array_value_in_json_text(
|
||||
let range = cursor.node().range();
|
||||
let indent_width = range.start_point.column;
|
||||
let offset = range.start_byte;
|
||||
let text_range = range.start_byte..range.end_byte;
|
||||
let value_str = &text[text_range.clone()];
|
||||
let value_str = &text[range.start_byte..range.end_byte];
|
||||
let needs_indent = range.start_point.row > 0;
|
||||
|
||||
if new_value.is_none() && key_path.is_empty() {
|
||||
let mut remove_range = text_range.clone();
|
||||
if index == 0 {
|
||||
while cursor.goto_next_sibling()
|
||||
&& (cursor.node().is_extra() || cursor.node().is_missing())
|
||||
{}
|
||||
if cursor.node().kind() == "," {
|
||||
remove_range.end = cursor.node().range().end_byte;
|
||||
}
|
||||
if let Some(next_newline) = &text[remove_range.end + 1..].find('\n') {
|
||||
if text[remove_range.end + 1..remove_range.end + next_newline]
|
||||
.chars()
|
||||
.all(|c| c.is_ascii_whitespace())
|
||||
{
|
||||
remove_range.end = remove_range.end + next_newline;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
while cursor.goto_previous_sibling()
|
||||
&& (cursor.node().is_extra() || cursor.node().is_missing())
|
||||
{}
|
||||
if cursor.node().kind() == "," {
|
||||
remove_range.start = cursor.node().range().start_byte;
|
||||
}
|
||||
}
|
||||
return Ok((remove_range, String::new()));
|
||||
let (mut replace_range, mut replace_value) =
|
||||
replace_value_in_json_text(value_str, key_path, tab_size, new_value, replace_key);
|
||||
|
||||
replace_range.start += offset;
|
||||
replace_range.end += offset;
|
||||
|
||||
if needs_indent {
|
||||
let increased_indent = format!("\n{space:width$}", space = ' ', width = indent_width);
|
||||
replace_value = replace_value.replace('\n', &increased_indent);
|
||||
// replace_value.push('\n');
|
||||
} else {
|
||||
let (mut replace_range, mut replace_value) =
|
||||
replace_value_in_json_text(value_str, key_path, tab_size, new_value, replace_key);
|
||||
|
||||
replace_range.start += offset;
|
||||
replace_range.end += offset;
|
||||
|
||||
if needs_indent {
|
||||
let increased_indent = format!("\n{space:width$}", space = ' ', width = indent_width);
|
||||
replace_value = replace_value.replace('\n', &increased_indent);
|
||||
// replace_value.push('\n');
|
||||
} else {
|
||||
while let Some(idx) = replace_value.find("\n ") {
|
||||
replace_value.remove(idx + 1);
|
||||
}
|
||||
while let Some(idx) = replace_value.find("\n") {
|
||||
replace_value.replace_range(idx..idx + 1, " ");
|
||||
}
|
||||
while let Some(idx) = replace_value.find("\n ") {
|
||||
replace_value.remove(idx + 1);
|
||||
}
|
||||
while let Some(idx) = replace_value.find("\n") {
|
||||
replace_value.replace_range(idx..idx + 1, " ");
|
||||
}
|
||||
|
||||
return Ok((replace_range, replace_value));
|
||||
}
|
||||
|
||||
return Ok((replace_range, replace_value));
|
||||
}
|
||||
|
||||
pub fn append_top_level_array_value_in_json_text(
|
||||
@@ -1034,14 +1005,14 @@ mod tests {
|
||||
input: impl ToString,
|
||||
index: usize,
|
||||
key_path: &[&str],
|
||||
value: Option<Value>,
|
||||
value: Value,
|
||||
expected: impl ToString,
|
||||
) {
|
||||
let input = input.to_string();
|
||||
let result = replace_top_level_array_value_in_json_text(
|
||||
&input,
|
||||
key_path,
|
||||
value.as_ref(),
|
||||
Some(&value),
|
||||
None,
|
||||
index,
|
||||
4,
|
||||
@@ -1052,10 +1023,10 @@ mod tests {
|
||||
pretty_assertions::assert_eq!(expected.to_string(), result_str);
|
||||
}
|
||||
|
||||
check_array_replace(r#"[1, 3, 3]"#, 1, &[], Some(json!(2)), r#"[1, 2, 3]"#);
|
||||
check_array_replace(r#"[1, 3, 3]"#, 2, &[], Some(json!(2)), r#"[1, 3, 2]"#);
|
||||
check_array_replace(r#"[1, 3, 3,]"#, 3, &[], Some(json!(2)), r#"[1, 3, 3, 2]"#);
|
||||
check_array_replace(r#"[1, 3, 3,]"#, 100, &[], Some(json!(2)), r#"[1, 3, 3, 2]"#);
|
||||
check_array_replace(r#"[1, 3, 3]"#, 1, &[], json!(2), r#"[1, 2, 3]"#);
|
||||
check_array_replace(r#"[1, 3, 3]"#, 2, &[], json!(2), r#"[1, 3, 2]"#);
|
||||
check_array_replace(r#"[1, 3, 3,]"#, 3, &[], json!(2), r#"[1, 3, 3, 2]"#);
|
||||
check_array_replace(r#"[1, 3, 3,]"#, 100, &[], json!(2), r#"[1, 3, 3, 2]"#);
|
||||
check_array_replace(
|
||||
r#"[
|
||||
1,
|
||||
@@ -1065,7 +1036,7 @@ mod tests {
|
||||
.unindent(),
|
||||
1,
|
||||
&[],
|
||||
Some(json!({"foo": "bar", "baz": "qux"})),
json!({"foo": "bar", "baz": "qux"}),
r#"[
1,
{

@@ -1080,7 +1051,7 @@ mod tests {
r#"[1, 3, 3,]"#,
1,
&[],
Some(json!({"foo": "bar", "baz": "qux"})),
json!({"foo": "bar", "baz": "qux"}),
r#"[1, { "foo": "bar", "baz": "qux" }, 3,]"#,
);

@@ -1088,7 +1059,7 @@ mod tests {
r#"[1, { "foo": "bar", "baz": "qux" }, 3,]"#,
1,
&["baz"],
Some(json!({"qux": "quz"})),
json!({"qux": "quz"}),
r#"[1, { "foo": "bar", "baz": { "qux": "quz" } }, 3,]"#,
);

@@ -1103,7 +1074,7 @@ mod tests {
]"#,
1,
&["baz"],
Some(json!({"qux": "quz"})),
json!({"qux": "quz"}),
r#"[
1,
{

@@ -1129,7 +1100,7 @@ mod tests {
]"#,
1,
&["baz"],
Some(json!("qux")),
json!("qux"),
r#"[
1,
{

@@ -1156,7 +1127,7 @@ mod tests {
]"#,
1,
&["baz"],
Some(json!("qux")),
json!("qux"),
r#"[
1,
{

@@ -1180,7 +1151,7 @@ mod tests {
]"#,
2,
&[],
Some(json!("replaced")),
json!("replaced"),
r#"[
1,
// This is element 2

@@ -1198,7 +1169,7 @@ mod tests {
.unindent(),
0,
&[],
Some(json!("first")),
json!("first"),
r#"[
// Empty array with comment
"first"

@@ -1209,7 +1180,7 @@ mod tests {
r#"[]"#.unindent(),
0,
&[],
Some(json!("first")),
json!("first"),
r#"[
"first"
]"#

@@ -1226,7 +1197,7 @@ mod tests {
]"#,
0,
&[],
Some(json!({"new": "object"})),
json!({"new": "object"}),
r#"[
// Leading comment
// Another leading comment

@@ -1246,7 +1217,7 @@ mod tests {
]"#,
1,
&[],
Some(json!("deep")),
json!("deep"),
r#"[
1,
"deep",

@@ -1259,7 +1230,7 @@ mod tests {
r#"[1,2, 3, 4]"#,
2,
&[],
Some(json!("spaced")),
json!("spaced"),
r#"[1,2, "spaced", 4]"#,
);

@@ -1272,7 +1243,7 @@ mod tests {
]"#,
1,
&[],
Some(json!(["a", "b", "c", "d"])),
json!(["a", "b", "c", "d"]),
r#"[
[1, 2, 3],
[

@@ -1297,7 +1268,7 @@ mod tests {
]"#,
0,
&[],
Some(json!("updated")),
json!("updated"),
r#"[
/*
* This is a

@@ -1313,7 +1284,7 @@ mod tests {
r#"[true, false, true]"#,
1,
&[],
Some(json!(null)),
json!(null),
r#"[true, null, true]"#,
);

@@ -1322,7 +1293,7 @@ mod tests {
r#"[42]"#,
0,
&[],
Some(json!({"answer": 42})),
json!({"answer": 42}),
r#"[{ "answer": 42 }]"#,
);

@@ -1336,7 +1307,7 @@ mod tests {
.unindent(),
10,
&[],
Some(json!(123)),
json!(123),
r#"[
// Comment 1
// Comment 2

@@ -1345,54 +1316,6 @@ mod tests {
]"#
.unindent(),
);

check_array_replace(
r#"[
{
"key": "value"
},
{
"key": "value2"
}
]"#
.unindent(),
0,
&[],
None,
r#"[
{
"key": "value2"
}
]"#
.unindent(),
);

check_array_replace(
r#"[
{
"key": "value"
},
{
"key": "value2"
},
{
"key": "value3"
},
]"#
.unindent(),
1,
&[],
None,
r#"[
{
"key": "value"
},
{
"key": "value3"
},
]"#
.unindent(),
);
}

#[test]
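
The hunks above adjust the check_array_replace tests so that the replacement value is wrapped in an Option, with None apparently meaning "remove the element at that index" (the expected outputs in the two trailing calls drop exactly that element). For orientation, here is a much-simplified sketch of that operation on a plain serde_json array; unlike the editor code under test it does not take a key path and does not preserve comments, trailing commas, or formatting, and array_replace is an illustrative name, not Zed's API:

// Cargo.toml (assumed): serde_json = "1"
use serde_json::{Value, json};

/// Replace the array element at `index` with `new_value`, or remove it when
/// `new_value` is None. Returns the edited document re-serialized as text.
/// (Illustrative only: comments and original formatting are not preserved.)
fn array_replace(source: &str, index: usize, new_value: Option<Value>) -> serde_json::Result<String> {
    let mut doc: Value = serde_json::from_str(source)?;
    if let Some(array) = doc.as_array_mut() {
        match new_value {
            Some(value) if index < array.len() => array[index] = value,
            None if index < array.len() => {
                array.remove(index);
            }
            // Out-of-range indices are left untouched in this sketch.
            _ => {}
        }
    }
    serde_json::to_string(&doc)
}

fn main() -> serde_json::Result<()> {
    // Replace element 1 with an object, mirroring the first hunk above
    // (plain JSON input here, since serde_json rejects trailing commas).
    let edited = array_replace(r#"[1, 3, 3]"#, 1, Some(json!({"foo": "bar", "baz": "qux"})))?;
    assert_eq!(
        serde_json::from_str::<Value>(&edited)?,
        json!([1, {"foo": "bar", "baz": "qux"}, 3])
    );
    // None removes the element, mirroring the two trailing calls.
    let removed = array_replace(r#"[1, 2, 3]"#, 1, None)?;
    assert_eq!(serde_json::from_str::<Value>(&removed)?, json!([1, 3]));
    Ok(())
}
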
File diff suppressed because it is too large
@@ -371,48 +371,35 @@ impl TerminalBuilder {
release_channel::AppVersion::global(cx).to_string(),
);

#[derive(Default)]
struct ShellParams {
program: String,
args: Option<Vec<String>>,
title_override: Option<SharedString>,
}

let shell_params = match shell.clone() {
Shell::System => {
#[cfg(target_os = "windows")]
{
Some(ShellParams {
program: util::get_windows_system_shell(),
..Default::default()
})
}
#[cfg(not(target_os = "windows"))]
None
}
Shell::Program(program) => Some(ShellParams {
program,
..Default::default()
}),
Shell::WithArguments {
program,
args,
title_override,
} => Some(ShellParams {
program,
args: Some(args),
title_override,
}),
};
let terminal_title_override = shell_params.as_ref().and_then(|e| e.title_override.clone());

#[cfg(windows)]
let shell_program = shell_params.as_ref().map(|params| params.program.clone());
let mut terminal_title_override = None;

let pty_options = {
let alac_shell = shell_params.map(|params| {
alacritty_terminal::tty::Shell::new(params.program, params.args.unwrap_or_default())
});
let alac_shell = match shell.clone() {
Shell::System => {
#[cfg(target_os = "windows")]
{
Some(alacritty_terminal::tty::Shell::new(
util::get_windows_system_shell(),
Vec::new(),
))
}
#[cfg(not(target_os = "windows"))]
{
None
}
}
Shell::Program(program) => {
Some(alacritty_terminal::tty::Shell::new(program, Vec::new()))
}
Shell::WithArguments {
program,
args,
title_override,
} => {
terminal_title_override = title_override;
Some(alacritty_terminal::tty::Shell::new(program, args))
}
};

alacritty_terminal::tty::Options {
shell: alac_shell,
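
The hunk above trades the inline match on Shell inside pty_options for a ShellParams struct that is computed once and reused for the PTY shell, the Windows shell_program, and the title override. A self-contained sketch of that mapping pattern follows; the Shell variants mirror the ones visible in the diff, but the types are simplified stand-ins and the cfg(target_os = "windows") gating is replaced by a plain boolean so the example runs anywhere:

// Sketch only: simplified stand-ins for the real Shell/ShellParams types.
#[allow(dead_code)]
enum Shell {
    System,
    Program(String),
    WithArguments {
        program: String,
        args: Vec<String>,
        title_override: Option<String>,
    },
}

#[allow(dead_code)]
#[derive(Default)]
struct ShellParams {
    program: String,
    args: Option<Vec<String>>,
    title_override: Option<String>,
}

// Collapse the enum into one params value; None means "let the platform default decide".
// `use_system` stands in for the cfg(target_os = "windows") gate in the real code.
fn shell_params(
    shell: Shell,
    system_shell: impl Fn() -> String,
    use_system: bool,
) -> Option<ShellParams> {
    match shell {
        Shell::System => use_system.then(|| ShellParams {
            program: system_shell(),
            ..Default::default()
        }),
        Shell::Program(program) => Some(ShellParams {
            program,
            ..Default::default()
        }),
        Shell::WithArguments {
            program,
            args,
            title_override,
        } => Some(ShellParams {
            program,
            args: Some(args),
            title_override,
        }),
    }
}

fn main() {
    let params = shell_params(
        Shell::WithArguments {
            program: "bash".into(),
            args: vec!["-lc".into(), "echo hi".into()],
            title_override: Some("build".into()),
        },
        || "cmd.exe".into(),
        false,
    );
    // The title override falls out of the same params value.
    let title = params.as_ref().and_then(|p| p.title_override.clone());
    println!("{title:?}");
}
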
@@ -516,8 +503,6 @@ impl TerminalBuilder {
python_venv_directory,
last_mouse_move_time: Instant::now(),
last_hyperlink_search_position: None,
#[cfg(windows)]
shell_program,
};

Ok(TerminalBuilder {

@@ -678,8 +663,6 @@ pub struct Terminal {
is_ssh_terminal: bool,
last_mouse_move_time: Instant,
last_hyperlink_search_position: Option<Point<Pixels>>,
#[cfg(windows)]
shell_program: Option<String>,
}

pub struct TaskState {

@@ -725,20 +708,6 @@ impl Terminal {
fn process_event(&mut self, event: AlacTermEvent, cx: &mut Context<Self>) {
match event {
AlacTermEvent::Title(title) => {
// ignore default shell program title change as windows always sends those events
// and it would end up showing the shell executable path in breadcrumbs
#[cfg(windows)]
{
if self
.shell_program
.as_ref()
.map(|e| *e == title)
.unwrap_or(false)
{
return;
}
}

self.breadcrumb_text = title;
cx.emit(Event::BreadcrumbsChanged);
}
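
The process_event hunk explains that Windows always emits a title-change event for the default shell program, which would otherwise surface the shell executable path in the breadcrumbs. The guard reduces to a one-line predicate; a standalone restatement (function name and types are illustrative, not Zed's API):

/// Returns true when a terminal title event should be ignored because it only
/// repeats the stored shell executable path.
fn should_ignore_title(shell_program: Option<&str>, title: &str) -> bool {
    shell_program.map(|program| program == title).unwrap_or(false)
}

fn main() {
    let shell = Some(r"C:\Windows\System32\cmd.exe");
    assert!(should_ignore_title(shell, r"C:\Windows\System32\cmd.exe"));
    assert!(!should_ignore_title(shell, "cargo build"));
    assert!(!should_ignore_title(None, r"C:\Windows\System32\cmd.exe"));
    println!("title filtering behaves as expected");
}
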
@@ -127,7 +127,7 @@ impl BatchedTextRun {
cx: &mut App,
) {
let pos = Point::new(
origin.x + self.start_point.column as f32 * dimensions.cell_width,
(origin.x + self.start_point.column as f32 * dimensions.cell_width).floor(),
origin.y + self.start_point.line as f32 * dimensions.line_height,
);

@@ -494,22 +494,6 @@ impl TerminalElement {
}
}

/// Checks if a character is a decorative block/box-like character that should
/// preserve its exact colors without contrast adjustment.
///
/// Fixes https://github.com/zed-industries/zed/issues/34234 - we can
/// expand this list if we run into more similar cases, but the goal
/// is to be conservative here.
fn is_decorative_character(ch: char) -> bool {
matches!(
ch as u32,
// 0x2500..=0x257F Box Drawing
// 0x2580..=0x259F Block Elements
// 0x25A0..=0x25D7 Geometric Shapes (block/box-like subset)
0x2500..=0x25D7
)
}

/// Converts the Alacritty cell styles to GPUI text styles and background color.
fn cell_style(
indexed: &IndexedCell,

@@ -524,10 +508,7 @@ impl TerminalElement {
let mut fg = convert_color(&fg, colors);
let bg = convert_color(&bg, colors);

// Only apply contrast adjustment to non-decorative characters
if !Self::is_decorative_character(indexed.c) {
fg = color_contrast::ensure_minimum_contrast(fg, bg, minimum_contrast);
}
fg = color_contrast::ensure_minimum_contrast(fg, bg, minimum_contrast);

// Ghostty uses (175/255) as the multiplier (~0.69), Alacritty uses 0.66, Kitty
// uses 0.75. We're using 0.7 because it's pretty well in the middle of that.
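
One side of the cell_style hunk skips the minimum-contrast adjustment for decorative characters so box-drawing and block glyphs keep their exact colors. A self-contained sketch of that guard, using a WCAG-style contrast ratio and a deliberately crude stand-in for ensure_minimum_contrast (the real color_contrast module is more nuanced; the Rgb type and the snap-to-black-or-white fallback are assumptions for illustration):

#[derive(Clone, Copy, Debug, PartialEq)]
struct Rgb {
    r: f32, // 0.0..=1.0
    g: f32,
    b: f32,
}

/// Decorative block/box-like characters whose colors should not be altered.
fn is_decorative_character(ch: char) -> bool {
    matches!(ch as u32, 0x2500..=0x25D7)
}

/// WCAG relative luminance of an sRGB color.
fn relative_luminance(c: Rgb) -> f32 {
    fn lin(v: f32) -> f32 {
        if v <= 0.04045 { v / 12.92 } else { ((v + 0.055) / 1.055).powf(2.4) }
    }
    0.2126 * lin(c.r) + 0.7152 * lin(c.g) + 0.0722 * lin(c.b)
}

/// WCAG contrast ratio between two colors (1.0..=21.0).
fn contrast_ratio(a: Rgb, b: Rgb) -> f32 {
    let (la, lb) = (relative_luminance(a), relative_luminance(b));
    let (hi, lo) = if la > lb { (la, lb) } else { (lb, la) };
    (hi + 0.05) / (lo + 0.05)
}

/// Simplified stand-in: if the contrast is too low, snap the foreground to
/// whichever of black or white contrasts better with the background.
fn ensure_minimum_contrast(fg: Rgb, bg: Rgb, minimum: f32) -> Rgb {
    if contrast_ratio(fg, bg) >= minimum {
        return fg;
    }
    let black = Rgb { r: 0.0, g: 0.0, b: 0.0 };
    let white = Rgb { r: 1.0, g: 1.0, b: 1.0 };
    if contrast_ratio(black, bg) > contrast_ratio(white, bg) { black } else { white }
}

/// The guarded adjustment from the hunk above: decorative glyphs keep their colors.
fn cell_foreground(ch: char, fg: Rgb, bg: Rgb, minimum_contrast: f32) -> Rgb {
    if is_decorative_character(ch) {
        fg
    } else {
        ensure_minimum_contrast(fg, bg, minimum_contrast)
    }
}

fn main() {
    let bg = Rgb { r: 0.12, g: 0.12, b: 0.12 };
    let dim = Rgb { r: 0.2, g: 0.2, b: 0.2 };
    // Ordinary text gets pushed to a readable color...
    assert_ne!(cell_foreground('A', dim, bg, 4.5), dim);
    // ...while a box-drawing character keeps its exact color.
    assert_eq!(cell_foreground('─', dim, bg, 4.5), dim);
    println!("contrast guard ok");
}
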
@@ -1594,91 +1575,6 @@ mod tests {
use super::*;
use gpui::{AbsoluteLength, Hsla, font};

#[test]
fn test_is_decorative_character() {
// Box Drawing characters (U+2500 to U+257F)
assert!(TerminalElement::is_decorative_character('─')); // U+2500
assert!(TerminalElement::is_decorative_character('│')); // U+2502
assert!(TerminalElement::is_decorative_character('┌')); // U+250C
assert!(TerminalElement::is_decorative_character('┐')); // U+2510
assert!(TerminalElement::is_decorative_character('└')); // U+2514
assert!(TerminalElement::is_decorative_character('┘')); // U+2518
assert!(TerminalElement::is_decorative_character('┼')); // U+253C

// Block Elements (U+2580 to U+259F)
assert!(TerminalElement::is_decorative_character('▀')); // U+2580
assert!(TerminalElement::is_decorative_character('▄')); // U+2584
assert!(TerminalElement::is_decorative_character('█')); // U+2588
assert!(TerminalElement::is_decorative_character('░')); // U+2591
assert!(TerminalElement::is_decorative_character('▒')); // U+2592
assert!(TerminalElement::is_decorative_character('▓')); // U+2593

// Geometric Shapes - block/box-like subset (U+25A0 to U+25D7)
assert!(TerminalElement::is_decorative_character('■')); // U+25A0
assert!(TerminalElement::is_decorative_character('□')); // U+25A1
assert!(TerminalElement::is_decorative_character('▲')); // U+25B2
assert!(TerminalElement::is_decorative_character('▼')); // U+25BC
assert!(TerminalElement::is_decorative_character('◆')); // U+25C6
assert!(TerminalElement::is_decorative_character('●')); // U+25CF

// The specific character from the issue
assert!(TerminalElement::is_decorative_character('◗')); // U+25D7

// Characters that should NOT be considered decorative
assert!(!TerminalElement::is_decorative_character('A'));
assert!(!TerminalElement::is_decorative_character('a'));
assert!(!TerminalElement::is_decorative_character('0'));
assert!(!TerminalElement::is_decorative_character(' '));
assert!(!TerminalElement::is_decorative_character('←')); // U+2190 (Arrow, not in our ranges)
assert!(!TerminalElement::is_decorative_character('→')); // U+2192 (Arrow, not in our ranges)
assert!(!TerminalElement::is_decorative_character('◘')); // U+25D8 (Just outside our range)
assert!(!TerminalElement::is_decorative_character('◙')); // U+25D9 (Just outside our range)
}

#[test]
fn test_decorative_character_boundary_cases() {
// Test exact boundaries of our ranges
// Box Drawing range boundaries
assert!(TerminalElement::is_decorative_character('\u{2500}')); // First char
assert!(TerminalElement::is_decorative_character('\u{257F}')); // Last char
assert!(!TerminalElement::is_decorative_character('\u{24FF}')); // Just before

// Block Elements range boundaries
assert!(TerminalElement::is_decorative_character('\u{2580}')); // First char
assert!(TerminalElement::is_decorative_character('\u{259F}')); // Last char

// Geometric Shapes subset boundaries
assert!(TerminalElement::is_decorative_character('\u{25A0}')); // First char
assert!(TerminalElement::is_decorative_character('\u{25D7}')); // Last char (◗)
assert!(!TerminalElement::is_decorative_character('\u{25D8}')); // Just after
}

#[test]
fn test_decorative_characters_bypass_contrast_adjustment() {
// Decorative characters should not be affected by contrast adjustment

// The specific character from issue #34234
let problematic_char = '◗'; // U+25D7
assert!(
TerminalElement::is_decorative_character(problematic_char),
"Character ◗ (U+25D7) should be recognized as decorative"
);

// Verify some other commonly used decorative characters
assert!(TerminalElement::is_decorative_character('│')); // Vertical line
assert!(TerminalElement::is_decorative_character('─')); // Horizontal line
assert!(TerminalElement::is_decorative_character('█')); // Full block
assert!(TerminalElement::is_decorative_character('▓')); // Dark shade
assert!(TerminalElement::is_decorative_character('■')); // Black square
assert!(TerminalElement::is_decorative_character('●')); // Black circle

// Verify normal text characters are NOT decorative
assert!(!TerminalElement::is_decorative_character('A'));
assert!(!TerminalElement::is_decorative_character('1'));
assert!(!TerminalElement::is_decorative_character('$'));
assert!(!TerminalElement::is_decorative_character(' '));
}

#[test]
fn test_contrast_adjustment_logic() {
// Test the core contrast adjustment logic without needing full app context
@@ -51,6 +51,7 @@ impl OnboardingBanner {
}

fn dismiss(&mut self, cx: &mut Context<Self>) {
telemetry::event!("Banner Dismissed", source = self.source);
persist_dismissed(&self.source, cx);
self.dismissed = true;
cx.notify();

@@ -143,10 +144,7 @@ impl Render for OnboardingBanner {
div().border_l_1().border_color(border_color).child(
IconButton::new("close", IconName::Close)
.icon_size(IconSize::Indicator)
.on_click(cx.listener(|this, _, _window, cx| {
telemetry::event!("Banner Dismissed", source = this.source);
this.dismiss(cx)
}))
.on_click(cx.listener(|this, _, _window, cx| this.dismiss(cx)))
.tooltip(|window, cx| {
Tooltip::with_meta(
"Close Announcement Banner",
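
The hunk above moves the "Banner Dismissed" telemetry call out of the close-button click handler and into dismiss() itself, so every dismissal path reports the event exactly once. A minimal sketch of that refactor shape, with log_event and Banner as hypothetical stand-ins for Zed's telemetry macro and banner entity:

// Hypothetical stand-in for the telemetry macro used in the real code.
fn log_event(name: &str, source: &str) {
    println!("telemetry: {name} (source: {source})");
}

struct Banner {
    source: String,
    dismissed: bool,
}

impl Banner {
    /// Every way of dismissing the banner funnels through here, so the
    /// telemetry event is emitted exactly once per dismissal.
    fn dismiss(&mut self) {
        log_event("Banner Dismissed", &self.source);
        self.dismissed = true;
    }
}

fn main() {
    let mut banner = Banner {
        source: "onboarding".into(),
        dismissed: false,
    };
    // A click handler now only needs to call dismiss(); no duplicated logging.
    banner.dismiss();
    assert!(banner.dismissed);
}
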
@@ -159,7 +159,6 @@ pub struct ContextMenu {
keep_open_on_confirm: bool,
documentation_aside: Option<(usize, DocumentationAside)>,
fixed_width: Option<DefiniteLength>,
align_popover_top: bool,
}

#[derive(Copy, Clone, PartialEq, Eq)]

@@ -216,7 +215,6 @@ impl ContextMenu {
key_context: "menu".into(),
_on_blur_subscription,
keep_open_on_confirm: false,
align_popover_top: true,
documentation_aside: None,
fixed_width: None,
end_slot_action: None,

@@ -259,7 +257,6 @@ impl ContextMenu {
key_context: "menu".into(),
_on_blur_subscription,
keep_open_on_confirm: true,
align_popover_top: true,
documentation_aside: None,
fixed_width: None,
end_slot_action: None,

@@ -300,7 +297,6 @@ impl ContextMenu {
|this: &mut ContextMenu, window, cx| this.cancel(&menu::Cancel, window, cx),
),
keep_open_on_confirm: false,
align_popover_top: true,
documentation_aside: None,
fixed_width: None,
end_slot_action: None,

@@ -782,11 +778,6 @@ impl ContextMenu {
self
}

pub fn align_popover_bottom(mut self) -> Self {
self.align_popover_top = false;
self
}

fn render_menu_item(
&self,
ix: usize,

@@ -1109,13 +1100,7 @@ impl Render for ContextMenu {
.when(is_wide_window, |this| this.flex_row())
.when(!is_wide_window, |this| this.flex_col())
.w_full()
.map(|div| {
if self.align_popover_top {
div.items_start()
} else {
div.items_end()
}
})
.items_start()
.gap_1()
.child(div().children(aside.clone().and_then(|(_, aside)| {
(aside.side == DocumentationSide::Left).then(|| render_aside(aside, cx))
@@ -1,5 +1,3 @@
use std::rc::Rc;

use gpui::{Action, AnyElement, AnyView, AppContext as _, FocusHandle, IntoElement, Render};
use settings::Settings;
use theme::ThemeSettings;

@@ -9,36 +7,15 @@ use crate::{Color, KeyBinding, Label, LabelSize, StyledExt, h_flex, v_flex};

#[derive(RegisterComponent)]
pub struct Tooltip {
title: Title,
title: SharedString,
meta: Option<SharedString>,
key_binding: Option<KeyBinding>,
}

#[derive(Clone, IntoElement)]
enum Title {
Str(SharedString),
Callback(Rc<dyn Fn(&mut Window, &mut App) -> AnyElement>),
}

impl From<SharedString> for Title {
fn from(value: SharedString) -> Self {
Title::Str(value)
}
}

impl RenderOnce for Title {
fn render(self, window: &mut Window, cx: &mut App) -> impl gpui::IntoElement {
match self {
Title::Str(title) => title.into_any_element(),
Title::Callback(element) => element(window, cx),
}
}
}

impl Tooltip {
pub fn simple(title: impl Into<SharedString>, cx: &mut App) -> AnyView {
cx.new(|_| Self {
title: Title::Str(title.into()),
title: title.into(),
meta: None,
key_binding: None,
})

@@ -49,7 +26,7 @@ impl Tooltip {
let title = title.into();
move |_, cx| {
cx.new(|_| Self {
title: title.clone().into(),
title: title.clone(),
meta: None,
key_binding: None,
})

@@ -57,15 +34,15 @@ impl Tooltip {
}
}

pub fn for_action_title<T: Into<SharedString>>(
title: T,
pub fn for_action_title<Title: Into<SharedString>>(
title: Title,
action: &dyn Action,
) -> impl Fn(&mut Window, &mut App) -> AnyView + use<T> {
) -> impl Fn(&mut Window, &mut App) -> AnyView + use<Title> {
let title = title.into();
let action = action.boxed_clone();
move |window, cx| {
cx.new(|cx| Self {
title: Title::Str(title.clone()),
title: title.clone(),
meta: None,
key_binding: KeyBinding::for_action(action.as_ref(), window, cx),
})

@@ -83,7 +60,7 @@ impl Tooltip {
let focus_handle = focus_handle.clone();
move |window, cx| {
cx.new(|cx| Self {
title: Title::Str(title.clone()),
title: title.clone(),
meta: None,
key_binding: KeyBinding::for_action_in(action.as_ref(), &focus_handle, window, cx),
})

@@ -98,7 +75,7 @@ impl Tooltip {
cx: &mut App,
) -> AnyView {
cx.new(|cx| Self {
title: Title::Str(title.into()),
title: title.into(),
meta: None,
key_binding: KeyBinding::for_action(action, window, cx),
})

@@ -113,7 +90,7 @@ impl Tooltip {
cx: &mut App,
) -> AnyView {
cx.new(|cx| Self {
title: title.into().into(),
title: title.into(),
meta: None,
key_binding: KeyBinding::for_action_in(action, focus_handle, window, cx),
})

@@ -128,7 +105,7 @@ impl Tooltip {
cx: &mut App,
) -> AnyView {
cx.new(|cx| Self {
title: title.into().into(),
title: title.into(),
meta: Some(meta.into()),
key_binding: action.and_then(|action| KeyBinding::for_action(action, window, cx)),
})

@@ -144,7 +121,7 @@ impl Tooltip {
cx: &mut App,
) -> AnyView {
cx.new(|cx| Self {
title: title.into().into(),
title: title.into(),
meta: Some(meta.into()),
key_binding: action
.and_then(|action| KeyBinding::for_action_in(action, focus_handle, window, cx)),

@@ -154,35 +131,12 @@ impl Tooltip {

pub fn new(title: impl Into<SharedString>) -> Self {
Self {
title: title.into().into(),
title: title.into(),
meta: None,
key_binding: None,
}
}

pub fn new_element(title: impl Fn(&mut Window, &mut App) -> AnyElement + 'static) -> Self {
Self {
title: Title::Callback(Rc::new(title)),
meta: None,
key_binding: None,
}
}

pub fn element(
title: impl Fn(&mut Window, &mut App) -> AnyElement + 'static,
) -> impl Fn(&mut Window, &mut App) -> AnyView {
let title = Title::Callback(Rc::new(title));
move |_, cx| {
let title = title.clone();
cx.new(|_| Self {
title: title,
meta: None,
key_binding: None,
})
.into()
}
}

pub fn meta(mut self, meta: impl Into<SharedString>) -> Self {
self.meta = Some(meta.into());
self
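
The Tooltip diff swaps a plain SharedString title for a Title enum that holds either a static string or an Rc'd render callback, which is what new_element and element build on. A stripped-down sketch of the same pattern without gpui; String and a () -> String closure stand in for SharedString and the element-producing callback:

use std::rc::Rc;

/// Either a fixed title or a callback that produces one lazily.
#[derive(Clone)]
enum Title {
    Str(String),
    Callback(Rc<dyn Fn() -> String>),
}

impl From<String> for Title {
    fn from(value: String) -> Self {
        Title::Str(value)
    }
}

impl Title {
    fn render(&self) -> String {
        match self {
            Title::Str(title) => title.clone(),
            Title::Callback(callback) => callback(),
        }
    }
}

struct Tooltip {
    title: Title,
}

impl Tooltip {
    fn new(title: impl Into<String>) -> Self {
        Self { title: Title::Str(title.into()) }
    }

    fn new_element(title: impl Fn() -> String + 'static) -> Self {
        Self { title: Title::Callback(Rc::new(title)) }
    }
}

fn main() {
    let static_tooltip = Tooltip::new("Close Announcement Banner");
    let dynamic_tooltip = Tooltip::new_element(|| format!("updated at {}", 1234));
    println!("{}", static_tooltip.title.render());
    println!("{}", dynamic_tooltip.title.render());
}
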
@@ -54,7 +54,6 @@ impl sqlez::bindable::Bind for SerializedAxis {
}
}

// > https://zed.dev/cla
impl sqlez::bindable::Column for SerializedAxis {
fn column(
statement: &mut sqlez::statement::Statement,
@@ -6657,10 +6657,6 @@ impl WorkspaceStore {
Ok(())
})?
}

pub fn workspaces(&self) -> &HashSet<WindowHandle<Workspace>> {
&self.workspaces
}
}

impl ViewId {
@@ -50,12 +50,12 @@ fn main() {
println!("cargo:rustc-link-arg=/stack:{}", 8 * 1024 * 1024);
}

let release_channel = option_env!("RELEASE_CHANNEL").unwrap_or("dev");
let release_channel = option_env!("RELEASE_CHANNEL").unwrap_or("nightly");

let icon = match release_channel {
"stable" => "resources/windows/app-icon.ico",
"preview" => "resources/windows/app-icon-preview.ico",
"nightly" => "resources/windows/app-icon-nightly.ico",
"dev" => "resources/windows/app-icon-dev.ico",
_ => "resources/windows/app-icon-dev.ico",
};
let icon = std::path::Path::new(icon);
Some files were not shown because too many files have changed in this diff