Compare commits

..

3 Commits

Author SHA1 Message Date
Ben Kunkle
97ff79138a try sending schema to json language server
Co-Authored-By: Cole <cole@zed.dev>
2025-07-10 16:07:15 -05:00
Ben Kunkle
b2805e4559 json lsp ext 2025-07-10 16:07:15 -05:00
Ben Kunkle
b788549556 json language server running in action input 2025-07-10 16:07:15 -05:00
30 changed files with 690 additions and 1281 deletions

View File

@@ -0,0 +1,64 @@
name: "Trusted Signing on Windows"
description: "Install trusted signing on Windows."
# Modified from https://github.com/Azure/trusted-signing-action

# Declared explicitly so the `inputs.cache-dependencies` references below
# resolve; in composite actions an undeclared input silently evaluates to an
# empty string, which would disable all of the cache steps.
inputs:
  cache-dependencies:
    description: "Whether to cache the TrustedSigning module and NuGet packages."
    required: false
    default: "true"

runs:
  using: "composite"
  steps:
    # Compute shared version pins once and expose them as step outputs so the
    # cache paths and keys below cannot drift out of sync with each other.
    - name: Set variables
      id: set-variables
      shell: "pwsh"
      run: |
        $defaultPath = $env:PSModulePath -split ';' | Select-Object -First 1
        "PSMODULEPATH=$defaultPath" | Out-File -FilePath $env:GITHUB_OUTPUT -Append
        "TRUSTED_SIGNING_MODULE_VERSION=0.5.3" | Out-File -FilePath $env:GITHUB_OUTPUT -Append
        "BUILD_TOOLS_NUGET_VERSION=10.0.22621.3233" | Out-File -FilePath $env:GITHUB_OUTPUT -Append
        "TRUSTED_SIGNING_NUGET_VERSION=1.0.53" | Out-File -FilePath $env:GITHUB_OUTPUT -Append
        "DOTNET_SIGNCLI_NUGET_VERSION=0.9.1-beta.24469.1" | Out-File -FilePath $env:GITHUB_OUTPUT -Append
    - name: Cache TrustedSigning PowerShell module
      id: cache-module
      uses: actions/cache@v4
      env:
        cache-name: cache-module
      with:
        path: ${{ steps.set-variables.outputs.PSMODULEPATH }}\TrustedSigning\${{ steps.set-variables.outputs.TRUSTED_SIGNING_MODULE_VERSION }}
        key: TrustedSigning-${{ steps.set-variables.outputs.TRUSTED_SIGNING_MODULE_VERSION }}
      if: ${{ inputs.cache-dependencies == 'true' }}
    - name: Cache Microsoft.Windows.SDK.BuildTools NuGet package
      id: cache-buildtools
      uses: actions/cache@v4
      env:
        cache-name: cache-buildtools
      with:
        path: ~\AppData\Local\TrustedSigning\Microsoft.Windows.SDK.BuildTools\Microsoft.Windows.SDK.BuildTools.${{ steps.set-variables.outputs.BUILD_TOOLS_NUGET_VERSION }}
        key: Microsoft.Windows.SDK.BuildTools-${{ steps.set-variables.outputs.BUILD_TOOLS_NUGET_VERSION }}
      if: ${{ inputs.cache-dependencies == 'true' }}
    - name: Cache Microsoft.Trusted.Signing.Client NuGet package
      id: cache-tsclient
      uses: actions/cache@v4
      env:
        cache-name: cache-tsclient
      with:
        path: ~\AppData\Local\TrustedSigning\Microsoft.Trusted.Signing.Client\Microsoft.Trusted.Signing.Client.${{ steps.set-variables.outputs.TRUSTED_SIGNING_NUGET_VERSION }}
        key: Microsoft.Trusted.Signing.Client-${{ steps.set-variables.outputs.TRUSTED_SIGNING_NUGET_VERSION }}
      if: ${{ inputs.cache-dependencies == 'true' }}
    - name: Cache SignCli NuGet package
      id: cache-signcli
      uses: actions/cache@v4
      env:
        cache-name: cache-signcli
      with:
        path: ~\AppData\Local\TrustedSigning\sign\sign.${{ steps.set-variables.outputs.DOTNET_SIGNCLI_NUGET_VERSION }}
        key: SignCli-${{ steps.set-variables.outputs.DOTNET_SIGNCLI_NUGET_VERSION }}
      if: ${{ inputs.cache-dependencies == 'true' }}
    # Only install from PSGallery when caching is disabled or the module cache
    # missed; on a cache hit the module is already on PSModulePath.
    - name: Install Trusted Signing module
      shell: "pwsh"
      run: |
        Install-Module -Name TrustedSigning -RequiredVersion ${{ steps.set-variables.outputs.TRUSTED_SIGNING_MODULE_VERSION }} -Force -Repository PSGallery
      if: ${{ inputs.cache-dependencies != 'true' || steps.cache-module.outputs.cache-hit != 'true' }}

View File

@@ -21,9 +21,6 @@ env:
CARGO_TERM_COLOR: always
CARGO_INCREMENTAL: 0
RUST_BACKTRACE: 1
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
jobs:
job_spec:
@@ -496,6 +493,9 @@ jobs:
APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }}
APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }}
APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }}
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
steps:
- name: Install Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
@@ -578,6 +578,10 @@ jobs:
startsWith(github.ref, 'refs/tags/v')
|| contains(github.event.pull_request.labels.*.name, 'run-bundling')
needs: [linux_tests]
env:
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
@@ -631,6 +635,10 @@ jobs:
startsWith(github.ref, 'refs/tags/v')
|| contains(github.event.pull_request.labels.*.name, 'run-bundling')
needs: [linux_tests]
env:
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
@@ -683,12 +691,16 @@ jobs:
|| contains(github.event.pull_request.labels.*.name, 'run-bundling')
needs: [linux_tests]
name: Build Zed on FreeBSD
# env:
# MYTOKEN : ${{ secrets.MYTOKEN }}
# MYTOKEN2: "value2"
steps:
- uses: actions/checkout@v4
- name: Build FreeBSD remote-server
id: freebsd-build
uses: vmactions/freebsd-vm@c3ae29a132c8ef1924775414107a97cac042aad5 # v1.2.0
with:
# envs: "MYTOKEN MYTOKEN2"
usesh: true
release: 13.5
copyback: true
@@ -755,6 +767,8 @@ jobs:
ACCOUNT_NAME: ${{ vars.AZURE_SIGNING_ACCOUNT_NAME }}
CERT_PROFILE_NAME: ${{ vars.AZURE_SIGNING_CERT_PROFILE_NAME }}
ENDPOINT: ${{ vars.AZURE_SIGNING_ENDPOINT }}
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
FILE_DIGEST: SHA256
TIMESTAMP_DIGEST: SHA256
TIMESTAMP_SERVER: "http://timestamp.acs.microsoft.com"
@@ -771,6 +785,9 @@ jobs:
# This exports RELEASE_CHANNEL into env (GITHUB_ENV)
script/determine-release-channel.ps1
- name: Install trusted signing
uses: ./.github/actions/install_trusted_signing
- name: Build Zed installer
working-directory: ${{ env.ZED_WORKSPACE }}
run: script/bundle-windows.ps1

View File

@@ -12,9 +12,6 @@ env:
CARGO_TERM_COLOR: always
CARGO_INCREMENTAL: 0
RUST_BACKTRACE: 1
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
jobs:
style:
@@ -94,6 +91,9 @@ jobs:
APPLE_NOTARIZATION_KEY: ${{ secrets.APPLE_NOTARIZATION_KEY }}
APPLE_NOTARIZATION_KEY_ID: ${{ secrets.APPLE_NOTARIZATION_KEY_ID }}
APPLE_NOTARIZATION_ISSUER_ID: ${{ secrets.APPLE_NOTARIZATION_ISSUER_ID }}
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
steps:
- name: Install Node
uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4
@@ -125,6 +125,10 @@ jobs:
runs-on:
- buildjet-16vcpu-ubuntu-2004
needs: tests
env:
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
@@ -160,6 +164,10 @@ jobs:
runs-on:
- buildjet-16vcpu-ubuntu-2204-arm
needs: tests
env:
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
steps:
- name: Checkout repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
@@ -190,6 +198,9 @@ jobs:
if: github.repository_owner == 'zed-industries'
runs-on: github-8vcpu-ubuntu-2404
needs: tests
env:
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
name: Build Zed on FreeBSD
# env:
# MYTOKEN : ${{ secrets.MYTOKEN }}
@@ -246,6 +257,8 @@ jobs:
ACCOUNT_NAME: ${{ vars.AZURE_SIGNING_ACCOUNT_NAME }}
CERT_PROFILE_NAME: ${{ vars.AZURE_SIGNING_CERT_PROFILE_NAME }}
ENDPOINT: ${{ vars.AZURE_SIGNING_ENDPOINT }}
DIGITALOCEAN_SPACES_ACCESS_KEY: ${{ secrets.DIGITALOCEAN_SPACES_ACCESS_KEY }}
DIGITALOCEAN_SPACES_SECRET_KEY: ${{ secrets.DIGITALOCEAN_SPACES_SECRET_KEY }}
FILE_DIGEST: SHA256
TIMESTAMP_DIGEST: SHA256
TIMESTAMP_SERVER: "http://timestamp.acs.microsoft.com"
@@ -263,6 +276,9 @@ jobs:
Write-Host "Publishing version: $version on release channel nightly"
"nightly" | Set-Content -Path "crates/zed/RELEASE_CHANNEL"
- name: Install trusted signing
uses: ./.github/actions/install_trusted_signing
- name: Build Zed installer
working-directory: ${{ env.ZED_WORKSPACE }}
run: script/bundle-windows.ps1

2
Cargo.lock generated
View File

@@ -607,7 +607,6 @@ dependencies = [
"parking_lot",
"smol",
"tempfile",
"unindent",
"util",
"workspace-hack",
]
@@ -14677,6 +14676,7 @@ dependencies = [
"search",
"serde",
"settings",
"tempfile",
"theme",
"tree-sitter-json",
"tree-sitter-rust",

View File

@@ -1112,10 +1112,7 @@
"context": "KeymapEditor",
"use_key_equivalents": true,
"bindings": {
"ctrl-f": "search::FocusSearch",
"alt-find": "keymap_editor::ToggleKeystrokeSearch",
"alt-ctrl-f": "keymap_editor::ToggleKeystrokeSearch",
"alt-c": "keymap_editor::ToggleConflictFilter"
"ctrl-f": "search::FocusSearch"
}
}
]

View File

@@ -1211,8 +1211,7 @@
"context": "KeymapEditor",
"use_key_equivalents": true,
"bindings": {
"cmd-alt-f": "keymap_editor::ToggleKeystrokeSearch",
"cmd-alt-c": "keymap_editor::ToggleConflictFilter"
"cmd-f": "search::FocusSearch"
}
}
]

View File

@@ -19,6 +19,5 @@ net.workspace = true
parking_lot.workspace = true
smol.workspace = true
tempfile.workspace = true
unindent.workspace = true
util.workspace = true
workspace-hack.workspace = true

View File

@@ -40,21 +40,11 @@ impl AskPassDelegate {
self.tx.send((prompt, tx)).await?;
Ok(rx.await?)
}
pub fn new_always_failing() -> Self {
let (tx, _rx) = mpsc::unbounded::<(String, oneshot::Sender<String>)>();
Self {
tx,
_task: Task::ready(()),
}
}
}
pub struct AskPassSession {
#[cfg(not(target_os = "windows"))]
script_path: std::path::PathBuf,
#[cfg(not(target_os = "windows"))]
gpg_script_path: std::path::PathBuf,
#[cfg(target_os = "windows")]
askpass_helper: String,
#[cfg(target_os = "windows")]
@@ -69,9 +59,6 @@ const ASKPASS_SCRIPT_NAME: &str = "askpass.sh";
#[cfg(target_os = "windows")]
const ASKPASS_SCRIPT_NAME: &str = "askpass.ps1";
#[cfg(not(target_os = "windows"))]
const GPG_SCRIPT_NAME: &str = "gpg.sh";
impl AskPassSession {
/// This will create a new AskPassSession.
/// You must retain this session until the master process exits.
@@ -85,8 +72,6 @@ impl AskPassSession {
let temp_dir = tempfile::Builder::new().prefix("zed-askpass").tempdir()?;
let askpass_socket = temp_dir.path().join("askpass.sock");
let askpass_script_path = temp_dir.path().join(ASKPASS_SCRIPT_NAME);
#[cfg(not(target_os = "windows"))]
let gpg_script_path = temp_dir.path().join(GPG_SCRIPT_NAME);
let (askpass_opened_tx, askpass_opened_rx) = oneshot::channel::<()>();
let listener = UnixListener::bind(&askpass_socket).context("creating askpass socket")?;
#[cfg(not(target_os = "windows"))]
@@ -150,20 +135,9 @@ impl AskPassSession {
askpass_script_path.display()
);
#[cfg(not(target_os = "windows"))]
{
let gpg_script = generate_gpg_script();
fs::write(&gpg_script_path, gpg_script)
.await
.with_context(|| format!("creating gpg wrapper script at {gpg_script_path:?}"))?;
make_file_executable(&gpg_script_path).await?;
}
Ok(Self {
#[cfg(not(target_os = "windows"))]
script_path: askpass_script_path,
#[cfg(not(target_os = "windows"))]
gpg_script_path,
#[cfg(target_os = "windows")]
secret,
@@ -186,19 +160,6 @@ impl AskPassSession {
&self.askpass_helper
}
#[cfg(not(target_os = "windows"))]
pub fn gpg_script_path(&self) -> Option<impl AsRef<OsStr>> {
Some(&self.gpg_script_path)
}
#[cfg(target_os = "windows")]
pub fn gpg_script_path(&self) -> Option<impl AsRef<OsStr>> {
// TODO implement wrapping GPG on Windows. This is more difficult than on Unix
// because we can't use --passphrase-fd with a nonstandard FD, and both --passphrase
// and --passphrase-file are insecure.
None::<std::path::PathBuf>
}
// This will run the askpass task forever, resolving as many authentication requests as needed.
// The caller is responsible for examining the result of their own commands and cancelling this
// future when this is no longer needed. Note that this can only be called once, but due to the
@@ -302,23 +263,3 @@ fn generate_askpass_script(zed_path: &std::path::Path, askpass_socket: &std::pat
askpass_socket = askpass_socket.display(),
)
}
/// Builds a POSIX shell script that wraps the user's `gpg` binary so the key
/// passphrase can be obtained through `${GIT_ASKPASS}` instead of an
/// interactive pinentry.
///
/// The script resolves the real binary from `git config gpg.program`
/// (defaulting to `gpg`), unsets `GIT_CONFIG_PARAMETERS` first, prompts via
/// the askpass helper, and feeds the passphrase to gpg on fd 3 through a
/// here-doc combined with `--passphrase-fd 3 --pinentry-mode loopback` —
/// keeping the secret out of the argument list.
#[inline]
#[cfg(not(target_os = "windows"))]
fn generate_gpg_script() -> String {
use unindent::Unindent as _;
r#"
#!/bin/sh
set -eu
unset GIT_CONFIG_PARAMETERS
GPG_PROGRAM=$(git config gpg.program || echo 'gpg')
PROMPT="Enter passphrase to unlock GPG key:"
PASSPHRASE=$(${GIT_ASKPASS} "${PROMPT}")
exec "${GPG_PROGRAM}" --batch --no-tty --yes --passphrase-fd 3 --pinentry-mode loopback "$@" 3<<EOF
${PASSPHRASE}
EOF
"#.unindent()
}

View File

@@ -18,6 +18,7 @@ use serde::{Deserialize, Serialize};
use settings::Settings;
use std::sync::Arc;
use ui::IconName;
use util::markdown::MarkdownInlineCode;
/// If the model requests to read a file whose size exceeds this, then
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
@@ -77,21 +78,11 @@ impl Tool for ReadFileTool {
fn ui_text(&self, input: &serde_json::Value) -> String {
match serde_json::from_value::<ReadFileToolInput>(input.clone()) {
Ok(input) => {
let path = &input.path;
let path = MarkdownInlineCode(&input.path);
match (input.start_line, input.end_line) {
(Some(start), Some(end)) => {
format!(
"[Read file `{}` (lines {}-{})](@selection:{}:({}-{}))",
path, start, end, path, start, end
)
}
(Some(start), None) => {
format!(
"[Read file `{}` (from line {})](@selection:{}:({}-{}))",
path, start, path, start, start
)
}
_ => format!("[Read file `{}`](@file:{})", path, path),
(Some(start), None) => format!("Read file {path} (from line {start})"),
(Some(start), Some(end)) => format!("Read file {path} (lines {start}-{end})"),
_ => format!("Read file {path}"),
}
}
Err(_) => "Read file".to_string(),

View File

@@ -1389,17 +1389,10 @@ impl Room {
let sources = cx.screen_capture_sources();
cx.spawn(async move |this, cx| {
let sources = sources
.await
.map_err(|error| error.into())
.and_then(|sources| sources);
let source =
sources.and_then(|sources| sources.into_iter().next().context("no display found"));
let sources = sources.await??;
let source = sources.first().context("no display found")?;
let publication = match source {
Ok(source) => participant.publish_screenshare_track(&*source, cx).await,
Err(error) => Err(error),
};
let publication = participant.publish_screenshare_track(&**source, cx).await;
this.update(cx, |this, cx| {
let live_kit = this

View File

@@ -375,10 +375,8 @@ impl GitRepository for FakeGitRepository {
_message: gpui::SharedString,
_name_and_email: Option<(gpui::SharedString, gpui::SharedString)>,
_options: CommitOptions,
_ask_pass: AskPassDelegate,
_env: Arc<HashMap<String, String>>,
_cx: AsyncApp,
) -> BoxFuture<'static, Result<()>> {
) -> BoxFuture<'_, Result<()>> {
unimplemented!()
}

View File

@@ -41,9 +41,9 @@ futures.workspace = true
workspace-hack.workspace = true
[dev-dependencies]
gpui = { workspace = true, features = ["test-support"] }
pretty_assertions.workspace = true
serde_json.workspace = true
tempfile.workspace = true
text = { workspace = true, features = ["test-support"] }
unindent.workspace = true
gpui = { workspace = true, features = ["test-support"] }
tempfile.workspace = true

View File

@@ -391,12 +391,8 @@ pub trait GitRepository: Send + Sync {
message: SharedString,
name_and_email: Option<(SharedString, SharedString)>,
options: CommitOptions,
askpass: AskPassDelegate,
env: Arc<HashMap<String, String>>,
// This method takes an AsyncApp to ensure it's invoked on the main thread,
// otherwise git-credentials-manager won't work.
cx: AsyncApp,
) -> BoxFuture<'static, Result<()>>;
) -> BoxFuture<'_, Result<()>>;
fn push(
&self,
@@ -1197,68 +1193,36 @@ impl GitRepository for RealGitRepository {
message: SharedString,
name_and_email: Option<(SharedString, SharedString)>,
options: CommitOptions,
ask_pass: AskPassDelegate,
env: Arc<HashMap<String, String>>,
cx: AsyncApp,
) -> BoxFuture<'static, Result<()>> {
) -> BoxFuture<'_, Result<()>> {
let working_directory = self.working_directory();
let executor = cx.background_executor().clone();
async move {
let working_directory = working_directory?;
let have_user_git_askpass = env.contains_key("GIT_ASKPASS");
let mut command = new_smol_command("git");
command.current_dir(&working_directory).envs(env.iter());
self.executor
.spawn(async move {
let mut cmd = new_smol_command("git");
cmd.current_dir(&working_directory?)
.envs(env.iter())
.args(["commit", "--quiet", "-m"])
.arg(&message.to_string())
.arg("--cleanup=strip");
let ask_pass = if have_user_git_askpass {
None
} else {
Some(AskPassSession::new(&executor, ask_pass).await?)
};
if options.amend {
cmd.arg("--amend");
}
if let Some(program) = ask_pass
.as_ref()
.and_then(|ask_pass| ask_pass.gpg_script_path())
{
command.arg("-c").arg(format!(
"gpg.program={}",
program.as_ref().to_string_lossy()
));
}
if let Some((name, email)) = name_and_email {
cmd.arg("--author").arg(&format!("{name} <{email}>"));
}
command
.args(["commit", "-m"])
.arg(message.to_string())
.arg("--cleanup=strip")
.stdin(smol::process::Stdio::null())
.stdout(smol::process::Stdio::piped())
.stderr(smol::process::Stdio::piped());
let output = cmd.output().await?;
if options.amend {
command.arg("--amend");
}
if let Some((name, email)) = name_and_email {
command.arg("--author").arg(&format!("{name} <{email}>"));
}
if let Some(ask_pass) = ask_pass {
command.env("GIT_ASKPASS", ask_pass.script_path());
let git_process = command.spawn()?;
run_askpass_command(ask_pass, git_process).await?;
Ok(())
} else {
let git_process = command.spawn()?;
let output = git_process.output().await?;
anyhow::ensure!(
output.status.success(),
"{}",
"Failed to commit:\n{}",
String::from_utf8_lossy(&output.stderr)
);
Ok(())
}
}
.boxed()
})
.boxed()
}
fn push(
@@ -2082,16 +2046,12 @@ mod tests {
)
.await
.unwrap();
cx.spawn(|cx| {
repo.commit(
"Initial commit".into(),
None,
CommitOptions::default(),
AskPassDelegate::new_always_failing(),
Arc::new(checkpoint_author_envs()),
cx,
)
})
repo.commit(
"Initial commit".into(),
None,
CommitOptions::default(),
Arc::new(checkpoint_author_envs()),
)
.await
.unwrap();
@@ -2115,16 +2075,12 @@ mod tests {
)
.await
.unwrap();
cx.spawn(|cx| {
repo.commit(
"Commit after checkpoint".into(),
None,
CommitOptions::default(),
AskPassDelegate::new_always_failing(),
Arc::new(checkpoint_author_envs()),
cx,
)
})
repo.commit(
"Commit after checkpoint".into(),
None,
CommitOptions::default(),
Arc::new(checkpoint_author_envs()),
)
.await
.unwrap();
@@ -2257,16 +2213,12 @@ mod tests {
)
.await
.unwrap();
cx.spawn(|cx| {
repo.commit(
"Initial commit".into(),
None,
CommitOptions::default(),
AskPassDelegate::new_always_failing(),
Arc::new(checkpoint_author_envs()),
cx,
)
})
repo.commit(
"Initial commit".into(),
None,
CommitOptions::default(),
Arc::new(checkpoint_author_envs()),
)
.await
.unwrap();

View File

@@ -1574,15 +1574,10 @@ impl GitPanel {
let task = if self.has_staged_changes() {
// Repository serializes all git operations, so we can just send a commit immediately
cx.spawn_in(window, async move |this, cx| {
let askpass_delegate = this.update_in(cx, |this, window, cx| {
this.askpass_delegate("git commit", window, cx)
})?;
let commit_task = active_repository.update(cx, |repo, cx| {
repo.commit(message.into(), None, options, askpass_delegate, cx)
})?;
commit_task.await?
})
let commit_task = active_repository.update(cx, |repo, cx| {
repo.commit(message.into(), None, options, cx)
});
cx.background_spawn(async move { commit_task.await? })
} else {
let changed_files = self
.entries
@@ -1599,13 +1594,10 @@ impl GitPanel {
let stage_task =
active_repository.update(cx, |repo, cx| repo.stage_entries(changed_files, cx));
cx.spawn_in(window, async move |this, cx| {
cx.spawn(async move |_, cx| {
stage_task.await?;
let askpass_delegate = this.update_in(cx, |this, window, cx| {
this.askpass_delegate("git commit".to_string(), window, cx)
})?;
let commit_task = active_repository.update(cx, |repo, cx| {
repo.commit(message.into(), None, options, askpass_delegate, cx)
repo.commit(message.into(), None, options, cx)
})?;
commit_task.await?
})

View File

@@ -77,8 +77,6 @@ pub(crate) const XINPUT_ALL_DEVICES: xinput::DeviceId = 0;
/// terminology is both archaic and unclear.
pub(crate) const XINPUT_ALL_DEVICE_GROUPS: xinput::DeviceId = 1;
const GPUI_X11_SCALE_FACTOR_ENV: &str = "GPUI_X11_SCALE_FACTOR";
pub(crate) struct WindowRef {
window: X11WindowStatePtr,
refresh_state: Option<RefreshState>,
@@ -426,7 +424,12 @@ impl X11Client {
let resource_database = x11rb::resource_manager::new_from_default(&xcb_connection)
.context("Failed to create resource database")?;
let scale_factor = get_scale_factor(&xcb_connection, &resource_database, x_root_index);
let scale_factor = resource_database
.get_value("Xft.dpi", "Xft.dpi")
.ok()
.flatten()
.map(|dpi: f32| dpi / 96.0)
.unwrap_or(1.0);
let cursor_handle = cursor::Handle::new(&xcb_connection, x_root_index, &resource_database)
.context("Failed to initialize cursor theme handler")?
.reply()
@@ -2269,253 +2272,3 @@ fn create_invisible_cursor(
xcb_flush(connection);
Ok(cursor)
}
enum DpiMode {
Randr,
Scale(f32),
NotSet,
}
/// Determines the UI scale factor for an X11 screen.
///
/// Resolution order (first hit wins):
/// 1. the `GPUI_X11_SCALE_FACTOR` env var — either a literal factor or the
///    string `randr` to force RandR-based detection (panics on any other
///    non-empty value);
/// 2. the `Xft.dpi` X resource, divided by the 96-dpi baseline;
/// 3. RandR physical-size-based detection;
/// 4. a default of 1.0.
fn get_scale_factor(
connection: &XCBConnection,
resource_database: &Database,
screen_index: usize,
) -> f32 {
// Parse the override env var into a DpiMode; an unset or empty variable
// means "no override". Invalid values panic deliberately so a typo is
// surfaced immediately rather than silently ignored.
let env_dpi = std::env::var(GPUI_X11_SCALE_FACTOR_ENV)
.ok()
.map(|var| {
if var.to_lowercase() == "randr" {
DpiMode::Randr
} else if let Ok(scale) = var.parse::<f32>() {
if valid_scale_factor(scale) {
DpiMode::Scale(scale)
} else {
panic!(
"`{}` must be a positive normal number or `randr`. Got `{}`",
GPUI_X11_SCALE_FACTOR_ENV, var
);
}
} else if var.is_empty() {
DpiMode::NotSet
} else {
panic!(
"`{}` must be a positive number or `randr`. Got `{}`",
GPUI_X11_SCALE_FACTOR_ENV, var
);
}
})
.unwrap_or(DpiMode::NotSet);
// An explicit override short-circuits every other detection path. Note
// that `randr` mode falls back to 1.0 (not to Xft.dpi) when RandR fails.
match env_dpi {
DpiMode::Scale(scale) => {
log::info!(
"Using scale factor from {}: {}",
GPUI_X11_SCALE_FACTOR_ENV,
scale
);
return scale;
}
DpiMode::Randr => {
if let Some(scale) = get_randr_scale_factor(connection, screen_index) {
log::info!(
"Using RandR scale factor from {}=randr: {}",
GPUI_X11_SCALE_FACTOR_ENV,
scale
);
return scale;
}
log::warn!("Failed to calculate RandR scale factor, falling back to default");
return 1.0;
}
DpiMode::NotSet => {}
}
// TODO: Use scale factor from XSettings here
// Xft.dpi is the conventional per-user DPI setting; 96 dpi maps to 1.0.
if let Some(dpi) = resource_database
.get_value::<f32>("Xft.dpi", "Xft.dpi")
.ok()
.flatten()
{
let scale = dpi / 96.0; // base dpi
log::info!("Using scale factor from Xft.dpi: {}", scale);
return scale;
}
// No resource set: fall back to monitor physical-size detection.
if let Some(scale) = get_randr_scale_factor(connection, screen_index) {
log::info!("Using RandR scale factor: {}", scale);
return scale;
}
log::info!("Using default scale factor: 1.0");
1.0
}
/// Derives a scale factor from RandR monitor geometry (pixel size vs.
/// reported physical size in millimeters).
///
/// Requires RandR >= 1.5 for the `GetMonitors` request; older servers are
/// delegated to `legacy_get_randr_scale_factor`. The primary monitor wins;
/// otherwise the first monitor with usable physical dimensions is used.
/// Returns `None` if no monitor reports a non-zero physical size.
fn get_randr_scale_factor(connection: &XCBConnection, screen_index: usize) -> Option<f32> {
let root = connection.setup().roots.get(screen_index)?.root;
// Negotiate the extension version; GetMonitors only exists from 1.5 on.
let version_cookie = connection.randr_query_version(1, 6).ok()?;
let version_reply = version_cookie.reply().ok()?;
if version_reply.major_version < 1
|| (version_reply.major_version == 1 && version_reply.minor_version < 5)
{
return legacy_get_randr_scale_factor(connection, root); // for randr <1.5
}
let monitors_cookie = connection.randr_get_monitors(root, true).ok()?; // true for active only
let monitors_reply = monitors_cookie.reply().ok()?;
let mut fallback_scale: Option<f32> = None;
for monitor in monitors_reply.monitors {
// A zero physical dimension would divide by zero in get_dpi_factor.
if monitor.width_in_millimeters == 0 || monitor.height_in_millimeters == 0 {
continue;
}
let scale_factor = get_dpi_factor(
(monitor.width as u32, monitor.height as u32),
(
monitor.width_in_millimeters as u64,
monitor.height_in_millimeters as u64,
),
);
// Prefer the primary monitor; remember the first usable non-primary
// one in case no primary is flagged.
if monitor.primary {
return Some(scale_factor);
} else if fallback_scale.is_none() {
fallback_scale = Some(scale_factor);
}
}
fallback_scale
}
/// RandR < 1.5 fallback: computes a scale factor from CRTC pixel geometry and
/// output physical size.
///
/// First tries the primary output directly; if that yields no usable
/// geometry, scans all CRTCs/outputs from `GetScreenResourcesCurrent` and
/// picks the first connected non-primary output with non-zero physical
/// dimensions. Returns `None` when nothing usable is found.
fn legacy_get_randr_scale_factor(connection: &XCBConnection, root: u32) -> Option<f32> {
let primary_cookie = connection.randr_get_output_primary(root).ok()?;
let primary_reply = primary_cookie.reply().ok()?;
let primary_output = primary_reply.output;
let primary_output_cookie = connection
.randr_get_output_info(primary_output, x11rb::CURRENT_TIME)
.ok()?;
let primary_output_info = primary_output_cookie.reply().ok()?;
// try primary
if primary_output_info.connection == randr::Connection::CONNECTED
&& primary_output_info.mm_width > 0
&& primary_output_info.mm_height > 0
&& primary_output_info.crtc != 0
{
let crtc_cookie = connection
.randr_get_crtc_info(primary_output_info.crtc, x11rb::CURRENT_TIME)
.ok()?;
let crtc_info = crtc_cookie.reply().ok()?;
if crtc_info.width > 0 && crtc_info.height > 0 {
let scale_factor = get_dpi_factor(
(crtc_info.width as u32, crtc_info.height as u32),
(
primary_output_info.mm_width as u64,
primary_output_info.mm_height as u64,
),
);
return Some(scale_factor);
}
}
// fallback: full scan
// Issue all CRTC info requests first, then collect replies, to pipeline
// the round-trips instead of serializing them.
let resources_cookie = connection.randr_get_screen_resources_current(root).ok()?;
let screen_resources = resources_cookie.reply().ok()?;
let mut crtc_cookies = Vec::with_capacity(screen_resources.crtcs.len());
for &crtc in &screen_resources.crtcs {
if let Ok(cookie) = connection.randr_get_crtc_info(crtc, x11rb::CURRENT_TIME) {
crtc_cookies.push((crtc, cookie));
}
}
let mut crtc_infos: HashMap<randr::Crtc, randr::GetCrtcInfoReply> = HashMap::default();
let mut valid_outputs: HashSet<randr::Output> = HashSet::new();
for (crtc, cookie) in crtc_cookies {
if let Ok(reply) = cookie.reply() {
// Only CRTCs that are actually driving pixels and have outputs.
if reply.width > 0 && reply.height > 0 && !reply.outputs.is_empty() {
crtc_infos.insert(crtc, reply.clone());
valid_outputs.extend(&reply.outputs);
}
}
}
if valid_outputs.is_empty() {
return None;
}
// Same pipelined request/reply pattern for output info.
let mut output_cookies = Vec::with_capacity(valid_outputs.len());
for &output in &valid_outputs {
if let Ok(cookie) = connection.randr_get_output_info(output, x11rb::CURRENT_TIME) {
output_cookies.push((output, cookie));
}
}
let mut output_infos: HashMap<randr::Output, randr::GetOutputInfoReply> = HashMap::default();
for (output, cookie) in output_cookies {
if let Ok(reply) = cookie.reply() {
output_infos.insert(output, reply);
}
}
let mut fallback_scale: Option<f32> = None;
// NOTE(review): this scan deliberately skips `primary_output` (the primary
// path above already handled it) and keeps only the FIRST non-primary
// candidate — confirm that a connected primary with zero reported physical
// size is intentionally excluded rather than reconsidered here.
for crtc_info in crtc_infos.values() {
for &output in &crtc_info.outputs {
if let Some(output_info) = output_infos.get(&output) {
if output_info.connection != randr::Connection::CONNECTED {
continue;
}
if output_info.mm_width == 0 || output_info.mm_height == 0 {
continue;
}
let scale_factor = get_dpi_factor(
(crtc_info.width as u32, crtc_info.height as u32),
(output_info.mm_width as u64, output_info.mm_height as u64),
);
if output != primary_output && fallback_scale.is_none() {
fallback_scale = Some(scale_factor);
}
}
}
}
fallback_scale
}
/// Converts a monitor's pixel and physical (millimeter) dimensions into a
/// UI scale factor relative to the 96-dpi baseline.
///
/// The raw ratio is quantized to the nearest 1/12 (so the common factors
/// 1.25 = 15/12, 1.5 = 18/12, 1.75 = 21/12, 2.0 = 24/12 are representable),
/// floored at 1.0, and implausibly large results (> 20x) fall back to 1.0.
fn get_dpi_factor((width_px, height_px): (u32, u32), (width_mm, height_mm): (u64, u64)) -> f32 {
    const MM_PER_INCH: f64 = 25.4;
    const BASE_DPI: f64 = 96.0;
    const QUANTIZE_STEP: f64 = 12.0; // e.g. 1.25 = 15/12, 1.5 = 18/12, 1.75 = 21/12, 2.0 = 24/12
    const MIN_SCALE: f64 = 1.0;
    const MAX_SCALE: f64 = 20.0;

    // Geometric mean of pixels-per-millimeter over both axes.
    let pixel_area = width_px as f64 * height_px as f64;
    let physical_area = width_mm as f64 * height_mm as f64;
    let ppmm = (pixel_area / physical_area).sqrt();

    // Scale relative to 96 dpi, rounded to the nearest twelfth, never below 1.0.
    let quantized =
        ((ppmm * (QUANTIZE_STEP * MM_PER_INCH / BASE_DPI)).round() / QUANTIZE_STEP).max(MIN_SCALE);
    // Treat absurd results (bogus EDID physical sizes) as "no information".
    let bounded = if quantized > MAX_SCALE { MIN_SCALE } else { quantized };

    let candidate = bounded as f32;
    if valid_scale_factor(candidate) {
        candidate
    } else {
        log::warn!(
            "Calculated DPI factor {} is invalid, using 1.0",
            bounded
        );
        1.0
    }
}
/// True when `scale_factor` is usable as a display scale: a normal float
/// (rejects zero, subnormals, infinities, and NaN) whose sign bit is positive.
#[inline]
fn valid_scale_factor(scale_factor: f32) -> bool {
    scale_factor.is_normal() && scale_factor.is_sign_positive()
}

View File

@@ -130,13 +130,11 @@ pub(crate) fn generate_key_char(
let mut buffer = [0; 8];
let len = unsafe { ToUnicode(vkey.0 as u32, scan_code, Some(&state), &mut buffer, 1 << 2) };
match len {
len if len > 0 => String::from_utf16(&buffer[..len as usize])
.ok()
.filter(|candidate| {
!candidate.is_empty() && !candidate.chars().next().unwrap().is_control()
}),
len if len < 0 => String::from_utf16(&buffer[..(-len as usize)]).ok(),
_ => None,
if len > 0 {
let candidate = String::from_utf16_lossy(&buffer[..len as usize]);
if !candidate.is_empty() && !candidate.chars().next().unwrap().is_control() {
return Some(candidate);
}
}
None
}

View File

@@ -110,7 +110,6 @@ impl LanguageServerHealthStatus {
impl LanguageServerState {
fn fill_menu(&self, mut menu: ContextMenu, cx: &mut Context<Self>) -> ContextMenu {
menu = menu.align_popover_bottom();
let lsp_logs = cx
.try_global::<GlobalLogStore>()
.and_then(|lsp_logs| lsp_logs.0.upgrade());
@@ -119,7 +118,6 @@ impl LanguageServerState {
return menu;
};
let mut first_button_encountered = false;
for (i, item) in self.items.iter().enumerate() {
if let LspItem::ToggleServersButton { restart } = item {
let label = if *restart {
@@ -184,11 +182,7 @@ impl LanguageServerState {
.ok();
}
});
if !first_button_encountered {
menu = menu.separator();
first_button_encountered = true;
}
menu = menu.item(button);
menu = menu.separator().item(button);
continue;
};
@@ -711,7 +705,6 @@ impl LspTool {
new_lsp_items.extend(other_servers.into_iter().map(ServerData::into_lsp_item));
if !new_lsp_items.is_empty() {
if can_stop_all {
new_lsp_items.push(LspItem::ToggleServersButton { restart: true });
new_lsp_items.push(LspItem::ToggleServersButton { restart: false });
} else if can_restart_all {
new_lsp_items.push(LspItem::ToggleServersButton { restart: true });

View File

@@ -216,7 +216,7 @@ impl JsonLspAdapter {
paths::local_debug_file_relative_path()
],
"schema": debug_schema,
},
}
]);
#[cfg(debug_assertions)]

View File

@@ -1242,6 +1242,18 @@ impl LanguageServer {
params,
})
.unwrap();
eprintln!("{}", {
let value = serde_json::from_str::<serde_json::Value>(&message).unwrap();
if !value
.get("method")
.and_then(|method| method.as_str())
.map_or(false, |method| method.starts_with("json"))
{
"other".to_string()
} else {
serde_json::to_string_pretty(&value).unwrap()
}
});
outbound_tx.try_send(message)?;
Ok(())
}

View File

@@ -1726,18 +1726,6 @@ impl GitStore {
let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
let askpass = if let Some(askpass_id) = envelope.payload.askpass_id {
make_remote_delegate(
this,
envelope.payload.project_id,
repository_id,
askpass_id,
&mut cx,
)
} else {
AskPassDelegate::new_always_failing()
};
let message = SharedString::from(envelope.payload.message);
let name = envelope.payload.name.map(SharedString::from);
let email = envelope.payload.email.map(SharedString::from);
@@ -1751,7 +1739,6 @@ impl GitStore {
CommitOptions {
amend: options.amend,
},
askpass,
cx,
)
})?
@@ -3475,14 +3462,11 @@ impl Repository {
message: SharedString,
name_and_email: Option<(SharedString, SharedString)>,
options: CommitOptions,
askpass: AskPassDelegate,
_cx: &mut App,
) -> oneshot::Receiver<Result<()>> {
let id = self.id;
let askpass_delegates = self.askpass_delegates.clone();
let askpass_id = util::post_inc(&mut self.latest_askpass_id);
self.send_job(Some("git commit".into()), move |git_repo, cx| async move {
self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
match git_repo {
RepositoryState::Local {
backend,
@@ -3490,16 +3474,10 @@ impl Repository {
..
} => {
backend
.commit(message, name_and_email, options, askpass, environment, cx)
.commit(message, name_and_email, options, environment)
.await
}
RepositoryState::Remote { project_id, client } => {
askpass_delegates.lock().insert(askpass_id, askpass);
let _defer = util::defer(|| {
let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
debug_assert!(askpass_delegate.is_some());
});
let (name, email) = name_and_email.unzip();
client
.request(proto::Commit {
@@ -3511,9 +3489,9 @@ impl Repository {
options: Some(proto::commit::CommitOptions {
amend: options.amend,
}),
askpass_id: Some(askpass_id),
})
.await?;
.await
.context("sending commit request")?;
Ok(())
}

View File

@@ -1,4 +1,5 @@
pub mod clangd_ext;
pub mod json_language_server_ext;
pub mod lsp_ext_command;
pub mod rust_analyzer_ext;

View File

@@ -0,0 +1,147 @@
use ::serde::{Deserialize, Serialize};
use gpui::{App, Entity, WeakEntity};
use language::Buffer;
use language::{File as _, LocalFile as _};
use lsp::{DidCloseTextDocumentParams, DidOpenTextDocumentParams, LanguageServer};
use util::ResultExt as _;
use crate::{LspStore, Project};
// https://github.com/microsoft/vscode/blob/main/extensions/json-language-features/server/README.md#schema-associations-notification
struct SchemaAssociationsNotification {}
/// interface ISchemaAssociation {
/// /**
/// * The URI of the schema, which is also the identifier of the schema.
/// */
/// uri: string;
///
/// /**
/// * A list of file path patterns that are associated to the schema. The '*' wildcard can be used. Exclusion patterns starting with '!'.
/// * For example '*.schema.json', 'package.json', '!foo*.schema.json'.
/// * A match succeeds when there is at least one pattern matching and last matching pattern does not start with '!'.
/// */
/// fileMatch: string[];
/// /**
/// * If provided, the association is only used if the validated document is located in the given folder (directly or in a subfolder)
/// */
/// folderUri?: string;
/// /*
/// * The schema for the given URI.
/// * If no schema is provided, the schema will be fetched with the schema request service (if available).
/// */
/// schema?: JSONSchema;
/// }
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(rename_all = "camelCase")]
pub struct SchemaAssociation {
pub uri: String,
pub file_match: Vec<String>,
pub folder_uri: Option<String>,
pub schema: Option<serde_json::Value>,
}
impl lsp::notification::Notification for SchemaAssociationsNotification {
type Params = Vec<SchemaAssociation>;
const METHOD: &'static str = "json/schemaAssociations";
}
pub fn send_schema_associations_notification(
project: Entity<Project>,
buffer: Entity<Buffer>,
schema_associations: &Vec<SchemaAssociation>,
cx: &mut App,
) {
let lsp_store = project.read(cx).lsp_store();
lsp_store.update(cx, |lsp_store, cx| {
let Some(local) = lsp_store.as_local_mut() else {
return;
};
buffer.update(cx, |buffer, cx| {
for (adapter, server) in local
.language_servers_for_buffer(buffer, cx)
.map(|(a, b)| (a.clone(), b.clone()))
.collect::<Vec<_>>()
{
if dbg!(!adapter.adapter.is_primary_zed_json_schema_adapter()) {
continue;
}
server
.notify::<SchemaAssociationsNotification>(schema_associations)
.log_err(); // todo! don't ignore error
let file = match worktree::File::from_dyn(buffer.file()) {
Some(file) => file,
None => continue,
};
let language = match buffer.language() {
Some(language) => language,
None => continue,
};
let uri = lsp::Url::from_file_path(file.abs_path(cx)).unwrap();
let versions = local
.buffer_snapshots
.entry(buffer.remote_id())
.or_default()
.entry(server.server_id())
// .and_modify(|_| {
// assert!(
// false,
// "There should not be an existing snapshot for a newly inserted buffer"
// )
// })
.or_insert_with(|| {
vec![crate::lsp_store::LspBufferSnapshot {
version: 0,
snapshot: buffer.text_snapshot(),
}]
});
let snapshot = versions.last().unwrap();
let version = snapshot.version;
let initial_snapshot = &snapshot.snapshot;
// if file.worktree.read(cx).id() != key.0
// || !self
// .languages
// .lsp_adapters(&language.name())
// .iter()
// .any(|a| a.name == key.1)
// {
// continue;
// }
// didOpen
let file = match file.as_local() {
Some(file) => file,
None => continue,
};
let Some(_) = server
.notify::<lsp::notification::DidCloseTextDocument>(
&DidCloseTextDocumentParams {
text_document: lsp::TextDocumentIdentifier { uri: uri.clone() },
},
)
.log_err()
else {
continue;
};
let initial_text = buffer.text();
server
.notify::<lsp::notification::DidOpenTextDocument>(&DidOpenTextDocumentParams {
text_document: lsp::TextDocumentItem::new(
uri,
adapter.language_id(&language.name()),
version,
initial_text,
),
})
.log_err();
}
})
})
}

View File

@@ -294,7 +294,6 @@ message Commit {
optional string email = 5;
string message = 6;
optional CommitOptions options = 7;
optional uint64 askpass_id = 8;
message CommitOptions {
bool amend = 1;

View File

@@ -623,55 +623,49 @@ impl KeymapFile {
// We don't want to modify the file if it's invalid.
let keymap = Self::parse(&keymap_contents).context("Failed to parse keymap")?;
if let KeybindUpdateOperation::Remove {
target,
target_keybind_source,
} = operation
{
if target_keybind_source != KeybindSource::User {
anyhow::bail!("Cannot remove non-user created keybinding. Not implemented yet");
}
let target_action_value = target
.action_value()
.context("Failed to generate target action JSON value")?;
let Some((index, keystrokes_str)) =
find_binding(&keymap, &target, &target_action_value)
else {
anyhow::bail!("Failed to find keybinding to remove");
};
let is_only_binding = keymap.0[index]
.bindings
.as_ref()
.map_or(true, |bindings| bindings.len() == 1);
let key_path: &[&str] = if is_only_binding {
&[]
} else {
&["bindings", keystrokes_str]
};
let (replace_range, replace_value) = replace_top_level_array_value_in_json_text(
&keymap_contents,
key_path,
None,
None,
index,
tab_size,
)
.context("Failed to remove keybinding")?;
keymap_contents.replace_range(replace_range, &replace_value);
return Ok(keymap_contents);
}
if let KeybindUpdateOperation::Replace { source, target, .. } = operation {
let mut found_index = None;
let target_action_value = target
.action_value()
.context("Failed to generate target action JSON value")?;
let source_action_value = source
.action_value()
.context("Failed to generate source action JSON value")?;
'sections: for (index, section) in keymap.sections().enumerate() {
if section.context != target.context.unwrap_or("") {
continue;
}
if section.use_key_equivalents != target.use_key_equivalents {
continue;
}
let Some(bindings) = &section.bindings else {
continue;
};
for (keystrokes, action) in bindings {
let Ok(keystrokes) = keystrokes
.split_whitespace()
.map(Keystroke::parse)
.collect::<Result<Vec<_>, _>>()
else {
continue;
};
if keystrokes.len() != target.keystrokes.len()
|| !keystrokes
.iter()
.zip(target.keystrokes)
.all(|(a, b)| a.should_match(b))
{
continue;
}
if action.0 != target_action_value {
continue;
}
found_index = Some(index);
break 'sections;
}
}
if let Some((index, keystrokes_str)) =
find_binding(&keymap, &target, &target_action_value)
{
if let Some(index) = found_index {
if target.context == source.context {
// if we are only changing the keybinding (common case)
// not the context, etc. Then just update the binding in place
@@ -679,7 +673,7 @@ impl KeymapFile {
let (replace_range, replace_value) =
replace_top_level_array_value_in_json_text(
&keymap_contents,
&["bindings", keystrokes_str],
&["bindings", &target.keystrokes_unparsed()],
Some(&source_action_value),
Some(&source.keystrokes_unparsed()),
index,
@@ -701,7 +695,7 @@ impl KeymapFile {
let (replace_range, replace_value) =
replace_top_level_array_value_in_json_text(
&keymap_contents,
&["bindings", keystrokes_str],
&["bindings", &target.keystrokes_unparsed()],
Some(&source_action_value),
Some(&source.keystrokes_unparsed()),
index,
@@ -731,7 +725,7 @@ impl KeymapFile {
let (replace_range, replace_value) =
replace_top_level_array_value_in_json_text(
&keymap_contents,
&["bindings", keystrokes_str],
&["bindings", &target.keystrokes_unparsed()],
None,
None,
index,
@@ -777,46 +771,6 @@ impl KeymapFile {
keymap_contents.replace_range(replace_range, &replace_value);
}
return Ok(keymap_contents);
fn find_binding<'a, 'b>(
keymap: &'b KeymapFile,
target: &KeybindUpdateTarget<'a>,
target_action_value: &Value,
) -> Option<(usize, &'b str)> {
for (index, section) in keymap.sections().enumerate() {
if section.context != target.context.unwrap_or("") {
continue;
}
if section.use_key_equivalents != target.use_key_equivalents {
continue;
}
let Some(bindings) = &section.bindings else {
continue;
};
for (keystrokes_str, action) in bindings {
let Ok(keystrokes) = keystrokes_str
.split_whitespace()
.map(Keystroke::parse)
.collect::<Result<Vec<_>, _>>()
else {
continue;
};
if keystrokes.len() != target.keystrokes.len()
|| !keystrokes
.iter()
.zip(target.keystrokes)
.all(|(a, b)| a.should_match(b))
{
continue;
}
if &action.0 != target_action_value {
continue;
}
return Some((index, &keystrokes_str));
}
}
None
}
}
}
@@ -829,10 +783,6 @@ pub enum KeybindUpdateOperation<'a> {
target_keybind_source: KeybindSource,
},
Add(KeybindUpdateTarget<'a>),
Remove {
target: KeybindUpdateTarget<'a>,
target_keybind_source: KeybindSource,
},
}
pub struct KeybindUpdateTarget<'a> {
@@ -1350,118 +1300,5 @@ mod tests {
]"#
.unindent(),
);
check_keymap_update(
r#"[
{
"context": "SomeContext",
"bindings": {
"a": "foo::bar",
"c": "foo::baz",
}
},
]"#
.unindent(),
KeybindUpdateOperation::Remove {
target: KeybindUpdateTarget {
context: Some("SomeContext"),
keystrokes: &parse_keystrokes("a"),
action_name: "foo::bar",
use_key_equivalents: false,
input: None,
},
target_keybind_source: KeybindSource::User,
},
r#"[
{
"context": "SomeContext",
"bindings": {
"c": "foo::baz",
}
},
]"#
.unindent(),
);
check_keymap_update(
r#"[
{
"context": "SomeContext",
"bindings": {
"a": ["foo::bar", true],
"c": "foo::baz",
}
},
]"#
.unindent(),
KeybindUpdateOperation::Remove {
target: KeybindUpdateTarget {
context: Some("SomeContext"),
keystrokes: &parse_keystrokes("a"),
action_name: "foo::bar",
use_key_equivalents: false,
input: Some("true"),
},
target_keybind_source: KeybindSource::User,
},
r#"[
{
"context": "SomeContext",
"bindings": {
"c": "foo::baz",
}
},
]"#
.unindent(),
);
check_keymap_update(
r#"[
{
"context": "SomeContext",
"bindings": {
"b": "foo::baz",
}
},
{
"context": "SomeContext",
"bindings": {
"a": ["foo::bar", true],
}
},
{
"context": "SomeContext",
"bindings": {
"c": "foo::baz",
}
},
]"#
.unindent(),
KeybindUpdateOperation::Remove {
target: KeybindUpdateTarget {
context: Some("SomeContext"),
keystrokes: &parse_keystrokes("a"),
action_name: "foo::bar",
use_key_equivalents: false,
input: Some("true"),
},
target_keybind_source: KeybindSource::User,
},
r#"[
{
"context": "SomeContext",
"bindings": {
"b": "foo::baz",
}
},
{
"context": "SomeContext",
"bindings": {
"c": "foo::baz",
}
},
]"#
.unindent(),
);
}
}

View File

@@ -353,58 +353,29 @@ pub fn replace_top_level_array_value_in_json_text(
let range = cursor.node().range();
let indent_width = range.start_point.column;
let offset = range.start_byte;
let text_range = range.start_byte..range.end_byte;
let value_str = &text[text_range.clone()];
let value_str = &text[range.start_byte..range.end_byte];
let needs_indent = range.start_point.row > 0;
if new_value.is_none() && key_path.is_empty() {
let mut remove_range = text_range.clone();
if index == 0 {
while cursor.goto_next_sibling()
&& (cursor.node().is_extra() || cursor.node().is_missing())
{}
if cursor.node().kind() == "," {
remove_range.end = cursor.node().range().end_byte;
}
if let Some(next_newline) = &text[remove_range.end + 1..].find('\n') {
if text[remove_range.end + 1..remove_range.end + next_newline]
.chars()
.all(|c| c.is_ascii_whitespace())
{
remove_range.end = remove_range.end + next_newline;
}
}
} else {
while cursor.goto_previous_sibling()
&& (cursor.node().is_extra() || cursor.node().is_missing())
{}
if cursor.node().kind() == "," {
remove_range.start = cursor.node().range().start_byte;
}
}
return Ok((remove_range, String::new()));
let (mut replace_range, mut replace_value) =
replace_value_in_json_text(value_str, key_path, tab_size, new_value, replace_key);
replace_range.start += offset;
replace_range.end += offset;
if needs_indent {
let increased_indent = format!("\n{space:width$}", space = ' ', width = indent_width);
replace_value = replace_value.replace('\n', &increased_indent);
// replace_value.push('\n');
} else {
let (mut replace_range, mut replace_value) =
replace_value_in_json_text(value_str, key_path, tab_size, new_value, replace_key);
replace_range.start += offset;
replace_range.end += offset;
if needs_indent {
let increased_indent = format!("\n{space:width$}", space = ' ', width = indent_width);
replace_value = replace_value.replace('\n', &increased_indent);
// replace_value.push('\n');
} else {
while let Some(idx) = replace_value.find("\n ") {
replace_value.remove(idx + 1);
}
while let Some(idx) = replace_value.find("\n") {
replace_value.replace_range(idx..idx + 1, " ");
}
while let Some(idx) = replace_value.find("\n ") {
replace_value.remove(idx + 1);
}
while let Some(idx) = replace_value.find("\n") {
replace_value.replace_range(idx..idx + 1, " ");
}
return Ok((replace_range, replace_value));
}
return Ok((replace_range, replace_value));
}
pub fn append_top_level_array_value_in_json_text(
@@ -1034,14 +1005,14 @@ mod tests {
input: impl ToString,
index: usize,
key_path: &[&str],
value: Option<Value>,
value: Value,
expected: impl ToString,
) {
let input = input.to_string();
let result = replace_top_level_array_value_in_json_text(
&input,
key_path,
value.as_ref(),
Some(&value),
None,
index,
4,
@@ -1052,10 +1023,10 @@ mod tests {
pretty_assertions::assert_eq!(expected.to_string(), result_str);
}
check_array_replace(r#"[1, 3, 3]"#, 1, &[], Some(json!(2)), r#"[1, 2, 3]"#);
check_array_replace(r#"[1, 3, 3]"#, 2, &[], Some(json!(2)), r#"[1, 3, 2]"#);
check_array_replace(r#"[1, 3, 3,]"#, 3, &[], Some(json!(2)), r#"[1, 3, 3, 2]"#);
check_array_replace(r#"[1, 3, 3,]"#, 100, &[], Some(json!(2)), r#"[1, 3, 3, 2]"#);
check_array_replace(r#"[1, 3, 3]"#, 1, &[], json!(2), r#"[1, 2, 3]"#);
check_array_replace(r#"[1, 3, 3]"#, 2, &[], json!(2), r#"[1, 3, 2]"#);
check_array_replace(r#"[1, 3, 3,]"#, 3, &[], json!(2), r#"[1, 3, 3, 2]"#);
check_array_replace(r#"[1, 3, 3,]"#, 100, &[], json!(2), r#"[1, 3, 3, 2]"#);
check_array_replace(
r#"[
1,
@@ -1065,7 +1036,7 @@ mod tests {
.unindent(),
1,
&[],
Some(json!({"foo": "bar", "baz": "qux"})),
json!({"foo": "bar", "baz": "qux"}),
r#"[
1,
{
@@ -1080,7 +1051,7 @@ mod tests {
r#"[1, 3, 3,]"#,
1,
&[],
Some(json!({"foo": "bar", "baz": "qux"})),
json!({"foo": "bar", "baz": "qux"}),
r#"[1, { "foo": "bar", "baz": "qux" }, 3,]"#,
);
@@ -1088,7 +1059,7 @@ mod tests {
r#"[1, { "foo": "bar", "baz": "qux" }, 3,]"#,
1,
&["baz"],
Some(json!({"qux": "quz"})),
json!({"qux": "quz"}),
r#"[1, { "foo": "bar", "baz": { "qux": "quz" } }, 3,]"#,
);
@@ -1103,7 +1074,7 @@ mod tests {
]"#,
1,
&["baz"],
Some(json!({"qux": "quz"})),
json!({"qux": "quz"}),
r#"[
1,
{
@@ -1129,7 +1100,7 @@ mod tests {
]"#,
1,
&["baz"],
Some(json!("qux")),
json!("qux"),
r#"[
1,
{
@@ -1156,7 +1127,7 @@ mod tests {
]"#,
1,
&["baz"],
Some(json!("qux")),
json!("qux"),
r#"[
1,
{
@@ -1180,7 +1151,7 @@ mod tests {
]"#,
2,
&[],
Some(json!("replaced")),
json!("replaced"),
r#"[
1,
// This is element 2
@@ -1198,7 +1169,7 @@ mod tests {
.unindent(),
0,
&[],
Some(json!("first")),
json!("first"),
r#"[
// Empty array with comment
"first"
@@ -1209,7 +1180,7 @@ mod tests {
r#"[]"#.unindent(),
0,
&[],
Some(json!("first")),
json!("first"),
r#"[
"first"
]"#
@@ -1226,7 +1197,7 @@ mod tests {
]"#,
0,
&[],
Some(json!({"new": "object"})),
json!({"new": "object"}),
r#"[
// Leading comment
// Another leading comment
@@ -1246,7 +1217,7 @@ mod tests {
]"#,
1,
&[],
Some(json!("deep")),
json!("deep"),
r#"[
1,
"deep",
@@ -1259,7 +1230,7 @@ mod tests {
r#"[1,2, 3, 4]"#,
2,
&[],
Some(json!("spaced")),
json!("spaced"),
r#"[1,2, "spaced", 4]"#,
);
@@ -1272,7 +1243,7 @@ mod tests {
]"#,
1,
&[],
Some(json!(["a", "b", "c", "d"])),
json!(["a", "b", "c", "d"]),
r#"[
[1, 2, 3],
[
@@ -1297,7 +1268,7 @@ mod tests {
]"#,
0,
&[],
Some(json!("updated")),
json!("updated"),
r#"[
/*
* This is a
@@ -1313,7 +1284,7 @@ mod tests {
r#"[true, false, true]"#,
1,
&[],
Some(json!(null)),
json!(null),
r#"[true, null, true]"#,
);
@@ -1322,7 +1293,7 @@ mod tests {
r#"[42]"#,
0,
&[],
Some(json!({"answer": 42})),
json!({"answer": 42}),
r#"[{ "answer": 42 }]"#,
);
@@ -1336,7 +1307,7 @@ mod tests {
.unindent(),
10,
&[],
Some(json!(123)),
json!(123),
r#"[
// Comment 1
// Comment 2
@@ -1345,54 +1316,6 @@ mod tests {
]"#
.unindent(),
);
check_array_replace(
r#"[
{
"key": "value"
},
{
"key": "value2"
}
]"#
.unindent(),
0,
&[],
None,
r#"[
{
"key": "value2"
}
]"#
.unindent(),
);
check_array_replace(
r#"[
{
"key": "value"
},
{
"key": "value2"
},
{
"key": "value3"
},
]"#
.unindent(),
1,
&[],
None,
r#"[
{
"key": "value"
},
{
"key": "value3"
},
]"#
.unindent(),
);
}
#[test]

View File

@@ -32,6 +32,7 @@ schemars.workspace = true
search.workspace = true
serde.workspace = true
settings.workspace = true
tempfile.workspace = true
theme.workspace = true
tree-sitter-json.workspace = true
tree-sitter-rust.workspace = true

View File

@@ -1,5 +1,6 @@
use std::{
ops::{Not, Range},
path::PathBuf,
sync::Arc,
};
@@ -10,11 +11,13 @@ use feature_flags::FeatureFlagViewExt;
use fs::Fs;
use fuzzy::{StringMatch, StringMatchCandidate};
use gpui::{
Action, AppContext as _, AsyncApp, ClickEvent, Context, DismissEvent, Entity, EventEmitter,
AppContext as _, AsyncApp, ClickEvent, Context, DismissEvent, Entity, EventEmitter,
FocusHandle, Focusable, Global, KeyContext, Keystroke, ModifiersChangedEvent, ScrollStrategy,
StyledText, Subscription, WeakEntity, actions, div,
};
use language::{Language, LanguageConfig, ToOffset as _};
use project::Project;
use schemars::JsonSchema as _;
use settings::{BaseKeymap, KeybindSource, KeymapFile, SettingsAssets};
use util::ResultExt;
@@ -23,10 +26,7 @@ use ui::{
ActiveTheme as _, App, Banner, BorrowAppContext, ContextMenu, ParentElement as _, Render,
SharedString, Styled as _, Tooltip, Window, prelude::*, right_click_menu,
};
use workspace::{
Item, ModalView, SerializableItem, Workspace, notifications::NotifyTaskExt as _,
register_serializable_item,
};
use workspace::{Item, ModalView, SerializableItem, Workspace, register_serializable_item};
use crate::{
SettingsUiFeatureFlag,
@@ -52,16 +52,10 @@ actions!(
EditBinding,
/// Creates a new key binding for the selected action.
CreateBinding,
/// Deletes the selected key binding.
DeleteBinding,
/// Copies the action name to clipboard.
CopyAction,
/// Copies the context predicate to clipboard.
CopyContext,
/// Toggles Conflict Filtering
ToggleConflictFilter,
/// Toggle Keystroke search
ToggleKeystrokeSearch,
CopyContext
]
);
@@ -71,6 +65,7 @@ pub fn init(cx: &mut App) {
cx.on_action(|_: &OpenKeymapEditor, cx| {
workspace::with_active_or_new_workspace(cx, move |workspace, window, cx| {
// todo! with_local_workspace
let existing = workspace
.active_pane()
.read(cx)
@@ -147,22 +142,6 @@ impl KeymapEventChannel {
}
#[derive(Default, PartialEq)]
enum SearchMode {
#[default]
Normal,
KeyStroke,
}
impl SearchMode {
fn invert(&self) -> Self {
match self {
SearchMode::Normal => SearchMode::KeyStroke,
SearchMode::KeyStroke => SearchMode::Normal,
}
}
}
#[derive(Default, PartialEq, Copy, Clone)]
enum FilterState {
#[default]
All,
@@ -241,13 +220,11 @@ struct KeymapEditor {
keybindings: Vec<ProcessedKeybinding>,
keybinding_conflict_state: ConflictState,
filter_state: FilterState,
search_mode: SearchMode,
// corresponds 1 to 1 with keybindings
string_match_candidates: Arc<Vec<StringMatchCandidate>>,
matches: Vec<StringMatch>,
table_interaction_state: Entity<TableInteractionState>,
filter_editor: Entity<Editor>,
keystroke_editor: Entity<KeystrokeInput>,
selected_index: Option<usize>,
}
@@ -267,12 +244,6 @@ impl KeymapEditor {
cx.observe_global::<KeymapEventChannel>(Self::update_keybindings);
let table_interaction_state = TableInteractionState::new(window, cx);
let keystroke_editor = cx.new(|cx| {
let mut keystroke_editor = KeystrokeInput::new(window, cx);
keystroke_editor.highlight_on_focus = false;
keystroke_editor
});
let filter_editor = cx.new(|cx| {
let mut editor = Editor::single_line(window, cx);
editor.set_placeholder_text("Filter action names…", cx);
@@ -288,28 +259,17 @@ impl KeymapEditor {
})
.detach();
cx.subscribe(&keystroke_editor, |this, _, _, cx| {
if matches!(this.search_mode, SearchMode::Normal) {
return;
}
this.update_matches(cx);
})
.detach();
let mut this = Self {
workspace,
keybindings: vec![],
keybinding_conflict_state: ConflictState::default(),
filter_state: FilterState::default(),
search_mode: SearchMode::default(),
string_match_candidates: Arc::new(vec![]),
matches: vec![],
focus_handle: focus_handle.clone(),
_keymap_subscription,
table_interaction_state,
filter_editor,
keystroke_editor,
selected_index: None,
};
@@ -318,47 +278,30 @@ impl KeymapEditor {
this
}
fn current_action_query(&self, cx: &App) -> String {
fn current_query(&self, cx: &mut Context<Self>) -> String {
self.filter_editor.read(cx).text(cx)
}
fn current_keystroke_query(&self, cx: &App) -> Vec<Keystroke> {
match self.search_mode {
SearchMode::KeyStroke => self
.keystroke_editor
.read(cx)
.keystrokes()
.iter()
.cloned()
.collect(),
SearchMode::Normal => Default::default(),
}
}
fn update_matches(&self, cx: &mut Context<Self>) {
let action_query = self.current_action_query(cx);
let keystroke_query = self.current_keystroke_query(cx);
let query = self.current_query(cx);
cx.spawn(async move |this, cx| {
Self::process_query(this, action_query, keystroke_query, cx).await
})
.detach();
cx.spawn(async move |this, cx| Self::process_query(this, query, cx).await)
.detach();
}
async fn process_query(
this: WeakEntity<Self>,
action_query: String,
keystroke_query: Vec<Keystroke>,
query: String,
cx: &mut AsyncApp,
) -> anyhow::Result<()> {
let action_query = command_palette::normalize_action_query(&action_query);
let query = command_palette::normalize_action_query(&query);
let (string_match_candidates, keybind_count) = this.read_with(cx, |this, _| {
(this.string_match_candidates.clone(), this.keybindings.len())
})?;
let executor = cx.background_executor().clone();
let mut matches = fuzzy::match_strings(
&string_match_candidates,
&action_query,
&query,
true,
true,
keybind_count,
@@ -377,26 +320,7 @@ impl KeymapEditor {
FilterState::All => {}
}
match this.search_mode {
SearchMode::KeyStroke => {
matches.retain(|item| {
this.keybindings[item.candidate_id]
.ui_key_binding
.as_ref()
.is_some_and(|binding| {
keystroke_query.iter().all(|key| {
binding.keystrokes.iter().any(|keystroke| {
keystroke.key == key.key
&& keystroke.modifiers == key.modifiers
})
})
})
});
}
SearchMode::Normal => {}
}
if action_query.is_empty() {
if query.is_empty() {
// apply default sort
// sorts by source precedence, and alphabetically by action name within each source
matches.sort_by_key(|match_item| {
@@ -474,7 +398,9 @@ impl KeymapEditor {
action_name: action_name.into(),
action_input,
action_docs,
action_schema: action_schema.get(action_name).cloned(),
action_schema: action_schema.get(action_name).map(|action_schema| {
root_schema_from_action_schema(action_schema, &mut generator)
}),
context: Some(context),
source,
});
@@ -491,7 +417,9 @@ impl KeymapEditor {
action_name: action_name.into(),
action_input: None,
action_docs: action_documentation.get(action_name).copied(),
action_schema: action_schema.get(action_name).cloned(),
action_schema: action_schema.get(action_name).map(|action_schema| {
root_schema_from_action_schema(action_schema, &mut generator)
}),
context: None,
source: None,
});
@@ -504,10 +432,16 @@ impl KeymapEditor {
fn update_keybindings(&mut self, cx: &mut Context<KeymapEditor>) {
let workspace = self.workspace.clone();
cx.spawn(async move |this, cx| {
let json_language = load_json_language(workspace.clone(), cx).await;
let json_language = load_json_language(
workspace
.read_with(cx, |workspace, _cx| workspace.project().downgrade())
.ok(),
cx,
)
.await;
let rust_language = load_rust_language(workspace.clone(), cx).await;
let (action_query, keystroke_query) = this.update(cx, |this, cx| {
let query = this.update(cx, |this, cx| {
let (key_bindings, string_match_candidates) =
Self::process_bindings(json_language, rust_language, cx);
@@ -530,13 +464,10 @@ impl KeymapEditor {
string: candidate.string.clone(),
})
.collect();
(
this.current_action_query(cx),
this.current_keystroke_query(cx),
)
this.current_query(cx)
})?;
// calls cx.notify
Self::process_query(this, action_query, keystroke_query, cx).await
Self::process_query(this, query, cx).await
})
.detach_and_log_err(cx);
}
@@ -666,17 +597,27 @@ impl KeymapEditor {
return;
};
let keymap_editor = cx.entity();
let Some((fs, project)) = self
.workspace
.read_with(cx, |workspace, _| {
(
workspace.app_state().fs.clone(),
workspace.project().clone(),
)
})
.ok()
else {
return;
};
self.workspace
.update(cx, |workspace, cx| {
let fs = workspace.app_state().fs.clone();
let workspace_weak = cx.weak_entity();
workspace.toggle_modal(window, cx, |window, cx| {
let modal = KeybindingEditorModal::new(
create,
keybind,
keybind_idx,
keymap_editor,
workspace_weak,
project,
fs,
window,
cx,
@@ -696,21 +637,6 @@ impl KeymapEditor {
self.open_edit_keybinding_modal(true, window, cx);
}
fn delete_binding(&mut self, _: &DeleteBinding, window: &mut Window, cx: &mut Context<Self>) {
let Some(to_remove) = self.selected_binding().cloned() else {
return;
};
let Ok(fs) = self
.workspace
.read_with(cx, |workspace, _| workspace.app_state().fs.clone())
else {
return;
};
let tab_size = cx.global::<settings::SettingsStore>().json_tab_size();
cx.spawn(async move |_, _| remove_keybinding(to_remove, &fs, tab_size).await)
.detach_and_notify_err(window, cx);
}
fn copy_context_to_clipboard(
&mut self,
_: &CopyContext,
@@ -742,33 +668,6 @@ impl KeymapEditor {
};
cx.write_to_clipboard(gpui::ClipboardItem::new_string(action.clone()));
}
fn toggle_conflict_filter(
&mut self,
_: &ToggleConflictFilter,
_: &mut Window,
cx: &mut Context<Self>,
) {
self.filter_state = self.filter_state.invert();
self.update_matches(cx);
}
fn toggle_keystroke_search(
&mut self,
_: &ToggleKeystrokeSearch,
window: &mut Window,
cx: &mut Context<Self>,
) {
self.search_mode = self.search_mode.invert();
self.update_matches(cx);
match self.search_mode {
SearchMode::KeyStroke => {
window.focus(&self.keystroke_editor.focus_handle(cx));
}
SearchMode::Normal => {}
}
}
}
#[derive(Clone)]
@@ -865,100 +764,43 @@ impl Render for KeymapEditor {
.on_action(cx.listener(Self::confirm))
.on_action(cx.listener(Self::edit_binding))
.on_action(cx.listener(Self::create_binding))
.on_action(cx.listener(Self::delete_binding))
.on_action(cx.listener(Self::copy_action_to_clipboard))
.on_action(cx.listener(Self::copy_context_to_clipboard))
.on_action(cx.listener(Self::toggle_conflict_filter))
.on_action(cx.listener(Self::toggle_keystroke_search))
.size_full()
.p_2()
.gap_1()
.bg(theme.colors().editor_background)
.child(
h_flex()
.p_2()
.gap_1()
.key_context({
let mut context = KeyContext::new_with_defaults();
context.add("BufferSearchBar");
context
})
.child(
div()
.size_full()
.h_8()
.pl_2()
.pr_1()
.py_1()
.border_1()
.border_color(theme.colors().border)
.rounded_lg()
.child(self.filter_editor.clone()),
)
.child(
// TODO: Ask Mikyala if there's a way to get have items be aligned by horizontally
// without embedding a h_flex in another h_flex
h_flex()
.when(self.keybinding_conflict_state.any_conflicts(), |this| {
this.child(
IconButton::new("KeymapEditorConflictIcon", IconName::Warning)
.tooltip({
let filter_state = self.filter_state;
move |window, cx| {
Tooltip::for_action(
match filter_state {
FilterState::All => "Show conflicts",
FilterState::Conflicts => "Hide conflicts",
},
&ToggleConflictFilter,
window,
cx,
)
}
})
.selected_icon_color(Color::Error)
.toggle_state(matches!(
self.filter_state,
FilterState::Conflicts
))
.on_click(|_, window, cx| {
window.dispatch_action(
ToggleConflictFilter.boxed_clone(),
cx,
);
}),
)
})
.child(
IconButton::new("KeymapEditorToggleFiltersIcon", IconName::Filter)
.tooltip(|window, cx| {
Tooltip::for_action(
"Toggle Keystroke Search",
&ToggleKeystrokeSearch,
window,
cx,
)
})
.toggle_state(matches!(self.search_mode, SearchMode::KeyStroke))
.on_click(|_, window, cx| {
window.dispatch_action(
ToggleKeystrokeSearch.boxed_clone(),
cx,
);
}),
),
),
.h_8()
.pl_2()
.pr_1()
.py_1()
.border_1()
.border_color(theme.colors().border)
.rounded_lg()
.child(self.filter_editor.clone())
.when(self.keybinding_conflict_state.any_conflicts(), |this| {
this.child(
IconButton::new("KeymapEditorConflictIcon", IconName::Warning)
.tooltip(Tooltip::text(match self.filter_state {
FilterState::All => "Show conflicts",
FilterState::Conflicts => "Hide conflicts",
}))
.selected_icon_color(Color::Error)
.toggle_state(matches!(self.filter_state, FilterState::Conflicts))
.on_click(cx.listener(|this, _, _, cx| {
this.filter_state = this.filter_state.invert();
this.update_matches(cx);
})),
)
}),
)
.when(matches!(self.search_mode, SearchMode::KeyStroke), |this| {
this.child(
div()
.child(self.keystroke_editor.clone())
.border_1()
.border_color(theme.colors().border)
.rounded_lg(),
)
})
.child(
Table::new()
.interactable(&self.table_interaction_state)
@@ -1153,6 +995,10 @@ struct KeybindingEditorModal {
keybind_editor: Entity<KeystrokeInput>,
context_editor: Entity<Editor>,
input_editor: Option<Entity<Editor>>,
_input_editor_data: (
Option<Entity<Entity<language::Buffer>>>,
Option<tempfile::TempDir>,
),
fs: Arc<dyn Fs>,
error: Option<InputError>,
keymap_editor: Entity<KeymapEditor>,
@@ -1174,10 +1020,10 @@ impl KeybindingEditorModal {
editing_keybind: ProcessedKeybinding,
editing_keybind_idx: usize,
keymap_editor: Entity<KeymapEditor>,
workspace: WeakEntity<Workspace>,
project: Entity<Project>,
fs: Arc<dyn Fs>,
window: &mut Window,
cx: &mut App,
cx: &mut Context<Self>,
) -> Self {
let keybind_editor = cx.new(|cx| KeystrokeInput::new(window, cx));
@@ -1212,31 +1058,104 @@ impl KeybindingEditorModal {
editor
});
let input_editor = editing_keybind.action_schema.clone().map(|_schema| {
cx.new(|cx| {
let mut editor = Editor::auto_height_unbounded(1, window, cx);
if let Some(input) = editing_keybind.action_input.clone() {
editor.set_text(input.text, window, cx);
} else {
// TODO: default value from schema?
editor.set_placeholder_text("Action input", cx);
}
cx.spawn(async |editor, cx| {
let json_language = load_json_language(workspace, cx).await;
editor
.update(cx, |editor, cx| {
if let Some(buffer) = editor.buffer().read(cx).as_singleton() {
buffer.update(cx, |buffer, cx| {
buffer.set_language(Some(json_language), cx)
});
if let Some(schema) = editing_keybind.action_schema.clone() {
let project = project.downgrade();
let fs = fs.clone();
let file_name = file_name_for_action_input(&editing_keybind.action_name);
let action_input = editing_keybind
.action_input
.as_ref()
.map(|input| input.text.clone());
cx.spawn_in(window, async move |this, cx| {
// todo! fix when modal is dropped, buffer and temp_dir are dropped before worktree, resulting in worktree scan errors
// being printed due to the non existant worktree
let (buffer, temp_dir) = create_temp_buffer_for_action_input(file_name.clone(), project.clone(), fs, cx)
.await
.context("Failed to create temporary buffer for action input. Auto-complete will not work")
.log_err()
.unzip();
let buffer = match buffer {
Some(buffer) => buffer,
None => cx.new(|cx| language::Buffer::local("", cx))?
};
let open_lsp_handle =
project.update(cx, |project, cx| {
project.register_buffer_with_language_servers(&buffer, cx)
}).ok();
cx.spawn(
{
let project = project.clone();
let buffer = buffer.downgrade();
async move |cx| {
let json_language = load_json_language(Some(project), cx).await;
buffer
.update(cx, |buffer, cx| {
buffer.set_language(Some(json_language), cx)
})
.context(
"Failed to load JSON language for editing keybinding action input",
).log_err()
}}).detach();
cx.spawn({
let project = project.clone();
let buffer = buffer.downgrade();
async move |cx| {
cx.background_executor().timer(std::time::Duration::from_secs(10)).await;
let Some(project) = project.upgrade() else {
return;
};
let Some(buffer) = buffer.upgrade() else {
return;
};
let uri = "lol://some.uri".into();
let schema_associations = vec![
project::lsp_store::json_language_server_ext::SchemaAssociation {
uri,
file_match: vec![file_name],
folder_uri: None,
schema: Some(schema.to_value()),
}
})
.context("Failed to load JSON language for editing keybinding action input")
];
cx.update(|_, cx| {
project::lsp_store::json_language_server_ext::send_schema_associations_notification(project, buffer, &schema_associations, cx);
}).ok();
}
}).detach();
let editor = cx.new_window_entity(|window, cx| {
let multi_buffer =
cx.new(|cx| editor::MultiBuffer::singleton(buffer.clone(), cx));
let mut editor = Editor::new(
editor::EditorMode::AutoHeight {
min_lines: 1,
max_lines: Some(10),
},
multi_buffer.clone(),
project.upgrade(),
window,
cx,
);
if let Some(input) = action_input {
editor.set_text(input, window, cx);
} else {
// TODO: default value from schema?
editor.set_placeholder_text("Action input", cx);
}
editor
})?;
this.update(cx, |this, _cx| {
this.input_editor = Some(editor);
this._input_editor_data = (open_lsp_handle, temp_dir);
})
.detach_and_log_err(cx);
editor
})
});
.detach_and_log_err(cx);
}
Self {
creating: create,
@@ -1245,7 +1164,8 @@ impl KeybindingEditorModal {
fs,
keybind_editor,
context_editor,
input_editor,
input_editor: None,
_input_editor_data: (None, None),
error: None,
keymap_editor,
}
@@ -1458,6 +1378,53 @@ impl Render for KeybindingEditorModal {
}
}
/// Builds the `.json` file name used for the temp buffer that backs an
/// action's input, e.g. `editor::MoveLeft` -> `editor_MoveLeft.json`.
/// The `::` path separator is replaced so the result is a valid file name.
fn file_name_for_action_input(action_name: &SharedString) -> String {
    format!("{}.json", action_name.as_ref().replace("::", "_"))
}
/// Creates a project buffer for editing an action's JSON input, backed by a
/// real temporary file so language servers can associate the buffer with a
/// path.
///
/// Returns the opened buffer together with the `TempDir` guard; the caller
/// must keep the guard alive for as long as the buffer is in use, since
/// dropping it deletes the backing directory.
///
/// Errors if the backing file cannot be created, if the project entity has
/// been dropped, or if opening the buffer fails.
async fn create_temp_buffer_for_action_input(
    file_name: String,
    project: WeakEntity<Project>,
    fs: Arc<dyn Fs>,
    cx: &mut AsyncApp,
) -> anyhow::Result<(Entity<language::Buffer>, tempfile::TempDir)> {
    // `file_name` is moved here directly; the previous `.clone()` was
    // redundant because the name is not used again afterwards.
    let (temp_file_path, temp_dir) = create_temp_file_for_action_input(file_name, fs)
        .await
        .context("Failed to create backing file")?;
    project
        // `update` fails if the weak project handle can no longer be upgraded.
        .update(cx, |project, cx| {
            project.open_local_buffer(temp_file_path, cx)
        })?
        .await
        .context("Failed to create buffer")
        .map(|buffer| (buffer, temp_dir))
}
/// Creates an empty file named `file_name` inside a fresh temporary
/// subdirectory of the application's temp dir.
///
/// Returns the file's path along with the `TempDir` guard; the directory
/// (and the file in it) is removed when the guard is dropped, so callers
/// must hold on to it while the path is in use.
async fn create_temp_file_for_action_input(
    file_name: String,
    fs: Arc<dyn Fs>,
) -> anyhow::Result<(PathBuf, tempfile::TempDir)> {
    // A dedicated subdirectory avoids name collisions between concurrent
    // editors using the same action file name.
    let scratch_dir = tempfile::Builder::new()
        .tempdir_in(paths::temp_dir())
        .context("Failed to create temporary directory")?;
    let file_path = scratch_dir.path().join(file_name);
    let options = fs::CreateOptions {
        ignore_if_exists: true,
        overwrite: false,
    };
    fs.create_file(&file_path, options)
        .await
        .context("Failed to create temporary file")?;
    Ok((file_path, scratch_dir))
}
/// Provides completions for the keybinding context editor; implements
/// `CompletionProvider` (see impl below) over a fixed list of context names.
struct KeyContextCompletionProvider {
    // Candidate key-context names offered as completion items.
    contexts: Vec<SharedString>,
}
@@ -1521,17 +1488,18 @@ impl CompletionProvider for KeyContextCompletionProvider {
}
}
async fn load_json_language(workspace: WeakEntity<Workspace>, cx: &mut AsyncApp) -> Arc<Language> {
let json_language_task = workspace
.read_with(cx, |workspace, cx| {
workspace
.project()
.read(cx)
.languages()
.language_for_name("JSON")
})
.context("Failed to load JSON language")
.log_err();
async fn load_json_language(
project: Option<WeakEntity<Project>>,
cx: &mut AsyncApp,
) -> Arc<Language> {
let json_language_task = project.and_then(|project| {
project
.read_with(cx, |project, _| {
project.languages().language_for_name("JSON")
})
.context("Failed to load JSON language")
.log_err()
});
let json_language = match json_language_task {
Some(task) => task.await.context("Failed to load JSON language").log_err(),
None => None,
@@ -1640,50 +1608,30 @@ async fn save_keybinding_update(
Ok(())
}
async fn remove_keybinding(
existing: ProcessedKeybinding,
fs: &Arc<dyn Fs>,
tab_size: usize,
) -> anyhow::Result<()> {
let Some(ui_key_binding) = existing.ui_key_binding else {
anyhow::bail!("Cannot remove a keybinding that does not exist");
};
let keymap_contents = settings::KeymapFile::load_keymap_file(fs)
.await
.context("Failed to load keymap file")?;
let operation = settings::KeybindUpdateOperation::Remove {
target: settings::KeybindUpdateTarget {
context: existing
.context
.as_ref()
.and_then(KeybindContextString::local_str),
keystrokes: &ui_key_binding.keystrokes,
action_name: &existing.action_name,
use_key_equivalents: false,
input: existing
.action_input
.as_ref()
.map(|input| input.text.as_ref()),
},
target_keybind_source: existing
.source
.map(|(source, _name)| source)
.unwrap_or(KeybindSource::User),
};
let updated_keymap_contents =
settings::KeymapFile::update_keybinding(operation, keymap_contents, tab_size)
.context("Failed to update keybinding")?;
fs.atomic_write(paths::keymap_file().clone(), updated_keymap_contents)
.await
.context("Failed to write keymap file")?;
Ok(())
/// Wraps an action's schema in a root JSON schema document suitable for a
/// JSON language server: adds the generator's `$schema` meta-schema URI,
/// enables trailing commas, and inlines the generator's collected `$defs`,
/// then merges the action schema's own properties on top.
fn root_schema_from_action_schema(
    action_schema: &schemars::Schema,
    generator: &mut schemars::SchemaGenerator,
) -> schemars::Schema {
    let meta_schema = generator
        .settings()
        .meta_schema
        .as_ref()
        .expect("meta_schema should be present in schemars settings")
        .to_string();
    let defs = generator.definitions();
    let mut root = schemars::json_schema!({
        "$schema": meta_schema,
        "allowTrailingCommas": true,
        "$defs": defs,
    });
    // Clone the action schema so its key/value pairs can be moved out and
    // merged into the root object, overriding on key collision.
    let action_entries = std::mem::take(action_schema.clone().ensure_object());
    root.ensure_object().extend(action_entries);
    root
}
struct KeystrokeInput {
keystrokes: Vec<Keystroke>,
highlight_on_focus: bool,
focus_handle: FocusHandle,
intercept_subscription: Option<Subscription>,
_focus_subscriptions: [Subscription; 2],
@@ -1698,7 +1646,6 @@ impl KeystrokeInput {
];
Self {
keystrokes: Vec::new(),
highlight_on_focus: true,
focus_handle,
intercept_subscription: None,
_focus_subscriptions,
@@ -1716,7 +1663,6 @@ impl KeystrokeInput {
{
if !event.modifiers.modified() {
self.keystrokes.pop();
cx.emit(());
} else {
last.modifiers = event.modifiers;
}
@@ -1726,7 +1672,6 @@ impl KeystrokeInput {
key: "".to_string(),
key_char: None,
});
cx.emit(());
}
cx.stop_propagation();
cx.notify();
@@ -1740,7 +1685,6 @@ impl KeystrokeInput {
} else if Some(keystroke) != self.keystrokes.last() {
self.keystrokes.push(keystroke.clone());
}
cx.emit(());
cx.stop_propagation();
cx.notify();
}
@@ -1755,7 +1699,6 @@ impl KeystrokeInput {
&& !last.key.is_empty()
&& last.modifiers == event.keystroke.modifiers
{
cx.emit(());
self.keystrokes.push(Keystroke {
modifiers: event.keystroke.modifiers,
key: "".to_string(),
@@ -1796,8 +1739,6 @@ impl KeystrokeInput {
}
}
impl EventEmitter<()> for KeystrokeInput {}
impl Focusable for KeystrokeInput {
fn focus_handle(&self, _cx: &App) -> FocusHandle {
self.focus_handle.clone()
@@ -1814,11 +1755,9 @@ impl Render for KeystrokeInput {
.track_focus(&self.focus_handle)
.on_modifiers_changed(cx.listener(Self::on_modifiers_changed))
.on_key_up(cx.listener(Self::on_key_up))
.when(self.highlight_on_focus, |this| {
this.focus(|mut style| {
style.border_color = Some(colors.border_focused);
style
})
.focus(|mut style| {
style.border_color = Some(colors.border_focused);
style
})
.py_2()
.px_3()
@@ -1859,7 +1798,6 @@ impl Render for KeystrokeInput {
.when(!is_focused, |this| this.icon_color(Color::Muted))
.on_click(cx.listener(|this, _event, _window, cx| {
this.keystrokes.pop();
cx.emit(());
cx.notify();
})),
)
@@ -1869,7 +1807,6 @@ impl Render for KeystrokeInput {
.when(!is_focused, |this| this.icon_color(Color::Muted))
.on_click(cx.listener(|this, _event, _window, cx| {
this.keystrokes.clear();
cx.emit(());
cx.notify();
})),
),
@@ -1902,25 +1839,16 @@ fn build_keybind_context_menu(
.and_then(KeybindContextString::local)
.is_none();
let selected_binding_is_unbound_action = selected_binding.ui_key_binding.is_none();
let selected_binding_is_unbound = selected_binding.ui_key_binding.is_none();
menu.action_disabled_when(
selected_binding_is_unbound_action,
"Edit",
Box::new(EditBinding),
)
.action("Create", Box::new(CreateBinding))
.action_disabled_when(
selected_binding_is_unbound_action,
"Delete",
Box::new(DeleteBinding),
)
.action("Copy action", Box::new(CopyAction))
.action_disabled_when(
selected_binding_has_no_context,
"Copy Context",
Box::new(CopyContext),
)
menu.action_disabled_when(selected_binding_is_unbound, "Edit", Box::new(EditBinding))
.action("Create", Box::new(CreateBinding))
.action("Copy action", Box::new(CopyAction))
.action_disabled_when(
selected_binding_has_no_context,
"Copy Context",
Box::new(CopyContext),
)
})
}

View File

@@ -494,22 +494,6 @@ impl TerminalElement {
}
}
/// Checks if a character is a decorative block/box-like character that should
/// preserve its exact colors without contrast adjustment.
///
/// Fixes https://github.com/zed-industries/zed/issues/34234 - we can
/// expand this list if we run into more similar cases, but the goal
/// is to be conservative here.
fn is_decorative_character(ch: char) -> bool {
matches!(
ch as u32,
// 0x2500..=0x257F Box Drawing
// 0x2580..=0x259F Block Elements
// 0x25A0..=0x25D7 Geometric Shapes (block/box-like subset)
0x2500..=0x25D7
)
}
/// Converts the Alacritty cell styles to GPUI text styles and background color.
fn cell_style(
indexed: &IndexedCell,
@@ -524,10 +508,7 @@ impl TerminalElement {
let mut fg = convert_color(&fg, colors);
let bg = convert_color(&bg, colors);
// Only apply contrast adjustment to non-decorative characters
if !Self::is_decorative_character(indexed.c) {
fg = color_contrast::ensure_minimum_contrast(fg, bg, minimum_contrast);
}
fg = color_contrast::ensure_minimum_contrast(fg, bg, minimum_contrast);
// Ghostty uses (175/255) as the multiplier (~0.69), Alacritty uses 0.66, Kitty
// uses 0.75. We're using 0.7 because it's pretty well in the middle of that.
@@ -1594,91 +1575,6 @@ mod tests {
use super::*;
use gpui::{AbsoluteLength, Hsla, font};
#[test]
fn test_is_decorative_character() {
// Box Drawing characters (U+2500 to U+257F)
assert!(TerminalElement::is_decorative_character('─')); // U+2500
assert!(TerminalElement::is_decorative_character('│')); // U+2502
assert!(TerminalElement::is_decorative_character('┌')); // U+250C
assert!(TerminalElement::is_decorative_character('┐')); // U+2510
assert!(TerminalElement::is_decorative_character('└')); // U+2514
assert!(TerminalElement::is_decorative_character('┘')); // U+2518
assert!(TerminalElement::is_decorative_character('┼')); // U+253C
// Block Elements (U+2580 to U+259F)
assert!(TerminalElement::is_decorative_character('▀')); // U+2580
assert!(TerminalElement::is_decorative_character('▄')); // U+2584
assert!(TerminalElement::is_decorative_character('█')); // U+2588
assert!(TerminalElement::is_decorative_character('░')); // U+2591
assert!(TerminalElement::is_decorative_character('▒')); // U+2592
assert!(TerminalElement::is_decorative_character('▓')); // U+2593
// Geometric Shapes - block/box-like subset (U+25A0 to U+25D7)
assert!(TerminalElement::is_decorative_character('■')); // U+25A0
assert!(TerminalElement::is_decorative_character('□')); // U+25A1
assert!(TerminalElement::is_decorative_character('▲')); // U+25B2
assert!(TerminalElement::is_decorative_character('▼')); // U+25BC
assert!(TerminalElement::is_decorative_character('◆')); // U+25C6
assert!(TerminalElement::is_decorative_character('●')); // U+25CF
// The specific character from the issue
assert!(TerminalElement::is_decorative_character('◗')); // U+25D7
// Characters that should NOT be considered decorative
assert!(!TerminalElement::is_decorative_character('A'));
assert!(!TerminalElement::is_decorative_character('a'));
assert!(!TerminalElement::is_decorative_character('0'));
assert!(!TerminalElement::is_decorative_character(' '));
assert!(!TerminalElement::is_decorative_character('←')); // U+2190 (Arrow, not in our ranges)
assert!(!TerminalElement::is_decorative_character('→')); // U+2192 (Arrow, not in our ranges)
assert!(!TerminalElement::is_decorative_character('◘')); // U+25D8 (Just outside our range)
assert!(!TerminalElement::is_decorative_character('◙')); // U+25D9 (Just outside our range)
}
#[test]
fn test_decorative_character_boundary_cases() {
// Test exact boundaries of our ranges
// Box Drawing range boundaries
assert!(TerminalElement::is_decorative_character('\u{2500}')); // First char
assert!(TerminalElement::is_decorative_character('\u{257F}')); // Last char
assert!(!TerminalElement::is_decorative_character('\u{24FF}')); // Just before
// Block Elements range boundaries
assert!(TerminalElement::is_decorative_character('\u{2580}')); // First char
assert!(TerminalElement::is_decorative_character('\u{259F}')); // Last char
// Geometric Shapes subset boundaries
assert!(TerminalElement::is_decorative_character('\u{25A0}')); // First char
assert!(TerminalElement::is_decorative_character('\u{25D7}')); // Last char (◗)
assert!(!TerminalElement::is_decorative_character('\u{25D8}')); // Just after
}
#[test]
fn test_decorative_characters_bypass_contrast_adjustment() {
// Decorative characters should not be affected by contrast adjustment
// The specific character from issue #34234
let problematic_char = '◗'; // U+25D7
assert!(
TerminalElement::is_decorative_character(problematic_char),
"Character ◗ (U+25D7) should be recognized as decorative"
);
// Verify some other commonly used decorative characters
assert!(TerminalElement::is_decorative_character('│')); // Vertical line
assert!(TerminalElement::is_decorative_character('─')); // Horizontal line
assert!(TerminalElement::is_decorative_character('█')); // Full block
assert!(TerminalElement::is_decorative_character('▓')); // Dark shade
assert!(TerminalElement::is_decorative_character('■')); // Black square
assert!(TerminalElement::is_decorative_character('●')); // Black circle
// Verify normal text characters are NOT decorative
assert!(!TerminalElement::is_decorative_character('A'));
assert!(!TerminalElement::is_decorative_character('1'));
assert!(!TerminalElement::is_decorative_character('$'));
assert!(!TerminalElement::is_decorative_character(' '));
}
#[test]
fn test_contrast_adjustment_logic() {
// Test the core contrast adjustment logic without needing full app context

View File

@@ -159,7 +159,6 @@ pub struct ContextMenu {
keep_open_on_confirm: bool,
documentation_aside: Option<(usize, DocumentationAside)>,
fixed_width: Option<DefiniteLength>,
align_popover_top: bool,
}
#[derive(Copy, Clone, PartialEq, Eq)]
@@ -216,7 +215,6 @@ impl ContextMenu {
key_context: "menu".into(),
_on_blur_subscription,
keep_open_on_confirm: false,
align_popover_top: true,
documentation_aside: None,
fixed_width: None,
end_slot_action: None,
@@ -259,7 +257,6 @@ impl ContextMenu {
key_context: "menu".into(),
_on_blur_subscription,
keep_open_on_confirm: true,
align_popover_top: true,
documentation_aside: None,
fixed_width: None,
end_slot_action: None,
@@ -300,7 +297,6 @@ impl ContextMenu {
|this: &mut ContextMenu, window, cx| this.cancel(&menu::Cancel, window, cx),
),
keep_open_on_confirm: false,
align_popover_top: true,
documentation_aside: None,
fixed_width: None,
end_slot_action: None,
@@ -782,11 +778,6 @@ impl ContextMenu {
self
}
pub fn align_popover_bottom(mut self) -> Self {
self.align_popover_top = false;
self
}
fn render_menu_item(
&self,
ix: usize,
@@ -1109,13 +1100,7 @@ impl Render for ContextMenu {
.when(is_wide_window, |this| this.flex_row())
.when(!is_wide_window, |this| this.flex_col())
.w_full()
.map(|div| {
if self.align_popover_top {
div.items_start()
} else {
div.items_end()
}
})
.items_start()
.gap_1()
.child(div().children(aside.clone().and_then(|(_, aside)| {
(aside.side == DocumentationSide::Left).then(|| render_aside(aside, cx))

View File

@@ -409,10 +409,12 @@ impl Render for QuickActionBar {
);
if supports_inline_diagnostics {
let mut inline_diagnostics_item = ContextMenuEntry::new("Inline Diagnostics")
.toggleable(IconPosition::Start, diagnostics_enabled && inline_diagnostics_enabled)
.action(ToggleInlineDiagnostics.boxed_clone())
.handler({
menu = menu.toggleable_entry(
"Inline Diagnostics",
inline_diagnostics_enabled,
IconPosition::Start,
Some(ToggleInlineDiagnostics.boxed_clone()),
{
let editor = editor.clone();
move |window, cx| {
editor
@@ -425,11 +427,8 @@ impl Render for QuickActionBar {
})
.ok();
}
});
if !diagnostics_enabled {
inline_diagnostics_item = inline_diagnostics_item.disabled(true).documentation_aside(DocumentationSide::Left, |_| Label::new("Inline diagnostics are not available until regular diagnostics are enabled.").into_any_element());
}
menu = menu.item(inline_diagnostics_item)
},
);
}
menu = menu.separator();