Compare commits

..

1 Commits

Author SHA1 Message Date
Smit Barmase
79037ff600 add some test file 2025-11-11 14:55:18 +05:30
210 changed files with 1624 additions and 7001 deletions

View File

@@ -1,4 +1,4 @@
# yaml-language-server: $schema=https://www.schemastore.org/github-issue-config.json
# yaml-language-server: $schema=https://json.schemastore.org/github-issue-config.json
blank_issues_enabled: false
contact_links:
- name: Feature Request

View File

@@ -56,14 +56,14 @@ jobs:
- id: set-package-name
name: after_release::publish_winget::set_package_name
run: |
if ("${{ github.event.release.prerelease }}" -eq "true") {
$PACKAGE_NAME = "ZedIndustries.Zed.Preview"
} else {
$PACKAGE_NAME = "ZedIndustries.Zed"
}
if [ "${{ github.event.release.prerelease }}" == "true" ]; then
PACKAGE_NAME=ZedIndustries.Zed.Preview
else
PACKAGE_NAME=ZedIndustries.Zed
fi
echo "PACKAGE_NAME=$PACKAGE_NAME" >> $env:GITHUB_OUTPUT
shell: pwsh
echo "PACKAGE_NAME=$PACKAGE_NAME" >> "$GITHUB_OUTPUT"
shell: bash -euxo pipefail {0}
- name: after_release::publish_winget::winget_releaser
uses: vedantmgoyal9/winget-releaser@19e706d4c9121098010096f9c495a70a7518b30f
with:
@@ -86,19 +86,3 @@ jobs:
SENTRY_ORG: zed-dev
SENTRY_PROJECT: zed
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
notify_on_failure:
needs:
- rebuild_releases_page
- post_to_discord
- publish_winget
- create_sentry_release
if: failure()
runs-on: namespace-profile-2x4-ubuntu-2404
steps:
- name: release::notify_on_failure::notify_slack
run: |-
curl -X POST -H 'Content-type: application/json'\
--data '{"text":"${{ github.workflow }} failed: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"}' "$SLACK_WEBHOOK"
shell: bash -euxo pipefail {0}
env:
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_WORKFLOW_FAILURES }}

View File

@@ -1,7 +1,6 @@
# Generated from xtask::workflows::cherry_pick
# Rebuild with `cargo xtask workflows`.
name: cherry_pick
run-name: 'cherry_pick to ${{ inputs.channel }} #${{ inputs.pr_number }}'
on:
workflow_dispatch:
inputs:
@@ -17,10 +16,6 @@ on:
description: channel
required: true
type: string
pr_number:
description: pr_number
required: true
type: string
jobs:
run_cherry_pick:
runs-on: namespace-profile-2x4-ubuntu-2404

View File

@@ -15,13 +15,13 @@ jobs:
stale-issue-message: >
Hi there! 👋
We're working to clean up our issue tracker by closing older bugs that might not be relevant anymore. If you are able to reproduce this issue in the latest version of Zed, please let us know by commenting on this issue, and it will be kept open. If you can't reproduce it, feel free to close the issue yourself. Otherwise, it will close automatically in 14 days.
We're working to clean up our issue tracker by closing older issues that might not be relevant anymore. If you are able to reproduce this issue in the latest version of Zed, please let us know by commenting on this issue, and we will keep it open. If you can't reproduce it, feel free to close the issue yourself. Otherwise, we'll close it in 7 days.
Thanks for your help!
close-issue-message: "This issue was closed due to inactivity. If you're still experiencing this problem, please open a new issue with a link to this issue."
days-before-stale: 60
days-before-close: 14
only-issue-types: "Bug,Crash"
days-before-stale: 120
days-before-close: 7
any-of-issue-labels: "bug,panic / crash"
operations-per-run: 1000
ascending: true
enable-statistics: true

View File

@@ -484,20 +484,6 @@ jobs:
shell: bash -euxo pipefail {0}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
notify_on_failure:
needs:
- upload_release_assets
- auto_release_preview
if: failure()
runs-on: namespace-profile-2x4-ubuntu-2404
steps:
- name: release::notify_on_failure::notify_slack
run: |-
curl -X POST -H 'Content-type: application/json'\
--data '{"text":"${{ github.workflow }} failed: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"}' "$SLACK_WEBHOOK"
shell: bash -euxo pipefail {0}
env:
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_WORKFLOW_FAILURES }}
concurrency:
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
cancel-in-progress: true

View File

@@ -493,21 +493,3 @@ jobs:
SENTRY_PROJECT: zed
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
timeout-minutes: 60
notify_on_failure:
needs:
- bundle_linux_aarch64
- bundle_linux_x86_64
- bundle_mac_aarch64
- bundle_mac_x86_64
- bundle_windows_aarch64
- bundle_windows_x86_64
if: failure()
runs-on: namespace-profile-2x4-ubuntu-2404
steps:
- name: release::notify_on_failure::notify_slack
run: |-
curl -X POST -H 'Content-type: application/json'\
--data '{"text":"${{ github.workflow }} failed: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"}' "$SLACK_WEBHOOK"
shell: bash -euxo pipefail {0}
env:
SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK_WORKFLOW_FAILURES }}

View File

@@ -6,9 +6,6 @@ env:
CARGO_INCREMENTAL: '0'
RUST_BACKTRACE: '1'
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
GOOGLE_AI_API_KEY: ${{ secrets.GOOGLE_AI_API_KEY }}
GOOGLE_CLOUD_PROJECT: ${{ secrets.GOOGLE_CLOUD_PROJECT }}
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
ZED_EVAL_TELEMETRY: '1'
MODEL_NAME: ${{ inputs.model_name }}
@@ -51,11 +48,6 @@ jobs:
- name: run_agent_evals::agent_evals::run_eval
run: cargo run --package=eval -- --repetitions=8 --concurrency=1 --model "${MODEL_NAME}"
shell: bash -euxo pipefail {0}
env:
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
GOOGLE_AI_API_KEY: ${{ secrets.GOOGLE_AI_API_KEY }}
GOOGLE_CLOUD_PROJECT: ${{ secrets.GOOGLE_CLOUD_PROJECT }}
- name: steps::cleanup_cargo_config
if: always()
run: |

View File

@@ -1,69 +0,0 @@
# Generated from xtask::workflows::run_cron_unit_evals
# Rebuild with `cargo xtask workflows`.
name: run_cron_unit_evals
env:
CARGO_TERM_COLOR: always
CARGO_INCREMENTAL: '0'
RUST_BACKTRACE: '1'
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
on:
schedule:
- cron: 47 1 * * 2
workflow_dispatch: {}
jobs:
cron_unit_evals:
runs-on: namespace-profile-16x32-ubuntu-2204
steps:
- name: steps::checkout_repo
uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683
with:
clean: false
- name: steps::setup_cargo_config
run: |
mkdir -p ./../.cargo
cp ./.cargo/ci-config.toml ./../.cargo/config.toml
shell: bash -euxo pipefail {0}
- name: steps::cache_rust_dependencies_namespace
uses: namespacelabs/nscloud-cache-action@v1
with:
cache: rust
- name: steps::setup_linux
run: ./script/linux
shell: bash -euxo pipefail {0}
- name: steps::install_mold
run: ./script/install-mold
shell: bash -euxo pipefail {0}
- name: steps::download_wasi_sdk
run: ./script/download-wasi-sdk
shell: bash -euxo pipefail {0}
- name: steps::cargo_install_nextest
run: cargo install cargo-nextest --locked
shell: bash -euxo pipefail {0}
- name: steps::clear_target_dir_if_large
run: ./script/clear-target-dir-if-larger-than 250
shell: bash -euxo pipefail {0}
- name: ./script/run-unit-evals
run: ./script/run-unit-evals
shell: bash -euxo pipefail {0}
env:
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
GOOGLE_AI_API_KEY: ${{ secrets.GOOGLE_AI_API_KEY }}
GOOGLE_CLOUD_PROJECT: ${{ secrets.GOOGLE_CLOUD_PROJECT }}
- name: steps::cleanup_cargo_config
if: always()
run: |
rm -rf ./../.cargo
shell: bash -euxo pipefail {0}
- name: run_agent_evals::cron_unit_evals::send_failure_to_slack
if: ${{ failure() }}
uses: slackapi/slack-github-action@b0fa283ad8fea605de13dc3f449259339835fc52
with:
method: chat.postMessage
token: ${{ secrets.SLACK_APP_ZED_UNIT_EVALS_BOT_TOKEN }}
payload: |
channel: C04UDRNNJFQ
text: "Unit Evals Failed: https://github.com/zed-industries/zed/actions/runs/${{ github.run_id }}"
concurrency:
group: ${{ github.workflow }}-${{ github.ref_name }}-${{ github.ref_name == 'main' && github.sha || 'anysha' }}
cancel-in-progress: true

View File

@@ -1,26 +1,17 @@
# Generated from xtask::workflows::run_unit_evals
# Generated from xtask::workflows::run_agent_evals
# Rebuild with `cargo xtask workflows`.
name: run_unit_evals
name: run_agent_evals
env:
CARGO_TERM_COLOR: always
CARGO_INCREMENTAL: '0'
RUST_BACKTRACE: '1'
ZED_CLIENT_CHECKSUM_SEED: ${{ secrets.ZED_CLIENT_CHECKSUM_SEED }}
ZED_EVAL_TELEMETRY: '1'
MODEL_NAME: ${{ inputs.model_name }}
on:
workflow_dispatch:
inputs:
model_name:
description: model_name
required: true
type: string
commit_sha:
description: commit_sha
required: true
type: string
schedule:
- cron: 47 1 * * 2
workflow_dispatch: {}
jobs:
run_unit_evals:
unit_evals:
runs-on: namespace-profile-16x32-ubuntu-2204
steps:
- name: steps::checkout_repo
@@ -56,10 +47,15 @@ jobs:
shell: bash -euxo pipefail {0}
env:
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
GOOGLE_AI_API_KEY: ${{ secrets.GOOGLE_AI_API_KEY }}
GOOGLE_CLOUD_PROJECT: ${{ secrets.GOOGLE_CLOUD_PROJECT }}
UNIT_EVAL_COMMIT: ${{ inputs.commit_sha }}
- name: run_agent_evals::unit_evals::send_failure_to_slack
if: ${{ failure() }}
uses: slackapi/slack-github-action@b0fa283ad8fea605de13dc3f449259339835fc52
with:
method: chat.postMessage
token: ${{ secrets.SLACK_APP_ZED_UNIT_EVALS_BOT_TOKEN }}
payload: |
channel: C04UDRNNJFQ
text: "Unit Evals Failed: https://github.com/zed-industries/zed/actions/runs/${{ github.run_id }}"
- name: steps::cleanup_cargo_config
if: always()
run: |

36
Cargo.lock generated
View File

@@ -6248,7 +6248,7 @@ dependencies = [
"futures-core",
"futures-sink",
"nanorand",
"spin 0.9.8",
"spin",
]
[[package]]
@@ -6359,9 +6359,9 @@ checksum = "aa9a19cbb55df58761df49b23516a86d432839add4af60fc256da840f66ed35b"
[[package]]
name = "fork"
version = "0.4.0"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "30268f1eefccc9d72f43692e8b89e659aeb52e84016c3b32b6e7e9f1c8f38f94"
checksum = "05dc8b302e04a1c27f4fe694439ef0f29779ca4edc205b7b58f00db04e29656d"
dependencies = [
"libc",
]
@@ -7287,7 +7287,6 @@ dependencies = [
"calloop",
"calloop-wayland-source",
"cbindgen",
"circular-buffer",
"cocoa 0.26.0",
"cocoa-foundation 0.2.0",
"collections",
@@ -7343,7 +7342,6 @@ dependencies = [
"slotmap",
"smallvec",
"smol",
"spin 0.10.0",
"stacksafe",
"strum 0.27.2",
"sum_tree",
@@ -9074,7 +9072,7 @@ version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe"
dependencies = [
"spin 0.9.8",
"spin",
]
[[package]]
@@ -10016,18 +10014,6 @@ version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
[[package]]
name = "miniprofiler_ui"
version = "0.1.0"
dependencies = [
"gpui",
"serde_json",
"smol",
"util",
"workspace",
"zed_actions",
]
[[package]]
name = "miniz_oxide"
version = "0.8.9"
@@ -13092,7 +13078,6 @@ dependencies = [
"settings",
"smallvec",
"telemetry",
"tempfile",
"theme",
"ui",
"util",
@@ -15868,15 +15853,6 @@ dependencies = [
"lock_api",
]
[[package]]
name = "spin"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d5fe4ccb98d9c292d56fec89a5e07da7fc4cf0dc11e156b41793132775d3e591"
dependencies = [
"lock_api",
]
[[package]]
name = "spirv"
version = "0.3.0+sdk-1.3.268.0"
@@ -21170,7 +21146,7 @@ dependencies = [
[[package]]
name = "zed"
version = "0.213.8"
version = "0.213.0"
dependencies = [
"acp_tools",
"activity_indicator",
@@ -21188,7 +21164,6 @@ dependencies = [
"breadcrumbs",
"call",
"channel",
"chrono",
"clap",
"cli",
"client",
@@ -21246,7 +21221,6 @@ dependencies = [
"menu",
"migrator",
"mimalloc",
"miniprofiler_ui",
"nc",
"nix 0.29.0",
"node_runtime",

View File

@@ -110,7 +110,6 @@ members = [
"crates/menu",
"crates/migrator",
"crates/mistral",
"crates/miniprofiler_ui",
"crates/multi_buffer",
"crates/nc",
"crates/net",
@@ -342,7 +341,6 @@ menu = { path = "crates/menu" }
migrator = { path = "crates/migrator" }
mistral = { path = "crates/mistral" }
multi_buffer = { path = "crates/multi_buffer" }
miniprofiler_ui = { path = "crates/miniprofiler_ui" }
nc = { path = "crates/nc" }
net = { path = "crates/net" }
node_runtime = { path = "crates/node_runtime" }
@@ -506,7 +504,7 @@ emojis = "0.6.1"
env_logger = "0.11"
exec = "0.3.1"
fancy-regex = "0.14.0"
fork = "0.4.0"
fork = "0.2.0"
futures = "0.3"
futures-batch = "0.6.1"
futures-lite = "1.13"

0
a.txt Normal file
View File

View File

@@ -1,4 +0,0 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M8.00156 10.3996C9.32705 10.3996 10.4016 9.32509 10.4016 7.99961C10.4016 6.67413 9.32705 5.59961 8.00156 5.59961C6.67608 5.59961 5.60156 6.67413 5.60156 7.99961C5.60156 9.32509 6.67608 10.3996 8.00156 10.3996Z" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M10.4 5.6V8.6C10.4 9.07739 10.5896 9.53523 10.9272 9.8728C11.2648 10.2104 11.7226 10.4 12.2 10.4C12.6774 10.4 13.1352 10.2104 13.4728 9.8728C13.8104 9.53523 14 9.07739 14 8.6V8C14 6.64839 13.5436 5.33636 12.7048 4.27651C11.8661 3.21665 10.694 2.47105 9.37852 2.16051C8.06306 1.84997 6.68129 1.99269 5.45707 2.56554C4.23285 3.13838 3.23791 4.1078 2.63344 5.31672C2.02898 6.52565 1.85041 7.90325 2.12667 9.22633C2.40292 10.5494 3.11782 11.7405 4.15552 12.6065C5.19323 13.4726 6.49295 13.9629 7.84411 13.998C9.19527 14.0331 10.5187 13.611 11.6 12.8" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
</svg>

Before

Width:  |  Height:  |  Size: 1.0 KiB

View File

@@ -421,6 +421,12 @@
"ctrl-[": "editor::Cancel"
}
},
{
"context": "vim_mode == helix_select && !menu",
"bindings": {
"escape": "vim::SwitchToHelixNormalMode"
}
},
{
"context": "(vim_mode == helix_normal || vim_mode == helix_select) && !menu",
"bindings": {

View File

@@ -616,13 +616,9 @@
"search": {
// Whether to show the project search button in the status bar.
"button": true,
// Whether to only match on whole words.
"whole_word": false,
// Whether to match case sensitively.
"case_sensitive": false,
// Whether to include gitignored files in search results.
"include_ignored": false,
// Whether to interpret the search query as a regular expression.
"regex": false,
// Whether to center the cursor on each search match when navigating.
"center_on_match": false
@@ -748,15 +744,8 @@
"hide_root": false,
// Whether to hide the hidden entries in the project panel.
"hide_hidden": false,
// Settings for automatically opening files.
"auto_open": {
// Whether to automatically open newly created files in the editor.
"on_create": true,
// Whether to automatically open files after pasting or duplicating them.
"on_paste": true,
// Whether to automatically open files dropped from external sources.
"on_drop": true
}
// Whether to automatically open files when pasting them in the project panel.
"open_file_on_paste": true
},
"outline_panel": {
// Whether to show the outline panel button in the status bar

0
b.txt Normal file
View File

View File

@@ -1866,14 +1866,10 @@ impl AcpThread {
.checkpoint
.as_ref()
.map(|c| c.git_checkpoint.clone());
// Cancel any in-progress generation before restoring
let cancel_task = self.cancel(cx);
let rewind = self.rewind(id.clone(), cx);
let git_store = self.project.read(cx).git_store().clone();
cx.spawn(async move |_, cx| {
cancel_task.await;
rewind.await?;
if let Some(checkpoint) = checkpoint {
git_store
@@ -1898,25 +1894,9 @@ impl AcpThread {
cx.update(|cx| truncate.run(id.clone(), cx))?.await?;
this.update(cx, |this, cx| {
if let Some((ix, _)) = this.user_message_mut(&id) {
// Collect all terminals from entries that will be removed
let terminals_to_remove: Vec<acp::TerminalId> = this.entries[ix..]
.iter()
.flat_map(|entry| entry.terminals())
.filter_map(|terminal| terminal.read(cx).id().clone().into())
.collect();
let range = ix..this.entries.len();
this.entries.truncate(ix);
cx.emit(AcpThreadEvent::EntriesRemoved(range));
// Kill and remove the terminals
for terminal_id in terminals_to_remove {
if let Some(terminal) = this.terminals.remove(&terminal_id) {
terminal.update(cx, |terminal, cx| {
terminal.kill(cx);
});
}
}
}
this.action_log().update(cx, |action_log, cx| {
action_log.reject_all_edits(Some(telemetry), cx)
@@ -3823,314 +3803,4 @@ mod tests {
}
});
}
/// Tests that restoring a checkpoint properly cleans up terminals that were
/// created after that checkpoint, and cancels any in-progress generation.
///
/// Reproduces issue #35142: When a checkpoint is restored, any terminal processes
/// that were started after that checkpoint should be terminated, and any in-progress
/// AI generation should be canceled.
#[gpui::test]
async fn test_restore_checkpoint_kills_terminal(cx: &mut TestAppContext) {
init_test(cx);
let fs = FakeFs::new(cx.executor());
let project = Project::test(fs, [], cx).await;
let connection = Rc::new(FakeAgentConnection::new());
let thread = cx
.update(|cx| connection.new_thread(project, Path::new(path!("/test")), cx))
.await
.unwrap();
// Send first user message to create a checkpoint
cx.update(|cx| {
thread.update(cx, |thread, cx| {
thread.send(vec!["first message".into()], cx)
})
})
.await
.unwrap();
// Send second message (creates another checkpoint) - we'll restore to this one
cx.update(|cx| {
thread.update(cx, |thread, cx| {
thread.send(vec!["second message".into()], cx)
})
})
.await
.unwrap();
// Create 2 terminals BEFORE the checkpoint that have completed running
let terminal_id_1 = acp::TerminalId(uuid::Uuid::new_v4().to_string().into());
let mock_terminal_1 = cx.new(|cx| {
let builder = ::terminal::TerminalBuilder::new_display_only(
::terminal::terminal_settings::CursorShape::default(),
::terminal::terminal_settings::AlternateScroll::On,
None,
0,
)
.unwrap();
builder.subscribe(cx)
});
thread.update(cx, |thread, cx| {
thread.on_terminal_provider_event(
TerminalProviderEvent::Created {
terminal_id: terminal_id_1.clone(),
label: "echo 'first'".to_string(),
cwd: Some(PathBuf::from("/test")),
output_byte_limit: None,
terminal: mock_terminal_1.clone(),
},
cx,
);
});
thread.update(cx, |thread, cx| {
thread.on_terminal_provider_event(
TerminalProviderEvent::Output {
terminal_id: terminal_id_1.clone(),
data: b"first\n".to_vec(),
},
cx,
);
});
thread.update(cx, |thread, cx| {
thread.on_terminal_provider_event(
TerminalProviderEvent::Exit {
terminal_id: terminal_id_1.clone(),
status: acp::TerminalExitStatus {
exit_code: Some(0),
signal: None,
meta: None,
},
},
cx,
);
});
let terminal_id_2 = acp::TerminalId(uuid::Uuid::new_v4().to_string().into());
let mock_terminal_2 = cx.new(|cx| {
let builder = ::terminal::TerminalBuilder::new_display_only(
::terminal::terminal_settings::CursorShape::default(),
::terminal::terminal_settings::AlternateScroll::On,
None,
0,
)
.unwrap();
builder.subscribe(cx)
});
thread.update(cx, |thread, cx| {
thread.on_terminal_provider_event(
TerminalProviderEvent::Created {
terminal_id: terminal_id_2.clone(),
label: "echo 'second'".to_string(),
cwd: Some(PathBuf::from("/test")),
output_byte_limit: None,
terminal: mock_terminal_2.clone(),
},
cx,
);
});
thread.update(cx, |thread, cx| {
thread.on_terminal_provider_event(
TerminalProviderEvent::Output {
terminal_id: terminal_id_2.clone(),
data: b"second\n".to_vec(),
},
cx,
);
});
thread.update(cx, |thread, cx| {
thread.on_terminal_provider_event(
TerminalProviderEvent::Exit {
terminal_id: terminal_id_2.clone(),
status: acp::TerminalExitStatus {
exit_code: Some(0),
signal: None,
meta: None,
},
},
cx,
);
});
// Get the second message ID to restore to
let second_message_id = thread.read_with(cx, |thread, _| {
// At this point we have:
// - Index 0: First user message (with checkpoint)
// - Index 1: Second user message (with checkpoint)
// No assistant responses because FakeAgentConnection just returns EndTurn
let AgentThreadEntry::UserMessage(message) = &thread.entries[1] else {
panic!("expected user message at index 1");
};
message.id.clone().unwrap()
});
// Create a terminal AFTER the checkpoint we'll restore to.
// This simulates the AI agent starting a long-running terminal command.
let terminal_id = acp::TerminalId(uuid::Uuid::new_v4().to_string().into());
let mock_terminal = cx.new(|cx| {
let builder = ::terminal::TerminalBuilder::new_display_only(
::terminal::terminal_settings::CursorShape::default(),
::terminal::terminal_settings::AlternateScroll::On,
None,
0,
)
.unwrap();
builder.subscribe(cx)
});
// Register the terminal as created
thread.update(cx, |thread, cx| {
thread.on_terminal_provider_event(
TerminalProviderEvent::Created {
terminal_id: terminal_id.clone(),
label: "sleep 1000".to_string(),
cwd: Some(PathBuf::from("/test")),
output_byte_limit: None,
terminal: mock_terminal.clone(),
},
cx,
);
});
// Simulate the terminal producing output (still running)
thread.update(cx, |thread, cx| {
thread.on_terminal_provider_event(
TerminalProviderEvent::Output {
terminal_id: terminal_id.clone(),
data: b"terminal is running...\n".to_vec(),
},
cx,
);
});
// Create a tool call entry that references this terminal
// This represents the agent requesting a terminal command
thread.update(cx, |thread, cx| {
thread
.handle_session_update(
acp::SessionUpdate::ToolCall(acp::ToolCall {
id: acp::ToolCallId("terminal-tool-1".into()),
title: "Running command".into(),
kind: acp::ToolKind::Execute,
status: acp::ToolCallStatus::InProgress,
content: vec![acp::ToolCallContent::Terminal {
terminal_id: terminal_id.clone(),
}],
locations: vec![],
raw_input: Some(
serde_json::json!({"command": "sleep 1000", "cd": "/test"}),
),
raw_output: None,
meta: None,
}),
cx,
)
.unwrap();
});
// Verify terminal exists and is in the thread
let terminal_exists_before =
thread.read_with(cx, |thread, _| thread.terminals.contains_key(&terminal_id));
assert!(
terminal_exists_before,
"Terminal should exist before checkpoint restore"
);
// Verify the terminal's underlying task is still running (not completed)
let terminal_running_before = thread.read_with(cx, |thread, _cx| {
let terminal_entity = thread.terminals.get(&terminal_id).unwrap();
terminal_entity.read_with(cx, |term, _cx| {
term.output().is_none() // output is None means it's still running
})
});
assert!(
terminal_running_before,
"Terminal should be running before checkpoint restore"
);
// Verify we have the expected entries before restore
let entry_count_before = thread.read_with(cx, |thread, _| thread.entries.len());
assert!(
entry_count_before > 1,
"Should have multiple entries before restore"
);
// Restore the checkpoint to the second message.
// This should:
// 1. Cancel any in-progress generation (via the cancel() call)
// 2. Remove the terminal that was created after that point
thread
.update(cx, |thread, cx| {
thread.restore_checkpoint(second_message_id, cx)
})
.await
.unwrap();
// Verify that no send_task is in progress after restore
// (cancel() clears the send_task)
let has_send_task_after = thread.read_with(cx, |thread, _| thread.send_task.is_some());
assert!(
!has_send_task_after,
"Should not have a send_task after restore (cancel should have cleared it)"
);
// Verify the entries were truncated (restoring to index 1 truncates at 1, keeping only index 0)
let entry_count = thread.read_with(cx, |thread, _| thread.entries.len());
assert_eq!(
entry_count, 1,
"Should have 1 entry after restore (only the first user message)"
);
// Verify the 2 completed terminals from before the checkpoint still exist
let terminal_1_exists = thread.read_with(cx, |thread, _| {
thread.terminals.contains_key(&terminal_id_1)
});
assert!(
terminal_1_exists,
"Terminal 1 (from before checkpoint) should still exist"
);
let terminal_2_exists = thread.read_with(cx, |thread, _| {
thread.terminals.contains_key(&terminal_id_2)
});
assert!(
terminal_2_exists,
"Terminal 2 (from before checkpoint) should still exist"
);
// Verify they're still in completed state
let terminal_1_completed = thread.read_with(cx, |thread, _cx| {
let terminal_entity = thread.terminals.get(&terminal_id_1).unwrap();
terminal_entity.read_with(cx, |term, _cx| term.output().is_some())
});
assert!(terminal_1_completed, "Terminal 1 should still be completed");
let terminal_2_completed = thread.read_with(cx, |thread, _cx| {
let terminal_entity = thread.terminals.get(&terminal_id_2).unwrap();
terminal_entity.read_with(cx, |term, _cx| term.output().is_some())
});
assert!(terminal_2_completed, "Terminal 2 should still be completed");
// Verify the running terminal (created after checkpoint) was removed
let terminal_3_exists =
thread.read_with(cx, |thread, _| thread.terminals.contains_key(&terminal_id));
assert!(
!terminal_3_exists,
"Terminal 3 (created after checkpoint) should have been removed"
);
// Verify total count is 2 (the two from before the checkpoint)
let terminal_count = thread.read_with(cx, |thread, _| thread.terminals.len());
assert_eq!(
terminal_count, 2,
"Should have exactly 2 terminals (the completed ones from before checkpoint)"
);
}
}

View File

@@ -133,7 +133,9 @@ impl LanguageModels {
for model in provider.provided_models(cx) {
let model_info = Self::map_language_model_to_info(&model, &provider);
let model_id = model_info.id.clone();
provider_models.push(model_info);
if !recommended_models.contains(&(model.provider_id(), model.id())) {
provider_models.push(model_info);
}
models.insert(model_id, model);
}
if !provider_models.is_empty() {

View File

@@ -150,7 +150,6 @@ impl DbThread {
.unwrap_or_default(),
input: tool_use.input,
is_input_complete: true,
thought_signature: None,
},
));
}
@@ -182,7 +181,6 @@ impl DbThread {
crate::Message::Agent(AgentMessage {
content,
tool_results,
reasoning_details: None,
})
}
language_model::Role::System => {

View File

@@ -703,7 +703,6 @@ impl EditAgent {
role: Role::User,
content: vec![MessageContent::Text(prompt)],
cache: false,
reasoning_details: None,
});
// Include tools in the request so that we can take advantage of

View File

@@ -1081,7 +1081,6 @@ fn message(
role,
content: contents.into_iter().collect(),
cache: false,
reasoning_details: None,
}
}
@@ -1109,7 +1108,6 @@ fn tool_use(
raw_input: serde_json::to_string_pretty(&input).unwrap(),
input: serde_json::to_value(input).unwrap(),
is_input_complete: true,
thought_signature: None,
})
}
@@ -1269,7 +1267,6 @@ impl EvalAssertion {
role: Role::User,
content: vec![prompt.into()],
cache: false,
reasoning_details: None,
}],
thinking_allowed: true,
..Default::default()
@@ -1596,7 +1593,6 @@ impl EditAgentTest {
role: Role::System,
content: vec![MessageContent::Text(system_prompt)],
cache: true,
reasoning_details: None,
}]
.into_iter()
.chain(eval.conversation)

View File

@@ -215,8 +215,7 @@ async fn test_prompt_caching(cx: &mut TestAppContext) {
vec![LanguageModelRequestMessage {
role: Role::User,
content: vec!["Message 1".into()],
cache: true,
reasoning_details: None,
cache: true
}]
);
fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::Text(
@@ -240,20 +239,17 @@ async fn test_prompt_caching(cx: &mut TestAppContext) {
LanguageModelRequestMessage {
role: Role::User,
content: vec!["Message 1".into()],
cache: false,
reasoning_details: None,
cache: false
},
LanguageModelRequestMessage {
role: Role::Assistant,
content: vec!["Response to Message 1".into()],
cache: false,
reasoning_details: None,
cache: false
},
LanguageModelRequestMessage {
role: Role::User,
content: vec!["Message 2".into()],
cache: true,
reasoning_details: None,
cache: true
}
]
);
@@ -278,7 +274,6 @@ async fn test_prompt_caching(cx: &mut TestAppContext) {
raw_input: json!({"text": "test"}).to_string(),
input: json!({"text": "test"}),
is_input_complete: true,
thought_signature: None,
};
fake_model
.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(tool_use.clone()));
@@ -299,44 +294,37 @@ async fn test_prompt_caching(cx: &mut TestAppContext) {
LanguageModelRequestMessage {
role: Role::User,
content: vec!["Message 1".into()],
cache: false,
reasoning_details: None,
cache: false
},
LanguageModelRequestMessage {
role: Role::Assistant,
content: vec!["Response to Message 1".into()],
cache: false,
reasoning_details: None,
cache: false
},
LanguageModelRequestMessage {
role: Role::User,
content: vec!["Message 2".into()],
cache: false,
reasoning_details: None,
cache: false
},
LanguageModelRequestMessage {
role: Role::Assistant,
content: vec!["Response to Message 2".into()],
cache: false,
reasoning_details: None,
cache: false
},
LanguageModelRequestMessage {
role: Role::User,
content: vec!["Use the echo tool".into()],
cache: false,
reasoning_details: None,
cache: false
},
LanguageModelRequestMessage {
role: Role::Assistant,
content: vec![MessageContent::ToolUse(tool_use)],
cache: false,
reasoning_details: None,
cache: false
},
LanguageModelRequestMessage {
role: Role::User,
content: vec![MessageContent::ToolResult(tool_result)],
cache: true,
reasoning_details: None,
cache: true
}
]
);
@@ -473,7 +461,6 @@ async fn test_tool_authorization(cx: &mut TestAppContext) {
raw_input: "{}".into(),
input: json!({}),
is_input_complete: true,
thought_signature: None,
},
));
fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(
@@ -483,7 +470,6 @@ async fn test_tool_authorization(cx: &mut TestAppContext) {
raw_input: "{}".into(),
input: json!({}),
is_input_complete: true,
thought_signature: None,
},
));
fake_model.end_last_completion_stream();
@@ -534,7 +520,6 @@ async fn test_tool_authorization(cx: &mut TestAppContext) {
raw_input: "{}".into(),
input: json!({}),
is_input_complete: true,
thought_signature: None,
},
));
fake_model.end_last_completion_stream();
@@ -569,7 +554,6 @@ async fn test_tool_authorization(cx: &mut TestAppContext) {
raw_input: "{}".into(),
input: json!({}),
is_input_complete: true,
thought_signature: None,
},
));
fake_model.end_last_completion_stream();
@@ -608,7 +592,6 @@ async fn test_tool_hallucination(cx: &mut TestAppContext) {
raw_input: "{}".into(),
input: json!({}),
is_input_complete: true,
thought_signature: None,
},
));
fake_model.end_last_completion_stream();
@@ -638,7 +621,6 @@ async fn test_resume_after_tool_use_limit(cx: &mut TestAppContext) {
raw_input: "{}".into(),
input: serde_json::to_value(&EchoToolInput { text: "def".into() }).unwrap(),
is_input_complete: true,
thought_signature: None,
};
fake_model
.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(tool_use.clone()));
@@ -659,20 +641,17 @@ async fn test_resume_after_tool_use_limit(cx: &mut TestAppContext) {
LanguageModelRequestMessage {
role: Role::User,
content: vec!["abc".into()],
cache: false,
reasoning_details: None,
cache: false
},
LanguageModelRequestMessage {
role: Role::Assistant,
content: vec![MessageContent::ToolUse(tool_use.clone())],
cache: false,
reasoning_details: None,
cache: false
},
LanguageModelRequestMessage {
role: Role::User,
content: vec![MessageContent::ToolResult(tool_result.clone())],
cache: true,
reasoning_details: None,
cache: true
},
]
);
@@ -698,26 +677,22 @@ async fn test_resume_after_tool_use_limit(cx: &mut TestAppContext) {
LanguageModelRequestMessage {
role: Role::User,
content: vec!["abc".into()],
cache: false,
reasoning_details: None,
cache: false
},
LanguageModelRequestMessage {
role: Role::Assistant,
content: vec![MessageContent::ToolUse(tool_use)],
cache: false,
reasoning_details: None,
cache: false
},
LanguageModelRequestMessage {
role: Role::User,
content: vec![MessageContent::ToolResult(tool_result)],
cache: false,
reasoning_details: None,
cache: false
},
LanguageModelRequestMessage {
role: Role::User,
content: vec!["Continue where you left off".into()],
cache: true,
reasoning_details: None,
cache: true
}
]
);
@@ -756,7 +731,6 @@ async fn test_send_after_tool_use_limit(cx: &mut TestAppContext) {
raw_input: "{}".into(),
input: serde_json::to_value(&EchoToolInput { text: "def".into() }).unwrap(),
is_input_complete: true,
thought_signature: None,
};
let tool_result = LanguageModelToolResult {
tool_use_id: "tool_id_1".into(),
@@ -791,26 +765,22 @@ async fn test_send_after_tool_use_limit(cx: &mut TestAppContext) {
LanguageModelRequestMessage {
role: Role::User,
content: vec!["abc".into()],
cache: false,
reasoning_details: None,
cache: false
},
LanguageModelRequestMessage {
role: Role::Assistant,
content: vec![MessageContent::ToolUse(tool_use)],
cache: false,
reasoning_details: None,
cache: false
},
LanguageModelRequestMessage {
role: Role::User,
content: vec![MessageContent::ToolResult(tool_result)],
cache: false,
reasoning_details: None,
cache: false
},
LanguageModelRequestMessage {
role: Role::User,
content: vec!["ghi".into()],
cache: true,
reasoning_details: None,
cache: true
}
]
);
@@ -1067,7 +1037,6 @@ async fn test_mcp_tools(cx: &mut TestAppContext) {
raw_input: json!({"text": "test"}).to_string(),
input: json!({"text": "test"}),
is_input_complete: true,
thought_signature: None,
},
));
fake_model.end_last_completion_stream();
@@ -1111,7 +1080,6 @@ async fn test_mcp_tools(cx: &mut TestAppContext) {
raw_input: json!({"text": "mcp"}).to_string(),
input: json!({"text": "mcp"}),
is_input_complete: true,
thought_signature: None,
},
));
fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(
@@ -1121,7 +1089,6 @@ async fn test_mcp_tools(cx: &mut TestAppContext) {
raw_input: json!({"text": "native"}).to_string(),
input: json!({"text": "native"}),
is_input_complete: true,
thought_signature: None,
},
));
fake_model.end_last_completion_stream();
@@ -1821,7 +1788,6 @@ async fn test_building_request_with_pending_tools(cx: &mut TestAppContext) {
raw_input: "{}".into(),
input: json!({}),
is_input_complete: true,
thought_signature: None,
};
let echo_tool_use = LanguageModelToolUse {
id: "tool_id_2".into(),
@@ -1829,7 +1795,6 @@ async fn test_building_request_with_pending_tools(cx: &mut TestAppContext) {
raw_input: json!({"text": "test"}).to_string(),
input: json!({"text": "test"}),
is_input_complete: true,
thought_signature: None,
};
fake_model.send_last_completion_stream_text_chunk("Hi!");
fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(
@@ -1853,8 +1818,7 @@ async fn test_building_request_with_pending_tools(cx: &mut TestAppContext) {
LanguageModelRequestMessage {
role: Role::User,
content: vec!["Hey!".into()],
cache: true,
reasoning_details: None,
cache: true
},
LanguageModelRequestMessage {
role: Role::Assistant,
@@ -1862,8 +1826,7 @@ async fn test_building_request_with_pending_tools(cx: &mut TestAppContext) {
MessageContent::Text("Hi!".into()),
MessageContent::ToolUse(echo_tool_use.clone())
],
cache: false,
reasoning_details: None,
cache: false
},
LanguageModelRequestMessage {
role: Role::User,
@@ -1874,8 +1837,7 @@ async fn test_building_request_with_pending_tools(cx: &mut TestAppContext) {
content: "test".into(),
output: Some("test".into())
})],
cache: false,
reasoning_details: None,
cache: false
},
],
);
@@ -2038,7 +2000,6 @@ async fn test_tool_updates_to_completion(cx: &mut TestAppContext) {
raw_input: input.to_string(),
input,
is_input_complete: false,
thought_signature: None,
},
));
@@ -2051,7 +2012,6 @@ async fn test_tool_updates_to_completion(cx: &mut TestAppContext) {
raw_input: input.to_string(),
input,
is_input_complete: true,
thought_signature: None,
},
));
fake_model.end_last_completion_stream();
@@ -2254,7 +2214,6 @@ async fn test_send_retry_finishes_tool_calls_on_error(cx: &mut TestAppContext) {
raw_input: json!({"text": "test"}).to_string(),
input: json!({"text": "test"}),
is_input_complete: true,
thought_signature: None,
};
fake_model.send_last_completion_stream_event(LanguageModelCompletionEvent::ToolUse(
tool_use_1.clone(),
@@ -2273,14 +2232,12 @@ async fn test_send_retry_finishes_tool_calls_on_error(cx: &mut TestAppContext) {
LanguageModelRequestMessage {
role: Role::User,
content: vec!["Call the echo tool!".into()],
cache: false,
reasoning_details: None,
cache: false
},
LanguageModelRequestMessage {
role: Role::Assistant,
content: vec![language_model::MessageContent::ToolUse(tool_use_1.clone())],
cache: false,
reasoning_details: None,
cache: false
},
LanguageModelRequestMessage {
role: Role::User,
@@ -2293,8 +2250,7 @@ async fn test_send_retry_finishes_tool_calls_on_error(cx: &mut TestAppContext) {
output: Some("test".into())
}
)],
cache: true,
reasoning_details: None,
cache: true
},
]
);
@@ -2308,8 +2264,7 @@ async fn test_send_retry_finishes_tool_calls_on_error(cx: &mut TestAppContext) {
thread.last_message(),
Some(Message::Agent(AgentMessage {
content: vec![AgentMessageContent::Text("Done".into())],
tool_results: IndexMap::default(),
reasoning_details: None,
tool_results: IndexMap::default()
}))
);
})

View File

@@ -113,7 +113,6 @@ impl Message {
role: Role::User,
content: vec!["Continue where you left off".into()],
cache: false,
reasoning_details: None,
}],
}
}
@@ -178,7 +177,6 @@ impl UserMessage {
role: Role::User,
content: Vec::with_capacity(self.content.len()),
cache: false,
reasoning_details: None,
};
const OPEN_CONTEXT: &str = "<context>\n\
@@ -446,7 +444,6 @@ impl AgentMessage {
role: Role::Assistant,
content: Vec::with_capacity(self.content.len()),
cache: false,
reasoning_details: self.reasoning_details.clone(),
};
for chunk in &self.content {
match chunk {
@@ -482,7 +479,6 @@ impl AgentMessage {
role: Role::User,
content: Vec::new(),
cache: false,
reasoning_details: None,
};
for tool_result in self.tool_results.values() {
@@ -512,7 +508,6 @@ impl AgentMessage {
pub struct AgentMessage {
pub content: Vec<AgentMessageContent>,
pub tool_results: IndexMap<LanguageModelToolUseId, LanguageModelToolResult>,
pub reasoning_details: Option<serde_json::Value>,
}
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
@@ -1398,18 +1393,6 @@ impl Thread {
self.handle_thinking_event(text, signature, event_stream, cx)
}
RedactedThinking { data } => self.handle_redacted_thinking_event(data, cx),
ReasoningDetails(details) => {
let last_message = self.pending_message();
// Store the last non-empty reasoning_details (overwrites earlier ones)
// This ensures we keep the encrypted reasoning with signatures, not the early text reasoning
if let serde_json::Value::Array(ref arr) = details {
if !arr.is_empty() {
last_message.reasoning_details = Some(details);
}
} else {
last_message.reasoning_details = Some(details);
}
}
ToolUse(tool_use) => {
return Ok(self.handle_tool_use_event(tool_use, event_stream, cx));
}
@@ -1689,7 +1672,6 @@ impl Thread {
role: Role::User,
content: vec![SUMMARIZE_THREAD_DETAILED_PROMPT.into()],
cache: false,
reasoning_details: None,
});
let task = cx
@@ -1756,7 +1738,6 @@ impl Thread {
role: Role::User,
content: vec![SUMMARIZE_THREAD_PROMPT.into()],
cache: false,
reasoning_details: None,
});
self.pending_title_generation = Some(cx.spawn(async move |this, cx| {
let mut title = String::new();
@@ -2006,7 +1987,6 @@ impl Thread {
role: Role::System,
content: vec![system_prompt.into()],
cache: false,
reasoning_details: None,
}];
for message in &self.messages {
messages.extend(message.to_request());

View File

@@ -50,14 +50,13 @@ impl crate::AgentServer for CustomAgentServer {
fn set_default_mode(&self, mode_id: Option<acp::SessionModeId>, fs: Arc<dyn Fs>, cx: &mut App) {
let name = self.name();
update_settings_file(fs, cx, move |settings, _| {
if let Some(settings) = settings
settings
.agent_servers
.get_or_insert_default()
.custom
.get_mut(&name)
{
settings.default_mode = mode_id.map(|m| m.to_string())
}
.unwrap()
.default_mode = mode_id.map(|m| m.to_string())
});
}

View File

@@ -109,8 +109,6 @@ impl ContextPickerCompletionProvider {
icon_path: Some(mode.icon().path().into()),
documentation: None,
source: project::CompletionSource::Custom,
match_start: None,
snippet_deduplication_key: None,
insert_text_mode: None,
// This ensures that when a user accepts this completion, the
// completion menu will still be shown after "@category " is
@@ -148,8 +146,6 @@ impl ContextPickerCompletionProvider {
documentation: None,
insert_text_mode: None,
source: project::CompletionSource::Custom,
match_start: None,
snippet_deduplication_key: None,
icon_path: Some(icon_for_completion),
confirm: Some(confirm_completion_callback(
thread_entry.title().clone(),
@@ -181,8 +177,6 @@ impl ContextPickerCompletionProvider {
documentation: None,
insert_text_mode: None,
source: project::CompletionSource::Custom,
match_start: None,
snippet_deduplication_key: None,
icon_path: Some(icon_path),
confirm: Some(confirm_completion_callback(
rule.title,
@@ -239,8 +233,6 @@ impl ContextPickerCompletionProvider {
documentation: None,
source: project::CompletionSource::Custom,
icon_path: Some(completion_icon_path),
match_start: None,
snippet_deduplication_key: None,
insert_text_mode: None,
confirm: Some(confirm_completion_callback(
file_name,
@@ -292,8 +284,6 @@ impl ContextPickerCompletionProvider {
documentation: None,
source: project::CompletionSource::Custom,
icon_path: Some(icon_path),
match_start: None,
snippet_deduplication_key: None,
insert_text_mode: None,
confirm: Some(confirm_completion_callback(
symbol.name.into(),
@@ -326,8 +316,6 @@ impl ContextPickerCompletionProvider {
documentation: None,
source: project::CompletionSource::Custom,
icon_path: Some(icon_path),
match_start: None,
snippet_deduplication_key: None,
insert_text_mode: None,
confirm: Some(confirm_completion_callback(
url_to_fetch.to_string().into(),
@@ -396,8 +384,6 @@ impl ContextPickerCompletionProvider {
icon_path: Some(action.icon().path().into()),
documentation: None,
source: project::CompletionSource::Custom,
match_start: None,
snippet_deduplication_key: None,
insert_text_mode: None,
// This ensures that when a user accepts this completion, the
// completion menu will still be shown after "@category " is
@@ -708,18 +694,14 @@ fn build_symbol_label(symbol_name: &str, file_name: &str, line: u32, cx: &App) -
}
fn build_code_label_for_full_path(file_name: &str, directory: Option<&str>, cx: &App) -> CodeLabel {
let path = cx
.theme()
.syntax()
.highlight_id("variable")
.map(HighlightId);
let comment_id = cx.theme().syntax().highlight_id("comment").map(HighlightId);
let mut label = CodeLabelBuilder::default();
label.push_str(file_name, None);
label.push_str(" ", None);
if let Some(directory) = directory {
label.push_str(directory, path);
label.push_str(directory, comment_id);
}
label.build()
@@ -788,8 +770,6 @@ impl CompletionProvider for ContextPickerCompletionProvider {
)),
source: project::CompletionSource::Custom,
icon_path: None,
match_start: None,
snippet_deduplication_key: None,
insert_text_mode: None,
confirm: Some(Arc::new({
let editor = editor.clone();

View File

@@ -15,7 +15,6 @@ use editor::{
EditorEvent, EditorMode, EditorSnapshot, EditorStyle, ExcerptId, FoldPlaceholder, Inlay,
MultiBuffer, ToOffset,
actions::Paste,
code_context_menus::CodeContextMenu,
display_map::{Crease, CreaseId, FoldId},
scroll::Autoscroll,
};
@@ -273,15 +272,6 @@ impl MessageEditor {
self.editor.read(cx).is_empty(cx)
}
pub fn is_completions_menu_visible(&self, cx: &App) -> bool {
self.editor
.read(cx)
.context_menu()
.borrow()
.as_ref()
.is_some_and(|menu| matches!(menu, CodeContextMenu::Completions(_)) && menu.visible())
}
pub fn mentions(&self) -> HashSet<MentionUri> {
self.mention_set
.mentions
@@ -846,45 +836,6 @@ impl MessageEditor {
cx.emit(MessageEditorEvent::Send)
}
pub fn trigger_completion_menu(&mut self, window: &mut Window, cx: &mut Context<Self>) {
let editor = self.editor.clone();
cx.spawn_in(window, async move |_, cx| {
editor
.update_in(cx, |editor, window, cx| {
let menu_is_open =
editor.context_menu().borrow().as_ref().is_some_and(|menu| {
matches!(menu, CodeContextMenu::Completions(_)) && menu.visible()
});
let has_at_sign = {
let snapshot = editor.display_snapshot(cx);
let cursor = editor.selections.newest::<text::Point>(&snapshot).head();
let offset = cursor.to_offset(&snapshot);
if offset > 0 {
snapshot
.buffer_snapshot()
.reversed_chars_at(offset)
.next()
.map(|sign| sign == '@')
.unwrap_or(false)
} else {
false
}
};
if menu_is_open && has_at_sign {
return;
}
editor.insert("@", window, cx);
editor.show_completions(&editor::actions::ShowCompletions, window, cx);
})
.log_err();
})
.detach();
}
fn chat(&mut self, _: &Chat, _: &mut Window, cx: &mut Context<Self>) {
self.send(cx);
}

View File

@@ -4188,8 +4188,6 @@ impl AcpThreadView {
.justify_between()
.child(
h_flex()
.gap_0p5()
.child(self.render_add_context_button(cx))
.child(self.render_follow_toggle(cx))
.children(self.render_burn_mode_toggle(cx)),
)
@@ -4504,29 +4502,6 @@ impl AcpThreadView {
}))
}
fn render_add_context_button(&self, cx: &mut Context<Self>) -> impl IntoElement {
let message_editor = self.message_editor.clone();
let menu_visible = message_editor.read(cx).is_completions_menu_visible(cx);
IconButton::new("add-context", IconName::AtSign)
.icon_size(IconSize::Small)
.icon_color(Color::Muted)
.when(!menu_visible, |this| {
this.tooltip(move |_window, cx| {
Tooltip::with_meta("Add Context", None, "Or type @ to include context", cx)
})
})
.on_click(cx.listener(move |_this, _, window, cx| {
let message_editor_clone = message_editor.clone();
window.defer(cx, move |window, cx| {
message_editor_clone.update(cx, |message_editor, cx| {
message_editor.trigger_completion_menu(window, cx);
});
});
}))
}
fn render_markdown(&self, markdown: Entity<Markdown>, style: MarkdownStyle) -> MarkdownElement {
let workspace = self.workspace.clone();
MarkdownElement::new(markdown, style).on_url_click(move |text, window, cx| {

View File

@@ -8,7 +8,6 @@ use std::{ops::Range, sync::Arc};
use agent::ContextServerRegistry;
use anyhow::Result;
use client::zed_urls;
use cloud_llm_client::{Plan, PlanV1, PlanV2};
use collections::HashMap;
use context_server::ContextServerId;
@@ -27,20 +26,18 @@ use language_model::{
use language_models::AllLanguageModelSettings;
use notifications::status_toast::{StatusToast, ToastIcon};
use project::{
agent_server_store::{
AgentServerStore, CLAUDE_CODE_NAME, CODEX_NAME, ExternalAgentServerName, GEMINI_NAME,
},
agent_server_store::{AgentServerStore, CLAUDE_CODE_NAME, CODEX_NAME, GEMINI_NAME},
context_server_store::{ContextServerConfiguration, ContextServerStatus, ContextServerStore},
};
use settings::{Settings, SettingsStore, update_settings_file};
use ui::{
Button, ButtonStyle, Chip, CommonAnimationExt, ContextMenu, ContextMenuEntry, Disclosure,
Divider, DividerColor, ElevationIndex, IconName, IconPosition, IconSize, Indicator, LabelSize,
PopoverMenu, Switch, SwitchColor, Tooltip, WithScrollbar, prelude::*,
Button, ButtonStyle, Chip, CommonAnimationExt, ContextMenu, Disclosure, Divider, DividerColor,
ElevationIndex, IconName, IconPosition, IconSize, Indicator, LabelSize, PopoverMenu, Switch,
SwitchColor, Tooltip, WithScrollbar, prelude::*,
};
use util::ResultExt as _;
use workspace::{Workspace, create_and_open_local_file};
use zed_actions::{ExtensionCategoryFilter, OpenBrowser};
use zed_actions::ExtensionCategoryFilter;
pub(crate) use configure_context_server_modal::ConfigureContextServerModal;
pub(crate) use configure_context_server_tools_modal::ConfigureContextServerToolsModal;
@@ -418,7 +415,6 @@ impl AgentConfiguration {
cx: &mut Context<Self>,
) -> impl IntoElement {
let providers = LanguageModelRegistry::read_global(cx).providers();
let popover_menu = PopoverMenu::new("add-provider-popover")
.trigger(
Button::new("add-provider", "Add Provider")
@@ -429,6 +425,7 @@ impl AgentConfiguration {
.icon_color(Color::Muted)
.label_size(LabelSize::Small),
)
.anchor(gpui::Corner::TopRight)
.menu({
let workspace = self.workspace.clone();
move |window, cx| {
@@ -450,11 +447,6 @@ impl AgentConfiguration {
})
}))
}
})
.anchor(gpui::Corner::TopRight)
.offset(gpui::Point {
x: px(0.0),
y: px(2.0),
});
v_flex()
@@ -549,6 +541,7 @@ impl AgentConfiguration {
.icon_color(Color::Muted)
.label_size(LabelSize::Small),
)
.anchor(gpui::Corner::TopRight)
.menu({
move |window, cx| {
Some(ContextMenu::build(window, cx, |menu, _window, _cx| {
@@ -571,11 +564,6 @@ impl AgentConfiguration {
})
}))
}
})
.anchor(gpui::Corner::TopRight)
.offset(gpui::Point {
x: px(0.0),
y: px(2.0),
});
v_flex()
@@ -955,7 +943,7 @@ impl AgentConfiguration {
.cloned()
.collect::<Vec<_>>();
let user_defined_agents: Vec<_> = user_defined_agents
let user_defined_agents = user_defined_agents
.into_iter()
.map(|name| {
let icon = if let Some(icon_path) = agent_server_store.agent_icon(&name) {
@@ -963,93 +951,27 @@ impl AgentConfiguration {
} else {
AgentIcon::Name(IconName::Ai)
};
(name, icon)
self.render_agent_server(icon, name, true)
.into_any_element()
})
.collect();
.collect::<Vec<_>>();
let add_agent_popover = PopoverMenu::new("add-agent-server-popover")
.trigger(
Button::new("add-agent", "Add Agent")
.style(ButtonStyle::Outlined)
.icon_position(IconPosition::Start)
.icon(IconName::Plus)
.icon_size(IconSize::Small)
.icon_color(Color::Muted)
.label_size(LabelSize::Small),
)
.menu({
move |window, cx| {
Some(ContextMenu::build(window, cx, |menu, _window, _cx| {
menu.entry("Install from Extensions", None, {
|window, cx| {
window.dispatch_action(
zed_actions::Extensions {
category_filter: Some(
ExtensionCategoryFilter::AgentServers,
),
id: None,
}
.boxed_clone(),
cx,
)
}
let add_agens_button = Button::new("add-agent", "Add Agent")
.style(ButtonStyle::Outlined)
.icon_position(IconPosition::Start)
.icon(IconName::Plus)
.icon_size(IconSize::Small)
.icon_color(Color::Muted)
.label_size(LabelSize::Small)
.on_click(move |_, window, cx| {
if let Some(workspace) = window.root().flatten() {
let workspace = workspace.downgrade();
window
.spawn(cx, async |cx| {
open_new_agent_servers_entry_in_settings_editor(workspace, cx).await
})
.entry("Add Custom Agent", None, {
move |window, cx| {
if let Some(workspace) = window.root().flatten() {
let workspace = workspace.downgrade();
window
.spawn(cx, async |cx| {
open_new_agent_servers_entry_in_settings_editor(
workspace, cx,
)
.await
})
.detach_and_log_err(cx);
}
}
})
.separator()
.header("Learn More")
.item(
ContextMenuEntry::new("Agent Servers Docs")
.icon(IconName::ArrowUpRight)
.icon_color(Color::Muted)
.icon_position(IconPosition::End)
.handler({
move |window, cx| {
window.dispatch_action(
Box::new(OpenBrowser {
url: zed_urls::agent_server_docs(cx),
}),
cx,
);
}
}),
)
.item(
ContextMenuEntry::new("ACP Docs")
.icon(IconName::ArrowUpRight)
.icon_color(Color::Muted)
.icon_position(IconPosition::End)
.handler({
move |window, cx| {
window.dispatch_action(
Box::new(OpenBrowser {
url: "https://agentclientprotocol.com/".into(),
}),
cx,
);
}
}),
)
}))
.detach_and_log_err(cx);
}
})
.anchor(gpui::Corner::TopRight)
.offset(gpui::Point {
x: px(0.0),
y: px(2.0),
});
v_flex()
@@ -1060,7 +982,7 @@ impl AgentConfiguration {
.child(self.render_section_title(
"External Agents",
"All agents connected through the Agent Client Protocol.",
add_agent_popover.into_any_element(),
add_agens_button.into_any_element(),
))
.child(
v_flex()
@@ -1071,29 +993,26 @@ impl AgentConfiguration {
AgentIcon::Name(IconName::AiClaude),
"Claude Code",
false,
cx,
))
.child(Divider::horizontal().color(DividerColor::BorderFaded))
.child(self.render_agent_server(
AgentIcon::Name(IconName::AiOpenAi),
"Codex CLI",
false,
cx,
))
.child(Divider::horizontal().color(DividerColor::BorderFaded))
.child(self.render_agent_server(
AgentIcon::Name(IconName::AiGemini),
"Gemini CLI",
false,
cx,
))
.map(|mut parent| {
for (name, icon) in user_defined_agents {
for agent in user_defined_agents {
parent = parent
.child(
Divider::horizontal().color(DividerColor::BorderFaded),
)
.child(self.render_agent_server(icon, name, true, cx));
.child(agent);
}
parent
}),
@@ -1106,7 +1025,6 @@ impl AgentConfiguration {
icon: AgentIcon,
name: impl Into<SharedString>,
external: bool,
cx: &mut Context<Self>,
) -> impl IntoElement {
let name = name.into();
let icon = match icon {
@@ -1121,53 +1039,28 @@ impl AgentConfiguration {
let tooltip_id = SharedString::new(format!("agent-source-{}", name));
let tooltip_message = format!("The {} agent was installed from an extension.", name);
let agent_server_name = ExternalAgentServerName(name.clone());
let uninstall_btn_id = SharedString::from(format!("uninstall-{}", name));
let uninstall_button = IconButton::new(uninstall_btn_id, IconName::Trash)
.icon_color(Color::Muted)
.icon_size(IconSize::Small)
.tooltip(Tooltip::text("Uninstall Agent Extension"))
.on_click(cx.listener(move |this, _, _window, cx| {
let agent_name = agent_server_name.clone();
if let Some(ext_id) = this.agent_server_store.update(cx, |store, _cx| {
store.get_extension_id_for_agent(&agent_name)
}) {
ExtensionStore::global(cx)
.update(cx, |store, cx| store.uninstall_extension(ext_id, cx))
.detach_and_log_err(cx);
}
}));
h_flex()
.gap_1()
.justify_between()
.gap_1p5()
.child(icon)
.child(Label::new(name))
.when(external, |this| {
this.child(
div()
.id(tooltip_id)
.flex_none()
.tooltip(Tooltip::text(tooltip_message))
.child(
Icon::new(IconName::ZedSrcExtension)
.size(IconSize::Small)
.color(Color::Muted),
),
)
})
.child(
h_flex()
.gap_1p5()
.child(icon)
.child(Label::new(name))
.when(external, |this| {
this.child(
div()
.id(tooltip_id)
.flex_none()
.tooltip(Tooltip::text(tooltip_message))
.child(
Icon::new(IconName::ZedSrcExtension)
.size(IconSize::Small)
.color(Color::Muted),
),
)
})
.child(
Icon::new(IconName::Check)
.color(Color::Success)
.size(IconSize::Small),
),
Icon::new(IconName::Check)
.color(Color::Success)
.size(IconSize::Small),
)
.when(external, |this| this.child(uninstall_button))
}
}

View File

@@ -452,7 +452,6 @@ impl CodegenAlternative {
role: Role::User,
content: Vec::new(),
cache: false,
reasoning_details: None,
};
if let Some(context_task) = context_task {

View File

@@ -278,8 +278,6 @@ impl ContextPickerCompletionProvider {
icon_path: Some(mode.icon().path().into()),
documentation: None,
source: project::CompletionSource::Custom,
match_start: None,
snippet_deduplication_key: None,
insert_text_mode: None,
// This ensures that when a user accepts this completion, the
// completion menu will still be shown after "@category " is
@@ -388,8 +386,6 @@ impl ContextPickerCompletionProvider {
icon_path: Some(action.icon().path().into()),
documentation: None,
source: project::CompletionSource::Custom,
match_start: None,
snippet_deduplication_key: None,
insert_text_mode: None,
// This ensures that when a user accepts this completion, the
// completion menu will still be shown after "@category " is
@@ -421,8 +417,6 @@ impl ContextPickerCompletionProvider {
replace_range: source_range.clone(),
new_text,
label: CodeLabel::plain(thread_entry.title().to_string(), None),
match_start: None,
snippet_deduplication_key: None,
documentation: None,
insert_text_mode: None,
source: project::CompletionSource::Custom,
@@ -490,8 +484,6 @@ impl ContextPickerCompletionProvider {
replace_range: source_range.clone(),
new_text,
label: CodeLabel::plain(rules.title.to_string(), None),
match_start: None,
snippet_deduplication_key: None,
documentation: None,
insert_text_mode: None,
source: project::CompletionSource::Custom,
@@ -532,8 +524,6 @@ impl ContextPickerCompletionProvider {
documentation: None,
source: project::CompletionSource::Custom,
icon_path: Some(IconName::ToolWeb.path().into()),
match_start: None,
snippet_deduplication_key: None,
insert_text_mode: None,
confirm: Some(confirm_completion_callback(
IconName::ToolWeb.path().into(),
@@ -622,8 +612,6 @@ impl ContextPickerCompletionProvider {
documentation: None,
source: project::CompletionSource::Custom,
icon_path: Some(completion_icon_path),
match_start: None,
snippet_deduplication_key: None,
insert_text_mode: None,
confirm: Some(confirm_completion_callback(
crease_icon_path,
@@ -701,8 +689,6 @@ impl ContextPickerCompletionProvider {
documentation: None,
source: project::CompletionSource::Custom,
icon_path: Some(IconName::Code.path().into()),
match_start: None,
snippet_deduplication_key: None,
insert_text_mode: None,
confirm: Some(confirm_completion_callback(
IconName::Code.path().into(),

View File

@@ -1,6 +1,6 @@
use std::{cmp::Reverse, sync::Arc};
use collections::IndexMap;
use collections::{HashSet, IndexMap};
use fuzzy::{StringMatch, StringMatchCandidate, match_strings};
use gpui::{Action, AnyElement, App, BackgroundExecutor, DismissEvent, Subscription, Task};
use language_model::{
@@ -57,7 +57,7 @@ fn all_models(cx: &App) -> GroupedModels {
})
.collect();
let all = providers
let other = providers
.iter()
.flat_map(|provider| {
provider
@@ -70,7 +70,7 @@ fn all_models(cx: &App) -> GroupedModels {
})
.collect();
GroupedModels::new(all, recommended)
GroupedModels::new(other, recommended)
}
#[derive(Clone)]
@@ -210,24 +210,33 @@ impl LanguageModelPickerDelegate {
struct GroupedModels {
recommended: Vec<ModelInfo>,
all: IndexMap<LanguageModelProviderId, Vec<ModelInfo>>,
other: IndexMap<LanguageModelProviderId, Vec<ModelInfo>>,
}
impl GroupedModels {
pub fn new(all: Vec<ModelInfo>, recommended: Vec<ModelInfo>) -> Self {
let mut all_by_provider: IndexMap<_, Vec<ModelInfo>> = IndexMap::default();
for model in all {
pub fn new(other: Vec<ModelInfo>, recommended: Vec<ModelInfo>) -> Self {
let recommended_ids = recommended
.iter()
.map(|info| (info.model.provider_id(), info.model.id()))
.collect::<HashSet<_>>();
let mut other_by_provider: IndexMap<_, Vec<ModelInfo>> = IndexMap::default();
for model in other {
if recommended_ids.contains(&(model.model.provider_id(), model.model.id())) {
continue;
}
let provider = model.model.provider_id();
if let Some(models) = all_by_provider.get_mut(&provider) {
if let Some(models) = other_by_provider.get_mut(&provider) {
models.push(model);
} else {
all_by_provider.insert(provider, vec![model]);
other_by_provider.insert(provider, vec![model]);
}
}
Self {
recommended,
all: all_by_provider,
other: other_by_provider,
}
}
@@ -243,7 +252,7 @@ impl GroupedModels {
);
}
for models in self.all.values() {
for models in self.other.values() {
if models.is_empty() {
continue;
}
@@ -258,6 +267,20 @@ impl GroupedModels {
}
entries
}
fn model_infos(&self) -> Vec<ModelInfo> {
let other = self
.other
.values()
.flat_map(|model| model.iter())
.cloned()
.collect::<Vec<_>>();
self.recommended
.iter()
.chain(&other)
.cloned()
.collect::<Vec<_>>()
}
}
enum LanguageModelPickerEntry {
@@ -402,9 +425,8 @@ impl PickerDelegate for LanguageModelPickerDelegate {
.collect::<Vec<_>>();
let available_models = all_models
.all
.values()
.flat_map(|models| models.iter())
.model_infos()
.iter()
.filter(|m| configured_provider_ids.contains(&m.model.provider_id()))
.cloned()
.collect::<Vec<_>>();
@@ -742,52 +764,46 @@ mod tests {
}
#[gpui::test]
fn test_recommended_models_also_appear_in_other(_cx: &mut TestAppContext) {
fn test_exclude_recommended_models(_cx: &mut TestAppContext) {
let recommended_models = create_models(vec![("zed", "claude")]);
let all_models = create_models(vec![
("zed", "claude"), // Should also appear in "other"
("zed", "claude"), // Should be filtered out from "other"
("zed", "gemini"),
("copilot", "o3"),
]);
let grouped_models = GroupedModels::new(all_models, recommended_models);
let actual_all_models = grouped_models
.all
let actual_other_models = grouped_models
.other
.values()
.flatten()
.cloned()
.collect::<Vec<_>>();
// Recommended models should also appear in "all"
assert_models_eq(
actual_all_models,
vec!["zed/claude", "zed/gemini", "copilot/o3"],
);
// Recommended models should not appear in "other"
assert_models_eq(actual_other_models, vec!["zed/gemini", "copilot/o3"]);
}
#[gpui::test]
fn test_models_from_different_providers(_cx: &mut TestAppContext) {
fn test_dont_exclude_models_from_other_providers(_cx: &mut TestAppContext) {
let recommended_models = create_models(vec![("zed", "claude")]);
let all_models = create_models(vec![
("zed", "claude"), // Should also appear in "other"
("zed", "claude"), // Should be filtered out from "other"
("zed", "gemini"),
("copilot", "claude"), // Different provider, should appear in "other"
("copilot", "claude"), // Should not be filtered out from "other"
]);
let grouped_models = GroupedModels::new(all_models, recommended_models);
let actual_all_models = grouped_models
.all
let actual_other_models = grouped_models
.other
.values()
.flatten()
.cloned()
.collect::<Vec<_>>();
// All models should appear in "all" regardless of recommended status
assert_models_eq(
actual_all_models,
vec!["zed/claude", "zed/gemini", "copilot/claude"],
);
// Recommended models should not appear in "other"
assert_models_eq(actual_other_models, vec!["zed/gemini", "copilot/claude"]);
}
}

View File

@@ -127,8 +127,6 @@ impl SlashCommandCompletionProvider {
new_text,
label: command.label(cx),
icon_path: None,
match_start: None,
snippet_deduplication_key: None,
insert_text_mode: None,
confirm,
source: CompletionSource::Custom,
@@ -234,8 +232,6 @@ impl SlashCommandCompletionProvider {
icon_path: None,
new_text,
documentation: None,
match_start: None,
snippet_deduplication_key: None,
confirm,
insert_text_mode: None,
source: CompletionSource::Custom,

View File

@@ -277,7 +277,6 @@ impl TerminalInlineAssistant {
role: Role::User,
content: vec![],
cache: false,
reasoning_details: None,
};
context_load_task

View File

@@ -2592,11 +2592,12 @@ impl SearchableItem for TextThreadEditor {
&mut self,
index: usize,
matches: &[Self::Match],
collapse: bool,
window: &mut Window,
cx: &mut Context<Self>,
) {
self.editor.update(cx, |editor, cx| {
editor.activate_match(index, matches, window, cx);
editor.activate_match(index, matches, collapse, window, cx);
});
}

View File

@@ -67,13 +67,6 @@ pub enum Model {
alias = "claude-opus-4-1-thinking-latest"
)]
ClaudeOpus4_1Thinking,
#[serde(rename = "claude-opus-4-5", alias = "claude-opus-4-5-latest")]
ClaudeOpus4_5,
#[serde(
rename = "claude-opus-4-5-thinking",
alias = "claude-opus-4-5-thinking-latest"
)]
ClaudeOpus4_5Thinking,
#[serde(rename = "claude-sonnet-4", alias = "claude-sonnet-4-latest")]
ClaudeSonnet4,
#[serde(
@@ -138,14 +131,6 @@ impl Model {
}
pub fn from_id(id: &str) -> Result<Self> {
if id.starts_with("claude-opus-4-5-thinking") {
return Ok(Self::ClaudeOpus4_5Thinking);
}
if id.starts_with("claude-opus-4-5") {
return Ok(Self::ClaudeOpus4_5);
}
if id.starts_with("claude-opus-4-1-thinking") {
return Ok(Self::ClaudeOpus4_1Thinking);
}
@@ -223,8 +208,6 @@ impl Model {
Self::ClaudeOpus4_1 => "claude-opus-4-1-latest",
Self::ClaudeOpus4Thinking => "claude-opus-4-thinking-latest",
Self::ClaudeOpus4_1Thinking => "claude-opus-4-1-thinking-latest",
Self::ClaudeOpus4_5 => "claude-opus-4-5-latest",
Self::ClaudeOpus4_5Thinking => "claude-opus-4-5-thinking-latest",
Self::ClaudeSonnet4 => "claude-sonnet-4-latest",
Self::ClaudeSonnet4Thinking => "claude-sonnet-4-thinking-latest",
Self::ClaudeSonnet4_5 => "claude-sonnet-4-5-latest",
@@ -247,7 +230,6 @@ impl Model {
match self {
Self::ClaudeOpus4 | Self::ClaudeOpus4Thinking => "claude-opus-4-20250514",
Self::ClaudeOpus4_1 | Self::ClaudeOpus4_1Thinking => "claude-opus-4-1-20250805",
Self::ClaudeOpus4_5 | Self::ClaudeOpus4_5Thinking => "claude-opus-4-5-20251101",
Self::ClaudeSonnet4 | Self::ClaudeSonnet4Thinking => "claude-sonnet-4-20250514",
Self::ClaudeSonnet4_5 | Self::ClaudeSonnet4_5Thinking => "claude-sonnet-4-5-20250929",
Self::Claude3_5Sonnet => "claude-3-5-sonnet-latest",
@@ -267,8 +249,6 @@ impl Model {
Self::ClaudeOpus4_1 => "Claude Opus 4.1",
Self::ClaudeOpus4Thinking => "Claude Opus 4 Thinking",
Self::ClaudeOpus4_1Thinking => "Claude Opus 4.1 Thinking",
Self::ClaudeOpus4_5 => "Claude Opus 4.5",
Self::ClaudeOpus4_5Thinking => "Claude Opus 4.5 Thinking",
Self::ClaudeSonnet4 => "Claude Sonnet 4",
Self::ClaudeSonnet4Thinking => "Claude Sonnet 4 Thinking",
Self::ClaudeSonnet4_5 => "Claude Sonnet 4.5",
@@ -294,8 +274,6 @@ impl Model {
| Self::ClaudeOpus4_1
| Self::ClaudeOpus4Thinking
| Self::ClaudeOpus4_1Thinking
| Self::ClaudeOpus4_5
| Self::ClaudeOpus4_5Thinking
| Self::ClaudeSonnet4
| Self::ClaudeSonnet4Thinking
| Self::ClaudeSonnet4_5
@@ -325,8 +303,6 @@ impl Model {
| Self::ClaudeOpus4_1
| Self::ClaudeOpus4Thinking
| Self::ClaudeOpus4_1Thinking
| Self::ClaudeOpus4_5
| Self::ClaudeOpus4_5Thinking
| Self::ClaudeSonnet4
| Self::ClaudeSonnet4Thinking
| Self::ClaudeSonnet4_5
@@ -350,8 +326,6 @@ impl Model {
| Self::ClaudeOpus4_1
| Self::ClaudeOpus4Thinking
| Self::ClaudeOpus4_1Thinking
| Self::ClaudeOpus4_5
| Self::ClaudeOpus4_5Thinking
| Self::ClaudeSonnet4
| Self::ClaudeSonnet4Thinking
| Self::ClaudeSonnet4_5
@@ -374,8 +348,6 @@ impl Model {
| Self::ClaudeOpus4_1
| Self::ClaudeOpus4Thinking
| Self::ClaudeOpus4_1Thinking
| Self::ClaudeOpus4_5
| Self::ClaudeOpus4_5Thinking
| Self::ClaudeSonnet4
| Self::ClaudeSonnet4Thinking
| Self::ClaudeSonnet4_5
@@ -400,7 +372,6 @@ impl Model {
match self {
Self::ClaudeOpus4
| Self::ClaudeOpus4_1
| Self::ClaudeOpus4_5
| Self::ClaudeSonnet4
| Self::ClaudeSonnet4_5
| Self::Claude3_5Sonnet
@@ -412,7 +383,6 @@ impl Model {
| Self::Claude3Haiku => AnthropicModelMode::Default,
Self::ClaudeOpus4Thinking
| Self::ClaudeOpus4_1Thinking
| Self::ClaudeOpus4_5Thinking
| Self::ClaudeSonnet4Thinking
| Self::ClaudeSonnet4_5Thinking
| Self::ClaudeHaiku4_5Thinking

View File

@@ -254,7 +254,6 @@ impl PasswordProxy {
.await
.with_context(|| format!("creating askpass script at {askpass_script_path:?}"))?;
make_file_executable(&askpass_script_path).await?;
// todo(shell): There might be no powershell on the system
#[cfg(target_os = "windows")]
let askpass_helper = format!(
"powershell.exe -ExecutionPolicy Bypass -File {}",

View File

@@ -1416,7 +1416,6 @@ impl TextThread {
role: Role::User,
content: vec!["Respond only with OK, nothing else.".into()],
cache: false,
reasoning_details: None,
});
req
};
@@ -2084,11 +2083,6 @@ impl TextThread {
}
}
LanguageModelCompletionEvent::StartMessage { .. } => {}
LanguageModelCompletionEvent::ReasoningDetails(_) => {
// ReasoningDetails are metadata (signatures, encrypted data, format info)
// used for request/response validation, not UI content.
// The displayable thinking text is already handled by the Thinking event.
}
LanguageModelCompletionEvent::Stop(reason) => {
stop_reason = reason;
}
@@ -2312,7 +2306,6 @@ impl TextThread {
role: message.role,
content: Vec::new(),
cache: message.cache.as_ref().is_some_and(|cache| cache.is_anchor),
reasoning_details: None,
};
while let Some(content) = contents.peek() {
@@ -2684,7 +2677,6 @@ impl TextThread {
role: Role::User,
content: vec![SUMMARIZE_THREAD_PROMPT.into()],
cache: false,
reasoning_details: None,
});
// If there is no summary, it is set with `done: false` so that "Loading Summary…" can

View File

@@ -350,7 +350,8 @@ impl AutoUpdater {
pub fn start_polling(&self, cx: &mut Context<Self>) -> Task<Result<()>> {
cx.spawn(async move |this, cx| {
if cfg!(target_os = "windows") {
#[cfg(target_os = "windows")]
{
use util::ResultExt;
cleanup_windows()
@@ -902,16 +903,28 @@ async fn install_release_macos(
Ok(None)
}
#[cfg(target_os = "windows")]
async fn cleanup_windows() -> Result<()> {
use util::ResultExt;
let parent = std::env::current_exe()?
.parent()
.context("No parent dir for Zed.exe")?
.to_owned();
// keep in sync with crates/auto_update_helper/src/updater.rs
_ = smol::fs::remove_dir(parent.join("updates")).await;
_ = smol::fs::remove_dir(parent.join("install")).await;
_ = smol::fs::remove_dir(parent.join("old")).await;
smol::fs::remove_dir(parent.join("updates"))
.await
.context("failed to remove updates dir")
.log_err();
smol::fs::remove_dir(parent.join("install"))
.await
.context("failed to remove install dir")
.log_err();
smol::fs::remove_dir(parent.join("old"))
.await
.context("failed to remove old version dir")
.log_err();
Ok(())
}

View File

@@ -1,6 +1,6 @@
use std::{
cell::LazyCell,
path::Path,
sync::LazyLock,
time::{Duration, Instant},
};
@@ -13,8 +13,8 @@ use windows::Win32::{
use crate::windows_impl::WM_JOB_UPDATED;
pub(crate) struct Job {
pub apply: Box<dyn Fn(&Path) -> Result<()> + Send + Sync>,
pub rollback: Box<dyn Fn(&Path) -> Result<()> + Send + Sync>,
pub apply: Box<dyn Fn(&Path) -> Result<()>>,
pub rollback: Box<dyn Fn(&Path) -> Result<()>>,
}
impl Job {
@@ -154,8 +154,10 @@ impl Job {
}
}
// app is single threaded
#[cfg(not(test))]
pub(crate) static JOBS: LazyLock<[Job; 22]> = LazyLock::new(|| {
#[allow(clippy::declare_interior_mutable_const)]
pub(crate) const JOBS: LazyCell<[Job; 22]> = LazyCell::new(|| {
fn p(value: &str) -> &Path {
Path::new(value)
}
@@ -204,8 +206,10 @@ pub(crate) static JOBS: LazyLock<[Job; 22]> = LazyLock::new(|| {
]
});
// app is single threaded
#[cfg(test)]
pub(crate) static JOBS: LazyLock<[Job; 9]> = LazyLock::new(|| {
#[allow(clippy::declare_interior_mutable_const)]
pub(crate) const JOBS: LazyCell<[Job; 9]> = LazyCell::new(|| {
fn p(value: &str) -> &Path {
Path::new(value)
}

View File

@@ -51,13 +51,6 @@ pub enum Model {
alias = "claude-opus-4-1-thinking-latest"
)]
ClaudeOpus4_1Thinking,
#[serde(rename = "claude-opus-4-5", alias = "claude-opus-4-5-latest")]
ClaudeOpus4_5,
#[serde(
rename = "claude-opus-4-5-thinking",
alias = "claude-opus-4-5-thinking-latest"
)]
ClaudeOpus4_5Thinking,
#[serde(rename = "claude-3-5-sonnet-v2", alias = "claude-3-5-sonnet-latest")]
Claude3_5SonnetV2,
#[serde(rename = "claude-3-7-sonnet", alias = "claude-3-7-sonnet-latest")]
@@ -148,19 +141,7 @@ impl Model {
}
pub fn from_id(id: &str) -> anyhow::Result<Self> {
if id.starts_with("claude-opus-4-5-thinking") {
Ok(Self::ClaudeOpus4_5Thinking)
} else if id.starts_with("claude-opus-4-5") {
Ok(Self::ClaudeOpus4_5)
} else if id.starts_with("claude-opus-4-1-thinking") {
Ok(Self::ClaudeOpus4_1Thinking)
} else if id.starts_with("claude-opus-4-1") {
Ok(Self::ClaudeOpus4_1)
} else if id.starts_with("claude-opus-4-thinking") {
Ok(Self::ClaudeOpus4Thinking)
} else if id.starts_with("claude-opus-4") {
Ok(Self::ClaudeOpus4)
} else if id.starts_with("claude-3-5-sonnet-v2") {
if id.starts_with("claude-3-5-sonnet-v2") {
Ok(Self::Claude3_5SonnetV2)
} else if id.starts_with("claude-3-opus") {
Ok(Self::Claude3Opus)
@@ -197,8 +178,6 @@ impl Model {
Model::ClaudeOpus4_1 => "claude-opus-4-1",
Model::ClaudeOpus4Thinking => "claude-opus-4-thinking",
Model::ClaudeOpus4_1Thinking => "claude-opus-4-1-thinking",
Model::ClaudeOpus4_5 => "claude-opus-4-5",
Model::ClaudeOpus4_5Thinking => "claude-opus-4-5-thinking",
Model::Claude3_5SonnetV2 => "claude-3-5-sonnet-v2",
Model::Claude3_5Sonnet => "claude-3-5-sonnet",
Model::Claude3Opus => "claude-3-opus",
@@ -266,9 +245,6 @@ impl Model {
Model::ClaudeOpus4_1 | Model::ClaudeOpus4_1Thinking => {
"anthropic.claude-opus-4-1-20250805-v1:0"
}
Model::ClaudeOpus4_5 | Model::ClaudeOpus4_5Thinking => {
"anthropic.claude-opus-4-5-20251101-v1:0"
}
Model::Claude3_5SonnetV2 => "anthropic.claude-3-5-sonnet-20241022-v2:0",
Model::Claude3_5Sonnet => "anthropic.claude-3-5-sonnet-20240620-v1:0",
Model::Claude3Opus => "anthropic.claude-3-opus-20240229-v1:0",
@@ -333,8 +309,6 @@ impl Model {
Self::ClaudeOpus4_1 => "Claude Opus 4.1",
Self::ClaudeOpus4Thinking => "Claude Opus 4 Thinking",
Self::ClaudeOpus4_1Thinking => "Claude Opus 4.1 Thinking",
Self::ClaudeOpus4_5 => "Claude Opus 4.5",
Self::ClaudeOpus4_5Thinking => "Claude Opus 4.5 Thinking",
Self::Claude3_5SonnetV2 => "Claude 3.5 Sonnet v2",
Self::Claude3_5Sonnet => "Claude 3.5 Sonnet",
Self::Claude3Opus => "Claude 3 Opus",
@@ -405,9 +379,7 @@ impl Model {
| Self::ClaudeSonnet4_5
| Self::ClaudeSonnet4_5Thinking
| Self::ClaudeOpus4Thinking
| Self::ClaudeOpus4_1Thinking
| Self::ClaudeOpus4_5
| Self::ClaudeOpus4_5Thinking => 200_000,
| Self::ClaudeOpus4_1Thinking => 200_000,
Self::AmazonNovaPremier => 1_000_000,
Self::PalmyraWriterX5 => 1_000_000,
Self::PalmyraWriterX4 => 128_000,
@@ -421,11 +393,7 @@ impl Model {
Self::Claude3Opus | Self::Claude3Sonnet | Self::Claude3_5Haiku => 4_096,
Self::Claude3_7Sonnet | Self::Claude3_7SonnetThinking => 128_000,
Self::ClaudeSonnet4 | Self::ClaudeSonnet4Thinking => 64_000,
Self::ClaudeSonnet4_5
| Self::ClaudeSonnet4_5Thinking
| Self::ClaudeHaiku4_5
| Self::ClaudeOpus4_5
| Self::ClaudeOpus4_5Thinking => 64_000,
Self::ClaudeSonnet4_5 | Self::ClaudeSonnet4_5Thinking | Self::ClaudeHaiku4_5 => 64_000,
Self::ClaudeOpus4
| Self::ClaudeOpus4Thinking
| Self::ClaudeOpus4_1
@@ -450,8 +418,6 @@ impl Model {
| Self::ClaudeOpus4Thinking
| Self::ClaudeOpus4_1
| Self::ClaudeOpus4_1Thinking
| Self::ClaudeOpus4_5
| Self::ClaudeOpus4_5Thinking
| Self::ClaudeSonnet4
| Self::ClaudeSonnet4Thinking
| Self::ClaudeSonnet4_5
@@ -477,8 +443,6 @@ impl Model {
| Self::ClaudeOpus4Thinking
| Self::ClaudeOpus4_1
| Self::ClaudeOpus4_1Thinking
| Self::ClaudeOpus4_5
| Self::ClaudeOpus4_5Thinking
| Self::ClaudeSonnet4
| Self::ClaudeSonnet4Thinking
| Self::ClaudeSonnet4_5
@@ -520,9 +484,7 @@ impl Model {
| Self::ClaudeOpus4
| Self::ClaudeOpus4Thinking
| Self::ClaudeOpus4_1
| Self::ClaudeOpus4_1Thinking
| Self::ClaudeOpus4_5
| Self::ClaudeOpus4_5Thinking => true,
| Self::ClaudeOpus4_1Thinking => true,
// Custom models - check if they have cache configuration
Self::Custom {
@@ -544,9 +506,7 @@ impl Model {
| Self::ClaudeOpus4
| Self::ClaudeOpus4Thinking
| Self::ClaudeOpus4_1
| Self::ClaudeOpus4_1Thinking
| Self::ClaudeOpus4_5
| Self::ClaudeOpus4_5Thinking => Some(BedrockModelCacheConfiguration {
| Self::ClaudeOpus4_1Thinking => Some(BedrockModelCacheConfiguration {
max_cache_anchors: 4,
min_total_token: 1024,
}),
@@ -575,11 +535,11 @@ impl Model {
budget_tokens: Some(4096),
}
}
Model::ClaudeOpus4Thinking
| Model::ClaudeOpus4_1Thinking
| Model::ClaudeOpus4_5Thinking => BedrockModelMode::Thinking {
budget_tokens: Some(4096),
},
Model::ClaudeOpus4Thinking | Model::ClaudeOpus4_1Thinking => {
BedrockModelMode::Thinking {
budget_tokens: Some(4096),
}
}
_ => BedrockModelMode::Default,
}
}
@@ -633,8 +593,6 @@ impl Model {
| Model::ClaudeOpus4Thinking
| Model::ClaudeOpus4_1
| Model::ClaudeOpus4_1Thinking
| Model::ClaudeOpus4_5
| Model::ClaudeOpus4_5Thinking
| Model::Claude3Haiku
| Model::Claude3Opus
| Model::Claude3Sonnet

View File

@@ -51,11 +51,3 @@ pub fn external_agents_docs(cx: &App) -> String {
server_url = server_url(cx)
)
}
/// Returns the URL to Zed agent servers documentation.
pub fn agent_server_docs(cx: &App) -> String {
format!(
"{server_url}/docs/extensions/agent-servers",
server_url = server_url(cx)
)
}

View File

@@ -1 +0,0 @@
drop table embeddings;

View File

@@ -5,6 +5,7 @@ pub mod buffers;
pub mod channels;
pub mod contacts;
pub mod contributors;
pub mod embeddings;
pub mod extensions;
pub mod notifications;
pub mod projects;

View File

@@ -0,0 +1,94 @@
use super::*;
use time::Duration;
use time::OffsetDateTime;
impl Database {
pub async fn get_embeddings(
&self,
model: &str,
digests: &[Vec<u8>],
) -> Result<HashMap<Vec<u8>, Vec<f32>>> {
self.transaction(|tx| async move {
let embeddings = {
let mut db_embeddings = embedding::Entity::find()
.filter(
embedding::Column::Model.eq(model).and(
embedding::Column::Digest
.is_in(digests.iter().map(|digest| digest.as_slice())),
),
)
.stream(&*tx)
.await?;
let mut embeddings = HashMap::default();
while let Some(db_embedding) = db_embeddings.next().await {
let db_embedding = db_embedding?;
embeddings.insert(db_embedding.digest, db_embedding.dimensions);
}
embeddings
};
if !embeddings.is_empty() {
let now = OffsetDateTime::now_utc();
let retrieved_at = PrimitiveDateTime::new(now.date(), now.time());
embedding::Entity::update_many()
.filter(
embedding::Column::Digest
.is_in(embeddings.keys().map(|digest| digest.as_slice())),
)
.col_expr(embedding::Column::RetrievedAt, Expr::value(retrieved_at))
.exec(&*tx)
.await?;
}
Ok(embeddings)
})
.await
}
pub async fn save_embeddings(
&self,
model: &str,
embeddings: &HashMap<Vec<u8>, Vec<f32>>,
) -> Result<()> {
self.transaction(|tx| async move {
embedding::Entity::insert_many(embeddings.iter().map(|(digest, dimensions)| {
let now_offset_datetime = OffsetDateTime::now_utc();
let retrieved_at =
PrimitiveDateTime::new(now_offset_datetime.date(), now_offset_datetime.time());
embedding::ActiveModel {
model: ActiveValue::set(model.to_string()),
digest: ActiveValue::set(digest.clone()),
dimensions: ActiveValue::set(dimensions.clone()),
retrieved_at: ActiveValue::set(retrieved_at),
}
}))
.on_conflict(
OnConflict::columns([embedding::Column::Model, embedding::Column::Digest])
.do_nothing()
.to_owned(),
)
.exec_without_returning(&*tx)
.await?;
Ok(())
})
.await
}
pub async fn purge_old_embeddings(&self) -> Result<()> {
self.transaction(|tx| async move {
embedding::Entity::delete_many()
.filter(
embedding::Column::RetrievedAt
.lte(OffsetDateTime::now_utc() - Duration::days(60)),
)
.exec(&*tx)
.await?;
Ok(())
})
.await
}
}

View File

@@ -8,6 +8,7 @@ pub mod channel_chat_participant;
pub mod channel_member;
pub mod contact;
pub mod contributor;
pub mod embedding;
pub mod extension;
pub mod extension_version;
pub mod follower;

View File

@@ -0,0 +1,18 @@
use sea_orm::entity::prelude::*;
use time::PrimitiveDateTime;
#[derive(Clone, Debug, PartialEq, DeriveEntityModel)]
#[sea_orm(table_name = "embeddings")]
pub struct Model {
#[sea_orm(primary_key)]
pub model: String,
#[sea_orm(primary_key)]
pub digest: Vec<u8>,
pub dimensions: Vec<f32>,
pub retrieved_at: PrimitiveDateTime,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}
impl ActiveModelBehavior for ActiveModel {}

View File

@@ -2,6 +2,9 @@ mod buffer_tests;
mod channel_tests;
mod contributor_tests;
mod db_tests;
// we only run postgres tests on macos right now
#[cfg(target_os = "macos")]
mod embedding_tests;
mod extension_tests;
use crate::migrations::run_database_migrations;

View File

@@ -0,0 +1,87 @@
use super::TestDb;
use crate::db::embedding;
use collections::HashMap;
use sea_orm::{ColumnTrait, EntityTrait, QueryFilter, sea_query::Expr};
use std::ops::Sub;
use time::{Duration, OffsetDateTime, PrimitiveDateTime};
// SQLite does not support array arguments, so we only test this against a real postgres instance
#[gpui::test]
async fn test_get_embeddings_postgres(cx: &mut gpui::TestAppContext) {
let test_db = TestDb::postgres(cx.executor());
let db = test_db.db();
let provider = "test_model";
let digest1 = vec![1, 2, 3];
let digest2 = vec![4, 5, 6];
let embeddings = HashMap::from_iter([
(digest1.clone(), vec![0.1, 0.2, 0.3]),
(digest2.clone(), vec![0.4, 0.5, 0.6]),
]);
// Save embeddings
db.save_embeddings(provider, &embeddings).await.unwrap();
// Retrieve embeddings
let retrieved_embeddings = db
.get_embeddings(provider, &[digest1.clone(), digest2.clone()])
.await
.unwrap();
assert_eq!(retrieved_embeddings.len(), 2);
assert!(retrieved_embeddings.contains_key(&digest1));
assert!(retrieved_embeddings.contains_key(&digest2));
// Check if the retrieved embeddings are correct
assert_eq!(retrieved_embeddings[&digest1], vec![0.1, 0.2, 0.3]);
assert_eq!(retrieved_embeddings[&digest2], vec![0.4, 0.5, 0.6]);
}
#[gpui::test]
async fn test_purge_old_embeddings(cx: &mut gpui::TestAppContext) {
let test_db = TestDb::postgres(cx.executor());
let db = test_db.db();
let model = "test_model";
let digest = vec![7, 8, 9];
let embeddings = HashMap::from_iter([(digest.clone(), vec![0.7, 0.8, 0.9])]);
// Save old embeddings
db.save_embeddings(model, &embeddings).await.unwrap();
// Reach into the DB and change the retrieved at to be > 60 days
db.transaction(|tx| {
let digest = digest.clone();
async move {
let sixty_days_ago = OffsetDateTime::now_utc().sub(Duration::days(61));
let retrieved_at = PrimitiveDateTime::new(sixty_days_ago.date(), sixty_days_ago.time());
embedding::Entity::update_many()
.filter(
embedding::Column::Model
.eq(model)
.and(embedding::Column::Digest.eq(digest)),
)
.col_expr(embedding::Column::RetrievedAt, Expr::value(retrieved_at))
.exec(&*tx)
.await
.unwrap();
Ok(())
}
})
.await
.unwrap();
// Purge old embeddings
db.purge_old_embeddings().await.unwrap();
// Try to retrieve the purged embeddings
let retrieved_embeddings = db
.get_embeddings(model, std::slice::from_ref(&digest))
.await
.unwrap();
assert!(
retrieved_embeddings.is_empty(),
"Old embeddings should have been purged"
);
}

View File

@@ -13,7 +13,7 @@ use collab::llm::db::LlmDatabase;
use collab::migrations::run_database_migrations;
use collab::{
AppState, Config, Result, api::fetch_extensions_from_blob_store_periodically, db, env,
executor::Executor,
executor::Executor, rpc::ResultExt,
};
use db::Database;
use std::{
@@ -95,6 +95,8 @@ async fn main() -> Result<()> {
let state = AppState::new(config, Executor::Production).await?;
if mode.is_collab() {
state.db.purge_old_embeddings().await.trace_err();
let epoch = state
.db
.create_server(&state.config.zed_environment)

View File

@@ -2169,28 +2169,16 @@ async fn test_inlay_hint_refresh_is_forwarded(
} else {
"initial hint"
};
Ok(Some(vec![
lsp::InlayHint {
position: lsp::Position::new(0, character),
label: lsp::InlayHintLabel::String(label.to_string()),
kind: None,
text_edits: None,
tooltip: None,
padding_left: None,
padding_right: None,
data: None,
},
lsp::InlayHint {
position: lsp::Position::new(1090, 1090),
label: lsp::InlayHintLabel::String("out-of-bounds hint".to_string()),
kind: None,
text_edits: None,
tooltip: None,
padding_left: None,
padding_right: None,
data: None,
},
]))
Ok(Some(vec![lsp::InlayHint {
position: lsp::Position::new(0, character),
label: lsp::InlayHintLabel::String(label.to_string()),
kind: None,
text_edits: None,
tooltip: None,
padding_left: None,
padding_right: None,
data: None,
}]))
}
})
.next()

View File

@@ -523,7 +523,7 @@ async fn test_basic_following(
});
// Client B activates a panel, and the previously-opened screen-sharing item gets activated.
let panel = cx_b.new(|cx| TestPanel::new(DockPosition::Left, 100, cx));
let panel = cx_b.new(|cx| TestPanel::new(DockPosition::Left, cx));
workspace_b.update_in(cx_b, |workspace, window, cx| {
workspace.add_panel(panel, window, cx);
workspace.toggle_panel_focus::<TestPanel>(window, cx);

View File

@@ -294,10 +294,6 @@ pub enum ChatMessage {
content: ChatMessageContent,
#[serde(default, skip_serializing_if = "Vec::is_empty")]
tool_calls: Vec<ToolCall>,
#[serde(default, skip_serializing_if = "Option::is_none")]
reasoning_opaque: Option<String>,
#[serde(default, skip_serializing_if = "Option::is_none")]
reasoning_text: Option<String>,
},
User {
content: ChatMessageContent,
@@ -357,8 +353,6 @@ pub enum ToolCallContent {
pub struct FunctionContent {
pub name: String,
pub arguments: String,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub thought_signature: Option<String>,
}
#[derive(Deserialize, Debug)]
@@ -390,8 +384,6 @@ pub struct ResponseDelta {
pub role: Option<Role>,
#[serde(default)]
pub tool_calls: Vec<ToolCallChunk>,
pub reasoning_opaque: Option<String>,
pub reasoning_text: Option<String>,
}
#[derive(Deserialize, Debug, Eq, PartialEq)]
pub struct ToolCallChunk {
@@ -404,7 +396,6 @@ pub struct ToolCallChunk {
pub struct FunctionChunk {
pub name: Option<String>,
pub arguments: Option<String>,
pub thought_signature: Option<String>,
}
#[derive(Deserialize)]
@@ -792,13 +783,13 @@ async fn stream_completion(
is_user_initiated: bool,
) -> Result<BoxStream<'static, Result<ResponseEvent>>> {
let is_vision_request = request.messages.iter().any(|message| match message {
ChatMessage::User { content }
| ChatMessage::Assistant { content, .. }
| ChatMessage::Tool { content, .. } => {
matches!(content, ChatMessageContent::Multipart(parts) if parts.iter().any(|part| matches!(part, ChatMessagePart::Image { .. })))
}
_ => false,
});
ChatMessage::User { content }
| ChatMessage::Assistant { content, .. }
| ChatMessage::Tool { content, .. } => {
matches!(content, ChatMessageContent::Multipart(parts) if parts.iter().any(|part| matches!(part, ChatMessagePart::Image { .. })))
}
_ => false,
});
let request_initiator = if is_user_initiated { "user" } else { "agent" };

View File

@@ -127,8 +127,6 @@ pub enum ResponseInputItem {
arguments: String,
#[serde(skip_serializing_if = "Option::is_none")]
status: Option<ItemStatus>,
#[serde(default, skip_serializing_if = "Option::is_none")]
thought_signature: Option<String>,
},
FunctionCallOutput {
call_id: String,
@@ -253,8 +251,6 @@ pub enum ResponseOutputItem {
arguments: String,
#[serde(skip_serializing_if = "Option::is_none")]
status: Option<ItemStatus>,
#[serde(default, skip_serializing_if = "Option::is_none")]
thought_signature: Option<String>,
},
Reasoning {
id: String,
@@ -313,8 +309,7 @@ pub async fn stream_response(
};
let is_streaming = request.stream;
let json = serde_json::to_string(&request)?;
let request = request_builder.body(AsyncBody::from(json))?;
let request = request_builder.body(AsyncBody::from(serde_json::to_string(&request)?))?;
let mut response = client.send(request).await?;
if !response.status().is_success() {

View File

@@ -289,7 +289,6 @@ impl minidumper::ServerHandler for CrashServer {
pub fn panic_hook(info: &PanicHookInfo) {
// Don't handle a panic on threads that are not relevant to the main execution.
if extension_host::wasm_host::IS_WASM_THREAD.with(|v| v.load(Ordering::Acquire)) {
log::error!("wasm thread panicked!");
return;
}

View File

@@ -1029,11 +1029,13 @@ impl SearchableItem for DapLogView {
&mut self,
index: usize,
matches: &[Self::Match],
collapse: bool,
window: &mut Window,
cx: &mut Context<Self>,
) {
self.editor
.update(cx, |e, cx| e.activate_match(index, matches, window, cx))
self.editor.update(cx, |e, cx| {
e.activate_match(index, matches, collapse, window, cx)
})
}
fn select_matches(

View File

@@ -677,8 +677,6 @@ impl ConsoleQueryBarCompletionProvider {
),
new_text: string_match.string.clone(),
label: CodeLabel::plain(string_match.string.clone(), None),
match_start: None,
snippet_deduplication_key: None,
icon_path: None,
documentation: Some(CompletionDocumentation::MultiLineMarkdown(
variable_value.into(),
@@ -792,8 +790,6 @@ impl ConsoleQueryBarCompletionProvider {
documentation: completion.detail.map(|detail| {
CompletionDocumentation::MultiLineMarkdown(detail.into())
}),
match_start: None,
snippet_deduplication_key: None,
confirm: None,
source: project::CompletionSource::Dap { sort_text },
insert_text_mode: None,

View File

@@ -182,6 +182,7 @@ impl ProjectDiagnosticsEditor {
project::Event::DiskBasedDiagnosticsFinished { language_server_id } => {
log::debug!("disk based diagnostics finished for server {language_server_id}");
this.close_diagnosticless_buffers(
window,
cx,
this.editor.focus_handle(cx).contains_focused(window, cx)
|| this.focus_handle.contains_focused(window, cx),
@@ -246,10 +247,10 @@ impl ProjectDiagnosticsEditor {
window.focus(&this.focus_handle);
}
}
EditorEvent::Blurred => this.close_diagnosticless_buffers(cx, false),
EditorEvent::Saved => this.close_diagnosticless_buffers(cx, true),
EditorEvent::Blurred => this.close_diagnosticless_buffers(window, cx, false),
EditorEvent::Saved => this.close_diagnosticless_buffers(window, cx, true),
EditorEvent::SelectionsChanged { .. } => {
this.close_diagnosticless_buffers(cx, true)
this.close_diagnosticless_buffers(window, cx, true)
}
_ => {}
}
@@ -297,7 +298,12 @@ impl ProjectDiagnosticsEditor {
/// - have no diagnostics anymore
/// - are saved (not dirty)
/// - and, if `retain_selections` is true, do not have selections within them
fn close_diagnosticless_buffers(&mut self, cx: &mut Context<Self>, retain_selections: bool) {
fn close_diagnosticless_buffers(
&mut self,
_window: &mut Window,
cx: &mut Context<Self>,
retain_selections: bool,
) {
let snapshot = self
.editor
.update(cx, |editor, cx| editor.display_snapshot(cx));
@@ -441,7 +447,7 @@ impl ProjectDiagnosticsEditor {
fn focus_out(&mut self, _: FocusOutEvent, window: &mut Window, cx: &mut Context<Self>) {
if !self.focus_handle.is_focused(window) && !self.editor.focus_handle(cx).is_focused(window)
{
self.close_diagnosticless_buffers(cx, false);
self.close_diagnosticless_buffers(window, cx, false);
}
}
@@ -455,7 +461,8 @@ impl ProjectDiagnosticsEditor {
});
}
});
self.close_diagnosticless_buffers(cx, false);
self.multibuffer
.update(cx, |multibuffer, cx| multibuffer.clear(cx));
self.project.update(cx, |project, cx| {
self.paths_to_update = project
.diagnostic_summaries(false, cx)
@@ -491,7 +498,7 @@ impl ProjectDiagnosticsEditor {
cx: &mut Context<Self>,
) -> Task<Result<()>> {
let was_empty = self.multibuffer.read(cx).is_empty();
let buffer_snapshot = buffer.read(cx).snapshot();
let mut buffer_snapshot = buffer.read(cx).snapshot();
let buffer_id = buffer_snapshot.remote_id();
let max_severity = if self.include_warnings {
@@ -598,6 +605,7 @@ impl ProjectDiagnosticsEditor {
cx,
)
.await;
buffer_snapshot = cx.update(|_, cx| buffer.read(cx).snapshot())?;
let initial_range = buffer_snapshot.anchor_after(b.initial_range.start)
..buffer_snapshot.anchor_before(b.initial_range.end);
let excerpt_range = ExcerptRange {
@@ -1005,14 +1013,11 @@ async fn heuristic_syntactic_expand(
snapshot: BufferSnapshot,
cx: &mut AsyncApp,
) -> Option<RangeInclusive<BufferRow>> {
let start = snapshot.clip_point(input_range.start, Bias::Right);
let end = snapshot.clip_point(input_range.end, Bias::Left);
let input_row_count = input_range.end.row - input_range.start.row;
if input_row_count > max_row_count {
return None;
}
let input_range = start..end;
// If the outline node contains the diagnostic and is small enough, just use that.
let outline_range = snapshot.outline_range_containing(input_range.clone());
if let Some(outline_range) = outline_range.clone() {

View File

@@ -43,8 +43,7 @@ actions!(
]
);
const COPILOT_SETTINGS_PATH: &str = "/settings/copilot";
const COPILOT_SETTINGS_URL: &str = concat!("https://github.com", "/settings/copilot");
const COPILOT_SETTINGS_URL: &str = "https://github.com/settings/copilot";
const PRIVACY_DOCS: &str = "https://zed.dev/docs/ai/privacy-and-security";
struct CopilotErrorToast;
@@ -129,21 +128,20 @@ impl Render for EditPredictionButton {
}),
);
}
let this = cx.weak_entity();
let this = cx.entity();
div().child(
PopoverMenu::new("copilot")
.menu(move |window, cx| {
let current_status = Copilot::global(cx)?.read(cx).status();
match current_status {
Some(match current_status {
Status::Authorized => this.update(cx, |this, cx| {
this.build_copilot_context_menu(window, cx)
}),
_ => this.update(cx, |this, cx| {
this.build_copilot_start_menu(window, cx)
}),
}
.ok()
})
})
.anchor(Corner::BottomRight)
.trigger_with_tooltip(
@@ -184,7 +182,7 @@ impl Render for EditPredictionButton {
let icon = status.to_icon();
let tooltip_text = status.to_tooltip();
let has_menu = status.has_menu();
let this = cx.weak_entity();
let this = cx.entity();
let fs = self.fs.clone();
div().child(
@@ -211,11 +209,9 @@ impl Render for EditPredictionButton {
)
}))
}
SupermavenButtonStatus::Ready => this
.update(cx, |this, cx| {
this.build_supermaven_context_menu(window, cx)
})
.ok(),
SupermavenButtonStatus::Ready => Some(this.update(cx, |this, cx| {
this.build_supermaven_context_menu(window, cx)
})),
_ => None,
})
.anchor(Corner::BottomRight)
@@ -237,16 +233,15 @@ impl Render for EditPredictionButton {
let enabled = self.editor_enabled.unwrap_or(true);
let has_api_key = CodestralCompletionProvider::has_api_key(cx);
let fs = self.fs.clone();
let this = cx.weak_entity();
let this = cx.entity();
div().child(
PopoverMenu::new("codestral")
.menu(move |window, cx| {
if has_api_key {
this.update(cx, |this, cx| {
Some(this.update(cx, |this, cx| {
this.build_codestral_context_menu(window, cx)
})
.ok()
}))
} else {
Some(ContextMenu::build(window, cx, |menu, _, _| {
let fs = fs.clone();
@@ -837,16 +832,6 @@ impl EditPredictionButton {
window: &mut Window,
cx: &mut Context<Self>,
) -> Entity<ContextMenu> {
let all_language_settings = all_language_settings(None, cx);
let copilot_config = copilot::copilot_chat::CopilotChatConfiguration {
enterprise_uri: all_language_settings
.edit_predictions
.copilot
.enterprise_uri
.clone(),
};
let settings_url = copilot_settings_url(copilot_config.enterprise_uri.as_deref());
ContextMenu::build(window, cx, |menu, window, cx| {
let menu = self.build_language_settings_menu(menu, window, cx);
let menu =
@@ -855,7 +840,10 @@ impl EditPredictionButton {
menu.separator()
.link(
"Go to Copilot Settings",
OpenBrowser { url: settings_url }.boxed_clone(),
OpenBrowser {
url: COPILOT_SETTINGS_URL.to_string(),
}
.boxed_clone(),
)
.action("Sign Out", copilot::SignOut.boxed_clone())
})
@@ -1184,99 +1172,3 @@ fn toggle_edit_prediction_mode(fs: Arc<dyn Fs>, mode: EditPredictionsMode, cx: &
});
}
}
fn copilot_settings_url(enterprise_uri: Option<&str>) -> String {
match enterprise_uri {
Some(uri) => {
format!("{}{}", uri.trim_end_matches('/'), COPILOT_SETTINGS_PATH)
}
None => COPILOT_SETTINGS_URL.to_string(),
}
}
#[cfg(test)]
mod tests {
use super::*;
use gpui::TestAppContext;
#[gpui::test]
async fn test_copilot_settings_url_with_enterprise_uri(cx: &mut TestAppContext) {
cx.update(|cx| {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
});
cx.update_global(|settings_store: &mut SettingsStore, cx| {
settings_store
.set_user_settings(
r#"{"edit_predictions":{"copilot":{"enterprise_uri":"https://my-company.ghe.com"}}}"#,
cx,
)
.unwrap();
});
let url = cx.update(|cx| {
let all_language_settings = all_language_settings(None, cx);
copilot_settings_url(
all_language_settings
.edit_predictions
.copilot
.enterprise_uri
.as_deref(),
)
});
assert_eq!(url, "https://my-company.ghe.com/settings/copilot");
}
#[gpui::test]
async fn test_copilot_settings_url_with_enterprise_uri_trailing_slash(cx: &mut TestAppContext) {
cx.update(|cx| {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
});
cx.update_global(|settings_store: &mut SettingsStore, cx| {
settings_store
.set_user_settings(
r#"{"edit_predictions":{"copilot":{"enterprise_uri":"https://my-company.ghe.com/"}}}"#,
cx,
)
.unwrap();
});
let url = cx.update(|cx| {
let all_language_settings = all_language_settings(None, cx);
copilot_settings_url(
all_language_settings
.edit_predictions
.copilot
.enterprise_uri
.as_deref(),
)
});
assert_eq!(url, "https://my-company.ghe.com/settings/copilot");
}
#[gpui::test]
async fn test_copilot_settings_url_without_enterprise_uri(cx: &mut TestAppContext) {
cx.update(|cx| {
let settings_store = SettingsStore::test(cx);
cx.set_global(settings_store);
});
let url = cx.update(|cx| {
let all_language_settings = all_language_settings(None, cx);
copilot_settings_url(
all_language_settings
.edit_predictions
.copilot
.enterprise_uri
.as_deref(),
)
});
assert_eq!(url, "https://github.com/settings/copilot");
}
}

View File

@@ -305,8 +305,6 @@ impl CompletionBuilder {
icon_path: None,
insert_text_mode: None,
confirm: None,
match_start: None,
snippet_deduplication_key: None,
}
}
}

View File

@@ -17,6 +17,7 @@ use project::{CompletionDisplayOptions, CompletionSource};
use task::DebugScenario;
use task::TaskContext;
use std::collections::VecDeque;
use std::sync::Arc;
use std::sync::atomic::{AtomicBool, Ordering};
use std::{
@@ -35,13 +36,12 @@ use util::ResultExt;
use crate::hover_popover::{hover_markdown_style, open_markdown_url};
use crate::{
CodeActionProvider, CompletionId, CompletionProvider, DisplayRow, Editor, EditorStyle,
ResolvedTasks,
CodeActionProvider, CompletionId, CompletionItemKind, CompletionProvider, DisplayRow, Editor,
EditorStyle, ResolvedTasks,
actions::{ConfirmCodeAction, ConfirmCompletion},
split_words, styled_runs_for_code_label,
};
use crate::{CodeActionSource, EditorSettings};
use collections::{HashSet, VecDeque};
use settings::{Settings, SnippetSortOrder};
pub const MENU_GAP: Pixels = px(4.);
@@ -220,9 +220,7 @@ pub struct CompletionsMenu {
pub is_incomplete: bool,
pub buffer: Entity<Buffer>,
pub completions: Rc<RefCell<Box<[Completion]>>>,
/// String match candidate for each completion, grouped by `match_start`.
match_candidates: Arc<[(Option<text::Anchor>, Vec<StringMatchCandidate>)]>,
/// Entries displayed in the menu, which is a filtered and sorted subset of `match_candidates`.
match_candidates: Arc<[StringMatchCandidate]>,
pub entries: Rc<RefCell<Box<[StringMatch]>>>,
pub selected_item: usize,
filter_task: Task<()>,
@@ -310,8 +308,6 @@ impl CompletionsMenu {
.iter()
.enumerate()
.map(|(id, completion)| StringMatchCandidate::new(id, completion.label.filter_text()))
.into_group_map_by(|candidate| completions[candidate.id].match_start)
.into_iter()
.collect();
let completions_menu = Self {
@@ -359,8 +355,6 @@ impl CompletionsMenu {
replace_range: selection.start.text_anchor..selection.end.text_anchor,
new_text: choice.to_string(),
label: CodeLabel::plain(choice.to_string(), None),
match_start: None,
snippet_deduplication_key: None,
icon_path: None,
documentation: None,
confirm: None,
@@ -369,14 +363,11 @@ impl CompletionsMenu {
})
.collect();
let match_candidates = Arc::new([(
None,
choices
.iter()
.enumerate()
.map(|(id, completion)| StringMatchCandidate::new(id, completion))
.collect(),
)]);
let match_candidates = choices
.iter()
.enumerate()
.map(|(id, completion)| StringMatchCandidate::new(id, completion))
.collect();
let entries = choices
.iter()
.enumerate()
@@ -957,7 +948,7 @@ impl CompletionsMenu {
}
let mat = &self.entries.borrow()[self.selected_item];
let completions = self.completions.borrow();
let completions = self.completions.borrow_mut();
let multiline_docs = match completions[mat.candidate_id].documentation.as_ref() {
Some(CompletionDocumentation::MultiLinePlainText(text)) => div().child(text.clone()),
Some(CompletionDocumentation::SingleLineAndMultiLinePlainText {
@@ -1035,74 +1026,57 @@ impl CompletionsMenu {
pub fn filter(
&mut self,
query: Arc<String>,
query_end: text::Anchor,
buffer: &Entity<Buffer>,
query: Option<Arc<String>>,
provider: Option<Rc<dyn CompletionProvider>>,
window: &mut Window,
cx: &mut Context<Editor>,
) {
self.cancel_filter.store(true, Ordering::Relaxed);
self.cancel_filter = Arc::new(AtomicBool::new(false));
let matches = self.do_async_filtering(query, query_end, buffer, cx);
let id = self.id;
self.filter_task = cx.spawn_in(window, async move |editor, cx| {
let matches = matches.await;
editor
.update_in(cx, |editor, window, cx| {
editor.with_completions_menu_matching_id(id, |this| {
if let Some(this) = this {
this.set_filter_results(matches, provider, window, cx);
}
});
})
.ok();
});
if let Some(query) = query {
self.cancel_filter = Arc::new(AtomicBool::new(false));
let matches = self.do_async_filtering(query, cx);
let id = self.id;
self.filter_task = cx.spawn_in(window, async move |editor, cx| {
let matches = matches.await;
editor
.update_in(cx, |editor, window, cx| {
editor.with_completions_menu_matching_id(id, |this| {
if let Some(this) = this {
this.set_filter_results(matches, provider, window, cx);
}
});
})
.ok();
});
} else {
self.filter_task = Task::ready(());
let matches = self.unfiltered_matches();
self.set_filter_results(matches, provider, window, cx);
}
}
pub fn do_async_filtering(
&self,
query: Arc<String>,
query_end: text::Anchor,
buffer: &Entity<Buffer>,
cx: &Context<Editor>,
) -> Task<Vec<StringMatch>> {
let buffer_snapshot = buffer.read(cx).snapshot();
let background_executor = cx.background_executor().clone();
let match_candidates = self.match_candidates.clone();
let cancel_filter = self.cancel_filter.clone();
let default_query = query.clone();
let matches_task = cx.background_spawn(async move {
let queries_and_candidates = match_candidates
.iter()
.map(|(query_start, candidates)| {
let query_for_batch = match query_start {
Some(start) => {
Arc::new(buffer_snapshot.text_for_range(*start..query_end).collect())
}
None => default_query.clone(),
};
(query_for_batch, candidates)
})
.collect_vec();
let mut results = vec![];
for (query, match_candidates) in queries_and_candidates {
results.extend(
fuzzy::match_strings(
&match_candidates,
&query,
query.chars().any(|c| c.is_uppercase()),
false,
1000,
&cancel_filter,
background_executor.clone(),
)
.await,
);
let matches_task = cx.background_spawn({
let query = query.clone();
let match_candidates = self.match_candidates.clone();
let cancel_filter = self.cancel_filter.clone();
let background_executor = cx.background_executor().clone();
async move {
fuzzy::match_strings(
&match_candidates,
&query,
query.chars().any(|c| c.is_uppercase()),
false,
1000,
&cancel_filter,
background_executor,
)
.await
}
results
});
let completions = self.completions.clone();
@@ -1111,31 +1085,45 @@ impl CompletionsMenu {
cx.foreground_executor().spawn(async move {
let mut matches = matches_task.await;
let completions_ref = completions.borrow();
if sort_completions {
matches = Self::sort_string_matches(
matches,
Some(&query), // used for non-snippets only
Some(&query),
snippet_sort_order,
&completions_ref,
completions.borrow().as_ref(),
);
}
// Remove duplicate snippet prefixes (e.g., "cool code" will match
// the text "c c" in two places; we should only show the longer one)
let mut snippets_seen = HashSet::<(usize, usize)>::default();
matches.retain(|result| {
match completions_ref[result.candidate_id].snippet_deduplication_key {
Some(key) => snippets_seen.insert(key),
None => true,
}
});
matches
})
}
/// Like `do_async_filtering` but there is no filter query, so no need to spawn tasks.
pub fn unfiltered_matches(&self) -> Vec<StringMatch> {
let mut matches = self
.match_candidates
.iter()
.enumerate()
.map(|(candidate_id, candidate)| StringMatch {
candidate_id,
score: Default::default(),
positions: Default::default(),
string: candidate.string.clone(),
})
.collect();
if self.sort_completions {
matches = Self::sort_string_matches(
matches,
None,
self.snippet_sort_order,
self.completions.borrow().as_ref(),
);
}
matches
}
pub fn set_filter_results(
&mut self,
matches: Vec<StringMatch>,
@@ -1178,13 +1166,28 @@ impl CompletionsMenu {
.and_then(|c| c.to_lowercase().next());
if snippet_sort_order == SnippetSortOrder::None {
matches
.retain(|string_match| !completions[string_match.candidate_id].is_snippet_kind());
matches.retain(|string_match| {
let completion = &completions[string_match.candidate_id];
let is_snippet = matches!(
&completion.source,
CompletionSource::Lsp { lsp_completion, .. }
if lsp_completion.kind == Some(CompletionItemKind::SNIPPET)
);
!is_snippet
});
}
matches.sort_unstable_by_key(|string_match| {
let completion = &completions[string_match.candidate_id];
let is_snippet = matches!(
&completion.source,
CompletionSource::Lsp { lsp_completion, .. }
if lsp_completion.kind == Some(CompletionItemKind::SNIPPET)
);
let sort_text = match &completion.source {
CompletionSource::Lsp { lsp_completion, .. } => lsp_completion.sort_text.as_deref(),
CompletionSource::Dap { sort_text } => Some(sort_text.as_str()),
@@ -1196,17 +1199,14 @@ impl CompletionsMenu {
let score = string_match.score;
let sort_score = Reverse(OrderedFloat(score));
// Snippets do their own first-letter matching logic elsewhere.
let is_snippet = completion.is_snippet_kind();
let query_start_doesnt_match_split_words = !is_snippet
&& query_start_lower
.map(|query_char| {
!split_words(&string_match.string).any(|word| {
word.chars().next().and_then(|c| c.to_lowercase().next())
== Some(query_char)
})
let query_start_doesnt_match_split_words = query_start_lower
.map(|query_char| {
!split_words(&string_match.string).any(|word| {
word.chars().next().and_then(|c| c.to_lowercase().next())
== Some(query_char)
})
.unwrap_or(false);
})
.unwrap_or(false);
if query_start_doesnt_match_split_words {
MatchTier::OtherMatch { sort_score }
@@ -1218,7 +1218,6 @@ impl CompletionsMenu {
SnippetSortOrder::None => Reverse(0),
};
let sort_positions = string_match.positions.clone();
// This exact matching won't work for multi-word snippets, but it's fine
let sort_exact = Reverse(if Some(completion.label.filter_text()) == query {
1
} else {

View File

@@ -1097,7 +1097,7 @@ impl DisplaySnapshot {
details: &TextLayoutDetails,
) -> u32 {
let layout_line = self.layout_row(display_row, details);
layout_line.closest_index_for_x(x) as u32
layout_line.index_for_x(x) as u32
}
pub fn grapheme_at(&self, mut point: DisplayPoint) -> Option<SharedString> {

View File

@@ -74,7 +74,7 @@ use ::git::{
blame::{BlameEntry, ParsedCommitMessage},
status::FileStatus,
};
use aho_corasick::{AhoCorasick, AhoCorasickBuilder, BuildError};
use aho_corasick::AhoCorasick;
use anyhow::{Context as _, Result, anyhow};
use blink_manager::BlinkManager;
use buffer_diff::DiffHunkStatus;
@@ -1096,7 +1096,6 @@ pub struct Editor {
searchable: bool,
cursor_shape: CursorShape,
current_line_highlight: Option<CurrentLineHighlight>,
collapse_matches: bool,
autoindent_mode: Option<AutoindentMode>,
workspace: Option<(WeakEntity<Workspace>, Option<WorkspaceId>)>,
input_enabled: bool,
@@ -1191,7 +1190,6 @@ pub struct Editor {
refresh_colors_task: Task<()>,
inlay_hints: Option<LspInlayHintData>,
folding_newlines: Task<()>,
select_next_is_case_sensitive: Option<bool>,
pub lookup_key: Option<Box<dyn Any + Send + Sync>>,
}
@@ -2214,7 +2212,7 @@ impl Editor {
.unwrap_or_default(),
current_line_highlight: None,
autoindent_mode: Some(AutoindentMode::EachLine),
collapse_matches: false,
workspace: None,
input_enabled: !is_minimap,
use_modal_editing: full_mode,
@@ -2335,7 +2333,6 @@ impl Editor {
selection_drag_state: SelectionDragState::None,
folding_newlines: Task::ready(()),
lookup_key: None,
select_next_is_case_sensitive: None,
};
if is_minimap {
@@ -2387,10 +2384,7 @@ impl Editor {
}
}
EditorEvent::Edited { .. } => {
let vim_mode = vim_mode_setting::VimModeSetting::try_get(cx)
.map(|vim_mode| vim_mode.0)
.unwrap_or(false);
if !vim_mode {
if vim_flavor(cx).is_none() {
let display_map = editor.display_snapshot(cx);
let selections = editor.selections.all_adjusted_display(&display_map);
let pop_state = editor
@@ -3015,12 +3009,12 @@ impl Editor {
self.current_line_highlight = current_line_highlight;
}
pub fn set_collapse_matches(&mut self, collapse_matches: bool) {
self.collapse_matches = collapse_matches;
}
pub fn range_for_match<T: std::marker::Copy>(&self, range: &Range<T>) -> Range<T> {
if self.collapse_matches {
pub fn range_for_match<T: std::marker::Copy>(
&self,
range: &Range<T>,
collapse: bool,
) -> Range<T> {
if collapse {
return range.start..range.start;
}
range.clone()
@@ -3453,21 +3447,6 @@ impl Editor {
Subscription::join(other_subscription, this_subscription)
}
fn unfold_buffers_with_selections(&mut self, cx: &mut Context<Self>) {
if self.buffer().read(cx).is_singleton() {
return;
}
let snapshot = self.buffer.read(cx).snapshot(cx);
let buffer_ids: HashSet<BufferId> = self
.selections
.disjoint_anchor_ranges()
.flat_map(|range| snapshot.buffer_ids_for_range(range))
.collect();
for buffer_id in buffer_ids {
self.unfold_buffer(buffer_id, cx);
}
}
/// Changes selections using the provided mutation function. Changes to `self.selections` occur
/// immediately, but when run within `transact` or `with_selection_effects_deferred` other
/// effects of selection change occur at the end of the transaction.
@@ -4080,24 +4059,17 @@ impl Editor {
self.selection_mark_mode = false;
self.selection_drag_state = SelectionDragState::None;
if self.dismiss_menus_and_popups(true, window, cx) {
cx.notify();
return;
}
if self.clear_expanded_diff_hunks(cx) {
cx.notify();
return;
}
if self.show_git_blame_gutter {
self.show_git_blame_gutter = false;
cx.notify();
if self.dismiss_menus_and_popups(true, window, cx) {
return;
}
if self.mode.is_full()
&& self.change_selections(Default::default(), window, cx, |s| s.try_cancel())
{
cx.notify();
return;
}
@@ -4216,8 +4188,6 @@ impl Editor {
self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx);
self.unfold_buffers_with_selections(cx);
let selections = self.selections.all_adjusted(&self.display_snapshot(cx));
let mut bracket_inserted = false;
let mut edits = Vec::new();
@@ -5521,14 +5491,7 @@ impl Editor {
if let Some(CodeContextMenu::Completions(menu)) = self.context_menu.borrow_mut().as_mut() {
if filter_completions {
menu.filter(
query.clone().unwrap_or_default(),
buffer_position.text_anchor,
&buffer,
provider.clone(),
window,
cx,
);
menu.filter(query.clone(), provider.clone(), window, cx);
}
// When `is_incomplete` is false, no need to re-query completions when the current query
// is a suffix of the initial query.
@@ -5537,7 +5500,7 @@ impl Editor {
// If the new query is a suffix of the old query (typing more characters) and
// the previous result was complete, the existing completions can be filtered.
//
// Note that snippet completions are always complete.
// Note that this is always true for snippet completions.
let query_matches = match (&menu.initial_query, &query) {
(Some(initial_query), Some(query)) => query.starts_with(initial_query.as_ref()),
(None, _) => true,
@@ -5667,15 +5630,12 @@ impl Editor {
};
let mut words = if load_word_completions {
cx.background_spawn({
let buffer_snapshot = buffer_snapshot.clone();
async move {
buffer_snapshot.words_in_range(WordsQuery {
fuzzy_contents: None,
range: word_search_range,
skip_digits,
})
}
cx.background_spawn(async move {
buffer_snapshot.words_in_range(WordsQuery {
fuzzy_contents: None,
range: word_search_range,
skip_digits,
})
})
} else {
Task::ready(BTreeMap::default())
@@ -5685,11 +5645,8 @@ impl Editor {
&& provider.show_snippets()
&& let Some(project) = self.project()
{
let char_classifier = buffer_snapshot
.char_classifier_at(buffer_position)
.scope_context(Some(CharScopeContext::Completion));
project.update(cx, |project, cx| {
snippet_completions(project, &buffer, buffer_position, char_classifier, cx)
snippet_completions(project, &buffer, buffer_position, cx)
})
} else {
Task::ready(Ok(CompletionResponse {
@@ -5744,8 +5701,6 @@ impl Editor {
replace_range: word_replace_range.clone(),
new_text: word.clone(),
label: CodeLabel::plain(word, None),
match_start: None,
snippet_deduplication_key: None,
icon_path: None,
documentation: None,
source: CompletionSource::BufferWord {
@@ -5794,12 +5749,11 @@ impl Editor {
);
let query = if filter_completions { query } else { None };
let matches_task = menu.do_async_filtering(
query.unwrap_or_default(),
buffer_position,
&buffer,
cx,
);
let matches_task = if let Some(query) = query {
menu.do_async_filtering(query, cx)
} else {
Task::ready(menu.unfiltered_matches())
};
(menu, matches_task)
}) else {
return;
@@ -5816,7 +5770,7 @@ impl Editor {
return;
};
// Only valid to take prev_menu because either the new menu is immediately set
// Only valid to take prev_menu because it the new menu is immediately set
// below, or the menu is hidden.
if let Some(CodeContextMenu::Completions(prev_menu)) =
editor.context_menu.borrow_mut().take()
@@ -14691,7 +14645,7 @@ impl Editor {
.collect::<String>();
let is_empty = query.is_empty();
let select_state = SelectNextState {
query: self.build_query(&[query], cx)?,
query: AhoCorasick::new(&[query])?,
wordwise: true,
done: is_empty,
};
@@ -14701,7 +14655,7 @@ impl Editor {
}
} else if let Some(selected_text) = selected_text {
self.select_next_state = Some(SelectNextState {
query: self.build_query(&[selected_text], cx)?,
query: AhoCorasick::new(&[selected_text])?,
wordwise: false,
done: false,
});
@@ -14909,7 +14863,7 @@ impl Editor {
.collect::<String>();
let is_empty = query.is_empty();
let select_state = SelectNextState {
query: self.build_query(&[query.chars().rev().collect::<String>()], cx)?,
query: AhoCorasick::new(&[query.chars().rev().collect::<String>()])?,
wordwise: true,
done: is_empty,
};
@@ -14919,8 +14873,7 @@ impl Editor {
}
} else if let Some(selected_text) = selected_text {
self.select_prev_state = Some(SelectNextState {
query: self
.build_query(&[selected_text.chars().rev().collect::<String>()], cx)?,
query: AhoCorasick::new(&[selected_text.chars().rev().collect::<String>()])?,
wordwise: false,
done: false,
});
@@ -14930,25 +14883,6 @@ impl Editor {
Ok(())
}
/// Builds an `AhoCorasick` automaton from the provided patterns, while
/// setting the case sensitivity based on the global
/// `SelectNextCaseSensitive` setting, if set, otherwise based on the
/// editor's settings.
fn build_query<I, P>(&self, patterns: I, cx: &Context<Self>) -> Result<AhoCorasick, BuildError>
where
I: IntoIterator<Item = P>,
P: AsRef<[u8]>,
{
let case_sensitive = self.select_next_is_case_sensitive.map_or_else(
|| EditorSettings::get_global(cx).search.case_sensitive,
|value| value,
);
let mut builder = AhoCorasickBuilder::new();
builder.ascii_case_insensitive(!case_sensitive);
builder.build(patterns)
}
pub fn find_next_match(
&mut self,
_: &FindNextMatch,
@@ -16892,7 +16826,7 @@ impl Editor {
editor.update_in(cx, |editor, window, cx| {
let range = target_range.to_point(target_buffer.read(cx));
let range = editor.range_for_match(&range);
let range = editor.range_for_match(&range, false);
let range = collapse_multiline_range(range);
if !split
@@ -18923,17 +18857,10 @@ impl Editor {
if self.buffer().read(cx).is_singleton() || self.is_buffer_folded(buffer_id, cx) {
return;
}
let folded_excerpts = self.buffer().read(cx).excerpts_for_buffer(buffer_id, cx);
self.display_map.update(cx, |display_map, cx| {
display_map.fold_buffers([buffer_id], cx)
});
let snapshot = self.display_snapshot(cx);
self.selections.change_with(&snapshot, |selections| {
selections.remove_selections_from_buffer(buffer_id);
});
cx.emit(EditorEvent::BufferFoldToggled {
ids: folded_excerpts.iter().map(|&(id, _)| id).collect(),
folded: true,
@@ -21712,9 +21639,7 @@ impl Editor {
.and_then(|e| e.to_str())
.map(|a| a.to_string()));
let vim_mode = vim_mode_setting::VimModeSetting::try_get(cx)
.map(|vim_mode| vim_mode.0)
.unwrap_or(false);
let vim_mode = vim_flavor(cx).is_some();
let edit_predictions_provider = all_language_settings(file, cx).edit_predictions.provider;
let copilot_enabled = edit_predictions_provider
@@ -22349,6 +22274,28 @@ fn edit_for_markdown_paste<'a>(
(range, new_text)
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub enum VimFlavor {
Vim,
Helix,
}
pub fn vim_flavor(cx: &App) -> Option<VimFlavor> {
if vim_mode_setting::HelixModeSetting::try_get(cx)
.map(|helix_mode| helix_mode.0)
.unwrap_or(false)
{
Some(VimFlavor::Helix)
} else if vim_mode_setting::VimModeSetting::try_get(cx)
.map(|vim_mode| vim_mode.0)
.unwrap_or(false)
{
Some(VimFlavor::Vim)
} else {
None // neither vim nor helix mode
}
}
fn process_completion_for_edit(
completion: &Completion,
intent: CompletionIntent,
@@ -23201,11 +23148,10 @@ impl CodeActionProvider for Entity<Project> {
fn snippet_completions(
project: &Project,
buffer: &Entity<Buffer>,
buffer_anchor: text::Anchor,
classifier: CharClassifier,
buffer_position: text::Anchor,
cx: &mut App,
) -> Task<Result<CompletionResponse>> {
let languages = buffer.read(cx).languages_at(buffer_anchor);
let languages = buffer.read(cx).languages_at(buffer_position);
let snippet_store = project.snippets().read(cx);
let scopes: Vec<_> = languages
@@ -23234,146 +23180,97 @@ fn snippet_completions(
let executor = cx.background_executor().clone();
cx.background_spawn(async move {
let is_word_char = |c| classifier.is_word(c);
let mut is_incomplete = false;
let mut completions: Vec<Completion> = Vec::new();
for (scope, snippets) in scopes.into_iter() {
let classifier =
CharClassifier::new(Some(scope)).scope_context(Some(CharScopeContext::Completion));
const MAX_PREFIX_LEN: usize = 128;
let buffer_offset = text::ToOffset::to_offset(&buffer_anchor, &snapshot);
let window_start = buffer_offset.saturating_sub(MAX_PREFIX_LEN);
let window_start = snapshot.clip_offset(window_start, Bias::Left);
const MAX_WORD_PREFIX_LEN: usize = 128;
let last_word: String = snapshot
.reversed_chars_for_range(text::Anchor::MIN..buffer_position)
.take(MAX_WORD_PREFIX_LEN)
.take_while(|c| classifier.is_word(*c))
.collect::<String>()
.chars()
.rev()
.collect();
let max_buffer_window: String = snapshot
.text_for_range(window_start..buffer_offset)
.collect();
if max_buffer_window.is_empty() {
return Ok(CompletionResponse {
completions: vec![],
display_options: CompletionDisplayOptions::default(),
is_incomplete: true,
});
}
for (_scope, snippets) in scopes.into_iter() {
// Sort snippets by word count to match longer snippet prefixes first.
let mut sorted_snippet_candidates = snippets
.iter()
.enumerate()
.flat_map(|(snippet_ix, snippet)| {
snippet
.prefix
.iter()
.enumerate()
.map(move |(prefix_ix, prefix)| {
let word_count =
snippet_candidate_suffixes(prefix, is_word_char).count();
((snippet_ix, prefix_ix), prefix, word_count)
})
})
.collect_vec();
sorted_snippet_candidates
.sort_unstable_by_key(|(_, _, word_count)| Reverse(*word_count));
// Each prefix may be matched multiple times; the completion menu must filter out duplicates.
let buffer_windows = snippet_candidate_suffixes(&max_buffer_window, is_word_char)
.take(
sorted_snippet_candidates
.first()
.map(|(_, _, word_count)| *word_count)
.unwrap_or_default(),
)
.collect_vec();
const MAX_RESULTS: usize = 100;
// Each match also remembers how many characters from the buffer it consumed
let mut matches: Vec<(StringMatch, usize)> = vec![];
let mut snippet_list_cutoff_index = 0;
for (buffer_index, buffer_window) in buffer_windows.iter().enumerate().rev() {
let word_count = buffer_index + 1;
// Increase `snippet_list_cutoff_index` until we have all of the
// snippets with sufficiently many words.
while sorted_snippet_candidates
.get(snippet_list_cutoff_index)
.is_some_and(|(_ix, _prefix, snippet_word_count)| {
*snippet_word_count >= word_count
})
{
snippet_list_cutoff_index += 1;
}
// Take only the candidates with at least `word_count` many words
let snippet_candidates_at_word_len =
&sorted_snippet_candidates[..snippet_list_cutoff_index];
let candidates = snippet_candidates_at_word_len
.iter()
.map(|(_snippet_ix, prefix, _snippet_word_count)| prefix)
.enumerate() // index in `sorted_snippet_candidates`
// First char must match
.filter(|(_ix, prefix)| {
itertools::equal(
prefix
.chars()
.next()
.into_iter()
.flat_map(|c| c.to_lowercase()),
buffer_window
.chars()
.next()
.into_iter()
.flat_map(|c| c.to_lowercase()),
)
})
.map(|(ix, prefix)| StringMatchCandidate::new(ix, prefix))
.collect::<Vec<StringMatchCandidate>>();
matches.extend(
fuzzy::match_strings(
&candidates,
&buffer_window,
buffer_window.chars().any(|c| c.is_uppercase()),
true,
MAX_RESULTS - matches.len(), // always prioritize longer snippets
&Default::default(),
executor.clone(),
)
.await
.into_iter()
.map(|string_match| (string_match, buffer_window.len())),
);
if matches.len() >= MAX_RESULTS {
break;
}
if last_word.is_empty() {
return Ok(CompletionResponse {
completions: vec![],
display_options: CompletionDisplayOptions::default(),
is_incomplete: true,
});
}
let as_offset = text::ToOffset::to_offset(&buffer_position, &snapshot);
let to_lsp = |point: &text::Anchor| {
let end = text::ToPointUtf16::to_point_utf16(point, &snapshot);
point_to_lsp(end)
};
let lsp_end = to_lsp(&buffer_anchor);
let lsp_end = to_lsp(&buffer_position);
let candidates = snippets
.iter()
.enumerate()
.flat_map(|(ix, snippet)| {
snippet
.prefix
.iter()
.map(move |prefix| StringMatchCandidate::new(ix, prefix))
})
.collect::<Vec<StringMatchCandidate>>();
const MAX_RESULTS: usize = 100;
let mut matches = fuzzy::match_strings(
&candidates,
&last_word,
last_word.chars().any(|c| c.is_uppercase()),
true,
MAX_RESULTS,
&Default::default(),
executor.clone(),
)
.await;
if matches.len() >= MAX_RESULTS {
is_incomplete = true;
}
completions.extend(matches.iter().map(|(string_match, buffer_window_len)| {
let ((snippet_index, prefix_index), matching_prefix, _snippet_word_count) =
sorted_snippet_candidates[string_match.candidate_id];
let snippet = &snippets[snippet_index];
let start = buffer_offset - buffer_window_len;
// Remove all candidates where the query's start does not match the start of any word in the candidate
if let Some(query_start) = last_word.chars().next() {
matches.retain(|string_match| {
split_words(&string_match.string).any(|word| {
// Check that the first codepoint of the word as lowercase matches the first
// codepoint of the query as lowercase
word.chars()
.flat_map(|codepoint| codepoint.to_lowercase())
.zip(query_start.to_lowercase())
.all(|(word_cp, query_cp)| word_cp == query_cp)
})
});
}
let matched_strings = matches
.into_iter()
.map(|m| m.string)
.collect::<HashSet<_>>();
completions.extend(snippets.iter().filter_map(|snippet| {
let matching_prefix = snippet
.prefix
.iter()
.find(|prefix| matched_strings.contains(*prefix))?;
let start = as_offset - last_word.len();
let start = snapshot.anchor_before(start);
let range = start..buffer_anchor;
let range = start..buffer_position;
let lsp_start = to_lsp(&start);
let lsp_range = lsp::Range {
start: lsp_start,
end: lsp_end,
};
Completion {
Some(Completion {
replace_range: range,
new_text: snippet.body.clone(),
source: CompletionSource::Lsp {
@@ -23403,11 +23300,7 @@ fn snippet_completions(
}),
lsp_defaults: None,
},
label: CodeLabel {
text: matching_prefix.clone(),
runs: Vec::new(),
filter_range: 0..matching_prefix.len(),
},
label: CodeLabel::plain(matching_prefix.clone(), None),
icon_path: None,
documentation: Some(CompletionDocumentation::SingleLineAndMultiLinePlainText {
single_line: snippet.name.clone().into(),
@@ -23418,10 +23311,8 @@ fn snippet_completions(
}),
insert_text_mode: None,
confirm: None,
match_start: Some(start),
snippet_deduplication_key: Some((snippet_index, prefix_index)),
}
}));
})
}))
}
Ok(CompletionResponse {
@@ -24667,33 +24558,6 @@ pub(crate) fn split_words(text: &str) -> impl std::iter::Iterator<Item = &str> +
})
}
/// Given a string of text immediately before the cursor, iterates over possible
/// strings a snippet could match to. More precisely: returns an iterator over
/// suffixes of `text` created by splitting at word boundaries (before & after
/// every non-word character).
///
/// Shorter suffixes are returned first.
pub(crate) fn snippet_candidate_suffixes(
text: &str,
is_word_char: impl Fn(char) -> bool,
) -> impl std::iter::Iterator<Item = &str> {
let mut prev_index = text.len();
let mut prev_codepoint = None;
text.char_indices()
.rev()
.chain([(0, '\0')])
.filter_map(move |(index, codepoint)| {
let prev_index = std::mem::replace(&mut prev_index, index);
let prev_codepoint = prev_codepoint.replace(codepoint)?;
if is_word_char(prev_codepoint) && is_word_char(codepoint) {
None
} else {
let chunk = &text[prev_index..]; // go to end of string
Some(chunk)
}
})
}
pub trait RangeToAnchorExt: Sized {
fn to_anchors(self, snapshot: &MultiBufferSnapshot) -> Range<Anchor>;

View File

@@ -162,15 +162,10 @@ pub struct DragAndDropSelection {
pub struct SearchSettings {
/// Whether to show the project search button in the status bar.
pub button: bool,
/// Whether to only match on whole words.
pub whole_word: bool,
/// Whether to match case sensitively.
pub case_sensitive: bool,
/// Whether to include gitignored files in search results.
pub include_ignored: bool,
/// Whether to interpret the search query as a regular expression.
pub regex: bool,
/// Whether to center the cursor on each search match when navigating.
pub center_on_match: bool,
}

View File

@@ -44,8 +44,8 @@ use project::{
};
use serde_json::{self, json};
use settings::{
AllLanguageSettingsContent, EditorSettingsContent, IndentGuideBackgroundColoring,
IndentGuideColoring, ProjectSettingsContent, SearchSettingsContent,
AllLanguageSettingsContent, IndentGuideBackgroundColoring, IndentGuideColoring,
ProjectSettingsContent,
};
use std::{cell::RefCell, future::Future, rc::Rc, sync::atomic::AtomicBool, time::Instant};
use std::{
@@ -8314,15 +8314,8 @@ async fn test_add_selection_above_below_multi_cursor_existing_state(cx: &mut Tes
#[gpui::test]
async fn test_select_next(cx: &mut TestAppContext) {
init_test(cx, |_| {});
let mut cx = EditorTestContext::new(cx).await;
// Enable case sensitive search.
update_test_editor_settings(&mut cx, |settings| {
let mut search_settings = SearchSettingsContent::default();
search_settings.case_sensitive = Some(true);
settings.search = Some(search_settings);
});
cx.set_state("abc\nˇabc abc\ndefabc\nabc");
cx.update_editor(|e, window, cx| e.select_next(&SelectNext::default(), window, cx))
@@ -8353,40 +8346,13 @@ async fn test_select_next(cx: &mut TestAppContext) {
cx.update_editor(|e, window, cx| e.select_next(&SelectNext::default(), window, cx))
.unwrap();
cx.assert_editor_state("abc\n«ˇabc» «ˇabc»\ndefabc\nabc");
// Test case sensitivity
cx.set_state("«ˇfoo»\nFOO\nFoo\nfoo");
cx.update_editor(|e, window, cx| {
e.select_next(&SelectNext::default(), window, cx).unwrap();
});
cx.assert_editor_state("«ˇfoo»\nFOO\nFoo\n«ˇfoo»");
// Disable case sensitive search.
update_test_editor_settings(&mut cx, |settings| {
let mut search_settings = SearchSettingsContent::default();
search_settings.case_sensitive = Some(false);
settings.search = Some(search_settings);
});
cx.set_state("«ˇfoo»\nFOO\nFoo");
cx.update_editor(|e, window, cx| {
e.select_next(&SelectNext::default(), window, cx).unwrap();
e.select_next(&SelectNext::default(), window, cx).unwrap();
});
cx.assert_editor_state("«ˇfoo»\n«ˇFOO»\n«ˇFoo»");
}
#[gpui::test]
async fn test_select_all_matches(cx: &mut TestAppContext) {
init_test(cx, |_| {});
let mut cx = EditorTestContext::new(cx).await;
// Enable case sensitive search.
update_test_editor_settings(&mut cx, |settings| {
let mut search_settings = SearchSettingsContent::default();
search_settings.case_sensitive = Some(true);
settings.search = Some(search_settings);
});
let mut cx = EditorTestContext::new(cx).await;
// Test caret-only selections
cx.set_state("abc\nˇabc abc\ndefabc\nabc");
@@ -8432,26 +8398,6 @@ async fn test_select_all_matches(cx: &mut TestAppContext) {
e.set_clip_at_line_ends(false, cx);
});
cx.assert_editor_state("«abcˇ»");
// Test case sensitivity
cx.set_state("fˇoo\nFOO\nFoo");
cx.update_editor(|e, window, cx| {
e.select_all_matches(&SelectAllMatches, window, cx).unwrap();
});
cx.assert_editor_state("«fooˇ»\nFOO\nFoo");
// Disable case sensitive search.
update_test_editor_settings(&mut cx, |settings| {
let mut search_settings = SearchSettingsContent::default();
search_settings.case_sensitive = Some(false);
settings.search = Some(search_settings);
});
cx.set_state("fˇoo\nFOO\nFoo");
cx.update_editor(|e, window, cx| {
e.select_all_matches(&SelectAllMatches, window, cx).unwrap();
});
cx.assert_editor_state("«fooˇ»\n«FOOˇ»\n«Fooˇ»");
}
#[gpui::test]
@@ -8823,15 +8769,8 @@ let foo = «2ˇ»;"#,
#[gpui::test]
async fn test_select_previous_with_single_selection(cx: &mut TestAppContext) {
init_test(cx, |_| {});
let mut cx = EditorTestContext::new(cx).await;
// Enable case sensitive search.
update_test_editor_settings(&mut cx, |settings| {
let mut search_settings = SearchSettingsContent::default();
search_settings.case_sensitive = Some(true);
settings.search = Some(search_settings);
});
cx.set_state("abc\n«ˇabc» abc\ndefabc\nabc");
cx.update_editor(|e, window, cx| e.select_previous(&SelectPrevious::default(), window, cx))
@@ -8856,32 +8795,6 @@ async fn test_select_previous_with_single_selection(cx: &mut TestAppContext) {
cx.update_editor(|e, window, cx| e.select_previous(&SelectPrevious::default(), window, cx))
.unwrap();
cx.assert_editor_state("«ˇabc»\n«ˇabc» «ˇabc»\ndef«ˇabc»\n«ˇabc»");
// Test case sensitivity
cx.set_state("foo\nFOO\nFoo\n«ˇfoo»");
cx.update_editor(|e, window, cx| {
e.select_previous(&SelectPrevious::default(), window, cx)
.unwrap();
e.select_previous(&SelectPrevious::default(), window, cx)
.unwrap();
});
cx.assert_editor_state("«ˇfoo»\nFOO\nFoo\n«ˇfoo»");
// Disable case sensitive search.
update_test_editor_settings(&mut cx, |settings| {
let mut search_settings = SearchSettingsContent::default();
search_settings.case_sensitive = Some(false);
settings.search = Some(search_settings);
});
cx.set_state("foo\nFOO\n«ˇFoo»");
cx.update_editor(|e, window, cx| {
e.select_previous(&SelectPrevious::default(), window, cx)
.unwrap();
e.select_previous(&SelectPrevious::default(), window, cx)
.unwrap();
});
cx.assert_editor_state("«ˇfoo»\n«ˇFOO»\n«ˇFoo»");
}
#[gpui::test]
@@ -11414,53 +11327,6 @@ async fn test_snippet_indentation(cx: &mut TestAppContext) {
ˇ"});
}
#[gpui::test]
async fn test_snippet_with_multi_word_prefix(cx: &mut TestAppContext) {
init_test(cx, |_| {});
let mut cx = EditorTestContext::new(cx).await;
cx.update_editor(|editor, _, cx| {
editor.project().unwrap().update(cx, |project, cx| {
project.snippets().update(cx, |snippets, _cx| {
let snippet = project::snippet_provider::Snippet {
prefix: vec!["multi word".to_string()],
body: "this is many words".to_string(),
description: Some("description".to_string()),
name: "multi-word snippet test".to_string(),
};
snippets.add_snippet_for_test(
None,
PathBuf::from("test_snippets.json"),
vec![Arc::new(snippet)],
);
});
})
});
for (input_to_simulate, should_match_snippet) in [
("m", true),
("m ", true),
("m w", true),
("aa m w", true),
("aa m g", false),
] {
cx.set_state("ˇ");
cx.simulate_input(input_to_simulate); // fails correctly
cx.update_editor(|editor, _, _| {
let Some(CodeContextMenu::Completions(context_menu)) = &*editor.context_menu.borrow()
else {
assert!(!should_match_snippet); // no completions! don't even show the menu
return;
};
assert!(context_menu.visible());
let completions = context_menu.completions.borrow();
assert_eq!(!completions.is_empty(), should_match_snippet);
});
}
}
#[gpui::test]
async fn test_document_format_during_save(cx: &mut TestAppContext) {
init_test(cx, |_| {});
@@ -17416,41 +17282,6 @@ fn test_split_words() {
assert_eq!(split(":do_the_thing"), &[":", "do_", "the_", "thing"]);
}
#[test]
fn test_split_words_for_snippet_prefix() {
fn split(text: &str) -> Vec<&str> {
snippet_candidate_suffixes(text, |c| c.is_alphanumeric() || c == '_').collect()
}
assert_eq!(split("HelloWorld"), &["HelloWorld"]);
assert_eq!(split("hello_world"), &["hello_world"]);
assert_eq!(split("_hello_world_"), &["_hello_world_"]);
assert_eq!(split("Hello_World"), &["Hello_World"]);
assert_eq!(split("helloWOrld"), &["helloWOrld"]);
assert_eq!(split("helloworld"), &["helloworld"]);
assert_eq!(
split("this@is!@#$^many . symbols"),
&[
"symbols",
" symbols",
". symbols",
" . symbols",
" . symbols",
" . symbols",
"many . symbols",
"^many . symbols",
"$^many . symbols",
"#$^many . symbols",
"@#$^many . symbols",
"!@#$^many . symbols",
"is!@#$^many . symbols",
"@is!@#$^many . symbols",
"this@is!@#$^many . symbols",
],
);
assert_eq!(split("a.s"), &["s", ".s", "a.s"]);
}
#[gpui::test]
async fn test_move_to_enclosing_bracket(cx: &mut TestAppContext) {
init_test(cx, |_| {});
@@ -22460,7 +22291,7 @@ async fn test_folding_buffers(cx: &mut TestAppContext) {
assert_eq!(
multi_buffer_editor.update(cx, |editor, cx| editor.display_text(cx)),
"\n\naaaa\nBbbbb\ncccc\n\n\nffff\ngggg\n\n\njjjj\n\n\nllll\nmmmm\nnnnn\n\n\nqqqq\nrrrr\n\n\nuuuu\n\n",
"\n\nB\n\n\n\n\n\n\nllll\nmmmm\nnnnn\n\n\nqqqq\nrrrr\n\n\nuuuu\n\n",
"After unfolding the first buffer, its and 2nd buffer's text should be displayed"
);
@@ -22469,7 +22300,7 @@ async fn test_folding_buffers(cx: &mut TestAppContext) {
});
assert_eq!(
multi_buffer_editor.update(cx, |editor, cx| editor.display_text(cx)),
"\n\naaaa\nBbbbb\ncccc\n\n\nffff\ngggg\n\n\njjjj\n\n\nllll\nmmmm\nnnnn\n\n\nqqqq\nrrrr\n\n\nuuuu\n\n\nvvvv\nwwww\nxxxx\n\n\n1111\n2222\n\n\n5555",
"\n\nB\n\n\n\n\n\n\nllll\nmmmm\nnnnn\n\n\nqqqq\nrrrr\n\n\nuuuu\n\n\nvvvv\nwwww\nxxxx\n\n\n1111\n2222\n\n\n5555",
"After unfolding the all buffers, all original text should be displayed"
);
}
@@ -25702,195 +25533,6 @@ pub fn check_displayed_completions(expected: Vec<&'static str>, cx: &mut EditorL
});
}
#[gpui::test]
async fn test_mixed_completions_with_multi_word_snippet(cx: &mut TestAppContext) {
init_test(cx, |_| {});
let mut cx = EditorLspTestContext::new_rust(
lsp::ServerCapabilities {
completion_provider: Some(lsp::CompletionOptions {
..Default::default()
}),
..Default::default()
},
cx,
)
.await;
cx.lsp
.set_request_handler::<lsp::request::Completion, _, _>(move |_, _| async move {
Ok(Some(lsp::CompletionResponse::Array(vec![
lsp::CompletionItem {
label: "unsafe".into(),
text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
range: lsp::Range {
start: lsp::Position {
line: 0,
character: 9,
},
end: lsp::Position {
line: 0,
character: 11,
},
},
new_text: "unsafe".to_string(),
})),
insert_text_mode: Some(lsp::InsertTextMode::AS_IS),
..Default::default()
},
])))
});
cx.update_editor(|editor, _, cx| {
editor.project().unwrap().update(cx, |project, cx| {
project.snippets().update(cx, |snippets, _cx| {
snippets.add_snippet_for_test(
None,
PathBuf::from("test_snippets.json"),
vec![
Arc::new(project::snippet_provider::Snippet {
prefix: vec![
"unlimited word count".to_string(),
"unlimit word count".to_string(),
"unlimited unknown".to_string(),
],
body: "this is many words".to_string(),
description: Some("description".to_string()),
name: "multi-word snippet test".to_string(),
}),
Arc::new(project::snippet_provider::Snippet {
prefix: vec!["unsnip".to_string(), "@few".to_string()],
body: "fewer words".to_string(),
description: Some("alt description".to_string()),
name: "other name".to_string(),
}),
Arc::new(project::snippet_provider::Snippet {
prefix: vec!["ab aa".to_string()],
body: "abcd".to_string(),
description: None,
name: "alphabet".to_string(),
}),
],
);
});
})
});
let get_completions = |cx: &mut EditorLspTestContext| {
cx.update_editor(|editor, _, _| match &*editor.context_menu.borrow() {
Some(CodeContextMenu::Completions(context_menu)) => {
let entries = context_menu.entries.borrow();
entries
.iter()
.map(|entry| entry.string.clone())
.collect_vec()
}
_ => vec![],
})
};
// snippets:
// @foo
// foo bar
//
// when typing:
//
// when typing:
// - if I type a symbol "open the completions with snippets only"
// - if I type a word character "open the completions menu" (if it had been open snippets only, clear it out)
//
// stuff we need:
// - filtering logic change?
// - remember how far back the completion started.
let test_cases: &[(&str, &[&str])] = &[
(
"un",
&[
"unsafe",
"unlimit word count",
"unlimited unknown",
"unlimited word count",
"unsnip",
],
),
(
"u ",
&[
"unlimit word count",
"unlimited unknown",
"unlimited word count",
],
),
("u a", &["ab aa", "unsafe"]), // unsAfe
(
"u u",
&[
"unsafe",
"unlimit word count",
"unlimited unknown", // ranked highest among snippets
"unlimited word count",
"unsnip",
],
),
("uw c", &["unlimit word count", "unlimited word count"]),
(
"u w",
&[
"unlimit word count",
"unlimited word count",
"unlimited unknown",
],
),
("u w ", &["unlimit word count", "unlimited word count"]),
(
"u ",
&[
"unlimit word count",
"unlimited unknown",
"unlimited word count",
],
),
("wor", &[]),
("uf", &["unsafe"]),
("af", &["unsafe"]),
("afu", &[]),
(
"ue",
&["unsafe", "unlimited unknown", "unlimited word count"],
),
("@", &["@few"]),
("@few", &["@few"]),
("@ ", &[]),
("a@", &["@few"]),
("a@f", &["@few", "unsafe"]),
("a@fw", &["@few"]),
("a", &["ab aa", "unsafe"]),
("aa", &["ab aa"]),
("aaa", &["ab aa"]),
("ab", &["ab aa"]),
("ab ", &["ab aa"]),
("ab a", &["ab aa", "unsafe"]),
("ab ab", &["ab aa"]),
("ab ab aa", &["ab aa"]),
];
for &(input_to_simulate, expected_completions) in test_cases {
cx.set_state("fn a() { ˇ }\n");
for c in input_to_simulate.split("") {
cx.simulate_input(c);
cx.run_until_parked();
}
let expected_completions = expected_completions
.iter()
.map(|s| s.to_string())
.collect_vec();
assert_eq!(
get_completions(&mut cx),
expected_completions,
"< actual / expected >, input = {input_to_simulate:?}",
);
}
}
/// Handle completion request passing a marked string specifying where the completion
/// should be triggered from using '|' character, what range should be replaced, and what completions
/// should be returned using '<' and '>' to delimit the range.
@@ -26075,17 +25717,6 @@ pub(crate) fn update_test_project_settings(
});
}
pub(crate) fn update_test_editor_settings(
cx: &mut TestAppContext,
f: impl Fn(&mut EditorSettingsContent),
) {
cx.update(|cx| {
SettingsStore::update_global(cx, |store, cx| {
store.update_user_settings(cx, |settings| f(&mut settings.editor));
})
})
}
pub(crate) fn init_test(cx: &mut TestAppContext, f: fn(&mut AllLanguageSettingsContent)) {
cx.update(|cx| {
assets::Assets.load_test_fonts(cx);
@@ -27724,213 +27355,3 @@ async fn test_next_prev_reference(cx: &mut TestAppContext) {
_move(Direction::Prev, 2, &mut cx).await;
cx.assert_editor_state(CYCLE_POSITIONS[1]);
}
#[gpui::test]
async fn test_multibuffer_selections_with_folding(cx: &mut TestAppContext) {
init_test(cx, |_| {});
let (editor, cx) = cx.add_window_view(|window, cx| {
let multi_buffer = MultiBuffer::build_multi(
[
("1\n2\n3\n", vec![Point::row_range(0..3)]),
("1\n2\n3\n", vec![Point::row_range(0..3)]),
],
cx,
);
Editor::new(EditorMode::full(), multi_buffer, None, window, cx)
});
let mut cx = EditorTestContext::for_editor_in(editor.clone(), cx).await;
let buffer_ids = cx.multibuffer(|mb, _| mb.excerpt_buffer_ids());
cx.assert_excerpts_with_selections(indoc! {"
[EXCERPT]
ˇ1
2
3
[EXCERPT]
1
2
3
"});
// Scenario 1: Unfolded buffers, position cursor on "2", select all matches, then insert
cx.update_editor(|editor, window, cx| {
editor.change_selections(None.into(), window, cx, |s| {
s.select_ranges([2..3]);
});
});
cx.assert_excerpts_with_selections(indoc! {"
[EXCERPT]
1
3
[EXCERPT]
1
2
3
"});
cx.update_editor(|editor, window, cx| {
editor
.select_all_matches(&SelectAllMatches, window, cx)
.unwrap();
});
cx.assert_excerpts_with_selections(indoc! {"
[EXCERPT]
1
3
[EXCERPT]
1
3
"});
cx.update_editor(|editor, window, cx| {
editor.handle_input("X", window, cx);
});
cx.assert_excerpts_with_selections(indoc! {"
[EXCERPT]
1
3
[EXCERPT]
1
3
"});
// Scenario 2: Select "2", then fold second buffer before insertion
cx.update_multibuffer(|mb, cx| {
for buffer_id in buffer_ids.iter() {
let buffer = mb.buffer(*buffer_id).unwrap();
buffer.update(cx, |buffer, cx| {
buffer.edit([(0..buffer.len(), "1\n2\n3\n")], None, cx);
});
}
});
// Select "2" and select all matches
cx.update_editor(|editor, window, cx| {
editor.change_selections(None.into(), window, cx, |s| {
s.select_ranges([2..3]);
});
editor
.select_all_matches(&SelectAllMatches, window, cx)
.unwrap();
});
// Fold second buffer - should remove selections from folded buffer
cx.update_editor(|editor, _, cx| {
editor.fold_buffer(buffer_ids[1], cx);
});
cx.assert_excerpts_with_selections(indoc! {"
[EXCERPT]
1
3
[EXCERPT]
[FOLDED]
"});
// Insert text - should only affect first buffer
cx.update_editor(|editor, window, cx| {
editor.handle_input("Y", window, cx);
});
cx.update_editor(|editor, _, cx| {
editor.unfold_buffer(buffer_ids[1], cx);
});
cx.assert_excerpts_with_selections(indoc! {"
[EXCERPT]
1
3
[EXCERPT]
1
2
3
"});
// Scenario 3: Select "2", then fold first buffer before insertion
cx.update_multibuffer(|mb, cx| {
for buffer_id in buffer_ids.iter() {
let buffer = mb.buffer(*buffer_id).unwrap();
buffer.update(cx, |buffer, cx| {
buffer.edit([(0..buffer.len(), "1\n2\n3\n")], None, cx);
});
}
});
// Select "2" and select all matches
cx.update_editor(|editor, window, cx| {
editor.change_selections(None.into(), window, cx, |s| {
s.select_ranges([2..3]);
});
editor
.select_all_matches(&SelectAllMatches, window, cx)
.unwrap();
});
// Fold first buffer - should remove selections from folded buffer
cx.update_editor(|editor, _, cx| {
editor.fold_buffer(buffer_ids[0], cx);
});
cx.assert_excerpts_with_selections(indoc! {"
[EXCERPT]
[FOLDED]
[EXCERPT]
1
3
"});
// Insert text - should only affect second buffer
cx.update_editor(|editor, window, cx| {
editor.handle_input("Z", window, cx);
});
cx.update_editor(|editor, _, cx| {
editor.unfold_buffer(buffer_ids[0], cx);
});
cx.assert_excerpts_with_selections(indoc! {"
[EXCERPT]
1
2
3
[EXCERPT]
1
3
"});
// Edge case scenario: fold all buffers, then try to insert
cx.update_editor(|editor, _, cx| {
editor.fold_buffer(buffer_ids[0], cx);
editor.fold_buffer(buffer_ids[1], cx);
});
cx.assert_excerpts_with_selections(indoc! {"
[EXCERPT]
ˇ[FOLDED]
[EXCERPT]
[FOLDED]
"});
// Insert should work via default selection
cx.update_editor(|editor, window, cx| {
editor.handle_input("W", window, cx);
});
cx.update_editor(|editor, _, cx| {
editor.unfold_buffer(buffer_ids[0], cx);
editor.unfold_buffer(buffer_ids[1], cx);
});
cx.assert_excerpts_with_selections(indoc! {"
[EXCERPT]
Wˇ1
2
3
[EXCERPT]
1
Z
3
"});
}

View File

@@ -6,10 +6,10 @@ use crate::{
EditDisplayMode, EditPrediction, Editor, EditorMode, EditorSettings, EditorSnapshot,
EditorStyle, FILE_HEADER_HEIGHT, FocusedBlock, GutterDimensions, HalfPageDown, HalfPageUp,
HandleInput, HoveredCursor, InlayHintRefreshReason, JumpData, LineDown, LineHighlight, LineUp,
MAX_LINE_LEN, MINIMAP_FONT_SIZE, MULTI_BUFFER_EXCERPT_HEADER_HEIGHT, OpenExcerpts, PageDown,
PageUp, PhantomBreakpointIndicator, Point, RowExt, RowRangeExt, SelectPhase,
SelectedTextHighlight, Selection, SelectionDragState, SelectionEffects, SizingBehavior,
SoftWrap, StickyHeaderExcerpt, ToPoint, ToggleFold, ToggleFoldAll,
MAX_LINE_LEN, MINIMAP_FONT_SIZE, MULTI_BUFFER_EXCERPT_HEADER_HEIGHT, OpenExcerpts,
OpenExcerptsSplit, PageDown, PageUp, PhantomBreakpointIndicator, Point, RowExt, RowRangeExt,
SelectPhase, SelectedTextHighlight, Selection, SelectionDragState, SelectionEffects,
SizingBehavior, SoftWrap, StickyHeaderExcerpt, ToPoint, ToggleFold, ToggleFoldAll,
code_context_menus::{CodeActionsMenu, MENU_ASIDE_MAX_WIDTH, MENU_ASIDE_MIN_WIDTH, MENU_GAP},
display_map::{
Block, BlockContext, BlockStyle, ChunkRendererId, DisplaySnapshot, EditorMargins,
@@ -4042,17 +4042,24 @@ impl EditorElement {
)
.group_hover("", |div| div.underline()),
)
.on_click(window.listener_for(&self.editor, {
let jump_data = jump_data.clone();
move |editor, e: &ClickEvent, window, cx| {
editor.open_excerpts_common(
Some(jump_data.clone()),
e.modifiers().secondary(),
window,
cx,
);
.on_click({
let focus_handle = focus_handle.clone();
move |event, window, cx| {
if event.modifiers().secondary() {
focus_handle.dispatch_action(
&OpenExcerptsSplit,
window,
cx,
);
} else {
focus_handle.dispatch_action(
&OpenExcerpts,
window,
cx,
);
}
}
})),
}),
)
.when_some(parent_path, |then, path| {
then.child(div().child(path).text_color(
@@ -4080,17 +4087,24 @@ impl EditorElement {
cx,
)),
)
.on_click(window.listener_for(&self.editor, {
let jump_data = jump_data.clone();
move |editor, e: &ClickEvent, window, cx| {
editor.open_excerpts_common(
Some(jump_data.clone()),
e.modifiers().secondary(),
window,
cx,
);
.on_click({
let focus_handle = focus_handle.clone();
move |event, window, cx| {
if event.modifiers().secondary() {
focus_handle.dispatch_action(
&OpenExcerptsSplit,
window,
cx,
);
} else {
focus_handle.dispatch_action(
&OpenExcerpts,
window,
cx,
);
}
}
})),
}),
)
},
)
@@ -8606,7 +8620,7 @@ impl LineWithInvisibles {
let fragment_end_x = fragment_start_x + shaped_line.width;
if x < fragment_end_x {
return Some(
fragment_start_index + shaped_line.index_for_x(x - fragment_start_x)?,
fragment_start_index + shaped_line.index_for_x(x - fragment_start_x),
);
}
fragment_start_x = fragment_end_x;

View File

@@ -1,5 +1,4 @@
use std::{
collections::hash_map,
ops::{ControlFlow, Range},
time::Duration,
};
@@ -779,7 +778,6 @@ impl Editor {
}
let excerpts = self.buffer.read(cx).excerpt_ids();
let mut inserted_hint_text = HashMap::default();
let hints_to_insert = new_hints
.into_iter()
.filter_map(|(chunk_range, hints_result)| {
@@ -806,35 +804,8 @@ impl Editor {
}
}
})
.flat_map(|new_hints| {
let mut hints_deduplicated = Vec::new();
if new_hints.len() > 1 {
for (server_id, new_hints) in new_hints {
for (new_id, new_hint) in new_hints {
let hints_text_for_position = inserted_hint_text
.entry(new_hint.position)
.or_insert_with(HashMap::default);
let insert =
match hints_text_for_position.entry(new_hint.text().to_string()) {
hash_map::Entry::Occupied(o) => o.get() == &server_id,
hash_map::Entry::Vacant(v) => {
v.insert(server_id);
true
}
};
if insert {
hints_deduplicated.push((new_id, new_hint));
}
}
}
} else {
hints_deduplicated.extend(new_hints.into_values().flatten());
}
hints_deduplicated
})
.flat_map(|hints| hints.into_values())
.flatten()
.filter_map(|(hint_id, lsp_hint)| {
if inlay_hints.allowed_hint_kinds.contains(&lsp_hint.kind)
&& inlay_hints
@@ -3761,7 +3732,6 @@ let c = 3;"#
let mut fake_servers = language_registry.register_fake_lsp(
"Rust",
FakeLspAdapter {
name: "rust-analyzer",
capabilities: lsp::ServerCapabilities {
inlay_hint_provider: Some(lsp::OneOf::Left(true)),
..lsp::ServerCapabilities::default()
@@ -3834,78 +3804,6 @@ let c = 3;"#
},
);
// Add another server that does send the same, duplicate hints back
let mut fake_servers_2 = language_registry.register_fake_lsp(
"Rust",
FakeLspAdapter {
name: "CrabLang-ls",
capabilities: lsp::ServerCapabilities {
inlay_hint_provider: Some(lsp::OneOf::Left(true)),
..lsp::ServerCapabilities::default()
},
initializer: Some(Box::new(move |fake_server| {
fake_server.set_request_handler::<lsp::request::InlayHintRequest, _, _>(
move |params, _| async move {
if params.text_document.uri
== lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap()
{
Ok(Some(vec![
lsp::InlayHint {
position: lsp::Position::new(1, 9),
label: lsp::InlayHintLabel::String(": i32".to_owned()),
kind: Some(lsp::InlayHintKind::TYPE),
text_edits: None,
tooltip: None,
padding_left: None,
padding_right: None,
data: None,
},
lsp::InlayHint {
position: lsp::Position::new(19, 9),
label: lsp::InlayHintLabel::String(": i33".to_owned()),
kind: Some(lsp::InlayHintKind::TYPE),
text_edits: None,
tooltip: None,
padding_left: None,
padding_right: None,
data: None,
},
]))
} else if params.text_document.uri
== lsp::Uri::from_file_path(path!("/a/lib.rs")).unwrap()
{
Ok(Some(vec![
lsp::InlayHint {
position: lsp::Position::new(1, 10),
label: lsp::InlayHintLabel::String(": i34".to_owned()),
kind: Some(lsp::InlayHintKind::TYPE),
text_edits: None,
tooltip: None,
padding_left: None,
padding_right: None,
data: None,
},
lsp::InlayHint {
position: lsp::Position::new(29, 10),
label: lsp::InlayHintLabel::String(": i35".to_owned()),
kind: Some(lsp::InlayHintKind::TYPE),
text_edits: None,
tooltip: None,
padding_left: None,
padding_right: None,
data: None,
},
]))
} else {
panic!("Unexpected file path {:?}", params.text_document.uri);
}
},
);
})),
..FakeLspAdapter::default()
},
);
let (buffer_1, _handle_1) = project
.update(cx, |project, cx| {
project.open_local_buffer_with_lsp(path!("/a/main.rs"), cx)
@@ -3949,7 +3847,6 @@ let c = 3;"#
});
let fake_server = fake_servers.next().await.unwrap();
let _fake_server_2 = fake_servers_2.next().await.unwrap();
cx.executor().advance_clock(Duration::from_millis(100));
cx.executor().run_until_parked();
@@ -3958,16 +3855,11 @@ let c = 3;"#
assert_eq!(
vec![
": i32".to_string(),
": i32".to_string(),
": i33".to_string(),
": i33".to_string(),
": i34".to_string(),
": i34".to_string(),
": i35".to_string(),
": i35".to_string(),
],
sorted_cached_hint_labels(editor, cx),
"We receive duplicate hints from 2 servers and cache them all"
);
assert_eq!(
vec![
@@ -3977,7 +3869,7 @@ let c = 3;"#
": i33".to_string(),
],
visible_hint_labels(editor, cx),
"lib.rs is added before main.rs , so its excerpts should be visible first; hints should be deduplicated per label"
"lib.rs is added before main.rs , so its excerpts should be visible first"
);
})
.unwrap();
@@ -4027,12 +3919,8 @@ let c = 3;"#
assert_eq!(
vec![
": i32".to_string(),
": i32".to_string(),
": i33".to_string(),
": i33".to_string(),
": i34".to_string(),
": i34".to_string(),
": i35".to_string(),
": i35".to_string(),
],
sorted_cached_hint_labels(editor, cx),

View File

@@ -1586,11 +1586,12 @@ impl SearchableItem for Editor {
&mut self,
index: usize,
matches: &[Range<Anchor>],
collapse: bool,
window: &mut Window,
cx: &mut Context<Self>,
) {
self.unfold_ranges(&[matches[index].clone()], false, true, cx);
let range = self.range_for_match(&matches[index]);
let range = self.range_for_match(&matches[index], collapse);
let autoscroll = if EditorSettings::get_global(cx).search.center_on_match {
Autoscroll::center()
} else {
@@ -1795,14 +1796,6 @@ impl SearchableItem for Editor {
fn search_bar_visibility_changed(&mut self, _: bool, _: &mut Window, _: &mut Context<Self>) {
self.expect_bounds_change = self.last_bounds;
}
fn set_search_is_case_sensitive(
&mut self,
case_sensitive: Option<bool>,
_cx: &mut Context<Self>,
) {
self.select_next_is_case_sensitive = case_sensitive;
}
}
pub fn active_match_index(

View File

@@ -372,7 +372,7 @@ impl SelectionsCollection {
let is_empty = positions.start == positions.end;
let line_len = display_map.line_len(row);
let line = display_map.layout_row(row, text_layout_details);
let start_col = line.closest_index_for_x(positions.start) as u32;
let start_col = line.index_for_x(positions.start) as u32;
let (start, end) = if is_empty {
let point = DisplayPoint::new(row, std::cmp::min(start_col, line_len));
@@ -382,7 +382,7 @@ impl SelectionsCollection {
return None;
}
let start = DisplayPoint::new(row, start_col);
let end_col = line.closest_index_for_x(positions.end) as u32;
let end_col = line.index_for_x(positions.end) as u32;
let end = DisplayPoint::new(row, end_col);
(start, end)
};
@@ -487,43 +487,6 @@ impl<'snap, 'a> MutableSelectionsCollection<'snap, 'a> {
self.selections_changed |= changed;
}
pub fn remove_selections_from_buffer(&mut self, buffer_id: language::BufferId) {
let mut changed = false;
let filtered_selections: Arc<[Selection<Anchor>]> = {
self.disjoint
.iter()
.filter(|selection| {
if let Some(selection_buffer_id) =
self.snapshot.buffer_id_for_anchor(selection.start)
{
let should_remove = selection_buffer_id == buffer_id;
changed |= should_remove;
!should_remove
} else {
true
}
})
.cloned()
.collect()
};
if filtered_selections.is_empty() {
let default_anchor = self.snapshot.anchor_before(0);
self.collection.disjoint = Arc::from([Selection {
id: post_inc(&mut self.collection.next_selection_id),
start: default_anchor,
end: default_anchor,
reversed: false,
goal: SelectionGoal::None,
}]);
} else {
self.collection.disjoint = filtered_selections;
}
self.selections_changed |= changed;
}
pub fn clear_pending(&mut self) {
if self.collection.pending.is_some() {
self.collection.pending = None;

View File

@@ -59,17 +59,6 @@ impl EditorTestContext {
})
.await
.unwrap();
let language = project
.read_with(cx, |project, _cx| {
project.languages().language_for_name("Plain Text")
})
.await
.unwrap();
buffer.update(cx, |buffer, cx| {
buffer.set_language(Some(language), cx);
});
let editor = cx.add_window(|window, cx| {
let editor = build_editor_with_project(
project,

View File

@@ -463,8 +463,8 @@ pub fn find_model(
.ok_or_else(|| {
anyhow::anyhow!(
"No language model with ID {}/{} was available. Available models: {}",
selected.provider.0,
selected.model.0,
selected.provider.0,
model_registry
.available_models(cx)
.map(|model| format!("{}/{}", model.provider_id().0, model.id().0))

View File

@@ -553,7 +553,6 @@ impl ExampleInstance {
role: Role::User,
content: vec![MessageContent::Text(to_prompt(assertion.description))],
cache: false,
reasoning_details: None,
}],
temperature: None,
tools: Vec::new(),
@@ -1253,8 +1252,7 @@ pub fn response_events_to_markdown(
Ok(
LanguageModelCompletionEvent::UsageUpdate(_)
| LanguageModelCompletionEvent::StartMessage { .. }
| LanguageModelCompletionEvent::StatusUpdate(_)
| LanguageModelCompletionEvent::ReasoningDetails(_),
| LanguageModelCompletionEvent::StatusUpdate { .. },
) => {}
Ok(LanguageModelCompletionEvent::ToolUseJsonParseError {
json_parse_error, ..
@@ -1339,9 +1337,8 @@ impl ThreadDialog {
// Skip these
Ok(LanguageModelCompletionEvent::UsageUpdate(_))
| Ok(LanguageModelCompletionEvent::RedactedThinking { .. })
| Ok(LanguageModelCompletionEvent::StatusUpdate(_))
| Ok(LanguageModelCompletionEvent::StatusUpdate { .. })
| Ok(LanguageModelCompletionEvent::StartMessage { .. })
| Ok(LanguageModelCompletionEvent::ReasoningDetails(_))
| Ok(LanguageModelCompletionEvent::Stop(_)) => {}
Ok(LanguageModelCompletionEvent::ToolUseJsonParseError {
@@ -1369,7 +1366,6 @@ impl ThreadDialog {
role: Role::Assistant,
content,
cache: false,
reasoning_details: None,
})
} else {
None

View File

@@ -267,9 +267,10 @@ impl ExtensionManifest {
let mut extension_manifest_path = extension_dir.join("extension.json");
if fs.is_file(&extension_manifest_path).await {
let manifest_content = fs.load(&extension_manifest_path).await.with_context(|| {
format!("loading {extension_name} extension.json, {extension_manifest_path:?}")
})?;
let manifest_content = fs
.load(&extension_manifest_path)
.await
.with_context(|| format!("failed to load {extension_name} extension.json"))?;
let manifest_json = serde_json::from_str::<OldExtensionManifest>(&manifest_content)
.with_context(|| {
format!("invalid extension.json for extension {extension_name}")
@@ -278,9 +279,10 @@ impl ExtensionManifest {
Ok(manifest_from_old_manifest(manifest_json, extension_name))
} else {
extension_manifest_path.set_extension("toml");
let manifest_content = fs.load(&extension_manifest_path).await.with_context(|| {
format!("loading {extension_name} extension.toml, {extension_manifest_path:?}")
})?;
let manifest_content = fs
.load(&extension_manifest_path)
.await
.with_context(|| format!("failed to load {extension_name} extension.toml"))?;
toml::from_str(&manifest_content).map_err(|err| {
anyhow!("Invalid extension.toml for extension {extension_name}:\n{err}")
})

View File

@@ -537,6 +537,7 @@ fn wasm_engine(executor: &BackgroundExecutor) -> wasmtime::Engine {
let engine_ref = engine.weak();
executor
.spawn(async move {
IS_WASM_THREAD.with(|v| v.store(true, Ordering::Release));
// Somewhat arbitrary interval, as it isn't a guaranteed interval.
// But this is a rough upper bound for how long the extension execution can block on
// `Future::poll`.
@@ -642,12 +643,6 @@ impl WasmHost {
let (tx, mut rx) = mpsc::unbounded::<ExtensionCall>();
let extension_task = async move {
// note: Setting the thread local here will slowly "poison" all tokio threads
// causing us to not record their panics any longer.
//
// This is fine though, the main zed binary only uses tokio for livekit and wasm extensions.
// Livekit seldom (if ever) panics 🤞 so the likelihood of us missing a panic in sentry is very low.
IS_WASM_THREAD.with(|v| v.store(true, Ordering::Release));
while let Some(call) = rx.next().await {
(call)(&mut extension, &mut store).await;
}
@@ -664,8 +659,8 @@ impl WasmHost {
cx.spawn(async move |cx| {
let (extension_task, manifest, work_dir, tx, zed_api_version) =
cx.background_executor().spawn(load_extension_task).await?;
// we need to run run the task in a tokio context as wasmtime_wasi may
// call into tokio, accessing its runtime handle when we trigger the `engine.increment_epoch()` above.
// we need to run run the task in an extension context as wasmtime_wasi may
// call into tokio, accessing its runtime handle
let task = Arc::new(gpui_tokio::Tokio::spawn(cx, extension_task)?);
Ok(WasmExtension {
@@ -768,17 +763,17 @@ impl WasmExtension {
.fs
.open_sync(&path)
.await
.context(format!("opening wasm file, path: {path:?}"))?;
.context("failed to open wasm file")?;
let mut wasm_bytes = Vec::new();
wasm_file
.read_to_end(&mut wasm_bytes)
.context(format!("reading wasm file, path: {path:?}"))?;
.context("failed to read wasm")?;
wasm_host
.load_extension(wasm_bytes, manifest, cx)
.await
.with_context(|| format!("loading wasm extension: {}", manifest.id))
.with_context(|| format!("failed to load wasm extension {}", manifest.id))
}
pub async fn call<T, Fn>(&self, f: Fn) -> Result<T>

View File

@@ -75,7 +75,6 @@ const SUGGESTIONS_BY_EXTENSION_ID: &[(&str, &[&str])] = &[
("vue", &["vue"]),
("wgsl", &["wgsl"]),
("wit", &["wit"]),
("xml", &["xml"]),
("zig", &["zig"]),
];

View File

@@ -3452,99 +3452,3 @@ async fn test_paths_with_starting_slash(cx: &mut TestAppContext) {
assert_eq!(active_editor.read(cx).title(cx), "file1.txt");
});
}
#[gpui::test]
async fn test_clear_navigation_history(cx: &mut TestAppContext) {
let app_state = init_test(cx);
app_state
.fs
.as_fake()
.insert_tree(
path!("/src"),
json!({
"test": {
"first.rs": "// First file",
"second.rs": "// Second file",
"third.rs": "// Third file",
}
}),
)
.await;
let project = Project::test(app_state.fs.clone(), [path!("/src").as_ref()], cx).await;
let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx));
workspace.update_in(cx, |_workspace, window, cx| window.focused(cx));
// Open some files to generate navigation history
open_close_queried_buffer("fir", 1, "first.rs", &workspace, cx).await;
open_close_queried_buffer("sec", 1, "second.rs", &workspace, cx).await;
let history_before_clear =
open_close_queried_buffer("thi", 1, "third.rs", &workspace, cx).await;
assert_eq!(
history_before_clear.len(),
2,
"Should have history items before clearing"
);
// Verify that file finder shows history items
let picker = open_file_picker(&workspace, cx);
cx.simulate_input("fir");
picker.update(cx, |finder, _| {
let matches = collect_search_matches(finder);
assert!(
!matches.history.is_empty(),
"File finder should show history items before clearing"
);
});
workspace.update_in(cx, |_, window, cx| {
window.dispatch_action(menu::Cancel.boxed_clone(), cx);
});
// Verify navigation state before clear
workspace.update(cx, |workspace, cx| {
let pane = workspace.active_pane();
pane.read(cx).can_navigate_backward()
});
// Clear navigation history
cx.dispatch_action(workspace::ClearNavigationHistory);
// Verify that navigation is disabled immediately after clear
workspace.update(cx, |workspace, cx| {
let pane = workspace.active_pane();
assert!(
!pane.read(cx).can_navigate_backward(),
"Should not be able to navigate backward after clearing history"
);
assert!(
!pane.read(cx).can_navigate_forward(),
"Should not be able to navigate forward after clearing history"
);
});
// Verify that file finder no longer shows history items
let picker = open_file_picker(&workspace, cx);
cx.simulate_input("fir");
picker.update(cx, |finder, _| {
let matches = collect_search_matches(finder);
assert!(
matches.history.is_empty(),
"File finder should not show history items after clearing"
);
});
workspace.update_in(cx, |_, window, cx| {
window.dispatch_action(menu::Cancel.boxed_clone(), cx);
});
// Verify history is empty by opening a new file
// (this should not show any previous history)
let history_after_clear =
open_close_queried_buffer("sec", 1, "second.rs", &workspace, cx).await;
assert_eq!(
history_after_clear.len(),
0,
"Should have no history items after clearing"
);
}

View File

@@ -399,12 +399,7 @@ impl PickerDelegate for OpenPathDelegate {
}
})
.unwrap_or(false);
let current_dir_in_new_entries = new_entries
.iter()
.any(|entry| &entry.path.string == current_dir);
if should_prepend_with_current_dir && !current_dir_in_new_entries {
if should_prepend_with_current_dir {
new_entries.insert(
0,
CandidateInfo {

View File

@@ -372,9 +372,7 @@ unsafe extern "C" {
pub fn FSEventsGetCurrentEventId() -> u64;
}
// These tests are disabled by default because they seem to be unresolvably flaky.
// Feel free to bring them back to help test this code
#[cfg(false)]
#[cfg(test)]
mod tests {
use super::*;
use std::{fs, sync::mpsc, thread, time::Duration};

View File

@@ -371,7 +371,6 @@ impl GitPanel {
let is_sort_by_path = GitPanelSettings::get_global(cx).sort_by_path;
if is_sort_by_path != was_sort_by_path {
this.entries.clear();
this.bulk_staging.take();
this.update_visible_entries(window, cx);
}
was_sort_by_path = is_sort_by_path
@@ -410,10 +409,17 @@ impl GitPanel {
}
GitStoreEvent::RepositoryUpdated(
_,
RepositoryEvent::StatusesChanged
RepositoryEvent::StatusesChanged { full_scan: true }
| RepositoryEvent::BranchChanged
| RepositoryEvent::MergeHeadsChanged,
true,
) => {
this.schedule_update(window, cx);
}
GitStoreEvent::RepositoryUpdated(
_,
RepositoryEvent::StatusesChanged { full_scan: false },
true,
)
| GitStoreEvent::RepositoryAdded
| GitStoreEvent::RepositoryRemoved(_) => {
@@ -1218,18 +1224,14 @@ impl GitPanel {
let Some(active_repository) = self.active_repository.as_ref() else {
return;
};
let repo = active_repository.read(cx);
let (stage, repo_paths) = match entry {
GitListEntry::Status(status_entry) => {
let repo_paths = vec![status_entry.clone()];
let stage = if repo
let stage = if active_repository
.read(cx)
.pending_ops_for_path(&status_entry.repo_path)
.map(|ops| ops.staging() || ops.staged())
.or_else(|| {
repo.status_for_path(&status_entry.repo_path)
.map(|status| status.status.staging().has_staged())
})
.unwrap_or(status_entry.staging.has_staged())
.unwrap_or(status_entry.status.staging().has_staged())
{
if let Some(op) = self.bulk_staging.clone()
&& op.anchor == status_entry.repo_path
@@ -1245,12 +1247,13 @@ impl GitPanel {
}
GitListEntry::Header(section) => {
let goal_staged_state = !self.header_state(section.header).selected();
let repository = active_repository.read(cx);
let entries = self
.entries
.iter()
.filter_map(|entry| entry.status_entry())
.filter(|status_entry| {
section.contains(status_entry, repo)
section.contains(status_entry, repository)
&& status_entry.staging.as_bool() != Some(goal_staged_state)
})
.cloned()
@@ -1864,7 +1867,6 @@ impl GitPanel {
role: Role::User,
content: vec![content.into()],
cache: false,
reasoning_details: None,
}],
tools: Vec::new(),
tool_choice: None,
@@ -2682,8 +2684,14 @@ impl GitPanel {
self.single_staged_entry = single_staged_entry;
}
}
} else if repo.pending_ops_summary().item_summary.staging_count == 1 {
self.single_staged_entry = repo.pending_ops().find_map(|ops| {
} else if repo
.pending_ops_by_path
.summary()
.item_summary
.staging_count
== 1
{
self.single_staged_entry = repo.pending_ops_by_path.iter().find_map(|ops| {
if ops.staging() {
repo.status_for_path(&ops.repo_path)
.map(|status| GitStatusEntry {
@@ -3216,12 +3224,18 @@ impl GitPanel {
) -> Option<impl IntoElement> {
self.active_repository.as_ref()?;
let (text, action, stage, tooltip) =
if self.total_staged_count() == self.entry_count && self.entry_count > 0 {
("Unstage All", UnstageAll.boxed_clone(), false, "git reset")
} else {
("Stage All", StageAll.boxed_clone(), true, "git add --all")
};
let text;
let action;
let tooltip;
if self.total_staged_count() == self.entry_count && self.entry_count > 0 {
text = "Unstage All";
action = git::UnstageAll.boxed_clone();
tooltip = "git reset";
} else {
text = "Stage All";
action = git::StageAll.boxed_clone();
tooltip = "git add --all ."
}
let change_string = match self.entry_count {
0 => "No Changes".to_string(),
@@ -3259,15 +3273,11 @@ impl GitPanel {
&self.focus_handle,
))
.disabled(self.entry_count == 0)
.on_click({
let git_panel = cx.weak_entity();
move |_, _, cx| {
git_panel
.update(cx, |git_panel, cx| {
git_panel.change_all_files_stage(stage, cx);
})
.ok();
}
.on_click(move |_, _, cx| {
let action = action.boxed_clone();
cx.defer(move |cx| {
cx.dispatch_action(action.as_ref());
})
}),
),
),
@@ -3651,18 +3661,13 @@ impl GitPanel {
let ix = self.entry_by_path(&repo_path, cx)?;
let entry = self.entries.get(ix)?;
let is_staging_or_staged = repo
.pending_ops_for_path(&repo_path)
.map(|ops| ops.staging() || ops.staged())
.or_else(|| {
repo.status_for_path(&repo_path)
.and_then(|status| status.status.staging().as_bool())
})
.or_else(|| {
entry
.status_entry()
.and_then(|entry| entry.staging.as_bool())
});
let is_staging_or_staged = if let Some(status_entry) = entry.status_entry() {
repo.pending_ops_for_path(&repo_path)
.map(|ops| ops.staging() || ops.staged())
.unwrap_or(status_entry.staging.has_staged())
} else {
false
};
let checkbox = Checkbox::new("stage-file", is_staging_or_staged.into())
.disabled(!self.has_write_access(cx))
@@ -3975,9 +3980,9 @@ impl GitPanel {
.map(|ops| ops.staging() || ops.staged())
.or_else(|| {
repo.status_for_path(&entry.repo_path)
.and_then(|status| status.status.staging().as_bool())
.map(|status| status.status.staging().has_staged())
})
.or_else(|| entry.staging.as_bool());
.unwrap_or(entry.staging.has_staged());
let mut is_staged: ToggleState = is_staging_or_staged.into();
if self.show_placeholders && !self.has_staged_changes() && !entry.status.is_created() {
is_staged = ToggleState::Selected;
@@ -4097,9 +4102,7 @@ impl GitPanel {
}
})
.tooltip(move |_window, cx| {
// If is_staging_or_staged is None, this implies the file was partially staged, and so
// we allow the user to stage it in full by displaying `Stage` in the tooltip.
let action = if is_staging_or_staged.unwrap_or(false) {
let action = if is_staging_or_staged {
"Unstage"
} else {
"Stage"

View File

@@ -229,10 +229,6 @@ pub struct GenerativeContentBlob {
#[serde(rename_all = "camelCase")]
pub struct FunctionCallPart {
pub function_call: FunctionCall,
/// Thought signature returned by the model for function calls.
/// Only present on the first function call in parallel call scenarios.
#[serde(skip_serializing_if = "Option::is_none")]
pub thought_signature: Option<String>,
}
#[derive(Debug, Serialize, Deserialize)]
@@ -521,8 +517,6 @@ pub enum Model {
alias = "gemini-2.5-pro-preview-06-05"
)]
Gemini25Pro,
#[serde(rename = "gemini-3-pro-preview")]
Gemini3ProPreview,
#[serde(rename = "custom")]
Custom {
name: String,
@@ -549,7 +543,6 @@ impl Model {
Self::Gemini25FlashLitePreview => "gemini-2.5-flash-lite-preview",
Self::Gemini25Flash => "gemini-2.5-flash",
Self::Gemini25Pro => "gemini-2.5-pro",
Self::Gemini3ProPreview => "gemini-3-pro-preview",
Self::Custom { name, .. } => name,
}
}
@@ -563,7 +556,6 @@ impl Model {
Self::Gemini25FlashLitePreview => "gemini-2.5-flash-lite-preview-06-17",
Self::Gemini25Flash => "gemini-2.5-flash",
Self::Gemini25Pro => "gemini-2.5-pro",
Self::Gemini3ProPreview => "gemini-3-pro-preview",
Self::Custom { name, .. } => name,
}
}
@@ -578,7 +570,6 @@ impl Model {
Self::Gemini25FlashLitePreview => "Gemini 2.5 Flash-Lite Preview",
Self::Gemini25Flash => "Gemini 2.5 Flash",
Self::Gemini25Pro => "Gemini 2.5 Pro",
Self::Gemini3ProPreview => "Gemini 3 Pro",
Self::Custom {
name, display_name, ..
} => display_name.as_ref().unwrap_or(name),
@@ -595,7 +586,6 @@ impl Model {
Self::Gemini25FlashLitePreview => 1_000_000,
Self::Gemini25Flash => 1_048_576,
Self::Gemini25Pro => 1_048_576,
Self::Gemini3ProPreview => 1_048_576,
Self::Custom { max_tokens, .. } => *max_tokens,
}
}
@@ -610,7 +600,6 @@ impl Model {
Model::Gemini25FlashLitePreview => Some(64_000),
Model::Gemini25Flash => Some(65_536),
Model::Gemini25Pro => Some(65_536),
Model::Gemini3ProPreview => Some(65_536),
Model::Custom { .. } => None,
}
}
@@ -630,10 +619,7 @@ impl Model {
| Self::Gemini15Flash
| Self::Gemini20FlashLite
| Self::Gemini20Flash => GoogleModelMode::Default,
Self::Gemini25FlashLitePreview
| Self::Gemini25Flash
| Self::Gemini25Pro
| Self::Gemini3ProPreview => {
Self::Gemini25FlashLitePreview | Self::Gemini25Flash | Self::Gemini25Pro => {
GoogleModelMode::Thinking {
// By default these models are set to "auto", so we preserve that behavior
// but indicate they are capable of thinking mode
@@ -650,109 +636,3 @@ impl std::fmt::Display for Model {
write!(f, "{}", self.id())
}
}
#[cfg(test)]
mod tests {
use super::*;
use serde_json::json;
#[test]
fn test_function_call_part_with_signature_serializes_correctly() {
let part = FunctionCallPart {
function_call: FunctionCall {
name: "test_function".to_string(),
args: json!({"arg": "value"}),
},
thought_signature: Some("test_signature".to_string()),
};
let serialized = serde_json::to_value(&part).unwrap();
assert_eq!(serialized["functionCall"]["name"], "test_function");
assert_eq!(serialized["functionCall"]["args"]["arg"], "value");
assert_eq!(serialized["thoughtSignature"], "test_signature");
}
#[test]
fn test_function_call_part_without_signature_omits_field() {
let part = FunctionCallPart {
function_call: FunctionCall {
name: "test_function".to_string(),
args: json!({"arg": "value"}),
},
thought_signature: None,
};
let serialized = serde_json::to_value(&part).unwrap();
assert_eq!(serialized["functionCall"]["name"], "test_function");
assert_eq!(serialized["functionCall"]["args"]["arg"], "value");
// thoughtSignature field should not be present when None
assert!(serialized.get("thoughtSignature").is_none());
}
#[test]
fn test_function_call_part_deserializes_with_signature() {
let json = json!({
"functionCall": {
"name": "test_function",
"args": {"arg": "value"}
},
"thoughtSignature": "test_signature"
});
let part: FunctionCallPart = serde_json::from_value(json).unwrap();
assert_eq!(part.function_call.name, "test_function");
assert_eq!(part.thought_signature, Some("test_signature".to_string()));
}
#[test]
fn test_function_call_part_deserializes_without_signature() {
let json = json!({
"functionCall": {
"name": "test_function",
"args": {"arg": "value"}
}
});
let part: FunctionCallPart = serde_json::from_value(json).unwrap();
assert_eq!(part.function_call.name, "test_function");
assert_eq!(part.thought_signature, None);
}
#[test]
fn test_function_call_part_round_trip() {
let original = FunctionCallPart {
function_call: FunctionCall {
name: "test_function".to_string(),
args: json!({"arg": "value", "nested": {"key": "val"}}),
},
thought_signature: Some("round_trip_signature".to_string()),
};
let serialized = serde_json::to_value(&original).unwrap();
let deserialized: FunctionCallPart = serde_json::from_value(serialized).unwrap();
assert_eq!(deserialized.function_call.name, original.function_call.name);
assert_eq!(deserialized.function_call.args, original.function_call.args);
assert_eq!(deserialized.thought_signature, original.thought_signature);
}
#[test]
fn test_function_call_part_with_empty_signature_serializes() {
let part = FunctionCallPart {
function_call: FunctionCall {
name: "test_function".to_string(),
args: json!({"arg": "value"}),
},
thought_signature: Some("".to_string()),
};
let serialized = serde_json::to_value(&part).unwrap();
// Empty string should still be serialized (normalization happens at a higher level)
assert_eq!(serialized["thoughtSignature"], "");
}
}

View File

@@ -138,8 +138,6 @@ waker-fn = "1.2.0"
lyon = "1.0"
libc.workspace = true
pin-project = "1.1.10"
circular-buffer.workspace = true
spin = "0.10.0"
[target.'cfg(target_os = "macos")'.dependencies]
block = "0.1"

View File

@@ -178,7 +178,7 @@ impl TextInput {
if position.y > bounds.bottom() {
return self.content.len();
}
line.closest_index_for_x(position.x - bounds.left())
line.index_for_x(position.x - bounds.left())
}
fn select_to(&mut self, offset: usize, cx: &mut Context<Self>) {
@@ -380,7 +380,7 @@ impl EntityInputHandler for TextInput {
let last_layout = self.last_layout.as_ref()?;
assert_eq!(last_layout.text, self.content);
let utf8_index = last_layout.index_for_x(point.x - line_point.x)?;
let utf8_index = last_layout.index_for_x(point.x - line_point.x);
Some(self.offset_to_utf16(utf8_index))
}
}

View File

@@ -1410,7 +1410,7 @@ impl App {
let quit_on_empty = match cx.quit_mode {
QuitMode::Explicit => false,
QuitMode::LastWindowClosed => true,
QuitMode::Default => cfg!(not(target_os = "macos")),
QuitMode::Default => !cfg!(macos),
};
if quit_on_empty && cx.windows.is_empty() {

View File

@@ -1,4 +1,4 @@
use crate::{App, PlatformDispatcher, RunnableMeta, RunnableVariant};
use crate::{App, PlatformDispatcher};
use async_task::Runnable;
use futures::channel::mpsc;
use smol::prelude::*;
@@ -62,7 +62,7 @@ enum TaskState<T> {
Ready(Option<T>),
/// A task that is currently running.
Spawned(async_task::Task<T, RunnableMeta>),
Spawned(async_task::Task<T>),
}
impl<T> Task<T> {
@@ -146,7 +146,6 @@ impl BackgroundExecutor {
}
/// Enqueues the given future to be run to completion on a background thread.
#[track_caller]
pub fn spawn<R>(&self, future: impl Future<Output = R> + Send + 'static) -> Task<R>
where
R: Send + 'static,
@@ -156,7 +155,6 @@ impl BackgroundExecutor {
/// Enqueues the given future to be run to completion on a background thread.
/// The given label can be used to control the priority of the task in tests.
#[track_caller]
pub fn spawn_labeled<R>(
&self,
label: TaskLabel,
@@ -168,20 +166,14 @@ impl BackgroundExecutor {
self.spawn_internal::<R>(Box::pin(future), Some(label))
}
#[track_caller]
fn spawn_internal<R: Send + 'static>(
&self,
future: AnyFuture<R>,
label: Option<TaskLabel>,
) -> Task<R> {
let dispatcher = self.dispatcher.clone();
let location = core::panic::Location::caller();
let (runnable, task) = async_task::Builder::new()
.metadata(RunnableMeta { location })
.spawn(
move |_| future,
move |runnable| dispatcher.dispatch(RunnableVariant::Meta(runnable), label),
);
let (runnable, task) =
async_task::spawn(future, move |runnable| dispatcher.dispatch(runnable, label));
runnable.schedule();
Task(TaskState::Spawned(task))
}
@@ -289,11 +281,7 @@ impl BackgroundExecutor {
});
let mut cx = std::task::Context::from_waker(&waker);
let duration = Duration::from_secs(
option_env!("GPUI_TEST_TIMEOUT")
.and_then(|s| s.parse::<u64>().ok())
.unwrap_or(180),
);
let duration = Duration::from_secs(180);
let mut test_should_end_by = Instant::now() + duration;
loop {
@@ -327,8 +315,10 @@ impl BackgroundExecutor {
"parked with nothing left to run{waiting_message}{backtrace_message}",
)
}
dispatcher.push_unparker(unparker.clone());
parker.park_timeout(Duration::from_millis(1));
dispatcher.set_unparker(unparker.clone());
parker.park_timeout(
test_should_end_by.saturating_duration_since(Instant::now()),
);
if Instant::now() > test_should_end_by {
panic!("test timed out after {duration:?} with allow_parking")
}
@@ -380,13 +370,10 @@ impl BackgroundExecutor {
if duration.is_zero() {
return Task::ready(());
}
let location = core::panic::Location::caller();
let (runnable, task) = async_task::Builder::new()
.metadata(RunnableMeta { location })
.spawn(move |_| async move {}, {
let dispatcher = self.dispatcher.clone();
move |runnable| dispatcher.dispatch_after(duration, RunnableVariant::Meta(runnable))
});
let (runnable, task) = async_task::spawn(async move {}, {
let dispatcher = self.dispatcher.clone();
move |runnable| dispatcher.dispatch_after(duration, runnable)
});
runnable.schedule();
Task(TaskState::Spawned(task))
}
@@ -492,29 +479,24 @@ impl ForegroundExecutor {
}
/// Enqueues the given Task to run on the main thread at some point in the future.
#[track_caller]
pub fn spawn<R>(&self, future: impl Future<Output = R> + 'static) -> Task<R>
where
R: 'static,
{
let dispatcher = self.dispatcher.clone();
let location = core::panic::Location::caller();
#[track_caller]
fn inner<R: 'static>(
dispatcher: Arc<dyn PlatformDispatcher>,
future: AnyLocalFuture<R>,
location: &'static core::panic::Location<'static>,
) -> Task<R> {
let (runnable, task) = spawn_local_with_source_location(
future,
move |runnable| dispatcher.dispatch_on_main_thread(RunnableVariant::Meta(runnable)),
RunnableMeta { location },
);
let (runnable, task) = spawn_local_with_source_location(future, move |runnable| {
dispatcher.dispatch_on_main_thread(runnable)
});
runnable.schedule();
Task(TaskState::Spawned(task))
}
inner::<R>(dispatcher, Box::pin(future), location)
inner::<R>(dispatcher, Box::pin(future))
}
}
@@ -523,16 +505,14 @@ impl ForegroundExecutor {
/// Copy-modified from:
/// <https://github.com/smol-rs/async-task/blob/ca9dbe1db9c422fd765847fa91306e30a6bb58a9/src/runnable.rs#L405>
#[track_caller]
fn spawn_local_with_source_location<Fut, S, M>(
fn spawn_local_with_source_location<Fut, S>(
future: Fut,
schedule: S,
metadata: M,
) -> (Runnable<M>, async_task::Task<Fut::Output, M>)
) -> (Runnable<()>, async_task::Task<Fut::Output, ()>)
where
Fut: Future + 'static,
Fut::Output: 'static,
S: async_task::Schedule<M> + Send + Sync + 'static,
M: 'static,
S: async_task::Schedule<()> + Send + Sync + 'static,
{
#[inline]
fn thread_id() -> ThreadId {
@@ -580,11 +560,7 @@ where
location: Location::caller(),
};
unsafe {
async_task::Builder::new()
.metadata(metadata)
.spawn_unchecked(move |_| future, schedule)
}
unsafe { async_task::spawn_unchecked(future, schedule) }
}
/// Scope manages a set of tasks that are enqueued and waited on together. See [`BackgroundExecutor::scoped`].
@@ -614,7 +590,6 @@ impl<'a> Scope<'a> {
}
/// Spawn a future into this scope.
#[track_caller]
pub fn spawn<F>(&mut self, f: F)
where
F: Future<Output = ()> + Send + 'a,

View File

@@ -30,7 +30,6 @@ mod keymap;
mod path_builder;
mod platform;
pub mod prelude;
mod profiler;
mod scene;
mod shared_string;
mod shared_uri;
@@ -88,7 +87,6 @@ use key_dispatch::*;
pub use keymap::*;
pub use path_builder::*;
pub use platform::*;
pub use profiler::*;
pub use refineable::*;
pub use scene::*;
pub use shared_string::*;

View File

@@ -40,8 +40,8 @@ use crate::{
DEFAULT_WINDOW_SIZE, DevicePixels, DispatchEventResult, Font, FontId, FontMetrics, FontRun,
ForegroundExecutor, GlyphId, GpuSpecs, ImageSource, Keymap, LineLayout, Pixels, PlatformInput,
Point, RenderGlyphParams, RenderImage, RenderImageParams, RenderSvgParams, Scene, ShapedGlyph,
ShapedRun, SharedString, Size, SvgRenderer, SystemWindowTab, Task, TaskLabel, TaskTiming,
ThreadTaskTimings, Window, WindowControlArea, hash, point, px, size,
ShapedRun, SharedString, Size, SvgRenderer, SystemWindowTab, Task, TaskLabel, Window,
WindowControlArea, hash, point, px, size,
};
use anyhow::Result;
use async_task::Runnable;
@@ -559,32 +559,14 @@ pub(crate) trait PlatformWindow: HasWindowHandle + HasDisplayHandle {
}
}
/// This type is public so that our test macro can generate and use it, but it should not
/// be considered part of our public API.
#[doc(hidden)]
#[derive(Debug)]
pub struct RunnableMeta {
/// Location of the runnable
pub location: &'static core::panic::Location<'static>,
}
#[doc(hidden)]
pub enum RunnableVariant {
Meta(Runnable<RunnableMeta>),
Compat(Runnable),
}
/// This type is public so that our test macro can generate and use it, but it should not
/// be considered part of our public API.
#[doc(hidden)]
pub trait PlatformDispatcher: Send + Sync {
fn get_all_timings(&self) -> Vec<ThreadTaskTimings>;
fn get_current_thread_timings(&self) -> Vec<TaskTiming>;
fn is_main_thread(&self) -> bool;
fn dispatch(&self, runnable: RunnableVariant, label: Option<TaskLabel>);
fn dispatch_on_main_thread(&self, runnable: RunnableVariant);
fn dispatch_after(&self, duration: Duration, runnable: RunnableVariant);
fn dispatch(&self, runnable: Runnable, label: Option<TaskLabel>);
fn dispatch_on_main_thread(&self, runnable: Runnable);
fn dispatch_after(&self, duration: Duration, runnable: Runnable);
fn now(&self) -> Instant {
Instant::now()
}

View File

@@ -1,7 +1,5 @@
use crate::{
GLOBAL_THREAD_TIMINGS, PlatformDispatcher, RunnableVariant, THREAD_TIMINGS, TaskLabel,
TaskTiming, ThreadTaskTimings,
};
use crate::{PlatformDispatcher, TaskLabel};
use async_task::Runnable;
use calloop::{
EventLoop,
channel::{self, Sender},
@@ -15,20 +13,20 @@ use util::ResultExt;
struct TimerAfter {
duration: Duration,
runnable: RunnableVariant,
runnable: Runnable,
}
pub(crate) struct LinuxDispatcher {
main_sender: Sender<RunnableVariant>,
main_sender: Sender<Runnable>,
timer_sender: Sender<TimerAfter>,
background_sender: flume::Sender<RunnableVariant>,
background_sender: flume::Sender<Runnable>,
_background_threads: Vec<thread::JoinHandle<()>>,
main_thread_id: thread::ThreadId,
}
impl LinuxDispatcher {
pub fn new(main_sender: Sender<RunnableVariant>) -> Self {
let (background_sender, background_receiver) = flume::unbounded::<RunnableVariant>();
pub fn new(main_sender: Sender<Runnable>) -> Self {
let (background_sender, background_receiver) = flume::unbounded::<Runnable>();
let thread_count = std::thread::available_parallelism()
.map(|i| i.get())
.unwrap_or(1);
@@ -42,36 +40,7 @@ impl LinuxDispatcher {
for runnable in receiver {
let start = Instant::now();
let mut location = match runnable {
RunnableVariant::Meta(runnable) => {
let location = runnable.metadata().location;
let timing = TaskTiming {
location,
start,
end: None,
};
Self::add_task_timing(timing);
runnable.run();
timing
}
RunnableVariant::Compat(runnable) => {
let location = core::panic::Location::caller();
let timing = TaskTiming {
location,
start,
end: None,
};
Self::add_task_timing(timing);
runnable.run();
timing
}
};
let end = Instant::now();
location.end = Some(end);
Self::add_task_timing(location);
runnable.run();
log::trace!(
"background thread {}: ran runnable. took: {:?}",
@@ -103,36 +72,7 @@ impl LinuxDispatcher {
calloop::timer::Timer::from_duration(timer.duration),
move |_, _, _| {
if let Some(runnable) = runnable.take() {
let start = Instant::now();
let mut timing = match runnable {
RunnableVariant::Meta(runnable) => {
let location = runnable.metadata().location;
let timing = TaskTiming {
location,
start,
end: None,
};
Self::add_task_timing(timing);
runnable.run();
timing
}
RunnableVariant::Compat(runnable) => {
let timing = TaskTiming {
location: core::panic::Location::caller(),
start,
end: None,
};
Self::add_task_timing(timing);
runnable.run();
timing
}
};
let end = Instant::now();
timing.end = Some(end);
Self::add_task_timing(timing);
runnable.run();
}
TimeoutAction::Drop
},
@@ -156,53 +96,18 @@ impl LinuxDispatcher {
main_thread_id: thread::current().id(),
}
}
pub(crate) fn add_task_timing(timing: TaskTiming) {
THREAD_TIMINGS.with(|timings| {
let mut timings = timings.lock();
let timings = &mut timings.timings;
if let Some(last_timing) = timings.iter_mut().rev().next() {
if last_timing.location == timing.location {
last_timing.end = timing.end;
return;
}
}
timings.push_back(timing);
});
}
}
impl PlatformDispatcher for LinuxDispatcher {
fn get_all_timings(&self) -> Vec<crate::ThreadTaskTimings> {
let global_timings = GLOBAL_THREAD_TIMINGS.lock();
ThreadTaskTimings::convert(&global_timings)
}
fn get_current_thread_timings(&self) -> Vec<crate::TaskTiming> {
THREAD_TIMINGS.with(|timings| {
let timings = timings.lock();
let timings = &timings.timings;
let mut vec = Vec::with_capacity(timings.len());
let (s1, s2) = timings.as_slices();
vec.extend_from_slice(s1);
vec.extend_from_slice(s2);
vec
})
}
fn is_main_thread(&self) -> bool {
thread::current().id() == self.main_thread_id
}
fn dispatch(&self, runnable: RunnableVariant, _: Option<TaskLabel>) {
fn dispatch(&self, runnable: Runnable, _: Option<TaskLabel>) {
self.background_sender.send(runnable).unwrap();
}
fn dispatch_on_main_thread(&self, runnable: RunnableVariant) {
fn dispatch_on_main_thread(&self, runnable: Runnable) {
self.main_sender.send(runnable).unwrap_or_else(|runnable| {
// NOTE: Runnable may wrap a Future that is !Send.
//
@@ -216,7 +121,7 @@ impl PlatformDispatcher for LinuxDispatcher {
});
}
fn dispatch_after(&self, duration: Duration, runnable: RunnableVariant) {
fn dispatch_after(&self, duration: Duration, runnable: Runnable) {
self.timer_sender
.send(TimerAfter { duration, runnable })
.ok();

View File

@@ -31,10 +31,7 @@ impl HeadlessClient {
handle
.insert_source(main_receiver, |event, _, _: &mut HeadlessClient| {
if let calloop::channel::Event::Msg(runnable) = event {
match runnable {
crate::RunnableVariant::Meta(runnable) => runnable.run(),
crate::RunnableVariant::Compat(runnable) => runnable.run(),
};
runnable.run();
}
})
.ok();

View File

@@ -15,6 +15,7 @@ use std::{
};
use anyhow::{Context as _, anyhow};
use async_task::Runnable;
use calloop::{LoopSignal, channel::Channel};
use futures::channel::oneshot;
use util::ResultExt as _;
@@ -25,8 +26,7 @@ use crate::{
Action, AnyWindowHandle, BackgroundExecutor, ClipboardItem, CursorStyle, DisplayId,
ForegroundExecutor, Keymap, LinuxDispatcher, Menu, MenuItem, OwnedMenu, PathPromptOptions,
Pixels, Platform, PlatformDisplay, PlatformKeyboardLayout, PlatformKeyboardMapper,
PlatformTextSystem, PlatformWindow, Point, Result, RunnableVariant, Task, WindowAppearance,
WindowParams, px,
PlatformTextSystem, PlatformWindow, Point, Result, Task, WindowAppearance, WindowParams, px,
};
#[cfg(any(feature = "wayland", feature = "x11"))]
@@ -105,8 +105,8 @@ pub(crate) struct LinuxCommon {
}
impl LinuxCommon {
pub fn new(signal: LoopSignal) -> (Self, Channel<RunnableVariant>) {
let (main_sender, main_receiver) = calloop::channel::channel::<RunnableVariant>();
pub fn new(signal: LoopSignal) -> (Self, Channel<Runnable>) {
let (main_sender, main_receiver) = calloop::channel::channel::<Runnable>();
#[cfg(any(feature = "wayland", feature = "x11"))]
let text_system = Arc::new(crate::CosmicTextSystem::new());

View File

@@ -71,6 +71,7 @@ use super::{
window::{ImeInput, WaylandWindowStatePtr},
};
use crate::platform::{PlatformWindow, blade::BladeContext};
use crate::{
AnyWindowHandle, Bounds, Capslock, CursorStyle, DOUBLE_CLICK_INTERVAL, DevicePixels, DisplayId,
FileDropEvent, ForegroundExecutor, KeyDownEvent, KeyUpEvent, Keystroke, LinuxCommon,
@@ -79,10 +80,6 @@ use crate::{
PlatformInput, PlatformKeyboardLayout, Point, SCROLL_LINES, ScrollDelta, ScrollWheelEvent,
Size, TouchPhase, WindowParams, point, px, size,
};
use crate::{
LinuxDispatcher, RunnableVariant, TaskTiming,
platform::{PlatformWindow, blade::BladeContext},
};
use crate::{
SharedString,
platform::linux::{
@@ -494,37 +491,7 @@ impl WaylandClient {
move |event, _, _: &mut WaylandClientStatePtr| {
if let calloop::channel::Event::Msg(runnable) = event {
handle.insert_idle(|_| {
let start = Instant::now();
let mut timing = match runnable {
RunnableVariant::Meta(runnable) => {
let location = runnable.metadata().location;
let timing = TaskTiming {
location,
start,
end: None,
};
LinuxDispatcher::add_task_timing(timing);
runnable.run();
timing
}
RunnableVariant::Compat(runnable) => {
let location = core::panic::Location::caller();
let timing = TaskTiming {
location,
start,
end: None,
};
LinuxDispatcher::add_task_timing(timing);
runnable.run();
timing
}
};
let end = Instant::now();
timing.end = Some(end);
LinuxDispatcher::add_task_timing(timing);
runnable.run();
});
}
}

View File

@@ -1,4 +1,4 @@
use crate::{Capslock, LinuxDispatcher, RunnableVariant, TaskTiming, xcb_flush};
use crate::{Capslock, xcb_flush};
use anyhow::{Context as _, anyhow};
use ashpd::WindowIdentifier;
use calloop::{
@@ -313,37 +313,7 @@ impl X11Client {
// events have higher priority and runnables are only worked off after the event
// callbacks.
handle.insert_idle(|_| {
let start = Instant::now();
let mut timing = match runnable {
RunnableVariant::Meta(runnable) => {
let location = runnable.metadata().location;
let timing = TaskTiming {
location,
start,
end: None,
};
LinuxDispatcher::add_task_timing(timing);
runnable.run();
timing
}
RunnableVariant::Compat(runnable) => {
let location = core::panic::Location::caller();
let timing = TaskTiming {
location,
start,
end: None,
};
LinuxDispatcher::add_task_timing(timing);
runnable.run();
timing
}
};
let end = Instant::now();
timing.end = Some(end);
LinuxDispatcher::add_task_timing(timing);
runnable.run();
});
}
}

View File

@@ -2,11 +2,7 @@
#![allow(non_camel_case_types)]
#![allow(non_snake_case)]
use crate::{
GLOBAL_THREAD_TIMINGS, PlatformDispatcher, RunnableMeta, RunnableVariant, THREAD_TIMINGS,
TaskLabel, TaskTiming, ThreadTaskTimings,
};
use crate::{PlatformDispatcher, TaskLabel};
use async_task::Runnable;
use objc::{
class, msg_send,
@@ -16,7 +12,7 @@ use objc::{
use std::{
ffi::c_void,
ptr::{NonNull, addr_of},
time::{Duration, Instant},
time::Duration,
};
/// All items in the generated file are marked as pub, so we're gonna wrap it in a separate mod to prevent
@@ -33,155 +29,47 @@ pub(crate) fn dispatch_get_main_queue() -> dispatch_queue_t {
pub(crate) struct MacDispatcher;
impl PlatformDispatcher for MacDispatcher {
fn get_all_timings(&self) -> Vec<ThreadTaskTimings> {
let global_timings = GLOBAL_THREAD_TIMINGS.lock();
ThreadTaskTimings::convert(&global_timings)
}
fn get_current_thread_timings(&self) -> Vec<TaskTiming> {
THREAD_TIMINGS.with(|timings| {
let timings = &timings.lock().timings;
let mut vec = Vec::with_capacity(timings.len());
let (s1, s2) = timings.as_slices();
vec.extend_from_slice(s1);
vec.extend_from_slice(s2);
vec
})
}
fn is_main_thread(&self) -> bool {
let is_main_thread: BOOL = unsafe { msg_send![class!(NSThread), isMainThread] };
is_main_thread == YES
}
fn dispatch(&self, runnable: RunnableVariant, _: Option<TaskLabel>) {
let (context, trampoline) = match runnable {
RunnableVariant::Meta(runnable) => (
runnable.into_raw().as_ptr() as *mut c_void,
Some(trampoline as unsafe extern "C" fn(*mut c_void)),
),
RunnableVariant::Compat(runnable) => (
runnable.into_raw().as_ptr() as *mut c_void,
Some(trampoline_compat as unsafe extern "C" fn(*mut c_void)),
),
};
fn dispatch(&self, runnable: Runnable, _: Option<TaskLabel>) {
unsafe {
dispatch_async_f(
dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH.try_into().unwrap(), 0),
context,
trampoline,
runnable.into_raw().as_ptr() as *mut c_void,
Some(trampoline),
);
}
}
fn dispatch_on_main_thread(&self, runnable: RunnableVariant) {
let (context, trampoline) = match runnable {
RunnableVariant::Meta(runnable) => (
runnable.into_raw().as_ptr() as *mut c_void,
Some(trampoline as unsafe extern "C" fn(*mut c_void)),
),
RunnableVariant::Compat(runnable) => (
runnable.into_raw().as_ptr() as *mut c_void,
Some(trampoline_compat as unsafe extern "C" fn(*mut c_void)),
),
};
fn dispatch_on_main_thread(&self, runnable: Runnable) {
unsafe {
dispatch_async_f(dispatch_get_main_queue(), context, trampoline);
dispatch_async_f(
dispatch_get_main_queue(),
runnable.into_raw().as_ptr() as *mut c_void,
Some(trampoline),
);
}
}
fn dispatch_after(&self, duration: Duration, runnable: RunnableVariant) {
let (context, trampoline) = match runnable {
RunnableVariant::Meta(runnable) => (
runnable.into_raw().as_ptr() as *mut c_void,
Some(trampoline as unsafe extern "C" fn(*mut c_void)),
),
RunnableVariant::Compat(runnable) => (
runnable.into_raw().as_ptr() as *mut c_void,
Some(trampoline_compat as unsafe extern "C" fn(*mut c_void)),
),
};
fn dispatch_after(&self, duration: Duration, runnable: Runnable) {
unsafe {
let queue =
dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH.try_into().unwrap(), 0);
let when = dispatch_time(DISPATCH_TIME_NOW as u64, duration.as_nanos() as i64);
dispatch_after_f(when, queue, context, trampoline);
dispatch_after_f(
when,
queue,
runnable.into_raw().as_ptr() as *mut c_void,
Some(trampoline),
);
}
}
}
extern "C" fn trampoline(runnable: *mut c_void) {
let task =
unsafe { Runnable::<RunnableMeta>::from_raw(NonNull::new_unchecked(runnable as *mut ())) };
let location = task.metadata().location;
let start = Instant::now();
let timing = TaskTiming {
location,
start,
end: None,
};
THREAD_TIMINGS.with(|timings| {
let mut timings = timings.lock();
let timings = &mut timings.timings;
if let Some(last_timing) = timings.iter_mut().rev().next() {
if last_timing.location == timing.location {
return;
}
}
timings.push_back(timing);
});
task.run();
let end = Instant::now();
THREAD_TIMINGS.with(|timings| {
let mut timings = timings.lock();
let timings = &mut timings.timings;
let Some(last_timing) = timings.iter_mut().rev().next() else {
return;
};
last_timing.end = Some(end);
});
}
extern "C" fn trampoline_compat(runnable: *mut c_void) {
let task = unsafe { Runnable::<()>::from_raw(NonNull::new_unchecked(runnable as *mut ())) };
let location = core::panic::Location::caller();
let start = Instant::now();
let timing = TaskTiming {
location,
start,
end: None,
};
THREAD_TIMINGS.with(|timings| {
let mut timings = timings.lock();
let timings = &mut timings.timings;
if let Some(last_timing) = timings.iter_mut().rev().next() {
if last_timing.location == timing.location {
return;
}
}
timings.push_back(timing);
});
task.run();
let end = Instant::now();
THREAD_TIMINGS.with(|timings| {
let mut timings = timings.lock();
let timings = &mut timings.timings;
let Some(last_timing) = timings.iter_mut().rev().next() else {
return;
};
last_timing.end = Some(end);
});
}

View File

@@ -651,12 +651,9 @@ impl Platform for MacPlatform {
fn open_url(&self, url: &str) {
unsafe {
let ns_url = NSURL::alloc(nil).initWithString_(ns_string(url));
if ns_url.is_null() {
log::error!("Failed to create NSURL from string: {}", url);
return;
}
let url = ns_url.autorelease();
let url = NSURL::alloc(nil)
.initWithString_(ns_string(url))
.autorelease();
let workspace: id = msg_send![class!(NSWorkspace), sharedWorkspace];
msg_send![workspace, openURL: url]
}
@@ -1132,7 +1129,32 @@ impl Platform for MacPlatform {
}
}
// If it wasn't a string, try the various supported image types.
// Next, check for URL flavors (including file URLs). Some tools only provide a URL
// with no plain text entry.
{
// Try the modern UTType identifiers first.
let file_url_type: id = ns_string("public.file-url");
let url_type: id = ns_string("public.url");
let url_data = if msg_send![types, containsObject: file_url_type] {
pasteboard.dataForType(file_url_type)
} else if msg_send![types, containsObject: url_type] {
pasteboard.dataForType(url_type)
} else {
nil
};
if url_data != nil && !url_data.bytes().is_null() {
let bytes = slice::from_raw_parts(
url_data.bytes() as *mut u8,
url_data.length() as usize,
);
return Some(self.read_string_from_clipboard(&state, bytes));
}
}
// If it wasn't a string or URL, try the various supported image types.
for format in ImageFormat::iter() {
if let Some(item) = try_clipboard_image(pasteboard, format) {
return Some(item);
@@ -1140,7 +1162,7 @@ impl Platform for MacPlatform {
}
}
// If it wasn't a string or a supported image type, give up.
// If it wasn't a string, URL, or a supported image type, give up.
None
}
@@ -1715,6 +1737,40 @@ mod tests {
);
}
#[test]
fn test_file_url_reads_as_url_string() {
let platform = build_platform();
// Create a file URL for an arbitrary test path and write it to the pasteboard.
// This path does not need to exist; we only validate that the file URL is
// returned as its URL string rather than being converted to a plain path.
let mock_path = "/tmp/zed-clipboard-file-url-test";
unsafe {
// Build an NSURL from the file path
let url: id = msg_send![class!(NSURL), fileURLWithPath: ns_string(mock_path)];
let abs: id = msg_send![url, absoluteString];
// Encode the URL string as UTF-8 bytes
let len: usize = msg_send![abs, lengthOfBytesUsingEncoding: NSUTF8StringEncoding];
let bytes_ptr = abs.UTF8String() as *const u8;
let data = NSData::dataWithBytes_length_(nil, bytes_ptr as *const c_void, len as u64);
// Write as public.file-url to the unique pasteboard
let file_url_type: id = ns_string("public.file-url");
platform
.0
.lock()
.pasteboard
.setData_forType(data, file_url_type);
}
// Ensure the clipboard read returns the URL string, not a converted path
let expected_url = format!("file://{}", mock_path);
assert_eq!(
platform.read_from_clipboard(),
Some(ClipboardItem::new_string(expected_url))
);
}
fn build_platform() -> MacPlatform {
let platform = MacPlatform::new(false);
platform.0.lock().pasteboard = unsafe { NSPasteboard::pasteboardWithUniqueName(nil) };

View File

@@ -1,4 +1,5 @@
use crate::{PlatformDispatcher, RunnableVariant, TaskLabel};
use crate::{PlatformDispatcher, TaskLabel};
use async_task::Runnable;
use backtrace::Backtrace;
use collections::{HashMap, HashSet, VecDeque};
use parking::Unparker;
@@ -25,10 +26,10 @@ pub struct TestDispatcher {
struct TestDispatcherState {
random: StdRng,
foreground: HashMap<TestDispatcherId, VecDeque<RunnableVariant>>,
background: Vec<RunnableVariant>,
deprioritized_background: Vec<RunnableVariant>,
delayed: Vec<(Duration, RunnableVariant)>,
foreground: HashMap<TestDispatcherId, VecDeque<Runnable>>,
background: Vec<Runnable>,
deprioritized_background: Vec<Runnable>,
delayed: Vec<(Duration, Runnable)>,
start_time: Instant,
time: Duration,
is_main_thread: bool,
@@ -38,7 +39,7 @@ struct TestDispatcherState {
waiting_backtrace: Option<Backtrace>,
deprioritized_task_labels: HashSet<TaskLabel>,
block_on_ticks: RangeInclusive<usize>,
unparkers: Vec<Unparker>,
last_parked: Option<Unparker>,
}
impl TestDispatcher {
@@ -58,7 +59,7 @@ impl TestDispatcher {
waiting_backtrace: None,
deprioritized_task_labels: Default::default(),
block_on_ticks: 0..=1000,
unparkers: Default::default(),
last_parked: None,
};
TestDispatcher {
@@ -174,13 +175,7 @@ impl TestDispatcher {
let was_main_thread = state.is_main_thread;
state.is_main_thread = main_thread;
drop(state);
// todo(localcc): add timings to tests
match runnable {
RunnableVariant::Meta(runnable) => runnable.run(),
RunnableVariant::Compat(runnable) => runnable.run(),
};
runnable.run();
self.state.lock().is_main_thread = was_main_thread;
true
@@ -245,14 +240,20 @@ impl TestDispatcher {
let block_on_ticks = lock.block_on_ticks.clone();
lock.random.random_range(block_on_ticks)
}
pub fn unpark_all(&self) {
self.state.lock().unparkers.retain(|parker| parker.unpark());
pub fn unpark_last(&self) {
self.state
.lock()
.last_parked
.take()
.as_ref()
.map(Unparker::unpark);
}
pub fn push_unparker(&self, unparker: Unparker) {
let mut state = self.state.lock();
state.unparkers.push(unparker);
pub fn set_unparker(&self, unparker: Unparker) {
let last = { self.state.lock().last_parked.replace(unparker) };
if let Some(last) = last {
last.unpark();
}
}
}
@@ -267,14 +268,6 @@ impl Clone for TestDispatcher {
}
impl PlatformDispatcher for TestDispatcher {
fn get_all_timings(&self) -> Vec<crate::ThreadTaskTimings> {
Vec::new()
}
fn get_current_thread_timings(&self) -> Vec<crate::TaskTiming> {
Vec::new()
}
fn is_main_thread(&self) -> bool {
self.state.lock().is_main_thread
}
@@ -284,7 +277,7 @@ impl PlatformDispatcher for TestDispatcher {
state.start_time + state.time
}
fn dispatch(&self, runnable: RunnableVariant, label: Option<TaskLabel>) {
fn dispatch(&self, runnable: Runnable, label: Option<TaskLabel>) {
{
let mut state = self.state.lock();
if label.is_some_and(|label| state.deprioritized_task_labels.contains(&label)) {
@@ -293,20 +286,20 @@ impl PlatformDispatcher for TestDispatcher {
state.background.push(runnable);
}
}
self.unpark_all();
self.unpark_last();
}
fn dispatch_on_main_thread(&self, runnable: RunnableVariant) {
fn dispatch_on_main_thread(&self, runnable: Runnable) {
self.state
.lock()
.foreground
.entry(self.id)
.or_default()
.push_back(runnable);
self.unpark_all();
self.unpark_last();
}
fn dispatch_after(&self, duration: std::time::Duration, runnable: RunnableVariant) {
fn dispatch_after(&self, duration: std::time::Duration, runnable: Runnable) {
let mut state = self.state.lock();
let next_time = state.time + duration;
let ix = match state.delayed.binary_search_by_key(&next_time, |e| e.0) {

View File

@@ -234,14 +234,11 @@ impl DirectXAtlasState {
}
fn texture(&self, id: AtlasTextureId) -> &DirectXAtlasTexture {
match id.kind {
crate::AtlasTextureKind::Monochrome => &self.monochrome_textures[id.index as usize]
.as_ref()
.unwrap(),
crate::AtlasTextureKind::Polychrome => &self.polychrome_textures[id.index as usize]
.as_ref()
.unwrap(),
}
let textures = match id.kind {
crate::AtlasTextureKind::Monochrome => &self.monochrome_textures,
crate::AtlasTextureKind::Polychrome => &self.polychrome_textures,
};
textures[id.index as usize].as_ref().unwrap()
}
}

View File

@@ -48,12 +48,6 @@ pub(crate) struct DirectXRenderer {
width: u32,
height: u32,
/// Whether we want to skip drawing due to device lost events.
///
/// In that case we want to discard the first frame that we draw as we got reset in the middle of a frame
/// meaning we lost all the allocated gpu textures and scene resources.
skip_draws: bool,
}
/// Direct3D objects
@@ -173,7 +167,6 @@ impl DirectXRenderer {
font_info: Self::get_font_info(),
width: 1,
height: 1,
skip_draws: false,
})
}
@@ -199,13 +192,8 @@ impl DirectXRenderer {
}],
)?;
unsafe {
device_context.ClearRenderTargetView(
resources
.render_target_view
.as_ref()
.context("missing render target view")?,
&[0.0; 4],
);
device_context
.ClearRenderTargetView(resources.render_target_view.as_ref().unwrap(), &[0.0; 4]);
device_context
.OMSetRenderTargets(Some(slice::from_ref(&resources.render_target_view)), None);
device_context.RSSetViewports(Some(slice::from_ref(&resources.viewport)));
@@ -295,16 +283,10 @@ impl DirectXRenderer {
self.globals = globals;
self.pipelines = pipelines;
self.direct_composition = direct_composition;
self.skip_draws = true;
Ok(())
}
pub(crate) fn draw(&mut self, scene: &Scene) -> Result<()> {
if self.skip_draws {
// skip drawing this frame, we just recovered from a device lost event
// and so likely do not have the textures anymore that are required for drawing
return Ok(());
}
self.pre_draw()?;
for batch in scene.batches() {
match batch {
@@ -324,18 +306,14 @@ impl DirectXRenderer {
sprites,
} => self.draw_polychrome_sprites(texture_id, sprites),
PrimitiveBatch::Surfaces(surfaces) => self.draw_surfaces(surfaces),
}
.context(format!(
"scene too large:\
{} paths, {} shadows, {} quads, {} underlines, {} mono, {} poly, {} surfaces",
scene.paths.len(),
scene.shadows.len(),
scene.quads.len(),
scene.underlines.len(),
scene.monochrome_sprites.len(),
scene.polychrome_sprites.len(),
scene.surfaces.len(),
))?;
}.context(format!("scene too large: {} paths, {} shadows, {} quads, {} underlines, {} mono, {} poly, {} surfaces",
scene.paths.len(),
scene.shadows.len(),
scene.quads.len(),
scene.underlines.len(),
scene.monochrome_sprites.len(),
scene.polychrome_sprites.len(),
scene.surfaces.len(),))?;
}
self.present()
}
@@ -374,7 +352,6 @@ impl DirectXRenderer {
}
resources.recreate_resources(devices, width, height)?;
unsafe {
devices
.device_context
@@ -670,10 +647,6 @@ impl DirectXRenderer {
}
})
}
pub(crate) fn mark_drawable(&mut self) {
self.skip_draws = false;
}
}
impl DirectXResources {

View File

@@ -1,9 +1,10 @@
use std::{
sync::atomic::{AtomicBool, Ordering},
thread::{ThreadId, current},
time::{Duration, Instant},
time::Duration,
};
use async_task::Runnable;
use flume::Sender;
use util::ResultExt;
use windows::{
@@ -17,13 +18,12 @@ use windows::{
};
use crate::{
GLOBAL_THREAD_TIMINGS, HWND, PlatformDispatcher, RunnableVariant, SafeHwnd, THREAD_TIMINGS,
TaskLabel, TaskTiming, ThreadTaskTimings, WM_GPUI_TASK_DISPATCHED_ON_MAIN_THREAD,
HWND, PlatformDispatcher, SafeHwnd, TaskLabel, WM_GPUI_TASK_DISPATCHED_ON_MAIN_THREAD,
};
pub(crate) struct WindowsDispatcher {
pub(crate) wake_posted: AtomicBool,
main_sender: Sender<RunnableVariant>,
main_sender: Sender<Runnable>,
main_thread_id: ThreadId,
platform_window_handle: SafeHwnd,
validation_number: usize,
@@ -31,7 +31,7 @@ pub(crate) struct WindowsDispatcher {
impl WindowsDispatcher {
pub(crate) fn new(
main_sender: Sender<RunnableVariant>,
main_sender: Sender<Runnable>,
platform_window_handle: HWND,
validation_number: usize,
) -> Self {
@@ -47,115 +47,42 @@ impl WindowsDispatcher {
}
}
fn dispatch_on_threadpool(&self, runnable: RunnableVariant) {
fn dispatch_on_threadpool(&self, runnable: Runnable) {
let handler = {
let mut task_wrapper = Some(runnable);
WorkItemHandler::new(move |_| {
Self::execute_runnable(task_wrapper.take().unwrap());
task_wrapper.take().unwrap().run();
Ok(())
})
};
ThreadPool::RunWithPriorityAsync(&handler, WorkItemPriority::High).log_err();
}
fn dispatch_on_threadpool_after(&self, runnable: RunnableVariant, duration: Duration) {
fn dispatch_on_threadpool_after(&self, runnable: Runnable, duration: Duration) {
let handler = {
let mut task_wrapper = Some(runnable);
TimerElapsedHandler::new(move |_| {
Self::execute_runnable(task_wrapper.take().unwrap());
task_wrapper.take().unwrap().run();
Ok(())
})
};
ThreadPoolTimer::CreateTimer(&handler, duration.into()).log_err();
}
#[inline(always)]
pub(crate) fn execute_runnable(runnable: RunnableVariant) {
let start = Instant::now();
let mut timing = match runnable {
RunnableVariant::Meta(runnable) => {
let location = runnable.metadata().location;
let timing = TaskTiming {
location,
start,
end: None,
};
Self::add_task_timing(timing);
runnable.run();
timing
}
RunnableVariant::Compat(runnable) => {
let timing = TaskTiming {
location: core::panic::Location::caller(),
start,
end: None,
};
Self::add_task_timing(timing);
runnable.run();
timing
}
};
let end = Instant::now();
timing.end = Some(end);
Self::add_task_timing(timing);
}
pub(crate) fn add_task_timing(timing: TaskTiming) {
THREAD_TIMINGS.with(|timings| {
let mut timings = timings.lock();
let timings = &mut timings.timings;
if let Some(last_timing) = timings.iter_mut().rev().next() {
if last_timing.location == timing.location {
last_timing.end = timing.end;
return;
}
}
timings.push_back(timing);
});
}
}
impl PlatformDispatcher for WindowsDispatcher {
fn get_all_timings(&self) -> Vec<ThreadTaskTimings> {
let global_thread_timings = GLOBAL_THREAD_TIMINGS.lock();
ThreadTaskTimings::convert(&global_thread_timings)
}
fn get_current_thread_timings(&self) -> Vec<crate::TaskTiming> {
THREAD_TIMINGS.with(|timings| {
let timings = timings.lock();
let timings = &timings.timings;
let mut vec = Vec::with_capacity(timings.len());
let (s1, s2) = timings.as_slices();
vec.extend_from_slice(s1);
vec.extend_from_slice(s2);
vec
})
}
fn is_main_thread(&self) -> bool {
current().id() == self.main_thread_id
}
fn dispatch(&self, runnable: RunnableVariant, label: Option<TaskLabel>) {
fn dispatch(&self, runnable: Runnable, label: Option<TaskLabel>) {
self.dispatch_on_threadpool(runnable);
if let Some(label) = label {
log::debug!("TaskLabel: {label:?}");
}
}
fn dispatch_on_main_thread(&self, runnable: RunnableVariant) {
fn dispatch_on_main_thread(&self, runnable: Runnable) {
match self.main_sender.send(runnable) {
Ok(_) => {
if !self.wake_posted.swap(true, Ordering::AcqRel) {
@@ -184,7 +111,7 @@ impl PlatformDispatcher for WindowsDispatcher {
}
}
fn dispatch_after(&self, duration: Duration, runnable: RunnableVariant) {
fn dispatch_after(&self, duration: Duration, runnable: Runnable) {
self.dispatch_on_threadpool_after(runnable, duration);
}
}

View File

@@ -201,10 +201,8 @@ impl WindowsWindowInner {
let new_logical_size = device_size.to_pixels(scale_factor);
let mut lock = self.state.borrow_mut();
lock.logical_size = new_logical_size;
if should_resize_renderer && let Err(e) = lock.renderer.resize(device_size) {
log::error!("Failed to resize renderer, invalidating devices: {}", e);
lock.invalidate_devices
.store(true, std::sync::atomic::Ordering::Release);
if should_resize_renderer {
lock.renderer.resize(device_size).log_err();
}
if let Some(mut callback) = lock.callbacks.resize.take() {
drop(lock);
@@ -241,7 +239,7 @@ impl WindowsWindowInner {
fn handle_timer_msg(&self, handle: HWND, wparam: WPARAM) -> Option<isize> {
if wparam.0 == SIZE_MOVE_LOOP_TIMER_ID {
for runnable in self.main_receiver.drain() {
WindowsDispatcher::execute_runnable(runnable);
runnable.run();
}
self.handle_paint_msg(handle)
} else {
@@ -489,12 +487,14 @@ impl WindowsWindowInner {
let scale_factor = lock.scale_factor;
let wheel_scroll_amount = match modifiers.shift {
true => {
self.system_settings()
self.system_settings
.borrow()
.mouse_wheel_settings
.wheel_scroll_chars
}
false => {
self.system_settings()
self.system_settings
.borrow()
.mouse_wheel_settings
.wheel_scroll_lines
}
@@ -541,7 +541,8 @@ impl WindowsWindowInner {
};
let scale_factor = lock.scale_factor;
let wheel_scroll_chars = self
.system_settings()
.system_settings
.borrow()
.mouse_wheel_settings
.wheel_scroll_chars;
drop(lock);
@@ -676,7 +677,8 @@ impl WindowsWindowInner {
// used by Chrome. However, it may result in one row of pixels being obscured
// in our client area. But as Chrome says, "there seems to be no better solution."
if is_maximized
&& let Some(ref taskbar_position) = self.system_settings().auto_hide_taskbar_position
&& let Some(ref taskbar_position) =
self.system_settings.borrow().auto_hide_taskbar_position
{
// For the auto-hide taskbar, adjust in by 1 pixel on taskbar edge,
// so the window isn't treated as a "fullscreen app", which would cause
@@ -1070,7 +1072,7 @@ impl WindowsWindowInner {
lock.border_offset.update(handle).log_err();
// system settings may emit a window message which wants to take the refcell lock, so drop it
drop(lock);
self.system_settings_mut().update(display, wparam.0);
self.system_settings.borrow_mut().update(display, wparam.0);
} else {
self.handle_system_theme_changed(handle, lparam)?;
};
@@ -1140,19 +1142,12 @@ impl WindowsWindowInner {
#[inline]
fn draw_window(&self, handle: HWND, force_render: bool) -> Option<isize> {
let mut request_frame = self.state.borrow_mut().callbacks.request_frame.take()?;
// we are instructing gpui to force render a frame, this will
// re-populate all the gpu textures for us so we can resume drawing in
// case we disabled drawing earlier due to a device loss
self.state.borrow_mut().renderer.mark_drawable();
request_frame(RequestFrameOptions {
require_presentation: false,
force_render,
});
self.state.borrow_mut().callbacks.request_frame = Some(request_frame);
unsafe { ValidateRect(Some(handle), None).ok().log_err() };
Some(0)
}

View File

@@ -3,14 +3,12 @@ use std::{
ffi::OsStr,
path::{Path, PathBuf},
rc::{Rc, Weak},
sync::{
Arc,
atomic::{AtomicBool, Ordering},
},
sync::{Arc, atomic::Ordering},
};
use ::util::{ResultExt, paths::SanitizedPath};
use anyhow::{Context as _, Result, anyhow};
use async_task::Runnable;
use futures::channel::oneshot::{self, Receiver};
use itertools::Itertools;
use parking_lot::RwLock;
@@ -39,9 +37,6 @@ pub(crate) struct WindowsPlatform {
text_system: Arc<DirectWriteTextSystem>,
windows_version: WindowsVersion,
drop_target_helper: IDropTargetHelper,
/// Flag to instruct the `VSyncProvider` thread to invalidate the directx devices
/// as resizing them has failed, causing us to have lost at least the render target.
invalidate_devices: Arc<AtomicBool>,
handle: HWND,
disable_direct_composition: bool,
}
@@ -51,7 +46,7 @@ struct WindowsPlatformInner {
raw_window_handles: std::sync::Weak<RwLock<SmallVec<[SafeHwnd; 4]>>>,
// The below members will never change throughout the entire lifecycle of the app.
validation_number: usize,
main_receiver: flume::Receiver<RunnableVariant>,
main_receiver: flume::Receiver<Runnable>,
dispatcher: Arc<WindowsDispatcher>,
}
@@ -98,7 +93,7 @@ impl WindowsPlatform {
OleInitialize(None).context("unable to initialize Windows OLE")?;
}
let directx_devices = DirectXDevices::new().context("Creating DirectX devices")?;
let (main_sender, main_receiver) = flume::unbounded::<RunnableVariant>();
let (main_sender, main_receiver) = flume::unbounded::<Runnable>();
let validation_number = if usize::BITS == 64 {
rand::random::<u64>() as usize
} else {
@@ -168,7 +163,6 @@ impl WindowsPlatform {
disable_direct_composition,
windows_version,
drop_target_helper,
invalidate_devices: Arc::new(AtomicBool::new(false)),
})
}
@@ -202,7 +196,6 @@ impl WindowsPlatform {
platform_window_handle: self.handle,
disable_direct_composition: self.disable_direct_composition,
directx_devices: self.inner.state.borrow().directx_devices.clone().unwrap(),
invalidate_devices: self.invalidate_devices.clone(),
}
}
@@ -255,17 +248,13 @@ impl WindowsPlatform {
let validation_number = self.inner.validation_number;
let all_windows = Arc::downgrade(&self.raw_window_handles);
let text_system = Arc::downgrade(&self.text_system);
let invalidate_devices = self.invalidate_devices.clone();
std::thread::Builder::new()
.name("VSyncProvider".to_owned())
.spawn(move || {
let vsync_provider = VSyncProvider::new();
loop {
vsync_provider.wait_for_vsync();
if check_device_lost(&directx_device.device)
|| invalidate_devices.fetch_and(false, Ordering::Acquire)
{
if check_device_lost(&directx_device.device) {
if let Err(err) = handle_gpu_device_lost(
&mut directx_device,
platform_window.as_raw(),
@@ -353,8 +342,9 @@ impl Platform for WindowsPlatform {
}
}
self.inner
.with_callback(|callbacks| &mut callbacks.quit, |callback| callback());
if let Some(ref mut callback) = self.inner.state.borrow_mut().callbacks.quit {
callback();
}
}
fn quit(&self) {
@@ -389,12 +379,11 @@ impl Platform for WindowsPlatform {
#[allow(
clippy::disallowed_methods,
reason = "We are restarting ourselves, using std command thus is fine"
)] // todo(shell): There might be no powershell on the system
let restart_process =
util::command::new_std_command(util::shell::get_windows_system_shell())
.arg("-command")
.arg(script)
.spawn();
)]
let restart_process = util::command::new_std_command("powershell.exe")
.arg("-command")
.arg(script)
.spawn();
match restart_process {
Ok(_) => self.quit(),
@@ -589,13 +578,14 @@ impl Platform for WindowsPlatform {
fn set_cursor_style(&self, style: CursorStyle) {
let hcursor = load_cursor(style);
if self.inner.state.borrow_mut().current_cursor.map(|c| c.0) != hcursor.map(|c| c.0) {
let mut lock = self.inner.state.borrow_mut();
if lock.current_cursor.map(|c| c.0) != hcursor.map(|c| c.0) {
self.post_message(
WM_GPUI_CURSOR_STYLE_CHANGED,
WPARAM(0),
LPARAM(hcursor.map_or(0, |c| c.0 as isize)),
);
self.inner.state.borrow_mut().current_cursor = hcursor;
lock.current_cursor = hcursor;
}
}
@@ -734,19 +724,6 @@ impl WindowsPlatformInner {
}))
}
/// Calls `project` to project to the corresponding callback field, removes it from callbacks, calls `f` with the callback and then puts the callback back.
fn with_callback<T>(
&self,
project: impl Fn(&mut PlatformCallbacks) -> &mut Option<T>,
f: impl FnOnce(&mut T),
) {
let callback = project(&mut self.state.borrow_mut().callbacks).take();
if let Some(mut callback) = callback {
f(&mut callback);
*project(&mut self.state.borrow_mut().callbacks) = Some(callback)
}
}
fn handle_msg(
self: &Rc<Self>,
handle: HWND,
@@ -806,7 +783,7 @@ impl WindowsPlatformInner {
fn run_foreground_task(&self) -> Option<isize> {
loop {
for runnable in self.main_receiver.drain() {
WindowsDispatcher::execute_runnable(runnable);
runnable.run();
}
// Someone could enqueue a Runnable here. The flag is still true, so they will not PostMessage.
@@ -817,8 +794,7 @@ impl WindowsPlatformInner {
match self.main_receiver.try_recv() {
Ok(runnable) => {
let _ = dispatcher.wake_posted.swap(true, Ordering::AcqRel);
WindowsDispatcher::execute_runnable(runnable);
runnable.run();
continue;
}
_ => {
@@ -831,36 +807,40 @@ impl WindowsPlatformInner {
}
fn handle_dock_action_event(&self, action_idx: usize) -> Option<isize> {
let Some(action) = self
.state
.borrow_mut()
let mut lock = self.state.borrow_mut();
let mut callback = lock.callbacks.app_menu_action.take()?;
let Some(action) = lock
.jump_list
.dock_menus
.get(action_idx)
.map(|dock_menu| dock_menu.action.boxed_clone())
else {
lock.callbacks.app_menu_action = Some(callback);
log::error!("Dock menu for index {action_idx} not found");
return Some(1);
};
self.with_callback(
|callbacks| &mut callbacks.app_menu_action,
|callback| callback(&*action),
);
drop(lock);
callback(&*action);
self.state.borrow_mut().callbacks.app_menu_action = Some(callback);
Some(0)
}
fn handle_keyboard_layout_change(&self) -> Option<isize> {
self.with_callback(
|callbacks| &mut callbacks.keyboard_layout_change,
|callback| callback(),
);
let mut callback = self
.state
.borrow_mut()
.callbacks
.keyboard_layout_change
.take()?;
callback();
self.state.borrow_mut().callbacks.keyboard_layout_change = Some(callback);
Some(0)
}
fn handle_device_lost(&self, lparam: LPARAM) -> Option<isize> {
let mut lock = self.state.borrow_mut();
let directx_devices = lparam.0 as *const DirectXDevices;
let directx_devices = unsafe { &*directx_devices };
let mut lock = self.state.borrow_mut();
lock.directx_devices.take();
lock.directx_devices = Some(directx_devices.clone());
@@ -886,21 +866,18 @@ pub(crate) struct WindowCreationInfo {
pub(crate) windows_version: WindowsVersion,
pub(crate) drop_target_helper: IDropTargetHelper,
pub(crate) validation_number: usize,
pub(crate) main_receiver: flume::Receiver<RunnableVariant>,
pub(crate) main_receiver: flume::Receiver<Runnable>,
pub(crate) platform_window_handle: HWND,
pub(crate) disable_direct_composition: bool,
pub(crate) directx_devices: DirectXDevices,
/// Flag to instruct the `VSyncProvider` thread to invalidate the directx devices
/// as resizing them has failed, causing us to have lost at least the render target.
pub(crate) invalidate_devices: Arc<AtomicBool>,
}
struct PlatformWindowCreateContext {
inner: Option<Result<Rc<WindowsPlatformInner>>>,
raw_window_handles: std::sync::Weak<RwLock<SmallVec<[SafeHwnd; 4]>>>,
validation_number: usize,
main_sender: Option<flume::Sender<RunnableVariant>>,
main_receiver: Option<flume::Receiver<RunnableVariant>>,
main_sender: Option<flume::Sender<Runnable>>,
main_receiver: Option<flume::Receiver<Runnable>>,
directx_devices: Option<DirectXDevices>,
dispatcher: Option<Arc<WindowsDispatcher>>,
}

View File

@@ -6,12 +6,13 @@ use std::{
path::PathBuf,
rc::{Rc, Weak},
str::FromStr,
sync::{Arc, Once, atomic::AtomicBool},
sync::{Arc, Once},
time::{Duration, Instant},
};
use ::util::ResultExt;
use anyhow::{Context as _, Result};
use async_task::Runnable;
use futures::channel::oneshot::{self, Receiver};
use raw_window_handle as rwh;
use smallvec::SmallVec;
@@ -53,9 +54,6 @@ pub struct WindowsWindowState {
pub nc_button_pressed: Option<u32>,
pub display: WindowsDisplay,
/// Flag to instruct the `VSyncProvider` thread to invalidate the directx devices
/// as resizing them has failed, causing us to have lost at least the render target.
pub invalidate_devices: Arc<AtomicBool>,
fullscreen: Option<StyleAndBounds>,
initial_placement: Option<WindowOpenStatus>,
hwnd: HWND,
@@ -65,14 +63,14 @@ pub(crate) struct WindowsWindowInner {
hwnd: HWND,
drop_target_helper: IDropTargetHelper,
pub(crate) state: RefCell<WindowsWindowState>,
system_settings: RefCell<WindowsSystemSettings>,
pub(crate) system_settings: RefCell<WindowsSystemSettings>,
pub(crate) handle: AnyWindowHandle,
pub(crate) hide_title_bar: bool,
pub(crate) is_movable: bool,
pub(crate) executor: ForegroundExecutor,
pub(crate) windows_version: WindowsVersion,
pub(crate) validation_number: usize,
pub(crate) main_receiver: flume::Receiver<RunnableVariant>,
pub(crate) main_receiver: flume::Receiver<Runnable>,
pub(crate) platform_window_handle: HWND,
}
@@ -86,7 +84,6 @@ impl WindowsWindowState {
min_size: Option<Size<Pixels>>,
appearance: WindowAppearance,
disable_direct_composition: bool,
invalidate_devices: Arc<AtomicBool>,
) -> Result<Self> {
let scale_factor = {
let monitor_dpi = unsafe { GetDpiForWindow(hwnd) } as f32;
@@ -142,7 +139,6 @@ impl WindowsWindowState {
fullscreen,
initial_placement,
hwnd,
invalidate_devices,
})
}
@@ -216,7 +212,6 @@ impl WindowsWindowInner {
context.min_size,
context.appearance,
context.disable_direct_composition,
context.invalidate_devices.clone(),
)?);
Ok(Rc::new(Self {
@@ -326,14 +321,6 @@ impl WindowsWindowInner {
}
Ok(())
}
pub(crate) fn system_settings(&self) -> std::cell::Ref<'_, WindowsSystemSettings> {
self.system_settings.borrow()
}
pub(crate) fn system_settings_mut(&self) -> std::cell::RefMut<'_, WindowsSystemSettings> {
self.system_settings.borrow_mut()
}
}
#[derive(Default)]
@@ -362,12 +349,11 @@ struct WindowCreateContext {
windows_version: WindowsVersion,
drop_target_helper: IDropTargetHelper,
validation_number: usize,
main_receiver: flume::Receiver<RunnableVariant>,
main_receiver: flume::Receiver<Runnable>,
platform_window_handle: HWND,
appearance: WindowAppearance,
disable_direct_composition: bool,
directx_devices: DirectXDevices,
invalidate_devices: Arc<AtomicBool>,
}
impl WindowsWindow {
@@ -387,7 +373,6 @@ impl WindowsWindow {
platform_window_handle,
disable_direct_composition,
directx_devices,
invalidate_devices,
} = creation_info;
register_window_class(icon);
let hide_title_bar = params
@@ -448,7 +433,6 @@ impl WindowsWindow {
appearance,
disable_direct_composition,
directx_devices,
invalidate_devices,
};
let creation_result = unsafe {
CreateWindowExW(
@@ -469,9 +453,8 @@ impl WindowsWindow {
// Failure to create a `WindowsWindowState` can cause window creation to fail,
// so check the inner result first.
let this = context.inner.take().transpose()?;
let this = context.inner.take().unwrap()?;
let hwnd = creation_result?;
let this = this.unwrap();
register_drag_drop(&this)?;
configure_dwm_dark_mode(hwnd, appearance);

View File

@@ -1,218 +0,0 @@
use std::{
cell::LazyCell,
hash::Hasher,
hash::{DefaultHasher, Hash},
sync::Arc,
thread::ThreadId,
time::Instant,
};
use serde::{Deserialize, Serialize};
#[doc(hidden)]
#[derive(Debug, Copy, Clone)]
pub struct TaskTiming {
    /// Source location identifying the task that was run.
    pub location: &'static core::panic::Location<'static>,
    /// When execution of the task began.
    pub start: Instant,
    /// When execution finished; `None` while the task is still running.
    pub end: Option<Instant>,
}
#[doc(hidden)]
#[derive(Debug, Clone)]
pub struct ThreadTaskTimings {
    /// Thread name, when one was set for the recording thread.
    pub thread_name: Option<String>,
    /// Identity of the thread these timings were recorded on.
    pub thread_id: ThreadId,
    /// Snapshot of the thread's timing ring buffer, in buffer order.
    pub timings: Vec<TaskTiming>,
}
impl ThreadTaskTimings {
    /// Snapshot every still-live thread's timing buffer into owned vectors.
    ///
    /// Registry entries whose owning thread has exited (weak handle fails to
    /// upgrade) are silently skipped.
    pub(crate) fn convert(timings: &[GlobalThreadTimings]) -> Vec<Self> {
        let mut snapshots = Vec::new();
        for entry in timings {
            // A dead thread's store has been dropped; skip it.
            let Some(shared) = entry.timings.upgrade() else {
                continue;
            };
            let guard = shared.lock();
            // Copy both halves of the ring buffer into one contiguous Vec.
            let mut records = Vec::with_capacity(guard.timings.len());
            let (front, back) = guard.timings.as_slices();
            records.extend_from_slice(front);
            records.extend_from_slice(back);
            snapshots.push(ThreadTaskTimings {
                thread_name: guard.thread_name.clone(),
                thread_id: entry.thread_id,
                timings: records,
            });
        }
        snapshots
    }
}
/// Serializable variant of [`core::panic::Location`]
#[derive(Debug, Copy, Clone, Serialize, Deserialize)]
pub struct SerializedLocation<'a> {
    /// Name of the source file (borrowed from the original location)
    pub file: &'a str,
    /// Line in the source file
    pub line: u32,
    /// Column in the source file
    pub column: u32,
}
impl<'a> From<&'a core::panic::Location<'a>> for SerializedLocation<'a> {
    /// Mirror the std panic location into the serde-friendly struct,
    /// borrowing the file name from the original.
    fn from(location: &'a core::panic::Location<'a>) -> Self {
        let (file, line, column) = (location.file(), location.line(), location.column());
        SerializedLocation { file, line, column }
    }
}
/// Serializable variant of [`TaskTiming`]
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SerializedTaskTiming<'a> {
    /// Location of the timing
    #[serde(borrow)]
    pub location: SerializedLocation<'a>,
    /// Start of the measurement, in nanoseconds relative to the conversion anchor
    pub start: u128,
    /// Duration of the measurement in nanoseconds
    pub duration: u128,
}
impl<'a> SerializedTaskTiming<'a> {
    /// Convert an array of [`TaskTiming`] into their serializable format.
    ///
    /// # Params
    ///
    /// `anchor` - [`Instant`] that should be earlier than all timings to use as base anchor
    pub fn convert(anchor: Instant, timings: &[TaskTiming]) -> Vec<SerializedTaskTiming<'static>> {
        timings
            .iter()
            .map(|timing| {
                // Store starts relative to the shared anchor so all threads'
                // timings live on one timeline of small integers.
                let start = timing.start.duration_since(anchor).as_nanos();
                // A still-running task (no recorded end) is measured up to now.
                let duration = timing
                    .end
                    .unwrap_or_else(Instant::now)
                    .duration_since(timing.start)
                    .as_nanos();
                SerializedTaskTiming {
                    location: timing.location.into(),
                    start,
                    duration,
                }
            })
            .collect()
    }
}
/// Serializable variant of [`ThreadTaskTimings`]
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SerializedThreadTaskTimings<'a> {
    /// Thread name
    pub thread_name: Option<String>,
    /// Hash of the thread id (a stable integer stand-in for the opaque `ThreadId`)
    pub thread_id: u64,
    /// Timing records for this thread
    #[serde(borrow)]
    pub timings: Vec<SerializedTaskTiming<'a>>,
}
impl<'a> SerializedThreadTaskTimings<'a> {
    /// Convert [`ThreadTaskTimings`] into their serializable format.
    ///
    /// # Params
    ///
    /// `anchor` - [`Instant`] that should be earlier than all timings to use as base anchor
    pub fn convert(
        anchor: Instant,
        timings: ThreadTaskTimings,
    ) -> SerializedThreadTaskTimings<'static> {
        // `ThreadId` is opaque, so ship a hash of it as a plain integer.
        let mut hasher = DefaultHasher::new();
        timings.thread_id.hash(&mut hasher);

        SerializedThreadTaskTimings {
            thread_name: timings.thread_name,
            thread_id: hasher.finish(),
            timings: SerializedTaskTiming::convert(anchor, &timings.timings),
        }
    }
}
// Allow 20mb of task timing entries
const MAX_TASK_TIMINGS: usize = (20 * 1024 * 1024) / core::mem::size_of::<TaskTiming>();

/// Fixed-capacity ring buffer of timing records
/// (presumably overwrites the oldest entry once full — see `circular_buffer` crate docs).
pub(crate) type TaskTimings = circular_buffer::CircularBuffer<MAX_TASK_TIMINGS, TaskTiming>;
/// A thread's timing store behind a spin lock.
pub(crate) type GuardedTaskTimings = spin::Mutex<ThreadTimings>;

/// Registry entry linking a thread id to a weak handle on its timing store.
pub(crate) struct GlobalThreadTimings {
    pub thread_id: ThreadId,
    /// Weak so a dead thread's buffer can be freed; upgraded when snapshotting.
    pub timings: std::sync::Weak<GuardedTaskTimings>,
}

/// Process-wide registry of every live thread's timing store.
pub(crate) static GLOBAL_THREAD_TIMINGS: spin::Mutex<Vec<GlobalThreadTimings>> =
    spin::Mutex::new(Vec::new());
thread_local! {
    /// Lazily-created timing store for the current thread. On first access it
    /// is registered in [`GLOBAL_THREAD_TIMINGS`] so snapshots can reach it;
    /// deregistration happens in `ThreadTimings::drop`.
    pub(crate) static THREAD_TIMINGS: LazyCell<Arc<GuardedTaskTimings>> = LazyCell::new(|| {
        let current_thread = std::thread::current();
        let thread_name = current_thread.name();
        let thread_id = current_thread.id();
        let timings = ThreadTimings::new(thread_name.map(|e| e.to_string()), thread_id);
        let timings = Arc::new(spin::Mutex::new(timings));
        {
            // Register a weak handle globally; reuse the id captured above
            // instead of re-querying the current thread.
            let timings = Arc::downgrade(&timings);
            let global_timings = GlobalThreadTimings {
                thread_id,
                timings,
            };
            GLOBAL_THREAD_TIMINGS.lock().push(global_timings);
        }
        timings
    });
}
/// Per-thread store of task timing records; deregistered from
/// [`GLOBAL_THREAD_TIMINGS`] when dropped (see the `Drop` impl).
pub(crate) struct ThreadTimings {
    /// Thread name, when one was set.
    pub thread_name: Option<String>,
    /// Identity of the owning thread; used as the deregistration key.
    pub thread_id: ThreadId,
    /// Ring buffer of timing records (boxed — the buffer is large).
    pub timings: Box<TaskTimings>,
}
impl ThreadTimings {
    /// Create an empty timing store for the thread identified by the
    /// given name and id.
    pub(crate) fn new(thread_name: Option<String>, thread_id: ThreadId) -> Self {
        let timings = TaskTimings::boxed();
        Self {
            thread_name,
            thread_id,
            timings,
        }
    }
}
impl Drop for ThreadTimings {
    /// Remove this thread's entry from the global registry when the
    /// thread-local store is torn down. A missing entry is tolerated
    /// (e.g. the registry entry was already cleaned up).
    fn drop(&mut self) {
        let mut registry = GLOBAL_THREAD_TIMINGS.lock();
        // `position` replaces the former `enumerate().find()` whose found
        // element was discarded; swap_remove is fine since order is not
        // significant in the registry.
        if let Some(index) = registry
            .iter()
            .position(|t| t.thread_id == self.thread_id)
        {
            registry.swap_remove(index);
        }
    }
}

Some files were not shown because too many files have changed in this diff Show More