Compare commits

..

1 Commits

Author SHA1 Message Date
Thorsten Ball
f312763c33 wip: Performance improvements for project search
Do not merge. Need to check with Conrad.

Co-authored-by: Antonio <antonio@zed.dev>
2024-08-27 14:48:17 +02:00
179 changed files with 2099 additions and 4332 deletions

View File

@@ -13,7 +13,7 @@ jobs:
steps:
- name: Check for specific strings in comment
id: check_comment
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7
uses: actions/github-script@v7
with:
script: |
const comment = context.payload.comment.body;
@@ -22,7 +22,7 @@ jobs:
- name: Delete comment if it contains any of the specific strings
if: steps.check_comment.outputs.result == 'true'
uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7
uses: actions/github-script@v7
with:
script: |
const commentId = context.payload.comment.id;

View File

@@ -11,7 +11,7 @@ If you're looking for ideas about what to work on, check out:
- Our [public roadmap](https://zed.dev/roadmap) contains a rough outline of our near-term priorities for Zed.
- Our [top-ranking issues](https://github.com/zed-industries/zed/issues/5393) based on votes by the community.
For adding themes or support for a new language to Zed, check out our [docs on developing extensions](https://zed.dev/docs/extensions/developing-extensions).
For adding themes or support for a new language to Zed, check out our [extension docs](https://github.com/zed-industries/extensions/blob/main/AUTHORING_EXTENSIONS.md).
## Proposing changes

464
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -322,15 +322,15 @@ async-watch = "0.3.1"
async_zip = { version = "0.0.17", features = ["deflate", "deflate64"] }
base64 = "0.22"
bitflags = "2.6.0"
blade-graphics = { git = "https://github.com/kvark/blade", rev = "fee06c42f658b36dd9ac85444a9ee2a481383695" }
blade-macros = { git = "https://github.com/kvark/blade", rev = "fee06c42f658b36dd9ac85444a9ee2a481383695" }
blade-util = { git = "https://github.com/kvark/blade", rev = "fee06c42f658b36dd9ac85444a9ee2a481383695" }
blade-graphics = { git = "https://github.com/kvark/blade", rev = "b37a9a994709d256f4634efd29281c78ba89071a" }
blade-macros = { git = "https://github.com/kvark/blade", rev = "b37a9a994709d256f4634efd29281c78ba89071a" }
blade-util = { git = "https://github.com/kvark/blade", rev = "b37a9a994709d256f4634efd29281c78ba89071a" }
cargo_metadata = "0.18"
cargo_toml = "0.20"
chrono = { version = "0.4", features = ["serde"] }
clap = { version = "4.4", features = ["derive"] }
clickhouse = "0.11.6"
cocoa = "0.26"
cocoa = "0.25"
core-foundation = "0.9.3"
core-foundation-sys = "0.8.6"
ctor = "0.2.6"
@@ -340,7 +340,7 @@ dirs = "4.0"
emojis = "0.6.1"
env_logger = "0.11"
exec = "0.3.1"
fork = "0.2.0"
fork = "0.1.23"
futures = "0.3"
futures-batch = "0.6.1"
futures-lite = "1.13"
@@ -358,7 +358,7 @@ indoc = "2"
isahc = { version = "1.7.2", default-features = false, features = [
"text-decoding",
] }
itertools = "0.13.0"
itertools = "0.11.0"
jsonwebtoken = "9.3"
libc = "0.2"
linkify = "0.10.0"

View File

@@ -41,16 +41,7 @@
"context": "Pane",
"bindings": {
"f4": "search::SelectNextMatch",
"shift-f4": "search::SelectPrevMatch",
"alt-1": ["pane::ActivateItem", 0],
"alt-2": ["pane::ActivateItem", 1],
"alt-3": ["pane::ActivateItem", 2],
"alt-4": ["pane::ActivateItem", 3],
"alt-5": ["pane::ActivateItem", 4],
"alt-6": ["pane::ActivateItem", 5],
"alt-7": ["pane::ActivateItem", 6],
"alt-8": ["pane::ActivateItem", 7],
"alt-9": "pane::ActivateLastItem"
"shift-f4": "search::SelectPrevMatch"
}
},
{

View File

@@ -1,4 +1,4 @@
// Default Keymap (Atom) for Zed on macOS
// Default Keymap (Atom) for Zed on MacOS
[
{
"bindings": {

View File

@@ -45,16 +45,7 @@
"context": "Pane",
"bindings": {
"f4": "search::SelectNextMatch",
"shift-f4": "search::SelectPrevMatch",
"cmd-1": ["pane::ActivateItem", 0],
"cmd-2": ["pane::ActivateItem", 1],
"cmd-3": ["pane::ActivateItem", 2],
"cmd-4": ["pane::ActivateItem", 3],
"cmd-5": ["pane::ActivateItem", 4],
"cmd-6": ["pane::ActivateItem", 5],
"cmd-7": ["pane::ActivateItem", 6],
"cmd-8": ["pane::ActivateItem", 7],
"cmd-9": "pane::ActivateLastItem"
"shift-f4": "search::SelectPrevMatch"
}
},
{

View File

@@ -28,7 +28,7 @@
"buffer_font_family": "Zed Plex Mono",
// Set the buffer text's font fallbacks, this will be merged with
// the platform's default fallbacks.
"buffer_font_fallbacks": null,
"buffer_font_fallbacks": [],
// The OpenType features to enable for text in the editor.
"buffer_font_features": {
// Disable ligatures:
@@ -54,7 +54,7 @@
"ui_font_family": "Zed Plex Sans",
// Set the UI's font fallbacks, this will be merged with the platform's
// default font fallbacks.
"ui_font_fallbacks": null,
"ui_font_fallbacks": [],
// The OpenType features to enable for text in the UI
"ui_font_features": {
// Disable ligatures:
@@ -730,13 +730,7 @@
//
"file_types": {
"JSON": ["flake.lock"],
"JSONC": [
"**/.zed/**/*.json",
"**/zed/**/*.json",
"**/Zed/**/*.json",
"tsconfig.json",
"pyrightconfig.json"
]
"JSONC": ["**/.zed/**/*.json", "**/zed/**/*.json", "**/Zed/**/*.json", "tsconfig.json"]
},
// The extensions that Zed should automatically install on startup.
//

View File

@@ -15,10 +15,8 @@ static SUPPORTED_COUNTRIES: LazyLock<HashSet<&'static str>> = LazyLock::new(|| {
vec![
"AL", // Albania
"DZ", // Algeria
"AS", // American Samoa (US)
"AD", // Andorra
"AO", // Angola
"AI", // Anguilla (UK)
"AG", // Antigua and Barbuda
"AR", // Argentina
"AM", // Armenia
@@ -32,13 +30,11 @@ static SUPPORTED_COUNTRIES: LazyLock<HashSet<&'static str>> = LazyLock::new(|| {
"BE", // Belgium
"BZ", // Belize
"BJ", // Benin
"BM", // Bermuda (UK)
"BT", // Bhutan
"BO", // Bolivia
"BA", // Bosnia and Herzegovina
"BW", // Botswana
"BR", // Brazil
"IO", // British Indian Ocean Territory (UK)
"BN", // Brunei
"BG", // Bulgaria
"BF", // Burkina Faso
@@ -47,15 +43,11 @@ static SUPPORTED_COUNTRIES: LazyLock<HashSet<&'static str>> = LazyLock::new(|| {
"KH", // Cambodia
"CM", // Cameroon
"CA", // Canada
"KY", // Cayman Islands (UK)
"TD", // Chad
"CL", // Chile
"CX", // Christmas Island (AU)
"CC", // Cocos (Keeling) Islands (AU)
"CO", // Colombia
"KM", // Comoros
"CG", // Congo (Brazzaville)
"CK", // Cook Islands (NZ)
"CR", // Costa Rica
"CI", // Côte d'Ivoire
"HR", // Croatia
@@ -71,28 +63,21 @@ static SUPPORTED_COUNTRIES: LazyLock<HashSet<&'static str>> = LazyLock::new(|| {
"GQ", // Equatorial Guinea
"EE", // Estonia
"SZ", // Eswatini
"FK", // Falkland Islands (UK)
"FJ", // Fiji
"FI", // Finland
"FR", // France
"GF", // French Guiana (FR)
"PF", // French Polynesia (FR)
"TF", // French Southern Territories
"GA", // Gabon
"GM", // Gambia
"GE", // Georgia
"DE", // Germany
"GH", // Ghana
"GI", // Gibraltar (UK)
"GR", // Greece
"GD", // Grenada
"GT", // Guatemala
"GU", // Guam (US)
"GN", // Guinea
"GW", // Guinea-Bissau
"GY", // Guyana
"HT", // Haiti
"HM", // Heard Island and McDonald Islands (AU)
"HN", // Honduras
"HU", // Hungary
"IS", // Iceland
@@ -131,7 +116,6 @@ static SUPPORTED_COUNTRIES: LazyLock<HashSet<&'static str>> = LazyLock::new(|| {
"MD", // Moldova
"MC", // Monaco
"MN", // Mongolia
"MS", // Montserrat (UK)
"ME", // Montenegro
"MA", // Morocco
"MZ", // Mozambique
@@ -142,11 +126,8 @@ static SUPPORTED_COUNTRIES: LazyLock<HashSet<&'static str>> = LazyLock::new(|| {
"NZ", // New Zealand
"NE", // Niger
"NG", // Nigeria
"NF", // Norfolk Island (AU)
"MK", // North Macedonia
"MI", // Northern Mariana Islands (UK)
"NO", // Norway
"NU", // Niue (NZ)
"OM", // Oman
"PK", // Pakistan
"PW", // Palau
@@ -156,18 +137,13 @@ static SUPPORTED_COUNTRIES: LazyLock<HashSet<&'static str>> = LazyLock::new(|| {
"PY", // Paraguay
"PE", // Peru
"PH", // Philippines
"PN", // Pitcairn (UK)
"PL", // Poland
"PT", // Portugal
"PR", // Puerto Rico (US)
"QA", // Qatar
"RO", // Romania
"RW", // Rwanda
"BL", // Saint Barthélemy (FR)
"KN", // Saint Kitts and Nevis
"LC", // Saint Lucia
"MF", // Saint Martin (FR)
"PM", // Saint Pierre and Miquelon (FR)
"VC", // Saint Vincent and the Grenadines
"WS", // Samoa
"SM", // San Marino
@@ -176,7 +152,6 @@ static SUPPORTED_COUNTRIES: LazyLock<HashSet<&'static str>> = LazyLock::new(|| {
"SN", // Senegal
"RS", // Serbia
"SC", // Seychelles
"SH", // Saint Helena, Ascension and Tristan da Cunha (UK)
"SL", // Sierra Leone
"SG", // Singapore
"SK", // Slovakia
@@ -195,28 +170,22 @@ static SUPPORTED_COUNTRIES: LazyLock<HashSet<&'static str>> = LazyLock::new(|| {
"TH", // Thailand
"TL", // Timor-Leste
"TG", // Togo
"TK", // Tokelau (NZ)
"TO", // Tonga
"TT", // Trinidad and Tobago
"TN", // Tunisia
"TR", // Türkiye (Turkey)
"TM", // Turkmenistan
"TC", // Turks and Caicos Islands (UK)
"TV", // Tuvalu
"UG", // Uganda
"UA", // Ukraine (except Crimea, Donetsk, and Luhansk regions)
"AE", // United Arab Emirates
"GB", // United Kingdom
"UM", // United States Minor Outlying Islands (US)
"US", // United States of America
"UY", // Uruguay
"UZ", // Uzbekistan
"VU", // Vanuatu
"VA", // Vatican City
"VN", // Vietnam
"VI", // Virgin Islands (US)
"VG", // Virgin Islands (UK)
"WF", // Wallis and Futuna (FR)
"ZM", // Zambia
"ZW", // Zimbabwe
]

View File

@@ -32,8 +32,8 @@ client.workspace = true
clock.workspace = true
collections.workspace = true
command_palette_hooks.workspace = true
context_servers.workspace = true
db.workspace = true
context_servers.workspace = true
editor.workspace = true
feature_flags.workspace = true
fs.workspace = true
@@ -58,11 +58,9 @@ open_ai = { workspace = true, features = ["schemars"] }
ordered-float.workspace = true
parking_lot.workspace = true
paths.workspace = true
picker.workspace = true
project.workspace = true
proto.workspace = true
regex.workspace = true
release_channel.workspace = true
rope.workspace = true
schemars.workspace = true
search.workspace = true
@@ -70,8 +68,8 @@ semantic_index.workspace = true
serde.workspace = true
serde_json.workspace = true
settings.workspace = true
similar.workspace = true
smallvec.workspace = true
similar.workspace = true
smol.workspace = true
telemetry_events.workspace = true
terminal.workspace = true
@@ -83,6 +81,7 @@ ui.workspace = true
util.workspace = true
uuid.workspace = true
workspace.workspace = true
picker.workspace = true
zed_actions.workspace = true
[dev-dependencies]

View File

@@ -360,7 +360,12 @@ impl AssistantPanel {
}
}))
.tooltip(move |cx| {
Tooltip::for_action_in("Open History", &DeployHistory, &focus_handle, cx)
cx.new_view(|cx| {
let keybind =
KeyBinding::for_action_in(&DeployHistory, &focus_handle, cx);
Tooltip::new("Open History").key_binding(keybind)
})
.into()
})
.selected(
pane.active_item()

View File

@@ -1136,7 +1136,7 @@ impl InlineAssistant {
editor.set_show_gutter(false, cx);
editor.scroll_manager.set_forbid_vertical_scroll(true);
editor.set_read_only(true);
editor.set_show_inline_completions(Some(false), cx);
editor.set_show_inline_completions(false);
editor.highlight_rows::<DeletedLines>(
Anchor::min()..=Anchor::max(),
Some(cx.theme().status().deleted_background),

View File

@@ -23,7 +23,6 @@ use language_model::{
};
use parking_lot::RwLock;
use picker::{Picker, PickerDelegate};
use release_channel::ReleaseChannel;
use rope::Rope;
use serde::{Deserialize, Serialize};
use settings::Settings;
@@ -95,16 +94,14 @@ pub fn open_prompt_library(
cx.spawn(|cx| async move {
let store = store.await?;
cx.update(|cx| {
let app_id = ReleaseChannel::global(cx).app_id();
let bounds = Bounds::centered(None, size(px(1024.0), px(768.0)), cx);
cx.open_window(
WindowOptions {
titlebar: Some(TitlebarOptions {
title: Some("Prompt Library".into()),
appears_transparent: cfg!(target_os = "macos"),
appears_transparent: !cfg!(windows),
traffic_light_position: Some(point(px(9.0), px(9.0))),
}),
app_id: Some(app_id.to_owned()),
window_bounds: Some(WindowBounds::Windowed(bounds)),
..Default::default()
},
@@ -498,7 +495,7 @@ impl PromptLibrary {
editor.set_text(prompt_metadata.title.unwrap_or_default(), cx);
if prompt_id.is_built_in() {
editor.set_read_only(true);
editor.set_show_inline_completions(Some(false), cx);
editor.set_show_inline_completions(false);
}
editor
});
@@ -513,7 +510,7 @@ impl PromptLibrary {
let mut editor = Editor::for_buffer(buffer, None, cx);
if prompt_id.is_built_in() {
editor.set_read_only(true);
editor.set_show_inline_completions(Some(false), cx);
editor.set_show_inline_completions(false);
}
editor.set_soft_wrap_mode(SoftWrap::EditorWidth, cx);
editor.set_show_gutter(false, cx);

View File

@@ -84,15 +84,11 @@ impl SlashCommand for ContextServerSlashCommand {
Ok(SlashCommandOutput {
sections: vec![SlashCommandOutputSection {
range: 0..(result.prompt.len()),
range: 0..result.len(),
icon: IconName::ZedAssistant,
label: SharedString::from(
result
.description
.unwrap_or(format!("Result from {}", prompt_name)),
),
label: SharedString::from(format!("Result from {}", prompt_name)),
}],
text: result.prompt,
text: result,
run_commands_in_text: false,
})
})

View File

@@ -79,11 +79,6 @@ pub enum WorkflowSuggestion {
symbol_path: SymbolPath,
range: Range<language::Anchor>,
},
FindReplace {
replacement: String,
range: Range<language::Anchor>,
description: String,
},
}
impl WorkflowStep {
@@ -284,7 +279,6 @@ impl WorkflowSuggestion {
| Self::PrependChild { position, .. }
| Self::AppendChild { position, .. } => *position..*position,
Self::Delete { range, .. } => range.clone(),
Self::FindReplace { range, .. } => range.clone(),
}
}
@@ -296,7 +290,6 @@ impl WorkflowSuggestion {
| Self::InsertSiblingAfter { description, .. }
| Self::PrependChild { description, .. }
| Self::AppendChild { description, .. } => Some(description),
Self::FindReplace { .. } => None,
Self::Delete { .. } => None,
}
}
@@ -309,7 +302,6 @@ impl WorkflowSuggestion {
| Self::InsertSiblingAfter { description, .. }
| Self::PrependChild { description, .. }
| Self::AppendChild { description, .. } => Some(description),
Self::FindReplace { .. } => None,
Self::Delete { .. } => None,
}
}
@@ -322,7 +314,6 @@ impl WorkflowSuggestion {
Self::PrependChild { symbol_path, .. } => symbol_path.as_ref(),
Self::AppendChild { symbol_path, .. } => symbol_path.as_ref(),
Self::Delete { symbol_path, .. } => Some(symbol_path),
Self::FindReplace { .. } => None,
Self::CreateFile { .. } => None,
}
}
@@ -336,7 +327,6 @@ impl WorkflowSuggestion {
Self::PrependChild { .. } => "PrependChild",
Self::AppendChild { .. } => "AppendChild",
Self::Delete { .. } => "Delete",
Self::FindReplace { .. } => "FindReplace",
}
}
@@ -459,15 +449,6 @@ impl WorkflowSuggestion {
suggestion_range = snapshot.anchor_in_excerpt(excerpt_id, range.start)?
..snapshot.anchor_in_excerpt(excerpt_id, range.end)?;
}
Self::FindReplace {
range,
replacement,
description,
} => {
initial_prompt = description.clone();
suggestion_range = snapshot.anchor_in_excerpt(excerpt_id, range.start)?
..snapshot.anchor_in_excerpt(excerpt_id, range.end)?;
}
}
InlineAssistant::update_global(cx, |inline_assistant, cx| {
@@ -697,39 +678,11 @@ pub mod tool {
let range = snapshot.anchor_before(start)..snapshot.anchor_after(end);
WorkflowSuggestion::Delete { range, symbol_path }
}
WorkflowSuggestionToolKind::FindReplace {
target,
replacement,
description,
} => {
let range = Self::find_target_range(&snapshot, &target)?;
WorkflowSuggestion::FindReplace {
replacement,
range,
description,
}
}
};
Ok((buffer, suggestion))
}
fn find_target_range(
snapshot: &BufferSnapshot,
target: &str,
) -> Result<Range<language::Anchor>> {
let text = snapshot.text();
let start_offset = text
.find(target)
.ok_or_else(|| anyhow!("Target text not found in file"))?;
let end_offset = start_offset + target.len();
let start = snapshot.anchor_at(start_offset, language::Bias::Left);
let end = snapshot.anchor_at(end_offset, language::Bias::Right);
Ok(start..end)
}
fn resolve_symbol(
snapshot: &BufferSnapshot,
outline: &Outline<Anchor>,
@@ -797,16 +750,6 @@ pub mod tool {
/// A brief description of the transformation to apply to the symbol.
description: String,
},
/// Finds and replaces a specified code block with a new one.
/// This operation replaces an entire block of code with new content.
FindReplace {
/// A string representing the full code block to be replaced.
target: String,
/// A string representing the new code block that will replace the target.
replacement: String,
/// A brief description of the find and replace operation.
description: String,
},
/// Creates a new file with the given path based on the provided description.
/// This operation adds a new file to the codebase.
Create {

View File

@@ -78,7 +78,7 @@ impl WorkflowStepView {
editor.set_show_wrap_guides(false, cx);
editor.set_show_indent_guides(false, cx);
editor.set_read_only(true);
editor.set_show_inline_completions(Some(false), cx);
editor.set_show_inline_completions(false);
editor.insert_blocks(
[
BlockProperties {

View File

@@ -88,34 +88,6 @@ struct JsonRelease {
url: String,
}
struct MacOsUnmounter {
mount_path: PathBuf,
}
impl Drop for MacOsUnmounter {
fn drop(&mut self) {
let unmount_output = std::process::Command::new("hdiutil")
.args(&["detach", "-force"])
.arg(&self.mount_path)
.output();
match unmount_output {
Ok(output) if output.status.success() => {
log::info!("Successfully unmounted the disk image");
}
Ok(output) => {
log::error!(
"Failed to unmount disk image: {:?}",
String::from_utf8_lossy(&output.stderr)
);
}
Err(error) => {
log::error!("Error while trying to unmount disk image: {:?}", error);
}
}
}
}
struct AutoUpdateSetting(bool);
/// Whether or not to automatically check for updates.
@@ -767,11 +739,6 @@ async fn install_release_macos(
String::from_utf8_lossy(&output.stderr)
);
// Create an MacOsUnmounter that will be dropped (and thus unmount the disk) when this function exits
let _unmounter = MacOsUnmounter {
mount_path: mount_path.clone(),
};
let output = Command::new("rsync")
.args(&["-av", "--delete"])
.arg(&mounted_app_path)
@@ -785,5 +752,17 @@ async fn install_release_macos(
String::from_utf8_lossy(&output.stderr)
);
let output = Command::new("hdiutil")
.args(&["detach"])
.arg(&mount_path)
.output()
.await?;
anyhow::ensure!(
output.status.success(),
"failed to unount: {:?}",
String::from_utf8_lossy(&output.stderr)
);
Ok(running_app_path)
}

View File

@@ -19,7 +19,6 @@ path = "src/main.rs"
[dependencies]
anyhow.workspace = true
clap.workspace = true
collections.workspace = true
ipc-channel = "0.18"
once_cell.workspace = true
parking_lot.workspace = true

View File

@@ -1,4 +1,3 @@
use collections::HashMap;
pub use ipc_channel::ipc;
use serde::{Deserialize, Serialize};
@@ -16,7 +15,6 @@ pub enum CliRequest {
wait: bool,
open_new_workspace: Option<bool>,
dev_server_token: Option<String>,
env: Option<HashMap<String, String>>,
},
}

View File

@@ -3,7 +3,6 @@
use anyhow::{Context, Result};
use clap::Parser;
use cli::{ipc::IpcOneShotServer, CliRequest, CliResponse, IpcHandshake};
use collections::HashMap;
use parking_lot::Mutex;
use std::{
env, fs, io,
@@ -123,7 +122,6 @@ fn main() -> Result<()> {
None
};
let env = Some(std::env::vars().collect::<HashMap<_, _>>());
let exit_status = Arc::new(Mutex::new(None));
let mut paths = vec![];
let mut urls = vec![];
@@ -151,14 +149,12 @@ fn main() -> Result<()> {
move || {
let (_, handshake) = server.accept().context("Handshake after Zed spawn")?;
let (tx, rx) = (handshake.requests, handshake.responses);
tx.send(CliRequest::Open {
paths,
urls,
wait: args.wait,
open_new_workspace,
dev_server_token: args.dev_server_token,
env,
})?;
while let Ok(response) = rx.recv() {

View File

@@ -50,14 +50,14 @@ rand.workspace = true
reqwest = { version = "0.11", features = ["json"] }
rpc.workspace = true
scrypt = "0.11"
sea-orm = { version = "1.1.0-rc.1", features = ["sqlx-postgres", "postgres-array", "runtime-tokio-rustls", "with-uuid"] }
sea-orm = { version = "0.12.x", features = ["sqlx-postgres", "postgres-array", "runtime-tokio-rustls", "with-uuid"] }
semantic_version.workspace = true
semver.workspace = true
serde.workspace = true
serde_derive.workspace = true
serde_json.workspace = true
sha2.workspace = true
sqlx = { version = "0.8", features = ["runtime-tokio-rustls", "postgres", "json", "time", "uuid", "any"] }
sqlx = { version = "0.7", features = ["runtime-tokio-rustls", "postgres", "json", "time", "uuid", "any"] }
strum.workspace = true
subtle.workspace = true
rustc-demangle.workspace = true
@@ -109,11 +109,11 @@ remote = { workspace = true, features = ["test-support"] }
remote_server.workspace = true
dev_server_projects.workspace = true
rpc = { workspace = true, features = ["test-support"] }
sea-orm = { version = "1.1.0-rc.1", features = ["sqlx-sqlite"] }
sea-orm = { version = "0.12.x", features = ["sqlx-sqlite"] }
serde_json.workspace = true
session = { workspace = true, features = ["test-support"] }
settings = { workspace = true, features = ["test-support"] }
sqlx = { version = "0.8", features = ["sqlite"] }
sqlx = { version = "0.7", features = ["sqlite"] }
theme.workspace = true
unindent.workspace = true
util.workspace = true

View File

@@ -99,7 +99,7 @@ id_type!(UserId);
#[derive(
Eq, PartialEq, Copy, Clone, Debug, EnumIter, DeriveActiveEnum, Default, Hash, Serialize,
)]
#[sea_orm(rs_type = "String", db_type = "String(StringLen::None)")]
#[sea_orm(rs_type = "String", db_type = "String(None)")]
pub enum ChannelRole {
/// Admin can read/write and change permissions.
#[sea_orm(string_value = "admin")]
@@ -239,7 +239,7 @@ impl Into<i32> for ChannelRole {
/// ChannelVisibility controls whether channels are public or private.
#[derive(Eq, PartialEq, Copy, Clone, Debug, EnumIter, DeriveActiveEnum, Default, Hash)]
#[sea_orm(rs_type = "String", db_type = "String(StringLen::None)")]
#[sea_orm(rs_type = "String", db_type = "String(None)")]
pub enum ChannelVisibility {
/// Public channels are visible to anyone with the link. People join with the Guest role by default.
#[sea_orm(string_value = "public")]

View File

@@ -39,7 +39,7 @@ impl ActiveModelBehavior for ActiveModel {}
#[derive(
Eq, PartialEq, Copy, Clone, Debug, EnumIter, DeriveActiveEnum, Default, Hash, Serialize,
)]
#[sea_orm(rs_type = "String", db_type = "String(StringLen::None)")]
#[sea_orm(rs_type = "String", db_type = "String(None)")]
#[serde(rename_all = "snake_case")]
pub enum StripeSubscriptionStatus {
#[default]

View File

@@ -237,22 +237,10 @@ async fn perform_completion(
.await
.map_err(|err| match err {
anthropic::AnthropicError::ApiError(ref api_error) => match api_error.code() {
Some(anthropic::ApiErrorCode::RateLimitError) => {
tracing::info!(
target: "upstream rate limit exceeded",
user_id = claims.user_id,
login = claims.github_user_login,
authn.jti = claims.jti,
is_staff = claims.is_staff,
provider = params.provider.to_string(),
model = model
);
Error::http(
StatusCode::TOO_MANY_REQUESTS,
"Upstream Anthropic rate limit exceeded.".to_string(),
)
}
Some(anthropic::ApiErrorCode::RateLimitError) => Error::http(
StatusCode::TOO_MANY_REQUESTS,
"Upstream Anthropic rate limit exceeded.".to_string(),
),
Some(anthropic::ApiErrorCode::InvalidRequestError) => {
Error::http(StatusCode::BAD_REQUEST, api_error.message.clone())
}

View File

@@ -85,9 +85,7 @@ fn authorize_access_for_country(
if !is_country_supported_by_provider {
Err(Error::http(
StatusCode::UNAVAILABLE_FOR_LEGAL_REASONS,
format!(
"access to {provider:?} models is not available in your region ({country_code})"
),
format!("access to {provider:?} models is not available in your region"),
))?
}
@@ -197,7 +195,7 @@ mod tests {
.to_vec();
assert_eq!(
String::from_utf8(response_body).unwrap(),
format!("access to {provider:?} models is not available in your region ({country_code})")
format!("access to {provider:?} models is not available in your region")
);
}
}

View File

@@ -1,12 +1,10 @@
use anyhow::anyhow;
use axum::headers::HeaderMapExt;
use axum::{
extract::MatchedPath,
http::{Request, Response},
routing::get,
Extension, Router,
};
use collab::api::CloudflareIpCountryHeader;
use collab::llm::{db::LlmDatabase, log_usage_periodically};
use collab::migrations::run_database_migrations;
use collab::user_backfiller::spawn_user_backfiller;
@@ -152,16 +150,10 @@ async fn main() -> Result<()> {
.get::<MatchedPath>()
.map(MatchedPath::as_str);
let geoip_country_code = request
.headers()
.typed_get::<CloudflareIpCountryHeader>()
.map(|header| header.to_string());
tracing::info_span!(
"http_request",
method = ?request.method(),
matched_path,
geoip_country_code,
user_id = tracing::field::Empty,
login = tracing::field::Empty,
authn.jti = tracing::field::Empty,

View File

@@ -78,6 +78,8 @@ use tracing::{
info_span, instrument, Instrument,
};
use self::connection_pool::VersionedMessage;
pub const RECONNECT_TIMEOUT: Duration = Duration::from_secs(30);
// kubernetes gives terminated pods 10s to shutdown gracefully. After they're gone, we can clean up old resources.
@@ -505,7 +507,7 @@ impl Server {
forward_mutating_project_request::<proto::ApplyCompletionAdditionalEdits>,
))
.add_request_handler(user_handler(
forward_mutating_project_request::<proto::OpenNewBuffer>,
forward_versioned_mutating_project_request::<proto::OpenNewBuffer>,
))
.add_request_handler(user_handler(
forward_mutating_project_request::<proto::ResolveCompletionDocumentation>,
@@ -547,7 +549,7 @@ impl Server {
forward_mutating_project_request::<proto::OnTypeFormatting>,
))
.add_request_handler(user_handler(
forward_mutating_project_request::<proto::SaveBuffer>,
forward_versioned_mutating_project_request::<proto::SaveBuffer>,
))
.add_request_handler(user_handler(
forward_mutating_project_request::<proto::BlameBuffer>,
@@ -3045,6 +3047,45 @@ where
Ok(())
}
/// forward a project request to the host. These requests are disallowed
/// for guests.
async fn forward_versioned_mutating_project_request<T>(
request: T,
response: Response<T>,
session: UserSession,
) -> Result<()>
where
T: EntityMessage + RequestMessage + VersionedMessage,
{
let project_id = ProjectId::from_proto(request.remote_entity_id());
let host_connection_id = session
.db()
.await
.host_for_mutating_project_request(project_id, session.connection_id, session.user_id())
.await?;
if let Some(host_version) = session
.connection_pool()
.await
.connection(host_connection_id)
.map(|c| c.zed_version)
{
if let Some(min_required_version) = request.required_host_version() {
if min_required_version > host_version {
return Err(anyhow!(ErrorCode::RemoteUpgradeRequired
.with_tag("required", &min_required_version.to_string())))?;
}
}
}
let payload = session
.peer
.forward_request(session.connection_id, host_connection_id, request)
.await?;
response.send(payload)?;
Ok(())
}
/// Notify other participants that a new buffer has been created
async fn create_buffer_for_peer(
request: proto::CreateBufferForPeer,

View File

@@ -32,7 +32,11 @@ impl fmt::Display for ZedVersion {
impl ZedVersion {
pub fn can_collaborate(&self) -> bool {
self.0 >= SemanticVersion::new(0, 134, 0)
self.0 >= SemanticVersion::new(0, 129, 2)
}
pub fn with_save_as() -> ZedVersion {
ZedVersion(SemanticVersion::new(0, 134, 0))
}
pub fn with_list_directory() -> ZedVersion {
@@ -44,6 +48,28 @@ impl ZedVersion {
}
}
pub trait VersionedMessage {
fn required_host_version(&self) -> Option<ZedVersion> {
None
}
}
impl VersionedMessage for proto::SaveBuffer {
fn required_host_version(&self) -> Option<ZedVersion> {
if self.new_path.is_some() {
Some(ZedVersion::with_save_as())
} else {
None
}
}
}
impl VersionedMessage for proto::OpenNewBuffer {
fn required_host_version(&self) -> Option<ZedVersion> {
Some(ZedVersion::with_save_as())
}
}
#[derive(Serialize)]
pub struct Connection {
pub principal_id: PrincipalId,

View File

@@ -3178,7 +3178,7 @@ async fn test_fs_operations(
project_b
.update(cx_b, |project, cx| {
project.copy_entry(entry.id, None, Path::new("f.txt"), cx)
project.copy_entry(entry.id, Path::new("f.txt"), cx)
})
.await
.unwrap()
@@ -4920,7 +4920,6 @@ async fn test_project_search(
false,
Default::default(),
Default::default(),
None,
)
.unwrap(),
cx,

View File

@@ -883,7 +883,6 @@ impl RandomizedTest for ProjectCollaborationTest {
false,
Default::default(),
Default::default(),
None,
)
.unwrap(),
cx,

View File

@@ -916,7 +916,6 @@ impl TestClient {
self.app_state.user_store.clone(),
self.app_state.languages.clone(),
self.app_state.fs.clone(),
None,
cx,
)
})

View File

@@ -31,7 +31,6 @@ static MENTIONS_SEARCH: LazyLock<SearchQuery> = LazyLock::new(|| {
false,
Default::default(),
Default::default(),
None,
)
.unwrap()
});

View File

@@ -112,7 +112,7 @@ impl InitializedContextServerProtocol {
&self,
prompt: P,
arguments: HashMap<String, String>,
) -> Result<types::PromptsGetResponse> {
) -> Result<String> {
self.check_capability(ServerCapability::Prompts)?;
let params = types::PromptsGetParams {
@@ -125,7 +125,7 @@ impl InitializedContextServerProtocol {
.request(types::RequestType::PromptsGet.as_str(), params)
.await?;
Ok(response)
Ok(response.prompt)
}
}

View File

@@ -102,7 +102,6 @@ pub struct ResourcesListResponse {
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct PromptsGetResponse {
pub description: Option<String>,
pub prompt: String,
}

View File

@@ -317,7 +317,6 @@ gpui::actions!(
ToggleSelectionMenu,
ToggleHunkDiff,
ToggleInlayHints,
ToggleInlineCompletions,
ToggleLineNumbers,
ToggleRelativeLineNumbers,
ToggleIndentGuides,

View File

@@ -76,8 +76,8 @@ use gpui::{
FocusOutEvent, FocusableView, FontId, FontWeight, HighlightStyle, Hsla, InteractiveText,
KeyContext, ListSizingBehavior, Model, MouseButton, PaintQuad, ParentElement, Pixels, Render,
SharedString, Size, StrikethroughStyle, Styled, StyledText, Subscription, Task, TextStyle,
UTF16Selection, UnderlineStyle, UniformListScrollHandle, View, ViewContext, ViewInputHandler,
VisualContext, WeakFocusHandle, WeakView, WindowContext,
UnderlineStyle, UniformListScrollHandle, View, ViewContext, ViewInputHandler, VisualContext,
WeakFocusHandle, WeakView, WindowContext,
};
use highlight_matching_bracket::refresh_matching_bracket_highlights;
use hover_popover::{hide_hover, HoverState};
@@ -307,7 +307,7 @@ pub fn init(cx: &mut AppContext) {
cx.on_action(move |_: &workspace::NewFile, cx| {
let app_state = workspace::AppState::global(cx);
if let Some(app_state) = app_state.upgrade() {
workspace::open_new(Default::default(), app_state, cx, |workspace, cx| {
workspace::open_new(app_state, cx, |workspace, cx| {
Editor::new_file(workspace, &Default::default(), cx)
})
.detach();
@@ -316,7 +316,7 @@ pub fn init(cx: &mut AppContext) {
cx.on_action(move |_: &workspace::NewWindow, cx| {
let app_state = workspace::AppState::global(cx);
if let Some(app_state) = app_state.upgrade() {
workspace::open_new(Default::default(), app_state, cx, |workspace, cx| {
workspace::open_new(app_state, cx, |workspace, cx| {
Editor::new_file(workspace, &Default::default(), cx)
})
.detach();
@@ -556,7 +556,7 @@ pub struct Editor {
hovered_link_state: Option<HoveredLinkState>,
inline_completion_provider: Option<RegisteredInlineCompletionProvider>,
active_inline_completion: Option<(Inlay, Option<Range<Anchor>>)>,
show_inline_completions_override: Option<bool>,
show_inline_completions: bool,
inlay_hint_cache: InlayHintCache,
expanded_hunks: ExpandedHunks,
next_inlay_id: usize,
@@ -1912,7 +1912,7 @@ impl Editor {
hovered_cursors: Default::default(),
next_editor_action_id: EditorActionId::default(),
editor_actions: Rc::default(),
show_inline_completions_override: None,
show_inline_completions: mode == EditorMode::Full,
custom_context_menu: None,
show_git_blame_gutter: false,
show_git_blame_inline: false,
@@ -2305,49 +2305,8 @@ impl Editor {
self.auto_replace_emoji_shortcode = auto_replace;
}
pub fn toggle_inline_completions(
&mut self,
_: &ToggleInlineCompletions,
cx: &mut ViewContext<Self>,
) {
if self.show_inline_completions_override.is_some() {
self.set_show_inline_completions(None, cx);
} else {
let cursor = self.selections.newest_anchor().head();
if let Some((buffer, cursor_buffer_position)) =
self.buffer.read(cx).text_anchor_for_position(cursor, cx)
{
let show_inline_completions =
!self.should_show_inline_completions(&buffer, cursor_buffer_position, cx);
self.set_show_inline_completions(Some(show_inline_completions), cx);
}
}
}
pub fn set_show_inline_completions(
&mut self,
show_inline_completions: Option<bool>,
cx: &mut ViewContext<Self>,
) {
self.show_inline_completions_override = show_inline_completions;
self.refresh_inline_completion(false, true, cx);
}
fn should_show_inline_completions(
&self,
buffer: &Model<Buffer>,
buffer_position: language::Anchor,
cx: &AppContext,
) -> bool {
if let Some(provider) = self.inline_completion_provider() {
if let Some(show_inline_completions) = self.show_inline_completions_override {
show_inline_completions
} else {
self.mode == EditorMode::Full && provider.is_enabled(&buffer, buffer_position, cx)
}
} else {
false
}
pub fn set_show_inline_completions(&mut self, show_inline_completions: bool) {
self.show_inline_completions = show_inline_completions;
}
pub fn set_use_modal_editing(&mut self, to: bool) {
@@ -2365,8 +2324,6 @@ impl Editor {
show_completions: bool,
cx: &mut ViewContext<Self>,
) {
cx.invalidate_character_coordinates();
// Copy selections to primary selection buffer
#[cfg(target_os = "linux")]
if local {
@@ -4977,7 +4934,8 @@ impl Editor {
let (buffer, cursor_buffer_position) =
self.buffer.read(cx).text_anchor_for_position(cursor, cx)?;
if !user_requested
&& !self.should_show_inline_completions(&buffer, cursor_buffer_position, cx)
&& (!self.show_inline_completions
|| !provider.is_enabled(&buffer, cursor_buffer_position, cx))
{
self.discard_inline_completion(false, cx);
return None;
@@ -4997,7 +4955,9 @@ impl Editor {
let cursor = self.selections.newest_anchor().head();
let (buffer, cursor_buffer_position) =
self.buffer.read(cx).text_anchor_for_position(cursor, cx)?;
if !self.should_show_inline_completions(&buffer, cursor_buffer_position, cx) {
if !self.show_inline_completions
|| !provider.is_enabled(&buffer, cursor_buffer_position, cx)
{
return None;
}
@@ -9111,16 +9071,18 @@ impl Editor {
cx: &mut ViewContext<Self>,
) -> Task<Result<Navigated>> {
let definition = self.go_to_definition_of_kind(GotoDefinitionKind::Symbol, false, cx);
cx.spawn(|editor, mut cx| async move {
let references = self.find_all_references(&FindAllReferences, cx);
cx.background_executor().spawn(async move {
if definition.await? == Navigated::Yes {
return Ok(Navigated::Yes);
}
match editor.update(&mut cx, |editor, cx| {
editor.find_all_references(&FindAllReferences, cx)
})? {
Some(references) => references.await,
None => Ok(Navigated::No),
if let Some(references) = references {
if references.await? == Navigated::Yes {
return Ok(Navigated::Yes);
}
}
Ok(Navigated::No)
})
}
@@ -11839,12 +11801,12 @@ impl Editor {
let snapshot = buffer.read(cx).snapshot();
let range = self
.selected_text_range(false, cx)
.and_then(|selection| {
if selection.range.is_empty() {
.selected_text_range(cx)
.and_then(|selected_range| {
if selected_range.is_empty() {
None
} else {
Some(selection.range)
Some(selected_range)
}
})
.unwrap_or_else(|| 0..snapshot.len());
@@ -12796,24 +12758,15 @@ impl ViewInputHandler for Editor {
)
}
fn selected_text_range(
&mut self,
ignore_disabled_input: bool,
cx: &mut ViewContext<Self>,
) -> Option<UTF16Selection> {
fn selected_text_range(&mut self, cx: &mut ViewContext<Self>) -> Option<Range<usize>> {
// Prevent the IME menu from appearing when holding down an alphabetic key
// while input is disabled.
if !ignore_disabled_input && !self.input_enabled {
if !self.input_enabled {
return None;
}
let selection = self.selections.newest::<OffsetUtf16>(cx);
let range = selection.range();
Some(UTF16Selection {
range: range.start.0..range.end.0,
reversed: selection.reversed,
})
let range = self.selections.newest::<OffsetUtf16>(cx).range();
Some(range.start.0..range.end.0)
}
fn marked_text_range(&self, cx: &mut ViewContext<Self>) -> Option<Range<usize>> {

View File

@@ -7507,7 +7507,6 @@ async fn test_completion(cx: &mut gpui::TestAppContext) {
resolve_provider: Some(true),
..Default::default()
}),
signature_help_provider: Some(lsp::SignatureHelpOptions::default()),
..Default::default()
},
cx,
@@ -7536,37 +7535,6 @@ async fn test_completion(cx: &mut gpui::TestAppContext) {
.await;
assert_eq!(counter.load(atomic::Ordering::Acquire), 1);
let _handler = handle_signature_help_request(
&mut cx,
lsp::SignatureHelp {
signatures: vec![lsp::SignatureInformation {
label: "test signature".to_string(),
documentation: None,
parameters: Some(vec![lsp::ParameterInformation {
label: lsp::ParameterLabel::Simple("foo: u8".to_string()),
documentation: None,
}]),
active_parameter: None,
}],
active_signature: None,
active_parameter: None,
},
);
cx.update_editor(|editor, cx| {
assert!(
!editor.signature_help_state.is_shown(),
"No signature help was called for"
);
editor.show_signature_help(&ShowSignatureHelp, cx);
});
cx.run_until_parked();
cx.update_editor(|editor, _| {
assert!(
!editor.signature_help_state.is_shown(),
"No signature help should be shown when completions menu is open"
);
});
let apply_additional_edits = cx.update_editor(|editor, cx| {
editor.context_menu_next(&Default::default(), cx);
editor

View File

@@ -347,7 +347,6 @@ impl EditorElement {
register_action(view, cx, Editor::toggle_relative_line_numbers);
register_action(view, cx, Editor::toggle_indent_guides);
register_action(view, cx, Editor::toggle_inlay_hints);
register_action(view, cx, Editor::toggle_inline_completions);
register_action(view, cx, hover_popover::hover);
register_action(view, cx, Editor::reveal_in_finder);
register_action(view, cx, Editor::copy_path);

View File

@@ -782,7 +782,7 @@ fn editor_with_deleted_text(
editor.set_show_gutter(false, cx);
editor.scroll_manager.set_forbid_vertical_scroll(true);
editor.set_read_only(true);
editor.set_show_inline_completions(Some(false), cx);
editor.set_show_inline_completions(false);
editor.highlight_rows::<DiffRowHighlight>(
Anchor::min()..=Anchor::max(),
Some(deleted_color),

View File

@@ -149,7 +149,7 @@ impl Editor {
}
pub fn show_signature_help(&mut self, _: &ShowSignatureHelp, cx: &mut ViewContext<Self>) {
if self.pending_rename.is_some() || self.has_active_completions_menu() {
if self.pending_rename.is_some() {
return;
}

View File

@@ -452,34 +452,28 @@ impl ExtensionsPage {
)
.child(
h_flex()
.gap_2()
.justify_between()
.child(
div().overflow_x_hidden().text_ellipsis().child(
Label::new(format!(
"{}: {}",
if extension.authors.len() > 1 {
"Authors"
} else {
"Author"
},
extension.authors.join(", ")
))
.size(LabelSize::Small),
),
Label::new(format!(
"{}: {}",
if extension.authors.len() > 1 {
"Authors"
} else {
"Author"
},
extension.authors.join(", ")
))
.size(LabelSize::Small),
)
.child(Label::new("<>").size(LabelSize::Small)),
)
.child(
h_flex()
.gap_2()
.justify_between()
.children(extension.description.as_ref().map(|description| {
div().overflow_x_hidden().text_ellipsis().child(
Label::new(description.clone())
.size(LabelSize::Small)
.color(Color::Default),
)
Label::new(description.clone())
.size(LabelSize::Small)
.color(Color::Default)
}))
.children(repository_url.map(|repository_url| {
IconButton::new(
@@ -553,21 +547,18 @@ impl ExtensionsPage {
)
.child(
h_flex()
.gap_2()
.justify_between()
.child(
div().overflow_x_hidden().text_ellipsis().child(
Label::new(format!(
"{}: {}",
if extension.manifest.authors.len() > 1 {
"Authors"
} else {
"Author"
},
extension.manifest.authors.join(", ")
))
.size(LabelSize::Small),
),
Label::new(format!(
"{}: {}",
if extension.manifest.authors.len() > 1 {
"Authors"
} else {
"Author"
},
extension.manifest.authors.join(", ")
))
.size(LabelSize::Small),
)
.child(
Label::new(format!(
@@ -582,7 +573,7 @@ impl ExtensionsPage {
.gap_2()
.justify_between()
.children(extension.manifest.description.as_ref().map(|description| {
div().overflow_x_hidden().text_ellipsis().child(
h_flex().overflow_x_hidden().child(
Label::new(description.clone())
.size(LabelSize::Small)
.color(Color::Default),

View File

@@ -186,7 +186,7 @@ impl FeedbackModal {
);
editor.set_show_gutter(false, cx);
editor.set_show_indent_guides(false, cx);
editor.set_show_inline_completions(Some(false), cx);
editor.set_show_inline_completions(false);
editor.set_vertical_scroll_margin(5, cx);
editor.set_use_modal_editing(false);
editor

View File

@@ -19,6 +19,7 @@ editor.workspace = true
futures.workspace = true
fuzzy.workspace = true
gpui.workspace = true
itertools = "0.11"
menu.workspace = true
picker.workspace = true
project.workspace = true

View File

@@ -4,7 +4,7 @@ mod file_finder_tests;
mod new_path_prompt;
mod open_path_prompt;
use collections::HashMap;
use collections::{BTreeSet, HashMap};
use editor::{scroll::Autoscroll, Bias, Editor};
use fuzzy::{CharBag, PathMatch, PathMatchCandidate};
use gpui::{
@@ -12,6 +12,7 @@ use gpui::{
FocusableView, Model, Modifiers, ModifiersChangedEvent, ParentElement, Render, Styled, Task,
View, ViewContext, VisualContext, WeakView,
};
use itertools::Itertools;
use new_path_prompt::NewPathPrompt;
use open_path_prompt::OpenPathPrompt;
use picker::{Picker, PickerDelegate};
@@ -165,7 +166,6 @@ pub struct FileFinderDelegate {
cancel_flag: Arc<AtomicBool>,
history_items: Vec<FoundPath>,
separate_history: bool,
first_update: bool,
}
/// Use a custom ordering for file finder: the regular one
@@ -209,29 +209,10 @@ struct Matches {
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone)]
enum Match {
History {
path: FoundPath,
panel_match: Option<ProjectPanelOrdMatch>,
},
History(FoundPath, Option<ProjectPanelOrdMatch>),
Search(ProjectPanelOrdMatch),
}
impl Match {
fn path(&self) -> &Arc<Path> {
match self {
Match::History { path, .. } => &path.project.path,
Match::Search(panel_match) => &panel_match.0.path,
}
}
fn panel_match(&self) -> Option<&ProjectPanelOrdMatch> {
match self {
Match::History { panel_match, .. } => panel_match.as_ref(),
Match::Search(panel_match) => Some(&panel_match),
}
}
}
impl Matches {
fn len(&self) -> usize {
self.matches.len()
@@ -241,33 +222,6 @@ impl Matches {
self.matches.get(index)
}
fn position(
&self,
entry: &Match,
currently_opened: Option<&FoundPath>,
) -> Result<usize, usize> {
if let Match::History {
path,
panel_match: None,
} = entry
{
// Slow case: linear search by path. Should not happen actually,
// since we call `position` only if matches set changed, but the query has not changed.
// And History entries do not have panel_match if query is empty, so there's no
// reason for the matches set to change.
self.matches
.iter()
.position(|m| path.project.path == *m.path())
.ok_or(0)
} else {
self.matches.binary_search_by(|m| {
// `reverse()` since if cmp_matches(a, b) == Ordering::Greater, then a is better than b.
// And we want the better entries go first.
Self::cmp_matches(self.separate_history, currently_opened, &m, &entry).reverse()
})
}
}
fn push_new_matches<'a>(
&'a mut self,
history_items: impl IntoIterator<Item = &'a FoundPath> + Clone,
@@ -276,95 +230,88 @@ impl Matches {
new_search_matches: impl Iterator<Item = ProjectPanelOrdMatch>,
extend_old_matches: bool,
) {
let Some(query) = query else {
// assuming that if there's no query, then there's no search matches.
self.matches.clear();
let path_to_entry = |found_path: &FoundPath| Match::History {
path: found_path.clone(),
panel_match: None,
};
self.matches
.extend(currently_opened.into_iter().map(path_to_entry));
self.matches.extend(
history_items
.into_iter()
.filter(|found_path| Some(*found_path) != currently_opened)
.map(path_to_entry),
);
return;
};
let new_history_matches = matching_history_items(history_items, currently_opened, query);
let new_search_matches: Vec<Match> = new_search_matches
.filter(|path_match| !new_history_matches.contains_key(&path_match.0.path))
let no_history_score = 0;
let matching_history_paths =
matching_history_item_paths(history_items.clone(), currently_opened, query);
let new_search_matches = new_search_matches
.filter(|path_match| !matching_history_paths.contains_key(&path_match.0.path))
.map(Match::Search)
.collect();
.map(|m| (no_history_score, m));
let old_search_matches = self
.matches
.drain(..)
.filter(|_| extend_old_matches)
.filter(|m| matches!(m, Match::Search(_)))
.map(|m| (no_history_score, m));
let history_matches = history_items
.into_iter()
.chain(currently_opened)
.enumerate()
.filter_map(|(i, history_item)| {
let query_match = matching_history_paths
.get(&history_item.project.path)
.cloned();
let query_match = if query.is_some() {
query_match?
} else {
query_match.flatten()
};
Some((i + 1, Match::History(history_item.clone(), query_match)))
});
if extend_old_matches {
// since we take history matches instead of new search matches
// and history matches has not changed(since the query has not changed and we do not extend old matches otherwise),
// old matches can't contain paths present in history_matches as well.
self.matches.retain(|m| matches!(m, Match::Search(_)));
} else {
self.matches.clear();
}
// At this point we have an unsorted set of new history matches, an unsorted set of new search matches
// and a sorted set of old search matches.
// It is possible that the new search matches' paths contain some of the old search matches' paths.
// History matches' paths are unique, since store in a HashMap by path.
// We build a sorted Vec<Match>, eliminating duplicate search matches.
// Search matches with the same paths should have equal `ProjectPanelOrdMatch`, so we should
// not have any duplicates after building the final list.
for new_match in new_history_matches
.into_values()
.chain(new_search_matches.into_iter())
{
match self.position(&new_match, currently_opened) {
Ok(_duplicate) => continue,
Err(i) => {
self.matches.insert(i, new_match);
if self.matches.len() == 100 {
break;
let mut unique_matches = BTreeSet::new();
self.matches = old_search_matches
.chain(history_matches)
.chain(new_search_matches)
.filter(|(_, m)| unique_matches.insert(m.clone()))
.sorted_by(|(history_score_a, a), (history_score_b, b)| {
match (a, b) {
// bubble currently opened files to the top
(Match::History(path, _), _) if Some(path) == currently_opened => {
cmp::Ordering::Less
}
(_, Match::History(path, _)) if Some(path) == currently_opened => {
cmp::Ordering::Greater
}
(Match::History(_, _), Match::Search(_)) if self.separate_history => {
cmp::Ordering::Less
}
(Match::Search(_), Match::History(_, _)) if self.separate_history => {
cmp::Ordering::Greater
}
(Match::History(_, match_a), Match::History(_, match_b)) => {
match_b.cmp(match_a)
}
(Match::History(_, match_a), Match::Search(match_b)) => {
Some(match_b).cmp(&match_a.as_ref())
}
(Match::Search(match_a), Match::History(_, match_b)) => {
match_b.as_ref().cmp(&Some(match_a))
}
(Match::Search(match_a), Match::Search(match_b)) => match_b.cmp(match_a),
}
}
}
}
/// If a < b, then a is a worse match, aligning with the `ProjectPanelOrdMatch` ordering.
fn cmp_matches(
separate_history: bool,
currently_opened: Option<&FoundPath>,
a: &Match,
b: &Match,
) -> cmp::Ordering {
debug_assert!(a.panel_match().is_some() && b.panel_match().is_some());
match (&a, &b) {
// bubble currently opened files to the top
(Match::History { path, .. }, _) if Some(path) == currently_opened => {
cmp::Ordering::Greater
}
(_, Match::History { path, .. }) if Some(path) == currently_opened => {
cmp::Ordering::Less
}
(Match::History { .. }, Match::Search(_)) if separate_history => cmp::Ordering::Greater,
(Match::Search(_), Match::History { .. }) if separate_history => cmp::Ordering::Less,
_ => a.panel_match().cmp(&b.panel_match()),
}
.then(history_score_a.cmp(history_score_b))
})
.take(100)
.map(|(_, m)| m)
.collect();
}
}
fn matching_history_items<'a>(
fn matching_history_item_paths<'a>(
history_items: impl IntoIterator<Item = &'a FoundPath>,
currently_opened: Option<&'a FoundPath>,
query: &FileSearchQuery,
) -> HashMap<Arc<Path>, Match> {
let mut candidates_paths = HashMap::default();
query: Option<&FileSearchQuery>,
) -> HashMap<Arc<Path>, Option<ProjectPanelOrdMatch>> {
let Some(query) = query else {
return history_items
.into_iter()
.chain(currently_opened)
.map(|found_path| (Arc::clone(&found_path.project.path), None))
.collect();
};
let history_items_by_worktrees = history_items
.into_iter()
@@ -386,7 +333,6 @@ fn matching_history_items<'a>(
.chars(),
),
};
candidates_paths.insert(Arc::clone(&found_path.project.path), found_path);
Some((found_path.project.worktree_id, candidate))
})
.fold(
@@ -412,15 +358,9 @@ fn matching_history_items<'a>(
)
.into_iter()
.map(|path_match| {
let (_, found_path) = candidates_paths
.remove_entry(&path_match.path)
.expect("candidate info not found");
(
Arc::clone(&path_match.path),
Match::History {
path: found_path.clone(),
panel_match: Some(ProjectPanelOrdMatch(path_match)),
},
Some(ProjectPanelOrdMatch(path_match)),
)
}),
);
@@ -499,7 +439,6 @@ impl FileFinderDelegate {
cancel_flag: Arc::new(AtomicBool::new(false)),
history_items,
separate_history,
first_update: true,
}
}
@@ -585,19 +524,12 @@ impl FileFinderDelegate {
) {
if search_id >= self.latest_search_id {
self.latest_search_id = search_id;
let query_changed = Some(query.path_query())
!= self
.latest_search_query
.as_ref()
.map(|query| query.path_query());
let extend_old_matches = self.latest_search_did_cancel && !query_changed;
let selected_match = if query_changed {
None
} else {
self.matches.get(self.selected_index).cloned()
};
let extend_old_matches = self.latest_search_did_cancel
&& Some(query.path_query())
== self
.latest_search_query
.as_ref()
.map(|query| query.path_query());
self.matches.push_new_matches(
&self.history_items,
self.currently_opened_path.as_ref(),
@@ -605,19 +537,9 @@ impl FileFinderDelegate {
matches.into_iter(),
extend_old_matches,
);
self.selected_index = selected_match.map_or_else(
|| self.calculate_selected_index(),
|m| {
self.matches
.position(&m, self.currently_opened_path.as_ref())
.unwrap_or(0)
},
);
self.latest_search_query = Some(query);
self.latest_search_did_cancel = did_cancel;
self.selected_index = self.calculate_selected_index();
cx.notify();
}
}
@@ -628,13 +550,10 @@ impl FileFinderDelegate {
cx: &AppContext,
ix: usize,
) -> (String, Vec<usize>, String, Vec<usize>) {
let (file_name, file_name_positions, full_path, full_path_positions) = match &path_match {
Match::History {
path: entry_path,
panel_match,
} => {
let worktree_id = entry_path.project.worktree_id;
let project_relative_path = &entry_path.project.path;
let (file_name, file_name_positions, full_path, full_path_positions) = match path_match {
Match::History(found_path, found_path_match) => {
let worktree_id = found_path.project.worktree_id;
let project_relative_path = &found_path.project.path;
let has_worktree = self
.project
.read(cx)
@@ -642,7 +561,7 @@ impl FileFinderDelegate {
.is_some();
if !has_worktree {
if let Some(absolute_path) = &entry_path.absolute {
if let Some(absolute_path) = &found_path.absolute {
return (
absolute_path
.file_name()
@@ -660,7 +579,7 @@ impl FileFinderDelegate {
let mut path = Arc::clone(project_relative_path);
if project_relative_path.as_ref() == Path::new("") {
if let Some(absolute_path) = &entry_path.absolute {
if let Some(absolute_path) = &found_path.absolute {
path = Arc::from(absolute_path.as_path());
}
}
@@ -674,7 +593,7 @@ impl FileFinderDelegate {
path_prefix: "".into(),
distance_to_relative_ancestor: usize::MAX,
};
if let Some(found_path_match) = &panel_match {
if let Some(found_path_match) = found_path_match {
path_match
.positions
.extend(found_path_match.0.positions.iter())
@@ -799,7 +718,7 @@ impl FileFinderDelegate {
/// Skips first history match (that is displayed topmost) if it's currently opened.
fn calculate_selected_index(&self) -> usize {
if let Some(Match::History { path, .. }) = self.matches.get(0) {
if let Some(Match::History(path, _)) = self.matches.get(0) {
if Some(path) == self.currently_opened_path.as_ref() {
let elements_after_first = self.matches.len() - 1;
if elements_after_first > 0 {
@@ -807,7 +726,6 @@ impl FileFinderDelegate {
}
}
}
0
}
}
@@ -840,7 +758,7 @@ impl PickerDelegate for FileFinderDelegate {
.matches
.iter()
.enumerate()
.find(|(_, m)| !matches!(m, Match::History { .. }))
.find(|(_, m)| !matches!(m, Match::History(_, _)))
.map(|(i, _)| i);
if let Some(first_non_history_index) = first_non_history_index {
if first_non_history_index > 0 {
@@ -859,34 +777,26 @@ impl PickerDelegate for FileFinderDelegate {
let raw_query = raw_query.replace(' ', "");
let raw_query = raw_query.trim();
if raw_query.is_empty() {
// if there was no query before, and we already have some (history) matches
// there's no need to update anything, since nothing has changed.
// We also want to populate matches set from history entries on the first update.
if self.latest_search_query.is_some() || self.first_update {
let project = self.project.read(cx);
let project = self.project.read(cx);
self.latest_search_id = post_inc(&mut self.search_count);
self.matches = Matches {
separate_history: self.separate_history,
..Matches::default()
};
self.matches.push_new_matches(
self.history_items.iter().filter(|history_item| {
project
.worktree_for_id(history_item.project.worktree_id, cx)
.is_some()
|| (project.is_local_or_ssh() && history_item.absolute.is_some())
}),
self.currently_opened_path.as_ref(),
None,
None.into_iter(),
false,
);
self.latest_search_id = post_inc(&mut self.search_count);
self.latest_search_query = None;
self.matches = Matches {
separate_history: self.separate_history,
..Matches::default()
};
self.matches.push_new_matches(
self.history_items.iter().filter(|history_item| {
project
.worktree_for_id(history_item.project.worktree_id, cx)
.is_some()
|| (project.is_local_or_ssh() && history_item.absolute.is_some())
}),
self.currently_opened_path.as_ref(),
None,
None.into_iter(),
false,
);
self.first_update = false;
self.selected_index = 0;
}
self.selected_index = 0;
cx.notify();
Task::ready(())
} else {
@@ -933,9 +843,9 @@ impl PickerDelegate for FileFinderDelegate {
)
}
};
match &m {
Match::History { path, .. } => {
let worktree_id = path.project.worktree_id;
match m {
Match::History(history_match, _) => {
let worktree_id = history_match.project.worktree_id;
if workspace
.project()
.read(cx)
@@ -946,12 +856,12 @@ impl PickerDelegate for FileFinderDelegate {
workspace,
ProjectPath {
worktree_id,
path: Arc::clone(&path.project.path),
path: Arc::clone(&history_match.project.path),
},
cx,
)
} else {
match path.absolute.as_ref() {
match history_match.absolute.as_ref() {
Some(abs_path) => {
if secondary {
workspace.split_abs_path(
@@ -971,7 +881,7 @@ impl PickerDelegate for FileFinderDelegate {
workspace,
ProjectPath {
worktree_id,
path: Arc::clone(&path.project.path),
path: Arc::clone(&history_match.project.path),
},
cx,
),
@@ -1047,7 +957,7 @@ impl PickerDelegate for FileFinderDelegate {
.expect("Invalid matches state: no element for index {ix}");
let icon = match &path_match {
Match::History { .. } => Icon::new(IconName::HistoryRerun)
Match::History(_, _) => Icon::new(IconName::HistoryRerun)
.color(Color::Muted)
.size(IconSize::Small)
.into_any_element(),

View File

@@ -1323,62 +1323,6 @@ async fn test_history_items_shown_in_order_of_open(cx: &mut TestAppContext) {
});
}
#[gpui::test]
async fn test_selected_history_item_stays_selected_on_worktree_updated(cx: &mut TestAppContext) {
let app_state = init_test(cx);
app_state
.fs
.as_fake()
.insert_tree(
"/test",
json!({
"test": {
"1.txt": "// One",
"2.txt": "// Two",
"3.txt": "// Three",
}
}),
)
.await;
let project = Project::test(app_state.fs.clone(), ["/test".as_ref()], cx).await;
let (workspace, cx) = cx.add_window_view(|cx| Workspace::test_new(project, cx));
open_close_queried_buffer("1", 1, "1.txt", &workspace, cx).await;
open_close_queried_buffer("2", 1, "2.txt", &workspace, cx).await;
open_close_queried_buffer("3", 1, "3.txt", &workspace, cx).await;
let picker = open_file_picker(&workspace, cx);
picker.update(cx, |finder, _| {
assert_eq!(finder.delegate.matches.len(), 3);
assert_match_selection(finder, 0, "3.txt");
assert_match_at_position(finder, 1, "2.txt");
assert_match_at_position(finder, 2, "1.txt");
});
cx.dispatch_action(SelectNext);
// Add more files to the worktree to trigger update matches
for i in 0..5 {
let filename = format!("/test/{}.txt", 4 + i);
app_state
.fs
.create_file(Path::new(&filename), Default::default())
.await
.expect("unable to create file");
}
cx.executor().advance_clock(FS_WATCH_LATENCY);
picker.update(cx, |finder, _| {
assert_eq!(finder.delegate.matches.len(), 3);
assert_match_at_position(finder, 0, "3.txt");
assert_match_selection(finder, 1, "2.txt");
assert_match_at_position(finder, 2, "1.txt");
});
}
#[gpui::test]
async fn test_history_items_vs_very_good_external_match(cx: &mut gpui::TestAppContext) {
let app_state = init_test(cx);
@@ -1597,107 +1541,6 @@ async fn test_search_results_refreshed_on_adding_and_removing_worktrees(
});
}
#[gpui::test]
async fn test_selected_match_stays_selected_after_matches_refreshed(cx: &mut gpui::TestAppContext) {
let app_state = init_test(cx);
app_state.fs.as_fake().insert_tree("/src", json!({})).await;
app_state
.fs
.create_dir("/src/even".as_ref())
.await
.expect("unable to create dir");
let initial_files_num = 5;
for i in 0..initial_files_num {
let filename = format!("/src/even/file_{}.txt", 10 + i);
app_state
.fs
.create_file(Path::new(&filename), Default::default())
.await
.expect("unable to create file");
}
let project = Project::test(app_state.fs.clone(), ["/src".as_ref()], cx).await;
let (workspace, cx) = cx.add_window_view(|cx| Workspace::test_new(project.clone(), cx));
// Initial state
let picker = open_file_picker(&workspace, cx);
cx.simulate_input("file");
let selected_index = 3;
// Checking only the filename, not the whole path
let selected_file = format!("file_{}.txt", 10 + selected_index);
// Select even/file_13.txt
for _ in 0..selected_index {
cx.dispatch_action(SelectNext);
}
picker.update(cx, |finder, _| {
assert_match_selection(finder, selected_index, &selected_file)
});
// Add more matches to the search results
let files_to_add = 10;
for i in 0..files_to_add {
let filename = format!("/src/file_{}.txt", 20 + i);
app_state
.fs
.create_file(Path::new(&filename), Default::default())
.await
.expect("unable to create file");
}
cx.executor().advance_clock(FS_WATCH_LATENCY);
// file_13.txt is still selected
picker.update(cx, |finder, _| {
let expected_selected_index = selected_index + files_to_add;
assert_match_selection(finder, expected_selected_index, &selected_file);
});
}
#[gpui::test]
async fn test_first_match_selected_if_previous_one_is_not_in_the_match_list(
cx: &mut gpui::TestAppContext,
) {
let app_state = init_test(cx);
app_state
.fs
.as_fake()
.insert_tree(
"/src",
json!({
"file_1.txt": "// file_1",
"file_2.txt": "// file_2",
"file_3.txt": "// file_3",
}),
)
.await;
let project = Project::test(app_state.fs.clone(), ["/src".as_ref()], cx).await;
let (workspace, cx) = cx.add_window_view(|cx| Workspace::test_new(project.clone(), cx));
// Initial state
let picker = open_file_picker(&workspace, cx);
cx.simulate_input("file");
// Select even/file_2.txt
cx.dispatch_action(SelectNext);
// Remove the selected entry
app_state
.fs
.remove_file("/src/file_2.txt".as_ref(), Default::default())
.await
.expect("unable to remove file");
cx.executor().advance_clock(FS_WATCH_LATENCY);
// file_1.txt is now selected
picker.update(cx, |finder, _| {
assert_match_selection(finder, 0, "file_1.txt");
});
}
#[gpui::test]
async fn test_keeps_file_finder_open_after_modifier_keys_release(cx: &mut gpui::TestAppContext) {
let app_state = init_test(cx);
@@ -2097,11 +1940,8 @@ impl SearchEntries {
fn collect_search_matches(picker: &Picker<FileFinderDelegate>) -> SearchEntries {
let mut search_entries = SearchEntries::default();
for m in &picker.delegate.matches.matches {
match &m {
Match::History {
path: history_path,
panel_match: path_match,
} => {
match m {
Match::History(history_path, path_match) => {
search_entries.history.push(
path_match
.as_ref()
@@ -2156,8 +1996,8 @@ fn assert_match_at_position(
.matches
.get(match_index)
.unwrap_or_else(|| panic!("Finder has no match for index {match_index}"));
let match_file_name = match &match_item {
Match::History { path, .. } => path.absolute.as_deref().unwrap().file_name(),
let match_file_name = match match_item {
Match::History(found_path, _) => found_path.absolute.as_deref().unwrap().file_name(),
Match::Search(path_match) => path_match.0.path.file_name(),
}
.unwrap()

View File

@@ -107,10 +107,8 @@ impl Match {
if let Some(path_match) = &self.path_match {
text.push_str(&path_match.path.to_string_lossy());
let mut whole_path = PathBuf::from(path_match.path_prefix.to_string());
whole_path = whole_path.join(path_match.path.clone());
for (range, style) in highlight_ranges(
&whole_path.to_string_lossy(),
&path_match.path.to_string_lossy(),
&path_match.positions,
gpui::HighlightStyle::color(Color::Accent.color(cx)),
) {

View File

@@ -236,12 +236,7 @@ impl PickerDelegate for OpenPathDelegate {
let Some(candidate) = directory_state.match_candidates.get(*m) else {
return;
};
let result = Path::new(
self.lister
.resolve_tilde(&directory_state.path, cx)
.as_ref(),
)
.join(&candidate.string);
let result = Path::new(&directory_state.path).join(&candidate.string);
if let Some(tx) = self.tx.take() {
tx.send(Some(vec![result])).ok();
}

View File

@@ -35,7 +35,7 @@ util.workspace = true
[target.'cfg(target_os = "macos")'.dependencies]
fsevent.workspace = true
objc = "0.2"
cocoa = "0.26"
cocoa = "0.25"
[target.'cfg(not(target_os = "macos"))'.dependencies]
notify = "6.1.1"

View File

@@ -43,7 +43,7 @@ pub fn get_messages(working_directory: &Path, shas: &[Oid]) -> Result<HashMap<Oi
String::from_utf8_lossy(&output.stdout)
.trim()
.split_terminator(MARKER)
.map(|str| str.trim().replace("<", "&lt;").replace(">", "&gt;")),
.map(|str| String::from(str.trim())),
)
.collect::<HashMap<Oid, String>>())
}

View File

@@ -15,7 +15,6 @@ default = []
test-support = [
"backtrace",
"collections/test-support",
"rand",
"util/test-support",
"http_client/test-support",
]
@@ -37,6 +36,7 @@ bytemuck = { version = "1", optional = true }
collections.workspace = true
ctor.workspace = true
derive_more.workspace = true
env_logger.workspace = true
etagere = "0.2"
futures.workspace = true
gpui_macros.workspace = true
@@ -50,7 +50,7 @@ parking = "2.0.0"
parking_lot.workspace = true
postage.workspace = true
profiling.workspace = true
rand = { optional = true, workspace = true}
rand.workspace = true
raw-window-handle = "0.6"
refineable.workspace = true
resvg = { version = "0.41.0", default-features = false }
@@ -68,6 +68,7 @@ strum.workspace = true
sum_tree.workspace = true
taffy = "0.4.3"
thiserror.workspace = true
time.workspace = true
util.workspace = true
uuid.workspace = true
waker-fn = "1.2.0"
@@ -75,8 +76,6 @@ waker-fn = "1.2.0"
[dev-dependencies]
backtrace = "0.3"
collections = { workspace = true, features = ["test-support"] }
env_logger.workspace = true
rand.workspace = true
util = { workspace = true, features = ["test-support"] }
http_client = { workspace = true, features = ["test-support"] }
unicode-segmentation.workspace = true
@@ -85,8 +84,8 @@ unicode-segmentation.workspace = true
embed-resource = "2.4"
[target.'cfg(target_os = "macos")'.build-dependencies]
bindgen = "0.70.0"
cbindgen = { version = "0.27.0", default-features = false }
bindgen = "0.65.1"
cbindgen = { version = "0.26.0", default-features = false }
[target.'cfg(target_os = "macos")'.dependencies]
block = "0.1"
@@ -153,7 +152,6 @@ font-kit = { git = "https://github.com/zed-industries/font-kit", rev = "40391b7"
x11-clipboard = "0.9.2"
[target.'cfg(windows)'.dependencies]
rand.workspace = true
windows.workspace = true
windows-core = "0.58"

View File

@@ -67,7 +67,7 @@ mod macos {
.allowlist_function("dispatch_suspend")
.allowlist_function("dispatch_source_cancel")
.allowlist_function("dispatch_set_context")
.parse_callbacks(Box::new(bindgen::CargoCallbacks::new()))
.parse_callbacks(Box::new(bindgen::CargoCallbacks))
.layout_tests(false)
.generate()
.expect("unable to generate bindings");

View File

@@ -225,15 +225,8 @@ impl ViewInputHandler for TextInput {
Some(self.content[range].to_string())
}
fn selected_text_range(
&mut self,
_ignore_disabled_input: bool,
_cx: &mut ViewContext<Self>,
) -> Option<UTF16Selection> {
Some(UTF16Selection {
range: self.range_to_utf16(&self.selected_range),
reversed: self.selection_reversed,
})
fn selected_text_range(&mut self, _cx: &mut ViewContext<Self>) -> Option<Range<usize>> {
Some(self.range_to_utf16(&self.selected_range))
}
fn marked_text_range(&self, _cx: &mut ViewContext<Self>) -> Option<Range<usize>> {

View File

@@ -1,4 +1,4 @@
use crate::{Bounds, InputHandler, Pixels, UTF16Selection, View, ViewContext, WindowContext};
use crate::{Bounds, InputHandler, Pixels, View, ViewContext, WindowContext};
use std::ops::Range;
/// Implement this trait to allow views to handle textual input when implementing an editor, field, etc.
@@ -13,11 +13,7 @@ pub trait ViewInputHandler: 'static + Sized {
-> Option<String>;
/// See [`InputHandler::selected_text_range`] for details
fn selected_text_range(
&mut self,
ignore_disabled_input: bool,
cx: &mut ViewContext<Self>,
) -> Option<UTF16Selection>;
fn selected_text_range(&mut self, cx: &mut ViewContext<Self>) -> Option<Range<usize>>;
/// See [`InputHandler::marked_text_range`] for details
fn marked_text_range(&self, cx: &mut ViewContext<Self>) -> Option<Range<usize>>;
@@ -72,14 +68,9 @@ impl<V: 'static> ElementInputHandler<V> {
}
impl<V: ViewInputHandler> InputHandler for ElementInputHandler<V> {
fn selected_text_range(
&mut self,
ignore_disabled_input: bool,
cx: &mut WindowContext,
) -> Option<UTF16Selection> {
self.view.update(cx, |view, cx| {
view.selected_text_range(ignore_disabled_input, cx)
})
fn selected_text_range(&mut self, cx: &mut WindowContext) -> Option<Range<usize>> {
self.view
.update(cx, |view, cx| view.selected_text_range(cx))
}
fn marked_text_range(&mut self, cx: &mut WindowContext) -> Option<Range<usize>> {

View File

@@ -383,8 +383,6 @@ pub(crate) trait PlatformWindow: HasWindowHandle + HasDisplayHandle {
fn gpu_specs(&self) -> Option<GPUSpecs>;
fn fps(&self) -> Option<f32>;
fn update_ime_position(&self, _bounds: Bounds<Pixels>);
#[cfg(any(test, feature = "test-support"))]
fn as_test(&mut self) -> Option<&mut TestWindow> {
None
@@ -528,9 +526,9 @@ impl PlatformInputHandler {
Self { cx, handler }
}
fn selected_text_range(&mut self, ignore_disabled_input: bool) -> Option<UTF16Selection> {
fn selected_text_range(&mut self) -> Option<Range<usize>> {
self.cx
.update(|cx| self.handler.selected_text_range(ignore_disabled_input, cx))
.update(|cx| self.handler.selected_text_range(cx))
.ok()
.flatten()
}
@@ -591,31 +589,6 @@ impl PlatformInputHandler {
pub(crate) fn dispatch_input(&mut self, input: &str, cx: &mut WindowContext) {
self.handler.replace_text_in_range(None, input, cx);
}
pub fn selected_bounds(&mut self, cx: &mut WindowContext) -> Option<Bounds<Pixels>> {
let Some(selection) = self.handler.selected_text_range(true, cx) else {
return None;
};
self.handler.bounds_for_range(
if selection.reversed {
selection.range.start..selection.range.start
} else {
selection.range.end..selection.range.end
},
cx,
)
}
}
/// A struct representing a selection in a text buffer, in UTF16 characters.
/// This is different from a range because the head may be before the tail.
pub struct UTF16Selection {
/// The range of text in the document this selection corresponds to
/// in UTF16 characters.
pub range: Range<usize>,
/// Whether the head of this selection is at the start (true), or end (false)
/// of the range
pub reversed: bool,
}
/// Zed's interface for handling text input from the platform's IME system
@@ -627,11 +600,7 @@ pub trait InputHandler: 'static {
/// Corresponds to [selectedRange()](https://developer.apple.com/documentation/appkit/nstextinputclient/1438242-selectedrange)
///
/// Return value is in terms of UTF-16 characters, from 0 to the length of the document
fn selected_text_range(
&mut self,
ignore_disabled_input: bool,
cx: &mut WindowContext,
) -> Option<UTF16Selection>;
fn selected_text_range(&mut self, cx: &mut WindowContext) -> Option<Range<usize>>;
/// Get the range of the currently marked text, if any
/// Corresponds to [markedRange()](https://developer.apple.com/documentation/appkit/nstextinputclient/1438250-markedrange)
@@ -815,8 +784,7 @@ pub struct TitlebarOptions {
/// The initial title of the window
pub title: Option<SharedString>,
/// Should the default system titlebar be hidden to allow for a custom-drawn titlebar? (macOS and Windows only)
/// Refer to [`WindowOptions::window_decorations`] on Linux
/// Whether the titlebar should appear transparent (macOS only)
pub appears_transparent: bool,
/// The position of the macOS traffic light buttons

View File

@@ -162,10 +162,9 @@ impl Keystroke {
fn is_printable_key(key: &str) -> bool {
match key {
"f1" | "f2" | "f3" | "f4" | "f5" | "f6" | "f7" | "f8" | "f9" | "f10" | "f11" | "f12"
| "f13" | "f14" | "f15" | "f16" | "f17" | "f18" | "f19" | "backspace" | "delete"
| "left" | "right" | "up" | "down" | "pageup" | "pagedown" | "insert" | "home" | "end"
| "escape" => false,
"up" | "down" | "left" | "right" | "pageup" | "pagedown" | "home" | "end" | "delete"
| "escape" | "backspace" | "f1" | "f2" | "f3" | "f4" | "f5" | "f6" | "f7" | "f8" | "f9"
| "f10" | "f11" | "f12" => false,
_ => true,
}
}

View File

@@ -312,23 +312,6 @@ impl WaylandClientStatePtr {
}
}
pub fn update_ime_position(&self, bounds: Bounds<Pixels>) {
let client = self.get_client();
let mut state = client.borrow_mut();
if state.composing || state.text_input.is_none() {
return;
}
let text_input = state.text_input.as_ref().unwrap();
text_input.set_cursor_rectangle(
bounds.origin.x.0 as i32,
bounds.origin.y.0 as i32,
bounds.size.width.0 as i32,
bounds.size.height.0 as i32,
);
text_input.commit();
}
pub fn drop_window(&self, surface_id: &ObjectId) {
let mut client = self.get_client();
let mut state = client.borrow_mut();
@@ -982,8 +965,7 @@ impl Dispatch<xdg_toplevel::XdgToplevel, ObjectId> for WaylandClientStatePtr {
let should_close = window.handle_toplevel_event(event);
if should_close {
// The close logic will be handled in drop_window()
window.close();
this.drop_window(surface_id);
}
}
}
@@ -1371,7 +1353,6 @@ impl Dispatch<zwp_text_input_v3::ZwpTextInputV3, ()> for WaylandClientStatePtr {
}
}
} else {
state.composing = false;
drop(state);
window.handle_ime(ImeInput::DeleteText);
}

View File

@@ -622,12 +622,8 @@ impl WaylandWindowStatePtr {
let mut bounds: Option<Bounds<Pixels>> = None;
if let Some(mut input_handler) = state.input_handler.take() {
drop(state);
if let Some(selection) = input_handler.selected_text_range(true) {
bounds = input_handler.bounds_for_range(if selection.reversed {
selection.range.start..selection.range.start
} else {
selection.range.end..selection.range.end
});
if let Some(range) = input_handler.selected_text_range() {
bounds = input_handler.bounds_for_range(range);
}
self.state.borrow_mut().input_handler = Some(input_handler);
}
@@ -1010,13 +1006,6 @@ impl PlatformWindow for WaylandWindow {
}
}
fn update_ime_position(&self, bounds: Bounds<Pixels>) {
let state = self.borrow();
let client = state.client.clone();
drop(state);
client.update_ime_position(bounds);
}
fn gpu_specs(&self) -> Option<GPUSpecs> {
self.borrow().renderer.gpu_specs().into()
}

View File

@@ -148,12 +148,8 @@ pub struct X11ClientState {
pub struct X11ClientStatePtr(pub Weak<RefCell<X11ClientState>>);
impl X11ClientStatePtr {
fn get_client(&self) -> X11Client {
X11Client(self.0.upgrade().expect("client already dropped"))
}
pub fn drop_window(&self, x_window: u32) {
let client = self.get_client();
let client = X11Client(self.0.upgrade().expect("client already dropped"));
let mut state = client.0.borrow_mut();
if let Some(window_ref) = state.windows.remove(&x_window) {
@@ -171,42 +167,6 @@ impl X11ClientStatePtr {
state.common.signal.stop();
}
}
pub fn update_ime_position(&self, bounds: Bounds<Pixels>) {
let client = self.get_client();
let mut state = client.0.borrow_mut();
if state.composing || state.ximc.is_none() {
return;
}
let mut ximc = state.ximc.take().unwrap();
let xim_handler = state.xim_handler.take().unwrap();
let ic_attributes = ximc
.build_ic_attributes()
.push(
xim::AttributeName::InputStyle,
xim::InputStyle::PREEDIT_CALLBACKS
| xim::InputStyle::STATUS_NOTHING
| xim::InputStyle::PREEDIT_POSITION,
)
.push(xim::AttributeName::ClientWindow, xim_handler.window)
.push(xim::AttributeName::FocusWindow, xim_handler.window)
.nested_list(xim::AttributeName::PreeditAttributes, |b| {
b.push(
xim::AttributeName::SpotLocation,
xim::Point {
x: u32::from(bounds.origin.x + bounds.size.width) as i16,
y: u32::from(bounds.origin.y + bounds.size.height) as i16,
},
);
})
.build();
let _ = ximc
.set_ic_values(xim_handler.im_id, xim_handler.ic_id, ic_attributes)
.log_err();
state.ximc = Some(ximc);
state.xim_handler = Some(xim_handler);
}
}
#[derive(Clone)]
@@ -1069,13 +1029,13 @@ impl X11Client {
fn xim_handle_preedit(&self, window: xproto::Window, text: String) -> Option<()> {
let window = self.get_window(window).unwrap();
window.handle_ime_preedit(text);
let mut state = self.0.borrow_mut();
let mut ximc = state.ximc.take().unwrap();
let mut xim_handler = state.xim_handler.take().unwrap();
state.composing = !text.is_empty();
state.composing = true;
drop(state);
window.handle_ime_preedit(text);
if let Some(area) = window.get_ime_area() {
let ic_attributes = ximc

View File

@@ -873,8 +873,8 @@ impl X11WindowStatePtr {
let mut bounds: Option<Bounds<Pixels>> = None;
if let Some(mut input_handler) = state.input_handler.take() {
drop(state);
if let Some(selection) = input_handler.selected_text_range(true) {
bounds = input_handler.bounds_for_range(selection.range);
if let Some(range) = input_handler.selected_text_range() {
bounds = input_handler.bounds_for_range(range);
}
let mut state = self.state.borrow_mut();
state.input_handler = Some(input_handler);
@@ -1396,13 +1396,6 @@ impl PlatformWindow for X11Window {
}
}
fn update_ime_position(&self, bounds: Bounds<Pixels>) {
let mut state = self.0.state.borrow_mut();
let client = state.client.clone();
drop(state);
client.update_ime_position(bounds);
}
fn gpu_specs(&self) -> Option<GPUSpecs> {
self.0.state.borrow().renderer.gpu_specs().into()
}

View File

@@ -56,7 +56,6 @@ pub fn key_to_native(key: &str) -> Cow<str> {
"home" => NSHomeFunctionKey,
"end" => NSEndFunctionKey,
"delete" => NSDeleteFunctionKey,
"insert" => NSHelpFunctionKey,
"f1" => NSF1FunctionKey,
"f2" => NSF2FunctionKey,
"f3" => NSF3FunctionKey,
@@ -69,13 +68,6 @@ pub fn key_to_native(key: &str) -> Cow<str> {
"f10" => NSF10FunctionKey,
"f11" => NSF11FunctionKey,
"f12" => NSF12FunctionKey,
"f13" => NSF13FunctionKey,
"f14" => NSF14FunctionKey,
"f15" => NSF15FunctionKey,
"f16" => NSF16FunctionKey,
"f17" => NSF17FunctionKey,
"f18" => NSF18FunctionKey,
"f19" => NSF19FunctionKey,
_ => return Cow::Borrowed(key),
};
Cow::Owned(String::from_utf16(&[code]).unwrap())
@@ -292,8 +284,6 @@ unsafe fn parse_keystroke(native_event: id) -> Keystroke {
Some(NSHomeFunctionKey) => "home".to_string(),
Some(NSEndFunctionKey) => "end".to_string(),
Some(NSDeleteFunctionKey) => "delete".to_string(),
// Observed Insert==NSHelpFunctionKey not NSInsertFunctionKey.
Some(NSHelpFunctionKey) => "insert".to_string(),
Some(NSF1FunctionKey) => "f1".to_string(),
Some(NSF2FunctionKey) => "f2".to_string(),
Some(NSF3FunctionKey) => "f3".to_string(),
@@ -306,13 +296,6 @@ unsafe fn parse_keystroke(native_event: id) -> Keystroke {
Some(NSF10FunctionKey) => "f10".to_string(),
Some(NSF11FunctionKey) => "f11".to_string(),
Some(NSF12FunctionKey) => "f12".to_string(),
Some(NSF13FunctionKey) => "f13".to_string(),
Some(NSF14FunctionKey) => "f14".to_string(),
Some(NSF15FunctionKey) => "f15".to_string(),
Some(NSF16FunctionKey) => "f16".to_string(),
Some(NSF17FunctionKey) => "f17".to_string(),
Some(NSF18FunctionKey) => "f18".to_string(),
Some(NSF19FunctionKey) => "f19".to_string(),
_ => {
let mut chars_ignoring_modifiers_and_shift =
chars_for_modified_key(native_event.keyCode(), false, false);

View File

@@ -35,25 +35,50 @@ pub fn apply_features_and_fallbacks(
fallbacks: Option<&FontFallbacks>,
) -> anyhow::Result<()> {
unsafe {
let mut keys = vec![kCTFontFeatureSettingsAttribute];
let mut values = vec![generate_feature_array(features)];
let fallback_array = CFArrayCreateMutable(kCFAllocatorDefault, 0, &kCFTypeArrayCallBacks);
if let Some(fallbacks) = fallbacks {
if !fallbacks.fallback_list().is_empty() {
keys.push(kCTFontCascadeListAttribute);
values.push(generate_fallback_array(
fallbacks,
font.native_font().as_concrete_TypeRef(),
));
for user_fallback in fallbacks.fallback_list() {
let name = CFString::from(user_fallback.as_str());
let fallback_desc =
CTFontDescriptorCreateWithNameAndSize(name.as_concrete_TypeRef(), 0.0);
CFArrayAppendValue(fallback_array, fallback_desc as _);
CFRelease(fallback_desc as _);
}
}
{
let preferred_languages: CFArray<CFString> =
CFArray::wrap_under_create_rule(CFLocaleCopyPreferredLanguages());
let default_fallbacks = CTFontCopyDefaultCascadeListForLanguages(
font.native_font().as_concrete_TypeRef(),
preferred_languages.as_concrete_TypeRef(),
);
let default_fallbacks: CFArray<CTFontDescriptor> =
CFArray::wrap_under_create_rule(default_fallbacks);
default_fallbacks
.iter()
.filter(|desc| desc.font_path().is_some())
.map(|desc| {
CFArrayAppendValue(fallback_array, desc.as_concrete_TypeRef() as _);
});
}
let feature_array = generate_feature_array(features);
let keys = [kCTFontFeatureSettingsAttribute, kCTFontCascadeListAttribute];
let values = [feature_array, fallback_array];
let attrs = CFDictionaryCreate(
kCFAllocatorDefault,
keys.as_ptr() as _,
values.as_ptr() as _,
keys.len() as isize,
2,
&kCFTypeDictionaryKeyCallBacks,
&kCFTypeDictionaryValueCallBacks,
);
CFRelease(feature_array as *const _ as _);
CFRelease(fallback_array as *const _ as _);
let new_descriptor = CTFontDescriptorCreateWithAttributes(attrs);
CFRelease(attrs as _);
let new_descriptor = CTFontDescriptor::wrap_under_create_rule(new_descriptor);
@@ -72,7 +97,8 @@ pub fn apply_features_and_fallbacks(
fn generate_feature_array(features: &FontFeatures) -> CFMutableArrayRef {
unsafe {
let feature_array = CFArrayCreateMutable(kCFAllocatorDefault, 0, &kCFTypeArrayCallBacks);
let mut feature_array =
CFArrayCreateMutable(kCFAllocatorDefault, 0, &kCFTypeArrayCallBacks);
for (tag, value) in features.tag_value_list() {
let keys = [kCTFontOpenTypeFeatureTag, kCTFontOpenTypeFeatureValue];
let values = [
@@ -95,42 +121,6 @@ fn generate_feature_array(features: &FontFeatures) -> CFMutableArrayRef {
}
}
fn generate_fallback_array(fallbacks: &FontFallbacks, font_ref: CTFontRef) -> CFMutableArrayRef {
unsafe {
let fallback_array = CFArrayCreateMutable(kCFAllocatorDefault, 0, &kCFTypeArrayCallBacks);
for user_fallback in fallbacks.fallback_list() {
let name = CFString::from(user_fallback.as_str());
let fallback_desc =
CTFontDescriptorCreateWithNameAndSize(name.as_concrete_TypeRef(), 0.0);
CFArrayAppendValue(fallback_array, fallback_desc as _);
CFRelease(fallback_desc as _);
}
append_system_fallbacks(fallback_array, font_ref);
fallback_array
}
}
fn append_system_fallbacks(fallback_array: CFMutableArrayRef, font_ref: CTFontRef) {
unsafe {
let preferred_languages: CFArray<CFString> =
CFArray::wrap_under_create_rule(CFLocaleCopyPreferredLanguages());
let default_fallbacks = CTFontCopyDefaultCascadeListForLanguages(
font_ref,
preferred_languages.as_concrete_TypeRef(),
);
let default_fallbacks: CFArray<CTFontDescriptor> =
CFArray::wrap_under_create_rule(default_fallbacks);
default_fallbacks
.iter()
.filter(|desc| desc.font_path().is_some())
.map(|desc| {
CFArrayAppendValue(fallback_array, desc.as_concrete_TypeRef() as _);
});
}
}
#[link(name = "CoreText", kind = "framework")]
extern "C" {
static kCTFontOpenTypeFeatureTag: CFStringRef;

View File

@@ -4,7 +4,7 @@ use crate::{
RenderGlyphParams, Result, ShapedGlyph, ShapedRun, SharedString, Size, SUBPIXEL_VARIANTS,
};
use anyhow::anyhow;
use cocoa::appkit::CGFloat;
use cocoa::appkit::{CGFloat, CGPoint};
use collections::HashMap;
use core_foundation::{
attributed_string::CFMutableAttributedString,
@@ -16,7 +16,6 @@ use core_graphics::{
base::{kCGImageAlphaPremultipliedLast, CGGlyph},
color_space::CGColorSpace,
context::CGContext,
display::CGPoint,
};
use core_text::{
font::CTFont,

View File

@@ -9,7 +9,7 @@ use crate::{
use block::ConcreteBlock;
use cocoa::{
appkit::{
NSApplication, NSBackingStoreBuffered, NSColor, NSEvent, NSEventModifierFlags,
CGPoint, NSApplication, NSBackingStoreBuffered, NSColor, NSEvent, NSEventModifierFlags,
NSFilenamesPboardType, NSPasteboard, NSScreen, NSView, NSViewHeightSizable,
NSViewWidthSizable, NSWindow, NSWindowButton, NSWindowCollectionBehavior,
NSWindowOcclusionState, NSWindowStyleMask, NSWindowTitleVisibility,
@@ -20,7 +20,7 @@ use cocoa::{
NSSize, NSString, NSUInteger,
},
};
use core_graphics::display::{CGDirectDisplayID, CGPoint, CGRect};
use core_graphics::display::{CGDirectDisplayID, CGRect};
use ctor::ctor;
use futures::channel::oneshot;
use objc::{
@@ -54,7 +54,7 @@ static mut VIEW_CLASS: *const Class = ptr::null();
#[allow(non_upper_case_globals)]
const NSWindowStyleMaskNonactivatingPanel: NSWindowStyleMask =
NSWindowStyleMask::from_bits_retain(1 << 7);
unsafe { NSWindowStyleMask::from_bits_unchecked(1 << 7) };
#[allow(non_upper_case_globals)]
const NSNormalWindowLevel: NSInteger = 0;
#[allow(non_upper_case_globals)]
@@ -233,7 +233,7 @@ unsafe fn build_classes() {
pub(crate) fn convert_mouse_position(position: NSPoint, window_height: Pixels) -> Point<Pixels> {
point(
px(position.x as f32),
// macOS screen coordinates are relative to bottom left
// MacOS screen coordinates are relative to bottom left
window_height - px(position.y as f32),
)
}
@@ -1120,13 +1120,6 @@ impl PlatformWindow for MacWindow {
None
}
fn update_ime_position(&self, _bounds: Bounds<Pixels>) {
unsafe {
let input_context: id = msg_send![class!(NSTextInputContext), currentInputContext];
let _: () = msg_send![input_context, invalidateCharacterCoordinates];
}
}
fn fps(&self) -> Option<f32> {
Some(self.0.lock().renderer.fps())
}
@@ -1314,11 +1307,11 @@ extern "C" fn handle_key_event(this: &Object, native_event: id, key_equivalent:
if !handled && is_held {
if let Some(text) = previous_keydown_inserted_text {
// macOS IME is a bit funky, and even when you've told it there's nothing to
// MacOS IME is a bit funky, and even when you've told it there's nothing to
// enter it will still swallow certain keys (e.g. 'f', 'j') and not others
// (e.g. 'n'). This is a problem for certain kinds of views, like the terminal.
with_input_handler(this, |input_handler| {
if input_handler.selected_text_range(false).is_none() {
if input_handler.selected_text_range().is_none() {
handled = true;
input_handler.replace_text_in_range(None, &text)
}
@@ -1690,12 +1683,10 @@ extern "C" fn marked_range(this: &Object, _: Sel) -> NSRange {
}
extern "C" fn selected_range(this: &Object, _: Sel) -> NSRange {
let selected_range_result = with_input_handler(this, |input_handler| {
input_handler.selected_text_range(false)
})
.flatten();
let selected_range_result =
with_input_handler(this, |input_handler| input_handler.selected_text_range()).flatten();
selected_range_result.map_or(NSRange::invalid(), |selection| selection.range.into())
selected_range_result.map_or(NSRange::invalid(), |range| range.into())
}
extern "C" fn first_rect_for_character_range(

View File

@@ -279,8 +279,6 @@ impl PlatformWindow for TestWindow {
unimplemented!()
}
fn update_ime_position(&self, _bounds: Bounds<Pixels>) {}
fn gpu_specs(&self) -> Option<GPUSpecs> {
None
}

View File

@@ -275,52 +275,54 @@ impl DirectWriteState {
fn generate_font_fallbacks(
&self,
fallbacks: &FontFallbacks,
fallbacks: Option<&FontFallbacks>,
) -> Result<Option<IDWriteFontFallback>> {
if fallbacks.fallback_list().is_empty() {
if fallbacks.is_some_and(|fallbacks| fallbacks.fallback_list().is_empty()) {
return Ok(None);
}
unsafe {
let builder = self.components.factory.CreateFontFallbackBuilder()?;
let font_set = &self.system_font_collection.GetFontSet()?;
for family_name in fallbacks.fallback_list() {
let Some(fonts) = font_set
.GetMatchingFonts(
&HSTRING::from(family_name),
DWRITE_FONT_WEIGHT_NORMAL,
DWRITE_FONT_STRETCH_NORMAL,
DWRITE_FONT_STYLE_NORMAL,
)
.log_err()
else {
continue;
};
if fonts.GetFontCount() == 0 {
log::error!("No matching font found for {}", family_name);
continue;
if let Some(fallbacks) = fallbacks {
for family_name in fallbacks.fallback_list() {
let Some(fonts) = font_set
.GetMatchingFonts(
&HSTRING::from(family_name),
DWRITE_FONT_WEIGHT_NORMAL,
DWRITE_FONT_STRETCH_NORMAL,
DWRITE_FONT_STYLE_NORMAL,
)
.log_err()
else {
continue;
};
if fonts.GetFontCount() == 0 {
log::error!("No matching font found for {}", family_name);
continue;
}
let font = fonts.GetFontFaceReference(0)?.CreateFontFace()?;
let mut count = 0;
font.GetUnicodeRanges(None, &mut count).ok();
if count == 0 {
continue;
}
let mut unicode_ranges = vec![DWRITE_UNICODE_RANGE::default(); count as usize];
let Some(_) = font
.GetUnicodeRanges(Some(&mut unicode_ranges), &mut count)
.log_err()
else {
continue;
};
let target_family_name = HSTRING::from(family_name);
builder.AddMapping(
&unicode_ranges,
&[target_family_name.as_ptr()],
None,
None,
None,
1.0,
)?;
}
let font = fonts.GetFontFaceReference(0)?.CreateFontFace()?;
let mut count = 0;
font.GetUnicodeRanges(None, &mut count).ok();
if count == 0 {
continue;
}
let mut unicode_ranges = vec![DWRITE_UNICODE_RANGE::default(); count as usize];
let Some(_) = font
.GetUnicodeRanges(Some(&mut unicode_ranges), &mut count)
.log_err()
else {
continue;
};
let target_family_name = HSTRING::from(family_name);
builder.AddMapping(
&unicode_ranges,
&[target_family_name.as_ptr()],
None,
None,
None,
1.0,
)?;
}
let system_fallbacks = self.components.factory.GetSystemFontFallback()?;
builder.AddMappings(&system_fallbacks)?;
@@ -376,8 +378,10 @@ impl DirectWriteState {
else {
continue;
};
let fallbacks = font_fallbacks
.and_then(|fallbacks| self.generate_font_fallbacks(fallbacks).log_err().flatten());
let fallbacks = self
.generate_font_fallbacks(font_fallbacks)
.log_err()
.unwrap_or_default();
let font_info = FontInfo {
font_family: family_name.to_owned(),
font_face,

View File

@@ -19,7 +19,6 @@ pub(crate) const CURSOR_STYLE_CHANGED: u32 = WM_USER + 1;
pub(crate) const CLOSE_ONE_WINDOW: u32 = WM_USER + 2;
const SIZE_MOVE_LOOP_TIMER_ID: usize = 1;
const AUTO_HIDE_TASKBAR_THICKNESS_PX: i32 = 1;
pub(crate) fn handle_msg(
handle: HWND,
@@ -85,7 +84,6 @@ pub(crate) fn handle_msg(
WM_IME_COMPOSITION => handle_ime_composition(handle, lparam, state_ptr),
WM_SETCURSOR => handle_set_cursor(lparam, state_ptr),
WM_SETTINGCHANGE => handle_system_settings_changed(handle, state_ptr),
WM_DWMCOLORIZATIONCOLORCHANGED => handle_system_theme_changed(state_ptr),
CURSOR_STYLE_CHANGED => handle_cursor_changed(lparam, state_ptr),
_ => None,
};
@@ -388,18 +386,22 @@ fn handle_char_msg(
keystroke,
is_held: lparam.0 & (0x1 << 30) > 0,
};
let dispatch_event_result = func(PlatformInput::KeyDown(event));
state_ptr.state.borrow_mut().callbacks.input = Some(func);
let dispatch_event_result = func(PlatformInput::KeyDown(event));
let mut lock = state_ptr.state.borrow_mut();
lock.callbacks.input = Some(func);
if dispatch_event_result.default_prevented || !dispatch_event_result.propagate {
return Some(0);
}
let Some(ime_char) = ime_key else {
return Some(1);
};
with_input_handler(&state_ptr, |input_handler| {
input_handler.replace_text_in_range(None, &ime_char);
});
let Some(mut input_handler) = lock.input_handler.take() else {
return Some(1);
};
drop(lock);
input_handler.replace_text_in_range(None, &ime_char);
state_ptr.state.borrow_mut().input_handler = Some(input_handler);
Some(0)
}
@@ -579,42 +581,33 @@ fn handle_mouse_horizontal_wheel_msg(
}
}
fn retrieve_caret_position(state_ptr: &Rc<WindowsWindowStatePtr>) -> Option<POINT> {
with_input_handler_and_scale_factor(state_ptr, |input_handler, scale_factor| {
let caret_range = input_handler.selected_text_range(false)?;
let caret_position = input_handler.bounds_for_range(caret_range.range)?;
Some(POINT {
// logical to physical
x: (caret_position.origin.x.0 * scale_factor) as i32,
y: (caret_position.origin.y.0 * scale_factor) as i32
+ ((caret_position.size.height.0 * scale_factor) as i32 / 2),
})
})
}
fn handle_ime_position(handle: HWND, state_ptr: Rc<WindowsWindowStatePtr>) -> Option<isize> {
unsafe {
let mut lock = state_ptr.state.borrow_mut();
let ctx = ImmGetContext(handle);
let Some(mut input_handler) = lock.input_handler.take() else {
return Some(1);
};
let scale_factor = lock.scale_factor;
drop(lock);
let Some(caret_position) = retrieve_caret_position(&state_ptr) else {
let Some(caret_range) = input_handler.selected_text_range() else {
state_ptr.state.borrow_mut().input_handler = Some(input_handler);
return Some(0);
};
{
let config = COMPOSITIONFORM {
dwStyle: CFS_POINT,
ptCurrentPos: caret_position,
..Default::default()
};
ImmSetCompositionWindow(ctx, &config as _).ok().log_err();
}
{
let config = CANDIDATEFORM {
dwStyle: CFS_CANDIDATEPOS,
ptCurrentPos: caret_position,
..Default::default()
};
ImmSetCandidateWindow(ctx, &config as _).ok().log_err();
}
let caret_position = input_handler.bounds_for_range(caret_range).unwrap();
state_ptr.state.borrow_mut().input_handler = Some(input_handler);
let config = CANDIDATEFORM {
dwStyle: CFS_CANDIDATEPOS,
// logical to physical
ptCurrentPos: POINT {
x: (caret_position.origin.x.0 * scale_factor) as i32,
y: (caret_position.origin.y.0 * scale_factor) as i32
+ ((caret_position.size.height.0 * scale_factor) as i32 / 2),
},
..Default::default()
};
ImmSetCandidateWindow(ctx, &config as _).ok().log_err();
ImmReleaseContext(handle, ctx).ok().log_err();
Some(0)
}
@@ -624,46 +617,35 @@ fn handle_ime_composition(
handle: HWND,
lparam: LPARAM,
state_ptr: Rc<WindowsWindowStatePtr>,
) -> Option<isize> {
let ctx = unsafe { ImmGetContext(handle) };
let result = handle_ime_composition_inner(ctx, lparam, state_ptr);
unsafe { ImmReleaseContext(handle, ctx).ok().log_err() };
result
}
fn handle_ime_composition_inner(
ctx: HIMC,
lparam: LPARAM,
state_ptr: Rc<WindowsWindowStatePtr>,
) -> Option<isize> {
let mut ime_input = None;
if lparam.0 as u32 & GCS_COMPSTR.0 > 0 {
let (comp_string, string_len) = parse_ime_compostion_string(ctx)?;
with_input_handler(&state_ptr, |input_handler| {
input_handler.replace_and_mark_text_in_range(
None,
&comp_string,
Some(string_len..string_len),
);
})?;
let (comp_string, string_len) = parse_ime_compostion_string(handle)?;
let mut input_handler = state_ptr.state.borrow_mut().input_handler.take()?;
input_handler.replace_and_mark_text_in_range(
None,
&comp_string,
Some(string_len..string_len),
);
state_ptr.state.borrow_mut().input_handler = Some(input_handler);
ime_input = Some(comp_string);
}
if lparam.0 as u32 & GCS_CURSORPOS.0 > 0 {
let comp_string = &ime_input?;
let caret_pos = retrieve_composition_cursor_position(ctx);
with_input_handler(&state_ptr, |input_handler| {
input_handler.replace_and_mark_text_in_range(
None,
comp_string,
Some(caret_pos..caret_pos),
);
})?;
let caret_pos = retrieve_composition_cursor_position(handle);
let mut input_handler = state_ptr.state.borrow_mut().input_handler.take()?;
input_handler.replace_and_mark_text_in_range(None, comp_string, Some(caret_pos..caret_pos));
state_ptr.state.borrow_mut().input_handler = Some(input_handler);
}
if lparam.0 as u32 & GCS_RESULTSTR.0 > 0 {
let comp_result = parse_ime_compostion_result(ctx)?;
with_input_handler(&state_ptr, |input_handler| {
input_handler.replace_text_in_range(None, &comp_result);
})?;
let comp_result = parse_ime_compostion_result(handle)?;
let mut lock = state_ptr.state.borrow_mut();
let Some(mut input_handler) = lock.input_handler.take() else {
return Some(1);
};
drop(lock);
input_handler.replace_text_in_range(None, &comp_result);
state_ptr.state.borrow_mut().input_handler = Some(input_handler);
return Some(0);
}
// currently, we don't care other stuff
@@ -681,43 +663,29 @@ fn handle_calc_client_size(
return None;
}
let is_maximized = state_ptr.state.borrow().is_maximized();
let insets = get_client_area_insets(handle, is_maximized, state_ptr.windows_version);
let dpi = unsafe { GetDpiForWindow(handle) };
let frame_x = unsafe { GetSystemMetricsForDpi(SM_CXFRAME, dpi) };
let frame_y = unsafe { GetSystemMetricsForDpi(SM_CYFRAME, dpi) };
let padding = unsafe { GetSystemMetricsForDpi(SM_CXPADDEDBORDER, dpi) };
// wparam is TRUE so lparam points to an NCCALCSIZE_PARAMS structure
let mut params = lparam.0 as *mut NCCALCSIZE_PARAMS;
let mut requested_client_rect = unsafe { &mut ((*params).rgrc) };
requested_client_rect[0].left += insets.left;
requested_client_rect[0].top += insets.top;
requested_client_rect[0].right -= insets.right;
requested_client_rect[0].bottom -= insets.bottom;
requested_client_rect[0].right -= frame_x + padding;
requested_client_rect[0].left += frame_x + padding;
requested_client_rect[0].bottom -= frame_y + padding;
// Fix auto hide taskbar not showing. This solution is based on the approach
// used by Chrome. However, it may result in one row of pixels being obscured
// in our client area. But as Chrome says, "there seems to be no better solution."
if is_maximized {
if let Some(ref taskbar_position) = state_ptr
.state
.borrow()
.system_settings
.auto_hide_taskbar_position
{
// Fot the auto-hide taskbar, adjust in by 1 pixel on taskbar edge,
// so the window isn't treated as a "fullscreen app", which would cause
// the taskbar to disappear.
match taskbar_position {
AutoHideTaskbarPosition::Left => {
requested_client_rect[0].left += AUTO_HIDE_TASKBAR_THICKNESS_PX
}
AutoHideTaskbarPosition::Top => {
requested_client_rect[0].top += AUTO_HIDE_TASKBAR_THICKNESS_PX
}
AutoHideTaskbarPosition::Right => {
requested_client_rect[0].right -= AUTO_HIDE_TASKBAR_THICKNESS_PX
}
AutoHideTaskbarPosition::Bottom => {
requested_client_rect[0].bottom -= AUTO_HIDE_TASKBAR_THICKNESS_PX
}
if state_ptr.state.borrow().is_maximized() {
requested_client_rect[0].top += frame_y + padding;
} else {
match state_ptr.windows_version {
WindowsVersion::Win10 => {}
WindowsVersion::Win11 => {
// Magic number that calculates the width of the border
let border = (dpi as f32 / USER_DEFAULT_SCREEN_DPI as f32).round() as i32;
requested_client_rect[0].top += border;
}
}
}
@@ -757,12 +725,28 @@ fn handle_activate_msg(
}
fn handle_create_msg(handle: HWND, state_ptr: Rc<WindowsWindowStatePtr>) -> Option<isize> {
let mut size_rect = RECT::default();
unsafe { GetWindowRect(handle, &mut size_rect).log_err() };
let width = size_rect.right - size_rect.left;
let height = size_rect.bottom - size_rect.top;
if state_ptr.hide_title_bar {
notify_frame_changed(handle);
Some(0)
} else {
None
unsafe {
SetWindowPos(
handle,
None,
size_rect.left,
size_rect.top,
width,
height,
SWP_FRAMECHANGED | SWP_NOMOVE | SWP_NOSIZE,
)
.log_err()
};
}
Some(0)
}
fn handle_dpi_changed_msg(
@@ -1098,17 +1082,12 @@ fn handle_system_settings_changed(
state_ptr: Rc<WindowsWindowStatePtr>,
) -> Option<isize> {
let mut lock = state_ptr.state.borrow_mut();
let display = lock.display;
// system settings
lock.system_settings.update(display);
// mouse wheel
lock.system_settings.mouse_wheel_settings.update();
// mouse double click
lock.click_state.system_update();
// window border offset
lock.border_offset.update(handle).log_err();
drop(lock);
// Force to trigger WM_NCCALCSIZE event to ensure that we handle auto hide
// taskbar correctly.
notify_frame_changed(handle);
Some(0)
}
@@ -1123,18 +1102,6 @@ fn handle_system_command(wparam: WPARAM, state_ptr: Rc<WindowsWindowStatePtr>) -
None
}
fn handle_system_theme_changed(state_ptr: Rc<WindowsWindowStatePtr>) -> Option<isize> {
let mut callback = state_ptr
.state
.borrow_mut()
.callbacks
.appearance_changed
.take()?;
callback();
state_ptr.state.borrow_mut().callbacks.appearance_changed = Some(callback);
Some(0)
}
fn parse_syskeydown_msg_keystroke(wparam: WPARAM) -> Option<Keystroke> {
let modifiers = current_modifiers();
if !modifiers.alt {
@@ -1251,10 +1218,11 @@ fn parse_char_msg_keystroke(wparam: WPARAM) -> Option<Keystroke> {
}
}
fn parse_ime_compostion_string(ctx: HIMC) -> Option<(String, usize)> {
fn parse_ime_compostion_string(handle: HWND) -> Option<(String, usize)> {
unsafe {
let ctx = ImmGetContext(handle);
let string_len = ImmGetCompositionStringW(ctx, GCS_COMPSTR, None, 0);
if string_len >= 0 {
let result = if string_len >= 0 {
let mut buffer = vec![0u8; string_len as usize + 2];
ImmGetCompositionStringW(
ctx,
@@ -1270,19 +1238,26 @@ fn parse_ime_compostion_string(ctx: HIMC) -> Option<(String, usize)> {
Some((string, string_len as usize / 2))
} else {
None
}
};
ImmReleaseContext(handle, ctx).ok().log_err();
result
}
}
#[inline]
fn retrieve_composition_cursor_position(ctx: HIMC) -> usize {
unsafe { ImmGetCompositionStringW(ctx, GCS_CURSORPOS, None, 0) as usize }
fn retrieve_composition_cursor_position(handle: HWND) -> usize {
unsafe {
let ctx = ImmGetContext(handle);
let ret = ImmGetCompositionStringW(ctx, GCS_CURSORPOS, None, 0);
ImmReleaseContext(handle, ctx).ok().log_err();
ret as usize
}
}
fn parse_ime_compostion_result(ctx: HIMC) -> Option<String> {
fn parse_ime_compostion_result(handle: HWND) -> Option<String> {
unsafe {
let ctx = ImmGetContext(handle);
let string_len = ImmGetCompositionStringW(ctx, GCS_RESULTSTR, None, 0);
if string_len >= 0 {
let result = if string_len >= 0 {
let mut buffer = vec![0u8; string_len as usize + 2];
ImmGetCompositionStringW(
ctx,
@@ -1298,7 +1273,9 @@ fn parse_ime_compostion_result(ctx: HIMC) -> Option<String> {
Some(string)
} else {
None
}
};
ImmReleaseContext(handle, ctx).ok().log_err();
result
}
}
@@ -1346,100 +1323,3 @@ pub(crate) fn current_modifiers() -> Modifiers {
function: false,
}
}
fn get_client_area_insets(
handle: HWND,
is_maximized: bool,
windows_version: WindowsVersion,
) -> RECT {
// For maximized windows, Windows outdents the window rect from the screen's client rect
// by `frame_thickness` on each edge, meaning `insets` must contain `frame_thickness`
// on all sides (including the top) to avoid the client area extending onto adjacent
// monitors.
//
// For non-maximized windows, things become complicated:
//
// - On Windows 10
// The top inset must be zero, since if there is any nonclient area, Windows will draw
// a full native titlebar outside the client area. (This doesn't occur in the maximized
// case.)
//
// - On Windows 11
// The top inset is calculated using an empirical formula that I derived through various
// tests. Without this, the top 1-2 rows of pixels in our window would be obscured.
let dpi = unsafe { GetDpiForWindow(handle) };
let frame_thickness = get_frame_thickness(dpi);
let top_insets = if is_maximized {
frame_thickness
} else {
match windows_version {
WindowsVersion::Win10 => 0,
WindowsVersion::Win11 => (dpi as f32 / USER_DEFAULT_SCREEN_DPI as f32).round() as i32,
}
};
RECT {
left: frame_thickness,
top: top_insets,
right: frame_thickness,
bottom: frame_thickness,
}
}
// there is some additional non-visible space when talking about window
// borders on Windows:
// - SM_CXSIZEFRAME: The resize handle.
// - SM_CXPADDEDBORDER: Additional border space that isn't part of the resize handle.
fn get_frame_thickness(dpi: u32) -> i32 {
let resize_frame_thickness = unsafe { GetSystemMetricsForDpi(SM_CXSIZEFRAME, dpi) };
let padding_thickness = unsafe { GetSystemMetricsForDpi(SM_CXPADDEDBORDER, dpi) };
resize_frame_thickness + padding_thickness
}
fn notify_frame_changed(handle: HWND) {
unsafe {
SetWindowPos(
handle,
None,
0,
0,
0,
0,
SWP_FRAMECHANGED
| SWP_NOACTIVATE
| SWP_NOCOPYBITS
| SWP_NOMOVE
| SWP_NOOWNERZORDER
| SWP_NOREPOSITION
| SWP_NOSENDCHANGING
| SWP_NOSIZE
| SWP_NOZORDER,
)
.log_err();
}
}
fn with_input_handler<F, R>(state_ptr: &Rc<WindowsWindowStatePtr>, f: F) -> Option<R>
where
F: FnOnce(&mut PlatformInputHandler) -> R,
{
let mut input_handler = state_ptr.state.borrow_mut().input_handler.take()?;
let result = f(&mut input_handler);
state_ptr.state.borrow_mut().input_handler = Some(input_handler);
Some(result)
}
fn with_input_handler_and_scale_factor<F, R>(
state_ptr: &Rc<WindowsWindowStatePtr>,
f: F,
) -> Option<R>
where
F: FnOnce(&mut PlatformInputHandler, f32) -> Option<R>,
{
let mut lock = state_ptr.state.borrow_mut();
let mut input_handler = lock.input_handler.take()?;
let scale_factor = lock.scale_factor;
drop(lock);
let result = f(&mut input_handler, scale_factor);
state_ptr.state.borrow_mut().input_handler = Some(input_handler);
result
}

View File

@@ -1,6 +1,11 @@
// todo(windows): remove
#![allow(unused_variables)]
use std::{
cell::RefCell,
cell::{Cell, RefCell},
ffi::{c_void, OsString},
mem::ManuallyDrop,
os::windows::ffi::{OsStrExt, OsStringExt},
path::{Path, PathBuf},
rc::Rc,
sync::Arc,
@@ -283,7 +288,7 @@ impl Platform for WindowsPlatform {
}
// todo(windows)
fn activate(&self, _ignoring_other_apps: bool) {}
fn activate(&self, ignoring_other_apps: bool) {}
// todo(windows)
fn hide(&self) {
@@ -361,9 +366,68 @@ impl Platform for WindowsPlatform {
options: PathPromptOptions,
) -> Receiver<Result<Option<Vec<PathBuf>>>> {
let (tx, rx) = oneshot::channel();
self.foreground_executor()
.spawn(async move {
let _ = tx.send(file_open_dialog(options));
let tx = Cell::new(Some(tx));
// create file open dialog
let folder_dialog: IFileOpenDialog = unsafe {
CoCreateInstance::<std::option::Option<&IUnknown>, IFileOpenDialog>(
&FileOpenDialog,
None,
CLSCTX_ALL,
)
.unwrap()
};
// dialog options
let mut dialog_options: FILEOPENDIALOGOPTIONS = FOS_FILEMUSTEXIST;
if options.multiple {
dialog_options |= FOS_ALLOWMULTISELECT;
}
if options.directories {
dialog_options |= FOS_PICKFOLDERS;
}
unsafe {
folder_dialog.SetOptions(dialog_options).unwrap();
folder_dialog
.SetTitle(&HSTRING::from(OsString::from("Select a folder")))
.unwrap();
}
let hr = unsafe { folder_dialog.Show(None) };
if hr.is_err() {
if hr.unwrap_err().code() == HRESULT(0x800704C7u32 as i32) {
// user canceled error
if let Some(tx) = tx.take() {
tx.send(Ok(None)).unwrap();
}
return;
}
}
let mut results = unsafe { folder_dialog.GetResults().unwrap() };
let mut paths: Vec<PathBuf> = Vec::new();
for i in 0..unsafe { results.GetCount().unwrap() } {
let mut item: IShellItem = unsafe { results.GetItemAt(i).unwrap() };
let mut path: PWSTR =
unsafe { item.GetDisplayName(SIGDN_FILESYSPATH).unwrap() };
let mut path_os_string = OsString::from_wide(unsafe { path.as_wide() });
paths.push(PathBuf::from(path_os_string));
}
if let Some(tx) = tx.take() {
if paths.is_empty() {
tx.send(Ok(None)).unwrap();
} else {
tx.send(Ok(Some(paths))).unwrap();
}
}
})
.detach();
@@ -375,7 +439,23 @@ impl Platform for WindowsPlatform {
let (tx, rx) = oneshot::channel();
self.foreground_executor()
.spawn(async move {
let _ = tx.send(file_save_dialog(directory));
unsafe {
let Ok(dialog) = show_savefile_dialog(directory) else {
let _ = tx.send(Ok(None));
return;
};
let Ok(_) = dialog.Show(None) else {
let _ = tx.send(Ok(None)); // user cancel
return;
};
if let Ok(shell_item) = dialog.GetResult() {
if let Ok(file) = shell_item.GetDisplayName(SIGDN_FILESYSPATH) {
let _ = tx.send(Ok(Some(PathBuf::from(file.to_string().unwrap()))));
return;
}
}
let _ = tx.send(Ok(None));
}
})
.detach();
@@ -409,8 +489,8 @@ impl Platform for WindowsPlatform {
}
// todo(windows)
fn set_menus(&self, _menus: Vec<Menu>, _keymap: &Keymap) {}
fn set_dock_menu(&self, _menus: Vec<MenuItem>, _keymap: &Keymap) {}
fn set_menus(&self, menus: Vec<Menu>, keymap: &Keymap) {}
fn set_dock_menu(&self, menus: Vec<MenuItem>, keymap: &Keymap) {}
fn on_app_menu_action(&self, callback: Box<dyn FnMut(&dyn Action)>) {
self.state.borrow_mut().callbacks.app_menu_action = Some(callback);
@@ -429,7 +509,7 @@ impl Platform for WindowsPlatform {
}
// todo(windows)
fn path_for_auxiliary_executable(&self, _name: &str) -> Result<PathBuf> {
fn path_for_auxiliary_executable(&self, name: &str) -> Result<PathBuf> {
Err(anyhow!("not yet implemented"))
}
@@ -509,7 +589,7 @@ impl Platform for WindowsPlatform {
)
};
let password = credential_blob.to_vec();
unsafe { CredFree(credentials as *const _ as _) };
unsafe { CredFree(credentials as *const c_void) };
Ok(Some((username, password)))
}
})
@@ -575,61 +655,27 @@ fn open_target_in_explorer(target: &str) {
}
}
fn file_open_dialog(options: PathPromptOptions) -> Result<Option<Vec<PathBuf>>> {
let folder_dialog: IFileOpenDialog =
unsafe { CoCreateInstance(&FileOpenDialog, None, CLSCTX_ALL)? };
let mut dialog_options = FOS_FILEMUSTEXIST;
if options.multiple {
dialog_options |= FOS_ALLOWMULTISELECT;
unsafe fn show_savefile_dialog(directory: PathBuf) -> Result<IFileSaveDialog> {
let dialog: IFileSaveDialog = CoCreateInstance(&FileSaveDialog, None, CLSCTX_ALL)?;
let bind_context = CreateBindCtx(0)?;
let Ok(full_path) = directory.canonicalize() else {
return Ok(dialog);
};
let dir_str = full_path.into_os_string();
if dir_str.is_empty() {
return Ok(dialog);
}
if options.directories {
dialog_options |= FOS_PICKFOLDERS;
let dir_vec = dir_str.encode_wide().collect_vec();
let ret = SHCreateItemFromParsingName(PCWSTR::from_raw(dir_vec.as_ptr()), &bind_context)
.inspect_err(|e| log::error!("unable to create IShellItem: {}", e));
if ret.is_ok() {
let dir_shell_item: IShellItem = ret.unwrap();
let _ = dialog
.SetFolder(&dir_shell_item)
.inspect_err(|e| log::error!("unable to set folder for save file dialog: {}", e));
}
unsafe {
folder_dialog.SetOptions(dialog_options)?;
if folder_dialog.Show(None).is_err() {
// User cancelled
return Ok(None);
}
}
let results = unsafe { folder_dialog.GetResults()? };
let file_count = unsafe { results.GetCount()? };
if file_count == 0 {
return Ok(None);
}
let mut paths = Vec::new();
for i in 0..file_count {
let item = unsafe { results.GetItemAt(i)? };
let path = unsafe { item.GetDisplayName(SIGDN_FILESYSPATH)?.to_string()? };
paths.push(PathBuf::from(path));
}
Ok(Some(paths))
}
fn file_save_dialog(directory: PathBuf) -> Result<Option<PathBuf>> {
let dialog: IFileSaveDialog = unsafe { CoCreateInstance(&FileSaveDialog, None, CLSCTX_ALL)? };
if let Some(full_path) = directory.canonicalize().log_err() {
let full_path = full_path.to_string_lossy().to_string();
if !full_path.is_empty() {
let path_item: IShellItem =
unsafe { SHCreateItemFromParsingName(&HSTRING::from(&full_path), None)? };
unsafe { dialog.SetFolder(&path_item).log_err() };
}
}
unsafe {
if dialog.Show(None).is_err() {
// User cancelled
return Ok(None);
}
}
let shell_item = unsafe { dialog.GetResult()? };
let file_path_string = unsafe { shell_item.GetDisplayName(SIGDN_FILESYSPATH)?.to_string()? };
Ok(Some(PathBuf::from(file_path_string)))
Ok(dialog)
}
fn begin_vsync(vsync_event: HANDLE) {

View File

@@ -1,24 +1,16 @@
use std::ffi::{c_uint, c_void};
use ::util::ResultExt;
use windows::Win32::UI::{
Shell::{SHAppBarMessage, ABM_GETSTATE, ABM_GETTASKBARPOS, ABS_AUTOHIDE, APPBARDATA},
WindowsAndMessaging::{
SystemParametersInfoW, SPI_GETWHEELSCROLLCHARS, SPI_GETWHEELSCROLLLINES,
SYSTEM_PARAMETERS_INFO_UPDATE_FLAGS,
},
use util::ResultExt;
use windows::Win32::UI::WindowsAndMessaging::{
SystemParametersInfoW, SPI_GETWHEELSCROLLCHARS, SPI_GETWHEELSCROLLLINES,
SYSTEM_PARAMETERS_INFO_UPDATE_FLAGS,
};
use crate::*;
use super::WindowsDisplay;
/// Windows settings pulled from SystemParametersInfo
/// https://learn.microsoft.com/en-us/windows/win32/api/winuser/nf-winuser-systemparametersinfow
#[derive(Default, Debug, Clone, Copy)]
pub(crate) struct WindowsSystemSettings {
pub(crate) mouse_wheel_settings: MouseWheelSettings,
pub(crate) auto_hide_taskbar_position: Option<AutoHideTaskbarPosition>,
}
#[derive(Default, Debug, Clone, Copy)]
@@ -30,20 +22,19 @@ pub(crate) struct MouseWheelSettings {
}
impl WindowsSystemSettings {
pub(crate) fn new(display: WindowsDisplay) -> Self {
pub(crate) fn new() -> Self {
let mut settings = Self::default();
settings.update(display);
settings.init();
settings
}
pub(crate) fn update(&mut self, display: WindowsDisplay) {
fn init(&mut self) {
self.mouse_wheel_settings.update();
self.auto_hide_taskbar_position = AutoHideTaskbarPosition::new(display).log_err().flatten();
}
}
impl MouseWheelSettings {
fn update(&mut self) {
pub(crate) fn update(&mut self) {
self.update_wheel_scroll_chars();
self.update_wheel_scroll_lines();
}
@@ -80,100 +71,3 @@ impl MouseWheelSettings {
}
}
}
#[derive(Debug, Clone, Copy, Default)]
pub(crate) enum AutoHideTaskbarPosition {
Left,
Right,
Top,
#[default]
Bottom,
}
impl AutoHideTaskbarPosition {
fn new(display: WindowsDisplay) -> anyhow::Result<Option<Self>> {
if !check_auto_hide_taskbar_enable() {
// If auto hide taskbar is not enable, we do nothing in this case.
return Ok(None);
}
let mut info = APPBARDATA {
cbSize: std::mem::size_of::<APPBARDATA>() as u32,
..Default::default()
};
let ret = unsafe { SHAppBarMessage(ABM_GETTASKBARPOS, &mut info) };
if ret == 0 {
anyhow::bail!(
"Unable to retrieve taskbar position: {}",
std::io::Error::last_os_error()
);
}
let taskbar_bounds: Bounds<DevicePixels> = Bounds::new(
point(info.rc.left.into(), info.rc.top.into()),
size(
(info.rc.right - info.rc.left).into(),
(info.rc.bottom - info.rc.top).into(),
),
);
let display_bounds = display.physical_bounds();
if display_bounds.intersect(&taskbar_bounds) != taskbar_bounds {
// This case indicates that taskbar is not on the current monitor.
return Ok(None);
}
if taskbar_bounds.bottom() == display_bounds.bottom()
&& taskbar_bounds.right() == display_bounds.right()
{
if taskbar_bounds.size.height < display_bounds.size.height
&& taskbar_bounds.size.width == display_bounds.size.width
{
return Ok(Some(Self::Bottom));
}
if taskbar_bounds.size.width < display_bounds.size.width
&& taskbar_bounds.size.height == display_bounds.size.height
{
return Ok(Some(Self::Right));
}
log::error!(
"Unrecognized taskbar bounds {:?} give display bounds {:?}",
taskbar_bounds,
display_bounds
);
return Ok(None);
}
if taskbar_bounds.top() == display_bounds.top()
&& taskbar_bounds.left() == display_bounds.left()
{
if taskbar_bounds.size.height < display_bounds.size.height
&& taskbar_bounds.size.width == display_bounds.size.width
{
return Ok(Some(Self::Top));
}
if taskbar_bounds.size.width < display_bounds.size.width
&& taskbar_bounds.size.height == display_bounds.size.height
{
return Ok(Some(Self::Left));
}
log::error!(
"Unrecognized taskbar bounds {:?} give display bounds {:?}",
taskbar_bounds,
display_bounds
);
return Ok(None);
}
log::error!(
"Unrecognized taskbar bounds {:?} give display bounds {:?}",
taskbar_bounds,
display_bounds
);
Ok(None)
}
}
/// Check if auto hide taskbar is enable or not.
fn check_auto_hide_taskbar_enable() -> bool {
let mut info = APPBARDATA {
cbSize: std::mem::size_of::<APPBARDATA>() as u32,
..Default::default()
};
let ret = unsafe { SHAppBarMessage(ABM_GETSTATE, &mut info) } as u32;
ret == ABS_AUTOHIDE
}

View File

@@ -92,7 +92,7 @@ impl WindowsWindowState {
let input_handler = None;
let system_key_handled = false;
let click_state = ClickState::new();
let system_settings = WindowsSystemSettings::new(display);
let system_settings = WindowsSystemSettings::new();
let nc_button_pressed = None;
let fullscreen = None;
@@ -676,10 +676,6 @@ impl PlatformWindow for WindowsWindow {
Some(self.0.state.borrow().renderer.gpu_specs())
}
fn update_ime_position(&self, _bounds: Bounds<Pixels>) {
// todo(windows)
}
fn fps(&self) -> Option<f32> {
None
}

View File

@@ -84,7 +84,6 @@ impl TextSystem {
.iter()
.map(|font| font.family.to_string()),
);
names.push(".SystemUIFont".to_string());
names.sort();
names.dedup();
names

View File

@@ -3389,7 +3389,7 @@ impl<'a> WindowContext<'a> {
self.window.pending_input.is_some()
}
pub(crate) fn clear_pending_keystrokes(&mut self) {
fn clear_pending_keystrokes(&mut self) {
self.window.pending_input.take();
}
@@ -3575,18 +3575,6 @@ impl<'a> WindowContext<'a> {
self.window.platform_window.toggle_fullscreen();
}
/// Updates the IME panel position suggestions for languages like japanese, chinese.
pub fn invalidate_character_coordinates(&mut self) {
self.on_next_frame(|cx| {
if let Some(mut input_handler) = cx.window.platform_window.take_input_handler() {
if let Some(bounds) = input_handler.selected_bounds(cx) {
cx.window.platform_window.update_ime_position(bounds);
}
cx.window.platform_window.set_input_handler(input_handler);
}
});
}
/// Present a platform dialog.
/// The provided message will be presented, along with buttons for each answer.
/// When a button is clicked, the returned Receiver will receive the index of the clicked button.

View File

@@ -244,7 +244,6 @@ impl DevServer {
this.app_state.user_store.clone(),
this.app_state.languages.clone(),
this.app_state.fs.clone(),
None,
cx,
);

View File

@@ -16,8 +16,6 @@ doctest = false
anyhow.workspace = true
db.workspace = true
gpui.workspace = true
file_icons.workspace = true
ui.workspace = true
settings.workspace = true
workspace.workspace = true
project.workspace = true

View File

@@ -7,13 +7,11 @@ use gpui::{
use persistence::IMAGE_VIEWER;
use ui::prelude::*;
use file_icons::FileIcons;
use project::{Project, ProjectEntryId, ProjectPath};
use settings::Settings;
use std::{ffi::OsStr, path::PathBuf};
use workspace::{
item::{Item, ProjectItem, SerializableItem, TabContentParams},
ItemId, ItemSettings, Pane, Workspace, WorkspaceId,
ItemId, Pane, Workspace, WorkspaceId,
};
const IMAGE_VIEWER_KIND: &str = "ImageView";
@@ -87,14 +85,6 @@ impl Item for ImageView {
.into_any_element()
}
fn tab_icon(&self, cx: &WindowContext) -> Option<Icon> {
ItemSettings::get_global(cx)
.file_icons
.then(|| FileIcons::get_icon(self.path.as_path(), cx))
.flatten()
.map(|icon| Icon::from_path(icon))
}
fn clone_on_split(
&self,
_workspace_id: Option<WorkspaceId>,

View File

@@ -61,14 +61,14 @@ pub fn init(_: Arc<AppState>, cx: &mut AppContext) {
cx.observe_new_views(
|workspace: &mut Workspace, _cx: &mut ViewContext<Workspace>| {
workspace.register_action(|workspace, _: &NewJournalEntry, cx| {
new_journal_entry(&workspace, cx);
new_journal_entry(workspace.app_state().clone(), cx);
});
},
)
.detach();
}
pub fn new_journal_entry(workspace: &Workspace, cx: &mut WindowContext) {
pub fn new_journal_entry(app_state: Arc<AppState>, cx: &mut WindowContext) {
let settings = JournalSettings::get_global(cx);
let journal_dir = match journal_dir(settings.path.as_ref().unwrap()) {
Some(journal_dir) => journal_dir,
@@ -77,7 +77,6 @@ pub fn new_journal_entry(workspace: &Workspace, cx: &mut WindowContext) {
return;
}
};
let journal_dir_clone = journal_dir.clone();
let now = Local::now();
let month_dir = journal_dir
@@ -97,51 +96,24 @@ pub fn new_journal_entry(workspace: &Workspace, cx: &mut WindowContext) {
Ok::<_, std::io::Error>((journal_dir, entry_path))
});
let worktrees = workspace.visible_worktrees(cx).collect::<Vec<_>>();
let mut open_new_workspace = true;
'outer: for worktree in worktrees.iter() {
let worktree_root = worktree.read(cx).abs_path();
if *worktree_root == journal_dir_clone {
open_new_workspace = false;
break;
}
for directory in worktree.read(cx).directories(true, 1) {
let full_directory_path = worktree_root.join(&directory.path);
if full_directory_path.ends_with(&journal_dir_clone) {
open_new_workspace = false;
break 'outer;
}
}
}
let app_state = workspace.app_state().clone();
let view_snapshot = workspace.weak_handle().clone();
cx.spawn(|mut cx| async move {
let (journal_dir, entry_path) = create_entry.await?;
let opened = if open_new_workspace {
let (new_workspace, _) = cx
.update(|cx| {
workspace::open_paths(
&[journal_dir],
app_state,
workspace::OpenOptions::default(),
cx,
)
})?
.await?;
new_workspace
.update(&mut cx, |workspace, cx| {
workspace.open_paths(vec![entry_path], OpenVisible::All, None, cx)
})?
.await
} else {
view_snapshot
.update(&mut cx, |workspace, cx| {
workspace.open_paths(vec![entry_path], OpenVisible::All, None, cx)
})?
.await
};
let (workspace, _) = cx
.update(|cx| {
workspace::open_paths(
&[journal_dir],
app_state,
workspace::OpenOptions::default(),
cx,
)
})?
.await?;
let opened = workspace
.update(&mut cx, |workspace, cx| {
workspace.open_paths(vec![entry_path], OpenVisible::All, None, cx)
})?
.await;
if let Some(Some(Ok(item))) = opened.first() {
if let Some(editor) = item.downcast::<Editor>().map(|editor| editor.downgrade()) {

View File

@@ -719,7 +719,6 @@ impl LanguageRegistry {
self.lsp_binary_status_tx.send(server_name, status);
}
#[allow(clippy::too_many_arguments)]
pub fn create_pending_language_server(
self: &Arc<Self>,
stderr_capture: Arc<Mutex<Option<String>>>,
@@ -727,7 +726,6 @@ impl LanguageRegistry {
adapter: Arc<CachedLspAdapter>,
root_path: Arc<Path>,
delegate: Arc<dyn LspAdapterDelegate>,
cli_environment: Option<HashMap<String, String>>,
cx: &mut AppContext,
) -> Option<PendingLanguageServer> {
let server_id = self.state.write().next_language_server_id();
@@ -766,19 +764,7 @@ impl LanguageRegistry {
delegate.update_status(adapter.name.clone(), LanguageServerBinaryStatus::None);
let mut binary = binary_result?;
// If this Zed project was opened from the CLI and the language server command itself
// doesn't have an environment (which it would have, if it was found in $PATH), then
// we pass along the CLI environment that we inherited.
if binary.env.is_none() && cli_environment.is_some() {
log::info!(
"using CLI environment for language server {:?}, id: {server_id}",
adapter.name.0
);
binary.env = cli_environment.clone();
}
let binary = binary_result?;
let options = adapter
.adapter
.clone()

View File

@@ -647,7 +647,7 @@ impl LspLogView {
editor.set_text(log_contents, cx);
editor.move_to_end(&MoveToEnd, cx);
editor.set_read_only(true);
editor.set_show_inline_completions(Some(false), cx);
editor.set_show_inline_completions(false);
editor
});
let editor_subscription = cx.subscribe(

View File

@@ -9,5 +9,6 @@ brackets = [
{ start = "\"", end = "\"", close = true, newline = false, not_in = ["string", "comment"] },
{ start = "'", end = "'", close = true, newline = false, not_in = ["string", "comment"] },
]
word_characters = ["-"]
block_comment = ["/* ", " */"]
prettier_parser_name = "css"

View File

@@ -25,4 +25,5 @@ block_comment = ["{/* ", " */}"]
opt_into_language_servers = ["emmet-language-server"]
[overrides.string]
word_characters = ["-"]
opt_into_language_servers = ["tailwindcss-language-server"]

View File

@@ -1,6 +1,7 @@
name = "Markdown-Inline"
grammar = "markdown-inline"
path_suffixes = []
word_characters = ["-"]
brackets = [
{ start = "{", end = "}", close = true, newline = true },
{ start = "[", end = "]", close = true, newline = true },

View File

@@ -1,6 +1,7 @@
name = "Markdown"
grammar = "markdown"
path_suffixes = ["md", "mdx", "mdwn", "markdown"]
word_characters = ["-"]
brackets = [
{ start = "{", end = "}", close = true, newline = true },
{ start = "[", end = "]", close = true, newline = true },

View File

@@ -23,16 +23,10 @@ const SERVER_PATH: &str = "node_modules/.bin/tailwindcss-language-server.ps1";
#[cfg(not(target_os = "windows"))]
const SERVER_PATH: &str = "node_modules/.bin/tailwindcss-language-server";
#[cfg(not(target_os = "windows"))]
fn server_binary_arguments(server_path: &Path) -> Vec<OsString> {
vec![server_path.into(), "--stdio".into()]
}
#[cfg(target_os = "windows")]
fn server_binary_arguments(server_path: &Path) -> Vec<OsString> {
vec!["-File".into(), server_path.into(), "--stdio".into()]
}
pub struct TailwindLspAdapter {
node: Arc<dyn NodeRuntime>,
}
@@ -119,7 +113,20 @@ impl LspAdapter for TailwindLspAdapter {
#[cfg(target_os = "windows")]
{
let env_path = self.node.node_environment_path().await?;
let mut env_path = vec![self
.node
.binary_path()
.await?
.parent()
.expect("invalid node binary path")
.to_path_buf()];
if let Some(existing_path) = std::env::var_os("PATH") {
let mut paths = std::env::split_paths(&existing_path).collect::<Vec<_>>();
env_path.append(&mut paths);
}
let env_path = std::env::join_paths(env_path)?;
let mut env = HashMap::default();
env.insert("PATH".to_string(), env_path.to_string_lossy().to_string());

View File

@@ -24,4 +24,5 @@ block_comment = ["{/* ", " */}"]
opt_into_language_servers = ["emmet-language-server"]
[overrides.string]
word_characters = ["-"]
opt_into_language_servers = ["tailwindcss-language-server"]

View File

@@ -3,12 +3,11 @@ use async_trait::async_trait;
use futures::StreamExt;
use gpui::AsyncAppContext;
use language::{
language_settings::AllLanguageSettings, LanguageServerName, LspAdapter, LspAdapterDelegate,
language_settings::all_language_settings, LanguageServerName, LspAdapter, LspAdapterDelegate,
};
use lsp::LanguageServerBinary;
use node_runtime::NodeRuntime;
use serde_json::Value;
use settings::{Settings, SettingsLocation};
use smol::fs;
use std::{
any::Any,
@@ -96,16 +95,11 @@ impl LspAdapter for YamlLspAdapter {
async fn workspace_configuration(
self: Arc<Self>,
delegate: &Arc<dyn LspAdapterDelegate>,
_: &Arc<dyn LspAdapterDelegate>,
cx: &mut AsyncAppContext,
) -> Result<Value> {
let location = SettingsLocation {
worktree_id: delegate.worktree_id() as usize,
path: delegate.worktree_root_path(),
};
let tab_size = cx.update(|cx| {
AllLanguageSettings::get(Some(location), cx)
all_language_settings(None, cx)
.language(Some("YAML"))
.tab_size
})?;

View File

@@ -22,4 +22,4 @@ metal = "0.29"
objc = "0.2"
[build-dependencies]
bindgen = "0.70.0"
bindgen = "0.65.1"

View File

@@ -28,7 +28,7 @@ fn main() {
.allowlist_var("kCMVideoCodecType_.*")
.allowlist_var("kCMTime.*")
.allowlist_var("kCMSampleAttachmentKey_.*")
.parse_callbacks(Box::new(bindgen::CargoCallbacks::new()))
.parse_callbacks(Box::new(bindgen::CargoCallbacks))
.layout_tests(false)
.generate()
.expect("unable to generate bindings");

View File

@@ -10,7 +10,6 @@ use semver::Version;
use serde::Deserialize;
use smol::io::BufReader;
use smol::{fs, lock::Mutex, process::Command};
use std::ffi::OsString;
use std::io;
use std::process::{Output, Stdio};
use std::{
@@ -56,7 +55,6 @@ pub struct NpmInfoDistTags {
#[async_trait::async_trait]
pub trait NodeRuntime: Send + Sync {
async fn binary_path(&self) -> Result<PathBuf>;
async fn node_environment_path(&self) -> Result<OsString>;
async fn run_npm_subcommand(
&self,
@@ -218,22 +216,6 @@ impl NodeRuntime for RealNodeRuntime {
Ok(installation_path.join(NODE_PATH))
}
async fn node_environment_path(&self) -> Result<OsString> {
let installation_path = self.install_if_needed().await?;
let node_binary = installation_path.join(NODE_PATH);
let mut env_path = vec![node_binary
.parent()
.expect("invalid node binary path")
.to_path_buf()];
if let Some(existing_path) = std::env::var_os("PATH") {
let mut paths = std::env::split_paths(&existing_path).collect::<Vec<_>>();
env_path.append(&mut paths);
}
Ok(std::env::join_paths(env_path).context("failed to create PATH env variable")?)
}
async fn run_npm_subcommand(
&self,
directory: Option<&Path>,
@@ -242,9 +224,21 @@ impl NodeRuntime for RealNodeRuntime {
) -> Result<Output> {
let attempt = || async move {
let installation_path = self.install_if_needed().await?;
let node_binary = installation_path.join(NODE_PATH);
let npm_file = installation_path.join(NPM_PATH);
let env_path = self.node_environment_path().await?;
let mut env_path = vec![node_binary
.parent()
.expect("invalid node binary path")
.to_path_buf()];
if let Some(existing_path) = std::env::var_os("PATH") {
let mut paths = std::env::split_paths(&existing_path).collect::<Vec<_>>();
env_path.append(&mut paths);
}
let env_path =
std::env::join_paths(env_path).context("failed to create PATH env variable")?;
if smol::fs::metadata(&node_binary).await.is_err() {
return Err(anyhow!("missing node binary file"));
@@ -429,10 +423,6 @@ impl NodeRuntime for FakeNodeRuntime {
unreachable!()
}
async fn node_environment_path(&self) -> anyhow::Result<OsString> {
unreachable!()
}
async fn run_npm_subcommand(
&self,
_: Option<&Path>,

View File

@@ -15,8 +15,6 @@ static SUPPORTED_COUNTRIES: LazyLock<HashSet<&'static str>> = LazyLock::new(|| {
vec![
"AL", // Albania
"DZ", // Algeria
"AS", // American Samoa (US)
"AI", // Anguilla (UK)
"AF", // Afghanistan
"AD", // Andorra
"AO", // Angola
@@ -33,13 +31,11 @@ static SUPPORTED_COUNTRIES: LazyLock<HashSet<&'static str>> = LazyLock::new(|| {
"BE", // Belgium
"BZ", // Belize
"BJ", // Benin
"BM", // Bermuda (UK)
"BT", // Bhutan
"BO", // Bolivia
"BA", // Bosnia and Herzegovina
"BW", // Botswana
"BR", // Brazil
"IO", // British Indian Ocean Territory (UK)
"BN", // Brunei
"BG", // Bulgaria
"BF", // Burkina Faso
@@ -48,17 +44,13 @@ static SUPPORTED_COUNTRIES: LazyLock<HashSet<&'static str>> = LazyLock::new(|| {
"KH", // Cambodia
"CM", // Cameroon
"CA", // Canada
"KY", // Cayman Islands (UK)
"CF", // Central African Republic
"TD", // Chad
"CL", // Chile
"CX", // Christmas Island (AU)
"CC", // Cocos (Keeling) Islands (AU)
"CO", // Colombia
"KM", // Comoros
"CG", // Congo (Brazzaville)
"CD", // Congo (DRC)
"CK", // Cook Islands (NZ)
"CR", // Costa Rica
"CI", // Côte d'Ivoire
"HR", // Croatia
@@ -76,28 +68,21 @@ static SUPPORTED_COUNTRIES: LazyLock<HashSet<&'static str>> = LazyLock::new(|| {
"EE", // Estonia
"SZ", // Eswatini (Swaziland)
"ET", // Ethiopia
"FK", // Falkland Islands (UK)
"FJ", // Fiji
"FI", // Finland
"FR", // France
"GF", // French Guiana (FR)
"PF", // French Polynesia (FR)
"TF", // French Southern Territories
"GA", // Gabon
"GM", // Gambia
"GE", // Georgia
"DE", // Germany
"GH", // Ghana
"GI", // Gibraltar (UK)
"GR", // Greece
"GD", // Grenada
"GT", // Guatemala
"GU", // Guam (US)
"GN", // Guinea
"GW", // Guinea-Bissau
"GY", // Guyana
"HT", // Haiti
"HM", // Heard Island and McDonald Islands (AU)
"VA", // Holy See (Vatican City)
"HN", // Honduras
"HU", // Hungary
@@ -139,7 +124,6 @@ static SUPPORTED_COUNTRIES: LazyLock<HashSet<&'static str>> = LazyLock::new(|| {
"MD", // Moldova
"MC", // Monaco
"MN", // Mongolia
"MS", // Montserrat (UK)
"ME", // Montenegro
"MA", // Morocco
"MZ", // Mozambique
@@ -152,11 +136,8 @@ static SUPPORTED_COUNTRIES: LazyLock<HashSet<&'static str>> = LazyLock::new(|| {
"NI", // Nicaragua
"NE", // Niger
"NG", // Nigeria
"NF", // Norfolk Island (AU)
"MK", // North Macedonia
"MI", // Northern Mariana Islands (UK)
"NO", // Norway
"NU", // Niue (NZ)
"OM", // Oman
"PK", // Pakistan
"PW", // Palau
@@ -166,18 +147,13 @@ static SUPPORTED_COUNTRIES: LazyLock<HashSet<&'static str>> = LazyLock::new(|| {
"PY", // Paraguay
"PE", // Peru
"PH", // Philippines
"PN", // Pitcairn (UK)
"PL", // Poland
"PT", // Portugal
"PR", // Puerto Rico (US)
"QA", // Qatar
"RO", // Romania
"RW", // Rwanda
"BL", // Saint Barthélemy (FR)
"KN", // Saint Kitts and Nevis
"LC", // Saint Lucia
"MF", // Saint Martin (FR)
"PM", // Saint Pierre and Miquelon (FR)
"VC", // Saint Vincent and the Grenadines
"WS", // Samoa
"SM", // San Marino
@@ -186,7 +162,6 @@ static SUPPORTED_COUNTRIES: LazyLock<HashSet<&'static str>> = LazyLock::new(|| {
"SN", // Senegal
"RS", // Serbia
"SC", // Seychelles
"SH", // Saint Helena, Ascension and Tristan da Cunha (UK)
"SL", // Sierra Leone
"SG", // Singapore
"SK", // Slovakia
@@ -208,27 +183,21 @@ static SUPPORTED_COUNTRIES: LazyLock<HashSet<&'static str>> = LazyLock::new(|| {
"TH", // Thailand
"TL", // Timor-Leste (East Timor)
"TG", // Togo
"TK", // Tokelau (NZ)
"TO", // Tonga
"TT", // Trinidad and Tobago
"TN", // Tunisia
"TR", // Turkey
"TM", // Turkmenistan
"TC", // Turks and Caicos Islands (UK)
"TV", // Tuvalu
"UG", // Uganda
"UA", // Ukraine (with certain exceptions)
"AE", // United Arab Emirates
"GB", // United Kingdom
"UM", // United States Minor Outlying Islands (US)
"US", // United States of America
"UY", // Uruguay
"UZ", // Uzbekistan
"VU", // Vanuatu
"VN", // Vietnam
"VI", // Virgin Islands (US)
"VG", // Virgin Islands (UK)
"WF", // Wallis and Futuna (FR)
"YE", // Yemen
"ZM", // Zambia
"ZW", // Zimbabwe

View File

@@ -71,9 +71,6 @@ text.workspace = true
util.workspace = true
which.workspace = true
[target.'cfg(target_os = "windows")'.dependencies]
windows.workspace = true
[dev-dependencies]
client = { workspace = true, features = ["test-support"] }
collections = { workspace = true, features = ["test-support"] }

View File

@@ -4,7 +4,6 @@ use crate::{
Item, NoRepositoryError, ProjectPath,
};
use anyhow::{anyhow, Context as _, Result};
use client::Client;
use collections::{hash_map, HashMap, HashSet};
use fs::Fs;
use futures::{channel::oneshot, stream::FuturesUnordered, StreamExt};
@@ -18,13 +17,13 @@ use language::{
Buffer, Capability, Event as BufferEvent, File as _, Language, Operation,
};
use rpc::{
proto::{self, AnyProtoClient, EnvelopedMessage},
proto::{self, AnyProtoClient, EnvelopedMessage, PeerId},
ErrorExt as _, TypedEnvelope,
};
use smol::channel::Receiver;
use std::{io, path::Path, str::FromStr as _, sync::Arc};
use text::BufferId;
use util::{debug_panic, maybe, ResultExt as _, TryFutureExt};
use util::{debug_panic, maybe, ResultExt as _};
use worktree::{
File, PathChange, ProjectEntryId, RemoteWorktree, UpdatedGitRepositoriesSet, Worktree,
WorktreeId,
@@ -46,7 +45,6 @@ pub struct BufferStore {
loading_remote_buffers_by_id: HashMap<BufferId, Model<Buffer>>,
remote_buffer_listeners:
HashMap<BufferId, Vec<oneshot::Sender<Result<Model<Buffer>, anyhow::Error>>>>,
shared_buffers: HashMap<proto::PeerId, HashSet<BufferId>>,
}
enum OpenBuffer {
@@ -57,7 +55,6 @@ enum OpenBuffer {
pub enum BufferStoreEvent {
BufferAdded(Model<Buffer>),
BufferDropped(BufferId),
BufferChangedFilePath {
buffer: Model<Buffer>,
old_file: Option<Arc<dyn language::File>>,
@@ -96,7 +93,6 @@ impl BufferStore {
local_buffer_ids_by_path: Default::default(),
local_buffer_ids_by_entry_id: Default::default(),
loading_buffers_by_path: Default::default(),
shared_buffers: Default::default(),
}
}
@@ -617,18 +613,6 @@ impl BufferStore {
OpenBuffer::Weak(buffer.downgrade())
};
let handle = cx.handle().downgrade();
buffer.update(cx, move |_, cx| {
cx.on_release(move |buffer, cx| {
handle
.update(cx, |_, cx| {
cx.emit(BufferStoreEvent::BufferDropped(buffer.remote_id()))
})
.ok();
})
.detach()
});
match self.opened_buffers.entry(remote_id) {
hash_map::Entry::Vacant(entry) => {
entry.insert(open_buffer);
@@ -800,34 +784,31 @@ impl BufferStore {
pub fn find_search_candidates(
&mut self,
query: &SearchQuery,
mut limit: usize,
limit: usize,
fs: Arc<dyn Fs>,
cx: &mut ModelContext<Self>,
) -> Receiver<Model<Buffer>> {
let (tx, rx) = smol::channel::unbounded();
let mut open_buffers = HashSet::default();
let mut unnamed_buffers = Vec::new();
for handle in self.buffers() {
let buffer = handle.read(cx);
if let Some(entry_id) = buffer.entry_id(cx) {
open_buffers.insert(entry_id);
} else {
limit = limit.saturating_sub(1);
unnamed_buffers.push(handle)
};
}
const MAGIC_NUMBER: usize = 64;
let (tx, rx) = smol::channel::bounded(MAGIC_NUMBER);
let open_buffers = self.find_open_search_candidates(query, cx);
let skip_entries: HashSet<_> = open_buffers
.iter()
.filter_map(|buffer| buffer.read(cx).entry_id(cx))
.collect();
let limit = limit.saturating_sub(open_buffers.len());
const MAX_CONCURRENT_BUFFER_OPENS: usize = 64;
let mut project_paths_rx = self
.worktree_store
.update(cx, |worktree_store, cx| {
worktree_store.find_search_candidates(query.clone(), limit, open_buffers, fs, cx)
worktree_store.find_search_candidates(query.clone(), limit, skip_entries, fs, cx)
})
.chunks(MAX_CONCURRENT_BUFFER_OPENS);
.chunks(MAGIC_NUMBER);
cx.spawn(|this, mut cx| async move {
for buffer in unnamed_buffers {
tx.send(buffer).await.ok();
for open_buffer in open_buffers {
tx.send(open_buffer).await.ok();
}
while let Some(project_paths) = project_paths_rx.next().await {
@@ -840,6 +821,7 @@ impl BufferStore {
for buffer_task in buffers {
if let Some(buffer) = buffer_task.await.log_err() {
if tx.send(buffer).await.is_err() {
println!("other end dropped, returning");
return anyhow::Ok(());
}
}
@@ -851,6 +833,49 @@ impl BufferStore {
rx
}
/// Returns the currently open buffers whose file names pass the query's
/// include/exclude path filters.
/// Does *not* check the buffer content; the caller must run the actual
/// text search over the returned buffers.
fn find_open_search_candidates(
&self,
query: &SearchQuery,
cx: &ModelContext<Self>,
) -> Vec<Model<Buffer>> {
// With more than one visible worktree, paths are matched with the
// worktree root included so filters can distinguish between trees.
let include_root = self
.worktree_store
.read(cx)
.visible_worktrees(cx)
.count()
> 1;
let mut candidates = Vec::new();
for handle in self.buffers() {
let is_candidate = handle.read_with(cx, |buffer, cx| {
let worktree_store = self.worktree_store.read(cx);
let ignored = buffer
.entry_id(cx)
.and_then(|entry_id| worktree_store.entry_for_id(entry_id, cx))
.map_or(false, |entry| entry.is_ignored);
if ignored && !query.include_ignored() {
return false;
}
match buffer.file() {
// Untitled buffers have no path to filter on, so they always qualify.
None => true,
Some(file) => {
if include_root {
query.file_matches(Some(&file.full_path(cx)))
} else {
query.file_matches(Some(file.path()))
}
}
}
});
if is_candidate {
candidates.push(handle);
}
}
candidates
}
fn on_buffer_event(
&mut self,
buffer: Model<Buffer>,
@@ -1013,6 +1038,55 @@ impl BufferStore {
Some(())
}
/// Replicates the buffer identified by `buffer_id` to `peer_id` over the
/// wire: first a `CreateBufferForPeer` message carrying the buffer's full
/// state, then the serialized operation history in chunks, with `is_last`
/// set on the final chunk.
///
/// If the buffer is no longer open locally this is a no-op. A failure to
/// send the initial state (logged, not propagated) skips the chunk phase.
pub async fn create_buffer_for_peer(
this: Model<Self>,
peer_id: PeerId,
buffer_id: BufferId,
project_id: u64,
client: AnyProtoClient,
cx: &mut AsyncAppContext,
) -> Result<()> {
let Some(buffer) = this.update(cx, |this, _| this.get(buffer_id))? else {
return Ok(());
};
// Serialize the entire operation history (no version cutoff).
let operations = buffer.update(cx, |b, cx| b.serialize_ops(None, cx))?;
let operations = operations.await;
let state = buffer.update(cx, |buffer, cx| buffer.to_proto(cx))?;
let initial_state = proto::CreateBufferForPeer {
project_id,
peer_id: Some(peer_id),
variant: Some(proto::create_buffer_for_peer::Variant::State(state)),
};
if client.send(initial_state).log_err().is_some() {
let client = client.clone();
// Chunking happens on the background executor so large histories
// don't block the caller.
cx.background_executor()
.spawn(async move {
let mut chunks = split_operations(operations).peekable();
while let Some(chunk) = chunks.next() {
// Peek to flag the final chunk so the receiver knows
// when the buffer is fully replicated.
let is_last = chunks.peek().is_none();
client.send(proto::CreateBufferForPeer {
project_id,
peer_id: Some(peer_id),
variant: Some(proto::create_buffer_for_peer::Variant::Chunk(
proto::BufferChunk {
buffer_id: buffer_id.into(),
operations: chunk,
is_last,
},
)),
})?;
}
anyhow::Ok(())
})
.await
.log_err();
}
Ok(())
}
pub async fn handle_update_buffer(
this: Model<Self>,
envelope: TypedEnvelope<proto::UpdateBuffer>,
@@ -1042,90 +1116,6 @@ impl BufferStore {
})?
}
/// Handles a guest's `SynchronizeBuffers` request after (re)connection.
///
/// For every buffer the guest reports, this re-registers it as shared with
/// that guest, replies with the host's current version, and pushes the
/// guest's missing state: file metadata, diff base, saved-version info, and
/// (asynchronously, in chunks) any operations newer than the guest's
/// reported version.
///
/// Returns an error if the envelope lacks `original_sender_id`, which is
/// required to identify the guest.
pub fn handle_synchronize_buffers(
&mut self,
envelope: TypedEnvelope<proto::SynchronizeBuffers>,
cx: &mut ModelContext<Self>,
client: Arc<Client>,
) -> Result<proto::SynchronizeBuffersResponse> {
let project_id = envelope.payload.project_id;
let mut response = proto::SynchronizeBuffersResponse {
buffers: Default::default(),
};
let Some(guest_id) = envelope.original_sender_id else {
anyhow::bail!("missing original_sender_id on SynchronizeBuffers request");
};
// Reset the shared set for this guest; only buffers it still reports
// open are re-added below.
self.shared_buffers.entry(guest_id).or_default().clear();
for buffer in envelope.payload.buffers {
let buffer_id = BufferId::new(buffer.id)?;
let remote_version = language::proto::deserialize_version(&buffer.version);
if let Some(buffer) = self.get(buffer_id) {
self.shared_buffers
.entry(guest_id)
.or_default()
.insert(buffer_id);
let buffer = buffer.read(cx);
// Tell the guest the host-side version so it can reconcile.
response.buffers.push(proto::BufferVersion {
id: buffer_id.into(),
version: language::proto::serialize_version(&buffer.version),
});
// Only operations the guest doesn't already have are serialized.
let operations = buffer.serialize_ops(Some(remote_version), cx);
let client = client.clone();
if let Some(file) = buffer.file() {
client
.send(proto::UpdateBufferFile {
project_id,
buffer_id: buffer_id.into(),
file: Some(file.to_proto(cx)),
})
.log_err();
}
client
.send(proto::UpdateDiffBase {
project_id,
buffer_id: buffer_id.into(),
diff_base: buffer.diff_base().map(ToString::to_string),
})
.log_err();
client
.send(proto::BufferReloaded {
project_id,
buffer_id: buffer_id.into(),
version: language::proto::serialize_version(buffer.saved_version()),
mtime: buffer.saved_mtime().map(|time| time.into()),
line_ending: language::proto::serialize_line_ending(buffer.line_ending())
as i32,
})
.log_err();
// Stream the missing operations in the background so this
// handler can respond immediately.
cx.background_executor()
.spawn(
async move {
let operations = operations.await;
for chunk in split_operations(operations) {
client
.request(proto::UpdateBuffer {
project_id,
buffer_id: buffer_id.into(),
operations: chunk,
})
.await?;
}
anyhow::Ok(())
}
.log_err(),
)
.detach();
}
}
Ok(response)
}
pub fn handle_create_buffer_for_peer(
&mut self,
envelope: TypedEnvelope<proto::CreateBufferForPeer>,
@@ -1305,30 +1295,6 @@ impl BufferStore {
})
}
/// Handles a peer's notification that it closed a buffer: removes the
/// buffer from that peer's shared set, dropping the peer's entry entirely
/// once its set becomes empty.
///
/// A close for a buffer that was never shared (or already closed) indicates
/// a bookkeeping bug and triggers `debug_panic!` (panics only in debug
/// builds).
pub async fn handle_close_buffer(
this: Model<Self>,
envelope: TypedEnvelope<proto::CloseBuffer>,
mut cx: AsyncAppContext,
) -> Result<()> {
let peer_id = envelope.sender_id;
let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
this.update(&mut cx, |this, _| {
if let Some(shared) = this.shared_buffers.get_mut(&peer_id) {
if shared.remove(&buffer_id) {
if shared.is_empty() {
this.shared_buffers.remove(&peer_id);
}
return;
}
};
debug_panic!(
"peer_id {} closed buffer_id {} which was either not open or already closed",
peer_id,
buffer_id
)
})
}
pub async fn handle_buffer_saved(
this: Model<Self>,
envelope: TypedEnvelope<proto::BufferSaved>,
@@ -1401,85 +1367,6 @@ impl BufferStore {
receiver.next().await;
}
}
/// Shares `buffer` with `peer_id`, replicating its state and full operation
/// history over `client`.
///
/// Idempotent per (peer, buffer): if the buffer is already recorded as
/// shared with this peer, returns a completed task without resending.
/// Otherwise sends a `CreateBufferForPeer` message with the buffer's state,
/// followed by chunked operations with `is_last` marking the final chunk.
pub fn create_buffer_for_peer(
&mut self,
buffer: &Model<Buffer>,
peer_id: proto::PeerId,
project_id: u64,
client: AnyProtoClient,
cx: &mut ModelContext<Self>,
) -> Task<Result<()>> {
let buffer_id = buffer.read(cx).remote_id();
// `insert` returning false means this peer already has the buffer.
if !self
.shared_buffers
.entry(peer_id)
.or_default()
.insert(buffer_id)
{
return Task::ready(Ok(()));
}
cx.spawn(|this, mut cx| async move {
// The buffer may have been closed between scheduling and running.
let Some(buffer) = this.update(&mut cx, |this, _| this.get(buffer_id))? else {
return anyhow::Ok(());
};
let operations = buffer.update(&mut cx, |b, cx| b.serialize_ops(None, cx))?;
let operations = operations.await;
let state = buffer.update(&mut cx, |buffer, cx| buffer.to_proto(cx))?;
let initial_state = proto::CreateBufferForPeer {
project_id,
peer_id: Some(peer_id),
variant: Some(proto::create_buffer_for_peer::Variant::State(state)),
};
// Only stream operations if the initial state was sent successfully.
if client.send(initial_state).log_err().is_some() {
let client = client.clone();
cx.background_executor()
.spawn(async move {
let mut chunks = split_operations(operations).peekable();
while let Some(chunk) = chunks.next() {
let is_last = chunks.peek().is_none();
client.send(proto::CreateBufferForPeer {
project_id,
peer_id: Some(peer_id),
variant: Some(proto::create_buffer_for_peer::Variant::Chunk(
proto::BufferChunk {
buffer_id: buffer_id.into(),
operations: chunk,
is_last,
},
)),
})?;
}
anyhow::Ok(())
})
.await
.log_err();
}
Ok(())
})
}
/// Clears the entire shared-buffer registry for all peers.
pub fn forget_shared_buffers(&mut self) {
self.shared_buffers.clear();
}
/// Clears the shared-buffer registry for a single peer.
pub fn forget_shared_buffers_for(&mut self, peer_id: &proto::PeerId) {
self.shared_buffers.remove(peer_id);
}
/// Re-keys a peer's shared buffers under a new peer id (e.g. after the
/// peer reconnects with a different id). No-op if the old id is unknown.
pub fn update_peer_id(&mut self, old_peer_id: &proto::PeerId, new_peer_id: proto::PeerId) {
if let Some(buffers) = self.shared_buffers.remove(old_peer_id) {
self.shared_buffers.insert(new_peer_id, buffers);
}
}
/// Read-only view of which buffers are shared with which peers.
pub fn shared_buffers(&self) -> &HashMap<proto::PeerId, HashSet<BufferId>> {
&self.shared_buffers
}
}
impl OpenBuffer {

View File

@@ -1,269 +0,0 @@
use anyhow::{anyhow, Context as _, Result};
use futures::{future::Shared, FutureExt};
use std::{
path::{Path, PathBuf},
sync::Arc,
};
use util::{parse_env_output, ResultExt};
use collections::HashMap;
use gpui::{AppContext, Context, Model, ModelContext, Task};
use settings::Settings as _;
use worktree::WorktreeId;
use crate::project_settings::{DirenvSettings, ProjectSettings};
/// Caches the environment variables available to a project, sourced either
/// from the Zed CLI invocation or from a login shell spawned in a worktree.
pub(crate) struct ProjectEnvironment {
/// Environment inherited from the Zed CLI, if the project was opened that way.
cli_environment: Option<HashMap<String, String>>,
/// Memoized in-flight/completed lookup so concurrent callers share one task.
get_environment_task: Option<Shared<Task<Option<HashMap<String, String>>>>>,
/// Per-worktree shell environments, cached after the first load.
cached_shell_environments: HashMap<WorktreeId, HashMap<String, String>>,
}
impl ProjectEnvironment {
/// Creates the model, seeded with the CLI environment when available.
pub(crate) fn new(
cli_environment: Option<HashMap<String, String>>,
cx: &mut AppContext,
) -> Model<Self> {
cx.new_model(|_| Self {
cli_environment,
get_environment_task: None,
cached_shell_environments: Default::default(),
})
}
/// Test constructor: pre-populates the per-worktree cache so tests never
/// spawn a real shell.
#[cfg(any(test, feature = "test-support"))]
pub(crate) fn test(
shell_environments: &[(WorktreeId, HashMap<String, String>)],
cx: &mut AppContext,
) -> Model<Self> {
cx.new_model(|_| Self {
cli_environment: None,
get_environment_task: None,
cached_shell_environments: shell_environments
.iter()
.cloned()
.collect::<HashMap<_, _>>(),
})
}
/// Drops the cached shell environment for a removed worktree.
pub(crate) fn remove_worktree_environment(&mut self, worktree_id: WorktreeId) {
self.cached_shell_environments.remove(&worktree_id);
}
/// Returns the inherited CLI environment, if this project was opened from the Zed CLI.
/// The returned map is tagged with the `ZED_ENVIRONMENT` origin marker.
pub(crate) fn get_cli_environment(&self) -> Option<HashMap<String, String>> {
if let Some(mut env) = self.cli_environment.clone() {
set_origin_marker(&mut env, EnvironmentOrigin::Cli);
Some(env)
} else {
None
}
} 
/// Returns the project environment, if possible.
/// If the project was opened from the CLI, then the inherited CLI environment is returned.
/// If it wasn't opened from the CLI, and a worktree is given, then a shell is spawned in
/// the worktree's path, to get environment variables as if the user has `cd`'d into
/// the worktrees path.
///
/// The result is memoized: the first call builds a shared task and every
/// subsequent call (regardless of arguments) awaits the same task.
pub(crate) fn get_environment(
&mut self,
worktree_id: Option<WorktreeId>,
worktree_abs_path: Option<Arc<Path>>,
cx: &ModelContext<Self>,
) -> Shared<Task<Option<HashMap<String, String>>>> {
if let Some(task) = self.get_environment_task.as_ref() {
task.clone()
} else {
let task = self
.build_environment_task(worktree_id, worktree_abs_path, cx)
.shared();
self.get_environment_task = Some(task.clone());
task
}
}
// Chooses the environment source: CLI env wins, then a worktree shell
// (only when both id and abs path are present), otherwise nothing.
fn build_environment_task(
&mut self,
worktree_id: Option<WorktreeId>,
worktree_abs_path: Option<Arc<Path>>,
cx: &ModelContext<Self>,
) -> Task<Option<HashMap<String, String>>> {
let worktree = worktree_id.zip(worktree_abs_path);
let cli_environment = self.get_cli_environment();
if cli_environment.is_some() {
Task::ready(cli_environment)
} else if let Some((worktree_id, worktree_abs_path)) = worktree {
self.get_worktree_env(worktree_id, worktree_abs_path, cx)
} else {
Task::ready(None)
}
}
// Loads (or returns the cached) shell environment for a worktree by
// spawning a login shell in its directory on the background executor.
fn get_worktree_env(
&mut self,
worktree_id: WorktreeId,
worktree_abs_path: Arc<Path>,
cx: &ModelContext<Self>,
) -> Task<Option<HashMap<String, String>>> {
let cached_env = self.cached_shell_environments.get(&worktree_id).cloned();
if let Some(env) = cached_env {
Task::ready(Some(env))
} else {
let load_direnv = ProjectSettings::get_global(cx).load_direnv.clone();
cx.spawn(|this, mut cx| async move {
let mut shell_env = cx
.background_executor()
.spawn({
let cwd = worktree_abs_path.clone();
async move { load_shell_environment(&cwd, &load_direnv).await }
})
.await
.ok();
if let Some(shell_env) = shell_env.as_mut() {
// Cache first (without the marker), then tag the copy we return.
this.update(&mut cx, |this, _| {
this.cached_shell_environments
.insert(worktree_id, shell_env.clone())
})
.log_err();
set_origin_marker(shell_env, EnvironmentOrigin::WorktreeShell);
}
shell_env
})
}
}
}
/// Tags an environment map with the `ZED_ENVIRONMENT` variable recording
/// where the environment came from.
fn set_origin_marker(env: &mut HashMap<String, String>, origin: EnvironmentOrigin) {
env.insert(ZED_ENVIRONMENT_ORIGIN_MARKER.to_string(), origin.into());
}
/// Name of the marker variable injected into every environment map we hand out.
const ZED_ENVIRONMENT_ORIGIN_MARKER: &str = "ZED_ENVIRONMENT";
/// Where an environment map was sourced from.
enum EnvironmentOrigin {
// Inherited from the Zed CLI invocation.
Cli,
// Captured from a login shell spawned in a worktree directory.
WorktreeShell,
}
/// Serializes the origin into the string stored in the `ZED_ENVIRONMENT`
/// marker variable.
///
/// Implemented as `From` rather than a hand-written `Into` (per the standard
/// library's guidance and Clippy's `from_over_into` lint): the blanket
/// `impl<T, U: From<T>> Into<U> for T` keeps every existing `.into()` call
/// site working unchanged, while also allowing `String::from(origin)`.
impl From<EnvironmentOrigin> for String {
    fn from(origin: EnvironmentOrigin) -> Self {
        match origin {
            EnvironmentOrigin::Cli => "cli".into(),
            EnvironmentOrigin::WorktreeShell => "worktree-shell".into(),
        }
    }
}
/// Captures the environment a login shell would have after `cd`-ing into
/// `dir`, optionally pre-seeded with direnv's exported variables.
///
/// Returns an error if `$SHELL` is unset, the shell exits non-zero, or its
/// output cannot be parsed.
async fn load_shell_environment(
dir: &Path,
load_direnv: &DirenvSettings,
) -> Result<HashMap<String, String>> {
let direnv_environment = match load_direnv {
DirenvSettings::ShellHook => None,
DirenvSettings::Direct => load_direnv_environment(dir).await?,
}
.unwrap_or(HashMap::default());
let marker = "ZED_SHELL_START";
let shell = std::env::var("SHELL").context(
"SHELL environment variable is not assigned so we can't source login environment variables",
)?;
// What we're doing here is to spawn a shell and then `cd` into
// the project directory to get the env in there as if the user
// `cd`'d into it. We do that because tools like direnv, asdf, ...
// hook into `cd` and only set up the env after that.
//
// If the user selected `Direct` for direnv, `direnv_environment`
// already contains direnv's exported variables, and passing them via
// `.envs()` below lets direnv recognize that state — so it is okay
// for the shell hook to run even in direct mode.
//
// In certain shells we need to execute additional_command in order to
// trigger the behavior of direnv, etc.
//
//
// The `exit 0` is the result of hours of debugging, trying to find out
// why running this command here, without `exit 0`, would mess
// up signal process for our process so that `ctrl-c` doesn't work
// anymore.
//
// We still don't know why `$SHELL -l -i -c '/usr/bin/env -0'` would
// do that, but it does, and `exit 0` helps.
let additional_command = PathBuf::from(&shell)
.file_name()
.and_then(|f| f.to_str())
.and_then(|shell| match shell {
// fish only fires its `cd`-style hooks when a prompt is emitted.
"fish" => Some("emit fish_prompt;"),
_ => None,
});
// Everything printed after `marker` is the environment dump.
let command = format!(
"cd '{}';{} printf '%s' {marker}; /usr/bin/env; exit 0;",
dir.display(),
additional_command.unwrap_or("")
);
let output = smol::process::Command::new(&shell)
.args(["-i", "-c", &command])
.envs(direnv_environment)
.output()
.await
.context("failed to spawn login shell to source login environment variables")?;
anyhow::ensure!(
output.status.success(),
"login shell exited with error {:?}",
output.status
);
let stdout = String::from_utf8_lossy(&output.stdout);
// Skip any shell banner/noise printed before the marker.
let env_output_start = stdout.find(marker).ok_or_else(|| {
anyhow!(
"failed to parse output of `env` command in login shell: {}",
stdout
)
})?;
let mut parsed_env = HashMap::default();
let env_output = &stdout[env_output_start + marker.len()..];
parse_env_output(env_output, |key, value| {
parsed_env.insert(key, value);
});
Ok(parsed_env)
}
/// Asks direnv for the environment it would export in `dir`.
///
/// Returns `Ok(None)` when direnv is not installed or exports nothing;
/// errors if direnv fails to run, exits non-zero, or emits unparsable JSON.
async fn load_direnv_environment(dir: &Path) -> Result<Option<HashMap<String, String>>> {
    // If the `direnv` binary cannot be found, there is nothing to load.
    let binary = match which::which("direnv") {
        Ok(path) => path,
        Err(_) => return Ok(None),
    };

    let exported = smol::process::Command::new(binary)
        .current_dir(dir)
        .args(["export", "json"])
        .output()
        .await
        .context("failed to spawn direnv to get local environment variables")?;

    anyhow::ensure!(
        exported.status.success(),
        "direnv exited with error {:?}",
        exported.status
    );

    let stdout = String::from_utf8_lossy(&exported.stdout);
    if stdout.is_empty() {
        // No .envrc in effect for this directory.
        return Ok(None);
    }

    let env = serde_json::from_str(&stdout).context("failed to parse direnv output")?;
    Ok(Some(env))
}

File diff suppressed because it is too large Load Diff

View File

@@ -3941,8 +3941,7 @@ async fn test_search(cx: &mut gpui::TestAppContext) {
true,
false,
Default::default(),
Default::default(),
None
Default::default()
)
.unwrap(),
cx
@@ -3975,8 +3974,7 @@ async fn test_search(cx: &mut gpui::TestAppContext) {
true,
false,
Default::default(),
Default::default(),
None,
Default::default()
)
.unwrap(),
cx
@@ -4019,8 +4017,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
true,
false,
PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
Default::default(),
None
Default::default()
)
.unwrap(),
cx
@@ -4040,8 +4037,7 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
true,
false,
PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
Default::default(),
None
Default::default()
)
.unwrap(),
cx
@@ -4067,7 +4063,6 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
Default::default(),
None,
).unwrap(),
cx
)
@@ -4092,7 +4087,6 @@ async fn test_search_with_inclusions(cx: &mut gpui::TestAppContext) {
PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
Default::default(),
None,
).unwrap(),
cx
)
@@ -4137,7 +4131,6 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
false,
Default::default(),
PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
None,
)
.unwrap(),
cx
@@ -4162,8 +4155,7 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
true,
false,
Default::default(),
PathMatcher::new(&["*.rs".to_owned()]).unwrap(),
None,
PathMatcher::new(&["*.rs".to_owned()]).unwrap()
)
.unwrap(),
cx
@@ -4188,7 +4180,6 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
Default::default(),
PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
None,
).unwrap(),
cx
@@ -4213,7 +4204,6 @@ async fn test_search_with_exclusions(cx: &mut gpui::TestAppContext) {
Default::default(),
PathMatcher::new(&["*.rs".to_owned(), "*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
None,
).unwrap(),
cx
@@ -4253,7 +4243,6 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
false,
PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
PathMatcher::new(&["*.odd".to_owned()]).unwrap(),
None,
)
.unwrap(),
cx
@@ -4274,7 +4263,6 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
false,
PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
None,
).unwrap(),
cx
)
@@ -4294,7 +4282,6 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
false,
PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
None,
)
.unwrap(),
cx
@@ -4315,7 +4302,6 @@ async fn test_search_with_exclusions_and_inclusions(cx: &mut gpui::TestAppContex
false,
PathMatcher::new(&["*.ts".to_owned(), "*.odd".to_owned()]).unwrap(),
PathMatcher::new(&["*.rs".to_owned(), "*.odd".to_owned()]).unwrap(),
None,
)
.unwrap(),
cx
@@ -4368,8 +4354,7 @@ async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppCo
true,
false,
PathMatcher::new(&["worktree-a/*.rs".to_owned()]).unwrap(),
Default::default(),
None,
Default::default()
)
.unwrap(),
cx
@@ -4388,8 +4373,7 @@ async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppCo
true,
false,
PathMatcher::new(&["worktree-b/*.rs".to_owned()]).unwrap(),
Default::default(),
None,
Default::default()
)
.unwrap(),
cx
@@ -4409,8 +4393,7 @@ async fn test_search_multiple_worktrees_with_inclusions(cx: &mut gpui::TestAppCo
true,
false,
PathMatcher::new(&["*.ts".to_owned()]).unwrap(),
Default::default(),
None,
Default::default()
)
.unwrap(),
cx
@@ -4464,8 +4447,7 @@ async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
false,
false,
Default::default(),
Default::default(),
None,
Default::default()
)
.unwrap(),
cx
@@ -4486,8 +4468,7 @@ async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
false,
true,
Default::default(),
Default::default(),
None,
Default::default()
)
.unwrap(),
cx
@@ -4514,7 +4495,7 @@ async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
"Unrestricted search with ignored directories should find every file with the query"
);
let files_to_include = PathMatcher::new(&["node_modules/prettier/**".to_owned()]).unwrap();
let files_to_include = PathMatcher::new(&["/dir/node_modules/prettier/**".to_owned()]).unwrap();
let files_to_exclude = PathMatcher::new(&["*.ts".to_owned()]).unwrap();
let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
assert_eq!(
@@ -4527,7 +4508,6 @@ async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
true,
files_to_include,
files_to_exclude,
None,
)
.unwrap(),
cx
@@ -4542,61 +4522,6 @@ async fn test_search_in_gitignored_dirs(cx: &mut gpui::TestAppContext) {
);
}
#[gpui::test]
async fn test_search_ordering(cx: &mut gpui::TestAppContext) {
init_test(cx);
let fs = FakeFs::new(cx.background_executor.clone());
fs.insert_tree(
"/dir",
json!({
".git": {},
".gitignore": "**/target\n/node_modules\n",
"aaa.txt": "key:value",
"bbb": {
"index.txt": "index_key:index_value"
},
"node_modules": {
"10 eleven": "key",
"1 two": "key"
},
}),
)
.await;
let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
let mut search = project.update(cx, |project, cx| {
project.search(
SearchQuery::text(
"key",
false,
false,
true,
Default::default(),
Default::default(),
None,
)
.unwrap(),
cx,
)
});
fn file_name(search_result: Option<SearchResult>, cx: &mut gpui::TestAppContext) -> String {
match search_result.unwrap() {
SearchResult::Buffer { buffer, .. } => buffer.read_with(cx, |buffer, _| {
buffer.file().unwrap().path().to_string_lossy().to_string()
}),
_ => panic!("Expected buffer"),
}
}
assert_eq!(file_name(search.next().await, cx), "bbb/index.txt");
assert_eq!(file_name(search.next().await, cx), "node_modules/1 two");
assert_eq!(file_name(search.next().await, cx), "node_modules/10 eleven");
assert_eq!(file_name(search.next().await, cx), "aaa.txt");
assert!(search.next().await.is_none())
}
#[test]
fn test_glob_literal_prefix() {
assert_eq!(glob_literal_prefix("**/*.js"), "");

View File

@@ -30,7 +30,6 @@ pub struct SearchInputs {
query: Arc<str>,
files_to_include: PathMatcher,
files_to_exclude: PathMatcher,
buffers: Option<Vec<Model<Buffer>>>,
}
impl SearchInputs {
@@ -43,9 +42,6 @@ impl SearchInputs {
pub fn files_to_exclude(&self) -> &PathMatcher {
&self.files_to_exclude
}
pub fn buffers(&self) -> &Option<Vec<Model<Buffer>>> {
&self.buffers
}
}
#[derive(Clone, Debug)]
pub enum SearchQuery {
@@ -77,7 +73,6 @@ impl SearchQuery {
include_ignored: bool,
files_to_include: PathMatcher,
files_to_exclude: PathMatcher,
buffers: Option<Vec<Model<Buffer>>>,
) -> Result<Self> {
let query = query.to_string();
let search = AhoCorasickBuilder::new()
@@ -87,7 +82,6 @@ impl SearchQuery {
query: query.into(),
files_to_exclude,
files_to_include,
buffers,
};
Ok(Self::Text {
search: Arc::new(search),
@@ -106,7 +100,6 @@ impl SearchQuery {
include_ignored: bool,
files_to_include: PathMatcher,
files_to_exclude: PathMatcher,
buffers: Option<Vec<Model<Buffer>>>,
) -> Result<Self> {
let mut query = query.to_string();
let initial_query = Arc::from(query.as_str());
@@ -127,7 +120,6 @@ impl SearchQuery {
query: initial_query,
files_to_exclude,
files_to_include,
buffers,
};
Ok(Self::Regex {
regex,
@@ -149,7 +141,6 @@ impl SearchQuery {
message.include_ignored,
deserialize_path_matches(&message.files_to_include)?,
deserialize_path_matches(&message.files_to_exclude)?,
None,
)
} else {
Self::text(
@@ -159,7 +150,6 @@ impl SearchQuery {
message.include_ignored,
deserialize_path_matches(&message.files_to_include)?,
deserialize_path_matches(&message.files_to_exclude)?,
None,
)
}
}
@@ -173,7 +163,6 @@ impl SearchQuery {
message.include_ignored,
deserialize_path_matches(&message.files_to_include)?,
deserialize_path_matches(&message.files_to_exclude)?,
None, // search opened only don't need search remote
)
} else {
Self::text(
@@ -183,7 +172,6 @@ impl SearchQuery {
message.include_ignored,
deserialize_path_matches(&message.files_to_include)?,
deserialize_path_matches(&message.files_to_exclude)?,
None, // search opened only don't need search remote
)
}
}
@@ -432,31 +420,23 @@ impl SearchQuery {
self.as_inner().files_to_exclude()
}
pub fn buffers(&self) -> Option<&Vec<Model<Buffer>>> {
self.as_inner().buffers.as_ref()
}
pub fn is_opened_only(&self) -> bool {
self.as_inner().buffers.is_some()
}
pub fn filters_path(&self) -> bool {
!(self.files_to_exclude().sources().is_empty()
&& self.files_to_include().sources().is_empty())
}
pub fn file_matches(&self, file_path: &Path) -> bool {
let mut path = file_path.to_path_buf();
loop {
if self.files_to_exclude().is_match(&path) {
return false;
} else if self.files_to_include().sources().is_empty()
|| self.files_to_include().is_match(&path)
{
return true;
} else if !path.pop() {
return false;
/// Returns whether a file at `file_path` passes this query's include/exclude
/// path filters. The path itself and each of its ancestors are tested, so a
/// filter matching a parent directory applies to everything beneath it;
/// exclusion is checked first at every level.
///
/// `None` (a buffer with no backing file) matches only when there are no
/// include filters to satisfy.
pub fn file_matches(&self, file_path: Option<&Path>) -> bool {
    match file_path {
        Some(file_path) => {
            // `Path::ancestors()` yields the path, then each parent in turn
            // (down to the empty path) — the same sequence as the previous
            // `to_path_buf()`/`pop()` loop, but without allocating.
            for ancestor in file_path.ancestors() {
                if self.files_to_exclude().is_match(ancestor) {
                    return false;
                }
                if self.files_to_include().sources().is_empty()
                    || self.files_to_include().is_match(ancestor)
                {
                    return true;
                }
            }
            false
        }
        None => self.files_to_include().sources().is_empty(),
    }
}
pub fn as_inner(&self) -> &SearchInputs {

View File

@@ -112,15 +112,7 @@ impl Project {
let (completion_tx, completion_rx) = bounded(1);
// Start with the environment that we might have inherited from the Zed CLI.
let mut env = self
.environment
.read(cx)
.get_cli_environment()
.unwrap_or_default();
// Then extend it with the explicit env variables from the settings, so they take
// precedence.
env.extend(settings.env.clone());
let mut env = settings.env.clone();
let local_path = if ssh_command.is_none() {
path.clone()

View File

@@ -1,35 +1,32 @@
use std::{
path::{Path, PathBuf},
sync::Arc,
cmp,
collections::VecDeque,
path::PathBuf,
sync::{
atomic::{AtomicUsize, Ordering::SeqCst},
Arc,
},
};
use anyhow::{anyhow, Context as _, Result};
use collections::{HashMap, HashSet};
use fs::Fs;
use futures::{future::BoxFuture, SinkExt};
use gpui::{AppContext, AsyncAppContext, EntityId, EventEmitter, Model, ModelContext, WeakModel};
use postage::oneshot;
use rpc::{
proto::{self, AnyProtoClient},
TypedEnvelope,
};
use smol::{
channel::{Receiver, Sender},
future::FutureExt,
lock::Semaphore,
stream::StreamExt,
};
use text::ReplicaId;
use util::{paths::compare_paths, ResultExt};
use worktree::{Entry, ProjectEntryId, Worktree, WorktreeId, WorktreeSettings};
use util::ResultExt;
use worktree::{Entry, ProjectEntryId, Snapshot, Worktree, WorktreeId, WorktreeSettings};
use crate::{search::SearchQuery, ProjectPath};
struct MatchingEntry {
worktree_path: Arc<Path>,
path: ProjectPath,
respond: oneshot::Sender<ProjectPath>,
}
pub struct WorktreeStore {
is_shared: bool,
worktrees: Vec<WorktreeHandle>,
@@ -269,15 +266,17 @@ impl WorktreeStore {
}
}
/// search over all worktrees and return buffers that *might* match the search.
/// search over all worktrees (ignoring open buffers)
/// the query is tested against the file on disk and matching files are returned.
pub fn find_search_candidates(
&self,
query: SearchQuery,
limit: usize,
open_entries: HashSet<ProjectEntryId>,
skip_entries: HashSet<ProjectEntryId>,
fs: Arc<dyn Fs>,
cx: &ModelContext<Self>,
) -> Receiver<ProjectPath> {
let (matching_paths_tx, matching_paths_rx) = smol::channel::unbounded();
let snapshots = self
.visible_worktrees(cx)
.filter_map(|tree| {
@@ -285,248 +284,270 @@ impl WorktreeStore {
Some((tree.snapshot(), tree.as_local()?.settings()))
})
.collect::<Vec<_>>();
let include_root = snapshots.len() > 1;
let path_count: usize = snapshots
.iter()
.map(|(snapshot, _)| {
if query.include_ignored() {
snapshot.file_count()
} else {
snapshot.visible_file_count()
}
})
.sum();
let remaining_paths = AtomicUsize::new(limit);
if path_count == 0 {
return matching_paths_rx;
}
let workers = cx.background_executor().num_cpus().min(path_count);
let paths_per_worker = (path_count + workers - 1) / workers;
let executor = cx.background_executor().clone();
// We want to return entries in the order they are in the worktrees, so we have one
// thread that iterates over the worktrees (and ignored directories) as necessary,
// and pushes a oneshot::Receiver to the output channel and a oneshot::Sender to the filter
// channel.
// We spawn a number of workers that take items from the filter channel and check the query
// against the version of the file on disk.
let (filter_tx, filter_rx) = smol::channel::bounded(64);
let (output_tx, mut output_rx) = smol::channel::bounded(64);
let (matching_paths_tx, matching_paths_rx) = smol::channel::unbounded();
let input = cx.background_executor().spawn({
let fs = fs.clone();
let query = query.clone();
async move {
Self::find_candidate_paths(
fs,
snapshots,
open_entries,
query,
filter_tx,
output_tx,
)
.await
.log_err();
}
});
const MAX_CONCURRENT_FILE_SCANS: usize = 64;
let filters = cx.background_executor().spawn(async move {
let fs = &fs;
let query = &query;
executor
.scoped(move |scope| {
for _ in 0..MAX_CONCURRENT_FILE_SCANS {
let filter_rx = filter_rx.clone();
scope.spawn(async move {
Self::filter_paths(fs, filter_rx, query).await.log_err();
})
}
})
.await;
});
cx.background_executor()
.spawn(async move {
let mut matched = 0;
while let Some(mut receiver) = output_rx.next().await {
let Some(path) = receiver.next().await else {
continue;
};
let Ok(_) = matching_paths_tx.send(path).await else {
break;
};
matched += 1;
if matched == limit {
break;
}
}
drop(input);
drop(filters);
let fs = &fs;
let query = &query;
let matching_paths_tx = &matching_paths_tx;
let snapshots = &snapshots;
let remaining_paths = &remaining_paths;
executor
.scoped(move |scope| {
let max_concurrent_workers = Arc::new(Semaphore::new(workers));
for worker_ix in 0..workers {
let snapshots = snapshots.clone();
let worker_start_ix = worker_ix * paths_per_worker;
let worker_end_ix = worker_start_ix + paths_per_worker;
let skip_entries = skip_entries.clone();
let limiter = Arc::clone(&max_concurrent_workers);
scope.spawn(async move {
let _guard = limiter.acquire().await;
Self::search_snapshots(
&snapshots,
worker_start_ix,
worker_end_ix,
&query,
remaining_paths,
&matching_paths_tx,
&skip_entries,
include_root,
fs,
)
.await;
});
}
if query.include_ignored() {
for (snapshot, settings) in snapshots {
for ignored_entry in
snapshot.entries(true, 0).filter(|e| e.is_ignored)
{
let limiter = Arc::clone(&max_concurrent_workers);
scope.spawn(async move {
let _guard = limiter.acquire().await;
if remaining_paths.load(SeqCst) == 0 {
return;
}
Self::search_ignored_entry(
&snapshot,
&settings,
ignored_entry,
&fs,
&query,
remaining_paths,
&matching_paths_tx,
)
.await;
});
}
}
}
})
.await
})
.detach();
return matching_paths_rx;
}
fn scan_ignored_dir<'a>(
fs: &'a Arc<dyn Fs>,
snapshot: &'a worktree::Snapshot,
path: &'a Path,
query: &'a SearchQuery,
#[allow(clippy::too_many_arguments)]
async fn search_snapshots(
snapshots: &Vec<(worktree::Snapshot, WorktreeSettings)>,
worker_start_ix: usize,
worker_end_ix: usize,
query: &SearchQuery,
remaining_paths: &AtomicUsize,
results_tx: &Sender<ProjectPath>,
skip_entries: &HashSet<ProjectEntryId>,
include_root: bool,
filter_tx: &'a Sender<MatchingEntry>,
output_tx: &'a Sender<oneshot::Receiver<ProjectPath>>,
) -> BoxFuture<'a, Result<()>> {
async move {
let abs_path = snapshot.abs_path().join(&path);
let Some(mut files) = fs
.read_dir(&abs_path)
.await
.with_context(|| format!("listing ignored path {abs_path:?}"))
.log_err()
else {
return Ok(());
};
fs: &Arc<dyn Fs>,
) {
let mut snapshot_start_ix = 0;
let mut abs_path = PathBuf::new();
let mut results = Vec::new();
while let Some(Ok(file)) = files.next().await {
let Some(metadata) = fs
.metadata(&file)
.await
.with_context(|| format!("fetching fs metadata for {abs_path:?}"))
.log_err()
.flatten()
else {
continue;
};
if metadata.is_symlink || metadata.is_fifo {
continue;
}
results.push((
file.strip_prefix(snapshot.abs_path())?.to_path_buf(),
!metadata.is_dir,
))
}
results.sort_by(|(a_path, a_is_file), (b_path, b_is_file)| {
compare_paths((a_path, *a_is_file), (b_path, *b_is_file))
});
for (path, is_file) in results {
if is_file {
if query.filters_path() {
let matched_path = if include_root {
let mut full_path = PathBuf::from(snapshot.root_name());
full_path.push(&path);
query.file_matches(&full_path)
} else {
query.file_matches(&path)
};
if !matched_path {
continue;
}
}
let (tx, rx) = oneshot::channel();
output_tx.send(rx).await?;
filter_tx
.send(MatchingEntry {
respond: tx,
worktree_path: snapshot.abs_path().clone(),
path: ProjectPath {
worktree_id: snapshot.id(),
path: Arc::from(path),
},
})
.await?;
for (snapshot, _) in snapshots {
let snapshot_end_ix = snapshot_start_ix
+ if query.include_ignored() {
snapshot.file_count()
} else {
Self::scan_ignored_dir(
fs,
snapshot,
&path,
query,
include_root,
filter_tx,
output_tx,
)
.await?;
}
}
Ok(())
}
.boxed()
}
snapshot.visible_file_count()
};
if worker_end_ix <= snapshot_start_ix {
break;
} else if worker_start_ix > snapshot_end_ix {
snapshot_start_ix = snapshot_end_ix;
continue;
} else {
let start_in_snapshot = worker_start_ix.saturating_sub(snapshot_start_ix);
let end_in_snapshot = cmp::min(worker_end_ix, snapshot_end_ix) - snapshot_start_ix;
async fn find_candidate_paths(
fs: Arc<dyn Fs>,
snapshots: Vec<(worktree::Snapshot, WorktreeSettings)>,
open_entries: HashSet<ProjectEntryId>,
query: SearchQuery,
filter_tx: Sender<MatchingEntry>,
output_tx: Sender<oneshot::Receiver<ProjectPath>>,
) -> Result<()> {
let include_root = snapshots.len() > 1;
for (snapshot, settings) in snapshots {
let mut entries: Vec<_> = snapshot.entries(query.include_ignored(), 0).collect();
entries.sort_by(|a, b| compare_paths((&a.path, a.is_file()), (&b.path, b.is_file())));
for entry in entries {
if entry.is_dir() && entry.is_ignored {
if !settings.is_path_excluded(&entry.path) {
Self::scan_ignored_dir(
&fs,
&snapshot,
&entry.path,
&query,
include_root,
&filter_tx,
&output_tx,
)
.await?;
for entry in snapshot
.files(false, start_in_snapshot)
.take(end_in_snapshot - start_in_snapshot)
{
if results_tx.is_closed() {
break;
}
if skip_entries.contains(&entry.id) {
continue;
}
if entry.is_fifo {
continue;
}
continue;
}
if entry.is_fifo || !entry.is_file() {
continue;
}
if query.filters_path() {
let matched_path = if include_root {
let mut full_path = PathBuf::from(snapshot.root_name());
full_path.push(&entry.path);
query.file_matches(&full_path)
query.file_matches(Some(&full_path))
} else {
query.file_matches(&entry.path)
query.file_matches(Some(&entry.path))
};
if !matched_path {
continue;
let matches = if matched_path {
abs_path.clear();
abs_path.push(&snapshot.abs_path());
abs_path.push(&entry.path);
if let Some(file) = fs.open_sync(&abs_path).await.log_err() {
query.detect(file).unwrap_or(false)
} else {
false
}
} else {
false
};
if matches {
if remaining_paths
.fetch_update(SeqCst, SeqCst, |value| {
if value > 0 {
Some(value - 1)
} else {
None
}
})
.is_err()
{
return;
}
let project_path = ProjectPath {
worktree_id: snapshot.id(),
path: entry.path.clone(),
};
if results_tx.send(project_path).await.is_err() {
return;
}
}
}
let (mut tx, rx) = oneshot::channel();
if open_entries.contains(&entry.id) {
tx.send(ProjectPath {
worktree_id: snapshot.id(),
path: entry.path.clone(),
})
.await?;
} else {
filter_tx
.send(MatchingEntry {
respond: tx,
worktree_path: snapshot.abs_path().clone(),
path: ProjectPath {
worktree_id: snapshot.id(),
path: entry.path.clone(),
},
})
.await?;
}
output_tx.send(rx).await?;
snapshot_start_ix = snapshot_end_ix;
}
}
Ok(())
}
async fn filter_paths(
async fn search_ignored_entry(
snapshot: &Snapshot,
settings: &WorktreeSettings,
ignored_entry: &Entry,
fs: &Arc<dyn Fs>,
mut input: Receiver<MatchingEntry>,
query: &SearchQuery,
) -> Result<()> {
while let Some(mut entry) = input.next().await {
let abs_path = entry.worktree_path.join(&entry.path.path);
let Some(file) = fs.open_sync(&abs_path).await.log_err() else {
continue;
};
if query.detect(file).unwrap_or(false) {
entry.respond.send(entry.path).await?
remaining_paths: &AtomicUsize,
counter_tx: &Sender<ProjectPath>,
) {
let mut ignored_paths_to_process =
VecDeque::from([snapshot.abs_path().join(&ignored_entry.path)]);
while let Some(ignored_abs_path) = ignored_paths_to_process.pop_front() {
let metadata = fs
.metadata(&ignored_abs_path)
.await
.with_context(|| format!("fetching fs metadata for {ignored_abs_path:?}"))
.log_err()
.flatten();
if let Some(fs_metadata) = metadata {
if fs_metadata.is_dir {
let files = fs
.read_dir(&ignored_abs_path)
.await
.with_context(|| format!("listing ignored path {ignored_abs_path:?}"))
.log_err();
if let Some(mut subfiles) = files {
while let Some(subfile) = subfiles.next().await {
if let Some(subfile) = subfile.log_err() {
ignored_paths_to_process.push_back(subfile);
}
}
}
} else if !fs_metadata.is_symlink {
if !query.file_matches(Some(&ignored_abs_path))
|| settings.is_path_excluded(&ignored_entry.path)
{
continue;
}
let matches = if let Some(file) = fs
.open_sync(&ignored_abs_path)
.await
.with_context(|| format!("Opening ignored path {ignored_abs_path:?}"))
.log_err()
{
query.detect(file).unwrap_or(false)
} else {
false
};
if matches {
if remaining_paths
.fetch_update(SeqCst, SeqCst, |value| {
if value > 0 {
Some(value - 1)
} else {
None
}
})
.is_err()
{
return;
}
let project_path = ProjectPath {
worktree_id: snapshot.id(),
path: Arc::from(
ignored_abs_path
.strip_prefix(snapshot.abs_path())
.expect("scanning worktree-related files"),
),
};
if counter_tx.send(project_path).await.is_err() {
return;
}
}
}
}
}
Ok(())
}
pub async fn handle_create_project_entry(

View File

@@ -18,7 +18,7 @@ collections.workspace = true
db.workspace = true
editor.workspace = true
file_icons.workspace = true
indexmap.workspace = true
futures.workspace = true
git.workspace = true
gpui.workspace = true
menu.workspace = true

View File

@@ -23,12 +23,8 @@ use gpui::{
PromptLevel, Render, Stateful, Styled, Subscription, Task, UniformListScrollHandle, View,
ViewContext, VisualContext as _, WeakView, WindowContext,
};
use indexmap::IndexMap;
use menu::{Confirm, SelectFirst, SelectLast, SelectNext, SelectPrev};
use project::{
relativize_path, Entry, EntryKind, Fs, Project, ProjectEntryId, ProjectPath, Worktree,
WorktreeId,
};
use project::{Entry, EntryKind, Fs, Project, ProjectEntryId, ProjectPath, Worktree, WorktreeId};
use project_panel_settings::{ProjectPanelDockPosition, ProjectPanelSettings, ShowScrollbar};
use serde::{Deserialize, Serialize};
use std::{
@@ -499,8 +495,23 @@ impl ProjectPanel {
.action("Copy", Box::new(Copy))
.action("Duplicate", Box::new(Duplicate))
// TODO: Paste should always be visible, cbut disabled when clipboard is empty
.when(self.clipboard.as_ref().is_some(), |menu| {
menu.action("Paste", Box::new(Paste))
.when_some(self.clipboard.as_ref(), |menu, entry| {
let entries_for_worktree_id = (SelectedEntry {
worktree_id,
entry_id: ProjectEntryId::MIN,
})
..(SelectedEntry {
worktree_id,
entry_id: ProjectEntryId::MAX,
});
menu.when(
entry
.items()
.range(entries_for_worktree_id)
.next()
.is_some(),
|menu| menu.action("Paste", Box::new(Paste)),
)
})
.separator()
.action("Copy Path", Box::new(CopyPath))
@@ -1293,99 +1304,46 @@ impl ProjectPanel {
.as_ref()
.filter(|clipboard| !clipboard.items().is_empty())?;
enum PasteTask {
Rename(Task<Result<CreatedEntry>>),
Copy(Task<Result<Option<Entry>>>),
}
let mut paste_entry_tasks: IndexMap<(ProjectEntryId, bool), PasteTask> =
IndexMap::default();
let clip_is_cut = clipboard_entries.is_cut();
let mut tasks = Vec::new();
for clipboard_entry in clipboard_entries.items() {
if clipboard_entry.worktree_id != worktree_id {
return None;
}
let new_path =
self.create_paste_path(clipboard_entry, self.selected_entry_handle(cx)?, cx)?;
let clip_entry_id = clipboard_entry.entry_id;
let is_same_worktree = clipboard_entry.worktree_id == worktree_id;
let relative_worktree_source_path = if !is_same_worktree {
let target_base_path = worktree.read(cx).abs_path();
let clipboard_project_path =
self.project.read(cx).path_for_entry(clip_entry_id, cx)?;
let clipboard_abs_path = self
.project
.read(cx)
.absolute_path(&clipboard_project_path, cx)?;
Some(relativize_path(
&target_base_path,
clipboard_abs_path.as_path(),
))
if clipboard_entries.is_cut() {
self.project
.update(cx, |project, cx| {
project.rename_entry(clipboard_entry.entry_id, new_path, cx)
})
.detach_and_log_err(cx);
} else {
None
};
let task = if clip_is_cut && is_same_worktree {
let task = self.project.update(cx, |project, cx| {
project.rename_entry(clip_entry_id, new_path, cx)
project.copy_entry(clipboard_entry.entry_id, new_path, cx)
});
PasteTask::Rename(task)
} else {
let entry_id = if is_same_worktree {
clip_entry_id
} else {
entry.id
};
let task = self.project.update(cx, |project, cx| {
project.copy_entry(entry_id, relative_worktree_source_path, new_path, cx)
});
PasteTask::Copy(task)
};
let needs_delete = !is_same_worktree && clip_is_cut;
paste_entry_tasks.insert((clip_entry_id, needs_delete), task);
tasks.push(task);
}
}
cx.spawn(|project_panel, mut cx| async move {
let mut last_succeed = None;
let mut need_delete_ids = Vec::new();
for ((entry_id, need_delete), task) in paste_entry_tasks.into_iter() {
match task {
PasteTask::Rename(task) => {
if let Some(CreatedEntry::Included(entry)) = task.await.log_err() {
last_succeed = Some(entry.id);
}
}
PasteTask::Copy(task) => {
if let Some(Some(entry)) = task.await.log_err() {
last_succeed = Some(entry.id);
if need_delete {
need_delete_ids.push(entry_id);
}
}
}
}
}
// update selection
if let Some(entry_id) = last_succeed {
let entry_ids = futures::future::join_all(tasks).await;
if let Some(Some(entry)) = entry_ids
.into_iter()
.rev()
.find_map(|entry_id| entry_id.ok())
{
project_panel
.update(&mut cx, |project_panel, _cx| {
project_panel.selection = Some(SelectedEntry {
worktree_id,
entry_id,
entry_id: entry.id,
});
})
.ok();
}
// remove entry for cut in difference worktree
for entry_id in need_delete_ids {
project_panel
.update(&mut cx, |project_panel, cx| {
project_panel
.project
.update(cx, |project, cx| project.delete_entry(entry_id, true, cx))
.ok_or_else(|| anyhow!("no such entry"))
})??
.await?;
}
anyhow::Ok(())
})
.detach_and_log_err(cx);
.detach();
self.expand_entry(worktree_id, entry.id, cx);
Some(())
@@ -1884,7 +1842,7 @@ impl ProjectPanel {
)?;
self.project
.update(cx, |project, cx| {
project.copy_entry(selection.entry_id, None, new_path, cx)
project.copy_entry(selection.entry_id, new_path, cx)
})
.detach_and_log_err(cx)
}
@@ -3717,236 +3675,6 @@ mod tests {
);
}
#[gpui::test]
async fn test_cut_paste_between_different_worktrees(cx: &mut gpui::TestAppContext) {
init_test(cx);
let fs = FakeFs::new(cx.executor().clone());
fs.insert_tree(
"/root1",
json!({
"one.txt": "",
"two.txt": "",
"three.txt": "",
"a": {
"0": { "q": "", "r": "", "s": "" },
"1": { "t": "", "u": "" },
"2": { "v": "", "w": "", "x": "", "y": "" },
},
}),
)
.await;
fs.insert_tree(
"/root2",
json!({
"one.txt": "",
"two.txt": "",
"four.txt": "",
"b": {
"3": { "Q": "" },
"4": { "R": "", "S": "", "T": "", "U": "" },
},
}),
)
.await;
let project = Project::test(fs.clone(), ["/root1".as_ref(), "/root2".as_ref()], cx).await;
let workspace = cx.add_window(|cx| Workspace::test_new(project.clone(), cx));
let cx = &mut VisualTestContext::from_window(*workspace, cx);
let panel = workspace
.update(cx, |workspace, cx| ProjectPanel::new(workspace, cx))
.unwrap();
select_path(&panel, "root1/three.txt", cx);
panel.update(cx, |panel, cx| {
panel.cut(&Default::default(), cx);
});
select_path(&panel, "root2/one.txt", cx);
panel.update(cx, |panel, cx| {
panel.select_next(&Default::default(), cx);
panel.paste(&Default::default(), cx);
});
cx.executor().run_until_parked();
assert_eq!(
visible_entries_as_strings(&panel, 0..50, cx),
&[
//
"v root1",
" > a",
" one.txt",
" two.txt",
"v root2",
" > b",
" four.txt",
" one.txt",
" three.txt <== selected",
" two.txt",
]
);
select_path(&panel, "root1/a", cx);
panel.update(cx, |panel, cx| {
panel.cut(&Default::default(), cx);
});
select_path(&panel, "root2/two.txt", cx);
panel.update(cx, |panel, cx| {
panel.select_next(&Default::default(), cx);
panel.paste(&Default::default(), cx);
});
cx.executor().run_until_parked();
assert_eq!(
visible_entries_as_strings(&panel, 0..50, cx),
&[
//
"v root1",
" one.txt",
" two.txt",
"v root2",
" > a <== selected",
" > b",
" four.txt",
" one.txt",
" three.txt",
" two.txt",
]
);
}
#[gpui::test]
async fn test_copy_paste_between_different_worktrees(cx: &mut gpui::TestAppContext) {
init_test(cx);
let fs = FakeFs::new(cx.executor().clone());
fs.insert_tree(
"/root1",
json!({
"one.txt": "",
"two.txt": "",
"three.txt": "",
"a": {
"0": { "q": "", "r": "", "s": "" },
"1": { "t": "", "u": "" },
"2": { "v": "", "w": "", "x": "", "y": "" },
},
}),
)
.await;
fs.insert_tree(
"/root2",
json!({
"one.txt": "",
"two.txt": "",
"four.txt": "",
"b": {
"3": { "Q": "" },
"4": { "R": "", "S": "", "T": "", "U": "" },
},
}),
)
.await;
let project = Project::test(fs.clone(), ["/root1".as_ref(), "/root2".as_ref()], cx).await;
let workspace = cx.add_window(|cx| Workspace::test_new(project.clone(), cx));
let cx = &mut VisualTestContext::from_window(*workspace, cx);
let panel = workspace
.update(cx, |workspace, cx| ProjectPanel::new(workspace, cx))
.unwrap();
select_path(&panel, "root1/three.txt", cx);
panel.update(cx, |panel, cx| {
panel.copy(&Default::default(), cx);
});
select_path(&panel, "root2/one.txt", cx);
panel.update(cx, |panel, cx| {
panel.select_next(&Default::default(), cx);
panel.paste(&Default::default(), cx);
});
cx.executor().run_until_parked();
assert_eq!(
visible_entries_as_strings(&panel, 0..50, cx),
&[
//
"v root1",
" > a",
" one.txt",
" three.txt",
" two.txt",
"v root2",
" > b",
" four.txt",
" one.txt",
" three.txt <== selected",
" two.txt",
]
);
select_path(&panel, "root1/three.txt", cx);
panel.update(cx, |panel, cx| {
panel.copy(&Default::default(), cx);
});
select_path(&panel, "root2/two.txt", cx);
panel.update(cx, |panel, cx| {
panel.select_next(&Default::default(), cx);
panel.paste(&Default::default(), cx);
});
cx.executor().run_until_parked();
assert_eq!(
visible_entries_as_strings(&panel, 0..50, cx),
&[
//
"v root1",
" > a",
" one.txt",
" three.txt",
" two.txt",
"v root2",
" > b",
" four.txt",
" one.txt",
" three copy.txt <== selected",
" three.txt",
" two.txt",
]
);
select_path(&panel, "root1/a", cx);
panel.update(cx, |panel, cx| {
panel.copy(&Default::default(), cx);
});
select_path(&panel, "root2/two.txt", cx);
panel.update(cx, |panel, cx| {
panel.select_next(&Default::default(), cx);
panel.paste(&Default::default(), cx);
});
cx.executor().run_until_parked();
assert_eq!(
visible_entries_as_strings(&panel, 0..50, cx),
&[
//
"v root1",
" > a",
" one.txt",
" three.txt",
" two.txt",
"v root2",
" > a <== selected",
" > b",
" four.txt",
" one.txt",
" three copy.txt",
" three.txt",
" two.txt",
]
);
}
#[gpui::test]
async fn test_copy_paste_directory(cx: &mut gpui::TestAppContext) {
init_test(cx);
@@ -4632,9 +4360,9 @@ mod tests {
&[
"v project_root",
" v dir_1",
" v nested_dir",
" v nested_dir <== selected",
" file_1.py <== marked",
" file_a.py <== selected <== marked",
" file_a.py <== marked",
]
);
cx.simulate_modifiers_change(modifiers_with_shift);

View File

@@ -278,9 +278,7 @@ message Envelope {
LspExtSwitchSourceHeaderResponse lsp_ext_switch_source_header_response = 242;
FindSearchCandidates find_search_candidates = 243;
FindSearchCandidatesResponse find_search_candidates_response = 244;
CloseBuffer close_buffer = 245; // current max
FindSearchCandidatesResponse find_search_candidates_response = 244; // current max
}
reserved 158 to 161;
@@ -656,7 +654,6 @@ message CopyProjectEntry {
uint64 project_id = 1;
uint64 entry_id = 2;
string new_path = 3;
optional string relative_worktree_source_path = 4;
}
message DeleteProjectEntry {
@@ -872,11 +869,6 @@ message SaveBuffer {
optional ProjectPath new_path = 4;
}
message CloseBuffer {
uint64 project_id = 1;
uint64 buffer_id = 2;
}
message ProjectPath {
uint64 worktree_id = 1;
string path = 2;

View File

@@ -411,8 +411,7 @@ messages!(
(AddWorktree, Foreground),
(AddWorktreeResponse, Foreground),
(FindSearchCandidates, Background),
(FindSearchCandidatesResponse, Background),
(CloseBuffer, Foreground)
(FindSearchCandidatesResponse, Background)
);
request_messages!(

Some files were not shown because too many files have changed in this diff Show More