Compare commits

...

44 Commits

Author SHA1 Message Date
Conrad Irwin
7e22d05409 Cleanup
* Remove the mutexes and have methods return the detected
  encoding.
* Try to handle the BOM safely...
* Clean up a bunch of code to make it more Zeddy
2025-11-04 00:58:03 -07:00
R Aadarsh
4330e5ff14 - Change the order in which cx and encoding appear
- Add a licence symlink to `encodings`
2025-11-01 14:53:06 +05:30
R Aadarsh
2e18a5b662 Use Buffer::update and Buffer::update_encoding to set the encoding
field of `Buffer`
2025-11-01 11:32:46 +05:30
R Aadarsh
0b942fe8a7 - Use EncodingOptions for parameters
- Implement `From` for `Encoding` and `Clone` for `EncodingOptions`
2025-11-01 11:32:46 +05:30
R Aadarsh
08032bd1ff Move the invalid encoding UI from project to workspace 2025-11-01 11:32:45 +05:30
R Aadarsh
19b06e5089 Fix conflicts 2025-11-01 11:32:45 +05:30
R Aadarsh
0e89634a16 Fix conflicts 2025-11-01 11:32:45 +05:30
R Aadarsh
c130110987 Remove calls to lock and unwrap as they are no longer needed 2025-11-01 11:31:24 +05:30
R Aadarsh
b2187e5f93 - Fix an issue that caused UTF-8 to be used when a file was closed and
re-opened, while retaining the text.

- Fix an issue that prevented `InvalidBufferView` from being shown when
an incorrect encoding was chosen from the status bar.

- Centre the error message in `InvalidBufferView`.
2025-11-01 11:31:24 +05:30
R Aadarsh
8580683f01 - Move the functionality in fs::encodings to a separate crate
`encodings`

- `EncodingWrapper` is replaced with `encodings::Encoding`
2025-11-01 11:31:24 +05:30
R Aadarsh
1d95a18a11 Create a new crate encodings that will have all that is not related to
UI. The `encodings_ui` crate will only have UI related components in the
future.
2025-11-01 11:31:08 +05:30
R Aadarsh
0e3870450e Fix an issue that caused the buffer to be in a modified state after
choosing the correct encoding from `InvalidBufferView`
2025-11-01 11:31:07 +05:30
R Aadarsh
25c6af48d1 Fix an issue that caused a reopened buffer to use UTF-8 even if the
associated file was in a different encoding, rather than showing an
error.
2025-11-01 11:31:07 +05:30
R Aadarsh
b629b1f9ab Enable a file to be opened with an invalid encoding with the invalid
bytes replaced with replacement characters

- Fix UTF-16 file handling

- Introduce a `ForceOpen` action to allow users to open files despite
encoding errors

- Add `force` and `detect_utf16` flags

- Update UI to provide "Accept the Risk and Open" button for invalid
encoding files
2025-11-01 11:31:07 +05:30
R Aadarsh
0d3095a5d1 Clicking on Choose another encoding and selecting an encoding should
now open the file in the chosen encoding if it is valid or show the
invalid screen again if not.

(UTF-16 files aren't being handled correctly as of now)
2025-11-01 11:31:07 +05:30
R Aadarsh
d515ddd2ec - Add optional encoding parameter to Worktree::load_file
- Remove the parameter from `BufferStore::open_buffer` as it is not
needed
2025-11-01 11:31:07 +05:30
R Aadarsh
183bff580e - Add a field encoding in both Workspace and Project
- Pass encoding to `ProjectRegistry::open_path` and set the `encoding`
field in `Project`
2025-11-01 11:30:33 +05:30
R Aadarsh
44abaed857 - Return an error if the file contains invalid bytes for the specified
encoding instead of replacing the invalid bytes with replacement
 characters

 - Add `encoding` field in `Workspace`
2025-11-01 11:30:33 +05:30
R Aadarsh
37754b0fd5 Add a call to open_abs_path to enable opening of files from
`InvalidBufferView`
2025-11-01 11:30:33 +05:30
R Aadarsh
13ea13b0a4 Pass file path to EncodingSelector via Toggle action, if there is
one.
2025-11-01 11:30:33 +05:30
R Aadarsh
80631446e0 Update tests in copilot.rs to match the new load method signature 2025-11-01 11:30:33 +05:30
R Aadarsh
6561c890da - Make File::load use load_with_encoding
- Update the places where `File::load` is called

WARNING: The changes were committed with an error
2025-11-01 11:30:33 +05:30
R Aadarsh
9dc21f5a12 Re-Add load_with_encoding method to the Fs trait 2025-11-01 11:30:33 +05:30
R Aadarsh
591fe9f4a0 - Remove load_with_encoding from the Fs trait and make appropriate
changes to the file loading logic
- Add a module and an action in
`zed_actions` to open `EncodingSelector` from `InvalidBufferView`
- Add an `init` function in `encodings` crate to register the action handler

Clicking the second button in `InvalidBufferView` will cause the
`EncodingSelector` to be displayed. However, clicking on an encoding
currently does nothing.
2025-11-01 11:30:32 +05:30
R Aadarsh
dd5ddbfc43 - Remove unnecessary calls to collect
- Simplify UTF-16 BOM detection

Co-authored-by: CrazyboyQCD

Release Notes:
- Add support for opening and saving files in different encodings and a setting to enable or disable the indicator
2025-11-01 11:30:32 +05:30
R Aadarsh
fb6ec66887 Fix an error caused by UTF-16 BOM checking when there are no BOMs 2025-11-01 11:30:32 +05:30
R Aadarsh
2a193aeb66 Make the EncodingSaveOrReopenSelector open only when the current buffer is associated with a file 2025-11-01 11:30:32 +05:30
R Aadarsh
680a72b397 - Update encoding detection to automatically switch to UTF-16LE/BE if
BOM is present

- Change `encoding` in `Buffer` to `Arc<Mutex<&'static Encoding>>`

- Observe changes in the `encoding` field of `Buffer` and update the
status bar indicator
2025-11-01 11:30:32 +05:30
R Aadarsh
27376e051e Write BOM once for UTF-16 files 2025-11-01 11:26:46 +05:30
R Aadarsh
0e2eda0a23 Add UTF-16 LE/BE support and remove unused serde code
- Add UTF-16 LE and UTF-16 BE to encoding lists and mappings

- Implement encode/decode for UTF-16LE/BE in `EncodingWrapper`

- Remove unused serde deserialisation code and tests
2025-11-01 11:26:46 +05:30
R Aadarsh
81376ddf05 Add a licence symlink 2025-11-01 11:26:46 +05:30
R Aadarsh
009c4ea319 Fix issues caused by Cargo.lock 2025-11-01 11:26:46 +05:30
R Aadarsh
3b77f57d46 - Format code properly
- Remove unused dependencies
2025-11-01 11:26:46 +05:30
R Aadarsh
93281b2fc9 Fix cargo clippy errors 2025-11-01 11:26:46 +05:30
R Aadarsh
6e1a5faa75 Migrate from encoding to encoding_rs
Co-authored-by: GitHub Copilot
2025-11-01 11:26:46 +05:30
R Aadarsh
cca3bb85a5 - Fix cargo clippy errors
- Add workspace-hack as a dependency
2025-11-01 11:26:46 +05:30
R Aadarsh
a61d94d3ae fix: Fix an issue which caused the focus to not be on EncodingSelector after an action was selected from EncodingSaveOrReopenSelector 2025-11-01 11:26:45 +05:30
R Aadarsh
1078f85be7 feat: Make the encoding indicator appear only when an editor is open.
feat: Enable the user to choose whether or not the encoding indicator should be displayed by enabling or disabling `encoding_indicator` in `settings.json`
2025-11-01 11:26:45 +05:30
R Aadarsh
12e21b5796 Add support for saving and opening files in different encodings. The implementation is now complete. 2025-11-01 11:26:45 +05:30
R Aadarsh
d3e95b9f52 Add a new load_with_encoding function to handle files with various encodings.
Modified `Buffer::reload` in `buffer.rs` to use this new function, allowing Zed
to open files with any encoding in UTF-8 mode. Files with characters that
are invalid in UTF-8 will have those bytes replaced with the � character.

Add comments and documentation.
2025-11-01 11:21:34 +05:30
R Aadarsh
ee70462b69 Make the status bar encoding indicator update the encoding when an encoding from the selector is chosen. 2025-11-01 11:21:34 +05:30
R Aadarsh
6c25c99600 refactor: encoding in EncodingIndicator is now an optional trait object
feat: Add all supported encodings, and open the encoding selector when an action(save or reopen) is chosen.
2025-11-01 11:21:34 +05:30
R Aadarsh
18cd271734 Implement the actual encoding selector. There are currently only two encodings in the selector used as placeholders, but more will be added in the future. As of now, the encoding picker is not actually triggered. 2025-11-01 11:21:33 +05:30
R Aadarsh
678605a0cd Add a status indicator to indicate the current file's encoding. When clicked a modal view opens that lets user choose to either reopen or save a file with a particular encoding. The actual implementations are incomplete 2025-11-01 11:21:33 +05:30
59 changed files with 2369 additions and 217 deletions

0
..gitignore.swp Normal file
View File

108
Cargo.lock generated
View File

@@ -153,6 +153,7 @@ dependencies = [
"db",
"derive_more 0.99.20",
"editor",
"encodings",
"env_logger 0.11.8",
"fs",
"futures 0.3.31",
@@ -3354,6 +3355,7 @@ dependencies = [
"dashmap 6.1.0",
"debugger_ui",
"editor",
"encodings",
"envy",
"extension",
"file_finder",
@@ -3720,6 +3722,7 @@ dependencies = [
"dirs 4.0.0",
"edit_prediction",
"editor",
"encodings",
"fs",
"futures 0.3.31",
"gpui",
@@ -5511,6 +5514,70 @@ version = "1.0.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0"
[[package]]
name = "encoding"
version = "0.2.33"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6b0d943856b990d12d3b55b359144ff341533e516d94098b1d3fc1ac666d36ec"
dependencies = [
"encoding-index-japanese",
"encoding-index-korean",
"encoding-index-simpchinese",
"encoding-index-singlebyte",
"encoding-index-tradchinese",
]
[[package]]
name = "encoding-index-japanese"
version = "1.20141219.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "04e8b2ff42e9a05335dbf8b5c6f7567e5591d0d916ccef4e0b1710d32a0d0c91"
dependencies = [
"encoding_index_tests",
]
[[package]]
name = "encoding-index-korean"
version = "1.20141219.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4dc33fb8e6bcba213fe2f14275f0963fd16f0a02c878e3095ecfdf5bee529d81"
dependencies = [
"encoding_index_tests",
]
[[package]]
name = "encoding-index-simpchinese"
version = "1.20141219.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d87a7194909b9118fc707194baa434a4e3b0fb6a5a757c73c3adb07aa25031f7"
dependencies = [
"encoding_index_tests",
]
[[package]]
name = "encoding-index-singlebyte"
version = "1.20141219.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3351d5acffb224af9ca265f435b859c7c01537c0849754d3db3fdf2bfe2ae84a"
dependencies = [
"encoding_index_tests",
]
[[package]]
name = "encoding-index-tradchinese"
version = "1.20141219.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fd0e20d5688ce3cab59eb3ef3a2083a5c77bf496cb798dc6fcdb75f323890c18"
dependencies = [
"encoding_index_tests",
]
[[package]]
name = "encoding_index_tests"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a246d82be1c9d791c5dfde9a2bd045fc3cbba3fa2b11ad558f27d01712f00569"
[[package]]
name = "encoding_rs"
version = "0.8.35"
@@ -5520,6 +5587,34 @@ dependencies = [
"cfg-if",
]
[[package]]
name = "encodings"
version = "0.1.0"
dependencies = [
"anyhow",
"encoding_rs",
]
[[package]]
name = "encodings_ui"
version = "0.1.0"
dependencies = [
"anyhow",
"editor",
"encodings",
"fs",
"futures 0.3.31",
"fuzzy",
"gpui",
"language",
"picker",
"settings",
"ui",
"util",
"workspace",
"zed_actions",
]
[[package]]
name = "endi"
version = "1.1.0"
@@ -5897,6 +5992,7 @@ dependencies = [
"criterion",
"ctor",
"dap",
"encodings",
"extension",
"fs",
"futures 0.3.31",
@@ -6396,6 +6492,8 @@ dependencies = [
"async-trait",
"cocoa 0.26.0",
"collections",
"encoding",
"encodings",
"fsevent",
"futures 0.3.31",
"git",
@@ -7097,6 +7195,7 @@ dependencies = [
"ctor",
"db",
"editor",
"encodings",
"futures 0.3.31",
"fuzzy",
"git",
@@ -8771,6 +8870,8 @@ dependencies = [
"ctor",
"diffy",
"ec4rs",
"encoding",
"encodings",
"fs",
"futures 0.3.31",
"fuzzy",
@@ -12986,6 +13087,7 @@ dependencies = [
"context_server",
"dap",
"dap_adapters",
"encodings",
"extension",
"fancy-regex 0.14.0",
"fs",
@@ -13961,6 +14063,7 @@ dependencies = [
"dap_adapters",
"debug_adapter_extension",
"editor",
"encodings",
"env_logger 0.11.8",
"extension",
"extension_host",
@@ -20725,6 +20828,7 @@ dependencies = [
"component",
"dap",
"db",
"encodings",
"fs",
"futures 0.3.31",
"gpui",
@@ -20767,6 +20871,8 @@ dependencies = [
"async-lock 2.8.0",
"clock",
"collections",
"encoding",
"encodings",
"fs",
"futures 0.3.31",
"fuzzy",
@@ -21176,6 +21282,8 @@ dependencies = [
"diagnostics",
"edit_prediction_button",
"editor",
"encodings",
"encodings_ui",
"env_logger 0.11.8",
"extension",
"extension_host",

View File

@@ -59,6 +59,8 @@ members = [
"crates/zeta2_tools",
"crates/editor",
"crates/eval",
"crates/encodings",
"crates/encodings_ui",
"crates/explorer_command_injector",
"crates/extension",
"crates/extension_api",
@@ -221,6 +223,8 @@ members = [
"tooling/perf",
"tooling/xtask",
"crates/encodings",
"crates/encodings_ui",
]
default-members = ["crates/zed"]
@@ -242,7 +246,6 @@ activity_indicator = { path = "crates/activity_indicator" }
agent_ui = { path = "crates/agent_ui" }
agent_settings = { path = "crates/agent_settings" }
agent_servers = { path = "crates/agent_servers" }
ai = { path = "crates/ai" }
ai_onboarding = { path = "crates/ai_onboarding" }
anthropic = { path = "crates/anthropic" }
askpass = { path = "crates/askpass" }
@@ -252,7 +255,6 @@ assistant_slash_command = { path = "crates/assistant_slash_command" }
assistant_slash_commands = { path = "crates/assistant_slash_commands" }
audio = { path = "crates/audio" }
auto_update = { path = "crates/auto_update" }
auto_update_helper = { path = "crates/auto_update_helper" }
auto_update_ui = { path = "crates/auto_update_ui" }
aws_http_client = { path = "crates/aws_http_client" }
bedrock = { path = "crates/bedrock" }
@@ -315,6 +317,8 @@ edit_prediction = { path = "crates/edit_prediction" }
edit_prediction_button = { path = "crates/edit_prediction_button" }
edit_prediction_context = { path = "crates/edit_prediction_context" }
zeta2_tools = { path = "crates/zeta2_tools" }
encodings = {path = "crates/encodings"}
encodings_ui = {path = "crates/encodings_ui"}
inspector_ui = { path = "crates/inspector_ui" }
install_cli = { path = "crates/install_cli" }
journal = { path = "crates/journal" }
@@ -355,8 +359,6 @@ panel = { path = "crates/panel" }
paths = { path = "crates/paths" }
perf = { path = "tooling/perf" }
picker = { path = "crates/picker" }
plugin = { path = "crates/plugin" }
plugin_macros = { path = "crates/plugin_macros" }
prettier = { path = "crates/prettier" }
settings_profile_selector = { path = "crates/settings_profile_selector" }
project = { path = "crates/project" }
@@ -390,7 +392,6 @@ snippets_ui = { path = "crates/snippets_ui" }
sqlez = { path = "crates/sqlez" }
sqlez_macros = { path = "crates/sqlez_macros" }
story = { path = "crates/story" }
storybook = { path = "crates/storybook" }
streaming_diff = { path = "crates/streaming_diff" }
sum_tree = { path = "crates/sum_tree" }
supermaven = { path = "crates/supermaven" }
@@ -407,7 +408,6 @@ terminal_view = { path = "crates/terminal_view" }
text = { path = "crates/text" }
theme = { path = "crates/theme" }
theme_extension = { path = "crates/theme_extension" }
theme_importer = { path = "crates/theme_importer" }
theme_selector = { path = "crates/theme_selector" }
time_format = { path = "crates/time_format" }
title_bar = { path = "crates/title_bar" }
@@ -501,6 +501,7 @@ documented = "0.9.1"
dotenvy = "0.15.0"
ec4rs = "1.1"
emojis = "0.6.1"
encoding_rs = "0.8"
env_logger = "0.11"
exec = "0.3.1"
fancy-regex = "0.14.0"
@@ -790,11 +791,7 @@ codegen-units = 16
[profile.dev.package]
taffy = { opt-level = 3 }
cranelift-codegen = { opt-level = 3 }
cranelift-codegen-meta = { opt-level = 3 }
cranelift-codegen-shared = { opt-level = 3 }
resvg = { opt-level = 3 }
rustybuzz = { opt-level = 3 }
ttf-parser = { opt-level = 3 }
wasmtime-cranelift = { opt-level = 3 }
wasmtime = { opt-level = 3 }
# Build single-source-file crates with cg=1 as it helps make `cargo build` of a whole workspace a bit faster
@@ -804,7 +801,6 @@ breadcrumbs = { codegen-units = 1 }
collections = { codegen-units = 1 }
command_palette = { codegen-units = 1 }
command_palette_hooks = { codegen-units = 1 }
extension_cli = { codegen-units = 1 }
feature_flags = { codegen-units = 1 }
file_icons = { codegen-units = 1 }
fsevent = { codegen-units = 1 }

View File

@@ -1352,7 +1352,9 @@
// Whether to show the cursor position button in the status bar.
"cursor_position_button": true,
// Whether to show active line endings button in the status bar.
"line_endings_button": false
"line_endings_button": false,
// Whether to show the encoding indicator in the status bar.
"encoding_indicator": true
},
// Settings specific to the terminal
"terminal": {

View File

@@ -32,6 +32,7 @@ collections.workspace = true
context_server.workspace = true
db.workspace = true
derive_more.workspace = true
encodings.workspace = true
fs.workspace = true
futures.workspace = true
git.workspace = true

View File

@@ -563,6 +563,7 @@ mod tests {
use super::*;
use crate::{ContextServerRegistry, Templates};
use client::TelemetrySettings;
use encodings::Encoding;
use fs::Fs;
use gpui::{TestAppContext, UpdateGlobal};
use language_model::fake_provider::FakeLanguageModel;
@@ -744,6 +745,7 @@ mod tests {
path!("/root/src/main.rs").as_ref(),
&Rope::from_str_small("initial content"),
language::LineEnding::Unix,
Encoding::default(),
)
.await
.unwrap();
@@ -911,6 +913,7 @@ mod tests {
path!("/root/src/main.rs").as_ref(),
&Rope::from_str_small("initial content"),
language::LineEnding::Unix,
Encoding::default(),
)
.await
.unwrap();

View File

@@ -1248,7 +1248,7 @@ fn full_mention_for_directory(
worktree_id,
path: worktree_path,
};
buffer_store.open_buffer(project_path, cx)
buffer_store.open_buffer(project_path, &Default::default(), cx)
})
});

View File

@@ -287,7 +287,7 @@ impl DirectoryContextHandle {
let open_task = project.update(cx, |project, cx| {
project.buffer_store().update(cx, |buffer_store, cx| {
let project_path = ProjectPath { worktree_id, path };
buffer_store.open_buffer(project_path, cx)
buffer_store.open_buffer(project_path, &Default::default(), cx)
})
});

View File

@@ -31,6 +31,7 @@ chrono.workspace = true
clock.workspace = true
collections.workspace = true
dashmap.workspace = true
encodings.workspace = true
envy = "0.4.2"
futures.workspace = true
gpui.workspace = true

View File

@@ -12,6 +12,7 @@ use buffer_diff::{DiffHunkSecondaryStatus, DiffHunkStatus, assert_hunks};
use call::{ActiveCall, ParticipantLocation, Room, room};
use client::{RECEIVE_TIMEOUT, User};
use collections::{HashMap, HashSet};
use encodings::Encoding;
use fs::{FakeFs, Fs as _, RemoveOptions};
use futures::{StreamExt as _, channel::mpsc};
use git::{
@@ -3701,6 +3702,7 @@ async fn test_buffer_reloading(
path!("/dir/a.txt").as_ref(),
&new_contents,
LineEnding::Windows,
Encoding::default(),
)
.await
.unwrap();
@@ -4481,6 +4483,7 @@ async fn test_reloading_buffer_manually(
path!("/a/a.rs").as_ref(),
&Rope::from_str_small("let seven = 7;"),
LineEnding::Unix,
Encoding::default(),
)
.await
.unwrap();

View File

@@ -5,6 +5,7 @@ use async_trait::async_trait;
use call::ActiveCall;
use collections::{BTreeMap, HashMap};
use editor::Bias;
use encodings::Encoding;
use fs::{FakeFs, Fs as _};
use git::status::{FileStatus, StatusCode, TrackedStatus, UnmergedStatus, UnmergedStatusCode};
use gpui::{BackgroundExecutor, Entity, TestAppContext};
@@ -943,6 +944,7 @@ impl RandomizedTest for ProjectCollaborationTest {
&path,
&Rope::from_str_small(content.as_str()),
text::LineEnding::Unix,
Encoding::default(),
)
.await
.unwrap();

View File

@@ -30,6 +30,7 @@ client.workspace = true
collections.workspace = true
command_palette_hooks.workspace = true
dirs.workspace = true
encodings.workspace = true
fs.workspace = true
futures.workspace = true
gpui.workspace = true
@@ -54,6 +55,7 @@ util.workspace = true
workspace.workspace = true
itertools.workspace = true
[target.'cfg(windows)'.dependencies]
async-std = { version = "1.12.0", features = ["unstable"] }

View File

@@ -1241,6 +1241,7 @@ async fn get_copilot_lsp(fs: Arc<dyn Fs>, node_runtime: NodeRuntime) -> anyhow::
#[cfg(test)]
mod tests {
use super::*;
use encodings::Encoding;
use gpui::TestAppContext;
use util::{path, paths::PathStyle, rel_path::rel_path};
@@ -1451,7 +1452,7 @@ mod tests {
self.abs_path.clone()
}
fn load(&self, _: &App) -> Task<Result<String>> {
fn load(&self, _: &App, _: Encoding) -> Task<Result<String>> {
unimplemented!()
}

View File

@@ -523,8 +523,9 @@ impl SyntaxIndex {
};
let snapshot_task = worktree.update(cx, |worktree, cx| {
let load_task = worktree.load_file(&project_path.path, cx);
let load_task = worktree.load_file(&project_path.path, &Default::default(), cx);
let worktree_abs_path = worktree.abs_path();
cx.spawn(async move |_this, cx| {
let loaded_file = load_task.await?;
let language = language.await?;

View File

@@ -0,0 +1,17 @@
[package]
name = "encodings"
version = "0.1.0"
publish.workspace = true
edition.workspace = true
[lib]
path = "src/encodings.rs"
doctest = false
[dependencies]
anyhow.workspace = true
encoding_rs.workspace = true
[lints]
workspace = true

View File

@@ -0,0 +1 @@
../../LICENSE-GPL

View File

@@ -0,0 +1,214 @@
use encoding_rs;
use std::{borrow::Cow, fmt::Debug};
pub use encoding_rs::{
BIG5, EUC_JP, EUC_KR, GB18030, GBK, IBM866, ISO_2022_JP, ISO_8859_2, ISO_8859_3, ISO_8859_4,
ISO_8859_5, ISO_8859_6, ISO_8859_7, ISO_8859_8, ISO_8859_8_I, ISO_8859_10, ISO_8859_13,
ISO_8859_14, ISO_8859_15, ISO_8859_16, KOI8_R, KOI8_U, MACINTOSH, SHIFT_JIS, UTF_8, UTF_16BE,
UTF_16LE, WINDOWS_874, WINDOWS_1250, WINDOWS_1251, WINDOWS_1252, WINDOWS_1253, WINDOWS_1254,
WINDOWS_1255, WINDOWS_1256, WINDOWS_1257, WINDOWS_1258, X_MAC_CYRILLIC,
};
/// A file encoding: an `encoding_rs` encoding plus whether the file carries a
/// byte order mark.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct Encoding {
    // The underlying encoding used to decode and encode file bytes.
    pub encoding: &'static encoding_rs::Encoding,
    // Whether the file starts with (and should be written back with) a BOM.
    pub with_bom: bool,
}
impl Default for Encoding {
    /// The default encoding is UTF-8 without a byte order mark.
    fn default() -> Self {
        Self {
            encoding: UTF_8,
            with_bom: false,
        }
    }
}
impl Encoding {
    /// Decodes `input` into a `String`, failing if the bytes are not valid in
    /// this encoding.
    ///
    /// Plain UTF-8 without a BOM is validated in place via `String::from_utf8`
    /// (no copy). Everything else is decoded without replacement, so invalid
    /// byte sequences produce an error instead of `U+FFFD`. When `with_bom` is
    /// set, a leading U+FEFF surfaced by the decoder is stripped.
    pub fn decode(&self, input: Vec<u8>) -> anyhow::Result<String> {
        if self.encoding == UTF_8 && !self.with_bom {
            // Fast path: take ownership of the bytes and just validate them.
            return Ok(String::from_utf8(input)?);
        }
        let Some(result) = self
            .encoding
            .decode_without_bom_handling_and_without_replacement(&input)
        else {
            return Err(anyhow::anyhow!(
                "input is not valid {}",
                self.encoding.name()
            ));
        };
        if self.with_bom {
            // The decoder surfaces the BOM as a leading U+FEFF character; drop
            // it so it doesn't end up in the buffer text. `strip_prefix` avoids
            // the previous hard-coded 3-byte slice.
            if let Some(stripped) = result.strip_prefix('\u{FEFF}') {
                return Ok(stripped.to_string());
            }
        }
        Ok(result.into_owned())
    }

    /// Returns the byte order mark to write at the start of the file, or
    /// `None` if `with_bom` is unset or this encoding has no BOM.
    pub fn bom(&self) -> Option<&'static [u8]> {
        if !self.with_bom {
            return None;
        }
        if self.encoding == UTF_8 {
            Some(&[0xEF, 0xBB, 0xBF])
        } else if self.encoding == UTF_16BE {
            Some(&[0xFE, 0xFF])
        } else if self.encoding == UTF_16LE {
            Some(&[0xFF, 0xFE])
        } else {
            None
        }
    }

    /// Encodes a chunk of text into this encoding.
    ///
    /// The BOM is never included here; callers write [`Encoding::bom`] once at
    /// the start of the file. UTF-8 is borrowed directly; UTF-16LE/BE are
    /// produced manually because `encoding_rs` only encodes to legacy
    /// encodings.
    pub fn encode_chunk<'a>(&self, input: &'a str) -> anyhow::Result<Cow<'a, [u8]>> {
        if self.encoding == UTF_8 {
            // Rust strings are already UTF-8.
            Ok(Cow::Borrowed(input.as_bytes()))
        } else if self.encoding == UTF_16BE {
            let mut data = Vec::<u8>::with_capacity(input.len() * 2);
            // Convert the input string to UTF-16BE bytes.
            data.extend(input.encode_utf16().flat_map(|u| u.to_be_bytes()));
            Ok(Cow::Owned(data))
        } else if self.encoding == UTF_16LE {
            let mut data = Vec::<u8>::with_capacity(input.len() * 2);
            // Convert the input string to UTF-16LE bytes.
            data.extend(input.encode_utf16().flat_map(|u| u.to_le_bytes()));
            Ok(Cow::Owned(data))
        } else {
            // todo: should we error on invalid content when encoding?
            // `encode` replaces unmappable characters instead of failing.
            let (cow, _encoding_used, _had_errors) = self.encoding.encode(input);
            Ok(cow)
        }
    }

    /// Human-readable name shown in the UI (e.g. the status bar indicator).
    /// Falls back to the raw `encoding_rs` name for anything unlisted.
    pub fn name(&self) -> &'static str {
        let name = self.encoding.name();
        match name {
            "UTF-8" => "UTF-8",
            "UTF-16LE" => "UTF-16 LE",
            "UTF-16BE" => "UTF-16 BE",
            "windows-1252" => "Windows-1252",
            "windows-1251" => "Windows-1251",
            "windows-1250" => "Windows-1250",
            "ISO-8859-2" => "ISO 8859-2",
            "ISO-8859-3" => "ISO 8859-3",
            "ISO-8859-4" => "ISO 8859-4",
            "ISO-8859-5" => "ISO 8859-5",
            "ISO-8859-6" => "ISO 8859-6",
            "ISO-8859-7" => "ISO 8859-7",
            "ISO-8859-8" => "ISO 8859-8",
            "ISO-8859-13" => "ISO 8859-13",
            "ISO-8859-15" => "ISO 8859-15",
            "KOI8-R" => "KOI8-R",
            "KOI8-U" => "KOI8-U",
            "macintosh" => "MacRoman",
            "x-mac-cyrillic" => "Mac Cyrillic",
            "windows-874" => "Windows-874",
            "windows-1253" => "Windows-1253",
            "windows-1254" => "Windows-1254",
            "windows-1255" => "Windows-1255",
            "windows-1256" => "Windows-1256",
            "windows-1257" => "Windows-1257",
            "windows-1258" => "Windows-1258",
            "EUC-KR" => "Windows-949",
            "EUC-JP" => "EUC-JP",
            "ISO-2022-JP" => "ISO 2022-JP",
            "GBK" => "GBK",
            "gb18030" => "GB18030",
            "Big5" => "Big5",
            _ => name,
        }
    }

    /// Inverse of [`Encoding::name`]: resolves a UI display name back to an
    /// encoding (always without a BOM). Unknown names fall back to UTF-8.
    pub fn from_name(name: &str) -> Self {
        let encoding = match name {
            "UTF-8" => encoding_rs::UTF_8,
            "UTF-16 LE" => encoding_rs::UTF_16LE,
            "UTF-16 BE" => encoding_rs::UTF_16BE,
            "Windows-1252" => encoding_rs::WINDOWS_1252,
            "Windows-1251" => encoding_rs::WINDOWS_1251,
            "Windows-1250" => encoding_rs::WINDOWS_1250,
            "ISO 8859-2" => encoding_rs::ISO_8859_2,
            "ISO 8859-3" => encoding_rs::ISO_8859_3,
            "ISO 8859-4" => encoding_rs::ISO_8859_4,
            "ISO 8859-5" => encoding_rs::ISO_8859_5,
            "ISO 8859-6" => encoding_rs::ISO_8859_6,
            "ISO 8859-7" => encoding_rs::ISO_8859_7,
            "ISO 8859-8" => encoding_rs::ISO_8859_8,
            "ISO 8859-13" => encoding_rs::ISO_8859_13,
            "ISO 8859-15" => encoding_rs::ISO_8859_15,
            "KOI8-R" => encoding_rs::KOI8_R,
            "KOI8-U" => encoding_rs::KOI8_U,
            "MacRoman" => encoding_rs::MACINTOSH,
            "Mac Cyrillic" => encoding_rs::X_MAC_CYRILLIC,
            "Windows-874" => encoding_rs::WINDOWS_874,
            "Windows-1253" => encoding_rs::WINDOWS_1253,
            "Windows-1254" => encoding_rs::WINDOWS_1254,
            "Windows-1255" => encoding_rs::WINDOWS_1255,
            "Windows-1256" => encoding_rs::WINDOWS_1256,
            "Windows-1257" => encoding_rs::WINDOWS_1257,
            "Windows-1258" => encoding_rs::WINDOWS_1258,
            "Windows-949" => encoding_rs::EUC_KR,
            "EUC-JP" => encoding_rs::EUC_JP,
            "ISO 2022-JP" => encoding_rs::ISO_2022_JP,
            "GBK" => encoding_rs::GBK,
            "GB18030" => encoding_rs::GB18030,
            "Big5" => encoding_rs::BIG5,
            _ => encoding_rs::UTF_8, // Default to UTF-8 for unknown names
        };
        Encoding {
            encoding,
            with_bom: false,
        }
    }
}
/// How to choose an encoding when loading a file's bytes.
#[derive(Default, Clone)]
pub struct EncodingOptions {
    // Encoding to use when detection is disabled or finds no BOM.
    pub expected: Encoding,
    // When true, sniff the byte stream for a BOM before falling back to `expected`.
    pub auto_detect: bool,
}
impl EncodingOptions {
pub fn process(&self, bytes: Vec<u8>) -> anyhow::Result<(Encoding, String)> {
let encoding = if self.auto_detect
&& let Some(encoding) = Self::detect(&bytes)
{
encoding
} else {
self.expected
};
Ok((encoding, encoding.decode(bytes)?))
}
fn detect(bytes: &[u8]) -> Option<Encoding> {
if bytes.starts_with(&[0xFE, 0xFF]) {
Some(Encoding {
encoding: UTF_8,
with_bom: true,
})
} else if bytes.starts_with(&[0xFF, 0xFE]) {
Some(Encoding {
encoding: UTF_16LE,
with_bom: true,
})
} else if bytes.starts_with(&[0xEF, 0xBB, 0xBF]) {
Some(Encoding {
encoding: UTF_8,
with_bom: true,
})
} else {
None
}
}
}

View File

@@ -0,0 +1,28 @@
[package]
name = "encodings_ui"
version = "0.1.0"
publish.workspace = true
edition.workspace = true
[dependencies]
anyhow.workspace = true
editor.workspace = true
encodings.workspace = true
fs.workspace = true
futures.workspace = true
fuzzy.workspace = true
gpui.workspace = true
language.workspace = true
picker.workspace = true
settings.workspace = true
ui.workspace = true
util.workspace = true
workspace.workspace = true
zed_actions.workspace = true
[lib]
path = "src/encodings_ui.rs"
doctest = false
[lints]
workspace = true

View File

@@ -0,0 +1 @@
../../LICENSE-GPL

View File

@@ -0,0 +1,108 @@
//! A crate for handling file encodings in the text editor.
use editor::Editor;
use gpui::{Entity, Subscription, WeakEntity};
use language::{Buffer, BufferEvent};
use ui::{
App, Button, ButtonCommon, Context, IntoElement, LabelSize, Render, Tooltip, Window, div,
};
use ui::{Clickable, ParentElement};
use workspace::notifications::NotifyTaskExt;
use workspace::{ItemHandle, StatusItemView, Workspace};
use zed_actions::encodings_ui::OpenWithEncoding;
// use zed_actions::encodings_ui::Toggle;
/// A status bar item that shows the current file encoding and allows changing it.
pub struct EncodingIndicator {
    // Singleton buffer of the active editor, if any; `None` hides the indicator.
    pub buffer: Option<WeakEntity<Buffer>>,
    // Handle back to the owning workspace, used to open the encoding selectors.
    pub workspace: WeakEntity<Workspace>,
    // Keeps the subscription to the active buffer's events alive.
    observe_buffer: Option<Subscription>,
}
pub mod selectors;
impl Render for EncodingIndicator {
    /// Renders the encoding button, or nothing when no buffer is attached.
    fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl ui::IntoElement {
        let Some(buffer) = self.buffer() else {
            return gpui::Empty.into_any_element();
        };
        let label = buffer.read(cx).encoding().name();
        let button = Button::new("encoding", label)
            .label_size(LabelSize::Small)
            .tooltip(Tooltip::text("Select Encoding"))
            .on_click(cx.listener(move |indicator, _, window, cx| {
                let Some(buffer) = indicator.buffer() else {
                    return;
                };
                indicator
                    .workspace
                    .update(cx, move |workspace, cx| {
                        // Only file-backed buffers get the save-or-reopen flow;
                        // the untitled-buffer case is still unimplemented.
                        if buffer.read(cx).file().is_some() {
                            selectors::save_or_reopen(buffer, workspace, window, cx)
                        }
                    })
                    .ok();
            }));
        div().child(button).into_any_element()
    }
}
impl EncodingIndicator {
    /// Creates an indicator with no buffer attached; `set_active_pane_item`
    /// wires it up once an editor becomes active.
    pub fn new(workspace: WeakEntity<Workspace>) -> EncodingIndicator {
        EncodingIndicator {
            buffer: None,
            observe_buffer: None,
            workspace,
        }
    }

    /// Upgrades the weak buffer handle, if one is set and still alive.
    fn buffer(&self) -> Option<Entity<Buffer>> {
        self.buffer.as_ref()?.upgrade()
    }

    /// Update the encoding when the `encoding` field of the `Buffer` struct changes.
    pub fn on_buffer_event(
        &mut self,
        _: Entity<Buffer>,
        e: &BufferEvent,
        cx: &mut Context<EncodingIndicator>,
    ) {
        if let BufferEvent::EncodingChanged = e {
            cx.notify();
        }
    }
}
impl StatusItemView for EncodingIndicator {
    /// Tracks the active pane item: attaches to its singleton buffer when it
    /// is an editor, otherwise detaches.
    fn set_active_pane_item(
        &mut self,
        active_pane_item: Option<&dyn ItemHandle>,
        _window: &mut Window,
        cx: &mut Context<Self>,
    ) {
        let singleton = active_pane_item
            .and_then(|item| item.act_as::<Editor>(cx))
            .and_then(|editor| editor.read(cx).buffer().read(cx).as_singleton());
        if let Some(buffer) = singleton {
            self.observe_buffer = Some(cx.subscribe(&buffer, Self::on_buffer_event));
            self.buffer = Some(buffer.downgrade());
        } else {
            self.observe_buffer = None;
            self.buffer = None;
        }
        cx.notify();
    }
}
/// Registers the `OpenWithEncoding` action handler on every new workspace.
pub fn init(cx: &mut App) {
    cx.observe_new(|workspace: &mut Workspace, _, _| {
        workspace.register_action(|workspace, action: &OpenWithEncoding, window, cx| {
            let task = selectors::open_with_encoding(action.0.clone(), workspace, window, cx);
            // Surface failures as workspace notifications rather than dropping them.
            task.detach_and_notify_err(window, cx);
        });
    })
    .detach();
}

View File

@@ -0,0 +1,345 @@
//! A crate for handling file encodings in the text editor.
use crate::selectors::encoding::Action;
use editor::Editor;
use encoding_rs::Encoding;
use gpui::{ClickEvent, Entity, Subscription, WeakEntity};
use language::Buffer;
use ui::{App, Button, ButtonCommon, Context, LabelSize, Render, Tooltip, Window, div};
use ui::{Clickable, ParentElement};
use util::ResultExt;
use workspace::{
CloseActiveItem, ItemHandle, OpenOptions, StatusItemView, Workspace,
with_active_or_new_workspace,
};
use zed_actions::encodings_ui::{ForceOpen, Toggle};
use crate::selectors::encoding::EncodingSelector;
use crate::selectors::save_or_reopen::EncodingSaveOrReopenSelector;
/// A status bar item that shows the current file encoding and allows changing it.
pub struct EncodingIndicator {
    // Encoding of the active buffer; `None` falls back to UTF-8 when rendering.
    pub encoding: Option<&'static Encoding>,
    // Handle back to the owning workspace, used to open the selectors.
    pub workspace: WeakEntity<Workspace>,
    /// Subscription to observe changes in the active editor
    observe_editor: Option<Subscription>,
    /// Subscription to observe changes in the `encoding` field of the `Buffer` struct
    observe_buffer_encoding: Option<Subscription>,
    /// Whether to show the indicator or not, based on whether an editor is active
    show: bool,
    /// Whether to show `EncodingSaveOrReopenSelector`. It will be shown only when
    /// the current buffer is associated with a file.
    show_save_or_reopen_selector: bool,
}
pub mod selectors;
impl Render for EncodingIndicator {
    /// Renders the encoding button, or an empty `div` when no editor is active.
    fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl ui::IntoElement {
        let status_element = div();
        if !self.show {
            return status_element;
        }
        let show_save_or_reopen_selector = self.show_save_or_reopen_selector;
        status_element.child(
            Button::new(
                "encoding",
                encoding_name(self.encoding.unwrap_or(encoding_rs::UTF_8)),
            )
            .label_size(LabelSize::Small)
            .tooltip(Tooltip::text("Select Encoding"))
            .on_click(cx.listener(move |indicator, _: &ClickEvent, window, cx| {
                let Some(workspace) = indicator.workspace.upgrade() else {
                    return;
                };
                workspace.update(cx, move |workspace, cx| {
                    // Open the `EncodingSaveOrReopenSelector` if the buffer is
                    // associated with a file,
                    if show_save_or_reopen_selector {
                        EncodingSaveOrReopenSelector::toggle(workspace, window, cx);
                        return;
                    }
                    // otherwise, open the `EncodingSelector` directly.
                    //
                    // These lookups previously used chained `unwrap()`s, which
                    // panicked whenever the click raced the active item or
                    // editor going away; bail out gracefully instead.
                    let Some(item) = workspace.active_item(cx) else {
                        return;
                    };
                    let Some(editor) = item.act_as::<Editor>(cx) else {
                        return;
                    };
                    let Some((_, buffer, _)) = editor.read(cx).active_excerpt(cx) else {
                        return;
                    };
                    let weak_workspace = workspace.weak_handle();
                    if let Some(path) = buffer.read(cx).file() {
                        let path = path.clone().path().to_rel_path_buf();
                        workspace.toggle_modal(window, cx, |window, cx| {
                            EncodingSelector::new(
                                window,
                                cx,
                                Action::Save,
                                Some(buffer.downgrade()),
                                weak_workspace,
                                Some(path.as_std_path().to_path_buf()),
                            )
                        });
                    }
                });
            })),
        )
    }
}
impl EncodingIndicator {
    pub fn new(
        encoding: Option<&'static Encoding>,
        workspace: WeakEntity<Workspace>,
        observe_editor: Option<Subscription>,
        observe_buffer_encoding: Option<Subscription>,
    ) -> EncodingIndicator {
        EncodingIndicator {
            encoding,
            workspace,
            observe_editor,
            show: false,
            observe_buffer_encoding,
            show_save_or_reopen_selector: false,
        }
    }

    /// Update the encoding when the active editor is switched.
    pub fn update_when_editor_is_switched(
        &mut self,
        editor: Entity<Editor>,
        _: &mut Window,
        cx: &mut Context<EncodingIndicator>,
    ) {
        let editor = editor.read(cx);
        if let Some((_, buffer, _)) = editor.active_excerpt(cx) {
            let encoding = buffer.read(cx).encoding.clone();
            self.encoding = Some(encoding.get());
            // Only offer "save or reopen" for buffers that are backed by a file.
            self.show_save_or_reopen_selector = buffer.read(cx).file().is_some();
        }
        cx.notify();
    }

    /// Update the encoding when the `encoding` field of the `Buffer` struct changes.
    pub fn update_when_buffer_encoding_changes(
        &mut self,
        buffer: Entity<Buffer>,
        _: &mut Window,
        cx: &mut Context<EncodingIndicator>,
    ) {
        let encoding = buffer.read(cx).encoding.clone();
        self.encoding = Some(encoding.get());
        cx.notify();
    }
}
impl StatusItemView for EncodingIndicator {
    fn set_active_pane_item(
        &mut self,
        active_pane_item: Option<&dyn ItemHandle>,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) {
        match active_pane_item.and_then(|item| item.downcast::<Editor>()) {
            Some(editor) => {
                self.observe_editor =
                    Some(cx.observe_in(&editor, window, Self::update_when_editor_is_switched));
                // Re-subscribe to the newly-active buffer; dropping the previous
                // subscription here prevents a stale buffer from continuing to
                // update the indicator.
                self.observe_buffer_encoding =
                    editor.read(cx).active_excerpt(cx).map(|(_, buffer, _)| {
                        cx.observe_in(&buffer, window, Self::update_when_buffer_encoding_changes)
                    });
                self.update_when_editor_is_switched(editor, window, cx);
                self.show = true;
            }
            None => {
                self.encoding = None;
                self.observe_editor = None;
                // Also release the buffer subscription (previously leaked).
                self.observe_buffer_encoding = None;
                self.show = false;
            }
        }
    }
}
/// Get a human-readable name for the given encoding.
///
/// Unrecognized encodings fall back to the canonical name reported by
/// `encoding_rs`.
pub fn encoding_name(encoding: &'static Encoding) -> String {
    // Maps the canonical `encoding_rs` name to the label shown in the UI.
    const DISPLAY_NAMES: &[(&str, &str)] = &[
        ("UTF-8", "UTF-8"),
        ("UTF-16LE", "UTF-16 LE"),
        ("UTF-16BE", "UTF-16 BE"),
        ("windows-1252", "Windows-1252"),
        ("windows-1251", "Windows-1251"),
        ("windows-1250", "Windows-1250"),
        ("ISO-8859-2", "ISO 8859-2"),
        ("ISO-8859-3", "ISO 8859-3"),
        ("ISO-8859-4", "ISO 8859-4"),
        ("ISO-8859-5", "ISO 8859-5"),
        ("ISO-8859-6", "ISO 8859-6"),
        ("ISO-8859-7", "ISO 8859-7"),
        ("ISO-8859-8", "ISO 8859-8"),
        ("ISO-8859-13", "ISO 8859-13"),
        ("ISO-8859-15", "ISO 8859-15"),
        ("KOI8-R", "KOI8-R"),
        ("KOI8-U", "KOI8-U"),
        ("macintosh", "MacRoman"),
        ("x-mac-cyrillic", "Mac Cyrillic"),
        ("windows-874", "Windows-874"),
        ("windows-1253", "Windows-1253"),
        ("windows-1254", "Windows-1254"),
        ("windows-1255", "Windows-1255"),
        ("windows-1256", "Windows-1256"),
        ("windows-1257", "Windows-1257"),
        ("windows-1258", "Windows-1258"),
        ("EUC-KR", "Windows-949"),
        ("EUC-JP", "EUC-JP"),
        ("ISO-2022-JP", "ISO 2022-JP"),
        ("GBK", "GBK"),
        ("gb18030", "GB18030"),
        ("Big5", "Big5"),
    ];
    let canonical = encoding.name();
    DISPLAY_NAMES
        .iter()
        .find(|(name, _)| *name == canonical)
        .map_or(canonical, |(_, display)| display)
        .to_string()
}
/// Get an encoding from its index in the predefined list.
/// If the index is out of range, UTF-8 is returned as a default.
pub fn encoding_from_index(index: usize) -> &'static Encoding {
match index {
0 => encoding_rs::UTF_8,
1 => encoding_rs::UTF_16LE,
2 => encoding_rs::UTF_16BE,
3 => encoding_rs::WINDOWS_1252,
4 => encoding_rs::WINDOWS_1251,
5 => encoding_rs::WINDOWS_1250,
6 => encoding_rs::ISO_8859_2,
7 => encoding_rs::ISO_8859_3,
8 => encoding_rs::ISO_8859_4,
9 => encoding_rs::ISO_8859_5,
10 => encoding_rs::ISO_8859_6,
11 => encoding_rs::ISO_8859_7,
12 => encoding_rs::ISO_8859_8,
13 => encoding_rs::ISO_8859_13,
14 => encoding_rs::ISO_8859_15,
15 => encoding_rs::KOI8_R,
16 => encoding_rs::KOI8_U,
17 => encoding_rs::MACINTOSH,
18 => encoding_rs::X_MAC_CYRILLIC,
19 => encoding_rs::WINDOWS_874,
20 => encoding_rs::WINDOWS_1253,
21 => encoding_rs::WINDOWS_1254,
22 => encoding_rs::WINDOWS_1255,
23 => encoding_rs::WINDOWS_1256,
24 => encoding_rs::WINDOWS_1257,
25 => encoding_rs::WINDOWS_1258,
26 => encoding_rs::EUC_KR,
27 => encoding_rs::EUC_JP,
28 => encoding_rs::ISO_2022_JP,
29 => encoding_rs::GBK,
30 => encoding_rs::GB18030,
31 => encoding_rs::BIG5,
_ => encoding_rs::UTF_8,
}
}
/// Get an encoding from its display name.
///
/// Unknown names default to UTF-8.
pub fn encoding_from_name(name: &str) -> &'static Encoding {
    // Display-name → encoding table; the inverse of `encoding_name`.
    let table: [(&str, &'static Encoding); 32] = [
        ("UTF-8", encoding_rs::UTF_8),
        ("UTF-16 LE", encoding_rs::UTF_16LE),
        ("UTF-16 BE", encoding_rs::UTF_16BE),
        ("Windows-1252", encoding_rs::WINDOWS_1252),
        ("Windows-1251", encoding_rs::WINDOWS_1251),
        ("Windows-1250", encoding_rs::WINDOWS_1250),
        ("ISO 8859-2", encoding_rs::ISO_8859_2),
        ("ISO 8859-3", encoding_rs::ISO_8859_3),
        ("ISO 8859-4", encoding_rs::ISO_8859_4),
        ("ISO 8859-5", encoding_rs::ISO_8859_5),
        ("ISO 8859-6", encoding_rs::ISO_8859_6),
        ("ISO 8859-7", encoding_rs::ISO_8859_7),
        ("ISO 8859-8", encoding_rs::ISO_8859_8),
        ("ISO 8859-13", encoding_rs::ISO_8859_13),
        ("ISO 8859-15", encoding_rs::ISO_8859_15),
        ("KOI8-R", encoding_rs::KOI8_R),
        ("KOI8-U", encoding_rs::KOI8_U),
        ("MacRoman", encoding_rs::MACINTOSH),
        ("Mac Cyrillic", encoding_rs::X_MAC_CYRILLIC),
        ("Windows-874", encoding_rs::WINDOWS_874),
        ("Windows-1253", encoding_rs::WINDOWS_1253),
        ("Windows-1254", encoding_rs::WINDOWS_1254),
        ("Windows-1255", encoding_rs::WINDOWS_1255),
        ("Windows-1256", encoding_rs::WINDOWS_1256),
        ("Windows-1257", encoding_rs::WINDOWS_1257),
        ("Windows-1258", encoding_rs::WINDOWS_1258),
        ("Windows-949", encoding_rs::EUC_KR),
        ("EUC-JP", encoding_rs::EUC_JP),
        ("ISO 2022-JP", encoding_rs::ISO_2022_JP),
        ("GBK", encoding_rs::GBK),
        ("GB18030", encoding_rs::GB18030),
        ("Big5", encoding_rs::BIG5),
    ];
    table
        .iter()
        .find(|(candidate, _)| *candidate == name)
        .map_or(encoding_rs::UTF_8, |(_, encoding)| *encoding)
}
/// Register the global `Toggle` and `ForceOpen` encoding actions.
pub fn init(cx: &mut App) {
    cx.on_action(|action: &Toggle, cx: &mut App| {
        let Toggle(path) = action.clone();
        let path = path.to_path_buf();
        with_active_or_new_workspace(cx, |workspace, window, cx| {
            let weak_workspace = workspace.weak_handle();
            workspace.toggle_modal(window, cx, |window, cx| {
                EncodingSelector::new(window, cx, Action::Reopen, None, weak_workspace, Some(path))
            });
        });
    });
    cx.on_action(|action: &ForceOpen, cx: &mut App| {
        let ForceOpen(path) = action.clone();
        let path = path.to_path_buf();
        with_active_or_new_workspace(cx, |workspace, window, cx| {
            workspace.active_pane().update(cx, |pane, cx| {
                pane.close_active_item(&CloseActiveItem::default(), window, cx)
                    .detach();
            });
            // Set the "force" flag for the duration of the open, then reset it.
            workspace
                .encoding_options
                .force
                .store(true, std::sync::atomic::Ordering::Release);
            let open_task = workspace.open_abs_path(path, OpenOptions::default(), window, cx);
            let weak_workspace = workspace.weak_handle();
            cx.spawn(async move |_, cx| {
                open_task.await.log_err();
                // Reset the flag through the weak handle; don't panic if the
                // workspace was dropped while the open was in flight.
                weak_workspace
                    .update(cx, |workspace: &mut Workspace, _| {
                        *workspace.encoding_options.force.get_mut() = false;
                    })
                    .log_err();
            })
            .detach();
        });
    });
}

View File

@@ -0,0 +1,409 @@
use anyhow::Result;
use editor::Editor;
use encodings::Encoding;
use encodings::EncodingOptions;
use futures::channel::oneshot;
use gpui::ParentElement;
use gpui::Task;
use language::Buffer;
use picker::Picker;
use picker::PickerDelegate;
use std::path::Path;
use std::sync::Arc;
use std::sync::atomic::AtomicBool;
use ui::Label;
use ui::ListItemSpacing;
use ui::rems;
use util::ResultExt;
use fuzzy::{StringMatch, StringMatchCandidate};
use gpui::{DismissEvent, Entity, WeakEntity};
use ui::{Context, HighlightedLabel, ListItem, Window};
use workspace::Workspace;
/// Show the "save or reopen with encoding" picker for `buffer` as a modal.
pub fn save_or_reopen(
    buffer: Entity<Buffer>,
    workspace: &mut Workspace,
    window: &mut Window,
    cx: &mut Context<Workspace>,
) {
    let workspace_handle = cx.weak_entity();
    workspace.toggle_modal(window, cx, |window, cx| {
        Picker::nonsearchable_uniform_list(
            EncodingSaveOrReopenDelegate::new(buffer, workspace_handle),
            window,
            cx,
        )
        .modal(true)
        .width(rems(34.0))
    })
}
/// Prompt for an encoding, then open the file at `path` with it.
///
/// Shows the encoding picker as a modal; once an encoding is chosen, the file
/// is loaded through the buffer store with that encoding and opened in an
/// editor in the active pane. Dismissing the picker cancels the open.
pub fn open_with_encoding(
    path: Arc<Path>,
    workspace: &mut Workspace,
    window: &mut Window,
    cx: &mut Context<Workspace>,
) -> Task<Result<()>> {
    let (tx, rx) = oneshot::channel();
    workspace.toggle_modal(window, cx, |window, cx| {
        let delegate = EncodingSelectorDelegate::new(None, tx);
        Picker::uniform_list(delegate, window, cx)
    });
    let project = workspace.project().clone();
    cx.spawn_in(window, async move |workspace, cx| {
        // The sender is dropped if the picker is dismissed without a choice;
        // treat that as a cancelled open rather than panicking.
        let Ok(encoding) = rx.await else {
            return Ok(());
        };
        let (worktree, rel_path) = project
            .update(cx, |project, cx| {
                project.find_or_create_worktree(path, false, cx)
            })?
            .await?;
        let project_path = (worktree.update(cx, |worktree, _| worktree.id())?, rel_path).into();
        let buffer = project
            .update(cx, |project, cx| {
                project.buffer_store().update(cx, |buffer_store, cx| {
                    buffer_store.open_buffer(
                        project_path,
                        &EncodingOptions {
                            expected: encoding,
                            auto_detect: true,
                        },
                        cx,
                    )
                })
            })?
            .await?;
        workspace.update_in(cx, |workspace, window, cx| {
            workspace.open_project_item::<Editor>(
                workspace.active_pane().clone(),
                buffer,
                true,
                true,
                window,
                cx,
            )
        })?;
        Ok(())
    })
}
/// Prompt for an encoding, then reload `buffer` from disk using it.
///
/// If the reload fails, the buffer's previous encoding is restored.
/// Dismissing the picker cancels the reopen.
pub fn reopen_with_encoding(
    buffer: Entity<Buffer>,
    workspace: &mut Workspace,
    window: &mut Window,
    cx: &mut Context<Workspace>,
) {
    let encoding = buffer.read(cx).encoding();
    let (tx, rx) = oneshot::channel();
    workspace.toggle_modal(window, cx, |window, cx| {
        let delegate = EncodingSelectorDelegate::new(Some(encoding), tx);
        Picker::uniform_list(delegate, window, cx)
    });
    cx.spawn(async move |_, cx| {
        // The sender is dropped if the picker is dismissed without a choice;
        // treat that as a cancellation rather than panicking.
        let Ok(encoding) = rx.await else {
            return anyhow::Ok(());
        };
        let (task, prev) = buffer.update(cx, |buffer, cx| {
            let prev = buffer.encoding();
            buffer.set_encoding(encoding, cx);
            (buffer.reload(cx), prev)
        })?;
        // Roll back to the previous encoding if the reload failed.
        if task.await.is_err() {
            buffer.update(cx, |buffer, cx| {
                buffer.set_encoding(prev, cx);
            })?;
        }
        anyhow::Ok(())
    })
    .detach();
}
/// Prompt for an encoding, then save `buffer` to disk using it.
///
/// Dismissing the picker cancels the save.
pub fn save_with_encoding(
    buffer: Entity<Buffer>,
    workspace: &mut Workspace,
    window: &mut Window,
    cx: &mut Context<Workspace>,
) {
    let encoding = buffer.read(cx).encoding();
    let (tx, rx) = oneshot::channel();
    workspace.toggle_modal(window, cx, |window, cx| {
        let delegate = EncodingSelectorDelegate::new(Some(encoding), tx);
        Picker::uniform_list(delegate, window, cx)
    });
    cx.spawn(async move |workspace, cx| {
        // The sender is dropped if the picker is dismissed without a choice;
        // treat that as a cancellation rather than panicking.
        let Ok(encoding) = rx.await else {
            return;
        };
        let save_task = workspace.update(cx, |workspace, cx| {
            buffer.update(cx, |buffer, cx| {
                buffer.set_encoding(encoding, cx);
            });
            workspace
                .project()
                .update(cx, |project, cx| project.save_buffer(buffer, cx))
        });
        // gpui tasks are cancelled when dropped, so detach the save task to
        // let it run to completion (previously it was silently dropped).
        if let Ok(task) = save_task {
            task.detach();
        }
    })
    .detach();
}
/// The two follow-up actions offered after choosing to change a file-backed
/// buffer's encoding: save with it, or reopen (reload from disk) with it.
pub enum SaveOrReopen {
Save,
Reopen,
}
/// Picker delegate offering the save/reopen choice for `buffer`.
pub struct EncodingSaveOrReopenDelegate {
// Index of the currently highlighted action.
current_selection: usize,
// The fixed list of offered actions (save, reopen).
actions: Vec<SaveOrReopen>,
workspace: WeakEntity<Workspace>,
buffer: Entity<Buffer>,
}
impl EncodingSaveOrReopenDelegate {
    /// Create a delegate for `buffer`, with "save" as the initial selection.
    pub fn new(buffer: Entity<Buffer>, workspace: WeakEntity<Workspace>) -> Self {
        let actions = vec![SaveOrReopen::Save, SaveOrReopen::Reopen];
        Self {
            buffer,
            workspace,
            actions,
            current_selection: 0,
        }
    }
}
impl PickerDelegate for EncodingSaveOrReopenDelegate {
type ListItem = ListItem;
fn match_count(&self) -> usize {
self.actions.len()
}
fn selected_index(&self) -> usize {
self.current_selection
}
fn set_selected_index(
&mut self,
ix: usize,
_window: &mut Window,
_cx: &mut Context<Picker<Self>>,
) {
self.current_selection = ix;
}
fn placeholder_text(&self, _window: &mut Window, _cx: &mut ui::App) -> std::sync::Arc<str> {
"Select an action...".into()
}
fn update_matches(
&mut self,
_query: String,
_window: &mut Window,
_cx: &mut Context<Picker<Self>>,
) -> Task<()> {
return Task::ready(());
}
fn confirm(&mut self, _: bool, window: &mut Window, cx: &mut Context<Picker<Self>>) {
self.dismissed(window, cx);
cx.defer_in(window, |this, window, cx| {
let this = &this.delegate;
this.workspace
.update(cx, |workspace, cx| {
match this.actions[this.current_selection] {
SaveOrReopen::Reopen => {
reopen_with_encoding(this.buffer.clone(), workspace, window, cx);
}
SaveOrReopen::Save => {
save_with_encoding(this.buffer.clone(), workspace, window, cx);
}
}
})
.ok();
})
}
fn dismissed(&mut self, _window: &mut Window, cx: &mut Context<Picker<Self>>) {
cx.emit(DismissEvent)
}
fn render_match(
&self,
ix: usize,
_: bool,
_: &mut Window,
_: &mut Context<Picker<Self>>,
) -> Option<Self::ListItem> {
Some(
ListItem::new(ix)
.child(match self.actions[ix] {
SaveOrReopen::Save => Label::new("Save with encoding"),
SaveOrReopen::Reopen => Label::new("Reopen with encoding"),
})
.spacing(ui::ListItemSpacing::Sparse),
)
}
}
/// Picker delegate listing the supported encodings; sends the chosen
/// `Encoding` through `tx` when the user confirms.
pub struct EncodingSelectorDelegate {
current_selection: usize,
// Full candidate list; `matches` holds the subset matching the current query.
encodings: Vec<StringMatchCandidate>,
matches: Vec<StringMatch>,
// Consumed on confirm; dropped (closing the channel) if dismissed first.
tx: Option<oneshot::Sender<Encoding>>,
}
impl EncodingSelectorDelegate {
pub fn new(
encoding: Option<Encoding>,
tx: oneshot::Sender<Encoding>,
) -> EncodingSelectorDelegate {
let encodings = vec![
StringMatchCandidate::new(0, "UTF-8"),
StringMatchCandidate::new(1, "UTF-16 LE"),
StringMatchCandidate::new(2, "UTF-16 BE"),
StringMatchCandidate::new(3, "Windows-1252"),
StringMatchCandidate::new(4, "Windows-1251"),
StringMatchCandidate::new(5, "Windows-1250"),
StringMatchCandidate::new(6, "ISO 8859-2"),
StringMatchCandidate::new(7, "ISO 8859-3"),
StringMatchCandidate::new(8, "ISO 8859-4"),
StringMatchCandidate::new(9, "ISO 8859-5"),
StringMatchCandidate::new(10, "ISO 8859-6"),
StringMatchCandidate::new(11, "ISO 8859-7"),
StringMatchCandidate::new(12, "ISO 8859-8"),
StringMatchCandidate::new(13, "ISO 8859-13"),
StringMatchCandidate::new(14, "ISO 8859-15"),
StringMatchCandidate::new(15, "KOI8-R"),
StringMatchCandidate::new(16, "KOI8-U"),
StringMatchCandidate::new(17, "MacRoman"),
StringMatchCandidate::new(18, "Mac Cyrillic"),
StringMatchCandidate::new(19, "Windows-874"),
StringMatchCandidate::new(20, "Windows-1253"),
StringMatchCandidate::new(21, "Windows-1254"),
StringMatchCandidate::new(22, "Windows-1255"),
StringMatchCandidate::new(23, "Windows-1256"),
StringMatchCandidate::new(24, "Windows-1257"),
StringMatchCandidate::new(25, "Windows-1258"),
StringMatchCandidate::new(26, "Windows-949"),
StringMatchCandidate::new(27, "EUC-JP"),
StringMatchCandidate::new(28, "ISO 2022-JP"),
StringMatchCandidate::new(29, "GBK"),
StringMatchCandidate::new(30, "GB18030"),
StringMatchCandidate::new(31, "Big5"),
];
let current_selection = if let Some(encoding) = encoding {
encodings
.iter()
.position(|e| encoding.name() == e.string)
.unwrap_or_default()
} else {
0
};
EncodingSelectorDelegate {
current_selection,
encodings,
matches: Vec::new(),
tx: Some(tx),
}
}
}
impl PickerDelegate for EncodingSelectorDelegate {
    type ListItem = ListItem;

    fn match_count(&self) -> usize {
        self.matches.len()
    }

    fn selected_index(&self) -> usize {
        self.current_selection
    }

    fn set_selected_index(&mut self, ix: usize, _: &mut Window, _: &mut Context<Picker<Self>>) {
        self.current_selection = ix;
    }

    fn placeholder_text(&self, _window: &mut Window, _cx: &mut ui::App) -> std::sync::Arc<str> {
        "Select an encoding...".into()
    }

    fn update_matches(
        &mut self,
        query: String,
        window: &mut Window,
        cx: &mut Context<Picker<Self>>,
    ) -> Task<()> {
        let executor = cx.background_executor().clone();
        let encodings = self.encodings.clone();
        cx.spawn_in(window, async move |picker, cx| {
            // An empty query shows the full list in its defined order;
            // otherwise run a fuzzy match over the candidates.
            let matches: Vec<StringMatch> = if query.is_empty() {
                encodings
                    .into_iter()
                    .enumerate()
                    .map(|(index, value)| StringMatch {
                        candidate_id: index,
                        score: 0.0,
                        positions: Vec::new(),
                        string: value.string,
                    })
                    .collect()
            } else {
                fuzzy::match_strings(
                    &encodings,
                    &query,
                    true,
                    false,
                    30,
                    &AtomicBool::new(false),
                    executor,
                )
                .await
            };
            picker
                .update(cx, |picker, cx| {
                    let delegate = &mut picker.delegate;
                    delegate.matches = matches;
                    // Keep the selection in bounds after the list shrinks.
                    delegate.current_selection = delegate
                        .current_selection
                        .min(delegate.matches.len().saturating_sub(1));
                    cx.notify();
                })
                .log_err();
        })
    }

    fn confirm(&mut self, _: bool, window: &mut Window, cx: &mut Context<Picker<Self>>) {
        // Guard against confirming with an empty match list (e.g. a query
        // with no results), which would otherwise panic on indexing.
        if let Some(selected) = self.matches.get(self.current_selection) {
            let encoding = Encoding::from_name(&selected.string);
            if let Some(tx) = self.tx.take() {
                tx.send(encoding).log_err();
            }
        }
        self.dismissed(window, cx);
    }

    fn dismissed(&mut self, _: &mut Window, cx: &mut Context<Picker<Self>>) {
        cx.emit(DismissEvent);
    }

    fn render_match(
        &self,
        ix: usize,
        _: bool,
        _: &mut Window,
        _: &mut Context<Picker<Self>>,
    ) -> Option<Self::ListItem> {
        Some(
            ListItem::new(ix)
                .child(HighlightedLabel::new(
                    &self.matches[ix].string,
                    self.matches[ix].positions.clone(),
                ))
                .spacing(ListItemSpacing::Sparse),
        )
    }
}

View File

@@ -23,6 +23,7 @@ async-trait.workspace = true
client.workspace = true
collections.workspace = true
dap.workspace = true
encodings.workspace = true
extension.workspace = true
fs.workspace = true
futures.workspace = true

View File

@@ -12,6 +12,7 @@ use async_tar::Archive;
use client::ExtensionProvides;
use client::{Client, ExtensionMetadata, GetExtensionsResponse, proto, telemetry::Telemetry};
use collections::{BTreeMap, BTreeSet, HashMap, HashSet, btree_map};
use encodings::Encoding;
pub use extension::ExtensionManifest;
use extension::extension_builder::{CompileExtensionOptions, ExtensionBuilder};
use extension::{
@@ -1506,6 +1507,7 @@ impl ExtensionStore {
&index_path,
&Rope::from_str(&index_json, &executor),
Default::default(),
Encoding::default(),
)
.await
.context("failed to save extension index")
@@ -1678,6 +1680,7 @@ impl ExtensionStore {
&tmp_dir.join(EXTENSION_TOML),
&Rope::from_str_small(&manifest_toml),
language::LineEnding::Unix,
Encoding::default(),
)
.await?;
} else {

View File

@@ -16,6 +16,7 @@ anyhow.workspace = true
async-tar.workspace = true
async-trait.workspace = true
collections.workspace = true
encodings.workspace = true
futures.workspace = true
git.workspace = true
gpui.workspace = true
@@ -33,6 +34,8 @@ tempfile.workspace = true
text.workspace = true
time.workspace = true
util.workspace = true
encoding = "0.2.33"
[target.'cfg(target_os = "macos")'.dependencies]
fsevent.workspace = true

View File

@@ -58,6 +58,7 @@ use smol::io::AsyncReadExt;
#[cfg(any(test, feature = "test-support"))]
use std::ffi::OsStr;
use encodings::{Encoding, EncodingOptions};
#[cfg(any(test, feature = "test-support"))]
pub use fake_git_repo::{LOAD_HEAD_TEXT_TASK, LOAD_INDEX_TEXT_TASK};
@@ -115,9 +116,25 @@ pub trait Fs: Send + Sync {
async fn load(&self, path: &Path) -> Result<String> {
Ok(String::from_utf8(self.load_bytes(path).await?)?)
}
async fn load_with_encoding(
&self,
path: &Path,
options: &EncodingOptions,
) -> Result<(Encoding, String)> {
let bytes = self.load_bytes(path).await?;
options.process(bytes)
}
async fn load_bytes(&self, path: &Path) -> Result<Vec<u8>>;
async fn atomic_write(&self, path: PathBuf, text: String) -> Result<()>;
async fn save(&self, path: &Path, text: &Rope, line_ending: LineEnding) -> Result<()>;
async fn save(
&self,
path: &Path,
text: &Rope,
line_ending: LineEnding,
encoding: Encoding,
) -> Result<()>;
async fn write(&self, path: &Path, content: &[u8]) -> Result<()>;
async fn canonicalize(&self, path: &Path) -> Result<PathBuf>;
async fn is_file(&self, path: &Path) -> bool;
@@ -599,9 +616,8 @@ impl Fs for RealFs {
async fn load(&self, path: &Path) -> Result<String> {
let path = path.to_path_buf();
self.executor
.spawn(async move { Ok(std::fs::read_to_string(path)?) })
.await
let text = smol::unblock(|| std::fs::read_to_string(path)).await?;
Ok(text)
}
async fn load_bytes(&self, path: &Path) -> Result<Vec<u8>> {
@@ -659,16 +675,28 @@ impl Fs for RealFs {
Ok(())
}
async fn save(&self, path: &Path, text: &Rope, line_ending: LineEnding) -> Result<()> {
async fn save(
&self,
path: &Path,
text: &Rope,
line_ending: LineEnding,
encoding: Encoding,
) -> Result<()> {
let buffer_size = text.summary().len.min(10 * 1024);
if let Some(path) = path.parent() {
self.create_dir(path).await?;
}
let file = smol::fs::File::create(path).await?;
let mut writer = smol::io::BufWriter::with_capacity(buffer_size, file);
for chunk in chunks(text, line_ending) {
writer.write_all(chunk.as_bytes()).await?;
if let Some(bom) = encoding.bom() {
writer.write_all(bom).await?;
}
for chunk in chunks(text, line_ending) {
writer.write_all(&encoding.encode_chunk(chunk)?).await?
}
writer.flush().await?;
Ok(())
}
@@ -2380,14 +2408,25 @@ impl Fs for FakeFs {
Ok(())
}
async fn save(&self, path: &Path, text: &Rope, line_ending: LineEnding) -> Result<()> {
async fn save(
&self,
path: &Path,
text: &Rope,
line_ending: LineEnding,
encoding: Encoding,
) -> Result<()> {
self.simulate_random_delay().await;
let path = normalize_path(path);
let content = chunks(text, line_ending).collect::<String>();
if let Some(path) = path.parent() {
self.create_dir(path).await?;
}
self.write_file_internal(path, content.into_bytes(), false)?;
let mut bytes = Vec::new();
if let Some(bom) = encoding.bom() {
bytes.extend_from_slice(bom);
}
bytes.extend_from_slice(&encoding.encode_chunk(&content)?);
self.write_file_internal(path, bytes, false)?;
Ok(())
}

View File

@@ -29,6 +29,7 @@ command_palette_hooks.workspace = true
component.workspace = true
db.workspace = true
editor.workspace = true
encodings.workspace = true
futures.workspace = true
fuzzy.workspace = true
git.workspace = true

View File

@@ -358,6 +358,7 @@ impl Render for FileDiffView {
mod tests {
use super::*;
use editor::test::editor_test_context::assert_state_with_diff;
use encodings::Encoding;
use gpui::TestAppContext;
use language::Rope;
use project::{FakeFs, Fs, Project};
@@ -440,6 +441,7 @@ mod tests {
",
)),
Default::default(),
Encoding::default(),
)
.await
.unwrap();
@@ -474,6 +476,7 @@ mod tests {
",
)),
Default::default(),
Encoding::default(),
)
.await
.unwrap();

View File

@@ -30,7 +30,9 @@ anyhow.workspace = true
async-trait.workspace = true
clock.workspace = true
collections.workspace = true
diffy = "0.4.2"
ec4rs.workspace = true
encodings.workspace = true
fs.workspace = true
futures.workspace = true
fuzzy.workspace = true
@@ -67,7 +69,7 @@ unicase = "2.6"
util.workspace = true
watch.workspace = true
zlog.workspace = true
diffy = "0.4.2"
encoding = "0.2.33"
[dev-dependencies]
collections = { workspace = true, features = ["test-support"] }

View File

@@ -21,6 +21,7 @@ use anyhow::{Context as _, Result};
use clock::Lamport;
pub use clock::ReplicaId;
use collections::HashMap;
use encodings::{Encoding, EncodingOptions};
use fs::MTime;
use futures::channel::oneshot;
use gpui::{
@@ -126,6 +127,7 @@ pub struct Buffer {
has_unsaved_edits: Cell<(clock::Global, bool)>,
change_bits: Vec<rc::Weak<Cell<bool>>>,
_subscriptions: Vec<gpui::Subscription>,
encoding: Encoding,
}
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
@@ -330,6 +332,8 @@ pub enum BufferEvent {
DiagnosticsUpdated,
/// The buffer gained or lost editing capabilities.
CapabilityChanged,
/// The buffer's encoding was changed.
EncodingChanged,
}
/// The file associated with a buffer.
@@ -371,6 +375,10 @@ pub trait File: Send + Sync + Any {
/// Return whether Zed considers this to be a private file.
fn is_private(&self) -> bool;
/// The encoding of the file's contents, if known.
///
/// NOTE(review): the default implementation panics via `unimplemented!()` —
/// implementors that can be asked for an encoding must override this; confirm
/// all call sites only invoke it on such implementations.
fn encoding(&self) -> Option<Arc<Encoding>> {
unimplemented!()
}
}
/// The file's storage status - whether it's stored (`Present`), and if so when it was last
@@ -412,7 +420,7 @@ pub trait LocalFile: File {
fn abs_path(&self, cx: &App) -> PathBuf;
/// Loads the file contents from disk and returns them as a UTF-8 encoded string.
fn load(&self, cx: &App) -> Task<Result<String>>;
fn load(&self, cx: &App, options: EncodingOptions) -> Task<Result<(Encoding, String)>>;
/// Loads the file's contents from disk.
fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>>;
@@ -839,6 +847,18 @@ impl Buffer {
)
}
/// Replace the text buffer. This function is in contrast to `set_text` in that it does not
/// change the buffer's editing state: the saved version is advanced to the new
/// contents, so the buffer reports as clean (not dirty) afterwards.
pub fn replace_text_buffer(&mut self, new: TextBuffer, cx: &mut Context<Self>) {
self.text = new;
// Adopt the new contents as the saved state so the buffer is not marked dirty.
self.saved_version = self.version.clone();
self.has_unsaved_edits.set((self.version.clone(), false));
self.was_changed();
cx.emit(BufferEvent::DirtyChanged);
cx.notify();
}
/// Create a new buffer with the given base text that has proper line endings and other normalization applied.
pub fn local_normalized(
base_text_normalized: Rope,
@@ -1006,6 +1026,7 @@ impl Buffer {
has_conflict: false,
change_bits: Default::default(),
_subscriptions: Vec::new(),
encoding: Encoding::default(),
}
}
@@ -1341,18 +1362,27 @@ impl Buffer {
/// Reloads the contents of the buffer from disk.
pub fn reload(&mut self, cx: &Context<Self>) -> oneshot::Receiver<Option<Transaction>> {
let (tx, rx) = futures::channel::oneshot::channel();
let prev_version = self.text.version();
self.reload_task = Some(cx.spawn(async move |this, cx| {
let Some((new_mtime, new_text)) = this.update(cx, |this, cx| {
let file = this.file.as_ref()?.as_local()?;
Some((file.disk_state().mtime(), file.load(cx)))
Some((file.disk_state().mtime(), {
file.load(
cx,
EncodingOptions {
expected: this.encoding,
auto_detect: false,
},
)
}))
})?
else {
return Ok(());
};
let new_text = new_text.await?;
let (new_encoding, new_text) = new_text.await?;
let diff = this
.update(cx, |this, cx| this.diff(new_text.clone(), cx))?
.await;
@@ -1362,6 +1392,9 @@ impl Buffer {
this.apply_diff(diff, cx);
tx.send(this.finalize_last_transaction().cloned()).ok();
this.has_conflict = false;
if new_encoding != this.encoding {
this.set_encoding(new_encoding, cx);
}
this.did_reload(this.version(), this.line_ending(), new_mtime, cx);
} else {
if !diff.edits.is_empty()
@@ -1399,6 +1432,9 @@ impl Buffer {
cx.notify();
}
/// Replace the [`File`] backing this buffer without emitting any events;
/// contrast with `file_updated`, which is used when the file changed on disk.
pub fn replace_file(&mut self, new_file: Arc<dyn File>) {
self.file = Some(new_file);
}
/// Updates the [`File`] backing this buffer. This should be called when
/// the file has changed or has been deleted.
pub fn file_updated(&mut self, new_file: Arc<dyn File>, cx: &mut Context<Self>) {
@@ -2899,6 +2935,16 @@ impl Buffer {
pub fn preserve_preview(&self) -> bool {
!self.has_edits_since(&self.preview_version)
}
/// Returns the encoding currently associated with this buffer.
pub fn encoding(&self) -> Encoding {
self.encoding
}
/// Update the buffer's encoding and notify observers via
/// `BufferEvent::EncodingChanged`.
pub fn set_encoding(&mut self, encoding: Encoding, cx: &mut Context<Self>) {
self.encoding = encoding;
cx.emit(BufferEvent::EncodingChanged);
}
}
#[doc(hidden)]
@@ -5220,7 +5266,7 @@ impl LocalFile for TestFile {
.join(self.path.as_std_path())
}
fn load(&self, _cx: &App) -> Task<Result<String>> {
fn load(&self, _cx: &App, _options: EncodingOptions) -> Task<Result<(Encoding, String)>> {
unimplemented!()
}

View File

@@ -56,7 +56,9 @@ impl ContextProvider for JsonTaskProvider {
cx.spawn(async move |cx| {
let contents = file
.worktree
.update(cx, |this, cx| this.load_file(&file.path, cx))
.update(cx, |this, cx| {
this.load_file(&file.path, &Default::default(), cx)
})
.ok()?
.await
.ok()?;

View File

@@ -1730,7 +1730,9 @@ impl MultiBuffer {
self.capability = buffer.read(cx).capability();
return;
}
BufferEvent::Operation { .. } | BufferEvent::ReloadNeeded => return,
BufferEvent::Operation { .. }
| BufferEvent::ReloadNeeded
| BufferEvent::EncodingChanged => return,
});
}

View File

@@ -39,6 +39,7 @@ clock.workspace = true
collections.workspace = true
context_server.workspace = true
dap.workspace = true
encodings.workspace = true
extension.workspace = true
fancy-regex.workspace = true
fs.workspace = true
@@ -90,6 +91,7 @@ worktree.workspace = true
zeroize.workspace = true
zlog.workspace = true
[dev-dependencies]
client = { workspace = true, features = ["test-support"] }
collections = { workspace = true, features = ["test-support"] }

View File

@@ -7,8 +7,10 @@ use crate::{
use anyhow::{Context as _, Result, anyhow};
use client::Client;
use collections::{HashMap, HashSet, hash_map};
use encodings::EncodingOptions;
use fs::Fs;
use futures::{Future, FutureExt as _, StreamExt, channel::oneshot, future::Shared};
use futures::StreamExt;
use futures::{Future, FutureExt as _, channel::oneshot, future::Shared};
use gpui::{
App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Subscription, Task, WeakEntity,
};
@@ -26,7 +28,7 @@ use rpc::{
use smol::channel::Receiver;
use std::{io, pin::pin, sync::Arc, time::Instant};
use text::{BufferId, ReplicaId};
use util::{ResultExt as _, TryFutureExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
use util::{ResultExt as _, TryFutureExt, debug_panic, maybe, rel_path::RelPath};
use worktree::{File, PathChange, ProjectEntryId, Worktree, WorktreeId};
/// A set of open buffers.
@@ -387,6 +389,8 @@ impl LocalBufferStore {
let version = buffer.version();
let buffer_id = buffer.remote_id();
let file = buffer.file().cloned();
let encoding = buffer.encoding().clone();
if file
.as_ref()
.is_some_and(|file| file.disk_state() == DiskState::New)
@@ -395,7 +399,7 @@ impl LocalBufferStore {
}
let save = worktree.update(cx, |worktree, cx| {
worktree.write_file(path, text, line_ending, cx)
worktree.write_file(path.clone(), text, line_ending, encoding, cx)
});
cx.spawn(async move |this, cx| {
@@ -623,27 +627,43 @@ impl LocalBufferStore {
&self,
path: Arc<RelPath>,
worktree: Entity<Worktree>,
options: &EncodingOptions,
cx: &mut Context<BufferStore>,
) -> Task<Result<Entity<Buffer>>> {
let load_file = worktree.update(cx, |worktree, cx| worktree.load_file(path.as_ref(), cx));
let options = options.clone();
let load_buffer = worktree.update(cx, |worktree, cx| {
let reservation = cx.reserve_entity();
let buffer_id = BufferId::from(reservation.entity_id().as_non_zero_u64());
let load_file_task = worktree.load_file(path.as_ref(), &options, cx);
cx.spawn(async move |_, cx| {
let loaded_file = load_file_task.await?;
let background_executor = cx.background_executor().clone();
let buffer = cx.insert_entity(reservation, |cx| {
let mut buffer = Buffer::build(
text::Buffer::new(
ReplicaId::LOCAL,
buffer_id,
loaded_file.text,
&background_executor,
),
Some(loaded_file.file),
Capability::ReadWrite,
);
buffer.set_encoding(loaded_file.encoding, cx);
buffer
})?;
Ok(buffer)
})
});
cx.spawn(async move |this, cx| {
let path = path.clone();
let buffer = match load_file.await.with_context(|| {
format!("Could not open path: {}", path.display(PathStyle::local()))
}) {
Ok(loaded) => {
let reservation = cx.reserve_entity::<Buffer>()?;
let buffer_id = BufferId::from(reservation.entity_id().as_non_zero_u64());
let executor = cx.background_executor().clone();
let text_buffer = cx
.background_spawn(async move {
text::Buffer::new(ReplicaId::LOCAL, buffer_id, loaded.text, &executor)
})
.await;
cx.insert_entity(reservation, |_| {
Buffer::build(text_buffer, Some(loaded.file), Capability::ReadWrite)
})?
}
let buffer = match load_buffer.await {
Ok(buffer) => buffer,
Err(error) if is_not_found_error(&error) => cx.new(|cx| {
let buffer_id = BufferId::from(cx.entity_id().as_non_zero_u64());
let text_buffer = text::Buffer::new(
@@ -818,6 +838,7 @@ impl BufferStore {
pub fn open_buffer(
&mut self,
project_path: ProjectPath,
options: &EncodingOptions,
cx: &mut Context<Self>,
) -> Task<Result<Entity<Buffer>>> {
if let Some(buffer) = self.get_by_path(&project_path) {
@@ -841,7 +862,7 @@ impl BufferStore {
return Task::ready(Err(anyhow!("no such worktree")));
};
let load_buffer = match &self.state {
BufferStoreState::Local(this) => this.open_buffer(path, worktree, cx),
BufferStoreState::Local(this) => this.open_buffer(path, worktree, options, cx),
BufferStoreState::Remote(this) => this.open_buffer(path, worktree, cx),
};
@@ -1154,7 +1175,7 @@ impl BufferStore {
let buffers = this.update(cx, |this, cx| {
project_paths
.into_iter()
.map(|project_path| this.open_buffer(project_path, cx))
.map(|project_path| this.open_buffer(project_path, &Default::default(), cx))
.collect::<Vec<_>>()
})?;
for buffer_task in buffers {

View File

@@ -796,7 +796,7 @@ impl BreakpointStore {
worktree_id: worktree.read(cx).id(),
path: relative_path,
};
this.open_buffer(path, cx)
this.open_buffer(path, &Default::default(), cx)
})?
.await;
let Ok(buffer) = buffer else {

View File

@@ -1,118 +0,0 @@
use std::{path::Path, sync::Arc};
use gpui::{EventEmitter, FocusHandle, Focusable};
use ui::{
App, Button, ButtonCommon, ButtonStyle, Clickable, Context, FluentBuilder, InteractiveElement,
KeyBinding, Label, LabelCommon, LabelSize, ParentElement, Render, SharedString, Styled as _,
Window, h_flex, v_flex,
};
use zed_actions::workspace::OpenWithSystem;
use crate::Item;
/// A view to display when a certain buffer fails to open.
///
/// Rendered in place of the editor pane so the user still gets a tab
/// showing the failing path and an actionable error message.
#[derive(Debug)]
pub struct InvalidItemView {
    /// Which path was attempted to open.
    pub abs_path: Arc<Path>,
    /// The error message produced while opening the buffer.
    pub error: SharedString,
    // Whether the file lives on the local machine; gates the
    // "Open in Default App" button in `render`.
    is_local: bool,
    // Focus handle so this view can participate in keyboard focus.
    focus_handle: FocusHandle,
}
impl InvalidItemView {
pub fn new(
abs_path: &Path,
is_local: bool,
e: &anyhow::Error,
_: &mut Window,
cx: &mut App,
) -> Self {
Self {
is_local,
abs_path: Arc::from(abs_path),
error: format!("{}", e.root_cause()).into(),
focus_handle: cx.focus_handle(),
}
}
}
impl Item for InvalidItemView {
    type Event = ();

    /// Produces the tab title: the trailing `detail + 1` components of the
    /// path, or the full path when it has no more components than that.
    fn tab_content_text(&self, mut detail: usize, _: &App) -> SharedString {
        // Ensure we always render at least the filename.
        detail += 1;
        let full_path = self.abs_path.as_ref();
        // Walk `detail` levels up from the full path; whatever prefix is
        // left over gets stripped so only the trailing components remain.
        let mut prefix = full_path;
        while detail > 0 {
            match prefix.parent() {
                Some(parent) => {
                    prefix = parent;
                    detail -= 1;
                }
                None => break,
            }
        }
        // Ran out of parents before consuming `detail`: the path is short
        // enough to show in full.
        let shown = if detail > 0 {
            full_path
        } else {
            full_path.strip_prefix(prefix).unwrap_or(full_path)
        };
        SharedString::new(shown.to_string_lossy())
    }
}
// This view emits no events; the empty impl only satisfies the Item bound.
impl EventEmitter<()> for InvalidItemView {}

impl Focusable for InvalidItemView {
    fn focus_handle(&self, _: &App) -> FocusHandle {
        self.focus_handle.clone()
    }
}
impl Render for InvalidItemView {
    /// Renders a centered "Could not open file" panel with the error text,
    /// plus an "Open in Default App" button for local files.
    fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl gpui::IntoElement {
        // Cloned so the 'static click handler below can own the path.
        let abs_path = self.abs_path.clone();
        v_flex()
            .size_full()
            .track_focus(&self.focus_handle(cx))
            .flex_none()
            .justify_center()
            .overflow_hidden()
            // Key context lets keybindings scope actions to this view.
            .key_context("InvalidBuffer")
            .child(
                h_flex().size_full().justify_center().child(
                    v_flex()
                        .justify_center()
                        .gap_2()
                        .child(h_flex().justify_center().child("Could not open file"))
                        .child(
                            h_flex()
                                .justify_center()
                                .child(Label::new(self.error.clone()).size(LabelSize::Small)),
                        )
                        // Opening with the system handler only makes sense
                        // for files on the local machine.
                        .when(self.is_local, |contents| {
                            contents.child(
                                h_flex().justify_center().child(
                                    Button::new("open-with-system", "Open in Default App")
                                        .on_click(move |_, _, cx| {
                                            cx.open_with_system(&abs_path);
                                        })
                                        .style(ButtonStyle::Outlined)
                                        .key_binding(KeyBinding::for_action(
                                            &OpenWithSystem,
                                            window,
                                            cx,
                                        )),
                                ),
                            )
                        }),
                ),
            )
    }
}

View File

@@ -8336,7 +8336,7 @@ impl LspStore {
lsp_store
.update(cx, |lsp_store, cx| {
lsp_store.buffer_store().update(cx, |buffer_store, cx| {
buffer_store.open_buffer(project_path, cx)
buffer_store.open_buffer(project_path, &Default::default(), cx)
})
})?
.await

View File

@@ -91,7 +91,7 @@ pub fn cancel_flycheck(
let buffer = buffer_path.map(|buffer_path| {
project.update(cx, |project, cx| {
project.buffer_store().update(cx, |buffer_store, cx| {
buffer_store.open_buffer(buffer_path, cx)
buffer_store.open_buffer(buffer_path, &Default::default(), cx)
})
})
});
@@ -140,7 +140,7 @@ pub fn run_flycheck(
let buffer = buffer_path.map(|buffer_path| {
project.update(cx, |project, cx| {
project.buffer_store().update(cx, |buffer_store, cx| {
buffer_store.open_buffer(buffer_path, cx)
buffer_store.open_buffer(buffer_path, &Default::default(), cx)
})
})
});
@@ -198,7 +198,7 @@ pub fn clear_flycheck(
let buffer = buffer_path.map(|buffer_path| {
project.update(cx, |project, cx| {
project.buffer_store().update(cx, |buffer_store, cx| {
buffer_store.open_buffer(buffer_path, cx)
buffer_store.open_buffer(buffer_path, &Default::default(), cx)
})
})
});

View File

@@ -7,6 +7,7 @@ use std::{
use anyhow::{Context as _, Result, anyhow};
use collections::{HashMap, HashSet};
use encodings::Encoding;
use fs::Fs;
use futures::{
FutureExt,
@@ -981,10 +982,12 @@ async fn save_prettier_server_file(
executor: &BackgroundExecutor,
) -> anyhow::Result<()> {
let prettier_wrapper_path = default_prettier_dir().join(prettier::PRETTIER_SERVER_FILE);
let encoding = Encoding::default();
fs.save(
&prettier_wrapper_path,
&text::Rope::from_str(prettier::PRETTIER_SERVER_JS, executor),
text::LineEnding::Unix,
encoding,
)
.await
.with_context(|| {

View File

@@ -26,9 +26,12 @@ mod project_tests;
mod environment;
use buffer_diff::BufferDiff;
use context_server_store::ContextServerStore;
use encodings::Encoding;
pub use environment::ProjectEnvironmentEvent;
use git::repository::get_git_committer;
use git_store::{Repository, RepositoryId};
pub mod search_history;
mod yarn;
@@ -1664,6 +1667,7 @@ impl Project {
toolchain_store: None,
agent_location: None,
};
project.set_role(role, cx);
for worktree in worktrees {
project.add_worktree(&worktree, cx);
@@ -2712,7 +2716,7 @@ impl Project {
}
self.buffer_store.update(cx, |buffer_store, cx| {
buffer_store.open_buffer(path.into(), cx)
buffer_store.open_buffer(path.into(), &Default::default(), cx)
})
}
@@ -5394,7 +5398,9 @@ impl Project {
};
cx.spawn(async move |cx| {
let file = worktree
.update(cx, |worktree, cx| worktree.load_file(&rel_path, cx))?
.update(cx, |worktree, cx| {
worktree.load_file(&rel_path, &Default::default(), cx)
})?
.await
.context("Failed to load settings file")?;
@@ -5408,6 +5414,7 @@ impl Project {
rel_path.clone(),
Rope::from_str(&new_text, cx.background_executor()),
line_ending,
Encoding::default(),
cx,
)
})?

View File

@@ -0,0 +1,754 @@
use std::{
io::{BufRead, BufReader},
path::Path,
pin::pin,
sync::{
Arc,
atomic::{AtomicUsize, Ordering},
},
};
use anyhow::Context;
use collections::HashSet;
use fs::Fs;
use futures::{SinkExt, StreamExt, select_biased, stream::FuturesOrdered};
use gpui::{App, AppContext, AsyncApp, Entity, Task};
use language::{Buffer, BufferSnapshot};
use parking_lot::Mutex;
use postage::oneshot;
use rpc::{AnyProtoClient, proto};
use smol::{
channel::{Receiver, Sender, bounded, unbounded},
future::FutureExt,
};
use text::BufferId;
use util::{ResultExt, maybe, paths::compare_rel_paths};
use worktree::{Entry, ProjectEntryId, Snapshot, Worktree};
use crate::{
Project, ProjectItem, ProjectPath, RemotelyCreatedModels,
buffer_store::BufferStore,
search::{SearchQuery, SearchResult},
worktree_store::WorktreeStore,
};
/// A single project-wide search run, pairing a candidate-finding strategy
/// (`kind`) with the stores it draws buffers and paths from.
pub struct Search {
    // Source of already-open buffers (searched in memory).
    buffer_store: Entity<BufferStore>,
    // Source of on-disk paths to scan.
    worktree_store: Entity<WorktreeStore>,
    // Maximum number of matching files to report.
    limit: usize,
    kind: SearchKind,
}
/// Represents search setup, before it is actually kicked off with Search::into_results
enum SearchKind {
    /// Search for candidates by inspecting file contents on file system, avoiding loading the buffer unless we know that a given file contains a match.
    Local {
        fs: Arc<dyn Fs>,
        // Worktrees visible at the time the search was created.
        worktrees: Vec<Entity<Worktree>>,
    },
    /// Query remote host for candidates. As of writing, the host runs a local search in "buffers with matches only" mode.
    Remote {
        client: AnyProtoClient,
        // Remote project id used in the candidates RPC request.
        remote_id: u64,
        // Keeps remotely-created buffer/worktree models alive for the
        // duration of the request.
        models: Arc<Mutex<RemotelyCreatedModels>>,
    },
    /// Run search against a known set of candidates. Even when working with a remote host, this won't round-trip to host.
    OpenBuffersOnly,
}
/// Represents results of project search and allows one to either obtain match positions OR
/// just the handles to buffers that may match the search. Grabbing the handles is cheaper than obtaining full match positions, because in that case we'll look for
/// at most one match in each file.
#[must_use]
pub struct SearchResultsHandle {
    // Stream of full results (match ranges per buffer, limit notices).
    results: Receiver<SearchResult>,
    // Stream of buffers known to contain at least one match.
    matching_buffers: Receiver<Entity<Buffer>>,
    // Deferred search start-up; nothing runs until one of the accessors
    // invokes this.
    trigger_search: Box<dyn FnOnce(&mut App) -> Task<()> + Send + Sync>,
}
impl SearchResultsHandle {
    /// Kicks off the search and returns the stream of full results
    /// (match ranges per buffer, plus limit-reached notifications).
    pub fn results(self, cx: &mut App) -> Receiver<SearchResult> {
        let search_task = (self.trigger_search)(cx);
        search_task.detach();
        self.results
    }

    /// Kicks off the search and returns only the buffers containing at
    /// least one match; cheaper than requesting full results.
    pub fn matching_buffers(self, cx: &mut App) -> Receiver<Entity<Buffer>> {
        let search_task = (self.trigger_search)(cx);
        search_task.detach();
        self.matching_buffers
    }
}
/// Per-worker view of the candidate-finding pipeline; cloned into each
/// background worker so channel endpoints close only when all workers drop
/// theirs.
#[derive(Clone)]
enum FindSearchCandidates {
    Local {
        fs: Arc<dyn Fs>,
        /// Start off with all paths in project and filter them based on:
        /// - Include filters
        /// - Exclude filters
        /// - Only open buffers
        /// - Scan ignored files
        /// Put another way: filter out files that can't match (without looking at file contents)
        input_paths_rx: Receiver<InputPath>,
        /// After that, if the buffer is not yet loaded, we'll figure out if it contains at least one match
        /// based on disk contents of a buffer. This step is not performed for buffers we already have in memory.
        confirm_contents_will_match_tx: Sender<MatchingEntry>,
        confirm_contents_will_match_rx: Receiver<MatchingEntry>,
        /// Of those that contain at least one match (or are already in memory), look for rest of matches (and figure out their ranges).
        /// But wait - first, we need to go back to the main thread to open a buffer (& create an entity for it).
        get_buffer_for_full_scan_tx: Sender<ProjectPath>,
    },
    Remote,
    OpenBuffersOnly,
}
impl Search {
    /// Creates a search that scans the file system directly, loading buffers
    /// only for files known to contain at least one match.
    pub fn local(
        fs: Arc<dyn Fs>,
        buffer_store: Entity<BufferStore>,
        worktree_store: Entity<WorktreeStore>,
        limit: usize,
        cx: &mut App,
    ) -> Self {
        let worktrees = worktree_store.read(cx).visible_worktrees(cx).collect();
        Self {
            kind: SearchKind::Local { fs, worktrees },
            buffer_store,
            worktree_store,
            limit,
        }
    }

    /// Creates a search that asks the remote host for candidate buffers.
    pub(crate) fn remote(
        buffer_store: Entity<BufferStore>,
        worktree_store: Entity<WorktreeStore>,
        limit: usize,
        client_state: (AnyProtoClient, u64, Arc<Mutex<RemotelyCreatedModels>>),
    ) -> Self {
        Self {
            kind: SearchKind::Remote {
                client: client_state.0,
                remote_id: client_state.1,
                models: client_state.2,
            },
            buffer_store,
            worktree_store,
            limit,
        }
    }

    /// Creates a search restricted to buffers that are already open.
    pub(crate) fn open_buffers_only(
        buffer_store: Entity<BufferStore>,
        worktree_store: Entity<WorktreeStore>,
        limit: usize,
    ) -> Self {
        Self {
            kind: SearchKind::OpenBuffersOnly,
            buffer_store,
            worktree_store,
            limit,
        }
    }

    pub(crate) const MAX_SEARCH_RESULT_FILES: usize = 5_000;
    pub(crate) const MAX_SEARCH_RESULT_RANGES: usize = 10_000;

    /// Prepares a project search run. The resulting [`SearchResultsHandle`] has to be used to specify whether you're interested in matching buffers
    /// or full search results.
    pub fn into_handle(mut self, query: SearchQuery, cx: &mut App) -> SearchResultsHandle {
        let mut open_buffers = HashSet::default();
        let mut unnamed_buffers = Vec::new();
        const MAX_CONCURRENT_BUFFER_OPENS: usize = 64;
        let buffers = self.buffer_store.read(cx);
        for handle in buffers.buffers() {
            let buffer = handle.read(cx);
            if !buffers.is_searchable(&buffer.remote_id()) {
                continue;
            } else if let Some(entry_id) = buffer.entry_id(cx) {
                open_buffers.insert(entry_id);
            } else {
                // Unnamed buffers are searched unconditionally, so each one
                // consumes a slot from the file budget up front.
                // (Previously `self.limit -= self.limit.saturating_sub(1)`,
                // which collapsed any positive limit straight to 1.)
                self.limit = self.limit.saturating_sub(1);
                unnamed_buffers.push(handle)
            };
        }
        let executor = cx.background_executor().clone();
        let (tx, rx) = unbounded();
        let (grab_buffer_snapshot_tx, grab_buffer_snapshot_rx) = unbounded();
        let matching_buffers = grab_buffer_snapshot_rx.clone();
        let trigger_search = Box::new(move |cx: &mut App| {
            cx.spawn(async move |cx| {
                // Unnamed buffers skip candidate filtering entirely.
                for buffer in unnamed_buffers {
                    _ = grab_buffer_snapshot_tx.send(buffer).await;
                }
                let (find_all_matches_tx, find_all_matches_rx) =
                    bounded(MAX_CONCURRENT_BUFFER_OPENS);
                let (candidate_searcher, tasks) = match self.kind {
                    SearchKind::OpenBuffersOnly => {
                        let Ok(open_buffers) = cx.update(|cx| self.all_loaded_buffers(&query, cx))
                        else {
                            return;
                        };
                        let fill_requests = cx
                            .background_spawn(async move {
                                for buffer in open_buffers {
                                    if grab_buffer_snapshot_tx.send(buffer).await.is_err() {
                                        return;
                                    }
                                }
                            })
                            .boxed_local();
                        (FindSearchCandidates::OpenBuffersOnly, vec![fill_requests])
                    }
                    SearchKind::Local {
                        fs,
                        ref mut worktrees,
                    } => {
                        let (get_buffer_for_full_scan_tx, get_buffer_for_full_scan_rx) =
                            unbounded();
                        let (confirm_contents_will_match_tx, confirm_contents_will_match_rx) =
                            bounded(64);
                        let (sorted_search_results_tx, sorted_search_results_rx) = unbounded();
                        let (input_paths_tx, input_paths_rx) = unbounded();
                        let tasks = vec![
                            cx.spawn(Self::provide_search_paths(
                                std::mem::take(worktrees),
                                query.include_ignored(),
                                input_paths_tx,
                                sorted_search_results_tx,
                            ))
                            .boxed_local(),
                            Self::open_buffers(
                                &self.buffer_store,
                                get_buffer_for_full_scan_rx,
                                grab_buffer_snapshot_tx,
                                cx.clone(),
                            )
                            .boxed_local(),
                            cx.background_spawn(Self::maintain_sorted_search_results(
                                sorted_search_results_rx,
                                get_buffer_for_full_scan_tx.clone(),
                                self.limit,
                            ))
                            .boxed_local(),
                        ];
                        (
                            FindSearchCandidates::Local {
                                fs,
                                get_buffer_for_full_scan_tx,
                                confirm_contents_will_match_tx,
                                confirm_contents_will_match_rx,
                                input_paths_rx,
                            },
                            tasks,
                        )
                    }
                    SearchKind::Remote {
                        client,
                        remote_id,
                        models,
                    } => {
                        let request = client.request(proto::FindSearchCandidates {
                            project_id: remote_id,
                            query: Some(query.to_proto()),
                            limit: self.limit as _,
                        });
                        // Keep remotely-created models alive until the
                        // response is fully consumed.
                        let Ok(guard) = cx.update(|cx| {
                            Project::retain_remotely_created_models_impl(
                                &models,
                                &self.buffer_store,
                                &self.worktree_store,
                                cx,
                            )
                        }) else {
                            return;
                        };
                        let buffer_store = self.buffer_store.downgrade();
                        let issue_remote_buffers_request = cx
                            .spawn(async move |cx| {
                                let _ = maybe!(async move {
                                    let response = request.await?;
                                    for buffer_id in response.buffer_ids {
                                        let buffer_id = BufferId::new(buffer_id)?;
                                        let buffer = buffer_store
                                            .update(cx, |buffer_store, cx| {
                                                buffer_store.wait_for_remote_buffer(buffer_id, cx)
                                            })?
                                            .await?;
                                        let _ = grab_buffer_snapshot_tx.send(buffer).await;
                                    }
                                    drop(guard);
                                    anyhow::Ok(())
                                })
                                .await
                                .log_err();
                            })
                            .boxed_local();
                        (
                            FindSearchCandidates::Remote,
                            vec![issue_remote_buffers_request],
                        )
                    }
                };
                let matches_count = AtomicUsize::new(0);
                let matched_buffer_count = AtomicUsize::new(0);
                let worker_pool = executor.scoped(|scope| {
                    let num_cpus = executor.num_cpus();
                    assert!(num_cpus > 0);
                    // NOTE(review): with exactly one CPU this spawns zero
                    // workers, so nothing would consume the pipeline —
                    // confirm the background executor always reports >= 2.
                    for _ in 0..executor.num_cpus() - 1 {
                        let worker = Worker {
                            query: &query,
                            open_buffers: &open_buffers,
                            matched_buffer_count: &matched_buffer_count,
                            matches_count: &matches_count,
                            candidates: candidate_searcher.clone(),
                            find_all_matches_rx: find_all_matches_rx.clone(),
                            publish_matches: tx.clone(),
                        };
                        scope.spawn(worker.run());
                    }
                    // Drop the originals so each channel closes once every
                    // worker's clone is gone.
                    drop(tx);
                    drop(find_all_matches_rx);
                    drop(candidate_searcher);
                });
                let buffer_snapshots = Self::grab_buffer_snapshots(
                    grab_buffer_snapshot_rx,
                    find_all_matches_tx,
                    cx.clone(),
                );
                futures::future::join_all(
                    [worker_pool.boxed_local(), buffer_snapshots.boxed_local()]
                        .into_iter()
                        .chain(tasks),
                )
                .await;
            })
        });
        SearchResultsHandle {
            results: rx,
            matching_buffers,
            trigger_search,
        }
    }

    /// Feeds every file of every worktree into the pipeline, pairing each
    /// path with a oneshot that later reports whether the file matched, so
    /// results can be consumed in worktree order.
    fn provide_search_paths(
        worktrees: Vec<Entity<Worktree>>,
        include_ignored: bool,
        tx: Sender<InputPath>,
        results: Sender<oneshot::Receiver<ProjectPath>>,
    ) -> impl AsyncFnOnce(&mut AsyncApp) {
        async move |cx| {
            _ = maybe!(async move {
                for worktree in worktrees {
                    let (mut snapshot, worktree_settings) = worktree
                        .read_with(cx, |this, _| {
                            Some((this.snapshot(), this.as_local()?.settings()))
                        })?
                        .context("The worktree is not local")?;
                    if include_ignored {
                        // Pre-fetch all of the ignored directories as they're going to be searched.
                        let mut entries_to_refresh = vec![];
                        for entry in snapshot.entries(include_ignored, 0) {
                            if entry.is_ignored
                                && entry.kind.is_unloaded()
                                && !worktree_settings.is_path_excluded(&entry.path)
                            {
                                entries_to_refresh.push(entry.path.clone());
                            }
                        }
                        let barrier = worktree.update(cx, |this, _| {
                            let local = this.as_local_mut()?;
                            let barrier = entries_to_refresh
                                .into_iter()
                                .map(|path| local.add_path_prefix_to_scan(path).into_future())
                                .collect::<Vec<_>>();
                            Some(barrier)
                        })?;
                        if let Some(barriers) = barrier {
                            futures::future::join_all(barriers).await;
                        }
                        // Re-snapshot so the freshly scanned entries are visible.
                        snapshot = worktree.read_with(cx, |this, _| this.snapshot())?;
                    }
                    cx.background_executor()
                        .scoped(|scope| {
                            scope.spawn(async {
                                for entry in snapshot.files(include_ignored, 0) {
                                    let (should_scan_tx, should_scan_rx) = oneshot::channel();
                                    let Ok(_) = tx
                                        .send(InputPath {
                                            entry: entry.clone(),
                                            snapshot: snapshot.clone(),
                                            should_scan_tx,
                                        })
                                        .await
                                    else {
                                        return;
                                    };
                                    if results.send(should_scan_rx).await.is_err() {
                                        return;
                                    };
                                }
                            })
                        })
                        .await;
                }
                anyhow::Ok(())
            })
            .await;
        }
    }

    /// Forwards confirmed-matching paths for a full scan, in the original
    /// submission order, stopping once `limit` files have matched.
    async fn maintain_sorted_search_results(
        rx: Receiver<oneshot::Receiver<ProjectPath>>,
        paths_for_full_scan: Sender<ProjectPath>,
        limit: usize,
    ) {
        let mut rx = pin!(rx);
        let mut matched = 0;
        while let Some(mut next_path_result) = rx.next().await {
            let Some(successful_path) = next_path_result.next().await else {
                // This path did not produce a match, hence skip it.
                continue;
            };
            if paths_for_full_scan.send(successful_path).await.is_err() {
                return;
            };
            matched += 1;
            if matched >= limit {
                break;
            }
        }
    }

    /// Background workers cannot open buffers by themselves, hence main thread will do it on their behalf.
    async fn open_buffers(
        buffer_store: &Entity<BufferStore>,
        rx: Receiver<ProjectPath>,
        find_all_matches_tx: Sender<Entity<Buffer>>,
        mut cx: AsyncApp,
    ) {
        // Batch requests to amortize the main-thread round trip.
        let mut rx = pin!(rx.ready_chunks(64));
        _ = maybe!(async move {
            while let Some(requested_paths) = rx.next().await {
                let mut buffers = buffer_store.update(&mut cx, |this, cx| {
                    requested_paths
                        .into_iter()
                        // NOTE(review): this 5-argument `open_buffer` does not
                        // match the `(path, &EncodingOptions, cx)` signature
                        // used elsewhere in this change — confirm which
                        // overhauled signature is current.
                        .map(|path| this.open_buffer(path, None, false, true, cx))
                        .collect::<FuturesOrdered<_>>()
                })?;
                while let Some(buffer) = buffers.next().await {
                    if let Some(buffer) = buffer.log_err() {
                        find_all_matches_tx.send(buffer).await?;
                    }
                }
            }
            Result::<_, anyhow::Error>::Ok(())
        })
        .await;
    }

    /// Snapshots each opened buffer on the main thread and hands the pair to
    /// the background workers for the full match scan.
    async fn grab_buffer_snapshots(
        rx: Receiver<Entity<Buffer>>,
        find_all_matches_tx: Sender<(Entity<Buffer>, BufferSnapshot)>,
        mut cx: AsyncApp,
    ) {
        _ = maybe!(async move {
            while let Ok(buffer) = rx.recv().await {
                let snapshot = buffer.read_with(&mut cx, |this, _| this.snapshot())?;
                find_all_matches_tx.send((buffer, snapshot)).await?;
            }
            Result::<_, anyhow::Error>::Ok(())
        })
        .await;
    }

    /// Returns all open buffers eligible for this query (path filters and
    /// ignore rules applied), sorted by path with unnamed buffers first.
    fn all_loaded_buffers(&self, search_query: &SearchQuery, cx: &App) -> Vec<Entity<Buffer>> {
        let worktree_store = self.worktree_store.read(cx);
        let mut buffers = search_query
            .buffers()
            .into_iter()
            .flatten()
            .filter(|buffer| {
                let b = buffer.read(cx);
                if let Some(file) = b.file() {
                    if !search_query.match_path(file.path().as_std_path()) {
                        return false;
                    }
                    if !search_query.include_ignored()
                        && let Some(entry) = b
                            .entry_id(cx)
                            .and_then(|entry_id| worktree_store.entry_for_id(entry_id, cx))
                        && entry.is_ignored
                    {
                        return false;
                    }
                }
                true
            })
            .cloned()
            .collect::<Vec<_>>();
        buffers.sort_by(|a, b| {
            let a = a.read(cx);
            let b = b.read(cx);
            match (a.file(), b.file()) {
                (None, None) => a.remote_id().cmp(&b.remote_id()),
                (None, Some(_)) => std::cmp::Ordering::Less,
                (Some(_), None) => std::cmp::Ordering::Greater,
                (Some(a), Some(b)) => compare_rel_paths((a.path(), true), (b.path(), true)),
            }
        });
        buffers
    }
}
/// One background search worker; several of these run concurrently, each
/// pulling work items from the shared pipeline channels in `candidates`.
struct Worker<'search> {
    query: &'search SearchQuery,
    // Shared budget counters, checked against Search::MAX_SEARCH_RESULT_*.
    matched_buffer_count: &'search AtomicUsize,
    matches_count: &'search AtomicUsize,
    // Entries whose buffers are already open; these skip the on-disk
    // first-match confirmation step.
    open_buffers: &'search HashSet<ProjectEntryId>,
    candidates: FindSearchCandidates,
    /// Ok, we're back in background: run full scan & find all matches in a given buffer snapshot.
    find_all_matches_rx: Receiver<(Entity<Buffer>, BufferSnapshot)>,
    /// Cool, we have results; let's share them with the world.
    publish_matches: Sender<SearchResult>,
}
impl Worker<'_> {
    /// Drives all three pipeline stages (path filtering, first-match
    /// confirmation, full match scan) until every stage's channel closes.
    async fn run(mut self) {
        // Destructure the candidate pipeline; Remote/OpenBuffersOnly workers
        // get dummy channels so only the find-all-matches arm ever fires.
        let (
            input_paths_rx,
            confirm_contents_will_match_rx,
            mut confirm_contents_will_match_tx,
            mut get_buffer_for_full_scan_tx,
            fs,
        ) = match self.candidates {
            FindSearchCandidates::Local {
                fs,
                input_paths_rx,
                confirm_contents_will_match_rx,
                confirm_contents_will_match_tx,
                get_buffer_for_full_scan_tx,
            } => (
                input_paths_rx,
                confirm_contents_will_match_rx,
                confirm_contents_will_match_tx,
                get_buffer_for_full_scan_tx,
                Some(fs),
            ),
            FindSearchCandidates::Remote | FindSearchCandidates::OpenBuffersOnly => (
                unbounded().1,
                unbounded().1,
                unbounded().0,
                unbounded().0,
                None,
            ),
        };
        let mut find_all_matches = pin!(self.find_all_matches_rx.fuse());
        let mut find_first_match = pin!(confirm_contents_will_match_rx.fuse());
        let mut scan_path = pin!(input_paths_rx.fuse());
        loop {
            // Rebuilt each iteration because the tx fields below may be
            // swapped out for dummies as stages shut down.
            let handler = RequestHandler {
                query: self.query,
                open_entries: &self.open_buffers,
                fs: fs.as_deref(),
                matched_buffer_count: self.matched_buffer_count,
                matches_count: self.matches_count,
                confirm_contents_will_match_tx: &confirm_contents_will_match_tx,
                get_buffer_for_full_scan_tx: &get_buffer_for_full_scan_tx,
                publish_matches: &self.publish_matches,
            };
            // Whenever we notice that some step of a pipeline is closed, we don't want to close subsequent
            // steps straight away. Another worker might be about to produce a value that will
            // be pushed there, thus we'll replace current worker's pipe with a dummy one.
            // That way, we'll only ever close a next-stage channel when ALL workers do so.
            select_biased! {
                find_all_matches = find_all_matches.next() => {
                    if self.publish_matches.is_closed() {
                        break;
                    }
                    let Some(matches) = find_all_matches else {
                        self.publish_matches = bounded(1).0;
                        continue;
                    };
                    let result = handler.handle_find_all_matches(matches).await;
                    if let Some(_should_bail) = result {
                        self.publish_matches = bounded(1).0;
                        continue;
                    }
                },
                find_first_match = find_first_match.next() => {
                    if let Some(buffer_with_at_least_one_match) = find_first_match {
                        handler.handle_find_first_match(buffer_with_at_least_one_match).await;
                    } else {
                        get_buffer_for_full_scan_tx = bounded(1).0;
                    }
                },
                scan_path = scan_path.next() => {
                    if let Some(path_to_scan) = scan_path {
                        handler.handle_scan_path(path_to_scan).await;
                    } else {
                        // If we're the last worker to notice that this is not producing values, close the upstream.
                        confirm_contents_will_match_tx = bounded(1).0;
                    }
                }
                complete => {
                    break
                },
            }
        }
    }
}
/// Borrowed view of one worker's pipeline endpoints, rebuilt per loop
/// iteration in `Worker::run`; dispatches a single unit of search work.
struct RequestHandler<'worker> {
    query: &'worker SearchQuery,
    // None for Remote / OpenBuffersOnly searches, which never touch disk.
    fs: Option<&'worker dyn Fs>,
    open_entries: &'worker HashSet<ProjectEntryId>,
    // Shared budget counters, checked against Search::MAX_SEARCH_RESULT_*.
    matched_buffer_count: &'worker AtomicUsize,
    matches_count: &'worker AtomicUsize,
    confirm_contents_will_match_tx: &'worker Sender<MatchingEntry>,
    get_buffer_for_full_scan_tx: &'worker Sender<ProjectPath>,
    publish_matches: &'worker Sender<SearchResult>,
}

// Marker returned when the global result budget is exhausted.
struct LimitReached;
impl RequestHandler<'_> {
    /// Runs the query over a buffer snapshot and publishes every match range.
    ///
    /// Returns `Some(LimitReached)` once the global file/range budgets are
    /// exhausted, so the calling worker stops publishing.
    async fn handle_find_all_matches(
        &self,
        (buffer, snapshot): (Entity<Buffer>, BufferSnapshot),
    ) -> Option<LimitReached> {
        let ranges = self
            .query
            .search(&snapshot, None)
            .await
            .iter()
            .map(|range| snapshot.anchor_before(range.start)..snapshot.anchor_after(range.end))
            .collect::<Vec<_>>();
        let matched_ranges = ranges.len();
        // NOTE(review): fetch_add returns the value *before* the increment,
        // so each budget admits one extra batch past MAX before tripping —
        // confirm whether the limits are meant to be strict.
        if self.matched_buffer_count.fetch_add(1, Ordering::Release)
            > Search::MAX_SEARCH_RESULT_FILES
            || self
                .matches_count
                .fetch_add(matched_ranges, Ordering::Release)
                > Search::MAX_SEARCH_RESULT_RANGES
        {
            _ = self.publish_matches.send(SearchResult::LimitReached).await;
            Some(LimitReached)
        } else {
            _ = self
                .publish_matches
                .send(SearchResult::Buffer { buffer, ranges })
                .await;
            None
        }
    }

    /// Streams the on-disk contents of `entry` and, if the query matches at
    /// least once, forwards the path for a full in-buffer scan.
    async fn handle_find_first_match(&self, mut entry: MatchingEntry) {
        _ = maybe!(async move {
            let abs_path = entry.worktree_root.join(entry.path.path.as_std_path());
            let Some(file) = self
                .fs
                .context("Trying to query filesystem in remote project search")?
                .open_sync(&abs_path)
                .await
                .log_err()
            else {
                return anyhow::Ok(());
            };
            let mut file = BufReader::new(file);
            let file_start = file.fill_buf()?;
            // `error_len()` is Some only for a genuinely invalid sequence;
            // Err(None) means a char was merely truncated at the buffer
            // boundary, which we deliberately let through.
            if let Err(Some(starting_position)) =
                std::str::from_utf8(file_start).map_err(|e| e.error_len())
            {
                // Before attempting to match the file content, throw away files that have invalid UTF-8 sequences early on;
                // That way we can still match files in a streaming fashion without having look at "obviously binary" files.
                log::debug!(
                    "Invalid UTF-8 sequence in file {abs_path:?} at byte position {starting_position}"
                );
                return Ok(());
            }
            if self.query.detect(file).unwrap_or(false) {
                // Yes, we should scan the whole file.
                entry.should_scan_tx.send(entry.path).await?;
            }
            Ok(())
        })
        .await;
    }

    /// Applies path filters to one worktree entry and routes it either
    /// straight to a full scan (already-open buffer) or to the on-disk
    /// first-match confirmation step.
    async fn handle_scan_path(&self, req: InputPath) {
        _ = maybe!(async move {
            let InputPath {
                entry,
                snapshot,
                should_scan_tx,
            } = req;
            if entry.is_fifo || !entry.is_file() {
                return Ok(());
            }
            if self.query.filters_path() {
                let matched_path = if self.query.match_full_paths() {
                    let mut full_path = snapshot.root_name().as_std_path().to_owned();
                    full_path.push(entry.path.as_std_path());
                    self.query.match_path(&full_path)
                } else {
                    self.query.match_path(entry.path.as_std_path())
                };
                if !matched_path {
                    return Ok(());
                }
            }
            if self.open_entries.contains(&entry.id) {
                // The buffer is already in memory and that's the version we want to scan;
                // hence skip the dilly-dally and look for all matches straight away.
                self.get_buffer_for_full_scan_tx
                    .send(ProjectPath {
                        worktree_id: snapshot.id(),
                        path: entry.path.clone(),
                    })
                    .await?;
            } else {
                self.confirm_contents_will_match_tx
                    .send(MatchingEntry {
                        // Was `should_scan_tx: should_scan_tx` — redundant
                        // field-init shorthand applied.
                        should_scan_tx,
                        worktree_root: snapshot.abs_path().clone(),
                        path: ProjectPath {
                            worktree_id: snapshot.id(),
                            path: entry.path.clone(),
                        },
                    })
                    .await?;
            }
            anyhow::Ok(())
        })
        .await;
    }
}
/// A worktree entry awaiting path-level filtering, paired with a oneshot
/// that reports (in submission order) whether the file turned out to match.
struct InputPath {
    entry: Entry,
    // Worktree snapshot the entry was taken from.
    snapshot: Snapshot,
    should_scan_tx: oneshot::Sender<ProjectPath>,
}

/// A path that passed filtering and now needs its on-disk contents checked
/// for at least one match before a full buffer scan is scheduled.
struct MatchingEntry {
    // Absolute root of the containing worktree, used to build the full path.
    worktree_root: Arc<Path>,
    path: ProjectPath,
    should_scan_tx: oneshot::Sender<ProjectPath>,
}

View File

@@ -12,6 +12,7 @@ use buffer_diff::{
BufferDiffEvent, CALCULATE_DIFF_TASK, DiffHunkSecondaryStatus, DiffHunkStatus,
DiffHunkStatusKind, assert_hunks,
};
use encodings::Encoding;
use fs::FakeFs;
use futures::{StreamExt, future};
use git::{
@@ -1459,10 +1460,14 @@ async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppCon
)
.await
.unwrap();
let encoding = Encoding::default();
fs.save(
path!("/the-root/Cargo.lock").as_ref(),
&Rope::default(),
Default::default(),
encoding.clone(),
)
.await
.unwrap();
@@ -1470,6 +1475,7 @@ async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppCon
path!("/the-stdlib/LICENSE").as_ref(),
&Rope::default(),
Default::default(),
encoding.clone(),
)
.await
.unwrap();
@@ -1477,6 +1483,7 @@ async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppCon
path!("/the/stdlib/src/string.rs").as_ref(),
&Rope::default(),
Default::default(),
encoding,
)
.await
.unwrap();
@@ -3871,7 +3878,7 @@ async fn test_rename_file_to_new_directory(cx: &mut gpui::TestAppContext) {
assert_eq!(
worktree
.update(cx, |worktree, cx| {
worktree.load_file(rel_path("dir1/dir2/dir3/test.txt"), cx)
worktree.load_file(rel_path("dir1/dir2/dir3/test.txt"), &Default::default(), cx)
})
.await
.unwrap()
@@ -3918,7 +3925,7 @@ async fn test_rename_file_to_new_directory(cx: &mut gpui::TestAppContext) {
assert_eq!(
worktree
.update(cx, |worktree, cx| {
worktree.load_file(rel_path("dir1/dir2/test.txt"), cx)
worktree.load_file(rel_path("dir1/dir2/test.txt"), &Default::default(), cx)
})
.await
.unwrap()
@@ -4074,6 +4081,7 @@ async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext)
path!("/dir/file1").as_ref(),
&Rope::from_str("the first contents", cx.background_executor()),
Default::default(),
Default::default(),
)
.await
.unwrap();
@@ -4085,6 +4093,7 @@ async fn test_file_changes_multiple_times_on_disk(cx: &mut gpui::TestAppContext)
path!("/dir/file1").as_ref(),
&Rope::from_str("the second contents", cx.background_executor()),
Default::default(),
Default::default(),
)
.await
.unwrap();
@@ -4129,6 +4138,7 @@ async fn test_edit_buffer_while_it_reloads(cx: &mut gpui::TestAppContext) {
path!("/dir/file1").as_ref(),
&Rope::from_str("the first contents", cx.background_executor()),
Default::default(),
Default::default(),
)
.await
.unwrap();
@@ -4803,10 +4813,12 @@ async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
let (new_contents, new_offsets) =
marked_text_offsets("oneˇ\nthree ˇFOURˇ five\nsixtyˇ seven\n");
fs.save(
path!("/dir/the-file").as_ref(),
&Rope::from_str(new_contents.as_str(), cx.background_executor()),
LineEnding::Unix,
Default::default(),
)
.await
.unwrap();
@@ -4839,6 +4851,7 @@ async fn test_buffer_file_changes_on_disk(cx: &mut gpui::TestAppContext) {
path!("/dir/the-file").as_ref(),
&Rope::from_str("\n\n\nAAAA\naaa\nBB\nbbbbb\n", cx.background_executor()),
LineEnding::Unix,
Default::default(),
)
.await
.unwrap();
@@ -4891,6 +4904,7 @@ async fn test_buffer_line_endings(cx: &mut gpui::TestAppContext) {
path!("/dir/file1").as_ref(),
&Rope::from_str("aaa\nb\nc\n", cx.background_executor()),
LineEnding::Windows,
Default::default(),
)
.await
.unwrap();
@@ -8979,7 +8993,11 @@ async fn test_ignored_dirs_events(cx: &mut gpui::TestAppContext) {
let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
tree.flush_fs_events(cx).await;
tree.update(cx, |tree, cx| {
tree.load_file(rel_path("project/target/debug/important_text.txt"), cx)
tree.load_file(
rel_path("project/target/debug/important_text.txt"),
&Default::default(),
cx,
)
})
.await
.unwrap();
@@ -9140,7 +9158,7 @@ async fn test_odd_events_for_ignored_dirs(
let tree = project.read_with(cx, |project, cx| project.worktrees(cx).next().unwrap());
tree.update(cx, |tree, cx| {
tree.load_file(rel_path("target/debug/foo.txt"), cx)
tree.load_file(rel_path("target/debug/foo.txt"), &Default::default(), cx)
})
.await
.unwrap();

View File

@@ -28,6 +28,7 @@ clap.workspace = true
client.workspace = true
dap_adapters.workspace = true
debug_adapter_extension.workspace = true
encodings.workspace = true
env_logger.workspace = true
extension.workspace = true
extension_host.workspace = true

View File

@@ -506,7 +506,7 @@ impl HeadlessProject {
let (buffer_store, buffer) = this.update(&mut cx, |this, cx| {
let buffer_store = this.buffer_store.clone();
let buffer = this.buffer_store.update(cx, |buffer_store, cx| {
buffer_store.open_buffer(ProjectPath { worktree_id, path }, cx)
buffer_store.open_buffer(ProjectPath { worktree_id, path }, &Default::default(), cx)
});
anyhow::Ok((buffer_store, buffer))
})??;
@@ -597,6 +597,7 @@ impl HeadlessProject {
worktree_id: worktree.read(cx).id(),
path: path,
},
&Default::default(),
cx,
)
});

View File

@@ -6,16 +6,17 @@ use agent::{AgentTool, ReadFileTool, ReadFileToolInput, ToolCallEventStream};
use client::{Client, UserStore};
use clock::FakeSystemClock;
use collections::{HashMap, HashSet};
use language_model::LanguageModelToolResultContent;
use encodings::Encoding;
use extension::ExtensionHostProxy;
use fs::{FakeFs, Fs};
use gpui::{AppContext as _, Entity, SemanticVersion, SharedString, TestAppContext};
use gpui::{AppContext as _, Entity, SemanticVersion, TestAppContext};
use http_client::{BlockedHttpClient, FakeHttpClient};
use language::{
Buffer, FakeLspAdapter, LanguageConfig, LanguageMatcher, LanguageRegistry, LineEnding, Rope,
language_settings::{AllLanguageSettings, language_settings},
};
use language_model::LanguageModelToolResultContent;
use lsp::{CompletionContext, CompletionResponse, CompletionTriggerKind, LanguageServerName};
use node_runtime::NodeRuntime;
use project::{
@@ -34,6 +35,8 @@ use std::{
use unindent::Unindent as _;
use util::{path, rel_path::rel_path};
use gpui::SharedString;
#[gpui::test]
async fn test_basic_remote_editing(cx: &mut TestAppContext, server_cx: &mut TestAppContext) {
let fs = FakeFs::new(server_cx.executor());
@@ -122,6 +125,7 @@ async fn test_basic_remote_editing(cx: &mut TestAppContext, server_cx: &mut Test
path!("/code/project1/src/main.rs").as_ref(),
&Rope::from_str_small("fn main() {}"),
Default::default(),
Encoding::default(),
)
.await
.unwrap();
@@ -768,6 +772,7 @@ async fn test_remote_reload(cx: &mut TestAppContext, server_cx: &mut TestAppCont
&PathBuf::from(path!("/code/project1/src/lib.rs")),
&Rope::from_str_small("bangles"),
LineEnding::Unix,
Encoding::default(),
)
.await
.unwrap();
@@ -783,6 +788,7 @@ async fn test_remote_reload(cx: &mut TestAppContext, server_cx: &mut TestAppCont
&PathBuf::from(path!("/code/project1/src/lib.rs")),
&Rope::from_str_small("bloop"),
LineEnding::Unix,
Encoding::default(),
)
.await
.unwrap();

View File

@@ -54,6 +54,7 @@ vim_mode_setting.workspace = true
workspace.workspace = true
zed_actions.workspace = true
[dev-dependencies]
assets.workspace = true
command_palette.workspace = true

View File

@@ -35,6 +35,7 @@ clock.workspace = true
collections.workspace = true
component.workspace = true
db.workspace = true
encodings.workspace = true
fs.workspace = true
futures.workspace = true
gpui.workspace = true

View File

@@ -1,6 +1,6 @@
use std::{path::Path, sync::Arc};
use gpui::{EventEmitter, FocusHandle, Focusable};
use gpui::{EventEmitter, FocusHandle, Focusable, div};
use ui::{
App, Button, ButtonCommon, ButtonStyle, Clickable, Context, FluentBuilder, InteractiveElement,
KeyBinding, Label, LabelCommon, LabelSize, ParentElement, Render, SharedString, Styled as _,
@@ -77,6 +77,8 @@ impl Focusable for InvalidItemView {
impl Render for InvalidItemView {
fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl gpui::IntoElement {
let abs_path = self.abs_path.clone();
let path0 = self.abs_path.clone();
v_flex()
.size_full()
.track_focus(&self.focus_handle(cx))
@@ -91,21 +93,49 @@ impl Render for InvalidItemView {
.gap_2()
.child(h_flex().justify_center().child("Could not open file"))
.child(
h_flex()
.justify_center()
.child(Label::new(self.error.clone()).size(LabelSize::Small)),
h_flex().justify_center().child(
div()
.whitespace_normal()
.text_center()
.child(Label::new(self.error.clone()).size(LabelSize::Small)),
),
)
.when(self.is_local, |contents| {
contents.child(
h_flex().justify_center().child(
Button::new("open-with-system", "Open in Default App")
.on_click(move |_, _, cx| {
cx.open_with_system(&abs_path);
})
contents
.child(
h_flex().justify_center().child(
Button::new("open-with-system", "Open in Default App")
.on_click(move |_, _, cx| {
cx.open_with_system(&abs_path);
})
.style(ButtonStyle::Outlined)
.key_binding(KeyBinding::for_action(
&OpenWithSystem,
cx,
)),
),
)
.child(
h_flex().justify_center().child(
Button::new(
"open-with-encoding",
"Try a Different Encoding",
)
.style(ButtonStyle::Outlined)
.key_binding(KeyBinding::for_action(&OpenWithSystem, cx)),
),
)
.on_click(
move |_, window, cx| {
window.dispatch_action(
Box::new(
zed_actions::encodings_ui::OpenWithEncoding(
path0.clone(),
),
),
cx,
)
},
),
),
)
}),
),
)

View File

@@ -19,6 +19,7 @@ mod workspace_settings;
pub use crate::notifications::NotificationFrame;
pub use dock::Panel;
pub use path_list::PathList;
pub use toast_layer::{ToastAction, ToastLayer, ToastView};
@@ -30,6 +31,7 @@ use client::{
};
use collections::{HashMap, HashSet, hash_map};
use dock::{Dock, DockPosition, PanelButtons, PanelHandle, RESIZE_HANDLE_SIZE};
use futures::{
Future, FutureExt, StreamExt,
channel::{
@@ -644,6 +646,7 @@ impl ProjectItemRegistry {
self.build_project_item_for_path_fns
.push(|project, project_path, window, cx| {
let project_path = project_path.clone();
let is_file = project
.read(cx)
.entry_for_path(&project_path, cx)
@@ -1519,7 +1522,6 @@ impl Workspace {
serializable_items_tx,
_items_serializer,
session_id: Some(session_id),
scheduled_tasks: Vec::new(),
last_open_dock_positions: Vec::new(),
}
@@ -3562,7 +3564,7 @@ impl Workspace {
cx: &mut App,
) -> Task<Result<(Option<ProjectEntryId>, WorkspaceItemBuilder)>> {
let registry = cx.default_global::<ProjectItemRegistry>().clone();
registry.open_path(self.project(), &path, window, cx)
registry.open_path(&self.project, &path, window, cx)
}
pub fn find_project_item<T>(
@@ -7586,8 +7588,13 @@ pub fn create_and_open_local_file(
let fs = workspace.read_with(cx, |workspace, _| workspace.app_state().fs.clone())?;
if !fs.is_file(path).await {
fs.create_file(path, Default::default()).await?;
fs.save(path, &default_content(cx), Default::default())
.await?;
fs.save(
path,
&default_content(cx),
Default::default(),
Default::default(),
)
.await?;
}
let mut items = workspace

View File

@@ -27,6 +27,7 @@ anyhow.workspace = true
async-lock.workspace = true
clock.workspace = true
collections.workspace = true
encodings.workspace = true
fs.workspace = true
futures.workspace = true
fuzzy.workspace = true
@@ -47,6 +48,8 @@ smol.workspace = true
sum_tree.workspace = true
text.workspace = true
util.workspace = true
encoding = "0.2.33"
[dev-dependencies]
clock = { workspace = true, features = ["test-support"] }

View File

@@ -7,6 +7,7 @@ use ::ignore::gitignore::{Gitignore, GitignoreBuilder};
use anyhow::{Context as _, Result, anyhow};
use clock::ReplicaId;
use collections::{HashMap, HashSet, VecDeque};
use encodings::{Encoding, EncodingOptions};
use fs::{Fs, MTime, PathEvent, RemoveOptions, Watcher, copy_recursive, read_dir_items};
use futures::{
FutureExt as _, Stream, StreamExt,
@@ -99,6 +100,7 @@ pub enum CreatedEntry {
pub struct LoadedFile {
pub file: Arc<File>,
pub encoding: Encoding,
pub text: String,
}
@@ -703,9 +705,14 @@ impl Worktree {
}
}
pub fn load_file(&self, path: &RelPath, cx: &Context<Worktree>) -> Task<Result<LoadedFile>> {
pub fn load_file(
&self,
path: &RelPath,
options: &EncodingOptions,
cx: &Context<Worktree>,
) -> Task<Result<LoadedFile>> {
match self {
Worktree::Local(this) => this.load_file(path, cx),
Worktree::Local(this) => this.load_file(path, options, cx),
Worktree::Remote(_) => {
Task::ready(Err(anyhow!("remote worktrees can't yet load files")))
}
@@ -730,10 +737,11 @@ impl Worktree {
path: Arc<RelPath>,
text: Rope,
line_ending: LineEnding,
encoding: Encoding,
cx: &Context<Worktree>,
) -> Task<Result<Arc<File>>> {
match self {
Worktree::Local(this) => this.write_file(path, text, line_ending, cx),
Worktree::Local(this) => this.write_file(path, text, line_ending, encoding, cx),
Worktree::Remote(_) => {
Task::ready(Err(anyhow!("remote worktree can't yet write files")))
}
@@ -1311,12 +1319,18 @@ impl LocalWorktree {
})
}
fn load_file(&self, path: &RelPath, cx: &Context<Worktree>) -> Task<Result<LoadedFile>> {
fn load_file(
&self,
path: &RelPath,
options: &EncodingOptions,
cx: &Context<Worktree>,
) -> Task<Result<LoadedFile>> {
let path = Arc::from(path);
let abs_path = self.absolutize(&path);
let fs = self.fs.clone();
let entry = self.refresh_entry(path.clone(), None, cx);
let is_private = self.is_path_private(path.as_ref());
let options = options.clone();
let this = cx.weak_entity();
cx.background_spawn(async move {
@@ -1334,7 +1348,7 @@ impl LocalWorktree {
anyhow::bail!("File is too large to load");
}
}
let text = fs.load(&abs_path).await?;
let (encoding, text) = fs.load_with_encoding(&abs_path, &options).await?;
let worktree = this.upgrade().context("worktree was dropped")?;
let file = match entry.await? {
@@ -1362,7 +1376,11 @@ impl LocalWorktree {
}
};
Ok(LoadedFile { file, text })
Ok(LoadedFile {
file,
encoding,
text,
})
})
}
@@ -1445,6 +1463,7 @@ impl LocalWorktree {
path: Arc<RelPath>,
text: Rope,
line_ending: LineEnding,
encoding: Encoding,
cx: &Context<Worktree>,
) -> Task<Result<Arc<File>>> {
let fs = self.fs.clone();
@@ -1454,7 +1473,10 @@ impl LocalWorktree {
let write = cx.background_spawn({
let fs = fs.clone();
let abs_path = abs_path.clone();
async move { fs.save(&abs_path, &text, line_ending).await }
{
let encoding = encoding.clone();
async move { fs.save(&abs_path, &text, line_ending, encoding).await }
}
});
cx.spawn(async move |this, cx| {
@@ -3104,11 +3126,11 @@ impl language::LocalFile for File {
self.worktree.read(cx).absolutize(&self.path)
}
fn load(&self, cx: &App) -> Task<Result<String>> {
fn load(&self, cx: &App, encoding: EncodingOptions) -> Task<Result<(Encoding, String)>> {
let worktree = self.worktree.read(cx).as_local().unwrap();
let abs_path = worktree.absolutize(&self.path);
let fs = worktree.fs.clone();
cx.background_spawn(async move { fs.load(&abs_path).await })
cx.background_spawn(async move { fs.load_with_encoding(&abs_path, &encoding).await })
}
fn load_bytes(&self, cx: &App) -> Task<Result<Vec<u8>>> {

View File

@@ -467,7 +467,11 @@ async fn test_open_gitignored_files(cx: &mut TestAppContext) {
let prev_read_dir_count = fs.read_dir_call_count();
let loaded = tree
.update(cx, |tree, cx| {
tree.load_file(rel_path("one/node_modules/b/b1.js"), cx)
tree.load_file(
rel_path("one/node_modules/b/b1.js"),
&Default::default(),
cx,
)
})
.await
.unwrap();
@@ -507,7 +511,11 @@ async fn test_open_gitignored_files(cx: &mut TestAppContext) {
let prev_read_dir_count = fs.read_dir_call_count();
let loaded = tree
.update(cx, |tree, cx| {
tree.load_file(rel_path("one/node_modules/a/a2.js"), cx)
tree.load_file(
rel_path("one/node_modules/a/a2.js"),
&Default::default(),
cx,
)
})
.await
.unwrap();
@@ -651,6 +659,7 @@ async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) {
"/root/.gitignore".as_ref(),
&Rope::from_str("e", cx.background_executor()),
Default::default(),
Default::default(),
)
.await
.unwrap();
@@ -723,6 +732,7 @@ async fn test_write_file(cx: &mut TestAppContext) {
rel_path("tracked-dir/file.txt").into(),
Rope::from_str("hello", cx.background_executor()),
Default::default(),
Default::default(),
cx,
)
})
@@ -734,6 +744,7 @@ async fn test_write_file(cx: &mut TestAppContext) {
rel_path("ignored-dir/file.txt").into(),
Rope::from_str("world", cx.background_executor()),
Default::default(),
Default::default(),
cx,
)
})
@@ -1768,6 +1779,7 @@ fn randomly_mutate_worktree(
entry.path.clone(),
Rope::default(),
Default::default(),
Default::default(),
cx,
);
cx.background_spawn(async move {
@@ -1861,6 +1873,7 @@ async fn randomly_mutate_fs(
&ignore_path,
&Rope::from_str(ignore_contents.as_str(), executor),
Default::default(),
Default::default(),
)
.await
.unwrap();

View File

@@ -52,6 +52,8 @@ debugger_ui.workspace = true
diagnostics.workspace = true
editor.workspace = true
zeta2_tools.workspace = true
encodings.workspace = true
encodings_ui.workspace = true
env_logger.workspace = true
extension.workspace = true
extension_host.workspace = true

View File

@@ -630,6 +630,7 @@ pub fn main() {
zeta::init(cx);
inspector_ui::init(app_state.clone(), cx);
json_schema_store::init(cx);
encodings_ui::init(cx);
cx.observe_global::<SettingsStore>({
let http = app_state.client.http_client();

View File

@@ -443,6 +443,9 @@ pub fn initialize_workspace(
}
});
let encoding_indicator =
cx.new(|_cx| encodings_ui::EncodingIndicator::new(workspace.weak_handle()));
let cursor_position =
cx.new(|_| go_to_line::cursor_position::CursorPosition::new(workspace));
let line_ending_indicator =
@@ -458,6 +461,7 @@ pub fn initialize_workspace(
status_bar.add_right_item(line_ending_indicator, window, cx);
status_bar.add_right_item(vim_mode_indicator, window, cx);
status_bar.add_right_item(cursor_position, window, cx);
status_bar.add_right_item(encoding_indicator, window, cx);
status_bar.add_right_item(image_info, window, cx);
});
@@ -2172,6 +2176,8 @@ mod tests {
use assets::Assets;
use collections::HashSet;
use editor::{DisplayPoint, Editor, SelectionEffects, display_map::DisplayRow};
use encodings::Encoding;
use gpui::{
Action, AnyWindowHandle, App, AssetSource, BorrowAppContext, SemanticVersion,
TestAppContext, UpdateGlobal, VisualTestContext, WindowHandle, actions,
@@ -4377,6 +4383,7 @@ mod tests {
"/settings.json".as_ref(),
&Rope::from_str_small(r#"{"base_keymap": "Atom"}"#),
Default::default(),
Encoding::default(),
)
.await
.unwrap();
@@ -4387,6 +4394,7 @@ mod tests {
"/keymap.json".as_ref(),
&Rope::from_str_small(r#"[{"bindings": {"backspace": "test_only::ActionA"}}]"#),
Default::default(),
Encoding::default(),
)
.await
.unwrap();
@@ -4435,6 +4443,7 @@ mod tests {
"/keymap.json".as_ref(),
&Rope::from_str_small(r#"[{"bindings": {"backspace": "test_only::ActionB"}}]"#),
Default::default(),
Encoding::default(),
)
.await
.unwrap();
@@ -4455,6 +4464,7 @@ mod tests {
"/settings.json".as_ref(),
&Rope::from_str_small(r#"{"base_keymap": "JetBrains"}"#),
Default::default(),
Encoding::default(),
)
.await
.unwrap();
@@ -4495,6 +4505,7 @@ mod tests {
"/settings.json".as_ref(),
&Rope::from_str_small(r#"{"base_keymap": "Atom"}"#),
Default::default(),
Encoding::default(),
)
.await
.unwrap();
@@ -4504,6 +4515,7 @@ mod tests {
"/keymap.json".as_ref(),
&Rope::from_str_small(r#"[{"bindings": {"backspace": "test_only::ActionA"}}]"#),
Default::default(),
Encoding::default(),
)
.await
.unwrap();
@@ -4547,6 +4559,7 @@ mod tests {
"/keymap.json".as_ref(),
&Rope::from_str_small(r#"[{"bindings": {"backspace": null}}]"#),
Default::default(),
Encoding::default(),
)
.await
.unwrap();
@@ -4567,6 +4580,7 @@ mod tests {
"/settings.json".as_ref(),
&Rope::from_str_small(r#"{"base_keymap": "JetBrains"}"#),
Default::default(),
Encoding::default(),
)
.await
.unwrap();

View File

@@ -653,6 +653,7 @@ mod tests {
ipc::{self},
};
use editor::Editor;
use encodings::Encoding;
use gpui::TestAppContext;
use language::LineEnding;
use remote::SshConnectionOptions;
@@ -863,6 +864,7 @@ mod tests {
Path::new(file1_path),
&Rope::from_str("content1", cx.background_executor()),
LineEnding::Unix,
Encoding::default(),
)
.await
.unwrap();
@@ -877,6 +879,7 @@ mod tests {
Path::new(file2_path),
&Rope::from_str("content2", cx.background_executor()),
LineEnding::Unix,
Encoding::default(),
)
.await
.unwrap();

View File

@@ -299,6 +299,20 @@ pub mod settings_profile_selector {
pub struct Toggle;
}
pub mod encodings_ui {
use std::sync::Arc;
use gpui::Action;
use schemars::JsonSchema;
use serde::Deserialize;
#[derive(PartialEq, Debug, Clone, Action, JsonSchema, Deserialize)]
pub struct OpenWithEncoding(pub Arc<std::path::Path>);
#[derive(PartialEq, Debug, Clone, Action, JsonSchema, Deserialize)]
pub struct ForceOpen(pub Arc<std::path::Path>);
}
pub mod agent {
use gpui::actions;

View File

@@ -1986,7 +1986,7 @@ mod tests {
.worktree_for_root_name("closed_source_worktree", cx)
.unwrap();
worktree2.update(cx, |worktree2, cx| {
worktree2.load_file(rel_path("main.rs"), cx)
worktree2.load_file(rel_path("main.rs"), &Default::default(), cx)
})
})
.await

View File

@@ -1541,7 +1541,8 @@ Positive `integer` value between 1 and 32. Values outside of this range will be
"status_bar": {
"active_language_button": true,
"cursor_position_button": true,
"line_endings_button": false
"line_endings_button": false,
"encoding_indicator": true,
},
```