Compare commits

...

2 Commits

Author SHA1 Message Date
Julia Ryan
0702d73d27 Revert 2025-10-28 01:29:24 -07:00
Julia Ryan
59e8cd493c Revert "Fix ESLint linebreak-style errors by preserving line endings in LSP communication (#38773)"
This reverts commit 435eab6896.
2025-10-28 01:18:04 -07:00
7 changed files with 91 additions and 273 deletions

2
Cargo.lock generated
View File

@@ -14311,7 +14311,6 @@ dependencies = [
"log",
"rand 0.9.2",
"rayon",
"regex",
"sum_tree",
"unicode-segmentation",
"util",
@@ -17147,6 +17146,7 @@ dependencies = [
"parking_lot",
"postage",
"rand 0.9.2",
"regex",
"rope",
"smallvec",
"sum_tree",

View File

@@ -12629,12 +12629,6 @@ async fn test_strip_whitespace_and_format_via_lsp(cx: &mut TestAppContext) {
);
}
});
#[cfg(target_os = "windows")]
let line_ending = "\r\n";
#[cfg(not(target_os = "windows"))]
let line_ending = "\n";
// Handle formatting requests to the language server.
cx.lsp
.set_request_handler::<lsp::request::Formatting, _, _>({
@@ -12658,7 +12652,7 @@ async fn test_strip_whitespace_and_format_via_lsp(cx: &mut TestAppContext) {
),
(
lsp::Range::new(lsp::Position::new(3, 4), lsp::Position::new(3, 4)),
line_ending.into()
"\n".into()
),
]
);
@@ -12669,14 +12663,14 @@ async fn test_strip_whitespace_and_format_via_lsp(cx: &mut TestAppContext) {
lsp::Position::new(1, 0),
lsp::Position::new(1, 0),
),
new_text: line_ending.into(),
new_text: "\n".into(),
},
lsp::TextEdit {
range: lsp::Range::new(
lsp::Position::new(2, 0),
lsp::Position::new(2, 0),
),
new_text: line_ending.into(),
new_text: "\n".into(),
},
]))
}
@@ -26662,83 +26656,6 @@ async fn test_paste_url_from_other_app_creates_markdown_link_selectively_in_mult
));
}
#[gpui::test]
async fn test_non_linux_line_endings_registration(cx: &mut TestAppContext) {
init_test(cx, |_| {});
let unix_newlines_file_text = "fn main() {
let a = 5;
}";
let clrf_file_text = unix_newlines_file_text.lines().join("\r\n");
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
path!("/a"),
json!({
"first.rs": &clrf_file_text,
}),
)
.await;
let project = Project::test(fs, [path!("/a").as_ref()], cx).await;
let workspace = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
let cx = &mut VisualTestContext::from_window(*workspace, cx);
let registered_text = Arc::new(Mutex::new(Vec::new()));
let language_registry = project.read_with(cx, |project, _| project.languages().clone());
language_registry.add(rust_lang());
let mut fake_servers = language_registry.register_fake_lsp(
"Rust",
FakeLspAdapter {
capabilities: lsp::ServerCapabilities {
color_provider: Some(lsp::ColorProviderCapability::Simple(true)),
..lsp::ServerCapabilities::default()
},
name: "rust-analyzer",
initializer: Some({
let registered_text = registered_text.clone();
Box::new(move |fake_server| {
fake_server.handle_notification::<lsp::notification::DidOpenTextDocument, _>({
let registered_text = registered_text.clone();
move |params, _| {
registered_text.lock().push(params.text_document.text);
}
});
})
}),
..FakeLspAdapter::default()
},
);
let editor = workspace
.update(cx, |workspace, window, cx| {
workspace.open_abs_path(
PathBuf::from(path!("/a/first.rs")),
OpenOptions::default(),
window,
cx,
)
})
.unwrap()
.await
.unwrap()
.downcast::<Editor>()
.unwrap();
let _fake_language_server = fake_servers.next().await.unwrap();
cx.executor().run_until_parked();
assert_eq!(
editor.update(cx, |editor, cx| editor.text(cx)),
unix_newlines_file_text,
"Default text API returns \n-separated text",
);
assert_eq!(
vec![clrf_file_text],
registered_text.lock().drain(..).collect::<Vec<_>>(),
"Expected the language server to receive the exact same text from the FS",
);
}
#[gpui::test]
async fn test_race_in_multibuffer_save(cx: &mut TestAppContext) {
init_test(cx, |_| {});

View File

@@ -2492,7 +2492,7 @@ impl LocalLspStore {
uri.clone(),
adapter.language_id(&language.name()),
0,
initial_snapshot.text_with_original_line_endings(),
initial_snapshot.text(),
);
vec![snapshot]
@@ -7574,7 +7574,6 @@ impl LspStore {
let previous_snapshot = buffer_snapshots.last()?;
let build_incremental_change = || {
let line_ending = next_snapshot.line_ending();
buffer
.edits_since::<Dimensions<PointUtf16, usize>>(
previous_snapshot.snapshot.version(),
@@ -7582,18 +7581,16 @@ impl LspStore {
.map(|edit| {
let edit_start = edit.new.start.0;
let edit_end = edit_start + (edit.old.end.0 - edit.old.start.0);
let new_text = next_snapshot
.text_for_range(edit.new.start.1..edit.new.end.1)
.collect();
lsp::TextDocumentContentChangeEvent {
range: Some(lsp::Range::new(
point_to_lsp(edit_start),
point_to_lsp(edit_end),
)),
range_length: None,
// Collect changed text and preserve line endings.
// text_for_range returns chunks with normalized \n, so we need to
// convert to the buffer's actual line ending for LSP.
text: line_ending.into_string(
next_snapshot.text_for_range(edit.new.start.1..edit.new.end.1),
),
text: new_text,
}
})
.collect()
@@ -7613,7 +7610,7 @@ impl LspStore {
vec![lsp::TextDocumentContentChangeEvent {
range: None,
range_length: None,
text: next_snapshot.text_with_original_line_endings(),
text: next_snapshot.text(),
}]
}
Some(lsp::TextDocumentSyncKind::INCREMENTAL) => build_incremental_change(),
@@ -10998,12 +10995,13 @@ impl LspStore {
let snapshot = versions.last().unwrap();
let version = snapshot.version;
let initial_snapshot = &snapshot.snapshot;
let uri = lsp::Uri::from_file_path(file.abs_path(cx)).unwrap();
language_server.register_buffer(
uri,
adapter.language_id(&language.name()),
version,
buffer_handle.read(cx).text_with_original_line_endings(),
initial_snapshot.text(),
);
buffer_paths_registered.push((buffer_id, file.abs_path(cx)));
local

View File

@@ -15,7 +15,6 @@ path = "src/rope.rs"
arrayvec = "0.7.1"
log.workspace = true
rayon.workspace = true
regex.workspace = true
sum_tree.workspace = true
unicode-segmentation.workspace = true
util.workspace = true

View File

@@ -6,13 +6,10 @@ mod unclipped;
use arrayvec::ArrayVec;
use rayon::iter::{IntoParallelIterator, ParallelIterator as _};
use regex::Regex;
use std::{
borrow::Cow,
cmp, fmt, io, mem,
ops::{self, AddAssign, Range},
str,
sync::{Arc, LazyLock},
};
use sum_tree::{Bias, Dimension, Dimensions, SumTree};
@@ -24,95 +21,6 @@ pub use unclipped::Unclipped;
use crate::chunk::Bitmap;
static LINE_SEPARATORS_REGEX: LazyLock<Regex> =
LazyLock::new(|| Regex::new(r"\r\n|\r").expect("Failed to create LINE_SEPARATORS_REGEX"));
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum LineEnding {
Unix,
Windows,
}
impl Default for LineEnding {
fn default() -> Self {
#[cfg(unix)]
return Self::Unix;
#[cfg(not(unix))]
return Self::Windows;
}
}
impl LineEnding {
pub fn as_str(&self) -> &'static str {
match self {
LineEnding::Unix => "\n",
LineEnding::Windows => "\r\n",
}
}
pub fn label(&self) -> &'static str {
match self {
LineEnding::Unix => "LF",
LineEnding::Windows => "CRLF",
}
}
pub fn detect(text: &str) -> Self {
let mut max_ix = cmp::min(text.len(), 1000);
while !text.is_char_boundary(max_ix) {
max_ix -= 1;
}
if let Some(ix) = text[..max_ix].find(['\n']) {
if ix > 0 && text.as_bytes()[ix - 1] == b'\r' {
Self::Windows
} else {
Self::Unix
}
} else {
Self::default()
}
}
pub fn normalize(text: &mut String) {
if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(text, "\n") {
*text = replaced;
}
}
pub fn normalize_arc(text: Arc<str>) -> Arc<str> {
if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(&text, "\n") {
replaced.into()
} else {
text
}
}
pub fn normalize_cow(text: Cow<str>) -> Cow<str> {
if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(&text, "\n") {
replaced.into()
} else {
text
}
}
/// Converts text chunks into a [`String`] using the current line ending.
pub fn into_string(&self, chunks: Chunks<'_>) -> String {
match self {
LineEnding::Unix => chunks.collect(),
LineEnding::Windows => {
let line_ending = self.as_str();
let mut result = String::new();
for chunk in chunks {
result.push_str(&chunk.replace('\n', line_ending));
}
result
}
}
}
}
#[derive(Clone, Default)]
pub struct Rope {
chunks: SumTree<Chunk>,
@@ -460,16 +368,6 @@ impl Rope {
Chunks::new(self, range, true)
}
/// Formats the rope's text with the specified line ending string.
/// This replaces all `\n` characters with the provided line ending.
///
/// The rope internally stores all line breaks as `\n` (see `Display` impl).
/// Use this method to convert to different line endings for file operations,
/// LSP communication, or other scenarios requiring specific line ending formats.
pub fn to_string_with_line_ending(&self, line_ending: LineEnding) -> String {
line_ending.into_string(self.chunks())
}
pub fn offset_to_offset_utf16(&self, offset: usize) -> OffsetUtf16 {
if offset >= self.summary().len {
return self.summary().len_utf16;
@@ -711,16 +609,10 @@ impl From<&String> for Rope {
}
}
/// Display implementation for Rope.
///
/// Note: This always uses `\n` as the line separator, regardless of the original
/// file's line endings. The rope internally normalizes all line breaks to `\n`.
/// If you need to preserve original line endings (e.g., for LSP communication),
/// use `to_string_with_line_ending` instead.
impl fmt::Display for Rope {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
for chunk in self.chunks() {
write!(f, "{chunk}")?;
write!(f, "{}", chunk)?;
}
Ok(())
}
@@ -2370,53 +2262,6 @@ mod tests {
}
}
#[test]
fn test_to_string_with_line_ending() {
// Test Unix line endings (no conversion)
let rope = Rope::from("line1\nline2\nline3");
assert_eq!(
rope.to_string_with_line_ending(LineEnding::Unix),
"line1\nline2\nline3"
);
// Test Windows line endings
assert_eq!(
rope.to_string_with_line_ending(LineEnding::Windows),
"line1\r\nline2\r\nline3"
);
// Test empty rope
let empty_rope = Rope::from("");
assert_eq!(
empty_rope.to_string_with_line_ending(LineEnding::Windows),
""
);
// Test single line (no newlines)
let single_line = Rope::from("single line");
assert_eq!(
single_line.to_string_with_line_ending(LineEnding::Windows),
"single line"
);
// Test rope ending with newline
let ending_newline = Rope::from("line1\nline2\n");
assert_eq!(
ending_newline.to_string_with_line_ending(LineEnding::Windows),
"line1\r\nline2\r\n"
);
// Test large rope with multiple chunks
let mut large_rope = Rope::new();
for i in 0..100 {
large_rope.push(&format!("line{}\n", i));
}
let result = large_rope.to_string_with_line_ending(LineEnding::Windows);
assert!(result.contains("\r\n"));
assert!(!result.contains("\n\n"));
assert_eq!(result.matches("\r\n").count(), 100);
}
fn clip_offset(text: &str, mut offset: usize, bias: Bias) -> usize {
while !text.is_char_boundary(offset) {
match bias {

View File

@@ -23,6 +23,7 @@ log.workspace = true
parking_lot.workspace = true
postage.workspace = true
rand = { workspace = true, optional = true }
regex.workspace = true
rope.workspace = true
smallvec.workspace = true
sum_tree.workspace = true

View File

@@ -20,9 +20,11 @@ use operation_queue::OperationQueue;
pub use patch::Patch;
use postage::{oneshot, prelude::*};
use regex::Regex;
pub use rope::*;
pub use selection::*;
use std::{
borrow::Cow,
cmp::{self, Ordering, Reverse},
fmt::Display,
future::Future,
@@ -30,7 +32,7 @@ use std::{
num::NonZeroU64,
ops::{self, Deref, Range, Sub},
str,
sync::Arc,
sync::{Arc, LazyLock},
time::{Duration, Instant},
};
pub use subscription::*;
@@ -41,6 +43,9 @@ use undo_map::UndoMap;
#[cfg(any(test, feature = "test-support"))]
use util::RandomCharIter;
static LINE_SEPARATORS_REGEX: LazyLock<Regex> =
LazyLock::new(|| Regex::new(r"\r\n|\r").expect("Failed to create LINE_SEPARATORS_REGEX"));
pub type TransactionId = clock::Lamport;
pub struct Buffer {
@@ -2014,24 +2019,10 @@ impl BufferSnapshot {
start..position
}
/// Returns the buffer's text as a String.
///
/// Note: This always uses `\n` as the line separator, regardless of the buffer's
/// actual line ending setting. For LSP communication or other cases where you need
/// to preserve the original line endings, use [`Self::text_with_original_line_endings`] instead.
pub fn text(&self) -> String {
self.visible_text.to_string()
}
/// Returns the buffer's text with the same line endings as in the buffer's file.
///
/// Unlike [`Self::text`] which always uses `\n`, this method formats the text using
/// the buffer's actual line ending setting (Unix `\n` or Windows `\r\n`).
pub fn text_with_original_line_endings(&self) -> String {
self.visible_text
.to_string_with_line_ending(self.line_ending)
}
pub fn line_ending(&self) -> LineEnding {
self.line_ending
}
@@ -2135,10 +2126,6 @@ impl BufferSnapshot {
self.visible_text.reversed_bytes_in_range(start..end)
}
/// Returns the text in the given range.
///
/// Note: This always uses `\n` as the line separator, regardless of the buffer's
/// actual line ending setting.
pub fn text_for_range<T: ToOffset>(&self, range: Range<T>) -> Chunks<'_> {
let start = range.start.to_offset(self);
let end = range.end.to_offset(self);
@@ -3265,6 +3252,77 @@ impl FromAnchor for usize {
}
}
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum LineEnding {
Unix,
Windows,
}
impl Default for LineEnding {
fn default() -> Self {
#[cfg(unix)]
return Self::Unix;
#[cfg(not(unix))]
return Self::Windows;
}
}
impl LineEnding {
pub fn as_str(&self) -> &'static str {
match self {
LineEnding::Unix => "\n",
LineEnding::Windows => "\r\n",
}
}
pub fn label(&self) -> &'static str {
match self {
LineEnding::Unix => "LF",
LineEnding::Windows => "CRLF",
}
}
pub fn detect(text: &str) -> Self {
let mut max_ix = cmp::min(text.len(), 1000);
while !text.is_char_boundary(max_ix) {
max_ix -= 1;
}
if let Some(ix) = text[..max_ix].find(['\n']) {
if ix > 0 && text.as_bytes()[ix - 1] == b'\r' {
Self::Windows
} else {
Self::Unix
}
} else {
Self::default()
}
}
pub fn normalize(text: &mut String) {
if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(text, "\n") {
*text = replaced;
}
}
pub fn normalize_arc(text: Arc<str>) -> Arc<str> {
if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(&text, "\n") {
replaced.into()
} else {
text
}
}
pub fn normalize_cow(text: Cow<str>) -> Cow<str> {
if let Cow::Owned(replaced) = LINE_SEPARATORS_REGEX.replace_all(&text, "\n") {
replaced.into()
} else {
text
}
}
}
#[cfg(debug_assertions)]
pub mod debug {
use super::*;