Compare commits

...

4 Commits

Author             SHA1        Message                                                                Date
Antonio Scandurra  3d81b007bc  Go to rope.rs, scroll around line 30. Notice underlines disappearing  2024-10-29 10:37:42 +01:00
Antonio Scandurra  33d12fb8fb  Checkpoint                                                             2024-10-28 23:10:49 +01:00
Antonio Scandurra  e1e5cb75a5  WIP                                                                    2024-10-28 19:31:04 +01:00
Antonio Scandurra  d71314b4df  Start on indexing rope chunks                                          2024-10-28 16:18:34 +01:00
3 changed files with 954 additions and 320 deletions

crates/rope/src/chunk.rs (new file, 839 lines added)

@@ -0,0 +1,839 @@
use crate::{OffsetUtf16, Point, PointUtf16, TextSummary, Unclipped};
use arrayvec::ArrayString;
use std::{cmp, ops::Range};
use sum_tree::Bias;
use unicode_segmentation::GraphemeCursor;
use util::debug_panic;
#[cfg(test)]
pub(crate) const MIN_BASE: usize = 6;
#[cfg(not(test))]
pub(crate) const MIN_BASE: usize = 32;
pub(crate) const MAX_BASE: usize = MIN_BASE * 2;
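// Each of the bitmaps below dedicates one bit per byte of `text`, so `MAX_BASE`
// must not exceed `usize::BITS`: bit `i` of `chars` is set when byte `i` starts
// a char, `tabs` and `newlines` mark the bytes holding '\t' and '\n', and
// `chars_utf16` carries an extra bit for every char that needs two UTF-16 code
// units, so `count_ones()` on these maps yields lengths directly.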
#[derive(Clone, Debug, Default)]
pub struct Chunk {
chars: usize,
chars_utf16: usize,
tabs: usize,
newlines: usize,
pub text: ArrayString<MAX_BASE>,
}
impl Chunk {
#[inline(always)]
pub fn new(text: &str) -> Self {
let mut this = Chunk::default();
this.push_str(text);
this
}
#[inline(always)]
pub fn push_str(&mut self, text: &str) {
for (char_ix, c) in text.char_indices() {
let ix = self.text.len() + char_ix;
self.chars |= 1 << ix;
self.chars_utf16 |= 1 << ix;
self.chars_utf16 |= c.len_utf16() << ix;
self.tabs |= ((c == '\t') as usize) << ix;
self.newlines |= ((c == '\n') as usize) << ix;
}
self.text.push_str(text);
}
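// For example, pushing "a\tb\n" onto an empty chunk yields (bit 0 = byte 0):
//   chars    = 0b1111  (every byte starts a char)
//   tabs     = 0b0010  (byte 1 is '\t')
//   newlines = 0b1000  (byte 3 is '\n')
// `chars_utf16` matches `chars` here because every char fits in one UTF-16
// code unit; a char like '🦀' would additionally set the bit just above its
// starting byte, so `count_ones()` still equals the UTF-16 length.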
#[inline(always)]
pub fn append(&mut self, slice: ChunkSlice) {
if slice.is_empty() {
return;
};
let base_ix = self.text.len();
self.chars |= slice.chars << base_ix;
self.chars_utf16 |= slice.chars_utf16 << base_ix;
self.tabs |= slice.tabs << base_ix;
self.newlines |= slice.newlines << base_ix;
self.text.push_str(&slice.text);
}
#[inline(always)]
pub fn as_slice(&self) -> ChunkSlice {
ChunkSlice {
chars: self.chars,
chars_utf16: self.chars_utf16,
tabs: self.tabs,
newlines: self.newlines,
text: &self.text,
}
}
#[inline(always)]
pub fn slice(&self, range: Range<usize>) -> ChunkSlice {
self.as_slice().slice(range)
}
}
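// Illustration (not part of the diff): `Chunk::new("a\nbc")` produces a chunk
// whose `as_slice().lines()` is `Point::new(1, 2)`: one newline and a 2-byte
// last line.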
#[derive(Clone, Copy, Debug)]
pub struct ChunkSlice<'a> {
chars: usize,
chars_utf16: usize,
tabs: usize,
newlines: usize,
text: &'a str,
}
impl<'a> Into<Chunk> for ChunkSlice<'a> {
fn into(self) -> Chunk {
Chunk {
chars: self.chars,
chars_utf16: self.chars_utf16,
tabs: self.tabs,
newlines: self.newlines,
text: self.text.try_into().unwrap(),
}
}
}
impl<'a> ChunkSlice<'a> {
#[inline(always)]
pub fn is_empty(self) -> bool {
self.text.is_empty()
}
#[inline(always)]
pub fn is_char_boundary(self, offset: usize) -> bool {
self.text.is_char_boundary(offset)
}
#[inline(always)]
pub fn split_at(self, mid: usize) -> (ChunkSlice<'a>, ChunkSlice<'a>) {
if mid == 64 {
let left = self;
let right = ChunkSlice {
chars: 0,
chars_utf16: 0,
tabs: 0,
newlines: 0,
text: "",
};
(left, right)
} else {
let mask = ((1u128 << mid) - 1) as usize;
let (left_text, right_text) = self.text.split_at(mid);
let left = ChunkSlice {
chars: self.chars & mask,
chars_utf16: self.chars_utf16 & mask,
tabs: self.tabs & mask,
newlines: self.newlines & mask,
text: left_text,
};
let right = ChunkSlice {
chars: self.chars >> mid,
chars_utf16: self.chars_utf16 >> mid,
tabs: self.tabs >> mid,
newlines: self.newlines >> mid,
text: right_text,
};
(left, right)
}
}
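// The masks are built in `u128` so that a boundary of 64 (a whole chunk) does
// not overflow the shift, while the explicit `mid == 64` arm above and the
// `range.start == 64` arm in `slice` below avoid shifting the 64-bit bitmaps
// themselves by their full width.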
#[inline(always)]
pub fn slice(self, range: Range<usize>) -> Self {
let mask = ((1u128 << range.end) - 1) as usize;
if range.start == 64 {
Self {
chars: 0,
chars_utf16: 0,
tabs: 0,
newlines: 0,
text: "",
}
} else {
Self {
chars: (self.chars & mask) >> range.start,
chars_utf16: (self.chars_utf16 & mask) >> range.start,
tabs: (self.tabs & mask) >> range.start,
newlines: (self.newlines & mask) >> range.start,
text: &self.text[range],
}
}
}
#[inline(always)]
pub fn text_summary(&self) -> TextSummary {
let (longest_row, longest_row_chars) = self.longest_row();
TextSummary {
len: self.len(),
len_utf16: self.len_utf16(),
lines: self.lines(),
first_line_chars: self.first_line_chars(),
last_line_chars: self.last_line_chars(),
last_line_len_utf16: self.last_line_len_utf16(),
longest_row,
longest_row_chars,
}
}
/// Get length in bytes
#[inline(always)]
pub fn len(&self) -> usize {
self.text.len()
}
/// Get length in UTF-16 code units
#[inline(always)]
pub fn len_utf16(&self) -> OffsetUtf16 {
OffsetUtf16(self.chars_utf16.count_ones() as usize)
}
/// Get point representing number of lines and length of last line
#[inline(always)]
pub fn lines(&self) -> Point {
let row = self.newlines.count_ones();
let column = self.newlines.leading_zeros() - (usize::BITS - self.text.len() as u32);
Point::new(row, column)
}
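// E.g. for a chunk containing "ab\nc": `newlines == 0b0100`, so
// `leading_zeros()` is 61 and the column works out to 61 - (64 - 4) = 1, the
// byte length of the last line "c".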
/// Get number of chars in first line
#[inline(always)]
pub fn first_line_chars(&self) -> u32 {
if self.newlines == 0 {
self.chars.count_ones()
} else {
let mask = ((1u128 << self.newlines.trailing_zeros() as usize) - 1) as usize;
(self.chars & mask).count_ones()
}
}
/// Get number of chars in last line
#[inline(always)]
pub fn last_line_chars(&self) -> u32 {
if self.newlines == 0 {
self.chars.count_ones()
} else {
let mask = !(usize::MAX >> self.newlines.leading_zeros());
(self.chars & mask).count_ones()
}
}
/// Get number of UTF-16 code units in last line
#[inline(always)]
pub fn last_line_len_utf16(&self) -> u32 {
if self.newlines == 0 {
self.chars_utf16.count_ones()
} else {
let mask = !(usize::MAX >> self.newlines.leading_zeros());
(self.chars_utf16 & mask).count_ones()
}
}
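// In both `last_line_*` methods the mask `!(usize::MAX >> leading_zeros)` keeps
// only the bits strictly above the most significant newline, i.e. the bytes of
// the final line; the `newlines == 0` branch also keeps the shift amount below
// 64.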
/// Get the longest row in the chunk and its length in characters.
#[inline(always)]
pub fn longest_row(&self) -> (u32, u32) {
let mut chars = self.chars;
let mut newlines = self.newlines;
let mut row = 0;
let mut longest_row = 0;
let mut longest_row_chars = 0;
while newlines > 0 {
let newline_ix = newlines.trailing_zeros();
let row_chars = (chars & ((1 << newline_ix) - 1)).count_ones() as u8;
if row_chars > longest_row_chars {
longest_row = row;
longest_row_chars = row_chars;
}
newlines >>= newline_ix;
newlines >>= 1;
chars >>= newline_ix;
chars >>= 1;
row += 1;
}
let row_chars = chars.count_ones() as u8;
if row_chars > longest_row_chars {
(row, row_chars as u32)
} else {
(longest_row, longest_row_chars as u32)
}
}
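// Ties keep the earliest row because the comparison is strict, and the final
// (unterminated) row is considered only after the newline-terminated ones.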
#[inline(always)]
pub fn offset_to_point(&self, offset: usize) -> Point {
if !self.text.is_char_boundary(offset) {
debug_panic!(
"offset {:?} is not a char boundary for string {:?}",
offset,
self.text
);
return Point::zero();
}
let mask = ((1u128 << offset) - 1) as usize;
let row = (self.newlines & mask).count_ones();
let newline_ix = usize::BITS - (self.newlines & mask).leading_zeros();
let column = (offset - newline_ix as usize) as u32;
Point::new(row, column)
}
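// `newlines & mask` keeps only the newlines before `offset`; `usize::BITS -
// leading_zeros` of that value is the byte index just past the last such
// newline, i.e. the start of the current line, so the column is simply the
// remaining distance to `offset`.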
#[inline(always)]
pub fn point_to_offset(&self, point: Point) -> usize {
if point.row > self.newlines.count_ones() {
debug_panic!(
"point {:?} extends beyond rows for string {:?}",
point,
self.text
);
return 0;
}
let row_start_offset = if point.row > 0 {
(nth_set_bit(self.newlines, point.row as usize) + 1) as usize
} else {
0
};
let newlines = if row_start_offset == usize::BITS as usize {
0
} else {
self.newlines >> row_start_offset
};
let row_len = cmp::min(newlines.trailing_zeros(), self.text.len() as u32);
if point.column > row_len {
debug_panic!(
"point {:?} extends beyond row for string {:?}",
point,
self.text
);
return row_start_offset + row_len as usize;
}
row_start_offset + point.column as usize
}
#[inline(always)]
pub fn offset_to_offset_utf16(&self, offset: usize) -> OffsetUtf16 {
let mask = ((1u128 << offset) - 1) as usize;
OffsetUtf16((self.chars_utf16 & mask).count_ones() as usize)
}
#[inline(always)]
pub fn offset_utf16_to_offset(&self, target: OffsetUtf16) -> usize {
if target.0 == 0 {
0
} else {
let ix = nth_set_bit(self.chars_utf16, target.0) + 1;
if ix == 64 {
64
} else {
let utf8_additional_len = cmp::min(
(self.chars_utf16 >> ix).trailing_zeros() as usize,
self.text.len() - ix,
);
ix + utf8_additional_len
}
}
}
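// For example, with text "aé" (`chars_utf16 == 0b11`), converting
// `OffsetUtf16(2)` finds the 2nd set bit at byte 1 (the start of 'é') and then
// adds that char's remaining bytes, returning byte offset 3; the
// `trailing_zeros`/`min` pair counts how many continuation bytes follow before
// the next char starts or the chunk ends.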
#[inline(always)]
pub fn offset_to_point_utf16(&self, offset: usize) -> PointUtf16 {
let mask = ((1u128 << offset) - 1) as usize;
let row = (self.newlines & mask).count_ones();
let newline_ix = usize::BITS - (self.newlines & mask).leading_zeros();
let column = if newline_ix == 64 {
0
} else {
((self.chars_utf16 & mask) >> newline_ix).count_ones()
};
PointUtf16::new(row, column)
}
#[inline(always)]
pub fn point_to_point_utf16(&self, point: Point) -> PointUtf16 {
self.offset_to_point_utf16(self.point_to_offset(point))
}
#[inline(always)]
pub fn point_utf16_to_offset(&self, point: PointUtf16, clip: bool) -> usize {
let lines = self.lines();
if point.row > lines.row {
if !clip {
debug_panic!(
"point {:?} is beyond this chunk's extent {:?}",
point,
self.text
);
}
return self.len();
}
let row_start_offset = if point.row > 0 {
(nth_set_bit(self.newlines, point.row as usize) + 1) as usize
} else {
0
};
let row_len_utf8 = if row_start_offset == 64 {
0
} else {
cmp::min(
(self.newlines >> row_start_offset).trailing_zeros(),
(self.text.len() - row_start_offset) as u32,
)
};
let mask = ((1u128 << row_len_utf8) - 1) as usize;
let row_chars_utf16 = if row_start_offset == 64 {
0
} else {
(self.chars_utf16 >> row_start_offset) & mask
};
if point.column > row_chars_utf16.count_ones() {
if !clip {
debug_panic!(
"point {:?} is beyond the end of the line in chunk {:?}",
point,
self.text
);
}
return row_start_offset + row_len_utf8 as usize;
}
let mut offset = row_start_offset;
if point.column > 0 {
let offset_within_row = nth_set_bit(row_chars_utf16, point.column as usize) + 1;
offset += offset_within_row;
if offset < 64 {
offset += cmp::min(
(self.chars_utf16 >> offset).trailing_zeros() as usize,
self.text.len() - offset,
);
}
if !self.text.is_char_boundary(offset) {
offset -= 1;
while !self.text.is_char_boundary(offset) {
offset -= 1;
}
if !clip {
debug_panic!(
"point {:?} is within character in chunk {:?}",
point,
self.text,
);
}
}
}
offset
}
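// With `clip == true`, a row past the chunk saturates to `self.len()`, a
// column past the row saturates to the end of that row, and a target inside a
// multi-byte character snaps back to the previous char boundary; with
// `clip == false` the same values are returned, but a `debug_panic!` flags the
// invalid input first.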
pub fn unclipped_point_utf16_to_point(&self, target: Unclipped<PointUtf16>) -> Point {
let mut point = Point::zero();
let mut point_utf16 = PointUtf16::zero();
for ch in self.text.chars() {
if point_utf16 == target.0 {
break;
}
if point_utf16 > target.0 {
// If the point is past the end of a line or inside of a code point,
// return the last valid point before the target.
return point;
}
if ch == '\n' {
point_utf16 += PointUtf16::new(1, 0);
point += Point::new(1, 0);
} else {
point_utf16 += PointUtf16::new(0, ch.len_utf16() as u32);
point += Point::new(0, ch.len_utf8() as u32);
}
}
point
}
// todo!("use bitsets")
pub fn clip_point(&self, target: Point, bias: Bias) -> Point {
for (row, line) in self.text.split('\n').enumerate() {
if row == target.row as usize {
let bytes = line.as_bytes();
let mut column = target.column.min(bytes.len() as u32) as usize;
if column == 0
|| column == bytes.len()
|| (bytes[column - 1] < 128 && bytes[column] < 128)
{
return Point::new(row as u32, column as u32);
}
let mut grapheme_cursor = GraphemeCursor::new(column, bytes.len(), true);
loop {
if line.is_char_boundary(column)
&& grapheme_cursor.is_boundary(line, 0).unwrap_or(false)
{
break;
}
match bias {
Bias::Left => column -= 1,
Bias::Right => column += 1,
}
grapheme_cursor.set_cursor(column);
}
return Point::new(row as u32, column as u32);
}
}
unreachable!()
}
// todo!("use bitsets")
pub fn clip_point_utf16(&self, target: Unclipped<PointUtf16>, bias: Bias) -> PointUtf16 {
for (row, line) in self.text.split('\n').enumerate() {
if row == target.0.row as usize {
let mut code_units = line.encode_utf16();
let mut column = code_units.by_ref().take(target.0.column as usize).count();
if char::decode_utf16(code_units).next().transpose().is_err() {
match bias {
Bias::Left => column -= 1,
Bias::Right => column += 1,
}
}
return PointUtf16::new(row as u32, column as u32);
}
}
unreachable!()
}
// todo!("use bitsets")
pub fn clip_offset_utf16(&self, target: OffsetUtf16, bias: Bias) -> OffsetUtf16 {
let mut code_units = self.text.encode_utf16();
let mut offset = code_units.by_ref().take(target.0).count();
if char::decode_utf16(code_units).next().transpose().is_err() {
match bias {
Bias::Left => offset -= 1,
Bias::Right => offset += 1,
}
}
OffsetUtf16(offset)
}
}
/// Finds the n-th bit that is set to 1, where `n` is 1-based and counted from
/// the least significant bit, and returns that bit's zero-based index.
#[inline(always)]
fn nth_set_bit(v: usize, mut n: usize) -> usize {
let v = v.reverse_bits();
let mut s: usize = 64;
let mut t: usize;
// Parallel bit count intermediates
let a = v - ((v >> 1) & usize::MAX / 3);
let b = (a & usize::MAX / 5) + ((a >> 2) & usize::MAX / 5);
let c = (b + (b >> 4)) & usize::MAX / 0x11;
let d = (c + (c >> 8)) & usize::MAX / 0x101;
t = (d >> 32) + (d >> 48);
// Branchless select
s -= ((t.wrapping_sub(n)) & 256) >> 3;
n -= t & ((t.wrapping_sub(n)) >> 8);
t = (d >> (s - 16)) & 0xff;
s -= ((t.wrapping_sub(n)) & 256) >> 4;
n -= t & ((t.wrapping_sub(n)) >> 8);
t = (c >> (s - 8)) & 0xf;
s -= ((t.wrapping_sub(n)) & 256) >> 5;
n -= t & ((t.wrapping_sub(n)) >> 8);
t = (b >> (s - 4)) & 0x7;
s -= ((t.wrapping_sub(n)) & 256) >> 6;
n -= t & ((t.wrapping_sub(n)) >> 8);
t = (a >> (s - 2)) & 0x3;
s -= ((t.wrapping_sub(n)) & 256) >> 7;
n -= t & ((t.wrapping_sub(n)) >> 8);
t = (v >> (s - 1)) & 0x1;
s -= ((t.wrapping_sub(n)) & 256) >> 8;
65 - s - 1
}
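// This is a branchless rank/select in the style of the Bit Twiddling Hacks
// "select" routine, run on the reversed word; for example,
// nth_set_bit(0b10110, 2) == 2 and nth_set_bit(0b10110, 3) == 4.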
#[cfg(test)]
mod tests {
use super::*;
use rand::prelude::*;
use util::RandomCharIter;
#[gpui::test(iterations = 100)]
fn test_random_chunks(mut rng: StdRng) {
let max_len = std::env::var("CHUNK_MAX_LEN")
.ok()
.and_then(|s| s.parse().ok())
.unwrap_or(64);
let chunk_len = rng.gen_range(0..=max_len);
let text = RandomCharIter::new(&mut rng)
.take(chunk_len)
.collect::<String>();
let mut ix = chunk_len;
while !text.is_char_boundary(ix) {
ix -= 1;
}
let text = &text[..ix];
log::info!("Chunk: {:?}", text);
let chunk = Chunk::new(&text);
verify_chunk(chunk.as_slice(), text);
for _ in 0..10 {
let mut start = rng.gen_range(0..=chunk.text.len());
let mut end = rng.gen_range(start..=chunk.text.len());
while !chunk.text.is_char_boundary(start) {
start -= 1;
}
while !chunk.text.is_char_boundary(end) {
end -= 1;
}
let range = start..end;
log::info!("Range: {:?}", range);
let text_slice = &text[range.clone()];
let chunk_slice = chunk.slice(range);
verify_chunk(chunk_slice, text_slice);
}
}
#[test]
fn test_nth_set_bit() {
assert_eq!(
nth_set_bit(
0b1000000000000000000000000000000000000000000000000000000000000000,
1
),
63
);
assert_eq!(
nth_set_bit(
0b1100000000000000000000000000000000000000000000000000000000000000,
1
),
62
);
assert_eq!(
nth_set_bit(
0b1100000000000000000000000000000000000000000000000000000000000000,
2
),
63
);
assert_eq!(
nth_set_bit(
0b0000000000000000000000000000000000000000000000000000000000000001,
1
),
0
);
assert_eq!(
nth_set_bit(
0b0000000000000000000000000000000000000000000000000000000000000011,
2
),
1
);
assert_eq!(
nth_set_bit(
0b0101010101010101010101010101010101010101010101010101010101010101,
1
),
0
);
assert_eq!(
nth_set_bit(
0b0101010101010101010101010101010101010101010101010101010101010101,
32
),
62
);
assert_eq!(
nth_set_bit(
0b1111111111111111111111111111111111111111111111111111111111111111,
64
),
63
);
assert_eq!(
nth_set_bit(
0b1111111111111111111111111111111111111111111111111111111111111111,
1
),
0
);
assert_eq!(
nth_set_bit(
0b1010101010101010101010101010101010101010101010101010101010101010,
1
),
1
);
assert_eq!(
nth_set_bit(
0b1111000011110000111100001111000011110000111100001111000011110000,
8
),
15
);
}
fn verify_chunk(chunk: ChunkSlice<'_>, text: &str) {
let mut offset = 0;
let mut offset_utf16 = OffsetUtf16(0);
let mut point = Point::zero();
let mut point_utf16 = PointUtf16::zero();
log::info!("Verifying chunk {:?}", text);
assert_eq!(chunk.offset_to_point(0), Point::zero());
for c in text.chars() {
let expected_point = chunk.offset_to_point(offset);
assert_eq!(point, expected_point, "mismatch at offset {}", offset);
assert_eq!(
chunk.point_to_offset(point),
offset,
"mismatch at point {:?}",
point
);
assert_eq!(
chunk.offset_to_offset_utf16(offset),
offset_utf16,
"mismatch at offset {}",
offset
);
assert_eq!(
chunk.offset_utf16_to_offset(offset_utf16),
offset,
"mismatch at offset_utf16 {:?}",
offset_utf16
);
assert_eq!(
chunk.point_to_point_utf16(point),
point_utf16,
"mismatch at point {:?}",
point
);
assert_eq!(
chunk.point_utf16_to_offset(point_utf16, false),
offset,
"mismatch at point_utf16 {:?}",
point_utf16
);
if c == '\n' {
point.row += 1;
point.column = 0;
point_utf16.row += 1;
point_utf16.column = 0;
} else {
point.column += c.len_utf8() as u32;
point_utf16.column += c.len_utf16() as u32;
}
offset += c.len_utf8();
offset_utf16.0 += c.len_utf16();
}
let final_point = chunk.offset_to_point(offset);
assert_eq!(point, final_point, "mismatch at final offset {}", offset);
assert_eq!(
chunk.point_to_offset(point),
offset,
"mismatch at point {:?}",
point
);
assert_eq!(
chunk.offset_to_offset_utf16(offset),
offset_utf16,
"mismatch at offset {}",
offset
);
assert_eq!(
chunk.offset_utf16_to_offset(offset_utf16),
offset,
"mismatch at offset_utf16 {:?}",
offset_utf16
);
assert_eq!(
chunk.point_to_point_utf16(point),
point_utf16,
"mismatch at final point {:?}",
point
);
assert_eq!(
chunk.point_utf16_to_offset(point_utf16, false),
offset,
"mismatch at final point_utf16 {:?}",
point_utf16
);
// Verify length methods
assert_eq!(chunk.len(), text.len());
assert_eq!(
chunk.len_utf16().0,
text.chars().map(|c| c.len_utf16()).sum::<usize>()
);
// Verify line counting
let lines = chunk.lines();
let mut newline_count = 0;
let mut last_line_len = 0;
for c in text.chars() {
if c == '\n' {
newline_count += 1;
last_line_len = 0;
} else {
last_line_len += c.len_utf8() as u32;
}
}
assert_eq!(lines, Point::new(newline_count, last_line_len));
// Verify first/last line chars
if !text.is_empty() {
let first_line = text.split('\n').next().unwrap();
assert_eq!(chunk.first_line_chars(), first_line.chars().count() as u32);
let last_line = text.split('\n').last().unwrap();
assert_eq!(chunk.last_line_chars(), last_line.chars().count() as u32);
assert_eq!(
chunk.last_line_len_utf16(),
last_line.chars().map(|c| c.len_utf16() as u32).sum::<u32>()
);
}
// Verify longest row
let (longest_row, longest_chars) = chunk.longest_row();
let mut max_chars = 0;
let mut current_row = 0;
let mut current_chars = 0;
let mut max_row = 0;
for c in text.chars() {
if c == '\n' {
if current_chars > max_chars {
max_chars = current_chars;
max_row = current_row;
}
current_row += 1;
current_chars = 0;
} else {
current_chars += 1;
}
}
if current_chars > max_chars {
max_chars = current_chars;
max_row = current_row;
}
assert_eq!((max_row, max_chars as u32), (longest_row, longest_chars));
}
}

crates/rope/src/rope.rs

@@ -1,9 +1,10 @@
mod chunk;
mod offset_utf16;
mod point;
mod point_utf16;
mod unclipped;
use arrayvec::ArrayString;
use chunk::{Chunk, ChunkSlice};
use smallvec::SmallVec;
use std::{
cmp, fmt, io, mem,
@@ -11,20 +12,12 @@ use std::{
str,
};
use sum_tree::{Bias, Dimension, SumTree};
use unicode_segmentation::GraphemeCursor;
use util::debug_panic;
pub use offset_utf16::OffsetUtf16;
pub use point::Point;
pub use point_utf16::PointUtf16;
pub use unclipped::Unclipped;
#[cfg(test)]
const CHUNK_BASE: usize = 6;
#[cfg(not(test))]
const CHUNK_BASE: usize = 64;
#[derive(Clone, Default)]
pub struct Rope {
chunks: SumTree<Chunk>,
@@ -39,10 +32,13 @@ impl Rope {
let mut chunks = rope.chunks.cursor::<()>(&());
chunks.next(&());
if let Some(chunk) = chunks.item() {
if self.chunks.last().map_or(false, |c| c.0.len() < CHUNK_BASE)
|| chunk.0.len() < CHUNK_BASE
if self
.chunks
.last()
.map_or(false, |c| c.text.len() < chunk::MIN_BASE)
|| chunk.text.len() < chunk::MIN_BASE
{
self.push(&chunk.0);
self.push(&chunk.text);
chunks.next(&());
}
}
@@ -77,11 +73,13 @@ impl Rope {
pub fn push(&mut self, mut text: &str) {
self.chunks.update_last(
|last_chunk| {
let split_ix = if last_chunk.0.len() + text.len() <= 2 * CHUNK_BASE {
let split_ix = if last_chunk.text.len() + text.len() <= chunk::MAX_BASE {
text.len()
} else {
let mut split_ix =
cmp::min(CHUNK_BASE.saturating_sub(last_chunk.0.len()), text.len());
let mut split_ix = cmp::min(
chunk::MIN_BASE.saturating_sub(last_chunk.text.len()),
text.len(),
);
while !text.is_char_boundary(split_ix) {
split_ix += 1;
}
@@ -89,7 +87,7 @@ impl Rope {
};
let (suffix, remainder) = text.split_at(split_ix);
last_chunk.0.push_str(suffix);
last_chunk.push_str(suffix);
text = remainder;
},
&(),
@@ -101,12 +99,12 @@ impl Rope {
let mut new_chunks = SmallVec::<[_; 16]>::new();
while !text.is_empty() {
let mut split_ix = cmp::min(2 * CHUNK_BASE, text.len());
let mut split_ix = cmp::min(chunk::MAX_BASE, text.len());
while !text.is_char_boundary(split_ix) {
split_ix -= 1;
}
let (chunk, remainder) = text.split_at(split_ix);
new_chunks.push(Chunk(ArrayString::from(chunk).unwrap()));
new_chunks.push(Chunk::new(chunk));
text = remainder;
}
@@ -135,7 +133,7 @@ impl Rope {
// a chunk ends with 3 bytes of a 4-byte character. These 3 bytes end up being stored in the following chunk, thus wasting
// 3 bytes of storage in current chunk.
// For example, a 1024-byte string can occupy between 32 (full ASCII, 1024/32) and 36 (full 4-byte UTF-8, 1024 / 29 rounded up) chunks.
const MIN_CHUNK_SIZE: usize = 2 * CHUNK_BASE - 3;
const MIN_CHUNK_SIZE: usize = chunk::MAX_BASE - 3;
// We also round up the capacity up by one, for a good measure; we *really* don't want to realloc here, as we assume that the # of characters
// we're working with there is large.
@@ -143,12 +141,12 @@ impl Rope {
let mut new_chunks = Vec::with_capacity(capacity);
while !text.is_empty() {
let mut split_ix = cmp::min(2 * CHUNK_BASE, text.len());
let mut split_ix = cmp::min(chunk::MAX_BASE, text.len());
while !text.is_char_boundary(split_ix) {
split_ix -= 1;
}
let (chunk, remainder) = text.split_at(split_ix);
new_chunks.push(Chunk(ArrayString::from(chunk).unwrap()));
new_chunks.push(Chunk::new(chunk));
text = remainder;
}
@@ -165,6 +163,35 @@ impl Rope {
self.check_invariants();
}
fn push_chunk(&mut self, mut chunk: ChunkSlice) {
self.chunks.update_last(
|last_chunk| {
let split_ix = if last_chunk.text.len() + chunk.len() <= chunk::MAX_BASE {
chunk.len()
} else {
let mut split_ix = cmp::min(
chunk::MIN_BASE.saturating_sub(last_chunk.text.len()),
chunk.len(),
);
while !chunk.is_char_boundary(split_ix) {
split_ix += 1;
}
split_ix
};
let (suffix, remainder) = chunk.split_at(split_ix);
last_chunk.append(suffix);
chunk = remainder;
},
&(),
);
if !chunk.is_empty() {
self.chunks.push(chunk.into());
}
}
pub fn push_front(&mut self, text: &str) {
let suffix = mem::replace(self, Rope::from(text));
self.append(suffix);
@@ -178,7 +205,7 @@ impl Rope {
let mut chunks = self.chunks.cursor::<()>(&()).peekable();
while let Some(chunk) = chunks.next() {
if chunks.peek().is_some() {
assert!(chunk.0.len() + 3 >= CHUNK_BASE);
assert!(chunk.text.len() + 3 >= chunk::MIN_BASE);
}
}
}
@@ -250,7 +277,7 @@ impl Rope {
let overshoot = offset - cursor.start().0;
cursor.start().1
+ cursor.item().map_or(Default::default(), |chunk| {
chunk.offset_to_offset_utf16(overshoot)
chunk.as_slice().offset_to_offset_utf16(overshoot)
})
}
@@ -263,7 +290,7 @@ impl Rope {
let overshoot = offset - cursor.start().0;
cursor.start().1
+ cursor.item().map_or(Default::default(), |chunk| {
chunk.offset_utf16_to_offset(overshoot)
chunk.as_slice().offset_utf16_to_offset(overshoot)
})
}
@@ -275,9 +302,9 @@ impl Rope {
cursor.seek(&offset, Bias::Left, &());
let overshoot = offset - cursor.start().0;
cursor.start().1
+ cursor
.item()
.map_or(Point::zero(), |chunk| chunk.offset_to_point(overshoot))
+ cursor.item().map_or(Point::zero(), |chunk| {
chunk.as_slice().offset_to_point(overshoot)
})
}
pub fn offset_to_point_utf16(&self, offset: usize) -> PointUtf16 {
@@ -289,7 +316,7 @@ impl Rope {
let overshoot = offset - cursor.start().0;
cursor.start().1
+ cursor.item().map_or(PointUtf16::zero(), |chunk| {
chunk.offset_to_point_utf16(overshoot)
chunk.as_slice().offset_to_point_utf16(overshoot)
})
}
@@ -302,7 +329,7 @@ impl Rope {
let overshoot = point - cursor.start().0;
cursor.start().1
+ cursor.item().map_or(PointUtf16::zero(), |chunk| {
chunk.point_to_point_utf16(overshoot)
chunk.as_slice().point_to_point_utf16(overshoot)
})
}
@@ -316,7 +343,7 @@ impl Rope {
cursor.start().1
+ cursor
.item()
.map_or(0, |chunk| chunk.point_to_offset(overshoot))
.map_or(0, |chunk| chunk.as_slice().point_to_offset(overshoot))
}
pub fn point_utf16_to_offset(&self, point: PointUtf16) -> usize {
@@ -335,9 +362,9 @@ impl Rope {
cursor.seek(&point, Bias::Left, &());
let overshoot = point - cursor.start().0;
cursor.start().1
+ cursor
.item()
.map_or(0, |chunk| chunk.point_utf16_to_offset(overshoot, clip))
+ cursor.item().map_or(0, |chunk| {
chunk.as_slice().point_utf16_to_offset(overshoot, clip)
})
}
pub fn unclipped_point_utf16_to_point(&self, point: Unclipped<PointUtf16>) -> Point {
@@ -349,7 +376,7 @@ impl Rope {
let overshoot = Unclipped(point.0 - cursor.start().0);
cursor.start().1
+ cursor.item().map_or(Point::zero(), |chunk| {
chunk.unclipped_point_utf16_to_point(overshoot)
chunk.as_slice().unclipped_point_utf16_to_point(overshoot)
})
}
@@ -358,7 +385,7 @@ impl Rope {
cursor.seek(&offset, Bias::Left, &());
if let Some(chunk) = cursor.item() {
let mut ix = offset - cursor.start();
while !chunk.0.is_char_boundary(ix) {
while !chunk.text.is_char_boundary(ix) {
match bias {
Bias::Left => {
ix -= 1;
@@ -381,7 +408,7 @@ impl Rope {
cursor.seek(&offset, Bias::Right, &());
if let Some(chunk) = cursor.item() {
let overshoot = offset - cursor.start();
*cursor.start() + chunk.clip_offset_utf16(overshoot, bias)
*cursor.start() + chunk.as_slice().clip_offset_utf16(overshoot, bias)
} else {
self.summary().len_utf16
}
@@ -392,7 +419,7 @@ impl Rope {
cursor.seek(&point, Bias::Right, &());
if let Some(chunk) = cursor.item() {
let overshoot = point - cursor.start();
*cursor.start() + chunk.clip_point(overshoot, bias)
*cursor.start() + chunk.as_slice().clip_point(overshoot, bias)
} else {
self.summary().lines
}
@@ -403,7 +430,7 @@ impl Rope {
cursor.seek(&point.0, Bias::Right, &());
if let Some(chunk) = cursor.item() {
let overshoot = Unclipped(point.0 - cursor.start());
*cursor.start() + chunk.clip_point_utf16(overshoot, bias)
*cursor.start() + chunk.as_slice().clip_point_utf16(overshoot, bias)
} else {
self.summary().lines_utf16()
}
@@ -466,7 +493,7 @@ impl fmt::Debug for Rope {
pub struct Cursor<'a> {
rope: &'a Rope,
chunks: sum_tree::Cursor<'a, Chunk, usize>,
chunks: sum_tree::Cursor<'a, Chunk<{ chunk::MAX_BASE }>, usize>,
offset: usize,
}
@@ -500,7 +527,7 @@ impl<'a> Cursor<'a> {
if let Some(start_chunk) = self.chunks.item() {
let start_ix = self.offset - self.chunks.start();
let end_ix = cmp::min(end_offset, self.chunks.end(&())) - self.chunks.start();
slice.push(&start_chunk.0[start_ix..end_ix]);
slice.push(&start_chunk.text[start_ix..end_ix]);
}
if end_offset > self.chunks.end(&()) {
@@ -510,7 +537,7 @@ impl<'a> Cursor<'a> {
});
if let Some(end_chunk) = self.chunks.item() {
let end_ix = end_offset - self.chunks.start();
slice.push(&end_chunk.0[..end_ix]);
slice.push(&end_chunk.text[..end_ix]);
}
}
@@ -525,9 +552,7 @@ impl<'a> Cursor<'a> {
if let Some(start_chunk) = self.chunks.item() {
let start_ix = self.offset - self.chunks.start();
let end_ix = cmp::min(end_offset, self.chunks.end(&())) - self.chunks.start();
summary.add_assign(&D::from_text_summary(&TextSummary::from(
&start_chunk.0[start_ix..end_ix],
)));
summary.add_assign(&D::from_chunk(start_chunk.slice(start_ix..end_ix)));
}
if end_offset > self.chunks.end(&()) {
@@ -535,9 +560,7 @@ impl<'a> Cursor<'a> {
summary.add_assign(&self.chunks.summary(&end_offset, Bias::Right, &()));
if let Some(end_chunk) = self.chunks.item() {
let end_ix = end_offset - self.chunks.start();
summary.add_assign(&D::from_text_summary(&TextSummary::from(
&end_chunk.0[..end_ix],
)));
summary.add_assign(&D::from_chunk(end_chunk.slice(0..end_ix)));
}
}
@@ -555,7 +578,7 @@ impl<'a> Cursor<'a> {
}
pub struct Chunks<'a> {
chunks: sum_tree::Cursor<'a, Chunk, usize>,
chunks: sum_tree::Cursor<'a, Chunk<{ chunk::MAX_BASE }>, usize>,
range: Range<usize>,
offset: usize,
reversed: bool,
@@ -678,11 +701,11 @@ impl<'a> Chunks<'a> {
if let Some(chunk) = self.chunks.item() {
let mut end_ix = self.offset - *self.chunks.start();
if chunk.0.as_bytes()[end_ix - 1] == b'\n' {
if chunk.text.as_bytes()[end_ix - 1] == b'\n' {
end_ix -= 1;
}
if let Some(newline_ix) = chunk.0[..end_ix].rfind('\n') {
if let Some(newline_ix) = chunk.text[..end_ix].rfind('\n') {
self.offset = *self.chunks.start() + newline_ix + 1;
if self.offset_is_valid() {
return true;
@@ -694,7 +717,7 @@ impl<'a> Chunks<'a> {
.search_backward(|summary| summary.text.lines.row > 0, &());
self.offset = *self.chunks.start();
if let Some(chunk) = self.chunks.item() {
if let Some(newline_ix) = chunk.0.rfind('\n') {
if let Some(newline_ix) = chunk.text.rfind('\n') {
self.offset += newline_ix + 1;
if self.offset_is_valid() {
if self.offset == self.chunks.end(&()) {
@@ -731,7 +754,7 @@ impl<'a> Chunks<'a> {
slice_start..slice_end
};
Some(&chunk.0[slice_range])
Some(&chunk.text[slice_range])
}
pub fn lines(self) -> Lines<'a> {
@@ -767,7 +790,7 @@ impl<'a> Iterator for Chunks<'a> {
}
pub struct Bytes<'a> {
chunks: sum_tree::Cursor<'a, Chunk, usize>,
chunks: sum_tree::Cursor<'a, Chunk<{ chunk::MAX_BASE }>, usize>,
range: Range<usize>,
reversed: bool,
}
@@ -798,7 +821,7 @@ impl<'a> Bytes<'a> {
}
let start = self.range.start.saturating_sub(chunk_start);
let end = self.range.end - chunk_start;
Some(&chunk.0.as_bytes()[start..chunk.0.len().min(end)])
Some(&chunk.text.as_bytes()[start..chunk.text.len().min(end)])
}
}
@@ -902,265 +925,13 @@ impl<'a> Lines<'a> {
}
}
#[derive(Clone, Debug, Default)]
struct Chunk(ArrayString<{ 2 * CHUNK_BASE }>);
impl Chunk {
fn offset_to_offset_utf16(&self, target: usize) -> OffsetUtf16 {
let mut offset = 0;
let mut offset_utf16 = OffsetUtf16(0);
for ch in self.0.chars() {
if offset >= target {
break;
}
offset += ch.len_utf8();
offset_utf16.0 += ch.len_utf16();
}
offset_utf16
}
fn offset_utf16_to_offset(&self, target: OffsetUtf16) -> usize {
let mut offset_utf16 = OffsetUtf16(0);
let mut offset = 0;
for ch in self.0.chars() {
if offset_utf16 >= target {
break;
}
offset += ch.len_utf8();
offset_utf16.0 += ch.len_utf16();
}
offset
}
fn offset_to_point(&self, target: usize) -> Point {
let mut offset = 0;
let mut point = Point::new(0, 0);
for ch in self.0.chars() {
if offset >= target {
break;
}
if ch == '\n' {
point.row += 1;
point.column = 0;
} else {
point.column += ch.len_utf8() as u32;
}
offset += ch.len_utf8();
}
point
}
fn offset_to_point_utf16(&self, target: usize) -> PointUtf16 {
let mut offset = 0;
let mut point = PointUtf16::new(0, 0);
for ch in self.0.chars() {
if offset >= target {
break;
}
if ch == '\n' {
point.row += 1;
point.column = 0;
} else {
point.column += ch.len_utf16() as u32;
}
offset += ch.len_utf8();
}
point
}
fn point_to_offset(&self, target: Point) -> usize {
let mut offset = 0;
let mut point = Point::new(0, 0);
for ch in self.0.chars() {
if point >= target {
if point > target {
debug_panic!("point {target:?} is inside of character {ch:?}");
}
break;
}
if ch == '\n' {
point.row += 1;
point.column = 0;
if point.row > target.row {
debug_panic!(
"point {target:?} is beyond the end of a line with length {}",
point.column
);
break;
}
} else {
point.column += ch.len_utf8() as u32;
}
offset += ch.len_utf8();
}
offset
}
fn point_to_point_utf16(&self, target: Point) -> PointUtf16 {
let mut point = Point::zero();
let mut point_utf16 = PointUtf16::new(0, 0);
for ch in self.0.chars() {
if point >= target {
break;
}
if ch == '\n' {
point_utf16.row += 1;
point_utf16.column = 0;
point.row += 1;
point.column = 0;
} else {
point_utf16.column += ch.len_utf16() as u32;
point.column += ch.len_utf8() as u32;
}
}
point_utf16
}
fn point_utf16_to_offset(&self, target: PointUtf16, clip: bool) -> usize {
let mut offset = 0;
let mut point = PointUtf16::new(0, 0);
for ch in self.0.chars() {
if point == target {
break;
}
if ch == '\n' {
point.row += 1;
point.column = 0;
if point.row > target.row {
if !clip {
debug_panic!(
"point {target:?} is beyond the end of a line with length {}",
point.column
);
}
// Return the offset of the newline
return offset;
}
} else {
point.column += ch.len_utf16() as u32;
}
if point > target {
if !clip {
debug_panic!("point {target:?} is inside of codepoint {ch:?}");
}
// Return the offset of the codepoint which we have landed within, bias left
return offset;
}
offset += ch.len_utf8();
}
offset
}
fn unclipped_point_utf16_to_point(&self, target: Unclipped<PointUtf16>) -> Point {
let mut point = Point::zero();
let mut point_utf16 = PointUtf16::zero();
for ch in self.0.chars() {
if point_utf16 == target.0 {
break;
}
if point_utf16 > target.0 {
// If the point is past the end of a line or inside of a code point,
// return the last valid point before the target.
return point;
}
if ch == '\n' {
point_utf16 += PointUtf16::new(1, 0);
point += Point::new(1, 0);
} else {
point_utf16 += PointUtf16::new(0, ch.len_utf16() as u32);
point += Point::new(0, ch.len_utf8() as u32);
}
}
point
}
fn clip_point(&self, target: Point, bias: Bias) -> Point {
for (row, line) in self.0.split('\n').enumerate() {
if row == target.row as usize {
let bytes = line.as_bytes();
let mut column = target.column.min(bytes.len() as u32) as usize;
if column == 0
|| column == bytes.len()
|| (bytes[column - 1] < 128 && bytes[column] < 128)
{
return Point::new(row as u32, column as u32);
}
let mut grapheme_cursor = GraphemeCursor::new(column, bytes.len(), true);
loop {
if line.is_char_boundary(column)
&& grapheme_cursor.is_boundary(line, 0).unwrap_or(false)
{
break;
}
match bias {
Bias::Left => column -= 1,
Bias::Right => column += 1,
}
grapheme_cursor.set_cursor(column);
}
return Point::new(row as u32, column as u32);
}
}
unreachable!()
}
fn clip_point_utf16(&self, target: Unclipped<PointUtf16>, bias: Bias) -> PointUtf16 {
for (row, line) in self.0.split('\n').enumerate() {
if row == target.0.row as usize {
let mut code_units = line.encode_utf16();
let mut column = code_units.by_ref().take(target.0.column as usize).count();
if char::decode_utf16(code_units).next().transpose().is_err() {
match bias {
Bias::Left => column -= 1,
Bias::Right => column += 1,
}
}
return PointUtf16::new(row as u32, column as u32);
}
}
unreachable!()
}
fn clip_offset_utf16(&self, target: OffsetUtf16, bias: Bias) -> OffsetUtf16 {
let mut code_units = self.0.encode_utf16();
let mut offset = code_units.by_ref().take(target.0).count();
if char::decode_utf16(code_units).next().transpose().is_err() {
match bias {
Bias::Left => offset -= 1,
Bias::Right => offset += 1,
}
}
OffsetUtf16(offset)
}
}
impl sum_tree::Item for Chunk {
impl sum_tree::Item for Chunk<{ chunk::MAX_BASE }> {
type Summary = ChunkSummary;
fn summary(&self, _cx: &()) -> Self::Summary {
ChunkSummary::from(self.0.as_str())
ChunkSummary {
text: self.as_slice().text_summary(),
}
}
}
@@ -1169,14 +940,6 @@ pub struct ChunkSummary {
text: TextSummary,
}
impl<'a> From<&'a str> for ChunkSummary {
fn from(text: &'a str) -> Self {
Self {
text: TextSummary::from(text),
}
}
}
impl sum_tree::Summary for ChunkSummary {
type Context = ();
@@ -1323,6 +1086,7 @@ impl std::ops::AddAssign<Self> for TextSummary {
pub trait TextDimension: 'static + for<'a> Dimension<'a, ChunkSummary> {
fn from_text_summary(summary: &TextSummary) -> Self;
fn from_chunk(chunk: ChunkSlice) -> Self;
fn add_assign(&mut self, other: &Self);
}
@@ -1334,6 +1098,10 @@ impl<D1: TextDimension, D2: TextDimension> TextDimension for (D1, D2) {
)
}
fn from_chunk(chunk: ChunkSlice) -> Self {
(D1::from_chunk(chunk), D2::from_chunk(chunk))
}
fn add_assign(&mut self, other: &Self) {
self.0.add_assign(&other.0);
self.1.add_assign(&other.1);
@@ -1355,6 +1123,10 @@ impl TextDimension for TextSummary {
summary.clone()
}
fn from_chunk(chunk: ChunkSlice) -> Self {
chunk.text_summary()
}
fn add_assign(&mut self, other: &Self) {
*self += other;
}
@@ -1375,6 +1147,10 @@ impl TextDimension for usize {
summary.len
}
fn from_chunk(chunk: ChunkSlice) -> Self {
chunk.len()
}
fn add_assign(&mut self, other: &Self) {
*self += other;
}
@@ -1395,6 +1171,10 @@ impl TextDimension for OffsetUtf16 {
summary.len_utf16
}
fn from_chunk(chunk: ChunkSlice) -> Self {
chunk.len_utf16()
}
fn add_assign(&mut self, other: &Self) {
*self += other;
}
@@ -1415,6 +1195,10 @@ impl TextDimension for Point {
summary.lines
}
fn from_chunk(chunk: ChunkSlice) -> Self {
chunk.lines()
}
fn add_assign(&mut self, other: &Self) {
*self += other;
}
@@ -1435,6 +1219,13 @@ impl TextDimension for PointUtf16 {
summary.lines_utf16()
}
fn from_chunk(chunk: ChunkSlice) -> Self {
PointUtf16 {
row: chunk.lines().row,
column: chunk.last_line_len_utf16(),
}
}
fn add_assign(&mut self, other: &Self) {
*self += other;
}
@@ -1919,7 +1710,7 @@ mod tests {
fn text(&self) -> String {
let mut text = String::new();
for chunk in self.chunks.cursor::<()>(&()) {
text.push_str(&chunk.0);
text.push_str(&chunk.text);
}
text
}

crates/rope/src/unclipped.rs

@@ -1,4 +1,4 @@
use crate::{ChunkSummary, TextDimension, TextSummary};
use crate::{chunk::ChunkSlice, ChunkSummary, TextDimension, TextSummary};
use std::ops::{Add, AddAssign, Sub, SubAssign};
#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
@@ -27,6 +27,10 @@ impl<T: TextDimension> TextDimension for Unclipped<T> {
Unclipped(T::from_text_summary(summary))
}
fn from_chunk(chunk: ChunkSlice) -> Self {
Unclipped(T::from_chunk(chunk))
}
fn add_assign(&mut self, other: &Self) {
TextDimension::add_assign(&mut self.0, &other.0);
}