Compare commits

...

11 Commits

Author SHA1 Message Date
João Marcos
8b2d0fdf19 Check index versions to clear pending hunks 2025-03-12 19:14:26 -03:00
Max Brunsfeld
bf91777d1a Refactor buffer diff versioning system for better reliability
Co-authored-by: João Marcos <marcospb19@hotmail.com>
2025-03-11 23:28:29 -03:00
Max Brunsfeld
facfae18a0 Hold repo lock while writing index text
Co-authored-by: João Marcos <marcospb19@hotmail.com>
2025-03-11 23:28:29 -03:00
João Marcos
731618aada WIP 2 2025-03-11 23:28:29 -03:00
Max Brunsfeld
89438fe91b WIP
Co-authored-by: João Marcos <marcospb19@hotmail.com>
2025-03-11 23:28:29 -03:00
Max Brunsfeld
709edc547f wip 2025-03-11 23:28:28 -03:00
João Marcos
8aad1df1ad pending work
Co-authored-by: Conrad Irwin <conrad.irwin@gmail.com>
Co-authored-by: Max Brunsfeld <maxbrunsfeld@gmail.com>
2025-03-11 23:28:28 -03:00
João Marcos
3a1c631c91 clippy 2025-03-11 23:28:28 -03:00
João Marcos
7f2bce566e add project test test_staging_lots_of_hunks_fast 2025-03-11 23:28:28 -03:00
João Marcos
c8fa671960 fix [un]staging operation cancelling previous one
a race condition for when you press `git::StageAndNext` too fast

also
- change pending_hunks from `TreeMap` to `SumTree`
- rename `DiffHunkSecondaryStatus` variants

Co-Authored-By: Max <max@zed.dev>
Co-Authored-By: Cole <cole@zed.dev>
2025-03-11 23:28:28 -03:00
João Marcos
7f3dd3fde6 use pretty_assertions
Co-Authored-By: Cole <cole@zed.dev>
2025-03-11 23:28:28 -03:00
13 changed files with 759 additions and 262 deletions

1
Cargo.lock generated
View File

@@ -2093,6 +2093,7 @@ dependencies = [
"ctor",
"env_logger 0.11.6",
"futures 0.3.31",
"git",
"git2",
"gpui",
"language",

View File

@@ -18,10 +18,12 @@ test-support = []
anyhow.workspace = true
clock.workspace = true
futures.workspace = true
git.workspace = true
git2.workspace = true
gpui.workspace = true
language.workspace = true
log.workspace = true
pretty_assertions.workspace = true
rope.workspace = true
sum_tree.workspace = true
text.workspace = true
@@ -31,7 +33,6 @@ util.workspace = true
ctor.workspace = true
env_logger.workspace = true
gpui = { workspace = true, features = ["test-support"] }
pretty_assertions.workspace = true
rand.workspace = true
serde_json.workspace = true
text = { workspace = true, features = ["test-support"] }

View File

@@ -1,14 +1,13 @@
use futures::channel::oneshot;
use git::repository::IndexTextVersion;
use git2::{DiffLineType as GitDiffLineType, DiffOptions as GitOptions, Patch as GitPatch};
use gpui::{App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Task};
use language::{Language, LanguageRegistry};
use rope::Rope;
use std::cmp::Ordering;
use std::mem;
use std::{future::Future, iter, ops::Range, sync::Arc};
use sum_tree::{SumTree, TreeMap};
use text::ToOffset as _;
use std::{cmp::Ordering, future::Future, iter, mem, ops::Range, sync::Arc};
use sum_tree::SumTree;
use text::{Anchor, Bias, BufferId, OffsetRangeExt, Point};
use text::{AnchorRangeExt, ToOffset as _};
use util::ResultExt;
pub struct BufferDiff {
@@ -26,7 +25,7 @@ pub struct BufferDiffSnapshot {
#[derive(Clone)]
struct BufferDiffInner {
hunks: SumTree<InternalDiffHunk>,
pending_hunks: TreeMap<usize, PendingHunk>,
pending_hunks: SumTree<PendingHunk>,
base_text: language::BufferSnapshot,
base_text_exists: bool,
}
@@ -48,7 +47,7 @@ pub enum DiffHunkStatusKind {
pub enum DiffHunkSecondaryStatus {
HasSecondaryHunk,
OverlapsWithSecondaryHunk,
None,
NoSecondaryHunk,
SecondaryHunkAdditionPending,
SecondaryHunkRemovalPending,
}
@@ -74,6 +73,8 @@ struct InternalDiffHunk {
#[derive(Debug, Clone, PartialEq, Eq)]
struct PendingHunk {
buffer_range: Range<Anchor>,
diff_base_byte_range: Range<usize>,
buffer_version: clock::Global,
new_status: DiffHunkSecondaryStatus,
}
@@ -93,6 +94,16 @@ impl sum_tree::Item for InternalDiffHunk {
}
}
impl sum_tree::Item for PendingHunk {
type Summary = DiffHunkSummary;
// Summarize by the hunk's anchor range so pending hunks can be
// sought/sliced by buffer position, like `InternalDiffHunk` above.
fn summary(&self, _cx: &text::BufferSnapshot) -> Self::Summary {
DiffHunkSummary {
buffer_range: self.buffer_range.clone(),
}
}
}
impl sum_tree::Summary for DiffHunkSummary {
type Context = text::BufferSnapshot;
@@ -176,6 +187,7 @@ impl BufferDiffSnapshot {
}
impl BufferDiffInner {
/// Returns the new index text; the new pending hunks are stored on `self`.
fn stage_or_unstage_hunks(
&mut self,
unstaged_diff: &Self,
@@ -183,7 +195,7 @@ impl BufferDiffInner {
hunks: &[DiffHunk],
buffer: &text::BufferSnapshot,
file_exists: bool,
) -> (Option<Rope>, Vec<(usize, PendingHunk)>) {
) -> Option<Rope> {
let head_text = self
.base_text_exists
.then(|| self.base_text.as_rope().clone());
@@ -195,41 +207,39 @@ impl BufferDiffInner {
// entire file must be either created or deleted in the index.
let (index_text, head_text) = match (index_text, head_text) {
(Some(index_text), Some(head_text)) if file_exists || !stage => (index_text, head_text),
(_, head_text @ _) => {
if stage {
(index_text, head_text) => {
let (rope, new_status) = if stage {
log::debug!("stage all");
return (
(
file_exists.then(|| buffer.as_rope().clone()),
vec![(
0,
PendingHunk {
buffer_version: buffer.version().clone(),
new_status: DiffHunkSecondaryStatus::SecondaryHunkRemovalPending,
},
)],
);
DiffHunkSecondaryStatus::SecondaryHunkRemovalPending,
)
} else {
log::debug!("unstage all");
return (
(
head_text,
vec![(
0,
PendingHunk {
buffer_version: buffer.version().clone(),
new_status: DiffHunkSecondaryStatus::SecondaryHunkAdditionPending,
},
)],
);
}
DiffHunkSecondaryStatus::SecondaryHunkAdditionPending,
)
};
let hunk = PendingHunk {
buffer_range: Anchor::MIN..Anchor::MAX,
diff_base_byte_range: 0..index_text.map_or(0, |rope| rope.len()),
buffer_version: buffer.version().clone(),
new_status,
};
self.pending_hunks = SumTree::from_item(hunk, buffer);
return rope;
}
};
let mut unstaged_hunk_cursor = unstaged_diff.hunks.cursor::<DiffHunkSummary>(buffer);
unstaged_hunk_cursor.next(buffer);
let mut edits = Vec::new();
let mut pending_hunks = Vec::new();
let mut prev_unstaged_hunk_buffer_offset = 0;
let mut prev_unstaged_hunk_base_text_offset = 0;
let mut pending_hunks = SumTree::new(buffer);
let mut old_pending_hunks = self.pending_hunks.cursor::<DiffHunkSummary>(buffer);
// first, merge new hunks into pending_hunks
for DiffHunk {
buffer_range,
diff_base_byte_range,
@@ -237,12 +247,58 @@ impl BufferDiffInner {
..
} in hunks.iter().cloned()
{
if (stage && secondary_status == DiffHunkSecondaryStatus::None)
let preceding_pending_hunks =
old_pending_hunks.slice(&buffer_range.start, Bias::Left, buffer);
pending_hunks.append(preceding_pending_hunks, buffer);
// skip all overlapping old pending hunks
while old_pending_hunks
.item()
.is_some_and(|preceding_pending_hunk_item| {
preceding_pending_hunk_item
.buffer_range
.overlaps(&buffer_range, buffer)
})
{
old_pending_hunks.next(buffer);
}
// merge into pending hunks
if (stage && secondary_status == DiffHunkSecondaryStatus::NoSecondaryHunk)
|| (!stage && secondary_status == DiffHunkSecondaryStatus::HasSecondaryHunk)
{
continue;
}
pending_hunks.push(
PendingHunk {
buffer_range,
diff_base_byte_range,
buffer_version: buffer.version().clone(),
new_status: if stage {
DiffHunkSecondaryStatus::SecondaryHunkRemovalPending
} else {
DiffHunkSecondaryStatus::SecondaryHunkAdditionPending
},
},
buffer,
);
}
// append the remainder
pending_hunks.append(old_pending_hunks.suffix(buffer), buffer);
let mut prev_unstaged_hunk_buffer_offset = 0;
let mut prev_unstaged_hunk_base_text_offset = 0;
let mut edits = Vec::<(Range<usize>, String)>::new();
// then, iterate over all pending hunks (both new ones and the existing ones) and compute the edits
for PendingHunk {
buffer_range,
diff_base_byte_range,
..
} in pending_hunks.iter().cloned()
{
let skipped_hunks = unstaged_hunk_cursor.slice(&buffer_range.start, Bias::Left, buffer);
if let Some(secondary_hunk) = skipped_hunks.last() {
@@ -294,29 +350,24 @@ impl BufferDiffInner {
.chunks_in_range(diff_base_byte_range.clone())
.collect::<String>()
};
pending_hunks.push((
diff_base_byte_range.start,
PendingHunk {
buffer_version: buffer.version().clone(),
new_status: if stage {
DiffHunkSecondaryStatus::SecondaryHunkRemovalPending
} else {
DiffHunkSecondaryStatus::SecondaryHunkAdditionPending
},
},
));
edits.push((index_range, replacement_text));
}
debug_assert!(edits.iter().is_sorted_by_key(|(range, _)| range.start));
let mut new_index_text = Rope::new();
let mut index_cursor = index_text.cursor(0);
for (old_range, replacement_text) in edits {
new_index_text.append(index_cursor.slice(old_range.start));
index_cursor.seek_forward(old_range.end);
new_index_text.push(&replacement_text);
}
new_index_text.append(index_cursor.suffix());
(Some(new_index_text), pending_hunks)
drop(old_pending_hunks);
self.pending_hunks = pending_hunks;
Some(new_index_text)
}
fn hunks_intersecting_range<'a>(
@@ -353,13 +404,14 @@ impl BufferDiffInner {
]
});
let mut pending_hunks_cursor = self.pending_hunks.cursor::<DiffHunkSummary>(buffer);
pending_hunks_cursor.next(buffer);
let mut secondary_cursor = None;
let mut pending_hunks = TreeMap::default();
if let Some(secondary) = secondary.as_ref() {
let mut cursor = secondary.hunks.cursor::<DiffHunkSummary>(buffer);
cursor.next(buffer);
secondary_cursor = Some(cursor);
pending_hunks = secondary.pending_hunks.clone();
}
let max_point = buffer.max_point();
@@ -378,16 +430,31 @@ impl BufferDiffInner {
end_anchor = buffer.anchor_before(end_point);
}
let mut secondary_status = DiffHunkSecondaryStatus::None;
let mut secondary_status = DiffHunkSecondaryStatus::NoSecondaryHunk;
let mut has_pending = false;
if let Some(pending_hunk) = pending_hunks.get(&start_base) {
if !buffer.has_edits_since_in_range(
&pending_hunk.buffer_version,
start_anchor..end_anchor,
) {
has_pending = true;
secondary_status = pending_hunk.new_status;
if start_anchor
.cmp(&pending_hunks_cursor.start().buffer_range.start, buffer)
.is_gt()
{
pending_hunks_cursor.seek_forward(&start_anchor, Bias::Left, buffer);
}
if let Some(pending_hunk) = pending_hunks_cursor.item() {
let mut pending_range = pending_hunk.buffer_range.to_point(buffer);
if pending_range.end.column > 0 {
pending_range.end.row += 1;
pending_range.end.column = 0;
}
if pending_range == (start_point..end_point) {
if !buffer.has_edits_since_in_range(
&pending_hunk.buffer_version,
start_anchor..end_anchor,
) {
has_pending = true;
secondary_status = pending_hunk.new_status;
}
}
}
@@ -449,7 +516,7 @@ impl BufferDiffInner {
diff_base_byte_range: hunk.diff_base_byte_range.clone(),
buffer_range: hunk.buffer_range.clone(),
// The secondary status is not used by callers of this method.
secondary_status: DiffHunkSecondaryStatus::None,
secondary_status: DiffHunkSecondaryStatus::NoSecondaryHunk,
})
})
}
@@ -724,7 +791,7 @@ impl BufferDiff {
base_text,
hunks,
base_text_exists,
pending_hunks: TreeMap::default(),
pending_hunks: SumTree::new(&buffer),
}
}
}
@@ -740,8 +807,8 @@ impl BufferDiff {
cx.background_spawn(async move {
BufferDiffInner {
base_text: base_text_snapshot,
pending_hunks: SumTree::new(&buffer),
hunks: compute_hunks(base_text_pair, buffer),
pending_hunks: TreeMap::default(),
base_text_exists,
}
})
@@ -751,7 +818,7 @@ impl BufferDiff {
BufferDiffInner {
base_text: language::Buffer::build_empty_snapshot(cx),
hunks: SumTree::new(buffer),
pending_hunks: TreeMap::default(),
pending_hunks: SumTree::new(buffer),
base_text_exists: false,
}
}
@@ -764,17 +831,23 @@ impl BufferDiff {
self.secondary_diff.clone()
}
pub fn clear_pending_hunks(&mut self, cx: &mut Context<Self>) {
if let Some(secondary_diff) = &self.secondary_diff {
secondary_diff.update(cx, |diff, _| {
diff.inner.pending_hunks.clear();
});
pub fn failed_to_persist(&mut self, cx: &mut Context<Self>) {
if let Some(changed_range) = self.clear_pending_hunks() {
cx.emit(BufferDiffEvent::DiffChanged {
changed_range: Some(Anchor::MIN..Anchor::MAX),
changed_range: Some(changed_range),
});
}
}
/// Drops all pending hunks, returning the anchor range they covered,
/// or `None` if there were no pending hunks.
fn clear_pending_hunks(&mut self) -> Option<Range<Anchor>> {
// Swap in an empty tree; the old tree is only needed to compute the range.
let hunks = mem::replace(
&mut self.inner.pending_hunks,
SumTree::from_summary(DiffHunkSummary::default()),
);
// `zip` yields `None` when the tree was empty, short-circuiting via `?`.
let (first, last) = hunks.first().zip(hunks.last())?;
Some(first.buffer_range.start..last.buffer_range.end)
}
pub fn stage_or_unstage_hunks(
&mut self,
stage: bool,
@@ -783,20 +856,14 @@ impl BufferDiff {
file_exists: bool,
cx: &mut Context<Self>,
) -> Option<Rope> {
let (new_index_text, pending_hunks) = self.inner.stage_or_unstage_hunks(
let new_index_text = self.inner.stage_or_unstage_hunks(
&self.secondary_diff.as_ref()?.read(cx).inner,
stage,
&hunks,
buffer,
file_exists,
);
if let Some(unstaged_diff) = &self.secondary_diff {
unstaged_diff.update(cx, |diff, _| {
for (offset, pending_hunk) in pending_hunks {
diff.inner.pending_hunks.insert(offset, pending_hunk);
}
});
}
cx.emit(BufferDiffEvent::HunksStagedOrUnstaged(
new_index_text.clone(),
));
@@ -872,9 +939,10 @@ impl BufferDiff {
new_snapshot: BufferDiffSnapshot,
language_changed: bool,
secondary_changed_range: Option<Range<Anchor>>,
read_index_version: Option<IndexTextVersion>,
cx: &mut Context<Self>,
) -> Option<Range<Anchor>> {
let changed_range = self.set_state(new_snapshot.inner, buffer);
let changed_range = self.set_state(new_snapshot.inner, read_index_version, buffer);
if language_changed {
cx.emit(BufferDiffEvent::LanguageChanged);
}
@@ -903,24 +971,32 @@ impl BufferDiff {
fn set_state(
&mut self,
new_state: BufferDiffInner,
mut new_state: BufferDiffInner,
read_index_version: Option<IndexTextVersion>,
buffer: &text::BufferSnapshot,
) -> Option<Range<Anchor>> {
let (base_text_changed, changed_range) =
match (self.inner.base_text_exists, new_state.base_text_exists) {
(false, false) => (true, None),
(true, true)
if self.inner.base_text.remote_id() == new_state.base_text.remote_id() =>
{
(false, new_state.compare(&self.inner, buffer))
let mut changed_range = match (self.inner.base_text_exists, new_state.base_text_exists) {
(false, false) => None,
(true, true) if self.inner.base_text.remote_id() == new_state.base_text.remote_id() => {
new_state.compare(&self.inner, buffer)
}
_ => Some(text::Anchor::MIN..text::Anchor::MAX),
};
if read_index_version.is_some_and(|version| version.is_outdated()) {
if let Some(cleared_range) = self.clear_pending_hunks() {
if let Some(changed_range) = changed_range.as_mut() {
changed_range.start = changed_range.start.min(&cleared_range.start, &buffer);
changed_range.end = changed_range.end.max(&cleared_range.end, &buffer);
} else {
changed_range = Some(cleared_range);
}
_ => (true, Some(text::Anchor::MIN..text::Anchor::MAX)),
};
let pending_hunks = mem::take(&mut self.inner.pending_hunks);
self.inner = new_state;
if !base_text_changed {
self.inner.pending_hunks = pending_hunks;
}
} else {
new_state.pending_hunks = self.inner.pending_hunks.clone();
}
self.inner = new_state;
changed_range
}
@@ -1014,7 +1090,7 @@ impl BufferDiff {
return;
};
this.update(&mut cx, |this, _| {
this.set_state(snapshot, &buffer);
this.set_state(snapshot, None, &buffer);
})
.log_err();
drop(complete_on_drop)
@@ -1070,7 +1146,7 @@ impl BufferDiff {
cx,
);
let snapshot = cx.background_executor().block(snapshot);
let changed_range = self.set_state(snapshot, &buffer);
let changed_range = self.set_state(snapshot, None, &buffer);
cx.emit(BufferDiffEvent::DiffChanged { changed_range });
}
}
@@ -1149,21 +1225,21 @@ impl DiffHunkStatus {
pub fn deleted_none() -> Self {
Self {
kind: DiffHunkStatusKind::Deleted,
secondary: DiffHunkSecondaryStatus::None,
secondary: DiffHunkSecondaryStatus::NoSecondaryHunk,
}
}
pub fn added_none() -> Self {
Self {
kind: DiffHunkStatusKind::Added,
secondary: DiffHunkSecondaryStatus::None,
secondary: DiffHunkSecondaryStatus::NoSecondaryHunk,
}
}
pub fn modified_none() -> Self {
Self {
kind: DiffHunkStatusKind::Modified,
secondary: DiffHunkSecondaryStatus::None,
secondary: DiffHunkSecondaryStatus::NoSecondaryHunk,
}
}
}
@@ -1171,13 +1247,14 @@ impl DiffHunkStatus {
/// Range (crossing new lines), old, new
#[cfg(any(test, feature = "test-support"))]
#[track_caller]
pub fn assert_hunks<Iter>(
diff_hunks: Iter,
pub fn assert_hunks<ExpectedText, HunkIter>(
diff_hunks: HunkIter,
buffer: &text::BufferSnapshot,
diff_base: &str,
expected_hunks: &[(Range<u32>, &str, &str, DiffHunkStatus)],
expected_hunks: &[(Range<u32>, ExpectedText, ExpectedText, DiffHunkStatus)],
) where
Iter: Iterator<Item = DiffHunk>,
HunkIter: Iterator<Item = DiffHunk>,
ExpectedText: AsRef<str>,
{
let actual_hunks = diff_hunks
.map(|hunk| {
@@ -1197,14 +1274,14 @@ pub fn assert_hunks<Iter>(
.map(|(r, old_text, new_text, status)| {
(
Point::new(r.start, 0)..Point::new(r.end, 0),
*old_text,
new_text.to_string(),
old_text.as_ref(),
new_text.as_ref().to_string(),
*status,
)
})
.collect();
assert_eq!(actual_hunks, expected_hunks);
pretty_assertions::assert_eq!(actual_hunks, expected_hunks);
}
#[cfg(test)]
@@ -1263,7 +1340,7 @@ mod tests {
);
diff = cx.update(|cx| BufferDiff::build_empty(&buffer, cx));
assert_hunks(
assert_hunks::<&str, _>(
diff.hunks_intersecting_range(Anchor::MIN..Anchor::MAX, &buffer, None),
&buffer,
&diff_base,
@@ -1585,13 +1662,13 @@ mod tests {
let unstaged_diff = cx.new(|cx| {
let mut diff = BufferDiff::new(&buffer, cx);
diff.set_state(unstaged, &buffer);
diff.set_state(unstaged, None, &buffer);
diff
});
let uncommitted_diff = cx.new(|cx| {
let mut diff = BufferDiff::new(&buffer, cx);
diff.set_state(uncommitted, &buffer);
diff.set_state(uncommitted, None, &buffer);
diff.set_secondary_diff(unstaged_diff);
diff
});
@@ -1601,7 +1678,10 @@ mod tests {
.hunks_intersecting_range(hunk_range.clone(), &buffer, &cx)
.collect::<Vec<_>>();
for hunk in &hunks {
assert_ne!(hunk.secondary_status, DiffHunkSecondaryStatus::None)
assert_ne!(
hunk.secondary_status,
DiffHunkSecondaryStatus::NoSecondaryHunk
)
}
let new_index_text = diff
@@ -1880,10 +1960,10 @@ mod tests {
let hunk_to_change = hunk.clone();
let stage = match hunk.secondary_status {
DiffHunkSecondaryStatus::HasSecondaryHunk => {
hunk.secondary_status = DiffHunkSecondaryStatus::None;
hunk.secondary_status = DiffHunkSecondaryStatus::NoSecondaryHunk;
true
}
DiffHunkSecondaryStatus::None => {
DiffHunkSecondaryStatus::NoSecondaryHunk => {
hunk.secondary_status = DiffHunkSecondaryStatus::HasSecondaryHunk;
false
}

View File

@@ -1445,6 +1445,12 @@ impl FakeFs {
});
}
/// Test-only knob: sets `index_write_delayed` on the fake git state for
/// `dot_git`. Presumably consumed by the fake index-write path to simulate
/// a slow write — the consumer is not visible here; confirm against
/// `FakeGitRepository::set_index_text`.
pub fn delay_git_index_write(&self, dot_git: &Path) {
self.with_git_state(dot_git, true, |state| {
state.index_write_delayed = true;
});
}
pub fn paths(&self, include_dot_git: bool) -> Vec<PathBuf> {
let mut result = Vec::new();
let mut queue = collections::VecDeque::new();

View File

@@ -12,13 +12,15 @@ use rope::Rope;
use schemars::JsonSchema;
use serde::Deserialize;
use std::borrow::Borrow;
use std::cmp::Ordering;
use std::io::Write as _;
use std::process::Stdio;
use std::sync::LazyLock;
use std::{
cmp::Ordering,
path::{Component, Path, PathBuf},
sync::Arc,
sync::{
atomic::{AtomicUsize, Ordering as AtomicOrdering},
Arc, LazyLock,
},
};
use sum_tree::MapSeekTarget;
use util::command::{new_smol_command, new_std_command};
@@ -150,15 +152,43 @@ pub enum ResetMode {
Mixed,
}
/// Tracks whether we overrode the index text after reading it.
///
/// After this is created, you can call `is_outdated` to see if we wrote to
/// the index since the read.
///
/// Note: this doesn't account for external processes writing to the `index`, but it's still useful
/// if the invariant we want to hold is only dependent on our writes (currently, it is).
#[derive(Debug, Clone)]
pub struct IndexTextVersion {
// Shared counter that the repository bumps on every index write.
version_reference: Arc<AtomicUsize>,
// Snapshot of the counter taken when the index text was read.
read_value: usize,
}
impl IndexTextVersion {
/// Captures the current value of the shared write counter at read time.
fn new(version_reference: Arc<AtomicUsize>) -> Self {
IndexTextVersion {
read_value: version_reference.load(AtomicOrdering::Relaxed),
version_reference,
}
}
/// Returns `true` if the index was overridden by ourselves
/// (i.e. the shared counter has advanced past the value we read).
pub fn is_outdated(&self) -> bool {
self.read_value != self.version_reference.load(AtomicOrdering::Relaxed)
}
}
pub trait GitRepository: Send + Sync {
fn reload_index(&self);
/// Returns the contents of an entry in the repository's index, or None if there is no entry for the given path.
/// Returns the contents of an entry in the repository's index, or None if there is no entry
/// for the given path.
///
/// Also returns `None` for symlinks.
fn load_index_text(&self, path: &RepoPath) -> Option<String>;
fn load_index_text(&self, path: &RepoPath) -> Option<(String, IndexTextVersion)>;
/// Returns the contents of an entry in the repository's HEAD, or None if HEAD does not exist or has no entry for the given path.
/// Returns the contents of an entry in the repository's HEAD, or None if HEAD does not exist
/// or has no entry for the given path.
///
/// Also returns `None` for symlinks.
fn load_committed_text(&self, path: &RepoPath) -> Option<String>;
@@ -277,6 +307,8 @@ impl std::fmt::Debug for dyn GitRepository {
pub struct RealGitRepository {
pub repository: Mutex<git2::Repository>,
pub git_binary_path: PathBuf,
/// Incremented when we write to the index.
pub index_version: Arc<AtomicUsize>,
}
impl RealGitRepository {
@@ -284,6 +316,7 @@ impl RealGitRepository {
Self {
repository: Mutex::new(repository),
git_binary_path: git_binary_path.unwrap_or_else(|| PathBuf::from("git")),
index_version: Arc::new(AtomicUsize::new(0)),
}
}
@@ -294,6 +327,63 @@ impl RealGitRepository {
.context("failed to read git work directory")
.map(Path::to_path_buf)
}
/// Writes `content` as the index entry for `path`, or removes the entry
/// when `content` is `None`.
///
/// Staging works in two steps: the text is stored as a blob via
/// `git hash-object -w --stdin`, then the index entry is pointed at that
/// blob with `git update-index --add --cacheinfo`. Removal uses
/// `git update-index --force-remove`. Returns an error if either git
/// subcommand exits unsuccessfully.
fn set_index_text_impl(
&self,
path: &RepoPath,
content: Option<String>,
env: &HashMap<String, String>,
repo: &git2::Repository,
) -> anyhow::Result<()> {
let working_directory = repo
.workdir()
.context("failed to read git work directory")
.map(Path::to_path_buf)?;
if let Some(content) = content {
// Store the new file contents as a loose blob and capture its SHA.
let mut child = new_std_command(&self.git_binary_path)
.current_dir(&working_directory)
.envs(env)
.args(["hash-object", "-w", "--stdin"])
.stdin(Stdio::piped())
.stdout(Stdio::piped())
.spawn()?;
child.stdin.take().unwrap().write_all(content.as_bytes())?;
let output = child.wait_with_output()?.stdout;
let sha = String::from_utf8(output)?;
log::debug!("indexing SHA: {sha}, path {path:?}");
// Point the index entry for `path` at the blob we just wrote.
// NOTE(review): mode is hard-coded to 100644 — executables/symlinks
// would lose their mode here; confirm callers never stage those.
let output = new_std_command(&self.git_binary_path)
.current_dir(&working_directory)
.envs(env)
.args(["update-index", "--add", "--cacheinfo", "100644", &sha])
.arg(path.as_ref())
.output()?;
if !output.status.success() {
return Err(anyhow!(
"Failed to stage:\n{}",
String::from_utf8_lossy(&output.stderr)
));
}
} else {
// No content: drop the index entry entirely.
let output = new_std_command(&self.git_binary_path)
.current_dir(&working_directory)
.envs(env)
.args(["update-index", "--force-remove"])
.arg(path.as_ref())
.output()?;
if !output.status.success() {
return Err(anyhow!(
"Failed to unstage:\n{}",
String::from_utf8_lossy(&output.stderr)
));
}
}
Ok(())
}
}
// https://git-scm.com/book/en/v2/Git-Internals-Git-Objects
@@ -302,7 +392,7 @@ const GIT_MODE_SYMLINK: u32 = 0o120000;
impl GitRepository for RealGitRepository {
fn reload_index(&self) {
if let Ok(mut index) = self.repository.lock().index() {
_ = index.read(false);
index.read(false).log_err();
}
}
@@ -385,14 +475,16 @@ impl GitRepository for RealGitRepository {
Ok(())
}
fn load_index_text(&self, path: &RepoPath) -> Option<String> {
fn load_index_text(&self, path: &RepoPath) -> Option<(String, IndexTextVersion)> {
fn logic(repo: &git2::Repository, path: &RepoPath) -> Result<Option<String>> {
const STAGE_NORMAL: i32 = 0;
let index = repo.index()?;
let mut index = repo.index()?;
// This check is required because index.get_path() unwraps internally :(
check_path_to_repo_path_errors(path)?;
index.read(false)?;
let oid = match index.get_path(path, STAGE_NORMAL) {
Some(entry) if entry.mode != GIT_MODE_SYMLINK => entry.id,
_ => return Ok(None),
@@ -402,11 +494,16 @@ impl GitRepository for RealGitRepository {
Ok(Some(String::from_utf8(content)?))
}
match logic(&self.repository.lock(), path) {
Ok(value) => return value,
Err(err) => log::error!("Error loading index text: {:?}", err),
let repo = self.repository.lock();
let version = IndexTextVersion::new(self.index_version.clone());
match logic(&repo, path) {
Ok(value) => value.map(|string| (string, version)),
Err(err) => {
log::error!("Error loading index text: {:?}", err);
None
}
}
None
}
fn load_committed_text(&self, path: &RepoPath) -> Option<String> {
@@ -417,8 +514,7 @@ impl GitRepository for RealGitRepository {
return None;
}
let content = repo.find_blob(entry.id()).log_err()?.content().to_owned();
let content = String::from_utf8(content).log_err()?;
Some(content)
String::from_utf8(content).log_err()
}
fn set_index_text(
@@ -427,51 +523,10 @@ impl GitRepository for RealGitRepository {
content: Option<String>,
env: &HashMap<String, String>,
) -> anyhow::Result<()> {
let working_directory = self.working_directory()?;
if let Some(content) = content {
let mut child = new_std_command(&self.git_binary_path)
.current_dir(&working_directory)
.envs(env)
.args(["hash-object", "-w", "--stdin"])
.stdin(Stdio::piped())
.stdout(Stdio::piped())
.spawn()?;
child.stdin.take().unwrap().write_all(content.as_bytes())?;
let output = child.wait_with_output()?.stdout;
let sha = String::from_utf8(output)?;
log::debug!("indexing SHA: {sha}, path {path:?}");
let output = new_std_command(&self.git_binary_path)
.current_dir(&working_directory)
.envs(env)
.args(["update-index", "--add", "--cacheinfo", "100644", &sha])
.arg(path.as_ref())
.output()?;
if !output.status.success() {
return Err(anyhow!(
"Failed to stage:\n{}",
String::from_utf8_lossy(&output.stderr)
));
}
} else {
let output = new_std_command(&self.git_binary_path)
.current_dir(&working_directory)
.envs(env)
.args(["update-index", "--force-remove"])
.arg(path.as_ref())
.output()?;
if !output.status.success() {
return Err(anyhow!(
"Failed to unstage:\n{}",
String::from_utf8_lossy(&output.stderr)
));
}
}
Ok(())
let repo = self.repository.lock();
let result = self.set_index_text_impl(path, content, env, &repo);
self.index_version.fetch_add(1, AtomicOrdering::Relaxed);
result
}
fn remote_url(&self, name: &str) -> Option<String> {
@@ -941,7 +996,9 @@ pub struct FakeGitRepositoryState {
pub statuses: HashMap<RepoPath, FileStatus>,
pub current_branch_name: Option<String>,
pub branches: HashSet<String>,
pub index_version: Arc<AtomicUsize>,
pub simulated_index_write_error_message: Option<String>,
pub index_write_delayed: bool,
}
impl FakeGitRepository {
@@ -961,7 +1018,9 @@ impl FakeGitRepositoryState {
statuses: Default::default(),
current_branch_name: Default::default(),
branches: Default::default(),
index_version: Arc::new(AtomicUsize::new(0)),
simulated_index_write_error_message: None,
index_write_delayed: false,
}
}
}
@@ -969,9 +1028,11 @@ impl FakeGitRepositoryState {
impl GitRepository for FakeGitRepository {
fn reload_index(&self) {}
fn load_index_text(&self, path: &RepoPath) -> Option<String> {
fn load_index_text(&self, path: &RepoPath) -> Option<(String, IndexTextVersion)> {
let state = self.state.lock();
state.index_contents.get(path.as_ref()).cloned()
let version = IndexTextVersion::new(state.index_version.clone());
let string = state.index_contents.get(path.as_ref()).cloned();
string.map(|content| (content, version))
}
fn load_committed_text(&self, path: &RepoPath) -> Option<String> {
@@ -994,10 +1055,13 @@ impl GitRepository for FakeGitRepository {
} else {
state.index_contents.remove(path);
}
state.index_version.fetch_add(1, AtomicOrdering::Relaxed);
state
.event_emitter
.try_send(state.path.clone())
.expect("Dropped repo change event");
Ok(())
}

View File

@@ -274,7 +274,7 @@ impl ProjectDiff {
has_staged_hunks = true;
has_unstaged_hunks = true;
}
DiffHunkSecondaryStatus::None
DiffHunkSecondaryStatus::NoSecondaryHunk
| DiffHunkSecondaryStatus::SecondaryHunkRemovalPending => {
has_staged_hunks = true;
}

View File

@@ -11,7 +11,10 @@ use client::Client;
use collections::{hash_map, HashMap, HashSet};
use fs::Fs;
use futures::{channel::oneshot, future::Shared, Future, FutureExt as _, StreamExt};
use git::{blame::Blame, repository::RepoPath};
use git::{
blame::Blame,
repository::{IndexTextVersion, RepoPath},
};
use gpui::{
App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Subscription, Task, WeakEntity,
};
@@ -68,7 +71,7 @@ struct SharedBuffer {
}
#[derive(Default)]
struct BufferDiffState {
pub struct BufferDiffState {
unstaged_diff: Option<WeakEntity<BufferDiff>>,
uncommitted_diff: Option<WeakEntity<BufferDiff>>,
recalculate_diff_task: Option<Task<Result<()>>>,
@@ -84,7 +87,7 @@ struct BufferDiffState {
}
#[derive(Clone, Debug)]
enum DiffBasesChange {
pub enum DiffBasesChange {
SetIndex(Option<String>),
SetHead(Option<String>),
SetEach {
@@ -98,7 +101,7 @@ impl BufferDiffState {
fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
self.language = buffer.read(cx).language().cloned();
self.language_changed = true;
let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), None, cx);
}
fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
@@ -131,7 +134,7 @@ impl BufferDiffState {
},
};
let _ = self.diff_bases_changed(buffer, diff_bases_change, cx);
let _ = self.diff_bases_changed(buffer, diff_bases_change, None, cx);
}
pub fn wait_for_recalculation(&mut self) -> Option<oneshot::Receiver<()>> {
@@ -147,6 +150,8 @@ impl BufferDiffState {
&mut self,
buffer: text::BufferSnapshot,
diff_bases_change: DiffBasesChange,
// Provided when this function is called from event updates and we read the index.
read_index_version: Option<IndexTextVersion>,
cx: &mut Context<Self>,
) -> oneshot::Receiver<()> {
match diff_bases_change {
@@ -188,12 +193,13 @@ impl BufferDiffState {
}
}
self.recalculate_diffs(buffer, cx)
self.recalculate_diffs(buffer, read_index_version, cx)
}
fn recalculate_diffs(
&mut self,
buffer: text::BufferSnapshot,
read_index_version: Option<IndexTextVersion>,
cx: &mut Context<Self>,
) -> oneshot::Receiver<()> {
log::debug!("recalculate diffs");
@@ -257,7 +263,14 @@ impl BufferDiffState {
unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
{
unstaged_diff.update(&mut cx, |diff, cx| {
diff.set_snapshot(&buffer, new_unstaged_diff, language_changed, None, cx)
diff.set_snapshot(
&buffer,
new_unstaged_diff,
language_changed,
None,
read_index_version,
cx,
)
})?
} else {
None
@@ -272,6 +285,7 @@ impl BufferDiffState {
new_uncommitted_diff,
language_changed,
unstaged_changed_range,
None,
cx,
);
})?;
@@ -836,17 +850,23 @@ impl LocalBufferStore {
cx.spawn(move |this, mut cx| async move {
let snapshot =
worktree_handle.update(&mut cx, |tree, _| tree.as_local().unwrap().snapshot())?;
let diff_bases_changes_by_buffer = cx
let diff_bases_changes_by_buffer: Vec<(
Entity<Buffer>,
Option<DiffBasesChange>,
Option<IndexTextVersion>,
)> = cx
.background_spawn(async move {
diff_state_updates
.into_iter()
.filter_map(|(buffer, path, current_index_text, current_head_text)| {
let local_repo = snapshot.local_repo_for_path(&path)?;
let relative_path = local_repo.relativize(&path).ok()?;
let index_text = if current_index_text.is_some() {
local_repo.repo().load_index_text(&relative_path)
let (index_text, index_version) = if current_index_text.is_some() {
let repo = local_repo.repo();
repo.load_index_text(&relative_path).unzip()
} else {
None
(None, None)
};
let head_text = if current_head_text.is_some() {
local_repo.repo().load_committed_text(&relative_path)
@@ -880,14 +900,14 @@ impl LocalBufferStore {
(false, false) => None,
};
Some((buffer, diff_bases_change))
Some((buffer, diff_bases_change, index_version))
})
.collect::<Vec<_>>()
.collect()
})
.await;
this.update(&mut cx, |this, cx| {
for (buffer, diff_bases_change) in diff_bases_changes_by_buffer {
for (buffer, diff_bases_change, index_version) in diff_bases_changes_by_buffer {
let Some(OpenBuffer::Complete { diff_state, .. }) =
this.opened_buffers.get_mut(&buffer.read(cx).remote_id())
else {
@@ -929,6 +949,7 @@ impl LocalBufferStore {
let _ = diff_state.diff_bases_changed(
buffer.text_snapshot(),
diff_bases_change,
index_version,
cx,
);
});
@@ -1533,7 +1554,8 @@ impl BufferStore {
}
};
let rx = diff_state.diff_bases_changed(text_snapshot, diff_bases_change, cx);
let rx =
diff_state.diff_bases_changed(text_snapshot, diff_bases_change, None, cx);
Ok(async move {
rx.await.ok();
@@ -1852,6 +1874,14 @@ impl BufferStore {
}
}
pub fn get_diff_state(&self, buffer_id: BufferId, _: &App) -> Option<Entity<BufferDiffState>> {
if let OpenBuffer::Complete { diff_state, .. } = self.opened_buffers.get(&buffer_id)? {
Some(diff_state.clone())
} else {
None
}
}
pub fn get_uncommitted_diff(
&self,
buffer_id: BufferId,
@@ -1983,7 +2013,7 @@ impl BufferStore {
{
let buffer = buffer.read(cx).text_snapshot();
futures.push(diff_state.update(cx, |diff_state, cx| {
diff_state.recalculate_diffs(buffer, cx)
diff_state.recalculate_diffs(buffer, None, cx)
}));
}
}

View File

@@ -37,6 +37,7 @@ use std::{
future::Future,
path::{Path, PathBuf},
sync::Arc,
time::Duration,
};
use text::BufferId;
use util::{debug_panic, maybe, ResultExt};
@@ -50,6 +51,8 @@ pub struct GitStore {
repositories: Vec<Entity<Repository>>,
active_index: Option<usize>,
update_sender: mpsc::UnboundedSender<GitJob>,
#[cfg(any(test, feature = "test-support"))]
pub simulate_slow_index_write: bool,
_subscriptions: [Subscription; 2],
}
@@ -124,6 +127,8 @@ impl GitStore {
active_index: None,
update_sender,
_subscriptions,
#[cfg(any(test, feature = "test-support"))]
simulate_slow_index_write: false,
}
}
@@ -300,13 +305,22 @@ impl GitStore {
});
let diff = diff.downgrade();
cx.spawn(|this, mut cx| async move {
if let Some(result) = cx.background_spawn(async move { recv.await.ok() }).await
let result = cx.background_spawn(async move { recv.await.ok() }).await;
#[cfg(any(test, feature = "test-support"))]
if this
.update(&mut cx, |this, _| this.simulate_slow_index_write)
.unwrap_or_default()
{
cx.background_executor()
.timer(Duration::from_millis(100))
.await;
}
if let Some(result) = result {
if let Err(error) = result {
diff.update(&mut cx, |diff, cx| {
diff.clear_pending_hunks(cx);
})
.ok();
diff.update(&mut cx, |diff, cx| diff.failed_to_persist(cx))
.ok();
this.update(&mut cx, |_, cx| cx.emit(GitEvent::IndexWriteError(error)))
.ok();
}

View File

@@ -21,7 +21,7 @@ use pretty_assertions::{assert_eq, assert_matches};
use serde_json::json;
#[cfg(not(windows))]
use std::os;
use std::{str::FromStr, sync::OnceLock};
use std::{cell::RefCell, rc::Rc, str::FromStr, sync::OnceLock};
use std::{mem, num::NonZeroU32, ops::Range, task::Poll};
use task::{ResolvedTask, TaskContext};
@@ -941,7 +941,7 @@ async fn test_reporting_fs_changes_to_language_servers(cx: &mut gpui::TestAppCon
cx.executor().run_until_parked();
// Start the language server by opening a buffer with a compatible file extension.
let _ = project
project
.update(cx, |project, cx| {
project.open_local_buffer_with_lsp(path!("/the-root/src/a.rs"), cx)
})
@@ -6008,13 +6008,13 @@ async fn test_uncommitted_diff_for_buffer(cx: &mut gpui::TestAppContext) {
0..0,
"// the-deleted-contents\n",
"",
DiffHunkStatus::deleted(DiffHunkSecondaryStatus::None),
DiffHunkStatus::deleted(DiffHunkSecondaryStatus::NoSecondaryHunk),
)],
);
});
}
#[gpui::test]
#[gpui::test(iterations = 1)]
async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
use DiffHunkSecondaryStatus::*;
init_test(cx);
@@ -6071,7 +6071,30 @@ async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
})
.await
.unwrap();
let mut diff_events = cx.events(&uncommitted_diff);
let invalidated_range: Rc<RefCell<Option<Range<Point>>>> = Rc::new(RefCell::new(None));
buffer.update(cx, |_, cx| {
let invalidated_range = invalidated_range.clone();
cx.subscribe(&uncommitted_diff, move |buffer, _, event, _| match event {
BufferDiffEvent::DiffChanged { changed_range } => {
let Some(changed_range) = changed_range else {
return;
};
let snapshot = buffer.snapshot();
let changed_range = changed_range.to_point(&snapshot);
let mut invalidated_range = invalidated_range.borrow_mut();
if let Some(range) = invalidated_range.take() {
invalidated_range.replace(
range.start.min(changed_range.start)..range.end.max(changed_range.end),
);
} else {
invalidated_range.replace(changed_range);
}
}
_ => {}
})
.detach();
});
// The hunks are initially unstaged.
uncommitted_diff.read_with(cx, |diff, cx| {
@@ -6139,23 +6162,12 @@ async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
});
// The diff emits a change event for the range of the staged hunk.
assert!(matches!(
diff_events.next().await.unwrap(),
BufferDiffEvent::HunksStagedOrUnstaged(_)
));
let event = diff_events.next().await.unwrap();
if let BufferDiffEvent::DiffChanged {
changed_range: Some(changed_range),
} = event
{
let changed_range = changed_range.to_point(&snapshot);
assert_eq!(changed_range, Point::new(1, 0)..Point::new(2, 0));
} else {
panic!("Unexpected event {event:?}");
}
// When the write to the index completes, it appears as staged.
assert_eq!(
invalidated_range.borrow_mut().take(),
Some(Point::new(1, 0)..Point::new(2, 0))
);
cx.run_until_parked();
uncommitted_diff.update(cx, |diff, cx| {
assert_hunks(
diff.hunks(&snapshot, cx),
@@ -6168,7 +6180,12 @@ async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
"",
DiffHunkStatus::deleted(HasSecondaryHunk),
),
(1..2, "two\n", "TWO\n", DiffHunkStatus::modified(None)),
(
1..2,
"two\n",
"TWO\n",
DiffHunkStatus::modified(NoSecondaryHunk),
),
(
3..4,
"four\n",
@@ -6179,17 +6196,10 @@ async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
);
});
// The diff emits a change event for the changed index text.
let event = diff_events.next().await.unwrap();
if let BufferDiffEvent::DiffChanged {
changed_range: Some(changed_range),
} = event
{
let changed_range = changed_range.to_point(&snapshot);
assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
} else {
panic!("Unexpected event {event:?}");
}
assert_eq!(
invalidated_range.borrow_mut().take(),
Some(Point::new(0, 0)..Point::new(5, 0))
);
// Simulate a problem writing to the git index.
fs.set_error_message_for_index_write(
@@ -6217,7 +6227,12 @@ async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
"",
DiffHunkStatus::deleted(HasSecondaryHunk),
),
(1..2, "two\n", "TWO\n", DiffHunkStatus::modified(None)),
(
1..2,
"two\n",
"TWO\n",
DiffHunkStatus::modified(NoSecondaryHunk),
),
(
3..4,
"four\n",
@@ -6227,20 +6242,11 @@ async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
],
);
});
assert!(matches!(
diff_events.next().await.unwrap(),
BufferDiffEvent::HunksStagedOrUnstaged(_)
));
let event = diff_events.next().await.unwrap();
if let BufferDiffEvent::DiffChanged {
changed_range: Some(changed_range),
} = event
{
let changed_range = changed_range.to_point(&snapshot);
assert_eq!(changed_range, Point::new(3, 0)..Point::new(4, 0));
} else {
panic!("Unexpected event {event:?}");
}
assert_eq!(
invalidated_range.borrow_mut().take(),
Some(Point::new(3, 0)..Point::new(4, 0))
);
// When the write fails, the hunk returns to being unstaged.
cx.run_until_parked();
@@ -6256,7 +6262,12 @@ async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
"",
DiffHunkStatus::deleted(HasSecondaryHunk),
),
(1..2, "two\n", "TWO\n", DiffHunkStatus::modified(None)),
(
1..2,
"two\n",
"TWO\n",
DiffHunkStatus::modified(NoSecondaryHunk),
),
(
3..4,
"four\n",
@@ -6267,16 +6278,287 @@ async fn test_staging_hunks(cx: &mut gpui::TestAppContext) {
);
});
let event = diff_events.next().await.unwrap();
if let BufferDiffEvent::DiffChanged {
changed_range: Some(changed_range),
} = event
{
let changed_range = changed_range.to_point(&snapshot);
assert_eq!(changed_range, Point::new(0, 0)..Point::new(5, 0));
} else {
panic!("Unexpected event {event:?}");
assert_eq!(
invalidated_range.borrow_mut().take(),
Some(Point::new(3, 0)..Point::new(4, 0))
);
// Allow writing to the git index to succeed again.
fs.set_error_message_for_index_write("/dir/.git".as_ref(), None);
// Stage two hunks with separate operations.
uncommitted_diff.update(cx, |diff, cx| {
let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
diff.stage_or_unstage_hunks(true, &hunks[0..1], &snapshot, true, cx);
diff.stage_or_unstage_hunks(true, &hunks[2..3], &snapshot, true, cx);
});
// Both staged hunks appear as pending.
uncommitted_diff.update(cx, |diff, cx| {
assert_hunks(
diff.hunks(&snapshot, cx),
&snapshot,
&diff.base_text_string().unwrap(),
&[
(
0..0,
"zero\n",
"",
DiffHunkStatus::deleted(SecondaryHunkRemovalPending),
),
(
1..2,
"two\n",
"TWO\n",
DiffHunkStatus::modified(NoSecondaryHunk),
),
(
3..4,
"four\n",
"FOUR\n",
DiffHunkStatus::modified(SecondaryHunkRemovalPending),
),
],
);
});
// Both staging operations take effect.
cx.run_until_parked();
uncommitted_diff.update(cx, |diff, cx| {
assert_hunks(
diff.hunks(&snapshot, cx),
&snapshot,
&diff.base_text_string().unwrap(),
&[
(0..0, "zero\n", "", DiffHunkStatus::deleted(NoSecondaryHunk)),
(
1..2,
"two\n",
"TWO\n",
DiffHunkStatus::modified(NoSecondaryHunk),
),
(
3..4,
"four\n",
"FOUR\n",
DiffHunkStatus::modified(NoSecondaryHunk),
),
],
);
});
// Set a delay for the next assertions
project.update(cx, |project, cx| {
project.git_store().update(cx, |git, _cx| {
git.simulate_slow_index_write = true;
});
});
fs.delay_git_index_write("/dir/.git".as_ref());
// Unstage two hunks, detect update from index, and then stage a third hunk.
uncommitted_diff.update(cx, |diff, cx| {
let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
let after_one = diff.stage_or_unstage_hunks(false, &hunks[0..=0], &snapshot, true, cx);
diff.stage_or_unstage_hunks(false, &hunks[1..=1], &snapshot, true, cx);
after_one.unwrap().to_string()
});
uncommitted_diff.update(cx, |diff, cx| {
let hunks = diff.hunks(&snapshot, cx).collect::<Vec<_>>();
diff.stage_or_unstage_hunks(false, &hunks[2..=2], &snapshot, true, cx);
});
// bad sequence:
// * unstage hunk
// * start write to the index
// * get fs event for git index
// * finish write to the index
// Wait till all unstaging operations take effect.
cx.run_until_parked();
cx.executor().advance_clock(Duration::from_millis(100));
uncommitted_diff.update(cx, |diff, cx| {
assert_hunks(
diff.hunks(&snapshot, cx),
&snapshot,
&diff.base_text_string().unwrap(),
&[
(
0..0,
"zero\n",
"",
DiffHunkStatus::deleted(HasSecondaryHunk),
),
(
1..2,
"two\n",
"TWO\n",
DiffHunkStatus::modified(HasSecondaryHunk), // NoSecondaryHunk
),
(
3..4,
"four\n",
"FOUR\n",
DiffHunkStatus::modified(HasSecondaryHunk),
),
],
);
});
}
#[allow(clippy::format_collect)]
#[gpui::test]
async fn test_staging_lots_of_hunks_fast(cx: &mut gpui::TestAppContext) {
use DiffHunkSecondaryStatus::*;
init_test(cx);
let different_lines = (0..500)
.step_by(5)
.map(|i| format!("diff {}\n", i))
.collect::<Vec<String>>();
let committed_contents = (0..500).map(|i| format!("{}\n", i)).collect::<String>();
let file_contents = (0..500)
.map(|i| {
if i % 5 == 0 {
different_lines[i / 5].clone()
} else {
format!("{}\n", i)
}
})
.collect::<String>();
let fs = FakeFs::new(cx.background_executor.clone());
fs.insert_tree(
"/dir",
json!({
".git": {},
"file.txt": file_contents.clone()
}),
)
.await;
fs.set_head_for_repo(
"/dir/.git".as_ref(),
&[("file.txt".into(), committed_contents.clone())],
);
fs.set_index_for_repo(
"/dir/.git".as_ref(),
&[("file.txt".into(), committed_contents.clone())],
);
let project = Project::test(fs.clone(), ["/dir".as_ref()], cx).await;
let buffer = project
.update(cx, |project, cx| {
project.open_local_buffer("/dir/file.txt", cx)
})
.await
.unwrap();
let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot());
let uncommitted_diff = project
.update(cx, |project, cx| {
project.open_uncommitted_diff(buffer.clone(), cx)
})
.await
.unwrap();
let range = Anchor::MIN..snapshot.anchor_after(snapshot.max_point());
let mut expected_hunks: Vec<(Range<u32>, String, String, DiffHunkStatus)> = (0..500)
.step_by(5)
.map(|i| {
(
i as u32..i as u32 + 1,
format!("{}\n", i),
different_lines[i / 5].clone(),
DiffHunkStatus::modified(HasSecondaryHunk),
)
})
.collect();
// The hunks are initially unstaged
uncommitted_diff.read_with(cx, |diff, cx| {
assert_hunks(
diff.hunks(&snapshot, cx),
&snapshot,
&diff.base_text_string().unwrap(),
&expected_hunks,
);
});
for (_, _, _, status) in expected_hunks.iter_mut() {
*status = DiffHunkStatus::modified(SecondaryHunkRemovalPending);
}
// Stage every hunk with a different call
uncommitted_diff.update(cx, |diff, cx| {
let hunks = diff
.hunks_intersecting_range(range.clone(), &snapshot, cx)
.collect::<Vec<_>>();
for hunk in hunks {
diff.stage_or_unstage_hunks(true, &[hunk], &snapshot, true, cx);
}
assert_hunks(
diff.hunks(&snapshot, cx),
&snapshot,
&diff.base_text_string().unwrap(),
&expected_hunks,
);
});
// If we wait, we'll have no pending hunks
cx.run_until_parked();
for (_, _, _, status) in expected_hunks.iter_mut() {
*status = DiffHunkStatus::modified(NoSecondaryHunk);
}
uncommitted_diff.update(cx, |diff, cx| {
assert_hunks(
diff.hunks(&snapshot, cx),
&snapshot,
&diff.base_text_string().unwrap(),
&expected_hunks,
);
});
for (_, _, _, status) in expected_hunks.iter_mut() {
*status = DiffHunkStatus::modified(SecondaryHunkAdditionPending);
}
// Unstage every hunk with a different call
uncommitted_diff.update(cx, |diff, cx| {
let hunks = diff
.hunks_intersecting_range(range, &snapshot, cx)
.collect::<Vec<_>>();
for hunk in hunks {
diff.stage_or_unstage_hunks(false, &[hunk], &snapshot, true, cx);
}
assert_hunks(
diff.hunks(&snapshot, cx),
&snapshot,
&diff.base_text_string().unwrap(),
&expected_hunks,
);
});
// If we wait, we'll have no pending hunks, again
cx.run_until_parked();
for (_, _, _, status) in expected_hunks.iter_mut() {
*status = DiffHunkStatus::modified(HasSecondaryHunk);
}
uncommitted_diff.update(cx, |diff, cx| {
assert_hunks(
diff.hunks(&snapshot, cx),
&snapshot,
&diff.base_text_string().unwrap(),
&expected_hunks,
);
});
}
#[gpui::test]

View File

@@ -406,6 +406,7 @@ where
self.seek_internal(pos, bias, &mut (), cx)
}
/// Advances the cursor and returns traversed items as a tree.
#[track_caller]
pub fn slice<Target>(
&mut self,

View File

@@ -225,6 +225,15 @@ impl<T: Item> SumTree<T> {
}))
}
/// Useful in cases where the item type has a non-trivial context type, but the zero value of the summary type doesn't depend on that context.
pub fn from_summary(summary: T::Summary) -> Self {
SumTree(Arc::new(Node::Leaf {
summary,
items: ArrayVec::new(),
item_summaries: ArrayVec::new(),
}))
}
pub fn from_item(item: T, cx: &<T::Summary as Summary>::Context) -> Self {
let mut tree = Self::new(cx);
tree.push(item, cx);

View File

@@ -136,6 +136,7 @@ where
pub trait AnchorRangeExt {
fn cmp(&self, b: &Range<Anchor>, buffer: &BufferSnapshot) -> Ordering;
fn overlaps(&self, b: &Range<Anchor>, buffer: &BufferSnapshot) -> bool;
}
impl AnchorRangeExt for Range<Anchor> {
@@ -145,4 +146,8 @@ impl AnchorRangeExt for Range<Anchor> {
ord => ord,
}
}
fn overlaps(&self, other: &Range<Anchor>, buffer: &BufferSnapshot) -> bool {
self.start.cmp(&other.end, buffer).is_lt() && other.start.cmp(&self.end, buffer).is_lt()
}
}

View File

@@ -1057,12 +1057,16 @@ impl Worktree {
let snapshot = this.snapshot();
cx.background_spawn(async move {
if let Some(repo) = snapshot.repository_for_path(&path) {
if let Some(repo_path) = repo.relativize(&path).log_err() {
if let Some(git_repo) =
snapshot.git_repositories.get(&repo.work_directory_id)
{
return Ok(git_repo.repo_ptr.load_index_text(&repo_path));
}
if let Some((repo_path, git_repo)) = repo
.relativize(&path)
.log_err()
.zip(snapshot.git_repositories.get(&repo.work_directory_id))
{
let string = git_repo
.repo_ptr
.load_index_text(&repo_path)
.map(|(string, _)| string);
return Ok(string);
}
}
Err(anyhow!("No repository found for {path:?}"))
@@ -1081,12 +1085,12 @@ impl Worktree {
let snapshot = this.snapshot();
cx.background_spawn(async move {
if let Some(repo) = snapshot.repository_for_path(&path) {
if let Some(repo_path) = repo.relativize(&path).log_err() {
if let Some(git_repo) =
snapshot.git_repositories.get(&repo.work_directory_id)
{
return Ok(git_repo.repo_ptr.load_committed_text(&repo_path));
}
if let Some((repo_path, git_repo)) = repo
.relativize(&path)
.log_err()
.zip(snapshot.git_repositories.get(&repo.work_directory_id))
{
return Ok(git_repo.repo_ptr.load_committed_text(&repo_path));
}
}
Err(anyhow!("No repository found for {path:?}"))