Compare commits: vim-syntax...rework-age
7 Commits

| Author | SHA1 | Date |
|---|---|---|
| | 35a773c492 | |
| | ea7fe49fb5 | |
| | 7340513eee | |
| | 4568ed12c3 | |
| | fd00f0ba73 | |
| | 43c5db9583 | |
| | cba4effb2d | |
@@ -984,8 +984,10 @@ mod tests {
)
.unwrap()
});
action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
});
action_log.update(cx, |log, cx| log.save_edited_buffer(buffer.clone(), cx))
})
.await
.unwrap();
cx.run_until_parked();

// When opening the assistant diff, the cursor is positioned on the first hunk.

@@ -1,6 +1,7 @@
use std::fmt::Write as _;
use std::io::Write;
use std::ops::Range;
use std::path::PathBuf;
use std::sync::Arc;
use std::time::Instant;

@@ -355,7 +356,7 @@ impl Thread {
last_restore_checkpoint: None,
pending_checkpoint: None,
tool_use: ToolUseState::new(tools.clone()),
action_log: cx.new(|_| ActionLog::new(project.clone())),
action_log: cx.new(|cx| ActionLog::new(project.clone(), cx)),
initial_project_snapshot: {
let project_snapshot = Self::project_snapshot(project, cx);
cx.foreground_executor()
@@ -430,7 +431,7 @@ impl Thread {
prompt_builder,
tools,
tool_use,
action_log: cx.new(|_| ActionLog::new(project)),
action_log: cx.new(|cx| ActionLog::new(project, cx)),
initial_project_snapshot: Task::ready(serialized.initial_project_snapshot).shared(),
request_token_usage: serialized.request_token_usage,
cumulative_token_usage: serialized.cumulative_token_usage,
@@ -1070,30 +1071,91 @@ impl Thread {
fn attached_tracked_files_state(
&self,
messages: &mut Vec<LanguageModelRequestMessage>,
cx: &App,
cx: &mut App,
) {
const STALE_FILES_HEADER: &str = "These files changed since last read:";
let mut message = String::new();

let mut stale_message = String::new();

let action_log = self.action_log.read(cx);

for stale_file in action_log.stale_buffers(cx) {
let Some(file) = stale_file.read(cx).file() else {
continue;
};

if stale_message.is_empty() {
write!(&mut stale_message, "{}\n", STALE_FILES_HEADER).ok();
self.action_log.update(cx, |action_log, cx| {
let stale_files = action_log
.stale_buffers(cx)
.filter_map(|buffer| buffer.read(cx).file());
for (i, file) in stale_files.enumerate() {
if i == 0 {
writeln!(&mut message, "These files changed since last read:").ok();
}
writeln!(&mut message, "- {}", file.full_path(cx).display()).ok();
}

writeln!(&mut stale_message, "- {}", file.path().display()).ok();
}
if let Some(diagnostic_changes) = action_log.flush_diagnostic_changes(cx) {
let project = self.project.read(cx);
writeln!(
&mut message,
"Diagnostics have changed in the following files:",
)
.ok();
for change in diagnostic_changes {
let path = change.project_path;
let Some(worktree) = project.worktree_for_id(path.worktree_id, cx) else {
continue;
};
let path = PathBuf::from(worktree.read(cx).root_name()).join(path.path);

write!(&mut message, "- {} (", path.display()).ok();
if change.fixed_diagnostic_count > 0 {
write!(&mut message, "{} fixed", change.fixed_diagnostic_count).ok();
if change.introduced_diagnostic_count > 0 {
write!(
&mut message,
", {} introduced",
change.introduced_diagnostic_count
)
.ok();
}
} else if change.introduced_diagnostic_count > 0 {
write!(
&mut message,
"{} introduced",
change.introduced_diagnostic_count
)
.ok();
}
write!(&mut message, ") ").ok();

if change.diagnostics.is_empty() {
writeln!(&mut message, "No diagnostics remaining.").ok();
} else {
writeln!(&mut message, "Remaining diagnostics:").ok();
}

for entry in change.diagnostics {
let mut lines = entry.diagnostic.message.split('\n');
writeln!(
&mut message,
" - line {}: {}",
entry.range.start.0.row + 1,
lines.next().unwrap()
)
.ok();
for line in lines {
writeln!(&mut message, " {}", line).ok();
}
}

if action_log
.last_edited_buffer()
.map_or(false, |last_edited| last_edited.consecutive_edit_count > 2)
{
writeln!(&mut message, "Because you've failed repeatedly, give up. Don't attempt to fix the diagnostics. Wait user input or continue with the next task.").ok();
writeln!(&mut message, "Don't keep trying to fix the diagnostics. Stop and wait for the user to help you fix them.").ok();
}
}
}
});

let mut content = Vec::with_capacity(2);

if !stale_message.is_empty() {
content.push(stale_message.into());
if !message.is_empty() {
content.push(message.into());
}

if !content.is_empty() {

@@ -27,7 +27,7 @@ pub struct ToolUse {
pub needs_confirmation: bool,
}

pub const USING_TOOL_MARKER: &str = "<using_tool>";
pub const USING_TOOL_MARKER: &str = "Using tool:";

pub struct ToolUseState {
tools: Entity<ToolWorkingSet>,

@@ -22,6 +22,7 @@ gpui.workspace = true
icons.workspace = true
language.workspace = true
language_model.workspace = true
log.workspace = true
parking_lot.workspace = true
project.workspace = true
serde.workspace = true

@@ -1,42 +1,123 @@
use anyhow::{Context as _, Result};
use buffer_diff::BufferDiff;
use collections::BTreeMap;
use futures::{StreamExt, channel::mpsc};
use collections::{BTreeMap, HashMap, HashSet};
use futures::{
FutureExt as _, StreamExt,
channel::{mpsc, oneshot},
};
use gpui::{App, AppContext, AsyncApp, Context, Entity, Subscription, Task, WeakEntity};
use language::{Anchor, Buffer, BufferEvent, DiskState, Point, ToPoint};
use project::{Project, ProjectItem, lsp_store::OpenLspBufferHandle};
use std::{cmp, ops::Range, sync::Arc};
use text::{Edit, Patch, Rope};
use language::{Anchor, Buffer, BufferEvent, DiagnosticEntry, DiskState, Point, ToPoint};
use project::{Project, ProjectItem, ProjectPath, lsp_store::OpenLspBufferHandle};
use std::{
cmp::{self, Ordering},
ops::Range,
sync::Arc,
time::Duration,
};
use text::{BufferId, Edit, Patch, PointUtf16, Rope, Unclipped};
use util::RangeExt;

/// Tracks actions performed by tools in a thread
pub struct ActionLog {
/// Buffers that we want to notify the model about when they change.
tracked_buffers: BTreeMap<Entity<Buffer>, TrackedBuffer>,
/// Has the model edited a file since it last checked diagnostics?
edited_since_project_diagnostics_check: bool,
paths_with_pre_existing_diagnostics: HashSet<ProjectPath>,
edited_since_diagnostics_report: bool,
diagnostic_state: DiagnosticState,
last_edited_buffer: Option<LastEditedBuffer>,
/// The project this action log is associated with
project: Entity<Project>,
_project_subscription: Subscription,
}

#[derive(Clone)]
pub struct LastEditedBuffer {
pub buffer_id: BufferId,
pub consecutive_edit_count: usize,
}

impl ActionLog {
/// Creates a new, empty action log associated with the given project.
pub fn new(project: Entity<Project>) -> Self {
pub fn new(project: Entity<Project>, cx: &mut Context<Self>) -> Self {
let pre_existing_diagnostics = project.update(cx, |project, cx| {
project
.lsp_store()
.read(cx)
.diagnostic_summaries(true, cx)
.map(|(path, _, _)| path.clone())
.collect()
});

let _project_subscription = cx.subscribe(&project, |this, _, event, cx| {
if let project::Event::BufferEdited(buffer) = event {
if let Some(project_path) = buffer.read(cx).project_path(cx) {
this.paths_with_pre_existing_diagnostics
.remove(&project_path);
}
}
});

Self {
tracked_buffers: BTreeMap::default(),
edited_since_project_diagnostics_check: false,
paths_with_pre_existing_diagnostics: pre_existing_diagnostics,
edited_since_diagnostics_report: false,
diagnostic_state: Default::default(),
project,
last_edited_buffer: None,
_project_subscription,
}
}

/// Notifies a diagnostics check
pub fn checked_project_diagnostics(&mut self) {
self.edited_since_project_diagnostics_check = false;
pub fn flush_diagnostic_changes(&mut self, cx: &App) -> Option<Vec<DiagnosticChange>> {
if !self.edited_since_diagnostics_report {
return None;
}

let new_state = self.diagnostic_state(cx);
let changes = new_state.compare(&self.diagnostic_state, cx);
self.diagnostic_state = new_state;
self.edited_since_diagnostics_report = false;
Some(changes).filter(|changes| !changes.is_empty())
}

/// Returns true if any files have been edited since the last project diagnostics check
pub fn has_edited_files_since_project_diagnostics_check(&self) -> bool {
self.edited_since_project_diagnostics_check
pub fn last_edited_buffer(&self) -> Option<LastEditedBuffer> {
self.last_edited_buffer.clone()
}

fn diagnostic_state(&self, cx: &App) -> DiagnosticState {
let mut diagnostics_for_open_buffers = HashMap::default();
let mut diagnostics_for_non_open_buffers = HashMap::default();

let project = self.project.read(cx);
let all_diagnostics = project.lsp_store().read(cx).all_diagnostics();

for (project_path, diagnostics) in all_diagnostics {
if self
.paths_with_pre_existing_diagnostics
.contains(&project_path)
{
continue;
}
match project.get_open_buffer(&project_path, cx) {
Some(buffer) => {
let diagnostics = buffer
.read(cx)
.snapshot()
.diagnostics_in_range(Anchor::MIN..Anchor::MAX, false)
.filter(|entry| entry.diagnostic.is_primary)
.collect();
diagnostics_for_open_buffers.insert(buffer, diagnostics);
}
None => {
diagnostics_for_non_open_buffers.insert(project_path.clone(), diagnostics);
}
}
}

DiagnosticState {
diagnostics_for_open_buffers,
diagnostics_for_non_open_paths: diagnostics_for_non_open_buffers,
}
}

fn track_buffer(
@@ -71,6 +152,12 @@ impl ActionLog {
status = TrackedBufferStatus::Modified;
unreviewed_changes = Patch::default();
}

if let Some(project_path) = buffer.read(cx).project_path(cx) {
self.paths_with_pre_existing_diagnostics
.remove(&project_path);
}

TrackedBuffer {
buffer: buffer.clone(),
base_text,
@@ -269,21 +356,88 @@ impl ActionLog {
self.track_buffer(buffer, false, cx);
}

/// Track a buffer as read, so we can notify the model about user edits.
pub fn will_create_buffer(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
/// Save and track a new buffer
pub fn save_new_buffer(
&mut self,
buffer: Entity<Buffer>,
cx: &mut Context<Self>,
) -> Task<Result<()>> {
self.track_buffer(buffer.clone(), true, cx);
self.buffer_edited(buffer, cx)
self.save_edited_buffer(buffer, cx)
}

/// Mark a buffer as edited, so we can refresh it in the context
pub fn buffer_edited(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
self.edited_since_project_diagnostics_check = true;
/// Save and track an edited buffer
pub fn save_edited_buffer(
&mut self,
buffer: Entity<Buffer>,
cx: &mut Context<Self>,
) -> Task<Result<()>> {
self.edited_since_diagnostics_report = true;

let saved_buffer_id = buffer.read(cx).remote_id();
match &mut self.last_edited_buffer {
Some(LastEditedBuffer {
buffer_id,
consecutive_edit_count,
}) if *buffer_id == saved_buffer_id => *consecutive_edit_count += 1,
_ => {
self.last_edited_buffer = Some(LastEditedBuffer {
buffer_id: saved_buffer_id,
consecutive_edit_count: 1,
})
}
}

let tracked_buffer = self.track_buffer(buffer.clone(), false, cx);
if let TrackedBufferStatus::Deleted = tracked_buffer.status {
tracked_buffer.status = TrackedBufferStatus::Modified;
}
tracked_buffer.schedule_diff_update(ChangeAuthor::Agent, cx);

let project = self.project.clone();

cx.spawn(async move |_this, cx| {
let (tx, mut rx) = oneshot::channel();
let mut tx = Some(tx);

let _subscription = cx.subscribe(&project, move |_, event, _| match event {
project::Event::DiskBasedDiagnosticsFinished { .. } => {
if let Some(tx) = tx.take() {
tx.send(()).ok();
}
}
_ => {}
});

project
.update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))?
.await?;

let has_lang_server = project.update(cx, |project, cx| {
project.lsp_store().update(cx, |lsp_store, cx| {
buffer.update(cx, |buffer, cx| {
lsp_store
.language_servers_for_local_buffer(buffer, cx)
.next()
.is_some()
})
})
})?;

if has_lang_server {
let timeout = cx.background_executor().timer(Duration::from_secs(30));
futures::select! {
_ = rx => Ok(()),
_ = timeout.fuse() => {
log::info!("Did not receive diagnostics update 30s after agent edit");
// We don't want to fail the tool here
Ok(())
}
}
} else {
Ok(())
}
})
}

pub fn will_delete_buffer(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
@@ -497,6 +651,149 @@ impl ActionLog {
}
}

#[derive(Default)]
struct DiagnosticState {
diagnostics_for_open_buffers: HashMap<Entity<Buffer>, Vec<DiagnosticEntry<Anchor>>>,
diagnostics_for_non_open_paths:
HashMap<ProjectPath, Vec<DiagnosticEntry<Unclipped<PointUtf16>>>>,
}

#[derive(Debug, PartialEq, Eq)]
pub struct DiagnosticChange {
pub project_path: ProjectPath,
pub fixed_diagnostic_count: usize,
pub introduced_diagnostic_count: usize,
pub diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
}

impl DiagnosticState {
fn compare(&self, old_state: &Self, cx: &App) -> Vec<DiagnosticChange> {
let mut changes = Vec::new();
let empty = Vec::new();

for (buffer, new) in &self.diagnostics_for_open_buffers {
let old = old_state
.diagnostics_for_open_buffers
.get(&buffer)
.unwrap_or(&empty);
let buffer = buffer.read(cx);
let Some(project_path) = buffer.project_path(cx) else {
continue;
};

let (introduced, fixed) = Self::compare_diagnostics(new, old, |a, b| a.cmp(b, buffer));
if introduced > 0 || fixed > 0 {
changes.push(DiagnosticChange {
project_path,
fixed_diagnostic_count: fixed,
introduced_diagnostic_count: introduced,
diagnostics: new.into_iter().map(|entry| entry.resolve(buffer)).collect(),
});
}
}

for (buffer, old) in &old_state.diagnostics_for_open_buffers {
if !self.diagnostics_for_open_buffers.contains_key(&buffer) && old.len() > 0 {
let buffer = buffer.read(cx);
let Some(project_path) = buffer.project_path(cx) else {
continue;
};
changes.push(DiagnosticChange {
project_path,
fixed_diagnostic_count: old.len(),
introduced_diagnostic_count: 0,
diagnostics: vec![],
});
}
}

let empty = Vec::new();

for (project_path, new) in &self.diagnostics_for_non_open_paths {
let old = old_state
.diagnostics_for_non_open_paths
.get(&project_path)
.unwrap_or(&empty);

let (introduced, fixed) = Self::compare_diagnostics(new, old, |a, b| a.cmp(b));
if introduced > 0 || fixed > 0 {
changes.push(DiagnosticChange {
project_path: project_path.clone(),
fixed_diagnostic_count: fixed,
introduced_diagnostic_count: introduced,
diagnostics: new.clone(),
});
}
}

for (project_path, old) in &old_state.diagnostics_for_non_open_paths {
if !self
.diagnostics_for_non_open_paths
.contains_key(&project_path)
&& old.len() > 0
{
changes.push(DiagnosticChange {
project_path: project_path.clone(),
fixed_diagnostic_count: old.len(),
introduced_diagnostic_count: 0,
diagnostics: vec![],
});
}
}

changes
}

fn compare_diagnostics<T>(
new: &[DiagnosticEntry<T>],
old: &[DiagnosticEntry<T>],
cmp: impl Fn(&DiagnosticEntry<T>, &DiagnosticEntry<T>) -> Ordering,
) -> (usize, usize) {
let mut introduced = 0;
let mut fixed = 0;

let mut old_iter = old.iter().peekable();
let mut new_iter = new.iter().peekable();

loop {
match (old_iter.peek(), new_iter.peek()) {
(Some(old_entry), Some(new_entry)) => {
match cmp(old_entry, new_entry) {
Ordering::Less => {
// Old entry comes first and isn't in new - it's fixed
fixed += 1;
old_iter.next();
}
Ordering::Greater => {
// New entry comes first and isn't in old - it's introduced
introduced += 1;
new_iter.next();
}
Ordering::Equal => {
// They're the same - just advance both iterators
old_iter.next();
new_iter.next();
}
}
}
(Some(_), None) => {
// Only old entries left - they're all fixed
old_iter.next();
fixed += 1;
}
(None, Some(_)) => {
// Only new entries left - they're all introduced
new_iter.next();
introduced += 1;
}
(None, None) => break,
}
}

(introduced, fixed)
}
}

fn apply_non_conflicting_edits(
patch: &Patch<u32>,
edits: Vec<Edit<u32>>,
@@ -667,7 +964,7 @@ mod tests {
use super::*;
use buffer_diff::DiffHunkStatusKind;
use gpui::TestAppContext;
use language::Point;
use language::{Diagnostic, LanguageServerId, Point};
use project::{FakeFs, Fs, Project, RemoveOptions};
use rand::prelude::*;
use serde_json::json;
@@ -696,7 +993,7 @@ mod tests {

let fs = FakeFs::new(cx.executor());
let project = Project::test(fs.clone(), [], cx).await;
let action_log = cx.new(|_| ActionLog::new(project.clone()));
let action_log = cx.new(|cx| ActionLog::new(project.clone(), cx));
let buffer = cx.new(|cx| Buffer::local("abc\ndef\nghi\njkl\nmno", cx));

cx.update(|cx| {
@@ -711,8 +1008,10 @@ mod tests {
.edit([(Point::new(4, 2)..Point::new(4, 3), "O")], None, cx)
.unwrap()
});
action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
});
action_log.update(cx, |log, cx| log.save_edited_buffer(buffer.clone(), cx))
})
.await
.unwrap();
cx.run_until_parked();
assert_eq!(
buffer.read_with(cx, |buffer, _| buffer.text()),
@@ -766,7 +1065,7 @@ mod tests {

let fs = FakeFs::new(cx.executor());
let project = Project::test(fs.clone(), [], cx).await;
let action_log = cx.new(|_| ActionLog::new(project.clone()));
let action_log = cx.new(|cx| ActionLog::new(project.clone(), cx));
let buffer = cx.new(|cx| Buffer::local("abc\ndef\nghi\njkl\nmno\npqr", cx));

cx.update(|cx| {
@@ -783,8 +1082,10 @@ mod tests {
.unwrap();
buffer.finalize_last_transaction();
});
action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
});
action_log.update(cx, |log, cx| log.save_edited_buffer(buffer.clone(), cx))
})
.await
.unwrap();
cx.run_until_parked();
assert_eq!(
buffer.read_with(cx, |buffer, _| buffer.text()),
@@ -840,7 +1141,7 @@ mod tests {

let fs = FakeFs::new(cx.executor());
let project = Project::test(fs.clone(), [], cx).await;
let action_log = cx.new(|_| ActionLog::new(project.clone()));
let action_log = cx.new(|cx| ActionLog::new(project.clone(), cx));
let buffer = cx.new(|cx| Buffer::local("abc\ndef\nghi\njkl\nmno", cx));

cx.update(|cx| {
@@ -850,8 +1151,10 @@ mod tests {
.edit([(Point::new(1, 2)..Point::new(2, 3), "F\nGHI")], None, cx)
.unwrap()
});
action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
});
action_log.update(cx, |log, cx| log.save_edited_buffer(buffer.clone(), cx))
})
.await
.unwrap();
cx.run_until_parked();
assert_eq!(
buffer.read_with(cx, |buffer, _| buffer.text()),
@@ -929,7 +1232,7 @@ mod tests {

let fs = FakeFs::new(cx.executor());
let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
let action_log = cx.new(|_| ActionLog::new(project.clone()));
let action_log = cx.new(|cx| ActionLog::new(project.clone(), cx));

let fs = FakeFs::new(cx.executor());
fs.insert_tree(path!("/dir"), json!({})).await;
@@ -946,12 +1249,10 @@ mod tests {
.unwrap();
cx.update(|cx| {
buffer.update(cx, |buffer, cx| buffer.set_text("lorem", cx));
action_log.update(cx, |log, cx| log.will_create_buffer(buffer.clone(), cx));
});
project
.update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
.await
.unwrap();
action_log.update(cx, |log, cx| log.save_new_buffer(buffer.clone(), cx))
})
.await
.unwrap();
cx.run_until_parked();
assert_eq!(
unreviewed_hunks(&action_log, cx),
@@ -1005,7 +1306,7 @@ mod tests {
.read_with(cx, |project, cx| project.find_project_path("dir/file2", cx))
.unwrap();

let action_log = cx.new(|_| ActionLog::new(project.clone()));
let action_log = cx.new(|cx| ActionLog::new(project.clone(), cx));
let buffer1 = project
.update(cx, |project, cx| {
project.open_buffer(file1_path.clone(), cx)
@@ -1068,9 +1369,8 @@ mod tests {
.await
.unwrap();
buffer2.update(cx, |buffer, cx| buffer.set_text("IPSUM", cx));
action_log.update(cx, |log, cx| log.will_create_buffer(buffer2.clone(), cx));
project
.update(cx, |project, cx| project.save_buffer(buffer2.clone(), cx))
action_log
.update(cx, |log, cx| log.save_new_buffer(buffer2.clone(), cx))
.await
.unwrap();

@@ -1103,7 +1403,7 @@ mod tests {
fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
.await;
let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
let action_log = cx.new(|_| ActionLog::new(project.clone()));
let action_log = cx.new(|cx| ActionLog::new(project.clone(), cx));
let file_path = project
.read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
.unwrap();
@@ -1124,8 +1424,11 @@ mod tests {
.edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
.unwrap()
});
action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
});
action_log.update(cx, |log, cx| log.save_edited_buffer(buffer.clone(), cx))
})
.await
.unwrap();

cx.run_until_parked();
assert_eq!(
buffer.read_with(cx, |buffer, _| buffer.text()),
@@ -1238,7 +1541,7 @@ mod tests {
fs.insert_tree(path!("/dir"), json!({"file": "abc\ndef\nghi\njkl\nmno"}))
.await;
let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
let action_log = cx.new(|_| ActionLog::new(project.clone()));
let action_log = cx.new(|cx| ActionLog::new(project.clone(), cx));
let file_path = project
.read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
.unwrap();
@@ -1259,8 +1562,10 @@ mod tests {
.edit([(Point::new(5, 2)..Point::new(5, 3), "O")], None, cx)
.unwrap()
});
action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
});
action_log.update(cx, |log, cx| log.save_edited_buffer(buffer.clone(), cx))
})
.await
.unwrap();
cx.run_until_parked();
assert_eq!(
buffer.read_with(cx, |buffer, _| buffer.text()),
@@ -1314,7 +1619,7 @@ mod tests {
fs.insert_tree(path!("/dir"), json!({"file": "content"}))
.await;
let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
let action_log = cx.new(|_| ActionLog::new(project.clone()));
let action_log = cx.new(|cx| ActionLog::new(project.clone(), cx));
let file_path = project
.read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
.unwrap();
@@ -1369,7 +1674,7 @@ mod tests {

let fs = FakeFs::new(cx.executor());
let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
let action_log = cx.new(|_| ActionLog::new(project.clone()));
let action_log = cx.new(|cx| ActionLog::new(project.clone(), cx));
let file_path = project
.read_with(cx, |project, cx| {
project.find_project_path("dir/new_file", cx)
@@ -1382,8 +1687,10 @@ mod tests {
.unwrap();
cx.update(|cx| {
buffer.update(cx, |buffer, cx| buffer.set_text("content", cx));
action_log.update(cx, |log, cx| log.will_create_buffer(buffer.clone(), cx));
});
action_log.update(cx, |log, cx| log.save_new_buffer(buffer.clone(), cx))
})
.await
.unwrap();
project
.update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))
.await
@@ -1417,6 +1724,177 @@ mod tests {
assert_eq!(unreviewed_hunks(&action_log, cx), vec![]);
}

#[gpui::test]
async fn test_track_diagnostics(cx: &mut TestAppContext) {
init_test(cx);

let fs = FakeFs::new(cx.executor());
fs.insert_tree(
"/dir",
json!({
"src": {
"one.rs": "fn one(a: B) -> C { d }",
"two.rs": "fn two(e: F) { G::H }",
"three.rs": "fn three() -> { i(); }",
}
}),
)
.await;

let language_server_id = LanguageServerId(0);
let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;

let diagnostics_1 = vec![language::DiagnosticEntry {
range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 11)),
diagnostic: Diagnostic {
message: "pre-existing error 1".into(),
group_id: 0,
is_primary: true,
..Default::default()
},
}];
let diagnostics_2 = vec![language::DiagnosticEntry {
range: Unclipped(PointUtf16::new(0, 18))..Unclipped(PointUtf16::new(0, 19)),
diagnostic: Diagnostic {
message: "pre-existing error 2".into(),
group_id: 0,
is_primary: true,
..Default::default()
},
}];
project.update(cx, |project, cx| {
project.lsp_store().update(cx, |lsp_store, cx| {
lsp_store
.update_diagnostic_entries(
language_server_id,
"/dir/src/one.rs".into(),
None,
diagnostics_1.clone(),
cx,
)
.unwrap();
lsp_store
.update_diagnostic_entries(
language_server_id,
"/dir/src/two.rs".into(),
None,
diagnostics_2.clone(),
cx,
)
.unwrap();
});
});

let action_log = cx.new(|cx| ActionLog::new(project.clone(), cx));

let buffer = project
.update(cx, |project, cx| {
project.open_local_buffer("/dir/src/one.rs", cx)
})
.await
.unwrap();
let worktree_id = buffer.read_with(cx, |buffer, cx| buffer.file().unwrap().worktree_id(cx));

action_log.update(cx, |action_log, cx| {
action_log.track_buffer(buffer.clone(), false, cx);
});

let diagnostic_changes = action_log
.update(cx, |action_log, cx| action_log.flush_diagnostic_changes(cx))
.unwrap();
assert_eq!(
diagnostic_changes,
vec![DiagnosticChange {
project_path: (worktree_id, "src/one.rs").into(),
fixed_diagnostic_count: 0,
introduced_diagnostic_count: 1,
diagnostics: diagnostics_1
},]
);

let save_task = action_log.update(cx, |action_log, cx| {
action_log.save_edited_buffer(buffer.clone(), cx)
});

let diagnostics_1 = vec![
language::DiagnosticEntry {
range: Unclipped(PointUtf16::new(0, 10))..Unclipped(PointUtf16::new(0, 11)),
diagnostic: Diagnostic {
message: "pre-existing error 1".into(),
group_id: 0,
is_primary: true,
..Default::default()
},
},
language::DiagnosticEntry {
range: Unclipped(PointUtf16::new(0, 20))..Unclipped(PointUtf16::new(0, 21)),
diagnostic: Diagnostic {
message: "new error".into(),
group_id: 0,
is_primary: true,
..Default::default()
},
},
];
let diagnostics_3 = vec![language::DiagnosticEntry {
range: Unclipped(PointUtf16::new(0, 0))..Unclipped(PointUtf16::new(0, 0)),
diagnostic: Diagnostic {
message: "new error 3".into(),
group_id: 0,
is_primary: true,
..Default::default()
},
}];
project.update(cx, |project, cx| {
project.lsp_store().update(cx, |lsp_store, cx| {
lsp_store
.update_diagnostic_entries(
language_server_id,
"/dir/src/one.rs".into(),
None,
diagnostics_1.clone(),
cx,
)
.unwrap();
lsp_store
.update_diagnostic_entries(
language_server_id,
"/dir/src/three.rs".into(),
None,
diagnostics_3.clone(),
cx,
)
.unwrap();
lsp_store.disk_based_diagnostics_finished(language_server_id, cx);
});
});

save_task.await.unwrap();

// The diagnostics in `two.rs` are pre-existing, and that file has not
// been edited, so they are not included.
let diagnostic_changes = action_log
.update(cx, |action_log, cx| action_log.flush_diagnostic_changes(cx))
.unwrap();
assert_eq!(
diagnostic_changes,
vec![
DiagnosticChange {
project_path: (worktree_id, "src/one.rs").into(),
fixed_diagnostic_count: 0,
introduced_diagnostic_count: 1,
diagnostics: diagnostics_1
},
DiagnosticChange {
project_path: (worktree_id, "src/three.rs").into(),
fixed_diagnostic_count: 0,
introduced_diagnostic_count: 1,
diagnostics: diagnostics_3
}
]
);
}

#[gpui::test(iterations = 100)]
async fn test_random_diffs(mut rng: StdRng, cx: &mut TestAppContext) {
init_test(cx);
@@ -1429,7 +1907,7 @@ mod tests {
let fs = FakeFs::new(cx.executor());
fs.insert_tree(path!("/dir"), json!({"file": text})).await;
let project = Project::test(fs.clone(), [path!("/dir").as_ref()], cx).await;
let action_log = cx.new(|_| ActionLog::new(project.clone()));
let action_log = cx.new(|cx| ActionLog::new(project.clone(), cx));
let file_path = project
.read_with(cx, |project, cx| project.find_project_path("dir/file", cx))
.unwrap();
@@ -1469,9 +1947,14 @@ mod tests {
cx.update(|cx| {
buffer.update(cx, |buffer, cx| buffer.randomly_edit(&mut rng, 1, cx));
if is_agent_change {
action_log.update(cx, |log, cx| log.buffer_edited(buffer.clone(), cx));
action_log
.update(cx, |log, cx| log.save_edited_buffer(buffer.clone(), cx))
} else {
Task::ready(Ok(()))
}
});
})
.await
.unwrap();
}
}

@@ -241,13 +241,10 @@ impl Tool for CodeActionTool {
format!("Completed code action: {}", title)
};

project
.update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))?
.await?;

action_log.update(cx, |log, cx| {
log.buffer_edited(buffer.clone(), cx)
})?;
log.save_edited_buffer(buffer.clone(), cx)
})?.await?;

Ok(response)
} else {

@@ -97,14 +97,11 @@ impl Tool for CreateFileTool {
cx.update(|cx| {
buffer.update(cx, |buffer, cx| buffer.set_text(contents, cx));
action_log.update(cx, |action_log, cx| {
action_log.will_create_buffer(buffer.clone(), cx)
});
})?;

project
.update(cx, |project, cx| project.save_buffer(buffer, cx))?
.await
.map_err(|err| anyhow!("Unable to save buffer for {destination_path}: {err}"))?;
action_log.save_new_buffer(buffer.clone(), cx)
})
})?
.await
.map_err(|err| anyhow!("Unable to save buffer for {destination_path}: {err}"))?;

Ok(format!("Created file {destination_path}"))
})

@@ -81,7 +81,7 @@ impl Tool for DiagnosticsTool {
input: serde_json::Value,
_messages: &[LanguageModelRequestMessage],
project: Entity<Project>,
action_log: Entity<ActionLog>,
_action_log: Entity<ActionLog>,
cx: &mut App,
) -> ToolResult {
match serde_json::from_value::<DiagnosticsToolInput>(input)
@@ -152,10 +152,6 @@ impl Tool for DiagnosticsTool {
}
}

action_log.update(cx, |action_log, _cx| {
action_log.checked_project_diagnostics();
});

if has_diagnostics {
Task::ready(Ok(output)).into()
} else {

@@ -1,11 +1,14 @@
use crate::{replace::replace_with_flexible_indent, schema::json_schema_for};
use anyhow::{Context as _, Result, anyhow};
use assistant_tool::{ActionLog, Tool, ToolResult};

use gpui::{App, AppContext, AsyncApp, Entity, Task};

use language_model::{LanguageModelRequestMessage, LanguageModelToolSchemaFormat};
use project::Project;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};

use std::{path::PathBuf, sync::Arc};
use ui::IconName;

@@ -160,24 +163,22 @@ impl Tool for EditFileTool {
buffer.finalize_last_transaction();
buffer.snapshot()
});
action_log.update(cx, |log, cx| {
log.buffer_edited(buffer.clone(), cx)
});
snapshot
})?;

project.update( cx, |project, cx| {
project.save_buffer(buffer, cx)
action_log.update(cx, |log, cx| {
log.save_edited_buffer(buffer.clone(), cx)
})?.await?;

let diff_str = cx.background_spawn(async move {
let new_text = snapshot.text();
language::unified_diff(&old_text, &new_text)
let diff_str = cx.background_spawn({
let snapshot = snapshot.clone();
async move {
let new_text = snapshot.text();
language::unified_diff(&old_text, &new_text)
}
}).await;


Ok(format!("Edited {}:\n\n```diff\n{}\n```", input.path.display(), diff_str))

}).into()
}
}

@@ -186,7 +186,7 @@ mod test {
let fs = FakeFs::new(cx.executor());
fs.insert_tree("/root", json!({})).await;
let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
let action_log = cx.new(|_| ActionLog::new(project.clone()));
let action_log = cx.new(|cx| ActionLog::new(project.clone(), cx));
let result = cx
.update(|cx| {
let input = json!({
@@ -216,7 +216,7 @@ mod test {
)
.await;
let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
let action_log = cx.new(|_| ActionLog::new(project.clone()));
let action_log = cx.new(|cx| ActionLog::new(project.clone(), cx));
let result = cx
.update(|cx| {
let input = json!({
@@ -245,7 +245,7 @@ mod test {
let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
let language_registry = project.read_with(cx, |project, _| project.languages().clone());
language_registry.add(Arc::new(rust_lang()));
let action_log = cx.new(|_| ActionLog::new(project.clone()));
let action_log = cx.new(|cx| ActionLog::new(project.clone(), cx));

let result = cx
.update(|cx| {
@@ -314,7 +314,7 @@ mod test {
)
.await;
let project = Project::test(fs.clone(), [path!("/root").as_ref()], cx).await;
let action_log = cx.new(|_| ActionLog::new(project.clone()));
let action_log = cx.new(|cx| ActionLog::new(project.clone(), cx));
let result = cx
.update(|cx| {
let input = json!({

@@ -129,13 +129,9 @@ impl Tool for RenameTool {
})?
.await?;

project
.update(cx, |project, cx| project.save_buffer(buffer.clone(), cx))?
.await?;

action_log.update(cx, |log, cx| {
log.buffer_edited(buffer.clone(), cx)
})?;
log.save_edited_buffer(buffer.clone(), cx)
})?.await?;

Ok(format!("Renamed '{}' to '{}'", input.symbol, input.new_name))
}).into()

@@ -230,6 +230,23 @@ pub struct Diagnostic {
pub data: Option<Value>,
}

impl Ord for Diagnostic {
fn cmp(&self, other: &Self) -> Ordering {
other
.is_primary
.cmp(&self.is_primary)
.then_with(|| self.is_disk_based.cmp(&other.is_disk_based))
.then_with(|| self.severity.cmp(&other.severity))
.then_with(|| self.message.cmp(&other.message))
}
}

impl PartialOrd for Diagnostic {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}

/// An operation used to synchronize this buffer with its other replicas.
#[derive(Clone, Debug, PartialEq)]
pub enum Operation {

@@ -9,7 +9,7 @@ use std::{
ops::Range,
};
use sum_tree::{self, Bias, SumTree};
use text::{Anchor, FromAnchor, PointUtf16, ToOffset};
use text::{Anchor, AnchorRangeExt, FromAnchor, PointUtf16, ToOffset, Unclipped};

/// A set of diagnostics associated with a given buffer, provided
/// by a single language server.
@@ -246,6 +246,12 @@ impl sum_tree::Item for DiagnosticEntry<Anchor> {
}

impl DiagnosticEntry<Anchor> {
pub fn cmp(&self, other: &Self, buffer: &text::BufferSnapshot) -> Ordering {
self.range
.cmp(&other.range, buffer)
.then_with(|| self.diagnostic.cmp(&other.diagnostic))
}

/// Converts the [DiagnosticEntry] to a different buffer coordinate type.
pub fn resolve<O: FromAnchor>(&self, buffer: &text::BufferSnapshot) -> DiagnosticEntry<O> {
DiagnosticEntry {
@@ -256,6 +262,20 @@ impl DiagnosticEntry<Anchor> {
}
}

impl DiagnosticEntry<Unclipped<PointUtf16>> {
pub fn cmp(&self, other: &Self) -> Ordering {
self.range
.start
.0
.row
.cmp(&other.range.start.0.row)
.then_with(|| self.range.start.0.column.cmp(&other.range.start.0.column))
.then_with(|| self.range.end.0.row.cmp(&other.range.end.0.row))
.then_with(|| self.range.end.0.column.cmp(&other.range.end.0.column))
.then_with(|| self.diagnostic.cmp(&other.diagnostic))
}
}

impl Default for Summary {
fn default() -> Self {
Self {

@@ -2169,19 +2169,11 @@ impl LocalLspStore {
mut diagnostics: Vec<DiagnosticEntry<Unclipped<PointUtf16>>>,
cx: &mut Context<LspStore>,
) -> Result<()> {
fn compare_diagnostics(a: &Diagnostic, b: &Diagnostic) -> Ordering {
Ordering::Equal
.then_with(|| b.is_primary.cmp(&a.is_primary))
.then_with(|| a.is_disk_based.cmp(&b.is_disk_based))
.then_with(|| a.severity.cmp(&b.severity))
.then_with(|| a.message.cmp(&b.message))
}

diagnostics.sort_unstable_by(|a, b| {
Ordering::Equal
.then_with(|| a.range.start.cmp(&b.range.start))
.then_with(|| b.range.end.cmp(&a.range.end))
.then_with(|| compare_diagnostics(&a.diagnostic, &b.diagnostic))
.then_with(|| a.diagnostic.cmp(&b.diagnostic))
});

let snapshot = self.buffer_snapshot_for_lsp_version(buffer, server_id, version, cx)?;
@@ -5938,6 +5930,29 @@ impl LspStore {
}
}

pub fn all_diagnostics(
&self,
) -> HashMap<ProjectPath, Vec<DiagnosticEntry<Unclipped<PointUtf16>>>> {
let mut result = HashMap::default();
if let Some(this) = self.as_local() {
for (worktree_id, diagnostics_by_path) in &this.diagnostics {
for (path, diagnostics_by_server_id) in diagnostics_by_path {
let project_path = ProjectPath {
worktree_id: *worktree_id,
path: path.clone(),
};
let mut all_diagnostics = Vec::new();
for (_, diagnostics) in diagnostics_by_server_id {
all_diagnostics.extend_from_slice(&diagnostics);
}
all_diagnostics.sort_unstable_by_key(|entry| entry.range.start);
result.insert(project_path, all_diagnostics);
}
}
}
result
}

pub fn diagnostic_summary(&self, include_ignored: bool, cx: &App) -> DiagnosticSummary {
let mut summary = DiagnosticSummary::default();
for (_, _, path_summary) in self.diagnostic_summaries(include_ignored, cx) {

@@ -303,6 +303,7 @@ pub enum Event {
RevealInProjectPanel(ProjectEntryId),
SnippetEdit(BufferId, Vec<(lsp::Range, Snippet)>),
ExpandedAllForEntry(WorktreeId, ProjectEntryId),
BufferEdited(Entity<Buffer>),
}

pub enum DebugAdapterClientState {
@@ -2905,7 +2906,7 @@ impl Project {
})
.ok();
}

cx.emit(Event::BufferEdited(buffer.clone()));
self.enqueue_buffer_ordered_message(BufferOrderedMessage::Operation {
buffer_id,
operation,

@@ -3125,6 +3125,12 @@ impl FromAnchor for usize {
}
}

impl<T: FromAnchor> FromAnchor for Unclipped<T> {
fn from_anchor(anchor: &Anchor, snapshot: &BufferSnapshot) -> Self {
Unclipped(T::from_anchor(anchor, snapshot))
}
}

#[derive(Clone, Copy, Debug, PartialEq)]
pub enum LineEnding {
Unix,