Compare commits

...

21 Commits

Author SHA1 Message Date
Michael Sloan
f3cb4e1b28 WIP implementation, pausing work on this for now 2025-09-04 16:07:14 -06:00
Michael Sloan
dc2879759c Progress towards scoping zeta edit history to projects 2025-09-02 00:26:50 -06:00
Michael Sloan
523c514669 Merge branch 'main' into zeta-record-recently-active-files-when-data-collection-is-enabled 2025-09-01 12:54:22 -06:00
Michael Sloan
9199f975dd Resolve merge conflicts 2025-08-28 18:33:43 -06:00
Michael Sloan
e045025f71 Merge branch 'main' into zeta-record-recently-active-files-when-data-collection-is-enabled 2025-08-28 18:30:46 -06:00
Michael Sloan
177b0d7f5e Estimate time spent navigating and editing code 2025-08-28 17:43:43 -06:00
Michael Sloan
05b066aa98 Only track recent editors if data collection is enabled 2025-08-27 13:50:15 -06:00
Michael Sloan
56ac4320fa Misc cleanup 2025-08-27 11:41:51 -06:00
Michael Sloan
8b2147aeef Send additional context for edit predictions data via a telemetry event
Also:

* Removes old PredictEditsBody fields that don't have anticipated future use

* Sorts diagnostics by proximity to cursor and truncates based on json byte count

* Brings back cursor_offset
2025-08-27 00:09:08 -06:00
Michael Sloan
7447dc81d9 Clippy 2025-08-26 15:49:21 -06:00
Michael Sloan
952a26998d Include last cursor position in recent file entries + handle multibuffers 2025-08-26 15:46:14 -06:00
Michael Sloan
dd607049f8 Make entry_for_path return a reference instead of cloning 2025-08-26 14:31:39 -06:00
Michael Sloan
09b3650f6c Remove cursor_offset 2025-08-26 13:05:53 -06:00
Michael Sloan
884e7e6750 Filter out rapid changes in active item 2025-08-26 12:00:39 -06:00
Michael Sloan
5b97ebb109 Include cursor position of current file 2025-08-26 11:50:46 -06:00
Michael Sloan
829cffd37f Also record repo relative file path 2025-08-26 00:24:20 -06:00
Michael Sloan
b9cd8f5d2a Cleanup + only record git info if current file may be in repo 2025-08-26 00:22:41 -06:00
Michael Sloan
87609557f0 Filter out excessively long paths 2025-08-25 22:47:00 -06:00
Michael Sloan
ee6a8a20e2 Use worktree status and git status when filtering recent files list 2025-08-25 22:44:08 -06:00
Michael Sloan
b40794d413 Refactor Repository::status_for_path to return FileStatus instead of StatusEntry 2025-08-25 22:01:36 -06:00
Michael Sloan
b696a32518 zeta: Record recently active files when data collection is enabled 2025-08-25 21:57:15 -06:00
20 changed files with 1080 additions and 288 deletions

1
Cargo.lock generated
View File

@@ -20793,6 +20793,7 @@ dependencies = [
"language_model",
"log",
"menu",
"multi_buffer",
"postage",
"project",
"rand 0.8.5",

View File

@@ -438,13 +438,16 @@ impl MessageEditor {
let Some(entry) = self.project.read(cx).entry_for_path(&project_path, cx) else {
return Task::ready(Err(anyhow!("project entry not found")));
};
let Some(worktree) = self.project.read(cx).worktree_for_entry(entry.id, cx) else {
let directory_path = entry.path.clone();
let Some(worktree) = self
.project
.read(cx)
.worktree_for_id(project_path.worktree_id, cx)
else {
return Task::ready(Err(anyhow!("worktree not found")));
};
let project = self.project.clone();
cx.spawn(async move |_, cx| {
let directory_path = entry.path.clone();
let worktree_id = worktree.read_with(cx, |worktree, _| worktree.id())?;
let file_paths = worktree.read_with(cx, |worktree, _cx| {
collect_files_in_path(worktree, &directory_path)

View File

@@ -3948,15 +3948,15 @@ impl AcpThreadView {
MentionUri::PastedImage => {}
MentionUri::Directory { abs_path } => {
let project = workspace.project();
let Some(entry) = project.update(cx, |project, cx| {
let Some(entry_id) = project.update(cx, |project, cx| {
let path = project.find_project_path(abs_path, cx)?;
project.entry_for_path(&path, cx)
project.entry_for_path(&path, cx).map(|entry| entry.id)
}) else {
return;
};
project.update(cx, |_, cx| {
cx.emit(project::Event::RevealInProjectPanel(entry.id));
cx.emit(project::Event::RevealInProjectPanel(entry_id));
});
}
MentionUri::Symbol {
@@ -3969,11 +3969,9 @@ impl AcpThreadView {
line_range,
} => {
let project = workspace.project();
let Some((path, _)) = project.update(cx, |project, cx| {
let path = project.find_project_path(path, cx)?;
let entry = project.entry_for_path(&path, cx)?;
Some((path, entry))
}) else {
let Some(path) =
project.update(cx, |project, cx| project.find_project_path(path, cx))
else {
return;
};

View File

@@ -987,7 +987,8 @@ impl MentionLink {
.read(cx)
.project()
.read(cx)
.entry_for_path(&project_path, cx)?;
.entry_for_path(&project_path, cx)?
.clone();
Some(MentionLink::File(project_path, entry))
}
Self::SYMBOL => {

View File

@@ -2,7 +2,9 @@ use std::str::FromStr;
use std::sync::Arc;
use anyhow::Context as _;
use serde::{Deserialize, Serialize};
use serde::{Deserialize, Deserializer, Serialize, Serializer};
use serde_json::value::RawValue;
use std::marker::PhantomData;
use strum::{Display, EnumIter, EnumString};
use uuid::Uuid;
@@ -138,24 +140,24 @@ pub enum LanguageModelProvider {
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PredictEditsBody {
#[serde(skip_serializing_if = "Option::is_none", default)]
pub outline: Option<String>,
pub input_events: String,
pub input_excerpt: String,
#[serde(skip_serializing_if = "Option::is_none", default)]
pub speculated_output: Option<String>,
/// Whether the user provided consent for sampling this interaction.
#[serde(default, alias = "data_collection_permission")]
pub can_collect_data: bool,
/// Note that this is no longer sent, in favor of `PredictEditsAdditionalContext`.
#[serde(skip_serializing_if = "Option::is_none", default)]
pub diagnostic_groups: Option<Vec<(String, serde_json::Value)>>,
/// Info about the git repository state, only present when can_collect_data is true.
/// Info about the git repository state, only present when can_collect_data is true. Note that
/// this is no longer sent, in favor of `PredictEditsAdditionalContext`.
#[serde(skip_serializing_if = "Option::is_none", default)]
pub git_info: Option<PredictEditsGitInfo>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PredictEditsGitInfo {
/// full_path to the repo (worktree name + relative path to repo)
pub worktree_path: Option<String>,
/// SHA of git HEAD commit at time of prediction.
#[serde(skip_serializing_if = "Option::is_none", default)]
pub head_sha: Option<String>,
@@ -178,6 +180,55 @@ pub struct AcceptEditPredictionBody {
pub request_id: Uuid,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PredictEditsTrainingData {
pub request_id: Uuid,
/// When true, `request_id` is an ID that corresponds to an edit prediction.
pub has_prediction: bool,
/// State that `events` is based on. Initially this is `GitHead` and subsequent uploads will
/// then be based on the previous upload.
pub diff_base: PredictEditsDiffBase,
/// Fine-grained edit events atop `diff_base`.
#[serde(skip_serializing_if = "Vec::is_empty", default)]
pub events: Vec<SerializedJson<PredictEditsEvent>>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum PredictEditsDiffBase {
GitHead { git_info: PredictEditsGitInfo },
PreviousUpload { request_id: Uuid },
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PredictEditsEvent {
pub entry_id: usize,
#[serde(skip_serializing_if = "Option::is_none", default)]
pub path: Option<String>,
pub timestamp_ms: u64,
pub data: PredictEditsEventData,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum PredictEditsEventData {
MoveCursor {
offset: usize,
#[serde(skip_serializing_if = "Vec::is_empty", default)]
diagnostic_groups: Vec<(String, Box<RawValue>)>,
#[serde(skip_serializing_if = "is_default", default)]
diagnostic_groups_truncated: bool,
},
Create {
content: String,
},
Delete,
Edit {
unified_diff: String,
},
MarkDiffTooLarge,
}
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum CompletionMode {
@@ -336,6 +387,62 @@ pub struct UsageData {
pub limit: UsageLimit,
}
#[derive(Debug, Clone)]
pub struct SerializedJson<T> {
raw: Box<RawValue>,
_phantom: PhantomData<T>,
}
impl<T> SerializedJson<T>
where
T: Serialize + for<'de> Deserialize<'de>,
{
pub fn new(value: &T) -> Result<Self, serde_json::Error> {
Ok(SerializedJson {
raw: serde_json::value::to_raw_value(value)?,
_phantom: PhantomData,
})
}
pub fn deserialize(&self) -> Result<T, serde_json::Error> {
serde_json::from_str(self.raw.get())
}
pub fn as_raw(&self) -> &RawValue {
&self.raw
}
pub fn into_raw(self) -> Box<RawValue> {
self.raw
}
}
impl<T> Serialize for SerializedJson<T> {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
self.raw.serialize(serializer)
}
}
impl<'de, T> Deserialize<'de> for SerializedJson<T> {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
let raw = Box::<RawValue>::deserialize(deserializer)?;
Ok(SerializedJson {
raw,
_phantom: PhantomData,
})
}
}
fn is_default<T: Default + PartialEq>(value: &T) -> bool {
*value == T::default()
}
#[cfg(test)]
mod tests {
use pretty_assertions::assert_eq;

View File

@@ -3072,12 +3072,7 @@ async fn test_git_status_sync(
.collect::<Vec<_>>();
assert_eq!(repos.len(), 1);
let repo = repos.into_iter().next().unwrap();
assert_eq!(
repo.read(cx)
.status_for_path(&file.into())
.map(|entry| entry.status),
status
);
assert_eq!(repo.read(cx).status_for_path(&file.into()), status);
}
project_local.read_with(cx_a, |project, cx| {

View File

@@ -685,7 +685,7 @@ impl Item for Editor {
.git_store()
.read(cx)
.repository_and_path_for_buffer_id(buffer_id, cx)?;
let status = repo.read(cx).status_for_path(&repo_path)?.status;
let status = repo.read(cx).status_for_path(&repo_path)?;
Some(entry_git_aware_label_color(
status.summary(),

View File

@@ -146,7 +146,7 @@ pub struct BufferSnapshot {
pub text: text::BufferSnapshot,
pub(crate) syntax: SyntaxSnapshot,
file: Option<Arc<dyn File>>,
diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
pub diagnostics: SmallVec<[(LanguageServerId, DiagnosticSet); 2]>,
remote_selections: TreeMap<ReplicaId, SelectionSet>,
language: Option<Arc<Language>>,
non_text_state_update_count: usize,

View File

@@ -2693,9 +2693,7 @@ impl OutlinePanel {
let status = git_store
.read(cx)
.repository_and_path_for_buffer_id(buffer_id, cx)
.and_then(|(repo, path)| {
Some(repo.read(cx).status_for_path(&path)?.status)
});
.and_then(|(repo, path)| repo.read(cx).status_for_path(&path));
buffer_excerpts
.entry(buffer_id)
.or_insert_with(|| {

View File

@@ -782,7 +782,7 @@ impl GitStore {
cx: &App,
) -> Option<FileStatus> {
let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
Some(repo.read(cx).status_for_path(&repo_path)?.status)
repo.read(cx).status_for_path(&repo_path)
}
pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
@@ -1360,8 +1360,7 @@ impl GitStore {
pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
let status = repo.read(cx).snapshot.status_for_path(&path)?;
Some(status.status)
repo.read(cx).snapshot.status_for_path(&path)
}
pub fn repository_and_path_for_buffer_id(
@@ -2810,10 +2809,10 @@ impl RepositorySnapshot {
self.statuses_by_path.summary().item_summary
}
pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
pub fn status_for_path(&self, path: &RepoPath) -> Option<FileStatus> {
self.statuses_by_path
.get(&PathKey(path.0.clone()), &())
.cloned()
.map(|entry| entry.status)
}
pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
@@ -2840,7 +2839,7 @@ impl RepositorySnapshot {
self.merge.conflicted_paths.contains(repo_path);
let has_conflict_currently = self
.status_for_path(repo_path)
.is_some_and(|entry| entry.status.is_conflicted());
.is_some_and(|status| status.is_conflicted());
had_conflict_on_last_merge_head_change || has_conflict_currently
}

View File

@@ -4314,7 +4314,7 @@ impl Project {
self.active_entry
}
pub fn entry_for_path(&self, path: &ProjectPath, cx: &App) -> Option<Entry> {
pub fn entry_for_path<'a>(&'a self, path: &ProjectPath, cx: &'a App) -> Option<&'a Entry> {
self.worktree_store.read(cx).entry_for_path(path, cx)
}

View File

@@ -8246,7 +8246,7 @@ async fn test_repository_subfolder_git_status(
assert_eq!(repository.status_for_path(&C_TXT.into()), None);
assert_eq!(
repository.status_for_path(&E_TXT.into()).unwrap().status,
repository.status_for_path(&E_TXT.into()).unwrap(),
FileStatus::Untracked
);
});
@@ -8459,15 +8459,11 @@ async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
root_path.join("projects/project1").as_path()
);
assert_eq!(
repository
.status_for_path(&"a".into())
.map(|entry| entry.status),
repository.status_for_path(&"a".into()),
Some(StatusCode::Modified.worktree()),
);
assert_eq!(
repository
.status_for_path(&"b".into())
.map(|entry| entry.status),
repository.status_for_path(&"b".into()),
Some(FileStatus::Untracked),
);
});
@@ -8485,11 +8481,11 @@ async fn test_rename_work_directory(cx: &mut gpui::TestAppContext) {
root_path.join("projects/project2").as_path()
);
assert_eq!(
repository.status_for_path(&"a".into()).unwrap().status,
repository.status_for_path(&"a".into()).unwrap(),
StatusCode::Modified.worktree(),
);
assert_eq!(
repository.status_for_path(&"b".into()).unwrap().status,
repository.status_for_path(&"b".into()).unwrap(),
FileStatus::Untracked,
);
});
@@ -8562,11 +8558,11 @@ async fn test_file_status(cx: &mut gpui::TestAppContext) {
);
assert_eq!(
repository.status_for_path(&B_TXT.into()).unwrap().status,
repository.status_for_path(&B_TXT.into()).unwrap(),
FileStatus::Untracked,
);
assert_eq!(
repository.status_for_path(&F_TXT.into()).unwrap().status,
repository.status_for_path(&F_TXT.into()).unwrap(),
FileStatus::Untracked,
);
});
@@ -8582,7 +8578,7 @@ async fn test_file_status(cx: &mut gpui::TestAppContext) {
// The worktree detects that the file's git status has changed.
repository.read_with(cx, |repository, _| {
assert_eq!(
repository.status_for_path(&A_TXT.into()).unwrap().status,
repository.status_for_path(&A_TXT.into()).unwrap(),
StatusCode::Modified.worktree(),
);
});
@@ -8600,7 +8596,7 @@ async fn test_file_status(cx: &mut gpui::TestAppContext) {
// The worktree detects that the files' git status have changed.
repository.read_with(cx, |repository, _cx| {
assert_eq!(
repository.status_for_path(&F_TXT.into()).unwrap().status,
repository.status_for_path(&F_TXT.into()).unwrap(),
FileStatus::Untracked,
);
assert_eq!(repository.status_for_path(&B_TXT.into()), None);
@@ -8623,11 +8619,11 @@ async fn test_file_status(cx: &mut gpui::TestAppContext) {
repository.read_with(cx, |repository, _cx| {
assert_eq!(repository.status_for_path(&A_TXT.into()), None);
assert_eq!(
repository.status_for_path(&B_TXT.into()).unwrap().status,
repository.status_for_path(&B_TXT.into()).unwrap(),
FileStatus::Untracked,
);
assert_eq!(
repository.status_for_path(&E_TXT.into()).unwrap().status,
repository.status_for_path(&E_TXT.into()).unwrap(),
StatusCode::Modified.worktree(),
);
});
@@ -8666,8 +8662,7 @@ async fn test_file_status(cx: &mut gpui::TestAppContext) {
assert_eq!(
repository
.status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
.unwrap()
.status,
.unwrap(),
FileStatus::Untracked,
);
});
@@ -8690,8 +8685,7 @@ async fn test_file_status(cx: &mut gpui::TestAppContext) {
assert_eq!(
repository
.status_for_path(&Path::new(renamed_dir_name).join(RENAMED_FILE).into())
.unwrap()
.status,
.unwrap(),
FileStatus::Untracked,
);
});
@@ -9000,7 +8994,7 @@ async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
barrier.await.unwrap();
worktree_repo.update(cx, |repo, _| {
pretty_assertions::assert_eq!(
repo.status_for_path(&"src/b.txt".into()).unwrap().status,
repo.status_for_path(&"src/b.txt".into()).unwrap(),
StatusCode::Modified.worktree(),
);
});
@@ -9039,7 +9033,7 @@ async fn test_git_worktrees_and_submodules(cx: &mut gpui::TestAppContext) {
barrier.await.unwrap();
submodule_repo.update(cx, |repo, _| {
pretty_assertions::assert_eq!(
repo.status_for_path(&"c.txt".into()).unwrap().status,
repo.status_for_path(&"c.txt".into()).unwrap(),
StatusCode::Modified.worktree(),
);
});
@@ -9304,9 +9298,7 @@ fn assert_entry_git_state(
let entry = tree
.entry_for_path(path)
.unwrap_or_else(|| panic!("entry {path} not found"));
let status = repository
.status_for_path(&path.into())
.map(|entry| entry.status);
let status = repository.status_for_path(&path.into()).map(|entry| entry);
let expected = index_status.map(|index_status| {
TrackedStatus {
index_status,

View File

@@ -203,11 +203,10 @@ impl WorktreeStore {
})
}
pub fn entry_for_path(&self, path: &ProjectPath, cx: &App) -> Option<Entry> {
pub fn entry_for_path<'a>(&'a self, path: &ProjectPath, cx: &'a App) -> Option<&'a Entry> {
self.worktree_for_id(path.worktree_id, cx)?
.read(cx)
.entry_for_path(&path.path)
.cloned()
}
pub fn create_worktree(

View File

@@ -594,9 +594,10 @@ impl project::ProjectItem for NotebookItem {
};
let id = project
.update(cx, |project, cx| project.entry_for_path(&path, cx))?
.context("Entry not found")?
.id;
.update(cx, |project, cx| {
project.entry_for_path(&path, cx).map(|entry| entry.id)
})?
.context("Entry not found")?;
cx.new(|_| NotebookItem {
path: abs_path,

View File

@@ -3188,6 +3188,7 @@ pub mod tests {
.read(cx)
.entry_for_path(&(worktree_id, "a").into(), cx)
.expect("no entry for /a/ directory")
.clone()
});
assert!(a_dir_entry.is_dir());
window

View File

@@ -207,9 +207,11 @@ fn assign_edit_prediction_provider(
if let Some(buffer) = &singleton_buffer
&& buffer.read(cx).file().is_some()
// todo!
&& let Some(project) = editor.project()
{
zeta.update(cx, |zeta, cx| {
zeta.register_buffer(buffer, cx);
zeta.register_buffer(buffer, project, cx);
});
}

View File

@@ -21,6 +21,7 @@ ai_onboarding.workspace = true
anyhow.workspace = true
arrayvec.workspace = true
client.workspace = true
clock.workspace = true
cloud_llm_client.workspace = true
collections.workspace = true
command_palette_hooks.workspace = true
@@ -39,6 +40,7 @@ language.workspace = true
language_model.workspace = true
log.workspace = true
menu.workspace = true
multi_buffer.workspace = true
postage.workspace = true
project.workspace = true
rand.workspace = true

View File

@@ -0,0 +1,226 @@
use std::collections::hash_map;
use cloud_llm_client::{PredictEditsEvent, PredictEditsGitInfo, SerializedJson};
use collections::{HashMap, HashSet};
use fs::MTime;
use gpui::{AppContext as _, Context, Entity, EntityId, Task, WeakEntity};
use language::{Buffer, BufferEvent};
use project::{
Project, ProjectEntryId, ProjectPath,
buffer_store::{BufferStore, BufferStoreEvent},
git_store::{GitStore, GitStoreEvent, Repository, RepositoryId},
worktree_store::{WorktreeStore, WorktreeStoreEvent},
};
use uuid::Uuid;
use crate::license_detection::LicenseDetectionWatcher;
// todos:
//
// * Don't subscribe to all buffers
//
// * Currently MoveCursor event will only happen for edit prediction requests.
/// Collects fine-grained edit history per project and uploads it as edit
/// prediction training data. Work in progress — see the `todos` comment above.
pub struct TrainingDataUploader {
    /// Tracked projects, keyed by the `Project` entity's id; entries are
    /// intended to be removed when the project entity is released (see
    /// the `observe_release` call in `register`).
    projects: HashMap<EntityId, Entity<ZetaProject>>,
    /// Background task driving uploads; stored so the loop is not cancelled
    /// while the uploader is alive.
    _upload_task: Task<()>,
}
/// Per-project tracking state for training-data collection.
struct ZetaProject {
    /// Weak handle back to the tracked project.
    project: WeakEntity<Project>,
    /// Per-repository event/snapshot state, kept in sync with the project's
    /// git store (see `handle_git_store_event`).
    repositories: HashMap<RepositoryId, Entity<ZetaRepository>>,
    /// Buffers that have emitted `Edited` since tracking began
    /// (see `handle_buffer_event`).
    buffers_changed: HashSet<WeakEntity<Buffer>>,
    /// Project entries reported by `WorktreeUpdatedEntries` events
    /// (see `handle_worktree_store_event`).
    project_entries_changed: HashSet<ProjectEntryId>,
}
/// Per-repository event buffer and snapshot state (WIP).
struct ZetaRepository {
    /// Events already serialized but not yet uploaded.
    unsent_events: Vec<SerializedJson<PredictEditsEvent>>,
    /// Event currently being assembled; not yet moved into `unsent_events`.
    pending_event: Option<PredictEditsEvent>,
    /// Most recent snapshot taken for this repository; `None` before the
    /// first snapshot — presumably the base that later events are diffed
    /// against (TODO confirm once the upload loop is implemented).
    last_snapshot: Option<ZetaRepositorySnapshot>,
    /// License detection for this repository (see `LicenseDetectionWatcher`).
    license_watcher: LicenseDetectionWatcher,
}
/// Snapshot of a repository's tracked state, associated with one upload
/// request.
struct ZetaRepositorySnapshot {
    /// Id of the upload/request this snapshot corresponds to.
    request_id: Uuid,
    /// Git repository metadata captured with the snapshot.
    git_info: PredictEditsGitInfo,
    /// Captured open-buffer state, keyed by project entry.
    buffers: HashMap<ProjectEntryId, ZetaBufferSnapshot>,
    /// Captured on-disk file state, keyed by project entry.
    files: HashMap<ProjectEntryId, ZetaFileSnapshot>,
}
/// Captured state of an open buffer at snapshot time.
struct ZetaBufferSnapshot {
    /// Project-relative path of the buffer's file.
    path: ProjectPath,
    /// Full buffer text at capture time.
    text: String,
    /// Weak handle to the live buffer.
    buffer: WeakEntity<Buffer>,
    /// Buffer version vector at capture time — presumably used to detect
    /// whether the buffer changed since the snapshot (TODO confirm).
    version: clock::Global,
}
/// Captured state of an on-disk file at snapshot time.
struct ZetaFileSnapshot {
    /// Project-relative path of the file.
    path: ProjectPath,
    /// Full file contents at capture time.
    text: String,
    /// File modification time at capture — presumably used to detect
    /// staleness without re-reading contents (TODO confirm).
    mtime: MTime,
}
impl TrainingDataUploader {
    /// Creates the uploader and starts its background upload loop.
    ///
    /// The loop body is still unimplemented (`todo!`); the task handle is kept
    /// in `_upload_task` so the loop is not cancelled while the uploader lives.
    pub fn new(cx: &mut Context<Self>) -> Self {
        let _upload_task = cx.spawn(async move |_this, _cx| {
            loop {
                todo!();
            }
        });
        Self {
            projects: HashMap::default(),
            _upload_task,
        }
    }

    /// Begins (or continues) tracking `project`, creating its `ZetaProject`
    /// state on first registration. `path` will be forwarded to the project
    /// state once `ZetaProject::register` exists.
    fn register(&mut self, project: &Entity<Project>, path: ProjectPath, cx: &mut Context<Self>) {
        let project_entity_id = project.entity_id();
        let _zeta_project = match self.projects.entry(project_entity_id) {
            hash_map::Entry::Vacant(entry) => {
                let zeta_project = cx.new(|cx| ZetaProject::new(project, cx));
                // Drop per-project state when the project entity is released.
                // `.detach()` is required: otherwise the returned Subscription
                // guard is dropped immediately and the observer never fires,
                // leaking stale entries in `projects`.
                cx.observe_release(project, move |this, _project, _cx| {
                    this.projects.remove(&project_entity_id);
                })
                .detach();
                entry.insert(zeta_project)
            }
            hash_map::Entry::Occupied(entry) => entry.into_mut(),
        };
        // Silence the unused-parameter warning until registration is wired up.
        let _ = path;
        // todo!
        // _zeta_project.update(|zeta_project, cx| zeta_project.register(path, cx));
    }
}
impl ZetaProject {
    /// Creates tracking state for `project`, subscribing to the project
    /// itself, its git store, worktree store, and buffer store, and
    /// registering every buffer that is already open.
    pub fn new(project: &Entity<Project>, cx: &mut Context<Self>) -> Self {
        cx.subscribe(project, Self::handle_project_event).detach();
        cx.subscribe(
            &project.read(cx).git_store().clone(),
            Self::handle_git_store_event,
        )
        .detach();
        cx.subscribe(
            &project.read(cx).worktree_store(),
            Self::handle_worktree_store_event,
        )
        .detach();
        let buffer_store = project.read(cx).buffer_store().clone();
        // Collect into a Vec first so the buffer store is not borrowed while
        // `register_buffer` subscribes via `cx`.
        for buffer in buffer_store.read(cx).buffers().collect::<Vec<_>>() {
            Self::register_buffer(&buffer, cx);
        }
        cx.subscribe(&buffer_store, Self::handle_buffer_store_event)
            .detach();
        Self {
            project: project.downgrade(),
            repositories: HashMap::default(),
            buffers_changed: HashSet::default(),
            project_entries_changed: HashSet::default(),
        }
    }

    /// Keeps `repositories` in sync with repositories added to / removed from
    /// the project's git store. Repository content updates are not handled yet.
    fn handle_git_store_event(
        &mut self,
        _git_store: Entity<GitStore>,
        event: &GitStoreEvent,
        cx: &mut Context<Self>,
    ) {
        use GitStoreEvent::*;
        match event {
            RepositoryRemoved(repository_id) => {
                self.repositories.remove(repository_id);
            }
            RepositoryAdded(repository_id) => {
                self.repositories
                    .insert(*repository_id, cx.new(|cx| ZetaRepository::new(cx)));
            }
            // WIP: repository content updates are currently ignored.
            RepositoryUpdated(_repository_id, _event, _is_active) => {}
            ActiveRepositoryChanged { .. }
            | IndexWriteError { .. }
            | JobsUpdated
            | ConflictsUpdated => {}
        }
    }

    /// Records which project entries changed on disk; the other worktree
    /// events are not handled yet (WIP).
    fn handle_worktree_store_event(
        &mut self,
        _worktree_store: Entity<WorktreeStore>,
        event: &WorktreeStoreEvent,
        _cx: &mut Context<Self>,
    ) {
        use WorktreeStoreEvent::*;
        match event {
            WorktreeAdded(_worktree) => {}
            WorktreeRemoved(_worktree_entity_id, _worktree_id) => {}
            WorktreeUpdatedEntries(_worktree_id, updated_entries_set) => {
                for (_path, entry_id, _path_change) in updated_entries_set.iter() {
                    self.project_entries_changed.insert(*entry_id);
                }
            }
            WorktreeUpdatedGitRepositories(_worktree_id, _updated_git_repositories) => {}
            WorktreeDeletedEntry(_worktree_id, _project_entry_id) => {}
            WorktreeReleased { .. } | WorktreeOrderChanged | WorktreeUpdateSent { .. } => {}
        }
    }

    /// Subscribes to newly added buffers so their edits are tracked.
    fn handle_buffer_store_event(
        &mut self,
        _buffer_store: Entity<BufferStore>,
        event: &BufferStoreEvent,
        cx: &mut Context<Self>,
    ) {
        use BufferStoreEvent::*;
        match event {
            BufferAdded(buffer) => Self::register_buffer(buffer, cx),
            BufferOpened { .. }
            | BufferChangedFilePath { .. }
            | BufferDropped { .. }
            | SharedBufferClosed { .. } => {}
        }
    }

    /// Subscribes to `buffer`'s events. The subscription must be detached:
    /// without `.detach()` the returned Subscription guard is dropped
    /// immediately, cancelling the subscription, and `handle_buffer_event`
    /// would never run.
    fn register_buffer(buffer: &Entity<Buffer>, cx: &mut Context<Self>) {
        cx.subscribe(buffer, Self::handle_buffer_event).detach();
    }

    /// Marks `buffer` as changed when it is edited; all other buffer events
    /// are ignored.
    fn handle_buffer_event(
        &mut self,
        buffer: Entity<Buffer>,
        event: &BufferEvent,
        _cx: &mut Context<Self>,
    ) {
        if let BufferEvent::Edited = event {
            self.buffers_changed.insert(buffer.downgrade());
        }
    }

    /// Responds to project events. Active-entry tracking is not implemented
    /// yet (WIP).
    fn handle_project_event(
        &mut self,
        _project: Entity<Project>,
        event: &project::Event,
        _cx: &mut Context<Self>,
    ) {
        match event {
            project::Event::ActiveEntryChanged(_entry_id) => {
                todo!()
            }
            _ => {}
        }
    }
}
impl ZetaRepository {
    /// Creates empty per-repository tracking state and starts license
    /// detection for the repository.
    pub fn new(cx: &mut Context<Self>) -> Self {
        let license_watcher = LicenseDetectionWatcher::new(cx);
        Self {
            license_watcher,
            unsent_events: Vec::new(),
            pending_event: None,
            last_snapshot: None,
        }
    }
}

File diff suppressed because it is too large Load Diff

View File

@@ -18,7 +18,7 @@ use std::process::exit;
use std::str::FromStr;
use std::sync::Arc;
use std::time::Duration;
use zeta::{GatherContextOutput, PerformPredictEditsParams, Zeta, gather_context};
use zeta::{CanCollectData, GatherContextOutput, PerformPredictEditsParams, Zeta, gather_context};
use crate::headless::ZetaCliAppState;
@@ -129,15 +129,15 @@ async fn get_context(
return Err(anyhow!("Absolute paths are not supported in --cursor"));
}
let (project, _lsp_open_handle, buffer) = if use_language_server {
let (project, lsp_open_handle, buffer) =
let (_lsp_open_handle, buffer) = if use_language_server {
let (_project, lsp_open_handle, buffer) =
open_buffer_with_language_server(&worktree_path, &cursor.path, app_state, cx).await?;
(Some(project), Some(lsp_open_handle), buffer)
(Some(lsp_open_handle), buffer)
} else {
let abs_path = worktree_path.join(&cursor.path);
let content = smol::fs::read_to_string(&abs_path).await?;
let buffer = cx.new(|cx| Buffer::local(content, cx))?;
(None, None, buffer)
(None, buffer)
};
let worktree_name = worktree_path
@@ -172,22 +172,14 @@ async fn get_context(
None => String::new(),
};
// Enable gathering extra data not currently needed for edit predictions
let can_collect_data = true;
let git_info = None;
let mut gather_context_output = cx
.update(|cx| {
gather_context(
project.as_ref(),
full_path_str,
&snapshot,
clipped_cursor,
move || events,
can_collect_data,
git_info,
cx,
)
})?
.await;
let mut gather_context_output = gather_context(
full_path_str,
snapshot,
clipped_cursor,
move || events,
CanCollectData(true),
)
.await;
// Disable data collection for these requests, as this is currently just used for evals
if let Ok(gather_context_output) = gather_context_output.as_mut() {