Compare commits

..

5 Commits

Author SHA1 Message Date
Richard Feldman
1c8300b4d2 Add test for reasoning_opaque and reasoning_text 2025-11-25 11:43:22 -05:00
Richard Feldman
a53133a733 Revise ChatMessage matching logic 2025-11-25 11:43:22 -05:00
Richard Feldman
589be2ce8e Send ChatMessageContent::empty() when necessary 2025-11-25 11:43:22 -05:00
Richard Feldman
eb3b879de4 Add reasoning_opaque and reasoning_text 2025-11-25 11:43:22 -05:00
Richard Feldman
69cb08bfd9 Add some copilot debug logging 2025-11-25 11:43:22 -05:00
83 changed files with 1143 additions and 1092 deletions

65
Cargo.lock generated
View File

@@ -11530,7 +11530,7 @@ dependencies = [
[[package]]
name = "pet"
version = "0.1.0"
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da#1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da"
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c"
dependencies = [
"clap",
"env_logger 0.10.2",
@@ -11555,7 +11555,6 @@ dependencies = [
"pet-python-utils",
"pet-reporter",
"pet-telemetry",
"pet-uv",
"pet-venv",
"pet-virtualenv",
"pet-virtualenvwrapper",
@@ -11568,7 +11567,7 @@ dependencies = [
[[package]]
name = "pet-conda"
version = "0.1.0"
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da#1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da"
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c"
dependencies = [
"env_logger 0.10.2",
"lazy_static",
@@ -11587,7 +11586,7 @@ dependencies = [
[[package]]
name = "pet-core"
version = "0.1.0"
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da#1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da"
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c"
dependencies = [
"clap",
"lazy_static",
@@ -11602,7 +11601,7 @@ dependencies = [
[[package]]
name = "pet-env-var-path"
version = "0.1.0"
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da#1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da"
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c"
dependencies = [
"lazy_static",
"log",
@@ -11618,7 +11617,7 @@ dependencies = [
[[package]]
name = "pet-fs"
version = "0.1.0"
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da#1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da"
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c"
dependencies = [
"log",
"msvc_spectre_libs",
@@ -11627,7 +11626,7 @@ dependencies = [
[[package]]
name = "pet-global-virtualenvs"
version = "0.1.0"
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da#1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da"
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c"
dependencies = [
"log",
"msvc_spectre_libs",
@@ -11640,7 +11639,7 @@ dependencies = [
[[package]]
name = "pet-homebrew"
version = "0.1.0"
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da#1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da"
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c"
dependencies = [
"lazy_static",
"log",
@@ -11658,7 +11657,7 @@ dependencies = [
[[package]]
name = "pet-jsonrpc"
version = "0.1.0"
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da#1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da"
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c"
dependencies = [
"env_logger 0.10.2",
"log",
@@ -11671,7 +11670,7 @@ dependencies = [
[[package]]
name = "pet-linux-global-python"
version = "0.1.0"
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da#1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da"
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c"
dependencies = [
"log",
"msvc_spectre_libs",
@@ -11684,7 +11683,7 @@ dependencies = [
[[package]]
name = "pet-mac-commandlinetools"
version = "0.1.0"
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da#1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da"
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c"
dependencies = [
"log",
"msvc_spectre_libs",
@@ -11697,7 +11696,7 @@ dependencies = [
[[package]]
name = "pet-mac-python-org"
version = "0.1.0"
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da#1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da"
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c"
dependencies = [
"log",
"msvc_spectre_libs",
@@ -11710,7 +11709,7 @@ dependencies = [
[[package]]
name = "pet-mac-xcode"
version = "0.1.0"
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da#1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da"
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c"
dependencies = [
"log",
"msvc_spectre_libs",
@@ -11723,7 +11722,7 @@ dependencies = [
[[package]]
name = "pet-pipenv"
version = "0.1.0"
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da#1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da"
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c"
dependencies = [
"log",
"msvc_spectre_libs",
@@ -11736,7 +11735,7 @@ dependencies = [
[[package]]
name = "pet-pixi"
version = "0.1.0"
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da#1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da"
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c"
dependencies = [
"log",
"msvc_spectre_libs",
@@ -11748,7 +11747,7 @@ dependencies = [
[[package]]
name = "pet-poetry"
version = "0.1.0"
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da#1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da"
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c"
dependencies = [
"base64 0.22.1",
"lazy_static",
@@ -11769,7 +11768,7 @@ dependencies = [
[[package]]
name = "pet-pyenv"
version = "0.1.0"
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da#1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da"
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c"
dependencies = [
"lazy_static",
"log",
@@ -11787,7 +11786,7 @@ dependencies = [
[[package]]
name = "pet-python-utils"
version = "0.1.0"
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da#1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da"
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c"
dependencies = [
"env_logger 0.10.2",
"lazy_static",
@@ -11804,7 +11803,7 @@ dependencies = [
[[package]]
name = "pet-reporter"
version = "0.1.0"
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da#1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da"
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c"
dependencies = [
"env_logger 0.10.2",
"log",
@@ -11818,7 +11817,7 @@ dependencies = [
[[package]]
name = "pet-telemetry"
version = "0.1.0"
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da#1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da"
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c"
dependencies = [
"env_logger 0.10.2",
"lazy_static",
@@ -11830,22 +11829,10 @@ dependencies = [
"regex",
]
[[package]]
name = "pet-uv"
version = "0.1.0"
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da#1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da"
dependencies = [
"log",
"pet-core",
"pet-python-utils",
"serde",
"toml 0.9.8",
]
[[package]]
name = "pet-venv"
version = "0.1.0"
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da#1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da"
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c"
dependencies = [
"log",
"msvc_spectre_libs",
@@ -11857,7 +11844,7 @@ dependencies = [
[[package]]
name = "pet-virtualenv"
version = "0.1.0"
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da#1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da"
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c"
dependencies = [
"log",
"msvc_spectre_libs",
@@ -11869,7 +11856,7 @@ dependencies = [
[[package]]
name = "pet-virtualenvwrapper"
version = "0.1.0"
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da#1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da"
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c"
dependencies = [
"log",
"msvc_spectre_libs",
@@ -11882,7 +11869,7 @@ dependencies = [
[[package]]
name = "pet-windows-registry"
version = "0.1.0"
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da#1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da"
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c"
dependencies = [
"lazy_static",
"log",
@@ -11900,7 +11887,7 @@ dependencies = [
[[package]]
name = "pet-windows-store"
version = "0.1.0"
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da#1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da"
source = "git+https://github.com/microsoft/python-environment-tools.git?rev=e97b9508befa0062929da65a01054d25c4be861c#e97b9508befa0062929da65a01054d25c4be861c"
dependencies = [
"lazy_static",
"log",
@@ -17326,8 +17313,8 @@ dependencies = [
[[package]]
name = "tiktoken-rs"
version = "0.9.1"
source = "git+https://github.com/zed-industries/tiktoken-rs?rev=7249f999c5fdf9bf3cc5c288c964454e4dac0c00#7249f999c5fdf9bf3cc5c288c964454e4dac0c00"
version = "0.8.0"
source = "git+https://github.com/zed-industries/tiktoken-rs?rev=30c32a4522751699adeda0d5840c71c3b75ae73d#30c32a4522751699adeda0d5840c71c3b75ae73d"
dependencies = [
"anyhow",
"base64 0.22.1",

View File

@@ -583,14 +583,14 @@ partial-json-fixer = "0.5.3"
parse_int = "0.9"
pciid-parser = "0.8.0"
pathdiff = "0.2"
pet = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da" }
pet-conda = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da" }
pet-core = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da" }
pet-fs = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da" }
pet-pixi = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da" }
pet-poetry = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da" }
pet-reporter = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da" }
pet-virtualenv = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "1e86914c3ce2f3a08c0cedbcb0615a7f9fa7a5da" }
pet = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "e97b9508befa0062929da65a01054d25c4be861c" }
pet-conda = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "e97b9508befa0062929da65a01054d25c4be861c" }
pet-core = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "e97b9508befa0062929da65a01054d25c4be861c" }
pet-fs = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "e97b9508befa0062929da65a01054d25c4be861c" }
pet-pixi = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "e97b9508befa0062929da65a01054d25c4be861c" }
pet-poetry = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "e97b9508befa0062929da65a01054d25c4be861c" }
pet-reporter = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "e97b9508befa0062929da65a01054d25c4be861c" }
pet-virtualenv = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "e97b9508befa0062929da65a01054d25c4be861c" }
portable-pty = "0.9.0"
postage = { version = "0.5", features = ["futures-traits"] }
pretty_assertions = { version = "1.3.0", features = ["unstable"] }
@@ -655,7 +655,7 @@ sysinfo = "0.37.0"
take-until = "0.2.0"
tempfile = "3.20.0"
thiserror = "2.0.12"
tiktoken-rs = { git = "https://github.com/zed-industries/tiktoken-rs", rev = "7249f999c5fdf9bf3cc5c288c964454e4dac0c00" }
tiktoken-rs = { git = "https://github.com/zed-industries/tiktoken-rs", rev = "30c32a4522751699adeda0d5840c71c3b75ae73d" }
time = { version = "0.3", features = [
"macros",
"parsing",

View File

@@ -96,9 +96,9 @@
"terminal.ansi.bright_white": "#fafafaff",
"terminal.ansi.dim_white": "#575d65ff",
"link_text.hover": "#74ade8ff",
"version_control.added": "#2EA048ff",
"version_control.added": "#27a657ff",
"version_control.modified": "#d3b020ff",
"version_control.deleted": "#78081Bff",
"version_control.deleted": "#e06c76ff",
"version_control.conflict_marker.ours": "#a1c1811a",
"version_control.conflict_marker.theirs": "#74ade81a",
"conflict": "#dec184ff",
@@ -497,9 +497,9 @@
"terminal.ansi.bright_white": "#ffffffff",
"terminal.ansi.dim_white": "#aaaaaaff",
"link_text.hover": "#5c78e2ff",
"version_control.added": "#2EA048ff",
"version_control.added": "#27a657ff",
"version_control.modified": "#d3b020ff",
"version_control.deleted": "#F85149ff",
"version_control.deleted": "#e06c76ff",
"conflict": "#a48819ff",
"conflict.background": "#faf2e6ff",
"conflict.border": "#f4e7d1ff",

View File

@@ -46,7 +46,7 @@ struct WorktreeDelegateAdapter(Arc<dyn LspAdapterDelegate>);
#[async_trait]
impl WorktreeDelegate for WorktreeDelegateAdapter {
fn id(&self) -> u64 {
self.0.project_worktree().worktree_id.to_proto()
self.0.worktree_id().to_proto()
}
fn root_path(&self) -> String {

View File

@@ -10,7 +10,7 @@ use http_client::{self, AsyncBody, HttpClient, HttpClientWithUrl, Method, Reques
use parking_lot::Mutex;
use regex::Regex;
use release_channel::ReleaseChannel;
use settings::{Settings, SettingsStore, WorktreeId};
use settings::{Settings, SettingsStore};
use sha2::{Digest, Sha256};
use std::collections::HashSet;
use std::fs::File;
@@ -20,7 +20,7 @@ use std::time::Instant;
use std::{env, mem, path::PathBuf, sync::Arc, time::Duration};
use telemetry_events::{AssistantEventData, AssistantPhase, Event, EventRequestBody, EventWrapper};
use util::TryFutureExt;
use worktree::UpdatedEntriesSet;
use worktree::{UpdatedEntriesSet, WorktreeId};
use self::event_coalescer::EventCoalescer;
@@ -604,11 +604,10 @@ mod tests {
use clock::FakeSystemClock;
use gpui::TestAppContext;
use http_client::FakeHttpClient;
use settings::WorktreeId;
use std::collections::HashMap;
use telemetry_events::FlexibleEvent;
use util::rel_path::RelPath;
use worktree::{PathChange, ProjectEntryId};
use worktree::{PathChange, ProjectEntryId, WorktreeId};
#[gpui::test]
fn test_telemetry_flush_on_max_queue_size(cx: &mut TestAppContext) {

View File

@@ -1441,8 +1441,8 @@ mod tests {
unimplemented!()
}
fn project_worktree(self, _: &App) -> settings::ProjectWorktree {
settings::ProjectWorktree::from_u64(0)
fn worktree_id(&self, _: &App) -> settings::WorktreeId {
settings::WorktreeId::from_usize(0)
}
fn is_private(&self) -> bool {

View File

@@ -823,6 +823,10 @@ async fn stream_completion(
let is_streaming = request.stream;
let json = serde_json::to_string(&request)?;
eprintln!(
"Copilot chat completion request to {}: {}",
completion_url, json
);
let request = request_builder.body(AsyncBody::from(json))?;
let mut response = client.send(request).await?;
@@ -830,6 +834,11 @@ async fn stream_completion(
let mut body = Vec::new();
response.body_mut().read_to_end(&mut body).await?;
let body_str = std::str::from_utf8(&body)?;
eprintln!(
"Copilot chat completion HTTP error: status={}, response_body={}",
response.status(),
body_str
);
anyhow::bail!(
"Failed to connect to API: {} {}",
response.status(),
@@ -837,6 +846,11 @@ async fn stream_completion(
);
}
eprintln!(
"Copilot chat completion response status: {}",
response.status()
);
if is_streaming {
let reader = BufReader::new(response.into_body());
Ok(reader
@@ -844,6 +858,7 @@ async fn stream_completion(
.filter_map(|line| async move {
match line {
Ok(line) => {
eprintln!("Copilot chat completion stream line: {}", line);
let line = line.strip_prefix("data: ")?;
if line.starts_with("[DONE]") {
return None;
@@ -857,7 +872,14 @@ async fn stream_completion(
Some(Ok(response))
}
}
Err(error) => Some(Err(anyhow!(error))),
Err(error) => {
eprintln!(
"Failed to parse Copilot chat completion stream event: {}\nLine: {}",
error,
line
);
Some(Err(anyhow!(error)))
}
}
}
Err(error) => Some(Err(anyhow!(error))),
@@ -868,6 +890,10 @@ async fn stream_completion(
let mut body = Vec::new();
response.body_mut().read_to_end(&mut body).await?;
let body_str = std::str::from_utf8(&body)?;
eprintln!(
"Copilot chat completion non-streaming response body: {}",
body_str
);
let response: ResponseEvent = serde_json::from_str(body_str)?;
Ok(futures::stream::once(async move { Ok(response) }).boxed())

View File

@@ -314,15 +314,23 @@ pub async fn stream_response(
let is_streaming = request.stream;
let json = serde_json::to_string(&request)?;
eprintln!("Copilot responses request to {}: {}", api_url, json);
let request = request_builder.body(AsyncBody::from(json))?;
let mut response = client.send(request).await?;
if !response.status().is_success() {
let mut body = String::new();
response.body_mut().read_to_string(&mut body).await?;
eprintln!(
"Copilot responses HTTP error: status={}, response_body={}",
response.status(),
body
);
anyhow::bail!("Failed to connect to API: {} {}", response.status(), body);
}
eprintln!("Copilot responses response status: {}", response.status());
if is_streaming {
let reader = BufReader::new(response.into_body());
Ok(reader
@@ -330,6 +338,7 @@ pub async fn stream_response(
.filter_map(|line| async move {
match line {
Ok(line) => {
eprintln!("Copilot responses stream line: {}", line);
let line = line.strip_prefix("data: ")?;
if line.starts_with("[DONE]") || line.is_empty() {
return None;
@@ -356,6 +365,7 @@ pub async fn stream_response(
// Removes the need to have one method to map StreamEvent and another to map Response to a LanguageCompletionEvent
let mut body = String::new();
response.body_mut().read_to_string(&mut body).await?;
eprintln!("Copilot responses non-streaming response body: {}", body);
match serde_json::from_str::<Response>(&body) {
Ok(response) => {

View File

@@ -60,7 +60,7 @@ mod test_mocks {
#[async_trait::async_trait]
impl adapters::DapDelegate for MockDelegate {
fn worktree_id(&self) -> settings::WorktreeId {
WorktreeId(0)
settings::WorktreeId::from_usize(0)
}
fn worktree_root_path(&self) -> &std::path::Path {

View File

@@ -868,7 +868,7 @@ impl DebugAdapter for PythonDebugAdapter {
if let Some(found_toolchain) = delegate
.toolchain_store()
.active_toolchain(
delegate.worktree_id()
delegate.worktree_id(),
base_path.into_arc(),
language::LanguageName::new(Self::LANGUAGE_NAME),
cx,

View File

@@ -52,7 +52,7 @@ struct WorktreeDelegateAdapter(pub Arc<dyn DapDelegate>);
#[async_trait]
impl WorktreeDelegate for WorktreeDelegateAdapter {
fn id(&self) -> u64 {
self.0.worktree_id().0 as u64
self.0.worktree_id().to_proto()
}
fn root_path(&self) -> String {

View File

@@ -195,7 +195,7 @@ impl DebugPanel {
active_buffer
.as_ref()
.and_then(|buffer| buffer.read(cx).file())
.map(|f| f.project_worktree(cx))
.map(|f| f.worktree_id(cx))
});
let Some(worktree) = worktree

View File

@@ -239,89 +239,6 @@ async fn test_fuzzy_over_sort_positions(cx: &mut TestAppContext) {
assert_eq!(matches[2].string, "fetch_code_lens");
}
#[gpui::test]
async fn test_semver_label_sort_by_latest_version(cx: &mut TestAppContext) {
let mut versions = [
"10.4.112",
"10.4.22",
"10.4.2",
"10.4.20",
"10.4.21",
"10.4.12",
// Pre-release versions
"10.4.22-alpha",
"10.4.22-beta.1",
"10.4.22-rc.1",
// Build metadata versions
"10.4.21+build.123",
"10.4.20+20210327",
];
versions.sort_by(|a, b| {
match (
semver::Version::parse(a).ok(),
semver::Version::parse(b).ok(),
) {
(Some(a_ver), Some(b_ver)) => b_ver.cmp(&a_ver),
_ => std::cmp::Ordering::Equal,
}
});
let completions: Vec<_> = versions
.iter()
.enumerate()
.map(|(i, version)| {
// This sort text would come from the LSP
let sort_text = format!("{:08}", i);
CompletionBuilder::new(version, None, &sort_text, None)
})
.collect();
// Case 1: User types just the major and minor version
let matches =
filter_and_sort_matches("10.4.", &completions, SnippetSortOrder::default(), cx).await;
// Versions are ordered by recency (latest first)
let expected_versions = [
"10.4.112",
"10.4.22",
"10.4.22-rc.1",
"10.4.22-beta.1",
"10.4.22-alpha",
"10.4.21+build.123",
"10.4.21",
"10.4.20+20210327",
"10.4.20",
"10.4.12",
"10.4.2",
];
for (match_item, expected) in matches.iter().zip(expected_versions.iter()) {
assert_eq!(match_item.string.as_ref() as &str, *expected);
}
// Case 2: User types the major, minor, and patch version
let matches =
filter_and_sort_matches("10.4.2", &completions, SnippetSortOrder::default(), cx).await;
let expected_versions = [
// Exact match comes first
"10.4.2",
// Ordered by recency with exact major, minor, and patch versions
"10.4.22",
"10.4.22-rc.1",
"10.4.22-beta.1",
"10.4.22-alpha",
"10.4.21+build.123",
"10.4.21",
"10.4.20+20210327",
"10.4.20",
// Versions with non-exact patch versions are ordered by fuzzy score
// Higher fuzzy score than 112 patch version since "2" appears before "1"
// in "12", making it rank higher than "112"
"10.4.12",
"10.4.112",
];
for (match_item, expected) in matches.iter().zip(expected_versions.iter()) {
assert_eq!(match_item.string.as_ref() as &str, *expected);
}
}
async fn test_for_each_prefix<F>(
target: &str,
completions: &Vec<Completion>,
@@ -342,55 +259,30 @@ struct CompletionBuilder;
impl CompletionBuilder {
fn constant(label: &str, filter_text: Option<&str>, sort_text: &str) -> Completion {
Self::new(
label,
filter_text,
sort_text,
Some(CompletionItemKind::CONSTANT),
)
Self::new(label, filter_text, sort_text, CompletionItemKind::CONSTANT)
}
fn function(label: &str, filter_text: Option<&str>, sort_text: &str) -> Completion {
Self::new(
label,
filter_text,
sort_text,
Some(CompletionItemKind::FUNCTION),
)
Self::new(label, filter_text, sort_text, CompletionItemKind::FUNCTION)
}
fn method(label: &str, filter_text: Option<&str>, sort_text: &str) -> Completion {
Self::new(
label,
filter_text,
sort_text,
Some(CompletionItemKind::METHOD),
)
Self::new(label, filter_text, sort_text, CompletionItemKind::METHOD)
}
fn variable(label: &str, filter_text: Option<&str>, sort_text: &str) -> Completion {
Self::new(
label,
filter_text,
sort_text,
Some(CompletionItemKind::VARIABLE),
)
Self::new(label, filter_text, sort_text, CompletionItemKind::VARIABLE)
}
fn snippet(label: &str, filter_text: Option<&str>, sort_text: &str) -> Completion {
Self::new(
label,
filter_text,
sort_text,
Some(CompletionItemKind::SNIPPET),
)
Self::new(label, filter_text, sort_text, CompletionItemKind::SNIPPET)
}
fn new(
label: &str,
filter_text: Option<&str>,
sort_text: &str,
kind: Option<CompletionItemKind>,
kind: CompletionItemKind,
) -> Completion {
Completion {
replace_range: Anchor::MIN..Anchor::MAX,
@@ -402,7 +294,7 @@ impl CompletionBuilder {
server_id: LanguageServerId(0),
lsp_completion: Box::new(CompletionItem {
label: label.to_string(),
kind: kind,
kind: Some(kind),
sort_text: Some(sort_text.to_string()),
filter_text: filter_text.map(|text| text.to_string()),
..Default::default()

View File

@@ -16033,7 +16033,7 @@ impl Editor {
let (worktree_id, file) = project
.buffer_for_id(runnable.buffer, cx)
.and_then(|buffer| buffer.read(cx).file())
.map(|file| (file.project_worktree(cx), file.clone()))
.map(|file| (file.worktree_id(cx), file.clone()))
.unzip();
(
@@ -21189,7 +21189,7 @@ impl Editor {
file.is_private()
&& EditorSettings::get(
Some(SettingsLocation {
worktree: file.project_worktree(cx),
worktree_id: file.worktree_id(cx),
path: file.path().as_ref(),
}),
cx,

View File

@@ -1256,7 +1256,7 @@ impl EditorElement {
let Some(abs_path) = project.read(cx).absolute_path(
&ProjectPath {
path: file.path().clone(),
worktree_id: file.project_worktree(cx),
worktree_id: file.worktree_id(cx),
},
cx,
) else {
@@ -4157,7 +4157,7 @@ impl EditorElement {
if let Some(file) = file
&& let Some(project) = editor.read(cx).project()
&& let Some(worktree) =
project.read(cx).worktree_for_id(file.project_worktree(cx), cx)
project.read(cx).worktree_for_id(file.worktree_id(cx), cx)
{
let path_style = file.path_style(cx);
let worktree = worktree.read(cx);

View File

@@ -1281,7 +1281,7 @@ impl SerializableItem for Editor {
let buffer = self.buffer().read(cx).as_singleton()?;
let abs_path = buffer.read(cx).file().and_then(|file| {
let worktree_id = file.project_worktree(cx);
let worktree_id = file.worktree_id(cx);
project
.read(cx)
.worktree_for_id(worktree_id, cx)

View File

@@ -68,7 +68,7 @@ async fn lsp_task_context(
let worktree_abs_path = cx
.update(|cx| {
let worktree_id = buffer.read(cx).file().map(|f| f.project_worktree(cx));
let worktree_id = buffer.read(cx).file().map(|f| f.worktree_id(cx));
worktree_id
.and_then(|worktree_id| worktree_store.read(cx).worktree_for_id(worktree_id, cx))

View File

@@ -1,6 +1,6 @@
use crate::wasm_host::{WasmState, wit::ToWasmtimeResult};
use ::http_client::{AsyncBody, HttpRequestExt};
use ::settings::{Settings, ProjectWorktree};
use ::settings::{Settings, WorktreeId};
use anyhow::{Context as _, Result, bail};
use async_compression::futures::bufread::GzipDecoder;
use async_tar::Archive;
@@ -430,7 +430,7 @@ impl ExtensionImports for WasmState {
.as_ref()
.zip(location.as_ref())
.map(|(path, location)| ::settings::SettingsLocation {
worktree: ProjectWorktree::from_proto(location.worktree_id),
worktree_id: WorktreeId::from_proto(location.worktree_id),
path,
});

View File

@@ -8,7 +8,7 @@ use crate::wasm_host::wit::since_v0_6_0::{
use crate::wasm_host::wit::{CompletionKind, CompletionLabelDetails, InsertTextFormat, SymbolKind};
use crate::wasm_host::{WasmState, wit::ToWasmtimeResult};
use ::http_client::{AsyncBody, HttpRequestExt};
use ::settings::{Settings, ProjectWorktree};
use ::settings::{Settings, WorktreeId};
use anyhow::{Context as _, Result, bail};
use async_compression::futures::bufread::GzipDecoder;
use async_tar::Archive;
@@ -923,7 +923,7 @@ impl ExtensionImports for WasmState {
.as_ref()
.zip(location.as_ref())
.map(|(path, location)| ::settings::SettingsLocation {
worktree: ProjectWorktree::from_proto(location.worktree_id),
worktree_id: WorktreeId::from_proto(location.worktree_id),
path,
});

View File

@@ -284,7 +284,7 @@ impl language::File for GitBlob {
self.path.file_name().unwrap()
}
fn project_worktree(&self, _: &App) -> WorktreeId {
fn worktree_id(&self, _: &App) -> WorktreeId {
self.worktree_id
}
@@ -322,7 +322,7 @@ impl language::File for CommitMetadataFile {
self.title.file_name().unwrap()
}
fn project_worktree(&self, _: &App) -> WorktreeId {
fn worktree_id(&self, _: &App) -> WorktreeId {
self.worktree_id
}

View File

@@ -3832,7 +3832,7 @@ impl GitPanel {
cx: &App,
) -> Option<AnyElement> {
let repo = self.active_repository.as_ref()?.read(cx);
let project_path = (file.project_worktree(cx), file.path().clone()).into();
let project_path = (file.worktree_id(cx), file.path().clone()).into();
let repo_path = repo.project_path_to_repo_path(&project_path, cx)?;
let ix = self.entry_by_path(&repo_path, cx)?;
let entry = self.entries.get(ix)?;

View File

@@ -349,7 +349,7 @@ impl ProjectDiff {
let file = buffer.read(cx).file()?;
Some(ProjectPath {
worktree_id: file.project_worktree(cx),
worktree_id: file.worktree_id(cx),
path: file.path().clone(),
})
}

View File

@@ -37,7 +37,7 @@ use lsp::{LanguageServerId, NumberOrString};
use parking_lot::{Mutex, RawMutex, lock_api::MutexGuard};
use serde::{Deserialize, Serialize};
use serde_json::Value;
use settings::{ProjectWorktree, WorktreeId};
use settings::WorktreeId;
use smallvec::SmallVec;
use smol::future::yield_now;
use std::{
@@ -393,12 +393,7 @@ pub trait File: Send + Sync + Any {
/// Returns the id of the worktree to which this file belongs.
///
/// This is needed for looking up project-specific settings.
fn project_worktree(&self, cx: &App) -> ProjectWorktree;
/// worktree_id
fn worktree_id(&self, cx: &App) -> WorktreeId {
self.project_worktree(cx).worktree_id
}
fn worktree_id(&self, cx: &App) -> WorktreeId;
/// Converts this file into a protobuf message.
fn to_proto(&self, cx: &App) -> rpc::proto::File;
@@ -5427,8 +5422,8 @@ impl File for TestFile {
self.path().file_name().unwrap_or(self.root_name.as_ref())
}
fn project_worktree(&self, _: &App) -> ProjectWorktree {
ProjectWorktree::from_u64(0)
fn worktree_id(&self, _: &App) -> WorktreeId {
WorktreeId::from_usize(0)
}
fn to_proto(&self, _: &App) -> rpc::proto::File {

View File

@@ -43,7 +43,7 @@ use regex::Regex;
use schemars::{JsonSchema, SchemaGenerator, json_schema};
use serde::{Deserialize, Deserializer, Serialize, Serializer, de};
use serde_json::Value;
use settings::ProjectWorktree;
use settings::WorktreeId;
use smol::future::FutureExt as _;
use std::num::NonZeroU32;
use std::{
@@ -294,7 +294,7 @@ impl CachedLspAdapter {
pub trait LspAdapterDelegate: Send + Sync {
fn show_notification(&self, message: &str, cx: &mut App);
fn http_client(&self) -> Arc<dyn HttpClient>;
fn project_worktree(&self) -> ProjectWorktree;
fn worktree_id(&self) -> WorktreeId;
fn worktree_root_path(&self) -> &Path;
fn resolve_executable_path(&self, path: PathBuf) -> PathBuf;
fn update_status(&self, language: LanguageServerName, status: BinaryStatus);

View File

@@ -26,7 +26,7 @@ pub fn language_settings<'a>(
cx: &'a App,
) -> Cow<'a, LanguageSettings> {
let location = file.map(|f| SettingsLocation {
worktree: f.project_worktree(cx),
worktree_id: f.worktree_id(cx),
path: f.path().as_ref(),
});
AllLanguageSettings::get(location, cx).language(location, language.as_ref(), cx)
@@ -38,7 +38,7 @@ pub fn all_language_settings<'a>(
cx: &'a App,
) -> &'a AllLanguageSettings {
let location = file.map(|f| SettingsLocation {
worktree: f.project_worktree(cx),
worktree_id: f.worktree_id(cx),
path: f.path().as_ref(),
});
AllLanguageSettings::get(location, cx)
@@ -435,7 +435,7 @@ impl AllLanguageSettings {
let editorconfig_properties = location.and_then(|location| {
cx.global::<SettingsStore>()
.editorconfig_properties(location.worktree, location.path)
.editorconfig_properties(location.worktree_id, location.path)
});
if let Some(editorconfig_properties) = editorconfig_properties {
let mut settings = settings.clone();

View File

@@ -1,7 +1,7 @@
use std::{borrow::Borrow, sync::Arc};
use gpui::SharedString;
use settings::ProjectWorktree;
use settings::WorktreeId;
use util::rel_path::RelPath;
#[derive(Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
@@ -54,6 +54,6 @@ pub trait ManifestProvider {
}
pub trait ManifestDelegate: Send + Sync {
fn worktree_id(&self) -> ProjectWorktree;
fn worktree_id(&self) -> WorktreeId;
fn exists(&self, path: &RelPath, is_dir: Option<bool>) -> bool;
}

View File

@@ -28,7 +28,7 @@ struct WorktreeDelegateAdapter(pub Arc<dyn LspAdapterDelegate>);
#[async_trait]
impl WorktreeDelegate for WorktreeDelegateAdapter {
fn id(&self) -> u64 {
self.0.project_worktree().worktree_id.to_proto()
self.0.worktree_id().to_proto()
}
fn root_path(&self) -> String {

View File

@@ -457,9 +457,7 @@ pub fn map_to_language_model_completion_events(
)));
}
Some("tool_calls") => {
// Gemini 3 models send reasoning_opaque/reasoning_text that must
// be preserved and sent back in subsequent requests. Emit as
// ReasoningDetails so the agent stores it in the message.
// Emit reasoning details if we have them (e.g. for Gemini 3)
if state.reasoning_opaque.is_some()
|| state.reasoning_text.is_some()
{

View File

@@ -277,7 +277,6 @@ impl LanguageModel for OpenAiLanguageModel {
| Model::Five
| Model::FiveMini
| Model::FiveNano
| Model::FivePointOne
| Model::O1
| Model::O3
| Model::O4Mini => true,
@@ -645,6 +644,7 @@ pub fn count_open_ai_tokens(
) -> BoxFuture<'static, Result<u64>> {
cx.background_spawn(async move {
let messages = collect_tiktoken_messages(request);
match model {
Model::Custom { max_tokens, .. } => {
let model = if max_tokens >= 100_000 {
@@ -672,11 +672,11 @@ pub fn count_open_ai_tokens(
| Model::O1
| Model::O3
| Model::O3Mini
| Model::O4Mini
| Model::Five
| Model::FiveMini
| Model::FiveNano => tiktoken_rs::num_tokens_from_messages(model.id(), &messages), // GPT-5.1 doesn't have tiktoken support yet; fall back on gpt-4o tokenizer
Model::FivePointOne => tiktoken_rs::num_tokens_from_messages("gpt-5", &messages),
| Model::O4Mini => tiktoken_rs::num_tokens_from_messages(model.id(), &messages),
// GPT-5 models don't have tiktoken support yet; fall back on gpt-4o tokenizer
Model::Five | Model::FiveMini | Model::FiveNano => {
tiktoken_rs::num_tokens_from_messages("gpt-4o", &messages)
}
}
.map(|tokens| tokens as u64)
})

View File

@@ -340,11 +340,11 @@ impl LspLogView {
* Configuration: {CONFIGURATION}",
NAME = info.status.name,
ID = info.id,
BINARY = info
.status
.binary
.as_ref()
.map_or_else(|| "Unknown".to_string(), |binary| format!("{:#?}", binary)),
BINARY = info.status.binary.as_ref().map_or_else(
|| "Unknown".to_string(),
|binary| serde_json::to_string_pretty(binary)
.unwrap_or_else(|e| format!("Failed to serialize binary info: {e:#}"))
),
WORKSPACE_FOLDERS = info
.status
.workspace_folders

View File

@@ -11,6 +11,5 @@ brackets = [
tab_size = 2
prettier_parser_name = "json"
debuggers = ["JavaScript"]
[overrides.string]
completion_query_characters = [":", " ", "."]
completion_query_characters = [":", " "]

View File

@@ -722,7 +722,7 @@ impl ContextProvider for PythonContextProvider {
let module_target = self.build_module_target(variables);
let location_file = location.file_location.buffer.read(cx).file().cloned();
let worktree_id = location_file.as_ref().map(|f| f.project_worktree(cx));
let worktree_id = location_file.as_ref().map(|f| f.worktree_id(cx));
cx.spawn(async move |cx| {
let active_toolchain = if let Some(worktree_id) = worktree_id {
@@ -991,8 +991,6 @@ fn python_env_kind_display(k: &PythonEnvironmentKind) -> &'static str {
PythonEnvironmentKind::VirtualEnvWrapper => "virtualenvwrapper",
PythonEnvironmentKind::WindowsStore => "global (Windows Store)",
PythonEnvironmentKind::WindowsRegistry => "global (Windows Registry)",
PythonEnvironmentKind::Uv => "uv",
PythonEnvironmentKind::UvWorkspace => "uv (Workspace)",
}
}
@@ -1000,8 +998,6 @@ pub(crate) struct PythonToolchainProvider;
static ENV_PRIORITY_LIST: &[PythonEnvironmentKind] = &[
// Prioritize non-Conda environments.
PythonEnvironmentKind::UvWorkspace,
PythonEnvironmentKind::Uv,
PythonEnvironmentKind::Poetry,
PythonEnvironmentKind::Pipenv,
PythonEnvironmentKind::VirtualEnvWrapper,

View File

@@ -138,7 +138,7 @@ impl LspAdapter for YamlLspAdapter {
cx: &mut AsyncApp,
) -> Result<Value> {
let location = SettingsLocation {
worktree: delegate.project_worktree(),
worktree_id: delegate.worktree_id(),
path: RelPath::empty(),
};

View File

@@ -1,6 +1,6 @@
name = "YAML"
grammar = "yaml"
path_suffixes = ["yml", "yaml", "pixi.lock", "clang-format"]
path_suffixes = ["yml", "yaml", "pixi.lock"]
line_comments = ["# "]
autoclose_before = ",]}"
brackets = [

View File

@@ -29,7 +29,7 @@ impl PathKey {
pub fn for_buffer(buffer: &Entity<Buffer>, cx: &App) -> Self {
if let Some(file) = buffer.read(cx).file() {
Self::with_sort_prefix(file.worktree_id(cx).0 as u64, file.path().clone())
Self::with_sort_prefix(file.worktree_id(cx).to_proto(), file.path().clone())
} else {
Self {
sort_prefix: None,

View File

@@ -85,8 +85,7 @@ pub enum Model {
FiveMini,
#[serde(rename = "gpt-5-nano")]
FiveNano,
#[serde(rename = "gpt-5.1")]
FivePointOne,
#[serde(rename = "custom")]
Custom {
name: String,
@@ -122,7 +121,6 @@ impl Model {
"gpt-5" => Ok(Self::Five),
"gpt-5-mini" => Ok(Self::FiveMini),
"gpt-5-nano" => Ok(Self::FiveNano),
"gpt-5.1" => Ok(Self::FivePointOne),
invalid_id => anyhow::bail!("invalid model id '{invalid_id}'"),
}
}
@@ -144,7 +142,6 @@ impl Model {
Self::Five => "gpt-5",
Self::FiveMini => "gpt-5-mini",
Self::FiveNano => "gpt-5-nano",
Self::FivePointOne => "gpt-5.1",
Self::Custom { name, .. } => name,
}
}
@@ -166,7 +163,6 @@ impl Model {
Self::Five => "gpt-5",
Self::FiveMini => "gpt-5-mini",
Self::FiveNano => "gpt-5-nano",
Self::FivePointOne => "gpt-5.1",
Self::Custom {
name, display_name, ..
} => display_name.as_ref().unwrap_or(name),
@@ -190,7 +186,6 @@ impl Model {
Self::Five => 272_000,
Self::FiveMini => 272_000,
Self::FiveNano => 272_000,
Self::FivePointOne => 400_000,
Self::Custom { max_tokens, .. } => *max_tokens,
}
}
@@ -215,7 +210,6 @@ impl Model {
Self::Five => Some(128_000),
Self::FiveMini => Some(128_000),
Self::FiveNano => Some(128_000),
Self::FivePointOne => Some(128_000),
}
}
@@ -243,7 +237,6 @@ impl Model {
| Self::FourPointOneNano
| Self::Five
| Self::FiveMini
| Self::FivePointOne
| Self::FiveNano => true,
Self::O1 | Self::O3 | Self::O3Mini | Self::O4Mini | Model::Custom { .. } => false,
}

View File

@@ -2054,7 +2054,7 @@ mod extension_agent_tests {
async fn archive_agent_uses_extension_and_agent_id_for_cache_key(cx: &mut TestAppContext) {
let fs = fs::FakeFs::new(cx.background_executor.clone());
let http_client = http_client::FakeHttpClient::with_404_response();
let worktree_store = cx.new(|_| WorktreeStore::local(0, false, fs.clone()));
let worktree_store = cx.new(|_| WorktreeStore::local(false, fs.clone()));
let project_environment = cx.new(|cx| {
crate::ProjectEnvironment::new(None, worktree_store.downgrade(), None, false, cx)
});
@@ -2135,7 +2135,7 @@ mod extension_agent_tests {
let fs = fs::FakeFs::new(cx.background_executor.clone());
let http_client = http_client::FakeHttpClient::with_404_response();
let node_runtime = NodeRuntime::unavailable();
let worktree_store = cx.new(|_| WorktreeStore::local(0, false, fs.clone()));
let worktree_store = cx.new(|_| WorktreeStore::local(false, fs.clone()));
let project_environment = cx.new(|cx| {
crate::ProjectEnvironment::new(None, worktree_store.downgrade(), None, false, cx)
});
@@ -2178,7 +2178,7 @@ mod extension_agent_tests {
let fs = fs::FakeFs::new(cx.background_executor.clone());
let http_client = http_client::FakeHttpClient::with_404_response();
let node_runtime = NodeRuntime::unavailable();
let worktree_store = cx.new(|_| WorktreeStore::local(0, false, fs.clone()));
let worktree_store = cx.new(|_| WorktreeStore::local(false, fs.clone()));
let project_environment = cx.new(|cx| {
crate::ProjectEnvironment::new(None, worktree_store.downgrade(), None, false, cx)
});

View File

@@ -21,13 +21,11 @@ use rpc::{
AnyProtoClient, ErrorCode, ErrorExt as _, TypedEnvelope,
proto::{self},
};
use settings::WorktreeId;
use std::{io, sync::Arc, time::Instant};
use text::{BufferId, ReplicaId};
use util::{ResultExt as _, TryFutureExt, debug_panic, maybe, paths::PathStyle, rel_path::RelPath};
use worktree::{File, PathChange, ProjectEntryId, Worktree};
use worktree::{File, PathChange, ProjectEntryId, Worktree, WorktreeId};
/// A set of open buffers.
pub struct BufferStore {
@@ -169,7 +167,7 @@ impl RemoteBufferStore {
let buffer_result = maybe!({
let mut buffer_file = None;
if let Some(file) = state.file.take() {
let worktree_id = WorktreeId::from_proto(file.worktree_id);
let worktree_id = worktree::WorktreeId::from_proto(file.worktree_id);
let worktree = self
.worktree_store
.read(cx)

View File

@@ -199,12 +199,12 @@ impl ContextServerStore {
)
}
/// Returns all configured context server ids, excluding the ones that are disabled
/// Returns all configured context server ids, regardless of enabled state.
pub fn configured_server_ids(&self) -> Vec<ContextServerId> {
self.context_server_settings
.iter()
.filter(|(_, settings)| settings.enabled())
.map(|(id, _)| ContextServerId(id.clone()))
.keys()
.cloned()
.map(ContextServerId)
.collect()
}
@@ -529,7 +529,7 @@ impl ContextServerStore {
.visible_worktrees(cx)
.next()
.map(|worktree| settings::SettingsLocation {
worktree: worktree.read(cx).project_worktree(),
worktree_id: worktree.read(cx).id(),
path: RelPath::empty(),
});
&ProjectSettings::get(location, cx).context_servers

View File

@@ -32,7 +32,7 @@ use gpui::{App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedStrin
use http_client::HttpClient;
use language::{Buffer, LanguageToolchainStore};
use node_runtime::NodeRuntime;
use settings::{InlayHintKind, WorktreeId};
use settings::InlayHintKind;
use remote::RemoteClient;
use rpc::{
@@ -40,7 +40,7 @@ use rpc::{
proto::{self},
};
use serde::{Deserialize, Serialize};
use settings::{ProjectWorktree, Settings, SettingsLocation};
use settings::{Settings, SettingsLocation, WorktreeId};
use std::{
borrow::Borrow,
collections::BTreeMap,
@@ -253,7 +253,7 @@ impl DapStore {
};
let settings_location = SettingsLocation {
worktree: worktree.read(cx).project_worktree(),
worktree_id: worktree.read(cx).id(),
path: RelPath::empty(),
};
let dap_settings = ProjectSettings::get(Some(settings_location), cx)
@@ -855,10 +855,9 @@ impl DapStore {
let worktree = this
.update(&mut cx, |this, cx| {
this.worktree_store.read(cx).worktree_for_id(
WorktreeId::from_proto(envelope.payload.worktree_id),
cx,
)
this.worktree_store
.read(cx)
.worktree_for_id(WorktreeId::from_proto(envelope.payload.worktree_id), cx)
})?
.context("Failed to find worktree with a given ID")?;
let binary = this

View File

@@ -137,7 +137,7 @@ impl ProjectEnvironment {
None => Some({
let shell = TerminalSettings::get(
Some(settings::SettingsLocation {
worktree: worktree.project_worktree(),
worktree_id: worktree.id(),
path: RelPath::empty(),
}),
cx,
@@ -180,7 +180,7 @@ impl ProjectEnvironment {
worktree
.as_ref()
.map(|(worktree, path)| settings::SettingsLocation {
worktree: worktree.read(cx).project_worktree(),
worktree_id: worktree.read(cx).id(),
path: &path,
}),
cx,

View File

@@ -55,7 +55,7 @@ use rpc::{
proto::{self, git_reset, split_repository_update},
};
use serde::Deserialize;
use settings::ProjectWorktree;
use settings::WorktreeId;
use std::{
cmp::Ordering,
collections::{BTreeSet, HashSet, VecDeque},
@@ -90,7 +90,7 @@ pub struct GitStore {
buffer_store: Entity<BufferStore>,
worktree_store: Entity<WorktreeStore>,
repositories: HashMap<RepositoryId, Entity<Repository>>,
worktree_ids: HashMap<RepositoryId, HashSet<ProjectWorktree>>,
worktree_ids: HashMap<RepositoryId, HashSet<WorktreeId>>,
active_repo_id: Option<RepositoryId>,
#[allow(clippy::type_complexity)]
loading_diffs:
@@ -1166,7 +1166,7 @@ impl GitStore {
return;
}
self.update_repositories_from_worktree(
worktree_id.worktree_id,
*worktree_id,
project_environment.clone(),
next_repository_id.clone(),
downstream
@@ -1267,7 +1267,7 @@ impl GitStore {
/// Update our list of repositories and schedule git scans in response to a notification from a worktree,
fn update_repositories_from_worktree(
&mut self,
worktree_id: ProjectWorktree,
worktree_id: WorktreeId,
project_environment: Entity<ProjectEnvironment>,
next_repository_id: Arc<AtomicU64>,
updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,

View File

@@ -15,9 +15,8 @@ use rpc::{AnyProtoClient, ErrorExt as _, TypedEnvelope, proto};
use std::num::NonZeroU64;
use std::path::PathBuf;
use std::sync::Arc;
use settings::WorktreeId;
use util::{ResultExt, rel_path::RelPath};
use worktree::{LoadedBinaryFile, PathChange, Worktree, };
use worktree::{LoadedBinaryFile, PathChange, Worktree, WorktreeId};
#[derive(Clone, Copy, Debug, Hash, PartialEq, PartialOrd, Ord, Eq)]
pub struct ImageId(NonZeroU64);

View File

@@ -94,7 +94,7 @@ use rpc::{
};
use serde::Serialize;
use serde_json::Value;
use settings::{ProjectWorktree, Settings, SettingsLocation, SettingsStore, WorktreeId};
use settings::{Settings, SettingsLocation, SettingsStore};
use sha2::{Digest, Sha256};
use smol::channel::Sender;
use snippet::Snippet;
@@ -134,7 +134,7 @@ pub use lsp_store::inlay_hint_cache::{CacheInlayHints, InvalidationStrategy};
pub use prettier::FORMAT_SUFFIX as TEST_PRETTIER_FORMAT_SUFFIX;
pub use worktree::{
Entry, EntryKind, FS_WATCH_LATENCY, File, LocalWorktree, PathChange, ProjectEntryId,
UpdatedEntriesSet, UpdatedGitRepositoriesSet, Worktree, WorktreeSettings,
UpdatedEntriesSet, UpdatedGitRepositoriesSet, Worktree, WorktreeId, WorktreeSettings,
};
const SERVER_LAUNCHING_BEFORE_SHUTDOWN_TIMEOUT: Duration = Duration::from_secs(5);
@@ -8427,10 +8427,6 @@ impl LspStore {
}
}
pub fn project_id_for_settings(&self, cx: &Context<Self>) -> u64 {
cx.entity_id().as_u64()
}
pub(crate) fn open_local_buffer_via_lsp(
&mut self,
abs_path: lsp::Uri,
@@ -8477,12 +8473,7 @@ impl LspStore {
let worktree = lsp_store
.update(cx, |lsp_store, cx| {
lsp_store.worktree_store.update(cx, |worktree_store, cx| {
worktree_store.create_worktree(
&worktree_root_target,
self.project_id_for_settings(cx),
false,
cx,
)
worktree_store.create_worktree(&worktree_root_target, false, cx)
})
})?
.await?;
@@ -10868,7 +10859,7 @@ impl LspStore {
for buffer in buffers {
buffer.update(cx, |buffer, cx| {
language_servers_to_stop.extend(local.language_server_ids_for_buffer(buffer, cx));
if let Some(worktree_id) = buffer.file().map(|f| f.project_worktree(cx))
if let Some(worktree_id) = buffer.file().map(|f| f.worktree_id(cx))
&& covered_worktrees.insert(worktree_id)
{
language_server_names_to_stop.retain(|name| {
@@ -13614,7 +13605,7 @@ pub fn language_server_settings<'a>(
) -> Option<&'a LspSettings> {
language_server_settings_for(
SettingsLocation {
worktree: delegate.project_worktree(),
worktree_id: delegate.worktree_id(),
path: RelPath::empty(),
},
language,
@@ -13633,7 +13624,6 @@ pub(crate) fn language_server_settings_for<'a>(
pub struct LocalLspAdapterDelegate {
lsp_store: WeakEntity<LspStore>,
worktree: worktree::Snapshot,
project_worktree: ProjectWorktree,
fs: Arc<dyn Fs>,
http_client: Arc<dyn HttpClient>,
language_registry: Arc<LanguageRegistry>,
@@ -13653,12 +13643,9 @@ impl LocalLspAdapterDelegate {
let load_shell_env_task =
environment.update(cx, |env, cx| env.worktree_environment(worktree.clone(), cx));
let project_worktree = worktree.read(cx).snapshot().project_worktree();
Arc::new(Self {
lsp_store,
worktree: worktree.read(cx).snapshot(),
project_worktree,
fs,
http_client,
language_registry,
@@ -13697,8 +13684,8 @@ impl LspAdapterDelegate for LocalLspAdapterDelegate {
self.http_client.clone()
}
fn project_worktree(&self) -> ProjectWorktree {
self.project_worktree
fn worktree_id(&self) -> WorktreeId {
self.worktree.id()
}
fn worktree_root_path(&self) -> &Path {

View File

@@ -8,8 +8,9 @@ use lsp::{
MessageType, TraceValue,
};
use rpc::proto;
use settings::WorktreeId;
use crate::{LanguageServerLogType, LspStore, Project, ProjectItem as _, WorktreeId};
use crate::{LanguageServerLogType, LspStore, Project, ProjectItem as _};
const SEND_LINE: &str = "\n// Send:";
const RECEIVE_LINE: &str = "\n// Receive:";

View File

@@ -14,10 +14,9 @@ use gpui::{App, AppContext as _, Context, Entity, Subscription};
use language::{ManifestDelegate, ManifestName, ManifestQuery};
pub use manifest_store::ManifestProvidersStore;
use path_trie::{LabelPresence, RootPathTrie, TriePath};
use settings::SettingsStore;
use settings::{SettingsStore, WorktreeId};
use util::rel_path::RelPath;
use settings::WorktreeId;
use worktree::{Event as WorktreeEvent, Snapshot, Worktree, };
use worktree::{Event as WorktreeEvent, Snapshot, Worktree};
use crate::{
ProjectPath,
@@ -97,7 +96,7 @@ impl ManifestTree {
delegate: &Arc<dyn ManifestDelegate>,
cx: &mut App,
) -> Option<ProjectPath> {
debug_assert_eq!(delegate.worktree_id().worktree_id, *worktree_id);
debug_assert_eq!(delegate.worktree_id(), *worktree_id);
let (mut marked_path, mut current_presence) = (None, LabelPresence::KnownAbsent);
let worktree_roots = match self.root_points.entry(*worktree_id) {
Entry::Occupied(occupied_entry) => occupied_entry.get().clone(),
@@ -219,7 +218,7 @@ impl ManifestDelegate for ManifestQueryDelegate {
})
}
fn worktree_id(&self) -> settings::ProjectWorktree {
self.worktree.project_worktree()
fn worktree_id(&self) -> WorktreeId {
self.worktree.id()
}
}

View File

@@ -18,7 +18,7 @@ use language::{
language_settings::AllLanguageSettings,
};
use lsp::LanguageServerName;
use settings::{WorktreeId,Settings, SettingsLocation};
use settings::{Settings, SettingsLocation, WorktreeId};
use std::sync::OnceLock;
use util::rel_path::RelPath;
@@ -144,7 +144,7 @@ impl LanguageServerTree {
cx: &mut App,
) -> impl Iterator<Item = LanguageServerId> + 'a {
let manifest_location = self.manifest_location_for_path(&path, manifest_name, delegate, cx);
let adapters = self.adapters_for_language(&manifest_location, &language_name, delegate, cx);
let adapters = self.adapters_for_language(&manifest_location, &language_name, cx);
self.get_with_adapters(manifest_location, adapters)
}
@@ -158,8 +158,8 @@ impl LanguageServerTree {
cx: &'a mut App,
) -> impl Iterator<Item = LanguageServerTreeNode> + 'a {
let manifest_location = self.manifest_location_for_path(&path, manifest_name, delegate, cx);
let adapters = self.adapters_for_language(&manifest_location, &language_name, delegate, cx);
self.init_with_adapters(manifest_location, language_name, adapters, delegate.clone(), cx)
let adapters = self.adapters_for_language(&manifest_location, &language_name, cx);
self.init_with_adapters(manifest_location, language_name, adapters, cx)
}
fn init_with_adapters<'a>(
@@ -167,7 +167,6 @@ impl LanguageServerTree {
root_path: ProjectPath,
language_name: LanguageName,
adapters: IndexMap<LanguageServerName, (LspSettings, Arc<CachedLspAdapter>)>,
delegate: Arc<dyn ManifestDelegate>,
cx: &'a App,
) -> impl Iterator<Item = LanguageServerTreeNode> + 'a {
adapters.into_iter().map(move |(_, (settings, adapter))| {
@@ -182,7 +181,7 @@ impl LanguageServerTree {
.entry(adapter.name());
let (node, languages) = inner_node.or_insert_with(|| {
let toolchain = self.toolchains.read(cx).active_toolchain(
delegate.worktree_id(),
root_path.worktree_id,
&root_path.path,
language_name.clone(),
);
@@ -237,11 +236,10 @@ impl LanguageServerTree {
&self,
manifest_location: &ProjectPath,
language_name: &LanguageName,
delegate: &Arc<dyn ManifestDelegate>,
cx: &App,
) -> IndexMap<LanguageServerName, (LspSettings, Arc<CachedLspAdapter>)> {
let settings_location = SettingsLocation {
worktree: delegate.worktree_id(),
worktree_id: manifest_location.worktree_id,
path: &manifest_location.path,
};
let settings = AllLanguageSettings::get(Some(settings_location), cx).language(
@@ -403,10 +401,10 @@ impl ServerTreeRebase {
.manifest_location_for_path(&path, manifest_name, &delegate, cx);
let adapters = self
.new_tree
.adapters_for_language(&manifest, &language_name, &delegate, cx);
.adapters_for_language(&manifest, &language_name, cx);
self.new_tree
.init_with_adapters(manifest, language_name, adapters, delegate, cx)
.init_with_adapters(manifest, language_name, adapters, cx)
.filter_map(|node| {
// Inspect result of the query and initialize it ourselves before
// handing it off to the caller.

View File

@@ -22,11 +22,13 @@ use lsp::{LanguageServer, LanguageServerId, LanguageServerName};
use node_runtime::NodeRuntime;
use paths::default_prettier_dir;
use prettier::Prettier;
use settings::WorktreeId;
use smol::stream::StreamExt;
use util::{ResultExt, TryFutureExt, rel_path::RelPath};
use crate::{File, PathChange, ProjectEntryId, Worktree, worktree_store::WorktreeStore};
use crate::{
File, PathChange, ProjectEntryId, Worktree, lsp_store::WorktreeId,
worktree_store::WorktreeStore,
};
pub struct PrettierStore {
node: NodeRuntime,

View File

@@ -104,7 +104,7 @@ use rpc::{
};
use search::{SearchInputKind, SearchQuery, SearchResult};
use search_history::SearchHistory;
use settings::{InvalidSettingsError, RegisterSetting, Settings, SettingsLocation, SettingsStore, WorktreeId};
use settings::{InvalidSettingsError, RegisterSetting, Settings, SettingsLocation, SettingsStore};
use smol::channel::Receiver;
use snippet::Snippet;
pub use snippet_provider;
@@ -133,7 +133,7 @@ use util::{
use worktree::{CreatedEntry, Snapshot, Traversal};
pub use worktree::{
Entry, EntryKind, FS_WATCH_LATENCY, File, LocalWorktree, PathChange, ProjectEntryId,
UpdatedEntriesSet, UpdatedGitRepositoriesSet, Worktree, WorktreeSettings,
UpdatedEntriesSet, UpdatedGitRepositoriesSet, Worktree, WorktreeId, WorktreeSettings,
};
use worktree_store::{WorktreeStore, WorktreeStoreEvent};
@@ -367,7 +367,7 @@ pub struct ProjectPath {
impl ProjectPath {
pub fn from_file(value: &dyn language::File, cx: &App) -> Self {
ProjectPath {
worktree_id: value.project_worktree(cx).worktree_id,
worktree_id: value.worktree_id(cx),
path: value.path().clone(),
}
}
@@ -1520,7 +1520,6 @@ impl Project {
WorktreeStore::remote(
true,
client.clone().into(),
self.id,
response.payload.project_id,
path_style,
)
@@ -4228,7 +4227,7 @@ impl Project {
}
}
let buffer_worktree_id = buffer.read(cx).file().map(|file| file.project_worktree(cx));
let buffer_worktree_id = buffer.read(cx).file().map(|file| file.worktree_id(cx));
let worktrees_with_ids: Vec<_> = self
.worktrees(cx)
.map(|worktree| {
@@ -4325,7 +4324,7 @@ impl Project {
cx: &mut Context<Self>,
) -> Task<Result<Entity<Worktree>>> {
self.worktree_store.update(cx, |worktree_store, cx| {
worktree_store.create_worktree(abs_path, self.project_id_for_settings(cx), visible, cx)
worktree_store.create_worktree(abs_path, visible, cx)
})
}
@@ -5436,6 +5435,15 @@ impl<'a> Iterator for PathMatchCandidateSetIter<'a> {
impl EventEmitter<Event> for Project {}
impl<'a> From<&'a ProjectPath> for SettingsLocation<'a> {
fn from(val: &'a ProjectPath) -> Self {
SettingsLocation {
worktree_id: val.worktree_id,
path: val.path.as_ref(),
}
}
}
impl<P: Into<Arc<RelPath>>> From<(WorktreeId, P)> for ProjectPath {
fn from((worktree_id, path): (WorktreeId, P)) -> Self {
Self {
@@ -5508,7 +5516,7 @@ impl ProjectItem for Buffer {
fn project_path(&self, cx: &App) -> Option<ProjectPath> {
self.file().map(|file| ProjectPath {
worktree_id: file.project_worktree(cx).worktree_id.into(),
worktree_id: file.worktree_id(cx),
path: file.path().clone(),
})
}

View File

@@ -21,12 +21,12 @@ pub use settings::DirenvSettings;
pub use settings::LspSettings;
use settings::{
DapSettingsContent, InvalidSettingsError, LocalSettingsKind, RegisterSetting, Settings,
SettingsLocation, SettingsStore, WorktreeId, parse_json_with_comments, watch_config_file,
SettingsLocation, SettingsStore, parse_json_with_comments, watch_config_file,
};
use std::{path::PathBuf, sync::Arc, time::Duration};
use task::{DebugTaskFile, TaskTemplates, VsCodeDebugTaskFile, VsCodeTaskFile};
use util::{ResultExt, rel_path::RelPath, serde::default_true};
use worktree::{PathChange, UpdatedEntriesSet, Worktree};
use worktree::{PathChange, UpdatedEntriesSet, Worktree, WorktreeId};
use crate::{
task_store::{TaskSettingsLocation, TaskStore},
@@ -702,8 +702,8 @@ impl SettingsObserver {
let store = cx.global::<SettingsStore>();
for worktree in self.worktree_store.read(cx).worktrees() {
let worktree_id = worktree.read(cx).id();
for (path, content) in store.local_settings(worktree_id.with_project_id(project_id)) {
let worktree_id = worktree.read(cx).id().to_proto();
for (path, content) in store.local_settings(worktree.read(cx).id()) {
let content = serde_json::to_string(&content).unwrap();
downstream_client
.send(proto::UpdateWorktreeSettings {
@@ -1002,7 +1002,7 @@ impl SettingsObserver {
let result = task_store.update(cx, |task_store, cx| {
task_store.update_user_tasks(
TaskSettingsLocation::Worktree(SettingsLocation {
worktree: worktree_id,
worktree_id,
path: directory.as_ref(),
}),
file_content.as_deref(),
@@ -1031,7 +1031,7 @@ impl SettingsObserver {
let result = task_store.update(cx, |task_store, cx| {
task_store.update_user_debug_scenarios(
TaskSettingsLocation::Worktree(SettingsLocation {
worktree: worktree_id,
worktree_id,
path: directory.as_ref(),
}),
file_content.as_deref(),

View File

@@ -19,7 +19,6 @@ use language::{
};
use lsp::{LanguageServerId, LanguageServerName};
use paths::{debug_task_file_name, task_file_name};
use settings::WorktreeId;
use settings::{InvalidSettingsError, parse_json_with_comments};
use task::{
DebugScenario, ResolvedTask, TaskContext, TaskId, TaskTemplate, TaskTemplates, TaskVariables,
@@ -27,6 +26,7 @@ use task::{
};
use text::{BufferId, Point, ToPoint};
use util::{NumericPrefixWithSuffix, ResultExt as _, post_inc, rel_path::RelPath};
use worktree::WorktreeId;
use crate::{task_store::TaskSettingsLocation, worktree_store::WorktreeStore};
@@ -355,7 +355,7 @@ impl Inventory {
let buffer = buffer.read(cx);
let file = buffer.file().cloned();
(
file.as_ref().map(|file| file.project_worktree(cx)),
file.as_ref().map(|file| file.worktree_id(cx)),
file,
buffer.language().cloned(),
)
@@ -682,16 +682,15 @@ impl Inventory {
TaskSettingsLocation::Worktree(location) => {
let new_templates = new_templates.collect::<Vec<_>>();
if new_templates.is_empty() {
if let Some(worktree_tasks) = parsed_templates
.worktree
.get_mut(&WorktreeId(location.worktree.worktree_id as usize))
if let Some(worktree_tasks) =
parsed_templates.worktree.get_mut(&location.worktree_id)
{
worktree_tasks.remove(location.path);
}
} else {
parsed_templates
.worktree
.entry(WorktreeId(location.worktree.worktree_id as usize))
.entry(location.worktree_id)
.or_default()
.insert(Arc::from(location.path), new_templates);
}
@@ -702,7 +701,7 @@ impl Inventory {
..
} = kind
{
id.0 != location.worktree.worktree_id as usize
*id != location.worktree_id
|| directory_in_worktree.as_ref() != location.path
} else {
true
@@ -768,20 +767,20 @@ impl Inventory {
}
TaskSettingsLocation::Worktree(location) => {
previously_existing_scenarios = parsed_scenarios
.worktree_scenarios(location.worktree)
.worktree_scenarios(location.worktree_id)
.map(|(_, scenario)| scenario.label)
.collect::<HashSet<_>>();
if new_templates.is_empty() {
if let Some(worktree_tasks) =
parsed_scenarios.worktree.get_mut(&location.worktree)
parsed_scenarios.worktree.get_mut(&location.worktree_id)
{
worktree_tasks.remove(location.path);
}
} else {
parsed_scenarios
.worktree
.entry(location.worktree)
.entry(location.worktree_id)
.or_default()
.insert(Arc::from(location.path), new_templates);
}
@@ -849,8 +848,8 @@ fn task_variables_preference(task: &ResolvedTask) -> Reverse<usize> {
mod test_inventory {
use gpui::{AppContext as _, Entity, Task, TestAppContext};
use itertools::Itertools;
use settings::WorktreeId;
use task::TaskContext;
use worktree::WorktreeId;
use crate::Inventory;
@@ -1016,7 +1015,7 @@ impl ContextProvider for BasicContextProvider {
}
let worktree = buffer
.file()
.map(|file| file.project_worktree(cx))
.map(|file| file.worktree_id(cx))
.and_then(|worktree_id| {
self.worktree_store
.read(cx)
@@ -1182,7 +1181,7 @@ mod tests {
let worktree_id = WorktreeId::from_usize(0);
let local_worktree_location = SettingsLocation {
worktree,
worktree_id,
path: rel_path("foo"),
};
inventory.update(cx, |inventory, _| {
@@ -1398,8 +1397,8 @@ mod tests {
init_test(cx);
let inventory = cx.update(|cx| Inventory::new(cx));
let common_name = "common_task_name";
let worktree_1 = WorktreeId::from_usize(1).with_project_id(0);
let worktree_2 = WorktreeId::from_usize(2).with_project_id(0);
let worktree_1 = WorktreeId::from_usize(1);
let worktree_2 = WorktreeId::from_usize(2);
cx.run_until_parked();
let worktree_independent_tasks = vec![
@@ -1428,7 +1427,7 @@ mod tests {
let worktree_1_tasks = [
(
TaskSourceKind::Worktree {
id: worktree_1.worktree_id,
id: worktree_1,
directory_in_worktree: rel_path(".zed").into(),
id_base: "local worktree tasks from directory \".zed\"".into(),
},
@@ -1436,7 +1435,7 @@ mod tests {
),
(
TaskSourceKind::Worktree {
id: worktree_1.worktree_id,
id: worktree_1,
directory_in_worktree: rel_path(".zed").into(),
id_base: "local worktree tasks from directory \".zed\"".into(),
},
@@ -1446,7 +1445,7 @@ mod tests {
let worktree_2_tasks = [
(
TaskSourceKind::Worktree {
id: worktree_2.worktree_id,
id: worktree_2,
directory_in_worktree: rel_path(".zed").into(),
id_base: "local worktree tasks from directory \".zed\"".into(),
},
@@ -1454,7 +1453,7 @@ mod tests {
),
(
TaskSourceKind::Worktree {
id: worktree_2.worktree_id,
id: worktree_2,
directory_in_worktree: rel_path(".zed").into(),
id_base: "local worktree tasks from directory \".zed\"".into(),
},
@@ -1476,7 +1475,7 @@ mod tests {
inventory
.update_file_based_tasks(
TaskSettingsLocation::Worktree(SettingsLocation {
worktree: worktree_1,
worktree_id: worktree_1,
path: rel_path(".zed"),
}),
Some(&mock_tasks_from_names(
@@ -1487,7 +1486,7 @@ mod tests {
inventory
.update_file_based_tasks(
TaskSettingsLocation::Worktree(SettingsLocation {
worktree: worktree_2,
worktree_id: worktree_2,
path: rel_path(".zed"),
}),
Some(&mock_tasks_from_names(
@@ -1503,7 +1502,7 @@ mod tests {
"Without a worktree, only worktree-independent tasks should be listed"
);
assert_eq!(
list_tasks_sorted_by_last_used(&inventory, Some(worktree_1.worktree_id), cx).await,
list_tasks_sorted_by_last_used(&inventory, Some(worktree_1), cx).await,
worktree_1_tasks
.iter()
.chain(worktree_independent_tasks.iter())
@@ -1512,7 +1511,7 @@ mod tests {
.collect::<Vec<_>>(),
);
assert_eq!(
list_tasks_sorted_by_last_used(&inventory, Some(worktree_2.worktree_id), cx).await,
list_tasks_sorted_by_last_used(&inventory, Some(worktree_2), cx).await,
worktree_2_tasks
.iter()
.chain(worktree_independent_tasks.iter())
@@ -1527,7 +1526,7 @@ mod tests {
"Without a worktree, only worktree-independent tasks should be listed"
);
assert_eq!(
list_tasks(&inventory, Some(worktree_1.worktree_id), cx).await,
list_tasks(&inventory, Some(worktree_1), cx).await,
worktree_1_tasks
.iter()
.chain(worktree_independent_tasks.iter())
@@ -1535,7 +1534,7 @@ mod tests {
.collect::<Vec<_>>(),
);
assert_eq!(
list_tasks(&inventory, Some(worktree_2.worktree_id), cx).await,
list_tasks(&inventory, Some(worktree_2), cx).await,
worktree_2_tasks
.iter()
.chain(worktree_independent_tasks.iter())

View File

@@ -427,7 +427,7 @@ fn worktree_root(
.buffer
.read(cx)
.file()
.map(|f| f.project_worktree(cx))
.map(|f| f.worktree_id(cx))
.and_then(|worktree_id| worktree_store.read(cx).worktree_for_id(worktree_id, cx))
.and_then(|worktree| {
let worktree = worktree.read(cx);

View File

@@ -67,7 +67,7 @@ impl Project {
&& let Some((worktree, _)) = self.find_worktree(path, cx)
{
settings_location = Some(SettingsLocation {
worktree: worktree.read(cx).id(),
worktree_id: worktree.read(cx).id(),
path: RelPath::empty(),
});
}
@@ -289,7 +289,7 @@ impl Project {
&& let Some((worktree, _)) = self.find_worktree(path, cx)
{
settings_location = Some(SettingsLocation {
worktree: worktree.read(cx).id(),
worktree_id: worktree.read(cx).id(),
path: RelPath::empty(),
});
}
@@ -464,7 +464,7 @@ impl Project {
&& let Some((worktree, _)) = self.find_worktree(path, cx)
{
settings_location = Some(SettingsLocation {
worktree: worktree.read(cx).id(),
worktree_id: worktree.read(cx).id(),
path: RelPath::empty(),
});
}

View File

@@ -19,7 +19,7 @@ use rpc::{
resolve_toolchain_response::Response as ResolveResponsePayload,
},
};
use settings::{ProjectWorktree, WorktreeId};
use settings::WorktreeId;
use task::Shell;
use util::{ResultExt as _, rel_path::RelPath};
@@ -297,7 +297,7 @@ impl ToolchainStore {
let toolchains = this
.update(&mut cx, |this, cx| {
let language_name = LanguageName::from_proto(envelope.payload.language_name);
let worktree_id = ProjectWorktree::from_proto(envelope.payload.worktree_id);
let worktree_id = WorktreeId::from_proto(envelope.payload.worktree_id);
let path = RelPath::from_proto(envelope.payload.path.as_deref().unwrap_or(""))?;
anyhow::Ok(this.list_toolchains(
ProjectPath { worktree_id, path },
@@ -431,7 +431,7 @@ impl language::LanguageToolchainStore for RemoteStore {
) -> Option<Toolchain> {
self.0
.update(cx, |this, cx| {
this.active_toolchain(ProjectPath { worktree_id: worktree_id.worktree_id, path }, language_name, cx)
this.active_toolchain(ProjectPath { worktree_id, path }, language_name, cx)
})
.ok()?
.await
@@ -554,7 +554,7 @@ impl LocalToolchainStore {
}
pub(crate) fn active_toolchain(
&self,
worktree_id: ProjectWorktree,
worktree_id: WorktreeId,
relative_path: &Arc<RelPath>,
language_name: LanguageName,
) -> Option<Toolchain> {

View File

@@ -17,7 +17,6 @@ use rpc::{
AnyProtoClient, ErrorExt, TypedEnvelope,
proto::{self, REMOTE_SERVER_PROJECT_ID},
};
use settings::WorktreeId;
use smol::{
channel::{Receiver, Sender},
stream::StreamExt,
@@ -30,7 +29,7 @@ use util::{
};
use worktree::{
CreatedEntry, Entry, ProjectEntryId, UpdatedEntriesSet, UpdatedGitRepositoriesSet, Worktree,
WorktreeSettings,
WorktreeId, WorktreeSettings,
};
use crate::{ProjectPath, search::SearchQuery};
@@ -53,7 +52,6 @@ enum WorktreeStoreState {
}
pub struct WorktreeStore {
project_id: u64,
next_entry_id: Arc<AtomicUsize>,
downstream_client: Option<(AnyProtoClient, u64)>,
retain_worktrees: bool,
@@ -88,9 +86,8 @@ impl WorktreeStore {
client.add_entity_request_handler(Self::handle_expand_all_for_project_entry);
}
pub fn local(project_id: u64, retain_worktrees: bool, fs: Arc<dyn Fs>) -> Self {
pub fn local(retain_worktrees: bool, fs: Arc<dyn Fs>) -> Self {
Self {
project_id,
next_entry_id: Default::default(),
loading_worktrees: Default::default(),
downstream_client: None,
@@ -104,12 +101,10 @@ impl WorktreeStore {
pub fn remote(
retain_worktrees: bool,
upstream_client: AnyProtoClient,
local_project_id: u64,
upstream_project_id: u64,
path_style: PathStyle,
) -> Self {
Self {
project_id: local_project_id,
next_entry_id: Default::default(),
loading_worktrees: Default::default(),
downstream_client: None,
@@ -202,7 +197,7 @@ impl WorktreeStore {
if let Some((tree, relative_path)) = self.find_worktree(abs_path, cx) {
Task::ready(Ok((tree, relative_path)))
} else {
let worktree = self.create_worktree(abs_path, visible, cx);
let worktree = self.create_worktree(abs_path, visible, cx);
cx.background_spawn(async move { Ok((worktree.await?, RelPath::empty().into())) })
}
}
@@ -483,20 +478,12 @@ impl WorktreeStore {
Task::ready(Err(Arc::new(anyhow!("cannot create worktrees via collab"))))
} else {
let abs_path = RemotePathBuf::new(abs_path.to_string(), *path_style);
self.create_remote_worktree(
upstream_client.clone(),
abs_path,
visible,
cx,
)
self.create_remote_worktree(upstream_client.clone(), abs_path, visible, cx)
}
}
WorktreeStoreState::Local { fs } => self.create_local_worktree(
fs.clone(),
abs_path.clone(),
visible,
cx,
),
WorktreeStoreState::Local { fs } => {
self.create_local_worktree(fs.clone(), abs_path.clone(), visible, cx)
}
};
self.loading_worktrees
@@ -540,7 +527,7 @@ impl WorktreeStore {
let path = RemotePathBuf::new(abs_path, path_style);
let response = client
.request(proto::AddWorktree {
project_id: self.project_id,
project_id: REMOTE_SERVER_PROJECT_ID,
path: path.to_proto(),
visible,
})
@@ -560,7 +547,7 @@ impl WorktreeStore {
let worktree = cx.update(|cx| {
Worktree::remote(
self.project_id,
REMOTE_SERVER_PROJECT_ID,
ReplicaId::REMOTE_SERVER,
proto::WorktreeMetadata {
id: response.worktree_id,
@@ -586,7 +573,6 @@ impl WorktreeStore {
fs: Arc<dyn Fs>,
abs_path: Arc<SanitizedPath>,
visible: bool,
project_id_for_settings: u64,
cx: &mut Context<Self>,
) -> Task<Result<Entity<Worktree>, Arc<anyhow::Error>>> {
let next_entry_id = self.next_entry_id.clone();
@@ -594,7 +580,6 @@ impl WorktreeStore {
cx.spawn(async move |this, cx| {
let worktree = Worktree::local(
SanitizedPath::cast_arc(abs_path.clone()),
project_id_for_settings,
visible,
fs,
next_entry_id,

View File

@@ -92,7 +92,7 @@ impl HeadlessProject {
languages::init(languages.clone(), fs.clone(), node_runtime.clone(), cx);
let worktree_store = cx.new(|cx| {
let mut store = WorktreeStore::local(REMOTE_SERVER_PROJECT_ID, true, fs.clone());
let mut store = WorktreeStore::local(true, fs.clone());
store.shared(REMOTE_SERVER_PROJECT_ID, session.clone(), cx);
store
});

View File

@@ -354,7 +354,7 @@ async fn test_remote_settings(cx: &mut TestAppContext, server_cx: &mut TestAppCo
assert_eq!(
AllLanguageSettings::get(
Some(SettingsLocation {
worktree,
worktree_id,
path: rel_path("src/lib.rs")
}),
cx

View File

@@ -46,11 +46,8 @@ pub struct ActiveSettingsProfileName(pub String);
impl Global for ActiveSettingsProfileName {}
/// Worktree ID within a project. Prefer [`ProjectWorktree`] when something globally unique is needed.
///
/// These are usually globally unique, but not always (particularly with remote projects).
#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, PartialOrd, Ord, serde::Serialize)]
pub struct WorktreeId(pub usize);
pub struct WorktreeId(usize);
impl From<WorktreeId> for usize {
fn from(value: WorktreeId) -> Self {
@@ -74,32 +71,6 @@ impl WorktreeId {
pub fn to_usize(self) -> usize {
self.0
}
pub fn with_project_id(self, project_id: u64) -> ProjectWorktree {
ProjectWorktree {
project_id,
worktree_id: self,
}
}
}
/// `worktree_id` and the `project_id` it belongs to.
///
/// `worktree_id` is usually globally unique, but sometimes only unique within a
/// project (e.g., when remoting).
#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, PartialOrd, Ord, serde::Serialize)]
pub struct ProjectWorktree {
pub project_id: u64,
pub worktree_id: WorktreeId,
}
impl ProjectWorktree {
#[cfg(any(test, feature = "test-support"))]
pub fn from_u64(n: u64) -> Self {
Self {
project_id: 0,
worktree_id: WorktreeId(n as usize),
}
}
}
impl fmt::Display for WorktreeId {

View File

@@ -32,8 +32,7 @@ pub type EditorconfigProperties = ec4rs::Properties;
use crate::{
ActiveSettingsProfileName, FontFamilyName, IconThemeName, LanguageSettingsContent,
LanguageToSettingsMap, ProjectWorktree, ThemeName, VsCodeSettings,
fallible_options,
LanguageToSettingsMap, ThemeName, VsCodeSettings, WorktreeId, fallible_options,
merge_from::MergeFrom,
settings_content::{
ExtensionsSettingsContent, ProjectSettingsContent, SettingsContent, UserSettingsContent,
@@ -135,7 +134,7 @@ inventory::collect!(RegisteredSetting);
#[derive(Clone, Copy, Debug)]
pub struct SettingsLocation<'a> {
pub worktree: ProjectWorktree,
pub worktree_id: WorktreeId,
pub path: &'a RelPath,
}
@@ -150,9 +149,8 @@ pub struct SettingsStore {
merged_settings: Rc<SettingsContent>,
local_settings: BTreeMap<(ProjectWorktree, Arc<RelPath>), SettingsContent>,
raw_editorconfig_settings:
BTreeMap<(ProjectWorktree, Arc<RelPath>), (String, Option<Editorconfig>)>,
local_settings: BTreeMap<(WorktreeId, Arc<RelPath>), SettingsContent>,
raw_editorconfig_settings: BTreeMap<(WorktreeId, Arc<RelPath>), (String, Option<Editorconfig>)>,
_setting_file_updates: Task<()>,
setting_file_updates_tx:
@@ -167,7 +165,7 @@ pub enum SettingsFile {
User,
Server,
/// Represents project settings in ssh projects as well as local projects
Project((ProjectWorktree, Arc<RelPath>)),
Project((WorktreeId, Arc<RelPath>)),
}
impl PartialOrd for SettingsFile {
@@ -236,7 +234,7 @@ pub struct SettingValue<T> {
#[doc(hidden)]
pub global_value: Option<T>,
#[doc(hidden)]
pub local_values: Vec<(ProjectWorktree, Arc<RelPath>, T)>,
pub local_values: Vec<(WorktreeId, Arc<RelPath>, T)>,
}
#[doc(hidden)]
@@ -246,14 +244,9 @@ pub trait AnySettingValue: 'static + Send + Sync {
fn from_settings(&self, s: &SettingsContent) -> Box<dyn Any>;
fn value_for_path(&self, path: Option<SettingsLocation>) -> &dyn Any;
fn all_local_values(&self) -> Vec<(ProjectWorktree, Arc<RelPath>, &dyn Any)>;
fn all_local_values(&self) -> Vec<(WorktreeId, Arc<RelPath>, &dyn Any)>;
fn set_global_value(&mut self, value: Box<dyn Any>);
fn set_local_value(
&mut self,
root_id: ProjectWorktree,
path: Arc<RelPath>,
value: Box<dyn Any>,
);
fn set_local_value(&mut self, root_id: WorktreeId, path: Arc<RelPath>, value: Box<dyn Any>);
}
/// Parameters that are used when generating some JSON schemas at runtime.
@@ -365,7 +358,7 @@ impl SettingsStore {
}
/// Get all values from project specific settings
pub fn get_all_locals<T: Settings>(&self) -> Vec<(ProjectWorktree, Arc<RelPath>, &T)> {
pub fn get_all_locals<T: Settings>(&self) -> Vec<(WorktreeId, Arc<RelPath>, &T)> {
self.setting_values
.get(&TypeId::of::<T>())
.unwrap_or_else(|| panic!("unregistered setting type {}", type_name::<T>()))
@@ -832,7 +825,7 @@ impl SettingsStore {
/// Add or remove a set of local settings via a JSON string.
pub fn set_local_settings(
&mut self,
root_id: ProjectWorktree,
root_id: WorktreeId,
directory_path: Arc<RelPath>,
kind: LocalSettingsKind,
settings_content: Option<&str>,
@@ -975,7 +968,7 @@ impl SettingsStore {
}
/// Add or remove a set of local settings via a JSON string.
pub fn clear_local_settings(&mut self, root_id: ProjectWorktree, cx: &mut App) -> Result<()> {
pub fn clear_local_settings(&mut self, root_id: WorktreeId, cx: &mut App) -> Result<()> {
self.local_settings
.retain(|(worktree_id, _), _| worktree_id != &root_id);
self.recompute_values(Some((root_id, RelPath::empty())), cx);
@@ -984,21 +977,31 @@ impl SettingsStore {
pub fn local_settings(
&self,
root_id: ProjectWorktree,
root_id: WorktreeId,
) -> impl '_ + Iterator<Item = (Arc<RelPath>, &ProjectSettingsContent)> {
self.local_settings
.range((root_id, RelPath::empty().into())..)
.take_while(move |((project_worktree, _), _)| *project_worktree == root_id)
.range(
(root_id, RelPath::empty().into())
..(
WorktreeId::from_usize(root_id.to_usize() + 1),
RelPath::empty().into(),
),
)
.map(|((_, path), content)| (path.clone(), &content.project))
}
pub fn local_editorconfig_settings(
&self,
root_id: ProjectWorktree,
root_id: WorktreeId,
) -> impl '_ + Iterator<Item = (Arc<RelPath>, String, Option<Editorconfig>)> {
self.raw_editorconfig_settings
.range((root_id, RelPath::empty().into())..)
.take_while(move |((project_worktree, _), _)| *project_worktree == root_id)
.range(
(root_id, RelPath::empty().into())
..(
WorktreeId::from_usize(root_id.to_usize() + 1),
RelPath::empty().into(),
),
)
.map(|((_, path), (content, parsed_content))| {
(path.clone(), content.clone(), parsed_content.clone())
})
@@ -1060,12 +1063,12 @@ impl SettingsStore {
fn recompute_values(
&mut self,
changed_local_path: Option<(ProjectWorktree, &RelPath)>,
changed_local_path: Option<(WorktreeId, &RelPath)>,
cx: &mut App,
) {
// Reload the global and local values for every setting.
let mut project_settings_stack = Vec::<SettingsContent>::new();
let mut paths_stack = Vec::<Option<(ProjectWorktree, &RelPath)>>::new();
let mut paths_stack = Vec::<Option<(WorktreeId, &RelPath)>>::new();
if changed_local_path.is_none() {
let mut merged = self.default_settings.as_ref().clone();
@@ -1126,7 +1129,7 @@ impl SettingsStore {
pub fn editorconfig_properties(
&self,
for_worktree: ProjectWorktree,
for_worktree: WorktreeId,
for_path: &RelPath,
) -> Option<EditorconfigProperties> {
let mut properties = EditorconfigProperties::new();
@@ -1299,7 +1302,7 @@ impl<T: Settings> AnySettingValue for SettingValue<T> {
type_name::<T>()
}
fn all_local_values(&self) -> Vec<(ProjectWorktree, Arc<RelPath>, &dyn Any)> {
fn all_local_values(&self) -> Vec<(WorktreeId, Arc<RelPath>, &dyn Any)> {
self.local_values
.iter()
.map(|(id, path, value)| (*id, path.clone(), value as _))
@@ -1307,11 +1310,7 @@ impl<T: Settings> AnySettingValue for SettingValue<T> {
}
fn value_for_path(&self, path: Option<SettingsLocation>) -> &dyn Any {
if let Some(SettingsLocation {
worktree: worktree_id,
path,
}) = path
{
if let Some(SettingsLocation { worktree_id, path }) = path {
for (settings_root_id, settings_path, value) in self.local_values.iter().rev() {
if worktree_id == *settings_root_id && path.starts_with(settings_path) {
return value;
@@ -1328,12 +1327,7 @@ impl<T: Settings> AnySettingValue for SettingValue<T> {
self.global_value = Some(*value.downcast().unwrap());
}
fn set_local_value(
&mut self,
root_id: ProjectWorktree,
path: Arc<RelPath>,
value: Box<dyn Any>,
) {
fn set_local_value(&mut self, root_id: WorktreeId, path: Arc<RelPath>, value: Box<dyn Any>) {
let value = *value.downcast().unwrap();
match self
.local_values
@@ -1350,7 +1344,7 @@ mod tests {
use std::num::NonZeroU32;
use crate::{
ClosePosition, ItemSettingsContent, VsCodeSettingsSource, WorktreeId, default_settings,
ClosePosition, ItemSettingsContent, VsCodeSettingsSource, default_settings,
settings_content::LanguageSettingsContent, test_settings,
};
@@ -1456,14 +1450,9 @@ mod tests {
ClosePosition::Left
);
let worktree = ProjectWorktree {
project_id: 1,
worktree_id: WorktreeId(1),
};
store
.set_local_settings(
worktree,
WorktreeId::from_usize(1),
rel_path("root1").into(),
LocalSettingsKind::Settings,
Some(r#"{ "tab_size": 5 }"#),
@@ -1472,7 +1461,7 @@ mod tests {
.unwrap();
store
.set_local_settings(
worktree,
WorktreeId::from_usize(1),
rel_path("root1/subdir").into(),
LocalSettingsKind::Settings,
Some(r#"{ "preferred_line_length": 50 }"#),
@@ -1482,7 +1471,7 @@ mod tests {
store
.set_local_settings(
worktree,
WorktreeId::from_usize(1),
rel_path("root2").into(),
LocalSettingsKind::Settings,
Some(r#"{ "tab_size": 9, "auto_update": true}"#),
@@ -1492,7 +1481,7 @@ mod tests {
assert_eq!(
store.get::<DefaultLanguageSettings>(Some(SettingsLocation {
worktree: worktree,
worktree_id: WorktreeId::from_usize(1),
path: rel_path("root1/something"),
})),
&DefaultLanguageSettings {
@@ -1502,7 +1491,7 @@ mod tests {
);
assert_eq!(
store.get::<DefaultLanguageSettings>(Some(SettingsLocation {
worktree: worktree,
worktree_id: WorktreeId::from_usize(1),
path: rel_path("root1/subdir/something"),
})),
&DefaultLanguageSettings {
@@ -1512,7 +1501,7 @@ mod tests {
);
assert_eq!(
store.get::<DefaultLanguageSettings>(Some(SettingsLocation {
worktree,
worktree_id: WorktreeId::from_usize(1),
path: rel_path("root2/something"),
})),
&DefaultLanguageSettings {
@@ -1522,7 +1511,7 @@ mod tests {
);
assert_eq!(
store.get::<AutoUpdateSetting>(Some(SettingsLocation {
worktree,
worktree_id: WorktreeId::from_usize(1),
path: rel_path("root2/something")
})),
&AutoUpdateSetting { auto_update: false }
@@ -1946,10 +1935,11 @@ mod tests {
fn test_get_value_for_field_basic(cx: &mut App) {
let mut store = SettingsStore::new(cx, &test_settings());
store.register_setting::<DefaultLanguageSettings>();
store
.set_user_settings(r#"{"preferred_line_length": 0}"#, cx)
.unwrap();
let local = (ProjectWorktree::from_u64(0), RelPath::empty().into_arc());
let local = (WorktreeId::from_usize(0), RelPath::empty().into_arc());
store
.set_local_settings(
local.0,
@@ -2009,10 +1999,10 @@ mod tests {
store.register_setting::<DefaultLanguageSettings>();
store.register_setting::<AutoUpdateSetting>();
let local_1 = (ProjectWorktree::from_u64(0), RelPath::empty().into_arc());
let local_1 = (WorktreeId::from_usize(0), RelPath::empty().into_arc());
let local_1_child = (
ProjectWorktree::from_u64(0),
WorktreeId::from_usize(0),
RelPath::new(
std::path::Path::new("child1"),
util::paths::PathStyle::Posix,
@@ -2021,9 +2011,9 @@ mod tests {
.into_arc(),
);
let local_2 = (ProjectWorktree::from_u64(1), RelPath::empty().into_arc());
let local_2 = (WorktreeId::from_usize(1), RelPath::empty().into_arc());
let local_2_child = (
ProjectWorktree::from_u64(1),
WorktreeId::from_usize(1),
RelPath::new(
std::path::Path::new("child2"),
util::paths::PathStyle::Posix,
@@ -2142,12 +2132,12 @@ mod tests {
let mut store = SettingsStore::new(cx, &test_settings());
store.register_setting::<DefaultLanguageSettings>();
let wt0_root = (ProjectWorktree::from_u64(0), RelPath::empty().into_arc());
let wt0_child1 = (ProjectWorktree::from_u64(0), rel_path("child1").into_arc());
let wt0_child2 = (ProjectWorktree::from_u64(0), rel_path("child2").into_arc());
let wt0_root = (WorktreeId::from_usize(0), RelPath::empty().into_arc());
let wt0_child1 = (WorktreeId::from_usize(0), rel_path("child1").into_arc());
let wt0_child2 = (WorktreeId::from_usize(0), rel_path("child2").into_arc());
let wt1_root = (ProjectWorktree::from_u64(1), RelPath::empty().into_arc());
let wt1_subdir = (ProjectWorktree::from_u64(1), rel_path("subdir").into_arc());
let wt1_root = (WorktreeId::from_usize(1), RelPath::empty().into_arc());
let wt1_subdir = (WorktreeId::from_usize(1), rel_path("subdir").into_arc());
fn get(content: &SettingsContent) -> &Option<u32> {
&content.project.all_languages.defaults.preferred_line_length
@@ -2242,7 +2232,7 @@ mod tests {
assert_eq!(overrides, vec![]);
let wt0_deep_child = (
ProjectWorktree::from_u64(0),
WorktreeId::from_usize(0),
rel_path("child1/subdir").into_arc(),
);
store
@@ -2265,16 +2255,16 @@ mod tests {
#[test]
fn test_file_ord() {
let wt0_root =
SettingsFile::Project((ProjectWorktree::from_u64(0), RelPath::empty().into_arc()));
SettingsFile::Project((WorktreeId::from_usize(0), RelPath::empty().into_arc()));
let wt0_child1 =
SettingsFile::Project((ProjectWorktree::from_u64(0), rel_path("child1").into_arc()));
SettingsFile::Project((WorktreeId::from_usize(0), rel_path("child1").into_arc()));
let wt0_child2 =
SettingsFile::Project((ProjectWorktree::from_u64(0), rel_path("child2").into_arc()));
SettingsFile::Project((WorktreeId::from_usize(0), rel_path("child2").into_arc()));
let wt1_root =
SettingsFile::Project((ProjectWorktree::from_u64(1), RelPath::empty().into_arc()));
SettingsFile::Project((WorktreeId::from_usize(1), RelPath::empty().into_arc()));
let wt1_subdir =
SettingsFile::Project((ProjectWorktree::from_u64(1), rel_path("subdir").into_arc()));
SettingsFile::Project((WorktreeId::from_usize(1), rel_path("subdir").into_arc()));
let mut files = vec![
&wt1_root,

View File

@@ -649,10 +649,9 @@ impl TerminalView {
// When focused, check blinking settings and blink manager state
match TerminalSettings::get_global(cx).blinking {
TerminalBlink::Off => true,
TerminalBlink::TerminalControlled => {
!self.blinking_terminal_enabled || self.blink_manager.read(cx).visible()
TerminalBlink::On | TerminalBlink::TerminalControlled => {
self.blink_manager.read(cx).visible()
}
TerminalBlink::On => self.blink_manager.read(cx).visible(),
}
}

View File

@@ -384,7 +384,7 @@ impl TitleBar {
let worktree_id = worktree.read(cx).id();
let settings_location = Some(SettingsLocation {
worktree: worktree_id,
worktree_id,
path: RelPath::empty(),
});

View File

@@ -452,7 +452,7 @@ impl TitleBar {
.map(|worktree| {
let worktree = worktree.read(cx);
let settings_location = SettingsLocation {
worktree: worktree.id(),
worktree_id: worktree.id(),
path: RelPath::empty(),
};

View File

@@ -83,7 +83,7 @@ impl ActiveToolchain {
let (worktree_id, path) = active_file
.update(cx, |this, cx| {
this.file().and_then(|file| {
Some((file.project_worktree(cx), file.path().parent()?.into()))
Some((file.worktree_id(cx), file.path().parent()?.into()))
})
})
.ok()
@@ -118,7 +118,7 @@ impl ActiveToolchain {
) {
let editor = editor.read(cx);
if let Some((_, buffer, _)) = editor.active_excerpt(cx)
&& let Some(worktree_id) = buffer.read(cx).file().map(|file| file.project_worktree(cx))
&& let Some(worktree_id) = buffer.read(cx).file().map(|file| file.worktree_id(cx))
{
let subscription = cx.subscribe_in(
&buffer,

View File

@@ -581,7 +581,7 @@ impl ToolchainSelector {
let project = workspace.project().clone();
let language_name = buffer.read(cx).language()?.name();
let worktree_id = buffer.read(cx).file()?.project_worktree(cx);
let worktree_id = buffer.read(cx).file()?.worktree_id(cx);
let relative_path: Arc<RelPath> = buffer.read(cx).file()?.path().parent()?.into();
let worktree_root_path = project
.read(cx)

View File

@@ -16,7 +16,6 @@ use gpui::{
SharedString, Task, WeakEntity, Window,
};
use project::{Project, ProjectEntryId, ProjectPath};
use settings::ProjectWorktree;
pub use settings::{
ActivateOnClose, ClosePosition, RegisterSetting, Settings, SettingsLocation, ShowCloseButton,
ShowDiagnostics,
@@ -593,18 +592,11 @@ impl<T: Item> ItemHandle for Entity<T> {
result
}
fn workspace_settings<'a>(
&self,
project: &Entity<Project>,
cx: &'a App,
) -> &'a WorkspaceSettings {
fn workspace_settings<'a>(&self, cx: &'a App) -> &'a WorkspaceSettings {
if let Some(project_path) = self.project_path(cx) {
WorkspaceSettings::get(
Some(SettingsLocation {
worktree: ProjectWorktree {
worktree_id: project_path.worktree_id,
project_id: project.read(cx),
},
worktree_id: project_path.worktree_id,
path: &project_path.path,
}),
cx,
@@ -813,9 +805,7 @@ impl<T: Item> ItemHandle for Entity<T> {
if item.has_deleted_file(cx)
&& !item.is_dirty(cx)
&& item
.workspace_settings(&workspace.project, cx)
.close_on_file_delete
&& item.workspace_settings(cx).close_on_file_delete
{
let item_id = item.item_id();
let close_item_task = pane.update(cx, |pane, cx| {
@@ -845,7 +835,7 @@ impl<T: Item> ItemHandle for Entity<T> {
}
ItemEvent::Edit => {
let autosave = item.workspace_settings(&workspace.project, cx).autosave;
let autosave = item.workspace_settings(cx).autosave;
if let AutosaveSetting::AfterDelay { milliseconds } = autosave {
let delay = Duration::from_millis(milliseconds.0);
@@ -877,8 +867,7 @@ impl<T: Item> ItemHandle for Entity<T> {
window,
move |workspace, window, cx| {
if let Some(item) = weak_item.upgrade()
&& item.workspace_settings(&workspace.project, cx).autosave
== AutosaveSetting::OnFocusChange
&& item.workspace_settings(cx).autosave == AutosaveSetting::OnFocusChange
{
Pane::autosave_item(&item, workspace.project.clone(), window, cx)
.detach_and_log_err(cx);

View File

@@ -4596,7 +4596,7 @@ impl Workspace {
for (i, worktree) in project.visible_worktrees(cx).enumerate() {
let name = {
let settings_location = SettingsLocation {
worktree: worktree.read(cx).id(),
worktree_id: worktree.read(cx).id(),
path: RelPath::empty(),
};

View File

@@ -38,7 +38,8 @@ use rpc::{
AnyProtoClient,
proto::{self, split_worktree_update},
};
use settings::{ProjectWorktree, Settings, SettingsLocation, SettingsStore, WorktreeId};
pub use settings::WorktreeId;
use settings::{Settings, SettingsLocation, SettingsStore};
use smallvec::{SmallVec, smallvec};
use smol::channel::{self, Sender};
use std::{
@@ -157,7 +158,6 @@ pub struct RemoteWorktree {
#[derive(Clone)]
pub struct Snapshot {
id: WorktreeId,
project_id_for_settings: u64,
/// The absolute path of the worktree root.
abs_path: Arc<SanitizedPath>,
path_style: PathStyle,
@@ -353,7 +353,6 @@ impl EventEmitter<Event> for Worktree {}
impl Worktree {
pub async fn local(
path: impl Into<Arc<Path>>,
project_id_for_settings: u64,
visible: bool,
fs: Arc<dyn Fs>,
next_entry_id: Arc<AtomicUsize>,
@@ -393,7 +392,6 @@ impl Worktree {
git_repositories: Default::default(),
snapshot: Snapshot::new(
cx.entity_id().as_u64(),
project_id_for_settings,
abs_path
.file_name()
.and_then(|f| f.to_str())
@@ -407,8 +405,9 @@ impl Worktree {
executor: cx.background_executor().clone(),
};
let worktree_id = snapshot.id();
let settings_location = Some(SettingsLocation {
worktree: snapshot.project_worktree(),
worktree_id,
path: RelPath::empty(),
});
@@ -467,8 +466,7 @@ impl Worktree {
}
pub fn remote(
remote_project_id: u64,
project_id_for_settings: u64,
project_id: u64,
replica_id: ReplicaId,
worktree: proto::WorktreeMetadata,
client: AnyProtoClient,
@@ -478,7 +476,6 @@ impl Worktree {
cx.new(|cx: &mut Context<Self>| {
let snapshot = Snapshot::new(
worktree.id,
project_id_for_settings,
RelPath::from_proto(&worktree.root_name)
.unwrap_or_else(|_| RelPath::empty().into()),
Path::new(&worktree.abs_path).into(),
@@ -495,17 +492,14 @@ impl Worktree {
let worktree_id = snapshot.id();
let settings_location = Some(SettingsLocation {
worktree: settings::ProjectWorktree {
worktree_id,
project_id: project_id_for_settings,
},
worktree_id,
path: RelPath::empty(),
});
let settings = WorktreeSettings::get(settings_location, cx).clone();
let worktree = RemoteWorktree {
client,
project_id: remote_project_id,
project_id,
replica_id,
snapshot,
file_scan_inclusions: settings.parent_dir_scan_inclusions.clone(),
@@ -617,10 +611,7 @@ impl Worktree {
pub fn settings_location(&self, _: &Context<Self>) -> SettingsLocation<'static> {
SettingsLocation {
worktree: ProjectWorktree {
project_id: self.project_id_for_settings,
worktree_id: self.id,
},
worktree_id: self.id(),
path: RelPath::empty(),
}
}
@@ -2041,14 +2032,12 @@ impl RemoteWorktree {
impl Snapshot {
pub fn new(
id: u64,
project_id_for_settings: u64,
root_name: Arc<RelPath>,
abs_path: Arc<Path>,
path_style: PathStyle,
) -> Self {
Snapshot {
id: WorktreeId::from_usize(id as usize),
project_id_for_settings,
abs_path: SanitizedPath::from_arc(abs_path),
path_style,
root_char_bag: root_name
@@ -2069,13 +2058,6 @@ impl Snapshot {
self.id
}
pub fn project_worktree(&self) -> ProjectWorktree {
ProjectWorktree {
worktree_id: self.id,
project_id: self.project_id_for_settings,
}
}
// TODO:
// Consider the following:
//
@@ -3122,8 +3104,8 @@ impl language::File for File {
.unwrap_or_else(|| self.worktree.read(cx).root_name_str())
}
fn project_worktree(&self, cx: &App) -> ProjectWorktree {
self.worktree.read(cx).project_worktree()
fn worktree_id(&self, cx: &App) -> WorktreeId {
self.worktree.read(cx).id()
}
fn to_proto(&self, cx: &App) -> rpc::proto::File {

View File

@@ -41,7 +41,6 @@ async fn test_traversal(cx: &mut TestAppContext) {
let tree = Worktree::local(
Path::new("/root"),
0,
true,
fs,
Default::default(),
@@ -106,7 +105,6 @@ async fn test_circular_symlinks(cx: &mut TestAppContext) {
let tree = Worktree::local(
Path::new("/root"),
0,
true,
fs.clone(),
Default::default(),
@@ -206,7 +204,6 @@ async fn test_symlinks_pointing_outside(cx: &mut TestAppContext) {
let tree = Worktree::local(
Path::new("/root/dir1"),
0,
true,
fs.clone(),
Default::default(),
@@ -357,7 +354,6 @@ async fn test_renaming_case_only(cx: &mut TestAppContext) {
let tree = Worktree::local(
temp_root.path(),
0,
true,
fs.clone(),
Default::default(),
@@ -434,7 +430,6 @@ async fn test_open_gitignored_files(cx: &mut TestAppContext) {
let tree = Worktree::local(
Path::new("/root"),
0,
true,
fs.clone(),
Default::default(),
@@ -599,7 +594,6 @@ async fn test_dirs_no_longer_ignored(cx: &mut TestAppContext) {
let tree = Worktree::local(
Path::new("/root"),
0,
true,
fs.clone(),
Default::default(),
@@ -700,7 +694,6 @@ async fn test_write_file(cx: &mut TestAppContext) {
let worktree = Worktree::local(
dir.path(),
0,
true,
Arc::new(RealFs::new(None, cx.executor())),
Default::default(),
@@ -794,7 +787,6 @@ async fn test_file_scan_inclusions(cx: &mut TestAppContext) {
let tree = Worktree::local(
dir.path(),
0,
true,
Arc::new(RealFs::new(None, cx.executor())),
Default::default(),
@@ -860,7 +852,6 @@ async fn test_file_scan_exclusions_overrules_inclusions(cx: &mut TestAppContext)
let tree = Worktree::local(
dir.path(),
0,
true,
Arc::new(RealFs::new(None, cx.executor())),
Default::default(),
@@ -919,7 +910,6 @@ async fn test_file_scan_inclusions_reindexes_on_setting_change(cx: &mut TestAppC
});
let tree = Worktree::local(
dir.path(),
0,
true,
Arc::new(RealFs::new(None, cx.executor())),
Default::default(),
@@ -1005,7 +995,6 @@ async fn test_file_scan_exclusions(cx: &mut TestAppContext) {
let tree = Worktree::local(
dir.path(),
0,
true,
Arc::new(RealFs::new(None, cx.executor())),
Default::default(),
@@ -1087,7 +1076,6 @@ async fn test_hidden_files(cx: &mut TestAppContext) {
let tree = Worktree::local(
dir.path(),
0,
true,
Arc::new(RealFs::new(None, cx.executor())),
Default::default(),
@@ -1198,7 +1186,6 @@ async fn test_fs_events_in_exclusions(cx: &mut TestAppContext) {
let tree = Worktree::local(
dir.path(),
0,
true,
Arc::new(RealFs::new(None, cx.executor())),
Default::default(),
@@ -1310,7 +1297,6 @@ async fn test_fs_events_in_dot_git_worktree(cx: &mut TestAppContext) {
let tree = Worktree::local(
dot_git_worktree_dir.clone(),
0,
true,
Arc::new(RealFs::new(None, cx.executor())),
Default::default(),
@@ -1349,7 +1335,6 @@ async fn test_create_directory_during_initial_scan(cx: &mut TestAppContext) {
let tree = Worktree::local(
"/root".as_ref(),
0,
true,
fs,
Default::default(),
@@ -1418,7 +1403,6 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
let tree_fake = Worktree::local(
"/root".as_ref(),
0,
true,
fs_fake,
Default::default(),
@@ -1460,7 +1444,6 @@ async fn test_create_dir_all_on_create_entry(cx: &mut TestAppContext) {
let tree_real = Worktree::local(
temp_root.path(),
0,
true,
fs_real,
Default::default(),
@@ -1572,7 +1555,6 @@ async fn test_random_worktree_operations_during_initial_scan(
let worktree = Worktree::local(
root_dir,
0,
true,
fs.clone(),
Default::default(),
@@ -1663,7 +1645,6 @@ async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng)
let worktree = Worktree::local(
root_dir,
0,
true,
fs.clone(),
Default::default(),
@@ -1736,7 +1717,6 @@ async fn test_random_worktree_changes(cx: &mut TestAppContext, mut rng: StdRng)
{
let new_worktree = Worktree::local(
root_dir,
0,
true,
fs.clone(),
Default::default(),
@@ -2049,7 +2029,6 @@ async fn test_private_single_file_worktree(cx: &mut TestAppContext) {
.await;
let tree = Worktree::local(
Path::new("/.env"),
0,
true,
fs.clone(),
Default::default(),
@@ -2082,7 +2061,6 @@ async fn test_repository_above_root(executor: BackgroundExecutor, cx: &mut TestA
.await;
let worktree = Worktree::local(
path!("/root/subproject").as_ref(),
0,
true,
fs.clone(),
Arc::default(),
@@ -2160,7 +2138,6 @@ async fn test_global_gitignore(executor: BackgroundExecutor, cx: &mut TestAppCon
.await;
let worktree = Worktree::local(
home.join("project"),
0,
true,
fs.clone(),
Arc::default(),

View File

@@ -77,7 +77,7 @@ impl EditPredictionProvider for ZetaEditPredictionProvider {
) -> bool {
let zeta = self.zeta.read(cx);
if zeta.edit_prediction_model == ZetaEditPredictionModel::Sweep {
zeta.sweep_ai.api_token.is_some()
zeta.sweep_api_token.is_some()
} else {
true
}

View File

@@ -1,269 +1,10 @@
use anyhow::{Context as _, Result};
use cloud_llm_client::predict_edits_v3::Event;
use futures::AsyncReadExt as _;
use gpui::{
App, AppContext as _, Entity, Task,
http_client::{self, AsyncBody, Method},
};
use language::{Buffer, BufferSnapshot, Point, ToOffset as _, ToPoint as _};
use lsp::DiagnosticSeverity;
use project::{Project, ProjectPath};
use std::fmt;
use std::{path::Path, sync::Arc};
use serde::{Deserialize, Serialize};
use std::{
collections::VecDeque,
fmt::{self, Write as _},
ops::Range,
path::Path,
sync::Arc,
time::Instant,
};
use util::ResultExt as _;
use crate::{EditPrediction, EditPredictionId, EditPredictionInputs};
const SWEEP_API_URL: &str = "https://autocomplete.sweep.dev/backend/next_edit_autocomplete";
pub struct SweepAi {
pub api_token: Option<String>,
pub debug_info: Arc<str>,
}
impl SweepAi {
pub fn new(cx: &App) -> Self {
SweepAi {
api_token: std::env::var("SWEEP_AI_TOKEN")
.context("No SWEEP_AI_TOKEN environment variable set")
.log_err(),
debug_info: debug_info(cx),
}
}
pub fn request_prediction_with_sweep(
&self,
project: &Entity<Project>,
active_buffer: &Entity<Buffer>,
snapshot: BufferSnapshot,
position: language::Anchor,
events: Vec<Arc<Event>>,
recent_paths: &VecDeque<ProjectPath>,
diagnostic_search_range: Range<Point>,
cx: &mut App,
) -> Task<Result<Option<EditPrediction>>> {
let debug_info = self.debug_info.clone();
let Some(api_token) = self.api_token.clone() else {
return Task::ready(Ok(None));
};
let full_path: Arc<Path> = snapshot
.file()
.map(|file| file.full_path(cx))
.unwrap_or_else(|| "untitled".into())
.into();
let project_file = project::File::from_dyn(snapshot.file());
let repo_name = project_file
.map(|file| file.worktree.read(cx).root_name_str())
.unwrap_or("untitled")
.into();
let offset = position.to_offset(&snapshot);
let recent_buffers = recent_paths.iter().cloned();
let http_client = cx.http_client();
let recent_buffer_snapshots = recent_buffers
.filter_map(|project_path| {
let buffer = project.read(cx).get_open_buffer(&project_path, cx)?;
if active_buffer == &buffer {
None
} else {
Some(buffer.read(cx).snapshot())
}
})
.take(3)
.collect::<Vec<_>>();
let cursor_point = position.to_point(&snapshot);
let buffer_snapshotted_at = Instant::now();
let result = cx.background_spawn(async move {
let text = snapshot.text();
let mut recent_changes = String::new();
for event in &events {
write_event(event.as_ref(), &mut recent_changes).unwrap();
}
let mut file_chunks = recent_buffer_snapshots
.into_iter()
.map(|snapshot| {
let end_point = Point::new(30, 0).min(snapshot.max_point());
FileChunk {
content: snapshot.text_for_range(Point::zero()..end_point).collect(),
file_path: snapshot
.file()
.map(|f| f.path().as_unix_str())
.unwrap_or("untitled")
.to_string(),
start_line: 0,
end_line: end_point.row as usize,
timestamp: snapshot.file().and_then(|file| {
Some(
file.disk_state()
.mtime()?
.to_seconds_and_nanos_for_persistence()?
.0,
)
}),
}
})
.collect::<Vec<_>>();
let diagnostic_entries = snapshot.diagnostics_in_range(diagnostic_search_range, false);
let mut diagnostic_content = String::new();
let mut diagnostic_count = 0;
for entry in diagnostic_entries {
let start_point: Point = entry.range.start;
let severity = match entry.diagnostic.severity {
DiagnosticSeverity::ERROR => "error",
DiagnosticSeverity::WARNING => "warning",
DiagnosticSeverity::INFORMATION => "info",
DiagnosticSeverity::HINT => "hint",
_ => continue,
};
diagnostic_count += 1;
writeln!(
&mut diagnostic_content,
"{} at line {}: {}",
severity,
start_point.row + 1,
entry.diagnostic.message
)?;
}
if !diagnostic_content.is_empty() {
file_chunks.push(FileChunk {
file_path: format!("Diagnostics for {}", full_path.display()),
start_line: 0,
end_line: diagnostic_count,
content: diagnostic_content,
timestamp: None,
});
}
let request_body = AutocompleteRequest {
debug_info,
repo_name,
file_path: full_path.clone(),
file_contents: text.clone(),
original_file_contents: text,
cursor_position: offset,
recent_changes: recent_changes.clone(),
changes_above_cursor: true,
multiple_suggestions: false,
branch: None,
file_chunks,
retrieval_chunks: vec![],
recent_user_actions: vec![],
// TODO
privacy_mode_enabled: false,
};
let mut buf: Vec<u8> = Vec::new();
let writer = brotli::CompressorWriter::new(&mut buf, 4096, 11, 22);
serde_json::to_writer(writer, &request_body)?;
let body: AsyncBody = buf.into();
let inputs = EditPredictionInputs {
events,
included_files: vec![cloud_llm_client::predict_edits_v3::IncludedFile {
path: full_path.clone(),
max_row: cloud_llm_client::predict_edits_v3::Line(snapshot.max_point().row),
excerpts: vec![cloud_llm_client::predict_edits_v3::Excerpt {
start_line: cloud_llm_client::predict_edits_v3::Line(0),
text: request_body.file_contents.into(),
}],
}],
cursor_point: cloud_llm_client::predict_edits_v3::Point {
column: cursor_point.column,
line: cloud_llm_client::predict_edits_v3::Line(cursor_point.row),
},
cursor_path: full_path.clone(),
};
let request = http_client::Request::builder()
.uri(SWEEP_API_URL)
.header("Content-Type", "application/json")
.header("Authorization", format!("Bearer {}", api_token))
.header("Connection", "keep-alive")
.header("Content-Encoding", "br")
.method(Method::POST)
.body(body)?;
let mut response = http_client.send(request).await?;
let mut body: Vec<u8> = Vec::new();
response.body_mut().read_to_end(&mut body).await?;
let response_received_at = Instant::now();
if !response.status().is_success() {
anyhow::bail!(
"Request failed with status: {:?}\nBody: {}",
response.status(),
String::from_utf8_lossy(&body),
);
};
let response: AutocompleteResponse = serde_json::from_slice(&body)?;
let old_text = snapshot
.text_for_range(response.start_index..response.end_index)
.collect::<String>();
let edits = language::text_diff(&old_text, &response.completion)
.into_iter()
.map(|(range, text)| {
(
snapshot.anchor_after(response.start_index + range.start)
..snapshot.anchor_before(response.start_index + range.end),
text,
)
})
.collect::<Vec<_>>();
anyhow::Ok((
response.autocomplete_id,
edits,
snapshot,
response_received_at,
inputs,
))
});
let buffer = active_buffer.clone();
cx.spawn(async move |cx| {
let (id, edits, old_snapshot, response_received_at, inputs) = result.await?;
anyhow::Ok(
EditPrediction::new(
EditPredictionId(id.into()),
&buffer,
&old_snapshot,
edits.into(),
buffer_snapshotted_at,
response_received_at,
inputs,
cx,
)
.await,
)
})
}
}
#[derive(Debug, Clone, Serialize)]
struct AutocompleteRequest {
pub struct AutocompleteRequest {
pub debug_info: Arc<str>,
pub repo_name: String,
pub branch: Option<String>,
@@ -281,7 +22,7 @@ struct AutocompleteRequest {
}
#[derive(Debug, Clone, Serialize)]
struct FileChunk {
pub struct FileChunk {
pub file_path: String,
pub start_line: usize,
pub end_line: usize,
@@ -290,7 +31,7 @@ struct FileChunk {
}
#[derive(Debug, Clone, Serialize)]
struct RetrievalChunk {
pub struct RetrievalChunk {
pub file_path: String,
pub start_line: usize,
pub end_line: usize,
@@ -299,7 +40,7 @@ struct RetrievalChunk {
}
#[derive(Debug, Clone, Serialize)]
struct UserAction {
pub struct UserAction {
pub action_type: ActionType,
pub line_number: usize,
pub offset: usize,
@@ -310,7 +51,7 @@ struct UserAction {
#[allow(dead_code)]
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize)]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
enum ActionType {
pub enum ActionType {
CursorMovement,
InsertChar,
DeleteChar,
@@ -319,7 +60,7 @@ enum ActionType {
}
#[derive(Debug, Clone, Deserialize)]
struct AutocompleteResponse {
pub struct AutocompleteResponse {
pub autocomplete_id: String,
pub start_index: usize,
pub end_index: usize,
@@ -339,7 +80,7 @@ struct AutocompleteResponse {
#[allow(dead_code)]
#[derive(Debug, Clone, Deserialize)]
struct AdditionalCompletion {
pub struct AdditionalCompletion {
pub start_index: usize,
pub end_index: usize,
pub completion: String,
@@ -349,7 +90,7 @@ struct AdditionalCompletion {
pub finish_reason: Option<String>,
}
fn write_event(
pub(crate) fn write_event(
event: &cloud_llm_client::predict_edits_v3::Event,
f: &mut impl fmt::Write,
) -> fmt::Result {
@@ -374,7 +115,7 @@ fn write_event(
}
}
fn debug_info(cx: &gpui::App) -> Arc<str> {
pub(crate) fn debug_info(cx: &gpui::App) -> Arc<str> {
format!(
"Zed v{version} ({sha}) - OS: {os} - Zed v{version}",
version = release_channel::AppVersion::global(cx),

View File

@@ -30,6 +30,7 @@ use language::{
};
use language::{BufferSnapshot, OffsetRangeExt};
use language_model::{LlmApiToken, RefreshLlmTokenListener};
use lsp::DiagnosticSeverity;
use open_ai::FunctionDefinition;
use project::{DisableAiSettings, Project, ProjectPath, WorktreeId};
use release_channel::AppVersion;
@@ -41,6 +42,7 @@ use std::collections::{VecDeque, hash_map};
use telemetry_events::EditPredictionRating;
use workspace::Workspace;
use std::fmt::Write as _;
use std::ops::Range;
use std::path::Path;
use std::rc::Rc;
@@ -78,7 +80,6 @@ use crate::rate_prediction_modal::{
NextEdit, PreviousEdit, RatePredictionsModal, ThumbsDownActivePrediction,
ThumbsUpActivePrediction,
};
use crate::sweep_ai::SweepAi;
use crate::zeta1::request_prediction_with_zeta1;
pub use provider::ZetaEditPredictionProvider;
@@ -170,7 +171,7 @@ impl FeatureFlag for Zeta2FeatureFlag {
const NAME: &'static str = "zeta2";
fn enabled_for_staff() -> bool {
true
false
}
}
@@ -191,7 +192,8 @@ pub struct Zeta {
#[cfg(feature = "eval-support")]
eval_cache: Option<Arc<dyn EvalCache>>,
edit_prediction_model: ZetaEditPredictionModel,
sweep_ai: SweepAi,
sweep_api_token: Option<String>,
sweep_ai_debug_info: Arc<str>,
data_collection_choice: DataCollectionChoice,
rejected_predictions: Vec<EditPredictionRejection>,
reject_predictions_tx: mpsc::UnboundedSender<()>,
@@ -200,7 +202,7 @@ pub struct Zeta {
rated_predictions: HashSet<EditPredictionId>,
}
#[derive(Copy, Clone, Default, PartialEq, Eq)]
#[derive(Default, PartialEq, Eq)]
pub enum ZetaEditPredictionModel {
#[default]
Zeta1,
@@ -497,8 +499,11 @@ impl Zeta {
#[cfg(feature = "eval-support")]
eval_cache: None,
edit_prediction_model: ZetaEditPredictionModel::Zeta2,
sweep_ai: SweepAi::new(cx),
sweep_api_token: std::env::var("SWEEP_AI_TOKEN")
.context("No SWEEP_AI_TOKEN environment variable set")
.log_err(),
data_collection_choice,
sweep_ai_debug_info: sweep_ai::debug_info(cx),
rejected_predictions: Vec::new(),
reject_predictions_debounce_task: None,
reject_predictions_tx: reject_tx,
@@ -512,7 +517,7 @@ impl Zeta {
}
pub fn has_sweep_api_token(&self) -> bool {
self.sweep_ai.api_token.is_some()
self.sweep_api_token.is_some()
}
#[cfg(feature = "eval-support")]
@@ -638,9 +643,7 @@ impl Zeta {
}
}
project::Event::DiagnosticsUpdated { .. } => {
if cx.has_flag::<Zeta2FeatureFlag>() {
self.refresh_prediction_from_diagnostics(project, cx);
}
self.refresh_prediction_from_diagnostics(project, cx);
}
_ => (),
}
@@ -1123,6 +1126,7 @@ impl Zeta {
zeta_project.next_pending_prediction_id += 1;
let last_request = zeta_project.last_prediction_refresh;
// TODO report cancelled requests like in zeta1
let task = cx.spawn(async move |this, cx| {
if let Some((last_entity, last_timestamp)) = last_request
&& throttle_entity == last_entity
@@ -1132,12 +1136,6 @@ impl Zeta {
cx.background_executor().timer(timeout).await;
}
this.update(cx, |this, cx| {
this.get_or_init_zeta_project(&project, cx)
.last_prediction_refresh = Some((throttle_entity, Instant::now()));
})
.ok();
let edit_prediction_id = do_refresh(this.clone(), cx).await.log_err().flatten();
// When a prediction completes, remove it from the pending list, and cancel
@@ -1185,77 +1183,249 @@ impl Zeta {
position: language::Anchor,
cx: &mut Context<Self>,
) -> Task<Result<Option<EditPrediction>>> {
self.request_prediction_internal(
project.clone(),
active_buffer.clone(),
position,
cx.has_flag::<Zeta2FeatureFlag>(),
cx,
)
match self.edit_prediction_model {
ZetaEditPredictionModel::Zeta1 => {
request_prediction_with_zeta1(self, project, active_buffer, position, cx)
}
ZetaEditPredictionModel::Zeta2 => {
self.request_prediction_with_zeta2(project, active_buffer, position, cx)
}
ZetaEditPredictionModel::Sweep => {
self.request_prediction_with_sweep(project, active_buffer, position, true, cx)
}
}
}
fn request_prediction_internal(
fn request_prediction_with_sweep(
&mut self,
project: Entity<Project>,
active_buffer: Entity<Buffer>,
project: &Entity<Project>,
active_buffer: &Entity<Buffer>,
position: language::Anchor,
allow_jump: bool,
cx: &mut Context<Self>,
) -> Task<Result<Option<EditPrediction>>> {
let snapshot = active_buffer.read(cx).snapshot();
let debug_info = self.sweep_ai_debug_info.clone();
let Some(api_token) = self.sweep_api_token.clone() else {
return Task::ready(Ok(None));
};
let full_path: Arc<Path> = snapshot
.file()
.map(|file| file.full_path(cx))
.unwrap_or_else(|| "untitled".into())
.into();
let project_file = project::File::from_dyn(snapshot.file());
let repo_name = project_file
.map(|file| file.worktree.read(cx).root_name_str())
.unwrap_or("untitled")
.into();
let offset = position.to_offset(&snapshot);
let project_state = self.get_or_init_zeta_project(project, cx);
let events = project_state.events(cx);
let has_events = !events.is_empty();
let recent_buffers = project_state.recent_paths.iter().cloned();
let http_client = cx.http_client();
let recent_buffer_snapshots = recent_buffers
.filter_map(|project_path| {
let buffer = project.read(cx).get_open_buffer(&project_path, cx)?;
if active_buffer == &buffer {
None
} else {
Some(buffer.read(cx).snapshot())
}
})
.take(3)
.collect::<Vec<_>>();
const DIAGNOSTIC_LINES_RANGE: u32 = 20;
self.get_or_init_zeta_project(&project, cx);
let zeta_project = self.projects.get(&project.entity_id()).unwrap();
let events = zeta_project.events(cx);
let has_events = !events.is_empty();
let snapshot = active_buffer.read(cx).snapshot();
let cursor_point = position.to_point(&snapshot);
let diagnostic_search_start = cursor_point.row.saturating_sub(DIAGNOSTIC_LINES_RANGE);
let diagnostic_search_end = cursor_point.row + DIAGNOSTIC_LINES_RANGE;
let diagnostic_search_range =
Point::new(diagnostic_search_start, 0)..Point::new(diagnostic_search_end, 0);
let buffer_snapshotted_at = Instant::now();
let task = match self.edit_prediction_model {
ZetaEditPredictionModel::Zeta1 => request_prediction_with_zeta1(
self,
&project,
&active_buffer,
snapshot.clone(),
position,
events,
cx,
),
ZetaEditPredictionModel::Zeta2 => self.request_prediction_with_zeta2(
&project,
&active_buffer,
snapshot.clone(),
position,
events,
cx,
),
ZetaEditPredictionModel::Sweep => self.sweep_ai.request_prediction_with_sweep(
&project,
&active_buffer,
snapshot.clone(),
position,
events,
&zeta_project.recent_paths,
diagnostic_search_range.clone(),
cx,
),
};
let result = cx.background_spawn({
let snapshot = snapshot.clone();
let diagnostic_search_range = diagnostic_search_range.clone();
async move {
let text = snapshot.text();
let mut recent_changes = String::new();
for event in &events {
sweep_ai::write_event(event.as_ref(), &mut recent_changes).unwrap();
}
let mut file_chunks = recent_buffer_snapshots
.into_iter()
.map(|snapshot| {
let end_point = Point::new(30, 0).min(snapshot.max_point());
sweep_ai::FileChunk {
content: snapshot.text_for_range(Point::zero()..end_point).collect(),
file_path: snapshot
.file()
.map(|f| f.path().as_unix_str())
.unwrap_or("untitled")
.to_string(),
start_line: 0,
end_line: end_point.row as usize,
timestamp: snapshot.file().and_then(|file| {
Some(
file.disk_state()
.mtime()?
.to_seconds_and_nanos_for_persistence()?
.0,
)
}),
}
})
.collect::<Vec<_>>();
let diagnostic_entries =
snapshot.diagnostics_in_range(diagnostic_search_range, false);
let mut diagnostic_content = String::new();
let mut diagnostic_count = 0;
for entry in diagnostic_entries {
let start_point: Point = entry.range.start;
let severity = match entry.diagnostic.severity {
DiagnosticSeverity::ERROR => "error",
DiagnosticSeverity::WARNING => "warning",
DiagnosticSeverity::INFORMATION => "info",
DiagnosticSeverity::HINT => "hint",
_ => continue,
};
diagnostic_count += 1;
writeln!(
&mut diagnostic_content,
"{} at line {}: {}",
severity,
start_point.row + 1,
entry.diagnostic.message
)?;
}
if !diagnostic_content.is_empty() {
file_chunks.push(sweep_ai::FileChunk {
file_path: format!("Diagnostics for {}", full_path.display()),
start_line: 0,
end_line: diagnostic_count,
content: diagnostic_content,
timestamp: None,
});
}
let request_body = sweep_ai::AutocompleteRequest {
debug_info,
repo_name,
file_path: full_path.clone(),
file_contents: text.clone(),
original_file_contents: text,
cursor_position: offset,
recent_changes: recent_changes.clone(),
changes_above_cursor: true,
multiple_suggestions: false,
branch: None,
file_chunks,
retrieval_chunks: vec![],
recent_user_actions: vec![],
// TODO
privacy_mode_enabled: false,
};
let mut buf: Vec<u8> = Vec::new();
let writer = brotli::CompressorWriter::new(&mut buf, 4096, 11, 22);
serde_json::to_writer(writer, &request_body)?;
let body: AsyncBody = buf.into();
let inputs = EditPredictionInputs {
events,
included_files: vec![cloud_llm_client::predict_edits_v3::IncludedFile {
path: full_path.clone(),
max_row: cloud_llm_client::predict_edits_v3::Line(snapshot.max_point().row),
excerpts: vec![cloud_llm_client::predict_edits_v3::Excerpt {
start_line: cloud_llm_client::predict_edits_v3::Line(0),
text: request_body.file_contents.into(),
}],
}],
cursor_point: cloud_llm_client::predict_edits_v3::Point {
column: cursor_point.column,
line: cloud_llm_client::predict_edits_v3::Line(cursor_point.row),
},
cursor_path: full_path.clone(),
};
const SWEEP_API_URL: &str =
"https://autocomplete.sweep.dev/backend/next_edit_autocomplete";
let request = http_client::Request::builder()
.uri(SWEEP_API_URL)
.header("Content-Type", "application/json")
.header("Authorization", format!("Bearer {}", api_token))
.header("Connection", "keep-alive")
.header("Content-Encoding", "br")
.method(Method::POST)
.body(body)?;
let mut response = http_client.send(request).await?;
let mut body: Vec<u8> = Vec::new();
response.body_mut().read_to_end(&mut body).await?;
let response_received_at = Instant::now();
if !response.status().is_success() {
anyhow::bail!(
"Request failed with status: {:?}\nBody: {}",
response.status(),
String::from_utf8_lossy(&body),
);
};
let response: sweep_ai::AutocompleteResponse = serde_json::from_slice(&body)?;
let old_text = snapshot
.text_for_range(response.start_index..response.end_index)
.collect::<String>();
let edits = language::text_diff(&old_text, &response.completion)
.into_iter()
.map(|(range, text)| {
(
snapshot.anchor_after(response.start_index + range.start)
..snapshot.anchor_before(response.start_index + range.end),
text,
)
})
.collect::<Vec<_>>();
anyhow::Ok((
response.autocomplete_id,
edits,
snapshot,
response_received_at,
inputs,
))
}
});
let buffer = active_buffer.clone();
let project = project.clone();
let active_buffer = active_buffer.clone();
cx.spawn(async move |this, cx| {
let prediction = task
.await?
.filter(|prediction| !prediction.edits.is_empty());
let (id, edits, old_snapshot, response_received_at, inputs) = result.await?;
if prediction.is_none() && allow_jump {
let cursor_point = position.to_point(&snapshot);
if edits.is_empty() {
if has_events
&& allow_jump
&& let Some((jump_buffer, jump_position)) = Self::next_diagnostic_location(
active_buffer.clone(),
active_buffer,
&snapshot,
diagnostic_search_range,
cursor_point,
@@ -1266,9 +1436,9 @@ impl Zeta {
{
return this
.update(cx, |this, cx| {
this.request_prediction_internal(
project,
jump_buffer,
this.request_prediction_with_sweep(
&project,
&jump_buffer,
jump_position,
false,
cx,
@@ -1280,7 +1450,19 @@ impl Zeta {
return anyhow::Ok(None);
}
Ok(prediction)
anyhow::Ok(
EditPrediction::new(
EditPredictionId(id.into()),
&buffer,
&old_snapshot,
edits.into(),
buffer_snapshotted_at,
response_received_at,
inputs,
cx,
)
.await,
)
})
}
@@ -1367,9 +1549,7 @@ impl Zeta {
&mut self,
project: &Entity<Project>,
active_buffer: &Entity<Buffer>,
active_snapshot: BufferSnapshot,
position: language::Anchor,
events: Vec<Arc<Event>>,
cx: &mut Context<Self>,
) -> Task<Result<Option<EditPrediction>>> {
let project_state = self.projects.get(&project.entity_id());
@@ -1381,6 +1561,7 @@ impl Zeta {
.map(|syntax_index| syntax_index.read_with(cx, |index, _cx| index.state().clone()))
});
let options = self.options.clone();
let active_snapshot = active_buffer.read(cx).snapshot();
let buffer_snapshotted_at = Instant::now();
let Some(excerpt_path) = active_snapshot
.file()
@@ -1398,6 +1579,10 @@ impl Zeta {
.collect::<Vec<_>>();
let debug_tx = self.debug_tx.clone();
let events = project_state
.map(|state| state.events(cx))
.unwrap_or_default();
let diagnostics = active_snapshot.diagnostic_sets().clone();
let file = active_buffer.read(cx).file();
@@ -2357,7 +2542,7 @@ impl Zeta {
};
zeta_project
.license_detection_watchers
.get(&file.project_worktree(cx))
.get(&file.worktree_id(cx))
.as_ref()
.is_some_and(|watcher| watcher.is_project_open_source())
}

View File

@@ -32,17 +32,19 @@ pub(crate) fn request_prediction_with_zeta1(
zeta: &mut Zeta,
project: &Entity<Project>,
buffer: &Entity<Buffer>,
snapshot: BufferSnapshot,
position: language::Anchor,
events: Vec<Arc<Event>>,
cx: &mut Context<Zeta>,
) -> Task<Result<Option<EditPrediction>>> {
let buffer = buffer.clone();
let buffer_snapshotted_at = Instant::now();
let snapshot = buffer.read(cx).snapshot();
let client = zeta.client.clone();
let llm_token = zeta.llm_token.clone();
let app_version = AppVersion::global(cx);
let zeta_project = zeta.get_or_init_zeta_project(project, cx);
let events = Arc::new(zeta_project.events(cx));
let (git_info, can_collect_file) = if let Some(file) = snapshot.file() {
let can_collect_file = zeta.can_collect_file(project, file, cx);
let git_info = if can_collect_file {

View File

@@ -42,48 +42,43 @@ actions!(
pub fn init(cx: &mut App) {
cx.observe_new(move |workspace: &mut Workspace, _, _cx| {
workspace.register_action_renderer(|div, _, _, cx| {
let has_flag = cx.has_flag::<Zeta2FeatureFlag>();
div.when(has_flag, |div| {
div.on_action(
cx.listener(move |workspace, _: &OpenZeta2Inspector, window, cx| {
let project = workspace.project();
workspace.split_item(
SplitDirection::Right,
Box::new(cx.new(|cx| {
Zeta2Inspector::new(
&project,
workspace.client(),
workspace.user_store(),
window,
cx,
)
})),
window,
cx,
)
}),
)
.on_action(cx.listener(
move |workspace, _: &OpenZeta2ContextView, window, cx| {
let project = workspace.project();
workspace.split_item(
SplitDirection::Right,
Box::new(cx.new(|cx| {
Zeta2ContextView::new(
project.clone(),
workspace.client(),
workspace.user_store(),
window,
cx,
)
})),
window,
cx,
);
},
))
})
workspace.register_action(move |workspace, _: &OpenZeta2Inspector, window, cx| {
let project = workspace.project();
workspace.split_item(
SplitDirection::Right,
Box::new(cx.new(|cx| {
Zeta2Inspector::new(
&project,
workspace.client(),
workspace.user_store(),
window,
cx,
)
})),
window,
cx,
);
});
})
.detach();
cx.observe_new(move |workspace: &mut Workspace, _, _cx| {
workspace.register_action(move |workspace, _: &OpenZeta2ContextView, window, cx| {
let project = workspace.project();
workspace.split_item(
SplitDirection::Right,
Box::new(cx.new(|cx| {
Zeta2ContextView::new(
project.clone(),
workspace.client(),
workspace.user_store(),
window,
cx,
)
})),
window,
cx,
);
});
})
.detach();

View File

@@ -1,5 +1,5 @@
use std::{
collections::HashMap,
collections::{BTreeSet, HashMap},
io::{IsTerminal, Write},
sync::Arc,
};
@@ -125,10 +125,21 @@ fn write_aggregated_scores(
.peekable();
let has_edit_predictions = edit_predictions.peek().is_some();
let aggregated_result = EvaluationResult {
context: Scores::aggregate(successful.iter().map(|r| &r.context)),
edit_prediction: has_edit_predictions.then(|| Scores::aggregate(edit_predictions)),
prompt_len: successful.iter().map(|r| r.prompt_len).sum::<usize>() / successful.len(),
generated_len: successful.iter().map(|r| r.generated_len).sum::<usize>()
/ successful.len(),
context_lines_found_in_context: successful
.iter()
.map(|r| r.context_lines_found_in_context)
.sum::<usize>()
/ successful.len(),
context_lines_in_expected_patch: successful
.iter()
.map(|r| r.context_lines_in_expected_patch)
.sum::<usize>()
/ successful.len(),
};
writeln!(w, "\n{}", "-".repeat(80))?;
@@ -250,8 +261,11 @@ fn write_eval_result(
#[derive(Debug, Default)]
pub struct EvaluationResult {
pub edit_prediction: Option<Scores>,
pub context: Scores,
pub prompt_len: usize,
pub generated_len: usize,
pub context_lines_in_expected_patch: usize,
pub context_lines_found_in_context: usize,
}
#[derive(Default, Debug)]
@@ -349,6 +363,14 @@ impl std::fmt::Display for EvaluationResult {
impl EvaluationResult {
fn fmt_markdown(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(
f,
r#"
### Context Scores
{}
"#,
self.context.to_markdown(),
)?;
if let Some(prediction) = &self.edit_prediction {
write!(
f,
@@ -365,18 +387,34 @@ impl EvaluationResult {
writeln!(f, "### Scores\n")?;
writeln!(
f,
" Prompt Generated TP FP FN Precision Recall F1"
" Prompt Generated RetrievedContext PatchContext TP FP FN Precision Recall F1"
)?;
writeln!(
f,
"───────────────────────────────────────────────────────────────────────────────────────────────"
"─────────────────────────────────────────────────────────────────────────────────────────────────────────────────────"
)?;
writeln!(
f,
"Context Retrieval {:<7} {:<9} {:<16} {:<16} {:<6} {:<6} {:<6} {:>10.2} {:>7.2} {:>7.2}",
"",
"",
"",
"",
self.context.true_positives,
self.context.false_positives,
self.context.false_negatives,
self.context.precision() * 100.0,
self.context.recall() * 100.0,
self.context.f1_score() * 100.0
)?;
if let Some(edit_prediction) = &self.edit_prediction {
writeln!(
f,
"Edit Prediction {:<7} {:<9} {:<6} {:<6} {:<6} {:>9.2} {:>8.2} {:>7.2}",
"Edit Prediction {:<7} {:<9} {:<16} {:<16} {:<6} {:<6} {:<6} {:>10.2} {:>7.2} {:>7.2}",
self.prompt_len,
self.generated_len,
self.context_lines_found_in_context,
self.context_lines_in_expected_patch,
edit_prediction.true_positives,
edit_prediction.false_positives,
edit_prediction.false_negatives,
@@ -396,6 +434,53 @@ fn evaluate(example: &Example, preds: &PredictionDetails, predict: bool) -> Eval
..Default::default()
};
let actual_context_lines: HashSet<_> = preds
.excerpts
.iter()
.flat_map(|excerpt| {
excerpt
.text
.lines()
.map(|line| format!("{}: {line}", excerpt.path.display()))
})
.collect();
let mut false_positive_lines = actual_context_lines.clone();
for entry in &example.expected_context {
let mut best_alternative_score: Option<Scores> = None;
for alternative in &entry.alternatives {
let expected: HashSet<_> = alternative
.excerpts
.iter()
.flat_map(|excerpt| {
excerpt
.text
.lines()
.map(|line| format!("{}: {line}", excerpt.path.display()))
})
.collect();
let scores = Scores::new(&expected, &actual_context_lines);
false_positive_lines.retain(|line| !expected.contains(line));
if best_alternative_score
.as_ref()
.is_none_or(|best| scores.recall() > best.recall())
{
best_alternative_score = Some(scores);
}
}
let best_alternative = best_alternative_score.unwrap_or_default();
eval_result.context.false_negatives += best_alternative.false_negatives;
eval_result.context.true_positives += best_alternative.true_positives;
}
eval_result.context.false_positives = false_positive_lines.len();
if predict {
// todo: alternatives for patches
let expected_patch = example
@@ -408,6 +493,25 @@ fn evaluate(example: &Example, preds: &PredictionDetails, predict: bool) -> Eval
.filter(|line| matches!(line, DiffLine::Addition(_) | DiffLine::Deletion(_)))
.map(|line| line.to_string())
.collect();
let expected_context_lines = expected_patch
.iter()
.filter_map(|line| {
if let DiffLine::Context(str) = line {
Some(String::from(*str))
} else {
None
}
})
.collect::<BTreeSet<_>>();
let actual_context_lines = preds
.excerpts
.iter()
.flat_map(|excerpt| excerpt.text.lines().map(ToOwned::to_owned))
.collect::<BTreeSet<_>>();
let matched = expected_context_lines
.intersection(&actual_context_lines)
.count();
let actual_patch_lines = preds
.diff
@@ -418,6 +522,8 @@ fn evaluate(example: &Example, preds: &PredictionDetails, predict: bool) -> Eval
.collect();
eval_result.edit_prediction = Some(Scores::new(&expected_patch_lines, &actual_patch_lines));
eval_result.context_lines_in_expected_patch = expected_context_lines.len();
eval_result.context_lines_found_in_context = matched;
}
eval_result

View File

@@ -14,6 +14,7 @@ use anyhow::{Context as _, Result, anyhow};
use clap::ValueEnum;
use cloud_zeta2_prompt::CURSOR_MARKER;
use collections::HashMap;
use edit_prediction_context::Line;
use futures::{
AsyncWriteExt as _,
lock::{Mutex, OwnedMutexGuard},
@@ -52,6 +53,7 @@ pub struct Example {
pub cursor_position: String,
pub edit_history: String,
pub expected_patch: String,
pub expected_context: Vec<ExpectedContextEntry>,
}
pub type ActualExcerpt = Excerpt;
@@ -62,6 +64,25 @@ pub struct Excerpt {
pub text: String,
}
#[derive(Default, Clone, Debug, Serialize, Deserialize)]
pub struct ExpectedContextEntry {
pub heading: String,
pub alternatives: Vec<ExpectedExcerptSet>,
}
#[derive(Default, Clone, Debug, Serialize, Deserialize)]
pub struct ExpectedExcerptSet {
pub heading: String,
pub excerpts: Vec<ExpectedExcerpt>,
}
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ExpectedExcerpt {
pub path: PathBuf,
pub text: String,
pub required_lines: Vec<Line>,
}
#[derive(ValueEnum, Debug, Clone)]
pub enum ExampleFormat {
Json,
@@ -111,6 +132,7 @@ impl NamedExample {
cursor_position: String::new(),
edit_history: String::new(),
expected_patch: String::new(),
expected_context: Vec::new(),
},
};
@@ -175,10 +197,30 @@ impl NamedExample {
};
}
Event::End(TagEnd::Heading(HeadingLevel::H3)) => {
mem::take(&mut text);
let heading = mem::take(&mut text);
match current_section {
Section::ExpectedExcerpts => {
named.example.expected_context.push(ExpectedContextEntry {
heading,
alternatives: Vec::new(),
});
}
_ => {}
}
}
Event::End(TagEnd::Heading(HeadingLevel::H4)) => {
mem::take(&mut text);
let heading = mem::take(&mut text);
match current_section {
Section::ExpectedExcerpts => {
let expected_context = &mut named.example.expected_context;
let last_entry = expected_context.last_mut().unwrap();
last_entry.alternatives.push(ExpectedExcerptSet {
heading,
excerpts: Vec::new(),
})
}
_ => {}
}
}
Event::End(TagEnd::Heading(level)) => {
anyhow::bail!("Unexpected heading level: {level}");
@@ -211,7 +253,41 @@ impl NamedExample {
named.example.cursor_position = mem::take(&mut text);
}
Section::ExpectedExcerpts => {
mem::take(&mut text);
let text = mem::take(&mut text);
for excerpt in text.split("\n\n") {
let (mut text, required_lines) = extract_required_lines(&excerpt);
if !text.ends_with('\n') {
text.push('\n');
}
if named.example.expected_context.is_empty() {
named.example.expected_context.push(Default::default());
}
let alternatives = &mut named
.example
.expected_context
.last_mut()
.unwrap()
.alternatives;
if alternatives.is_empty() {
alternatives.push(ExpectedExcerptSet {
heading: String::new(),
excerpts: vec![],
});
}
alternatives
.last_mut()
.unwrap()
.excerpts
.push(ExpectedExcerpt {
path: block_info.into(),
text,
required_lines,
});
}
}
Section::ExpectedPatch => {
named.example.expected_patch = mem::take(&mut text);
@@ -485,6 +561,47 @@ impl NamedExample {
}
}
/// Strips `[ZETA]` marker comments from `text` and reports which lines were
/// tagged as `required`.
///
/// For each line containing the marker, the marker plus any preceding comment
/// punctuation (`//`, `#`) and whitespace are removed. A line that consists of
/// nothing but the marker comment is dropped entirely. Returned `Line` numbers
/// are rows in the *stripped* output, so dropped lines do not count.
fn extract_required_lines(text: &str) -> (String, Vec<Line>) {
    const MARKER: &str = "[ZETA]";
    let mut new_text = String::new();
    let mut required_lines = Vec::new();
    // Number of marker-only lines removed so far; used to translate source
    // rows into rows of the stripped output.
    let mut skipped_lines = 0_u32;
    for (row, mut line) in text.split('\n').enumerate() {
        if let Some(marker_column) = line.find(MARKER) {
            // Walk backwards over whitespace and comment punctuation that
            // introduced the marker, so the whole comment is stripped.
            let mut strip_column = marker_column;
            while let Some(prev_char) = line[..strip_column].chars().next_back() {
                if prev_char.is_whitespace() || ['/', '#'].contains(&prev_char) {
                    // Step back by the character's encoded width so we never
                    // slice inside a multi-byte UTF-8 sequence (the previous
                    // `line[strip_column - 1..]` indexing could panic there).
                    strip_column -= prev_char.len_utf8();
                } else {
                    break;
                }
            }
            let metadata = &line[marker_column + MARKER.len()..];
            if metadata.contains("required") {
                required_lines.push(Line(row as u32 - skipped_lines));
            }
            if strip_column == 0 {
                // The line held nothing but the marker comment; drop it.
                skipped_lines += 1;
                continue;
            }
            line = &line[..strip_column];
        }
        new_text.push_str(line);
        new_text.push('\n');
    }
    // Remove the extra newline appended after the final segment.
    new_text.pop();
    (new_text, required_lines)
}
async fn run_git(repo_path: &Path, args: &[&str]) -> Result<String> {
let output = smol::process::Command::new("git")
.current_dir(repo_path)
@@ -539,6 +656,37 @@ impl Display for NamedExample {
)?;
}
if !self.example.expected_context.is_empty() {
write!(f, "\n## {EXPECTED_CONTEXT_HEADING}\n\n")?;
for entry in &self.example.expected_context {
write!(f, "\n### {}\n\n", entry.heading)?;
let skip_h4 =
entry.alternatives.len() == 1 && entry.alternatives[0].heading.is_empty();
for excerpt_set in &entry.alternatives {
if !skip_h4 {
write!(f, "\n#### {}\n\n", excerpt_set.heading)?;
}
for excerpt in &excerpt_set.excerpts {
write!(
f,
"`````{}{}\n{}`````\n\n",
excerpt
.path
.extension()
.map(|ext| format!("{} ", ext.to_string_lossy()))
.unwrap_or_default(),
excerpt.path.display(),
excerpt.text
)?;
}
}
}
}
Ok(())
}
}
@@ -559,3 +707,38 @@ pub async fn lock_repo(path: impl AsRef<Path>) -> OwnedMutexGuard<()> {
.lock_owned()
.await
}
#[cfg(test)]
mod tests {
    use super::*;
    use indoc::indoc;
    use pretty_assertions::assert_eq;

    #[test]
    fn test_extract_required_lines() {
        // Markers may follow either `//` or `#` comment syntax. A line that
        // contains only a marker comment is dropped from the output and does
        // not count toward the reported row numbers.
        let source = "zero\n\
                      one // [ZETA] required\n\
                      two\n\
                      // [ZETA] something\n\
                      three\n\
                      four # [ZETA] required\n\
                      five\n";
        let (stripped, required) = extract_required_lines(source);
        assert_eq!(stripped, "zero\none\ntwo\nthree\nfour\nfive\n");
        assert_eq!(required, vec![Line(1), Line(4)]);
    }
}

View File

@@ -128,6 +128,8 @@ pub struct PredictArguments {
#[derive(Clone, Debug, Args)]
pub struct PredictionOptions {
#[arg(long)]
use_expected_context: bool,
#[clap(flatten)]
zeta2: Zeta2Args,
#[clap(long)]

View File

@@ -1,4 +1,4 @@
use crate::example::{ActualExcerpt, NamedExample};
use crate::example::{ActualExcerpt, ExpectedExcerpt, NamedExample};
use crate::headless::ZetaCliAppState;
use crate::paths::{CACHE_DIR, LATEST_EXAMPLE_RUN_DIR, RUN_DIR, print_run_data_dir};
use crate::{
@@ -7,13 +7,16 @@ use crate::{
use ::serde::Serialize;
use anyhow::{Context, Result, anyhow};
use cloud_zeta2_prompt::{CURSOR_MARKER, write_codeblock};
use collections::HashMap;
use futures::StreamExt as _;
use gpui::{AppContext, AsyncApp, Entity};
use language::{Anchor, Buffer, Point};
use project::Project;
use project::buffer_store::BufferStoreEvent;
use serde::Deserialize;
use std::fs;
use std::io::{IsTerminal, Write};
use std::ops::Range;
use std::path::PathBuf;
use std::sync::Arc;
use std::sync::Mutex;
@@ -201,12 +204,15 @@ pub async fn perform_predict(
let mut result = result.lock().unwrap();
result.generated_len = response.chars().count();
result.planning_search_time =
Some(search_queries_generated_at.unwrap() - start_time.unwrap());
result.running_search_time = Some(
search_queries_executed_at.unwrap()
- search_queries_generated_at.unwrap(),
);
if !options.use_expected_context {
result.planning_search_time = Some(
search_queries_generated_at.unwrap() - start_time.unwrap(),
);
result.running_search_time = Some(
search_queries_executed_at.unwrap()
- search_queries_generated_at.unwrap(),
);
}
result.prediction_time = prediction_finished_at - prediction_started_at;
result.total_time = prediction_finished_at - start_time.unwrap();
@@ -218,10 +224,37 @@ pub async fn perform_predict(
}
});
zeta.update(cx, |zeta, cx| {
zeta.refresh_context(project.clone(), cursor_buffer.clone(), cursor_anchor, cx)
})?
.await?;
if options.use_expected_context {
let context_excerpts_tasks = example
.example
.expected_context
.iter()
.flat_map(|section| {
section.alternatives[0].excerpts.iter().map(|excerpt| {
resolve_context_entry(project.clone(), excerpt.clone(), cx.clone())
})
})
.collect::<Vec<_>>();
let context_excerpts_vec =
futures::future::try_join_all(context_excerpts_tasks).await?;
let mut context_excerpts = HashMap::default();
for (buffer, mut excerpts) in context_excerpts_vec {
context_excerpts
.entry(buffer)
.or_insert(Vec::new())
.append(&mut excerpts);
}
zeta.update(cx, |zeta, _cx| {
zeta.set_context(project.clone(), context_excerpts)
})?;
} else {
zeta.update(cx, |zeta, cx| {
zeta.refresh_context(project.clone(), cursor_buffer.clone(), cursor_anchor, cx)
})?
.await?;
}
}
let prediction = zeta
@@ -241,6 +274,38 @@ pub async fn perform_predict(
anyhow::Ok(result)
}
async fn resolve_context_entry(
project: Entity<Project>,
excerpt: ExpectedExcerpt,
mut cx: AsyncApp,
) -> Result<(Entity<Buffer>, Vec<Range<Anchor>>)> {
let buffer = project
.update(&mut cx, |project, cx| {
let project_path = project.find_project_path(&excerpt.path, cx).unwrap();
project.open_buffer(project_path, cx)
})?
.await?;
let ranges = buffer.read_with(&mut cx, |buffer, _| {
let full_text = buffer.text();
let offset = full_text
.find(&excerpt.text)
.expect("Expected context not found");
let point = buffer.offset_to_point(offset);
excerpt
.required_lines
.iter()
.map(|line| {
let row = point.row + line.0;
let range = Point::new(row, 0)..Point::new(row + 1, 0);
buffer.anchor_after(range.start)..buffer.anchor_before(range.end)
})
.collect()
})?;
Ok((buffer, ranges))
}
struct RunCache {
cache_mode: CacheMode,
example_run_dir: PathBuf,

View File

@@ -36,7 +36,7 @@
- [Code Completions](./completions.md)
- [Collaboration](./collaboration/overview.md)
- [Channels](./collaboration/channels.md)
- [Contacts and Private Calls](./collaboration/contacts-and-private-calls.md)
- [Private Calls](./collaboration/private-calls.md)
- [Git](./git.md)
- [Debugger](./debugger.md)
- [Diagnostics](./diagnostics.md)

View File

@@ -1,122 +1,50 @@
# Channels
## Overview
Channels provide a way to streamline collaborating for software engineers in many ways, but particularly:
- Pairing when working on something together, you both have your own screen, mouse, and keyboard.
- Mentoring it's easy to jump in to someone else's context, and help them get unstuck, without the friction of pushing code up.
- Mentoring it's easy to jump in to someone else's context, and help them get unstuck, without the friction of pushing code up.
- Refactoring you can have multiple people join in on large refactoring without fear of conflict.
- Ambient awareness you can see what everyone else is working on with no need for status emails or meetings.
Each channel corresponds to an ongoing project or work-stream.
You can see who's in a channel as their avatars will show up in the sidebar.
This makes it easy to see what everyone is doing and where to find them if needed.
## Channels
Create a channel by clicking the `+` icon next to the `Channels` text in the collab panel.
Create a subchannel by right clicking an existing channel and selecting `New Subchannel`.
To open the collaboration panel hit {#kb collab_panel::ToggleFocus} or `collab panel: toggle focus`.
You can mix channels for your day job, as well as side-projects in your collab panel.
Each channel corresponds to an ongoing project or work-stream. You can see who's in a channel as their avatars will show up in the sidebar. This makes it easy to see what everyone is doing and where to find them if needed.
You can create as many channels as you need. As in the example above, you can mix channels for your day job, as well as side-projects in one instance of Zed.
Joining a channel adds you to a shared room where you can work on projects together.
_Join [our channel tree](https://zed.dev/channel/zed-283) to get an idea of how you can organize yours._
## Sharing projects
## Inviting People
After joining a channel, you can `Share` a project with the other people there. This will enable them to edit the code hosted on your machine as though they had it checked out locally.
By default, channels you create can only be accessed by you.
You can invite collaborators by right clicking and selecting `Manage members`.
When you are editing someone else's project, you still have the full power of the editor at your fingertips; you can jump to definitions, use the AI assistant, and see any diagnostic errors. This is extremely powerful for pairing, as one of you can be implementing the current method while the other is reading and researching the correct solution to the next problem. And, because you have your own config running, it feels like you're using your own machine.
When you have subchannels nested under others, permissions are inherited.
For instance, adding people to the top-level channel in your channel tree will automatically give them access to its subchannels.
See [our collaboration documentation](./private-calls.md) for more details about how this works.
Once you have added someone, they can either join your channel by clicking on it in their Zed sidebar, or you can share the link to the channel so that they can join directly.
## Notes
## Voice Chat
You can mute/unmute your microphone via the microphone icon in the upper right-hand side of the window.
> Note: When joining a channel, Zed will automatically share your microphone with other users in the call, if your OS allows it.
> If you'd prefer your microphone to be off when joining a channel, you can do so via the [`mute_on_join`](../configuring-zed.md#calls) setting.
## Sharing Projects
After joining a channel, you can share a project over the channel via the `Share` button in the upper right-hand side of the window.
This will allow channel members to edit the code hosted on your machine as though they had it checked out locally.
When you are editing someone else's project, you still have the full power of the editor at your fingertips; you can jump to definitions, use the AI assistant, and see any diagnostic errors.
This is extremely powerful for pairing, as one of you can be implementing the current method while the other is reading and researching the correct solution to the next problem.
And, because you have your own config running, it feels like you're using your own machine.
We aim to eliminate the distinction between local and remote projects as much as possible.
Collaborators can open, edit, and save files, perform searches, interact with the language server, etc.
Guests have a read-only view of the project, including access to language server info.
### Unsharing a Project
You can remove a project from a channel by clicking on the `Unshare` button in the title bar.
Collaborators that are currently in that project will be disconnected from the project and will not be able to rejoin it unless you share it again.
## Channel Notes
Each channel has a Markdown notes file associated with it to keep track of current status, new ideas, or to collaborate on building out the design for the feature that you're working on before diving into code.
Each channel has a notes file associated with it to keep track of current status, new ideas, or to collaborate on building out the design for the feature that you're working on before diving into code.
This is similar to a Google Doc, except powered by Zed's collaborative software and persisted to our servers.
Open the channel notes by clicking on the document icon to the right of the channel name in the collaboration panel.
## Inviting people
> Note: You can view a channel's notes without joining the channel, if you'd just like to read up on what has been written.
By default, channels you create can only be accessed by you. You can invite collaborators by right clicking and selecting `Manage members`.
## Following Collaborators
When you have channels nested under each other, permissions are inherited. For instance, in the example above, we only need to add people to the `#zed` channel, and they will automatically gain access to `#core-editor`, `#new-languages`, and `#stability`.
To follow a collaborator, click on their avatar in the top left of the title bar.
You can also cycle through collaborators using {#kb workspace::FollowNextCollaborator} or `workspace: follow next collaborator` in the command palette.
When you join a project, you'll immediately start following the collaborator that invited you.
When you are in a pane that is following a collaborator, you will:
- follow their cursor and scroll position
- follow them to other files in the same project
- instantly swap to viewing their screenshare in that pane, if they are sharing their screen and leave the project
To stop following, simply move your mouse or make an edit via your keyboard.
### How Following Works
Following is confined to a particular pane.
When a pane is following a collaborator, it is outlined in their cursor color.
Avatars of collaborators in the same project as you are in color, and have a cursor color.
Collaborators in other projects are shown in gray.
This pane-specific behavior allows you to follow someone in one pane while navigating independently in another and can be an effective layout for some collaboration styles.
### Following a Terminal
Following is not currently supported in the terminal in the way it is supported in the editor.
As a workaround, collaborators can share their screen and you can follow that instead.
## Screen Sharing
Share your screen with collaborators in the current channel by clicking on the `Share screen` (monitor icon) button in the top right of the title bar.
If you have multiple displays, you can choose which one to share via the chevron to the right of the monitor icon.
After you've shared your screen, others can click on the `Screen` entry under your name in the collaboration panel to open a tab that always keeps it visible.
If they are following you, Zed will automatically switch between following your cursor in their Zed instance and your screen share, depending on whether you are focused on Zed or another application, like a web browser.
> Note: Collaborators can see your entire screen when you are screen sharing, so be careful not to share anything you don't want to share.
> Remember to stop screen sharing when you are finished.
Once you have added someone, they can either join your channel by clicking on it in their Zed sidebar, or you can share the link to the channel so that they can join directly.
## Livestreaming & Guests
A Channel can also be made Public.
This allows anyone to join the channel by clicking on the link.
A Channel can also be made Public. This allows anyone to join the channel by clicking on the link.
Guest users in channels can hear and see everything that is happening, and have read only access to projects and channel notes.
If you'd like to invite a guest to participate in a channel for the duration of a call you can do so by right clicking on them in the Collaboration Panel.
"Allowing Write Access" will allow them to edit any projects shared into the call, and to use their microphone and share their screen if they wish.
## Leaving a Call
You can leave a channel by clicking on the `Leave call` button in the upper right-hand side of the window.
If you'd like to invite a guest to participate in a channel for the duration of a call you can do so by right clicking on them in the Collaboration Panel. "Allowing Write Access" will allow them to edit any projects shared into the call, and to use their microphone and share their screen if they wish.

View File

@@ -1,25 +0,0 @@
# Contacts and Private Calls
Zed allows you to have private calls / collaboration sessions with those in your contacts.
These calls can be one-on-ones or contain any number of users from your contacts.
## Adding a Contact
1. In the collaboration panel, click the `+` button next to the `Contacts` section
1. Search for the contact using their GitHub handle.\
_Note: Your contact must be an existing Zed user who has completed the GitHub authentication sign-in flow._
1. Your contact will receive a notification.
Once they accept, you'll both appear in each other's contact list.
## Private Calls
To start up a private call...
1. Click the `...` menu next to an online contact's name in the collaboration panel.
1. Click `Call <username>`
Once you've begun a private call, you can add other online contacts by clicking on their name in the collaboration panel.
---
_Aside from a few additional features (channel notes, etc.), collaboration in private calls is largely the same as it is in [channels](./channels.md)._

View File

@@ -2,17 +2,12 @@
At Zed, we believe that great things are built by great people working together.
We have designed Zed to help individuals work faster and help teams of people work together more effectively.
In Zed, all collaboration happens in the collaboration panel, which can be opened via {#kb collab_panel::ToggleFocus} or `collab panel: toggle focus` from the command palette.
You will need to [sign in](../authentication.md#signing-in) in order to access features within the collaboration panel.
## Collaboration panel
The collaboration panel is broken down into two sections:
Zed has two mechanisms for collaborating:
1. [Channels](./channels.md): Ongoing project rooms where team members can share projects, collaborate on code, and maintain ambient awareness of what everyone is working on.
1. [Contacts and Private Calls](./contacts-and-private-calls.md): Your contacts list for ad-hoc private collaboration.
1. [Private Calls](./private-calls.md): Ad-hoc private collaboration with those in your contacts list.
You will need to [sign in](../authentication.md#signing-in) in order to begin using Zed's collaboration features.
---
@@ -20,5 +15,3 @@ The collaboration panel is broken down into two sections:
> Since sharing a project gives them access to your local file system, you should not share projects with people you do not trust; they could potentially do some nasty things.
>
> In the future, we will do more to prevent this type of access beyond the shared project and add more control over what collaborators can do, but for now, only collaborate with people you trust.
See our [Data and Privacy FAQs](https://zed.dev/faq#data-and-privacy) for collaboration.

View File

@@ -0,0 +1,99 @@
# Private Calls
## Adding a collaborator to a call
Before you can collaborate, you'll need to add a collaborator to your contacts. To do this:
1. Open the contacts menu by clicking on the `Show contacts menu` button in the upper right-hand corner of the window or by running `collab: toggle contacts menu` (`cmd-shift-c`).
2. Click the add button to the right of the search box.
3. Search for the contact you want to add using their GitHub handle. Note: the person you are trying to add as a contact must be an existing Zed user.
### Inviting a collaborator
You can add an existing Zed user as a contact from the contacts menu, deployed from the `Show contacts menu` button in the upper right-hand corner of the window or by `collab: toggle contacts menu` (`cmd-shift-c`) and then clicking the `Search for new contact` button to the right of the search box.
![Inviting a collaborator to the current project](https://zed.dev/img/collaboration/add-a-collaborator.png)
When you invite a collaborator to a project not in a call they will receive a notification to join, and a new call is created.
![Receiving an invite to join a call](https://zed.dev/img/collaboration/receiving-an-invite.jpg)
### Inviting non-Zed users
If someone you want to collaborate with has not yet signed up for Zed, they will need to [download the app](https://zed.dev/download) and sign in for the first time before you can add them. Identity is tied to GitHub accounts, so new users will need to authenticate with GitHub in order to sign into Zed.
### Voice chat
When joining a call, Zed will automatically share your microphone with other users in the call, if your OS allows it. This isn't tied to your project. You can disable this for your client via the [`mute_on_join`](../configuring-zed.md#calls) setting.
## Collaborating on a project
### Share a project
When you invite a collaborator to join your project, a new call begins. Your Zed windows will show the call participants in the title bar of the window.
![A new Zed call with two collaborators](https://zed.dev/img/collaboration/new-call.png)
Collaborators in the same project as you are in color, and have a cursor color. Collaborators in other projects are shown in gray. Collaborators that have access to the current project will have their own cursor color under their avatar.
We aim to eliminate the distinction between local and remote projects as much as possible. Collaborators can open, edit, and save files, perform searches, interact with the language server, etc. Guests have a read-only view of the project, including access to language server info.
#### Unshared Projects
If a collaborator is currently in a project that is not shared, you will not be able to jump to their project or follow them until they either share the project or return to a project that is shared.
If you are in a project that isn't shared, others will not be able to join it or see its contents.
### Follow a collaborator
To follow a collaborator, click on their avatar in the top right of the window. You can also cycle through collaborators using `workspace: follow next collaborator` (`ctrl-alt-cmd-f`).
When you join a project, you'll immediately start following the collaborator that invited you.
![Automatically following the person inviting us to a project](https://zed.dev/img/collaboration/joining-a-call.png)
When you are in a pane that is following a collaborator, you will:
- follow their cursor and scroll position
- follow them to other files in the same project
- instantly swap to viewing their screen in that pane, if they are sharing their screen and leave the project
If you move your cursor or make an edit in that pane, you will stop following.
To start following again, you can click on a collaborator's avatar or cycle through following different participants by pressing `workspace: follow next collaborator` (`ctrl-alt-cmd-f`).
#### How following works
Following is confined to a particular pane. When a pane is following a collaborator, it is outlined in their cursor color.
This pane-specific behavior allows you to follow someone in one pane while navigating independently in another and can be an effective layout for some collaboration styles.
### Sharing your screen
Share your screen with collaborators in the current call by clicking on the `Share screen` button in the top right of the window.
Collaborators will see your screen if they are following you and you start viewing a window outside Zed or a project that is not shared.
Collaborators can see your entire screen when you are screen sharing, so be careful not to share anything you don't want to share. Remember to stop screen sharing when you are finished.
Call participants can open a dedicated tab for your screen share by opening the contacts menu in the top right and clicking on the `Screen` entry if you are sharing your screen.
### Adding a project
You can add a project to a call by clicking on the `Share` button next to the project name in the title bar.
### Removing a project
You can remove a project from a call by clicking on the `Unshare` button next to the project name in the title bar.
Collaborators that are currently in that project will be disconnected from the project and will not be able to rejoin it unless you share it again.
### Following a collaborator's terminal
You can follow what a collaborator is doing in their terminal by having them share their screen and following it.
In the future, we plan to allow you to collaborate in the terminal directly in a shared project.
### Leave call
You can leave a call by opening the contacts menu in the top right and clicking on the `Leave call` button.

View File

@@ -27,10 +27,9 @@ By default clang and gcc will recognize `*.C` and `*.H` (uppercase extensions) a
## Formatting
By default Zed will use the `clangd` language server for formatting C code like the `clang-format` CLI tool. To configure this you can add a `.clang-format` file. For example:
By default Zed will use the `clangd` language server for formatting C code. Clangd formats code the same way as the `clang-format` CLI tool. To configure this you can add a `.clang-format` file. For example:
```yaml
# yaml-language-server: $schema=https://json.schemastore.org/clang-format-21.x.json
---
BasedOnStyle: GNU
IndentWidth: 2