Compare commits

..

2 Commits

Author SHA1 Message Date
Conrad Irwin
c9210c3be1 WIP. New in-call controls
Co-authored-by: Matt Miller <mattrx@gmail.com>
2025-09-25 14:26:55 -06:00
Conrad Irwin
ff59912a81 Show an overlay on the left
Co-authored-by: Matt Miller <mattrx@gmail.com>
2025-09-25 12:40:45 -06:00
314 changed files with 3736 additions and 5260 deletions

View File

@@ -5,8 +5,7 @@ rustflags = ["-C", "symbol-mangling-version=v0", "--cfg", "tokio_unstable"]
[alias]
xtask = "run --package xtask --"
perf-test = ["test", "--profile", "release-fast", "--lib", "--bins", "--tests", "--all-features", "--config", "target.'cfg(true)'.runner='cargo run -p perf --release'", "--config", "target.'cfg(true)'.rustflags=[\"--cfg\", \"perf_enabled\"]"]
# Keep similar flags here to share some ccache
perf-compare = ["run", "--profile", "release-fast", "-p", "perf", "--config", "target.'cfg(true)'.rustflags=[\"--cfg\", \"perf_enabled\"]", "--", "compare"]
perf-compare = ["run", "--release", "-p", "perf", "--", "compare"]
[target.'cfg(target_os = "windows")']
rustflags = [

View File

@@ -63,7 +63,6 @@ Although there are few hard and fast rules, typically we don't merge:
- New file icons. Zed's default icon theme consists of icons that are hand-designed to fit together in a cohesive manner, please don't submit PRs with off-the-shelf SVGs.
- Giant refactorings.
- Non-trivial changes with no tests.
- Stylistic code changes that do not alter any app logic. Reducing allocations, removing `.unwrap()`s, fixing typos is great; making code "more readable" — maybe not so much.
- Features where (in our subjective opinion) the extra complexity isn't worth it for the number of people who will benefit.
- Anything that seems completely AI generated.

106
Cargo.lock generated
View File

@@ -2728,7 +2728,7 @@ dependencies = [
"cap-primitives",
"cap-std",
"io-lifetimes",
"windows-sys 0.59.0",
"windows-sys 0.52.0",
]
[[package]]
@@ -2757,7 +2757,7 @@ dependencies = [
"maybe-owned",
"rustix 1.0.7",
"rustix-linux-procfs",
"windows-sys 0.59.0",
"windows-sys 0.52.0",
"winx",
]
@@ -3696,7 +3696,6 @@ dependencies = [
"paths",
"project",
"rpc",
"semver",
"serde",
"serde_json",
"settings",
@@ -4111,9 +4110,9 @@ dependencies = [
[[package]]
name = "crc"
version = "3.3.0"
version = "3.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9710d3b3739c2e349eb44fe848ad0b7c8cb1e42bd87ee49371df2f7acaf3e675"
checksum = "69e6e4d7b33a94f0991c26729976b10ebde1d34c3ee82408fb536164fa10d636"
dependencies = [
"crc-catalog",
]
@@ -5125,6 +5124,7 @@ dependencies = [
"client",
"gpui",
"language",
"project",
"workspace-hack",
]
@@ -5171,7 +5171,6 @@ dependencies = [
"collections",
"futures 0.3.31",
"gpui",
"hashbrown 0.15.3",
"indoc",
"itertools 0.14.0",
"language",
@@ -5488,7 +5487,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "976dd42dc7e85965fe702eb8164f21f450704bdde31faefd6471dba214cb594e"
dependencies = [
"libc",
"windows-sys 0.59.0",
"windows-sys 0.52.0",
]
[[package]]
@@ -5855,7 +5854,7 @@ checksum = "0ce92ff622d6dadf7349484f42c93271a0d49b7cc4d466a936405bacbe10aa78"
dependencies = [
"cfg-if",
"rustix 1.0.7",
"windows-sys 0.59.0",
"windows-sys 0.52.0",
]
[[package]]
@@ -6243,7 +6242,7 @@ checksum = "94e7099f6313ecacbe1256e8ff9d617b75d1bcb16a6fddef94866d225a01a14a"
dependencies = [
"io-lifetimes",
"rustix 1.0.7",
"windows-sys 0.59.0",
"windows-sys 0.52.0",
]
[[package]]
@@ -6269,7 +6268,6 @@ dependencies = [
"bitflags 2.9.0",
"core-foundation 0.10.0",
"fsevent-sys 3.1.0",
"log",
"parking_lot",
"tempfile",
"workspace-hack",
@@ -8147,7 +8145,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2285ddfe3054097ef4b2fe909ef8c3bcd1ea52a8f0d274416caebeef39f04a65"
dependencies = [
"io-lifetimes",
"windows-sys 0.59.0",
"windows-sys 0.52.0",
]
[[package]]
@@ -8220,7 +8218,7 @@ checksum = "e04d7f318608d35d4b61ddd75cbdaee86b023ebe2bd5a66ee0915f0bf93095a9"
dependencies = [
"hermit-abi 0.5.0",
"libc",
"windows-sys 0.59.0",
"windows-sys 0.52.0",
]
[[package]]
@@ -8390,28 +8388,6 @@ dependencies = [
"thiserror 1.0.69",
]
[[package]]
name = "json_schema_store"
version = "0.1.0"
dependencies = [
"anyhow",
"dap",
"extension",
"gpui",
"language",
"paths",
"project",
"schemars 1.0.1",
"serde",
"serde_json",
"settings",
"snippet_provider",
"task",
"theme",
"util",
"workspace-hack",
]
[[package]]
name = "jsonschema"
version = "0.30.0"
@@ -8499,7 +8475,6 @@ dependencies = [
"fuzzy",
"gpui",
"itertools 0.14.0",
"json_schema_store",
"language",
"log",
"menu",
@@ -8731,6 +8706,7 @@ dependencies = [
"settings",
"smol",
"strum 0.27.1",
"theme",
"thiserror 2.0.12",
"tiktoken-rs",
"tokio",
@@ -8817,16 +8793,17 @@ dependencies = [
"async-trait",
"chrono",
"collections",
"dap",
"futures 0.3.31",
"gpui",
"http_client",
"itertools 0.14.0",
"json_schema_store",
"language",
"log",
"lsp",
"node_runtime",
"parking_lot",
"paths",
"pet",
"pet-conda",
"pet-core",
@@ -8839,6 +8816,7 @@ dependencies = [
"regex",
"rope",
"rust-embed",
"schemars 1.0.1",
"serde",
"serde_json",
"serde_json_lenient",
@@ -8846,6 +8824,7 @@ dependencies = [
"sha2",
"shlex",
"smol",
"snippet_provider",
"task",
"tempfile",
"text",
@@ -12581,7 +12560,7 @@ dependencies = [
"once_cell",
"socket2",
"tracing",
"windows-sys 0.59.0",
"windows-sys 0.52.0",
]
[[package]]
@@ -13063,7 +13042,6 @@ dependencies = [
"gpui",
"gpui_tokio",
"http_client",
"json_schema_store",
"language",
"language_extension",
"language_model",
@@ -13664,7 +13642,7 @@ dependencies = [
"errno 0.3.11",
"libc",
"linux-raw-sys 0.4.15",
"windows-sys 0.59.0",
"windows-sys 0.52.0",
]
[[package]]
@@ -13677,7 +13655,7 @@ dependencies = [
"errno 0.3.11",
"libc",
"linux-raw-sys 0.9.4",
"windows-sys 0.59.0",
"windows-sys 0.52.0",
]
[[package]]
@@ -13799,7 +13777,7 @@ dependencies = [
"security-framework 3.2.0",
"security-framework-sys",
"webpki-root-certs",
"windows-sys 0.59.0",
"windows-sys 0.52.0",
]
[[package]]
@@ -14484,7 +14462,6 @@ dependencies = [
"serde_with",
"settings_macros",
"smallvec",
"strum 0.27.1",
"tree-sitter",
"tree-sitter-json",
"unindent",
@@ -14545,7 +14522,6 @@ dependencies = [
"serde",
"session",
"settings",
"strum 0.27.1",
"theme",
"ui",
"util",
@@ -14573,9 +14549,9 @@ checksum = "bbfa15b3dddfee50a0fff136974b3e1bde555604ba463834a7eb7deb6417705d"
[[package]]
name = "sha2"
version = "0.10.9"
version = "0.10.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a7507d819769d01a365ab707794a4084392c824f54a7a6a7862f8c3d0892b283"
checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8"
dependencies = [
"cfg-if",
"cpufeatures",
@@ -14831,7 +14807,6 @@ dependencies = [
"paths",
"schemars 1.0.1",
"serde",
"serde_json",
"serde_json_lenient",
"snippet",
"util",
@@ -15184,7 +15159,7 @@ dependencies = [
"cfg-if",
"libc",
"psm",
"windows-sys 0.59.0",
"windows-sys 0.52.0",
]
[[package]]
@@ -15877,7 +15852,7 @@ dependencies = [
"fd-lock",
"io-lifetimes",
"rustix 0.38.44",
"windows-sys 0.59.0",
"windows-sys 0.52.0",
"winx",
]
@@ -16059,7 +16034,7 @@ dependencies = [
"getrandom 0.3.2",
"once_cell",
"rustix 1.0.7",
"windows-sys 0.59.0",
"windows-sys 0.52.0",
]
[[package]]
@@ -16093,7 +16068,6 @@ dependencies = [
"gpui",
"itertools 0.14.0",
"libc",
"log",
"rand 0.9.1",
"regex",
"release_channel",
@@ -16902,9 +16876,9 @@ dependencies = [
[[package]]
name = "tree-sitter"
version = "0.25.10"
version = "0.25.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "78f873475d258561b06f1c595d93308a7ed124d9977cb26b148c2084a4a3cc87"
checksum = "a7cf18d43cbf0bfca51f657132cc616a5097edc4424d538bae6fa60142eaf9f0"
dependencies = [
"cc",
"regex",
@@ -17077,9 +17051,8 @@ dependencies = [
[[package]]
name = "tree-sitter-python"
version = "0.25.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6bf85fd39652e740bf60f46f4cda9492c3a9ad75880575bf14960f775cb74a1c"
version = "0.23.6"
source = "git+https://github.com/zed-industries/tree-sitter-python?rev=218fcbf3fda3d029225f3dec005cb497d111b35e#218fcbf3fda3d029225f3dec005cb497d111b35e"
dependencies = [
"cc",
"tree-sitter-language",
@@ -17552,7 +17525,6 @@ dependencies = [
"libc",
"log",
"nix 0.29.0",
"pretty_assertions",
"rand 0.9.1",
"regex",
"rust-embed",
@@ -18644,7 +18616,7 @@ version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb"
dependencies = [
"windows-sys 0.59.0",
"windows-sys 0.52.0",
]
[[package]]
@@ -19356,7 +19328,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f3fd376f71958b862e7afb20cfe5a22830e1963462f3a17f49d82a6c1d1f42d"
dependencies = [
"bitflags 2.9.0",
"windows-sys 0.59.0",
"windows-sys 0.52.0",
]
[[package]]
@@ -20263,7 +20235,6 @@ dependencies = [
"install_cli",
"itertools 0.14.0",
"journal",
"json_schema_store",
"keymap_editor",
"language",
"language_extension",
@@ -20339,7 +20310,6 @@ dependencies = [
"url",
"urlencoding",
"util",
"util_macros",
"uuid",
"vim",
"vim_mode_setting",
@@ -20347,7 +20317,6 @@ dependencies = [
"web_search",
"web_search_providers",
"windows 0.61.1",
"windows-sys 0.61.0",
"winresource",
"workspace",
"workspace-hack",
@@ -20399,17 +20368,6 @@ dependencies = [
"wit-bindgen 0.41.0",
]
[[package]]
name = "zed_extension_api"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0729d50b4ca0a7e28e590bbe32e3ca0194d97ef654961451a424c661a366fca0"
dependencies = [
"serde",
"serde_json",
"wit-bindgen 0.41.0",
]
[[package]]
name = "zed_glsl"
version = "0.1.0"
@@ -20419,9 +20377,9 @@ dependencies = [
[[package]]
name = "zed_html"
version = "0.2.3"
version = "0.2.2"
dependencies = [
"zed_extension_api 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
"zed_extension_api 0.1.0",
]
[[package]]

View File

@@ -91,7 +91,6 @@ members = [
"crates/inspector_ui",
"crates/install_cli",
"crates/journal",
"crates/json_schema_store",
"crates/keymap_editor",
"crates/language",
"crates/language_extension",
@@ -323,7 +322,6 @@ zeta2_tools = { path = "crates/zeta2_tools" }
inspector_ui = { path = "crates/inspector_ui" }
install_cli = { path = "crates/install_cli" }
journal = { path = "crates/journal" }
json_schema_store = { path = "crates/json_schema_store" }
keymap_editor = { path = "crates/keymap_editor" }
language = { path = "crates/language" }
language_extension = { path = "crates/language_extension" }
@@ -386,7 +384,6 @@ search = { path = "crates/search" }
semantic_version = { path = "crates/semantic_version" }
session = { path = "crates/session" }
settings = { path = "crates/settings" }
settings_macros = { path = "crates/settings_macros" }
settings_ui = { path = "crates/settings_ui" }
snippet = { path = "crates/snippet" }
snippet_provider = { path = "crates/snippet_provider" }
@@ -514,7 +511,6 @@ futures-lite = "1.13"
git2 = { version = "0.20.1", default-features = false }
globset = "0.4"
handlebars = "4.3"
hashbrown = "0.15.3"
heck = "0.5"
heed = { version = "0.21.0", features = ["read-txn-no-tls"] }
hex = "0.4.3"
@@ -669,7 +665,7 @@ tokio = { version = "1" }
tokio-tungstenite = { version = "0.26", features = ["__rustls-tls"] }
toml = "0.8"
tower-http = "0.4.4"
tree-sitter = { version = "0.25.10", features = ["wasm"] }
tree-sitter = { version = "0.25.6", features = ["wasm"] }
tree-sitter-bash = "0.25.0"
tree-sitter-c = "0.23"
tree-sitter-cpp = { git = "https://github.com/tree-sitter/tree-sitter-cpp", rev = "5cb9b693cfd7bfacab1d9ff4acac1a4150700609" }
@@ -686,7 +682,7 @@ tree-sitter-html = "0.23"
tree-sitter-jsdoc = "0.23"
tree-sitter-json = "0.24"
tree-sitter-md = { git = "https://github.com/tree-sitter-grammars/tree-sitter-markdown", rev = "9a23c1a96c0513d8fc6520972beedd419a973539" }
tree-sitter-python = "0.25"
tree-sitter-python = { git = "https://github.com/zed-industries/tree-sitter-python", rev = "218fcbf3fda3d029225f3dec005cb497d111b35e" }
tree-sitter-regex = "0.24"
tree-sitter-ruby = "0.23"
tree-sitter-rust = "0.24"
@@ -714,7 +710,6 @@ wasmtime = { version = "29", default-features = false, features = [
wasmtime-wasi = "29"
which = "6.0.0"
windows-core = "0.61"
windows-sys = "0.61"
wit-component = "0.221"
workspace-hack = "0.1.0"
yawc = "0.2.5"
@@ -814,7 +809,6 @@ image_viewer = { codegen-units = 1 }
edit_prediction_button = { codegen-units = 1 }
install_cli = { codegen-units = 1 }
journal = { codegen-units = 1 }
json_schema_store = { codegen-units = 1 }
lmstudio = { codegen-units = 1 }
menu = { codegen-units = 1 }
notifications = { codegen-units = 1 }
@@ -866,7 +860,6 @@ todo = "deny"
declare_interior_mutable_const = "deny"
redundant_clone = "deny"
disallowed_methods = "deny"
# We currently do not restrict any style rules
# as it slows down shipping code to Zed.

8
assets/icons/audio.svg Normal file
View File

@@ -0,0 +1,8 @@
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M3.5 6.66666V8.66666" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M5.5 5V11" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M7.5 3V13" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M9.5 5.33334V10" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M11.5 4V12" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M13.5 6.66666V8.66666" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
</svg>

After

Width:  |  Height:  |  Size: 751 B

View File

@@ -345,7 +345,7 @@
}
},
{
"context": "AcpThread > Editor && !use_modifier_to_send",
"context": "AcpThread > Editor",
"use_key_equivalents": true,
"bindings": {
"enter": "agent::Chat",
@@ -355,17 +355,6 @@
"shift-tab": "agent::CycleModeSelector"
}
},
{
"context": "AcpThread > Editor && use_modifier_to_send",
"use_key_equivalents": true,
"bindings": {
"ctrl-enter": "agent::Chat",
"ctrl-shift-r": "agent::OpenAgentDiff",
"ctrl-shift-y": "agent::KeepAll",
"ctrl-shift-n": "agent::RejectAll",
"shift-tab": "agent::CycleModeSelector"
}
},
{
"context": "ThreadHistory",
"use_key_equivalents": true,

View File

@@ -4,7 +4,6 @@
// from the command palette.
[
{
"context": "!GitPanel",
"bindings": {
"ctrl-g": "menu::Cancel"
}

View File

@@ -240,7 +240,6 @@
"delete": "vim::DeleteRight",
"g shift-j": "vim::JoinLinesNoWhitespace",
"y": "vim::PushYank",
"shift-y": "vim::YankToEndOfLine",
"x": "vim::DeleteRight",
"shift-x": "vim::DeleteLeft",
"ctrl-a": "vim::Increment",
@@ -393,7 +392,7 @@
"escape": "editor::Cancel",
"shift-d": "vim::DeleteToEndOfLine",
"shift-j": "vim::JoinLines",
"shift-y": "vim::YankToEndOfLine",
"shift-y": "vim::YankLine",
"shift-i": "vim::InsertFirstNonWhitespace",
"shift-a": "vim::InsertEndOfLine",
"o": "vim::InsertLineBelow",

View File

@@ -115,7 +115,6 @@
// Whether to enable vim modes and key bindings.
"vim_mode": false,
// Whether to enable helix mode and key bindings.
// Enabling this mode will automatically enable vim mode.
"helix_mode": false,
// Whether to show the informational hover box when moving the mouse
// over symbols in the editor.

Binary file not shown.

View File

@@ -239,7 +239,7 @@
"hint": {
"color": "#628b80ff",
"font_style": null,
"font_weight": null
"font_weight": 700
},
"keyword": {
"color": "#ff8f3fff",

View File

@@ -248,7 +248,7 @@
"hint": {
"color": "#8c957dff",
"font_style": null,
"font_weight": null
"font_weight": 700
},
"keyword": {
"color": "#fb4833ff",

View File

@@ -13,7 +13,7 @@
"border.selected": "#293b5bff",
"border.transparent": "#00000000",
"border.disabled": "#414754ff",
"elevated_surface.background": "#2f343eff",
"elevated_surface.background": "#3F4550FF",
"surface.background": "#2f343eff",
"background": "#3b414dff",
"element.background": "#2e343eff",
@@ -244,7 +244,7 @@
"hint": {
"color": "#788ca6ff",
"font_style": null,
"font_weight": null
"font_weight": 700
},
"keyword": {
"color": "#b477cfff",
@@ -414,7 +414,7 @@
"border.selected": "#cbcdf6ff",
"border.transparent": "#00000000",
"border.disabled": "#d3d3d4ff",
"elevated_surface.background": "#ebebecff",
"elevated_surface.background": "#ffffffff",
"surface.background": "#ebebecff",
"background": "#dcdcddff",
"element.background": "#ebebecff",

View File

@@ -5,14 +5,3 @@ ignore-interior-mutability = [
# and Hash impls do not use fields with interior mutability.
"agent::context::AgentContextKey"
]
disallowed-methods = [
{ path = "std::process::Command::spawn", reason = "Spawning `std::process::Command` can block the current thread for an unknown duration", replacement = "smol::process::Command::spawn" },
{ path = "std::process::Command::output", reason = "Spawning `std::process::Command` can block the current thread for an unknown duration", replacement = "smol::process::Command::output" },
{ path = "std::process::Command::status", reason = "Spawning `std::process::Command` can block the current thread for an unknown duration", replacement = "smol::process::Command::status" },
]
disallowed-types = [
# { path = "std::collections::HashMap", replacement = "collections::HashMap" },
# { path = "std::collections::HashSet", replacement = "collections::HashSet" },
# { path = "indexmap::IndexSet", replacement = "collections::IndexSet" },
# { path = "indexmap::IndexMap", replacement = "collections::IndexMap" },
]

View File

@@ -573,7 +573,7 @@ impl ToolCallContent {
))),
acp::ToolCallContent::Diff { diff } => Ok(Self::Diff(cx.new(|cx| {
Diff::finalized(
diff.path.to_string_lossy().into_owned(),
diff.path.to_string_lossy().to_string(),
diff.old_text,
diff.new_text,
language_registry,
@@ -1985,7 +1985,7 @@ impl AcpThread {
let terminal_id = terminal_id.clone();
async move |_this, cx| {
let env = env.await;
let (task_command, task_args) = ShellBuilder::new(
let (command, args) = ShellBuilder::new(
project
.update(cx, |project, cx| {
project
@@ -1996,13 +1996,13 @@ impl AcpThread {
&Shell::Program(get_default_system_shell()),
)
.redirect_stdin_to_dev_null()
.build(Some(command.clone()), &args);
.build(Some(command), &args);
let terminal = project
.update(cx, |project, cx| {
project.create_terminal_task(
task::SpawnInTerminal {
command: Some(task_command),
args: task_args,
command: Some(command.clone()),
args: args.clone(),
cwd: cwd.clone(),
env,
..Default::default()

View File

@@ -126,39 +126,6 @@ impl MentionUri {
abs_path: None,
line_range,
})
} else if let Some(name) = path.strip_prefix("/agent/symbol/") {
let fragment = url
.fragment()
.context("Missing fragment for untitled buffer selection")?;
let line_range = parse_line_range(fragment)?;
let path =
single_query_param(&url, "path")?.context("Missing path for symbol")?;
Ok(Self::Symbol {
name: name.to_string(),
abs_path: path.into(),
line_range,
})
} else if path.starts_with("/agent/file") {
let path =
single_query_param(&url, "path")?.context("Missing path for file")?;
Ok(Self::File {
abs_path: path.into(),
})
} else if path.starts_with("/agent/directory") {
let path =
single_query_param(&url, "path")?.context("Missing path for directory")?;
Ok(Self::Directory {
abs_path: path.into(),
})
} else if path.starts_with("/agent/selection") {
let fragment = url.fragment().context("Missing fragment for selection")?;
let line_range = parse_line_range(fragment)?;
let path =
single_query_param(&url, "path")?.context("Missing path for selection")?;
Ok(Self::Selection {
abs_path: Some(path.into()),
line_range,
})
} else {
bail!("invalid zed url: {:?}", input);
}
@@ -213,29 +180,20 @@ impl MentionUri {
pub fn to_uri(&self) -> Url {
match self {
MentionUri::File { abs_path } => {
let mut url = Url::parse("zed:///").unwrap();
url.set_path("/agent/file");
url.query_pairs_mut()
.append_pair("path", &abs_path.to_string_lossy());
url
Url::from_file_path(abs_path).expect("mention path should be absolute")
}
MentionUri::PastedImage => Url::parse("zed:///agent/pasted-image").unwrap(),
MentionUri::Directory { abs_path } => {
let mut url = Url::parse("zed:///").unwrap();
url.set_path("/agent/directory");
url.query_pairs_mut()
.append_pair("path", &abs_path.to_string_lossy());
url
Url::from_directory_path(abs_path).expect("mention path should be absolute")
}
MentionUri::Symbol {
abs_path,
name,
line_range,
} => {
let mut url = Url::parse("zed:///").unwrap();
url.set_path(&format!("/agent/symbol/{name}"));
url.query_pairs_mut()
.append_pair("path", &abs_path.to_string_lossy());
let mut url =
Url::from_file_path(abs_path).expect("mention path should be absolute");
url.query_pairs_mut().append_pair("symbol", name);
url.set_fragment(Some(&format!(
"L{}:{}",
line_range.start() + 1,
@@ -244,16 +202,15 @@ impl MentionUri {
url
}
MentionUri::Selection {
abs_path,
abs_path: path,
line_range,
} => {
let mut url = Url::parse("zed:///").unwrap();
if let Some(abs_path) = abs_path {
url.set_path("/agent/selection");
url.query_pairs_mut()
.append_pair("path", &abs_path.to_string_lossy());
let mut url = if let Some(path) = path {
Url::from_file_path(path).expect("mention path should be absolute")
} else {
let mut url = Url::parse("zed:///").unwrap();
url.set_path("/agent/untitled-buffer");
url
};
url.set_fragment(Some(&format!(
"L{}:{}",
@@ -338,32 +295,37 @@ mod tests {
#[test]
fn test_parse_file_uri() {
let old_uri = uri!("file:///path/to/file.rs");
let parsed = MentionUri::parse(old_uri).unwrap();
let file_uri = uri!("file:///path/to/file.rs");
let parsed = MentionUri::parse(file_uri).unwrap();
match &parsed {
MentionUri::File { abs_path } => {
assert_eq!(abs_path.to_str().unwrap(), path!("/path/to/file.rs"));
}
_ => panic!("Expected File variant"),
}
let new_uri = parsed.to_uri().to_string();
assert!(new_uri.starts_with("zed:///agent/file"));
assert_eq!(MentionUri::parse(&new_uri).unwrap(), parsed);
assert_eq!(parsed.to_uri().to_string(), file_uri);
}
#[test]
fn test_parse_directory_uri() {
let old_uri = uri!("file:///path/to/dir/");
let parsed = MentionUri::parse(old_uri).unwrap();
let file_uri = uri!("file:///path/to/dir/");
let parsed = MentionUri::parse(file_uri).unwrap();
match &parsed {
MentionUri::Directory { abs_path } => {
assert_eq!(abs_path.to_str().unwrap(), path!("/path/to/dir/"));
}
_ => panic!("Expected Directory variant"),
}
let new_uri = parsed.to_uri().to_string();
assert!(new_uri.starts_with("zed:///agent/directory"));
assert_eq!(MentionUri::parse(&new_uri).unwrap(), parsed);
assert_eq!(parsed.to_uri().to_string(), file_uri);
}
#[test]
fn test_to_directory_uri_with_slash() {
let uri = MentionUri::Directory {
abs_path: PathBuf::from(path!("/path/to/dir/")),
};
let expected = uri!("file:///path/to/dir/");
assert_eq!(uri.to_uri().to_string(), expected);
}
#[test]
@@ -371,15 +333,14 @@ mod tests {
let uri = MentionUri::Directory {
abs_path: PathBuf::from(path!("/path/to/dir")),
};
let uri_string = uri.to_uri().to_string();
assert!(uri_string.starts_with("zed:///agent/directory"));
assert_eq!(MentionUri::parse(&uri_string).unwrap(), uri);
let expected = uri!("file:///path/to/dir/");
assert_eq!(uri.to_uri().to_string(), expected);
}
#[test]
fn test_parse_symbol_uri() {
let old_uri = uri!("file:///path/to/file.rs?symbol=MySymbol#L10:20");
let parsed = MentionUri::parse(old_uri).unwrap();
let symbol_uri = uri!("file:///path/to/file.rs?symbol=MySymbol#L10:20");
let parsed = MentionUri::parse(symbol_uri).unwrap();
match &parsed {
MentionUri::Symbol {
abs_path: path,
@@ -393,15 +354,13 @@ mod tests {
}
_ => panic!("Expected Symbol variant"),
}
let new_uri = parsed.to_uri().to_string();
assert!(new_uri.starts_with("zed:///agent/symbol/MySymbol"));
assert_eq!(MentionUri::parse(&new_uri).unwrap(), parsed);
assert_eq!(parsed.to_uri().to_string(), symbol_uri);
}
#[test]
fn test_parse_selection_uri() {
let old_uri = uri!("file:///path/to/file.rs#L5:15");
let parsed = MentionUri::parse(old_uri).unwrap();
let selection_uri = uri!("file:///path/to/file.rs#L5:15");
let parsed = MentionUri::parse(selection_uri).unwrap();
match &parsed {
MentionUri::Selection {
abs_path: path,
@@ -416,9 +375,7 @@ mod tests {
}
_ => panic!("Expected Selection variant"),
}
let new_uri = parsed.to_uri().to_string();
assert!(new_uri.starts_with("zed:///agent/selection"));
assert_eq!(MentionUri::parse(&new_uri).unwrap(), parsed);
assert_eq!(parsed.to_uri().to_string(), selection_uri);
}
#[test]

View File

@@ -159,7 +159,7 @@ pub struct FileContextHandle {
#[derive(Debug, Clone)]
pub struct FileContext {
pub handle: FileContextHandle,
pub full_path: String,
pub full_path: Arc<Path>,
pub text: SharedString,
pub is_outline: bool,
}
@@ -187,7 +187,7 @@ impl FileContextHandle {
log::error!("file context missing path");
return Task::ready(None);
};
let full_path = file.full_path(cx).to_string_lossy().into_owned();
let full_path: Arc<Path> = file.full_path(cx).into();
let rope = buffer_ref.as_rope().clone();
let buffer = self.buffer.clone();
@@ -236,7 +236,7 @@ pub struct DirectoryContextHandle {
#[derive(Debug, Clone)]
pub struct DirectoryContext {
pub handle: DirectoryContextHandle,
pub full_path: String,
pub full_path: Arc<Path>,
pub descendants: Vec<DirectoryContextDescendant>,
}
@@ -274,16 +274,13 @@ impl DirectoryContextHandle {
}
let directory_path = entry.path.clone();
let directory_full_path = worktree_ref
.full_path(&directory_path)
.to_string_lossy()
.to_string();
let directory_full_path = worktree_ref.full_path(&directory_path).into();
let file_paths = collect_files_in_path(worktree_ref, &directory_path);
let descendants_future = future::join_all(file_paths.into_iter().map(|path| {
let worktree_ref = worktree.read(cx);
let worktree_id = worktree_ref.id();
let full_path = worktree_ref.full_path(&path).to_string_lossy().into_owned();
let full_path = worktree_ref.full_path(&path);
let rel_path = path
.strip_prefix(&directory_path)
@@ -364,7 +361,7 @@ pub struct SymbolContextHandle {
#[derive(Debug, Clone)]
pub struct SymbolContext {
pub handle: SymbolContextHandle,
pub full_path: String,
pub full_path: Arc<Path>,
pub line_range: Range<Point>,
pub text: SharedString,
}
@@ -403,7 +400,7 @@ impl SymbolContextHandle {
log::error!("symbol context's file has no path");
return Task::ready(None);
};
let full_path = file.full_path(cx).to_string_lossy().into_owned();
let full_path = file.full_path(cx).into();
let line_range = self.enclosing_range.to_point(&buffer_ref.snapshot());
let text = self.text(cx);
let buffer = self.buffer.clone();
@@ -437,7 +434,7 @@ pub struct SelectionContextHandle {
#[derive(Debug, Clone)]
pub struct SelectionContext {
pub handle: SelectionContextHandle,
pub full_path: String,
pub full_path: Arc<Path>,
pub line_range: Range<Point>,
pub text: SharedString,
}
@@ -476,7 +473,7 @@ impl SelectionContextHandle {
let text = self.text(cx);
let buffer = self.buffer.clone();
let context = AgentContext::Selection(SelectionContext {
full_path: full_path.to_string_lossy().into_owned(),
full_path: full_path.into(),
line_range: self.line_range(cx),
text,
handle: self,
@@ -706,7 +703,7 @@ impl Display for RulesContext {
#[derive(Debug, Clone)]
pub struct ImageContext {
pub project_path: Option<ProjectPath>,
pub full_path: Option<String>,
pub full_path: Option<Arc<Path>>,
pub original_image: Arc<gpui::Image>,
// TODO: handle this elsewhere and remove `ignore-interior-mutability` opt-out in clippy.toml
// needed due to a false positive of `clippy::mutable_key_type`.
@@ -986,17 +983,14 @@ fn collect_files_in_path(worktree: &Worktree, path: &RelPath) -> Vec<Arc<RelPath
files
}
fn codeblock_tag(full_path: &str, line_range: Option<Range<Point>>) -> String {
fn codeblock_tag(full_path: &Path, line_range: Option<Range<Point>>) -> String {
let mut result = String::new();
if let Some(extension) = Path::new(full_path)
.extension()
.and_then(|ext| ext.to_str())
{
if let Some(extension) = full_path.extension().and_then(|ext| ext.to_str()) {
let _ = write!(result, "{} ", extension);
}
let _ = write!(result, "{}", full_path);
let _ = write!(result, "{}", full_path.display());
if let Some(range) = line_range {
if range.start.row == range.end.row {

View File

@@ -312,7 +312,7 @@ impl ContextStore {
let item = image_item.read(cx);
this.insert_image(
Some(item.project_path(cx)),
Some(item.file.full_path(cx).to_string_lossy().into_owned()),
Some(item.file.full_path(cx).into()),
item.image.clone(),
remove_if_exists,
cx,
@@ -328,7 +328,7 @@ impl ContextStore {
fn insert_image(
&mut self,
project_path: Option<ProjectPath>,
full_path: Option<String>,
full_path: Option<Arc<Path>>,
image: Arc<Image>,
remove_if_exists: bool,
cx: &mut Context<ContextStore>,

View File

@@ -155,7 +155,7 @@ impl HistoryStore {
.iter()
.filter_map(|entry| match entry {
HistoryEntryId::Context(path) => path.file_name().map(|file| {
SerializedRecentOpen::ContextName(file.to_string_lossy().into_owned())
SerializedRecentOpen::ContextName(file.to_string_lossy().to_string())
}),
HistoryEntryId::Thread(id) => Some(SerializedRecentOpen::Thread(id.to_string())),
})

View File

@@ -2875,7 +2875,7 @@ impl Thread {
// Get worktree path and snapshot
let worktree_info = cx.update(|app_cx| {
let worktree = worktree.read(app_cx);
let path = worktree.abs_path().to_string_lossy().into_owned();
let path = worktree.abs_path().to_string_lossy().to_string();
let snapshot = worktree.snapshot();
(path, snapshot)
});

View File

@@ -235,7 +235,7 @@ impl ThreadStore {
if items.iter().any(|(path, _, _)| {
RULES_FILE_NAMES
.iter()
.any(|name| path.as_ref() == RelPath::unix(name).unwrap())
.any(|name| path.as_ref() == RelPath::new(name).unwrap())
}) {
self.enqueue_system_prompt_reload();
}
@@ -368,7 +368,7 @@ impl ThreadStore {
.into_iter()
.filter_map(|name| {
worktree
.entry_for_path(RelPath::unix(name).unwrap())
.entry_for_path(RelPath::new(name).unwrap())
.filter(|entry| entry.is_file())
.map(|entry| entry.path.clone())
})

View File

@@ -475,7 +475,7 @@ impl NativeAgent {
.into_iter()
.filter_map(|name| {
worktree
.entry_for_path(RelPath::unix(name).unwrap())
.entry_for_path(RelPath::new(name).unwrap())
.filter(|entry| entry.is_file())
.map(|entry| entry.path.clone())
})
@@ -559,7 +559,7 @@ impl NativeAgent {
if items.iter().any(|(path, _, _)| {
RULES_FILE_NAMES
.iter()
.any(|name| path.as_ref() == RelPath::unix(name).unwrap())
.any(|name| path.as_ref() == RelPath::new(name).unwrap())
}) {
self.project_context_needs_refresh.send(()).ok();
}
@@ -1205,7 +1205,7 @@ mod tests {
use acp_thread::{AgentConnection, AgentModelGroupName, AgentModelInfo, MentionUri};
use fs::FakeFs;
use gpui::TestAppContext;
use indoc::formatdoc;
use indoc::indoc;
use language_model::fake_provider::FakeLanguageModel;
use serde_json::json;
use settings::SettingsStore;
@@ -1502,17 +1502,13 @@ mod tests {
summary_model.end_last_completion_stream();
send.await.unwrap();
let uri = MentionUri::File {
abs_path: path!("/a/b.md").into(),
}
.to_uri();
acp_thread.read_with(cx, |thread, cx| {
assert_eq!(
thread.to_markdown(cx),
formatdoc! {"
indoc! {"
## User
What does [@b.md]({uri}) mean?
What does [@b.md](file:///a/b.md) mean?
## Assistant
@@ -1548,10 +1544,10 @@ mod tests {
acp_thread.read_with(cx, |thread, cx| {
assert_eq!(
thread.to_markdown(cx),
formatdoc! {"
indoc! {"
## User
What does [@b.md]({uri}) mean?
What does [@b.md](file:///a/b.md) mean?
## Assistant

View File

@@ -262,7 +262,7 @@ impl HistoryStore {
.iter()
.filter_map(|entry| match entry {
HistoryEntryId::TextThread(path) => path.file_name().map(|file| {
SerializedRecentOpen::TextThread(file.to_string_lossy().into_owned())
SerializedRecentOpen::TextThread(file.to_string_lossy().to_string())
}),
HistoryEntryId::AcpThread(id) => {
Some(SerializedRecentOpen::AcpThread(id.to_string()))

View File

@@ -898,7 +898,7 @@ impl Thread {
// Get worktree path and snapshot
let worktree_info = cx.update(|app_cx| {
let worktree = worktree.read(app_cx);
let path = worktree.abs_path().to_string_lossy().into_owned();
let path = worktree.abs_path().to_string_lossy().to_string();
let snapshot = worktree.snapshot();
(path, snapshot)
});

View File

@@ -218,7 +218,7 @@ impl AgentTool for EditFileTool {
.read(cx)
.short_full_path_for_project_path(&project_path, cx)
})
.unwrap_or(input.path.to_string_lossy().into_owned())
.unwrap_or(input.path.to_string_lossy().to_string())
.into(),
Err(raw_input) => {
if let Some(input) =
@@ -476,7 +476,7 @@ impl AgentTool for EditFileTool {
) -> Result<()> {
event_stream.update_diff(cx.new(|cx| {
Diff::finalized(
output.input_path.to_string_lossy().into_owned(),
output.input_path.to_string_lossy().to_string(),
Some(output.old_text.to_string()),
output.new_text,
self.language_registry.clone(),
@@ -541,7 +541,7 @@ fn resolve_path(
.path
.file_name()
.and_then(|file_name| file_name.to_str())
.and_then(|file_name| RelPath::unix(file_name).ok())
.and_then(|file_name| RelPath::new(file_name).ok())
.context("Can't create file: invalid filename")?;
let new_file_path = parent_project_path.map(|parent| ProjectPath {
@@ -565,7 +565,7 @@ mod tests {
use prompt_store::ProjectContext;
use serde_json::json;
use settings::SettingsStore;
use util::{path, rel_path::rel_path};
use util::path;
#[gpui::test]
async fn test_edit_nonexistent_file(cx: &mut TestAppContext) {
@@ -614,13 +614,13 @@ mod tests {
let mode = &EditFileMode::Create;
let result = test_resolve_path(mode, "root/new.txt", cx);
assert_resolved_path_eq(result.await, rel_path("new.txt"));
assert_resolved_path_eq(result.await, "new.txt");
let result = test_resolve_path(mode, "new.txt", cx);
assert_resolved_path_eq(result.await, rel_path("new.txt"));
assert_resolved_path_eq(result.await, "new.txt");
let result = test_resolve_path(mode, "dir/new.txt", cx);
assert_resolved_path_eq(result.await, rel_path("dir/new.txt"));
assert_resolved_path_eq(result.await, "dir/new.txt");
let result = test_resolve_path(mode, "root/dir/subdir/existing.txt", cx);
assert_eq!(
@@ -642,10 +642,10 @@ mod tests {
let path_with_root = "root/dir/subdir/existing.txt";
let path_without_root = "dir/subdir/existing.txt";
let result = test_resolve_path(mode, path_with_root, cx);
assert_resolved_path_eq(result.await, rel_path(path_without_root));
assert_resolved_path_eq(result.await, path_without_root);
let result = test_resolve_path(mode, path_without_root, cx);
assert_resolved_path_eq(result.await, rel_path(path_without_root));
assert_resolved_path_eq(result.await, path_without_root);
let result = test_resolve_path(mode, "root/nonexistent.txt", cx);
assert_eq!(
@@ -691,9 +691,10 @@ mod tests {
}
#[track_caller]
fn assert_resolved_path_eq(path: anyhow::Result<ProjectPath>, expected: &RelPath) {
fn assert_resolved_path_eq(path: anyhow::Result<ProjectPath>, expected: &str) {
let actual = path.expect("Should return valid path").path;
assert_eq!(actual.as_ref(), expected);
let actual = actual.as_str();
assert_eq!(actual, expected);
}
#[gpui::test]

View File

@@ -104,7 +104,7 @@ mod tests {
async fn test_to_absolute_path(cx: &mut TestAppContext) {
init_test(cx);
let temp_dir = TempDir::new().expect("Failed to create temp directory");
let temp_path = temp_dir.path().to_string_lossy().into_owned();
let temp_path = temp_dir.path().to_string_lossy().to_string();
let fs = FakeFs::new(cx.executor());
fs.insert_tree(

View File

@@ -225,12 +225,9 @@ impl AgentTool for ReadFileTool {
Ok(result.into())
} else {
// No line ranges specified, so check file size to see if it's too big.
let buffer_content = outline::get_buffer_content_or_outline(
buffer.clone(),
Some(&abs_path.to_string_lossy()),
cx,
)
.await?;
let buffer_content =
outline::get_buffer_content_or_outline(buffer.clone(), Some(&abs_path), cx)
.await?;
action_log.update(cx, |log, cx| {
log.buffer_read(buffer.clone(), cx);

View File

@@ -99,9 +99,6 @@ pub fn load_proxy_env(cx: &mut App) -> HashMap<String, String> {
if let Some(no_proxy) = read_no_proxy_from_env() {
env.insert("NO_PROXY".to_owned(), no_proxy);
} else if proxy_url.is_some() {
// We sometimes need local MCP servers that we don't want to proxy
env.insert("NO_PROXY".to_owned(), "localhost,127.0.0.1".to_owned());
}
env

View File

@@ -62,7 +62,7 @@ impl AgentServer for ClaudeCode {
cx: &mut App,
) -> Task<Result<(Rc<dyn AgentConnection>, Option<task::SpawnInTerminal>)>> {
let name = self.name();
let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().into_owned());
let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().to_string());
let is_remote = delegate.project.read(cx).is_via_remote_server();
let store = delegate.store.downgrade();
let extra_env = load_proxy_env(cx);

View File

@@ -67,7 +67,7 @@ impl crate::AgentServer for CustomAgentServer {
cx: &mut App,
) -> Task<Result<(Rc<dyn AgentConnection>, Option<task::SpawnInTerminal>)>> {
let name = self.name();
let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().into_owned());
let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().to_string());
let is_remote = delegate.project.read(cx).is_via_remote_server();
let default_mode = self.default_mode(cx);
let store = delegate.store.downgrade();

View File

@@ -31,7 +31,7 @@ impl AgentServer for Gemini {
cx: &mut App,
) -> Task<Result<(Rc<dyn AgentConnection>, Option<task::SpawnInTerminal>)>> {
let name = self.name();
let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().into_owned());
let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().to_string());
let is_remote = delegate.project.read(cx).is_via_remote_server();
let store = delegate.store.downgrade();
let mut extra_env = load_proxy_env(cx);

View File

@@ -48,7 +48,7 @@ use std::{
use text::OffsetRangeExt;
use theme::ThemeSettings;
use ui::{ButtonLike, TintColor, Toggleable, prelude::*};
use util::{ResultExt, debug_panic, rel_path::RelPath};
use util::{ResultExt, debug_panic, paths::PathStyle, rel_path::RelPath};
use workspace::{Workspace, notifications::NotifyResultExt as _};
use zed_actions::agent::Chat;
@@ -76,7 +76,7 @@ pub enum MessageEditorEvent {
impl EventEmitter<MessageEditorEvent> for MessageEditor {}
const COMMAND_HINT_INLAY_ID: u32 = 0;
const COMMAND_HINT_INLAY_ID: usize = 0;
impl MessageEditor {
pub fn new(
@@ -108,6 +108,11 @@ impl MessageEditor {
available_commands.clone(),
));
let mention_set = MentionSet::default();
// TODO: fix mentions when remoting with mixed path styles.
let host_and_guest_paths_differ = project
.read(cx)
.remote_client()
.is_some_and(|client| client.read(cx).path_style() != PathStyle::local());
let editor = cx.new(|cx| {
let buffer = cx.new(|cx| Buffer::local("", cx).with_language(Arc::new(language), cx));
let buffer = cx.new(|cx| MultiBuffer::singleton(buffer, cx));
@@ -117,7 +122,9 @@ impl MessageEditor {
editor.set_show_indent_guides(false, cx);
editor.set_soft_wrap();
editor.set_use_modal_editing(true);
editor.set_completion_provider(Some(completion_provider.clone()));
if !host_and_guest_paths_differ {
editor.set_completion_provider(Some(completion_provider.clone()));
}
editor.set_context_menu_options(ContextMenuOptions {
min_entries_visible: 12,
max_entries_visible: 12,
@@ -452,12 +459,9 @@ impl MessageEditor {
.update(cx, |project, cx| project.open_buffer(project_path, cx));
cx.spawn(async move |_, cx| {
let buffer = buffer.await?;
let buffer_content = outline::get_buffer_content_or_outline(
buffer.clone(),
Some(&abs_path.to_string_lossy()),
&cx,
)
.await?;
let buffer_content =
outline::get_buffer_content_or_outline(buffer.clone(), Some(&abs_path), &cx)
.await?;
Ok(Mention::Text {
content: buffer_content.text,
@@ -1177,20 +1181,14 @@ fn full_mention_for_directory(
abs_path: &Path,
cx: &mut App,
) -> Task<Result<Mention>> {
fn collect_files_in_path(worktree: &Worktree, path: &RelPath) -> Vec<(Arc<RelPath>, String)> {
fn collect_files_in_path(worktree: &Worktree, path: &RelPath) -> Vec<(Arc<RelPath>, PathBuf)> {
let mut files = Vec::new();
for entry in worktree.child_entries(path) {
if entry.is_dir() {
files.extend(collect_files_in_path(worktree, &entry.path));
} else if entry.is_file() {
files.push((
entry.path.clone(),
worktree
.full_path(&entry.path)
.to_string_lossy()
.to_string(),
));
files.push((entry.path.clone(), worktree.full_path(&entry.path)));
}
}
@@ -1268,7 +1266,7 @@ fn full_mention_for_directory(
})
}
fn render_directory_contents(entries: Vec<(Arc<RelPath>, String, String)>) -> String {
fn render_directory_contents(entries: Vec<(Arc<RelPath>, PathBuf, String)>) -> String {
let mut output = String::new();
for (_relative_path, full_path, content) in entries {
let fence = codeblock_fence_for_path(Some(&full_path), None);
@@ -1602,7 +1600,7 @@ mod tests {
use serde_json::json;
use text::Point;
use ui::{App, Context, IntoElement, Render, SharedString, Window};
use util::{path, paths::PathStyle, rel_path::rel_path};
use util::{path, paths::PathStyle, rel_path::rel_path, uri};
use workspace::{AppState, Item, Workspace};
use crate::acp::{
@@ -2268,11 +2266,7 @@ mod tests {
editor.confirm_completion(&editor::actions::ConfirmCompletion::default(), window, cx);
});
let url_one = MentionUri::File {
abs_path: path!("/dir/a/one.txt").into(),
}
.to_uri()
.to_string();
let url_one = uri!("file:///dir/a/one.txt");
editor.update(&mut cx, |editor, cx| {
let text = editor.text(cx);
assert_eq!(text, format!("Lorem [@one.txt]({url_one}) "));
@@ -2377,11 +2371,7 @@ mod tests {
.into_values()
.collect::<Vec<_>>();
let url_eight = MentionUri::File {
abs_path: path!("/dir/b/eight.txt").into(),
}
.to_uri()
.to_string();
let url_eight = uri!("file:///dir/b/eight.txt");
{
let [_, (uri, Mention::Text { content, .. })] = contents.as_slice() else {
@@ -2480,12 +2470,6 @@ mod tests {
editor.confirm_completion(&editor::actions::ConfirmCompletion::default(), window, cx);
});
let symbol = MentionUri::Symbol {
abs_path: path!("/dir/a/one.txt").into(),
name: "MySymbol".into(),
line_range: 0..=0,
};
let contents = message_editor
.update(&mut cx, |message_editor, cx| {
message_editor.mention_set().contents(
@@ -2505,7 +2489,12 @@ mod tests {
panic!("Unexpected mentions");
};
pretty_assertions::assert_eq!(content, "1");
pretty_assertions::assert_eq!(uri, &symbol);
pretty_assertions::assert_eq!(
uri,
&format!("{url_one}?symbol=MySymbol#L1:1")
.parse::<MentionUri>()
.unwrap()
);
}
cx.run_until_parked();
@@ -2513,10 +2502,7 @@ mod tests {
editor.read_with(&cx, |editor, cx| {
assert_eq!(
editor.text(cx),
format!(
"Lorem [@one.txt]({url_one}) Ipsum [@eight.txt]({url_eight}) [@MySymbol]({}) ",
symbol.to_uri(),
)
format!("Lorem [@one.txt]({url_one}) Ipsum [@eight.txt]({url_eight}) [@MySymbol]({url_one}?symbol=MySymbol#L1:1) ")
);
});
@@ -2526,7 +2512,7 @@ mod tests {
editor.update(&mut cx, |editor, cx| {
assert_eq!(
editor.text(cx),
format!("Lorem [@one.txt]({url_one}) Ipsum [@eight.txt]({url_eight}) [@MySymbol]({}) @file x.png", symbol.to_uri())
format!("Lorem [@one.txt]({url_one}) Ipsum [@eight.txt]({url_eight}) [@MySymbol]({url_one}?symbol=MySymbol#L1:1) @file x.png")
);
assert!(editor.has_visible_completions_menu());
assert_eq!(current_completion_labels(editor), &[format!("x.png dir{slash}")]);
@@ -2555,10 +2541,7 @@ mod tests {
editor.read_with(&cx, |editor, cx| {
assert_eq!(
editor.text(cx),
format!(
"Lorem [@one.txt]({url_one}) Ipsum [@eight.txt]({url_eight}) [@MySymbol]({}) ",
symbol.to_uri()
)
format!("Lorem [@one.txt]({url_one}) Ipsum [@eight.txt]({url_eight}) [@MySymbol]({url_one}?symbol=MySymbol#L1:1) ")
);
});
@@ -2568,7 +2551,7 @@ mod tests {
editor.update(&mut cx, |editor, cx| {
assert_eq!(
editor.text(cx),
format!("Lorem [@one.txt]({url_one}) Ipsum [@eight.txt]({url_eight}) [@MySymbol]({}) @file x.png", symbol.to_uri())
format!("Lorem [@one.txt]({url_one}) Ipsum [@eight.txt]({url_eight}) [@MySymbol]({url_one}?symbol=MySymbol#L1:1) @file x.png")
);
assert!(editor.has_visible_completions_menu());
assert_eq!(current_completion_labels(editor), &[format!("x.png dir{slash}")]);
@@ -2583,14 +2566,11 @@ mod tests {
// Mention was removed
editor.read_with(&cx, |editor, cx| {
assert_eq!(
editor.text(cx),
format!(
"Lorem [@one.txt]({url_one}) Ipsum [@eight.txt]({url_eight}) [@MySymbol]({}) ",
symbol.to_uri()
)
);
});
assert_eq!(
editor.text(cx),
format!("Lorem [@one.txt]({url_one}) Ipsum [@eight.txt]({url_eight}) [@MySymbol]({url_one}?symbol=MySymbol#L1:1) ")
);
});
// Now getting the contents succeeds, because the invalid mention was removed
let contents = message_editor

View File

@@ -238,7 +238,7 @@ impl TerminalInlineAssistant {
let latest_output = terminal.last_n_non_empty_lines(DEFAULT_CONTEXT_LINES);
let working_directory = terminal
.working_directory()
.map(|path| path.to_string_lossy().into_owned());
.map(|path| path.to_string_lossy().to_string());
(latest_output, working_directory)
})
.ok()

View File

@@ -17,7 +17,6 @@ use agent::context::{
FileContextHandle, ImageContext, ImageStatus, RulesContextHandle, SelectionContextHandle,
SymbolContextHandle, TextThreadContextHandle, ThreadContextHandle,
};
use util::paths::PathStyle;
#[derive(IntoElement)]
pub enum ContextPill {
@@ -304,54 +303,33 @@ impl AddedContext {
cx: &App,
) -> Option<AddedContext> {
match handle {
AgentContextHandle::File(handle) => {
Self::pending_file(handle, project.path_style(cx), cx)
}
AgentContextHandle::File(handle) => Self::pending_file(handle, cx),
AgentContextHandle::Directory(handle) => Self::pending_directory(handle, project, cx),
AgentContextHandle::Symbol(handle) => {
Self::pending_symbol(handle, project.path_style(cx), cx)
}
AgentContextHandle::Selection(handle) => {
Self::pending_selection(handle, project.path_style(cx), cx)
}
AgentContextHandle::Symbol(handle) => Self::pending_symbol(handle, cx),
AgentContextHandle::Selection(handle) => Self::pending_selection(handle, cx),
AgentContextHandle::FetchedUrl(handle) => Some(Self::fetched_url(handle)),
AgentContextHandle::Thread(handle) => Some(Self::pending_thread(handle, cx)),
AgentContextHandle::TextThread(handle) => Some(Self::pending_text_thread(handle, cx)),
AgentContextHandle::Rules(handle) => Self::pending_rules(handle, prompt_store, cx),
AgentContextHandle::Image(handle) => {
Some(Self::image(handle, model, project.path_style(cx), cx))
}
AgentContextHandle::Image(handle) => Some(Self::image(handle, model, cx)),
}
}
fn pending_file(
handle: FileContextHandle,
path_style: PathStyle,
cx: &App,
) -> Option<AddedContext> {
let full_path = handle
.buffer
.read(cx)
.file()?
.full_path(cx)
.to_string_lossy()
.to_string();
Some(Self::file(handle, &full_path, path_style, cx))
fn pending_file(handle: FileContextHandle, cx: &App) -> Option<AddedContext> {
let full_path = handle.buffer.read(cx).file()?.full_path(cx);
Some(Self::file(handle, &full_path, cx))
}
fn file(
handle: FileContextHandle,
full_path: &str,
path_style: PathStyle,
cx: &App,
) -> AddedContext {
let (name, parent) = extract_file_name_and_directory_from_full_path(full_path, path_style);
fn file(handle: FileContextHandle, full_path: &Path, cx: &App) -> AddedContext {
let full_path_string: SharedString = full_path.to_string_lossy().into_owned().into();
let (name, parent) =
extract_file_name_and_directory_from_full_path(full_path, &full_path_string);
AddedContext {
kind: ContextKind::File,
name,
parent,
tooltip: Some(SharedString::new(full_path)),
icon_path: FileIcons::get_icon(Path::new(full_path), cx),
tooltip: Some(full_path_string),
icon_path: FileIcons::get_icon(full_path, cx),
status: ContextStatus::Ready,
render_hover: None,
handle: AgentContextHandle::File(handle),
@@ -365,24 +343,19 @@ impl AddedContext {
) -> Option<AddedContext> {
let worktree = project.worktree_for_entry(handle.entry_id, cx)?.read(cx);
let entry = worktree.entry_for_id(handle.entry_id)?;
let full_path = worktree
.full_path(&entry.path)
.to_string_lossy()
.to_string();
Some(Self::directory(handle, &full_path, project.path_style(cx)))
let full_path = worktree.full_path(&entry.path);
Some(Self::directory(handle, &full_path))
}
fn directory(
handle: DirectoryContextHandle,
full_path: &str,
path_style: PathStyle,
) -> AddedContext {
let (name, parent) = extract_file_name_and_directory_from_full_path(full_path, path_style);
fn directory(handle: DirectoryContextHandle, full_path: &Path) -> AddedContext {
let full_path_string: SharedString = full_path.to_string_lossy().into_owned().into();
let (name, parent) =
extract_file_name_and_directory_from_full_path(full_path, &full_path_string);
AddedContext {
kind: ContextKind::Directory,
name,
parent,
tooltip: Some(SharedString::new(full_path)),
tooltip: Some(full_path_string),
icon_path: None,
status: ContextStatus::Ready,
render_hover: None,
@@ -390,17 +363,9 @@ impl AddedContext {
}
}
fn pending_symbol(
handle: SymbolContextHandle,
path_style: PathStyle,
cx: &App,
) -> Option<AddedContext> {
let excerpt = ContextFileExcerpt::new(
&handle.full_path(cx)?.to_string_lossy(),
handle.enclosing_line_range(cx),
path_style,
cx,
);
fn pending_symbol(handle: SymbolContextHandle, cx: &App) -> Option<AddedContext> {
let excerpt =
ContextFileExcerpt::new(&handle.full_path(cx)?, handle.enclosing_line_range(cx), cx);
Some(AddedContext {
kind: ContextKind::Symbol,
name: handle.symbol.clone(),
@@ -418,17 +383,8 @@ impl AddedContext {
})
}
fn pending_selection(
handle: SelectionContextHandle,
path_style: PathStyle,
cx: &App,
) -> Option<AddedContext> {
let excerpt = ContextFileExcerpt::new(
&handle.full_path(cx)?.to_string_lossy(),
handle.line_range(cx),
path_style,
cx,
);
fn pending_selection(handle: SelectionContextHandle, cx: &App) -> Option<AddedContext> {
let excerpt = ContextFileExcerpt::new(&handle.full_path(cx)?, handle.line_range(cx), cx);
Some(AddedContext {
kind: ContextKind::Selection,
name: excerpt.file_name_and_range.clone(),
@@ -529,13 +485,13 @@ impl AddedContext {
fn image(
context: ImageContext,
model: Option<&Arc<dyn language_model::LanguageModel>>,
path_style: PathStyle,
cx: &App,
) -> AddedContext {
let (name, parent, icon_path) = if let Some(full_path) = context.full_path.as_ref() {
let full_path_string: SharedString = full_path.to_string_lossy().into_owned().into();
let (name, parent) =
extract_file_name_and_directory_from_full_path(full_path, path_style);
let icon_path = FileIcons::get_icon(Path::new(full_path), cx);
extract_file_name_and_directory_from_full_path(full_path, &full_path_string);
let icon_path = FileIcons::get_icon(full_path, cx);
(name, parent, icon_path)
} else {
("Image".into(), None, None)
@@ -584,20 +540,19 @@ impl AddedContext {
}
fn extract_file_name_and_directory_from_full_path(
path: &str,
path_style: PathStyle,
path: &Path,
name_fallback: &SharedString,
) -> (SharedString, Option<SharedString>) {
let (parent, file_name) = path_style.split(path);
let parent = parent.and_then(|parent| {
let parent = parent.trim_end_matches(path_style.separator());
let (_, parent) = path_style.split(parent);
if parent.is_empty() {
None
} else {
Some(SharedString::new(parent))
}
});
(SharedString::new(file_name), parent)
let name = path
.file_name()
.map(|n| n.to_string_lossy().into_owned().into())
.unwrap_or_else(|| name_fallback.clone());
let parent = path
.parent()
.and_then(|p| p.file_name())
.map(|n| n.to_string_lossy().into_owned().into());
(name, parent)
}
#[derive(Debug, Clone)]
@@ -609,25 +564,25 @@ struct ContextFileExcerpt {
}
impl ContextFileExcerpt {
pub fn new(full_path: &str, line_range: Range<Point>, path_style: PathStyle, cx: &App) -> Self {
let (parent, file_name) = path_style.split(full_path);
pub fn new(full_path: &Path, line_range: Range<Point>, cx: &App) -> Self {
let full_path_string = full_path.to_string_lossy().into_owned();
let file_name = full_path
.file_name()
.map(|n| n.to_string_lossy().into_owned())
.unwrap_or_else(|| full_path_string.clone());
let line_range_text = format!(" ({}-{})", line_range.start.row + 1, line_range.end.row + 1);
let mut full_path_and_range = full_path.to_owned();
let mut full_path_and_range = full_path_string;
full_path_and_range.push_str(&line_range_text);
let mut file_name_and_range = file_name.to_owned();
let mut file_name_and_range = file_name;
file_name_and_range.push_str(&line_range_text);
let parent_name = parent.and_then(|parent| {
let parent = parent.trim_end_matches(path_style.separator());
let (_, parent) = path_style.split(parent);
if parent.is_empty() {
None
} else {
Some(SharedString::new(parent))
}
});
let parent_name = full_path
.parent()
.and_then(|p| p.file_name())
.map(|n| n.to_string_lossy().into_owned().into());
let icon_path = FileIcons::get_icon(Path::new(full_path), cx);
let icon_path = FileIcons::get_icon(full_path, cx);
ContextFileExcerpt {
file_name_and_range: file_name_and_range.into(),
@@ -735,7 +690,6 @@ impl Component for AddedContext {
image_task: Task::ready(Some(LanguageModelImage::empty())).shared(),
},
None,
PathStyle::local(),
cx,
),
);
@@ -756,7 +710,6 @@ impl Component for AddedContext {
.shared(),
},
None,
PathStyle::local(),
cx,
),
);
@@ -772,7 +725,6 @@ impl Component for AddedContext {
image_task: Task::ready(None).shared(),
},
None,
PathStyle::local(),
cx,
),
);
@@ -815,8 +767,7 @@ mod tests {
full_path: None,
};
let added_context =
AddedContext::image(image_context, Some(&model), PathStyle::local(), cx);
let added_context = AddedContext::image(image_context, Some(&model), cx);
assert!(matches!(
added_context.status,
@@ -839,7 +790,7 @@ mod tests {
full_path: None,
};
let added_context = AddedContext::image(image_context, None, PathStyle::local(), cx);
let added_context = AddedContext::image(image_context, None, cx);
assert!(
matches!(added_context.status, ContextStatus::Ready),

View File

@@ -40,7 +40,7 @@ impl AgentOnboardingModal {
}
fn view_blog(&mut self, _: &ClickEvent, _: &mut Window, cx: &mut Context<Self>) {
cx.open_url("https://zed.dev/blog/fastest-ai-code-editor");
cx.open_url("http://zed.dev/blog/fastest-ai-code-editor");
cx.notify();
agent_onboarding_event!("Blog Link Clicked");

View File

@@ -67,6 +67,7 @@ pub enum Model {
alias = "claude-opus-4-1-thinking-latest"
)]
ClaudeOpus4_1Thinking,
#[default]
#[serde(rename = "claude-sonnet-4", alias = "claude-sonnet-4-latest")]
ClaudeSonnet4,
#[serde(
@@ -74,14 +75,6 @@ pub enum Model {
alias = "claude-sonnet-4-thinking-latest"
)]
ClaudeSonnet4Thinking,
#[default]
#[serde(rename = "claude-sonnet-4-5", alias = "claude-sonnet-4-5-latest")]
ClaudeSonnet4_5,
#[serde(
rename = "claude-sonnet-4-5-thinking",
alias = "claude-sonnet-4-5-thinking-latest"
)]
ClaudeSonnet4_5Thinking,
#[serde(rename = "claude-3-7-sonnet", alias = "claude-3-7-sonnet-latest")]
Claude3_7Sonnet,
#[serde(
@@ -140,14 +133,6 @@ impl Model {
return Ok(Self::ClaudeOpus4);
}
if id.starts_with("claude-sonnet-4-5-thinking") {
return Ok(Self::ClaudeSonnet4_5Thinking);
}
if id.starts_with("claude-sonnet-4-5") {
return Ok(Self::ClaudeSonnet4_5);
}
if id.starts_with("claude-sonnet-4-thinking") {
return Ok(Self::ClaudeSonnet4Thinking);
}
@@ -195,8 +180,6 @@ impl Model {
Self::ClaudeOpus4_1Thinking => "claude-opus-4-1-thinking-latest",
Self::ClaudeSonnet4 => "claude-sonnet-4-latest",
Self::ClaudeSonnet4Thinking => "claude-sonnet-4-thinking-latest",
Self::ClaudeSonnet4_5 => "claude-sonnet-4-5-latest",
Self::ClaudeSonnet4_5Thinking => "claude-sonnet-4-5-thinking-latest",
Self::Claude3_5Sonnet => "claude-3-5-sonnet-latest",
Self::Claude3_7Sonnet => "claude-3-7-sonnet-latest",
Self::Claude3_7SonnetThinking => "claude-3-7-sonnet-thinking-latest",
@@ -214,7 +197,6 @@ impl Model {
Self::ClaudeOpus4 | Self::ClaudeOpus4Thinking => "claude-opus-4-20250514",
Self::ClaudeOpus4_1 | Self::ClaudeOpus4_1Thinking => "claude-opus-4-1-20250805",
Self::ClaudeSonnet4 | Self::ClaudeSonnet4Thinking => "claude-sonnet-4-20250514",
Self::ClaudeSonnet4_5 | Self::ClaudeSonnet4_5Thinking => "claude-sonnet-4-5-20250929",
Self::Claude3_5Sonnet => "claude-3-5-sonnet-latest",
Self::Claude3_7Sonnet | Self::Claude3_7SonnetThinking => "claude-3-7-sonnet-latest",
Self::Claude3_5Haiku => "claude-3-5-haiku-latest",
@@ -233,8 +215,6 @@ impl Model {
Self::ClaudeOpus4_1Thinking => "Claude Opus 4.1 Thinking",
Self::ClaudeSonnet4 => "Claude Sonnet 4",
Self::ClaudeSonnet4Thinking => "Claude Sonnet 4 Thinking",
Self::ClaudeSonnet4_5 => "Claude Sonnet 4.5",
Self::ClaudeSonnet4_5Thinking => "Claude Sonnet 4.5 Thinking",
Self::Claude3_7Sonnet => "Claude 3.7 Sonnet",
Self::Claude3_5Sonnet => "Claude 3.5 Sonnet",
Self::Claude3_7SonnetThinking => "Claude 3.7 Sonnet Thinking",
@@ -256,8 +236,6 @@ impl Model {
| Self::ClaudeOpus4_1Thinking
| Self::ClaudeSonnet4
| Self::ClaudeSonnet4Thinking
| Self::ClaudeSonnet4_5
| Self::ClaudeSonnet4_5Thinking
| Self::Claude3_5Sonnet
| Self::Claude3_5Haiku
| Self::Claude3_7Sonnet
@@ -283,8 +261,6 @@ impl Model {
| Self::ClaudeOpus4_1Thinking
| Self::ClaudeSonnet4
| Self::ClaudeSonnet4Thinking
| Self::ClaudeSonnet4_5
| Self::ClaudeSonnet4_5Thinking
| Self::Claude3_5Sonnet
| Self::Claude3_5Haiku
| Self::Claude3_7Sonnet
@@ -304,8 +280,6 @@ impl Model {
| Self::ClaudeOpus4_1Thinking
| Self::ClaudeSonnet4
| Self::ClaudeSonnet4Thinking
| Self::ClaudeSonnet4_5
| Self::ClaudeSonnet4_5Thinking
| Self::Claude3_5Sonnet
| Self::Claude3_7Sonnet
| Self::Claude3_7SonnetThinking
@@ -325,8 +299,6 @@ impl Model {
| Self::ClaudeOpus4_1Thinking
| Self::ClaudeSonnet4
| Self::ClaudeSonnet4Thinking
| Self::ClaudeSonnet4_5
| Self::ClaudeSonnet4_5Thinking
| Self::Claude3_5Sonnet
| Self::Claude3_7Sonnet
| Self::Claude3_7SonnetThinking
@@ -346,7 +318,6 @@ impl Model {
Self::ClaudeOpus4
| Self::ClaudeOpus4_1
| Self::ClaudeSonnet4
| Self::ClaudeSonnet4_5
| Self::Claude3_5Sonnet
| Self::Claude3_7Sonnet
| Self::Claude3_5Haiku
@@ -356,7 +327,6 @@ impl Model {
Self::ClaudeOpus4Thinking
| Self::ClaudeOpus4_1Thinking
| Self::ClaudeSonnet4Thinking
| Self::ClaudeSonnet4_5Thinking
| Self::Claude3_7SonnetThinking => AnthropicModelMode::Thinking {
budget_tokens: Some(4_096),
},

View File

@@ -50,7 +50,7 @@ impl WorktreeDelegate for WorktreeDelegateAdapter {
}
fn root_path(&self) -> String {
self.0.worktree_root_path().to_string_lossy().into_owned()
self.0.worktree_root_path().to_string_lossy().to_string()
}
async fn read_text_file(&self, path: &RelPath) -> Result<String> {
@@ -61,7 +61,7 @@ impl WorktreeDelegate for WorktreeDelegateAdapter {
self.0
.which(binary_name.as_ref())
.await
.map(|path| path.to_string_lossy().into_owned())
.map(|path| path.to_string_lossy().to_string())
}
async fn shell_env(&self) -> Vec<(String, String)> {

View File

@@ -290,7 +290,7 @@ fn collect_files(
folded_directory_names.join(&path_including_worktree_name);
} else {
folded_directory_names =
folded_directory_names.join(RelPath::unix(&filename).unwrap());
folded_directory_names.join(RelPath::new(&filename).unwrap());
}
continue;
}
@@ -320,7 +320,7 @@ fn collect_files(
directory_stack.push(entry.path.clone());
} else {
let entry_name =
folded_directory_names.join(RelPath::unix(&filename).unwrap());
folded_directory_names.join(RelPath::new(&filename).unwrap());
let entry_name = entry_name.display(path_style);
events_tx.unbounded_send(Ok(SlashCommandEvent::StartSection {
icon: IconName::Folder,
@@ -355,7 +355,9 @@ fn collect_files(
let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot())?;
append_buffer_to_output(
&snapshot,
Some(path_including_worktree_name.display(path_style).as_ref()),
Some(Path::new(
path_including_worktree_name.display(path_style).as_ref(),
)),
&mut output,
)
.log_err();
@@ -380,18 +382,18 @@ fn collect_files(
}
pub fn codeblock_fence_for_path(
path: Option<&str>,
path: Option<&Path>,
row_range: Option<RangeInclusive<u32>>,
) -> String {
let mut text = String::new();
write!(text, "```").unwrap();
if let Some(path) = path {
if let Some(extension) = Path::new(path).extension().and_then(|ext| ext.to_str()) {
if let Some(extension) = path.extension().and_then(|ext| ext.to_str()) {
write!(text, "{} ", extension).unwrap();
}
write!(text, "{path}").unwrap();
write!(text, "{}", path.display()).unwrap();
} else {
write!(text, "untitled").unwrap();
}
@@ -411,12 +413,12 @@ pub struct FileCommandMetadata {
pub fn build_entry_output_section(
range: Range<usize>,
path: Option<&str>,
path: Option<&Path>,
is_directory: bool,
line_range: Option<Range<u32>>,
) -> SlashCommandOutputSection<usize> {
let mut label = if let Some(path) = path {
path.to_string()
path.to_string_lossy().to_string()
} else {
"untitled".to_string()
};
@@ -439,7 +441,7 @@ pub fn build_entry_output_section(
} else {
path.and_then(|path| {
serde_json::to_value(FileCommandMetadata {
path: path.to_string(),
path: path.to_string_lossy().to_string(),
})
.ok()
})
@@ -503,7 +505,7 @@ mod custom_path_matcher {
.iter()
.zip(self.sources_with_trailing_slash.iter())
.any(|(source, with_slash)| {
let as_bytes = other.as_unix_str().as_bytes();
let as_bytes = other.as_str().as_bytes();
let with_slash = if source.ends_with('/') {
source.as_bytes()
} else {
@@ -512,12 +514,12 @@ mod custom_path_matcher {
as_bytes.starts_with(with_slash) || as_bytes.ends_with(source.as_bytes())
})
|| self.glob.is_match(other.as_std_path())
|| self.glob.is_match(other)
|| self.check_with_end_separator(other)
}
fn check_with_end_separator(&self, path: &RelPath) -> bool {
let path_str = path.as_unix_str();
let path_str = path.as_str();
let separator = "/";
if path_str.ends_with(separator) {
false
@@ -530,7 +532,7 @@ mod custom_path_matcher {
pub fn append_buffer_to_output(
buffer: &BufferSnapshot,
path: Option<&str>,
path: Option<&Path>,
output: &mut SlashCommandOutput,
) -> Result<()> {
let prev_len = output.text.len();

View File

@@ -137,9 +137,7 @@ pub fn selections_creases(
None
};
let language_name = language_name.as_deref().unwrap_or("");
let filename = snapshot
.file_at(range.start)
.map(|file| file.full_path(cx).to_string_lossy().into_owned());
let filename = snapshot.file_at(range.start).map(|file| file.full_path(cx));
let text = if language_name == "markdown" {
selected_text
.lines()
@@ -189,9 +187,9 @@ pub fn selections_creases(
let start_line = range.start.row + 1;
let end_line = range.end.row + 1;
if start_line == end_line {
format!("{path}, Line {start_line}")
format!("{}, Line {}", path.display(), start_line)
} else {
format!("{path}, Lines {start_line} to {end_line}")
format!("{}, Lines {} to {}", path.display(), start_line, end_line)
}
} else {
"Quoted selection".to_string()

View File

@@ -7,8 +7,8 @@ use editor::Editor;
use gpui::{AppContext as _, Task, WeakEntity};
use language::{BufferSnapshot, LspAdapterDelegate};
use std::sync::Arc;
use std::sync::atomic::AtomicBool;
use ui::{App, IconName, SharedString, Window};
use std::{path::Path, sync::atomic::AtomicBool};
use ui::{App, IconName, Window};
use workspace::Workspace;
pub struct OutlineSlashCommand;
@@ -67,13 +67,13 @@ impl SlashCommand for OutlineSlashCommand {
};
let snapshot = buffer.read(cx).snapshot();
let path = snapshot.resolve_file_path(true, cx);
let path = snapshot.resolve_file_path(cx, true);
cx.background_spawn(async move {
let outline = snapshot.outline(None);
let path = path.as_deref().unwrap_or("untitled");
let mut outline_text = format!("Symbols for {path}:\n");
let path = path.as_deref().unwrap_or(Path::new("untitled"));
let mut outline_text = format!("Symbols for {}:\n", path.display());
for item in &outline.path_candidates {
outline_text.push_str("- ");
outline_text.push_str(&item.string);
@@ -84,7 +84,7 @@ impl SlashCommand for OutlineSlashCommand {
sections: vec![SlashCommandOutputSection {
range: 0..outline_text.len(),
icon: IconName::ListTree,
label: SharedString::new(path),
label: path.to_string_lossy().to_string().into(),
metadata: None,
}],
text: outline_text,

View File

@@ -8,9 +8,12 @@ use editor::Editor;
use futures::future::join_all;
use gpui::{Task, WeakEntity};
use language::{BufferSnapshot, CodeLabel, HighlightId, LspAdapterDelegate};
use std::sync::{Arc, atomic::AtomicBool};
use std::{
path::PathBuf,
sync::{Arc, atomic::AtomicBool},
};
use ui::{ActiveTheme, App, Window, prelude::*};
use util::{ResultExt, paths::PathStyle};
use util::ResultExt;
use workspace::Workspace;
use crate::file_command::append_buffer_to_output;
@@ -69,42 +72,35 @@ impl SlashCommand for TabSlashCommand {
return Task::ready(Ok(Vec::new()));
}
let Some(workspace) = workspace.and_then(|workspace| workspace.upgrade()) else {
return Task::ready(Err(anyhow::anyhow!("no workspace")));
};
let active_item_path = workspace.update(cx, |workspace, cx| {
let snapshot = active_item_buffer(workspace, cx).ok()?;
snapshot.resolve_file_path(true, cx)
let active_item_path = workspace.as_ref().and_then(|workspace| {
workspace
.update(cx, |workspace, cx| {
let snapshot = active_item_buffer(workspace, cx).ok()?;
snapshot.resolve_file_path(cx, true)
})
.ok()
.flatten()
});
let path_style = workspace.read(cx).path_style(cx);
let current_query = arguments.last().cloned().unwrap_or_default();
let tab_items_search = tab_items_for_queries(
Some(workspace.downgrade()),
&[current_query],
cancel,
false,
window,
cx,
);
let tab_items_search =
tab_items_for_queries(workspace, &[current_query], cancel, false, window, cx);
let comment_id = cx.theme().syntax().highlight_id("comment").map(HighlightId);
window.spawn(cx, async move |_| {
let tab_items = tab_items_search.await?;
let run_command = tab_items.len() == 1;
let tab_completion_items = tab_items.into_iter().filter_map(|(path, ..)| {
let path = path?;
if argument_set.contains(&path) {
let path_string = path.as_deref()?.to_string_lossy().to_string();
if argument_set.contains(&path_string) {
return None;
}
if active_item_path.as_ref() == Some(&path) {
if active_item_path.is_some() && active_item_path == path {
return None;
}
let label = create_tab_completion_label(&path, path_style, comment_id);
let label = create_tab_completion_label(path.as_ref()?, comment_id);
Some(ArgumentCompletion {
label,
new_text: path,
new_text: path_string,
replace_previous_arguments: false,
after_completion: run_command.into(),
})
@@ -113,9 +109,8 @@ impl SlashCommand for TabSlashCommand {
let active_item_completion = active_item_path
.as_deref()
.map(|active_item_path| {
let path_string = active_item_path.to_string();
let label =
create_tab_completion_label(active_item_path, path_style, comment_id);
let path_string = active_item_path.to_string_lossy().to_string();
let label = create_tab_completion_label(active_item_path, comment_id);
ArgumentCompletion {
label,
new_text: path_string,
@@ -174,7 +169,7 @@ fn tab_items_for_queries(
strict_match: bool,
window: &mut Window,
cx: &mut App,
) -> Task<anyhow::Result<Vec<(Option<String>, BufferSnapshot, usize)>>> {
) -> Task<anyhow::Result<Vec<(Option<PathBuf>, BufferSnapshot, usize)>>> {
let empty_query = queries.is_empty() || queries.iter().all(|query| query.trim().is_empty());
let queries = queries.to_owned();
window.spawn(cx, async move |cx| {
@@ -184,7 +179,7 @@ fn tab_items_for_queries(
.update(cx, |workspace, cx| {
if strict_match && empty_query {
let snapshot = active_item_buffer(workspace, cx)?;
let full_path = snapshot.resolve_file_path(true, cx);
let full_path = snapshot.resolve_file_path(cx, true);
return anyhow::Ok(vec![(full_path, snapshot, 0)]);
}
@@ -206,7 +201,7 @@ fn tab_items_for_queries(
&& visited_buffers.insert(buffer.read(cx).remote_id())
{
let snapshot = buffer.read(cx).snapshot();
let full_path = snapshot.resolve_file_path(true, cx);
let full_path = snapshot.resolve_file_path(cx, true);
open_buffers.push((full_path, snapshot, *timestamp));
}
}
@@ -229,7 +224,10 @@ fn tab_items_for_queries(
let match_candidates = open_buffers
.iter()
.enumerate()
.filter_map(|(id, (full_path, ..))| Some((id, full_path.clone()?)))
.filter_map(|(id, (full_path, ..))| {
let path_string = full_path.as_deref()?.to_string_lossy().to_string();
Some((id, path_string))
})
.fold(HashMap::default(), |mut candidates, (id, path_string)| {
candidates
.entry(path_string)
@@ -251,7 +249,8 @@ fn tab_items_for_queries(
.iter()
.enumerate()
.filter_map(|(id, (full_path, ..))| {
Some(fuzzy::StringMatchCandidate::new(id, full_path.as_ref()?))
let path_string = full_path.as_deref()?.to_string_lossy().to_string();
Some(fuzzy::StringMatchCandidate::new(id, &path_string))
})
.collect::<Vec<_>>();
let mut processed_matches = HashSet::default();
@@ -303,15 +302,21 @@ fn active_item_buffer(
}
fn create_tab_completion_label(
path: &str,
path_style: PathStyle,
path: &std::path::Path,
comment_id: Option<HighlightId>,
) -> CodeLabel {
let (parent_path, file_name) = path_style.split(path);
let file_name = path
.file_name()
.map(|f| f.to_string_lossy())
.unwrap_or_default();
let parent_path = path
.parent()
.map(|p| p.to_string_lossy())
.unwrap_or_default();
let mut label = CodeLabel::default();
label.push_str(file_name, None);
label.push_str(&file_name, None);
label.push_str(" ", None);
label.push_str(parent_path.unwrap_or_default(), comment_id);
label.push_str(&parent_path, comment_id);
label.filter_range = 0..file_name.len();
label
}

View File

@@ -5,6 +5,7 @@ use language::{Buffer, OutlineItem, ParseStatus};
use project::Project;
use regex::Regex;
use std::fmt::Write;
use std::path::Path;
use text::Point;
/// For files over this size, instead of reading them (or including them in context),
@@ -142,7 +143,7 @@ pub struct BufferContent {
/// For smaller files, returns the full content.
pub async fn get_buffer_content_or_outline(
buffer: Entity<Buffer>,
path: Option<&str>,
path: Option<&Path>,
cx: &AsyncApp,
) -> Result<BufferContent> {
let file_size = buffer.read_with(cx, |buffer, _| buffer.text().len())?;
@@ -169,10 +170,15 @@ pub async fn get_buffer_content_or_outline(
let text = if let Some(path) = path {
format!(
"# File outline for {path} (file too large to show full content)\n\n{outline_text}",
"# File outline for {} (file too large to show full content)\n\n{}",
path.display(),
outline_text
)
} else {
format!("# File outline (file too large to show full content)\n\n{outline_text}",)
format!(
"# File outline (file too large to show full content)\n\n{}",
outline_text
)
};
Ok(BufferContent {
text,

View File

@@ -26,13 +26,13 @@ use language_model::{
use project::{AgentLocation, Project};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use std::{cmp, iter, mem, ops::Range, pin::Pin, sync::Arc, task::Poll};
use std::{cmp, iter, mem, ops::Range, path::PathBuf, pin::Pin, sync::Arc, task::Poll};
use streaming_diff::{CharOperation, StreamingDiff};
use streaming_fuzzy_matcher::StreamingFuzzyMatcher;
#[derive(Serialize)]
struct CreateFilePromptTemplate {
path: Option<String>,
path: Option<PathBuf>,
edit_description: String,
}
@@ -42,7 +42,7 @@ impl Template for CreateFilePromptTemplate {
#[derive(Serialize)]
struct EditFileXmlPromptTemplate {
path: Option<String>,
path: Option<PathBuf>,
edit_description: String,
}
@@ -52,7 +52,7 @@ impl Template for EditFileXmlPromptTemplate {
#[derive(Serialize)]
struct EditFileDiffFencedPromptTemplate {
path: Option<String>,
path: Option<PathBuf>,
edit_description: String,
}
@@ -115,7 +115,7 @@ impl EditAgent {
let conversation = conversation.clone();
let output = cx.spawn(async move |cx| {
let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot())?;
let path = cx.update(|cx| snapshot.resolve_file_path(true, cx))?;
let path = cx.update(|cx| snapshot.resolve_file_path(cx, true))?;
let prompt = CreateFilePromptTemplate {
path,
edit_description,
@@ -229,7 +229,7 @@ impl EditAgent {
let edit_format = self.edit_format;
let output = cx.spawn(async move |cx| {
let snapshot = buffer.read_with(cx, |buffer, _| buffer.snapshot())?;
let path = cx.update(|cx| snapshot.resolve_file_path(true, cx))?;
let path = cx.update(|cx| snapshot.resolve_file_path(cx, true))?;
let prompt = match edit_format {
EditFormat::XmlTags => EditFileXmlPromptTemplate {
path,

View File

@@ -554,7 +554,7 @@ fn resolve_path(
.context("Can't create file: invalid filename")?;
let new_file_path = parent_project_path.map(|parent| ProjectPath {
path: parent.path.join(RelPath::unix(file_name).unwrap()),
path: parent.path.join(RelPath::new(file_name).unwrap()),
..parent
});

View File

@@ -262,7 +262,7 @@ impl ToolCard for FindPathToolCard {
.children(self.paths.iter().enumerate().map(|(index, path)| {
let path_clone = path.clone();
let workspace_clone = workspace.clone();
let button_label = path.to_string_lossy().into_owned();
let button_label = path.to_string_lossy().to_string();
Button::new(("path", index), button_label)
.icon(IconName::ArrowUpRight)

View File

@@ -86,7 +86,6 @@ impl Tool for ListDirectoryTool {
_window: Option<AnyWindowHandle>,
cx: &mut App,
) -> ToolResult {
let path_style = project.read(cx).path_style(cx);
let input = match serde_json::from_value::<ListDirectoryToolInput>(input) {
Ok(input) => input,
Err(err) => return Task::ready(Err(anyhow!(err))).into(),
@@ -101,7 +100,7 @@ impl Tool for ListDirectoryTool {
.filter_map(|worktree| {
worktree.read(cx).root_entry().and_then(|entry| {
if entry.is_dir() {
Some(entry.path.display(path_style))
Some(entry.path.as_str())
} else {
None
}

View File

@@ -104,7 +104,7 @@ mod tests {
async fn test_to_absolute_path(cx: &mut TestAppContext) {
init_test(cx);
let temp_dir = TempDir::new().expect("Failed to create temp directory");
let temp_path = temp_dir.path().to_string_lossy().into_owned();
let temp_path = temp_dir.path().to_string_lossy().to_string();
let fs = FakeFs::new(cx.executor());
fs.insert_tree(

View File

@@ -261,8 +261,9 @@ impl Tool for ReadFileTool {
Ok(result)
} else {
// No line ranges specified, so check file size to see if it's too big.
let path_buf = std::path::PathBuf::from(&file_path);
let buffer_content =
outline::get_buffer_content_or_outline(buffer.clone(), Some(&file_path), cx)
outline::get_buffer_content_or_outline(buffer.clone(), Some(&path_buf), cx)
.await?;
action_log.update(cx, |log, cx| {

View File

@@ -139,25 +139,18 @@ impl Tool for TerminalTool {
env
});
let build_cmd = {
let input_command = input.command.clone();
move || {
ShellBuilder::new(
remote_shell.as_deref(),
&Shell::Program(get_default_system_shell()),
)
.redirect_stdin_to_dev_null()
.build(Some(input_command.clone()), &[])
}
};
let Some(window) = window else {
// Headless setup, a test or eval. Our terminal subsystem requires a workspace,
// so bypass it and provide a convincing imitation using a pty.
let task = cx.background_spawn(async move {
let env = env.await;
let pty_system = native_pty_system();
let (command, args) = build_cmd();
let (command, args) = ShellBuilder::new(
remote_shell.as_deref(),
&Shell::Program(get_default_system_shell()),
)
.redirect_stdin_to_dev_null()
.build(Some(input.command.clone()), &[]);
let mut cmd = CommandBuilder::new(command);
cmd.args(args);
for (k, v) in env {
@@ -194,10 +187,16 @@ impl Tool for TerminalTool {
};
};
let command = input.command.clone();
let terminal = cx.spawn({
let project = project.downgrade();
async move |cx| {
let (command, args) = build_cmd();
let (command, args) = ShellBuilder::new(
remote_shell.as_deref(),
&Shell::Program(get_default_system_shell()),
)
.redirect_stdin_to_dev_null()
.build(Some(input.command), &[]);
let env = env.await;
project
.update(cx, |project, cx| {
@@ -216,18 +215,18 @@ impl Tool for TerminalTool {
}
});
let command_markdown = cx.new(|cx| {
Markdown::new(
format!("```bash\n{}\n```", input.command).into(),
None,
None,
let command_markdown =
cx.new(|cx| Markdown::new(format!("```bash\n{}\n```", command).into(), None, None, cx));
let card = cx.new(|cx| {
TerminalToolCard::new(
command_markdown.clone(),
working_dir.clone(),
cx.entity_id(),
cx,
)
});
let card =
cx.new(|cx| TerminalToolCard::new(command_markdown, working_dir, cx.entity_id(), cx));
let output = cx.spawn({
let card = card.clone();
async move |cx| {
@@ -268,7 +267,7 @@ impl Tool for TerminalTool {
let previous_len = content.len();
let (processed_content, finished_with_empty_output) = process_content(
&content,
&input.command,
&command,
exit_status.map(portable_pty::ExitStatus::from),
);

View File

@@ -55,7 +55,6 @@ pub fn init(cx: &mut App) {
#[derive(Debug, Copy, Clone, Eq, Hash, PartialEq)]
pub enum Sound {
Joined,
GuestJoined,
Leave,
Mute,
Unmute,
@@ -68,7 +67,6 @@ impl Sound {
fn file(&self) -> &'static str {
match self {
Self::Joined => "joined_call",
Self::GuestJoined => "guest_joined_call",
Self::Leave => "leave_call",
Self::Mute => "mute",
Self::Unmute => "unmute",

View File

@@ -310,10 +310,10 @@ impl AutoUpdater {
// the app after an update, we use `set_restart_path` to run the auto
// update helper instead of the app, so that it can overwrite the app
// and then spawn the new binary.
#[cfg(target_os = "windows")]
let quit_subscription = Some(cx.on_app_quit(|_, _| finalize_auto_update_on_quit()));
#[cfg(not(target_os = "windows"))]
let quit_subscription = None;
let quit_subscription = Some(cx.on_app_quit(|_, _| async move {
#[cfg(target_os = "windows")]
finalize_auto_update_on_quit();
}));
cx.on_app_restart(|this, _| {
this.quit_subscription.take();
@@ -942,12 +942,11 @@ async fn install_release_windows(downloaded_installer: PathBuf) -> Result<Option
let helper_path = std::env::current_exe()?
.parent()
.context("No parent dir for Zed.exe")?
.join("tools")
.join("auto_update_helper.exe");
.join("tools\\auto_update_helper.exe");
Ok(Some(helper_path))
}
pub async fn finalize_auto_update_on_quit() {
pub fn finalize_auto_update_on_quit() {
let Some(installer_path) = std::env::current_exe()
.ok()
.and_then(|p| p.parent().map(|p| p.join("updates")))
@@ -960,14 +959,12 @@ pub async fn finalize_auto_update_on_quit() {
if flag_file.exists()
&& let Some(helper) = installer_path
.parent()
.map(|p| p.join("tools").join("auto_update_helper.exe"))
.map(|p| p.join("tools\\auto_update_helper.exe"))
{
let mut command = smol::process::Command::new(helper);
let mut command = std::process::Command::new(helper);
command.arg("--launch");
command.arg("false");
if let Ok(mut cmd) = command.spawn() {
_ = cmd.status().await;
}
let _ = command.spawn();
}
}

View File

@@ -160,7 +160,6 @@ pub(crate) fn perform_update(app_dir: &Path, hwnd: Option<isize>, launch: bool)
}
}
if launch {
#[allow(clippy::disallowed_methods, reason = "doesn't run in the main binary")]
let _ = std::process::Command::new(app_dir.join("Zed.exe"))
.creation_flags(CREATE_NEW_PROCESS_GROUP.0)
.spawn();

View File

@@ -22,6 +22,7 @@ pub struct BedrockModelCacheConfiguration {
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, EnumIter)]
pub enum Model {
// Anthropic models (already included)
#[default]
#[serde(rename = "claude-sonnet-4", alias = "claude-sonnet-4-latest")]
ClaudeSonnet4,
#[serde(
@@ -29,14 +30,6 @@ pub enum Model {
alias = "claude-sonnet-4-thinking-latest"
)]
ClaudeSonnet4Thinking,
#[default]
#[serde(rename = "claude-sonnet-4-5", alias = "claude-sonnet-4-5-latest")]
ClaudeSonnet4_5,
#[serde(
rename = "claude-sonnet-4-5-thinking",
alias = "claude-sonnet-4-5-thinking-latest"
)]
ClaudeSonnet4_5Thinking,
#[serde(rename = "claude-opus-4", alias = "claude-opus-4-latest")]
ClaudeOpus4,
#[serde(rename = "claude-opus-4-1", alias = "claude-opus-4-1-latest")]
@@ -151,14 +144,6 @@ impl Model {
Ok(Self::Claude3_7Sonnet)
} else if id.starts_with("claude-3-7-sonnet-thinking") {
Ok(Self::Claude3_7SonnetThinking)
} else if id.starts_with("claude-sonnet-4-5-thinking") {
Ok(Self::ClaudeSonnet4_5Thinking)
} else if id.starts_with("claude-sonnet-4-5") {
Ok(Self::ClaudeSonnet4_5)
} else if id.starts_with("claude-sonnet-4-thinking") {
Ok(Self::ClaudeSonnet4Thinking)
} else if id.starts_with("claude-sonnet-4") {
Ok(Self::ClaudeSonnet4)
} else {
anyhow::bail!("invalid model id {id}");
}
@@ -168,8 +153,6 @@ impl Model {
match self {
Model::ClaudeSonnet4 => "claude-sonnet-4",
Model::ClaudeSonnet4Thinking => "claude-sonnet-4-thinking",
Model::ClaudeSonnet4_5 => "claude-sonnet-4-5",
Model::ClaudeSonnet4_5Thinking => "claude-sonnet-4-5-thinking",
Model::ClaudeOpus4 => "claude-opus-4",
Model::ClaudeOpus4_1 => "claude-opus-4-1",
Model::ClaudeOpus4Thinking => "claude-opus-4-thinking",
@@ -231,9 +214,6 @@ impl Model {
Model::ClaudeSonnet4 | Model::ClaudeSonnet4Thinking => {
"anthropic.claude-sonnet-4-20250514-v1:0"
}
Model::ClaudeSonnet4_5 | Model::ClaudeSonnet4_5Thinking => {
"anthropic.claude-sonnet-4-5-20250929-v1:0"
}
Model::ClaudeOpus4 | Model::ClaudeOpus4Thinking => {
"anthropic.claude-opus-4-20250514-v1:0"
}
@@ -297,8 +277,6 @@ impl Model {
match self {
Self::ClaudeSonnet4 => "Claude Sonnet 4",
Self::ClaudeSonnet4Thinking => "Claude Sonnet 4 Thinking",
Self::ClaudeSonnet4_5 => "Claude Sonnet 4.5",
Self::ClaudeSonnet4_5Thinking => "Claude Sonnet 4.5 Thinking",
Self::ClaudeOpus4 => "Claude Opus 4",
Self::ClaudeOpus4_1 => "Claude Opus 4.1",
Self::ClaudeOpus4Thinking => "Claude Opus 4 Thinking",
@@ -368,8 +346,6 @@ impl Model {
| Self::ClaudeOpus4
| Self::ClaudeOpus4_1
| Self::ClaudeSonnet4Thinking
| Self::ClaudeSonnet4_5
| Self::ClaudeSonnet4_5Thinking
| Self::ClaudeOpus4Thinking
| Self::ClaudeOpus4_1Thinking => 200_000,
Self::AmazonNovaPremier => 1_000_000,
@@ -385,7 +361,6 @@ impl Model {
Self::Claude3Opus | Self::Claude3Sonnet | Self::Claude3_5Haiku => 4_096,
Self::Claude3_7Sonnet | Self::Claude3_7SonnetThinking => 128_000,
Self::ClaudeSonnet4 | Self::ClaudeSonnet4Thinking => 64_000,
Self::ClaudeSonnet4_5 | Self::ClaudeSonnet4_5Thinking => 64_000,
Self::ClaudeOpus4
| Self::ClaudeOpus4Thinking
| Self::ClaudeOpus4_1
@@ -410,9 +385,7 @@ impl Model {
| Self::ClaudeOpus4_1
| Self::ClaudeOpus4_1Thinking
| Self::ClaudeSonnet4
| Self::ClaudeSonnet4Thinking
| Self::ClaudeSonnet4_5
| Self::ClaudeSonnet4_5Thinking => 1.0,
| Self::ClaudeSonnet4Thinking => 1.0,
Self::Custom {
default_temperature,
..
@@ -436,8 +409,6 @@ impl Model {
| Self::ClaudeOpus4_1Thinking
| Self::ClaudeSonnet4
| Self::ClaudeSonnet4Thinking
| Self::ClaudeSonnet4_5
| Self::ClaudeSonnet4_5Thinking
| Self::Claude3_5Haiku => true,
// Amazon Nova models (all support tool use)
@@ -468,8 +439,6 @@ impl Model {
| Self::Claude3_7SonnetThinking
| Self::ClaudeSonnet4
| Self::ClaudeSonnet4Thinking
| Self::ClaudeSonnet4_5
| Self::ClaudeSonnet4_5Thinking
| Self::ClaudeOpus4
| Self::ClaudeOpus4Thinking
| Self::ClaudeOpus4_1
@@ -519,11 +488,9 @@ impl Model {
Model::Claude3_7SonnetThinking => BedrockModelMode::Thinking {
budget_tokens: Some(4096),
},
Model::ClaudeSonnet4Thinking | Model::ClaudeSonnet4_5Thinking => {
BedrockModelMode::Thinking {
budget_tokens: Some(4096),
}
}
Model::ClaudeSonnet4Thinking => BedrockModelMode::Thinking {
budget_tokens: Some(4096),
},
Model::ClaudeOpus4Thinking | Model::ClaudeOpus4_1Thinking => {
BedrockModelMode::Thinking {
budget_tokens: Some(4096),
@@ -575,8 +542,6 @@ impl Model {
| Model::Claude3_7SonnetThinking
| Model::ClaudeSonnet4
| Model::ClaudeSonnet4Thinking
| Model::ClaudeSonnet4_5
| Model::ClaudeSonnet4_5Thinking
| Model::ClaudeOpus4
| Model::ClaudeOpus4Thinking
| Model::ClaudeOpus4_1
@@ -610,8 +575,6 @@ impl Model {
| Model::Claude3_7SonnetThinking
| Model::ClaudeSonnet4
| Model::ClaudeSonnet4Thinking
| Model::ClaudeSonnet4_5
| Model::ClaudeSonnet4_5Thinking
| Model::Claude3Haiku
| Model::Claude3Sonnet
| Model::MetaLlama321BInstructV1
@@ -629,9 +592,7 @@ impl Model {
| Model::Claude3_7Sonnet
| Model::Claude3_7SonnetThinking
| Model::ClaudeSonnet4
| Model::ClaudeSonnet4Thinking
| Model::ClaudeSonnet4_5
| Model::ClaudeSonnet4_5Thinking,
| Model::ClaudeSonnet4Thinking,
"apac",
) => Ok(format!("{}.{}", region_group, model_id)),
@@ -670,10 +631,6 @@ mod tests {
Model::ClaudeSonnet4.cross_region_inference_id("eu-west-1")?,
"eu.anthropic.claude-sonnet-4-20250514-v1:0"
);
assert_eq!(
Model::ClaudeSonnet4_5.cross_region_inference_id("eu-west-1")?,
"eu.anthropic.claude-sonnet-4-5-20250929-v1:0"
);
assert_eq!(
Model::Claude3Sonnet.cross_region_inference_id("eu-west-1")?,
"eu.anthropic.claude-3-sonnet-20240229-v1:0"

View File

@@ -23,7 +23,7 @@ use livekit_client::{self as livekit, AudioStream, TrackSid};
use postage::{sink::Sink, stream::Stream, watch};
use project::Project;
use settings::Settings as _;
use std::{future::Future, mem, rc::Rc, sync::Arc, time::Duration, time::Instant};
use std::{future::Future, mem, rc::Rc, sync::Arc, time::Duration};
use util::{ResultExt, TryFutureExt, paths::PathStyle, post_inc};
pub const RECONNECT_TIMEOUT: Duration = Duration::from_secs(30);
@@ -86,7 +86,6 @@ pub struct Room {
room_update_completed_rx: watch::Receiver<Option<()>>,
pending_room_update: Option<Task<()>>,
maintain_connection: Option<Task<Option<()>>>,
created: Instant,
}
impl EventEmitter<Event> for Room {}
@@ -158,7 +157,6 @@ impl Room {
maintain_connection: Some(maintain_connection),
room_update_completed_tx,
room_update_completed_rx,
created: cx.background_executor().now(),
}
}
@@ -829,17 +827,7 @@ impl Room {
},
);
// When joining a room start_room_connection gets
// called but we have already played the join sound.
// Dont play extra sounds over that.
if this.created.elapsed() > Duration::from_millis(100) {
if let proto::ChannelRole::Guest = role {
Audio::play_sound(Sound::GuestJoined, cx);
} else {
Audio::play_sound(Sound::Joined, cx);
}
}
Audio::play_sound(Sound::Joined, cx);
if let Some(livekit_participants) = &livekit_participants
&& let Some(livekit_participant) = livekit_participants
.get(&ParticipantIdentity(user.id.to_string()))

View File

@@ -1,4 +1,3 @@
#![allow(clippy::disallowed_methods, reason = "build scripts are exempt")]
use std::process::Command;
fn main() {

View File

@@ -1,7 +1,3 @@
#![allow(
clippy::disallowed_methods,
reason = "We are not in an async environment, so std::process::Command is fine"
)]
#![cfg_attr(
any(target_os = "linux", target_os = "freebsd", target_os = "windows"),
allow(dead_code)
@@ -143,7 +139,7 @@ fn parse_path_with_position(argument_str: &str) -> anyhow::Result<String> {
}
.with_context(|| format!("parsing as path with position {argument_str}"))?,
};
Ok(canonicalized.to_string(|path| path.to_string_lossy().into_owned()))
Ok(canonicalized.to_string(|path| path.to_string_lossy().to_string()))
}
fn parse_path_in_wsl(source: &str, wsl: &str) -> Result<String> {
@@ -320,12 +316,12 @@ fn main() -> Result<()> {
urls.push(path.to_string());
} else if path == "-" && args.paths_with_position.len() == 1 {
let file = NamedTempFile::new()?;
paths.push(file.path().to_string_lossy().into_owned());
paths.push(file.path().to_string_lossy().to_string());
let (file, _) = file.keep()?;
stdin_tmp_file = Some(file);
} else if let Some(file) = anonymous_fd(path) {
let tmp_file = NamedTempFile::new()?;
paths.push(tmp_file.path().to_string_lossy().into_owned());
paths.push(tmp_file.path().to_string_lossy().to_string());
let (tmp_file, _) = tmp_file.keep()?;
anonymous_fd_tmp_files.push((file, tmp_file));
} else if let Some(wsl) = wsl {

View File

@@ -858,7 +858,7 @@ mod tests {
.enumerate()
.filter_map(|(i, path)| {
Some((
Arc::from(RelPath::unix(path).ok()?),
Arc::from(RelPath::new(path).ok()?),
ProjectEntryId::from_proto(i as u64 + 1),
PathChange::Added,
))

View File

@@ -55,9 +55,6 @@ pub const CLIENT_SUPPORTS_STATUS_MESSAGES_HEADER_NAME: &str =
pub const SERVER_SUPPORTS_STATUS_MESSAGES_HEADER_NAME: &str =
"x-zed-server-supports-status-messages";
/// The name of the header used by the client to indicate that it supports receiving xAI models.
pub const CLIENT_SUPPORTS_X_AI_HEADER_NAME: &str = "x-zed-client-supports-x-ai";
#[derive(Debug, PartialEq, Clone, Copy, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum UsageLimit {
@@ -147,7 +144,6 @@ pub enum LanguageModelProvider {
Anthropic,
OpenAi,
Google,
XAi,
}
#[derive(Debug, Clone, Serialize, Deserialize)]

View File

@@ -43,22 +43,11 @@ pub struct PredictEditsRequest {
pub prompt_format: PromptFormat,
}
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, EnumIter)]
#[derive(Default, Debug, Clone, Copy, Serialize, Deserialize, PartialEq, EnumIter)]
pub enum PromptFormat {
#[default]
MarkedExcerpt,
LabeledSections,
/// Prompt format intended for use via zeta_cli
OnlySnippets,
}
impl PromptFormat {
pub const DEFAULT: PromptFormat = PromptFormat::LabeledSections;
}
impl Default for PromptFormat {
fn default() -> Self {
Self::DEFAULT
}
}
impl PromptFormat {
@@ -72,7 +61,6 @@ impl std::fmt::Display for PromptFormat {
match self {
PromptFormat::MarkedExcerpt => write!(f, "Marked Excerpt"),
PromptFormat::LabeledSections => write!(f, "Labeled Sections"),
PromptFormat::OnlySnippets => write!(f, "Only Snippets"),
}
}
}
@@ -112,13 +100,13 @@ pub struct ReferencedDeclaration {
/// Index within `signatures`.
#[serde(skip_serializing_if = "Option::is_none", default)]
pub parent_index: Option<usize>,
pub score_components: DeclarationScoreComponents,
pub score_components: ScoreComponents,
pub signature_score: f32,
pub declaration_score: f32,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DeclarationScoreComponents {
pub struct ScoreComponents {
pub is_same_file: bool,
pub is_referenced_nearby: bool,
pub is_referenced_in_breadcrumb: bool,
@@ -128,12 +116,12 @@ pub struct DeclarationScoreComponents {
pub reference_line_distance: u32,
pub declaration_line_distance: u32,
pub declaration_line_distance_rank: usize,
pub excerpt_vs_item_jaccard: f32,
pub excerpt_vs_signature_jaccard: f32,
pub containing_range_vs_item_jaccard: f32,
pub containing_range_vs_signature_jaccard: f32,
pub adjacent_vs_item_jaccard: f32,
pub adjacent_vs_signature_jaccard: f32,
pub excerpt_vs_item_weighted_overlap: f32,
pub excerpt_vs_signature_weighted_overlap: f32,
pub containing_range_vs_item_weighted_overlap: f32,
pub containing_range_vs_signature_weighted_overlap: f32,
pub adjacent_vs_item_weighted_overlap: f32,
pub adjacent_vs_signature_weighted_overlap: f32,
}

View File

@@ -54,8 +54,6 @@ pub fn system_prompt(format: PromptFormat) -> &'static str {
match format {
PromptFormat::MarkedExcerpt => MARKED_EXCERPT_SYSTEM_PROMPT,
PromptFormat::LabeledSections => LABELED_SECTIONS_SYSTEM_PROMPT,
// only intended for use via zeta_cli
PromptFormat::OnlySnippets => "",
}
}
@@ -70,7 +68,7 @@ pub struct PlannedSnippet<'a> {
}
#[derive(EnumIter, Clone, Copy, PartialEq, Eq, Hash, Debug, PartialOrd, Ord)]
pub enum DeclarationStyle {
pub enum SnippetStyle {
Signature,
Declaration,
}
@@ -84,10 +82,10 @@ pub struct SectionLabels {
impl<'a> PlannedPrompt<'a> {
/// Greedy one-pass knapsack algorithm to populate the prompt plan. Does the following:
///
/// Initializes a priority queue by populating it with each snippet, finding the
/// DeclarationStyle that minimizes `score_density = score / snippet.range(style).len()`. When a
/// "signature" snippet is popped, insert an entry for the "declaration" variant that reflects
/// the cost of upgrade.
/// Initializes a priority queue by populating it with each snippet, finding the SnippetStyle
/// that minimizes `score_density = score / snippet.range(style).len()`. When a "signature"
/// snippet is popped, insert an entry for the "declaration" variant that reflects the cost of
/// upgrade.
///
/// TODO: Implement an early halting condition. One option might be to have another priority
/// queue where the score is the size, and update it accordingly. Another option might be to
@@ -131,13 +129,13 @@ impl<'a> PlannedPrompt<'a> {
struct QueueEntry {
score_density: OrderedFloat<f32>,
declaration_index: usize,
style: DeclarationStyle,
style: SnippetStyle,
}
// Initialize priority queue with the best score for each snippet.
let mut queue: BinaryHeap<QueueEntry> = BinaryHeap::new();
for (declaration_index, declaration) in request.referenced_declarations.iter().enumerate() {
let (style, score_density) = DeclarationStyle::iter()
let (style, score_density) = SnippetStyle::iter()
.map(|style| {
(
style,
@@ -186,7 +184,7 @@ impl<'a> PlannedPrompt<'a> {
this.budget_used += additional_bytes;
this.add_parents(&mut included_parents, additional_parents);
let planned_snippet = match queue_entry.style {
DeclarationStyle::Signature => {
SnippetStyle::Signature => {
let Some(text) = declaration.text.get(declaration.signature_range.clone())
else {
return Err(anyhow!(
@@ -203,7 +201,7 @@ impl<'a> PlannedPrompt<'a> {
text_is_truncated: declaration.text_is_truncated,
}
}
DeclarationStyle::Declaration => PlannedSnippet {
SnippetStyle::Declaration => PlannedSnippet {
path: declaration.path.clone(),
range: declaration.range.clone(),
text: &declaration.text,
@@ -213,13 +211,11 @@ impl<'a> PlannedPrompt<'a> {
this.snippets.push(planned_snippet);
// When a Signature is consumed, insert an entry for Definition style.
if queue_entry.style == DeclarationStyle::Signature {
let signature_size = declaration_size(&declaration, DeclarationStyle::Signature);
let declaration_size =
declaration_size(&declaration, DeclarationStyle::Declaration);
let signature_score = declaration_score(&declaration, DeclarationStyle::Signature);
let declaration_score =
declaration_score(&declaration, DeclarationStyle::Declaration);
if queue_entry.style == SnippetStyle::Signature {
let signature_size = declaration_size(&declaration, SnippetStyle::Signature);
let declaration_size = declaration_size(&declaration, SnippetStyle::Declaration);
let signature_score = declaration_score(&declaration, SnippetStyle::Signature);
let declaration_score = declaration_score(&declaration, SnippetStyle::Declaration);
let score_diff = declaration_score - signature_score;
let size_diff = declaration_size.saturating_sub(signature_size);
@@ -227,7 +223,7 @@ impl<'a> PlannedPrompt<'a> {
queue.push(QueueEntry {
declaration_index: queue_entry.declaration_index,
score_density: OrderedFloat(score_diff / (size_diff as f32)),
style: DeclarationStyle::Declaration,
style: SnippetStyle::Declaration,
});
}
}
@@ -347,7 +343,6 @@ impl<'a> PlannedPrompt<'a> {
self.request.excerpt_range.start + self.request.cursor_offset,
CURSOR_MARKER,
)],
PromptFormat::OnlySnippets => vec![],
};
let mut prompt = String::new();
@@ -437,13 +432,12 @@ impl<'a> PlannedPrompt<'a> {
}
writeln!(output, "```{}", file_path.display()).ok();
let mut skipped_last_snippet = false;
for (snippet, range) in disjoint_snippets {
let section_index = section_ranges.len();
match self.request.prompt_format {
PromptFormat::MarkedExcerpt | PromptFormat::OnlySnippets => {
if range.start > 0 && !skipped_last_snippet {
PromptFormat::MarkedExcerpt => {
if range.start > 0 {
output.push_str("\n");
}
}
@@ -460,38 +454,25 @@ impl<'a> PlannedPrompt<'a> {
}
if is_excerpt_file {
if self.request.prompt_format == PromptFormat::OnlySnippets {
if range.start >= self.request.excerpt_range.start
&& range.end <= self.request.excerpt_range.end
{
skipped_last_snippet = true;
} else {
skipped_last_snippet = false;
output.push_str(snippet.text);
excerpt_index = Some(section_index);
let mut last_offset = range.start;
let mut i = 0;
while i < excerpt_file_insertions.len() {
let (offset, insertion) = &excerpt_file_insertions[i];
let found = *offset >= range.start && *offset <= range.end;
if found {
output.push_str(
&snippet.text[last_offset - range.start..offset - range.start],
);
output.push_str(insertion);
last_offset = *offset;
excerpt_file_insertions.remove(i);
continue;
}
} else {
let mut last_offset = range.start;
let mut i = 0;
while i < excerpt_file_insertions.len() {
let (offset, insertion) = &excerpt_file_insertions[i];
let found = *offset >= range.start && *offset <= range.end;
if found {
excerpt_index = Some(section_index);
output.push_str(
&snippet.text[last_offset - range.start..offset - range.start],
);
output.push_str(insertion);
last_offset = *offset;
excerpt_file_insertions.remove(i);
continue;
}
i += 1;
}
skipped_last_snippet = false;
output.push_str(&snippet.text[last_offset - range.start..]);
i += 1;
}
output.push_str(&snippet.text[last_offset - range.start..]);
} else {
skipped_last_snippet = false;
output.push_str(snippet.text);
}
@@ -502,30 +483,26 @@ impl<'a> PlannedPrompt<'a> {
}
Ok(SectionLabels {
// TODO: Clean this up
excerpt_index: match self.request.prompt_format {
PromptFormat::OnlySnippets => 0,
_ => excerpt_index.context("bug: no snippet found for excerpt")?,
},
excerpt_index: excerpt_index.context("bug: no snippet found for excerpt")?,
section_ranges,
})
}
}
fn declaration_score_density(declaration: &ReferencedDeclaration, style: DeclarationStyle) -> f32 {
fn declaration_score_density(declaration: &ReferencedDeclaration, style: SnippetStyle) -> f32 {
declaration_score(declaration, style) / declaration_size(declaration, style) as f32
}
fn declaration_score(declaration: &ReferencedDeclaration, style: DeclarationStyle) -> f32 {
fn declaration_score(declaration: &ReferencedDeclaration, style: SnippetStyle) -> f32 {
match style {
DeclarationStyle::Signature => declaration.signature_score,
DeclarationStyle::Declaration => declaration.declaration_score,
SnippetStyle::Signature => declaration.signature_score,
SnippetStyle::Declaration => declaration.declaration_score,
}
}
fn declaration_size(declaration: &ReferencedDeclaration, style: DeclarationStyle) -> usize {
fn declaration_size(declaration: &ReferencedDeclaration, style: SnippetStyle) -> usize {
match style {
DeclarationStyle::Signature => declaration.signature_range.len(),
DeclarationStyle::Declaration => declaration.text.len(),
SnippetStyle::Signature => declaration.signature_range.len(),
SnippetStyle::Declaration => declaration.text.len(),
}
}

View File

@@ -1408,12 +1408,12 @@ async fn test_share_project(
project_b.read_with(cx_b, |project, cx| {
let worktree = project.worktrees(cx).next().unwrap().read(cx);
assert_eq!(
worktree.paths().collect::<Vec<_>>(),
worktree.paths().map(AsRef::as_ref).collect::<Vec<_>>(),
[
rel_path(".gitignore"),
rel_path("a.txt"),
rel_path("b.txt"),
rel_path("ignored-dir"),
Path::new(".gitignore"),
Path::new("a.txt"),
Path::new("b.txt"),
Path::new("ignored-dir"),
]
);
});
@@ -1433,14 +1433,14 @@ async fn test_share_project(
project_b.read_with(cx_b, |project, cx| {
let worktree = project.worktrees(cx).next().unwrap().read(cx);
assert_eq!(
worktree.paths().collect::<Vec<_>>(),
worktree.paths().map(AsRef::as_ref).collect::<Vec<_>>(),
[
rel_path(".gitignore"),
rel_path("a.txt"),
rel_path("b.txt"),
rel_path("ignored-dir"),
rel_path("ignored-dir/c.txt"),
rel_path("ignored-dir/d.txt"),
Path::new(".gitignore"),
Path::new("a.txt"),
Path::new("b.txt"),
Path::new("ignored-dir"),
Path::new("ignored-dir/c.txt"),
Path::new("ignored-dir/d.txt"),
]
);
});

View File

@@ -632,16 +632,13 @@ async fn test_following_tab_order(
let pane_paths = |pane: &Entity<workspace::Pane>, cx: &mut VisualTestContext| {
pane.update(cx, |pane, cx| {
pane.items()
.map(|item| item.project_path(cx).unwrap().path)
.map(|item| item.project_path(cx).unwrap().path.as_str().to_owned())
.collect::<Vec<_>>()
})
};
//Verify that the tabs opened in the order we expect
assert_eq!(
&pane_paths(&pane_a, cx_a),
&[rel_path("1.txt").into(), rel_path("3.txt").into()]
);
assert_eq!(&pane_paths(&pane_a, cx_a), &["1.txt", "3.txt"]);
//Follow client B as client A
workspace_a.update_in(cx_a, |workspace, window, cx| {
@@ -659,14 +656,7 @@ async fn test_following_tab_order(
executor.run_until_parked();
// Verify that newly opened followed file is at the end
assert_eq!(
&pane_paths(&pane_a, cx_a),
&[
rel_path("1.txt").into(),
rel_path("3.txt").into(),
rel_path("2.txt").into()
]
);
assert_eq!(&pane_paths(&pane_a, cx_a), &["1.txt", "3.txt", "2.txt"]);
//Open just 1 on client B
workspace_b
@@ -675,21 +665,11 @@ async fn test_following_tab_order(
})
.await
.unwrap();
assert_eq!(
&pane_paths(&pane_b, cx_b),
&[rel_path("2.txt").into(), rel_path("1.txt").into()]
);
assert_eq!(&pane_paths(&pane_b, cx_b), &["2.txt", "1.txt"]);
executor.run_until_parked();
// Verify that following into 1 did not reorder
assert_eq!(
&pane_paths(&pane_a, cx_a),
&[
rel_path("1.txt").into(),
rel_path("3.txt").into(),
rel_path("2.txt").into()
]
);
assert_eq!(&pane_paths(&pane_a, cx_a), &["1.txt", "3.txt", "2.txt"]);
}
#[gpui::test(iterations = 10)]

View File

@@ -1699,8 +1699,13 @@ async fn test_project_reconnect(
);
assert!(worktree_a3.read(cx).has_update_observer());
assert_eq!(
worktree_a3.read(cx).snapshot().paths().collect::<Vec<_>>(),
vec![rel_path("w.txt"), rel_path("x.txt"), rel_path("y.txt")]
worktree_a3
.read(cx)
.snapshot()
.paths()
.map(|p| p.as_str())
.collect::<Vec<_>>(),
vec!["w.txt", "x.txt", "y.txt"]
);
});
@@ -1732,8 +1737,9 @@ async fn test_project_reconnect(
.read(cx)
.snapshot()
.paths()
.map(|p| p.as_str())
.collect::<Vec<_>>(),
vec![rel_path("w.txt"), rel_path("x.txt"), rel_path("y.txt")]
vec!["w.txt", "x.txt", "y.txt"]
);
});
@@ -1827,7 +1833,7 @@ async fn test_project_reconnect(
.read(cx)
.snapshot()
.paths()
.map(|p| p.as_unix_str())
.map(|p| p.as_str())
.collect::<Vec<_>>(),
vec!["z.txt"]
);
@@ -2465,39 +2471,39 @@ async fn test_propagate_saves_and_fs_changes(
worktree_a.read_with(cx_a, |tree, _| {
assert_eq!(
tree.paths().collect::<Vec<_>>(),
[rel_path("file1.js"), rel_path("file3"), rel_path("file4")]
tree.paths().map(|p| p.as_str()).collect::<Vec<_>>(),
["file1.js", "file3", "file4"]
)
});
worktree_b.read_with(cx_b, |tree, _| {
assert_eq!(
tree.paths().collect::<Vec<_>>(),
[rel_path("file1.js"), rel_path("file3"), rel_path("file4")]
tree.paths().map(|p| p.as_str()).collect::<Vec<_>>(),
["file1.js", "file3", "file4"]
)
});
worktree_c.read_with(cx_c, |tree, _| {
assert_eq!(
tree.paths().collect::<Vec<_>>(),
[rel_path("file1.js"), rel_path("file3"), rel_path("file4")]
tree.paths().map(|p| p.as_str()).collect::<Vec<_>>(),
["file1.js", "file3", "file4"]
)
});
// Ensure buffer files are updated as well.
buffer_a.read_with(cx_a, |buffer, _| {
assert_eq!(buffer.file().unwrap().path().as_ref(), rel_path("file1.js"));
assert_eq!(buffer.file().unwrap().path().as_str(), "file1.js");
assert_eq!(buffer.language().unwrap().name(), "JavaScript".into());
});
buffer_b.read_with(cx_b, |buffer, _| {
assert_eq!(buffer.file().unwrap().path().as_ref(), rel_path("file1.js"));
assert_eq!(buffer.file().unwrap().path().as_str(), "file1.js");
assert_eq!(buffer.language().unwrap().name(), "JavaScript".into());
});
buffer_c.read_with(cx_c, |buffer, _| {
assert_eq!(buffer.file().unwrap().path().as_ref(), rel_path("file1.js"));
assert_eq!(buffer.file().unwrap().path().as_str(), "file1.js");
assert_eq!(buffer.language().unwrap().name(), "JavaScript".into());
});
@@ -3211,15 +3217,15 @@ async fn test_fs_operations(
worktree_a.read_with(cx_a, |worktree, _| {
assert_eq!(
worktree.paths().collect::<Vec<_>>(),
[rel_path("a.txt"), rel_path("b.txt"), rel_path("c.txt")]
worktree.paths().map(|p| p.as_str()).collect::<Vec<_>>(),
["a.txt", "b.txt", "c.txt"]
);
});
worktree_b.read_with(cx_b, |worktree, _| {
assert_eq!(
worktree.paths().collect::<Vec<_>>(),
[rel_path("a.txt"), rel_path("b.txt"), rel_path("c.txt")]
worktree.paths().map(|p| p.as_str()).collect::<Vec<_>>(),
["a.txt", "b.txt", "c.txt"]
);
});
@@ -3234,17 +3240,14 @@ async fn test_fs_operations(
worktree_a.read_with(cx_a, |worktree, _| {
assert_eq!(
worktree.paths().collect::<Vec<_>>(),
[rel_path("a.txt"), rel_path("b.txt"), rel_path("d.txt")]
worktree.paths().map(|p| p.as_str()).collect::<Vec<_>>(),
["a.txt", "b.txt", "d.txt"]
);
});
worktree_b.read_with(cx_b, |worktree, _| {
assert_eq!(
worktree
.paths()
.map(|p| p.as_unix_str())
.collect::<Vec<_>>(),
worktree.paths().map(|p| p.as_str()).collect::<Vec<_>>(),
["a.txt", "b.txt", "d.txt"]
);
});
@@ -3260,20 +3263,14 @@ async fn test_fs_operations(
worktree_a.read_with(cx_a, |worktree, _| {
assert_eq!(
worktree
.paths()
.map(|p| p.as_unix_str())
.collect::<Vec<_>>(),
worktree.paths().map(|p| p.as_str()).collect::<Vec<_>>(),
["DIR", "a.txt", "b.txt", "d.txt"]
);
});
worktree_b.read_with(cx_b, |worktree, _| {
assert_eq!(
worktree
.paths()
.map(|p| p.as_unix_str())
.collect::<Vec<_>>(),
worktree.paths().map(|p| p.as_str()).collect::<Vec<_>>(),
["DIR", "a.txt", "b.txt", "d.txt"]
);
});
@@ -3389,20 +3386,14 @@ async fn test_fs_operations(
worktree_a.read_with(cx_a, |worktree, _| {
assert_eq!(
worktree
.paths()
.map(|p| p.as_unix_str())
.collect::<Vec<_>>(),
worktree.paths().map(|p| p.as_str()).collect::<Vec<_>>(),
["a.txt", "b.txt", "d.txt", "f.txt"]
);
});
worktree_b.read_with(cx_b, |worktree, _| {
assert_eq!(
worktree
.paths()
.map(|p| p.as_unix_str())
.collect::<Vec<_>>(),
worktree.paths().map(|p| p.as_str()).collect::<Vec<_>>(),
["a.txt", "b.txt", "d.txt", "f.txt"]
);
});
@@ -3416,20 +3407,14 @@ async fn test_fs_operations(
worktree_a.read_with(cx_a, |worktree, _| {
assert_eq!(
worktree
.paths()
.map(|p| p.as_unix_str())
.collect::<Vec<_>>(),
worktree.paths().map(|p| p.as_str()).collect::<Vec<_>>(),
["a.txt", "b.txt", "f.txt"]
);
});
worktree_b.read_with(cx_b, |worktree, _| {
assert_eq!(
worktree
.paths()
.map(|p| p.as_unix_str())
.collect::<Vec<_>>(),
worktree.paths().map(|p| p.as_str()).collect::<Vec<_>>(),
["a.txt", "b.txt", "f.txt"]
);
});

View File

@@ -973,7 +973,7 @@ impl RandomizedTest for ProjectCollaborationTest {
let dot_git_dir = repo_path.join(".git");
let contents = contents
.iter()
.map(|(path, contents)| (path.as_unix_str(), contents.clone()))
.map(|(path, contents)| (path.as_str(), contents.clone()))
.collect::<Vec<_>>();
if client.fs().metadata(&dot_git_dir).await?.is_none() {
client.fs().create_dir(&dot_git_dir).await?;
@@ -1031,7 +1031,7 @@ impl RandomizedTest for ProjectCollaborationTest {
let statuses = statuses
.iter()
.map(|(path, val)| (path.as_unix_str(), *val))
.map(|(path, val)| (path.as_str(), *val))
.collect::<Vec<_>>();
if client.fs().metadata(&dot_git_dir).await?.is_none() {
@@ -1463,7 +1463,7 @@ fn generate_git_operation(rng: &mut StdRng, client: &TestClient) -> GitOperation
paths
.iter()
.map(|path| {
RelPath::new(path.strip_prefix(repo_path).unwrap(), PathStyle::local())
RelPath::from_std_path(path.strip_prefix(repo_path).unwrap(), PathStyle::local())
.unwrap()
.to_rel_path_buf()
})

View File

@@ -0,0 +1,354 @@
use std::rc::Rc;
use call::{ActiveCall, Room};
use channel::ChannelStore;
use gpui::{AppContext, Entity, RenderOnce, WeakEntity};
use project::Project;
use ui::{
ActiveTheme, AnyElement, App, Avatar, Button, ButtonCommon, ButtonSize, ButtonStyle, Clickable,
Color, Context, ContextMenu, ContextMenuItem, Element, FluentBuilder, Icon, IconButton,
IconName, IconSize, IntoElement, Label, LabelCommon, LabelSize, ParentElement, PopoverMenu,
PopoverMenuHandle, Render, SelectableButton, SharedString, SplitButton, SplitButtonStyle,
Styled, StyledExt, TintColor, Toggleable, Tooltip, Window, div, h_flex, px, v_flex,
};
use workspace::Workspace;
/// Floating overlay shown on the left dock while the user is in a call,
/// surfacing the call's title and in-call controls (mute, deafen, screen
/// share, leave). Constructed and installed per-workspace in `init`.
pub struct CallOverlay {
    // Global active-call state; the overlay re-renders when it changes
    // (an observer is attached in `init`).
    active_call: Entity<ActiveCall>,
    // Used to resolve the channel name shown as the overlay title.
    channel_store: Entity<ChannelStore>,
    // The workspace's project; read for local/shared status in the controls.
    project: Entity<Project>,
    // Weak handle back to the owning workspace (used to query modal state).
    workspace: WeakEntity<Workspace>,
    // Handle controlling the screen-picker popover in `render_screen_list`.
    screen_share_popover_handle: PopoverMenuHandle<ContextMenu>,
}
impl CallOverlay {
    /// Builds the row of per-call control buttons (mic mute, audio
    /// mute/deafen, screen share) for the active room, returning an empty
    /// list when there is no call in progress.
    ///
    /// NOTE(review): this is WIP — the click handlers are stubbed out
    /// (`todo!()` comments), so the buttons render but do not act yet.
    pub(crate) fn render_call_controls(
        &self,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) -> Vec<AnyElement> {
        // No room means no active call: render no controls.
        let Some(room) = self.active_call.read(cx).room() else {
            return Vec::default();
        };
        let room = room.read(cx);
        let project = self.project.read(cx);
        // "Local" here also covers projects opened via a remote server.
        let is_local = project.is_local() || project.is_via_remote_server();
        let is_shared = is_local && project.is_shared();
        let is_muted = room.is_muted();
        let muted_by_user = room.muted_by_user();
        let is_deafened = room.is_deafened().unwrap_or(false);
        let is_screen_sharing = room.is_sharing_screen();
        let can_use_microphone = room.can_use_microphone();
        let can_share_projects = room.can_share_projects();
        let screen_sharing_supported = cx.is_screen_capture_supported();
        // NOTE(review): `is_shared`, `can_share_projects`, and
        // `is_connecting_to_project` are computed but never read below —
        // presumably reserved for controls still to be added. TODO confirm
        // or drop before landing.
        let is_connecting_to_project = self
            .workspace
            .update(cx, |workspace, cx| workspace.has_active_modal(window, cx))
            .unwrap_or(false);
        let mut children = Vec::new();
        // Mic mute toggle — only shown when the room grants mic access.
        if can_use_microphone {
            children.push(
                IconButton::new(
                    "mute-microphone",
                    if is_muted {
                        IconName::MicMute
                    } else {
                        IconName::Mic
                    },
                )
                .tooltip(move |window, cx| {
                    if is_muted {
                        if is_deafened {
                            // Unmuting the mic while deafened also restores
                            // incoming audio; warn the user in the tooltip.
                            Tooltip::with_meta(
                                "Unmute Microphone",
                                None,
                                "Audio will be unmuted",
                                window,
                                cx,
                            )
                        } else {
                            Tooltip::simple("Unmute Microphone", cx)
                        }
                    } else {
                        Tooltip::simple("Mute Microphone", cx)
                    }
                })
                .style(ButtonStyle::Subtle)
                .icon_size(IconSize::Small)
                .toggle_state(is_muted)
                .selected_icon_color(Color::Error)
                .on_click(move |_, _window, cx| {
                    // toggle_mute(&Default::default(), cx);
                    // todo!()
                })
                .into_any_element(),
            );
        }
        // Audio mute / deafen toggle — always shown.
        children.push(
            IconButton::new(
                "mute-sound",
                if is_deafened {
                    IconName::AudioOff
                } else {
                    IconName::AudioOn
                },
            )
            .style(ButtonStyle::Subtle)
            .selected_icon_color(Color::Error)
            .icon_size(IconSize::Small)
            .toggle_state(is_deafened)
            .tooltip(move |window, cx| {
                if is_deafened {
                    let label = "Unmute Audio";
                    if !muted_by_user {
                        // Undeafening implicitly unmutes the mic unless the
                        // user muted it explicitly themselves.
                        Tooltip::with_meta(label, None, "Microphone will be unmuted", window, cx)
                    } else {
                        Tooltip::simple(label, cx)
                    }
                } else {
                    let label = "Mute Audio";
                    if !muted_by_user {
                        Tooltip::with_meta(label, None, "Microphone will be muted", window, cx)
                    } else {
                        Tooltip::simple(label, cx)
                    }
                }
            })
            .on_click(move |_, _, cx| {
                // toggle_deafen(&Default::default(), cx))
                // todo!()
            })
            .into_any_element(),
        );
        // Screen-share toggle — only when mic access is granted and the
        // platform supports screen capture.
        if can_use_microphone && screen_sharing_supported {
            children.push(
                IconButton::new("screen-share", IconName::Screen)
                    .style(ButtonStyle::Subtle)
                    .icon_size(IconSize::Small)
                    .toggle_state(is_screen_sharing)
                    .selected_icon_color(Color::Error)
                    .tooltip(Tooltip::text(if is_screen_sharing {
                        "Stop Sharing Screen"
                    } else {
                        "Share Screen"
                    }))
                    .on_click(move |_, window, cx| {
                        let should_share = ActiveCall::global(cx)
                            .read(cx)
                            .room()
                            .is_some_and(|room| !room.read(cx).is_sharing_screen());
                        // NOTE(review): actually starting/stopping the share
                        // is still commented out below, so `should_share`
                        // is currently unused.
                        // window
                        //     .spawn(cx, async move |cx| {
                        //         let screen = if should_share {
                        //             // cx.update(|_, cx| {
                        //             //     // pick_default_screen(cx)}
                        //             //     // todo!()
                        //             // })?
                        //             // .await
                        //         } else {
                        //             Ok(None)
                        //         };
                        //         cx.update(|window, cx| {
                        //             // toggle_screen_sharing(screen, window, cx)
                        //             // todo!()
                        //         })?;
                        //         Result::<_, anyhow::Error>::Ok(())
                        //     })
                        //     .detach();
                        // self.render_screen_list().into_any_element(),
                    })
                    .into_any_element(),
            );
            // children.push(
            //     SplitButton::new(trigger.render(window, cx))
            //         .style(SplitButtonStyle::Transparent)
            //         .into_any_element(),
            // );
        }
        // Trailing right padding after the control row.
        children.push(div().pr_2().into_any_element());
        children
    }

    /// Popover listing the capturable screens; each entry shows the screen's
    /// label and resolution, with the currently shared screen accented.
    ///
    /// NOTE(review): not yet wired into `render_call_controls` (the caller
    /// there is commented out), and the per-entry click handler is a stub.
    fn render_screen_list(&self) -> impl IntoElement {
        PopoverMenu::new("screen-share-screen-list")
            .with_handle(self.screen_share_popover_handle.clone())
            .trigger(
                ui::ButtonLike::new_rounded_right("screen-share-screen-list-trigger")
                    .child(
                        h_flex()
                            .mx_neg_0p5()
                            .h_full()
                            .justify_center()
                            .child(Icon::new(IconName::ChevronDown).size(IconSize::XSmall)),
                    )
                    .toggle_state(self.screen_share_popover_handle.is_deployed()),
            )
            .menu(|window, cx| {
                let screens = cx.screen_capture_sources();
                Some(ContextMenu::build(window, cx, |context_menu, _, cx| {
                    // Screen enumeration is async: populate the menu once
                    // the source list resolves.
                    cx.spawn(async move |this: WeakEntity<ContextMenu>, cx| {
                        let screens = screens.await??;
                        this.update(cx, |this, cx| {
                            let active_screenshare_id = ActiveCall::global(cx)
                                .read(cx)
                                .room()
                                .and_then(|room| room.read(cx).shared_screen_id());
                            for screen in screens {
                                // Skip sources whose metadata can't be read.
                                let Ok(meta) = screen.metadata() else {
                                    continue;
                                };
                                let label = meta
                                    .label
                                    .clone()
                                    .unwrap_or_else(|| SharedString::from("Unknown screen"));
                                let resolution = SharedString::from(format!(
                                    "{} × {}",
                                    meta.resolution.width.0, meta.resolution.height.0
                                ));
                                this.push_item(ContextMenuItem::CustomEntry {
                                    entry_render: Box::new(move |_, _| {
                                        h_flex()
                                            .gap_2()
                                            .child(
                                                Icon::new(IconName::Screen)
                                                    .size(IconSize::XSmall)
                                                    .map(|this| {
                                                        // Accent the screen that
                                                        // is being shared now.
                                                        if active_screenshare_id == Some(meta.id) {
                                                            this.color(Color::Accent)
                                                        } else {
                                                            this.color(Color::Muted)
                                                        }
                                                    }),
                                            )
                                            .child(Label::new(label.clone()))
                                            .child(
                                                Label::new(resolution.clone())
                                                    .color(Color::Muted)
                                                    .size(LabelSize::Small),
                                            )
                                            .into_any()
                                    }),
                                    selectable: true,
                                    documentation_aside: None,
                                    handler: Rc::new(move |_, window, cx| {
                                        // toggle_screen_sharing(Ok(Some(screen.clone())), window, cx);
                                    }),
                                });
                            }
                        })
                    })
                    .detach_and_log_err(cx);
                    context_menu
                }))
            })
    }
}
impl Render for CallOverlay {
    /// Renders the overlay card: a header row with the call title and a
    /// footer row with the call controls and a "Leave" button. Renders
    /// nothing when there is no active call.
    fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
        let Some(room) = self.active_call.read(cx).room() else {
            return gpui::Empty.into_any_element();
        };
        // Title: the channel name when the call is a channel call,
        // otherwise "Unknown".
        let title = if let Some(channel_id) = room.read(cx).channel_id()
            && let Some(channel) = self.channel_store.read(cx).channel_for_id(channel_id)
        {
            channel.name.clone()
        } else {
            "Unknown".into()
        };
        div()
            .p_1()
            .child(
                v_flex()
                    .elevation_3(cx)
                    .bg(cx.theme().colors().editor_background)
                    .p_2()
                    .w_full()
                    .gap_2()
                    // Header: audio icon + call title, chevron on the right.
                    // NOTE(review): the chevron has no click handler yet —
                    // presumably a collapse/expand affordance to come.
                    .child(
                        h_flex()
                            .justify_between()
                            .child(
                                h_flex()
                                    .gap_1()
                                    .child(
                                        Icon::new(IconName::Audio)
                                            .color(Color::VersionControlAdded),
                                    )
                                    .child(Label::new(title)),
                            )
                            .child(Icon::new(IconName::ChevronDown)),
                    )
                    // Footer: control buttons on the left, "Leave" on the right.
                    .child(
                        h_flex()
                            .justify_between()
                            .child(h_flex().children(self.render_call_controls(window, cx)))
                            .child(
                                h_flex()
                                    .gap_1()
                                    .child(
                                        Button::new("leave-call", "Leave")
                                            .icon(Some(IconName::Exit))
                                            .label_size(LabelSize::Small)
                                            .style(ButtonStyle::Tinted(TintColor::Error))
                                            .tooltip(Tooltip::text("Leave Call"))
                                            .icon_size(IconSize::Small)
                                            .on_click(move |_, _window, cx| {
                                                // Hang up via the global call
                                                // handle; errors are logged.
                                                ActiveCall::global(cx)
                                                    .update(cx, |call, cx| call.hang_up(cx))
                                                    .detach_and_log_err(cx);
                                            }),
                                    )
                                    .into_any_element(),
                            ),
                    ),
            )
            .into_any_element()
    }
}
/// Registers the in-call overlay: for every newly created `Workspace`, a
/// `CallOverlay` entity is constructed and installed as an overlay on the
/// workspace's left dock. The overlay repaints whenever the global
/// `ActiveCall` state changes.
pub fn init(cx: &App) {
    cx.observe_new(|workspace: &mut Workspace, _, cx| {
        let workspace_handle = cx.weak_entity();
        let project = workspace.project().clone();
        let left_dock = workspace.dock_at_position(workspace::dock::DockPosition::Left);
        left_dock.update(cx, |dock, cx| {
            let overlay_entity = cx.new(|cx| {
                let active_call = ActiveCall::global(cx);
                // Re-render the overlay on any change to the active call.
                cx.observe(&active_call, |_, _, cx| cx.notify()).detach();
                CallOverlay {
                    channel_store: ChannelStore::global(cx),
                    active_call,
                    workspace: workspace_handle,
                    project,
                    screen_share_popover_handle: PopoverMenuHandle::default(),
                }
            });
            dock.add_overlay(
                cx,
                Box::new(move |window, cx| {
                    overlay_entity.update(cx, |overlay, cx| {
                        overlay.render(window, cx).into_any_element()
                    })
                }),
            )
        });
    })
    .detach();
}

View File

@@ -1,3 +1,4 @@
pub mod call_overlay;
pub mod channel_view;
pub mod collab_panel;
pub mod notification_panel;
@@ -23,6 +24,7 @@ pub fn init(app_state: &Arc<AppState>, cx: &mut App) {
channel_view::init(cx);
collab_panel::init(cx);
call_overlay::init(cx);
notification_panel::init(cx);
notifications::init(app_state, cx);
title_bar::init(cx);

View File

@@ -1,8 +1,27 @@
// Under the `test-support` feature, swap the std hashers for FxHasher-based
// ones. FxHasher has no per-process random seed, so — unlike std's default
// RandomState — hashing is stable across runs; presumably this is to make
// collection iteration order reproducible in tests. TODO confirm rationale.
#[cfg(feature = "test-support")]
pub type HashMap<K, V> = FxHashMap<K, V>;
#[cfg(feature = "test-support")]
pub type HashSet<T> = FxHashSet<T>;
#[cfg(feature = "test-support")]
pub type IndexMap<K, V> = indexmap::IndexMap<K, V, rustc_hash::FxBuildHasher>;
#[cfg(feature = "test-support")]
pub type IndexSet<T> = indexmap::IndexSet<T, rustc_hash::FxBuildHasher>;
// In normal builds, use the standard (DoS-resistant) hashers.
#[cfg(not(feature = "test-support"))]
pub type HashMap<K, V> = std::collections::HashMap<K, V>;
#[cfg(not(feature = "test-support"))]
pub type HashSet<T> = std::collections::HashSet<T>;
#[cfg(not(feature = "test-support"))]
pub type IndexMap<K, V> = indexmap::IndexMap<K, V>;
#[cfg(not(feature = "test-support"))]
pub type IndexSet<T> = indexmap::IndexSet<T>;
// Re-exports so downstream crates can name the hashing machinery directly.
pub use indexmap::Equivalent;
pub use rustc_hash::FxHasher;
pub use rustc_hash::{FxHashMap, FxHashSet};

View File

@@ -172,7 +172,7 @@ impl Client {
let server_name = binary
.executable
.file_name()
.map(|name| name.to_string_lossy().into_owned())
.map(|name| name.to_string_lossy().to_string())
.unwrap_or_else(String::new);
let timeout = binary.timeout.map(Duration::from_millis);

View File

@@ -43,7 +43,6 @@ node_runtime.workspace = true
parking_lot.workspace = true
paths.workspace = true
project.workspace = true
semver.workspace = true
serde.workspace = true
serde_json.workspace = true
settings.workspace = true

View File

@@ -25,7 +25,6 @@ use node_runtime::{NodeRuntime, VersionStrategy};
use parking_lot::Mutex;
use project::DisableAiSettings;
use request::StatusNotification;
use semver::Version;
use serde_json::json;
use settings::Settings;
use settings::SettingsStore;
@@ -486,8 +485,6 @@ impl Copilot {
let start_language_server = async {
let server_path = get_copilot_lsp(fs, node_runtime.clone()).await?;
let node_path = node_runtime.binary_path().await?;
ensure_node_version_for_copilot(&node_path).await?;
let arguments: Vec<OsString> = vec![server_path.into(), "--stdio".into()];
let binary = LanguageServerBinary {
path: node_path,
@@ -1164,44 +1161,6 @@ async fn clear_copilot_config_dir() {
remove_matching(copilot_chat::copilot_chat_config_dir(), |_| true).await
}
async fn ensure_node_version_for_copilot(node_path: &Path) -> anyhow::Result<()> {
const MIN_COPILOT_NODE_VERSION: Version = Version::new(20, 8, 0);
log::info!("Checking Node.js version for Copilot at: {:?}", node_path);
let output = util::command::new_smol_command(node_path)
.arg("--version")
.output()
.await
.with_context(|| format!("checking Node.js version at {:?}", node_path))?;
if !output.status.success() {
anyhow::bail!(
"failed to run node --version for Copilot. stdout: {}, stderr: {}",
String::from_utf8_lossy(&output.stdout),
String::from_utf8_lossy(&output.stderr),
);
}
let version_str = String::from_utf8_lossy(&output.stdout);
let version = Version::parse(version_str.trim().trim_start_matches('v'))
.with_context(|| format!("parsing Node.js version from '{}'", version_str.trim()))?;
if version < MIN_COPILOT_NODE_VERSION {
anyhow::bail!(
"GitHub Copilot language server requires Node.js {MIN_COPILOT_NODE_VERSION} or later, but found {version}. \
Please update your Node.js version or configure a different Node.js path in settings."
);
}
log::info!(
"Node.js version {} meets Copilot requirements (>= {})",
version,
MIN_COPILOT_NODE_VERSION
);
Ok(())
}
async fn get_copilot_lsp(fs: Arc<dyn Fs>, node_runtime: NodeRuntime) -> anyhow::Result<PathBuf> {
const PACKAGE_NAME: &str = "@github/copilot-language-server";
const SERVER_PATH: &str =

View File

@@ -3,6 +3,7 @@ use anyhow::Result;
use edit_prediction::{Direction, EditPrediction, EditPredictionProvider};
use gpui::{App, Context, Entity, EntityId, Task};
use language::{Buffer, OffsetRangeExt, ToOffset, language_settings::AllLanguageSettings};
use project::Project;
use settings::Settings;
use std::{path::Path, time::Duration};
@@ -83,6 +84,7 @@ impl EditPredictionProvider for CopilotCompletionProvider {
fn refresh(
&mut self,
_project: Option<Entity<Project>>,
buffer: Entity<Buffer>,
cursor_position: language::Anchor,
debounce: bool,
@@ -247,7 +249,7 @@ impl EditPredictionProvider for CopilotCompletionProvider {
None
} else {
let position = cursor_position.bias_right(buffer);
Some(EditPrediction::Local {
Some(EditPrediction {
id: None,
edits: vec![(position..position, completion_text.into())],
edit_preview: None,

View File

@@ -3,7 +3,6 @@ use log::info;
use minidumper::{Client, LoopAction, MinidumpBinary};
use release_channel::{RELEASE_CHANNEL, ReleaseChannel};
use serde::{Deserialize, Serialize};
use smol::process::Command;
#[cfg(target_os = "macos")]
use std::sync::atomic::AtomicU32;
@@ -13,7 +12,7 @@ use std::{
io,
panic::{self, PanicHookInfo},
path::{Path, PathBuf},
process::{self},
process::{self, Command},
sync::{
Arc, OnceLock,
atomic::{AtomicBool, Ordering},
@@ -54,13 +53,13 @@ pub async fn init(crash_init: InitCrashHandler) {
// used by the crash handler isn't destroyed correctly which causes it to stay on the file
// system and block further attempts to initialize crash handlers with that socket path.
let socket_name = paths::temp_dir().join(format!("zed-crash-handler-{zed_pid}"));
let _crash_handler = Command::new(exe)
#[allow(unused)]
let server_pid = Command::new(exe)
.arg("--crash-handler")
.arg(&socket_name)
.spawn()
.expect("unable to spawn server process");
#[cfg(target_os = "linux")]
let server_pid = _crash_handler.id();
.expect("unable to spawn server process")
.id();
info!("spawning crash handler process");
let mut elapsed = Duration::ZERO;
@@ -154,7 +153,6 @@ pub struct CrashInfo {
pub struct InitCrashHandler {
pub session_id: String,
pub zed_version: String,
pub binary: String,
pub release_channel: String,
pub commit_sha: String,
}

View File

@@ -238,7 +238,7 @@ impl DebugAdapterBinary {
cwd: self
.cwd
.as_ref()
.map(|cwd| cwd.to_string_lossy().into_owned()),
.map(|cwd| cwd.to_string_lossy().to_string()),
connection: self.connection.as_ref().map(|c| c.to_proto()),
launch_type: match self.request_args.request {
StartDebuggingRequestArgumentsRequest::Launch => {

View File

@@ -64,19 +64,19 @@ impl DapRegistry {
.and_then(|adapter| adapter.adapter_language_name())
}
pub fn adapters_schema(&self) -> task::AdapterSchemas {
let mut schemas = vec![];
pub async fn adapters_schema(&self) -> task::AdapterSchemas {
let mut schemas = AdapterSchemas(vec![]);
let adapters = &self.0.read().adapters;
let adapters = self.0.read().adapters.clone();
for (name, adapter) in adapters.into_iter() {
schemas.push(AdapterSchema {
adapter: name.clone().into(),
schemas.0.push(AdapterSchema {
adapter: name.into(),
schema: adapter.dap_schema(),
});
}
AdapterSchemas(schemas)
schemas
}
pub fn locators(&self) -> FxHashMap<SharedString, Arc<dyn DapLocator>> {

View File

@@ -332,7 +332,7 @@ impl DebugAdapter for CodeLldbDebugAdapter {
_: &mut AsyncApp,
) -> Result<DebugAdapterBinary> {
let mut command = user_installed_path
.map(|p| p.to_string_lossy().into_owned())
.map(|p| p.to_string_lossy().to_string())
.or(self.path_to_codelldb.get().cloned());
if command.is_none() {
@@ -372,7 +372,7 @@ impl DebugAdapter for CodeLldbDebugAdapter {
}
};
let adapter_dir = version_path.join("extension").join("adapter");
let path = adapter_dir.join("codelldb").to_string_lossy().into_owned();
let path = adapter_dir.join("codelldb").to_string_lossy().to_string();
self.path_to_codelldb.set(path.clone()).ok();
command = Some(path);
};

View File

@@ -415,11 +415,11 @@ impl DebugAdapter for GoDebugAdapter {
let dlv_path = adapter_path.join("dlv");
let delve_path = if let Some(path) = user_installed_path {
path.to_string_lossy().into_owned()
path.to_string_lossy().to_string()
} else if let Some(path) = delegate.which(OsStr::new("dlv")).await {
path.to_string_lossy().into_owned()
path.to_string_lossy().to_string()
} else if delegate.fs().is_file(&dlv_path).await {
dlv_path.to_string_lossy().into_owned()
dlv_path.to_string_lossy().to_string()
} else {
let go = delegate
.which(OsStr::new("go"))
@@ -443,7 +443,7 @@ impl DebugAdapter for GoDebugAdapter {
);
}
adapter_path.join("dlv").to_string_lossy().into_owned()
adapter_path.join("dlv").to_string_lossy().to_string()
};
let cwd = Some(

View File

@@ -138,11 +138,11 @@ impl JsDebugAdapter {
};
let arguments = if let Some(mut args) = user_args {
args.insert(0, adapter_path.to_string_lossy().into_owned());
args.insert(0, adapter_path.to_string_lossy().to_string());
args
} else {
vec![
adapter_path.to_string_lossy().into_owned(),
adapter_path.to_string_lossy().to_string(),
port.to_string(),
host.to_string(),
]

View File

@@ -46,7 +46,7 @@ impl PythonDebugAdapter {
"Using user-installed debugpy adapter from: {}",
user_installed_path.display()
);
vec![user_installed_path.to_string_lossy().into_owned()]
vec![user_installed_path.to_string_lossy().to_string()]
} else {
let adapter_path = paths::debug_adapters_dir().join(Self::DEBUG_ADAPTER_NAME.as_ref());
let path = adapter_path
@@ -264,7 +264,7 @@ impl PythonDebugAdapter {
name = delegate
.which(OsStr::new(cmd))
.await
.map(|path| path.to_string_lossy().into_owned());
.map(|path| path.to_string_lossy().to_string());
if name.is_some() {
break;
}
@@ -726,7 +726,7 @@ impl DebugAdapter for PythonDebugAdapter {
.config
.get("cwd")
.and_then(|cwd| {
RelPath::new(
RelPath::from_std_path(
cwd.as_str()
.map(Path::new)?
.strip_prefix(delegate.worktree_root_path())
@@ -740,7 +740,7 @@ impl DebugAdapter for PythonDebugAdapter {
.toolchain_store()
.active_toolchain(
delegate.worktree_id(),
base_path.into_arc(),
base_path,
language::LanguageName::new(Self::LANGUAGE_NAME),
cx,
)

View File

@@ -55,7 +55,7 @@ impl WorktreeDelegate for WorktreeDelegateAdapter {
}
fn root_path(&self) -> String {
self.0.worktree_root_path().to_string_lossy().into_owned()
self.0.worktree_root_path().to_string_lossy().to_string()
}
async fn read_text_file(&self, path: &RelPath) -> Result<String> {
@@ -66,7 +66,7 @@ impl WorktreeDelegate for WorktreeDelegateAdapter {
self.0
.which(binary_name.as_ref())
.await
.map(|path| path.to_string_lossy().into_owned())
.map(|path| path.to_string_lossy().to_string())
}
async fn shell_env(&self) -> Vec<(String, String)> {

View File

@@ -373,7 +373,7 @@ fn get_processes_for_project(project: &Entity<Project>, cx: &mut App) -> Task<Ar
command: process
.cmd()
.iter()
.map(|s| s.to_string_lossy().into_owned())
.map(|s| s.to_string_lossy().to_string())
.collect::<Vec<_>>(),
}
})

View File

@@ -1062,10 +1062,10 @@ impl DebugPanel {
directory_in_worktree: dir,
..
} => {
let relative_path = if dir.ends_with(RelPath::unix(".vscode").unwrap()) {
dir.join(RelPath::unix("launch.json").unwrap())
let relative_path = if dir.ends_with(RelPath::new(".vscode").unwrap()) {
dir.join(RelPath::new("launch.json").unwrap())
} else {
dir.join(RelPath::unix("debug.json").unwrap())
dir.join(RelPath::new("debug.json").unwrap())
};
ProjectPath {
worktree_id: id,
@@ -1136,7 +1136,7 @@ impl DebugPanel {
}
path.pop();
path.push(paths::local_debug_file_relative_path().as_std_path());
path.push(paths::local_debug_file_relative_path());
let path = path.as_path();
if !fs.is_file(path).await {

View File

@@ -1037,10 +1037,10 @@ impl DebugDelegate {
match path.components().next_back() {
Some(".zed") => {
path.push(RelPath::unix("debug.json").unwrap());
path.push(RelPath::new("debug.json").unwrap());
}
Some(".vscode") => {
path.push(RelPath::unix("launch.json").unwrap());
path.push(RelPath::new("launch.json").unwrap());
}
_ => {}
}
@@ -1133,7 +1133,7 @@ impl DebugDelegate {
id: _,
directory_in_worktree: dir,
id_base: _,
} => dir.ends_with(RelPath::unix(".zed").unwrap()),
} => dir.ends_with(RelPath::new(".zed").unwrap()),
_ => false,
});
@@ -1154,7 +1154,7 @@ impl DebugDelegate {
id_base: _,
} => {
!(hide_vscode
&& dir.ends_with(RelPath::unix(".vscode").unwrap()))
&& dir.ends_with(RelPath::new(".vscode").unwrap()))
}
_ => true,
})
@@ -1578,7 +1578,7 @@ impl PickerDelegate for DebugDelegate {
pub(crate) fn resolve_path(path: &mut String) {
if path.starts_with('~') {
let home = paths::home_dir().to_string_lossy().into_owned();
let home = paths::home_dir().to_string_lossy().to_string();
let trimmed_path = path.trim().to_owned();
*path = trimmed_path.replacen('~', &home, 1);
} else if let Some(strip_path) = path.strip_prefix(&format!(".{}", std::path::MAIN_SEPARATOR)) {

View File

@@ -40,7 +40,7 @@ impl DebuggerOnboardingModal {
}
fn view_blog(&mut self, _: &ClickEvent, _: &mut Window, cx: &mut Context<Self>) {
cx.open_url("https://zed.dev/blog/debugger");
cx.open_url("http://zed.dev/blog/debugger");
cx.notify();
debugger_onboarding_event!("Blog Link Clicked");

View File

@@ -682,11 +682,10 @@ impl Render for BreakpointList {
breakpoints.into_iter().filter_map(move |breakpoint| {
debug_assert_eq!(&path, &breakpoint.path);
let file_name = breakpoint.path.file_name()?;
let breakpoint_path = RelPath::new(&breakpoint.path, path_style).ok();
let dir = relative_worktree_path
.as_deref()
.or(breakpoint_path.as_deref())?
.clone()
.or_else(|| RelPath::from_std_path(&breakpoint.path, path_style).ok())?
.parent()
.map(|parent| SharedString::from(parent.display(path_style).to_string()));
let name = file_name

View File

@@ -351,7 +351,7 @@ async fn test_handle_successful_run_in_terminal_reverse_request(
.fake_reverse_request::<RunInTerminal>(RunInTerminalRequestArguments {
kind: None,
title: None,
cwd: std::env::temp_dir().to_string_lossy().into_owned(),
cwd: std::env::temp_dir().to_string_lossy().to_string(),
args: vec![],
env: None,
args_can_be_interpreted_by_shell: None,

View File

@@ -473,7 +473,7 @@ fn generate_big_table_of_actions() -> String {
output.push_str(action.name);
output.push_str("</code><br>\n");
if !action.deprecated_aliases.is_empty() {
output.push_str("Deprecated Alias(es): ");
output.push_str("Deprecated Aliases:");
for alias in action.deprecated_aliases.iter() {
output.push_str("<code>");
output.push_str(alias);

View File

@@ -15,4 +15,5 @@ path = "src/edit_prediction.rs"
client.workspace = true
gpui.workspace = true
language.workspace = true
project.workspace = true
workspace-hack.workspace = true

View File

@@ -3,6 +3,7 @@ use std::ops::Range;
use client::EditPredictionUsage;
use gpui::{App, Context, Entity, SharedString};
use language::Buffer;
use project::Project;
// TODO: Find a better home for `Direction`.
//
@@ -15,19 +16,11 @@ pub enum Direction {
}
#[derive(Clone)]
pub enum EditPrediction {
/// Edits within the buffer that requested the prediction
Local {
id: Option<SharedString>,
edits: Vec<(Range<language::Anchor>, String)>,
edit_preview: Option<language::EditPreview>,
},
/// Jump to a different file from the one that requested the prediction
Jump {
id: Option<SharedString>,
snapshot: language::BufferSnapshot,
target: language::Anchor,
},
pub struct EditPrediction {
/// The ID of the completion, if it has one.
pub id: Option<SharedString>,
pub edits: Vec<(Range<language::Anchor>, String)>,
pub edit_preview: Option<language::EditPreview>,
}
pub enum DataCollectionState {
@@ -90,6 +83,7 @@ pub trait EditPredictionProvider: 'static + Sized {
fn is_refreshing(&self) -> bool;
fn refresh(
&mut self,
project: Option<Entity<Project>>,
buffer: Entity<Buffer>,
cursor_position: language::Anchor,
debounce: bool,
@@ -130,6 +124,7 @@ pub trait EditPredictionProviderHandle {
fn is_refreshing(&self, cx: &App) -> bool;
fn refresh(
&self,
project: Option<Entity<Project>>,
buffer: Entity<Buffer>,
cursor_position: language::Anchor,
debounce: bool,
@@ -203,13 +198,14 @@ where
fn refresh(
&self,
project: Option<Entity<Project>>,
buffer: Entity<Buffer>,
cursor_position: language::Anchor,
debounce: bool,
cx: &mut App,
) {
self.update(cx, |this, cx| {
this.refresh(buffer, cursor_position, debounce, cx)
this.refresh(project, buffer, cursor_position, debounce, cx)
})
}

View File

@@ -132,8 +132,7 @@ impl Render for EditPredictionButton {
div().child(
PopoverMenu::new("copilot")
.menu(move |window, cx| {
let current_status = Copilot::global(cx)?.read(cx).status();
Some(match current_status {
Some(match status {
Status::Authorized => this.update(cx, |this, cx| {
this.build_copilot_context_menu(window, cx)
}),

View File

@@ -18,7 +18,6 @@ cloud_llm_client.workspace = true
collections.workspace = true
futures.workspace = true
gpui.workspace = true
hashbrown.workspace = true
itertools.workspace = true
language.workspace = true
log.workspace = true

View File

@@ -1,4 +1,4 @@
use cloud_llm_client::predict_edits_v3::DeclarationScoreComponents;
use cloud_llm_client::predict_edits_v3::ScoreComponents;
use itertools::Itertools as _;
use language::BufferSnapshot;
use ordered_float::OrderedFloat;
@@ -8,67 +8,76 @@ use strum::EnumIter;
use text::{Point, ToPoint};
use crate::{
Declaration, EditPredictionExcerpt, Identifier,
Declaration, EditPredictionExcerpt, EditPredictionExcerptText, Identifier,
reference::{Reference, ReferenceRegion},
syntax_index::SyntaxIndexState,
text_similarity::{Occurrences, jaccard_similarity, weighted_overlap_coefficient},
text_similarity::{IdentifierOccurrences, jaccard_similarity, weighted_overlap_coefficient},
};
const MAX_IDENTIFIER_DECLARATION_COUNT: usize = 16;
#[derive(Clone, Debug)]
pub struct ScoredDeclaration {
pub struct ScoredSnippet {
pub identifier: Identifier,
pub declaration: Declaration,
pub score_components: DeclarationScoreComponents,
pub scores: DeclarationScores,
pub score_components: ScoreComponents,
pub scores: Scores,
}
#[derive(EnumIter, Clone, Copy, PartialEq, Eq, Hash, Debug)]
pub enum DeclarationStyle {
pub enum SnippetStyle {
Signature,
Declaration,
}
impl ScoredDeclaration {
/// Returns the score for this declaration with the specified style.
pub fn score(&self, style: DeclarationStyle) -> f32 {
impl ScoredSnippet {
/// Returns the score for this snippet with the specified style.
pub fn score(&self, style: SnippetStyle) -> f32 {
match style {
DeclarationStyle::Signature => self.scores.signature,
DeclarationStyle::Declaration => self.scores.declaration,
SnippetStyle::Signature => self.scores.signature,
SnippetStyle::Declaration => self.scores.declaration,
}
}
pub fn size(&self, style: DeclarationStyle) -> usize {
pub fn size(&self, style: SnippetStyle) -> usize {
match &self.declaration {
Declaration::File { declaration, .. } => match style {
DeclarationStyle::Signature => declaration.signature_range.len(),
DeclarationStyle::Declaration => declaration.text.len(),
SnippetStyle::Signature => declaration.signature_range.len(),
SnippetStyle::Declaration => declaration.text.len(),
},
Declaration::Buffer { declaration, .. } => match style {
DeclarationStyle::Signature => declaration.signature_range.len(),
DeclarationStyle::Declaration => declaration.item_range.len(),
SnippetStyle::Signature => declaration.signature_range.len(),
SnippetStyle::Declaration => declaration.item_range.len(),
},
}
}
pub fn score_density(&self, style: DeclarationStyle) -> f32 {
pub fn score_density(&self, style: SnippetStyle) -> f32 {
self.score(style) / (self.size(style)) as f32
}
}
pub fn scored_declarations(
pub fn scored_snippets(
index: &SyntaxIndexState,
excerpt: &EditPredictionExcerpt,
excerpt_occurrences: &Occurrences,
adjacent_occurrences: &Occurrences,
excerpt_text: &EditPredictionExcerptText,
identifier_to_references: HashMap<Identifier, Vec<Reference>>,
cursor_offset: usize,
current_buffer: &BufferSnapshot,
) -> Vec<ScoredDeclaration> {
) -> Vec<ScoredSnippet> {
let containing_range_identifier_occurrences =
IdentifierOccurrences::within_string(&excerpt_text.body);
let cursor_point = cursor_offset.to_point(&current_buffer);
let mut declarations = identifier_to_references
let start_point = Point::new(cursor_point.row.saturating_sub(2), 0);
let end_point = Point::new(cursor_point.row + 1, 0);
let adjacent_identifier_occurrences = IdentifierOccurrences::within_string(
&current_buffer
.text_for_range(start_point..end_point)
.collect::<String>(),
);
let mut snippets = identifier_to_references
.into_iter()
.flat_map(|(identifier, references)| {
let declarations =
@@ -128,7 +137,7 @@ pub fn scored_declarations(
)| {
let same_file_declaration_count = index.file_declaration_count(declaration);
score_declaration(
score_snippet(
&identifier,
&references,
declaration.clone(),
@@ -137,8 +146,8 @@ pub fn scored_declarations(
declaration_line_distance_rank,
same_file_declaration_count,
declaration_count,
&excerpt_occurrences,
&adjacent_occurrences,
&containing_range_identifier_occurrences,
&adjacent_identifier_occurrences,
cursor_point,
current_buffer,
)
@@ -149,14 +158,14 @@ pub fn scored_declarations(
.flatten()
.collect::<Vec<_>>();
declarations.sort_unstable_by_key(|declaration| {
let score_density = declaration
.score_density(DeclarationStyle::Declaration)
.max(declaration.score_density(DeclarationStyle::Signature));
snippets.sort_unstable_by_key(|snippet| {
let score_density = snippet
.score_density(SnippetStyle::Declaration)
.max(snippet.score_density(SnippetStyle::Signature));
Reverse(OrderedFloat(score_density))
});
declarations
snippets
}
fn range_intersection<T: Ord + Clone>(a: &Range<T>, b: &Range<T>) -> Option<Range<T>> {
@@ -169,7 +178,7 @@ fn range_intersection<T: Ord + Clone>(a: &Range<T>, b: &Range<T>) -> Option<Rang
}
}
fn score_declaration(
fn score_snippet(
identifier: &Identifier,
references: &[Reference],
declaration: Declaration,
@@ -178,11 +187,11 @@ fn score_declaration(
declaration_line_distance_rank: usize,
same_file_declaration_count: usize,
declaration_count: usize,
excerpt_occurrences: &Occurrences,
adjacent_occurrences: &Occurrences,
containing_range_identifier_occurrences: &IdentifierOccurrences,
adjacent_identifier_occurrences: &IdentifierOccurrences,
cursor: Point,
current_buffer: &BufferSnapshot,
) -> Option<ScoredDeclaration> {
) -> Option<ScoredSnippet> {
let is_referenced_nearby = references
.iter()
.any(|r| r.region == ReferenceRegion::Nearby);
@@ -199,27 +208,37 @@ fn score_declaration(
.min()
.unwrap();
let item_source_occurrences = Occurrences::within_string(&declaration.item_text().0);
let item_signature_occurrences = Occurrences::within_string(&declaration.signature_text().0);
let excerpt_vs_item_jaccard = jaccard_similarity(excerpt_occurrences, &item_source_occurrences);
let excerpt_vs_signature_jaccard =
jaccard_similarity(excerpt_occurrences, &item_signature_occurrences);
let item_source_occurrences = IdentifierOccurrences::within_string(&declaration.item_text().0);
let item_signature_occurrences =
IdentifierOccurrences::within_string(&declaration.signature_text().0);
let containing_range_vs_item_jaccard = jaccard_similarity(
containing_range_identifier_occurrences,
&item_source_occurrences,
);
let containing_range_vs_signature_jaccard = jaccard_similarity(
containing_range_identifier_occurrences,
&item_signature_occurrences,
);
let adjacent_vs_item_jaccard =
jaccard_similarity(adjacent_occurrences, &item_source_occurrences);
jaccard_similarity(adjacent_identifier_occurrences, &item_source_occurrences);
let adjacent_vs_signature_jaccard =
jaccard_similarity(adjacent_occurrences, &item_signature_occurrences);
jaccard_similarity(adjacent_identifier_occurrences, &item_signature_occurrences);
let excerpt_vs_item_weighted_overlap =
weighted_overlap_coefficient(excerpt_occurrences, &item_source_occurrences);
let excerpt_vs_signature_weighted_overlap =
weighted_overlap_coefficient(excerpt_occurrences, &item_signature_occurrences);
let containing_range_vs_item_weighted_overlap = weighted_overlap_coefficient(
containing_range_identifier_occurrences,
&item_source_occurrences,
);
let containing_range_vs_signature_weighted_overlap = weighted_overlap_coefficient(
containing_range_identifier_occurrences,
&item_signature_occurrences,
);
let adjacent_vs_item_weighted_overlap =
weighted_overlap_coefficient(adjacent_occurrences, &item_source_occurrences);
weighted_overlap_coefficient(adjacent_identifier_occurrences, &item_source_occurrences);
let adjacent_vs_signature_weighted_overlap =
weighted_overlap_coefficient(adjacent_occurrences, &item_signature_occurrences);
weighted_overlap_coefficient(adjacent_identifier_occurrences, &item_signature_occurrences);
// TODO: Consider adding declaration_file_count
let score_components = DeclarationScoreComponents {
let score_components = ScoreComponents {
is_same_file,
is_referenced_nearby,
is_referenced_in_breadcrumb,
@@ -229,32 +248,32 @@ fn score_declaration(
reference_count,
same_file_declaration_count,
declaration_count,
excerpt_vs_item_jaccard,
excerpt_vs_signature_jaccard,
containing_range_vs_item_jaccard,
containing_range_vs_signature_jaccard,
adjacent_vs_item_jaccard,
adjacent_vs_signature_jaccard,
excerpt_vs_item_weighted_overlap,
excerpt_vs_signature_weighted_overlap,
containing_range_vs_item_weighted_overlap,
containing_range_vs_signature_weighted_overlap,
adjacent_vs_item_weighted_overlap,
adjacent_vs_signature_weighted_overlap,
};
Some(ScoredDeclaration {
Some(ScoredSnippet {
identifier: identifier.clone(),
declaration: declaration,
scores: DeclarationScores::score(&score_components),
scores: Scores::score(&score_components),
score_components,
})
}
#[derive(Clone, Debug, Serialize)]
pub struct DeclarationScores {
pub struct Scores {
pub signature: f32,
pub declaration: f32,
}
impl DeclarationScores {
fn score(components: &DeclarationScoreComponents) -> DeclarationScores {
impl Scores {
fn score(components: &ScoreComponents) -> Scores {
// TODO: handle truncation
// Score related to how likely this is the correct declaration, range 0 to 1
@@ -276,11 +295,13 @@ impl DeclarationScores {
// For now instead of linear combination, the scores are just multiplied together.
let combined_score = 10.0 * accuracy_score * distance_score;
DeclarationScores {
signature: combined_score * components.excerpt_vs_signature_weighted_overlap,
Scores {
signature: combined_score * components.containing_range_vs_signature_weighted_overlap,
// declaration score gets boosted both by being multiplied by 2 and by there being more
// weighted overlap.
declaration: 2.0 * combined_score * components.excerpt_vs_item_weighted_overlap,
declaration: 2.0
* combined_score
* components.containing_range_vs_item_weighted_overlap,
}
}
}

View File

@@ -21,7 +21,7 @@ pub struct EditPredictionContext {
pub excerpt: EditPredictionExcerpt,
pub excerpt_text: EditPredictionExcerptText,
pub cursor_offset_in_excerpt: usize,
pub declarations: Vec<ScoredDeclaration>,
pub snippets: Vec<ScoredSnippet>,
}
impl EditPredictionContext {
@@ -58,28 +58,17 @@ impl EditPredictionContext {
index_state,
)?;
let excerpt_text = excerpt.text(buffer);
let excerpt_occurrences = text_similarity::Occurrences::within_string(&excerpt_text.body);
let adjacent_start = Point::new(cursor_point.row.saturating_sub(2), 0);
let adjacent_end = Point::new(cursor_point.row + 1, 0);
let adjacent_occurrences = text_similarity::Occurrences::within_string(
&buffer
.text_for_range(adjacent_start..adjacent_end)
.collect::<String>(),
);
let cursor_offset_in_file = cursor_point.to_offset(buffer);
// TODO fix this to not need saturating_sub
let cursor_offset_in_excerpt = cursor_offset_in_file.saturating_sub(excerpt.range.start);
let declarations = if let Some(index_state) = index_state {
let snippets = if let Some(index_state) = index_state {
let references = references_in_excerpt(&excerpt, &excerpt_text, buffer);
scored_declarations(
scored_snippets(
&index_state,
&excerpt,
&excerpt_occurrences,
&adjacent_occurrences,
&excerpt_text,
references,
cursor_offset_in_file,
buffer,
@@ -92,7 +81,7 @@ impl EditPredictionContext {
excerpt,
excerpt_text,
cursor_offset_in_excerpt,
declarations,
snippets,
})
}
}
@@ -148,7 +137,7 @@ mod tests {
.unwrap();
let mut snippet_identifiers = context
.declarations
.snippets
.iter()
.map(|snippet| snippet.identifier.name.as_ref())
.collect::<Vec<_>>();

View File

@@ -1,9 +1,5 @@
use hashbrown::HashTable;
use regex::Regex;
use std::{
hash::{Hash, Hasher as _},
sync::LazyLock,
};
use std::{collections::HashMap, sync::LazyLock};
use crate::reference::Reference;
@@ -18,74 +14,47 @@ use crate::reference::Reference;
static IDENTIFIER_REGEX: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"\b\w+\b").unwrap());
/// Multiset of text occurrences for text similarity that only stores hashes and counts.
#[derive(Debug, Default)]
pub struct Occurrences {
table: HashTable<OccurrenceEntry>,
// TODO: use &str or Cow<str> keys?
#[derive(Debug)]
pub struct IdentifierOccurrences {
identifier_to_count: HashMap<String, usize>,
total_count: usize,
}
#[derive(Debug)]
struct OccurrenceEntry {
hash: u64,
count: usize,
}
impl Occurrences {
pub fn within_string(text: &str) -> Self {
Self::from_identifiers(IDENTIFIER_REGEX.find_iter(text).map(|mat| mat.as_str()))
impl IdentifierOccurrences {
pub fn within_string(code: &str) -> Self {
Self::from_iterator(IDENTIFIER_REGEX.find_iter(code).map(|mat| mat.as_str()))
}
#[allow(dead_code)]
pub fn within_references(references: &[Reference]) -> Self {
Self::from_identifiers(
Self::from_iterator(
references
.iter()
.map(|reference| reference.identifier.name.as_ref()),
)
}
pub fn from_identifiers<'a>(identifiers: impl IntoIterator<Item = &'a str>) -> Self {
let mut this = Self::default();
// TODO: Score matches that match case higher?
//
// TODO: Also include unsplit identifier?
for identifier in identifiers {
pub fn from_iterator<'a>(identifier_iterator: impl Iterator<Item = &'a str>) -> Self {
let mut identifier_to_count = HashMap::new();
let mut total_count = 0;
for identifier in identifier_iterator {
// TODO: Score matches that match case higher?
//
// TODO: Also include unsplit identifier?
for identifier_part in split_identifier(identifier) {
this.add_hash(fx_hash(&identifier_part.to_lowercase()));
identifier_to_count
.entry(identifier_part.to_lowercase())
.and_modify(|count| *count += 1)
.or_insert(1);
total_count += 1;
}
}
this
IdentifierOccurrences {
identifier_to_count,
total_count,
}
}
fn add_hash(&mut self, hash: u64) {
self.table
.entry(
hash,
|entry: &OccurrenceEntry| entry.hash == hash,
|entry| entry.hash,
)
.and_modify(|entry| entry.count += 1)
.or_insert(OccurrenceEntry { hash, count: 1 });
self.total_count += 1;
}
fn contains_hash(&self, hash: u64) -> bool {
self.get_count(hash) != 0
}
fn get_count(&self, hash: u64) -> usize {
self.table
.find(hash, |entry| entry.hash == hash)
.map(|entry| entry.count)
.unwrap_or(0)
}
}
pub fn fx_hash<T: Hash + ?Sized>(data: &T) -> u64 {
let mut hasher = collections::FxHasher::default();
data.hash(&mut hasher);
hasher.finish()
}
// Splits camelcase / snakecase / kebabcase / pascalcase
@@ -146,49 +115,54 @@ fn split_identifier(identifier: &str) -> Vec<&str> {
parts.into_iter().filter(|s| !s.is_empty()).collect()
}
pub fn jaccard_similarity<'a>(mut set_a: &'a Occurrences, mut set_b: &'a Occurrences) -> f32 {
if set_a.table.len() > set_b.table.len() {
pub fn jaccard_similarity<'a>(
mut set_a: &'a IdentifierOccurrences,
mut set_b: &'a IdentifierOccurrences,
) -> f32 {
if set_a.identifier_to_count.len() > set_b.identifier_to_count.len() {
std::mem::swap(&mut set_a, &mut set_b);
}
let intersection = set_a
.table
.iter()
.filter(|entry| set_b.contains_hash(entry.hash))
.identifier_to_count
.keys()
.filter(|key| set_b.identifier_to_count.contains_key(*key))
.count();
let union = set_a.table.len() + set_b.table.len() - intersection;
let union = set_a.identifier_to_count.len() + set_b.identifier_to_count.len() - intersection;
intersection as f32 / union as f32
}
// TODO
#[allow(dead_code)]
pub fn overlap_coefficient<'a>(mut set_a: &'a Occurrences, mut set_b: &'a Occurrences) -> f32 {
if set_a.table.len() > set_b.table.len() {
pub fn overlap_coefficient<'a>(
mut set_a: &'a IdentifierOccurrences,
mut set_b: &'a IdentifierOccurrences,
) -> f32 {
if set_a.identifier_to_count.len() > set_b.identifier_to_count.len() {
std::mem::swap(&mut set_a, &mut set_b);
}
let intersection = set_a
.table
.iter()
.filter(|entry| set_b.contains_hash(entry.hash))
.identifier_to_count
.keys()
.filter(|key| set_b.identifier_to_count.contains_key(*key))
.count();
intersection as f32 / set_a.table.len() as f32
intersection as f32 / set_a.identifier_to_count.len() as f32
}
// TODO
#[allow(dead_code)]
pub fn weighted_jaccard_similarity<'a>(
mut set_a: &'a Occurrences,
mut set_b: &'a Occurrences,
mut set_a: &'a IdentifierOccurrences,
mut set_b: &'a IdentifierOccurrences,
) -> f32 {
if set_a.table.len() > set_b.table.len() {
if set_a.identifier_to_count.len() > set_b.identifier_to_count.len() {
std::mem::swap(&mut set_a, &mut set_b);
}
let mut numerator = 0;
let mut denominator_a = 0;
let mut used_count_b = 0;
for entry_a in set_a.table.iter() {
let count_a = entry_a.count;
let count_b = set_b.get_count(entry_a.hash);
for (symbol, count_a) in set_a.identifier_to_count.iter() {
let count_b = set_b.identifier_to_count.get(symbol).unwrap_or(&0);
numerator += count_a.min(count_b);
denominator_a += count_a.max(count_b);
used_count_b += count_b;
@@ -203,17 +177,16 @@ pub fn weighted_jaccard_similarity<'a>(
}
pub fn weighted_overlap_coefficient<'a>(
mut set_a: &'a Occurrences,
mut set_b: &'a Occurrences,
mut set_a: &'a IdentifierOccurrences,
mut set_b: &'a IdentifierOccurrences,
) -> f32 {
if set_a.table.len() > set_b.table.len() {
if set_a.identifier_to_count.len() > set_b.identifier_to_count.len() {
std::mem::swap(&mut set_a, &mut set_b);
}
let mut numerator = 0;
for entry_a in set_a.table.iter() {
let count_a = entry_a.count;
let count_b = set_b.get_count(entry_a.hash);
for (symbol, count_a) in set_a.identifier_to_count.iter() {
let count_b = set_b.identifier_to_count.get(symbol).unwrap_or(&0);
numerator += count_a.min(count_b);
}
@@ -242,12 +215,12 @@ mod test {
fn test_similarity_functions() {
// 10 identifier parts, 8 unique
// Repeats: 2 "outline", 2 "items"
let set_a = Occurrences::within_string(
let set_a = IdentifierOccurrences::within_string(
"let mut outline_items = query_outline_items(&language, &tree, &source);",
);
// 14 identifier parts, 11 unique
// Repeats: 2 "outline", 2 "language", 2 "tree"
let set_b = Occurrences::within_string(
let set_b = IdentifierOccurrences::within_string(
"pub fn query_outline_items(language: &Language, tree: &Tree, source: &str) -> Vec<OutlineItem> {",
);

View File

@@ -776,8 +776,6 @@ actions!(
UniqueLinesCaseInsensitive,
/// Removes duplicate lines (case-sensitive).
UniqueLinesCaseSensitive,
/// Removes the surrounding syntax node (for example brackets, or closures)
/// from the current selections.
UnwrapSyntaxNode,
/// Wraps selections in tag specified by language.
WrapSelectionsInTag

View File

@@ -26,8 +26,8 @@ use sum_tree::{Bias, ContextLessSummary, Dimensions, SumTree, TreeMap};
use text::{BufferId, Edit};
use ui::ElementId;
const NEWLINES: &[u8; u128::BITS as usize] = &[b'\n'; _];
const BULLETS: &[u8; u128::BITS as usize] = &[b'*'; _];
const NEWLINES: &[u8] = &[b'\n'; u128::BITS as usize];
const BULLETS: &str = "********************************************************************************************************************************";
/// Tracks custom blocks such as diagnostics that should be displayed within buffer.
///
@@ -1774,7 +1774,7 @@ impl<'a> Iterator for BlockChunks<'a> {
// need to have the same number of bytes in the input as output.
let chars_count = prefix.chars().count();
let bullet_len = chars_count;
prefix = unsafe { std::str::from_utf8_unchecked(&BULLETS[..bullet_len]) };
prefix = &BULLETS[..bullet_len];
chars = 1u128.unbounded_shl(bullet_len as u32) - 1;
tabs = 0;
}

View File

@@ -1443,7 +1443,11 @@ impl<'a> Iterator for FoldChunks<'a> {
[(self.inlay_offset - buffer_chunk_start).0..(chunk_end - buffer_chunk_start).0];
let bit_end = (chunk_end - buffer_chunk_start).0;
let mask = 1u128.unbounded_shl(bit_end as u32).wrapping_sub(1);
let mask = if bit_end >= 128 {
u128::MAX
} else {
(1u128 << bit_end) - 1
};
chunk.tabs = (chunk.tabs >> (self.inlay_offset - buffer_chunk_start).0) & mask;
chunk.chars = (chunk.chars >> (self.inlay_offset - buffer_chunk_start).0) & mask;

View File

@@ -8,7 +8,7 @@ use multi_buffer::{
use std::{
cmp,
ops::{Add, AddAssign, Range, Sub, SubAssign},
sync::{Arc, OnceLock},
sync::Arc,
};
use sum_tree::{Bias, Cursor, Dimensions, SumTree};
use text::{ChunkBitmaps, Patch, Rope};
@@ -41,17 +41,12 @@ enum Transform {
pub struct Inlay {
pub id: InlayId,
pub position: Anchor,
pub content: InlayContent,
}
#[derive(Debug, Clone)]
pub enum InlayContent {
Text(text::Rope),
Color(Hsla),
pub text: text::Rope,
color: Option<Hsla>,
}
impl Inlay {
pub fn hint(id: u32, position: Anchor, hint: &project::InlayHint) -> Self {
pub fn hint(id: usize, position: Anchor, hint: &project::InlayHint) -> Self {
let mut text = hint.text();
if hint.padding_right && text.reversed_chars_at(text.len()).next() != Some(' ') {
text.push(" ");
@@ -62,57 +57,51 @@ impl Inlay {
Self {
id: InlayId::Hint(id),
position,
content: InlayContent::Text(text),
text,
color: None,
}
}
#[cfg(any(test, feature = "test-support"))]
pub fn mock_hint(id: u32, position: Anchor, text: impl Into<Rope>) -> Self {
pub fn mock_hint(id: usize, position: Anchor, text: impl Into<Rope>) -> Self {
Self {
id: InlayId::Hint(id),
position,
content: InlayContent::Text(text.into()),
text: text.into(),
color: None,
}
}
pub fn color(id: u32, position: Anchor, color: Rgba) -> Self {
pub fn color(id: usize, position: Anchor, color: Rgba) -> Self {
Self {
id: InlayId::Color(id),
position,
content: InlayContent::Color(color.into()),
text: Rope::from(""),
color: Some(Hsla::from(color)),
}
}
pub fn edit_prediction<T: Into<Rope>>(id: u32, position: Anchor, text: T) -> Self {
pub fn edit_prediction<T: Into<Rope>>(id: usize, position: Anchor, text: T) -> Self {
Self {
id: InlayId::EditPrediction(id),
position,
content: InlayContent::Text(text.into()),
text: text.into(),
color: None,
}
}
pub fn debugger<T: Into<Rope>>(id: u32, position: Anchor, text: T) -> Self {
pub fn debugger<T: Into<Rope>>(id: usize, position: Anchor, text: T) -> Self {
Self {
id: InlayId::DebuggerValue(id),
position,
content: InlayContent::Text(text.into()),
}
}
pub fn text(&self) -> &Rope {
static COLOR_TEXT: OnceLock<Rope> = OnceLock::new();
match &self.content {
InlayContent::Text(text) => text,
InlayContent::Color(_) => COLOR_TEXT.get_or_init(|| Rope::from("")),
text: text.into(),
color: None,
}
}
#[cfg(any(test, feature = "test-support"))]
pub fn get_color(&self) -> Option<Hsla> {
match self.content {
InlayContent::Color(color) => Some(color),
_ => None,
}
self.color
}
}
@@ -127,7 +116,7 @@ impl sum_tree::Item for Transform {
},
Transform::Inlay(inlay) => TransformSummary {
input: TextSummary::default(),
output: inlay.text().summary(),
output: inlay.text.summary(),
},
}
}
@@ -365,7 +354,7 @@ impl<'a> Iterator for InlayChunks<'a> {
let mut renderer = None;
let mut highlight_style = match inlay.id {
InlayId::EditPrediction(_) => self.highlight_styles.edit_prediction.map(|s| {
if inlay.text().chars().all(|c| c.is_whitespace()) {
if inlay.text.chars().all(|c| c.is_whitespace()) {
s.whitespace
} else {
s.insertion
@@ -374,7 +363,7 @@ impl<'a> Iterator for InlayChunks<'a> {
InlayId::Hint(_) => self.highlight_styles.inlay_hint,
InlayId::DebuggerValue(_) => self.highlight_styles.inlay_hint,
InlayId::Color(_) => {
if let InlayContent::Color(color) = inlay.content {
if let Some(color) = inlay.color {
renderer = Some(ChunkRenderer {
id: ChunkRendererId::Inlay(inlay.id),
render: Arc::new(move |cx| {
@@ -421,7 +410,7 @@ impl<'a> Iterator for InlayChunks<'a> {
let start = offset_in_inlay;
let end = cmp::min(self.max_output_offset, self.transforms.end().0)
- self.transforms.start().0;
let chunks = inlay.text().chunks_in_range(start.0..end.0);
let chunks = inlay.text.chunks_in_range(start.0..end.0);
text::ChunkWithBitmaps(chunks)
});
let ChunkBitmaps {
@@ -717,7 +706,7 @@ impl InlayMap {
for inlay_to_insert in to_insert {
// Avoid inserting empty inlays.
if inlay_to_insert.text().is_empty() {
if inlay_to_insert.text.is_empty() {
continue;
}
@@ -755,7 +744,7 @@ impl InlayMap {
#[cfg(test)]
pub(crate) fn randomly_mutate(
&mut self,
next_inlay_id: &mut u32,
next_inlay_id: &mut usize,
rng: &mut rand::rngs::StdRng,
) -> (InlaySnapshot, Vec<InlayEdit>) {
use rand::prelude::*;
@@ -833,7 +822,7 @@ impl InlaySnapshot {
InlayPoint(cursor.start().1.0 + (buffer_end - buffer_start))
}
Some(Transform::Inlay(inlay)) => {
let overshoot = inlay.text().offset_to_point(overshoot);
let overshoot = inlay.text.offset_to_point(overshoot);
InlayPoint(cursor.start().1.0 + overshoot)
}
None => self.max_point(),
@@ -863,7 +852,7 @@ impl InlaySnapshot {
InlayOffset(cursor.start().1.0 + (buffer_offset_end - buffer_offset_start))
}
Some(Transform::Inlay(inlay)) => {
let overshoot = inlay.text().point_to_offset(overshoot);
let overshoot = inlay.text.point_to_offset(overshoot);
InlayOffset(cursor.start().1.0 + overshoot)
}
None => self.len(),
@@ -1075,7 +1064,7 @@ impl InlaySnapshot {
Some(Transform::Inlay(inlay)) => {
let suffix_start = overshoot;
let suffix_end = cmp::min(cursor.end().0, range.end).0 - cursor.start().0.0;
summary = inlay.text().cursor(suffix_start).summary(suffix_end);
summary = inlay.text.cursor(suffix_start).summary(suffix_end);
cursor.next();
}
None => {}
@@ -1097,7 +1086,7 @@ impl InlaySnapshot {
}
Some(Transform::Inlay(inlay)) => {
let prefix_end = overshoot;
summary += inlay.text().cursor(0).summary::<TextSummary>(prefix_end);
summary += inlay.text.cursor(0).summary::<TextSummary>(prefix_end);
}
None => {}
}
@@ -1280,7 +1269,7 @@ mod tests {
resolve_state: ResolveState::Resolved,
},
)
.text()
.text
.to_string(),
"a",
"Should not pad label if not requested"
@@ -1300,7 +1289,7 @@ mod tests {
resolve_state: ResolveState::Resolved,
},
)
.text()
.text
.to_string(),
" a ",
"Should pad label for every side requested"
@@ -1320,7 +1309,7 @@ mod tests {
resolve_state: ResolveState::Resolved,
},
)
.text()
.text
.to_string(),
" a ",
"Should not change already padded label"
@@ -1340,7 +1329,7 @@ mod tests {
resolve_state: ResolveState::Resolved,
},
)
.text()
.text
.to_string(),
" a ",
"Should not change already padded label"
@@ -1363,7 +1352,7 @@ mod tests {
resolve_state: ResolveState::Resolved,
},
)
.text()
.text
.to_string(),
" 🎨 ",
"Should pad single emoji correctly"
@@ -1761,7 +1750,7 @@ mod tests {
.collect::<Vec<_>>();
let mut expected_text = Rope::from(&buffer_snapshot.text());
for (offset, inlay) in inlays.iter().rev() {
expected_text.replace(*offset..*offset, &inlay.text().to_string());
expected_text.replace(*offset..*offset, &inlay.text.to_string());
}
assert_eq!(inlay_snapshot.text(), expected_text.to_string());
@@ -1814,7 +1803,7 @@ mod tests {
.into_iter()
.filter_map(|i| {
let (_, inlay) = &inlays[i];
let inlay_text_len = inlay.text().len();
let inlay_text_len = inlay.text.len();
match inlay_text_len {
0 => None,
1 => Some(InlayHighlight {
@@ -1823,7 +1812,7 @@ mod tests {
range: 0..1,
}),
n => {
let inlay_text = inlay.text().to_string();
let inlay_text = inlay.text.to_string();
let mut highlight_end = rng.random_range(1..n);
let mut highlight_start = rng.random_range(0..highlight_end);
while !inlay_text.is_char_boundary(highlight_end) {
@@ -2149,7 +2138,8 @@ mod tests {
let inlay = Inlay {
id: InlayId::Hint(0),
position,
content: InlayContent::Text(text::Rope::from(inlay_text)),
text: text::Rope::from(inlay_text),
color: None,
};
let (inlay_snapshot, _) = inlay_map.splice(&[], vec![inlay]);
@@ -2263,7 +2253,8 @@ mod tests {
let inlay = Inlay {
id: InlayId::Hint(0),
position,
content: InlayContent::Text(text::Rope::from(test_case.inlay_text)),
text: text::Rope::from(test_case.inlay_text),
color: None,
};
let (inlay_snapshot, _) = inlay_map.splice(&[], vec![inlay]);

View File

@@ -53,12 +53,9 @@ pub fn replacement(c: char) -> Option<&'static str> {
} else if contains(c, PRESERVE) {
None
} else {
Some(FIXED_WIDTH_SPACE)
Some("\u{2007}") // fixed width space
}
}
const FIXED_WIDTH_SPACE: &str = "\u{2007}";
// IDEOGRAPHIC SPACE is common alongside Chinese and other wide character sets.
// We don't highlight this for now (as it already shows up wide in the editor),
// but could if we tracked state in the classifier.
@@ -120,11 +117,11 @@ const PRESERVE: &[(char, char)] = &[
];
fn contains(c: char, list: &[(char, char)]) -> bool {
for &(start, end) in list {
if c < start {
for (start, end) in list {
if c < *start {
return false;
}
if c <= end {
if c <= *end {
return true;
}
}

View File

@@ -10,10 +10,6 @@ use sum_tree::Bias;
const MAX_EXPANSION_COLUMN: u32 = 256;
// Handles a tab width <= 128
const SPACES: &[u8; u128::BITS as usize] = &[b' '; _];
const MAX_TABS: NonZeroU32 = NonZeroU32::new(SPACES.len() as u32).unwrap();
/// Keeps track of hard tabs in a text buffer.
///
/// See the [`display_map` module documentation](crate::display_map) for more information.
@@ -23,7 +19,7 @@ impl TabMap {
pub fn new(fold_snapshot: FoldSnapshot, tab_size: NonZeroU32) -> (Self, TabSnapshot) {
let snapshot = TabSnapshot {
fold_snapshot,
tab_size: tab_size.min(MAX_TABS),
tab_size,
max_expansion_column: MAX_EXPANSION_COLUMN,
version: 0,
};
@@ -45,7 +41,7 @@ impl TabMap {
let old_snapshot = &mut self.0;
let mut new_snapshot = TabSnapshot {
fold_snapshot,
tab_size: tab_size.min(MAX_TABS),
tab_size,
max_expansion_column: old_snapshot.max_expansion_column,
version: old_snapshot.version,
};
@@ -54,7 +50,9 @@ impl TabMap {
new_snapshot.version += 1;
}
let tab_edits = if old_snapshot.tab_size == new_snapshot.tab_size {
let mut tab_edits = Vec::with_capacity(fold_edits.len());
if old_snapshot.tab_size == new_snapshot.tab_size {
// Expand each edit to include the next tab on the same line as the edit,
// and any subsequent tabs on that line that moved across the tab expansion
// boundary.
@@ -110,7 +108,7 @@ impl TabMap {
let _old_alloc_ptr = fold_edits.as_ptr();
// Combine any edits that overlap due to the expansion.
let mut fold_edits = fold_edits.into_iter();
if let Some(mut first_edit) = fold_edits.next() {
let fold_edits = if let Some(mut first_edit) = fold_edits.next() {
// This code relies on reusing allocations from the Vec<_> - at the time of writing .flatten() prevents them.
#[allow(clippy::filter_map_identity)]
let mut v: Vec<_> = fold_edits
@@ -130,30 +128,29 @@ impl TabMap {
.collect();
v.push(first_edit);
debug_assert_eq!(v.as_ptr(), _old_alloc_ptr, "Fold edits were reallocated");
v.into_iter()
.map(|fold_edit| {
let old_start = fold_edit.old.start.to_point(&old_snapshot.fold_snapshot);
let old_end = fold_edit.old.end.to_point(&old_snapshot.fold_snapshot);
let new_start = fold_edit.new.start.to_point(&new_snapshot.fold_snapshot);
let new_end = fold_edit.new.end.to_point(&new_snapshot.fold_snapshot);
TabEdit {
old: old_snapshot.to_tab_point(old_start)
..old_snapshot.to_tab_point(old_end),
new: new_snapshot.to_tab_point(new_start)
..new_snapshot.to_tab_point(new_end),
}
})
.collect()
v
} else {
vec![]
};
for fold_edit in fold_edits {
let old_start = fold_edit.old.start.to_point(&old_snapshot.fold_snapshot);
let old_end = fold_edit.old.end.to_point(&old_snapshot.fold_snapshot);
let new_start = fold_edit.new.start.to_point(&new_snapshot.fold_snapshot);
let new_end = fold_edit.new.end.to_point(&new_snapshot.fold_snapshot);
tab_edits.push(TabEdit {
old: old_snapshot.to_tab_point(old_start)..old_snapshot.to_tab_point(old_end),
new: new_snapshot.to_tab_point(new_start)..new_snapshot.to_tab_point(new_end),
});
}
} else {
new_snapshot.version += 1;
vec![TabEdit {
tab_edits.push(TabEdit {
old: TabPoint::zero()..old_snapshot.max_point(),
new: TabPoint::zero()..new_snapshot.max_point(),
}]
};
});
}
*old_snapshot = new_snapshot;
(old_snapshot.clone(), tab_edits)
}
@@ -194,28 +191,37 @@ impl TabSnapshot {
.fold_snapshot
.text_summary_for_range(input_start..input_end);
let mut first_line_chars = 0;
let line_end = if range.start.row() == range.end.row() {
range.end
} else {
self.max_point()
};
let first_line_chars = self
for c in self
.chunks(range.start..line_end, false, Highlights::default())
.flat_map(|chunk| chunk.text.chars())
.take_while(|&c| c != '\n')
.count() as u32;
{
if c == '\n' {
break;
}
first_line_chars += 1;
}
let last_line_chars = if range.start.row() == range.end.row() {
first_line_chars
let mut last_line_chars = 0;
if range.start.row() == range.end.row() {
last_line_chars = first_line_chars;
} else {
self.chunks(
TabPoint::new(range.end.row(), 0)..range.end,
false,
Highlights::default(),
)
.flat_map(|chunk| chunk.text.chars())
.count() as u32
};
for _ in self
.chunks(
TabPoint::new(range.end.row(), 0)..range.end,
false,
Highlights::default(),
)
.flat_map(|chunk| chunk.text.chars())
{
last_line_chars += 1;
}
}
TextSummary {
lines: range.end.0 - range.start.0,
@@ -260,7 +266,7 @@ impl TabSnapshot {
max_output_position: range.end.0,
tab_size: self.tab_size,
chunk: Chunk {
text: unsafe { std::str::from_utf8_unchecked(&SPACES[..to_next_stop as usize]) },
text: &SPACES[0..(to_next_stop as usize)],
is_tab: true,
..Default::default()
},
@@ -311,11 +317,13 @@ impl TabSnapshot {
let (collapsed, expanded_char_column, to_next_stop) =
self.collapse_tabs(tab_cursor, expanded, bias);
(
let result = (
FoldPoint::new(output.row(), collapsed),
expanded_char_column,
to_next_stop,
)
);
result
}
pub fn make_tab_point(&self, point: Point, bias: Bias) -> TabPoint {
@@ -502,19 +510,20 @@ impl<'a> std::ops::AddAssign<&'a Self> for TextSummary {
}
}
// Handles a tab width <= 16
const SPACES: &str = " ";
pub struct TabChunks<'a> {
snapshot: &'a TabSnapshot,
max_expansion_column: u32,
max_output_position: Point,
tab_size: NonZeroU32,
// region: iteration state
fold_chunks: FoldChunks<'a>,
chunk: Chunk<'a>,
column: u32,
max_expansion_column: u32,
output_position: Point,
input_column: u32,
max_output_position: Point,
tab_size: NonZeroU32,
inside_leading_tab: bool,
// endregion: iteration state
}
impl TabChunks<'_> {
@@ -540,7 +549,7 @@ impl TabChunks<'_> {
self.output_position = range.start.0;
self.max_output_position = range.end.0;
self.chunk = Chunk {
text: unsafe { std::str::from_utf8_unchecked(&SPACES[..to_next_stop as usize]) },
text: &SPACES[0..(to_next_stop as usize)],
is_tab: true,
chars: 1u128.unbounded_shl(to_next_stop) - 1,
..Default::default()
@@ -612,7 +621,7 @@ impl<'a> Iterator for TabChunks<'a> {
self.input_column += 1;
self.output_position = next_output_position;
return Some(Chunk {
text: unsafe { std::str::from_utf8_unchecked(&SPACES[..len as usize]) },
text: &SPACES[..len as usize],
is_tab: true,
chars: 1u128.unbounded_shl(len) - 1,
tabs: 0,

View File

@@ -2,6 +2,7 @@ use edit_prediction::EditPredictionProvider;
use gpui::{Entity, prelude::*};
use indoc::indoc;
use multi_buffer::{Anchor, MultiBufferSnapshot, ToPoint};
use project::Project;
use std::ops::Range;
use text::{Point, ToOffset};
@@ -260,7 +261,7 @@ async fn test_edit_prediction_jump_disabled_for_non_zed_providers(cx: &mut gpui:
EditPrediction::Edit { .. } => {
// This is expected for non-Zed providers
}
EditPrediction::MoveWithin { .. } | EditPrediction::MoveOutside { .. } => {
EditPrediction::Move { .. } => {
panic!(
"Non-Zed providers should not show Move predictions (jump functionality)"
);
@@ -298,7 +299,7 @@ fn assert_editor_active_move_completion(
.as_ref()
.expect("editor has no active completion");
if let EditPrediction::MoveWithin { target, .. } = &completion_state.completion {
if let EditPrediction::Move { target, .. } = &completion_state.completion {
assert(editor.buffer().read(cx).snapshot(cx), *target);
} else {
panic!("expected move completion");
@@ -325,7 +326,7 @@ fn propose_edits<T: ToOffset>(
cx.update(|_, cx| {
provider.update(cx, |provider, _| {
provider.set_edit_prediction(Some(edit_prediction::EditPrediction::Local {
provider.set_edit_prediction(Some(edit_prediction::EditPrediction {
id: None,
edits: edits.collect(),
edit_preview: None,
@@ -356,7 +357,7 @@ fn propose_edits_non_zed<T: ToOffset>(
cx.update(|_, cx| {
provider.update(cx, |provider, _| {
provider.set_edit_prediction(Some(edit_prediction::EditPrediction::Local {
provider.set_edit_prediction(Some(edit_prediction::EditPrediction {
id: None,
edits: edits.collect(),
edit_preview: None,
@@ -417,6 +418,7 @@ impl EditPredictionProvider for FakeEditPredictionProvider {
fn refresh(
&mut self,
_project: Option<Entity<Project>>,
_buffer: gpui::Entity<language::Buffer>,
_cursor_position: language::Anchor,
_debounce: bool,
@@ -490,6 +492,7 @@ impl EditPredictionProvider for FakeNonZedEditPredictionProvider {
fn refresh(
&mut self,
_project: Option<Entity<Project>>,
_buffer: gpui::Entity<language::Buffer>,
_cursor_position: language::Anchor,
_debounce: bool,

Some files were not shown because too many files have changed in this diff Show More