Compare commits
15 Commits
rodio-audi
...
run-comman
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
cfab93eb15 | ||
|
|
fa0df6da1c | ||
|
|
99102a84fa | ||
|
|
5f01f6d75f | ||
|
|
a66cd820b3 | ||
|
|
f07da9d9f2 | ||
|
|
8d05bb090c | ||
|
|
2325f14713 | ||
|
|
fe2aa3f4cb | ||
|
|
10989c702c | ||
|
|
3f80ac0127 | ||
|
|
a1a6031c6a | ||
|
|
2d20b5d850 | ||
|
|
11ad0b5793 | ||
|
|
2755cd8ec7 |
@@ -26,7 +26,7 @@ third-party = [
|
||||
# build of remote_server should not include scap / its x11 dependency
|
||||
{ name = "scap", git = "https://github.com/zed-industries/scap", rev = "808aa5c45b41e8f44729d02e38fd00a2fe2722e7" },
|
||||
# build of remote_server should not need to include on libalsa through rodio
|
||||
{ name = "rodio", git = "https://github.com/RustAudio/rodio", branch = "better_wav_output"},
|
||||
{ name = "rodio" },
|
||||
]
|
||||
|
||||
[final-excludes]
|
||||
|
||||
150
Cargo.lock
generated
150
Cargo.lock
generated
@@ -308,22 +308,18 @@ dependencies = [
|
||||
"libc",
|
||||
"log",
|
||||
"nix 0.29.0",
|
||||
"node_runtime",
|
||||
"paths",
|
||||
"project",
|
||||
"reqwest_client",
|
||||
"schemars",
|
||||
"semver",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"settings",
|
||||
"smol",
|
||||
"task",
|
||||
"tempfile",
|
||||
"thiserror 2.0.12",
|
||||
"ui",
|
||||
"util",
|
||||
"watch",
|
||||
"which 6.0.3",
|
||||
"workspace-hack",
|
||||
]
|
||||
|
||||
@@ -1385,19 +1381,12 @@ name = "audio"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"async-tar",
|
||||
"collections",
|
||||
"crossbeam",
|
||||
"gpui",
|
||||
"libwebrtc",
|
||||
"log",
|
||||
"parking_lot",
|
||||
"rodio",
|
||||
"schemars",
|
||||
"serde",
|
||||
"settings",
|
||||
"smol",
|
||||
"thiserror 2.0.12",
|
||||
"util",
|
||||
"workspace-hack",
|
||||
]
|
||||
@@ -2618,7 +2607,6 @@ dependencies = [
|
||||
"audio",
|
||||
"client",
|
||||
"collections",
|
||||
"feature_flags",
|
||||
"fs",
|
||||
"futures 0.3.31",
|
||||
"gpui",
|
||||
@@ -4152,19 +4140,6 @@ dependencies = [
|
||||
"itertools 0.10.5",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "crossbeam"
|
||||
version = "0.8.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1137cd7e7fc0fb5d3c5a8678be38ec56e819125d8d7907411fe24ccb943faca8"
|
||||
dependencies = [
|
||||
"crossbeam-channel",
|
||||
"crossbeam-deque",
|
||||
"crossbeam-epoch",
|
||||
"crossbeam-queue",
|
||||
"crossbeam-utils",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "crossbeam-channel"
|
||||
version = "0.5.15"
|
||||
@@ -5069,6 +5044,7 @@ dependencies = [
|
||||
"multi_buffer",
|
||||
"ordered-float 2.10.1",
|
||||
"parking_lot",
|
||||
"postage",
|
||||
"pretty_assertions",
|
||||
"project",
|
||||
"rand 0.9.1",
|
||||
@@ -9206,6 +9182,19 @@ dependencies = [
|
||||
"x_ai",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "language_onboarding"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"db",
|
||||
"editor",
|
||||
"gpui",
|
||||
"project",
|
||||
"ui",
|
||||
"workspace",
|
||||
"workspace-hack",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "language_selector"
|
||||
version = "0.1.0"
|
||||
@@ -9268,7 +9257,6 @@ dependencies = [
|
||||
"chrono",
|
||||
"collections",
|
||||
"dap",
|
||||
"feature_flags",
|
||||
"futures 0.3.31",
|
||||
"gpui",
|
||||
"http_client",
|
||||
@@ -9673,7 +9661,6 @@ dependencies = [
|
||||
"scap",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"serde_urlencoded",
|
||||
"settings",
|
||||
"sha2",
|
||||
"simplelog",
|
||||
@@ -12627,6 +12614,7 @@ dependencies = [
|
||||
"remote",
|
||||
"rpc",
|
||||
"schemars",
|
||||
"semver",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"settings",
|
||||
@@ -12646,6 +12634,7 @@ dependencies = [
|
||||
"unindent",
|
||||
"url",
|
||||
"util",
|
||||
"watch",
|
||||
"which 6.0.3",
|
||||
"workspace-hack",
|
||||
"worktree",
|
||||
@@ -13875,15 +13864,15 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "rodio"
|
||||
version = "0.21.1"
|
||||
source = "git+https://github.com/RustAudio/rodio?branch=better_wav_output#82514bd1f2c6cfd9a1a885019b26a8ffea75bc5c"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e40ecf59e742e03336be6a3d53755e789fd05a059fa22dfa0ed624722319e183"
|
||||
dependencies = [
|
||||
"cpal",
|
||||
"dasp_sample",
|
||||
"hound",
|
||||
"num-rational",
|
||||
"rtrb",
|
||||
"symphonia",
|
||||
"thiserror 2.0.12",
|
||||
"tracing",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -13957,12 +13946,6 @@ dependencies = [
|
||||
"zeroize",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rtrb"
|
||||
version = "0.3.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ad8388ea1a9e0ea807e442e8263a699e7edcb320ecbcd21b4fa8ff859acce3ba"
|
||||
|
||||
[[package]]
|
||||
name = "rules_library"
|
||||
version = "0.1.0"
|
||||
@@ -15924,53 +15907,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "815c942ae7ee74737bb00f965fa5b5a2ac2ce7b6c01c0cc169bbeaf7abd5f5a9"
|
||||
dependencies = [
|
||||
"lazy_static",
|
||||
"symphonia-bundle-flac",
|
||||
"symphonia-bundle-mp3",
|
||||
"symphonia-codec-aac",
|
||||
"symphonia-codec-pcm",
|
||||
"symphonia-codec-vorbis",
|
||||
"symphonia-core",
|
||||
"symphonia-format-isomp4",
|
||||
"symphonia-format-ogg",
|
||||
"symphonia-format-riff",
|
||||
"symphonia-metadata",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "symphonia-bundle-flac"
|
||||
version = "0.5.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "72e34f34298a7308d4397a6c7fbf5b84c5d491231ce3dd379707ba673ab3bd97"
|
||||
dependencies = [
|
||||
"log",
|
||||
"symphonia-core",
|
||||
"symphonia-metadata",
|
||||
"symphonia-utils-xiph",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "symphonia-bundle-mp3"
|
||||
version = "0.5.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c01c2aae70f0f1fb096b6f0ff112a930b1fb3626178fba3ae68b09dce71706d4"
|
||||
dependencies = [
|
||||
"lazy_static",
|
||||
"log",
|
||||
"symphonia-core",
|
||||
"symphonia-metadata",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "symphonia-codec-aac"
|
||||
version = "0.5.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cdbf25b545ad0d3ee3e891ea643ad115aff4ca92f6aec472086b957a58522f70"
|
||||
dependencies = [
|
||||
"lazy_static",
|
||||
"log",
|
||||
"symphonia-core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "symphonia-codec-pcm"
|
||||
version = "0.5.4"
|
||||
@@ -15981,17 +15923,6 @@ dependencies = [
|
||||
"symphonia-core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "symphonia-codec-vorbis"
|
||||
version = "0.5.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5a98765fb46a0a6732b007f7e2870c2129b6f78d87db7987e6533c8f164a9f30"
|
||||
dependencies = [
|
||||
"log",
|
||||
"symphonia-core",
|
||||
"symphonia-utils-xiph",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "symphonia-core"
|
||||
version = "0.5.4"
|
||||
@@ -16005,31 +15936,6 @@ dependencies = [
|
||||
"log",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "symphonia-format-isomp4"
|
||||
version = "0.5.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "abfdf178d697e50ce1e5d9b982ba1b94c47218e03ec35022d9f0e071a16dc844"
|
||||
dependencies = [
|
||||
"encoding_rs",
|
||||
"log",
|
||||
"symphonia-core",
|
||||
"symphonia-metadata",
|
||||
"symphonia-utils-xiph",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "symphonia-format-ogg"
|
||||
version = "0.5.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ada3505789516bcf00fc1157c67729eded428b455c27ca370e41f4d785bfa931"
|
||||
dependencies = [
|
||||
"log",
|
||||
"symphonia-core",
|
||||
"symphonia-metadata",
|
||||
"symphonia-utils-xiph",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "symphonia-format-riff"
|
||||
version = "0.5.4"
|
||||
@@ -16054,16 +15960,6 @@ dependencies = [
|
||||
"symphonia-core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "symphonia-utils-xiph"
|
||||
version = "0.5.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "484472580fa49991afda5f6550ece662237b00c6f562c7d9638d1b086ed010fe"
|
||||
dependencies = [
|
||||
"symphonia-core",
|
||||
"symphonia-metadata",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "syn"
|
||||
version = "1.0.109"
|
||||
@@ -19954,7 +19850,6 @@ dependencies = [
|
||||
"core-foundation-sys",
|
||||
"cranelift-codegen",
|
||||
"crc32fast",
|
||||
"crossbeam-channel",
|
||||
"crossbeam-epoch",
|
||||
"crossbeam-utils",
|
||||
"crypto-common",
|
||||
@@ -19998,7 +19893,6 @@ dependencies = [
|
||||
"libsqlite3-sys",
|
||||
"linux-raw-sys 0.4.15",
|
||||
"linux-raw-sys 0.9.4",
|
||||
"livekit-runtime",
|
||||
"log",
|
||||
"lyon",
|
||||
"lyon_path",
|
||||
@@ -20484,7 +20378,6 @@ dependencies = [
|
||||
"acp_tools",
|
||||
"activity_indicator",
|
||||
"agent",
|
||||
"agent_servers",
|
||||
"agent_settings",
|
||||
"agent_ui",
|
||||
"anyhow",
|
||||
@@ -20548,11 +20441,13 @@ dependencies = [
|
||||
"language_extension",
|
||||
"language_model",
|
||||
"language_models",
|
||||
"language_onboarding",
|
||||
"language_selector",
|
||||
"language_tools",
|
||||
"languages",
|
||||
"libc",
|
||||
"line_ending_selector",
|
||||
"livekit_client",
|
||||
"log",
|
||||
"markdown",
|
||||
"markdown_preview",
|
||||
@@ -20569,6 +20464,7 @@ dependencies = [
|
||||
"parking_lot",
|
||||
"paths",
|
||||
"picker",
|
||||
"postage",
|
||||
"pretty_assertions",
|
||||
"profiling",
|
||||
"project",
|
||||
|
||||
@@ -94,6 +94,7 @@ members = [
|
||||
"crates/language_extension",
|
||||
"crates/language_model",
|
||||
"crates/language_models",
|
||||
"crates/language_onboarding",
|
||||
"crates/language_selector",
|
||||
"crates/language_tools",
|
||||
"crates/languages",
|
||||
@@ -276,7 +277,6 @@ context_server = { path = "crates/context_server" }
|
||||
copilot = { path = "crates/copilot" }
|
||||
crashes = { path = "crates/crashes" }
|
||||
credentials_provider = { path = "crates/credentials_provider" }
|
||||
crossbeam = "0.8.4"
|
||||
dap = { path = "crates/dap" }
|
||||
dap_adapters = { path = "crates/dap_adapters" }
|
||||
db = { path = "crates/db" }
|
||||
@@ -321,6 +321,7 @@ language = { path = "crates/language" }
|
||||
language_extension = { path = "crates/language_extension" }
|
||||
language_model = { path = "crates/language_model" }
|
||||
language_models = { path = "crates/language_models" }
|
||||
language_onboarding = { path = "crates/language_onboarding" }
|
||||
language_selector = { path = "crates/language_selector" }
|
||||
language_tools = { path = "crates/language_tools" }
|
||||
languages = { path = "crates/languages" }
|
||||
@@ -368,7 +369,7 @@ remote_server = { path = "crates/remote_server" }
|
||||
repl = { path = "crates/repl" }
|
||||
reqwest_client = { path = "crates/reqwest_client" }
|
||||
rich_text = { path = "crates/rich_text" }
|
||||
rodio = { git = "https://github.com/RustAudio/rodio", branch = "better_wav_output"}
|
||||
rodio = { version = "0.21.1", default-features = false }
|
||||
rope = { path = "crates/rope" }
|
||||
rpc = { path = "crates/rpc" }
|
||||
rules_library = { path = "crates/rules_library" }
|
||||
|
||||
@@ -32,34 +32,6 @@
|
||||
"(": "vim::SentenceBackward",
|
||||
")": "vim::SentenceForward",
|
||||
"|": "vim::GoToColumn",
|
||||
"] ]": "vim::NextSectionStart",
|
||||
"] [": "vim::NextSectionEnd",
|
||||
"[ [": "vim::PreviousSectionStart",
|
||||
"[ ]": "vim::PreviousSectionEnd",
|
||||
"] m": "vim::NextMethodStart",
|
||||
"] shift-m": "vim::NextMethodEnd",
|
||||
"[ m": "vim::PreviousMethodStart",
|
||||
"[ shift-m": "vim::PreviousMethodEnd",
|
||||
"[ *": "vim::PreviousComment",
|
||||
"[ /": "vim::PreviousComment",
|
||||
"] *": "vim::NextComment",
|
||||
"] /": "vim::NextComment",
|
||||
"[ -": "vim::PreviousLesserIndent",
|
||||
"[ +": "vim::PreviousGreaterIndent",
|
||||
"[ =": "vim::PreviousSameIndent",
|
||||
"] -": "vim::NextLesserIndent",
|
||||
"] +": "vim::NextGreaterIndent",
|
||||
"] =": "vim::NextSameIndent",
|
||||
"] b": "pane::ActivateNextItem",
|
||||
"[ b": "pane::ActivatePreviousItem",
|
||||
"] shift-b": "pane::ActivateLastItem",
|
||||
"[ shift-b": ["pane::ActivateItem", 0],
|
||||
"] space": "vim::InsertEmptyLineBelow",
|
||||
"[ space": "vim::InsertEmptyLineAbove",
|
||||
"[ e": "editor::MoveLineUp",
|
||||
"] e": "editor::MoveLineDown",
|
||||
"[ f": "workspace::FollowNextCollaborator",
|
||||
"] f": "workspace::FollowNextCollaborator",
|
||||
|
||||
// Word motions
|
||||
"w": "vim::NextWordStart",
|
||||
@@ -83,10 +55,6 @@
|
||||
"n": "vim::MoveToNextMatch",
|
||||
"shift-n": "vim::MoveToPreviousMatch",
|
||||
"%": "vim::Matching",
|
||||
"] }": ["vim::UnmatchedForward", { "char": "}" }],
|
||||
"[ {": ["vim::UnmatchedBackward", { "char": "{" }],
|
||||
"] )": ["vim::UnmatchedForward", { "char": ")" }],
|
||||
"[ (": ["vim::UnmatchedBackward", { "char": "(" }],
|
||||
"f": ["vim::PushFindForward", { "before": false, "multiline": false }],
|
||||
"t": ["vim::PushFindForward", { "before": true, "multiline": false }],
|
||||
"shift-f": ["vim::PushFindBackward", { "after": false, "multiline": false }],
|
||||
@@ -219,6 +187,46 @@
|
||||
".": "vim::Repeat"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "vim_mode == normal || vim_mode == visual || vim_mode == operator",
|
||||
"bindings": {
|
||||
"] ]": "vim::NextSectionStart",
|
||||
"] [": "vim::NextSectionEnd",
|
||||
"[ [": "vim::PreviousSectionStart",
|
||||
"[ ]": "vim::PreviousSectionEnd",
|
||||
"] m": "vim::NextMethodStart",
|
||||
"] shift-m": "vim::NextMethodEnd",
|
||||
"[ m": "vim::PreviousMethodStart",
|
||||
"[ shift-m": "vim::PreviousMethodEnd",
|
||||
"[ *": "vim::PreviousComment",
|
||||
"[ /": "vim::PreviousComment",
|
||||
"] *": "vim::NextComment",
|
||||
"] /": "vim::NextComment",
|
||||
"[ -": "vim::PreviousLesserIndent",
|
||||
"[ +": "vim::PreviousGreaterIndent",
|
||||
"[ =": "vim::PreviousSameIndent",
|
||||
"] -": "vim::NextLesserIndent",
|
||||
"] +": "vim::NextGreaterIndent",
|
||||
"] =": "vim::NextSameIndent",
|
||||
"] b": "pane::ActivateNextItem",
|
||||
"[ b": "pane::ActivatePreviousItem",
|
||||
"] shift-b": "pane::ActivateLastItem",
|
||||
"[ shift-b": ["pane::ActivateItem", 0],
|
||||
"] space": "vim::InsertEmptyLineBelow",
|
||||
"[ space": "vim::InsertEmptyLineAbove",
|
||||
"[ e": "editor::MoveLineUp",
|
||||
"] e": "editor::MoveLineDown",
|
||||
"[ f": "workspace::FollowNextCollaborator",
|
||||
"] f": "workspace::FollowNextCollaborator",
|
||||
"] }": ["vim::UnmatchedForward", { "char": "}" }],
|
||||
"[ {": ["vim::UnmatchedBackward", { "char": "{" }],
|
||||
"] )": ["vim::UnmatchedForward", { "char": ")" }],
|
||||
"[ (": ["vim::UnmatchedBackward", { "char": "(" }],
|
||||
// tree-sitter related commands
|
||||
"[ x": "vim::SelectLargerSyntaxNode",
|
||||
"] x": "vim::SelectSmallerSyntaxNode"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "vim_mode == normal",
|
||||
"bindings": {
|
||||
@@ -249,9 +257,6 @@
|
||||
"g w": "vim::PushRewrap",
|
||||
"g q": "vim::PushRewrap",
|
||||
"insert": "vim::InsertBefore",
|
||||
// tree-sitter related commands
|
||||
"[ x": "vim::SelectLargerSyntaxNode",
|
||||
"] x": "vim::SelectSmallerSyntaxNode",
|
||||
"] d": "editor::GoToDiagnostic",
|
||||
"[ d": "editor::GoToPreviousDiagnostic",
|
||||
"] c": "editor::GoToHunk",
|
||||
@@ -317,10 +322,7 @@
|
||||
"g w": "vim::Rewrap",
|
||||
"g ?": "vim::ConvertToRot13",
|
||||
// "g ?": "vim::ConvertToRot47",
|
||||
"\"": "vim::PushRegister",
|
||||
// tree-sitter related commands
|
||||
"[ x": "editor::SelectLargerSyntaxNode",
|
||||
"] x": "editor::SelectSmallerSyntaxNode"
|
||||
"\"": "vim::PushRegister"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -397,6 +399,9 @@
|
||||
"ctrl-[": "editor::Cancel",
|
||||
";": "vim::HelixCollapseSelection",
|
||||
":": "command_palette::Toggle",
|
||||
"m": "vim::PushHelixMatch",
|
||||
"]": ["vim::PushHelixNext", { "around": true }],
|
||||
"[": ["vim::PushHelixPrevious", { "around": true }],
|
||||
"left": "vim::WrappingLeft",
|
||||
"right": "vim::WrappingRight",
|
||||
"h": "vim::WrappingLeft",
|
||||
@@ -419,13 +424,6 @@
|
||||
"insert": "vim::InsertBefore",
|
||||
"alt-.": "vim::RepeatFind",
|
||||
"alt-s": ["editor::SplitSelectionIntoLines", { "keep_selections": true }],
|
||||
// tree-sitter related commands
|
||||
"[ x": "editor::SelectLargerSyntaxNode",
|
||||
"] x": "editor::SelectSmallerSyntaxNode",
|
||||
"] d": "editor::GoToDiagnostic",
|
||||
"[ d": "editor::GoToPreviousDiagnostic",
|
||||
"] c": "editor::GoToHunk",
|
||||
"[ c": "editor::GoToPreviousHunk",
|
||||
// Goto mode
|
||||
"g n": "pane::ActivateNextItem",
|
||||
"g p": "pane::ActivatePreviousItem",
|
||||
@@ -469,9 +467,6 @@
|
||||
"space c": "editor::ToggleComments",
|
||||
"space y": "editor::Copy",
|
||||
"space p": "editor::Paste",
|
||||
// Match mode
|
||||
"m m": "vim::Matching",
|
||||
"m i w": ["workspace::SendKeystrokes", "v i w"],
|
||||
"shift-u": "editor::Redo",
|
||||
"ctrl-c": "editor::ToggleComments",
|
||||
"d": "vim::HelixDelete",
|
||||
@@ -540,7 +535,7 @@
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "vim_operator == a || vim_operator == i || vim_operator == cs",
|
||||
"context": "vim_operator == a || vim_operator == i || vim_operator == cs || vim_operator == helix_next || vim_operator == helix_previous",
|
||||
"bindings": {
|
||||
"w": "vim::Word",
|
||||
"shift-w": ["vim::Word", { "ignore_punctuation": true }],
|
||||
@@ -577,6 +572,48 @@
|
||||
"e": "vim::EntireFile"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "vim_operator == helix_m",
|
||||
"bindings": {
|
||||
"m": "vim::Matching"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "vim_operator == helix_next",
|
||||
"bindings": {
|
||||
"z": "vim::NextSectionStart",
|
||||
"shift-z": "vim::NextSectionEnd",
|
||||
"*": "vim::NextComment",
|
||||
"/": "vim::NextComment",
|
||||
"-": "vim::NextLesserIndent",
|
||||
"+": "vim::NextGreaterIndent",
|
||||
"=": "vim::NextSameIndent",
|
||||
"b": "pane::ActivateNextItem",
|
||||
"shift-b": "pane::ActivateLastItem",
|
||||
"x": "editor::SelectSmallerSyntaxNode",
|
||||
"d": "editor::GoToDiagnostic",
|
||||
"c": "editor::GoToHunk",
|
||||
"space": "vim::InsertEmptyLineBelow"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "vim_operator == helix_previous",
|
||||
"bindings": {
|
||||
"z": "vim::PreviousSectionStart",
|
||||
"shift-z": "vim::PreviousSectionEnd",
|
||||
"*": "vim::PreviousComment",
|
||||
"/": "vim::PreviousComment",
|
||||
"-": "vim::PreviousLesserIndent",
|
||||
"+": "vim::PreviousGreaterIndent",
|
||||
"=": "vim::PreviousSameIndent",
|
||||
"b": "pane::ActivatePreviousItem",
|
||||
"shift-b": ["pane::ActivateItem", 0],
|
||||
"x": "editor::SelectLargerSyntaxNode",
|
||||
"d": "editor::GoToPreviousDiagnostic",
|
||||
"c": "editor::GoToPreviousHunk",
|
||||
"space": "vim::InsertEmptyLineAbove"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "vim_operator == c",
|
||||
"bindings": {
|
||||
|
||||
@@ -2758,7 +2758,7 @@ mod tests {
|
||||
}));
|
||||
|
||||
let thread = cx
|
||||
.update(|cx| connection.new_thread(project, Path::new("/test"), cx))
|
||||
.update(|cx| connection.new_thread(project, Path::new(path!("/test")), cx))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
|
||||
@@ -35,10 +35,15 @@ impl AgentServer for NativeAgentServer {
|
||||
|
||||
fn connect(
|
||||
&self,
|
||||
_root_dir: &Path,
|
||||
_root_dir: Option<&Path>,
|
||||
delegate: AgentServerDelegate,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<Rc<dyn acp_thread::AgentConnection>>> {
|
||||
) -> Task<
|
||||
Result<(
|
||||
Rc<dyn acp_thread::AgentConnection>,
|
||||
Option<task::SpawnInTerminal>,
|
||||
)>,
|
||||
> {
|
||||
log::debug!(
|
||||
"NativeAgentServer::connect called for path: {:?}",
|
||||
_root_dir
|
||||
@@ -60,7 +65,10 @@ impl AgentServer for NativeAgentServer {
|
||||
let connection = NativeAgentConnection(agent);
|
||||
log::debug!("NativeAgentServer connection established successfully");
|
||||
|
||||
Ok(Rc::new(connection) as Rc<dyn acp_thread::AgentConnection>)
|
||||
Ok((
|
||||
Rc::new(connection) as Rc<dyn acp_thread::AgentConnection>,
|
||||
None,
|
||||
))
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
@@ -24,7 +24,11 @@ impl AgentTool for EchoTool {
|
||||
acp::ToolKind::Other
|
||||
}
|
||||
|
||||
fn initial_title(&self, _input: Result<Self::Input, serde_json::Value>) -> SharedString {
|
||||
fn initial_title(
|
||||
&self,
|
||||
_input: Result<Self::Input, serde_json::Value>,
|
||||
_cx: &mut App,
|
||||
) -> SharedString {
|
||||
"Echo".into()
|
||||
}
|
||||
|
||||
@@ -55,7 +59,11 @@ impl AgentTool for DelayTool {
|
||||
"delay"
|
||||
}
|
||||
|
||||
fn initial_title(&self, input: Result<Self::Input, serde_json::Value>) -> SharedString {
|
||||
fn initial_title(
|
||||
&self,
|
||||
input: Result<Self::Input, serde_json::Value>,
|
||||
_cx: &mut App,
|
||||
) -> SharedString {
|
||||
if let Ok(input) = input {
|
||||
format!("Delay {}ms", input.ms).into()
|
||||
} else {
|
||||
@@ -100,7 +108,11 @@ impl AgentTool for ToolRequiringPermission {
|
||||
acp::ToolKind::Other
|
||||
}
|
||||
|
||||
fn initial_title(&self, _input: Result<Self::Input, serde_json::Value>) -> SharedString {
|
||||
fn initial_title(
|
||||
&self,
|
||||
_input: Result<Self::Input, serde_json::Value>,
|
||||
_cx: &mut App,
|
||||
) -> SharedString {
|
||||
"This tool requires permission".into()
|
||||
}
|
||||
|
||||
@@ -135,7 +147,11 @@ impl AgentTool for InfiniteTool {
|
||||
acp::ToolKind::Other
|
||||
}
|
||||
|
||||
fn initial_title(&self, _input: Result<Self::Input, serde_json::Value>) -> SharedString {
|
||||
fn initial_title(
|
||||
&self,
|
||||
_input: Result<Self::Input, serde_json::Value>,
|
||||
_cx: &mut App,
|
||||
) -> SharedString {
|
||||
"Infinite Tool".into()
|
||||
}
|
||||
|
||||
@@ -186,7 +202,11 @@ impl AgentTool for WordListTool {
|
||||
acp::ToolKind::Other
|
||||
}
|
||||
|
||||
fn initial_title(&self, _input: Result<Self::Input, serde_json::Value>) -> SharedString {
|
||||
fn initial_title(
|
||||
&self,
|
||||
_input: Result<Self::Input, serde_json::Value>,
|
||||
_cx: &mut App,
|
||||
) -> SharedString {
|
||||
"List of random words".into()
|
||||
}
|
||||
|
||||
|
||||
@@ -741,7 +741,7 @@ impl Thread {
|
||||
return;
|
||||
};
|
||||
|
||||
let title = tool.initial_title(tool_use.input.clone());
|
||||
let title = tool.initial_title(tool_use.input.clone(), cx);
|
||||
let kind = tool.kind();
|
||||
stream.send_tool_call(&tool_use.id, title, kind, tool_use.input.clone());
|
||||
|
||||
@@ -1062,7 +1062,11 @@ impl Thread {
|
||||
self.action_log.clone(),
|
||||
));
|
||||
self.add_tool(DiagnosticsTool::new(self.project.clone()));
|
||||
self.add_tool(EditFileTool::new(cx.weak_entity(), language_registry));
|
||||
self.add_tool(EditFileTool::new(
|
||||
self.project.clone(),
|
||||
cx.weak_entity(),
|
||||
language_registry,
|
||||
));
|
||||
self.add_tool(FetchTool::new(self.project.read(cx).client().http_client()));
|
||||
self.add_tool(FindPathTool::new(self.project.clone()));
|
||||
self.add_tool(GrepTool::new(self.project.clone()));
|
||||
@@ -1514,7 +1518,7 @@ impl Thread {
|
||||
let mut title = SharedString::from(&tool_use.name);
|
||||
let mut kind = acp::ToolKind::Other;
|
||||
if let Some(tool) = tool.as_ref() {
|
||||
title = tool.initial_title(tool_use.input.clone());
|
||||
title = tool.initial_title(tool_use.input.clone(), cx);
|
||||
kind = tool.kind();
|
||||
}
|
||||
|
||||
@@ -2148,7 +2152,11 @@ where
|
||||
fn kind() -> acp::ToolKind;
|
||||
|
||||
/// The initial tool title to display. Can be updated during the tool run.
|
||||
fn initial_title(&self, input: Result<Self::Input, serde_json::Value>) -> SharedString;
|
||||
fn initial_title(
|
||||
&self,
|
||||
input: Result<Self::Input, serde_json::Value>,
|
||||
cx: &mut App,
|
||||
) -> SharedString;
|
||||
|
||||
/// Returns the JSON schema that describes the tool's input.
|
||||
fn input_schema(&self, format: LanguageModelToolSchemaFormat) -> Schema {
|
||||
@@ -2196,7 +2204,7 @@ pub trait AnyAgentTool {
|
||||
fn name(&self) -> SharedString;
|
||||
fn description(&self) -> SharedString;
|
||||
fn kind(&self) -> acp::ToolKind;
|
||||
fn initial_title(&self, input: serde_json::Value) -> SharedString;
|
||||
fn initial_title(&self, input: serde_json::Value, _cx: &mut App) -> SharedString;
|
||||
fn input_schema(&self, format: LanguageModelToolSchemaFormat) -> Result<serde_json::Value>;
|
||||
fn supported_provider(&self, _provider: &LanguageModelProviderId) -> bool {
|
||||
true
|
||||
@@ -2232,9 +2240,9 @@ where
|
||||
T::kind()
|
||||
}
|
||||
|
||||
fn initial_title(&self, input: serde_json::Value) -> SharedString {
|
||||
fn initial_title(&self, input: serde_json::Value, _cx: &mut App) -> SharedString {
|
||||
let parsed_input = serde_json::from_value(input.clone()).map_err(|_| input);
|
||||
self.0.initial_title(parsed_input)
|
||||
self.0.initial_title(parsed_input, _cx)
|
||||
}
|
||||
|
||||
fn input_schema(&self, format: LanguageModelToolSchemaFormat) -> Result<serde_json::Value> {
|
||||
|
||||
@@ -145,7 +145,7 @@ impl AnyAgentTool for ContextServerTool {
|
||||
ToolKind::Other
|
||||
}
|
||||
|
||||
fn initial_title(&self, _input: serde_json::Value) -> SharedString {
|
||||
fn initial_title(&self, _input: serde_json::Value, _cx: &mut App) -> SharedString {
|
||||
format!("Run MCP tool `{}`", self.tool.name).into()
|
||||
}
|
||||
|
||||
@@ -176,7 +176,7 @@ impl AnyAgentTool for ContextServerTool {
|
||||
return Task::ready(Err(anyhow!("Context server not found")));
|
||||
};
|
||||
let tool_name = self.tool.name.clone();
|
||||
let authorize = event_stream.authorize(self.initial_title(input.clone()), cx);
|
||||
let authorize = event_stream.authorize(self.initial_title(input.clone(), cx), cx);
|
||||
|
||||
cx.spawn(async move |_cx| {
|
||||
authorize.await?;
|
||||
|
||||
@@ -58,7 +58,11 @@ impl AgentTool for CopyPathTool {
|
||||
ToolKind::Move
|
||||
}
|
||||
|
||||
fn initial_title(&self, input: Result<Self::Input, serde_json::Value>) -> ui::SharedString {
|
||||
fn initial_title(
|
||||
&self,
|
||||
input: Result<Self::Input, serde_json::Value>,
|
||||
_cx: &mut App,
|
||||
) -> ui::SharedString {
|
||||
if let Ok(input) = input {
|
||||
let src = MarkdownInlineCode(&input.source_path);
|
||||
let dest = MarkdownInlineCode(&input.destination_path);
|
||||
|
||||
@@ -49,7 +49,11 @@ impl AgentTool for CreateDirectoryTool {
|
||||
ToolKind::Read
|
||||
}
|
||||
|
||||
fn initial_title(&self, input: Result<Self::Input, serde_json::Value>) -> SharedString {
|
||||
fn initial_title(
|
||||
&self,
|
||||
input: Result<Self::Input, serde_json::Value>,
|
||||
_cx: &mut App,
|
||||
) -> SharedString {
|
||||
if let Ok(input) = input {
|
||||
format!("Create directory {}", MarkdownInlineCode(&input.path)).into()
|
||||
} else {
|
||||
|
||||
@@ -52,7 +52,11 @@ impl AgentTool for DeletePathTool {
|
||||
ToolKind::Delete
|
||||
}
|
||||
|
||||
fn initial_title(&self, input: Result<Self::Input, serde_json::Value>) -> SharedString {
|
||||
fn initial_title(
|
||||
&self,
|
||||
input: Result<Self::Input, serde_json::Value>,
|
||||
_cx: &mut App,
|
||||
) -> SharedString {
|
||||
if let Ok(input) = input {
|
||||
format!("Delete “`{}`”", input.path).into()
|
||||
} else {
|
||||
|
||||
@@ -71,7 +71,11 @@ impl AgentTool for DiagnosticsTool {
|
||||
acp::ToolKind::Read
|
||||
}
|
||||
|
||||
fn initial_title(&self, input: Result<Self::Input, serde_json::Value>) -> SharedString {
|
||||
fn initial_title(
|
||||
&self,
|
||||
input: Result<Self::Input, serde_json::Value>,
|
||||
_cx: &mut App,
|
||||
) -> SharedString {
|
||||
if let Some(path) = input.ok().and_then(|input| match input.path {
|
||||
Some(path) if !path.is_empty() => Some(path),
|
||||
_ => None,
|
||||
|
||||
@@ -120,11 +120,17 @@ impl From<EditFileToolOutput> for LanguageModelToolResultContent {
|
||||
pub struct EditFileTool {
|
||||
thread: WeakEntity<Thread>,
|
||||
language_registry: Arc<LanguageRegistry>,
|
||||
project: Entity<Project>,
|
||||
}
|
||||
|
||||
impl EditFileTool {
|
||||
pub fn new(thread: WeakEntity<Thread>, language_registry: Arc<LanguageRegistry>) -> Self {
|
||||
pub fn new(
|
||||
project: Entity<Project>,
|
||||
thread: WeakEntity<Thread>,
|
||||
language_registry: Arc<LanguageRegistry>,
|
||||
) -> Self {
|
||||
Self {
|
||||
project,
|
||||
thread,
|
||||
language_registry,
|
||||
}
|
||||
@@ -195,22 +201,50 @@ impl AgentTool for EditFileTool {
|
||||
acp::ToolKind::Edit
|
||||
}
|
||||
|
||||
fn initial_title(&self, input: Result<Self::Input, serde_json::Value>) -> SharedString {
|
||||
fn initial_title(
|
||||
&self,
|
||||
input: Result<Self::Input, serde_json::Value>,
|
||||
cx: &mut App,
|
||||
) -> SharedString {
|
||||
match input {
|
||||
Ok(input) => input.display_description.into(),
|
||||
Ok(input) => self
|
||||
.project
|
||||
.read(cx)
|
||||
.find_project_path(&input.path, cx)
|
||||
.and_then(|project_path| {
|
||||
self.project
|
||||
.read(cx)
|
||||
.short_full_path_for_project_path(&project_path, cx)
|
||||
})
|
||||
.unwrap_or(Path::new(&input.path).into())
|
||||
.to_string_lossy()
|
||||
.to_string()
|
||||
.into(),
|
||||
Err(raw_input) => {
|
||||
if let Some(input) =
|
||||
serde_json::from_value::<EditFileToolPartialInput>(raw_input).ok()
|
||||
{
|
||||
let path = input.path.trim();
|
||||
if !path.is_empty() {
|
||||
return self
|
||||
.project
|
||||
.read(cx)
|
||||
.find_project_path(&input.path, cx)
|
||||
.and_then(|project_path| {
|
||||
self.project
|
||||
.read(cx)
|
||||
.short_full_path_for_project_path(&project_path, cx)
|
||||
})
|
||||
.unwrap_or(Path::new(&input.path).into())
|
||||
.to_string_lossy()
|
||||
.to_string()
|
||||
.into();
|
||||
}
|
||||
|
||||
let description = input.display_description.trim();
|
||||
if !description.is_empty() {
|
||||
return description.to_string().into();
|
||||
}
|
||||
|
||||
let path = input.path.trim().to_string();
|
||||
if !path.is_empty() {
|
||||
return path.into();
|
||||
}
|
||||
}
|
||||
|
||||
DEFAULT_UI_TEXT.into()
|
||||
@@ -545,7 +579,7 @@ mod tests {
|
||||
let model = Arc::new(FakeLanguageModel::default());
|
||||
let thread = cx.new(|cx| {
|
||||
Thread::new(
|
||||
project,
|
||||
project.clone(),
|
||||
cx.new(|_cx| ProjectContext::default()),
|
||||
context_server_registry,
|
||||
Templates::new(),
|
||||
@@ -560,11 +594,12 @@ mod tests {
|
||||
path: "root/nonexistent_file.txt".into(),
|
||||
mode: EditFileMode::Edit,
|
||||
};
|
||||
Arc::new(EditFileTool::new(thread.downgrade(), language_registry)).run(
|
||||
input,
|
||||
ToolCallEventStream::test().0,
|
||||
cx,
|
||||
)
|
||||
Arc::new(EditFileTool::new(
|
||||
project,
|
||||
thread.downgrade(),
|
||||
language_registry,
|
||||
))
|
||||
.run(input, ToolCallEventStream::test().0, cx)
|
||||
})
|
||||
.await;
|
||||
assert_eq!(
|
||||
@@ -743,7 +778,7 @@ mod tests {
|
||||
let model = Arc::new(FakeLanguageModel::default());
|
||||
let thread = cx.new(|cx| {
|
||||
Thread::new(
|
||||
project,
|
||||
project.clone(),
|
||||
cx.new(|_cx| ProjectContext::default()),
|
||||
context_server_registry,
|
||||
Templates::new(),
|
||||
@@ -775,6 +810,7 @@ mod tests {
|
||||
mode: EditFileMode::Overwrite,
|
||||
};
|
||||
Arc::new(EditFileTool::new(
|
||||
project.clone(),
|
||||
thread.downgrade(),
|
||||
language_registry.clone(),
|
||||
))
|
||||
@@ -833,11 +869,12 @@ mod tests {
|
||||
path: "root/src/main.rs".into(),
|
||||
mode: EditFileMode::Overwrite,
|
||||
};
|
||||
Arc::new(EditFileTool::new(thread.downgrade(), language_registry)).run(
|
||||
input,
|
||||
ToolCallEventStream::test().0,
|
||||
cx,
|
||||
)
|
||||
Arc::new(EditFileTool::new(
|
||||
project.clone(),
|
||||
thread.downgrade(),
|
||||
language_registry,
|
||||
))
|
||||
.run(input, ToolCallEventStream::test().0, cx)
|
||||
});
|
||||
|
||||
// Stream the unformatted content
|
||||
@@ -885,7 +922,7 @@ mod tests {
|
||||
let model = Arc::new(FakeLanguageModel::default());
|
||||
let thread = cx.new(|cx| {
|
||||
Thread::new(
|
||||
project,
|
||||
project.clone(),
|
||||
cx.new(|_cx| ProjectContext::default()),
|
||||
context_server_registry,
|
||||
Templates::new(),
|
||||
@@ -918,6 +955,7 @@ mod tests {
|
||||
mode: EditFileMode::Overwrite,
|
||||
};
|
||||
Arc::new(EditFileTool::new(
|
||||
project.clone(),
|
||||
thread.downgrade(),
|
||||
language_registry.clone(),
|
||||
))
|
||||
@@ -969,11 +1007,12 @@ mod tests {
|
||||
path: "root/src/main.rs".into(),
|
||||
mode: EditFileMode::Overwrite,
|
||||
};
|
||||
Arc::new(EditFileTool::new(thread.downgrade(), language_registry)).run(
|
||||
input,
|
||||
ToolCallEventStream::test().0,
|
||||
cx,
|
||||
)
|
||||
Arc::new(EditFileTool::new(
|
||||
project.clone(),
|
||||
thread.downgrade(),
|
||||
language_registry,
|
||||
))
|
||||
.run(input, ToolCallEventStream::test().0, cx)
|
||||
});
|
||||
|
||||
// Stream the content with trailing whitespace
|
||||
@@ -1012,7 +1051,7 @@ mod tests {
|
||||
let model = Arc::new(FakeLanguageModel::default());
|
||||
let thread = cx.new(|cx| {
|
||||
Thread::new(
|
||||
project,
|
||||
project.clone(),
|
||||
cx.new(|_cx| ProjectContext::default()),
|
||||
context_server_registry,
|
||||
Templates::new(),
|
||||
@@ -1020,7 +1059,11 @@ mod tests {
|
||||
cx,
|
||||
)
|
||||
});
|
||||
let tool = Arc::new(EditFileTool::new(thread.downgrade(), language_registry));
|
||||
let tool = Arc::new(EditFileTool::new(
|
||||
project.clone(),
|
||||
thread.downgrade(),
|
||||
language_registry,
|
||||
));
|
||||
fs.insert_tree("/root", json!({})).await;
|
||||
|
||||
// Test 1: Path with .zed component should require confirmation
|
||||
@@ -1148,7 +1191,7 @@ mod tests {
|
||||
let model = Arc::new(FakeLanguageModel::default());
|
||||
let thread = cx.new(|cx| {
|
||||
Thread::new(
|
||||
project,
|
||||
project.clone(),
|
||||
cx.new(|_cx| ProjectContext::default()),
|
||||
context_server_registry,
|
||||
Templates::new(),
|
||||
@@ -1156,7 +1199,11 @@ mod tests {
|
||||
cx,
|
||||
)
|
||||
});
|
||||
let tool = Arc::new(EditFileTool::new(thread.downgrade(), language_registry));
|
||||
let tool = Arc::new(EditFileTool::new(
|
||||
project.clone(),
|
||||
thread.downgrade(),
|
||||
language_registry,
|
||||
));
|
||||
|
||||
// Test global config paths - these should require confirmation if they exist and are outside the project
|
||||
let test_cases = vec![
|
||||
@@ -1264,7 +1311,11 @@ mod tests {
|
||||
cx,
|
||||
)
|
||||
});
|
||||
let tool = Arc::new(EditFileTool::new(thread.downgrade(), language_registry));
|
||||
let tool = Arc::new(EditFileTool::new(
|
||||
project.clone(),
|
||||
thread.downgrade(),
|
||||
language_registry,
|
||||
));
|
||||
|
||||
// Test files in different worktrees
|
||||
let test_cases = vec![
|
||||
@@ -1344,7 +1395,11 @@ mod tests {
|
||||
cx,
|
||||
)
|
||||
});
|
||||
let tool = Arc::new(EditFileTool::new(thread.downgrade(), language_registry));
|
||||
let tool = Arc::new(EditFileTool::new(
|
||||
project.clone(),
|
||||
thread.downgrade(),
|
||||
language_registry,
|
||||
));
|
||||
|
||||
// Test edge cases
|
||||
let test_cases = vec![
|
||||
@@ -1427,7 +1482,11 @@ mod tests {
|
||||
cx,
|
||||
)
|
||||
});
|
||||
let tool = Arc::new(EditFileTool::new(thread.downgrade(), language_registry));
|
||||
let tool = Arc::new(EditFileTool::new(
|
||||
project.clone(),
|
||||
thread.downgrade(),
|
||||
language_registry,
|
||||
));
|
||||
|
||||
// Test different EditFileMode values
|
||||
let modes = vec![
|
||||
@@ -1507,48 +1566,67 @@ mod tests {
|
||||
cx,
|
||||
)
|
||||
});
|
||||
let tool = Arc::new(EditFileTool::new(thread.downgrade(), language_registry));
|
||||
let tool = Arc::new(EditFileTool::new(
|
||||
project,
|
||||
thread.downgrade(),
|
||||
language_registry,
|
||||
));
|
||||
|
||||
assert_eq!(
|
||||
tool.initial_title(Err(json!({
|
||||
"path": "src/main.rs",
|
||||
"display_description": "",
|
||||
"old_string": "old code",
|
||||
"new_string": "new code"
|
||||
}))),
|
||||
"src/main.rs"
|
||||
);
|
||||
assert_eq!(
|
||||
tool.initial_title(Err(json!({
|
||||
"path": "",
|
||||
"display_description": "Fix error handling",
|
||||
"old_string": "old code",
|
||||
"new_string": "new code"
|
||||
}))),
|
||||
"Fix error handling"
|
||||
);
|
||||
assert_eq!(
|
||||
tool.initial_title(Err(json!({
|
||||
"path": "src/main.rs",
|
||||
"display_description": "Fix error handling",
|
||||
"old_string": "old code",
|
||||
"new_string": "new code"
|
||||
}))),
|
||||
"Fix error handling"
|
||||
);
|
||||
assert_eq!(
|
||||
tool.initial_title(Err(json!({
|
||||
"path": "",
|
||||
"display_description": "",
|
||||
"old_string": "old code",
|
||||
"new_string": "new code"
|
||||
}))),
|
||||
DEFAULT_UI_TEXT
|
||||
);
|
||||
assert_eq!(
|
||||
tool.initial_title(Err(serde_json::Value::Null)),
|
||||
DEFAULT_UI_TEXT
|
||||
);
|
||||
cx.update(|cx| {
|
||||
// ...
|
||||
assert_eq!(
|
||||
tool.initial_title(
|
||||
Err(json!({
|
||||
"path": "src/main.rs",
|
||||
"display_description": "",
|
||||
"old_string": "old code",
|
||||
"new_string": "new code"
|
||||
})),
|
||||
cx
|
||||
),
|
||||
"src/main.rs"
|
||||
);
|
||||
assert_eq!(
|
||||
tool.initial_title(
|
||||
Err(json!({
|
||||
"path": "",
|
||||
"display_description": "Fix error handling",
|
||||
"old_string": "old code",
|
||||
"new_string": "new code"
|
||||
})),
|
||||
cx
|
||||
),
|
||||
"Fix error handling"
|
||||
);
|
||||
assert_eq!(
|
||||
tool.initial_title(
|
||||
Err(json!({
|
||||
"path": "src/main.rs",
|
||||
"display_description": "Fix error handling",
|
||||
"old_string": "old code",
|
||||
"new_string": "new code"
|
||||
})),
|
||||
cx
|
||||
),
|
||||
"src/main.rs"
|
||||
);
|
||||
assert_eq!(
|
||||
tool.initial_title(
|
||||
Err(json!({
|
||||
"path": "",
|
||||
"display_description": "",
|
||||
"old_string": "old code",
|
||||
"new_string": "new code"
|
||||
})),
|
||||
cx
|
||||
),
|
||||
DEFAULT_UI_TEXT
|
||||
);
|
||||
assert_eq!(
|
||||
tool.initial_title(Err(serde_json::Value::Null), cx),
|
||||
DEFAULT_UI_TEXT
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
@@ -1575,7 +1653,11 @@ mod tests {
|
||||
|
||||
// Ensure the diff is finalized after the edit completes.
|
||||
{
|
||||
let tool = Arc::new(EditFileTool::new(thread.downgrade(), languages.clone()));
|
||||
let tool = Arc::new(EditFileTool::new(
|
||||
project.clone(),
|
||||
thread.downgrade(),
|
||||
languages.clone(),
|
||||
));
|
||||
let (stream_tx, mut stream_rx) = ToolCallEventStream::test();
|
||||
let edit = cx.update(|cx| {
|
||||
tool.run(
|
||||
@@ -1600,7 +1682,11 @@ mod tests {
|
||||
// Ensure the diff is finalized if an error occurs while editing.
|
||||
{
|
||||
model.forbid_requests();
|
||||
let tool = Arc::new(EditFileTool::new(thread.downgrade(), languages.clone()));
|
||||
let tool = Arc::new(EditFileTool::new(
|
||||
project.clone(),
|
||||
thread.downgrade(),
|
||||
languages.clone(),
|
||||
));
|
||||
let (stream_tx, mut stream_rx) = ToolCallEventStream::test();
|
||||
let edit = cx.update(|cx| {
|
||||
tool.run(
|
||||
@@ -1623,7 +1709,11 @@ mod tests {
|
||||
|
||||
// Ensure the diff is finalized if the tool call gets dropped.
|
||||
{
|
||||
let tool = Arc::new(EditFileTool::new(thread.downgrade(), languages.clone()));
|
||||
let tool = Arc::new(EditFileTool::new(
|
||||
project.clone(),
|
||||
thread.downgrade(),
|
||||
languages.clone(),
|
||||
));
|
||||
let (stream_tx, mut stream_rx) = ToolCallEventStream::test();
|
||||
let edit = cx.update(|cx| {
|
||||
tool.run(
|
||||
|
||||
@@ -126,7 +126,11 @@ impl AgentTool for FetchTool {
|
||||
acp::ToolKind::Fetch
|
||||
}
|
||||
|
||||
fn initial_title(&self, input: Result<Self::Input, serde_json::Value>) -> SharedString {
|
||||
fn initial_title(
|
||||
&self,
|
||||
input: Result<Self::Input, serde_json::Value>,
|
||||
_cx: &mut App,
|
||||
) -> SharedString {
|
||||
match input {
|
||||
Ok(input) => format!("Fetch {}", MarkdownEscaped(&input.url)).into(),
|
||||
Err(_) => "Fetch URL".into(),
|
||||
|
||||
@@ -93,7 +93,11 @@ impl AgentTool for FindPathTool {
|
||||
acp::ToolKind::Search
|
||||
}
|
||||
|
||||
fn initial_title(&self, input: Result<Self::Input, serde_json::Value>) -> SharedString {
|
||||
fn initial_title(
|
||||
&self,
|
||||
input: Result<Self::Input, serde_json::Value>,
|
||||
_cx: &mut App,
|
||||
) -> SharedString {
|
||||
let mut title = "Find paths".to_string();
|
||||
if let Ok(input) = input {
|
||||
title.push_str(&format!(" matching “`{}`”", input.glob));
|
||||
|
||||
@@ -75,7 +75,11 @@ impl AgentTool for GrepTool {
|
||||
acp::ToolKind::Search
|
||||
}
|
||||
|
||||
fn initial_title(&self, input: Result<Self::Input, serde_json::Value>) -> SharedString {
|
||||
fn initial_title(
|
||||
&self,
|
||||
input: Result<Self::Input, serde_json::Value>,
|
||||
_cx: &mut App,
|
||||
) -> SharedString {
|
||||
match input {
|
||||
Ok(input) => {
|
||||
let page = input.page();
|
||||
|
||||
@@ -59,7 +59,11 @@ impl AgentTool for ListDirectoryTool {
|
||||
ToolKind::Read
|
||||
}
|
||||
|
||||
fn initial_title(&self, input: Result<Self::Input, serde_json::Value>) -> SharedString {
|
||||
fn initial_title(
|
||||
&self,
|
||||
input: Result<Self::Input, serde_json::Value>,
|
||||
_cx: &mut App,
|
||||
) -> SharedString {
|
||||
if let Ok(input) = input {
|
||||
let path = MarkdownInlineCode(&input.path);
|
||||
format!("List the {path} directory's contents").into()
|
||||
|
||||
@@ -60,7 +60,11 @@ impl AgentTool for MovePathTool {
|
||||
ToolKind::Move
|
||||
}
|
||||
|
||||
fn initial_title(&self, input: Result<Self::Input, serde_json::Value>) -> SharedString {
|
||||
fn initial_title(
|
||||
&self,
|
||||
input: Result<Self::Input, serde_json::Value>,
|
||||
_cx: &mut App,
|
||||
) -> SharedString {
|
||||
if let Ok(input) = input {
|
||||
let src = MarkdownInlineCode(&input.source_path);
|
||||
let dest = MarkdownInlineCode(&input.destination_path);
|
||||
|
||||
@@ -41,7 +41,11 @@ impl AgentTool for NowTool {
|
||||
acp::ToolKind::Other
|
||||
}
|
||||
|
||||
fn initial_title(&self, _input: Result<Self::Input, serde_json::Value>) -> SharedString {
|
||||
fn initial_title(
|
||||
&self,
|
||||
_input: Result<Self::Input, serde_json::Value>,
|
||||
_cx: &mut App,
|
||||
) -> SharedString {
|
||||
"Get current time".into()
|
||||
}
|
||||
|
||||
|
||||
@@ -45,7 +45,11 @@ impl AgentTool for OpenTool {
|
||||
ToolKind::Execute
|
||||
}
|
||||
|
||||
fn initial_title(&self, input: Result<Self::Input, serde_json::Value>) -> SharedString {
|
||||
fn initial_title(
|
||||
&self,
|
||||
input: Result<Self::Input, serde_json::Value>,
|
||||
_cx: &mut App,
|
||||
) -> SharedString {
|
||||
if let Ok(input) = input {
|
||||
format!("Open `{}`", MarkdownEscaped(&input.path_or_url)).into()
|
||||
} else {
|
||||
@@ -61,7 +65,7 @@ impl AgentTool for OpenTool {
|
||||
) -> Task<Result<Self::Output>> {
|
||||
// If path_or_url turns out to be a path in the project, make it absolute.
|
||||
let abs_path = to_absolute_path(&input.path_or_url, self.project.clone(), cx);
|
||||
let authorize = event_stream.authorize(self.initial_title(Ok(input.clone())), cx);
|
||||
let authorize = event_stream.authorize(self.initial_title(Ok(input.clone()), cx), cx);
|
||||
cx.background_spawn(async move {
|
||||
authorize.await?;
|
||||
|
||||
|
||||
@@ -10,7 +10,7 @@ use project::{AgentLocation, ImageItem, Project, WorktreeSettings, image_store};
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::Settings;
|
||||
use std::{path::Path, sync::Arc};
|
||||
use std::sync::Arc;
|
||||
use util::markdown::MarkdownCodeBlock;
|
||||
|
||||
use crate::{AgentTool, ToolCallEventStream};
|
||||
@@ -68,13 +68,31 @@ impl AgentTool for ReadFileTool {
|
||||
acp::ToolKind::Read
|
||||
}
|
||||
|
||||
fn initial_title(&self, input: Result<Self::Input, serde_json::Value>) -> SharedString {
|
||||
input
|
||||
.ok()
|
||||
.as_ref()
|
||||
.and_then(|input| Path::new(&input.path).file_name())
|
||||
.map(|file_name| file_name.to_string_lossy().to_string().into())
|
||||
.unwrap_or_default()
|
||||
fn initial_title(
|
||||
&self,
|
||||
input: Result<Self::Input, serde_json::Value>,
|
||||
cx: &mut App,
|
||||
) -> SharedString {
|
||||
if let Ok(input) = input
|
||||
&& let Some(project_path) = self.project.read(cx).find_project_path(&input.path, cx)
|
||||
&& let Some(path) = self
|
||||
.project
|
||||
.read(cx)
|
||||
.short_full_path_for_project_path(&project_path, cx)
|
||||
{
|
||||
match (input.start_line, input.end_line) {
|
||||
(Some(start), Some(end)) => {
|
||||
format!("Read file `{}` (lines {}-{})", path.display(), start, end,)
|
||||
}
|
||||
(Some(start), None) => {
|
||||
format!("Read file `{}` (from line {})", path.display(), start)
|
||||
}
|
||||
_ => format!("Read file `{}`", path.display()),
|
||||
}
|
||||
.into()
|
||||
} else {
|
||||
"Read file".into()
|
||||
}
|
||||
}
|
||||
|
||||
fn run(
|
||||
@@ -86,6 +104,12 @@ impl AgentTool for ReadFileTool {
|
||||
let Some(project_path) = self.project.read(cx).find_project_path(&input.path, cx) else {
|
||||
return Task::ready(Err(anyhow!("Path {} not found in project", &input.path)));
|
||||
};
|
||||
let Some(abs_path) = self.project.read(cx).absolute_path(&project_path, cx) else {
|
||||
return Task::ready(Err(anyhow!(
|
||||
"Failed to convert {} to absolute path",
|
||||
&input.path
|
||||
)));
|
||||
};
|
||||
|
||||
// Error out if this path is either excluded or private in global settings
|
||||
let global_settings = WorktreeSettings::get_global(cx);
|
||||
@@ -121,6 +145,14 @@ impl AgentTool for ReadFileTool {
|
||||
|
||||
let file_path = input.path.clone();
|
||||
|
||||
event_stream.update_fields(ToolCallUpdateFields {
|
||||
locations: Some(vec![acp::ToolCallLocation {
|
||||
path: abs_path,
|
||||
line: input.start_line.map(|line| line.saturating_sub(1)),
|
||||
}]),
|
||||
..Default::default()
|
||||
});
|
||||
|
||||
if image_store::is_image_file(&self.project, &project_path, cx) {
|
||||
return cx.spawn(async move |cx| {
|
||||
let image_entity: Entity<ImageItem> = cx
|
||||
@@ -229,34 +261,25 @@ impl AgentTool for ReadFileTool {
|
||||
};
|
||||
|
||||
project.update(cx, |project, cx| {
|
||||
if let Some(abs_path) = project.absolute_path(&project_path, cx) {
|
||||
project.set_agent_location(
|
||||
Some(AgentLocation {
|
||||
buffer: buffer.downgrade(),
|
||||
position: anchor.unwrap_or(text::Anchor::MIN),
|
||||
}),
|
||||
cx,
|
||||
);
|
||||
project.set_agent_location(
|
||||
Some(AgentLocation {
|
||||
buffer: buffer.downgrade(),
|
||||
position: anchor.unwrap_or(text::Anchor::MIN),
|
||||
}),
|
||||
cx,
|
||||
);
|
||||
if let Ok(LanguageModelToolResultContent::Text(text)) = &result {
|
||||
let markdown = MarkdownCodeBlock {
|
||||
tag: &input.path,
|
||||
text,
|
||||
}
|
||||
.to_string();
|
||||
event_stream.update_fields(ToolCallUpdateFields {
|
||||
locations: Some(vec![acp::ToolCallLocation {
|
||||
path: abs_path,
|
||||
line: input.start_line.map(|line| line.saturating_sub(1)),
|
||||
content: Some(vec![acp::ToolCallContent::Content {
|
||||
content: markdown.into(),
|
||||
}]),
|
||||
..Default::default()
|
||||
});
|
||||
if let Ok(LanguageModelToolResultContent::Text(text)) = &result {
|
||||
let markdown = MarkdownCodeBlock {
|
||||
tag: &input.path,
|
||||
text,
|
||||
}
|
||||
.to_string();
|
||||
event_stream.update_fields(ToolCallUpdateFields {
|
||||
content: Some(vec![acp::ToolCallContent::Content {
|
||||
content: markdown.into(),
|
||||
}]),
|
||||
..Default::default()
|
||||
})
|
||||
}
|
||||
})
|
||||
}
|
||||
})?;
|
||||
|
||||
|
||||
@@ -60,7 +60,11 @@ impl AgentTool for TerminalTool {
|
||||
acp::ToolKind::Execute
|
||||
}
|
||||
|
||||
fn initial_title(&self, input: Result<Self::Input, serde_json::Value>) -> SharedString {
|
||||
fn initial_title(
|
||||
&self,
|
||||
input: Result<Self::Input, serde_json::Value>,
|
||||
_cx: &mut App,
|
||||
) -> SharedString {
|
||||
if let Ok(input) = input {
|
||||
let mut lines = input.command.lines();
|
||||
let first_line = lines.next().unwrap_or_default();
|
||||
@@ -93,7 +97,7 @@ impl AgentTool for TerminalTool {
|
||||
Err(err) => return Task::ready(Err(err)),
|
||||
};
|
||||
|
||||
let authorize = event_stream.authorize(self.initial_title(Ok(input.clone())), cx);
|
||||
let authorize = event_stream.authorize(self.initial_title(Ok(input.clone()), cx), cx);
|
||||
cx.spawn(async move |cx| {
|
||||
authorize.await?;
|
||||
|
||||
|
||||
@@ -29,7 +29,11 @@ impl AgentTool for ThinkingTool {
|
||||
acp::ToolKind::Think
|
||||
}
|
||||
|
||||
fn initial_title(&self, _input: Result<Self::Input, serde_json::Value>) -> SharedString {
|
||||
fn initial_title(
|
||||
&self,
|
||||
_input: Result<Self::Input, serde_json::Value>,
|
||||
_cx: &mut App,
|
||||
) -> SharedString {
|
||||
"Thinking".into()
|
||||
}
|
||||
|
||||
|
||||
@@ -48,7 +48,11 @@ impl AgentTool for WebSearchTool {
|
||||
acp::ToolKind::Fetch
|
||||
}
|
||||
|
||||
fn initial_title(&self, _input: Result<Self::Input, serde_json::Value>) -> SharedString {
|
||||
fn initial_title(
|
||||
&self,
|
||||
_input: Result<Self::Input, serde_json::Value>,
|
||||
_cx: &mut App,
|
||||
) -> SharedString {
|
||||
"Searching the Web".into()
|
||||
}
|
||||
|
||||
|
||||
@@ -35,22 +35,18 @@ language.workspace = true
|
||||
language_model.workspace = true
|
||||
language_models.workspace = true
|
||||
log.workspace = true
|
||||
node_runtime.workspace = true
|
||||
paths.workspace = true
|
||||
project.workspace = true
|
||||
reqwest_client = { workspace = true, optional = true }
|
||||
schemars.workspace = true
|
||||
semver.workspace = true
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
settings.workspace = true
|
||||
smol.workspace = true
|
||||
task.workspace = true
|
||||
tempfile.workspace = true
|
||||
thiserror.workspace = true
|
||||
ui.workspace = true
|
||||
util.workspace = true
|
||||
watch.workspace = true
|
||||
which.workspace = true
|
||||
workspace-hack.workspace = true
|
||||
|
||||
[target.'cfg(unix)'.dependencies]
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
use crate::AgentServerCommand;
|
||||
use acp_thread::AgentConnection;
|
||||
use acp_tools::AcpConnectionRegistry;
|
||||
use action_log::ActionLog;
|
||||
@@ -8,8 +7,10 @@ use collections::HashMap;
|
||||
use futures::AsyncBufReadExt as _;
|
||||
use futures::io::BufReader;
|
||||
use project::Project;
|
||||
use project::agent_server_store::AgentServerCommand;
|
||||
use serde::Deserialize;
|
||||
|
||||
use std::path::PathBuf;
|
||||
use std::{any::Any, cell::RefCell};
|
||||
use std::{path::Path, rc::Rc};
|
||||
use thiserror::Error;
|
||||
@@ -29,6 +30,7 @@ pub struct AcpConnection {
|
||||
sessions: Rc<RefCell<HashMap<acp::SessionId, AcpSession>>>,
|
||||
auth_methods: Vec<acp::AuthMethod>,
|
||||
agent_capabilities: acp::AgentCapabilities,
|
||||
root_dir: PathBuf,
|
||||
_io_task: Task<Result<()>>,
|
||||
_wait_task: Task<Result<()>>,
|
||||
_stderr_task: Task<Result<()>>,
|
||||
@@ -43,9 +45,10 @@ pub async fn connect(
|
||||
server_name: SharedString,
|
||||
command: AgentServerCommand,
|
||||
root_dir: &Path,
|
||||
is_remote: bool,
|
||||
cx: &mut AsyncApp,
|
||||
) -> Result<Rc<dyn AgentConnection>> {
|
||||
let conn = AcpConnection::stdio(server_name, command.clone(), root_dir, cx).await?;
|
||||
let conn = AcpConnection::stdio(server_name, command.clone(), root_dir, is_remote, cx).await?;
|
||||
Ok(Rc::new(conn) as _)
|
||||
}
|
||||
|
||||
@@ -56,17 +59,21 @@ impl AcpConnection {
|
||||
server_name: SharedString,
|
||||
command: AgentServerCommand,
|
||||
root_dir: &Path,
|
||||
is_remote: bool,
|
||||
cx: &mut AsyncApp,
|
||||
) -> Result<Self> {
|
||||
let mut child = util::command::new_smol_command(command.path)
|
||||
let mut child = util::command::new_smol_command(command.path);
|
||||
child
|
||||
.args(command.args.iter().map(|arg| arg.as_str()))
|
||||
.envs(command.env.iter().flatten())
|
||||
.current_dir(root_dir)
|
||||
.stdin(std::process::Stdio::piped())
|
||||
.stdout(std::process::Stdio::piped())
|
||||
.stderr(std::process::Stdio::piped())
|
||||
.kill_on_drop(true)
|
||||
.spawn()?;
|
||||
.kill_on_drop(true);
|
||||
if !is_remote {
|
||||
child.current_dir(root_dir);
|
||||
}
|
||||
let mut child = child.spawn()?;
|
||||
|
||||
let stdout = child.stdout.take().context("Failed to take stdout")?;
|
||||
let stdin = child.stdin.take().context("Failed to take stdin")?;
|
||||
@@ -145,6 +152,7 @@ impl AcpConnection {
|
||||
|
||||
Ok(Self {
|
||||
auth_methods: response.auth_methods,
|
||||
root_dir: root_dir.to_owned(),
|
||||
connection,
|
||||
server_name,
|
||||
sessions,
|
||||
@@ -158,6 +166,10 @@ impl AcpConnection {
|
||||
pub fn prompt_capabilities(&self) -> &acp::PromptCapabilities {
|
||||
&self.agent_capabilities.prompt_capabilities
|
||||
}
|
||||
|
||||
pub fn root_dir(&self) -> &Path {
|
||||
&self.root_dir
|
||||
}
|
||||
}
|
||||
|
||||
impl AgentConnection for AcpConnection {
|
||||
@@ -171,29 +183,36 @@ impl AgentConnection for AcpConnection {
|
||||
let sessions = self.sessions.clone();
|
||||
let cwd = cwd.to_path_buf();
|
||||
let context_server_store = project.read(cx).context_server_store().read(cx);
|
||||
let mcp_servers = context_server_store
|
||||
.configured_server_ids()
|
||||
.iter()
|
||||
.filter_map(|id| {
|
||||
let configuration = context_server_store.configuration_for_server(id)?;
|
||||
let command = configuration.command();
|
||||
Some(acp::McpServer {
|
||||
name: id.0.to_string(),
|
||||
command: command.path.clone(),
|
||||
args: command.args.clone(),
|
||||
env: if let Some(env) = command.env.as_ref() {
|
||||
env.iter()
|
||||
.map(|(name, value)| acp::EnvVariable {
|
||||
name: name.clone(),
|
||||
value: value.clone(),
|
||||
})
|
||||
.collect()
|
||||
} else {
|
||||
vec![]
|
||||
},
|
||||
let mcp_servers = if project.read(cx).is_local() {
|
||||
context_server_store
|
||||
.configured_server_ids()
|
||||
.iter()
|
||||
.filter_map(|id| {
|
||||
let configuration = context_server_store.configuration_for_server(id)?;
|
||||
let command = configuration.command();
|
||||
Some(acp::McpServer {
|
||||
name: id.0.to_string(),
|
||||
command: command.path.clone(),
|
||||
args: command.args.clone(),
|
||||
env: if let Some(env) = command.env.as_ref() {
|
||||
env.iter()
|
||||
.map(|(name, value)| acp::EnvVariable {
|
||||
name: name.clone(),
|
||||
value: value.clone(),
|
||||
})
|
||||
.collect()
|
||||
} else {
|
||||
vec![]
|
||||
},
|
||||
})
|
||||
})
|
||||
})
|
||||
.collect();
|
||||
.collect()
|
||||
} else {
|
||||
// In SSH projects, the external agent is running on the remote
|
||||
// machine, and currently we only run MCP servers on the local
|
||||
// machine. So don't pass any MCP servers to the agent in that case.
|
||||
Vec::new()
|
||||
};
|
||||
|
||||
cx.spawn(async move |cx| {
|
||||
let response = conn
|
||||
|
||||
@@ -2,47 +2,25 @@ mod acp;
|
||||
mod claude;
|
||||
mod custom;
|
||||
mod gemini;
|
||||
mod settings;
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
pub mod e2e_tests;
|
||||
|
||||
use anyhow::Context as _;
|
||||
pub use claude::*;
|
||||
pub use custom::*;
|
||||
use fs::Fs;
|
||||
use fs::RemoveOptions;
|
||||
use fs::RenameOptions;
|
||||
use futures::StreamExt as _;
|
||||
pub use gemini::*;
|
||||
use gpui::AppContext;
|
||||
use node_runtime::NodeRuntime;
|
||||
pub use settings::*;
|
||||
use project::agent_server_store::AgentServerStore;
|
||||
|
||||
use acp_thread::AgentConnection;
|
||||
use acp_thread::LoadError;
|
||||
use anyhow::Result;
|
||||
use anyhow::anyhow;
|
||||
use collections::HashMap;
|
||||
use gpui::{App, AsyncApp, Entity, SharedString, Task};
|
||||
use gpui::{App, Entity, SharedString, Task};
|
||||
use project::Project;
|
||||
use schemars::JsonSchema;
|
||||
use semver::Version;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::str::FromStr as _;
|
||||
use std::{
|
||||
any::Any,
|
||||
path::{Path, PathBuf},
|
||||
rc::Rc,
|
||||
sync::Arc,
|
||||
};
|
||||
use util::ResultExt as _;
|
||||
use std::{any::Any, path::Path, rc::Rc};
|
||||
|
||||
pub fn init(cx: &mut App) {
|
||||
settings::init(cx);
|
||||
}
|
||||
pub use acp::AcpConnection;
|
||||
|
||||
pub struct AgentServerDelegate {
|
||||
store: Entity<AgentServerStore>,
|
||||
project: Entity<Project>,
|
||||
status_tx: Option<watch::Sender<SharedString>>,
|
||||
new_version_available: Option<watch::Sender<Option<String>>>,
|
||||
@@ -50,11 +28,13 @@ pub struct AgentServerDelegate {
|
||||
|
||||
impl AgentServerDelegate {
|
||||
pub fn new(
|
||||
store: Entity<AgentServerStore>,
|
||||
project: Entity<Project>,
|
||||
status_tx: Option<watch::Sender<SharedString>>,
|
||||
new_version_tx: Option<watch::Sender<Option<String>>>,
|
||||
) -> Self {
|
||||
Self {
|
||||
store,
|
||||
project,
|
||||
status_tx,
|
||||
new_version_available: new_version_tx,
|
||||
@@ -64,188 +44,6 @@ impl AgentServerDelegate {
|
||||
pub fn project(&self) -> &Entity<Project> {
|
||||
&self.project
|
||||
}
|
||||
|
||||
fn get_or_npm_install_builtin_agent(
|
||||
self,
|
||||
binary_name: SharedString,
|
||||
package_name: SharedString,
|
||||
entrypoint_path: PathBuf,
|
||||
ignore_system_version: bool,
|
||||
minimum_version: Option<Version>,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<AgentServerCommand>> {
|
||||
let project = self.project;
|
||||
let fs = project.read(cx).fs().clone();
|
||||
let Some(node_runtime) = project.read(cx).node_runtime().cloned() else {
|
||||
return Task::ready(Err(anyhow!(
|
||||
"External agents are not yet available in remote projects."
|
||||
)));
|
||||
};
|
||||
let status_tx = self.status_tx;
|
||||
let new_version_available = self.new_version_available;
|
||||
|
||||
cx.spawn(async move |cx| {
|
||||
if !ignore_system_version {
|
||||
if let Some(bin) = find_bin_in_path(binary_name.clone(), &project, cx).await {
|
||||
return Ok(AgentServerCommand {
|
||||
path: bin,
|
||||
args: Vec::new(),
|
||||
env: Default::default(),
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
cx.spawn(async move |cx| {
|
||||
let node_path = node_runtime.binary_path().await?;
|
||||
let dir = paths::data_dir()
|
||||
.join("external_agents")
|
||||
.join(binary_name.as_str());
|
||||
fs.create_dir(&dir).await?;
|
||||
|
||||
let mut stream = fs.read_dir(&dir).await?;
|
||||
let mut versions = Vec::new();
|
||||
let mut to_delete = Vec::new();
|
||||
while let Some(entry) = stream.next().await {
|
||||
let Ok(entry) = entry else { continue };
|
||||
let Some(file_name) = entry.file_name() else {
|
||||
continue;
|
||||
};
|
||||
|
||||
if let Some(name) = file_name.to_str()
|
||||
&& let Some(version) = semver::Version::from_str(name).ok()
|
||||
&& fs
|
||||
.is_file(&dir.join(file_name).join(&entrypoint_path))
|
||||
.await
|
||||
{
|
||||
versions.push((version, file_name.to_owned()));
|
||||
} else {
|
||||
to_delete.push(file_name.to_owned())
|
||||
}
|
||||
}
|
||||
|
||||
versions.sort();
|
||||
let newest_version = if let Some((version, file_name)) = versions.last().cloned()
|
||||
&& minimum_version.is_none_or(|minimum_version| version >= minimum_version)
|
||||
{
|
||||
versions.pop();
|
||||
Some(file_name)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
log::debug!("existing version of {package_name}: {newest_version:?}");
|
||||
to_delete.extend(versions.into_iter().map(|(_, file_name)| file_name));
|
||||
|
||||
cx.background_spawn({
|
||||
let fs = fs.clone();
|
||||
let dir = dir.clone();
|
||||
async move {
|
||||
for file_name in to_delete {
|
||||
fs.remove_dir(
|
||||
&dir.join(file_name),
|
||||
RemoveOptions {
|
||||
recursive: true,
|
||||
ignore_if_not_exists: false,
|
||||
},
|
||||
)
|
||||
.await
|
||||
.ok();
|
||||
}
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
|
||||
let version = if let Some(file_name) = newest_version {
|
||||
cx.background_spawn({
|
||||
let file_name = file_name.clone();
|
||||
let dir = dir.clone();
|
||||
let fs = fs.clone();
|
||||
async move {
|
||||
let latest_version =
|
||||
node_runtime.npm_package_latest_version(&package_name).await;
|
||||
if let Ok(latest_version) = latest_version
|
||||
&& &latest_version != &file_name.to_string_lossy()
|
||||
{
|
||||
Self::download_latest_version(
|
||||
fs,
|
||||
dir.clone(),
|
||||
node_runtime,
|
||||
package_name,
|
||||
)
|
||||
.await
|
||||
.log_err();
|
||||
if let Some(mut new_version_available) = new_version_available {
|
||||
new_version_available.send(Some(latest_version)).ok();
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
file_name
|
||||
} else {
|
||||
if let Some(mut status_tx) = status_tx {
|
||||
status_tx.send("Installing…".into()).ok();
|
||||
}
|
||||
let dir = dir.clone();
|
||||
cx.background_spawn(Self::download_latest_version(
|
||||
fs.clone(),
|
||||
dir.clone(),
|
||||
node_runtime,
|
||||
package_name,
|
||||
))
|
||||
.await?
|
||||
.into()
|
||||
};
|
||||
|
||||
let agent_server_path = dir.join(version).join(entrypoint_path);
|
||||
let agent_server_path_exists = fs.is_file(&agent_server_path).await;
|
||||
anyhow::ensure!(
|
||||
agent_server_path_exists,
|
||||
"Missing entrypoint path {} after installation",
|
||||
agent_server_path.to_string_lossy()
|
||||
);
|
||||
|
||||
anyhow::Ok(AgentServerCommand {
|
||||
path: node_path,
|
||||
args: vec![agent_server_path.to_string_lossy().to_string()],
|
||||
env: Default::default(),
|
||||
})
|
||||
})
|
||||
.await
|
||||
.map_err(|e| LoadError::FailedToInstall(e.to_string().into()).into())
|
||||
})
|
||||
}
|
||||
|
||||
async fn download_latest_version(
|
||||
fs: Arc<dyn Fs>,
|
||||
dir: PathBuf,
|
||||
node_runtime: NodeRuntime,
|
||||
package_name: SharedString,
|
||||
) -> Result<String> {
|
||||
log::debug!("downloading latest version of {package_name}");
|
||||
|
||||
let tmp_dir = tempfile::tempdir_in(&dir)?;
|
||||
|
||||
node_runtime
|
||||
.npm_install_packages(tmp_dir.path(), &[(&package_name, "latest")])
|
||||
.await?;
|
||||
|
||||
let version = node_runtime
|
||||
.npm_package_installed_version(tmp_dir.path(), &package_name)
|
||||
.await?
|
||||
.context("expected package to be installed")?;
|
||||
|
||||
fs.rename(
|
||||
&tmp_dir.keep(),
|
||||
&dir.join(&version),
|
||||
RenameOptions {
|
||||
ignore_if_exists: true,
|
||||
overwrite: false,
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
|
||||
anyhow::Ok(version)
|
||||
}
|
||||
}
|
||||
|
||||
pub trait AgentServer: Send {
|
||||
@@ -255,10 +53,10 @@ pub trait AgentServer: Send {
|
||||
|
||||
fn connect(
|
||||
&self,
|
||||
root_dir: &Path,
|
||||
root_dir: Option<&Path>,
|
||||
delegate: AgentServerDelegate,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<Rc<dyn AgentConnection>>>;
|
||||
) -> Task<Result<(Rc<dyn AgentConnection>, Option<task::SpawnInTerminal>)>>;
|
||||
|
||||
fn into_any(self: Rc<Self>) -> Rc<dyn Any>;
|
||||
}
|
||||
@@ -268,120 +66,3 @@ impl dyn AgentServer {
|
||||
self.into_any().downcast().ok()
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Debug for AgentServerCommand {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
let filtered_env = self.env.as_ref().map(|env| {
|
||||
env.iter()
|
||||
.map(|(k, v)| {
|
||||
(
|
||||
k,
|
||||
if util::redact::should_redact(k) {
|
||||
"[REDACTED]"
|
||||
} else {
|
||||
v
|
||||
},
|
||||
)
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
});
|
||||
|
||||
f.debug_struct("AgentServerCommand")
|
||||
.field("path", &self.path)
|
||||
.field("args", &self.args)
|
||||
.field("env", &filtered_env)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Serialize, Clone, PartialEq, Eq, JsonSchema)]
|
||||
pub struct AgentServerCommand {
|
||||
#[serde(rename = "command")]
|
||||
pub path: PathBuf,
|
||||
#[serde(default)]
|
||||
pub args: Vec<String>,
|
||||
pub env: Option<HashMap<String, String>>,
|
||||
}
|
||||
|
||||
impl AgentServerCommand {
|
||||
pub async fn resolve(
|
||||
path_bin_name: &'static str,
|
||||
extra_args: &[&'static str],
|
||||
fallback_path: Option<&Path>,
|
||||
settings: Option<BuiltinAgentServerSettings>,
|
||||
project: &Entity<Project>,
|
||||
cx: &mut AsyncApp,
|
||||
) -> Option<Self> {
|
||||
if let Some(settings) = settings
|
||||
&& let Some(command) = settings.custom_command()
|
||||
{
|
||||
Some(command)
|
||||
} else {
|
||||
match find_bin_in_path(path_bin_name.into(), project, cx).await {
|
||||
Some(path) => Some(Self {
|
||||
path,
|
||||
args: extra_args.iter().map(|arg| arg.to_string()).collect(),
|
||||
env: None,
|
||||
}),
|
||||
None => fallback_path.and_then(|path| {
|
||||
if path.exists() {
|
||||
Some(Self {
|
||||
path: path.to_path_buf(),
|
||||
args: extra_args.iter().map(|arg| arg.to_string()).collect(),
|
||||
env: None,
|
||||
})
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn find_bin_in_path(
|
||||
bin_name: SharedString,
|
||||
project: &Entity<Project>,
|
||||
cx: &mut AsyncApp,
|
||||
) -> Option<PathBuf> {
|
||||
let (env_task, root_dir) = project
|
||||
.update(cx, |project, cx| {
|
||||
let worktree = project.visible_worktrees(cx).next();
|
||||
match worktree {
|
||||
Some(worktree) => {
|
||||
let env_task = project.environment().update(cx, |env, cx| {
|
||||
env.get_worktree_environment(worktree.clone(), cx)
|
||||
});
|
||||
|
||||
let path = worktree.read(cx).abs_path();
|
||||
(env_task, path)
|
||||
}
|
||||
None => {
|
||||
let path: Arc<Path> = paths::home_dir().as_path().into();
|
||||
let env_task = project.environment().update(cx, |env, cx| {
|
||||
env.get_directory_environment(path.clone(), cx)
|
||||
});
|
||||
(env_task, path)
|
||||
}
|
||||
}
|
||||
})
|
||||
.log_err()?;
|
||||
|
||||
cx.background_executor()
|
||||
.spawn(async move {
|
||||
let which_result = if cfg!(windows) {
|
||||
which::which(bin_name.as_str())
|
||||
} else {
|
||||
let env = env_task.await.unwrap_or_default();
|
||||
let shell_path = env.get("PATH").cloned();
|
||||
which::which_in(bin_name.as_str(), shell_path.as_ref(), root_dir.as_ref())
|
||||
};
|
||||
|
||||
if let Err(which::Error::CannotFindBinaryPath) = which_result {
|
||||
return None;
|
||||
}
|
||||
|
||||
which_result.log_err()
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
@@ -1,60 +1,22 @@
|
||||
use settings::SettingsStore;
|
||||
use std::path::Path;
|
||||
use std::rc::Rc;
|
||||
use std::{any::Any, path::PathBuf};
|
||||
|
||||
use anyhow::Result;
|
||||
use gpui::{App, AppContext as _, SharedString, Task};
|
||||
use anyhow::{Context as _, Result};
|
||||
use gpui::{App, SharedString, Task};
|
||||
use project::agent_server_store::CLAUDE_CODE_NAME;
|
||||
|
||||
use crate::{AgentServer, AgentServerDelegate, AllAgentServersSettings};
|
||||
use crate::{AgentServer, AgentServerDelegate};
|
||||
use acp_thread::AgentConnection;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct ClaudeCode;
|
||||
|
||||
pub struct ClaudeCodeLoginCommand {
|
||||
pub struct AgentServerLoginCommand {
|
||||
pub path: PathBuf,
|
||||
pub arguments: Vec<String>,
|
||||
}
|
||||
|
||||
impl ClaudeCode {
|
||||
const BINARY_NAME: &'static str = "claude-code-acp";
|
||||
const PACKAGE_NAME: &'static str = "@zed-industries/claude-code-acp";
|
||||
|
||||
pub fn login_command(
|
||||
delegate: AgentServerDelegate,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<ClaudeCodeLoginCommand>> {
|
||||
let settings = cx.read_global(|settings: &SettingsStore, _| {
|
||||
settings.get::<AllAgentServersSettings>(None).claude.clone()
|
||||
});
|
||||
|
||||
cx.spawn(async move |cx| {
|
||||
let mut command = if let Some(settings) = settings {
|
||||
settings.command
|
||||
} else {
|
||||
cx.update(|cx| {
|
||||
delegate.get_or_npm_install_builtin_agent(
|
||||
Self::BINARY_NAME.into(),
|
||||
Self::PACKAGE_NAME.into(),
|
||||
"node_modules/@anthropic-ai/claude-code/cli.js".into(),
|
||||
true,
|
||||
Some("0.2.5".parse().unwrap()),
|
||||
cx,
|
||||
)
|
||||
})?
|
||||
.await?
|
||||
};
|
||||
command.args.push("/login".into());
|
||||
|
||||
Ok(ClaudeCodeLoginCommand {
|
||||
path: command.path,
|
||||
arguments: command.args,
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl AgentServer for ClaudeCode {
|
||||
fn telemetry_id(&self) -> &'static str {
|
||||
"claude-code"
|
||||
@@ -70,56 +32,33 @@ impl AgentServer for ClaudeCode {
|
||||
|
||||
fn connect(
|
||||
&self,
|
||||
root_dir: &Path,
|
||||
root_dir: Option<&Path>,
|
||||
delegate: AgentServerDelegate,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<Rc<dyn AgentConnection>>> {
|
||||
let root_dir = root_dir.to_path_buf();
|
||||
let fs = delegate.project().read(cx).fs().clone();
|
||||
let server_name = self.name();
|
||||
let settings = cx.read_global(|settings: &SettingsStore, _| {
|
||||
settings.get::<AllAgentServersSettings>(None).claude.clone()
|
||||
});
|
||||
let project = delegate.project().clone();
|
||||
) -> Task<Result<(Rc<dyn AgentConnection>, Option<task::SpawnInTerminal>)>> {
|
||||
let name = self.name();
|
||||
let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().to_string());
|
||||
let is_remote = delegate.project.read(cx).is_via_remote_server();
|
||||
let store = delegate.store.downgrade();
|
||||
|
||||
cx.spawn(async move |cx| {
|
||||
let mut project_env = project
|
||||
.update(cx, |project, cx| {
|
||||
project.directory_environment(root_dir.as_path().into(), cx)
|
||||
})?
|
||||
.await
|
||||
.unwrap_or_default();
|
||||
let mut command = if let Some(settings) = settings {
|
||||
settings.command
|
||||
} else {
|
||||
cx.update(|cx| {
|
||||
delegate.get_or_npm_install_builtin_agent(
|
||||
Self::BINARY_NAME.into(),
|
||||
Self::PACKAGE_NAME.into(),
|
||||
format!("node_modules/{}/dist/index.js", Self::PACKAGE_NAME).into(),
|
||||
true,
|
||||
None,
|
||||
cx,
|
||||
)
|
||||
})?
|
||||
.await?
|
||||
};
|
||||
project_env.extend(command.env.take().unwrap_or_default());
|
||||
command.env = Some(project_env);
|
||||
|
||||
command
|
||||
.env
|
||||
.get_or_insert_default()
|
||||
.insert("ANTHROPIC_API_KEY".to_owned(), "".to_owned());
|
||||
|
||||
let root_dir_exists = fs.is_dir(&root_dir).await;
|
||||
anyhow::ensure!(
|
||||
root_dir_exists,
|
||||
"Session root {} does not exist or is not a directory",
|
||||
root_dir.to_string_lossy()
|
||||
);
|
||||
|
||||
crate::acp::connect(server_name, command.clone(), &root_dir, cx).await
|
||||
let (command, root_dir, login) = store
|
||||
.update(cx, |store, cx| {
|
||||
let agent = store
|
||||
.get_external_agent(&CLAUDE_CODE_NAME.into())
|
||||
.context("Claude Code is not registered")?;
|
||||
anyhow::Ok(agent.get_command(
|
||||
root_dir.as_deref(),
|
||||
Default::default(),
|
||||
delegate.status_tx,
|
||||
delegate.new_version_available,
|
||||
&mut cx.to_async(),
|
||||
))
|
||||
})??
|
||||
.await?;
|
||||
let connection =
|
||||
crate::acp::connect(name, command, root_dir.as_ref(), is_remote, cx).await?;
|
||||
Ok((connection, login))
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
@@ -1,19 +1,19 @@
|
||||
use crate::{AgentServerCommand, AgentServerDelegate};
|
||||
use crate::AgentServerDelegate;
|
||||
use acp_thread::AgentConnection;
|
||||
use anyhow::Result;
|
||||
use anyhow::{Context as _, Result};
|
||||
use gpui::{App, SharedString, Task};
|
||||
use project::agent_server_store::ExternalAgentServerName;
|
||||
use std::{path::Path, rc::Rc};
|
||||
use ui::IconName;
|
||||
|
||||
/// A generic agent server implementation for custom user-defined agents
|
||||
pub struct CustomAgentServer {
|
||||
name: SharedString,
|
||||
command: AgentServerCommand,
|
||||
}
|
||||
|
||||
impl CustomAgentServer {
|
||||
pub fn new(name: SharedString, command: AgentServerCommand) -> Self {
|
||||
Self { name, command }
|
||||
pub fn new(name: SharedString) -> Self {
|
||||
Self { name }
|
||||
}
|
||||
}
|
||||
|
||||
@@ -32,14 +32,36 @@ impl crate::AgentServer for CustomAgentServer {
|
||||
|
||||
fn connect(
|
||||
&self,
|
||||
root_dir: &Path,
|
||||
_delegate: AgentServerDelegate,
|
||||
root_dir: Option<&Path>,
|
||||
delegate: AgentServerDelegate,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<Rc<dyn AgentConnection>>> {
|
||||
let server_name = self.name();
|
||||
let command = self.command.clone();
|
||||
let root_dir = root_dir.to_path_buf();
|
||||
cx.spawn(async move |cx| crate::acp::connect(server_name, command, &root_dir, cx).await)
|
||||
) -> Task<Result<(Rc<dyn AgentConnection>, Option<task::SpawnInTerminal>)>> {
|
||||
let name = self.name();
|
||||
let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().to_string());
|
||||
let is_remote = delegate.project.read(cx).is_via_remote_server();
|
||||
let store = delegate.store.downgrade();
|
||||
|
||||
cx.spawn(async move |cx| {
|
||||
let (command, root_dir, login) = store
|
||||
.update(cx, |store, cx| {
|
||||
let agent = store
|
||||
.get_external_agent(&ExternalAgentServerName(name.clone()))
|
||||
.with_context(|| {
|
||||
format!("Custom agent server `{}` is not registered", name)
|
||||
})?;
|
||||
anyhow::Ok(agent.get_command(
|
||||
root_dir.as_deref(),
|
||||
Default::default(),
|
||||
delegate.status_tx,
|
||||
delegate.new_version_available,
|
||||
&mut cx.to_async(),
|
||||
))
|
||||
})??
|
||||
.await?;
|
||||
let connection =
|
||||
crate::acp::connect(name, command, root_dir.as_ref(), is_remote, cx).await?;
|
||||
Ok((connection, login))
|
||||
})
|
||||
}
|
||||
|
||||
fn into_any(self: Rc<Self>) -> Rc<dyn std::any::Any> {
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
use crate::{AgentServer, AgentServerDelegate};
|
||||
#[cfg(test)]
|
||||
use crate::{AgentServerCommand, CustomAgentServerSettings};
|
||||
use acp_thread::{AcpThread, AgentThreadEntry, ToolCall, ToolCallStatus};
|
||||
use agent_client_protocol as acp;
|
||||
use futures::{FutureExt, StreamExt, channel::mpsc, select};
|
||||
use gpui::{AppContext, Entity, TestAppContext};
|
||||
use indoc::indoc;
|
||||
use project::{FakeFs, Project};
|
||||
#[cfg(test)]
|
||||
use project::agent_server_store::{AgentServerCommand, CustomAgentServerSettings};
|
||||
use project::{FakeFs, Project, agent_server_store::AllAgentServersSettings};
|
||||
use std::{
|
||||
path::{Path, PathBuf},
|
||||
sync::Arc,
|
||||
@@ -449,7 +449,6 @@ pub use common_e2e_tests;
|
||||
// Helpers
|
||||
|
||||
pub async fn init_test(cx: &mut TestAppContext) -> Arc<FakeFs> {
|
||||
#[cfg(test)]
|
||||
use settings::Settings;
|
||||
|
||||
env_logger::try_init().ok();
|
||||
@@ -468,11 +467,11 @@ pub async fn init_test(cx: &mut TestAppContext) -> Arc<FakeFs> {
|
||||
language_model::init(client.clone(), cx);
|
||||
language_models::init(user_store, client, cx);
|
||||
agent_settings::init(cx);
|
||||
crate::settings::init(cx);
|
||||
AllAgentServersSettings::register(cx);
|
||||
|
||||
#[cfg(test)]
|
||||
crate::AllAgentServersSettings::override_global(
|
||||
crate::AllAgentServersSettings {
|
||||
AllAgentServersSettings::override_global(
|
||||
AllAgentServersSettings {
|
||||
claude: Some(CustomAgentServerSettings {
|
||||
command: AgentServerCommand {
|
||||
path: "claude-code-acp".into(),
|
||||
@@ -498,10 +497,11 @@ pub async fn new_test_thread(
|
||||
current_dir: impl AsRef<Path>,
|
||||
cx: &mut TestAppContext,
|
||||
) -> Entity<AcpThread> {
|
||||
let delegate = AgentServerDelegate::new(project.clone(), None, None);
|
||||
let store = project.read_with(cx, |project, _| project.agent_server_store().clone());
|
||||
let delegate = AgentServerDelegate::new(store, project.clone(), None, None);
|
||||
|
||||
let connection = cx
|
||||
.update(|cx| server.connect(current_dir.as_ref(), delegate, cx))
|
||||
let (connection, _) = cx
|
||||
.update(|cx| server.connect(Some(current_dir.as_ref()), delegate, cx))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
|
||||
@@ -1,21 +1,17 @@
|
||||
use std::rc::Rc;
|
||||
use std::{any::Any, path::Path};
|
||||
|
||||
use crate::acp::AcpConnection;
|
||||
use crate::{AgentServer, AgentServerDelegate};
|
||||
use acp_thread::{AgentConnection, LoadError};
|
||||
use anyhow::Result;
|
||||
use gpui::{App, AppContext as _, SharedString, Task};
|
||||
use acp_thread::AgentConnection;
|
||||
use anyhow::{Context as _, Result};
|
||||
use collections::HashMap;
|
||||
use gpui::{App, SharedString, Task};
|
||||
use language_models::provider::google::GoogleLanguageModelProvider;
|
||||
use settings::SettingsStore;
|
||||
|
||||
use crate::AllAgentServersSettings;
|
||||
use project::agent_server_store::GEMINI_NAME;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct Gemini;
|
||||
|
||||
const ACP_ARG: &str = "--experimental-acp";
|
||||
|
||||
impl AgentServer for Gemini {
|
||||
fn telemetry_id(&self) -> &'static str {
|
||||
"gemini-cli"
|
||||
@@ -31,126 +27,37 @@ impl AgentServer for Gemini {
|
||||
|
||||
fn connect(
|
||||
&self,
|
||||
root_dir: &Path,
|
||||
root_dir: Option<&Path>,
|
||||
delegate: AgentServerDelegate,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<Rc<dyn AgentConnection>>> {
|
||||
let root_dir = root_dir.to_path_buf();
|
||||
let fs = delegate.project().read(cx).fs().clone();
|
||||
let server_name = self.name();
|
||||
let settings = cx.read_global(|settings: &SettingsStore, _| {
|
||||
settings.get::<AllAgentServersSettings>(None).gemini.clone()
|
||||
});
|
||||
let project = delegate.project().clone();
|
||||
) -> Task<Result<(Rc<dyn AgentConnection>, Option<task::SpawnInTerminal>)>> {
|
||||
let name = self.name();
|
||||
let root_dir = root_dir.map(|root_dir| root_dir.to_string_lossy().to_string());
|
||||
let is_remote = delegate.project.read(cx).is_via_remote_server();
|
||||
let store = delegate.store.downgrade();
|
||||
|
||||
cx.spawn(async move |cx| {
|
||||
let ignore_system_version = settings
|
||||
.as_ref()
|
||||
.and_then(|settings| settings.ignore_system_version)
|
||||
.unwrap_or(true);
|
||||
let mut project_env = project
|
||||
.update(cx, |project, cx| {
|
||||
project.directory_environment(root_dir.as_path().into(), cx)
|
||||
})?
|
||||
.await
|
||||
.unwrap_or_default();
|
||||
let mut command = if let Some(settings) = settings
|
||||
&& let Some(command) = settings.custom_command()
|
||||
{
|
||||
command
|
||||
} else {
|
||||
cx.update(|cx| {
|
||||
delegate.get_or_npm_install_builtin_agent(
|
||||
Self::BINARY_NAME.into(),
|
||||
Self::PACKAGE_NAME.into(),
|
||||
format!("node_modules/{}/dist/index.js", Self::PACKAGE_NAME).into(),
|
||||
ignore_system_version,
|
||||
Some(Self::MINIMUM_VERSION.parse().unwrap()),
|
||||
cx,
|
||||
)
|
||||
})?
|
||||
.await?
|
||||
};
|
||||
if !command.args.contains(&ACP_ARG.into()) {
|
||||
command.args.push(ACP_ARG.into());
|
||||
}
|
||||
let mut extra_env = HashMap::default();
|
||||
if let Some(api_key) = cx.update(GoogleLanguageModelProvider::api_key)?.await.ok() {
|
||||
project_env
|
||||
.insert("GEMINI_API_KEY".to_owned(), api_key.key);
|
||||
extra_env.insert("GEMINI_API_KEY".into(), api_key.key);
|
||||
}
|
||||
project_env.extend(command.env.take().unwrap_or_default());
|
||||
command.env = Some(project_env);
|
||||
|
||||
let root_dir_exists = fs.is_dir(&root_dir).await;
|
||||
anyhow::ensure!(
|
||||
root_dir_exists,
|
||||
"Session root {} does not exist or is not a directory",
|
||||
root_dir.to_string_lossy()
|
||||
);
|
||||
|
||||
let result = crate::acp::connect(server_name, command.clone(), &root_dir, cx).await;
|
||||
match &result {
|
||||
Ok(connection) => {
|
||||
if let Some(connection) = connection.clone().downcast::<AcpConnection>()
|
||||
&& !connection.prompt_capabilities().image
|
||||
{
|
||||
let version_output = util::command::new_smol_command(&command.path)
|
||||
.args(command.args.iter())
|
||||
.arg("--version")
|
||||
.kill_on_drop(true)
|
||||
.output()
|
||||
.await;
|
||||
let current_version =
|
||||
String::from_utf8(version_output?.stdout)?.trim().to_owned();
|
||||
|
||||
log::error!("connected to gemini, but missing prompt_capabilities.image (version is {current_version})");
|
||||
return Err(LoadError::Unsupported {
|
||||
current_version: current_version.into(),
|
||||
command: (command.path.to_string_lossy().to_string() + " " + &command.args.join(" ")).into(),
|
||||
minimum_version: Self::MINIMUM_VERSION.into(),
|
||||
}
|
||||
.into());
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
let version_fut = util::command::new_smol_command(&command.path)
|
||||
.args(command.args.iter())
|
||||
.arg("--version")
|
||||
.kill_on_drop(true)
|
||||
.output();
|
||||
|
||||
let help_fut = util::command::new_smol_command(&command.path)
|
||||
.args(command.args.iter())
|
||||
.arg("--help")
|
||||
.kill_on_drop(true)
|
||||
.output();
|
||||
|
||||
let (version_output, help_output) =
|
||||
futures::future::join(version_fut, help_fut).await;
|
||||
let Some(version_output) = version_output.ok().and_then(|output| String::from_utf8(output.stdout).ok()) else {
|
||||
return result;
|
||||
};
|
||||
let Some((help_stdout, help_stderr)) = help_output.ok().and_then(|output| String::from_utf8(output.stdout).ok().zip(String::from_utf8(output.stderr).ok())) else {
|
||||
return result;
|
||||
};
|
||||
|
||||
let current_version = version_output.trim().to_string();
|
||||
let supported = help_stdout.contains(ACP_ARG) || current_version.parse::<semver::Version>().is_ok_and(|version| version >= Self::MINIMUM_VERSION.parse::<semver::Version>().unwrap());
|
||||
|
||||
log::error!("failed to create ACP connection to gemini (version is {current_version}, supported: {supported}): {e}");
|
||||
log::debug!("gemini --help stdout: {help_stdout:?}");
|
||||
log::debug!("gemini --help stderr: {help_stderr:?}");
|
||||
if !supported {
|
||||
return Err(LoadError::Unsupported {
|
||||
current_version: current_version.into(),
|
||||
command: (command.path.to_string_lossy().to_string() + " " + &command.args.join(" ")).into(),
|
||||
minimum_version: Self::MINIMUM_VERSION.into(),
|
||||
}
|
||||
.into());
|
||||
}
|
||||
}
|
||||
}
|
||||
result
|
||||
let (command, root_dir, login) = store
|
||||
.update(cx, |store, cx| {
|
||||
let agent = store
|
||||
.get_external_agent(&GEMINI_NAME.into())
|
||||
.context("Gemini CLI is not registered")?;
|
||||
anyhow::Ok(agent.get_command(
|
||||
root_dir.as_deref(),
|
||||
extra_env,
|
||||
delegate.status_tx,
|
||||
delegate.new_version_available,
|
||||
&mut cx.to_async(),
|
||||
))
|
||||
})??
|
||||
.await?;
|
||||
let connection =
|
||||
crate::acp::connect(name, command, root_dir.as_ref(), is_remote, cx).await?;
|
||||
Ok((connection, login))
|
||||
})
|
||||
}
|
||||
|
||||
@@ -159,18 +66,11 @@ impl AgentServer for Gemini {
|
||||
}
|
||||
}
|
||||
|
||||
impl Gemini {
|
||||
const PACKAGE_NAME: &str = "@google/gemini-cli";
|
||||
|
||||
const MINIMUM_VERSION: &str = "0.2.1";
|
||||
|
||||
const BINARY_NAME: &str = "gemini";
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub(crate) mod tests {
|
||||
use project::agent_server_store::AgentServerCommand;
|
||||
|
||||
use super::*;
|
||||
use crate::AgentServerCommand;
|
||||
use std::path::Path;
|
||||
|
||||
crate::common_e2e_tests!(async |_, _, _| Gemini, allow_option_id = "proceed_once");
|
||||
|
||||
@@ -1,110 +0,0 @@
|
||||
use std::path::PathBuf;
|
||||
|
||||
use crate::AgentServerCommand;
|
||||
use anyhow::Result;
|
||||
use collections::HashMap;
|
||||
use gpui::{App, SharedString};
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::{Settings, SettingsKey, SettingsSources, SettingsUi};
|
||||
|
||||
pub fn init(cx: &mut App) {
|
||||
AllAgentServersSettings::register(cx);
|
||||
}
|
||||
|
||||
#[derive(Default, Deserialize, Serialize, Clone, JsonSchema, Debug, SettingsUi, SettingsKey)]
|
||||
#[settings_key(key = "agent_servers")]
|
||||
pub struct AllAgentServersSettings {
|
||||
pub gemini: Option<BuiltinAgentServerSettings>,
|
||||
pub claude: Option<CustomAgentServerSettings>,
|
||||
|
||||
/// Custom agent servers configured by the user
|
||||
#[serde(flatten)]
|
||||
pub custom: HashMap<SharedString, CustomAgentServerSettings>,
|
||||
}
|
||||
|
||||
#[derive(Default, Deserialize, Serialize, Clone, JsonSchema, Debug, PartialEq)]
|
||||
pub struct BuiltinAgentServerSettings {
|
||||
/// Absolute path to a binary to be used when launching this agent.
|
||||
///
|
||||
/// This can be used to run a specific binary without automatic downloads or searching `$PATH`.
|
||||
#[serde(rename = "command")]
|
||||
pub path: Option<PathBuf>,
|
||||
/// If a binary is specified in `command`, it will be passed these arguments.
|
||||
pub args: Option<Vec<String>>,
|
||||
/// If a binary is specified in `command`, it will be passed these environment variables.
|
||||
pub env: Option<HashMap<String, String>>,
|
||||
/// Whether to skip searching `$PATH` for an agent server binary when
|
||||
/// launching this agent.
|
||||
///
|
||||
/// This has no effect if a `command` is specified. Otherwise, when this is
|
||||
/// `false`, Zed will search `$PATH` for an agent server binary and, if one
|
||||
/// is found, use it for threads with this agent. If no agent binary is
|
||||
/// found on `$PATH`, Zed will automatically install and use its own binary.
|
||||
/// When this is `true`, Zed will not search `$PATH`, and will always use
|
||||
/// its own binary.
|
||||
///
|
||||
/// Default: true
|
||||
pub ignore_system_version: Option<bool>,
|
||||
}
|
||||
|
||||
impl BuiltinAgentServerSettings {
|
||||
pub(crate) fn custom_command(self) -> Option<AgentServerCommand> {
|
||||
self.path.map(|path| AgentServerCommand {
|
||||
path,
|
||||
args: self.args.unwrap_or_default(),
|
||||
env: self.env,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl From<AgentServerCommand> for BuiltinAgentServerSettings {
|
||||
fn from(value: AgentServerCommand) -> Self {
|
||||
BuiltinAgentServerSettings {
|
||||
path: Some(value.path),
|
||||
args: Some(value.args),
|
||||
env: value.env,
|
||||
..Default::default()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Serialize, Clone, JsonSchema, Debug, PartialEq)]
|
||||
pub struct CustomAgentServerSettings {
|
||||
#[serde(flatten)]
|
||||
pub command: AgentServerCommand,
|
||||
}
|
||||
|
||||
impl settings::Settings for AllAgentServersSettings {
|
||||
type FileContent = Self;
|
||||
|
||||
fn load(sources: SettingsSources<Self::FileContent>, _: &mut App) -> Result<Self> {
|
||||
let mut settings = AllAgentServersSettings::default();
|
||||
|
||||
for AllAgentServersSettings {
|
||||
gemini,
|
||||
claude,
|
||||
custom,
|
||||
} in sources.defaults_and_customizations()
|
||||
{
|
||||
if gemini.is_some() {
|
||||
settings.gemini = gemini.clone();
|
||||
}
|
||||
if claude.is_some() {
|
||||
settings.claude = claude.clone();
|
||||
}
|
||||
|
||||
// Merge custom agents
|
||||
for (name, config) in custom {
|
||||
// Skip built-in agent names to avoid conflicts
|
||||
if name != "gemini" && name != "claude" {
|
||||
settings.custom.insert(name.clone(), config.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(settings)
|
||||
}
|
||||
|
||||
fn import_from_vscode(_vscode: &settings::VsCodeSettings, _current: &mut Self::FileContent) {}
|
||||
}
|
||||
@@ -699,10 +699,15 @@ impl MessageEditor {
|
||||
self.project.read(cx).fs().clone(),
|
||||
self.history_store.clone(),
|
||||
));
|
||||
let delegate = AgentServerDelegate::new(self.project.clone(), None, None);
|
||||
let connection = server.connect(Path::new(""), delegate, cx);
|
||||
let delegate = AgentServerDelegate::new(
|
||||
self.project.read(cx).agent_server_store().clone(),
|
||||
self.project.clone(),
|
||||
None,
|
||||
None,
|
||||
);
|
||||
let connection = server.connect(None, delegate, cx);
|
||||
cx.spawn(async move |_, cx| {
|
||||
let agent = connection.await?;
|
||||
let (agent, _) = connection.await?;
|
||||
let agent = agent.downcast::<agent2::NativeAgentConnection>().unwrap();
|
||||
let summary = agent
|
||||
.0
|
||||
|
||||
@@ -6,7 +6,7 @@ use acp_thread::{
|
||||
use acp_thread::{AgentConnection, Plan};
|
||||
use action_log::ActionLog;
|
||||
use agent_client_protocol::{self as acp, PromptCapabilities};
|
||||
use agent_servers::{AgentServer, AgentServerDelegate, ClaudeCode};
|
||||
use agent_servers::{AgentServer, AgentServerDelegate};
|
||||
use agent_settings::{AgentProfileId, AgentSettings, CompletionMode, NotifyWhenAgentWaiting};
|
||||
use agent2::{DbThreadMetadata, HistoryEntry, HistoryEntryId, HistoryStore, NativeAgentServer};
|
||||
use anyhow::{Context as _, Result, anyhow, bail};
|
||||
@@ -40,7 +40,6 @@ use std::path::Path;
|
||||
use std::sync::Arc;
|
||||
use std::time::Instant;
|
||||
use std::{collections::BTreeMap, rc::Rc, time::Duration};
|
||||
use task::SpawnInTerminal;
|
||||
use terminal_view::terminal_panel::TerminalPanel;
|
||||
use text::Anchor;
|
||||
use theme::{AgentFontSize, ThemeSettings};
|
||||
@@ -263,6 +262,7 @@ pub struct AcpThreadView {
|
||||
workspace: WeakEntity<Workspace>,
|
||||
project: Entity<Project>,
|
||||
thread_state: ThreadState,
|
||||
login: Option<task::SpawnInTerminal>,
|
||||
history_store: Entity<HistoryStore>,
|
||||
hovered_recent_history_item: Option<usize>,
|
||||
entry_view_state: Entity<EntryViewState>,
|
||||
@@ -392,6 +392,7 @@ impl AcpThreadView {
|
||||
project: project.clone(),
|
||||
entry_view_state,
|
||||
thread_state: Self::initial_state(agent, resume_thread, workspace, project, window, cx),
|
||||
login: None,
|
||||
message_editor,
|
||||
model_selector: None,
|
||||
profile_selector: None,
|
||||
@@ -444,9 +445,11 @@ impl AcpThreadView {
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> ThreadState {
|
||||
if !project.read(cx).is_local() && agent.clone().downcast::<NativeAgentServer>().is_none() {
|
||||
if project.read(cx).is_via_collab()
|
||||
&& agent.clone().downcast::<NativeAgentServer>().is_none()
|
||||
{
|
||||
return ThreadState::LoadError(LoadError::Other(
|
||||
"External agents are not yet supported for remote projects.".into(),
|
||||
"External agents are not yet supported in shared projects.".into(),
|
||||
));
|
||||
}
|
||||
let mut worktrees = project.read(cx).visible_worktrees(cx).collect::<Vec<_>>();
|
||||
@@ -466,20 +469,23 @@ impl AcpThreadView {
|
||||
Some(worktree.read(cx).abs_path())
|
||||
}
|
||||
})
|
||||
.next()
|
||||
.unwrap_or_else(|| paths::home_dir().as_path().into());
|
||||
.next();
|
||||
let (status_tx, mut status_rx) = watch::channel("Loading…".into());
|
||||
let (new_version_available_tx, mut new_version_available_rx) = watch::channel(None);
|
||||
let delegate = AgentServerDelegate::new(
|
||||
project.read(cx).agent_server_store().clone(),
|
||||
project.clone(),
|
||||
Some(status_tx),
|
||||
Some(new_version_available_tx),
|
||||
);
|
||||
|
||||
let connect_task = agent.connect(&root_dir, delegate, cx);
|
||||
let connect_task = agent.connect(root_dir.as_deref(), delegate, cx);
|
||||
let load_task = cx.spawn_in(window, async move |this, cx| {
|
||||
let connection = match connect_task.await {
|
||||
Ok(connection) => connection,
|
||||
Ok((connection, login)) => {
|
||||
this.update(cx, |this, _| this.login = login).ok();
|
||||
connection
|
||||
}
|
||||
Err(err) => {
|
||||
this.update_in(cx, |this, window, cx| {
|
||||
if err.downcast_ref::<LoadError>().is_some() {
|
||||
@@ -506,6 +512,14 @@ impl AcpThreadView {
|
||||
})
|
||||
.log_err()
|
||||
} else {
|
||||
let root_dir = if let Some(acp_agent) = connection
|
||||
.clone()
|
||||
.downcast::<agent_servers::AcpConnection>()
|
||||
{
|
||||
acp_agent.root_dir().into()
|
||||
} else {
|
||||
root_dir.unwrap_or(paths::home_dir().as_path().into())
|
||||
};
|
||||
cx.update(|_, cx| {
|
||||
connection
|
||||
.clone()
|
||||
@@ -1462,9 +1476,12 @@ impl AcpThreadView {
|
||||
self.thread_error.take();
|
||||
configuration_view.take();
|
||||
pending_auth_method.replace(method.clone());
|
||||
let authenticate = if method.0.as_ref() == "claude-login" {
|
||||
let authenticate = if (method.0.as_ref() == "claude-login"
|
||||
|| method.0.as_ref() == "spawn-gemini-cli")
|
||||
&& let Some(login) = self.login.clone()
|
||||
{
|
||||
if let Some(workspace) = self.workspace.upgrade() {
|
||||
Self::spawn_claude_login(&workspace, window, cx)
|
||||
Self::spawn_external_agent_login(login, workspace, false, window, cx)
|
||||
} else {
|
||||
Task::ready(Ok(()))
|
||||
}
|
||||
@@ -1511,31 +1528,28 @@ impl AcpThreadView {
|
||||
}));
|
||||
}
|
||||
|
||||
fn spawn_claude_login(
|
||||
workspace: &Entity<Workspace>,
|
||||
fn spawn_external_agent_login(
|
||||
login: task::SpawnInTerminal,
|
||||
workspace: Entity<Workspace>,
|
||||
previous_attempt: bool,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<()>> {
|
||||
let Some(terminal_panel) = workspace.read(cx).panel::<TerminalPanel>(cx) else {
|
||||
return Task::ready(Ok(()));
|
||||
};
|
||||
let project_entity = workspace.read(cx).project();
|
||||
let project = project_entity.read(cx);
|
||||
let cwd = project.first_project_directory(cx);
|
||||
let shell = project.terminal_settings(&cwd, cx).shell.clone();
|
||||
|
||||
let delegate = AgentServerDelegate::new(project_entity.clone(), None, None);
|
||||
let command = ClaudeCode::login_command(delegate, cx);
|
||||
let project = workspace.read(cx).project().clone();
|
||||
let cwd = project.read(cx).first_project_directory(cx);
|
||||
let shell = project.read(cx).terminal_settings(&cwd, cx).shell.clone();
|
||||
|
||||
window.spawn(cx, async move |cx| {
|
||||
let login_command = command.await?;
|
||||
let command = login_command
|
||||
.path
|
||||
.to_str()
|
||||
.with_context(|| format!("invalid login command: {:?}", login_command.path))?;
|
||||
let command = shlex::try_quote(command)?;
|
||||
let args = login_command
|
||||
.arguments
|
||||
let mut task = login.clone();
|
||||
task.command = task
|
||||
.command
|
||||
.map(|command| anyhow::Ok(shlex::try_quote(&command)?.to_string()))
|
||||
.transpose()?;
|
||||
task.args = task
|
||||
.args
|
||||
.iter()
|
||||
.map(|arg| {
|
||||
Ok(shlex::try_quote(arg)
|
||||
@@ -1543,26 +1557,16 @@ impl AcpThreadView {
|
||||
.to_string())
|
||||
})
|
||||
.collect::<Result<Vec<_>>>()?;
|
||||
task.full_label = task.label.clone();
|
||||
task.id = task::TaskId(format!("external-agent-{}-login", task.label));
|
||||
task.command_label = task.label.clone();
|
||||
task.use_new_terminal = true;
|
||||
task.allow_concurrent_runs = true;
|
||||
task.hide = task::HideStrategy::Always;
|
||||
task.shell = shell;
|
||||
|
||||
let terminal = terminal_panel.update_in(cx, |terminal_panel, window, cx| {
|
||||
terminal_panel.spawn_task(
|
||||
&SpawnInTerminal {
|
||||
id: task::TaskId("claude-login".into()),
|
||||
full_label: "claude /login".to_owned(),
|
||||
label: "claude /login".to_owned(),
|
||||
command: Some(command.into()),
|
||||
args,
|
||||
command_label: "claude /login".to_owned(),
|
||||
cwd,
|
||||
use_new_terminal: true,
|
||||
allow_concurrent_runs: true,
|
||||
hide: task::HideStrategy::Always,
|
||||
shell,
|
||||
..Default::default()
|
||||
},
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
terminal_panel.spawn_task(&login, window, cx)
|
||||
})?;
|
||||
|
||||
let terminal = terminal.await?;
|
||||
@@ -1578,7 +1582,9 @@ impl AcpThreadView {
|
||||
cx.background_executor().timer(Duration::from_secs(1)).await;
|
||||
let content =
|
||||
terminal.update(cx, |terminal, _cx| terminal.get_content())?;
|
||||
if content.contains("Login successful") {
|
||||
if content.contains("Login successful")
|
||||
|| content.contains("Type your message")
|
||||
{
|
||||
return anyhow::Ok(());
|
||||
}
|
||||
}
|
||||
@@ -1594,6 +1600,9 @@ impl AcpThreadView {
|
||||
}
|
||||
}
|
||||
_ = exit_status => {
|
||||
if !previous_attempt && project.read_with(cx, |project, _| project.is_via_remote_server())? && login.label.contains("gemini") {
|
||||
return cx.update(|window, cx| Self::spawn_external_agent_login(login, workspace, true, window, cx))?.await
|
||||
}
|
||||
return Err(anyhow!("exited before logging in"));
|
||||
}
|
||||
}
|
||||
@@ -2024,35 +2033,34 @@ impl AcpThreadView {
|
||||
window: &Window,
|
||||
cx: &Context<Self>,
|
||||
) -> Div {
|
||||
let has_location = tool_call.locations.len() == 1;
|
||||
let card_header_id = SharedString::from("inner-tool-call-header");
|
||||
|
||||
let tool_icon =
|
||||
if tool_call.kind == acp::ToolKind::Edit && tool_call.locations.len() == 1 {
|
||||
FileIcons::get_icon(&tool_call.locations[0].path, cx)
|
||||
.map(Icon::from_path)
|
||||
.unwrap_or(Icon::new(IconName::ToolPencil))
|
||||
} else {
|
||||
Icon::new(match tool_call.kind {
|
||||
acp::ToolKind::Read => IconName::ToolSearch,
|
||||
acp::ToolKind::Edit => IconName::ToolPencil,
|
||||
acp::ToolKind::Delete => IconName::ToolDeleteFile,
|
||||
acp::ToolKind::Move => IconName::ArrowRightLeft,
|
||||
acp::ToolKind::Search => IconName::ToolSearch,
|
||||
acp::ToolKind::Execute => IconName::ToolTerminal,
|
||||
acp::ToolKind::Think => IconName::ToolThink,
|
||||
acp::ToolKind::Fetch => IconName::ToolWeb,
|
||||
acp::ToolKind::Other => IconName::ToolHammer,
|
||||
})
|
||||
}
|
||||
.size(IconSize::Small)
|
||||
.color(Color::Muted);
|
||||
let tool_icon = if tool_call.kind == acp::ToolKind::Edit && has_location {
|
||||
FileIcons::get_icon(&tool_call.locations[0].path, cx)
|
||||
.map(Icon::from_path)
|
||||
.unwrap_or(Icon::new(IconName::ToolPencil))
|
||||
} else {
|
||||
Icon::new(match tool_call.kind {
|
||||
acp::ToolKind::Read => IconName::ToolSearch,
|
||||
acp::ToolKind::Edit => IconName::ToolPencil,
|
||||
acp::ToolKind::Delete => IconName::ToolDeleteFile,
|
||||
acp::ToolKind::Move => IconName::ArrowRightLeft,
|
||||
acp::ToolKind::Search => IconName::ToolSearch,
|
||||
acp::ToolKind::Execute => IconName::ToolTerminal,
|
||||
acp::ToolKind::Think => IconName::ToolThink,
|
||||
acp::ToolKind::Fetch => IconName::ToolWeb,
|
||||
acp::ToolKind::Other => IconName::ToolHammer,
|
||||
})
|
||||
}
|
||||
.size(IconSize::Small)
|
||||
.color(Color::Muted);
|
||||
|
||||
let failed_or_canceled = match &tool_call.status {
|
||||
ToolCallStatus::Rejected | ToolCallStatus::Canceled | ToolCallStatus::Failed => true,
|
||||
_ => false,
|
||||
};
|
||||
|
||||
let has_location = tool_call.locations.len() == 1;
|
||||
let needs_confirmation = matches!(
|
||||
tool_call.status,
|
||||
ToolCallStatus::WaitingForConfirmation { .. }
|
||||
@@ -2195,13 +2203,6 @@ impl AcpThreadView {
|
||||
.overflow_hidden()
|
||||
.child(tool_icon)
|
||||
.child(if has_location {
|
||||
let name = tool_call.locations[0]
|
||||
.path
|
||||
.file_name()
|
||||
.unwrap_or_default()
|
||||
.display()
|
||||
.to_string();
|
||||
|
||||
h_flex()
|
||||
.id(("open-tool-call-location", entry_ix))
|
||||
.w_full()
|
||||
@@ -2212,7 +2213,13 @@ impl AcpThreadView {
|
||||
this.text_color(cx.theme().colors().text_muted)
|
||||
}
|
||||
})
|
||||
.child(name)
|
||||
.child(self.render_markdown(
|
||||
tool_call.label.clone(),
|
||||
MarkdownStyle {
|
||||
prevent_mouse_interaction: true,
|
||||
..default_markdown_style(false, true, window, cx)
|
||||
},
|
||||
))
|
||||
.tooltip(Tooltip::text("Jump to File"))
|
||||
.on_click(cx.listener(move |this, _, window, cx| {
|
||||
this.open_tool_call_location(entry_ix, 0, window, cx);
|
||||
@@ -3090,26 +3097,38 @@ impl AcpThreadView {
|
||||
})
|
||||
.children(connection.auth_methods().iter().enumerate().rev().map(
|
||||
|(ix, method)| {
|
||||
Button::new(
|
||||
SharedString::from(method.id.0.clone()),
|
||||
method.name.clone(),
|
||||
)
|
||||
.when(ix == 0, |el| {
|
||||
el.style(ButtonStyle::Tinted(ui::TintColor::Warning))
|
||||
})
|
||||
.label_size(LabelSize::Small)
|
||||
.on_click({
|
||||
let method_id = method.id.clone();
|
||||
cx.listener(move |this, _, window, cx| {
|
||||
telemetry::event!(
|
||||
"Authenticate Agent Started",
|
||||
agent = this.agent.telemetry_id(),
|
||||
method = method_id
|
||||
);
|
||||
let (method_id, name) = if self
|
||||
.project
|
||||
.read(cx)
|
||||
.is_via_remote_server()
|
||||
&& method.id.0.as_ref() == "oauth-personal"
|
||||
&& method.name == "Log in with Google"
|
||||
{
|
||||
("spawn-gemini-cli".into(), "Log in with Gemini CLI".into())
|
||||
} else {
|
||||
(method.id.0.clone(), method.name.clone())
|
||||
};
|
||||
|
||||
this.authenticate(method_id.clone(), window, cx)
|
||||
Button::new(SharedString::from(method_id.clone()), name)
|
||||
.when(ix == 0, |el| {
|
||||
el.style(ButtonStyle::Tinted(ui::TintColor::Warning))
|
||||
})
|
||||
.label_size(LabelSize::Small)
|
||||
.on_click({
|
||||
cx.listener(move |this, _, window, cx| {
|
||||
telemetry::event!(
|
||||
"Authenticate Agent Started",
|
||||
agent = this.agent.telemetry_id(),
|
||||
method = method_id
|
||||
);
|
||||
|
||||
this.authenticate(
|
||||
acp::AuthMethodId(method_id.clone()),
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
||||
},
|
||||
)),
|
||||
)
|
||||
@@ -5712,11 +5731,11 @@ pub(crate) mod tests {
|
||||
|
||||
fn connect(
|
||||
&self,
|
||||
_root_dir: &Path,
|
||||
_root_dir: Option<&Path>,
|
||||
_delegate: AgentServerDelegate,
|
||||
_cx: &mut App,
|
||||
) -> Task<gpui::Result<Rc<dyn AgentConnection>>> {
|
||||
Task::ready(Ok(Rc::new(self.connection.clone())))
|
||||
) -> Task<gpui::Result<(Rc<dyn AgentConnection>, Option<task::SpawnInTerminal>)>> {
|
||||
Task::ready(Ok((Rc::new(self.connection.clone()), None)))
|
||||
}
|
||||
|
||||
fn into_any(self: Rc<Self>) -> Rc<dyn Any> {
|
||||
|
||||
@@ -5,7 +5,6 @@ mod tool_picker;
|
||||
|
||||
use std::{ops::Range, sync::Arc};
|
||||
|
||||
use agent_servers::{AgentServerCommand, AllAgentServersSettings, CustomAgentServerSettings};
|
||||
use agent_settings::AgentSettings;
|
||||
use anyhow::Result;
|
||||
use assistant_tool::{ToolSource, ToolWorkingSet};
|
||||
@@ -26,6 +25,10 @@ use language_model::{
|
||||
};
|
||||
use notifications::status_toast::{StatusToast, ToastIcon};
|
||||
use project::{
|
||||
agent_server_store::{
|
||||
AgentServerCommand, AgentServerStore, AllAgentServersSettings, CLAUDE_CODE_NAME,
|
||||
CustomAgentServerSettings, GEMINI_NAME,
|
||||
},
|
||||
context_server_store::{ContextServerConfiguration, ContextServerStatus, ContextServerStore},
|
||||
project_settings::{ContextServerSettings, ProjectSettings},
|
||||
};
|
||||
@@ -45,11 +48,13 @@ pub(crate) use manage_profiles_modal::ManageProfilesModal;
|
||||
use crate::{
|
||||
AddContextServer, ExternalAgent, NewExternalAgentThread,
|
||||
agent_configuration::add_llm_provider_modal::{AddLlmProviderModal, LlmCompatibleProvider},
|
||||
placeholder_command,
|
||||
};
|
||||
|
||||
pub struct AgentConfiguration {
|
||||
fs: Arc<dyn Fs>,
|
||||
language_registry: Arc<LanguageRegistry>,
|
||||
agent_server_store: Entity<AgentServerStore>,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
focus_handle: FocusHandle,
|
||||
configuration_views_by_provider: HashMap<LanguageModelProviderId, AnyView>,
|
||||
@@ -66,6 +71,7 @@ pub struct AgentConfiguration {
|
||||
impl AgentConfiguration {
|
||||
pub fn new(
|
||||
fs: Arc<dyn Fs>,
|
||||
agent_server_store: Entity<AgentServerStore>,
|
||||
context_server_store: Entity<ContextServerStore>,
|
||||
tools: Entity<ToolWorkingSet>,
|
||||
language_registry: Arc<LanguageRegistry>,
|
||||
@@ -104,6 +110,7 @@ impl AgentConfiguration {
|
||||
workspace,
|
||||
focus_handle,
|
||||
configuration_views_by_provider: HashMap::default(),
|
||||
agent_server_store,
|
||||
context_server_store,
|
||||
expanded_context_server_tools: HashMap::default(),
|
||||
expanded_provider_configurations: HashMap::default(),
|
||||
@@ -991,17 +998,30 @@ impl AgentConfiguration {
|
||||
}
|
||||
|
||||
fn render_agent_servers_section(&mut self, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
let settings = AllAgentServersSettings::get_global(cx).clone();
|
||||
let user_defined_agents = settings
|
||||
let custom_settings = cx
|
||||
.global::<SettingsStore>()
|
||||
.get::<AllAgentServersSettings>(None)
|
||||
.custom
|
||||
.iter()
|
||||
.map(|(name, settings)| {
|
||||
.clone();
|
||||
let user_defined_agents = self
|
||||
.agent_server_store
|
||||
.read(cx)
|
||||
.external_agents()
|
||||
.filter(|name| name.0 != GEMINI_NAME && name.0 != CLAUDE_CODE_NAME)
|
||||
.cloned()
|
||||
.collect::<Vec<_>>();
|
||||
let user_defined_agents = user_defined_agents
|
||||
.into_iter()
|
||||
.map(|name| {
|
||||
self.render_agent_server(
|
||||
IconName::Ai,
|
||||
name.clone(),
|
||||
ExternalAgent::Custom {
|
||||
name: name.clone(),
|
||||
command: settings.command.clone(),
|
||||
name: name.clone().into(),
|
||||
command: custom_settings
|
||||
.get(&name.0)
|
||||
.map(|settings| settings.command.clone())
|
||||
.unwrap_or(placeholder_command()),
|
||||
},
|
||||
cx,
|
||||
)
|
||||
|
||||
@@ -5,9 +5,11 @@ use std::sync::Arc;
|
||||
use std::time::Duration;
|
||||
|
||||
use acp_thread::AcpThread;
|
||||
use agent_servers::AgentServerCommand;
|
||||
use agent2::{DbThreadMetadata, HistoryEntry};
|
||||
use db::kvp::{Dismissable, KEY_VALUE_STORE};
|
||||
use project::agent_server_store::{
|
||||
AgentServerCommand, AllAgentServersSettings, CLAUDE_CODE_NAME, GEMINI_NAME,
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use zed_actions::OpenBrowser;
|
||||
use zed_actions::agent::{OpenClaudeCodeOnboardingModal, ReauthenticateAgent};
|
||||
@@ -33,7 +35,9 @@ use crate::{
|
||||
thread_history::{HistoryEntryElement, ThreadHistory},
|
||||
ui::{AgentOnboardingModal, EndTrialUpsell},
|
||||
};
|
||||
use crate::{ExternalAgent, NewExternalAgentThread, NewNativeAgentThreadFromSummary};
|
||||
use crate::{
|
||||
ExternalAgent, NewExternalAgentThread, NewNativeAgentThreadFromSummary, placeholder_command,
|
||||
};
|
||||
use agent::{
|
||||
Thread, ThreadError, ThreadEvent, ThreadId, ThreadSummary, TokenUsageRatio,
|
||||
context_store::ContextStore,
|
||||
@@ -62,7 +66,7 @@ use project::{DisableAiSettings, Project, ProjectPath, Worktree};
|
||||
use prompt_store::{PromptBuilder, PromptStore, UserPromptId};
|
||||
use rules_library::{RulesLibrary, open_rules_library};
|
||||
use search::{BufferSearchBar, buffer_search};
|
||||
use settings::{Settings, update_settings_file};
|
||||
use settings::{Settings, SettingsStore, update_settings_file};
|
||||
use theme::ThemeSettings;
|
||||
use time::UtcOffset;
|
||||
use ui::utils::WithRemSize;
|
||||
@@ -1094,7 +1098,7 @@ impl AgentPanel {
|
||||
let workspace = self.workspace.clone();
|
||||
let project = self.project.clone();
|
||||
let fs = self.fs.clone();
|
||||
let is_not_local = !self.project.read(cx).is_local();
|
||||
let is_via_collab = self.project.read(cx).is_via_collab();
|
||||
|
||||
const LAST_USED_EXTERNAL_AGENT_KEY: &str = "agent_panel__last_used_external_agent";
|
||||
|
||||
@@ -1126,7 +1130,7 @@ impl AgentPanel {
|
||||
agent
|
||||
}
|
||||
None => {
|
||||
if is_not_local {
|
||||
if is_via_collab {
|
||||
ExternalAgent::NativeAgent
|
||||
} else {
|
||||
cx.background_spawn(async move {
|
||||
@@ -1503,6 +1507,7 @@ impl AgentPanel {
|
||||
}
|
||||
|
||||
pub(crate) fn open_configuration(&mut self, window: &mut Window, cx: &mut Context<Self>) {
|
||||
let agent_server_store = self.project.read(cx).agent_server_store().clone();
|
||||
let context_server_store = self.project.read(cx).context_server_store();
|
||||
let tools = self.thread_store.read(cx).tools();
|
||||
let fs = self.fs.clone();
|
||||
@@ -1511,6 +1516,7 @@ impl AgentPanel {
|
||||
self.configuration = Some(cx.new(|cx| {
|
||||
AgentConfiguration::new(
|
||||
fs,
|
||||
agent_server_store,
|
||||
context_server_store,
|
||||
tools,
|
||||
self.language_registry.clone(),
|
||||
@@ -2503,6 +2509,7 @@ impl AgentPanel {
|
||||
}
|
||||
|
||||
fn render_toolbar_new(&self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
let agent_server_store = self.project.read(cx).agent_server_store().clone();
|
||||
let focus_handle = self.focus_handle(cx);
|
||||
|
||||
let active_thread = match &self.active_view {
|
||||
@@ -2535,8 +2542,10 @@ impl AgentPanel {
|
||||
.with_handle(self.new_thread_menu_handle.clone())
|
||||
.menu({
|
||||
let workspace = self.workspace.clone();
|
||||
let is_not_local = workspace
|
||||
.update(cx, |workspace, cx| !workspace.project().read(cx).is_local())
|
||||
let is_via_collab = workspace
|
||||
.update(cx, |workspace, cx| {
|
||||
workspace.project().read(cx).is_via_collab()
|
||||
})
|
||||
.unwrap_or_default();
|
||||
|
||||
move |window, cx| {
|
||||
@@ -2628,7 +2637,7 @@ impl AgentPanel {
|
||||
ContextMenuEntry::new("New Gemini CLI Thread")
|
||||
.icon(IconName::AiGemini)
|
||||
.icon_color(Color::Muted)
|
||||
.disabled(is_not_local)
|
||||
.disabled(is_via_collab)
|
||||
.handler({
|
||||
let workspace = workspace.clone();
|
||||
move |window, cx| {
|
||||
@@ -2655,7 +2664,7 @@ impl AgentPanel {
|
||||
menu.item(
|
||||
ContextMenuEntry::new("New Claude Code Thread")
|
||||
.icon(IconName::AiClaude)
|
||||
.disabled(is_not_local)
|
||||
.disabled(is_via_collab)
|
||||
.icon_color(Color::Muted)
|
||||
.handler({
|
||||
let workspace = workspace.clone();
|
||||
@@ -2680,19 +2689,25 @@ impl AgentPanel {
|
||||
)
|
||||
})
|
||||
.when(cx.has_flag::<GeminiAndNativeFeatureFlag>(), |mut menu| {
|
||||
// Add custom agents from settings
|
||||
let settings =
|
||||
agent_servers::AllAgentServersSettings::get_global(cx);
|
||||
for (agent_name, agent_settings) in &settings.custom {
|
||||
let agent_names = agent_server_store
|
||||
.read(cx)
|
||||
.external_agents()
|
||||
.filter(|name| {
|
||||
name.0 != GEMINI_NAME && name.0 != CLAUDE_CODE_NAME
|
||||
})
|
||||
.cloned()
|
||||
.collect::<Vec<_>>();
|
||||
let custom_settings = cx.global::<SettingsStore>().get::<AllAgentServersSettings>(None).custom.clone();
|
||||
for agent_name in agent_names {
|
||||
menu = menu.item(
|
||||
ContextMenuEntry::new(format!("New {} Thread", agent_name))
|
||||
.icon(IconName::Terminal)
|
||||
.icon_color(Color::Muted)
|
||||
.disabled(is_not_local)
|
||||
.disabled(is_via_collab)
|
||||
.handler({
|
||||
let workspace = workspace.clone();
|
||||
let agent_name = agent_name.clone();
|
||||
let agent_settings = agent_settings.clone();
|
||||
let custom_settings = custom_settings.clone();
|
||||
move |window, cx| {
|
||||
if let Some(workspace) = workspace.upgrade() {
|
||||
workspace.update(cx, |workspace, cx| {
|
||||
@@ -2703,10 +2718,9 @@ impl AgentPanel {
|
||||
panel.new_agent_thread(
|
||||
AgentType::Custom {
|
||||
name: agent_name
|
||||
.clone(),
|
||||
command: agent_settings
|
||||
.command
|
||||
.clone(),
|
||||
.clone()
|
||||
.into(),
|
||||
command: custom_settings.get(&agent_name.0).map(|settings| settings.command.clone()).unwrap_or(placeholder_command())
|
||||
},
|
||||
window,
|
||||
cx,
|
||||
|
||||
@@ -28,7 +28,6 @@ use std::rc::Rc;
|
||||
use std::sync::Arc;
|
||||
|
||||
use agent::{Thread, ThreadId};
|
||||
use agent_servers::AgentServerCommand;
|
||||
use agent_settings::{AgentProfileId, AgentSettings, LanguageModelSelection};
|
||||
use assistant_slash_command::SlashCommandRegistry;
|
||||
use client::Client;
|
||||
@@ -41,6 +40,7 @@ use language_model::{
|
||||
ConfiguredModel, LanguageModel, LanguageModelId, LanguageModelProviderId, LanguageModelRegistry,
|
||||
};
|
||||
use project::DisableAiSettings;
|
||||
use project::agent_server_store::AgentServerCommand;
|
||||
use prompt_store::PromptBuilder;
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
@@ -174,6 +174,14 @@ enum ExternalAgent {
|
||||
},
|
||||
}
|
||||
|
||||
fn placeholder_command() -> AgentServerCommand {
|
||||
AgentServerCommand {
|
||||
path: "/placeholder".into(),
|
||||
args: vec![],
|
||||
env: None,
|
||||
}
|
||||
}
|
||||
|
||||
impl ExternalAgent {
|
||||
fn name(&self) -> &'static str {
|
||||
match self {
|
||||
@@ -193,10 +201,9 @@ impl ExternalAgent {
|
||||
Self::Gemini => Rc::new(agent_servers::Gemini),
|
||||
Self::ClaudeCode => Rc::new(agent_servers::ClaudeCode),
|
||||
Self::NativeAgent => Rc::new(agent2::NativeAgentServer::new(fs, history)),
|
||||
Self::Custom { name, command } => Rc::new(agent_servers::CustomAgentServer::new(
|
||||
name.clone(),
|
||||
command.clone(),
|
||||
)),
|
||||
Self::Custom { name, command: _ } => {
|
||||
Rc::new(agent_servers::CustomAgentServer::new(name.clone()))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -14,20 +14,11 @@ doctest = false
|
||||
|
||||
[dependencies]
|
||||
anyhow.workspace = true
|
||||
async-tar.workspace = true
|
||||
collections.workspace = true
|
||||
crossbeam.workspace = true
|
||||
gpui.workspace = true
|
||||
log.workspace = true
|
||||
parking_lot.workspace = true
|
||||
rodio = { workspace = true, features = [ "wav", "playback", "wav_output" ] }
|
||||
settings.workspace = true
|
||||
schemars.workspace = true
|
||||
serde.workspace = true
|
||||
settings.workspace = true
|
||||
smol.workspace = true
|
||||
thiserror.workspace = true
|
||||
rodio = { workspace = true, features = [ "wav", "playback", "tracing" ] }
|
||||
util.workspace = true
|
||||
workspace-hack.workspace = true
|
||||
|
||||
[target.'cfg(not(any(all(target_os = "windows", target_env = "gnu"), target_os = "freebsd")))'.dependencies]
|
||||
libwebrtc = { rev = "5f04705ac3f356350ae31534ffbc476abc9ea83d", git = "https://github.com/zed-industries/livekit-rust-sdks" }
|
||||
|
||||
@@ -1,56 +1,19 @@
|
||||
use anyhow::{Context as _, Result};
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use collections::HashMap;
|
||||
use gpui::{App, BackgroundExecutor, BorrowAppContext, Global};
|
||||
|
||||
#[cfg(not(any(all(target_os = "windows", target_env = "gnu"), target_os = "freebsd")))]
|
||||
mod non_windows_and_freebsd_deps {
|
||||
pub(super) use gpui::AsyncApp;
|
||||
pub(super) use libwebrtc::native::apm;
|
||||
pub(super) use log::info;
|
||||
pub(super) use parking_lot::Mutex;
|
||||
pub(super) use rodio::cpal::Sample;
|
||||
pub(super) use rodio::source::{LimitSettings, UniformSourceIterator};
|
||||
pub(super) use std::sync::Arc;
|
||||
}
|
||||
|
||||
#[cfg(not(any(all(target_os = "windows", target_env = "gnu"), target_os = "freebsd")))]
|
||||
use non_windows_and_freebsd_deps::*;
|
||||
|
||||
use rodio::{
|
||||
Decoder, OutputStream, OutputStreamBuilder, Source, mixer::Mixer, nz, source::Buffered,
|
||||
};
|
||||
use gpui::{App, BorrowAppContext, Global};
|
||||
use rodio::{Decoder, OutputStream, OutputStreamBuilder, Source, source::Buffered};
|
||||
use settings::Settings;
|
||||
use std::{io::Cursor, num::NonZero, path::PathBuf, sync::atomic::Ordering, time::Duration};
|
||||
use std::io::Cursor;
|
||||
use util::ResultExt;
|
||||
|
||||
mod audio_settings;
|
||||
mod replays;
|
||||
mod rodio_ext;
|
||||
pub use audio_settings::AudioSettings;
|
||||
pub use rodio_ext::RodioExt;
|
||||
|
||||
use crate::audio_settings::LIVE_SETTINGS;
|
||||
|
||||
// NOTE: We used to use WebRTC's mixer which only supported
|
||||
// 16kHz, 32kHz and 48kHz. As 48 is the most common "next step up"
|
||||
// for audio output devices like speakers/bluetooth, we just hard-code
|
||||
// this; and downsample when we need to.
|
||||
//
|
||||
// Since most noise cancelling requires 16kHz we will move to
|
||||
// that in the future.
|
||||
pub const SAMPLE_RATE: NonZero<u32> = nz!(48000);
|
||||
pub const CHANNEL_COUNT: NonZero<u16> = nz!(2);
|
||||
pub const BUFFER_SIZE: usize = // echo canceller and livekit want 10ms of audio
|
||||
(SAMPLE_RATE.get() as usize / 100) * CHANNEL_COUNT.get() as usize;
|
||||
|
||||
pub const REPLAY_DURATION: Duration = Duration::from_secs(30);
|
||||
|
||||
pub fn init(cx: &mut App) {
|
||||
AudioSettings::register(cx);
|
||||
LIVE_SETTINGS.initialize(cx);
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone, Eq, Hash, PartialEq)]
|
||||
#[derive(Copy, Clone, Eq, Hash, PartialEq)]
|
||||
pub enum Sound {
|
||||
Joined,
|
||||
Leave,
|
||||
@@ -75,152 +38,32 @@ impl Sound {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct Audio {
|
||||
output_handle: Option<OutputStream>,
|
||||
output_mixer: Option<Mixer>,
|
||||
#[cfg(not(any(all(target_os = "windows", target_env = "gnu"), target_os = "freebsd")))]
|
||||
pub echo_canceller: Arc<Mutex<apm::AudioProcessingModule>>,
|
||||
source_cache: HashMap<Sound, Buffered<Decoder<Cursor<Vec<u8>>>>>,
|
||||
replays: replays::Replays,
|
||||
}
|
||||
|
||||
impl Default for Audio {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
output_handle: Default::default(),
|
||||
output_mixer: Default::default(),
|
||||
#[cfg(not(any(
|
||||
all(target_os = "windows", target_env = "gnu"),
|
||||
target_os = "freebsd"
|
||||
)))]
|
||||
echo_canceller: Arc::new(Mutex::new(apm::AudioProcessingModule::new(
|
||||
true, false, false, false,
|
||||
))),
|
||||
source_cache: Default::default(),
|
||||
replays: Default::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Global for Audio {}
|
||||
|
||||
impl Audio {
|
||||
fn ensure_output_exists(&mut self) -> Result<&Mixer> {
|
||||
fn ensure_output_exists(&mut self) -> Option<&OutputStream> {
|
||||
if self.output_handle.is_none() {
|
||||
self.output_handle = Some(
|
||||
OutputStreamBuilder::open_default_stream()
|
||||
.context("Could not open default output stream")?,
|
||||
);
|
||||
if let Some(output_handle) = &self.output_handle {
|
||||
let (mixer, source) = rodio::mixer::mixer(CHANNEL_COUNT, SAMPLE_RATE);
|
||||
// or the mixer will end immediately as its empty.
|
||||
mixer.add(rodio::source::Zero::new(CHANNEL_COUNT, SAMPLE_RATE));
|
||||
self.output_mixer = Some(mixer);
|
||||
|
||||
// The webrtc apm is not yet compiling for windows & freebsd
|
||||
#[cfg(not(any(
|
||||
any(all(target_os = "windows", target_env = "gnu")),
|
||||
target_os = "freebsd"
|
||||
)))]
|
||||
let echo_canceller = Arc::clone(&self.echo_canceller);
|
||||
#[cfg(not(any(
|
||||
any(all(target_os = "windows", target_env = "gnu")),
|
||||
target_os = "freebsd"
|
||||
)))]
|
||||
let source = source.inspect_buffer::<BUFFER_SIZE, _>(move |buffer| {
|
||||
let mut buf: [i16; _] = buffer.map(|s| s.to_sample());
|
||||
echo_canceller
|
||||
.lock()
|
||||
.process_reverse_stream(
|
||||
&mut buf,
|
||||
SAMPLE_RATE.get() as i32,
|
||||
CHANNEL_COUNT.get().into(),
|
||||
)
|
||||
.expect("Audio input and output threads should not panic");
|
||||
});
|
||||
output_handle.mixer().add(source);
|
||||
}
|
||||
self.output_handle = OutputStreamBuilder::open_default_stream().log_err();
|
||||
}
|
||||
|
||||
Ok(self
|
||||
.output_mixer
|
||||
.as_ref()
|
||||
.expect("we only get here if opening the outputstream succeeded"))
|
||||
self.output_handle.as_ref()
|
||||
}
|
||||
|
||||
pub fn save_replays(
|
||||
&self,
|
||||
executor: BackgroundExecutor,
|
||||
) -> gpui::Task<anyhow::Result<(PathBuf, Duration)>> {
|
||||
self.replays.replays_to_tar(executor)
|
||||
}
|
||||
|
||||
#[cfg(not(any(all(target_os = "windows", target_env = "gnu"), target_os = "freebsd")))]
|
||||
pub fn open_microphone(voip_parts: VoipParts) -> anyhow::Result<impl Source> {
|
||||
let stream = rodio::microphone::MicrophoneBuilder::new()
|
||||
.default_device()?
|
||||
.default_config()?
|
||||
.prefer_sample_rates([SAMPLE_RATE, SAMPLE_RATE.saturating_mul(nz!(2))])
|
||||
.prefer_channel_counts([nz!(1), nz!(2)])
|
||||
.prefer_buffer_sizes(512..)
|
||||
.open_stream()?;
|
||||
info!("Opened microphone: {:?}", stream.config());
|
||||
|
||||
let (replay, stream) = UniformSourceIterator::new(stream, CHANNEL_COUNT, SAMPLE_RATE)
|
||||
.limit(LimitSettings::live_performance())
|
||||
.process_buffer::<BUFFER_SIZE, _>(move |buffer| {
|
||||
let mut int_buffer: [i16; _] = buffer.map(|s| s.to_sample());
|
||||
if voip_parts
|
||||
.echo_canceller
|
||||
.lock()
|
||||
.process_stream(
|
||||
&mut int_buffer,
|
||||
SAMPLE_RATE.get() as i32,
|
||||
CHANNEL_COUNT.get() as i32,
|
||||
)
|
||||
.context("livekit audio processor error")
|
||||
.log_err()
|
||||
.is_some()
|
||||
{
|
||||
for (sample, processed) in buffer.iter_mut().zip(&int_buffer) {
|
||||
*sample = (*processed).to_sample();
|
||||
}
|
||||
}
|
||||
})
|
||||
.automatic_gain_control(1.0, 4.0, 0.0, 5.0)
|
||||
.periodic_access(Duration::from_millis(100), move |agc_source| {
|
||||
agc_source.set_enabled(LIVE_SETTINGS.control_input_volume.load(Ordering::Relaxed));
|
||||
})
|
||||
.replayable(REPLAY_DURATION)?;
|
||||
|
||||
voip_parts
|
||||
.replays
|
||||
.add_voip_stream("local microphone".to_string(), replay);
|
||||
Ok(stream)
|
||||
}
|
||||
|
||||
pub fn play_voip_stream(
|
||||
pub fn play_source(
|
||||
source: impl rodio::Source + Send + 'static,
|
||||
speaker_name: String,
|
||||
is_staff: bool,
|
||||
cx: &mut App,
|
||||
) -> anyhow::Result<()> {
|
||||
let (replay_source, source) = source
|
||||
.automatic_gain_control(1.0, 4.0, 0.0, 5.0)
|
||||
.periodic_access(Duration::from_millis(100), move |agc_source| {
|
||||
agc_source.set_enabled(LIVE_SETTINGS.control_input_volume.load(Ordering::Relaxed));
|
||||
})
|
||||
.replayable(REPLAY_DURATION)
|
||||
.expect("REPLAY_DURATION is longer then 100ms");
|
||||
|
||||
cx.update_default_global(|this: &mut Self, _cx| {
|
||||
let output_mixer = this
|
||||
let output_handle = this
|
||||
.ensure_output_exists()
|
||||
.context("Could not get output mixer")?;
|
||||
output_mixer.add(source);
|
||||
if is_staff {
|
||||
this.replays.add_voip_stream(speaker_name, replay_source);
|
||||
}
|
||||
.ok_or_else(|| anyhow!("Could not open audio output"))?;
|
||||
output_handle.mixer().add(source);
|
||||
Ok(())
|
||||
})
|
||||
}
|
||||
@@ -228,12 +71,8 @@ impl Audio {
|
||||
pub fn play_sound(sound: Sound, cx: &mut App) {
|
||||
cx.update_default_global(|this: &mut Self, cx| {
|
||||
let source = this.sound_source(sound, cx).log_err()?;
|
||||
let output_mixer = this
|
||||
.ensure_output_exists()
|
||||
.context("Could not get output mixer")
|
||||
.log_err()?;
|
||||
|
||||
output_mixer.add(source);
|
||||
let output_handle = this.ensure_output_exists()?;
|
||||
output_handle.mixer().add(source);
|
||||
Some(())
|
||||
});
|
||||
}
|
||||
@@ -264,23 +103,3 @@ impl Audio {
|
||||
Ok(source)
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(not(any(all(target_os = "windows", target_env = "gnu"), target_os = "freebsd")))]
|
||||
pub struct VoipParts {
|
||||
echo_canceller: Arc<Mutex<apm::AudioProcessingModule>>,
|
||||
replays: replays::Replays,
|
||||
}
|
||||
|
||||
#[cfg(not(any(all(target_os = "windows", target_env = "gnu"), target_os = "freebsd")))]
|
||||
impl VoipParts {
|
||||
pub fn new(cx: &AsyncApp) -> anyhow::Result<Self> {
|
||||
let (apm, replays) = cx.try_read_default_global::<Audio, _>(|audio, _| {
|
||||
(Arc::clone(&audio.echo_canceller), audio.replays.clone())
|
||||
})?;
|
||||
|
||||
Ok(Self {
|
||||
echo_canceller: apm,
|
||||
replays,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,29 +1,14 @@
|
||||
use std::sync::atomic::{AtomicBool, Ordering};
|
||||
|
||||
use anyhow::Result;
|
||||
use gpui::App;
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::{Settings, SettingsKey, SettingsSources, SettingsStore, SettingsUi};
|
||||
use settings::{Settings, SettingsKey, SettingsSources, SettingsUi};
|
||||
|
||||
#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug, SettingsUi)]
|
||||
pub struct AudioSettings {
|
||||
/// Opt into the new audio system.
|
||||
#[serde(rename = "experimental.rodio_audio", default)]
|
||||
pub rodio_audio: bool, // default is false
|
||||
/// Requires 'rodio_audio: true'
|
||||
///
|
||||
/// Use the new audio systems automatic gain control for your microphone.
|
||||
/// This affects how loud you sound to others.
|
||||
#[serde(rename = "experimental.control_input_volume", default)]
|
||||
pub control_input_volume: bool,
|
||||
/// Requires 'rodio_audio: true'
|
||||
///
|
||||
/// Use the new audio systems automatic gain control on everyone in the
|
||||
/// call. This makes call members who are too quite louder and those who are
|
||||
/// too loud quieter. This only affects how things sound for you.
|
||||
#[serde(rename = "experimental.control_output_volume", default)]
|
||||
pub control_output_volume: bool,
|
||||
}
|
||||
|
||||
/// Configuration of audio in Zed.
|
||||
@@ -31,22 +16,9 @@ pub struct AudioSettings {
|
||||
#[serde(default)]
|
||||
#[settings_key(key = "audio")]
|
||||
pub struct AudioSettingsContent {
|
||||
/// Opt into the new audio system.
|
||||
/// Whether to use the experimental audio system
|
||||
#[serde(rename = "experimental.rodio_audio", default)]
|
||||
pub rodio_audio: bool, // default is false
|
||||
/// Requires 'rodio_audio: true'
|
||||
///
|
||||
/// Use the new audio systems automatic gain control for your microphone.
|
||||
/// This affects how loud you sound to others.
|
||||
#[serde(rename = "experimental.control_input_volume", default)]
|
||||
pub control_input_volume: bool,
|
||||
/// Requires 'rodio_audio: true'
|
||||
///
|
||||
/// Use the new audio systems automatic gain control on everyone in the
|
||||
/// call. This makes call members who are too quite louder and those who are
|
||||
/// too loud quieter. This only affects how things sound for you.
|
||||
#[serde(rename = "experimental.control_output_volume", default)]
|
||||
pub control_output_volume: bool,
|
||||
pub rodio_audio: bool,
|
||||
}
|
||||
|
||||
impl Settings for AudioSettings {
|
||||
@@ -58,42 +30,3 @@ impl Settings for AudioSettings {
|
||||
|
||||
fn import_from_vscode(_vscode: &settings::VsCodeSettings, _current: &mut Self::FileContent) {}
|
||||
}
|
||||
|
||||
/// See docs on [LIVE_SETTINGS]
|
||||
pub(crate) struct LiveSettings {
|
||||
pub(crate) control_input_volume: AtomicBool,
|
||||
pub(crate) control_output_volume: AtomicBool,
|
||||
}
|
||||
|
||||
impl LiveSettings {
|
||||
pub(crate) fn initialize(&self, cx: &mut App) {
|
||||
cx.observe_global::<SettingsStore>(move |cx| {
|
||||
LIVE_SETTINGS.control_input_volume.store(
|
||||
AudioSettings::get_global(cx).control_input_volume,
|
||||
Ordering::Relaxed,
|
||||
);
|
||||
LIVE_SETTINGS.control_output_volume.store(
|
||||
AudioSettings::get_global(cx).control_output_volume,
|
||||
Ordering::Relaxed,
|
||||
);
|
||||
})
|
||||
.detach();
|
||||
|
||||
let init_settings = AudioSettings::get_global(cx);
|
||||
LIVE_SETTINGS
|
||||
.control_input_volume
|
||||
.store(init_settings.control_input_volume, Ordering::Relaxed);
|
||||
LIVE_SETTINGS
|
||||
.control_output_volume
|
||||
.store(init_settings.control_output_volume, Ordering::Relaxed);
|
||||
}
|
||||
}
|
||||
|
||||
/// Allows access to settings from the audio thread. Updated by
|
||||
/// observer of SettingsStore. Needed because audio playback and recording are
|
||||
/// real time and must each run in a dedicated OS thread, therefore we can not
|
||||
/// use the background executor.
|
||||
pub(crate) static LIVE_SETTINGS: LiveSettings = LiveSettings {
|
||||
control_input_volume: AtomicBool::new(true),
|
||||
control_output_volume: AtomicBool::new(true),
|
||||
};
|
||||
|
||||
@@ -1,77 +0,0 @@
|
||||
use anyhow::{Context, anyhow};
|
||||
use async_tar::{Builder, Header};
|
||||
use gpui::{BackgroundExecutor, Task};
|
||||
|
||||
use collections::HashMap;
|
||||
use parking_lot::Mutex;
|
||||
use rodio::Source;
|
||||
use smol::fs::File;
|
||||
use std::{io, path::PathBuf, sync::Arc, time::Duration};
|
||||
|
||||
use crate::{REPLAY_DURATION, rodio_ext::Replay};
|
||||
|
||||
#[derive(Default, Clone)]
|
||||
pub(crate) struct Replays(Arc<Mutex<HashMap<String, Replay>>>);
|
||||
|
||||
impl Replays {
|
||||
pub(crate) fn add_voip_stream(&self, stream_name: String, source: Replay) {
|
||||
let mut map = self.0.lock();
|
||||
map.retain(|_, replay| replay.source_is_active());
|
||||
map.insert(stream_name, source);
|
||||
}
|
||||
|
||||
pub(crate) fn replays_to_tar(
|
||||
&self,
|
||||
executor: BackgroundExecutor,
|
||||
) -> Task<anyhow::Result<(PathBuf, Duration)>> {
|
||||
let map = Arc::clone(&self.0);
|
||||
executor.spawn(async move {
|
||||
let recordings: Vec<_> = map
|
||||
.lock()
|
||||
.iter_mut()
|
||||
.map(|(name, replay)| {
|
||||
let queued = REPLAY_DURATION.min(replay.duration_ready());
|
||||
(name.clone(), replay.take_duration(queued).record())
|
||||
})
|
||||
.collect();
|
||||
let longest = recordings
|
||||
.iter()
|
||||
.map(|(_, r)| {
|
||||
r.total_duration()
|
||||
.expect("SamplesBuffer always returns a total duration")
|
||||
})
|
||||
.max()
|
||||
.ok_or(anyhow!("There is no audio to capture"))?;
|
||||
|
||||
let path = std::env::current_dir()
|
||||
.context("Could not get current dir")?
|
||||
.join("replays.tar");
|
||||
let tar = File::create(&path)
|
||||
.await
|
||||
.context("Could not create file for tar")?;
|
||||
|
||||
let mut tar = Builder::new(tar);
|
||||
|
||||
for (name, recording) in recordings {
|
||||
let mut writer = io::Cursor::new(Vec::new());
|
||||
rodio::wav_to_writer(recording, &mut writer).context("failed to encode wav")?;
|
||||
let wav_data = writer.into_inner();
|
||||
let path = name.replace(' ', "_") + ".wav";
|
||||
let mut header = Header::new_gnu();
|
||||
// rw permissions for everyone
|
||||
header.set_mode(0o666);
|
||||
header.set_size(wav_data.len() as u64);
|
||||
tar.append_data(&mut header, path, wav_data.as_slice())
|
||||
.await
|
||||
.context("failed to apped wav to tar")?;
|
||||
}
|
||||
tar.into_inner()
|
||||
.await
|
||||
.context("Could not finish writing tar")?
|
||||
.sync_all()
|
||||
.await
|
||||
.context("Could not flush tar file to disk")?;
|
||||
Ok((path, longest))
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -1,598 +0,0 @@
|
||||
use std::{
|
||||
sync::{
|
||||
Arc, Mutex,
|
||||
atomic::{AtomicBool, Ordering},
|
||||
},
|
||||
time::Duration,
|
||||
};
|
||||
|
||||
use crossbeam::queue::ArrayQueue;
|
||||
use rodio::{ChannelCount, Sample, SampleRate, Source};
|
||||
|
||||
#[derive(Debug, thiserror::Error)]
|
||||
#[error("Replay duration is too short must be >= 100ms")]
|
||||
pub struct ReplayDurationTooShort;
|
||||
|
||||
pub trait RodioExt: Source + Sized {
|
||||
fn process_buffer<const N: usize, F>(self, callback: F) -> ProcessBuffer<N, Self, F>
|
||||
where
|
||||
F: FnMut(&mut [Sample; N]);
|
||||
fn inspect_buffer<const N: usize, F>(self, callback: F) -> InspectBuffer<N, Self, F>
|
||||
where
|
||||
F: FnMut(&[Sample; N]);
|
||||
fn replayable(
|
||||
self,
|
||||
duration: Duration,
|
||||
) -> Result<(Replay, Replayable<Self>), ReplayDurationTooShort>;
|
||||
fn take_samples(self, n: usize) -> TakeSamples<Self>;
|
||||
}
|
||||
|
||||
impl<S: Source> RodioExt for S {
|
||||
fn process_buffer<const N: usize, F>(self, callback: F) -> ProcessBuffer<N, Self, F>
|
||||
where
|
||||
F: FnMut(&mut [Sample; N]),
|
||||
{
|
||||
ProcessBuffer {
|
||||
inner: self,
|
||||
callback,
|
||||
buffer: [0.0; N],
|
||||
next: N,
|
||||
}
|
||||
}
|
||||
fn inspect_buffer<const N: usize, F>(self, callback: F) -> InspectBuffer<N, Self, F>
|
||||
where
|
||||
F: FnMut(&[Sample; N]),
|
||||
{
|
||||
InspectBuffer {
|
||||
inner: self,
|
||||
callback,
|
||||
buffer: [0.0; N],
|
||||
free: 0,
|
||||
}
|
||||
}
|
||||
/// Maintains a live replay with a history of at least `duration` seconds.
|
||||
///
|
||||
/// Note:
|
||||
/// History can be 100ms longer if the source drops before or while the
|
||||
/// replay is being read
|
||||
///
|
||||
/// # Errors
|
||||
/// If duration is smaller then 100ms
|
||||
fn replayable(
|
||||
self,
|
||||
duration: Duration,
|
||||
) -> Result<(Replay, Replayable<Self>), ReplayDurationTooShort> {
|
||||
if duration < Duration::from_millis(100) {
|
||||
return Err(ReplayDurationTooShort);
|
||||
}
|
||||
|
||||
let samples_per_second = self.sample_rate().get() as usize * self.channels().get() as usize;
|
||||
let samples_to_queue = duration.as_secs_f64() * samples_per_second as f64;
|
||||
let samples_to_queue =
|
||||
(samples_to_queue as usize).next_multiple_of(self.channels().get().into());
|
||||
|
||||
let chunk_size =
|
||||
(samples_per_second.div_ceil(10)).next_multiple_of(self.channels().get() as usize);
|
||||
let chunks_to_queue = samples_to_queue.div_ceil(chunk_size);
|
||||
|
||||
let is_active = Arc::new(AtomicBool::new(true));
|
||||
let queue = Arc::new(ReplayQueue::new(chunks_to_queue, chunk_size));
|
||||
Ok((
|
||||
Replay {
|
||||
rx: Arc::clone(&queue),
|
||||
buffer: Vec::new().into_iter(),
|
||||
sleep_duration: duration / 2,
|
||||
sample_rate: self.sample_rate(),
|
||||
channel_count: self.channels(),
|
||||
source_is_active: is_active.clone(),
|
||||
},
|
||||
Replayable {
|
||||
tx: queue,
|
||||
inner: self,
|
||||
buffer: Vec::with_capacity(chunk_size),
|
||||
chunk_size,
|
||||
is_active,
|
||||
},
|
||||
))
|
||||
}
|
||||
fn take_samples(self, n: usize) -> TakeSamples<S> {
|
||||
TakeSamples {
|
||||
inner: self,
|
||||
left_to_take: n,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct TakeSamples<S> {
|
||||
inner: S,
|
||||
left_to_take: usize,
|
||||
}
|
||||
|
||||
impl<S: Source> Iterator for TakeSamples<S> {
|
||||
type Item = Sample;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
if self.left_to_take == 0 {
|
||||
None
|
||||
} else {
|
||||
self.left_to_take -= 1;
|
||||
self.inner.next()
|
||||
}
|
||||
}
|
||||
|
||||
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||
(0, Some(self.left_to_take))
|
||||
}
|
||||
}
|
||||
|
||||
impl<S: Source> Source for TakeSamples<S> {
|
||||
fn current_span_len(&self) -> Option<usize> {
|
||||
None // does not support spans
|
||||
}
|
||||
|
||||
fn channels(&self) -> ChannelCount {
|
||||
self.inner.channels()
|
||||
}
|
||||
|
||||
fn sample_rate(&self) -> SampleRate {
|
||||
self.inner.sample_rate()
|
||||
}
|
||||
|
||||
fn total_duration(&self) -> Option<Duration> {
|
||||
Some(Duration::from_secs_f64(
|
||||
self.left_to_take as f64
|
||||
/ self.sample_rate().get() as f64
|
||||
/ self.channels().get() as f64,
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
struct ReplayQueue {
|
||||
inner: ArrayQueue<Vec<Sample>>,
|
||||
normal_chunk_len: usize,
|
||||
/// The last chunk in the queue may be smaller then
|
||||
/// the normal chunk size. This is always equal to the
|
||||
/// size of the last element in the queue.
|
||||
/// (so normally chunk_size)
|
||||
last_chunk: Mutex<Vec<Sample>>,
|
||||
}
|
||||
|
||||
impl ReplayQueue {
|
||||
fn new(queue_len: usize, chunk_size: usize) -> Self {
|
||||
Self {
|
||||
inner: ArrayQueue::new(queue_len),
|
||||
normal_chunk_len: chunk_size,
|
||||
last_chunk: Mutex::new(Vec::new()),
|
||||
}
|
||||
}
|
||||
/// Returns the length in samples
|
||||
fn len(&self) -> usize {
|
||||
self.inner.len().saturating_sub(1) * self.normal_chunk_len
|
||||
+ self
|
||||
.last_chunk
|
||||
.lock()
|
||||
.expect("Self::push_last can not poison this lock")
|
||||
.len()
|
||||
}
|
||||
|
||||
fn pop(&self) -> Option<Vec<Sample>> {
|
||||
self.inner.pop() // removes element that was inserted first
|
||||
}
|
||||
|
||||
fn push_last(&self, mut samples: Vec<Sample>) {
|
||||
let mut last_chunk = self
|
||||
.last_chunk
|
||||
.lock()
|
||||
.expect("Self::len can not poison this lock");
|
||||
std::mem::swap(&mut *last_chunk, &mut samples);
|
||||
}
|
||||
|
||||
fn push_normal(&self, samples: Vec<Sample>) {
|
||||
let _pushed_out_of_ringbuf = self.inner.force_push(samples);
|
||||
}
|
||||
}
|
||||
|
||||
pub struct ProcessBuffer<const N: usize, S, F>
|
||||
where
|
||||
S: Source + Sized,
|
||||
F: FnMut(&mut [Sample; N]),
|
||||
{
|
||||
inner: S,
|
||||
callback: F,
|
||||
/// Buffer used for both input and output.
|
||||
buffer: [Sample; N],
|
||||
/// Next already processed sample is at this index
|
||||
/// in buffer.
|
||||
///
|
||||
/// If this is equal to the length of the buffer we have no more samples and
|
||||
/// we must get new ones and process them
|
||||
next: usize,
|
||||
}
|
||||
|
||||
impl<const N: usize, S, F> Iterator for ProcessBuffer<N, S, F>
|
||||
where
|
||||
S: Source + Sized,
|
||||
F: FnMut(&mut [Sample; N]),
|
||||
{
|
||||
type Item = Sample;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
self.next += 1;
|
||||
if self.next < self.buffer.len() {
|
||||
let sample = self.buffer[self.next];
|
||||
return Some(sample);
|
||||
}
|
||||
|
||||
for sample in &mut self.buffer {
|
||||
*sample = self.inner.next()?
|
||||
}
|
||||
(self.callback)(&mut self.buffer);
|
||||
|
||||
self.next = 0;
|
||||
Some(self.buffer[0])
|
||||
}
|
||||
|
||||
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||
self.inner.size_hint()
|
||||
}
|
||||
}
|
||||
|
||||
impl<const N: usize, S, F> Source for ProcessBuffer<N, S, F>
|
||||
where
|
||||
S: Source + Sized,
|
||||
F: FnMut(&mut [Sample; N]),
|
||||
{
|
||||
fn current_span_len(&self) -> Option<usize> {
|
||||
None
|
||||
}
|
||||
|
||||
fn channels(&self) -> rodio::ChannelCount {
|
||||
self.inner.channels()
|
||||
}
|
||||
|
||||
fn sample_rate(&self) -> rodio::SampleRate {
|
||||
self.inner.sample_rate()
|
||||
}
|
||||
|
||||
fn total_duration(&self) -> Option<std::time::Duration> {
|
||||
self.inner.total_duration()
|
||||
}
|
||||
}
|
||||
|
||||
pub struct InspectBuffer<const N: usize, S, F>
|
||||
where
|
||||
S: Source + Sized,
|
||||
F: FnMut(&[Sample; N]),
|
||||
{
|
||||
inner: S,
|
||||
callback: F,
|
||||
/// Stores already emitted samples, once its full we call the callback.
|
||||
buffer: [Sample; N],
|
||||
/// Next free element in buffer. If this is equal to the buffer length
|
||||
/// we have no more free lements.
|
||||
free: usize,
|
||||
}
|
||||
|
||||
impl<const N: usize, S, F> Iterator for InspectBuffer<N, S, F>
|
||||
where
|
||||
S: Source + Sized,
|
||||
F: FnMut(&[Sample; N]),
|
||||
{
|
||||
type Item = Sample;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
let Some(sample) = self.inner.next() else {
|
||||
return None;
|
||||
};
|
||||
|
||||
self.buffer[self.free] = sample;
|
||||
self.free += 1;
|
||||
|
||||
if self.free == self.buffer.len() {
|
||||
(self.callback)(&self.buffer);
|
||||
self.free = 0
|
||||
}
|
||||
|
||||
Some(sample)
|
||||
}
|
||||
|
||||
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||
self.inner.size_hint()
|
||||
}
|
||||
}
|
||||
|
||||
impl<const N: usize, S, F> Source for InspectBuffer<N, S, F>
|
||||
where
|
||||
S: Source + Sized,
|
||||
F: FnMut(&[Sample; N]),
|
||||
{
|
||||
fn current_span_len(&self) -> Option<usize> {
|
||||
None
|
||||
}
|
||||
|
||||
fn channels(&self) -> rodio::ChannelCount {
|
||||
self.inner.channels()
|
||||
}
|
||||
|
||||
fn sample_rate(&self) -> rodio::SampleRate {
|
||||
self.inner.sample_rate()
|
||||
}
|
||||
|
||||
fn total_duration(&self) -> Option<std::time::Duration> {
|
||||
self.inner.total_duration()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Replayable<S: Source> {
|
||||
inner: S,
|
||||
buffer: Vec<Sample>,
|
||||
chunk_size: usize,
|
||||
tx: Arc<ReplayQueue>,
|
||||
is_active: Arc<AtomicBool>,
|
||||
}
|
||||
|
||||
impl<S: Source> Iterator for Replayable<S> {
|
||||
type Item = Sample;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
if let Some(sample) = self.inner.next() {
|
||||
self.buffer.push(sample);
|
||||
// If the buffer is full send it
|
||||
if self.buffer.len() == self.chunk_size {
|
||||
self.tx.push_normal(std::mem::take(&mut self.buffer));
|
||||
}
|
||||
Some(sample)
|
||||
} else {
|
||||
let last_chunk = std::mem::take(&mut self.buffer);
|
||||
self.tx.push_last(last_chunk);
|
||||
self.is_active.store(false, Ordering::Relaxed);
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||
self.inner.size_hint()
|
||||
}
|
||||
}
|
||||
|
||||
impl<S: Source> Source for Replayable<S> {
|
||||
fn current_span_len(&self) -> Option<usize> {
|
||||
self.inner.current_span_len()
|
||||
}
|
||||
|
||||
fn channels(&self) -> ChannelCount {
|
||||
self.inner.channels()
|
||||
}
|
||||
|
||||
fn sample_rate(&self) -> SampleRate {
|
||||
self.inner.sample_rate()
|
||||
}
|
||||
|
||||
fn total_duration(&self) -> Option<Duration> {
|
||||
self.inner.total_duration()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Replay {
|
||||
rx: Arc<ReplayQueue>,
|
||||
buffer: std::vec::IntoIter<Sample>,
|
||||
sleep_duration: Duration,
|
||||
sample_rate: SampleRate,
|
||||
channel_count: ChannelCount,
|
||||
source_is_active: Arc<AtomicBool>,
|
||||
}
|
||||
|
||||
impl Replay {
|
||||
pub fn source_is_active(&self) -> bool {
|
||||
// - source could return None and not drop
|
||||
// - source could be dropped before returning None
|
||||
self.source_is_active.load(Ordering::Relaxed) && Arc::strong_count(&self.rx) < 2
|
||||
}
|
||||
|
||||
/// Duration of what is in the buffer and can be returned without blocking.
|
||||
pub fn duration_ready(&self) -> Duration {
|
||||
let samples_per_second = self.channels().get() as u32 * self.sample_rate().get();
|
||||
|
||||
let seconds_queued = self.samples_ready() as f64 / samples_per_second as f64;
|
||||
Duration::from_secs_f64(seconds_queued)
|
||||
}
|
||||
|
||||
/// Number of samples in the buffer and can be returned without blocking.
|
||||
pub fn samples_ready(&self) -> usize {
|
||||
self.rx.len() + self.buffer.len()
|
||||
}
|
||||
}
|
||||
|
||||
impl Iterator for Replay {
|
||||
type Item = Sample;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
if let Some(sample) = self.buffer.next() {
|
||||
return Some(sample);
|
||||
}
|
||||
|
||||
loop {
|
||||
if let Some(new_buffer) = self.rx.pop() {
|
||||
self.buffer = new_buffer.into_iter();
|
||||
return self.buffer.next();
|
||||
}
|
||||
|
||||
if !self.source_is_active() {
|
||||
return None;
|
||||
}
|
||||
|
||||
// The queue does not support blocking on a next item. We want this queue as it
|
||||
// is quite fast and provides a fixed size. We know how many samples are in a
|
||||
// buffer so if we do not get one now we must be getting one after `sleep_duration`.
|
||||
std::thread::sleep(self.sleep_duration);
|
||||
}
|
||||
}
|
||||
|
||||
fn size_hint(&self) -> (usize, Option<usize>) {
|
||||
((self.rx.len() + self.buffer.len()), None)
|
||||
}
|
||||
}
|
||||
|
||||
impl Source for Replay {
|
||||
fn current_span_len(&self) -> Option<usize> {
|
||||
None // source is not compatible with spans
|
||||
}
|
||||
|
||||
fn channels(&self) -> ChannelCount {
|
||||
self.channel_count
|
||||
}
|
||||
|
||||
fn sample_rate(&self) -> SampleRate {
|
||||
self.sample_rate
|
||||
}
|
||||
|
||||
fn total_duration(&self) -> Option<Duration> {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use rodio::{nz, static_buffer::StaticSamplesBuffer};
|
||||
|
||||
use super::*;
|
||||
|
||||
const SAMPLES: [Sample; 5] = [0.0, 1.0, 2.0, 3.0, 4.0];
|
||||
|
||||
fn test_source() -> StaticSamplesBuffer {
|
||||
StaticSamplesBuffer::new(nz!(1), nz!(1), &SAMPLES)
|
||||
}
|
||||
|
||||
mod process_buffer {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn callback_gets_all_samples() {
|
||||
let input = test_source();
|
||||
|
||||
let _ = input
|
||||
.process_buffer::<{ SAMPLES.len() }, _>(|buffer| assert_eq!(*buffer, SAMPLES))
|
||||
.count();
|
||||
}
|
||||
#[test]
|
||||
fn callback_modifies_yielded() {
|
||||
let input = test_source();
|
||||
|
||||
let yielded: Vec<_> = input
|
||||
.process_buffer::<{ SAMPLES.len() }, _>(|buffer| {
|
||||
for sample in buffer {
|
||||
*sample += 1.0;
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
assert_eq!(
|
||||
yielded,
|
||||
SAMPLES.into_iter().map(|s| s + 1.0).collect::<Vec<_>>()
|
||||
)
|
||||
}
|
||||
#[test]
|
||||
fn source_truncates_to_whole_buffers() {
|
||||
let input = test_source();
|
||||
|
||||
let yielded = input
|
||||
.process_buffer::<3, _>(|buffer| assert_eq!(buffer, &SAMPLES[..3]))
|
||||
.count();
|
||||
assert_eq!(yielded, 3)
|
||||
}
|
||||
}
|
||||
|
||||
mod inspect_buffer {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn callback_gets_all_samples() {
|
||||
let input = test_source();
|
||||
|
||||
let _ = input
|
||||
.inspect_buffer::<{ SAMPLES.len() }, _>(|buffer| assert_eq!(*buffer, SAMPLES))
|
||||
.count();
|
||||
}
|
||||
#[test]
|
||||
fn source_does_not_truncate() {
|
||||
let input = test_source();
|
||||
|
||||
let yielded = input
|
||||
.inspect_buffer::<3, _>(|buffer| assert_eq!(buffer, &SAMPLES[..3]))
|
||||
.count();
|
||||
assert_eq!(yielded, SAMPLES.len())
|
||||
}
|
||||
}
|
||||
|
||||
mod instant_replay {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn continues_after_history() {
|
||||
let input = test_source();
|
||||
|
||||
let (mut replay, mut source) = input
|
||||
.replayable(Duration::from_secs(3))
|
||||
.expect("longer then 100ms");
|
||||
|
||||
source.by_ref().take(3).count();
|
||||
let yielded: Vec<Sample> = replay.by_ref().take(3).collect();
|
||||
assert_eq!(&yielded, &SAMPLES[0..3],);
|
||||
|
||||
source.count();
|
||||
let yielded: Vec<Sample> = replay.collect();
|
||||
assert_eq!(&yielded, &SAMPLES[3..5],);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn keeps_only_latest() {
|
||||
let input = test_source();
|
||||
|
||||
let (mut replay, mut source) = input
|
||||
.replayable(Duration::from_secs(2))
|
||||
.expect("longer then 100ms");
|
||||
|
||||
source.by_ref().take(5).count(); // get all items but do not end the source
|
||||
let yielded: Vec<Sample> = replay.by_ref().take(2).collect();
|
||||
assert_eq!(&yielded, &SAMPLES[3..5]);
|
||||
source.count(); // exhaust source
|
||||
assert_eq!(replay.next(), None);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn keeps_correct_amount_of_seconds() {
|
||||
let input = StaticSamplesBuffer::new(nz!(1), nz!(16_000), &[0.0; 40_000]);
|
||||
|
||||
let (replay, mut source) = input
|
||||
.replayable(Duration::from_secs(2))
|
||||
.expect("longer then 100ms");
|
||||
|
||||
// exhaust but do not yet end source
|
||||
source.by_ref().take(40_000).count();
|
||||
|
||||
// take all samples we can without blocking
|
||||
let ready = replay.samples_ready();
|
||||
let n_yielded = replay.take_samples(ready).count();
|
||||
|
||||
let max = source.sample_rate().get() * source.channels().get() as u32 * 2;
|
||||
let margin = 16_000 / 10; // 100ms
|
||||
assert!(n_yielded as u32 >= max - margin);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn samples_ready() {
|
||||
let input = StaticSamplesBuffer::new(nz!(1), nz!(16_000), &[0.0; 40_000]);
|
||||
let (mut replay, source) = input
|
||||
.replayable(Duration::from_secs(2))
|
||||
.expect("longer then 100ms");
|
||||
assert_eq!(replay.by_ref().samples_ready(), 0);
|
||||
|
||||
source.take(8000).count(); // half a second
|
||||
let margin = 16_000 / 10; // 100ms
|
||||
let ready = replay.samples_ready();
|
||||
assert!(ready >= 8000 - margin);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -29,7 +29,6 @@ client.workspace = true
|
||||
collections.workspace = true
|
||||
fs.workspace = true
|
||||
futures.workspace = true
|
||||
feature_flags.workspace = true
|
||||
gpui = { workspace = true, features = ["screen-capture"] }
|
||||
language.workspace = true
|
||||
log.workspace = true
|
||||
|
||||
@@ -9,7 +9,6 @@ use client::{
|
||||
proto::{self, PeerId},
|
||||
};
|
||||
use collections::{BTreeMap, HashMap, HashSet};
|
||||
use feature_flags::FeatureFlagAppExt;
|
||||
use fs::Fs;
|
||||
use futures::StreamExt;
|
||||
use gpui::{
|
||||
@@ -1323,18 +1322,8 @@ impl Room {
|
||||
return Task::ready(Err(anyhow!("live-kit was not initialized")));
|
||||
};
|
||||
|
||||
let is_staff = cx.is_staff();
|
||||
let user_name = self
|
||||
.user_store
|
||||
.read(cx)
|
||||
.current_user()
|
||||
.and_then(|user| user.name.clone())
|
||||
.unwrap_or_else(|| "unknown".to_string());
|
||||
|
||||
cx.spawn(async move |this, cx| {
|
||||
let publication = room
|
||||
.publish_local_microphone_track(user_name, is_staff, cx)
|
||||
.await;
|
||||
let publication = room.publish_local_microphone_track(cx).await;
|
||||
this.update(cx, |this, cx| {
|
||||
let live_kit = this
|
||||
.live_kit
|
||||
|
||||
982
crates/diagnostics/src/buffer_diagnostics.rs
Normal file
982
crates/diagnostics/src/buffer_diagnostics.rs
Normal file
@@ -0,0 +1,982 @@
|
||||
use crate::{
|
||||
DIAGNOSTICS_UPDATE_DELAY, IncludeWarnings, ToggleWarnings, context_range_for_entry,
|
||||
diagnostic_renderer::{DiagnosticBlock, DiagnosticRenderer},
|
||||
toolbar_controls::DiagnosticsToolbarEditor,
|
||||
};
|
||||
use anyhow::Result;
|
||||
use collections::HashMap;
|
||||
use editor::{
|
||||
Editor, EditorEvent, ExcerptRange, MultiBuffer, PathKey,
|
||||
display_map::{BlockPlacement, BlockProperties, BlockStyle, CustomBlockId},
|
||||
multibuffer_context_lines,
|
||||
};
|
||||
use gpui::{
|
||||
AnyElement, App, AppContext, Context, Entity, EntityId, EventEmitter, FocusHandle, Focusable,
|
||||
InteractiveElement, IntoElement, ParentElement, Render, SharedString, Styled, Subscription,
|
||||
Task, WeakEntity, Window, actions, div,
|
||||
};
|
||||
use language::{Buffer, DiagnosticEntry, Point};
|
||||
use project::{
|
||||
DiagnosticSummary, Event, Project, ProjectItem, ProjectPath,
|
||||
project_settings::{DiagnosticSeverity, ProjectSettings},
|
||||
};
|
||||
use settings::Settings;
|
||||
use std::{
|
||||
any::{Any, TypeId},
|
||||
cmp::Ordering,
|
||||
sync::Arc,
|
||||
};
|
||||
use text::{Anchor, BufferSnapshot, OffsetRangeExt};
|
||||
use ui::{Button, ButtonStyle, Icon, IconName, Label, Tooltip, h_flex, prelude::*};
|
||||
use util::paths::PathExt;
|
||||
use workspace::{
|
||||
ItemHandle, ItemNavHistory, ToolbarItemLocation, Workspace,
|
||||
item::{BreadcrumbText, Item, ItemEvent, TabContentParams},
|
||||
};
|
||||
|
||||
actions!(
|
||||
diagnostics,
|
||||
[
|
||||
/// Opens the project diagnostics view for the currently focused file.
|
||||
DeployCurrentFile,
|
||||
]
|
||||
);
|
||||
|
||||
/// The `BufferDiagnosticsEditor` is meant to be used when dealing specifically
|
||||
/// with diagnostics for a single buffer, as only the excerpts of the buffer
|
||||
/// where diagnostics are available are displayed.
|
||||
pub(crate) struct BufferDiagnosticsEditor {
|
||||
pub project: Entity<Project>,
|
||||
focus_handle: FocusHandle,
|
||||
editor: Entity<Editor>,
|
||||
/// The current diagnostic entries in the `BufferDiagnosticsEditor`. Used to
|
||||
/// allow quick comparison of updated diagnostics, to confirm if anything
|
||||
/// has changed.
|
||||
pub(crate) diagnostics: Vec<DiagnosticEntry<Anchor>>,
|
||||
/// The blocks used to display the diagnostics' content in the editor, next
|
||||
/// to the excerpts where the diagnostic originated.
|
||||
blocks: Vec<CustomBlockId>,
|
||||
/// Multibuffer to contain all excerpts that contain diagnostics, which are
|
||||
/// to be rendered in the editor.
|
||||
multibuffer: Entity<MultiBuffer>,
|
||||
/// The buffer for which the editor is displaying diagnostics and excerpts
|
||||
/// for.
|
||||
buffer: Option<Entity<Buffer>>,
|
||||
/// The path for which the editor is displaying diagnostics for.
|
||||
project_path: ProjectPath,
|
||||
/// Summary of the number of warnings and errors for the path. Used to
|
||||
/// display the number of warnings and errors in the tab's content.
|
||||
summary: DiagnosticSummary,
|
||||
/// Whether to include warnings in the list of diagnostics shown in the
|
||||
/// editor.
|
||||
pub(crate) include_warnings: bool,
|
||||
/// Keeps track of whether there's a background task already running to
|
||||
/// update the excerpts, in order to avoid firing multiple tasks for this purpose.
|
||||
pub(crate) update_excerpts_task: Option<Task<Result<()>>>,
|
||||
/// The project's subscription, responsible for processing events related to
|
||||
/// diagnostics.
|
||||
_subscription: Subscription,
|
||||
}
|
||||
|
||||
impl BufferDiagnosticsEditor {
|
||||
/// Creates new instance of the `BufferDiagnosticsEditor` which can then be
|
||||
/// displayed by adding it to a pane.
|
||||
pub fn new(
|
||||
project_path: ProjectPath,
|
||||
project_handle: Entity<Project>,
|
||||
buffer: Option<Entity<Buffer>>,
|
||||
include_warnings: bool,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
// Subscribe to project events related to diagnostics so the
|
||||
// `BufferDiagnosticsEditor` can update its state accordingly.
|
||||
let project_event_subscription = cx.subscribe_in(
|
||||
&project_handle,
|
||||
window,
|
||||
|buffer_diagnostics_editor, _project, event, window, cx| match event {
|
||||
Event::DiskBasedDiagnosticsStarted { .. } => {
|
||||
cx.notify();
|
||||
}
|
||||
Event::DiskBasedDiagnosticsFinished { .. } => {
|
||||
buffer_diagnostics_editor.update_all_excerpts(window, cx);
|
||||
}
|
||||
Event::DiagnosticsUpdated {
|
||||
paths,
|
||||
language_server_id,
|
||||
} => {
|
||||
// When diagnostics have been updated, the
|
||||
// `BufferDiagnosticsEditor` should update its state only if
|
||||
// one of the paths matches its `project_path`, otherwise
|
||||
// the event should be ignored.
|
||||
if paths.contains(&buffer_diagnostics_editor.project_path) {
|
||||
buffer_diagnostics_editor.update_diagnostic_summary(cx);
|
||||
|
||||
if buffer_diagnostics_editor.editor.focus_handle(cx).contains_focused(window, cx) || buffer_diagnostics_editor.focus_handle.contains_focused(window, cx) {
|
||||
log::debug!("diagnostics updated for server {language_server_id}. recording change");
|
||||
} else {
|
||||
log::debug!("diagnostics updated for server {language_server_id}. updating excerpts");
|
||||
buffer_diagnostics_editor.update_all_excerpts(window, cx);
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
},
|
||||
);
|
||||
|
||||
let focus_handle = cx.focus_handle();
|
||||
|
||||
cx.on_focus_in(
|
||||
&focus_handle,
|
||||
window,
|
||||
|buffer_diagnostics_editor, window, cx| buffer_diagnostics_editor.focus_in(window, cx),
|
||||
)
|
||||
.detach();
|
||||
|
||||
cx.on_focus_out(
|
||||
&focus_handle,
|
||||
window,
|
||||
|buffer_diagnostics_editor, _event, window, cx| {
|
||||
buffer_diagnostics_editor.focus_out(window, cx)
|
||||
},
|
||||
)
|
||||
.detach();
|
||||
|
||||
let summary = project_handle
|
||||
.read(cx)
|
||||
.diagnostic_summary_for_path(&project_path, cx);
|
||||
|
||||
let multibuffer = cx.new(|cx| MultiBuffer::new(project_handle.read(cx).capability()));
|
||||
let max_severity = Self::max_diagnostics_severity(include_warnings);
|
||||
let editor = cx.new(|cx| {
|
||||
let mut editor = Editor::for_multibuffer(
|
||||
multibuffer.clone(),
|
||||
Some(project_handle.clone()),
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
editor.set_vertical_scroll_margin(5, cx);
|
||||
editor.disable_inline_diagnostics();
|
||||
editor.set_max_diagnostics_severity(max_severity, cx);
|
||||
editor.set_all_diagnostics_active(cx);
|
||||
editor
|
||||
});
|
||||
|
||||
// Subscribe to events triggered by the editor in order to correctly
|
||||
// update the buffer's excerpts.
|
||||
cx.subscribe_in(
|
||||
&editor,
|
||||
window,
|
||||
|buffer_diagnostics_editor, _editor, event: &EditorEvent, window, cx| {
|
||||
cx.emit(event.clone());
|
||||
|
||||
match event {
|
||||
// If the user tries to focus on the editor but there's actually
|
||||
// no excerpts for the buffer, focus back on the
|
||||
// `BufferDiagnosticsEditor` instance.
|
||||
EditorEvent::Focused => {
|
||||
if buffer_diagnostics_editor.multibuffer.read(cx).is_empty() {
|
||||
window.focus(&buffer_diagnostics_editor.focus_handle);
|
||||
}
|
||||
}
|
||||
EditorEvent::Blurred => {
|
||||
buffer_diagnostics_editor.update_all_excerpts(window, cx)
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
},
|
||||
)
|
||||
.detach();
|
||||
|
||||
let diagnostics = vec![];
|
||||
let update_excerpts_task = None;
|
||||
let mut buffer_diagnostics_editor = Self {
|
||||
project: project_handle,
|
||||
focus_handle,
|
||||
editor,
|
||||
diagnostics,
|
||||
blocks: Default::default(),
|
||||
multibuffer,
|
||||
buffer,
|
||||
project_path,
|
||||
summary,
|
||||
include_warnings,
|
||||
update_excerpts_task,
|
||||
_subscription: project_event_subscription,
|
||||
};
|
||||
|
||||
buffer_diagnostics_editor.update_all_diagnostics(window, cx);
|
||||
buffer_diagnostics_editor
|
||||
}
|
||||
|
||||
fn deploy(
|
||||
workspace: &mut Workspace,
|
||||
_: &DeployCurrentFile,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Workspace>,
|
||||
) {
|
||||
// Determine the currently opened path by finding the active editor and
|
||||
// finding the project path for the buffer.
|
||||
// If there's no active editor with a project path, avoiding deploying
|
||||
// the buffer diagnostics view.
|
||||
if let Some(editor) = workspace.active_item_as::<Editor>(cx)
|
||||
&& let Some(project_path) = editor.project_path(cx)
|
||||
{
|
||||
// Check if there's already a `BufferDiagnosticsEditor` tab for this
|
||||
// same path, and if so, focus on that one instead of creating a new
|
||||
// one.
|
||||
let existing_editor = workspace
|
||||
.items_of_type::<BufferDiagnosticsEditor>(cx)
|
||||
.find(|editor| editor.read(cx).project_path == project_path);
|
||||
|
||||
if let Some(editor) = existing_editor {
|
||||
workspace.activate_item(&editor, true, true, window, cx);
|
||||
} else {
|
||||
let include_warnings = match cx.try_global::<IncludeWarnings>() {
|
||||
Some(include_warnings) => include_warnings.0,
|
||||
None => ProjectSettings::get_global(cx).diagnostics.include_warnings,
|
||||
};
|
||||
|
||||
let item = cx.new(|cx| {
|
||||
Self::new(
|
||||
project_path,
|
||||
workspace.project().clone(),
|
||||
editor.read(cx).buffer().read(cx).as_singleton(),
|
||||
include_warnings,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
workspace.add_item_to_active_pane(Box::new(item), None, true, window, cx);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn register(
|
||||
workspace: &mut Workspace,
|
||||
_window: Option<&mut Window>,
|
||||
_: &mut Context<Workspace>,
|
||||
) {
|
||||
workspace.register_action(Self::deploy);
|
||||
}
|
||||
|
||||
fn update_all_diagnostics(&mut self, window: &mut Window, cx: &mut Context<Self>) {
|
||||
self.update_all_excerpts(window, cx);
|
||||
}
|
||||
|
||||
fn update_diagnostic_summary(&mut self, cx: &mut Context<Self>) {
|
||||
let project = self.project.read(cx);
|
||||
|
||||
self.summary = project.diagnostic_summary_for_path(&self.project_path, cx);
|
||||
}
|
||||
|
||||
/// Enqueue an update to the excerpts and diagnostic blocks being shown in
|
||||
/// the editor.
|
||||
pub(crate) fn update_all_excerpts(&mut self, window: &mut Window, cx: &mut Context<Self>) {
|
||||
// If there's already a task updating the excerpts, early return and let
|
||||
// the other task finish.
|
||||
if self.update_excerpts_task.is_some() {
|
||||
return;
|
||||
}
|
||||
|
||||
let buffer = self.buffer.clone();
|
||||
|
||||
self.update_excerpts_task = Some(cx.spawn_in(window, async move |editor, cx| {
|
||||
cx.background_executor()
|
||||
.timer(DIAGNOSTICS_UPDATE_DELAY)
|
||||
.await;
|
||||
|
||||
if let Some(buffer) = buffer {
|
||||
editor
|
||||
.update_in(cx, |editor, window, cx| {
|
||||
editor.update_excerpts(buffer, window, cx)
|
||||
})?
|
||||
.await?;
|
||||
};
|
||||
|
||||
let _ = editor.update(cx, |editor, cx| {
|
||||
editor.update_excerpts_task = None;
|
||||
cx.notify();
|
||||
});
|
||||
|
||||
Ok(())
|
||||
}));
|
||||
}
|
||||
|
||||
/// Updates the excerpts in the `BufferDiagnosticsEditor` for a single
/// buffer.
///
/// Pipeline: collect the buffer's diagnostics, bail out early if they are
/// unchanged, group them, render diagnostic blocks per group (ordered by
/// position), derive sorted excerpt ranges from the blocks, then swap the
/// multibuffer's excerpts and the display map's blocks in one editor update.
fn update_excerpts(
    &mut self,
    buffer: Entity<Buffer>,
    window: &mut Window,
    cx: &mut Context<Self>,
) -> Task<Result<()>> {
    let was_empty = self.multibuffer.read(cx).is_empty();
    let multibuffer_context = multibuffer_context_lines(cx);
    let buffer_snapshot = buffer.read(cx).snapshot();
    let buffer_snapshot_max = buffer_snapshot.max_point();
    // Severity filter: warnings included or errors only, translated to the
    // LSP severity scale used by the diagnostics themselves.
    let max_severity = Self::max_diagnostics_severity(self.include_warnings)
        .into_lsp()
        .unwrap_or(lsp::DiagnosticSeverity::WARNING);

    cx.spawn_in(window, async move |buffer_diagnostics_editor, mut cx| {
        // Fetch the diagnostics for the whole of the buffer
        // (`Point::zero()..buffer_snapshot.max_point()`) so we can confirm
        // if the diagnostics changed, if it didn't, early return as there's
        // nothing to update.
        let diagnostics = buffer_snapshot
            .diagnostics_in_range::<_, Anchor>(Point::zero()..buffer_snapshot_max, false)
            .collect::<Vec<_>>();

        let unchanged =
            buffer_diagnostics_editor.update(cx, |buffer_diagnostics_editor, _cx| {
                if buffer_diagnostics_editor
                    .diagnostics_are_unchanged(&diagnostics, &buffer_snapshot)
                {
                    return true;
                }

                buffer_diagnostics_editor.set_diagnostics(&diagnostics);
                return false;
            })?;

        if unchanged {
            return Ok(());
        }

        // Mapping between the Group ID and a vector of DiagnosticEntry.
        let mut grouped: HashMap<usize, Vec<_>> = HashMap::default();
        for entry in diagnostics {
            grouped
                .entry(entry.diagnostic.group_id)
                .or_default()
                .push(DiagnosticEntry {
                    range: entry.range.to_point(&buffer_snapshot),
                    diagnostic: entry.diagnostic,
                })
        }

        let mut blocks: Vec<DiagnosticBlock> = Vec::new();
        for (_, group) in grouped {
            // If the minimum severity of the group is higher than the
            // maximum severity, or it doesn't even have severity, skip this
            // group.
            if group
                .iter()
                .map(|d| d.diagnostic.severity)
                .min()
                .is_none_or(|severity| severity > max_severity)
            {
                continue;
            }

            // Rendering needs `App` access, hence the `cx.update` hop.
            let diagnostic_blocks = cx.update(|_window, cx| {
                DiagnosticRenderer::diagnostic_blocks_for_group(
                    group,
                    buffer_snapshot.remote_id(),
                    Some(Arc::new(buffer_diagnostics_editor.clone())),
                    cx,
                )
            })?;

            // For each of the diagnostic blocks to be displayed in the
            // editor, figure out its index in the list of blocks.
            //
            // The following rules are used to determine the order:
            // 1. Blocks with a lower start position should come first.
            // 2. If two blocks have the same start position, the one with
            //    the higher end position should come first.
            for diagnostic_block in diagnostic_blocks {
                let index = blocks.partition_point(|probe| {
                    match probe
                        .initial_range
                        .start
                        .cmp(&diagnostic_block.initial_range.start)
                    {
                        Ordering::Less => true,
                        Ordering::Greater => false,
                        Ordering::Equal => {
                            probe.initial_range.end > diagnostic_block.initial_range.end
                        }
                    }
                });

                blocks.insert(index, diagnostic_block);
            }
        }

        // Build the excerpt ranges for this specific buffer's diagnostics,
        // so those excerpts can later be used to update the excerpts shown
        // in the editor.
        // This is done by iterating over the list of diagnostic blocks and
        // determine what range does the diagnostic block span.
        let mut excerpt_ranges: Vec<ExcerptRange<Point>> = Vec::new();

        for diagnostic_block in blocks.iter() {
            let excerpt_range = context_range_for_entry(
                diagnostic_block.initial_range.clone(),
                multibuffer_context,
                buffer_snapshot.clone(),
                &mut cx,
            )
            .await;

            // Keep `excerpt_ranges` sorted; the trailing `Ordering::Greater`
            // makes `binary_search_by` always "miss", yielding the insertion
            // index for an element equal to an existing one.
            let index = excerpt_ranges
                .binary_search_by(|probe| {
                    probe
                        .context
                        .start
                        .cmp(&excerpt_range.start)
                        .then(probe.context.end.cmp(&excerpt_range.end))
                        .then(
                            probe
                                .primary
                                .start
                                .cmp(&diagnostic_block.initial_range.start),
                        )
                        .then(probe.primary.end.cmp(&diagnostic_block.initial_range.end))
                        .then(Ordering::Greater)
                })
                .unwrap_or_else(|index| index);

            excerpt_ranges.insert(
                index,
                ExcerptRange {
                    context: excerpt_range,
                    primary: diagnostic_block.initial_range.clone(),
                },
            )
        }

        // Finally, update the editor's content with the new excerpt ranges
        // for this editor, as well as the diagnostic blocks.
        buffer_diagnostics_editor.update_in(cx, |buffer_diagnostics_editor, window, cx| {
            // Remove the list of `CustomBlockId` from the editor's display
            // map, ensuring that if any diagnostics have been solved, the
            // associated block stops being shown.
            let block_ids = buffer_diagnostics_editor.blocks.clone();

            buffer_diagnostics_editor.editor.update(cx, |editor, cx| {
                editor.display_map.update(cx, |display_map, cx| {
                    display_map.remove_blocks(block_ids.into_iter().collect(), cx);
                })
            });

            let (anchor_ranges, _) =
                buffer_diagnostics_editor
                    .multibuffer
                    .update(cx, |multibuffer, cx| {
                        multibuffer.set_excerpt_ranges_for_path(
                            PathKey::for_buffer(&buffer, cx),
                            buffer.clone(),
                            &buffer_snapshot,
                            excerpt_ranges,
                            cx,
                        )
                    });

            // On the first population of the view, select the first
            // diagnostic so keyboard navigation has a starting point.
            if was_empty {
                if let Some(anchor_range) = anchor_ranges.first() {
                    let range_to_select = anchor_range.start..anchor_range.start;

                    buffer_diagnostics_editor.editor.update(cx, |editor, cx| {
                        editor.change_selections(Default::default(), window, cx, |selection| {
                            selection.select_anchor_ranges([range_to_select])
                        })
                    });

                    // If the `BufferDiagnosticsEditor` is currently
                    // focused, move focus to its editor.
                    if buffer_diagnostics_editor.focus_handle.is_focused(window) {
                        buffer_diagnostics_editor
                            .editor
                            .read(cx)
                            .focus_handle(cx)
                            .focus(window);
                    }
                }
            }

            // Cloning the blocks before moving ownership so these can later
            // be used to set the block contents for testing purposes.
            #[cfg(test)]
            let cloned_blocks = blocks.clone();

            // Build new diagnostic blocks to be added to the editor's
            // display map for the new diagnostics. Update the `blocks`
            // property before finishing, to ensure the blocks are removed
            // on the next execution.
            let editor_blocks =
                anchor_ranges
                    .into_iter()
                    .zip(blocks.into_iter())
                    .map(|(anchor, block)| {
                        let editor = buffer_diagnostics_editor.editor.downgrade();

                        BlockProperties {
                            placement: BlockPlacement::Near(anchor.start),
                            height: Some(1),
                            style: BlockStyle::Flex,
                            render: Arc::new(move |block_context| {
                                block.render_block(editor.clone(), block_context)
                            }),
                            priority: 1,
                        }
                    });

            let block_ids = buffer_diagnostics_editor.editor.update(cx, |editor, cx| {
                editor.display_map.update(cx, |display_map, cx| {
                    display_map.insert_blocks(editor_blocks, cx)
                })
            });

            // In order to be able to verify which diagnostic blocks are
            // rendered in the editor, the `set_block_content_for_tests`
            // function must be used, so that the
            // `editor::test::editor_content_with_blocks` function can then
            // be called to fetch these blocks.
            #[cfg(test)]
            {
                for (block_id, block) in block_ids.iter().zip(cloned_blocks.iter()) {
                    let markdown = block.markdown.clone();
                    editor::test::set_block_content_for_tests(
                        &buffer_diagnostics_editor.editor,
                        *block_id,
                        cx,
                        move |cx| {
                            markdown::MarkdownElement::rendered_text(
                                markdown.clone(),
                                cx,
                                editor::hover_popover::diagnostics_markdown_style,
                            )
                        },
                    );
                }
            }

            buffer_diagnostics_editor.blocks = block_ids;
            cx.notify()
        })
    })
}
|
||||
|
||||
/// Replaces the cached diagnostics with a copy of the given entries.
fn set_diagnostics(&mut self, diagnostics: &Vec<DiagnosticEntry<Anchor>>) {
    self.diagnostics = diagnostics.to_vec();
}
|
||||
|
||||
fn diagnostics_are_unchanged(
|
||||
&self,
|
||||
diagnostics: &Vec<DiagnosticEntry<Anchor>>,
|
||||
snapshot: &BufferSnapshot,
|
||||
) -> bool {
|
||||
if self.diagnostics.len() != diagnostics.len() {
|
||||
return false;
|
||||
}
|
||||
|
||||
self.diagnostics
|
||||
.iter()
|
||||
.zip(diagnostics.iter())
|
||||
.all(|(existing, new)| {
|
||||
existing.diagnostic.message == new.diagnostic.message
|
||||
&& existing.diagnostic.severity == new.diagnostic.severity
|
||||
&& existing.diagnostic.is_primary == new.diagnostic.is_primary
|
||||
&& existing.range.to_offset(snapshot) == new.range.to_offset(snapshot)
|
||||
})
|
||||
}
|
||||
|
||||
/// If the `BufferDiagnosticsEditor` itself receives focus while there is
/// content to show, forward focus to the inner editor so the user can
/// immediately interact with the buffer's contents.
fn focus_in(&mut self, window: &mut Window, cx: &mut Context<Self>) {
    if !self.focus_handle.is_focused(window) || self.multibuffer.read(cx).is_empty() {
        return;
    }

    self.editor.focus_handle(cx).focus(window)
}
|
||||
|
||||
/// When focus leaves both this view and its inner editor, refresh the
/// excerpts so the view is up to date next time it is looked at.
fn focus_out(&mut self, window: &mut Window, cx: &mut Context<Self>) {
    let view_focused = self.focus_handle.is_focused(window);
    let editor_focused = self.editor.focus_handle(cx).is_focused(window);

    if !(view_focused || editor_focused) {
        self.update_all_excerpts(window, cx);
    }
}
|
||||
|
||||
/// Toggles whether warning-severity diagnostics are shown, then rebuilds
/// the view's excerpts and blocks.
pub fn toggle_warnings(
    &mut self,
    _: &ToggleWarnings,
    window: &mut Window,
    cx: &mut Context<Self>,
) {
    let include_warnings = !self.include_warnings;
    let max_severity = Self::max_diagnostics_severity(include_warnings);

    // Propagate the new severity cap to the inner editor first.
    self.editor.update(cx, |editor, cx| {
        editor.set_max_diagnostics_severity(max_severity, cx);
    });

    self.include_warnings = include_warnings;
    // Clear the cached diagnostics so the refresh below is not skipped by
    // the "unchanged" early-return in `update_excerpts`.
    self.diagnostics.clear();
    self.update_all_diagnostics(window, cx);
}
|
||||
|
||||
fn max_diagnostics_severity(include_warnings: bool) -> DiagnosticSeverity {
|
||||
match include_warnings {
|
||||
true => DiagnosticSeverity::Warning,
|
||||
false => DiagnosticSeverity::Error,
|
||||
}
|
||||
}
|
||||
|
||||
/// Test-only accessor for the inner editor entity.
#[cfg(test)]
pub fn editor(&self) -> &Entity<Editor> {
    &self.editor
}
|
||||
|
||||
/// Test-only accessor for the cached diagnostic summary.
#[cfg(test)]
pub fn summary(&self) -> &DiagnosticSummary {
    &self.summary
}
|
||||
}
|
||||
|
||||
impl Focusable for BufferDiagnosticsEditor {
    /// The view's own focus handle; focus is forwarded to the inner editor
    /// by `focus_in` when there is content to show.
    fn focus_handle(&self, _: &App) -> FocusHandle {
        self.focus_handle.clone()
    }
}
|
||||
|
||||
// Marker impl: lets observers subscribe to `EditorEvent`s re-emitted by this view.
impl EventEmitter<EditorEvent> for BufferDiagnosticsEditor {}
|
||||
|
||||
/// Workspace-item integration. Most methods delegate to the inner editor /
/// multibuffer so the view behaves like a regular editor tab.
impl Item for BufferDiagnosticsEditor {
    type Event = EditorEvent;

    /// Allows callers to treat this item either as itself or as its inner
    /// `Editor` (e.g. for toolbar/breadcrumb integrations).
    fn act_as_type<'a>(
        &'a self,
        type_id: std::any::TypeId,
        self_handle: &'a Entity<Self>,
        _: &'a App,
    ) -> Option<gpui::AnyView> {
        if type_id == TypeId::of::<Self>() {
            Some(self_handle.to_any())
        } else if type_id == TypeId::of::<Editor>() {
            Some(self.editor.to_any())
        } else {
            None
        }
    }

    fn added_to_workspace(
        &mut self,
        workspace: &mut Workspace,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) {
        // Delegate so the inner editor registers itself with the workspace.
        self.editor.update(cx, |editor, cx| {
            editor.added_to_workspace(workspace, window, cx)
        });
    }

    fn breadcrumb_location(&self, _: &App) -> ToolbarItemLocation {
        ToolbarItemLocation::PrimaryLeft
    }

    fn breadcrumbs(&self, theme: &theme::Theme, cx: &App) -> Option<Vec<BreadcrumbText>> {
        self.editor.breadcrumbs(theme, cx)
    }

    fn can_save(&self, _cx: &App) -> bool {
        true
    }

    /// Splitting opens a fresh `BufferDiagnosticsEditor` over the same path,
    /// buffer, and warning toggle.
    fn clone_on_split(
        &self,
        _workspace_id: Option<workspace::WorkspaceId>,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) -> Option<Entity<Self>>
    where
        Self: Sized,
    {
        Some(cx.new(|cx| {
            BufferDiagnosticsEditor::new(
                self.project_path.clone(),
                self.project.clone(),
                self.buffer.clone(),
                self.include_warnings,
                window,
                cx,
            )
        }))
    }

    fn deactivated(&mut self, window: &mut Window, cx: &mut Context<Self>) {
        self.editor
            .update(cx, |editor, cx| editor.deactivated(window, cx));
    }

    fn for_each_project_item(&self, cx: &App, f: &mut dyn FnMut(EntityId, &dyn ProjectItem)) {
        self.editor.for_each_project_item(cx, f);
    }

    fn has_conflict(&self, cx: &App) -> bool {
        self.multibuffer.read(cx).has_conflict(cx)
    }

    fn has_deleted_file(&self, cx: &App) -> bool {
        self.multibuffer.read(cx).has_deleted_file(cx)
    }

    fn is_dirty(&self, cx: &App) -> bool {
        self.multibuffer.read(cx).is_dirty(cx)
    }

    // Always a multibuffer view, never a single buffer.
    fn is_singleton(&self, _cx: &App) -> bool {
        false
    }

    fn navigate(
        &mut self,
        data: Box<dyn Any>,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) -> bool {
        self.editor
            .update(cx, |editor, cx| editor.navigate(data, window, cx))
    }

    fn reload(
        &mut self,
        project: Entity<Project>,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        self.editor.reload(project, window, cx)
    }

    fn save(
        &mut self,
        options: workspace::item::SaveOptions,
        project: Entity<Project>,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        self.editor.save(options, project, window, cx)
    }

    // "Save as" is never offered for a diagnostics view.
    fn save_as(
        &mut self,
        _project: Entity<Project>,
        _path: ProjectPath,
        _window: &mut Window,
        _cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        unreachable!()
    }

    fn set_nav_history(
        &mut self,
        nav_history: ItemNavHistory,
        _window: &mut Window,
        cx: &mut Context<Self>,
    ) {
        self.editor.update(cx, |editor, _| {
            editor.set_nav_history(Some(nav_history));
        })
    }

    // Builds the content to be displayed in the tab: the file name plus a
    // check mark (clean), error count, and/or warning count.
    fn tab_content(&self, params: TabContentParams, _window: &Window, _cx: &App) -> AnyElement {
        let error_count = self.summary.error_count;
        let warning_count = self.summary.warning_count;
        let label = Label::new(
            self.project_path
                .path
                .file_name()
                .map(|f| f.to_sanitized_string())
                .unwrap_or_else(|| self.project_path.path.to_sanitized_string()),
        );

        h_flex()
            .gap_1()
            .child(label)
            .when(error_count == 0 && warning_count == 0, |parent| {
                parent.child(
                    h_flex()
                        .gap_1()
                        .child(Icon::new(IconName::Check).color(Color::Success)),
                )
            })
            .when(error_count > 0, |parent| {
                parent.child(
                    h_flex()
                        .gap_1()
                        .child(Icon::new(IconName::XCircle).color(Color::Error))
                        .child(Label::new(error_count.to_string()).color(params.text_color())),
                )
            })
            .when(warning_count > 0, |parent| {
                parent.child(
                    h_flex()
                        .gap_1()
                        .child(Icon::new(IconName::Warning).color(Color::Warning))
                        .child(Label::new(warning_count.to_string()).color(params.text_color())),
                )
            })
            .into_any_element()
    }

    fn tab_content_text(&self, _detail: usize, _app: &App) -> SharedString {
        "Buffer Diagnostics".into()
    }

    fn tab_tooltip_text(&self, _: &App) -> Option<SharedString> {
        Some(
            format!(
                "Buffer Diagnostics - {}",
                self.project_path.path.to_sanitized_string()
            )
            .into(),
        )
    }

    fn telemetry_event_text(&self) -> Option<&'static str> {
        Some("Buffer Diagnostics Opened")
    }

    fn to_item_events(event: &EditorEvent, f: impl FnMut(ItemEvent)) {
        Editor::to_item_events(event, f)
    }
}
|
||||
|
||||
impl Render for BufferDiagnosticsEditor {
|
||||
fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
let filename = self.project_path.path.to_sanitized_string();
|
||||
let error_count = self.summary.error_count;
|
||||
let warning_count = match self.include_warnings {
|
||||
true => self.summary.warning_count,
|
||||
false => 0,
|
||||
};
|
||||
|
||||
let child = if error_count + warning_count == 0 {
|
||||
let label = match warning_count {
|
||||
0 => "No problems in",
|
||||
_ => "No errors in",
|
||||
};
|
||||
|
||||
v_flex()
|
||||
.key_context("EmptyPane")
|
||||
.size_full()
|
||||
.gap_1()
|
||||
.justify_center()
|
||||
.items_center()
|
||||
.text_center()
|
||||
.bg(cx.theme().colors().editor_background)
|
||||
.child(
|
||||
div()
|
||||
.h_flex()
|
||||
.child(Label::new(label).color(Color::Muted))
|
||||
.child(
|
||||
Button::new("open-file", filename)
|
||||
.style(ButtonStyle::Transparent)
|
||||
.tooltip(Tooltip::text("Open File"))
|
||||
.on_click(cx.listener(|buffer_diagnostics, _, window, cx| {
|
||||
if let Some(workspace) = window.root::<Workspace>().flatten() {
|
||||
workspace.update(cx, |workspace, cx| {
|
||||
workspace
|
||||
.open_path(
|
||||
buffer_diagnostics.project_path.clone(),
|
||||
None,
|
||||
true,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
.detach_and_log_err(cx);
|
||||
})
|
||||
}
|
||||
})),
|
||||
),
|
||||
)
|
||||
.when(self.summary.warning_count > 0, |div| {
|
||||
let label = match self.summary.warning_count {
|
||||
1 => "Show 1 warning".into(),
|
||||
warning_count => format!("Show {} warnings", warning_count),
|
||||
};
|
||||
|
||||
div.child(
|
||||
Button::new("diagnostics-show-warning-label", label).on_click(cx.listener(
|
||||
|buffer_diagnostics_editor, _, window, cx| {
|
||||
buffer_diagnostics_editor.toggle_warnings(
|
||||
&Default::default(),
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
cx.notify();
|
||||
},
|
||||
)),
|
||||
)
|
||||
})
|
||||
} else {
|
||||
div().size_full().child(self.editor.clone())
|
||||
};
|
||||
|
||||
div()
|
||||
.key_context("Diagnostics")
|
||||
.track_focus(&self.focus_handle(cx))
|
||||
.size_full()
|
||||
.child(child)
|
||||
}
|
||||
}
|
||||
|
||||
/// Toolbar integration via a weak handle; every method degrades gracefully
/// (returns a default / does nothing) when the editor has been dropped.
impl DiagnosticsToolbarEditor for WeakEntity<BufferDiagnosticsEditor> {
    fn include_warnings(&self, cx: &App) -> bool {
        self.read_with(cx, |buffer_diagnostics_editor, _cx| {
            buffer_diagnostics_editor.include_warnings
        })
        .unwrap_or(false)
    }

    // This view refreshes eagerly, so it never reports stale excerpts.
    fn has_stale_excerpts(&self, _cx: &App) -> bool {
        false
    }

    /// `true` while an excerpt-update task is running or any language server
    /// is still producing disk-based diagnostics.
    fn is_updating(&self, cx: &App) -> bool {
        self.read_with(cx, |buffer_diagnostics_editor, cx| {
            buffer_diagnostics_editor.update_excerpts_task.is_some()
                || buffer_diagnostics_editor
                    .project
                    .read(cx)
                    .language_servers_running_disk_based_diagnostics(cx)
                    .next()
                    .is_some()
        })
        .unwrap_or(false)
    }

    fn stop_updating(&self, cx: &mut App) {
        // Dropping the task cancels any in-flight refresh.
        let _ = self.update(cx, |buffer_diagnostics_editor, cx| {
            buffer_diagnostics_editor.update_excerpts_task = None;
            cx.notify();
        });
    }

    fn refresh_diagnostics(&self, window: &mut Window, cx: &mut App) {
        let _ = self.update(cx, |buffer_diagnostics_editor, cx| {
            buffer_diagnostics_editor.update_all_excerpts(window, cx);
        });
    }

    fn toggle_warnings(&self, window: &mut Window, cx: &mut App) {
        let _ = self.update(cx, |buffer_diagnostics_editor, cx| {
            buffer_diagnostics_editor.toggle_warnings(&Default::default(), window, cx);
        });
    }

    /// Returns the cached diagnostics. The buffer id is ignored because this
    /// editor only ever tracks a single buffer.
    fn get_diagnostics_for_buffer(
        &self,
        _buffer_id: text::BufferId,
        cx: &App,
    ) -> Vec<language::DiagnosticEntry<text::Anchor>> {
        self.read_with(cx, |buffer_diagnostics_editor, _cx| {
            buffer_diagnostics_editor.diagnostics.clone()
        })
        .unwrap_or_default()
    }
}
|
||||
@@ -18,7 +18,7 @@ use ui::{
|
||||
};
|
||||
use util::maybe;
|
||||
|
||||
use crate::ProjectDiagnosticsEditor;
|
||||
use crate::toolbar_controls::DiagnosticsToolbarEditor;
|
||||
|
||||
pub struct DiagnosticRenderer;
|
||||
|
||||
@@ -26,7 +26,7 @@ impl DiagnosticRenderer {
|
||||
pub fn diagnostic_blocks_for_group(
|
||||
diagnostic_group: Vec<DiagnosticEntry<Point>>,
|
||||
buffer_id: BufferId,
|
||||
diagnostics_editor: Option<WeakEntity<ProjectDiagnosticsEditor>>,
|
||||
diagnostics_editor: Option<Arc<dyn DiagnosticsToolbarEditor>>,
|
||||
cx: &mut App,
|
||||
) -> Vec<DiagnosticBlock> {
|
||||
let Some(primary_ix) = diagnostic_group
|
||||
@@ -130,6 +130,7 @@ impl editor::DiagnosticRenderer for DiagnosticRenderer {
|
||||
cx: &mut App,
|
||||
) -> Vec<BlockProperties<Anchor>> {
|
||||
let blocks = Self::diagnostic_blocks_for_group(diagnostic_group, buffer_id, None, cx);
|
||||
|
||||
blocks
|
||||
.into_iter()
|
||||
.map(|block| {
|
||||
@@ -182,7 +183,7 @@ pub(crate) struct DiagnosticBlock {
|
||||
pub(crate) initial_range: Range<Point>,
|
||||
pub(crate) severity: DiagnosticSeverity,
|
||||
pub(crate) markdown: Entity<Markdown>,
|
||||
pub(crate) diagnostics_editor: Option<WeakEntity<ProjectDiagnosticsEditor>>,
|
||||
pub(crate) diagnostics_editor: Option<Arc<dyn DiagnosticsToolbarEditor>>,
|
||||
}
|
||||
|
||||
impl DiagnosticBlock {
|
||||
@@ -233,7 +234,7 @@ impl DiagnosticBlock {
|
||||
|
||||
pub fn open_link(
|
||||
editor: &mut Editor,
|
||||
diagnostics_editor: &Option<WeakEntity<ProjectDiagnosticsEditor>>,
|
||||
diagnostics_editor: &Option<Arc<dyn DiagnosticsToolbarEditor>>,
|
||||
link: SharedString,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Editor>,
|
||||
@@ -254,18 +255,10 @@ impl DiagnosticBlock {
|
||||
|
||||
if let Some(diagnostics_editor) = diagnostics_editor {
|
||||
if let Some(diagnostic) = diagnostics_editor
|
||||
.read_with(cx, |diagnostics, _| {
|
||||
diagnostics
|
||||
.diagnostics
|
||||
.get(&buffer_id)
|
||||
.cloned()
|
||||
.unwrap_or_default()
|
||||
.into_iter()
|
||||
.filter(|d| d.diagnostic.group_id == group_id)
|
||||
.nth(ix)
|
||||
})
|
||||
.ok()
|
||||
.flatten()
|
||||
.get_diagnostics_for_buffer(buffer_id, cx)
|
||||
.into_iter()
|
||||
.filter(|d| d.diagnostic.group_id == group_id)
|
||||
.nth(ix)
|
||||
{
|
||||
let multibuffer = editor.buffer().read(cx);
|
||||
let Some(snapshot) = multibuffer
|
||||
@@ -297,9 +290,9 @@ impl DiagnosticBlock {
|
||||
};
|
||||
}
|
||||
|
||||
fn jump_to<T: ToOffset>(
|
||||
fn jump_to<I: ToOffset>(
|
||||
editor: &mut Editor,
|
||||
range: Range<T>,
|
||||
range: Range<I>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Editor>,
|
||||
) {
|
||||
|
||||
@@ -1,12 +1,14 @@
|
||||
pub mod items;
|
||||
mod toolbar_controls;
|
||||
|
||||
mod buffer_diagnostics;
|
||||
mod diagnostic_renderer;
|
||||
|
||||
#[cfg(test)]
|
||||
mod diagnostics_tests;
|
||||
|
||||
use anyhow::Result;
|
||||
use buffer_diagnostics::BufferDiagnosticsEditor;
|
||||
use collections::{BTreeSet, HashMap};
|
||||
use diagnostic_renderer::DiagnosticBlock;
|
||||
use editor::{
|
||||
@@ -36,6 +38,7 @@ use std::{
|
||||
};
|
||||
use text::{BufferId, OffsetRangeExt};
|
||||
use theme::ActiveTheme;
|
||||
use toolbar_controls::DiagnosticsToolbarEditor;
|
||||
pub use toolbar_controls::ToolbarControls;
|
||||
use ui::{Icon, IconName, Label, h_flex, prelude::*};
|
||||
use util::ResultExt;
|
||||
@@ -64,6 +67,7 @@ impl Global for IncludeWarnings {}
|
||||
pub fn init(cx: &mut App) {
|
||||
editor::set_diagnostic_renderer(diagnostic_renderer::DiagnosticRenderer {}, cx);
|
||||
cx.observe_new(ProjectDiagnosticsEditor::register).detach();
|
||||
cx.observe_new(BufferDiagnosticsEditor::register).detach();
|
||||
}
|
||||
|
||||
pub(crate) struct ProjectDiagnosticsEditor {
|
||||
@@ -85,6 +89,7 @@ pub(crate) struct ProjectDiagnosticsEditor {
|
||||
impl EventEmitter<EditorEvent> for ProjectDiagnosticsEditor {}
|
||||
|
||||
const DIAGNOSTICS_UPDATE_DELAY: Duration = Duration::from_millis(50);
|
||||
const DIAGNOSTICS_SUMMARY_UPDATE_DELAY: Duration = Duration::from_millis(30);
|
||||
|
||||
impl Render for ProjectDiagnosticsEditor {
|
||||
fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
@@ -144,7 +149,7 @@ impl Render for ProjectDiagnosticsEditor {
|
||||
}
|
||||
|
||||
impl ProjectDiagnosticsEditor {
|
||||
fn register(
|
||||
pub fn register(
|
||||
workspace: &mut Workspace,
|
||||
_window: Option<&mut Window>,
|
||||
_: &mut Context<Workspace>,
|
||||
@@ -160,7 +165,7 @@ impl ProjectDiagnosticsEditor {
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
let project_event_subscription =
|
||||
cx.subscribe_in(&project_handle, window, |this, project, event, window, cx| match event {
|
||||
cx.subscribe_in(&project_handle, window, |this, _project, event, window, cx| match event {
|
||||
project::Event::DiskBasedDiagnosticsStarted { .. } => {
|
||||
cx.notify();
|
||||
}
|
||||
@@ -173,13 +178,12 @@ impl ProjectDiagnosticsEditor {
|
||||
paths,
|
||||
} => {
|
||||
this.paths_to_update.extend(paths.clone());
|
||||
let project = project.clone();
|
||||
this.diagnostic_summary_update = cx.spawn(async move |this, cx| {
|
||||
cx.background_executor()
|
||||
.timer(Duration::from_millis(30))
|
||||
.timer(DIAGNOSTICS_SUMMARY_UPDATE_DELAY)
|
||||
.await;
|
||||
this.update(cx, |this, cx| {
|
||||
this.summary = project.read(cx).diagnostic_summary(false, cx);
|
||||
this.update_diagnostic_summary(cx);
|
||||
})
|
||||
.log_err();
|
||||
});
|
||||
@@ -326,6 +330,7 @@ impl ProjectDiagnosticsEditor {
|
||||
let is_active = workspace
|
||||
.active_item(cx)
|
||||
.is_some_and(|item| item.item_id() == existing.item_id());
|
||||
|
||||
workspace.activate_item(&existing, true, !is_active, window, cx);
|
||||
} else {
|
||||
let workspace_handle = cx.entity().downgrade();
|
||||
@@ -383,22 +388,25 @@ impl ProjectDiagnosticsEditor {
|
||||
/// currently have diagnostics or are currently present in this view.
|
||||
fn update_all_excerpts(&mut self, window: &mut Window, cx: &mut Context<Self>) {
|
||||
self.project.update(cx, |project, cx| {
|
||||
let mut paths = project
|
||||
let mut project_paths = project
|
||||
.diagnostic_summaries(false, cx)
|
||||
.map(|(path, _, _)| path)
|
||||
.map(|(project_path, _, _)| project_path)
|
||||
.collect::<BTreeSet<_>>();
|
||||
|
||||
self.multibuffer.update(cx, |multibuffer, cx| {
|
||||
for buffer in multibuffer.all_buffers() {
|
||||
if let Some(file) = buffer.read(cx).file() {
|
||||
paths.insert(ProjectPath {
|
||||
project_paths.insert(ProjectPath {
|
||||
path: file.path().clone(),
|
||||
worktree_id: file.worktree_id(cx),
|
||||
});
|
||||
}
|
||||
}
|
||||
});
|
||||
self.paths_to_update = paths;
|
||||
|
||||
self.paths_to_update = project_paths;
|
||||
});
|
||||
|
||||
self.update_stale_excerpts(window, cx);
|
||||
}
|
||||
|
||||
@@ -428,6 +436,7 @@ impl ProjectDiagnosticsEditor {
|
||||
let was_empty = self.multibuffer.read(cx).is_empty();
|
||||
let buffer_snapshot = buffer.read(cx).snapshot();
|
||||
let buffer_id = buffer_snapshot.remote_id();
|
||||
|
||||
let max_severity = if self.include_warnings {
|
||||
lsp::DiagnosticSeverity::WARNING
|
||||
} else {
|
||||
@@ -441,6 +450,7 @@ impl ProjectDiagnosticsEditor {
|
||||
false,
|
||||
)
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let unchanged = this.update(cx, |this, _| {
|
||||
if this.diagnostics.get(&buffer_id).is_some_and(|existing| {
|
||||
this.diagnostics_are_unchanged(existing, &diagnostics, &buffer_snapshot)
|
||||
@@ -475,7 +485,7 @@ impl ProjectDiagnosticsEditor {
|
||||
crate::diagnostic_renderer::DiagnosticRenderer::diagnostic_blocks_for_group(
|
||||
group,
|
||||
buffer_snapshot.remote_id(),
|
||||
Some(this.clone()),
|
||||
Some(Arc::new(this.clone())),
|
||||
cx,
|
||||
)
|
||||
})?;
|
||||
@@ -505,6 +515,7 @@ impl ProjectDiagnosticsEditor {
|
||||
cx,
|
||||
)
|
||||
.await;
|
||||
|
||||
let i = excerpt_ranges
|
||||
.binary_search_by(|probe| {
|
||||
probe
|
||||
@@ -574,6 +585,7 @@ impl ProjectDiagnosticsEditor {
|
||||
priority: 1,
|
||||
}
|
||||
});
|
||||
|
||||
let block_ids = this.editor.update(cx, |editor, cx| {
|
||||
editor.display_map.update(cx, |display_map, cx| {
|
||||
display_map.insert_blocks(editor_blocks, cx)
|
||||
@@ -604,6 +616,10 @@ impl ProjectDiagnosticsEditor {
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
fn update_diagnostic_summary(&mut self, cx: &mut Context<Self>) {
|
||||
self.summary = self.project.read(cx).diagnostic_summary(false, cx);
|
||||
}
|
||||
}
|
||||
|
||||
impl Focusable for ProjectDiagnosticsEditor {
|
||||
@@ -812,6 +828,68 @@ impl Item for ProjectDiagnosticsEditor {
|
||||
}
|
||||
}
|
||||
|
||||
impl DiagnosticsToolbarEditor for WeakEntity<ProjectDiagnosticsEditor> {
|
||||
fn include_warnings(&self, cx: &App) -> bool {
|
||||
self.read_with(cx, |project_diagnostics_editor, _cx| {
|
||||
project_diagnostics_editor.include_warnings
|
||||
})
|
||||
.unwrap_or(false)
|
||||
}
|
||||
|
||||
fn has_stale_excerpts(&self, cx: &App) -> bool {
|
||||
self.read_with(cx, |project_diagnostics_editor, _cx| {
|
||||
!project_diagnostics_editor.paths_to_update.is_empty()
|
||||
})
|
||||
.unwrap_or(false)
|
||||
}
|
||||
|
||||
fn is_updating(&self, cx: &App) -> bool {
|
||||
self.read_with(cx, |project_diagnostics_editor, cx| {
|
||||
project_diagnostics_editor.update_excerpts_task.is_some()
|
||||
|| project_diagnostics_editor
|
||||
.project
|
||||
.read(cx)
|
||||
.language_servers_running_disk_based_diagnostics(cx)
|
||||
.next()
|
||||
.is_some()
|
||||
})
|
||||
.unwrap_or(false)
|
||||
}
|
||||
|
||||
fn stop_updating(&self, cx: &mut App) {
|
||||
let _ = self.update(cx, |project_diagnostics_editor, cx| {
|
||||
project_diagnostics_editor.update_excerpts_task = None;
|
||||
cx.notify();
|
||||
});
|
||||
}
|
||||
|
||||
fn refresh_diagnostics(&self, window: &mut Window, cx: &mut App) {
|
||||
let _ = self.update(cx, |project_diagnostics_editor, cx| {
|
||||
project_diagnostics_editor.update_all_excerpts(window, cx);
|
||||
});
|
||||
}
|
||||
|
||||
fn toggle_warnings(&self, window: &mut Window, cx: &mut App) {
|
||||
let _ = self.update(cx, |project_diagnostics_editor, cx| {
|
||||
project_diagnostics_editor.toggle_warnings(&Default::default(), window, cx);
|
||||
});
|
||||
}
|
||||
|
||||
fn get_diagnostics_for_buffer(
|
||||
&self,
|
||||
buffer_id: text::BufferId,
|
||||
cx: &App,
|
||||
) -> Vec<language::DiagnosticEntry<text::Anchor>> {
|
||||
self.read_with(cx, |project_diagnostics_editor, _cx| {
|
||||
project_diagnostics_editor
|
||||
.diagnostics
|
||||
.get(&buffer_id)
|
||||
.cloned()
|
||||
.unwrap_or_default()
|
||||
})
|
||||
.unwrap_or_default()
|
||||
}
|
||||
}
|
||||
const DIAGNOSTIC_EXPANSION_ROW_LIMIT: u32 = 32;
|
||||
|
||||
async fn context_range_for_entry(
|
||||
|
||||
@@ -1567,6 +1567,440 @@ async fn go_to_diagnostic_with_severity(cx: &mut TestAppContext) {
|
||||
cx.assert_editor_state(indoc! {"error ˇwarning info hint"});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_buffer_diagnostics(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
// We'll be creating two different files, both with diagnostics, so we can
|
||||
// later verify that, since the `BufferDiagnosticsEditor` only shows
|
||||
// diagnostics for the provided path, the diagnostics for the other file
|
||||
// will not be shown, contrary to what happens with
|
||||
// `ProjectDiagnosticsEditor`.
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
fs.insert_tree(
|
||||
path!("/test"),
|
||||
json!({
|
||||
"main.rs": "
|
||||
fn main() {
|
||||
let x = vec![];
|
||||
let y = vec![];
|
||||
a(x);
|
||||
b(y);
|
||||
c(y);
|
||||
d(x);
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
"other.rs": "
|
||||
fn other() {
|
||||
let unused = 42;
|
||||
undefined_function();
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
|
||||
let project = Project::test(fs.clone(), [path!("/test").as_ref()], cx).await;
|
||||
let window = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
|
||||
let cx = &mut VisualTestContext::from_window(*window, cx);
|
||||
let project_path = project::ProjectPath {
|
||||
worktree_id: project.read_with(cx, |project, cx| {
|
||||
project.worktrees(cx).next().unwrap().read(cx).id()
|
||||
}),
|
||||
path: Arc::from(Path::new("main.rs")),
|
||||
};
|
||||
let buffer = project
|
||||
.update(cx, |project, cx| {
|
||||
project.open_buffer(project_path.clone(), cx)
|
||||
})
|
||||
.await
|
||||
.ok();
|
||||
|
||||
// Create the diagnostics for `main.rs`.
|
||||
let language_server_id = LanguageServerId(0);
|
||||
let uri = lsp::Uri::from_file_path(path!("/test/main.rs")).unwrap();
|
||||
let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
|
||||
|
||||
lsp_store.update(cx, |lsp_store, cx| {
|
||||
lsp_store.update_diagnostics(language_server_id, lsp::PublishDiagnosticsParams {
|
||||
uri: uri.clone(),
|
||||
diagnostics: vec![
|
||||
lsp::Diagnostic{
|
||||
range: lsp::Range::new(lsp::Position::new(5, 6), lsp::Position::new(5, 7)),
|
||||
severity: Some(lsp::DiagnosticSeverity::WARNING),
|
||||
message: "use of moved value\nvalue used here after move".to_string(),
|
||||
related_information: Some(vec![
|
||||
lsp::DiagnosticRelatedInformation {
|
||||
location: lsp::Location::new(uri.clone(), lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 9))),
|
||||
message: "move occurs because `y` has type `Vec<char>`, which does not implement the `Copy` trait".to_string()
|
||||
},
|
||||
lsp::DiagnosticRelatedInformation {
|
||||
location: lsp::Location::new(uri.clone(), lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 7))),
|
||||
message: "value moved here".to_string()
|
||||
},
|
||||
]),
|
||||
..Default::default()
|
||||
},
|
||||
lsp::Diagnostic{
|
||||
range: lsp::Range::new(lsp::Position::new(6, 6), lsp::Position::new(6, 7)),
|
||||
severity: Some(lsp::DiagnosticSeverity::ERROR),
|
||||
message: "use of moved value\nvalue used here after move".to_string(),
|
||||
related_information: Some(vec![
|
||||
lsp::DiagnosticRelatedInformation {
|
||||
location: lsp::Location::new(uri.clone(), lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9))),
|
||||
message: "move occurs because `x` has type `Vec<char>`, which does not implement the `Copy` trait".to_string()
|
||||
},
|
||||
lsp::DiagnosticRelatedInformation {
|
||||
location: lsp::Location::new(uri.clone(), lsp::Range::new(lsp::Position::new(3, 6), lsp::Position::new(3, 7))),
|
||||
message: "value moved here".to_string()
|
||||
},
|
||||
]),
|
||||
..Default::default()
|
||||
}
|
||||
],
|
||||
version: None
|
||||
}, None, DiagnosticSourceKind::Pushed, &[], cx).unwrap();
|
||||
|
||||
// Create diagnostics for other.rs to ensure that the file and
|
||||
// diagnostics are not included in `BufferDiagnosticsEditor` when it is
|
||||
// deployed for main.rs.
|
||||
lsp_store.update_diagnostics(language_server_id, lsp::PublishDiagnosticsParams {
|
||||
uri: lsp::Uri::from_file_path(path!("/test/other.rs")).unwrap(),
|
||||
diagnostics: vec![
|
||||
lsp::Diagnostic{
|
||||
range: lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 14)),
|
||||
severity: Some(lsp::DiagnosticSeverity::WARNING),
|
||||
message: "unused variable: `unused`".to_string(),
|
||||
..Default::default()
|
||||
},
|
||||
lsp::Diagnostic{
|
||||
range: lsp::Range::new(lsp::Position::new(2, 4), lsp::Position::new(2, 22)),
|
||||
severity: Some(lsp::DiagnosticSeverity::ERROR),
|
||||
message: "cannot find function `undefined_function` in this scope".to_string(),
|
||||
..Default::default()
|
||||
}
|
||||
],
|
||||
version: None
|
||||
}, None, DiagnosticSourceKind::Pushed, &[], cx).unwrap();
|
||||
});
|
||||
|
||||
let buffer_diagnostics = window.build_entity(cx, |window, cx| {
|
||||
BufferDiagnosticsEditor::new(
|
||||
project_path.clone(),
|
||||
project.clone(),
|
||||
buffer,
|
||||
true,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
});
|
||||
let editor = buffer_diagnostics.update(cx, |buffer_diagnostics, _| {
|
||||
buffer_diagnostics.editor().clone()
|
||||
});
|
||||
|
||||
// Since the excerpt updates is handled by a background task, we need to
|
||||
// wait a little bit to ensure that the buffer diagnostic's editor content
|
||||
// is rendered.
|
||||
cx.executor()
|
||||
.advance_clock(DIAGNOSTICS_UPDATE_DELAY + Duration::from_millis(10));
|
||||
|
||||
pretty_assertions::assert_eq!(
|
||||
editor_content_with_blocks(&editor, cx),
|
||||
indoc::indoc! {
|
||||
"§ main.rs
|
||||
§ -----
|
||||
fn main() {
|
||||
let x = vec![];
|
||||
§ move occurs because `x` has type `Vec<char>`, which does not implement
|
||||
§ the `Copy` trait (back)
|
||||
let y = vec![];
|
||||
§ move occurs because `y` has type `Vec<char>`, which does not implement
|
||||
§ the `Copy` trait
|
||||
a(x); § value moved here
|
||||
b(y); § value moved here
|
||||
c(y);
|
||||
§ use of moved value
|
||||
§ value used here after move
|
||||
d(x);
|
||||
§ use of moved value
|
||||
§ value used here after move
|
||||
§ hint: move occurs because `x` has type `Vec<char>`, which does not
|
||||
§ implement the `Copy` trait
|
||||
}"
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_buffer_diagnostics_without_warnings(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
fs.insert_tree(
|
||||
path!("/test"),
|
||||
json!({
|
||||
"main.rs": "
|
||||
fn main() {
|
||||
let x = vec![];
|
||||
let y = vec![];
|
||||
a(x);
|
||||
b(y);
|
||||
c(y);
|
||||
d(x);
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
|
||||
let project = Project::test(fs.clone(), [path!("/test").as_ref()], cx).await;
|
||||
let window = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
|
||||
let cx = &mut VisualTestContext::from_window(*window, cx);
|
||||
let project_path = project::ProjectPath {
|
||||
worktree_id: project.read_with(cx, |project, cx| {
|
||||
project.worktrees(cx).next().unwrap().read(cx).id()
|
||||
}),
|
||||
path: Arc::from(Path::new("main.rs")),
|
||||
};
|
||||
let buffer = project
|
||||
.update(cx, |project, cx| {
|
||||
project.open_buffer(project_path.clone(), cx)
|
||||
})
|
||||
.await
|
||||
.ok();
|
||||
|
||||
let language_server_id = LanguageServerId(0);
|
||||
let uri = lsp::Uri::from_file_path(path!("/test/main.rs")).unwrap();
|
||||
let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
|
||||
|
||||
lsp_store.update(cx, |lsp_store, cx| {
|
||||
lsp_store.update_diagnostics(language_server_id, lsp::PublishDiagnosticsParams {
|
||||
uri: uri.clone(),
|
||||
diagnostics: vec![
|
||||
lsp::Diagnostic{
|
||||
range: lsp::Range::new(lsp::Position::new(5, 6), lsp::Position::new(5, 7)),
|
||||
severity: Some(lsp::DiagnosticSeverity::WARNING),
|
||||
message: "use of moved value\nvalue used here after move".to_string(),
|
||||
related_information: Some(vec![
|
||||
lsp::DiagnosticRelatedInformation {
|
||||
location: lsp::Location::new(uri.clone(), lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 9))),
|
||||
message: "move occurs because `y` has type `Vec<char>`, which does not implement the `Copy` trait".to_string()
|
||||
},
|
||||
lsp::DiagnosticRelatedInformation {
|
||||
location: lsp::Location::new(uri.clone(), lsp::Range::new(lsp::Position::new(4, 6), lsp::Position::new(4, 7))),
|
||||
message: "value moved here".to_string()
|
||||
},
|
||||
]),
|
||||
..Default::default()
|
||||
},
|
||||
lsp::Diagnostic{
|
||||
range: lsp::Range::new(lsp::Position::new(6, 6), lsp::Position::new(6, 7)),
|
||||
severity: Some(lsp::DiagnosticSeverity::ERROR),
|
||||
message: "use of moved value\nvalue used here after move".to_string(),
|
||||
related_information: Some(vec![
|
||||
lsp::DiagnosticRelatedInformation {
|
||||
location: lsp::Location::new(uri.clone(), lsp::Range::new(lsp::Position::new(1, 8), lsp::Position::new(1, 9))),
|
||||
message: "move occurs because `x` has type `Vec<char>`, which does not implement the `Copy` trait".to_string()
|
||||
},
|
||||
lsp::DiagnosticRelatedInformation {
|
||||
location: lsp::Location::new(uri.clone(), lsp::Range::new(lsp::Position::new(3, 6), lsp::Position::new(3, 7))),
|
||||
message: "value moved here".to_string()
|
||||
},
|
||||
]),
|
||||
..Default::default()
|
||||
}
|
||||
],
|
||||
version: None
|
||||
}, None, DiagnosticSourceKind::Pushed, &[], cx).unwrap();
|
||||
});
|
||||
|
||||
let include_warnings = false;
|
||||
let buffer_diagnostics = window.build_entity(cx, |window, cx| {
|
||||
BufferDiagnosticsEditor::new(
|
||||
project_path.clone(),
|
||||
project.clone(),
|
||||
buffer,
|
||||
include_warnings,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
let editor = buffer_diagnostics.update(cx, |buffer_diagnostics, _cx| {
|
||||
buffer_diagnostics.editor().clone()
|
||||
});
|
||||
|
||||
// Since the excerpt updates is handled by a background task, we need to
|
||||
// wait a little bit to ensure that the buffer diagnostic's editor content
|
||||
// is rendered.
|
||||
cx.executor()
|
||||
.advance_clock(DIAGNOSTICS_UPDATE_DELAY + Duration::from_millis(10));
|
||||
|
||||
pretty_assertions::assert_eq!(
|
||||
editor_content_with_blocks(&editor, cx),
|
||||
indoc::indoc! {
|
||||
"§ main.rs
|
||||
§ -----
|
||||
fn main() {
|
||||
let x = vec![];
|
||||
§ move occurs because `x` has type `Vec<char>`, which does not implement
|
||||
§ the `Copy` trait (back)
|
||||
let y = vec![];
|
||||
a(x); § value moved here
|
||||
b(y);
|
||||
c(y);
|
||||
d(x);
|
||||
§ use of moved value
|
||||
§ value used here after move
|
||||
§ hint: move occurs because `x` has type `Vec<char>`, which does not
|
||||
§ implement the `Copy` trait
|
||||
}"
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_buffer_diagnostics_multiple_servers(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
fs.insert_tree(
|
||||
path!("/test"),
|
||||
json!({
|
||||
"main.rs": "
|
||||
fn main() {
|
||||
let x = vec![];
|
||||
let y = vec![];
|
||||
a(x);
|
||||
b(y);
|
||||
c(y);
|
||||
d(x);
|
||||
}
|
||||
"
|
||||
.unindent(),
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
|
||||
let project = Project::test(fs.clone(), [path!("/test").as_ref()], cx).await;
|
||||
let window = cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
|
||||
let cx = &mut VisualTestContext::from_window(*window, cx);
|
||||
let project_path = project::ProjectPath {
|
||||
worktree_id: project.read_with(cx, |project, cx| {
|
||||
project.worktrees(cx).next().unwrap().read(cx).id()
|
||||
}),
|
||||
path: Arc::from(Path::new("main.rs")),
|
||||
};
|
||||
let buffer = project
|
||||
.update(cx, |project, cx| {
|
||||
project.open_buffer(project_path.clone(), cx)
|
||||
})
|
||||
.await
|
||||
.ok();
|
||||
|
||||
// Create the diagnostics for `main.rs`.
|
||||
// Two warnings are being created, one for each language server, in order to
|
||||
// assert that both warnings are rendered in the editor.
|
||||
let language_server_id_a = LanguageServerId(0);
|
||||
let language_server_id_b = LanguageServerId(1);
|
||||
let uri = lsp::Uri::from_file_path(path!("/test/main.rs")).unwrap();
|
||||
let lsp_store = project.read_with(cx, |project, _| project.lsp_store());
|
||||
|
||||
lsp_store.update(cx, |lsp_store, cx| {
|
||||
lsp_store
|
||||
.update_diagnostics(
|
||||
language_server_id_a,
|
||||
lsp::PublishDiagnosticsParams {
|
||||
uri: uri.clone(),
|
||||
diagnostics: vec![lsp::Diagnostic {
|
||||
range: lsp::Range::new(lsp::Position::new(5, 6), lsp::Position::new(5, 7)),
|
||||
severity: Some(lsp::DiagnosticSeverity::WARNING),
|
||||
message: "use of moved value\nvalue used here after move".to_string(),
|
||||
related_information: None,
|
||||
..Default::default()
|
||||
}],
|
||||
version: None,
|
||||
},
|
||||
None,
|
||||
DiagnosticSourceKind::Pushed,
|
||||
&[],
|
||||
cx,
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
lsp_store
|
||||
.update_diagnostics(
|
||||
language_server_id_b,
|
||||
lsp::PublishDiagnosticsParams {
|
||||
uri: uri.clone(),
|
||||
diagnostics: vec![lsp::Diagnostic {
|
||||
range: lsp::Range::new(lsp::Position::new(6, 6), lsp::Position::new(6, 7)),
|
||||
severity: Some(lsp::DiagnosticSeverity::WARNING),
|
||||
message: "use of moved value\nvalue used here after move".to_string(),
|
||||
related_information: None,
|
||||
..Default::default()
|
||||
}],
|
||||
version: None,
|
||||
},
|
||||
None,
|
||||
DiagnosticSourceKind::Pushed,
|
||||
&[],
|
||||
cx,
|
||||
)
|
||||
.unwrap();
|
||||
});
|
||||
|
||||
let buffer_diagnostics = window.build_entity(cx, |window, cx| {
|
||||
BufferDiagnosticsEditor::new(
|
||||
project_path.clone(),
|
||||
project.clone(),
|
||||
buffer,
|
||||
true,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
});
|
||||
let editor = buffer_diagnostics.update(cx, |buffer_diagnostics, _| {
|
||||
buffer_diagnostics.editor().clone()
|
||||
});
|
||||
|
||||
// Since the excerpt updates is handled by a background task, we need to
|
||||
// wait a little bit to ensure that the buffer diagnostic's editor content
|
||||
// is rendered.
|
||||
cx.executor()
|
||||
.advance_clock(DIAGNOSTICS_UPDATE_DELAY + Duration::from_millis(10));
|
||||
|
||||
pretty_assertions::assert_eq!(
|
||||
editor_content_with_blocks(&editor, cx),
|
||||
indoc::indoc! {
|
||||
"§ main.rs
|
||||
§ -----
|
||||
a(x);
|
||||
b(y);
|
||||
c(y);
|
||||
§ use of moved value
|
||||
§ value used here after move
|
||||
d(x);
|
||||
§ use of moved value
|
||||
§ value used here after move
|
||||
}"
|
||||
}
|
||||
);
|
||||
|
||||
buffer_diagnostics.update(cx, |buffer_diagnostics, _cx| {
|
||||
assert_eq!(
|
||||
*buffer_diagnostics.summary(),
|
||||
DiagnosticSummary {
|
||||
warning_count: 2,
|
||||
error_count: 0
|
||||
}
|
||||
);
|
||||
})
|
||||
}
|
||||
|
||||
fn init_test(cx: &mut TestAppContext) {
|
||||
cx.update(|cx| {
|
||||
zlog::init_test();
|
||||
|
||||
@@ -1,33 +1,56 @@
|
||||
use crate::{ProjectDiagnosticsEditor, ToggleDiagnosticsRefresh};
|
||||
use gpui::{Context, Entity, EventEmitter, ParentElement, Render, WeakEntity, Window};
|
||||
use crate::{BufferDiagnosticsEditor, ProjectDiagnosticsEditor, ToggleDiagnosticsRefresh};
|
||||
use gpui::{Context, EventEmitter, ParentElement, Render, Window};
|
||||
use language::DiagnosticEntry;
|
||||
use text::{Anchor, BufferId};
|
||||
use ui::prelude::*;
|
||||
use ui::{IconButton, IconButtonShape, IconName, Tooltip};
|
||||
use workspace::{ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView, item::ItemHandle};
|
||||
|
||||
pub struct ToolbarControls {
|
||||
editor: Option<WeakEntity<ProjectDiagnosticsEditor>>,
|
||||
editor: Option<Box<dyn DiagnosticsToolbarEditor>>,
|
||||
}
|
||||
|
||||
pub(crate) trait DiagnosticsToolbarEditor: Send + Sync {
|
||||
/// Informs the toolbar whether warnings are included in the diagnostics.
|
||||
fn include_warnings(&self, cx: &App) -> bool;
|
||||
/// Toggles whether warning diagnostics should be displayed by the
|
||||
/// diagnostics editor.
|
||||
fn toggle_warnings(&self, window: &mut Window, cx: &mut App);
|
||||
/// Indicates whether any of the excerpts displayed by the diagnostics
|
||||
/// editor are stale.
|
||||
fn has_stale_excerpts(&self, cx: &App) -> bool;
|
||||
/// Indicates whether the diagnostics editor is currently updating the
|
||||
/// diagnostics.
|
||||
fn is_updating(&self, cx: &App) -> bool;
|
||||
/// Requests that the diagnostics editor stop updating the diagnostics.
|
||||
fn stop_updating(&self, cx: &mut App);
|
||||
/// Requests that the diagnostics editor updates the displayed diagnostics
|
||||
/// with the latest information.
|
||||
fn refresh_diagnostics(&self, window: &mut Window, cx: &mut App);
|
||||
/// Returns a list of diagnostics for the provided buffer id.
|
||||
fn get_diagnostics_for_buffer(
|
||||
&self,
|
||||
buffer_id: BufferId,
|
||||
cx: &App,
|
||||
) -> Vec<DiagnosticEntry<Anchor>>;
|
||||
}
|
||||
|
||||
impl Render for ToolbarControls {
|
||||
fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
let mut include_warnings = false;
|
||||
let mut has_stale_excerpts = false;
|
||||
let mut include_warnings = false;
|
||||
let mut is_updating = false;
|
||||
|
||||
if let Some(editor) = self.diagnostics() {
|
||||
let diagnostics = editor.read(cx);
|
||||
include_warnings = diagnostics.include_warnings;
|
||||
has_stale_excerpts = !diagnostics.paths_to_update.is_empty();
|
||||
is_updating = diagnostics.update_excerpts_task.is_some()
|
||||
|| diagnostics
|
||||
.project
|
||||
.read(cx)
|
||||
.language_servers_running_disk_based_diagnostics(cx)
|
||||
.next()
|
||||
.is_some();
|
||||
match &self.editor {
|
||||
Some(editor) => {
|
||||
include_warnings = editor.include_warnings(cx);
|
||||
has_stale_excerpts = editor.has_stale_excerpts(cx);
|
||||
is_updating = editor.is_updating(cx);
|
||||
}
|
||||
None => {}
|
||||
}
|
||||
|
||||
let tooltip = if include_warnings {
|
||||
let warning_tooltip = if include_warnings {
|
||||
"Exclude Warnings"
|
||||
} else {
|
||||
"Include Warnings"
|
||||
@@ -52,11 +75,12 @@ impl Render for ToolbarControls {
|
||||
&ToggleDiagnosticsRefresh,
|
||||
))
|
||||
.on_click(cx.listener(move |toolbar_controls, _, _, cx| {
|
||||
if let Some(diagnostics) = toolbar_controls.diagnostics() {
|
||||
diagnostics.update(cx, |diagnostics, cx| {
|
||||
diagnostics.update_excerpts_task = None;
|
||||
match toolbar_controls.editor() {
|
||||
Some(editor) => {
|
||||
editor.stop_updating(cx);
|
||||
cx.notify();
|
||||
});
|
||||
}
|
||||
None => {}
|
||||
}
|
||||
})),
|
||||
)
|
||||
@@ -71,12 +95,11 @@ impl Render for ToolbarControls {
|
||||
&ToggleDiagnosticsRefresh,
|
||||
))
|
||||
.on_click(cx.listener({
|
||||
move |toolbar_controls, _, window, cx| {
|
||||
if let Some(diagnostics) = toolbar_controls.diagnostics() {
|
||||
diagnostics.update(cx, move |diagnostics, cx| {
|
||||
diagnostics.update_all_excerpts(window, cx);
|
||||
});
|
||||
}
|
||||
move |toolbar_controls, _, window, cx| match toolbar_controls
|
||||
.editor()
|
||||
{
|
||||
Some(editor) => editor.refresh_diagnostics(window, cx),
|
||||
None => {}
|
||||
}
|
||||
})),
|
||||
)
|
||||
@@ -86,13 +109,10 @@ impl Render for ToolbarControls {
|
||||
IconButton::new("toggle-warnings", IconName::Warning)
|
||||
.icon_color(warning_color)
|
||||
.shape(IconButtonShape::Square)
|
||||
.tooltip(Tooltip::text(tooltip))
|
||||
.on_click(cx.listener(|this, _, window, cx| {
|
||||
if let Some(editor) = this.diagnostics() {
|
||||
editor.update(cx, |editor, cx| {
|
||||
editor.toggle_warnings(&Default::default(), window, cx);
|
||||
});
|
||||
}
|
||||
.tooltip(Tooltip::text(warning_tooltip))
|
||||
.on_click(cx.listener(|this, _, window, cx| match &this.editor {
|
||||
Some(editor) => editor.toggle_warnings(window, cx),
|
||||
None => {}
|
||||
})),
|
||||
)
|
||||
}
|
||||
@@ -109,7 +129,10 @@ impl ToolbarItemView for ToolbarControls {
|
||||
) -> ToolbarItemLocation {
|
||||
if let Some(pane_item) = active_pane_item.as_ref() {
|
||||
if let Some(editor) = pane_item.downcast::<ProjectDiagnosticsEditor>() {
|
||||
self.editor = Some(editor.downgrade());
|
||||
self.editor = Some(Box::new(editor.downgrade()));
|
||||
ToolbarItemLocation::PrimaryRight
|
||||
} else if let Some(editor) = pane_item.downcast::<BufferDiagnosticsEditor>() {
|
||||
self.editor = Some(Box::new(editor.downgrade()));
|
||||
ToolbarItemLocation::PrimaryRight
|
||||
} else {
|
||||
ToolbarItemLocation::Hidden
|
||||
@@ -131,7 +154,7 @@ impl ToolbarControls {
|
||||
ToolbarControls { editor: None }
|
||||
}
|
||||
|
||||
fn diagnostics(&self) -> Option<Entity<ProjectDiagnosticsEditor>> {
|
||||
self.editor.as_ref()?.upgrade()
|
||||
fn editor(&self) -> Option<&dyn DiagnosticsToolbarEditor> {
|
||||
self.editor.as_deref()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -92,6 +92,7 @@ uuid.workspace = true
|
||||
workspace.workspace = true
|
||||
zed_actions.workspace = true
|
||||
workspace-hack.workspace = true
|
||||
postage.workspace = true
|
||||
|
||||
[dev-dependencies]
|
||||
ctor.workspace = true
|
||||
|
||||
@@ -177,17 +177,15 @@ use snippet::Snippet;
|
||||
use std::{
|
||||
any::TypeId,
|
||||
borrow::Cow,
|
||||
cell::OnceCell,
|
||||
cell::RefCell,
|
||||
cell::{OnceCell, RefCell},
|
||||
cmp::{self, Ordering, Reverse},
|
||||
iter::Peekable,
|
||||
mem,
|
||||
num::NonZeroU32,
|
||||
ops::Not,
|
||||
ops::{ControlFlow, Deref, DerefMut, Range, RangeInclusive},
|
||||
ops::{ControlFlow, Deref, DerefMut, Not, Range, RangeInclusive},
|
||||
path::{Path, PathBuf},
|
||||
rc::Rc,
|
||||
sync::Arc,
|
||||
sync::{Arc, LazyLock},
|
||||
time::{Duration, Instant},
|
||||
};
|
||||
use task::{ResolvedTask, RunnableTag, TaskTemplate, TaskVariables};
|
||||
@@ -236,6 +234,21 @@ pub(crate) const EDIT_PREDICTION_KEY_CONTEXT: &str = "edit_prediction";
|
||||
pub(crate) const EDIT_PREDICTION_CONFLICT_KEY_CONTEXT: &str = "edit_prediction_conflict";
|
||||
pub(crate) const MINIMAP_FONT_SIZE: AbsoluteLength = AbsoluteLength::Pixels(px(2.));
|
||||
|
||||
#[derive(Clone, Debug, Eq, PartialEq)]
|
||||
pub struct LastCursorPosition {
|
||||
pub path: PathBuf,
|
||||
pub worktree_path: Arc<Path>,
|
||||
pub point: Point,
|
||||
}
|
||||
|
||||
pub static LAST_CURSOR_POSITION_WATCH: LazyLock<(
|
||||
Mutex<postage::watch::Sender<Option<LastCursorPosition>>>,
|
||||
postage::watch::Receiver<Option<LastCursorPosition>>,
|
||||
)> = LazyLock::new(|| {
|
||||
let (sender, receiver) = postage::watch::channel();
|
||||
(Mutex::new(sender), receiver)
|
||||
});
|
||||
|
||||
pub type RenderDiffHunkControlsFn = Arc<
|
||||
dyn Fn(
|
||||
u32,
|
||||
@@ -3064,10 +3077,28 @@ impl Editor {
|
||||
let new_cursor_position = newest_selection.head();
|
||||
let selection_start = newest_selection.start;
|
||||
|
||||
let new_cursor_point = new_cursor_position.to_point(buffer);
|
||||
if let Some(project) = self.project()
|
||||
&& let Some((path, worktree_path)) =
|
||||
self.file_at(new_cursor_point, cx).and_then(|file| {
|
||||
file.as_local().and_then(|file| {
|
||||
let worktree =
|
||||
project.read(cx).worktree_for_id(file.worktree_id(cx), cx)?;
|
||||
Some((file.abs_path(cx), worktree.read(cx).abs_path()))
|
||||
})
|
||||
})
|
||||
{
|
||||
*LAST_CURSOR_POSITION_WATCH.0.lock().borrow_mut() = Some(LastCursorPosition {
|
||||
path,
|
||||
worktree_path,
|
||||
point: new_cursor_point,
|
||||
});
|
||||
}
|
||||
|
||||
if effects.nav_history.is_none() || effects.nav_history == Some(true) {
|
||||
self.push_to_nav_history(
|
||||
*old_cursor_position,
|
||||
Some(new_cursor_position.to_point(buffer)),
|
||||
Some(new_cursor_point),
|
||||
false,
|
||||
effects.nav_history == Some(true),
|
||||
cx,
|
||||
@@ -18998,6 +19029,8 @@ impl Editor {
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the project path for the editor's buffer, if any buffer is
|
||||
/// opened in the editor.
|
||||
pub fn project_path(&self, cx: &App) -> Option<ProjectPath> {
|
||||
if let Some(buffer) = self.buffer.read(cx).as_singleton() {
|
||||
buffer.read(cx).project_path(cx)
|
||||
|
||||
@@ -4,7 +4,7 @@
|
||||
use super::{Bias, DisplayPoint, DisplaySnapshot, SelectionGoal, ToDisplayPoint};
|
||||
use crate::{DisplayRow, EditorStyle, ToOffset, ToPoint, scroll::ScrollAnchor};
|
||||
use gpui::{Pixels, WindowTextSystem};
|
||||
use language::Point;
|
||||
use language::{CharClassifier, Point};
|
||||
use multi_buffer::{MultiBufferRow, MultiBufferSnapshot};
|
||||
use serde::Deserialize;
|
||||
use workspace::searchable::Direction;
|
||||
@@ -405,15 +405,18 @@ pub fn previous_subword_start(map: &DisplaySnapshot, point: DisplayPoint) -> Dis
|
||||
let classifier = map.buffer_snapshot.char_classifier_at(raw_point);
|
||||
|
||||
find_preceding_boundary_display_point(map, point, FindRange::MultiLine, |left, right| {
|
||||
let is_word_start =
|
||||
classifier.kind(left) != classifier.kind(right) && !right.is_whitespace();
|
||||
let is_subword_start = classifier.is_word('-') && left == '-' && right != '-'
|
||||
|| left == '_' && right != '_'
|
||||
|| left.is_lowercase() && right.is_uppercase();
|
||||
is_word_start || is_subword_start || left == '\n'
|
||||
is_subword_start(left, right, &classifier) || left == '\n'
|
||||
})
|
||||
}
|
||||
|
||||
pub fn is_subword_start(left: char, right: char, classifier: &CharClassifier) -> bool {
|
||||
let is_word_start = classifier.kind(left) != classifier.kind(right) && !right.is_whitespace();
|
||||
let is_subword_start = classifier.is_word('-') && left == '-' && right != '-'
|
||||
|| left == '_' && right != '_'
|
||||
|| left.is_lowercase() && right.is_uppercase();
|
||||
is_word_start || is_subword_start
|
||||
}
|
||||
|
||||
/// Returns a position of the next word boundary, where a word character is defined as either
|
||||
/// uppercase letter, lowercase letter, '_' character or language-specific word character (like '-' in CSS).
|
||||
pub fn next_word_end(map: &DisplaySnapshot, point: DisplayPoint) -> DisplayPoint {
|
||||
@@ -463,15 +466,19 @@ pub fn next_subword_end(map: &DisplaySnapshot, point: DisplayPoint) -> DisplayPo
|
||||
let classifier = map.buffer_snapshot.char_classifier_at(raw_point);
|
||||
|
||||
find_boundary(map, point, FindRange::MultiLine, |left, right| {
|
||||
let is_word_end =
|
||||
(classifier.kind(left) != classifier.kind(right)) && !classifier.is_whitespace(left);
|
||||
let is_subword_end = classifier.is_word('-') && left != '-' && right == '-'
|
||||
|| left != '_' && right == '_'
|
||||
|| left.is_lowercase() && right.is_uppercase();
|
||||
is_word_end || is_subword_end || right == '\n'
|
||||
is_subword_end(left, right, &classifier) || right == '\n'
|
||||
})
|
||||
}
|
||||
|
||||
pub fn is_subword_end(left: char, right: char, classifier: &CharClassifier) -> bool {
|
||||
let is_word_end =
|
||||
(classifier.kind(left) != classifier.kind(right)) && !classifier.is_whitespace(left);
|
||||
let is_subword_end = classifier.is_word('-') && left != '-' && right == '-'
|
||||
|| left != '_' && right == '_'
|
||||
|| left.is_lowercase() && right.is_uppercase();
|
||||
is_word_end || is_subword_end
|
||||
}
|
||||
|
||||
/// Returns a position of the start of the current paragraph, where a paragraph
|
||||
/// is defined as a run of non-blank lines.
|
||||
pub fn start_of_paragraph(
|
||||
|
||||
@@ -218,23 +218,6 @@ impl AsyncApp {
|
||||
Some(read(app.try_global()?, &app))
|
||||
}
|
||||
|
||||
/// Reads the global state of the specified type, passing it to the given callback.
|
||||
/// A default value is assigned if a global of this type has not yet been assigned.
|
||||
///
|
||||
/// # Errors
|
||||
/// If the app has ben dropped this returns an error.
|
||||
pub fn try_read_default_global<G: Global + Default, R>(
|
||||
&self,
|
||||
read: impl FnOnce(&G, &App) -> R,
|
||||
) -> Result<R> {
|
||||
let app = self.app.upgrade().context("app was released")?;
|
||||
let mut app = app.borrow_mut();
|
||||
app.update(|cx| {
|
||||
cx.default_global::<G>();
|
||||
});
|
||||
Ok(read(app.try_global().context("app was released")?, &app))
|
||||
}
|
||||
|
||||
/// A convenience method for [`App::update_global`](BorrowAppContext::update_global)
|
||||
/// for updating the global state of the specified type.
|
||||
pub fn update_global<G: Global, R>(
|
||||
|
||||
@@ -590,6 +590,11 @@ pub trait LspAdapter: 'static + Send + Sync {
|
||||
"Not implemented for this adapter. This method should only be called on the default JSON language server adapter"
|
||||
);
|
||||
}
|
||||
|
||||
/// True for the extension adapter and false otherwise.
|
||||
fn is_extension(&self) -> bool {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
async fn try_fetch_server_binary<L: LspAdapter + 'static + Send + Sync + ?Sized>(
|
||||
@@ -2270,6 +2275,10 @@ impl LspAdapter for FakeLspAdapter {
|
||||
let label_for_completion = self.label_for_completion.as_ref()?;
|
||||
label_for_completion(item, language)
|
||||
}
|
||||
|
||||
fn is_extension(&self) -> bool {
|
||||
false
|
||||
}
|
||||
}
|
||||
|
||||
fn get_capture_indices(query: &Query, captures: &mut [(&str, &mut Option<u32>)]) {
|
||||
|
||||
@@ -374,14 +374,23 @@ impl LanguageRegistry {
|
||||
pub fn register_available_lsp_adapter(
|
||||
&self,
|
||||
name: LanguageServerName,
|
||||
load: impl Fn() -> Arc<dyn LspAdapter> + 'static + Send + Sync,
|
||||
adapter: Arc<dyn LspAdapter>,
|
||||
) {
|
||||
self.state.write().available_lsp_adapters.insert(
|
||||
let mut state = self.state.write();
|
||||
|
||||
if adapter.is_extension()
|
||||
&& let Some(existing_adapter) = state.all_lsp_adapters.get(&name)
|
||||
&& !existing_adapter.adapter.is_extension()
|
||||
{
|
||||
log::warn!(
|
||||
"not registering extension-provided language server {name:?}, since a builtin language server exists with that name",
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
state.available_lsp_adapters.insert(
|
||||
name,
|
||||
Arc::new(move || {
|
||||
let lsp_adapter = load();
|
||||
CachedLspAdapter::new(lsp_adapter)
|
||||
}),
|
||||
Arc::new(move || CachedLspAdapter::new(adapter.clone())),
|
||||
);
|
||||
}
|
||||
|
||||
@@ -396,13 +405,21 @@ impl LanguageRegistry {
|
||||
Some(load_lsp_adapter())
|
||||
}
|
||||
|
||||
pub fn register_lsp_adapter(
|
||||
&self,
|
||||
language_name: LanguageName,
|
||||
adapter: Arc<dyn LspAdapter>,
|
||||
) -> Arc<CachedLspAdapter> {
|
||||
let cached = CachedLspAdapter::new(adapter);
|
||||
pub fn register_lsp_adapter(&self, language_name: LanguageName, adapter: Arc<dyn LspAdapter>) {
|
||||
let mut state = self.state.write();
|
||||
|
||||
if adapter.is_extension()
|
||||
&& let Some(existing_adapter) = state.all_lsp_adapters.get(&adapter.name())
|
||||
&& !existing_adapter.adapter.is_extension()
|
||||
{
|
||||
log::warn!(
|
||||
"not registering extension-provided language server {:?} for language {language_name:?}, since a builtin language server exists with that name",
|
||||
adapter.name(),
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
let cached = CachedLspAdapter::new(adapter);
|
||||
state
|
||||
.lsp_adapters
|
||||
.entry(language_name)
|
||||
@@ -411,8 +428,6 @@ impl LanguageRegistry {
|
||||
state
|
||||
.all_lsp_adapters
|
||||
.insert(cached.name.clone(), cached.clone());
|
||||
|
||||
cached
|
||||
}
|
||||
|
||||
/// Register a fake language server and adapter
|
||||
|
||||
@@ -398,6 +398,10 @@ impl LspAdapter for ExtensionLspAdapter {
|
||||
|
||||
Ok(labels_from_extension(labels, language))
|
||||
}
|
||||
|
||||
fn is_extension(&self) -> bool {
|
||||
true
|
||||
}
|
||||
}
|
||||
|
||||
fn labels_from_extension(
|
||||
|
||||
30
crates/language_onboarding/Cargo.toml
Normal file
30
crates/language_onboarding/Cargo.toml
Normal file
@@ -0,0 +1,30 @@
|
||||
[package]
|
||||
name = "language_onboarding"
|
||||
version = "0.1.0"
|
||||
edition.workspace = true
|
||||
publish.workspace = true
|
||||
license = "GPL-3.0-or-later"
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
[lib]
|
||||
path = "src/python.rs"
|
||||
|
||||
[features]
|
||||
default = []
|
||||
|
||||
[dependencies]
|
||||
db.workspace = true
|
||||
editor.workspace = true
|
||||
gpui.workspace = true
|
||||
project.workspace = true
|
||||
ui.workspace = true
|
||||
workspace.workspace = true
|
||||
workspace-hack.workspace = true
|
||||
|
||||
# Uncomment other workspace dependencies as needed
|
||||
# assistant.workspace = true
|
||||
# client.workspace = true
|
||||
# project.workspace = true
|
||||
# settings.workspace = true
|
||||
1
crates/language_onboarding/LICENSE-GPL
Symbolic link
1
crates/language_onboarding/LICENSE-GPL
Symbolic link
@@ -0,0 +1 @@
|
||||
../../LICENSE-GPL
|
||||
95
crates/language_onboarding/src/python.rs
Normal file
95
crates/language_onboarding/src/python.rs
Normal file
@@ -0,0 +1,95 @@
|
||||
use db::kvp::Dismissable;
|
||||
use editor::Editor;
|
||||
use gpui::{Context, EventEmitter, Subscription};
|
||||
use ui::{
|
||||
Banner, Button, Clickable, Color, FluentBuilder as _, IconButton, IconName,
|
||||
InteractiveElement as _, IntoElement, Label, LabelCommon, LabelSize, ParentElement as _,
|
||||
Render, Styled as _, Window, div, h_flex, v_flex,
|
||||
};
|
||||
use workspace::{ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView, Workspace};
|
||||
|
||||
pub struct BasedPyrightBanner {
|
||||
dismissed: bool,
|
||||
have_basedpyright: bool,
|
||||
_subscriptions: [Subscription; 1],
|
||||
}
|
||||
|
||||
impl Dismissable for BasedPyrightBanner {
|
||||
const KEY: &str = "basedpyright-banner";
|
||||
}
|
||||
|
||||
impl BasedPyrightBanner {
|
||||
pub fn new(workspace: &Workspace, cx: &mut Context<Self>) -> Self {
|
||||
let subscription = cx.subscribe(workspace.project(), |this, _, event, _| {
|
||||
if let project::Event::LanguageServerAdded(_, name, _) = event
|
||||
&& name == "basedpyright"
|
||||
{
|
||||
this.have_basedpyright = true;
|
||||
}
|
||||
});
|
||||
let dismissed = Self::dismissed();
|
||||
Self {
|
||||
dismissed,
|
||||
have_basedpyright: false,
|
||||
_subscriptions: [subscription],
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl EventEmitter<ToolbarItemEvent> for BasedPyrightBanner {}
|
||||
|
||||
impl Render for BasedPyrightBanner {
|
||||
fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
div()
|
||||
.id("basedpyright-banner")
|
||||
.when(!self.dismissed && self.have_basedpyright, |el| {
|
||||
el.child(
|
||||
Banner::new()
|
||||
.severity(ui::Severity::Info)
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_2()
|
||||
.child(v_flex()
|
||||
.child("Basedpyright is now the only default language server for Python")
|
||||
.child(Label::new("We have disabled PyRight and pylsp by default. They can be re-enabled in your settings.").size(LabelSize::XSmall).color(Color::Muted))
|
||||
)
|
||||
.child(
|
||||
Button::new("learn-more", "Learn More")
|
||||
.icon(IconName::ArrowUpRight)
|
||||
.on_click(|_, _, cx| {
|
||||
cx.open_url("https://zed.dev/docs/languages/python")
|
||||
}),
|
||||
),
|
||||
)
|
||||
.action_slot(IconButton::new("dismiss", IconName::Close).on_click(
|
||||
cx.listener(|this, _, _, cx| {
|
||||
this.dismissed = true;
|
||||
Self::set_dismissed(true, cx);
|
||||
cx.notify();
|
||||
}),
|
||||
))
|
||||
.into_any_element(),
|
||||
)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl ToolbarItemView for BasedPyrightBanner {
|
||||
fn set_active_pane_item(
|
||||
&mut self,
|
||||
active_pane_item: Option<&dyn workspace::ItemHandle>,
|
||||
_window: &mut ui::Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> ToolbarItemLocation {
|
||||
if let Some(item) = active_pane_item
|
||||
&& let Some(editor) = item.act_as::<Editor>(cx)
|
||||
&& let Some(path) = editor.update(cx, |editor, cx| editor.target_file_abs_path(cx))
|
||||
&& let Some(file_name) = path.file_name()
|
||||
&& file_name.as_encoded_bytes().ends_with(".py".as_bytes())
|
||||
{
|
||||
return ToolbarItemLocation::Secondary;
|
||||
}
|
||||
|
||||
ToolbarItemLocation::Hidden
|
||||
}
|
||||
}
|
||||
@@ -42,7 +42,6 @@ async-trait.workspace = true
|
||||
chrono.workspace = true
|
||||
collections.workspace = true
|
||||
dap.workspace = true
|
||||
feature_flags.workspace = true
|
||||
futures.workspace = true
|
||||
gpui.workspace = true
|
||||
http_client.workspace = true
|
||||
|
||||
@@ -73,3 +73,9 @@
|
||||
arguments: (arguments (template_string (string_fragment) @injection.content
|
||||
(#set! injection.language "graphql")))
|
||||
)
|
||||
|
||||
(call_expression
|
||||
function: (identifier) @_name(#match? @_name "^iso$")
|
||||
arguments: (arguments (template_string (string_fragment) @injection.content
|
||||
(#set! injection.language "isograph")))
|
||||
)
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
use anyhow::Context as _;
|
||||
use feature_flags::{FeatureFlag, FeatureFlagAppExt as _};
|
||||
use gpui::{App, SharedString, UpdateGlobal};
|
||||
use node_runtime::NodeRuntime;
|
||||
use python::PyprojectTomlManifestProvider;
|
||||
@@ -54,12 +53,6 @@ pub static LANGUAGE_GIT_COMMIT: std::sync::LazyLock<Arc<Language>> =
|
||||
))
|
||||
});
|
||||
|
||||
struct BasedPyrightFeatureFlag;
|
||||
|
||||
impl FeatureFlag for BasedPyrightFeatureFlag {
|
||||
const NAME: &'static str = "basedpyright";
|
||||
}
|
||||
|
||||
pub fn init(languages: Arc<LanguageRegistry>, node: NodeRuntime, cx: &mut App) {
|
||||
#[cfg(feature = "load-grammars")]
|
||||
languages.register_native_grammars([
|
||||
@@ -174,7 +167,7 @@ pub fn init(languages: Arc<LanguageRegistry>, node: NodeRuntime, cx: &mut App) {
|
||||
},
|
||||
LanguageInfo {
|
||||
name: "python",
|
||||
adapters: vec![python_lsp_adapter, py_lsp_adapter],
|
||||
adapters: vec![basedpyright_lsp_adapter],
|
||||
context: Some(python_context_provider),
|
||||
toolchain: Some(python_toolchain_provider),
|
||||
manifest_name: Some(SharedString::new_static("pyproject.toml").into()),
|
||||
@@ -240,17 +233,6 @@ pub fn init(languages: Arc<LanguageRegistry>, node: NodeRuntime, cx: &mut App) {
|
||||
);
|
||||
}
|
||||
|
||||
let mut basedpyright_lsp_adapter = Some(basedpyright_lsp_adapter);
|
||||
cx.observe_flag::<BasedPyrightFeatureFlag, _>({
|
||||
let languages = languages.clone();
|
||||
move |enabled, _| {
|
||||
if enabled && let Some(adapter) = basedpyright_lsp_adapter.take() {
|
||||
languages.register_available_lsp_adapter(adapter.name(), move || adapter.clone());
|
||||
}
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
|
||||
// Register globally available language servers.
|
||||
//
|
||||
// This will allow users to add support for a built-in language server (e.g., Tailwind)
|
||||
@@ -267,27 +249,19 @@ pub fn init(languages: Arc<LanguageRegistry>, node: NodeRuntime, cx: &mut App) {
|
||||
// ```
|
||||
languages.register_available_lsp_adapter(
|
||||
LanguageServerName("tailwindcss-language-server".into()),
|
||||
{
|
||||
let adapter = tailwind_adapter.clone();
|
||||
move || adapter.clone()
|
||||
},
|
||||
tailwind_adapter.clone(),
|
||||
);
|
||||
languages.register_available_lsp_adapter(LanguageServerName("eslint".into()), {
|
||||
let adapter = eslint_adapter.clone();
|
||||
move || adapter.clone()
|
||||
});
|
||||
languages.register_available_lsp_adapter(LanguageServerName("vtsls".into()), {
|
||||
let adapter = vtsls_adapter;
|
||||
move || adapter.clone()
|
||||
});
|
||||
languages.register_available_lsp_adapter(
|
||||
LanguageServerName("eslint".into()),
|
||||
eslint_adapter.clone(),
|
||||
);
|
||||
languages.register_available_lsp_adapter(LanguageServerName("vtsls".into()), vtsls_adapter);
|
||||
languages.register_available_lsp_adapter(
|
||||
LanguageServerName("typescript-language-server".into()),
|
||||
{
|
||||
let adapter = typescript_lsp_adapter;
|
||||
move || adapter.clone()
|
||||
},
|
||||
typescript_lsp_adapter,
|
||||
);
|
||||
|
||||
languages.register_available_lsp_adapter(python_lsp_adapter.name(), python_lsp_adapter);
|
||||
languages.register_available_lsp_adapter(py_lsp_adapter.name(), py_lsp_adapter);
|
||||
// Register Tailwind for the existing languages that should have it by default.
|
||||
//
|
||||
// This can be driven by the `language_servers` setting once we have a way for
|
||||
|
||||
@@ -35,7 +35,7 @@ use std::{
|
||||
sync::Arc,
|
||||
};
|
||||
use task::{ShellKind, TaskTemplate, TaskTemplates, VariableName};
|
||||
use util::ResultExt;
|
||||
use util::{ResultExt, maybe};
|
||||
|
||||
pub(crate) struct PyprojectTomlManifestProvider;
|
||||
|
||||
@@ -1619,23 +1619,37 @@ impl LspAdapter for BasedPyrightLspAdapter {
|
||||
}
|
||||
}
|
||||
|
||||
// Always set the python interpreter path
|
||||
// Get or create the python section
|
||||
let python = object
|
||||
// Set both pythonPath and defaultInterpreterPath for compatibility
|
||||
if let Some(python) = object
|
||||
.entry("python")
|
||||
.or_insert(Value::Object(serde_json::Map::default()))
|
||||
.as_object_mut()
|
||||
.unwrap();
|
||||
|
||||
// Set both pythonPath and defaultInterpreterPath for compatibility
|
||||
python.insert(
|
||||
"pythonPath".to_owned(),
|
||||
Value::String(interpreter_path.clone()),
|
||||
);
|
||||
python.insert(
|
||||
"defaultInterpreterPath".to_owned(),
|
||||
Value::String(interpreter_path),
|
||||
);
|
||||
{
|
||||
python.insert(
|
||||
"pythonPath".to_owned(),
|
||||
Value::String(interpreter_path.clone()),
|
||||
);
|
||||
python.insert(
|
||||
"defaultInterpreterPath".to_owned(),
|
||||
Value::String(interpreter_path),
|
||||
);
|
||||
}
|
||||
// Basedpyright by default uses `strict` type checking, we tone it down as to not surpris users
|
||||
maybe!({
|
||||
let basedpyright = object
|
||||
.entry("basedpyright")
|
||||
.or_insert(Value::Object(serde_json::Map::default()));
|
||||
let analysis = basedpyright
|
||||
.as_object_mut()?
|
||||
.entry("analysis")
|
||||
.or_insert(Value::Object(serde_json::Map::default()));
|
||||
if let serde_json::map::Entry::Vacant(v) =
|
||||
analysis.as_object_mut()?.entry("typeCheckingMode")
|
||||
{
|
||||
v.insert(Value::String("standard".to_owned()));
|
||||
}
|
||||
Some(())
|
||||
});
|
||||
}
|
||||
|
||||
user_settings
|
||||
|
||||
@@ -73,3 +73,9 @@
|
||||
arguments: (arguments (template_string (string_fragment) @injection.content
|
||||
(#set! injection.language "graphql")))
|
||||
)
|
||||
|
||||
(call_expression
|
||||
function: (identifier) @_name(#match? @_name "^iso$")
|
||||
arguments: (arguments (template_string (string_fragment) @injection.content
|
||||
(#set! injection.language "isograph")))
|
||||
)
|
||||
|
||||
@@ -78,6 +78,12 @@
|
||||
(#set! injection.language "graphql")))
|
||||
)
|
||||
|
||||
(call_expression
|
||||
function: (identifier) @_name(#match? @_name "^iso$")
|
||||
arguments: (arguments (template_string (string_fragment) @injection.content
|
||||
(#set! injection.language "isograph")))
|
||||
)
|
||||
|
||||
;; Angular Component template injection
|
||||
(call_expression
|
||||
function: [
|
||||
|
||||
@@ -22,10 +22,10 @@ test-support = ["collections/test-support", "gpui/test-support"]
|
||||
[dependencies]
|
||||
anyhow.workspace = true
|
||||
async-trait.workspace = true
|
||||
audio.workspace = true
|
||||
collections.workspace = true
|
||||
cpal.workspace = true
|
||||
futures.workspace = true
|
||||
audio.workspace = true
|
||||
gpui = { workspace = true, features = ["screen-capture", "x11", "wayland", "windows-manifest"] }
|
||||
gpui_tokio.workspace = true
|
||||
http_client_tls.workspace = true
|
||||
@@ -35,15 +35,14 @@ log.workspace = true
|
||||
nanoid.workspace = true
|
||||
parking_lot.workspace = true
|
||||
postage.workspace = true
|
||||
rodio = { workspace = true, features = ["wav_output", "recording"] }
|
||||
serde.workspace = true
|
||||
serde_urlencoded.workspace = true
|
||||
settings.workspace = true
|
||||
smallvec.workspace = true
|
||||
settings.workspace = true
|
||||
tokio-tungstenite.workspace = true
|
||||
util.workspace = true
|
||||
workspace-hack.workspace = true
|
||||
|
||||
rodio = { workspace = true, features = ["wav_output"] }
|
||||
|
||||
[target.'cfg(not(any(all(target_os = "windows", target_env = "gnu"), target_os = "freebsd")))'.dependencies]
|
||||
libwebrtc = { rev = "5f04705ac3f356350ae31534ffbc476abc9ea83d", git = "https://github.com/zed-industries/livekit-rust-sdks" }
|
||||
livekit = { rev = "5f04705ac3f356350ae31534ffbc476abc9ea83d", git = "https://github.com/zed-industries/livekit-rust-sdks", features = [
|
||||
|
||||
@@ -255,10 +255,7 @@ impl LivekitWindow {
|
||||
} else {
|
||||
let room = self.room.clone();
|
||||
cx.spawn_in(window, async move |this, cx| {
|
||||
let (publication, stream) = room
|
||||
.publish_local_microphone_track("test_user".to_string(), false, cx)
|
||||
.await
|
||||
.unwrap();
|
||||
let (publication, stream) = room.publish_local_microphone_track(cx).await.unwrap();
|
||||
this.update(cx, |this, cx| {
|
||||
this.microphone_track = Some(publication);
|
||||
this.microphone_stream = Some(stream);
|
||||
|
||||
@@ -97,13 +97,9 @@ impl Room {
|
||||
|
||||
pub async fn publish_local_microphone_track(
|
||||
&self,
|
||||
user_name: String,
|
||||
is_staff: bool,
|
||||
cx: &mut AsyncApp,
|
||||
) -> Result<(LocalTrackPublication, playback::AudioStream)> {
|
||||
let (track, stream) = self
|
||||
.playback
|
||||
.capture_local_microphone_track(user_name, is_staff, &cx)?;
|
||||
let (track, stream) = self.playback.capture_local_microphone_track()?;
|
||||
let publication = self
|
||||
.local_participant()
|
||||
.publish_track(
|
||||
@@ -133,7 +129,7 @@ impl Room {
|
||||
cx: &mut App,
|
||||
) -> Result<playback::AudioStream> {
|
||||
if AudioSettings::get_global(cx).rodio_audio {
|
||||
info!("Using experimental.rodio_audio audio pipeline for output");
|
||||
info!("Using experimental.rodio_audio audio pipeline");
|
||||
playback::play_remote_audio_track(&track.0, cx)
|
||||
} else {
|
||||
Ok(self.playback.play_remote_audio_track(&track.0))
|
||||
|
||||
@@ -1,12 +1,10 @@
|
||||
use anyhow::{Context as _, Result};
|
||||
|
||||
use audio::{AudioSettings, CHANNEL_COUNT, SAMPLE_RATE};
|
||||
use cpal::traits::{DeviceTrait, StreamTrait as _};
|
||||
use futures::channel::mpsc::UnboundedSender;
|
||||
use futures::{Stream, StreamExt as _};
|
||||
use gpui::{
|
||||
AsyncApp, BackgroundExecutor, ScreenCaptureFrame, ScreenCaptureSource, ScreenCaptureStream,
|
||||
Task,
|
||||
BackgroundExecutor, ScreenCaptureFrame, ScreenCaptureSource, ScreenCaptureStream, Task,
|
||||
};
|
||||
use libwebrtc::native::{apm, audio_mixer, audio_resampler};
|
||||
use livekit::track;
|
||||
@@ -19,11 +17,8 @@ use livekit::webrtc::{
|
||||
video_source::{RtcVideoSource, VideoResolution, native::NativeVideoSource},
|
||||
video_stream::native::NativeVideoStream,
|
||||
};
|
||||
use log::info;
|
||||
use parking_lot::Mutex;
|
||||
use rodio::Source;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::Settings;
|
||||
use std::cell::RefCell;
|
||||
use std::sync::Weak;
|
||||
use std::sync::atomic::{AtomicBool, AtomicI32, Ordering};
|
||||
@@ -41,28 +36,27 @@ pub(crate) struct AudioStack {
|
||||
next_ssrc: AtomicI32,
|
||||
}
|
||||
|
||||
// NOTE: We use WebRTC's mixer which only supports
|
||||
// 16kHz, 32kHz and 48kHz. As 48 is the most common "next step up"
|
||||
// for audio output devices like speakers/bluetooth, we just hard-code
|
||||
// this; and downsample when we need to.
|
||||
const SAMPLE_RATE: u32 = 48000;
|
||||
const NUM_CHANNELS: u32 = 2;
|
||||
|
||||
pub(crate) fn play_remote_audio_track(
|
||||
track: &livekit::track::RemoteAudioTrack,
|
||||
cx: &mut gpui::App,
|
||||
) -> Result<AudioStream> {
|
||||
let stop_handle = Arc::new(AtomicBool::new(false));
|
||||
let stop_handle_clone = stop_handle.clone();
|
||||
let stream = source::LiveKitStream::new(cx.background_executor(), track);
|
||||
|
||||
let stream = stream
|
||||
let stream = source::LiveKitStream::new(cx.background_executor(), track)
|
||||
.stoppable()
|
||||
.periodic_access(Duration::from_millis(50), move |s| {
|
||||
if stop_handle.load(Ordering::Relaxed) {
|
||||
s.stop();
|
||||
}
|
||||
});
|
||||
|
||||
let speaker: Speaker = serde_urlencoded::from_str(&track.name()).unwrap_or_else(|_| Speaker {
|
||||
name: track.name(),
|
||||
is_staff: false,
|
||||
});
|
||||
audio::Audio::play_voip_stream(stream, speaker.name, speaker.is_staff, cx)
|
||||
.context("Could not play audio")?;
|
||||
audio::Audio::play_source(stream, cx).context("Could not play audio")?;
|
||||
|
||||
let on_drop = util::defer(move || {
|
||||
stop_handle_clone.store(true, Ordering::Relaxed);
|
||||
@@ -96,8 +90,8 @@ impl AudioStack {
|
||||
let next_ssrc = self.next_ssrc.fetch_add(1, Ordering::Relaxed);
|
||||
let source = AudioMixerSource {
|
||||
ssrc: next_ssrc,
|
||||
sample_rate: SAMPLE_RATE.get(),
|
||||
num_channels: CHANNEL_COUNT.get() as u32,
|
||||
sample_rate: SAMPLE_RATE,
|
||||
num_channels: NUM_CHANNELS,
|
||||
buffer: Arc::default(),
|
||||
};
|
||||
self.mixer.lock().add_source(source.clone());
|
||||
@@ -137,7 +131,7 @@ impl AudioStack {
|
||||
let apm = self.apm.clone();
|
||||
let mixer = self.mixer.clone();
|
||||
async move {
|
||||
Self::play_output(apm, mixer, SAMPLE_RATE.get(), CHANNEL_COUNT.get().into())
|
||||
Self::play_output(apm, mixer, SAMPLE_RATE, NUM_CHANNELS)
|
||||
.await
|
||||
.log_err();
|
||||
}
|
||||
@@ -148,26 +142,17 @@ impl AudioStack {
|
||||
|
||||
pub(crate) fn capture_local_microphone_track(
|
||||
&self,
|
||||
user_name: String,
|
||||
is_staff: bool,
|
||||
cx: &AsyncApp,
|
||||
) -> Result<(crate::LocalAudioTrack, AudioStream)> {
|
||||
let source = NativeAudioSource::new(
|
||||
// n.b. this struct's options are always ignored, noise cancellation is provided by apm.
|
||||
AudioSourceOptions::default(),
|
||||
SAMPLE_RATE.get(),
|
||||
CHANNEL_COUNT.get().into(),
|
||||
SAMPLE_RATE,
|
||||
NUM_CHANNELS,
|
||||
10,
|
||||
);
|
||||
|
||||
let track_name = serde_urlencoded::to_string(Speaker {
|
||||
name: user_name,
|
||||
is_staff,
|
||||
})
|
||||
.context("Could not encode user information in track name")?;
|
||||
|
||||
let track = track::LocalAudioTrack::create_audio_track(
|
||||
&track_name,
|
||||
"microphone",
|
||||
RtcAudioSource::Native(source.clone()),
|
||||
);
|
||||
|
||||
@@ -181,26 +166,9 @@ impl AudioStack {
|
||||
}
|
||||
}
|
||||
});
|
||||
let rodio_pipeline =
|
||||
AudioSettings::try_read_global(cx, |setting| setting.rodio_audio).unwrap_or_default();
|
||||
let capture_task = if rodio_pipeline {
|
||||
info!("Using experimental.rodio_audio audio pipeline");
|
||||
let voip_parts = audio::VoipParts::new(cx)?;
|
||||
// Audio needs to run real-time and should never be paused. That is why we are using a
|
||||
// normal std::thread and not a background task
|
||||
thread::spawn(move || {
|
||||
// microphone is non send on mac
|
||||
let microphone = audio::Audio::open_microphone(voip_parts)?;
|
||||
send_to_livekit(frame_tx, microphone);
|
||||
Ok::<(), anyhow::Error>(())
|
||||
});
|
||||
Task::ready(Ok(()))
|
||||
} else {
|
||||
self.executor.spawn(async move {
|
||||
Self::capture_input(apm, frame_tx, SAMPLE_RATE.get(), CHANNEL_COUNT.get().into())
|
||||
.await
|
||||
})
|
||||
};
|
||||
let capture_task = self.executor.spawn(async move {
|
||||
Self::capture_input(apm, frame_tx, SAMPLE_RATE, NUM_CHANNELS).await
|
||||
});
|
||||
|
||||
let on_drop = util::defer(|| {
|
||||
drop(transmit_task);
|
||||
@@ -378,36 +346,6 @@ impl AudioStack {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
struct Speaker {
|
||||
name: String,
|
||||
is_staff: bool,
|
||||
}
|
||||
|
||||
fn send_to_livekit(frame_tx: UnboundedSender<AudioFrame<'static>>, mut microphone: impl Source) {
|
||||
use cpal::Sample;
|
||||
loop {
|
||||
let sampled: Vec<_> = microphone
|
||||
.by_ref()
|
||||
.take(audio::BUFFER_SIZE)
|
||||
.map(|s| s.to_sample())
|
||||
.collect();
|
||||
|
||||
if frame_tx
|
||||
.unbounded_send(AudioFrame {
|
||||
sample_rate: SAMPLE_RATE.get(),
|
||||
num_channels: CHANNEL_COUNT.get() as u32,
|
||||
samples_per_channel: sampled.len() as u32 / CHANNEL_COUNT.get() as u32,
|
||||
data: Cow::Owned(sampled),
|
||||
})
|
||||
.is_err()
|
||||
{
|
||||
// must rx has dropped or is not consuming
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
use super::LocalVideoTrack;
|
||||
|
||||
pub enum AudioStream {
|
||||
|
||||
@@ -1,21 +1,15 @@
|
||||
use std::num::NonZero;
|
||||
|
||||
use futures::StreamExt;
|
||||
use libwebrtc::{audio_stream::native::NativeAudioStream, prelude::AudioFrame};
|
||||
use livekit::track::RemoteAudioTrack;
|
||||
use rodio::{Source, buffer::SamplesBuffer, conversions::SampleTypeConverter, nz};
|
||||
use rodio::{Source, buffer::SamplesBuffer, conversions::SampleTypeConverter};
|
||||
|
||||
use audio::{CHANNEL_COUNT, SAMPLE_RATE};
|
||||
use crate::livekit_client::playback::{NUM_CHANNELS, SAMPLE_RATE};
|
||||
|
||||
fn frame_to_samplesbuffer(frame: AudioFrame) -> SamplesBuffer {
|
||||
let samples = frame.data.iter().copied();
|
||||
let samples = SampleTypeConverter::<_, _>::new(samples);
|
||||
let samples: Vec<f32> = samples.collect();
|
||||
SamplesBuffer::new(
|
||||
nz!(2), // frame always has two channels
|
||||
NonZero::new(frame.sample_rate).expect("audio frame sample rate is nonzero"),
|
||||
samples,
|
||||
)
|
||||
SamplesBuffer::new(frame.num_channels as u16, frame.sample_rate, samples)
|
||||
}
|
||||
|
||||
pub struct LiveKitStream {
|
||||
@@ -26,11 +20,8 @@ pub struct LiveKitStream {
|
||||
|
||||
impl LiveKitStream {
|
||||
pub fn new(executor: &gpui::BackgroundExecutor, track: &RemoteAudioTrack) -> Self {
|
||||
let mut stream = NativeAudioStream::new(
|
||||
track.rtc_track(),
|
||||
SAMPLE_RATE.get() as i32,
|
||||
CHANNEL_COUNT.get().into(),
|
||||
);
|
||||
let mut stream =
|
||||
NativeAudioStream::new(track.rtc_track(), SAMPLE_RATE as i32, NUM_CHANNELS as i32);
|
||||
let (queue_input, queue_output) = rodio::queue::queue(true);
|
||||
// spawn rtc stream
|
||||
let receiver_task = executor.spawn({
|
||||
@@ -63,17 +54,11 @@ impl Source for LiveKitStream {
|
||||
}
|
||||
|
||||
fn channels(&self) -> rodio::ChannelCount {
|
||||
// This must be hardcoded because the playback source assumes constant
|
||||
// sample rate and channel count. The queue upon which this is build
|
||||
// will however report different counts and rates. Even though we put in
|
||||
// only items with our (constant) CHANNEL_COUNT & SAMPLE_RATE this will
|
||||
// play silence on one channel and at 44100 which is not what our
|
||||
// constants are.
|
||||
CHANNEL_COUNT
|
||||
self.inner.channels()
|
||||
}
|
||||
|
||||
fn sample_rate(&self) -> rodio::SampleRate {
|
||||
SAMPLE_RATE // see comment on channels
|
||||
self.inner.sample_rate()
|
||||
}
|
||||
|
||||
fn total_duration(&self) -> Option<std::time::Duration> {
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
use std::{
|
||||
env,
|
||||
num::NonZero,
|
||||
path::{Path, PathBuf},
|
||||
sync::{Arc, Mutex},
|
||||
time::Duration,
|
||||
@@ -84,12 +83,8 @@ fn write_out(
|
||||
.expect("Stream has ended, callback cant hold the lock"),
|
||||
);
|
||||
let samples: Vec<f32> = SampleTypeConverter::<_, f32>::new(samples.into_iter()).collect();
|
||||
let mut samples = SamplesBuffer::new(
|
||||
NonZero::new(config.channels()).expect("config channel is never zero"),
|
||||
NonZero::new(config.sample_rate().0).expect("config sample_rate is never zero"),
|
||||
samples,
|
||||
);
|
||||
match rodio::wav_to_file(&mut samples, path) {
|
||||
let mut samples = SamplesBuffer::new(config.channels(), config.sample_rate().0, samples);
|
||||
match rodio::output_to_wav(&mut samples, path) {
|
||||
Ok(_) => Ok(()),
|
||||
Err(e) => Err(anyhow::anyhow!("Failed to write wav file: {}", e)),
|
||||
}
|
||||
|
||||
@@ -728,8 +728,6 @@ impl Room {
|
||||
|
||||
pub async fn publish_local_microphone_track(
|
||||
&self,
|
||||
_track_name: String,
|
||||
_is_staff: bool,
|
||||
cx: &mut AsyncApp,
|
||||
) -> Result<(LocalTrackPublication, AudioStream)> {
|
||||
self.local_participant().publish_microphone_track(cx).await
|
||||
|
||||
@@ -166,6 +166,12 @@ impl<'a> From<&'a str> for LanguageServerName {
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq<str> for LanguageServerName {
|
||||
fn eq(&self, other: &str) -> bool {
|
||||
self.0 == other
|
||||
}
|
||||
}
|
||||
|
||||
/// Handle to a language server RPC activity subscription.
|
||||
pub enum Subscription {
|
||||
Notification {
|
||||
|
||||
@@ -69,6 +69,7 @@ pub struct MarkdownStyle {
|
||||
pub heading_level_styles: Option<HeadingLevelStyles>,
|
||||
pub table_overflow_x_scroll: bool,
|
||||
pub height_is_multiple_of_line_height: bool,
|
||||
pub prevent_mouse_interaction: bool,
|
||||
}
|
||||
|
||||
impl Default for MarkdownStyle {
|
||||
@@ -89,6 +90,7 @@ impl Default for MarkdownStyle {
|
||||
heading_level_styles: None,
|
||||
table_overflow_x_scroll: false,
|
||||
height_is_multiple_of_line_height: false,
|
||||
prevent_mouse_interaction: false,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -575,16 +577,22 @@ impl MarkdownElement {
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) {
|
||||
if self.style.prevent_mouse_interaction {
|
||||
return;
|
||||
}
|
||||
|
||||
let is_hovering_link = hitbox.is_hovered(window)
|
||||
&& !self.markdown.read(cx).selection.pending
|
||||
&& rendered_text
|
||||
.link_for_position(window.mouse_position())
|
||||
.is_some();
|
||||
|
||||
if is_hovering_link {
|
||||
window.set_cursor_style(CursorStyle::PointingHand, hitbox);
|
||||
} else {
|
||||
window.set_cursor_style(CursorStyle::IBeam, hitbox);
|
||||
if !self.style.prevent_mouse_interaction {
|
||||
if is_hovering_link {
|
||||
window.set_cursor_style(CursorStyle::PointingHand, hitbox);
|
||||
} else {
|
||||
window.set_cursor_style(CursorStyle::IBeam, hitbox);
|
||||
}
|
||||
}
|
||||
|
||||
let on_open_url = self.on_url_click.take();
|
||||
|
||||
@@ -449,28 +449,28 @@ impl FontPickerDelegate {
|
||||
) -> Self {
|
||||
let font_family_cache = FontFamilyCache::global(cx);
|
||||
|
||||
let fonts: Vec<SharedString> = font_family_cache
|
||||
.list_font_families(cx)
|
||||
.into_iter()
|
||||
.collect();
|
||||
|
||||
let fonts = font_family_cache
|
||||
.try_list_font_families()
|
||||
.unwrap_or_else(|| vec![current_font.clone()]);
|
||||
let selected_index = fonts
|
||||
.iter()
|
||||
.position(|font| *font == current_font)
|
||||
.unwrap_or(0);
|
||||
|
||||
let filtered_fonts = fonts
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(index, font)| StringMatch {
|
||||
candidate_id: index,
|
||||
string: font.to_string(),
|
||||
positions: Vec::new(),
|
||||
score: 0.0,
|
||||
})
|
||||
.collect();
|
||||
|
||||
Self {
|
||||
fonts: fonts.clone(),
|
||||
filtered_fonts: fonts
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(index, font)| StringMatch {
|
||||
candidate_id: index,
|
||||
string: font.to_string(),
|
||||
positions: Vec::new(),
|
||||
score: 0.0,
|
||||
})
|
||||
.collect(),
|
||||
fonts,
|
||||
filtered_fonts,
|
||||
selected_index,
|
||||
current_font,
|
||||
on_font_changed: Arc::new(on_font_changed),
|
||||
|
||||
@@ -242,12 +242,25 @@ struct Onboarding {
|
||||
|
||||
impl Onboarding {
|
||||
fn new(workspace: &Workspace, cx: &mut App) -> Entity<Self> {
|
||||
cx.new(|cx| Self {
|
||||
workspace: workspace.weak_handle(),
|
||||
focus_handle: cx.focus_handle(),
|
||||
selected_page: SelectedPage::Basics,
|
||||
user_store: workspace.user_store().clone(),
|
||||
_settings_subscription: cx.observe_global::<SettingsStore>(move |_, cx| cx.notify()),
|
||||
let font_family_cache = theme::FontFamilyCache::global(cx);
|
||||
|
||||
cx.new(|cx| {
|
||||
cx.spawn(async move |this, cx| {
|
||||
font_family_cache.prefetch(cx).await;
|
||||
this.update(cx, |_, cx| {
|
||||
cx.notify();
|
||||
})
|
||||
})
|
||||
.detach();
|
||||
|
||||
Self {
|
||||
workspace: workspace.weak_handle(),
|
||||
focus_handle: cx.focus_handle(),
|
||||
selected_page: SelectedPage::Basics,
|
||||
user_store: workspace.user_store().clone(),
|
||||
_settings_subscription: cx
|
||||
.observe_global::<SettingsStore>(move |_, cx| cx.notify()),
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
@@ -67,6 +67,7 @@ regex.workspace = true
|
||||
remote.workspace = true
|
||||
rpc.workspace = true
|
||||
schemars.workspace = true
|
||||
semver.workspace = true
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
settings.workspace = true
|
||||
@@ -85,6 +86,7 @@ text.workspace = true
|
||||
toml.workspace = true
|
||||
url.workspace = true
|
||||
util.workspace = true
|
||||
watch.workspace = true
|
||||
which.workspace = true
|
||||
worktree.workspace = true
|
||||
zlog.workspace = true
|
||||
|
||||
1091
crates/project/src/agent_server_store.rs
Normal file
1091
crates/project/src/agent_server_store.rs
Normal file
File diff suppressed because it is too large
Load Diff
@@ -86,7 +86,6 @@ use node_runtime::read_package_installed_version;
|
||||
use parking_lot::Mutex;
|
||||
use postage::{mpsc, sink::Sink, stream::Stream, watch};
|
||||
use rand::prelude::*;
|
||||
|
||||
use rpc::{
|
||||
AnyProtoClient,
|
||||
proto::{FromProto, LspRequestId, LspRequestMessage as _, ToProto},
|
||||
@@ -7124,6 +7123,36 @@ impl LspStore {
|
||||
summary
|
||||
}
|
||||
|
||||
/// Returns the diagnostic summary for a specific project path.
|
||||
pub fn diagnostic_summary_for_path(
|
||||
&self,
|
||||
project_path: &ProjectPath,
|
||||
_: &App,
|
||||
) -> DiagnosticSummary {
|
||||
if let Some(summaries) = self
|
||||
.diagnostic_summaries
|
||||
.get(&project_path.worktree_id)
|
||||
.and_then(|map| map.get(&project_path.path))
|
||||
{
|
||||
let (error_count, warning_count) = summaries.iter().fold(
|
||||
(0, 0),
|
||||
|(error_count, warning_count), (_language_server_id, summary)| {
|
||||
(
|
||||
error_count + summary.error_count,
|
||||
warning_count + summary.warning_count,
|
||||
)
|
||||
},
|
||||
);
|
||||
|
||||
DiagnosticSummary {
|
||||
error_count,
|
||||
warning_count,
|
||||
}
|
||||
} else {
|
||||
DiagnosticSummary::default()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn diagnostic_summaries<'a>(
|
||||
&'a self,
|
||||
include_ignored: bool,
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
pub mod agent_server_store;
|
||||
pub mod buffer_store;
|
||||
mod color_extractor;
|
||||
pub mod connection_manager;
|
||||
@@ -34,7 +35,11 @@ mod yarn;
|
||||
|
||||
use dap::inline_value::{InlineValueLocation, VariableLookupKind, VariableScope};
|
||||
|
||||
use crate::{git_store::GitStore, lsp_store::log_store::LogKind};
|
||||
use crate::{
|
||||
agent_server_store::{AgentServerStore, AllAgentServersSettings},
|
||||
git_store::GitStore,
|
||||
lsp_store::log_store::LogKind,
|
||||
};
|
||||
pub use git_store::{
|
||||
ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate,
|
||||
git_traversal::{ChildEntriesGitIter, GitEntry, GitEntryRef, GitTraversal},
|
||||
@@ -179,6 +184,7 @@ pub struct Project {
|
||||
buffer_ordered_messages_tx: mpsc::UnboundedSender<BufferOrderedMessage>,
|
||||
languages: Arc<LanguageRegistry>,
|
||||
dap_store: Entity<DapStore>,
|
||||
agent_server_store: Entity<AgentServerStore>,
|
||||
|
||||
breakpoint_store: Entity<BreakpointStore>,
|
||||
collab_client: Arc<client::Client>,
|
||||
@@ -1019,6 +1025,7 @@ impl Project {
|
||||
WorktreeSettings::register(cx);
|
||||
ProjectSettings::register(cx);
|
||||
DisableAiSettings::register(cx);
|
||||
AllAgentServersSettings::register(cx);
|
||||
}
|
||||
|
||||
pub fn init(client: &Arc<Client>, cx: &mut App) {
|
||||
@@ -1174,6 +1181,10 @@ impl Project {
|
||||
)
|
||||
});
|
||||
|
||||
let agent_server_store = cx.new(|cx| {
|
||||
AgentServerStore::local(node.clone(), fs.clone(), environment.clone(), cx)
|
||||
});
|
||||
|
||||
cx.subscribe(&lsp_store, Self::on_lsp_store_event).detach();
|
||||
|
||||
Self {
|
||||
@@ -1200,6 +1211,7 @@ impl Project {
|
||||
remote_client: None,
|
||||
breakpoint_store,
|
||||
dap_store,
|
||||
agent_server_store,
|
||||
|
||||
buffers_needing_diff: Default::default(),
|
||||
git_diff_debouncer: DebouncedDelay::new(),
|
||||
@@ -1338,6 +1350,9 @@ impl Project {
|
||||
)
|
||||
});
|
||||
|
||||
let agent_server_store =
|
||||
cx.new(|cx| AgentServerStore::remote(REMOTE_SERVER_PROJECT_ID, remote.clone(), cx));
|
||||
|
||||
cx.subscribe(&remote, Self::on_remote_client_event).detach();
|
||||
|
||||
let this = Self {
|
||||
@@ -1353,6 +1368,7 @@ impl Project {
|
||||
join_project_response_message_id: 0,
|
||||
client_state: ProjectClientState::Local,
|
||||
git_store,
|
||||
agent_server_store,
|
||||
client_subscriptions: Vec::new(),
|
||||
_subscriptions: vec![
|
||||
cx.on_release(Self::release),
|
||||
@@ -1407,6 +1423,7 @@ impl Project {
|
||||
remote_proto.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &this.dap_store);
|
||||
remote_proto.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &this.settings_observer);
|
||||
remote_proto.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &this.git_store);
|
||||
remote_proto.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &this.agent_server_store);
|
||||
|
||||
remote_proto.add_entity_message_handler(Self::handle_create_buffer_for_peer);
|
||||
remote_proto.add_entity_message_handler(Self::handle_update_worktree);
|
||||
@@ -1422,6 +1439,7 @@ impl Project {
|
||||
ToolchainStore::init(&remote_proto);
|
||||
DapStore::init(&remote_proto, cx);
|
||||
GitStore::init(&remote_proto);
|
||||
AgentServerStore::init_remote(&remote_proto);
|
||||
|
||||
this
|
||||
})
|
||||
@@ -1564,6 +1582,8 @@ impl Project {
|
||||
)
|
||||
})?;
|
||||
|
||||
let agent_server_store = cx.new(|cx| AgentServerStore::collab(cx))?;
|
||||
|
||||
let project = cx.new(|cx| {
|
||||
let replica_id = response.payload.replica_id as ReplicaId;
|
||||
|
||||
@@ -1624,6 +1644,7 @@ impl Project {
|
||||
breakpoint_store,
|
||||
dap_store: dap_store.clone(),
|
||||
git_store: git_store.clone(),
|
||||
agent_server_store,
|
||||
buffers_needing_diff: Default::default(),
|
||||
git_diff_debouncer: DebouncedDelay::new(),
|
||||
terminals: Terminals {
|
||||
@@ -4400,6 +4421,13 @@ impl Project {
|
||||
.diagnostic_summary(include_ignored, cx)
|
||||
}
|
||||
|
||||
/// Returns a summary of the diagnostics for the provided project path only.
|
||||
pub fn diagnostic_summary_for_path(&self, path: &ProjectPath, cx: &App) -> DiagnosticSummary {
|
||||
self.lsp_store
|
||||
.read(cx)
|
||||
.diagnostic_summary_for_path(path, cx)
|
||||
}
|
||||
|
||||
pub fn diagnostic_summaries<'a>(
|
||||
&'a self,
|
||||
include_ignored: bool,
|
||||
@@ -4490,6 +4518,23 @@ impl Project {
|
||||
None
|
||||
}
|
||||
|
||||
/// If there's only one visible worktree, returns the given worktree-relative path with no prefix.
|
||||
///
|
||||
/// Otherwise, returns the full path for the project path (obtained by prefixing the worktree-relative path with the name of the worktree).
|
||||
pub fn short_full_path_for_project_path(
|
||||
&self,
|
||||
project_path: &ProjectPath,
|
||||
cx: &App,
|
||||
) -> Option<PathBuf> {
|
||||
if self.visible_worktrees(cx).take(2).count() < 2 {
|
||||
return Some(project_path.path.to_path_buf());
|
||||
}
|
||||
self.worktree_for_id(project_path.worktree_id, cx)
|
||||
.and_then(|worktree| {
|
||||
Some(Path::new(worktree.read(cx).abs_path().file_name()?).join(&project_path.path))
|
||||
})
|
||||
}
|
||||
|
||||
pub fn project_path_for_absolute_path(&self, abs_path: &Path, cx: &App) -> Option<ProjectPath> {
|
||||
self.find_worktree(abs_path, cx)
|
||||
.map(|(worktree, relative_path)| ProjectPath {
|
||||
@@ -5175,6 +5220,10 @@ impl Project {
|
||||
&self.git_store
|
||||
}
|
||||
|
||||
pub fn agent_server_store(&self) -> &Entity<AgentServerStore> {
|
||||
&self.agent_server_store
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
fn git_scans_complete(&self, cx: &Context<Self>) -> Task<()> {
|
||||
cx.spawn(async move |this, cx| {
|
||||
|
||||
@@ -2,6 +2,7 @@ syntax = "proto3";
|
||||
package zed.messages;
|
||||
|
||||
import "buffer.proto";
|
||||
import "task.proto";
|
||||
|
||||
message Context {
|
||||
repeated ContextOperation operations = 1;
|
||||
@@ -164,3 +165,35 @@ enum LanguageModelRole {
|
||||
LanguageModelSystem = 2;
|
||||
reserved 3;
|
||||
}
|
||||
|
||||
message GetAgentServerCommand {
|
||||
uint64 project_id = 1;
|
||||
string name = 2;
|
||||
optional string root_dir = 3;
|
||||
}
|
||||
|
||||
message AgentServerCommand {
|
||||
string path = 1;
|
||||
repeated string args = 2;
|
||||
map<string, string> env = 3;
|
||||
string root_dir = 4;
|
||||
|
||||
optional SpawnInTerminal login = 5;
|
||||
}
|
||||
|
||||
message ExternalAgentsUpdated {
|
||||
uint64 project_id = 1;
|
||||
repeated string names = 2;
|
||||
}
|
||||
|
||||
message ExternalAgentLoadingStatusUpdated {
|
||||
uint64 project_id = 1;
|
||||
string name = 2;
|
||||
string status = 3;
|
||||
}
|
||||
|
||||
message NewExternalAgentVersionAvailable {
|
||||
uint64 project_id = 1;
|
||||
string name = 2;
|
||||
string version = 3;
|
||||
}
|
||||
|
||||
@@ -3,6 +3,7 @@ package zed.messages;
|
||||
|
||||
import "core.proto";
|
||||
import "buffer.proto";
|
||||
import "task.proto";
|
||||
|
||||
enum BreakpointState {
|
||||
Enabled = 0;
|
||||
@@ -533,14 +534,6 @@ message DebugScenario {
|
||||
optional string configuration = 7;
|
||||
}
|
||||
|
||||
message SpawnInTerminal {
|
||||
string label = 1;
|
||||
optional string command = 2;
|
||||
repeated string args = 3;
|
||||
map<string, string> env = 4;
|
||||
optional string cwd = 5;
|
||||
}
|
||||
|
||||
message LogToDebugConsole {
|
||||
uint64 project_id = 1;
|
||||
uint64 session_id = 2;
|
||||
|
||||
@@ -40,3 +40,11 @@ enum HideStrategy {
|
||||
HideNever = 1;
|
||||
HideOnSuccess = 2;
|
||||
}
|
||||
|
||||
message SpawnInTerminal {
|
||||
string label = 1;
|
||||
optional string command = 2;
|
||||
repeated string args = 3;
|
||||
map<string, string> env = 4;
|
||||
optional string cwd = 5;
|
||||
}
|
||||
|
||||
@@ -405,7 +405,15 @@ message Envelope {
|
||||
GetProcessesResponse get_processes_response = 370;
|
||||
|
||||
ResolveToolchain resolve_toolchain = 371;
|
||||
ResolveToolchainResponse resolve_toolchain_response = 372; // current max
|
||||
ResolveToolchainResponse resolve_toolchain_response = 372;
|
||||
|
||||
GetAgentServerCommand get_agent_server_command = 373;
|
||||
AgentServerCommand agent_server_command = 374;
|
||||
|
||||
ExternalAgentsUpdated external_agents_updated = 375;
|
||||
|
||||
ExternalAgentLoadingStatusUpdated external_agent_loading_status_updated = 376;
|
||||
NewExternalAgentVersionAvailable new_external_agent_version_available = 377; // current max
|
||||
}
|
||||
|
||||
reserved 87 to 88;
|
||||
|
||||
@@ -319,6 +319,11 @@ messages!(
|
||||
(GitClone, Background),
|
||||
(GitCloneResponse, Background),
|
||||
(ToggleLspLogs, Background),
|
||||
(GetAgentServerCommand, Background),
|
||||
(AgentServerCommand, Background),
|
||||
(ExternalAgentsUpdated, Background),
|
||||
(ExternalAgentLoadingStatusUpdated, Background),
|
||||
(NewExternalAgentVersionAvailable, Background),
|
||||
);
|
||||
|
||||
request_messages!(
|
||||
@@ -491,6 +496,7 @@ request_messages!(
|
||||
(GitClone, GitCloneResponse),
|
||||
(ToggleLspLogs, Ack),
|
||||
(GetProcesses, GetProcessesResponse),
|
||||
(GetAgentServerCommand, AgentServerCommand)
|
||||
);
|
||||
|
||||
lsp_messages!(
|
||||
@@ -644,7 +650,11 @@ entity_messages!(
|
||||
GetDocumentDiagnostics,
|
||||
PullWorkspaceDiagnostics,
|
||||
GetDefaultBranch,
|
||||
GitClone
|
||||
GitClone,
|
||||
GetAgentServerCommand,
|
||||
ExternalAgentsUpdated,
|
||||
ExternalAgentLoadingStatusUpdated,
|
||||
NewExternalAgentVersionAvailable,
|
||||
);
|
||||
|
||||
entity_messages!(
|
||||
|
||||
@@ -12,6 +12,7 @@ use node_runtime::NodeRuntime;
|
||||
use project::{
|
||||
LspStore, LspStoreEvent, ManifestTree, PrettierStore, ProjectEnvironment, ProjectPath,
|
||||
ToolchainStore, WorktreeId,
|
||||
agent_server_store::AgentServerStore,
|
||||
buffer_store::{BufferStore, BufferStoreEvent},
|
||||
debugger::{breakpoint_store::BreakpointStore, dap_store::DapStore},
|
||||
git_store::GitStore,
|
||||
@@ -44,6 +45,7 @@ pub struct HeadlessProject {
|
||||
pub lsp_store: Entity<LspStore>,
|
||||
pub task_store: Entity<TaskStore>,
|
||||
pub dap_store: Entity<DapStore>,
|
||||
pub agent_server_store: Entity<AgentServerStore>,
|
||||
pub settings_observer: Entity<SettingsObserver>,
|
||||
pub next_entry_id: Arc<AtomicUsize>,
|
||||
pub languages: Arc<LanguageRegistry>,
|
||||
@@ -182,7 +184,7 @@ impl HeadlessProject {
|
||||
.as_local_store()
|
||||
.expect("Toolchain store to be local")
|
||||
.clone(),
|
||||
environment,
|
||||
environment.clone(),
|
||||
manifest_tree,
|
||||
languages.clone(),
|
||||
http_client.clone(),
|
||||
@@ -193,6 +195,13 @@ impl HeadlessProject {
|
||||
lsp_store
|
||||
});
|
||||
|
||||
let agent_server_store = cx.new(|cx| {
|
||||
let mut agent_server_store =
|
||||
AgentServerStore::local(node_runtime.clone(), fs.clone(), environment, cx);
|
||||
agent_server_store.shared(REMOTE_SERVER_PROJECT_ID, session.clone());
|
||||
agent_server_store
|
||||
});
|
||||
|
||||
cx.subscribe(&lsp_store, Self::on_lsp_store_event).detach();
|
||||
language_extension::init(
|
||||
language_extension::LspAccess::ViaLspStore(lsp_store.clone()),
|
||||
@@ -226,6 +235,7 @@ impl HeadlessProject {
|
||||
session.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &dap_store);
|
||||
session.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &settings_observer);
|
||||
session.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &git_store);
|
||||
session.subscribe_to_entity(REMOTE_SERVER_PROJECT_ID, &agent_server_store);
|
||||
|
||||
session.add_request_handler(cx.weak_entity(), Self::handle_list_remote_directory);
|
||||
session.add_request_handler(cx.weak_entity(), Self::handle_get_path_metadata);
|
||||
@@ -264,6 +274,7 @@ impl HeadlessProject {
|
||||
// todo(debugger): Re init breakpoint store when we set it up for collab
|
||||
// BreakpointStore::init(&client);
|
||||
GitStore::init(&session);
|
||||
AgentServerStore::init_headless(&session);
|
||||
|
||||
HeadlessProject {
|
||||
next_entry_id: Default::default(),
|
||||
@@ -275,6 +286,7 @@ impl HeadlessProject {
|
||||
lsp_store,
|
||||
task_store,
|
||||
dap_store,
|
||||
agent_server_store,
|
||||
languages,
|
||||
extensions,
|
||||
git_store,
|
||||
|
||||
@@ -1100,6 +1100,24 @@ mod tests {
|
||||
.unindent(),
|
||||
);
|
||||
|
||||
check_keymap_update(
|
||||
"[]",
|
||||
KeybindUpdateOperation::add(KeybindUpdateTarget {
|
||||
keystrokes: &parse_keystrokes("\\ a"),
|
||||
action_name: "zed::SomeAction",
|
||||
context: None,
|
||||
action_arguments: None,
|
||||
}),
|
||||
r#"[
|
||||
{
|
||||
"bindings": {
|
||||
"\\ a": "zed::SomeAction"
|
||||
}
|
||||
}
|
||||
]"#
|
||||
.unindent(),
|
||||
);
|
||||
|
||||
check_keymap_update(
|
||||
"[]",
|
||||
KeybindUpdateOperation::add(KeybindUpdateTarget {
|
||||
@@ -1302,6 +1320,79 @@ mod tests {
|
||||
.unindent(),
|
||||
);
|
||||
|
||||
check_keymap_update(
|
||||
r#"[
|
||||
{
|
||||
"bindings": {
|
||||
"\\ a": "zed::SomeAction"
|
||||
}
|
||||
}
|
||||
]"#
|
||||
.unindent(),
|
||||
KeybindUpdateOperation::Replace {
|
||||
target: KeybindUpdateTarget {
|
||||
keystrokes: &parse_keystrokes("\\ a"),
|
||||
action_name: "zed::SomeAction",
|
||||
context: None,
|
||||
action_arguments: None,
|
||||
},
|
||||
source: KeybindUpdateTarget {
|
||||
keystrokes: &parse_keystrokes("\\ b"),
|
||||
action_name: "zed::SomeOtherAction",
|
||||
context: None,
|
||||
action_arguments: Some(r#"{"foo": "bar"}"#),
|
||||
},
|
||||
target_keybind_source: KeybindSource::User,
|
||||
},
|
||||
r#"[
|
||||
{
|
||||
"bindings": {
|
||||
"\\ b": [
|
||||
"zed::SomeOtherAction",
|
||||
{
|
||||
"foo": "bar"
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
]"#
|
||||
.unindent(),
|
||||
);
|
||||
|
||||
check_keymap_update(
|
||||
r#"[
|
||||
{
|
||||
"bindings": {
|
||||
"\\ a": "zed::SomeAction"
|
||||
}
|
||||
}
|
||||
]"#
|
||||
.unindent(),
|
||||
KeybindUpdateOperation::Replace {
|
||||
target: KeybindUpdateTarget {
|
||||
keystrokes: &parse_keystrokes("\\ a"),
|
||||
action_name: "zed::SomeAction",
|
||||
context: None,
|
||||
action_arguments: None,
|
||||
},
|
||||
source: KeybindUpdateTarget {
|
||||
keystrokes: &parse_keystrokes("\\ a"),
|
||||
action_name: "zed::SomeAction",
|
||||
context: None,
|
||||
action_arguments: None,
|
||||
},
|
||||
target_keybind_source: KeybindSource::User,
|
||||
},
|
||||
r#"[
|
||||
{
|
||||
"bindings": {
|
||||
"\\ a": "zed::SomeAction"
|
||||
}
|
||||
}
|
||||
]"#
|
||||
.unindent(),
|
||||
);
|
||||
|
||||
check_keymap_update(
|
||||
r#"[
|
||||
{
|
||||
@@ -1494,6 +1585,37 @@ mod tests {
|
||||
.unindent(),
|
||||
);
|
||||
|
||||
check_keymap_update(
|
||||
r#"[
|
||||
{
|
||||
"context": "SomeContext",
|
||||
"bindings": {
|
||||
"\\ a": "foo::bar",
|
||||
"c": "foo::baz",
|
||||
}
|
||||
},
|
||||
]"#
|
||||
.unindent(),
|
||||
KeybindUpdateOperation::Remove {
|
||||
target: KeybindUpdateTarget {
|
||||
context: Some("SomeContext"),
|
||||
keystrokes: &parse_keystrokes("\\ a"),
|
||||
action_name: "foo::bar",
|
||||
action_arguments: None,
|
||||
},
|
||||
target_keybind_source: KeybindSource::User,
|
||||
},
|
||||
r#"[
|
||||
{
|
||||
"context": "SomeContext",
|
||||
"bindings": {
|
||||
"c": "foo::baz",
|
||||
}
|
||||
},
|
||||
]"#
|
||||
.unindent(),
|
||||
);
|
||||
|
||||
check_keymap_update(
|
||||
r#"[
|
||||
{
|
||||
|
||||
@@ -140,8 +140,10 @@ pub fn replace_value_in_json_text<T: AsRef<str>>(
|
||||
|
||||
let found_key = text
|
||||
.get(key_range.clone())
|
||||
.map(|key_text| {
|
||||
depth < key_path.len() && key_text == format!("\"{}\"", key_path[depth].as_ref())
|
||||
.and_then(|key_text| {
|
||||
serde_json::to_string(key_path[depth].as_ref())
|
||||
.ok()
|
||||
.map(|key_path| depth < key_path.len() && key_text == key_path)
|
||||
})
|
||||
.unwrap_or(false);
|
||||
|
||||
@@ -163,8 +165,8 @@ pub fn replace_value_in_json_text<T: AsRef<str>>(
|
||||
if depth == key_path.len() {
|
||||
if let Some(new_value) = new_value {
|
||||
let new_val = to_pretty_json(new_value, tab_size, tab_size * depth);
|
||||
if let Some(replace_key) = replace_key {
|
||||
let new_key = format!("\"{}\": ", replace_key);
|
||||
if let Some(replace_key) = replace_key.and_then(|str| serde_json::to_string(str).ok()) {
|
||||
let new_key = format!("{}: ", replace_key);
|
||||
if let Some(key_start) = text[..existing_value_range.start].rfind('"') {
|
||||
if let Some(prev_key_start) = text[..key_start].rfind('"') {
|
||||
existing_value_range.start = prev_key_start;
|
||||
|
||||
@@ -78,6 +78,7 @@ impl SettingsValue<serde_json::Value> {
|
||||
let fs = <dyn Fs>::global(cx);
|
||||
|
||||
let rx = settings_store.update_settings_file_at_path(fs.clone(), path.as_slice(), value);
|
||||
|
||||
let path = path.clone();
|
||||
cx.background_spawn(async move {
|
||||
rx.await?
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -16,7 +16,7 @@ struct FontFamilyCacheState {
|
||||
/// so we do it once and then use the cached values each render.
|
||||
#[derive(Default)]
|
||||
pub struct FontFamilyCache {
|
||||
state: RwLock<FontFamilyCacheState>,
|
||||
state: Arc<RwLock<FontFamilyCacheState>>,
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
@@ -52,4 +52,44 @@ impl FontFamilyCache {
|
||||
|
||||
lock.font_families.clone()
|
||||
}
|
||||
|
||||
/// Returns the list of font families if they have been loaded
|
||||
pub fn try_list_font_families(&self) -> Option<Vec<SharedString>> {
|
||||
self.state
|
||||
.try_read()
|
||||
.filter(|state| state.loaded_at.is_some())
|
||||
.map(|state| state.font_families.clone())
|
||||
}
|
||||
|
||||
/// Prefetch all font names in the background
|
||||
pub async fn prefetch(&self, cx: &gpui::AsyncApp) {
|
||||
if self
|
||||
.state
|
||||
.try_read()
|
||||
.is_none_or(|state| state.loaded_at.is_some())
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
let Ok(text_system) = cx.update(|cx| App::text_system(cx).clone()) else {
|
||||
return;
|
||||
};
|
||||
|
||||
let state = self.state.clone();
|
||||
|
||||
cx.background_executor()
|
||||
.spawn(async move {
|
||||
// We take this lock in the background executor to ensure that synchronous calls to `list_font_families` are blocked while we are prefetching,
|
||||
// while not blocking the main thread and risking deadlocks
|
||||
let mut lock = state.write();
|
||||
let all_font_names = text_system
|
||||
.all_font_names()
|
||||
.into_iter()
|
||||
.map(SharedString::from)
|
||||
.collect();
|
||||
lock.font_families = all_font_names;
|
||||
lock.loaded_at = Some(Instant::now());
|
||||
})
|
||||
.await;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,3 +1,7 @@
|
||||
mod boundary;
|
||||
mod object;
|
||||
mod select;
|
||||
|
||||
use editor::display_map::DisplaySnapshot;
|
||||
use editor::{
|
||||
DisplayPoint, Editor, HideMouseCursorOrigin, SelectionEffects, ToOffset, ToPoint, movement,
|
||||
|
||||
740
crates/vim/src/helix/boundary.rs
Normal file
740
crates/vim/src/helix/boundary.rs
Normal file
@@ -0,0 +1,740 @@
|
||||
use std::{
|
||||
cmp::Ordering,
|
||||
ops::{Deref, DerefMut, Range},
|
||||
};
|
||||
|
||||
use editor::{
|
||||
DisplayPoint,
|
||||
display_map::{DisplaySnapshot, ToDisplayPoint},
|
||||
movement,
|
||||
};
|
||||
use language::{CharClassifier, CharKind};
|
||||
use text::Bias;
|
||||
|
||||
use crate::helix::object::HelixTextObject;
|
||||
|
||||
/// Text objects (after helix definition) that can easily be
|
||||
/// found by reading a buffer and comparing two neighboring chars
|
||||
/// until a start / end is found
|
||||
trait BoundedObject {
|
||||
/// The next start since `from` (inclusive).
|
||||
/// If outer is true it is the start of "a" object (m a) rather than "inner" object (m i).
|
||||
fn next_start(&self, map: &DisplaySnapshot, from: Offset, outer: bool) -> Option<Offset>;
|
||||
/// The next end since `from` (inclusive).
|
||||
/// If outer is true it is the end of "a" object (m a) rather than "inner" object (m i).
|
||||
fn next_end(&self, map: &DisplaySnapshot, from: Offset, outer: bool) -> Option<Offset>;
|
||||
/// The previous start since `from` (inclusive).
|
||||
/// If outer is true it is the start of "a" object (m a) rather than "inner" object (m i).
|
||||
fn previous_start(&self, map: &DisplaySnapshot, from: Offset, outer: bool) -> Option<Offset>;
|
||||
/// The previous end since `from` (inclusive).
|
||||
/// If outer is true it is the end of "a" object (m a) rather than "inner" object (m i).
|
||||
fn previous_end(&self, map: &DisplaySnapshot, from: Offset, outer: bool) -> Option<Offset>;
|
||||
|
||||
/// Whether the range inside the object can be zero characters wide.
|
||||
/// If so, the trait assumes that these ranges can't be directly adjacent to each other.
|
||||
fn inner_range_can_be_zero_width(&self) -> bool;
|
||||
/// Whether the "ma" can exceed the "mi" range on both sides at the same time
|
||||
fn surround_on_both_sides(&self) -> bool;
|
||||
/// Whether the outer range of an object could overlap with the outer range of the neighboring
|
||||
/// object. If so, they can't be nested.
|
||||
fn ambiguous_outer(&self) -> bool;
|
||||
|
||||
fn can_be_zero_width(&self, around: bool) -> bool {
|
||||
if around {
|
||||
false
|
||||
} else {
|
||||
self.inner_range_can_be_zero_width()
|
||||
}
|
||||
}
|
||||
|
||||
/// Switches from an "mi" range to an "ma" one.
|
||||
/// Assumes the inner range is valid.
|
||||
fn around(&self, map: &DisplaySnapshot, inner_range: Range<Offset>) -> Range<Offset> {
|
||||
if self.surround_on_both_sides() {
|
||||
let start = self
|
||||
.previous_start(map, inner_range.start, true)
|
||||
.unwrap_or(inner_range.start);
|
||||
let end = self
|
||||
.next_end(map, inner_range.end, true)
|
||||
.unwrap_or(inner_range.end);
|
||||
|
||||
return start..end;
|
||||
}
|
||||
|
||||
let mut start = inner_range.start;
|
||||
let end = self
|
||||
.next_end(map, inner_range.end, true)
|
||||
.unwrap_or(inner_range.end);
|
||||
if end == inner_range.end {
|
||||
start = self
|
||||
.previous_start(map, inner_range.start, true)
|
||||
.unwrap_or(inner_range.start)
|
||||
}
|
||||
|
||||
start..end
|
||||
}
|
||||
/// Switches from an "ma" range to an "mi" one.
|
||||
/// Assumes the inner range is valid.
|
||||
fn inside(&self, map: &DisplaySnapshot, outer_range: Range<Offset>) -> Range<Offset> {
|
||||
let inner_start = self
|
||||
.next_start(map, outer_range.start, false)
|
||||
.unwrap_or_else(|| {
|
||||
log::warn!("The motion might not have found the text object correctly");
|
||||
outer_range.start
|
||||
});
|
||||
let inner_end = self
|
||||
.previous_end(map, outer_range.end, false)
|
||||
.unwrap_or_else(|| {
|
||||
log::warn!("The motion might not have found the text object correctly");
|
||||
outer_range.end
|
||||
});
|
||||
inner_start..inner_end
|
||||
}
|
||||
|
||||
/// The next end since `start` (inclusive) on the same nesting level.
|
||||
fn close_at_end(&self, start: Offset, map: &DisplaySnapshot, outer: bool) -> Option<Offset> {
|
||||
let mut end_search_start = if self.can_be_zero_width(outer) {
|
||||
start
|
||||
} else {
|
||||
start.next(map)?
|
||||
};
|
||||
let mut start_search_start = start.next(map)?;
|
||||
|
||||
loop {
|
||||
let next_end = self.next_end(map, end_search_start, outer)?;
|
||||
let maybe_next_start = self.next_start(map, start_search_start, outer);
|
||||
if let Some(next_start) = maybe_next_start
|
||||
&& (*next_start < *next_end
|
||||
|| *next_start == *next_end && self.can_be_zero_width(outer))
|
||||
&& !self.ambiguous_outer()
|
||||
{
|
||||
let closing = self.close_at_end(next_start, map, outer)?;
|
||||
end_search_start = closing.next(map)?;
|
||||
start_search_start = if self.can_be_zero_width(outer) {
|
||||
closing.next(map)?
|
||||
} else {
|
||||
closing
|
||||
};
|
||||
} else {
|
||||
return Some(next_end);
|
||||
}
|
||||
}
|
||||
}
|
||||
/// The previous start since `end` (inclusive) on the same nesting level.
|
||||
fn close_at_start(&self, end: Offset, map: &DisplaySnapshot, outer: bool) -> Option<Offset> {
|
||||
let mut start_search_end = if self.can_be_zero_width(outer) {
|
||||
end
|
||||
} else {
|
||||
end.previous(map)?
|
||||
};
|
||||
let mut end_search_end = end.previous(map)?;
|
||||
|
||||
loop {
|
||||
let previous_start = self.previous_start(map, start_search_end, outer)?;
|
||||
let maybe_previous_end = self.previous_end(map, end_search_end, outer);
|
||||
if let Some(previous_end) = maybe_previous_end
|
||||
&& (*previous_end > *previous_start
|
||||
|| *previous_end == *previous_start && self.can_be_zero_width(outer))
|
||||
&& !self.ambiguous_outer()
|
||||
{
|
||||
let closing = self.close_at_start(previous_end, map, outer)?;
|
||||
start_search_end = closing.previous(map)?;
|
||||
end_search_end = if self.can_be_zero_width(outer) {
|
||||
closing.previous(map)?
|
||||
} else {
|
||||
closing
|
||||
};
|
||||
} else {
|
||||
return Some(previous_start);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, Debug)]
|
||||
struct Offset(usize);
|
||||
impl Deref for Offset {
|
||||
type Target = usize;
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
impl DerefMut for Offset {
|
||||
fn deref_mut(&mut self) -> &mut Self::Target {
|
||||
&mut self.0
|
||||
}
|
||||
}
|
||||
impl Offset {
|
||||
fn next(self, map: &DisplaySnapshot) -> Option<Self> {
|
||||
let next = Self(map.buffer_snapshot.clip_offset(*self + 1, Bias::Right));
|
||||
(*next > *self).then(|| next)
|
||||
}
|
||||
fn previous(self, map: &DisplaySnapshot) -> Option<Self> {
|
||||
if *self == 0 {
|
||||
return None;
|
||||
}
|
||||
Some(Self(map.buffer_snapshot.clip_offset(*self - 1, Bias::Left)))
|
||||
}
|
||||
fn range(
|
||||
start: (DisplayPoint, Bias),
|
||||
end: (DisplayPoint, Bias),
|
||||
map: &DisplaySnapshot,
|
||||
) -> Range<Self> {
|
||||
Self(start.0.to_offset(map, start.1))..Self(end.0.to_offset(map, end.1))
|
||||
}
|
||||
}
|
||||
|
||||
impl<B: BoundedObject> HelixTextObject for B {
|
||||
fn range(
|
||||
&self,
|
||||
map: &DisplaySnapshot,
|
||||
relative_to: Range<DisplayPoint>,
|
||||
around: bool,
|
||||
) -> Option<Range<DisplayPoint>> {
|
||||
let relative_to = Offset::range(
|
||||
(relative_to.start, Bias::Left),
|
||||
(relative_to.end, Bias::Left),
|
||||
map,
|
||||
);
|
||||
|
||||
relative_range(self, around, map, |find_outer| {
|
||||
let search_start = if self.can_be_zero_width(find_outer) {
|
||||
relative_to.end
|
||||
} else {
|
||||
// If the objects can be directly next to each other an object end the
|
||||
// cursor (relative_to) end would not count for close_at_end, so the search
|
||||
// needs to start one character to the left.
|
||||
relative_to.end.previous(map)?
|
||||
};
|
||||
let max_end = self.close_at_end(search_start, map, find_outer)?;
|
||||
let min_start = self.close_at_start(max_end, map, find_outer)?;
|
||||
|
||||
(*min_start <= *relative_to.start).then(|| min_start..max_end)
|
||||
})
|
||||
}
|
||||
|
||||
fn next_range(
|
||||
&self,
|
||||
map: &DisplaySnapshot,
|
||||
relative_to: Range<DisplayPoint>,
|
||||
around: bool,
|
||||
) -> Option<Range<DisplayPoint>> {
|
||||
let relative_to = Offset::range(
|
||||
(relative_to.start, Bias::Left),
|
||||
(relative_to.end, Bias::Left),
|
||||
map,
|
||||
);
|
||||
|
||||
relative_range(self, around, map, |find_outer| {
|
||||
let min_start = self.next_start(map, relative_to.end, find_outer)?;
|
||||
let max_end = self.close_at_end(min_start, map, find_outer)?;
|
||||
|
||||
Some(min_start..max_end)
|
||||
})
|
||||
}
|
||||
|
||||
fn previous_range(
|
||||
&self,
|
||||
map: &DisplaySnapshot,
|
||||
relative_to: Range<DisplayPoint>,
|
||||
around: bool,
|
||||
) -> Option<Range<DisplayPoint>> {
|
||||
let relative_to = Offset::range(
|
||||
(relative_to.start, Bias::Left),
|
||||
(relative_to.end, Bias::Left),
|
||||
map,
|
||||
);
|
||||
|
||||
relative_range(self, around, map, |find_outer| {
|
||||
let max_end = self.previous_end(map, relative_to.start, find_outer)?;
|
||||
let min_start = self.close_at_start(max_end, map, find_outer)?;
|
||||
|
||||
Some(min_start..max_end)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
fn relative_range<B: BoundedObject>(
|
||||
object: &B,
|
||||
outer: bool,
|
||||
map: &DisplaySnapshot,
|
||||
find_range: impl Fn(bool) -> Option<Range<Offset>>,
|
||||
) -> Option<Range<DisplayPoint>> {
|
||||
// The cursor could be inside the outer range, but not the inner range.
|
||||
// Whether that should count as found.
|
||||
let find_outer = object.surround_on_both_sides() && !object.ambiguous_outer();
|
||||
let range = find_range(find_outer)?;
|
||||
let min_start = range.start;
|
||||
let max_end = range.end;
|
||||
|
||||
let wanted_range = if outer && !find_outer {
|
||||
// max_end is not yet the outer end
|
||||
object.around(map, min_start..max_end)
|
||||
} else if !outer && find_outer {
|
||||
// max_end is the outer end, but the final result should have the inner end
|
||||
object.inside(map, min_start..max_end)
|
||||
} else {
|
||||
min_start..max_end
|
||||
};
|
||||
|
||||
let start = wanted_range.start.clone().to_display_point(map);
|
||||
let end = wanted_range.end.clone().to_display_point(map);
|
||||
|
||||
Some(start..end)
|
||||
}
|
||||
|
||||
/// A textobject whose boundaries can easily be found between two chars
|
||||
pub enum ImmediateBoundary {
|
||||
Word { ignore_punctuation: bool },
|
||||
Subword { ignore_punctuation: bool },
|
||||
AngleBrackets,
|
||||
BackQuotes,
|
||||
CurlyBrackets,
|
||||
DoubleQuotes,
|
||||
Parentheses,
|
||||
SingleQuotes,
|
||||
SquareBrackets,
|
||||
VerticalBars,
|
||||
}
|
||||
|
||||
/// A textobject whose start and end can be found from an easy-to-find
|
||||
/// boundary between two chars by following a simple path from there
|
||||
pub enum FuzzyBoundary {
|
||||
Sentence,
|
||||
Paragraph,
|
||||
}
|
||||
|
||||
impl ImmediateBoundary {
|
||||
fn is_inner_start(&self, left: char, right: char, classifier: CharClassifier) -> bool {
|
||||
match self {
|
||||
Self::Word { ignore_punctuation } => {
|
||||
let classifier = classifier.ignore_punctuation(*ignore_punctuation);
|
||||
is_word_start(left, right, &classifier)
|
||||
|| (is_buffer_start(left) && classifier.kind(right) != CharKind::Whitespace)
|
||||
}
|
||||
Self::Subword { ignore_punctuation } => {
|
||||
let classifier = classifier.ignore_punctuation(*ignore_punctuation);
|
||||
movement::is_subword_start(left, right, &classifier)
|
||||
|| (is_buffer_start(left) && classifier.kind(right) != CharKind::Whitespace)
|
||||
}
|
||||
Self::AngleBrackets => left == '<',
|
||||
Self::BackQuotes => left == '`',
|
||||
Self::CurlyBrackets => left == '{',
|
||||
Self::DoubleQuotes => left == '"',
|
||||
Self::Parentheses => left == '(',
|
||||
Self::SingleQuotes => left == '\'',
|
||||
Self::SquareBrackets => left == '[',
|
||||
Self::VerticalBars => left == '|',
|
||||
}
|
||||
}
|
||||
fn is_inner_end(&self, left: char, right: char, classifier: CharClassifier) -> bool {
|
||||
match self {
|
||||
Self::Word { ignore_punctuation } => {
|
||||
let classifier = classifier.ignore_punctuation(*ignore_punctuation);
|
||||
is_word_end(left, right, &classifier)
|
||||
|| (is_buffer_end(right) && classifier.kind(left) != CharKind::Whitespace)
|
||||
}
|
||||
Self::Subword { ignore_punctuation } => {
|
||||
let classifier = classifier.ignore_punctuation(*ignore_punctuation);
|
||||
movement::is_subword_start(left, right, &classifier)
|
||||
|| (is_buffer_end(right) && classifier.kind(left) != CharKind::Whitespace)
|
||||
}
|
||||
Self::AngleBrackets => right == '>',
|
||||
Self::BackQuotes => right == '`',
|
||||
Self::CurlyBrackets => right == '}',
|
||||
Self::DoubleQuotes => right == '"',
|
||||
Self::Parentheses => right == ')',
|
||||
Self::SingleQuotes => right == '\'',
|
||||
Self::SquareBrackets => right == ']',
|
||||
Self::VerticalBars => right == '|',
|
||||
}
|
||||
}
|
||||
fn is_outer_start(&self, left: char, right: char, classifier: CharClassifier) -> bool {
|
||||
match self {
|
||||
word @ Self::Word { .. } => word.is_inner_end(left, right, classifier) || left == '\n',
|
||||
subword @ Self::Subword { .. } => {
|
||||
subword.is_inner_end(left, right, classifier) || left == '\n'
|
||||
}
|
||||
Self::AngleBrackets => right == '<',
|
||||
Self::BackQuotes => right == '`',
|
||||
Self::CurlyBrackets => right == '{',
|
||||
Self::DoubleQuotes => right == '"',
|
||||
Self::Parentheses => right == '(',
|
||||
Self::SingleQuotes => right == '\'',
|
||||
Self::SquareBrackets => right == '[',
|
||||
Self::VerticalBars => right == '|',
|
||||
}
|
||||
}
|
||||
fn is_outer_end(&self, left: char, right: char, classifier: CharClassifier) -> bool {
|
||||
match self {
|
||||
word @ Self::Word { .. } => {
|
||||
word.is_inner_start(left, right, classifier) || right == '\n'
|
||||
}
|
||||
subword @ Self::Subword { .. } => {
|
||||
subword.is_inner_start(left, right, classifier) || right == '\n'
|
||||
}
|
||||
Self::AngleBrackets => left == '>',
|
||||
Self::BackQuotes => left == '`',
|
||||
Self::CurlyBrackets => left == '}',
|
||||
Self::DoubleQuotes => left == '"',
|
||||
Self::Parentheses => left == ')',
|
||||
Self::SingleQuotes => left == '\'',
|
||||
Self::SquareBrackets => left == ']',
|
||||
Self::VerticalBars => left == '|',
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl BoundedObject for ImmediateBoundary {
|
||||
fn next_start(&self, map: &DisplaySnapshot, from: Offset, outer: bool) -> Option<Offset> {
|
||||
try_find_boundary(map, from, |left, right| {
|
||||
let classifier = map.buffer_snapshot.char_classifier_at(*from);
|
||||
if outer {
|
||||
self.is_outer_start(left, right, classifier)
|
||||
} else {
|
||||
self.is_inner_start(left, right, classifier)
|
||||
}
|
||||
})
|
||||
}
|
||||
fn next_end(&self, map: &DisplaySnapshot, from: Offset, outer: bool) -> Option<Offset> {
|
||||
try_find_boundary(map, from, |left, right| {
|
||||
let classifier = map.buffer_snapshot.char_classifier_at(*from);
|
||||
if outer {
|
||||
self.is_outer_end(left, right, classifier)
|
||||
} else {
|
||||
self.is_inner_end(left, right, classifier)
|
||||
}
|
||||
})
|
||||
}
|
||||
fn previous_start(&self, map: &DisplaySnapshot, from: Offset, outer: bool) -> Option<Offset> {
|
||||
try_find_preceding_boundary(map, from, |left, right| {
|
||||
let classifier = map.buffer_snapshot.char_classifier_at(*from);
|
||||
if outer {
|
||||
self.is_outer_start(left, right, classifier)
|
||||
} else {
|
||||
self.is_inner_start(left, right, classifier)
|
||||
}
|
||||
})
|
||||
}
|
||||
fn previous_end(&self, map: &DisplaySnapshot, from: Offset, outer: bool) -> Option<Offset> {
|
||||
try_find_preceding_boundary(map, from, |left, right| {
|
||||
let classifier = map.buffer_snapshot.char_classifier_at(*from);
|
||||
if outer {
|
||||
self.is_outer_end(left, right, classifier)
|
||||
} else {
|
||||
self.is_inner_end(left, right, classifier)
|
||||
}
|
||||
})
|
||||
}
|
||||
fn inner_range_can_be_zero_width(&self) -> bool {
|
||||
match self {
|
||||
Self::Subword { .. } | Self::Word { .. } => false,
|
||||
_ => true,
|
||||
}
|
||||
}
|
||||
fn surround_on_both_sides(&self) -> bool {
|
||||
match self {
|
||||
Self::Subword { .. } | Self::Word { .. } => false,
|
||||
_ => true,
|
||||
}
|
||||
}
|
||||
fn ambiguous_outer(&self) -> bool {
|
||||
match self {
|
||||
Self::BackQuotes
|
||||
| Self::DoubleQuotes
|
||||
| Self::SingleQuotes
|
||||
| Self::VerticalBars
|
||||
| Self::Subword { .. }
|
||||
| Self::Word { .. } => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl FuzzyBoundary {
    /// When between two chars that form an easy-to-find identifier boundary,
    /// what's the way to get to the actual start of the object, if any.
    ///
    /// Returns `None` when (`left`, `right`) is not such an identifier, or a
    /// closure that, given the identifier's offset and the snapshot, locates
    /// the object's inner start.
    fn is_near_potential_inner_start<'a>(
        &self,
        left: char,
        right: char,
        classifier: &CharClassifier,
    ) -> Option<Box<dyn Fn(Offset, &'a DisplaySnapshot) -> Option<Offset>>> {
        // The buffer start (synthetic '\0') is itself a valid inner start.
        if is_buffer_start(left) {
            return Some(Box::new(|identifier, _| Some(identifier)));
        }
        match self {
            Self::Paragraph => {
                // A paragraph start is identified by a blank line ('\n''\n');
                // the actual start is the first non-empty line after it.
                if left != '\n' || right != '\n' {
                    return None;
                }
                Some(Box::new(|identifier, map| {
                    try_find_boundary(map, identifier, |left, right| left == '\n' && right != '\n')
                }))
            }
            Self::Sentence => {
                // A paragraph start also starts a sentence; otherwise the
                // identifier must be a sentence end ('.', then whitespace),
                // and the sentence starts at the next word.
                if let Some(find_paragraph_start) =
                    Self::Paragraph.is_near_potential_inner_start(left, right, classifier)
                {
                    return Some(find_paragraph_start);
                } else if !is_sentence_end(left, right, classifier) {
                    return None;
                }
                Some(Box::new(|identifier, map| {
                    let word = ImmediateBoundary::Word {
                        ignore_punctuation: false,
                    };
                    word.next_start(map, identifier, false)
                }))
            }
        }
    }
    /// When between two chars that form an easy-to-find identifier boundary,
    /// what's the way to get to the actual end of the object, if any.
    /// (Mirror image of `is_near_potential_inner_start`.)
    fn is_near_potential_inner_end<'a>(
        &self,
        left: char,
        right: char,
        classifier: &CharClassifier,
    ) -> Option<Box<dyn Fn(Offset, &'a DisplaySnapshot) -> Option<Offset>>> {
        // The buffer end (synthetic '\0') is itself a valid inner end.
        if is_buffer_end(right) {
            return Some(Box::new(|identifier, _| Some(identifier)));
        }
        match self {
            Self::Paragraph => {
                if left != '\n' || right != '\n' {
                    return None;
                }
                // Walk back from the blank line to just after the last
                // non-empty line of the paragraph.
                Some(Box::new(|identifier, map| {
                    try_find_preceding_boundary(map, identifier, |left, right| {
                        left != '\n' && right == '\n'
                    })
                }))
            }
            Self::Sentence => {
                if let Some(find_paragraph_end) =
                    Self::Paragraph.is_near_potential_inner_end(left, right, classifier)
                {
                    return Some(find_paragraph_end);
                } else if !is_sentence_end(left, right, classifier) {
                    return None;
                }
                // A sentence end identifier already is the inner end.
                Some(Box::new(|identifier, _| Some(identifier)))
            }
        }
    }
    /// When between two chars that form an easy-to-find identifier boundary,
    /// what's the way to get to the actual outer start of the object, if any.
    ///
    /// NOTE(review): delegates to the inner-*end* search — presumably because
    /// an outer range begins where the preceding object's inner content ends
    /// (the whitespace between objects is included); confirm against helix
    /// "around" semantics.
    fn is_near_potential_outer_start<'a>(
        &self,
        left: char,
        right: char,
        classifier: &CharClassifier,
    ) -> Option<Box<dyn Fn(Offset, &'a DisplaySnapshot) -> Option<Offset>>> {
        match self {
            paragraph @ Self::Paragraph => {
                paragraph.is_near_potential_inner_end(left, right, classifier)
            }
            sentence @ Self::Sentence => {
                sentence.is_near_potential_inner_end(left, right, classifier)
            }
        }
    }
    /// When between two chars that form an easy-to-find identifier boundary,
    /// what's the way to get to the actual outer end of the object, if any.
    ///
    /// NOTE(review): mirror of `is_near_potential_outer_start` — delegates to
    /// the inner-*start* search of the following object.
    fn is_near_potential_outer_end<'a>(
        &self,
        left: char,
        right: char,
        classifier: &CharClassifier,
    ) -> Option<Box<dyn Fn(Offset, &'a DisplaySnapshot) -> Option<Offset>>> {
        match self {
            paragraph @ Self::Paragraph => {
                paragraph.is_near_potential_inner_start(left, right, classifier)
            }
            sentence @ Self::Sentence => {
                sentence.is_near_potential_inner_start(left, right, classifier)
            }
        }
    }

    // The boundary can be on the other side of `from` than the identifier, so the search needs to go both ways.
    // Also, the distance (and direction) between identifier and boundary could vary, so a few ones need to be
    // compared, even if one boundary was already found on the right side of `from`.
    fn to_boundary(
        &self,
        map: &DisplaySnapshot,
        from: Offset,
        outer: bool,
        backward: bool,
        boundary_kind: Boundary,
    ) -> Option<Offset> {
        // For an identifier at `point`, produce (point, closure) where the
        // closure resolves the identifier to the requested boundary.
        let generate_boundary_data = |left, right, point: Offset| {
            let classifier = map.buffer_snapshot.char_classifier_at(*from);
            // Pick the search matching the (outer, start/end) combination.
            let reach_boundary = if outer && boundary_kind == Boundary::Start {
                self.is_near_potential_outer_start(left, right, &classifier)
            } else if !outer && boundary_kind == Boundary::Start {
                self.is_near_potential_inner_start(left, right, &classifier)
            } else if outer && boundary_kind == Boundary::End {
                self.is_near_potential_outer_end(left, right, &classifier)
            } else {
                self.is_near_potential_inner_end(left, right, &classifier)
            };

            reach_boundary.map(|reach_start| (point, reach_start))
        };

        // Search both directions from `from`, resolve each identifier to its
        // boundary, then keep only boundaries on the requested side of `from`
        // (a boundary exactly at `from` counts for either direction).
        let forwards = try_find_boundary_data(map, from, generate_boundary_data);
        let backwards = try_find_preceding_boundary_data(map, from, generate_boundary_data);
        let boundaries = [forwards, backwards]
            .into_iter()
            .flatten()
            .filter_map(|(identifier, reach_boundary)| reach_boundary(identifier, map))
            .filter(|boundary| match boundary.cmp(&from) {
                Ordering::Equal => true,
                Ordering::Less => backward,
                Ordering::Greater => !backward,
            });
        // Of the remaining candidates, take the one closest to `from`.
        if backward {
            boundaries.max_by_key(|boundary| **boundary)
        } else {
            boundaries.min_by_key(|boundary| **boundary)
        }
    }
}
|
||||
|
||||
/// Which edge of a text object a boundary search targets.
#[derive(PartialEq)]
enum Boundary {
    /// The start edge of an object.
    Start,
    /// The end edge of an object.
    End,
}
|
||||
|
||||
impl BoundedObject for FuzzyBoundary {
    /// First object start at or after `from`.
    fn next_start(&self, map: &DisplaySnapshot, from: Offset, outer: bool) -> Option<Offset> {
        self.to_boundary(map, from, outer, /* backward */ false, Boundary::Start)
    }
    /// First object end at or after `from`.
    fn next_end(&self, map: &DisplaySnapshot, from: Offset, outer: bool) -> Option<Offset> {
        self.to_boundary(map, from, outer, /* backward */ false, Boundary::End)
    }
    /// Last object start at or before `from`.
    fn previous_start(&self, map: &DisplaySnapshot, from: Offset, outer: bool) -> Option<Offset> {
        self.to_boundary(map, from, outer, /* backward */ true, Boundary::Start)
    }
    /// Last object end at or before `from`.
    fn previous_end(&self, map: &DisplaySnapshot, from: Offset, outer: bool) -> Option<Offset> {
        self.to_boundary(map, from, outer, /* backward */ true, Boundary::End)
    }
    // Paragraphs and sentences never collapse to a zero-width inner range.
    fn inner_range_can_be_zero_width(&self) -> bool {
        false
    }
    // Fuzzy objects have no delimiters surrounding them on both sides.
    fn surround_on_both_sides(&self) -> bool {
        false
    }
    fn ambiguous_outer(&self) -> bool {
        false
    }
}
|
||||
|
||||
/// Returns the first boundary after or at `from` in text direction.
|
||||
/// The start and end of the file are the chars `'\0'`.
|
||||
fn try_find_boundary(
|
||||
map: &DisplaySnapshot,
|
||||
from: Offset,
|
||||
is_boundary: impl Fn(char, char) -> bool,
|
||||
) -> Option<Offset> {
|
||||
let boundary = try_find_boundary_data(map, from, |left, right, point| {
|
||||
if is_boundary(left, right) {
|
||||
Some(point)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})?;
|
||||
Some(boundary)
|
||||
}
|
||||
|
||||
/// Walks forward from `from` and returns the first `Some` produced by
/// `boundary_information`, which is called with each adjacent char pair
/// (left, right) and the offset between them.
/// The start and end of the file are the chars `'\0'`.
fn try_find_boundary_data<T>(
    map: &DisplaySnapshot,
    mut from: Offset,
    boundary_information: impl Fn(char, char, Offset) -> Option<T>,
) -> Option<T> {
    // Char immediately before `from`; '\0' stands in for the buffer start.
    let mut prev_ch = map
        .buffer_snapshot
        .reversed_chars_at(*from)
        .next()
        .unwrap_or('\0');

    // The trailing '\0' lets the callback observe the buffer end as the
    // right-hand char of the final pair.
    for ch in map.buffer_snapshot.chars_at(*from).chain(['\0']) {
        if let Some(boundary_information) = boundary_information(prev_ch, ch, from) {
            return Some(boundary_information);
        }
        // Advance past `ch`; offsets count UTF-8 bytes.
        *from += ch.len_utf8();
        prev_ch = ch;
    }

    None
}
|
||||
|
||||
/// Returns the first boundary after or at `from` in text direction.
|
||||
/// The start and end of the file are the chars `'\0'`.
|
||||
fn try_find_preceding_boundary(
|
||||
map: &DisplaySnapshot,
|
||||
from: Offset,
|
||||
is_boundary: impl Fn(char, char) -> bool,
|
||||
) -> Option<Offset> {
|
||||
let boundary = try_find_preceding_boundary_data(map, from, |left, right, point| {
|
||||
if is_boundary(left, right) {
|
||||
Some(point)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})?;
|
||||
Some(boundary)
|
||||
}
|
||||
|
||||
/// Returns some information about it (of type `T`) as soon as
|
||||
/// there is a boundary before or at `from` in opposite text direction
|
||||
/// The start and end of the file are the chars `'\0'`.
|
||||
fn try_find_preceding_boundary_data<T>(
|
||||
map: &DisplaySnapshot,
|
||||
mut from: Offset,
|
||||
is_boundary: impl Fn(char, char, Offset) -> Option<T>,
|
||||
) -> Option<T> {
|
||||
let mut prev_ch = map.buffer_snapshot.chars_at(*from).next().unwrap_or('\0');
|
||||
|
||||
for ch in map.buffer_snapshot.reversed_chars_at(*from).chain(['\0']) {
|
||||
if let Some(boundary_information) = is_boundary(ch, prev_ch, from) {
|
||||
return Some(boundary_information);
|
||||
}
|
||||
from.0 = from.0.saturating_sub(ch.len_utf8());
|
||||
prev_ch = ch;
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
/// True when `left` is the synthetic `'\0'` that the boundary walkers use to
/// represent the start of the buffer.
fn is_buffer_start(left: char) -> bool {
    matches!(left, '\0')
}
|
||||
|
||||
/// True when `right` is the synthetic `'\0'` that the boundary walkers use to
/// represent the end of the buffer.
fn is_buffer_end(right: char) -> bool {
    matches!(right, '\0')
}
|
||||
|
||||
fn is_word_start(left: char, right: char, classifier: &CharClassifier) -> bool {
|
||||
classifier.kind(left) != classifier.kind(right)
|
||||
&& classifier.kind(right) != CharKind::Whitespace
|
||||
}
|
||||
|
||||
fn is_word_end(left: char, right: char, classifier: &CharClassifier) -> bool {
|
||||
classifier.kind(left) != classifier.kind(right) && classifier.kind(left) != CharKind::Whitespace
|
||||
}
|
||||
|
||||
fn is_sentence_end(left: char, right: char, classifier: &CharClassifier) -> bool {
|
||||
const ENDS: [char; 1] = ['.'];
|
||||
|
||||
if classifier.kind(right) != CharKind::Whitespace {
|
||||
return false;
|
||||
}
|
||||
ENDS.into_iter().any(|end| left == end)
|
||||
}
|
||||
182
crates/vim/src/helix/object.rs
Normal file
182
crates/vim/src/helix/object.rs
Normal file
@@ -0,0 +1,182 @@
|
||||
use std::{
|
||||
error::Error,
|
||||
fmt::{self, Display},
|
||||
ops::Range,
|
||||
};
|
||||
|
||||
use editor::{DisplayPoint, display_map::DisplaySnapshot, movement};
|
||||
use text::Selection;
|
||||
|
||||
use crate::{
|
||||
helix::boundary::{FuzzyBoundary, ImmediateBoundary},
|
||||
object::Object as VimObject,
|
||||
};
|
||||
|
||||
/// A text object from helix or an extra one
pub trait HelixTextObject {
    /// The range of the object covering `relative_to`, if any.
    /// When `around` is true the outer range is returned (including the
    /// object's surrounding whitespace/delimiters), otherwise the inner one.
    fn range(
        &self,
        map: &DisplaySnapshot,
        relative_to: Range<DisplayPoint>,
        around: bool,
    ) -> Option<Range<DisplayPoint>>;

    /// The range of the next such object strictly ahead of `relative_to`,
    /// if any.
    fn next_range(
        &self,
        map: &DisplaySnapshot,
        relative_to: Range<DisplayPoint>,
        around: bool,
    ) -> Option<Range<DisplayPoint>>;

    /// The range of the previous such object strictly behind `relative_to`,
    /// if any.
    fn previous_range(
        &self,
        map: &DisplaySnapshot,
        relative_to: Range<DisplayPoint>,
        around: bool,
    ) -> Option<Range<DisplayPoint>>;
}
|
||||
|
||||
impl VimObject {
|
||||
/// Returns the range of the object the cursor is over.
|
||||
/// Follows helix convention.
|
||||
pub fn helix_range(
|
||||
self,
|
||||
map: &DisplaySnapshot,
|
||||
selection: Selection<DisplayPoint>,
|
||||
around: bool,
|
||||
) -> Result<Option<Range<DisplayPoint>>, VimToHelixError> {
|
||||
let cursor = cursor_range(&selection, map);
|
||||
if let Some(helix_object) = self.to_helix_object() {
|
||||
Ok(helix_object.range(map, cursor, around))
|
||||
} else {
|
||||
Err(VimToHelixError)
|
||||
}
|
||||
}
|
||||
/// Returns the range of the next object the cursor is not over.
|
||||
/// Follows helix convention.
|
||||
pub fn helix_next_range(
|
||||
self,
|
||||
map: &DisplaySnapshot,
|
||||
selection: Selection<DisplayPoint>,
|
||||
around: bool,
|
||||
) -> Result<Option<Range<DisplayPoint>>, VimToHelixError> {
|
||||
let cursor = cursor_range(&selection, map);
|
||||
if let Some(helix_object) = self.to_helix_object() {
|
||||
Ok(helix_object.next_range(map, cursor, around))
|
||||
} else {
|
||||
Err(VimToHelixError)
|
||||
}
|
||||
}
|
||||
/// Returns the range of the previous object the cursor is not over.
|
||||
/// Follows helix convention.
|
||||
pub fn helix_previous_range(
|
||||
self,
|
||||
map: &DisplaySnapshot,
|
||||
selection: Selection<DisplayPoint>,
|
||||
around: bool,
|
||||
) -> Result<Option<Range<DisplayPoint>>, VimToHelixError> {
|
||||
let cursor = cursor_range(&selection, map);
|
||||
if let Some(helix_object) = self.to_helix_object() {
|
||||
Ok(helix_object.previous_range(map, cursor, around))
|
||||
} else {
|
||||
Err(VimToHelixError)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Error returned when a vim text object has no implemented helix
/// equivalent.
#[derive(Debug)]
pub struct VimToHelixError;

impl Display for VimToHelixError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("Not all vim text objects have an implemented helix equivalent")
    }
}

impl Error for VimToHelixError {}
|
||||
|
||||
impl VimObject {
    /// Maps a vim text object onto its helix boundary-based implementation.
    /// Returns `None` for vim objects without an implemented helix
    /// equivalent.
    fn to_helix_object(self) -> Option<Box<dyn HelixTextObject>> {
        Some(match self {
            Self::AngleBrackets => Box::new(ImmediateBoundary::AngleBrackets),
            Self::BackQuotes => Box::new(ImmediateBoundary::BackQuotes),
            Self::CurlyBrackets => Box::new(ImmediateBoundary::CurlyBrackets),
            Self::DoubleQuotes => Box::new(ImmediateBoundary::DoubleQuotes),
            Self::Paragraph => Box::new(FuzzyBoundary::Paragraph),
            Self::Parentheses => Box::new(ImmediateBoundary::Parentheses),
            Self::Quotes => Box::new(ImmediateBoundary::SingleQuotes),
            Self::Sentence => Box::new(FuzzyBoundary::Sentence),
            Self::SquareBrackets => Box::new(ImmediateBoundary::SquareBrackets),
            Self::Subword { ignore_punctuation } => {
                Box::new(ImmediateBoundary::Subword { ignore_punctuation })
            }
            Self::VerticalBars => Box::new(ImmediateBoundary::VerticalBars),
            Self::Word { ignore_punctuation } => {
                Box::new(ImmediateBoundary::Word { ignore_punctuation })
            }
            // Any other vim object has no helix boundary implementation yet.
            _ => return None,
        })
    }
}
|
||||
|
||||
/// Returns the start of the cursor of a selection, whether that is collapsed or not.
|
||||
pub(crate) fn cursor_range(
|
||||
selection: &Selection<DisplayPoint>,
|
||||
map: &DisplaySnapshot,
|
||||
) -> Range<DisplayPoint> {
|
||||
if selection.is_empty() | selection.reversed {
|
||||
selection.head()..movement::right(map, selection.head())
|
||||
} else {
|
||||
movement::left(map, selection.head())..selection.head()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod test {
    use db::indoc;

    use crate::{state::Mode, test::VimTestContext};

    // In these fixtures `ˇ` marks a cursor position and `«…»` a selection
    // (the `ˇ` inside marks the selection's head).
    #[gpui::test]
    async fn test_select_word_object(cx: &mut gpui::TestAppContext) {
        let mut cx = VimTestContext::new(cx, true).await;
        // A mix of collapsed cursors and non-empty selections, plus a
        // trailing empty line.
        let start = indoc! {"
            The quick brˇowˇnˇ
            fox «ˇjumps» ov«er
            the laˇ»zy dogˇ

            "
        };

        cx.set_state(start, Mode::HelixNormal);

        // `m i w`: select the inner word under each cursor.
        cx.simulate_keystrokes("m i w");

        cx.assert_state(
            indoc! {"
                The quick «brownˇ»
                fox «jumpsˇ» over
                the «lazyˇ» dogˇ

                "
            },
            Mode::HelixNormal,
        );

        cx.set_state(start, Mode::HelixNormal);

        // `m a w`: select around each word, including adjacent whitespace.
        cx.simulate_keystrokes("m a w");

        cx.assert_state(
            indoc! {"
                The quick« brownˇ»
                fox «jumps ˇ»over
                the «lazy ˇ»dogˇ

                "
            },
            Mode::HelixNormal,
        );
    }
}
|
||||
84
crates/vim/src/helix/select.rs
Normal file
84
crates/vim/src/helix/select.rs
Normal file
@@ -0,0 +1,84 @@
|
||||
use text::SelectionGoal;
|
||||
use ui::{Context, Window};
|
||||
|
||||
use crate::{Vim, helix::object::cursor_range, object::Object};
|
||||
|
||||
impl Vim {
|
||||
/// Selects the object each cursor is over.
|
||||
/// Follows helix convention.
|
||||
pub fn select_current_object(
|
||||
&mut self,
|
||||
object: Object,
|
||||
around: bool,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.stop_recording(cx);
|
||||
self.update_editor(cx, |_, editor, cx| {
|
||||
editor.change_selections(Default::default(), window, cx, |s| {
|
||||
s.move_with(|map, selection| {
|
||||
let Some(range) = object
|
||||
.helix_range(map, selection.clone(), around)
|
||||
.unwrap_or({
|
||||
let vim_range = object.range(map, selection.clone(), around, None);
|
||||
vim_range.filter(|r| r.start <= cursor_range(selection, map).start)
|
||||
})
|
||||
else {
|
||||
return;
|
||||
};
|
||||
|
||||
selection.set_head_tail(range.end, range.start, SelectionGoal::None);
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/// Selects the next object from each cursor which the cursor is not over.
|
||||
/// Follows helix convention.
|
||||
pub fn select_next_object(
|
||||
&mut self,
|
||||
object: Object,
|
||||
around: bool,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.stop_recording(cx);
|
||||
self.update_editor(cx, |_, editor, cx| {
|
||||
editor.change_selections(Default::default(), window, cx, |s| {
|
||||
s.move_with(|map, selection| {
|
||||
let Ok(Some(range)) = object.helix_next_range(map, selection.clone(), around)
|
||||
else {
|
||||
return;
|
||||
};
|
||||
|
||||
selection.set_head_tail(range.end, range.start, SelectionGoal::None);
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/// Selects the previous object from each cursor which the cursor is not over.
|
||||
/// Follows helix convention.
|
||||
pub fn select_previous_object(
|
||||
&mut self,
|
||||
object: Object,
|
||||
around: bool,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.stop_recording(cx);
|
||||
self.update_editor(cx, |_, editor, cx| {
|
||||
editor.change_selections(Default::default(), window, cx, |s| {
|
||||
s.move_with(|map, selection| {
|
||||
let Ok(Some(range)) =
|
||||
object.helix_previous_range(map, selection.clone(), around)
|
||||
else {
|
||||
return;
|
||||
};
|
||||
|
||||
selection.set_head_tail(range.start, range.end, SelectionGoal::None);
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -495,10 +495,19 @@ impl Vim {
|
||||
self.replace_with_register_object(object, around, window, cx)
|
||||
}
|
||||
Some(Operator::Exchange) => self.exchange_object(object, around, window, cx),
|
||||
Some(Operator::HelixMatch) => {
|
||||
self.select_current_object(object, around, window, cx)
|
||||
}
|
||||
_ => {
|
||||
// Can't do anything for namespace operators. Ignoring
|
||||
}
|
||||
},
|
||||
Some(Operator::HelixNext { around }) => {
|
||||
self.select_next_object(object, around, window, cx);
|
||||
}
|
||||
Some(Operator::HelixPrevious { around }) => {
|
||||
self.select_previous_object(object, around, window, cx);
|
||||
}
|
||||
Some(Operator::DeleteSurrounds) => {
|
||||
waiting_operator = Some(Operator::DeleteSurrounds);
|
||||
}
|
||||
|
||||
@@ -397,11 +397,11 @@ impl Vim {
|
||||
let count = Self::take_count(cx);
|
||||
|
||||
match self.mode {
|
||||
Mode::Normal => self.normal_object(object, count, window, cx),
|
||||
Mode::Normal | Mode::HelixNormal => self.normal_object(object, count, window, cx),
|
||||
Mode::Visual | Mode::VisualLine | Mode::VisualBlock => {
|
||||
self.visual_object(object, count, window, cx)
|
||||
}
|
||||
Mode::Insert | Mode::Replace | Mode::HelixNormal => {
|
||||
Mode::Insert | Mode::Replace => {
|
||||
// Shouldn't execute a text object in insert mode. Ignoring
|
||||
}
|
||||
}
|
||||
@@ -1364,7 +1364,7 @@ fn is_sentence_end(map: &DisplaySnapshot, offset: usize) -> bool {
|
||||
|
||||
/// Expands the passed range to include whitespace on one side or the other in a line. Attempts to add the
|
||||
/// whitespace to the end first and falls back to the start if there was none.
|
||||
fn expand_to_include_whitespace(
|
||||
pub fn expand_to_include_whitespace(
|
||||
map: &DisplaySnapshot,
|
||||
range: Range<DisplayPoint>,
|
||||
stop_at_newline: bool,
|
||||
|
||||
@@ -134,6 +134,13 @@ pub enum Operator {
|
||||
ToggleComments,
|
||||
ReplaceWithRegister,
|
||||
Exchange,
|
||||
HelixMatch,
|
||||
HelixNext {
|
||||
around: bool,
|
||||
},
|
||||
HelixPrevious {
|
||||
around: bool,
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Default, Clone, Debug)]
|
||||
@@ -1020,6 +1027,9 @@ impl Operator {
|
||||
Operator::RecordRegister => "q",
|
||||
Operator::ReplayRegister => "@",
|
||||
Operator::ToggleComments => "gc",
|
||||
Operator::HelixMatch => "helix_m",
|
||||
Operator::HelixNext { .. } => "helix_next",
|
||||
Operator::HelixPrevious { .. } => "helix_previous",
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1041,6 +1051,9 @@ impl Operator {
|
||||
} => format!("^V{}", make_visible(prefix)),
|
||||
Operator::AutoIndent => "=".to_string(),
|
||||
Operator::ShellCommand => "=".to_string(),
|
||||
Operator::HelixMatch => "m".to_string(),
|
||||
Operator::HelixNext { .. } => "]".to_string(),
|
||||
Operator::HelixPrevious { .. } => "[".to_string(),
|
||||
_ => self.id().to_string(),
|
||||
}
|
||||
}
|
||||
@@ -1079,7 +1092,10 @@ impl Operator {
|
||||
| Operator::Object { .. }
|
||||
| Operator::ChangeSurrounds { target: None }
|
||||
| Operator::OppositeCase
|
||||
| Operator::ToggleComments => false,
|
||||
| Operator::ToggleComments
|
||||
| Operator::HelixMatch
|
||||
| Operator::HelixNext { .. }
|
||||
| Operator::HelixPrevious { .. } => false,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1103,7 +1119,9 @@ impl Operator {
|
||||
| Operator::AddSurrounds { target: None }
|
||||
| Operator::ChangeSurrounds { target: None }
|
||||
| Operator::DeleteSurrounds
|
||||
| Operator::Exchange => true,
|
||||
| Operator::Exchange
|
||||
| Operator::HelixNext { .. }
|
||||
| Operator::HelixPrevious { .. } => true,
|
||||
Operator::Yank
|
||||
| Operator::Object { .. }
|
||||
| Operator::FindForward { .. }
|
||||
@@ -1118,7 +1136,8 @@ impl Operator {
|
||||
| Operator::Jump { .. }
|
||||
| Operator::Register
|
||||
| Operator::RecordRegister
|
||||
| Operator::ReplayRegister => false,
|
||||
| Operator::ReplayRegister
|
||||
| Operator::HelixMatch => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -86,6 +86,22 @@ struct PushFindBackward {
|
||||
multiline: bool,
|
||||
}
|
||||
|
||||
#[derive(Clone, Deserialize, JsonSchema, PartialEq, Action)]
|
||||
#[action(namespace = vim)]
|
||||
#[serde(deny_unknown_fields)]
|
||||
/// Selects the next object.
|
||||
struct PushHelixNext {
|
||||
around: bool,
|
||||
}
|
||||
|
||||
#[derive(Clone, Deserialize, JsonSchema, PartialEq, Action)]
|
||||
#[action(namespace = vim)]
|
||||
#[serde(deny_unknown_fields)]
|
||||
/// Selects the previous object.
|
||||
struct PushHelixPrevious {
|
||||
around: bool,
|
||||
}
|
||||
|
||||
#[derive(Clone, Deserialize, JsonSchema, PartialEq, Action)]
|
||||
#[action(namespace = vim)]
|
||||
#[serde(deny_unknown_fields)]
|
||||
@@ -224,6 +240,8 @@ actions!(
|
||||
PushReplaceWithRegister,
|
||||
/// Toggles comments.
|
||||
PushToggleComments,
|
||||
/// Starts a match operation.
|
||||
PushHelixMatch,
|
||||
]
|
||||
);
|
||||
|
||||
@@ -761,6 +779,27 @@ impl Vim {
|
||||
Vim::action(editor, cx, |vim, _: &Enter, window, cx| {
|
||||
vim.input_ignored("\n".into(), window, cx)
|
||||
});
|
||||
Vim::action(editor, cx, |vim, _: &PushHelixMatch, window, cx| {
|
||||
vim.push_operator(Operator::HelixMatch, window, cx)
|
||||
});
|
||||
Vim::action(editor, cx, |vim, action: &PushHelixNext, window, cx| {
|
||||
vim.push_operator(
|
||||
Operator::HelixNext {
|
||||
around: action.around,
|
||||
},
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
});
|
||||
Vim::action(editor, cx, |vim, action: &PushHelixPrevious, window, cx| {
|
||||
vim.push_operator(
|
||||
Operator::HelixPrevious {
|
||||
around: action.around,
|
||||
},
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
});
|
||||
|
||||
normal::register(editor, cx);
|
||||
insert::register(editor, cx);
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user