Compare commits
71 Commits
html_trees...data-break
| Author | SHA1 | Date |
|---|---|---|
| | 61da7007b9 | |
| | 8867debd0a | |
| | a201263448 | |
| | 1a4ba59d4e | |
| | 24ad97008b | |
| | f6c81a0595 | |
| | 7ac51e4c82 | |
| | c8105863c8 | |
| | 9f72e05c40 | |
| | bb15f4c493 | |
| | dc6004066d | |
| | 2863bd1836 | |
| | f1ce83b533 | |
| | edc7d73643 | |
| | 4839195003 | |
| | 56eb650f09 | |
| | 61be869352 | |
| | 7537f0557f | |
| | 85740ddaa4 | |
| | 6550a96e15 | |
| | 93c0056065 | |
| | 84dd2366bc | |
| | 8e12eb0ab1 | |
| | 3b158461be | |
| | 12a8b850ef | |
| | d35f5a4197 | |
| | 1f5d57bece | |
| | ca9fb2399e | |
| | a360365410 | |
| | df6ee1fc4a | |
| | fc99557952 | |
| | 52c1e0021c | |
| | 6ead57d5ed | |
| | 4a10a0ca77 | |
| | cc6d4e3c62 | |
| | 3f7c8c97c2 | |
| | ab5ba66b94 | |
| | a20a534ecf | |
| | 5bb979820b | |
| | 2dee03ebca | |
| | 1c7cf1a5c1 | |
| | f15a241d3e | |
| | 76d3a9a0f0 | |
| | e6c473a488 | |
| | 06960670bd | |
| | 71ddb3dad4 | |
| | 2bf9c472bd | |
| | 82a06f0ca9 | |
| | cd6b1d32d0 | |
| | 5033a2aba0 | |
| | 0392ef10cf | |
| | 7354ef91e1 | |
| | 926d10cc45 | |
| | a7697be857 | |
| | 97392a23e3 | |
| | 3f40e0f433 | |
| | 3e6d5c0814 | |
| | 2bc91e8c59 | |
| | bbc80c78fd | |
| | 24ab5afa10 | |
| | af8acba353 | |
| | 231e9c2000 | |
| | 47b94e5ef0 | |
| | 29e2e13e6d | |
| | e635798fe0 | |
| | 6924720b35 | |
| | 1e8b50f471 | |
| | 5f8c53ffe8 | |
| | 6e82bbf367 | |
| | 0ac717c3a8 | |
| | 44aff7cd46 | |
210 Cargo.lock (generated)
Cargo.lock hunks (generated lockfile). They consist of dependency version bumps, the matching entry updates in dependents' dependency lists, and single-line additions or removals in workspace crates' dependency lists:

async-compression 0.4.21 -> 0.4.22
bitflags 2.8.0 -> 2.9.0 (with the corresponding "bitflags 2.8.0" -> "bitflags 2.9.0" entry updated in the dependency lists of several dozen packages)
blake3 1.6.1 -> 1.7.0
clap 4.5.32 -> 4.5.34, clap_builder 4.5.32 -> 4.5.34
image 0.25.5 -> 0.25.6
log 0.4.26 -> 0.4.27
oo7 0.4.1 -> 0.4.3
plist 1.7.0 -> 1.7.1
serde 1.0.218 -> 1.0.219, serde_derive 1.0.218 -> 1.0.219
time 0.3.40 -> 0.3.41, time-macros 0.2.21 -> 0.2.22
zed_html 0.1.6 -> 0.2.0

A few hunks also swap itertools 0.12.1 for 0.10.5 and heck 0.5.0 for 0.4.1 in bindgen- and prost-related dependency lists, and swap windows-targets 0.52.6 for 0.48.5 and windows-sys 0.59.0 for 0.48.0 in two dependency lists. Several workspace crates additionally gain or lose single entries in their [[package]] dependency lists, matching the Cargo.toml changes further down in this comparison.

@@ -470,6 +470,7 @@ mlua = { version = "0.10", features = ["lua54", "vendored", "async", "send"] }
nanoid = "0.4"
nbformat = { version = "0.10.0" }
nix = "0.29"
open = "5.0.0"
num-format = "0.4.4"
ordered-float = "2.1.1"
palette = { version = "0.7.5", default-features = false, features = ["std"] }

@@ -1,5 +0,0 @@
<svg width="14" height="14" viewBox="0 0 14 14" fill="none" xmlns="http://www.w3.org/2000/svg">
<path d="M5.27772 1.38585L4.39187 4.07909C4.34653 4.21692 4.26946 4.34219 4.16685 4.44479C4.06425 4.5474 3.93898 4.62447 3.80115 4.66981L1.10791 5.55566L3.80115 6.44151C3.93898 6.48685 4.06425 6.56392 4.16685 6.66653C4.26946 6.76913 4.34653 6.8944 4.39187 7.03223L5.27772 9.72547L6.16357 7.03223C6.20891 6.8944 6.28598 6.76913 6.38859 6.66653C6.49119 6.56392 6.61646 6.48685 6.7543 6.44151L9.44753 5.55566L6.7543 4.66981C6.61646 4.62447 6.49119 4.5474 6.38859 4.44479C6.28598 4.34219 6.20891 4.21692 6.16357 4.07909L5.27772 1.38585Z" fill="black" fill-opacity="0.15" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
<path d="M8.35938 12.3555C8.35938 12.0664 8.52734 11.8086 9.00781 11.3594L10.2031 10.2344C10.6094 9.85156 10.7891 9.60156 10.7891 9.34375C10.7891 9.05469 10.5781 8.85938 10.2734 8.85938C10.0391 8.85938 9.87109 8.95312 9.66406 9.21094C9.42578 9.50781 9.25391 9.60938 8.99219 9.60938C8.61719 9.60938 8.35156 9.35938 8.35156 9.01172C8.35156 8.25 9.26953 7.57812 10.3594 7.57812C11.4961 7.57812 12.3438 8.26172 12.3438 9.17969C12.3438 9.75391 12.0391 10.3008 11.418 10.8516L10.4961 11.6719V11.7344H11.8047C12.2578 11.7344 12.5391 11.9766 12.5391 12.3711C12.5391 12.7656 12.2656 13 11.8047 13H9.08203C8.65234 13 8.35938 12.7383 8.35938 12.3555Z" fill="black"/>
<path d="M11.0834 1.38585V3.71918M9.91675 2.55248H12.2501" stroke="black" stroke-opacity="0.75" stroke-width="1.25" stroke-linecap="round" stroke-linejoin="round"/>
</svg>

@@ -643,6 +643,7 @@
"name": "Code Writer",
"tools": {
"bash": true,
"batch-tool": true,
"copy-path": true,
"create-file": true,
"delete-path": true,
@@ -660,6 +661,7 @@
}
}
},
// Shows a notification when the agent needs confirmation before running an edit tool call or when that's concluded.
"notify_when_agent_waiting": true
},
// The settings for slash commands.

@@ -585,6 +585,13 @@ pub enum Thinking {
Enabled { budget_tokens: Option<u32> },
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(untagged)]
pub enum StringOrContents {
String(String),
Content(Vec<RequestContent>),
}

#[derive(Debug, Serialize, Deserialize)]
pub struct Request {
pub model: String,
@@ -597,7 +604,7 @@ pub struct Request {
#[serde(default, skip_serializing_if = "Option::is_none")]
pub tool_choice: Option<ToolChoice>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub system: Option<String>,
pub system: Option<StringOrContents>,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub metadata: Option<Metadata>,
#[serde(default, skip_serializing_if = "Vec::is_empty")]

@@ -15,7 +15,7 @@ use client::Client;
use command_palette_hooks::CommandPaletteFilter;
use feature_flags::FeatureFlagAppExt;
use fs::Fs;
use gpui::{actions, App, Global, UpdateGlobal};
use gpui::{actions, App, Global, ReadGlobal, UpdateGlobal};
use language_model::{
LanguageModelId, LanguageModelProviderId, LanguageModelRegistry, LanguageModelResponseMessage,
};
@@ -86,6 +86,10 @@ impl Assistant {
filter.show_namespace(Self::NAMESPACE);
});
}

pub fn enabled(cx: &App) -> bool {
Self::global(cx).enabled
}
}

pub fn init(
@@ -242,18 +246,6 @@ fn register_slash_commands(prompt_builder: Option<Arc<PromptBuilder>>, cx: &mut
.detach();
}

cx.observe_flag::<assistant_slash_commands::AutoSlashCommandFeatureFlag, _>({
let slash_command_registry = slash_command_registry.clone();
move |is_enabled, _cx| {
if is_enabled {
// [#auto-staff-ship] TODO remove this when /auto is no longer staff-shipped
slash_command_registry
.register_command(assistant_slash_commands::AutoCommand, true);
}
}
})
.detach();

cx.observe_flag::<assistant_slash_commands::StreamingExampleSlashCommandFeatureFlag, _>({
let slash_command_registry = slash_command_registry.clone();
move |is_enabled, _cx| {

@@ -1,4 +1,5 @@
use crate::assistant_configuration::{ConfigurationView, ConfigurationViewEvent};
use crate::Assistant;
use crate::{
terminal_inline_assistant::TerminalInlineAssistant, DeployHistory, InlineAssistant, NewChat,
};
@@ -58,8 +59,7 @@ pub fn init(cx: &mut App) {

cx.observe_new(
|terminal_panel: &mut TerminalPanel, _, cx: &mut Context<TerminalPanel>| {
let settings = AssistantSettings::get_global(cx);
terminal_panel.set_assistant_enabled(settings.enabled, cx);
terminal_panel.set_assistant_enabled(Assistant::enabled(cx), cx);
},
)
.detach();
@@ -342,12 +342,12 @@ impl AssistantPanel {
window: &mut Window,
cx: &mut Context<Workspace>,
) {
let settings = AssistantSettings::get_global(cx);
if !settings.enabled {
return;
if workspace
.panel::<Self>(cx)
.is_some_and(|panel| panel.read(cx).enabled(cx))
{
workspace.toggle_panel_focus::<Self>(window, cx);
}

workspace.toggle_panel_focus::<Self>(window, cx);
}

fn watch_client_status(
@@ -595,12 +595,10 @@ impl AssistantPanel {
window: &mut Window,
cx: &mut Context<Workspace>,
) {
let settings = AssistantSettings::get_global(cx);
if !settings.enabled {
return;
}

let Some(assistant_panel) = workspace.panel::<AssistantPanel>(cx) else {
let Some(assistant_panel) = workspace
.panel::<AssistantPanel>(cx)
.filter(|panel| panel.read(cx).enabled(cx))
else {
return;
};

@@ -1298,12 +1296,8 @@ impl Panel for AssistantPanel {
}

fn icon(&self, _: &Window, cx: &App) -> Option<IconName> {
let settings = AssistantSettings::get_global(cx);
if !settings.enabled || !settings.button {
return None;
}

Some(IconName::ZedAssistant)
(self.enabled(cx) && AssistantSettings::get_global(cx).button)
.then_some(IconName::ZedAssistant)
}

fn icon_tooltip(&self, _: &Window, _: &App) -> Option<&'static str> {
@@ -1317,6 +1311,10 @@ impl Panel for AssistantPanel {
fn activation_priority(&self) -> u32 {
4
}

fn enabled(&self, cx: &App) -> bool {
Assistant::enabled(cx)
}
}

impl EventEmitter<PanelEvent> for AssistantPanel {}

@@ -1,5 +1,6 @@
use crate::{
AssistantPanel, AssistantPanelEvent, CycleNextInlineAssist, CyclePreviousInlineAssist,
Assistant, AssistantPanel, AssistantPanelEvent, CycleNextInlineAssist,
CyclePreviousInlineAssist,
};
use anyhow::{anyhow, Context as _, Result};
use assistant_context_editor::{humanize_token_count, RequestType};
@@ -3555,7 +3556,7 @@ impl CodeActionProvider for AssistantCodeActionProvider {
_: &mut Window,
cx: &mut App,
) -> Task<Result<Vec<CodeAction>>> {
if !AssistantSettings::get_global(cx).enabled {
if !Assistant::enabled(cx) {
return Task::ready(Ok(Vec::new()));
}

@@ -3712,7 +3713,7 @@ mod tests {
language_settings, tree_sitter_rust, Buffer, Language, LanguageConfig, LanguageMatcher,
Point,
};
use language_model::LanguageModelRegistry;
use language_model::{LanguageModelRegistry, TokenUsage};
use rand::prelude::*;
use serde::Serialize;
use settings::SettingsStore;
@@ -4091,6 +4092,7 @@ mod tests {
future::ready(Ok(LanguageModelTextStream {
message_id: None,
stream: chunks_rx.map(Ok).boxed(),
last_token_usage: Arc::new(Mutex::new(TokenUsage::default())),
})),
cx,
);

@@ -31,6 +31,7 @@ clock.workspace = true
collections.workspace = true
command_palette_hooks.workspace = true
context_server.workspace = true
convert_case.workspace = true
db.workspace = true
editor.workspace = true
feature_flags.workspace = true
@@ -62,7 +63,6 @@ prompt_library.workspace = true
prompt_store.workspace = true
proto.workspace = true
release_channel.workspace = true
regex.workspace = true
rope.workspace = true
serde.workspace = true
serde_json.workspace = true

@@ -47,6 +47,7 @@ pub struct ActiveThread {
last_error: Option<ThreadError>,
pop_ups: Vec<WindowHandle<ToolReadyPopUp>>,
_subscriptions: Vec<Subscription>,
pop_up_subscriptions: HashMap<WindowHandle<ToolReadyPopUp>, Vec<Subscription>>,
}

struct RenderedMessage {
@@ -253,6 +254,7 @@ impl ActiveThread {
last_error: None,
pop_ups: Vec::new(),
_subscriptions: subscriptions,
pop_up_subscriptions: HashMap::default(),
};

for message in thread.read(cx).messages().cloned().collect::<Vec<_>>() {
@@ -381,7 +383,7 @@ impl ActiveThread {
ThreadEvent::DoneStreaming => {
if !self.thread().read(cx).is_generating() {
self.show_notification(
"Your changes have been applied.",
"The assistant response has concluded.",
IconName::Check,
Color::Success,
window,
@@ -548,42 +550,64 @@ impl ActiveThread {
.log_err()
{
if let Some(pop_up) = screen_window.entity(cx).log_err() {
cx.subscribe_in(&pop_up, window, {
|this, _, event, window, cx| match event {
ToolReadyPopupEvent::Accepted => {
let handle = window.window_handle();
cx.activate(true); // Switch back to the Zed application
self.pop_up_subscriptions
.entry(screen_window)
.or_insert_with(Vec::new)
.push(cx.subscribe_in(&pop_up, window, {
|this, _, event, window, cx| match event {
ToolReadyPopupEvent::Accepted => {
let handle = window.window_handle();
cx.activate(true); // Switch back to the Zed application

let workspace_handle = this.workspace.clone();
let workspace_handle = this.workspace.clone();

// If there are multiple Zed windows, activate the correct one.
cx.defer(move |cx| {
handle
.update(cx, |_view, window, _cx| {
window.activate_window();
// If there are multiple Zed windows, activate the correct one.
cx.defer(move |cx| {
handle
.update(cx, |_view, window, _cx| {
window.activate_window();

if let Some(workspace) = workspace_handle.upgrade()
{
workspace.update(_cx, |workspace, cx| {
workspace.focus_panel::<AssistantPanel>(
window, cx,
);
});
}
})
.log_err();
});
if let Some(workspace) =
workspace_handle.upgrade()
{
workspace.update(_cx, |workspace, cx| {
workspace
.focus_panel::<AssistantPanel>(
window, cx,
);
});
}
})
.log_err();
});

this.dismiss_notifications(cx);
this.dismiss_notifications(cx);
}
ToolReadyPopupEvent::Dismissed => {
this.dismiss_notifications(cx);
}
}
ToolReadyPopupEvent::Dismissed => {
this.dismiss_notifications(cx);
}
}
})
.detach();
}));

self.pop_ups.push(screen_window);

// If the user manually refocuses the original window, dismiss the popup.
self.pop_up_subscriptions
.entry(screen_window)
.or_insert_with(Vec::new)
.push({
let pop_up_weak = pop_up.downgrade();

cx.observe_window_activation(window, move |_, window, cx| {
if window.is_window_active() {
if let Some(pop_up) = pop_up_weak.upgrade() {
pop_up.update(cx, |_, cx| {
cx.emit(ToolReadyPopupEvent::Dismissed);
});
}
}
})
});
}
}
}
@@ -1750,6 +1774,8 @@ impl ActiveThread {
window.remove_window();
})
.ok();

self.pop_up_subscriptions.remove(&window);
}
}

@@ -32,7 +32,7 @@ use prompt_store::PromptBuilder;
use settings::Settings as _;

pub use crate::active_thread::ActiveThread;
use crate::assistant_configuration::AddContextServerModal;
use crate::assistant_configuration::{AddContextServerModal, ManageProfilesModal};
pub use crate::assistant_panel::{AssistantPanel, ConcreteAssistantPanelDelegate};
pub use crate::inline_assistant::InlineAssistant;
pub use crate::thread::{Message, RequestKind, Thread, ThreadEvent};
@@ -47,6 +47,7 @@ actions!(
RemoveAllContext,
OpenHistory,
OpenConfiguration,
ManageProfiles,
AddContextServer,
RemoveSelectedThread,
Chat,
@@ -89,6 +90,7 @@ pub fn init(
cx,
);
cx.observe_new(AddContextServerModal::register).detach();
cx.observe_new(ManageProfilesModal::register).detach();

feature_gate_assistant2_actions(cx);
}

@@ -1,4 +1,7 @@
mod add_context_server_modal;
mod manage_profiles_modal;
mod profile_picker;
mod tool_picker;

use std::sync::Arc;

@@ -12,6 +15,7 @@ use util::ResultExt as _;
use zed_actions::ExtensionCategoryFilter;

pub(crate) use add_context_server_modal::AddContextServerModal;
pub(crate) use manage_profiles_modal::ManageProfilesModal;

use crate::AddContextServer;

@@ -0,0 +1,454 @@
mod profile_modal_header;

use std::sync::Arc;

use assistant_settings::{
AgentProfile, AgentProfileContent, AssistantSettings, AssistantSettingsContent,
ContextServerPresetContent, VersionedAssistantSettingsContent,
};
use assistant_tool::ToolWorkingSet;
use convert_case::{Case, Casing as _};
use editor::Editor;
use fs::Fs;
use gpui::{prelude::*, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Subscription};
use settings::{update_settings_file, Settings as _};
use ui::{prelude::*, ListItem, ListItemSpacing, ListSeparator, Navigable, NavigableEntry};
use workspace::{ModalView, Workspace};

use crate::assistant_configuration::manage_profiles_modal::profile_modal_header::ProfileModalHeader;
use crate::assistant_configuration::profile_picker::{ProfilePicker, ProfilePickerDelegate};
use crate::assistant_configuration::tool_picker::{ToolPicker, ToolPickerDelegate};
use crate::{AssistantPanel, ManageProfiles};

enum Mode {
ChooseProfile {
profile_picker: Entity<ProfilePicker>,
_subscription: Subscription,
},
NewProfile(NewProfileMode),
ViewProfile(ViewProfileMode),
ConfigureTools {
profile_id: Arc<str>,
tool_picker: Entity<ToolPicker>,
_subscription: Subscription,
},
}

impl Mode {
pub fn choose_profile(window: &mut Window, cx: &mut Context<ManageProfilesModal>) -> Self {
let this = cx.entity();

let profile_picker = cx.new(|cx| {
let delegate = ProfilePickerDelegate::new(
move |profile_id, window, cx| {
this.update(cx, |this, cx| {
this.view_profile(profile_id.clone(), window, cx);
})
},
cx,
);
ProfilePicker::new(delegate, window, cx)
});
let dismiss_subscription = cx.subscribe_in(
&profile_picker,
window,
|_this, _profile_picker, _: &DismissEvent, _window, cx| {
cx.emit(DismissEvent);
},
);

Self::ChooseProfile {
profile_picker,
_subscription: dismiss_subscription,
}
}
}

#[derive(Clone)]
pub struct ViewProfileMode {
profile_id: Arc<str>,
fork_profile: NavigableEntry,
configure_tools: NavigableEntry,
}

#[derive(Clone)]
pub struct NewProfileMode {
name_editor: Entity<Editor>,
base_profile_id: Option<Arc<str>>,
}

pub struct ManageProfilesModal {
fs: Arc<dyn Fs>,
tools: Arc<ToolWorkingSet>,
focus_handle: FocusHandle,
mode: Mode,
}

impl ManageProfilesModal {
pub fn register(
workspace: &mut Workspace,
_window: Option<&mut Window>,
_cx: &mut Context<Workspace>,
) {
workspace.register_action(|workspace, _: &ManageProfiles, window, cx| {
if let Some(panel) = workspace.panel::<AssistantPanel>(cx) {
let fs = workspace.app_state().fs.clone();
let thread_store = panel.read(cx).thread_store().read(cx);
let tools = thread_store.tools();
workspace.toggle_modal(window, cx, |window, cx| Self::new(fs, tools, window, cx))
}
});
}

pub fn new(
fs: Arc<dyn Fs>,
tools: Arc<ToolWorkingSet>,
window: &mut Window,
cx: &mut Context<Self>,
) -> Self {
let focus_handle = cx.focus_handle();

Self {
fs,
tools,
focus_handle,
mode: Mode::choose_profile(window, cx),
}
}

fn choose_profile(&mut self, window: &mut Window, cx: &mut Context<Self>) {
self.mode = Mode::choose_profile(window, cx);
self.focus_handle(cx).focus(window);
}

fn new_profile(
&mut self,
base_profile_id: Option<Arc<str>>,
window: &mut Window,
cx: &mut Context<Self>,
) {
let name_editor = cx.new(|cx| Editor::single_line(window, cx));
name_editor.update(cx, |editor, cx| {
editor.set_placeholder_text("Profile name", cx);
});

self.mode = Mode::NewProfile(NewProfileMode {
name_editor,
base_profile_id,
});
self.focus_handle(cx).focus(window);
}

pub fn view_profile(
&mut self,
profile_id: Arc<str>,
window: &mut Window,
cx: &mut Context<Self>,
) {
self.mode = Mode::ViewProfile(ViewProfileMode {
profile_id,
fork_profile: NavigableEntry::focusable(cx),
configure_tools: NavigableEntry::focusable(cx),
});
self.focus_handle(cx).focus(window);
}

fn configure_tools(
&mut self,
profile_id: Arc<str>,
window: &mut Window,
cx: &mut Context<Self>,
) {
let settings = AssistantSettings::get_global(cx);
let Some(profile) = settings.profiles.get(&profile_id).cloned() else {
return;
};

let tool_picker = cx.new(|cx| {
let delegate = ToolPickerDelegate::new(
self.fs.clone(),
self.tools.clone(),
profile_id.clone(),
profile,
cx,
);
ToolPicker::new(delegate, window, cx)
});
let dismiss_subscription = cx.subscribe_in(&tool_picker, window, {
let profile_id = profile_id.clone();
move |this, _tool_picker, _: &DismissEvent, window, cx| {
this.view_profile(profile_id.clone(), window, cx);
}
});

self.mode = Mode::ConfigureTools {
profile_id,
tool_picker,
_subscription: dismiss_subscription,
};
self.focus_handle(cx).focus(window);
}

fn confirm(&mut self, window: &mut Window, cx: &mut Context<Self>) {
match &self.mode {
Mode::ChooseProfile { .. } => {}
Mode::NewProfile(mode) => {
let settings = AssistantSettings::get_global(cx);

let base_profile = mode
.base_profile_id
.as_ref()
.and_then(|profile_id| settings.profiles.get(profile_id).cloned());

let name = mode.name_editor.read(cx).text(cx);
let profile_id: Arc<str> = name.to_case(Case::Kebab).into();

let profile = AgentProfile {
name: name.into(),
tools: base_profile
.as_ref()
.map(|profile| profile.tools.clone())
.unwrap_or_default(),
context_servers: base_profile
.map(|profile| profile.context_servers)
.unwrap_or_default(),
};

self.create_profile(profile_id.clone(), profile, cx);
self.view_profile(profile_id, window, cx);
}
Mode::ViewProfile(_) => {}
Mode::ConfigureTools { .. } => {}
}
}

fn cancel(&mut self, window: &mut Window, cx: &mut Context<Self>) {
match &self.mode {
Mode::ChooseProfile { .. } => {}
Mode::NewProfile(mode) => {
if let Some(profile_id) = mode.base_profile_id.clone() {
self.view_profile(profile_id, window, cx);
} else {
self.choose_profile(window, cx);
}
}
Mode::ViewProfile(_) => self.choose_profile(window, cx),
Mode::ConfigureTools { .. } => {}
}
}

fn create_profile(&self, profile_id: Arc<str>, profile: AgentProfile, cx: &mut Context<Self>) {
update_settings_file::<AssistantSettings>(self.fs.clone(), cx, {
move |settings, _cx| match settings {
AssistantSettingsContent::Versioned(VersionedAssistantSettingsContent::V2(
settings,
)) => {
let profiles = settings.profiles.get_or_insert_default();
if profiles.contains_key(&profile_id) {
log::error!("profile with ID '{profile_id}' already exists");
return;
}

profiles.insert(
profile_id,
AgentProfileContent {
name: profile.name.into(),
tools: profile.tools,
context_servers: profile
.context_servers
.into_iter()
.map(|(server_id, preset)| {
(
server_id,
ContextServerPresetContent {
tools: preset.tools,
},
)
})
.collect(),
},
);
}
_ => {}
}
});
}
}

impl ModalView for ManageProfilesModal {}

impl Focusable for ManageProfilesModal {
fn focus_handle(&self, cx: &App) -> FocusHandle {
match &self.mode {
Mode::ChooseProfile { profile_picker, .. } => profile_picker.focus_handle(cx),
Mode::NewProfile(mode) => mode.name_editor.focus_handle(cx),
Mode::ViewProfile(_) => self.focus_handle.clone(),
Mode::ConfigureTools { tool_picker, .. } => tool_picker.focus_handle(cx),
}
}
}

impl EventEmitter<DismissEvent> for ManageProfilesModal {}

impl ManageProfilesModal {
fn render_new_profile(
&mut self,
mode: NewProfileMode,
_window: &mut Window,
cx: &mut Context<Self>,
) -> impl IntoElement {
v_flex()
.id("new-profile")
.track_focus(&self.focus_handle(cx))
.child(h_flex().p_2().child(mode.name_editor.clone()))
}

fn render_view_profile(
&mut self,
mode: ViewProfileMode,
window: &mut Window,
cx: &mut Context<Self>,
) -> impl IntoElement {
let settings = AssistantSettings::get_global(cx);

let profile_name = settings
.profiles
.get(&mode.profile_id)
.map(|profile| profile.name.clone())
.unwrap_or_else(|| "Unknown".into());

Navigable::new(
div()
.track_focus(&self.focus_handle(cx))
.size_full()
.child(ProfileModalHeader::new(
profile_name,
IconName::ZedAssistant,
))
.child(
v_flex()
.pb_1()
.child(ListSeparator)
.child(
div()
.id("fork-profile")
.track_focus(&mode.fork_profile.focus_handle)
.on_action({
let profile_id = mode.profile_id.clone();
cx.listener(move |this, _: &menu::Confirm, window, cx| {
this.new_profile(Some(profile_id.clone()), window, cx);
})
})
.child(
ListItem::new("fork-profile")
.toggle_state(
mode.fork_profile
.focus_handle
.contains_focused(window, cx),
)
.inset(true)
.spacing(ListItemSpacing::Sparse)
.start_slot(Icon::new(IconName::GitBranch))
.child(Label::new("Fork Profile"))
.on_click({
let profile_id = mode.profile_id.clone();
cx.listener(move |this, _, window, cx| {
this.new_profile(
Some(profile_id.clone()),
window,
cx,
);
})
}),
),
)
.child(
div()
.id("configure-tools")
.track_focus(&mode.configure_tools.focus_handle)
.on_action({
let profile_id = mode.profile_id.clone();
cx.listener(move |this, _: &menu::Confirm, window, cx| {
this.configure_tools(profile_id.clone(), window, cx);
})
})
.child(
ListItem::new("configure-tools")
.toggle_state(
mode.configure_tools
.focus_handle
.contains_focused(window, cx),
)
.inset(true)
.spacing(ListItemSpacing::Sparse)
.start_slot(Icon::new(IconName::Cog))
.child(Label::new("Configure Tools"))
.on_click({
let profile_id = mode.profile_id.clone();
cx.listener(move |this, _, window, cx| {
this.configure_tools(
profile_id.clone(),
window,
cx,
);
})
}),
),
),
)
.into_any_element(),
)
.entry(mode.fork_profile)
.entry(mode.configure_tools)
}
}

impl Render for ManageProfilesModal {
fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
let settings = AssistantSettings::get_global(cx);

div()
.elevation_3(cx)
.w(rems(34.))
.key_context("ManageProfilesModal")
.on_action(cx.listener(|this, _: &menu::Cancel, window, cx| this.cancel(window, cx)))
.on_action(cx.listener(|this, _: &menu::Confirm, window, cx| this.confirm(window, cx)))
.capture_any_mouse_down(cx.listener(|this, _, window, cx| {
this.focus_handle(cx).focus(window);
}))
.on_mouse_down_out(cx.listener(|_this, _, _, cx| cx.emit(DismissEvent)))
.child(match &self.mode {
Mode::ChooseProfile { profile_picker, .. } => div()
.child(ProfileModalHeader::new("Profiles", IconName::ZedAssistant))
.child(ListSeparator)
.child(profile_picker.clone())
.into_any_element(),
Mode::NewProfile(mode) => self
.render_new_profile(mode.clone(), window, cx)
.into_any_element(),
|
||||
Mode::ViewProfile(mode) => self
|
||||
.render_view_profile(mode.clone(), window, cx)
|
||||
.into_any_element(),
|
||||
Mode::ConfigureTools {
|
||||
profile_id,
|
||||
tool_picker,
|
||||
..
|
||||
} => {
|
||||
let profile_name = settings
|
||||
.profiles
|
||||
.get(profile_id)
|
||||
.map(|profile| profile.name.clone())
|
||||
.unwrap_or_else(|| "Unknown".into());
|
||||
|
||||
div()
|
||||
.child(ProfileModalHeader::new(
|
||||
format!("{profile_name}: Configure Tools"),
|
||||
IconName::Cog,
|
||||
))
|
||||
.child(ListSeparator)
|
||||
.child(tool_picker.clone())
|
||||
.into_any_element()
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,38 @@
use ui::prelude::*;

#[derive(IntoElement)]
pub struct ProfileModalHeader {
    label: SharedString,
    icon: IconName,
}

impl ProfileModalHeader {
    pub fn new(label: impl Into<SharedString>, icon: IconName) -> Self {
        Self {
            label: label.into(),
            icon,
        }
    }
}

impl RenderOnce for ProfileModalHeader {
    fn render(self, _window: &mut Window, cx: &mut App) -> impl IntoElement {
        h_flex()
            .w_full()
            .px(DynamicSpacing::Base12.rems(cx))
            .pt(DynamicSpacing::Base08.rems(cx))
            .pb(DynamicSpacing::Base04.rems(cx))
            .rounded_t_sm()
            .gap_1p5()
            .child(Icon::new(self.icon).size(IconSize::XSmall))
            .child(
                h_flex().gap_1().overflow_x_hidden().child(
                    div()
                        .max_w_96()
                        .overflow_x_hidden()
                        .text_ellipsis()
                        .child(Headline::new(self.label).size(HeadlineSize::XSmall)),
                ),
            )
    }
}
194 crates/assistant2/src/assistant_configuration/profile_picker.rs Normal file
@@ -0,0 +1,194 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use assistant_settings::AssistantSettings;
|
||||
use fuzzy::{match_strings, StringMatch, StringMatchCandidate};
|
||||
use gpui::{
|
||||
App, Context, DismissEvent, Entity, EventEmitter, Focusable, SharedString, Task, WeakEntity,
|
||||
Window,
|
||||
};
|
||||
use picker::{Picker, PickerDelegate};
|
||||
use settings::Settings;
|
||||
use ui::{prelude::*, HighlightedLabel, ListItem, ListItemSpacing};
|
||||
use util::ResultExt as _;
|
||||
|
||||
pub struct ProfilePicker {
|
||||
picker: Entity<Picker<ProfilePickerDelegate>>,
|
||||
}
|
||||
|
||||
impl ProfilePicker {
|
||||
pub fn new(
|
||||
delegate: ProfilePickerDelegate,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
let picker = cx.new(|cx| Picker::uniform_list(delegate, window, cx).modal(false));
|
||||
Self { picker }
|
||||
}
|
||||
}
|
||||
|
||||
impl EventEmitter<DismissEvent> for ProfilePicker {}
|
||||
|
||||
impl Focusable for ProfilePicker {
|
||||
fn focus_handle(&self, cx: &App) -> gpui::FocusHandle {
|
||||
self.picker.focus_handle(cx)
|
||||
}
|
||||
}
|
||||
|
||||
impl Render for ProfilePicker {
|
||||
fn render(&mut self, _window: &mut Window, _cx: &mut Context<Self>) -> impl IntoElement {
|
||||
v_flex().w(rems(34.)).child(self.picker.clone())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct ProfileEntry {
|
||||
pub id: Arc<str>,
|
||||
pub name: SharedString,
|
||||
}
|
||||
|
||||
pub struct ProfilePickerDelegate {
|
||||
profile_picker: WeakEntity<ProfilePicker>,
|
||||
profiles: Vec<ProfileEntry>,
|
||||
matches: Vec<StringMatch>,
|
||||
selected_index: usize,
|
||||
on_confirm: Arc<dyn Fn(&Arc<str>, &mut Window, &mut App) + 'static>,
|
||||
}
|
||||
|
||||
impl ProfilePickerDelegate {
|
||||
pub fn new(
|
||||
on_confirm: impl Fn(&Arc<str>, &mut Window, &mut App) + 'static,
|
||||
cx: &mut Context<ProfilePicker>,
|
||||
) -> Self {
|
||||
let settings = AssistantSettings::get_global(cx);
|
||||
|
||||
let profiles = settings
|
||||
.profiles
|
||||
.iter()
|
||||
.map(|(id, profile)| ProfileEntry {
|
||||
id: id.clone(),
|
||||
name: profile.name.clone(),
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
Self {
|
||||
profile_picker: cx.entity().downgrade(),
|
||||
profiles,
|
||||
matches: Vec::new(),
|
||||
selected_index: 0,
|
||||
on_confirm: Arc::new(on_confirm),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl PickerDelegate for ProfilePickerDelegate {
|
||||
type ListItem = ListItem;
|
||||
|
||||
fn match_count(&self) -> usize {
|
||||
self.matches.len()
|
||||
}
|
||||
|
||||
fn selected_index(&self) -> usize {
|
||||
self.selected_index
|
||||
}
|
||||
|
||||
fn set_selected_index(
|
||||
&mut self,
|
||||
ix: usize,
|
||||
_window: &mut Window,
|
||||
_cx: &mut Context<Picker<Self>>,
|
||||
) {
|
||||
self.selected_index = ix;
|
||||
}
|
||||
|
||||
fn placeholder_text(&self, _window: &mut Window, _cx: &mut App) -> Arc<str> {
|
||||
"Search profiles…".into()
|
||||
}
|
||||
|
||||
fn update_matches(
|
||||
&mut self,
|
||||
query: String,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Picker<Self>>,
|
||||
) -> Task<()> {
|
||||
let background = cx.background_executor().clone();
|
||||
let candidates = self
|
||||
.profiles
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(id, profile)| StringMatchCandidate::new(id, profile.name.as_ref()))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
cx.spawn_in(window, async move |this, cx| {
|
||||
let matches = if query.is_empty() {
|
||||
candidates
|
||||
.into_iter()
|
||||
.enumerate()
|
||||
.map(|(index, candidate)| StringMatch {
|
||||
candidate_id: index,
|
||||
string: candidate.string,
|
||||
positions: Vec::new(),
|
||||
score: 0.,
|
||||
})
|
||||
.collect()
|
||||
} else {
|
||||
match_strings(
|
||||
&candidates,
|
||||
&query,
|
||||
false,
|
||||
100,
|
||||
&Default::default(),
|
||||
background,
|
||||
)
|
||||
.await
|
||||
};
|
||||
|
||||
this.update(cx, |this, _cx| {
|
||||
this.delegate.matches = matches;
|
||||
this.delegate.selected_index = this
|
||||
.delegate
|
||||
.selected_index
|
||||
.min(this.delegate.matches.len().saturating_sub(1));
|
||||
})
|
||||
.log_err();
|
||||
})
|
||||
}
|
||||
|
||||
fn confirm(&mut self, _secondary: bool, window: &mut Window, cx: &mut Context<Picker<Self>>) {
|
||||
if self.matches.is_empty() {
|
||||
self.dismissed(window, cx);
|
||||
return;
|
||||
}
|
||||
|
||||
let candidate_id = self.matches[self.selected_index].candidate_id;
|
||||
let profile = &self.profiles[candidate_id];
|
||||
|
||||
(self.on_confirm)(&profile.id, window, cx);
|
||||
}
|
||||
|
||||
fn dismissed(&mut self, _window: &mut Window, cx: &mut Context<Picker<Self>>) {
|
||||
self.profile_picker
|
||||
.update(cx, |_this, cx| cx.emit(DismissEvent))
|
||||
.log_err();
|
||||
}
|
||||
|
||||
fn render_match(
|
||||
&self,
|
||||
ix: usize,
|
||||
selected: bool,
|
||||
_window: &mut Window,
|
||||
_cx: &mut Context<Picker<Self>>,
|
||||
) -> Option<Self::ListItem> {
|
||||
let profile_match = &self.matches[ix];
|
||||
|
||||
Some(
|
||||
ListItem::new(ix)
|
||||
.inset(true)
|
||||
.spacing(ListItemSpacing::Sparse)
|
||||
.toggle_state(selected)
|
||||
.child(HighlightedLabel::new(
|
||||
profile_match.string.clone(),
|
||||
profile_match.positions.clone(),
|
||||
)),
|
||||
)
|
||||
}
|
||||
}
|
||||
285 crates/assistant2/src/assistant_configuration/tool_picker.rs Normal file
@@ -0,0 +1,285 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use assistant_settings::{
|
||||
AgentProfile, AgentProfileContent, AssistantSettings, AssistantSettingsContent,
|
||||
ContextServerPresetContent, VersionedAssistantSettingsContent,
|
||||
};
|
||||
use assistant_tool::{ToolSource, ToolWorkingSet};
|
||||
use fs::Fs;
|
||||
use fuzzy::{match_strings, StringMatch, StringMatchCandidate};
|
||||
use gpui::{App, Context, DismissEvent, Entity, EventEmitter, Focusable, Task, WeakEntity, Window};
|
||||
use picker::{Picker, PickerDelegate};
|
||||
use settings::update_settings_file;
|
||||
use ui::{prelude::*, HighlightedLabel, ListItem, ListItemSpacing};
|
||||
use util::ResultExt as _;
|
||||
|
||||
pub struct ToolPicker {
|
||||
picker: Entity<Picker<ToolPickerDelegate>>,
|
||||
}
|
||||
|
||||
impl ToolPicker {
|
||||
pub fn new(delegate: ToolPickerDelegate, window: &mut Window, cx: &mut Context<Self>) -> Self {
|
||||
let picker = cx.new(|cx| Picker::uniform_list(delegate, window, cx).modal(false));
|
||||
Self { picker }
|
||||
}
|
||||
}
|
||||
|
||||
impl EventEmitter<DismissEvent> for ToolPicker {}
|
||||
|
||||
impl Focusable for ToolPicker {
|
||||
fn focus_handle(&self, cx: &App) -> gpui::FocusHandle {
|
||||
self.picker.focus_handle(cx)
|
||||
}
|
||||
}
|
||||
|
||||
impl Render for ToolPicker {
|
||||
fn render(&mut self, _window: &mut Window, _cx: &mut Context<Self>) -> impl IntoElement {
|
||||
v_flex().w(rems(34.)).child(self.picker.clone())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct ToolEntry {
|
||||
pub name: Arc<str>,
|
||||
pub source: ToolSource,
|
||||
}
|
||||
|
||||
pub struct ToolPickerDelegate {
|
||||
tool_picker: WeakEntity<ToolPicker>,
|
||||
fs: Arc<dyn Fs>,
|
||||
tools: Vec<ToolEntry>,
|
||||
profile_id: Arc<str>,
|
||||
profile: AgentProfile,
|
||||
matches: Vec<StringMatch>,
|
||||
selected_index: usize,
|
||||
}
|
||||
|
||||
impl ToolPickerDelegate {
|
||||
pub fn new(
|
||||
fs: Arc<dyn Fs>,
|
||||
tool_set: Arc<ToolWorkingSet>,
|
||||
profile_id: Arc<str>,
|
||||
profile: AgentProfile,
|
||||
cx: &mut Context<ToolPicker>,
|
||||
) -> Self {
|
||||
let mut tool_entries = Vec::new();
|
||||
|
||||
for (source, tools) in tool_set.tools_by_source(cx) {
|
||||
tool_entries.extend(tools.into_iter().map(|tool| ToolEntry {
|
||||
name: tool.name().into(),
|
||||
source: source.clone(),
|
||||
}));
|
||||
}
|
||||
|
||||
Self {
|
||||
tool_picker: cx.entity().downgrade(),
|
||||
fs,
|
||||
tools: tool_entries,
|
||||
profile_id,
|
||||
profile,
|
||||
matches: Vec::new(),
|
||||
selected_index: 0,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl PickerDelegate for ToolPickerDelegate {
|
||||
type ListItem = ListItem;
|
||||
|
||||
fn match_count(&self) -> usize {
|
||||
self.matches.len()
|
||||
}
|
||||
|
||||
fn selected_index(&self) -> usize {
|
||||
self.selected_index
|
||||
}
|
||||
|
||||
fn set_selected_index(
|
||||
&mut self,
|
||||
ix: usize,
|
||||
_window: &mut Window,
|
||||
_cx: &mut Context<Picker<Self>>,
|
||||
) {
|
||||
self.selected_index = ix;
|
||||
}
|
||||
|
||||
fn placeholder_text(&self, _window: &mut Window, _cx: &mut App) -> Arc<str> {
|
||||
"Search tools…".into()
|
||||
}
|
||||
|
||||
fn update_matches(
|
||||
&mut self,
|
||||
query: String,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Picker<Self>>,
|
||||
) -> Task<()> {
|
||||
let background = cx.background_executor().clone();
|
||||
let candidates = self
|
||||
.tools
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(id, profile)| StringMatchCandidate::new(id, profile.name.as_ref()))
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
cx.spawn_in(window, async move |this, cx| {
|
||||
let matches = if query.is_empty() {
|
||||
candidates
|
||||
.into_iter()
|
||||
.enumerate()
|
||||
.map(|(index, candidate)| StringMatch {
|
||||
candidate_id: index,
|
||||
string: candidate.string,
|
||||
positions: Vec::new(),
|
||||
score: 0.,
|
||||
})
|
||||
.collect()
|
||||
} else {
|
||||
match_strings(
|
||||
&candidates,
|
||||
&query,
|
||||
false,
|
||||
100,
|
||||
&Default::default(),
|
||||
background,
|
||||
)
|
||||
.await
|
||||
};
|
||||
|
||||
this.update(cx, |this, _cx| {
|
||||
this.delegate.matches = matches;
|
||||
this.delegate.selected_index = this
|
||||
.delegate
|
||||
.selected_index
|
||||
.min(this.delegate.matches.len().saturating_sub(1));
|
||||
})
|
||||
.log_err();
|
||||
})
|
||||
}
|
||||
|
||||
fn confirm(&mut self, _secondary: bool, window: &mut Window, cx: &mut Context<Picker<Self>>) {
|
||||
if self.matches.is_empty() {
|
||||
self.dismissed(window, cx);
|
||||
return;
|
||||
}
|
||||
|
||||
let candidate_id = self.matches[self.selected_index].candidate_id;
|
||||
let tool = &self.tools[candidate_id];
|
||||
|
||||
let is_enabled = match &tool.source {
|
||||
ToolSource::Native => {
|
||||
let is_enabled = self.profile.tools.entry(tool.name.clone()).or_default();
|
||||
*is_enabled = !*is_enabled;
|
||||
*is_enabled
|
||||
}
|
||||
ToolSource::ContextServer { id } => {
|
||||
let preset = self
|
||||
.profile
|
||||
.context_servers
|
||||
.entry(id.clone().into())
|
||||
.or_default();
|
||||
let is_enabled = preset.tools.entry(tool.name.clone()).or_default();
|
||||
*is_enabled = !*is_enabled;
|
||||
*is_enabled
|
||||
}
|
||||
};
|
||||
|
||||
update_settings_file::<AssistantSettings>(self.fs.clone(), cx, {
|
||||
let profile_id = self.profile_id.clone();
|
||||
let default_profile = self.profile.clone();
|
||||
let tool = tool.clone();
|
||||
move |settings, _cx| match settings {
|
||||
AssistantSettingsContent::Versioned(VersionedAssistantSettingsContent::V2(
|
||||
settings,
|
||||
)) => {
|
||||
let profiles = settings.profiles.get_or_insert_default();
|
||||
let profile =
|
||||
profiles
|
||||
.entry(profile_id)
|
||||
.or_insert_with(|| AgentProfileContent {
|
||||
name: default_profile.name.into(),
|
||||
tools: default_profile.tools,
|
||||
context_servers: default_profile
|
||||
.context_servers
|
||||
.into_iter()
|
||||
.map(|(server_id, preset)| {
|
||||
(
|
||||
server_id,
|
||||
ContextServerPresetContent {
|
||||
tools: preset.tools,
|
||||
},
|
||||
)
|
||||
})
|
||||
.collect(),
|
||||
});
|
||||
|
||||
match tool.source {
|
||||
ToolSource::Native => {
|
||||
*profile.tools.entry(tool.name).or_default() = is_enabled;
|
||||
}
|
||||
ToolSource::ContextServer { id } => {
|
||||
let preset = profile
|
||||
.context_servers
|
||||
.entry(id.clone().into())
|
||||
.or_default();
|
||||
*preset.tools.entry(tool.name.clone()).or_default() = is_enabled;
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
fn dismissed(&mut self, _window: &mut Window, cx: &mut Context<Picker<Self>>) {
|
||||
self.tool_picker
|
||||
.update(cx, |_this, cx| cx.emit(DismissEvent))
|
||||
.log_err();
|
||||
}
|
||||
|
||||
fn render_match(
|
||||
&self,
|
||||
ix: usize,
|
||||
selected: bool,
|
||||
_window: &mut Window,
|
||||
_cx: &mut Context<Picker<Self>>,
|
||||
) -> Option<Self::ListItem> {
|
||||
let tool_match = &self.matches[ix];
|
||||
let tool = &self.tools[tool_match.candidate_id];
|
||||
|
||||
let is_enabled = match &tool.source {
|
||||
ToolSource::Native => self.profile.tools.get(&tool.name).copied().unwrap_or(false),
|
||||
ToolSource::ContextServer { id } => self
|
||||
.profile
|
||||
.context_servers
|
||||
.get(id.as_ref())
|
||||
.and_then(|preset| preset.tools.get(&tool.name))
|
||||
.copied()
|
||||
.unwrap_or(false),
|
||||
};
|
||||
|
||||
Some(
|
||||
ListItem::new(ix)
|
||||
.inset(true)
|
||||
.spacing(ListItemSpacing::Sparse)
|
||||
.toggle_state(selected)
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_2()
|
||||
.child(HighlightedLabel::new(
|
||||
tool_match.string.clone(),
|
||||
tool_match.positions.clone(),
|
||||
))
|
||||
.map(|parent| match &tool.source {
|
||||
ToolSource::Native => parent,
|
||||
ToolSource::ContextServer { id } => parent
|
||||
.child(Label::new(id).size(LabelSize::XSmall).color(Color::Muted)),
|
||||
}),
|
||||
)
|
||||
.end_slot::<Icon>(is_enabled.then(|| {
|
||||
Icon::new(IconName::Check)
|
||||
.size(IconSize::Small)
|
||||
.color(Color::Success)
|
||||
})),
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -225,12 +225,12 @@ impl AssistantPanel {
        window: &mut Window,
        cx: &mut Context<Workspace>,
    ) {
        let settings = AssistantSettings::get_global(cx);
        if !settings.enabled {
            return;
        if workspace
            .panel::<Self>(cx)
            .is_some_and(|panel| panel.read(cx).enabled(cx))
        {
            workspace.toggle_panel_focus::<Self>(window, cx);
        }

        workspace.toggle_panel_focus::<Self>(window, cx);
    }

    pub(crate) fn local_timezone(&self) -> UtcOffset {
@@ -637,12 +637,8 @@ impl Panel for AssistantPanel {
    }

    fn icon(&self, _window: &Window, cx: &App) -> Option<IconName> {
        let settings = AssistantSettings::get_global(cx);
        if !settings.enabled || !settings.button {
            return None;
        }

        Some(IconName::ZedAssistant)
        (self.enabled(cx) && AssistantSettings::get_global(cx).button)
            .then_some(IconName::ZedAssistant)
    }

    fn icon_tooltip(&self, _window: &Window, _cx: &App) -> Option<&'static str> {
@@ -656,6 +652,10 @@ impl Panel for AssistantPanel {
    fn activation_priority(&self) -> u32 {
        3
    }

    fn enabled(&self, cx: &App) -> bool {
        AssistantSettings::get_global(cx).enabled
    }
}

impl AssistantPanel {
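The reworked `icon` logic folds the two settings checks into one boolean guard. A tiny standalone sketch of the same pattern (the names here are illustrative, not the panel's real API):

```rust
fn panel_icon(enabled: bool, show_button: bool) -> Option<&'static str> {
    // Same shape as the change above: previously `if !enabled || !show_button { return None; } Some(icon)`.
    (enabled && show_button).then_some("zed-assistant")
}

fn main() {
    assert_eq!(panel_icon(true, true), Some("zed-assistant"));
    assert_eq!(panel_icon(true, false), None);
}
```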
@@ -482,11 +482,17 @@ impl CodegenAlternative {

        self.generation = cx.spawn(async move |codegen, cx| {
            let stream = stream.await;
            let token_usage = stream
                .as_ref()
                .ok()
                .map(|stream| stream.last_token_usage.clone());
            let message_id = stream
                .as_ref()
                .ok()
                .and_then(|stream| stream.message_id.clone());
            let generate = async {
                let model_telemetry_id = model_telemetry_id.clone();
                let model_provider_id = model_provider_id.clone();
                let (mut diff_tx, mut diff_rx) = mpsc::channel(1);
                let executor = cx.background_executor().clone();
                let message_id = message_id.clone();
@@ -596,7 +602,7 @@ impl CodegenAlternative {
                        kind: AssistantKind::Inline,
                        phase: AssistantPhase::Response,
                        model: model_telemetry_id,
                        model_provider: model_provider_id.to_string(),
                        model_provider: model_provider_id,
                        response_latency,
                        error_message,
                        language_name: language_name.map(|name| name.to_proto()),
@@ -677,6 +683,16 @@ impl CodegenAlternative {
                }
                this.elapsed_time = Some(elapsed_time);
                this.completion = Some(completion.lock().clone());
                if let Some(usage) = token_usage {
                    let usage = usage.lock();
                    telemetry::event!(
                        "Inline Assistant Completion",
                        model = model_telemetry_id,
                        model_provider = model_provider_id,
                        input_tokens = usage.input_tokens,
                        output_tokens = usage.output_tokens,
                    )
                }
                cx.emit(CodegenEvent::Finished);
                cx.notify();
            })
@@ -1021,7 +1037,7 @@ mod tests {
        language_settings, tree_sitter_rust, Buffer, Language, LanguageConfig, LanguageMatcher,
        Point,
    };
    use language_model::LanguageModelRegistry;
    use language_model::{LanguageModelRegistry, TokenUsage};
    use rand::prelude::*;
    use serde::Serialize;
    use settings::SettingsStore;
@@ -1405,6 +1421,7 @@ mod tests {
                future::ready(Ok(LanguageModelTextStream {
                    message_id: None,
                    stream: chunks_rx.map(Ok).boxed(),
                    last_token_usage: Arc::new(Mutex::new(TokenUsage::default())),
                })),
                cx,
            );
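The stream result now exposes its final token usage through a shared `Arc<Mutex<_>>` handle, which the completion callback reads once generation finishes to emit the telemetry event above. A stripped-down sketch of that sharing pattern, using a simplified `TokenUsage` stand-in rather than the real `language_model` type:

```rust
use std::sync::{Arc, Mutex};

#[derive(Clone, Copy, Default)]
struct TokenUsage {
    input_tokens: u64,
    output_tokens: u64,
}

fn main() {
    // Producer and consumer share one handle; the producer updates it as
    // chunks arrive, and the consumer reads it after the stream is drained.
    let last_token_usage = Arc::new(Mutex::new(TokenUsage::default()));

    let producer_handle = last_token_usage.clone();
    producer_handle.lock().unwrap().input_tokens = 128;
    producer_handle.lock().unwrap().output_tokens = 256;

    let usage = *last_token_usage.lock().unwrap();
    assert_eq!(usage.output_tokens, 256);
}
```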
@@ -1,6 +1,6 @@
|
||||
use std::cell::RefCell;
|
||||
use std::ops::Range;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::path::Path;
|
||||
use std::rc::Rc;
|
||||
use std::sync::atomic::AtomicBool;
|
||||
use std::sync::Arc;
|
||||
@@ -70,18 +70,18 @@ impl ContextPickerCompletionProvider {
|
||||
.filter_map(|entry| match entry {
|
||||
super::RecentEntry::File {
|
||||
project_path,
|
||||
path_prefix: _,
|
||||
} => Self::completion_for_path(
|
||||
path_prefix,
|
||||
} => Some(Self::completion_for_path(
|
||||
project_path.clone(),
|
||||
path_prefix,
|
||||
true,
|
||||
false,
|
||||
excerpt_id,
|
||||
source_range.clone(),
|
||||
editor.clone(),
|
||||
context_store.clone(),
|
||||
workspace.clone(),
|
||||
cx,
|
||||
),
|
||||
)),
|
||||
super::RecentEntry::Thread(thread_context_entry) => {
|
||||
let thread_store = thread_store
|
||||
.as_ref()
|
||||
@@ -120,56 +120,24 @@ impl ContextPickerCompletionProvider {
|
||||
completions
|
||||
}
|
||||
|
||||
fn full_path_for_entry(
|
||||
worktree_id: WorktreeId,
|
||||
path: &Path,
|
||||
workspace: Entity<Workspace>,
|
||||
cx: &App,
|
||||
) -> Option<PathBuf> {
|
||||
let worktree = workspace
|
||||
.read(cx)
|
||||
.project()
|
||||
.read(cx)
|
||||
.worktree_for_id(worktree_id, cx)?
|
||||
.read(cx);
|
||||
|
||||
let mut full_path = PathBuf::from(worktree.root_name());
|
||||
full_path.push(path);
|
||||
Some(full_path)
|
||||
}
|
||||
|
||||
fn build_code_label_for_full_path(
|
||||
worktree_id: WorktreeId,
|
||||
path: &Path,
|
||||
workspace: Entity<Workspace>,
|
||||
file_name: &str,
|
||||
directory: Option<&str>,
|
||||
cx: &App,
|
||||
) -> Option<CodeLabel> {
|
||||
) -> CodeLabel {
|
||||
let comment_id = cx.theme().syntax().highlight_id("comment").map(HighlightId);
|
||||
let mut label = CodeLabel::default();
|
||||
let worktree = workspace
|
||||
.read(cx)
|
||||
.project()
|
||||
.read(cx)
|
||||
.worktree_for_id(worktree_id, cx)?;
|
||||
|
||||
let entry = worktree.read(cx).entry_for_path(&path)?;
|
||||
let file_name = path.file_name()?.to_string_lossy();
|
||||
label.push_str(&file_name, None);
|
||||
if entry.is_dir() {
|
||||
label.push_str("/ ", None);
|
||||
} else {
|
||||
label.push_str(" ", None);
|
||||
};
|
||||
label.push_str(" ", None);
|
||||
|
||||
let mut path_hint = PathBuf::from(worktree.read(cx).root_name());
|
||||
if let Some(path_to_entry) = path.parent() {
|
||||
path_hint.push(path_to_entry);
|
||||
if let Some(directory) = directory {
|
||||
label.push_str(&directory, comment_id);
|
||||
}
|
||||
label.push_str(&path_hint.to_string_lossy(), comment_id);
|
||||
|
||||
label.filter_range = 0..label.text().len();
|
||||
|
||||
Some(label)
|
||||
label
|
||||
}
|
||||
|
||||
fn completion_for_thread(
|
||||
@@ -274,32 +242,36 @@ impl ContextPickerCompletionProvider {
|
||||
|
||||
fn completion_for_path(
|
||||
project_path: ProjectPath,
|
||||
path_prefix: &str,
|
||||
is_recent: bool,
|
||||
is_directory: bool,
|
||||
excerpt_id: ExcerptId,
|
||||
source_range: Range<Anchor>,
|
||||
editor: Entity<Editor>,
|
||||
context_store: Entity<ContextStore>,
|
||||
workspace: Entity<Workspace>,
|
||||
cx: &App,
|
||||
) -> Option<Completion> {
|
||||
) -> Completion {
|
||||
let (file_name, directory) = super::file_context_picker::extract_file_name_and_directory(
|
||||
&project_path.path,
|
||||
path_prefix,
|
||||
);
|
||||
|
||||
let label = Self::build_code_label_for_full_path(
|
||||
project_path.worktree_id,
|
||||
&project_path.path,
|
||||
workspace.clone(),
|
||||
&file_name,
|
||||
directory.as_ref().map(|s| s.as_ref()),
|
||||
cx,
|
||||
)?;
|
||||
let full_path = Self::full_path_for_entry(
|
||||
project_path.worktree_id,
|
||||
&project_path.path,
|
||||
workspace.clone(),
|
||||
cx,
|
||||
)?;
|
||||
);
|
||||
let full_path = if let Some(directory) = directory {
|
||||
format!("{}{}", directory, file_name)
|
||||
} else {
|
||||
file_name.to_string()
|
||||
};
|
||||
|
||||
let crease_icon_path = if is_directory {
|
||||
FileIcons::get_folder_icon(false, cx).unwrap_or_else(|| IconName::Folder.path().into())
|
||||
} else {
|
||||
FileIcons::get_icon(&full_path, cx).unwrap_or_else(|| IconName::File.path().into())
|
||||
FileIcons::get_icon(Path::new(&full_path), cx)
|
||||
.unwrap_or_else(|| IconName::File.path().into())
|
||||
};
|
||||
let completion_icon_path = if is_recent {
|
||||
IconName::HistoryRerun.path().into()
|
||||
@@ -307,15 +279,9 @@ impl ContextPickerCompletionProvider {
|
||||
crease_icon_path.clone()
|
||||
};
|
||||
|
||||
let crease_name = project_path
|
||||
.path
|
||||
.file_name()
|
||||
.map(|file_name| file_name.to_string_lossy().to_string())
|
||||
.unwrap_or_else(|| "untitled".to_string());
|
||||
|
||||
let new_text = format!("@file {}", full_path.to_string_lossy());
|
||||
let new_text = format!("@file {}", full_path);
|
||||
let new_text_len = new_text.len();
|
||||
Some(Completion {
|
||||
Completion {
|
||||
old_range: source_range.clone(),
|
||||
new_text,
|
||||
label,
|
||||
@@ -324,7 +290,7 @@ impl ContextPickerCompletionProvider {
|
||||
icon_path: Some(completion_icon_path),
|
||||
confirm: Some(confirm_completion_callback(
|
||||
crease_icon_path,
|
||||
crease_name.into(),
|
||||
file_name,
|
||||
excerpt_id,
|
||||
source_range.start,
|
||||
new_text_len,
|
||||
@@ -340,7 +306,7 @@ impl ContextPickerCompletionProvider {
|
||||
})
|
||||
},
|
||||
)),
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -397,33 +363,34 @@ impl CompletionProvider for ContextPickerCompletionProvider {
|
||||
.update(|cx| {
|
||||
super::file_context_picker::search_paths(
|
||||
query,
|
||||
Arc::new(AtomicBool::default()),
|
||||
Arc::<AtomicBool>::default(),
|
||||
&workspace,
|
||||
cx,
|
||||
)
|
||||
})?
|
||||
.await;
|
||||
|
||||
completions.reserve(path_matches.len());
|
||||
cx.update(|cx| {
|
||||
completions.extend(path_matches.iter().filter_map(|mat| {
|
||||
let editor = editor.upgrade()?;
|
||||
Self::completion_for_path(
|
||||
ProjectPath {
|
||||
worktree_id: WorktreeId::from_usize(mat.worktree_id),
|
||||
path: mat.path.clone(),
|
||||
},
|
||||
false,
|
||||
mat.is_dir,
|
||||
excerpt_id,
|
||||
source_range.clone(),
|
||||
editor.clone(),
|
||||
context_store.clone(),
|
||||
workspace.clone(),
|
||||
cx,
|
||||
)
|
||||
}));
|
||||
})?;
|
||||
if let Some(editor) = editor.upgrade() {
|
||||
completions.reserve(path_matches.len());
|
||||
cx.update(|cx| {
|
||||
completions.extend(path_matches.iter().map(|mat| {
|
||||
Self::completion_for_path(
|
||||
ProjectPath {
|
||||
worktree_id: WorktreeId::from_usize(mat.worktree_id),
|
||||
path: mat.path.clone(),
|
||||
},
|
||||
&mat.path_prefix,
|
||||
false,
|
||||
mat.is_dir,
|
||||
excerpt_id,
|
||||
source_range.clone(),
|
||||
editor.clone(),
|
||||
context_store.clone(),
|
||||
cx,
|
||||
)
|
||||
}));
|
||||
})?;
|
||||
}
|
||||
}
|
||||
Some(ContextPickerMode::Fetch) => {
|
||||
if let Some(editor) = editor.upgrade() {
|
||||
@@ -771,7 +738,6 @@ mod tests {
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
//TODO: Construct the editor without an actual buffer that points to a file
|
||||
let item = workspace
|
||||
.update_in(&mut cx, |workspace, window, cx| {
|
||||
workspace.open_path(
|
||||
@@ -821,10 +787,10 @@ mod tests {
|
||||
assert_eq!(
|
||||
current_completion_labels(editor),
|
||||
&[
|
||||
format!("seven.txt {}", separator!("dir/b")).as_str(),
|
||||
format!("six.txt {}", separator!("dir/b")).as_str(),
|
||||
format!("five.txt {}", separator!("dir/b")).as_str(),
|
||||
format!("four.txt {}", separator!("dir/a")).as_str(),
|
||||
"seven.txt dir/b/",
|
||||
"six.txt dir/b/",
|
||||
"five.txt dir/b/",
|
||||
"four.txt dir/a/",
|
||||
"Files & Directories",
|
||||
"Fetch"
|
||||
]
|
||||
@@ -853,10 +819,7 @@ mod tests {
|
||||
editor.update(&mut cx, |editor, cx| {
|
||||
assert_eq!(editor.text(cx), "Lorem @file one");
|
||||
assert!(editor.has_visible_completions_menu());
|
||||
assert_eq!(
|
||||
current_completion_labels(editor),
|
||||
vec![format!("one.txt {}", separator!("dir/a")).as_str(),]
|
||||
);
|
||||
assert_eq!(current_completion_labels(editor), vec!["one.txt dir/a/"]);
|
||||
});
|
||||
|
||||
editor.update_in(&mut cx, |editor, window, cx| {
|
||||
@@ -865,10 +828,7 @@ mod tests {
|
||||
});
|
||||
|
||||
editor.update(&mut cx, |editor, cx| {
|
||||
assert_eq!(
|
||||
editor.text(cx),
|
||||
format!("Lorem @file {}", separator!("dir/a/one.txt"))
|
||||
);
|
||||
assert_eq!(editor.text(cx), "Lorem @file dir/a/one.txt",);
|
||||
assert!(!editor.has_visible_completions_menu());
|
||||
assert_eq!(
|
||||
crease_ranges(editor, cx),
|
||||
@@ -879,10 +839,7 @@ mod tests {
|
||||
cx.simulate_input(" ");
|
||||
|
||||
editor.update(&mut cx, |editor, cx| {
|
||||
assert_eq!(
|
||||
editor.text(cx),
|
||||
format!("Lorem @file {} ", separator!("dir/a/one.txt"))
|
||||
);
|
||||
assert_eq!(editor.text(cx), "Lorem @file dir/a/one.txt ",);
|
||||
assert!(!editor.has_visible_completions_menu());
|
||||
assert_eq!(
|
||||
crease_ranges(editor, cx),
|
||||
@@ -893,10 +850,7 @@ mod tests {
|
||||
cx.simulate_input("Ipsum ");
|
||||
|
||||
editor.update(&mut cx, |editor, cx| {
|
||||
assert_eq!(
|
||||
editor.text(cx),
|
||||
format!("Lorem @file {} Ipsum ", separator!("dir/a/one.txt"))
|
||||
);
|
||||
assert_eq!(editor.text(cx), "Lorem @file dir/a/one.txt Ipsum ",);
|
||||
assert!(!editor.has_visible_completions_menu());
|
||||
assert_eq!(
|
||||
crease_ranges(editor, cx),
|
||||
@@ -907,10 +861,7 @@ mod tests {
|
||||
cx.simulate_input("@file ");
|
||||
|
||||
editor.update(&mut cx, |editor, cx| {
|
||||
assert_eq!(
|
||||
editor.text(cx),
|
||||
format!("Lorem @file {} Ipsum @file ", separator!("dir/a/one.txt"))
|
||||
);
|
||||
assert_eq!(editor.text(cx), "Lorem @file dir/a/one.txt Ipsum @file ",);
|
||||
assert!(editor.has_visible_completions_menu());
|
||||
assert_eq!(
|
||||
crease_ranges(editor, cx),
|
||||
@@ -927,11 +878,7 @@ mod tests {
|
||||
editor.update(&mut cx, |editor, cx| {
|
||||
assert_eq!(
|
||||
editor.text(cx),
|
||||
format!(
|
||||
"Lorem @file {} Ipsum @file {}",
|
||||
separator!("dir/a/one.txt"),
|
||||
separator!("dir/b/seven.txt")
|
||||
)
|
||||
"Lorem @file dir/a/one.txt Ipsum @file dir/b/seven.txt"
|
||||
);
|
||||
assert!(!editor.has_visible_completions_menu());
|
||||
assert_eq!(
|
||||
@@ -948,11 +895,7 @@ mod tests {
|
||||
editor.update(&mut cx, |editor, cx| {
|
||||
assert_eq!(
|
||||
editor.text(cx),
|
||||
format!(
|
||||
"Lorem @file {} Ipsum @file {}\n@",
|
||||
separator!("dir/a/one.txt"),
|
||||
separator!("dir/b/seven.txt")
|
||||
)
|
||||
"Lorem @file dir/a/one.txt Ipsum @file dir/b/seven.txt\n@"
|
||||
);
|
||||
assert!(editor.has_visible_completions_menu());
|
||||
assert_eq!(
|
||||
@@ -973,12 +916,7 @@ mod tests {
|
||||
editor.update(&mut cx, |editor, cx| {
|
||||
assert_eq!(
|
||||
editor.text(cx),
|
||||
format!(
|
||||
"Lorem @file {} Ipsum @file {}\n@file {}",
|
||||
separator!("dir/a/one.txt"),
|
||||
separator!("dir/b/seven.txt"),
|
||||
separator!("dir/b/six.txt"),
|
||||
)
|
||||
"Lorem @file dir/a/one.txt Ipsum @file dir/b/seven.txt\n@file dir/b/six.txt"
|
||||
);
|
||||
assert!(!editor.has_visible_completions_menu());
|
||||
assert_eq!(
|
||||
|
||||
@@ -273,17 +273,17 @@ pub(crate) fn search_paths(
    }
}

pub fn render_file_context_entry(
    id: ElementId,
pub fn extract_file_name_and_directory(
    path: &Path,
    path_prefix: &Arc<str>,
    is_directory: bool,
    context_store: WeakEntity<ContextStore>,
    cx: &App,
) -> Stateful<Div> {
    let (file_name, directory) = if path == Path::new("") {
    path_prefix: &str,
) -> (SharedString, Option<SharedString>) {
    if path == Path::new("") {
        (
            SharedString::from(path_prefix.trim_end_matches('/').to_string()),
            SharedString::from(
                path_prefix
                    .trim_end_matches(std::path::MAIN_SEPARATOR)
                    .to_string(),
            ),
            None,
        )
    } else {
@@ -294,7 +294,9 @@ pub fn render_file_context_entry(
            .to_string()
            .into();

        let mut directory = path_prefix.to_string();
        let mut directory = path_prefix
            .trim_end_matches(std::path::MAIN_SEPARATOR)
            .to_string();
        if !directory.ends_with('/') {
            directory.push('/');
        }
@@ -303,8 +305,19 @@
            directory.push('/');
        }

        (file_name, Some(directory))
    };
        (file_name, Some(directory.into()))
    }
}

pub fn render_file_context_entry(
    id: ElementId,
    path: &Path,
    path_prefix: &Arc<str>,
    is_directory: bool,
    context_store: WeakEntity<ContextStore>,
    cx: &App,
) -> Stateful<Div> {
    let (file_name, directory) = extract_file_name_and_directory(path, path_prefix);

    let added = context_store.upgrade().and_then(|context_store| {
        if is_directory {
@@ -317,7 +317,7 @@ impl Render for MessageEditor {

        let project = self.thread.read(cx).project();
        let changed_files = if let Some(repository) = project.read(cx).active_repository(cx) {
            repository.read(cx).status().count()
            repository.read(cx).cached_status().count()
        } else {
            0
        };

@@ -1,19 +1,14 @@
|
||||
use std::sync::{Arc, LazyLock};
|
||||
use std::sync::Arc;
|
||||
|
||||
use anyhow::Result;
|
||||
use assistant_settings::{AgentProfile, AssistantSettings};
|
||||
use editor::scroll::Autoscroll;
|
||||
use editor::Editor;
|
||||
use fs::Fs;
|
||||
use gpui::{prelude::*, AsyncWindowContext, Entity, Subscription, WeakEntity};
|
||||
use gpui::{prelude::*, Action, Entity, Subscription, WeakEntity};
|
||||
use indexmap::IndexMap;
|
||||
use regex::Regex;
|
||||
use settings::{update_settings_file, Settings as _, SettingsStore};
|
||||
use ui::{prelude::*, ContextMenu, ContextMenuEntry, PopoverMenu, Tooltip};
|
||||
use util::ResultExt as _;
|
||||
use workspace::{create_and_open_local_file, Workspace};
|
||||
|
||||
use crate::ThreadStore;
|
||||
use crate::{ManageProfiles, ThreadStore};
|
||||
|
||||
pub struct ProfileSelector {
|
||||
profiles: IndexMap<Arc<str>, AgentProfile>,
|
||||
@@ -92,89 +87,13 @@ impl ProfileSelector {
|
||||
.icon(IconName::Pencil)
|
||||
.icon_color(Color::Muted)
|
||||
.handler(move |window, cx| {
|
||||
if let Some(workspace) = window.root().flatten() {
|
||||
let workspace = workspace.downgrade();
|
||||
window
|
||||
.spawn(cx, async |cx| {
|
||||
Self::open_profiles_setting_in_editor(workspace, cx).await
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
}
|
||||
window.dispatch_action(ManageProfiles.boxed_clone(), cx);
|
||||
}),
|
||||
);
|
||||
|
||||
menu
|
||||
})
|
||||
}
|
||||
|
||||
async fn open_profiles_setting_in_editor(
|
||||
workspace: WeakEntity<Workspace>,
|
||||
cx: &mut AsyncWindowContext,
|
||||
) -> Result<()> {
|
||||
let settings_editor = workspace
|
||||
.update_in(cx, |_, window, cx| {
|
||||
create_and_open_local_file(paths::settings_file(), window, cx, || {
|
||||
settings::initial_user_settings_content().as_ref().into()
|
||||
})
|
||||
})?
|
||||
.await?
|
||||
.downcast::<Editor>()
|
||||
.unwrap();
|
||||
|
||||
settings_editor
|
||||
.downgrade()
|
||||
.update_in(cx, |editor, window, cx| {
|
||||
let text = editor.buffer().read(cx).snapshot(cx).text();
|
||||
|
||||
let settings = cx.global::<SettingsStore>();
|
||||
|
||||
let edits =
|
||||
settings.edits_for_update::<AssistantSettings>(
|
||||
&text,
|
||||
|settings| match settings {
|
||||
assistant_settings::AssistantSettingsContent::Versioned(settings) => {
|
||||
match settings {
|
||||
assistant_settings::VersionedAssistantSettingsContent::V2(
|
||||
settings,
|
||||
) => {
|
||||
settings.profiles.get_or_insert_with(IndexMap::default);
|
||||
}
|
||||
assistant_settings::VersionedAssistantSettingsContent::V1(
|
||||
_,
|
||||
) => {}
|
||||
}
|
||||
}
|
||||
assistant_settings::AssistantSettingsContent::Legacy(_) => {}
|
||||
},
|
||||
);
|
||||
|
||||
if !edits.is_empty() {
|
||||
editor.edit(edits.iter().cloned(), cx);
|
||||
}
|
||||
|
||||
let text = editor.buffer().read(cx).snapshot(cx).text();
|
||||
|
||||
static PROFILES_REGEX: LazyLock<Regex> =
|
||||
LazyLock::new(|| Regex::new(r#"(?P<key>"profiles":)\s*\{"#).unwrap());
|
||||
let range = PROFILES_REGEX.captures(&text).and_then(|captures| {
|
||||
captures
|
||||
.name("key")
|
||||
.map(|inner_match| inner_match.start()..inner_match.end())
|
||||
});
|
||||
if let Some(range) = range {
|
||||
editor.change_selections(
|
||||
Some(Autoscroll::newest()),
|
||||
window,
|
||||
cx,
|
||||
|selections| {
|
||||
selections.select_ranges(vec![range]);
|
||||
},
|
||||
);
|
||||
}
|
||||
})?;
|
||||
|
||||
anyhow::Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl Render for ProfileSelector {
|
||||
|
||||
@@ -286,8 +286,7 @@ impl Thread {
            tool_use,
            action_log: cx.new(|_| ActionLog::new()),
            initial_project_snapshot: Task::ready(serialized.initial_project_snapshot).shared(),
            // TODO: persist token usage?
            cumulative_token_usage: TokenUsage::default(),
            cumulative_token_usage: serialized.cumulative_token_usage,
            feedback: None,
        }
    }
@@ -648,6 +647,7 @@ impl Thread {
                    })
                    .collect(),
                initial_project_snapshot,
                cumulative_token_usage: this.cumulative_token_usage.clone(),
            })
        })
    }
@@ -857,6 +857,13 @@ impl Thread {
            request.messages.push(request_message);
        }

        // Set a cache breakpoint at the second-to-last message.
        // https://docs.anthropic.com/en/docs/build-with-claude/prompt-caching
        let breakpoint_index = request.messages.len() - 2;
        for (index, message) in request.messages.iter_mut().enumerate() {
            message.cache = index == breakpoint_index;
        }

        if !referenced_context_ids.is_empty() {
            let mut context_message = LanguageModelRequestMessage {
                role: Role::User,
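The new cache-breakpoint pass marks exactly one message so that prompt caching covers everything up to the latest turn. A standalone sketch of the same marking step, using a simplified message type (not the real `LanguageModelRequestMessage`) and an explicit guard for the short-history case:

```rust
struct Message {
    cache: bool,
}

// Mark the second-to-last message as the cache breakpoint; earlier messages are
// assumed stable across requests, while the final message changes every time.
fn mark_cache_breakpoint(messages: &mut [Message]) {
    if messages.len() < 2 {
        return; // Nothing to mark; the real call site is assumed to have at least two messages.
    }
    let breakpoint_index = messages.len() - 2;
    for (index, message) in messages.iter_mut().enumerate() {
        message.cache = index == breakpoint_index;
    }
}
```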
@@ -1410,7 +1417,7 @@ impl Thread {
                git_store
                    .repositories()
                    .values()
                    .find(|repo| repo.read(cx).worktree_id == snapshot.id())
                    .find(|repo| repo.read(cx).worktree_id == Some(snapshot.id()))
                    .and_then(|repo| {
                        let repo = repo.read(cx);
                        Some((repo.branch().cloned(), repo.local_repository()?))
@@ -1429,7 +1436,7 @@ impl Thread {

            // Get diff asynchronously
            let diff = repo
                .diff(git::repository::DiffType::HeadToWorktree, cx.clone())
                .diff(git::repository::DiffType::HeadToWorktree)
                .await
                .ok();
||||
|
||||
@@ -16,7 +16,7 @@ use gpui::{
};
use heed::types::SerdeBincode;
use heed::Database;
use language_model::{LanguageModelToolUseId, Role};
use language_model::{LanguageModelToolUseId, Role, TokenUsage};
use project::Project;
use prompt_store::PromptBuilder;
use serde::{Deserialize, Serialize};
@@ -308,6 +308,8 @@ pub struct SerializedThread {
    pub messages: Vec<SerializedMessage>,
    #[serde(default)]
    pub initial_project_snapshot: Option<Arc<ProjectSnapshot>>,
    #[serde(default)]
    pub cumulative_token_usage: TokenUsage,
}

impl SerializedThread {
@@ -390,6 +392,7 @@ impl LegacySerializedThread {
            updated_at: self.updated_at,
            messages: self.messages.into_iter().map(|msg| msg.upgrade()).collect(),
            initial_project_snapshot: self.initial_project_snapshot,
            cumulative_token_usage: TokenUsage::default(),
        }
    }
}
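`#[serde(default)]` is what keeps previously saved threads loadable: a record written before this change has no `cumulative_token_usage` key, so deserialization falls back to `TokenUsage::default()`. A minimal illustration with serde_json, using simplified stand-ins for the real types (field names here are illustrative):

```rust
use serde::Deserialize;

#[derive(Debug, Default, Deserialize, PartialEq)]
struct TokenUsage {
    input_tokens: u64,
    output_tokens: u64,
}

#[derive(Debug, Deserialize)]
struct SerializedThread {
    title: String,
    #[serde(default)]
    cumulative_token_usage: TokenUsage,
}

fn main() {
    // An old record with no token-usage field still deserializes cleanly.
    let old = r#"{ "title": "Refactor the panel" }"#;
    let thread: SerializedThread = serde_json::from_str(old).unwrap();
    assert_eq!(thread.cumulative_token_usage, TokenUsage::default());
}
```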
|
||||
@@ -24,7 +24,7 @@ impl ToolReadyPopUp {

    pub fn window_options(screen: Rc<dyn PlatformDisplay>, cx: &App) -> WindowOptions {
        let size = Size {
            width: px(440.),
            width: px(450.),
            height: px(72.),
        };

@@ -93,8 +93,18 @@ impl Render for ToolReadyPopUp {
                    )
                    .child(
                        v_flex()
                            .child(Headline::new("Agent Panel").size(HeadlineSize::XSmall))
                            .child(Label::new(self.caption.clone()).color(Color::Muted)),
                            .child(
                                div()
                                    .text_size(px(16.))
                                    .text_color(cx.theme().colors().text)
                                    .child("Agent Panel"),
                            )
                            .child(
                                div()
                                    .text_size(px(14.))
                                    .text_color(cx.theme().colors().text_muted)
                                    .child(self.caption.clone()),
                            ),
                    ),
            )
            .child(
|
||||
@@ -21,6 +21,7 @@ clap.workspace = true
|
||||
client.workspace = true
|
||||
collections.workspace = true
|
||||
context_server.workspace = true
|
||||
dap.workspace = true
|
||||
env_logger.workspace = true
|
||||
fs.workspace = true
|
||||
futures.workspace = true
|
||||
|
||||
@@ -3,6 +3,7 @@ use assistant2::{RequestKind, Thread, ThreadEvent, ThreadStore};
|
||||
use assistant_tool::ToolWorkingSet;
|
||||
use client::{Client, UserStore};
|
||||
use collections::HashMap;
|
||||
use dap::DapRegistry;
|
||||
use futures::StreamExt;
|
||||
use gpui::{prelude::*, App, AsyncApp, Entity, SemanticVersion, Subscription, Task};
|
||||
use language::LanguageRegistry;
|
||||
@@ -50,6 +51,7 @@ impl HeadlessAssistant {
|
||||
app_state.node_runtime.clone(),
|
||||
app_state.user_store.clone(),
|
||||
app_state.languages.clone(),
|
||||
Arc::new(DapRegistry::default()),
|
||||
app_state.fs.clone(),
|
||||
env,
|
||||
cx,
|
||||
@@ -149,7 +151,10 @@ pub fn init(cx: &mut App) -> Arc<HeadlessAppState> {
|
||||
cx.set_http_client(client.http_client().clone());
|
||||
|
||||
let git_binary_path = None;
|
||||
let fs = Arc::new(RealFs::new(git_binary_path));
|
||||
let fs = Arc::new(RealFs::new(
|
||||
git_binary_path,
|
||||
cx.background_executor().clone(),
|
||||
));
|
||||
|
||||
let languages = Arc::new(LanguageRegistry::new(cx.background_executor().clone()));
|
||||
|
||||
|
||||
@@ -12,7 +12,7 @@ pub struct AgentProfile {
|
||||
pub context_servers: IndexMap<Arc<str>, ContextServerPreset>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
#[derive(Debug, Clone, Default)]
|
||||
pub struct ContextServerPreset {
|
||||
pub tools: IndexMap<Arc<str>, bool>,
|
||||
}
|
||||
|
||||
@@ -442,7 +442,7 @@ pub struct AgentProfileContent {
|
||||
pub context_servers: IndexMap<Arc<str>, ContextServerPresetContent>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone, Serialize, Deserialize, JsonSchema)]
|
||||
#[derive(Debug, PartialEq, Clone, Default, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct ContextServerPresetContent {
|
||||
pub tools: IndexMap<Arc<str>, bool>,
|
||||
}
|
||||
|
||||
@@ -30,7 +30,6 @@ http_client.workspace = true
|
||||
indexed_docs.workspace = true
|
||||
language.workspace = true
|
||||
language_model.workspace = true
|
||||
log.workspace = true
|
||||
project.workspace = true
|
||||
prompt_store.workspace = true
|
||||
rope.workspace = true
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
mod auto_command;
|
||||
mod cargo_workspace_command;
|
||||
mod context_server_command;
|
||||
mod default_command;
|
||||
@@ -21,7 +20,6 @@ use gpui::App;
|
||||
use language::{CodeLabel, HighlightId};
|
||||
use ui::ActiveTheme as _;
|
||||
|
||||
pub use crate::auto_command::*;
|
||||
pub use crate::cargo_workspace_command::*;
|
||||
pub use crate::context_server_command::*;
|
||||
pub use crate::default_command::*;
|
||||
|
||||
@@ -1,371 +0,0 @@
|
||||
use anyhow::{anyhow, Result};
|
||||
use assistant_slash_command::{
|
||||
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
|
||||
SlashCommandResult,
|
||||
};
|
||||
use feature_flags::FeatureFlag;
|
||||
use futures::StreamExt;
|
||||
use gpui::{App, AsyncApp, Task, WeakEntity, Window};
|
||||
use language::{CodeLabel, LspAdapterDelegate};
|
||||
use language_model::{
|
||||
LanguageModelCompletionEvent, LanguageModelRegistry, LanguageModelRequest,
|
||||
LanguageModelRequestMessage, Role,
|
||||
};
|
||||
use semantic_index::{FileSummary, SemanticDb};
|
||||
use smol::channel;
|
||||
use std::sync::{atomic::AtomicBool, Arc};
|
||||
use ui::{prelude::*, BorrowAppContext};
|
||||
use util::ResultExt;
|
||||
use workspace::Workspace;
|
||||
|
||||
use crate::create_label_for_command;
|
||||
|
||||
pub struct AutoSlashCommandFeatureFlag;
|
||||
|
||||
impl FeatureFlag for AutoSlashCommandFeatureFlag {
|
||||
const NAME: &'static str = "auto-slash-command";
|
||||
}
|
||||
|
||||
pub struct AutoCommand;
|
||||
|
||||
impl SlashCommand for AutoCommand {
|
||||
fn name(&self) -> String {
|
||||
"auto".into()
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
"Automatically infer what context to add".into()
|
||||
}
|
||||
|
||||
fn icon(&self) -> IconName {
|
||||
IconName::Wand
|
||||
}
|
||||
|
||||
fn menu_text(&self) -> String {
|
||||
self.description()
|
||||
}
|
||||
|
||||
fn label(&self, cx: &App) -> CodeLabel {
|
||||
create_label_for_command("auto", &["--prompt"], cx)
|
||||
}
|
||||
|
||||
fn complete_argument(
|
||||
self: Arc<Self>,
|
||||
_arguments: &[String],
|
||||
_cancel: Arc<AtomicBool>,
|
||||
workspace: Option<WeakEntity<Workspace>>,
|
||||
_window: &mut Window,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<Vec<ArgumentCompletion>>> {
|
||||
// There's no autocomplete for a prompt, since it's arbitrary text.
|
||||
// However, we can use this opportunity to kick off a drain of the backlog.
|
||||
// That way, it can hopefully be done resummarizing by the time we've actually
|
||||
// typed out our prompt. This re-runs on every keystroke during autocomplete,
|
||||
// but in the future, we could instead do it only once, when /auto is first entered.
|
||||
let Some(workspace) = workspace.and_then(|ws| ws.upgrade()) else {
|
||||
log::warn!("workspace was dropped or unavailable during /auto autocomplete");
|
||||
|
||||
return Task::ready(Ok(Vec::new()));
|
||||
};
|
||||
|
||||
let project = workspace.read(cx).project().clone();
|
||||
let Some(project_index) =
|
||||
cx.update_global(|index: &mut SemanticDb, cx| index.project_index(project, cx))
|
||||
else {
|
||||
return Task::ready(Err(anyhow!("No project indexer, cannot use /auto")));
|
||||
};
|
||||
|
||||
let cx: &mut App = cx;
|
||||
|
||||
cx.spawn(async move |cx| {
|
||||
let task = project_index.read_with(cx, |project_index, cx| {
|
||||
project_index.flush_summary_backlogs(cx)
|
||||
})?;
|
||||
|
||||
cx.background_spawn(task).await;
|
||||
|
||||
anyhow::Ok(Vec::new())
|
||||
})
|
||||
}
|
||||
|
||||
fn requires_argument(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
arguments: &[String],
|
||||
_context_slash_command_output_sections: &[SlashCommandOutputSection<language::Anchor>],
|
||||
_context_buffer: language::BufferSnapshot,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
_delegate: Option<Arc<dyn LspAdapterDelegate>>,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) -> Task<SlashCommandResult> {
|
||||
let Some(workspace) = workspace.upgrade() else {
|
||||
return Task::ready(Err(anyhow::anyhow!("workspace was dropped")));
|
||||
};
|
||||
if arguments.is_empty() {
|
||||
return Task::ready(Err(anyhow!("missing prompt")));
|
||||
};
|
||||
let argument = arguments.join(" ");
|
||||
let original_prompt = argument.to_string();
|
||||
let project = workspace.read(cx).project().clone();
|
||||
let Some(project_index) =
|
||||
cx.update_global(|index: &mut SemanticDb, cx| index.project_index(project, cx))
|
||||
else {
|
||||
return Task::ready(Err(anyhow!("no project indexer")));
|
||||
};
|
||||
|
||||
let task = window.spawn(cx, async move |cx| {
|
||||
let summaries = project_index
|
||||
.read_with(cx, |project_index, cx| project_index.all_summaries(cx))?
|
||||
.await?;
|
||||
|
||||
commands_for_summaries(&summaries, &original_prompt, &cx).await
|
||||
});
|
||||
|
||||
// As a convenience, append /auto's argument to the end of the prompt
|
||||
// so you don't have to write it again.
|
||||
let original_prompt = argument.to_string();
|
||||
|
||||
cx.background_spawn(async move {
|
||||
let commands = task.await?;
|
||||
let mut prompt = String::new();
|
||||
|
||||
log::info!(
|
||||
"Translating this response into slash-commands: {:?}",
|
||||
commands
|
||||
);
|
||||
|
||||
for command in commands {
|
||||
prompt.push('/');
|
||||
prompt.push_str(&command.name);
|
||||
prompt.push(' ');
|
||||
prompt.push_str(&command.arg);
|
||||
prompt.push('\n');
|
||||
}
|
||||
|
||||
prompt.push('\n');
|
||||
prompt.push_str(&original_prompt);
|
||||
|
||||
Ok(SlashCommandOutput {
|
||||
text: prompt,
|
||||
sections: Vec::new(),
|
||||
run_commands_in_text: true,
|
||||
}
|
||||
.to_event_stream())
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
const PROMPT_INSTRUCTIONS_BEFORE_SUMMARY: &str = include_str!("prompt_before_summary.txt");
|
||||
const PROMPT_INSTRUCTIONS_AFTER_SUMMARY: &str = include_str!("prompt_after_summary.txt");
|
||||
|
||||
fn summaries_prompt(summaries: &[FileSummary], original_prompt: &str) -> String {
|
||||
let json_summaries = serde_json::to_string(summaries).unwrap();
|
||||
|
||||
format!("{PROMPT_INSTRUCTIONS_BEFORE_SUMMARY}\n{json_summaries}\n{PROMPT_INSTRUCTIONS_AFTER_SUMMARY}\n{original_prompt}")
|
||||
}
|
||||
|
||||
/// The slash commands that the model is told about, and which we look for in the inference response.
|
||||
const SUPPORTED_SLASH_COMMANDS: &[&str] = &["search", "file"];
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
struct CommandToRun {
|
||||
name: String,
|
||||
arg: String,
|
||||
}
|
||||
|
||||
/// Given the pre-indexed file summaries for this project, as well as the original prompt
|
||||
/// string passed to `/auto`, get a list of slash commands to run, along with their arguments.
|
||||
///
|
||||
/// The prompt's output does not include the slashes (to reduce the chance that it makes a mistake),
|
||||
/// so taking one of these returned Strings and turning it into a real slash-command-with-argument
|
||||
/// involves prepending a slash to it.
|
||||
///
|
||||
/// This function will validate that each of the returned lines begins with one of SUPPORTED_SLASH_COMMANDS.
|
||||
/// Any other lines it encounters will be discarded, with a warning logged.
|
||||
async fn commands_for_summaries(
|
||||
summaries: &[FileSummary],
|
||||
original_prompt: &str,
|
||||
cx: &AsyncApp,
|
||||
) -> Result<Vec<CommandToRun>> {
|
||||
if summaries.is_empty() {
|
||||
log::warn!("Inferring no context because there were no summaries available.");
|
||||
return Ok(Vec::new());
|
||||
}
|
||||
|
||||
// Use the globally configured model to translate the summaries into slash-commands,
|
||||
// because Qwen2-7B-Instruct has not done a good job at that task.
|
||||
let Some(model) = cx.update(|cx| LanguageModelRegistry::read_global(cx).active_model())? else {
|
||||
log::warn!("Can't infer context because there's no active model.");
|
||||
return Ok(Vec::new());
|
||||
};
|
||||
// Only go up to 90% of the actual max token count, to reduce chances of
|
||||
// exceeding the token count due to inaccuracies in the token counting heuristic.
|
||||
let max_token_count = (model.max_token_count() * 9) / 10;
|
||||
|
||||
// Rather than recursing (which would require this async function use a pinned box),
|
||||
// we use an explicit stack of arguments and answers for when we need to "recurse."
|
||||
let mut stack = vec![summaries];
|
||||
let mut final_response = Vec::new();
|
||||
let mut prompts = Vec::new();
|
||||
|
||||
// TODO We only need to create multiple Requests because we currently
|
||||
// don't have the ability to tell if a CompletionProvider::complete response
|
||||
// was a "too many tokens in this request" error. If we had that, then
|
||||
// we could try the request once, instead of having to make separate requests
|
||||
// to check the token count and then afterwards to run the actual prompt.
|
||||
let make_request = |prompt: String| LanguageModelRequest {
|
||||
messages: vec![LanguageModelRequestMessage {
|
||||
role: Role::User,
|
||||
content: vec![prompt.into()],
|
||||
// Nothing in here will benefit from caching
|
||||
cache: false,
|
||||
}],
|
||||
tools: Vec::new(),
|
||||
stop: Vec::new(),
|
||||
temperature: None,
|
||||
};
|
||||
|
||||
while let Some(current_summaries) = stack.pop() {
|
||||
// The split can result in one slice being empty and the other having one element.
|
||||
// Whenever that happens, skip the empty one.
|
||||
if current_summaries.is_empty() {
|
||||
continue;
|
||||
}
|
||||
|
||||
log::info!(
|
||||
"Inferring prompt context using {} file summaries",
|
||||
current_summaries.len()
|
||||
);
|
||||
|
||||
let prompt = summaries_prompt(¤t_summaries, original_prompt);
|
||||
let start = std::time::Instant::now();
|
||||
// Per OpenAI, 1 token ~= 4 chars in English (we go with 4.5 to overestimate a bit, because failed API requests cost a lot of perf)
|
||||
// Verifying this against an actual model.count_tokens() confirms that it's usually within ~5% of the correct answer, whereas
|
||||
// getting the correct answer from tiktoken takes hundreds of milliseconds (compared to this arithmetic being ~free).
|
||||
// source: https://help.openai.com/en/articles/4936856-what-are-tokens-and-how-to-count-them
|
||||
let token_estimate = prompt.len() * 2 / 9;
|
||||
let duration = start.elapsed();
|
||||
log::info!(
|
||||
"Time taken to count tokens for prompt of length {:?}B: {:?}",
|
||||
prompt.len(),
|
||||
duration
|
||||
);
|
||||
|
||||
if token_estimate < max_token_count {
|
||||
prompts.push(prompt);
|
||||
} else if current_summaries.len() == 1 {
|
||||
log::warn!("Inferring context for a single file's summary failed because the prompt's token length exceeded the model's token limit.");
|
||||
} else {
|
||||
log::info!(
|
||||
"Context inference using file summaries resulted in a prompt containing {token_estimate} tokens, which exceeded the model's max of {max_token_count}. Retrying as two separate prompts, each including half the number of summaries.",
|
||||
);
|
||||
let (left, right) = current_summaries.split_at(current_summaries.len() / 2);
|
||||
stack.push(right);
|
||||
stack.push(left);
|
||||
}
|
||||
}
|
||||
|
||||
let all_start = std::time::Instant::now();
|
||||
|
||||
let (tx, rx) = channel::bounded(1024);
|
||||
|
||||
let completion_streams = prompts
|
||||
.into_iter()
|
||||
.map(|prompt| {
|
||||
let request = make_request(prompt.clone());
|
||||
let model = model.clone();
|
||||
let tx = tx.clone();
|
||||
let stream = model.stream_completion(request, &cx);
|
||||
|
||||
(stream, tx)
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
cx.background_spawn(async move {
|
||||
let futures = completion_streams
|
||||
.into_iter()
|
||||
.enumerate()
|
||||
.map(|(ix, (stream, tx))| async move {
|
||||
let start = std::time::Instant::now();
|
||||
let events = stream.await?;
|
||||
log::info!("Time taken for awaiting /await chunk stream #{ix}: {:?}", start.elapsed());
|
||||
|
||||
let completion: String = events
|
||||
.filter_map(|event| async {
|
||||
if let Ok(LanguageModelCompletionEvent::Text(text)) = event {
|
||||
Some(text)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.collect()
|
||||
.await;
|
||||
|
||||
log::info!("Time taken for all /auto chunks to come back for #{ix}: {:?}", start.elapsed());
|
||||
|
||||
for line in completion.split('\n') {
|
||||
if let Some(first_space) = line.find(' ') {
|
||||
let command = &line[..first_space].trim();
|
||||
let arg = &line[first_space..].trim();
|
||||
|
||||
tx.send(CommandToRun {
|
||||
name: command.to_string(),
|
||||
arg: arg.to_string(),
|
||||
})
|
||||
.await?;
|
||||
} else if !line.trim().is_empty() {
|
||||
// All slash-commands currently supported in context inference need a space for the argument.
|
||||
log::warn!(
|
||||
"Context inference returned a non-blank line that contained no spaces (meaning no argument for the slash command): {:?}",
|
||||
line
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
anyhow::Ok(())
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let _ = futures::future::try_join_all(futures).await.log_err();
|
||||
|
||||
let duration = all_start.elapsed();
|
||||
eprintln!("All futures completed in {:?}", duration);
|
||||
})
|
||||
.await;
|
||||
|
||||
drop(tx); // Close the channel so that rx.collect() won't hang. This is safe because all futures have completed.
|
||||
let results = rx.collect::<Vec<_>>().await;
|
||||
eprintln!(
|
||||
"Finished collecting from the channel with {} results",
|
||||
results.len()
|
||||
);
|
||||
for command in results {
|
||||
// Don't return empty or duplicate commands
|
||||
if !command.name.is_empty()
|
||||
&& !final_response
|
||||
.iter()
|
||||
.any(|cmd: &CommandToRun| cmd.name == command.name && cmd.arg == command.arg)
|
||||
{
|
||||
if SUPPORTED_SLASH_COMMANDS
|
||||
.iter()
|
||||
.any(|supported| &command.name == supported)
|
||||
{
|
||||
final_response.push(command);
|
||||
} else {
|
||||
log::warn!(
|
||||
"Context inference returned an unrecognized slash command: {:?}",
|
||||
command
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Sort the commands by name (reversed just so that /search appears before /file)
|
||||
final_response.sort_by(|cmd1, cmd2| cmd1.name.cmp(&cmd2.name).reverse());
|
||||
|
||||
Ok(final_response)
|
||||
}
|
||||
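As a rough standalone illustration of the character-based token estimate used in the function above (`prompt.len() * 2 / 9`, i.e. roughly one token per 4.5 characters), here is a minimal sketch; the sample prompt string is illustrative only:

```rust
// Sketch of the heuristic used above: estimate ~1 token per 4.5 characters,
// computed as len * 2 / 9, instead of calling a real tokenizer.
fn estimate_tokens(prompt: &str) -> usize {
    prompt.len() * 2 / 9
}

fn main() {
    let prompt = "Summarize the structure of this project for me."; // illustrative only
    println!("{} bytes ~= {} tokens", prompt.len(), estimate_tokens(prompt));
}
```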
@@ -1,24 +0,0 @@
Actions have a cost, so only include actions that you think
will be helpful to you in doing a great job answering the
prompt in the future.

You must respond ONLY with a list of actions you would like to
perform. Each action should be on its own line, and followed by a space and then its parameter.

Actions can be performed more than once with different parameters.
Here is an example valid response:

```
file path/to/my/file.txt
file path/to/another/file.txt
search something to search for
search something else to search for
```

Once again, do not forget: you must respond ONLY in the format of
one action per line, and the action name should be followed by
its parameter. Your response must not include anything other
than a list of actions, with one action per line, in this format.
It is extremely important that you do not deviate from this format even slightly!

This is the end of my instructions for how to respond. The rest is the prompt:
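A minimal standalone sketch of parsing the response format this prompt asks for (one action per line, the action name followed by a space and its parameter), mirroring the split-on-first-space loop earlier in this section; the sample response string is illustrative only:

```rust
// Parse "action parameter" pairs, one per line, as the prompt above requires.
fn parse_actions(response: &str) -> Vec<(String, String)> {
    let mut actions = Vec::new();
    for line in response.lines() {
        if let Some(first_space) = line.find(' ') {
            let name = line[..first_space].trim().to_string();
            let arg = line[first_space..].trim().to_string();
            actions.push((name, arg));
        } else if !line.trim().is_empty() {
            // Every supported action takes a parameter, so a space is required.
            eprintln!("line has no parameter: {line:?}");
        }
    }
    actions
}

fn main() {
    let response = "file path/to/my/file.txt\nsearch something to search for";
    for (name, arg) in parse_actions(response) {
        println!("/{name} {arg}");
    }
}
```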
@@ -1,31 +0,0 @@
I'm going to give you a prompt. I don't want you to respond
to the prompt itself. I want you to figure out which of the following
actions on my project, if any, would help you answer the prompt.

Here are the actions:

## file

This action's parameter is a file path to one of the files
in the project. If you ask for this action, I will tell you
the full contents of the file, so you can learn all the
details of the file.

## search

This action's parameter is a string to do a semantic search for
across the files in the project. (You will have a JSON summary
of all the files in the project.) It will tell you which files this string
(or similar strings; it is a semantic search) appear in,
as well as some context of the lines surrounding each result.
It's very important that you only use this action when you think
that searching across the specific files in this project for the query
in question will be useful. For example, don't use this command to search
for queries you might put into a general Web search engine, because those
will be too general to give useful results in this project-specific search.

---

That was the end of the list of actions.

Here is a JSON summary of each of the files in my project:
@@ -35,6 +35,7 @@ ui.workspace = true
util.workspace = true
workspace.workspace = true
worktree.workspace = true
open = { workspace = true }

[dev-dependencies]
collections = { workspace = true, features = ["test-support"] }
@@ -1,4 +1,5 @@
|
||||
mod bash_tool;
|
||||
mod batch_tool;
|
||||
mod copy_path_tool;
|
||||
mod create_directory_tool;
|
||||
mod create_file_tool;
|
||||
@@ -10,6 +11,7 @@ mod find_replace_file_tool;
|
||||
mod list_directory_tool;
|
||||
mod move_path_tool;
|
||||
mod now_tool;
|
||||
mod open_tool;
|
||||
mod path_search_tool;
|
||||
mod read_file_tool;
|
||||
mod regex_search_tool;
|
||||
@@ -25,6 +27,7 @@ use http_client::HttpClientWithUrl;
|
||||
use move_path_tool::MovePathTool;
|
||||
|
||||
use crate::bash_tool::BashTool;
|
||||
use crate::batch_tool::BatchTool;
|
||||
use crate::create_directory_tool::CreateDirectoryTool;
|
||||
use crate::create_file_tool::CreateFileTool;
|
||||
use crate::delete_path_tool::DeletePathTool;
|
||||
@@ -34,6 +37,7 @@ use crate::fetch_tool::FetchTool;
|
||||
use crate::find_replace_file_tool::FindReplaceFileTool;
|
||||
use crate::list_directory_tool::ListDirectoryTool;
|
||||
use crate::now_tool::NowTool;
|
||||
use crate::open_tool::OpenTool;
|
||||
use crate::path_search_tool::PathSearchTool;
|
||||
use crate::read_file_tool::ReadFileTool;
|
||||
use crate::regex_search_tool::RegexSearchTool;
|
||||
@@ -45,6 +49,7 @@ pub fn init(http_client: Arc<HttpClientWithUrl>, cx: &mut App) {
|
||||
|
||||
let registry = ToolRegistry::global(cx);
|
||||
registry.register_tool(BashTool);
|
||||
registry.register_tool(BatchTool);
|
||||
registry.register_tool(CreateDirectoryTool);
|
||||
registry.register_tool(CreateFileTool);
|
||||
registry.register_tool(CopyPathTool);
|
||||
@@ -55,6 +60,7 @@ pub fn init(http_client: Arc<HttpClientWithUrl>, cx: &mut App) {
|
||||
registry.register_tool(EditFilesTool);
|
||||
registry.register_tool(ListDirectoryTool);
|
||||
registry.register_tool(NowTool);
|
||||
registry.register_tool(OpenTool);
|
||||
registry.register_tool(PathSearchTool);
|
||||
registry.register_tool(ReadFileTool);
|
||||
registry.register_tool(RegexSearchTool);
|
||||
|
||||
@@ -5,6 +5,7 @@ use language_model::LanguageModelRequestMessage;
|
||||
use project::Project;
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::path::Path;
|
||||
use std::sync::Arc;
|
||||
use ui::IconName;
|
||||
use util::command::new_smol_command;
|
||||
@@ -45,11 +46,10 @@ impl Tool for BashTool {
|
||||
fn ui_text(&self, input: &serde_json::Value) -> String {
|
||||
match serde_json::from_value::<BashToolInput>(input.clone()) {
|
||||
Ok(input) => {
|
||||
let cmd = MarkdownString::escape(&input.command);
|
||||
if input.command.contains('\n') {
|
||||
format!("```bash\n{cmd}\n```")
|
||||
MarkdownString::code_block("bash", &input.command).0
|
||||
} else {
|
||||
format!("`{cmd}`")
|
||||
MarkdownString::inline_code(&input.command).0
|
||||
}
|
||||
}
|
||||
Err(_) => "Run bash command".to_string(),
|
||||
@@ -69,10 +69,44 @@ impl Tool for BashTool {
|
||||
Err(err) => return Task::ready(Err(anyhow!(err))),
|
||||
};
|
||||
|
||||
let Some(worktree) = project.read(cx).worktree_for_root_name(&input.cd, cx) else {
|
||||
return Task::ready(Err(anyhow!("Working directory not found in the project")));
|
||||
let project = project.read(cx);
|
||||
let input_path = Path::new(&input.cd);
|
||||
let working_dir = if input.cd == "." {
|
||||
// Accept "." as meaning "the one worktree" if we only have one worktree.
|
||||
let mut worktrees = project.worktrees(cx);
|
||||
|
||||
let only_worktree = match worktrees.next() {
|
||||
Some(worktree) => worktree,
|
||||
None => return Task::ready(Err(anyhow!("No worktrees found in the project"))),
|
||||
};
|
||||
|
||||
if worktrees.next().is_some() {
|
||||
return Task::ready(Err(anyhow!("'.' is ambiguous in multi-root workspaces. Please specify a root directory explicitly.")));
|
||||
}
|
||||
|
||||
only_worktree.read(cx).abs_path()
|
||||
} else if input_path.is_absolute() {
|
||||
// Absolute paths are allowed, but only if they're in one of the project's worktrees.
|
||||
if !project
|
||||
.worktrees(cx)
|
||||
.any(|worktree| input_path.starts_with(&worktree.read(cx).abs_path()))
|
||||
{
|
||||
return Task::ready(Err(anyhow!(
|
||||
"The absolute path must be within one of the project's worktrees"
|
||||
)));
|
||||
}
|
||||
|
||||
input_path.into()
|
||||
} else {
|
||||
let Some(worktree) = project.worktree_for_root_name(&input.cd, cx) else {
|
||||
return Task::ready(Err(anyhow!(
|
||||
"`cd` directory {} not found in the project",
|
||||
&input.cd
|
||||
)));
|
||||
};
|
||||
|
||||
worktree.read(cx).abs_path()
|
||||
};
|
||||
let working_directory = worktree.read(cx).abs_path();
|
||||
|
||||
cx.spawn(async move |_| {
|
||||
// Add 2>&1 to merge stderr into stdout for proper interleaving.
|
||||
@@ -81,7 +115,7 @@ impl Tool for BashTool {
|
||||
let output = new_smol_command("bash")
|
||||
.arg("-c")
|
||||
.arg(&command)
|
||||
.current_dir(working_directory)
|
||||
.current_dir(working_dir)
|
||||
.output()
|
||||
.await
|
||||
.context("Failed to execute bash command")?;
|
||||
|
||||
301 crates/assistant_tools/src/batch_tool.rs Normal file
@@ -0,0 +1,301 @@
|
||||
use anyhow::{anyhow, Result};
|
||||
use assistant_tool::{ActionLog, Tool, ToolWorkingSet};
|
||||
use futures::future::join_all;
|
||||
use gpui::{App, AppContext, Entity, Task};
|
||||
use language_model::LanguageModelRequestMessage;
|
||||
use project::Project;
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::sync::Arc;
|
||||
use ui::IconName;
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct ToolInvocation {
|
||||
/// The name of the tool to invoke
|
||||
pub name: String,
|
||||
|
||||
/// The input to the tool in JSON format
|
||||
pub input: serde_json::Value,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct BatchToolInput {
|
||||
/// The tool invocations to run as a batch. These tools will be run either sequentially
|
||||
/// or concurrently depending on the `run_tools_concurrently` flag.
|
||||
///
|
||||
/// <example>
|
||||
/// Basic file operations (concurrent)
|
||||
///
|
||||
/// ```json
|
||||
/// {
|
||||
/// "invocations": [
|
||||
/// {
|
||||
/// "name": "read-file",
|
||||
/// "input": {
|
||||
/// "path": "src/main.rs"
|
||||
/// }
|
||||
/// },
|
||||
/// {
|
||||
/// "name": "list-directory",
|
||||
/// "input": {
|
||||
/// "path": "src/lib"
|
||||
/// }
|
||||
/// },
|
||||
/// {
|
||||
/// "name": "regex-search",
|
||||
/// "input": {
|
||||
/// "regex": "fn run\\("
|
||||
/// }
|
||||
/// }
|
||||
/// ],
|
||||
/// "run_tools_concurrently": true
|
||||
/// }
|
||||
/// ```
|
||||
/// </example>
|
||||
///
|
||||
/// <example>
|
||||
/// Multiple find-replace operations on the same file (sequential)
|
||||
///
|
||||
/// ```json
|
||||
/// {
|
||||
/// "invocations": [
|
||||
/// {
|
||||
/// "name": "find-replace-file",
|
||||
/// "input": {
|
||||
/// "path": "src/config.rs",
|
||||
/// "display_description": "Update default timeout value",
|
||||
/// "find": "pub const DEFAULT_TIMEOUT: u64 = 30;\n\npub const MAX_RETRIES: u32 = 3;\n\npub const SERVER_URL: &str = \"https://api.example.com\";",
|
||||
/// "replace": "pub const DEFAULT_TIMEOUT: u64 = 60;\n\npub const MAX_RETRIES: u32 = 3;\n\npub const SERVER_URL: &str = \"https://api.example.com\";"
|
||||
/// }
|
||||
/// },
|
||||
/// {
|
||||
/// "name": "find-replace-file",
|
||||
/// "input": {
|
||||
/// "path": "src/config.rs",
|
||||
/// "display_description": "Update API endpoint URL",
|
||||
/// "find": "pub const MAX_RETRIES: u32 = 3;\n\npub const SERVER_URL: &str = \"https://api.example.com\";\n\npub const API_VERSION: &str = \"v1\";",
|
||||
/// "replace": "pub const MAX_RETRIES: u32 = 3;\n\npub const SERVER_URL: &str = \"https://api.newdomain.com\";\n\npub const API_VERSION: &str = \"v1\";"
|
||||
/// }
|
||||
/// }
|
||||
/// ],
|
||||
/// "run_tools_concurrently": false
|
||||
/// }
|
||||
/// ```
|
||||
/// </example>
|
||||
///
|
||||
/// <example>
|
||||
/// Searching and analyzing code (concurrent)
|
||||
///
|
||||
/// ```json
|
||||
/// {
|
||||
/// "invocations": [
|
||||
/// {
|
||||
/// "name": "regex-search",
|
||||
/// "input": {
|
||||
/// "regex": "impl Database"
|
||||
/// }
|
||||
/// },
|
||||
/// {
|
||||
/// "name": "path-search",
|
||||
/// "input": {
|
||||
/// "glob": "**/*test*.rs"
|
||||
/// }
|
||||
/// }
|
||||
/// ],
|
||||
/// "run_tools_concurrently": true
|
||||
/// }
|
||||
/// ```
|
||||
/// </example>
|
||||
///
|
||||
/// <example>
|
||||
/// Multi-file refactoring (concurrent)
|
||||
///
|
||||
/// ```json
|
||||
/// {
|
||||
/// "invocations": [
|
||||
/// {
|
||||
/// "name": "find-replace-file",
|
||||
/// "input": {
|
||||
/// "path": "src/models/user.rs",
|
||||
/// "display_description": "Add email field to User struct",
|
||||
/// "find": "pub struct User {\n pub id: u64,\n pub username: String,\n pub created_at: DateTime<Utc>,\n}",
|
||||
/// "replace": "pub struct User {\n pub id: u64,\n pub username: String,\n pub email: String,\n pub created_at: DateTime<Utc>,\n}"
|
||||
/// }
|
||||
/// },
|
||||
/// {
|
||||
/// "name": "find-replace-file",
|
||||
/// "input": {
|
||||
/// "path": "src/db/queries.rs",
|
||||
/// "display_description": "Update user insertion query",
|
||||
/// "find": "pub async fn insert_user(conn: &mut Connection, user: &User) -> Result<(), DbError> {\n conn.execute(\n \"INSERT INTO users (id, username, created_at) VALUES ($1, $2, $3)\",\n &[&user.id, &user.username, &user.created_at],\n ).await?;\n \n Ok(())\n}",
|
||||
/// "replace": "pub async fn insert_user(conn: &mut Connection, user: &User) -> Result<(), DbError> {\n conn.execute(\n \"INSERT INTO users (id, username, email, created_at) VALUES ($1, $2, $3, $4)\",\n &[&user.id, &user.username, &user.email, &user.created_at],\n ).await?;\n \n Ok(())\n}"
|
||||
/// }
|
||||
/// }
|
||||
/// ],
|
||||
/// "run_tools_concurrently": true
|
||||
/// }
|
||||
/// ```
|
||||
/// </example>
|
||||
pub invocations: Vec<ToolInvocation>,
|
||||
|
||||
/// Whether to run the tools in this batch concurrently. If this is false (the default), the tools will run sequentially.
|
||||
#[serde(default)]
|
||||
pub run_tools_concurrently: bool,
|
||||
}
|
||||
|
||||
pub struct BatchTool;
|
||||
|
||||
impl Tool for BatchTool {
|
||||
fn name(&self) -> String {
|
||||
"batch-tool".into()
|
||||
}
|
||||
|
||||
fn needs_confirmation(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
include_str!("./batch_tool/description.md").into()
|
||||
}
|
||||
|
||||
fn icon(&self) -> IconName {
|
||||
IconName::Cog
|
||||
}
|
||||
|
||||
fn input_schema(&self) -> serde_json::Value {
|
||||
let schema = schemars::schema_for!(BatchToolInput);
|
||||
serde_json::to_value(&schema).unwrap()
|
||||
}
|
||||
|
||||
fn ui_text(&self, input: &serde_json::Value) -> String {
|
||||
match serde_json::from_value::<BatchToolInput>(input.clone()) {
|
||||
Ok(input) => {
|
||||
let count = input.invocations.len();
|
||||
let mode = if input.run_tools_concurrently {
|
||||
"concurrently"
|
||||
} else {
|
||||
"sequentially"
|
||||
};
|
||||
|
||||
let first_tool_name = input
|
||||
.invocations
|
||||
.first()
|
||||
.map(|inv| inv.name.clone())
|
||||
.unwrap_or_default();
|
||||
|
||||
let all_same = input
|
||||
.invocations
|
||||
.iter()
|
||||
.all(|invocation| invocation.name == first_tool_name);
|
||||
|
||||
if all_same {
|
||||
format!(
|
||||
"Run `{}` {} times {}",
|
||||
first_tool_name,
|
||||
input.invocations.len(),
|
||||
mode
|
||||
)
|
||||
} else {
|
||||
format!("Run {} tools {}", count, mode)
|
||||
}
|
||||
}
|
||||
Err(_) => "Batch tools".to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
input: serde_json::Value,
|
||||
messages: &[LanguageModelRequestMessage],
|
||||
project: Entity<Project>,
|
||||
action_log: Entity<ActionLog>,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<String>> {
|
||||
let input = match serde_json::from_value::<BatchToolInput>(input) {
|
||||
Ok(input) => input,
|
||||
Err(err) => return Task::ready(Err(anyhow!(err))),
|
||||
};
|
||||
|
||||
if input.invocations.is_empty() {
|
||||
return Task::ready(Err(anyhow!("No tool invocations provided")));
|
||||
}
|
||||
|
||||
let run_tools_concurrently = input.run_tools_concurrently;
|
||||
|
||||
let foreground_task = {
|
||||
let working_set = ToolWorkingSet::default();
|
||||
let invocations = input.invocations;
|
||||
let messages = messages.to_vec();
|
||||
|
||||
cx.spawn(async move |cx| {
|
||||
let mut tasks = Vec::new();
|
||||
let mut tool_names = Vec::new();
|
||||
|
||||
for invocation in invocations {
|
||||
let tool_name = invocation.name.clone();
|
||||
tool_names.push(tool_name.clone());
|
||||
|
||||
let tool = cx
|
||||
.update(|cx| working_set.tool(&tool_name, cx))
|
||||
.map_err(|err| {
|
||||
anyhow!("Failed to look up tool '{}': {}", tool_name, err)
|
||||
})?;
|
||||
|
||||
let Some(tool) = tool else {
|
||||
return Err(anyhow!("Tool '{}' not found", tool_name));
|
||||
};
|
||||
|
||||
let project = project.clone();
|
||||
let action_log = action_log.clone();
|
||||
let messages = messages.clone();
|
||||
let task = cx
|
||||
.update(|cx| tool.run(invocation.input, &messages, project, action_log, cx))
|
||||
.map_err(|err| anyhow!("Failed to start tool '{}': {}", tool_name, err))?;
|
||||
|
||||
tasks.push(task);
|
||||
}
|
||||
|
||||
Ok((tasks, tool_names))
|
||||
})
|
||||
};
|
||||
|
||||
cx.background_spawn(async move {
|
||||
let (tasks, tool_names) = foreground_task.await?;
|
||||
let mut results = Vec::with_capacity(tasks.len());
|
||||
|
||||
if run_tools_concurrently {
|
||||
results.extend(join_all(tasks).await)
|
||||
} else {
|
||||
for task in tasks {
|
||||
results.push(task.await);
|
||||
}
|
||||
};
|
||||
|
||||
let mut formatted_results = String::new();
|
||||
let mut error_occurred = false;
|
||||
|
||||
for (i, result) in results.into_iter().enumerate() {
|
||||
let tool_name = &tool_names[i];
|
||||
|
||||
match result {
|
||||
Ok(output) => {
|
||||
formatted_results
|
||||
.push_str(&format!("Tool '{}' result:\n{}\n\n", tool_name, output));
|
||||
}
|
||||
Err(err) => {
|
||||
error_occurred = true;
|
||||
formatted_results
|
||||
.push_str(&format!("Tool '{}' error: {}\n\n", tool_name, err));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if error_occurred {
|
||||
formatted_results
|
||||
.push_str("Note: Some tool invocations failed. See individual results above.");
|
||||
}
|
||||
|
||||
Ok(formatted_results.trim().to_string())
|
||||
})
|
||||
}
|
||||
}
|
||||
9 crates/assistant_tools/src/batch_tool/description.md Normal file
@@ -0,0 +1,9 @@
Invoke multiple other tool calls either sequentially or concurrently.

This tool is useful when you need to perform several operations at once, improving efficiency by reducing the number of back-and-forth interactions needed to complete complex tasks.

If the tool calls are set to be run sequentially, then each tool call within the batch is executed in the order provided. If it's set to run concurrently, then they may run in a different order. Regardless, all tool calls will have the same permissions and context as if they were called individually.

This tool should never be used to run a total of one tool. Instead, just run that one tool directly. You can run batches within batches if desired, which is a way you can mix concurrent and sequential tool call execution.

When it's possible to run tools in a batch, you should run as many as possible in the batch, up to a maximum of 32. For example, don't run multiple consecutive batches of 10 when you could instead run one batch of 30.
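A minimal standalone sketch of the sequential-versus-concurrent execution strategy described here, using the same `futures::future::join_all` approach as `BatchTool::run` above; the demo futures and result strings are illustrative only, not the tool's actual code:

```rust
// Sketch of running a batch of futures either concurrently (join_all) or
// sequentially (awaiting one at a time), mirroring the strategy described above.
use futures::executor::block_on;
use futures::future::{join_all, BoxFuture};
use futures::FutureExt as _;

async fn run_batch(tasks: Vec<BoxFuture<'static, String>>, concurrently: bool) -> Vec<String> {
    if concurrently {
        // All futures are polled together; results still come back in input order.
        join_all(tasks).await
    } else {
        // Await each future before starting the next one.
        let mut results = Vec::with_capacity(tasks.len());
        for task in tasks {
            results.push(task.await);
        }
        results
    }
}

fn main() {
    let tasks: Vec<BoxFuture<'static, String>> = vec![
        async { "read-file result".to_string() }.boxed(),
        async { "regex-search result".to_string() }.boxed(),
    ];
    println!("{:?}", block_on(run_batch(tasks, true)));
}
```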
@@ -61,9 +61,9 @@ impl Tool for CopyPathTool {
|
||||
fn ui_text(&self, input: &serde_json::Value) -> String {
|
||||
match serde_json::from_value::<CopyPathToolInput>(input.clone()) {
|
||||
Ok(input) => {
|
||||
let src = MarkdownString::escape(&input.source_path);
|
||||
let dest = MarkdownString::escape(&input.destination_path);
|
||||
format!("Copy `{src}` to `{dest}`")
|
||||
let src = MarkdownString::inline_code(&input.source_path);
|
||||
let dest = MarkdownString::inline_code(&input.destination_path);
|
||||
format!("Copy {src} to {dest}")
|
||||
}
|
||||
Err(_) => "Copy path".to_string(),
|
||||
}
|
||||
|
||||
@@ -51,7 +51,10 @@ impl Tool for CreateDirectoryTool {
|
||||
fn ui_text(&self, input: &serde_json::Value) -> String {
|
||||
match serde_json::from_value::<CreateDirectoryToolInput>(input.clone()) {
|
||||
Ok(input) => {
|
||||
format!("Create directory `{}`", MarkdownString::escape(&input.path))
|
||||
format!(
|
||||
"Create directory {}",
|
||||
MarkdownString::inline_code(&input.path)
|
||||
)
|
||||
}
|
||||
Err(_) => "Create directory".to_string(),
|
||||
}
|
||||
|
||||
@@ -58,8 +58,8 @@ impl Tool for CreateFileTool {
|
||||
fn ui_text(&self, input: &serde_json::Value) -> String {
|
||||
match serde_json::from_value::<CreateFileToolInput>(input.clone()) {
|
||||
Ok(input) => {
|
||||
let path = MarkdownString::escape(&input.path);
|
||||
format!("Create file `{path}`")
|
||||
let path = MarkdownString::inline_code(&input.path);
|
||||
format!("Create file {path}")
|
||||
}
|
||||
Err(_) => "Create file".to_string(),
|
||||
}
|
||||
|
||||
@@ -66,11 +66,11 @@ impl Tool for DiagnosticsTool {
|
||||
if let Some(path) = serde_json::from_value::<DiagnosticsToolInput>(input.clone())
|
||||
.ok()
|
||||
.and_then(|input| match input.path {
|
||||
Some(path) if !path.is_empty() => Some(MarkdownString::escape(&path)),
|
||||
Some(path) if !path.is_empty() => Some(MarkdownString::inline_code(&path)),
|
||||
_ => None,
|
||||
})
|
||||
{
|
||||
format!("Check diagnostics for `{path}`")
|
||||
format!("Check diagnostics for {path}")
|
||||
} else {
|
||||
"Check project diagnostics".to_string()
|
||||
}
|
||||
|
||||
@@ -1,5 +1,7 @@
|
||||
Edit files in the current project by specifying instructions in natural language.
|
||||
|
||||
IMPORTANT NOTE: If there is a find-replace tool, use that instead of this tool! This tool is only to be used as a fallback in case that tool is unavailable. Always prefer that tool if it is available.
|
||||
|
||||
When using this tool, you should suggest one coherent edit that can be made to the codebase.
|
||||
|
||||
When the set of edits you want to make is large or complex, feel free to invoke this tool multiple times, each time focusing on a specific change you want to make.
|
||||
|
||||
@@ -63,8 +63,8 @@ impl Tool for ListDirectoryTool {
|
||||
fn ui_text(&self, input: &serde_json::Value) -> String {
|
||||
match serde_json::from_value::<ListDirectoryToolInput>(input.clone()) {
|
||||
Ok(input) => {
|
||||
let path = MarkdownString::escape(&input.path);
|
||||
format!("List the `{path}` directory's contents")
|
||||
let path = MarkdownString::inline_code(&input.path);
|
||||
format!("List the {path} directory's contents")
|
||||
}
|
||||
Err(_) => "List directory".to_string(),
|
||||
}
|
||||
|
||||
@@ -61,8 +61,8 @@ impl Tool for MovePathTool {
|
||||
fn ui_text(&self, input: &serde_json::Value) -> String {
|
||||
match serde_json::from_value::<MovePathToolInput>(input.clone()) {
|
||||
Ok(input) => {
|
||||
let src = MarkdownString::escape(&input.source_path);
|
||||
let dest = MarkdownString::escape(&input.destination_path);
|
||||
let src = MarkdownString::inline_code(&input.source_path);
|
||||
let dest = MarkdownString::inline_code(&input.destination_path);
|
||||
let src_path = Path::new(&input.source_path);
|
||||
let dest_path = Path::new(&input.destination_path);
|
||||
|
||||
@@ -71,11 +71,11 @@ impl Tool for MovePathTool {
|
||||
.and_then(|os_str| os_str.to_os_string().into_string().ok())
|
||||
{
|
||||
Some(filename) if src_path.parent() == dest_path.parent() => {
|
||||
let filename = MarkdownString::escape(&filename);
|
||||
format!("Rename `{src}` to `{filename}`")
|
||||
let filename = MarkdownString::inline_code(&filename);
|
||||
format!("Rename {src} to {filename}")
|
||||
}
|
||||
_ => {
|
||||
format!("Move `{src}` to `{dest}`")
|
||||
format!("Move {src} to {dest}")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
68 crates/assistant_tools/src/open_tool.rs Normal file
@@ -0,0 +1,68 @@
|
||||
use anyhow::{anyhow, Context as _, Result};
|
||||
use assistant_tool::{ActionLog, Tool};
|
||||
use gpui::{App, AppContext, Entity, Task};
|
||||
use language_model::LanguageModelRequestMessage;
|
||||
use project::Project;
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::sync::Arc;
|
||||
use ui::IconName;
|
||||
use util::markdown::MarkdownString;
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, JsonSchema)]
|
||||
pub struct OpenToolInput {
|
||||
/// The path or URL to open with the default application.
|
||||
path_or_url: String,
|
||||
}
|
||||
|
||||
pub struct OpenTool;
|
||||
|
||||
impl Tool for OpenTool {
|
||||
fn name(&self) -> String {
|
||||
"open".to_string()
|
||||
}
|
||||
|
||||
fn needs_confirmation(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
include_str!("./open_tool/description.md").to_string()
|
||||
}
|
||||
|
||||
fn icon(&self) -> IconName {
|
||||
IconName::ExternalLink
|
||||
}
|
||||
|
||||
fn input_schema(&self) -> serde_json::Value {
|
||||
let schema = schemars::schema_for!(OpenToolInput);
|
||||
serde_json::to_value(&schema).unwrap()
|
||||
}
|
||||
|
||||
fn ui_text(&self, input: &serde_json::Value) -> String {
|
||||
match serde_json::from_value::<OpenToolInput>(input.clone()) {
|
||||
Ok(input) => format!("Open `{}`", MarkdownString::escape(&input.path_or_url)),
|
||||
Err(_) => "Open file or URL".to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
input: serde_json::Value,
|
||||
_messages: &[LanguageModelRequestMessage],
|
||||
_project: Entity<Project>,
|
||||
_action_log: Entity<ActionLog>,
|
||||
cx: &mut App,
|
||||
) -> Task<Result<String>> {
|
||||
let input: OpenToolInput = match serde_json::from_value(input) {
|
||||
Ok(input) => input,
|
||||
Err(err) => return Task::ready(Err(anyhow!(err))),
|
||||
};
|
||||
|
||||
cx.background_spawn(async move {
|
||||
open::that(&input.path_or_url).context("Failed to open URL or file path")?;
|
||||
|
||||
Ok(format!("Successfully opened {}", input.path_or_url))
|
||||
})
|
||||
}
|
||||
}
|
||||
6 crates/assistant_tools/src/open_tool/description.md Normal file
@@ -0,0 +1,6 @@
This tool opens a file or URL with the default application associated with it on the user's operating system:
- On macOS, it's equivalent to the `open` command
- On Windows, it's equivalent to `start`
- On Linux, it uses `xdg-open`, `gio open`, `gnome-open`, `kde-open`, or `wslview`, as appropriate

For example, it can open a web browser with a URL, open a PDF file with the default PDF viewer, etc.
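Under the hood, the tool above delegates to the `open` crate; a minimal standalone sketch of that call (the URL here is illustrative only):

```rust
// `open::that` hands the path or URL to the platform's default opener
// (`open` on macOS, `start` on Windows, `xdg-open` and friends on Linux).
use anyhow::{Context as _, Result};

fn main() -> Result<()> {
    let path_or_url = "https://zed.dev"; // illustrative input
    open::that(path_or_url).context("Failed to open URL or file path")?;
    println!("Successfully opened {path_or_url}");
    Ok(())
}
```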
@@ -66,8 +66,8 @@ impl Tool for ReadFileTool {
|
||||
fn ui_text(&self, input: &serde_json::Value) -> String {
|
||||
match serde_json::from_value::<ReadFileToolInput>(input.clone()) {
|
||||
Ok(input) => {
|
||||
let path = MarkdownString::escape(&input.path.display().to_string());
|
||||
format!("Read file `{path}`")
|
||||
let path = MarkdownString::inline_code(&input.path.display().to_string());
|
||||
format!("Read file {path}")
|
||||
}
|
||||
Err(_) => "Read file".to_string(),
|
||||
}
|
||||
|
||||
@@ -64,12 +64,12 @@ impl Tool for RegexSearchTool {
|
||||
match serde_json::from_value::<RegexSearchToolInput>(input.clone()) {
|
||||
Ok(input) => {
|
||||
let page = input.page();
|
||||
let regex = MarkdownString::escape(&input.regex);
|
||||
let regex = MarkdownString::inline_code(&input.regex);
|
||||
|
||||
if page > 1 {
|
||||
format!("Get page {page} of search results for regex “`{regex}`”")
|
||||
format!("Get page {page} of search results for regex “{regex}”")
|
||||
} else {
|
||||
format!("Search files for regex “`{regex}`”")
|
||||
format!("Search files for regex “{regex}”")
|
||||
}
|
||||
}
|
||||
Err(_) => "Search with regex".to_string(),
|
||||
|
||||
@@ -37,6 +37,8 @@ pub enum Model {
|
||||
CohereCommandRV1,
|
||||
CohereCommandRPlusV1,
|
||||
CohereCommandLightTextV14_4k,
|
||||
// DeepSeek
|
||||
DeepSeekR1,
|
||||
// Meta models
|
||||
MetaLlama38BInstructV1,
|
||||
MetaLlama370BInstructV1,
|
||||
@@ -91,6 +93,7 @@ impl Model {
|
||||
Model::AmazonNovaLite => "us.amazon.nova-lite-v1:0",
|
||||
Model::AmazonNovaMicro => "us.amazon.nova-micro-v1:0",
|
||||
Model::AmazonNovaPro => "us.amazon.nova-pro-v1:0",
|
||||
Model::DeepSeekR1 => "us.deepseek.r1-v1:0",
|
||||
Model::AI21J2GrandeInstruct => "ai21.j2-grande-instruct",
|
||||
Model::AI21J2JumboInstruct => "ai21.j2-jumbo-instruct",
|
||||
Model::AI21J2Mid => "ai21.j2-mid",
|
||||
@@ -133,6 +136,7 @@ impl Model {
|
||||
Self::AmazonNovaLite => "Amazon Nova Lite",
|
||||
Self::AmazonNovaMicro => "Amazon Nova Micro",
|
||||
Self::AmazonNovaPro => "Amazon Nova Pro",
|
||||
Self::DeepSeekR1 => "DeepSeek R1",
|
||||
Self::AI21J2GrandeInstruct => "AI21 Jurassic2 Grande Instruct",
|
||||
Self::AI21J2JumboInstruct => "AI21 Jurassic2 Jumbo Instruct",
|
||||
Self::AI21J2Mid => "AI21 Jurassic2 Mid",
|
||||
|
||||
@@ -3,9 +3,7 @@ use git2::{DiffLineType as GitDiffLineType, DiffOptions as GitOptions, Patch as
|
||||
use gpui::{App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Task};
|
||||
use language::{Language, LanguageRegistry};
|
||||
use rope::Rope;
|
||||
use std::cmp::Ordering;
|
||||
use std::mem;
|
||||
use std::{future::Future, iter, ops::Range, sync::Arc};
|
||||
use std::{cmp::Ordering, future::Future, iter, mem, ops::Range, sync::Arc};
|
||||
use sum_tree::SumTree;
|
||||
use text::{Anchor, Bias, BufferId, OffsetRangeExt, Point, ToOffset as _};
|
||||
use util::ResultExt;
|
||||
@@ -195,7 +193,7 @@ impl BufferDiffInner {
|
||||
hunks: &[DiffHunk],
|
||||
buffer: &text::BufferSnapshot,
|
||||
file_exists: bool,
|
||||
) -> (Option<Rope>, SumTree<PendingHunk>) {
|
||||
) -> Option<Rope> {
|
||||
let head_text = self
|
||||
.base_text_exists
|
||||
.then(|| self.base_text.as_rope().clone());
|
||||
@@ -208,7 +206,7 @@ impl BufferDiffInner {
|
||||
let (index_text, head_text) = match (index_text, head_text) {
|
||||
(Some(index_text), Some(head_text)) if file_exists || !stage => (index_text, head_text),
|
||||
(index_text, head_text) => {
|
||||
let (rope, new_status) = if stage {
|
||||
let (new_index_text, new_status) = if stage {
|
||||
log::debug!("stage all");
|
||||
(
|
||||
file_exists.then(|| buffer.as_rope().clone()),
|
||||
@@ -228,15 +226,13 @@ impl BufferDiffInner {
|
||||
buffer_version: buffer.version().clone(),
|
||||
new_status,
|
||||
};
|
||||
let tree = SumTree::from_item(hunk, buffer);
|
||||
return (rope, tree);
|
||||
self.pending_hunks = SumTree::from_item(hunk, buffer);
|
||||
return new_index_text;
|
||||
}
|
||||
};
|
||||
|
||||
let mut pending_hunks = SumTree::new(buffer);
|
||||
let mut old_pending_hunks = unstaged_diff
|
||||
.pending_hunks
|
||||
.cursor::<DiffHunkSummary>(buffer);
|
||||
let mut old_pending_hunks = self.pending_hunks.cursor::<DiffHunkSummary>(buffer);
|
||||
|
||||
// first, merge new hunks into pending_hunks
|
||||
for DiffHunk {
|
||||
@@ -261,7 +257,6 @@ impl BufferDiffInner {
|
||||
old_pending_hunks.next(buffer);
|
||||
}
|
||||
|
||||
// merge into pending hunks
|
||||
if (stage && secondary_status == DiffHunkSecondaryStatus::NoSecondaryHunk)
|
||||
|| (!stage && secondary_status == DiffHunkSecondaryStatus::HasSecondaryHunk)
|
||||
{
|
||||
@@ -288,56 +283,71 @@ impl BufferDiffInner {
|
||||
let mut unstaged_hunk_cursor = unstaged_diff.hunks.cursor::<DiffHunkSummary>(buffer);
|
||||
unstaged_hunk_cursor.next(buffer);
|
||||
|
||||
let mut prev_unstaged_hunk_buffer_offset = 0;
|
||||
let mut prev_unstaged_hunk_base_text_offset = 0;
|
||||
let mut edits = Vec::<(Range<usize>, String)>::new();
|
||||
|
||||
// then, iterate over all pending hunks (both new ones and the existing ones) and compute the edits
|
||||
for PendingHunk {
|
||||
let mut prev_unstaged_hunk_buffer_end = 0;
|
||||
let mut prev_unstaged_hunk_base_text_end = 0;
|
||||
let mut edits = Vec::<(Range<usize>, String)>::new();
|
||||
let mut pending_hunks_iter = pending_hunks.iter().cloned().peekable();
|
||||
while let Some(PendingHunk {
|
||||
buffer_range,
|
||||
diff_base_byte_range,
|
||||
..
|
||||
} in pending_hunks.iter().cloned()
|
||||
}) = pending_hunks_iter.next()
|
||||
{
|
||||
let skipped_hunks = unstaged_hunk_cursor.slice(&buffer_range.start, Bias::Left, buffer);
|
||||
// Advance unstaged_hunk_cursor to skip unstaged hunks before current hunk
|
||||
let skipped_unstaged =
|
||||
unstaged_hunk_cursor.slice(&buffer_range.start, Bias::Left, buffer);
|
||||
|
||||
if let Some(secondary_hunk) = skipped_hunks.last() {
|
||||
prev_unstaged_hunk_base_text_offset = secondary_hunk.diff_base_byte_range.end;
|
||||
prev_unstaged_hunk_buffer_offset =
|
||||
secondary_hunk.buffer_range.end.to_offset(buffer);
|
||||
if let Some(unstaged_hunk) = skipped_unstaged.last() {
|
||||
prev_unstaged_hunk_base_text_end = unstaged_hunk.diff_base_byte_range.end;
|
||||
prev_unstaged_hunk_buffer_end = unstaged_hunk.buffer_range.end.to_offset(buffer);
|
||||
}
|
||||
|
||||
// Find where this hunk is in the index if it doesn't overlap
|
||||
let mut buffer_offset_range = buffer_range.to_offset(buffer);
|
||||
let start_overshoot = buffer_offset_range.start - prev_unstaged_hunk_buffer_offset;
|
||||
let mut index_start = prev_unstaged_hunk_base_text_offset + start_overshoot;
|
||||
let start_overshoot = buffer_offset_range.start - prev_unstaged_hunk_buffer_end;
|
||||
let mut index_start = prev_unstaged_hunk_base_text_end + start_overshoot;
|
||||
|
||||
while let Some(unstaged_hunk) = unstaged_hunk_cursor.item().filter(|item| {
|
||||
item.buffer_range
|
||||
.start
|
||||
.cmp(&buffer_range.end, buffer)
|
||||
.is_le()
|
||||
}) {
|
||||
let unstaged_hunk_offset_range = unstaged_hunk.buffer_range.to_offset(buffer);
|
||||
prev_unstaged_hunk_base_text_offset = unstaged_hunk.diff_base_byte_range.end;
|
||||
prev_unstaged_hunk_buffer_offset = unstaged_hunk_offset_range.end;
|
||||
loop {
|
||||
// Merge this hunk with any overlapping unstaged hunks.
|
||||
if let Some(unstaged_hunk) = unstaged_hunk_cursor.item() {
|
||||
let unstaged_hunk_offset_range = unstaged_hunk.buffer_range.to_offset(buffer);
|
||||
if unstaged_hunk_offset_range.start <= buffer_offset_range.end {
|
||||
prev_unstaged_hunk_base_text_end = unstaged_hunk.diff_base_byte_range.end;
|
||||
prev_unstaged_hunk_buffer_end = unstaged_hunk_offset_range.end;
|
||||
|
||||
index_start = index_start.min(unstaged_hunk.diff_base_byte_range.start);
|
||||
buffer_offset_range.start = buffer_offset_range
|
||||
.start
|
||||
.min(unstaged_hunk_offset_range.start);
|
||||
index_start = index_start.min(unstaged_hunk.diff_base_byte_range.start);
|
||||
buffer_offset_range.start = buffer_offset_range
|
||||
.start
|
||||
.min(unstaged_hunk_offset_range.start);
|
||||
buffer_offset_range.end =
|
||||
buffer_offset_range.end.max(unstaged_hunk_offset_range.end);
|
||||
|
||||
unstaged_hunk_cursor.next(buffer);
|
||||
unstaged_hunk_cursor.next(buffer);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
// If any unstaged hunks were merged, then subsequent pending hunks may
|
||||
// now overlap this hunk. Merge them.
|
||||
if let Some(next_pending_hunk) = pending_hunks_iter.peek() {
|
||||
let next_pending_hunk_offset_range =
|
||||
next_pending_hunk.buffer_range.to_offset(buffer);
|
||||
if next_pending_hunk_offset_range.start <= buffer_offset_range.end {
|
||||
buffer_offset_range.end = next_pending_hunk_offset_range.end;
|
||||
pending_hunks_iter.next();
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
let end_overshoot = buffer_offset_range
|
||||
.end
|
||||
.saturating_sub(prev_unstaged_hunk_buffer_offset);
|
||||
let index_end = prev_unstaged_hunk_base_text_offset + end_overshoot;
|
||||
|
||||
let index_range = index_start..index_end;
|
||||
buffer_offset_range.end = buffer_offset_range
|
||||
.end
|
||||
.max(prev_unstaged_hunk_buffer_offset);
|
||||
.saturating_sub(prev_unstaged_hunk_buffer_end);
|
||||
let index_end = prev_unstaged_hunk_base_text_end + end_overshoot;
|
||||
let index_byte_range = index_start..index_end;
|
||||
|
||||
let replacement_text = if stage {
|
||||
log::debug!("stage hunk {:?}", buffer_offset_range);
|
||||
@@ -351,8 +361,11 @@ impl BufferDiffInner {
|
||||
.collect::<String>()
|
||||
};
|
||||
|
||||
edits.push((index_range, replacement_text));
|
||||
edits.push((index_byte_range, replacement_text));
|
||||
}
|
||||
drop(pending_hunks_iter);
|
||||
drop(old_pending_hunks);
|
||||
self.pending_hunks = pending_hunks;
|
||||
|
||||
#[cfg(debug_assertions)] // invariants: non-overlapping and sorted
|
||||
{
|
||||
@@ -371,7 +384,7 @@ impl BufferDiffInner {
|
||||
new_index_text.push(&replacement_text);
|
||||
}
|
||||
new_index_text.append(index_cursor.suffix());
|
||||
(Some(new_index_text), pending_hunks)
|
||||
Some(new_index_text)
|
||||
}
|
||||
|
||||
fn hunks_intersecting_range<'a>(
|
||||
@@ -408,15 +421,14 @@ impl BufferDiffInner {
|
||||
]
|
||||
});
|
||||
|
||||
let mut pending_hunks_cursor = self.pending_hunks.cursor::<DiffHunkSummary>(buffer);
|
||||
pending_hunks_cursor.next(buffer);
|
||||
|
||||
let mut secondary_cursor = None;
|
||||
let mut pending_hunks_cursor = None;
|
||||
if let Some(secondary) = secondary.as_ref() {
|
||||
let mut cursor = secondary.hunks.cursor::<DiffHunkSummary>(buffer);
|
||||
cursor.next(buffer);
|
||||
secondary_cursor = Some(cursor);
|
||||
let mut cursor = secondary.pending_hunks.cursor::<DiffHunkSummary>(buffer);
|
||||
cursor.next(buffer);
|
||||
pending_hunks_cursor = Some(cursor);
|
||||
}
|
||||
|
||||
let max_point = buffer.max_point();
|
||||
@@ -438,29 +450,27 @@ impl BufferDiffInner {
|
||||
let mut secondary_status = DiffHunkSecondaryStatus::NoSecondaryHunk;
|
||||
|
||||
let mut has_pending = false;
|
||||
if let Some(pending_cursor) = pending_hunks_cursor.as_mut() {
|
||||
if start_anchor
|
||||
.cmp(&pending_cursor.start().buffer_range.start, buffer)
|
||||
.is_gt()
|
||||
{
|
||||
pending_cursor.seek_forward(&start_anchor, Bias::Left, buffer);
|
||||
if start_anchor
|
||||
.cmp(&pending_hunks_cursor.start().buffer_range.start, buffer)
|
||||
.is_gt()
|
||||
{
|
||||
pending_hunks_cursor.seek_forward(&start_anchor, Bias::Left, buffer);
|
||||
}
|
||||
|
||||
if let Some(pending_hunk) = pending_hunks_cursor.item() {
|
||||
let mut pending_range = pending_hunk.buffer_range.to_point(buffer);
|
||||
if pending_range.end.column > 0 {
|
||||
pending_range.end.row += 1;
|
||||
pending_range.end.column = 0;
|
||||
}
|
||||
|
||||
if let Some(pending_hunk) = pending_cursor.item() {
|
||||
let mut pending_range = pending_hunk.buffer_range.to_point(buffer);
|
||||
if pending_range.end.column > 0 {
|
||||
pending_range.end.row += 1;
|
||||
pending_range.end.column = 0;
|
||||
}
|
||||
|
||||
if pending_range == (start_point..end_point) {
|
||||
if !buffer.has_edits_since_in_range(
|
||||
&pending_hunk.buffer_version,
|
||||
start_anchor..end_anchor,
|
||||
) {
|
||||
has_pending = true;
|
||||
secondary_status = pending_hunk.new_status;
|
||||
}
|
||||
if pending_range == (start_point..end_point) {
|
||||
if !buffer.has_edits_since_in_range(
|
||||
&pending_hunk.buffer_version,
|
||||
start_anchor..end_anchor,
|
||||
) {
|
||||
has_pending = true;
|
||||
secondary_status = pending_hunk.new_status;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -839,10 +849,8 @@ impl BufferDiff {
|
||||
}
|
||||
|
||||
pub fn clear_pending_hunks(&mut self, cx: &mut Context<Self>) {
|
||||
if let Some(secondary_diff) = &self.secondary_diff {
|
||||
secondary_diff.update(cx, |diff, _| {
|
||||
diff.inner.pending_hunks = SumTree::from_summary(DiffHunkSummary::default());
|
||||
});
|
||||
if self.secondary_diff.is_some() {
|
||||
self.inner.pending_hunks = SumTree::from_summary(DiffHunkSummary::default());
|
||||
cx.emit(BufferDiffEvent::DiffChanged {
|
||||
changed_range: Some(Anchor::MIN..Anchor::MAX),
|
||||
});
|
||||
@@ -857,7 +865,7 @@ impl BufferDiff {
|
||||
file_exists: bool,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Option<Rope> {
|
||||
let (new_index_text, new_pending_hunks) = self.inner.stage_or_unstage_hunks_impl(
|
||||
let new_index_text = self.inner.stage_or_unstage_hunks_impl(
|
||||
&self.secondary_diff.as_ref()?.read(cx).inner,
|
||||
stage,
|
||||
&hunks,
|
||||
@@ -865,11 +873,6 @@ impl BufferDiff {
|
||||
file_exists,
|
||||
);
|
||||
|
||||
if let Some(unstaged_diff) = &self.secondary_diff {
|
||||
unstaged_diff.update(cx, |diff, _| {
|
||||
diff.inner.pending_hunks = new_pending_hunks;
|
||||
});
|
||||
}
|
||||
cx.emit(BufferDiffEvent::HunksStagedOrUnstaged(
|
||||
new_index_text.clone(),
|
||||
));
|
||||
@@ -1649,6 +1652,75 @@ mod tests {
|
||||
"
|
||||
.unindent(),
|
||||
},
|
||||
Example {
|
||||
name: "one unstaged hunk that contains two uncommitted hunks",
|
||||
head_text: "
|
||||
one
|
||||
two
|
||||
|
||||
three
|
||||
four
|
||||
"
|
||||
.unindent(),
|
||||
index_text: "
|
||||
one
|
||||
two
|
||||
three
|
||||
four
|
||||
"
|
||||
.unindent(),
|
||||
buffer_marked_text: "
|
||||
«one
|
||||
|
||||
three // modified
|
||||
four»
|
||||
"
|
||||
.unindent(),
|
||||
final_index_text: "
|
||||
one
|
||||
|
||||
three // modified
|
||||
four
|
||||
"
|
||||
.unindent(),
|
||||
},
|
||||
Example {
|
||||
name: "one uncommitted hunk that contains two unstaged hunks",
|
||||
head_text: "
|
||||
one
|
||||
two
|
||||
three
|
||||
four
|
||||
five
|
||||
"
|
||||
.unindent(),
|
||||
index_text: "
|
||||
ZERO
|
||||
one
|
||||
TWO
|
||||
THREE
|
||||
FOUR
|
||||
five
|
||||
"
|
||||
.unindent(),
|
||||
buffer_marked_text: "
|
||||
«one
|
||||
TWO_HUNDRED
|
||||
THREE
|
||||
FOUR_HUNDRED
|
||||
five»
|
||||
"
|
||||
.unindent(),
|
||||
final_index_text: "
|
||||
ZERO
|
||||
one
|
||||
TWO_HUNDRED
|
||||
THREE
|
||||
FOUR_HUNDRED
|
||||
five
|
||||
"
|
||||
.unindent(),
|
||||
},
|
||||
];
|
||||
|
||||
for example in table {
|
||||
|
||||
@@ -43,10 +43,10 @@ telemetry.workspace = true
|
||||
util.workspace = true
|
||||
|
||||
[target.'cfg(target_os = "macos")'.dependencies]
|
||||
livekit_client_macos = { workspace = true }
|
||||
livekit_client_macos.workspace = true
|
||||
|
||||
[target.'cfg(not(target_os = "macos"))'.dependencies]
|
||||
livekit_client = { workspace = true }
|
||||
livekit_client.workspace = true
|
||||
|
||||
[dev-dependencies]
|
||||
client = { workspace = true, features = ["test-support"] }
|
||||
|
||||
@@ -115,7 +115,7 @@ notifications = { workspace = true, features = ["test-support"] }
|
||||
pretty_assertions.workspace = true
|
||||
project = { workspace = true, features = ["test-support"] }
|
||||
prompt_store.workspace = true
|
||||
recent_projects = { workspace = true }
|
||||
recent_projects.workspace = true
|
||||
release_channel.workspace = true
|
||||
remote = { workspace = true, features = ["test-support"] }
|
||||
remote_server.workspace = true
|
||||
|
||||
@@ -1,6 +1,3 @@
|
||||
// todo(windows): Actually run the tests
|
||||
#![cfg(not(target_os = "windows"))]
|
||||
|
||||
use std::sync::Arc;
|
||||
|
||||
use call::Room;
|
||||
|
||||
@@ -36,6 +36,7 @@ use std::{
|
||||
},
|
||||
};
|
||||
use text::Point;
|
||||
use util::{path, uri};
|
||||
use workspace::{CloseIntent, Workspace};
|
||||
|
||||
#[gpui::test(iterations = 10)]
|
||||
@@ -190,9 +191,9 @@ async fn test_newline_above_or_below_does_not_move_guest_cursor(
|
||||
|
||||
client_a
|
||||
.fs()
|
||||
.insert_tree("/dir", json!({ "a.txt": "Some text\n" }))
|
||||
.insert_tree(path!("/dir"), json!({ "a.txt": "Some text\n" }))
|
||||
.await;
|
||||
let (project_a, worktree_id) = client_a.build_local_project("/dir", cx_a).await;
|
||||
let (project_a, worktree_id) = client_a.build_local_project(path!("/dir"), cx_a).await;
|
||||
let project_id = active_call_a
|
||||
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
|
||||
.await
|
||||
@@ -306,14 +307,14 @@ async fn test_collaborating_with_completion(cx_a: &mut TestAppContext, cx_b: &mu
|
||||
client_a
|
||||
.fs()
|
||||
.insert_tree(
|
||||
"/a",
|
||||
path!("/a"),
|
||||
json!({
|
||||
"main.rs": "fn main() { a }",
|
||||
"other.rs": "",
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
|
||||
let (project_a, worktree_id) = client_a.build_local_project(path!("/a"), cx_a).await;
|
||||
let project_id = active_call_a
|
||||
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
|
||||
.await
|
||||
@@ -351,7 +352,7 @@ async fn test_collaborating_with_completion(cx_a: &mut TestAppContext, cx_b: &mu
|
||||
.set_request_handler::<lsp::request::Completion, _, _>(|params, _| async move {
|
||||
assert_eq!(
|
||||
params.text_document_position.text_document.uri,
|
||||
lsp::Url::from_file_path("/a/main.rs").unwrap(),
|
||||
lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
|
||||
);
|
||||
assert_eq!(
|
||||
params.text_document_position.position,
|
||||
@@ -468,7 +469,7 @@ async fn test_collaborating_with_completion(cx_a: &mut TestAppContext, cx_b: &mu
|
||||
.set_request_handler::<lsp::request::Completion, _, _>(|params, _| async move {
|
||||
assert_eq!(
|
||||
params.text_document_position.text_document.uri,
|
||||
lsp::Url::from_file_path("/a/main.rs").unwrap(),
|
||||
lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
|
||||
);
|
||||
assert_eq!(
|
||||
params.text_document_position.position,
|
||||
@@ -562,14 +563,14 @@ async fn test_collaborating_with_code_actions(
|
||||
client_a
|
||||
.fs()
|
||||
.insert_tree(
|
||||
"/a",
|
||||
path!("/a"),
|
||||
json!({
|
||||
"main.rs": "mod other;\nfn main() { let foo = other::foo(); }",
|
||||
"other.rs": "pub fn foo() -> usize { 4 }",
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
|
||||
let (project_a, worktree_id) = client_a.build_local_project(path!("/a"), cx_a).await;
|
||||
let project_id = active_call_a
|
||||
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
|
||||
.await
|
||||
@@ -592,7 +593,7 @@ async fn test_collaborating_with_code_actions(
|
||||
.set_request_handler::<lsp::request::CodeActionRequest, _, _>(|params, _| async move {
|
||||
assert_eq!(
|
||||
params.text_document.uri,
|
||||
lsp::Url::from_file_path("/a/main.rs").unwrap(),
|
||||
lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
|
||||
);
|
||||
assert_eq!(params.range.start, lsp::Position::new(0, 0));
|
||||
assert_eq!(params.range.end, lsp::Position::new(0, 0));
|
||||
@@ -614,7 +615,7 @@ async fn test_collaborating_with_code_actions(
|
||||
.set_request_handler::<lsp::request::CodeActionRequest, _, _>(|params, _| async move {
|
||||
assert_eq!(
|
||||
params.text_document.uri,
|
||||
lsp::Url::from_file_path("/a/main.rs").unwrap(),
|
||||
lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
|
||||
);
|
||||
assert_eq!(params.range.start, lsp::Position::new(1, 31));
|
||||
assert_eq!(params.range.end, lsp::Position::new(1, 31));
|
||||
@@ -626,7 +627,7 @@ async fn test_collaborating_with_code_actions(
|
||||
changes: Some(
|
||||
[
|
||||
(
|
||||
lsp::Url::from_file_path("/a/main.rs").unwrap(),
|
||||
lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
|
||||
vec![lsp::TextEdit::new(
|
||||
lsp::Range::new(
|
||||
lsp::Position::new(1, 22),
|
||||
@@ -636,7 +637,7 @@ async fn test_collaborating_with_code_actions(
|
||||
)],
|
||||
),
|
||||
(
|
||||
lsp::Url::from_file_path("/a/other.rs").unwrap(),
|
||||
lsp::Url::from_file_path(path!("/a/other.rs")).unwrap(),
|
||||
vec![lsp::TextEdit::new(
|
||||
lsp::Range::new(
|
||||
lsp::Position::new(0, 0),
|
||||
@@ -697,7 +698,7 @@ async fn test_collaborating_with_code_actions(
|
||||
changes: Some(
|
||||
[
|
||||
(
|
||||
lsp::Url::from_file_path("/a/main.rs").unwrap(),
|
||||
lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
|
||||
vec![lsp::TextEdit::new(
|
||||
lsp::Range::new(
|
||||
lsp::Position::new(1, 22),
|
||||
@@ -707,7 +708,7 @@ async fn test_collaborating_with_code_actions(
|
||||
)],
|
||||
),
|
||||
(
|
||||
lsp::Url::from_file_path("/a/other.rs").unwrap(),
|
||||
lsp::Url::from_file_path(path!("/a/other.rs")).unwrap(),
|
||||
vec![lsp::TextEdit::new(
|
||||
lsp::Range::new(
|
||||
lsp::Position::new(0, 0),
|
||||
@@ -780,14 +781,14 @@ async fn test_collaborating_with_renames(cx_a: &mut TestAppContext, cx_b: &mut T
|
||||
client_a
|
||||
.fs()
|
||||
.insert_tree(
|
||||
"/dir",
|
||||
path!("/dir"),
|
||||
json!({
|
||||
"one.rs": "const ONE: usize = 1;",
|
||||
"two.rs": "const TWO: usize = one::ONE + one::ONE;"
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
let (project_a, worktree_id) = client_a.build_local_project("/dir", cx_a).await;
|
||||
let (project_a, worktree_id) = client_a.build_local_project(path!("/dir"), cx_a).await;
|
||||
let project_id = active_call_a
|
||||
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
|
||||
.await
|
||||
@@ -813,7 +814,10 @@ async fn test_collaborating_with_renames(cx_a: &mut TestAppContext, cx_b: &mut T
|
||||
|
||||
fake_language_server
|
||||
.set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
|
||||
assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
|
||||
assert_eq!(
|
||||
params.text_document.uri.as_str(),
|
||||
uri!("file:///dir/one.rs")
|
||||
);
|
||||
assert_eq!(params.position, lsp::Position::new(0, 7));
|
||||
Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
|
||||
lsp::Position::new(0, 6),
|
||||
@@ -856,7 +860,10 @@ async fn test_collaborating_with_renames(cx_a: &mut TestAppContext, cx_b: &mut T
|
||||
|
||||
fake_language_server
|
||||
.set_request_handler::<lsp::request::PrepareRenameRequest, _, _>(|params, _| async move {
|
||||
assert_eq!(params.text_document.uri.as_str(), "file:///dir/one.rs");
|
||||
assert_eq!(
|
||||
params.text_document.uri.as_str(),
|
||||
uri!("file:///dir/one.rs")
|
||||
);
|
||||
assert_eq!(params.position, lsp::Position::new(0, 8));
|
||||
Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
|
||||
lsp::Position::new(0, 6),
|
||||
@@ -894,7 +901,7 @@ async fn test_collaborating_with_renames(cx_a: &mut TestAppContext, cx_b: &mut T
|
||||
.set_request_handler::<lsp::request::Rename, _, _>(|params, _| async move {
|
||||
assert_eq!(
|
||||
params.text_document_position.text_document.uri.as_str(),
|
||||
"file:///dir/one.rs"
|
||||
uri!("file:///dir/one.rs")
|
||||
);
|
||||
assert_eq!(
|
||||
params.text_document_position.position,
|
||||
@@ -905,14 +912,14 @@ async fn test_collaborating_with_renames(cx_a: &mut TestAppContext, cx_b: &mut T
|
||||
changes: Some(
|
||||
[
|
||||
(
|
||||
lsp::Url::from_file_path("/dir/one.rs").unwrap(),
|
||||
lsp::Url::from_file_path(path!("/dir/one.rs")).unwrap(),
|
||||
vec![lsp::TextEdit::new(
|
||||
lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
|
||||
"THREE".to_string(),
|
||||
)],
|
||||
),
|
||||
(
|
||||
lsp::Url::from_file_path("/dir/two.rs").unwrap(),
|
||||
lsp::Url::from_file_path(path!("/dir/two.rs")).unwrap(),
|
||||
vec![
|
||||
lsp::TextEdit::new(
|
||||
lsp::Range::new(
|
||||
@@ -999,17 +1006,17 @@ async fn test_language_server_statuses(cx_a: &mut TestAppContext, cx_b: &mut Tes
|
||||
client_a
|
||||
.fs()
|
||||
.insert_tree(
|
||||
"/dir",
|
||||
path!("/dir"),
|
||||
json!({
|
||||
"main.rs": "const ONE: usize = 1;",
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
let (project_a, _) = client_a.build_local_project("/dir", cx_a).await;
|
||||
let (project_a, _) = client_a.build_local_project(path!("/dir"), cx_a).await;
|
||||
|
||||
let _buffer_a = project_a
|
||||
.update(cx_a, |p, cx| {
|
||||
p.open_local_buffer_with_lsp("/dir/main.rs", cx)
|
||||
p.open_local_buffer_with_lsp(path!("/dir/main.rs"), cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
@@ -1106,7 +1113,7 @@ async fn test_share_project(
|
||||
client_a
|
||||
.fs()
|
||||
.insert_tree(
|
||||
"/a",
|
||||
path!("/a"),
|
||||
json!({
|
||||
".gitignore": "ignored-dir",
|
||||
"a.txt": "a-contents",
|
||||
@@ -1120,7 +1127,7 @@ async fn test_share_project(
|
||||
.await;
|
||||
|
||||
// Invite client B to collaborate on a project
|
||||
let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
|
||||
let (project_a, worktree_id) = client_a.build_local_project(path!("/a"), cx_a).await;
|
||||
active_call_a
|
||||
.update(cx_a, |call, cx| {
|
||||
call.invite(client_b.user_id().unwrap(), Some(project_a.clone()), cx)
|
||||
@@ -1292,14 +1299,14 @@ async fn test_on_input_format_from_host_to_guest(
|
||||
client_a
|
||||
.fs()
|
||||
.insert_tree(
|
||||
"/a",
|
||||
path!("/a"),
|
||||
json!({
|
||||
"main.rs": "fn main() { a }",
|
||||
"other.rs": "// Test file",
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
|
||||
let (project_a, worktree_id) = client_a.build_local_project(path!("/a"), cx_a).await;
|
||||
let project_id = active_call_a
|
||||
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
|
||||
.await
|
||||
@@ -1325,7 +1332,7 @@ async fn test_on_input_format_from_host_to_guest(
|
||||
|params, _| async move {
|
||||
assert_eq!(
|
||||
params.text_document_position.text_document.uri,
|
||||
lsp::Url::from_file_path("/a/main.rs").unwrap(),
|
||||
lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
|
||||
);
|
||||
assert_eq!(
|
||||
params.text_document_position.position,
|
||||
@@ -1414,14 +1421,14 @@ async fn test_on_input_format_from_guest_to_host(
|
||||
client_a
|
||||
.fs()
|
||||
.insert_tree(
|
||||
"/a",
|
||||
path!("/a"),
|
||||
json!({
|
||||
"main.rs": "fn main() { a }",
|
||||
"other.rs": "// Test file",
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
|
||||
let (project_a, worktree_id) = client_a.build_local_project(path!("/a"), cx_a).await;
|
||||
let project_id = active_call_a
|
||||
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
|
||||
.await
|
||||
@@ -1455,7 +1462,7 @@ async fn test_on_input_format_from_guest_to_host(
|
||||
.set_request_handler::<lsp::request::OnTypeFormatting, _, _>(|params, _| async move {
|
||||
assert_eq!(
|
||||
params.text_document_position.text_document.uri,
|
||||
lsp::Url::from_file_path("/a/main.rs").unwrap(),
|
||||
lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
|
||||
);
|
||||
assert_eq!(
|
||||
params.text_document_position.position,
|
||||
@@ -1575,14 +1582,14 @@ async fn test_mutual_editor_inlay_hint_cache_update(
|
||||
client_a
|
||||
.fs()
|
||||
.insert_tree(
|
||||
"/a",
|
||||
path!("/a"),
|
||||
json!({
|
||||
"main.rs": "fn main() { a } // and some long comment to ensure inlay hints are not trimmed out",
|
||||
"other.rs": "// Test file",
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
|
||||
let (project_a, worktree_id) = client_a.build_local_project(path!("/a"), cx_a).await;
|
||||
active_call_a
|
||||
.update(cx_a, |call, cx| call.set_location(Some(&project_a), cx))
|
||||
.await
|
||||
@@ -1605,7 +1612,7 @@ async fn test_mutual_editor_inlay_hint_cache_update(
|
||||
// The host opens a rust file.
|
||||
let _buffer_a = project_a
|
||||
.update(cx_a, |project, cx| {
|
||||
project.open_local_buffer("/a/main.rs", cx)
|
||||
project.open_local_buffer(path!("/a/main.rs"), cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
@@ -1629,7 +1636,7 @@ async fn test_mutual_editor_inlay_hint_cache_update(
|
||||
async move {
|
||||
assert_eq!(
|
||||
params.text_document.uri,
|
||||
lsp::Url::from_file_path("/a/main.rs").unwrap(),
|
||||
lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
|
||||
);
|
||||
let edits_made = task_edits_made.load(atomic::Ordering::Acquire);
|
||||
Ok(Some(vec![lsp::InlayHint {
|
||||
@@ -1809,14 +1816,14 @@ async fn test_inlay_hint_refresh_is_forwarded(
|
||||
client_a
|
||||
.fs()
|
||||
.insert_tree(
|
||||
"/a",
|
||||
path!("/a"),
|
||||
json!({
|
||||
"main.rs": "fn main() { a } // and some long comment to ensure inlay hints are not trimmed out",
|
||||
"other.rs": "// Test file",
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
|
||||
let (project_a, worktree_id) = client_a.build_local_project(path!("/a"), cx_a).await;
|
||||
active_call_a
|
||||
.update(cx_a, |call, cx| call.set_location(Some(&project_a), cx))
|
||||
.await
|
||||
@@ -1864,7 +1871,7 @@ async fn test_inlay_hint_refresh_is_forwarded(
|
||||
async move {
|
||||
assert_eq!(
|
||||
params.text_document.uri,
|
||||
lsp::Url::from_file_path("/a/main.rs").unwrap(),
|
||||
lsp::Url::from_file_path(path!("/a/main.rs")).unwrap(),
|
||||
);
|
||||
let other_hints = task_other_hints.load(atomic::Ordering::Acquire);
|
||||
let character = if other_hints { 0 } else { 2 };
|
||||
@@ -1967,7 +1974,7 @@ async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestA
client_a
.fs()
.insert_tree(
"/my-repo",
path!("/my-repo"),
json!({
".git": {},
"file.txt": "line1\nline2\nline3\nline\n",
@@ -1993,11 +2000,12 @@ async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestA
.collect(),
remote_url: Some("git@github.com:zed-industries/zed.git".to_string()),
};
client_a
.fs()
.set_blame_for_repo(Path::new("/my-repo/.git"), vec![("file.txt".into(), blame)]);
client_a.fs().set_blame_for_repo(
Path::new(path!("/my-repo/.git")),
vec![("file.txt".into(), blame)],
);

let (project_a, worktree_id) = client_a.build_local_project("/my-repo", cx_a).await;
let (project_a, worktree_id) = client_a.build_local_project(path!("/my-repo"), cx_a).await;
let project_id = active_call_a
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
@@ -2177,7 +2185,7 @@ async fn test_collaborating_with_editorconfig(
|
||||
client_a
|
||||
.fs()
|
||||
.insert_tree(
|
||||
"/a",
|
||||
path!("/a"),
|
||||
json!({
|
||||
"src": {
|
||||
"main.rs": "mod other;\nfn main() { let foo = other::foo(); }",
|
||||
@@ -2190,7 +2198,7 @@ async fn test_collaborating_with_editorconfig(
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
|
||||
let (project_a, worktree_id) = client_a.build_local_project(path!("/a"), cx_a).await;
|
||||
let project_id = active_call_a
|
||||
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
|
||||
.await
|
||||
@@ -2309,7 +2317,7 @@ fn main() { let foo = other::foo(); }"};
|
||||
client_a
|
||||
.fs()
|
||||
.atomic_write(
|
||||
PathBuf::from("/a/src/.editorconfig"),
|
||||
PathBuf::from(path!("/a/src/.editorconfig")),
|
||||
"[*]\ntab_width = 3\n".to_owned(),
|
||||
)
|
||||
.await
|
||||
|
||||
@@ -16,6 +16,7 @@ use project::WorktreeSettings;
use rpc::proto::PeerId;
use serde_json::json;
use settings::SettingsStore;
use util::path;
use workspace::{item::ItemHandle as _, SplitDirection, Workspace};

use super::TestClient;
@@ -50,7 +51,7 @@ async fn test_basic_following(
|
||||
client_a
|
||||
.fs()
|
||||
.insert_tree(
|
||||
"/a",
|
||||
path!("/a"),
|
||||
json!({
|
||||
"1.txt": "one\none\none",
|
||||
"2.txt": "two\ntwo\ntwo",
|
||||
@@ -58,7 +59,7 @@ async fn test_basic_following(
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
|
||||
let (project_a, worktree_id) = client_a.build_local_project(path!("/a"), cx_a).await;
|
||||
active_call_a
|
||||
.update(cx_a, |call, cx| call.set_location(Some(&project_a), cx))
|
||||
.await
|
||||
@@ -436,7 +437,9 @@ async fn test_basic_following(
|
||||
);
|
||||
|
||||
// TODO: Re-enable this test once we can replace our swift Livekit SDK with the rust SDK
|
||||
#[cfg(not(target_os = "macos"))]
|
||||
// todo(windows)
|
||||
// Fix this on Windows
|
||||
#[cfg(all(not(target_os = "macos"), not(target_os = "windows")))]
|
||||
{
|
||||
use crate::rpc::RECONNECT_TIMEOUT;
|
||||
use gpui::TestScreenCaptureSource;
|
||||
@@ -570,7 +573,7 @@ async fn test_following_tab_order(
|
||||
client_a
|
||||
.fs()
|
||||
.insert_tree(
|
||||
"/a",
|
||||
path!("/a"),
|
||||
json!({
|
||||
"1.txt": "one",
|
||||
"2.txt": "two",
|
||||
@@ -578,7 +581,7 @@ async fn test_following_tab_order(
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
|
||||
let (project_a, worktree_id) = client_a.build_local_project(path!("/a"), cx_a).await;
|
||||
active_call_a
|
||||
.update(cx_a, |call, cx| call.set_location(Some(&project_a), cx))
|
||||
.await
|
||||
@@ -1220,7 +1223,7 @@ async fn test_auto_unfollowing(cx_a: &mut TestAppContext, cx_b: &mut TestAppCont
|
||||
client_a
|
||||
.fs()
|
||||
.insert_tree(
|
||||
"/a",
|
||||
path!("/a"),
|
||||
json!({
|
||||
"1.txt": "one",
|
||||
"2.txt": "two",
|
||||
@@ -1228,7 +1231,7 @@ async fn test_auto_unfollowing(cx_a: &mut TestAppContext, cx_b: &mut TestAppCont
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
|
||||
let (project_a, worktree_id) = client_a.build_local_project(path!("/a"), cx_a).await;
|
||||
active_call_a
|
||||
.update(cx_a, |call, cx| call.set_location(Some(&project_a), cx))
|
||||
.await
|
||||
@@ -1435,7 +1438,7 @@ async fn test_following_across_workspaces(cx_a: &mut TestAppContext, cx_b: &mut
|
||||
client_a
|
||||
.fs()
|
||||
.insert_tree(
|
||||
"/a",
|
||||
path!("/a"),
|
||||
json!({
|
||||
"w.rs": "",
|
||||
"x.rs": "",
|
||||
@@ -1446,7 +1449,7 @@ async fn test_following_across_workspaces(cx_a: &mut TestAppContext, cx_b: &mut
|
||||
client_b
|
||||
.fs()
|
||||
.insert_tree(
|
||||
"/b",
|
||||
path!("/b"),
|
||||
json!({
|
||||
"y.rs": "",
|
||||
"z.rs": "",
|
||||
@@ -1460,8 +1463,8 @@ async fn test_following_across_workspaces(cx_a: &mut TestAppContext, cx_b: &mut
|
||||
let active_call_a = cx_a.read(ActiveCall::global);
|
||||
let active_call_b = cx_b.read(ActiveCall::global);
|
||||
|
||||
let (project_a, worktree_id_a) = client_a.build_local_project("/a", cx_a).await;
|
||||
let (project_b, worktree_id_b) = client_b.build_local_project("/b", cx_b).await;
|
||||
let (project_a, worktree_id_a) = client_a.build_local_project(path!("/a"), cx_a).await;
|
||||
let (project_b, worktree_id_b) = client_b.build_local_project(path!("/b"), cx_b).await;
|
||||
|
||||
let (workspace_a, cx_a) = client_a.build_workspace(&project_a, cx_a);
|
||||
let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b);
|
||||
@@ -1718,7 +1721,7 @@ async fn test_following_into_excluded_file(
|
||||
client_a
|
||||
.fs()
|
||||
.insert_tree(
|
||||
"/a",
|
||||
path!("/a"),
|
||||
json!({
|
||||
".git": {
|
||||
"COMMIT_EDITMSG": "write your commit message here",
|
||||
@@ -1729,7 +1732,7 @@ async fn test_following_into_excluded_file(
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
|
||||
let (project_a, worktree_id) = client_a.build_local_project(path!("/a"), cx_a).await;
|
||||
active_call_a
|
||||
.update(cx_a, |call, cx| call.set_location(Some(&project_a), cx))
|
||||
.await
|
||||
|
||||
@@ -9,6 +9,7 @@ use git_ui::project_diff::ProjectDiff;
|
||||
use gpui::{TestAppContext, VisualTestContext};
|
||||
use project::ProjectPath;
|
||||
use serde_json::json;
|
||||
use util::path;
|
||||
use workspace::Workspace;
|
||||
|
||||
//
|
||||
@@ -29,7 +30,7 @@ async fn test_project_diff(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext)
|
||||
client_a
|
||||
.fs()
|
||||
.insert_tree(
|
||||
"/a",
|
||||
path!("/a"),
|
||||
json!({
|
||||
".git": {},
|
||||
"changed.txt": "after\n",
|
||||
@@ -41,7 +42,7 @@ async fn test_project_diff(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext)
|
||||
.await;
|
||||
|
||||
client_a.fs().set_git_content_for_repo(
|
||||
Path::new("/a/.git"),
|
||||
Path::new(path!("/a/.git")),
|
||||
&[
|
||||
("changed.txt".into(), "before\n".to_string(), None),
|
||||
("unchanged.txt".into(), "unchanged\n".to_string(), None),
|
||||
@@ -49,7 +50,7 @@ async fn test_project_diff(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext)
|
||||
("secret.pem".into(), "shh\n".to_string(), None),
|
||||
],
|
||||
);
|
||||
let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
|
||||
let (project_a, worktree_id) = client_a.build_local_project(path!("/a"), cx_a).await;
|
||||
let active_call_a = cx_a.read(ActiveCall::global);
|
||||
let project_id = active_call_a
|
||||
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
|
||||
@@ -93,7 +94,7 @@ async fn test_project_diff(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext)
|
||||
client_a
|
||||
.fs()
|
||||
.insert_tree(
|
||||
"/a",
|
||||
path!("/a"),
|
||||
json!({
|
||||
".git": {},
|
||||
"changed.txt": "before\n",
|
||||
|
||||
@@ -50,6 +50,7 @@ use std::{
|
||||
time::Duration,
|
||||
};
|
||||
use unindent::Unindent as _;
|
||||
use util::{path, separator, uri};
|
||||
use workspace::Pane;
|
||||
|
||||
#[ctor::ctor]
|
||||
@@ -1459,7 +1460,7 @@ async fn test_project_reconnect(
|
||||
client_a
|
||||
.fs()
|
||||
.insert_tree(
|
||||
"/root-1",
|
||||
path!("/root-1"),
|
||||
json!({
|
||||
"dir1": {
|
||||
"a.txt": "a",
|
||||
@@ -1487,7 +1488,7 @@ async fn test_project_reconnect(
|
||||
client_a
|
||||
.fs()
|
||||
.insert_tree(
|
||||
"/root-2",
|
||||
path!("/root-2"),
|
||||
json!({
|
||||
"2.txt": "2",
|
||||
}),
|
||||
@@ -1496,7 +1497,7 @@ async fn test_project_reconnect(
|
||||
client_a
|
||||
.fs()
|
||||
.insert_tree(
|
||||
"/root-3",
|
||||
path!("/root-3"),
|
||||
json!({
|
||||
"3.txt": "3",
|
||||
}),
|
||||
@@ -1504,9 +1505,11 @@ async fn test_project_reconnect(
|
||||
.await;
|
||||
|
||||
let active_call_a = cx_a.read(ActiveCall::global);
|
||||
let (project_a1, _) = client_a.build_local_project("/root-1/dir1", cx_a).await;
|
||||
let (project_a2, _) = client_a.build_local_project("/root-2", cx_a).await;
|
||||
let (project_a3, _) = client_a.build_local_project("/root-3", cx_a).await;
|
||||
let (project_a1, _) = client_a
|
||||
.build_local_project(path!("/root-1/dir1"), cx_a)
|
||||
.await;
|
||||
let (project_a2, _) = client_a.build_local_project(path!("/root-2"), cx_a).await;
|
||||
let (project_a3, _) = client_a.build_local_project(path!("/root-3"), cx_a).await;
|
||||
let worktree_a1 =
|
||||
project_a1.read_with(cx_a, |project, cx| project.worktrees(cx).next().unwrap());
|
||||
let project1_id = active_call_a
|
||||
@@ -1533,7 +1536,7 @@ async fn test_project_reconnect(
|
||||
});
|
||||
let (worktree_a2, _) = project_a1
|
||||
.update(cx_a, |p, cx| {
|
||||
p.find_or_create_worktree("/root-1/dir2", true, cx)
|
||||
p.find_or_create_worktree(path!("/root-1/dir2"), true, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
@@ -1579,7 +1582,7 @@ async fn test_project_reconnect(
|
||||
client_a
|
||||
.fs()
|
||||
.insert_tree(
|
||||
"/root-1/dir1/subdir2",
|
||||
path!("/root-1/dir1/subdir2"),
|
||||
json!({
|
||||
"f.txt": "f-contents",
|
||||
"g.txt": "g-contents",
|
||||
@@ -1591,7 +1594,7 @@ async fn test_project_reconnect(
|
||||
client_a
|
||||
.fs()
|
||||
.remove_dir(
|
||||
"/root-1/dir1/subdir1".as_ref(),
|
||||
path!("/root-1/dir1/subdir1").as_ref(),
|
||||
RemoveOptions {
|
||||
recursive: true,
|
||||
..Default::default()
|
||||
@@ -1606,7 +1609,7 @@ async fn test_project_reconnect(
|
||||
});
|
||||
let (worktree_a3, _) = project_a1
|
||||
.update(cx_a, |p, cx| {
|
||||
p.find_or_create_worktree("/root-1/dir3", true, cx)
|
||||
p.find_or_create_worktree(path!("/root-1/dir3"), true, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
@@ -1647,13 +1650,13 @@ async fn test_project_reconnect(
|
||||
.map(|p| p.to_str().unwrap())
|
||||
.collect::<Vec<_>>(),
|
||||
vec![
|
||||
"a.txt",
|
||||
"b.txt",
|
||||
"subdir2",
|
||||
"subdir2/f.txt",
|
||||
"subdir2/g.txt",
|
||||
"subdir2/h.txt",
|
||||
"subdir2/i.txt"
|
||||
separator!("a.txt"),
|
||||
separator!("b.txt"),
|
||||
separator!("subdir2"),
|
||||
separator!("subdir2/f.txt"),
|
||||
separator!("subdir2/g.txt"),
|
||||
separator!("subdir2/h.txt"),
|
||||
separator!("subdir2/i.txt")
|
||||
]
|
||||
);
|
||||
assert!(worktree_a3.read(cx).has_update_observer());
|
||||
@@ -1680,13 +1683,13 @@ async fn test_project_reconnect(
|
||||
.map(|p| p.to_str().unwrap())
|
||||
.collect::<Vec<_>>(),
|
||||
vec![
|
||||
"a.txt",
|
||||
"b.txt",
|
||||
"subdir2",
|
||||
"subdir2/f.txt",
|
||||
"subdir2/g.txt",
|
||||
"subdir2/h.txt",
|
||||
"subdir2/i.txt"
|
||||
separator!("a.txt"),
|
||||
separator!("b.txt"),
|
||||
separator!("subdir2"),
|
||||
separator!("subdir2/f.txt"),
|
||||
separator!("subdir2/g.txt"),
|
||||
separator!("subdir2/h.txt"),
|
||||
separator!("subdir2/i.txt")
|
||||
]
|
||||
);
|
||||
assert!(project.worktree_for_id(worktree2_id, cx).is_none());
|
||||
@@ -1719,18 +1722,21 @@ async fn test_project_reconnect(
|
||||
// While client B is disconnected, add and remove files from client A's project
|
||||
client_a
|
||||
.fs()
|
||||
.insert_file("/root-1/dir1/subdir2/j.txt", "j-contents".into())
|
||||
.insert_file(path!("/root-1/dir1/subdir2/j.txt"), "j-contents".into())
|
||||
.await;
|
||||
client_a
|
||||
.fs()
|
||||
.remove_file("/root-1/dir1/subdir2/i.txt".as_ref(), Default::default())
|
||||
.remove_file(
|
||||
path!("/root-1/dir1/subdir2/i.txt").as_ref(),
|
||||
Default::default(),
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
// While client B is disconnected, add and remove worktrees from client A's project.
|
||||
let (worktree_a4, _) = project_a1
|
||||
.update(cx_a, |p, cx| {
|
||||
p.find_or_create_worktree("/root-1/dir4", true, cx)
|
||||
p.find_or_create_worktree(path!("/root-1/dir4"), true, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
@@ -1773,13 +1779,13 @@ async fn test_project_reconnect(
|
||||
.map(|p| p.to_str().unwrap())
|
||||
.collect::<Vec<_>>(),
|
||||
vec![
|
||||
"a.txt",
|
||||
"b.txt",
|
||||
"subdir2",
|
||||
"subdir2/f.txt",
|
||||
"subdir2/g.txt",
|
||||
"subdir2/h.txt",
|
||||
"subdir2/j.txt"
|
||||
separator!("a.txt"),
|
||||
separator!("b.txt"),
|
||||
separator!("subdir2"),
|
||||
separator!("subdir2/f.txt"),
|
||||
separator!("subdir2/g.txt"),
|
||||
separator!("subdir2/h.txt"),
|
||||
separator!("subdir2/j.txt")
|
||||
]
|
||||
);
|
||||
assert!(project.worktree_for_id(worktree2_id, cx).is_none());
|
||||
@@ -2316,14 +2322,14 @@ async fn test_propagate_saves_and_fs_changes(
|
||||
client_a
|
||||
.fs()
|
||||
.insert_tree(
|
||||
"/a",
|
||||
path!("/a"),
|
||||
json!({
|
||||
"file1.rs": "",
|
||||
"file2": ""
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
|
||||
let (project_a, worktree_id) = client_a.build_local_project(path!("/a"), cx_a).await;
|
||||
|
||||
let worktree_a = project_a.read_with(cx_a, |p, cx| p.worktrees(cx).next().unwrap());
|
||||
let project_id = active_call_a
|
||||
@@ -2409,18 +2415,25 @@ async fn test_propagate_saves_and_fs_changes(
|
||||
client_a
|
||||
.fs()
|
||||
.rename(
|
||||
"/a/file1.rs".as_ref(),
|
||||
"/a/file1.js".as_ref(),
|
||||
path!("/a/file1.rs").as_ref(),
|
||||
path!("/a/file1.js").as_ref(),
|
||||
Default::default(),
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
client_a
|
||||
.fs()
|
||||
.rename("/a/file2".as_ref(), "/a/file3".as_ref(), Default::default())
|
||||
.rename(
|
||||
path!("/a/file2").as_ref(),
|
||||
path!("/a/file3").as_ref(),
|
||||
Default::default(),
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
client_a.fs().insert_file("/a/file4", "4".into()).await;
|
||||
client_a
|
||||
.fs()
|
||||
.insert_file(path!("/a/file4"), "4".into())
|
||||
.await;
|
||||
executor.run_until_parked();
|
||||
|
||||
worktree_a.read_with(cx_a, |tree, _| {
|
||||
@@ -2892,15 +2905,17 @@ async fn test_git_branch_name(
|
||||
#[track_caller]
|
||||
fn assert_branch(branch_name: Option<impl Into<String>>, project: &Project, cx: &App) {
|
||||
let branch_name = branch_name.map(Into::into);
|
||||
let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
|
||||
assert_eq!(worktrees.len(), 1);
|
||||
let worktree = worktrees[0].clone();
|
||||
let snapshot = worktree.read(cx).snapshot();
|
||||
let repo = snapshot.repositories().first().unwrap();
|
||||
let repositories = project.repositories(cx).values().collect::<Vec<_>>();
|
||||
assert_eq!(repositories.len(), 1);
|
||||
let repository = repositories[0].clone();
|
||||
assert_eq!(
|
||||
repo.branch().map(|branch| branch.name.to_string()),
|
||||
repository
|
||||
.read(cx)
|
||||
.repository_entry
|
||||
.branch()
|
||||
.map(|branch| branch.name.to_string()),
|
||||
branch_name
|
||||
);
|
||||
)
|
||||
}
|
||||
|
||||
// Smoke test branch reading
|
||||
@@ -2957,7 +2972,7 @@ async fn test_git_status_sync(
|
||||
client_a
|
||||
.fs()
|
||||
.insert_tree(
|
||||
"/dir",
|
||||
path!("/dir"),
|
||||
json!({
|
||||
".git": {},
|
||||
"a.txt": "a",
|
||||
@@ -2970,11 +2985,11 @@ async fn test_git_status_sync(
|
||||
// Initially, a.txt is uncommitted, but present in the index,
|
||||
// and b.txt is unmerged.
|
||||
client_a.fs().set_head_for_repo(
|
||||
"/dir/.git".as_ref(),
|
||||
path!("/dir/.git").as_ref(),
|
||||
&[("b.txt".into(), "B".into()), ("c.txt".into(), "c".into())],
|
||||
);
|
||||
client_a.fs().set_index_for_repo(
|
||||
"/dir/.git".as_ref(),
|
||||
path!("/dir/.git").as_ref(),
|
||||
&[
|
||||
("a.txt".into(), "".into()),
|
||||
("b.txt".into(), "B".into()),
|
||||
@@ -2982,7 +2997,7 @@ async fn test_git_status_sync(
|
||||
],
|
||||
);
|
||||
client_a.fs().set_unmerged_paths_for_repo(
|
||||
"/dir/.git".as_ref(),
|
||||
path!("/dir/.git").as_ref(),
|
||||
&[(
|
||||
"b.txt".into(),
|
||||
UnmergedStatus {
|
||||
@@ -3001,7 +3016,7 @@ async fn test_git_status_sync(
|
||||
second_head: UnmergedStatusCode::Deleted,
|
||||
});
|
||||
|
||||
let (project_local, _worktree_id) = client_a.build_local_project("/dir", cx_a).await;
|
||||
let (project_local, _worktree_id) = client_a.build_local_project(path!("/dir"), cx_a).await;
|
||||
let project_id = active_call_a
|
||||
.update(cx_a, |call, cx| {
|
||||
call.share_project(project_local.clone(), cx)
|
||||
@@ -3022,11 +3037,20 @@ async fn test_git_status_sync(
|
||||
cx: &App,
|
||||
) {
|
||||
let file = file.as_ref();
|
||||
let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
|
||||
assert_eq!(worktrees.len(), 1);
|
||||
let worktree = worktrees[0].clone();
|
||||
let snapshot = worktree.read(cx).snapshot();
|
||||
assert_eq!(snapshot.status_for_file(file), status);
|
||||
let repos = project
|
||||
.repositories(cx)
|
||||
.values()
|
||||
.cloned()
|
||||
.collect::<Vec<_>>();
|
||||
assert_eq!(repos.len(), 1);
|
||||
let repo = repos.into_iter().next().unwrap();
|
||||
assert_eq!(
|
||||
repo.read(cx)
|
||||
.repository_entry
|
||||
.status_for_path(&file.into())
|
||||
.map(|entry| entry.status),
|
||||
status
|
||||
);
|
||||
}
|
||||
|
||||
project_local.read_with(cx_a, |project, cx| {
|
||||
@@ -3057,15 +3081,15 @@ async fn test_git_status_sync(
|
||||
// Delete b.txt from the index, mark conflict as resolved,
|
||||
// and modify c.txt in the working copy.
|
||||
client_a.fs().set_index_for_repo(
|
||||
"/dir/.git".as_ref(),
|
||||
path!("/dir/.git").as_ref(),
|
||||
&[("a.txt".into(), "a".into()), ("c.txt".into(), "c".into())],
|
||||
);
|
||||
client_a
|
||||
.fs()
|
||||
.set_unmerged_paths_for_repo("/dir/.git".as_ref(), &[]);
|
||||
.set_unmerged_paths_for_repo(path!("/dir/.git").as_ref(), &[]);
|
||||
client_a
|
||||
.fs()
|
||||
.atomic_write("/dir/c.txt".into(), "CC".into())
|
||||
.atomic_write(path!("/dir/c.txt").into(), "CC".into())
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
@@ -3094,6 +3118,27 @@ async fn test_git_status_sync(
|
||||
assert_status("b.txt", Some(B_STATUS_END), project, cx);
|
||||
assert_status("c.txt", Some(C_STATUS_END), project, cx);
|
||||
});
|
||||
|
||||
// Now remove the original git repository and check that collaborators are notified.
|
||||
client_a
|
||||
.fs()
|
||||
.remove_dir(path!("/dir/.git").as_ref(), RemoveOptions::default())
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
executor.run_until_parked();
|
||||
project_remote.update(cx_b, |project, cx| {
|
||||
pretty_assertions::assert_eq!(
|
||||
project.git_store().read(cx).repo_snapshots(cx),
|
||||
HashMap::default()
|
||||
);
|
||||
});
|
||||
project_remote_c.update(cx_c, |project, cx| {
|
||||
pretty_assertions::assert_eq!(
|
||||
project.git_store().read(cx).repo_snapshots(cx),
|
||||
HashMap::default()
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 10)]
|
||||
@@ -3113,14 +3158,14 @@ async fn test_fs_operations(
|
||||
client_a
|
||||
.fs()
|
||||
.insert_tree(
|
||||
"/dir",
|
||||
path!("/dir"),
|
||||
json!({
|
||||
"a.txt": "a-contents",
|
||||
"b.txt": "b-contents",
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
let (project_a, worktree_id) = client_a.build_local_project("/dir", cx_a).await;
|
||||
let (project_a, worktree_id) = client_a.build_local_project(path!("/dir"), cx_a).await;
|
||||
let project_id = active_call_a
|
||||
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
|
||||
.await
|
||||
@@ -3251,13 +3296,13 @@ async fn test_fs_operations(
|
||||
.map(|p| p.to_string_lossy())
|
||||
.collect::<Vec<_>>(),
|
||||
[
|
||||
"DIR",
|
||||
"DIR/SUBDIR",
|
||||
"DIR/SUBDIR/f.txt",
|
||||
"DIR/e.txt",
|
||||
"a.txt",
|
||||
"b.txt",
|
||||
"d.txt"
|
||||
separator!("DIR"),
|
||||
separator!("DIR/SUBDIR"),
|
||||
separator!("DIR/SUBDIR/f.txt"),
|
||||
separator!("DIR/e.txt"),
|
||||
separator!("a.txt"),
|
||||
separator!("b.txt"),
|
||||
separator!("d.txt")
|
||||
]
|
||||
);
|
||||
});
|
||||
@@ -3269,13 +3314,13 @@ async fn test_fs_operations(
|
||||
.map(|p| p.to_string_lossy())
|
||||
.collect::<Vec<_>>(),
|
||||
[
|
||||
"DIR",
|
||||
"DIR/SUBDIR",
|
||||
"DIR/SUBDIR/f.txt",
|
||||
"DIR/e.txt",
|
||||
"a.txt",
|
||||
"b.txt",
|
||||
"d.txt"
|
||||
separator!("DIR"),
|
||||
separator!("DIR/SUBDIR"),
|
||||
separator!("DIR/SUBDIR/f.txt"),
|
||||
separator!("DIR/e.txt"),
|
||||
separator!("a.txt"),
|
||||
separator!("b.txt"),
|
||||
separator!("d.txt")
|
||||
]
|
||||
);
|
||||
});
|
||||
@@ -3295,14 +3340,14 @@ async fn test_fs_operations(
|
||||
.map(|p| p.to_string_lossy())
|
||||
.collect::<Vec<_>>(),
|
||||
[
|
||||
"DIR",
|
||||
"DIR/SUBDIR",
|
||||
"DIR/SUBDIR/f.txt",
|
||||
"DIR/e.txt",
|
||||
"a.txt",
|
||||
"b.txt",
|
||||
"d.txt",
|
||||
"f.txt"
|
||||
separator!("DIR"),
|
||||
separator!("DIR/SUBDIR"),
|
||||
separator!("DIR/SUBDIR/f.txt"),
|
||||
separator!("DIR/e.txt"),
|
||||
separator!("a.txt"),
|
||||
separator!("b.txt"),
|
||||
separator!("d.txt"),
|
||||
separator!("f.txt")
|
||||
]
|
||||
);
|
||||
});
|
||||
@@ -3314,14 +3359,14 @@ async fn test_fs_operations(
|
||||
.map(|p| p.to_string_lossy())
|
||||
.collect::<Vec<_>>(),
|
||||
[
|
||||
"DIR",
|
||||
"DIR/SUBDIR",
|
||||
"DIR/SUBDIR/f.txt",
|
||||
"DIR/e.txt",
|
||||
"a.txt",
|
||||
"b.txt",
|
||||
"d.txt",
|
||||
"f.txt"
|
||||
separator!("DIR"),
|
||||
separator!("DIR/SUBDIR"),
|
||||
separator!("DIR/SUBDIR/f.txt"),
|
||||
separator!("DIR/e.txt"),
|
||||
separator!("a.txt"),
|
||||
separator!("b.txt"),
|
||||
separator!("d.txt"),
|
||||
separator!("f.txt")
|
||||
]
|
||||
);
|
||||
});
|
||||
@@ -3538,13 +3583,13 @@ async fn test_buffer_conflict_after_save(
|
||||
client_a
|
||||
.fs()
|
||||
.insert_tree(
|
||||
"/dir",
|
||||
path!("/dir"),
|
||||
json!({
|
||||
"a.txt": "a-contents",
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
let (project_a, worktree_id) = client_a.build_local_project("/dir", cx_a).await;
|
||||
let (project_a, worktree_id) = client_a.build_local_project(path!("/dir"), cx_a).await;
|
||||
let project_id = active_call_a
|
||||
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
|
||||
.await
|
||||
@@ -3602,13 +3647,13 @@ async fn test_buffer_reloading(
|
||||
client_a
|
||||
.fs()
|
||||
.insert_tree(
|
||||
"/dir",
|
||||
path!("/dir"),
|
||||
json!({
|
||||
"a.txt": "a\nb\nc",
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
let (project_a, worktree_id) = client_a.build_local_project("/dir", cx_a).await;
|
||||
let (project_a, worktree_id) = client_a.build_local_project(path!("/dir"), cx_a).await;
|
||||
let project_id = active_call_a
|
||||
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
|
||||
.await
|
||||
@@ -3630,7 +3675,11 @@ async fn test_buffer_reloading(
|
||||
let new_contents = Rope::from("d\ne\nf");
|
||||
client_a
|
||||
.fs()
|
||||
.save("/dir/a.txt".as_ref(), &new_contents, LineEnding::Windows)
|
||||
.save(
|
||||
path!("/dir/a.txt").as_ref(),
|
||||
&new_contents,
|
||||
LineEnding::Windows,
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
@@ -3660,9 +3709,9 @@ async fn test_editing_while_guest_opens_buffer(
|
||||
|
||||
client_a
|
||||
.fs()
|
||||
.insert_tree("/dir", json!({ "a.txt": "a-contents" }))
|
||||
.insert_tree(path!("/dir"), json!({ "a.txt": "a-contents" }))
|
||||
.await;
|
||||
let (project_a, worktree_id) = client_a.build_local_project("/dir", cx_a).await;
|
||||
let (project_a, worktree_id) = client_a.build_local_project(path!("/dir"), cx_a).await;
|
||||
let project_id = active_call_a
|
||||
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
|
||||
.await
|
||||
@@ -3975,19 +4024,19 @@ async fn test_collaborating_with_diagnostics(
|
||||
client_a
|
||||
.fs()
|
||||
.insert_tree(
|
||||
"/a",
|
||||
path!("/a"),
|
||||
json!({
|
||||
"a.rs": "let one = two",
|
||||
"other.rs": "",
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
|
||||
let (project_a, worktree_id) = client_a.build_local_project(path!("/a"), cx_a).await;
|
||||
|
||||
// Cause the language server to start.
|
||||
let _buffer = project_a
|
||||
.update(cx_a, |project, cx| {
|
||||
project.open_local_buffer_with_lsp("/a/other.rs", cx)
|
||||
project.open_local_buffer_with_lsp(path!("/a/other.rs"), cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
@@ -3999,7 +4048,7 @@ async fn test_collaborating_with_diagnostics(
|
||||
.await;
|
||||
fake_language_server.notify::<lsp::notification::PublishDiagnostics>(
|
||||
&lsp::PublishDiagnosticsParams {
|
||||
uri: lsp::Url::from_file_path("/a/a.rs").unwrap(),
|
||||
uri: lsp::Url::from_file_path(path!("/a/a.rs")).unwrap(),
|
||||
version: None,
|
||||
diagnostics: vec![lsp::Diagnostic {
|
||||
severity: Some(lsp::DiagnosticSeverity::WARNING),
|
||||
@@ -4019,7 +4068,7 @@ async fn test_collaborating_with_diagnostics(
|
||||
.unwrap();
|
||||
fake_language_server.notify::<lsp::notification::PublishDiagnostics>(
|
||||
&lsp::PublishDiagnosticsParams {
|
||||
uri: lsp::Url::from_file_path("/a/a.rs").unwrap(),
|
||||
uri: lsp::Url::from_file_path(path!("/a/a.rs")).unwrap(),
|
||||
version: None,
|
||||
diagnostics: vec![lsp::Diagnostic {
|
||||
severity: Some(lsp::DiagnosticSeverity::ERROR),
|
||||
@@ -4093,7 +4142,7 @@ async fn test_collaborating_with_diagnostics(
|
||||
// Simulate a language server reporting more errors for a file.
|
||||
fake_language_server.notify::<lsp::notification::PublishDiagnostics>(
|
||||
&lsp::PublishDiagnosticsParams {
|
||||
uri: lsp::Url::from_file_path("/a/a.rs").unwrap(),
|
||||
uri: lsp::Url::from_file_path(path!("/a/a.rs")).unwrap(),
|
||||
version: None,
|
||||
diagnostics: vec![
|
||||
lsp::Diagnostic {
|
||||
@@ -4187,7 +4236,7 @@ async fn test_collaborating_with_diagnostics(
|
||||
// Simulate a language server reporting no errors for a file.
|
||||
fake_language_server.notify::<lsp::notification::PublishDiagnostics>(
|
||||
&lsp::PublishDiagnosticsParams {
|
||||
uri: lsp::Url::from_file_path("/a/a.rs").unwrap(),
|
||||
uri: lsp::Url::from_file_path(path!("/a/a.rs")).unwrap(),
|
||||
version: None,
|
||||
diagnostics: vec![],
|
||||
},
|
||||
@@ -4243,7 +4292,7 @@ async fn test_collaborating_with_lsp_progress_updates_and_diagnostics_ordering(
|
||||
client_a
|
||||
.fs()
|
||||
.insert_tree(
|
||||
"/test",
|
||||
path!("/test"),
|
||||
json!({
|
||||
"one.rs": "const ONE: usize = 1;",
|
||||
"two.rs": "const TWO: usize = 2;",
|
||||
@@ -4254,7 +4303,7 @@ async fn test_collaborating_with_lsp_progress_updates_and_diagnostics_ordering(
|
||||
)
|
||||
.await;
|
||||
|
||||
let (project_a, worktree_id) = client_a.build_local_project("/test", cx_a).await;
|
||||
let (project_a, worktree_id) = client_a.build_local_project(path!("/test"), cx_a).await;
|
||||
|
||||
// Share a project as client A
|
||||
let active_call_a = cx_a.read(ActiveCall::global);
|
||||
@@ -4293,7 +4342,7 @@ async fn test_collaborating_with_lsp_progress_updates_and_diagnostics_ordering(
|
||||
for file_name in file_names {
|
||||
fake_language_server.notify::<lsp::notification::PublishDiagnostics>(
|
||||
&lsp::PublishDiagnosticsParams {
|
||||
uri: lsp::Url::from_file_path(Path::new("/test").join(file_name)).unwrap(),
|
||||
uri: lsp::Url::from_file_path(Path::new(path!("/test")).join(file_name)).unwrap(),
|
||||
version: None,
|
||||
diagnostics: vec![lsp::Diagnostic {
|
||||
severity: Some(lsp::DiagnosticSeverity::WARNING),
|
||||
@@ -4360,9 +4409,9 @@ async fn test_reloading_buffer_manually(
|
||||
|
||||
client_a
|
||||
.fs()
|
||||
.insert_tree("/a", json!({ "a.rs": "let one = 1;" }))
|
||||
.insert_tree(path!("/a"), json!({ "a.rs": "let one = 1;" }))
|
||||
.await;
|
||||
let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
|
||||
let (project_a, worktree_id) = client_a.build_local_project(path!("/a"), cx_a).await;
|
||||
let buffer_a = project_a
|
||||
.update(cx_a, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
|
||||
.await
|
||||
@@ -4390,7 +4439,7 @@ async fn test_reloading_buffer_manually(
|
||||
client_a
|
||||
.fs()
|
||||
.save(
|
||||
"/a/a.rs".as_ref(),
|
||||
path!("/a/a.rs").as_ref(),
|
||||
&Rope::from("let seven = 7;"),
|
||||
LineEnding::Unix,
|
||||
)
|
||||
@@ -4512,39 +4561,45 @@ async fn test_formatting_buffer(
|
||||
"let honey = \"two\"\n"
|
||||
);
|
||||
|
||||
// Ensure buffer can be formatted using an external command. Notice how the
|
||||
// host's configuration is honored as opposed to using the guest's settings.
|
||||
cx_a.update(|cx| {
|
||||
SettingsStore::update_global(cx, |store, cx| {
|
||||
store.update_user_settings::<AllLanguageSettings>(cx, |file| {
|
||||
file.defaults.formatter = Some(SelectedFormatter::List(FormatterList(
|
||||
vec![Formatter::External {
|
||||
command: "awk".into(),
|
||||
arguments: Some(vec!["{sub(/two/,\"{buffer_path}\")}1".to_string()].into()),
|
||||
}]
|
||||
.into(),
|
||||
)));
|
||||
// There is no `awk` command on Windows.
|
||||
#[cfg(not(target_os = "windows"))]
|
||||
{
|
||||
// Ensure buffer can be formatted using an external command. Notice how the
|
||||
// host's configuration is honored as opposed to using the guest's settings.
|
||||
cx_a.update(|cx| {
|
||||
SettingsStore::update_global(cx, |store, cx| {
|
||||
store.update_user_settings::<AllLanguageSettings>(cx, |file| {
|
||||
file.defaults.formatter = Some(SelectedFormatter::List(FormatterList(
|
||||
vec![Formatter::External {
|
||||
command: "awk".into(),
|
||||
arguments: Some(
|
||||
vec!["{sub(/two/,\"{buffer_path}\")}1".to_string()].into(),
|
||||
),
|
||||
}]
|
||||
.into(),
|
||||
)));
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
executor.allow_parking();
|
||||
project_b
|
||||
.update(cx_b, |project, cx| {
|
||||
project.format(
|
||||
HashSet::from_iter([buffer_b.clone()]),
|
||||
LspFormatTarget::Buffers,
|
||||
true,
|
||||
FormatTrigger::Save,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(
|
||||
buffer_b.read_with(cx_b, |buffer, _| buffer.text()),
|
||||
format!("let honey = \"{}/a.rs\"\n", directory.to_str().unwrap())
|
||||
);
|
||||
executor.allow_parking();
|
||||
project_b
|
||||
.update(cx_b, |project, cx| {
|
||||
project.format(
|
||||
HashSet::from_iter([buffer_b.clone()]),
|
||||
LspFormatTarget::Buffers,
|
||||
true,
|
||||
FormatTrigger::Save,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(
|
||||
buffer_b.read_with(cx_b, |buffer, _| buffer.text()),
|
||||
format!("let honey = \"{}/a.rs\"\n", directory.to_str().unwrap())
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
#[gpui::test(iterations = 10)]
|
||||
@@ -4702,7 +4757,7 @@ async fn test_definition(
|
||||
client_a
|
||||
.fs()
|
||||
.insert_tree(
|
||||
"/root",
|
||||
path!("/root"),
|
||||
json!({
|
||||
"dir-1": {
|
||||
"a.rs": "const ONE: usize = b::TWO + b::THREE;",
|
||||
@@ -4714,7 +4769,9 @@ async fn test_definition(
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
let (project_a, worktree_id) = client_a.build_local_project("/root/dir-1", cx_a).await;
|
||||
let (project_a, worktree_id) = client_a
|
||||
.build_local_project(path!("/root/dir-1"), cx_a)
|
||||
.await;
|
||||
let project_id = active_call_a
|
||||
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
|
||||
.await
|
||||
@@ -4735,7 +4792,7 @@ async fn test_definition(
|
||||
|_, _| async move {
|
||||
Ok(Some(lsp::GotoDefinitionResponse::Scalar(
|
||||
lsp::Location::new(
|
||||
lsp::Url::from_file_path("/root/dir-2/b.rs").unwrap(),
|
||||
lsp::Url::from_file_path(path!("/root/dir-2/b.rs")).unwrap(),
|
||||
lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
|
||||
),
|
||||
)))
|
||||
@@ -4766,7 +4823,7 @@ async fn test_definition(
|
||||
|_, _| async move {
|
||||
Ok(Some(lsp::GotoDefinitionResponse::Scalar(
|
||||
lsp::Location::new(
|
||||
lsp::Url::from_file_path("/root/dir-2/b.rs").unwrap(),
|
||||
lsp::Url::from_file_path(path!("/root/dir-2/b.rs")).unwrap(),
|
||||
lsp::Range::new(lsp::Position::new(1, 6), lsp::Position::new(1, 11)),
|
||||
),
|
||||
)))
|
||||
@@ -4803,7 +4860,7 @@ async fn test_definition(
|
||||
);
|
||||
Ok(Some(lsp::GotoDefinitionResponse::Scalar(
|
||||
lsp::Location::new(
|
||||
lsp::Url::from_file_path("/root/dir-2/c.rs").unwrap(),
|
||||
lsp::Url::from_file_path(path!("/root/dir-2/c.rs")).unwrap(),
|
||||
lsp::Range::new(lsp::Position::new(0, 5), lsp::Position::new(0, 7)),
|
||||
),
|
||||
)))
|
||||
@@ -4855,7 +4912,7 @@ async fn test_references(
|
||||
client_a
|
||||
.fs()
|
||||
.insert_tree(
|
||||
"/root",
|
||||
path!("/root"),
|
||||
json!({
|
||||
"dir-1": {
|
||||
"one.rs": "const ONE: usize = 1;",
|
||||
@@ -4867,7 +4924,9 @@ async fn test_references(
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
let (project_a, worktree_id) = client_a.build_local_project("/root/dir-1", cx_a).await;
|
||||
let (project_a, worktree_id) = client_a
|
||||
.build_local_project(path!("/root/dir-1"), cx_a)
|
||||
.await;
|
||||
let project_id = active_call_a
|
||||
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
|
||||
.await
|
||||
@@ -4890,7 +4949,7 @@ async fn test_references(
|
||||
move |params, _| {
|
||||
assert_eq!(
|
||||
params.text_document_position.text_document.uri.as_str(),
|
||||
"file:///root/dir-1/one.rs"
|
||||
uri!("file:///root/dir-1/one.rs")
|
||||
);
|
||||
let rx = rx.clone();
|
||||
async move {
|
||||
@@ -4919,15 +4978,15 @@ async fn test_references(
|
||||
lsp_response_tx
|
||||
.unbounded_send(Ok(Some(vec![
|
||||
lsp::Location {
|
||||
uri: lsp::Url::from_file_path("/root/dir-1/two.rs").unwrap(),
|
||||
uri: lsp::Url::from_file_path(path!("/root/dir-1/two.rs")).unwrap(),
|
||||
range: lsp::Range::new(lsp::Position::new(0, 24), lsp::Position::new(0, 27)),
|
||||
},
|
||||
lsp::Location {
|
||||
uri: lsp::Url::from_file_path("/root/dir-1/two.rs").unwrap(),
|
||||
uri: lsp::Url::from_file_path(path!("/root/dir-1/two.rs")).unwrap(),
|
||||
range: lsp::Range::new(lsp::Position::new(0, 35), lsp::Position::new(0, 38)),
|
||||
},
|
||||
lsp::Location {
|
||||
uri: lsp::Url::from_file_path("/root/dir-2/three.rs").unwrap(),
|
||||
uri: lsp::Url::from_file_path(path!("/root/dir-2/three.rs")).unwrap(),
|
||||
range: lsp::Range::new(lsp::Position::new(0, 37), lsp::Position::new(0, 40)),
|
||||
},
|
||||
])))
|
||||
@@ -4952,7 +5011,7 @@ async fn test_references(
|
||||
assert_eq!(references[1].buffer, references[0].buffer);
|
||||
assert_eq!(
|
||||
three_buffer.file().unwrap().full_path(cx),
|
||||
Path::new("/root/dir-2/three.rs")
|
||||
Path::new(path!("/root/dir-2/three.rs"))
|
||||
);
|
||||
|
||||
assert_eq!(references[0].range.to_offset(two_buffer), 24..27);
|
||||
@@ -5106,7 +5165,7 @@ async fn test_document_highlights(
|
||||
client_a
|
||||
.fs()
|
||||
.insert_tree(
|
||||
"/root-1",
|
||||
path!("/root-1"),
|
||||
json!({
|
||||
"main.rs": "fn double(number: i32) -> i32 { number + number }",
|
||||
}),
|
||||
@@ -5118,7 +5177,7 @@ async fn test_document_highlights(
|
||||
.register_fake_lsp("Rust", Default::default());
|
||||
client_a.language_registry().add(rust_lang());
|
||||
|
||||
let (project_a, worktree_id) = client_a.build_local_project("/root-1", cx_a).await;
|
||||
let (project_a, worktree_id) = client_a.build_local_project(path!("/root-1"), cx_a).await;
|
||||
let project_id = active_call_a
|
||||
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
|
||||
.await
|
||||
@@ -5143,7 +5202,7 @@ async fn test_document_highlights(
|
||||
.text_document
|
||||
.uri
|
||||
.as_str(),
|
||||
"file:///root-1/main.rs"
|
||||
uri!("file:///root-1/main.rs")
|
||||
);
|
||||
assert_eq!(
|
||||
params.text_document_position_params.position,
|
||||
@@ -5206,7 +5265,7 @@ async fn test_lsp_hover(
|
||||
client_a
|
||||
.fs()
|
||||
.insert_tree(
|
||||
"/root-1",
|
||||
path!("/root-1"),
|
||||
json!({
|
||||
"main.rs": "use std::collections::HashMap;",
|
||||
}),
|
||||
@@ -5240,7 +5299,7 @@ async fn test_lsp_hover(
|
||||
),
|
||||
];
|
||||
|
||||
let (project_a, worktree_id) = client_a.build_local_project("/root-1", cx_a).await;
|
||||
let (project_a, worktree_id) = client_a.build_local_project(path!("/root-1"), cx_a).await;
|
||||
let project_id = active_call_a
|
||||
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
|
||||
.await
|
||||
@@ -5280,7 +5339,7 @@ async fn test_lsp_hover(
|
||||
.text_document
|
||||
.uri
|
||||
.as_str(),
|
||||
"file:///root-1/main.rs"
|
||||
uri!("file:///root-1/main.rs")
|
||||
);
|
||||
let name = new_server_name.clone();
|
||||
async move {
|
||||
@@ -5306,7 +5365,7 @@ async fn test_lsp_hover(
|
||||
.text_document
|
||||
.uri
|
||||
.as_str(),
|
||||
"file:///root-1/main.rs"
|
||||
uri!("file:///root-1/main.rs")
|
||||
);
|
||||
assert_eq!(
|
||||
params.text_document_position_params.position,
|
||||
@@ -5413,7 +5472,7 @@ async fn test_project_symbols(
|
||||
client_a
|
||||
.fs()
|
||||
.insert_tree(
|
||||
"/code",
|
||||
path!("/code"),
|
||||
json!({
|
||||
"crate-1": {
|
||||
"one.rs": "const ONE: usize = 1;",
|
||||
@@ -5427,7 +5486,9 @@ async fn test_project_symbols(
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
let (project_a, worktree_id) = client_a.build_local_project("/code/crate-1", cx_a).await;
|
||||
let (project_a, worktree_id) = client_a
|
||||
.build_local_project(path!("/code/crate-1"), cx_a)
|
||||
.await;
|
||||
let project_id = active_call_a
|
||||
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
|
||||
.await
|
||||
@@ -5450,7 +5511,7 @@ async fn test_project_symbols(
|
||||
lsp::SymbolInformation {
|
||||
name: "TWO".into(),
|
||||
location: lsp::Location {
|
||||
uri: lsp::Url::from_file_path("/code/crate-2/two.rs").unwrap(),
|
||||
uri: lsp::Url::from_file_path(path!("/code/crate-2/two.rs")).unwrap(),
|
||||
range: lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
|
||||
},
|
||||
kind: lsp::SymbolKind::CONSTANT,
|
||||
@@ -5481,13 +5542,13 @@ async fn test_project_symbols(
|
||||
buffer_b_2.read_with(cx_b, |buffer, cx| {
|
||||
assert_eq!(
|
||||
buffer.file().unwrap().full_path(cx),
|
||||
Path::new("/code/crate-2/two.rs")
|
||||
Path::new(path!("/code/crate-2/two.rs"))
|
||||
);
|
||||
});
|
||||
|
||||
// Attempt to craft a symbol and violate host's privacy by opening an arbitrary file.
|
||||
let mut fake_symbol = symbols[0].clone();
|
||||
fake_symbol.path.path = Path::new("/code/secrets").into();
|
||||
fake_symbol.path.path = Path::new(path!("/code/secrets")).into();
|
||||
let error = project_b
|
||||
.update(cx_b, |project, cx| {
|
||||
project.open_buffer_for_symbol(&fake_symbol, cx)
|
||||
@@ -5520,14 +5581,14 @@ async fn test_open_buffer_while_getting_definition_pointing_to_it(
|
||||
client_a
|
||||
.fs()
|
||||
.insert_tree(
|
||||
"/root",
|
||||
path!("/root"),
|
||||
json!({
|
||||
"a.rs": "const ONE: usize = b::TWO;",
|
||||
"b.rs": "const TWO: usize = 2",
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
let (project_a, worktree_id) = client_a.build_local_project("/root", cx_a).await;
|
||||
let (project_a, worktree_id) = client_a.build_local_project(path!("/root"), cx_a).await;
|
||||
let project_id = active_call_a
|
||||
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
|
||||
.await
|
||||
@@ -5546,7 +5607,7 @@ async fn test_open_buffer_while_getting_definition_pointing_to_it(
|
||||
|_, _| async move {
|
||||
Ok(Some(lsp::GotoDefinitionResponse::Scalar(
|
||||
lsp::Location::new(
|
||||
lsp::Url::from_file_path("/root/b.rs").unwrap(),
|
||||
lsp::Url::from_file_path(path!("/root/b.rs")).unwrap(),
|
||||
lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
|
||||
),
|
||||
)))
|
||||
|
||||
@@ -27,7 +27,7 @@ use std::{
|
||||
rc::Rc,
|
||||
sync::Arc,
|
||||
};
|
||||
use util::ResultExt;
|
||||
use util::{path, ResultExt};
|
||||
|
||||
#[gpui::test(
|
||||
iterations = 100,
|
||||
@@ -280,7 +280,7 @@ impl RandomizedTest for ProjectCollaborationTest {
|
||||
let mut paths = client.fs().paths(false);
|
||||
paths.remove(0);
|
||||
let new_root_path = if paths.is_empty() || rng.gen() {
|
||||
Path::new("/").join(plan.next_root_dir_name())
|
||||
Path::new(path!("/")).join(plan.next_root_dir_name())
|
||||
} else {
|
||||
paths.choose(rng).unwrap().clone()
|
||||
};
|
||||
@@ -547,7 +547,7 @@ impl RandomizedTest for ProjectCollaborationTest {
|
||||
first_root_name
|
||||
);
|
||||
|
||||
let root_path = Path::new("/").join(&first_root_name);
|
||||
let root_path = Path::new(path!("/")).join(&first_root_name);
|
||||
client.fs().create_dir(&root_path).await.unwrap();
|
||||
client
|
||||
.fs()
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
use crate::tests::TestServer;
|
||||
use call::ActiveCall;
|
||||
use collections::HashSet;
|
||||
use collections::{HashMap, HashSet};
|
||||
use dap::DapRegistry;
|
||||
use extension::ExtensionHostProxy;
|
||||
use fs::{FakeFs, Fs as _};
|
||||
use fs::{FakeFs, Fs as _, RemoveOptions};
|
||||
use futures::StreamExt as _;
|
||||
use gpui::{
|
||||
AppContext as _, BackgroundExecutor, SemanticVersion, TestAppContext, UpdateGlobal as _,
|
||||
@@ -26,6 +27,7 @@ use remote_server::{HeadlessAppState, HeadlessProject};
|
||||
use serde_json::json;
|
||||
use settings::SettingsStore;
|
||||
use std::{path::Path, sync::Arc};
|
||||
use util::{path, separator};
|
||||
|
||||
#[gpui::test(iterations = 10)]
|
||||
async fn test_sharing_an_ssh_remote_project(
|
||||
@@ -52,7 +54,7 @@ async fn test_sharing_an_ssh_remote_project(
|
||||
let remote_fs = FakeFs::new(server_cx.executor());
|
||||
remote_fs
|
||||
.insert_tree(
|
||||
"/code",
|
||||
path!("/code"),
|
||||
json!({
|
||||
"project1": {
|
||||
".zed": {
|
||||
@@ -84,6 +86,7 @@ async fn test_sharing_an_ssh_remote_project(
|
||||
http_client: remote_http_client,
|
||||
node_runtime: node,
|
||||
languages,
|
||||
debug_adapters: Arc::new(DapRegistry::fake()),
|
||||
extension_host_proxy: Arc::new(ExtensionHostProxy::new()),
|
||||
},
|
||||
cx,
|
||||
@@ -92,7 +95,7 @@ async fn test_sharing_an_ssh_remote_project(
|
||||
|
||||
let client_ssh = SshRemoteClient::fake_client(opts, cx_a).await;
|
||||
let (project_a, worktree_id) = client_a
|
||||
.build_ssh_project("/code/project1", client_ssh, cx_a)
|
||||
.build_ssh_project(path!("/code/project1"), client_ssh, cx_a)
|
||||
.await;
|
||||
|
||||
// While the SSH worktree is being scanned, user A shares the remote project.
|
||||
@@ -178,7 +181,7 @@ async fn test_sharing_an_ssh_remote_project(
|
||||
.unwrap();
|
||||
assert_eq!(
|
||||
remote_fs
|
||||
.load("/code/project1/src/renamed.rs".as_ref())
|
||||
.load(path!("/code/project1/src/renamed.rs").as_ref())
|
||||
.await
|
||||
.unwrap(),
|
||||
"fn one() -> usize { 100 }"
|
||||
@@ -193,7 +196,7 @@ async fn test_sharing_an_ssh_remote_project(
|
||||
.path()
|
||||
.to_string_lossy()
|
||||
.to_string(),
|
||||
"src/renamed.rs".to_string()
|
||||
separator!("src/renamed.rs").to_string()
|
||||
);
|
||||
});
|
||||
}
|
||||
@@ -251,6 +254,7 @@ async fn test_ssh_collaboration_git_branches(
|
||||
http_client: remote_http_client,
|
||||
node_runtime: node,
|
||||
languages,
|
||||
debug_adapters: Arc::new(DapRegistry::fake()),
|
||||
extension_host_proxy: Arc::new(ExtensionHostProxy::new()),
|
||||
},
|
||||
cx,
|
||||
@@ -356,6 +360,26 @@ async fn test_ssh_collaboration_git_branches(
|
||||
});
|
||||
|
||||
assert_eq!(server_branch.name, "totally-new-branch");
|
||||
|
||||
// Remove the git repository and check that all participants get the update.
|
||||
remote_fs
|
||||
.remove_dir("/project/.git".as_ref(), RemoveOptions::default())
|
||||
.await
|
||||
.unwrap();
|
||||
executor.run_until_parked();
|
||||
|
||||
project_a.update(cx_a, |project, cx| {
|
||||
pretty_assertions::assert_eq!(
|
||||
project.git_store().read(cx).repo_snapshots(cx),
|
||||
HashMap::default()
|
||||
);
|
||||
});
|
||||
project_b.update(cx_b, |project, cx| {
|
||||
pretty_assertions::assert_eq!(
|
||||
project.git_store().read(cx).repo_snapshots(cx),
|
||||
HashMap::default()
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
@@ -388,7 +412,10 @@ async fn test_ssh_collaboration_formatting_with_prettier(
|
||||
let buffer_text = "let one = \"two\"";
|
||||
let prettier_format_suffix = project::TEST_PRETTIER_FORMAT_SUFFIX;
|
||||
remote_fs
|
||||
.insert_tree("/project", serde_json::json!({ "a.ts": buffer_text }))
|
||||
.insert_tree(
|
||||
path!("/project"),
|
||||
serde_json::json!({ "a.ts": buffer_text }),
|
||||
)
|
||||
.await;
|
||||
|
||||
let test_plugin = "test_plugin";
|
||||
@@ -427,6 +454,7 @@ async fn test_ssh_collaboration_formatting_with_prettier(
|
||||
http_client: remote_http_client,
|
||||
node_runtime: NodeRuntime::unavailable(),
|
||||
languages,
|
||||
debug_adapters: Arc::new(DapRegistry::fake()),
|
||||
extension_host_proxy: Arc::new(ExtensionHostProxy::new()),
|
||||
},
|
||||
cx,
|
||||
@@ -435,7 +463,7 @@ async fn test_ssh_collaboration_formatting_with_prettier(
|
||||
|
||||
let client_ssh = SshRemoteClient::fake_client(opts, cx_a).await;
|
||||
let (project_a, worktree_id) = client_a
|
||||
.build_ssh_project("/project", client_ssh, cx_a)
|
||||
.build_ssh_project(path!("/project"), client_ssh, cx_a)
|
||||
.await;
|
||||
|
||||
// While the SSH worktree is being scanned, user A shares the remote project.
|
||||
|
||||
@@ -14,6 +14,7 @@ use client::{
|
||||
use clock::FakeSystemClock;
|
||||
use collab_ui::channel_view::ChannelView;
|
||||
use collections::{HashMap, HashSet};
|
||||
use dap::DapRegistry;
|
||||
use fs::FakeFs;
|
||||
use futures::{channel::oneshot, StreamExt as _};
|
||||
use git::GitHostingProviderRegistry;
|
||||
@@ -43,6 +44,7 @@ use std::{
|
||||
Arc,
|
||||
},
|
||||
};
|
||||
use util::path;
|
||||
use workspace::{Workspace, WorkspaceStore};
|
||||
|
||||
#[cfg(not(target_os = "macos"))]
|
||||
@@ -276,12 +278,14 @@ impl TestServer {
|
||||
let user_store = cx.new(|cx| UserStore::new(client.clone(), cx));
|
||||
let workspace_store = cx.new(|cx| WorkspaceStore::new(client.clone(), cx));
|
||||
let language_registry = Arc::new(LanguageRegistry::test(cx.executor()));
|
||||
let debug_adapters = Arc::new(DapRegistry::default());
|
||||
let session = cx.new(|cx| AppSession::new(Session::test(), cx));
|
||||
let app_state = Arc::new(workspace::AppState {
|
||||
client: client.clone(),
|
||||
user_store: user_store.clone(),
|
||||
workspace_store,
|
||||
languages: language_registry,
|
||||
debug_adapters,
|
||||
fs: fs.clone(),
|
||||
build_window_options: |_, _| Default::default(),
|
||||
node_runtime: NodeRuntime::unavailable(),
|
||||
@@ -741,7 +745,7 @@ impl TestClient {
|
||||
pub async fn build_test_project(&self, cx: &mut TestAppContext) -> Entity<Project> {
|
||||
self.fs()
|
||||
.insert_tree(
|
||||
"/a",
|
||||
path!("/a"),
|
||||
json!({
|
||||
"1.txt": "one\none\none",
|
||||
"2.js": "function two() { return 2; }",
|
||||
@@ -749,7 +753,7 @@ impl TestClient {
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
self.build_local_project("/a", cx).await.0
|
||||
self.build_local_project(path!("/a"), cx).await.0
|
||||
}
|
||||
|
||||
pub async fn host_workspace(
|
||||
@@ -794,6 +798,7 @@ impl TestClient {
|
||||
self.app_state.node_runtime.clone(),
|
||||
self.app_state.user_store.clone(),
|
||||
self.app_state.languages.clone(),
|
||||
self.app_state.debug_adapters.clone(),
|
||||
self.app_state.fs.clone(),
|
||||
None,
|
||||
cx,
|
||||
|
||||
@@ -1156,20 +1156,7 @@ impl Panel for ChatPanel {
}

fn icon(&self, _window: &Window, cx: &App) -> Option<ui::IconName> {
let show_icon = match ChatPanelSettings::get_global(cx).button {
ChatPanelButton::Never => false,
ChatPanelButton::Always => true,
ChatPanelButton::WhenInCall => {
let is_in_call = ActiveCall::global(cx)
.read(cx)
.room()
.map_or(false, |room| room.read(cx).contains_guests());

self.active || is_in_call
}
};

show_icon.then(|| ui::IconName::MessageBubbles)
self.enabled(cx).then(|| ui::IconName::MessageBubbles)
}

fn icon_tooltip(&self, _: &Window, _: &App) -> Option<&'static str> {
@@ -1190,6 +1177,21 @@ impl Panel for ChatPanel {
fn activation_priority(&self) -> u32 {
7
}

fn enabled(&self, cx: &App) -> bool {
match ChatPanelSettings::get_global(cx).button {
ChatPanelButton::Never => false,
ChatPanelButton::Always => true,
ChatPanelButton::WhenInCall => {
let is_in_call = ActiveCall::global(cx)
.read(cx)
.room()
.map_or(false, |room| room.read(cx).contains_guests());

self.active || is_in_call
}
}
}
}

impl EventEmitter<PanelEvent> for ChatPanel {}

@@ -247,12 +247,15 @@ impl ContextServerManager {
let mut desired_servers = HashMap::default();

let (registry, project) = this.update(cx, |this, cx| {
let location = this.project.read(cx).worktrees(cx).next().map(|worktree| {
settings::SettingsLocation {
let location = this
.project
.read(cx)
.visible_worktrees(cx)
.next()
.map(|worktree| settings::SettingsLocation {
worktree_id: worktree.read(cx).id(),
path: Path::new(""),
}
});
});
let settings = ContextServerSettings::get(location, cx);
desired_servers = settings.context_servers.clone();

@@ -8,6 +8,10 @@ license = "GPL-3.0-or-later"
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
[lib]
|
||||
path = "src/dap.rs"
|
||||
doctest = false
|
||||
|
||||
[features]
|
||||
test-support = [
|
||||
"gpui/test-support",
|
||||
@@ -35,6 +39,7 @@ log.workspace = true
|
||||
node_runtime.workspace = true
|
||||
parking_lot.workspace = true
|
||||
paths.workspace = true
|
||||
regex.workspace = true
|
||||
schemars.workspace = true
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
|
||||
@@ -13,15 +13,16 @@ use serde_json::Value;
use settings::WorktreeId;
use smol::{self, fs::File, lock::Mutex};
use std::{
borrow::Borrow,
collections::{HashMap, HashSet},
ffi::{OsStr, OsString},
fmt::Debug,
net::Ipv4Addr,
ops::Deref,
path::{Path, PathBuf},
sync::Arc,
path::PathBuf,
sync::{Arc, LazyLock},
};
use task::DebugAdapterConfig;
use task::{DebugAdapterConfig, DebugTaskDefinition};
use util::ResultExt;

#[derive(Clone, Debug, PartialEq, Eq)]
@@ -46,7 +47,7 @@ pub trait DapDelegate {
}

#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Deserialize, Serialize)]
pub struct DebugAdapterName(pub Arc<str>);
pub struct DebugAdapterName(pub SharedString);

impl Deref for DebugAdapterName {
type Target = str;
@@ -62,9 +63,9 @@ impl AsRef<str> for DebugAdapterName {
}
}

impl AsRef<Path> for DebugAdapterName {
fn as_ref(&self) -> &Path {
Path::new(&*self.0)
impl Borrow<str> for DebugAdapterName {
fn borrow(&self) -> &str {
&self.0
}
}

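The hunk above swaps `DebugAdapterName`'s `AsRef<Path>` impl for `Borrow<str>`. The practical effect of a `Borrow<str>` impl, given that `Hash` and `Eq` are derived from the inner string, is that a map keyed by the newtype can be queried with a plain `&str`, which is presumably what the new `DapRegistry` relies on. A minimal sketch with a stand-in type, not the real `SharedString`-backed struct:

```rust
use std::borrow::Borrow;
use std::collections::HashMap;

// Stand-in newtype; the real DebugAdapterName wraps gpui's SharedString.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
struct AdapterName(String);

impl Borrow<str> for AdapterName {
    fn borrow(&self) -> &str {
        &self.0
    }
}

fn main() {
    let mut registry: HashMap<AdapterName, u32> = HashMap::new();
    registry.insert(AdapterName("fake-adapter".into()), 1);

    // Because AdapterName: Borrow<str> (and the borrowed str hashes the same
    // as the derived Hash of the newtype), lookups can take a plain &str key.
    assert_eq!(registry.get("fake-adapter"), Some(&1));
}
```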
@@ -76,7 +77,7 @@ impl std::fmt::Display for DebugAdapterName {

impl From<DebugAdapterName> for SharedString {
fn from(name: DebugAdapterName) -> Self {
SharedString::from(name.0)
name.0
}
}

@@ -123,7 +124,7 @@ pub async fn download_adapter_from_github(
file_type: DownloadedFileType,
delegate: &dyn DapDelegate,
) -> Result<PathBuf> {
let adapter_path = paths::debug_adapters_dir().join(&adapter_name);
let adapter_path = paths::debug_adapters_dir().join(&adapter_name.as_ref());
let version_path = adapter_path.join(format!("{}_{}", adapter_name, github_version.tag_name));
let fs = delegate.fs();

@@ -288,15 +289,21 @@ pub trait DebugAdapter: 'static + Send + Sync {
) -> Result<DebugAdapterBinary>;

/// Should return base configuration to make the debug adapter work
fn request_args(&self, config: &DebugAdapterConfig) -> Value;
fn request_args(&self, config: &DebugTaskDefinition) -> Value;

fn attach_processes_filter(&self) -> regex::Regex {
EMPTY_REGEX.clone()
}
}

static EMPTY_REGEX: LazyLock<regex::Regex> =
LazyLock::new(|| regex::Regex::new("").expect("Regex compilation to succeed"));
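The new default `attach_processes_filter` hands out clones of a regex stored in a `LazyLock` static, so the pattern is compiled once on first access (std's `LazyLock` has been stable since Rust 1.80). A small self-contained sketch of the same pattern, with an illustrative name and pattern rather than anything taken from this diff:

```rust
use std::sync::LazyLock;

use regex::Regex;

// Compiled once, on first access; every later call reuses the same pattern.
static PROCESS_NAME_FILTER: LazyLock<Regex> =
    LazyLock::new(|| Regex::new(r"^fake-binary").expect("pattern is valid"));

fn matches_process(name: &str) -> bool {
    // Cloning the already compiled Regex, as the trait method above does,
    // avoids recompiling the pattern at every call site.
    PROCESS_NAME_FILTER.clone().is_match(name)
}

fn main() {
    assert!(matches_process("fake-binary --port 4711"));
    assert!(!matches_process("other-binary"));
}
```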
#[cfg(any(test, feature = "test-support"))]
|
||||
pub struct FakeAdapter {}
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
impl FakeAdapter {
|
||||
const ADAPTER_NAME: &'static str = "fake-adapter";
|
||||
pub const ADAPTER_NAME: &'static str = "fake-adapter";
|
||||
|
||||
pub fn new() -> Self {
|
||||
Self {}
|
||||
@@ -351,13 +358,13 @@ impl DebugAdapter for FakeAdapter {
|
||||
unimplemented!("get installed binary");
|
||||
}
|
||||
|
||||
fn request_args(&self, config: &DebugAdapterConfig) -> Value {
|
||||
fn request_args(&self, config: &DebugTaskDefinition) -> Value {
|
||||
use serde_json::json;
|
||||
use task::DebugRequestType;
|
||||
|
||||
json!({
|
||||
"request": match config.request {
|
||||
DebugRequestType::Launch => "launch",
|
||||
DebugRequestType::Launch(_) => "launch",
|
||||
DebugRequestType::Attach(_) => "attach",
|
||||
},
|
||||
"process_id": if let DebugRequestType::Attach(attach_config) = &config.request {
|
||||
@@ -367,4 +374,10 @@ impl DebugAdapter for FakeAdapter {
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
fn attach_processes_filter(&self) -> regex::Regex {
|
||||
static REGEX: LazyLock<regex::Regex> =
|
||||
LazyLock::new(|| regex::Regex::new("^fake-binary").unwrap());
|
||||
REGEX.clone()
|
||||
}
|
||||
}
|
||||
|
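The `DebugAdapterName` newtype switches from `Arc<str>` to `SharedString` and gains a `Borrow<str>` impl, which is what lets the `BTreeMap`-backed registry added later in this diff be queried with a plain `&str`. A minimal, self-contained sketch of that mechanism (the `AdapterName` type below is a stand-in, not the real `DebugAdapterName`):

```rust
use std::{borrow::Borrow, collections::BTreeMap};

// Stand-in for DebugAdapterName: an ordered newtype over a string.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
struct AdapterName(String);

impl Borrow<str> for AdapterName {
    fn borrow(&self) -> &str {
        &self.0
    }
}

fn main() {
    let mut adapters: BTreeMap<AdapterName, u32> = BTreeMap::new();
    adapters.insert(AdapterName("GDB".into()), 1);

    // Because AdapterName: Borrow<str> and str ordering agrees with the key
    // ordering, the map can be queried with a &str directly.
    assert_eq!(adapters.get("GDB"), Some(&1));
}
```

The lookup works because `BTreeMap::get` accepts any borrowed form of the key whose ordering agrees with the key's own ordering.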
||||
@@ -71,7 +71,6 @@ impl DebugAdapterClient {
|
||||
let client_id = this.id;
|
||||
|
||||
// start handling events/reverse requests
|
||||
|
||||
cx.background_spawn(Self::handle_receive_messages(
|
||||
client_id,
|
||||
server_rx,
|
||||
@@ -119,7 +118,6 @@ impl DebugAdapterClient {
|
||||
Ok(message) => message,
|
||||
Err(e) => break Err(e.into()),
|
||||
};
|
||||
|
||||
match message {
|
||||
Message::Event(ev) => {
|
||||
log::debug!("Client {} received event `{}`", client_id.0, &ev);
|
||||
@@ -164,7 +162,6 @@ impl DebugAdapterClient {
|
||||
command: R::COMMAND.to_string(),
|
||||
arguments: Some(serialized_arguments),
|
||||
};
|
||||
|
||||
self.transport_delegate
|
||||
.add_pending_request(sequence_id, callback_tx)
|
||||
.await;
|
||||
@@ -172,7 +169,7 @@ impl DebugAdapterClient {
|
||||
log::debug!(
|
||||
"Client {} send `{}` request with sequence_id: {}",
|
||||
self.id.0,
|
||||
R::COMMAND.to_string(),
|
||||
R::COMMAND,
|
||||
sequence_id
|
||||
);
|
||||
|
||||
@@ -434,7 +431,7 @@ mod tests {
|
||||
|
||||
let client = DebugAdapterClient::start(
|
||||
crate::client::SessionId(1),
|
||||
DebugAdapterName(Arc::from("test-adapter")),
|
||||
DebugAdapterName("test-adapter".into()),
|
||||
DebugAdapterBinary {
|
||||
command: "command".into(),
|
||||
arguments: Default::default(),
|
||||
|
||||
crates/dap/src/dap.rs (Normal file, 17 lines)
@@ -0,0 +1,17 @@
pub mod adapters;
pub mod client;
pub mod debugger_settings;
pub mod proto_conversions;
mod registry;
pub mod transport;

pub use dap_types::*;
pub use registry::DapRegistry;
pub use task::{DebugAdapterConfig, DebugRequestType};

pub type ScopeId = u64;
pub type VariableReference = u64;
pub type StackFrameId = u64;

#[cfg(any(test, feature = "test-support"))]
pub use adapters::FakeAdapter;
@@ -1,38 +0,0 @@
pub mod adapters;
pub mod client;
pub mod debugger_settings;
pub mod proto_conversions;
pub mod transport;

pub use dap_types::*;
pub use task::{DebugAdapterConfig, DebugAdapterKind, DebugRequestType};

pub type ScopeId = u64;
pub type VariableReference = u64;
pub type StackFrameId = u64;

#[cfg(any(test, feature = "test-support"))]
pub use adapters::FakeAdapter;

#[cfg(any(test, feature = "test-support"))]
pub fn test_config(
    request: DebugRequestType,
    fail: Option<bool>,
    caps: Option<Capabilities>,
) -> DebugAdapterConfig {
    DebugAdapterConfig {
        label: "test config".into(),
        kind: DebugAdapterKind::Fake((
            fail.unwrap_or_default(),
            caps.unwrap_or(Capabilities {
                supports_step_back: Some(false),
                ..Default::default()
            }),
        )),
        request,
        program: None,
        supports_attach: false,
        cwd: None,
        initialize_args: None,
    }
}
crates/dap/src/registry.rs (Normal file, 39 lines)
@@ -0,0 +1,39 @@
use parking_lot::RwLock;

use crate::adapters::{DebugAdapter, DebugAdapterName};
use std::{collections::BTreeMap, sync::Arc};

#[derive(Default)]
struct DapRegistryState {
    adapters: BTreeMap<DebugAdapterName, Arc<dyn DebugAdapter>>,
}

#[derive(Default)]
/// Stores available debug adapters.
pub struct DapRegistry(Arc<RwLock<DapRegistryState>>);

impl DapRegistry {
    pub fn add_adapter(&self, adapter: Arc<dyn DebugAdapter>) {
        let name = adapter.name();
        let _previous_value = self.0.write().adapters.insert(name, adapter);
        debug_assert!(
            _previous_value.is_none(),
            "Attempted to insert a new debug adapter when one is already registered"
        );
    }
    pub fn adapter(&self, name: &str) -> Option<Arc<dyn DebugAdapter>> {
        self.0.read().adapters.get(name).cloned()
    }
    pub fn enumerate_adapters(&self) -> Vec<DebugAdapterName> {
        self.0.read().adapters.keys().cloned().collect()
    }

    #[cfg(any(test, feature = "test-support"))]
    pub fn fake() -> Self {
        use crate::FakeAdapter;

        let register = Self::default();
        register.add_adapter(Arc::new(FakeAdapter::new()));
        register
    }
}
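A sketch of how a test inside this crate might exercise the new registry, using only the APIs shown above (`DapRegistry::fake`, `adapter`, `enumerate_adapters`); the assertions are illustrative, not taken from the diff:

```rust
// Hypothetical test module for crates/dap; relies only on items visible above.
#[cfg(test)]
mod registry_usage_sketch {
    use crate::{DapRegistry, FakeAdapter};

    #[test]
    fn lookup_by_plain_str() {
        // `fake()` registers the FakeAdapter under "fake-adapter".
        let registry = DapRegistry::fake();

        // Lookups take a &str thanks to the Borrow<str> impl on DebugAdapterName.
        let adapter = registry.adapter(FakeAdapter::ADAPTER_NAME);
        assert!(adapter.is_some());

        // Enumeration returns the registered names in BTreeMap (sorted) order.
        let names = registry.enumerate_adapters();
        assert_eq!(names.len(), 1);
    }
}
```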
||||
@@ -261,8 +261,6 @@ impl TransportDelegate {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
smol::future::yield_now().await;
|
||||
};
|
||||
|
||||
log::debug!("Handle adapter log dropped");
|
||||
@@ -319,8 +317,6 @@ impl TransportDelegate {
|
||||
}
|
||||
Err(error) => break Err(error.into()),
|
||||
}
|
||||
|
||||
smol::future::yield_now().await;
|
||||
};
|
||||
|
||||
log::debug!("Handle adapter input dropped");
|
||||
@@ -360,8 +356,6 @@ impl TransportDelegate {
|
||||
}
|
||||
Err(e) => break Err(e),
|
||||
}
|
||||
|
||||
smol::future::yield_now().await;
|
||||
};
|
||||
|
||||
drop(client_tx);
|
||||
@@ -393,8 +387,6 @@ impl TransportDelegate {
|
||||
}
|
||||
Err(error) => break Err(error.into()),
|
||||
}
|
||||
|
||||
smol::future::yield_now().await;
|
||||
};
|
||||
|
||||
log::debug!("Handle adapter error dropped");
|
||||
|
||||
@@ -30,7 +30,6 @@ paths.workspace = true
|
||||
regex.workspace = true
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
sysinfo.workspace = true
|
||||
task.workspace = true
|
||||
util.workspace = true
|
||||
|
||||
|
||||
@@ -1,84 +0,0 @@
|
||||
use dap::transport::TcpTransport;
|
||||
use gpui::AsyncApp;
|
||||
use serde_json::Value;
|
||||
use std::{collections::HashMap, ffi::OsString, path::PathBuf};
|
||||
use sysinfo::{Pid, Process};
|
||||
use task::DebugAdapterConfig;
|
||||
|
||||
use crate::*;
|
||||
|
||||
pub(crate) struct CustomDebugAdapter {
|
||||
custom_args: CustomArgs,
|
||||
}
|
||||
|
||||
impl CustomDebugAdapter {
|
||||
const ADAPTER_NAME: &'static str = "custom_dap";
|
||||
|
||||
pub(crate) async fn new(custom_args: CustomArgs) -> Result<Self> {
|
||||
Ok(CustomDebugAdapter { custom_args })
|
||||
}
|
||||
|
||||
pub fn attach_processes(processes: &HashMap<Pid, Process>) -> Vec<(&Pid, &Process)> {
|
||||
processes.iter().collect::<Vec<_>>()
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait(?Send)]
|
||||
impl DebugAdapter for CustomDebugAdapter {
|
||||
fn name(&self) -> DebugAdapterName {
|
||||
DebugAdapterName(Self::ADAPTER_NAME.into())
|
||||
}
|
||||
|
||||
async fn get_binary(
|
||||
&self,
|
||||
_: &dyn DapDelegate,
|
||||
config: &DebugAdapterConfig,
|
||||
_: Option<PathBuf>,
|
||||
_: &mut AsyncApp,
|
||||
) -> Result<DebugAdapterBinary> {
|
||||
let connection = if let DebugConnectionType::TCP(connection) = &self.custom_args.connection
|
||||
{
|
||||
Some(adapters::TcpArguments {
|
||||
host: connection.host(),
|
||||
port: TcpTransport::port(&connection).await?,
|
||||
timeout: connection.timeout,
|
||||
})
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let ret = DebugAdapterBinary {
|
||||
command: self.custom_args.command.clone(),
|
||||
arguments: self
|
||||
.custom_args
|
||||
.args
|
||||
.clone()
|
||||
.map(|args| args.iter().map(OsString::from).collect()),
|
||||
cwd: config.cwd.clone(),
|
||||
envs: self.custom_args.envs.clone(),
|
||||
connection,
|
||||
};
|
||||
Ok(ret)
|
||||
}
|
||||
|
||||
async fn fetch_latest_adapter_version(&self, _: &dyn DapDelegate) -> Result<AdapterVersion> {
|
||||
bail!("Custom debug adapters don't have latest versions")
|
||||
}
|
||||
|
||||
async fn install_binary(&self, _: AdapterVersion, _: &dyn DapDelegate) -> Result<()> {
|
||||
bail!("Custom debug adapters cannot be installed")
|
||||
}
|
||||
|
||||
async fn get_installed_binary(
|
||||
&self,
|
||||
_: &dyn DapDelegate,
|
||||
_: &DebugAdapterConfig,
|
||||
_: Option<PathBuf>,
|
||||
_: &mut AsyncApp,
|
||||
) -> Result<DebugAdapterBinary> {
|
||||
bail!("Custom debug adapters cannot be installed")
|
||||
}
|
||||
|
||||
fn request_args(&self, config: &DebugAdapterConfig) -> Value {
|
||||
json!({"program": config.program})
|
||||
}
|
||||
}
|
@@ -1,5 +1,3 @@
mod custom;
#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
mod gdb;
mod go;
mod javascript;
@@ -7,16 +5,17 @@ mod lldb;
mod php;
mod python;

use std::{collections::HashMap, sync::Arc};
use std::{net::Ipv4Addr, sync::Arc};

use anyhow::{anyhow, bail, Result};
use anyhow::{anyhow, Result};
use async_trait::async_trait;
use custom::CustomDebugAdapter;
use dap::adapters::{
    self, AdapterVersion, DapDelegate, DebugAdapter, DebugAdapterBinary, DebugAdapterName,
    GithubRepo,
use dap::{
    adapters::{
        self, AdapterVersion, DapDelegate, DebugAdapter, DebugAdapterBinary, DebugAdapterName,
        GithubRepo,
    },
    DapRegistry,
};
#[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
use gdb::GdbDebugAdapter;
use go::GoDebugAdapter;
use javascript::JsDebugAdapter;
@@ -24,44 +23,28 @@ use lldb::LldbDebugAdapter;
use php::PhpDebugAdapter;
use python::PythonDebugAdapter;
use serde_json::{json, Value};
use sysinfo::{Pid, Process};
use task::{CustomArgs, DebugAdapterConfig, DebugAdapterKind, DebugConnectionType, TCPHost};
use task::{DebugAdapterConfig, TCPHost};

pub async fn build_adapter(kind: &DebugAdapterKind) -> Result<Arc<dyn DebugAdapter>> {
    match kind {
        DebugAdapterKind::Custom(start_args) => {
            Ok(Arc::new(CustomDebugAdapter::new(start_args.clone()).await?))
        }
        DebugAdapterKind::Python(host) => Ok(Arc::new(PythonDebugAdapter::new(host).await?)),
        DebugAdapterKind::Php(host) => Ok(Arc::new(PhpDebugAdapter::new(host.clone()).await?)),
        DebugAdapterKind::Javascript(host) => {
            Ok(Arc::new(JsDebugAdapter::new(host.clone()).await?))
        }
        DebugAdapterKind::Lldb => Ok(Arc::new(LldbDebugAdapter::new())),
        DebugAdapterKind::Go(host) => Ok(Arc::new(GoDebugAdapter::new(host).await?)),
        #[cfg(any(target_arch = "x86", target_arch = "x86_64"))]
        DebugAdapterKind::Gdb => Ok(Arc::new(GdbDebugAdapter::new())),
        #[cfg(any(test, feature = "test-support"))]
        DebugAdapterKind::Fake(_) => Ok(Arc::new(dap::adapters::FakeAdapter::new())),
        #[cfg(not(any(test, feature = "test-support")))]
        #[allow(unreachable_patterns)]
        _ => unreachable!("Fake variant only exists with test-support feature"),
    }
pub fn init(registry: Arc<DapRegistry>) {
    registry.add_adapter(Arc::from(PythonDebugAdapter));
    registry.add_adapter(Arc::from(PhpDebugAdapter));
    registry.add_adapter(Arc::from(JsDebugAdapter::default()));
    registry.add_adapter(Arc::from(LldbDebugAdapter));
    registry.add_adapter(Arc::from(GoDebugAdapter));
    registry.add_adapter(Arc::from(GdbDebugAdapter));
}

pub fn attach_processes<'a>(
    kind: &DebugAdapterKind,
    processes: &'a HashMap<Pid, Process>,
) -> Vec<(&'a Pid, &'a Process)> {
    match kind {
        #[cfg(any(test, feature = "test-support"))]
        DebugAdapterKind::Fake(_) => processes
            .iter()
            .filter(|(pid, _)| pid.as_u32() == std::process::id())
            .collect::<Vec<_>>(),
        DebugAdapterKind::Custom(_) => CustomDebugAdapter::attach_processes(processes),
        DebugAdapterKind::Javascript(_) => JsDebugAdapter::attach_processes(processes),
        DebugAdapterKind::Lldb => LldbDebugAdapter::attach_processes(processes),
        _ => processes.iter().collect::<Vec<_>>(),
    }
pub(crate) async fn configure_tcp_connection(
    tcp_connection: TCPHost,
) -> Result<(Ipv4Addr, u16, Option<u64>)> {
    let host = tcp_connection.host();
    let timeout = tcp_connection.timeout;

    let port = if let Some(port) = tcp_connection.port {
        port
    } else {
        dap::transport::TcpTransport::port(&tcp_connection).await?
    };

    Ok((host, port, timeout))
}

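`configure_tcp_connection` centralizes the host/port/timeout handling that each adapter previously did in its constructor, falling back to `TcpTransport::port` when no port is configured. That helper is not shown in this diff; one common way to implement "pick an unused port", and a plausible reading of what it does, is to bind to port 0 and read back the OS-assigned port. A std-only sketch of that idea:

```rust
use std::net::{Ipv4Addr, TcpListener};

// Hypothetical stand-in for TcpTransport::port: ask the OS for a free port by
// binding to port 0, then read back the assigned port number. The real helper
// is async and lives in dap::transport; this is only an illustration.
fn pick_unused_port(host: Ipv4Addr) -> std::io::Result<u16> {
    let listener = TcpListener::bind((host, 0))?;
    Ok(listener.local_addr()?.port())
}

fn main() -> std::io::Result<()> {
    let host = Ipv4Addr::LOCALHOST;
    // Mirrors configure_tcp_connection: use the configured port if present,
    // otherwise fall back to an OS-assigned one.
    let configured_port: Option<u16> = None;
    let port = match configured_port {
        Some(port) => port,
        None => pick_unused_port(host)?,
    };
    println!("debug adapter will listen on {host}:{port}");
    Ok(())
}
```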
@@ -1,20 +1,17 @@
use std::ffi::OsStr;

use anyhow::Result;
use anyhow::{bail, Result};
use async_trait::async_trait;
use gpui::AsyncApp;
use task::DebugAdapterConfig;
use task::{DebugAdapterConfig, DebugTaskDefinition};

use crate::*;

pub(crate) struct GdbDebugAdapter {}
#[derive(Default)]
pub(crate) struct GdbDebugAdapter;

impl GdbDebugAdapter {
    const ADAPTER_NAME: &'static str = "gdb";

    pub(crate) fn new() -> Self {
        GdbDebugAdapter {}
    }
    const ADAPTER_NAME: &'static str = "GDB";
}

#[async_trait(?Send)]
@@ -26,7 +23,7 @@ impl DebugAdapter for GdbDebugAdapter {
    async fn get_binary(
        &self,
        delegate: &dyn DapDelegate,
        config: &DebugAdapterConfig,
        _: &DebugAdapterConfig,
        user_installed_path: Option<std::path::PathBuf>,
        _: &mut AsyncApp,
    ) -> Result<DebugAdapterBinary> {
@@ -34,7 +31,6 @@ impl DebugAdapter for GdbDebugAdapter {
            .filter(|p| p.exists())
            .and_then(|p| p.to_str().map(|s| s.to_string()));

        /* GDB implements DAP natively so just need to */
        let gdb_path = delegate
            .which(OsStr::new("gdb"))
            .and_then(|p| p.to_str().map(|s| s.to_string()))
@@ -50,7 +46,7 @@ impl DebugAdapter for GdbDebugAdapter {
            command: gdb_path,
            arguments: Some(vec!["-i=dap".into()]),
            envs: None,
            cwd: config.cwd.clone(),
            cwd: None,
            connection: None,
        })
    }
@@ -77,7 +73,14 @@ impl DebugAdapter for GdbDebugAdapter {
        unimplemented!("GDB cannot be installed by Zed (yet)")
    }

    fn request_args(&self, config: &DebugAdapterConfig) -> Value {
        json!({"program": config.program, "cwd": config.cwd})
    fn request_args(&self, config: &DebugTaskDefinition) -> Value {
        match &config.request {
            dap::DebugRequestType::Attach(attach_config) => {
                json!({"pid": attach_config.process_id})
            }
            dap::DebugRequestType::Launch(launch_config) => {
                json!({"program": launch_config.program, "cwd": launch_config.cwd})
            }
        }
    }
}

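With `DebugTaskDefinition`, GDB's `request_args` now emits a different payload per request kind. A self-contained sketch of the two shapes, using local stand-ins for the request types (field names and values are illustrative, not the real `task` types):

```rust
use serde_json::{json, Value};

// Local stand-ins for DebugRequestType / AttachConfig / LaunchConfig,
// reduced to the fields the GDB payload actually uses.
enum RequestSketch {
    Attach { process_id: Option<u32> },
    Launch { program: String, cwd: Option<String> },
}

// Mirrors the branch structure of the new request_args above.
fn gdb_request_args_sketch(request: &RequestSketch) -> Value {
    match request {
        RequestSketch::Attach { process_id } => json!({ "pid": process_id }),
        RequestSketch::Launch { program, cwd } => json!({ "program": program, "cwd": cwd }),
    }
}

fn main() {
    let attach = RequestSketch::Attach { process_id: Some(1234) };
    let launch = RequestSketch::Launch {
        program: "./a.out".into(),
        cwd: Some("/tmp".into()),
    };
    assert_eq!(gdb_request_args_sketch(&attach), json!({ "pid": 1234 }));
    assert_eq!(
        gdb_request_args_sketch(&launch),
        json!({ "program": "./a.out", "cwd": "/tmp" })
    );
}
```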
@@ -1,25 +1,15 @@
|
||||
use dap::transport::TcpTransport;
|
||||
use anyhow::bail;
|
||||
use gpui::AsyncApp;
|
||||
use std::{ffi::OsStr, net::Ipv4Addr, path::PathBuf};
|
||||
use std::{ffi::OsStr, path::PathBuf};
|
||||
use task::DebugTaskDefinition;
|
||||
|
||||
use crate::*;
|
||||
|
||||
pub(crate) struct GoDebugAdapter {
|
||||
port: u16,
|
||||
host: Ipv4Addr,
|
||||
timeout: Option<u64>,
|
||||
}
|
||||
#[derive(Default, Debug)]
|
||||
pub(crate) struct GoDebugAdapter;
|
||||
|
||||
impl GoDebugAdapter {
|
||||
const ADAPTER_NAME: &'static str = "delve";
|
||||
|
||||
pub(crate) async fn new(host: &TCPHost) -> Result<Self> {
|
||||
Ok(GoDebugAdapter {
|
||||
port: TcpTransport::port(host).await?,
|
||||
host: host.host(),
|
||||
timeout: host.timeout,
|
||||
})
|
||||
}
|
||||
const ADAPTER_NAME: &'static str = "Delve";
|
||||
}
|
||||
|
||||
#[async_trait(?Send)]
|
||||
@@ -73,28 +63,39 @@ impl DebugAdapter for GoDebugAdapter {
|
||||
.and_then(|p| p.to_str().map(|p| p.to_string()))
|
||||
.ok_or(anyhow!("Dlv not found in path"))?;
|
||||
|
||||
let Some(tcp_connection) = config.tcp_connection.clone() else {
|
||||
bail!("Go Debug Adapter expects tcp connection arguments to be provided");
|
||||
};
|
||||
let (host, port, timeout) = crate::configure_tcp_connection(tcp_connection).await?;
|
||||
|
||||
Ok(DebugAdapterBinary {
|
||||
command: delve_path,
|
||||
arguments: Some(vec![
|
||||
"dap".into(),
|
||||
"--listen".into(),
|
||||
format!("{}:{}", self.host, self.port).into(),
|
||||
format!("{}:{}", host, port).into(),
|
||||
]),
|
||||
cwd: config.cwd.clone(),
|
||||
cwd: None,
|
||||
envs: None,
|
||||
connection: Some(adapters::TcpArguments {
|
||||
host: self.host,
|
||||
port: self.port,
|
||||
timeout: self.timeout,
|
||||
host,
|
||||
port,
|
||||
timeout,
|
||||
}),
|
||||
})
|
||||
}
|
||||
|
||||
fn request_args(&self, config: &DebugAdapterConfig) -> Value {
|
||||
json!({
|
||||
"program": config.program,
|
||||
"cwd": config.cwd,
|
||||
"subProcess": true,
|
||||
})
|
||||
fn request_args(&self, config: &DebugTaskDefinition) -> Value {
|
||||
match &config.request {
|
||||
dap::DebugRequestType::Attach(attach_config) => {
|
||||
json!({
|
||||
"processId": attach_config.process_id
|
||||
})
|
||||
}
|
||||
dap::DebugRequestType::Launch(launch_config) => json!({
|
||||
"program": launch_config.program,
|
||||
"cwd": launch_config.cwd,
|
||||
}),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -1,39 +1,28 @@
use adapters::latest_github_release;
use dap::transport::TcpTransport;
use gpui::AsyncApp;
use regex::Regex;
use std::{collections::HashMap, net::Ipv4Addr, path::PathBuf};
use sysinfo::{Pid, Process};
use task::DebugRequestType;
use std::path::PathBuf;
use task::{DebugRequestType, DebugTaskDefinition};

use crate::*;

#[derive(Debug)]
pub(crate) struct JsDebugAdapter {
    port: u16,
    host: Ipv4Addr,
    timeout: Option<u64>,
    attach_processes: Regex,
}

impl Default for JsDebugAdapter {
    fn default() -> Self {
        Self {
            attach_processes: Regex::new(r"(?i)^(?:node|bun|iojs)(?:$|\b)")
                .expect("Regex compilation to succeed"),
        }
    }
}
impl JsDebugAdapter {
    const ADAPTER_NAME: &'static str = "vscode-js-debug";
    const ADAPTER_NAME: &'static str = "JavaScript";
    const ADAPTER_NPM_NAME: &'static str = "vscode-js-debug";
    const ADAPTER_PATH: &'static str = "js-debug/src/dapDebugServer.js";

    pub(crate) async fn new(host: TCPHost) -> Result<Self> {
        Ok(JsDebugAdapter {
            host: host.host(),
            timeout: host.timeout,
            port: TcpTransport::port(&host).await?,
        })
    }

    pub fn attach_processes(processes: &HashMap<Pid, Process>) -> Vec<(&Pid, &Process)> {
        let regex = Regex::new(r"(?i)^(?:node|bun|iojs)(?:$|\b)").unwrap();

        processes
            .iter()
            .filter(|(_, process)| regex.is_match(&process.name().to_string_lossy()))
            .collect::<Vec<_>>()
    }
}

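The attach filter is now a compiled regex stored on the adapter. The pattern above keeps only Node-like process names, while adapters that don't override `attach_processes_filter` fall back to `EMPTY_REGEX`, whose empty pattern matches every name. A quick, self-contained check of what the pattern accepts:

```rust
use regex::Regex;

fn main() {
    // The same pattern the JavaScript adapter compiles in Default::default().
    let js_filter = Regex::new(r"(?i)^(?:node|bun|iojs)(?:$|\b)").unwrap();

    assert!(js_filter.is_match("node"));    // exact name
    assert!(js_filter.is_match("NODE"));    // (?i) makes it case-insensitive
    assert!(js_filter.is_match("bun"));
    assert!(!js_filter.is_match("nodejs")); // no word boundary after "node"
    assert!(!js_filter.is_match("python3"));

    // The trait's default filter is an empty pattern, which matches any name,
    // so adapters that don't override it keep every process in the list.
    let default_filter = Regex::new("").unwrap();
    assert!(default_filter.is_match("any-process-name"));
}
```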
||||
#[async_trait(?Send)]
|
||||
@@ -47,7 +36,7 @@ impl DebugAdapter for JsDebugAdapter {
|
||||
delegate: &dyn DapDelegate,
|
||||
) -> Result<AdapterVersion> {
|
||||
let release = latest_github_release(
|
||||
&format!("{}/{}", "microsoft", Self::ADAPTER_NAME),
|
||||
&format!("{}/{}", "microsoft", Self::ADAPTER_NPM_NAME),
|
||||
true,
|
||||
false,
|
||||
delegate.http_client(),
|
||||
@@ -78,7 +67,7 @@ impl DebugAdapter for JsDebugAdapter {
|
||||
let adapter_path = if let Some(user_installed_path) = user_installed_path {
|
||||
user_installed_path
|
||||
} else {
|
||||
let adapter_path = paths::debug_adapters_dir().join(self.name());
|
||||
let adapter_path = paths::debug_adapters_dir().join(self.name().as_ref());
|
||||
|
||||
let file_name_prefix = format!("{}_", self.name());
|
||||
|
||||
@@ -89,6 +78,13 @@ impl DebugAdapter for JsDebugAdapter {
|
||||
.ok_or_else(|| anyhow!("Couldn't find JavaScript dap directory"))?
|
||||
};
|
||||
|
||||
let Some(tcp_connection) = config.tcp_connection.clone() else {
|
||||
anyhow::bail!(
|
||||
"Javascript Debug Adapter expects tcp connection arguments to be provided"
|
||||
);
|
||||
};
|
||||
let (host, port, timeout) = crate::configure_tcp_connection(tcp_connection).await?;
|
||||
|
||||
Ok(DebugAdapterBinary {
|
||||
command: delegate
|
||||
.node_runtime()
|
||||
@@ -98,15 +94,15 @@ impl DebugAdapter for JsDebugAdapter {
|
||||
.into_owned(),
|
||||
arguments: Some(vec![
|
||||
adapter_path.join(Self::ADAPTER_PATH).into(),
|
||||
self.port.to_string().into(),
|
||||
self.host.to_string().into(),
|
||||
port.to_string().into(),
|
||||
host.to_string().into(),
|
||||
]),
|
||||
cwd: config.cwd.clone(),
|
||||
cwd: None,
|
||||
envs: None,
|
||||
connection: Some(adapters::TcpArguments {
|
||||
host: self.host,
|
||||
port: self.port,
|
||||
timeout: self.timeout,
|
||||
host,
|
||||
port,
|
||||
timeout,
|
||||
}),
|
||||
})
|
||||
}
|
||||
@@ -127,22 +123,35 @@ impl DebugAdapter for JsDebugAdapter {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
fn request_args(&self, config: &DebugAdapterConfig) -> Value {
|
||||
let pid = if let DebugRequestType::Attach(attach_config) = &config.request {
|
||||
attach_config.process_id
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
json!({
|
||||
"program": config.program,
|
||||
fn request_args(&self, config: &DebugTaskDefinition) -> Value {
|
||||
let mut args = json!({
|
||||
"type": "pwa-node",
|
||||
"request": match config.request {
|
||||
DebugRequestType::Launch => "launch",
|
||||
DebugRequestType::Launch(_) => "launch",
|
||||
DebugRequestType::Attach(_) => "attach",
|
||||
},
|
||||
"processId": pid,
|
||||
"cwd": config.cwd,
|
||||
})
|
||||
});
|
||||
let map = args.as_object_mut().unwrap();
|
||||
match &config.request {
|
||||
DebugRequestType::Attach(attach) => {
|
||||
map.insert("processId".into(), attach.process_id.into());
|
||||
}
|
||||
DebugRequestType::Launch(launch) => {
|
||||
map.insert("program".into(), launch.program.clone().into());
|
||||
map.insert(
|
||||
"cwd".into(),
|
||||
launch
|
||||
.cwd
|
||||
.as_ref()
|
||||
.map(|s| s.to_string_lossy().into_owned())
|
||||
.into(),
|
||||
);
|
||||
}
|
||||
}
|
||||
args
|
||||
}
|
||||
|
||||
fn attach_processes_filter(&self) -> Regex {
|
||||
self.attach_processes.clone()
|
||||
}
|
||||
}
|
||||
|
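Both the JavaScript and LLDB adapters now build a base `json!` object and then insert request-specific keys through `as_object_mut`. A small self-contained sketch of that mutation pattern (the keys and values are illustrative):

```rust
use serde_json::json;

fn main() {
    // Base payload shared by both request kinds.
    let mut args = json!({
        "type": "pwa-node",
        "request": "attach",
    });

    // json!({..}) always yields an object here, so as_object_mut() is safe to
    // unwrap, matching the pattern used in the adapters above.
    let map = args.as_object_mut().unwrap();
    map.insert("processId".into(), json!(1234));

    assert_eq!(args["processId"], json!(1234));
}
```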
||||
@@ -1,25 +1,17 @@
|
||||
use std::{collections::HashMap, ffi::OsStr, path::PathBuf};
|
||||
use std::{ffi::OsStr, path::PathBuf};
|
||||
|
||||
use anyhow::Result;
|
||||
use async_trait::async_trait;
|
||||
use gpui::AsyncApp;
|
||||
use sysinfo::{Pid, Process};
|
||||
use task::{DebugAdapterConfig, DebugRequestType};
|
||||
use task::{DebugAdapterConfig, DebugRequestType, DebugTaskDefinition};
|
||||
|
||||
use crate::*;
|
||||
|
||||
pub(crate) struct LldbDebugAdapter {}
|
||||
#[derive(Default)]
|
||||
pub(crate) struct LldbDebugAdapter;
|
||||
|
||||
impl LldbDebugAdapter {
|
||||
const ADAPTER_NAME: &'static str = "lldb";
|
||||
|
||||
pub(crate) fn new() -> Self {
|
||||
LldbDebugAdapter {}
|
||||
}
|
||||
|
||||
pub fn attach_processes(processes: &HashMap<Pid, Process>) -> Vec<(&Pid, &Process)> {
|
||||
processes.iter().collect::<Vec<_>>()
|
||||
}
|
||||
const ADAPTER_NAME: &'static str = "LLDB";
|
||||
}
|
||||
|
||||
#[async_trait(?Send)]
|
||||
@@ -31,7 +23,7 @@ impl DebugAdapter for LldbDebugAdapter {
|
||||
async fn get_binary(
|
||||
&self,
|
||||
delegate: &dyn DapDelegate,
|
||||
config: &DebugAdapterConfig,
|
||||
_: &DebugAdapterConfig,
|
||||
user_installed_path: Option<PathBuf>,
|
||||
_: &mut AsyncApp,
|
||||
) -> Result<DebugAdapterBinary> {
|
||||
@@ -48,7 +40,7 @@ impl DebugAdapter for LldbDebugAdapter {
|
||||
command: lldb_dap_path,
|
||||
arguments: None,
|
||||
envs: None,
|
||||
cwd: config.cwd.clone(),
|
||||
cwd: None,
|
||||
connection: None,
|
||||
})
|
||||
}
|
||||
@@ -75,21 +67,30 @@ impl DebugAdapter for LldbDebugAdapter {
|
||||
unimplemented!("LLDB debug adapter cannot be installed by Zed (yet)")
|
||||
}
|
||||
|
||||
fn request_args(&self, config: &DebugAdapterConfig) -> Value {
|
||||
let pid = if let DebugRequestType::Attach(attach_config) = &config.request {
|
||||
attach_config.process_id
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
json!({
|
||||
"program": config.program,
|
||||
fn request_args(&self, config: &DebugTaskDefinition) -> Value {
|
||||
let mut args = json!({
|
||||
"request": match config.request {
|
||||
DebugRequestType::Launch => "launch",
|
||||
DebugRequestType::Launch(_) => "launch",
|
||||
DebugRequestType::Attach(_) => "attach",
|
||||
},
|
||||
"pid": pid,
|
||||
"cwd": config.cwd,
|
||||
})
|
||||
});
|
||||
let map = args.as_object_mut().unwrap();
|
||||
match &config.request {
|
||||
DebugRequestType::Attach(attach) => {
|
||||
map.insert("pid".into(), attach.process_id.into());
|
||||
}
|
||||
DebugRequestType::Launch(launch) => {
|
||||
map.insert("program".into(), launch.program.clone().into());
|
||||
map.insert(
|
||||
"cwd".into(),
|
||||
launch
|
||||
.cwd
|
||||
.as_ref()
|
||||
.map(|s| s.to_string_lossy().into_owned())
|
||||
.into(),
|
||||
);
|
||||
}
|
||||
}
|
||||
args
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,27 +1,19 @@
|
||||
use adapters::latest_github_release;
|
||||
use dap::{adapters::TcpArguments, transport::TcpTransport};
|
||||
use anyhow::bail;
|
||||
use dap::adapters::TcpArguments;
|
||||
use gpui::AsyncApp;
|
||||
use std::{net::Ipv4Addr, path::PathBuf};
|
||||
use std::path::PathBuf;
|
||||
use task::DebugTaskDefinition;
|
||||
|
||||
use crate::*;
|
||||
|
||||
pub(crate) struct PhpDebugAdapter {
|
||||
port: u16,
|
||||
host: Ipv4Addr,
|
||||
timeout: Option<u64>,
|
||||
}
|
||||
#[derive(Default)]
|
||||
pub(crate) struct PhpDebugAdapter;
|
||||
|
||||
impl PhpDebugAdapter {
|
||||
const ADAPTER_NAME: &'static str = "vscode-php-debug";
|
||||
const ADAPTER_NAME: &'static str = "PHP";
|
||||
const ADAPTER_PACKAGE_NAME: &'static str = "vscode-php-debug";
|
||||
const ADAPTER_PATH: &'static str = "extension/out/phpDebug.js";
|
||||
|
||||
pub(crate) async fn new(host: TCPHost) -> Result<Self> {
|
||||
Ok(PhpDebugAdapter {
|
||||
port: TcpTransport::port(&host).await?,
|
||||
host: host.host(),
|
||||
timeout: host.timeout,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait(?Send)]
|
||||
@@ -35,7 +27,7 @@ impl DebugAdapter for PhpDebugAdapter {
|
||||
delegate: &dyn DapDelegate,
|
||||
) -> Result<AdapterVersion> {
|
||||
let release = latest_github_release(
|
||||
&format!("{}/{}", "xdebug", Self::ADAPTER_NAME),
|
||||
&format!("{}/{}", "xdebug", Self::ADAPTER_PACKAGE_NAME),
|
||||
true,
|
||||
false,
|
||||
delegate.http_client(),
|
||||
@@ -66,7 +58,7 @@ impl DebugAdapter for PhpDebugAdapter {
|
||||
let adapter_path = if let Some(user_installed_path) = user_installed_path {
|
||||
user_installed_path
|
||||
} else {
|
||||
let adapter_path = paths::debug_adapters_dir().join(self.name());
|
||||
let adapter_path = paths::debug_adapters_dir().join(self.name().as_ref());
|
||||
|
||||
let file_name_prefix = format!("{}_", self.name());
|
||||
|
||||
@@ -77,6 +69,11 @@ impl DebugAdapter for PhpDebugAdapter {
|
||||
.ok_or_else(|| anyhow!("Couldn't find PHP dap directory"))?
|
||||
};
|
||||
|
||||
let Some(tcp_connection) = config.tcp_connection.clone() else {
|
||||
bail!("PHP Debug Adapter expects tcp connection arguments to be provided");
|
||||
};
|
||||
let (host, port, timeout) = crate::configure_tcp_connection(tcp_connection).await?;
|
||||
|
||||
Ok(DebugAdapterBinary {
|
||||
command: delegate
|
||||
.node_runtime()
|
||||
@@ -86,14 +83,14 @@ impl DebugAdapter for PhpDebugAdapter {
|
||||
.into_owned(),
|
||||
arguments: Some(vec![
|
||||
adapter_path.join(Self::ADAPTER_PATH).into(),
|
||||
format!("--server={}", self.port).into(),
|
||||
format!("--server={}", port).into(),
|
||||
]),
|
||||
connection: Some(TcpArguments {
|
||||
port: self.port,
|
||||
host: self.host,
|
||||
timeout: self.timeout,
|
||||
port,
|
||||
host,
|
||||
timeout,
|
||||
}),
|
||||
cwd: config.cwd.clone(),
|
||||
cwd: None,
|
||||
envs: None,
|
||||
})
|
||||
}
|
||||
@@ -114,10 +111,18 @@ impl DebugAdapter for PhpDebugAdapter {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn request_args(&self, config: &DebugAdapterConfig) -> Value {
|
||||
json!({
|
||||
"program": config.program,
|
||||
"cwd": config.cwd,
|
||||
})
|
||||
fn request_args(&self, config: &DebugTaskDefinition) -> Value {
|
||||
match &config.request {
|
||||
dap::DebugRequestType::Attach(_) => {
|
||||
// php adapter does not support attaching
|
||||
json!({})
|
||||
}
|
||||
dap::DebugRequestType::Launch(launch_config) => {
|
||||
json!({
|
||||
"program": launch_config.program,
|
||||
"cwd": launch_config.cwd,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,26 +1,18 @@
|
||||
use crate::*;
|
||||
use dap::transport::TcpTransport;
|
||||
use anyhow::bail;
|
||||
use dap::DebugRequestType;
|
||||
use gpui::AsyncApp;
|
||||
use std::{ffi::OsStr, net::Ipv4Addr, path::PathBuf};
|
||||
use std::{ffi::OsStr, path::PathBuf};
|
||||
use task::DebugTaskDefinition;
|
||||
|
||||
pub(crate) struct PythonDebugAdapter {
|
||||
port: u16,
|
||||
host: Ipv4Addr,
|
||||
timeout: Option<u64>,
|
||||
}
|
||||
#[derive(Default)]
|
||||
pub(crate) struct PythonDebugAdapter;
|
||||
|
||||
impl PythonDebugAdapter {
|
||||
const ADAPTER_NAME: &'static str = "debugpy";
|
||||
const ADAPTER_NAME: &'static str = "Debugpy";
|
||||
const ADAPTER_PACKAGE_NAME: &'static str = "debugpy";
|
||||
const ADAPTER_PATH: &'static str = "src/debugpy/adapter";
|
||||
const LANGUAGE_NAME: &'static str = "Python";
|
||||
|
||||
pub(crate) async fn new(host: &TCPHost) -> Result<Self> {
|
||||
Ok(PythonDebugAdapter {
|
||||
port: TcpTransport::port(host).await?,
|
||||
host: host.host(),
|
||||
timeout: host.timeout,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait(?Send)]
|
||||
@@ -34,7 +26,7 @@ impl DebugAdapter for PythonDebugAdapter {
|
||||
delegate: &dyn DapDelegate,
|
||||
) -> Result<AdapterVersion> {
|
||||
let github_repo = GithubRepo {
|
||||
repo_name: Self::ADAPTER_NAME.into(),
|
||||
repo_name: Self::ADAPTER_PACKAGE_NAME.into(),
|
||||
repo_owner: "microsoft".into(),
|
||||
};
|
||||
|
||||
@@ -78,12 +70,16 @@ impl DebugAdapter for PythonDebugAdapter {
|
||||
cx: &mut AsyncApp,
|
||||
) -> Result<DebugAdapterBinary> {
|
||||
const BINARY_NAMES: [&str; 3] = ["python3", "python", "py"];
|
||||
let Some(tcp_connection) = config.tcp_connection.clone() else {
|
||||
bail!("Python Debug Adapter expects tcp connection arguments to be provided");
|
||||
};
|
||||
let (host, port, timeout) = crate::configure_tcp_connection(tcp_connection).await?;
|
||||
|
||||
let debugpy_dir = if let Some(user_installed_path) = user_installed_path {
|
||||
user_installed_path
|
||||
} else {
|
||||
let adapter_path = paths::debug_adapters_dir().join(self.name());
|
||||
let file_name_prefix = format!("{}_", self.name());
|
||||
let adapter_path = paths::debug_adapters_dir().join(self.name().as_ref());
|
||||
let file_name_prefix = format!("{}_", Self::ADAPTER_PACKAGE_NAME);
|
||||
|
||||
util::fs::find_file_name_in_dir(adapter_path.as_path(), |file_name| {
|
||||
file_name.starts_with(&file_name_prefix)
|
||||
@@ -118,25 +114,36 @@ impl DebugAdapter for PythonDebugAdapter {
|
||||
command: python_path.ok_or(anyhow!("failed to find binary path for python"))?,
|
||||
arguments: Some(vec![
|
||||
debugpy_dir.join(Self::ADAPTER_PATH).into(),
|
||||
format!("--port={}", self.port).into(),
|
||||
format!("--host={}", self.host).into(),
|
||||
format!("--port={}", port).into(),
|
||||
format!("--host={}", host).into(),
|
||||
]),
|
||||
connection: Some(adapters::TcpArguments {
|
||||
host: self.host,
|
||||
port: self.port,
|
||||
timeout: self.timeout,
|
||||
host,
|
||||
port,
|
||||
timeout,
|
||||
}),
|
||||
cwd: config.cwd.clone(),
|
||||
cwd: None,
|
||||
envs: None,
|
||||
})
|
||||
}
|
||||
|
||||
fn request_args(&self, config: &DebugAdapterConfig) -> Value {
|
||||
json!({
|
||||
"program": config.program,
|
||||
"subProcess": true,
|
||||
"cwd": config.cwd,
|
||||
"redirectOutput": true,
|
||||
})
|
||||
fn request_args(&self, config: &DebugTaskDefinition) -> Value {
|
||||
match &config.request {
|
||||
DebugRequestType::Launch(launch_config) => {
|
||||
json!({
|
||||
"program": launch_config.program,
|
||||
"subProcess": true,
|
||||
"cwd": launch_config.cwd,
|
||||
"redirectOutput": true,
|
||||
})
|
||||
}
|
||||
dap::DebugRequestType::Attach(attach_config) => {
|
||||
json!({
|
||||
"subProcess": true,
|
||||
"redirectOutput": true,
|
||||
"processId": attach_config.process_id
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -8,6 +8,10 @@ license = "GPL-3.0-or-later"
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
[lib]
|
||||
path = "src/debugger_ui.rs"
|
||||
doctest = false
|
||||
|
||||
[features]
|
||||
test-support = [
|
||||
"dap/test-support",
|
||||
|
||||
@@ -3,8 +3,8 @@ use fuzzy::{StringMatch, StringMatchCandidate};
|
||||
use gpui::Subscription;
|
||||
use gpui::{DismissEvent, Entity, EventEmitter, Focusable, Render};
|
||||
use picker::{Picker, PickerDelegate};
|
||||
use project::debugger::attach_processes;
|
||||
|
||||
use std::cell::LazyCell;
|
||||
use std::sync::Arc;
|
||||
use sysinfo::System;
|
||||
use ui::{prelude::*, Context, Tooltip};
|
||||
@@ -13,10 +13,10 @@ use util::debug_panic;
|
||||
use workspace::ModalView;
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
struct Candidate {
|
||||
pid: u32,
|
||||
name: String,
|
||||
command: Vec<String>,
|
||||
pub(super) struct Candidate {
|
||||
pub(super) pid: u32,
|
||||
pub(super) name: SharedString,
|
||||
pub(super) command: Vec<String>,
|
||||
}
|
||||
|
||||
pub(crate) struct AttachModalDelegate {
|
||||
@@ -24,16 +24,20 @@ pub(crate) struct AttachModalDelegate {
|
||||
matches: Vec<StringMatch>,
|
||||
placeholder_text: Arc<str>,
|
||||
project: Entity<project::Project>,
|
||||
debug_config: task::DebugAdapterConfig,
|
||||
candidates: Option<Vec<Candidate>>,
|
||||
debug_config: task::DebugTaskDefinition,
|
||||
candidates: Arc<[Candidate]>,
|
||||
}
|
||||
|
||||
impl AttachModalDelegate {
|
||||
pub fn new(project: Entity<project::Project>, debug_config: task::DebugAdapterConfig) -> Self {
|
||||
fn new(
|
||||
project: Entity<project::Project>,
|
||||
debug_config: task::DebugTaskDefinition,
|
||||
candidates: Arc<[Candidate]>,
|
||||
) -> Self {
|
||||
Self {
|
||||
project,
|
||||
debug_config,
|
||||
candidates: None,
|
||||
candidates,
|
||||
selected_index: 0,
|
||||
matches: Vec::default(),
|
||||
placeholder_text: Arc::from("Select the process you want to attach the debugger to"),
|
||||
@@ -49,12 +53,56 @@ pub struct AttachModal {
|
||||
impl AttachModal {
|
||||
pub fn new(
|
||||
project: Entity<project::Project>,
|
||||
debug_config: task::DebugAdapterConfig,
|
||||
debug_config: task::DebugTaskDefinition,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
let mut processes: Vec<_> = System::new_all()
|
||||
.processes()
|
||||
.values()
|
||||
.map(|process| {
|
||||
let name = process.name().to_string_lossy().into_owned();
|
||||
Candidate {
|
||||
name: name.into(),
|
||||
pid: process.pid().as_u32(),
|
||||
command: process
|
||||
.cmd()
|
||||
.iter()
|
||||
.map(|s| s.to_string_lossy().to_string())
|
||||
.collect::<Vec<_>>(),
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
processes.sort_by_key(|k| k.name.clone());
|
||||
Self::with_processes(project, debug_config, processes, window, cx)
|
||||
}
|
||||
|
||||
pub(super) fn with_processes(
|
||||
project: Entity<project::Project>,
|
||||
debug_config: task::DebugTaskDefinition,
|
||||
processes: Vec<Candidate>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
let adapter = project
|
||||
.read(cx)
|
||||
.debug_adapters()
|
||||
.adapter(&debug_config.adapter);
|
||||
let filter = LazyCell::new(|| adapter.map(|adapter| adapter.attach_processes_filter()));
|
||||
let processes = processes
|
||||
.into_iter()
|
||||
.filter(|process| {
|
||||
filter
|
||||
.as_ref()
|
||||
.map_or(false, |filter| filter.is_match(&process.name))
|
||||
})
|
||||
.collect();
|
||||
let picker = cx.new(|cx| {
|
||||
Picker::uniform_list(AttachModalDelegate::new(project, debug_config), window, cx)
|
||||
Picker::uniform_list(
|
||||
AttachModalDelegate::new(project, debug_config, processes),
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
});
|
||||
Self {
|
||||
_subscription: cx.subscribe(&picker, |_, _, _, cx| {
|
||||
@@ -116,32 +164,7 @@ impl PickerDelegate for AttachModalDelegate {
|
||||
) -> gpui::Task<()> {
|
||||
cx.spawn(async move |this, cx| {
|
||||
let Some(processes) = this
|
||||
.update(cx, |this, _| {
|
||||
if let Some(processes) = this.delegate.candidates.clone() {
|
||||
processes
|
||||
} else {
|
||||
let system = System::new_all();
|
||||
|
||||
let processes =
|
||||
attach_processes(&this.delegate.debug_config.kind, &system.processes());
|
||||
let candidates = processes
|
||||
.into_iter()
|
||||
.map(|(pid, process)| Candidate {
|
||||
pid: pid.as_u32(),
|
||||
name: process.name().to_string_lossy().into_owned(),
|
||||
command: process
|
||||
.cmd()
|
||||
.iter()
|
||||
.map(|s| s.to_string_lossy().to_string())
|
||||
.collect::<Vec<_>>(),
|
||||
})
|
||||
.collect::<Vec<Candidate>>();
|
||||
|
||||
let _ = this.delegate.candidates.insert(candidates.clone());
|
||||
|
||||
candidates
|
||||
}
|
||||
})
|
||||
.update(cx, |this, _| this.delegate.candidates.clone())
|
||||
.ok()
|
||||
else {
|
||||
return;
|
||||
@@ -176,7 +199,6 @@ impl PickerDelegate for AttachModalDelegate {
|
||||
let delegate = &mut this.delegate;
|
||||
|
||||
delegate.matches = matches;
|
||||
delegate.candidates = Some(processes);
|
||||
|
||||
if delegate.matches.is_empty() {
|
||||
delegate.selected_index = 0;
|
||||
@@ -195,7 +217,7 @@ impl PickerDelegate for AttachModalDelegate {
|
||||
.get(self.selected_index())
|
||||
.and_then(|current_match| {
|
||||
let ix = current_match.candidate_id;
|
||||
self.candidates.as_ref().map(|candidates| &candidates[ix])
|
||||
self.candidates.get(ix)
|
||||
});
|
||||
|
||||
let Some(candidate) = candidate else {
|
||||
@@ -206,7 +228,7 @@ impl PickerDelegate for AttachModalDelegate {
|
||||
DebugRequestType::Attach(config) => {
|
||||
config.process_id = Some(candidate.pid);
|
||||
}
|
||||
DebugRequestType::Launch => {
|
||||
DebugRequestType::Launch(_) => {
|
||||
debug_panic!("Debugger attach modal used on launch debug config");
|
||||
return;
|
||||
}
|
||||
@@ -214,7 +236,13 @@ impl PickerDelegate for AttachModalDelegate {
|
||||
|
||||
let config = self.debug_config.clone();
|
||||
self.project
|
||||
.update(cx, |project, cx| project.start_debug_session(config, cx))
|
||||
.update(cx, |project, cx| {
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
let ret = project.fake_debug_session(config.request, None, false, cx);
|
||||
#[cfg(not(any(test, feature = "test-support")))]
|
||||
let ret = project.start_debug_session(config.into(), cx);
|
||||
ret
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
|
||||
cx.emit(DismissEvent);
|
||||
@@ -222,7 +250,6 @@ impl PickerDelegate for AttachModalDelegate {
|
||||
|
||||
fn dismissed(&mut self, _window: &mut Window, cx: &mut Context<Picker<Self>>) {
|
||||
self.selected_index = 0;
|
||||
self.candidates.take();
|
||||
|
||||
cx.emit(DismissEvent);
|
||||
}
|
||||
@@ -234,9 +261,8 @@ impl PickerDelegate for AttachModalDelegate {
|
||||
_window: &mut Window,
|
||||
_: &mut Context<Picker<Self>>,
|
||||
) -> Option<Self::ListItem> {
|
||||
let candidates = self.candidates.as_ref()?;
|
||||
let hit = &self.matches[ix];
|
||||
let candidate = &candidates.get(hit.candidate_id)?;
|
||||
let candidate = self.candidates.get(hit.candidate_id)?;
|
||||
|
||||
Some(
|
||||
ListItem::new(SharedString::from(format!("process-entry-{ix}")))
|
||||
@@ -279,9 +305,8 @@ impl PickerDelegate for AttachModalDelegate {
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(dead_code)]
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
pub(crate) fn process_names(modal: &AttachModal, cx: &mut Context<AttachModal>) -> Vec<String> {
|
||||
pub(crate) fn _process_names(modal: &AttachModal, cx: &mut Context<AttachModal>) -> Vec<String> {
|
||||
modal.picker.update(cx, |picker, _| {
|
||||
picker
|
||||
.delegate
|
||||
|
||||
@@ -3,8 +3,8 @@ use anyhow::{anyhow, Result};
|
||||
use collections::HashMap;
|
||||
use command_palette_hooks::CommandPaletteFilter;
|
||||
use dap::{
|
||||
client::SessionId, debugger_settings::DebuggerSettings, ContinuedEvent, DebugAdapterConfig,
|
||||
LoadedSourceEvent, ModuleEvent, OutputEvent, StoppedEvent, ThreadEvent,
|
||||
client::SessionId, debugger_settings::DebuggerSettings, ContinuedEvent, LoadedSourceEvent,
|
||||
ModuleEvent, OutputEvent, StoppedEvent, ThreadEvent,
|
||||
};
|
||||
use futures::{channel::mpsc, SinkExt as _};
|
||||
use gpui::{
|
||||
@@ -19,6 +19,7 @@ use project::{
|
||||
use rpc::proto::{self};
|
||||
use settings::Settings;
|
||||
use std::{any::TypeId, path::PathBuf};
|
||||
use task::DebugTaskDefinition;
|
||||
use terminal_view::terminal_panel::TerminalPanel;
|
||||
use ui::prelude::*;
|
||||
use util::ResultExt;
|
||||
@@ -52,7 +53,7 @@ pub struct DebugPanel {
|
||||
project: WeakEntity<Project>,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
_subscriptions: Vec<Subscription>,
|
||||
pub(crate) last_inert_config: Option<DebugAdapterConfig>,
|
||||
pub(crate) last_inert_config: Option<DebugTaskDefinition>,
|
||||
}
|
||||
|
||||
impl DebugPanel {
|
||||
|
||||
@@ -6,7 +6,6 @@ mod starting;
|
||||
use std::time::Duration;
|
||||
|
||||
use dap::client::SessionId;
|
||||
use dap::DebugAdapterConfig;
|
||||
use failed::FailedState;
|
||||
use gpui::{
|
||||
percentage, Animation, AnimationExt, AnyElement, App, Entity, EventEmitter, FocusHandle,
|
||||
@@ -19,6 +18,7 @@ use project::Project;
|
||||
use rpc::proto::{self, PeerId};
|
||||
use running::RunningState;
|
||||
use starting::{StartingEvent, StartingState};
|
||||
use task::DebugTaskDefinition;
|
||||
use ui::{prelude::*, Indicator};
|
||||
use util::ResultExt;
|
||||
use workspace::{
|
||||
@@ -73,7 +73,7 @@ impl DebugSession {
|
||||
project: Entity<Project>,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
debug_panel: WeakEntity<DebugPanel>,
|
||||
config: Option<DebugAdapterConfig>,
|
||||
config: Option<DebugTaskDefinition>,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) -> Entity<Self> {
|
||||
@@ -171,7 +171,7 @@ impl DebugSession {
|
||||
.flatten()
|
||||
.expect("worktree-less project");
|
||||
let Ok((new_session_id, task)) = dap_store.update(cx, |store, cx| {
|
||||
store.new_session(config, &worktree, None, cx)
|
||||
store.new_session(config.into(), &worktree, None, cx)
|
||||
}) else {
|
||||
return;
|
||||
};
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
use std::path::PathBuf;
|
||||
|
||||
use dap::{DebugAdapterConfig, DebugAdapterKind, DebugRequestType};
|
||||
use dap::DebugRequestType;
|
||||
use editor::{Editor, EditorElement, EditorStyle};
|
||||
use gpui::{App, AppContext, Entity, EventEmitter, FocusHandle, Focusable, TextStyle, WeakEntity};
|
||||
use settings::Settings as _;
|
||||
use task::TCPHost;
|
||||
use task::{DebugTaskDefinition, LaunchConfig, TCPHost};
|
||||
use theme::ThemeSettings;
|
||||
use ui::{
|
||||
div, h_flex, relative, v_flex, ActiveTheme as _, ButtonCommon, ButtonLike, Clickable, Context,
|
||||
@@ -35,7 +35,7 @@ impl SpawnMode {
|
||||
impl From<DebugRequestType> for SpawnMode {
|
||||
fn from(request: DebugRequestType) -> Self {
|
||||
match request {
|
||||
DebugRequestType::Launch => SpawnMode::Launch,
|
||||
DebugRequestType::Launch(_) => SpawnMode::Launch,
|
||||
DebugRequestType::Attach(_) => SpawnMode::Attach,
|
||||
}
|
||||
}
|
||||
@@ -55,18 +55,13 @@ impl InertState {
|
||||
pub(super) fn new(
|
||||
workspace: WeakEntity<Workspace>,
|
||||
default_cwd: &str,
|
||||
debug_config: Option<DebugAdapterConfig>,
|
||||
debug_config: Option<DebugTaskDefinition>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
let selected_debugger = debug_config.as_ref().and_then(|config| match config.kind {
|
||||
DebugAdapterKind::Lldb => Some("LLDB".into()),
|
||||
DebugAdapterKind::Go(_) => Some("Delve".into()),
|
||||
DebugAdapterKind::Php(_) => Some("PHP".into()),
|
||||
DebugAdapterKind::Javascript(_) => Some("JavaScript".into()),
|
||||
DebugAdapterKind::Python(_) => Some("Debugpy".into()),
|
||||
_ => None,
|
||||
});
|
||||
let selected_debugger = debug_config
|
||||
.as_ref()
|
||||
.map(|config| SharedString::from(config.adapter.clone()));
|
||||
|
||||
let spawn_mode = debug_config
|
||||
.as_ref()
|
||||
@@ -75,7 +70,10 @@ impl InertState {
|
||||
|
||||
let program = debug_config
|
||||
.as_ref()
|
||||
.and_then(|config| config.program.to_owned());
|
||||
.and_then(|config| match &config.request {
|
||||
DebugRequestType::Attach(_) => None,
|
||||
DebugRequestType::Launch(launch_config) => Some(launch_config.program.clone()),
|
||||
});
|
||||
|
||||
let program_editor = cx.new(|cx| {
|
||||
let mut editor = Editor::single_line(window, cx);
|
||||
@@ -88,7 +86,10 @@ impl InertState {
|
||||
});
|
||||
|
||||
let cwd = debug_config
|
||||
.and_then(|config| config.cwd.map(|cwd| cwd.to_owned()))
|
||||
.and_then(|config| match &config.request {
|
||||
DebugRequestType::Attach(_) => None,
|
||||
DebugRequestType::Launch(launch_config) => launch_config.cwd.clone(),
|
||||
})
|
||||
.unwrap_or_else(|| PathBuf::from(default_cwd));
|
||||
|
||||
let cwd_editor = cx.new(|cx| {
|
||||
@@ -116,7 +117,7 @@ impl Focusable for InertState {
|
||||
}
|
||||
|
||||
pub(crate) enum InertEvent {
|
||||
Spawned { config: DebugAdapterConfig },
|
||||
Spawned { config: DebugTaskDefinition },
|
||||
}
|
||||
|
||||
impl EventEmitter<InertEvent> for InertState {}
|
||||
@@ -130,6 +131,7 @@ impl Render for InertState {
|
||||
cx: &mut ui::Context<'_, Self>,
|
||||
) -> impl ui::IntoElement {
|
||||
let weak = cx.weak_entity();
|
||||
let workspace = self.workspace.clone();
|
||||
let disable_buttons = self.selected_debugger.is_none();
|
||||
let spawn_button = ButtonLike::new_rounded_left("spawn-debug-session")
|
||||
.child(Label::new(self.spawn_mode.label()).size(LabelSize::Small))
|
||||
@@ -137,21 +139,26 @@ impl Render for InertState {
|
||||
if this.spawn_mode == SpawnMode::Launch {
|
||||
let program = this.program_editor.read(cx).text(cx);
|
||||
let cwd = PathBuf::from(this.cwd_editor.read(cx).text(cx));
|
||||
let kind =
|
||||
kind_for_label(this.selected_debugger.as_deref().unwrap_or_else(|| {
|
||||
let kind = this
|
||||
.selected_debugger
|
||||
.as_deref()
|
||||
.unwrap_or_else(|| {
|
||||
unimplemented!(
|
||||
"Automatic selection of a debugger based on users project"
|
||||
)
|
||||
}));
|
||||
})
|
||||
.to_string();
|
||||
|
||||
cx.emit(InertEvent::Spawned {
|
||||
config: DebugAdapterConfig {
|
||||
config: DebugTaskDefinition {
|
||||
label: "hard coded".into(),
|
||||
kind,
|
||||
request: DebugRequestType::Launch,
|
||||
program: Some(program),
|
||||
cwd: Some(cwd),
|
||||
adapter: kind,
|
||||
request: DebugRequestType::Launch(LaunchConfig {
|
||||
program,
|
||||
cwd: Some(cwd),
|
||||
}),
|
||||
tcp_connection: Some(TCPHost::default()),
|
||||
initialize_args: None,
|
||||
supports_attach: false,
|
||||
},
|
||||
});
|
||||
} else {
|
||||
@@ -159,6 +166,7 @@ impl Render for InertState {
|
||||
}
|
||||
}))
|
||||
.disabled(disable_buttons);
|
||||
|
||||
v_flex()
|
||||
.track_focus(&self.focus_handle)
|
||||
.size_full()
|
||||
@@ -179,28 +187,36 @@ impl Render for InertState {
|
||||
.as_ref()
|
||||
.unwrap_or_else(|| &SELECT_DEBUGGER_LABEL)
|
||||
.clone(),
|
||||
ContextMenu::build(window, cx, move |this, _, _| {
|
||||
let setter_for_name = |name: &'static str| {
|
||||
ContextMenu::build(window, cx, move |mut this, _, cx| {
|
||||
let setter_for_name = |name: SharedString| {
|
||||
let weak = weak.clone();
|
||||
move |_: &mut Window, cx: &mut App| {
|
||||
let name = name;
|
||||
(&weak)
|
||||
.update(cx, move |this, _| {
|
||||
this.selected_debugger = Some(name.into());
|
||||
})
|
||||
.ok();
|
||||
let name = name.clone();
|
||||
weak.update(cx, move |this, cx| {
|
||||
this.selected_debugger = Some(name.clone());
|
||||
cx.notify();
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
};
|
||||
this.entry("GDB", None, setter_for_name("GDB"))
|
||||
.entry("Delve", None, setter_for_name("Delve"))
|
||||
.entry("LLDB", None, setter_for_name("LLDB"))
|
||||
.entry("PHP", None, setter_for_name("PHP"))
|
||||
.entry(
|
||||
"JavaScript",
|
||||
let available_adapters = workspace
|
||||
.update(cx, |this, cx| {
|
||||
this.project()
|
||||
.read(cx)
|
||||
.debug_adapters()
|
||||
.enumerate_adapters()
|
||||
})
|
||||
.ok()
|
||||
.unwrap_or_default();
|
||||
|
||||
for adapter in available_adapters {
|
||||
this = this.entry(
|
||||
adapter.0.clone(),
|
||||
None,
|
||||
setter_for_name("JavaScript"),
|
||||
)
|
||||
.entry("Debugpy", None, setter_for_name("Debugpy"))
|
||||
setter_for_name(adapter.0.clone()),
|
||||
);
|
||||
}
|
||||
this
|
||||
}),
|
||||
)),
|
||||
),
|
||||
@@ -265,18 +281,6 @@ impl Render for InertState {
|
||||
}
|
||||
}
|
||||
|
||||
fn kind_for_label(label: &str) -> DebugAdapterKind {
|
||||
match label {
|
||||
"LLDB" => DebugAdapterKind::Lldb,
|
||||
"Debugpy" => DebugAdapterKind::Python(TCPHost::default()),
|
||||
"JavaScript" => DebugAdapterKind::Javascript(TCPHost::default()),
|
||||
"PHP" => DebugAdapterKind::Php(TCPHost::default()),
|
||||
"Delve" => DebugAdapterKind::Go(TCPHost::default()),
|
||||
_ => {
|
||||
unimplemented!()
|
||||
} // Maybe we should set a toast notification here
|
||||
}
|
||||
}
|
||||
impl InertState {
|
||||
fn render_editor(editor: &Entity<Editor>, cx: &Context<Self>) -> impl IntoElement {
|
||||
let settings = ThemeSettings::get_global(cx);
|
||||
@@ -302,19 +306,20 @@ impl InertState {
|
||||
}
|
||||
|
||||
fn attach(&self, window: &mut Window, cx: &mut Context<Self>) {
|
||||
let cwd = PathBuf::from(self.cwd_editor.read(cx).text(cx));
|
||||
let kind = kind_for_label(self.selected_debugger.as_deref().unwrap_or_else(|| {
|
||||
unimplemented!("Automatic selection of a debugger based on users project")
|
||||
}));
|
||||
let kind = self
|
||||
.selected_debugger
|
||||
.as_deref()
|
||||
.map(|s| s.to_string())
|
||||
.unwrap_or_else(|| {
|
||||
unimplemented!("Automatic selection of a debugger based on users project")
|
||||
});
|
||||
|
||||
let config = DebugAdapterConfig {
|
||||
let config = DebugTaskDefinition {
|
||||
label: "hard coded attach".into(),
|
||||
kind,
|
||||
adapter: kind,
|
||||
request: DebugRequestType::Attach(task::AttachConfig { process_id: None }),
|
||||
program: None,
|
||||
cwd: Some(cwd),
|
||||
initialize_args: None,
|
||||
supports_attach: true,
|
||||
tcp_connection: Some(TCPHost::default()),
|
||||
};
|
||||
|
||||
let _ = self.workspace.update(cx, |workspace, cx| {
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
use crate::*;
|
||||
use crate::{attach_modal::Candidate, *};
|
||||
use attach_modal::AttachModal;
|
||||
use dap::client::SessionId;
|
||||
use dap::{client::SessionId, FakeAdapter};
|
||||
use gpui::{BackgroundExecutor, TestAppContext, VisualTestContext};
|
||||
use menu::Confirm;
|
||||
use project::{FakeFs, Project};
|
||||
use serde_json::json;
|
||||
use task::AttachConfig;
|
||||
use task::{AttachConfig, DebugTaskDefinition, TCPHost};
|
||||
use tests::{init_test, init_test_workspace};
|
||||
|
||||
#[gpui::test]
|
||||
@@ -27,14 +27,12 @@ async fn test_direct_attach_to_process(executor: BackgroundExecutor, cx: &mut Te
|
||||
let cx = &mut VisualTestContext::from_window(*workspace, cx);
|
||||
|
||||
let task = project.update(cx, |project, cx| {
|
||||
project.start_debug_session(
|
||||
dap::test_config(
|
||||
dap::DebugRequestType::Attach(AttachConfig {
|
||||
process_id: Some(10),
|
||||
}),
|
||||
None,
|
||||
None,
|
||||
),
|
||||
project.fake_debug_session(
|
||||
dap::DebugRequestType::Attach(AttachConfig {
|
||||
process_id: Some(10),
|
||||
}),
|
||||
None,
|
||||
false,
|
||||
cx,
|
||||
)
|
||||
});
|
||||
@@ -83,13 +81,32 @@ async fn test_show_attach_modal_and_select_process(
|
||||
let attach_modal = workspace
|
||||
.update(cx, |workspace, window, cx| {
|
||||
workspace.toggle_modal(window, cx, |window, cx| {
|
||||
AttachModal::new(
|
||||
AttachModal::with_processes(
|
||||
project.clone(),
|
||||
dap::test_config(
|
||||
dap::DebugRequestType::Attach(AttachConfig { process_id: None }),
|
||||
None,
|
||||
None,
|
||||
),
|
||||
DebugTaskDefinition {
|
||||
adapter: FakeAdapter::ADAPTER_NAME.into(),
|
||||
request: dap::DebugRequestType::Attach(AttachConfig::default()),
|
||||
label: "attach example".into(),
|
||||
initialize_args: None,
|
||||
tcp_connection: Some(TCPHost::default()),
|
||||
},
|
||||
vec![
|
||||
Candidate {
|
||||
pid: 0,
|
||||
name: "fake-binary-1".into(),
|
||||
command: vec![],
|
||||
},
|
||||
Candidate {
|
||||
pid: 3,
|
||||
name: "non-fake-binary-1".into(),
|
||||
command: vec![],
|
||||
},
|
||||
Candidate {
|
||||
pid: 1,
|
||||
name: "fake-binary-2".into(),
|
||||
command: vec![],
|
||||
},
|
||||
],
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
@@ -105,10 +122,10 @@ async fn test_show_attach_modal_and_select_process(
|
||||
workspace
|
||||
.update(cx, |_, _, cx| {
|
||||
let names =
|
||||
attach_modal.update(cx, |modal, cx| attach_modal::process_names(&modal, cx));
|
||||
attach_modal.update(cx, |modal, cx| attach_modal::_process_names(&modal, cx));
|
||||
|
||||
// we filtered out all processes that are not the current process(zed itself)
|
||||
assert_eq!(1, names.len());
|
||||
// we filtered out all processes that are not starting with `fake-binary`
|
||||
assert_eq!(2, names.len());
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
|
||||
@@ -3,6 +3,7 @@ use dap::requests::StackTrace;
|
||||
use gpui::{BackgroundExecutor, TestAppContext, VisualTestContext};
|
||||
use project::{FakeFs, Project};
|
||||
use serde_json::json;
|
||||
use task::LaunchConfig;
|
||||
use tests::{init_test, init_test_workspace};
|
||||
|
||||
#[gpui::test]
|
||||
@@ -29,8 +30,10 @@ async fn test_handle_output_event(executor: BackgroundExecutor, cx: &mut TestApp
|
||||
.unwrap();
|
||||
|
||||
let task = project.update(cx, |project, cx| {
|
||||
project.start_debug_session(
|
||||
dap::test_config(dap::DebugRequestType::Launch, None, None),
|
||||
project.fake_debug_session(
|
||||
dap::DebugRequestType::Launch(LaunchConfig::default()),
|
||||
None,
|
||||
false,
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
@@ -5,8 +5,8 @@ use dap::{
        Continue, Disconnect, Launch, Next, RunInTerminal, SetBreakpoints, StackTrace,
        StartDebugging, StepBack, StepIn, StepOut, Threads,
    },
    DebugRequestType, ErrorResponse, RunInTerminalRequestArguments, SourceBreakpoint,
    StartDebuggingRequestArguments, StartDebuggingRequestArgumentsRequest,
    ErrorResponse, RunInTerminalRequestArguments, SourceBreakpoint, StartDebuggingRequestArguments,
    StartDebuggingRequestArgumentsRequest,
};
use editor::{
    actions::{self},

@@ -25,6 +25,7 @@ use std::{
        Arc,
    },
};
use task::LaunchConfig;
use terminal_view::{terminal_panel::TerminalPanel, TerminalView};
use tests::{active_debug_session_panel, init_test, init_test_workspace};
use util::path;

@@ -49,7 +50,12 @@ async fn test_basic_show_debug_panel(executor: BackgroundExecutor, cx: &mut Test
    let cx = &mut VisualTestContext::from_window(*workspace, cx);

    let task = project.update(cx, |project, cx| {
        project.start_debug_session(dap::test_config(DebugRequestType::Launch, None, None), cx)
        project.fake_debug_session(
            dap::DebugRequestType::Launch(LaunchConfig::default()),
            None,
            false,
            cx,
        )
    });

    let session = task.await.unwrap();

@@ -201,7 +207,12 @@ async fn test_we_can_only_have_one_panel_per_debug_session(
    let cx = &mut VisualTestContext::from_window(*workspace, cx);

    let task = project.update(cx, |project, cx| {
        project.start_debug_session(dap::test_config(DebugRequestType::Launch, None, None), cx)
        project.fake_debug_session(
            dap::DebugRequestType::Launch(LaunchConfig::default()),
            None,
            false,
            cx,
        )
    });

    let session = task.await.unwrap();

@@ -385,7 +396,12 @@ async fn test_handle_successful_run_in_terminal_reverse_request(
    let cx = &mut VisualTestContext::from_window(*workspace, cx);

    let task = project.update(cx, |project, cx| {
        project.start_debug_session(dap::test_config(DebugRequestType::Launch, None, None), cx)
        project.fake_debug_session(
            dap::DebugRequestType::Launch(LaunchConfig::default()),
            None,
            false,
            cx,
        )
    });

    let session = task.await.unwrap();

@@ -475,7 +491,12 @@ async fn test_handle_error_run_in_terminal_reverse_request(
    let cx = &mut VisualTestContext::from_window(*workspace, cx);

    let task = project.update(cx, |project, cx| {
        project.start_debug_session(dap::test_config(DebugRequestType::Launch, None, None), cx)
        project.fake_debug_session(
            dap::DebugRequestType::Launch(LaunchConfig::default()),
            None,
            false,
            cx,
        )
    });

    let session = task.await.unwrap();

@@ -555,7 +576,12 @@ async fn test_handle_start_debugging_reverse_request(
    let cx = &mut VisualTestContext::from_window(*workspace, cx);

    let task = project.update(cx, |project, cx| {
        project.start_debug_session(dap::test_config(DebugRequestType::Launch, None, None), cx)
        project.fake_debug_session(
            dap::DebugRequestType::Launch(LaunchConfig::default()),
            None,
            false,
            cx,
        )
    });

    let session = task.await.unwrap();

@@ -668,7 +694,12 @@ async fn test_shutdown_children_when_parent_session_shutdown(
    let cx = &mut VisualTestContext::from_window(*workspace, cx);

    let task = project.update(cx, |project, cx| {
        project.start_debug_session(dap::test_config(DebugRequestType::Launch, None, None), cx)
        project.fake_debug_session(
            dap::DebugRequestType::Launch(LaunchConfig::default()),
            None,
            false,
            cx,
        )
    });

    let parent_session = task.await.unwrap();

@@ -776,7 +807,12 @@ async fn test_shutdown_parent_session_if_all_children_are_shutdown(
    let cx = &mut VisualTestContext::from_window(*workspace, cx);

    let task = project.update(cx, |project, cx| {
        project.start_debug_session(dap::test_config(DebugRequestType::Launch, None, None), cx)
        project.fake_debug_session(
            dap::DebugRequestType::Launch(LaunchConfig::default()),
            None,
            false,
            cx,
        )
    });

    let parent_session = task.await.unwrap();

@@ -891,15 +927,13 @@ async fn test_debug_panel_item_thread_status_reset_on_failure(
    let cx = &mut VisualTestContext::from_window(*workspace, cx);

    let task = project.update(cx, |project, cx| {
        project.start_debug_session(
            dap::test_config(
                DebugRequestType::Launch,
                None,
                Some(dap::Capabilities {
                    supports_step_back: Some(true),
                    ..Default::default()
                }),
            ),
        project.fake_debug_session(
            dap::DebugRequestType::Launch(LaunchConfig::default()),
            Some(dap::Capabilities {
                supports_step_back: Some(true),
                ..Default::default()
            }),
            false,
            cx,
        )
    });

@@ -1122,7 +1156,12 @@ async fn test_send_breakpoints_when_editor_has_been_saved(
    .unwrap();

    let task = project.update(cx, |project, cx| {
        project.start_debug_session(dap::test_config(DebugRequestType::Launch, None, None), cx)
        project.fake_debug_session(
            dap::DebugRequestType::Launch(LaunchConfig::default()),
            None,
            false,
            cx,
        )
    });

    let session = task.await.unwrap();

@@ -1347,7 +1386,12 @@ async fn test_unsetting_breakpoints_on_clear_breakpoint_action(
    });

    let task = project.update(cx, |project, cx| {
        project.start_debug_session(dap::test_config(DebugRequestType::Launch, None, None), cx)
        project.fake_debug_session(
            dap::DebugRequestType::Launch(LaunchConfig::default()),
            None,
            false,
            cx,
        )
    });

    let session = task.await.unwrap();

@@ -1419,8 +1463,10 @@ async fn test_debug_session_is_shutdown_when_attach_and_launch_request_fails(
    let cx = &mut VisualTestContext::from_window(*workspace, cx);

    let task = project.update(cx, |project, cx| {
        project.start_debug_session(
            dap::test_config(DebugRequestType::Launch, Some(true), None),
        project.fake_debug_session(
            dap::DebugRequestType::Launch(LaunchConfig::default()),
            None,
            true,
            cx,
        )
    });
@@ -5,7 +5,7 @@ use crate::{
};
use dap::{
    requests::{Modules, StackTrace, Threads},
    DebugRequestType, StoppedEvent,
    StoppedEvent,
};
use gpui::{BackgroundExecutor, TestAppContext, VisualTestContext};
use project::{FakeFs, Project};

@@ -13,6 +13,7 @@ use std::sync::{
    atomic::{AtomicBool, AtomicI32, Ordering},
    Arc,
};
use task::LaunchConfig;

#[gpui::test]
async fn test_module_list(executor: BackgroundExecutor, cx: &mut TestAppContext) {

@@ -30,15 +31,13 @@ async fn test_module_list(executor: BackgroundExecutor, cx: &mut TestAppContext)
    let cx = &mut VisualTestContext::from_window(*workspace, cx);

    let task = project.update(cx, |project, cx| {
        project.start_debug_session(
            dap::test_config(
                DebugRequestType::Launch,
                None,
                Some(dap::Capabilities {
                    supports_modules_request: Some(true),
                    ..Default::default()
                }),
            ),
        project.fake_debug_session(
            dap::DebugRequestType::Launch(LaunchConfig::default()),
            Some(dap::Capabilities {
                supports_modules_request: Some(true),
                ..Default::default()
            }),
            false,
            cx,
        )
    });
@@ -12,6 +12,7 @@ use gpui::{BackgroundExecutor, TestAppContext, VisualTestContext};
use project::{FakeFs, Project};
use serde_json::json;
use std::sync::Arc;
use task::LaunchConfig;
use unindent::Unindent as _;
use util::path;

@@ -52,8 +53,10 @@ async fn test_fetch_initial_stack_frames_and_go_to_stack_frame(
    let cx = &mut VisualTestContext::from_window(*workspace, cx);

    let task = project.update(cx, |project, cx| {
        project.start_debug_session(
            dap::test_config(dap::DebugRequestType::Launch, None, None),
        project.fake_debug_session(
            dap::DebugRequestType::Launch(LaunchConfig::default()),
            None,
            false,
            cx,
        )
    });

@@ -240,8 +243,10 @@ async fn test_select_stack_frame(executor: BackgroundExecutor, cx: &mut TestAppC
    let cx = &mut VisualTestContext::from_window(*workspace, cx);

    let task = project.update(cx, |project, cx| {
        project.start_debug_session(
            dap::test_config(dap::DebugRequestType::Launch, None, None),
        project.fake_debug_session(
            dap::DebugRequestType::Launch(LaunchConfig::default()),
            None,
            false,
            cx,
        )
    });

@@ -513,8 +518,10 @@ async fn test_collapsed_entries(executor: BackgroundExecutor, cx: &mut TestAppCo
    let cx = &mut VisualTestContext::from_window(*workspace, cx);

    let task = project.update(cx, |project, cx| {
        project.start_debug_session(
            dap::test_config(dap::DebugRequestType::Launch, None, None),
        project.fake_debug_session(
            dap::DebugRequestType::Launch(LaunchConfig::default()),
            None,
            false,
            cx,
        )
    });
@@ -17,6 +17,7 @@ use gpui::{BackgroundExecutor, TestAppContext, VisualTestContext};
use menu::{SelectFirst, SelectNext, SelectPrevious};
use project::{FakeFs, Project};
use serde_json::json;
use task::LaunchConfig;
use unindent::Unindent as _;
use util::path;

@@ -56,8 +57,10 @@ async fn test_basic_fetch_initial_scope_and_variables(
    let cx = &mut VisualTestContext::from_window(*workspace, cx);

    let task = project.update(cx, |project, cx| {
        project.start_debug_session(
            dap::test_config(dap::DebugRequestType::Launch, None, None),
        project.fake_debug_session(
            dap::DebugRequestType::Launch(LaunchConfig::default()),
            None,
            false,
            cx,
        )
    });

@@ -283,8 +286,10 @@ async fn test_fetch_variables_for_multiple_scopes(
    let cx = &mut VisualTestContext::from_window(*workspace, cx);

    let task = project.update(cx, |project, cx| {
        project.start_debug_session(
            dap::test_config(dap::DebugRequestType::Launch, None, None),
        project.fake_debug_session(
            dap::DebugRequestType::Launch(LaunchConfig::default()),
            None,
            false,
            cx,
        )
    });

@@ -562,8 +567,10 @@ async fn test_keyboard_navigation(executor: BackgroundExecutor, cx: &mut TestApp
    let cx = &mut VisualTestContext::from_window(*workspace, cx);

    let task = project.update(cx, |project, cx| {
        project.start_debug_session(
            dap::test_config(dap::DebugRequestType::Launch, None, None),
        project.fake_debug_session(
            dap::DebugRequestType::Launch(LaunchConfig::default()),
            None,
            false,
            cx,
        )
    });

@@ -1362,8 +1369,10 @@ async fn test_variable_list_only_sends_requests_when_rendering(
    let cx = &mut VisualTestContext::from_window(*workspace, cx);

    let task = project.update(cx, |project, cx| {
        project.start_debug_session(
            dap::test_config(dap::DebugRequestType::Launch, None, None),
        project.fake_debug_session(
            dap::DebugRequestType::Launch(LaunchConfig::default()),
            None,
            false,
            cx,
        )
    });

@@ -1639,8 +1648,10 @@ async fn test_it_fetches_scopes_variables_when_you_select_a_stack_frame(
    let cx = &mut VisualTestContext::from_window(*workspace, cx);

    let task = project.update(cx, |project, cx| {
        project.start_debug_session(
            dap::test_config(dap::DebugRequestType::Launch, None, None),
        project.fake_debug_session(
            dap::DebugRequestType::Launch(LaunchConfig::default()),
            None,
            false,
            cx,
        )
    });
@@ -20,8 +20,8 @@ use crate::hover_popover::hover_markdown_style;
#[derive(Clone, Debug)]
pub struct CommitDetails {
    pub sha: SharedString,
    pub committer_name: SharedString,
    pub committer_email: SharedString,
    pub author_name: SharedString,
    pub author_email: SharedString,
    pub commit_time: OffsetDateTime,
    pub message: Option<ParsedCommitMessage>,
}

@@ -133,16 +133,12 @@ impl CommitTooltip {
    CommitDetails {
        sha: blame.sha.to_string().into(),
        commit_time,
        committer_name: blame
            .committer_name
        author_name: blame
            .author
            .clone()
            .unwrap_or("<no name>".to_string())
            .into(),
        committer_email: blame
            .committer_email
            .clone()
            .unwrap_or("".to_string())
            .into(),
        author_email: blame.author_mail.clone().unwrap_or("".to_string()).into(),
        message: details,
    },
    window,

@@ -180,9 +176,9 @@ impl Render for CommitTooltip {
    fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
        let avatar = CommitAvatar::new(&self.commit).render(window, cx);

        let author = self.commit.committer_name.clone();
        let author = self.commit.author_name.clone();

        let author_email = self.commit.committer_email.clone();
        let author_email = self.commit.author_email.clone();

        let short_commit_id = self
            .commit
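These hunks switch the commit tooltip from committer fields to author fields, sourcing them from `blame.author` and `blame.author_mail` with placeholder fallbacks when the blame entry has no data. A small sketch of that fallback mapping; the trimmed-down `BlameEntry` struct here is hypothetical, not the type from Zed's git crate:

```rust
// Hypothetical, trimmed-down blame entry used only for this example.
struct BlameEntry {
    author: Option<String>,
    author_mail: Option<String>,
}

struct CommitDetails {
    author_name: String,
    author_email: String,
}

fn commit_details(blame: &BlameEntry) -> CommitDetails {
    CommitDetails {
        // Same fallbacks as the diff: "<no name>" for a missing author,
        // an empty string for a missing e-mail address.
        author_name: blame.author.clone().unwrap_or("<no name>".to_string()),
        author_email: blame.author_mail.clone().unwrap_or("".to_string()),
    }
}

fn main() {
    let entry = BlameEntry {
        author: None,
        author_mail: Some("jane@example.com".into()),
    };
    let details = commit_details(&entry);
    assert_eq!(details.author_name, "<no name>");
    assert_eq!(details.author_email, "jane@example.com");
}
```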
@@ -772,11 +772,11 @@ pub struct Editor {
    expect_bounds_change: Option<Bounds<Pixels>>,
    tasks: BTreeMap<(BufferId, BufferRow), RunnableTasks>,
    tasks_update_task: Option<Task<()>>,
    pub breakpoint_store: Option<Entity<BreakpointStore>>,
    breakpoint_store: Option<Entity<BreakpointStore>>,
    /// Allow's a user to create a breakpoint by selecting this indicator
    /// It should be None while a user is not hovering over the gutter
    /// Otherwise it represents the point that the breakpoint will be shown
    pub gutter_breakpoint_indicator: Option<DisplayPoint>,
    gutter_breakpoint_indicator: (Option<(DisplayPoint, bool)>, Option<Task<()>>),
    in_project_search: bool,
    previous_search_ranges: Option<Arc<[Range<Anchor>]>>,
    breadcrumb_header: Option<String>,

@@ -1409,14 +1409,6 @@ impl Editor {
            code_action_providers.push(Rc::new(project) as Rc<_>);
        }

        let hide_mouse_while_typing = if !matches!(mode, EditorMode::SingleLine { .. }) {
            EditorSettings::get_global(cx)
                .hide_mouse_while_typing
                .unwrap_or(true)
        } else {
            false
        };

        let mut this = Self {
            focus_handle,
            show_cursor_when_unfocused: false,

@@ -1543,7 +1535,7 @@ impl Editor {
            tasks: Default::default(),

            breakpoint_store,
            gutter_breakpoint_indicator: None,
            gutter_breakpoint_indicator: (None, None),
            _subscriptions: vec![
                cx.observe(&buffer, Self::on_buffer_changed),
                cx.subscribe_in(&buffer, window, Self::on_buffer_event),

@@ -1579,7 +1571,9 @@ impl Editor {
            text_style_refinement: None,
            load_diff_task: load_uncommitted_diff,
            mouse_cursor_hidden: false,
            hide_mouse_while_typing,
            hide_mouse_while_typing: EditorSettings::get_global(cx)
                .hide_mouse_while_typing
                .unwrap_or(true),
        };
        if let Some(breakpoints) = this.breakpoint_store.as_ref() {
            this._subscriptions

@@ -6134,35 +6128,6 @@ impl Editor {
            return breakpoint_display_points;
        };

        if let Some(buffer) = self.buffer.read(cx).as_singleton() {
            let buffer_snapshot = buffer.read(cx).snapshot();

            for breakpoint in
                breakpoint_store
                    .read(cx)
                    .breakpoints(&buffer, None, &buffer_snapshot, cx)
            {
                let point = buffer_snapshot.summary_for_anchor::<Point>(&breakpoint.0);
                let mut anchor = multi_buffer_snapshot.anchor_before(point);
                anchor.text_anchor = breakpoint.0;

                breakpoint_display_points.insert(
                    snapshot
                        .point_to_display_point(
                            MultiBufferPoint {
                                row: point.row,
                                column: point.column,
                            },
                            Bias::Left,
                        )
                        .row(),
                    (anchor, breakpoint.1.clone()),
                );
            }

            return breakpoint_display_points;
        }

        let range = snapshot.display_point_to_point(DisplayPoint::new(range.start, 0), Bias::Left)
            ..snapshot.display_point_to_point(DisplayPoint::new(range.end, 0), Bias::Right);

@@ -6302,7 +6267,8 @@ impl Editor {
        let (color, icon) = {
            let color = if self
                .gutter_breakpoint_indicator
                .is_some_and(|point| point.row() == row)
                .0
                .is_some_and(|(point, is_visible)| is_visible && point.row() == row)
            {
                Color::Hint
            } else if breakpoint.is_disabled() {

@@ -16701,11 +16667,7 @@ impl Editor {
        self.scroll_manager.vertical_scroll_margin = editor_settings.vertical_scroll_margin;
        self.show_breadcrumbs = editor_settings.toolbar.breadcrumbs;
        self.cursor_shape = editor_settings.cursor_shape.unwrap_or_default();
        self.hide_mouse_while_typing = if !matches!(self.mode, EditorMode::SingleLine { .. }) {
            editor_settings.hide_mouse_while_typing.unwrap_or(true)
        } else {
            false
        };
        self.hide_mouse_while_typing = editor_settings.hide_mouse_while_typing.unwrap_or(true);

        if !self.hide_mouse_while_typing {
            self.mouse_cursor_hidden = false;
@@ -72,6 +72,7 @@ use std::{
    ops::{Deref, Range},
    rc::Rc,
    sync::Arc,
    time::Duration,
};
use sum_tree::Bias;
use text::BufferId;

@@ -893,7 +894,6 @@ impl EditorElement {
        let modifiers = event.modifiers;
        let gutter_hovered = gutter_hitbox.is_hovered(window);
        editor.set_gutter_hovered(gutter_hovered, cx);
        editor.gutter_breakpoint_indicator = None;
        editor.mouse_cursor_hidden = false;

        if gutter_hovered {

@@ -910,8 +910,38 @@ impl EditorElement {
            .buffer_for_excerpt(buffer_anchor.excerpt_id)
            .is_some_and(|buffer| buffer.file().is_some())
        {
            editor.gutter_breakpoint_indicator = Some(new_point);
            let was_hovered = editor.gutter_breakpoint_indicator.0.is_some();
            let is_visible = editor
                .gutter_breakpoint_indicator
                .0
                .map_or(false, |(_, is_active)| is_active);
            editor.gutter_breakpoint_indicator.0 = Some((new_point, is_visible));

            editor.gutter_breakpoint_indicator.1.get_or_insert_with(|| {
                cx.spawn(async move |this, cx| {
                    if !was_hovered {
                        cx.background_executor()
                            .timer(Duration::from_millis(200))
                            .await;
                    }

                    this.update(cx, |this, cx| {
                        if let Some((_, is_active)) =
                            this.gutter_breakpoint_indicator.0.as_mut()
                        {
                            *is_active = true;
                        }

                        cx.notify();
                    })
                    .ok();
                })
            });
        } else {
            editor.gutter_breakpoint_indicator = (None, None);
        }
    } else {
        editor.gutter_breakpoint_indicator = (None, None);
    }

    cx.notify();
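The element-level change keeps the gutter breakpoint indicator as a pair of hover point plus visibility flag and only flips the flag after a 200 ms delay when the gutter was not already hovered. A synchronous toy model of that state machine, with no gpui tasks and timing driven by an explicit `timer_fired` call; the names and the exact transitions are illustrative, not Zed's implementation:

```rust
#[derive(Clone, Copy, Debug, PartialEq)]
struct DisplayPoint {
    row: u32,
}

/// Toy model of `(Option<(DisplayPoint, bool)>, Option<Task<()>>)`:
/// the hover point plus a visibility flag, and whether a delay timer is pending.
#[derive(Default)]
struct GutterIndicator {
    state: Option<(DisplayPoint, bool)>,
    timer_pending: bool,
}

impl GutterIndicator {
    fn hover(&mut self, point: DisplayPoint) {
        // If we were already hovering, keep the current visibility so the
        // indicator does not flicker while the mouse moves between rows.
        let was_hovered = self.state.is_some();
        let is_visible = self.state.map_or(false, |(_, visible)| visible);
        self.state = Some((point, is_visible));

        if !self.timer_pending {
            self.timer_pending = true;
            if was_hovered {
                // The real code skips the 200 ms wait in this case.
                self.timer_fired();
            }
        }
    }

    fn leave(&mut self) {
        self.state = None;
        self.timer_pending = false;
    }

    /// In the real code this runs after a 200 ms background timer.
    fn timer_fired(&mut self) {
        if let Some((_, visible)) = self.state.as_mut() {
            *visible = true;
        }
        self.timer_pending = false;
    }
}

fn main() {
    let mut indicator = GutterIndicator::default();
    indicator.hover(DisplayPoint { row: 3 });
    assert_eq!(indicator.state, Some((DisplayPoint { row: 3 }, false)));

    indicator.timer_fired(); // delay elapsed: indicator becomes visible
    assert_eq!(indicator.state, Some((DisplayPoint { row: 3 }, true)));

    indicator.hover(DisplayPoint { row: 4 }); // stays visible while hovering
    assert_eq!(indicator.state, Some((DisplayPoint { row: 4 }, true)));

    indicator.leave();
    assert!(indicator.state.is_none());
}
```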
@@ -1338,7 +1368,8 @@ impl EditorElement {
    fn layout_scrollbars(
        &self,
        snapshot: &EditorSnapshot,
        scrollbar_range_data: ScrollbarLayoutInformation,
        scrollbar_layout_information: ScrollbarLayoutInformation,
        content_offset: gpui::Point<Pixels>,
        scroll_position: gpui::Point<f32>,
        non_visible_cursors: bool,
        window: &mut Window,

@@ -1390,7 +1421,8 @@ impl EditorElement {

        Some(EditorScrollbars::from_scrollbar_axes(
            scrollbar_settings.axes,
            &scrollbar_range_data,
            &scrollbar_layout_information,
            content_offset,
            scroll_position,
            self.style.scrollbar_width,
            show_scrollbars,

@@ -1955,7 +1987,12 @@ impl EditorElement {
            .filter_map(|(display_row, (text_anchor, bp))| {
                if row_infos
                    .get((display_row.0.saturating_sub(range.start.0)) as usize)
                    .is_some_and(|row_info| row_info.expand_info.is_some())
                    .is_some_and(|row_info| {
                        row_info.expand_info.is_some()
                            || row_info
                                .diff_status
                                .is_some_and(|status| status.is_deleted())
                    })
                {
                    return None;
                }

@@ -6608,13 +6645,13 @@ impl Element for EditorElement {

                // Offset the content_bounds from the text_bounds by the gutter margin (which
                // is roughly half a character wide) to make hit testing work more like how we want.
                let content_origin =
                    text_hitbox.origin + point(gutter_dimensions.margin, Pixels::ZERO);
                let content_offset = point(gutter_dimensions.margin, Pixels::ZERO);
                let content_origin = text_hitbox.origin + content_offset;

                let scrollbar_bounds =
                let editor_text_bounds =
                    Bounds::from_corners(content_origin, bounds.bottom_right());

                let height_in_lines = scrollbar_bounds.size.height / line_height;
                let height_in_lines = editor_text_bounds.size.height / line_height;

                let max_row = snapshot.max_point().row().as_f32();

@@ -6829,8 +6866,10 @@ impl Element for EditorElement {
                // has their mouse over that line when a breakpoint isn't there
                if cx.has_flag::<Debugger>() {
                    let gutter_breakpoint_indicator =
                        self.editor.read(cx).gutter_breakpoint_indicator;
                    if let Some(gutter_breakpoint_point) = gutter_breakpoint_indicator {
                        self.editor.read(cx).gutter_breakpoint_indicator.0;
                    if let Some((gutter_breakpoint_point, _)) =
                        gutter_breakpoint_indicator.filter(|(_, is_active)| *is_active)
                    {
                        breakpoint_rows
                            .entry(gutter_breakpoint_point.row())
                            .or_insert_with(|| {

@@ -6942,7 +6981,7 @@ impl Element for EditorElement {
                    .width;

                let scrollbar_layout_information = ScrollbarLayoutInformation::new(
                    scrollbar_bounds,
                    text_hitbox.bounds,
                    glyph_grid_cell,
                    size(longest_line_width, max_row.as_f32() * line_height),
                    longest_line_blame_width,

@@ -7014,7 +7053,7 @@ impl Element for EditorElement {
                    MultiBufferRow(end_anchor.to_point(&snapshot.buffer_snapshot).row);

                let scroll_max = point(
                    ((scroll_width - scrollbar_bounds.size.width) / em_width).max(0.0),
                    ((scroll_width - editor_text_bounds.size.width) / em_width).max(0.0),
                    max_scroll_top,
                );

@@ -7220,6 +7259,7 @@ impl Element for EditorElement {
                let scrollbars_layout = self.layout_scrollbars(
                    &snapshot,
                    scrollbar_layout_information,
                    content_offset,
                    scroll_position,
                    non_visible_cursors,
                    window,

@@ -7589,8 +7629,8 @@ pub(super) fn gutter_bounds(

/// Holds information required for layouting the editor scrollbars.
struct ScrollbarLayoutInformation {
    /// The bounds of the editor text area.
    editor_text_bounds: Bounds<Pixels>,
    /// The bounds of the editor area (excluding the content offset).
    editor_bounds: Bounds<Pixels>,
    /// The available range to scroll within the document.
    scroll_range: Size<Pixels>,
    /// The space available for one glyph in the editor.

@@ -7599,7 +7639,7 @@ struct ScrollbarLayoutInformation {

impl ScrollbarLayoutInformation {
    pub fn new(
        scrollbar_bounds: Bounds<Pixels>,
        editor_bounds: Bounds<Pixels>,
        glyph_grid_cell: Size<Pixels>,
        document_size: Size<Pixels>,
        longest_line_blame_width: Pixels,

@@ -7608,7 +7648,7 @@ impl ScrollbarLayoutInformation {
        settings: &EditorSettings,
    ) -> Self {
        let vertical_overscroll = match settings.scroll_beyond_last_line {
            ScrollBeyondLastLine::OnePage => scrollbar_bounds.size.height,
            ScrollBeyondLastLine::OnePage => editor_bounds.size.height,
            ScrollBeyondLastLine::Off => glyph_grid_cell.height,
            ScrollBeyondLastLine::VerticalScrollMargin => {
                (1.0 + settings.vertical_scroll_margin) * glyph_grid_cell.height

@@ -7626,7 +7666,7 @@ impl ScrollbarLayoutInformation {
        let scroll_range = document_size + overscroll;

        ScrollbarLayoutInformation {
            editor_text_bounds: scrollbar_bounds,
            editor_bounds,
            scroll_range,
            glyph_grid_cell,
        }

@@ -7732,13 +7772,14 @@ impl EditorScrollbars {
    pub fn from_scrollbar_axes(
        settings_visibility: ScrollbarAxes,
        layout_information: &ScrollbarLayoutInformation,
        content_offset: gpui::Point<Pixels>,
        scroll_position: gpui::Point<f32>,
        scrollbar_width: Pixels,
        show_scrollbars: bool,
        window: &mut Window,
    ) -> Self {
        let ScrollbarLayoutInformation {
            editor_text_bounds,
            editor_bounds,
            scroll_range,
            glyph_grid_cell,
        } = layout_information;

@@ -7746,20 +7787,20 @@ impl EditorScrollbars {
        let scrollbar_bounds_for = |axis: ScrollbarAxis| match axis {
            ScrollbarAxis::Horizontal => Bounds::from_corner_and_size(
                Corner::BottomLeft,
                editor_text_bounds.bottom_left(),
                editor_bounds.bottom_left(),
                size(
                    if settings_visibility.vertical {
                        editor_text_bounds.size.width - scrollbar_width
                        editor_bounds.size.width - scrollbar_width
                    } else {
                        editor_text_bounds.size.width
                        editor_bounds.size.width
                    },
                    scrollbar_width,
                ),
            ),
            ScrollbarAxis::Vertical => Bounds::from_corner_and_size(
                Corner::TopRight,
                editor_text_bounds.top_right(),
                size(scrollbar_width, editor_text_bounds.size.height),
                editor_bounds.top_right(),
                size(scrollbar_width, editor_bounds.size.height),
            ),
        };

@@ -7768,23 +7809,24 @@ impl EditorScrollbars {
                .along(axis)
                .then(|| {
                    (
                        editor_text_bounds.size.along(axis),
                        editor_bounds.size.along(axis) - content_offset.along(axis),
                        scroll_range.along(axis),
                    )
                })
                .filter(|(editor_size, scroll_range)| {
                .filter(|(editor_content_size, scroll_range)| {
                    // The scrollbar should only be rendered if the content does
                    // not entirely fit into the editor
                    // However, this only applies to the horizontal scrollbar, as information about the
                    // vertical scrollbar layout is always needed for scrollbar diagnostics.
                    axis != ScrollbarAxis::Horizontal || editor_size < scroll_range
                    axis != ScrollbarAxis::Horizontal || editor_content_size < scroll_range
                })
                .map(|(editor_size, scroll_range)| {
                .map(|(editor_content_size, scroll_range)| {
                    ScrollbarLayout::new(
                        window.insert_hitbox(scrollbar_bounds_for(axis), false),
                        editor_size,
                        editor_content_size,
                        scroll_range,
                        glyph_grid_cell.along(axis),
                        content_offset.along(axis),
                        scroll_position.along(axis),
                        axis,
                    )

@@ -7819,6 +7861,7 @@ struct ScrollbarLayout {
    hitbox: Hitbox,
    visible_range: Range<f32>,
    text_unit_size: Pixels,
    content_offset: Pixels,
    thumb_size: Pixels,
    axis: ScrollbarAxis,
}

@@ -7831,30 +7874,34 @@ impl ScrollbarLayout {

    fn new(
        scrollbar_track_hitbox: Hitbox,
        editor_size: Pixels,
        editor_content_size: Pixels,
        scroll_range: Pixels,
        glyph_space: Pixels,
        content_offset: Pixels,
        scroll_position: f32,
        axis: ScrollbarAxis,
    ) -> Self {
        let scrollbar_track_bounds = scrollbar_track_hitbox.bounds;
        let scrollbar_track_length = scrollbar_track_bounds.size.along(axis);
        let track_bounds = scrollbar_track_hitbox.bounds;
        // The length of the track available to the scrollbar thumb. We deliberately
        // exclude the content size here so that the thumb aligns with the content.
        let track_length = track_bounds.size.along(axis) - content_offset;

        let text_units_per_page = editor_size / glyph_space;
        let text_units_per_page = editor_content_size / glyph_space;
        let visible_range = scroll_position..scroll_position + text_units_per_page;
        let total_text_units = scroll_range / glyph_space;

        let thumb_percentage = text_units_per_page / total_text_units;
        let thumb_size = (scrollbar_track_length * thumb_percentage)
        let thumb_size = (track_length * thumb_percentage)
            .max(ScrollbarLayout::MIN_THUMB_SIZE)
            .min(scrollbar_track_length);
        let text_unit_size = (scrollbar_track_length - thumb_size)
            / (total_text_units - text_units_per_page).max(0.);
            .min(track_length);
        let text_unit_size =
            (track_length - thumb_size) / (total_text_units - text_units_per_page).max(0.);

        ScrollbarLayout {
            hitbox: scrollbar_track_hitbox,
            visible_range,
            text_unit_size,
            content_offset,
            thumb_size,
            axis,
        }

@@ -7873,7 +7920,7 @@ impl ScrollbarLayout {
    }

    fn thumb_origin(&self, origin: Pixels) -> Pixels {
        origin + self.visible_range.start * self.text_unit_size
        origin + self.content_offset + self.visible_range.start * self.text_unit_size
    }

    fn marker_quads_for_ranges(
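The reworked `ScrollbarLayout::new` subtracts the content offset from the track length, sizes the thumb from the visible-to-total ratio, and derives how many track pixels correspond to one scrolled text unit; `thumb_origin` then shifts by the same offset. The arithmetic reproduced with plain `f32`s; the minimum-thumb-size constant and the example numbers are made up for illustration:

```rust
// Assumed placeholder; the real constant lives on ScrollbarLayout in Zed.
const MIN_THUMB_SIZE: f32 = 25.0;

/// Returns (thumb_size, text_unit_size) for one scrollbar axis, mirroring the
/// math in the updated `ScrollbarLayout::new`.
fn scrollbar_thumb_metrics(
    track_bounds_length: f32,
    content_offset: f32,
    editor_content_size: f32,
    scroll_range: f32,
    glyph_space: f32,
) -> (f32, f32) {
    // Exclude the content offset so the thumb aligns with the content.
    let track_length = track_bounds_length - content_offset;

    let text_units_per_page = editor_content_size / glyph_space;
    let total_text_units = scroll_range / glyph_space;

    let thumb_percentage = text_units_per_page / total_text_units;
    let thumb_size = (track_length * thumb_percentage)
        .max(MIN_THUMB_SIZE)
        .min(track_length);

    // Pixels of track travel per scrolled text unit.
    let text_unit_size =
        (track_length - thumb_size) / (total_text_units - text_units_per_page).max(0.0);

    (thumb_size, text_unit_size)
}

fn main() {
    // A 600 px track with an 8 px content offset, showing 40 of 200 lines at 16 px each.
    let (thumb, unit) = scrollbar_thumb_metrics(600.0, 8.0, 40.0 * 16.0, 200.0 * 16.0, 16.0);
    assert!((thumb - 592.0 * 0.2).abs() < 0.001);
    println!("thumb: {thumb:.1}px, per-line travel: {unit:.2}px");
}
```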
@@ -150,7 +150,7 @@ impl GitBlame {
                this.generate(cx);
            }
        }
        project::Event::WorktreeUpdatedGitRepositories(_) => {
        project::Event::GitStateUpdated => {
            log::debug!("Status of git repositories updated. Regenerating blame data...",);
            this.generate(cx);
        }

@@ -240,7 +240,7 @@ impl EditorTestContext {
    // unlike cx.simulate_keystrokes(), this does not run_until_parked
    // so you can use it to test detailed timing
    pub fn simulate_keystroke(&mut self, keystroke_text: &str) {
        let keystroke = Keystroke::parse(keystroke_text).unwrap();
        let keystroke = Keystroke::parse_case_insensitive(keystroke_text).unwrap();
        self.cx.dispatch_keystroke(self.window, keystroke);
    }

@@ -339,7 +339,8 @@ impl EditorTestContext {
        let mut found = None;
        fs.with_git_state(&Self::root_path().join(".git"), false, |git_state| {
            found = git_state.index_contents.get(path.as_ref()).cloned();
        });
        })
        .unwrap();
        assert_eq!(expected, found.as_deref());
    }
@@ -19,6 +19,7 @@ clap.workspace = true
client.workspace = true
clock.workspace = true
collections.workspace = true
dap.workspace = true
env_logger.workspace = true
feature_flags.workspace = true
fs.workspace = true

@@ -4,6 +4,7 @@ use clap::Parser;
use client::{Client, UserStore};
use clock::RealSystemClock;
use collections::BTreeMap;
use dap::DapRegistry;
use feature_flags::FeatureFlagAppExt as _;
use gpui::{AppContext as _, AsyncApp, BackgroundExecutor, Entity};
use http_client::{HttpClient, Method};

@@ -273,7 +274,7 @@ async fn run_evaluation(
    let repos_dir = Path::new(EVAL_REPOS_DIR);
    let db_path = Path::new(EVAL_DB_PATH);
    let api_key = std::env::var("OPENAI_API_KEY").unwrap();
    let fs = Arc::new(RealFs::new(None)) as Arc<dyn Fs>;
    let fs = Arc::new(RealFs::new(None, cx.background_executor().clone())) as Arc<dyn Fs>;
    let clock = Arc::new(RealSystemClock);
    let client = cx
        .update(|cx| {

@@ -302,6 +303,7 @@ async fn run_evaluation(
    ));

    let language_registry = Arc::new(LanguageRegistry::new(executor.clone()));
    let debug_adapters = Arc::new(DapRegistry::default());
    cx.update(|cx| languages::init(language_registry.clone(), node_runtime.clone(), cx))
        .unwrap();

@@ -346,6 +348,7 @@ async fn run_evaluation(
    node_runtime.clone(),
    user_store.clone(),
    language_registry.clone(),
    debug_adapters.clone(),
    fs.clone(),
    None,
    cx,

@@ -18,6 +18,7 @@ clap = { workspace = true, features = ["derive"] }
env_logger.workspace = true
extension.workspace = true
fs.workspace = true
gpui.workspace = true
language.workspace = true
log.workspace = true
reqwest_client.workspace = true
Some files were not shown because too many files have changed in this diff.