Compare commits
57 Commits
anchor-enu...ureq
| Author | SHA1 | Date |
|---|---|---|
| | bab228fc10 | |
| | 21a023980d | |
| | ae6a3d15af | |
| | dc7c49bd0b | |
| | 1eddd2f38d | |
| | dc48af0ca1 | |
| | 1f54fde4d2 | |
| | 19162c3160 | |
| | 9300dbc834 | |
| | bbf5ed2ba1 | |
| | 500c3c54a6 | |
| | 59dc3985a1 | |
| | ccc871c44c | |
| | 4e2ae06ca6 | |
| | 300bf87f77 | |
| | a6cb17fb51 | |
| | 9d197ddc99 | |
| | 623a6eca75 | |
| | 7bb510971a | |
| | eb71d2f1a8 | |
| | fc9db97ac7 | |
| | e9bc9ed5d5 | |
| | 9a8601227d | |
| | d33600525e | |
| | fdb03d3058 | |
| | c4e0f5e0ee | |
| | da1ef13442 | |
| | 5045f984a9 | |
| | 2d71c36ad3 | |
| | d2ffad0f34 | |
| | 692590bff4 | |
| | 87ac4cff60 | |
| | 9606858436 | |
| | f39e54decc | |
| | 8a7ef4db59 | |
| | fd07fef4db | |
| | 4a4d8c1cab | |
| | b69c6ee7df | |
| | 0e86ba0983 | |
| | 5e62bbfd29 | |
| | 21be70f278 | |
| | 2470db4901 | |
| | e87d6da2a6 | |
| | 437bcc0ce6 | |
| | 3a2f0653d1 | |
| | 336b4a5690 | |
| | 93a4295f66 | |
| | f019ad563f | |
| | 399e094f02 | |
| | dbc325ea12 | |
| | 6b56530a4a | |
| | 20c06545b6 | |
| | d989183f94 | |
| | 3ba071b993 | |
| | e4080ef565 | |
| | e95e1c9ae5 | |
| | 1ff10b71c8 | |
.github/workflows/close_stale_issues.yml (vendored, 14 changes)

```diff
@@ -1,27 +1,31 @@
 name: "Close Stale Issues"
 on:
   schedule:
-    - cron: "0 1 * * *"
+    - cron: "0 11 * * 2"
   workflow_dispatch:

 jobs:
   stale:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/stale@v9
+      - uses: actions/stale@28ca1036281a5e5922ead5184a1bbf96e5fc984e # v9
         with:
           repo-token: ${{ secrets.GITHUB_TOKEN }}
           stale-issue-message: >
             Hi there! 👋

-            We're working to clean up our issue tracker by closing older issues that might not be relevant anymore. Are you able to reproduce this issue in the latest version of Zed? If so, please let us know by commenting on this issue and we will keep it open; otherwise, we'll close it in a week. Feel free to open a new issue if you're seeing this message after the issue has been closed.
+            We're working to clean up our issue tracker by closing older issues that might not be relevant anymore. Are you able to reproduce this issue in the latest version of Zed? If so, please let us know by commenting on this issue and we will keep it open; otherwise, we'll close it in 10 days. Feel free to open a new issue if you're seeing this message after the issue has been closed.

             Thanks for your help!
           close-issue-message: "This issue was closed due to inactivity; feel free to open a new issue if you're still experiencing this problem!"
-          # We will increase `days-before-stale` to 365 on or after Jan 24th,
-          # 2024. This date marks one year since migrating issues from
-          # 'community' to 'zed' repository. The migration added activity to all
-          # issues, preventing 365 days from working until then.
           days-before-stale: 180
-          days-before-close: 7
+          days-before-close: 10
           any-of-issue-labels: "defect,panic / crash"
-          operations-per-run: 100
+          operations-per-run: 1000
           ascending: true
           enable-statistics: true
           stale-issue-label: "stale"
```
Cargo.lock (generated, 267 changes)

```diff
@@ -847,8 +847,8 @@ dependencies = [
  "chrono",
  "futures-util",
  "http-types",
- "hyper",
- "hyper-rustls",
+ "hyper 0.14.30",
+ "hyper-rustls 0.24.2",
  "serde",
  "serde_json",
  "serde_path_to_error",
@@ -1085,6 +1085,33 @@ dependencies = [
  "zeroize",
 ]

+[[package]]
+name = "aws-lc-rs"
+version = "1.9.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2f95446d919226d587817a7d21379e6eb099b97b45110a7f272a444ca5c54070"
+dependencies = [
+ "aws-lc-sys",
+ "mirai-annotations",
+ "paste",
+ "zeroize",
+]
+
+[[package]]
+name = "aws-lc-sys"
+version = "0.21.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b3ddc4a5b231dd6958b140ff3151b6412b3f4321fab354f399eec8f14b06df62"
+dependencies = [
+ "bindgen 0.69.4",
+ "cc",
+ "cmake",
+ "dunce",
+ "fs_extra",
+ "libc",
+ "paste",
+]
+
 [[package]]
 name = "aws-runtime"
 version = "1.4.2"
@@ -1337,13 +1364,13 @@ dependencies = [
  "aws-smithy-types",
  "bytes 1.7.1",
  "fastrand 2.1.1",
- "h2",
+ "h2 0.3.26",
  "http 0.2.12",
  "http-body 0.4.6",
  "http-body 1.0.1",
  "httparse",
- "hyper",
- "hyper-rustls",
+ "hyper 0.14.30",
+ "hyper-rustls 0.24.2",
  "once_cell",
  "pin-project-lite",
  "pin-utils",
@@ -1433,7 +1460,7 @@ dependencies = [
  "headers",
  "http 0.2.12",
  "http-body 0.4.6",
- "hyper",
+ "hyper 0.14.30",
  "itoa",
  "matchit",
  "memchr",
@@ -1582,12 +1609,15 @@ dependencies = [
  "itertools 0.12.1",
  "lazy_static",
  "lazycell",
  "log",
  "prettyplease",
  "proc-macro2",
  "quote",
  "regex",
  "rustc-hash",
  "shlex",
  "syn 2.0.76",
  "which 4.4.2",
 ]

 [[package]]
@@ -2364,7 +2394,7 @@ dependencies = [
  "clickhouse-derive",
  "clickhouse-rs-cityhash-sys",
  "futures 0.3.30",
- "hyper",
+ "hyper 0.14.30",
  "hyper-tls",
  "lz4",
  "sealed",
@@ -2409,7 +2439,6 @@ dependencies = [
  "cocoa 0.26.0",
  "collections",
- "feature_flags",
  "fs",
  "futures 0.3.30",
  "gpui",
  "http_client",
@@ -2453,6 +2482,15 @@ dependencies = [
  "smallvec",
 ]

+[[package]]
+name = "cmake"
+version = "0.1.51"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fb1e43aa7fd152b1f968787f7dbcdeb306d1867ff373c69955211876c053f91a"
+dependencies = [
+ "cc",
+]
+
 [[package]]
 name = "cobs"
 version = "0.2.3"
@@ -2569,7 +2607,7 @@ dependencies = [
  "headless",
  "hex",
  "http_client",
- "hyper",
+ "hyper 1.4.1",
  "indoc",
  "isahc_http_client",
  "jsonwebtoken",
@@ -3670,6 +3708,12 @@ dependencies = [
  "phf",
 ]

+[[package]]
+name = "dunce"
+version = "1.0.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813"
+
 [[package]]
 name = "dwrote"
 version = "0.11.1"
@@ -4597,6 +4641,12 @@ dependencies = [
  "windows-sys 0.52.0",
 ]

+[[package]]
+name = "fs_extra"
+version = "1.3.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "42703706b716c37f96a77aea830392ad231f44c9e9a67872fa5548707e11b11c"
+
 [[package]]
 name = "fsevent"
 version = "0.1.0"
@@ -5186,6 +5236,25 @@ dependencies = [
  "tracing",
 ]

+[[package]]
+name = "h2"
+version = "0.4.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "524e8ac6999421f49a846c2d4411f337e53497d8ec55d67753beffa43c5d9205"
+dependencies = [
+ "atomic-waker",
+ "bytes 1.7.1",
+ "fnv",
+ "futures-core",
+ "futures-sink",
+ "http 1.1.0",
+ "indexmap 2.4.0",
+ "slab",
+ "tokio",
+ "tokio-util",
+ "tracing",
+]
+
 [[package]]
 name = "half"
 version = "2.4.1"
@@ -5566,7 +5635,7 @@ dependencies = [
  "anyhow",
  "derive_more",
  "futures 0.3.30",
- "http 0.2.12",
+ "http 1.1.0",
  "log",
  "serde",
  "serde_json",
@@ -5608,7 +5677,7 @@ dependencies = [
  "futures-channel",
  "futures-core",
  "futures-util",
- "h2",
+ "h2 0.3.26",
  "http 0.2.12",
  "http-body 0.4.6",
  "httparse",
@@ -5622,6 +5691,26 @@ dependencies = [
  "want",
 ]

+[[package]]
+name = "hyper"
+version = "1.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "50dfd22e0e76d0f662d429a5f80fcaf3855009297eab6a0a9f8543834744ba05"
+dependencies = [
+ "bytes 1.7.1",
+ "futures-channel",
+ "futures-util",
+ "h2 0.4.6",
+ "http 1.1.0",
+ "http-body 1.0.1",
+ "httparse",
+ "itoa",
+ "pin-project-lite",
+ "smallvec",
+ "tokio",
+ "want",
+]
+
 [[package]]
 name = "hyper-rustls"
 version = "0.24.2"
@@ -5630,12 +5719,31 @@ checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590"
 dependencies = [
  "futures-util",
  "http 0.2.12",
- "hyper",
+ "hyper 0.14.30",
  "log",
  "rustls 0.21.12",
  "rustls-native-certs 0.6.3",
  "tokio",
- "tokio-rustls",
+ "tokio-rustls 0.24.1",
 ]

+[[package]]
+name = "hyper-rustls"
+version = "0.27.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "08afdbb5c31130e3034af566421053ab03787c640246a446327f550d11bcb333"
+dependencies = [
+ "futures-util",
+ "http 1.1.0",
+ "hyper 1.4.1",
+ "hyper-util",
+ "log",
+ "rustls 0.23.13",
+ "rustls-native-certs 0.8.0",
+ "rustls-pki-types",
+ "tokio",
+ "tokio-rustls 0.26.0",
+ "tower-service",
+]
+
 [[package]]
@@ -5645,12 +5753,51 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905"
 dependencies = [
  "bytes 1.7.1",
- "hyper",
+ "hyper 0.14.30",
  "native-tls",
  "tokio",
  "tokio-native-tls",
 ]

+[[package]]
+name = "hyper-util"
+version = "0.1.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "41296eb09f183ac68eec06e03cdbea2e759633d4067b2f6552fc2e009bcad08b"
+dependencies = [
+ "bytes 1.7.1",
+ "futures-channel",
+ "futures-util",
+ "http 1.1.0",
+ "http-body 1.0.1",
+ "hyper 1.4.1",
+ "pin-project-lite",
+ "socket2 0.5.7",
+ "tokio",
+ "tower-service",
+ "tracing",
+]
+
+[[package]]
+name = "hyper_client"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "derive_more",
+ "futures 0.3.30",
+ "gpui",
+ "http_client",
+ "hyper 1.4.1",
+ "hyper-rustls 0.27.3",
+ "hyper-util",
+ "log",
+ "serde",
+ "serde_json",
+ "smol",
+ "ureq",
+ "url",
+]
+
 [[package]]
 name = "iana-time-zone"
 version = "0.1.60"
@@ -6479,7 +6626,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "4979f22fdb869068da03c9f7528f8297c6fd2606bc3a4affe42e6a823fdb8da4"
 dependencies = [
  "cfg-if",
- "windows-targets 0.48.5",
+ "windows-targets 0.52.6",
 ]

 [[package]]
@@ -7046,6 +7193,12 @@ dependencies = [
  "windows-sys 0.48.0",
 ]

+[[package]]
+name = "mirai-annotations"
+version = "1.12.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c9be0862c1b3f26a88803c4a49de6889c10e608b3ee9344e6ef5b45fb37ad3d1"
+
 [[package]]
 name = "multi_buffer"
 version = "0.1.0"
@@ -7190,6 +7343,7 @@ dependencies = [
  "async-std",
  "async-tar",
  "async-trait",
  "async-watch",
  "async_zip",
  "futures 0.3.30",
  "http_client",
@@ -7202,6 +7356,7 @@ dependencies = [
  "tempfile",
  "util",
  "walkdir",
  "which 6.0.3",
  "windows 0.58.0",
 ]

@@ -9200,10 +9355,10 @@ dependencies = [
  "encoding_rs",
  "futures-core",
  "futures-util",
- "h2",
+ "h2 0.3.26",
  "http 0.2.12",
  "http-body 0.4.6",
- "hyper",
+ "hyper 0.14.30",
  "hyper-tls",
  "ipnet",
  "js-sys",
@@ -9600,10 +9755,26 @@ checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e"
 dependencies = [
  "log",
  "ring 0.17.8",
- "rustls-webpki",
+ "rustls-webpki 0.101.7",
  "sct",
 ]

+[[package]]
+name = "rustls"
+version = "0.23.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f2dabaac7466917e566adb06783a81ca48944c6898a1b08b9374106dd671f4c8"
+dependencies = [
+ "aws-lc-rs",
+ "log",
+ "once_cell",
+ "ring 0.17.8",
+ "rustls-pki-types",
+ "rustls-webpki 0.102.8",
+ "subtle",
+ "zeroize",
+]
+
 [[package]]
 name = "rustls-native-certs"
 version = "0.6.3"
@@ -9664,6 +9835,18 @@ dependencies = [
  "untrusted 0.9.0",
 ]

+[[package]]
+name = "rustls-webpki"
+version = "0.102.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "64ca1bc8749bd4cf37b5ce386cc146580777b4e8572c7b97baf22c83f444bee9"
+dependencies = [
+ "aws-lc-rs",
+ "ring 0.17.8",
+ "rustls-pki-types",
+ "untrusted 0.9.0",
+]
+
 [[package]]
 name = "rustversion"
 version = "1.0.17"
@@ -10497,6 +10680,7 @@ dependencies = [
  "futures 0.3.30",
  "gpui",
  "parking_lot",
  "paths",
  "serde",
  "serde_json",
  "snippet",
@@ -11004,6 +11188,7 @@ dependencies = [
  "text",
  "theme",
  "ui",
  "unicode-segmentation",
  "util",
  "windows 0.58.0",
 ]
@@ -11810,6 +11995,17 @@ dependencies = [
  "tokio",
 ]

+[[package]]
+name = "tokio-rustls"
+version = "0.26.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0c7bc40d0e5a97695bb96e27995cd3a08538541b0a846f65bba7a359f36700d4"
+dependencies = [
+ "rustls 0.23.13",
+ "rustls-pki-types",
+ "tokio",
+]
+
 [[package]]
 name = "tokio-socks"
 version = "0.5.2"
@@ -12529,6 +12725,22 @@ version = "0.9.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1"

+[[package]]
+name = "ureq"
+version = "2.10.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b74fc6b57825be3373f7054754755f03ac3a8f5d70015ccad699ba2029956f4a"
+dependencies = [
+ "base64 0.22.1",
+ "flate2",
+ "log",
+ "once_cell",
+ "rustls 0.23.13",
+ "rustls-pki-types",
+ "url",
+ "webpki-roots 0.26.6",
+]
+
 [[package]]
 name = "url"
 version = "2.5.2"
@@ -12832,7 +13044,7 @@ dependencies = [
  "futures-util",
  "headers",
  "http 0.2.12",
- "hyper",
+ "hyper 0.14.30",
  "log",
  "mime",
  "mime_guess",
@@ -13402,6 +13614,15 @@ version = "0.25.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "5f20c57d8d7db6d3b86154206ae5d8fba62dd39573114de97c2cb0578251f8e1"

+[[package]]
+name = "webpki-roots"
+version = "0.26.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "841c67bff177718f1d4dfefde8d8f0e78f9b6589319ba88312f567fc5841a958"
+dependencies = [
+ "rustls-pki-types",
+]
+
 [[package]]
 name = "weezl"
 version = "0.1.8"
@@ -13532,7 +13753,7 @@ version = "0.1.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb"
 dependencies = [
- "windows-sys 0.48.0",
+ "windows-sys 0.59.0",
 ]

 [[package]]
@@ -14386,13 +14607,14 @@ dependencies = [

 [[package]]
 name = "zed"
-version = "0.155.0"
+version = "0.156.0"
 dependencies = [
  "activity_indicator",
  "anyhow",
  "ashpd",
  "assets",
  "assistant",
  "async-watch",
  "audio",
  "auto_update",
  "backtrace",
@@ -14466,6 +14688,7 @@ dependencies = [
  "session",
  "settings",
  "settings_ui",
  "shellexpand 2.1.2",
  "simplelog",
  "smol",
  "snippet_provider",
@@ -14617,7 +14840,7 @@ dependencies = [

 [[package]]
 name = "zed_lua"
-version = "0.0.3"
+version = "0.1.0"
 dependencies = [
  "zed_extension_api 0.1.0",
 ]
```
```diff
@@ -129,6 +129,7 @@ members = [
     "crates/worktree",
     "crates/zed",
     "crates/zed_actions",
+    "crates/hyper_client",

     #
     # Extensions
@@ -356,7 +357,9 @@ git2 = { version = "0.19", default-features = false }
 globset = "0.4"
 heed = { version = "0.20.1", features = ["read-txn-no-tls"] }
 hex = "0.4.3"
-hyper = "0.14"
+hyper = "1.4.1"
+hyper-util = "0.1.9"
+hyper-rustls = "0.27.3"
 html5ever = "0.27.0"
 ignore = "0.4.22"
 image = "0.25.1"
```
```diff
@@ -292,6 +292,8 @@
     "g ctrl-x": ["vim::Decrement", { "step": true }],
     "shift-i": "vim::InsertBefore",
     "shift-a": "vim::InsertAfter",
+    "g I": "vim::VisualInsertFirstNonWhiteSpace",
+    "g A": "vim::VisualInsertEndOfLine",
     "shift-j": "vim::JoinLines",
     "r": ["vim::PushOperator", "Replace"],
     "ctrl-c": ["vim::SwitchMode", "Normal"],
```
```diff
@@ -762,6 +762,7 @@
   // }
   //
   "file_types": {
+    "Plain Text": ["txt"],
     "JSON": ["flake.lock"],
     "JSONC": [
       "**/.zed/**/*.json",
@@ -771,6 +772,21 @@
       "pyrightconfig.json"
     ]
   },
+  /// By default use a recent system version of node, or install our own.
+  /// You can override this to use a version of node that is not in $PATH with:
+  /// {
+  ///   "node": {
+  ///     "node_path": "/path/to/node"
+  ///     "npm_path": "/path/to/npm" (defaults to node_path/../npm)
+  ///   }
+  /// }
+  /// or to ensure Zed always downloads and installs an isolated version of node:
+  /// {
+  ///   "node": {
+  ///     "ignore_system_version": true,
+  ///   }
+  /// NOTE: changing this setting currently requires restarting Zed.
+  "node": {},
   // The extensions that Zed should automatically install on startup.
   //
   // If you don't want any of these extensions, add this field to your settings
```
```diff
@@ -227,10 +227,10 @@ impl ActivityIndicator {
         for status in &self.statuses {
             match status.status {
                 LanguageServerBinaryStatus::CheckingForUpdate => {
-                    checking_for_update.push(status.name.0.as_ref())
+                    checking_for_update.push(status.name.clone())
                 }
-                LanguageServerBinaryStatus::Downloading => downloading.push(status.name.0.as_ref()),
-                LanguageServerBinaryStatus::Failed { .. } => failed.push(status.name.0.as_ref()),
+                LanguageServerBinaryStatus::Downloading => downloading.push(status.name.clone()),
+                LanguageServerBinaryStatus::Failed { .. } => failed.push(status.name.clone()),
                 LanguageServerBinaryStatus::None => {}
             }
         }
@@ -242,8 +242,24 @@ impl ActivityIndicator {
                     .size(IconSize::Small)
                     .into_any_element(),
                 ),
-                message: format!("Downloading {}...", downloading.join(", "),),
-                on_click: None,
+                message: format!(
+                    "Downloading {}...",
+                    downloading.iter().map(|name| name.0.as_ref()).fold(
+                        String::new(),
+                        |mut acc, s| {
+                            if !acc.is_empty() {
+                                acc.push_str(", ");
+                            }
+                            acc.push_str(s);
+                            acc
+                        }
+                    )
+                ),
+                on_click: Some(Arc::new(move |this, cx| {
+                    this.statuses
+                        .retain(|status| !downloading.contains(&status.name));
+                    this.dismiss_error_message(&DismissErrorMessage, cx)
+                })),
             });
         }
@@ -256,9 +272,22 @@ impl ActivityIndicator {
                 ),
                 message: format!(
                     "Checking for updates to {}...",
-                    checking_for_update.join(", "),
+                    checking_for_update.iter().map(|name| name.0.as_ref()).fold(
+                        String::new(),
+                        |mut acc, s| {
+                            if !acc.is_empty() {
+                                acc.push_str(", ");
+                            }
+                            acc.push_str(s);
+                            acc
+                        }
+                    ),
                 ),
-                on_click: None,
+                on_click: Some(Arc::new(move |this, cx| {
+                    this.statuses
+                        .retain(|status| !checking_for_update.contains(&status.name));
+                    this.dismiss_error_message(&DismissErrorMessage, cx)
+                })),
             });
         }
@@ -270,8 +299,17 @@ impl ActivityIndicator {
                     .into_any_element(),
                 ),
                 message: format!(
-                    "Failed to download {}. Click to show error.",
-                    failed.join(", "),
+                    "Failed to run {}. Click to show error.",
+                    failed
+                        .iter()
+                        .map(|name| name.0.as_ref())
+                        .fold(String::new(), |mut acc, s| {
+                            if !acc.is_empty() {
+                                acc.push_str(", ");
+                            }
+                            acc.push_str(s);
+                            acc
+                        }),
                 ),
                 on_click: Some(Arc::new(|this, cx| {
                     this.show_error_message(&Default::default(), cx)
@@ -280,7 +318,7 @@ impl ActivityIndicator {
         }

         // Show any formatting failure
-        if let Some(failure) = self.project.read(cx).last_formatting_failure() {
+        if let Some(failure) = self.project.read(cx).last_formatting_failure(cx) {
             return Some(Content {
                 icon: Some(
                     Icon::new(IconName::Warning)
@@ -304,7 +342,9 @@ impl ActivityIndicator {
                     .into_any_element(),
                 ),
                 message: "Checking for Zed updates…".to_string(),
-                on_click: None,
+                on_click: Some(Arc::new(|this, cx| {
+                    this.dismiss_error_message(&DismissErrorMessage, cx)
+                })),
             }),
             AutoUpdateStatus::Downloading => Some(Content {
                 icon: Some(
@@ -313,7 +353,9 @@ impl ActivityIndicator {
                     .into_any_element(),
                 ),
                 message: "Downloading Zed update…".to_string(),
-                on_click: None,
+                on_click: Some(Arc::new(|this, cx| {
+                    this.dismiss_error_message(&DismissErrorMessage, cx)
+                })),
             }),
             AutoUpdateStatus::Installing => Some(Content {
                 icon: Some(
@@ -322,7 +364,9 @@ impl ActivityIndicator {
                     .into_any_element(),
                 ),
                 message: "Installing Zed update…".to_string(),
-                on_click: None,
+                on_click: Some(Arc::new(|this, cx| {
+                    this.dismiss_error_message(&DismissErrorMessage, cx)
+                })),
             }),
             AutoUpdateStatus::Updated { binary_path } => Some(Content {
                 icon: None,
@@ -342,7 +386,7 @@ impl ActivityIndicator {
                 ),
                 message: "Auto update failed".to_string(),
                 on_click: Some(Arc::new(|this, cx| {
-                    this.dismiss_error_message(&Default::default(), cx)
+                    this.dismiss_error_message(&DismissErrorMessage, cx)
                 })),
             }),
             AutoUpdateStatus::Idle => None,
@@ -360,7 +404,9 @@ impl ActivityIndicator {
                     .into_any_element(),
                 ),
                 message: format!("Updating {extension_id} extension…"),
-                on_click: None,
+                on_click: Some(Arc::new(|this, cx| {
+                    this.dismiss_error_message(&DismissErrorMessage, cx)
+                })),
             });
         }
     }
```
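The hunks above replace `downloading.join(", ")`-style calls with a fold, because the status vectors now hold cloned `LanguageServerName` values rather than `&str` borrows. A minimal, self-contained sketch of that fold-based join, with a hypothetical newtype standing in for Zed's actual language-server name type:

```rust
use std::sync::Arc;

// Hypothetical stand-in for the editor's language-server name newtype.
struct LanguageServerName(Arc<str>);

// Join names with ", " without collecting an intermediate Vec<&str>:
// the fold appends a separator before every element except the first.
fn join_names(names: &[LanguageServerName]) -> String {
    names
        .iter()
        .map(|name| name.0.as_ref())
        .fold(String::new(), |mut acc: String, s: &str| {
            if !acc.is_empty() {
                acc.push_str(", ");
            }
            acc.push_str(s);
            acc
        })
}

fn main() {
    let names = [
        LanguageServerName(Arc::from("rust-analyzer")),
        LanguageServerName(Arc::from("gopls")),
    ];
    assert_eq!(join_names(&names), "rust-analyzer, gopls");
}
```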
```diff
@@ -7,7 +7,6 @@ use anyhow::{anyhow, Context, Result};
 use chrono::{DateTime, Utc};
 use futures::{io::BufReader, stream::BoxStream, AsyncBufReadExt, AsyncReadExt, Stream, StreamExt};
 use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest};
-use isahc::config::Configurable;
 use isahc::http::{HeaderMap, HeaderValue};
 use serde::{Deserialize, Serialize};
 use strum::{EnumIter, EnumString};
```
```diff
@@ -2794,7 +2794,7 @@ impl ContextEditor {
         let multibuffer = editor.buffer().read(cx).snapshot(cx);
         let (&excerpt_id, _, buffer) = multibuffer.as_singleton().unwrap();
         let anchor = if group.context_range.start.to_offset(buffer) == 0 {
-            Anchor::Start
+            Anchor::min()
         } else {
             multibuffer
                 .anchor_in_excerpt(excerpt_id, group.context_range.start)
```
```diff
@@ -357,9 +357,6 @@ impl ContextStore {
         let Some(project_id) = project.remote_id() else {
             return Task::ready(Err(anyhow!("project was not remote")));
         };
-        if project.is_local_or_ssh() {
-            return Task::ready(Err(anyhow!("cannot create remote contexts as the host")));
-        }

         let replica_id = project.replica_id();
         let capability = project.capability();
@@ -488,9 +485,6 @@ impl ContextStore {
         let Some(project_id) = project.remote_id() else {
             return Task::ready(Err(anyhow!("project was not remote")));
         };
-        if project.is_local_or_ssh() {
-            return Task::ready(Err(anyhow!("cannot open remote contexts as the host")));
-        }

         if let Some(context) = self.loaded_context_for_id(&context_id, cx) {
             return Task::ready(Ok(context));
```
```diff
@@ -1208,7 +1208,7 @@ impl InlineAssistant {
         editor.set_read_only(true);
         editor.set_show_inline_completions(Some(false), cx);
         editor.highlight_rows::<DeletedLines>(
-            Anchor::Start..=Anchor::End,
+            Anchor::min()..=Anchor::max(),
             Some(cx.theme().status().deleted_background),
             false,
             cx,
```
```diff
@@ -138,7 +138,7 @@ impl WorkflowSuggestion {
             }
             Self::CreateFile { description } => {
                 initial_prompt = description.clone();
-                suggestion_range = Anchor::Start..Anchor::Start;
+                suggestion_range = editor::Anchor::min()..editor::Anchor::min();
             }
             Self::InsertBefore {
                 position,
```
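These hunks, like the editor test changes further down, swap the removed `Anchor::Start` / `Anchor::End` variants for `Anchor::min()` / `Anchor::max()` constructor calls. A toy model of that API shape, using a hypothetical offset-based anchor (Zed's real `Anchor` is an opaque position into a buffer snapshot, not a plain offset):

```rust
// Hypothetical, simplified anchor: min() and max() replace the old
// enum-style Start/End variants while keeping call sites explicit.
#[derive(Clone, Copy, Debug, PartialEq, PartialOrd)]
struct Anchor {
    offset: usize,
}

impl Anchor {
    fn min() -> Self {
        Anchor { offset: usize::MIN }
    }

    fn max() -> Self {
        Anchor { offset: usize::MAX }
    }
}

fn main() {
    // The whole-buffer range used by highlight_rows in the hunk above.
    let whole_buffer = Anchor::min()..=Anchor::max();
    assert!(whole_buffer.contains(&Anchor { offset: 42 }));
}
```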
```diff
@@ -23,7 +23,6 @@ chrono = { workspace = true, features = ["serde"] }
 clock.workspace = true
 collections.workspace = true
-feature_flags.workspace = true
 fs.workspace = true
 futures.workspace = true
 gpui.workspace = true
 http_client.workspace = true
```
````diff
@@ -23,8 +23,7 @@ To use a different set of admin users, create `crates/collab/seed.json`.
 ```json
 {
   "admins": ["yourgithubhere"],
-  "channels": ["zed"],
-  "number_of_users": 20
+  "channels": ["zed"]
 }
 ```
````
```diff
@@ -8,6 +8,5 @@
     "JosephTLyons",
     "rgbkrk"
   ],
-  "channels": ["zed"],
-  "number_of_users": 100
+  "channels": ["zed"]
 }
```
crates/collab/seed/github_users.json (new file, 602 lines)

@@ -0,0 +1,602 @@
```json
[
  { "id": 1, "login": "mojombo", "email": "tom@mojombo.com", "created_at": "2007-10-20T05:24:19Z" },
  { "id": 2, "login": "defunkt", "email": null, "created_at": "2007-10-20T05:24:19Z" },
  { "id": 3, "login": "pjhyett", "email": "pj@hyett.com", "created_at": "2008-01-07T17:54:22Z" },
  { "id": 4, "login": "wycats", "email": "wycats@gmail.com", "created_at": "2008-01-12T05:38:33Z" },
  { "id": 5, "login": "ezmobius", "email": null, "created_at": "2008-01-12T07:51:46Z" },
  { "id": 6, "login": "ivey", "email": "ivey@gweezlebur.com", "created_at": "2008-01-12T15:15:00Z" },
  { "id": 7, "login": "evanphx", "email": "evan@phx.io", "created_at": "2008-01-12T16:46:24Z" },
  { "id": 17, "login": "vanpelt", "email": "vanpelt@wandb.com", "created_at": "2008-01-13T05:57:18Z" },
  { "id": 18, "login": "wayneeseguin", "email": "wayneeseguin@gmail.com", "created_at": "2008-01-13T06:02:21Z" },
  { "id": 19, "login": "brynary", "email": null, "created_at": "2008-01-13T10:19:47Z" },
  { "id": 20, "login": "kevinclark", "email": "kevin.clark@gmail.com", "created_at": "2008-01-13T18:33:26Z" },
  { "id": 21, "login": "technoweenie", "email": "technoweenie@hey.com", "created_at": "2008-01-14T04:33:35Z" },
  { "id": 22, "login": "macournoyer", "email": "macournoyer@gmail.com", "created_at": "2008-01-14T10:49:35Z" },
  { "id": 23, "login": "takeo", "email": "toby@takeo.email", "created_at": "2008-01-14T11:25:49Z" },
  { "id": 25, "login": "caged", "email": "encytemedia@gmail.com", "created_at": "2008-01-15T04:47:24Z" },
  { "id": 26, "login": "topfunky", "email": null, "created_at": "2008-01-15T05:40:05Z" },
  { "id": 27, "login": "anotherjesse", "email": "anotherjesse@gmail.com", "created_at": "2008-01-15T07:49:30Z" },
  { "id": 28, "login": "roland", "email": null, "created_at": "2008-01-15T08:12:51Z" },
  { "id": 29, "login": "lukas", "email": "lukas@wandb.com", "created_at": "2008-01-15T12:50:02Z" },
  { "id": 30, "login": "fanvsfan", "email": null, "created_at": "2008-01-15T14:15:23Z" },
  { "id": 31, "login": "tomtt", "email": null, "created_at": "2008-01-15T15:44:31Z" },
  { "id": 32, "login": "railsjitsu", "email": null, "created_at": "2008-01-16T04:57:23Z" },
  { "id": 34, "login": "nitay", "email": null, "created_at": "2008-01-18T14:09:11Z" },
  { "id": 35, "login": "kevwil", "email": null, "created_at": "2008-01-19T05:50:12Z" },
  { "id": 36, "login": "KirinDave", "email": null, "created_at": "2008-01-19T08:01:02Z" },
  { "id": 37, "login": "jamesgolick", "email": "jamesgolick@gmail.com", "created_at": "2008-01-19T22:52:30Z" },
  { "id": 38, "login": "atmos", "email": "atmos@atmos.org", "created_at": "2008-01-22T09:14:11Z" },
  { "id": 44, "login": "errfree", "email": null, "created_at": "2008-01-24T02:08:37Z" },
  { "id": 45, "login": "mojodna", "email": null, "created_at": "2008-01-24T04:40:22Z" },
  { "id": 46, "login": "bmizerany", "email": "blake.mizerany@gmail.com", "created_at": "2008-01-24T04:44:30Z" },
  { "id": 47, "login": "jnewland", "email": "jesse@jnewland.com", "created_at": "2008-01-25T02:28:12Z" },
  { "id": 48, "login": "joshknowles", "email": "joshknowles@gmail.com", "created_at": "2008-01-25T21:30:42Z" },
  { "id": 49, "login": "hornbeck", "email": "hornbeck@gmail.com", "created_at": "2008-01-25T21:49:23Z" },
  { "id": 50, "login": "jwhitmire", "email": "jeff@jwhitmire.com", "created_at": "2008-01-25T22:07:48Z" },
  { "id": 51, "login": "elbowdonkey", "email": null, "created_at": "2008-01-25T22:08:20Z" },
  { "id": 52, "login": "reinh", "email": null, "created_at": "2008-01-25T22:16:29Z" },
  { "id": 53, "login": "knzai", "email": "git@knz.ai", "created_at": "2008-01-25T22:33:10Z" },
  { "id": 68, "login": "bs", "email": "yap@bri.tt", "created_at": "2008-01-27T01:46:29Z" },
  { "id": 69, "login": "rsanheim", "email": null, "created_at": "2008-01-27T07:09:47Z" },
  { "id": 70, "login": "schacon", "email": "schacon@gmail.com", "created_at": "2008-01-27T17:19:28Z" },
  { "id": 71, "login": "uggedal", "email": null, "created_at": "2008-01-27T22:18:57Z" },
  { "id": 72, "login": "bruce", "email": "brwcodes@gmail.com", "created_at": "2008-01-28T07:16:45Z" },
  { "id": 73, "login": "sam", "email": "ssmoot@gmail.com", "created_at": "2008-01-28T19:01:26Z" },
  { "id": 74, "login": "mmower", "email": "self@mattmower.com", "created_at": "2008-01-28T19:47:50Z" },
  { "id": 75, "login": "abhay", "email": null, "created_at": "2008-01-28T21:08:23Z" },
  { "id": 76, "login": "rabble", "email": "evan@protest.net", "created_at": "2008-01-28T23:27:02Z" },
  { "id": 77, "login": "benburkert", "email": "ben@benburkert.com", "created_at": "2008-01-28T23:44:14Z" },
  { "id": 78, "login": "indirect", "email": "andre@arko.net", "created_at": "2008-01-29T07:59:27Z" },
  { "id": 79, "login": "fearoffish", "email": "me@fearof.fish", "created_at": "2008-01-29T08:43:10Z" },
  { "id": 80, "login": "ry", "email": "ry@tinyclouds.org", "created_at": "2008-01-29T08:50:34Z" },
  { "id": 81, "login": "engineyard", "email": null, "created_at": "2008-01-29T09:51:30Z" },
  { "id": 82, "login": "jsierles", "email": null, "created_at": "2008-01-29T11:10:25Z" },
  { "id": 83, "login": "tweibley", "email": null, "created_at": "2008-01-29T13:52:07Z" },
  { "id": 84, "login": "peimei", "email": "james@railsjitsu.com", "created_at": "2008-01-29T15:44:11Z" },
  { "id": 85, "login": "brixen", "email": "brixen@gmail.com", "created_at": "2008-01-29T16:47:55Z" },
  { "id": 87, "login": "tmornini", "email": null, "created_at": "2008-01-29T18:43:39Z" },
  { "id": 88, "login": "outerim", "email": "lee@outerim.com", "created_at": "2008-01-29T18:48:32Z" },
  { "id": 89, "login": "daksis", "email": null, "created_at": "2008-01-29T19:18:16Z" },
  { "id": 90, "login": "sr", "email": "me@simonrozet.com", "created_at": "2008-01-29T20:37:53Z" },
  { "id": 91, "login": "lifo", "email": null, "created_at": "2008-01-29T23:09:30Z" },
  { "id": 92, "login": "rsl", "email": "sconds@gmail.com", "created_at": "2008-01-29T23:13:36Z" },
  { "id": 93, "login": "imownbey", "email": null, "created_at": "2008-01-29T23:13:44Z" },
  { "id": 94, "login": "dylanegan", "email": null, "created_at": "2008-01-29T23:15:18Z" },
  { "id": 95, "login": "jm", "email": "jeremymcanally@gmail.com", "created_at": "2008-01-29T23:15:32Z" },
  { "id": 100, "login": "kmarsh", "email": "kevin.marsh@gmail.com", "created_at": "2008-01-29T23:48:24Z" },
  { "id": 101, "login": "jvantuyl", "email": "jayson@aggressive.ly", "created_at": "2008-01-30T01:11:50Z" },
  { "id": 102, "login": "BrianTheCoder", "email": "wbsmith83@gmail.com", "created_at": "2008-01-30T02:22:32Z" },
  { "id": 103, "login": "freeformz", "email": "freeformz@gmail.com", "created_at": "2008-01-30T06:19:57Z" },
  { "id": 104, "login": "hassox", "email": "dneighman@gmail.com", "created_at": "2008-01-30T06:31:06Z" },
  { "id": 105, "login": "automatthew", "email": "automatthew@gmail.com", "created_at": "2008-01-30T19:00:58Z" },
  { "id": 106, "login": "queso", "email": "Joshua.owens@gmail.com", "created_at": "2008-01-30T19:48:45Z" },
  { "id": 107, "login": "lancecarlson", "email": null, "created_at": "2008-01-30T19:53:29Z" },
  { "id": 108, "login": "drnic", "email": "drnicwilliams@gmail.com", "created_at": "2008-01-30T23:19:18Z" },
  { "id": 109, "login": "lukesutton", "email": null, "created_at": "2008-01-31T04:01:02Z" },
  { "id": 110, "login": "danwrong", "email": null, "created_at": "2008-01-31T08:51:31Z" },
  { "id": 111, "login": "HamptonMakes", "email": "hampton@hamptoncatlin.com", "created_at": "2008-01-31T17:03:51Z" },
  { "id": 112, "login": "jfrost", "email": null, "created_at": "2008-01-31T22:14:27Z" },
  { "id": 113, "login": "mattetti", "email": null, "created_at": "2008-01-31T22:56:31Z" },
  { "id": 114, "login": "ctennis", "email": "c@leb.tennis", "created_at": "2008-01-31T23:43:14Z" },
  { "id": 115, "login": "lawrencepit", "email": "lawrence.pit@gmail.com", "created_at": "2008-01-31T23:57:16Z" },
  { "id": 116, "login": "marcjeanson", "email": "github@marcjeanson.com", "created_at": "2008-02-01T01:27:19Z" },
  { "id": 117, "login": "grempe", "email": null, "created_at": "2008-02-01T04:12:42Z" },
  { "id": 118, "login": "peterc", "email": "git@peterc.org", "created_at": "2008-02-02T01:00:36Z" },
  { "id": 119, "login": "ministrycentered", "email": null, "created_at": "2008-02-02T03:50:26Z" },
  { "id": 120, "login": "afarnham", "email": null, "created_at": "2008-02-02T05:11:03Z" },
  { "id": 121, "login": "up_the_irons", "email": null, "created_at": "2008-02-02T10:59:51Z" },
  { "id": 122, "login": "cristibalan", "email": "cristibalan@gmail.com", "created_at": "2008-02-02T11:29:45Z" },
  { "id": 123, "login": "heavysixer", "email": null, "created_at": "2008-02-02T15:06:53Z" },
  { "id": 124, "login": "brosner", "email": "brosner@gmail.com", "created_at": "2008-02-02T19:03:54Z" },
  { "id": 125, "login": "danielmorrison", "email": "daniel@collectiveidea.com", "created_at": "2008-02-02T19:46:35Z" },
  { "id": 126, "login": "danielharan", "email": "chebuctonian@gmail.com", "created_at": "2008-02-02T21:42:21Z" },
  { "id": 127, "login": "kvnsmth", "email": null, "created_at": "2008-02-02T22:00:03Z" },
  { "id": 128, "login": "collectiveidea", "email": "info@collectiveidea.com", "created_at": "2008-02-02T22:34:46Z" },
  { "id": 129, "login": "canadaduane", "email": "duane.johnson@gmail.com", "created_at": "2008-02-02T23:25:39Z" },
  { "id": 130, "login": "corasaurus-hex", "email": "cora@sutton.me", "created_at": "2008-02-03T04:20:22Z" },
  { "id": 131, "login": "dstrelau", "email": null, "created_at": "2008-02-03T14:59:12Z" },
  { "id": 132, "login": "sunny", "email": "sunny@sunfox.org", "created_at": "2008-02-03T15:43:43Z" },
  { "id": 133, "login": "dkubb", "email": "github@dan.kubb.ca", "created_at": "2008-02-03T20:40:13Z" },
  { "id": 134, "login": "jnicklas", "email": "jonas@jnicklas.com", "created_at": "2008-02-03T20:43:50Z" },
  { "id": 135, "login": "richcollins", "email": "richcollins@gmail.com", "created_at": "2008-02-03T21:11:25Z" }
]
```
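Each entry above mirrors the `GithubUser` struct declared in the seeder change below. A hedged sketch of deserializing one of these records with serde (assuming `serde`, `serde_json`, and chrono with its `serde` feature, as the seeder's imports suggest):

```rust
use chrono::{DateTime, Utc};
use serde::Deserialize;

// Field-for-field mirror of the seeder's GithubUser struct below;
// "email" is null for some entries, so it maps to Option<String>.
#[derive(Debug, Deserialize)]
struct GithubUser {
    id: i32,
    login: String,
    email: Option<String>,
    created_at: DateTime<Utc>,
}

fn main() -> Result<(), serde_json::Error> {
    let json = r#"{ "id": 1, "login": "mojombo", "email": "tom@mojombo.com", "created_at": "2007-10-20T05:24:19Z" }"#;
    let user: GithubUser = serde_json::from_str(json)?;
    assert_eq!(user.login, "mojombo");
    assert!(user.email.is_some());
    Ok(())
}
```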
```diff
@@ -364,17 +364,19 @@ pub async fn post_panic(
 }

 fn report_to_slack(panic: &Panic) -> bool {
-    if panic.os_name == "Linux" {
-        if panic.payload.contains("ERROR_SURFACE_LOST_KHR") {
-            return false;
-        }
+    if panic.payload.contains("ERROR_SURFACE_LOST_KHR") {
+        return false;
+    }

-        if panic
-            .payload
-            .contains("GPU has crashed, and no debug information is available")
-        {
-            return false;
-        }
+    if panic.payload.contains("ERROR_INITIALIZATION_FAILED") {
+        return false;
+    }

+    if panic
+        .payload
+        .contains("GPU has crashed, and no debug information is available")
+    {
+        return false;
+    }

     true
```
```diff
@@ -298,6 +298,12 @@ impl Database {
         result
     }

+    /// Returns all feature flags.
+    pub async fn list_feature_flags(&self) -> Result<Vec<feature_flag::Model>> {
+        self.transaction(|tx| async move { Ok(feature_flag::Entity::find().all(&*tx).await?) })
+            .await
+    }
+
     /// Creates a new feature flag.
     pub async fn create_user_flag(&self, flag: &str, enabled_for_all: bool) -> Result<FlagId> {
         self.transaction(|tx| async move {
```
```diff
@@ -4,10 +4,13 @@ use anyhow::Context;
 use chrono::{DateTime, Utc};
 use db::Database;
 use serde::{de::DeserializeOwned, Deserialize};
-use std::{fmt::Write, fs, path::Path};
+use std::{fs, path::Path};

 use crate::Config;

+/// A GitHub user.
+///
+/// This representation corresponds to the entries in the `seed/github_users.json` file.
 #[derive(Debug, Deserialize)]
 struct GithubUser {
     id: i32,
@@ -18,12 +21,10 @@ struct GithubUser {

 #[derive(Deserialize)]
 struct SeedConfig {
-    // Which users to create as admins.
+    /// Which users to create as admins.
     admins: Vec<String>,
-    // Which channels to create (all admins are invited to all channels)
+    /// Which channels to create (all admins are invited to all channels).
     channels: Vec<String>,
-    // Number of random users to create from the Github API
-    number_of_users: Option<usize>,
 }

 pub async fn seed(config: &Config, db: &Database, force: bool) -> anyhow::Result<()> {
@@ -47,11 +48,21 @@ pub async fn seed(config: &Config, db: &Database, force: bool) -> anyhow::Result
     let flag_names = ["remoting", "language-models"];
     let mut flags = Vec::new();

+    let existing_feature_flags = db.list_feature_flags().await?;
+
     for flag_name in flag_names {
+        if existing_feature_flags
+            .iter()
+            .any(|flag| flag.flag == flag_name)
+        {
+            log::info!("Flag {flag_name:?} already exists");
+            continue;
+        }
+
         let flag = db
             .create_user_flag(flag_name, false)
             .await
-            .unwrap_or_else(|_| panic!("failed to create flag: '{flag_name}'"));
+            .unwrap_or_else(|err| panic!("failed to create flag: '{flag_name}': {err}"));
         flags.push(flag);
     }
@@ -106,44 +117,29 @@ pub async fn seed(config: &Config, db: &Database, force: bool) -> anyhow::Result
         }
     }

-    // TODO: Fix this later
-    if let Some(number_of_users) = seed_config.number_of_users {
-        // Fetch 100 other random users from GitHub and insert them into the database
-        // (for testing autocompleters, etc.)
-        let mut user_count = db
-            .get_all_users(0, 200)
-            .await
-            .expect("failed to load users from db")
-            .len();
-        let mut last_user_id = None;
-        while user_count < number_of_users {
-            let mut uri = "https://api.github.com/users?per_page=100".to_string();
-            if let Some(last_user_id) = last_user_id {
-                write!(&mut uri, "&since={}", last_user_id).unwrap();
-            }
-            let users = fetch_github::<Vec<GithubUser>>(&client, &uri).await;
+    let github_users_filepath = seed_path.parent().unwrap().join("seed/github_users.json");
+    let github_users: Vec<GithubUser> =
+        serde_json::from_str(&fs::read_to_string(github_users_filepath)?)?;

-            for github_user in users {
-                last_user_id = Some(github_user.id);
-                user_count += 1;
-                let user = db
-                    .get_or_create_user_by_github_account(
-                        &github_user.login,
-                        github_user.id,
-                        github_user.email.as_deref(),
-                        github_user.created_at,
-                        None,
-                    )
-                    .await
-                    .expect("failed to insert user");
-
-                for flag in &flags {
-                    db.add_user_flag(user.id, *flag).await.context(format!(
-                        "Unable to enable flag '{}' for user '{}'",
-                        flag, user.id
-                    ))?;
-                }
-            }
+    for github_user in github_users {
+        log::info!("Seeding {:?} from GitHub", github_user.login);
+
+        let user = db
+            .get_or_create_user_by_github_account(
+                &github_user.login,
+                github_user.id,
+                github_user.email.as_deref(),
+                github_user.created_at,
+                None,
+            )
+            .await
+            .expect("failed to insert user");
+
+        for flag in &flags {
+            db.add_user_flag(user.id, *flag).await.context(format!(
+                "Unable to enable flag '{}' for user '{}'",
+                flag, user.id
+            ))?;
+        }
     }
```
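The seeding loop above now checks existing flags before creating new ones, making repeated runs idempotent. A minimal sketch of that pattern against a hypothetical in-memory flag store (the real code goes through the collab `Database`):

```rust
use std::collections::HashSet;

// Hypothetical in-memory stand-in for the collab database.
struct FlagStore {
    flags: HashSet<String>,
}

impl FlagStore {
    fn list_feature_flags(&self) -> Vec<String> {
        self.flags.iter().cloned().collect()
    }

    fn create_user_flag(&mut self, flag: &str) -> Result<(), String> {
        if !self.flags.insert(flag.to_string()) {
            return Err(format!("flag '{flag}' already exists"));
        }
        Ok(())
    }
}

// List once, skip flags that already exist, create the rest.
fn seed_flags(db: &mut FlagStore, flag_names: &[&str]) {
    let existing = db.list_feature_flags();
    for flag_name in flag_names {
        if existing.iter().any(|flag| flag == flag_name) {
            println!("Flag {flag_name:?} already exists");
            continue;
        }
        db.create_user_flag(flag_name)
            .unwrap_or_else(|err| panic!("failed to create flag: '{flag_name}': {err}"));
    }
}

fn main() {
    let mut db = FlagStore {
        flags: HashSet::from(["remoting".to_string()]),
    };
    seed_flags(&mut db, &["remoting", "language-models"]);
    assert!(db.flags.contains("language-models"));
}
```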
```diff
@@ -297,7 +297,7 @@ fn assert_remote_selections(
     cx: &mut ViewContext<Editor>,
 ) {
     let snapshot = editor.snapshot(cx);
-    let range = Anchor::Start..Anchor::End;
+    let range = Anchor::min()..Anchor::max();
     let remote_selections = snapshot
         .remote_selections_in_range(&range, editor.collaboration_hub().unwrap(), cx)
         .map(|s| {
```
```diff
@@ -7,18 +7,12 @@ use collections::HashMap;
 use editor::{
     actions::{
         ConfirmCodeAction, ConfirmCompletion, ConfirmRename, ContextMenuFirst, Redo, Rename,
-        RevertSelectedHunks, ToggleCodeActions, Undo,
-    },
-    display_map::DisplayRow,
-    test::{
-        editor_hunks,
-        editor_test_context::{AssertionContextManager, EditorTestContext},
-        expanded_hunks, expanded_hunks_background_highlights,
+        ToggleCodeActions, Undo,
     },
+    test::editor_test_context::{AssertionContextManager, EditorTestContext},
     Editor,
 };
 use futures::StreamExt;
 use git::diff::DiffHunkStatus;
 use gpui::{TestAppContext, UpdateGlobal, VisualContext, VisualTestContext};
 use indoc::indoc;
 use language::{
```
```diff
@@ -1970,288 +1964,6 @@ async fn test_inlay_hint_refresh_is_forwarded(
     });
 }

-#[gpui::test]
-async fn test_multiple_hunk_types_revert(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) {
-    let mut server = TestServer::start(cx_a.executor()).await;
-    let client_a = server.create_client(cx_a, "user_a").await;
-    let client_b = server.create_client(cx_b, "user_b").await;
-    server
-        .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
-        .await;
-    let active_call_a = cx_a.read(ActiveCall::global);
-    let active_call_b = cx_b.read(ActiveCall::global);
-
-    cx_a.update(editor::init);
-    cx_b.update(editor::init);
-
-    client_a.language_registry().add(rust_lang());
-    client_b.language_registry().add(rust_lang());
-
-    let base_text = indoc! {r#"struct Row;
-        struct Row1;
-        struct Row2;
-
-        struct Row4;
-        struct Row5;
-        struct Row6;
-
-        struct Row8;
-        struct Row9;
-        struct Row10;"#};
-
-    client_a
-        .fs()
-        .insert_tree(
-            "/a",
-            json!({
-                "main.rs": base_text,
-            }),
-        )
-        .await;
-    let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
-    active_call_a
-        .update(cx_a, |call, cx| call.set_location(Some(&project_a), cx))
-        .await
-        .unwrap();
-    let project_id = active_call_a
-        .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
-        .await
-        .unwrap();
-
-    let project_b = client_b.build_dev_server_project(project_id, cx_b).await;
-    active_call_b
-        .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
-        .await
-        .unwrap();
-
-    let (workspace_a, cx_a) = client_a.build_workspace(&project_a, cx_a);
-    let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b);
-
-    let editor_a = workspace_a
-        .update(cx_a, |workspace, cx| {
-            workspace.open_path((worktree_id, "main.rs"), None, true, cx)
-        })
-        .await
-        .unwrap()
-        .downcast::<Editor>()
-        .unwrap();
-
-    let editor_b = workspace_b
-        .update(cx_b, |workspace, cx| {
-            workspace.open_path((worktree_id, "main.rs"), None, true, cx)
-        })
-        .await
-        .unwrap()
-        .downcast::<Editor>()
-        .unwrap();
-
-    let mut editor_cx_a = EditorTestContext {
-        cx: cx_a.clone(),
-        window: cx_a.handle(),
-        editor: editor_a,
-        assertion_cx: AssertionContextManager::new(),
-    };
-    let mut editor_cx_b = EditorTestContext {
-        cx: cx_b.clone(),
-        window: cx_b.handle(),
-        editor: editor_b,
-        assertion_cx: AssertionContextManager::new(),
-    };
-
-    // host edits the file, that differs from the base text, producing diff hunks
-    editor_cx_a.set_state(indoc! {r#"struct Row;
-        struct Row0.1;
-        struct Row0.2;
-        struct Row1;
-
-        struct Row4;
-        struct Row5444;
-        struct Row6;
-
-        struct Row9;
-        struct Row1220;ˇ"#});
-    editor_cx_a.update_editor(|editor, cx| {
-        editor
-            .buffer()
-            .read(cx)
-            .as_singleton()
-            .unwrap()
-            .update(cx, |buffer, cx| {
-                buffer.set_diff_base(Some(base_text.into()), cx);
-            });
-    });
-    editor_cx_b.update_editor(|editor, cx| {
-        editor
-            .buffer()
-            .read(cx)
-            .as_singleton()
-            .unwrap()
-            .update(cx, |buffer, cx| {
-                buffer.set_diff_base(Some(base_text.into()), cx);
-            });
-    });
-    cx_a.executor().run_until_parked();
-    cx_b.executor().run_until_parked();
-
-    // the client selects a range in the updated buffer, expands it to see the diff for each hunk in the selection
-    // the host does not see the diffs toggled
-    editor_cx_b.set_selections_state(indoc! {r#"«ˇstruct Row;
-        struct Row0.1;
-        struct Row0.2;
-        struct Row1;
-
-        struct Row4;
-        struct Row5444;
-        struct Row6;
-
-        struct R»ow9;
-        struct Row1220;"#});
-    editor_cx_b
-        .update_editor(|editor, cx| editor.toggle_hunk_diff(&editor::actions::ToggleHunkDiff, cx));
-    cx_a.executor().run_until_parked();
-    cx_b.executor().run_until_parked();
-    editor_cx_a.update_editor(|editor, cx| {
-        let snapshot = editor.snapshot(cx);
-        let all_hunks = editor_hunks(editor, &snapshot, cx);
-        let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx);
-        assert_eq!(expanded_hunks_background_highlights(editor, cx), Vec::new());
-        assert_eq!(
-            all_hunks,
-            vec![
-                (
-                    "".to_string(),
-                    DiffHunkStatus::Added,
-                    DisplayRow(1)..DisplayRow(3)
-                ),
-                (
-                    "struct Row2;\n".to_string(),
-                    DiffHunkStatus::Removed,
-                    DisplayRow(4)..DisplayRow(4)
-                ),
-                (
-                    "struct Row5;\n".to_string(),
-                    DiffHunkStatus::Modified,
-                    DisplayRow(6)..DisplayRow(7)
-                ),
-                (
-                    "struct Row8;\n".to_string(),
-                    DiffHunkStatus::Removed,
-                    DisplayRow(9)..DisplayRow(9)
-                ),
-                (
-                    "struct Row10;".to_string(),
-                    DiffHunkStatus::Modified,
-                    DisplayRow(10)..DisplayRow(10),
-                ),
-            ]
-        );
-        assert_eq!(all_expanded_hunks, Vec::new());
-    });
-    editor_cx_b.update_editor(|editor, cx| {
-        let snapshot = editor.snapshot(cx);
-        let all_hunks = editor_hunks(editor, &snapshot, cx);
-        let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx);
-        assert_eq!(
-            expanded_hunks_background_highlights(editor, cx),
-            vec![DisplayRow(1)..=DisplayRow(2), DisplayRow(8)..=DisplayRow(8)],
-        );
-        assert_eq!(
-            all_hunks,
-            vec![
-                (
-                    "".to_string(),
-                    DiffHunkStatus::Added,
-                    DisplayRow(1)..DisplayRow(3)
-                ),
-                (
-                    "struct Row2;\n".to_string(),
-                    DiffHunkStatus::Removed,
-                    DisplayRow(5)..DisplayRow(5)
-                ),
-                (
-                    "struct Row5;\n".to_string(),
-                    DiffHunkStatus::Modified,
-                    DisplayRow(8)..DisplayRow(9)
-                ),
-                (
-                    "struct Row8;\n".to_string(),
-                    DiffHunkStatus::Removed,
-                    DisplayRow(12)..DisplayRow(12)
-                ),
-                (
-                    "struct Row10;".to_string(),
-                    DiffHunkStatus::Modified,
-                    DisplayRow(13)..DisplayRow(13),
-                ),
-            ]
-        );
-        assert_eq!(all_expanded_hunks, &all_hunks[..all_hunks.len() - 1]);
-    });
-
-    // the client reverts the hunks, removing the expanded diffs too
-    // both host and the client observe the reverted state (with one hunk left, not covered by client's selection)
-    editor_cx_b.update_editor(|editor, cx| {
-        editor.revert_selected_hunks(&RevertSelectedHunks, cx);
-    });
-    cx_a.executor().run_until_parked();
-    cx_b.executor().run_until_parked();
-    editor_cx_a.update_editor(|editor, cx| {
-        let snapshot = editor.snapshot(cx);
-        let all_hunks = editor_hunks(editor, &snapshot, cx);
-        let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx);
-        assert_eq!(expanded_hunks_background_highlights(editor, cx), Vec::new());
-        assert_eq!(
-            all_hunks,
-            vec![(
-                "struct Row10;".to_string(),
-                DiffHunkStatus::Modified,
-                DisplayRow(10)..DisplayRow(10),
-            )]
-        );
-        assert_eq!(all_expanded_hunks, Vec::new());
-    });
-    editor_cx_b.update_editor(|editor, cx| {
-        let snapshot = editor.snapshot(cx);
-        let all_hunks = editor_hunks(editor, &snapshot, cx);
-        let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx);
-        assert_eq!(
-            expanded_hunks_background_highlights(editor, cx),
-            vec![DisplayRow(5)..=DisplayRow(5)]
-        );
-        assert_eq!(
-            all_hunks,
-            vec![(
-                "struct Row10;".to_string(),
-                DiffHunkStatus::Modified,
-                DisplayRow(10)..DisplayRow(10),
-            )]
-        );
-        assert_eq!(all_expanded_hunks, Vec::new());
-    });
-    editor_cx_a.assert_editor_state(indoc! {r#"struct Row;
-        struct Row1;
-        struct Row2;
-
-        struct Row4;
-        struct Row5;
-        struct Row6;
-
-        struct Row8;
-        struct Row9;
-        struct Row1220;ˇ"#});
-    editor_cx_b.assert_editor_state(indoc! {r#"«ˇstruct Row;
-        struct Row1;
-        struct Row2;
-
-        struct Row4;
-        struct Row5;
-        struct Row6;
-
-        struct Row8;
-        struct R»ow9;
-        struct Row1220;"#});
-}
-
 #[gpui::test(iterations = 10)]
 async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) {
     let mut server = TestServer::start(cx_a.executor()).await;
```
@@ -28,8 +28,8 @@ use live_kit_client::MacOSDisplay;
|
||||
use lsp::LanguageServerId;
|
||||
use parking_lot::Mutex;
|
||||
use project::{
|
||||
search::SearchQuery, search::SearchResult, DiagnosticSummary, FormatTrigger, HoverBlockKind,
|
||||
Project, ProjectPath,
|
||||
lsp_store::FormatTrigger, search::SearchQuery, search::SearchResult, DiagnosticSummary,
|
||||
HoverBlockKind, Project, ProjectPath,
|
||||
};
|
||||
use rand::prelude::*;
|
||||
use serde_json::json;
|
||||
|
||||
@@ -298,8 +298,7 @@ impl RandomizedTest for ProjectCollaborationTest {
|
||||
continue;
|
||||
};
|
||||
let project_root_name = root_name_for_project(&project, cx);
|
||||
let is_local =
|
||||
project.read_with(cx, |project, _| project.is_local_or_ssh());
|
||||
let is_local = project.read_with(cx, |project, _| project.is_local());
|
||||
let worktree = project.read_with(cx, |project, cx| {
|
||||
project
|
||||
.worktrees(cx)
|
||||
@@ -335,7 +334,7 @@ impl RandomizedTest for ProjectCollaborationTest {
|
||||
continue;
|
||||
};
|
||||
let project_root_name = root_name_for_project(&project, cx);
|
||||
let is_local = project.read_with(cx, |project, _| project.is_local_or_ssh());
|
||||
let is_local = project.read_with(cx, |project, _| project.is_local());
|
||||
|
||||
match rng.gen_range(0..100_u32) {
|
||||
// Manipulate an existing buffer
|
||||
@@ -1256,7 +1255,7 @@ impl RandomizedTest for ProjectCollaborationTest {
|
||||
let buffers = client.buffers().clone();
|
||||
for (guest_project, guest_buffers) in &buffers {
|
||||
let project_id = if guest_project.read_with(client_cx, |project, _| {
|
||||
project.is_local_or_ssh() || project.is_disconnected()
|
||||
project.is_local() || project.is_disconnected()
|
||||
}) {
|
||||
continue;
|
||||
} else {
|
||||
@@ -1560,9 +1559,7 @@ async fn ensure_project_shared(
|
||||
let first_root_name = root_name_for_project(project, cx);
|
||||
let active_call = cx.read(ActiveCall::global);
|
||||
if active_call.read_with(cx, |call, _| call.room().is_some())
|
||||
&& project.read_with(cx, |project, _| {
|
||||
project.is_local_or_ssh() && !project.is_shared()
|
||||
})
|
||||
&& project.read_with(cx, |project, _| project.is_local() && !project.is_shared())
|
||||
{
|
||||
match active_call
|
||||
.update(cx, |call, cx| call.share_project(project.clone(), cx))
|
||||
|
||||
@@ -3,6 +3,7 @@ use call::ActiveCall;
|
||||
use fs::{FakeFs, Fs as _};
|
||||
use gpui::{Context as _, TestAppContext};
|
||||
use language::language_settings::all_language_settings;
|
||||
use project::ProjectPath;
|
||||
use remote::SshSession;
|
||||
use remote_server::HeadlessProject;
|
||||
use serde_json::json;
|
||||
@@ -108,14 +109,36 @@ async fn test_sharing_an_ssh_remote_project(
|
||||
});
|
||||
|
||||
project_b
|
||||
.update(cx_b, |project, cx| project.save_buffer(buffer_b, cx))
|
||||
.update(cx_b, |project, cx| {
|
||||
project.save_buffer_as(
|
||||
buffer_b.clone(),
|
||||
ProjectPath {
|
||||
worktree_id: worktree_id.to_owned(),
|
||||
path: Arc::from(Path::new("src/renamed.rs")),
|
||||
},
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(
|
||||
remote_fs
|
||||
.load("/code/project1/src/lib.rs".as_ref())
|
||||
.load("/code/project1/src/renamed.rs".as_ref())
|
||||
.await
|
||||
.unwrap(),
|
||||
"fn one() -> usize { 100 }"
|
||||
);
|
||||
cx_b.run_until_parked();
|
||||
cx_b.update(|cx| {
|
||||
assert_eq!(
|
||||
buffer_b
|
||||
.read(cx)
|
||||
.file()
|
||||
.unwrap()
|
||||
.path()
|
||||
.to_string_lossy()
|
||||
.to_string(),
|
||||
"src/renamed.rs".to_string()
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -21,7 +21,7 @@ use git::GitHostingProviderRegistry;
|
||||
use gpui::{BackgroundExecutor, Context, Model, Task, TestAppContext, View, VisualTestContext};
|
||||
use http_client::FakeHttpClient;
|
||||
use language::LanguageRegistry;
|
||||
use node_runtime::FakeNodeRuntime;
|
||||
use node_runtime::NodeRuntime;
|
||||
use notifications::NotificationStore;
|
||||
use parking_lot::Mutex;
|
||||
use project::{Project, WorktreeId};
|
||||
@@ -278,7 +278,7 @@ impl TestServer {
|
||||
languages: language_registry,
|
||||
fs: fs.clone(),
|
||||
build_window_options: |_, _| Default::default(),
|
||||
node_runtime: FakeNodeRuntime::new(),
|
||||
node_runtime: NodeRuntime::unavailable(),
|
||||
session,
|
||||
});
|
||||
|
||||
@@ -408,7 +408,7 @@ impl TestServer {
|
||||
languages: language_registry,
|
||||
fs: fs.clone(),
|
||||
build_window_options: |_, _| Default::default(),
|
||||
node_runtime: FakeNodeRuntime::new(),
|
||||
node_runtime: NodeRuntime::unavailable(),
|
||||
session,
|
||||
});
|
||||
|
||||
|
||||
@@ -57,7 +57,7 @@ pub fn init(
|
||||
new_server_id: LanguageServerId,
|
||||
fs: Arc<dyn Fs>,
|
||||
http: Arc<dyn HttpClient>,
|
||||
node_runtime: Arc<dyn NodeRuntime>,
|
||||
node_runtime: NodeRuntime,
|
||||
cx: &mut AppContext,
|
||||
) {
|
||||
copilot_chat::init(fs, http.clone(), cx);
|
||||
@@ -302,7 +302,7 @@ pub struct Completion {
|
||||
|
||||
pub struct Copilot {
|
||||
http: Arc<dyn HttpClient>,
|
||||
node_runtime: Arc<dyn NodeRuntime>,
|
||||
node_runtime: NodeRuntime,
|
||||
server: CopilotServer,
|
||||
buffers: HashSet<WeakModel<Buffer>>,
|
||||
server_id: LanguageServerId,
|
||||
@@ -334,7 +334,7 @@ impl Copilot {
|
||||
fn start(
|
||||
new_server_id: LanguageServerId,
|
||||
http: Arc<dyn HttpClient>,
|
||||
node_runtime: Arc<dyn NodeRuntime>,
|
||||
node_runtime: NodeRuntime,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Self {
|
||||
let mut this = Self {
|
||||
@@ -392,7 +392,7 @@ impl Copilot {
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
pub fn fake(cx: &mut gpui::TestAppContext) -> (Model<Self>, lsp::FakeLanguageServer) {
|
||||
use lsp::FakeLanguageServer;
|
||||
use node_runtime::FakeNodeRuntime;
|
||||
use node_runtime::NodeRuntime;
|
||||
|
||||
let (server, fake_server) = FakeLanguageServer::new(
|
||||
LanguageServerId(0),
|
||||
@@ -406,7 +406,7 @@ impl Copilot {
|
||||
cx.to_async(),
|
||||
);
|
||||
let http = http_client::FakeHttpClient::create(|_| async { unreachable!() });
|
||||
let node_runtime = FakeNodeRuntime::new();
|
||||
let node_runtime = NodeRuntime::unavailable();
|
||||
let this = cx.new_model(|cx| Self {
|
||||
server_id: LanguageServerId(0),
|
||||
http: http.clone(),
|
||||
@@ -425,7 +425,7 @@ impl Copilot {
|
||||
async fn start_language_server(
|
||||
new_server_id: LanguageServerId,
|
||||
http: Arc<dyn HttpClient>,
|
||||
node_runtime: Arc<dyn NodeRuntime>,
|
||||
node_runtime: NodeRuntime,
|
||||
this: WeakModel<Self>,
|
||||
mut cx: AsyncAppContext,
|
||||
) {
|
||||
|
||||
@@ -69,7 +69,7 @@ impl CreaseSnapshot {
|
||||
&'a self,
|
||||
range: Range<MultiBufferRow>,
|
||||
snapshot: &'a MultiBufferSnapshot,
|
||||
) -> impl '_ + Iterator<Item = &'a Crease> {
|
||||
) -> impl 'a + Iterator<Item = &'a Crease> {
|
||||
let start = snapshot.anchor_before(Point::new(range.start.0, 0));
|
||||
let mut cursor = self.creases.cursor::<ItemSummary>(snapshot);
|
||||
cursor.seek(&start, Bias::Left, snapshot);
|
||||
@@ -271,7 +271,7 @@ pub struct ItemSummary {
|
||||
impl Default for ItemSummary {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
range: Anchor::Start..Anchor::Start,
|
||||
range: Anchor::min()..Anchor::min(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -376,7 +376,7 @@ impl FoldMap {
|
||||
.folds
|
||||
.cursor::<FoldRange>(&inlay_snapshot.buffer);
|
||||
folds_cursor.seek(
|
||||
&FoldRange(anchor..Anchor::End),
|
||||
&FoldRange(anchor..Anchor::max()),
|
||||
Bias::Left,
|
||||
&inlay_snapshot.buffer,
|
||||
);
|
||||
@@ -997,7 +997,7 @@ impl DerefMut for FoldRange {
|
||||
|
||||
impl Default for FoldRange {
|
||||
fn default() -> Self {
|
||||
Self(Anchor::Start..Anchor::End)
|
||||
Self(Anchor::min()..Anchor::max())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1027,10 +1027,10 @@ pub struct FoldSummary {
|
||||
impl Default for FoldSummary {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
start: Anchor::Start,
|
||||
end: Anchor::End,
|
||||
min_start: Anchor::End,
|
||||
max_end: Anchor::Start,
|
||||
start: Anchor::min(),
|
||||
end: Anchor::max(),
|
||||
min_start: Anchor::max(),
|
||||
max_end: Anchor::min(),
|
||||
count: 0,
|
||||
}
|
||||
}
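
Note: several hunks above replace the public `Anchor::Start` / `Anchor::End` variants with `Anchor::min()` / `Anchor::max()` constructor calls. A minimal sketch of that refactoring pattern, using a hypothetical representation rather than Zed's actual `Anchor` internals: hiding the extremes behind constructors lets the type change its representation later without touching call sites.

    // Sketch only: the `offset` field is an assumption for illustration.
    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    pub struct Anchor {
        offset: usize,
    }

    impl Anchor {
        /// Anchor before all content; stands in for a public `Anchor::Start` variant.
        pub fn min() -> Self {
            Anchor { offset: usize::MIN }
        }

        /// Anchor after all content; stands in for a public `Anchor::End` variant.
        pub fn max() -> Self {
            Anchor { offset: usize::MAX }
        }
    }

    fn main() {
        // Call sites like `Anchor::min()..Anchor::max()` keep compiling even if
        // the internal representation later changes (e.g. gains a buffer_id).
        let whole_buffer = Anchor::min()..Anchor::max();
        assert!(whole_buffer.start != whole_buffer.end);
    }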

@@ -71,7 +71,6 @@ pub use element::{
use futures::{future, FutureExt};
use fuzzy::{StringMatch, StringMatchCandidate};
use git::blame::GitBlame;
use git::diff_hunk_to_display;
use gpui::{
div, impl_actions, point, prelude::*, px, relative, size, uniform_list, Action, AnyElement,
AppContext, AsyncWindowContext, AvailableSpace, BackgroundExecutor, Bounds, ClipboardEntry,
@@ -84,8 +83,8 @@ use gpui::{
};
use highlight_matching_bracket::refresh_matching_bracket_highlights;
use hover_popover::{hide_hover, HoverState};
use hunk_diff::ExpandedHunks;
pub(crate) use hunk_diff::HoveredHunk;
use hunk_diff::{diff_hunk_to_display, ExpandedHunks};
use indent_guides::ActiveIndentGuidesState;
use inlay_hint_cache::{InlayHintCache, InlaySplice, InvalidationStrategy};
pub use inline_completion_provider::*;
@@ -122,8 +121,8 @@ use ordered_float::OrderedFloat;
use parking_lot::{Mutex, RwLock};
use project::project_settings::{GitGutterSetting, ProjectSettings};
use project::{
CodeAction, Completion, CompletionIntent, FormatTrigger, Item, Location, Project, ProjectPath,
ProjectTransaction, TaskSourceKind,
lsp_store::FormatTrigger, CodeAction, Completion, CompletionIntent, Item, Location, Project,
ProjectPath, ProjectTransaction, TaskSourceKind,
};
use rand::prelude::*;
use rpc::{proto::*, ErrorExt};
@@ -155,7 +154,7 @@ use theme::{
};
use ui::{
h_flex, prelude::*, ButtonSize, ButtonStyle, Disclosure, IconButton, IconName, IconSize,
ListItem, Popover, Tooltip,
ListItem, Popover, PopoverMenuHandle, Tooltip,
};
use util::{defer, maybe, post_inc, RangeExt, ResultExt, TryFutureExt};
use workspace::item::{ItemHandle, PreviewTabsSettings};
@@ -563,6 +562,7 @@ pub struct Editor {
nav_history: Option<ItemNavHistory>,
context_menu: RwLock<Option<ContextMenu>>,
mouse_context_menu: Option<MouseContextMenu>,
hunk_controls_menu_handle: PopoverMenuHandle<ui::ContextMenu>,
completion_tasks: Vec<(CompletionId, Task<Option<()>>)>,
signature_help_state: SignatureHelpState,
auto_signature_help: Option<bool>,
@@ -1939,6 +1939,7 @@ impl Editor {
nav_history: None,
context_menu: RwLock::new(None),
mouse_context_menu: None,
hunk_controls_menu_handle: PopoverMenuHandle::default(),
completion_tasks: Default::default(),
signature_help_state: SignatureHelpState::default(),
auto_signature_help: None,
@@ -3101,19 +3102,9 @@ impl Editor {
if self.linked_edit_ranges.is_empty() {
return None;
}

let selection_start_buffer_id = match selection.start {
text::Anchor::Start | text::Anchor::End => None,
text::Anchor::Character { buffer_id, .. } => Some(buffer_id),
};
let selection_end_buffer_id = match selection.end {
text::Anchor::Start | text::Anchor::End => None,
text::Anchor::Character { buffer_id, .. } => Some(buffer_id),
};

let ((base_range, linked_ranges), buffer_snapshot, buffer) = selection_end_buffer_id
.and_then(|end_buffer_id| {
if selection_start_buffer_id != Some(end_buffer_id) {
let ((base_range, linked_ranges), buffer_snapshot, buffer) =
selection.end.buffer_id.and_then(|end_buffer_id| {
if selection.start.buffer_id != Some(end_buffer_id) {
return None;
}
let buffer = self.buffer.read(cx).buffer(end_buffer_id)?;
@@ -3140,8 +3131,8 @@ impl Editor {
continue;
}
if self.selections.disjoint_anchor_ranges().iter().any(|s| {
if s.start.buffer_id != selection_start_buffer_id
|| s.end.buffer_id != selection_end_buffer_id
if s.start.buffer_id != selection.start.buffer_id
|| s.end.buffer_id != selection.end.buffer_id
{
return false;
}
@@ -5024,10 +5015,12 @@ impl Editor {
continue;
}

let range = Anchor::Text {
let range = Anchor {
buffer_id,
excerpt_id,
text_anchor: start,
}..Anchor::Text {
}..Anchor {
buffer_id,
excerpt_id,
text_anchor: end,
};
@@ -5392,23 +5385,6 @@ impl Editor {
}))
}

fn close_hunk_diff_button(
&self,
hunk: HoveredHunk,
row: DisplayRow,
cx: &mut ViewContext<Self>,
) -> IconButton {
IconButton::new(
("close_hunk_diff_indicator", row.0 as usize),
ui::IconName::Close,
)
.shape(ui::IconButtonShape::Square)
.icon_size(IconSize::XSmall)
.icon_color(Color::Muted)
.tooltip(|cx| Tooltip::for_action("Close hunk diff", &ToggleHunkDiff, cx))
.on_click(cx.listener(move |editor, _e, cx| editor.toggle_hovered_hunk(&hunk, cx)))
}

pub fn context_menu_visible(&self) -> bool {
self.context_menu
.read()
@@ -5619,15 +5595,7 @@ impl Editor {
for selection in selections.iter() {
let selection_start = snapshot.anchor_before(selection.start).text_anchor;
let selection_end = snapshot.anchor_after(selection.end).text_anchor;
let selection_start_buffer_id = match selection_start {
text::Anchor::Start | text::Anchor::End => None,
text::Anchor::Character { buffer_id, .. } => Some(buffer_id),
};
let selection_end_buffer_id = match selection_end {
text::Anchor::Start | text::Anchor::End => None,
text::Anchor::Character { buffer_id, .. } => Some(buffer_id),
};
if selection_start_buffer_id != selection_end_buffer_id {
if selection_start.buffer_id != selection_end.buffer_id {
continue;
}
if let Some(ranges) =
@@ -8990,7 +8958,7 @@ impl Editor {
.spawn({
let snapshot = display_snapshot.clone();
async move {
Self::fetch_runnable_ranges(&snapshot, Anchor::Start..Anchor::End)
Self::fetch_runnable_ranges(&snapshot, Anchor::min()..Anchor::max())
}
})
.await;
@@ -9352,32 +9320,42 @@ impl Editor {
}
}

fn go_to_hunk(&mut self, _: &GoToHunk, cx: &mut ViewContext<Self>) {
fn go_to_next_hunk(&mut self, _: &GoToHunk, cx: &mut ViewContext<Self>) {
let snapshot = self
.display_map
.update(cx, |display_map, cx| display_map.snapshot(cx));
let selection = self.selections.newest::<Point>(cx);
self.go_to_hunk_after_position(&snapshot, selection.head(), cx);
}

if !self.seek_in_direction(
&snapshot,
selection.head(),
fn go_to_hunk_after_position(
&mut self,
snapshot: &DisplaySnapshot,
position: Point,
cx: &mut ViewContext<'_, Editor>,
) -> Option<MultiBufferDiffHunk> {
if let Some(hunk) = self.go_to_next_hunk_in_direction(
snapshot,
position,
false,
snapshot.buffer_snapshot.git_diff_hunks_in_range(
MultiBufferRow(selection.head().row + 1)..MultiBufferRow::MAX,
),
snapshot
.buffer_snapshot
.git_diff_hunks_in_range(MultiBufferRow(position.row + 1)..MultiBufferRow::MAX),
cx,
) {
let wrapped_point = Point::zero();
self.seek_in_direction(
&snapshot,
wrapped_point,
true,
snapshot.buffer_snapshot.git_diff_hunks_in_range(
MultiBufferRow(wrapped_point.row + 1)..MultiBufferRow::MAX,
),
cx,
);
return Some(hunk);
}

let wrapped_point = Point::zero();
self.go_to_next_hunk_in_direction(
snapshot,
wrapped_point,
true,
snapshot.buffer_snapshot.git_diff_hunks_in_range(
MultiBufferRow(wrapped_point.row + 1)..MultiBufferRow::MAX,
),
cx,
)
}

fn go_to_prev_hunk(&mut self, _: &GoToPrevHunk, cx: &mut ViewContext<Self>) {
@@ -9386,52 +9364,65 @@ impl Editor {
.update(cx, |display_map, cx| display_map.snapshot(cx));
let selection = self.selections.newest::<Point>(cx);

if !self.seek_in_direction(
&snapshot,
selection.head(),
false,
snapshot.buffer_snapshot.git_diff_hunks_in_range_rev(
MultiBufferRow(0)..MultiBufferRow(selection.head().row),
),
cx,
) {
let wrapped_point = snapshot.buffer_snapshot.max_point();
self.seek_in_direction(
&snapshot,
wrapped_point,
true,
snapshot.buffer_snapshot.git_diff_hunks_in_range_rev(
MultiBufferRow(0)..MultiBufferRow(wrapped_point.row),
),
cx,
);
}
self.go_to_hunk_before_position(&snapshot, selection.head(), cx);
}

fn seek_in_direction(
fn go_to_hunk_before_position(
&mut self,
snapshot: &DisplaySnapshot,
position: Point,
cx: &mut ViewContext<'_, Editor>,
) -> Option<MultiBufferDiffHunk> {
if let Some(hunk) = self.go_to_next_hunk_in_direction(
snapshot,
position,
false,
snapshot
.buffer_snapshot
.git_diff_hunks_in_range_rev(MultiBufferRow(0)..MultiBufferRow(position.row)),
cx,
) {
return Some(hunk);
}

let wrapped_point = snapshot.buffer_snapshot.max_point();
self.go_to_next_hunk_in_direction(
snapshot,
wrapped_point,
true,
snapshot
.buffer_snapshot
.git_diff_hunks_in_range_rev(MultiBufferRow(0)..MultiBufferRow(wrapped_point.row)),
cx,
)
}
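
The two navigation methods above share one wrap-around pattern: search for a hunk past the cursor, and if none is found, retry once from the buffer boundary. A self-contained sketch of that pattern over plain row numbers (the helper name and types are illustrative, not Zed's):

    // First hunk row strictly after `row`, wrapping to the top if none is found.
    // `hunk_rows` must be sorted ascending; returns None only for an empty slice.
    fn next_hunk_row(hunk_rows: &[u32], row: u32) -> Option<u32> {
        hunk_rows
            .iter()
            .copied()
            .find(|&r| r > row) // forward search from the cursor
            .or_else(|| hunk_rows.first().copied()) // wrap around to the first hunk
    }

    fn main() {
        let hunks = [3, 10, 42];
        assert_eq!(next_hunk_row(&hunks, 5), Some(10));
        assert_eq!(next_hunk_row(&hunks, 42), Some(3)); // wrapped
        assert_eq!(next_hunk_row(&[], 7), None);
    }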

fn go_to_next_hunk_in_direction(
&mut self,
snapshot: &DisplaySnapshot,
initial_point: Point,
is_wrapped: bool,
hunks: impl Iterator<Item = MultiBufferDiffHunk>,
cx: &mut ViewContext<Editor>,
) -> bool {
) -> Option<MultiBufferDiffHunk> {
let display_point = initial_point.to_display_point(snapshot);
let mut hunks = hunks
.map(|hunk| diff_hunk_to_display(&hunk, snapshot))
.filter(|hunk| is_wrapped || !hunk.contains_display_row(display_point.row()))
.map(|hunk| (diff_hunk_to_display(&hunk, snapshot), hunk))
.filter(|(display_hunk, _)| {
is_wrapped || !display_hunk.contains_display_row(display_point.row())
})
.dedup();

if let Some(hunk) = hunks.next() {
if let Some((display_hunk, hunk)) = hunks.next() {
self.change_selections(Some(Autoscroll::fit()), cx, |s| {
let row = hunk.start_display_row();
let row = display_hunk.start_display_row();
let point = DisplayPoint::new(row, 0);
s.select_display_ranges([point..point]);
});

true
Some(hunk)
} else {
false
None
}
}

@@ -11532,7 +11523,7 @@ impl Editor {
&'a self,
position: Anchor,
buffer: &'a MultiBufferSnapshot,
) -> impl 'a + Iterator<Item = &Range<Anchor>> {
) -> impl 'a + Iterator<Item = &'a Range<Anchor>> {
let read_highlights = self
.background_highlights
.get(&TypeId::of::<DocumentHighlightRead>())
@@ -11836,7 +11827,7 @@ impl Editor {
.filter_map(|buffer| {
let buffer = buffer.read(cx);
let language = buffer.language()?;
if project.is_local_or_ssh()
if project.is_local()
&& project.language_servers_for_buffer(buffer, cx).count() == 0
{
None
@@ -12489,11 +12480,6 @@ impl Editor {
Some(gpui::Point::new(source_x, source_y))
}

fn gutter_bounds(&self) -> Option<Bounds<Pixels>> {
let bounds = self.last_bounds?;
Some(element::gutter_bounds(bounds, self.gutter_dimensions))
}

pub fn has_active_completions_menu(&self) -> bool {
self.context_menu.read().as_ref().map_or(false, |menu| {
menu.visible() && matches!(menu, ContextMenu::Completions(_))
@@ -12702,7 +12688,7 @@ fn snippet_completions(
return vec![];
}
let snapshot = buffer.read(cx).text_snapshot();
let chunks = snapshot.reversed_chunks_in_range(text::Anchor::Start..buffer_position);
let chunks = snapshot.reversed_chunks_in_range(text::Anchor::MIN..buffer_position);

let mut lines = chunks.lines();
let Some(line_at) = lines.next().filter(|line| !line.is_empty()) else {

@@ -739,9 +739,7 @@ async fn test_navigation_history(cx: &mut TestAppContext) {

// Ensure we don't panic when navigation data contains invalid anchors *and* points.
let mut invalid_anchor = editor.scroll_manager.anchor().anchor;
invalid_anchor.text_anchor = text::Anchor::Start {
buffer_id: BufferId::new(999).unwrap(),
};
invalid_anchor.text_anchor.buffer_id = BufferId::new(999).ok();
let invalid_point = Point::new(9999, 0);
editor.navigate(
Box::new(NavigationData {
@@ -9625,7 +9623,7 @@ async fn go_to_hunk(executor: BackgroundExecutor, cx: &mut gpui::TestAppContext)
cx.update_editor(|editor, cx| {
//Wrap around the bottom of the buffer
for _ in 0..3 {
editor.go_to_hunk(&GoToHunk, cx);
editor.go_to_next_hunk(&GoToHunk, cx);
}
});

@@ -9711,7 +9709,7 @@ async fn go_to_hunk(executor: BackgroundExecutor, cx: &mut gpui::TestAppContext)

//Make sure that the fold only gets one hunk
for _ in 0..4 {
editor.go_to_hunk(&GoToHunk, cx);
editor.go_to_next_hunk(&GoToHunk, cx);
}
});

@@ -11228,7 +11226,7 @@ async fn test_toggle_hunk_diff(executor: BackgroundExecutor, cx: &mut gpui::Test

cx.update_editor(|editor, cx| {
for _ in 0..4 {
editor.go_to_hunk(&GoToHunk, cx);
editor.go_to_next_hunk(&GoToHunk, cx);
editor.toggle_hunk_diff(&ToggleHunkDiff, cx);
}
});
@@ -11251,18 +11249,13 @@ async fn test_toggle_hunk_diff(executor: BackgroundExecutor, cx: &mut gpui::Test
let snapshot = editor.snapshot(cx);
let all_hunks = editor_hunks(editor, &snapshot, cx);
let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx);
assert_eq!(
expanded_hunks_background_highlights(editor, cx),
vec![DisplayRow(1)..=DisplayRow(1), DisplayRow(7)..=DisplayRow(7), DisplayRow(9)..=DisplayRow(9)],
"After expanding, all git additions should be highlighted for Modified (split into added and removed) and Added hunks"
);
assert_eq!(
all_hunks,
vec![
("use some::mod;\n".to_string(), DiffHunkStatus::Modified, DisplayRow(1)..DisplayRow(2)),
("const A: u32 = 42;\n".to_string(), DiffHunkStatus::Removed, DisplayRow(4)..DisplayRow(4)),
(" println!(\"hello\");\n".to_string(), DiffHunkStatus::Modified, DisplayRow(7)..DisplayRow(8)),
("".to_string(), DiffHunkStatus::Added, DisplayRow(9)..DisplayRow(10)),
("use some::mod;\n".to_string(), DiffHunkStatus::Modified, DisplayRow(2)..DisplayRow(3)),
("const A: u32 = 42;\n".to_string(), DiffHunkStatus::Removed, DisplayRow(6)..DisplayRow(6)),
(" println!(\"hello\");\n".to_string(), DiffHunkStatus::Modified, DisplayRow(10)..DisplayRow(11)),
("".to_string(), DiffHunkStatus::Added, DisplayRow(13)..DisplayRow(14)),
],
"After expanding, all hunks' display rows should have shifted by the amount of deleted lines added \
(from modified and removed hunks)"
@@ -11271,6 +11264,11 @@ async fn test_toggle_hunk_diff(executor: BackgroundExecutor, cx: &mut gpui::Test
all_hunks, all_expanded_hunks,
"Editor hunks should not change and all be expanded"
);
assert_eq!(
expanded_hunks_background_highlights(editor, cx),
vec![DisplayRow(2)..=DisplayRow(2), DisplayRow(10)..=DisplayRow(10), DisplayRow(13)..=DisplayRow(13)],
"After expanding, all git additions should be highlighted for Modified (split into added and removed) and Added hunks"
);
});

cx.update_editor(|editor, cx| {
@@ -11313,7 +11311,7 @@ async fn test_toggled_diff_base_change(
const B: u32 = 42;
const C: u32 = 42;

fn main(ˇ) {
fn main() {
println!("hello");

println!("world");
@@ -11358,9 +11356,9 @@ async fn test_toggled_diff_base_change(
DisplayRow(3)..DisplayRow(3)
),
(
"fn main(ˇ) {\n    println!(\"hello\");\n".to_string(),
"    println!(\"hello\");\n".to_string(),
DiffHunkStatus::Modified,
DisplayRow(5)..DisplayRow(7)
DisplayRow(6)..DisplayRow(7)
),
(
"".to_string(),
@@ -11392,22 +11390,18 @@ async fn test_toggled_diff_base_change(
"#
.unindent(),
);

cx.update_editor(|editor, cx| {
let snapshot = editor.snapshot(cx);
let all_hunks = editor_hunks(editor, &snapshot, cx);
let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx);
assert_eq!(
expanded_hunks_background_highlights(editor, cx),
vec![DisplayRow(9)..=DisplayRow(10), DisplayRow(13)..=DisplayRow(14)],
"After expanding, all git additions should be highlighted for Modified (split into added and removed) and Added hunks"
);
assert_eq!(
all_hunks,
vec![
("use some::mod1;\n".to_string(), DiffHunkStatus::Removed, DisplayRow(1)..DisplayRow(1)),
("const B: u32 = 42;\n".to_string(), DiffHunkStatus::Removed, DisplayRow(5)..DisplayRow(5)),
("fn main(ˇ) {\n    println!(\"hello\");\n".to_string(), DiffHunkStatus::Modified, DisplayRow(9)..DisplayRow(11)),
("".to_string(), DiffHunkStatus::Added, DisplayRow(13)..DisplayRow(15)),
("use some::mod1;\n".to_string(), DiffHunkStatus::Removed, DisplayRow(2)..DisplayRow(2)),
("const B: u32 = 42;\n".to_string(), DiffHunkStatus::Removed, DisplayRow(7)..DisplayRow(7)),
("    println!(\"hello\");\n".to_string(), DiffHunkStatus::Modified, DisplayRow(12)..DisplayRow(13)),
("".to_string(), DiffHunkStatus::Added, DisplayRow(16)..DisplayRow(18)),
],
"After expanding, all hunks' display rows should have shifted by the amount of deleted lines added \
(from modified and removed hunks)"
@@ -11416,6 +11410,11 @@ async fn test_toggled_diff_base_change(
all_hunks, all_expanded_hunks,
"Editor hunks should not change and all be expanded"
);
assert_eq!(
expanded_hunks_background_highlights(editor, cx),
vec![DisplayRow(12)..=DisplayRow(12), DisplayRow(16)..=DisplayRow(17)],
"After expanding, all git additions should be highlighted for Modified (split into added and removed) and Added hunks"
);
});

cx.set_diff_base(Some("new diff base!"));
@@ -11461,7 +11460,7 @@ async fn test_fold_unfold_diff(executor: BackgroundExecutor, cx: &mut gpui::Test
const B: u32 = 42;
const C: u32 = 42;

fn main(ˇ) {
fn main() {
println!("hello");

println!("world");
@@ -11522,9 +11521,9 @@ async fn test_fold_unfold_diff(executor: BackgroundExecutor, cx: &mut gpui::Test
DisplayRow(3)..DisplayRow(3)
),
(
"fn main(ˇ) {\n    println!(\"hello\");\n".to_string(),
"    println!(\"hello\");\n".to_string(),
DiffHunkStatus::Modified,
DisplayRow(5)..DisplayRow(7)
DisplayRow(6)..DisplayRow(7)
),
(
"".to_string(),
@@ -11578,50 +11577,50 @@ async fn test_fold_unfold_diff(executor: BackgroundExecutor, cx: &mut gpui::Test
let snapshot = editor.snapshot(cx);
let all_hunks = editor_hunks(editor, &snapshot, cx);
let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx);
assert_eq!(
expanded_hunks_background_highlights(editor, cx),
vec![
DisplayRow(9)..=DisplayRow(10),
DisplayRow(13)..=DisplayRow(14),
DisplayRow(19)..=DisplayRow(19)
]
);
assert_eq!(
all_hunks,
vec![
(
"use some::mod1;\n".to_string(),
DiffHunkStatus::Removed,
DisplayRow(1)..DisplayRow(1)
DisplayRow(2)..DisplayRow(2)
),
(
"const B: u32 = 42;\n".to_string(),
DiffHunkStatus::Removed,
DisplayRow(5)..DisplayRow(5)
DisplayRow(7)..DisplayRow(7)
),
(
"fn main(ˇ) {\n    println!(\"hello\");\n".to_string(),
"    println!(\"hello\");\n".to_string(),
DiffHunkStatus::Modified,
DisplayRow(9)..DisplayRow(11)
DisplayRow(12)..DisplayRow(13)
),
(
"".to_string(),
DiffHunkStatus::Added,
DisplayRow(13)..DisplayRow(15)
DisplayRow(16)..DisplayRow(18)
),
(
"".to_string(),
DiffHunkStatus::Added,
DisplayRow(19)..DisplayRow(20)
DisplayRow(23)..DisplayRow(24)
),
(
"fn another2() {\n".to_string(),
DiffHunkStatus::Removed,
DisplayRow(23)..DisplayRow(23)
DisplayRow(28)..DisplayRow(28)
),
],
);
assert_eq!(all_hunks, all_expanded_hunks);
assert_eq!(
expanded_hunks_background_highlights(editor, cx),
vec![
DisplayRow(12)..=DisplayRow(12),
DisplayRow(16)..=DisplayRow(17),
DisplayRow(23)..=DisplayRow(23)
]
);
});

cx.update_editor(|editor, cx| editor.fold_selected_ranges(&FoldSelectedRanges, cx));
@@ -11655,11 +11654,6 @@ async fn test_fold_unfold_diff(executor: BackgroundExecutor, cx: &mut gpui::Test
let snapshot = editor.snapshot(cx);
let all_hunks = editor_hunks(editor, &snapshot, cx);
let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx);
assert_eq!(
expanded_hunks_background_highlights(editor, cx),
vec![DisplayRow(0)..=DisplayRow(0), DisplayRow(5)..=DisplayRow(5)],
"Only one hunk is left not folded, its highlight should be visible"
);
assert_eq!(
all_hunks,
vec![
@@ -11674,7 +11668,7 @@ async fn test_fold_unfold_diff(executor: BackgroundExecutor, cx: &mut gpui::Test
DisplayRow(0)..DisplayRow(0)
),
(
"fn main(ˇ) {\n    println!(\"hello\");\n".to_string(),
"    println!(\"hello\");\n".to_string(),
DiffHunkStatus::Modified,
DisplayRow(0)..DisplayRow(0)
),
@@ -11686,12 +11680,12 @@ async fn test_fold_unfold_diff(executor: BackgroundExecutor, cx: &mut gpui::Test
(
"".to_string(),
DiffHunkStatus::Added,
DisplayRow(5)..DisplayRow(6)
DisplayRow(6)..DisplayRow(7)
),
(
"fn another2() {\n".to_string(),
DiffHunkStatus::Removed,
DisplayRow(9)..DisplayRow(9)
DisplayRow(11)..DisplayRow(11)
),
],
"Hunk list should still return shifted folded hunks"
@@ -11702,16 +11696,21 @@ async fn test_fold_unfold_diff(executor: BackgroundExecutor, cx: &mut gpui::Test
(
"".to_string(),
DiffHunkStatus::Added,
DisplayRow(5)..DisplayRow(6)
DisplayRow(6)..DisplayRow(7)
),
(
"fn another2() {\n".to_string(),
DiffHunkStatus::Removed,
DisplayRow(9)..DisplayRow(9)
DisplayRow(11)..DisplayRow(11)
),
],
"Only non-folded hunks should be left expanded"
);
assert_eq!(
expanded_hunks_background_highlights(editor, cx),
vec![DisplayRow(0)..=DisplayRow(0), DisplayRow(6)..=DisplayRow(6)],
"Only one hunk is left not folded, its highlight should be visible"
);
});

cx.update_editor(|editor, cx| {
@@ -11748,51 +11747,51 @@ async fn test_fold_unfold_diff(executor: BackgroundExecutor, cx: &mut gpui::Test
let snapshot = editor.snapshot(cx);
let all_hunks = editor_hunks(editor, &snapshot, cx);
let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx);
assert_eq!(
expanded_hunks_background_highlights(editor, cx),
vec![
DisplayRow(9)..=DisplayRow(10),
DisplayRow(13)..=DisplayRow(14),
DisplayRow(19)..=DisplayRow(19)
],
"After unfolding, all hunk diffs should be visible again"
);
assert_eq!(
all_hunks,
vec![
(
"use some::mod1;\n".to_string(),
DiffHunkStatus::Removed,
DisplayRow(1)..DisplayRow(1)
DisplayRow(2)..DisplayRow(2)
),
(
"const B: u32 = 42;\n".to_string(),
DiffHunkStatus::Removed,
DisplayRow(5)..DisplayRow(5)
DisplayRow(7)..DisplayRow(7)
),
(
"fn main(ˇ) {\n    println!(\"hello\");\n".to_string(),
"    println!(\"hello\");\n".to_string(),
DiffHunkStatus::Modified,
DisplayRow(9)..DisplayRow(11)
DisplayRow(12)..DisplayRow(13)
),
(
"".to_string(),
DiffHunkStatus::Added,
DisplayRow(13)..DisplayRow(15)
DisplayRow(16)..DisplayRow(18)
),
(
"".to_string(),
DiffHunkStatus::Added,
DisplayRow(19)..DisplayRow(20)
DisplayRow(23)..DisplayRow(24)
),
(
"fn another2() {\n".to_string(),
DiffHunkStatus::Removed,
DisplayRow(23)..DisplayRow(23)
DisplayRow(28)..DisplayRow(28)
),
],
);
assert_eq!(all_hunks, all_expanded_hunks);
assert_eq!(
expanded_hunks_background_highlights(editor, cx),
vec![
DisplayRow(12)..=DisplayRow(12),
DisplayRow(16)..=DisplayRow(17),
DisplayRow(23)..=DisplayRow(23)
],
"After unfolding, all hunk diffs should be visible again"
);
});
}
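
The display-row updates repeated throughout these test hunks all follow one rule: expanding a hunk re-inserts its deleted lines into the display, so every row below the insertion point shifts down by the number of deleted lines shown above it. A small sketch of that bookkeeping (names and shift amounts are illustrative, not the exact values asserted above):

    // `expanded_deletions` holds (hunk_row, deleted_line_count) pairs for hunks
    // whose deleted lines are currently shown inline.
    fn shifted_display_row(row: u32, expanded_deletions: &[(u32, u32)]) -> u32 {
        let shift: u32 = expanded_deletions
            .iter()
            .filter(|&&(hunk_row, _)| hunk_row <= row)
            .map(|&(_, deleted)| deleted)
            .sum();
        row + shift
    }

    fn main() {
        // Two expanded hunks, each showing one deleted line, sit above row 9,
        // so row 9 is displayed two rows lower.
        assert_eq!(shifted_display_row(9, &[(1, 1), (5, 1)]), 11);
    }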

@@ -11942,17 +11941,17 @@ async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut gpui::TestAppContext)
(
"bbbb\n".to_string(),
DiffHunkStatus::Removed,
DisplayRow(5)..DisplayRow(5),
DisplayRow(6)..DisplayRow(6),
),
(
"nnnn\n".to_string(),
DiffHunkStatus::Modified,
DisplayRow(23)..DisplayRow(24),
DisplayRow(25)..DisplayRow(26),
),
(
"".to_string(),
DiffHunkStatus::Added,
DisplayRow(43)..DisplayRow(44),
DisplayRow(46)..DisplayRow(47),
),
];

@@ -11977,8 +11976,8 @@ async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut gpui::TestAppContext)
assert_eq!(
expanded_hunks_background_highlights(editor, cx),
vec![
DisplayRow(23)..=DisplayRow(23),
DisplayRow(43)..=DisplayRow(43)
DisplayRow(25)..=DisplayRow(25),
DisplayRow(46)..=DisplayRow(46)
],
);
assert_eq!(all_hunks, expected_all_hunks_shifted);
@@ -12009,8 +12008,8 @@ async fn test_toggle_diff_expand_in_multi_buffer(cx: &mut gpui::TestAppContext)
assert_eq!(
expanded_hunks_background_highlights(editor, cx),
vec![
DisplayRow(23)..=DisplayRow(23),
DisplayRow(43)..=DisplayRow(43)
DisplayRow(25)..=DisplayRow(25),
DisplayRow(46)..=DisplayRow(46)
],
);
assert_eq!(all_hunks, expected_all_hunks_shifted);
@@ -12118,12 +12117,12 @@ async fn test_edits_around_toggled_additions(
vec![(
"".to_string(),
DiffHunkStatus::Added,
DisplayRow(4)..DisplayRow(7)
DisplayRow(5)..DisplayRow(8)
)]
);
assert_eq!(
expanded_hunks_background_highlights(editor, cx),
vec![DisplayRow(4)..=DisplayRow(6)]
vec![DisplayRow(5)..=DisplayRow(7)]
);
assert_eq!(all_hunks, all_expanded_hunks);
});
@@ -12158,12 +12157,12 @@ async fn test_edits_around_toggled_additions(
vec![(
"".to_string(),
DiffHunkStatus::Added,
DisplayRow(4)..DisplayRow(8)
DisplayRow(5)..DisplayRow(9)
)]
);
assert_eq!(
expanded_hunks_background_highlights(editor, cx),
vec![DisplayRow(4)..=DisplayRow(6)],
vec![DisplayRow(5)..=DisplayRow(7)],
"Edited hunk should have one more line added"
);
assert_eq!(
@@ -12203,12 +12202,12 @@ async fn test_edits_around_toggled_additions(
vec![(
"".to_string(),
DiffHunkStatus::Added,
DisplayRow(4)..DisplayRow(9)
DisplayRow(5)..DisplayRow(10)
)]
);
assert_eq!(
expanded_hunks_background_highlights(editor, cx),
vec![DisplayRow(4)..=DisplayRow(6)],
vec![DisplayRow(5)..=DisplayRow(7)],
"Edited hunk should have one more line added"
);
assert_eq!(all_hunks, all_expanded_hunks);
@@ -12247,12 +12246,12 @@ async fn test_edits_around_toggled_additions(
vec![(
"".to_string(),
DiffHunkStatus::Added,
DisplayRow(4)..DisplayRow(8)
DisplayRow(5)..DisplayRow(9)
)]
);
assert_eq!(
expanded_hunks_background_highlights(editor, cx),
vec![DisplayRow(4)..=DisplayRow(6)],
vec![DisplayRow(5)..=DisplayRow(7)],
"Deleting a line should shrint the hunk"
);
assert_eq!(
@@ -12295,12 +12294,12 @@ async fn test_edits_around_toggled_additions(
vec![(
"".to_string(),
DiffHunkStatus::Added,
DisplayRow(5)..DisplayRow(6)
DisplayRow(6)..DisplayRow(7)
)]
);
assert_eq!(
expanded_hunks_background_highlights(editor, cx),
vec![DisplayRow(5)..=DisplayRow(5)]
vec![DisplayRow(6)..=DisplayRow(6)]
);
assert_eq!(all_hunks, all_expanded_hunks);
});
@@ -12337,7 +12336,7 @@ async fn test_edits_around_toggled_additions(
(
"const A: u32 = 42;\n".to_string(),
DiffHunkStatus::Removed,
DisplayRow(2)..DisplayRow(2)
DisplayRow(3)..DisplayRow(3)
)
]
);
@@ -12351,7 +12350,7 @@ async fn test_edits_around_toggled_additions(
vec![(
"const A: u32 = 42;\n".to_string(),
DiffHunkStatus::Removed,
DisplayRow(2)..DisplayRow(2)
DisplayRow(3)..DisplayRow(3)
)],
"Should open hunks that were adjacent to the stale addition one"
);
@@ -12447,7 +12446,7 @@ async fn test_edits_around_toggled_deletions(
vec![(
"const A: u32 = 42;\n".to_string(),
DiffHunkStatus::Removed,
DisplayRow(4)..DisplayRow(4)
DisplayRow(5)..DisplayRow(5)
)]
);
assert_eq!(all_hunks, all_expanded_hunks);
@@ -12487,7 +12486,7 @@ async fn test_edits_around_toggled_deletions(
vec![(
"const A: u32 = 42;\nconst B: u32 = 42;\n".to_string(),
DiffHunkStatus::Removed,
DisplayRow(5)..DisplayRow(5)
DisplayRow(6)..DisplayRow(6)
)]
);
assert_eq!(all_hunks, all_expanded_hunks);
@@ -12522,7 +12521,7 @@ async fn test_edits_around_toggled_deletions(
vec![(
"const A: u32 = 42;\nconst B: u32 = 42;\nconst C: u32 = 42;\n".to_string(),
DiffHunkStatus::Removed,
DisplayRow(6)..DisplayRow(6)
DisplayRow(7)..DisplayRow(7)
)]
);
assert_eq!(all_hunks, all_expanded_hunks);
@@ -12556,12 +12555,12 @@ async fn test_edits_around_toggled_deletions(
vec![(
"const A: u32 = 42;\nconst B: u32 = 42;\nconst C: u32 = 42;\n\n".to_string(),
DiffHunkStatus::Modified,
DisplayRow(7)..DisplayRow(8)
DisplayRow(8)..DisplayRow(9)
)]
);
assert_eq!(
expanded_hunks_background_highlights(editor, cx),
vec![DisplayRow(7)..=DisplayRow(7)],
vec![DisplayRow(8)..=DisplayRow(8)],
"Modified expanded hunks should display additions and highlight their background"
);
assert_eq!(all_hunks, all_expanded_hunks);
@@ -12655,14 +12654,14 @@ async fn test_edits_around_toggled_modifications(
let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx);
assert_eq!(
expanded_hunks_background_highlights(editor, cx),
vec![DisplayRow(6)..=DisplayRow(6)],
vec![DisplayRow(7)..=DisplayRow(7)],
);
assert_eq!(
all_hunks,
vec![(
"const C: u32 = 42;\n".to_string(),
DiffHunkStatus::Modified,
DisplayRow(6)..DisplayRow(7)
DisplayRow(7)..DisplayRow(8)
)]
);
assert_eq!(all_hunks, all_expanded_hunks);
@@ -12698,7 +12697,7 @@ async fn test_edits_around_toggled_modifications(
let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx);
assert_eq!(
expanded_hunks_background_highlights(editor, cx),
vec![DisplayRow(6)..=DisplayRow(6)],
vec![DisplayRow(7)..=DisplayRow(7)],
"Modified hunk should grow highlighted lines on more text additions"
);
assert_eq!(
@@ -12706,7 +12705,7 @@ async fn test_edits_around_toggled_modifications(
vec![(
"const C: u32 = 42;\n".to_string(),
DiffHunkStatus::Modified,
DisplayRow(6)..DisplayRow(9)
DisplayRow(7)..DisplayRow(10)
)]
);
assert_eq!(all_hunks, all_expanded_hunks);
@@ -12744,14 +12743,14 @@ async fn test_edits_around_toggled_modifications(
let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx);
assert_eq!(
expanded_hunks_background_highlights(editor, cx),
vec![DisplayRow(6)..=DisplayRow(8)],
vec![DisplayRow(7)..=DisplayRow(9)],
);
assert_eq!(
all_hunks,
vec![(
"const B: u32 = 42;\nconst C: u32 = 42;\n".to_string(),
DiffHunkStatus::Modified,
DisplayRow(6)..DisplayRow(9)
DisplayRow(7)..DisplayRow(10)
)],
"Modified hunk should grow deleted lines on text deletions above"
);
@@ -12788,7 +12787,7 @@ async fn test_edits_around_toggled_modifications(
let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx);
assert_eq!(
expanded_hunks_background_highlights(editor, cx),
vec![DisplayRow(6)..=DisplayRow(9)],
vec![DisplayRow(7)..=DisplayRow(10)],
"Modified hunk should grow deleted lines on text modifications above"
);
assert_eq!(
@@ -12796,7 +12795,7 @@ async fn test_edits_around_toggled_modifications(
vec![(
"const A: u32 = 42;\nconst B: u32 = 42;\nconst C: u32 = 42;\n".to_string(),
DiffHunkStatus::Modified,
DisplayRow(6)..DisplayRow(10)
DisplayRow(7)..DisplayRow(11)
)]
);
assert_eq!(all_hunks, all_expanded_hunks);
@@ -12832,7 +12831,7 @@ async fn test_edits_around_toggled_modifications(
let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx);
assert_eq!(
expanded_hunks_background_highlights(editor, cx),
vec![DisplayRow(6)..=DisplayRow(8)],
vec![DisplayRow(7)..=DisplayRow(9)],
"Modified hunk should grow shrink lines on modification lines removal"
);
assert_eq!(
@@ -12840,7 +12839,7 @@ async fn test_edits_around_toggled_modifications(
vec![(
"const A: u32 = 42;\nconst B: u32 = 42;\nconst C: u32 = 42;\n".to_string(),
DiffHunkStatus::Modified,
DisplayRow(6)..DisplayRow(9)
DisplayRow(7)..DisplayRow(10)
)]
);
assert_eq!(all_hunks, all_expanded_hunks);
@@ -12882,7 +12881,7 @@ async fn test_edits_around_toggled_modifications(
"const A: u32 = 42;\nconst B: u32 = 42;\nconst C: u32 = 42;\nconst D: u32 = 42;\n"
.to_string(),
DiffHunkStatus::Removed,
DisplayRow(7)..DisplayRow(7)
DisplayRow(8)..DisplayRow(8)
)]
);
assert_eq!(all_hunks, all_expanded_hunks);
@@ -12976,14 +12975,14 @@ async fn test_multiple_expanded_hunks_merge(
let all_expanded_hunks = expanded_hunks(editor, &snapshot, cx);
assert_eq!(
expanded_hunks_background_highlights(editor, cx),
vec![DisplayRow(6)..=DisplayRow(6)],
vec![DisplayRow(7)..=DisplayRow(7)],
);
assert_eq!(
all_hunks,
vec![(
"const C: u32 = 42;\n".to_string(),
DiffHunkStatus::Modified,
DisplayRow(6)..DisplayRow(7)
DisplayRow(7)..DisplayRow(8)
)]
);
assert_eq!(all_hunks, all_expanded_hunks);

@@ -7,14 +7,11 @@ use crate::{
CurrentLineHighlight, DoubleClickInMultibuffer, MultiCursorModifier, ScrollBeyondLastLine,
ShowScrollbar,
},
git::{
blame::{CommitDetails, GitBlame},
diff_hunk_to_display, DisplayDiffHunk,
},
git::blame::{CommitDetails, GitBlame},
hover_popover::{
self, hover_at, HOVER_POPOVER_GAP, MIN_POPOVER_CHARACTER_WIDTH, MIN_POPOVER_LINE_HEIGHT,
},
hunk_diff::ExpandedHunk,
hunk_diff::{diff_hunk_to_display, DisplayDiffHunk},
hunk_status,
items::BufferSearchHighlights,
mouse_context_menu::{self, MenuPosition, MouseContextMenu},
@@ -23,8 +20,8 @@ use crate::{
DocumentHighlightRead, DocumentHighlightWrite, Editor, EditorMode, EditorSettings,
EditorSnapshot, EditorStyle, ExpandExcerpts, FocusedBlock, GutterDimensions, HalfPageDown,
HalfPageUp, HandleInput, HoveredCursor, HoveredHunk, LineDown, LineUp, OpenExcerpts, PageDown,
PageUp, Point, RangeToAnchorExt, RowExt, RowRangeExt, SelectPhase, Selection, SoftWrap,
ToPoint, CURSORS_VISIBLE_FOR, MAX_LINE_LEN,
PageUp, Point, RowExt, RowRangeExt, SelectPhase, Selection, SoftWrap, ToPoint,
CURSORS_VISIBLE_FOR, MAX_LINE_LEN,
};
use client::ParticipantIndex;
use collections::{BTreeMap, HashMap};
@@ -305,7 +302,7 @@ impl EditorElement {
}
register_action(view, cx, Editor::go_to_diagnostic);
register_action(view, cx, Editor::go_to_prev_diagnostic);
register_action(view, cx, Editor::go_to_hunk);
register_action(view, cx, Editor::go_to_next_hunk);
register_action(view, cx, Editor::go_to_prev_hunk);
register_action(view, cx, |editor, a, cx| {
editor.go_to_definition(a, cx).detach_and_log_err(cx);
@@ -492,28 +489,7 @@ impl EditorElement {
let mut modifiers = event.modifiers;

if let Some(hovered_hunk) = hovered_hunk {
if modifiers.control || modifiers.platform {
editor.toggle_hovered_hunk(&hovered_hunk, cx);
} else {
let display_range = hovered_hunk
.multi_buffer_range
.clone()
.to_display_points(&position_map.snapshot);
let hunk_bounds = Self::diff_hunk_bounds(
&position_map.snapshot,
position_map.line_height,
gutter_hitbox.bounds,
&DisplayDiffHunk::Unfolded {
diff_base_byte_range: hovered_hunk.diff_base_byte_range.clone(),
display_row_range: display_range.start.row()..display_range.end.row(),
multi_buffer_range: hovered_hunk.multi_buffer_range.clone(),
status: hovered_hunk.status,
},
);
if hunk_bounds.contains(&event.position) {
editor.open_hunk_context_menu(hovered_hunk, event.position, cx);
}
}
editor.toggle_hovered_hunk(&hovered_hunk, cx);
cx.notify();
return;
} else if gutter_hitbox.is_hovered(cx) {
@@ -950,7 +926,7 @@ impl EditorElement {
// Remote cursors
if let Some(collaboration_hub) = &editor.collaboration_hub {
for remote_selection in snapshot.remote_selections_in_range(
&(Anchor::Start..Anchor::End),
&(Anchor::min()..Anchor::max()),
collaboration_hub.deref(),
cx,
) {
@@ -1269,6 +1245,7 @@ impl EditorElement {
line_height: Pixels,
gutter_hitbox: &Hitbox,
display_rows: Range<DisplayRow>,
anchor_range: Range<Anchor>,
snapshot: &EditorSnapshot,
cx: &mut WindowContext,
) -> Vec<(DisplayDiffHunk, Option<Hitbox>)> {
@@ -1289,30 +1266,84 @@ impl EditorElement {
.git
.git_gutter
.unwrap_or_default();
let display_hunks = buffer_snapshot
.git_diff_hunks_in_range(buffer_start_row..buffer_end_row)
.map(|hunk| diff_hunk_to_display(&hunk, snapshot))
.dedup()
.map(|hunk| match git_gutter_setting {
GitGutterSetting::TrackedFiles => {
let hitbox = match hunk {
DisplayDiffHunk::Unfolded { .. } => {
let hunk_bounds = Self::diff_hunk_bounds(
snapshot,
line_height,
gutter_hitbox.bounds,
&hunk,
);
Some(cx.insert_hitbox(hunk_bounds, true))

self.editor.update(cx, |editor, cx| {
let expanded_hunks = &editor.expanded_hunks.hunks;
let expanded_hunks_start_ix = expanded_hunks
.binary_search_by(|hunk| {
hunk.hunk_range
.end
.cmp(&anchor_range.start, &buffer_snapshot)
.then(Ordering::Less)
})
.unwrap_err();
let mut expanded_hunks = expanded_hunks[expanded_hunks_start_ix..].iter().peekable();
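
The `binary_search_by(...).unwrap_err()` call above is a standard Rust idiom for finding a partition point: chaining `.then(Ordering::Less)` onto the comparator means it can never report `Equal`, so the search always returns `Err(insertion_point)`, and `unwrap_err()` yields the first index whose element compares `Greater`, which here is the first expanded hunk that can still overlap the queried anchor range. A self-contained sketch:

    use std::cmp::Ordering;

    // First index whose value is strictly greater than `target`, in O(log n).
    // The comparator never returns Equal, so binary_search_by always reports
    // Err(insertion_point), which is exactly the partition point we want.
    fn first_greater(sorted: &[u32], target: u32) -> usize {
        sorted
            .binary_search_by(|probe| probe.cmp(&target).then(Ordering::Less))
            .unwrap_err()
    }

    fn main() {
        let hunk_ends = [2, 4, 4, 9];
        assert_eq!(first_greater(&hunk_ends, 4), 3); // skips every end <= 4
        assert_eq!(first_greater(&hunk_ends, 10), 4); // past the last element
    }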

let display_hunks = buffer_snapshot
.git_diff_hunks_in_range(buffer_start_row..buffer_end_row)
.filter_map(|hunk| {
let display_hunk = diff_hunk_to_display(&hunk, snapshot);

if let DisplayDiffHunk::Unfolded {
multi_buffer_range,
status,
..
} = &display_hunk
{
let mut is_expanded = false;
while let Some(expanded_hunk) = expanded_hunks.peek() {
match expanded_hunk
.hunk_range
.start
.cmp(&multi_buffer_range.start, &buffer_snapshot)
{
Ordering::Less => {
expanded_hunks.next();
}
Ordering::Equal => {
is_expanded = true;
break;
}
Ordering::Greater => {
break;
}
}
}
DisplayDiffHunk::Folded { .. } => None,
};
(hunk, hitbox)
}
GitGutterSetting::Hide => (hunk, None),
})
.collect();
display_hunks
match status {
DiffHunkStatus::Added => {}
DiffHunkStatus::Modified => {}
DiffHunkStatus::Removed => {
if is_expanded {
return None;
}
}
}
}

Some(display_hunk)
})
.dedup()
.map(|hunk| match git_gutter_setting {
GitGutterSetting::TrackedFiles => {
let hitbox = match hunk {
DisplayDiffHunk::Unfolded { .. } => {
let hunk_bounds = Self::diff_hunk_bounds(
snapshot,
line_height,
gutter_hitbox.bounds,
&hunk,
);
Some(cx.insert_hitbox(hunk_bounds, true))
}
DisplayDiffHunk::Folded { .. } => None,
};
(hunk, hitbox)
}
GitGutterSetting::Hide => (hunk, None),
})
.collect();
display_hunks
})
}

#[allow(clippy::too_many_arguments)]
@@ -3187,7 +3218,7 @@ impl EditorElement {
Some((
hunk_bounds,
cx.theme().status().modified,
Corners::all(1. * line_height),
Corners::all(px(0.)),
))
}
DisplayDiffHunk::Unfolded { status, .. } => {
@@ -3195,12 +3226,12 @@ impl EditorElement {
DiffHunkStatus::Added => (
hunk_hitbox.bounds,
cx.theme().status().created,
Corners::all(0.05 * line_height),
Corners::all(px(0.)),
),
DiffHunkStatus::Modified => (
hunk_hitbox.bounds,
cx.theme().status().modified,
Corners::all(0.05 * line_height),
Corners::all(px(0.)),
),
DiffHunkStatus::Removed => (
Bounds::new(
@@ -3244,7 +3275,7 @@ impl EditorElement {
let start_y = display_row.as_f32() * line_height - scroll_top;
let end_y = start_y + line_height;

let width = 0.275 * line_height;
let width = Self::diff_hunk_strip_width(line_height);
let highlight_origin = gutter_bounds.origin + point(px(0.), start_y);
let highlight_size = size(width, end_y - start_y);
Bounds::new(highlight_origin, highlight_size)
@@ -3277,7 +3308,7 @@ impl EditorElement {
let start_y = start_row.as_f32() * line_height - scroll_top;
let end_y = end_row_in_current_excerpt.as_f32() * line_height - scroll_top;

let width = 0.275 * line_height;
let width = Self::diff_hunk_strip_width(line_height);
let highlight_origin = gutter_bounds.origin + point(px(0.), start_y);
let highlight_size = size(width, end_y - start_y);
Bounds::new(highlight_origin, highlight_size)
@@ -3289,7 +3320,7 @@ impl EditorElement {
let start_y = row.as_f32() * line_height - offset - scroll_top;
let end_y = start_y + line_height;

let width = 0.35 * line_height;
let width = (0.35 * line_height).floor();
let highlight_origin = gutter_bounds.origin + point(px(0.), start_y);
let highlight_size = size(width, end_y - start_y);
Bounds::new(highlight_origin, highlight_size)
@@ -3298,6 +3329,12 @@ impl EditorElement {
}
}

/// Returns the width of the diff strip that will be displayed in the gutter.
pub(super) fn diff_hunk_strip_width(line_height: Pixels) -> Pixels {
// We floor the value to prevent pixel rounding.
(0.275 * line_height).floor()
}
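
This new helper centralizes the `0.275 * line_height` width that two call sites above previously computed inline, and floors it so the gutter strip has an identical whole-pixel width at every paint call. A quick plain-float illustration of the flooring (Zed's actual code uses its `Pixels` type):

    fn diff_hunk_strip_width(line_height: f32) -> f32 {
        (0.275 * line_height).floor()
    }

    fn main() {
        assert_eq!(diff_hunk_strip_width(16.0), 4.0); // 4.4   -> 4.0
        assert_eq!(diff_hunk_strip_width(17.0), 4.0); // 4.675 -> 4.0
        assert_eq!(diff_hunk_strip_width(20.0), 5.0); // 5.5   -> 5.0
    }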
|
||||
|
||||
fn paint_gutter_indicators(&self, layout: &mut EditorLayout, cx: &mut WindowContext) {
|
||||
cx.paint_layer(layout.gutter_hitbox.bounds, |cx| {
|
||||
cx.with_element_namespace("gutter_fold_toggles", |cx| {
|
||||
@@ -3309,9 +3346,6 @@ impl EditorElement {
|
||||
for test_indicator in layout.test_indicators.iter_mut() {
|
||||
test_indicator.paint(cx);
|
||||
}
|
||||
for close_indicator in layout.close_indicators.iter_mut() {
|
||||
close_indicator.paint(cx);
|
||||
}
|
||||
|
||||
if let Some(indicator) = layout.code_actions_indicator.as_mut() {
|
||||
indicator.paint(cx);
|
||||
@@ -4097,46 +4131,6 @@ impl EditorElement {
|
||||
+ 1;
|
||||
self.column_pixels(digit_count, cx)
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn layout_hunk_diff_close_indicators(
|
||||
&self,
|
||||
line_height: Pixels,
|
||||
scroll_pixel_position: gpui::Point<Pixels>,
|
||||
gutter_dimensions: &GutterDimensions,
|
||||
gutter_hitbox: &Hitbox,
|
||||
rows_with_hunk_bounds: &HashMap<DisplayRow, Bounds<Pixels>>,
|
||||
expanded_hunks_by_rows: HashMap<DisplayRow, ExpandedHunk>,
|
||||
cx: &mut WindowContext,
|
||||
) -> Vec<AnyElement> {
|
||||
self.editor.update(cx, |editor, cx| {
|
||||
expanded_hunks_by_rows
|
||||
.into_iter()
|
||||
.map(|(display_row, hunk)| {
|
||||
let button = editor.close_hunk_diff_button(
|
||||
HoveredHunk {
|
||||
multi_buffer_range: hunk.hunk_range,
|
||||
status: hunk.status,
|
||||
diff_base_byte_range: hunk.diff_base_byte_range,
|
||||
},
|
||||
display_row,
|
||||
cx,
|
||||
);
|
||||
|
||||
prepaint_gutter_button(
|
||||
button,
|
||||
display_row,
|
||||
line_height,
|
||||
gutter_dimensions,
|
||||
scroll_pixel_position,
|
||||
gutter_hitbox,
|
||||
rows_with_hunk_bounds,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.collect()
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
@@ -5087,14 +5081,14 @@ impl Element for EditorElement {
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let start_anchor = if start_row == Default::default() {
|
||||
Anchor::Start
|
||||
Anchor::min()
|
||||
} else {
|
||||
snapshot.buffer_snapshot.anchor_before(
|
||||
DisplayPoint::new(start_row, 0).to_offset(&snapshot, Bias::Left),
|
||||
)
|
||||
};
|
||||
let end_anchor = if end_row > max_row {
|
||||
Anchor::End
|
||||
Anchor::max()
|
||||
} else {
|
||||
snapshot.buffer_snapshot.anchor_before(
|
||||
DisplayPoint::new(end_row, 0).to_offset(&snapshot, Bias::Right),
|
||||
@@ -5158,6 +5152,7 @@ impl Element for EditorElement {
|
||||
line_height,
|
||||
&gutter_hitbox,
|
||||
start_row..end_row,
|
||||
start_anchor..end_anchor,
|
||||
&snapshot,
|
||||
cx,
|
||||
);
|
||||
@@ -5486,15 +5481,6 @@ impl Element for EditorElement {
|
||||
} else {
|
||||
Vec::new()
|
||||
};
|
||||
let close_indicators = self.layout_hunk_diff_close_indicators(
|
||||
line_height,
|
||||
scroll_pixel_position,
|
||||
&gutter_dimensions,
|
||||
&gutter_hitbox,
|
||||
&rows_with_hunk_bounds,
|
||||
expanded_add_hunks_by_rows,
|
||||
cx,
|
||||
);
|
||||
|
||||
self.layout_signature_help(
|
||||
&hitbox,
|
||||
@@ -5607,7 +5593,6 @@ impl Element for EditorElement {
|
||||
selections,
|
||||
mouse_context_menu,
|
||||
test_indicators,
|
||||
close_indicators,
|
||||
code_actions_indicator,
|
||||
gutter_fold_toggles,
|
||||
crease_trailers,
|
||||
@@ -5749,7 +5734,6 @@ pub struct EditorLayout {
|
||||
selections: Vec<(PlayerColor, Vec<SelectionLayout>)>,
|
||||
code_actions_indicator: Option<AnyElement>,
|
||||
test_indicators: Vec<AnyElement>,
|
||||
close_indicators: Vec<AnyElement>,
|
||||
gutter_fold_toggles: Vec<Option<AnyElement>>,
|
||||
crease_trailers: Vec<Option<CreaseTrailerLayout>>,
|
||||
mouse_context_menu: Option<AnyElement>,
|
||||
|
||||
@@ -1,309 +1 @@
|
||||
pub mod blame;
|
||||
|
||||
use std::ops::Range;
|
||||
|
||||
use git::diff::DiffHunkStatus;
|
||||
use language::Point;
|
||||
use multi_buffer::{Anchor, MultiBufferDiffHunk};
|
||||
|
||||
use crate::{
|
||||
display_map::{DisplaySnapshot, ToDisplayPoint},
|
||||
hunk_status, AnchorRangeExt, DisplayRow,
|
||||
};
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
pub enum DisplayDiffHunk {
|
||||
Folded {
|
||||
display_row: DisplayRow,
|
||||
},
|
||||
|
||||
Unfolded {
|
||||
diff_base_byte_range: Range<usize>,
|
||||
display_row_range: Range<DisplayRow>,
|
||||
multi_buffer_range: Range<Anchor>,
|
||||
status: DiffHunkStatus,
|
||||
},
|
||||
}
|
||||
|
||||
impl DisplayDiffHunk {
|
||||
pub fn start_display_row(&self) -> DisplayRow {
|
||||
match self {
|
||||
&DisplayDiffHunk::Folded { display_row } => display_row,
|
||||
DisplayDiffHunk::Unfolded {
|
||||
display_row_range, ..
|
||||
} => display_row_range.start,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn contains_display_row(&self, display_row: DisplayRow) -> bool {
|
||||
let range = match self {
|
||||
&DisplayDiffHunk::Folded { display_row } => display_row..=display_row,
|
||||
|
||||
DisplayDiffHunk::Unfolded {
|
||||
display_row_range, ..
|
||||
} => display_row_range.start..=display_row_range.end,
|
||||
};
|
||||
|
||||
range.contains(&display_row)
|
||||
}
|
||||
}
|
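A self-contained illustration of the two variants' row semantics (DisplayRow is stood in by u32 here; note the inclusive end row in the unfolded case):

use std::ops::Range;

// Simplified stand-ins for the editor types above.
type DisplayRow = u32;

enum DisplayDiffHunk {
    Folded { display_row: DisplayRow },
    Unfolded { display_row_range: Range<DisplayRow> },
}

impl DisplayDiffHunk {
    fn contains_display_row(&self, row: DisplayRow) -> bool {
        match self {
            DisplayDiffHunk::Folded { display_row } => *display_row == row,
            // Inclusive end: the hunk also claims the row just past its range.
            DisplayDiffHunk::Unfolded { display_row_range } => {
                (display_row_range.start..=display_row_range.end).contains(&row)
            }
        }
    }
}

fn main() {
    let hunk = DisplayDiffHunk::Unfolded { display_row_range: 3..5 };
    assert!(hunk.contains_display_row(5)); // end row is included
    assert!(!hunk.contains_display_row(6));
    let folded = DisplayDiffHunk::Folded { display_row: 7 };
    assert!(folded.contains_display_row(7));
}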
||||
|
||||
pub fn diff_hunk_to_display(
|
||||
hunk: &MultiBufferDiffHunk,
|
||||
snapshot: &DisplaySnapshot,
|
||||
) -> DisplayDiffHunk {
|
||||
let hunk_start_point = Point::new(hunk.row_range.start.0, 0);
|
||||
let hunk_start_point_sub = Point::new(hunk.row_range.start.0.saturating_sub(1), 0);
|
||||
let hunk_end_point_sub = Point::new(
|
||||
hunk.row_range
|
||||
.end
|
||||
.0
|
||||
.saturating_sub(1)
|
||||
.max(hunk.row_range.start.0),
|
||||
0,
|
||||
);
|
||||
|
||||
let status = hunk_status(hunk);
|
||||
let is_removal = status == DiffHunkStatus::Removed;
|
||||
|
||||
let folds_start = Point::new(hunk.row_range.start.0.saturating_sub(2), 0);
|
||||
let folds_end = Point::new(hunk.row_range.end.0 + 2, 0);
|
||||
let folds_range = folds_start..folds_end;
|
||||
|
||||
let containing_fold = snapshot.folds_in_range(folds_range).find(|fold| {
|
||||
let fold_point_range = fold.range.to_point(&snapshot.buffer_snapshot);
|
||||
let fold_point_range = fold_point_range.start..=fold_point_range.end;
|
||||
|
||||
let folded_start = fold_point_range.contains(&hunk_start_point);
|
||||
let folded_end = fold_point_range.contains(&hunk_end_point_sub);
|
||||
let folded_start_sub = fold_point_range.contains(&hunk_start_point_sub);
|
||||
|
||||
(folded_start && folded_end) || (is_removal && folded_start_sub)
|
||||
});
|
||||
|
||||
if let Some(fold) = containing_fold {
|
||||
let row = fold.range.start.to_display_point(snapshot).row();
|
||||
DisplayDiffHunk::Folded { display_row: row }
|
||||
} else {
|
||||
let start = hunk_start_point.to_display_point(snapshot).row();
|
||||
|
||||
let hunk_end_row = hunk.row_range.end.max(hunk.row_range.start);
|
||||
let hunk_end_point = Point::new(hunk_end_row.0, 0);
|
||||
|
||||
let multi_buffer_start = snapshot.buffer_snapshot.anchor_after(hunk_start_point);
|
||||
let multi_buffer_end = snapshot.buffer_snapshot.anchor_before(hunk_end_point);
|
||||
let end = hunk_end_point.to_display_point(snapshot).row();
|
||||
|
||||
DisplayDiffHunk::Unfolded {
|
||||
display_row_range: start..end,
|
||||
multi_buffer_range: multi_buffer_start..multi_buffer_end,
|
||||
status,
|
||||
diff_base_byte_range: hunk.diff_base_byte_range.clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use crate::Point;
|
||||
use crate::{editor_tests::init_test, hunk_status};
|
||||
use gpui::{Context, TestAppContext};
|
||||
use language::Capability::ReadWrite;
|
||||
use multi_buffer::{ExcerptRange, MultiBuffer, MultiBufferRow};
|
||||
use project::{FakeFs, Project};
|
||||
use unindent::Unindent;
|
||||
#[gpui::test]
|
||||
async fn test_diff_hunks_in_range(cx: &mut TestAppContext) {
|
||||
use git::diff::DiffHunkStatus;
|
||||
init_test(cx, |_| {});
|
||||
|
||||
let fs = FakeFs::new(cx.background_executor.clone());
|
||||
let project = Project::test(fs, [], cx).await;
|
||||
|
||||
// buffer has two modified hunks with two rows each
|
||||
let buffer_1 = project.update(cx, |project, cx| {
|
||||
project.create_local_buffer(
|
||||
"
|
||||
1.zero
|
||||
1.ONE
|
||||
1.TWO
|
||||
1.three
|
||||
1.FOUR
|
||||
1.FIVE
|
||||
1.six
|
||||
"
|
||||
.unindent()
|
||||
.as_str(),
|
||||
None,
|
||||
cx,
|
||||
)
|
||||
});
|
||||
buffer_1.update(cx, |buffer, cx| {
|
||||
buffer.set_diff_base(
|
||||
Some(
|
||||
"
|
||||
1.zero
|
||||
1.one
|
||||
1.two
|
||||
1.three
|
||||
1.four
|
||||
1.five
|
||||
1.six
|
||||
"
|
||||
.unindent(),
|
||||
),
|
||||
cx,
|
||||
);
|
||||
});
|
||||
|
||||
// buffer has a deletion hunk and an insertion hunk
|
||||
let buffer_2 = project.update(cx, |project, cx| {
|
||||
project.create_local_buffer(
|
||||
"
|
||||
2.zero
|
||||
2.one
|
||||
2.two
|
||||
2.three
|
||||
2.four
|
||||
2.five
|
||||
2.six
|
||||
"
|
||||
.unindent()
|
||||
.as_str(),
|
||||
None,
|
||||
cx,
|
||||
)
|
||||
});
|
||||
buffer_2.update(cx, |buffer, cx| {
|
||||
buffer.set_diff_base(
|
||||
Some(
|
||||
"
|
||||
2.zero
|
||||
2.one
|
||||
2.one-and-a-half
|
||||
2.two
|
||||
2.three
|
||||
2.four
|
||||
2.six
|
||||
"
|
||||
.unindent(),
|
||||
),
|
||||
cx,
|
||||
);
|
||||
});
|
||||
|
||||
cx.background_executor.run_until_parked();
|
||||
|
||||
let multibuffer = cx.new_model(|cx| {
|
||||
let mut multibuffer = MultiBuffer::new(ReadWrite);
|
||||
multibuffer.push_excerpts(
|
||||
buffer_1.clone(),
|
||||
[
|
||||
// excerpt ends in the middle of a modified hunk
|
||||
ExcerptRange {
|
||||
context: Point::new(0, 0)..Point::new(1, 5),
|
||||
primary: Default::default(),
|
||||
},
|
||||
// excerpt begins in the middle of a modified hunk
|
||||
ExcerptRange {
|
||||
context: Point::new(5, 0)..Point::new(6, 5),
|
||||
primary: Default::default(),
|
||||
},
|
||||
],
|
||||
cx,
|
||||
);
|
||||
multibuffer.push_excerpts(
|
||||
buffer_2.clone(),
|
||||
[
|
||||
// excerpt ends at a deletion
|
||||
ExcerptRange {
|
||||
context: Point::new(0, 0)..Point::new(1, 5),
|
||||
primary: Default::default(),
|
||||
},
|
||||
// excerpt starts at a deletion
|
||||
ExcerptRange {
|
||||
context: Point::new(2, 0)..Point::new(2, 5),
|
||||
primary: Default::default(),
|
||||
},
|
||||
// excerpt fully contains a deletion hunk
|
||||
ExcerptRange {
|
||||
context: Point::new(1, 0)..Point::new(2, 5),
|
||||
primary: Default::default(),
|
||||
},
|
||||
// excerpt fully contains an insertion hunk
|
||||
ExcerptRange {
|
||||
context: Point::new(4, 0)..Point::new(6, 5),
|
||||
primary: Default::default(),
|
||||
},
|
||||
],
|
||||
cx,
|
||||
);
|
||||
multibuffer
|
||||
});
|
||||
|
||||
let snapshot = multibuffer.read_with(cx, |b, cx| b.snapshot(cx));
|
||||
|
||||
assert_eq!(
|
||||
snapshot.text(),
|
||||
"
|
||||
1.zero
|
||||
1.ONE
|
||||
1.FIVE
|
||||
1.six
|
||||
2.zero
|
||||
2.one
|
||||
2.two
|
||||
2.one
|
||||
2.two
|
||||
2.four
|
||||
2.five
|
||||
2.six"
|
||||
.unindent()
|
||||
);
|
||||
|
||||
let expected = [
|
||||
(
|
||||
DiffHunkStatus::Modified,
|
||||
MultiBufferRow(1)..MultiBufferRow(2),
|
||||
),
|
||||
(
|
||||
DiffHunkStatus::Modified,
|
||||
MultiBufferRow(2)..MultiBufferRow(3),
|
||||
),
|
||||
// TODO: define better when and where removed hunks show up at range extremities
|
||||
(
|
||||
DiffHunkStatus::Removed,
|
||||
MultiBufferRow(6)..MultiBufferRow(6),
|
||||
),
|
||||
(
|
||||
DiffHunkStatus::Removed,
|
||||
MultiBufferRow(8)..MultiBufferRow(8),
|
||||
),
|
||||
(
|
||||
DiffHunkStatus::Added,
|
||||
MultiBufferRow(10)..MultiBufferRow(11),
|
||||
),
|
||||
];
|
||||
|
||||
assert_eq!(
|
||||
snapshot
|
||||
.git_diff_hunks_in_range(MultiBufferRow(0)..MultiBufferRow(12))
|
||||
.map(|hunk| (hunk_status(&hunk), hunk.row_range))
|
||||
.collect::<Vec<_>>(),
|
||||
&expected,
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
snapshot
|
||||
.git_diff_hunks_in_range_rev(MultiBufferRow(0)..MultiBufferRow(12))
|
||||
.map(|hunk| (hunk_status(&hunk), hunk.row_range))
|
||||
.collect::<Vec<_>>(),
|
||||
expected
|
||||
.iter()
|
||||
.rev()
|
||||
.cloned()
|
||||
.collect::<Vec<_>>()
|
||||
.as_slice(),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
@@ -20,8 +20,8 @@ use language::{
|
||||
};
|
||||
use multi_buffer::AnchorRangeExt;
|
||||
use project::{
|
||||
project_settings::ProjectSettings, search::SearchQuery, FormatTrigger, Item as _, Project,
|
||||
ProjectPath,
|
||||
lsp_store::FormatTrigger, project_settings::ProjectSettings, search::SearchQuery, Item as _,
|
||||
Project, ProjectPath,
|
||||
};
|
||||
use rpc::proto::{self, update_view, PeerId};
|
||||
use settings::Settings;
|
||||
|
||||
@@ -40,13 +40,13 @@ impl ScrollAnchor {
|
||||
fn new() -> Self {
|
||||
Self {
|
||||
offset: gpui::Point::default(),
|
||||
anchor: Anchor::Start,
|
||||
anchor: Anchor::min(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn scroll_position(&self, snapshot: &DisplaySnapshot) -> gpui::Point<f32> {
|
||||
let mut scroll_position = self.offset;
|
||||
if self.anchor == Anchor::Start {
|
||||
if self.anchor == Anchor::min() {
|
||||
scroll_position.y = 0.;
|
||||
} else {
|
||||
let scroll_top = self.anchor.to_display_point(snapshot).row().as_f32();
|
||||
@@ -194,7 +194,7 @@ impl ScrollManager {
|
||||
let (new_anchor, top_row) = if scroll_position.y <= 0. {
|
||||
(
|
||||
ScrollAnchor {
|
||||
anchor: Anchor::Start,
|
||||
anchor: Anchor::min(),
|
||||
offset: scroll_position.max(&gpui::Point::default()),
|
||||
},
|
||||
0,
|
||||
|
||||
@@ -48,8 +48,8 @@ impl SelectionsCollection {
|
||||
pending: Some(PendingSelection {
|
||||
selection: Selection {
|
||||
id: 0,
|
||||
start: Anchor::Start,
|
||||
end: Anchor::Start,
|
||||
start: Anchor::min(),
|
||||
end: Anchor::min(),
|
||||
reversed: false,
|
||||
goal: SelectionGoal::None,
|
||||
},
|
||||
@@ -109,7 +109,7 @@ impl SelectionsCollection {
|
||||
|
||||
pub fn all<'a, D>(&self, cx: &AppContext) -> Vec<Selection<D>>
|
||||
where
|
||||
D: 'a + TextDimension + Ord + Sub<D, Output = D> + std::fmt::Debug,
|
||||
D: 'a + TextDimension + Ord + Sub<D, Output = D>,
|
||||
{
|
||||
let disjoint_anchors = &self.disjoint;
|
||||
let mut disjoint =
|
||||
@@ -850,7 +850,7 @@ pub(crate) fn resolve_multiple<'a, D, I>(
|
||||
snapshot: &MultiBufferSnapshot,
|
||||
) -> impl 'a + Iterator<Item = Selection<D>>
|
||||
where
|
||||
D: TextDimension + Ord + Sub<D, Output = D> + std::fmt::Debug,
|
||||
D: TextDimension + Ord + Sub<D, Output = D>,
|
||||
I: 'a + IntoIterator<Item = &'a Selection<Anchor>>,
|
||||
{
|
||||
let (to_summarize, selections) = selections.into_iter().tee();
|
||||
|
||||
@@ -9,7 +9,7 @@ use git::GitHostingProviderRegistry;
|
||||
use gpui::{AsyncAppContext, BackgroundExecutor, Context, Model};
|
||||
use http_client::{HttpClient, Method};
|
||||
use language::LanguageRegistry;
|
||||
use node_runtime::FakeNodeRuntime;
|
||||
use node_runtime::NodeRuntime;
|
||||
use open_ai::OpenAiEmbeddingModel;
|
||||
use project::Project;
|
||||
use semantic_index::{
|
||||
@@ -292,7 +292,7 @@ async fn run_evaluation(
|
||||
let user_store = cx
|
||||
.new_model(|cx| UserStore::new(client.clone(), cx))
|
||||
.unwrap();
|
||||
let node_runtime = Arc::new(FakeNodeRuntime {});
|
||||
let node_runtime = NodeRuntime::unavailable();
|
||||
|
||||
let evaluations = fs::read(&evaluations_path).expect("failed to read evaluations.json");
|
||||
let evaluations: Vec<EvaluationProject> = serde_json::from_slice(&evaluations).unwrap();
|
||||
|
||||
@@ -10,16 +10,11 @@ use gpui::AsyncAppContext;
|
||||
use language::{
|
||||
CodeLabel, HighlightId, Language, LanguageServerName, LspAdapter, LspAdapterDelegate,
|
||||
};
|
||||
use lsp::{CodeActionKind, LanguageServerBinary};
|
||||
use lsp::{CodeActionKind, LanguageServerBinary, LanguageServerBinaryOptions};
|
||||
use serde::Serialize;
|
||||
use serde_json::Value;
|
||||
use std::ops::Range;
|
||||
use std::{
|
||||
any::Any,
|
||||
path::{Path, PathBuf},
|
||||
pin::Pin,
|
||||
sync::Arc,
|
||||
};
|
||||
use std::{any::Any, path::PathBuf, pin::Pin, sync::Arc};
|
||||
use util::{maybe, ResultExt};
|
||||
use wasmtime_wasi::WasiView as _;
|
||||
|
||||
@@ -38,8 +33,8 @@ impl LspAdapter for ExtensionLspAdapter {
|
||||
|
||||
fn get_language_server_command<'a>(
|
||||
self: Arc<Self>,
|
||||
_: Option<Arc<Path>>,
|
||||
delegate: Arc<dyn LspAdapterDelegate>,
|
||||
_: LanguageServerBinaryOptions,
|
||||
_: futures::lock::MutexGuard<'a, Option<LanguageServerBinary>>,
|
||||
_: &'a mut AsyncAppContext,
|
||||
) -> Pin<Box<dyn 'a + Future<Output = Result<LanguageServerBinary>>>> {
|
||||
@@ -124,10 +119,6 @@ impl LspAdapter for ExtensionLspAdapter {
|
||||
unreachable!("get_language_server_command is overridden")
|
||||
}
|
||||
|
||||
async fn installation_test_binary(&self, _: PathBuf) -> Option<LanguageServerBinary> {
|
||||
None
|
||||
}
|
||||
|
||||
fn code_action_kinds(&self) -> Option<Vec<CodeActionKind>> {
|
||||
let code_action_kinds = self
|
||||
.extension
|
||||
|
||||
@@ -177,7 +177,7 @@ actions!(zed, [ReloadExtensions]);
|
||||
pub fn init(
|
||||
fs: Arc<dyn Fs>,
|
||||
client: Arc<Client>,
|
||||
node_runtime: Arc<dyn NodeRuntime>,
|
||||
node_runtime: NodeRuntime,
|
||||
language_registry: Arc<LanguageRegistry>,
|
||||
theme_registry: Arc<ThemeRegistry>,
|
||||
cx: &mut AppContext,
|
||||
@@ -228,7 +228,7 @@ impl ExtensionStore {
|
||||
http_client: Arc<HttpClientWithUrl>,
|
||||
builder_client: Arc<dyn HttpClient>,
|
||||
telemetry: Option<Arc<Telemetry>>,
|
||||
node_runtime: Arc<dyn NodeRuntime>,
|
||||
node_runtime: NodeRuntime,
|
||||
language_registry: Arc<LanguageRegistry>,
|
||||
theme_registry: Arc<ThemeRegistry>,
|
||||
slash_command_registry: Arc<SlashCommandRegistry>,
|
||||
|
||||
@@ -15,7 +15,7 @@ use http_client::{FakeHttpClient, Response};
|
||||
use indexed_docs::IndexedDocsRegistry;
|
||||
use isahc_http_client::IsahcHttpClient;
|
||||
use language::{LanguageMatcher, LanguageRegistry, LanguageServerBinaryStatus, LanguageServerName};
|
||||
use node_runtime::FakeNodeRuntime;
|
||||
use node_runtime::NodeRuntime;
|
||||
use parking_lot::Mutex;
|
||||
use project::{Project, DEFAULT_COMPLETION_CONTEXT};
|
||||
use release_channel::AppVersion;
|
||||
@@ -264,7 +264,7 @@ async fn test_extension_store(cx: &mut TestAppContext) {
|
||||
let slash_command_registry = SlashCommandRegistry::new();
|
||||
let indexed_docs_registry = Arc::new(IndexedDocsRegistry::new(cx.executor()));
|
||||
let snippet_registry = Arc::new(SnippetRegistry::new());
|
||||
let node_runtime = FakeNodeRuntime::new();
|
||||
let node_runtime = NodeRuntime::unavailable();
|
||||
|
||||
let store = cx.new_model(|cx| {
|
||||
ExtensionStore::new(
|
||||
@@ -490,7 +490,7 @@ async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) {
|
||||
let slash_command_registry = SlashCommandRegistry::new();
|
||||
let indexed_docs_registry = Arc::new(IndexedDocsRegistry::new(cx.executor()));
|
||||
let snippet_registry = Arc::new(SnippetRegistry::new());
|
||||
let node_runtime = FakeNodeRuntime::new();
|
||||
let node_runtime = NodeRuntime::unavailable();
|
||||
|
||||
let mut status_updates = language_registry.language_server_binary_statuses();
|
||||
|
||||
|
||||
@@ -33,7 +33,7 @@ pub(crate) struct WasmHost {
|
||||
engine: Engine,
|
||||
release_channel: ReleaseChannel,
|
||||
http_client: Arc<dyn HttpClient>,
|
||||
node_runtime: Arc<dyn NodeRuntime>,
|
||||
node_runtime: NodeRuntime,
|
||||
pub(crate) language_registry: Arc<LanguageRegistry>,
|
||||
fs: Arc<dyn Fs>,
|
||||
pub(crate) work_dir: PathBuf,
|
||||
@@ -80,7 +80,7 @@ impl WasmHost {
|
||||
pub fn new(
|
||||
fs: Arc<dyn Fs>,
|
||||
http_client: Arc<dyn HttpClient>,
|
||||
node_runtime: Arc<dyn NodeRuntime>,
|
||||
node_runtime: NodeRuntime,
|
||||
language_registry: Arc<LanguageRegistry>,
|
||||
work_dir: PathBuf,
|
||||
cx: &mut AppContext,
|
||||
|
||||
@@ -18,8 +18,7 @@ use regex::Regex;
|
||||
use serde_derive::Serialize;
|
||||
use ui::{prelude::*, Button, ButtonStyle, IconPosition, Tooltip};
|
||||
use util::ResultExt;
|
||||
use workspace::notifications::NotificationId;
|
||||
use workspace::{DismissDecision, ModalView, Toast, Workspace};
|
||||
use workspace::{DismissDecision, ModalView, Workspace};
|
||||
|
||||
use crate::{system_specs::SystemSpecs, GiveFeedback, OpenZedRepo};
|
||||
|
||||
@@ -120,44 +119,34 @@ impl FeedbackModal {
|
||||
pub fn register(workspace: &mut Workspace, cx: &mut ViewContext<Workspace>) {
|
||||
let _handle = cx.view().downgrade();
|
||||
workspace.register_action(move |workspace, _: &GiveFeedback, cx| {
|
||||
let markdown = workspace
|
||||
.app_state()
|
||||
.languages
|
||||
.language_for_name("Markdown");
|
||||
workspace
|
||||
.with_local_workspace(cx, |workspace, cx| {
|
||||
let markdown = workspace
|
||||
.app_state()
|
||||
.languages
|
||||
.language_for_name("Markdown");
|
||||
|
||||
let project = workspace.project().clone();
|
||||
let is_local_project = project.read(cx).is_local_or_ssh();
|
||||
let project = workspace.project().clone();
|
||||
|
||||
if !is_local_project {
|
||||
struct FeedbackInRemoteProject;
|
||||
let system_specs = SystemSpecs::new(cx);
|
||||
cx.spawn(|workspace, mut cx| async move {
|
||||
let markdown = markdown.await.log_err();
|
||||
let buffer = project.update(&mut cx, |project, cx| {
|
||||
project.create_local_buffer("", markdown, cx)
|
||||
})?;
|
||||
let system_specs = system_specs.await;
|
||||
|
||||
workspace.show_toast(
|
||||
Toast::new(
|
||||
NotificationId::unique::<FeedbackInRemoteProject>(),
|
||||
"You can only submit feedback in your own project.",
|
||||
),
|
||||
cx,
|
||||
);
|
||||
return;
|
||||
}
|
||||
workspace.update(&mut cx, |workspace, cx| {
|
||||
workspace.toggle_modal(cx, move |cx| {
|
||||
FeedbackModal::new(system_specs, project, buffer, cx)
|
||||
});
|
||||
})?;
|
||||
|
||||
let system_specs = SystemSpecs::new(cx);
|
||||
cx.spawn(|workspace, mut cx| async move {
|
||||
let markdown = markdown.await.log_err();
|
||||
let buffer = project.update(&mut cx, |project, cx| {
|
||||
project.create_local_buffer("", markdown, cx)
|
||||
})?;
|
||||
let system_specs = system_specs.await;
|
||||
|
||||
workspace.update(&mut cx, |workspace, cx| {
|
||||
workspace.toggle_modal(cx, move |cx| {
|
||||
FeedbackModal::new(system_specs, project, buffer, cx)
|
||||
});
|
||||
})?;
|
||||
|
||||
anyhow::Ok(())
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
anyhow::Ok(())
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
@@ -884,7 +884,7 @@ impl PickerDelegate for FileFinderDelegate {
|
||||
project
|
||||
.worktree_for_id(history_item.project.worktree_id, cx)
|
||||
.is_some()
|
||||
|| (project.is_local_or_ssh() && history_item.absolute.is_some())
|
||||
|| (project.is_local() && history_item.absolute.is_some())
|
||||
}),
|
||||
self.currently_opened_path.as_ref(),
|
||||
None,
|
||||
@@ -1070,7 +1070,9 @@ impl PickerDelegate for FileFinderDelegate {
|
||||
self.labels_for_match(path_match, cx, ix);
|
||||
|
||||
let file_icon = if settings.file_icons {
|
||||
FileIcons::get_icon(Path::new(&file_name), cx).map(Icon::from_path)
|
||||
FileIcons::get_icon(Path::new(&file_name), cx)
|
||||
.map(Icon::from_path)
|
||||
.map(|icon| icon.color(Color::Muted))
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
@@ -41,7 +41,7 @@ impl sum_tree::Item for InternalDiffHunk {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
#[derive(Debug, Default, Clone)]
|
||||
pub struct DiffHunkSummary {
|
||||
buffer_range: Range<Anchor>,
|
||||
}
|
||||
@@ -49,10 +49,8 @@ pub struct DiffHunkSummary {
|
||||
impl sum_tree::Summary for DiffHunkSummary {
|
||||
type Context = text::BufferSnapshot;
|
||||
|
||||
fn zero(buffer: &Self::Context) -> Self {
|
||||
Self {
|
||||
buffer_range: buffer.min_anchor()..buffer.min_anchor(),
|
||||
}
|
||||
fn zero(_cx: &Self::Context) -> Self {
|
||||
Default::default()
|
||||
}
|
||||
|
||||
fn add_summary(&mut self, other: &Self, buffer: &Self::Context) {
|
||||
|
||||
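Both summaries now derive their identity element from Default instead of consulting the buffer snapshot. A toy version of the pattern (the trait below is a stand-in for the real sum_tree::Summary API, which also threads a context through):

// Toy stand-in for the sum_tree::Summary shape: the identity element
// comes from Default rather than from the buffer snapshot.
trait Summary: Default {
    fn zero() -> Self {
        Self::default()
    }
    fn add_summary(&mut self, other: &Self);
}

#[derive(Default, Debug, PartialEq)]
struct CountSummary {
    count: usize,
}

impl Summary for CountSummary {
    fn add_summary(&mut self, other: &Self) {
        self.count += other.count;
    }
}

fn main() {
    let mut total = CountSummary::zero();
    total.add_summary(&CountSummary { count: 2 });
    total.add_summary(&CountSummary { count: 3 });
    assert_eq!(total, CountSummary { count: 5 });
}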
@@ -18,6 +18,9 @@ pub mod repository;
|
||||
pub mod status;
|
||||
|
||||
pub static DOT_GIT: LazyLock<&'static OsStr> = LazyLock::new(|| OsStr::new(".git"));
|
||||
pub static COOKIES: LazyLock<&'static OsStr> = LazyLock::new(|| OsStr::new("cookies"));
|
||||
pub static FSMONITOR_DAEMON: LazyLock<&'static OsStr> =
|
||||
LazyLock::new(|| OsStr::new("fsmonitor--daemon"));
|
||||
pub static GITIGNORE: LazyLock<&'static OsStr> = LazyLock::new(|| OsStr::new(".gitignore"));
|
||||
|
||||
#[derive(Clone, Copy, Eq, Hash, PartialEq)]
|
||||
|
||||
@@ -145,6 +145,9 @@ impl GitHostingProvider for Github {
|
||||
.base_url()
|
||||
.join(&format!("{owner}/{repo}/blob/{sha}/{path}"))
|
||||
.unwrap();
|
||||
if path.ends_with(".md") {
|
||||
permalink.set_query(Some("plain=1"));
|
||||
}
|
||||
permalink.set_fragment(
|
||||
selection
|
||||
.map(|selection| self.line_fragment(&selection))
|
||||
|
||||
@@ -65,6 +65,9 @@ impl GitHostingProvider for Gitlab {
|
||||
.base_url()
|
||||
.join(&format!("{owner}/{repo}/-/blob/{sha}/{path}"))
|
||||
.unwrap();
|
||||
if path.ends_with(".md") {
|
||||
permalink.set_query(Some("plain=1"));
|
||||
}
|
||||
permalink.set_fragment(
|
||||
selection
|
||||
.map(|selection| self.line_fragment(&selection))
|
||||
|
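Both providers now append ?plain=1 to Markdown permalinks so the host shows source lines (and line fragments) instead of the rendered document. A minimal sketch of the same rule using the url crate (assumed as a dependency; the helper name is hypothetical):

use url::Url;

// Hypothetical helper mirroring the hunks above: Markdown files get ?plain=1.
fn blob_permalink(base: &str, path: &str) -> Url {
    let mut permalink = Url::parse(base).unwrap().join(path).unwrap();
    if path.ends_with(".md") {
        permalink.set_query(Some("plain=1"));
    }
    permalink
}

fn main() {
    let url = blob_permalink("https://github.com/owner/repo/blob/abc123/", "docs/README.md");
    assert_eq!(url.query(), Some("plain=1"));
    let url = blob_permalink("https://github.com/owner/repo/blob/abc123/", "src/main.rs");
    assert_eq!(url.query(), None);
}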
||||
@@ -70,9 +70,7 @@ mod tests {
|
||||
|
||||
unsafe {
|
||||
let image: id = msg_send![class!(NSImage), alloc];
|
||||
image.initWithContentsOfFile_(
|
||||
NSString::alloc(nil).init_str("/Users/rtfeldman/Downloads/test.jpeg"),
|
||||
);
|
||||
image.initWithContentsOfFile_(NSString::alloc(nil).init_str("test.jpeg"));
|
||||
let _size = image.size();
|
||||
|
||||
let string = NSString::alloc(nil).init_str("Test String");
|
||||
|
||||
@@ -25,7 +25,7 @@ pub struct DevServer {
|
||||
}
|
||||
|
||||
pub struct AppState {
|
||||
pub node_runtime: Arc<dyn NodeRuntime>,
|
||||
pub node_runtime: NodeRuntime,
|
||||
pub user_store: Model<UserStore>,
|
||||
pub languages: Arc<LanguageRegistry>,
|
||||
pub fs: Arc<dyn Fs>,
|
||||
|
||||
@@ -16,7 +16,7 @@ path = "src/http_client.rs"
|
||||
doctest = true
|
||||
|
||||
[dependencies]
|
||||
http = "0.2"
|
||||
http = "1.1.0"
|
||||
anyhow.workspace = true
|
||||
derive_more.workspace = true
|
||||
futures.workspace = true
|
||||
|
||||
@@ -264,6 +264,35 @@ pub fn read_proxy_from_env() -> Option<Uri> {
|
||||
None
|
||||
}
|
||||
|
||||
pub struct BlockedHttpClient;
|
||||
|
||||
impl HttpClient for BlockedHttpClient {
|
||||
fn send(
|
||||
&self,
|
||||
_req: Request<AsyncBody>,
|
||||
) -> BoxFuture<'static, Result<Response<AsyncBody>, anyhow::Error>> {
|
||||
Box::pin(async {
|
||||
Err(std::io::Error::new(
|
||||
std::io::ErrorKind::PermissionDenied,
|
||||
"BlockedHttpClient disallowed request",
|
||||
)
|
||||
.into())
|
||||
})
|
||||
}
|
||||
|
||||
fn proxy(&self) -> Option<&Uri> {
|
||||
None
|
||||
}
|
||||
|
||||
fn send_with_redirect_policy(
|
||||
&self,
|
||||
req: Request<AsyncBody>,
|
||||
_: bool,
|
||||
) -> BoxFuture<'static, Result<Response<AsyncBody>, anyhow::Error>> {
|
||||
self.send(req)
|
||||
}
|
||||
}
|
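A self-contained sketch of the always-deny pattern BlockedHttpClient implements (the trait below is a stand-in, not the real http_client API):

use std::io;

// Stand-in trait to show the shape of an always-deny client.
trait Client {
    fn send(&self, url: &str) -> io::Result<String>;
}

struct Blocked;

impl Client for Blocked {
    fn send(&self, url: &str) -> io::Result<String> {
        // Mirrors BlockedHttpClient above: refuse every request up front.
        Err(io::Error::new(
            io::ErrorKind::PermissionDenied,
            format!("blocked request to {url}"),
        ))
    }
}

fn main() {
    let err = Blocked.send("https://zed.dev").unwrap_err();
    assert_eq!(err.kind(), io::ErrorKind::PermissionDenied);
}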
||||
|
||||
#[cfg(feature = "test-support")]
|
||||
type FakeHttpHandler = Box<
|
||||
dyn Fn(Request<AsyncBody>) -> BoxFuture<'static, Result<Response<AsyncBody>, anyhow::Error>>
|
||||
|
||||
45
crates/http_client/src/hyper_based.rs
Normal file
@@ -0,0 +1,45 @@
|
||||
use anyhow::Error;
|
||||
use futures::future::BoxFuture;
|
||||
use http::{Response, Uri};
|
||||
use hyper_util::client::legacy::{connect::HttpConnector, Client};
|
||||
|
||||
struct HyperBasedHttpClient {
|
||||
client: Client<HttpConnector, Vec<u8>>,
|
||||
}
|
||||
|
||||
struct Executor {
|
||||
executor: gpui::BackgroundExecutor,
|
||||
}
|
||||
impl HyperBasedHttpClient {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
client: Client::builder().build(HttpConnector::new()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl HttpClient for HyperBasedHttpClient {
|
||||
fn proxy(&self) -> Option<&Uri> {
|
||||
None
|
||||
}
|
||||
|
||||
fn send(
|
||||
&self,
|
||||
request: HttpRequest,
|
||||
method: &str,
|
||||
) -> BoxFuture<'static, Result<Response, Error>> {
|
||||
let request = request.into_hyper_request(method);
|
||||
Box::pin(async move {
|
||||
let response = self.client.request(request).await?;
|
||||
Ok(response.into())
|
||||
})
|
||||
}
|
||||
|
||||
fn send_response(&self, response: HttpResponse) -> BoxFuture<'static, Result<(), Error>> {
|
||||
let response = response.into_hyper_response();
|
||||
Box::pin(async move {
|
||||
let _ = self.client.request(response).await?;
|
||||
Ok(())
|
||||
})
|
||||
}
|
||||
}
|
||||
38
crates/hyper_client/Cargo.toml
Normal file
@@ -0,0 +1,38 @@
|
||||
[package]
|
||||
name = "hyper_client"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
license = "Apache-2.0"
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
[features]
|
||||
test-support = []
|
||||
|
||||
[lib]
|
||||
path = "src/hyper_client.rs"
|
||||
doctest = true
|
||||
|
||||
[[example]]
|
||||
name = "client"
|
||||
path = "examples/client.rs"
|
||||
|
||||
[dependencies]
|
||||
anyhow.workspace = true
|
||||
derive_more.workspace = true
|
||||
futures.workspace = true
|
||||
log.workspace = true
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
smol.workspace = true
|
||||
url.workspace = true
|
||||
gpui.workspace = true
|
||||
http_client.workspace = true
|
||||
|
||||
ureq = "2.10.1"
|
||||
|
||||
hyper.workspace = true
|
||||
hyper-util = {workspace = true, features = ["client", "http1", "http2", "client-legacy"]}
|
||||
hyper-rustls.workspace = true
|
||||
20
crates/hyper_client/examples/client.rs
Normal file
@@ -0,0 +1,20 @@
|
||||
use futures::AsyncReadExt;
|
||||
use http_client::{AsyncBody, HttpClient};
|
||||
use hyper_client::UreqHttpClient;
|
||||
|
||||
fn main() {
|
||||
gpui::App::headless().run(|cx| {
|
||||
dbg!(std::thread::current().id());
|
||||
cx.spawn(|cx| async move {
|
||||
let resp = UreqHttpClient::new(cx.background_executor().clone())
|
||||
.get("http://zed.dev", AsyncBody::empty(), false)
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let mut body = String::new();
|
||||
resp.into_body().read_to_string(&mut body).await.unwrap();
|
||||
dbg!(&body.len());
|
||||
})
|
||||
.detach();
|
||||
})
|
||||
}
|
||||
144
crates/hyper_client/src/hyper_client.rs
Normal file
@@ -0,0 +1,144 @@
|
||||
use std::io::Read;
|
||||
use std::{pin::Pin, task::Poll};
|
||||
|
||||
use anyhow::Error;
|
||||
use futures::channel::mpsc;
|
||||
use futures::future::BoxFuture;
|
||||
use futures::{AsyncRead, StreamExt};
|
||||
use gpui::AppContext;
|
||||
use http_client::{http, AsyncBody, HttpClient, Inner};
|
||||
use hyper::body::{Body, Bytes, Frame, Incoming, SizeHint};
|
||||
use hyper::http::{Response, Uri};
|
||||
use hyper_util::client::legacy::{connect::HttpConnector, Client};
|
||||
use smol::future::FutureExt;
|
||||
use std::future::Future;
|
||||
|
||||
pub struct UreqHttpClient {
|
||||
client: ureq::Agent,
|
||||
background_executor: gpui::BackgroundExecutor,
|
||||
}
|
||||
|
||||
impl UreqHttpClient {
|
||||
pub fn new(background_executor: gpui::BackgroundExecutor) -> Self {
|
||||
Self {
|
||||
client: ureq::agent(),
|
||||
background_executor,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct UreqResponseReader {
|
||||
task: gpui::Task<()>,
|
||||
receiver: mpsc::Receiver<std::io::Result<Vec<u8>>>,
|
||||
buffer: Vec<u8>,
|
||||
}
|
||||
|
||||
impl UreqResponseReader {
|
||||
fn new(background_executor: gpui::BackgroundExecutor, response: ureq::Response) -> Self {
|
||||
let (mut sender, receiver) = mpsc::channel(1);
|
||||
let mut reader = response.into_reader();
|
||||
let task = background_executor.spawn(async move {
|
||||
let mut buffer = vec![0; 8192];
|
||||
loop {
|
||||
let n = match reader.read(&mut buffer) {
|
||||
Ok(0) => break,
|
||||
Ok(n) => n,
|
||||
Err(e) => {
|
||||
let _ = sender.try_send(Err(e));
|
||||
break;
|
||||
}
|
||||
};
|
||||
let _ = sender.try_send(Ok(buffer[..n].to_vec()));
|
||||
}
|
||||
});
|
||||
|
||||
UreqResponseReader {
|
||||
task,
|
||||
receiver,
|
||||
buffer: Vec::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl AsyncRead for UreqResponseReader {
|
||||
fn poll_read(
|
||||
mut self: Pin<&mut Self>,
|
||||
cx: &mut std::task::Context<'_>,
|
||||
buf: &mut [u8],
|
||||
) -> Poll<std::io::Result<usize>> {
|
||||
let now = std::time::Instant::now();
|
||||
if self.buffer.is_empty() {
|
||||
match self.receiver.poll_next_unpin(cx) {
|
||||
Poll::Ready(Some(Ok(data))) => self.buffer.extend(data),
|
||||
Poll::Ready(Some(Err(e))) => return Poll::Ready(Err(e)),
|
||||
Poll::Ready(None) => return Poll::Ready(Ok(0)), // EOF
|
||||
Poll::Pending => {
|
||||
dbg!(now.elapsed());
|
||||
return Poll::Pending;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let n = std::cmp::min(buf.len(), self.buffer.len());
|
||||
dbg!(buf.len(), self.buffer.len(), now.elapsed());
|
||||
dbg!(std::thread::current().id());
|
||||
buf[..n].copy_from_slice(&self.buffer[..n]);
|
||||
self.buffer.drain(..n);
|
||||
|
||||
Poll::Ready(Ok(n))
|
||||
}
|
||||
}
|
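UreqResponseReader bridges ureq's blocking Read into futures' AsyncRead by pumping chunks through a channel from a background task. A dependency-free sketch of the same bridge using std threads and channels (names are illustrative):

use std::io::Read;
use std::sync::mpsc;
use std::thread;

// Pump a blocking reader into a channel of chunks, as the executor task does above.
fn spawn_reader<R: Read + Send + 'static>(mut reader: R) -> mpsc::Receiver<std::io::Result<Vec<u8>>> {
    let (tx, rx) = mpsc::channel();
    thread::spawn(move || {
        let mut buf = [0u8; 8192];
        loop {
            match reader.read(&mut buf) {
                Ok(0) => break, // EOF: dropping tx closes the channel
                Ok(n) => {
                    if tx.send(Ok(buf[..n].to_vec())).is_err() {
                        break; // receiver hung up
                    }
                }
                Err(e) => {
                    let _ = tx.send(Err(e));
                    break;
                }
            }
        }
    });
    rx
}

fn main() {
    let rx = spawn_reader(&b"hello world"[..]);
    let mut out = Vec::new();
    for chunk in rx {
        out.extend(chunk.unwrap());
    }
    assert_eq!(out, b"hello world");
}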
||||
|
||||
impl HttpClient for UreqHttpClient {
|
||||
fn proxy(&self) -> Option<&Uri> {
|
||||
None
|
||||
}
|
||||
|
||||
fn send_with_redirect_policy(
|
||||
&self,
|
||||
request: http::Request<AsyncBody>,
|
||||
follow_redirects: bool,
|
||||
) -> BoxFuture<'static, Result<http::Response<AsyncBody>, Error>> {
|
||||
let method = request.method().clone();
|
||||
let url = request.uri().to_string();
|
||||
let headers = request.headers().clone();
|
||||
let mut req = self.client.request(method.as_str(), &url);
|
||||
for (name, value) in headers.iter() {
|
||||
req = req.set(name.as_str(), value.to_str().unwrap());
|
||||
}
|
||||
let executor = self.background_executor.clone();
|
||||
let req = executor.spawn(async move {
|
||||
let resp = req.send(request.into_body());
|
||||
dbg!(std::thread::current().id());
|
||||
resp
|
||||
});
|
||||
|
||||
// Set follow_redirects policy
|
||||
// req = req.redirects(if follow_redirects { 10 } else { 0 });
|
||||
|
||||
async move {
|
||||
// Set headers
|
||||
// Send the request
|
||||
let response = req.await?;
|
||||
dbg!(std::thread::current().id());
|
||||
|
||||
// Convert ureq response to http::Response
|
||||
let mut builder = http::Response::builder()
|
||||
.status(response.status())
|
||||
.version(http::Version::HTTP_11);
|
||||
|
||||
// Set response headers
|
||||
for name in response.headers_names() {
|
||||
if let Some(value) = response.header(&name) {
|
||||
builder = builder.header(name, value);
|
||||
}
|
||||
}
|
||||
|
||||
let body = AsyncBody::from_reader(UreqResponseReader::new(executor, response));
|
||||
let http_response = builder.body(body)?;
|
||||
|
||||
Ok(http_response)
|
||||
}
|
||||
.boxed()
|
||||
}
|
||||
}
|
||||
@@ -2768,7 +2768,7 @@ fn test_random_collaboration(cx: &mut AppContext, mut rng: StdRng) {
|
||||
for buffer in &buffers {
|
||||
let buffer = buffer.read(cx).snapshot();
|
||||
let actual_remote_selections = buffer
|
||||
.selections_in_range(buffer.min_anchor()..buffer.max_anchor(), false)
|
||||
.selections_in_range(Anchor::MIN..Anchor::MAX, false)
|
||||
.map(|(replica_id, _, _, selections)| (replica_id, selections.collect::<Vec<_>>()))
|
||||
.collect::<Vec<_>>();
|
||||
let expected_remote_selections = active_selections
|
||||
|
||||
@@ -246,17 +246,23 @@ impl DiagnosticEntry<Anchor> {
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for Summary {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
start: Anchor::MIN,
|
||||
end: Anchor::MAX,
|
||||
min_start: Anchor::MAX,
|
||||
max_end: Anchor::MIN,
|
||||
count: 0,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl sum_tree::Summary for Summary {
|
||||
type Context = text::BufferSnapshot;
|
||||
|
||||
fn zero(buffer: &Self::Context) -> Self {
|
||||
Self {
|
||||
start: buffer.min_anchor(),
|
||||
end: buffer.max_anchor(),
|
||||
min_start: buffer.max_anchor(),
|
||||
max_end: buffer.min_anchor(),
|
||||
count: 0,
|
||||
}
|
||||
fn zero(_cx: &Self::Context) -> Self {
|
||||
Default::default()
|
||||
}
|
||||
|
||||
fn add_summary(&mut self, other: &Self, buffer: &Self::Context) {
|
||||
|
||||
@@ -29,7 +29,7 @@ use gpui::{AppContext, AsyncAppContext, Model, SharedString, Task};
|
||||
pub use highlight_map::HighlightMap;
|
||||
use http_client::HttpClient;
|
||||
pub use language_registry::LanguageName;
|
||||
use lsp::{CodeActionKind, LanguageServerBinary};
|
||||
use lsp::{CodeActionKind, LanguageServerBinary, LanguageServerBinaryOptions};
|
||||
use parking_lot::Mutex;
|
||||
use regex::Regex;
|
||||
use schemars::{
|
||||
@@ -69,7 +69,7 @@ pub use buffer::*;
|
||||
pub use diagnostic_set::DiagnosticEntry;
|
||||
pub use language_registry::{
|
||||
AvailableLanguage, LanguageNotFound, LanguageQueries, LanguageRegistry,
|
||||
LanguageServerBinaryStatus, PendingLanguageServer, QUERY_FILENAME_PREFIXES,
|
||||
LanguageServerBinaryStatus, QUERY_FILENAME_PREFIXES,
|
||||
};
|
||||
pub use lsp::LanguageServerId;
|
||||
pub use outline::*;
|
||||
@@ -249,28 +249,17 @@ impl CachedLspAdapter {
|
||||
|
||||
pub async fn get_language_server_command(
|
||||
self: Arc<Self>,
|
||||
container_dir: Option<Arc<Path>>,
|
||||
delegate: Arc<dyn LspAdapterDelegate>,
|
||||
binary_options: LanguageServerBinaryOptions,
|
||||
cx: &mut AsyncAppContext,
|
||||
) -> Result<LanguageServerBinary> {
|
||||
let cached_binary = self.cached_binary.lock().await;
|
||||
self.adapter
|
||||
.clone()
|
||||
.get_language_server_command(container_dir, delegate, cached_binary, cx)
|
||||
.get_language_server_command(delegate, binary_options, cached_binary, cx)
|
||||
.await
|
||||
}
|
||||
|
||||
pub fn can_be_reinstalled(&self) -> bool {
|
||||
self.adapter.can_be_reinstalled()
|
||||
}
|
||||
|
||||
pub async fn installation_test_binary(
|
||||
&self,
|
||||
container_dir: PathBuf,
|
||||
) -> Option<LanguageServerBinary> {
|
||||
self.adapter.installation_test_binary(container_dir).await
|
||||
}
|
||||
|
||||
pub fn code_action_kinds(&self) -> Option<Vec<CodeActionKind>> {
|
||||
self.adapter.code_action_kinds()
|
||||
}
|
||||
@@ -322,7 +311,12 @@ pub trait LspAdapterDelegate: Send + Sync {
|
||||
fn worktree_id(&self) -> WorktreeId;
|
||||
fn worktree_root_path(&self) -> &Path;
|
||||
fn update_status(&self, language: LanguageServerName, status: LanguageServerBinaryStatus);
|
||||
async fn language_server_download_dir(&self, name: &LanguageServerName) -> Option<Arc<Path>>;
|
||||
|
||||
async fn npm_package_installed_version(
|
||||
&self,
|
||||
package_name: &str,
|
||||
) -> Result<Option<(PathBuf, String)>>;
|
||||
async fn which(&self, command: &OsStr) -> Option<PathBuf>;
|
||||
async fn shell_env(&self) -> HashMap<String, String>;
|
||||
async fn read_text_file(&self, path: PathBuf) -> Result<String>;
|
||||
@@ -335,8 +329,8 @@ pub trait LspAdapter: 'static + Send + Sync {
|
||||
|
||||
fn get_language_server_command<'a>(
|
||||
self: Arc<Self>,
|
||||
container_dir: Option<Arc<Path>>,
|
||||
delegate: Arc<dyn LspAdapterDelegate>,
|
||||
binary_options: LanguageServerBinaryOptions,
|
||||
mut cached_binary: futures::lock::MutexGuard<'a, Option<LanguageServerBinary>>,
|
||||
cx: &'a mut AsyncAppContext,
|
||||
) -> Pin<Box<dyn 'a + Future<Output = Result<LanguageServerBinary>>>> {
|
||||
@@ -352,30 +346,30 @@ pub trait LspAdapter: 'static + Send + Sync {
|
||||
// We only want to cache when we fall back to the global one,
|
||||
// because we don't want to download and overwrite our global one
|
||||
// for each worktree we might have open.
|
||||
if let Some(binary) = self.check_if_user_installed(delegate.as_ref(), cx).await {
|
||||
log::info!(
|
||||
"found user-installed language server for {}. path: {:?}, arguments: {:?}",
|
||||
self.name().0,
|
||||
binary.path,
|
||||
binary.arguments
|
||||
);
|
||||
return Ok(binary);
|
||||
if binary_options.allow_path_lookup {
|
||||
if let Some(binary) = self.check_if_user_installed(delegate.as_ref(), cx).await {
|
||||
log::info!(
|
||||
"found user-installed language server for {}. path: {:?}, arguments: {:?}",
|
||||
self.name().0,
|
||||
binary.path,
|
||||
binary.arguments
|
||||
);
|
||||
return Ok(binary);
|
||||
}
|
||||
}
|
||||
|
||||
if !binary_options.allow_binary_download {
|
||||
return Err(anyhow!("downloading language servers disabled"));
|
||||
}
|
||||
|
||||
if let Some(cached_binary) = cached_binary.as_ref() {
|
||||
return Ok(cached_binary.clone());
|
||||
}
|
||||
|
||||
let Some(container_dir) = container_dir else {
|
||||
let Some(container_dir) = delegate.language_server_download_dir(&self.name()).await else {
|
||||
anyhow::bail!("cannot download language servers for remotes (yet)")
|
||||
};
|
||||
|
||||
if !container_dir.exists() {
|
||||
smol::fs::create_dir_all(&container_dir)
|
||||
.await
|
||||
.context("failed to create container directory")?;
|
||||
}
|
||||
|
||||
let mut binary = try_fetch_server_binary(self.as_ref(), &delegate, container_dir.to_path_buf(), cx).await;
|
||||
|
||||
if let Err(error) = binary.as_ref() {
|
||||
@@ -384,8 +378,9 @@ pub trait LspAdapter: 'static + Send + Sync {
|
||||
.await
|
||||
{
|
||||
log::info!(
|
||||
"failed to fetch newest version of language server {:?}. falling back to using {:?}",
|
||||
"failed to fetch newest version of language server {:?}. error: {:?}, falling back to using {:?}",
|
||||
self.name(),
|
||||
error,
|
||||
prev_downloaded_binary.path
|
||||
);
|
||||
binary = Ok(prev_downloaded_binary);
|
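The new LanguageServerBinaryOptions gate the two lookup strategies independently: $PATH lookup first (if allowed), then the cached or freshly downloaded binary. An illustrative-only model of that control flow (the types and strings below are stand-ins for the real adapter):

struct LanguageServerBinaryOptions {
    allow_path_lookup: bool,
    allow_binary_download: bool,
}

fn resolve_binary(
    options: &LanguageServerBinaryOptions,
    user_installed: Option<&'static str>,
    cached: Option<&'static str>,
) -> Result<&'static str, &'static str> {
    if options.allow_path_lookup {
        if let Some(path) = user_installed {
            return Ok(path); // prefer a binary the user already has on $PATH
        }
    }
    if !options.allow_binary_download {
        return Err("downloading language servers disabled");
    }
    if let Some(path) = cached {
        return Ok(path); // reuse a previously downloaded binary
    }
    Ok("<freshly downloaded binary>")
}

fn main() {
    let options = LanguageServerBinaryOptions {
        allow_path_lookup: true,
        allow_binary_download: false,
    };
    assert_eq!(resolve_binary(&options, Some("/usr/bin/clangd"), None), Ok("/usr/bin/clangd"));
    assert_eq!(resolve_binary(&options, None, None), Err("downloading language servers disabled"));
}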
||||
@@ -442,21 +437,6 @@ pub trait LspAdapter: 'static + Send + Sync {
|
||||
delegate: &dyn LspAdapterDelegate,
|
||||
) -> Option<LanguageServerBinary>;
|
||||
|
||||
/// Returns `true` if a language server can be reinstalled.
|
||||
///
|
||||
/// If language server initialization fails, a reinstallation will be attempted unless the value returned from this method is `false`.
|
||||
///
|
||||
/// Implementations that rely on software already installed on user's system
|
||||
/// should have [`can_be_reinstalled`](Self::can_be_reinstalled) return `false`.
|
||||
fn can_be_reinstalled(&self) -> bool {
|
||||
true
|
||||
}
|
||||
|
||||
async fn installation_test_binary(
|
||||
&self,
|
||||
container_dir: PathBuf,
|
||||
) -> Option<LanguageServerBinary>;
|
||||
|
||||
fn process_diagnostics(&self, _: &mut lsp::PublishDiagnosticsParams) {}
|
||||
|
||||
/// Post-processes completions provided by the language server.
|
||||
@@ -564,6 +544,7 @@ async fn try_fetch_server_binary<L: LspAdapter + 'static + Send + Sync + ?Sized>
|
||||
let name = adapter.name();
|
||||
log::info!("fetching latest version of language server {:?}", name.0);
|
||||
delegate.update_status(name.clone(), LanguageServerBinaryStatus::CheckingForUpdate);
|
||||
|
||||
let latest_version = adapter
|
||||
.fetch_latest_server_version(delegate.as_ref())
|
||||
.await?;
|
||||
@@ -1709,8 +1690,8 @@ impl LspAdapter for FakeLspAdapter {
|
||||
|
||||
fn get_language_server_command<'a>(
|
||||
self: Arc<Self>,
|
||||
_: Option<Arc<Path>>,
|
||||
_: Arc<dyn LspAdapterDelegate>,
|
||||
_: LanguageServerBinaryOptions,
|
||||
_: futures::lock::MutexGuard<'a, Option<LanguageServerBinary>>,
|
||||
_: &'a mut AsyncAppContext,
|
||||
) -> Pin<Box<dyn 'a + Future<Output = Result<LanguageServerBinary>>>> {
|
||||
@@ -1741,10 +1722,6 @@ impl LspAdapter for FakeLspAdapter {
|
||||
unreachable!();
|
||||
}
|
||||
|
||||
async fn installation_test_binary(&self, _: PathBuf) -> Option<LanguageServerBinary> {
|
||||
unreachable!();
|
||||
}
|
||||
|
||||
fn process_diagnostics(&self, _: &mut lsp::PublishDiagnosticsParams) {}
|
||||
|
||||
fn disk_based_diagnostic_sources(&self) -> Vec<String> {
|
||||
|
||||
@@ -4,18 +4,17 @@ use crate::{
|
||||
},
|
||||
task_context::ContextProvider,
|
||||
with_parser, CachedLspAdapter, File, Language, LanguageConfig, LanguageId, LanguageMatcher,
|
||||
LanguageServerName, LspAdapter, LspAdapterDelegate, PLAIN_TEXT,
|
||||
LanguageServerName, LspAdapter, PLAIN_TEXT,
|
||||
};
|
||||
use anyhow::{anyhow, Context, Result};
|
||||
use collections::{hash_map, HashMap, HashSet};
|
||||
|
||||
use futures::{
|
||||
channel::{mpsc, oneshot},
|
||||
future::Shared,
|
||||
Future,
|
||||
};
|
||||
use globset::GlobSet;
|
||||
use gpui::{AppContext, BackgroundExecutor, Task};
|
||||
use gpui::{AppContext, BackgroundExecutor};
|
||||
use lsp::LanguageServerId;
|
||||
use parking_lot::{Mutex, RwLock};
|
||||
use postage::watch;
|
||||
@@ -118,12 +117,6 @@ pub enum LanguageServerBinaryStatus {
|
||||
Failed { error: String },
|
||||
}
|
||||
|
||||
pub struct PendingLanguageServer {
|
||||
pub server_id: LanguageServerId,
|
||||
pub task: Task<Result<(lsp::LanguageServer, Option<serde_json::Value>)>>,
|
||||
pub container_dir: Option<Arc<Path>>,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct AvailableLanguage {
|
||||
id: LanguageId,
|
||||
@@ -882,123 +875,53 @@ impl LanguageRegistry {
|
||||
self.lsp_binary_status_tx.send(server_name, status);
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
pub fn create_pending_language_server(
|
||||
self: &Arc<Self>,
|
||||
stderr_capture: Arc<Mutex<Option<String>>>,
|
||||
_language_name_for_tests: LanguageName,
|
||||
adapter: Arc<CachedLspAdapter>,
|
||||
root_path: Arc<Path>,
|
||||
delegate: Arc<dyn LspAdapterDelegate>,
|
||||
project_environment: Shared<Task<Option<HashMap<String, String>>>>,
|
||||
cx: &mut AppContext,
|
||||
) -> Option<PendingLanguageServer> {
|
||||
let server_id = self.state.write().next_language_server_id();
|
||||
log::info!(
|
||||
"attempting to start language server {:?}, path: {root_path:?}, id: {server_id}",
|
||||
adapter.name.0
|
||||
);
|
||||
pub fn next_language_server_id(&self) -> LanguageServerId {
|
||||
self.state.write().next_language_server_id()
|
||||
}
|
||||
|
||||
let container_dir: Option<Arc<Path>> = self
|
||||
.language_server_download_dir
|
||||
pub fn language_server_download_dir(&self, name: &LanguageServerName) -> Option<Arc<Path>> {
|
||||
self.language_server_download_dir
|
||||
.as_ref()
|
||||
.map(|dir| Arc::from(dir.join(adapter.name.0.as_ref())));
|
||||
let root_path = root_path.clone();
|
||||
let this = Arc::downgrade(self);
|
||||
.map(|dir| Arc::from(dir.join(name.0.as_ref())))
|
||||
}
|
||||
|
||||
let task = cx.spawn({
|
||||
let container_dir = container_dir.clone();
|
||||
move |mut cx| async move {
|
||||
let project_environment = project_environment.await;
|
||||
|
||||
let binary_result = adapter
|
||||
.clone()
|
||||
.get_language_server_command(container_dir, delegate.clone(), &mut cx)
|
||||
.await;
|
||||
|
||||
delegate.update_status(adapter.name.clone(), LanguageServerBinaryStatus::None);
|
||||
|
||||
let mut binary = binary_result?;
|
||||
|
||||
// If we do have a project environment (either by spawning a shell in the project directory
|
||||
// or by getting it from the CLI) and the language server command itself
|
||||
// doesn't have an environment (which it would have, if it was found in $PATH), then
|
||||
// we use the project environment.
|
||||
if binary.env.is_none() && project_environment.is_some() {
|
||||
log::info!(
|
||||
"using project environment for language server {:?}, id: {server_id}",
|
||||
adapter.name.0
|
||||
);
|
||||
binary.env = project_environment.clone();
|
||||
}
|
||||
|
||||
let options = adapter
|
||||
.adapter
|
||||
.clone()
|
||||
.initialization_options(&delegate)
|
||||
.await?;
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
if true {
|
||||
if let Some(this) = this.upgrade() {
|
||||
if let Some(fake_entry) = this
|
||||
.state
|
||||
.write()
|
||||
.fake_server_entries
|
||||
.get_mut(&adapter.name)
|
||||
{
|
||||
let (server, mut fake_server) = lsp::FakeLanguageServer::new(
|
||||
server_id,
|
||||
binary,
|
||||
adapter.name.0.to_string(),
|
||||
fake_entry.capabilities.clone(),
|
||||
cx.clone(),
|
||||
);
|
||||
fake_entry._server = Some(fake_server.clone());
|
||||
|
||||
if let Some(initializer) = &fake_entry.initializer {
|
||||
initializer(&mut fake_server);
|
||||
}
|
||||
|
||||
let tx = fake_entry.tx.clone();
|
||||
cx.background_executor()
|
||||
.spawn(async move {
|
||||
if fake_server
|
||||
.try_receive_notification::<lsp::notification::Initialized>(
|
||||
)
|
||||
.await
|
||||
.is_some()
|
||||
{
|
||||
tx.unbounded_send(fake_server.clone()).ok();
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
|
||||
return Ok((server, options));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
drop(this);
|
||||
Ok((
|
||||
lsp::LanguageServer::new(
|
||||
stderr_capture,
|
||||
server_id,
|
||||
binary,
|
||||
&root_path,
|
||||
adapter.code_action_kinds(),
|
||||
cx,
|
||||
)?,
|
||||
options,
|
||||
))
|
||||
}
|
||||
});
|
||||
|
||||
Some(PendingLanguageServer {
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
pub fn create_fake_language_server(
|
||||
&self,
|
||||
server_id: LanguageServerId,
|
||||
name: &LanguageServerName,
|
||||
binary: lsp::LanguageServerBinary,
|
||||
cx: gpui::AsyncAppContext,
|
||||
) -> Option<lsp::LanguageServer> {
|
||||
let mut state = self.state.write();
|
||||
let fake_entry = state.fake_server_entries.get_mut(&name)?;
|
||||
let (server, mut fake_server) = lsp::FakeLanguageServer::new(
|
||||
server_id,
|
||||
task,
|
||||
container_dir,
|
||||
})
|
||||
binary,
|
||||
name.0.to_string(),
|
||||
fake_entry.capabilities.clone(),
|
||||
cx.clone(),
|
||||
);
|
||||
fake_entry._server = Some(fake_server.clone());
|
||||
|
||||
if let Some(initializer) = &fake_entry.initializer {
|
||||
initializer(&mut fake_server);
|
||||
}
|
||||
|
||||
let tx = fake_entry.tx.clone();
|
||||
cx.background_executor()
|
||||
.spawn(async move {
|
||||
if fake_server
|
||||
.try_receive_notification::<lsp::notification::Initialized>()
|
||||
.await
|
||||
.is_some()
|
||||
{
|
||||
tx.unbounded_send(fake_server.clone()).ok();
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
|
||||
Some(server)
|
||||
}
|
||||
|
||||
pub fn language_server_binary_statuses(
|
||||
@@ -1007,29 +930,16 @@ impl LanguageRegistry {
|
||||
self.lsp_binary_status_tx.subscribe()
|
||||
}
|
||||
|
||||
pub fn delete_server_container(
|
||||
&self,
|
||||
adapter: Arc<CachedLspAdapter>,
|
||||
cx: &mut AppContext,
|
||||
) -> Task<()> {
|
||||
pub async fn delete_server_container(&self, name: LanguageServerName) {
|
||||
log::info!("deleting server container");
|
||||
let Some(dir) = self.language_server_download_dir(&name) else {
|
||||
return;
|
||||
};
|
||||
|
||||
let download_dir = self
|
||||
.language_server_download_dir
|
||||
.clone()
|
||||
.expect("language server download directory has not been assigned before deleting server container");
|
||||
|
||||
cx.spawn(|_| async move {
|
||||
let container_dir = download_dir.join(adapter.name.0.as_ref());
|
||||
smol::fs::remove_dir_all(container_dir)
|
||||
.await
|
||||
.context("server container removal")
|
||||
.log_err();
|
||||
})
|
||||
}
|
||||
|
||||
pub fn next_language_server_id(&self) -> LanguageServerId {
|
||||
self.state.write().next_language_server_id()
|
||||
smol::fs::remove_dir_all(dir)
|
||||
.await
|
||||
.context("server container removal")
|
||||
.log_err();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@ use clock::ReplicaId;
|
||||
use lsp::{DiagnosticSeverity, LanguageServerId};
|
||||
use rpc::proto;
|
||||
use serde_json::Value;
|
||||
use std::{ops::Range, str::FromStr, sync::Arc, u32};
|
||||
use std::{ops::Range, str::FromStr, sync::Arc};
|
||||
use text::*;
|
||||
|
||||
pub use proto::{BufferState, Operation};
|
||||
@@ -221,36 +221,15 @@ pub fn serialize_diagnostics<'a>(
|
||||
|
||||
/// Serializes an [`Anchor`] to be sent over RPC.
|
||||
pub fn serialize_anchor(anchor: &Anchor) -> proto::Anchor {
|
||||
match *anchor {
|
||||
Anchor::Start { buffer_id } => proto::Anchor {
|
||||
replica_id: 0,
|
||||
timestamp: 0,
|
||||
offset: 0,
|
||||
bias: proto::Bias::Left as i32,
|
||||
buffer_id: Some(buffer_id.into()),
|
||||
},
|
||||
Anchor::End { buffer_id } => proto::Anchor {
|
||||
replica_id: u32::MAX,
|
||||
timestamp: u32::MAX,
|
||||
offset: u64::MAX,
|
||||
bias: proto::Bias::Right as i32,
|
||||
buffer_id: Some(buffer_id.into()),
|
||||
},
|
||||
Anchor::Character {
|
||||
buffer_id,
|
||||
insertion_id,
|
||||
offset,
|
||||
bias,
|
||||
} => proto::Anchor {
|
||||
replica_id: insertion_id.replica_id as u32,
|
||||
timestamp: insertion_id.value,
|
||||
offset: offset as u64,
|
||||
bias: match bias {
|
||||
Bias::Left => proto::Bias::Left as i32,
|
||||
Bias::Right => proto::Bias::Right as i32,
|
||||
},
|
||||
buffer_id: Some(buffer_id.into()),
|
||||
proto::Anchor {
|
||||
replica_id: anchor.timestamp.replica_id as u32,
|
||||
timestamp: anchor.timestamp.value,
|
||||
offset: anchor.offset as u64,
|
||||
bias: match anchor.bias {
|
||||
Bias::Left => proto::Bias::Left as i32,
|
||||
Bias::Right => proto::Bias::Right as i32,
|
||||
},
|
||||
buffer_id: anchor.buffer_id.map(Into::into),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -450,33 +429,23 @@ pub fn deserialize_diagnostics(
|
||||
|
||||
/// Deserializes an [`Anchor`] from the RPC representation.
|
||||
pub fn deserialize_anchor(anchor: proto::Anchor) -> Option<Anchor> {
|
||||
let buffer_id = BufferId::new(anchor.buffer_id?).ok()?;
|
||||
if anchor.replica_id == 0
|
||||
&& anchor.timestamp == 0
|
||||
&& anchor.offset == 0
|
||||
&& anchor.bias == proto::Bias::Left as i32
|
||||
{
|
||||
Some(Anchor::Start { buffer_id })
|
||||
} else if anchor.replica_id == u32::MAX
|
||||
&& anchor.timestamp == u32::MAX
|
||||
&& anchor.offset == u64::MAX
|
||||
&& anchor.bias == proto::Bias::Right as i32
|
||||
{
|
||||
Some(Anchor::End { buffer_id })
|
||||
let buffer_id = if let Some(id) = anchor.buffer_id {
|
||||
Some(BufferId::new(id).ok()?)
|
||||
} else {
|
||||
Some(Anchor::Character {
|
||||
insertion_id: clock::Lamport {
|
||||
replica_id: anchor.replica_id as ReplicaId,
|
||||
value: anchor.timestamp,
|
||||
},
|
||||
offset: anchor.offset as usize,
|
||||
bias: match proto::Bias::from_i32(anchor.bias)? {
|
||||
proto::Bias::Left => Bias::Left,
|
||||
proto::Bias::Right => Bias::Right,
|
||||
},
|
||||
buffer_id,
|
||||
})
|
||||
}
|
||||
None
|
||||
};
|
||||
Some(Anchor {
|
||||
timestamp: clock::Lamport {
|
||||
replica_id: anchor.replica_id as ReplicaId,
|
||||
value: anchor.timestamp,
|
||||
},
|
||||
offset: anchor.offset as usize,
|
||||
bias: match proto::Bias::from_i32(anchor.bias)? {
|
||||
proto::Bias::Left => Bias::Left,
|
||||
proto::Bias::Right => Bias::Right,
|
||||
},
|
||||
buffer_id,
|
||||
})
|
||||
}
|
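serialize_anchor and deserialize_anchor now round-trip a single anchor struct, with min and max encoded as sentinel Lamport values rather than dedicated enum variants. A self-contained sketch of that round-trip (WireAnchor stands in for proto::Anchor; bias and buffer_id are omitted for brevity):

// Simplified stand-ins; field names mirror the hunk above.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Lamport {
    replica_id: u16,
    value: u32,
}

#[derive(Clone, Copy, Debug, PartialEq)]
struct Anchor {
    timestamp: Lamport,
    offset: usize,
}

struct WireAnchor {
    replica_id: u32,
    timestamp: u32,
    offset: u64,
}

fn serialize_anchor(anchor: &Anchor) -> WireAnchor {
    WireAnchor {
        replica_id: anchor.timestamp.replica_id as u32,
        timestamp: anchor.timestamp.value,
        offset: anchor.offset as u64,
    }
}

fn deserialize_anchor(wire: WireAnchor) -> Anchor {
    Anchor {
        timestamp: Lamport {
            replica_id: wire.replica_id as u16,
            value: wire.timestamp,
        },
        offset: wire.offset as usize,
    }
}

fn main() {
    let anchor = Anchor {
        timestamp: Lamport { replica_id: 1, value: 42 },
        offset: 7,
    };
    assert_eq!(deserialize_anchor(serialize_anchor(&anchor)), anchor);
}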
||||
|
||||
/// Returns a [`clock::Lamport`] timestamp for the given [`proto::Operation`].
|
||||
|
||||
@@ -303,7 +303,7 @@ impl SyntaxSnapshot {
|
||||
let slice = cursor.slice(
|
||||
&SyntaxLayerPosition {
|
||||
depth: depth + 1,
|
||||
range: text.min_anchor()..text.max_anchor(),
|
||||
range: Anchor::MIN..Anchor::MAX,
|
||||
language: None,
|
||||
},
|
||||
Bias::Left,
|
||||
@@ -459,7 +459,7 @@ impl SyntaxSnapshot {
|
||||
start_point: Point::zero().to_ts_point(),
|
||||
end_point: text.max_point().to_ts_point(),
|
||||
}],
|
||||
range: text.min_anchor()..text.max_anchor(),
|
||||
range: Anchor::MIN..Anchor::MAX,
|
||||
mode: ParseMode::Single,
|
||||
});
|
||||
|
||||
@@ -474,7 +474,7 @@ impl SyntaxSnapshot {
|
||||
} else {
|
||||
SyntaxLayerPosition {
|
||||
depth: max_depth + 1,
|
||||
range: text.max_anchor()..text.max_anchor(),
|
||||
range: Anchor::MAX..Anchor::MAX,
|
||||
language: None,
|
||||
}
|
||||
};
|
||||
@@ -485,7 +485,7 @@ impl SyntaxSnapshot {
|
||||
|
||||
let bounded_position = SyntaxLayerPositionBeforeChange {
|
||||
position: position.clone(),
|
||||
change: changed_regions.start_position(text),
|
||||
change: changed_regions.start_position(),
|
||||
};
|
||||
if bounded_position.cmp(cursor.start(), text).is_gt() {
|
||||
let slice = cursor.slice(&bounded_position, Bias::Left, text);
|
||||
@@ -794,7 +794,7 @@ impl SyntaxSnapshot {
|
||||
range: Range<usize>,
|
        buffer: &'a BufferSnapshot,
        query: fn(&Grammar) -> Option<&Query>,
-    ) -> SyntaxMapCaptures {
+    ) -> SyntaxMapCaptures<'a> {
        SyntaxMapCaptures::new(
            range.clone(),
            buffer.as_rope(),

@@ -808,7 +808,7 @@ impl SyntaxSnapshot {
        range: Range<usize>,
        buffer: &'a BufferSnapshot,
        query: fn(&Grammar) -> Option<&Query>,
-    ) -> SyntaxMapMatches {
+    ) -> SyntaxMapMatches<'a> {
        SyntaxMapMatches::new(
            range.clone(),
            buffer.as_rope(),

@@ -828,7 +828,7 @@ impl SyntaxSnapshot {
        range: Range<T>,
        buffer: &'a BufferSnapshot,
        include_hidden: bool,
-    ) -> impl 'a + Iterator<Item = SyntaxLayer> {
+    ) -> impl 'a + Iterator<Item = SyntaxLayer<'a>> {
        let start_offset = range.start.to_offset(buffer);
        let end_offset = range.end.to_offset(buffer);
        let start = buffer.anchor_before(start_offset);

@@ -1608,11 +1608,11 @@ impl ChangedRegion {
 }

 impl ChangeRegionSet {
-    fn start_position(&self, text: &BufferSnapshot) -> ChangeStartPosition {
+    fn start_position(&self) -> ChangeStartPosition {
        self.0.first().map_or(
            ChangeStartPosition {
                depth: usize::MAX,
-                position: text.max_anchor(),
+                position: Anchor::MAX,
            },
            |region| ChangeStartPosition {
                depth: region.depth,

@@ -1661,26 +1661,32 @@ impl ChangeRegionSet {
    }
 }

-impl sum_tree::Summary for SyntaxLayerSummary {
-    type Context = BufferSnapshot;
-
-    fn zero(buffer: &BufferSnapshot) -> Self {
+impl Default for SyntaxLayerSummary {
+    fn default() -> Self {
        Self {
            max_depth: 0,
            min_depth: 0,
-            range: buffer.max_anchor()..buffer.min_anchor(),
-            last_layer_range: buffer.min_anchor()..buffer.max_anchor(),
+            range: Anchor::MAX..Anchor::MIN,
+            last_layer_range: Anchor::MIN..Anchor::MAX,
            last_layer_language: None,
            contains_unknown_injections: false,
        }
    }
 }

+impl sum_tree::Summary for SyntaxLayerSummary {
+    type Context = BufferSnapshot;
+
+    fn zero(_cx: &BufferSnapshot) -> Self {
+        Default::default()
+    }
+
    fn add_summary(&mut self, other: &Self, buffer: &Self::Context) {
        if other.max_depth > self.max_depth {
            self.max_depth = other.max_depth;
            self.range = other.range.clone();
        } else {
-            if self.range == (buffer.max_anchor()..buffer.max_anchor()) {
+            if self.range == (Anchor::MAX..Anchor::MAX) {
                self.range.start = other.range.start;
            }
            if other.range.end.cmp(&self.range.end, buffer).is_gt() {

@@ -184,7 +184,7 @@ pub fn init(cx: &mut AppContext) {

    cx.observe_new_views(move |workspace: &mut Workspace, cx| {
        let project = workspace.project();
-        if project.read(cx).is_local_or_ssh() {
+        if project.read(cx).is_local() {
            log_store.update(cx, |store, cx| {
                store.add_project(project, cx);
            });

@@ -193,7 +193,7 @@ pub fn init(cx: &mut AppContext) {
        let log_store = log_store.clone();
        workspace.register_action(move |workspace, _: &OpenLanguageServerLogs, cx| {
            let project = workspace.project().read(cx);
-            if project.is_local_or_ssh() {
+            if project.is_local() {
                workspace.add_item_to_active_pane(
                    Box::new(cx.new_view(|cx| {
                        LspLogView::new(workspace.project().clone(), log_store.clone(), cx)

@@ -5,7 +5,6 @@ use gpui::AsyncAppContext;
 use http_client::github::{latest_github_release, GitHubLspBinaryVersion};
 pub use language::*;
 use lsp::LanguageServerBinary;
-use project::{lsp_store::language_server_settings, project_settings::BinarySettings};
 use smol::fs::{self, File};
 use std::{any::Any, env::consts, path::PathBuf, sync::Arc};
 use util::{fs::remove_matching, maybe, ResultExt};

@@ -25,41 +24,14 @@ impl super::LspAdapter for CLspAdapter {
    async fn check_if_user_installed(
        &self,
        delegate: &dyn LspAdapterDelegate,
-        cx: &AsyncAppContext,
+        _: &AsyncAppContext,
    ) -> Option<LanguageServerBinary> {
-        let configured_binary = cx.update(|cx| {
-            language_server_settings(delegate, &Self::SERVER_NAME, cx)
-                .and_then(|s| s.binary.clone())
-        });
-
-        match configured_binary {
-            Ok(Some(BinarySettings {
-                path: Some(path),
-                arguments,
-                ..
-            })) => Some(LanguageServerBinary {
-                path: path.into(),
-                arguments: arguments
-                    .unwrap_or_default()
-                    .iter()
-                    .map(|arg| arg.into())
-                    .collect(),
-                env: None,
-            }),
-            Ok(Some(BinarySettings {
-                path_lookup: Some(false),
-                ..
-            })) => None,
-            _ => {
-                let env = delegate.shell_env().await;
-                let path = delegate.which(Self::SERVER_NAME.as_ref()).await?;
-                Some(LanguageServerBinary {
-                    path,
-                    arguments: vec![],
-                    env: Some(env),
-                })
-            }
-        }
+        let path = delegate.which(Self::SERVER_NAME.as_ref()).await?;
+        Some(LanguageServerBinary {
+            path,
+            arguments: vec![],
+            env: None,
+        })
    }

    async fn fetch_latest_server_version(

@@ -141,18 +113,6 @@ impl super::LspAdapter for CLspAdapter {
        get_cached_server_binary(container_dir).await
    }

-    async fn installation_test_binary(
-        &self,
-        container_dir: PathBuf,
-    ) -> Option<LanguageServerBinary> {
-        get_cached_server_binary(container_dir)
-            .await
-            .map(|mut binary| {
-                binary.arguments = vec!["--help".into()];
-                binary
-            })
-    }
-
    async fn label_for_completion(
        &self,
        completion: &lsp::CompletionItem,
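The `check_if_user_installed` rework above collapses the per-adapter settings plumbing into a plain PATH lookup. A minimal stand-alone sketch of that flow, using the `which` crate as a stand-in for the delegate's `which` call (the crate, the `clangd` name, and the fallback message are illustrative, not Zed's actual wiring):

```rust
use std::path::PathBuf;

// Sketch: resolve a language server the way the simplified adapters now do --
// ask the system PATH for the binary and run it with no extra arguments.
fn check_if_user_installed(server_name: &str) -> Option<PathBuf> {
    // `which::which` walks PATH, much like the delegate's `which` call.
    which::which(server_name).ok()
}

fn main() {
    match check_if_user_installed("clangd") {
        Some(path) => println!("found user-installed server at {}", path.display()),
        None => println!("no user-installed server; fall back to a downloaded copy"),
    }
}
```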
@@ -22,11 +22,11 @@ fn server_binary_arguments(server_path: &Path) -> Vec<OsString> {
 }

 pub struct CssLspAdapter {
-    node: Arc<dyn NodeRuntime>,
+    node: NodeRuntime,
 }

 impl CssLspAdapter {
-    pub fn new(node: Arc<dyn NodeRuntime>) -> Self {
+    pub fn new(node: NodeRuntime) -> Self {
        CssLspAdapter { node }
    }
 }

@@ -81,14 +81,7 @@ impl LspAdapter for CssLspAdapter {
        container_dir: PathBuf,
        _: &dyn LspAdapterDelegate,
    ) -> Option<LanguageServerBinary> {
-        get_cached_server_binary(container_dir, &*self.node).await
-    }
-
-    async fn installation_test_binary(
-        &self,
-        container_dir: PathBuf,
-    ) -> Option<LanguageServerBinary> {
-        get_cached_server_binary(container_dir, &*self.node).await
+        get_cached_server_binary(container_dir, &self.node).await
    }

    async fn initialization_options(

@@ -103,7 +96,7 @@ impl LspAdapter for CssLspAdapter {

 async fn get_cached_server_binary(
    container_dir: PathBuf,
-    node: &dyn NodeRuntime,
+    node: &NodeRuntime,
 ) -> Option<LanguageServerBinary> {
    maybe!(async {
        let mut last_version_dir = None;

@@ -6,7 +6,6 @@ use gpui::{AppContext, AsyncAppContext, Task};
 use http_client::github::latest_github_release;
 pub use language::*;
 use lsp::LanguageServerBinary;
-use project::{lsp_store::language_server_settings, project_settings::BinarySettings};
 use regex::Regex;
 use serde_json::json;
 use smol::{fs, process};

@@ -68,41 +67,14 @@ impl super::LspAdapter for GoLspAdapter {
    async fn check_if_user_installed(
        &self,
        delegate: &dyn LspAdapterDelegate,
-        cx: &AsyncAppContext,
+        _: &AsyncAppContext,
    ) -> Option<LanguageServerBinary> {
-        let configured_binary = cx.update(|cx| {
-            language_server_settings(delegate, &Self::SERVER_NAME, cx)
-                .and_then(|s| s.binary.clone())
-        });
-
-        match configured_binary {
-            Ok(Some(BinarySettings {
-                path: Some(path),
-                arguments,
-                ..
-            })) => Some(LanguageServerBinary {
-                path: path.into(),
-                arguments: arguments
-                    .unwrap_or_default()
-                    .iter()
-                    .map(|arg| arg.into())
-                    .collect(),
-                env: None,
-            }),
-            Ok(Some(BinarySettings {
-                path_lookup: Some(false),
-                ..
-            })) => None,
-            _ => {
-                let env = delegate.shell_env().await;
-                let path = delegate.which(Self::SERVER_NAME.as_ref()).await?;
-                Some(LanguageServerBinary {
-                    path,
-                    arguments: server_binary_arguments(),
-                    env: Some(env),
-                })
-            }
-        }
+        let path = delegate.which(Self::SERVER_NAME.as_ref()).await?;
+        Some(LanguageServerBinary {
+            path,
+            arguments: server_binary_arguments(),
+            env: None,
+        })
    }

    fn will_fetch_server(

@@ -214,18 +186,6 @@ impl super::LspAdapter for GoLspAdapter {
        get_cached_server_binary(container_dir).await
    }

-    async fn installation_test_binary(
-        &self,
-        container_dir: PathBuf,
-    ) -> Option<LanguageServerBinary> {
-        get_cached_server_binary(container_dir)
-            .await
-            .map(|mut binary| {
-                binary.arguments = vec!["--help".into()];
-                binary
-            })
-    }
-
    async fn initialization_options(
        self: Arc<Self>,
        _: &Arc<dyn LspAdapterDelegate>,

@@ -59,13 +59,13 @@ fn server_binary_arguments(server_path: &Path) -> Vec<OsString> {
 }

 pub struct JsonLspAdapter {
-    node: Arc<dyn NodeRuntime>,
+    node: NodeRuntime,
    languages: Arc<LanguageRegistry>,
    workspace_config: OnceLock<Value>,
 }

 impl JsonLspAdapter {
-    pub fn new(node: Arc<dyn NodeRuntime>, languages: Arc<LanguageRegistry>) -> Self {
+    pub fn new(node: NodeRuntime, languages: Arc<LanguageRegistry>) -> Self {
        Self {
            node,
            languages,

@@ -183,14 +183,7 @@ impl LspAdapter for JsonLspAdapter {
        container_dir: PathBuf,
        _: &dyn LspAdapterDelegate,
    ) -> Option<LanguageServerBinary> {
-        get_cached_server_binary(container_dir, &*self.node).await
-    }
-
-    async fn installation_test_binary(
-        &self,
-        container_dir: PathBuf,
-    ) -> Option<LanguageServerBinary> {
-        get_cached_server_binary(container_dir, &*self.node).await
+        get_cached_server_binary(container_dir, &self.node).await
    }

    async fn initialization_options(

@@ -226,7 +219,7 @@ impl LspAdapter for JsonLspAdapter {

 async fn get_cached_server_binary(
    container_dir: PathBuf,
-    node: &dyn NodeRuntime,
+    node: &NodeRuntime,
 ) -> Option<LanguageServerBinary> {
    maybe!(async {
        let mut last_version_dir = None;

@@ -374,18 +367,6 @@ impl LspAdapter for NodeVersionAdapter {
    ) -> Option<LanguageServerBinary> {
        get_cached_version_server_binary(container_dir).await
    }

-    async fn installation_test_binary(
-        &self,
-        container_dir: PathBuf,
-    ) -> Option<LanguageServerBinary> {
-        get_cached_version_server_binary(container_dir)
-            .await
-            .map(|mut binary| {
-                binary.arguments = vec!["--version".into()];
-                binary
-            })
-    }
 }

 async fn get_cached_version_server_binary(container_dir: PathBuf) -> Option<LanguageServerBinary> {

@@ -30,11 +30,7 @@ mod yaml;
 #[exclude = "*.rs"]
 struct LanguageDir;

-pub fn init(
-    languages: Arc<LanguageRegistry>,
-    node_runtime: Arc<dyn NodeRuntime>,
-    cx: &mut AppContext,
-) {
+pub fn init(languages: Arc<LanguageRegistry>, node_runtime: NodeRuntime, cx: &mut AppContext) {
    languages.register_native_grammars([
        ("bash", tree_sitter_bash::LANGUAGE),
        ("c", tree_sitter_c::LANGUAGE),

@@ -20,19 +20,20 @@ use task::{TaskTemplate, TaskTemplates, VariableName};
 use util::ResultExt;

 const SERVER_PATH: &str = "node_modules/pyright/langserver.index.js";
+const NODE_MODULE_RELATIVE_SERVER_PATH: &str = "pyright/langserver.index.js";

 fn server_binary_arguments(server_path: &Path) -> Vec<OsString> {
    vec![server_path.into(), "--stdio".into()]
 }

 pub struct PythonLspAdapter {
-    node: Arc<dyn NodeRuntime>,
+    node: NodeRuntime,
 }

 impl PythonLspAdapter {
    const SERVER_NAME: LanguageServerName = LanguageServerName::new_static("pyright");

-    pub fn new(node: Arc<dyn NodeRuntime>) -> Self {
+    pub fn new(node: NodeRuntime) -> Self {
        PythonLspAdapter { node }
    }
 }

@@ -43,6 +44,26 @@ impl LspAdapter for PythonLspAdapter {
        Self::SERVER_NAME.clone()
    }

+    async fn check_if_user_installed(
+        &self,
+        delegate: &dyn LspAdapterDelegate,
+        _: &AsyncAppContext,
+    ) -> Option<LanguageServerBinary> {
+        let node = delegate.which("node".as_ref()).await?;
+        let (node_modules_path, _) = delegate
+            .npm_package_installed_version(Self::SERVER_NAME.as_ref())
+            .await
+            .log_err()??;
+
+        let path = node_modules_path.join(NODE_MODULE_RELATIVE_SERVER_PATH);
+
+        Some(LanguageServerBinary {
+            path: node,
+            env: None,
+            arguments: server_binary_arguments(&path),
+        })
+    }
+
    async fn fetch_latest_server_version(
        &self,
        _: &dyn LspAdapterDelegate,

@@ -94,14 +115,7 @@ impl LspAdapter for PythonLspAdapter {
        container_dir: PathBuf,
        _: &dyn LspAdapterDelegate,
    ) -> Option<LanguageServerBinary> {
-        get_cached_server_binary(container_dir, &*self.node).await
-    }
-
-    async fn installation_test_binary(
-        &self,
-        container_dir: PathBuf,
-    ) -> Option<LanguageServerBinary> {
-        get_cached_server_binary(container_dir, &*self.node).await
+        get_cached_server_binary(container_dir, &self.node).await
    }

    async fn process_completions(&self, items: &mut [lsp::CompletionItem]) {

@@ -198,7 +212,7 @@ impl LspAdapter for PythonLspAdapter {

 async fn get_cached_server_binary(
    container_dir: PathBuf,
-    node: &dyn NodeRuntime,
+    node: &NodeRuntime,
 ) -> Option<LanguageServerBinary> {
    let server_path = container_dir.join(SERVER_PATH);
    if server_path.exists() {

@@ -1,4 +1,3 @@
-(parameter (identifier) @variable)
 (attribute attribute: (identifier) @property)
 (type (identifier) @type)
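The Python adapter's new `check_if_user_installed` points a project-local `node` at the server script shipped inside the installed npm package. A self-contained sketch of that path composition (the paths and helper name here are illustrative stand-ins for the delegate calls):

```rust
use std::path::{Path, PathBuf};

// Sketch: given the node_modules directory where an npm package lives, build
// the `node <script> --stdio` invocation the adapter hands back.
fn server_command(node: PathBuf, node_modules_path: &Path) -> (PathBuf, Vec<String>) {
    let script = node_modules_path.join("pyright/langserver.index.js");
    (node, vec![script.display().to_string(), "--stdio".into()])
}

fn main() {
    let (program, args) = server_command(
        PathBuf::from("/usr/bin/node"),
        Path::new("/my-project/node_modules"),
    );
    println!("{} {}", program.display(), args.join(" "));
}
```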
@@ -8,7 +8,6 @@ use http_client::github::{latest_github_release, GitHubLspBinaryVersion};
 pub use language::*;
 use language_settings::all_language_settings;
 use lsp::LanguageServerBinary;
-use project::{lsp_store::language_server_settings, project_settings::BinarySettings};
 use regex::Regex;
 use smol::fs::{self, File};
 use std::{

@@ -37,77 +36,34 @@ impl LspAdapter for RustLspAdapter {
    async fn check_if_user_installed(
        &self,
        delegate: &dyn LspAdapterDelegate,
-        cx: &AsyncAppContext,
+        _: &AsyncAppContext,
    ) -> Option<LanguageServerBinary> {
-        let configured_binary = cx
-            .update(|cx| {
-                language_server_settings(delegate, &Self::SERVER_NAME, cx)
-                    .and_then(|s| s.binary.clone())
-            })
-            .ok()?;
+        let path = delegate.which("rust-analyzer".as_ref()).await?;
+        let env = delegate.shell_env().await;
+
+        // It is surprisingly common for ~/.cargo/bin/rust-analyzer to be a symlink to
+        // /usr/bin/rust-analyzer that fails when you run it; so we need to test it.
+        log::info!("found rust-analyzer in PATH. trying to run `rust-analyzer --help`");
+        let result = delegate
+            .try_exec(LanguageServerBinary {
+                path: path.clone(),
+                arguments: vec!["--help".into()],
+                env: Some(env.clone()),
+            })
+            .await;
+        if let Err(err) = result {
+            log::error!(
+                "failed to run rust-analyzer after detecting it in PATH: binary: {:?}: {}",
+                path,
+                err
+            );
+            return None;
+        }

-        let (path, env, arguments) = match configured_binary {
-            // If nothing is configured, or path_lookup explicitly enabled,
-            // we lookup the binary in the path.
-            None
-            | Some(BinarySettings {
-                path: None,
-                path_lookup: Some(true),
-                ..
-            })
-            | Some(BinarySettings {
-                path: None,
-                path_lookup: None,
-                ..
-            }) => {
-                let path = delegate.which("rust-analyzer".as_ref()).await;
-                let env = delegate.shell_env().await;
-
-                if let Some(path) = path {
-                    // It is surprisingly common for ~/.cargo/bin/rust-analyzer to be a symlink to
-                    // /usr/bin/rust-analyzer that fails when you run it; so we need to test it.
-                    log::info!("found rust-analyzer in PATH. trying to run `rust-analyzer --help`");
-                    match delegate
-                        .try_exec(LanguageServerBinary {
-                            path: path.clone(),
-                            arguments: vec!["--help".into()],
-                            env: Some(env.clone()),
-                        })
-                        .await
-                    {
-                        Ok(()) => (Some(path), Some(env), None),
-                        Err(err) => {
-                            log::error!("failed to run rust-analyzer after detecting it in PATH: binary: {:?}: {}", path, err);
-                            (None, None, None)
-                        }
-                    }
-                } else {
-                    (None, None, None)
-                }
-            }
-            // Otherwise, we use the configured binary.
-            Some(BinarySettings {
-                path: Some(path),
-                arguments,
-                path_lookup,
-            }) => {
-                if path_lookup.is_some() {
-                    log::warn!("Both `path` and `path_lookup` are set, ignoring `path_lookup`");
-                }
-                (Some(path.into()), None, arguments)
-            }
-
-            _ => (None, None, None),
-        };
-
-        path.map(|path| LanguageServerBinary {
+        Some(LanguageServerBinary {
            path,
-            env,
-            arguments: arguments
-                .unwrap_or_default()
-                .iter()
-                .map(|arg| arg.into())
-                .collect(),
+            env: Some(env),
+            arguments: vec![],
        })
    }

@@ -186,18 +142,6 @@ impl LspAdapter for RustLspAdapter {
        get_cached_server_binary(container_dir).await
    }

-    async fn installation_test_binary(
-        &self,
-        container_dir: PathBuf,
-    ) -> Option<LanguageServerBinary> {
-        get_cached_server_binary(container_dir)
-            .await
-            .map(|mut binary| {
-                binary.arguments = vec!["--help".into()];
-                binary
-            })
-    }
-
    fn disk_based_diagnostic_sources(&self) -> Vec<String> {
        vec!["rustc".into()]
    }

@@ -4,8 +4,7 @@
 (struct_item
    (visibility_modifier)? @context
    "struct" @context
-    name: (_) @name
-    body: (_ "{" @open (_)* "}" @close)) @item
+    name: (_) @name) @item

 (enum_item
    (visibility_modifier)? @context
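The comment in the rust-analyzer hunk explains why a PATH hit isn't trusted blindly: the `~/.cargo/bin` entry is often a broken shim, so the adapter runs `--help` once before accepting it. A stand-alone sketch of that validation step using `std::process::Command` (a simplified equivalent of the delegate's `try_exec`):

```rust
use std::process::Command;

// Accept a binary found on PATH only if invoking it actually works.
fn validate_server(path: &str) -> bool {
    match Command::new(path).arg("--help").output() {
        Ok(output) if output.status.success() => true,
        Ok(output) => {
            eprintln!("{path} exists but failed with {}", output.status);
            false
        }
        Err(err) => {
            eprintln!("failed to run {path}: {err}");
            false
        }
    }
}

fn main() {
    if validate_server("rust-analyzer") {
        println!("rust-analyzer from PATH is usable");
    }
}
```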
@@ -28,14 +28,14 @@ fn server_binary_arguments(server_path: &Path) -> Vec<OsString> {
 }

 pub struct TailwindLspAdapter {
-    node: Arc<dyn NodeRuntime>,
+    node: NodeRuntime,
 }

 impl TailwindLspAdapter {
    const SERVER_NAME: LanguageServerName =
        LanguageServerName::new_static("tailwindcss-language-server");

-    pub fn new(node: Arc<dyn NodeRuntime>) -> Self {
+    pub fn new(node: NodeRuntime) -> Self {
        TailwindLspAdapter { node }
    }
 }

@@ -46,38 +46,6 @@ impl LspAdapter for TailwindLspAdapter {
        Self::SERVER_NAME.clone()
    }

-    async fn check_if_user_installed(
-        &self,
-        delegate: &dyn LspAdapterDelegate,
-        cx: &AsyncAppContext,
-    ) -> Option<LanguageServerBinary> {
-        let configured_binary = cx
-            .update(|cx| {
-                language_server_settings(delegate, &Self::SERVER_NAME, cx)
-                    .and_then(|s| s.binary.clone())
-            })
-            .ok()??;
-
-        let path = if let Some(configured_path) = configured_binary.path.map(PathBuf::from) {
-            configured_path
-        } else {
-            self.node.binary_path().await.ok()?
-        };
-
-        let arguments = configured_binary
-            .arguments
-            .unwrap_or_default()
-            .iter()
-            .map(|arg| arg.into())
-            .collect();
-
-        Some(LanguageServerBinary {
-            path,
-            arguments,
-            env: None,
-        })
-    }
-
    async fn fetch_latest_server_version(
        &self,
        _: &dyn LspAdapterDelegate,

@@ -122,14 +90,7 @@ impl LspAdapter for TailwindLspAdapter {
        container_dir: PathBuf,
        _: &dyn LspAdapterDelegate,
    ) -> Option<LanguageServerBinary> {
-        get_cached_server_binary(container_dir, &*self.node).await
-    }
-
-    async fn installation_test_binary(
-        &self,
-        container_dir: PathBuf,
-    ) -> Option<LanguageServerBinary> {
-        get_cached_server_binary(container_dir, &*self.node).await
+        get_cached_server_binary(container_dir, &self.node).await
    }

    async fn initialization_options(

@@ -198,7 +159,7 @@ impl LspAdapter for TailwindLspAdapter {

 async fn get_cached_server_binary(
    container_dir: PathBuf,
-    node: &dyn NodeRuntime,
+    node: &NodeRuntime,
 ) -> Option<LanguageServerBinary> {
    maybe!(async {
        let mut last_version_dir = None;

@@ -65,7 +65,7 @@ fn eslint_server_binary_arguments(server_path: &Path) -> Vec<OsString> {
 }

 pub struct TypeScriptLspAdapter {
-    node: Arc<dyn NodeRuntime>,
+    node: NodeRuntime,
 }

 impl TypeScriptLspAdapter {

@@ -73,7 +73,7 @@ impl TypeScriptLspAdapter {
    const NEW_SERVER_PATH: &'static str = "node_modules/typescript-language-server/lib/cli.mjs";
    const SERVER_NAME: LanguageServerName =
        LanguageServerName::new_static("typescript-language-server");
-    pub fn new(node: Arc<dyn NodeRuntime>) -> Self {
+    pub fn new(node: NodeRuntime) -> Self {
        TypeScriptLspAdapter { node }
    }
    async fn tsdk_path(adapter: &Arc<dyn LspAdapterDelegate>) -> &'static str {

@@ -161,14 +161,7 @@ impl LspAdapter for TypeScriptLspAdapter {
        container_dir: PathBuf,
        _: &dyn LspAdapterDelegate,
    ) -> Option<LanguageServerBinary> {
-        get_cached_ts_server_binary(container_dir, &*self.node).await
-    }
-
-    async fn installation_test_binary(
-        &self,
-        container_dir: PathBuf,
-    ) -> Option<LanguageServerBinary> {
-        get_cached_ts_server_binary(container_dir, &*self.node).await
+        get_cached_ts_server_binary(container_dir, &self.node).await
    }

    fn code_action_kinds(&self) -> Option<Vec<CodeActionKind>> {

@@ -264,7 +257,7 @@ impl LspAdapter for TypeScriptLspAdapter {

 async fn get_cached_ts_server_binary(
    container_dir: PathBuf,
-    node: &dyn NodeRuntime,
+    node: &NodeRuntime,
 ) -> Option<LanguageServerBinary> {
    maybe!(async {
        let old_server_path = container_dir.join(TypeScriptLspAdapter::OLD_SERVER_PATH);

@@ -293,11 +286,12 @@ async fn get_cached_ts_server_binary(
 }

 pub struct EsLintLspAdapter {
-    node: Arc<dyn NodeRuntime>,
+    node: NodeRuntime,
 }

 impl EsLintLspAdapter {
-    const CURRENT_VERSION: &'static str = "release/2.4.4";
+    const CURRENT_VERSION: &'static str = "2.4.4";
+    const CURRENT_VERSION_TAG_NAME: &'static str = "release/2.4.4";

    #[cfg(not(windows))]
    const GITHUB_ASSET_KIND: AssetKind = AssetKind::TarGz;

@@ -310,9 +304,13 @@ impl EsLintLspAdapter {
    const FLAT_CONFIG_FILE_NAMES: &'static [&'static str] =
        &["eslint.config.js", "eslint.config.mjs", "eslint.config.cjs"];

-    pub fn new(node: Arc<dyn NodeRuntime>) -> Self {
+    pub fn new(node: NodeRuntime) -> Self {
        EsLintLspAdapter { node }
    }
+
+    fn build_destination_path(container_dir: &Path) -> PathBuf {
+        container_dir.join(format!("vscode-eslint-{}", Self::CURRENT_VERSION))
+    }
 }

 #[async_trait(?Send)]

@@ -413,7 +411,7 @@ impl LspAdapter for EsLintLspAdapter {
    ) -> Result<Box<dyn 'static + Send + Any>> {
        let url = build_asset_url(
            "microsoft/vscode-eslint",
-            Self::CURRENT_VERSION,
+            Self::CURRENT_VERSION_TAG_NAME,
            Self::GITHUB_ASSET_KIND,
        )?;

@@ -430,7 +428,7 @@ impl LspAdapter for EsLintLspAdapter {
        delegate: &dyn LspAdapterDelegate,
    ) -> Result<LanguageServerBinary> {
        let version = version.downcast::<GitHubLspBinaryVersion>().unwrap();
-        let destination_path = container_dir.join(format!("vscode-eslint-{}", version.name));
+        let destination_path = Self::build_destination_path(&container_dir);
        let server_path = destination_path.join(Self::SERVER_PATH);

        if fs::metadata(&server_path).await.is_err() {

@@ -476,11 +474,11 @@ impl LspAdapter for EsLintLspAdapter {
        }

        self.node
-            .run_npm_subcommand(Some(&repo_root), "install", &[])
+            .run_npm_subcommand(&repo_root, "install", &[])
            .await?;

        self.node
-            .run_npm_subcommand(Some(&repo_root), "run-script", &["compile"])
+            .run_npm_subcommand(&repo_root, "run-script", &["compile"])
            .await?;
    }

@@ -496,38 +494,14 @@ impl LspAdapter for EsLintLspAdapter {
        container_dir: PathBuf,
        _: &dyn LspAdapterDelegate,
    ) -> Option<LanguageServerBinary> {
-        get_cached_eslint_server_binary(container_dir, &*self.node).await
-    }
-
-    async fn installation_test_binary(
-        &self,
-        container_dir: PathBuf,
-    ) -> Option<LanguageServerBinary> {
-        get_cached_eslint_server_binary(container_dir, &*self.node).await
-    }
-}
-
-async fn get_cached_eslint_server_binary(
-    container_dir: PathBuf,
-    node: &dyn NodeRuntime,
-) -> Option<LanguageServerBinary> {
    maybe!(async {
-        // This is unfortunate but we don't know what the version is to build a path directly
-        let mut dir = fs::read_dir(&container_dir).await?;
-        let first = dir.next().await.ok_or(anyhow!("missing first file"))??;
-        if !first.file_type().await?.is_dir() {
-            return Err(anyhow!("First entry is not a directory"));
-        }
-        let server_path = first.path().join(EsLintLspAdapter::SERVER_PATH);
-
-        Ok(LanguageServerBinary {
-            path: node.binary_path().await?,
+        let server_path =
+            Self::build_destination_path(&container_dir).join(EsLintLspAdapter::SERVER_PATH);
+        Some(LanguageServerBinary {
+            path: self.node.binary_path().await.ok()?,
            env: None,
            arguments: eslint_server_binary_arguments(&server_path),
        })
    })
    .await
    .log_err()
    }
 }

 #[cfg(target_os = "windows")]
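Pinning the ESLint version in `CURRENT_VERSION` is what lets the cached-binary lookup stop scanning `container_dir` for whatever directory a past download happened to create: the install directory becomes a pure function of the container path. A small sketch of that idea (the constant and directory names mirror the diff but are otherwise illustrative):

```rust
use std::path::{Path, PathBuf};

const CURRENT_VERSION: &str = "2.4.4";

// With the version fixed at compile time, no directory scan is needed --
// the destination can simply be recomputed.
fn build_destination_path(container_dir: &Path) -> PathBuf {
    container_dir.join(format!("vscode-eslint-{CURRENT_VERSION}"))
}

fn main() {
    let dir = build_destination_path(Path::new("/cache/eslint"));
    assert_eq!(dir, PathBuf::from("/cache/eslint/vscode-eslint-2.4.4"));
    println!("{}", dir.display());
}
```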
@@ -5,28 +5,28 @@ use gpui::AsyncAppContext;
|
||||
use language::{LanguageServerName, LspAdapter, LspAdapterDelegate};
|
||||
use lsp::{CodeActionKind, LanguageServerBinary};
|
||||
use node_runtime::NodeRuntime;
|
||||
use project::{lsp_store::language_server_settings, project_settings::BinarySettings};
|
||||
use serde_json::{json, Value};
|
||||
use project::lsp_store::language_server_settings;
|
||||
use serde_json::Value;
|
||||
use std::{
|
||||
any::Any,
|
||||
ffi::OsString,
|
||||
path::{Path, PathBuf},
|
||||
sync::Arc,
|
||||
};
|
||||
use util::{maybe, ResultExt};
|
||||
use util::{maybe, merge_json_value_into, ResultExt};
|
||||
|
||||
fn typescript_server_binary_arguments(server_path: &Path) -> Vec<OsString> {
|
||||
vec![server_path.into(), "--stdio".into()]
|
||||
}
|
||||
|
||||
pub struct VtslsLspAdapter {
|
||||
node: Arc<dyn NodeRuntime>,
|
||||
node: NodeRuntime,
|
||||
}
|
||||
|
||||
impl VtslsLspAdapter {
|
||||
const SERVER_PATH: &'static str = "node_modules/@vtsls/language-server/bin/vtsls.js";
|
||||
|
||||
pub fn new(node: Arc<dyn NodeRuntime>) -> Self {
|
||||
pub fn new(node: NodeRuntime) -> Self {
|
||||
VtslsLspAdapter { node }
|
||||
}
|
||||
async fn tsdk_path(adapter: &Arc<dyn LspAdapterDelegate>) -> &'static str {
|
||||
@@ -71,40 +71,15 @@ impl LspAdapter for VtslsLspAdapter {
|
||||
async fn check_if_user_installed(
|
||||
&self,
|
||||
delegate: &dyn LspAdapterDelegate,
|
||||
cx: &AsyncAppContext,
|
||||
_: &AsyncAppContext,
|
||||
) -> Option<LanguageServerBinary> {
|
||||
let configured_binary = cx.update(|cx| {
|
||||
language_server_settings(delegate, &SERVER_NAME, cx).and_then(|s| s.binary.clone())
|
||||
});
|
||||
|
||||
match configured_binary {
|
||||
Ok(Some(BinarySettings {
|
||||
path: Some(path),
|
||||
arguments,
|
||||
..
|
||||
})) => Some(LanguageServerBinary {
|
||||
path: path.into(),
|
||||
arguments: arguments
|
||||
.unwrap_or_default()
|
||||
.iter()
|
||||
.map(|arg| arg.into())
|
||||
.collect(),
|
||||
env: None,
|
||||
}),
|
||||
Ok(Some(BinarySettings {
|
||||
path_lookup: Some(false),
|
||||
..
|
||||
})) => None,
|
||||
_ => {
|
||||
let env = delegate.shell_env().await;
|
||||
let path = delegate.which(SERVER_NAME.as_ref()).await?;
|
||||
Some(LanguageServerBinary {
|
||||
path: path.clone(),
|
||||
arguments: typescript_server_binary_arguments(&path),
|
||||
env: Some(env),
|
||||
})
|
||||
}
|
||||
}
|
||||
let env = delegate.shell_env().await;
|
||||
let path = delegate.which(SERVER_NAME.as_ref()).await?;
|
||||
Some(LanguageServerBinary {
|
||||
path: path.clone(),
|
||||
arguments: typescript_server_binary_arguments(&path),
|
||||
env: Some(env),
|
||||
})
|
||||
}
|
||||
|
||||
async fn fetch_server_binary(
|
||||
@@ -154,14 +129,7 @@ impl LspAdapter for VtslsLspAdapter {
|
||||
container_dir: PathBuf,
|
||||
_: &dyn LspAdapterDelegate,
|
||||
) -> Option<LanguageServerBinary> {
|
||||
get_cached_ts_server_binary(container_dir, &*self.node).await
|
||||
}
|
||||
|
||||
async fn installation_test_binary(
|
||||
&self,
|
||||
container_dir: PathBuf,
|
||||
) -> Option<LanguageServerBinary> {
|
||||
get_cached_ts_server_binary(container_dir, &*self.node).await
|
||||
get_cached_ts_server_binary(container_dir, &self.node).await
|
||||
}
|
||||
|
||||
fn code_action_kinds(&self) -> Option<Vec<CodeActionKind>> {
|
||||
@@ -212,11 +180,12 @@ impl LspAdapter for VtslsLspAdapter {
|
||||
})
|
||||
}
|
||||
|
||||
async fn initialization_options(
|
||||
async fn workspace_configuration(
|
||||
self: Arc<Self>,
|
||||
adapter: &Arc<dyn LspAdapterDelegate>,
|
||||
) -> Result<Option<serde_json::Value>> {
|
||||
let tsdk_path = Self::tsdk_path(adapter).await;
|
||||
delegate: &Arc<dyn LspAdapterDelegate>,
|
||||
cx: &mut AsyncAppContext,
|
||||
) -> Result<Value> {
|
||||
let tsdk_path = Self::tsdk_path(delegate).await;
|
||||
let config = serde_json::json!({
|
||||
"tsdk": tsdk_path,
|
||||
"suggest": {
|
||||
@@ -243,10 +212,13 @@ impl LspAdapter for VtslsLspAdapter {
|
||||
"enumMemberValues": {
|
||||
"enabled": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"tsserver": {
|
||||
"maxTsServerMemory": 8092
|
||||
},
|
||||
});
|
||||
|
||||
Ok(Some(json!({
|
||||
let mut default_workspace_configuration = serde_json::json!({
|
||||
"typescript": config,
|
||||
"javascript": config,
|
||||
"vtsls": {
|
||||
@@ -258,33 +230,18 @@ impl LspAdapter for VtslsLspAdapter {
|
||||
},
|
||||
"autoUseWorkspaceTsdk": true
|
||||
}
|
||||
})))
|
||||
}
|
||||
});
|
||||
|
||||
async fn workspace_configuration(
|
||||
self: Arc<Self>,
|
||||
delegate: &Arc<dyn LspAdapterDelegate>,
|
||||
cx: &mut AsyncAppContext,
|
||||
) -> Result<Value> {
|
||||
let override_options = cx.update(|cx| {
|
||||
language_server_settings(delegate.as_ref(), &SERVER_NAME, cx)
|
||||
.and_then(|s| s.settings.clone())
|
||||
})?;
|
||||
|
||||
if let Some(options) = override_options {
|
||||
return Ok(options);
|
||||
if let Some(override_options) = override_options {
|
||||
merge_json_value_into(override_options, &mut default_workspace_configuration)
|
||||
}
|
||||
|
||||
let config = serde_json::json!({
|
||||
"tsserver": {
|
||||
"maxTsServerMemory": 8092
|
||||
},
|
||||
});
|
||||
|
||||
Ok(serde_json::json!({
|
||||
"typescript": config,
|
||||
"javascript": config
|
||||
}))
|
||||
Ok(default_workspace_configuration)
|
||||
}
|
||||
|
||||
fn language_ids(&self) -> HashMap<String, String> {
|
||||
@@ -298,7 +255,7 @@ impl LspAdapter for VtslsLspAdapter {
|
||||
|
||||
async fn get_cached_ts_server_binary(
|
||||
container_dir: PathBuf,
|
||||
node: &dyn NodeRuntime,
|
||||
node: &NodeRuntime,
|
||||
) -> Option<LanguageServerBinary> {
|
||||
maybe!(async {
|
||||
let server_path = container_dir.join(VtslsLspAdapter::SERVER_PATH);
|
||||
|
||||
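The rewritten `workspace_configuration` no longer lets user settings replace the defaults wholesale; it overlays them via `util::merge_json_value_into`. A minimal self-contained sketch of the merge behavior that helper is assumed to have (deep-merging objects key by key, replacing everything else):

```rust
use serde_json::Value;

// Overlay `source` onto `target`: objects merge recursively, any other value
// is replaced wholesale. A sketch of the assumed merge_json_value_into semantics.
fn merge_json_value_into(source: Value, target: &mut Value) {
    match (source, target) {
        (Value::Object(source_map), Value::Object(target_map)) => {
            for (key, value) in source_map {
                if let Some(existing) = target_map.get_mut(&key) {
                    merge_json_value_into(value, existing);
                } else {
                    target_map.insert(key, value);
                }
            }
        }
        (source, target) => *target = source,
    }
}

fn main() {
    let mut config = serde_json::json!({"tsserver": {"maxTsServerMemory": 8092}});
    let user = serde_json::json!({"tsserver": {"maxTsServerMemory": 4096}, "trace": "verbose"});
    merge_json_value_into(user, &mut config);
    assert_eq!(config["tsserver"]["maxTsServerMemory"], 4096); // overridden
    assert_eq!(config["trace"], "verbose"); // added, defaults preserved
}
```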
@@ -26,12 +26,12 @@ fn server_binary_arguments(server_path: &Path) -> Vec<OsString> {
|
||||
}
|
||||
|
||||
pub struct YamlLspAdapter {
|
||||
node: Arc<dyn NodeRuntime>,
|
||||
node: NodeRuntime,
|
||||
}
|
||||
|
||||
impl YamlLspAdapter {
|
||||
const SERVER_NAME: LanguageServerName = LanguageServerName::new_static("yaml-language-server");
|
||||
pub fn new(node: Arc<dyn NodeRuntime>) -> Self {
|
||||
pub fn new(node: NodeRuntime) -> Self {
|
||||
YamlLspAdapter { node }
|
||||
}
|
||||
}
|
||||
@@ -42,37 +42,6 @@ impl LspAdapter for YamlLspAdapter {
|
||||
Self::SERVER_NAME.clone()
|
||||
}
|
||||
|
||||
async fn check_if_user_installed(
|
||||
&self,
|
||||
delegate: &dyn LspAdapterDelegate,
|
||||
cx: &AsyncAppContext,
|
||||
) -> Option<LanguageServerBinary> {
|
||||
let configured_binary = cx
|
||||
.update(|cx| {
|
||||
language_server_settings(delegate, &Self::SERVER_NAME, cx)
|
||||
.and_then(|s| s.binary.clone())
|
||||
})
|
||||
.ok()??;
|
||||
|
||||
let path = if let Some(configured_path) = configured_binary.path.map(PathBuf::from) {
|
||||
configured_path
|
||||
} else {
|
||||
self.node.binary_path().await.ok()?
|
||||
};
|
||||
|
||||
let arguments = configured_binary
|
||||
.arguments
|
||||
.unwrap_or_default()
|
||||
.iter()
|
||||
.map(|arg| arg.into())
|
||||
.collect();
|
||||
Some(LanguageServerBinary {
|
||||
path,
|
||||
arguments,
|
||||
env: None,
|
||||
})
|
||||
}
|
||||
|
||||
async fn fetch_latest_server_version(
|
||||
&self,
|
||||
_: &dyn LspAdapterDelegate,
|
||||
@@ -117,14 +86,7 @@ impl LspAdapter for YamlLspAdapter {
|
||||
container_dir: PathBuf,
|
||||
_: &dyn LspAdapterDelegate,
|
||||
) -> Option<LanguageServerBinary> {
|
||||
get_cached_server_binary(container_dir, &*self.node).await
|
||||
}
|
||||
|
||||
async fn installation_test_binary(
|
||||
&self,
|
||||
container_dir: PathBuf,
|
||||
) -> Option<LanguageServerBinary> {
|
||||
get_cached_server_binary(container_dir, &*self.node).await
|
||||
get_cached_server_binary(container_dir, &self.node).await
|
||||
}
|
||||
|
||||
async fn workspace_configuration(
|
||||
@@ -157,7 +119,7 @@ impl LspAdapter for YamlLspAdapter {
|
||||
|
||||
async fn get_cached_server_binary(
|
||||
container_dir: PathBuf,
|
||||
node: &dyn NodeRuntime,
|
||||
node: &NodeRuntime,
|
||||
) -> Option<LanguageServerBinary> {
|
||||
maybe!(async {
|
||||
let mut last_version_dir = None;
|
||||
|
||||
@@ -64,6 +64,15 @@ pub struct LanguageServerBinary {
|
||||
pub env: Option<HashMap<String, String>>,
|
||||
}
|
||||
|
||||
/// Configures the search (and installation) of language servers.
|
||||
#[derive(Debug, Clone, Deserialize)]
|
||||
pub struct LanguageServerBinaryOptions {
|
||||
/// Whether the adapter should look at the users system
|
||||
pub allow_path_lookup: bool,
|
||||
/// Whether the adapter should download its own version
|
||||
pub allow_binary_download: bool,
|
||||
}
|
||||
|
||||
/// A running language server process.
|
||||
pub struct LanguageServer {
|
||||
server_id: LanguageServerId,
|
||||
|
||||
@@ -2,7 +2,7 @@ use assets::Assets;
|
||||
use gpui::{prelude::*, rgb, App, KeyBinding, StyleRefinement, View, WindowOptions};
|
||||
use language::{language_settings::AllLanguageSettings, LanguageRegistry};
|
||||
use markdown::{Markdown, MarkdownStyle};
|
||||
use node_runtime::FakeNodeRuntime;
|
||||
use node_runtime::NodeRuntime;
|
||||
use settings::SettingsStore;
|
||||
use std::sync::Arc;
|
||||
use theme::LoadThemes;
|
||||
@@ -102,7 +102,7 @@ pub fn main() {
|
||||
});
|
||||
cx.bind_keys([KeyBinding::new("cmd-c", markdown::Copy, None)]);
|
||||
|
||||
let node_runtime = FakeNodeRuntime::new();
|
||||
let node_runtime = NodeRuntime::unavailable();
|
||||
theme::init(LoadThemes::JustBase, cx);
|
||||
|
||||
let language_registry = LanguageRegistry::new(cx.background_executor().clone());
|
||||
|
||||
@@ -2,7 +2,7 @@ use assets::Assets;
|
||||
use gpui::*;
|
||||
use language::{language_settings::AllLanguageSettings, LanguageRegistry};
|
||||
use markdown::{Markdown, MarkdownStyle};
|
||||
use node_runtime::FakeNodeRuntime;
|
||||
use node_runtime::NodeRuntime;
|
||||
use settings::SettingsStore;
|
||||
use std::sync::Arc;
|
||||
use theme::LoadThemes;
|
||||
@@ -28,7 +28,7 @@ pub fn main() {
|
||||
});
|
||||
cx.bind_keys([KeyBinding::new("cmd-c", markdown::Copy, None)]);
|
||||
|
||||
let node_runtime = FakeNodeRuntime::new();
|
||||
let node_runtime = NodeRuntime::unavailable();
|
||||
let language_registry = Arc::new(LanguageRegistry::new(cx.background_executor().clone()));
|
||||
languages::init(language_registry.clone(), node_runtime, cx);
|
||||
theme::init(LoadThemes::JustBase, cx);
|
||||
|
||||
@@ -5,105 +5,75 @@ use std::{
|
||||
ops::{Range, Sub},
|
||||
};
|
||||
use sum_tree::Bias;
|
||||
use text::BufferId;
|
||||
|
||||
#[derive(Clone, Copy, Eq, PartialEq, Debug, Hash)]
|
||||
pub enum Anchor {
|
||||
Start,
|
||||
End,
|
||||
Text {
|
||||
excerpt_id: ExcerptId,
|
||||
text_anchor: text::Anchor,
|
||||
},
|
||||
pub struct Anchor {
|
||||
pub buffer_id: Option<BufferId>,
|
||||
pub excerpt_id: ExcerptId,
|
||||
pub text_anchor: text::Anchor,
|
||||
}
|
||||
|
||||
impl Anchor {
|
||||
pub fn excerpt_id(&self) -> ExcerptId {
|
||||
match self {
|
||||
Anchor::Start => ExcerptId::min(),
|
||||
Anchor::End => ExcerptId::max(),
|
||||
Anchor::Text { excerpt_id, .. } => *excerpt_id,
|
||||
pub fn min() -> Self {
|
||||
Self {
|
||||
buffer_id: None,
|
||||
excerpt_id: ExcerptId::min(),
|
||||
text_anchor: text::Anchor::MIN,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn max() -> Self {
|
||||
Self {
|
||||
buffer_id: None,
|
||||
excerpt_id: ExcerptId::max(),
|
||||
text_anchor: text::Anchor::MAX,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn cmp(&self, other: &Anchor, snapshot: &MultiBufferSnapshot) -> Ordering {
|
||||
match (self, other) {
|
||||
(Anchor::Start, Anchor::Start) | (Anchor::End, Anchor::End) => Ordering::Equal,
|
||||
(_, Anchor::Start) | (Anchor::End, _) => Ordering::Greater,
|
||||
(Anchor::Start, _) | (_, Anchor::End) => Ordering::Less,
|
||||
(
|
||||
Anchor::Text {
|
||||
excerpt_id: id1,
|
||||
text_anchor: anchor1,
|
||||
},
|
||||
Anchor::Text {
|
||||
excerpt_id: id2,
|
||||
text_anchor: anchor2,
|
||||
},
|
||||
) => {
|
||||
let excerpt_id_cmp = id1.cmp(id2, snapshot);
|
||||
if excerpt_id_cmp.is_eq() {
|
||||
if let Some(excerpt) = snapshot.excerpt(*id1) {
|
||||
anchor1.cmp(anchor2, &excerpt.buffer)
|
||||
} else {
|
||||
Ordering::Equal
|
||||
}
|
||||
} else {
|
||||
excerpt_id_cmp
|
||||
}
|
||||
let excerpt_id_cmp = self.excerpt_id.cmp(&other.excerpt_id, snapshot);
|
||||
if excerpt_id_cmp.is_eq() {
|
||||
if self.excerpt_id == ExcerptId::min() || self.excerpt_id == ExcerptId::max() {
|
||||
Ordering::Equal
|
||||
} else if let Some(excerpt) = snapshot.excerpt(self.excerpt_id) {
|
||||
self.text_anchor.cmp(&other.text_anchor, &excerpt.buffer)
|
||||
} else {
|
||||
Ordering::Equal
|
||||
}
|
||||
} else {
|
||||
excerpt_id_cmp
|
||||
}
|
||||
}
|
||||
|
||||
pub fn bias(&self) -> Bias {
|
||||
match self {
|
||||
Anchor::Start => Bias::Left,
|
||||
Anchor::End => Bias::Right,
|
||||
Anchor::Text { text_anchor, .. } => match text_anchor {
|
||||
text::Anchor::Start { .. } => Bias::Left,
|
||||
text::Anchor::End { .. } => Bias::Right,
|
||||
text::Anchor::Character { bias, .. } => *bias,
|
||||
},
|
||||
}
|
||||
self.text_anchor.bias
|
||||
}
|
||||
|
||||
pub fn bias_left(&self, snapshot: &MultiBufferSnapshot) -> Anchor {
|
||||
match self {
|
||||
Anchor::Start => *self,
|
||||
Anchor::End => snapshot.anchor_before(snapshot.len()),
|
||||
Anchor::Text {
|
||||
excerpt_id,
|
||||
text_anchor,
|
||||
} => {
|
||||
if let Some(excerpt) = snapshot.excerpt(*excerpt_id) {
|
||||
Anchor::Text {
|
||||
excerpt_id: *excerpt_id,
|
||||
text_anchor: text_anchor.bias_left(&excerpt.buffer),
|
||||
}
|
||||
} else {
|
||||
*self
|
||||
}
|
||||
if self.text_anchor.bias != Bias::Left {
|
||||
if let Some(excerpt) = snapshot.excerpt(self.excerpt_id) {
|
||||
return Self {
|
||||
buffer_id: self.buffer_id,
|
||||
excerpt_id: self.excerpt_id,
|
||||
text_anchor: self.text_anchor.bias_left(&excerpt.buffer),
|
||||
};
|
||||
}
|
||||
}
|
||||
*self
|
||||
}
|
||||
|
||||
pub fn bias_right(&self, snapshot: &MultiBufferSnapshot) -> Anchor {
|
||||
match self {
|
||||
Anchor::Start => snapshot.anchor_after(0),
|
||||
Anchor::End => *self,
|
||||
Anchor::Text {
|
||||
excerpt_id,
|
||||
text_anchor,
|
||||
} => {
|
||||
if let Some(excerpt) = snapshot.excerpt(*excerpt_id) {
|
||||
Anchor::Text {
|
||||
excerpt_id: *excerpt_id,
|
||||
text_anchor: text_anchor.bias_right(&excerpt.buffer),
|
||||
}
|
||||
} else {
|
||||
*self
|
||||
}
|
||||
if self.text_anchor.bias != Bias::Right {
|
||||
if let Some(excerpt) = snapshot.excerpt(self.excerpt_id) {
|
||||
return Self {
|
||||
buffer_id: self.buffer_id,
|
||||
excerpt_id: self.excerpt_id,
|
||||
text_anchor: self.text_anchor.bias_right(&excerpt.buffer),
|
||||
};
|
||||
}
|
||||
}
|
||||
*self
|
||||
}
|
||||
|
||||
pub fn summary<D>(&self, snapshot: &MultiBufferSnapshot) -> D
|
||||
@@ -114,18 +84,15 @@ impl Anchor {
|
||||
}
|
||||
|
||||
pub fn is_valid(&self, snapshot: &MultiBufferSnapshot) -> bool {
|
||||
match self {
|
||||
Self::Start | Anchor::End => true,
|
||||
Anchor::Text {
|
||||
excerpt_id,
|
||||
text_anchor,
|
||||
} => {
|
||||
if let Some(excerpt) = snapshot.excerpt(*excerpt_id) {
|
||||
excerpt.contains(self) && text_anchor.is_valid(&excerpt.buffer)
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
if *self == Anchor::min() || *self == Anchor::max() {
|
||||
true
|
||||
} else if let Some(excerpt) = snapshot.excerpt(self.excerpt_id) {
|
||||
excerpt.contains(self)
|
||||
&& (self.text_anchor == excerpt.range.context.start
|
||||
|| self.text_anchor == excerpt.range.context.end
|
||||
|| self.text_anchor.is_valid(&excerpt.buffer))
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
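The enum-to-struct change above replaces the `Start`/`End` variants with sentinel values built from `ExcerptId::min()`/`max()`, so comparison becomes a plain lexicographic check: order by excerpt first, then by position within the excerpt. A toy model of that ordering (every type here is a simplified stand-in, not Zed's real `Anchor`):

```rust
use std::cmp::Ordering;

// Stand-ins: excerpt ids order excerpts, offsets order positions within one
// excerpt. The sentinel ids play the role of ExcerptId::min()/max().
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct Anchor {
    excerpt_id: u64,
    offset: usize,
}

const MIN_EXCERPT: u64 = 0;
const MAX_EXCERPT: u64 = u64::MAX;

impl Anchor {
    fn min() -> Self { Anchor { excerpt_id: MIN_EXCERPT, offset: 0 } }
    fn max() -> Self { Anchor { excerpt_id: MAX_EXCERPT, offset: 0 } }

    // Compare by excerpt first; only compare positions when both anchors live
    // in the same real excerpt, mirroring the struct-based cmp above.
    fn cmp_anchor(&self, other: &Anchor) -> Ordering {
        let excerpt_cmp = self.excerpt_id.cmp(&other.excerpt_id);
        if excerpt_cmp.is_eq() {
            if self.excerpt_id == MIN_EXCERPT || self.excerpt_id == MAX_EXCERPT {
                Ordering::Equal // sentinels carry no meaningful position
            } else {
                self.offset.cmp(&other.offset)
            }
        } else {
            excerpt_cmp
        }
    }
}

fn main() {
    let a = Anchor { excerpt_id: 3, offset: 10 };
    let b = Anchor { excerpt_id: 3, offset: 25 };
    assert_eq!(a.cmp_anchor(&b), Ordering::Less);
    assert_eq!(Anchor::min().cmp_anchor(&a), Ordering::Less);
    assert_eq!(Anchor::max().cmp_anchor(&b), Ordering::Greater);
}
```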
@@ -5,7 +5,7 @@ use anyhow::{anyhow, Result};
|
||||
use clock::ReplicaId;
|
||||
use collections::{BTreeMap, Bound, HashMap, HashSet};
|
||||
use futures::{channel::mpsc, SinkExt};
|
||||
use gpui::{AppContext, EntityId, EventEmitter, Model, ModelContext};
|
||||
use gpui::{AppContext, EntityId, EventEmitter, Model, ModelContext, Task};
|
||||
use itertools::Itertools;
|
||||
use language::{
|
||||
language_settings::{language_settings, LanguageSettings},
|
||||
@@ -437,14 +437,13 @@ impl MultiBuffer {
|
||||
self.capability == Capability::ReadOnly
|
||||
}
|
||||
|
||||
pub fn singleton(buffer_model: Model<Buffer>, cx: &mut ModelContext<Self>) -> Self {
|
||||
let buffer = buffer_model.read(cx);
|
||||
let mut this = Self::new(buffer.capability());
|
||||
pub fn singleton(buffer: Model<Buffer>, cx: &mut ModelContext<Self>) -> Self {
|
||||
let mut this = Self::new(buffer.read(cx).capability());
|
||||
this.singleton = true;
|
||||
this.push_excerpts(
|
||||
buffer_model,
|
||||
buffer,
|
||||
[ExcerptRange {
|
||||
context: buffer.min_anchor()..buffer.max_anchor(),
|
||||
context: text::Anchor::MIN..text::Anchor::MAX,
|
||||
primary: None,
|
||||
}],
|
||||
cx,
|
||||
@@ -963,27 +962,9 @@ impl MultiBuffer {
|
||||
Default::default();
|
||||
let snapshot = self.read(cx);
|
||||
let mut cursor = snapshot.excerpts.cursor::<Option<&Locator>>(&());
|
||||
for mut selection in selections {
|
||||
if selection.start == Anchor::Start {
|
||||
if let Some(first_excerpt) = snapshot.excerpts.first() {
|
||||
selection.start = Anchor::Text {
|
||||
excerpt_id: first_excerpt.id,
|
||||
text_anchor: first_excerpt.range.context.start,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
if selection.end == Anchor::End {
|
||||
if let Some(last_excerpt) = snapshot.excerpts.last() {
|
||||
selection.end = Anchor::Text {
|
||||
excerpt_id: last_excerpt.id,
|
||||
text_anchor: last_excerpt.range.context.end,
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
let start_locator = snapshot.excerpt_locator_for_id(selection.start.excerpt_id());
|
||||
let end_locator = snapshot.excerpt_locator_for_id(selection.end.excerpt_id());
|
||||
for selection in selections {
|
||||
let start_locator = snapshot.excerpt_locator_for_id(selection.start.excerpt_id);
|
||||
let end_locator = snapshot.excerpt_locator_for_id(selection.end.excerpt_id);
|
||||
|
||||
cursor.seek(&Some(start_locator), Bias::Left, &());
|
||||
while let Some(excerpt) = cursor.item() {
|
||||
@@ -993,15 +974,11 @@ impl MultiBuffer {
|
||||
|
||||
let mut start = excerpt.range.context.start;
|
||||
let mut end = excerpt.range.context.end;
|
||||
if excerpt.id == selection.start.excerpt_id() {
|
||||
if let Anchor::Text { text_anchor, .. } = selection.start {
|
||||
start = text_anchor;
|
||||
}
|
||||
if excerpt.id == selection.start.excerpt_id {
|
||||
start = selection.start.text_anchor;
|
||||
}
|
||||
if excerpt.id == selection.end.excerpt_id() {
|
||||
if let Anchor::Text { text_anchor, .. } = selection.end {
|
||||
end = text_anchor;
|
||||
}
|
||||
if excerpt.id == selection.end.excerpt_id {
|
||||
end = selection.end.text_anchor;
|
||||
}
|
||||
selections_by_buffer
|
||||
.entry(excerpt.buffer_id)
|
||||
@@ -1153,64 +1130,6 @@ impl MultiBuffer {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn stream_excerpts_with_context_lines(
|
||||
&mut self,
|
||||
buffer: Model<Buffer>,
|
||||
ranges: Vec<Range<text::Anchor>>,
|
||||
context_line_count: u32,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> mpsc::Receiver<Range<Anchor>> {
|
||||
let (buffer_id, buffer_snapshot) =
|
||||
buffer.update(cx, |buffer, _| (buffer.remote_id(), buffer.snapshot()));
|
||||
|
||||
let (mut tx, rx) = mpsc::channel(256);
|
||||
cx.spawn(move |this, mut cx| async move {
|
||||
let mut excerpt_ranges = Vec::new();
|
||||
let mut range_counts = Vec::new();
|
||||
cx.background_executor()
|
||||
.scoped(|scope| {
|
||||
scope.spawn(async {
|
||||
let (ranges, counts) =
|
||||
build_excerpt_ranges(&buffer_snapshot, &ranges, context_line_count);
|
||||
excerpt_ranges = ranges;
|
||||
range_counts = counts;
|
||||
});
|
||||
})
|
||||
.await;
|
||||
|
||||
let mut ranges = ranges.into_iter();
|
||||
let mut range_counts = range_counts.into_iter();
|
||||
for excerpt_ranges in excerpt_ranges.chunks(100) {
|
||||
let excerpt_ids = match this.update(&mut cx, |this, cx| {
|
||||
this.push_excerpts(buffer.clone(), excerpt_ranges.iter().cloned(), cx)
|
||||
}) {
|
||||
Ok(excerpt_ids) => excerpt_ids,
|
||||
Err(_) => return,
|
||||
};
|
||||
|
||||
for (excerpt_id, range_count) in excerpt_ids.into_iter().zip(range_counts.by_ref())
|
||||
{
|
||||
for range in ranges.by_ref().take(range_count) {
|
||||
let start = Anchor::Text {
|
||||
excerpt_id,
|
||||
text_anchor: range.start,
|
||||
};
|
||||
let end = Anchor::Text {
|
||||
excerpt_id,
|
||||
text_anchor: range.end,
|
||||
};
|
||||
if tx.send(start..end).await.is_err() {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
|
||||
rx
|
||||
}
|
||||
|
||||
pub fn push_excerpts<O>(
|
||||
&mut self,
|
||||
buffer: Model<Buffer>,
|
||||
@@ -1244,11 +1163,13 @@ impl MultiBuffer {
|
||||
let mut ranges = ranges.into_iter();
|
||||
for (excerpt_id, range_count) in excerpt_ids.into_iter().zip(range_counts.into_iter()) {
|
||||
anchor_ranges.extend(ranges.by_ref().take(range_count).map(|range| {
|
||||
let start = Anchor::Text {
|
||||
let start = Anchor {
|
||||
buffer_id: Some(buffer_id),
|
||||
excerpt_id,
|
||||
text_anchor: buffer_snapshot.anchor_after(range.start),
|
||||
};
|
||||
let end = Anchor::Text {
|
||||
let end = Anchor {
|
||||
buffer_id: Some(buffer_id),
|
||||
excerpt_id,
|
||||
text_anchor: buffer_snapshot.anchor_after(range.end),
|
||||
};
|
||||
@@ -1258,6 +1179,91 @@ impl MultiBuffer {
|
||||
anchor_ranges
|
||||
}
|
||||
|
||||
pub fn push_multiple_excerpts_with_context_lines(
|
||||
&mut self,
|
||||
buffers_with_ranges: Vec<(Model<Buffer>, Vec<Range<text::Anchor>>)>,
|
||||
context_line_count: u32,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Task<Vec<Range<Anchor>>> {
|
||||
use futures::StreamExt;
|
||||
|
||||
let (excerpt_ranges_tx, mut excerpt_ranges_rx) = mpsc::channel(256);
|
||||
|
||||
let mut buffer_ids = Vec::with_capacity(buffers_with_ranges.len());
|
||||
|
||||
for (buffer, ranges) in buffers_with_ranges {
|
||||
let (buffer_id, buffer_snapshot) =
|
||||
buffer.update(cx, |buffer, _| (buffer.remote_id(), buffer.snapshot()));
|
||||
|
||||
buffer_ids.push(buffer_id);
|
||||
|
||||
cx.background_executor()
|
||||
.spawn({
|
||||
let mut excerpt_ranges_tx = excerpt_ranges_tx.clone();
|
||||
|
||||
async move {
|
||||
let (excerpt_ranges, counts) =
|
||||
build_excerpt_ranges(&buffer_snapshot, &ranges, context_line_count);
|
||||
excerpt_ranges_tx
|
||||
.send((buffer_id, buffer.clone(), ranges, excerpt_ranges, counts))
|
||||
.await
|
||||
.ok();
|
||||
}
|
||||
})
|
||||
.detach()
|
||||
}
|
||||
|
||||
cx.spawn(move |this, mut cx| async move {
|
||||
let mut results_by_buffer_id = HashMap::default();
|
||||
while let Some((buffer_id, buffer, ranges, excerpt_ranges, range_counts)) =
|
||||
excerpt_ranges_rx.next().await
|
||||
{
|
||||
results_by_buffer_id
|
||||
.insert(buffer_id, (buffer, ranges, excerpt_ranges, range_counts));
|
||||
}
|
||||
|
||||
let mut multi_buffer_ranges = Vec::default();
|
||||
'outer: for buffer_id in buffer_ids {
|
||||
let Some((buffer, ranges, excerpt_ranges, range_counts)) =
|
||||
results_by_buffer_id.remove(&buffer_id)
|
||||
else {
|
||||
continue;
|
||||
};
|
||||
|
||||
let mut ranges = ranges.into_iter();
|
||||
let mut range_counts = range_counts.into_iter();
|
||||
for excerpt_ranges in excerpt_ranges.chunks(100) {
|
||||
let excerpt_ids = match this.update(&mut cx, |this, cx| {
|
||||
this.push_excerpts(buffer.clone(), excerpt_ranges.iter().cloned(), cx)
|
||||
}) {
|
||||
Ok(excerpt_ids) => excerpt_ids,
|
||||
Err(_) => continue 'outer,
|
||||
};
|
||||
|
||||
for (excerpt_id, range_count) in
|
||||
excerpt_ids.into_iter().zip(range_counts.by_ref())
|
||||
{
|
||||
for range in ranges.by_ref().take(range_count) {
|
||||
let start = Anchor {
|
||||
buffer_id: Some(buffer_id),
|
||||
excerpt_id,
|
||||
text_anchor: range.start,
|
||||
};
|
||||
let end = Anchor {
|
||||
buffer_id: Some(buffer_id),
|
||||
excerpt_id,
|
||||
text_anchor: range.end,
|
||||
};
|
||||
multi_buffer_ranges.push(start..end);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
multi_buffer_ranges
|
||||
})
|
||||
}
|
||||
|
||||
pub fn insert_excerpts_after<O>(
|
||||
&mut self,
|
||||
prev_excerpt_id: ExcerptId,
|
||||
@@ -1673,15 +1679,14 @@ impl MultiBuffer {
|
||||
let mut error = None;
|
||||
let mut futures = Vec::new();
|
||||
for anchor in anchors {
|
||||
if let Anchor::Text { text_anchor, .. } = anchor {
|
||||
if let Some(buffer) = borrow.get(&text_anchor.buffer_id()) {
|
||||
if let Some(buffer_id) = anchor.buffer_id {
|
||||
if let Some(buffer) = borrow.get(&buffer_id) {
|
||||
buffer.buffer.update(cx, |buffer, _| {
|
||||
futures.push(buffer.wait_for_anchors([text_anchor]))
|
||||
futures.push(buffer.wait_for_anchors([anchor.text_anchor]))
|
||||
});
|
||||
} else {
|
||||
error = Some(anyhow!(
|
||||
"buffer {:?} is not part of this multi-buffer",
|
||||
text_anchor.buffer_id()
|
||||
"buffer {buffer_id} is not part of this multi-buffer"
|
||||
));
|
||||
break;
|
||||
}
|
||||
@@ -1704,43 +1709,14 @@ impl MultiBuffer {
|
||||
cx: &AppContext,
|
||||
) -> Option<(Model<Buffer>, language::Anchor)> {
|
||||
let snapshot = self.read(cx);
|
||||
match snapshot.anchor_before(position) {
|
||||
Anchor::Start => {
|
||||
if let Some(first_excerpt) = snapshot.excerpts.first() {
|
||||
let buffer = self
|
||||
.buffers
|
||||
.borrow()
|
||||
.get(&first_excerpt.buffer_id)?
|
||||
.buffer
|
||||
.clone();
|
||||
Some((buffer, first_excerpt.range.context.start))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
Anchor::End => {
|
||||
if let Some(last_excerpt) = snapshot.excerpts.last() {
|
||||
let buffer = self
|
||||
.buffers
|
||||
.borrow()
|
||||
.get(&last_excerpt.buffer_id)?
|
||||
.buffer
|
||||
.clone();
|
||||
Some((buffer, last_excerpt.range.context.end))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
Anchor::Text { text_anchor, .. } => {
|
||||
let buffer = self
|
||||
.buffers
|
||||
.borrow()
|
||||
.get(&text_anchor.buffer_id())?
|
||||
.buffer
|
||||
.clone();
|
||||
Some((buffer, text_anchor))
|
||||
}
|
||||
}
|
||||
let anchor = snapshot.anchor_before(position);
|
||||
let buffer = self
|
||||
.buffers
|
||||
.borrow()
|
||||
.get(&anchor.buffer_id?)?
|
||||
.buffer
|
||||
.clone();
|
||||
Some((buffer, anchor.text_anchor))
|
||||
}
|
||||
|
||||
fn on_buffer_event(
|
||||
@@ -2993,7 +2969,7 @@ impl MultiBufferSnapshot {
|
||||
D: TextDimension + Ord + Sub<D, Output = D>,
|
||||
{
|
||||
let mut cursor = self.excerpts.cursor::<ExcerptSummary>(&());
|
||||
let locator = self.excerpt_locator_for_id(anchor.excerpt_id());
|
||||
let locator = self.excerpt_locator_for_id(anchor.excerpt_id);
|
||||
|
||||
cursor.seek(locator, Bias::Left, &());
|
||||
if cursor.item().is_none() {
|
||||
@@ -3002,7 +2978,7 @@ impl MultiBufferSnapshot {
|
||||
|
||||
let mut position = D::from_text_summary(&cursor.start().text);
|
||||
if let Some(excerpt) = cursor.item() {
|
||||
if excerpt.id == anchor.excerpt_id() {
|
||||
if excerpt.id == anchor.excerpt_id {
|
||||
let excerpt_buffer_start =
|
||||
excerpt.range.context.start.summary::<D>(&excerpt.buffer);
|
||||
let excerpt_buffer_end = excerpt.range.context.end.summary::<D>(&excerpt.buffer);
|
||||
@@ -3134,11 +3110,12 @@ impl MultiBufferSnapshot {
|
||||
// If there's no adjacent excerpt that contains the anchor's position,
|
||||
// then report that the anchor has lost its position.
|
||||
if !kept_position {
|
||||
let bias = anchor.bias();
|
||||
anchor = if let Some(excerpt) = next_excerpt {
|
||||
let mut text_anchor =
|
||||
excerpt.range.context.start.bias(bias, &excerpt.buffer);
|
||||
|
||||
let mut text_anchor = excerpt
|
||||
.range
|
||||
.context
|
||||
.start
|
||||
.bias(anchor.text_anchor.bias, &excerpt.buffer);
|
||||
if text_anchor
|
||||
.cmp(&excerpt.range.context.end, &excerpt.buffer)
|
||||
.is_gt()
|
||||
@@ -3151,7 +3128,11 @@ impl MultiBufferSnapshot {
|
||||
text_anchor,
|
||||
}
|
||||
} else if let Some(excerpt) = prev_excerpt {
|
||||
let mut text_anchor = excerpt.range.context.end.bias(bias, &excerpt.buffer);
|
||||
let mut text_anchor = excerpt
|
||||
.range
|
||||
.context
|
||||
.end
|
||||
.bias(anchor.text_anchor.bias, &excerpt.buffer);
|
||||
if text_anchor
|
||||
.cmp(&excerpt.range.context.start, &excerpt.buffer)
|
||||
.is_lt()
|
||||
@@ -3163,10 +3144,10 @@ impl MultiBufferSnapshot {
|
||||
excerpt_id: excerpt.id,
|
||||
text_anchor,
|
||||
}
|
||||
} else if bias == Bias::Left {
|
||||
Anchor::Start
|
||||
} else if anchor.text_anchor.bias == Bias::Left {
|
||||
Anchor::min()
|
||||
} else {
|
||||
Anchor::End
|
||||
Anchor::max()
|
||||
};
|
||||
}
|
||||
|
||||
@@ -3216,9 +3197,9 @@ impl MultiBufferSnapshot {
|
||||
text_anchor,
|
||||
}
|
||||
} else if offset == 0 && bias == Bias::Left {
|
||||
Anchor::Start
|
||||
Anchor::min()
|
||||
} else {
|
||||
Anchor::End
|
||||
Anchor::max()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -5096,7 +5077,6 @@ where
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use futures::StreamExt;
|
||||
use gpui::{AppContext, Context, TestAppContext};
|
||||
use language::{Buffer, Rope};
|
||||
use parking_lot::RwLock;
|
||||
@@ -5645,41 +5625,67 @@ mod tests {
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_stream_excerpts_with_context_lines(cx: &mut TestAppContext) {
|
||||
let buffer = cx.new_model(|cx| Buffer::local(sample_text(20, 3, 'a'), cx));
|
||||
let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite));
|
||||
let anchor_ranges = multibuffer.update(cx, |multibuffer, cx| {
|
||||
let snapshot = buffer.read(cx);
|
||||
let ranges = vec![
|
||||
snapshot.anchor_before(Point::new(3, 2))..snapshot.anchor_before(Point::new(4, 2)),
|
||||
snapshot.anchor_before(Point::new(7, 1))..snapshot.anchor_before(Point::new(7, 3)),
|
||||
snapshot.anchor_before(Point::new(15, 0))
|
||||
..snapshot.anchor_before(Point::new(15, 0)),
|
||||
];
|
||||
multibuffer.stream_excerpts_with_context_lines(buffer.clone(), ranges, 2, cx)
|
||||
});
|
||||
#[gpui::test(iterations = 100)]
|
||||
async fn test_push_multiple_excerpts_with_context_lines(cx: &mut TestAppContext) {
|
||||
let buffer_1 = cx.new_model(|cx| Buffer::local(sample_text(20, 3, 'a'), cx));
|
||||
let buffer_2 = cx.new_model(|cx| Buffer::local(sample_text(15, 4, 'a'), cx));
|
||||
let snapshot_1 = buffer_1.update(cx, |buffer, _| buffer.snapshot());
|
||||
let snapshot_2 = buffer_2.update(cx, |buffer, _| buffer.snapshot());
|
||||
let ranges_1 = vec![
|
||||
snapshot_1.anchor_before(Point::new(3, 2))..snapshot_1.anchor_before(Point::new(4, 2)),
|
||||
snapshot_1.anchor_before(Point::new(7, 1))..snapshot_1.anchor_before(Point::new(7, 3)),
|
||||
snapshot_1.anchor_before(Point::new(15, 0))
|
||||
..snapshot_1.anchor_before(Point::new(15, 0)),
|
||||
];
|
||||
let ranges_2 = vec![
|
||||
snapshot_2.anchor_before(Point::new(2, 1))..snapshot_2.anchor_before(Point::new(3, 1)),
|
||||
snapshot_2.anchor_before(Point::new(10, 0))
|
||||
..snapshot_2.anchor_before(Point::new(10, 2)),
|
||||
];
|
||||
|
||||
let anchor_ranges = anchor_ranges.collect::<Vec<_>>().await;
|
||||
let multibuffer = cx.new_model(|_| MultiBuffer::new(Capability::ReadWrite));
|
||||
let anchor_ranges = multibuffer
|
||||
.update(cx, |multibuffer, cx| {
|
||||
multibuffer.push_multiple_excerpts_with_context_lines(
|
||||
vec![(buffer_1.clone(), ranges_1), (buffer_2.clone(), ranges_2)],
|
||||
2,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await;
|
||||
|
||||
let snapshot = multibuffer.update(cx, |multibuffer, cx| multibuffer.snapshot(cx));
|
||||
assert_eq!(
|
||||
snapshot.text(),
|
||||
concat!(
|
||||
"bbb\n", //
|
||||
"bbb\n", // buffer_1
|
||||
"ccc\n", //
|
||||
"ddd\n", //
|
||||
"eee\n", //
|
||||
"ddd\n", // <-- excerpt 1
|
||||
"eee\n", // <-- excerpt 1
|
||||
"fff\n", //
|
||||
"ggg\n", //
|
||||
"hhh\n", //
|
||||
"hhh\n", // <-- excerpt 2
|
||||
"iii\n", //
|
||||
"jjj\n", //
|
||||
//
|
||||
"nnn\n", //
|
||||
"ooo\n", //
|
||||
"ppp\n", //
|
||||
"ppp\n", // <-- excerpt 3
|
||||
"qqq\n", //
|
||||
"rrr", //
|
||||
"rrr\n", //
|
||||
//
|
||||
"aaaa\n", // buffer 2
|
||||
"bbbb\n", //
|
||||
"cccc\n", // <-- excerpt 4
|
||||
"dddd\n", // <-- excerpt 4
|
||||
"eeee\n", //
|
||||
"ffff\n", //
|
||||
//
|
||||
"iiii\n", //
|
||||
"jjjj\n", //
|
||||
"kkkk\n", // <-- excerpt 5
|
||||
"llll\n", //
|
||||
"mmmm", //
|
||||
)
|
||||
);
|
||||
|
||||
@@ -5691,7 +5697,9 @@ mod tests {
|
||||
vec![
|
||||
Point::new(2, 2)..Point::new(3, 2),
|
||||
Point::new(6, 1)..Point::new(6, 3),
|
||||
Point::new(11, 0)..Point::new(11, 0)
|
||||
Point::new(11, 0)..Point::new(11, 0),
|
||||
Point::new(16, 1)..Point::new(17, 1),
|
||||
Point::new(22, 0)..Point::new(22, 2)
|
||||
]
|
||||
);
|
||||
}
|
||||
|
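`push_multiple_excerpts_with_context_lines` fans the per-buffer range computation out to background tasks, then reassembles the results in the caller's original buffer order so the produced excerpts stay deterministic. A stand-alone sketch of that scatter/gather shape using plain threads and a channel (the "range math" below is a fake stand-in for `build_excerpt_ranges`):

```rust
use std::collections::HashMap;
use std::sync::mpsc;
use std::thread;

fn main() {
    // (buffer_id, ranges) pairs; ids are deliberately out of order.
    let buffers: Vec<(usize, Vec<u32>)> = vec![(7, vec![1, 2]), (3, vec![10]), (9, vec![4, 5, 6])];
    let order: Vec<usize> = buffers.iter().map(|(id, _)| *id).collect();

    // Scatter: compute each buffer's excerpt ranges on its own worker.
    let (tx, rx) = mpsc::channel();
    for (buffer_id, ranges) in buffers {
        let tx = tx.clone();
        thread::spawn(move || {
            // Stand-in for build_excerpt_ranges: widen each range by "context".
            let expanded: Vec<u32> = ranges.iter().map(|r| r + 2).collect();
            tx.send((buffer_id, expanded)).ok();
        });
    }
    drop(tx); // close the channel once every worker holds its own sender

    // Gather: collect by id, then emit in the caller's original order.
    let mut results: HashMap<usize, Vec<u32>> = rx.iter().collect();
    for buffer_id in order {
        if let Some(expanded) = results.remove(&buffer_id) {
            println!("buffer {buffer_id}: {expanded:?}");
        }
    }
}
```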
||||
@@ -18,6 +18,7 @@ test-support = ["tempfile"]
[dependencies]
anyhow.workspace = true
async-compression.workspace = true
async-watch.workspace = true
async-tar.workspace = true
async-trait.workspace = true
async_zip.workspace = true
@@ -32,6 +33,7 @@ smol.workspace = true
tempfile = { workspace = true, optional = true }
util.workspace = true
walkdir = "2.5.0"
which.workspace = true

[target.'cfg(windows)'.dependencies]
async-std = { version = "1.12.0", features = ["unstable"] }
@@ -5,7 +5,7 @@ pub use archive::extract_zip;
use async_compression::futures::bufread::GzipDecoder;
use async_tar::Archive;
use futures::AsyncReadExt;
use http_client::HttpClient;
use http_client::{HttpClient, Uri};
use semver::Version;
use serde::Deserialize;
use smol::io::BufReader;
@@ -23,60 +23,167 @@ use util::ResultExt;
#[cfg(windows)]
use smol::process::windows::CommandExt;

const VERSION: &str = "v22.5.1";

#[cfg(not(windows))]
const NODE_PATH: &str = "bin/node";
#[cfg(windows)]
const NODE_PATH: &str = "node.exe";

#[cfg(not(windows))]
const NPM_PATH: &str = "bin/npm";
#[cfg(windows)]
const NPM_PATH: &str = "node_modules/npm/bin/npm-cli.js";

enum ArchiveType {
    TarGz,
    Zip,
#[derive(Clone, Debug, Default, Eq, PartialEq)]
pub struct NodeBinaryOptions {
    pub allow_path_lookup: bool,
    pub allow_binary_download: bool,
    pub use_paths: Option<(PathBuf, PathBuf)>,
}

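The new NodeBinaryOptions struct drives which runtime flavor actually gets used. A minimal standalone sketch of the precedence that the instance() method further down implements (the struct is mirrored locally so the sketch compiles on its own; field values are illustrative):

use std::path::PathBuf;

// Local mirror of the NodeBinaryOptions struct above.
#[derive(Clone, Debug, Default, Eq, PartialEq)]
pub struct NodeBinaryOptions {
    pub allow_path_lookup: bool,
    pub allow_binary_download: bool,
    pub use_paths: Option<(PathBuf, PathBuf)>,
}

// The same precedence that NodeRuntime::instance() applies:
// explicit paths > PATH lookup > managed download > unavailable.
// (In the real code, a failed PATH lookup falls through to the next branch.)
fn describe_resolution(options: &NodeBinaryOptions) -> &'static str {
    if options.use_paths.is_some() {
        "use the node/npm binaries configured in settings"
    } else if options.allow_path_lookup {
        "try node/npm from PATH, falling back to the branches below"
    } else if options.allow_binary_download {
        "download and manage Zed's own copy of node"
    } else {
        "no runtime available; npm operations will fail"
    }
}

fn main() {
    let options = NodeBinaryOptions {
        allow_path_lookup: true,
        allow_binary_download: true,
        use_paths: None,
    };
    println!("{}", describe_resolution(&options));
}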
#[derive(Debug, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct NpmInfo {
    #[serde(default)]
    dist_tags: NpmInfoDistTags,
    versions: Vec<String>,
#[derive(Clone)]
pub struct NodeRuntime(Arc<Mutex<NodeRuntimeState>>);

struct NodeRuntimeState {
    http: Arc<dyn HttpClient>,
    instance: Option<Box<dyn NodeRuntimeTrait>>,
    last_options: Option<NodeBinaryOptions>,
    options: async_watch::Receiver<Option<NodeBinaryOptions>>,
}

#[derive(Debug, Deserialize, Default)]
pub struct NpmInfoDistTags {
    latest: Option<String>,
}
impl NodeRuntime {
    pub fn new(
        http: Arc<dyn HttpClient>,
        options: async_watch::Receiver<Option<NodeBinaryOptions>>,
    ) -> Self {
        NodeRuntime(Arc::new(Mutex::new(NodeRuntimeState {
            http,
            instance: None,
            last_options: None,
            options,
        })))
    }

#[async_trait::async_trait]
pub trait NodeRuntime: Send + Sync {
    async fn binary_path(&self) -> Result<PathBuf>;
    async fn node_environment_path(&self) -> Result<OsString>;
    pub fn unavailable() -> Self {
        NodeRuntime(Arc::new(Mutex::new(NodeRuntimeState {
            http: Arc::new(http_client::BlockedHttpClient),
            instance: None,
            last_options: None,
            options: async_watch::channel(Some(NodeBinaryOptions::default())).1,
        })))
    }

    async fn run_npm_subcommand(
    async fn instance(&self) -> Result<Box<dyn NodeRuntimeTrait>> {
        let mut state = self.0.lock().await;

        while state.options.borrow().is_none() {
            state.options.changed().await?;
        }
        let options = state.options.borrow().clone().unwrap();
        if state.last_options.as_ref() != Some(&options) {
            state.instance.take();
        }
        if let Some(instance) = state.instance.as_ref() {
            return Ok(instance.boxed_clone());
        }

        if let Some((node, npm)) = options.use_paths.as_ref() {
            let instance = SystemNodeRuntime::new(node.clone(), npm.clone()).await?;
            state.instance = Some(instance.boxed_clone());
            return Ok(instance);
        }

        if options.allow_path_lookup {
            if let Some(instance) = SystemNodeRuntime::detect().await {
                state.instance = Some(instance.boxed_clone());
                return Ok(instance);
            }
        }

        let instance = if options.allow_binary_download {
            ManagedNodeRuntime::install_if_needed(&state.http).await?
        } else {
            Box::new(UnavailableNodeRuntime)
        };

        state.instance = Some(instance.boxed_clone());
        return Ok(instance);
    }

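Because the options arrive through an async_watch channel, instance() blocks until the first Some(options) is published and rebuilds the cached instance whenever the options change. A rough standalone sketch of that wait-then-read pattern, assuming async_watch exposes the tokio-style channel/borrow/changed API used above (payload simplified to a number):

fn main() -> anyhow::Result<()> {
    // `None` models "settings not loaded yet"; `Some(..)` models loaded options.
    let (tx, mut rx) = async_watch::channel::<Option<u32>>(None);

    // Publisher side: settings become available later.
    tx.send(Some(42)).ok();

    smol::block_on(async {
        // Consumer side, mirroring NodeRuntime::instance():
        // wait until a value is present, then snapshot it.
        while rx.borrow().is_none() {
            rx.changed().await?;
        }
        let options = rx.borrow().clone().unwrap();
        assert_eq!(options, 42);
        anyhow::Ok(())
    })
}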
    pub async fn binary_path(&self) -> Result<PathBuf> {
        self.instance().await?.binary_path()
    }

    pub async fn run_npm_subcommand(
        &self,
        directory: Option<&Path>,
        directory: &Path,
        subcommand: &str,
        args: &[&str],
    ) -> Result<Output>;
    ) -> Result<Output> {
        let http = self.0.lock().await.http.clone();
        self.instance()
            .await?
            .run_npm_subcommand(Some(directory), http.proxy(), subcommand, args)
            .await
    }

    async fn npm_package_latest_version(&self, name: &str) -> Result<String>;

    async fn npm_install_packages(&self, directory: &Path, packages: &[(&str, &str)])
        -> Result<()>;

    async fn npm_package_installed_version(
    pub async fn npm_package_installed_version(
        &self,
        local_package_directory: &Path,
        name: &str,
    ) -> Result<Option<String>>;
    ) -> Result<Option<String>> {
        self.instance()
            .await?
            .npm_package_installed_version(local_package_directory, name)
            .await
    }

    async fn should_install_npm_package(
    pub async fn npm_package_latest_version(&self, name: &str) -> Result<String> {
        let http = self.0.lock().await.http.clone();
        let output = self
            .instance()
            .await?
            .run_npm_subcommand(
                None,
                http.proxy(),
                "info",
                &[
                    name,
                    "--json",
                    "--fetch-retry-mintimeout",
                    "2000",
                    "--fetch-retry-maxtimeout",
                    "5000",
                    "--fetch-timeout",
                    "5000",
                ],
            )
            .await?;

        let mut info: NpmInfo = serde_json::from_slice(&output.stdout)?;
        info.dist_tags
            .latest
            .or_else(|| info.versions.pop())
            .ok_or_else(|| anyhow!("no version found for npm package {}", name))
    }

    pub async fn npm_install_packages(
        &self,
        directory: &Path,
        packages: &[(&str, &str)],
    ) -> Result<()> {
        let packages: Vec<_> = packages
            .iter()
            .map(|(name, version)| format!("{name}@{version}"))
            .collect();

        let mut arguments: Vec<_> = packages.iter().map(|p| p.as_str()).collect();
        arguments.extend_from_slice(&[
            "--save-exact",
            "--fetch-retry-mintimeout",
            "2000",
            "--fetch-retry-maxtimeout",
            "5000",
            "--fetch-timeout",
            "5000",
        ]);

        // This is also wrong because the directory is wrong.
        self.run_npm_subcommand(directory, "install", &arguments)
            .await?;
        Ok(())
    }

    pub async fn should_install_npm_package(
        &self,
        package_name: &str,
        local_executable_path: &Path,
@@ -110,21 +217,78 @@ pub trait NodeRuntime: Send + Sync {
    }
}

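npm_install_packages pins each dependency as name@version and passes --save-exact so package.json records exact versions rather than semver ranges. A standalone sketch of that argument construction (the package list and versions are illustrative):

fn main() {
    let packages: &[(&str, &str)] = &[("prettier", "3.3.3"), ("typescript", "5.5.4")];

    // Pin each package as `name@version`, exactly as npm_install_packages does.
    let pinned: Vec<String> = packages
        .iter()
        .map(|(name, version)| format!("{name}@{version}"))
        .collect();

    let mut arguments: Vec<&str> = pinned.iter().map(|p| p.as_str()).collect();
    // `--save-exact` keeps package.json free of `^`/`~` ranges; the fetch flag
    // bounds how long a flaky registry can stall the install.
    arguments.extend_from_slice(&["--save-exact", "--fetch-timeout", "5000"]);

    assert_eq!(arguments[0], "prettier@3.3.3");
    println!("npm install {}", arguments.join(" "));
}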
pub struct RealNodeRuntime {
    http: Arc<dyn HttpClient>,
    installation_lock: Mutex<()>,
enum ArchiveType {
    TarGz,
    Zip,
}

impl RealNodeRuntime {
    pub fn new(http: Arc<dyn HttpClient>) -> Arc<dyn NodeRuntime> {
        Arc::new(RealNodeRuntime {
            http,
            installation_lock: Mutex::new(()),
        })
#[derive(Debug, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct NpmInfo {
    #[serde(default)]
    dist_tags: NpmInfoDistTags,
    versions: Vec<String>,
}

#[derive(Debug, Deserialize, Default)]
pub struct NpmInfoDistTags {
    latest: Option<String>,
}

#[async_trait::async_trait]
trait NodeRuntimeTrait: Send + Sync {
    fn boxed_clone(&self) -> Box<dyn NodeRuntimeTrait>;
    fn binary_path(&self) -> Result<PathBuf>;

    async fn run_npm_subcommand(
        &self,
        directory: Option<&Path>,
        proxy: Option<&Uri>,
        subcommand: &str,
        args: &[&str],
    ) -> Result<Output>;

    async fn npm_package_installed_version(
        &self,
        local_package_directory: &Path,
        name: &str,
    ) -> Result<Option<String>>;
}

#[derive(Clone)]
struct ManagedNodeRuntime {
    installation_path: PathBuf,
}

impl ManagedNodeRuntime {
    const VERSION: &str = "v22.5.1";

    #[cfg(not(windows))]
    const NODE_PATH: &str = "bin/node";
    #[cfg(windows)]
    const NODE_PATH: &str = "node.exe";

    #[cfg(not(windows))]
    const NPM_PATH: &str = "bin/npm";
    #[cfg(windows)]
    const NPM_PATH: &str = "node_modules/npm/bin/npm-cli.js";

    async fn node_environment_path(&self) -> Result<OsString> {
        let node_binary = self.installation_path.join(Self::NODE_PATH);
        let mut env_path = vec![node_binary
            .parent()
            .expect("invalid node binary path")
            .to_path_buf()];

        if let Some(existing_path) = std::env::var_os("PATH") {
            let mut paths = std::env::split_paths(&existing_path).collect::<Vec<_>>();
            env_path.append(&mut paths);
        }

        std::env::join_paths(env_path).context("failed to create PATH env variable")
    }

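node_environment_path prepends the managed node's bin directory to the inherited PATH, so child processes resolve the bundled node before any system install. A standalone demonstration of that split_paths/join_paths round trip (the directory name is illustrative):

use std::env;
use std::path::PathBuf;

fn main() -> Result<(), env::JoinPathsError> {
    // Directory that should win PATH resolution (illustrative).
    let node_bin = PathBuf::from("/tmp/zed-node/bin");

    let mut env_path = vec![node_bin];
    if let Some(existing) = env::var_os("PATH") {
        // Keep the inherited entries, but only after ours.
        env_path.extend(env::split_paths(&existing));
    }

    let joined = env::join_paths(env_path)?;
    println!("PATH={}", joined.to_string_lossy());
    Ok(())
}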
    async fn install_if_needed(&self) -> Result<PathBuf> {
        let _lock = self.installation_lock.lock().await;
    async fn install_if_needed(http: &Arc<dyn HttpClient>) -> Result<Box<dyn NodeRuntimeTrait>> {
        log::info!("Node runtime install_if_needed");

        let os = match consts::OS {
@@ -140,11 +304,12 @@ impl RealNodeRuntime {
            other => bail!("Running on unsupported architecture: {other}"),
        };

        let folder_name = format!("node-{VERSION}-{os}-{arch}");
        let version = Self::VERSION;
        let folder_name = format!("node-{version}-{os}-{arch}");
        let node_containing_dir = paths::support_dir().join("node");
        let node_dir = node_containing_dir.join(folder_name);
        let node_binary = node_dir.join(NODE_PATH);
        let npm_file = node_dir.join(NPM_PATH);
        let node_binary = node_dir.join(Self::NODE_PATH);
        let npm_file = node_dir.join(Self::NPM_PATH);

        let mut command = Command::new(&node_binary);

@@ -177,16 +342,16 @@ impl RealNodeRuntime {
            other => bail!("Running on unsupported os: {other}"),
        };

        let version = Self::VERSION;
        let file_name = format!(
            "node-{VERSION}-{os}-{arch}.{extension}",
            "node-{version}-{os}-{arch}.{extension}",
            extension = match archive_type {
                ArchiveType::TarGz => "tar.gz",
                ArchiveType::Zip => "zip",
            }
        );
        let url = format!("https://nodejs.org/dist/{VERSION}/{file_name}");
        let mut response = self
            .http
        let url = format!("https://nodejs.org/dist/{version}/{file_name}");
        let mut response = http
            .get(&url, Default::default(), true)
            .await
            .context("error downloading Node binary tarball")?;
@@ -207,43 +372,32 @@ impl RealNodeRuntime {
        _ = fs::write(node_dir.join("blank_user_npmrc"), []).await;
        _ = fs::write(node_dir.join("blank_global_npmrc"), []).await;

        anyhow::Ok(node_dir)
        anyhow::Ok(Box::new(ManagedNodeRuntime {
            installation_path: node_dir,
        }))
    }
}

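The download URL is assembled purely from the pinned version plus the platform triple, so the expected artifact name can be checked without any I/O. A standalone sketch using the constants above:

fn main() {
    let version = "v22.5.1"; // ManagedNodeRuntime::VERSION above
    let (os, arch, extension) = ("darwin", "arm64", "tar.gz"); // one supported platform
    let file_name = format!("node-{version}-{os}-{arch}.{extension}");
    let url = format!("https://nodejs.org/dist/{version}/{file_name}");
    assert_eq!(
        url,
        "https://nodejs.org/dist/v22.5.1/node-v22.5.1-darwin-arm64.tar.gz"
    );
}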
#[async_trait::async_trait]
impl NodeRuntime for RealNodeRuntime {
    async fn binary_path(&self) -> Result<PathBuf> {
        let installation_path = self.install_if_needed().await?;
        Ok(installation_path.join(NODE_PATH))
impl NodeRuntimeTrait for ManagedNodeRuntime {
    fn boxed_clone(&self) -> Box<dyn NodeRuntimeTrait> {
        Box::new(self.clone())
    }

    async fn node_environment_path(&self) -> Result<OsString> {
        let installation_path = self.install_if_needed().await?;
        let node_binary = installation_path.join(NODE_PATH);
        let mut env_path = vec![node_binary
            .parent()
            .expect("invalid node binary path")
            .to_path_buf()];

        if let Some(existing_path) = std::env::var_os("PATH") {
            let mut paths = std::env::split_paths(&existing_path).collect::<Vec<_>>();
            env_path.append(&mut paths);
        }

        Ok(std::env::join_paths(env_path).context("failed to create PATH env variable")?)
    fn binary_path(&self) -> Result<PathBuf> {
        Ok(self.installation_path.join(Self::NODE_PATH))
    }

    async fn run_npm_subcommand(
        &self,
        directory: Option<&Path>,
        proxy: Option<&Uri>,
        subcommand: &str,
        args: &[&str],
    ) -> Result<Output> {
        let attempt = || async move {
            let installation_path = self.install_if_needed().await?;
            let node_binary = installation_path.join(NODE_PATH);
            let npm_file = installation_path.join(NPM_PATH);
            let node_binary = self.installation_path.join(Self::NODE_PATH);
            let npm_file = self.installation_path.join(Self::NPM_PATH);
            let env_path = self.node_environment_path().await?;

            if smol::fs::metadata(&node_binary).await.is_err() {
@@ -258,54 +412,17 @@ impl NodeRuntime for RealNodeRuntime {
            command.env_clear();
            command.env("PATH", env_path);
            command.arg(npm_file).arg(subcommand);
            command.args(["--cache".into(), installation_path.join("cache")]);
            command.args(["--cache".into(), self.installation_path.join("cache")]);
            command.args([
                "--userconfig".into(),
                installation_path.join("blank_user_npmrc"),
                self.installation_path.join("blank_user_npmrc"),
            ]);
            command.args([
                "--globalconfig".into(),
                installation_path.join("blank_global_npmrc"),
                self.installation_path.join("blank_global_npmrc"),
            ]);
            command.args(args);

            if let Some(directory) = directory {
                command.current_dir(directory);
                command.args(["--prefix".into(), directory.to_path_buf()]);
            }

            if let Some(proxy) = self.http.proxy() {
                // Map proxy settings from `http://localhost:10809` to `http://127.0.0.1:10809`:
                // a NodeRuntime without environment information cannot parse `localhost`
                // correctly.
                // TODO: map to `[::1]` if we are using ipv6
                let proxy = proxy
                    .to_string()
                    .to_ascii_lowercase()
                    .replace("localhost", "127.0.0.1");

                command.args(["--proxy", &proxy]);
            }

            #[cfg(windows)]
            {
                // SYSTEMROOT is a critical environment variable for Windows.
                if let Some(val) = std::env::var("SYSTEMROOT")
                    .context("Missing environment variable: SYSTEMROOT!")
                    .log_err()
                {
                    command.env("SYSTEMROOT", val);
                }
                // Without ComSpec, the post-install will always fail.
                if let Some(val) = std::env::var("ComSpec")
                    .context("Missing environment variable: ComSpec!")
                    .log_err()
                {
                    command.env("ComSpec", val);
                }
                command.creation_flags(windows::Win32::System::Threading::CREATE_NO_WINDOW.0);
            }

            configure_npm_command(&mut command, directory, proxy);
            command.output().await.map_err(|e| anyhow!("{e}"))
        };

@@ -332,30 +449,122 @@ impl NodeRuntime for RealNodeRuntime {

        output.map_err(|e| anyhow!("{e}"))
    }
    async fn npm_package_installed_version(
        &self,
        local_package_directory: &Path,
        name: &str,
    ) -> Result<Option<String>> {
        read_package_installed_version(local_package_directory.join("node_modules"), name).await
    }
}

    async fn npm_package_latest_version(&self, name: &str) -> Result<String> {
        let output = self
            .run_npm_subcommand(
                None,
                "info",
                &[
                    name,
                    "--json",
                    "--fetch-retry-mintimeout",
                    "2000",
                    "--fetch-retry-maxtimeout",
                    "5000",
                    "--fetch-timeout",
                    "5000",
                ],
#[derive(Clone)]
pub struct SystemNodeRuntime {
    node: PathBuf,
    npm: PathBuf,
    global_node_modules: PathBuf,
    scratch_dir: PathBuf,
}

impl SystemNodeRuntime {
    const MIN_VERSION: semver::Version = Version::new(18, 0, 0);
    async fn new(node: PathBuf, npm: PathBuf) -> Result<Box<dyn NodeRuntimeTrait>> {
        let output = Command::new(&node)
            .arg("--version")
            .output()
            .await
            .with_context(|| format!("running node from {:?}", node))?;
        if !output.status.success() {
            anyhow::bail!(
                "failed to run node --version. stdout: {}, stderr: {}",
                String::from_utf8_lossy(&output.stdout),
                String::from_utf8_lossy(&output.stderr),
            );
        }
        let version_str = String::from_utf8_lossy(&output.stdout);
        let version = semver::Version::parse(version_str.trim().trim_start_matches('v'))?;
        if version < Self::MIN_VERSION {
            anyhow::bail!(
                "node at {} is too old. want: {}, got: {}",
                node.to_string_lossy(),
                Self::MIN_VERSION,
                version
            )
            .await?;
        }

        let mut info: NpmInfo = serde_json::from_slice(&output.stdout)?;
        info.dist_tags
            .latest
            .or_else(|| info.versions.pop())
            .ok_or_else(|| anyhow!("no version found for npm package {}", name))
        let scratch_dir = paths::support_dir().join("node");
        fs::create_dir(&scratch_dir).await.ok();
        fs::create_dir(scratch_dir.join("cache")).await.ok();
        fs::write(scratch_dir.join("blank_user_npmrc"), [])
            .await
            .ok();
        fs::write(scratch_dir.join("blank_global_npmrc"), [])
            .await
            .ok();

        let mut this = Self {
            node,
            npm,
            global_node_modules: PathBuf::default(),
            scratch_dir,
        };
        let output = this.run_npm_subcommand(None, None, "root", &["-g"]).await?;
        this.global_node_modules =
            PathBuf::from(String::from_utf8_lossy(&output.stdout).to_string());

        Ok(Box::new(this))
    }

    async fn detect() -> Option<Box<dyn NodeRuntimeTrait>> {
        let node = which::which("node").ok()?;
        let npm = which::which("npm").ok()?;
        Self::new(node, npm).await.log_err()
    }
}

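SystemNodeRuntime::new rejects any system node older than 18.0.0 by parsing the v-prefixed output of node --version with semver. A standalone sketch of that check (the version string is illustrative):

use semver::Version;

fn main() -> Result<(), semver::Error> {
    const MIN_VERSION: Version = Version::new(18, 0, 0);

    // What `node --version` prints, e.g. for a recent LTS (illustrative).
    let raw = "v20.11.1\n";

    // Strip whitespace and the leading `v` before parsing, as above.
    let version = Version::parse(raw.trim().trim_start_matches('v'))?;
    assert!(version >= MIN_VERSION, "system node is too old: {version}");
    println!("using system node {version}");
    Ok(())
}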
#[async_trait::async_trait]
impl NodeRuntimeTrait for SystemNodeRuntime {
    fn boxed_clone(&self) -> Box<dyn NodeRuntimeTrait> {
        Box::new(self.clone())
    }

    fn binary_path(&self) -> Result<PathBuf> {
        Ok(self.node.clone())
    }

    async fn run_npm_subcommand(
        &self,
        directory: Option<&Path>,
        proxy: Option<&Uri>,
        subcommand: &str,
        args: &[&str],
    ) -> anyhow::Result<Output> {
        let mut command = Command::new(self.npm.clone());
        command
            .env_clear()
            .env("PATH", std::env::var_os("PATH").unwrap_or_default())
            .arg(subcommand)
            .args(["--cache".into(), self.scratch_dir.join("cache")])
            .args([
                "--userconfig".into(),
                self.scratch_dir.join("blank_user_npmrc"),
            ])
            .args([
                "--globalconfig".into(),
                self.scratch_dir.join("blank_global_npmrc"),
            ])
            .args(args);
        configure_npm_command(&mut command, directory, proxy);
        let output = command.output().await?;
        if !output.status.success() {
            return Err(anyhow!(
                "failed to execute npm {subcommand} subcommand:\nstdout: {:?}\nstderr: {:?}",
                String::from_utf8_lossy(&output.stdout),
                String::from_utf8_lossy(&output.stderr)
            ));
        }

        Ok(output)
    }

    async fn npm_package_installed_version(
@@ -363,151 +572,104 @@ impl NodeRuntime for RealNodeRuntime {
        local_package_directory: &Path,
        name: &str,
    ) -> Result<Option<String>> {
        let mut package_json_path = local_package_directory.to_owned();
        package_json_path.extend(["node_modules", name, "package.json"]);
        read_package_installed_version(local_package_directory.join("node_modules"), name).await
        // todo: allow returning a globally installed version (requires callers not to hard-code the path)
    }
}

        let mut file = match fs::File::open(package_json_path).await {
            Ok(file) => file,
            Err(err) => {
                if err.kind() == io::ErrorKind::NotFound {
                    return Ok(None);
                }
pub async fn read_package_installed_version(
    node_module_directory: PathBuf,
    name: &str,
) -> Result<Option<String>> {
    let package_json_path = node_module_directory.join(name).join("package.json");

                Err(err)?
    let mut file = match fs::File::open(package_json_path).await {
        Ok(file) => file,
        Err(err) => {
            if err.kind() == io::ErrorKind::NotFound {
                return Ok(None);
            }
        };

    #[derive(Deserialize)]
    struct PackageJson {
        version: String,
            Err(err)?
        }
    };

        let mut contents = String::new();
        file.read_to_string(&mut contents).await?;
        let package_json: PackageJson = serde_json::from_str(&contents)?;
        Ok(Some(package_json.version))
    #[derive(Deserialize)]
    struct PackageJson {
        version: String,
    }

    async fn npm_install_packages(
        &self,
        directory: &Path,
        packages: &[(&str, &str)],
    ) -> Result<()> {
        let packages: Vec<_> = packages
            .iter()
            .map(|(name, version)| format!("{name}@{version}"))
            .collect();

        let mut arguments: Vec<_> = packages.iter().map(|p| p.as_str()).collect();
        arguments.extend_from_slice(&[
            "--save-exact",
            "--fetch-retry-mintimeout",
            "2000",
            "--fetch-retry-maxtimeout",
            "5000",
            "--fetch-timeout",
            "5000",
        ]);

        self.run_npm_subcommand(Some(directory), "install", &arguments)
            .await?;
        Ok(())
    }
    let mut contents = String::new();
    file.read_to_string(&mut contents).await?;
    let package_json: PackageJson = serde_json::from_str(&contents)?;
    Ok(Some(package_json.version))
}

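read_package_installed_version only needs the version field out of node_modules/<name>/package.json, so it deserializes into a one-field struct and ignores everything else. A standalone, synchronous sketch of the same parse (the JSON literal is illustrative):

use serde::Deserialize;

#[derive(Deserialize)]
struct PackageJson {
    version: String,
}

fn main() -> serde_json::Result<()> {
    // Stand-in for the contents of node_modules/<name>/package.json.
    let contents = r#"{ "name": "prettier", "version": "3.3.3", "main": "index.cjs" }"#;

    // Unknown fields are ignored by default, so only `version` is extracted.
    let package_json: PackageJson = serde_json::from_str(contents)?;
    assert_eq!(package_json.version, "3.3.3");
    Ok(())
}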
pub struct FakeNodeRuntime;

impl FakeNodeRuntime {
    pub fn new() -> Arc<dyn NodeRuntime> {
        Arc::new(Self)
    }
}
pub struct UnavailableNodeRuntime;

#[async_trait::async_trait]
impl NodeRuntime for FakeNodeRuntime {
    async fn binary_path(&self) -> anyhow::Result<PathBuf> {
        unreachable!()
impl NodeRuntimeTrait for UnavailableNodeRuntime {
    fn boxed_clone(&self) -> Box<dyn NodeRuntimeTrait> {
        Box::new(UnavailableNodeRuntime)
    }

    async fn node_environment_path(&self) -> anyhow::Result<OsString> {
        unreachable!()
    fn binary_path(&self) -> Result<PathBuf> {
        bail!("binary_path: no node runtime available")
    }

    async fn run_npm_subcommand(
        &self,
        _: Option<&Path>,
        subcommand: &str,
        args: &[&str],
        _: Option<&Uri>,
        _: &str,
        _: &[&str],
    ) -> anyhow::Result<Output> {
        unreachable!("Should not run npm subcommand '{subcommand}' with args {args:?}")
    }

    async fn npm_package_latest_version(&self, name: &str) -> anyhow::Result<String> {
        unreachable!("Should not query npm package '{name}' for latest version")
        bail!("run_npm_subcommand: no node runtime available")
    }

    async fn npm_package_installed_version(
        &self,
        _local_package_directory: &Path,
        name: &str,
        _: &str,
    ) -> Result<Option<String>> {
        unreachable!("Should not query npm package '{name}' for installed version")
    }

    async fn npm_install_packages(
        &self,
        _: &Path,
        packages: &[(&str, &str)],
    ) -> anyhow::Result<()> {
        unreachable!("Should not install packages {packages:?}")
        bail!("npm_package_installed_version: no node runtime available")
    }
}

// TODO: Remove this when headless binary can run node
pub struct DummyNodeRuntime;
fn configure_npm_command(command: &mut Command, directory: Option<&Path>, proxy: Option<&Uri>) {
    if let Some(directory) = directory {
        command.current_dir(directory);
        command.args(["--prefix".into(), directory.to_path_buf()]);
    }

impl DummyNodeRuntime {
    pub fn new() -> Arc<dyn NodeRuntime> {
        Arc::new(Self)
    }
}

#[async_trait::async_trait]
impl NodeRuntime for DummyNodeRuntime {
    async fn binary_path(&self) -> anyhow::Result<PathBuf> {
        anyhow::bail!("Dummy Node Runtime")
    }

    async fn node_environment_path(&self) -> anyhow::Result<OsString> {
        anyhow::bail!("Dummy node runtime")
    }

    async fn run_npm_subcommand(
        &self,
        _: Option<&Path>,
        _subcommand: &str,
        _args: &[&str],
    ) -> anyhow::Result<Output> {
        anyhow::bail!("Dummy node runtime")
    }

    async fn npm_package_latest_version(&self, _name: &str) -> anyhow::Result<String> {
        anyhow::bail!("Dummy node runtime")
    }

    async fn npm_package_installed_version(
        &self,
        _local_package_directory: &Path,
        _name: &str,
    ) -> Result<Option<String>> {
        anyhow::bail!("Dummy node runtime")
    }

    async fn npm_install_packages(
        &self,
        _: &Path,
        _packages: &[(&str, &str)],
    ) -> anyhow::Result<()> {
        anyhow::bail!("Dummy node runtime")
    if let Some(proxy) = proxy {
        // Map proxy settings from `http://localhost:10809` to `http://127.0.0.1:10809`:
        // a NodeRuntime without environment information cannot parse `localhost`
        // correctly.
        // TODO: map to `[::1]` if we are using ipv6
        let proxy = proxy
            .to_string()
            .to_ascii_lowercase()
            .replace("localhost", "127.0.0.1");

        command.args(["--proxy", &proxy]);
    }

    #[cfg(windows)]
    {
        // SYSTEMROOT is a critical environment variable for Windows.
        if let Some(val) = std::env::var("SYSTEMROOT")
            .context("Missing environment variable: SYSTEMROOT!")
            .log_err()
        {
            command.env("SYSTEMROOT", val);
        }
        // Without ComSpec, the post-install will always fail.
        if let Some(val) = std::env::var("ComSpec")
            .context("Missing environment variable: ComSpec!")
            .log_err()
        {
            command.env("ComSpec", val);
        }
        command.creation_flags(windows::Win32::System::Threading::CREATE_NO_WINDOW.0);
    }
}

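configure_npm_command lower-cases the proxy URL and rewrites localhost to 127.0.0.1 before handing it to npm, since the child npm process cannot rely on the parent's resolver setup. The rewrite itself is plain string work, shown standalone here:

fn normalize_proxy(proxy: &str) -> String {
    // Same transformation configure_npm_command applies:
    // lowercase, then replace the hostname npm cannot resolve on its own.
    proxy.to_ascii_lowercase().replace("localhost", "127.0.0.1")
}

fn main() {
    assert_eq!(
        normalize_proxy("http://LocalHost:10809"),
        "http://127.0.0.1:10809"
    );
}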
@@ -82,7 +82,8 @@ fn get_max_tokens(name: &str) -> usize {
        "llama3" | "gemma2" | "gemma" | "codegemma" | "starcoder" | "aya" => 8192,
        "codellama" | "starcoder2" => 16384,
        "mistral" | "codestral" | "mixstral" | "llava" | "qwen2" | "dolphin-mixtral" => 32768,
        "llama3.1" | "phi3" | "phi3.5" | "command-r" | "deepseek-coder-v2" | "yi-coder" => 128000,
        "llama3.1" | "phi3" | "phi3.5" | "command-r" | "deepseek-coder-v2" | "yi-coder"
        | "qwen2.5-coder" => 128000,
        _ => DEFAULT_TOKENS,
    }
    .clamp(1, MAXIMUM_TOKENS)
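The lookup is a plain match on the model family, clamped so no entry can ever exceed the hard ceiling. A standalone sketch of the same shape (the DEFAULT_TOKENS and MAXIMUM_TOKENS values here are illustrative, not Zed's):

const DEFAULT_TOKENS: usize = 2048; // illustrative
const MAXIMUM_TOKENS: usize = 16384; // illustrative

fn get_max_tokens(name: &str) -> usize {
    match name {
        "qwen2.5-coder" => 128000,
        "codellama" => 16384,
        _ => DEFAULT_TOKENS,
    }
    .clamp(1, MAXIMUM_TOKENS)
}

fn main() {
    // The clamp caps the 128k context at the illustrative ceiling.
    assert_eq!(get_max_tokens("qwen2.5-coder"), 16384);
    assert_eq!(get_max_tokens("unknown-model"), 2048);
}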
@@ -3906,9 +3906,11 @@ impl Render for OutlinePanel {
            .on_action(cx.listener(Self::toggle_active_editor_pin))
            .on_action(cx.listener(Self::unfold_directory))
            .on_action(cx.listener(Self::fold_directory))
            .when(project.is_local_or_ssh(), |el| {
            .when(project.is_local(), |el| {
                el.on_action(cx.listener(Self::reveal_in_finder))
                    .on_action(cx.listener(Self::open_in_terminal))
            })
            .when(project.is_local() || project.is_via_ssh(), |el| {
                el.on_action(cx.listener(Self::open_in_terminal))
            })
            .on_mouse_down(
                MouseButton::Right,
@@ -138,7 +138,7 @@ impl Prettier {
    pub async fn start(
        _: LanguageServerId,
        prettier_dir: PathBuf,
        _: Arc<dyn NodeRuntime>,
        _: NodeRuntime,
        _: AsyncAppContext,
    ) -> anyhow::Result<Self> {
        Ok(Self::Test(TestPrettier {
@@ -151,7 +151,7 @@ impl Prettier {
    pub async fn start(
        server_id: LanguageServerId,
        prettier_dir: PathBuf,
        node: Arc<dyn NodeRuntime>,
        node: NodeRuntime,
        cx: AsyncAppContext,
    ) -> anyhow::Result<Self> {
        use lsp::LanguageServerBinary;
File diff suppressed because it is too large
@@ -1,10 +1,9 @@
mod signature_help;

use crate::{
    buffer_store::BufferStore, lsp_store::LspStore, CodeAction, CoreCompletion, DocumentHighlight,
    Hover, HoverBlock, HoverBlockKind, InlayHint, InlayHintLabel, InlayHintLabelPart,
    InlayHintLabelPartTooltip, InlayHintTooltip, Location, LocationLink, MarkupContent,
    ProjectTransaction, ResolveState,
    lsp_store::LspStore, CodeAction, CoreCompletion, DocumentHighlight, Hover, HoverBlock,
    HoverBlockKind, InlayHint, InlayHintLabel, InlayHintLabelPart, InlayHintLabelPartTooltip,
    InlayHintTooltip, Location, LocationLink, MarkupContent, ProjectTransaction, ResolveState,
};
use anyhow::{anyhow, Context, Result};
use async_trait::async_trait;
@@ -417,18 +416,18 @@ impl LspCommand for PerformRename {
        message: proto::PerformRenameResponse,
        lsp_store: Model<LspStore>,
        _: Model<Buffer>,
        cx: AsyncAppContext,
        mut cx: AsyncAppContext,
    ) -> Result<ProjectTransaction> {
        let message = message
            .transaction
            .ok_or_else(|| anyhow!("missing transaction"))?;
        BufferStore::deserialize_project_transaction(
            lsp_store.read_with(&cx, |lsp_store, _| lsp_store.buffer_store().downgrade())?,
            message,
            self.push_to_history,
            cx,
        )
        .await
        lsp_store
            .update(&mut cx, |lsp_store, cx| {
                lsp_store.buffer_store().update(cx, |buffer_store, cx| {
                    buffer_store.deserialize_project_transaction(message, self.push_to_history, cx)
                })
            })?
            .await
    }

    fn buffer_id_from_proto(message: &proto::PerformRename) -> Result<BufferId> {
File diff suppressed because it is too large
@@ -25,12 +25,12 @@ use smol::stream::StreamExt;
use util::{ResultExt, TryFutureExt};

use crate::{
    worktree_store::WorktreeStore, File, FormatOperation, PathChange, ProjectEntryId, Worktree,
    WorktreeId,
    lsp_store::WorktreeId, worktree_store::WorktreeStore, File, PathChange, ProjectEntryId,
    Worktree,
};

pub struct PrettierStore {
    node: Arc<dyn NodeRuntime>,
    node: NodeRuntime,
    fs: Arc<dyn Fs>,
    languages: Arc<LanguageRegistry>,
    worktree_store: Model<WorktreeStore>,
@@ -52,7 +52,7 @@ impl EventEmitter<PrettierStoreEvent> for PrettierStore {}

impl PrettierStore {
    pub fn new(
        node: Arc<dyn NodeRuntime>,
        node: NodeRuntime,
        fs: Arc<dyn Fs>,
        languages: Arc<LanguageRegistry>,
        worktree_store: Model<WorktreeStore>,
@@ -212,7 +212,7 @@ impl PrettierStore {
    }

    fn start_prettier(
        node: Arc<dyn NodeRuntime>,
        node: NodeRuntime,
        prettier_dir: PathBuf,
        worktree_id: Option<WorktreeId>,
        cx: &mut ModelContext<Self>,
@@ -241,7 +241,7 @@ impl PrettierStore {
    }

    fn start_default_prettier(
        node: Arc<dyn NodeRuntime>,
        node: NodeRuntime,
        worktree_id: Option<WorktreeId>,
        cx: &mut ModelContext<PrettierStore>,
    ) -> Task<anyhow::Result<PrettierTask>> {
@@ -644,7 +644,7 @@ pub(super) async fn format_with_prettier(
    prettier_store: &WeakModel<PrettierStore>,
    buffer: &Model<Buffer>,
    cx: &mut AsyncAppContext,
) -> Option<Result<FormatOperation>> {
) -> Option<Result<crate::lsp_store::FormatOperation>> {
    let prettier_instance = prettier_store
        .update(cx, |prettier_store, cx| {
            prettier_store.prettier_instance_for_buffer(buffer, cx)
@@ -671,7 +671,7 @@ pub(super) async fn format_with_prettier(
    let format_result = prettier
        .format(buffer, buffer_path, cx)
        .await
        .map(FormatOperation::Prettier)
        .map(crate::lsp_store::FormatOperation::Prettier)
        .with_context(|| format!("{} failed to format buffer", prettier_description));

    Some(format_result)
@@ -749,7 +749,7 @@ impl DefaultPrettier {

    pub fn prettier_task(
        &mut self,
        node: &Arc<dyn NodeRuntime>,
        node: &NodeRuntime,
        worktree_id: Option<WorktreeId>,
        cx: &mut ModelContext<PrettierStore>,
    ) -> Option<Task<anyhow::Result<PrettierTask>>> {
@@ -767,7 +767,7 @@ impl PrettierInstance {
impl PrettierInstance {
    pub fn prettier_task(
        &mut self,
        node: &Arc<dyn NodeRuntime>,
        node: &NodeRuntime,
        prettier_dir: Option<&Path>,
        worktree_id: Option<WorktreeId>,
        cx: &mut ModelContext<PrettierStore>,
@@ -786,7 +786,7 @@ impl PrettierInstance {
        None => match prettier_dir {
            Some(prettier_dir) => {
                let new_task = PrettierStore::start_prettier(
                    Arc::clone(node),
                    node.clone(),
                    prettier_dir.to_path_buf(),
                    worktree_id,
                    cx,
@@ -797,7 +797,7 @@ impl PrettierInstance {
            }
            None => {
                self.attempt += 1;
                let node = Arc::clone(node);
                let node = node.clone();
                cx.spawn(|prettier_store, mut cx| async move {
                    prettier_store
                        .update(&mut cx, |_, cx| {
@@ -818,7 +818,7 @@ impl PrettierInstance {
async fn install_prettier_packages(
    fs: &dyn Fs,
    plugins_to_install: HashSet<Arc<str>>,
    node: Arc<dyn NodeRuntime>,
    node: NodeRuntime,
) -> anyhow::Result<()> {
    let packages_to_versions = future::try_join_all(
        plugins_to_install
File diff suppressed because it is too large
@@ -34,6 +34,10 @@ pub struct ProjectSettings {
    #[serde(default)]
    pub git: GitSettings,

    /// Configuration for Node-related features
    #[serde(default)]
    pub node: NodeBinarySettings,

    /// Configuration for how direnv configuration should be loaded
    #[serde(default)]
    pub load_direnv: DirenvSettings,
@@ -43,6 +47,17 @@ pub struct ProjectSettings {
    pub session: SessionSettings,
}

#[derive(Debug, Clone, Default, PartialEq, Serialize, Deserialize, JsonSchema)]
pub struct NodeBinarySettings {
    /// The path to the node binary
    pub path: Option<String>,
    /// The path to the npm binary Zed should use (defaults to .path/../npm)
    pub npm_path: Option<String>,
    /// If disabled, zed will download its own copy of node.
    #[serde(default)]
    pub ignore_system_version: Option<bool>,
}
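NodeBinarySettings is what the "node" block in user settings deserializes into. A standalone sketch of that mapping with a locally mirrored struct (field values are illustrative):

use serde::Deserialize;

// Local mirror of the NodeBinarySettings struct above.
#[derive(Debug, Default, Deserialize, PartialEq)]
struct NodeBinarySettings {
    path: Option<String>,
    npm_path: Option<String>,
    #[serde(default)]
    ignore_system_version: Option<bool>,
}

fn main() -> serde_json::Result<()> {
    // What a user might put under "node" in settings.json (illustrative).
    let json = r#"{
        "path": "/usr/local/bin/node",
        "npm_path": "/usr/local/bin/npm",
        "ignore_system_version": false
    }"#;

    let settings: NodeBinarySettings = serde_json::from_str(json)?;
    assert_eq!(settings.path.as_deref(), Some("/usr/local/bin/node"));
    Ok(())
}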

#[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "snake_case")]
pub enum DirenvSettings {
@@ -128,7 +143,7 @@ const fn true_value() -> bool {
pub struct BinarySettings {
    pub path: Option<String>,
    pub arguments: Option<Vec<String>>,
    pub path_lookup: Option<bool>,
    pub ignore_system_version: Option<bool>,
}

#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
@@ -319,17 +334,20 @@ impl SettingsObserver {
            .log_err();
    }

    let weak_client = ssh.downgrade();
    cx.observe_global::<SettingsStore>(move |_, cx| {
        let new_settings = cx.global::<SettingsStore>().raw_user_settings();
        if &settings != new_settings {
            settings = new_settings.clone()
        }
        if let Some(content) = serde_json::to_string(&settings).log_err() {
            ssh.send(proto::UpdateUserSettings {
                project_id: 0,
                content,
            })
            .log_err();
            if let Some(ssh) = weak_client.upgrade() {
                ssh.send(proto::UpdateUserSettings {
                    project_id: 0,
                    content,
                })
                .log_err();
            }
        }
    })
    .detach();
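The observer now captures a downgraded client handle and re-upgrades it on every settings change, so the global observer cannot keep the SSH client alive on its own. The same pattern, shown with plain std Arc/Weak:

use std::sync::{Arc, Weak};

struct Client;
impl Client {
    fn send(&self, msg: &str) {
        println!("sent: {msg}");
    }
}

fn main() {
    let client = Arc::new(Client);
    // The long-lived callback captures only a Weak, not the Arc itself.
    let weak_client: Weak<Client> = Arc::downgrade(&client);
    let on_settings_changed = move |content: &str| {
        // Upgrade succeeds only while the client is still alive elsewhere.
        if let Some(client) = weak_client.upgrade() {
            client.send(content);
        }
    };

    on_settings_changed("{\"node\": {}}"); // delivered
    drop(client);
    on_settings_changed("{}"); // silently skipped: the client is gone
}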

@@ -4,7 +4,7 @@ use futures::{future, StreamExt};
use gpui::{AppContext, SemanticVersion, UpdateGlobal};
use http_client::Url;
use language::{
    language_settings::{AllLanguageSettings, LanguageSettingsContent},
    language_settings::{language_settings, AllLanguageSettings, LanguageSettingsContent},
    tree_sitter_rust, tree_sitter_typescript, Diagnostic, DiagnosticSet, FakeLspAdapter,
    LanguageConfig, LanguageMatcher, LanguageName, LineEnding, OffsetRangeExt, Point, ToPoint,
};
@@ -537,7 +537,7 @@ async fn test_managing_language_servers(cx: &mut gpui::TestAppContext) {
    DiagnosticSet::from_sorted_entries(
        vec![DiagnosticEntry {
            diagnostic: Default::default(),
            range: buffer.min_anchor()..buffer.max_anchor(),
            range: Anchor::MIN..Anchor::MAX,
        }],
        &buffer.snapshot(),
    ),

@@ -18,7 +18,7 @@ use gpui::{
use postage::oneshot;
use rpc::{
    proto::{self, SSH_PROJECT_ID},
    AnyProtoClient, TypedEnvelope,
    AnyProtoClient, ErrorExt, TypedEnvelope,
};
use smol::{
    channel::{Receiver, Sender},
@@ -207,7 +207,7 @@ impl WorktreeStore {
        cx.background_executor().spawn(async move {
            match task.await {
                Ok(worktree) => Ok(worktree),
                Err(err) => Err(anyhow!("{}", err)),
                Err(err) => Err((*err).cloned()),
            }
        })
    }
@@ -221,10 +221,11 @@ impl WorktreeStore {
    ) -> Task<Result<Model<Worktree>, Arc<anyhow::Error>>> {
        let mut abs_path = abs_path.as_ref().to_string_lossy().to_string();
        // If we start with `/~`, that means the ssh path was something like `ssh://user@host/~/home-dir-folder/`,
        // in which case we want to strip the leading `/` and expand the tilde.
        // in which case we want to strip the leading `/`.
        // On the host side, the `~` will get expanded.
        // That's what git does too: https://github.com/libgit2/libgit2/issues/3345#issuecomment-127050850
        if abs_path.starts_with("/~") {
            abs_path = shellexpand::tilde(&abs_path[1..]).to_string();
            abs_path = abs_path[1..].to_string();
        }
        let root_name = PathBuf::from(abs_path.clone())
            .file_name()

@@ -484,6 +484,7 @@ impl ProjectPanel {
        let worktree_id = worktree.id();
        let is_read_only = project.is_read_only();
        let is_remote = project.is_via_collab() && project.dev_server_project_id().is_none();
        let is_local = project.is_local();

        let context_menu = ContextMenu::build(cx, |menu, cx| {
            menu.context(self.focus_handle.clone()).map(|menu| {
@@ -495,13 +496,15 @@ impl ProjectPanel {
                menu.action("New File", Box::new(NewFile))
                    .action("New Folder", Box::new(NewDirectory))
                    .separator()
                    .when(cfg!(target_os = "macos"), |menu| {
                    .when(is_local && cfg!(target_os = "macos"), |menu| {
                        menu.action("Reveal in Finder", Box::new(RevealInFileManager))
                    })
                    .when(cfg!(not(target_os = "macos")), |menu| {
                    .when(is_local && cfg!(not(target_os = "macos")), |menu| {
                        menu.action("Reveal in File Manager", Box::new(RevealInFileManager))
                    })
                    .action("Open in Default App", Box::new(OpenWithSystem))
                    .when(is_local, |menu| {
                        menu.action("Open in Default App", Box::new(OpenWithSystem))
                    })
                    .action("Open in Terminal", Box::new(OpenInTerminal))
                    .when(is_dir, |menu| {
                        menu.separator()
@@ -2719,11 +2722,14 @@ impl Render for ProjectPanel {
                    }
                }))
            })
            .when(project.is_local_or_ssh(), |el| {
            .when(project.is_local(), |el| {
                el.on_action(cx.listener(Self::reveal_in_finder))
                    .on_action(cx.listener(Self::open_system))
                    .on_action(cx.listener(Self::open_in_terminal))
            })
            .when(project.is_via_ssh(), |el| {
                el.on_action(cx.listener(Self::open_in_terminal))
            })
            .on_mouse_down(
                MouseButton::Right,
                cx.listener(move |this, event: &MouseDownEvent, cx| {

@@ -293,7 +293,10 @@ message Envelope {

        TryExec try_exec = 252;
        ReadTextFile read_text_file = 253;
        ReadTextFileResponse read_text_file_response = 254; // current max
        ReadTextFileResponse read_text_file_response = 254;

        CheckFileExists check_file_exists = 255;
        CheckFileExistsResponse check_file_exists_response = 256; // current max
    }

    reserved 158 to 161;
@@ -2574,3 +2577,13 @@ message TryExec {
message TryExecResponse {
    string text = 1;
}

message CheckFileExists {
    uint64 project_id = 1;
    string path = 2;
}

message CheckFileExistsResponse {
    bool exists = 1;
    string path = 2;
}

@@ -372,7 +372,9 @@ messages!(
    (ShellEnvResponse, Foreground),
    (TryExec, Foreground),
    (ReadTextFile, Foreground),
    (ReadTextFileResponse, Foreground)
    (ReadTextFileResponse, Foreground),
    (CheckFileExists, Background),
    (CheckFileExistsResponse, Background)
);

request_messages!(
@@ -501,6 +503,7 @@ request_messages!(
    (ShellEnv, ShellEnvResponse),
    (ReadTextFile, ReadTextFileResponse),
    (TryExec, Ack),
    (CheckFileExists, CheckFileExistsResponse)
);

entity_messages!(
@@ -578,7 +581,8 @@ entity_messages!(
    WhichCommand,
    ShellEnv,
    TryExec,
    ReadTextFile
    ReadTextFile,
    CheckFileExists,
);

entity_messages!(

@@ -259,23 +259,12 @@ impl PickerDelegate for RecentProjectsDelegate {
                    dev_server_project.paths.join("")
                )
            }
            SerializedWorkspaceLocation::Ssh(ssh_project) => {
                format!(
                    "{}{}{}{}",
                    ssh_project.host,
                    ssh_project
                        .port
                        .as_ref()
                        .map(|port| port.to_string())
                        .unwrap_or_default(),
                    ssh_project.path,
                    ssh_project
                        .user
                        .as_ref()
                        .map(|user| user.to_string())
                        .unwrap_or_default()
                )
            }
            SerializedWorkspaceLocation::Ssh(ssh_project) => ssh_project
                .ssh_urls()
                .iter()
                .map(|path| path.to_string_lossy().to_string())
                .collect::<Vec<_>>()
                .join(""),
        };

        StringMatchCandidate::new(id, combined_string)
@@ -403,7 +392,7 @@ impl PickerDelegate for RecentProjectsDelegate {
            password: None,
        };

        let paths = vec![PathBuf::from(ssh_project.path.clone())];
        let paths = ssh_project.paths.iter().map(PathBuf::from).collect();

        cx.spawn(|_, mut cx| async move {
            open_ssh_project(connection_options, paths, app_state, open_options, &mut cx).await
@@ -458,11 +447,10 @@ impl PickerDelegate for RecentProjectsDelegate {
                .order()
                .iter()
                .filter_map(|i| paths.paths().get(*i).cloned())
                .map(|path| path.compact())
                .collect(),
        ),
        SerializedWorkspaceLocation::Ssh(ssh_project) => {
            Arc::new(vec![PathBuf::from(ssh_project.ssh_url())])
        }
        SerializedWorkspaceLocation::Ssh(ssh_project) => Arc::new(ssh_project.ssh_urls()),
        SerializedWorkspaceLocation::DevServer(dev_server_project) => {
            Arc::new(vec![PathBuf::from(format!(
                "{}:{}",
@@ -475,7 +463,6 @@ impl PickerDelegate for RecentProjectsDelegate {
        let (match_labels, paths): (Vec<_>, Vec<_>) = paths
            .iter()
            .map(|path| {
                let path = path.compact();
                let highlighted_text =
                    highlights_for_path(path.as_ref(), &hit.positions, path_start_offset);

@@ -511,7 +498,7 @@ impl PickerDelegate for RecentProjectsDelegate {
                .color(Color::Muted)
                .into_any_element()
        }
        SerializedWorkspaceLocation::Ssh(_) => Icon::new(IconName::Screen)
        SerializedWorkspaceLocation::Ssh(_) => Icon::new(IconName::Server)
            .color(Color::Muted)
            .into_any_element(),
        SerializedWorkspaceLocation::DevServer(_) => {
@@ -706,7 +693,6 @@ fn highlights_for_path(
        },
    )
}

impl RecentProjectsDelegate {
    fn delete_recent_project(&self, ix: usize, cx: &mut ViewContext<Picker<Self>>) {
        if let Some(selected_match) = self.matches.get(ix) {

@@ -327,7 +327,14 @@ impl SshClientDelegate {
            cx,
        )
        .await
        .map_err(|e| anyhow::anyhow!("failed to download remote server binary: {}", e))?;
        .map_err(|e| {
            anyhow::anyhow!(
                "failed to download remote server binary (os: {}, arch: {}): {}",
                platform.os,
                platform.arch,
                e
            )
        })?;

        Ok((binary_path, version))
    }

@@ -11,7 +11,7 @@ use futures::{
    future::BoxFuture,
    select_biased, AsyncReadExt as _, AsyncWriteExt as _, Future, FutureExt as _, StreamExt as _,
};
use gpui::{AppContext, AsyncAppContext, Model, SemanticVersion};
use gpui::{AppContext, AsyncAppContext, Model, SemanticVersion, Task};
use parking_lot::Mutex;
use rpc::{
    proto::{self, build_typed_envelope, Envelope, EnvelopedMessage, PeerId, RequestMessage},
@@ -51,11 +51,12 @@ pub struct SshSession {
    spawn_process_tx: mpsc::UnboundedSender<SpawnRequest>,
    client_socket: Option<SshSocket>,
    state: Mutex<ProtoMessageHandlerSet>, // Lock
    _io_task: Option<Task<Result<()>>>,
}

struct SshClientState {
    socket: SshSocket,
    _master_process: process::Child,
    master_process: process::Child,
    _temp_dir: TempDir,
}

@@ -173,8 +174,7 @@ impl SshSession {
        let mut child_stdout = remote_server_child.stdout.take().unwrap();
        let mut child_stdin = remote_server_child.stdin.take().unwrap();

        let executor = cx.background_executor().clone();
        executor.clone().spawn(async move {
        let io_task = cx.background_executor().spawn(async move {
            let mut stdin_buffer = Vec::new();
            let mut stdout_buffer = Vec::new();
            let mut stderr_buffer = Vec::new();
@@ -264,9 +264,18 @@ impl SshSession {
                    }
                }
            }
        }).detach();
        });

        cx.update(|cx| Self::new(incoming_rx, outgoing_tx, spawn_process_tx, Some(socket), cx))
        cx.update(|cx| {
            Self::new(
                incoming_rx,
                outgoing_tx,
                spawn_process_tx,
                Some(socket),
                Some(io_task),
                cx,
            )
        })
    }

    pub fn server(
@@ -275,7 +284,7 @@ impl SshSession {
        cx: &AppContext,
    ) -> Arc<SshSession> {
        let (tx, _rx) = mpsc::unbounded();
        Self::new(incoming_rx, outgoing_tx, tx, None, cx)
        Self::new(incoming_rx, outgoing_tx, tx, None, None, cx)
    }

    #[cfg(any(test, feature = "test-support"))]
@@ -293,6 +302,7 @@ impl SshSession {
                client_to_server_tx,
                tx.clone(),
                None, // todo()
                None,
                cx,
            )
        }),
@@ -302,6 +312,7 @@ impl SshSession {
                server_to_client_tx,
                tx.clone(),
                None,
                None,
                cx,
            )
        }),
@@ -313,6 +324,7 @@ impl SshSession {
        outgoing_tx: mpsc::UnboundedSender<Envelope>,
        spawn_process_tx: mpsc::UnboundedSender<SpawnRequest>,
        client_socket: Option<SshSocket>,
        io_task: Option<Task<Result<()>>>,
        cx: &AppContext,
    ) -> Arc<SshSession> {
        let this = Arc::new(Self {
@@ -322,13 +334,18 @@ impl SshSession {
            spawn_process_tx,
            client_socket,
            state: Default::default(),
            _io_task: io_task,
        });

        cx.spawn(|cx| {
            let this = this.clone();
            let this = Arc::downgrade(&this);
            async move {
                let peer_id = PeerId { owner_id: 0, id: 0 };
                while let Some(incoming) = incoming_rx.next().await {
                    let Some(this) = this.upgrade() else {
                        return anyhow::Ok(());
                    };

                    if let Some(request_id) = incoming.responding_to {
                        let request_id = MessageId(request_id);
                        let sender = this.response_channels.lock().remove(&request_id);
@@ -576,7 +593,7 @@ impl SshClientState {
                connection_options,
                socket_path,
            },
            _master_process: master_process,
            master_process,
            _temp_dir: temp_dir,
        })
    }
@@ -699,6 +716,14 @@ impl SshClientState {
    }
}

impl Drop for SshClientState {
    fn drop(&mut self) {
        if let Err(error) = self.master_process.kill() {
            log::error!("failed to kill SSH master process: {}", error);
        }
    }
}

impl SshSocket {
    fn ssh_command<S: AsRef<OsStr>>(&self, program: S) -> process::Command {
        let mut command = process::Command::new("ssh");

@@ -22,6 +22,7 @@ test-support = ["fs/test-support"]

[dependencies]
anyhow.workspace = true
client.workspace = true
env_logger.workspace = true
fs.workspace = true
futures.workspace = true

@@ -2,7 +2,7 @@ use anyhow::{anyhow, Result};
use fs::Fs;
use gpui::{AppContext, AsyncAppContext, Context, Model, ModelContext};
use language::{proto::serialize_operation, Buffer, BufferEvent, LanguageRegistry};
use node_runtime::DummyNodeRuntime;
use node_runtime::NodeRuntime;
use project::{
    buffer_store::{BufferStore, BufferStoreEvent},
    project_settings::SettingsObserver,
@@ -50,14 +50,13 @@ impl HeadlessProject {
            store
        });
        let buffer_store = cx.new_model(|cx| {
            let mut buffer_store =
                BufferStore::new(worktree_store.clone(), Some(SSH_PROJECT_ID), cx);
            let mut buffer_store = BufferStore::local(worktree_store.clone(), cx);
            buffer_store.shared(SSH_PROJECT_ID, session.clone().into(), cx);
            buffer_store
        });
        let prettier_store = cx.new_model(|cx| {
            PrettierStore::new(
                DummyNodeRuntime::new(),
                NodeRuntime::unavailable(),
                fs.clone(),
                languages.clone(),
                worktree_store.clone(),
@@ -108,6 +107,7 @@ impl HeadlessProject {
        session.subscribe_to_entity(SSH_PROJECT_ID, &settings_observer);

        client.add_request_handler(cx.weak_model(), Self::handle_list_remote_directory);
        client.add_request_handler(cx.weak_model(), Self::handle_check_file_exists);

        client.add_model_request_handler(Self::handle_add_worktree);
        client.add_model_request_handler(Self::handle_open_buffer_by_path);
@@ -189,11 +189,34 @@ impl HeadlessProject {
        message: TypedEnvelope<proto::AddWorktree>,
        mut cx: AsyncAppContext,
    ) -> Result<proto::AddWorktreeResponse> {
        use client::ErrorCodeExt;
        let path = shellexpand::tilde(&message.payload.path).to_string();

        let fs = this.read_with(&mut cx, |this, _| this.fs.clone())?;
        let path = PathBuf::from(path);

        let canonicalized = match fs.canonicalize(&path).await {
            Ok(path) => path,
            Err(e) => {
                let mut parent = path
                    .parent()
                    .ok_or(e)
                    .map_err(|_| anyhow!("{:?} does not exist", path))?;
                if parent == Path::new("") {
                    parent = util::paths::home_dir();
                }
                let parent = fs.canonicalize(parent).await.map_err(|_| {
                    anyhow!(proto::ErrorCode::DevServerProjectPathDoesNotExist
                        .with_tag("path", &path.to_string_lossy().as_ref()))
                })?;
                parent.join(path.file_name().unwrap())
            }
        };

        let worktree = this
            .update(&mut cx.clone(), |this, _| {
                Worktree::local(
                    Path::new(&path),
                    Arc::from(canonicalized),
                    true,
                    this.fs.clone(),
                    this.next_entry_id.clone(),
@@ -298,4 +321,20 @@ impl HeadlessProject {
        }
        Ok(proto::ListRemoteDirectoryResponse { entries })
    }

    pub async fn handle_check_file_exists(
        this: Model<Self>,
        envelope: TypedEnvelope<proto::CheckFileExists>,
        cx: AsyncAppContext,
    ) -> Result<proto::CheckFileExistsResponse> {
        let fs = cx.read_model(&this, |this, _| this.fs.clone())?;
        let expanded = shellexpand::tilde(&envelope.payload.path).to_string();

        let exists = fs.is_file(&PathBuf::from(expanded.clone())).await;

        Ok(proto::CheckFileExistsResponse {
            exists,
            path: expanded,
        })
    }
}
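Both handle_add_worktree and handle_check_file_exists run shellexpand::tilde on the host, so paths like ~/projects work no matter what form the client sends. A standalone sketch of that expansion (the resulting home directory depends on the environment):

fn main() {
    // shellexpand::tilde substitutes the current user's home directory
    // for a leading `~` and leaves other paths untouched.
    let expanded = shellexpand::tilde("~/projects/zed").to_string();
    // e.g. "/home/user/projects/zed" when $HOME is set.
    println!("{expanded}");

    let absolute = shellexpand::tilde("/etc/hosts").to_string();
    assert_eq!(absolute, "/etc/hosts");
}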
Some files were not shown because too many files have changed in this diff