Compare commits

..

7 Commits

Author SHA1 Message Date
Marshall Bowers
9641ae0755 Remove basic.conf (#10120)
This PR removes the `basic.conf` file.

In #10099 we suppressed some typo warnings that had cropped up in this
file, but it turns out we don't need the file at all.

Release Notes:

- N/A
2024-04-03 12:32:47 -04:00
Kirill Bulatov
ce73ff9808 Avoid failing format test with current date (#10068)
Replace the test that used a timestamp taken from
`chrono::offset::Local::now().naive_local()`, which made the formatting
test fail at least once per year.


Release Notes:

- N/A
2024-04-03 12:32:40 -04:00
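
The commit above describes replacing a test that formatted the current local time, so its expected output drifted once a year. A minimal illustrative sketch of that idea (not Zed's actual test; `format_date` is a hypothetical helper): pin the input to a fixed `chrono` timestamp so the assertion never depends on the wall clock.

```rust
use chrono::{NaiveDate, NaiveDateTime};

// Hypothetical formatter standing in for the code under test.
fn format_date(date: NaiveDateTime) -> String {
    date.format("%Y-%m-%d %H:%M").to_string()
}

#[test]
fn formats_a_fixed_timestamp() {
    // A pinned timestamp keeps the expected string stable across years,
    // unlike `Local::now().naive_local()`, which rolls over annually.
    let fixed = NaiveDate::from_ymd_opt(2024, 4, 3)
        .unwrap()
        .and_hms_opt(12, 32, 40)
        .unwrap();
    assert_eq!(format_date(fixed), "2024-04-03 12:32");
}
```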
Joseph T. Lyons
240db73199 v0.129.x stable 2024-04-03 12:11:10 -04:00
gcp-cherry-pick-bot[bot]
6b52917e75 Don't update active completion for editors that are not focused (cherry-pick #9904) (#9907)
Cherry-picked "Don't update active completion for editors that are not
focused" (#9904)

Release Notes:

- N/A

Co-authored-by: Antonio Scandurra <me@as-cii.com>
2024-03-28 10:52:33 +01:00
Marshall Bowers
f226a9932a zed 0.129.1 2024-03-27 13:50:53 -04:00
Marshall Bowers
a7915cb848 Look up extensions in the new index when reporting extension events (#9879)
This PR fixes a bug that prevented extension telemetry events from being
reported.

We need to look up the extensions in the new index, as the extensions to
load won't be found in the old index.

Release Notes:

- N/A
2024-03-27 13:48:45 -04:00
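
The commit above explains that, during the index migration, extension telemetry must consult the new index because freshly loaded extensions are absent from the old one. A rough sketch of that lookup pattern, with hypothetical `ExtensionStore` fields and types standing in for Zed's real ones:

```rust
use std::collections::HashMap;

// Hypothetical stand-ins for the old and new extension indexes; Zed's real
// types and field names differ.
struct ExtensionEntry {
    version: String,
}

struct ExtensionStore {
    old_index: HashMap<String, ExtensionEntry>, // kept only for contrast
    new_index: HashMap<String, ExtensionEntry>,
}

impl ExtensionStore {
    /// Build a telemetry payload for an extension event. Looking the id up in
    /// `new_index` rather than `old_index` is the point of the fix: extensions
    /// loaded after the migration exist only in the new index.
    fn extension_event_payload(&self, id: &str) -> Option<String> {
        let entry = self.new_index.get(id)?;
        Some(format!("extension={} version={}", id, entry.version))
    }
}
```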
Joseph T. Lyons
2d8288f076 v0.129.x preview 2024-03-27 10:52:55 -04:00
325 changed files with 4370 additions and 12095 deletions

View File

@@ -23,6 +23,12 @@ body:
description: Run the `copy system specs into clipboard` command palette action and paste the output in the field below.
validations:
required: true
- type: textarea
attributes:
label: If applicable, add mockups / screenshots to help explain / present your vision of the feature
description: Drag issues into the text input below
validations:
required: false
- type: textarea
attributes:
label: If applicable, attach your `~/Library/Logs/Zed/Zed.log` file to this issue.

View File

@@ -54,9 +54,6 @@ jobs:
- name: Check unused dependencies
uses: bnjbvr/cargo-machete@main
- name: Check license generation
run: script/generate-licenses /tmp/zed_licenses_output
- name: Ensure fresh merge
shell: bash -euxo pipefail {0}
run: |

Cargo.lock (generated): 460 changes
View File

@@ -9,7 +9,6 @@ dependencies = [
"anyhow",
"auto_update",
"editor",
"extension",
"futures 0.3.28",
"gpui",
"language",
@@ -88,9 +87,9 @@ dependencies = [
[[package]]
name = "alacritty_terminal"
version = "0.23.0"
version = "0.23.0-rc1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f6d1ea4484c8676f295307a4892d478c70ac8da1dbd8c7c10830a504b7f1022f"
checksum = "bc2c16faa5425a10be102dda76f73d76049b44746e18ddeefc44d78bbe76cbce"
dependencies = [
"base64 0.22.0",
"bitflags 2.4.2",
@@ -213,18 +212,6 @@ dependencies = [
"windows-sys 0.48.0",
]
[[package]]
name = "anthropic"
version = "0.1.0"
dependencies = [
"anyhow",
"futures 0.3.28",
"serde",
"serde_json",
"tokio",
"util",
]
[[package]]
name = "anyhow"
version = "1.0.75"
@@ -298,9 +285,9 @@ dependencies = [
[[package]]
name = "ashpd"
version = "0.8.1"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dd884d7c72877a94102c3715f3b1cd09ff4fac28221add3e57cfbe25c236d093"
checksum = "01992ad7774250d5b7fe214e2676cb99bf92564436d8135ab44fe815e71769a9"
dependencies = [
"async-fs 2.1.1",
"async-net 2.0.0",
@@ -311,7 +298,7 @@ dependencies = [
"serde",
"serde_repr",
"url",
"zbus",
"zbus 3.15.1",
]
[[package]]
@@ -335,7 +322,6 @@ dependencies = [
"ctor",
"editor",
"env_logger",
"file_icons",
"fs",
"futures 0.3.28",
"gpui",
@@ -365,6 +351,16 @@ dependencies = [
"workspace",
]
[[package]]
name = "async-broadcast"
version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7c48ccdbf6ca6b121e0f586cbc0e73ae440e56c67c30fa0873b4e110d9c26d2b"
dependencies = [
"event-listener 2.5.3",
"futures-core",
]
[[package]]
name = "async-broadcast"
version = "0.7.0"
@@ -2170,7 +2166,6 @@ dependencies = [
name = "collab"
version = "0.44.0"
dependencies = [
"anthropic",
"anyhow",
"async-trait",
"async-tungstenite",
@@ -2223,7 +2218,6 @@ dependencies = [
"rustc-demangle",
"scrypt",
"sea-orm",
"semantic_version",
"semver",
"serde",
"serde_derive",
@@ -2571,21 +2565,19 @@ dependencies = [
[[package]]
name = "cosmic-text"
version = "0.11.2"
version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c578f2b9abb4d5f3fbb12aba4008084d435dc6a8425c195cfe0b3594bfea0c25"
checksum = "75acbfb314aeb4f5210d379af45ed1ec2c98c7f1790bf57b8a4c562ac0c51b71"
dependencies = [
"bitflags 2.4.2",
"fontdb 0.16.2",
"fontdb 0.15.0",
"libm",
"log",
"rangemap",
"rustc-hash",
"rustybuzz 0.12.1",
"rustybuzz 0.11.0",
"self_cell",
"swash",
"sys-locale",
"ttf-parser 0.20.0",
"unicode-bidi",
"unicode-linebreak",
"unicode-script",
@@ -2807,7 +2799,7 @@ dependencies = [
"autocfg",
"cfg-if",
"crossbeam-utils",
"memoffset",
"memoffset 0.9.0",
"scopeguard",
]
@@ -3262,8 +3254,6 @@ dependencies = [
"sum_tree",
"text",
"theme",
"time",
"time_format",
"tree-sitter-html",
"tree-sitter-rust",
"tree-sitter-typescript",
@@ -3508,7 +3498,6 @@ dependencies = [
"parking_lot",
"project",
"schemars",
"semantic_version",
"serde",
"serde_json",
"serde_json_lenient",
@@ -3555,13 +3544,10 @@ dependencies = [
"db",
"editor",
"extension",
"fs",
"fuzzy",
"gpui",
"language",
"picker",
"project",
"semantic_version",
"serde",
"settings",
"smallvec",
@@ -3691,18 +3677,6 @@ dependencies = [
"workspace",
]
[[package]]
name = "file_icons"
version = "0.1.0"
dependencies = [
"collections",
"gpui",
"serde",
"serde_derive",
"serde_json",
"util",
]
[[package]]
name = "filetime"
version = "0.2.22"
@@ -3819,16 +3793,16 @@ dependencies = [
[[package]]
name = "fontdb"
version = "0.16.2"
version = "0.15.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b0299020c3ef3f60f526a4f64ab4a3d4ce116b1acbf24cdd22da0068e5d81dc3"
checksum = "020e203f177c0fb250fb19455a252e838d2bbbce1f80f25ecc42402aafa8cd38"
dependencies = [
"fontconfig-parser",
"log",
"memmap2 0.9.4",
"memmap2 0.8.0",
"slotmap",
"tinyvec",
"ttf-parser 0.20.0",
"ttf-parser 0.19.2",
]
[[package]]
@@ -3875,9 +3849,9 @@ checksum = "aa9a19cbb55df58761df49b23516a86d432839add4af60fc256da840f66ed35b"
[[package]]
name = "form_urlencoded"
version = "1.2.1"
version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456"
checksum = "a62bc1cf6f830c2ec14a513a9fb124d0a213a629668a4186f329db21fe045652"
dependencies = [
"percent-encoding",
]
@@ -3913,7 +3887,6 @@ dependencies = [
"collections",
"fsevent",
"futures 0.3.28",
"git",
"git2",
"gpui",
"lazy_static",
@@ -4211,21 +4184,14 @@ dependencies = [
name = "git"
version = "0.1.0"
dependencies = [
"anyhow",
"clock",
"collections",
"git2",
"lazy_static",
"log",
"pretty_assertions",
"serde",
"serde_json",
"smol",
"sum_tree",
"text",
"time",
"unindent",
"url",
]
[[package]]
@@ -4402,7 +4368,6 @@ dependencies = [
"resvg",
"schemars",
"seahash",
"semantic_version",
"serde",
"serde_derive",
"serde_json",
@@ -4779,9 +4744,9 @@ checksum = "25a2bc672d1148e28034f176e01fffebb08b35768468cc954630da77a1449005"
[[package]]
name = "idna"
version = "0.5.0"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6"
checksum = "7d20d6b07bfbc108882d88ed8e37d39636dcc260e15e30c45e6ba089610b917c"
dependencies = [
"unicode-bidi",
"unicode-normalization",
@@ -5330,12 +5295,15 @@ dependencies = [
"tree-sitter",
"tree-sitter-bash",
"tree-sitter-c",
"tree-sitter-c-sharp",
"tree-sitter-clojure",
"tree-sitter-cpp",
"tree-sitter-css",
"tree-sitter-dart",
"tree-sitter-elixir",
"tree-sitter-elm",
"tree-sitter-embedded-template",
"tree-sitter-erlang",
"tree-sitter-glsl",
"tree-sitter-go",
"tree-sitter-gomod",
@@ -5350,6 +5318,7 @@ dependencies = [
"tree-sitter-nix",
"tree-sitter-nu",
"tree-sitter-ocaml",
"tree-sitter-php",
"tree-sitter-proto",
"tree-sitter-python",
"tree-sitter-racket",
@@ -5357,9 +5326,11 @@ dependencies = [
"tree-sitter-ruby",
"tree-sitter-rust",
"tree-sitter-scheme",
"tree-sitter-toml",
"tree-sitter-typescript",
"tree-sitter-vue",
"tree-sitter-yaml",
"tree-sitter-zig",
"unindent",
"util",
"workspace",
@@ -5524,7 +5495,7 @@ name = "live_kit_client"
version = "0.1.0"
dependencies = [
"anyhow",
"async-broadcast",
"async-broadcast 0.7.0",
"async-trait",
"collections",
"core-foundation",
@@ -5599,7 +5570,6 @@ dependencies = [
"serde_json",
"smol",
"util",
"windows 0.53.0",
]
[[package]]
@@ -5772,6 +5742,15 @@ dependencies = [
"libc",
]
[[package]]
name = "memoffset"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5de893c32cde5f383baa4c04c5d6dbdd735cfd4a794b0debdb2bb1b421da5ff4"
dependencies = [
"autocfg",
]
[[package]]
name = "memoffset"
version = "0.9.0"
@@ -6009,6 +5988,18 @@ dependencies = [
"libc",
]
[[package]]
name = "nix"
version = "0.26.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "598beaf3cc6fdd9a5dfb1630c2800c7acd31df7aaf0f565796fba2b53ca1af1b"
dependencies = [
"bitflags 1.3.2",
"cfg-if",
"libc",
"memoffset 0.7.1",
]
[[package]]
name = "nix"
version = "0.27.1"
@@ -6018,7 +6009,7 @@ dependencies = [
"bitflags 2.4.2",
"cfg-if",
"libc",
"memoffset",
"memoffset 0.9.0",
]
[[package]]
@@ -6387,9 +6378,9 @@ dependencies = [
"rand 0.8.5",
"serde",
"sha2 0.10.7",
"zbus",
"zbus 4.0.1",
"zeroize",
"zvariant",
"zvariant 4.0.2",
]
[[package]]
@@ -6400,9 +6391,9 @@ checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5"
[[package]]
name = "open"
version = "5.1.2"
version = "5.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "449f0ff855d85ddbf1edd5b646d65249ead3f5e422aaa86b7d2d0b049b103e32"
checksum = "90878fb664448b54c4e592455ad02831e23a3f7e157374a8b95654731aac7349"
dependencies = [
"is-wsl",
"libc",
@@ -6724,9 +6715,9 @@ dependencies = [
[[package]]
name = "percent-encoding"
version = "2.3.1"
version = "2.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e"
checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94"
[[package]]
name = "petgraph"
@@ -6766,7 +6757,6 @@ dependencies = [
"env_logger",
"gpui",
"menu",
"serde",
"serde_json",
"ui",
"workspace",
@@ -7076,7 +7066,6 @@ dependencies = [
"fs",
"futures 0.3.28",
"fuzzy",
"git",
"git2",
"globset",
"gpui",
@@ -7118,7 +7107,6 @@ dependencies = [
"collections",
"db",
"editor",
"file_icons",
"gpui",
"language",
"menu",
@@ -7459,9 +7447,11 @@ dependencies = [
name = "recent_projects"
version = "0.1.0"
dependencies = [
"collections",
"editor",
"fuzzy",
"gpui",
"itertools 0.11.0",
"language",
"menu",
"ordered-float 2.10.0",
@@ -8056,11 +8046,11 @@ dependencies = [
[[package]]
name = "rustybuzz"
version = "0.12.1"
version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f0ae5692c5beaad6a9e22830deeed7874eae8a4e3ba4076fb48e12c56856222c"
checksum = "2ee8fe2a8461a0854a37101fe7a1b13998d0cfa987e43248e81d2a5f4570f6fa"
dependencies = [
"bitflags 2.4.2",
"bitflags 1.3.2",
"bytemuck",
"libm",
"smallvec",
@@ -8309,6 +8299,7 @@ dependencies = [
"serde",
"serde_json",
"settings",
"smallvec",
"smol",
"theme",
"ui",
@@ -8360,14 +8351,6 @@ version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "58bf37232d3bb9a2c4e641ca2a11d83b5062066f88df7fed36c28772046d65ba"
[[package]]
name = "semantic_version"
version = "0.1.0"
dependencies = [
"anyhow",
"serde",
]
[[package]]
name = "semver"
version = "1.0.18"
@@ -9159,7 +9142,6 @@ dependencies = [
"ctrlc",
"dialoguer",
"editor",
"embed-manifest",
"fuzzy",
"gpui",
"indoc",
@@ -9416,29 +9398,6 @@ dependencies = [
"winx",
]
[[package]]
name = "tab_switcher"
version = "0.1.0"
dependencies = [
"anyhow",
"collections",
"ctor",
"editor",
"env_logger",
"gpui",
"language",
"menu",
"picker",
"project",
"serde",
"serde_json",
"terminal_view",
"theme",
"ui",
"util",
"workspace",
]
[[package]]
name = "taffy"
version = "0.3.11"
@@ -9492,7 +9451,6 @@ dependencies = [
"editor",
"fuzzy",
"gpui",
"itertools 0.11.0",
"language",
"menu",
"picker",
@@ -9511,8 +9469,8 @@ dependencies = [
name = "telemetry_events"
version = "0.1.0"
dependencies = [
"semantic_version",
"serde",
"util",
]
[[package]]
@@ -10193,6 +10151,24 @@ dependencies = [
"tree-sitter",
]
[[package]]
name = "tree-sitter-c-sharp"
version = "0.20.0"
source = "git+https://github.com/tree-sitter/tree-sitter-c-sharp?rev=dd5e59721a5f8dae34604060833902b882023aaf#dd5e59721a5f8dae34604060833902b882023aaf"
dependencies = [
"cc",
"tree-sitter",
]
[[package]]
name = "tree-sitter-clojure"
version = "0.0.9"
source = "git+https://github.com/prcastro/tree-sitter-clojure?branch=update-ts#38b4f8d264248b2fd09575fbce66f7c22e8929d5"
dependencies = [
"cc",
"tree-sitter",
]
[[package]]
name = "tree-sitter-cpp"
version = "0.20.0"
@@ -10248,6 +10224,16 @@ dependencies = [
"tree-sitter",
]
[[package]]
name = "tree-sitter-erlang"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "93ced5145ebb17f83243bf055b74e108da7cc129e12faab4166df03f59b287f4"
dependencies = [
"cc",
"tree-sitter",
]
[[package]]
name = "tree-sitter-glsl"
version = "0.1.4"
@@ -10386,6 +10372,16 @@ dependencies = [
"tree-sitter",
]
[[package]]
name = "tree-sitter-php"
version = "0.21.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0db3788e709a5adfb583683a4b686a084e41a0f9e5a2fcb9a8e358f11481036a"
dependencies = [
"cc",
"tree-sitter",
]
[[package]]
name = "tree-sitter-proto"
version = "0.0.2"
@@ -10453,6 +10449,15 @@ dependencies = [
"tree-sitter",
]
[[package]]
name = "tree-sitter-toml"
version = "0.5.1"
source = "git+https://github.com/tree-sitter/tree-sitter-toml?rev=342d9be207c2dba869b9967124c679b5e6fd0ebe#342d9be207c2dba869b9967124c679b5e6fd0ebe"
dependencies = [
"cc",
"tree-sitter",
]
[[package]]
name = "tree-sitter-typescript"
version = "0.20.2"
@@ -10480,6 +10485,15 @@ dependencies = [
"tree-sitter",
]
[[package]]
name = "tree-sitter-zig"
version = "0.0.1"
source = "git+https://github.com/maxxnino/tree-sitter-zig?rev=0d08703e4c3f426ec61695d7617415fff97029bd#0d08703e4c3f426ec61695d7617415fff97029bd"
dependencies = [
"cc",
"tree-sitter",
]
[[package]]
name = "try-lock"
version = "0.2.4"
@@ -10498,6 +10512,12 @@ version = "0.12.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7ae2f58a822f08abdaf668897e96a5656fe72f5a9ce66422423e8849384872e6"
[[package]]
name = "ttf-parser"
version = "0.19.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49d64318d8311fc2668e48b63969f4343e0a85c4a109aa8460d6672e364b8bd1"
[[package]]
name = "ttf-parser"
version = "0.20.0"
@@ -10555,7 +10575,7 @@ version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "89daebc3e6fd160ac4aa9fc8b3bf71e1f74fbf92367ae71fb83a037e8bf164b9"
dependencies = [
"memoffset",
"memoffset 0.9.0",
"tempfile",
"winapi",
]
@@ -10692,9 +10712,9 @@ checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1"
[[package]]
name = "url"
version = "2.5.0"
version = "2.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633"
checksum = "143b538f18257fac9cad154828a57c6bf5157e1aa604d4816b5995bf6de87ae5"
dependencies = [
"form_urlencoded",
"idna",
@@ -10965,9 +10985,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
[[package]]
name = "wasm-bindgen"
version = "0.2.92"
version = "0.2.87"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8"
checksum = "7706a72ab36d8cb1f80ffbf0e071533974a60d0a308d01a5d0375bf60499a342"
dependencies = [
"cfg-if",
"wasm-bindgen-macro",
@@ -10975,9 +10995,9 @@ dependencies = [
[[package]]
name = "wasm-bindgen-backend"
version = "0.2.92"
version = "0.2.87"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da"
checksum = "5ef2b6d3c510e9625e5fe6f509ab07d66a760f0885d858736483c32ed7809abd"
dependencies = [
"bumpalo",
"log",
@@ -11002,9 +11022,9 @@ dependencies = [
[[package]]
name = "wasm-bindgen-macro"
version = "0.2.92"
version = "0.2.87"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726"
checksum = "dee495e55982a3bd48105a7b947fd2a9b4a8ae3010041b9e0faab3f9cd028f1d"
dependencies = [
"quote",
"wasm-bindgen-macro-support",
@@ -11012,9 +11032,9 @@ dependencies = [
[[package]]
name = "wasm-bindgen-macro-support"
version = "0.2.92"
version = "0.2.87"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7"
checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b"
dependencies = [
"proc-macro2",
"quote",
@@ -11025,9 +11045,9 @@ dependencies = [
[[package]]
name = "wasm-bindgen-shared"
version = "0.2.92"
version = "0.2.87"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96"
checksum = "ca6ad05a4870b2bf5fe995117d3728437bd27d7cd5f06f13c17443ef369775a1"
[[package]]
name = "wasm-encoder"
@@ -11275,7 +11295,7 @@ dependencies = [
"log",
"mach",
"memfd",
"memoffset",
"memoffset 0.9.0",
"paste",
"psm",
"rustix 0.38.32",
@@ -11511,9 +11531,9 @@ dependencies = [
[[package]]
name = "weezl"
version = "0.1.8"
version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "53a85b86a771b1c87058196170769dd264f66c0782acf1ae6cc51bfd64b39082"
checksum = "9193164d4de03a926d909d3bc7c30543cecb35400c02114792c2cae20d5e2dbb"
[[package]]
name = "welcome"
@@ -12305,13 +12325,54 @@ dependencies = [
"pkg-config",
]
[[package]]
name = "zbus"
version = "3.15.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5acecd3f8422f198b1a2f954bcc812fe89f3fa4281646f3da1da7925db80085d"
dependencies = [
"async-broadcast 0.5.1",
"async-executor",
"async-fs 1.6.0",
"async-io 1.13.0",
"async-lock 2.8.0",
"async-process 1.7.0",
"async-recursion 1.0.5",
"async-task",
"async-trait",
"blocking",
"byteorder",
"derivative",
"enumflags2",
"event-listener 2.5.3",
"futures-core",
"futures-sink",
"futures-util",
"hex",
"nix 0.26.4",
"once_cell",
"ordered-stream",
"rand 0.8.5",
"serde",
"serde_repr",
"sha1",
"static_assertions",
"tracing",
"uds_windows",
"winapi",
"xdg-home",
"zbus_macros 3.15.1",
"zbus_names 2.6.1",
"zvariant 3.15.1",
]
[[package]]
name = "zbus"
version = "4.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7b8e3d6ae3342792a6cc2340e4394334c7402f3d793b390d2c5494a4032b3030"
dependencies = [
"async-broadcast",
"async-broadcast 0.7.0",
"async-executor",
"async-fs 2.1.1",
"async-io 2.3.1",
@@ -12339,9 +12400,23 @@ dependencies = [
"uds_windows",
"windows-sys 0.52.0",
"xdg-home",
"zbus_macros",
"zbus_names",
"zvariant",
"zbus_macros 4.0.1",
"zbus_names 3.0.0",
"zvariant 4.0.2",
]
[[package]]
name = "zbus_macros"
version = "3.15.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2207eb71efebda17221a579ca78b45c4c5f116f074eb745c3a172e688ccf89f5"
dependencies = [
"proc-macro-crate 1.3.1",
"proc-macro2",
"quote",
"regex",
"syn 1.0.109",
"zvariant_utils",
]
[[package]]
@@ -12358,6 +12433,17 @@ dependencies = [
"zvariant_utils",
]
[[package]]
name = "zbus_names"
version = "2.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "437d738d3750bed6ca9b8d423ccc7a8eb284f6b1d6d4e225a0e4e6258d864c8d"
dependencies = [
"serde",
"static_assertions",
"zvariant 3.15.1",
]
[[package]]
name = "zbus_names"
version = "3.0.0"
@@ -12366,12 +12452,12 @@ checksum = "4b9b1fef7d021261cc16cba64c351d291b715febe0fa10dc3a443ac5a5022e6c"
dependencies = [
"serde",
"static_assertions",
"zvariant",
"zvariant 4.0.2",
]
[[package]]
name = "zed"
version = "0.130.0"
version = "0.129.1"
dependencies = [
"activity_indicator",
"anyhow",
@@ -12402,7 +12488,6 @@ dependencies = [
"extensions_ui",
"feedback",
"file_finder",
"file_icons",
"fs",
"futures 0.3.28",
"go_to_line",
@@ -12437,7 +12522,6 @@ dependencies = [
"settings",
"simplelog",
"smol",
"tab_switcher",
"task",
"tasks_ui",
"terminal_view",
@@ -12466,28 +12550,14 @@ dependencies = [
name = "zed_astro"
version = "0.0.1"
dependencies = [
"zed_extension_api 0.0.4",
"zed_extension_api 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "zed_clojure"
version = "0.0.1"
name = "zed_extension_api"
version = "0.0.4"
dependencies = [
"zed_extension_api 0.0.4",
]
[[package]]
name = "zed_csharp"
version = "0.0.1"
dependencies = [
"zed_extension_api 0.0.4",
]
[[package]]
name = "zed_erlang"
version = "0.0.1"
dependencies = [
"zed_extension_api 0.0.4",
"wit-bindgen",
]
[[package]]
@@ -12499,83 +12569,46 @@ dependencies = [
"wit-bindgen",
]
[[package]]
name = "zed_extension_api"
version = "0.0.5"
dependencies = [
"wit-bindgen",
]
[[package]]
name = "zed_extension_api"
version = "0.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a5f4ae4e302a80591635ef9a236b35fde6fcc26cfd060e66fde4ba9f9fd394a1"
dependencies = [
"wit-bindgen",
]
[[package]]
name = "zed_gleam"
version = "0.0.2"
version = "0.0.1"
dependencies = [
"zed_extension_api 0.0.4",
"zed_extension_api 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "zed_haskell"
version = "0.0.1"
dependencies = [
"zed_extension_api 0.0.4",
]
[[package]]
name = "zed_php"
version = "0.0.1"
dependencies = [
"zed_extension_api 0.0.4",
"zed_extension_api 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "zed_prisma"
version = "0.0.1"
dependencies = [
"zed_extension_api 0.0.4",
"zed_extension_api 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "zed_purescript"
version = "0.0.1"
dependencies = [
"zed_extension_api 0.0.4",
"zed_extension_api 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "zed_svelte"
version = "0.0.1"
dependencies = [
"zed_extension_api 0.0.4",
]
[[package]]
name = "zed_toml"
version = "0.0.2"
dependencies = [
"zed_extension_api 0.0.5 (registry+https://github.com/rust-lang/crates.io-index)",
"zed_extension_api 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "zed_uiua"
version = "0.0.1"
dependencies = [
"zed_extension_api 0.0.4",
]
[[package]]
name = "zed_zig"
version = "0.0.1"
dependencies = [
"zed_extension_api 0.0.5 (registry+https://github.com/rust-lang/crates.io-index)",
"zed_extension_api 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
@@ -12654,6 +12687,21 @@ dependencies = [
"pkg-config",
]
[[package]]
name = "zvariant"
version = "3.15.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c5b4fcf3660d30fc33ae5cd97e2017b23a96e85afd7a1dd014534cd0bf34ba67"
dependencies = [
"byteorder",
"enumflags2",
"libc",
"serde",
"static_assertions",
"url",
"zvariant_derive 3.15.1",
]
[[package]]
name = "zvariant"
version = "4.0.2"
@@ -12664,8 +12712,20 @@ dependencies = [
"enumflags2",
"serde",
"static_assertions",
"url",
"zvariant_derive",
"zvariant_derive 4.0.2",
]
[[package]]
name = "zvariant_derive"
version = "3.15.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0277758a8a0afc0e573e80ed5bfd9d9c2b48bd3108ffe09384f9f738c83f4a55"
dependencies = [
"proc-macro-crate 1.3.1",
"proc-macro2",
"quote",
"syn 1.0.109",
"zvariant_utils",
]
[[package]]

View File

@@ -1,7 +1,6 @@
[workspace]
members = [
"crates/activity_indicator",
"crates/anthropic",
"crates/assets",
"crates/assistant",
"crates/audio",
@@ -29,7 +28,6 @@ members = [
"crates/feature_flags",
"crates/feedback",
"crates/file_finder",
"crates/file_icons",
"crates/fs",
"crates/fsevent",
"crates/fuzzy",
@@ -72,7 +70,6 @@ members = [
"crates/task",
"crates/tasks_ui",
"crates/search",
"crates/semantic_version",
"crates/settings",
"crates/snippet",
"crates/sqlez",
@@ -80,7 +77,6 @@ members = [
"crates/story",
"crates/storybook",
"crates/sum_tree",
"crates/tab_switcher",
"crates/terminal",
"crates/terminal_view",
"crates/text",
@@ -100,18 +96,12 @@ members = [
"crates/zed_actions",
"extensions/astro",
"extensions/clojure",
"extensions/csharp",
"extensions/erlang",
"extensions/gleam",
"extensions/haskell",
"extensions/php",
"extensions/prisma",
"extensions/purescript",
"extensions/svelte",
"extensions/toml",
"extensions/uiua",
"extensions/zig",
"tooling/xtask",
]
@@ -121,7 +111,6 @@ resolver = "2"
[workspace.dependencies]
activity_indicator = { path = "crates/activity_indicator" }
ai = { path = "crates/ai" }
anthropic = { path = "crates/anthropic" }
assets = { path = "crates/assets" }
assistant = { path = "crates/assistant" }
audio = { path = "crates/audio" }
@@ -149,7 +138,6 @@ extensions_ui = { path = "crates/extensions_ui" }
feature_flags = { path = "crates/feature_flags" }
feedback = { path = "crates/feedback" }
file_finder = { path = "crates/file_finder" }
file_icons = { path = "crates/file_icons" }
fs = { path = "crates/fs" }
fsevent = { path = "crates/fsevent" }
fuzzy = { path = "crates/fuzzy" }
@@ -193,7 +181,6 @@ rpc = { path = "crates/rpc" }
task = { path = "crates/task" }
tasks_ui = { path = "crates/tasks_ui" }
search = { path = "crates/search" }
semantic_version = { path = "crates/semantic_version" }
settings = { path = "crates/settings" }
snippet = { path = "crates/snippet" }
sqlez = { path = "crates/sqlez" }
@@ -201,7 +188,6 @@ sqlez_macros = { path = "crates/sqlez_macros" }
story = { path = "crates/story" }
storybook = { path = "crates/storybook" }
sum_tree = { path = "crates/sum_tree" }
tab_switcher = { path = "crates/tab_switcher" }
terminal = { path = "crates/terminal" }
terminal_view = { path = "crates/terminal_view" }
text = { path = "crates/text" }
@@ -291,8 +277,6 @@ tempfile = "3.9.0"
thiserror = "1.0.29"
tiktoken-rs = "0.5.7"
time = { version = "0.3", features = [
"macros",
"parsing",
"serde",
"serde-well-known",
"formatting",
@@ -303,12 +287,15 @@ tower-http = "0.4.4"
tree-sitter = { version = "0.20", features = ["wasm"] }
tree-sitter-bash = { git = "https://github.com/tree-sitter/tree-sitter-bash", rev = "7331995b19b8f8aba2d5e26deb51d2195c18bc94" }
tree-sitter-c = "0.20.1"
tree-sitter-clojure = { git = "https://github.com/prcastro/tree-sitter-clojure", branch = "update-ts" }
tree-sitter-c-sharp = { git = "https://github.com/tree-sitter/tree-sitter-c-sharp", rev = "dd5e59721a5f8dae34604060833902b882023aaf" }
tree-sitter-cpp = { git = "https://github.com/tree-sitter/tree-sitter-cpp", rev = "f44509141e7e483323d2ec178f2d2e6c0fc041c1" }
tree-sitter-css = { git = "https://github.com/tree-sitter/tree-sitter-css", rev = "769203d0f9abe1a9a691ac2b9fe4bb4397a73c51" }
tree-sitter-dart = { git = "https://github.com/agent3bood/tree-sitter-dart", rev = "48934e3bf757a9b78f17bdfaa3e2b4284656fdc7" }
tree-sitter-elixir = { git = "https://github.com/elixir-lang/tree-sitter-elixir", rev = "a2861e88a730287a60c11ea9299c033c7d076e30" }
tree-sitter-elm = { git = "https://github.com/elm-tooling/tree-sitter-elm", rev = "692c50c0b961364c40299e73c1306aecb5d20f40" }
tree-sitter-embedded-template = "0.20.0"
tree-sitter-erlang = "0.4.0"
tree-sitter-glsl = { git = "https://github.com/theHamsta/tree-sitter-glsl", rev = "2a56fb7bc8bb03a1892b4741279dd0a8758b7fb3" }
tree-sitter-go = { git = "https://github.com/tree-sitter/tree-sitter-go", rev = "aeb2f33b366fd78d5789ff104956ce23508b85db" }
tree-sitter-gomod = { git = "https://github.com/camdencheek/tree-sitter-go-mod" }
@@ -324,6 +311,7 @@ tree-sitter-markdown = { git = "https://github.com/MDeiml/tree-sitter-markdown",
tree-sitter-nix = { git = "https://github.com/nix-community/tree-sitter-nix", rev = "66e3e9ce9180ae08fc57372061006ef83f0abde7" }
tree-sitter-nu = { git = "https://github.com/nushell/tree-sitter-nu", rev = "7dd29f9616822e5fc259f5b4ae6c4ded9a71a132" }
tree-sitter-ocaml = { git = "https://github.com/tree-sitter/tree-sitter-ocaml", rev = "4abfdc1c7af2c6c77a370aee974627be1c285b3b" }
tree-sitter-php = "0.21.1"
tree-sitter-proto = { git = "https://github.com/rewinfrey/tree-sitter-proto", rev = "36d54f288aee112f13a67b550ad32634d0c2cb52" }
tree-sitter-python = "0.20.2"
tree-sitter-racket = { git = "https://github.com/zed-industries/tree-sitter-racket", rev = "eb010cf2c674c6fd9a6316a84e28ef90190fe51a" }
@@ -331,9 +319,11 @@ tree-sitter-regex = "0.20.0"
tree-sitter-ruby = "0.20.0"
tree-sitter-rust = "0.20.3"
tree-sitter-scheme = { git = "https://github.com/6cdh/tree-sitter-scheme", rev = "af0fd1fa452cb2562dc7b5c8a8c55551c39273b9" }
tree-sitter-toml = { git = "https://github.com/tree-sitter/tree-sitter-toml", rev = "342d9be207c2dba869b9967124c679b5e6fd0ebe" }
tree-sitter-typescript = { git = "https://github.com/tree-sitter/tree-sitter-typescript", rev = "5d20856f34315b068c41edaee2ac8a100081d259" }
tree-sitter-vue = { git = "https://github.com/zed-industries/tree-sitter-vue", rev = "6608d9d60c386f19d80af7d8132322fa11199c42" }
tree-sitter-yaml = { git = "https://github.com/zed-industries/tree-sitter-yaml", rev = "f545a41f57502e1b5ddf2a6668896c1b0620f930" }
tree-sitter-zig = { git = "https://github.com/maxxnino/tree-sitter-zig", rev = "0d08703e4c3f426ec61695d7617415fff97029bd" }
unindent = "0.1.7"
unicase = "2.6"
url = "2.2"
@@ -394,7 +384,6 @@ debug = "limited"
[profile.dev.package]
taffy = { opt-level = 3 }
cranelift-codegen = { opt-level = 3 }
resvg = { opt-level = 3 }
rustybuzz = { opt-level = 3 }
ttf-parser = { opt-level = 3 }
wasmtime-cranelift = { opt-level = 3 }

View File

@@ -16,9 +16,7 @@
"escape": "menu::Cancel",
"ctrl-escape": "menu::Cancel",
"ctrl-c": "menu::Cancel",
"shift-enter": "picker::UseSelectedQuery",
"alt-enter": ["picker::ConfirmInput", { "secondary": false }],
"ctrl-alt-enter": ["picker::ConfirmInput", { "secondary": true }],
"shift-enter": "menu::UseSelectedQuery",
"ctrl-shift-w": "workspace::CloseWindow",
"shift-escape": "workspace::ToggleZoom",
"ctrl-o": "workspace::Open",
@@ -138,8 +136,7 @@
// ],
"ctrl-alt-space": "editor::ShowCharacterPalette",
"ctrl-;": "editor::ToggleLineNumbers",
"ctrl-k ctrl-r": "editor::RevertSelectedHunks",
"ctrl-alt-g b": "editor::ToggleGitBlame"
"ctrl-k ctrl-r": "editor::RevertSelectedHunks"
}
},
{
@@ -219,7 +216,7 @@
"context": "BufferSearchBar && in_replace",
"bindings": {
"enter": "search::ReplaceNext",
"ctrl-enter": "search::ReplaceAll"
"cmd-enter": "search::ReplaceAll"
}
},
{
@@ -258,7 +255,7 @@
"bindings": {
"escape": "project_search::ToggleFocus",
"alt-tab": "search::CycleMode",
"ctrl-shift-h": "search::ToggleReplace",
"cmd-shift-h": "search::ToggleReplace",
"alt-ctrl-g": "search::ActivateRegexMode",
"alt-ctrl-x": "search::ActivateTextMode"
}
@@ -266,7 +263,9 @@
{
"context": "Pane",
"bindings": {
"ctrl-shift-tab": "pane::ActivatePrevItem",
"ctrl-pageup": "pane::ActivatePrevItem",
"ctrl-tab": "pane::ActivateNextItem",
"ctrl-pagedown": "pane::ActivateNextItem",
"ctrl-w": "pane::CloseActiveItem",
"alt-ctrl-t": "pane::CloseInactiveItems",
@@ -304,10 +303,8 @@
}
],
"ctrl-alt-shift-down": "editor::DuplicateLine",
"ctrl-shift-left": "editor::SelectToPreviousWordStart",
"ctrl-shift-right": "editor::SelectToNextWordEnd",
"ctrl-shift-up": "editor::SelectLargerSyntaxNode", //todo(linux) tmp keybinding
"ctrl-shift-down": "editor::SelectSmallerSyntaxNode", //todo(linux) tmp keybinding
"ctrl-shift-right": "editor::SelectLargerSyntaxNode",
"ctrl-shift-left": "editor::SelectSmallerSyntaxNode",
"ctrl-d": [
"editor::SelectNext",
{
@@ -356,14 +353,14 @@
"ctrl-shift-]": "editor::UnfoldLines",
"ctrl-space": "editor::ShowCompletions",
"ctrl-.": "editor::ToggleCodeActions",
"alt-ctrl-r": "editor::RevealInFinder",
"alt-cmd-r": "editor::RevealInFinder",
"ctrl-alt-shift-c": "editor::DisplayCursorNames"
}
},
{
"context": "Editor && mode == full",
"bindings": {
"ctrl-shift-o": "outline::Toggle",
"cmd-shift-o": "outline::Toggle",
"ctrl-g": "go_to_line::Toggle"
}
},
@@ -421,10 +418,8 @@
"ctrl-shift-f": "pane::DeploySearch",
"ctrl-k ctrl-s": "zed::OpenKeymap",
"ctrl-k ctrl-t": "theme_selector::Toggle",
"ctrl-shift-t": "project_symbols::Toggle",
"ctrl-t": "project_symbols::Toggle",
"ctrl-p": "file_finder::Toggle",
"ctrl-tab": "tab_switcher::Toggle",
"ctrl-shift-tab": ["tab_switcher::Toggle", { "select_last": true }],
"ctrl-e": "file_finder::Toggle",
"ctrl-shift-p": "command_palette::Toggle",
"ctrl-shift-m": "diagnostics::Deploy",
@@ -449,8 +444,6 @@
{
"context": "Editor",
"bindings": {
"ctrl-shift-k": "editor::DeleteLine",
"ctrl-shift-d": "editor::DuplicateLineDown",
"ctrl-j": "editor::JoinLines",
"ctrl-alt-backspace": "editor::DeleteToPreviousSubwordStart",
"ctrl-alt-h": "editor::DeleteToPreviousSubwordStart",
@@ -551,7 +544,7 @@
"delete": "project_panel::Delete",
"ctrl-backspace": ["project_panel::Delete", { "skip_prompt": true }],
"ctrl-delete": ["project_panel::Delete", { "skip_prompt": true }],
"alt-ctrl-r": "project_panel::RevealInFinder",
"alt-cmd-r": "project_panel::RevealInFinder",
"alt-shift-f": "project_panel::NewSearchInDirectory"
}
},
@@ -596,10 +589,6 @@
"context": "FileFinder",
"bindings": { "ctrl-shift-p": "file_finder::SelectPrev" }
},
{
"context": "TabSwitcher",
"bindings": { "ctrl-shift-tab": "menu::SelectPrev" }
},
{
"context": "Terminal",
"bindings": {
@@ -612,12 +601,7 @@
"pagedown": ["terminal::SendKeystroke", "pagedown"],
"escape": ["terminal::SendKeystroke", "escape"],
"enter": ["terminal::SendKeystroke", "enter"],
"ctrl-c": ["terminal::SendKeystroke", "ctrl-c"],
// Some nice conveniences
"ctrl-backspace": ["terminal::SendText", "\u0015"],
"ctrl-right": ["terminal::SendText", "\u0005"],
"ctrl-left": ["terminal::SendText", "\u0001"]
"ctrl-c": ["terminal::SendKeystroke", "ctrl-c"]
}
}
]

View File

@@ -17,11 +17,8 @@
"cmd-enter": "menu::SecondaryConfirm",
"escape": "menu::Cancel",
"cmd-escape": "menu::Cancel",
"ctrl-escape": "menu::Cancel",
"ctrl-c": "menu::Cancel",
"shift-enter": "picker::UseSelectedQuery",
"alt-enter": ["picker::ConfirmInput", { "secondary": false }],
"cmd-alt-enter": ["picker::ConfirmInput", { "secondary": true }],
"shift-enter": "menu::UseSelectedQuery",
"cmd-shift-w": "workspace::CloseWindow",
"shift-escape": "workspace::ToggleZoom",
"cmd-o": "workspace::Open",
@@ -158,8 +155,7 @@
],
"ctrl-cmd-space": "editor::ShowCharacterPalette",
"cmd-;": "editor::ToggleLineNumbers",
"cmd-alt-z": "editor::RevertSelectedHunks",
"cmd-alt-g b": "editor::ToggleGitBlame"
"cmd-alt-z": "editor::RevertSelectedHunks"
}
},
{
@@ -321,8 +317,13 @@
"cmd-shift-k": "editor::DeleteLine",
"alt-up": "editor::MoveLineUp",
"alt-down": "editor::MoveLineDown",
"alt-shift-up": "editor::DuplicateLineUp",
"alt-shift-down": "editor::DuplicateLineDown",
"alt-shift-up": [
"editor::DuplicateLine",
{
"move_upwards": true
}
],
"alt-shift-down": "editor::DuplicateLine",
"ctrl-shift-right": "editor::SelectLargerSyntaxNode",
"ctrl-shift-left": "editor::SelectSmallerSyntaxNode",
"cmd-d": [
@@ -440,8 +441,6 @@
"cmd-k cmd-t": "theme_selector::Toggle",
"cmd-t": "project_symbols::Toggle",
"cmd-p": "file_finder::Toggle",
"ctrl-tab": "tab_switcher::Toggle",
"ctrl-shift-tab": ["tab_switcher::Toggle", { "select_last": true }],
"cmd-shift-p": "command_palette::Toggle",
"cmd-shift-m": "diagnostics::Deploy",
"cmd-shift-e": "project_panel::ToggleFocus",
@@ -604,10 +603,6 @@
"context": "FileFinder",
"bindings": { "cmd-shift-p": "file_finder::SelectPrev" }
},
{
"context": "TabSwitcher",
"bindings": { "ctrl-shift-tab": "menu::SelectPrev" }
},
{
"context": "Terminal",
"bindings": {

View File

@@ -11,7 +11,7 @@
"ctrl->": "zed::IncreaseBufferFontSize",
"ctrl-<": "zed::DecreaseBufferFontSize",
"ctrl-shift-j": "editor::JoinLines",
"cmd-d": "editor::DuplicateLineDown",
"cmd-d": "editor::DuplicateLine",
"cmd-backspace": "editor::DeleteLine",
"cmd-pagedown": "editor::MovePageDown",
"cmd-pageup": "editor::MovePageUp",

View File

@@ -9,7 +9,7 @@
"context": "Editor",
"bindings": {
"cmd-l": "go_to_line::Toggle",
"ctrl-shift-d": "editor::DuplicateLineDown",
"ctrl-shift-d": "editor::DuplicateLine",
"cmd-b": "editor::GoToDefinition",
"cmd-j": "editor::ScrollCursorCenter",
"cmd-enter": "editor::NewlineBelow",

View File

@@ -48,8 +48,7 @@
// which gives the same size as all other panes.
"active_pane_magnification": 1.0,
// The key to use for adding multiple cursors
// Currently "alt" or "cmd_or_ctrl" (also aliased as
// "cmd" and "ctrl") are supported.
// Currently "alt" or "cmd" are supported.
"multi_cursor_modifier": "alt",
// Whether to enable vim modes and key bindings
"vim_mode": false,
@@ -561,9 +560,6 @@
"source.organizeImports": true
}
},
"Make": {
"hard_tabs": true
},
"Markdown": {
"tab_size": 2,
"soft_wrap": "preferred_line_length"

View File

@@ -111,7 +111,7 @@
"hint": "#618399ff",
"hint.background": "#12231fff",
"hint.border": "#183934ff",
"ignored": "#6b6b73ff",
"ignored": "#aca8aeff",
"ignored.background": "#262933ff",
"ignored.border": "#2b2f38ff",
"info": "#10a793ff",

View File

@@ -111,7 +111,7 @@
"hint": "#706897ff",
"hint.background": "#161a35ff",
"hint.border": "#222953ff",
"ignored": "#756f7eff",
"ignored": "#898591ff",
"ignored.background": "#3a353fff",
"ignored.border": "#56505eff",
"info": "#566ddaff",
@@ -495,7 +495,7 @@
"hint": "#776d9dff",
"hint.background": "#e1e0f9ff",
"hint.border": "#c8c7f2ff",
"ignored": "#6e6876ff",
"ignored": "#5a5462ff",
"ignored.background": "#bfbcc5ff",
"ignored.border": "#8f8b96ff",
"info": "#586cdaff",
@@ -879,7 +879,7 @@
"hint": "#b17272ff",
"hint.background": "#171e38ff",
"hint.border": "#262f56ff",
"ignored": "#8f8b77ff",
"ignored": "#a4a08bff",
"ignored.background": "#45433bff",
"ignored.border": "#6c695cff",
"info": "#6684e0ff",
@@ -1263,7 +1263,7 @@
"hint": "#b37979ff",
"hint.background": "#e3e5faff",
"hint.border": "#cdd1f5ff",
"ignored": "#878471ff",
"ignored": "#706d5fff",
"ignored.background": "#cecab4ff",
"ignored.border": "#a8a48eff",
"info": "#6684dfff",
@@ -1647,7 +1647,7 @@
"hint": "#6f815aff",
"hint.background": "#142319ff",
"hint.border": "#1c3927ff",
"ignored": "#7d7c6aff",
"ignored": "#91907fff",
"ignored.background": "#424136ff",
"ignored.border": "#5d5c4cff",
"info": "#36a165ff",
@@ -2031,7 +2031,7 @@
"hint": "#758961ff",
"hint.background": "#d9ecdfff",
"hint.border": "#bbddc6ff",
"ignored": "#767463ff",
"ignored": "#61604fff",
"ignored.background": "#c5c4b9ff",
"ignored.border": "#969585ff",
"info": "#37a165ff",
@@ -2415,7 +2415,7 @@
"hint": "#a77087ff",
"hint.background": "#0f1c3dff",
"hint.border": "#182d5bff",
"ignored": "#8e8683ff",
"ignored": "#a79f9dff",
"ignored.background": "#443c39ff",
"ignored.border": "#665f5cff",
"info": "#407ee6ff",
@@ -2799,7 +2799,7 @@
"hint": "#a67287ff",
"hint.background": "#dfe3fbff",
"hint.border": "#c6cef7ff",
"ignored": "#837b78ff",
"ignored": "#6a6360ff",
"ignored.background": "#ccc7c5ff",
"ignored.border": "#aaa3a1ff",
"info": "#407ee6ff",
@@ -3183,7 +3183,7 @@
"hint": "#8d70a8ff",
"hint.background": "#0d1a43ff",
"hint.border": "#192961ff",
"ignored": "#908190ff",
"ignored": "#a899a8ff",
"ignored.background": "#433a43ff",
"ignored.border": "#675b67ff",
"info": "#5169ebff",
@@ -3567,7 +3567,7 @@
"hint": "#8c70a6ff",
"hint.background": "#e2dffcff",
"hint.border": "#cac7faff",
"ignored": "#857785ff",
"ignored": "#6b5e6bff",
"ignored.background": "#c6b8c6ff",
"ignored.border": "#ad9dadff",
"info": "#5169ebff",
@@ -3951,7 +3951,7 @@
"hint": "#52809aff",
"hint.background": "#121c24ff",
"hint.border": "#1a2f3cff",
"ignored": "#688c9dff",
"ignored": "#7c9fb3ff",
"ignored.background": "#33444dff",
"ignored.border": "#4f6a78ff",
"info": "#267eadff",
@@ -4335,7 +4335,7 @@
"hint": "#5a87a0ff",
"hint.background": "#d8e4eeff",
"hint.border": "#b9cee0ff",
"ignored": "#628496ff",
"ignored": "#526f7dff",
"ignored.background": "#a6cadcff",
"ignored.border": "#80a4b6ff",
"info": "#267eadff",
@@ -4719,7 +4719,7 @@
"hint": "#8a647aff",
"hint.background": "#1c1b29ff",
"hint.border": "#2c2b45ff",
"ignored": "#756e6eff",
"ignored": "#898383ff",
"ignored.background": "#3b3535ff",
"ignored.border": "#564e4eff",
"info": "#7272caff",
@@ -5103,7 +5103,7 @@
"hint": "#91697fff",
"hint.background": "#e4e1f5ff",
"hint.border": "#cecaecff",
"ignored": "#6e6666ff",
"ignored": "#5a5252ff",
"ignored.background": "#c1bbbbff",
"ignored.border": "#8e8989ff",
"info": "#7272caff",
@@ -5487,7 +5487,7 @@
"hint": "#607e76ff",
"hint.background": "#151e20ff",
"hint.border": "#1f3233ff",
"ignored": "#6f7e74ff",
"ignored": "#859188ff",
"ignored.background": "#353f39ff",
"ignored.border": "#505e55ff",
"info": "#468b8fff",
@@ -5871,7 +5871,7 @@
"hint": "#66847cff",
"hint.background": "#dae7e8ff",
"hint.border": "#bed4d6ff",
"ignored": "#68766dff",
"ignored": "#546259ff",
"ignored.background": "#bcc5bfff",
"ignored.border": "#8b968eff",
"info": "#488b90ff",
@@ -6255,7 +6255,7 @@
"hint": "#008b9fff",
"hint.background": "#051949ff",
"hint.border": "#102667ff",
"ignored": "#778f77ff",
"ignored": "#8ba48bff",
"ignored.background": "#3b453bff",
"ignored.border": "#5c6c5cff",
"info": "#3e62f4ff",
@@ -6639,7 +6639,7 @@
"hint": "#008fa1ff",
"hint.background": "#e1ddfeff",
"hint.border": "#c9c4fdff",
"ignored": "#718771ff",
"ignored": "#5f705fff",
"ignored.background": "#b4ceb4ff",
"ignored.border": "#8ea88eff",
"info": "#3e61f4ff",
@@ -7023,7 +7023,7 @@
"hint": "#6c81a5ff",
"hint.background": "#161f2bff",
"hint.border": "#203348ff",
"ignored": "#7e849eff",
"ignored": "#959bb2ff",
"ignored.background": "#3e4769ff",
"ignored.border": "#5b6385ff",
"info": "#3e8ed0ff",
@@ -7407,7 +7407,7 @@
"hint": "#7087b2ff",
"hint.background": "#dde7f6ff",
"hint.border": "#c2d5efff",
"ignored": "#767d9aff",
"ignored": "#5f6789ff",
"ignored.background": "#c1c5d8ff",
"ignored.border": "#9a9fb6ff",
"info": "#3e8fd0ff",

View File

@@ -111,7 +111,7 @@
"hint": "#628b80ff",
"hint.background": "#0d2f4eff",
"hint.border": "#1b4a6eff",
"ignored": "#696a6aff",
"ignored": "#8a8986ff",
"ignored.background": "#313337ff",
"ignored.border": "#3f4043ff",
"info": "#5ac1feff",
@@ -480,7 +480,7 @@
"hint": "#8ca7c2ff",
"hint.background": "#deebfaff",
"hint.border": "#c4daf6ff",
"ignored": "#a9acaeff",
"ignored": "#8b8e92ff",
"ignored.background": "#dcdddeff",
"ignored.border": "#cfd1d2ff",
"info": "#3b9ee5ff",
@@ -849,7 +849,7 @@
"hint": "#7399a3ff",
"hint.background": "#123950ff",
"hint.border": "#24556fff",
"ignored": "#7b7d7fff",
"ignored": "#9a9a98ff",
"ignored.background": "#464a52ff",
"ignored.border": "#53565dff",
"info": "#72cffeff",

View File

@@ -111,7 +111,7 @@
"hint": "#8c957dff",
"hint.background": "#1e2321ff",
"hint.border": "#303a36ff",
"ignored": "#998b78ff",
"ignored": "#c5b597ff",
"ignored.background": "#4c4642ff",
"ignored.border": "#5b534dff",
"info": "#83a598ff",
@@ -485,7 +485,7 @@
"hint": "#6a695bff",
"hint.background": "#1e2321ff",
"hint.border": "#303a36ff",
"ignored": "#998b78ff",
"ignored": "#c5b597ff",
"ignored.background": "#4c4642ff",
"ignored.border": "#5b534dff",
"info": "#83a598ff",
@@ -859,7 +859,7 @@
"hint": "#8c957dff",
"hint.background": "#1e2321ff",
"hint.border": "#303a36ff",
"ignored": "#998b78ff",
"ignored": "#c5b597ff",
"ignored.background": "#4c4642ff",
"ignored.border": "#5b534dff",
"info": "#83a598ff",
@@ -1233,7 +1233,7 @@
"hint": "#677562ff",
"hint.background": "#d2dee2ff",
"hint.border": "#adc5ccff",
"ignored": "#897b6eff",
"ignored": "#5f5650ff",
"ignored.background": "#d9c8a4ff",
"ignored.border": "#c8b899ff",
"info": "#0b6678ff",
@@ -1607,7 +1607,7 @@
"hint": "#677562ff",
"hint.background": "#d2dee2ff",
"hint.border": "#adc5ccff",
"ignored": "#897b6eff",
"ignored": "#5f5650ff",
"ignored.background": "#d9c8a4ff",
"ignored.border": "#c8b899ff",
"info": "#0b6678ff",
@@ -1981,7 +1981,7 @@
"hint": "#677562ff",
"hint.background": "#d2dee2ff",
"hint.border": "#adc5ccff",
"ignored": "#897b6eff",
"ignored": "#5f5650ff",
"ignored.background": "#d9c8a4ff",
"ignored.border": "#c8b899ff",
"info": "#0b6678ff",

View File

@@ -111,7 +111,7 @@
"hint": "#5a6f89ff",
"hint.background": "#18243dff",
"hint.border": "#293b5bff",
"ignored": "#555a63ff",
"ignored": "#838994ff",
"ignored.background": "#3b414dff",
"ignored.border": "#464b57ff",
"info": "#74ade8ff",
@@ -485,7 +485,7 @@
"hint": "#9294beff",
"hint.background": "#e2e2faff",
"hint.border": "#cbcdf6ff",
"ignored": "#a1a1a3ff",
"ignored": "#7e8087ff",
"ignored.background": "#dcdcddff",
"ignored.border": "#c9c9caff",
"info": "#5c78e2ff",

View File

@@ -111,7 +111,7 @@
"hint": "#5e768cff",
"hint.background": "#2f3639ff",
"hint.border": "#435255ff",
"ignored": "#2f2b43ff",
"ignored": "#74708dff",
"ignored.background": "#292738ff",
"ignored.border": "#423f55ff",
"info": "#9bced6ff",
@@ -490,7 +490,7 @@
"hint": "#7a92aaff",
"hint.background": "#dde9ebff",
"hint.border": "#c3d7dbff",
"ignored": "#938fa3ff",
"ignored": "#706c8cff",
"ignored.background": "#dcd8d8ff",
"ignored.border": "#dcd6d5ff",
"info": "#57949fff",
@@ -869,7 +869,7 @@
"hint": "#728aa2ff",
"hint.background": "#2f3639ff",
"hint.border": "#435255ff",
"ignored": "#605d7aff",
"ignored": "#85819eff",
"ignored.background": "#38354eff",
"ignored.border": "#504c68ff",
"info": "#9bced6ff",

View File

@@ -111,7 +111,7 @@
"hint": "#727d68ff",
"hint.background": "#171e1eff",
"hint.border": "#223131ff",
"ignored": "#827568ff",
"ignored": "#a69782ff",
"ignored.background": "#333944ff",
"ignored.border": "#3d4350ff",
"info": "#518b8bff",

View File

@@ -111,7 +111,7 @@
"hint": "#4f8297ff",
"hint.background": "#141f2cff",
"hint.border": "#1b3149ff",
"ignored": "#6f8389ff",
"ignored": "#93a1a1ff",
"ignored.background": "#073743ff",
"ignored.border": "#2b4e58ff",
"info": "#278ad1ff",
@@ -480,7 +480,7 @@
"hint": "#5789a3ff",
"hint.background": "#dbe6f6ff",
"hint.border": "#bfd3efff",
"ignored": "#6a7f86ff",
"ignored": "#34555eff",
"ignored.background": "#cfd0c4ff",
"ignored.border": "#9faaa8ff",
"info": "#288bd1ff",

View File

@@ -111,7 +111,7 @@
"hint": "#246e61ff",
"hint.background": "#0e2242ff",
"hint.border": "#193760ff",
"ignored": "#4c4735ff",
"ignored": "#736e55ff",
"ignored.background": "#2a261cff",
"ignored.border": "#302c21ff",
"info": "#499befff",

View File

@@ -16,7 +16,6 @@ doctest = false
anyhow.workspace = true
auto_update.workspace = true
editor.workspace = true
extension.workspace = true
futures.workspace = true
gpui.workspace = true
language.workspace = true

View File

@@ -1,6 +1,5 @@
use auto_update::{AutoUpdateStatus, AutoUpdater, DismissErrorMessage};
use editor::Editor;
use extension::ExtensionStore;
use futures::StreamExt;
use gpui::{
actions, svg, AppContext, CursorStyle, EventEmitter, InteractiveElement as _, Model,
@@ -289,18 +288,6 @@ impl ActivityIndicator {
};
}
if let Some(extension_store) =
ExtensionStore::try_global(cx).map(|extension_store| extension_store.read(cx))
{
if let Some(extension_id) = extension_store.outstanding_operations().keys().next() {
return Content {
icon: Some(DOWNLOAD_ICON),
message: format!("Updating {extension_id} extension…"),
on_click: None,
};
}
}
Default::default()
}
}

View File

@@ -1,22 +0,0 @@
[package]
name = "anthropic"
version = "0.1.0"
edition = "2021"
publish = false
license = "AGPL-3.0-or-later"
[lib]
path = "src/anthropic.rs"
[dependencies]
anyhow.workspace = true
futures.workspace = true
serde.workspace = true
serde_json.workspace = true
util.workspace = true
[dev-dependencies]
tokio.workspace = true
[lints]
workspace = true

View File

@@ -1 +0,0 @@
../../LICENSE-AGPL

View File

@@ -1,234 +0,0 @@
use anyhow::{anyhow, Result};
use futures::{io::BufReader, stream::BoxStream, AsyncBufReadExt, AsyncReadExt, StreamExt};
use serde::{Deserialize, Serialize};
use std::convert::TryFrom;
use util::http::{AsyncBody, HttpClient, Method, Request as HttpRequest};
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq)]
pub enum Model {
#[default]
#[serde(rename = "claude-3-opus-20240229")]
Claude3Opus,
#[serde(rename = "claude-3-sonnet-20240229")]
Claude3Sonnet,
#[serde(rename = "claude-3-haiku-20240307")]
Claude3Haiku,
}
impl Model {
pub fn from_id(id: &str) -> Result<Self> {
if id.starts_with("claude-3-opus") {
Ok(Self::Claude3Opus)
} else if id.starts_with("claude-3-sonnet") {
Ok(Self::Claude3Sonnet)
} else if id.starts_with("claude-3-haiku") {
Ok(Self::Claude3Haiku)
} else {
Err(anyhow!("Invalid model id: {}", id))
}
}
pub fn display_name(&self) -> &'static str {
match self {
Self::Claude3Opus => "Claude 3 Opus",
Self::Claude3Sonnet => "Claude 3 Sonnet",
Self::Claude3Haiku => "Claude 3 Haiku",
}
}
pub fn max_token_count(&self) -> usize {
200_000
}
}
#[derive(Clone, Copy, Serialize, Deserialize, Debug, Eq, PartialEq)]
#[serde(rename_all = "lowercase")]
pub enum Role {
User,
Assistant,
}
impl TryFrom<String> for Role {
type Error = anyhow::Error;
fn try_from(value: String) -> Result<Self> {
match value.as_str() {
"user" => Ok(Self::User),
"assistant" => Ok(Self::Assistant),
_ => Err(anyhow!("invalid role '{value}'")),
}
}
}
impl From<Role> for String {
fn from(val: Role) -> Self {
match val {
Role::User => "user".to_owned(),
Role::Assistant => "assistant".to_owned(),
}
}
}
#[derive(Debug, Serialize)]
pub struct Request {
pub model: Model,
pub messages: Vec<RequestMessage>,
pub stream: bool,
pub system: String,
pub max_tokens: u32,
}
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct RequestMessage {
pub role: Role,
pub content: String,
}
#[derive(Deserialize, Debug)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum ResponseEvent {
MessageStart {
message: ResponseMessage,
},
ContentBlockStart {
index: u32,
content_block: ContentBlock,
},
Ping {},
ContentBlockDelta {
index: u32,
delta: TextDelta,
},
ContentBlockStop {
index: u32,
},
MessageDelta {
delta: ResponseMessage,
usage: Usage,
},
MessageStop {},
}
#[derive(Deserialize, Debug)]
pub struct ResponseMessage {
#[serde(rename = "type")]
pub message_type: Option<String>,
pub id: Option<String>,
pub role: Option<String>,
pub content: Option<Vec<String>>,
pub model: Option<String>,
pub stop_reason: Option<String>,
pub stop_sequence: Option<String>,
pub usage: Option<Usage>,
}
#[derive(Deserialize, Debug)]
pub struct Usage {
pub input_tokens: Option<u32>,
pub output_tokens: Option<u32>,
}
#[derive(Deserialize, Debug)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum ContentBlock {
Text { text: String },
}
#[derive(Deserialize, Debug)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum TextDelta {
TextDelta { text: String },
}
pub async fn stream_completion(
client: &dyn HttpClient,
api_url: &str,
api_key: &str,
request: Request,
) -> Result<BoxStream<'static, Result<ResponseEvent>>> {
let uri = format!("{api_url}/v1/messages");
let request = HttpRequest::builder()
.method(Method::POST)
.uri(uri)
.header("Anthropic-Version", "2023-06-01")
.header("Anthropic-Beta", "messages-2023-12-15")
.header("X-Api-Key", api_key)
.header("Content-Type", "application/json")
.body(AsyncBody::from(serde_json::to_string(&request)?))?;
let mut response = client.send(request).await?;
if response.status().is_success() {
let reader = BufReader::new(response.into_body());
Ok(reader
.lines()
.filter_map(|line| async move {
match line {
Ok(line) => {
let line = line.strip_prefix("data: ")?;
match serde_json::from_str(line) {
Ok(response) => Some(Ok(response)),
Err(error) => Some(Err(anyhow!(error))),
}
}
Err(error) => Some(Err(anyhow!(error))),
}
})
.boxed())
} else {
let mut body = Vec::new();
response.body_mut().read_to_end(&mut body).await?;
let body_str = std::str::from_utf8(&body)?;
match serde_json::from_str::<ResponseEvent>(body_str) {
Ok(_) => Err(anyhow!(
"Unexpected success response while expecting an error: {}",
body_str,
)),
Err(_) => Err(anyhow!(
"Failed to connect to API: {} {}",
response.status(),
body_str,
)),
}
}
}
// #[cfg(test)]
// mod tests {
// use super::*;
// use util::http::IsahcHttpClient;
// #[tokio::test]
// async fn stream_completion_success() {
// let http_client = IsahcHttpClient::new().unwrap();
// let request = Request {
// model: Model::Claude3Opus,
// messages: vec![RequestMessage {
// role: Role::User,
// content: "Ping".to_string(),
// }],
// stream: true,
// system: "Respond to ping with pong".to_string(),
// max_tokens: 4096,
// };
// let stream = stream_completion(
// &http_client,
// "https://api.anthropic.com",
// &std::env::var("ANTHROPIC_API_KEY").expect("ANTHROPIC_API_KEY not set"),
// request,
// )
// .await
// .unwrap();
// stream
// .for_each(|event| async {
// match event {
// Ok(event) => println!("{:?}", event),
// Err(e) => eprintln!("Error: {:?}", e),
// }
// })
// .await;
// }
// }

View File

@@ -16,7 +16,6 @@ client.workspace = true
collections.workspace = true
command_palette_hooks.workspace = true
editor.workspace = true
file_icons.workspace = true
fs.workspace = true
futures.workspace = true
gpui.workspace = true

View File

@@ -6,8 +6,6 @@ mod prompts;
mod saved_conversation;
mod streaming_diff;
mod embedded_scope;
pub use assistant_panel::AssistantPanel;
use assistant_settings::{AssistantSettings, OpenAiModel, ZedDotDevModel};
use chrono::{DateTime, Local};

View File

@@ -1,14 +1,13 @@
use crate::{
assistant_settings::{AssistantDockPosition, AssistantSettings, ZedDotDevModel},
codegen::{self, Codegen, CodegenKind},
embedded_scope::EmbeddedScope,
prompts::generate_content_prompt,
Assist, CompletionProvider, CycleMessageRole, InlineAssist, LanguageModel,
LanguageModelRequest, LanguageModelRequestMessage, MessageId, MessageMetadata, MessageStatus,
NewConversation, QuoteSelection, ResetKey, Role, SavedConversation, SavedConversationMetadata,
SavedMessage, Split, ToggleFocus, ToggleIncludeConversation,
};
use anyhow::{anyhow, Result};
use anyhow::Result;
use chrono::{DateTime, Local};
use collections::{hash_map, HashMap, HashSet, VecDeque};
use editor::{
@@ -17,10 +16,9 @@ use editor::{
BlockContext, BlockDisposition, BlockId, BlockProperties, BlockStyle, ToDisplayPoint,
},
scroll::{Autoscroll, AutoscrollStrategy},
Anchor, Editor, EditorElement, EditorEvent, EditorStyle, MultiBuffer, MultiBufferSnapshot,
ToOffset as _, ToPoint,
Anchor, Editor, EditorElement, EditorEvent, EditorStyle, MultiBufferSnapshot, ToOffset as _,
ToPoint,
};
use file_icons::FileIcons;
use fs::Fs;
use futures::StreamExt;
use gpui::{
@@ -49,7 +47,7 @@ use uuid::Uuid;
use workspace::{
dock::{DockPosition, Panel, PanelEvent},
searchable::Direction,
Event as WorkspaceEvent, Save, Toast, ToggleZoom, Toolbar, Workspace,
Save, Toast, ToggleZoom, Toolbar, Workspace,
};
pub fn init(cx: &mut AppContext) {
@@ -162,11 +160,6 @@ impl AssistantPanel {
];
let model = CompletionProvider::global(cx).default_model();
cx.observe_global::<FileIcons>(|_, cx| {
cx.notify();
})
.detach();
Self {
workspace: workspace_handle,
active_conversation_editor: None,
@@ -716,20 +709,18 @@ impl AssistantPanel {
});
}
fn new_conversation(&mut self, cx: &mut ViewContext<Self>) -> Option<View<ConversationEditor>> {
let workspace = self.workspace.upgrade()?;
fn new_conversation(&mut self, cx: &mut ViewContext<Self>) -> View<ConversationEditor> {
let editor = cx.new_view(|cx| {
ConversationEditor::new(
self.model.clone(),
self.languages.clone(),
self.fs.clone(),
workspace,
self.workspace.clone(),
cx,
)
});
self.show_conversation(editor.clone(), cx);
Some(editor)
editor
}
fn show_conversation(
@@ -768,18 +759,15 @@ impl AssistantPanel {
open_ai::Model::FourTurbo => open_ai::Model::ThreePointFiveTurbo,
}),
LanguageModel::ZedDotDev(model) => LanguageModel::ZedDotDev(match &model {
ZedDotDevModel::Gpt3Point5Turbo => ZedDotDevModel::Gpt4,
ZedDotDevModel::Gpt4 => ZedDotDevModel::Gpt4Turbo,
ZedDotDevModel::Gpt4Turbo => ZedDotDevModel::Claude3Opus,
ZedDotDevModel::Claude3Opus => ZedDotDevModel::Claude3Sonnet,
ZedDotDevModel::Claude3Sonnet => ZedDotDevModel::Claude3Haiku,
ZedDotDevModel::Claude3Haiku => {
ZedDotDevModel::GptThreePointFiveTurbo => ZedDotDevModel::GptFour,
ZedDotDevModel::GptFour => ZedDotDevModel::GptFourTurbo,
ZedDotDevModel::GptFourTurbo => {
match CompletionProvider::global(cx).default_model() {
LanguageModel::ZedDotDev(custom) => custom,
_ => ZedDotDevModel::Gpt3Point5Turbo,
_ => ZedDotDevModel::GptThreePointFiveTurbo,
}
}
ZedDotDevModel::Custom(_) => ZedDotDevModel::Gpt3Point5Turbo,
ZedDotDevModel::Custom(_) => ZedDotDevModel::GptThreePointFiveTurbo,
}),
};
@@ -1001,15 +989,11 @@ impl AssistantPanel {
.await?;
this.update(&mut cx, |this, cx| {
let workspace = workspace
.upgrade()
.ok_or_else(|| anyhow!("workspace dropped"))?;
let editor = cx.new_view(|cx| {
ConversationEditor::for_conversation(conversation, fs, workspace, cx)
});
this.show_conversation(editor, cx);
anyhow::Ok(())
})??;
})?;
Ok(())
})
}
@@ -1280,10 +1264,9 @@ struct Summary {
done: bool,
}
pub struct Conversation {
struct Conversation {
id: Option<String>,
buffer: Model<Buffer>,
embedded_scope: EmbeddedScope,
message_anchors: Vec<MessageAnchor>,
messages_metadata: HashMap<MessageId, MessageMetadata>,
next_message_id: MessageId,
@@ -1305,7 +1288,6 @@ impl Conversation {
fn new(
model: LanguageModel,
language_registry: Arc<LanguageRegistry>,
embedded_scope: EmbeddedScope,
cx: &mut ModelContext<Self>,
) -> Self {
let markdown = language_registry.language_for_name("Markdown");
@@ -1339,9 +1321,7 @@ impl Conversation {
pending_save: Task::ready(Ok(())),
path: None,
buffer,
embedded_scope,
};
let message = MessageAnchor {
id: MessageId(post_inc(&mut this.next_message_id.0)),
start: language::Anchor::MIN,
@@ -1442,7 +1422,6 @@ impl Conversation {
pending_save: Task::ready(Ok(())),
path: Some(path),
buffer,
embedded_scope: EmbeddedScope::new(),
};
this.count_remaining_tokens(cx);
this
@@ -1461,7 +1440,7 @@ impl Conversation {
}
}
pub(crate) fn count_remaining_tokens(&mut self, cx: &mut ModelContext<Self>) {
fn count_remaining_tokens(&mut self, cx: &mut ModelContext<Self>) {
let request = self.to_completion_request(cx);
self.pending_token_count = cx.spawn(|this, mut cx| {
async move {
@@ -1624,7 +1603,7 @@ impl Conversation {
}
fn to_completion_request(&self, cx: &mut ModelContext<Conversation>) -> LanguageModelRequest {
let mut request = LanguageModelRequest {
let request = LanguageModelRequest {
model: self.model.clone(),
messages: self
.messages(cx)
@@ -1634,9 +1613,6 @@ impl Conversation {
stop: vec![],
temperature: 1.0,
};
let context_message = self.embedded_scope.message(cx);
request.messages.extend(context_message);
request
}
@@ -2026,18 +2002,17 @@ impl ConversationEditor {
model: LanguageModel,
language_registry: Arc<LanguageRegistry>,
fs: Arc<dyn Fs>,
workspace: View<Workspace>,
workspace: WeakView<Workspace>,
cx: &mut ViewContext<Self>,
) -> Self {
let conversation = cx
.new_model(|cx| Conversation::new(model, language_registry, EmbeddedScope::new(), cx));
let conversation = cx.new_model(|cx| Conversation::new(model, language_registry, cx));
Self::for_conversation(conversation, fs, workspace, cx)
}
fn for_conversation(
conversation: Model<Conversation>,
fs: Arc<dyn Fs>,
workspace: View<Workspace>,
workspace: WeakView<Workspace>,
cx: &mut ViewContext<Self>,
) -> Self {
let editor = cx.new_view(|cx| {
@@ -2052,7 +2027,6 @@ impl ConversationEditor {
cx.observe(&conversation, |_, _, cx| cx.notify()),
cx.subscribe(&conversation, Self::handle_conversation_event),
cx.subscribe(&editor, Self::handle_editor_event),
cx.subscribe(&workspace, Self::handle_workspace_event),
];
let mut this = Self {
@@ -2061,10 +2035,9 @@ impl ConversationEditor {
blocks: Default::default(),
scroll_position: None,
fs,
workspace: workspace.downgrade(),
workspace,
_subscriptions,
};
this.update_active_buffer(workspace, cx);
this.update_message_headers(cx);
this
}
@@ -2198,37 +2171,6 @@ impl ConversationEditor {
}
}
fn handle_workspace_event(
&mut self,
workspace: View<Workspace>,
event: &WorkspaceEvent,
cx: &mut ViewContext<Self>,
) {
if let WorkspaceEvent::ActiveItemChanged = event {
self.update_active_buffer(workspace, cx);
}
}
fn update_active_buffer(
&mut self,
workspace: View<Workspace>,
cx: &mut ViewContext<'_, ConversationEditor>,
) {
let active_buffer = workspace
.read(cx)
.active_item(cx)
.and_then(|item| Some(item.act_as::<Editor>(cx)?.read(cx).buffer().clone()));
self.conversation.update(cx, |conversation, cx| {
conversation
.embedded_scope
.set_active_buffer(active_buffer.clone(), cx);
conversation.count_remaining_tokens(cx);
cx.notify();
});
}
fn cursor_scroll_position(&self, cx: &mut ViewContext<Self>) -> Option<ScrollPosition> {
self.editor.update(cx, |editor, cx| {
let snapshot = editor.snapshot(cx);
@@ -2362,11 +2304,11 @@ impl ConversationEditor {
let start_language = buffer.language_at(range.start);
let end_language = buffer.language_at(range.end);
let language_name = if start_language == end_language {
start_language.map(|language| language.code_fence_block_name())
start_language.map(|language| language.name())
} else {
None
};
let language_name = language_name.as_deref().unwrap_or("");
let language_name = language_name.as_deref().unwrap_or("").to_lowercase();
let selected_text = buffer.text_for_range(range).collect::<String>();
let text = if selected_text.is_empty() {
@@ -2390,17 +2332,15 @@ impl ConversationEditor {
if let Some(text) = text {
panel.update(cx, |panel, cx| {
if let Some(conversation) = panel
let conversation = panel
.active_conversation_editor()
.cloned()
.or_else(|| panel.new_conversation(cx))
{
conversation.update(cx, |conversation, cx| {
conversation
.editor
.update(cx, |editor, cx| editor.insert(&text, cx))
});
};
.unwrap_or_else(|| panel.new_conversation(cx));
conversation.update(cx, |conversation, cx| {
conversation
.editor
.update(cx, |editor, cx| editor.insert(&text, cx))
});
});
}
}
@@ -2465,120 +2405,12 @@ impl ConversationEditor {
.map(|summary| summary.text.clone())
.unwrap_or_else(|| "New Conversation".into())
}
fn render_embedded_scope(&self, cx: &mut ViewContext<Self>) -> Option<impl Element> {
let active_buffer = self
.conversation
.read(cx)
.embedded_scope
.active_buffer()?
.clone();
Some(
div()
.p_4()
.v_flex()
.child(
div()
.h_flex()
.items_center()
.child(Icon::new(IconName::File))
.child(
div()
.h_6()
.child(Label::new("File Contexts"))
.ml_1()
.font_weight(FontWeight::SEMIBOLD),
),
)
.child(
div()
.ml_4()
.child(self.render_active_buffer(active_buffer, cx)),
),
)
}
fn render_active_buffer(
&self,
buffer: Model<MultiBuffer>,
cx: &mut ViewContext<Self>,
) -> impl Element {
let buffer = buffer.read(cx);
let icon_path;
let path;
if let Some(singleton) = buffer.as_singleton() {
let singleton = singleton.read(cx);
path = singleton.file().map(|file| file.full_path(cx));
icon_path = path
.as_ref()
.and_then(|path| FileIcons::get_icon(path.as_path(), cx))
.map(SharedString::from)
.unwrap_or_else(|| SharedString::from("icons/file_icons/file.svg"));
} else {
icon_path = SharedString::from("icons/file_icons/file.svg");
path = None;
}
let file_name = path.map_or("Untitled".to_string(), |path| {
path.to_string_lossy().to_string()
});
let enabled = self
.conversation
.read(cx)
.embedded_scope
.active_buffer_enabled();
let file_name_text_color = if enabled {
Color::Default
} else {
Color::Disabled
};
div()
.id("active-buffer")
.h_flex()
.cursor_pointer()
.child(Icon::from_path(icon_path).color(file_name_text_color))
.child(
div()
.h_6()
.child(Label::new(file_name).color(file_name_text_color))
.ml_1(),
)
.children(enabled.then(|| {
div()
.child(Icon::new(IconName::Check).color(file_name_text_color))
.ml_1()
}))
.on_click(cx.listener(move |this, _, cx| {
this.conversation.update(cx, |conversation, cx| {
conversation
.embedded_scope
.set_active_buffer_enabled(!enabled);
cx.notify();
})
}))
}
}
impl EventEmitter<ConversationEditorEvent> for ConversationEditor {}
impl Render for ConversationEditor {
fn render(&mut self, cx: &mut ViewContext<Self>) -> impl Element {
//
// The ConversationEditor has two main segments
//
// 1. Messages Editor
// 2. Context
// - File Context (currently only the active file)
// - Project Diagnostics (Planned)
// - Deep Code Context (Planned, for query and other tools for the model)
//
div()
.key_context("ConversationEditor")
.capture_action(cx.listener(ConversationEditor::cancel_last_assist))
@@ -2588,15 +2420,14 @@ impl Render for ConversationEditor {
.on_action(cx.listener(ConversationEditor::assist))
.on_action(cx.listener(ConversationEditor::split))
.size_full()
.v_flex()
.relative()
.child(
div()
.flex_grow()
.size_full()
.pl_4()
.bg(cx.theme().colors().editor_background)
.child(self.editor.clone()),
)
.child(div().flex_shrink().children(self.render_embedded_scope(cx)))
}
}
@@ -2968,9 +2799,8 @@ mod tests {
init(cx);
let registry = Arc::new(LanguageRegistry::test(cx.background_executor().clone()));
let conversation = cx.new_model(|cx| {
Conversation::new(LanguageModel::default(), registry, EmbeddedScope::new(), cx)
});
let conversation =
cx.new_model(|cx| Conversation::new(LanguageModel::default(), registry, cx));
let buffer = conversation.read(cx).buffer.clone();
let message_1 = conversation.read(cx).message_anchors[0].clone();
@@ -3101,9 +2931,8 @@ mod tests {
init(cx);
let registry = Arc::new(LanguageRegistry::test(cx.background_executor().clone()));
let conversation = cx.new_model(|cx| {
Conversation::new(LanguageModel::default(), registry, EmbeddedScope::new(), cx)
});
let conversation =
cx.new_model(|cx| Conversation::new(LanguageModel::default(), registry, cx));
let buffer = conversation.read(cx).buffer.clone();
let message_1 = conversation.read(cx).message_anchors[0].clone();
@@ -3201,9 +3030,8 @@ mod tests {
cx.set_global(settings_store);
init(cx);
let registry = Arc::new(LanguageRegistry::test(cx.background_executor().clone()));
let conversation = cx.new_model(|cx| {
Conversation::new(LanguageModel::default(), registry, EmbeddedScope::new(), cx)
});
let conversation =
cx.new_model(|cx| Conversation::new(LanguageModel::default(), registry, cx));
let buffer = conversation.read(cx).buffer.clone();
let message_1 = conversation.read(cx).message_anchors[0].clone();
@@ -3287,14 +3115,8 @@ mod tests {
cx.set_global(CompletionProvider::Fake(FakeCompletionProvider::default()));
cx.update(init);
let registry = Arc::new(LanguageRegistry::test(cx.executor()));
let conversation = cx.new_model(|cx| {
Conversation::new(
LanguageModel::default(),
registry.clone(),
EmbeddedScope::new(),
cx,
)
});
let conversation =
cx.new_model(|cx| Conversation::new(LanguageModel::default(), registry.clone(), cx));
let buffer = conversation.read_with(cx, |conversation, _| conversation.buffer.clone());
let message_0 =
conversation.read_with(cx, |conversation, _| conversation.message_anchors[0].id);

View File

@@ -14,13 +14,10 @@ use settings::Settings;
#[derive(Clone, Debug, Default, PartialEq)]
pub enum ZedDotDevModel {
Gpt3Point5Turbo,
Gpt4,
GptThreePointFiveTurbo,
GptFour,
#[default]
Gpt4Turbo,
Claude3Opus,
Claude3Sonnet,
Claude3Haiku,
GptFourTurbo,
Custom(String),
}
@@ -52,9 +49,9 @@ impl<'de> Deserialize<'de> for ZedDotDevModel {
E: de::Error,
{
match value {
"gpt-3.5-turbo" => Ok(ZedDotDevModel::Gpt3Point5Turbo),
"gpt-4" => Ok(ZedDotDevModel::Gpt4),
"gpt-4-turbo-preview" => Ok(ZedDotDevModel::Gpt4Turbo),
"gpt-3.5-turbo" => Ok(ZedDotDevModel::GptThreePointFiveTurbo),
"gpt-4" => Ok(ZedDotDevModel::GptFour),
"gpt-4-turbo-preview" => Ok(ZedDotDevModel::GptFourTurbo),
_ => Ok(ZedDotDevModel::Custom(value.to_owned())),
}
}
@@ -97,34 +94,27 @@ impl JsonSchema for ZedDotDevModel {
impl ZedDotDevModel {
pub fn id(&self) -> &str {
match self {
Self::Gpt3Point5Turbo => "gpt-3.5-turbo",
Self::Gpt4 => "gpt-4",
Self::Gpt4Turbo => "gpt-4-turbo-preview",
Self::Claude3Opus => "claude-3-opus",
Self::Claude3Sonnet => "claude-3-sonnet",
Self::Claude3Haiku => "claude-3-haiku",
Self::GptThreePointFiveTurbo => "gpt-3.5-turbo",
Self::GptFour => "gpt-4",
Self::GptFourTurbo => "gpt-4-turbo-preview",
Self::Custom(id) => id,
}
}
pub fn display_name(&self) -> &str {
match self {
Self::Gpt3Point5Turbo => "GPT 3.5 Turbo",
Self::Gpt4 => "GPT 4",
Self::Gpt4Turbo => "GPT 4 Turbo",
Self::Claude3Opus => "Claude 3 Opus",
Self::Claude3Sonnet => "Claude 3 Sonnet",
Self::Claude3Haiku => "Claude 3 Haiku",
Self::GptThreePointFiveTurbo => "gpt-3.5-turbo",
Self::GptFour => "gpt-4",
Self::GptFourTurbo => "gpt-4-turbo",
Self::Custom(id) => id.as_str(),
}
}
pub fn max_token_count(&self) -> usize {
match self {
Self::Gpt3Point5Turbo => 2048,
Self::Gpt4 => 4096,
Self::Gpt4Turbo => 128000,
Self::Claude3Opus | Self::Claude3Sonnet | Self::Claude3Haiku => 200000,
Self::GptThreePointFiveTurbo => 2048,
Self::GptFour => 4096,
Self::GptFourTurbo => 128000,
Self::Custom(_) => 4096, // TODO: Make this configurable
}
}

View File

@@ -1,5 +1,5 @@
use crate::{
assistant_settings::ZedDotDevModel, count_open_ai_tokens, CompletionProvider, LanguageModel,
assistant_settings::ZedDotDevModel, count_open_ai_tokens, CompletionProvider,
LanguageModelRequest,
};
use anyhow::{anyhow, Result};
@@ -78,21 +78,13 @@ impl ZedDotDevCompletionProvider {
cx: &AppContext,
) -> BoxFuture<'static, Result<usize>> {
match request.model {
LanguageModel::OpenAi(_) => future::ready(Err(anyhow!("invalid model"))).boxed(),
LanguageModel::ZedDotDev(ZedDotDevModel::Gpt4)
| LanguageModel::ZedDotDev(ZedDotDevModel::Gpt4Turbo)
| LanguageModel::ZedDotDev(ZedDotDevModel::Gpt3Point5Turbo) => {
crate::LanguageModel::OpenAi(_) => future::ready(Err(anyhow!("invalid model"))).boxed(),
crate::LanguageModel::ZedDotDev(ZedDotDevModel::GptFour)
| crate::LanguageModel::ZedDotDev(ZedDotDevModel::GptFourTurbo)
| crate::LanguageModel::ZedDotDev(ZedDotDevModel::GptThreePointFiveTurbo) => {
count_open_ai_tokens(request, cx.background_executor())
}
LanguageModel::ZedDotDev(
ZedDotDevModel::Claude3Opus
| ZedDotDevModel::Claude3Sonnet
| ZedDotDevModel::Claude3Haiku,
) => {
// Can't find a tokenizer for Claude 3, so for now just use the same as OpenAI's as an approximation.
count_open_ai_tokens(request, cx.background_executor())
}
LanguageModel::ZedDotDev(ZedDotDevModel::Custom(model)) => {
crate::LanguageModel::ZedDotDev(ZedDotDevModel::Custom(model)) => {
let request = self.client.request(proto::CountTokensWithLanguageModel {
model,
messages: request

View File

@@ -1,91 +0,0 @@
use editor::MultiBuffer;
use gpui::{AppContext, Model, ModelContext, Subscription};
use crate::{assistant_panel::Conversation, LanguageModelRequestMessage, Role};
#[derive(Default)]
pub struct EmbeddedScope {
active_buffer: Option<Model<MultiBuffer>>,
active_buffer_enabled: bool,
active_buffer_subscription: Option<Subscription>,
}
impl EmbeddedScope {
pub fn new() -> Self {
Self {
active_buffer: None,
active_buffer_enabled: true,
active_buffer_subscription: None,
}
}
pub fn set_active_buffer(
&mut self,
buffer: Option<Model<MultiBuffer>>,
cx: &mut ModelContext<Conversation>,
) {
self.active_buffer_subscription.take();
if let Some(active_buffer) = buffer.clone() {
self.active_buffer_subscription =
Some(cx.subscribe(&active_buffer, |conversation, _, e, cx| {
if let multi_buffer::Event::Edited { .. } = e {
conversation.count_remaining_tokens(cx)
}
}));
}
self.active_buffer = buffer;
}
pub fn active_buffer(&self) -> Option<&Model<MultiBuffer>> {
self.active_buffer.as_ref()
}
pub fn active_buffer_enabled(&self) -> bool {
self.active_buffer_enabled
}
pub fn set_active_buffer_enabled(&mut self, enabled: bool) {
self.active_buffer_enabled = enabled;
}
/// Provide a message for the language model based on the active buffer.
pub fn message(&self, cx: &AppContext) -> Option<LanguageModelRequestMessage> {
if !self.active_buffer_enabled {
return None;
}
let active_buffer = self.active_buffer.as_ref()?;
let buffer = active_buffer.read(cx);
if let Some(singleton) = buffer.as_singleton() {
let singleton = singleton.read(cx);
let filename = singleton
.file()
.map(|file| file.path().to_string_lossy())
.unwrap_or("Untitled".into());
let text = singleton.text();
let language = singleton
.language()
.map(|l| {
let name = l.code_fence_block_name();
name.to_string()
})
.unwrap_or_default();
let markdown =
format!("User's active file `{filename}`:\n\n```{language}\n{text}```\n\n");
return Some(LanguageModelRequestMessage {
role: Role::System,
content: markdown,
});
}
None
}
}

View File

@@ -1,4 +1,4 @@
#![cfg_attr(any(target_os = "linux", target_os = "windows"), allow(dead_code))]
#![cfg_attr(target_os = "linux", allow(dead_code))]
use anyhow::{anyhow, Context, Result};
use clap::Parser;

View File

@@ -590,10 +590,7 @@ mod tests {
}
#[gpui::test]
async fn test_telemetry_flush_on_flush_interval(
executor: BackgroundExecutor,
cx: &mut TestAppContext,
) {
async fn test_connection_timeout(executor: BackgroundExecutor, cx: &mut TestAppContext) {
init_test(cx);
let clock = Arc::new(FakeSystemClock::new(
Utc.with_ymd_and_hms(1990, 4, 12, 12, 0, 0).unwrap(),

View File

@@ -18,7 +18,6 @@ sqlite = ["sea-orm/sqlx-sqlite", "sqlx/sqlite"]
test-support = ["sqlite"]
[dependencies]
anthropic.workspace = true
anyhow.workspace = true
async-tungstenite = "0.16"
aws-config = { version = "1.1.5" }
@@ -47,7 +46,6 @@ reqwest = { version = "0.11", features = ["json"] }
rpc.workspace = true
scrypt = "0.7"
sea-orm = { version = "0.12.x", features = ["sqlx-postgres", "postgres-array", "runtime-tokio-rustls", "with-uuid"] }
semantic_version.workspace = true
semver.workspace = true
serde.workspace = true
serde_derive.workspace = true

View File

@@ -1,12 +0,0 @@
[Interface]
PrivateKey = B5Fp/yVfP0QYlb+YJv9ea+EMI1mWODPD3akh91cVjvc=
Address = fdaa:0:2ce3:a7b:bea:0:a:2/120
DNS = fdaa:0:2ce3::3
[Peer]
PublicKey = RKAYPljEJiuaELNDdQIEJmQienT9+LRISfIHwH45HAw=
AllowedIPs = fdaa:0:2ce3::/48
Endpoint = ord1.gateway.6pn.dev:51820
PersistentKeepalive = 15

View File

@@ -130,11 +130,6 @@ spec:
secretKeyRef:
name: openai
key: api_key
- name: ANTHROPIC_API_KEY
valueFrom:
secretKeyRef:
name: anthropic
key: api_key
- name: BLOB_STORE_ACCESS_KEY
valueFrom:
secretKeyRef:

View File

@@ -10,7 +10,6 @@ use axum::{
Extension, Router, TypedHeader,
};
use rpc::ExtensionMetadata;
use semantic_version::SemanticVersion;
use serde::{Serialize, Serializer};
use sha2::{Digest, Sha256};
use std::sync::{Arc, OnceLock};
@@ -18,6 +17,7 @@ use telemetry_events::{
ActionEvent, AppEvent, AssistantEvent, CallEvent, CopilotEvent, CpuEvent, EditEvent,
EditorEvent, Event, EventRequestBody, EventWrapper, ExtensionEvent, MemoryEvent, SettingEvent,
};
use util::SemanticVersion;
pub fn router() -> Router {
Router::new()
@@ -459,12 +459,6 @@ impl ToUpload {
}
insert.end().await?;
let event_count = rows.len();
log::info!(
"wrote {event_count} {event_specifier} to '{table}'",
event_specifier = if event_count == 1 { "event" } else { "events" }
);
}
Ok(())
@@ -528,9 +522,9 @@ impl EditorEventRow {
Self {
app_version: body.app_version.clone(),
major: semver.map(|v| v.major() as i32),
minor: semver.map(|v| v.minor() as i32),
patch: semver.map(|v| v.patch() as i32),
major: semver.map(|s| s.major as i32),
minor: semver.map(|s| s.minor as i32),
patch: semver.map(|s| s.patch as i32),
release_channel: body.release_channel.clone().unwrap_or_default(),
os_name: body.os_name.clone(),
os_version: body.os_version.clone().unwrap_or_default(),
@@ -590,9 +584,9 @@ impl CopilotEventRow {
Self {
app_version: body.app_version.clone(),
major: semver.map(|v| v.major() as i32),
minor: semver.map(|v| v.minor() as i32),
patch: semver.map(|v| v.patch() as i32),
major: semver.map(|s| s.major as i32),
minor: semver.map(|s| s.minor as i32),
patch: semver.map(|s| s.patch as i32),
release_channel: body.release_channel.clone().unwrap_or_default(),
os_name: body.os_name.clone(),
os_version: body.os_version.clone().unwrap_or_default(),
@@ -645,9 +639,9 @@ impl CallEventRow {
Self {
app_version: body.app_version.clone(),
major: semver.map(|v| v.major() as i32),
minor: semver.map(|v| v.minor() as i32),
patch: semver.map(|v| v.patch() as i32),
major: semver.map(|s| s.major as i32),
minor: semver.map(|s| s.minor as i32),
patch: semver.map(|s| s.patch as i32),
release_channel: body.release_channel.clone().unwrap_or_default(),
installation_id: body.installation_id.clone().unwrap_or_default(),
session_id: body.session_id.clone(),
@@ -694,9 +688,9 @@ impl AssistantEventRow {
Self {
app_version: body.app_version.clone(),
major: semver.map(|v| v.major() as i32),
minor: semver.map(|v| v.minor() as i32),
patch: semver.map(|v| v.patch() as i32),
major: semver.map(|s| s.major as i32),
minor: semver.map(|s| s.minor as i32),
patch: semver.map(|s| s.patch as i32),
release_channel: body.release_channel.clone().unwrap_or_default(),
installation_id: body.installation_id.clone(),
session_id: body.session_id.clone(),
@@ -738,9 +732,9 @@ impl CpuEventRow {
Self {
app_version: body.app_version.clone(),
major: semver.map(|v| v.major() as i32),
minor: semver.map(|v| v.minor() as i32),
patch: semver.map(|v| v.patch() as i32),
major: semver.map(|s| s.major as i32),
minor: semver.map(|s| s.minor as i32),
patch: semver.map(|s| s.patch as i32),
release_channel: body.release_channel.clone().unwrap_or_default(),
installation_id: body.installation_id.clone(),
session_id: body.session_id.clone(),
@@ -785,9 +779,9 @@ impl MemoryEventRow {
Self {
app_version: body.app_version.clone(),
major: semver.map(|v| v.major() as i32),
minor: semver.map(|v| v.minor() as i32),
patch: semver.map(|v| v.patch() as i32),
major: semver.map(|s| s.major as i32),
minor: semver.map(|s| s.minor as i32),
patch: semver.map(|s| s.patch as i32),
release_channel: body.release_channel.clone().unwrap_or_default(),
installation_id: body.installation_id.clone(),
session_id: body.session_id.clone(),
@@ -831,9 +825,9 @@ impl AppEventRow {
Self {
app_version: body.app_version.clone(),
major: semver.map(|v| v.major() as i32),
minor: semver.map(|v| v.minor() as i32),
patch: semver.map(|v| v.patch() as i32),
major: semver.map(|s| s.major as i32),
minor: semver.map(|s| s.minor as i32),
patch: semver.map(|s| s.patch as i32),
release_channel: body.release_channel.clone().unwrap_or_default(),
installation_id: body.installation_id.clone(),
session_id: body.session_id.clone(),
@@ -876,9 +870,9 @@ impl SettingEventRow {
Self {
app_version: body.app_version.clone(),
major: semver.map(|v| v.major() as i32),
minor: semver.map(|v| v.minor() as i32),
patch: semver.map(|v| v.patch() as i32),
major: semver.map(|s| s.major as i32),
minor: semver.map(|s| s.minor as i32),
patch: semver.map(|s| s.patch as i32),
release_channel: body.release_channel.clone().unwrap_or_default(),
installation_id: body.installation_id.clone(),
session_id: body.session_id.clone(),
@@ -927,9 +921,9 @@ impl ExtensionEventRow {
Self {
app_version: body.app_version.clone(),
major: semver.map(|v| v.major() as i32),
minor: semver.map(|v| v.minor() as i32),
patch: semver.map(|v| v.patch() as i32),
major: semver.map(|s| s.major as i32),
minor: semver.map(|s| s.minor as i32),
patch: semver.map(|s| s.patch as i32),
release_channel: body.release_channel.clone().unwrap_or_default(),
installation_id: body.installation_id.clone(),
session_id: body.session_id.clone(),
@@ -991,9 +985,9 @@ impl EditEventRow {
Self {
app_version: body.app_version.clone(),
major: semver.map(|v| v.major() as i32),
minor: semver.map(|v| v.minor() as i32),
patch: semver.map(|v| v.patch() as i32),
major: semver.map(|s| s.major as i32),
minor: semver.map(|s| s.minor as i32),
patch: semver.map(|s| s.patch as i32),
release_channel: body.release_channel.clone().unwrap_or_default(),
installation_id: body.installation_id.clone(),
session_id: body.session_id.clone(),
@@ -1040,9 +1034,9 @@ impl ActionEventRow {
Self {
app_version: body.app_version.clone(),
major: semver.map(|v| v.major() as i32),
minor: semver.map(|v| v.minor() as i32),
patch: semver.map(|v| v.patch() as i32),
major: semver.map(|s| s.major as i32),
minor: semver.map(|s| s.minor as i32),
patch: semver.map(|s| s.patch as i32),
release_channel: body.release_channel.clone().unwrap_or_default(),
installation_id: body.installation_id.clone(),
session_id: body.session_id.clone(),

View File

@@ -1,4 +1,3 @@
use crate::db::ExtensionVersionConstraints;
use crate::{db::NewExtensionVersion, AppState, Error, Result};
use anyhow::{anyhow, Context as _};
use aws_sdk_s3::presigning::PresigningConfig;
@@ -11,17 +10,14 @@ use axum::{
};
use collections::HashMap;
use rpc::{ExtensionApiManifest, GetExtensionsResponse};
use semantic_version::SemanticVersion;
use serde::Deserialize;
use std::{sync::Arc, time::Duration};
use time::PrimitiveDateTime;
use util::{maybe, ResultExt};
use util::ResultExt;
pub fn router() -> Router {
Router::new()
.route("/extensions", get(get_extensions))
.route("/extensions/updates", get(get_extension_updates))
.route("/extensions/:extension_id", get(get_extension_versions))
.route(
"/extensions/:extension_id/download",
get(download_latest_extension),
@@ -36,103 +32,38 @@ pub fn router() -> Router {
struct GetExtensionsParams {
filter: Option<String>,
#[serde(default)]
ids: Option<String>,
#[serde(default)]
max_schema_version: i32,
}
#[derive(Debug, Deserialize)]
struct DownloadLatestExtensionParams {
extension_id: String,
}
#[derive(Debug, Deserialize)]
struct DownloadExtensionParams {
extension_id: String,
version: String,
}
async fn get_extensions(
Extension(app): Extension<Arc<AppState>>,
Query(params): Query<GetExtensionsParams>,
) -> Result<Json<GetExtensionsResponse>> {
let extension_ids = params
.ids
.as_ref()
.map(|s| s.split(',').map(|s| s.trim()).collect::<Vec<_>>());
let extensions = if let Some(extension_ids) = extension_ids {
app.db.get_extensions_by_ids(&extension_ids, None).await?
} else {
app.db
.get_extensions(params.filter.as_deref(), params.max_schema_version, 500)
.await?
};
Ok(Json(GetExtensionsResponse { data: extensions }))
}
#[derive(Debug, Deserialize)]
struct GetExtensionUpdatesParams {
ids: String,
min_schema_version: i32,
max_schema_version: i32,
min_wasm_api_version: SemanticVersion,
max_wasm_api_version: SemanticVersion,
}
async fn get_extension_updates(
Extension(app): Extension<Arc<AppState>>,
Query(params): Query<GetExtensionUpdatesParams>,
) -> Result<Json<GetExtensionsResponse>> {
let constraints = ExtensionVersionConstraints {
schema_versions: params.min_schema_version..=params.max_schema_version,
wasm_api_versions: params.min_wasm_api_version..=params.max_wasm_api_version,
};
let extension_ids = params.ids.split(',').map(|s| s.trim()).collect::<Vec<_>>();
let extensions = app
.db
.get_extensions_by_ids(&extension_ids, Some(&constraints))
.get_extensions(params.filter.as_deref(), params.max_schema_version, 500)
.await?;
Ok(Json(GetExtensionsResponse { data: extensions }))
}
#[derive(Debug, Deserialize)]
struct GetExtensionVersionsParams {
extension_id: String,
}
async fn get_extension_versions(
Extension(app): Extension<Arc<AppState>>,
Path(params): Path<GetExtensionVersionsParams>,
) -> Result<Json<GetExtensionsResponse>> {
let extension_versions = app.db.get_extension_versions(&params.extension_id).await?;
Ok(Json(GetExtensionsResponse {
data: extension_versions,
}))
}
#[derive(Debug, Deserialize)]
struct DownloadLatestExtensionParams {
extension_id: String,
min_schema_version: Option<i32>,
max_schema_version: Option<i32>,
min_wasm_api_version: Option<SemanticVersion>,
max_wasm_api_version: Option<SemanticVersion>,
}
async fn download_latest_extension(
Extension(app): Extension<Arc<AppState>>,
Path(params): Path<DownloadLatestExtensionParams>,
) -> Result<Redirect> {
let constraints = maybe!({
let min_schema_version = params.min_schema_version?;
let max_schema_version = params.max_schema_version?;
let min_wasm_api_version = params.min_wasm_api_version?;
let max_wasm_api_version = params.max_wasm_api_version?;
Some(ExtensionVersionConstraints {
schema_versions: min_schema_version..=max_schema_version,
wasm_api_versions: min_wasm_api_version..=max_wasm_api_version,
})
});
let extension = app
.db
.get_extension(&params.extension_id, constraints.as_ref())
.get_extension(&params.extension_id)
.await?
.ok_or_else(|| anyhow!("unknown extension"))?;
download_extension(
@@ -145,12 +76,6 @@ async fn download_latest_extension(
.await
}
#[derive(Debug, Deserialize)]
struct DownloadExtensionParams {
extension_id: String,
version: String,
}
async fn download_extension(
Extension(app): Extension<Arc<AppState>>,
Path(params): Path<DownloadExtensionParams>,

View File

@@ -1,8 +1,9 @@
use collections::HashMap;
use semantic_version::SemanticVersion;
use serde::{Deserialize, Serialize};
use serde_derive::Deserialize;
use serde_derive::Serialize;
use serde_json::Value;
use util::SemanticVersion;
#[derive(Debug)]
pub struct IpsFile {

View File

@@ -21,13 +21,11 @@ use sea_orm::{
FromQueryResult, IntoActiveModel, IsolationLevel, JoinType, QueryOrder, QuerySelect, Statement,
TransactionTrait,
};
use semantic_version::SemanticVersion;
use serde::{Deserialize, Serialize};
use serde::{ser::Error as _, Deserialize, Serialize, Serializer};
use sqlx::{
migrate::{Migrate, Migration, MigrationSource},
Connection,
};
use std::ops::RangeInclusive;
use std::{
fmt::Write as _,
future::Future,
@@ -38,7 +36,7 @@ use std::{
sync::Arc,
time::Duration,
};
use time::PrimitiveDateTime;
use time::{format_description::well_known::iso8601, PrimitiveDateTime};
use tokio::sync::{Mutex, OwnedMutexGuard};
#[cfg(test)]
@@ -732,7 +730,20 @@ pub struct NewExtensionVersion {
pub published_at: PrimitiveDateTime,
}
pub struct ExtensionVersionConstraints {
pub schema_versions: RangeInclusive<i32>,
pub wasm_api_versions: RangeInclusive<SemanticVersion>,
pub fn serialize_iso8601<S: Serializer>(
datetime: &PrimitiveDateTime,
serializer: S,
) -> Result<S::Ok, S::Error> {
const SERDE_CONFIG: iso8601::EncodedConfig = iso8601::Config::DEFAULT
.set_year_is_six_digits(false)
.set_time_precision(iso8601::TimePrecision::Second {
decimal_digits: None,
})
.encode();
datetime
.assume_utc()
.format(&time::format_description::well_known::Iso8601::<SERDE_CONFIG>)
.map_err(S::Error::custom)?
.serialize(serializer)
}

View File

@@ -1,8 +1,4 @@
use std::str::FromStr;
use chrono::Utc;
use sea_orm::sea_query::IntoCondition;
use util::ResultExt;
use super::*;
@@ -14,163 +10,53 @@ impl Database {
limit: usize,
) -> Result<Vec<ExtensionMetadata>> {
self.transaction(|tx| async move {
let mut condition = Condition::all()
.add(
extension::Column::LatestVersion
.into_expr()
.eq(extension_version::Column::Version.into_expr()),
)
.add(extension_version::Column::SchemaVersion.lte(max_schema_version));
let mut condition = Condition::all().add(
extension::Column::LatestVersion
.into_expr()
.eq(extension_version::Column::Version.into_expr()),
);
if let Some(filter) = filter {
let fuzzy_name_filter = Self::fuzzy_like_string(filter);
condition = condition.add(Expr::cust_with_expr("name ILIKE $1", fuzzy_name_filter));
}
self.get_extensions_where(condition, Some(limit as u64), &tx)
.await
})
.await
}
pub async fn get_extensions_by_ids(
&self,
ids: &[&str],
constraints: Option<&ExtensionVersionConstraints>,
) -> Result<Vec<ExtensionMetadata>> {
self.transaction(|tx| async move {
let extensions = extension::Entity::find()
.filter(extension::Column::ExternalId.is_in(ids.iter().copied()))
.inner_join(extension_version::Entity)
.select_also(extension_version::Entity)
.filter(condition)
.filter(extension_version::Column::SchemaVersion.lte(max_schema_version))
.order_by_desc(extension::Column::TotalDownloadCount)
.order_by_asc(extension::Column::Name)
.limit(Some(limit as u64))
.all(&*tx)
.await?;
let mut max_versions = self
.get_latest_versions_for_extensions(&extensions, constraints, &tx)
.await?;
Ok(extensions
.into_iter()
.filter_map(|extension| {
let (version, _) = max_versions.remove(&extension.id)?;
Some(metadata_from_extension_and_version(extension, version))
.filter_map(|(extension, version)| {
Some(metadata_from_extension_and_version(extension, version?))
})
.collect())
})
.await
}
async fn get_latest_versions_for_extensions(
&self,
extensions: &[extension::Model],
constraints: Option<&ExtensionVersionConstraints>,
tx: &DatabaseTransaction,
) -> Result<HashMap<ExtensionId, (extension_version::Model, SemanticVersion)>> {
let mut versions = extension_version::Entity::find()
.filter(
extension_version::Column::ExtensionId
.is_in(extensions.iter().map(|extension| extension.id)),
)
.stream(tx)
.await?;
let mut max_versions =
HashMap::<ExtensionId, (extension_version::Model, SemanticVersion)>::default();
while let Some(version) = versions.next().await {
let version = version?;
let Some(extension_version) = SemanticVersion::from_str(&version.version).log_err()
else {
continue;
};
if let Some((_, max_extension_version)) = &max_versions.get(&version.extension_id) {
if max_extension_version > &extension_version {
continue;
}
}
if let Some(constraints) = constraints {
if !constraints
.schema_versions
.contains(&version.schema_version)
{
continue;
}
if let Some(wasm_api_version) = version.wasm_api_version.as_ref() {
if let Some(version) = SemanticVersion::from_str(wasm_api_version).log_err() {
if !constraints.wasm_api_versions.contains(&version) {
continue;
}
} else {
continue;
}
}
}
max_versions.insert(version.extension_id, (version, extension_version));
}
Ok(max_versions)
}
/// Returns all of the versions for the extension with the given ID.
pub async fn get_extension_versions(
&self,
extension_id: &str,
) -> Result<Vec<ExtensionMetadata>> {
self.transaction(|tx| async move {
let condition = extension::Column::ExternalId
.eq(extension_id)
.into_condition();
self.get_extensions_where(condition, None, &tx).await
})
.await
}
async fn get_extensions_where(
&self,
condition: Condition,
limit: Option<u64>,
tx: &DatabaseTransaction,
) -> Result<Vec<ExtensionMetadata>> {
let extensions = extension::Entity::find()
.inner_join(extension_version::Entity)
.select_also(extension_version::Entity)
.filter(condition)
.order_by_desc(extension::Column::TotalDownloadCount)
.order_by_asc(extension::Column::Name)
.limit(limit)
.all(tx)
.await?;
Ok(extensions
.into_iter()
.filter_map(|(extension, version)| {
Some(metadata_from_extension_and_version(extension, version?))
})
.collect())
}
pub async fn get_extension(
&self,
extension_id: &str,
constraints: Option<&ExtensionVersionConstraints>,
) -> Result<Option<ExtensionMetadata>> {
pub async fn get_extension(&self, extension_id: &str) -> Result<Option<ExtensionMetadata>> {
self.transaction(|tx| async move {
let extension = extension::Entity::find()
.filter(extension::Column::ExternalId.eq(extension_id))
.filter(
extension::Column::LatestVersion
.into_expr()
.eq(extension_version::Column::Version.into_expr()),
)
.inner_join(extension_version::Entity)
.select_also(extension_version::Entity)
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("no such extension: {extension_id}"))?;
let extensions = [extension];
let mut versions = self
.get_latest_versions_for_extensions(&extensions, constraints, &tx)
.await?;
let [extension] = extensions;
Ok(versions.remove(&extension.id).map(|(max_version, _)| {
metadata_from_extension_and_version(extension, max_version)
Ok(extension.and_then(|(extension, version)| {
Some(metadata_from_extension_and_version(extension, version?))
}))
})
.await

View File

@@ -1,5 +1,4 @@
use super::Database;
use crate::db::ExtensionVersionConstraints;
use crate::{
db::{queries::extensions::convert_time_to_chrono, ExtensionMetadata, NewExtensionVersion},
test_both_dbs,
@@ -279,108 +278,3 @@ async fn test_extensions(db: &Arc<Database>) {
]
);
}
test_both_dbs!(
test_extensions_by_id,
test_extensions_by_id_postgres,
test_extensions_by_id_sqlite
);
async fn test_extensions_by_id(db: &Arc<Database>) {
let versions = db.get_known_extension_versions().await.unwrap();
assert!(versions.is_empty());
let extensions = db.get_extensions(None, 1, 5).await.unwrap();
assert!(extensions.is_empty());
let t0 = time::OffsetDateTime::from_unix_timestamp_nanos(0).unwrap();
let t0 = time::PrimitiveDateTime::new(t0.date(), t0.time());
let t0_chrono = convert_time_to_chrono(t0);
db.insert_extension_versions(
&[
(
"ext1",
vec![
NewExtensionVersion {
name: "Extension 1".into(),
version: semver::Version::parse("0.0.1").unwrap(),
description: "an extension".into(),
authors: vec!["max".into()],
repository: "ext1/repo".into(),
schema_version: 1,
wasm_api_version: Some("0.0.4".into()),
published_at: t0,
},
NewExtensionVersion {
name: "Extension 1".into(),
version: semver::Version::parse("0.0.2").unwrap(),
description: "a good extension".into(),
authors: vec!["max".into()],
repository: "ext1/repo".into(),
schema_version: 1,
wasm_api_version: Some("0.0.4".into()),
published_at: t0,
},
NewExtensionVersion {
name: "Extension 1".into(),
version: semver::Version::parse("0.0.3").unwrap(),
description: "a real good extension".into(),
authors: vec!["max".into(), "marshall".into()],
repository: "ext1/repo".into(),
schema_version: 1,
wasm_api_version: Some("0.0.5".into()),
published_at: t0,
},
],
),
(
"ext2",
vec![NewExtensionVersion {
name: "Extension 2".into(),
version: semver::Version::parse("0.2.0").unwrap(),
description: "a great extension".into(),
authors: vec!["marshall".into()],
repository: "ext2/repo".into(),
schema_version: 0,
wasm_api_version: None,
published_at: t0,
}],
),
]
.into_iter()
.collect(),
)
.await
.unwrap();
let extensions = db
.get_extensions_by_ids(
&["ext1"],
Some(&ExtensionVersionConstraints {
schema_versions: 1..=1,
wasm_api_versions: "0.0.1".parse().unwrap()..="0.0.4".parse().unwrap(),
}),
)
.await
.unwrap();
assert_eq!(
extensions,
&[ExtensionMetadata {
id: "ext1".into(),
manifest: rpc::ExtensionApiManifest {
name: "Extension 1".into(),
version: "0.0.2".into(),
authors: vec!["max".into()],
description: Some("a good extension".into()),
repository: "ext1/repo".into(),
schema_version: Some(1),
wasm_api_version: Some("0.0.4".into()),
},
published_at: t0_chrono,
download_count: 0,
}]
);
}

View File

@@ -134,7 +134,6 @@ pub struct Config {
pub zed_environment: Arc<str>,
pub openai_api_key: Option<Arc<str>>,
pub google_ai_api_key: Option<Arc<str>>,
pub anthropic_api_key: Option<Arc<str>>,
pub zed_client_checksum_seed: Option<String>,
pub slack_panics_webhook: Option<String>,
pub auto_join_channel_id: Option<ChannelId>,

View File

@@ -137,38 +137,18 @@ async fn main() -> Result<()> {
);
#[cfg(unix)]
let signal = async move {
let mut sigterm = tokio::signal::unix::signal(SignalKind::terminate())
.expect("failed to listen for interrupt signal");
let mut sigint = tokio::signal::unix::signal(SignalKind::interrupt())
.expect("failed to listen for interrupt signal");
let sigterm = sigterm.recv();
let sigint = sigint.recv();
futures::pin_mut!(sigterm, sigint);
futures::future::select(sigterm, sigint).await;
};
#[cfg(windows)]
let signal = async move {
// todo(windows):
// `ctrl_close` does not work well, because tokio's signal handler always returns soon,
// but the system terminates the application soon after the CTRL+CLOSE handler returns.
// So we should implement a blocking handler to handle the CTRL+CLOSE signal.
let mut ctrl_break = tokio::signal::windows::ctrl_break()
.expect("failed to listen for interrupt signal");
let mut ctrl_c = tokio::signal::windows::ctrl_c()
.expect("failed to listen for interrupt signal");
let ctrl_break = ctrl_break.recv();
let ctrl_c = ctrl_c.recv();
futures::pin_mut!(ctrl_break, ctrl_c);
futures::future::select(ctrl_break, ctrl_c).await;
};
axum::Server::from_tcp(listener)
.map_err(|e| anyhow!(e))?
.serve(app.into_make_service_with_connect_info::<SocketAddr>())
.with_graceful_shutdown(async move {
signal.await;
let mut sigterm = tokio::signal::unix::signal(SignalKind::terminate())
.expect("failed to listen for interrupt signal");
let mut sigint = tokio::signal::unix::signal(SignalKind::interrupt())
.expect("failed to listen for interrupt signal");
let sigterm = sigterm.recv();
let sigint = sigint.recv();
futures::pin_mut!(sigterm, sigint);
futures::future::select(sigterm, sigint).await;
tracing::info!("Received interrupt signal");
if let Some(rpc_server) = rpc_server {
@@ -177,6 +157,10 @@ async fn main() -> Result<()> {
})
.await
.map_err(|e| anyhow!(e))?;
// todo("windows")
#[cfg(windows)]
unimplemented!();
}
_ => {
Err(anyhow!(

View File

@@ -46,7 +46,6 @@ use rpc::{
},
Connection, ConnectionId, ErrorCode, ErrorCodeExt, ErrorExt, Peer, Receipt, TypedEnvelope,
};
use semantic_version::SemanticVersion;
use serde::{Serialize, Serializer};
use std::{
any::TypeId,
@@ -69,7 +68,7 @@ use tracing::{
field::{self},
info_span, instrument, Instrument,
};
use util::http::IsahcHttpClient;
use util::{http::IsahcHttpClient, SemanticVersion};
pub const RECONNECT_TIMEOUT: Duration = Duration::from_secs(30);
@@ -367,7 +366,6 @@ impl Server {
.add_request_handler(forward_mutating_project_request::<proto::ExpandProjectEntry>)
.add_request_handler(forward_mutating_project_request::<proto::OnTypeFormatting>)
.add_request_handler(forward_mutating_project_request::<proto::SaveBuffer>)
.add_request_handler(forward_mutating_project_request::<proto::BlameBuffer>)
.add_message_handler(create_buffer_for_peer)
.add_request_handler(update_buffer)
.add_message_handler(broadcast_project_message_from_host::<proto::RefreshInlayHints>)
@@ -419,7 +417,6 @@ impl Server {
session,
app_state.config.openai_api_key.clone(),
app_state.config.google_ai_api_key.clone(),
app_state.config.anthropic_api_key.clone(),
)
}
})
@@ -734,13 +731,7 @@ impl Server {
executor: Executor,
) -> impl Future<Output = ()> {
let this = self.clone();
let span = info_span!("handle connection", %address,
connection_id=field::Empty,
user_id=field::Empty,
login=field::Empty,
impersonator=field::Empty,
dev_server_id=field::Empty
);
let span = info_span!("handle connection", %address, impersonator = field::Empty, connection_id = field::Empty);
principal.update_span(&span);
let mut teardown = self.teardown.subscribe();
@@ -818,12 +809,7 @@ impl Server {
let type_name = message.payload_type_name();
// note: we copy all the fields from the parent span so we can query them in the logs.
// (https://github.com/tokio-rs/tracing/issues/2670).
let span = tracing::info_span!("receive message", %connection_id, %address, type_name,
user_id=field::Empty,
login=field::Empty,
impersonator=field::Empty,
dev_server_id=field::Empty
);
let span = tracing::info_span!("receive message", %connection_id, %address, type_name);
principal.update_span(&span);
let span_enter = span.enter();
if let Some(handler) = this.handlers.get(&message.payload_type_id()) {
@@ -3518,7 +3504,6 @@ async fn complete_with_language_model(
session: Session,
open_ai_api_key: Option<Arc<str>>,
google_ai_api_key: Option<Arc<str>>,
anthropic_api_key: Option<Arc<str>>,
) -> Result<()> {
let Some(session) = session.for_user() else {
return Err(anyhow!("user not found"))?;
@@ -3537,10 +3522,6 @@ async fn complete_with_language_model(
let api_key = google_ai_api_key
.ok_or_else(|| anyhow!("no Google AI API key configured on the server"))?;
complete_with_google_ai(request, response, session, api_key).await?;
} else if request.model.starts_with("claude") {
let api_key = anthropic_api_key
.ok_or_else(|| anyhow!("no Anthropic AI API key configured on the server"))?;
complete_with_anthropic(request, response, session, api_key).await?;
}
Ok(())
@@ -3638,121 +3619,6 @@ async fn complete_with_google_ai(
Ok(())
}
async fn complete_with_anthropic(
request: proto::CompleteWithLanguageModel,
response: StreamingResponse<proto::CompleteWithLanguageModel>,
session: UserSession,
api_key: Arc<str>,
) -> Result<()> {
let model = anthropic::Model::from_id(&request.model)?;
let mut system_message = String::new();
let messages = request
.messages
.into_iter()
.filter_map(|message| match message.role() {
LanguageModelRole::LanguageModelUser => Some(anthropic::RequestMessage {
role: anthropic::Role::User,
content: message.content,
}),
LanguageModelRole::LanguageModelAssistant => Some(anthropic::RequestMessage {
role: anthropic::Role::Assistant,
content: message.content,
}),
// Anthropic's API breaks system instructions out as a separate field rather
// than having a system message role.
LanguageModelRole::LanguageModelSystem => {
if !system_message.is_empty() {
system_message.push_str("\n\n");
}
system_message.push_str(&message.content);
None
}
})
.collect();
let mut stream = anthropic::stream_completion(
&session.http_client,
"https://api.anthropic.com",
&api_key,
anthropic::Request {
model,
messages,
stream: true,
system: system_message,
max_tokens: 4092,
},
)
.await?;
let mut current_role = proto::LanguageModelRole::LanguageModelAssistant;
while let Some(event) = stream.next().await {
let event = event?;
match event {
anthropic::ResponseEvent::MessageStart { message } => {
if let Some(role) = message.role {
if role == "assistant" {
current_role = proto::LanguageModelRole::LanguageModelAssistant;
} else if role == "user" {
current_role = proto::LanguageModelRole::LanguageModelUser;
}
}
}
anthropic::ResponseEvent::ContentBlockStart { content_block, .. } => {
match content_block {
anthropic::ContentBlock::Text { text } => {
if !text.is_empty() {
response.send(proto::LanguageModelResponse {
choices: vec![proto::LanguageModelChoiceDelta {
index: 0,
delta: Some(proto::LanguageModelResponseMessage {
role: Some(current_role as i32),
content: Some(text),
}),
finish_reason: None,
}],
})?;
}
}
}
}
anthropic::ResponseEvent::ContentBlockDelta { delta, .. } => match delta {
anthropic::TextDelta::TextDelta { text } => {
response.send(proto::LanguageModelResponse {
choices: vec![proto::LanguageModelChoiceDelta {
index: 0,
delta: Some(proto::LanguageModelResponseMessage {
role: Some(current_role as i32),
content: Some(text),
}),
finish_reason: None,
}],
})?;
}
},
anthropic::ResponseEvent::MessageDelta { delta, .. } => {
if let Some(stop_reason) = delta.stop_reason {
response.send(proto::LanguageModelResponse {
choices: vec![proto::LanguageModelChoiceDelta {
index: 0,
delta: None,
finish_reason: Some(stop_reason),
}],
})?;
}
}
anthropic::ResponseEvent::ContentBlockStop { .. } => {}
anthropic::ResponseEvent::MessageStop {} => {}
anthropic::ResponseEvent::Ping {} => {}
}
}
Ok(())
}
struct CountTokensWithLanguageModelRateLimit;
impl RateLimit for CountTokensWithLanguageModelRateLimit {

View File

@@ -2,10 +2,9 @@ use crate::db::{ChannelId, ChannelRole, UserId};
use anyhow::{anyhow, Result};
use collections::{BTreeMap, HashMap, HashSet};
use rpc::ConnectionId;
use semantic_version::SemanticVersion;
use serde::Serialize;
use std::fmt;
use tracing::instrument;
use util::{semver, SemanticVersion};
#[derive(Default, Serialize)]
pub struct ConnectionPool {
@@ -21,6 +20,7 @@ struct ConnectedUser {
#[derive(Debug, Serialize)]
pub struct ZedVersion(pub SemanticVersion);
use std::fmt;
impl fmt::Display for ZedVersion {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
@@ -30,7 +30,7 @@ impl fmt::Display for ZedVersion {
impl ZedVersion {
pub fn can_collaborate(&self) -> bool {
self.0 >= SemanticVersion::new(0, 127, 3)
self.0 >= semver(0, 127, 3)
}
}

View File

@@ -23,7 +23,6 @@ use rpc::RECEIVE_TIMEOUT;
use serde_json::json;
use settings::SettingsStore;
use std::{
ops::Range,
path::Path,
sync::{
atomic::{self, AtomicBool, AtomicUsize},
@@ -1987,187 +1986,6 @@ struct Row10;"#};
struct Row1220;"#});
}
#[gpui::test(iterations = 10)]
async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) {
let mut server = TestServer::start(cx_a.executor()).await;
let client_a = server.create_client(cx_a, "user_a").await;
let client_b = server.create_client(cx_b, "user_b").await;
server
.create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
.await;
let active_call_a = cx_a.read(ActiveCall::global);
cx_a.update(editor::init);
cx_b.update(editor::init);
client_a
.fs()
.insert_tree(
"/my-repo",
json!({
".git": {},
"file.txt": "line1\nline2\nline3\nline\n",
}),
)
.await;
let blame = git::blame::Blame {
entries: vec![
blame_entry("1b1b1b", 0..1),
blame_entry("0d0d0d", 1..2),
blame_entry("3a3a3a", 2..3),
blame_entry("4c4c4c", 3..4),
],
permalinks: [
("1b1b1b", "http://example.com/codehost/idx-0"),
("0d0d0d", "http://example.com/codehost/idx-1"),
("3a3a3a", "http://example.com/codehost/idx-2"),
("4c4c4c", "http://example.com/codehost/idx-3"),
]
.into_iter()
.map(|(sha, url)| (sha.parse().unwrap(), url.parse().unwrap()))
.collect(),
messages: [
("1b1b1b", "message for idx-0"),
("0d0d0d", "message for idx-1"),
("3a3a3a", "message for idx-2"),
("4c4c4c", "message for idx-3"),
]
.into_iter()
.map(|(sha, message)| (sha.parse().unwrap(), message.into()))
.collect(),
};
client_a.fs().set_blame_for_repo(
Path::new("/my-repo/.git"),
vec![(Path::new("file.txt"), blame)],
);
let (project_a, worktree_id) = client_a.build_local_project("/my-repo", cx_a).await;
let project_id = active_call_a
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
// Create editor_a
let (workspace_a, cx_a) = client_a.build_workspace(&project_a, cx_a);
let editor_a = workspace_a
.update(cx_a, |workspace, cx| {
workspace.open_path((worktree_id, "file.txt"), None, true, cx)
})
.await
.unwrap()
.downcast::<Editor>()
.unwrap();
// Join the project as client B.
let project_b = client_b.build_remote_project(project_id, cx_b).await;
let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b);
let editor_b = workspace_b
.update(cx_b, |workspace, cx| {
workspace.open_path((worktree_id, "file.txt"), None, true, cx)
})
.await
.unwrap()
.downcast::<Editor>()
.unwrap();
// client_b now requests git blame for the open buffer
editor_b.update(cx_b, |editor_b, cx| {
assert!(editor_b.blame().is_none());
editor_b.toggle_git_blame(&editor::actions::ToggleGitBlame {}, cx);
});
cx_a.executor().run_until_parked();
cx_b.executor().run_until_parked();
editor_b.update(cx_b, |editor_b, cx| {
let blame = editor_b.blame().expect("editor_b should have blame now");
let entries = blame.update(cx, |blame, cx| {
blame
.blame_for_rows((0..4).map(Some), cx)
.collect::<Vec<_>>()
});
assert_eq!(
entries,
vec![
Some(blame_entry("1b1b1b", 0..1)),
Some(blame_entry("0d0d0d", 1..2)),
Some(blame_entry("3a3a3a", 2..3)),
Some(blame_entry("4c4c4c", 3..4)),
]
);
blame.update(cx, |blame, _| {
for (idx, entry) in entries.iter().flatten().enumerate() {
assert_eq!(
blame.permalink_for_entry(entry).unwrap().to_string(),
format!("http://example.com/codehost/idx-{}", idx)
);
assert_eq!(
blame.message_for_entry(entry).unwrap(),
format!("message for idx-{}", idx)
);
}
});
});
// editor_b updates the file, which gets sent to client_a, which updates git blame,
// which gets back to client_b.
editor_b.update(cx_b, |editor_b, cx| {
editor_b.edit([(Point::new(0, 3)..Point::new(0, 3), "FOO")], cx);
});
cx_a.executor().run_until_parked();
cx_b.executor().run_until_parked();
editor_b.update(cx_b, |editor_b, cx| {
let blame = editor_b.blame().expect("editor_b should have blame now");
let entries = blame.update(cx, |blame, cx| {
blame
.blame_for_rows((0..4).map(Some), cx)
.collect::<Vec<_>>()
});
assert_eq!(
entries,
vec![
None,
Some(blame_entry("0d0d0d", 1..2)),
Some(blame_entry("3a3a3a", 2..3)),
Some(blame_entry("4c4c4c", 3..4)),
]
);
});
// Now editor_a also updates the file
editor_a.update(cx_a, |editor_a, cx| {
editor_a.edit([(Point::new(1, 3)..Point::new(1, 3), "FOO")], cx);
});
cx_a.executor().run_until_parked();
cx_b.executor().run_until_parked();
editor_b.update(cx_b, |editor_b, cx| {
let blame = editor_b.blame().expect("editor_b should have blame now");
let entries = blame.update(cx, |blame, cx| {
blame
.blame_for_rows((0..4).map(Some), cx)
.collect::<Vec<_>>()
});
assert_eq!(
entries,
vec![
None,
None,
Some(blame_entry("3a3a3a", 2..3)),
Some(blame_entry("4c4c4c", 3..4)),
]
);
});
}
fn extract_hint_labels(editor: &Editor) -> Vec<String> {
let mut labels = Vec::new();
for hint in editor.inlay_hint_cache().hints() {
@@ -2178,11 +1996,3 @@ fn extract_hint_labels(editor: &Editor) -> Vec<String> {
}
labels
}
fn blame_entry(sha: &str, range: Range<u32>) -> git::blame::BlameEntry {
git::blame::BlameEntry {
sha: sha.parse().unwrap(),
range,
..Default::default()
}
}

View File

@@ -4978,15 +4978,11 @@ async fn test_lsp_hover(
},
);
let hovers = project_b
let hover_info = project_b
.update(cx_b, |p, cx| p.hover(&buffer_b, 22, cx))
.await;
assert_eq!(
hovers.len(),
1,
"Expected exactly one hover but got: {hovers:?}"
);
let hover_info = hovers.into_iter().next().unwrap();
.await
.unwrap()
.unwrap();
buffer_b.read_with(cx_b, |buffer, _| {
let snapshot = buffer.snapshot();

View File

@@ -832,7 +832,7 @@ impl RandomizedTest for ProjectCollaborationTest {
.boxed(),
LspRequestKind::CodeAction => project
.code_actions(&buffer, offset..offset, cx)
.map(|_| Ok(()))
.map_ok(|_| ())
.boxed(),
LspRequestKind::Definition => project
.definition(&buffer, offset, cx)

View File

@@ -19,6 +19,7 @@ use futures::{channel::oneshot, StreamExt as _};
use gpui::{BackgroundExecutor, Context, Model, Task, TestAppContext, View, VisualTestContext};
use language::LanguageRegistry;
use node_runtime::FakeNodeRuntime;
use notifications::NotificationStore;
use parking_lot::Mutex;
use project::{Project, WorktreeId};
@@ -26,7 +27,6 @@ use rpc::{
proto::{self, ChannelRole},
RECEIVE_TIMEOUT,
};
use semantic_version::SemanticVersion;
use serde_json::json;
use settings::SettingsStore;
use std::{
@@ -39,7 +39,7 @@ use std::{
Arc,
},
};
use util::http::FakeHttpClient;
use util::{http::FakeHttpClient, SemanticVersion};
use workspace::{Workspace, WorkspaceId, WorkspaceStore};
pub struct TestServer {
@@ -512,7 +512,6 @@ impl TestServer {
blob_store_bucket: None,
openai_api_key: None,
google_ai_api_key: None,
anthropic_api_key: None,
clickhouse_url: None,
clickhouse_user: None,
clickhouse_password: None,

View File

@@ -14,12 +14,12 @@ use db::kvp::KEY_VALUE_STORE;
use editor::{Editor, EditorElement, EditorStyle};
use fuzzy::{match_strings, StringMatchCandidate};
use gpui::{
actions, anchored, canvas, deferred, div, fill, list, point, prelude::*, px, AnyElement,
AppContext, AsyncWindowContext, Bounds, ClickEvent, ClipboardItem, DismissEvent, Div,
EventEmitter, FocusHandle, FocusableView, FontStyle, FontWeight, InteractiveElement,
IntoElement, ListOffset, ListState, Model, MouseDownEvent, ParentElement, Pixels, Point,
PromptLevel, Render, SharedString, Styled, Subscription, Task, TextStyle, View, ViewContext,
VisualContext, WeakView, WhiteSpace,
actions, canvas, div, fill, list, overlay, point, prelude::*, px, AnyElement, AppContext,
AsyncWindowContext, Bounds, ClickEvent, ClipboardItem, DismissEvent, Div, EventEmitter,
FocusHandle, FocusableView, FontStyle, FontWeight, InteractiveElement, IntoElement, ListOffset,
ListState, Model, MouseDownEvent, ParentElement, Pixels, Point, PromptLevel, Render,
SharedString, Styled, Subscription, Task, TextStyle, View, ViewContext, VisualContext,
WeakView, WhiteSpace,
};
use menu::{Cancel, Confirm, SecondaryConfirm, SelectNext, SelectPrev};
use project::{Fs, Project};
@@ -2767,13 +2767,10 @@ impl Render for CollabPanel {
self.render_signed_in(cx)
})
.children(self.context_menu.as_ref().map(|(menu, position, _)| {
deferred(
anchored()
.position(*position)
.anchor(gpui::AnchorCorner::TopLeft)
.child(menu.clone()),
)
.with_priority(1)
overlay()
.position(*position)
.anchor(gpui::AnchorCorner::TopLeft)
.child(menu.clone())
}))
}
}

View File

@@ -5,9 +5,9 @@ use client::{
};
use fuzzy::{match_strings, StringMatchCandidate};
use gpui::{
actions, anchored, deferred, div, AppContext, ClipboardItem, DismissEvent, EventEmitter,
FocusableView, Model, ParentElement, Render, Styled, Subscription, Task, View, ViewContext,
VisualContext, WeakView,
actions, div, overlay, AppContext, ClipboardItem, DismissEvent, EventEmitter, FocusableView,
Model, ParentElement, Render, Styled, Subscription, Task, View, ViewContext, VisualContext,
WeakView,
};
use picker::{Picker, PickerDelegate};
use std::sync::Arc;
@@ -409,12 +409,9 @@ impl PickerDelegate for ChannelModalDelegate {
.children(
if let (Some((menu, _)), true) = (&self.context_menu, selected) {
Some(
deferred(
anchored()
.anchor(gpui::AnchorCorner::TopRight)
.child(menu.clone()),
)
.with_priority(1),
overlay()
.anchor(gpui::AnchorCorner::TopRight)
.child(menu.clone()),
)
} else {
None

View File

@@ -13,8 +13,8 @@ use call::{report_call_event_for_room, ActiveCall};
pub use collab_panel::CollabPanel;
pub use collab_titlebar_item::CollabTitlebarItem;
use gpui::{
actions, point, AppContext, DevicePixels, Pixels, PlatformDisplay, Size, Task,
WindowBackgroundAppearance, WindowContext, WindowKind, WindowOptions,
actions, point, AppContext, DevicePixels, Pixels, PlatformDisplay, Size, Task, WindowContext,
WindowKind, WindowOptions,
};
use panel_settings::MessageEditorSettings;
pub use panel_settings::{
@@ -121,6 +121,5 @@ fn notification_window_options(
is_movable: false,
display_id: Some(screen.id()),
fullscreen: false,
window_background: WindowBackgroundAppearance::default(),
}
}

View File

@@ -10,6 +10,5 @@ pub type HashMap<K, V> = std::collections::HashMap<K, V>;
#[cfg(not(feature = "test-support"))]
pub type HashSet<T> = std::collections::HashSet<T>;
pub use rustc_hash::FxHasher;
pub use rustc_hash::{FxHashMap, FxHashSet};
pub use std::collections::*;

View File

@@ -376,7 +376,6 @@ impl Copilot {
use node_runtime::FakeNodeRuntime;
let (server, fake_server) = FakeLanguageServer::new(
LanguageServerId(0),
LanguageServerBinary {
path: "path/to/copilot".into(),
arguments: vec![],
@@ -798,7 +797,7 @@ impl Copilot {
) -> Task<Result<()>> {
let server = match self.server.as_authenticated() {
Ok(server) => server,
Err(_) => return Task::ready(Ok(())),
Err(error) => return Task::ready(Err(error)),
};
let request =
server

View File

@@ -61,8 +61,6 @@ smol.workspace = true
snippet.workspace = true
sum_tree.workspace = true
text.workspace = true
time.workspace = true
time_format.workspace = true
theme.workspace = true
tree-sitter-html = { workspace = true, optional = true }
tree-sitter-rust = { workspace = true, optional = true }

View File

@@ -94,6 +94,12 @@ pub struct SelectDownByLines {
pub(super) lines: u32,
}
#[derive(PartialEq, Clone, Deserialize, Default)]
pub struct DuplicateLine {
#[serde(default)]
pub move_upwards: bool,
}
impl_actions!(
editor,
[
@@ -113,6 +119,7 @@ impl_actions!(
MoveDownByLines,
SelectUpByLines,
SelectDownByLines,
DuplicateLine
]
);
@@ -153,8 +160,6 @@ gpui::actions!(
DeleteToPreviousSubwordStart,
DeleteToPreviousWordStart,
DisplayCursorNames,
DuplicateLineUp,
DuplicateLineDown,
ExpandMacroRecursively,
FindAllReferences,
Fold,
@@ -244,7 +249,6 @@ gpui::actions!(
SplitSelectionIntoLines,
Tab,
TabPrev,
ToggleGitBlame,
ToggleInlayHints,
ToggleLineNumbers,
ToggleSoftWrap,
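
Aside, not part of the diff: a minimal, self-contained sketch of how the #[serde(default)] field on the consolidated DuplicateLine action deserializes, assuming serde (with derive) and serde_json are available; the struct is re-declared locally for illustration.

use serde::Deserialize;

#[derive(PartialEq, Debug, Deserialize, Default)]
struct DuplicateLine {
    #[serde(default)]
    move_upwards: bool,
}

fn main() {
    // Omitting the field falls back to `false`, matching the old DuplicateLineDown.
    let down: DuplicateLine = serde_json::from_str("{}").unwrap();
    assert_eq!(down, DuplicateLine::default());

    // Passing `move_upwards: true` covers the old DuplicateLineUp behavior.
    let up: DuplicateLine = serde_json::from_str(r#"{"move_upwards": true}"#).unwrap();
    assert!(up.move_upwards);
}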

View File

@@ -38,7 +38,6 @@ mod editor_tests;
#[cfg(any(test, feature = "test-support"))]
pub mod test;
use ::git::diff::{DiffHunk, DiffHunkStatus};
use ::git::permalink::{build_permalink, BuildPermalinkParams};
pub(crate) use actions::*;
use aho_corasick::AhoCorasick;
use anyhow::{anyhow, Context as _, Result};
@@ -57,16 +56,15 @@ pub use element::{
};
use futures::FutureExt;
use fuzzy::{StringMatch, StringMatchCandidate};
use git::blame::GitBlame;
use git::diff_hunk_to_display;
use gpui::{
div, impl_actions, point, prelude::*, px, relative, rems, size, uniform_list, Action,
AnyElement, AppContext, AsyncWindowContext, AvailableSpace, BackgroundExecutor, Bounds,
ClipboardItem, Context, DispatchPhase, ElementId, EventEmitter, FocusHandle, FocusableView,
FontId, FontStyle, FontWeight, HighlightStyle, Hsla, InteractiveText, KeyContext, Model,
MouseButton, ParentElement, Pixels, Render, SharedString, StrikethroughStyle, Styled,
StyledText, Subscription, Task, TextStyle, UnderlineStyle, UniformListScrollHandle, View,
ViewContext, ViewInputHandler, VisualContext, WeakView, WhiteSpace, WindowContext,
AnyElement, AppContext, AsyncWindowContext, BackgroundExecutor, Bounds, ClipboardItem, Context,
DispatchPhase, ElementId, EventEmitter, FocusHandle, FocusableView, FontId, FontStyle,
FontWeight, HighlightStyle, Hsla, InteractiveText, KeyContext, Model, MouseButton,
ParentElement, Pixels, Render, SharedString, StrikethroughStyle, Styled, StyledText,
Subscription, Task, TextStyle, UnderlineStyle, UniformListScrollHandle, View, ViewContext,
ViewInputHandler, VisualContext, WeakView, WhiteSpace, WindowContext,
};
use highlight_matching_bracket::refresh_matching_bracket_highlights;
use hover_popover::{hide_hover, HoverState};
@@ -94,7 +92,8 @@ pub use multi_buffer::{
use ordered_float::OrderedFloat;
use parking_lot::{Mutex, RwLock};
use project::project_settings::{GitGutterSetting, ProjectSettings};
use project::{FormatTrigger, Item, Location, Project, ProjectPath, ProjectTransaction};
use project::Item;
use project::{FormatTrigger, Location, Project, ProjectPath, ProjectTransaction};
use rand::prelude::*;
use rpc::proto::*;
use scroll::{Autoscroll, OngoingScroll, ScrollAnchor, ScrollManager, ScrollbarAutoHide};
@@ -125,7 +124,7 @@ use ui::{
h_flex, prelude::*, ButtonSize, ButtonStyle, IconButton, IconName, IconSize, ListItem, Popover,
Tooltip,
};
use util::{defer, maybe, post_inc, RangeExt, ResultExt, TryFutureExt};
use util::{maybe, post_inc, RangeExt, ResultExt, TryFutureExt};
use workspace::Toast;
use workspace::{
searchable::SearchEvent, ItemNavHistory, SplitDirection, ViewId, Workspace, WorkspaceId,
@@ -433,9 +432,6 @@ pub struct Editor {
editor_actions: Vec<Box<dyn Fn(&mut ViewContext<Self>)>>,
use_autoclose: bool,
auto_replace_emoji_shortcode: bool,
show_git_blame: bool,
blame: Option<Model<GitBlame>>,
blame_subscription: Option<Subscription>,
custom_context_menu: Option<
Box<
dyn 'static
@@ -447,7 +443,6 @@ pub struct Editor {
pub struct EditorSnapshot {
pub mode: EditorMode,
show_gutter: bool,
show_git_blame: bool,
pub display_snapshot: DisplaySnapshot,
pub placeholder_text: Option<Arc<str>>,
is_focused: bool,
@@ -455,14 +450,11 @@ pub struct EditorSnapshot {
ongoing_scroll: OngoingScroll,
}
const GIT_BLAME_GUTTER_WIDTH_CHARS: f32 = 53.;
pub struct GutterDimensions {
pub left_padding: Pixels,
pub right_padding: Pixels,
pub width: Pixels,
pub margin: Pixels,
pub git_blame_entries_width: Option<Pixels>,
}
impl Default for GutterDimensions {
@@ -472,7 +464,6 @@ impl Default for GutterDimensions {
right_padding: Pixels::ZERO,
width: Pixels::ZERO,
margin: Pixels::ZERO,
git_blame_entries_width: None,
}
}
}
@@ -1480,9 +1471,6 @@ impl Editor {
vim_replace_map: Default::default(),
show_inline_completions: mode == EditorMode::Full,
custom_context_menu: None,
show_git_blame: false,
blame: None,
blame_subscription: None,
_subscriptions: vec![
cx.observe(&buffer, Self::on_buffer_changed),
cx.subscribe(&buffer, Self::on_buffer_event),
@@ -1628,10 +1616,6 @@ impl Editor {
EditorSnapshot {
mode: self.mode,
show_gutter: self.show_gutter,
show_git_blame: self
.blame
.as_ref()
.map_or(false, |blame| blame.read(cx).has_generated_entries()),
display_snapshot: self.display_map.update(cx, |map, cx| map.snapshot(cx)),
scroll_anchor: self.scroll_manager.anchor(),
ongoing_scroll: self.scroll_manager.ongoing_scroll(),
@@ -3758,17 +3742,19 @@ impl Editor {
let actions = if let Ok(code_actions) = project.update(&mut cx, |project, cx| {
project.code_actions(&start_buffer, start..end, cx)
}) {
code_actions.await
code_actions.await.log_err()
} else {
Vec::new()
None
};
this.update(&mut cx, |this, cx| {
this.available_code_actions = if actions.is_empty() {
None
} else {
Some((start_buffer, actions.into()))
};
this.available_code_actions = actions.and_then(|actions| {
if actions.is_empty() {
None
} else {
Some((start_buffer, actions.into()))
}
});
cx.notify();
})
.log_err();
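
Aside, not part of the diff: the log_err() + and_then combination above folds a failed request and an empty result into the same None. A tiny standalone illustration of that pattern, with a plain Vec<&str> standing in for the real code-action type:

fn collapse(actions: Option<Vec<&'static str>>) -> Option<Vec<&'static str>> {
    // Mirrors `actions.and_then(...)` above: errors and empty lists both become None.
    actions.and_then(|actions| if actions.is_empty() { None } else { Some(actions) })
}

fn main() {
    assert_eq!(collapse(None), None);         // request failed and was logged
    assert_eq!(collapse(Some(vec![])), None); // request succeeded, nothing offered
    assert_eq!(collapse(Some(vec!["quick fix"])), Some(vec!["quick fix"]));
}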
@@ -4569,7 +4555,6 @@ impl Editor {
}
let mut delta_for_end_row = 0;
let has_multiple_rows = start_row + 1 != end_row;
for row in start_row..end_row {
let current_indent = snapshot.indent_size_for_line(row);
let indent_delta = match (current_indent.kind, indent_kind) {
@@ -4581,12 +4566,7 @@ impl Editor {
(_, IndentKind::Tab) => IndentSize::tab(),
};
let start = if has_multiple_rows || current_indent.len < selection.start.column {
0
} else {
selection.start.column
};
let row_start = Point::new(row, start);
let row_start = Point::new(row, 0);
edits.push((
row_start..row_start,
indent_delta.chars().collect::<String>(),
@@ -4632,7 +4612,7 @@ impl Editor {
rows.start += 1;
}
}
let has_multiple_rows = rows.len() > 1;
for row in rows {
let indent_size = snapshot.indent_size_for_line(row);
if indent_size.len > 0 {
@@ -4647,16 +4627,7 @@ impl Editor {
}
IndentKind::Tab => 1,
};
let start = if has_multiple_rows
|| deletion_len > selection.start.column
|| indent_size.len < selection.start.column
{
0
} else {
selection.start.column - deletion_len
};
deletion_ranges
.push(Point::new(row, start)..Point::new(row, start + deletion_len));
deletion_ranges.push(Point::new(row, 0)..Point::new(row, deletion_len));
last_outdent = Some(row);
}
}
@@ -5152,7 +5123,7 @@ impl Editor {
});
}
pub fn duplicate_line(&mut self, upwards: bool, cx: &mut ViewContext<Self>) {
pub fn duplicate_line(&mut self, action: &DuplicateLine, cx: &mut ViewContext<Self>) {
let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
let buffer = &display_map.buffer_snapshot;
let selections = self.selections.all::<Point>(cx);
@@ -5181,7 +5152,7 @@ impl Editor {
.text_for_range(start..end)
.chain(Some("\n"))
.collect::<String>();
let insert_location = if upwards {
let insert_location = if action.move_upwards {
Point::new(rows.end, 0)
} else {
start
@@ -5198,14 +5169,6 @@ impl Editor {
});
}
pub fn duplicate_line_up(&mut self, _: &DuplicateLineUp, cx: &mut ViewContext<Self>) {
self.duplicate_line(true, cx);
}
pub fn duplicate_line_down(&mut self, _: &DuplicateLineDown, cx: &mut ViewContext<Self>) {
self.duplicate_line(false, cx);
}
pub fn move_line_up(&mut self, _: &MoveLineUp, cx: &mut ViewContext<Self>) {
let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
let buffer = self.buffer.read(cx).snapshot(cx);
@@ -7683,7 +7646,7 @@ impl Editor {
let range = target.range.to_offset(target.buffer.read(cx));
let range = editor.range_for_match(&range);
if Some(&target.buffer) == editor.buffer.read(cx).as_singleton().as_ref() {
editor.change_selections(Some(Autoscroll::focused()), cx, |s| {
editor.change_selections(Some(Autoscroll::fit()), cx, |s| {
s.select_ranges([range]);
});
} else {
@@ -7703,7 +7666,7 @@ impl Editor {
// to avoid creating a history entry at the previous cursor location.
pane.update(cx, |pane, _| pane.disable_history());
target_editor.change_selections(
Some(Autoscroll::focused()),
Some(Autoscroll::fit()),
cx,
|s| {
s.select_ranges([range]);
@@ -7853,10 +7816,9 @@ impl Editor {
Bias::Left
},
);
match self
.find_all_references_task_sources
.binary_search_by(|anchor| anchor.cmp(&head_anchor, &multi_buffer_snapshot))
.binary_search_by(|task_anchor| task_anchor.cmp(&head_anchor, &multi_buffer_snapshot))
{
Ok(_) => {
log::info!(
@@ -7874,27 +7836,66 @@ impl Editor {
let workspace = self.workspace()?;
let project = workspace.read(cx).project().clone();
let references = project.update(cx, |project, cx| project.references(&buffer, head, cx));
Some(cx.spawn(|editor, mut cx| async move {
let _cleanup = defer({
let mut cx = cx.clone();
move || {
let _ = editor.update(&mut cx, |editor, _| {
if let Ok(i) =
editor
.find_all_references_task_sources
.binary_search_by(|anchor| {
anchor.cmp(&head_anchor, &multi_buffer_snapshot)
})
{
editor.find_all_references_task_sources.remove(i);
}
});
}
});
let open_task = cx.spawn(|editor, mut cx| async move {
let mut locations = references.await?;
let snapshot = buffer.update(&mut cx, |buffer, _| buffer.snapshot())?;
let head_offset = text::ToOffset::to_offset(&head, &snapshot);
// LSP may return references that contain the item itself we requested `find_all_references` for (eg. rust-analyzer)
// So we will remove it from locations
// If there is only one reference, we will not do this filter cause it may make locations empty
if locations.len() > 1 {
cx.update(|cx| {
locations.retain(|location| {
// fn foo(x : i64) {
// ^
// println!(x);
// }
// It is ok to find reference when caret being at ^ (the end of the word)
// So we turn offset into inclusive to include the end of the word
!location
.range
.to_offset(location.buffer.read(cx))
.to_inclusive()
.contains(&head_offset)
});
})?;
}
let locations = references.await?;
if locations.is_empty() {
return anyhow::Ok(());
return Ok(());
}
// If there is one reference, just open it directly
if locations.len() == 1 {
let target = locations.pop().unwrap();
return editor.update(&mut cx, |editor, cx| {
let range = target.range.to_offset(target.buffer.read(cx));
let range = editor.range_for_match(&range);
if Some(&target.buffer) == editor.buffer().read(cx).as_singleton().as_ref() {
editor.change_selections(Some(Autoscroll::fit()), cx, |s| {
s.select_ranges([range]);
});
} else {
cx.window_context().defer(move |cx| {
let target_editor: View<Self> =
workspace.update(cx, |workspace, cx| {
workspace.open_project_item(
workspace.active_pane().clone(),
target.buffer.clone(),
cx,
)
});
target_editor.update(cx, |target_editor, cx| {
target_editor.change_selections(Some(Autoscroll::fit()), cx, |s| {
s.select_ranges([range]);
})
})
})
}
});
}
workspace.update(&mut cx, |workspace, cx| {
@@ -7914,7 +7915,24 @@ impl Editor {
Self::open_locations_in_multibuffer(
workspace, locations, replica_id, title, false, cx,
);
})
})?;
Ok(())
});
Some(cx.spawn(|editor, mut cx| async move {
open_task.await?;
editor.update(&mut cx, |editor, _| {
if let Ok(i) =
editor
.find_all_references_task_sources
.binary_search_by(|task_anchor| {
task_anchor.cmp(&head_anchor, &multi_buffer_snapshot)
})
{
editor.find_all_references_task_sources.remove(i);
}
})?;
anyhow::Ok(())
}))
}
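
Aside, not part of the diff: the _cleanup = defer(...) guard in one side of this hunk relies on a drop-based scope guard, so the task source is deregistered even on early returns. A minimal sketch of that pattern (not Zed's util::defer itself):

struct Defer<F: FnOnce()>(Option<F>);

impl<F: FnOnce()> Drop for Defer<F> {
    fn drop(&mut self) {
        if let Some(callback) = self.0.take() {
            callback();
        }
    }
}

fn defer<F: FnOnce()>(callback: F) -> Defer<F> {
    Defer(Some(callback))
}

fn main() {
    let _cleanup = defer(|| println!("task source removed"));
    println!("running find-all-references");
    // `_cleanup` is dropped here, running the callback even after an early `return`.
}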
@@ -8806,42 +8824,9 @@ impl Editor {
}
}
pub fn toggle_git_blame(&mut self, _: &ToggleGitBlame, cx: &mut ViewContext<Self>) {
if !self.show_git_blame {
if let Err(error) = self.show_git_blame_internal(cx) {
log::error!("failed to toggle on 'git blame': {}", error);
return;
}
self.show_git_blame = true
} else {
self.blame_subscription.take();
self.blame.take();
self.show_git_blame = false
}
cx.notify();
}
fn show_git_blame_internal(&mut self, cx: &mut ViewContext<Self>) -> Result<()> {
if let Some(project) = self.project.as_ref() {
let Some(buffer) = self.buffer().read(cx).as_singleton() else {
anyhow::bail!("git blame not available in multi buffers")
};
let project = project.clone();
let blame = cx.new_model(|cx| GitBlame::new(buffer, project, cx));
self.blame_subscription = Some(cx.observe(&blame, |_, _, cx| cx.notify()));
self.blame = Some(blame);
}
Ok(())
}
pub fn blame(&self) -> Option<&Model<GitBlame>> {
self.blame.as_ref()
}
fn get_permalink_to_line(&mut self, cx: &mut ViewContext<Self>) -> Result<url::Url> {
use git::permalink::{build_permalink, BuildPermalinkParams};
let (path, repo) = maybe!({
let project_handle = self.project.as_ref()?.clone();
let project = project_handle.read(cx);
@@ -8874,12 +8859,7 @@ impl Editor {
remote_url: &origin_url,
sha: &sha,
path: &path,
selection: selection.map(|selection| {
let range = selection.range();
let start = range.start.row;
let end = range.end.row;
start..end
}),
selection: selection.map(|selection| selection.range()),
})
}
@@ -9462,7 +9442,6 @@ impl Editor {
path: ProjectPath,
position: Point,
anchor: language::Anchor,
offset_from_top: u32,
cx: &mut ViewContext<Self>,
) {
let workspace = self.workspace();
@@ -9490,13 +9469,9 @@ impl Editor {
};
let nav_history = editor.nav_history.take();
editor.change_selections(
Some(Autoscroll::top_relative(offset_from_top as usize)),
cx,
|s| {
s.select_ranges([cursor..cursor]);
},
);
editor.change_selections(Some(Autoscroll::newest()), cx, |s| {
s.select_ranges([cursor..cursor]);
});
editor.nav_history = nav_history;
anyhow::Ok(())
@@ -9995,12 +9970,7 @@ impl EditorSnapshot {
0.0.into()
};
let git_blame_entries_width = self
.show_git_blame
.then_some(em_width * GIT_BLAME_GUTTER_WIDTH_CHARS);
let mut left_padding = git_blame_entries_width.unwrap_or(Pixels::ZERO);
left_padding += if gutter_settings.code_actions {
let left_padding = if gutter_settings.code_actions {
em_width * 3.0
} else if show_git_gutter && gutter_settings.line_numbers {
em_width * 2.0
@@ -10025,7 +9995,6 @@ impl EditorSnapshot {
right_padding,
width: line_gutter_width + left_padding + right_padding,
margin: -descent,
git_blame_entries_width,
}
}
}
@@ -10532,41 +10501,6 @@ pub fn diagnostic_block_renderer(diagnostic: Diagnostic, _is_valid: bool) -> Ren
let mut text_style = cx.text_style().clone();
text_style.color = diagnostic_style(diagnostic.severity, true, cx.theme().status());
let multi_line_diagnostic = diagnostic.message.contains('\n');
let buttons = |diagnostic: &Diagnostic, block_id: usize| {
if multi_line_diagnostic {
v_flex()
} else {
h_flex()
}
.children(diagnostic.is_primary.then(|| {
IconButton::new(("close-block", block_id), IconName::XCircle)
.icon_color(Color::Muted)
.size(ButtonSize::Compact)
.style(ButtonStyle::Transparent)
.visible_on_hover(group_id.clone())
.on_click(move |_click, cx| cx.dispatch_action(Box::new(Cancel)))
.tooltip(|cx| Tooltip::for_action("Close Diagnostics", &Cancel, cx))
}))
.child(
IconButton::new(("copy-block", block_id), IconName::Copy)
.icon_color(Color::Muted)
.size(ButtonSize::Compact)
.style(ButtonStyle::Transparent)
.visible_on_hover(group_id.clone())
.on_click({
let message = diagnostic.message.clone();
move |_click, cx| cx.write_to_clipboard(ClipboardItem::new(message.clone()))
})
.tooltip(|cx| Tooltip::text("Copy diagnostic message", cx)),
)
};
let icon_size = buttons(&diagnostic, cx.block_id)
.into_any_element()
.measure(AvailableSpace::min_size(), cx);
h_flex()
.id(cx.block_id)
.group(group_id.clone())
@@ -10577,10 +10511,9 @@ pub fn diagnostic_block_renderer(diagnostic: Diagnostic, _is_valid: bool) -> Ren
.child(
div()
.flex()
.w(cx.anchor_x - cx.gutter_dimensions.width - icon_size.width)
.w(cx.anchor_x - cx.gutter_dimensions.width)
.flex_shrink(),
)
.child(buttons(&diagnostic, cx.block_id))
.child(div().flex().flex_shrink_0().child(
StyledText::new(text_without_backticks.clone()).with_highlights(
&text_style,
@@ -10595,6 +10528,18 @@ pub fn diagnostic_block_renderer(diagnostic: Diagnostic, _is_valid: bool) -> Ren
}),
),
))
.child(
IconButton::new(("copy-block", cx.block_id), IconName::Copy)
.icon_color(Color::Muted)
.size(ButtonSize::Compact)
.style(ButtonStyle::Transparent)
.visible_on_hover(group_id)
.on_click({
let message = diagnostic.message.clone();
move |_click, cx| cx.write_to_clipboard(ClipboardItem::new(message.clone()))
})
.tooltip(|cx| Tooltip::text("Copy diagnostic message", cx)),
)
.into_any_element()
})
}

View File

@@ -92,8 +92,7 @@ pub enum ShowScrollbar {
#[serde(rename_all = "snake_case")]
pub enum MultiCursorModifier {
Alt,
#[serde(alias = "cmd", alias = "ctrl")]
CmdOrCtrl,
Cmd,
}
#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)]
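
Aside, not part of the diff: a self-contained sketch of how the cmd/ctrl serde aliases on the CmdOrCtrl variant keep older settings values deserializing, assuming serde (derive) and serde_json; the enum is re-declared locally without JsonSchema for brevity.

use serde::Deserialize;

#[derive(Debug, PartialEq, Deserialize)]
#[serde(rename_all = "snake_case")]
enum MultiCursorModifier {
    Alt,
    #[serde(alias = "cmd", alias = "ctrl")]
    CmdOrCtrl,
}

fn main() {
    // "cmd_or_ctrl" is the canonical snake_case name; the aliases accept legacy values.
    for value in ["\"cmd_or_ctrl\"", "\"cmd\"", "\"ctrl\""] {
        let modifier: MultiCursorModifier = serde_json::from_str(value).unwrap();
        assert_eq!(modifier, MultiCursorModifier::CmdOrCtrl);
    }
    assert_eq!(
        serde_json::from_str::<MultiCursorModifier>("\"alt\"").unwrap(),
        MultiCursorModifier::Alt
    );
}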

View File

@@ -3116,7 +3116,7 @@ fn test_duplicate_line(cx: &mut TestAppContext) {
DisplayPoint::new(3, 0)..DisplayPoint::new(3, 0),
])
});
view.duplicate_line_down(&DuplicateLineDown, cx);
view.duplicate_line(&DuplicateLine::default(), cx);
assert_eq!(view.display_text(cx), "abc\nabc\ndef\ndef\nghi\n\n");
assert_eq!(
view.selections.display_ranges(cx),
@@ -3140,7 +3140,7 @@ fn test_duplicate_line(cx: &mut TestAppContext) {
DisplayPoint::new(1, 2)..DisplayPoint::new(2, 1),
])
});
view.duplicate_line_down(&DuplicateLineDown, cx);
view.duplicate_line(&DuplicateLine::default(), cx);
assert_eq!(view.display_text(cx), "abc\ndef\nghi\nabc\ndef\nghi\n");
assert_eq!(
view.selections.display_ranges(cx),
@@ -3166,7 +3166,7 @@ fn test_duplicate_line(cx: &mut TestAppContext) {
DisplayPoint::new(3, 0)..DisplayPoint::new(3, 0),
])
});
view.duplicate_line_up(&DuplicateLineUp, cx);
view.duplicate_line(&DuplicateLine { move_upwards: true }, cx);
assert_eq!(view.display_text(cx), "abc\nabc\ndef\ndef\nghi\n\n");
assert_eq!(
view.selections.display_ranges(cx),
@@ -3190,7 +3190,7 @@ fn test_duplicate_line(cx: &mut TestAppContext) {
DisplayPoint::new(1, 2)..DisplayPoint::new(2, 1),
])
});
view.duplicate_line_up(&DuplicateLineUp, cx);
view.duplicate_line(&DuplicateLine { move_upwards: true }, cx);
assert_eq!(view.display_text(cx), "abc\ndef\nghi\nabc\ndef\nghi\n");
assert_eq!(
view.selections.display_ranges(cx),
@@ -4087,47 +4087,6 @@ let foo = «2ˇ»;"#,
);
}
#[gpui::test]
async fn test_select_previous_multibuffer(cx: &mut gpui::TestAppContext) {
init_test(cx, |_| {});
let mut cx = EditorTestContext::new_multibuffer(
cx,
[
indoc! {
"aaa\n«bbb\nccc\n»ddd"
},
indoc! {
"aaa\n«bbb\nccc\n»ddd"
},
],
);
cx.assert_editor_state(indoc! {"
ˇbbb
ccc
bbb
ccc
"});
cx.dispatch_action(SelectPrevious::default());
cx.assert_editor_state(indoc! {"
«bbbˇ»
ccc
bbb
ccc
"});
cx.dispatch_action(SelectPrevious::default());
cx.assert_editor_state(indoc! {"
«bbbˇ»
ccc
«bbbˇ»
ccc
"});
}
#[gpui::test]
async fn test_select_previous_with_single_caret(cx: &mut gpui::TestAppContext) {
init_test(cx, |_| {});
@@ -8498,6 +8457,105 @@ async fn test_document_format_with_prettier(cx: &mut gpui::TestAppContext) {
);
}
#[gpui::test]
async fn test_find_all_references(cx: &mut gpui::TestAppContext) {
init_test(cx, |_| {});
let mut cx = EditorLspTestContext::new_rust(
lsp::ServerCapabilities {
document_formatting_provider: Some(lsp::OneOf::Left(true)),
..Default::default()
},
cx,
)
.await;
cx.set_state(indoc! {"
fn foo(«paramˇ»: i64) {
println!(param);
}
"});
cx.lsp
.handle_request::<lsp::request::References, _, _>(move |_, _| async move {
Ok(Some(vec![
lsp::Location {
uri: lsp::Url::from_file_path("/root/dir/file.rs").unwrap(),
range: lsp::Range::new(lsp::Position::new(0, 7), lsp::Position::new(0, 12)),
},
lsp::Location {
uri: lsp::Url::from_file_path("/root/dir/file.rs").unwrap(),
range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 18)),
},
]))
});
let references = cx
.update_editor(|editor, cx| editor.find_all_references(&FindAllReferences, cx))
.unwrap();
cx.executor().run_until_parked();
cx.executor().start_waiting();
references.await.unwrap();
cx.assert_editor_state(indoc! {"
fn foo(param: i64) {
println!(«paramˇ»);
}
"});
let references = cx
.update_editor(|editor, cx| editor.find_all_references(&FindAllReferences, cx))
.unwrap();
cx.executor().run_until_parked();
cx.executor().start_waiting();
references.await.unwrap();
cx.assert_editor_state(indoc! {"
fn foo(«paramˇ»: i64) {
println!(param);
}
"});
cx.set_state(indoc! {"
fn foo(param: i64) {
let a = param;
let aˇ = param;
let a = param;
println!(param);
}
"});
cx.lsp
.handle_request::<lsp::request::References, _, _>(move |_, _| async move {
Ok(Some(vec![lsp::Location {
uri: lsp::Url::from_file_path("/root/dir/file.rs").unwrap(),
range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 9)),
}]))
});
let references = cx
.update_editor(|editor, cx| editor.find_all_references(&FindAllReferences, cx))
.unwrap();
cx.executor().run_until_parked();
cx.executor().start_waiting();
references.await.unwrap();
cx.assert_editor_state(indoc! {"
fn foo(param: i64) {
let a = param;
let «aˇ» = param;
let a = param;
println!(param);
}
"});
}
#[gpui::test]
async fn test_addition_reverts(cx: &mut gpui::TestAppContext) {
init_test(cx, |_| {});

View File

@@ -4,12 +4,12 @@ use crate::{
TransformBlock,
},
editor_settings::{DoubleClickInMultibuffer, MultiCursorModifier, ShowScrollbar},
git::{blame::GitBlame, diff_hunk_to_display, DisplayDiffHunk},
git::{diff_hunk_to_display, DisplayDiffHunk},
hover_popover::{
self, hover_at, HOVER_POPOVER_GAP, MIN_POPOVER_CHARACTER_WIDTH, MIN_POPOVER_LINE_HEIGHT,
},
items::BufferSearchHighlights,
mouse_context_menu::{self, MouseContextMenu},
mouse_context_menu,
scroll::scroll_amount::ScrollAmount,
CursorShape, DisplayPoint, DocumentHighlightRead, DocumentHighlightWrite, Editor, EditorMode,
EditorSettings, EditorSnapshot, EditorStyle, GutterDimensions, HalfPageDown, HalfPageUp,
@@ -18,16 +18,15 @@ use crate::{
};
use anyhow::Result;
use collections::{BTreeMap, HashMap};
use git::{blame::BlameEntry, diff::DiffHunkStatus, Oid};
use git::diff::DiffHunkStatus;
use gpui::{
anchored, deferred, div, fill, outline, point, px, quad, relative, size, svg,
transparent_black, Action, AnchorCorner, AnyElement, AnyView, AvailableSpace, Bounds,
ClipboardItem, ContentMask, Corners, CursorStyle, DispatchPhase, Edges, Element,
ElementContext, ElementInputHandler, Entity, Hitbox, Hsla, InteractiveElement, IntoElement,
ModifiersChangedEvent, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent,
ParentElement, Pixels, ScrollDelta, ScrollWheelEvent, ShapedLine, SharedString, Size, Stateful,
StatefulInteractiveElement, Style, Styled, TextRun, TextStyle, TextStyleRefinement, View,
ViewContext, WindowContext,
div, fill, outline, overlay, point, px, quad, relative, size, svg, transparent_black, Action,
AnchorCorner, AnyElement, AvailableSpace, Bounds, ContentMask, Corners, CursorStyle,
DispatchPhase, Edges, Element, ElementContext, ElementInputHandler, Entity, Hitbox, Hsla,
InteractiveElement, IntoElement, ModifiersChangedEvent, MouseButton, MouseDownEvent,
MouseMoveEvent, MouseUpEvent, ParentElement, Pixels, ScrollDelta, ScrollWheelEvent, ShapedLine,
SharedString, Size, Stateful, StatefulInteractiveElement, Style, Styled, TextRun, TextStyle,
TextStyleRefinement, View, ViewContext, WindowContext,
};
use itertools::Itertools;
use language::language_settings::ShowWhitespaceSetting;
@@ -50,8 +49,8 @@ use std::{
};
use sum_tree::Bias;
use theme::{ActiveTheme, PlayerColor};
use ui::{h_flex, ButtonLike, ButtonStyle, ContextMenu, Tooltip};
use ui::{prelude::*, tooltip_container};
use ui::prelude::*;
use ui::{h_flex, ButtonLike, ButtonStyle, Tooltip};
use util::ResultExt;
use workspace::item::Item;
@@ -178,8 +177,7 @@ impl EditorElement {
register_action(view, cx, Editor::delete_to_beginning_of_line);
register_action(view, cx, Editor::delete_to_end_of_line);
register_action(view, cx, Editor::cut_to_end_of_line);
register_action(view, cx, Editor::duplicate_line_up);
register_action(view, cx, Editor::duplicate_line_down);
register_action(view, cx, Editor::duplicate_line);
register_action(view, cx, Editor::move_line_up);
register_action(view, cx, Editor::move_line_down);
register_action(view, cx, Editor::transpose);
@@ -302,7 +300,6 @@ impl EditorElement {
register_action(view, cx, Editor::copy_highlight_json);
register_action(view, cx, Editor::copy_permalink_to_line);
register_action(view, cx, Editor::open_permalink_to_line);
register_action(view, cx, Editor::toggle_git_blame);
register_action(view, cx, |editor, action, cx| {
if let Some(task) = editor.format(action, cx) {
task.detach_and_log_err(cx);
@@ -449,8 +446,7 @@ impl EditorElement {
},
cx,
);
} else if modifiers.shift && !modifiers.control && !modifiers.alt && !modifiers.secondary()
{
} else if modifiers.shift && !modifiers.control && !modifiers.alt && !modifiers.command {
editor.select(
SelectPhase::Extend {
position,
@@ -462,7 +458,7 @@ impl EditorElement {
let multi_cursor_setting = EditorSettings::get_global(cx).multi_cursor_modifier;
let multi_cursor_modifier = match multi_cursor_setting {
MultiCursorModifier::Alt => modifiers.alt,
MultiCursorModifier::CmdOrCtrl => modifiers.secondary(),
MultiCursorModifier::Cmd => modifiers.command,
};
editor.select(
SelectPhase::Begin {
@@ -514,8 +510,8 @@ impl EditorElement {
let multi_cursor_setting = EditorSettings::get_global(cx).multi_cursor_modifier;
let multi_cursor_modifier = match multi_cursor_setting {
MultiCursorModifier::Alt => event.modifiers.secondary(),
MultiCursorModifier::CmdOrCtrl => event.modifiers.alt,
MultiCursorModifier::Alt => event.modifiers.command,
MultiCursorModifier::Cmd => event.modifiers.alt,
};
if !pending_nonempty_selections && multi_cursor_modifier && text_hitbox.is_hovered(cx) {
@@ -1085,66 +1081,6 @@ impl EditorElement {
.collect()
}
#[allow(clippy::too_many_arguments)]
fn layout_blame_entries(
&self,
buffer_rows: impl Iterator<Item = Option<u32>>,
em_width: Pixels,
scroll_position: gpui::Point<f32>,
line_height: Pixels,
gutter_hitbox: &Hitbox,
max_width: Option<Pixels>,
cx: &mut ElementContext,
) -> Option<Vec<AnyElement>> {
let Some(blame) = self.editor.read(cx).blame.as_ref().cloned() else {
return None;
};
let blamed_rows: Vec<_> = blame.update(cx, |blame, cx| {
blame.blame_for_rows(buffer_rows, cx).collect()
});
let width = if let Some(max_width) = max_width {
AvailableSpace::Definite(max_width)
} else {
AvailableSpace::MaxContent
};
let scroll_top = scroll_position.y * line_height;
let start_x = em_width * 1;
let mut last_used_color: Option<(PlayerColor, Oid)> = None;
let text_style = &self.style.text;
let shaped_lines = blamed_rows
.into_iter()
.enumerate()
.flat_map(|(ix, blame_entry)| {
if let Some(blame_entry) = blame_entry {
let mut element = render_blame_entry(
ix,
&blame,
blame_entry,
text_style,
&mut last_used_color,
self.editor.clone(),
cx,
);
let start_y = ix as f32 * line_height - (scroll_top % line_height);
let absolute_offset = gutter_hitbox.origin + point(start_x, start_y);
element.layout(absolute_offset, size(width, AvailableSpace::MinContent), cx);
Some(element)
} else {
None
}
})
.collect();
Some(shaped_lines)
}
fn layout_code_actions_indicator(
&self,
line_height: Pixels,
@@ -1171,26 +1107,19 @@ impl EditorElement {
);
let indicator_size = button.measure(available_space, cx);
let blame_width = gutter_dimensions
.git_blame_entries_width
.unwrap_or(Pixels::ZERO);
let mut x = blame_width;
let available_width = gutter_dimensions.margin + gutter_dimensions.left_padding
- indicator_size.width
- blame_width;
x += available_width / 2.;
let mut x = Pixels::ZERO;
let mut y = newest_selection_head.row() as f32 * line_height - scroll_pixel_position.y;
// Center indicator.
x +=
(gutter_dimensions.margin + gutter_dimensions.left_padding - indicator_size.width) / 2.;
y += (line_height - indicator_size.height) / 2.;
button.layout(gutter_hitbox.origin + point(x, y), available_space, cx);
Some(button)
}
fn calculate_relative_line_numbers(
&self,
buffer_rows: Vec<Option<u32>>,
snapshot: &EditorSnapshot,
rows: &Range<u32>,
relative_to: Option<u32>,
) -> HashMap<u32, u32> {
@@ -1200,6 +1129,12 @@ impl EditorElement {
};
let start = rows.start.min(relative_to);
let end = rows.end.max(relative_to);
let buffer_rows = snapshot
.buffer_rows(start)
.take(1 + (end - start) as usize)
.collect::<Vec<_>>();
let head_idx = relative_to - start;
let mut delta = 1;
@@ -1235,7 +1170,6 @@ impl EditorElement {
fn layout_line_numbers(
&self,
rows: Range<u32>,
buffer_rows: impl Iterator<Item = Option<u32>>,
active_rows: &BTreeMap<u32, bool>,
newest_selection_head: Option<DisplayPoint>,
snapshot: &EditorSnapshot,
@@ -1274,11 +1208,13 @@ impl EditorElement {
None
};
let buffer_rows = buffer_rows.collect::<Vec<_>>();
let relative_rows =
self.calculate_relative_line_numbers(buffer_rows.clone(), &rows, relative_to);
let relative_rows = self.calculate_relative_line_numbers(&snapshot, &rows, relative_to);
for (ix, row) in buffer_rows.into_iter().enumerate() {
for (ix, row) in snapshot
.buffer_rows(rows.start)
.take((rows.end - rows.start) as usize)
.enumerate()
{
let display_row = rows.start + ix as u32;
let (active, color) = if active_rows.contains_key(&display_row) {
(true, cx.theme().colors().editor_active_line_number)
@@ -1410,7 +1346,6 @@ impl EditorElement {
let render_block = |block: &TransformBlock,
available_space: Size<AvailableSpace>,
block_id: usize,
block_row_start: u32,
cx: &mut ElementContext| {
let mut element = match block {
TransformBlock::Custom(block) => {
@@ -1445,7 +1380,6 @@ impl EditorElement {
buffer,
range,
starts_new_buffer,
height,
..
} => {
let include_root = self
@@ -1461,7 +1395,6 @@ impl EditorElement {
position: Point,
anchor: text::Anchor,
path: ProjectPath,
line_offset_from_top: u32,
}
let jump_data = project::File::from_dyn(buffer.file()).map(|file| {
@@ -1473,29 +1406,12 @@ impl EditorElement {
.primary
.as_ref()
.map_or(range.context.start, |primary| primary.start);
let excerpt_start = range.context.start;
let jump_position = language::ToPoint::to_point(&jump_anchor, buffer);
let offset_from_excerpt_start = if jump_anchor == excerpt_start {
0
} else {
let excerpt_start_row =
language::ToPoint::to_point(&jump_anchor, buffer).row;
jump_position.row - excerpt_start_row
};
let line_offset_from_top =
block_row_start + *height as u32 + offset_from_excerpt_start
- snapshot
.scroll_anchor
.scroll_position(&snapshot.display_snapshot)
.y as u32;
JumpData {
position: jump_position,
anchor: jump_anchor,
path: jump_path,
line_offset_from_top,
}
});
@@ -1565,7 +1481,6 @@ impl EditorElement {
jump_data.path.clone(),
jump_data.position,
jump_data.anchor,
jump_data.line_offset_from_top,
cx,
);
}
@@ -1624,7 +1539,6 @@ impl EditorElement {
path.clone(),
jump_data.position,
jump_data.anchor,
jump_data.line_offset_from_top,
cx,
);
}
@@ -1657,7 +1571,6 @@ impl EditorElement {
path.clone(),
jump_data.position,
jump_data.anchor,
jump_data.line_offset_from_top,
cx,
);
}
@@ -1690,7 +1603,7 @@ impl EditorElement {
AvailableSpace::MinContent,
AvailableSpace::Definite(block.height() as f32 * line_height),
);
let (element, element_size) = render_block(block, available_space, block_id, row, cx);
let (element, element_size) = render_block(block, available_space, block_id, cx);
block_id += 1;
fixed_block_max_width = fixed_block_max_width.max(element_size.width + em_width);
blocks.push(BlockLayout {
@@ -1718,7 +1631,7 @@ impl EditorElement {
AvailableSpace::Definite(width),
AvailableSpace::Definite(block.height() as f32 * line_height),
);
let (element, _) = render_block(block, available_space, block_id, row, cx);
let (element, _) = render_block(block, available_space, block_id, cx);
block_id += 1;
blocks.push(BlockLayout {
row,
@@ -1806,16 +1719,12 @@ impl EditorElement {
fn layout_mouse_context_menu(&self, cx: &mut ElementContext) -> Option<AnyElement> {
let mouse_context_menu = self.editor.read(cx).mouse_context_menu.as_ref()?;
let mut element = deferred(
anchored()
.position(mouse_context_menu.position)
.child(mouse_context_menu.context_menu.clone())
.anchor(AnchorCorner::TopLeft)
.snap_to_window(),
)
.with_priority(1)
.into_any();
let mut element = overlay()
.position(mouse_context_menu.position)
.child(mouse_context_menu.context_menu.clone())
.anchor(AnchorCorner::TopLeft)
.snap_to_window()
.into_any();
element.layout(gpui::Point::default(), AvailableSpace::min_size(), cx);
Some(element)
}
@@ -2076,10 +1985,6 @@ impl EditorElement {
Self::paint_diff_hunks(layout, cx);
}
if layout.blamed_display_rows.is_some() {
self.paint_blamed_display_rows(layout, cx);
}
for (ix, line) in layout.line_numbers.iter().enumerate() {
if let Some(line) = line {
let line_origin = layout.gutter_hitbox.origin
@@ -2213,18 +2118,6 @@ impl EditorElement {
})
}
fn paint_blamed_display_rows(&self, layout: &mut EditorLayout, cx: &mut ElementContext) {
let Some(blamed_display_rows) = layout.blamed_display_rows.take() else {
return;
};
cx.paint_layer(layout.gutter_hitbox.bounds, |cx| {
for mut blame_element in blamed_display_rows.into_iter() {
blame_element.paint(cx);
}
})
}
fn paint_text(&mut self, layout: &mut EditorLayout, cx: &mut ElementContext) {
cx.with_content_mask(
Some(ContentMask {
@@ -2872,189 +2765,6 @@ impl EditorElement {
}
}
fn render_blame_entry(
ix: usize,
blame: &gpui::Model<GitBlame>,
blame_entry: BlameEntry,
text_style: &TextStyle,
last_used_color: &mut Option<(PlayerColor, Oid)>,
editor: View<Editor>,
cx: &mut ElementContext<'_>,
) -> AnyElement {
let mut sha_color = cx
.theme()
.players()
.color_for_participant(blame_entry.sha.into());
// If the last color we used is the same as the one we get for this line, but
// the commit SHAs are different, then we try again to get a different color.
match *last_used_color {
Some((color, sha)) if sha != blame_entry.sha && color.cursor == sha_color.cursor => {
let index: u32 = blame_entry.sha.into();
sha_color = cx.theme().players().color_for_participant(index + 1);
}
_ => {}
};
last_used_color.replace((sha_color, blame_entry.sha));
let relative_timestamp = match blame_entry.author_offset_date_time() {
Ok(timestamp) => time_format::format_localized_timestamp(
timestamp,
time::OffsetDateTime::now_utc(),
cx.local_timezone(),
time_format::TimestampFormat::Relative,
),
Err(_) => "Error parsing date".to_string(),
};
let pretty_commit_id = format!("{}", blame_entry.sha);
let short_commit_id = pretty_commit_id.clone().chars().take(6).collect::<String>();
let author_name = blame_entry.author.as_deref().unwrap_or("<no name>");
let name = util::truncate_and_trailoff(author_name, 20);
let permalink = blame.read(cx).permalink_for_entry(&blame_entry);
let commit_message = blame.read(cx).message_for_entry(&blame_entry);
h_flex()
.w_full()
.font(text_style.font().family)
.line_height(text_style.line_height)
.id(("blame", ix))
.children([
div()
.text_color(sha_color.cursor)
.child(short_commit_id)
.mr_2(),
div()
.w_full()
.h_flex()
.justify_between()
.text_color(cx.theme().status().hint)
.child(name)
.child(relative_timestamp),
])
.on_mouse_down(MouseButton::Right, {
let blame_entry = blame_entry.clone();
move |event, cx| {
deploy_blame_entry_context_menu(&blame_entry, editor.clone(), event.position, cx);
}
})
.hover(|style| style.bg(cx.theme().colors().element_hover))
.when_some(permalink, |this, url| {
let url = url.clone();
this.cursor_pointer().on_click(move |_, cx| {
cx.stop_propagation();
cx.open_url(url.as_str())
})
})
.tooltip(move |cx| {
BlameEntryTooltip::new(
sha_color.cursor,
commit_message.clone(),
blame_entry.clone(),
cx,
)
})
.into_any()
}
fn deploy_blame_entry_context_menu(
blame_entry: &BlameEntry,
editor: View<Editor>,
position: gpui::Point<Pixels>,
cx: &mut WindowContext<'_>,
) {
let context_menu = ContextMenu::build(cx, move |this, _| {
let sha = format!("{}", blame_entry.sha);
this.entry("Copy commit SHA", None, move |cx| {
cx.write_to_clipboard(ClipboardItem::new(sha.clone()));
})
});
editor.update(cx, move |editor, cx| {
editor.mouse_context_menu = Some(MouseContextMenu::new(position, context_menu, cx));
cx.notify();
});
}
struct BlameEntryTooltip {
color: Hsla,
commit_message: Option<String>,
blame_entry: BlameEntry,
}
impl BlameEntryTooltip {
fn new(
color: Hsla,
commit_message: Option<String>,
blame_entry: BlameEntry,
cx: &mut WindowContext,
) -> AnyView {
cx.new_view(|_cx| Self {
color,
commit_message,
blame_entry,
})
.into()
}
}
impl Render for BlameEntryTooltip {
fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
let author = self
.blame_entry
.author
.clone()
.unwrap_or("<no name>".to_string());
let author_email = self.blame_entry.author_mail.clone().unwrap_or_default();
let absolute_timestamp = match self.blame_entry.author_offset_date_time() {
Ok(timestamp) => time_format::format_localized_timestamp(
timestamp,
time::OffsetDateTime::now_utc(),
cx.local_timezone(),
time_format::TimestampFormat::Absolute,
),
Err(_) => "Error parsing date".to_string(),
};
let message = match &self.commit_message {
Some(message) => util::truncate_lines_and_trailoff(message, 15),
None => self.blame_entry.summary.clone().unwrap_or_default(),
};
let pretty_commit_id = format!("{}", self.blame_entry.sha);
tooltip_container(cx, move |this, cx| {
this.occlude()
.on_mouse_move(|_, cx| cx.stop_propagation())
.child(
v_flex()
.child(
h_flex()
.child(
div()
.text_color(cx.theme().colors().text_muted)
.child("Commit")
.pr_2(),
)
.child(
div().text_color(self.color).child(pretty_commit_id.clone()),
),
)
.child(
div()
.child(format!(
"{} {} - {}",
author, author_email, absolute_timestamp
))
.text_color(cx.theme().colors().text_muted),
)
.child(div().child(message)),
)
})
}
}
#[derive(Debug)]
pub(crate) struct LineWithInvisibles {
pub line: ShapedLine,
@@ -3413,10 +3123,6 @@ impl Element for EditorElement {
let end_row =
1 + cmp::min((scroll_position.y + height_in_lines).ceil() as u32, max_row);
let buffer_rows = snapshot
.buffer_rows(start_row)
.take((start_row..end_row).len());
let start_anchor = if start_row == 0 {
Anchor::min()
} else {
@@ -3458,7 +3164,6 @@ impl Element for EditorElement {
let (line_numbers, fold_statuses) = self.layout_line_numbers(
start_row..end_row,
buffer_rows.clone(),
&active_rows,
newest_selection_head,
&snapshot,
@@ -3467,16 +3172,6 @@ impl Element for EditorElement {
let display_hunks = self.layout_git_gutters(start_row..end_row, &snapshot);
let blamed_display_rows = self.layout_blame_entries(
buffer_rows,
em_width,
scroll_position,
line_height,
&gutter_hitbox,
gutter_dimensions.git_blame_entries_width,
cx,
);
let mut max_visible_line_width = Pixels::ZERO;
let line_layouts =
self.layout_lines(start_row..end_row, &line_numbers, &snapshot, cx);
@@ -3704,7 +3399,6 @@ impl Element for EditorElement {
redacted_ranges,
line_numbers,
display_hunks,
blamed_display_rows,
folds,
blocks,
cursors,
@@ -3791,7 +3485,6 @@ pub struct EditorLayout {
highlighted_rows: BTreeMap<u32, Hsla>,
line_numbers: Vec<Option<ShapedLine>>,
display_hunks: Vec<DisplayDiffHunk>,
blamed_display_rows: Option<Vec<AnyElement>>,
folds: Vec<FoldLayout>,
blocks: Vec<BlockLayout>,
highlighted_ranges: Vec<(Range<DisplayPoint>, Hsla)>,
@@ -4264,7 +3957,6 @@ mod tests {
element
.layout_line_numbers(
0..6,
(0..6).map(Some),
&Default::default(),
Some(DisplayPoint::new(0, 0)),
&snapshot,
@@ -4276,8 +3968,12 @@ mod tests {
.unwrap();
assert_eq!(layouts.len(), 6);
let relative_rows =
element.calculate_relative_line_numbers((0..6).map(Some).collect(), &(0..6), Some(3));
let relative_rows = window
.update(cx, |editor, cx| {
let snapshot = editor.snapshot(cx);
element.calculate_relative_line_numbers(&snapshot, &(0..6), Some(3))
})
.unwrap();
assert_eq!(relative_rows[&0], 3);
assert_eq!(relative_rows[&1], 2);
assert_eq!(relative_rows[&2], 1);
@@ -4286,16 +3982,26 @@ mod tests {
assert_eq!(relative_rows[&5], 2);
// works if cursor is before screen
let relative_rows =
element.calculate_relative_line_numbers((0..6).map(Some).collect(), &(3..6), Some(1));
let relative_rows = window
.update(cx, |editor, cx| {
let snapshot = editor.snapshot(cx);
element.calculate_relative_line_numbers(&snapshot, &(3..6), Some(1))
})
.unwrap();
assert_eq!(relative_rows.len(), 3);
assert_eq!(relative_rows[&3], 2);
assert_eq!(relative_rows[&4], 3);
assert_eq!(relative_rows[&5], 4);
// works if cursor is after screen
let relative_rows =
element.calculate_relative_line_numbers((0..6).map(Some).collect(), &(0..3), Some(6));
let relative_rows = window
.update(cx, |editor, cx| {
let snapshot = editor.snapshot(cx);
element.calculate_relative_line_numbers(&snapshot, &(0..3), Some(6))
})
.unwrap();
assert_eq!(relative_rows.len(), 3);
assert_eq!(relative_rows[&0], 5);
assert_eq!(relative_rows[&1], 4);

View File

@@ -1,4 +1,4 @@
pub mod blame;
pub mod permalink;
use std::ops::Range;

View File

@@ -1,706 +0,0 @@
use anyhow::Result;
use collections::HashMap;
use git::{
blame::{Blame, BlameEntry},
Oid,
};
use gpui::{Model, ModelContext, Subscription, Task};
use language::{Bias, Buffer, BufferSnapshot, Edit};
use project::{Item, Project};
use smallvec::SmallVec;
use sum_tree::SumTree;
use url::Url;
#[derive(Clone, Debug, Default)]
pub struct GitBlameEntry {
pub rows: u32,
pub blame: Option<BlameEntry>,
}
#[derive(Clone, Debug, Default)]
pub struct GitBlameEntrySummary {
rows: u32,
}
impl sum_tree::Item for GitBlameEntry {
type Summary = GitBlameEntrySummary;
fn summary(&self) -> Self::Summary {
GitBlameEntrySummary { rows: self.rows }
}
}
impl sum_tree::Summary for GitBlameEntrySummary {
type Context = ();
fn add_summary(&mut self, summary: &Self, _cx: &()) {
self.rows += summary.rows;
}
}
impl<'a> sum_tree::Dimension<'a, GitBlameEntrySummary> for u32 {
fn add_summary(&mut self, summary: &'a GitBlameEntrySummary, _cx: &()) {
*self += summary.rows;
}
}
pub struct GitBlame {
project: Model<Project>,
buffer: Model<Buffer>,
entries: SumTree<GitBlameEntry>,
permalinks: HashMap<Oid, Url>,
messages: HashMap<Oid, String>,
buffer_snapshot: BufferSnapshot,
buffer_edits: text::Subscription,
task: Task<Result<()>>,
generated: bool,
_refresh_subscription: Subscription,
}
impl GitBlame {
pub fn new(
buffer: Model<Buffer>,
project: Model<Project>,
cx: &mut ModelContext<Self>,
) -> Self {
let entries = SumTree::from_item(
GitBlameEntry {
rows: buffer.read(cx).max_point().row + 1,
blame: None,
},
&(),
);
let refresh_subscription = cx.subscribe(&project, {
let buffer = buffer.clone();
move |this, _, event, cx| match event {
project::Event::WorktreeUpdatedEntries(_, updated) => {
let project_entry_id = buffer.read(cx).entry_id(cx);
if updated
.iter()
.any(|(_, entry_id, _)| project_entry_id == Some(*entry_id))
{
log::debug!("Updated buffers. Regenerating blame data...",);
this.generate(cx);
}
}
project::Event::WorktreeUpdatedGitRepositories => {
log::debug!("Status of git repositories updated. Regenerating blame data...",);
this.generate(cx);
}
_ => {}
}
});
let buffer_snapshot = buffer.read(cx).snapshot();
let buffer_edits = buffer.update(cx, |buffer, _| buffer.subscribe());
let mut this = Self {
project,
buffer,
buffer_snapshot,
entries,
buffer_edits,
permalinks: HashMap::default(),
messages: HashMap::default(),
task: Task::ready(Ok(())),
generated: false,
_refresh_subscription: refresh_subscription,
};
this.generate(cx);
this
}
pub fn has_generated_entries(&self) -> bool {
self.generated
}
pub fn permalink_for_entry(&self, entry: &BlameEntry) -> Option<Url> {
self.permalinks.get(&entry.sha).cloned()
}
pub fn message_for_entry(&self, entry: &BlameEntry) -> Option<String> {
self.messages.get(&entry.sha).cloned()
}
pub fn blame_for_rows<'a>(
&'a mut self,
rows: impl 'a + IntoIterator<Item = Option<u32>>,
cx: &mut ModelContext<Self>,
) -> impl 'a + Iterator<Item = Option<BlameEntry>> {
self.sync(cx);
let mut cursor = self.entries.cursor::<u32>();
rows.into_iter().map(move |row| {
let row = row?;
cursor.seek_forward(&row, Bias::Right, &());
cursor.item()?.blame.clone()
})
}
fn sync(&mut self, cx: &mut ModelContext<Self>) {
let edits = self.buffer_edits.consume();
let new_snapshot = self.buffer.read(cx).snapshot();
let mut row_edits = edits
.into_iter()
.map(|edit| {
let old_point_range = self.buffer_snapshot.offset_to_point(edit.old.start)
..self.buffer_snapshot.offset_to_point(edit.old.end);
let new_point_range = new_snapshot.offset_to_point(edit.new.start)
..new_snapshot.offset_to_point(edit.new.end);
if old_point_range.start.column
== self.buffer_snapshot.line_len(old_point_range.start.row)
&& (new_snapshot.chars_at(edit.new.start).next() == Some('\n')
|| self.buffer_snapshot.line_len(old_point_range.end.row) == 0)
{
Edit {
old: old_point_range.start.row + 1..old_point_range.end.row + 1,
new: new_point_range.start.row + 1..new_point_range.end.row + 1,
}
} else if old_point_range.start.column == 0
&& old_point_range.end.column == 0
&& new_point_range.end.column == 0
{
Edit {
old: old_point_range.start.row..old_point_range.end.row,
new: new_point_range.start.row..new_point_range.end.row,
}
} else {
Edit {
old: old_point_range.start.row..old_point_range.end.row + 1,
new: new_point_range.start.row..new_point_range.end.row + 1,
}
}
})
.peekable();
let mut new_entries = SumTree::new();
let mut cursor = self.entries.cursor::<u32>();
while let Some(mut edit) = row_edits.next() {
while let Some(next_edit) = row_edits.peek() {
if edit.old.end >= next_edit.old.start {
edit.old.end = next_edit.old.end;
edit.new.end = next_edit.new.end;
row_edits.next();
} else {
break;
}
}
new_entries.append(cursor.slice(&edit.old.start, Bias::Right, &()), &());
if edit.new.start > new_entries.summary().rows {
new_entries.push(
GitBlameEntry {
rows: edit.new.start - new_entries.summary().rows,
blame: cursor.item().and_then(|entry| entry.blame.clone()),
},
&(),
);
}
cursor.seek(&edit.old.end, Bias::Right, &());
if !edit.new.is_empty() {
new_entries.push(
GitBlameEntry {
rows: edit.new.len() as u32,
blame: None,
},
&(),
);
}
let old_end = cursor.end(&());
if row_edits
.peek()
.map_or(true, |next_edit| next_edit.old.start >= old_end)
{
if let Some(entry) = cursor.item() {
if old_end > edit.old.end {
new_entries.push(
GitBlameEntry {
rows: cursor.end(&()) - edit.old.end,
blame: entry.blame.clone(),
},
&(),
);
}
cursor.next(&());
}
}
}
new_entries.append(cursor.suffix(&()), &());
drop(cursor);
self.buffer_snapshot = new_snapshot;
self.entries = new_entries;
}
#[cfg(test)]
fn check_invariants(&mut self, cx: &mut ModelContext<Self>) {
self.sync(cx);
assert_eq!(
self.entries.summary().rows,
self.buffer.read(cx).max_point().row + 1
);
}
fn generate(&mut self, cx: &mut ModelContext<Self>) {
let buffer_edits = self.buffer.update(cx, |buffer, _| buffer.subscribe());
let snapshot = self.buffer.read(cx).snapshot();
let blame = self.project.read(cx).blame_buffer(&self.buffer, None, cx);
self.task = cx.spawn(|this, mut cx| async move {
let (entries, permalinks, messages) = cx
.background_executor()
.spawn({
let snapshot = snapshot.clone();
async move {
let Blame {
entries,
permalinks,
messages,
} = blame.await?;
let mut current_row = 0;
let mut entries = SumTree::from_iter(
entries.into_iter().flat_map(|entry| {
let mut entries = SmallVec::<[GitBlameEntry; 2]>::new();
if entry.range.start > current_row {
let skipped_rows = entry.range.start - current_row;
entries.push(GitBlameEntry {
rows: skipped_rows,
blame: None,
});
}
entries.push(GitBlameEntry {
rows: entry.range.len() as u32,
blame: Some(entry.clone()),
});
current_row = entry.range.end;
entries
}),
&(),
);
let max_row = snapshot.max_point().row;
if max_row >= current_row {
entries.push(
GitBlameEntry {
rows: (max_row + 1) - current_row,
blame: None,
},
&(),
);
}
anyhow::Ok((entries, permalinks, messages))
}
})
.await?;
this.update(&mut cx, |this, cx| {
this.buffer_edits = buffer_edits;
this.buffer_snapshot = snapshot;
this.entries = entries;
this.permalinks = permalinks;
this.messages = messages;
this.generated = true;
cx.notify();
})
});
}
}
#[cfg(test)]
mod tests {
use super::*;
use gpui::Context;
use language::{Point, Rope};
use project::FakeFs;
use rand::prelude::*;
use serde_json::json;
use settings::SettingsStore;
use std::{cmp, env, ops::Range, path::Path};
use unindent::Unindent as _;
use util::RandomCharIter;
macro_rules! assert_blame_rows {
($blame:expr, $rows:expr, $expected:expr, $cx:expr) => {
assert_eq!(
$blame
.blame_for_rows($rows.map(Some), $cx)
.collect::<Vec<_>>(),
$expected
);
};
}
fn init_test(cx: &mut gpui::TestAppContext) {
cx.update(|cx| {
let settings = SettingsStore::test(cx);
cx.set_global(settings);
theme::init(theme::LoadThemes::JustBase, cx);
language::init(cx);
client::init_settings(cx);
workspace::init_settings(cx);
Project::init_settings(cx);
crate::init(cx);
});
}
#[gpui::test]
async fn test_blame_for_rows(cx: &mut gpui::TestAppContext) {
init_test(cx);
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
"/my-repo",
json!({
".git": {},
"file.txt": r#"
AAA Line 1
BBB Line 2 - Modified 1
CCC Line 3 - Modified 2
modified in memory 1
modified in memory 1
DDD Line 4 - Modified 2
EEE Line 5 - Modified 1
FFF Line 6 - Modified 2
"#
.unindent()
}),
)
.await;
fs.set_blame_for_repo(
Path::new("/my-repo/.git"),
vec![(
Path::new("file.txt"),
Blame {
entries: vec![
blame_entry("1b1b1b", 0..1),
blame_entry("0d0d0d", 1..2),
blame_entry("3a3a3a", 2..3),
blame_entry("3a3a3a", 5..6),
blame_entry("0d0d0d", 6..7),
blame_entry("3a3a3a", 7..8),
],
..Default::default()
},
)],
);
let project = Project::test(fs, ["/my-repo".as_ref()], cx).await;
let buffer = project
.update(cx, |project, cx| {
project.open_local_buffer("/my-repo/file.txt", cx)
})
.await
.unwrap();
let git_blame = cx.new_model(|cx| GitBlame::new(buffer.clone(), project, cx));
cx.executor().run_until_parked();
git_blame.update(cx, |blame, cx| {
// All lines
assert_eq!(
blame
.blame_for_rows((0..8).map(Some), cx)
.collect::<Vec<_>>(),
vec![
Some(blame_entry("1b1b1b", 0..1)),
Some(blame_entry("0d0d0d", 1..2)),
Some(blame_entry("3a3a3a", 2..3)),
None,
None,
Some(blame_entry("3a3a3a", 5..6)),
Some(blame_entry("0d0d0d", 6..7)),
Some(blame_entry("3a3a3a", 7..8)),
]
);
// Subset of lines
assert_eq!(
blame
.blame_for_rows((1..4).map(Some), cx)
.collect::<Vec<_>>(),
vec![
Some(blame_entry("0d0d0d", 1..2)),
Some(blame_entry("3a3a3a", 2..3)),
None
]
);
// Subset of lines, with some not displayed
assert_eq!(
blame
.blame_for_rows(vec![Some(1), None, None], cx)
.collect::<Vec<_>>(),
vec![Some(blame_entry("0d0d0d", 1..2)), None, None]
);
});
}
#[gpui::test]
async fn test_blame_for_rows_with_edits(cx: &mut gpui::TestAppContext) {
init_test(cx);
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
"/my-repo",
json!({
".git": {},
"file.txt": r#"
Line 1
Line 2
Line 3
"#
.unindent()
}),
)
.await;
fs.set_blame_for_repo(
Path::new("/my-repo/.git"),
vec![(
Path::new("file.txt"),
Blame {
entries: vec![blame_entry("1b1b1b", 0..4)],
..Default::default()
},
)],
);
let project = Project::test(fs, ["/my-repo".as_ref()], cx).await;
let buffer = project
.update(cx, |project, cx| {
project.open_local_buffer("/my-repo/file.txt", cx)
})
.await
.unwrap();
let git_blame = cx.new_model(|cx| GitBlame::new(buffer.clone(), project, cx));
cx.executor().run_until_parked();
git_blame.update(cx, |blame, cx| {
// Sanity check before edits: make sure that we get the same blame entry for all
// lines.
assert_blame_rows!(
blame,
(0..4),
vec![
Some(blame_entry("1b1b1b", 0..4)),
Some(blame_entry("1b1b1b", 0..4)),
Some(blame_entry("1b1b1b", 0..4)),
Some(blame_entry("1b1b1b", 0..4)),
],
cx
);
});
// Modify a single line, at the start of the line
buffer.update(cx, |buffer, cx| {
buffer.edit([(Point::new(0, 0)..Point::new(0, 0), "X")], None, cx);
});
git_blame.update(cx, |blame, cx| {
assert_blame_rows!(
blame,
(0..2),
vec![None, Some(blame_entry("1b1b1b", 0..4))],
cx
);
});
// Modify a single line, in the middle of the line
buffer.update(cx, |buffer, cx| {
buffer.edit([(Point::new(1, 2)..Point::new(1, 2), "X")], None, cx);
});
git_blame.update(cx, |blame, cx| {
assert_blame_rows!(
blame,
(1..4),
vec![
None,
Some(blame_entry("1b1b1b", 0..4)),
Some(blame_entry("1b1b1b", 0..4))
],
cx
);
});
// Before we insert a newline at the end, sanity check:
git_blame.update(cx, |blame, cx| {
assert_blame_rows!(blame, (3..4), vec![Some(blame_entry("1b1b1b", 0..4))], cx);
});
// Insert a newline at the end
buffer.update(cx, |buffer, cx| {
buffer.edit([(Point::new(3, 6)..Point::new(3, 6), "\n")], None, cx);
});
// Only the new line is marked as edited:
git_blame.update(cx, |blame, cx| {
assert_blame_rows!(
blame,
(3..5),
vec![Some(blame_entry("1b1b1b", 0..4)), None],
cx
);
});
// Before we insert a newline at the start, sanity check:
git_blame.update(cx, |blame, cx| {
assert_blame_rows!(blame, (2..3), vec![Some(blame_entry("1b1b1b", 0..4)),], cx);
});
// Insert a newline at the start of the row
buffer.update(cx, |buffer, cx| {
buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "\n")], None, cx);
});
// Only the new line is marked as edited:
git_blame.update(cx, |blame, cx| {
assert_blame_rows!(
blame,
(2..4),
vec![None, Some(blame_entry("1b1b1b", 0..4)),],
cx
);
});
}
#[gpui::test(iterations = 100)]
async fn test_blame_random(mut rng: StdRng, cx: &mut gpui::TestAppContext) {
let operations = env::var("OPERATIONS")
.map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
.unwrap_or(10);
let max_edits_per_operation = env::var("MAX_EDITS_PER_OPERATION")
.map(|i| {
i.parse()
.expect("invalid `MAX_EDITS_PER_OPERATION` variable")
})
.unwrap_or(5);
init_test(cx);
let fs = FakeFs::new(cx.executor());
let buffer_initial_text_len = rng.gen_range(5..15);
let mut buffer_initial_text = Rope::from(
RandomCharIter::new(&mut rng)
.take(buffer_initial_text_len)
.collect::<String>()
.as_str(),
);
let mut newline_ixs = (0..buffer_initial_text_len).choose_multiple(&mut rng, 5);
newline_ixs.sort_unstable();
for newline_ix in newline_ixs.into_iter().rev() {
let newline_ix = buffer_initial_text.clip_offset(newline_ix, Bias::Right);
buffer_initial_text.replace(newline_ix..newline_ix, "\n");
}
log::info!("initial buffer text: {:?}", buffer_initial_text);
fs.insert_tree(
"/my-repo",
json!({
".git": {},
"file.txt": buffer_initial_text.to_string()
}),
)
.await;
let blame_entries = gen_blame_entries(buffer_initial_text.max_point().row, &mut rng);
log::info!("initial blame entries: {:?}", blame_entries);
fs.set_blame_for_repo(
Path::new("/my-repo/.git"),
vec![(
Path::new("file.txt"),
Blame {
entries: blame_entries,
..Default::default()
},
)],
);
let project = Project::test(fs.clone(), ["/my-repo".as_ref()], cx).await;
let buffer = project
.update(cx, |project, cx| {
project.open_local_buffer("/my-repo/file.txt", cx)
})
.await
.unwrap();
let git_blame = cx.new_model(|cx| GitBlame::new(buffer.clone(), project, cx));
cx.executor().run_until_parked();
git_blame.update(cx, |blame, cx| blame.check_invariants(cx));
for _ in 0..operations {
match rng.gen_range(0..100) {
0..=19 => {
log::info!("quiescing");
cx.executor().run_until_parked();
}
20..=69 => {
log::info!("editing buffer");
buffer.update(cx, |buffer, cx| {
buffer.randomly_edit(&mut rng, max_edits_per_operation, cx);
log::info!("buffer text: {:?}", buffer.text());
});
let blame_entries = gen_blame_entries(
buffer.read_with(cx, |buffer, _| buffer.max_point().row),
&mut rng,
);
log::info!("regenerating blame entries: {:?}", blame_entries);
fs.set_blame_for_repo(
Path::new("/my-repo/.git"),
vec![(
Path::new("file.txt"),
Blame {
entries: blame_entries,
..Default::default()
},
)],
);
}
_ => {
git_blame.update(cx, |blame, cx| blame.check_invariants(cx));
}
}
}
git_blame.update(cx, |blame, cx| blame.check_invariants(cx));
}
fn gen_blame_entries(max_row: u32, rng: &mut StdRng) -> Vec<BlameEntry> {
let mut last_row = 0;
let mut blame_entries = Vec::new();
for ix in 0..5 {
if last_row < max_row {
let row_start = rng.gen_range(last_row..max_row);
let row_end = rng.gen_range(row_start + 1..cmp::min(row_start + 3, max_row) + 1);
blame_entries.push(blame_entry(&ix.to_string(), row_start..row_end));
last_row = row_end;
} else {
break;
}
}
blame_entries
}
fn blame_entry(sha: &str, range: Range<u32>) -> BlameEntry {
BlameEntry {
sha: sha.parse().unwrap(),
range,
..Default::default()
}
}
}
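
Aside, not part of the diff: the GitBlame model in this file stores blame data as runs of rows sharing one optional entry and resolves a row by walking cumulative row counts (via a SumTree cursor). A simplified, dependency-free sketch of that lookup, using the row layout from the test_blame_for_rows fixture above:

#[derive(Clone)]
struct Run {
    rows: u32,
    sha: Option<&'static str>, // stands in for Option<BlameEntry>
}

fn blame_for_row(runs: &[Run], row: u32) -> Option<&'static str> {
    let mut start = 0;
    for run in runs {
        if row < start + run.rows {
            return run.sha;
        }
        start += run.rows;
    }
    None
}

fn main() {
    // Mirrors the fixture: rows 3 and 4 were modified in memory, so they have no entry.
    let runs = [
        Run { rows: 1, sha: Some("1b1b1b") },
        Run { rows: 1, sha: Some("0d0d0d") },
        Run { rows: 1, sha: Some("3a3a3a") },
        Run { rows: 2, sha: None },
        Run { rows: 1, sha: Some("3a3a3a") },
        Run { rows: 1, sha: Some("0d0d0d") },
        Run { rows: 1, sha: Some("3a3a3a") },
    ];
    assert_eq!(blame_for_row(&runs, 0), Some("1b1b1b"));
    assert_eq!(blame_for_row(&runs, 3), None);
    assert_eq!(blame_for_row(&runs, 6), Some("0d0d0d"));
    assert_eq!(blame_for_row(&runs, 7), Some("3a3a3a"));
}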

View File

@@ -1,9 +1,10 @@
use std::ops::Range;
use anyhow::{anyhow, Result};
use language::Point;
use url::Url;
pub(crate) enum GitHostingProvider {
enum GitHostingProvider {
Github,
Gitlab,
Gitee,
@@ -28,9 +29,9 @@ impl GitHostingProvider {
/// Returns the fragment portion of the URL for the selected lines in
/// the representation the [`GitHostingProvider`] expects.
fn line_fragment(&self, selection: &Range<u32>) -> String {
if selection.start == selection.end {
let line = selection.start + 1;
fn line_fragment(&self, selection: &Range<Point>) -> String {
if selection.start.row == selection.end.row {
let line = selection.start.row + 1;
match self {
Self::Github | Self::Gitlab | Self::Gitee | Self::Sourcehut | Self::Codeberg => {
@@ -39,8 +40,8 @@ impl GitHostingProvider {
Self::Bitbucket => format!("lines-{}", line),
}
} else {
let start_line = selection.start + 1;
let end_line = selection.end + 1;
let start_line = selection.start.row + 1;
let end_line = selection.end.row + 1;
match self {
Self::Github | Self::Codeberg => format!("L{}-L{}", start_line, end_line),
@@ -57,7 +58,7 @@ pub struct BuildPermalinkParams<'a> {
pub remote_url: &'a str,
pub sha: &'a str,
pub path: &'a str,
pub selection: Option<Range<u32>>,
pub selection: Option<Range<Point>>,
}
pub fn build_permalink(params: BuildPermalinkParams) -> Result<Url> {
@@ -87,42 +88,17 @@ pub fn build_permalink(params: BuildPermalinkParams) -> Result<Url> {
let mut permalink = provider.base_url().join(&path).unwrap();
permalink.set_fragment(line_fragment.as_deref());
Ok(permalink)
}
pub(crate) struct ParsedGitRemote<'a> {
struct ParsedGitRemote<'a> {
pub provider: GitHostingProvider,
pub owner: &'a str,
pub repo: &'a str,
}
pub(crate) struct BuildCommitPermalinkParams<'a> {
pub remote: &'a ParsedGitRemote<'a>,
pub sha: &'a str,
}
pub(crate) fn build_commit_permalink(params: BuildCommitPermalinkParams) -> Url {
let BuildCommitPermalinkParams { sha, remote } = params;
let ParsedGitRemote {
provider,
owner,
repo,
} = remote;
let path = match provider {
GitHostingProvider::Github => format!("{owner}/{repo}/commit/{sha}"),
GitHostingProvider::Gitlab => format!("{owner}/{repo}/-/commit/{sha}"),
GitHostingProvider::Gitee => format!("{owner}/{repo}/commit/{sha}"),
GitHostingProvider::Bitbucket => format!("{owner}/{repo}/commits/{sha}"),
GitHostingProvider::Sourcehut => format!("~{owner}/{repo}/commit/{sha}"),
GitHostingProvider::Codeberg => format!("{owner}/{repo}/commit/{sha}"),
};
provider.base_url().join(&path).unwrap()
}
pub(crate) fn parse_git_remote_url(url: &str) -> Option<ParsedGitRemote> {
fn parse_git_remote_url(url: &str) -> Option<ParsedGitRemote> {
if url.starts_with("git@github.com:") || url.starts_with("https://github.com/") {
let repo_with_owner = url
.trim_start_matches("git@github.com:")
@@ -241,7 +217,7 @@ mod tests {
remote_url: "git@github.com:zed-industries/zed.git",
sha: "e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7",
path: "crates/editor/src/git/permalink.rs",
selection: Some(6..6),
selection: Some(Point::new(6, 1)..Point::new(6, 10)),
})
.unwrap();
@@ -255,7 +231,7 @@ mod tests {
remote_url: "git@github.com:zed-industries/zed.git",
sha: "e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7",
path: "crates/editor/src/git/permalink.rs",
selection: Some(23..47),
selection: Some(Point::new(23, 1)..Point::new(47, 10)),
})
.unwrap();
@@ -283,7 +259,7 @@ mod tests {
remote_url: "https://github.com/zed-industries/zed.git",
sha: "b2efec9824c45fcc90c9a7eb107a50d1772a60aa",
path: "crates/zed/src/main.rs",
selection: Some(6..6),
selection: Some(Point::new(6, 1)..Point::new(6, 10)),
})
.unwrap();
@@ -297,7 +273,7 @@ mod tests {
remote_url: "https://github.com/zed-industries/zed.git",
sha: "b2efec9824c45fcc90c9a7eb107a50d1772a60aa",
path: "crates/zed/src/main.rs",
selection: Some(23..47),
selection: Some(Point::new(23, 1)..Point::new(47, 10)),
})
.unwrap();
@@ -325,7 +301,7 @@ mod tests {
remote_url: "git@gitlab.com:zed-industries/zed.git",
sha: "e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7",
path: "crates/editor/src/git/permalink.rs",
selection: Some(6..6),
selection: Some(Point::new(6, 1)..Point::new(6, 10)),
})
.unwrap();
@@ -339,7 +315,7 @@ mod tests {
remote_url: "git@gitlab.com:zed-industries/zed.git",
sha: "e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7",
path: "crates/editor/src/git/permalink.rs",
selection: Some(23..47),
selection: Some(Point::new(23, 1)..Point::new(47, 10)),
})
.unwrap();
@@ -367,7 +343,7 @@ mod tests {
remote_url: "https://gitlab.com/zed-industries/zed.git",
sha: "b2efec9824c45fcc90c9a7eb107a50d1772a60aa",
path: "crates/zed/src/main.rs",
selection: Some(6..6),
selection: Some(Point::new(6, 1)..Point::new(6, 10)),
})
.unwrap();
@@ -381,7 +357,7 @@ mod tests {
remote_url: "https://gitlab.com/zed-industries/zed.git",
sha: "b2efec9824c45fcc90c9a7eb107a50d1772a60aa",
path: "crates/zed/src/main.rs",
selection: Some(23..47),
selection: Some(Point::new(23, 1)..Point::new(47, 10)),
})
.unwrap();
@@ -409,7 +385,7 @@ mod tests {
remote_url: "git@gitee.com:libkitten/zed.git",
sha: "e5fe811d7ad0fc26934edd76f891d20bdc3bb194",
path: "crates/editor/src/git/permalink.rs",
selection: Some(6..6),
selection: Some(Point::new(6, 1)..Point::new(6, 10)),
})
.unwrap();
@@ -423,7 +399,7 @@ mod tests {
remote_url: "git@gitee.com:libkitten/zed.git",
sha: "e5fe811d7ad0fc26934edd76f891d20bdc3bb194",
path: "crates/editor/src/git/permalink.rs",
selection: Some(23..47),
selection: Some(Point::new(23, 1)..Point::new(47, 10)),
})
.unwrap();
@@ -451,7 +427,7 @@ mod tests {
remote_url: "https://gitee.com/libkitten/zed.git",
sha: "e5fe811d7ad0fc26934edd76f891d20bdc3bb194",
path: "crates/zed/src/main.rs",
selection: Some(6..6),
selection: Some(Point::new(6, 1)..Point::new(6, 10)),
})
.unwrap();
@@ -465,7 +441,7 @@ mod tests {
remote_url: "https://gitee.com/libkitten/zed.git",
sha: "e5fe811d7ad0fc26934edd76f891d20bdc3bb194",
path: "crates/zed/src/main.rs",
selection: Some(23..47),
selection: Some(Point::new(23, 1)..Point::new(47, 10)),
})
.unwrap();
let expected_url = "https://gitee.com/libkitten/zed/blob/e5fe811d7ad0fc26934edd76f891d20bdc3bb194/crates/zed/src/main.rs#L24-48";
@@ -519,7 +495,7 @@ mod tests {
remote_url: "git@bitbucket.org:thorstenzed/testingrepo.git",
sha: "f00b4r",
path: "main.rs",
selection: Some(6..6),
selection: Some(Point::new(6, 1)..Point::new(6, 10)),
})
.unwrap();
@@ -534,7 +510,7 @@ mod tests {
remote_url: "git@bitbucket.org:thorstenzed/testingrepo.git",
sha: "f00b4r",
path: "main.rs",
selection: Some(23..47),
selection: Some(Point::new(23, 1)..Point::new(47, 10)),
})
.unwrap();
@@ -577,7 +553,7 @@ mod tests {
remote_url: "git@git.sr.ht:~rajveermalviya/zed",
sha: "faa6f979be417239b2e070dbbf6392b909224e0b",
path: "crates/editor/src/git/permalink.rs",
selection: Some(6..6),
selection: Some(Point::new(6, 1)..Point::new(6, 10)),
})
.unwrap();
@@ -591,7 +567,7 @@ mod tests {
remote_url: "git@git.sr.ht:~rajveermalviya/zed",
sha: "faa6f979be417239b2e070dbbf6392b909224e0b",
path: "crates/editor/src/git/permalink.rs",
selection: Some(23..47),
selection: Some(Point::new(23, 1)..Point::new(47, 10)),
})
.unwrap();
@@ -619,7 +595,7 @@ mod tests {
remote_url: "https://git.sr.ht/~rajveermalviya/zed",
sha: "faa6f979be417239b2e070dbbf6392b909224e0b",
path: "crates/zed/src/main.rs",
selection: Some(6..6),
selection: Some(Point::new(6, 1)..Point::new(6, 10)),
})
.unwrap();
@@ -633,7 +609,7 @@ mod tests {
remote_url: "https://git.sr.ht/~rajveermalviya/zed",
sha: "faa6f979be417239b2e070dbbf6392b909224e0b",
path: "crates/zed/src/main.rs",
selection: Some(23..47),
selection: Some(Point::new(23, 1)..Point::new(47, 10)),
})
.unwrap();
@@ -661,7 +637,7 @@ mod tests {
remote_url: "git@codeberg.org:rajveermalviya/zed.git",
sha: "faa6f979be417239b2e070dbbf6392b909224e0b",
path: "crates/editor/src/git/permalink.rs",
selection: Some(6..6),
selection: Some(Point::new(6, 1)..Point::new(6, 10)),
})
.unwrap();
@@ -675,7 +651,7 @@ mod tests {
remote_url: "git@codeberg.org:rajveermalviya/zed.git",
sha: "faa6f979be417239b2e070dbbf6392b909224e0b",
path: "crates/editor/src/git/permalink.rs",
selection: Some(23..47),
selection: Some(Point::new(23, 1)..Point::new(47, 10)),
})
.unwrap();
@@ -703,7 +679,7 @@ mod tests {
remote_url: "https://codeberg.org/rajveermalviya/zed.git",
sha: "faa6f979be417239b2e070dbbf6392b909224e0b",
path: "crates/zed/src/main.rs",
selection: Some(6..6),
selection: Some(Point::new(6, 1)..Point::new(6, 10)),
})
.unwrap();
@@ -717,7 +693,7 @@ mod tests {
remote_url: "https://codeberg.org/rajveermalviya/zed.git",
sha: "faa6f979be417239b2e070dbbf6392b909224e0b",
path: "crates/zed/src/main.rs",
selection: Some(23..47),
selection: Some(Point::new(23, 1)..Point::new(47, 10)),
})
.unwrap();
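To make the `Range<u32>` → `Range<Point>` change concrete: the URL fragment is still derived from rows only, converted to one-based line numbers. A minimal sketch of the GitHub-style case, assuming zero-based selection rows (standalone code, not the crate's actual API):

    // Zero-based selection rows become a one-based "#Lx" or "#Lx-Ly" fragment.
    fn github_line_fragment(start_row: u32, end_row: u32) -> String {
        if start_row == end_row {
            format!("L{}", start_row + 1)
        } else {
            format!("L{}-L{}", start_row + 1, end_row + 1)
        }
    }

    fn main() {
        assert_eq!(github_line_fragment(6, 6), "L7");
        assert_eq!(github_line_fragment(23, 47), "L24-L48");
    }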

View File

@@ -93,7 +93,7 @@ impl Editor {
modifiers: Modifiers,
cx: &mut ViewContext<Self>,
) {
if !modifiers.secondary() || self.has_pending_selection() {
if !modifiers.command || self.has_pending_selection() {
self.hide_hovered_link(cx);
return;
}
@@ -113,7 +113,7 @@ impl Editor {
&snapshot,
point_for_position,
self,
modifiers.secondary(),
modifiers.command,
modifiers.shift,
cx,
);
@@ -256,7 +256,7 @@ pub fn update_inlay_link_and_hover_points(
snapshot: &EditorSnapshot,
point_for_position: PointForPosition,
editor: &mut Editor,
secondary_held: bool,
cmd_held: bool,
shift_held: bool,
cx: &mut ViewContext<'_, Editor>,
) {
@@ -394,9 +394,7 @@ pub fn update_inlay_link_and_hover_points(
if let Some((language_server_id, location)) =
hovered_hint_part.location
{
if secondary_held
&& !editor.has_pending_nonempty_selection()
{
if cmd_held && !editor.has_pending_nonempty_selection() {
go_to_definition_updated = true;
show_link_definition(
shift_held,
@@ -702,7 +700,10 @@ mod tests {
use gpui::Modifiers;
use indoc::indoc;
use language::language_settings::InlayHintSettings;
use lsp::request::{GotoDefinition, GotoTypeDefinition};
use lsp::{
request::{GotoDefinition, GotoTypeDefinition},
References,
};
use util::assert_set_eq;
use workspace::item::Item;
@@ -761,7 +762,7 @@ mod tests {
let «variable» = A;
"});
cx.simulate_modifiers_change(Modifiers::secondary_key());
cx.simulate_modifiers_change(Modifiers::command());
cx.run_until_parked();
// Assert no link highlights
cx.assert_editor_text_highlights::<HoveredLinkState>(indoc! {"
@@ -822,7 +823,7 @@ mod tests {
])))
});
cx.simulate_mouse_move(hover_point, Modifiers::secondary_key());
cx.simulate_mouse_move(hover_point, Modifiers::command());
requests.next().await;
cx.background_executor.run_until_parked();
cx.assert_editor_text_highlights::<HoveredLinkState>(indoc! {"
@@ -848,7 +849,7 @@ mod tests {
])))
});
cx.simulate_mouse_move(hover_point, Modifiers::secondary_key());
cx.simulate_mouse_move(hover_point, Modifiers::command());
requests.next().await;
cx.background_executor.run_until_parked();
cx.assert_editor_text_highlights::<HoveredLinkState>(indoc! {"
@@ -867,7 +868,7 @@ mod tests {
// No definitions returned
Ok(Some(lsp::GotoDefinitionResponse::Link(vec![])))
});
cx.simulate_mouse_move(hover_point, Modifiers::secondary_key());
cx.simulate_mouse_move(hover_point, Modifiers::command());
requests.next().await;
cx.background_executor.run_until_parked();
@@ -911,7 +912,7 @@ mod tests {
])))
});
cx.simulate_modifiers_change(Modifiers::secondary_key());
cx.simulate_modifiers_change(Modifiers::command());
requests.next().await;
cx.background_executor.run_until_parked();
@@ -927,7 +928,7 @@ mod tests {
fn do_work() { test(); }
"});
cx.simulate_mouse_move(hover_point, Modifiers::secondary_key());
cx.simulate_mouse_move(hover_point, Modifiers::command());
cx.background_executor.run_until_parked();
cx.assert_editor_text_highlights::<HoveredLinkState>(indoc! {"
fn test() { do_work(); }
@@ -939,7 +940,7 @@ mod tests {
fn test() { do_work(); }
fn do_work() { tesˇt(); }
"});
cx.simulate_mouse_move(hover_point, Modifiers::secondary_key());
cx.simulate_mouse_move(hover_point, Modifiers::command());
cx.background_executor.run_until_parked();
cx.assert_editor_text_highlights::<HoveredLinkState>(indoc! {"
fn test() { do_work(); }
@@ -947,7 +948,7 @@ mod tests {
"});
// Cmd click with existing definition doesn't re-request and dismisses highlight
cx.simulate_click(hover_point, Modifiers::secondary_key());
cx.simulate_click(hover_point, Modifiers::command());
cx.lsp
.handle_request::<GotoDefinition, _, _>(move |_, _| async move {
// Empty definition response to make sure we aren't hitting the lsp and using
@@ -986,7 +987,7 @@ mod tests {
},
])))
});
cx.simulate_click(hover_point, Modifiers::secondary_key());
cx.simulate_click(hover_point, Modifiers::command());
requests.next().await;
cx.background_executor.run_until_parked();
cx.assert_editor_state(indoc! {"
@@ -1029,7 +1030,7 @@ mod tests {
s.set_pending_anchor_range(anchor_range, crate::SelectMode::Character)
});
});
cx.simulate_mouse_move(hover_point, Modifiers::secondary_key());
cx.simulate_mouse_move(hover_point, Modifiers::command());
cx.background_executor.run_until_parked();
assert!(requests.try_next().is_err());
cx.assert_editor_text_highlights::<HoveredLinkState>(indoc! {"
@@ -1143,7 +1144,7 @@ mod tests {
});
// Press cmd to trigger highlight
let hover_point = cx.pixel_position_for(midpoint);
cx.simulate_mouse_move(hover_point, Modifiers::secondary_key());
cx.simulate_mouse_move(hover_point, Modifiers::command());
cx.background_executor.run_until_parked();
cx.update_editor(|editor, cx| {
let snapshot = editor.snapshot(cx);
@@ -1174,9 +1175,9 @@ mod tests {
assert!(actual_ranges.is_empty(), "When no cmd is pressed, should have no hint label selected, but got: {actual_ranges:?}");
});
cx.simulate_modifiers_change(Modifiers::secondary_key());
cx.simulate_modifiers_change(Modifiers::command());
cx.background_executor.run_until_parked();
cx.simulate_click(hover_point, Modifiers::secondary_key());
cx.simulate_click(hover_point, Modifiers::command());
cx.background_executor.run_until_parked();
cx.assert_editor_state(indoc! {"
struct «TestStructˇ»;
@@ -1206,12 +1207,12 @@ mod tests {
Let's test a [complex](https://zed.dev/channel/had-(ˇoops)) case.
"});
cx.simulate_mouse_move(screen_coord, Modifiers::secondary_key());
cx.simulate_mouse_move(screen_coord, Modifiers::command());
cx.assert_editor_text_highlights::<HoveredLinkState>(indoc! {"
Let's test a [complex](«https://zed.dev/channel/had-(oops)ˇ») case.
"});
cx.simulate_click(screen_coord, Modifiers::secondary_key());
cx.simulate_click(screen_coord, Modifiers::command());
assert_eq!(
cx.opened_url(),
Some("https://zed.dev/channel/had-(oops)".into())
@@ -1234,12 +1235,12 @@ mod tests {
let screen_coord =
cx.pixel_position(indoc! {"https://zed.dev/relˇeases is a cool webpage."});
cx.simulate_mouse_move(screen_coord, Modifiers::secondary_key());
cx.simulate_mouse_move(screen_coord, Modifiers::command());
cx.assert_editor_text_highlights::<HoveredLinkState>(
indoc! {"«https://zed.dev/releasesˇ» is a cool webpage."},
);
cx.simulate_click(screen_coord, Modifiers::secondary_key());
cx.simulate_click(screen_coord, Modifiers::command());
assert_eq!(cx.opened_url(), Some("https://zed.dev/releases".into()));
}
@@ -1259,12 +1260,155 @@ mod tests {
let screen_coord =
cx.pixel_position(indoc! {"A cool webpage is https://zed.dev/releˇases"});
cx.simulate_mouse_move(screen_coord, Modifiers::secondary_key());
cx.simulate_mouse_move(screen_coord, Modifiers::command());
cx.assert_editor_text_highlights::<HoveredLinkState>(
indoc! {"A cool webpage is «https://zed.dev/releasesˇ»"},
);
cx.simulate_click(screen_coord, Modifiers::secondary_key());
cx.simulate_click(screen_coord, Modifiers::command());
assert_eq!(cx.opened_url(), Some("https://zed.dev/releases".into()));
}
#[gpui::test]
async fn test_cmd_click_back_and_forth(cx: &mut gpui::TestAppContext) {
init_test(cx, |_| {});
let mut cx = EditorLspTestContext::new_rust(lsp::ServerCapabilities::default(), cx).await;
cx.set_state(indoc! {"
fn test() {
do_work();
fn do_work() {
test();
}
"});
// cmd-click on `test` definition and usage, and expect Zed to allow going back and forth,
// because cmd-click first searches for definitions to go to, and then falls back to symbol usages.
let definition_hover_point = cx.pixel_position(indoc! {"
fn testˇ() {
do_work();
}
fn do_work() {
test();
}
"});
let definition_display_point = cx.display_point(indoc! {"
fn testˇ() {
do_work();
}
fn do_work() {
test();
}
"});
let definition_range = cx.lsp_range(indoc! {"
fn «test»() {
do_work();
}
fn do_work() {
test();
}
"});
let reference_hover_point = cx.pixel_position(indoc! {"
fn test() {
do_work();
}
fn do_work() {
testˇ();
}
"});
let reference_display_point = cx.display_point(indoc! {"
fn test() {
do_work();
}
fn do_work() {
testˇ();
}
"});
let reference_range = cx.lsp_range(indoc! {"
fn test() {
do_work();
}
fn do_work() {
«test»();
}
"});
let expected_uri = cx.buffer_lsp_url.clone();
cx.lsp
.handle_request::<GotoDefinition, _, _>(move |params, _| {
let expected_uri = expected_uri.clone();
async move {
assert_eq!(
params.text_document_position_params.text_document.uri,
expected_uri
);
let position = params.text_document_position_params.position;
Ok(Some(lsp::GotoDefinitionResponse::Link(
if position.line == reference_display_point.row()
&& position.character == reference_display_point.column()
{
vec![lsp::LocationLink {
origin_selection_range: None,
target_uri: params.text_document_position_params.text_document.uri,
target_range: definition_range,
target_selection_range: definition_range,
}]
} else {
// We cannot navigate to the definition outside of its reference point
Vec::new()
},
)))
}
});
let expected_uri = cx.buffer_lsp_url.clone();
cx.lsp.handle_request::<References, _, _>(move |params, _| {
let expected_uri = expected_uri.clone();
async move {
assert_eq!(
params.text_document_position.text_document.uri,
expected_uri
);
let position = params.text_document_position.position;
// Zed should not look for references when GotoDefinition succeeds with a non-empty result
assert_eq!(position.line, definition_display_point.row());
assert_eq!(position.character, definition_display_point.column());
Ok(Some(vec![lsp::Location {
uri: params.text_document_position.text_document.uri,
range: reference_range,
}]))
}
});
for _ in 0..5 {
cx.simulate_click(definition_hover_point, Modifiers::command());
cx.background_executor.run_until_parked();
cx.assert_editor_state(indoc! {"
fn test() {
do_work();
}
fn do_work() {
«testˇ»();
}
"});
cx.simulate_click(reference_hover_point, Modifiers::command());
cx.background_executor.run_until_parked();
cx.assert_editor_state(indoc! {"
fn «testˇ»() {
do_work();
}
fn do_work() {
test();
}
"});
}
}
}
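A hedged restatement of the navigation order this test exercises: cmd-click asks the language server for definitions first and only falls back to references when that response is empty. The names below are illustrative, not Zed's real types:

    // Definitions win; references are only consulted when no definition is returned.
    fn pick_target<T>(definitions: Vec<T>, references: Vec<T>) -> Option<T> {
        definitions
            .into_iter()
            .next()
            .or_else(|| references.into_iter().next())
    }

    fn main() {
        assert_eq!(pick_target(vec!["def"], vec!["ref"]), Some("def"));
        assert_eq!(pick_target(Vec::<&str>::new(), vec!["ref"]), Some("ref"));
    }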

View File

@@ -4,18 +4,17 @@ use crate::{
Anchor, AnchorRangeExt, DisplayPoint, Editor, EditorSettings, EditorSnapshot, EditorStyle,
ExcerptId, Hover, RangeToAnchorExt,
};
use futures::{stream::FuturesUnordered, FutureExt};
use futures::FutureExt;
use gpui::{
div, px, AnyElement, CursorStyle, Hsla, InteractiveElement, IntoElement, MouseButton,
div, px, AnyElement, CursorStyle, Hsla, InteractiveElement, IntoElement, Model, MouseButton,
ParentElement, Pixels, SharedString, Size, StatefulInteractiveElement, Styled, Task,
ViewContext, WeakView,
};
use language::{markdown, Bias, DiagnosticEntry, Language, LanguageRegistry, ParsedMarkdown};
use lsp::DiagnosticSeverity;
use project::{HoverBlock, HoverBlockKind, InlayHintLabelPart};
use project::{HoverBlock, HoverBlockKind, InlayHintLabelPart, Project};
use settings::Settings;
use smol::stream::StreamExt;
use std::{ops::Range, sync::Arc, time::Duration};
use ui::{prelude::*, Tooltip};
use util::TryFutureExt;
@@ -84,20 +83,13 @@ pub fn hover_at_inlay(editor: &mut Editor, inlay_hover: InlayHover, cx: &mut Vie
return;
};
if editor
.hover_state
.info_popovers
.iter()
.any(|InfoPopover { symbol_range, .. }| {
if let RangeInEditor::Inlay(range) = symbol_range {
if range == &inlay_hover.range {
// Hover triggered from same location as last time. Don't show again.
return true;
}
if let Some(InfoPopover { symbol_range, .. }) = &editor.hover_state.info_popover {
if let RangeInEditor::Inlay(range) = symbol_range {
if range == &inlay_hover.range {
// Hover triggered from same location as last time. Don't show again.
return;
}
false
})
{
}
hide_hover(editor, cx);
}
@@ -115,13 +107,15 @@ pub fn hover_at_inlay(editor: &mut Editor, inlay_hover: InlayHover, cx: &mut Vie
let parsed_content = parse_blocks(&blocks, &language_registry, None).await;
let hover_popover = InfoPopover {
project: project.clone(),
symbol_range: RangeInEditor::Inlay(inlay_hover.range.clone()),
blocks,
parsed_content,
};
this.update(&mut cx, |this, cx| {
// TODO: no background highlights happen for inlays currently
this.hover_state.info_popovers = vec![hover_popover];
this.hover_state.info_popover = Some(hover_popover);
cx.notify();
})?;
@@ -138,9 +132,8 @@ pub fn hover_at_inlay(editor: &mut Editor, inlay_hover: InlayHover, cx: &mut Vie
/// Triggered by the `Hover` action when the cursor is not over a symbol or when the
/// selections changed.
pub fn hide_hover(editor: &mut Editor, cx: &mut ViewContext<Editor>) -> bool {
let info_popovers = editor.hover_state.info_popovers.drain(..);
let diagnostics_popover = editor.hover_state.diagnostic_popover.take();
let did_hide = info_popovers.count() > 0 || diagnostics_popover.is_some();
let did_hide = editor.hover_state.info_popover.take().is_some()
| editor.hover_state.diagnostic_popover.take().is_some();
editor.hover_state.info_task = None;
editor.hover_state.triggered_from = None;
@@ -197,26 +190,22 @@ fn show_hover(
};
if !ignore_timeout {
if editor
.hover_state
.info_popovers
.iter()
.any(|InfoPopover { symbol_range, .. }| {
symbol_range
.as_text_range()
.map(|range| {
let hover_range = range.to_offset(&snapshot.buffer_snapshot);
// LSP returns a hover result for the end index of ranges that should be hovered, so we need to
// use an inclusive range here to check if we should dismiss the popover
(hover_range.start..=hover_range.end).contains(&multibuffer_offset)
})
.unwrap_or(false)
})
{
// Hover triggered from same location as last time. Don't show again.
return;
} else {
hide_hover(editor, cx);
if let Some(InfoPopover { symbol_range, .. }) = &editor.hover_state.info_popover {
if symbol_range
.as_text_range()
.map(|range| {
let hover_range = range.to_offset(&snapshot.buffer_snapshot);
// LSP returns a hover result for the end index of ranges that should be hovered, so we need to
// use an inclusive range here to check if we should dismiss the popover
(hover_range.start..=hover_range.end).contains(&multibuffer_offset)
})
.unwrap_or(false)
{
// Hover triggered from same location as last time. Don't show again.
return;
} else {
hide_hover(editor, cx);
}
}
}
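The comment in the hunk above about inclusive ranges reduces to a single check; a standalone sketch, assuming plain byte offsets rather than the editor's anchor types:

    use std::ops::Range;

    // Keep the popover if the cursor offset lies inside the hovered symbol range,
    // treating the range end as inclusive (LSP reports hovers for the end index too).
    fn hover_still_applies(symbol_range: &Range<usize>, offset: usize) -> bool {
        (symbol_range.start..=symbol_range.end).contains(&offset)
    }

    fn main() {
        assert!(hover_still_applies(&(10..14), 14)); // end offset still counts
        assert!(!hover_still_applies(&(10..14), 15));
    }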
@@ -295,14 +284,10 @@ fn show_hover(
});
})?;
let hovers_response = hover_request.await;
let language_registry = project.update(&mut cx, |p, _| p.languages().clone())?;
let hover_result = hover_request.await.ok().flatten();
let snapshot = this.update(&mut cx, |this, cx| this.snapshot(cx))?;
let mut hover_highlights = Vec::with_capacity(hovers_response.len());
let mut info_popovers = Vec::with_capacity(hovers_response.len());
let mut info_popover_tasks = hovers_response
.into_iter()
.map(|hover_result| async {
let hover_popover = match hover_result {
Some(hover_result) if !hover_result.is_empty() => {
// Create symbol range of anchors for highlighting and filtering of future requests.
let range = hover_result
.range
@@ -318,42 +303,44 @@ fn show_hover(
})
.unwrap_or_else(|| anchor..anchor);
let language_registry =
project.update(&mut cx, |p, _| p.languages().clone())?;
let blocks = hover_result.contents;
let language = hover_result.language;
let parsed_content = parse_blocks(&blocks, &language_registry, language).await;
(
range.clone(),
InfoPopover {
symbol_range: RangeInEditor::Text(range),
parsed_content,
},
)
})
.collect::<FuturesUnordered<_>>();
while let Some((highlight_range, info_popover)) = info_popover_tasks.next().await {
hover_highlights.push(highlight_range);
info_popovers.push(info_popover);
}
Some(InfoPopover {
project: project.clone(),
symbol_range: RangeInEditor::Text(range),
blocks,
parsed_content,
})
}
this.update(&mut cx, |editor, cx| {
if hover_highlights.is_empty() {
editor.clear_background_highlights::<HoverState>(cx);
} else {
_ => None,
};
this.update(&mut cx, |this, cx| {
if let Some(symbol_range) = hover_popover
.as_ref()
.and_then(|hover_popover| hover_popover.symbol_range.as_text_range())
{
// Highlight the selected symbol using a background highlight
editor.highlight_background::<HoverState>(
hover_highlights,
this.highlight_background::<HoverState>(
vec![symbol_range],
|theme| theme.element_hover, // todo update theme
cx,
);
} else {
this.clear_background_highlights::<HoverState>(cx);
}
editor.hover_state.info_popovers = info_popovers;
this.hover_state.info_popover = hover_popover;
cx.notify();
cx.refresh();
})?;
anyhow::Ok(())
Ok::<_, anyhow::Error>(())
}
.log_err()
});
@@ -435,7 +422,7 @@ async fn parse_blocks(
#[derive(Default)]
pub struct HoverState {
pub info_popovers: Vec<InfoPopover>,
pub info_popover: Option<InfoPopover>,
pub diagnostic_popover: Option<DiagnosticPopover>,
pub triggered_from: Option<Anchor>,
pub info_task: Option<Task<Option<()>>>,
@@ -443,7 +430,7 @@ pub struct HoverState {
impl HoverState {
pub fn visible(&self) -> bool {
!self.info_popovers.is_empty() || self.diagnostic_popover.is_some()
self.info_popover.is_some() || self.diagnostic_popover.is_some()
}
pub fn render(
@@ -462,20 +449,12 @@ impl HoverState {
.as_ref()
.map(|diagnostic_popover| &diagnostic_popover.local_diagnostic.range.start)
.or_else(|| {
self.info_popovers.iter().find_map(|info_popover| {
match &info_popover.symbol_range {
RangeInEditor::Text(range) => Some(&range.start),
RangeInEditor::Inlay(_) => None,
}
})
})
.or_else(|| {
self.info_popovers.iter().find_map(|info_popover| {
match &info_popover.symbol_range {
RangeInEditor::Text(_) => None,
RangeInEditor::Inlay(range) => Some(&range.inlay_position),
}
})
self.info_popover
.as_ref()
.map(|info_popover| match &info_popover.symbol_range {
RangeInEditor::Text(range) => &range.start,
RangeInEditor::Inlay(range) => &range.inlay_position,
})
})?;
let point = anchor.to_display_point(&snapshot.display_snapshot);
@@ -489,8 +468,8 @@ impl HoverState {
if let Some(diagnostic_popover) = self.diagnostic_popover.as_ref() {
elements.push(diagnostic_popover.render(style, max_size, cx));
}
for info_popover in &mut self.info_popovers {
elements.push(info_popover.render(style, max_size, workspace.clone(), cx));
if let Some(info_popover) = self.info_popover.as_mut() {
elements.push(info_popover.render(style, max_size, workspace, cx));
}
Some((point, elements))
@@ -499,7 +478,9 @@ impl HoverState {
#[derive(Debug, Clone)]
pub struct InfoPopover {
pub project: Model<Project>,
symbol_range: RangeInEditor,
pub blocks: Vec<HoverBlock>,
parsed_content: ParsedMarkdown,
}
@@ -683,19 +664,12 @@ mod tests {
cx.editor(|editor, _| {
assert!(editor.hover_state.visible());
assert_eq!(
editor.hover_state.info_popovers.len(),
1,
"Expected exactly one hover but got: {:?}",
editor.hover_state.info_popovers
);
let rendered = editor
.hover_state
.info_popovers
.first()
.cloned()
.unwrap()
.parsed_content;
assert_eq!(rendered.text, "some basic docs".to_string())
editor.hover_state.info_popover.clone().unwrap().blocks,
vec![HoverBlock {
text: "some basic docs".to_string(),
kind: HoverBlockKind::Markdown,
},]
)
});
// Mouse moved with no hover response dismisses
@@ -750,19 +724,12 @@ mod tests {
cx.condition(|editor, _| editor.hover_state.visible()).await;
cx.editor(|editor, _| {
assert_eq!(
editor.hover_state.info_popovers.len(),
1,
"Expected exactly one hover but got: {:?}",
editor.hover_state.info_popovers
);
let rendered = editor
.hover_state
.info_popovers
.first()
.cloned()
.unwrap()
.parsed_content;
assert_eq!(rendered.text, "some other basic docs".to_string())
editor.hover_state.info_popover.clone().unwrap().blocks,
vec![HoverBlock {
text: "some other basic docs".to_string(),
kind: HoverBlockKind::Markdown,
}]
)
});
}
@@ -806,21 +773,11 @@ mod tests {
cx.condition(|editor, _| editor.hover_state.visible()).await;
cx.editor(|editor, _| {
assert_eq!(
editor.hover_state.info_popovers.len(),
1,
"Expected exactly one hover but got: {:?}",
editor.hover_state.info_popovers
);
let rendered = editor
.hover_state
.info_popovers
.first()
.cloned()
.unwrap()
.parsed_content;
assert_eq!(
rendered.text,
"regular text for hover to show".to_string(),
editor.hover_state.info_popover.clone().unwrap().blocks,
vec![HoverBlock {
text: "regular text for hover to show".to_string(),
kind: HoverBlockKind::Markdown,
}],
"No empty string hovers should be shown"
);
});
@@ -867,21 +824,20 @@ mod tests {
.next()
.await;
let languages = cx.language_registry().clone();
cx.condition(|editor, _| editor.hover_state.visible()).await;
cx.editor(|editor, _| {
let blocks = editor.hover_state.info_popover.clone().unwrap().blocks;
assert_eq!(
editor.hover_state.info_popovers.len(),
1,
"Expected exactly one hover but got: {:?}",
editor.hover_state.info_popovers
blocks,
vec![HoverBlock {
text: markdown_string,
kind: HoverBlockKind::Markdown,
}],
);
let rendered = editor
.hover_state
.info_popovers
.first()
.cloned()
.unwrap()
.parsed_content;
let rendered = smol::block_on(parse_blocks(&blocks, &languages, None));
assert_eq!(
rendered.text,
code_str.trim(),
@@ -933,9 +889,7 @@ mod tests {
cx.background_executor.run_until_parked();
cx.editor(|Editor { hover_state, .. }, _| {
assert!(
hover_state.diagnostic_popover.is_some() && hover_state.info_popovers.is_empty()
)
assert!(hover_state.diagnostic_popover.is_some() && hover_state.info_popover.is_none())
});
// Info Popover shows after request responded to
@@ -1335,10 +1289,8 @@ mod tests {
cx.background_executor.run_until_parked();
cx.update_editor(|editor, cx| {
let hover_state = &editor.hover_state;
assert!(
hover_state.diagnostic_popover.is_none() && hover_state.info_popovers.len() == 1
);
let popover = hover_state.info_popovers.first().cloned().unwrap();
assert!(hover_state.diagnostic_popover.is_none() && hover_state.info_popover.is_some());
let popover = hover_state.info_popover.as_ref().unwrap();
let buffer_snapshot = editor.buffer().update(cx, |buffer, cx| buffer.snapshot(cx));
assert_eq!(
popover.symbol_range,
@@ -1390,10 +1342,8 @@ mod tests {
cx.background_executor.run_until_parked();
cx.update_editor(|editor, cx| {
let hover_state = &editor.hover_state;
assert!(
hover_state.diagnostic_popover.is_none() && hover_state.info_popovers.len() == 1
);
let popover = hover_state.info_popovers.first().cloned().unwrap();
assert!(hover_state.diagnostic_popover.is_none() && hover_state.info_popover.is_some());
let popover = hover_state.info_popover.as_ref().unwrap();
let buffer_snapshot = editor.buffer().update(cx, |buffer, cx| buffer.snapshot(cx));
assert_eq!(
popover.symbol_range,

View File

@@ -1194,7 +1194,7 @@ pub fn entry_git_aware_label_color(
selected: bool,
) -> Color {
if ignored {
Color::Ignored
Color::Disabled
} else {
match git_status {
Some(GitFileStatus::Added) => Color::Created,

View File

@@ -10,31 +10,6 @@ pub struct MouseContextMenu {
_subscription: Subscription,
}
impl MouseContextMenu {
pub(crate) fn new(
position: Point<Pixels>,
context_menu: View<ui::ContextMenu>,
cx: &mut ViewContext<Editor>,
) -> Self {
let context_menu_focus = context_menu.focus_handle(cx);
cx.focus(&context_menu_focus);
let _subscription =
cx.subscribe(&context_menu, move |this, _, _event: &DismissEvent, cx| {
this.mouse_context_menu.take();
if context_menu_focus.contains_focused(cx) {
this.focus(cx);
}
});
Self {
position,
context_menu,
_subscription,
}
}
}
pub fn deploy_context_menu(
editor: &mut Editor,
position: Point<Pixels>,
@@ -85,8 +60,21 @@ pub fn deploy_context_menu(
.action("Reveal in Finder", Box::new(RevealInFinder))
})
};
let mouse_context_menu = MouseContextMenu::new(position, context_menu, cx);
editor.mouse_context_menu = Some(mouse_context_menu);
let context_menu_focus = context_menu.focus_handle(cx);
cx.focus(&context_menu_focus);
let _subscription = cx.subscribe(&context_menu, move |this, _, _event: &DismissEvent, cx| {
this.mouse_context_menu.take();
if context_menu_focus.contains_focused(cx) {
this.focus(cx);
}
});
editor.mouse_context_menu = Some(MouseContextMenu {
position,
context_menu,
_subscription,
});
cx.notify();
}

View File

@@ -32,10 +32,6 @@ impl Autoscroll {
pub fn focused() -> Self {
Self::Strategy(AutoscrollStrategy::Focused)
}
/// Scrolls so that the newest cursor is roughly an n-th line from the top.
pub fn top_relative(n: usize) -> Self {
Self::Strategy(AutoscrollStrategy::TopRelative(n))
}
}
#[derive(PartialEq, Eq, Default, Clone, Copy)]
@@ -47,7 +43,6 @@ pub enum AutoscrollStrategy {
Focused,
Top,
Bottom,
TopRelative(usize),
}
impl AutoscrollStrategy {
@@ -183,10 +178,6 @@ impl Editor {
scroll_position.y = (target_bottom - visible_lines).max(0.0);
self.set_scroll_position_internal(scroll_position, local, true, cx);
}
AutoscrollStrategy::TopRelative(lines) => {
scroll_position.y = target_top - lines as f32;
self.set_scroll_position_internal(scroll_position, local, true, cx);
}
}
self.scroll_manager.last_autoscroll = Some((

View File

@@ -10,7 +10,6 @@ use gpui::{
use indoc::indoc;
use itertools::Itertools;
use language::{Buffer, BufferSnapshot, LanguageRegistry};
use multi_buffer::ExcerptRange;
use parking_lot::RwLock;
use project::{FakeFs, Project};
use std::{
@@ -21,14 +20,12 @@ use std::{
Arc,
},
};
use text::BufferId;
use ui::Context;
use util::{
assert_set_eq,
test::{generate_marked_text, marked_text_ranges},
};
use super::{build_editor, build_editor_with_project};
use super::build_editor_with_project;
pub struct EditorTestContext {
pub cx: gpui::VisualTestContext,
@@ -70,43 +67,6 @@ impl EditorTestContext {
}
}
pub fn new_multibuffer<const COUNT: usize>(
cx: &mut gpui::TestAppContext,
excerpts: [&str; COUNT],
) -> EditorTestContext {
let mut multibuffer = MultiBuffer::new(0, language::Capability::ReadWrite);
let buffer = cx.new_model(|cx| {
for (i, excerpt) in excerpts.into_iter().enumerate() {
let (text, ranges) = marked_text_ranges(excerpt, false);
let buffer =
cx.new_model(|_| Buffer::new(0, BufferId::new(i as u64 + 1).unwrap(), text));
multibuffer.push_excerpts(
buffer,
ranges.into_iter().map(|range| ExcerptRange {
context: range,
primary: None,
}),
cx,
);
}
multibuffer
});
let editor = cx.add_window(|cx| {
let editor = build_editor(buffer, cx);
editor.focus(cx);
editor
});
let editor_view = editor.root_view(cx).unwrap();
Self {
cx: VisualTestContext::from_window(*editor.deref(), cx),
window: editor.into(),
editor: editor_view,
assertion_cx: AssertionContextManager::new(),
}
}
pub fn condition(
&self,
predicate: impl FnMut(&Editor, &AppContext) -> bool,

View File

@@ -12,6 +12,10 @@ workspace = true
path = "src/extension_store.rs"
doctest = false
[[bin]]
name = "extension_json_schemas"
path = "src/extension_json_schemas.rs"
[dependencies]
anyhow.workspace = true
async-compression.workspace = true
@@ -29,7 +33,6 @@ lsp.workspace = true
node_runtime.workspace = true
project.workspace = true
schemars.workspace = true
semantic_version.workspace = true
serde.workspace = true
serde_json.workspace = true
settings.workspace = true

View File

@@ -195,13 +195,7 @@ impl ExtensionBuilder {
&grammar_metadata.rev,
)?;
let base_grammar_path = grammar_metadata
.path
.as_ref()
.map(|path| grammar_repo_dir.join(path))
.unwrap_or(grammar_repo_dir);
let src_path = base_grammar_path.join("src");
let src_path = grammar_repo_dir.join("src");
let parser_path = src_path.join("parser.c");
let scanner_path = src_path.join("scanner.c");
@@ -485,7 +479,7 @@ impl ExtensionBuilder {
fn populate_defaults(manifest: &mut ExtensionManifest, extension_path: &Path) -> Result<()> {
// For legacy extensions on the v0 schema (aka, using `extension.json`), clear out any existing
// contents of the computed fields, since we don't care what the existing values are.
if manifest.schema_version.is_v0() {
if manifest.schema_version == 0 {
manifest.languages.clear();
manifest.grammars.clear();
manifest.themes.clear();
@@ -528,7 +522,7 @@ fn populate_defaults(manifest: &mut ExtensionManifest, extension_path: &Path) ->
// For legacy extensions on the v0 schema (aka, using `extension.json`), we want to populate the grammars in
// the manifest using the contents of the `grammars` directory.
if manifest.schema_version.is_v0() {
if manifest.schema_version == 0 {
let grammars_dir = extension_path.join("grammars");
if grammars_dir.exists() {
for entry in fs::read_dir(&grammars_dir).context("failed to list grammars dir")? {
@@ -539,8 +533,6 @@ fn populate_defaults(manifest: &mut ExtensionManifest, extension_path: &Path) ->
struct GrammarConfigToml {
pub repository: String,
pub commit: String,
#[serde(default)]
pub path: Option<String>,
}
let grammar_config = fs::read_to_string(&grammar_path)?;
@@ -556,7 +548,6 @@ fn populate_defaults(manifest: &mut ExtensionManifest, extension_path: &Path) ->
GrammarManifestEntry {
repository: grammar_config.repository,
rev: grammar_config.commit,
path: grammar_config.path,
},
);
}
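One way to read the `base_grammar_path` change above: an optional `path` in a grammar's config selects a subdirectory of the cloned repository, and the parser sources are then expected under `src/` of that directory. A small sketch under that assumption (standalone, illustrative paths):

    use std::path::{Path, PathBuf};

    // Resolve where parser.c / scanner.c are looked up for a grammar checkout.
    fn grammar_src_dir(repo_dir: &Path, configured_path: Option<&str>) -> PathBuf {
        let base = match configured_path {
            Some(subdir) => repo_dir.join(subdir),
            None => repo_dir.to_path_buf(),
        };
        base.join("src")
    }

    fn main() {
        let repo = Path::new("/work/tree-sitter-foo");
        assert_eq!(grammar_src_dir(repo, None), Path::new("/work/tree-sitter-foo/src"));
        assert_eq!(
            grammar_src_dir(repo, Some("foo")),
            Path::new("/work/tree-sitter-foo/foo/src")
        );
    }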

View File

@@ -0,0 +1,17 @@
use language::LanguageConfig;
use schemars::schema_for;
use theme::ThemeFamilyContent;
fn main() {
let theme_family_schema = schema_for!(ThemeFamilyContent);
let language_config_schema = schema_for!(LanguageConfig);
println!(
"{}",
serde_json::to_string_pretty(&theme_family_schema).unwrap()
);
println!(
"{}",
serde_json::to_string_pretty(&language_config_schema).unwrap()
);
}

View File

@@ -1,7 +1,6 @@
use crate::wasm_host::{wit::LanguageServerConfig, WasmExtension, WasmHost};
use anyhow::{anyhow, Context, Result};
use async_trait::async_trait;
use collections::HashMap;
use futures::{Future, FutureExt};
use gpui::AsyncAppContext;
use language::{Language, LanguageServerName, LspAdapter, LspAdapterDelegate};
@@ -57,24 +56,6 @@ impl LspAdapter for ExtensionLspAdapter {
.host
.path_from_extension(&self.extension.manifest.id, command.command.as_ref());
// TODO: This should now be done via the `zed::make_file_executable` function in
// the Zed extension API, but we're leaving these existing usages in place temporarily
// to avoid any compatibility issues between Zed and the extension versions.
//
// We can remove once the following extension versions no longer see any use:
// - toml@0.0.2
// - zig@0.0.1
if ["toml", "zig"].contains(&self.extension.manifest.id.as_ref()) {
#[cfg(not(windows))]
{
use std::fs::{self, Permissions};
use std::os::unix::fs::PermissionsExt;
fs::set_permissions(&path, Permissions::from_mode(0o755))
.context("failed to set file permissions")?;
}
}
Ok(LanguageServerBinary {
path,
arguments: command.args.into_iter().map(|arg| arg.into()).collect(),
@@ -112,25 +93,6 @@ impl LspAdapter for ExtensionLspAdapter {
None
}
fn language_ids(&self) -> HashMap<String, String> {
// TODO: The language IDs can be provided via the language server options
// in `extension.toml` now, but we're leaving these existing usages in place temporarily
// to avoid any compatibility issues between Zed and the extension versions.
//
// We can remove once the following extension versions no longer see any use:
// - php@0.0.1
if self.extension.manifest.id.as_ref() == "php" {
return HashMap::from_iter([("PHP".into(), "php".into())]);
}
self.extension
.manifest
.language_servers
.get(&LanguageServerName(self.config.name.clone().into()))
.map(|server| server.language_ids.clone())
.unwrap_or_default()
}
async fn initialization_options(
self: Arc<Self>,
delegate: &Arc<dyn LspAdapterDelegate>,

View File

@@ -1,18 +1,17 @@
use anyhow::{anyhow, Context, Result};
use collections::{BTreeMap, HashMap};
use collections::BTreeMap;
use fs::Fs;
use language::LanguageServerName;
use semantic_version::SemanticVersion;
use serde::{Deserialize, Serialize};
use std::{
ffi::OsStr,
fmt,
path::{Path, PathBuf},
sync::Arc,
};
use util::SemanticVersion;
/// This is the old version of the extension manifest, from when it was `extension.json`.
#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
#[derive(Clone, PartialEq, Eq, Debug, Deserialize, Serialize)]
pub struct OldExtensionManifest {
pub name: String,
pub version: Arc<str>,
@@ -32,30 +31,12 @@ pub struct OldExtensionManifest {
pub grammars: BTreeMap<Arc<str>, PathBuf>,
}
/// The schema version of the [`ExtensionManifest`].
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, Serialize, Deserialize)]
pub struct SchemaVersion(pub i32);
impl fmt::Display for SchemaVersion {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.0)
}
}
impl SchemaVersion {
pub const ZERO: Self = Self(0);
pub fn is_v0(&self) -> bool {
self == &Self::ZERO
}
}
#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
#[derive(Clone, PartialEq, Eq, Debug, Deserialize, Serialize)]
pub struct ExtensionManifest {
pub id: Arc<str>,
pub name: String,
pub version: Arc<str>,
pub schema_version: SchemaVersion,
pub schema_version: i32,
#[serde(default)]
pub description: Option<String>,
@@ -92,15 +73,11 @@ pub struct GrammarManifestEntry {
pub repository: String,
#[serde(alias = "commit")]
pub rev: String,
#[serde(default)]
pub path: Option<String>,
}
#[derive(Clone, PartialEq, Eq, Debug, Deserialize, Serialize)]
pub struct LanguageServerManifestEntry {
pub language: Arc<str>,
#[serde(default)]
pub language_ids: HashMap<String, String>,
}
impl ExtensionManifest {
@@ -145,7 +122,7 @@ fn manifest_from_old_manifest(
description: manifest_json.description,
repository: manifest_json.repository,
authors: manifest_json.authors,
schema_version: SchemaVersion::ZERO,
schema_version: 0,
lib: Default::default(),
themes: {
let mut themes = manifest_json.themes.into_values().collect::<Vec<_>>();

View File

@@ -1,39 +0,0 @@
use anyhow::Result;
use collections::HashMap;
use gpui::AppContext;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::Settings;
use std::sync::Arc;
#[derive(Deserialize, Serialize, Debug, Default, Clone, JsonSchema)]
pub struct ExtensionSettings {
#[serde(default)]
pub auto_update_extensions: HashMap<Arc<str>, bool>,
}
impl ExtensionSettings {
pub fn should_auto_update(&self, extension_id: &str) -> bool {
self.auto_update_extensions
.get(extension_id)
.copied()
.unwrap_or(true)
}
}
impl Settings for ExtensionSettings {
const KEY: Option<&'static str> = None;
type FileContent = Self;
fn load(
_default_value: &Self::FileContent,
user_values: &[&Self::FileContent],
_cx: &mut AppContext,
) -> Result<Self>
where
Self: Sized,
{
Ok(user_values.get(0).copied().cloned().unwrap_or_default())
}
}
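The settings type above defaults to auto-updating every extension unless the user opts out per extension id. An illustrative usage snippet, assuming the struct exactly as shown:

    // With no override present, should_auto_update falls back to true.
    let mut settings = ExtensionSettings::default();
    assert!(settings.should_auto_update("gleam"));

    settings.auto_update_extensions.insert("gleam".into(), false);
    assert!(!settings.should_auto_update("gleam"));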

View File

@@ -1,19 +1,17 @@
pub mod extension_builder;
mod extension_lsp_adapter;
mod extension_manifest;
mod extension_settings;
mod wasm_host;
#[cfg(test)]
mod extension_store_test;
use crate::extension_manifest::SchemaVersion;
use crate::{extension_lsp_adapter::ExtensionLspAdapter, wasm_host::wit};
use anyhow::{anyhow, bail, Context as _, Result};
use async_compression::futures::bufread::GzipDecoder;
use async_tar::Archive;
use client::{telemetry::Telemetry, Client, ExtensionMetadata, GetExtensionsResponse};
use collections::{btree_map, BTreeMap, HashSet};
use collections::{hash_map, BTreeMap, HashMap, HashSet};
use extension_builder::{CompileExtensionOptions, ExtensionBuilder};
use fs::{Fs, RemoveOptions};
use futures::{
@@ -24,20 +22,13 @@ use futures::{
io::BufReader,
select_biased, AsyncReadExt as _, Future, FutureExt as _, StreamExt as _,
};
use gpui::{
actions, AppContext, AsyncAppContext, Context, EventEmitter, Global, Model, ModelContext, Task,
WeakModel,
};
use gpui::{actions, AppContext, Context, EventEmitter, Global, Model, ModelContext, Task};
use language::{
ContextProviderWithTasks, LanguageConfig, LanguageMatcher, LanguageQueries, LanguageRegistry,
QUERY_FILENAME_PREFIXES,
};
use node_runtime::NodeRuntime;
use semantic_version::SemanticVersion;
use serde::{Deserialize, Serialize};
use settings::Settings;
use std::ops::RangeInclusive;
use std::str::FromStr;
use std::{
cmp::Ordering,
path::{self, Path, PathBuf},
@@ -52,47 +43,16 @@ use util::{
paths::EXTENSIONS_DIR,
ResultExt,
};
use wasm_host::{
wit::{is_supported_wasm_api_version, wasm_api_version_range},
WasmExtension, WasmHost,
};
use wasm_host::{WasmExtension, WasmHost};
pub use extension_manifest::{
ExtensionLibraryKind, ExtensionManifest, GrammarManifestEntry, OldExtensionManifest,
};
pub use extension_settings::ExtensionSettings;
const RELOAD_DEBOUNCE_DURATION: Duration = Duration::from_millis(200);
const FS_WATCH_LATENCY: Duration = Duration::from_millis(100);
/// The current extension [`SchemaVersion`] supported by Zed.
const CURRENT_SCHEMA_VERSION: SchemaVersion = SchemaVersion(1);
/// Returns the [`SchemaVersion`] range that is compatible with this version of Zed.
pub fn schema_version_range() -> RangeInclusive<SchemaVersion> {
SchemaVersion::ZERO..=CURRENT_SCHEMA_VERSION
}
/// Returns whether the given extension version is compatible with this version of Zed.
pub fn is_version_compatible(extension_version: &ExtensionMetadata) -> bool {
let schema_version = extension_version.manifest.schema_version.unwrap_or(0);
if CURRENT_SCHEMA_VERSION.0 < schema_version {
return false;
}
if let Some(wasm_api_version) = extension_version
.manifest
.wasm_api_version
.as_ref()
.and_then(|wasm_api_version| SemanticVersion::from_str(wasm_api_version).ok())
{
if !is_supported_wasm_api_version(wasm_api_version) {
return false;
}
}
true
}
const CURRENT_SCHEMA_VERSION: i64 = 1;
pub struct ExtensionStore {
builder: Arc<ExtensionBuilder>,
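A hedged paraphrase of the `is_version_compatible` logic above: an extension is compatible when its schema version does not exceed the host's current schema version and, if it declares a wasm API version, that version falls inside the host's supported range. A sketch with simplified stand-in types (the version numbers in `main` are illustrative):

    use std::ops::RangeInclusive;

    // Simplified compatibility rule: schema must not be newer than the host, and any
    // declared wasm API version (major, minor, patch) must sit in the supported range.
    fn is_compatible(
        extension_schema: i32,
        host_schema: i32,
        wasm_api_version: Option<(u16, u16, u16)>,
        supported_wasm_api: RangeInclusive<(u16, u16, u16)>,
    ) -> bool {
        if extension_schema > host_schema {
            return false;
        }
        wasm_api_version.map_or(true, |version| supported_wasm_api.contains(&version))
    }

    fn main() {
        let supported = (0, 0, 1)..=(0, 0, 4);
        assert!(is_compatible(1, 1, Some((0, 0, 4)), supported.clone()));
        assert!(!is_compatible(2, 1, None, supported));
    }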
@@ -103,7 +63,7 @@ pub struct ExtensionStore {
reload_tx: UnboundedSender<Option<Arc<str>>>,
reload_complete_senders: Vec<oneshot::Sender<()>>,
installed_dir: PathBuf,
outstanding_operations: BTreeMap<Arc<str>, ExtensionOperation>,
outstanding_operations: HashMap<Arc<str>, ExtensionOperation>,
index_path: PathBuf,
language_registry: Arc<LanguageRegistry>,
theme_registry: Arc<ThemeRegistry>,
@@ -113,8 +73,17 @@ pub struct ExtensionStore {
tasks: Vec<Task<()>>,
}
#[derive(Clone)]
pub enum ExtensionStatus {
NotInstalled,
Installing,
Upgrading,
Installed(Arc<str>),
Removing,
}
#[derive(Clone, Copy)]
pub enum ExtensionOperation {
enum ExtensionOperation {
Upgrade,
Install,
Remove,
@@ -143,8 +112,8 @@ pub struct ExtensionIndex {
#[derive(Clone, PartialEq, Eq, Debug, Deserialize, Serialize)]
pub struct ExtensionIndexEntry {
pub manifest: Arc<ExtensionManifest>,
pub dev: bool,
manifest: Arc<ExtensionManifest>,
dev: bool,
}
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Debug, Deserialize, Serialize)]
@@ -171,8 +140,6 @@ pub fn init(
theme_registry: Arc<ThemeRegistry>,
cx: &mut AppContext,
) {
ExtensionSettings::register(cx);
let store = cx.new_model(move |cx| {
ExtensionStore::new(
EXTENSIONS_DIR.clone(),
@@ -196,11 +163,6 @@ pub fn init(
}
impl ExtensionStore {
pub fn try_global(cx: &AppContext) -> Option<Model<Self>> {
cx.try_global::<GlobalExtensionStore>()
.map(|store| store.0.clone())
}
pub fn global(cx: &AppContext) -> Model<Self> {
cx.global::<GlobalExtensionStore>().0.clone()
}
@@ -281,20 +243,10 @@ impl ExtensionStore {
// Immediately load all of the extensions in the initial manifest. If the
// index needs to be rebuilt, then enqueue a reload.
let load_initial_extensions = this.extensions_updated(extension_index, cx);
let mut reload_future = None;
if extension_index_needs_rebuild {
reload_future = Some(this.reload(None, cx));
let _ = this.reload(None, cx);
}
cx.spawn(|this, mut cx| async move {
if let Some(future) = reload_future {
future.await;
}
this.update(&mut cx, |this, cx| this.check_for_updates(cx))
.ok();
})
.detach();
// Perform all extension loading in a single task to ensure that we
// never attempt to simultaneously load/unload extensions from multiple
// parallel tasks.
@@ -384,12 +336,16 @@ impl ExtensionStore {
self.installed_dir.clone()
}
pub fn outstanding_operations(&self) -> &BTreeMap<Arc<str>, ExtensionOperation> {
&self.outstanding_operations
}
pub fn installed_extensions(&self) -> &BTreeMap<Arc<str>, ExtensionIndexEntry> {
&self.extension_index.extensions
pub fn extension_status(&self, extension_id: &str) -> ExtensionStatus {
match self.outstanding_operations.get(extension_id) {
Some(ExtensionOperation::Install) => ExtensionStatus::Installing,
Some(ExtensionOperation::Remove) => ExtensionStatus::Removing,
Some(ExtensionOperation::Upgrade) => ExtensionStatus::Upgrading,
None => match self.extension_index.extensions.get(extension_id) {
Some(extension) => ExtensionStatus::Installed(extension.manifest.version.clone()),
None => ExtensionStatus::NotInstalled,
},
}
}
pub fn dev_extensions(&self) -> impl Iterator<Item = &Arc<ExtensionManifest>> {
@@ -421,109 +377,7 @@ impl ExtensionStore {
query.push(("filter", search));
}
self.fetch_extensions_from_api("/extensions", &query, cx)
}
pub fn fetch_extensions_with_update_available(
&mut self,
cx: &mut ModelContext<Self>,
) -> Task<Result<Vec<ExtensionMetadata>>> {
let schema_versions = schema_version_range();
let wasm_api_versions = wasm_api_version_range();
let extension_settings = ExtensionSettings::get_global(cx);
let extension_ids = self
.extension_index
.extensions
.keys()
.map(|id| id.as_ref())
.filter(|id| extension_settings.should_auto_update(id))
.collect::<Vec<_>>()
.join(",");
let task = self.fetch_extensions_from_api(
"/extensions/updates",
&[
("min_schema_version", &schema_versions.start().to_string()),
("max_schema_version", &schema_versions.end().to_string()),
(
"min_wasm_api_version",
&wasm_api_versions.start().to_string(),
),
("max_wasm_api_version", &wasm_api_versions.end().to_string()),
("ids", &extension_ids),
],
cx,
);
cx.spawn(move |this, mut cx| async move {
let extensions = task.await?;
this.update(&mut cx, |this, _cx| {
extensions
.into_iter()
.filter(|extension| {
this.extension_index.extensions.get(&extension.id).map_or(
true,
|installed_extension| {
installed_extension.manifest.version != extension.manifest.version
},
)
})
.collect()
})
})
}
pub fn fetch_extension_versions(
&self,
extension_id: &str,
cx: &mut ModelContext<Self>,
) -> Task<Result<Vec<ExtensionMetadata>>> {
self.fetch_extensions_from_api(&format!("/extensions/{extension_id}"), &[], cx)
}
pub fn check_for_updates(&mut self, cx: &mut ModelContext<Self>) {
let task = self.fetch_extensions_with_update_available(cx);
cx.spawn(move |this, mut cx| async move {
Self::upgrade_extensions(this, task.await?, &mut cx).await
})
.detach();
}
async fn upgrade_extensions(
this: WeakModel<Self>,
extensions: Vec<ExtensionMetadata>,
cx: &mut AsyncAppContext,
) -> Result<()> {
for extension in extensions {
let task = this.update(cx, |this, cx| {
if let Some(installed_extension) =
this.extension_index.extensions.get(&extension.id)
{
let installed_version =
SemanticVersion::from_str(&installed_extension.manifest.version).ok()?;
let latest_version =
SemanticVersion::from_str(&extension.manifest.version).ok()?;
if installed_version >= latest_version {
return None;
}
}
Some(this.upgrade_extension(extension.id, extension.manifest.version, cx))
})?;
if let Some(task) = task {
task.await.log_err();
}
}
anyhow::Ok(())
}
fn fetch_extensions_from_api(
&self,
path: &str,
query: &[(&str, &str)],
cx: &mut ModelContext<'_, ExtensionStore>,
) -> Task<Result<Vec<ExtensionMetadata>>> {
let url = self.http_client.build_zed_api_url(path, &query);
let url = self.http_client.build_zed_api_url("/extensions", &query);
let http_client = self.http_client.clone();
cx.spawn(move |_, _| async move {
let mut response = http_client
@@ -557,7 +411,6 @@ impl ExtensionStore {
cx: &mut ModelContext<Self>,
) {
self.install_or_upgrade_extension(extension_id, version, ExtensionOperation::Install, cx)
.detach_and_log_err(cx);
}
fn install_or_upgrade_extension_at_endpoint(
@@ -566,16 +419,15 @@ impl ExtensionStore {
url: Url,
operation: ExtensionOperation,
cx: &mut ModelContext<Self>,
) -> Task<Result<()>> {
) {
let extension_dir = self.installed_dir.join(extension_id.as_ref());
let http_client = self.http_client.clone();
let fs = self.fs.clone();
match self.outstanding_operations.entry(extension_id.clone()) {
btree_map::Entry::Occupied(_) => return Task::ready(Ok(())),
btree_map::Entry::Vacant(e) => e.insert(operation),
hash_map::Entry::Occupied(_) => return,
hash_map::Entry::Vacant(e) => e.insert(operation),
};
cx.notify();
cx.spawn(move |this, mut cx| async move {
let _finish = util::defer({
@@ -625,6 +477,7 @@ impl ExtensionStore {
anyhow::Ok(())
})
.detach_and_log_err(cx);
}
pub fn install_latest_extension(
@@ -634,23 +487,9 @@ impl ExtensionStore {
) {
log::info!("installing extension {extension_id} latest version");
let schema_versions = schema_version_range();
let wasm_api_versions = wasm_api_version_range();
let Some(url) = self
.http_client
.build_zed_api_url(
&format!("/extensions/{extension_id}/download"),
&[
("min_schema_version", &schema_versions.start().to_string()),
("max_schema_version", &schema_versions.end().to_string()),
(
"min_wasm_api_version",
&wasm_api_versions.start().to_string(),
),
("max_wasm_api_version", &wasm_api_versions.end().to_string()),
],
)
.build_zed_api_url(&format!("/extensions/{extension_id}/download"), &[])
.log_err()
else {
return;
@@ -661,8 +500,7 @@ impl ExtensionStore {
url,
ExtensionOperation::Install,
cx,
)
.detach_and_log_err(cx);
);
}
pub fn upgrade_extension(
@@ -670,7 +508,7 @@ impl ExtensionStore {
extension_id: Arc<str>,
version: Arc<str>,
cx: &mut ModelContext<Self>,
) -> Task<Result<()>> {
) {
self.install_or_upgrade_extension(extension_id, version, ExtensionOperation::Upgrade, cx)
}
@@ -680,7 +518,7 @@ impl ExtensionStore {
version: Arc<str>,
operation: ExtensionOperation,
cx: &mut ModelContext<Self>,
) -> Task<Result<()>> {
) {
log::info!("installing extension {extension_id} {version}");
let Some(url) = self
.http_client
@@ -690,10 +528,10 @@ impl ExtensionStore {
)
.log_err()
else {
return Task::ready(Ok(()));
return;
};
self.install_or_upgrade_extension_at_endpoint(extension_id, url, operation, cx)
self.install_or_upgrade_extension_at_endpoint(extension_id, url, operation, cx);
}
pub fn uninstall_extension(&mut self, extension_id: Arc<str>, cx: &mut ModelContext<Self>) {
@@ -701,8 +539,8 @@ impl ExtensionStore {
let fs = self.fs.clone();
match self.outstanding_operations.entry(extension_id.clone()) {
btree_map::Entry::Occupied(_) => return,
btree_map::Entry::Vacant(e) => e.insert(ExtensionOperation::Remove),
hash_map::Entry::Occupied(_) => return,
hash_map::Entry::Vacant(e) => e.insert(ExtensionOperation::Remove),
};
cx.spawn(move |this, mut cx| async move {
@@ -751,8 +589,8 @@ impl ExtensionStore {
if !this.update(&mut cx, |this, cx| {
match this.outstanding_operations.entry(extension_id.clone()) {
btree_map::Entry::Occupied(_) => return false,
btree_map::Entry::Vacant(e) => e.insert(ExtensionOperation::Remove),
hash_map::Entry::Occupied(_) => return false,
hash_map::Entry::Vacant(e) => e.insert(ExtensionOperation::Remove),
};
cx.notify();
true
@@ -819,8 +657,8 @@ impl ExtensionStore {
let fs = self.fs.clone();
match self.outstanding_operations.entry(extension_id.clone()) {
btree_map::Entry::Occupied(_) => return,
btree_map::Entry::Vacant(e) => e.insert(ExtensionOperation::Upgrade),
hash_map::Entry::Occupied(_) => return,
hash_map::Entry::Vacant(e) => e.insert(ExtensionOperation::Upgrade),
};
cx.notify();
@@ -1141,14 +979,6 @@ impl ExtensionStore {
let Ok(extension_dir) = extension_dir else {
continue;
};
if extension_dir
.file_name()
.map_or(false, |file_name| file_name == ".DS_Store")
{
continue;
}
Self::add_extension_to_index(fs.clone(), extension_dir, &mut index)
.await
.log_err();

View File

@@ -1,5 +1,3 @@
use crate::extension_manifest::SchemaVersion;
use crate::extension_settings::ExtensionSettings;
use crate::{
Event, ExtensionIndex, ExtensionIndexEntry, ExtensionIndexLanguageEntry,
ExtensionIndexThemeEntry, ExtensionManifest, ExtensionStore, GrammarManifestEntry,
@@ -15,7 +13,7 @@ use node_runtime::FakeNodeRuntime;
use parking_lot::Mutex;
use project::Project;
use serde_json::json;
use settings::{Settings as _, SettingsStore};
use settings::SettingsStore;
use std::{
ffi::OsString,
path::{Path, PathBuf},
@@ -37,7 +35,11 @@ fn init_logger() {
#[gpui::test]
async fn test_extension_store(cx: &mut TestAppContext) {
init_test(cx);
cx.update(|cx| {
let store = SettingsStore::test(cx);
cx.set_global(store);
theme::init(theme::LoadThemes::JustBase, cx);
});
let fs = FakeFs::new(cx.executor());
let http_client = FakeHttpClient::with_200_response();
@@ -144,7 +146,7 @@ async fn test_extension_store(cx: &mut TestAppContext) {
id: "zed-ruby".into(),
name: "Zed Ruby".into(),
version: "1.0.0".into(),
schema_version: SchemaVersion::ZERO,
schema_version: 0,
description: None,
authors: Vec::new(),
repository: None,
@@ -169,7 +171,7 @@ async fn test_extension_store(cx: &mut TestAppContext) {
id: "zed-monokai".into(),
name: "Zed Monokai".into(),
version: "2.0.0".into(),
schema_version: SchemaVersion::ZERO,
schema_version: 0,
description: None,
authors: vec![],
repository: None,
@@ -326,7 +328,7 @@ async fn test_extension_store(cx: &mut TestAppContext) {
id: "zed-gruvbox".into(),
name: "Zed Gruvbox".into(),
version: "1.0.0".into(),
schema_version: SchemaVersion::ZERO,
schema_version: 0,
description: None,
authors: vec![],
repository: None,
@@ -446,7 +448,7 @@ async fn test_extension_store_with_gleam_extension(cx: &mut TestAppContext) {
let cache_dir = root_dir.join("target");
let gleam_extension_dir = root_dir.join("extensions").join("gleam");
let fs = Arc::new(RealFs::default());
let fs = Arc::new(RealFs);
let extensions_dir = temp_tree(json!({
"installed": {},
"work": {}
@@ -483,6 +485,7 @@ async fn test_extension_store_with_gleam_extension(cx: &mut TestAppContext) {
move |request| {
let language_server_version = language_server_version.clone();
async move {
language_server_version.lock().http_request_count += 1;
let version = language_server_version.lock().version.clone();
let binary_contents = language_server_version.lock().binary_contents.clone();
@@ -492,7 +495,6 @@ async fn test_extension_store_with_gleam_extension(cx: &mut TestAppContext) {
let uri = request.uri().to_string();
if uri == github_releases_uri {
language_server_version.lock().http_request_count += 1;
Ok(Response::new(
json!([
{
@@ -512,7 +514,6 @@ async fn test_extension_store_with_gleam_extension(cx: &mut TestAppContext) {
.into(),
))
} else if uri == asset_download_uri {
language_server_version.lock().http_request_count += 1;
let mut bytes = Vec::<u8>::new();
let mut archive = async_tar::Builder::new(&mut bytes);
let mut header = async_tar::Header::new_gnu();
@@ -671,7 +672,6 @@ fn init_test(cx: &mut TestAppContext) {
cx.set_global(store);
theme::init(theme::LoadThemes::JustBase, cx);
Project::init_settings(cx);
ExtensionSettings::register(cx);
language::init(cx);
});
}

View File

@@ -14,12 +14,11 @@ use futures::{
use gpui::BackgroundExecutor;
use language::LanguageRegistry;
use node_runtime::NodeRuntime;
use semantic_version::SemanticVersion;
use std::{
path::{Path, PathBuf},
sync::{Arc, OnceLock},
};
use util::http::HttpClient;
use util::{http::HttpClient, SemanticVersion};
use wasmtime::{
component::{Component, ResourceTable},
Engine, Store,
@@ -204,11 +203,11 @@ pub fn parse_wasm_extension_version(
fn parse_wasm_extension_version_custom_section(data: &[u8]) -> Option<SemanticVersion> {
if data.len() == 6 {
Some(SemanticVersion::new(
u16::from_be_bytes([data[0], data[1]]) as _,
u16::from_be_bytes([data[2], data[3]]) as _,
u16::from_be_bytes([data[4], data[5]]) as _,
))
Some(SemanticVersion {
major: u16::from_be_bytes([data[0], data[1]]) as _,
minor: u16::from_be_bytes([data[2], data[3]]) as _,
patch: u16::from_be_bytes([data[4], data[5]]) as _,
})
} else {
None
}
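As a reference for the byte layout above: the extension API version is stored in a 6-byte custom section as three big-endian u16 values. A minimal, self-contained sketch of that round trip (the helper names are made up for illustration; only the decoding mirrors the diff):

fn encode_version(major: u16, minor: u16, patch: u16) -> [u8; 6] {
    let mut bytes = [0u8; 6];
    bytes[0..2].copy_from_slice(&major.to_be_bytes());
    bytes[2..4].copy_from_slice(&minor.to_be_bytes());
    bytes[4..6].copy_from_slice(&patch.to_be_bytes());
    bytes
}

fn decode_version(data: &[u8]) -> Option<(u16, u16, u16)> {
    // Mirrors parse_wasm_extension_version_custom_section: anything other
    // than exactly six bytes is rejected.
    if data.len() != 6 {
        return None;
    }
    Some((
        u16::from_be_bytes([data[0], data[1]]),
        u16::from_be_bytes([data[2], data[3]]),
        u16::from_be_bytes([data[4], data[5]]),
    ))
}

fn main() {
    assert_eq!(decode_version(&encode_version(0, 0, 5)), Some((0, 0, 5)));
}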

View File

@@ -1,18 +1,17 @@
mod since_v0_0_1;
mod since_v0_0_4;
mod v0_0_1;
mod v0_0_4;
use super::{wasm_engine, WasmState};
use anyhow::{Context, Result};
use language::LspAdapterDelegate;
use semantic_version::SemanticVersion;
use std::ops::RangeInclusive;
use std::sync::Arc;
use util::SemanticVersion;
use wasmtime::{
component::{Component, Instance, Linker, Resource},
Store,
};
use since_v0_0_4 as latest;
use v0_0_4 as latest;
pub use latest::{Command, LanguageServerConfig};
@@ -29,20 +28,9 @@ fn wasi_view(state: &mut WasmState) -> &mut WasmState {
state
}
/// Returns whether the given Wasm API version is supported by the Wasm host.
pub fn is_supported_wasm_api_version(version: SemanticVersion) -> bool {
wasm_api_version_range().contains(&version)
}
/// Returns the Wasm API version range that is supported by the Wasm host.
#[inline(always)]
pub fn wasm_api_version_range() -> RangeInclusive<SemanticVersion> {
since_v0_0_1::MIN_VERSION..=latest::MAX_VERSION
}
pub enum Extension {
V004(since_v0_0_4::Extension),
V001(since_v0_0_1::Extension),
V004(v0_0_4::Extension),
V001(v0_0_1::Extension),
}
impl Extension {
@@ -51,23 +39,17 @@ impl Extension {
version: SemanticVersion,
component: &Component,
) -> Result<(Self, Instance)> {
if version < latest::MIN_VERSION {
let (extension, instance) = since_v0_0_1::Extension::instantiate_async(
store,
&component,
since_v0_0_1::linker(),
)
.await
.context("failed to instantiate wasm extension")?;
if version < latest::VERSION {
let (extension, instance) =
v0_0_1::Extension::instantiate_async(store, &component, v0_0_1::linker())
.await
.context("failed to instantiate wasm extension")?;
Ok((Self::V001(extension), instance))
} else {
let (extension, instance) = since_v0_0_4::Extension::instantiate_async(
store,
&component,
since_v0_0_4::linker(),
)
.await
.context("failed to instantiate wasm extension")?;
let (extension, instance) =
v0_0_4::Extension::instantiate_async(store, &component, v0_0_4::linker())
.await
.context("failed to instantiate wasm extension")?;
Ok((Self::V004(extension), instance))
}
}
@@ -119,13 +101,3 @@ impl Extension {
}
}
}
trait ToWasmtimeResult<T> {
fn to_wasmtime_result(self) -> wasmtime::Result<Result<T, String>>;
}
impl<T> ToWasmtimeResult<T> for Result<T> {
fn to_wasmtime_result(self) -> wasmtime::Result<Result<T, String>> {
Ok(self.map_err(|error| error.to_string()))
}
}
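The version gate introduced above is the whole compatibility story: an extension's compiled-in API version must fall inside MIN_VERSION..=MAX_VERSION of the host. A standalone sketch of that check, with a stand-in SemanticVersion type in place of the workspace semantic_version crate (the 0.0.1 and 0.0.5 bounds are taken from the constants in this diff):

use std::ops::RangeInclusive;

// Stand-in for the workspace `semantic_version` crate; field order gives the
// expected lexicographic comparison via the derived `Ord`.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
struct SemanticVersion {
    major: usize,
    minor: usize,
    patch: usize,
}

const MIN_VERSION: SemanticVersion = SemanticVersion { major: 0, minor: 0, patch: 1 };
const MAX_VERSION: SemanticVersion = SemanticVersion { major: 0, minor: 0, patch: 5 };

fn wasm_api_version_range() -> RangeInclusive<SemanticVersion> {
    MIN_VERSION..=MAX_VERSION
}

fn is_supported_wasm_api_version(version: SemanticVersion) -> bool {
    wasm_api_version_range().contains(&version)
}

fn main() {
    assert!(is_supported_wasm_api_version(SemanticVersion { major: 0, minor: 0, patch: 4 }));
    assert!(!is_supported_wasm_api_version(SemanticVersion { major: 0, minor: 1, patch: 0 }));
}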

View File

@@ -3,15 +3,12 @@ use crate::wasm_host::WasmState;
use anyhow::Result;
use async_trait::async_trait;
use language::{LanguageServerBinaryStatus, LspAdapterDelegate};
use semantic_version::SemanticVersion;
use std::sync::{Arc, OnceLock};
use wasmtime::component::{Linker, Resource};
pub const MIN_VERSION: SemanticVersion = SemanticVersion::new(0, 0, 1);
wasmtime::component::bindgen!({
async: true,
path: "../extension_api/wit/since_v0.0.1",
path: "../extension_api/wit/0.0.1",
with: {
"worktree": ExtensionWorktree,
},

View File

@@ -1,4 +1,3 @@
use crate::wasm_host::wit::ToWasmtimeResult;
use crate::wasm_host::WasmState;
use anyhow::{anyhow, Result};
use async_compression::futures::bufread::GzipDecoder;
@@ -6,22 +5,23 @@ use async_tar::Archive;
use async_trait::async_trait;
use futures::io::BufReader;
use language::{LanguageServerBinaryStatus, LspAdapterDelegate};
use semantic_version::SemanticVersion;
use std::path::Path;
use std::{
env,
path::PathBuf,
sync::{Arc, OnceLock},
};
use util::maybe;
use util::{maybe, SemanticVersion};
use wasmtime::component::{Linker, Resource};
pub const MIN_VERSION: SemanticVersion = SemanticVersion::new(0, 0, 4);
pub const MAX_VERSION: SemanticVersion = SemanticVersion::new(0, 0, 5);
pub const VERSION: SemanticVersion = SemanticVersion {
major: 0,
minor: 0,
patch: 4,
};
wasmtime::component::bindgen!({
async: true,
path: "../extension_api/wit/since_v0.0.4",
path: "../extension_api/wit/0.0.4",
with: {
"worktree": ExtensionWorktree,
},
@@ -77,34 +77,37 @@ impl HostWorktree for WasmState {
#[async_trait]
impl ExtensionImports for WasmState {
async fn node_binary_path(&mut self) -> wasmtime::Result<Result<String, String>> {
self.host
.node_runtime
.binary_path()
.await
.map(|path| path.to_string_lossy().to_string())
.to_wasmtime_result()
convert_result(
self.host
.node_runtime
.binary_path()
.await
.map(|path| path.to_string_lossy().to_string()),
)
}
async fn npm_package_latest_version(
&mut self,
package_name: String,
) -> wasmtime::Result<Result<String, String>> {
self.host
.node_runtime
.npm_package_latest_version(&package_name)
.await
.to_wasmtime_result()
convert_result(
self.host
.node_runtime
.npm_package_latest_version(&package_name)
.await,
)
}
async fn npm_package_installed_version(
&mut self,
package_name: String,
) -> wasmtime::Result<Result<Option<String>, String>> {
self.host
.node_runtime
.npm_package_installed_version(&self.work_dir(), &package_name)
.await
.to_wasmtime_result()
convert_result(
self.host
.node_runtime
.npm_package_installed_version(&self.work_dir(), &package_name)
.await,
)
}
async fn npm_install_package(
@@ -112,11 +115,12 @@ impl ExtensionImports for WasmState {
package_name: String,
version: String,
) -> wasmtime::Result<Result<(), String>> {
self.host
.node_runtime
.npm_install_packages(&self.work_dir(), &[(&package_name, &version)])
.await
.to_wasmtime_result()
convert_result(
self.host
.node_runtime
.npm_install_packages(&self.work_dir(), &[(&package_name, &version)])
.await,
)
}
async fn latest_github_release(
@@ -124,28 +128,29 @@ impl ExtensionImports for WasmState {
repo: String,
options: GithubReleaseOptions,
) -> wasmtime::Result<Result<GithubRelease, String>> {
maybe!(async {
let release = util::github::latest_github_release(
&repo,
options.require_assets,
options.pre_release,
self.host.http_client.clone(),
)
.await?;
Ok(GithubRelease {
version: release.tag_name,
assets: release
.assets
.into_iter()
.map(|asset| GithubReleaseAsset {
name: asset.name,
download_url: asset.browser_download_url,
})
.collect(),
convert_result(
maybe!(async {
let release = util::github::latest_github_release(
&repo,
options.require_assets,
options.pre_release,
self.host.http_client.clone(),
)
.await?;
Ok(GithubRelease {
version: release.tag_name,
assets: release
.assets
.into_iter()
.map(|asset| GithubReleaseAsset {
name: asset.name,
download_url: asset.browser_download_url,
})
.collect(),
})
})
})
.await
.to_wasmtime_result()
.await,
)
}
async fn current_platform(&mut self) -> Result<(Os, Architecture)> {
@@ -195,7 +200,7 @@ impl ExtensionImports for WasmState {
path: String,
file_type: DownloadedFileType,
) -> wasmtime::Result<Result<(), String>> {
maybe!(async {
let result = maybe!(async {
let path = PathBuf::from(path);
let extension_work_dir = self.host.work_dir.join(self.manifest.id.as_ref());
@@ -258,8 +263,6 @@ impl ExtensionImports for WasmState {
let unzip_status = std::process::Command::new("unzip")
.current_dir(&extension_work_dir)
.arg("-d")
.arg(&destination_path)
.arg(&zip_path)
.output()?
.status;
@@ -271,27 +274,11 @@ impl ExtensionImports for WasmState {
Ok(())
})
.await
.to_wasmtime_result()
}
async fn make_file_executable(&mut self, path: String) -> wasmtime::Result<Result<(), String>> {
#[allow(unused)]
let path = self
.host
.writeable_path_from_extension(&self.manifest.id, Path::new(&path))?;
#[cfg(unix)]
{
use std::fs::{self, Permissions};
use std::os::unix::fs::PermissionsExt;
return fs::set_permissions(&path, Permissions::from_mode(0o755))
.map_err(|error| anyhow!("failed to set permissions for path {path:?}: {error}"))
.to_wasmtime_result();
}
#[cfg(not(unix))]
Ok(Ok(()))
.await;
convert_result(result)
}
}
fn convert_result<T>(result: Result<T>) -> wasmtime::Result<Result<T, String>> {
Ok(result.map_err(|error| error.to_string()))
}
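Both shapes of this adapter (the ToWasmtimeResult extension trait and the free convert_result function) express the same design choice: host-side failures are flattened into Err(String) values that the guest can observe, while the outer wasmtime::Result stays reserved for trap-level failures. A self-contained sketch of the trait form, using anyhow::Result as a stand-in for the wasmtime alias (requires the anyhow crate):

use anyhow::Result;

trait ToWasmtimeResult<T> {
    // Outer Result: trap-level failures; inner Result: errors surfaced to the guest.
    fn to_wasmtime_result(self) -> Result<std::result::Result<T, String>>;
}

impl<T> ToWasmtimeResult<T> for Result<T> {
    fn to_wasmtime_result(self) -> Result<std::result::Result<T, String>> {
        Ok(self.map_err(|error| error.to_string()))
    }
}

fn main() -> Result<()> {
    let ok: Result<u32> = Ok(7);
    let err: Result<u32> = Err(anyhow::anyhow!("npm lookup failed"));
    assert_eq!(ok.to_wasmtime_result()?, Ok(7));
    assert_eq!(err.to_wasmtime_result()?, Err("npm lookup failed".to_string()));
    Ok(())
}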

View File

@@ -1,6 +1,6 @@
[package]
name = "zed_extension_api"
version = "0.0.5"
version = "0.0.4"
description = "APIs for creating Zed extensions in Rust"
repository = "https://github.com/zed-industries/zed"
documentation = "https://docs.rs/zed_extension_api"

View File

@@ -52,5 +52,5 @@ zed::register_extension!(MyExtension);
To run your extension in Zed as you're developing it:
- Open the extensions view using the `zed: extensions` action in the command palette.
- Click the `Install Dev Extension` button in the top right
- Click the `Add Dev Extension` button in the top right
- Choose the path to your extension directory.
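For context, the extension installed through this flow is a Rust crate built against zed_extension_api. A rough skeleton is sketched below; the exact trait methods differ between API versions, so treat the language_server_command signature as an assumption based on the 0.0.4/0.0.5-era API and check the crate docs for the version you target.

use zed_extension_api as zed;

struct MyExtension;

impl zed::Extension for MyExtension {
    fn new() -> Self {
        MyExtension
    }

    // Assumed 0.0.4-era signature; later API versions changed these parameters.
    fn language_server_command(
        &mut self,
        _config: zed::LanguageServerConfig,
        _worktree: &zed::Worktree,
    ) -> Result<zed::Command, String> {
        Err("this skeleton does not provide a language server".to_string())
    }
}

zed::register_extension!(MyExtension);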

View File

@@ -53,7 +53,7 @@ pub static ZED_API_VERSION: [u8; 6] = *include_bytes!(concat!(env!("OUT_DIR"), "
mod wit {
wit_bindgen::generate!({
skip: ["init-extension"],
path: "./wit/since_v0.0.4",
path: "./wit/0.0.4",
});
}

View File

@@ -62,12 +62,9 @@ world extension {
/// Gets the latest release for the given GitHub repository.
import latest-github-release: func(repo: string, options: github-release-options) -> result<github-release, string>;
/// Downloads a file from the given url, and saves it to the given path within the extension's
/// Downloads a file from the given url, and saves it to the given filename within the extension's
/// working directory. Extracts the file according to the given file type.
import download-file: func(url: string, file-path: string, file-type: downloaded-file-type) -> result<_, string>;
/// Makes the file at the given path executable.
import make-file-executable: func(filepath: string) -> result<_, string>;
import download-file: func(url: string, output-filename: string, file-type: downloaded-file-type) -> result<_, string>;
/// Updates the installation status for the given language server.
import set-language-server-installation-status: func(language-server-name: string, status: language-server-installation-status);
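On the Rust side these imports surface as ordinary functions generated by wit_bindgen and re-exported by zed_extension_api. A hedged sketch of how an extension might fetch a language server archive and mark the binary executable; the exact function and enum names below are assumptions derived from the wit declarations above, not confirmed signatures:

use zed_extension_api as zed;

// Assumed bindings: `download_file`, `make_file_executable`, and
// `DownloadedFileType` are generated from the wit imports shown above.
fn fetch_language_server(
    download_url: &str,
    version_dir: &str,
    binary_path: &str,
) -> Result<(), String> {
    zed::download_file(download_url, version_dir, zed::DownloadedFileType::GzipTar)?;
    zed::make_file_executable(binary_path)?;
    Ok(())
}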

View File

@@ -36,7 +36,7 @@ async fn main() -> Result<()> {
env_logger::init();
let args = Args::parse();
let fs = Arc::new(RealFs::default());
let fs = Arc::new(RealFs);
let engine = wasmtime::Engine::default();
let mut wasm_store = WasmStore::new(engine)?;
@@ -95,7 +95,7 @@ async fn main() -> Result<()> {
version: manifest.version,
description: manifest.description,
authors: manifest.authors,
schema_version: Some(manifest.schema_version.0),
schema_version: Some(manifest.schema_version),
repository: manifest
.repository
.ok_or_else(|| anyhow!("missing repository in extension manifest"))?,

View File

@@ -20,13 +20,10 @@ client.workspace = true
db.workspace = true
editor.workspace = true
extension.workspace = true
fs.workspace = true
fuzzy.workspace = true
gpui.workspace = true
language.workspace = true
picker.workspace = true
project.workspace = true
semantic_version.workspace = true
serde.workspace = true
settings.workspace = true
smallvec.workspace = true

View File

@@ -1,8 +1,10 @@
use std::collections::HashMap;
use std::path::Path;
use std::sync::{Arc, OnceLock};
use std::{
collections::HashMap,
sync::{Arc, OnceLock},
};
use db::kvp::KEY_VALUE_STORE;
use editor::Editor;
use extension::ExtensionStore;
use gpui::{Entity, Model, VisualContext};
@@ -10,99 +12,49 @@ use language::Buffer;
use ui::ViewContext;
use workspace::{notifications::simple_message_notification, Workspace};
fn suggested_extensions() -> &'static HashMap<&'static str, Arc<str>> {
pub fn suggested_extension(file_extension_or_name: &str) -> Option<Arc<str>> {
static SUGGESTED: OnceLock<HashMap<&str, Arc<str>>> = OnceLock::new();
SUGGESTED.get_or_init(|| {
[
("astro", "astro"),
("beancount", "beancount"),
("clojure", "bb"),
("clojure", "clj"),
("clojure", "cljc"),
("clojure", "cljs"),
("clojure", "edn"),
("csharp", "cs"),
("dockerfile", "Dockerfile"),
("elisp", "el"),
("erlang", "erl"),
("erlang", "hrl"),
("fish", "fish"),
("git-firefly", ".gitconfig"),
("git-firefly", ".gitignore"),
("git-firefly", "COMMIT_EDITMSG"),
("git-firefly", "EDIT_DESCRIPTION"),
("git-firefly", "MERGE_MSG"),
("git-firefly", "NOTES_EDITMSG"),
("git-firefly", "TAG_EDITMSG"),
("git-firefly", "git-rebase-todo"),
("gleam", "gleam"),
("graphql", "gql"),
("graphql", "graphql"),
("haskell", "hs"),
("java", "java"),
("kotlin", "kt"),
("latex", "tex"),
("make", "Makefile"),
("nix", "nix"),
("php", "php"),
("prisma", "prisma"),
("purescript", "purs"),
("r", "r"),
("r", "R"),
("sql", "sql"),
("svelte", "svelte"),
("swift", "swift"),
("templ", "templ"),
("toml", "Cargo.lock"),
("toml", "toml"),
("wgsl", "wgsl"),
("zig", "zig"),
]
.into_iter()
.map(|(name, file)| (file, name.into()))
.collect()
})
}
#[derive(Debug, PartialEq, Eq, Clone)]
struct SuggestedExtension {
pub extension_id: Arc<str>,
pub file_name_or_extension: Arc<str>,
}
/// Returns the suggested extension for the given [`Path`].
fn suggested_extension(path: impl AsRef<Path>) -> Option<SuggestedExtension> {
let path = path.as_ref();
let file_extension: Option<Arc<str>> = path
.extension()
.and_then(|extension| Some(extension.to_str()?.into()));
let file_name: Option<Arc<str>> = path
.file_name()
.and_then(|file_name| Some(file_name.to_str()?.into()));
let (file_name_or_extension, extension_id) = None
// We suggest against file names first, as these suggestions will be more
// specific than ones based on the file extension.
.or_else(|| {
file_name.clone().zip(
file_name
.as_deref()
.and_then(|file_name| suggested_extensions().get(file_name)),
)
SUGGESTED
.get_or_init(|| {
[
("astro", "astro"),
("beancount", "beancount"),
("dockerfile", "Dockerfile"),
("elisp", "el"),
("fish", "fish"),
("git-firefly", ".gitconfig"),
("git-firefly", ".gitignore"),
("git-firefly", "COMMIT_EDITMSG"),
("git-firefly", "EDIT_DESCRIPTION"),
("git-firefly", "git-rebase-todo"),
("git-firefly", "MERGE_MSG"),
("git-firefly", "NOTES_EDITMSG"),
("git-firefly", "TAG_EDITMSG"),
("gleam", "gleam"),
("graphql", "gql"),
("graphql", "graphql"),
("haskell", "hs"),
("java", "java"),
("kotlin", "kt"),
("latex", "tex"),
("make", "Makefile"),
("nix", "nix"),
("prisma", "prisma"),
("purescript", "purs"),
("r", "r"),
("r", "R"),
("sql", "sql"),
("svelte", "svelte"),
("swift", "swift"),
("templ", "templ"),
("wgsl", "wgsl"),
]
.into_iter()
.map(|(name, file)| (file, name.into()))
.collect::<HashMap<&str, Arc<str>>>()
})
.or_else(|| {
file_extension.clone().zip(
file_extension
.as_deref()
.and_then(|file_extension| suggested_extensions().get(file_extension)),
)
})?;
Some(SuggestedExtension {
extension_id: extension_id.clone(),
file_name_or_extension,
})
.get(file_extension_or_name)
.map(|str| str.clone())
}
fn language_extension_key(extension_id: &str) -> String {
@@ -110,22 +62,25 @@ fn language_extension_key(extension_id: &str) -> String {
}
pub(crate) fn suggest(buffer: Model<Buffer>, cx: &mut ViewContext<Workspace>) {
let Some(file) = buffer.read(cx).file().cloned() else {
let Some(file_name_or_extension) = buffer.read(cx).file().and_then(|file| {
Some(match file.path().extension() {
Some(extension) => extension.to_str()?.to_string(),
None => file.path().to_str()?.to_string(),
})
}) else {
return;
};
let Some(SuggestedExtension {
extension_id,
file_name_or_extension,
}) = suggested_extension(file.path())
else {
let Some(extension_id) = suggested_extension(&file_name_or_extension) else {
return;
};
let key = language_extension_key(&extension_id);
let Ok(None) = KEY_VALUE_STORE.read_kvp(&key) else {
let value = KEY_VALUE_STORE.read_kvp(&key);
if value.is_err() || value.unwrap().is_some() {
return;
};
}
cx.on_next_frame(move |workspace, cx| {
let Some(editor) = workspace.active_item_as::<Editor>(cx) else {
@@ -139,8 +94,8 @@ pub(crate) fn suggest(buffer: Model<Buffer>, cx: &mut ViewContext<Workspace>) {
workspace.show_notification(buffer.entity_id().as_u64() as usize, cx, |cx| {
cx.new_view(move |_cx| {
simple_message_notification::MessageNotification::new(format!(
"Do you want to install the recommended '{}' extension for '{}' files?",
extension_id, file_name_or_extension
"Do you want to install the recommended '{}' extension?",
file_name_or_extension
))
.with_click_message("Yes")
.on_click({
@@ -164,47 +119,3 @@ pub(crate) fn suggest(buffer: Model<Buffer>, cx: &mut ViewContext<Workspace>) {
});
})
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
pub fn test_suggested_extension() {
assert_eq!(
suggested_extension("Cargo.toml"),
Some(SuggestedExtension {
extension_id: "toml".into(),
file_name_or_extension: "toml".into()
})
);
assert_eq!(
suggested_extension("Cargo.lock"),
Some(SuggestedExtension {
extension_id: "toml".into(),
file_name_or_extension: "Cargo.lock".into()
})
);
assert_eq!(
suggested_extension("Dockerfile"),
Some(SuggestedExtension {
extension_id: "dockerfile".into(),
file_name_or_extension: "Dockerfile".into()
})
);
assert_eq!(
suggested_extension("a/b/c/d/.gitignore"),
Some(SuggestedExtension {
extension_id: "git-firefly".into(),
file_name_or_extension: ".gitignore".into()
})
);
assert_eq!(
suggested_extension("a/b/c/d/test.gleam"),
Some(SuggestedExtension {
extension_id: "gleam".into(),
file_name_or_extension: "gleam".into()
})
);
}
}

View File

@@ -1,238 +0,0 @@
use std::str::FromStr;
use std::sync::Arc;
use client::ExtensionMetadata;
use extension::{ExtensionSettings, ExtensionStore};
use fs::Fs;
use fuzzy::{match_strings, StringMatch, StringMatchCandidate};
use gpui::{
prelude::*, AppContext, DismissEvent, EventEmitter, FocusableView, Task, View, WeakView,
};
use picker::{Picker, PickerDelegate};
use semantic_version::SemanticVersion;
use settings::update_settings_file;
use ui::{prelude::*, HighlightedLabel, ListItem, ListItemSpacing};
use util::ResultExt;
use workspace::ModalView;
pub struct ExtensionVersionSelector {
picker: View<Picker<ExtensionVersionSelectorDelegate>>,
}
impl ModalView for ExtensionVersionSelector {}
impl EventEmitter<DismissEvent> for ExtensionVersionSelector {}
impl FocusableView for ExtensionVersionSelector {
fn focus_handle(&self, cx: &AppContext) -> gpui::FocusHandle {
self.picker.focus_handle(cx)
}
}
impl Render for ExtensionVersionSelector {
fn render(&mut self, _cx: &mut ViewContext<Self>) -> impl IntoElement {
v_flex().w(rems(34.)).child(self.picker.clone())
}
}
impl ExtensionVersionSelector {
pub fn new(delegate: ExtensionVersionSelectorDelegate, cx: &mut ViewContext<Self>) -> Self {
let picker = cx.new_view(|cx| Picker::uniform_list(delegate, cx));
Self { picker }
}
}
pub struct ExtensionVersionSelectorDelegate {
fs: Arc<dyn Fs>,
view: WeakView<ExtensionVersionSelector>,
extension_versions: Vec<ExtensionMetadata>,
selected_index: usize,
matches: Vec<StringMatch>,
}
impl ExtensionVersionSelectorDelegate {
pub fn new(
fs: Arc<dyn Fs>,
weak_view: WeakView<ExtensionVersionSelector>,
mut extension_versions: Vec<ExtensionMetadata>,
) -> Self {
extension_versions.sort_unstable_by(|a, b| {
let a_version = SemanticVersion::from_str(&a.manifest.version);
let b_version = SemanticVersion::from_str(&b.manifest.version);
match (a_version, b_version) {
(Ok(a_version), Ok(b_version)) => b_version.cmp(&a_version),
_ => b.published_at.cmp(&a.published_at),
}
});
let matches = extension_versions
.iter()
.map(|extension| StringMatch {
candidate_id: 0,
score: 0.0,
positions: Default::default(),
string: format!("v{}", extension.manifest.version),
})
.collect();
Self {
fs,
view: weak_view,
extension_versions,
selected_index: 0,
matches,
}
}
}
impl PickerDelegate for ExtensionVersionSelectorDelegate {
type ListItem = ui::ListItem;
fn placeholder_text(&self, _cx: &mut WindowContext) -> Arc<str> {
"Select extension version...".into()
}
fn match_count(&self) -> usize {
self.matches.len()
}
fn selected_index(&self) -> usize {
self.selected_index
}
fn set_selected_index(&mut self, ix: usize, _cx: &mut ViewContext<Picker<Self>>) {
self.selected_index = ix;
}
fn update_matches(&mut self, query: String, cx: &mut ViewContext<Picker<Self>>) -> Task<()> {
let background_executor = cx.background_executor().clone();
let candidates = self
.extension_versions
.iter()
.enumerate()
.map(|(id, extension)| {
let text = format!("v{}", extension.manifest.version);
StringMatchCandidate {
id,
char_bag: text.as_str().into(),
string: text,
}
})
.collect::<Vec<_>>();
cx.spawn(move |this, mut cx| async move {
let matches = if query.is_empty() {
candidates
.into_iter()
.enumerate()
.map(|(index, candidate)| StringMatch {
candidate_id: index,
string: candidate.string,
positions: Vec::new(),
score: 0.0,
})
.collect()
} else {
match_strings(
&candidates,
&query,
false,
100,
&Default::default(),
background_executor,
)
.await
};
this.update(&mut cx, |this, _cx| {
this.delegate.matches = matches;
this.delegate.selected_index = this
.delegate
.selected_index
.min(this.delegate.matches.len().saturating_sub(1));
})
.log_err();
})
}
fn confirm(&mut self, _secondary: bool, cx: &mut ViewContext<Picker<Self>>) {
if self.matches.is_empty() {
self.dismissed(cx);
return;
}
let candidate_id = self.matches[self.selected_index].candidate_id;
let extension_version = &self.extension_versions[candidate_id];
if !extension::is_version_compatible(extension_version) {
return;
}
let extension_store = ExtensionStore::global(cx);
extension_store.update(cx, |store, cx| {
let extension_id = extension_version.id.clone();
let version = extension_version.manifest.version.clone();
update_settings_file::<ExtensionSettings>(self.fs.clone(), cx, {
let extension_id = extension_id.clone();
move |settings| {
settings.auto_update_extensions.insert(extension_id, false);
}
});
store.install_extension(extension_id, version, cx);
});
}
fn dismissed(&mut self, cx: &mut ViewContext<Picker<Self>>) {
self.view
.update(cx, |_, cx| cx.emit(DismissEvent))
.log_err();
}
fn render_match(
&self,
ix: usize,
selected: bool,
_cx: &mut ViewContext<Picker<Self>>,
) -> Option<Self::ListItem> {
let version_match = &self.matches[ix];
let extension_version = &self.extension_versions[version_match.candidate_id];
let is_version_compatible = extension::is_version_compatible(extension_version);
let disabled = !is_version_compatible;
Some(
ListItem::new(ix)
.inset(true)
.spacing(ListItemSpacing::Sparse)
.selected(selected)
.disabled(disabled)
.child(
HighlightedLabel::new(
version_match.string.clone(),
version_match.positions.clone(),
)
.when(disabled, |label| label.color(Color::Muted)),
)
.end_slot(
h_flex()
.gap_2()
.when(!is_version_compatible, |this| {
this.child(Label::new("Incompatible").color(Color::Muted))
})
.child(
Label::new(
extension_version
.published_at
.format("%Y-%m-%d")
.to_string(),
)
.when(disabled, |label| label.color(Color::Muted)),
),
),
)
}
}

View File

@@ -1,15 +1,11 @@
mod components;
mod extension_suggest;
mod extension_version_selector;
use crate::components::ExtensionCard;
use crate::extension_version_selector::{
ExtensionVersionSelector, ExtensionVersionSelectorDelegate,
};
use client::telemetry::Telemetry;
use client::ExtensionMetadata;
use editor::{Editor, EditorElement, EditorStyle};
use extension::{ExtensionManifest, ExtensionOperation, ExtensionStore};
use extension::{ExtensionManifest, ExtensionStatus, ExtensionStore};
use fuzzy::{match_strings, StringMatchCandidate};
use gpui::{
actions, canvas, uniform_list, AnyElement, AppContext, EventEmitter, FocusableView, FontStyle,
@@ -21,7 +17,7 @@ use std::ops::DerefMut;
use std::time::Duration;
use std::{ops::Range, sync::Arc};
use theme::ThemeSettings;
use ui::{popover_menu, prelude::*, ContextMenu, ToggleButton, Tooltip};
use ui::{prelude::*, ToggleButton, Tooltip};
use util::ResultExt as _;
use workspace::{
item::{Item, ItemEvent},
@@ -81,15 +77,6 @@ pub fn init(cx: &mut AppContext) {
.detach();
}
#[derive(Clone)]
pub enum ExtensionStatus {
NotInstalled,
Installing,
Upgrading,
Installed(Arc<str>),
Removing,
}
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy)]
enum ExtensionFilter {
All,
@@ -107,7 +94,6 @@ impl ExtensionFilter {
}
pub struct ExtensionsPage {
workspace: WeakView<Workspace>,
list: UniformListScrollHandle,
telemetry: Arc<Telemetry>,
is_fetching_extensions: bool,
@@ -145,7 +131,6 @@ impl ExtensionsPage {
cx.subscribe(&query_editor, Self::on_query_change).detach();
let mut this = Self {
workspace: workspace.weak_handle(),
list: UniformListScrollHandle::new(),
telemetry: workspace.client().telemetry().clone(),
is_fetching_extensions: false,
@@ -189,21 +174,9 @@ impl ExtensionsPage {
}
}
fn extension_status(extension_id: &str, cx: &mut ViewContext<Self>) -> ExtensionStatus {
fn filter_extension_entries(&mut self, cx: &mut ViewContext<Self>) {
let extension_store = ExtensionStore::global(cx).read(cx);
match extension_store.outstanding_operations().get(extension_id) {
Some(ExtensionOperation::Install) => ExtensionStatus::Installing,
Some(ExtensionOperation::Remove) => ExtensionStatus::Removing,
Some(ExtensionOperation::Upgrade) => ExtensionStatus::Upgrading,
None => match extension_store.installed_extensions().get(extension_id) {
Some(extension) => ExtensionStatus::Installed(extension.manifest.version.clone()),
None => ExtensionStatus::NotInstalled,
},
}
}
fn filter_extension_entries(&mut self, cx: &mut ViewContext<Self>) {
self.filtered_remote_extension_indices.clear();
self.filtered_remote_extension_indices.extend(
self.remote_extension_entries
@@ -212,11 +185,11 @@ impl ExtensionsPage {
.filter(|(_, extension)| match self.filter {
ExtensionFilter::All => true,
ExtensionFilter::Installed => {
let status = Self::extension_status(&extension.id, cx);
let status = extension_store.extension_status(&extension.id);
matches!(status, ExtensionStatus::Installed(_))
}
ExtensionFilter::NotInstalled => {
let status = Self::extension_status(&extension.id, cx);
let status = extension_store.extension_status(&extension.id);
matches!(status, ExtensionStatus::NotInstalled)
}
@@ -312,7 +285,9 @@ impl ExtensionsPage {
extension: &ExtensionManifest,
cx: &mut ViewContext<Self>,
) -> ExtensionCard {
let status = Self::extension_status(&extension.id, cx);
let status = ExtensionStore::global(cx)
.read(cx)
.extension_status(&extension.id);
let repository_url = extension.repository.clone();
@@ -414,10 +389,10 @@ impl ExtensionsPage {
extension: &ExtensionMetadata,
cx: &mut ViewContext<Self>,
) -> ExtensionCard {
let this = cx.view().clone();
let status = Self::extension_status(&extension.id, cx);
let status = ExtensionStore::global(cx)
.read(cx)
.extension_status(&extension.id);
let extension_id = extension.id.clone();
let (install_or_uninstall_button, upgrade_button) =
self.buttons_for_entry(extension, &status, cx);
let repository_url = extension.manifest.repository.clone();
@@ -479,122 +454,45 @@ impl ExtensionsPage {
)
}))
.child(
h_flex()
.gap_2()
.child(
IconButton::new(
SharedString::from(format!("repository-{}", extension.id)),
IconName::Github,
)
.icon_color(Color::Accent)
.icon_size(IconSize::Small)
.style(ButtonStyle::Filled)
.on_click(cx.listener({
let repository_url = repository_url.clone();
move |_, _, cx| {
cx.open_url(&repository_url);
}
}))
.tooltip(move |cx| Tooltip::text(repository_url.clone(), cx)),
)
.child(
popover_menu(SharedString::from(format!("more-{}", extension.id)))
.trigger(
IconButton::new(
SharedString::from(format!("more-{}", extension.id)),
IconName::Ellipsis,
)
.icon_color(Color::Accent)
.icon_size(IconSize::Small)
.style(ButtonStyle::Filled),
)
.menu(move |cx| {
Some(Self::render_remote_extension_context_menu(
&this,
extension_id.clone(),
cx,
))
}),
),
IconButton::new(
SharedString::from(format!("repository-{}", extension.id)),
IconName::Github,
)
.icon_color(Color::Accent)
.icon_size(IconSize::Small)
.style(ButtonStyle::Filled)
.on_click(cx.listener({
let repository_url = repository_url.clone();
move |_, _, cx| {
cx.open_url(&repository_url);
}
}))
.tooltip(move |cx| Tooltip::text(repository_url.clone(), cx)),
),
)
}
fn render_remote_extension_context_menu(
this: &View<Self>,
extension_id: Arc<str>,
cx: &mut WindowContext,
) -> View<ContextMenu> {
let context_menu = ContextMenu::build(cx, |context_menu, cx| {
context_menu.entry(
"Install Another Version...",
None,
cx.handler_for(&this, move |this, cx| {
this.show_extension_version_list(extension_id.clone(), cx)
}),
)
});
context_menu
}
fn show_extension_version_list(&mut self, extension_id: Arc<str>, cx: &mut ViewContext<Self>) {
let Some(workspace) = self.workspace.upgrade() else {
return;
};
cx.spawn(move |this, mut cx| async move {
let extension_versions_task = this.update(&mut cx, |_, cx| {
let extension_store = ExtensionStore::global(cx);
extension_store.update(cx, |store, cx| {
store.fetch_extension_versions(&extension_id, cx)
})
})?;
let extension_versions = extension_versions_task.await?;
workspace.update(&mut cx, |workspace, cx| {
let fs = workspace.project().read(cx).fs().clone();
workspace.toggle_modal(cx, |cx| {
let delegate = ExtensionVersionSelectorDelegate::new(
fs,
cx.view().downgrade(),
extension_versions,
);
ExtensionVersionSelector::new(delegate, cx)
});
})?;
anyhow::Ok(())
})
.detach_and_log_err(cx);
}
fn buttons_for_entry(
&self,
extension: &ExtensionMetadata,
status: &ExtensionStatus,
cx: &mut ViewContext<Self>,
) -> (Button, Option<Button>) {
let is_compatible = extension::is_version_compatible(&extension);
let disabled = !is_compatible;
match status.clone() {
ExtensionStatus::NotInstalled => (
Button::new(SharedString::from(extension.id.clone()), "Install")
.disabled(disabled)
.on_click(cx.listener({
Button::new(SharedString::from(extension.id.clone()), "Install").on_click(
cx.listener({
let extension_id = extension.id.clone();
let version = extension.manifest.version.clone();
move |this, _, cx| {
this.telemetry
.report_app_event("extensions: install extension".to_string());
ExtensionStore::global(cx).update(cx, |store, cx| {
store.install_latest_extension(extension_id.clone(), cx)
store.install_extension(extension_id.clone(), version.clone(), cx)
});
}
})),
}),
),
None,
),
ExtensionStatus::Installing => (
@@ -624,9 +522,8 @@ impl ExtensionsPage {
None
} else {
Some(
Button::new(SharedString::from(extension.id.clone()), "Upgrade")
.disabled(disabled)
.on_click(cx.listener({
Button::new(SharedString::from(extension.id.clone()), "Upgrade").on_click(
cx.listener({
let extension_id = extension.id.clone();
let version = extension.manifest.version.clone();
move |this, _, cx| {
@@ -634,16 +531,15 @@ impl ExtensionsPage {
"extensions: install extension".to_string(),
);
ExtensionStore::global(cx).update(cx, |store, cx| {
store
.upgrade_extension(
extension_id.clone(),
version.clone(),
cx,
)
.detach_and_log_err(cx)
store.upgrade_extension(
extension_id.clone(),
version.clone(),
cx,
)
});
}
})),
}),
),
)
},
),
@@ -817,7 +713,7 @@ impl Render for ExtensionsPage {
.justify_between()
.child(Headline::new("Extensions").size(HeadlineSize::XLarge))
.child(
Button::new("install-dev-extension", "Install Dev Extension")
Button::new("add-dev-extension", "Add Dev Extension")
.style(ButtonStyle::Filled)
.size(ButtonSize::Large)
.on_click(|_event, cx| {

View File

@@ -1,6 +1,6 @@
use gpui::AppContext;
use human_bytes::human_bytes;
use release_channel::{AppCommitSha, AppVersion, ReleaseChannel};
use release_channel::{AppVersion, ReleaseChannel};
use serde::Serialize;
use std::{env, fmt::Display};
use sysinfo::{MemoryRefreshKind, RefreshKind, System};
@@ -13,13 +13,12 @@ pub struct SystemSpecs {
os_version: Option<String>,
memory: u64,
architecture: &'static str,
commit_sha: Option<String>,
}
impl SystemSpecs {
pub fn new(cx: &AppContext) -> Self {
let app_version = AppVersion::global(cx).to_string();
let release_channel = ReleaseChannel::global(cx);
let release_channel = ReleaseChannel::global(cx).display_name();
let os_name = cx.app_metadata().os_name;
let system = System::new_with_specifics(
RefreshKind::new().with_memory(MemoryRefreshKind::everything()),
@@ -30,21 +29,14 @@ impl SystemSpecs {
.app_metadata()
.os_version
.map(|os_version| os_version.to_string());
let commit_sha = match release_channel {
ReleaseChannel::Dev | ReleaseChannel::Nightly => {
AppCommitSha::try_global(cx).map(|sha| sha.0.clone())
}
_ => None,
};
SystemSpecs {
app_version,
release_channel: release_channel.display_name(),
release_channel,
os_name,
os_version,
memory,
architecture,
commit_sha,
}
}
}
@@ -55,14 +47,8 @@ impl Display for SystemSpecs {
Some(os_version) => format!("OS: {} {}", self.os_name, os_version),
None => format!("OS: {}", self.os_name),
};
let app_version_information = format!(
"Zed: v{} ({})",
self.app_version,
match &self.commit_sha {
Some(commit_sha) => format!("{} {}", self.release_channel, commit_sha),
None => self.release_channel.to_string(),
}
);
let app_version_information =
format!("Zed: v{} ({})", self.app_version, self.release_channel);
let system_specs = [
app_version_information,
os_information,

View File

@@ -1490,7 +1490,7 @@ async fn test_keeps_file_finder_open_after_modifier_keys_release(cx: &mut gpui::
open_queried_buffer("1", 1, "1.txt", &workspace, cx).await;
cx.simulate_modifiers_change(Modifiers::secondary_key());
cx.simulate_modifiers_change(Modifiers::command());
open_file_picker(&workspace, cx);
cx.simulate_modifiers_change(Modifiers::none());
@@ -1519,7 +1519,7 @@ async fn test_opens_file_on_modifier_keys_release(cx: &mut gpui::TestAppContext)
open_queried_buffer("1", 1, "1.txt", &workspace, cx).await;
open_queried_buffer("2", 1, "2.txt", &workspace, cx).await;
cx.simulate_modifiers_change(Modifiers::secondary_key());
cx.simulate_modifiers_change(Modifiers::command());
let picker = open_file_picker(&workspace, cx);
picker.update(cx, |finder, _| {
assert_eq!(finder.delegate.matches.len(), 2);
@@ -1560,7 +1560,7 @@ async fn test_switches_between_release_norelease_modes_on_forward_nav(
open_queried_buffer("2", 1, "2.txt", &workspace, cx).await;
// Open with a shortcut
cx.simulate_modifiers_change(Modifiers::secondary_key());
cx.simulate_modifiers_change(Modifiers::command());
let picker = open_file_picker(&workspace, cx);
picker.update(cx, |finder, _| {
assert_eq!(finder.delegate.matches.len(), 2);
@@ -1581,7 +1581,7 @@ async fn test_switches_between_release_norelease_modes_on_forward_nav(
// Back to navigation with initial shortcut
// Open file on modifiers release
cx.simulate_modifiers_change(Modifiers::secondary_key());
cx.simulate_modifiers_change(Modifiers::command());
cx.dispatch_action(Toggle);
cx.simulate_modifiers_change(Modifiers::none());
cx.read(|cx| {
@@ -1617,7 +1617,7 @@ async fn test_switches_between_release_norelease_modes_on_backward_nav(
open_queried_buffer("3", 1, "3.txt", &workspace, cx).await;
// Open with a shortcut
cx.simulate_modifiers_change(Modifiers::secondary_key());
cx.simulate_modifiers_change(Modifiers::command());
let picker = open_file_picker(&workspace, cx);
picker.update(cx, |finder, _| {
assert_eq!(finder.delegate.matches.len(), 3);
@@ -1640,7 +1640,7 @@ async fn test_switches_between_release_norelease_modes_on_backward_nav(
// Back to navigation with initial shortcut
// Open file on modifiers release
cx.simulate_modifiers_change(Modifiers::secondary_key());
cx.simulate_modifiers_change(Modifiers::command());
cx.dispatch_action(SelectPrev); // <-- File Finder's SelectPrev, not menu's
cx.simulate_modifiers_change(Modifiers::none());
cx.read(|cx| {
@@ -1669,7 +1669,7 @@ async fn test_extending_modifiers_does_not_confirm_selection(cx: &mut gpui::Test
open_queried_buffer("1", 1, "1.txt", &workspace, cx).await;
cx.simulate_modifiers_change(Modifiers::secondary_key());
cx.simulate_modifiers_change(Modifiers::command());
open_file_picker(&workspace, cx);
cx.simulate_modifiers_change(Modifiers::command_shift());

View File

@@ -1,21 +0,0 @@
[package]
name = "file_icons"
version = "0.1.0"
edition = "2021"
publish = false
license = "GPL-3.0-or-later"
[lints]
workspace = true
[lib]
path = "src/file_icons.rs"
doctest = false
[dependencies]
gpui.workspace = true
util.workspace = true
serde.workspace = true
serde_derive.workspace = true
serde_json.workspace = true
collections.workspace = true

View File

@@ -26,7 +26,6 @@ tempfile.workspace = true
lazy_static.workspace = true
parking_lot.workspace = true
smol.workspace = true
git.workspace = true
git2.workspace = true
serde.workspace = true
serde_derive.workspace = true

View File

@@ -9,7 +9,7 @@ use async_tar::Archive;
use futures::{future::BoxFuture, AsyncRead, Stream, StreamExt};
use git2::Repository as LibGitRepository;
use parking_lot::Mutex;
use repository::{GitRepository, RealGitRepository};
use repository::GitRepository;
use rope::Rope;
#[cfg(any(test, feature = "test-support"))]
use smol::io::AsyncReadExt;
@@ -111,16 +111,7 @@ pub struct Metadata {
pub is_dir: bool,
}
#[derive(Default)]
pub struct RealFs {
git_binary_path: Option<PathBuf>,
}
impl RealFs {
pub fn new(git_binary_path: Option<PathBuf>) -> Self {
Self { git_binary_path }
}
}
pub struct RealFs;
#[async_trait::async_trait]
impl Fs for RealFs {
@@ -440,10 +431,7 @@ impl Fs for RealFs {
LibGitRepository::open(dotgit_path)
.log_err()
.map::<Arc<Mutex<dyn GitRepository>>, _>(|libgit_repository| {
Arc::new(Mutex::new(RealGitRepository::new(
libgit_repository,
self.git_binary_path.clone(),
)))
Arc::new(Mutex::new(libgit_repository))
})
}
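The net effect of this change is that RealFs now carries an optional git binary path (used by the blame support) instead of being a unit struct. A small usage sketch, assuming the crate paths shown in this diff (fs::RealFs, fs::Fs):

use std::path::PathBuf;
use std::sync::Arc;

use fs::{Fs, RealFs}; // crate paths assumed from the surrounding diff

fn make_fs(custom_git_binary: Option<PathBuf>) -> Arc<dyn Fs> {
    // `None` falls back to invoking plain `git` from PATH (see RealGitRepository::new).
    Arc::new(RealFs::new(custom_git_binary))
}

fn make_default_fs() -> Arc<dyn Fs> {
    // Equivalent to `RealFs::new(None)` via the derived `Default`.
    Arc::new(RealFs::default())
}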
@@ -836,17 +824,6 @@ impl FakeFs {
});
}
pub fn set_blame_for_repo(&self, dot_git: &Path, blames: Vec<(&Path, git::blame::Blame)>) {
self.with_git_state(dot_git, true, |state| {
state.blames.clear();
state.blames.extend(
blames
.into_iter()
.map(|(path, blame)| (path.to_path_buf(), blame)),
);
});
}
pub fn set_status_for_repo_via_working_copy_change(
&self,
dot_git: &Path,

View File

@@ -1,9 +1,7 @@
use anyhow::{Context, Result};
use anyhow::Result;
use collections::HashMap;
use git::blame::Blame;
use git2::{BranchType, StatusShow};
use parking_lot::Mutex;
use rope::Rope;
use serde_derive::{Deserialize, Serialize};
use std::{
cmp::Ordering,
@@ -25,9 +23,6 @@ pub struct Branch {
pub trait GitRepository: Send {
fn reload_index(&self);
/// Loads a git repository entry's contents.
/// Note that for symlink entries, this will return the contents of the symlink, not the target.
fn load_index_text(&self, relative_file_path: &Path) -> Option<String>;
/// Returns the URL of the remote with the given name.
@@ -58,8 +53,6 @@ pub trait GitRepository: Send {
fn branches(&self) -> Result<Vec<Branch>>;
fn change_branch(&self, _: &str) -> Result<()>;
fn create_branch(&self, _: &str) -> Result<()>;
fn blame(&self, path: &Path, content: Rope) -> Result<git::blame::Blame>;
}
impl std::fmt::Debug for dyn GitRepository {
@@ -68,23 +61,9 @@ impl std::fmt::Debug for dyn GitRepository {
}
}
pub struct RealGitRepository {
pub repository: LibGitRepository,
pub git_binary_path: PathBuf,
}
impl RealGitRepository {
pub fn new(repository: LibGitRepository, git_binary_path: Option<PathBuf>) -> Self {
Self {
repository,
git_binary_path: git_binary_path.unwrap_or_else(|| PathBuf::from("git")),
}
}
}
impl GitRepository for RealGitRepository {
impl GitRepository for LibGitRepository {
fn reload_index(&self) {
if let Ok(mut index) = self.repository.index() {
if let Ok(mut index) = self.index() {
_ = index.read(false);
}
}
@@ -106,7 +85,7 @@ impl GitRepository for RealGitRepository {
Ok(Some(String::from_utf8(content)?))
}
match logic(&self.repository, relative_file_path) {
match logic(self, relative_file_path) {
Ok(value) => return value,
Err(err) => log::error!("Error loading head text: {:?}", err),
}
@@ -114,18 +93,18 @@ impl GitRepository for RealGitRepository {
}
fn remote_url(&self, name: &str) -> Option<String> {
let remote = self.repository.find_remote(name).ok()?;
let remote = self.find_remote(name).ok()?;
remote.url().map(|url| url.to_string())
}
fn branch_name(&self) -> Option<String> {
let head = self.repository.head().log_err()?;
let head = self.head().log_err()?;
let branch = String::from_utf8_lossy(head.shorthand_bytes());
Some(branch.to_string())
}
fn head_sha(&self) -> Option<String> {
let head = self.repository.head().ok()?;
let head = self.head().ok()?;
head.target().map(|oid| oid.to_string())
}
@@ -136,7 +115,7 @@ impl GitRepository for RealGitRepository {
options.pathspec(path_prefix);
options.show(StatusShow::Index);
if let Some(statuses) = self.repository.statuses(Some(&mut options)).log_err() {
if let Some(statuses) = self.statuses(Some(&mut options)).log_err() {
for status in statuses.iter() {
let path = RepoPath(PathBuf::try_from_bytes(status.path_bytes()).unwrap());
let status = status.status();
@@ -153,7 +132,7 @@ impl GitRepository for RealGitRepository {
fn unstaged_status(&self, path: &RepoPath, mtime: SystemTime) -> Option<GitFileStatus> {
// If the file has not changed since it was added to the index, then
// there can't be any changes.
if matches_index(&self.repository, path, mtime) {
if matches_index(self, path, mtime) {
return None;
}
@@ -165,7 +144,7 @@ impl GitRepository for RealGitRepository {
options.include_unmodified(true);
options.show(StatusShow::Workdir);
let statuses = self.repository.statuses(Some(&mut options)).log_err()?;
let statuses = self.statuses(Some(&mut options)).log_err()?;
let status = statuses.get(0).and_then(|s| read_status(s.status()));
status
}
@@ -181,17 +160,17 @@ impl GitRepository for RealGitRepository {
// If the file has not changed since it was added to the index, then
// there's no need to examine the working directory file: just compare
// the blob in the index to the one in the HEAD commit.
if matches_index(&self.repository, path, mtime) {
if matches_index(self, path, mtime) {
options.show(StatusShow::Index);
}
let statuses = self.repository.statuses(Some(&mut options)).log_err()?;
let statuses = self.statuses(Some(&mut options)).log_err()?;
let status = statuses.get(0).and_then(|s| read_status(s.status()));
status
}
fn branches(&self) -> Result<Vec<Branch>> {
let local_branches = self.repository.branches(Some(BranchType::Local))?;
let local_branches = self.branches(Some(BranchType::Local))?;
let valid_branches = local_branches
.filter_map(|branch| {
branch.ok().and_then(|(branch, _)| {
@@ -213,11 +192,11 @@ impl GitRepository for RealGitRepository {
Ok(valid_branches)
}
fn change_branch(&self, name: &str) -> Result<()> {
let revision = self.repository.find_branch(name, BranchType::Local)?;
let revision = self.find_branch(name, BranchType::Local)?;
let revision = revision.get();
let as_tree = revision.peel_to_tree()?;
self.repository.checkout_tree(as_tree.as_object(), None)?;
self.repository.set_head(
self.checkout_tree(as_tree.as_object(), None)?;
self.set_head(
revision
.name()
.ok_or_else(|| anyhow::anyhow!("Branch name could not be retrieved"))?,
@@ -225,29 +204,11 @@ impl GitRepository for RealGitRepository {
Ok(())
}
fn create_branch(&self, name: &str) -> Result<()> {
let current_commit = self.repository.head()?.peel_to_commit()?;
self.repository.branch(name, &current_commit, false)?;
let current_commit = self.head()?.peel_to_commit()?;
self.branch(name, &current_commit, false)?;
Ok(())
}
fn blame(&self, path: &Path, content: Rope) -> Result<git::blame::Blame> {
let git_dir_path = self.repository.path();
let working_directory = git_dir_path.parent().with_context(|| {
format!("failed to get git working directory for {:?}", git_dir_path)
})?;
const REMOTE_NAME: &str = "origin";
let remote_url = self.remote_url(REMOTE_NAME);
git::blame::Blame::for_path(
&self.git_binary_path,
working_directory,
path,
&content,
remote_url,
)
}
}
fn matches_index(repo: &LibGitRepository, path: &RepoPath, mtime: SystemTime) -> bool {
@@ -290,7 +251,6 @@ pub struct FakeGitRepository {
#[derive(Debug, Clone, Default)]
pub struct FakeGitRepositoryState {
pub index_contents: HashMap<PathBuf, String>,
pub blames: HashMap<PathBuf, Blame>,
pub worktree_statuses: HashMap<RepoPath, GitFileStatus>,
pub branch_name: Option<String>,
}
@@ -357,15 +317,6 @@ impl GitRepository for FakeGitRepository {
state.branch_name = Some(name.to_owned());
Ok(())
}
fn blame(&self, path: &Path, _content: Rope) -> Result<git::blame::Blame> {
let state = self.state.lock();
state
.blames
.get(path)
.with_context(|| format!("failed to get blame for {:?}", path))
.cloned()
}
}
fn check_path_to_repo_path_errors(relative_file_path: &Path) -> Result<()> {

View File

@@ -12,23 +12,16 @@ workspace = true
path = "src/git.rs"
[dependencies]
anyhow.workspace = true
clock.workspace = true
collections.workspace = true
git2.workspace = true
lazy_static.workspace = true
log.workspace = true
smol.workspace = true
sum_tree.workspace = true
text.workspace = true
time.workspace = true
url.workspace = true
serde.workspace = true
[dev-dependencies]
unindent.workspace = true
serde_json.workspace = true
pretty_assertions.workspace = true
[features]
test-support = []

View File

@@ -1,358 +0,0 @@
use crate::commit::get_messages;
use crate::permalink::{build_commit_permalink, parse_git_remote_url, BuildCommitPermalinkParams};
use crate::Oid;
use anyhow::{anyhow, Context, Result};
use collections::{HashMap, HashSet};
use serde::{Deserialize, Serialize};
use std::io::Write;
use std::process::{Command, Stdio};
use std::{ops::Range, path::Path};
use text::Rope;
use time;
use time::macros::format_description;
use time::OffsetDateTime;
use time::UtcOffset;
use url::Url;
pub use git2 as libgit;
#[derive(Debug, Clone, Default)]
pub struct Blame {
pub entries: Vec<BlameEntry>,
pub messages: HashMap<Oid, String>,
pub permalinks: HashMap<Oid, Url>,
}
impl Blame {
pub fn for_path(
git_binary: &Path,
working_directory: &Path,
path: &Path,
content: &Rope,
remote_url: Option<String>,
) -> Result<Self> {
let output = run_git_blame(git_binary, working_directory, path, &content)?;
let mut entries = parse_git_blame(&output)?;
entries.sort_unstable_by(|a, b| a.range.start.cmp(&b.range.start));
let mut permalinks = HashMap::default();
let mut unique_shas = HashSet::default();
let parsed_remote_url = remote_url.as_deref().and_then(parse_git_remote_url);
for entry in entries.iter_mut() {
unique_shas.insert(entry.sha);
if let Some(remote) = parsed_remote_url.as_ref() {
permalinks.entry(entry.sha).or_insert_with(|| {
build_commit_permalink(BuildCommitPermalinkParams {
remote,
sha: entry.sha.to_string().as_str(),
})
});
}
}
let shas = unique_shas.into_iter().collect::<Vec<_>>();
let messages =
get_messages(&working_directory, &shas).context("failed to get commit messages")?;
Ok(Self {
entries,
permalinks,
messages,
})
}
}
fn run_git_blame(
git_binary: &Path,
working_directory: &Path,
path: &Path,
contents: &Rope,
) -> Result<String> {
let child = Command::new(git_binary)
.current_dir(working_directory)
.arg("blame")
.arg("--incremental")
.arg("--contents")
.arg("-")
.arg(path.as_os_str())
.stdin(Stdio::piped())
.stdout(Stdio::piped())
.spawn()
.map_err(|e| anyhow!("Failed to start git blame process: {}", e))?;
let mut stdin = child
.stdin
.as_ref()
.context("failed to get pipe to stdin of git blame command")?;
for chunk in contents.chunks() {
stdin.write_all(chunk.as_bytes())?;
}
stdin.flush()?;
let output = child
.wait_with_output()
.map_err(|e| anyhow!("Failed to read git blame output: {}", e))?;
if !output.status.success() {
let stderr = String::from_utf8_lossy(&output.stderr);
return Err(anyhow!("git blame process failed: {}", stderr));
}
Ok(String::from_utf8(output.stdout)?)
}
#[derive(Serialize, Deserialize, Default, Debug, Clone, PartialEq, Eq)]
pub struct BlameEntry {
pub sha: Oid,
pub range: Range<u32>,
pub original_line_number: u32,
pub author: Option<String>,
pub author_mail: Option<String>,
pub author_time: Option<i64>,
pub author_tz: Option<String>,
pub committer: Option<String>,
pub committer_mail: Option<String>,
pub committer_time: Option<i64>,
pub committer_tz: Option<String>,
pub summary: Option<String>,
pub previous: Option<String>,
pub filename: String,
}
impl BlameEntry {
// Returns a BlameEntry by parsing the first line of a `git blame --incremental`
// entry. The line MUST have this format:
//
// <40-byte-hex-sha1> <sourceline> <resultline> <num-lines>
fn new_from_blame_line(line: &str) -> Result<BlameEntry> {
let mut parts = line.split_whitespace();
let sha = parts
.next()
.and_then(|line| line.parse::<Oid>().ok())
.ok_or_else(|| anyhow!("failed to parse sha"))?;
let original_line_number = parts
.next()
.and_then(|line| line.parse::<u32>().ok())
.ok_or_else(|| anyhow!("Failed to parse original line number"))?;
let final_line_number = parts
.next()
.and_then(|line| line.parse::<u32>().ok())
.ok_or_else(|| anyhow!("Failed to parse final line number"))?;
let line_count = parts
.next()
.and_then(|line| line.parse::<u32>().ok())
.ok_or_else(|| anyhow!("Failed to parse final line number"))?;
let start_line = final_line_number.saturating_sub(1);
let end_line = start_line + line_count;
let range = start_line..end_line;
Ok(Self {
sha,
range,
original_line_number,
..Default::default()
})
}
pub fn author_offset_date_time(&self) -> Result<time::OffsetDateTime> {
if let (Some(author_time), Some(author_tz)) = (self.author_time, &self.author_tz) {
let format = format_description!("[offset_hour][offset_minute]");
let offset = UtcOffset::parse(author_tz, &format)?;
let date_time_utc = OffsetDateTime::from_unix_timestamp(author_time)?;
Ok(date_time_utc.to_offset(offset))
} else {
// Directly return current time in UTC if there's no committer time or timezone
Ok(time::OffsetDateTime::now_utc())
}
}
}
// parse_git_blame parses the output of `git blame --incremental`, which returns
// all the blame-entries for a given path incrementally, as it finds them.
//
// Each entry *always* starts with:
//
// <40-byte-hex-sha1> <sourceline> <resultline> <num-lines>
//
// Each entry *always* ends with:
//
// filename <whitespace-quoted-filename-goes-here>
//
// Line numbers are 1-indexed.
//
// A `git blame --incremental` entry looks like this:
//
// 6ad46b5257ba16d12c5ca9f0d4900320959df7f4 2 2 1
// author Joe Schmoe
// author-mail <joe.schmoe@example.com>
// author-time 1709741400
// author-tz +0100
// committer Joe Schmoe
// committer-mail <joe.schmoe@example.com>
// committer-time 1709741400
// committer-tz +0100
// summary Joe's cool commit
// previous 486c2409237a2c627230589e567024a96751d475 index.js
// filename index.js
//
// If the entry has the same SHA as an entry that was already printed then no
// signature information is printed:
//
// 6ad46b5257ba16d12c5ca9f0d4900320959df7f4 3 4 1
// previous 486c2409237a2c627230589e567024a96751d475 index.js
// filename index.js
//
// More about `--incremental` output: https://mirrors.edge.kernel.org/pub/software/scm/git/docs/git-blame.html
fn parse_git_blame(output: &str) -> Result<Vec<BlameEntry>> {
let mut entries: Vec<BlameEntry> = Vec::new();
let mut index: HashMap<Oid, usize> = HashMap::default();
let mut current_entry: Option<BlameEntry> = None;
for line in output.lines() {
let mut done = false;
match &mut current_entry {
None => {
let mut new_entry = BlameEntry::new_from_blame_line(line)?;
if let Some(existing_entry) = index
.get(&new_entry.sha)
.and_then(|slot| entries.get(*slot))
{
new_entry.author = existing_entry.author.clone();
new_entry.author_mail = existing_entry.author_mail.clone();
new_entry.author_time = existing_entry.author_time;
new_entry.author_tz = existing_entry.author_tz.clone();
new_entry.committer = existing_entry.committer.clone();
new_entry.committer_mail = existing_entry.committer_mail.clone();
new_entry.committer_time = existing_entry.committer_time;
new_entry.committer_tz = existing_entry.committer_tz.clone();
new_entry.summary = existing_entry.summary.clone();
}
current_entry.replace(new_entry);
}
Some(entry) => {
let Some((key, value)) = line.split_once(' ') else {
continue;
};
let is_committed = !entry.sha.is_zero();
match key {
"filename" => {
entry.filename = value.into();
done = true;
}
"previous" => entry.previous = Some(value.into()),
"summary" if is_committed => entry.summary = Some(value.into()),
"author" if is_committed => entry.author = Some(value.into()),
"author-mail" if is_committed => entry.author_mail = Some(value.into()),
"author-time" if is_committed => {
entry.author_time = Some(value.parse::<i64>()?)
}
"author-tz" if is_committed => entry.author_tz = Some(value.into()),
"committer" if is_committed => entry.committer = Some(value.into()),
"committer-mail" if is_committed => entry.committer_mail = Some(value.into()),
"committer-time" if is_committed => {
entry.committer_time = Some(value.parse::<i64>()?)
}
"committer-tz" if is_committed => entry.committer_tz = Some(value.into()),
_ => {}
}
}
};
if done {
if let Some(entry) = current_entry.take() {
index.insert(entry.sha, entries.len());
// We only want annotations that have a commit.
if !entry.sha.is_zero() {
entries.push(entry);
}
}
}
}
Ok(entries)
}
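The header line of each entry encodes <sha> <sourceline> <resultline> <num-lines>; the only non-obvious step in new_from_blame_line is turning the 1-based result line into a 0-based, end-exclusive range. A tiny standalone check of that arithmetic, using the first sample entry from the comment above:

fn result_range(final_line_number: u32, line_count: u32) -> std::ops::Range<u32> {
    // 1-based result line -> 0-based start; end is exclusive.
    let start_line = final_line_number.saturating_sub(1);
    start_line..start_line + line_count
}

fn main() {
    // "6ad46b5257ba16d12c5ca9f0d4900320959df7f4 2 2 1" -> result line 2, one line.
    assert_eq!(result_range(2, 1), 1..2);
}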
#[cfg(test)]
mod tests {
use std::path::PathBuf;
use super::parse_git_blame;
use super::BlameEntry;
fn read_test_data(filename: &str) -> String {
let mut path = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
path.push("test_data");
path.push(filename);
std::fs::read_to_string(&path)
.unwrap_or_else(|_| panic!("Could not read test data at {:?}. Is it generated?", path))
}
fn assert_eq_golden(entries: &Vec<BlameEntry>, golden_filename: &str) {
let mut path = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
path.push("test_data");
path.push("golden");
path.push(format!("{}.json", golden_filename));
let have_json =
serde_json::to_string_pretty(&entries).expect("could not serialize entries to JSON");
let update = std::env::var("UPDATE_GOLDEN")
.map(|val| val.to_ascii_lowercase() == "true")
.unwrap_or(false);
if update {
std::fs::create_dir_all(path.parent().unwrap())
.expect("could not create golden test data directory");
std::fs::write(&path, have_json).expect("could not write out golden data");
} else {
let want_json =
std::fs::read_to_string(&path).unwrap_or_else(|_| {
panic!("could not read golden test data file at {:?}. Did you run the test with UPDATE_GOLDEN=true before?", path);
});
pretty_assertions::assert_eq!(have_json, want_json, "wrong blame entries");
}
}
#[test]
fn test_parse_git_blame_not_committed() {
let output = read_test_data("blame_incremental_not_committed");
let entries = parse_git_blame(&output).unwrap();
assert_eq_golden(&entries, "blame_incremental_not_committed");
}
#[test]
fn test_parse_git_blame_simple() {
let output = read_test_data("blame_incremental_simple");
let entries = parse_git_blame(&output).unwrap();
assert_eq_golden(&entries, "blame_incremental_simple");
}
#[test]
fn test_parse_git_blame_complex() {
let output = read_test_data("blame_incremental_complex");
let entries = parse_git_blame(&output).unwrap();
assert_eq_golden(&entries, "blame_incremental_complex");
}
}

View File

@@ -1,35 +0,0 @@
use crate::Oid;
use anyhow::{anyhow, Result};
use collections::HashMap;
use std::path::Path;
use std::process::Command;
pub fn get_messages(working_directory: &Path, shas: &[Oid]) -> Result<HashMap<Oid, String>> {
const MARKER: &'static str = "<MARKER>";
let output = Command::new("git")
.current_dir(working_directory)
.arg("show")
.arg("-s")
.arg(format!("--format=%B{}", MARKER))
.args(shas.iter().map(ToString::to_string))
.output()
.map_err(|e| anyhow!("Failed to start git blame process: {}", e))?;
anyhow::ensure!(
output.status.success(),
"'git show' failed with error {:?}",
output.status
);
Ok(shas
.iter()
.cloned()
.zip(
String::from_utf8_lossy(&output.stdout)
.trim()
.split_terminator(MARKER)
.map(|str| String::from(str.trim())),
)
.collect::<HashMap<Oid, String>>())
}
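A hedged usage sketch of get_messages; the repository path is a placeholder, and the crate paths follow the git::commit / git::Oid layout shown in this diff:

use std::path::Path;

fn print_messages() -> anyhow::Result<()> {
    // The sha is the sample commit used elsewhere in this diff's test comments.
    let shas: Vec<git::Oid> = vec!["6ad46b5257ba16d12c5ca9f0d4900320959df7f4".parse()?];
    let messages = git::commit::get_messages(Path::new("/path/to/working/copy"), &shas)?;
    for (sha, message) in &messages {
        println!("{sha}: {message}");
    }
    Ok(())
}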

View File

@@ -1,107 +1,11 @@
use anyhow::{anyhow, Context, Result};
use serde::{Deserialize, Serialize};
use std::ffi::OsStr;
use std::fmt;
use std::str::FromStr;
pub use git2 as libgit;
pub use lazy_static::lazy_static;
pub mod blame;
pub mod commit;
pub mod diff;
pub mod permalink;
lazy_static! {
pub static ref DOT_GIT: &'static OsStr = OsStr::new(".git");
pub static ref GITIGNORE: &'static OsStr = OsStr::new(".gitignore");
}
#[derive(Clone, Copy, Eq, Hash, PartialEq)]
pub struct Oid(libgit::Oid);
impl Oid {
pub fn from_bytes(bytes: &[u8]) -> Result<Self> {
let oid = libgit::Oid::from_bytes(bytes).context("failed to parse bytes into git oid")?;
Ok(Self(oid))
}
pub fn as_bytes(&self) -> &[u8] {
self.0.as_bytes()
}
pub(crate) fn is_zero(&self) -> bool {
self.0.is_zero()
}
}
impl FromStr for Oid {
type Err = anyhow::Error;
fn from_str(s: &str) -> std::prelude::v1::Result<Self, Self::Err> {
libgit::Oid::from_str(s)
.map_err(|error| anyhow!("failed to parse git oid: {}", error))
.map(|oid| Self(oid))
}
}
impl fmt::Debug for Oid {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Display::fmt(self, f)
}
}
impl fmt::Display for Oid {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.0.fmt(f)
}
}
impl Serialize for Oid {
fn serialize<S>(&self, serializer: S) -> std::prelude::v1::Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
serializer.serialize_str(&self.0.to_string())
}
}
impl<'de> Deserialize<'de> for Oid {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
s.parse::<Oid>().map_err(serde::de::Error::custom)
}
}
impl Default for Oid {
fn default() -> Self {
Self(libgit::Oid::zero())
}
}
impl From<Oid> for u32 {
fn from(oid: Oid) -> Self {
let bytes = oid.0.as_bytes();
debug_assert!(bytes.len() > 4);
let mut u32_bytes: [u8; 4] = [0; 4];
u32_bytes.copy_from_slice(&bytes[..4]);
u32::from_ne_bytes(u32_bytes)
}
}
impl From<Oid> for usize {
fn from(oid: Oid) -> Self {
let bytes = oid.0.as_bytes();
debug_assert!(bytes.len() > 8);
let mut u64_bytes: [u8; 8] = [0; 8];
u64_bytes.copy_from_slice(&bytes[..8]);
u64::from_ne_bytes(u64_bytes) as usize
}
}

Some files were not shown because too many files have changed in this diff.