Compare commits
165 Commits
context-se ... remote-ker

| Author | SHA1 | Date |
|---|---|---|
| | 473bc89d3a | |
| | 118e7a66b3 | |
| | 6469500330 | |
| | 694231afd1 | |
| | 566c93a0f5 | |
| | 1430718d1a | |
| | e31f44450e | |
| | e0761db62d | |
| | 8c342ef706 | |
| | 7e67753d51 | |
| | 1475a7000f | |
| | 41fd9189e3 | |
| | 973498e075 | |
| | b63394f4bd | |
| | 743165fa6c | |
| | e03968f538 | |
| | 3c57a4071c | |
| | ad6a07e574 | |
| | c2668bc953 | |
| | 705a06c3dd | |
| | f77b6ab79c | |
| | ea5131ce0a | |
| | 1c2b3ad782 | |
| | 496dae968b | |
| | 5c6565a9e0 | |
| | 7853e32f80 | |
| | f5cbfa718e | |
| | 6a2c712990 | |
| | 9454f0f1c7 | |
| | 5b0c15d8c4 | |
| | aae39071ef | |
| | a35b73e63e | |
| | c0d11be75f | |
| | 0e26d22fea | |
| | bd0f197415 | |
| | 343c88574a | |
| | e7a0890086 | |
| | d4c5c0f05e | |
| | f0c7e62adc | |
| | 80d50f56f3 | |
| | fb6c987e3e | |
| | b4c2f29c8b | |
| | 8666ec95ba | |
| | 889aac9c03 | |
| | 5b9916e34b | |
| | 5b317f60df | |
| | e2552b9add | |
| | 37899187c6 | |
| | d265e44209 | |
| | f12981db32 | |
| | d99f5fe83e | |
| | df1d0dec0a | |
| | ad94ad511a | |
| | 0e7770a9a2 | |
| | 3f905d57e5 | |
| | f01a86c644 | |
| | 5fd7afb9da | |
| | 9260abafba | |
| | d92166f9f6 | |
| | 59a355da74 | |
| | ee207ab77e | |
| | 31566cb5a0 | |
| | 2d3476530e | |
| | f9990b42fa | |
| | 97e9137cb7 | |
| | 932c7e23c8 | |
| | 65a9c8d994 | |
| | 33f09bad60 | |
| | 792c1e4710 | |
| | b421ffafb5 | |
| | 21c785ede4 | |
| | 516f7b3642 | |
| | f34877334e | |
| | 6e296eb4b6 | |
| | 4c8c6c08fe | |
| | 050ce919ba | |
| | 369828f51c | |
| | ac5ecf5487 | |
| | 1235d0808e | |
| | 6ff69faf37 | |
| | f449e8d3d3 | |
| | da09cbd055 | |
| | 4327459d2a | |
| | cc601bd770 | |
| | c491b75e07 | |
| | 3420ebb428 | |
| | b23d72ec4f | |
| | e25a03cd3c | |
| | 9e8ff3f198 | |
| | 6d80d5b74b | |
| | 7137bdee02 | |
| | 98403aa994 | |
| | 794ad1af75 | |
| | 4b1f0c033b | |
| | 3796b4a55c | |
| | 8c02929710 | |
| | 1e14697bb6 | |
| | f619a872b5 | |
| | c03f5b351b | |
| | a8df0642a8 | |
| | aee01f2c50 | |
| | c9546070ac | |
| | 1855a312d0 | |
| | 332b33716a | |
| | acf25324be | |
| | f0882f44a7 | |
| | 189a034e71 | |
| | 7f52071513 | |
| | 56c93be4de | |
| | 43999c47e1 | |
| | 690a725667 | |
| | b5ce8e7aa5 | |
| | d177a1d4e5 | |
| | 5d17cfab31 | |
| | 404ddeebc5 | |
| | ad370ed986 | |
| | ced9045591 | |
| | 0d9bcbba25 | |
| | c650ba4e72 | |
| | 5fab3ca5ba | |
| | 621a200d2f | |
| | 2544fad8a4 | |
| | 49eb865e8a | |
| | a650fe0d77 | |
| | 204a989758 | |
| | 776cfe44d7 | |
| | 35798212c4 | |
| | 89f9a506f9 | |
| | 04ba75e2e5 | |
| | f7b4431659 | |
| | 6b9eba2109 | |
| | 58e3b788dc | |
| | 9fd971d8c9 | |
| | cf7679e6a0 | |
| | 07c0c54c28 | |
| | 093c9cc87b | |
| | 6b3c909155 | |
| | 7e349e52b1 | |
| | 84d17fb191 | |
| | d3d408d47d | |
| | 6e477bbf56 | |
| | 3c2dcf50fa | |
| | a15f408f0c | |
| | b1cd9e4d24 | |
| | 254ce74036 | |
| | b913cf2e02 | |
| | 92613a8904 | |
| | 96deabfb78 | |
| | ad31aacb7a | |
| | a04c2ecff7 | |
| | f96b29ca54 | |
| | 9d2fc691de | |
| | b084d53f8e | |
| | 7832883c74 | |
| | eb4e7472e6 | |
| | 27dfb48a7b | |
| | 3a319e6cbe | |
| | 84e47fb80b | |
| | 7e82ca8082 | |
| | b44078781d | |
| | 3b1f12af75 | |
| | b8cf0a1ed1 | |
| | 3f224274da | |
| | 56cf32cb91 | |
| | 90ffd65a10 | |
7 .github/workflows/ci.yml (vendored)
@@ -245,6 +245,7 @@ jobs:
          # 25 was chosen arbitrarily.
          fetch-depth: 25
          clean: false
          ref: ${{ github.ref }}

      - name: Limit target directory size
        run: script/clear-target-dir-if-larger-than 100
@@ -261,6 +262,9 @@ jobs:
          mkdir -p target/
          # Ignore any errors that occur while drafting release notes to not fail the build.
          script/draft-release-notes "$RELEASE_VERSION" "$RELEASE_CHANNEL" > target/release-notes.md || true
          script/create-draft-release target/release-notes.md
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      - name: Generate license file
        run: script/generate-licenses
@@ -306,7 +310,6 @@ jobs:
            target/aarch64-apple-darwin/release/Zed-aarch64.dmg
            target/x86_64-apple-darwin/release/Zed-x86_64.dmg
            target/release/Zed.dmg
          body_path: target/release-notes.md
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

@@ -353,7 +356,6 @@ jobs:
          files: |
            target/zed-remote-server-linux-x86_64.gz
            target/release/zed-linux-x86_64.tar.gz
          body: ""
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

@@ -400,6 +402,5 @@ jobs:
          files: |
            target/zed-remote-server-linux-aarch64.gz
            target/release/zed-linux-aarch64.tar.gz
          body: ""
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
21 .github/workflows/script_checks.yml (vendored, new file)
@@ -0,0 +1,21 @@
name: Script

on:
  pull_request:
    paths:
      - "script/**"
  push:
    branches:
      - main

jobs:
  shellcheck:
    name: "ShellCheck Scripts"
    if: github.repository_owner == 'zed-industries'
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4
      - name: Shellcheck ./scripts
        run: |
          ./script/shellcheck-scripts error
12 .mailmap
@@ -22,10 +22,14 @@ Antonio Scandurra <me@as-cii.com> <antonio@zed.dev>
Bennet Bo Fenner <bennet@zed.dev>
Bennet Bo Fenner <bennet@zed.dev> <53836821+bennetbo@users.noreply.github.com>
Bennet Bo Fenner <bennet@zed.dev> <bennetbo@gmx.de>
Boris Cherny <boris@anthropic.com>
Boris Cherny <boris@anthropic.com> <boris@performancejs.com>
Chris Hayes <chris+git@hayes.software>
Christian Bergschneider <christian.bergschneider@gmx.de>
Christian Bergschneider <christian.bergschneider@gmx.de> <magiclake@gmx.de>
Conrad Irwin <conrad@zed.dev>
Conrad Irwin <conrad@zed.dev> <conrad.irwin@gmail.com>
Dairon Medina <dairon.medina@gmail.com>
Danilo Leal <danilo@zed.dev>
Danilo Leal <danilo@zed.dev> <67129314+danilo-leal@users.noreply.github.com>
Evren Sen <nervenes@icloud.com>
@@ -35,6 +39,7 @@ Fernando Tagawa <tagawafernando@gmail.com>
Fernando Tagawa <tagawafernando@gmail.com> <fernando.tagawa.gamail.com@gmail.com>
Greg Morenz <greg-morenz@droid.cafe>
Greg Morenz <greg-morenz@droid.cafe> <morenzg@gmail.com>
Ihnat Aŭtuška <autushka.ihnat@gmail.com>
Ivan Žužak <izuzak@gmail.com>
Ivan Žužak <izuzak@gmail.com> <ivan.zuzak@github.com>
Joseph T. Lyons <JosephTLyons@gmail.com>
@@ -61,10 +66,13 @@ Max Brunsfeld <maxbrunsfeld@gmail.com> <max@zed.dev>
Max Linke <maxlinke88@gmail.com>
Max Linke <maxlinke88@gmail.com> <kain88-de@users.noreply.github.com>
Michael Sloan <michael@zed.dev>
Michael Sloan <michael@zed.dev> <mgsloan@gmail.com>
Michael Sloan <michael@zed.dev> <mgsloan@google.com>
Mikayla Maki <mikayla@zed.dev>
Mikayla Maki <mikayla@zed.dev> <mikayla.c.maki@gmail.com>
Mikayla Maki <mikayla@zed.dev> <mikayla.c.maki@icloud.com>
Muhammad Talal Anwar <mail@talal.io>
Muhammad Talal Anwar <mail@talal.io> <talalanwar@outlook.com>
Nate Butler <iamnbutler@gmail.com>
Nate Butler <iamnbutler@gmail.com> <nate@zed.dev>
Nathan Sobo <nathan@zed.dev>
@@ -88,7 +96,11 @@ Robert Clover <git@clo4.net>
Robert Clover <git@clo4.net> <robert@clover.gdn>
Roy Williams <roy.williams.iii@gmail.com>
Roy Williams <roy.williams.iii@gmail.com> <roy@anthropic.com>
Sebastijan Kelnerič <sebastijan.kelneric@sebba.dev>
Sebastijan Kelnerič <sebastijan.kelneric@sebba.dev> <sebastijan.kelneric@vichava.com>
Sergey Onufrienko <sergey@onufrienko.com>
Shish <webmaster@shishnet.org>
Shish <webmaster@shishnet.org> <shish@shishnet.org>
Thorben Kröger <dev@thorben.net>
Thorben Kröger <dev@thorben.net> <thorben.kroeger@hexagon.com>
Thorsten Ball <thorsten@zed.dev>
1796 Cargo.lock (generated; file diff suppressed because it is too large)
49 Cargo.toml
@@ -148,7 +148,6 @@ members = [
|
||||
"extensions/haskell",
|
||||
"extensions/html",
|
||||
"extensions/lua",
|
||||
"extensions/ocaml",
|
||||
"extensions/php",
|
||||
"extensions/perplexity",
|
||||
"extensions/prisma",
|
||||
@@ -369,12 +368,14 @@ indexmap = { version = "1.6.2", features = ["serde"] }
|
||||
indoc = "2"
|
||||
itertools = "0.13.0"
|
||||
jsonwebtoken = "9.3"
|
||||
jupyter-protocol = { version = "0.2.0" }
|
||||
jupyter-websocket-client = { version = "0.4.1" }
|
||||
libc = "0.2"
|
||||
linkify = "0.10.0"
|
||||
log = { version = "0.4.16", features = ["kv_unstable_serde", "serde"] }
|
||||
markup5ever_rcdom = "0.3.0"
|
||||
nanoid = "0.4"
|
||||
nbformat = "0.5.0"
|
||||
nbformat = "0.6.0"
|
||||
nix = "0.29"
|
||||
num-format = "0.4.4"
|
||||
once_cell = "1.19.0"
|
||||
@@ -389,7 +390,7 @@ pet-core = { git = "https://github.com/microsoft/python-environment-tools.git",
|
||||
pet-poetry = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "ffcbf3f28c46633abd5448a52b1f396c322e0d6c" }
|
||||
pet-reporter = { git = "https://github.com/microsoft/python-environment-tools.git", rev = "ffcbf3f28c46633abd5448a52b1f396c322e0d6c" }
|
||||
postage = { version = "0.5", features = ["futures-traits"] }
|
||||
pretty_assertions = "1.3.0"
|
||||
pretty_assertions = { version = "1.3.0", features = ["unstable"] }
|
||||
profiling = "1"
|
||||
prost = "0.9"
|
||||
prost-build = "0.9"
|
||||
@@ -408,7 +409,7 @@ reqwest = { git = "https://github.com/zed-industries/reqwest.git", rev = "fd110f
|
||||
"stream",
|
||||
] }
|
||||
rsa = "0.9.6"
|
||||
runtimelib = { version = "0.19.0", default-features = false, features = [
|
||||
runtimelib = { version = "0.21.0", default-features = false, features = [
|
||||
"async-dispatcher-runtime",
|
||||
] }
|
||||
rustc-demangle = "0.1.23"
|
||||
@@ -562,6 +563,46 @@ rustybuzz = { opt-level = 3 }
|
||||
ttf-parser = { opt-level = 3 }
|
||||
wasmtime-cranelift = { opt-level = 3 }
|
||||
wasmtime = { opt-level = 3 }
|
||||
# Build single-source-file crates with cg=1 as it helps make `cargo build` of a whole workspace a bit faster
|
||||
activity_indicator = { codegen-units = 1 }
|
||||
assets = { codegen-units = 1 }
|
||||
breadcrumbs = { codegen-units = 1 }
|
||||
collections = { codegen-units = 1 }
|
||||
command_palette = { codegen-units = 1 }
|
||||
command_palette_hooks = { codegen-units = 1 }
|
||||
evals = { codegen-units = 1 }
|
||||
extension_cli = { codegen-units = 1 }
|
||||
feature_flags = { codegen-units = 1 }
|
||||
file_icons = { codegen-units = 1 }
|
||||
fsevent = { codegen-units = 1 }
|
||||
image_viewer = { codegen-units = 1 }
|
||||
inline_completion_button = { codegen-units = 1 }
|
||||
install_cli = { codegen-units = 1 }
|
||||
journal = { codegen-units = 1 }
|
||||
menu = { codegen-units = 1 }
|
||||
notifications = { codegen-units = 1 }
|
||||
ollama = { codegen-units = 1 }
|
||||
outline = { codegen-units = 1 }
|
||||
paths = { codegen-units = 1 }
|
||||
prettier = { codegen-units = 1 }
|
||||
project_symbols = { codegen-units = 1 }
|
||||
refineable = { codegen-units = 1 }
|
||||
release_channel = { codegen-units = 1 }
|
||||
reqwest_client = { codegen-units = 1 }
|
||||
rich_text = { codegen-units = 1 }
|
||||
semantic_version = { codegen-units = 1 }
|
||||
session = { codegen-units = 1 }
|
||||
snippet = { codegen-units = 1 }
|
||||
snippets_ui = { codegen-units = 1 }
|
||||
sqlez_macros = { codegen-units = 1 }
|
||||
story = { codegen-units = 1 }
|
||||
supermaven_api = { codegen-units = 1 }
|
||||
telemetry_events = { codegen-units = 1 }
|
||||
theme_selector = { codegen-units = 1 }
|
||||
time_format = { codegen-units = 1 }
|
||||
ui_input = { codegen-units = 1 }
|
||||
vcs_menu = { codegen-units = 1 }
|
||||
zed_actions = { codegen-units = 1 }
|
||||
|
||||
[profile.release]
|
||||
debug = "limited"
|
||||
|
||||
@@ -251,6 +251,8 @@
|
||||
"ctrl-pagedown": "pane::ActivateNextItem",
|
||||
"ctrl-shift-pageup": "pane::SwapItemLeft",
|
||||
"ctrl-shift-pagedown": "pane::SwapItemRight",
|
||||
"back": "pane::GoBack",
|
||||
"forward": "pane::GoForward",
|
||||
"ctrl-w": "pane::CloseActiveItem",
|
||||
"ctrl-f4": "pane::CloseActiveItem",
|
||||
"alt-ctrl-t": ["pane::CloseInactiveItems", { "close_pinned": false }],
|
||||
@@ -648,8 +650,24 @@
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "FileFinder",
|
||||
"bindings": { "ctrl-shift-p": "file_finder::SelectPrev" }
|
||||
"context": "FileFinder && !menu_open",
|
||||
"bindings": {
|
||||
"ctrl-shift-p": "file_finder::SelectPrev",
|
||||
"ctrl": "file_finder::OpenMenu",
|
||||
"ctrl-j": "pane::SplitDown",
|
||||
"ctrl-k": "pane::SplitUp",
|
||||
"ctrl-h": "pane::SplitLeft",
|
||||
"ctrl-l": "pane::SplitRight"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "FileFinder && menu_open",
|
||||
"bindings": {
|
||||
"j": "pane::SplitDown",
|
||||
"k": "pane::SplitUp",
|
||||
"h": "pane::SplitLeft",
|
||||
"l": "pane::SplitRight"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "TabSwitcher",
|
||||
|
||||
@@ -648,8 +648,24 @@
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "FileFinder",
|
||||
"bindings": { "cmd-shift-p": "file_finder::SelectPrev" }
|
||||
"context": "FileFinder && !menu_open",
|
||||
"bindings": {
|
||||
"cmd-shift-p": "file_finder::SelectPrev",
|
||||
"cmd": "file_finder::OpenMenu",
|
||||
"cmd-j": "pane::SplitDown",
|
||||
"cmd-k": "pane::SplitUp",
|
||||
"cmd-h": "pane::SplitLeft",
|
||||
"cmd-l": "pane::SplitRight"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "FileFinder && menu_open",
|
||||
"bindings": {
|
||||
"j": "pane::SplitDown",
|
||||
"k": "pane::SplitUp",
|
||||
"h": "pane::SplitLeft",
|
||||
"l": "pane::SplitRight"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "TabSwitcher",
|
||||
|
||||
@@ -4,9 +4,7 @@
|
||||
"ctrl-shift-[": "pane::ActivatePrevItem",
|
||||
"ctrl-shift-]": "pane::ActivateNextItem",
|
||||
"ctrl-pageup": "pane::ActivatePrevItem",
|
||||
"ctrl-pagedown": "pane::ActivateNextItem",
|
||||
"ctrl-tab": "pane::ActivateNextItem",
|
||||
"ctrl-shift-tab": "pane::ActivatePrevItem"
|
||||
"ctrl-pagedown": "pane::ActivateNextItem"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -18,6 +16,7 @@
|
||||
"ctrl-shift-l": "editor::SplitSelectionIntoLines",
|
||||
"ctrl-shift-a": "editor::SelectLargerSyntaxNode",
|
||||
"ctrl-shift-d": "editor::DuplicateLineDown",
|
||||
"alt-f3": "editor::SelectAllMatches", // find_all_under
|
||||
"f12": "editor::GoToDefinition",
|
||||
"ctrl-f12": "editor::GoToDefinitionSplit",
|
||||
"shift-f12": "editor::FindAllReferences",
|
||||
|
||||
@@ -4,9 +4,7 @@
|
||||
"cmd-shift-[": "pane::ActivatePrevItem",
|
||||
"cmd-shift-]": "pane::ActivateNextItem",
|
||||
"ctrl-pageup": "pane::ActivatePrevItem",
|
||||
"ctrl-pagedown": "pane::ActivateNextItem",
|
||||
"ctrl-tab": "pane::ActivateNextItem",
|
||||
"ctrl-shift-tab": "pane::ActivatePrevItem"
|
||||
"ctrl-pagedown": "pane::ActivateNextItem"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -21,6 +19,7 @@
|
||||
"cmd-shift-l": "editor::SplitSelectionIntoLines",
|
||||
"cmd-shift-a": "editor::SelectLargerSyntaxNode",
|
||||
"cmd-shift-d": "editor::DuplicateLineDown",
|
||||
"ctrl-cmd-g": "editor::SelectAllMatches", // find_all_under
|
||||
"shift-f12": "editor::FindAllReferences",
|
||||
"alt-cmd-down": "editor::GoToDefinition",
|
||||
"ctrl-alt-cmd-down": "editor::GoToDefinitionSplit",
|
||||
|
||||
@@ -304,7 +304,8 @@
|
||||
"ctrl-q": ["vim::PushOperator", { "Literal": {} }],
|
||||
"ctrl-shift-q": ["vim::PushOperator", { "Literal": {} }],
|
||||
"ctrl-r": ["vim::PushOperator", "Register"],
|
||||
"insert": "vim::ToggleReplace"
|
||||
"insert": "vim::ToggleReplace",
|
||||
"ctrl-o": "vim::TemporaryNormal"
|
||||
}
|
||||
},
|
||||
{
|
||||
@@ -380,8 +381,7 @@
|
||||
"shift-b": "vim::CurlyBrackets",
|
||||
"<": "vim::AngleBrackets",
|
||||
">": "vim::AngleBrackets",
|
||||
"a": "vim::AngleBrackets",
|
||||
"g": "vim::Argument"
|
||||
"a": "vim::Argument"
|
||||
}
|
||||
},
|
||||
{
|
||||
|
||||
@@ -157,7 +157,7 @@
|
||||
"auto_signature_help": false,
|
||||
/// Whether to show the signature help after completion or a bracket pair inserted.
|
||||
/// If `auto_signature_help` is enabled, this setting will be treated as enabled also.
|
||||
"show_signature_help_after_edits": true,
|
||||
"show_signature_help_after_edits": false,
|
||||
// Whether to show wrap guides (vertical rulers) in the editor.
|
||||
// Setting this to true will show a guide at the 'preferred_line_length' value
|
||||
// if 'soft_wrap' is set to 'preferred_line_length', and will show any
|
||||
@@ -490,6 +490,9 @@
|
||||
"version": "2",
|
||||
// Whether the assistant is enabled.
|
||||
"enabled": true,
|
||||
// Whether to show inline hints showing the keybindings to use the inline assistant and the
|
||||
// assistant panel.
|
||||
"show_hints": true,
|
||||
// Whether to show the assistant panel button in the status bar.
|
||||
"button": true,
|
||||
// Where to dock the assistant panel. Can be 'left', 'right' or 'bottom'.
|
||||
@@ -580,7 +583,23 @@
|
||||
// Settings related to the file finder.
|
||||
"file_finder": {
|
||||
// Whether to show file icons in the file finder.
|
||||
"file_icons": true
|
||||
"file_icons": true,
|
||||
// Determines how much space the file finder can take up in relation to the available window width.
|
||||
// There are 5 possible width values:
|
||||
//
|
||||
// 1. Small: This value is essentially a fixed width.
|
||||
// "modal_width": "small"
|
||||
// 2. Medium:
|
||||
// "modal_width": "medium"
|
||||
// 3. Large:
|
||||
// "modal_width": "large"
|
||||
// 4. Extra Large:
|
||||
// "modal_width": "xlarge"
|
||||
// 5. Fullscreen: This value removes any horizontal padding, as it consumes the whole viewport width.
|
||||
// "modal_width": "full"
|
||||
//
|
||||
// Default: small
|
||||
"modal_max_width": "small"
|
||||
},
|
||||
// Whether or not to remove any trailing whitespace from lines of a buffer
|
||||
// before saving it.
|
||||
@@ -857,15 +876,8 @@
|
||||
//
|
||||
"file_types": {
|
||||
"Plain Text": ["txt"],
|
||||
"JSON": ["flake.lock"],
|
||||
"JSONC": [
|
||||
"**/.zed/**/*.json",
|
||||
"**/zed/**/*.json",
|
||||
"**/Zed/**/*.json",
|
||||
"tsconfig.json",
|
||||
"pyrightconfig.json"
|
||||
],
|
||||
"TOML": ["uv.lock"]
|
||||
"JSONC": ["**/.zed/**/*.json", "**/zed/**/*.json", "**/Zed/**/*.json"],
|
||||
"Shell Script": [".env.*"]
|
||||
},
|
||||
/// By default use a recent system version of node, or install our own.
|
||||
/// You can override this to use a version of node that is not in $PATH with:
|
||||
@@ -1053,13 +1065,11 @@
|
||||
"api_url": "https://generativelanguage.googleapis.com"
|
||||
},
|
||||
"ollama": {
|
||||
"api_url": "http://localhost:11434",
|
||||
"low_speed_timeout_in_seconds": 60
|
||||
"api_url": "http://localhost:11434"
|
||||
},
|
||||
"openai": {
|
||||
"version": "1",
|
||||
"api_url": "https://api.openai.com/v1",
|
||||
"low_speed_timeout_in_seconds": 600
|
||||
"api_url": "https://api.openai.com/v1"
|
||||
}
|
||||
},
|
||||
// Zed's Prettier integration settings.
|
||||
|
||||
@@ -1,13 +1,12 @@
|
||||
mod supported_countries;
|
||||
|
||||
use std::time::Duration;
|
||||
use std::{pin::Pin, str::FromStr};
|
||||
|
||||
use anyhow::{anyhow, Context, Result};
|
||||
use chrono::{DateTime, Utc};
|
||||
use futures::{io::BufReader, stream::BoxStream, AsyncBufReadExt, AsyncReadExt, Stream, StreamExt};
|
||||
use http_client::http::{HeaderMap, HeaderValue};
|
||||
use http_client::{AsyncBody, HttpClient, HttpRequestExt, Method, Request as HttpRequest};
|
||||
use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use strum::{EnumIter, EnumString};
|
||||
use thiserror::Error;
|
||||
@@ -161,10 +160,7 @@ pub async fn complete(
|
||||
.method(Method::POST)
|
||||
.uri(uri)
|
||||
.header("Anthropic-Version", "2023-06-01")
|
||||
.header(
|
||||
"Anthropic-Beta",
|
||||
"tools-2024-04-04,prompt-caching-2024-07-31,max-tokens-3-5-sonnet-2024-07-15",
|
||||
)
|
||||
.header("Anthropic-Beta", "prompt-caching-2024-07-31")
|
||||
.header("X-Api-Key", api_key)
|
||||
.header("Content-Type", "application/json");
|
||||
|
||||
@@ -210,9 +206,8 @@ pub async fn stream_completion(
|
||||
api_url: &str,
|
||||
api_key: &str,
|
||||
request: Request,
|
||||
low_speed_timeout: Option<Duration>,
|
||||
) -> Result<BoxStream<'static, Result<Event, AnthropicError>>, AnthropicError> {
|
||||
stream_completion_with_rate_limit_info(client, api_url, api_key, request, low_speed_timeout)
|
||||
stream_completion_with_rate_limit_info(client, api_url, api_key, request)
|
||||
.await
|
||||
.map(|output| output.0)
|
||||
}
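The hunk above removes the `low_speed_timeout` argument from `stream_completion`, which now just forwards `client`, `api_url`, `api_key`, and `request` to `stream_completion_with_rate_limit_info`. A minimal sketch of a caller under the new signature; the `&dyn HttpClient` parameter type, the crate paths, and the `stream_events` helper are assumptions made for illustration, not taken from this diff:

```rust
use anthropic::{stream_completion, AnthropicError, Request};
use futures::StreamExt;
use http_client::HttpClient;

// Hypothetical caller: the per-request read timeout is no longer threaded through.
async fn stream_events(
    client: &dyn HttpClient,
    api_url: &str,
    api_key: &str,
    request: Request,
) -> Result<(), AnthropicError> {
    let mut events = stream_completion(client, api_url, api_key, request).await?;
    while let Some(event) = events.next().await {
        let _event = event?; // each item is a Result<Event, AnthropicError>
        // ...handle the streamed event...
    }
    Ok(())
}
```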
|
||||
@@ -264,7 +259,6 @@ pub async fn stream_completion_with_rate_limit_info(
|
||||
api_url: &str,
|
||||
api_key: &str,
|
||||
request: Request,
|
||||
low_speed_timeout: Option<Duration>,
|
||||
) -> Result<
|
||||
(
|
||||
BoxStream<'static, Result<Event, AnthropicError>>,
|
||||
@@ -277,7 +271,7 @@ pub async fn stream_completion_with_rate_limit_info(
|
||||
stream: true,
|
||||
};
|
||||
let uri = format!("{api_url}/v1/messages");
|
||||
let mut request_builder = HttpRequest::builder()
|
||||
let request_builder = HttpRequest::builder()
|
||||
.method(Method::POST)
|
||||
.uri(uri)
|
||||
.header("Anthropic-Version", "2023-06-01")
|
||||
@@ -287,9 +281,6 @@ pub async fn stream_completion_with_rate_limit_info(
|
||||
)
|
||||
.header("X-Api-Key", api_key)
|
||||
.header("Content-Type", "application/json");
|
||||
if let Some(low_speed_timeout) = low_speed_timeout {
|
||||
request_builder = request_builder.read_timeout(low_speed_timeout);
|
||||
}
|
||||
let serialized_request =
|
||||
serde_json::to_string(&request).context("failed to serialize request")?;
|
||||
let request = request_builder
|
||||
|
||||
@@ -18,6 +18,7 @@ mod terminal_inline_assistant;
|
||||
mod tool_working_set;
|
||||
mod tools;
|
||||
|
||||
use crate::slash_command::project_command::ProjectSlashCommandFeatureFlag;
|
||||
pub use crate::slash_command_working_set::{SlashCommandId, SlashCommandWorkingSet};
|
||||
pub use crate::tool_working_set::{ToolId, ToolWorkingSet};
|
||||
pub use assistant_panel::{AssistantPanel, AssistantPanelEvent};
|
||||
@@ -215,23 +216,32 @@ pub fn init(
|
||||
});
|
||||
}
|
||||
|
||||
if cx.has_flag::<SearchSlashCommandFeatureFlag>() {
|
||||
cx.spawn(|mut cx| {
|
||||
let client = client.clone();
|
||||
async move {
|
||||
let embedding_provider = CloudEmbeddingProvider::new(client.clone());
|
||||
let semantic_index = SemanticDb::new(
|
||||
paths::embeddings_dir().join("semantic-index-db.0.mdb"),
|
||||
Arc::new(embedding_provider),
|
||||
&mut cx,
|
||||
)
|
||||
.await?;
|
||||
cx.spawn(|mut cx| {
|
||||
let client = client.clone();
|
||||
async move {
|
||||
let is_search_slash_command_enabled = cx
|
||||
.update(|cx| cx.wait_for_flag::<SearchSlashCommandFeatureFlag>())?
|
||||
.await;
|
||||
let is_project_slash_command_enabled = cx
|
||||
.update(|cx| cx.wait_for_flag::<ProjectSlashCommandFeatureFlag>())?
|
||||
.await;
|
||||
|
||||
cx.update(|cx| cx.set_global(semantic_index))
|
||||
if !is_search_slash_command_enabled && !is_project_slash_command_enabled {
|
||||
return Ok(());
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
|
||||
let embedding_provider = CloudEmbeddingProvider::new(client.clone());
|
||||
let semantic_index = SemanticDb::new(
|
||||
paths::embeddings_dir().join("semantic-index-db.0.mdb"),
|
||||
Arc::new(embedding_provider),
|
||||
&mut cx,
|
||||
)
|
||||
.await?;
|
||||
|
||||
cx.update(|cx| cx.set_global(semantic_index))
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
|
||||
context_store::init(&client.clone().into());
|
||||
prompt_library::init(cx);
|
||||
|
||||
@@ -1480,7 +1480,6 @@ struct ScrollPosition {
|
||||
}
|
||||
|
||||
struct PatchViewState {
|
||||
footer_block_id: CustomBlockId,
|
||||
crease_id: CreaseId,
|
||||
editor: Option<PatchEditorState>,
|
||||
update_task: Option<Task<()>>,
|
||||
@@ -1934,7 +1933,7 @@ impl ContextEditor {
|
||||
);
|
||||
});
|
||||
|
||||
Crease::new(
|
||||
Crease::inline(
|
||||
start..end,
|
||||
placeholder,
|
||||
fold_toggle("tool-use"),
|
||||
@@ -2032,7 +2031,7 @@ impl ContextEditor {
|
||||
let end = buffer
|
||||
.anchor_in_excerpt(excerpt_id, command.source_range.end)
|
||||
.unwrap();
|
||||
Crease::new(start..end, placeholder, render_toggle, render_trailer)
|
||||
Crease::inline(start..end, placeholder, render_toggle, render_trailer)
|
||||
}),
|
||||
cx,
|
||||
);
|
||||
@@ -2051,30 +2050,6 @@ impl ContextEditor {
|
||||
ContextEvent::SlashCommandOutputSectionAdded { section } => {
|
||||
self.insert_slash_command_output_sections([section.clone()], false, cx);
|
||||
}
|
||||
ContextEvent::SlashCommandFinished {
|
||||
output_range: _output_range,
|
||||
run_commands_in_ranges,
|
||||
} => {
|
||||
for range in run_commands_in_ranges {
|
||||
let commands = self.context.update(cx, |context, cx| {
|
||||
context.reparse(cx);
|
||||
context
|
||||
.pending_commands_for_range(range.clone(), cx)
|
||||
.to_vec()
|
||||
});
|
||||
|
||||
for command in commands {
|
||||
self.run_command(
|
||||
command.source_range,
|
||||
&command.name,
|
||||
&command.arguments,
|
||||
false,
|
||||
self.workspace.clone(),
|
||||
cx,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
ContextEvent::UsePendingTools => {
|
||||
let pending_tool_uses = self
|
||||
.context
|
||||
@@ -2124,7 +2099,7 @@ impl ContextEditor {
|
||||
|
||||
let buffer_row = MultiBufferRow(start.to_point(&buffer).row);
|
||||
|
||||
let crease = Crease::new(
|
||||
let crease = Crease::inline(
|
||||
start..end,
|
||||
placeholder,
|
||||
fold_toggle("tool-use"),
|
||||
@@ -2153,6 +2128,37 @@ impl ContextEditor {
|
||||
command_id: InvokedSlashCommandId,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
if let Some(invoked_slash_command) =
|
||||
self.context.read(cx).invoked_slash_command(&command_id)
|
||||
{
|
||||
if let InvokedSlashCommandStatus::Finished = invoked_slash_command.status {
|
||||
let run_commands_in_ranges = invoked_slash_command
|
||||
.run_commands_in_ranges
|
||||
.iter()
|
||||
.cloned()
|
||||
.collect::<Vec<_>>();
|
||||
for range in run_commands_in_ranges {
|
||||
let commands = self.context.update(cx, |context, cx| {
|
||||
context.reparse(cx);
|
||||
context
|
||||
.pending_commands_for_range(range.clone(), cx)
|
||||
.to_vec()
|
||||
});
|
||||
|
||||
for command in commands {
|
||||
self.run_command(
|
||||
command.source_range,
|
||||
&command.name,
|
||||
&command.arguments,
|
||||
false,
|
||||
self.workspace.clone(),
|
||||
cx,
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
self.editor.update(cx, |editor, cx| {
|
||||
if let Some(invoked_slash_command) =
|
||||
self.context.read(cx).invoked_slash_command(&command_id)
|
||||
@@ -2192,18 +2198,14 @@ impl ContextEditor {
|
||||
let crease_end = buffer
|
||||
.anchor_in_excerpt(excerpt_id, invoked_slash_command.range.end)
|
||||
.unwrap();
|
||||
let fold_placeholder =
|
||||
invoked_slash_command_fold_placeholder(command_id, context);
|
||||
let crease_ids = editor.insert_creases(
|
||||
[Crease::new(
|
||||
crease_start..crease_end,
|
||||
fold_placeholder.clone(),
|
||||
fold_toggle("invoked-slash-command"),
|
||||
|_row, _folded, _cx| Empty.into_any(),
|
||||
)],
|
||||
cx,
|
||||
let crease = Crease::inline(
|
||||
crease_start..crease_end,
|
||||
invoked_slash_command_fold_placeholder(command_id, context),
|
||||
fold_toggle("invoked-slash-command"),
|
||||
|_row, _folded, _cx| Empty.into_any(),
|
||||
);
|
||||
editor.fold_ranges([(crease_start..crease_end, fold_placeholder)], false, cx);
|
||||
let crease_ids = editor.insert_creases([crease.clone()], cx);
|
||||
editor.fold_creases(vec![crease], false, cx);
|
||||
entry.insert(crease_ids[0]);
|
||||
} else {
|
||||
cx.notify()
|
||||
@@ -2225,23 +2227,32 @@ impl ContextEditor {
|
||||
cx: &mut ViewContext<ContextEditor>,
|
||||
) {
|
||||
let this = cx.view().downgrade();
|
||||
let mut removed_crease_ids = Vec::new();
|
||||
let mut removed_block_ids = HashSet::default();
|
||||
let mut editors_to_close = Vec::new();
|
||||
for range in removed {
|
||||
if let Some(state) = self.patches.remove(range) {
|
||||
editors_to_close.extend(state.editor.and_then(|state| state.editor.upgrade()));
|
||||
removed_block_ids.insert(state.footer_block_id);
|
||||
removed_crease_ids.push(state.crease_id);
|
||||
}
|
||||
}
|
||||
|
||||
self.editor.update(cx, |editor, cx| {
|
||||
let snapshot = editor.snapshot(cx);
|
||||
let multibuffer = &snapshot.buffer_snapshot;
|
||||
let (&excerpt_id, _, _) = multibuffer.as_singleton().unwrap();
|
||||
|
||||
let mut replaced_blocks = HashMap::default();
|
||||
let mut removed_crease_ids = Vec::new();
|
||||
let mut ranges_to_unfold: Vec<Range<Anchor>> = Vec::new();
|
||||
for range in removed {
|
||||
if let Some(state) = self.patches.remove(range) {
|
||||
let patch_start = multibuffer
|
||||
.anchor_in_excerpt(excerpt_id, range.start)
|
||||
.unwrap();
|
||||
let patch_end = multibuffer
|
||||
.anchor_in_excerpt(excerpt_id, range.end)
|
||||
.unwrap();
|
||||
|
||||
editors_to_close.extend(state.editor.and_then(|state| state.editor.upgrade()));
|
||||
ranges_to_unfold.push(patch_start..patch_end);
|
||||
removed_crease_ids.push(state.crease_id);
|
||||
}
|
||||
}
|
||||
editor.unfold_ranges(&ranges_to_unfold, true, false, cx);
|
||||
editor.remove_creases(removed_crease_ids, cx);
|
||||
|
||||
for range in updated {
|
||||
let Some(patch) = self.context.read(cx).patch_for_range(&range, cx).cloned() else {
|
||||
continue;
|
||||
@@ -2254,19 +2265,21 @@ impl ContextEditor {
|
||||
let patch_end = multibuffer
|
||||
.anchor_in_excerpt(excerpt_id, patch.range.end)
|
||||
.unwrap();
|
||||
let render_block: RenderBlock = Box::new({
|
||||
let render_block: RenderBlock = Arc::new({
|
||||
let this = this.clone();
|
||||
let patch_range = range.clone();
|
||||
move |cx: &mut BlockContext<'_, '_>| {
|
||||
let max_width = cx.max_width;
|
||||
let gutter_width = cx.gutter_dimensions.full_width();
|
||||
let block_id = cx.block_id;
|
||||
let selected = cx.selected;
|
||||
this.update(&mut **cx, |this, cx| {
|
||||
this.render_patch_footer(
|
||||
this.render_patch_block(
|
||||
patch_range.clone(),
|
||||
max_width,
|
||||
gutter_width,
|
||||
block_id,
|
||||
selected,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
@@ -2276,25 +2289,16 @@ impl ContextEditor {
|
||||
}
|
||||
});
|
||||
|
||||
let header_placeholder = FoldPlaceholder {
|
||||
render: {
|
||||
let this = this.clone();
|
||||
let patch_range = range.clone();
|
||||
Arc::new(move |fold_id, _range, cx| {
|
||||
this.update(cx, |this, cx| {
|
||||
this.render_patch_header(patch_range.clone(), fold_id, cx)
|
||||
})
|
||||
.ok()
|
||||
.flatten()
|
||||
.unwrap_or_else(|| Empty.into_any())
|
||||
})
|
||||
},
|
||||
..Default::default()
|
||||
};
|
||||
let height = path_count as u32 + 1;
|
||||
let crease = Crease::block(
|
||||
patch_start..patch_end,
|
||||
height,
|
||||
BlockStyle::Flex,
|
||||
render_block.clone(),
|
||||
);
|
||||
|
||||
let should_refold;
|
||||
if let Some(state) = self.patches.get_mut(&range) {
|
||||
replaced_blocks.insert(state.footer_block_id, render_block);
|
||||
if let Some(editor_state) = &state.editor {
|
||||
if editor_state.opened_patch != patch {
|
||||
state.update_task = Some({
|
||||
@@ -2311,33 +2315,11 @@ impl ContextEditor {
|
||||
should_refold =
|
||||
snapshot.intersects_fold(patch_start.to_offset(&snapshot.buffer_snapshot));
|
||||
} else {
|
||||
let block_ids = editor.insert_blocks(
|
||||
[BlockProperties {
|
||||
height: path_count as u32 + 1,
|
||||
style: BlockStyle::Flex,
|
||||
render: render_block,
|
||||
placement: BlockPlacement::Below(patch_start),
|
||||
priority: 0,
|
||||
}],
|
||||
None,
|
||||
cx,
|
||||
);
|
||||
|
||||
let new_crease_ids = editor.insert_creases(
|
||||
[Crease::new(
|
||||
patch_start..patch_end,
|
||||
header_placeholder.clone(),
|
||||
fold_toggle("patch-header"),
|
||||
|_, _, _| Empty.into_any_element(),
|
||||
)],
|
||||
cx,
|
||||
);
|
||||
|
||||
let crease_id = editor.insert_creases([crease.clone()], cx)[0];
|
||||
self.patches.insert(
|
||||
range.clone(),
|
||||
PatchViewState {
|
||||
footer_block_id: block_ids[0],
|
||||
crease_id: new_crease_ids[0],
|
||||
crease_id,
|
||||
editor: None,
|
||||
update_task: None,
|
||||
},
|
||||
@@ -2348,13 +2330,9 @@ impl ContextEditor {
|
||||
|
||||
if should_refold {
|
||||
editor.unfold_ranges(&[patch_start..patch_end], true, false, cx);
|
||||
editor.fold_ranges([(patch_start..patch_end, header_placeholder)], false, cx);
|
||||
editor.fold_creases(vec![crease], false, cx);
|
||||
}
|
||||
}
|
||||
|
||||
editor.remove_creases(removed_crease_ids, cx);
|
||||
editor.remove_blocks(removed_block_ids, None, cx);
|
||||
editor.replace_blocks(replaced_blocks, None, cx);
|
||||
});
|
||||
|
||||
for editor in editors_to_close {
|
||||
@@ -2385,7 +2363,7 @@ impl ContextEditor {
|
||||
let buffer_row = MultiBufferRow(start.to_point(&buffer).row);
|
||||
buffer_rows_to_fold.insert(buffer_row);
|
||||
creases.push(
|
||||
Crease::new(
|
||||
Crease::inline(
|
||||
start..end,
|
||||
FoldPlaceholder {
|
||||
render: render_fold_icon_button(
|
||||
@@ -2674,7 +2652,7 @@ impl ContextEditor {
|
||||
let mut blocks_to_replace: HashMap<_, RenderBlock> = Default::default();
|
||||
|
||||
let render_block = |message: MessageMetadata| -> RenderBlock {
|
||||
Box::new({
|
||||
Arc::new({
|
||||
let context = self.context.clone();
|
||||
|
||||
move |cx| {
|
||||
@@ -3127,7 +3105,7 @@ impl ContextEditor {
|
||||
crease_title,
|
||||
cx.view().downgrade(),
|
||||
);
|
||||
let crease = Crease::new(
|
||||
let crease = Crease::inline(
|
||||
anchor_before..anchor_after,
|
||||
fold_placeholder,
|
||||
render_quote_selection_output_toggle,
|
||||
@@ -3217,31 +3195,29 @@ impl ContextEditor {
|
||||
&snapshot,
|
||||
)
|
||||
.filter_map(|crease| {
|
||||
if let Some(metadata) = &crease.metadata {
|
||||
let start = crease
|
||||
.range
|
||||
if let Crease::Inline {
|
||||
range, metadata, ..
|
||||
} = &crease
|
||||
{
|
||||
let metadata = metadata.as_ref()?;
|
||||
let start = range
|
||||
.start
|
||||
.to_offset(&snapshot)
|
||||
.saturating_sub(selection_start);
|
||||
let end = crease
|
||||
.range
|
||||
let end = range
|
||||
.end
|
||||
.to_offset(&snapshot)
|
||||
.saturating_sub(selection_start);
|
||||
|
||||
let range_relative_to_selection = start..end;
|
||||
|
||||
if range_relative_to_selection.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(SelectedCreaseMetadata {
|
||||
if !range_relative_to_selection.is_empty() {
|
||||
return Some(SelectedCreaseMetadata {
|
||||
range_relative_to_selection,
|
||||
crease: metadata.clone(),
|
||||
})
|
||||
});
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
None
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
}),
|
||||
@@ -3322,7 +3298,7 @@ impl ContextEditor {
|
||||
|
||||
let buffer_row = MultiBufferRow(start.to_point(&buffer).row);
|
||||
buffer_rows_to_fold.insert(buffer_row);
|
||||
Crease::new(
|
||||
Crease::inline(
|
||||
start..end,
|
||||
FoldPlaceholder {
|
||||
render: render_fold_icon_button(
|
||||
@@ -3364,7 +3340,8 @@ impl ContextEditor {
|
||||
|
||||
self.context.update(cx, |context, cx| {
|
||||
for image in images {
|
||||
let Some(render_image) = image.to_image_data(cx).log_err() else {
|
||||
let Some(render_image) = image.to_image_data(cx.svg_renderer()).log_err()
|
||||
else {
|
||||
continue;
|
||||
};
|
||||
let image_id = image.id();
|
||||
@@ -3415,7 +3392,7 @@ impl ContextEditor {
|
||||
placement: BlockPlacement::Above(anchor),
|
||||
height: MAX_HEIGHT_IN_LINES,
|
||||
style: BlockStyle::Sticky,
|
||||
render: Box::new(move |cx| {
|
||||
render: Arc::new(move |cx| {
|
||||
let image_size = size_for_image(
|
||||
&image,
|
||||
size(
|
||||
@@ -3472,33 +3449,13 @@ impl ContextEditor {
|
||||
.unwrap_or_else(|| Cow::Borrowed(DEFAULT_TAB_TITLE))
|
||||
}
|
||||
|
||||
fn render_patch_header(
|
||||
&self,
|
||||
range: Range<text::Anchor>,
|
||||
_id: FoldId,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Option<AnyElement> {
|
||||
let patch = self.context.read(cx).patch_for_range(&range, cx)?;
|
||||
let theme = cx.theme().clone();
|
||||
Some(
|
||||
h_flex()
|
||||
.px_1()
|
||||
.py_0p5()
|
||||
.border_b_1()
|
||||
.border_color(theme.status().info_border)
|
||||
.gap_1()
|
||||
.child(Icon::new(IconName::Diff).size(IconSize::Small))
|
||||
.child(Label::new(patch.title.clone()).size(LabelSize::Small))
|
||||
.into_any(),
|
||||
)
|
||||
}
|
||||
|
||||
fn render_patch_footer(
|
||||
fn render_patch_block(
|
||||
&mut self,
|
||||
range: Range<text::Anchor>,
|
||||
max_width: Pixels,
|
||||
gutter_width: Pixels,
|
||||
id: BlockId,
|
||||
selected: bool,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Option<AnyElement> {
|
||||
let snapshot = self.editor.update(cx, |editor, cx| editor.snapshot(cx));
|
||||
@@ -3509,10 +3466,7 @@ impl ContextEditor {
|
||||
.anchor_in_excerpt(excerpt_id, range.start)
|
||||
.unwrap();
|
||||
|
||||
if !snapshot.intersects_fold(anchor) {
|
||||
return None;
|
||||
}
|
||||
|
||||
let theme = cx.theme().clone();
|
||||
let patch = self.context.read(cx).patch_for_range(&range, cx)?;
|
||||
let paths = patch
|
||||
.paths()
|
||||
@@ -3522,9 +3476,18 @@ impl ContextEditor {
|
||||
Some(
|
||||
v_flex()
|
||||
.id(id)
|
||||
.pl(gutter_width)
|
||||
.w(max_width)
|
||||
.py_2()
|
||||
.bg(theme.colors().editor_background)
|
||||
.ml(gutter_width)
|
||||
.pb_1()
|
||||
.w(max_width - gutter_width)
|
||||
.rounded_md()
|
||||
.border_1()
|
||||
.border_color(theme.colors().border_variant)
|
||||
.overflow_hidden()
|
||||
.hover(|style| style.border_color(theme.colors().text_accent))
|
||||
.when(selected, |this| {
|
||||
this.border_color(theme.colors().text_accent)
|
||||
})
|
||||
.cursor(CursorStyle::PointingHand)
|
||||
.on_click(cx.listener(move |this, _, cx| {
|
||||
this.editor.update(cx, |editor, cx| {
|
||||
@@ -3534,24 +3497,60 @@ impl ContextEditor {
|
||||
});
|
||||
this.focus_active_patch(cx);
|
||||
}))
|
||||
.child(
|
||||
div()
|
||||
.px_2()
|
||||
.py_1()
|
||||
.overflow_hidden()
|
||||
.text_ellipsis()
|
||||
.border_b_1()
|
||||
.border_color(theme.colors().border_variant)
|
||||
.bg(theme.colors().element_background)
|
||||
.child(
|
||||
Label::new(patch.title.clone())
|
||||
.size(LabelSize::Small)
|
||||
.color(Color::Muted),
|
||||
),
|
||||
)
|
||||
.children(paths.into_iter().map(|path| {
|
||||
h_flex()
|
||||
.pl_1()
|
||||
.gap_1()
|
||||
.px_2()
|
||||
.pt_1()
|
||||
.gap_1p5()
|
||||
.child(Icon::new(IconName::File).size(IconSize::Small))
|
||||
.child(Label::new(path).size(LabelSize::Small))
|
||||
}))
|
||||
.when(patch.status == AssistantPatchStatus::Pending, |div| {
|
||||
div.child(
|
||||
Label::new("Generating")
|
||||
.color(Color::Muted)
|
||||
.size(LabelSize::Small)
|
||||
.with_animation(
|
||||
"pulsating-label",
|
||||
Animation::new(Duration::from_secs(2))
|
||||
.repeat()
|
||||
.with_easing(pulsating_between(0.4, 1.)),
|
||||
|label, delta| label.alpha(delta),
|
||||
h_flex()
|
||||
.pt_1()
|
||||
.px_2()
|
||||
.gap_1()
|
||||
.child(
|
||||
Icon::new(IconName::ArrowCircle)
|
||||
.size(IconSize::XSmall)
|
||||
.color(Color::Muted)
|
||||
.with_animation(
|
||||
"arrow-circle",
|
||||
Animation::new(Duration::from_secs(2)).repeat(),
|
||||
|icon, delta| {
|
||||
icon.transform(Transformation::rotate(percentage(
|
||||
delta,
|
||||
)))
|
||||
},
|
||||
),
|
||||
)
|
||||
.child(
|
||||
Label::new("Generating…")
|
||||
.color(Color::Muted)
|
||||
.size(LabelSize::Small)
|
||||
.with_animation(
|
||||
"pulsating-label",
|
||||
Animation::new(Duration::from_secs(2))
|
||||
.repeat()
|
||||
.with_easing(pulsating_between(0.4, 0.8)),
|
||||
|label, delta| label.alpha(delta),
|
||||
),
|
||||
),
|
||||
)
|
||||
})
|
||||
@@ -3929,7 +3928,7 @@ impl ContextEditor {
|
||||
.child(
|
||||
div()
|
||||
.id("error-message")
|
||||
.max_h_24()
|
||||
.max_h_32()
|
||||
.overflow_y_scroll()
|
||||
.child(Label::new(error_message.clone())),
|
||||
)
|
||||
|
||||
@@ -35,20 +35,17 @@ pub enum AssistantProviderContentV1 {
|
||||
OpenAi {
|
||||
default_model: Option<OpenAiModel>,
|
||||
api_url: Option<String>,
|
||||
low_speed_timeout_in_seconds: Option<u64>,
|
||||
available_models: Option<Vec<OpenAiModel>>,
|
||||
},
|
||||
#[serde(rename = "anthropic")]
|
||||
Anthropic {
|
||||
default_model: Option<AnthropicModel>,
|
||||
api_url: Option<String>,
|
||||
low_speed_timeout_in_seconds: Option<u64>,
|
||||
},
|
||||
#[serde(rename = "ollama")]
|
||||
Ollama {
|
||||
default_model: Option<OllamaModel>,
|
||||
api_url: Option<String>,
|
||||
low_speed_timeout_in_seconds: Option<u64>,
|
||||
},
|
||||
}
|
||||
|
||||
@@ -63,6 +60,7 @@ pub struct AssistantSettings {
|
||||
pub inline_alternatives: Vec<LanguageModelSelection>,
|
||||
pub using_outdated_settings_version: bool,
|
||||
pub enable_experimental_live_diffs: bool,
|
||||
pub show_hints: bool,
|
||||
}
|
||||
|
||||
impl AssistantSettings {
|
||||
@@ -115,47 +113,41 @@ impl AssistantSettingsContent {
|
||||
if let VersionedAssistantSettingsContent::V1(settings) = settings {
|
||||
if let Some(provider) = settings.provider.clone() {
|
||||
match provider {
|
||||
AssistantProviderContentV1::Anthropic {
|
||||
api_url,
|
||||
low_speed_timeout_in_seconds,
|
||||
..
|
||||
} => update_settings_file::<AllLanguageModelSettings>(
|
||||
fs,
|
||||
cx,
|
||||
move |content, _| {
|
||||
if content.anthropic.is_none() {
|
||||
content.anthropic = Some(AnthropicSettingsContent::Versioned(
|
||||
VersionedAnthropicSettingsContent::V1(
|
||||
AnthropicSettingsContentV1 {
|
||||
api_url,
|
||||
low_speed_timeout_in_seconds,
|
||||
available_models: None,
|
||||
},
|
||||
),
|
||||
));
|
||||
}
|
||||
},
|
||||
),
|
||||
AssistantProviderContentV1::Ollama {
|
||||
api_url,
|
||||
low_speed_timeout_in_seconds,
|
||||
..
|
||||
} => update_settings_file::<AllLanguageModelSettings>(
|
||||
fs,
|
||||
cx,
|
||||
move |content, _| {
|
||||
if content.ollama.is_none() {
|
||||
content.ollama = Some(OllamaSettingsContent {
|
||||
api_url,
|
||||
low_speed_timeout_in_seconds,
|
||||
available_models: None,
|
||||
});
|
||||
}
|
||||
},
|
||||
),
|
||||
AssistantProviderContentV1::Anthropic { api_url, .. } => {
|
||||
update_settings_file::<AllLanguageModelSettings>(
|
||||
fs,
|
||||
cx,
|
||||
move |content, _| {
|
||||
if content.anthropic.is_none() {
|
||||
content.anthropic =
|
||||
Some(AnthropicSettingsContent::Versioned(
|
||||
VersionedAnthropicSettingsContent::V1(
|
||||
AnthropicSettingsContentV1 {
|
||||
api_url,
|
||||
available_models: None,
|
||||
},
|
||||
),
|
||||
));
|
||||
}
|
||||
},
|
||||
)
|
||||
}
|
||||
AssistantProviderContentV1::Ollama { api_url, .. } => {
|
||||
update_settings_file::<AllLanguageModelSettings>(
|
||||
fs,
|
||||
cx,
|
||||
move |content, _| {
|
||||
if content.ollama.is_none() {
|
||||
content.ollama = Some(OllamaSettingsContent {
|
||||
api_url,
|
||||
available_models: None,
|
||||
});
|
||||
}
|
||||
},
|
||||
)
|
||||
}
|
||||
AssistantProviderContentV1::OpenAi {
|
||||
api_url,
|
||||
low_speed_timeout_in_seconds,
|
||||
available_models,
|
||||
..
|
||||
} => update_settings_file::<AllLanguageModelSettings>(
|
||||
@@ -188,7 +180,6 @@ impl AssistantSettingsContent {
|
||||
VersionedOpenAiSettingsContent::V1(
|
||||
OpenAiSettingsContentV1 {
|
||||
api_url,
|
||||
low_speed_timeout_in_seconds,
|
||||
available_models,
|
||||
},
|
||||
),
|
||||
@@ -212,6 +203,7 @@ impl AssistantSettingsContent {
|
||||
AssistantSettingsContent::Versioned(settings) => match settings {
|
||||
VersionedAssistantSettingsContent::V1(settings) => AssistantSettingsContentV2 {
|
||||
enabled: settings.enabled,
|
||||
show_hints: None,
|
||||
button: settings.button,
|
||||
dock: settings.dock,
|
||||
default_width: settings.default_width,
|
||||
@@ -252,6 +244,7 @@ impl AssistantSettingsContent {
|
||||
},
|
||||
AssistantSettingsContent::Legacy(settings) => AssistantSettingsContentV2 {
|
||||
enabled: None,
|
||||
show_hints: None,
|
||||
button: settings.button,
|
||||
dock: settings.dock,
|
||||
default_width: settings.default_width,
|
||||
@@ -298,54 +291,41 @@ impl AssistantSettingsContent {
|
||||
log::warn!("attempted to set zed.dev model on outdated settings");
|
||||
}
|
||||
"anthropic" => {
|
||||
let (api_url, low_speed_timeout_in_seconds) = match &settings.provider {
|
||||
Some(AssistantProviderContentV1::Anthropic {
|
||||
api_url,
|
||||
low_speed_timeout_in_seconds,
|
||||
..
|
||||
}) => (api_url.clone(), *low_speed_timeout_in_seconds),
|
||||
_ => (None, None),
|
||||
let api_url = match &settings.provider {
|
||||
Some(AssistantProviderContentV1::Anthropic { api_url, .. }) => {
|
||||
api_url.clone()
|
||||
}
|
||||
_ => None,
|
||||
};
|
||||
settings.provider = Some(AssistantProviderContentV1::Anthropic {
|
||||
default_model: AnthropicModel::from_id(&model).ok(),
|
||||
api_url,
|
||||
low_speed_timeout_in_seconds,
|
||||
});
|
||||
}
|
||||
"ollama" => {
|
||||
let (api_url, low_speed_timeout_in_seconds) = match &settings.provider {
|
||||
Some(AssistantProviderContentV1::Ollama {
|
||||
api_url,
|
||||
low_speed_timeout_in_seconds,
|
||||
..
|
||||
}) => (api_url.clone(), *low_speed_timeout_in_seconds),
|
||||
_ => (None, None),
|
||||
let api_url = match &settings.provider {
|
||||
Some(AssistantProviderContentV1::Ollama { api_url, .. }) => {
|
||||
api_url.clone()
|
||||
}
|
||||
_ => None,
|
||||
};
|
||||
settings.provider = Some(AssistantProviderContentV1::Ollama {
|
||||
default_model: Some(ollama::Model::new(&model, None, None)),
|
||||
api_url,
|
||||
low_speed_timeout_in_seconds,
|
||||
});
|
||||
}
|
||||
"openai" => {
|
||||
let (api_url, low_speed_timeout_in_seconds, available_models) =
|
||||
match &settings.provider {
|
||||
Some(AssistantProviderContentV1::OpenAi {
|
||||
api_url,
|
||||
low_speed_timeout_in_seconds,
|
||||
available_models,
|
||||
..
|
||||
}) => (
|
||||
api_url.clone(),
|
||||
*low_speed_timeout_in_seconds,
|
||||
available_models.clone(),
|
||||
),
|
||||
_ => (None, None, None),
|
||||
};
|
||||
let (api_url, available_models) = match &settings.provider {
|
||||
Some(AssistantProviderContentV1::OpenAi {
|
||||
api_url,
|
||||
available_models,
|
||||
..
|
||||
}) => (api_url.clone(), available_models.clone()),
|
||||
_ => (None, None),
|
||||
};
|
||||
settings.provider = Some(AssistantProviderContentV1::OpenAi {
|
||||
default_model: OpenAiModel::from_id(&model).ok(),
|
||||
api_url,
|
||||
low_speed_timeout_in_seconds,
|
||||
available_models,
|
||||
});
|
||||
}
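The migration hunk above collapses each provider match from a tuple of extracted fields down to `api_url` (plus `available_models` for OpenAI) now that `low_speed_timeout_in_seconds` is gone. A small self-contained illustration of the `..` rest pattern that makes this shrink safe; the enum and field names below are simplified stand-ins, not Zed's actual settings types:

```rust
// Simplified stand-in for AssistantProviderContentV1.
enum Provider {
    Anthropic { api_url: Option<String>, default_model: Option<String> },
    OpenAi { api_url: Option<String>, default_model: Option<String> },
}

fn anthropic_api_url(provider: &Option<Provider>) -> Option<String> {
    // `..` ignores the remaining fields, so dropping a field from the variant
    // (here, the old timeout) does not disturb this match arm.
    match provider {
        Some(Provider::Anthropic { api_url, .. }) => api_url.clone(),
        _ => None,
    }
}

fn main() {
    let provider = Some(Provider::Anthropic {
        api_url: Some("https://api.anthropic.com".into()),
        default_model: None,
    });
    assert_eq!(
        anthropic_api_url(&provider).as_deref(),
        Some("https://api.anthropic.com")
    );
}
```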
|
||||
@@ -377,6 +357,7 @@ impl Default for VersionedAssistantSettingsContent {
|
||||
fn default() -> Self {
|
||||
Self::V2(AssistantSettingsContentV2 {
|
||||
enabled: None,
|
||||
show_hints: None,
|
||||
button: None,
|
||||
dock: None,
|
||||
default_width: None,
|
||||
@@ -394,6 +375,11 @@ pub struct AssistantSettingsContentV2 {
|
||||
///
|
||||
/// Default: true
|
||||
enabled: Option<bool>,
|
||||
/// Whether to show inline hints that show keybindings for inline assistant
|
||||
/// and assistant panel.
|
||||
///
|
||||
/// Default: true
|
||||
show_hints: Option<bool>,
|
||||
/// Whether to show the assistant panel button in the status bar.
|
||||
///
|
||||
/// Default: true
|
||||
@@ -528,6 +514,7 @@ impl Settings for AssistantSettings {
|
||||
|
||||
let value = value.upgrade();
|
||||
merge(&mut settings.enabled, value.enabled);
|
||||
merge(&mut settings.show_hints, value.show_hints);
|
||||
merge(&mut settings.button, value.button);
|
||||
merge(&mut settings.dock, value.dock);
|
||||
merge(
|
||||
@@ -598,6 +585,7 @@ mod tests {
|
||||
}),
|
||||
inline_alternatives: None,
|
||||
enabled: None,
|
||||
show_hints: None,
|
||||
button: None,
|
||||
dock: None,
|
||||
default_width: None,
|
||||
|
||||
@@ -381,10 +381,6 @@ pub enum ContextEvent {
|
||||
SlashCommandOutputSectionAdded {
|
||||
section: SlashCommandOutputSection<language::Anchor>,
|
||||
},
|
||||
SlashCommandFinished {
|
||||
output_range: Range<language::Anchor>,
|
||||
run_commands_in_ranges: Vec<Range<language::Anchor>>,
|
||||
},
|
||||
UsePendingTools,
|
||||
ToolFinished {
|
||||
tool_use_id: Arc<str>,
|
||||
@@ -916,6 +912,7 @@ impl Context {
|
||||
InvokedSlashCommand {
|
||||
name: name.into(),
|
||||
range: output_range,
|
||||
run_commands_in_ranges: Vec::new(),
|
||||
status: InvokedSlashCommandStatus::Running(Task::ready(())),
|
||||
transaction: None,
|
||||
timestamp: id.0,
|
||||
@@ -1914,7 +1911,6 @@ impl Context {
|
||||
}
|
||||
|
||||
let mut pending_section_stack: Vec<PendingSection> = Vec::new();
|
||||
let mut run_commands_in_ranges: Vec<Range<language::Anchor>> = Vec::new();
|
||||
let mut last_role: Option<Role> = None;
|
||||
let mut last_section_range = None;
|
||||
|
||||
@@ -1980,7 +1976,13 @@ impl Context {
|
||||
|
||||
let end = this.buffer.read(cx).anchor_before(insert_position);
|
||||
if run_commands_in_text {
|
||||
run_commands_in_ranges.push(start..end);
|
||||
if let Some(invoked_slash_command) =
|
||||
this.invoked_slash_commands.get_mut(&command_id)
|
||||
{
|
||||
invoked_slash_command
|
||||
.run_commands_in_ranges
|
||||
.push(start..end);
|
||||
}
|
||||
}
|
||||
}
|
||||
SlashCommandEvent::EndSection => {
|
||||
@@ -2100,6 +2102,7 @@ impl Context {
|
||||
InvokedSlashCommand {
|
||||
name: name.to_string().into(),
|
||||
range: command_range.clone(),
|
||||
run_commands_in_ranges: Vec::new(),
|
||||
status: InvokedSlashCommandStatus::Running(insert_output_task),
|
||||
transaction: Some(first_transaction),
|
||||
timestamp: command_id.0,
|
||||
@@ -2383,7 +2386,11 @@ impl Context {
|
||||
});
|
||||
Some(error.to_string())
|
||||
} else {
|
||||
let error_message = error.to_string().trim().to_string();
|
||||
let error_message = error
|
||||
.chain()
|
||||
.map(|err| err.to_string())
|
||||
.collect::<Vec<_>>()
|
||||
.join("\n");
|
||||
cx.emit(ContextEvent::ShowAssistError(SharedString::from(
|
||||
error_message.clone(),
|
||||
)));
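The hunk above switches the surfaced assist error from only the outermost message to the full `anyhow` error chain joined with newlines. A self-contained illustration of the difference; the error text is invented for the example:

```rust
use anyhow::anyhow;

fn main() {
    // A root cause wrapped in two layers of context, standing in for a real failure.
    let error = anyhow!("connection refused")
        .context("failed to reach completion endpoint")
        .context("assist request failed");

    // Old behavior: `to_string` only yields the outermost message.
    assert_eq!(error.to_string(), "assist request failed");

    // New behavior: every message in the chain, one per line.
    let full = error
        .chain()
        .map(|err| err.to_string())
        .collect::<Vec<_>>()
        .join("\n");
    assert_eq!(
        full,
        "assist request failed\nfailed to reach completion endpoint\nconnection refused"
    );
}
```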
|
||||
@@ -2887,7 +2894,7 @@ impl Context {
|
||||
request.messages.push(LanguageModelRequestMessage {
|
||||
role: Role::User,
|
||||
content: vec![
|
||||
"Generate a concise 3-7 word title for this conversation, omitting punctuation"
|
||||
"Generate a concise 3-7 word title for this conversation, omitting punctuation. Go straight to the title, without any preamble and prefix like `Here's a concise suggestion:...` or `Title:`"
|
||||
.into(),
|
||||
],
|
||||
cache: false,
|
||||
@@ -3172,6 +3179,7 @@ pub struct ParsedSlashCommand {
|
||||
pub struct InvokedSlashCommand {
|
||||
pub name: SharedString,
|
||||
pub range: Range<language::Anchor>,
|
||||
pub run_commands_in_ranges: Vec<Range<language::Anchor>>,
|
||||
pub status: InvokedSlashCommandStatus,
|
||||
pub transaction: Option<language::TransactionId>,
|
||||
timestamp: clock::Lamport,
|
||||
|
||||
@@ -8,9 +8,8 @@ use anyhow::{anyhow, Context as _, Result};
|
||||
use client::{proto, telemetry::Telemetry, Client, TypedEnvelope};
|
||||
use clock::ReplicaId;
|
||||
use collections::HashMap;
|
||||
use command_palette_hooks::CommandPaletteFilter;
|
||||
use context_servers::manager::{ContextServerManager, ContextServerSettings};
|
||||
use context_servers::{ContextServerFactoryRegistry, CONTEXT_SERVERS_NAMESPACE};
|
||||
use context_servers::manager::ContextServerManager;
|
||||
use context_servers::ContextServerFactoryRegistry;
|
||||
use fs::Fs;
|
||||
use futures::StreamExt;
|
||||
use fuzzy::StringMatchCandidate;
|
||||
@@ -22,7 +21,6 @@ use paths::contexts_dir;
|
||||
use project::Project;
|
||||
use regex::Regex;
|
||||
use rpc::AnyProtoClient;
|
||||
use settings::{Settings as _, SettingsStore};
|
||||
use std::{
|
||||
cmp::Reverse,
|
||||
ffi::OsStr,
|
||||
@@ -111,7 +109,11 @@ impl ContextStore {
|
||||
let (mut events, _) = fs.watch(contexts_dir(), CONTEXT_WATCH_DURATION).await;
|
||||
|
||||
let this = cx.new_model(|cx: &mut ModelContext<Self>| {
|
||||
let context_server_manager = cx.new_model(|_cx| ContextServerManager::new());
|
||||
let context_server_factory_registry =
|
||||
ContextServerFactoryRegistry::default_global(cx);
|
||||
let context_server_manager = cx.new_model(|cx| {
|
||||
ContextServerManager::new(context_server_factory_registry, project.clone(), cx)
|
||||
});
|
||||
let mut this = Self {
|
||||
contexts: Vec::new(),
|
||||
contexts_metadata: Vec::new(),
|
||||
@@ -148,91 +150,16 @@ impl ContextStore {
|
||||
this.handle_project_changed(project.clone(), cx);
|
||||
this.synchronize_contexts(cx);
|
||||
this.register_context_server_handlers(cx);
|
||||
|
||||
if project.read(cx).is_local() {
|
||||
// TODO: At the time when we construct the `ContextStore` we may not have yet initialized the extensions.
|
||||
// In order to register the context servers when the extension is loaded, we're periodically looping to
|
||||
// see if there are context servers to register.
|
||||
//
|
||||
// I tried doing this in a subscription on the `ExtensionStore`, but it never seemed to fire.
|
||||
//
|
||||
// We should find a more elegant way to do this.
|
||||
let context_server_factory_registry =
|
||||
ContextServerFactoryRegistry::default_global(cx);
|
||||
cx.spawn(|context_store, mut cx| async move {
|
||||
loop {
|
||||
let mut servers_to_register = Vec::new();
|
||||
for (_id, factory) in
|
||||
context_server_factory_registry.context_server_factories()
|
||||
{
|
||||
if let Some(server) = factory(project.clone(), &cx).await.log_err()
|
||||
{
|
||||
servers_to_register.push(server);
|
||||
}
|
||||
}
|
||||
|
||||
let Some(_) = context_store
|
||||
.update(&mut cx, |this, cx| {
|
||||
this.context_server_manager.update(cx, |this, cx| {
|
||||
for server in servers_to_register {
|
||||
this.add_server(server, cx).detach_and_log_err(cx);
|
||||
}
|
||||
})
|
||||
})
|
||||
.log_err()
|
||||
else {
|
||||
break;
|
||||
};
|
||||
|
||||
smol::Timer::after(Duration::from_millis(100)).await;
|
||||
}
|
||||
|
||||
anyhow::Ok(())
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
}
|
||||
|
||||
this
|
||||
})?;
|
||||
this.update(&mut cx, |this, cx| this.reload(cx))?
|
||||
.await
|
||||
.log_err();
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.watch_context_server_settings(cx);
|
||||
})
|
||||
.log_err();
|
||||
|
||||
Ok(this)
|
||||
})
|
||||
}
|
||||
|
||||
fn watch_context_server_settings(&self, cx: &mut ModelContext<Self>) {
|
||||
cx.observe_global::<SettingsStore>(move |this, cx| {
|
||||
this.context_server_manager.update(cx, |manager, cx| {
|
||||
let location = this.project.read(cx).worktrees(cx).next().map(|worktree| {
|
||||
settings::SettingsLocation {
|
||||
worktree_id: worktree.read(cx).id(),
|
||||
path: Path::new(""),
|
||||
}
|
||||
});
|
||||
let settings = ContextServerSettings::get(location, cx);
|
||||
|
||||
manager.maintain_servers(settings, cx);
|
||||
|
||||
let has_any_context_servers = !manager.servers().is_empty();
|
||||
CommandPaletteFilter::update_global(cx, |filter, _cx| {
|
||||
if has_any_context_servers {
|
||||
filter.show_namespace(CONTEXT_SERVERS_NAMESPACE);
|
||||
} else {
|
||||
filter.hide_namespace(CONTEXT_SERVERS_NAMESPACE);
|
||||
}
|
||||
});
|
||||
})
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
|
||||
async fn handle_advertise_contexts(
|
||||
this: Model<Self>,
|
||||
envelope: TypedEnvelope<proto::AdvertiseContexts>,
|
||||
|
||||
@@ -24,9 +24,9 @@ use futures::{
|
||||
join, SinkExt, Stream, StreamExt,
|
||||
};
|
||||
use gpui::{
|
||||
anchored, deferred, point, AnyElement, AppContext, ClickEvent, EventEmitter, FocusHandle,
|
||||
FocusableView, FontWeight, Global, HighlightStyle, Model, ModelContext, Subscription, Task,
|
||||
TextStyle, UpdateGlobal, View, ViewContext, WeakView, WindowContext,
|
||||
anchored, deferred, point, AnyElement, AppContext, ClickEvent, CursorStyle, EventEmitter,
|
||||
FocusHandle, FocusableView, FontWeight, Global, HighlightStyle, Model, ModelContext,
|
||||
Subscription, Task, TextStyle, UpdateGlobal, View, ViewContext, WeakView, WindowContext,
|
||||
};
|
||||
use language::{Buffer, IndentKind, Point, Selection, TransactionId};
|
||||
use language_model::{
|
||||
@@ -460,7 +460,7 @@ impl InlineAssistant {
|
||||
style: BlockStyle::Sticky,
|
||||
placement: BlockPlacement::Below(range.end),
|
||||
height: 0,
|
||||
render: Box::new(|cx| {
|
||||
render: Arc::new(|cx| {
|
||||
v_flex()
|
||||
.h_full()
|
||||
.w_full()
|
||||
@@ -1197,8 +1197,9 @@ impl InlineAssistant {
|
||||
placement: BlockPlacement::Above(new_row),
|
||||
height,
|
||||
style: BlockStyle::Flex,
|
||||
render: Box::new(move |cx| {
|
||||
render: Arc::new(move |cx| {
|
||||
div()
|
||||
.block_mouse_down()
|
||||
.bg(cx.theme().status().deleted_background)
|
||||
.size_full()
|
||||
.h(height as f32 * cx.line_height())
|
||||
@@ -1317,7 +1318,7 @@ impl InlineAssistGroup {
|
||||
|
||||
fn build_assist_editor_renderer(editor: &View<PromptEditor>) -> RenderBlock {
|
||||
let editor = editor.clone();
|
||||
Box::new(move |cx: &mut BlockContext| {
|
||||
Arc::new(move |cx: &mut BlockContext| {
|
||||
*editor.read(cx).gutter_dimensions.lock() = *cx.gutter_dimensions;
|
||||
editor.clone().into_any_element()
|
||||
})
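These hunks change `RenderBlock` from a `Box`ed closure to an `Arc`ed one, which is what lets `build_assist_editor_renderer` hand the same renderer to several blocks. A small standalone sketch of the difference, with a hypothetical `Cx` standing in for `BlockContext`:

```rust
use std::sync::Arc;

// Hypothetical stand-in for the editor's BlockContext.
struct Cx {
    line_height: f32,
}

// Switching from `Box<dyn Fn...>` to `Arc<dyn Fn...>` lets one render
// closure be cloned and attached to several blocks.
type RenderBlock = Arc<dyn Fn(&mut Cx) -> String + Send + Sync>;

fn build_renderer(label: String) -> RenderBlock {
    Arc::new(move |cx: &mut Cx| format!("{label} (line height {})", cx.line_height))
}

fn paint(render: &(dyn Fn(&mut Cx) -> String + Send + Sync), cx: &mut Cx) -> String {
    render(cx)
}

fn main() {
    let render = build_renderer("deleted lines".into());
    let shared = render.clone(); // cheap: bumps the reference count
    let mut cx = Cx { line_height: 16.0 };
    println!("{}", paint(render.as_ref(), &mut cx));
    println!("{}", paint(shared.as_ref(), &mut cx));
}
```

Cloning the `Arc` only increments a reference count; a `Box<dyn Fn>` cannot be cloned at all without boxing a fresh closure.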
@@ -1480,6 +1481,8 @@ impl Render for PromptEditor {
|
||||
h_flex()
|
||||
.key_context("PromptEditor")
|
||||
.bg(cx.theme().colors().editor_background)
|
||||
.block_mouse_down()
|
||||
.cursor(CursorStyle::Arrow)
|
||||
.border_y_1()
|
||||
.border_color(cx.theme().status().info_border)
|
||||
.size_full()
|
||||
|
||||
@@ -2,7 +2,7 @@ use crate::assistant_panel::ContextEditor;
|
||||
use crate::SlashCommandWorkingSet;
|
||||
use anyhow::Result;
|
||||
use assistant_slash_command::AfterCompletion;
|
||||
pub use assistant_slash_command::{SlashCommand, SlashCommandOutput, SlashCommandRegistry};
|
||||
pub use assistant_slash_command::{SlashCommand, SlashCommandOutput};
|
||||
use editor::{CompletionProvider, Editor};
|
||||
use fuzzy::{match_strings, StringMatchCandidate};
|
||||
use gpui::{AppContext, Model, Task, ViewContext, WeakView, WindowContext};
|
||||
@@ -171,8 +171,7 @@ impl SlashCommandCompletionProvider {
|
||||
let mut flag = self.cancel_flag.lock();
|
||||
flag.store(true, SeqCst);
|
||||
*flag = new_cancel_flag.clone();
|
||||
let commands = SlashCommandRegistry::global(cx);
|
||||
if let Some(command) = commands.command(command_name) {
|
||||
if let Some(command) = self.slash_commands.command(command_name, cx) {
|
||||
let completions = command.complete_argument(
|
||||
arguments,
|
||||
new_cancel_flag.clone(),
|
||||
@@ -27,7 +27,7 @@ pub struct ContextServerSlashCommand {
|
||||
impl ContextServerSlashCommand {
|
||||
pub fn new(
|
||||
server_manager: Model<ContextServerManager>,
|
||||
server: &Arc<dyn ContextServer>,
|
||||
server: &Arc<ContextServer>,
|
||||
prompt: Prompt,
|
||||
) -> Self {
|
||||
Self {
|
||||
@@ -152,7 +152,7 @@ impl SlashCommand for ContextServerSlashCommand {
|
||||
if result
|
||||
.messages
|
||||
.iter()
|
||||
.any(|msg| !matches!(msg.role, context_servers::types::SamplingRole::User))
|
||||
.any(|msg| !matches!(msg.role, context_servers::types::Role::User))
|
||||
{
|
||||
return Err(anyhow!(
|
||||
"Prompt contains non-user roles, which is not supported"
|
||||
@@ -164,7 +164,7 @@ impl SlashCommand for ContextServerSlashCommand {
|
||||
.messages
|
||||
.into_iter()
|
||||
.filter_map(|msg| match msg.content {
|
||||
context_servers::types::SamplingContent::Text { text } => Some(text),
|
||||
context_servers::types::MessageContent::Text { text } => Some(text),
|
||||
_ => None,
|
||||
})
|
||||
.collect::<Vec<String>>()
|
||||
|
||||
@@ -69,6 +69,10 @@ impl SlashCommand for DefaultSlashCommand {
|
||||
text.push('\n');
|
||||
}
|
||||
|
||||
if !text.ends_with('\n') {
|
||||
text.push('\n');
|
||||
}
|
||||
|
||||
Ok(SlashCommandOutput {
|
||||
sections: vec![SlashCommandOutputSection {
|
||||
range: 0..text.len(),
|
||||
|
||||
@@ -74,11 +74,21 @@ impl Tool for ContextServerTool {
|
||||
);
|
||||
let response = protocol.run_tool(tool_name, arguments).await?;
|
||||
|
||||
let tool_result = match response.tool_result {
|
||||
serde_json::Value::String(s) => s,
|
||||
_ => serde_json::to_string(&response.tool_result)?,
|
||||
};
|
||||
Ok(tool_result)
|
||||
let mut result = String::new();
|
||||
for content in response.content {
|
||||
match content {
|
||||
types::ToolResponseContent::Text { text } => {
|
||||
result.push_str(&text);
|
||||
}
|
||||
types::ToolResponseContent::Image { .. } => {
|
||||
log::warn!("Ignoring image content from tool response");
|
||||
}
|
||||
types::ToolResponseContent::Resource { .. } => {
|
||||
log::warn!("Ignoring resource content from tool response");
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(result)
|
||||
}
|
||||
})
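The tool now returns structured content rather than one JSON value, and the diff folds the text parts into a single string while logging and skipping image and resource content. A standalone sketch of that folding step, with a simplified stand-in for the `types::ToolResponseContent` enum:

```rust
// A simplified stand-in for `types::ToolResponseContent`.
enum ToolResponseContent {
    Text { text: String },
    Image { uri: String },
    Resource { uri: String },
}

// Fold the text parts into one string; non-text content is skipped with a
// warning, matching the behavior in the diff.
fn collect_text(content: Vec<ToolResponseContent>) -> String {
    let mut result = String::new();
    for item in content {
        match item {
            ToolResponseContent::Text { text } => result.push_str(&text),
            ToolResponseContent::Image { .. } | ToolResponseContent::Resource { .. } => {
                eprintln!("ignoring non-text tool response content");
            }
        }
    }
    result
}

fn main() {
    let output = collect_text(vec![
        ToolResponseContent::Text { text: "hello ".into() },
        ToolResponseContent::Image { uri: "img://diagram".into() },
        ToolResponseContent::Text { text: "world".into() },
    ]);
    assert_eq!(output, "hello world");
}
```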
} else {
|
||||
|
||||
@@ -13,8 +13,10 @@ path = "src/assistant_slash_command.rs"
|
||||
|
||||
[dependencies]
|
||||
anyhow.workspace = true
|
||||
async-trait.workspace = true
|
||||
collections.workspace = true
|
||||
derive_more.workspace = true
|
||||
extension.workspace = true
|
||||
futures.workspace = true
|
||||
gpui.workspace = true
|
||||
language.workspace = true
|
||||
@@ -22,6 +24,7 @@ language_model.workspace = true
|
||||
parking_lot.workspace = true
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
ui.workspace = true
|
||||
workspace.workspace = true
|
||||
|
||||
[dev-dependencies]
|
||||
|
||||
@@ -1,5 +1,8 @@
|
||||
mod extension_slash_command;
|
||||
mod slash_command_registry;
|
||||
|
||||
pub use crate::extension_slash_command::*;
|
||||
pub use crate::slash_command_registry::*;
|
||||
use anyhow::Result;
|
||||
use futures::stream::{self, BoxStream};
|
||||
use futures::StreamExt;
|
||||
@@ -7,7 +10,6 @@ use gpui::{AnyElement, AppContext, ElementId, SharedString, Task, WeakView, Wind
|
||||
use language::{BufferSnapshot, CodeLabel, LspAdapterDelegate, OffsetRangeExt};
|
||||
pub use language_model::Role;
|
||||
use serde::{Deserialize, Serialize};
|
||||
pub use slash_command_registry::*;
|
||||
use std::{
|
||||
ops::Range,
|
||||
sync::{atomic::AtomicBool, Arc},
|
||||
|
||||
crates/assistant_slash_command/src/extension_slash_command.rs (new file, 143 lines)
@@ -0,0 +1,143 @@
|
||||
use std::path::PathBuf;
|
||||
use std::sync::{atomic::AtomicBool, Arc};
|
||||
|
||||
use anyhow::Result;
|
||||
use async_trait::async_trait;
|
||||
use extension::{Extension, WorktreeDelegate};
|
||||
use gpui::{Task, WeakView, WindowContext};
|
||||
use language::{BufferSnapshot, LspAdapterDelegate};
|
||||
use ui::prelude::*;
|
||||
use workspace::Workspace;
|
||||
|
||||
use crate::{
|
||||
ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
|
||||
SlashCommandResult,
|
||||
};
|
||||
|
||||
/// An adapter that allows an [`LspAdapterDelegate`] to be used as a [`WorktreeDelegate`].
|
||||
struct WorktreeDelegateAdapter(Arc<dyn LspAdapterDelegate>);
|
||||
|
||||
#[async_trait]
|
||||
impl WorktreeDelegate for WorktreeDelegateAdapter {
|
||||
fn id(&self) -> u64 {
|
||||
self.0.worktree_id().to_proto()
|
||||
}
|
||||
|
||||
fn root_path(&self) -> String {
|
||||
self.0.worktree_root_path().to_string_lossy().to_string()
|
||||
}
|
||||
|
||||
async fn read_text_file(&self, path: PathBuf) -> Result<String> {
|
||||
self.0.read_text_file(path).await
|
||||
}
|
||||
|
||||
async fn which(&self, binary_name: String) -> Option<String> {
|
||||
self.0
|
||||
.which(binary_name.as_ref())
|
||||
.await
|
||||
.map(|path| path.to_string_lossy().to_string())
|
||||
}
|
||||
|
||||
async fn shell_env(&self) -> Vec<(String, String)> {
|
||||
self.0.shell_env().await.into_iter().collect()
|
||||
}
|
||||
}
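`WorktreeDelegateAdapter` is a plain adapter: it wraps the existing `LspAdapterDelegate` and translates each call into the shape `WorktreeDelegate` expects, converting paths to strings at the boundary. A synchronous, self-contained sketch of the same shape (the real adapter is async via `async_trait`, and the trait definitions below are simplified stand-ins):

```rust
use std::path::PathBuf;
use std::sync::Arc;

// Hypothetical, synchronous stand-ins for `LspAdapterDelegate` and `WorktreeDelegate`.
trait LspDelegate: Send + Sync {
    fn worktree_root(&self) -> PathBuf;
    fn which(&self, binary: &str) -> Option<PathBuf>;
}

trait WorktreeDelegate: Send + Sync {
    fn root_path(&self) -> String;
    fn which(&self, binary: String) -> Option<String>;
}

// The adapter owns the original delegate and translates each call,
// converting `PathBuf`s to `String`s at the boundary.
struct WorktreeDelegateAdapter(Arc<dyn LspDelegate>);

impl WorktreeDelegate for WorktreeDelegateAdapter {
    fn root_path(&self) -> String {
        self.0.worktree_root().to_string_lossy().to_string()
    }

    fn which(&self, binary: String) -> Option<String> {
        self.0
            .which(&binary)
            .map(|path| path.to_string_lossy().to_string())
    }
}
```

Keeping the conversion in one adapter means neither trait has to know about the other's path representation.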
|
||||
pub struct ExtensionSlashCommand {
|
||||
extension: Arc<dyn Extension>,
|
||||
command: extension::SlashCommand,
|
||||
}
|
||||
|
||||
impl ExtensionSlashCommand {
|
||||
pub fn new(extension: Arc<dyn Extension>, command: extension::SlashCommand) -> Self {
|
||||
Self { extension, command }
|
||||
}
|
||||
}
|
||||
|
||||
impl SlashCommand for ExtensionSlashCommand {
|
||||
fn name(&self) -> String {
|
||||
self.command.name.clone()
|
||||
}
|
||||
|
||||
fn description(&self) -> String {
|
||||
self.command.description.clone()
|
||||
}
|
||||
|
||||
fn menu_text(&self) -> String {
|
||||
self.command.tooltip_text.clone()
|
||||
}
|
||||
|
||||
fn requires_argument(&self) -> bool {
|
||||
self.command.requires_argument
|
||||
}
|
||||
|
||||
fn complete_argument(
|
||||
self: Arc<Self>,
|
||||
arguments: &[String],
|
||||
_cancel: Arc<AtomicBool>,
|
||||
_workspace: Option<WeakView<Workspace>>,
|
||||
cx: &mut WindowContext,
|
||||
) -> Task<Result<Vec<ArgumentCompletion>>> {
|
||||
let command = self.command.clone();
|
||||
let arguments = arguments.to_owned();
|
||||
cx.background_executor().spawn(async move {
|
||||
let completions = self
|
||||
.extension
|
||||
.complete_slash_command_argument(command, arguments)
|
||||
.await?;
|
||||
|
||||
anyhow::Ok(
|
||||
completions
|
||||
.into_iter()
|
||||
.map(|completion| ArgumentCompletion {
|
||||
label: completion.label.into(),
|
||||
new_text: completion.new_text,
|
||||
replace_previous_arguments: false,
|
||||
after_completion: completion.run_command.into(),
|
||||
})
|
||||
.collect(),
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
fn run(
|
||||
self: Arc<Self>,
|
||||
arguments: &[String],
|
||||
_context_slash_command_output_sections: &[SlashCommandOutputSection<language::Anchor>],
|
||||
_context_buffer: BufferSnapshot,
|
||||
_workspace: WeakView<Workspace>,
|
||||
delegate: Option<Arc<dyn LspAdapterDelegate>>,
|
||||
cx: &mut WindowContext,
|
||||
) -> Task<SlashCommandResult> {
|
||||
let command = self.command.clone();
|
||||
let arguments = arguments.to_owned();
|
||||
let output = cx.background_executor().spawn(async move {
|
||||
let delegate =
|
||||
delegate.map(|delegate| Arc::new(WorktreeDelegateAdapter(delegate.clone())) as _);
|
||||
let output = self
|
||||
.extension
|
||||
.run_slash_command(command, arguments, delegate)
|
||||
.await?;
|
||||
|
||||
anyhow::Ok(output)
|
||||
});
|
||||
cx.foreground_executor().spawn(async move {
|
||||
let output = output.await?;
|
||||
Ok(SlashCommandOutput {
|
||||
text: output.text,
|
||||
sections: output
|
||||
.sections
|
||||
.into_iter()
|
||||
.map(|section| SlashCommandOutputSection {
|
||||
range: section.range,
|
||||
icon: IconName::Code,
|
||||
label: section.label.into(),
|
||||
metadata: None,
|
||||
})
|
||||
.collect(),
|
||||
run_commands_in_text: false,
|
||||
}
|
||||
.to_event_stream())
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -343,7 +343,7 @@ fn init_test(cx: &mut AppContext) -> Model<ChannelStore> {
|
||||
release_channel::init(SemanticVersion::default(), cx);
|
||||
client::init_settings(cx);
|
||||
|
||||
let clock = Arc::new(FakeSystemClock::default());
|
||||
let clock = Arc::new(FakeSystemClock::new());
|
||||
let http = FakeHttpClient::with_404_response();
|
||||
let client = Client::new(clock, http.clone(), cx);
|
||||
let user_store = cx.new_model(|cx| UserStore::new(client.clone(), cx));
|
||||
|
||||
@@ -42,7 +42,6 @@ serde_json.workspace = true
|
||||
settings.workspace = true
|
||||
sha2.workspace = true
|
||||
smol.workspace = true
|
||||
sysinfo.workspace = true
|
||||
telemetry_events.workspace = true
|
||||
text.workspace = true
|
||||
thiserror.workspace = true
|
||||
|
||||
@@ -1780,7 +1780,7 @@ mod tests {
|
||||
let user_id = 5;
|
||||
let client = cx.update(|cx| {
|
||||
Client::new(
|
||||
Arc::new(FakeSystemClock::default()),
|
||||
Arc::new(FakeSystemClock::new()),
|
||||
FakeHttpClient::with_404_response(),
|
||||
cx,
|
||||
)
|
||||
@@ -1821,7 +1821,7 @@ mod tests {
|
||||
let user_id = 5;
|
||||
let client = cx.update(|cx| {
|
||||
Client::new(
|
||||
Arc::new(FakeSystemClock::default()),
|
||||
Arc::new(FakeSystemClock::new()),
|
||||
FakeHttpClient::with_404_response(),
|
||||
cx,
|
||||
)
|
||||
@@ -1900,7 +1900,7 @@ mod tests {
|
||||
let dropped_auth_count = Arc::new(Mutex::new(0));
|
||||
let client = cx.update(|cx| {
|
||||
Client::new(
|
||||
Arc::new(FakeSystemClock::default()),
|
||||
Arc::new(FakeSystemClock::new()),
|
||||
FakeHttpClient::with_404_response(),
|
||||
cx,
|
||||
)
|
||||
@@ -1943,7 +1943,7 @@ mod tests {
|
||||
let user_id = 5;
|
||||
let client = cx.update(|cx| {
|
||||
Client::new(
|
||||
Arc::new(FakeSystemClock::default()),
|
||||
Arc::new(FakeSystemClock::new()),
|
||||
FakeHttpClient::with_404_response(),
|
||||
cx,
|
||||
)
|
||||
@@ -2003,7 +2003,7 @@ mod tests {
|
||||
let user_id = 5;
|
||||
let client = cx.update(|cx| {
|
||||
Client::new(
|
||||
Arc::new(FakeSystemClock::default()),
|
||||
Arc::new(FakeSystemClock::new()),
|
||||
FakeHttpClient::with_404_response(),
|
||||
cx,
|
||||
)
|
||||
@@ -2038,7 +2038,7 @@ mod tests {
|
||||
let user_id = 5;
|
||||
let client = cx.update(|cx| {
|
||||
Client::new(
|
||||
Arc::new(FakeSystemClock::default()),
|
||||
Arc::new(FakeSystemClock::new()),
|
||||
FakeHttpClient::with_404_response(),
|
||||
cx,
|
||||
)
|
||||
|
||||
@@ -2,7 +2,6 @@ mod event_coalescer;
|
||||
|
||||
use crate::{ChannelId, TelemetrySettings};
|
||||
use anyhow::Result;
|
||||
use chrono::{DateTime, Utc};
|
||||
use clock::SystemClock;
|
||||
use collections::{HashMap, HashSet};
|
||||
use futures::Future;
|
||||
@@ -15,12 +14,11 @@ use settings::{Settings, SettingsStore};
|
||||
use sha2::{Digest, Sha256};
|
||||
use std::fs::File;
|
||||
use std::io::Write;
|
||||
use std::time::Instant;
|
||||
use std::{env, mem, path::PathBuf, sync::Arc, time::Duration};
|
||||
use sysinfo::{CpuRefreshKind, Pid, ProcessRefreshKind, RefreshKind, System};
|
||||
use telemetry_events::{
|
||||
ActionEvent, AppEvent, AssistantEvent, CallEvent, CpuEvent, EditEvent, EditorEvent, Event,
|
||||
EventRequestBody, EventWrapper, ExtensionEvent, InlineCompletionEvent, MemoryEvent, ReplEvent,
|
||||
SettingEvent,
|
||||
ActionEvent, AppEvent, AssistantEvent, CallEvent, EditEvent, EditorEvent, Event,
|
||||
EventRequestBody, EventWrapper, ExtensionEvent, InlineCompletionEvent, ReplEvent, SettingEvent,
|
||||
};
|
||||
use util::{ResultExt, TryFutureExt};
|
||||
use worktree::{UpdatedEntriesSet, WorktreeId};
|
||||
@@ -46,7 +44,7 @@ struct TelemetryState {
|
||||
flush_events_task: Option<Task<()>>,
|
||||
log_file: Option<File>,
|
||||
is_staff: Option<bool>,
|
||||
first_event_date_time: Option<DateTime<Utc>>,
|
||||
first_event_date_time: Option<Instant>,
|
||||
event_coalescer: EventCoalescer,
|
||||
max_queue_size: usize,
|
||||
worktree_id_map: WorktreeIdMap,
|
||||
@@ -293,55 +291,13 @@ impl Telemetry {
|
||||
state.session_id = Some(session_id);
|
||||
state.app_version = release_channel::AppVersion::global(cx).to_string();
|
||||
state.os_name = os_name();
|
||||
|
||||
drop(state);
|
||||
|
||||
let this = self.clone();
|
||||
cx.background_executor()
|
||||
.spawn(async move {
|
||||
let mut system = System::new_with_specifics(
|
||||
RefreshKind::new().with_cpu(CpuRefreshKind::everything()),
|
||||
);
|
||||
|
||||
let refresh_kind = ProcessRefreshKind::new().with_cpu().with_memory();
|
||||
let current_process = Pid::from_u32(std::process::id());
|
||||
system.refresh_processes_specifics(
|
||||
sysinfo::ProcessesToUpdate::Some(&[current_process]),
|
||||
refresh_kind,
|
||||
);
|
||||
|
||||
// Waiting some amount of time before the first query is important to get a reasonable value
|
||||
// https://docs.rs/sysinfo/0.29.10/sysinfo/trait.ProcessExt.html#tymethod.cpu_usage
|
||||
const DURATION_BETWEEN_SYSTEM_EVENTS: Duration = Duration::from_secs(4 * 60);
|
||||
|
||||
loop {
|
||||
smol::Timer::after(DURATION_BETWEEN_SYSTEM_EVENTS).await;
|
||||
|
||||
let current_process = Pid::from_u32(std::process::id());
|
||||
system.refresh_processes_specifics(
|
||||
sysinfo::ProcessesToUpdate::Some(&[current_process]),
|
||||
refresh_kind,
|
||||
);
|
||||
let Some(process) = system.process(current_process) else {
|
||||
log::error!(
|
||||
"Failed to find own process {current_process:?} in system process table"
|
||||
);
|
||||
// TODO: Fire an error telemetry event
|
||||
return;
|
||||
};
|
||||
|
||||
this.report_memory_event(process.memory(), process.virtual_memory());
|
||||
this.report_cpu_event(process.cpu_usage(), system.cpus().len() as u32);
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
|
||||
pub fn metrics_enabled(self: &Arc<Self>) -> bool {
|
||||
let state = self.state.lock();
|
||||
let enabled = state.settings.metrics;
|
||||
drop(state);
|
||||
return enabled;
|
||||
enabled
|
||||
}
|
||||
|
||||
pub fn set_authenticated_user_info(
|
||||
@@ -416,28 +372,6 @@ impl Telemetry {
|
||||
self.report_event(event)
|
||||
}
|
||||
|
||||
pub fn report_cpu_event(self: &Arc<Self>, usage_as_percentage: f32, core_count: u32) {
|
||||
let event = Event::Cpu(CpuEvent {
|
||||
usage_as_percentage,
|
||||
core_count,
|
||||
});
|
||||
|
||||
self.report_event(event)
|
||||
}
|
||||
|
||||
pub fn report_memory_event(
|
||||
self: &Arc<Self>,
|
||||
memory_in_bytes: u64,
|
||||
virtual_memory_in_bytes: u64,
|
||||
) {
|
||||
let event = Event::Memory(MemoryEvent {
|
||||
memory_in_bytes,
|
||||
virtual_memory_in_bytes,
|
||||
});
|
||||
|
||||
self.report_event(event)
|
||||
}
|
||||
|
||||
pub fn report_app_event(self: &Arc<Self>, operation: String) -> Event {
|
||||
let event = Event::App(AppEvent { operation });
|
||||
|
||||
@@ -469,7 +403,10 @@ impl Telemetry {
|
||||
|
||||
if let Some((start, end, environment)) = period_data {
|
||||
let event = Event::Edit(EditEvent {
|
||||
duration: end.timestamp_millis() - start.timestamp_millis(),
|
||||
duration: end
|
||||
.saturating_duration_since(start)
|
||||
.min(Duration::from_secs(60 * 60 * 24))
|
||||
.as_millis() as i64,
|
||||
environment: environment.to_string(),
|
||||
is_via_ssh,
|
||||
});
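Both this hunk and the similar one further down replace wall-clock subtraction with a saturating, capped duration, so a suspended machine or an adjusted clock cannot produce negative or absurdly large values. The core computation, in isolation:

```rust
use std::time::{Duration, Instant};

// Cap the measured span at one day so clock weirdness cannot produce absurd
// durations, then report the value in milliseconds as an i64.
fn clamped_millis_since(start: Instant, end: Instant) -> i64 {
    end.saturating_duration_since(start)
        .min(Duration::from_secs(60 * 60 * 24))
        .as_millis() as i64
}

fn main() {
    let start = Instant::now();
    let end = start + Duration::from_millis(250);
    assert_eq!(clamped_millis_since(start, end), 250);
    // If `end` were somehow earlier than `start`, the result saturates to 0.
    assert_eq!(clamped_millis_since(end, start), 0);
}
```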
@@ -567,9 +504,10 @@ impl Telemetry {
|
||||
let date_time = self.clock.utc_now();
|
||||
|
||||
let milliseconds_since_first_event = match state.first_event_date_time {
|
||||
Some(first_event_date_time) => {
|
||||
date_time.timestamp_millis() - first_event_date_time.timestamp_millis()
|
||||
}
|
||||
Some(first_event_date_time) => date_time
|
||||
.saturating_duration_since(first_event_date_time)
|
||||
.min(Duration::from_secs(60 * 60 * 24))
|
||||
.as_millis() as i64,
|
||||
None => {
|
||||
state.first_event_date_time = Some(date_time);
|
||||
0
|
||||
@@ -702,7 +640,6 @@ pub fn calculate_json_checksum(json: &impl AsRef<[u8]>) -> Option<String> {
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use chrono::TimeZone;
|
||||
use clock::FakeSystemClock;
|
||||
use gpui::TestAppContext;
|
||||
use http_client::FakeHttpClient;
|
||||
@@ -710,9 +647,7 @@ mod tests {
|
||||
#[gpui::test]
|
||||
fn test_telemetry_flush_on_max_queue_size(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
let clock = Arc::new(FakeSystemClock::new(
|
||||
Utc.with_ymd_and_hms(1990, 4, 12, 12, 0, 0).unwrap(),
|
||||
));
|
||||
let clock = Arc::new(FakeSystemClock::new());
|
||||
let http = FakeHttpClient::with_200_response();
|
||||
let system_id = Some("system_id".to_string());
|
||||
let installation_id = Some("installation_id".to_string());
|
||||
@@ -743,7 +678,7 @@ mod tests {
|
||||
Some(first_date_time)
|
||||
);
|
||||
|
||||
clock.advance(chrono::Duration::milliseconds(100));
|
||||
clock.advance(Duration::from_millis(100));
|
||||
|
||||
let event = telemetry.report_app_event(operation.clone());
|
||||
assert_eq!(
|
||||
@@ -759,7 +694,7 @@ mod tests {
|
||||
Some(first_date_time)
|
||||
);
|
||||
|
||||
clock.advance(chrono::Duration::milliseconds(100));
|
||||
clock.advance(Duration::from_millis(100));
|
||||
|
||||
let event = telemetry.report_app_event(operation.clone());
|
||||
assert_eq!(
|
||||
@@ -775,7 +710,7 @@ mod tests {
|
||||
Some(first_date_time)
|
||||
);
|
||||
|
||||
clock.advance(chrono::Duration::milliseconds(100));
|
||||
clock.advance(Duration::from_millis(100));
|
||||
|
||||
// Adding a 4th event should cause a flush
|
||||
let event = telemetry.report_app_event(operation.clone());
|
||||
@@ -796,9 +731,7 @@ mod tests {
|
||||
cx: &mut TestAppContext,
|
||||
) {
|
||||
init_test(cx);
|
||||
let clock = Arc::new(FakeSystemClock::new(
|
||||
Utc.with_ymd_and_hms(1990, 4, 12, 12, 0, 0).unwrap(),
|
||||
));
|
||||
let clock = Arc::new(FakeSystemClock::new());
|
||||
let http = FakeHttpClient::with_200_response();
|
||||
let system_id = Some("system_id".to_string());
|
||||
let installation_id = Some("installation_id".to_string());
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
use std::sync::Arc;
|
||||
use std::time;
|
||||
use std::{sync::Arc, time::Instant};
|
||||
|
||||
use chrono::{DateTime, Duration, Utc};
|
||||
use clock::SystemClock;
|
||||
|
||||
const COALESCE_TIMEOUT: time::Duration = time::Duration::from_secs(20);
|
||||
@@ -10,8 +9,8 @@ const SIMULATED_DURATION_FOR_SINGLE_EVENT: time::Duration = time::Duration::from
|
||||
#[derive(Debug, PartialEq)]
|
||||
struct PeriodData {
|
||||
environment: &'static str,
|
||||
start: DateTime<Utc>,
|
||||
end: Option<DateTime<Utc>>,
|
||||
start: Instant,
|
||||
end: Option<Instant>,
|
||||
}
|
||||
|
||||
pub struct EventCoalescer {
|
||||
@@ -27,9 +26,8 @@ impl EventCoalescer {
|
||||
pub fn log_event(
|
||||
&mut self,
|
||||
environment: &'static str,
|
||||
) -> Option<(DateTime<Utc>, DateTime<Utc>, &'static str)> {
|
||||
) -> Option<(Instant, Instant, &'static str)> {
|
||||
let log_time = self.clock.utc_now();
|
||||
let coalesce_timeout = Duration::from_std(COALESCE_TIMEOUT).unwrap();
|
||||
|
||||
let Some(state) = &mut self.state else {
|
||||
self.state = Some(PeriodData {
|
||||
@@ -43,7 +41,7 @@ impl EventCoalescer {
|
||||
let period_end = state
|
||||
.end
|
||||
.unwrap_or(state.start + SIMULATED_DURATION_FOR_SINGLE_EVENT);
|
||||
let within_timeout = log_time - period_end < coalesce_timeout;
|
||||
let within_timeout = log_time - period_end < COALESCE_TIMEOUT;
|
||||
let environment_is_same = state.environment == environment;
|
||||
let should_coaelesce = !within_timeout || !environment_is_same;
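The coalescing rule above is: keep extending the current period while events arrive within the timeout and in the same environment, otherwise close it and start a new one. A self-contained sketch of that rule on `Instant`s (simplified: it drops the clock abstraction and the `SIMULATED_DURATION_FOR_SINGLE_EVENT` padding):

```rust
use std::time::{Duration, Instant};

const COALESCE_TIMEOUT: Duration = Duration::from_secs(20);

struct Period {
    environment: &'static str,
    start: Instant,
    end: Option<Instant>,
}

// Returns the period that was closed, if logging this event had to start a
// new one; otherwise the current period is simply extended.
fn log_event(
    state: &mut Option<Period>,
    environment: &'static str,
    now: Instant,
) -> Option<(Instant, Instant, &'static str)> {
    if let Some(period) = state {
        let period_end = period.end.unwrap_or(period.start);
        let within_timeout = now.saturating_duration_since(period_end) < COALESCE_TIMEOUT;

        if within_timeout && period.environment == environment {
            // Same environment, still inside the window: extend the period.
            period.end = Some(now);
            return None;
        }

        // Timed out or the environment changed: close the old period.
        let closed = (period.start, period_end, period.environment);
        *state = Some(Period { environment, start: now, end: None });
        return Some(closed);
    }

    *state = Some(Period { environment, start: now, end: None });
    None
}
```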
|
||||
@@ -70,16 +68,13 @@ impl EventCoalescer {
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use chrono::TimeZone;
|
||||
use clock::FakeSystemClock;
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_same_context_exceeding_timeout() {
|
||||
let clock = Arc::new(FakeSystemClock::new(
|
||||
Utc.with_ymd_and_hms(1990, 4, 12, 0, 0, 0).unwrap(),
|
||||
));
|
||||
let clock = Arc::new(FakeSystemClock::new());
|
||||
let environment_1 = "environment_1";
|
||||
let mut event_coalescer = EventCoalescer::new(clock.clone());
|
||||
|
||||
@@ -98,7 +93,7 @@ mod tests {
|
||||
})
|
||||
);
|
||||
|
||||
let within_timeout_adjustment = Duration::from_std(COALESCE_TIMEOUT / 2).unwrap();
|
||||
let within_timeout_adjustment = COALESCE_TIMEOUT / 2;
|
||||
|
||||
// Ensure that many calls within the timeout don't start a new period
|
||||
for _ in 0..100 {
|
||||
@@ -118,7 +113,7 @@ mod tests {
|
||||
}
|
||||
|
||||
let period_end = clock.utc_now();
|
||||
let exceed_timeout_adjustment = Duration::from_std(COALESCE_TIMEOUT * 2).unwrap();
|
||||
let exceed_timeout_adjustment = COALESCE_TIMEOUT * 2;
|
||||
// Logging an event exceeding the timeout should start a new period
|
||||
clock.advance(exceed_timeout_adjustment);
|
||||
let new_period_start = clock.utc_now();
|
||||
@@ -137,9 +132,7 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn test_different_environment_under_timeout() {
|
||||
let clock = Arc::new(FakeSystemClock::new(
|
||||
Utc.with_ymd_and_hms(1990, 4, 12, 0, 0, 0).unwrap(),
|
||||
));
|
||||
let clock = Arc::new(FakeSystemClock::new());
|
||||
let environment_1 = "environment_1";
|
||||
let mut event_coalescer = EventCoalescer::new(clock.clone());
|
||||
|
||||
@@ -158,7 +151,7 @@ mod tests {
|
||||
})
|
||||
);
|
||||
|
||||
let within_timeout_adjustment = Duration::from_std(COALESCE_TIMEOUT / 2).unwrap();
|
||||
let within_timeout_adjustment = COALESCE_TIMEOUT / 2;
|
||||
clock.advance(within_timeout_adjustment);
|
||||
let period_end = clock.utc_now();
|
||||
let period_data = event_coalescer.log_event(environment_1);
|
||||
@@ -193,9 +186,7 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn test_switching_environment_while_within_timeout() {
|
||||
let clock = Arc::new(FakeSystemClock::new(
|
||||
Utc.with_ymd_and_hms(1990, 4, 12, 0, 0, 0).unwrap(),
|
||||
));
|
||||
let clock = Arc::new(FakeSystemClock::new());
|
||||
let environment_1 = "environment_1";
|
||||
let mut event_coalescer = EventCoalescer::new(clock.clone());
|
||||
|
||||
@@ -214,7 +205,7 @@ mod tests {
|
||||
})
|
||||
);
|
||||
|
||||
let within_timeout_adjustment = Duration::from_std(COALESCE_TIMEOUT / 2).unwrap();
|
||||
let within_timeout_adjustment = COALESCE_TIMEOUT / 2;
|
||||
clock.advance(within_timeout_adjustment);
|
||||
let period_end = clock.utc_now();
|
||||
let environment_2 = "environment_2";
|
||||
@@ -240,9 +231,7 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn test_switching_environment_while_exceeding_timeout() {
|
||||
let clock = Arc::new(FakeSystemClock::new(
|
||||
Utc.with_ymd_and_hms(1990, 4, 12, 0, 0, 0).unwrap(),
|
||||
));
|
||||
let clock = Arc::new(FakeSystemClock::new());
|
||||
let environment_1 = "environment_1";
|
||||
let mut event_coalescer = EventCoalescer::new(clock.clone());
|
||||
|
||||
@@ -261,7 +250,7 @@ mod tests {
|
||||
})
|
||||
);
|
||||
|
||||
let exceed_timeout_adjustment = Duration::from_std(COALESCE_TIMEOUT * 2).unwrap();
|
||||
let exceed_timeout_adjustment = COALESCE_TIMEOUT * 2;
|
||||
clock.advance(exceed_timeout_adjustment);
|
||||
let period_end = clock.utc_now();
|
||||
let environment_2 = "environment_2";
|
||||
|
||||
@@ -16,7 +16,6 @@ doctest = false
|
||||
test-support = ["dep:parking_lot"]
|
||||
|
||||
[dependencies]
|
||||
chrono.workspace = true
|
||||
parking_lot = { workspace = true, optional = true }
|
||||
serde.workspace = true
|
||||
smallvec.workspace = true
|
||||
|
||||
@@ -1,21 +1,21 @@
|
||||
use chrono::{DateTime, Utc};
|
||||
use std::time::Instant;
|
||||
|
||||
pub trait SystemClock: Send + Sync {
|
||||
/// Returns the current date and time in UTC.
|
||||
fn utc_now(&self) -> DateTime<Utc>;
|
||||
fn utc_now(&self) -> Instant;
|
||||
}
|
||||
|
||||
pub struct RealSystemClock;
|
||||
|
||||
impl SystemClock for RealSystemClock {
|
||||
fn utc_now(&self) -> DateTime<Utc> {
|
||||
Utc::now()
|
||||
fn utc_now(&self) -> Instant {
|
||||
Instant::now()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
pub struct FakeSystemClockState {
|
||||
now: DateTime<Utc>,
|
||||
now: Instant,
|
||||
}
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
@@ -24,36 +24,30 @@ pub struct FakeSystemClock {
|
||||
state: parking_lot::Mutex<FakeSystemClockState>,
|
||||
}
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
impl Default for FakeSystemClock {
|
||||
fn default() -> Self {
|
||||
Self::new(Utc::now())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
impl FakeSystemClock {
|
||||
pub fn new(now: DateTime<Utc>) -> Self {
|
||||
let state = FakeSystemClockState { now };
|
||||
pub fn new() -> Self {
|
||||
let state = FakeSystemClockState {
|
||||
now: Instant::now(),
|
||||
};
|
||||
|
||||
Self {
|
||||
state: parking_lot::Mutex::new(state),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn set_now(&self, now: DateTime<Utc>) {
|
||||
pub fn set_now(&self, now: Instant) {
|
||||
self.state.lock().now = now;
|
||||
}
|
||||
|
||||
/// Advances the [`FakeSystemClock`] by the specified [`Duration`](chrono::Duration).
|
||||
pub fn advance(&self, duration: chrono::Duration) {
|
||||
pub fn advance(&self, duration: std::time::Duration) {
|
||||
self.state.lock().now += duration;
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(any(test, feature = "test-support"))]
|
||||
impl SystemClock for FakeSystemClock {
|
||||
fn utc_now(&self) -> DateTime<Utc> {
|
||||
fn utc_now(&self) -> Instant {
|
||||
self.state.lock().now
|
||||
}
|
||||
}
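With `SystemClock` now dealing in `Instant`s, the fake clock reduces to a mutex-guarded monotonic value that tests advance by a `std::time::Duration`. A minimal equivalent:

```rust
use std::sync::Mutex;
use std::time::{Duration, Instant};

// A minimal monotonic fake clock in the spirit of the new `FakeSystemClock`.
struct FakeClock {
    now: Mutex<Instant>,
}

impl FakeClock {
    fn new() -> Self {
        Self { now: Mutex::new(Instant::now()) }
    }

    fn now(&self) -> Instant {
        *self.now.lock().unwrap()
    }

    fn advance(&self, by: Duration) {
        *self.now.lock().unwrap() += by;
    }
}

fn main() {
    let clock = FakeClock::new();
    let start = clock.now();
    clock.advance(Duration::from_millis(100));
    // Elapsed time is measured against the fake monotonic value, not wall time.
    assert_eq!(clock.now().duration_since(start), Duration::from_millis(100));
}
```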
|
||||
@@ -24,6 +24,7 @@ async-stripe.workspace = true
|
||||
async-tungstenite.workspace = true
|
||||
aws-config = { version = "1.1.5" }
|
||||
aws-sdk-s3 = { version = "1.15.0" }
|
||||
aws-sdk-kinesis = "1.51.0"
|
||||
axum = { version = "0.6", features = ["json", "headers", "ws"] }
|
||||
axum-extra = { version = "0.4", features = ["erased-json"] }
|
||||
base64.workspace = true
|
||||
@@ -78,6 +79,7 @@ uuid.workspace = true
|
||||
|
||||
[dev-dependencies]
|
||||
assistant = { workspace = true, features = ["test-support"] }
|
||||
context_servers.workspace = true
|
||||
async-trait.workspace = true
|
||||
audio.workspace = true
|
||||
call = { workspace = true, features = ["test-support"] }
|
||||
|
||||
@@ -174,6 +174,31 @@ spec:
|
||||
secretKeyRef:
|
||||
name: blob-store
|
||||
key: bucket
|
||||
- name: KINESIS_ACCESS_KEY
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: kinesis
|
||||
key: access_key
|
||||
- name: KINESIS_SECRET_KEY
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: kinesis
|
||||
key: secret_key
|
||||
- name: KINESIS_STREAM
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: kinesis
|
||||
key: stream
|
||||
- name: KINESIS_REGION
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: kinesis
|
||||
key: region
|
||||
- name: BLOB_STORE_BUCKET
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
name: blob-store
|
||||
key: bucket
|
||||
- name: CLICKHOUSE_URL
|
||||
valueFrom:
|
||||
secretKeyRef:
|
||||
|
||||
@@ -11,9 +11,11 @@ use axum::{
|
||||
routing::post,
|
||||
Extension, Router, TypedHeader,
|
||||
};
|
||||
use chrono::Duration;
|
||||
use rpc::ExtensionMetadata;
|
||||
use semantic_version::SemanticVersion;
|
||||
use serde::{Serialize, Serializer};
|
||||
use serde::{Deserialize, Serialize, Serializer};
|
||||
use serde_json::json;
|
||||
use sha2::{Digest, Sha256};
|
||||
use std::sync::{Arc, OnceLock};
|
||||
use telemetry_events::{
|
||||
@@ -21,6 +23,7 @@ use telemetry_events::{
|
||||
EventRequestBody, EventWrapper, ExtensionEvent, InlineCompletionEvent, MemoryEvent, Panic,
|
||||
ReplEvent, SettingEvent,
|
||||
};
|
||||
use util::ResultExt;
|
||||
use uuid::Uuid;
|
||||
|
||||
const CRASH_REPORTS_BUCKET: &str = "zed-crash-reports";
|
||||
@@ -388,13 +391,6 @@ pub async fn post_events(
|
||||
country_code_header: Option<TypedHeader<CloudflareIpCountryHeader>>,
|
||||
body: Bytes,
|
||||
) -> Result<()> {
|
||||
let Some(clickhouse_client) = app.clickhouse_client.clone() else {
|
||||
Err(Error::http(
|
||||
StatusCode::NOT_IMPLEMENTED,
|
||||
"not supported".into(),
|
||||
))?
|
||||
};
|
||||
|
||||
let Some(expected) = calculate_json_checksum(app.clone(), &body) else {
|
||||
return Err(Error::http(
|
||||
StatusCode::INTERNAL_SERVER_ERROR,
|
||||
@@ -416,6 +412,34 @@ pub async fn post_events(
|
||||
};
|
||||
let country_code = country_code_header.map(|h| h.to_string());
|
||||
|
||||
let first_event_at = chrono::Utc::now()
|
||||
- chrono::Duration::milliseconds(last_event.milliseconds_since_first_event);
|
||||
|
||||
if let Some(kinesis_client) = app.kinesis_client.clone() {
|
||||
if let Some(stream) = app.config.kinesis_stream.clone() {
|
||||
let mut request = kinesis_client.put_records().stream_name(stream);
|
||||
for row in for_snowflake(request_body.clone(), first_event_at, country_code.clone()) {
|
||||
if let Some(data) = serde_json::to_vec(&row).log_err() {
|
||||
request = request.records(
|
||||
aws_sdk_kinesis::types::PutRecordsRequestEntry::builder()
|
||||
.partition_key(request_body.system_id.clone().unwrap_or_default())
|
||||
.data(data.into())
|
||||
.build()
|
||||
.unwrap(),
|
||||
);
|
||||
}
|
||||
}
|
||||
request.send().await.log_err();
|
||||
}
|
||||
};
|
||||
|
||||
let Some(clickhouse_client) = app.clickhouse_client.clone() else {
|
||||
Err(Error::http(
|
||||
StatusCode::NOT_IMPLEMENTED,
|
||||
"not supported".into(),
|
||||
))?
|
||||
};
|
||||
|
||||
let first_event_at = chrono::Utc::now()
|
||||
- chrono::Duration::milliseconds(last_event.milliseconds_since_first_event);
|
||||
|
||||
@@ -459,20 +483,7 @@ pub async fn post_events(
|
||||
checksum_matched,
|
||||
))
|
||||
}
|
||||
Event::Cpu(event) => to_upload.cpu_events.push(CpuEventRow::from_event(
|
||||
event.clone(),
|
||||
wrapper,
|
||||
&request_body,
|
||||
first_event_at,
|
||||
checksum_matched,
|
||||
)),
|
||||
Event::Memory(event) => to_upload.memory_events.push(MemoryEventRow::from_event(
|
||||
event.clone(),
|
||||
wrapper,
|
||||
&request_body,
|
||||
first_event_at,
|
||||
checksum_matched,
|
||||
)),
|
||||
Event::Cpu(_) | Event::Memory(_) => continue,
|
||||
Event::App(event) => to_upload.app_events.push(AppEventRow::from_event(
|
||||
event.clone(),
|
||||
wrapper,
|
||||
@@ -923,6 +934,7 @@ pub struct CpuEventRow {
|
||||
}
|
||||
|
||||
impl CpuEventRow {
|
||||
#[allow(unused)]
|
||||
fn from_event(
|
||||
event: CpuEvent,
|
||||
wrapper: &EventWrapper,
|
||||
@@ -977,6 +989,7 @@ pub struct MemoryEventRow {
|
||||
}
|
||||
|
||||
impl MemoryEventRow {
|
||||
#[allow(unused)]
|
||||
fn from_event(
|
||||
event: MemoryEvent,
|
||||
wrapper: &EventWrapper,
|
||||
@@ -1364,3 +1377,259 @@ pub fn calculate_json_checksum(app: Arc<AppState>, json: &impl AsRef<[u8]>) -> O
|
||||
summer.update(checksum_seed);
|
||||
Some(summer.finalize().into_iter().collect())
|
||||
}
|
||||
|
||||
fn for_snowflake(
|
||||
body: EventRequestBody,
|
||||
first_event_at: chrono::DateTime<chrono::Utc>,
|
||||
country_code: Option<String>,
|
||||
) -> impl Iterator<Item = SnowflakeRow> {
|
||||
body.events.into_iter().flat_map(move |event| {
|
||||
let timestamp =
|
||||
first_event_at + Duration::milliseconds(event.milliseconds_since_first_event);
|
||||
let (event_type, mut event_properties) = match &event.event {
|
||||
Event::Editor(e) => (
|
||||
match e.operation.as_str() {
|
||||
"open" => "Editor Opened".to_string(),
|
||||
"save" => "Editor Saved".to_string(),
|
||||
_ => format!("Unknown Editor Event: {}", e.operation),
|
||||
},
|
||||
serde_json::to_value(e).unwrap(),
|
||||
),
|
||||
Event::InlineCompletion(e) => (
|
||||
format!(
|
||||
"Inline Completion {}",
|
||||
if e.suggestion_accepted {
|
||||
"Accepted"
|
||||
} else {
|
||||
"Discarded"
|
||||
}
|
||||
),
|
||||
serde_json::to_value(e).unwrap(),
|
||||
),
|
||||
Event::Call(e) => {
|
||||
let event_type = match e.operation.trim() {
|
||||
"unshare project" => "Project Unshared".to_string(),
|
||||
"open channel notes" => "Channel Notes Opened".to_string(),
|
||||
"share project" => "Project Shared".to_string(),
|
||||
"join channel" => "Channel Joined".to_string(),
|
||||
"hang up" => "Call Ended".to_string(),
|
||||
"accept incoming" => "Incoming Call Accepted".to_string(),
|
||||
"invite" => "Participant Invited".to_string(),
|
||||
"disable microphone" => "Microphone Disabled".to_string(),
|
||||
"enable microphone" => "Microphone Enabled".to_string(),
|
||||
"enable screen share" => "Screen Share Enabled".to_string(),
|
||||
"disable screen share" => "Screen Share Disabled".to_string(),
|
||||
"decline incoming" => "Incoming Call Declined".to_string(),
|
||||
"enable camera" => "Camera Enabled".to_string(),
|
||||
"disable camera" => "Camera Disabled".to_string(),
|
||||
_ => format!("Unknown Call Event: {}", e.operation),
|
||||
};
|
||||
|
||||
(event_type, serde_json::to_value(e).unwrap())
|
||||
}
|
||||
Event::Assistant(e) => (
|
||||
match e.phase {
|
||||
telemetry_events::AssistantPhase::Response => "Assistant Responded".to_string(),
|
||||
telemetry_events::AssistantPhase::Invoked => "Assistant Invoked".to_string(),
|
||||
telemetry_events::AssistantPhase::Accepted => {
|
||||
"Assistant Response Accepted".to_string()
|
||||
}
|
||||
telemetry_events::AssistantPhase::Rejected => {
|
||||
"Assistant Response Rejected".to_string()
|
||||
}
|
||||
},
|
||||
serde_json::to_value(e).unwrap(),
|
||||
),
|
||||
Event::Cpu(_) | Event::Memory(_) => return None,
|
||||
Event::App(e) => {
|
||||
let mut properties = json!({});
|
||||
let event_type = match e.operation.trim() {
|
||||
"extensions: install extension" => "Extension Installed".to_string(),
|
||||
"open" => "App Opened".to_string(),
|
||||
"project search: open" => "Project Search Opened".to_string(),
|
||||
"first open" => {
|
||||
properties["is_first_open"] = json!(true);
|
||||
"App First Opened".to_string()
|
||||
}
|
||||
"extensions: uninstall extension" => "Extension Uninstalled".to_string(),
|
||||
"welcome page: close" => "Welcome Page Closed".to_string(),
|
||||
"open project" => {
|
||||
properties["is_first_time"] = json!(false);
|
||||
"Project Opened".to_string()
|
||||
}
|
||||
"welcome page: install cli" => "CLI Installed".to_string(),
|
||||
"project diagnostics: open" => "Project Diagnostics Opened".to_string(),
|
||||
"extensions page: open" => "Extensions Page Opened".to_string(),
|
||||
"welcome page: change theme" => "Welcome Theme Changed".to_string(),
|
||||
"welcome page: toggle metric telemetry" => {
|
||||
properties["enabled"] = json!(false);
|
||||
"Welcome Telemetry Toggled".to_string()
|
||||
}
|
||||
"welcome page: change keymap" => "Keymap Changed".to_string(),
|
||||
"welcome page: toggle vim" => {
|
||||
properties["enabled"] = json!(false);
|
||||
"Welcome Vim Mode Toggled".to_string()
|
||||
}
|
||||
"welcome page: sign in to copilot" => "Welcome Copilot Signed In".to_string(),
|
||||
"welcome page: toggle diagnostic telemetry" => {
|
||||
"Welcome Telemetry Toggled".to_string()
|
||||
}
|
||||
"welcome page: open" => "Welcome Page Opened".to_string(),
|
||||
"close" => "App Closed".to_string(),
|
||||
"markdown preview: open" => "Markdown Preview Opened".to_string(),
|
||||
"welcome page: open extensions" => "Extensions Page Opened".to_string(),
|
||||
"open node project" | "open pnpm project" | "open yarn project" => {
|
||||
properties["project_type"] = json!("node");
|
||||
properties["is_first_time"] = json!(false);
|
||||
"Project Opened".to_string()
|
||||
}
|
||||
"repl sessions: open" => "REPL Session Started".to_string(),
|
||||
"welcome page: toggle helix" => {
|
||||
properties["enabled"] = json!(false);
|
||||
"Helix Mode Toggled".to_string()
|
||||
}
|
||||
"welcome page: edit settings" => {
|
||||
properties["changed_settings"] = json!([]);
|
||||
"Settings Edited".to_string()
|
||||
}
|
||||
"welcome page: view docs" => "Documentation Viewed".to_string(),
|
||||
"open ssh project" => {
|
||||
properties["is_first_time"] = json!(false);
|
||||
"SSH Project Opened".to_string()
|
||||
}
|
||||
"create ssh server" => "SSH Server Created".to_string(),
|
||||
"create ssh project" => "SSH Project Created".to_string(),
|
||||
"first open for release channel" => {
|
||||
properties["is_first_for_channel"] = json!(true);
|
||||
"App First Opened For Release Channel".to_string()
|
||||
}
|
||||
"feature upsell: toggle vim" => {
|
||||
properties["source"] = json!("Feature Upsell");
|
||||
"Vim Mode Toggled".to_string()
|
||||
}
|
||||
_ => e
|
||||
.operation
|
||||
.strip_prefix("feature upsell: viewed docs (")
|
||||
.and_then(|s| s.strip_suffix(')'))
|
||||
.map_or_else(
|
||||
|| format!("Unknown App Event: {}", e.operation),
|
||||
|docs_url| {
|
||||
properties["url"] = json!(docs_url);
|
||||
properties["source"] = json!("Feature Upsell");
|
||||
"Documentation Viewed".to_string()
|
||||
},
|
||||
),
|
||||
};
|
||||
(event_type, properties)
|
||||
}
|
||||
Event::Setting(e) => (
|
||||
"Settings Changed".to_string(),
|
||||
serde_json::to_value(e).unwrap(),
|
||||
),
|
||||
Event::Extension(e) => (
|
||||
"Extension Loaded".to_string(),
|
||||
serde_json::to_value(e).unwrap(),
|
||||
),
|
||||
Event::Edit(e) => (
|
||||
"Editor Edited".to_string(),
|
||||
serde_json::to_value(e).unwrap(),
|
||||
),
|
||||
Event::Action(e) => (
|
||||
"Action Invoked".to_string(),
|
||||
serde_json::to_value(e).unwrap(),
|
||||
),
|
||||
Event::Repl(e) => (
|
||||
"Kernel Status Changed".to_string(),
|
||||
serde_json::to_value(e).unwrap(),
|
||||
),
|
||||
};
|
||||
|
||||
if let serde_json::Value::Object(ref mut map) = event_properties {
|
||||
map.insert("app_version".to_string(), body.app_version.clone().into());
|
||||
map.insert("os_name".to_string(), body.os_name.clone().into());
|
||||
map.insert("os_version".to_string(), body.os_version.clone().into());
|
||||
map.insert("architecture".to_string(), body.architecture.clone().into());
|
||||
map.insert(
|
||||
"release_channel".to_string(),
|
||||
body.release_channel.clone().into(),
|
||||
);
|
||||
map.insert("signed_in".to_string(), event.signed_in.into());
|
||||
if let Some(country_code) = country_code.as_ref() {
|
||||
map.insert("country_code".to_string(), country_code.clone().into());
|
||||
}
|
||||
}
|
||||
|
||||
let user_properties = Some(serde_json::json!({
|
||||
"is_staff": body.is_staff,
|
||||
"Country": country_code.clone(),
|
||||
"OS": format!("{} {}", body.os_name, body.os_version.clone().unwrap_or_default()),
|
||||
"Version": body.app_version.clone(),
|
||||
}));
|
||||
|
||||
Some(SnowflakeRow {
|
||||
time: timestamp,
|
||||
user_id: body.metrics_id.clone(),
|
||||
device_id: body.system_id.clone(),
|
||||
event_type,
|
||||
event_properties,
|
||||
user_properties,
|
||||
insert_id: Some(Uuid::new_v4().to_string()),
|
||||
})
|
||||
})
|
||||
}
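`for_snowflake` maps each event to a human-readable event type plus a JSON property bag, then merges in fields shared by every row. A trimmed-down sketch of that shape using `serde_json` (the one-variant event enum here is a hypothetical stand-in for `telemetry_events::Event`):

```rust
use serde::Serialize;
use serde_json::{json, Value};

// Hypothetical trimmed-down event payload.
#[derive(Serialize)]
struct EditorEvent {
    operation: String,
    file_extension: Option<String>,
}

enum Event {
    Editor(EditorEvent),
}

// Map an event to (event type, property bag), then fold in fields shared by
// every row, roughly as `for_snowflake` does above.
fn to_row(event: Event, app_version: &str, os_name: &str) -> (String, Value) {
    let (event_type, mut properties) = match event {
        Event::Editor(e) => (
            match e.operation.as_str() {
                "open" => "Editor Opened".to_string(),
                "save" => "Editor Saved".to_string(),
                other => format!("Unknown Editor Event: {other}"),
            },
            serde_json::to_value(e).unwrap(),
        ),
    };

    if let Value::Object(map) = &mut properties {
        map.insert("app_version".into(), json!(app_version));
        map.insert("os_name".into(), json!(os_name));
    }

    (event_type, properties)
}

fn main() {
    let event = Event::Editor(EditorEvent {
        operation: "open".into(),
        file_extension: Some("rs".into()),
    });
    let (event_type, properties) = to_row(event, "0.158.0", "macOS");
    println!("{event_type}: {properties}");
}
```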
|
||||
#[derive(Serialize, Deserialize)]
|
||||
struct SnowflakeRow {
|
||||
pub time: chrono::DateTime<chrono::Utc>,
|
||||
pub user_id: Option<String>,
|
||||
pub device_id: Option<String>,
|
||||
pub event_type: String,
|
||||
pub event_properties: serde_json::Value,
|
||||
pub user_properties: Option<serde_json::Value>,
|
||||
pub insert_id: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
struct SnowflakeData {
|
||||
/// Identifier unique to each Zed installation (differs for stable, preview, dev)
|
||||
pub installation_id: Option<String>,
|
||||
/// Identifier unique to each logged in Zed user (randomly generated on first sign in)
|
||||
/// Identifier unique to each Zed session (differs for each time you open Zed)
|
||||
pub session_id: Option<String>,
|
||||
pub metrics_id: Option<String>,
|
||||
/// True for Zed staff, otherwise false
|
||||
pub is_staff: Option<bool>,
|
||||
/// Zed version number
|
||||
pub app_version: String,
|
||||
pub os_name: String,
|
||||
pub os_version: Option<String>,
|
||||
pub architecture: String,
|
||||
/// Zed release channel (stable, preview, dev)
|
||||
pub release_channel: Option<String>,
|
||||
pub signed_in: bool,
|
||||
|
||||
#[serde(flatten)]
|
||||
pub editor_event: Option<EditorEvent>,
|
||||
#[serde(flatten)]
|
||||
pub inline_completion_event: Option<InlineCompletionEvent>,
|
||||
#[serde(flatten)]
|
||||
pub call_event: Option<CallEvent>,
|
||||
#[serde(flatten)]
|
||||
pub assistant_event: Option<AssistantEvent>,
|
||||
#[serde(flatten)]
|
||||
pub cpu_event: Option<CpuEvent>,
|
||||
#[serde(flatten)]
|
||||
pub memory_event: Option<MemoryEvent>,
|
||||
#[serde(flatten)]
|
||||
pub app_event: Option<AppEvent>,
|
||||
#[serde(flatten)]
|
||||
pub setting_event: Option<SettingEvent>,
|
||||
#[serde(flatten)]
|
||||
pub extension_event: Option<ExtensionEvent>,
|
||||
#[serde(flatten)]
|
||||
pub edit_event: Option<EditEvent>,
|
||||
#[serde(flatten)]
|
||||
pub repl_event: Option<ReplEvent>,
|
||||
#[serde(flatten)]
|
||||
pub action_event: Option<ActionEvent>,
|
||||
}
|
||||
|
||||
@@ -170,6 +170,10 @@ pub struct Config {
|
||||
pub blob_store_access_key: Option<String>,
|
||||
pub blob_store_secret_key: Option<String>,
|
||||
pub blob_store_bucket: Option<String>,
|
||||
pub kinesis_region: Option<String>,
|
||||
pub kinesis_stream: Option<String>,
|
||||
pub kinesis_access_key: Option<String>,
|
||||
pub kinesis_secret_key: Option<String>,
|
||||
pub zed_environment: Arc<str>,
|
||||
pub openai_api_key: Option<Arc<str>>,
|
||||
pub google_ai_api_key: Option<Arc<str>>,
|
||||
@@ -238,6 +242,10 @@ impl Config {
|
||||
stripe_api_key: None,
|
||||
supermaven_admin_api_key: None,
|
||||
user_backfiller_github_access_token: None,
|
||||
kinesis_region: None,
|
||||
kinesis_access_key: None,
|
||||
kinesis_secret_key: None,
|
||||
kinesis_stream: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -276,6 +284,7 @@ pub struct AppState {
|
||||
pub rate_limiter: Arc<RateLimiter>,
|
||||
pub executor: Executor,
|
||||
pub clickhouse_client: Option<::clickhouse::Client>,
|
||||
pub kinesis_client: Option<::aws_sdk_kinesis::Client>,
|
||||
pub config: Config,
|
||||
}
|
||||
|
||||
@@ -332,6 +341,11 @@ impl AppState {
|
||||
.clickhouse_url
|
||||
.as_ref()
|
||||
.and_then(|_| build_clickhouse_client(&config).log_err()),
|
||||
kinesis_client: if config.kinesis_access_key.is_some() {
|
||||
build_kinesis_client(&config).await.log_err()
|
||||
} else {
|
||||
None
|
||||
},
|
||||
config,
|
||||
};
|
||||
Ok(Arc::new(this))
|
||||
@@ -381,6 +395,35 @@ async fn build_blob_store_client(config: &Config) -> anyhow::Result<aws_sdk_s3::
|
||||
Ok(aws_sdk_s3::Client::new(&s3_config))
|
||||
}
|
||||
|
||||
async fn build_kinesis_client(config: &Config) -> anyhow::Result<aws_sdk_kinesis::Client> {
|
||||
let keys = aws_sdk_s3::config::Credentials::new(
|
||||
config
|
||||
.kinesis_access_key
|
||||
.clone()
|
||||
.ok_or_else(|| anyhow!("missing kinesis_access_key"))?,
|
||||
config
|
||||
.kinesis_secret_key
|
||||
.clone()
|
||||
.ok_or_else(|| anyhow!("missing kinesis_secret_key"))?,
|
||||
None,
|
||||
None,
|
||||
"env",
|
||||
);
|
||||
|
||||
let kinesis_config = aws_config::defaults(BehaviorVersion::latest())
|
||||
.region(Region::new(
|
||||
config
|
||||
.kinesis_region
|
||||
.clone()
|
||||
.ok_or_else(|| anyhow!("missing blob_store_region"))?,
|
||||
))
|
||||
.credentials_provider(keys)
|
||||
.load()
|
||||
.await;
|
||||
|
||||
Ok(aws_sdk_kinesis::Client::new(&kinesis_config))
|
||||
}
|
||||
|
||||
fn build_clickhouse_client(config: &Config) -> anyhow::Result<::clickhouse::Client> {
|
||||
Ok(::clickhouse::Client::default()
|
||||
.with_url(
|
||||
|
||||
@@ -267,7 +267,6 @@ async fn perform_completion(
|
||||
anthropic::ANTHROPIC_API_URL,
|
||||
api_key,
|
||||
request,
|
||||
None,
|
||||
)
|
||||
.await
|
||||
.map_err(|err| match err {
|
||||
@@ -357,7 +356,6 @@ async fn perform_completion(
|
||||
open_ai::OPEN_AI_API_URL,
|
||||
api_key,
|
||||
serde_json::from_str(params.provider_request.get())?,
|
||||
None,
|
||||
)
|
||||
.await?;
|
||||
|
||||
@@ -390,7 +388,6 @@ async fn perform_completion(
|
||||
google_ai::API_URL,
|
||||
api_key,
|
||||
serde_json::from_str(params.provider_request.get())?,
|
||||
None,
|
||||
)
|
||||
.await?;
|
||||
|
||||
|
||||
@@ -3621,7 +3621,6 @@ async fn count_language_model_tokens(
|
||||
google_ai::API_URL,
|
||||
api_key,
|
||||
serde_json::from_str(&request.request)?,
|
||||
None,
|
||||
)
|
||||
.await?
|
||||
}
|
||||
@@ -4031,12 +4030,18 @@ async fn get_llm_api_token(
|
||||
Err(anyhow!("terms of service not accepted"))?
|
||||
}
|
||||
|
||||
let mut account_created_at = user.created_at;
|
||||
if let Some(github_created_at) = user.github_user_created_at {
|
||||
account_created_at = account_created_at.min(github_created_at);
|
||||
}
|
||||
if Utc::now().naive_utc() - account_created_at < MIN_ACCOUNT_AGE_FOR_LLM_USE {
|
||||
Err(anyhow!("account too young"))?
|
||||
let has_llm_subscription = session.has_llm_subscription(&db).await?;
|
||||
|
||||
let bypass_account_age_check =
|
||||
has_llm_subscription || flags.iter().any(|flag| flag == "bypass-account-age-check");
|
||||
if !bypass_account_age_check {
|
||||
let mut account_created_at = user.created_at;
|
||||
if let Some(github_created_at) = user.github_user_created_at {
|
||||
account_created_at = account_created_at.min(github_created_at);
|
||||
}
|
||||
if Utc::now().naive_utc() - account_created_at < MIN_ACCOUNT_AGE_FOR_LLM_USE {
|
||||
Err(anyhow!("account too young"))?
|
||||
}
|
||||
}
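Extracted on its own, the new token gate reads: subscribers and users carrying the feature flag bypass the age check, everyone else must clear the minimum account age, measured from the older of the Zed and GitHub account creation dates. A sketch using `chrono`; the 30-day threshold is an assumed value, since `MIN_ACCOUNT_AGE_FOR_LLM_USE` is not shown in this diff:

```rust
use chrono::{Duration, NaiveDateTime, Utc};

// Assumed threshold; the real `MIN_ACCOUNT_AGE_FOR_LLM_USE` is defined elsewhere.
fn min_account_age() -> Duration {
    Duration::days(30)
}

fn check_account_age(
    created_at: NaiveDateTime,
    github_created_at: Option<NaiveDateTime>,
    has_llm_subscription: bool,
    flags: &[String],
) -> Result<(), &'static str> {
    // Subscribers and flagged users skip the age gate entirely.
    let bypass =
        has_llm_subscription || flags.iter().any(|flag| flag == "bypass-account-age-check");
    if bypass {
        return Ok(());
    }

    // Use the older of the Zed and GitHub account creation dates.
    let mut account_created_at = created_at;
    if let Some(github_created_at) = github_created_at {
        account_created_at = account_created_at.min(github_created_at);
    }

    if Utc::now().naive_utc() - account_created_at < min_account_age() {
        Err("account too young")
    } else {
        Ok(())
    }
}
```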
|
||||
let billing_preferences = db.get_billing_preferences(user.id).await?;
|
||||
@@ -4046,7 +4051,7 @@ async fn get_llm_api_token(
|
||||
session.is_staff(),
|
||||
billing_preferences,
|
||||
has_llm_closed_beta_feature_flag,
|
||||
session.has_llm_subscription(&db).await?,
|
||||
has_llm_subscription,
|
||||
session.current_plan(&db).await?,
|
||||
&session.app_state.config,
|
||||
)?;
|
||||
|
||||
@@ -6486,6 +6486,8 @@ async fn test_context_collaboration_with_reconnect(
|
||||
assert_eq!(project.collaborators().len(), 1);
|
||||
});
|
||||
|
||||
cx_a.update(context_servers::init);
|
||||
cx_b.update(context_servers::init);
|
||||
let prompt_builder = Arc::new(PromptBuilder::new(None).unwrap());
|
||||
let context_store_a = cx_a
|
||||
.update(|cx| {
|
||||
|
||||
@@ -1323,11 +1323,8 @@ impl RandomizedTest for ProjectCollaborationTest {
|
||||
match (host_file, guest_file) {
|
||||
(Some(host_file), Some(guest_file)) => {
|
||||
assert_eq!(guest_file.path(), host_file.path());
|
||||
assert_eq!(guest_file.is_deleted(), host_file.is_deleted());
|
||||
assert_eq!(
|
||||
guest_file.mtime(),
|
||||
host_file.mtime(),
|
||||
"guest {} mtime does not match host {} for path {:?} in project {}",
|
||||
assert_eq!(guest_file.disk_state(), host_file.disk_state(),
|
||||
"guest {} disk_state does not match host {} for path {:?} in project {}",
|
||||
guest_user_id,
|
||||
host_user_id,
|
||||
guest_file.path(),
|
||||
|
||||
@@ -168,7 +168,7 @@ impl TestServer {
|
||||
client::init_settings(cx);
|
||||
});
|
||||
|
||||
let clock = Arc::new(FakeSystemClock::default());
|
||||
let clock = Arc::new(FakeSystemClock::new());
|
||||
let http = FakeHttpClient::with_404_response();
|
||||
let user_id = if let Ok(Some(user)) = self.app_state.db.get_user_by_github_login(name).await
|
||||
{
|
||||
@@ -512,6 +512,7 @@ impl TestServer {
|
||||
rate_limiter: Arc::new(RateLimiter::new(test_db.db().clone())),
|
||||
executor,
|
||||
clickhouse_client: None,
|
||||
kinesis_client: None,
|
||||
config: Config {
|
||||
http_port: 0,
|
||||
database_url: "".into(),
|
||||
@@ -550,6 +551,10 @@ impl TestServer {
|
||||
stripe_api_key: None,
|
||||
supermaven_admin_api_key: None,
|
||||
user_backfiller_github_access_token: None,
|
||||
kinesis_region: None,
|
||||
kinesis_stream: None,
|
||||
kinesis_access_key: None,
|
||||
kinesis_secret_key: None,
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
@@ -23,7 +23,7 @@ use std::{sync::Arc, time::Duration};
|
||||
use time::{OffsetDateTime, UtcOffset};
|
||||
use ui::{
|
||||
prelude::*, Avatar, Button, ContextMenu, IconButton, IconName, KeyBinding, Label, PopoverMenu,
|
||||
TabBar, Tooltip,
|
||||
Tab, TabBar, Tooltip,
|
||||
};
|
||||
use util::{ResultExt, TryFutureExt};
|
||||
use workspace::{
|
||||
@@ -939,7 +939,7 @@ impl Render for ChatPanel {
|
||||
TabBar::new("chat_header").child(
|
||||
h_flex()
|
||||
.w_full()
|
||||
.h(rems(ui::Tab::CONTAINER_HEIGHT_IN_REMS))
|
||||
.h(Tab::container_height(cx))
|
||||
.px_2()
|
||||
.child(Label::new(
|
||||
self.active_chat
|
||||
|
||||
@@ -2521,7 +2521,7 @@ impl CollabPanel {
|
||||
.flex()
|
||||
.w_full()
|
||||
.when(!channel.is_root_channel(), |el| {
|
||||
el.on_drag(channel.clone(), move |channel, cx| {
|
||||
el.on_drag(channel.clone(), move |channel, _, cx| {
|
||||
cx.new_view(|_| DraggedChannelView {
|
||||
channel: channel.clone(),
|
||||
width,
|
||||
|
||||
@@ -19,7 +19,9 @@ use serde::{Deserialize, Serialize};
|
||||
use settings::{Settings, SettingsStore};
|
||||
use std::{sync::Arc, time::Duration};
|
||||
use time::{OffsetDateTime, UtcOffset};
|
||||
use ui::{h_flex, prelude::*, v_flex, Avatar, Button, Icon, IconButton, IconName, Label, Tooltip};
|
||||
use ui::{
|
||||
h_flex, prelude::*, v_flex, Avatar, Button, Icon, IconButton, IconName, Label, Tab, Tooltip,
|
||||
};
|
||||
use util::{ResultExt, TryFutureExt};
|
||||
use workspace::notifications::NotificationId;
|
||||
use workspace::{
|
||||
@@ -588,7 +590,7 @@ impl Render for NotificationPanel {
|
||||
.px_2()
|
||||
.py_1()
|
||||
// Match the height of the tab bar so they line up.
|
||||
.h(rems(ui::Tab::CONTAINER_HEIGHT_IN_REMS))
|
||||
.h(Tab::container_height(cx))
|
||||
.border_b_1()
|
||||
.border_color(cx.theme().colors().border)
|
||||
.child(Label::new("Notifications"))
|
||||
|
||||
@@ -39,11 +39,13 @@ impl CommandPaletteFilter {
|
||||
}
|
||||
|
||||
/// Updates the global [`CommandPaletteFilter`] using the given closure.
|
||||
pub fn update_global<F, R>(cx: &mut AppContext, update: F) -> R
|
||||
pub fn update_global<F>(cx: &mut AppContext, update: F)
|
||||
where
|
||||
F: FnOnce(&mut Self, &mut AppContext) -> R,
|
||||
F: FnOnce(&mut Self, &mut AppContext),
|
||||
{
|
||||
cx.update_global(|this: &mut GlobalCommandPaletteFilter, cx| update(&mut this.0, cx))
|
||||
if cx.has_global::<GlobalCommandPaletteFilter>() {
|
||||
cx.update_global(|this: &mut GlobalCommandPaletteFilter, cx| update(&mut this.0, cx))
|
||||
}
|
||||
}
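The `update_global` change makes the helper a guarded no-op when the global filter was never initialized, instead of panicking inside `cx.update_global`. A self-contained sketch of that pattern with a toy global store in place of gpui's (every type below is a stand-in):

```rust
use std::any::{Any, TypeId};
use std::collections::HashMap;

// A toy global store standing in for gpui's globals.
#[derive(Default)]
struct App {
    globals: HashMap<TypeId, Box<dyn Any>>,
}

impl App {
    fn has_global<T: 'static>(&self) -> bool {
        self.globals.contains_key(&TypeId::of::<T>())
    }

    fn update_global<T: 'static, F: FnOnce(&mut T)>(&mut self, update: F) {
        let global = self
            .globals
            .get_mut(&TypeId::of::<T>())
            .and_then(|g| g.downcast_mut::<T>())
            .expect("global not set");
        update(global);
    }
}

#[derive(Default)]
struct Filter {
    hidden: Vec<&'static str>,
}

// The guarded form from the diff: quietly do nothing when the global was
// never set, instead of panicking inside `update_global`.
fn update_filter(app: &mut App, update: impl FnOnce(&mut Filter)) {
    if app.has_global::<Filter>() {
        app.update_global::<Filter, _>(update);
    }
}

fn main() {
    let mut app = App::default();
    // No `Filter` global yet: a no-op rather than a panic.
    update_filter(&mut app, |filter| filter.hidden.push("context servers"));

    app.globals
        .insert(TypeId::of::<Filter>(), Box::new(Filter::default()));
    update_filter(&mut app, |filter| filter.hidden.push("context servers"));
    assert!(app.has_global::<Filter>());
}
```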
|
||||
/// Returns whether the given [`Action`] is hidden by the filter.
|
||||
|
||||
@@ -13,7 +13,6 @@ path = "src/context_servers.rs"
|
||||
|
||||
[dependencies]
|
||||
anyhow.workspace = true
|
||||
async-trait.workspace = true
|
||||
collections.workspace = true
|
||||
command_palette_hooks.workspace = true
|
||||
futures.workspace = true
|
||||
|
||||
@@ -25,6 +25,13 @@ use util::TryFutureExt;
|
||||
const JSON_RPC_VERSION: &str = "2.0";
|
||||
const REQUEST_TIMEOUT: Duration = Duration::from_secs(60);
|
||||
|
||||
// Standard JSON-RPC error codes
|
||||
pub const PARSE_ERROR: i32 = -32700;
|
||||
pub const INVALID_REQUEST: i32 = -32600;
|
||||
pub const METHOD_NOT_FOUND: i32 = -32601;
|
||||
pub const INVALID_PARAMS: i32 = -32602;
|
||||
pub const INTERNAL_ERROR: i32 = -32603;
|
||||
|
||||
type ResponseHandler = Box<dyn Send + FnOnce(Result<String, Error>)>;
|
||||
type NotificationHandler = Box<dyn Send + FnMut(Value, AsyncAppContext)>;
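The constants added above are the standard JSON-RPC 2.0 error codes. For reference, a small sketch of how such a code ends up in an error response body (the response shape follows the JSON-RPC spec; the helper function is illustrative, not part of this crate):

```rust
use serde_json::json;

pub const METHOD_NOT_FOUND: i32 = -32601;

// Shape a JSON-RPC 2.0 error response around one of the standard codes.
fn error_response(id: u64, code: i32, message: &str) -> serde_json::Value {
    json!({
        "jsonrpc": "2.0",
        "id": id,
        "error": { "code": code, "message": message },
    })
}

fn main() {
    println!("{}", error_response(1, METHOD_NOT_FOUND, "no such method"));
}
```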
|
||||
|
||||
@@ -8,7 +8,6 @@ use command_palette_hooks::CommandPaletteFilter;
|
||||
use gpui::{actions, AppContext};
|
||||
use settings::Settings;
|
||||
|
||||
pub use crate::manager::ContextServer;
|
||||
use crate::manager::ContextServerSettings;
|
||||
pub use crate::registry::ContextServerFactoryRegistry;
|
||||
|
||||
|
||||
@@ -15,37 +15,55 @@
|
||||
//! and react to changes in settings.
|
||||
|
||||
use std::path::Path;
|
||||
use std::pin::Pin;
|
||||
use std::sync::Arc;
|
||||
|
||||
use anyhow::{bail, Result};
|
||||
use async_trait::async_trait;
|
||||
use collections::{HashMap, HashSet};
|
||||
use futures::{Future, FutureExt};
|
||||
use gpui::{AsyncAppContext, EventEmitter, ModelContext, Task};
|
||||
use collections::HashMap;
|
||||
use command_palette_hooks::CommandPaletteFilter;
|
||||
use gpui::{AsyncAppContext, EventEmitter, Model, ModelContext, Subscription, Task, WeakModel};
|
||||
use log;
|
||||
use parking_lot::RwLock;
|
||||
use project::Project;
|
||||
use schemars::gen::SchemaGenerator;
|
||||
use schemars::schema::{InstanceType, Schema, SchemaObject};
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::{Settings, SettingsSources};
|
||||
use settings::{Settings, SettingsSources, SettingsStore};
|
||||
use util::ResultExt as _;
|
||||
|
||||
use crate::{
|
||||
client::{self, Client},
|
||||
types,
|
||||
types, ContextServerFactoryRegistry, CONTEXT_SERVERS_NAMESPACE,
|
||||
};
|
||||
|
||||
#[derive(Deserialize, Serialize, Default, Clone, PartialEq, Eq, JsonSchema, Debug)]
|
||||
pub struct ContextServerSettings {
|
||||
/// Settings for context servers used in the Assistant.
|
||||
#[serde(default)]
|
||||
pub context_servers: HashMap<Arc<str>, ServerConfig>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Serialize, Clone, PartialEq, Eq, JsonSchema, Debug, Default)]
|
||||
pub struct ServerConfig {
|
||||
/// The command to run this context server.
|
||||
///
|
||||
/// This will override the command set by an extension.
|
||||
pub command: Option<ServerCommand>,
|
||||
/// The settings for this context server.
|
||||
///
|
||||
/// Consult the documentation for the context server to see what settings
|
||||
/// are supported.
|
||||
#[schemars(schema_with = "server_config_settings_json_schema")]
|
||||
pub settings: Option<serde_json::Value>,
|
||||
}
|
||||
|
||||
fn server_config_settings_json_schema(_generator: &mut SchemaGenerator) -> Schema {
|
||||
Schema::Object(SchemaObject {
|
||||
instance_type: Some(InstanceType::Object.into()),
|
||||
..Default::default()
|
||||
})
|
||||
}
|
||||
|
||||
#[derive(Deserialize, Serialize, Clone, PartialEq, Eq, JsonSchema, Debug)]
|
||||
pub struct ServerCommand {
|
||||
pub path: String,
|
||||
@@ -66,25 +84,13 @@ impl Settings for ContextServerSettings {
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait(?Send)]
|
||||
pub trait ContextServer: Send + Sync + 'static {
|
||||
fn id(&self) -> Arc<str>;
|
||||
fn config(&self) -> Arc<ServerConfig>;
|
||||
fn client(&self) -> Option<Arc<crate::protocol::InitializedContextServerProtocol>>;
|
||||
fn start<'a>(
|
||||
self: Arc<Self>,
|
||||
cx: &'a AsyncAppContext,
|
||||
) -> Pin<Box<dyn 'a + Future<Output = Result<()>>>>;
|
||||
fn stop(&self) -> Result<()>;
|
||||
}
|
||||
|
||||
pub struct NativeContextServer {
|
||||
pub struct ContextServer {
|
||||
pub id: Arc<str>,
|
||||
pub config: Arc<ServerConfig>,
|
||||
pub client: RwLock<Option<Arc<crate::protocol::InitializedContextServerProtocol>>>,
|
||||
}
|
||||
|
||||
impl NativeContextServer {
|
||||
impl ContextServer {
|
||||
pub fn new(id: Arc<str>, config: Arc<ServerConfig>) -> Self {
|
||||
Self {
|
||||
id,
|
||||
@@ -92,61 +98,52 @@ impl NativeContextServer {
|
||||
client: RwLock::new(None),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait(?Send)]
|
||||
impl ContextServer for NativeContextServer {
|
||||
fn id(&self) -> Arc<str> {
|
||||
pub fn id(&self) -> Arc<str> {
|
||||
self.id.clone()
|
||||
}
|
||||
|
||||
fn config(&self) -> Arc<ServerConfig> {
|
||||
pub fn config(&self) -> Arc<ServerConfig> {
|
||||
self.config.clone()
|
||||
}
|
||||
|
||||
fn client(&self) -> Option<Arc<crate::protocol::InitializedContextServerProtocol>> {
|
||||
pub fn client(&self) -> Option<Arc<crate::protocol::InitializedContextServerProtocol>> {
|
||||
self.client.read().clone()
|
||||
}
|
||||
|
||||
fn start<'a>(
|
||||
self: Arc<Self>,
|
||||
cx: &'a AsyncAppContext,
|
||||
) -> Pin<Box<dyn 'a + Future<Output = Result<()>>>> {
|
||||
async move {
|
||||
log::info!("starting context server {}", self.id);
|
||||
let Some(command) = &self.config.command else {
|
||||
bail!("no command specified for server {}", self.id);
|
||||
};
|
||||
let client = Client::new(
|
||||
client::ContextServerId(self.id.clone()),
|
||||
client::ModelContextServerBinary {
|
||||
executable: Path::new(&command.path).to_path_buf(),
|
||||
args: command.args.clone(),
|
||||
env: command.env.clone(),
|
||||
},
|
||||
cx.clone(),
|
||||
)?;
|
||||
pub async fn start(self: Arc<Self>, cx: &AsyncAppContext) -> Result<()> {
|
||||
log::info!("starting context server {}", self.id);
|
||||
let Some(command) = &self.config.command else {
|
||||
bail!("no command specified for server {}", self.id);
|
||||
};
|
||||
let client = Client::new(
|
||||
client::ContextServerId(self.id.clone()),
|
||||
client::ModelContextServerBinary {
|
||||
executable: Path::new(&command.path).to_path_buf(),
|
||||
args: command.args.clone(),
|
||||
env: command.env.clone(),
|
||||
},
|
||||
cx.clone(),
|
||||
)?;
|
||||
|
||||
let protocol = crate::protocol::ModelContextProtocol::new(client);
|
||||
let client_info = types::Implementation {
|
||||
name: "Zed".to_string(),
|
||||
version: env!("CARGO_PKG_VERSION").to_string(),
|
||||
};
|
||||
let initialized_protocol = protocol.initialize(client_info).await?;
|
||||
let protocol = crate::protocol::ModelContextProtocol::new(client);
|
||||
let client_info = types::Implementation {
|
||||
name: "Zed".to_string(),
|
||||
version: env!("CARGO_PKG_VERSION").to_string(),
|
||||
};
|
||||
let initialized_protocol = protocol.initialize(client_info).await?;
|
||||
|
||||
log::debug!(
|
||||
"context server {} initialized: {:?}",
|
||||
self.id,
|
||||
initialized_protocol.initialize,
|
||||
);
|
||||
log::debug!(
|
||||
"context server {} initialized: {:?}",
|
||||
self.id,
|
||||
initialized_protocol.initialize,
|
||||
);
|
||||
|
||||
*self.client.write() = Some(Arc::new(initialized_protocol));
|
||||
Ok(())
|
||||
}
|
||||
.boxed_local()
|
||||
*self.client.write() = Some(Arc::new(initialized_protocol));
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn stop(&self) -> Result<()> {
|
||||
pub fn stop(&self) -> Result<()> {
|
||||
let mut client = self.client.write();
|
||||
if let Some(protocol) = client.take() {
|
||||
drop(protocol);
|
||||
@@ -155,13 +152,13 @@ impl ContextServer for NativeContextServer {
|
||||
}
|
||||
}
|
||||
|
||||
/// A `ContextServerManager` manages the starting and stopping of all context
/// servers. To obtain a server to interact with, a crate must go through the
/// `GlobalContextServerManager`, which holds a handle to the `ContextServerManager` model.
pub struct ContextServerManager {
    servers: HashMap<Arc<str>, Arc<dyn ContextServer>>,
    pending_servers: HashSet<Arc<str>>,
    servers: HashMap<Arc<str>, Arc<ContextServer>>,
    project: Model<Project>,
    registry: Model<ContextServerFactoryRegistry>,
    update_servers_task: Option<Task<Result<()>>>,
    needs_server_update: bool,
    _subscriptions: Vec<Subscription>,
}

pub enum Event {
|
||||
@@ -171,74 +168,66 @@ pub enum Event {
|
||||
|
||||
impl EventEmitter<Event> for ContextServerManager {}
|
||||
|
||||
impl Default for ContextServerManager {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
impl ContextServerManager {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
pub fn new(
|
||||
registry: Model<ContextServerFactoryRegistry>,
|
||||
project: Model<Project>,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Self {
|
||||
let mut this = Self {
|
||||
_subscriptions: vec![
|
||||
cx.observe(®istry, |this, _registry, cx| {
|
||||
this.available_context_servers_changed(cx);
|
||||
}),
|
||||
cx.observe_global::<SettingsStore>(|this, cx| {
|
||||
this.available_context_servers_changed(cx);
|
||||
}),
|
||||
],
|
||||
project,
|
||||
registry,
|
||||
needs_server_update: false,
|
||||
servers: HashMap::default(),
|
||||
pending_servers: HashSet::default(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn add_server(
|
||||
&mut self,
|
||||
server: Arc<dyn ContextServer>,
|
||||
cx: &ModelContext<Self>,
|
||||
) -> Task<anyhow::Result<()>> {
|
||||
let server_id = server.id();
|
||||
|
||||
if self.servers.contains_key(&server_id) || self.pending_servers.contains(&server_id) {
|
||||
return Task::ready(Ok(()));
|
||||
}
|
||||
|
||||
let task = {
|
||||
let server_id = server_id.clone();
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
server.clone().start(&cx).await?;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.servers.insert(server_id.clone(), server);
|
||||
this.pending_servers.remove(&server_id);
|
||||
cx.emit(Event::ServerStarted {
|
||||
server_id: server_id.clone(),
|
||||
});
|
||||
})?;
|
||||
Ok(())
|
||||
})
|
||||
update_servers_task: None,
|
||||
};
|
||||
|
||||
self.pending_servers.insert(server_id);
|
||||
task
|
||||
this.available_context_servers_changed(cx);
|
||||
this
|
||||
}
|
||||
|
||||
pub fn get_server(&self, id: &str) -> Option<Arc<dyn ContextServer>> {
|
||||
self.servers.get(id).cloned()
|
||||
fn available_context_servers_changed(&mut self, cx: &mut ModelContext<Self>) {
|
||||
if self.update_servers_task.is_some() {
|
||||
self.needs_server_update = true;
|
||||
} else {
|
||||
self.update_servers_task = Some(cx.spawn(|this, mut cx| async move {
|
||||
this.update(&mut cx, |this, _| {
|
||||
this.needs_server_update = false;
|
||||
})?;
|
||||
|
||||
Self::maintain_servers(this.clone(), cx.clone()).await?;
|
||||
|
||||
this.update(&mut cx, |this, cx| {
|
||||
let has_any_context_servers = !this.servers().is_empty();
|
||||
if has_any_context_servers {
|
||||
CommandPaletteFilter::update_global(cx, |filter, _cx| {
|
||||
filter.show_namespace(CONTEXT_SERVERS_NAMESPACE);
|
||||
});
|
||||
}
|
||||
|
||||
this.update_servers_task.take();
|
||||
if this.needs_server_update {
|
||||
this.available_context_servers_changed(cx);
|
||||
}
|
||||
})?;
|
||||
|
||||
Ok(())
|
||||
}));
|
||||
}
|
||||
}
|
||||
|
||||
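`available_context_servers_changed` coalesces bursts of registry and settings notifications: while an update task is in flight it only records that another pass is needed, and the task re-queues itself when it finishes. A gpui-free sketch of the same pattern, with the async machinery replaced by a synchronous call (the names `Reconciler` and `sync_servers` are mine, not from the diff):

```rust
// Minimal sketch of the coalescing pattern, assuming a synchronous stand-in
// for the spawned task.
struct Reconciler {
    update_running: bool,
    needs_update: bool,
}

impl Reconciler {
    fn request_update(&mut self) {
        if self.update_running {
            // A pass is already in flight; remember to run again afterwards.
            self.needs_update = true;
        } else {
            self.update_running = true;
            self.sync_servers();
            self.update_running = false;
            if std::mem::take(&mut self.needs_update) {
                self.request_update();
            }
        }
    }

    fn sync_servers(&mut self) {
        // ...start/stop servers here (async in the real code)...
    }
}
```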
pub fn remove_server(
|
||||
&mut self,
|
||||
id: &Arc<str>,
|
||||
cx: &ModelContext<Self>,
|
||||
) -> Task<anyhow::Result<()>> {
|
||||
let id = id.clone();
|
||||
cx.spawn(|this, mut cx| async move {
|
||||
if let Some(server) =
|
||||
this.update(&mut cx, |this, _cx| this.servers.remove(id.as_ref()))?
|
||||
{
|
||||
server.stop()?;
|
||||
}
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.pending_servers.remove(id.as_ref());
|
||||
cx.emit(Event::ServerStopped {
|
||||
server_id: id.clone(),
|
||||
})
|
||||
})?;
|
||||
Ok(())
|
||||
})
|
||||
pub fn get_server(&self, id: &str) -> Option<Arc<ContextServer>> {
|
||||
self.servers
|
||||
.get(id)
|
||||
.filter(|server| server.client().is_some())
|
||||
.cloned()
|
||||
}
|
||||
|
||||
pub fn restart_server(
|
||||
@@ -251,7 +240,7 @@ impl ContextServerManager {
|
||||
if let Some(server) = this.update(&mut cx, |this, _cx| this.servers.remove(&id))? {
|
||||
server.stop()?;
|
||||
let config = server.config();
|
||||
let new_server = Arc::new(NativeContextServer::new(id.clone(), config));
|
||||
let new_server = Arc::new(ContextServer::new(id.clone(), config));
|
||||
new_server.clone().start(&cx).await?;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.servers.insert(id.clone(), new_server);
|
||||
@@ -267,45 +256,83 @@ impl ContextServerManager {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn servers(&self) -> Vec<Arc<dyn ContextServer>> {
|
||||
self.servers.values().cloned().collect()
|
||||
pub fn servers(&self) -> Vec<Arc<ContextServer>> {
|
||||
self.servers
|
||||
.values()
|
||||
.filter(|server| server.client().is_some())
|
||||
.cloned()
|
||||
.collect()
|
||||
}
|
||||
|
||||
pub fn maintain_servers(&mut self, settings: &ContextServerSettings, cx: &ModelContext<Self>) {
|
||||
let current_servers = self
|
||||
.servers()
|
||||
.into_iter()
|
||||
.map(|server| (server.id(), server.config()))
|
||||
.collect::<HashMap<_, _>>();
|
||||
async fn maintain_servers(this: WeakModel<Self>, mut cx: AsyncAppContext) -> Result<()> {
|
||||
let mut desired_servers = HashMap::default();
|
||||
|
||||
let new_servers = settings
|
||||
.context_servers
|
||||
.iter()
|
||||
.map(|(id, config)| (id.clone(), config.clone()))
|
||||
.collect::<HashMap<_, _>>();
|
||||
let (registry, project) = this.update(&mut cx, |this, cx| {
|
||||
let location = this.project.read(cx).worktrees(cx).next().map(|worktree| {
|
||||
settings::SettingsLocation {
|
||||
worktree_id: worktree.read(cx).id(),
|
||||
path: Path::new(""),
|
||||
}
|
||||
});
|
||||
let settings = ContextServerSettings::get(location, cx);
|
||||
desired_servers = settings.context_servers.clone();
|
||||
|
||||
let servers_to_add = new_servers
|
||||
.iter()
|
||||
.filter(|(id, _)| !current_servers.contains_key(id.as_ref()))
|
||||
.map(|(id, config)| (id.clone(), config.clone()))
|
||||
.collect::<Vec<_>>();
|
||||
(this.registry.clone(), this.project.clone())
|
||||
})?;
|
||||
|
||||
let servers_to_remove = current_servers
|
||||
.keys()
|
||||
.filter(|id| !new_servers.contains_key(id.as_ref()))
|
||||
.cloned()
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
log::trace!("servers_to_add={:?}", servers_to_add);
|
||||
for (id, config) in servers_to_add {
|
||||
if config.command.is_some() {
|
||||
let server = Arc::new(NativeContextServer::new(id, Arc::new(config)));
|
||||
self.add_server(server, cx).detach_and_log_err(cx);
|
||||
for (id, factory) in
|
||||
registry.read_with(&cx, |registry, _| registry.context_server_factories())?
|
||||
{
|
||||
let config = desired_servers.entry(id).or_default();
|
||||
if config.command.is_none() {
|
||||
if let Some(extension_command) = factory(project.clone(), &cx).await.log_err() {
|
||||
config.command = Some(extension_command);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for id in servers_to_remove {
|
||||
self.remove_server(&id, cx).detach_and_log_err(cx);
|
||||
let mut servers_to_start = HashMap::default();
|
||||
let mut servers_to_stop = HashMap::default();
|
||||
|
||||
this.update(&mut cx, |this, _cx| {
|
||||
this.servers.retain(|id, server| {
|
||||
if desired_servers.contains_key(id) {
|
||||
true
|
||||
} else {
|
||||
servers_to_stop.insert(id.clone(), server.clone());
|
||||
false
|
||||
}
|
||||
});
|
||||
|
||||
for (id, config) in desired_servers {
|
||||
let existing_config = this.servers.get(&id).map(|server| server.config());
|
||||
if existing_config.as_deref() != Some(&config) {
|
||||
let config = Arc::new(config);
|
||||
let server = Arc::new(ContextServer::new(id.clone(), config));
|
||||
servers_to_start.insert(id.clone(), server.clone());
|
||||
let old_server = this.servers.insert(id.clone(), server);
|
||||
if let Some(old_server) = old_server {
|
||||
servers_to_stop.insert(id, old_server);
|
||||
}
|
||||
}
|
||||
}
|
||||
})?;
|
||||
|
||||
for (id, server) in servers_to_stop {
|
||||
server.stop().log_err();
|
||||
this.update(&mut cx, |_, cx| {
|
||||
cx.emit(Event::ServerStopped { server_id: id })
|
||||
})?;
|
||||
}
|
||||
|
||||
for (id, server) in servers_to_start {
|
||||
if server.start(&cx).await.log_err().is_some() {
|
||||
this.update(&mut cx, |_, cx| {
|
||||
cx.emit(Event::ServerStarted { server_id: id })
|
||||
})?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
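The core of `maintain_servers` is a reconciliation step: build the desired set of servers from settings plus extension-provided commands, then stop anything no longer desired and (re)start anything whose configuration changed. A simplified, self-contained sketch of that comparison, with `String` ids and a generic config standing in for `ServerConfig`, could look like this; in the real code the "start" side also replaces the entry in `self.servers` so the old instance can be stopped.

```rust
use std::collections::HashMap;

// Sketch only: decide which servers to start and stop by diffing the desired
// configuration against what is currently running.
fn reconcile<C: PartialEq>(
    running: &HashMap<String, C>,
    desired: &HashMap<String, C>,
) -> (Vec<String>, Vec<String>) {
    // Stop servers that no longer appear in the desired configuration.
    let to_stop: Vec<String> = running
        .keys()
        .filter(|id| !desired.contains_key(*id))
        .cloned()
        .collect();
    // Start servers that are new or whose configuration changed.
    let to_start: Vec<String> = desired
        .iter()
        .filter(|(id, config)| running.get(*id) != Some(*config))
        .map(|(id, _)| id.clone())
        .collect();
    (to_start, to_stop)
}
```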
|
||||
@@ -11,8 +11,6 @@ use collections::HashMap;
use crate::client::Client;
use crate::types;

const PROTOCOL_VERSION: &str = "2024-10-07";

pub struct ModelContextProtocol {
    inner: Client,
}
@@ -23,10 +21,9 @@ impl ModelContextProtocol {
    }

    fn supported_protocols() -> Vec<types::ProtocolVersion> {
        vec![
            types::ProtocolVersion::VersionString(PROTOCOL_VERSION.to_string()),
            types::ProtocolVersion::VersionNumber(1),
        ]
        vec![types::ProtocolVersion(
            types::LATEST_PROTOCOL_VERSION.to_string(),
        )]
    }

    pub async fn initialize(
@@ -34,11 +31,13 @@ impl ModelContextProtocol {
|
||||
client_info: types::Implementation,
|
||||
) -> Result<InitializedContextServerProtocol> {
|
||||
let params = types::InitializeParams {
|
||||
protocol_version: types::ProtocolVersion::VersionString(PROTOCOL_VERSION.to_string()),
|
||||
protocol_version: types::ProtocolVersion(types::LATEST_PROTOCOL_VERSION.to_string()),
|
||||
capabilities: types::ClientCapabilities {
|
||||
experimental: None,
|
||||
sampling: None,
|
||||
roots: None,
|
||||
},
|
||||
meta: None,
|
||||
client_info,
|
||||
};
|
||||
|
||||
@@ -148,6 +147,7 @@ impl InitializedContextServerProtocol {
|
||||
let params = types::PromptsGetParams {
|
||||
name: prompt.as_ref().to_string(),
|
||||
arguments: Some(arguments),
|
||||
meta: None,
|
||||
};
|
||||
|
||||
let response: types::PromptsGetResponse = self
|
||||
@@ -170,6 +170,7 @@ impl InitializedContextServerProtocol {
|
||||
name: argument.into(),
|
||||
value: value.into(),
|
||||
},
|
||||
meta: None,
|
||||
};
|
||||
let result: types::CompletionCompleteResponse = self
|
||||
.inner
|
||||
@@ -210,6 +211,7 @@ impl InitializedContextServerProtocol {
|
||||
let params = types::CallToolParams {
|
||||
name: tool.as_ref().to_string(),
|
||||
arguments,
|
||||
meta: None,
|
||||
};
|
||||
|
||||
let response: types::CallToolResponse = self
|
||||
|
||||
@@ -2,75 +2,61 @@ use std::sync::Arc;
|
||||
|
||||
use anyhow::Result;
|
||||
use collections::HashMap;
|
||||
use gpui::{AppContext, AsyncAppContext, Global, Model, ReadGlobal, Task};
|
||||
use parking_lot::RwLock;
|
||||
use gpui::{AppContext, AsyncAppContext, Context, Global, Model, ReadGlobal, Task};
|
||||
use project::Project;
|
||||
|
||||
use crate::ContextServer;
|
||||
use crate::manager::ServerCommand;
|
||||
|
||||
pub type ContextServerFactory = Arc<
|
||||
dyn Fn(Model<Project>, &AsyncAppContext) -> Task<Result<Arc<dyn ContextServer>>>
|
||||
+ Send
|
||||
+ Sync
|
||||
+ 'static,
|
||||
dyn Fn(Model<Project>, &AsyncAppContext) -> Task<Result<ServerCommand>> + Send + Sync + 'static,
|
||||
>;
|
||||
|
||||
#[derive(Default)]
|
||||
struct GlobalContextServerFactoryRegistry(Arc<ContextServerFactoryRegistry>);
|
||||
struct GlobalContextServerFactoryRegistry(Model<ContextServerFactoryRegistry>);
|
||||
|
||||
impl Global for GlobalContextServerFactoryRegistry {}
|
||||
|
||||
#[derive(Default)]
|
||||
struct ContextServerFactoryRegistryState {
|
||||
context_servers: HashMap<Arc<str>, ContextServerFactory>,
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct ContextServerFactoryRegistry {
|
||||
state: RwLock<ContextServerFactoryRegistryState>,
|
||||
context_servers: HashMap<Arc<str>, ContextServerFactory>,
|
||||
}
|
||||
|
||||
impl ContextServerFactoryRegistry {
|
||||
/// Returns the global [`ContextServerFactoryRegistry`].
|
||||
pub fn global(cx: &AppContext) -> Arc<Self> {
|
||||
pub fn global(cx: &AppContext) -> Model<Self> {
|
||||
GlobalContextServerFactoryRegistry::global(cx).0.clone()
|
||||
}
|
||||
|
||||
/// Returns the global [`ContextServerFactoryRegistry`].
|
||||
///
|
||||
/// Inserts a default [`ContextServerFactoryRegistry`] if one does not yet exist.
|
||||
pub fn default_global(cx: &mut AppContext) -> Arc<Self> {
|
||||
cx.default_global::<GlobalContextServerFactoryRegistry>()
|
||||
.0
|
||||
.clone()
|
||||
pub fn default_global(cx: &mut AppContext) -> Model<Self> {
|
||||
if !cx.has_global::<GlobalContextServerFactoryRegistry>() {
|
||||
let registry = cx.new_model(|_| Self::new());
|
||||
cx.set_global(GlobalContextServerFactoryRegistry(registry));
|
||||
}
|
||||
cx.global::<GlobalContextServerFactoryRegistry>().0.clone()
|
||||
}
|
||||
|
||||
pub fn new() -> Arc<Self> {
|
||||
Arc::new(Self {
|
||||
state: RwLock::new(ContextServerFactoryRegistryState {
|
||||
context_servers: HashMap::default(),
|
||||
}),
|
||||
})
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
context_servers: HashMap::default(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn context_server_factories(&self) -> Vec<(Arc<str>, ContextServerFactory)> {
|
||||
self.state
|
||||
.read()
|
||||
.context_servers
|
||||
self.context_servers
|
||||
.iter()
|
||||
.map(|(id, factory)| (id.clone(), factory.clone()))
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// Registers the provided [`ContextServerFactory`].
|
||||
pub fn register_server_factory(&self, id: Arc<str>, factory: ContextServerFactory) {
|
||||
let mut state = self.state.write();
|
||||
state.context_servers.insert(id, factory);
|
||||
pub fn register_server_factory(&mut self, id: Arc<str>, factory: ContextServerFactory) {
|
||||
self.context_servers.insert(id, factory);
|
||||
}
|
||||
|
||||
/// Unregisters the [`ContextServerFactory`] for the server with the given ID.
|
||||
pub fn unregister_server_factory_by_id(&self, server_id: &str) {
|
||||
let mut state = self.state.write();
|
||||
state.context_servers.remove(server_id);
|
||||
pub fn unregister_server_factory_by_id(&mut self, server_id: &str) {
|
||||
self.context_servers.remove(server_id);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,8 +2,8 @@ use collections::HashMap;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use url::Url;
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub const LATEST_PROTOCOL_VERSION: &str = "2024-11-05";
|
||||
|
||||
pub enum RequestType {
|
||||
Initialize,
|
||||
CallTool,
|
||||
@@ -18,6 +18,7 @@ pub enum RequestType {
|
||||
Ping,
|
||||
ListTools,
|
||||
ListResourceTemplates,
|
||||
ListRoots,
|
||||
}
|
||||
|
||||
impl RequestType {
|
||||
@@ -36,16 +37,14 @@ impl RequestType {
|
||||
RequestType::Ping => "ping",
|
||||
RequestType::ListTools => "tools/list",
|
||||
RequestType::ListResourceTemplates => "resources/templates/list",
|
||||
RequestType::ListRoots => "roots/list",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Serialize, Deserialize)]
#[serde(untagged)]
pub enum ProtocolVersion {
    VersionString(String),
    VersionNumber(u32),
}
#[serde(transparent)]
pub struct ProtocolVersion(pub String);

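`#[serde(transparent)]` makes the newtype invisible on the wire, so a `ProtocolVersion` now round-trips as a bare JSON string instead of the old untagged string-or-number form. A standalone sketch, duplicating the type locally and assuming `serde`/`serde_json`:

```rust
use serde::{Deserialize, Serialize};

#[derive(Debug, PartialEq, Serialize, Deserialize)]
#[serde(transparent)]
struct ProtocolVersion(String);

fn main() {
    let version = ProtocolVersion("2024-11-05".to_string());
    // The wrapper does not appear in the JSON; only the inner string does.
    assert_eq!(serde_json::to_string(&version).unwrap(), "\"2024-11-05\"");
    let parsed: ProtocolVersion = serde_json::from_str("\"2024-11-05\"").unwrap();
    assert_eq!(parsed, version);
}
```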
#[derive(Debug, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
@@ -53,6 +52,8 @@ pub struct InitializeParams {
|
||||
pub protocol_version: ProtocolVersion,
|
||||
pub capabilities: ClientCapabilities,
|
||||
pub client_info: Implementation,
|
||||
#[serde(rename = "_meta", skip_serializing_if = "Option::is_none")]
|
||||
pub meta: Option<HashMap<String, serde_json::Value>>,
|
||||
}
|
||||
|
||||
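The recurring `meta` field follows the same serde pattern throughout these params and response structs: it is spelled `_meta` on the wire and omitted entirely when `None`. A minimal standalone sketch of that behavior, using local types and `std::collections::HashMap` in place of the `collections` crate:

```rust
use std::collections::HashMap;

use serde::Serialize;

// Local example type mirroring the `_meta` convention used above.
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct PingParams {
    name: String,
    #[serde(rename = "_meta", skip_serializing_if = "Option::is_none")]
    meta: Option<HashMap<String, serde_json::Value>>,
}

fn main() {
    let params = PingParams { name: "ping".into(), meta: None };
    // With `meta: None`, the field is skipped rather than serialized as `null`.
    assert_eq!(serde_json::to_string(&params).unwrap(), r#"{"name":"ping"}"#);
}
```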
#[derive(Debug, Serialize)]
|
||||
@@ -61,30 +62,40 @@ pub struct CallToolParams {
|
||||
pub name: String,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub arguments: Option<HashMap<String, serde_json::Value>>,
|
||||
#[serde(rename = "_meta", skip_serializing_if = "Option::is_none")]
|
||||
pub meta: Option<HashMap<String, serde_json::Value>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ResourcesUnsubscribeParams {
|
||||
pub uri: Url,
|
||||
#[serde(rename = "_meta", skip_serializing_if = "Option::is_none")]
|
||||
pub meta: Option<HashMap<String, serde_json::Value>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ResourcesSubscribeParams {
|
||||
pub uri: Url,
|
||||
#[serde(rename = "_meta", skip_serializing_if = "Option::is_none")]
|
||||
pub meta: Option<HashMap<String, serde_json::Value>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ResourcesReadParams {
|
||||
pub uri: Url,
|
||||
#[serde(rename = "_meta", skip_serializing_if = "Option::is_none")]
|
||||
pub meta: Option<HashMap<String, serde_json::Value>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct LoggingSetLevelParams {
|
||||
pub level: LoggingLevel,
|
||||
#[serde(rename = "_meta", skip_serializing_if = "Option::is_none")]
|
||||
pub meta: Option<HashMap<String, serde_json::Value>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
@@ -93,6 +104,8 @@ pub struct PromptsGetParams {
|
||||
pub name: String,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub arguments: Option<HashMap<String, String>>,
|
||||
#[serde(rename = "_meta", skip_serializing_if = "Option::is_none")]
|
||||
pub meta: Option<HashMap<String, serde_json::Value>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
@@ -100,6 +113,8 @@ pub struct PromptsGetParams {
|
||||
pub struct CompletionCompleteParams {
|
||||
pub r#ref: CompletionReference,
|
||||
pub argument: CompletionArgument,
|
||||
#[serde(rename = "_meta", skip_serializing_if = "Option::is_none")]
|
||||
pub meta: Option<HashMap<String, serde_json::Value>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
@@ -145,12 +160,16 @@ pub struct InitializeResponse {
|
||||
pub protocol_version: ProtocolVersion,
|
||||
pub capabilities: ServerCapabilities,
|
||||
pub server_info: Implementation,
|
||||
#[serde(rename = "_meta", skip_serializing_if = "Option::is_none")]
|
||||
pub meta: Option<HashMap<String, serde_json::Value>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ResourcesReadResponse {
|
||||
pub contents: Vec<ResourceContent>,
|
||||
pub contents: Vec<ResourceContents>,
|
||||
#[serde(rename = "_meta", skip_serializing_if = "Option::is_none")]
|
||||
pub meta: Option<HashMap<String, serde_json::Value>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
@@ -159,29 +178,39 @@ pub struct ResourcesListResponse {
|
||||
pub resources: Vec<Resource>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub next_cursor: Option<String>,
|
||||
#[serde(rename = "_meta", skip_serializing_if = "Option::is_none")]
|
||||
pub meta: Option<HashMap<String, serde_json::Value>>,
|
||||
}
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct SamplingMessage {
|
||||
pub role: Role,
|
||||
pub content: MessageContent,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct SamplingMessage {
|
||||
pub role: SamplingRole,
|
||||
pub content: SamplingContent,
|
||||
pub struct PromptMessage {
|
||||
pub role: Role,
|
||||
pub content: MessageContent,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "lowercase")]
|
||||
pub enum SamplingRole {
|
||||
pub enum Role {
|
||||
User,
|
||||
Assistant,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum SamplingContent {
pub enum MessageContent {
    #[serde(rename = "text")]
    Text { text: String },
    #[serde(rename = "image")]
    Image { data: String, mime_type: String },
    #[serde(rename = "resource")]
    Resource { resource: ResourceContents },
}

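`#[serde(tag = "type")]` plus the per-variant renames give the internally tagged JSON layout the protocol expects: the variant name becomes a `"type"` field alongside the variant's own fields. A self-contained sketch with a local mirror of the enum (not the crate's type):

```rust
use serde::{Deserialize, Serialize};

// Local stand-in for `MessageContent`, kept small for the example.
#[derive(Debug, PartialEq, Serialize, Deserialize)]
#[serde(tag = "type")]
enum Content {
    #[serde(rename = "text")]
    Text { text: String },
    #[serde(rename = "image")]
    Image { data: String, mime_type: String },
}

fn main() {
    let json = serde_json::to_string(&Content::Text { text: "hi".into() }).unwrap();
    // The variant is encoded as a sibling `type` field rather than a wrapper object.
    assert_eq!(json, r#"{"type":"text","text":"hi"}"#);
}
```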
#[derive(Debug, Deserialize)]
|
||||
@@ -189,7 +218,9 @@ pub enum SamplingContent {
|
||||
pub struct PromptsGetResponse {
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub description: Option<String>,
|
||||
pub messages: Vec<SamplingMessage>,
|
||||
pub messages: Vec<PromptMessage>,
|
||||
#[serde(rename = "_meta", skip_serializing_if = "Option::is_none")]
|
||||
pub meta: Option<HashMap<String, serde_json::Value>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
@@ -198,12 +229,16 @@ pub struct PromptsListResponse {
|
||||
pub prompts: Vec<Prompt>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub next_cursor: Option<String>,
|
||||
#[serde(rename = "_meta", skip_serializing_if = "Option::is_none")]
|
||||
pub meta: Option<HashMap<String, serde_json::Value>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct CompletionCompleteResponse {
|
||||
pub completion: CompletionResult,
|
||||
#[serde(rename = "_meta", skip_serializing_if = "Option::is_none")]
|
||||
pub meta: Option<HashMap<String, serde_json::Value>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
@@ -214,6 +249,8 @@ pub struct CompletionResult {
|
||||
pub total: Option<u32>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub has_more: Option<bool>,
|
||||
#[serde(rename = "_meta", skip_serializing_if = "Option::is_none")]
|
||||
pub meta: Option<HashMap<String, serde_json::Value>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize, Serialize)]
|
||||
@@ -243,6 +280,8 @@ pub struct ClientCapabilities {
|
||||
pub experimental: Option<HashMap<String, serde_json::Value>>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub sampling: Option<serde_json::Value>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub roots: Option<RootsCapabilities>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
@@ -283,6 +322,13 @@ pub struct ToolsCapabilities {
|
||||
pub list_changed: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct RootsCapabilities {
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub list_changed: Option<bool>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct Tool {
|
||||
@@ -312,14 +358,28 @@ pub struct Resource {
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ResourceContent {
|
||||
pub struct ResourceContents {
|
||||
pub uri: Url,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub mime_type: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct TextResourceContents {
|
||||
pub uri: Url,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub text: Option<String>,
|
||||
pub mime_type: Option<String>,
|
||||
pub text: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct BlobResourceContents {
|
||||
pub uri: Url,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub blob: Option<String>,
|
||||
pub mime_type: Option<String>,
|
||||
pub blob: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
@@ -338,8 +398,32 @@ pub struct ResourceTemplate {
|
||||
pub enum LoggingLevel {
|
||||
Debug,
|
||||
Info,
|
||||
Notice,
|
||||
Warning,
|
||||
Error,
|
||||
Critical,
|
||||
Alert,
|
||||
Emergency,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ModelPreferences {
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub hints: Option<Vec<ModelHint>>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub cost_priority: Option<f64>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub speed_priority: Option<f64>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub intelligence_priority: Option<f64>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ModelHint {
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub name: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
@@ -352,6 +436,7 @@ pub enum NotificationType {
|
||||
ResourcesListChanged,
|
||||
ToolsListChanged,
|
||||
PromptsListChanged,
|
||||
RootsListChanged,
|
||||
}
|
||||
|
||||
impl NotificationType {
|
||||
@@ -364,6 +449,7 @@ impl NotificationType {
|
||||
NotificationType::ResourcesListChanged => "notifications/resources/list_changed",
|
||||
NotificationType::ToolsListChanged => "notifications/tools/list_changed",
|
||||
NotificationType::PromptsListChanged => "notifications/prompts/list_changed",
|
||||
NotificationType::RootsListChanged => "notifications/roots/list_changed",
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -373,6 +459,14 @@ impl NotificationType {
|
||||
pub enum ClientNotification {
|
||||
Initialized,
|
||||
Progress(ProgressParams),
|
||||
RootsListChanged,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(untagged)]
|
||||
pub enum ProgressToken {
|
||||
String(String),
|
||||
Number(f64),
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize)]
|
||||
@@ -382,10 +476,10 @@ pub struct ProgressParams {
|
||||
pub progress: f64,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub total: Option<f64>,
|
||||
#[serde(rename = "_meta", skip_serializing_if = "Option::is_none")]
|
||||
pub meta: Option<HashMap<String, serde_json::Value>>,
|
||||
}
|
||||
|
||||
pub type ProgressToken = String;
|
||||
|
||||
pub enum CompletionTotal {
|
||||
Exact(u32),
|
||||
HasMore,
|
||||
@@ -410,7 +504,22 @@ pub struct Completion {
|
||||
#[derive(Debug, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct CallToolResponse {
|
||||
pub tool_result: serde_json::Value,
|
||||
pub content: Vec<ToolResponseContent>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub is_error: Option<bool>,
|
||||
#[serde(rename = "_meta", skip_serializing_if = "Option::is_none")]
|
||||
pub meta: Option<HashMap<String, serde_json::Value>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(tag = "type")]
|
||||
pub enum ToolResponseContent {
|
||||
#[serde(rename = "text")]
|
||||
Text { text: String },
|
||||
#[serde(rename = "image")]
|
||||
Image { data: String, mime_type: String },
|
||||
#[serde(rename = "resource")]
|
||||
Resource { resource: ResourceContents },
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
@@ -419,4 +528,22 @@ pub struct ListToolsResponse {
|
||||
pub tools: Vec<Tool>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub next_cursor: Option<String>,
|
||||
#[serde(rename = "_meta", skip_serializing_if = "Option::is_none")]
|
||||
pub meta: Option<HashMap<String, serde_json::Value>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct ListRootsResponse {
|
||||
pub roots: Vec<Root>,
|
||||
#[serde(rename = "_meta", skip_serializing_if = "Option::is_none")]
|
||||
pub meta: Option<HashMap<String, serde_json::Value>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct Root {
|
||||
pub uri: Url,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
pub name: Option<String>,
|
||||
}
|
||||
|
||||
@@ -14,7 +14,7 @@ use gpui::{
|
||||
actions, AppContext, AsyncAppContext, Context, Entity, EntityId, EventEmitter, Global, Model,
|
||||
ModelContext, Task, WeakModel,
|
||||
};
|
||||
use http_client::github::latest_github_release;
|
||||
use http_client::github::get_release_by_tag_name;
|
||||
use http_client::HttpClient;
|
||||
use language::{
|
||||
language_settings::{all_language_settings, language_settings, InlineCompletionProvider},
|
||||
@@ -989,12 +989,12 @@ async fn clear_copilot_dir() {
}

async fn get_copilot_lsp(http: Arc<dyn HttpClient>) -> anyhow::Result<PathBuf> {
    const SERVER_PATH: &str = "dist/agent.js";
    const SERVER_PATH: &str = "dist/language-server.js";

    /// Check for the latest copilot language server and download it if we haven't already
    async fn fetch_latest(http: Arc<dyn HttpClient>) -> anyhow::Result<PathBuf> {
        let release =
            latest_github_release("zed-industries/copilot", true, false, http.clone()).await?;
            get_release_by_tag_name("zed-industries/copilot", "v0.7.0", http.clone()).await?;

        let version_dir = &paths::copilot_dir().join(format!("copilot-{}", release.tag_name));

@@ -1229,8 +1229,10 @@ mod tests {
|
||||
Some(self)
|
||||
}
|
||||
|
||||
fn mtime(&self) -> Option<std::time::SystemTime> {
|
||||
unimplemented!()
|
||||
fn disk_state(&self) -> language::DiskState {
|
||||
language::DiskState::Present {
|
||||
mtime: std::time::UNIX_EPOCH,
|
||||
}
|
||||
}
|
||||
|
||||
fn path(&self) -> &Arc<Path> {
|
||||
@@ -1245,10 +1247,6 @@ mod tests {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
fn is_deleted(&self) -> bool {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
fn as_any(&self) -> &dyn std::any::Any {
|
||||
unimplemented!()
|
||||
}
|
||||
|
||||
@@ -1,13 +1,13 @@
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
use std::sync::OnceLock;
|
||||
use std::{sync::Arc, time::Duration};
|
||||
|
||||
use anyhow::{anyhow, Result};
|
||||
use chrono::DateTime;
|
||||
use fs::Fs;
|
||||
use futures::{io::BufReader, stream::BoxStream, AsyncBufReadExt, AsyncReadExt, StreamExt};
|
||||
use gpui::{AppContext, AsyncAppContext, Global};
|
||||
use http_client::{AsyncBody, HttpClient, HttpRequestExt, Method, Request as HttpRequest};
|
||||
use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest};
|
||||
use paths::home_dir;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::watch_config_file;
|
||||
@@ -254,7 +254,6 @@ impl CopilotChat {
|
||||
|
||||
pub async fn stream_completion(
|
||||
request: Request,
|
||||
low_speed_timeout: Option<Duration>,
|
||||
mut cx: AsyncAppContext,
|
||||
) -> Result<BoxStream<'static, Result<ResponseEvent>>> {
|
||||
let Some(this) = cx.update(|cx| Self::global(cx)).ok().flatten() else {
|
||||
@@ -274,8 +273,7 @@ impl CopilotChat {
|
||||
let token = match api_token {
|
||||
Some(api_token) if api_token.remaining_seconds() > 5 * 60 => api_token.clone(),
|
||||
_ => {
|
||||
let token =
|
||||
request_api_token(&oauth_token, client.clone(), low_speed_timeout).await?;
|
||||
let token = request_api_token(&oauth_token, client.clone()).await?;
|
||||
this.update(&mut cx, |this, cx| {
|
||||
this.api_token = Some(token.clone());
|
||||
cx.notify();
|
||||
@@ -284,25 +282,17 @@ impl CopilotChat {
|
||||
}
|
||||
};
|
||||
|
||||
stream_completion(client.clone(), token.api_key, request, low_speed_timeout).await
|
||||
stream_completion(client.clone(), token.api_key, request).await
|
||||
}
|
||||
}
|
||||
|
||||
async fn request_api_token(
|
||||
oauth_token: &str,
|
||||
client: Arc<dyn HttpClient>,
|
||||
low_speed_timeout: Option<Duration>,
|
||||
) -> Result<ApiToken> {
|
||||
let mut request_builder = HttpRequest::builder()
|
||||
async fn request_api_token(oauth_token: &str, client: Arc<dyn HttpClient>) -> Result<ApiToken> {
|
||||
let request_builder = HttpRequest::builder()
|
||||
.method(Method::GET)
|
||||
.uri(COPILOT_CHAT_AUTH_URL)
|
||||
.header("Authorization", format!("token {}", oauth_token))
|
||||
.header("Accept", "application/json");
|
||||
|
||||
if let Some(low_speed_timeout) = low_speed_timeout {
|
||||
request_builder = request_builder.read_timeout(low_speed_timeout);
|
||||
}
|
||||
|
||||
let request = request_builder.body(AsyncBody::empty())?;
|
||||
|
||||
let mut response = client.send(request).await?;
|
||||
@@ -340,9 +330,8 @@ async fn stream_completion(
|
||||
client: Arc<dyn HttpClient>,
|
||||
api_key: String,
|
||||
request: Request,
|
||||
low_speed_timeout: Option<Duration>,
|
||||
) -> Result<BoxStream<'static, Result<ResponseEvent>>> {
|
||||
let mut request_builder = HttpRequest::builder()
|
||||
let request_builder = HttpRequest::builder()
|
||||
.method(Method::POST)
|
||||
.uri(COPILOT_CHAT_COMPLETION_URL)
|
||||
.header(
|
||||
@@ -356,9 +345,6 @@ async fn stream_completion(
|
||||
.header("Content-Type", "application/json")
|
||||
.header("Copilot-Integration-Id", "vscode-chat");
|
||||
|
||||
if let Some(low_speed_timeout) = low_speed_timeout {
|
||||
request_builder = request_builder.read_timeout(low_speed_timeout);
|
||||
}
|
||||
let is_streaming = request.stream;
|
||||
|
||||
let request = request_builder.body(AsyncBody::from(serde_json::to_string(&request)?))?;
|
||||
|
||||
@@ -1 +0,0 @@
|
||||
|
||||
@@ -32,6 +32,7 @@ use std::{
|
||||
cmp::Ordering,
|
||||
mem,
|
||||
ops::Range,
|
||||
sync::Arc,
|
||||
};
|
||||
use theme::ActiveTheme;
|
||||
pub use toolbar_controls::ToolbarControls;
|
||||
@@ -726,6 +727,10 @@ impl Item for ProjectDiagnosticsEditor {
|
||||
self.excerpts.read(cx).is_dirty(cx)
|
||||
}
|
||||
|
||||
fn has_deleted_file(&self, cx: &AppContext) -> bool {
|
||||
self.excerpts.read(cx).has_deleted_file(cx)
|
||||
}
|
||||
|
||||
fn has_conflict(&self, cx: &AppContext) -> bool {
|
||||
self.excerpts.read(cx).has_conflict(cx)
|
||||
}
|
||||
@@ -790,10 +795,11 @@ const DIAGNOSTIC_HEADER: &str = "diagnostic header";
|
||||
fn diagnostic_header_renderer(diagnostic: Diagnostic) -> RenderBlock {
|
||||
let (message, code_ranges) = highlight_diagnostic_message(&diagnostic, None);
|
||||
let message: SharedString = message;
|
||||
Box::new(move |cx| {
|
||||
Arc::new(move |cx| {
|
||||
let highlight_style: HighlightStyle = cx.theme().colors().text_accent.into();
|
||||
h_flex()
|
||||
.id(DIAGNOSTIC_HEADER)
|
||||
.block_mouse_down()
|
||||
.h(2. * cx.line_height())
|
||||
.pl_10()
|
||||
.pr_5()
|
||||
|
||||
@@ -297,6 +297,7 @@ gpui::actions!(
|
||||
OpenExcerptsSplit,
|
||||
OpenProposedChangesEditor,
|
||||
OpenFile,
|
||||
OpenDocs,
|
||||
OpenPermalinkToLine,
|
||||
OpenUrl,
|
||||
Outdent,
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
use std::path::PathBuf;
|
||||
|
||||
use anyhow::Context as _;
|
||||
use gpui::{View, ViewContext, WindowContext};
|
||||
use language::Language;
|
||||
@@ -54,9 +52,9 @@ pub fn switch_source_header(
    cx.spawn(|_editor, mut cx| async move {
        let switch_source_header = switch_source_header_task
            .await
            .with_context(|| format!("Switch source/header LSP request for path \"{}\" failed", source_file))?;
            .with_context(|| format!("Switch source/header LSP request for path \"{source_file}\" failed"))?;
        if switch_source_header.0.is_empty() {
            log::info!("Clangd returned an empty string when requesting to switch source/header from \"{}\"", source_file);
            log::info!("Clangd returned an empty string when requesting to switch source/header from \"{source_file}\"");
            return Ok(());
        }

@@ -67,14 +65,17 @@ pub fn switch_source_header(
|
||||
)
|
||||
})?;
|
||||
|
||||
let path = goto.to_file_path().map_err(|()| {
|
||||
anyhow::anyhow!("URL conversion to file path failed for \"{goto}\"")
|
||||
})?;
|
||||
|
||||
workspace
|
||||
.update(&mut cx, |workspace, view_cx| {
|
||||
workspace.open_abs_path(PathBuf::from(goto.path()), false, view_cx)
|
||||
workspace.open_abs_path(path, false, view_cx)
|
||||
})
|
||||
.with_context(|| {
|
||||
format!(
|
||||
"Switch source/header could not open \"{}\" in workspace",
|
||||
goto.path()
|
||||
"Switch source/header could not open \"{goto}\" in workspace"
|
||||
)
|
||||
})?
|
||||
.await
|
||||
|
||||
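The switch from `PathBuf::from(goto.path())` to `goto.to_file_path()` matters because a URL path keeps its percent-encoding, while `to_file_path` decodes it and rejects non-`file://` URLs. A small sketch of the difference, assuming the `url` crate on a Unix-like host:

```rust
use std::path::PathBuf;

use url::Url;

fn main() {
    let goto = Url::parse("file:///home/user/My%20Project/main.cpp").unwrap();
    // `path()` keeps the percent-encoding...
    assert_eq!(goto.path(), "/home/user/My%20Project/main.cpp");
    // ...while `to_file_path()` decodes it (and returns Err(()) for non-file URLs).
    assert_eq!(
        goto.to_file_path().unwrap(),
        PathBuf::from("/home/user/My Project/main.cpp")
    );
}
```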
@@ -5,6 +5,7 @@ use gpui::{Task, ViewContext};
|
||||
|
||||
use crate::Editor;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct DebouncedDelay {
|
||||
task: Option<Task<()>>,
|
||||
cancel_channel: Option<oneshot::Sender<()>>,
|
||||
|
||||
@@ -36,7 +36,7 @@ use block_map::{BlockRow, BlockSnapshot};
|
||||
use collections::{HashMap, HashSet};
|
||||
pub use crease_map::*;
|
||||
pub use fold_map::{Fold, FoldId, FoldPlaceholder, FoldPoint};
|
||||
use fold_map::{FoldMap, FoldMapWriter, FoldOffset, FoldSnapshot};
|
||||
use fold_map::{FoldMap, FoldSnapshot};
|
||||
use gpui::{
|
||||
AnyElement, Font, HighlightStyle, LineLayout, Model, ModelContext, Pixels, UnderlineStyle,
|
||||
};
|
||||
@@ -65,8 +65,8 @@ use std::{
|
||||
};
|
||||
use sum_tree::{Bias, TreeMap};
|
||||
use tab_map::{TabMap, TabSnapshot};
|
||||
use text::{Edit, LineIndent};
|
||||
use ui::{div, px, IntoElement, ParentElement, SharedString, Styled, WindowContext};
|
||||
use text::LineIndent;
|
||||
use ui::{px, SharedString, WindowContext};
|
||||
use unicode_segmentation::UnicodeSegmentation;
|
||||
use wrap_map::{WrapMap, WrapSnapshot};
|
||||
|
||||
@@ -197,22 +197,86 @@ impl DisplayMap {
|
||||
other
|
||||
.folds_in_range(0..other.buffer_snapshot.len())
|
||||
.map(|fold| {
|
||||
(
|
||||
Crease::simple(
|
||||
fold.range.to_offset(&other.buffer_snapshot),
|
||||
fold.placeholder.clone(),
|
||||
)
|
||||
}),
|
||||
})
|
||||
.collect(),
|
||||
cx,
|
||||
);
|
||||
}
|
||||
|
||||
/// Creates folds for the given ranges.
|
||||
pub fn fold<T: ToOffset>(
|
||||
/// Creates folds for the given creases.
|
||||
pub fn fold<T: Clone + ToOffset>(
|
||||
&mut self,
|
||||
ranges: impl IntoIterator<Item = (Range<T>, FoldPlaceholder)>,
|
||||
creases: Vec<Crease<T>>,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) {
|
||||
self.update_fold_map(cx, |fold_map| fold_map.fold(ranges))
|
||||
let buffer_snapshot = self.buffer.read(cx).snapshot(cx);
|
||||
let edits = self.buffer_subscription.consume().into_inner();
|
||||
let tab_size = Self::tab_size(&self.buffer, cx);
|
||||
let (snapshot, edits) = self.inlay_map.sync(buffer_snapshot.clone(), edits);
|
||||
let (mut fold_map, snapshot, edits) = self.fold_map.write(snapshot, edits);
|
||||
let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
|
||||
let (snapshot, edits) = self
|
||||
.wrap_map
|
||||
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
|
||||
self.block_map.read(snapshot, edits);
|
||||
|
||||
let inline = creases.iter().filter_map(|crease| {
|
||||
if let Crease::Inline {
|
||||
range, placeholder, ..
|
||||
} = crease
|
||||
{
|
||||
Some((range.clone(), placeholder.clone()))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
});
|
||||
let (snapshot, edits) = fold_map.fold(inline);
|
||||
|
||||
let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
|
||||
let (snapshot, edits) = self
|
||||
.wrap_map
|
||||
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
|
||||
let mut block_map = self.block_map.write(snapshot, edits);
|
||||
let blocks = creases.into_iter().filter_map(|crease| {
|
||||
if let Crease::Block {
|
||||
range,
|
||||
block_height,
|
||||
render_block,
|
||||
block_style,
|
||||
block_priority,
|
||||
..
|
||||
} = crease
|
||||
{
|
||||
Some((
|
||||
range,
|
||||
render_block,
|
||||
block_height,
|
||||
block_style,
|
||||
block_priority,
|
||||
))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
});
|
||||
block_map.insert(
|
||||
blocks
|
||||
.into_iter()
|
||||
.map(|(range, render, height, style, priority)| {
|
||||
let start = buffer_snapshot.anchor_before(range.start);
|
||||
let end = buffer_snapshot.anchor_after(range.end);
|
||||
BlockProperties {
|
||||
placement: BlockPlacement::Replace(start..end),
|
||||
render,
|
||||
height,
|
||||
style,
|
||||
priority,
|
||||
}
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
/// Removes any folds with the given ranges.
|
||||
@@ -221,26 +285,6 @@ impl DisplayMap {
|
||||
ranges: impl IntoIterator<Item = Range<T>>,
|
||||
type_id: TypeId,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) {
|
||||
self.update_fold_map(cx, |fold_map| fold_map.remove_folds(ranges, type_id))
|
||||
}
|
||||
|
||||
/// Removes any folds whose ranges intersect any of the given ranges.
|
||||
pub fn unfold_intersecting<T: ToOffset>(
|
||||
&mut self,
|
||||
ranges: impl IntoIterator<Item = Range<T>>,
|
||||
inclusive: bool,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) {
|
||||
self.update_fold_map(cx, |fold_map| {
|
||||
fold_map.unfold_intersecting(ranges, inclusive)
|
||||
})
|
||||
}
|
||||
|
||||
fn update_fold_map(
|
||||
&mut self,
|
||||
cx: &mut ModelContext<Self>,
|
||||
callback: impl FnOnce(&mut FoldMapWriter) -> (FoldSnapshot, Vec<Edit<FoldOffset>>),
|
||||
) {
|
||||
let snapshot = self.buffer.read(cx).snapshot(cx);
|
||||
let edits = self.buffer_subscription.consume().into_inner();
|
||||
@@ -252,17 +296,49 @@ impl DisplayMap {
|
||||
.wrap_map
|
||||
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
|
||||
self.block_map.read(snapshot, edits);
|
||||
let (snapshot, edits) = callback(&mut fold_map);
|
||||
let (snapshot, edits) = fold_map.remove_folds(ranges, type_id);
|
||||
let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
|
||||
let (snapshot, edits) = self
|
||||
.wrap_map
|
||||
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
|
||||
self.block_map.write(snapshot, edits);
|
||||
}
|
||||
|
||||
/// Removes any folds whose ranges intersect any of the given ranges.
|
||||
pub fn unfold_intersecting<T: ToOffset>(
|
||||
&mut self,
|
||||
ranges: impl IntoIterator<Item = Range<T>>,
|
||||
inclusive: bool,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) {
|
||||
let snapshot = self.buffer.read(cx).snapshot(cx);
|
||||
let offset_ranges = ranges
|
||||
.into_iter()
|
||||
.map(|range| range.start.to_offset(&snapshot)..range.end.to_offset(&snapshot))
|
||||
.collect::<Vec<_>>();
|
||||
let edits = self.buffer_subscription.consume().into_inner();
|
||||
let tab_size = Self::tab_size(&self.buffer, cx);
|
||||
let (snapshot, edits) = self.inlay_map.sync(snapshot, edits);
|
||||
let (mut fold_map, snapshot, edits) = self.fold_map.write(snapshot, edits);
|
||||
let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
|
||||
let (snapshot, edits) = self
|
||||
.wrap_map
|
||||
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
|
||||
self.block_map.read(snapshot, edits);
|
||||
|
||||
let (snapshot, edits) =
|
||||
fold_map.unfold_intersecting(offset_ranges.iter().cloned(), inclusive);
|
||||
let (snapshot, edits) = self.tab_map.sync(snapshot, edits, tab_size);
|
||||
let (snapshot, edits) = self
|
||||
.wrap_map
|
||||
.update(cx, |map, cx| map.sync(snapshot, edits, cx));
|
||||
let mut block_map = self.block_map.write(snapshot, edits);
|
||||
block_map.remove_intersecting_replace_blocks(offset_ranges, inclusive);
|
||||
}
|
||||
|
||||
pub fn insert_creases(
|
||||
&mut self,
|
||||
creases: impl IntoIterator<Item = Crease>,
|
||||
creases: impl IntoIterator<Item = Crease<Anchor>>,
|
||||
cx: &mut ModelContext<Self>,
|
||||
) -> Vec<CreaseId> {
|
||||
let snapshot = self.buffer.read(cx).snapshot(cx);
|
||||
@@ -465,11 +541,17 @@ pub struct HighlightStyles {
|
||||
pub suggestion: Option<HighlightStyle>,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
pub enum ChunkReplacement {
|
||||
Renderer(ChunkRenderer),
|
||||
Str(SharedString),
|
||||
}
|
||||
|
||||
pub struct HighlightedChunk<'a> {
|
||||
pub text: &'a str,
|
||||
pub style: Option<HighlightStyle>,
|
||||
pub is_tab: bool,
|
||||
pub renderer: Option<ChunkRenderer>,
|
||||
pub replacement: Option<ChunkReplacement>,
|
||||
}
|
||||
|
||||
impl<'a> HighlightedChunk<'a> {
|
||||
@@ -481,7 +563,7 @@ impl<'a> HighlightedChunk<'a> {
|
||||
let mut text = self.text;
|
||||
let style = self.style;
|
||||
let is_tab = self.is_tab;
|
||||
let renderer = self.renderer;
|
||||
let renderer = self.replacement;
|
||||
iter::from_fn(move || {
|
||||
let mut prefix_len = 0;
|
||||
while let Some(&ch) = chars.peek() {
|
||||
@@ -497,30 +579,33 @@ impl<'a> HighlightedChunk<'a> {
|
||||
text: prefix,
|
||||
style,
|
||||
is_tab,
|
||||
renderer: renderer.clone(),
|
||||
replacement: renderer.clone(),
|
||||
});
|
||||
}
|
||||
chars.next();
|
||||
let (prefix, suffix) = text.split_at(ch.len_utf8());
|
||||
text = suffix;
|
||||
if let Some(replacement) = replacement(ch) {
|
||||
let background = editor_style.status.hint_background;
|
||||
let underline = editor_style.status.hint;
|
||||
let invisible_highlight = HighlightStyle {
|
||||
background_color: Some(editor_style.status.hint_background),
|
||||
underline: Some(UnderlineStyle {
|
||||
color: Some(editor_style.status.hint),
|
||||
thickness: px(1.),
|
||||
wavy: false,
|
||||
}),
|
||||
..Default::default()
|
||||
};
|
||||
let invisible_style = if let Some(mut style) = style {
|
||||
style.highlight(invisible_highlight);
|
||||
style
|
||||
} else {
|
||||
invisible_highlight
|
||||
};
|
||||
return Some(HighlightedChunk {
|
||||
text: prefix,
|
||||
style: None,
|
||||
style: Some(invisible_style),
|
||||
is_tab: false,
|
||||
renderer: Some(ChunkRenderer {
|
||||
render: Arc::new(move |_| {
|
||||
div()
|
||||
.child(replacement)
|
||||
.bg(background)
|
||||
.text_decoration_1()
|
||||
.text_decoration_color(underline)
|
||||
.into_any_element()
|
||||
}),
|
||||
constrain_width: false,
|
||||
}),
|
||||
replacement: Some(ChunkReplacement::Str(replacement.into())),
|
||||
});
|
||||
} else {
|
||||
let invisible_highlight = HighlightStyle {
|
||||
@@ -543,7 +628,7 @@ impl<'a> HighlightedChunk<'a> {
|
||||
text: prefix,
|
||||
style: Some(invisible_style),
|
||||
is_tab: false,
|
||||
renderer: renderer.clone(),
|
||||
replacement: renderer.clone(),
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -555,7 +640,7 @@ impl<'a> HighlightedChunk<'a> {
|
||||
text: remainder,
|
||||
style,
|
||||
is_tab,
|
||||
renderer: renderer.clone(),
|
||||
replacement: renderer.clone(),
|
||||
})
|
||||
} else {
|
||||
None
|
||||
@@ -596,7 +681,7 @@ impl DisplaySnapshot {
|
||||
) -> impl Iterator<Item = Option<MultiBufferRow>> + '_ {
|
||||
self.block_snapshot
|
||||
.buffer_rows(BlockRow(start_row.0))
|
||||
.map(|row| row.map(|row| MultiBufferRow(row.0)))
|
||||
.map(|row| row.map(MultiBufferRow))
|
||||
}
|
||||
|
||||
pub fn max_buffer_row(&self) -> MultiBufferRow {
|
||||
@@ -819,7 +904,7 @@ impl DisplaySnapshot {
|
||||
text: chunk.text,
|
||||
style: highlight_style,
|
||||
is_tab: chunk.is_tab,
|
||||
renderer: chunk.renderer,
|
||||
replacement: chunk.renderer.map(ChunkReplacement::Renderer),
|
||||
}
|
||||
.highlight_invisibles(editor_style)
|
||||
})
|
||||
@@ -987,7 +1072,12 @@ impl DisplaySnapshot {
|
||||
}
|
||||
|
||||
pub fn is_line_folded(&self, buffer_row: MultiBufferRow) -> bool {
|
||||
self.fold_snapshot.is_line_folded(buffer_row)
|
||||
self.block_snapshot.is_line_replaced(buffer_row)
|
||||
|| self.fold_snapshot.is_line_folded(buffer_row)
|
||||
}
|
||||
|
||||
pub fn is_line_replaced(&self, buffer_row: MultiBufferRow) -> bool {
|
||||
self.block_snapshot.is_line_replaced(buffer_row)
|
||||
}
|
||||
|
||||
pub fn is_block_line(&self, display_row: DisplayRow) -> bool {
|
||||
@@ -1061,19 +1151,42 @@ impl DisplaySnapshot {
|
||||
.unwrap_or(false)
|
||||
}
|
||||
|
||||
pub fn foldable_range(
|
||||
&self,
|
||||
buffer_row: MultiBufferRow,
|
||||
) -> Option<(Range<Point>, FoldPlaceholder)> {
|
||||
pub fn crease_for_buffer_row(&self, buffer_row: MultiBufferRow) -> Option<Crease<Point>> {
|
||||
let start = MultiBufferPoint::new(buffer_row.0, self.buffer_snapshot.line_len(buffer_row));
|
||||
if let Some(crease) = self
|
||||
.crease_snapshot
|
||||
.query_row(buffer_row, &self.buffer_snapshot)
|
||||
{
|
||||
Some((
|
||||
crease.range.to_point(&self.buffer_snapshot),
|
||||
crease.placeholder.clone(),
|
||||
))
|
||||
match crease {
|
||||
Crease::Inline {
|
||||
range,
|
||||
placeholder,
|
||||
render_toggle,
|
||||
render_trailer,
|
||||
metadata,
|
||||
} => Some(Crease::Inline {
|
||||
range: range.to_point(&self.buffer_snapshot),
|
||||
placeholder: placeholder.clone(),
|
||||
render_toggle: render_toggle.clone(),
|
||||
render_trailer: render_trailer.clone(),
|
||||
metadata: metadata.clone(),
|
||||
}),
|
||||
Crease::Block {
|
||||
range,
|
||||
block_height,
|
||||
block_style,
|
||||
render_block,
|
||||
block_priority,
|
||||
render_toggle,
|
||||
} => Some(Crease::Block {
|
||||
range: range.to_point(&self.buffer_snapshot),
|
||||
block_height: *block_height,
|
||||
block_style: *block_style,
|
||||
render_block: render_block.clone(),
|
||||
block_priority: *block_priority,
|
||||
render_toggle: render_toggle.clone(),
|
||||
}),
|
||||
}
|
||||
} else if self.starts_indent(MultiBufferRow(start.row))
|
||||
&& !self.is_line_folded(MultiBufferRow(start.row))
|
||||
{
|
||||
@@ -1110,7 +1223,13 @@ impl DisplaySnapshot {
|
||||
.line_len(MultiBufferRow(row_before_line_breaks.row)),
|
||||
);
|
||||
|
||||
Some((start..row_before_line_breaks, self.fold_placeholder.clone()))
|
||||
Some(Crease::Inline {
|
||||
range: start..row_before_line_breaks,
|
||||
placeholder: self.fold_placeholder.clone(),
|
||||
render_toggle: None,
|
||||
render_trailer: None,
|
||||
metadata: None,
|
||||
})
|
||||
} else {
|
||||
None
|
||||
}
|
||||
@@ -1418,7 +1537,7 @@ pub mod tests {
|
||||
placement,
|
||||
style: BlockStyle::Fixed,
|
||||
height,
|
||||
render: Box::new(|_| div().into_any()),
|
||||
render: Arc::new(|_| div().into_any()),
|
||||
priority,
|
||||
}
|
||||
})
|
||||
@@ -1457,7 +1576,8 @@ pub mod tests {
|
||||
map.fold(
|
||||
ranges
|
||||
.into_iter()
|
||||
.map(|range| (range, FoldPlaceholder::test())),
|
||||
.map(|range| Crease::simple(range, FoldPlaceholder::test()))
|
||||
.collect(),
|
||||
cx,
|
||||
);
|
||||
});
|
||||
@@ -1832,7 +1952,7 @@ pub mod tests {
|
||||
|
||||
map.update(cx, |map, cx| {
|
||||
map.fold(
|
||||
vec![(
|
||||
vec![Crease::simple(
|
||||
MultiBufferPoint::new(0, 6)..MultiBufferPoint::new(3, 2),
|
||||
FoldPlaceholder::test(),
|
||||
)],
|
||||
@@ -1922,7 +2042,7 @@ pub mod tests {
|
||||
),
|
||||
height: 1,
|
||||
style: BlockStyle::Sticky,
|
||||
render: Box::new(|_| div().into_any()),
|
||||
render: Arc::new(|_| div().into_any()),
|
||||
priority: 0,
|
||||
}],
|
||||
cx,
|
||||
@@ -2028,7 +2148,7 @@ pub mod tests {
|
||||
),
|
||||
height: 1,
|
||||
style: BlockStyle::Sticky,
|
||||
render: Box::new(|_| div().into_any()),
|
||||
render: Arc::new(|_| div().into_any()),
|
||||
priority: 0,
|
||||
}],
|
||||
cx,
|
||||
@@ -2104,7 +2224,7 @@ pub mod tests {
|
||||
),
|
||||
height: 4,
|
||||
style: BlockStyle::Fixed,
|
||||
render: Box::new(|_| div().into_any()),
|
||||
render: Arc::new(|_| div().into_any()),
|
||||
priority: 0,
|
||||
}],
|
||||
cx,
|
||||
@@ -2253,7 +2373,7 @@ pub mod tests {
|
||||
|
||||
map.update(cx, |map, cx| {
|
||||
map.fold(
|
||||
vec![(
|
||||
vec![Crease::simple(
|
||||
MultiBufferPoint::new(0, 6)..MultiBufferPoint::new(3, 2),
|
||||
FoldPlaceholder::test(),
|
||||
)],
|
||||
@@ -2452,7 +2572,7 @@ pub mod tests {
|
||||
snapshot.anchor_before(Point::new(2, 0))..snapshot.anchor_after(Point::new(3, 3));
|
||||
|
||||
map.crease_map.insert(
|
||||
[Crease::new(
|
||||
[Crease::inline(
|
||||
range,
|
||||
FoldPlaceholder::test(),
|
||||
|_row, _status, _toggle, _cx| div(),
|
||||
|
||||
@@ -7,7 +7,7 @@ use collections::{Bound, HashMap, HashSet};
|
||||
use gpui::{AnyElement, EntityId, Pixels, WindowContext};
|
||||
use language::{Chunk, Patch, Point};
|
||||
use multi_buffer::{
|
||||
Anchor, ExcerptId, ExcerptInfo, MultiBufferRow, MultiBufferSnapshot, ToPoint as _,
|
||||
Anchor, ExcerptId, ExcerptInfo, MultiBufferRow, MultiBufferSnapshot, ToOffset, ToPoint as _,
|
||||
};
|
||||
use parking_lot::Mutex;
|
||||
use std::{
|
||||
@@ -77,7 +77,7 @@ pub struct BlockRow(pub(super) u32);
#[derive(Copy, Clone, Debug, Default, Eq, Ord, PartialOrd, PartialEq)]
struct WrapRow(u32);

pub type RenderBlock = Box<dyn Send + FnMut(&mut BlockContext) -> AnyElement>;
pub type RenderBlock = Arc<dyn Send + Sync + Fn(&mut BlockContext) -> AnyElement>;

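Changing `RenderBlock` from `Box<dyn FnMut>` to `Arc<dyn Fn + Send + Sync>` is what lets the same render callback be cloned and shared (for example between a crease and the block map) instead of being owned by exactly one block. A small sketch of the difference with a toy callback type:

```rust
use std::sync::Arc;

// Toy stand-in for `RenderBlock`: shared, immutable, thread-safe callbacks.
type Render = Arc<dyn Fn(&str) -> String + Send + Sync>;

fn main() {
    let render: Render = Arc::new(|name| format!("<div>{name}</div>"));
    let shared = Arc::clone(&render); // cheap clone; both handles point at one closure
    assert_eq!((*render)("crease"), (*shared)("crease"));
}
```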
#[derive(Clone, Debug, Eq, PartialEq)]
|
||||
pub enum BlockPlacement<T> {
|
||||
@@ -352,6 +352,13 @@ impl Block {
|
||||
Block::ExcerptBoundary { next_excerpt, .. } => next_excerpt.is_none(),
|
||||
}
|
||||
}
|
||||
|
||||
fn is_replacement(&self) -> bool {
|
||||
match self {
|
||||
Block::Custom(block) => matches!(block.placement, BlockPlacement::Replace(_)),
|
||||
Block::ExcerptBoundary { .. } => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Debug for Block {
|
||||
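The `RenderBlock` alias changes from `Box<dyn Send + FnMut(...)>` to `Arc<dyn Send + Sync + Fn(...)>`, which is what lets block render callbacks be cloned and shared (for example by the new `Crease::Block` variant) instead of being owned by exactly one block. A minimal, self-contained sketch of the difference, using stand-in types rather than the editor's real `BlockContext`/`AnyElement`:

```rust
use std::sync::Arc;

// Stand-ins for the editor types; the real callbacks take `&mut BlockContext`
// and return an `AnyElement`.
struct BlockContext;
struct AnyElement;

// Hypothetical aliases mirroring the old and new shapes of `RenderBlock`.
type OldRenderBlock = Box<dyn Send + FnMut(&mut BlockContext) -> AnyElement>;
type NewRenderBlock = Arc<dyn Send + Sync + Fn(&mut BlockContext) -> AnyElement>;

fn main() {
    // A `Box<dyn FnMut>` has a single owner: handing it to one block consumes it.
    let _old: OldRenderBlock = Box::new(|_cx| AnyElement);

    // An `Arc<dyn Fn>` can be cloned, so the same renderer can back several
    // blocks (or a `Crease::Block`) and be invoked through a shared reference.
    let new: NewRenderBlock = Arc::new(|_cx| AnyElement);
    let for_first_block = new.clone();
    let for_second_block = new.clone();
    let _ = (for_first_block, for_second_block);
}
```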
@@ -1119,6 +1126,64 @@ impl<'a> BlockMapWriter<'a> {
|
||||
.retain(|id, _| !block_ids.contains(id));
|
||||
self.0.sync(wrap_snapshot, edits);
|
||||
}
|
||||
|
||||
pub fn remove_intersecting_replace_blocks<T>(
|
||||
&mut self,
|
||||
ranges: impl IntoIterator<Item = Range<T>>,
|
||||
inclusive: bool,
|
||||
) where
|
||||
T: ToOffset,
|
||||
{
|
||||
let wrap_snapshot = self.0.wrap_snapshot.borrow();
|
||||
let mut blocks_to_remove = HashSet::default();
|
||||
for range in ranges {
|
||||
let range = range.start.to_offset(wrap_snapshot.buffer_snapshot())
|
||||
..range.end.to_offset(wrap_snapshot.buffer_snapshot());
|
||||
for block in self.blocks_intersecting_buffer_range(range, inclusive) {
|
||||
if matches!(block.placement, BlockPlacement::Replace(_)) {
|
||||
blocks_to_remove.insert(block.id);
|
||||
}
|
||||
}
|
||||
}
|
||||
drop(wrap_snapshot);
|
||||
self.remove(blocks_to_remove);
|
||||
}
|
||||
|
||||
fn blocks_intersecting_buffer_range(
|
||||
&self,
|
||||
range: Range<usize>,
|
||||
inclusive: bool,
|
||||
) -> &[Arc<CustomBlock>] {
|
||||
let wrap_snapshot = self.0.wrap_snapshot.borrow();
|
||||
let buffer = wrap_snapshot.buffer_snapshot();
|
||||
let start_block_ix = match self.0.custom_blocks.binary_search_by(|probe| {
|
||||
probe
|
||||
.end()
|
||||
.to_offset(buffer)
|
||||
.cmp(&range.start)
|
||||
.then(if inclusive {
|
||||
Ordering::Greater
|
||||
} else {
|
||||
Ordering::Less
|
||||
})
|
||||
}) {
|
||||
Ok(ix) | Err(ix) => ix,
|
||||
};
|
||||
let end_block_ix = match self.0.custom_blocks.binary_search_by(|probe| {
|
||||
probe
|
||||
.start()
|
||||
.to_offset(buffer)
|
||||
.cmp(&range.end)
|
||||
.then(if inclusive {
|
||||
Ordering::Less
|
||||
} else {
|
||||
Ordering::Greater
|
||||
})
|
||||
}) {
|
||||
Ok(ix) | Err(ix) => ix,
|
||||
};
|
||||
&self.0.custom_blocks[start_block_ix..end_block_ix]
|
||||
}
|
||||
}
|
||||
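`blocks_intersecting_buffer_range` finds the slice of custom blocks (kept sorted by position) that intersects a buffer range, using two binary searches whose comparators never return `Equal`; the `inclusive` flag decides whether blocks that merely touch an endpoint count. A self-contained sketch of the same technique over plain `Range<usize>` values (a hypothetical helper, not the editor's API):

```rust
use std::cmp::Ordering;
use std::ops::Range;

/// Return the sub-slice of `blocks` (sorted by start, non-overlapping) that
/// intersects `query`. With `inclusive`, ranges that only touch an endpoint
/// are still returned.
fn intersecting(blocks: &[Range<usize>], query: Range<usize>, inclusive: bool) -> &[Range<usize>] {
    // First block whose end is not before the query start.
    let start_ix = blocks
        .binary_search_by(|probe| {
            probe.end.cmp(&query.start).then(if inclusive {
                Ordering::Greater
            } else {
                Ordering::Less
            })
        })
        .unwrap_or_else(|ix| ix);
    // First block whose start is past the query end.
    let end_ix = blocks
        .binary_search_by(|probe| {
            probe.start.cmp(&query.end).then(if inclusive {
                Ordering::Less
            } else {
                Ordering::Greater
            })
        })
        .unwrap_or_else(|ix| ix);
    &blocks[start_ix..end_ix]
}

fn main() {
    let blocks = vec![0..2, 4..6, 8..10];
    assert_eq!(intersecting(&blocks, 5..8, false).to_vec(), vec![4..6]);
    assert_eq!(intersecting(&blocks, 5..8, true).to_vec(), vec![4..6, 8..10]);
    println!("ok");
}
```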
|
||||
impl BlockSnapshot {
|
||||
@@ -1298,6 +1363,21 @@ impl BlockSnapshot {
         cursor.item().map_or(false, |t| t.block.is_some())
     }
 
+    pub(super) fn is_line_replaced(&self, row: MultiBufferRow) -> bool {
+        let wrap_point = self
+            .wrap_snapshot
+            .make_wrap_point(Point::new(row.0, 0), Bias::Left);
+        let mut cursor = self.transforms.cursor::<(WrapRow, BlockRow)>(&());
+        cursor.seek(&WrapRow(wrap_point.row()), Bias::Right, &());
+        cursor.item().map_or(false, |transform| {
+            if let Some(Block::Custom(block)) = transform.block.as_ref() {
+                matches!(block.placement, BlockPlacement::Replace(_))
+            } else {
+                false
+            }
+        })
+    }
+
     pub fn clip_point(&self, point: BlockPoint, bias: Bias) -> BlockPoint {
         let mut cursor = self.transforms.cursor::<(BlockRow, WrapRow)>(&());
         cursor.seek(&BlockRow(point.row), Bias::Right, &());
@@ -1515,7 +1595,7 @@ impl<'a> Iterator for BlockChunks<'a> {
|
||||
}
|
||||
|
||||
impl<'a> Iterator for BlockBufferRows<'a> {
|
||||
type Item = Option<BlockRow>;
|
||||
type Item = Option<u32>;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
if self.started {
|
||||
@@ -1538,16 +1618,25 @@ impl<'a> Iterator for BlockBufferRows<'a> {
|
||||
}
|
||||
}
|
||||
|
||||
if self.transforms.item()?.block.is_none() {
|
||||
let transform = self.transforms.item()?;
|
||||
if transform
|
||||
.block
|
||||
.as_ref()
|
||||
.map_or(true, |block| block.is_replacement())
|
||||
{
|
||||
self.input_buffer_rows.seek(self.transforms.start().1 .0);
|
||||
}
|
||||
}
|
||||
|
||||
let transform = self.transforms.item()?;
|
||||
if transform.block.is_some() {
|
||||
Some(None)
|
||||
if let Some(block) = transform.block.as_ref() {
|
||||
if block.is_replacement() && self.transforms.start().0 == self.output_row {
|
||||
Some(self.input_buffer_rows.next().unwrap())
|
||||
} else {
|
||||
Some(None)
|
||||
}
|
||||
} else {
|
||||
Some(self.input_buffer_rows.next().unwrap().map(BlockRow))
|
||||
Some(self.input_buffer_rows.next().unwrap())
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1709,21 +1798,21 @@ mod tests {
|
||||
style: BlockStyle::Fixed,
|
||||
placement: BlockPlacement::Above(buffer_snapshot.anchor_after(Point::new(1, 0))),
|
||||
height: 1,
|
||||
render: Box::new(|_| div().into_any()),
|
||||
render: Arc::new(|_| div().into_any()),
|
||||
priority: 0,
|
||||
},
|
||||
BlockProperties {
|
||||
style: BlockStyle::Fixed,
|
||||
placement: BlockPlacement::Above(buffer_snapshot.anchor_after(Point::new(1, 2))),
|
||||
height: 2,
|
||||
render: Box::new(|_| div().into_any()),
|
||||
render: Arc::new(|_| div().into_any()),
|
||||
priority: 0,
|
||||
},
|
||||
BlockProperties {
|
||||
style: BlockStyle::Fixed,
|
||||
placement: BlockPlacement::Below(buffer_snapshot.anchor_after(Point::new(3, 3))),
|
||||
height: 3,
|
||||
render: Box::new(|_| div().into_any()),
|
||||
render: Arc::new(|_| div().into_any()),
|
||||
priority: 0,
|
||||
},
|
||||
]);
|
||||
@@ -1821,10 +1910,7 @@ mod tests {
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
snapshot
|
||||
.buffer_rows(BlockRow(0))
|
||||
.map(|row| row.map(|r| r.0))
|
||||
.collect::<Vec<_>>(),
|
||||
snapshot.buffer_rows(BlockRow(0)).collect::<Vec<_>>(),
|
||||
&[
|
||||
Some(0),
|
||||
None,
|
||||
@@ -1960,21 +2046,21 @@ mod tests {
|
||||
style: BlockStyle::Fixed,
|
||||
placement: BlockPlacement::Above(buffer_snapshot.anchor_after(Point::new(1, 0))),
|
||||
height: 1,
|
||||
render: Box::new(|_| div().into_any()),
|
||||
render: Arc::new(|_| div().into_any()),
|
||||
priority: 0,
|
||||
},
|
||||
BlockProperties {
|
||||
style: BlockStyle::Fixed,
|
||||
placement: BlockPlacement::Above(buffer_snapshot.anchor_after(Point::new(1, 2))),
|
||||
height: 2,
|
||||
render: Box::new(|_| div().into_any()),
|
||||
render: Arc::new(|_| div().into_any()),
|
||||
priority: 0,
|
||||
},
|
||||
BlockProperties {
|
||||
style: BlockStyle::Fixed,
|
||||
placement: BlockPlacement::Below(buffer_snapshot.anchor_after(Point::new(3, 3))),
|
||||
height: 3,
|
||||
render: Box::new(|_| div().into_any()),
|
||||
render: Arc::new(|_| div().into_any()),
|
||||
priority: 0,
|
||||
},
|
||||
]);
|
||||
@@ -2062,14 +2148,14 @@ mod tests {
|
||||
BlockProperties {
|
||||
style: BlockStyle::Fixed,
|
||||
placement: BlockPlacement::Above(buffer_snapshot.anchor_after(Point::new(1, 12))),
|
||||
render: Box::new(|_| div().into_any()),
|
||||
render: Arc::new(|_| div().into_any()),
|
||||
height: 1,
|
||||
priority: 0,
|
||||
},
|
||||
BlockProperties {
|
||||
style: BlockStyle::Fixed,
|
||||
placement: BlockPlacement::Below(buffer_snapshot.anchor_after(Point::new(1, 1))),
|
||||
render: Box::new(|_| div().into_any()),
|
||||
render: Arc::new(|_| div().into_any()),
|
||||
height: 1,
|
||||
priority: 0,
|
||||
},
|
||||
@@ -2109,7 +2195,7 @@ mod tests {
|
||||
..buffer_snapshot.anchor_before(Point::new(3, 1)),
|
||||
),
|
||||
height: 4,
|
||||
render: Box::new(|_| div().into_any()),
|
||||
render: Arc::new(|_| div().into_any()),
|
||||
priority: 0,
|
||||
}]);
|
||||
|
||||
@@ -2162,14 +2248,14 @@ mod tests {
|
||||
style: BlockStyle::Fixed,
|
||||
placement: BlockPlacement::Above(buffer_snapshot.anchor_after(Point::new(1, 3))),
|
||||
height: 1,
|
||||
render: Box::new(|_| div().into_any()),
|
||||
render: Arc::new(|_| div().into_any()),
|
||||
priority: 0,
|
||||
},
|
||||
BlockProperties {
|
||||
style: BlockStyle::Fixed,
|
||||
placement: BlockPlacement::Below(buffer_snapshot.anchor_after(Point::new(6, 2))),
|
||||
height: 1,
|
||||
render: Box::new(|_| div().into_any()),
|
||||
render: Arc::new(|_| div().into_any()),
|
||||
priority: 0,
|
||||
},
|
||||
]);
|
||||
@@ -2183,21 +2269,21 @@ mod tests {
|
||||
style: BlockStyle::Fixed,
|
||||
placement: BlockPlacement::Below(buffer_snapshot.anchor_after(Point::new(1, 3))),
|
||||
height: 1,
|
||||
render: Box::new(|_| div().into_any()),
|
||||
render: Arc::new(|_| div().into_any()),
|
||||
priority: 0,
|
||||
},
|
||||
BlockProperties {
|
||||
style: BlockStyle::Fixed,
|
||||
placement: BlockPlacement::Above(buffer_snapshot.anchor_after(Point::new(2, 1))),
|
||||
height: 1,
|
||||
render: Box::new(|_| div().into_any()),
|
||||
render: Arc::new(|_| div().into_any()),
|
||||
priority: 0,
|
||||
},
|
||||
BlockProperties {
|
||||
style: BlockStyle::Fixed,
|
||||
placement: BlockPlacement::Above(buffer_snapshot.anchor_after(Point::new(6, 1))),
|
||||
height: 1,
|
||||
render: Box::new(|_| div().into_any()),
|
||||
render: Arc::new(|_| div().into_any()),
|
||||
priority: 0,
|
||||
},
|
||||
]);
|
||||
@@ -2302,7 +2388,7 @@ mod tests {
|
||||
style: BlockStyle::Fixed,
|
||||
placement,
|
||||
height,
|
||||
render: Box::new(|_| div().into_any()),
|
||||
render: Arc::new(|_| div().into_any()),
|
||||
priority: 0,
|
||||
}
|
||||
})
|
||||
@@ -2321,7 +2407,7 @@ mod tests {
|
||||
placement: props.placement.clone(),
|
||||
height: props.height,
|
||||
style: props.style,
|
||||
render: Box::new(|_| div().into_any()),
|
||||
render: Arc::new(|_| div().into_any()),
|
||||
priority: 0,
|
||||
}));
|
||||
}
|
||||
@@ -2409,6 +2495,7 @@ mod tests {
|
||||
let mut expected_buffer_rows = Vec::new();
|
||||
let mut expected_text = String::new();
|
||||
let mut expected_block_positions = Vec::new();
|
||||
let mut expected_replaced_buffer_rows = HashSet::default();
|
||||
let input_text = wraps_snapshot.text();
|
||||
|
||||
// Loop over the input lines, creating (N - 1) empty lines for
|
||||
@@ -2422,6 +2509,9 @@ mod tests {
|
||||
let mut block_row = 0;
|
||||
while let Some((wrap_row, input_line)) = input_text_lines.next() {
|
||||
let wrap_row = wrap_row as u32;
|
||||
let multibuffer_row = wraps_snapshot
|
||||
.to_point(WrapPoint::new(wrap_row, 0), Bias::Left)
|
||||
.row;
|
||||
|
||||
// Create empty lines for the above block
|
||||
while let Some((placement, block)) = sorted_blocks_iter.peek() {
|
||||
@@ -2451,30 +2541,33 @@ mod tests {
|
||||
{
|
||||
if wrap_row >= replace_range.start.0 {
|
||||
is_in_replace_block = true;
|
||||
|
||||
if wrap_row == replace_range.start.0 {
|
||||
expected_buffer_rows.push(input_buffer_rows[multibuffer_row as usize]);
|
||||
}
|
||||
|
||||
if wrap_row == replace_range.end.0 {
|
||||
expected_block_positions.push((block_row, block.id()));
|
||||
if block.height() > 0 {
|
||||
let text = "\n".repeat((block.height() - 1) as usize);
|
||||
if block_row > 0 {
|
||||
expected_text.push('\n');
|
||||
}
|
||||
expected_text.push_str(&text);
|
||||
for _ in 0..block.height() {
|
||||
expected_buffer_rows.push(None);
|
||||
}
|
||||
block_row += block.height();
|
||||
let text = "\n".repeat((block.height() - 1) as usize);
|
||||
if block_row > 0 {
|
||||
expected_text.push('\n');
|
||||
}
|
||||
expected_text.push_str(&text);
|
||||
|
||||
for _ in 1..block.height() {
|
||||
expected_buffer_rows.push(None);
|
||||
}
|
||||
block_row += block.height();
|
||||
|
||||
sorted_blocks_iter.next();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if !is_in_replace_block {
|
||||
let buffer_row = input_buffer_rows[wraps_snapshot
|
||||
.to_point(WrapPoint::new(wrap_row, 0), Bias::Left)
|
||||
.row as usize];
|
||||
|
||||
if is_in_replace_block {
|
||||
expected_replaced_buffer_rows.insert(MultiBufferRow(multibuffer_row));
|
||||
} else {
|
||||
let buffer_row = input_buffer_rows[multibuffer_row as usize];
|
||||
let soft_wrapped = wraps_snapshot
|
||||
.to_tab_point(WrapPoint::new(wrap_row, 0))
|
||||
.column()
|
||||
@@ -2543,9 +2636,10 @@ mod tests {
|
||||
assert_eq!(
|
||||
blocks_snapshot
|
||||
.buffer_rows(BlockRow(start_row as u32))
|
||||
.map(|row| row.map(|r| r.0))
|
||||
.collect::<Vec<_>>(),
|
||||
&expected_buffer_rows[start_row..]
|
||||
&expected_buffer_rows[start_row..],
|
||||
"incorrect buffer_rows starting at row {:?}",
|
||||
start_row
|
||||
);
|
||||
}
|
||||
|
||||
@@ -2666,6 +2760,16 @@ mod tests {
|
||||
block_point.column += c.len_utf8() as u32;
|
||||
}
|
||||
}
|
||||
|
||||
for buffer_row in 0..=buffer_snapshot.max_point().row {
|
||||
let buffer_row = MultiBufferRow(buffer_row);
|
||||
assert_eq!(
|
||||
blocks_snapshot.is_line_replaced(buffer_row),
|
||||
expected_replaced_buffer_rows.contains(&buffer_row),
|
||||
"incorrect is_line_replaced({:?})",
|
||||
buffer_row
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -2,12 +2,12 @@ use collections::HashMap;
|
||||
use gpui::{AnyElement, IntoElement};
|
||||
use multi_buffer::{Anchor, AnchorRangeExt, MultiBufferRow, MultiBufferSnapshot, ToPoint};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{cmp::Ordering, ops::Range, sync::Arc};
|
||||
use std::{cmp::Ordering, fmt::Debug, ops::Range, sync::Arc};
|
||||
use sum_tree::{Bias, SeekTarget, SumTree};
|
||||
use text::Point;
|
||||
use ui::{IconName, SharedString, WindowContext};
|
||||
|
||||
use crate::FoldPlaceholder;
|
||||
use crate::{BlockStyle, FoldPlaceholder, RenderBlock};
|
||||
|
||||
#[derive(Copy, Clone, Default, Debug, Eq, PartialEq, PartialOrd, Ord, Hash)]
|
||||
pub struct CreaseId(usize);
|
||||
@@ -45,15 +45,15 @@ impl CreaseSnapshot {
|
||||
&'a self,
|
||||
row: MultiBufferRow,
|
||||
snapshot: &'a MultiBufferSnapshot,
|
||||
) -> Option<&'a Crease> {
|
||||
) -> Option<&'a Crease<Anchor>> {
|
||||
let start = snapshot.anchor_before(Point::new(row.0, 0));
|
||||
let mut cursor = self.creases.cursor::<ItemSummary>(snapshot);
|
||||
cursor.seek(&start, Bias::Left, snapshot);
|
||||
while let Some(item) = cursor.item() {
|
||||
match Ord::cmp(&item.crease.range.start.to_point(snapshot).row, &row.0) {
|
||||
match Ord::cmp(&item.crease.range().start.to_point(snapshot).row, &row.0) {
|
||||
Ordering::Less => cursor.next(snapshot),
|
||||
Ordering::Equal => {
|
||||
if item.crease.range.start.is_valid(snapshot) {
|
||||
if item.crease.range().start.is_valid(snapshot) {
|
||||
return Some(&item.crease);
|
||||
} else {
|
||||
cursor.next(snapshot);
|
||||
@@ -69,7 +69,7 @@ impl CreaseSnapshot {
|
||||
&'a self,
|
||||
range: Range<MultiBufferRow>,
|
||||
snapshot: &'a MultiBufferSnapshot,
|
||||
) -> impl 'a + Iterator<Item = &'a Crease> {
|
||||
) -> impl 'a + Iterator<Item = &'a Crease<Anchor>> {
|
||||
let start = snapshot.anchor_before(Point::new(range.start.0, 0));
|
||||
let mut cursor = self.creases.cursor::<ItemSummary>(snapshot);
|
||||
cursor.seek(&start, Bias::Left, snapshot);
|
||||
@@ -77,8 +77,9 @@ impl CreaseSnapshot {
|
||||
std::iter::from_fn(move || {
|
||||
while let Some(item) = cursor.item() {
|
||||
cursor.next(snapshot);
|
||||
let crease_start = item.crease.range.start.to_point(snapshot);
|
||||
let crease_end = item.crease.range.end.to_point(snapshot);
|
||||
let crease_range = item.crease.range();
|
||||
let crease_start = crease_range.start.to_point(snapshot);
|
||||
let crease_end = crease_range.end.to_point(snapshot);
|
||||
if crease_end.row > range.end.0 {
|
||||
continue;
|
||||
}
|
||||
@@ -99,8 +100,9 @@ impl CreaseSnapshot {
|
||||
|
||||
cursor.next(snapshot);
|
||||
while let Some(item) = cursor.item() {
|
||||
let start_point = item.crease.range.start.to_point(snapshot);
|
||||
let end_point = item.crease.range.end.to_point(snapshot);
|
||||
let crease_range = item.crease.range();
|
||||
let start_point = crease_range.start.to_point(snapshot);
|
||||
let end_point = crease_range.end.to_point(snapshot);
|
||||
results.push((item.id, start_point..end_point));
|
||||
cursor.next(snapshot);
|
||||
}
|
||||
@@ -123,12 +125,22 @@ type RenderTrailerFn =
     Arc<dyn Send + Sync + Fn(MultiBufferRow, bool, &mut WindowContext) -> AnyElement>;
 
 #[derive(Clone)]
-pub struct Crease {
-    pub range: Range<Anchor>,
-    pub placeholder: FoldPlaceholder,
-    pub render_toggle: RenderToggleFn,
-    pub render_trailer: RenderTrailerFn,
-    pub metadata: Option<CreaseMetadata>,
+pub enum Crease<T> {
+    Inline {
+        range: Range<T>,
+        placeholder: FoldPlaceholder,
+        render_toggle: Option<RenderToggleFn>,
+        render_trailer: Option<RenderTrailerFn>,
+        metadata: Option<CreaseMetadata>,
+    },
+    Block {
+        range: Range<T>,
+        block_height: u32,
+        block_style: BlockStyle,
+        render_block: RenderBlock,
+        block_priority: usize,
+        render_toggle: Option<RenderToggleFn>,
+    },
 }
 
 /// Metadata about a [`Crease`], that is used for serialization.
@@ -138,9 +150,30 @@ pub struct CreaseMetadata {
     pub label: SharedString,
 }
 
-impl Crease {
-    pub fn new<RenderToggle, ToggleElement, RenderTrailer, TrailerElement>(
-        range: Range<Anchor>,
+impl<T> Crease<T> {
+    pub fn simple(range: Range<T>, placeholder: FoldPlaceholder) -> Self {
+        Crease::Inline {
+            range,
+            placeholder,
+            render_toggle: None,
+            render_trailer: None,
+            metadata: None,
+        }
+    }
+
+    pub fn block(range: Range<T>, height: u32, style: BlockStyle, render: RenderBlock) -> Self {
+        Self::Block {
+            range,
+            block_height: height,
+            block_style: style,
+            render_block: render,
+            block_priority: 0,
+            render_toggle: None,
+        }
+    }
+
+    pub fn inline<RenderToggle, ToggleElement, RenderTrailer, TrailerElement>(
+        range: Range<T>,
         placeholder: FoldPlaceholder,
         render_toggle: RenderToggle,
         render_trailer: RenderTrailer,
|
||||
+ 'static,
|
||||
TrailerElement: IntoElement,
|
||||
{
|
||||
Crease {
|
||||
Crease::Inline {
|
||||
range,
|
||||
placeholder,
|
||||
render_toggle: Arc::new(move |row, folded, toggle, cx| {
|
||||
render_toggle: Some(Arc::new(move |row, folded, toggle, cx| {
|
||||
render_toggle(row, folded, toggle, cx).into_any_element()
|
||||
}),
|
||||
render_trailer: Arc::new(move |row, folded, cx| {
|
||||
})),
|
||||
render_trailer: Some(Arc::new(move |row, folded, cx| {
|
||||
render_trailer(row, folded, cx).into_any_element()
|
||||
}),
|
||||
})),
|
||||
metadata: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn with_metadata(mut self, metadata: CreaseMetadata) -> Self {
|
||||
self.metadata = Some(metadata);
|
||||
self
|
||||
pub fn with_metadata(self, metadata: CreaseMetadata) -> Self {
|
||||
match self {
|
||||
Crease::Inline {
|
||||
range,
|
||||
placeholder,
|
||||
render_toggle,
|
||||
render_trailer,
|
||||
..
|
||||
} => Crease::Inline {
|
||||
range,
|
||||
placeholder,
|
||||
render_toggle,
|
||||
render_trailer,
|
||||
metadata: Some(metadata),
|
||||
},
|
||||
Crease::Block { .. } => self,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn range(&self) -> &Range<T> {
|
||||
match self {
|
||||
Crease::Inline { range, .. } => range,
|
||||
Crease::Block { range, .. } => range,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Debug for Crease {
|
||||
impl<T> std::fmt::Debug for Crease<T>
|
||||
where
|
||||
T: Debug,
|
||||
{
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
f.debug_struct("Crease")
|
||||
.field("range", &self.range)
|
||||
.finish()
|
||||
match self {
|
||||
Crease::Inline {
|
||||
range, metadata, ..
|
||||
} => f
|
||||
.debug_struct("Crease::Inline")
|
||||
.field("range", range)
|
||||
.field("metadata", metadata)
|
||||
.finish_non_exhaustive(),
|
||||
Crease::Block {
|
||||
range,
|
||||
block_height,
|
||||
..
|
||||
} => f
|
||||
.debug_struct("Crease::Block")
|
||||
.field("range", range)
|
||||
.field("height", block_height)
|
||||
.finish_non_exhaustive(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
struct CreaseItem {
|
||||
id: CreaseId,
|
||||
crease: Crease,
|
||||
crease: Crease<Anchor>,
|
||||
}
|
||||
|
||||
impl CreaseMap {
|
||||
@@ -204,7 +276,7 @@ impl CreaseMap {
|
||||
|
||||
pub fn insert(
|
||||
&mut self,
|
||||
creases: impl IntoIterator<Item = Crease>,
|
||||
creases: impl IntoIterator<Item = Crease<Anchor>>,
|
||||
snapshot: &MultiBufferSnapshot,
|
||||
) -> Vec<CreaseId> {
|
||||
let mut new_ids = Vec::new();
|
||||
@@ -212,11 +284,12 @@ impl CreaseMap {
|
||||
let mut new_creases = SumTree::new(snapshot);
|
||||
let mut cursor = self.snapshot.creases.cursor::<ItemSummary>(snapshot);
|
||||
for crease in creases {
|
||||
new_creases.append(cursor.slice(&crease.range, Bias::Left, snapshot), snapshot);
|
||||
let crease_range = crease.range().clone();
|
||||
new_creases.append(cursor.slice(&crease_range, Bias::Left, snapshot), snapshot);
|
||||
|
||||
let id = self.next_id;
|
||||
self.next_id.0 += 1;
|
||||
self.id_to_range.insert(id, crease.range.clone());
|
||||
self.id_to_range.insert(id, crease_range);
|
||||
new_creases.push(CreaseItem { crease, id }, snapshot);
|
||||
new_ids.push(id);
|
||||
}
|
||||
@@ -293,7 +366,7 @@ impl sum_tree::Item for CreaseItem {
|
||||
|
||||
fn summary(&self, _cx: &MultiBufferSnapshot) -> Self::Summary {
|
||||
ItemSummary {
|
||||
range: self.crease.range.clone(),
|
||||
range: self.crease.range().clone(),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -326,13 +399,13 @@ mod test {
|
||||
|
||||
// Insert creases
|
||||
let creases = [
|
||||
Crease::new(
|
||||
Crease::inline(
|
||||
snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_after(Point::new(1, 5)),
|
||||
FoldPlaceholder::test(),
|
||||
|_row, _folded, _toggle, _cx| div(),
|
||||
|_row, _folded, _cx| div(),
|
||||
),
|
||||
Crease::new(
|
||||
Crease::inline(
|
||||
snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_after(Point::new(3, 5)),
|
||||
FoldPlaceholder::test(),
|
||||
|_row, _folded, _toggle, _cx| div(),
|
||||
@@ -372,19 +445,19 @@ mod test {
|
||||
let mut crease_map = CreaseMap::new(&snapshot);
|
||||
|
||||
let creases = [
|
||||
Crease::new(
|
||||
Crease::inline(
|
||||
snapshot.anchor_before(Point::new(1, 0))..snapshot.anchor_after(Point::new(1, 5)),
|
||||
FoldPlaceholder::test(),
|
||||
|_row, _folded, _toggle, _cx| div(),
|
||||
|_row, _folded, _cx| div(),
|
||||
),
|
||||
Crease::new(
|
||||
Crease::inline(
|
||||
snapshot.anchor_before(Point::new(3, 0))..snapshot.anchor_after(Point::new(3, 5)),
|
||||
FoldPlaceholder::test(),
|
||||
|_row, _folded, _toggle, _cx| div(),
|
||||
|_row, _folded, _cx| div(),
|
||||
),
|
||||
Crease::new(
|
||||
Crease::inline(
|
||||
snapshot.anchor_before(Point::new(5, 0))..snapshot.anchor_after(Point::new(5, 5)),
|
||||
FoldPlaceholder::test(),
|
||||
|_row, _folded, _toggle, _cx| div(),
|
||||
@@ -402,12 +475,12 @@ mod test {
|
||||
let range = MultiBufferRow(2)..MultiBufferRow(5);
|
||||
let creases: Vec<_> = crease_snapshot.creases_in_range(range, &snapshot).collect();
|
||||
assert_eq!(creases.len(), 1);
|
||||
assert_eq!(creases[0].range.start.to_point(&snapshot).row, 3);
|
||||
assert_eq!(creases[0].range().start.to_point(&snapshot).row, 3);
|
||||
|
||||
let range = MultiBufferRow(0)..MultiBufferRow(2);
|
||||
let creases: Vec<_> = crease_snapshot.creases_in_range(range, &snapshot).collect();
|
||||
assert_eq!(creases.len(), 1);
|
||||
assert_eq!(creases[0].range.start.to_point(&snapshot).row, 1);
|
||||
assert_eq!(creases[0].range().start.to_point(&snapshot).row, 1);
|
||||
|
||||
let range = MultiBufferRow(6)..MultiBufferRow(7);
|
||||
let creases: Vec<_> = crease_snapshot.creases_in_range(range, &snapshot).collect();
|
||||
|
||||
@@ -540,6 +540,15 @@ pub enum IsVimMode {
|
||||
No,
|
||||
}
|
||||
|
||||
pub trait ActiveLineTrailerProvider {
|
||||
fn render_active_line_trailer(
|
||||
&mut self,
|
||||
style: &EditorStyle,
|
||||
focus_handle: &FocusHandle,
|
||||
cx: &mut WindowContext,
|
||||
) -> Option<AnyElement>;
|
||||
}
|
||||
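`ActiveLineTrailerProvider` is a new extension point: a provider can render an element after the active (empty) line. A sketch of what an implementation might look like; it assumes the gpui/editor types named in the trait above and is not compilable outside the editor crate:

```rust
// Sketch only: `EditorStyle`, `FocusHandle`, `WindowContext`, `AnyElement`,
// `div`, and `IntoElement` come from the editor/gpui crates in this diff.
struct HintTrailer;

impl ActiveLineTrailerProvider for HintTrailer {
    fn render_active_line_trailer(
        &mut self,
        _style: &EditorStyle,
        _focus_handle: &FocusHandle,
        _cx: &mut WindowContext,
    ) -> Option<AnyElement> {
        // Only shown when the cursor sits on an empty line; see
        // `Editor::render_active_line_trailer` further down this diff.
        Some(div().child("press tab to accept").into_any_element())
    }
}

// Hypothetical registration at a call site:
// editor.set_active_line_trailer_provider(Some(HintTrailer), cx);
```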
|
||||
/// Zed's primary text input `View`, allowing users to edit a [`MultiBuffer`]
|
||||
///
|
||||
/// See the [module level documentation](self) for more information.
|
||||
@@ -667,6 +676,7 @@ pub struct Editor {
|
||||
next_scroll_position: NextScrollCursorCenterTopBottom,
|
||||
addons: HashMap<TypeId, Box<dyn Addon>>,
|
||||
_scroll_cursor_center_top_bottom_task: Task<()>,
|
||||
active_line_trailer_provider: Option<Box<dyn ActiveLineTrailerProvider>>,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, PartialEq, Eq, Default)]
|
||||
@@ -883,6 +893,7 @@ struct AutocloseRegion {
|
||||
struct SnippetState {
|
||||
ranges: Vec<Vec<Range<Anchor>>>,
|
||||
active_index: usize,
|
||||
choices: Vec<Option<Vec<String>>>,
|
||||
}
|
||||
|
||||
#[doc(hidden)]
|
||||
@@ -1000,7 +1011,7 @@ enum ContextMenuOrigin {
|
||||
GutterIndicator(DisplayRow),
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
#[derive(Clone, Debug)]
|
||||
struct CompletionsMenu {
|
||||
id: CompletionId,
|
||||
sort_completions: bool,
|
||||
@@ -1011,10 +1022,105 @@ struct CompletionsMenu {
|
||||
matches: Arc<[StringMatch]>,
|
||||
selected_item: usize,
|
||||
scroll_handle: UniformListScrollHandle,
|
||||
selected_completion_documentation_resolve_debounce: Arc<Mutex<DebouncedDelay>>,
|
||||
selected_completion_documentation_resolve_debounce: Option<Arc<Mutex<DebouncedDelay>>>,
|
||||
}
|
||||
|
||||
impl CompletionsMenu {
|
||||
fn new(
|
||||
id: CompletionId,
|
||||
sort_completions: bool,
|
||||
initial_position: Anchor,
|
||||
buffer: Model<Buffer>,
|
||||
completions: Box<[Completion]>,
|
||||
) -> Self {
|
||||
let match_candidates = completions
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(id, completion)| {
|
||||
StringMatchCandidate::new(
|
||||
id,
|
||||
completion.label.text[completion.label.filter_range.clone()].into(),
|
||||
)
|
||||
})
|
||||
.collect();
|
||||
|
||||
Self {
|
||||
id,
|
||||
sort_completions,
|
||||
initial_position,
|
||||
buffer,
|
||||
completions: Arc::new(RwLock::new(completions)),
|
||||
match_candidates,
|
||||
matches: Vec::new().into(),
|
||||
selected_item: 0,
|
||||
scroll_handle: UniformListScrollHandle::new(),
|
||||
selected_completion_documentation_resolve_debounce: Some(Arc::new(Mutex::new(
|
||||
DebouncedDelay::new(),
|
||||
))),
|
||||
}
|
||||
}
|
||||
|
||||
fn new_snippet_choices(
|
||||
id: CompletionId,
|
||||
sort_completions: bool,
|
||||
choices: &Vec<String>,
|
||||
selection: Range<Anchor>,
|
||||
buffer: Model<Buffer>,
|
||||
) -> Self {
|
||||
let completions = choices
|
||||
.iter()
|
||||
.map(|choice| Completion {
|
||||
old_range: selection.start.text_anchor..selection.end.text_anchor,
|
||||
new_text: choice.to_string(),
|
||||
label: CodeLabel {
|
||||
text: choice.to_string(),
|
||||
runs: Default::default(),
|
||||
filter_range: Default::default(),
|
||||
},
|
||||
server_id: LanguageServerId(usize::MAX),
|
||||
documentation: None,
|
||||
lsp_completion: Default::default(),
|
||||
confirm: None,
|
||||
})
|
||||
.collect();
|
||||
|
||||
let match_candidates = choices
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(id, completion)| StringMatchCandidate::new(id, completion.to_string()))
|
||||
.collect();
|
||||
let matches = choices
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(id, completion)| StringMatch {
|
||||
candidate_id: id,
|
||||
score: 1.,
|
||||
positions: vec![],
|
||||
string: completion.clone(),
|
||||
})
|
||||
.collect();
|
||||
Self {
|
||||
id,
|
||||
sort_completions,
|
||||
initial_position: selection.start,
|
||||
buffer,
|
||||
completions: Arc::new(RwLock::new(completions)),
|
||||
match_candidates,
|
||||
matches,
|
||||
selected_item: 0,
|
||||
scroll_handle: UniformListScrollHandle::new(),
|
||||
selected_completion_documentation_resolve_debounce: Some(Arc::new(Mutex::new(
|
||||
DebouncedDelay::new(),
|
||||
))),
|
||||
}
|
||||
}
|
||||
|
||||
fn suppress_documentation_resolution(mut self) -> Self {
|
||||
self.selected_completion_documentation_resolve_debounce
|
||||
.take();
|
||||
self
|
||||
}
|
||||
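The documentation-resolve debounce on `CompletionsMenu` is now optional, and `suppress_documentation_resolution` simply takes it out so snippet-choice menus never schedule documentation resolution. A small std-only sketch of the same pattern, with hypothetical names standing in for `DebouncedDelay` and the menu type:

```rust
use std::sync::{Arc, Mutex};

// Stand-in for the editor's `DebouncedDelay`.
struct DebouncedDelay;

struct Menu {
    // `None` means "never resolve documentation for this menu".
    documentation_debounce: Option<Arc<Mutex<DebouncedDelay>>>,
}

impl Menu {
    fn new() -> Self {
        Self {
            documentation_debounce: Some(Arc::new(Mutex::new(DebouncedDelay))),
        }
    }

    // Builder-style switch used when showing snippet choices: taking the
    // debouncer out disables documentation resolution entirely.
    fn suppress_documentation_resolution(mut self) -> Self {
        self.documentation_debounce.take();
        self
    }

    fn resolve_documentation(&self) {
        let Some(debounce) = self.documentation_debounce.as_ref() else {
            return; // suppressed
        };
        let _guard = debounce.lock().unwrap();
        // ...the debounced resolve task would be scheduled here...
    }
}

fn main() {
    let menu = Menu::new().suppress_documentation_resolution();
    menu.resolve_documentation(); // returns early, nothing scheduled
}
```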
|
||||
fn select_first(
|
||||
&mut self,
|
||||
provider: Option<&dyn CompletionProvider>,
|
||||
@@ -1115,6 +1221,12 @@ impl CompletionsMenu {
|
||||
let Some(provider) = provider else {
|
||||
return;
|
||||
};
|
||||
let Some(documentation_resolve) = self
|
||||
.selected_completion_documentation_resolve_debounce
|
||||
.as_ref()
|
||||
else {
|
||||
return;
|
||||
};
|
||||
|
||||
let resolve_task = provider.resolve_completions(
|
||||
self.buffer.clone(),
|
||||
@@ -1127,15 +1239,13 @@ impl CompletionsMenu {
|
||||
EditorSettings::get_global(cx).completion_documentation_secondary_query_debounce;
|
||||
let delay = Duration::from_millis(delay_ms);
|
||||
|
||||
self.selected_completion_documentation_resolve_debounce
|
||||
.lock()
|
||||
.fire_new(delay, cx, |_, cx| {
|
||||
cx.spawn(move |this, mut cx| async move {
|
||||
if let Some(true) = resolve_task.await.log_err() {
|
||||
this.update(&mut cx, |_, cx| cx.notify()).ok();
|
||||
}
|
||||
})
|
||||
});
|
||||
documentation_resolve.lock().fire_new(delay, cx, |_, cx| {
|
||||
cx.spawn(move |this, mut cx| async move {
|
||||
if let Some(true) = resolve_task.await.log_err() {
|
||||
this.update(&mut cx, |_, cx| cx.notify()).ok();
|
||||
}
|
||||
})
|
||||
});
|
||||
}
|
||||
|
||||
fn visible(&self) -> bool {
|
||||
@@ -1418,6 +1528,7 @@ impl CompletionsMenu {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
struct AvailableCodeAction {
|
||||
excerpt_id: ExcerptId,
|
||||
action: CodeAction,
|
||||
@@ -2104,6 +2215,7 @@ impl Editor {
|
||||
addons: HashMap::default(),
|
||||
_scroll_cursor_center_top_bottom_task: Task::ready(()),
|
||||
text_style_refinement: None,
|
||||
active_line_trailer_provider: None,
|
||||
};
|
||||
this.tasks_update_task = Some(this.refresh_runnables(cx));
|
||||
this._subscriptions.extend(project_subscriptions);
|
||||
@@ -2392,6 +2504,16 @@ impl Editor {
|
||||
self.refresh_inline_completion(false, false, cx);
|
||||
}
|
||||
|
||||
pub fn set_active_line_trailer_provider<T>(
|
||||
&mut self,
|
||||
provider: Option<T>,
|
||||
_cx: &mut ViewContext<Self>,
|
||||
) where
|
||||
T: ActiveLineTrailerProvider + 'static,
|
||||
{
|
||||
self.active_line_trailer_provider = provider.map(|provider| Box::new(provider) as Box<_>);
|
||||
}
|
||||
|
||||
pub fn placeholder_text(&self, _cx: &WindowContext) -> Option<&str> {
|
||||
self.placeholder_text.as_deref()
|
||||
}
|
||||
@@ -4386,6 +4508,10 @@ impl Editor {
|
||||
return;
|
||||
};
|
||||
|
||||
if !self.snippet_stack.is_empty() && self.context_menu.read().as_ref().is_some() {
|
||||
return;
|
||||
}
|
||||
|
||||
let position = self.selections.newest_anchor().head();
|
||||
let (buffer, buffer_position) =
|
||||
if let Some(output) = self.buffer.read(cx).text_anchor_for_position(position, cx) {
|
||||
@@ -4431,30 +4557,13 @@ impl Editor {
|
||||
})?;
|
||||
let completions = completions.await.log_err();
|
||||
let menu = if let Some(completions) = completions {
|
||||
let mut menu = CompletionsMenu {
|
||||
let mut menu = CompletionsMenu::new(
|
||||
id,
|
||||
sort_completions,
|
||||
initial_position: position,
|
||||
match_candidates: completions
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(id, completion)| {
|
||||
StringMatchCandidate::new(
|
||||
id,
|
||||
completion.label.text[completion.label.filter_range.clone()]
|
||||
.into(),
|
||||
)
|
||||
})
|
||||
.collect(),
|
||||
buffer: buffer.clone(),
|
||||
completions: Arc::new(RwLock::new(completions.into())),
|
||||
matches: Vec::new().into(),
|
||||
selected_item: 0,
|
||||
scroll_handle: UniformListScrollHandle::new(),
|
||||
selected_completion_documentation_resolve_debounce: Arc::new(Mutex::new(
|
||||
DebouncedDelay::new(),
|
||||
)),
|
||||
};
|
||||
position,
|
||||
buffer.clone(),
|
||||
completions.into(),
|
||||
);
|
||||
menu.filter(query.as_deref(), cx.background_executor().clone())
|
||||
.await;
|
||||
|
||||
@@ -4657,7 +4766,11 @@ impl Editor {
|
||||
self.transact(cx, |this, cx| {
|
||||
if let Some(mut snippet) = snippet {
|
||||
snippet.text = text.to_string();
|
||||
for tabstop in snippet.tabstops.iter_mut().flatten() {
|
||||
for tabstop in snippet
|
||||
.tabstops
|
||||
.iter_mut()
|
||||
.flat_map(|tabstop| tabstop.ranges.iter_mut())
|
||||
{
|
||||
tabstop.start -= common_prefix_len as isize;
|
||||
tabstop.end -= common_prefix_len as isize;
|
||||
}
|
||||
@@ -5693,6 +5806,27 @@ impl Editor {
|
||||
context_menu
|
||||
}
|
||||
|
||||
fn show_snippet_choices(
|
||||
&mut self,
|
||||
choices: &Vec<String>,
|
||||
selection: Range<Anchor>,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
if selection.start.buffer_id.is_none() {
|
||||
return;
|
||||
}
|
||||
let buffer_id = selection.start.buffer_id.unwrap();
|
||||
let buffer = self.buffer().read(cx).buffer(buffer_id);
|
||||
let id = post_inc(&mut self.next_completion_id);
|
||||
|
||||
if let Some(buffer) = buffer {
|
||||
*self.context_menu.write() = Some(ContextMenu::Completions(
|
||||
CompletionsMenu::new_snippet_choices(id, true, choices, selection, buffer)
|
||||
.suppress_documentation_resolution(),
|
||||
));
|
||||
}
|
||||
}
|
||||
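`show_snippet_choices` backs the new snippet placeholder choices; the choices come from LSP-style snippet syntax such as `${1|,i32,u32|}` (see `test_snippet_placeholder_choices` near the end of this diff). A small, self-contained sketch of extracting the choice list from that syntax; the helper name and parsing approach are illustrative only, not the editor's real snippet parser:

```rust
/// Minimal sketch (not the editor's parser): extract the choices of a
/// `${N|a,b,c|}` placeholder from a snippet body.
fn placeholder_choices(snippet: &str, index: u32) -> Option<Vec<String>> {
    let marker = format!("${{{}|", index);
    let start = snippet.find(&marker)? + marker.len();
    let end = start + snippet[start..].find("|}")?;
    Some(snippet[start..end].split(',').map(|s| s.to_string()).collect())
}

fn main() {
    // Matches the snippet used by `test_snippet_placeholder_choices`.
    let snippet = "type ${1|,i32,u32|} = $2";
    assert_eq!(
        placeholder_choices(snippet, 1),
        Some(vec!["".to_string(), "i32".to_string(), "u32".to_string()])
    );
    println!("ok");
}
```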
|
||||
pub fn insert_snippet(
|
||||
&mut self,
|
||||
insertion_ranges: &[Range<usize>],
|
||||
@@ -5702,6 +5836,7 @@ impl Editor {
|
||||
struct Tabstop<T> {
|
||||
is_end_tabstop: bool,
|
||||
ranges: Vec<Range<T>>,
|
||||
choices: Option<Vec<String>>,
|
||||
}
|
||||
|
||||
let tabstops = self.buffer.update(cx, |buffer, cx| {
|
||||
@@ -5721,10 +5856,11 @@ impl Editor {
|
||||
.tabstops
|
||||
.iter()
|
||||
.map(|tabstop| {
|
||||
let is_end_tabstop = tabstop.first().map_or(false, |tabstop| {
|
||||
let is_end_tabstop = tabstop.ranges.first().map_or(false, |tabstop| {
|
||||
tabstop.is_empty() && tabstop.start == snippet.text.len() as isize
|
||||
});
|
||||
let mut tabstop_ranges = tabstop
|
||||
.ranges
|
||||
.iter()
|
||||
.flat_map(|tabstop_range| {
|
||||
let mut delta = 0_isize;
|
||||
@@ -5746,6 +5882,7 @@ impl Editor {
|
||||
Tabstop {
|
||||
is_end_tabstop,
|
||||
ranges: tabstop_ranges,
|
||||
choices: tabstop.choices.clone(),
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
@@ -5755,16 +5892,29 @@ impl Editor {
|
||||
s.select_ranges(tabstop.ranges.iter().cloned());
|
||||
});
|
||||
|
||||
if let Some(choices) = &tabstop.choices {
|
||||
if let Some(selection) = tabstop.ranges.first() {
|
||||
self.show_snippet_choices(choices, selection.clone(), cx)
|
||||
}
|
||||
}
|
||||
|
||||
// If we're already at the last tabstop and it's at the end of the snippet,
|
||||
// we're done, we don't need to keep the state around.
|
||||
if !tabstop.is_end_tabstop {
|
||||
let choices = tabstops
|
||||
.iter()
|
||||
.map(|tabstop| tabstop.choices.clone())
|
||||
.collect();
|
||||
|
||||
let ranges = tabstops
|
||||
.into_iter()
|
||||
.map(|tabstop| tabstop.ranges)
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
self.snippet_stack.push(SnippetState {
|
||||
active_index: 0,
|
||||
ranges,
|
||||
choices,
|
||||
});
|
||||
}
|
||||
|
||||
@@ -5839,6 +5989,13 @@ impl Editor {
|
||||
self.change_selections(Some(Autoscroll::fit()), cx, |s| {
|
||||
s.select_anchor_ranges(current_ranges.iter().cloned())
|
||||
});
|
||||
|
||||
if let Some(choices) = &snippet.choices[snippet.active_index] {
|
||||
if let Some(selection) = current_ranges.first() {
|
||||
self.show_snippet_choices(&choices, selection.clone(), cx);
|
||||
}
|
||||
}
|
||||
|
||||
// If snippet state is not at the last tabstop, push it back on the stack
|
||||
if snippet.active_index + 1 < snippet.ranges.len() {
|
||||
self.snippet_stack.push(snippet);
|
||||
@@ -6779,7 +6936,7 @@ impl Editor {
|
||||
|
||||
let mut edits = Vec::new();
|
||||
let mut unfold_ranges = Vec::new();
|
||||
let mut refold_ranges = Vec::new();
|
||||
let mut refold_creases = Vec::new();
|
||||
|
||||
let selections = self.selections.all::<Point>(cx);
|
||||
let mut selections = selections.iter().peekable();
|
||||
@@ -6854,7 +7011,7 @@ impl Editor {
|
||||
let mut end = fold.range.end.to_point(&buffer);
|
||||
start.row -= row_delta;
|
||||
end.row -= row_delta;
|
||||
refold_ranges.push((start..end, fold.placeholder.clone()));
|
||||
refold_creases.push(Crease::simple(start..end, fold.placeholder.clone()));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -6870,7 +7027,7 @@ impl Editor {
|
||||
buffer.edit([(range, text)], None, cx);
|
||||
}
|
||||
});
|
||||
this.fold_ranges(refold_ranges, true, cx);
|
||||
this.fold_creases(refold_creases, true, cx);
|
||||
this.change_selections(Some(Autoscroll::fit()), cx, |s| {
|
||||
s.select(new_selections);
|
||||
})
|
||||
@@ -6883,7 +7040,7 @@ impl Editor {
|
||||
|
||||
let mut edits = Vec::new();
|
||||
let mut unfold_ranges = Vec::new();
|
||||
let mut refold_ranges = Vec::new();
|
||||
let mut refold_creases = Vec::new();
|
||||
|
||||
let selections = self.selections.all::<Point>(cx);
|
||||
let mut selections = selections.iter().peekable();
|
||||
@@ -6948,7 +7105,7 @@ impl Editor {
|
||||
let mut end = fold.range.end.to_point(&buffer);
|
||||
start.row += row_delta;
|
||||
end.row += row_delta;
|
||||
refold_ranges.push((start..end, fold.placeholder.clone()));
|
||||
refold_creases.push(Crease::simple(start..end, fold.placeholder.clone()));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -6964,7 +7121,7 @@ impl Editor {
|
||||
buffer.edit([(range, text)], None, cx);
|
||||
}
|
||||
});
|
||||
this.fold_ranges(refold_ranges, true, cx);
|
||||
this.fold_creases(refold_creases, true, cx);
|
||||
this.change_selections(Some(Autoscroll::fit()), cx, |s| s.select(new_selections));
|
||||
});
|
||||
}
|
||||
@@ -10421,7 +10578,7 @@ impl Editor {
|
||||
style: BlockStyle::Flex,
|
||||
placement: BlockPlacement::Below(range.start),
|
||||
height: 1,
|
||||
render: Box::new({
|
||||
render: Arc::new({
|
||||
let rename_editor = rename_editor.clone();
|
||||
move |cx: &mut BlockContext| {
|
||||
let mut text_style = cx.editor_style.text.clone();
|
||||
@@ -10431,6 +10588,7 @@ impl Editor {
|
||||
text_style = text_style.highlight(highlight_style);
|
||||
}
|
||||
div()
|
||||
.block_mouse_down()
|
||||
.pl(cx.anchor_x)
|
||||
.child(EditorElement::new(
|
||||
&rename_editor,
|
||||
@@ -10894,7 +11052,7 @@ impl Editor {
|
||||
}
|
||||
|
||||
pub fn fold(&mut self, _: &actions::Fold, cx: &mut ViewContext<Self>) {
|
||||
let mut fold_ranges = Vec::new();
|
||||
let mut to_fold = Vec::new();
|
||||
let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
|
||||
let selections = self.selections.all_adjusted(cx);
|
||||
|
||||
@@ -10906,12 +11064,10 @@ impl Editor {
|
||||
let mut found = false;
|
||||
let mut row = range.start.row;
|
||||
while row <= range.end.row {
|
||||
if let Some((foldable_range, fold_text)) =
|
||||
{ display_map.foldable_range(MultiBufferRow(row)) }
|
||||
{
|
||||
if let Some(crease) = display_map.crease_for_buffer_row(MultiBufferRow(row)) {
|
||||
found = true;
|
||||
row = foldable_range.end.row + 1;
|
||||
fold_ranges.push((foldable_range, fold_text));
|
||||
row = crease.range().end.row + 1;
|
||||
to_fold.push(crease);
|
||||
} else {
|
||||
row += 1
|
||||
}
|
||||
@@ -10922,11 +11078,9 @@ impl Editor {
|
||||
}
|
||||
|
||||
for row in (0..=range.start.row).rev() {
|
||||
if let Some((foldable_range, fold_text)) =
|
||||
display_map.foldable_range(MultiBufferRow(row))
|
||||
{
|
||||
if foldable_range.end.row >= buffer_start_row {
|
||||
fold_ranges.push((foldable_range, fold_text));
|
||||
if let Some(crease) = display_map.crease_for_buffer_row(MultiBufferRow(row)) {
|
||||
if crease.range().end.row >= buffer_start_row {
|
||||
to_fold.push(crease);
|
||||
if row <= range.start.row {
|
||||
break;
|
||||
}
|
||||
@@ -10935,26 +11089,29 @@ impl Editor {
|
||||
}
|
||||
}
|
||||
|
||||
self.fold_ranges(fold_ranges, true, cx);
|
||||
self.fold_creases(to_fold, true, cx);
|
||||
}
|
||||
|
||||
fn fold_at_level(&mut self, fold_at: &FoldAtLevel, cx: &mut ViewContext<Self>) {
|
||||
let fold_at_level = fold_at.level;
|
||||
let snapshot = self.buffer.read(cx).snapshot(cx);
|
||||
let mut fold_ranges = Vec::new();
|
||||
let mut to_fold = Vec::new();
|
||||
let mut stack = vec![(0, snapshot.max_buffer_row().0, 1)];
|
||||
|
||||
while let Some((mut start_row, end_row, current_level)) = stack.pop() {
|
||||
while start_row < end_row {
|
||||
match self.snapshot(cx).foldable_range(MultiBufferRow(start_row)) {
|
||||
Some(foldable_range) => {
|
||||
let nested_start_row = foldable_range.0.start.row + 1;
|
||||
let nested_end_row = foldable_range.0.end.row;
|
||||
match self
|
||||
.snapshot(cx)
|
||||
.crease_for_buffer_row(MultiBufferRow(start_row))
|
||||
{
|
||||
Some(crease) => {
|
||||
let nested_start_row = crease.range().start.row + 1;
|
||||
let nested_end_row = crease.range().end.row;
|
||||
|
||||
if current_level < fold_at_level {
|
||||
stack.push((nested_start_row, nested_end_row, current_level + 1));
|
||||
} else if current_level == fold_at_level {
|
||||
fold_ranges.push(foldable_range);
|
||||
to_fold.push(crease);
|
||||
}
|
||||
|
||||
start_row = nested_end_row + 1;
|
||||
@@ -10964,7 +11121,7 @@ impl Editor {
|
||||
}
|
||||
}
|
||||
|
||||
self.fold_ranges(fold_ranges, true, cx);
|
||||
self.fold_creases(to_fold, true, cx);
|
||||
}
|
||||
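`fold_at_level` now walks creases with an explicit stack of `(start_row, end_row, level)` frames, descending into a crease only while the current level is below the requested one. A self-contained sketch of that traversal over plain row ranges, with `crease_at` standing in for `DisplaySnapshot::crease_for_buffer_row`:

```rust
use std::ops::Range;

/// Collect the creases that sit at exactly `target_level` of nesting.
fn fold_at_level(
    max_row: u32,
    target_level: u32,
    crease_at: impl Fn(u32) -> Option<Range<u32>>,
) -> Vec<Range<u32>> {
    let mut to_fold = Vec::new();
    // (start_row, end_row, nesting level), as in `Editor::fold_at_level`.
    let mut stack = vec![(0, max_row, 1)];
    while let Some((mut row, end_row, level)) = stack.pop() {
        while row < end_row {
            match crease_at(row) {
                Some(range) => {
                    if level < target_level {
                        // Descend into the crease body one level deeper.
                        stack.push((range.start + 1, range.end, level + 1));
                    } else if level == target_level {
                        to_fold.push(range.clone());
                    }
                    // Skip past this crease either way.
                    row = range.end + 1;
                }
                None => row += 1,
            }
        }
    }
    to_fold
}

fn main() {
    // Two top-level creases; the first contains a nested one at rows 2..3.
    let creases = |row: u32| match row {
        0 => Some(0..5),
        2 => Some(2..3),
        7 => Some(7..9),
        _ => None,
    };
    assert_eq!(fold_at_level(10, 1, creases), vec![0..5, 7..9]);
    assert_eq!(fold_at_level(10, 2, creases), vec![2..3]);
    println!("ok");
}
```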
|
||||
pub fn fold_all(&mut self, _: &actions::FoldAll, cx: &mut ViewContext<Self>) {
|
||||
@@ -10972,16 +11129,18 @@ impl Editor {
|
||||
let snapshot = self.buffer.read(cx).snapshot(cx);
|
||||
|
||||
for row in 0..snapshot.max_buffer_row().0 {
|
||||
if let Some(foldable_range) = self.snapshot(cx).foldable_range(MultiBufferRow(row)) {
|
||||
if let Some(foldable_range) =
|
||||
self.snapshot(cx).crease_for_buffer_row(MultiBufferRow(row))
|
||||
{
|
||||
fold_ranges.push(foldable_range);
|
||||
}
|
||||
}
|
||||
|
||||
self.fold_ranges(fold_ranges, true, cx);
|
||||
self.fold_creases(fold_ranges, true, cx);
|
||||
}
|
||||
|
||||
pub fn fold_recursive(&mut self, _: &actions::FoldRecursive, cx: &mut ViewContext<Self>) {
|
||||
let mut fold_ranges = Vec::new();
|
||||
let mut to_fold = Vec::new();
|
||||
let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
|
||||
let selections = self.selections.all_adjusted(cx);
|
||||
|
||||
@@ -10992,11 +11151,9 @@ impl Editor {
|
||||
if range.start.row != range.end.row {
|
||||
let mut found = false;
|
||||
for row in range.start.row..=range.end.row {
|
||||
if let Some((foldable_range, fold_text)) =
|
||||
{ display_map.foldable_range(MultiBufferRow(row)) }
|
||||
{
|
||||
if let Some(crease) = display_map.crease_for_buffer_row(MultiBufferRow(row)) {
|
||||
found = true;
|
||||
fold_ranges.push((foldable_range, fold_text));
|
||||
to_fold.push(crease);
|
||||
}
|
||||
}
|
||||
if found {
|
||||
@@ -11005,11 +11162,9 @@ impl Editor {
|
||||
}
|
||||
|
||||
for row in (0..=range.start.row).rev() {
|
||||
if let Some((foldable_range, fold_text)) =
|
||||
display_map.foldable_range(MultiBufferRow(row))
|
||||
{
|
||||
if foldable_range.end.row >= buffer_start_row {
|
||||
fold_ranges.push((foldable_range, fold_text));
|
||||
if let Some(crease) = display_map.crease_for_buffer_row(MultiBufferRow(row)) {
|
||||
if crease.range().end.row >= buffer_start_row {
|
||||
to_fold.push(crease);
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
@@ -11017,21 +11172,21 @@ impl Editor {
|
||||
}
|
||||
}
|
||||
|
||||
self.fold_ranges(fold_ranges, true, cx);
|
||||
self.fold_creases(to_fold, true, cx);
|
||||
}
|
||||
|
||||
pub fn fold_at(&mut self, fold_at: &FoldAt, cx: &mut ViewContext<Self>) {
|
||||
let buffer_row = fold_at.buffer_row;
|
||||
let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
|
||||
|
||||
if let Some((fold_range, placeholder)) = display_map.foldable_range(buffer_row) {
|
||||
if let Some(crease) = display_map.crease_for_buffer_row(buffer_row) {
|
||||
let autoscroll = self
|
||||
.selections
|
||||
.all::<Point>(cx)
|
||||
.iter()
|
||||
.any(|selection| fold_range.overlaps(&selection.range()));
|
||||
.any(|selection| crease.range().overlaps(&selection.range()));
|
||||
|
||||
self.fold_ranges([(fold_range, placeholder)], autoscroll, cx);
|
||||
self.fold_creases(vec![crease], autoscroll, cx);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -11092,81 +11247,78 @@ impl Editor {
|
||||
|
||||
pub fn unfold_all(&mut self, _: &actions::UnfoldAll, cx: &mut ViewContext<Self>) {
|
||||
let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
|
||||
self.unfold_ranges(
|
||||
&[Point::zero()..display_map.max_point().to_point(&display_map)],
|
||||
true,
|
||||
true,
|
||||
cx,
|
||||
);
|
||||
self.unfold_ranges(&[0..display_map.buffer_snapshot.len()], true, true, cx);
|
||||
}
|
||||
|
||||
pub fn fold_selected_ranges(&mut self, _: &FoldSelectedRanges, cx: &mut ViewContext<Self>) {
|
||||
let selections = self.selections.all::<Point>(cx);
|
||||
let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
|
||||
let line_mode = self.selections.line_mode;
|
||||
let ranges = selections.into_iter().map(|s| {
|
||||
if line_mode {
|
||||
let start = Point::new(s.start.row, 0);
|
||||
let end = Point::new(
|
||||
s.end.row,
|
||||
display_map
|
||||
.buffer_snapshot
|
||||
.line_len(MultiBufferRow(s.end.row)),
|
||||
);
|
||||
(start..end, display_map.fold_placeholder.clone())
|
||||
} else {
|
||||
(s.start..s.end, display_map.fold_placeholder.clone())
|
||||
}
|
||||
});
|
||||
self.fold_ranges(ranges, true, cx);
|
||||
let ranges = selections
|
||||
.into_iter()
|
||||
.map(|s| {
|
||||
if line_mode {
|
||||
let start = Point::new(s.start.row, 0);
|
||||
let end = Point::new(
|
||||
s.end.row,
|
||||
display_map
|
||||
.buffer_snapshot
|
||||
.line_len(MultiBufferRow(s.end.row)),
|
||||
);
|
||||
Crease::simple(start..end, display_map.fold_placeholder.clone())
|
||||
} else {
|
||||
Crease::simple(s.start..s.end, display_map.fold_placeholder.clone())
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
self.fold_creases(ranges, true, cx);
|
||||
}
|
||||
|
||||
pub fn fold_ranges<T: ToOffset + Clone>(
|
||||
pub fn fold_creases<T: ToOffset + Clone>(
|
||||
&mut self,
|
||||
ranges: impl IntoIterator<Item = (Range<T>, FoldPlaceholder)>,
|
||||
creases: Vec<Crease<T>>,
|
||||
auto_scroll: bool,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) {
|
||||
let mut fold_ranges = Vec::new();
|
||||
if creases.is_empty() {
|
||||
return;
|
||||
}
|
||||
|
||||
let mut buffers_affected = HashMap::default();
|
||||
let multi_buffer = self.buffer().read(cx);
|
||||
for (fold_range, fold_text) in ranges {
|
||||
for crease in &creases {
|
||||
if let Some((_, buffer, _)) =
|
||||
multi_buffer.excerpt_containing(fold_range.start.clone(), cx)
|
||||
multi_buffer.excerpt_containing(crease.range().start.clone(), cx)
|
||||
{
|
||||
buffers_affected.insert(buffer.read(cx).remote_id(), buffer);
|
||||
};
|
||||
fold_ranges.push((fold_range, fold_text));
|
||||
}
|
||||
|
||||
let mut ranges = fold_ranges.into_iter().peekable();
|
||||
if ranges.peek().is_some() {
|
||||
self.display_map.update(cx, |map, cx| map.fold(ranges, cx));
|
||||
self.display_map.update(cx, |map, cx| map.fold(creases, cx));
|
||||
|
||||
if auto_scroll {
|
||||
self.request_autoscroll(Autoscroll::fit(), cx);
|
||||
}
|
||||
|
||||
for buffer in buffers_affected.into_values() {
|
||||
self.sync_expanded_diff_hunks(buffer, cx);
|
||||
}
|
||||
|
||||
cx.notify();
|
||||
|
||||
if let Some(active_diagnostics) = self.active_diagnostics.take() {
|
||||
// Clear diagnostics block when folding a range that contains it.
|
||||
let snapshot = self.snapshot(cx);
|
||||
if snapshot.intersects_fold(active_diagnostics.primary_range.start) {
|
||||
drop(snapshot);
|
||||
self.active_diagnostics = Some(active_diagnostics);
|
||||
self.dismiss_diagnostics(cx);
|
||||
} else {
|
||||
self.active_diagnostics = Some(active_diagnostics);
|
||||
}
|
||||
}
|
||||
|
||||
self.scrollbar_marker_state.dirty = true;
|
||||
if auto_scroll {
|
||||
self.request_autoscroll(Autoscroll::fit(), cx);
|
||||
}
|
||||
|
||||
for buffer in buffers_affected.into_values() {
|
||||
self.sync_expanded_diff_hunks(buffer, cx);
|
||||
}
|
||||
|
||||
cx.notify();
|
||||
|
||||
if let Some(active_diagnostics) = self.active_diagnostics.take() {
|
||||
// Clear diagnostics block when folding a range that contains it.
|
||||
let snapshot = self.snapshot(cx);
|
||||
if snapshot.intersects_fold(active_diagnostics.primary_range.start) {
|
||||
drop(snapshot);
|
||||
self.active_diagnostics = Some(active_diagnostics);
|
||||
self.dismiss_diagnostics(cx);
|
||||
} else {
|
||||
self.active_diagnostics = Some(active_diagnostics);
|
||||
}
|
||||
}
|
||||
|
||||
self.scrollbar_marker_state.dirty = true;
|
||||
}
|
||||
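`Editor::fold_ranges` becomes `Editor::fold_creases`, taking `Vec<Crease<T>>` instead of `(Range<T>, FoldPlaceholder)` pairs, so call sites wrap each range in `Crease::simple`. A call-shape sketch mirroring the updated tests further down in this diff (assumes an `Editor` and a `ViewContext` in scope; not compilable on its own):

```rust
editor.fold_creases(
    vec![
        Crease::simple(Point::new(1, 0)..Point::new(2, 0), FoldPlaceholder::test()),
        Crease::simple(Point::new(3, 0)..Point::new(4, 0), FoldPlaceholder::test()),
    ],
    true, // auto_scroll
    cx,
);
```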
|
||||
/// Removes any folds whose ranges intersect any of the given ranges.
|
||||
@@ -11215,6 +11367,7 @@ impl Editor {
|
||||
}
|
||||
|
||||
self.display_map.update(cx, update);
|
||||
|
||||
if auto_scroll {
|
||||
self.request_autoscroll(Autoscroll::fit(), cx);
|
||||
}
|
||||
@@ -11317,7 +11470,7 @@ impl Editor {
|
||||
|
||||
pub fn insert_creases(
|
||||
&mut self,
|
||||
creases: impl IntoIterator<Item = Crease>,
|
||||
creases: impl IntoIterator<Item = Crease<Anchor>>,
|
||||
cx: &mut ViewContext<Self>,
|
||||
) -> Vec<CreaseId> {
|
||||
self.display_map
|
||||
@@ -11717,6 +11870,29 @@ impl Editor {
|
||||
&& self.has_blame_entries(cx)
|
||||
}
|
||||
|
||||
pub fn render_active_line_trailer(
|
||||
&mut self,
|
||||
style: &EditorStyle,
|
||||
cx: &mut WindowContext,
|
||||
) -> Option<AnyElement> {
|
||||
let selection = self.selections.newest::<Point>(cx);
|
||||
if !selection.is_empty() {
|
||||
return None;
|
||||
};
|
||||
|
||||
let snapshot = self.buffer.read(cx).snapshot(cx);
|
||||
let buffer_row = MultiBufferRow(selection.head().row);
|
||||
|
||||
if snapshot.line_len(buffer_row) != 0 || self.has_active_inline_completion(cx) {
|
||||
return None;
|
||||
}
|
||||
|
||||
let focus_handle = self.focus_handle.clone();
|
||||
self.active_line_trailer_provider
|
||||
.as_mut()?
|
||||
.render_active_line_trailer(style, &focus_handle, cx)
|
||||
}
|
||||
|
||||
fn has_blame_entries(&self, cx: &mut WindowContext) -> bool {
|
||||
self.blame()
|
||||
.map_or(false, |blame| blame.read(cx).has_generated_entries())
|
||||
@@ -14056,7 +14232,7 @@ impl EditorSnapshot {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn render_fold_toggle(
|
||||
pub fn render_crease_toggle(
|
||||
&self,
|
||||
buffer_row: MultiBufferRow,
|
||||
row_contains_cursor: bool,
|
||||
@@ -14064,34 +14240,38 @@ impl EditorSnapshot {
|
||||
cx: &mut WindowContext,
|
||||
) -> Option<AnyElement> {
|
||||
let folded = self.is_line_folded(buffer_row);
|
||||
let mut is_foldable = false;
|
||||
|
||||
if let Some(crease) = self
|
||||
.crease_snapshot
|
||||
.query_row(buffer_row, &self.buffer_snapshot)
|
||||
{
|
||||
let toggle_callback = Arc::new(move |folded, cx: &mut WindowContext| {
|
||||
if folded {
|
||||
editor.update(cx, |editor, cx| {
|
||||
editor.fold_at(&crate::FoldAt { buffer_row }, cx)
|
||||
});
|
||||
} else {
|
||||
editor.update(cx, |editor, cx| {
|
||||
editor.unfold_at(&crate::UnfoldAt { buffer_row }, cx)
|
||||
});
|
||||
is_foldable = true;
|
||||
match crease {
|
||||
Crease::Inline { render_toggle, .. } | Crease::Block { render_toggle, .. } => {
|
||||
if let Some(render_toggle) = render_toggle {
|
||||
let toggle_callback = Arc::new(move |folded, cx: &mut WindowContext| {
|
||||
if folded {
|
||||
editor.update(cx, |editor, cx| {
|
||||
editor.fold_at(&crate::FoldAt { buffer_row }, cx)
|
||||
});
|
||||
} else {
|
||||
editor.update(cx, |editor, cx| {
|
||||
editor.unfold_at(&crate::UnfoldAt { buffer_row }, cx)
|
||||
});
|
||||
}
|
||||
});
|
||||
return Some((render_toggle)(buffer_row, folded, toggle_callback, cx));
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
Some((crease.render_toggle)(
|
||||
buffer_row,
|
||||
folded,
|
||||
toggle_callback,
|
||||
cx,
|
||||
))
|
||||
} else if folded
|
||||
|| (self.starts_indent(buffer_row) && (row_contains_cursor || self.gutter_hovered))
|
||||
{
|
||||
is_foldable |= self.starts_indent(buffer_row);
|
||||
|
||||
if folded || (is_foldable && (row_contains_cursor || self.gutter_hovered)) {
|
||||
Some(
|
||||
Disclosure::new(("indent-fold-indicator", buffer_row.0), !folded)
|
||||
Disclosure::new(("gutter_crease", buffer_row.0), !folded)
|
||||
.selected(folded)
|
||||
.on_click(cx.listener_for(&editor, move |this, _e, cx| {
|
||||
if folded {
|
||||
@@ -14113,10 +14293,15 @@ impl EditorSnapshot {
|
||||
cx: &mut WindowContext,
|
||||
) -> Option<AnyElement> {
|
||||
let folded = self.is_line_folded(buffer_row);
|
||||
let crease = self
|
||||
if let Crease::Inline { render_trailer, .. } = self
|
||||
.crease_snapshot
|
||||
.query_row(buffer_row, &self.buffer_snapshot)?;
|
||||
Some((crease.render_trailer)(buffer_row, folded, cx))
|
||||
.query_row(buffer_row, &self.buffer_snapshot)?
|
||||
{
|
||||
let render_trailer = render_trailer.as_ref()?;
|
||||
Some(render_trailer(buffer_row, folded, cx))
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -14621,7 +14806,7 @@ pub fn diagnostic_block_renderer(
|
||||
let (text_without_backticks, code_ranges) =
|
||||
highlight_diagnostic_message(&diagnostic, max_message_rows);
|
||||
|
||||
Box::new(move |cx: &mut BlockContext| {
|
||||
Arc::new(move |cx: &mut BlockContext| {
|
||||
let group_id: SharedString = cx.block_id.to_string().into();
|
||||
|
||||
let mut text_style = cx.text_style().clone();
|
||||
@@ -14676,6 +14861,7 @@ pub fn diagnostic_block_renderer(
|
||||
.group(group_id.clone())
|
||||
.relative()
|
||||
.size_full()
|
||||
.block_mouse_down()
|
||||
.pl(cx.gutter_dimensions.width)
|
||||
.w(cx.max_width - cx.gutter_dimensions.full_width())
|
||||
.child(
|
||||
|
||||
@@ -279,7 +279,7 @@ pub struct EditorSettingsContent {
|
||||
|
||||
/// Whether to show the signature help pop-up after completions or bracket pairs inserted.
|
||||
///
|
||||
/// Default: true
|
||||
/// Default: false
|
||||
pub show_signature_help_after_edits: Option<bool>,
|
||||
|
||||
/// Jupyter REPL settings.
|
||||
|
||||
@@ -596,10 +596,10 @@ fn test_clone(cx: &mut TestAppContext) {
|
||||
|
||||
_ = editor.update(cx, |editor, cx| {
|
||||
editor.change_selections(None, cx, |s| s.select_ranges(selection_ranges.clone()));
|
||||
editor.fold_ranges(
|
||||
[
|
||||
(Point::new(1, 0)..Point::new(2, 0), FoldPlaceholder::test()),
|
||||
(Point::new(3, 0)..Point::new(4, 0), FoldPlaceholder::test()),
|
||||
editor.fold_creases(
|
||||
vec![
|
||||
Crease::simple(Point::new(1, 0)..Point::new(2, 0), FoldPlaceholder::test()),
|
||||
Crease::simple(Point::new(3, 0)..Point::new(4, 0), FoldPlaceholder::test()),
|
||||
],
|
||||
true,
|
||||
cx,
|
||||
@@ -1283,11 +1283,11 @@ fn test_move_cursor_multibyte(cx: &mut TestAppContext) {
|
||||
assert_eq!('α'.len_utf8(), 2);
|
||||
|
||||
_ = view.update(cx, |view, cx| {
|
||||
view.fold_ranges(
|
||||
view.fold_creases(
|
||||
vec![
|
||||
(Point::new(0, 6)..Point::new(0, 12), FoldPlaceholder::test()),
|
||||
(Point::new(1, 2)..Point::new(1, 4), FoldPlaceholder::test()),
|
||||
(Point::new(2, 4)..Point::new(2, 8), FoldPlaceholder::test()),
|
||||
Crease::simple(Point::new(0, 6)..Point::new(0, 12), FoldPlaceholder::test()),
|
||||
Crease::simple(Point::new(1, 2)..Point::new(1, 4), FoldPlaceholder::test()),
|
||||
Crease::simple(Point::new(2, 4)..Point::new(2, 8), FoldPlaceholder::test()),
|
||||
],
|
||||
true,
|
||||
cx,
|
||||
@@ -1398,6 +1398,15 @@ fn test_move_cursor_different_line_lengths(cx: &mut TestAppContext) {
|
||||
view.change_selections(None, cx, |s| {
|
||||
s.select_display_ranges([empty_range(0, "ⓐⓑⓒⓓⓔ".len())]);
|
||||
});
|
||||
|
||||
// moving above start of document should move selection to start of document,
|
||||
// but the next move down should still be at the original goal_x
|
||||
view.move_up(&MoveUp, cx);
|
||||
assert_eq!(
|
||||
view.selections.display_ranges(cx),
|
||||
&[empty_range(0, "".len())]
|
||||
);
|
||||
|
||||
view.move_down(&MoveDown, cx);
|
||||
assert_eq!(
|
||||
view.selections.display_ranges(cx),
|
||||
@@ -1422,6 +1431,25 @@ fn test_move_cursor_different_line_lengths(cx: &mut TestAppContext) {
|
||||
&[empty_range(4, "ⓐⓑⓒⓓⓔ".len())]
|
||||
);
|
||||
|
||||
// moving past end of document should not change goal_x
|
||||
view.move_down(&MoveDown, cx);
|
||||
assert_eq!(
|
||||
view.selections.display_ranges(cx),
|
||||
&[empty_range(5, "".len())]
|
||||
);
|
||||
|
||||
view.move_down(&MoveDown, cx);
|
||||
assert_eq!(
|
||||
view.selections.display_ranges(cx),
|
||||
&[empty_range(5, "".len())]
|
||||
);
|
||||
|
||||
view.move_up(&MoveUp, cx);
|
||||
assert_eq!(
|
||||
view.selections.display_ranges(cx),
|
||||
&[empty_range(4, "ⓐⓑⓒⓓⓔ".len())]
|
||||
);
|
||||
|
||||
view.move_up(&MoveUp, cx);
|
||||
assert_eq!(
|
||||
view.selections.display_ranges(cx),
|
||||
@@ -3875,11 +3903,11 @@ fn test_move_line_up_down(cx: &mut TestAppContext) {
|
||||
build_editor(buffer, cx)
|
||||
});
|
||||
_ = view.update(cx, |view, cx| {
|
||||
view.fold_ranges(
|
||||
view.fold_creases(
|
||||
vec![
|
||||
(Point::new(0, 2)..Point::new(1, 2), FoldPlaceholder::test()),
|
||||
(Point::new(2, 3)..Point::new(4, 1), FoldPlaceholder::test()),
|
||||
(Point::new(7, 0)..Point::new(8, 4), FoldPlaceholder::test()),
|
||||
Crease::simple(Point::new(0, 2)..Point::new(1, 2), FoldPlaceholder::test()),
|
||||
Crease::simple(Point::new(2, 3)..Point::new(4, 1), FoldPlaceholder::test()),
|
||||
Crease::simple(Point::new(7, 0)..Point::new(8, 4), FoldPlaceholder::test()),
|
||||
],
|
||||
true,
|
||||
cx,
|
||||
@@ -3980,7 +4008,7 @@ fn test_move_line_up_down_with_blocks(cx: &mut TestAppContext) {
                 style: BlockStyle::Fixed,
                 placement: BlockPlacement::Below(snapshot.anchor_after(Point::new(2, 0))),
                 height: 1,
-                render: Box::new(|_| div().into_any()),
+                render: Arc::new(|_| div().into_any()),
                 priority: 0,
             }],
             Some(Autoscroll::fit()),
@@ -4022,7 +4050,7 @@ async fn test_selections_and_replace_blocks(cx: &mut TestAppContext) {
                 placement,
                 height: 4,
                 style: BlockStyle::Sticky,
-                render: Box::new(|_| gpui::div().into_any_element()),
+                render: Arc::new(|_| gpui::div().into_any_element()),
                 priority: 0,
             }],
             None,
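Both hunks above change the block `render` callback from `Box::new` to `Arc::new`. A minimal, self-contained stand-in (not Zed's actual types) showing what the switch buys: an `Arc`-wrapped closure can be cloned alongside the block description, which a `Box<dyn Fn…>` cannot.

    use std::sync::Arc;

    // Assumed alias for illustration only; the real RenderBlock signature lives in the editor crate.
    type RenderBlock = Arc<dyn Fn(u32) -> String + Send + Sync>;

    let render: RenderBlock = Arc::new(|height| format!("block of height {height}"));
    let render_clone = render.clone(); // cheap clone of the same callback
    assert_eq!(render(3), render_clone(3));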
@@ -4717,11 +4745,11 @@ fn test_split_selection_into_lines(cx: &mut TestAppContext) {
|
||||
build_editor(buffer, cx)
|
||||
});
|
||||
_ = view.update(cx, |view, cx| {
|
||||
view.fold_ranges(
|
||||
view.fold_creases(
|
||||
vec![
|
||||
(Point::new(0, 2)..Point::new(1, 2), FoldPlaceholder::test()),
|
||||
(Point::new(2, 3)..Point::new(4, 1), FoldPlaceholder::test()),
|
||||
(Point::new(7, 0)..Point::new(8, 4), FoldPlaceholder::test()),
|
||||
Crease::simple(Point::new(0, 2)..Point::new(1, 2), FoldPlaceholder::test()),
|
||||
Crease::simple(Point::new(2, 3)..Point::new(4, 1), FoldPlaceholder::test()),
|
||||
Crease::simple(Point::new(7, 0)..Point::new(8, 4), FoldPlaceholder::test()),
|
||||
],
|
||||
true,
|
||||
cx,
|
||||
@@ -5398,13 +5426,13 @@ async fn test_select_larger_smaller_syntax_node(cx: &mut gpui::TestAppContext) {
|
||||
// Ensure that we keep expanding the selection if the larger selection starts or ends within
|
||||
// a fold.
|
||||
editor.update(cx, |view, cx| {
|
||||
view.fold_ranges(
|
||||
view.fold_creases(
|
||||
vec![
|
||||
(
|
||||
Crease::simple(
|
||||
Point::new(0, 21)..Point::new(0, 24),
|
||||
FoldPlaceholder::test(),
|
||||
),
|
||||
(
|
||||
Crease::simple(
|
||||
Point::new(3, 20)..Point::new(3, 22),
|
||||
FoldPlaceholder::test(),
|
||||
),
|
||||
@@ -6551,6 +6579,45 @@ async fn test_auto_replace_emoji_shortcode(cx: &mut gpui::TestAppContext) {
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_snippet_placeholder_choices(cx: &mut gpui::TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
|
||||
let (text, insertion_ranges) = marked_text_ranges(
|
||||
indoc! {"
|
||||
ˇ
|
||||
"},
|
||||
false,
|
||||
);
|
||||
|
||||
let buffer = cx.update(|cx| MultiBuffer::build_simple(&text, cx));
|
||||
let (editor, cx) = cx.add_window_view(|cx| build_editor(buffer, cx));
|
||||
|
||||
_ = editor.update(cx, |editor, cx| {
|
||||
let snippet = Snippet::parse("type ${1|,i32,u32|} = $2").unwrap();
|
||||
|
||||
editor
|
||||
.insert_snippet(&insertion_ranges, snippet, cx)
|
||||
.unwrap();
|
||||
|
||||
fn assert(editor: &mut Editor, cx: &mut ViewContext<Editor>, marked_text: &str) {
|
||||
let (expected_text, selection_ranges) = marked_text_ranges(marked_text, false);
|
||||
assert_eq!(editor.text(cx), expected_text);
|
||||
assert_eq!(editor.selections.ranges::<usize>(cx), selection_ranges);
|
||||
}
|
||||
|
||||
assert(
|
||||
editor,
|
||||
cx,
|
||||
indoc! {"
|
||||
type «» =•
|
||||
"},
|
||||
);
|
||||
|
||||
assert!(editor.context_menu_visible(), "There should be a match");
|
||||
});
|
||||
}
|
||||
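The new test above relies on LSP-style choice placeholders: in `${1|,i32,u32|}`, tab stop 1 carries a comma-separated list of choices between the `|…|` delimiters (here an empty string, `i32`, and `u32`), which is what makes the completion menu appear. A one-line sketch reusing the `Snippet` type from the test:

    let snippet = Snippet::parse("type ${1|,i32,u32|} = $2").unwrap();
    // Tab stop 1 offers three choices: "", "i32", "u32"; tab stop 2 has none.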
|
||||
#[gpui::test]
|
||||
async fn test_snippets(cx: &mut gpui::TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
@@ -13139,7 +13206,7 @@ fn test_crease_insertion_and_rendering(cx: &mut TestAppContext) {
|
||||
callback: Arc<dyn Fn(bool, &mut WindowContext) + Send + Sync>,
|
||||
}
|
||||
|
||||
let crease = Crease::new(
|
||||
let crease = Crease::inline(
|
||||
range,
|
||||
FoldPlaceholder::test(),
|
||||
{
|
||||
@@ -13158,7 +13225,8 @@ fn test_crease_insertion_and_rendering(cx: &mut TestAppContext) {
|
||||
|
||||
editor.insert_creases(Some(crease), cx);
|
||||
let snapshot = editor.snapshot(cx);
|
||||
let _div = snapshot.render_fold_toggle(MultiBufferRow(1), false, cx.view().clone(), cx);
|
||||
let _div =
|
||||
snapshot.render_crease_toggle(MultiBufferRow(1), false, cx.view().clone(), cx);
|
||||
snapshot
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
@@ -16,8 +16,8 @@ use crate::{
|
||||
items::BufferSearchHighlights,
|
||||
mouse_context_menu::{self, MenuPosition, MouseContextMenu},
|
||||
scroll::scroll_amount::ScrollAmount,
|
||||
BlockId, CodeActionsMenu, CursorShape, CustomBlockId, DisplayPoint, DisplayRow,
|
||||
DocumentHighlightRead, DocumentHighlightWrite, Editor, EditorMode, EditorSettings,
|
||||
BlockId, ChunkReplacement, CodeActionsMenu, CursorShape, CustomBlockId, DisplayPoint,
|
||||
DisplayRow, DocumentHighlightRead, DocumentHighlightWrite, Editor, EditorMode, EditorSettings,
|
||||
EditorSnapshot, EditorStyle, ExpandExcerpts, FocusedBlock, GutterDimensions, HalfPageDown,
|
||||
HalfPageUp, HandleInput, HoveredCursor, HoveredHunk, JumpData, LineDown, LineUp, OpenExcerpts,
|
||||
PageDown, PageUp, Point, RowExt, RowRangeExt, SelectPhase, Selection, SoftWrap, ToPoint,
|
||||
@@ -34,8 +34,8 @@ use gpui::{
|
||||
FontId, GlobalElementId, Hitbox, Hsla, InteractiveElement, IntoElement, Length,
|
||||
ModifiersChangedEvent, MouseButton, MouseDownEvent, MouseMoveEvent, MouseUpEvent, PaintQuad,
|
||||
ParentElement, Pixels, ScrollDelta, ScrollWheelEvent, ShapedLine, SharedString, Size,
|
||||
StatefulInteractiveElement, Style, Styled, TextRun, TextStyle, TextStyleRefinement, View,
|
||||
ViewContext, WeakView, WindowContext,
|
||||
StatefulInteractiveElement, Style, Styled, TextRun, TextStyleRefinement, View, ViewContext,
|
||||
WeakView, WindowContext,
|
||||
};
|
||||
use gpui::{ClickEvent, Subscription};
|
||||
use itertools::Itertools;
|
||||
@@ -1227,9 +1227,9 @@ impl EditorElement {
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn prepaint_gutter_fold_toggles(
|
||||
fn prepaint_crease_toggles(
|
||||
&self,
|
||||
toggles: &mut [Option<AnyElement>],
|
||||
crease_toggles: &mut [Option<AnyElement>],
|
||||
line_height: Pixels,
|
||||
gutter_dimensions: &GutterDimensions,
|
||||
gutter_settings: crate::editor_settings::Gutter,
|
||||
@@ -1237,25 +1237,25 @@ impl EditorElement {
|
||||
gutter_hitbox: &Hitbox,
|
||||
cx: &mut WindowContext,
|
||||
) {
|
||||
for (ix, fold_indicator) in toggles.iter_mut().enumerate() {
|
||||
if let Some(fold_indicator) = fold_indicator {
|
||||
for (ix, crease_toggle) in crease_toggles.iter_mut().enumerate() {
|
||||
if let Some(crease_toggle) = crease_toggle {
|
||||
debug_assert!(gutter_settings.folds);
|
||||
let available_space = size(
|
||||
AvailableSpace::MinContent,
|
||||
AvailableSpace::Definite(line_height * 0.55),
|
||||
);
|
||||
let fold_indicator_size = fold_indicator.layout_as_root(available_space, cx);
|
||||
let crease_toggle_size = crease_toggle.layout_as_root(available_space, cx);
|
||||
|
||||
let position = point(
|
||||
gutter_dimensions.width - gutter_dimensions.right_padding,
|
||||
ix as f32 * line_height - (scroll_pixel_position.y % line_height),
|
||||
);
|
||||
let centering_offset = point(
|
||||
(gutter_dimensions.fold_area_width() - fold_indicator_size.width) / 2.,
|
||||
(line_height - fold_indicator_size.height) / 2.,
|
||||
(gutter_dimensions.fold_area_width() - crease_toggle_size.width) / 2.,
|
||||
(line_height - crease_toggle_size.height) / 2.,
|
||||
);
|
||||
let origin = gutter_hitbox.origin + position + centering_offset;
|
||||
fold_indicator.prepaint_as_root(origin, available_space, cx);
|
||||
crease_toggle.prepaint_as_root(origin, available_space, cx);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1412,7 +1412,7 @@ impl EditorElement {
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn layout_inline_blame(
|
||||
fn layout_active_line_trailer(
|
||||
&self,
|
||||
display_row: DisplayRow,
|
||||
display_snapshot: &DisplaySnapshot,
|
||||
@@ -1424,61 +1424,71 @@ impl EditorElement {
|
||||
line_height: Pixels,
|
||||
cx: &mut WindowContext,
|
||||
) -> Option<AnyElement> {
|
||||
if !self
|
||||
let render_inline_blame = self
|
||||
.editor
|
||||
.update(cx, |editor, cx| editor.render_git_blame_inline(cx))
|
||||
{
|
||||
return None;
|
||||
}
|
||||
.update(cx, |editor, cx| editor.render_git_blame_inline(cx));
|
||||
if render_inline_blame {
|
||||
let workspace = self
|
||||
.editor
|
||||
.read(cx)
|
||||
.workspace
|
||||
.as_ref()
|
||||
.map(|(w, _)| w.clone());
|
||||
|
||||
let workspace = self
|
||||
.editor
|
||||
.read(cx)
|
||||
.workspace
|
||||
.as_ref()
|
||||
.map(|(w, _)| w.clone());
|
||||
let display_point = DisplayPoint::new(display_row, 0);
|
||||
let buffer_row = MultiBufferRow(display_point.to_point(display_snapshot).row);
|
||||
|
||||
let display_point = DisplayPoint::new(display_row, 0);
|
||||
let buffer_row = MultiBufferRow(display_point.to_point(display_snapshot).row);
|
||||
let blame = self.editor.read(cx).blame.clone()?;
|
||||
let blame_entry = blame
|
||||
.update(cx, |blame, cx| {
|
||||
blame.blame_for_rows([Some(buffer_row)], cx).next()
|
||||
})
|
||||
.flatten()?;
|
||||
|
||||
let blame = self.editor.read(cx).blame.clone()?;
|
||||
let blame_entry = blame
|
||||
.update(cx, |blame, cx| {
|
||||
blame.blame_for_rows([Some(buffer_row)], cx).next()
|
||||
})
|
||||
.flatten()?;
|
||||
let mut element =
|
||||
render_inline_blame_entry(&blame, blame_entry, &self.style, workspace, cx);
|
||||
|
||||
let mut element =
|
||||
render_inline_blame_entry(&blame, blame_entry, &self.style, workspace, cx);
|
||||
let start_y = content_origin.y
|
||||
+ line_height * (display_row.as_f32() - scroll_pixel_position.y / line_height);
|
||||
|
||||
let start_y = content_origin.y
|
||||
+ line_height * (display_row.as_f32() - scroll_pixel_position.y / line_height);
|
||||
let start_x = {
|
||||
const INLINE_BLAME_PADDING_EM_WIDTHS: f32 = 6.;
|
||||
|
||||
let start_x = {
|
||||
const INLINE_BLAME_PADDING_EM_WIDTHS: f32 = 6.;
|
||||
let line_end = if let Some(crease_trailer) = crease_trailer {
|
||||
crease_trailer.bounds.right()
|
||||
} else {
|
||||
content_origin.x - scroll_pixel_position.x + line_layout.width
|
||||
};
|
||||
let padded_line_end = line_end + em_width * INLINE_BLAME_PADDING_EM_WIDTHS;
|
||||
|
||||
let line_end = if let Some(crease_trailer) = crease_trailer {
|
||||
crease_trailer.bounds.right()
|
||||
} else {
|
||||
content_origin.x - scroll_pixel_position.x + line_layout.width
|
||||
let min_column_in_pixels = ProjectSettings::get_global(cx)
|
||||
.git
|
||||
.inline_blame
|
||||
.and_then(|settings| settings.min_column)
|
||||
.map(|col| self.column_pixels(col as usize, cx))
|
||||
.unwrap_or(px(0.));
|
||||
let min_start = content_origin.x - scroll_pixel_position.x + min_column_in_pixels;
|
||||
|
||||
cmp::max(padded_line_end, min_start)
|
||||
};
|
||||
let padded_line_end = line_end + em_width * INLINE_BLAME_PADDING_EM_WIDTHS;
|
||||
|
||||
let min_column_in_pixels = ProjectSettings::get_global(cx)
|
||||
.git
|
||||
.inline_blame
|
||||
.and_then(|settings| settings.min_column)
|
||||
.map(|col| self.column_pixels(col as usize, cx))
|
||||
.unwrap_or(px(0.));
|
||||
let min_start = content_origin.x - scroll_pixel_position.x + min_column_in_pixels;
|
||||
let absolute_offset = point(start_x, start_y);
|
||||
element.prepaint_as_root(absolute_offset, AvailableSpace::min_size(), cx);
|
||||
|
||||
cmp::max(padded_line_end, min_start)
|
||||
};
|
||||
Some(element)
|
||||
} else if let Some(mut element) = self.editor.update(cx, |editor, cx| {
|
||||
editor.render_active_line_trailer(&self.style, cx)
|
||||
}) {
|
||||
let start_y = content_origin.y
|
||||
+ line_height * (display_row.as_f32() - scroll_pixel_position.y / line_height);
|
||||
let start_x = content_origin.x - scroll_pixel_position.x + em_width;
|
||||
let absolute_offset = point(start_x, start_y);
|
||||
element.prepaint_as_root(absolute_offset, AvailableSpace::min_size(), cx);
|
||||
|
||||
let absolute_offset = point(start_x, start_y);
|
||||
element.prepaint_as_root(absolute_offset, AvailableSpace::min_size(), cx);
|
||||
|
||||
Some(element)
|
||||
Some(element)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
@@ -1915,7 +1925,7 @@ impl EditorElement {
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn layout_gutter_fold_toggles(
|
||||
fn layout_crease_toggles(
|
||||
&self,
|
||||
rows: Range<DisplayRow>,
|
||||
buffer_rows: impl IntoIterator<Item = Option<MultiBufferRow>>,
|
||||
@@ -1934,7 +1944,7 @@ impl EditorElement {
|
||||
if let Some(multibuffer_row) = row {
|
||||
let display_row = DisplayRow(rows.start.0 + ix as u32);
|
||||
let active = active_rows.contains_key(&display_row);
|
||||
snapshot.render_fold_toggle(
|
||||
snapshot.render_crease_toggle(
|
||||
multibuffer_row,
|
||||
active,
|
||||
self.editor.clone(),
|
||||
@@ -2019,7 +2029,7 @@ impl EditorElement {
|
||||
let chunks = snapshot.highlighted_chunks(rows.clone(), true, style);
|
||||
LineWithInvisibles::from_chunks(
|
||||
chunks,
|
||||
&style.text,
|
||||
&style,
|
||||
MAX_LINE_LEN,
|
||||
rows.len(),
|
||||
snapshot.mode,
|
||||
@@ -2122,9 +2132,7 @@ impl EditorElement {
|
||||
max_width: text_hitbox.size.width.max(*scroll_width),
|
||||
editor_style: &self.style,
|
||||
}))
|
||||
.cursor(CursorStyle::Arrow)
|
||||
.on_mouse_down(MouseButton::Left, |_, cx| cx.stop_propagation())
|
||||
.into_any_element()
|
||||
.into_any()
|
||||
}
|
||||
|
||||
Block::ExcerptBoundary {
|
||||
@@ -3354,9 +3362,9 @@ impl EditorElement {
|
||||
|
||||
fn paint_gutter_indicators(&self, layout: &mut EditorLayout, cx: &mut WindowContext) {
|
||||
cx.paint_layer(layout.gutter_hitbox.bounds, |cx| {
|
||||
cx.with_element_namespace("gutter_fold_toggles", |cx| {
|
||||
for fold_indicator in layout.gutter_fold_toggles.iter_mut().flatten() {
|
||||
fold_indicator.paint(cx);
|
||||
cx.with_element_namespace("crease_toggles", |cx| {
|
||||
for crease_toggle in layout.crease_toggles.iter_mut().flatten() {
|
||||
crease_toggle.paint(cx);
|
||||
}
|
||||
});
|
||||
|
||||
@@ -3456,7 +3464,7 @@ impl EditorElement {
|
||||
self.paint_lines(&invisible_display_ranges, layout, cx);
|
||||
self.paint_redactions(layout, cx);
|
||||
self.paint_cursors(layout, cx);
|
||||
self.paint_inline_blame(layout, cx);
|
||||
self.paint_active_line_trailer(layout, cx);
|
||||
cx.with_element_namespace("crease_trailers", |cx| {
|
||||
for trailer in layout.crease_trailers.iter_mut().flatten() {
|
||||
trailer.element.paint(cx);
|
||||
@@ -3938,10 +3946,10 @@ impl EditorElement {
|
||||
}
|
||||
}
|
||||
|
||||
fn paint_inline_blame(&mut self, layout: &mut EditorLayout, cx: &mut WindowContext) {
|
||||
if let Some(mut inline_blame) = layout.inline_blame.take() {
|
||||
fn paint_active_line_trailer(&mut self, layout: &mut EditorLayout, cx: &mut WindowContext) {
|
||||
if let Some(mut element) = layout.active_line_trailer.take() {
|
||||
cx.paint_layer(layout.text_hitbox.bounds, |cx| {
|
||||
inline_blame.paint(cx);
|
||||
element.paint(cx);
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -4374,7 +4382,7 @@ impl LineWithInvisibles {
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
fn from_chunks<'a>(
|
||||
chunks: impl Iterator<Item = HighlightedChunk<'a>>,
|
||||
text_style: &TextStyle,
|
||||
editor_style: &EditorStyle,
|
||||
max_line_len: usize,
|
||||
max_line_count: usize,
|
||||
editor_mode: EditorMode,
|
||||
@@ -4382,6 +4390,7 @@ impl LineWithInvisibles {
|
||||
is_row_soft_wrapped: impl Copy + Fn(usize) -> bool,
|
||||
cx: &mut WindowContext,
|
||||
) -> Vec<Self> {
|
||||
let text_style = &editor_style.text;
|
||||
let mut layouts = Vec::with_capacity(max_line_count);
|
||||
let mut fragments: SmallVec<[LineFragment; 1]> = SmallVec::new();
|
||||
let mut line = String::new();
|
||||
@@ -4400,9 +4409,9 @@ impl LineWithInvisibles {
|
||||
text: "\n",
|
||||
style: None,
|
||||
is_tab: false,
|
||||
renderer: None,
|
||||
replacement: None,
|
||||
}]) {
|
||||
if let Some(renderer) = highlighted_chunk.renderer {
|
||||
if let Some(replacement) = highlighted_chunk.replacement {
|
||||
if !line.is_empty() {
|
||||
let shaped_line = cx
|
||||
.text_system()
|
||||
@@ -4415,42 +4424,71 @@ impl LineWithInvisibles {
|
||||
styles.clear();
|
||||
}
|
||||
|
||||
let available_width = if renderer.constrain_width {
|
||||
let chunk = if highlighted_chunk.text == ellipsis.as_ref() {
|
||||
ellipsis.clone()
|
||||
} else {
|
||||
SharedString::from(Arc::from(highlighted_chunk.text))
|
||||
};
|
||||
let shaped_line = cx
|
||||
.text_system()
|
||||
.shape_line(
|
||||
chunk,
|
||||
font_size,
|
||||
&[text_style.to_run(highlighted_chunk.text.len())],
|
||||
)
|
||||
.unwrap();
|
||||
AvailableSpace::Definite(shaped_line.width)
|
||||
} else {
|
||||
AvailableSpace::MinContent
|
||||
};
|
||||
match replacement {
|
||||
ChunkReplacement::Renderer(renderer) => {
|
||||
let available_width = if renderer.constrain_width {
|
||||
let chunk = if highlighted_chunk.text == ellipsis.as_ref() {
|
||||
ellipsis.clone()
|
||||
} else {
|
||||
SharedString::from(Arc::from(highlighted_chunk.text))
|
||||
};
|
||||
let shaped_line = cx
|
||||
.text_system()
|
||||
.shape_line(
|
||||
chunk,
|
||||
font_size,
|
||||
&[text_style.to_run(highlighted_chunk.text.len())],
|
||||
)
|
||||
.unwrap();
|
||||
AvailableSpace::Definite(shaped_line.width)
|
||||
} else {
|
||||
AvailableSpace::MinContent
|
||||
};
|
||||
|
||||
let mut element = (renderer.render)(&mut ChunkRendererContext {
|
||||
context: cx,
|
||||
max_width: text_width,
|
||||
});
|
||||
let line_height = text_style.line_height_in_pixels(cx.rem_size());
|
||||
let size = element.layout_as_root(
|
||||
size(available_width, AvailableSpace::Definite(line_height)),
|
||||
cx,
|
||||
);
|
||||
let mut element = (renderer.render)(&mut ChunkRendererContext {
|
||||
context: cx,
|
||||
max_width: text_width,
|
||||
});
|
||||
let line_height = text_style.line_height_in_pixels(cx.rem_size());
|
||||
let size = element.layout_as_root(
|
||||
size(available_width, AvailableSpace::Definite(line_height)),
|
||||
cx,
|
||||
);
|
||||
|
||||
width += size.width;
|
||||
len += highlighted_chunk.text.len();
|
||||
fragments.push(LineFragment::Element {
|
||||
element: Some(element),
|
||||
size,
|
||||
len: highlighted_chunk.text.len(),
|
||||
});
|
||||
width += size.width;
|
||||
len += highlighted_chunk.text.len();
|
||||
fragments.push(LineFragment::Element {
|
||||
element: Some(element),
|
||||
size,
|
||||
len: highlighted_chunk.text.len(),
|
||||
});
|
||||
}
|
||||
ChunkReplacement::Str(x) => {
|
||||
let text_style = if let Some(style) = highlighted_chunk.style {
|
||||
Cow::Owned(text_style.clone().highlight(style))
|
||||
} else {
|
||||
Cow::Borrowed(text_style)
|
||||
};
|
||||
|
||||
let run = TextRun {
|
||||
len: x.len(),
|
||||
font: text_style.font(),
|
||||
color: text_style.color,
|
||||
background_color: text_style.background_color,
|
||||
underline: text_style.underline,
|
||||
strikethrough: text_style.strikethrough,
|
||||
};
|
||||
let line_layout = cx
|
||||
.text_system()
|
||||
.shape_line(x, font_size, &[run])
|
||||
.unwrap()
|
||||
.with_len(highlighted_chunk.text.len());
|
||||
|
||||
width += line_layout.width;
|
||||
len += highlighted_chunk.text.len();
|
||||
fragments.push(LineFragment::Text(line_layout))
|
||||
}
|
||||
}
|
||||
} else {
|
||||
for (ix, mut line_chunk) in highlighted_chunk.text.split('\n').enumerate() {
|
||||
if ix > 0 {
|
||||
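The match above is the first consumer of the new `ChunkReplacement` value. A hypothetical sketch of its shape as implied by the two arms (the real definition lives elsewhere in the editor crate, so the payload types are assumptions):

    enum ChunkReplacement {
        // A custom element renderer, handled by the Renderer arm above.
        Renderer(ChunkRenderer),
        // Plain replacement text shaped like ordinary text, handled by the Str arm.
        Str(SharedString),
    }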
@@ -5167,16 +5205,15 @@ impl Element for EditorElement {
|
||||
cx,
|
||||
);
|
||||
|
||||
let mut gutter_fold_toggles =
|
||||
cx.with_element_namespace("gutter_fold_toggles", |cx| {
|
||||
self.layout_gutter_fold_toggles(
|
||||
start_row..end_row,
|
||||
buffer_rows.iter().copied(),
|
||||
&active_rows,
|
||||
&snapshot,
|
||||
cx,
|
||||
)
|
||||
});
|
||||
let mut crease_toggles = cx.with_element_namespace("crease_toggles", |cx| {
|
||||
self.layout_crease_toggles(
|
||||
start_row..end_row,
|
||||
buffer_rows.iter().copied(),
|
||||
&active_rows,
|
||||
&snapshot,
|
||||
cx,
|
||||
)
|
||||
});
|
||||
let crease_trailers = cx.with_element_namespace("crease_trailers", |cx| {
|
||||
self.layout_crease_trailers(buffer_rows.iter().copied(), &snapshot, cx)
|
||||
});
|
||||
@@ -5304,14 +5341,14 @@ impl Element for EditorElement {
|
||||
)
|
||||
});
|
||||
|
||||
let mut inline_blame = None;
|
||||
let mut active_line_trailer = None;
|
||||
if let Some(newest_selection_head) = newest_selection_head {
|
||||
let display_row = newest_selection_head.row();
|
||||
if (start_row..end_row).contains(&display_row) {
|
||||
let line_ix = display_row.minus(start_row) as usize;
|
||||
let line_layout = &line_layouts[line_ix];
|
||||
let crease_trailer_layout = crease_trailers[line_ix].as_ref();
|
||||
inline_blame = self.layout_inline_blame(
|
||||
active_line_trailer = self.layout_active_line_trailer(
|
||||
display_row,
|
||||
&snapshot.display_snapshot,
|
||||
line_layout,
|
||||
@@ -5556,9 +5593,9 @@ impl Element for EditorElement {
|
||||
let mouse_context_menu =
|
||||
self.layout_mouse_context_menu(&snapshot, start_row..end_row, cx);
|
||||
|
||||
cx.with_element_namespace("gutter_fold_toggles", |cx| {
|
||||
self.prepaint_gutter_fold_toggles(
|
||||
&mut gutter_fold_toggles,
|
||||
cx.with_element_namespace("crease_toggles", |cx| {
|
||||
self.prepaint_crease_toggles(
|
||||
&mut crease_toggles,
|
||||
line_height,
|
||||
&gutter_dimensions,
|
||||
gutter_settings,
|
||||
@@ -5630,7 +5667,7 @@ impl Element for EditorElement {
|
||||
line_elements,
|
||||
line_numbers,
|
||||
blamed_display_rows,
|
||||
inline_blame,
|
||||
active_line_trailer,
|
||||
blocks,
|
||||
cursors,
|
||||
visible_cursors,
|
||||
@@ -5638,7 +5675,7 @@ impl Element for EditorElement {
|
||||
mouse_context_menu,
|
||||
test_indicators,
|
||||
code_actions_indicator,
|
||||
gutter_fold_toggles,
|
||||
crease_toggles,
|
||||
crease_trailers,
|
||||
tab_invisible,
|
||||
space_invisible,
|
||||
@@ -5671,7 +5708,6 @@ impl Element for EditorElement {
|
||||
line_height: Some(self.style.text.line_height),
|
||||
..Default::default()
|
||||
};
|
||||
let mouse_position = cx.mouse_position();
|
||||
let hovered_hunk = layout
|
||||
.display_hunks
|
||||
.iter()
|
||||
@@ -5685,7 +5721,7 @@ impl Element for EditorElement {
|
||||
} => {
|
||||
if hunk_hitbox
|
||||
.as_ref()
|
||||
.map(|hitbox| hitbox.contains(&mouse_position))
|
||||
.map(|hitbox| hitbox.is_hovered(cx))
|
||||
.unwrap_or(false)
|
||||
{
|
||||
Some(HoveredHunk {
|
||||
@@ -5768,7 +5804,7 @@ pub struct EditorLayout {
|
||||
line_numbers: Vec<Option<ShapedLine>>,
|
||||
display_hunks: Vec<(DisplayDiffHunk, Option<Hitbox>)>,
|
||||
blamed_display_rows: Option<Vec<AnyElement>>,
|
||||
inline_blame: Option<AnyElement>,
|
||||
active_line_trailer: Option<AnyElement>,
|
||||
blocks: Vec<BlockLayout>,
|
||||
highlighted_ranges: Vec<(Range<DisplayPoint>, Hsla)>,
|
||||
highlighted_gutter_ranges: Vec<(Range<DisplayPoint>, Hsla)>,
|
||||
@@ -5778,7 +5814,7 @@ pub struct EditorLayout {
|
||||
selections: Vec<(PlayerColor, Vec<SelectionLayout>)>,
|
||||
code_actions_indicator: Option<AnyElement>,
|
||||
test_indicators: Vec<AnyElement>,
|
||||
gutter_fold_toggles: Vec<Option<AnyElement>>,
|
||||
crease_toggles: Vec<Option<AnyElement>>,
|
||||
crease_trailers: Vec<Option<CreaseTrailerLayout>>,
|
||||
mouse_context_menu: Option<AnyElement>,
|
||||
tab_invisible: ShapedLine,
|
||||
@@ -5996,7 +6032,7 @@ fn layout_line(
|
||||
let chunks = snapshot.highlighted_chunks(row..row + DisplayRow(1), true, style);
|
||||
LineWithInvisibles::from_chunks(
|
||||
chunks,
|
||||
&style.text,
|
||||
&style,
|
||||
MAX_LINE_LEN,
|
||||
1,
|
||||
snapshot.mode,
|
||||
@@ -6623,7 +6659,7 @@ mod tests {
|
||||
style: BlockStyle::Fixed,
|
||||
placement: BlockPlacement::Above(Anchor::min()),
|
||||
height: 3,
|
||||
render: Box::new(|cx| div().h(3. * cx.line_height()).into_any()),
|
||||
render: Arc::new(|cx| div().h(3. * cx.line_height()).into_any()),
|
||||
priority: 0,
|
||||
}],
|
||||
None,
|
||||
|
||||
@@ -425,7 +425,7 @@ impl Editor {
|
||||
height: 1,
|
||||
style: BlockStyle::Sticky,
|
||||
priority: 0,
|
||||
render: Box::new({
|
||||
render: Arc::new({
|
||||
let editor = cx.view().clone();
|
||||
let hunk = hunk.clone();
|
||||
|
||||
@@ -435,6 +435,7 @@ impl Editor {
|
||||
|
||||
h_flex()
|
||||
.id(cx.block_id)
|
||||
.block_mouse_down()
|
||||
.h(cx.line_height())
|
||||
.w_full()
|
||||
.border_t_1()
|
||||
@@ -707,12 +708,13 @@ impl Editor {
|
||||
height,
|
||||
style: BlockStyle::Flex,
|
||||
priority: 0,
|
||||
render: Box::new(move |cx| {
|
||||
render: Arc::new(move |cx| {
|
||||
let width = EditorElement::diff_hunk_strip_width(cx.line_height());
|
||||
let gutter_dimensions = editor.read(cx.context).gutter_dimensions;
|
||||
|
||||
h_flex()
|
||||
.id(cx.block_id)
|
||||
.block_mouse_down()
|
||||
.bg(deleted_hunk_color)
|
||||
.h(height as f32 * cx.line_height())
|
||||
.w_full()
|
||||
|
||||
@@ -16,7 +16,8 @@ use gpui::{
|
||||
VisualContext, WeakView, WindowContext,
|
||||
};
|
||||
use language::{
|
||||
proto::serialize_anchor as serialize_text_anchor, Bias, Buffer, CharKind, Point, SelectionGoal,
|
||||
proto::serialize_anchor as serialize_text_anchor, Bias, Buffer, CharKind, DiskState, Point,
|
||||
SelectionGoal,
|
||||
};
|
||||
use lsp::DiagnosticSeverity;
|
||||
use multi_buffer::AnchorRangeExt;
|
||||
@@ -635,12 +636,21 @@ impl Item for Editor {
|
||||
Some(util::truncate_and_trailoff(description, MAX_TAB_TITLE_LEN))
|
||||
});
|
||||
|
||||
// Whether the file was saved in the past but is now deleted.
|
||||
let was_deleted: bool = self
|
||||
.buffer()
|
||||
.read(cx)
|
||||
.as_singleton()
|
||||
.and_then(|buffer| buffer.read(cx).file())
|
||||
.map_or(false, |file| file.disk_state() == DiskState::Deleted);
|
||||
|
||||
h_flex()
|
||||
.gap_2()
|
||||
.child(
|
||||
Label::new(self.title(cx).to_string())
|
||||
.color(label_color)
|
||||
.italic(params.preview),
|
||||
.italic(params.preview)
|
||||
.strikethrough(was_deleted),
|
||||
)
|
||||
.when_some(description, |this, description| {
|
||||
this.child(
|
||||
@@ -700,6 +710,10 @@ impl Item for Editor {
|
||||
self.buffer().read(cx).read(cx).is_dirty()
|
||||
}
|
||||
|
||||
fn has_deleted_file(&self, cx: &AppContext) -> bool {
|
||||
self.buffer().read(cx).read(cx).has_deleted_file()
|
||||
}
|
||||
|
||||
fn has_conflict(&self, cx: &AppContext) -> bool {
|
||||
self.buffer().read(cx).read(cx).has_conflict()
|
||||
}
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
|
||||
use super::{Bias, DisplayPoint, DisplaySnapshot, SelectionGoal, ToDisplayPoint};
|
||||
use crate::{scroll::ScrollAnchor, CharKind, DisplayRow, EditorStyle, RowExt, ToOffset, ToPoint};
|
||||
use gpui::{px, Pixels, WindowTextSystem};
|
||||
use gpui::{Pixels, WindowTextSystem};
|
||||
use language::Point;
|
||||
use multi_buffer::{MultiBufferRow, MultiBufferSnapshot};
|
||||
use serde::Deserialize;
|
||||
@@ -120,7 +120,7 @@ pub(crate) fn up_by_rows(
|
||||
preserve_column_at_start: bool,
|
||||
text_layout_details: &TextLayoutDetails,
|
||||
) -> (DisplayPoint, SelectionGoal) {
|
||||
let mut goal_x = match goal {
|
||||
let goal_x = match goal {
|
||||
SelectionGoal::HorizontalPosition(x) => x.into(),
|
||||
SelectionGoal::WrappedHorizontalPosition((_, x)) => x.into(),
|
||||
SelectionGoal::HorizontalRange { end, .. } => end.into(),
|
||||
@@ -138,7 +138,6 @@ pub(crate) fn up_by_rows(
|
||||
return (start, goal);
|
||||
} else {
|
||||
point = DisplayPoint::new(DisplayRow(0), 0);
|
||||
goal_x = px(0.);
|
||||
}
|
||||
|
||||
let mut clipped_point = map.clip_point(point, Bias::Left);
|
||||
@@ -159,7 +158,7 @@ pub(crate) fn down_by_rows(
|
||||
preserve_column_at_end: bool,
|
||||
text_layout_details: &TextLayoutDetails,
|
||||
) -> (DisplayPoint, SelectionGoal) {
|
||||
let mut goal_x = match goal {
|
||||
let goal_x = match goal {
|
||||
SelectionGoal::HorizontalPosition(x) => x.into(),
|
||||
SelectionGoal::WrappedHorizontalPosition((_, x)) => x.into(),
|
||||
SelectionGoal::HorizontalRange { end, .. } => end.into(),
|
||||
@@ -174,7 +173,6 @@ pub(crate) fn down_by_rows(
|
||||
return (start, goal);
|
||||
} else {
|
||||
point = map.max_point();
|
||||
goal_x = map.x_for_display_point(point, text_layout_details)
|
||||
}
|
||||
|
||||
let mut clipped_point = map.clip_point(point, Bias::Right);
|
||||
@@ -610,7 +608,7 @@ mod tests {
|
||||
test::{editor_test_context::EditorTestContext, marked_display_snapshot},
|
||||
Buffer, DisplayMap, DisplayRow, ExcerptRange, FoldPlaceholder, InlayId, MultiBuffer,
|
||||
};
|
||||
use gpui::{font, Context as _};
|
||||
use gpui::{font, px, Context as _};
|
||||
use language::Capability;
|
||||
use project::Project;
|
||||
use settings::SettingsStore;
|
||||
@@ -977,7 +975,7 @@ mod tests {
|
||||
),
|
||||
(
|
||||
DisplayPoint::new(DisplayRow(2), 0),
|
||||
SelectionGoal::HorizontalPosition(0.0)
|
||||
SelectionGoal::HorizontalPosition(col_2_x.0),
|
||||
),
|
||||
);
|
||||
assert_eq!(
|
||||
@@ -990,7 +988,7 @@ mod tests {
|
||||
),
|
||||
(
|
||||
DisplayPoint::new(DisplayRow(2), 0),
|
||||
SelectionGoal::HorizontalPosition(0.0)
|
||||
SelectionGoal::HorizontalPosition(0.0),
|
||||
),
|
||||
);
|
||||
|
||||
@@ -1059,7 +1057,7 @@ mod tests {
|
||||
let max_point_x = snapshot
|
||||
.x_for_display_point(DisplayPoint::new(DisplayRow(7), 2), &text_layout_details);
|
||||
|
||||
// Can't move down off the end
|
||||
// Can't move down off the end, and attempting to do so leaves the selection goal unchanged
|
||||
assert_eq!(
|
||||
down(
|
||||
&snapshot,
|
||||
@@ -1070,7 +1068,7 @@ mod tests {
|
||||
),
|
||||
(
|
||||
DisplayPoint::new(DisplayRow(7), 2),
|
||||
SelectionGoal::HorizontalPosition(max_point_x.0)
|
||||
SelectionGoal::HorizontalPosition(0.0)
|
||||
),
|
||||
);
|
||||
assert_eq!(
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
use std::{fs, path::Path};
|
||||
|
||||
use anyhow::Context as _;
|
||||
use gpui::{Context, View, ViewContext, VisualContext, WindowContext};
|
||||
use language::Language;
|
||||
@@ -7,7 +9,7 @@ use text::ToPointUtf16;
|
||||
|
||||
use crate::{
|
||||
element::register_action, lsp_ext::find_specific_language_server_in_selection, Editor,
|
||||
ExpandMacroRecursively,
|
||||
ExpandMacroRecursively, OpenDocs,
|
||||
};
|
||||
|
||||
const RUST_ANALYZER_NAME: &str = "rust-analyzer";
|
||||
@@ -24,6 +26,7 @@ pub fn apply_related_actions(editor: &View<Editor>, cx: &mut WindowContext) {
|
||||
.is_some()
|
||||
{
|
||||
register_action(editor, cx, expand_macro_recursively);
|
||||
register_action(editor, cx, open_docs);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -94,3 +97,64 @@ pub fn expand_macro_recursively(
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
}
|
||||
|
||||
pub fn open_docs(editor: &mut Editor, _: &OpenDocs, cx: &mut ViewContext<'_, Editor>) {
|
||||
if editor.selections.count() == 0 {
|
||||
return;
|
||||
}
|
||||
let Some(project) = &editor.project else {
|
||||
return;
|
||||
};
|
||||
let Some(workspace) = editor.workspace() else {
|
||||
return;
|
||||
};
|
||||
|
||||
let Some((trigger_anchor, _rust_language, server_to_query, buffer)) =
|
||||
find_specific_language_server_in_selection(
|
||||
editor,
|
||||
cx,
|
||||
is_rust_language,
|
||||
RUST_ANALYZER_NAME,
|
||||
)
|
||||
else {
|
||||
return;
|
||||
};
|
||||
|
||||
let project = project.clone();
|
||||
let buffer_snapshot = buffer.read(cx).snapshot();
|
||||
let position = trigger_anchor.text_anchor.to_point_utf16(&buffer_snapshot);
|
||||
let open_docs_task = project.update(cx, |project, cx| {
|
||||
project.request_lsp(
|
||||
buffer,
|
||||
project::LanguageServerToQuery::Other(server_to_query),
|
||||
project::lsp_ext_command::OpenDocs { position },
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
cx.spawn(|_editor, mut cx| async move {
|
||||
let docs_urls = open_docs_task.await.context("open docs")?;
|
||||
if docs_urls.is_empty() {
|
||||
log::debug!("Empty docs urls for position {position:?}");
|
||||
return Ok(());
|
||||
} else {
|
||||
log::debug!("{:?}", docs_urls);
|
||||
}
|
||||
|
||||
workspace.update(&mut cx, |_workspace, cx| {
|
||||
// Check if the local document exists, otherwise fallback to the online document.
|
||||
// Open with the default browser.
|
||||
if let Some(local_url) = docs_urls.local {
|
||||
if fs::metadata(Path::new(&local_url[8..])).is_ok() {
|
||||
cx.open_url(&local_url);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(web_url) = docs_urls.web {
|
||||
cx.open_url(&web_url);
|
||||
}
|
||||
})
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
}
|
||||
|
||||
@@ -234,7 +234,16 @@ impl EditorLspTestContext {
|
||||
..Default::default()
|
||||
},
|
||||
Some(tree_sitter_html::language()),
|
||||
);
|
||||
)
|
||||
.with_queries(LanguageQueries {
|
||||
brackets: Some(Cow::from(indoc! {r#"
|
||||
("<" @open "/>" @close)
|
||||
("</" @open ">" @close)
|
||||
("<" @open ">" @close)
|
||||
("\"" @open "\"" @close)"#})),
|
||||
..Default::default()
|
||||
})
|
||||
.expect("Could not parse queries");
|
||||
Self::new(language, Default::default(), cx).await
|
||||
}
|
||||
|
||||
|
||||
@@ -15,9 +15,11 @@ path = "src/extension.rs"
|
||||
anyhow.workspace = true
|
||||
async-compression.workspace = true
|
||||
async-tar.workspace = true
|
||||
async-trait.workspace = true
|
||||
collections.workspace = true
|
||||
fs.workspace = true
|
||||
futures.workspace = true
|
||||
gpui.workspace = true
|
||||
http_client.workspace = true
|
||||
language.workspace = true
|
||||
log.workspace = true
|
||||
|
||||
@@ -1,10 +1,101 @@
|
||||
pub mod extension_builder;
|
||||
mod extension_manifest;
|
||||
mod types;
|
||||
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::sync::Arc;
|
||||
|
||||
use ::lsp::LanguageServerName;
|
||||
use anyhow::{anyhow, bail, Context as _, Result};
|
||||
use async_trait::async_trait;
|
||||
use fs::normalize_path;
|
||||
use gpui::Task;
|
||||
use language::LanguageName;
|
||||
use semantic_version::SemanticVersion;
|
||||
|
||||
pub use crate::extension_manifest::*;
|
||||
pub use crate::types::*;
|
||||
|
||||
#[async_trait]
|
||||
pub trait WorktreeDelegate: Send + Sync + 'static {
|
||||
fn id(&self) -> u64;
|
||||
fn root_path(&self) -> String;
|
||||
async fn read_text_file(&self, path: PathBuf) -> Result<String>;
|
||||
async fn which(&self, binary_name: String) -> Option<String>;
|
||||
async fn shell_env(&self) -> Vec<(String, String)>;
|
||||
}
|
||||
|
||||
pub trait KeyValueStoreDelegate: Send + Sync + 'static {
|
||||
fn insert(&self, key: String, docs: String) -> Task<Result<()>>;
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
pub trait Extension: Send + Sync + 'static {
|
||||
/// Returns the [`ExtensionManifest`] for this extension.
|
||||
fn manifest(&self) -> Arc<ExtensionManifest>;
|
||||
|
||||
/// Returns the path to this extension's working directory.
|
||||
fn work_dir(&self) -> Arc<Path>;
|
||||
|
||||
/// Returns a path relative to this extension's working directory.
|
||||
fn path_from_extension(&self, path: &Path) -> PathBuf {
|
||||
normalize_path(&self.work_dir().join(path))
|
||||
}
|
||||
|
||||
async fn language_server_command(
|
||||
&self,
|
||||
language_server_id: LanguageServerName,
|
||||
language_name: LanguageName,
|
||||
worktree: Arc<dyn WorktreeDelegate>,
|
||||
) -> Result<Command>;
|
||||
|
||||
async fn language_server_initialization_options(
|
||||
&self,
|
||||
language_server_id: LanguageServerName,
|
||||
language_name: LanguageName,
|
||||
worktree: Arc<dyn WorktreeDelegate>,
|
||||
) -> Result<Option<String>>;
|
||||
|
||||
async fn language_server_workspace_configuration(
|
||||
&self,
|
||||
language_server_id: LanguageServerName,
|
||||
worktree: Arc<dyn WorktreeDelegate>,
|
||||
) -> Result<Option<String>>;
|
||||
|
||||
async fn labels_for_completions(
|
||||
&self,
|
||||
language_server_id: LanguageServerName,
|
||||
completions: Vec<Completion>,
|
||||
) -> Result<Vec<Option<CodeLabel>>>;
|
||||
|
||||
async fn labels_for_symbols(
|
||||
&self,
|
||||
language_server_id: LanguageServerName,
|
||||
symbols: Vec<Symbol>,
|
||||
) -> Result<Vec<Option<CodeLabel>>>;
|
||||
|
||||
async fn complete_slash_command_argument(
|
||||
&self,
|
||||
command: SlashCommand,
|
||||
arguments: Vec<String>,
|
||||
) -> Result<Vec<SlashCommandArgumentCompletion>>;
|
||||
|
||||
async fn run_slash_command(
|
||||
&self,
|
||||
command: SlashCommand,
|
||||
arguments: Vec<String>,
|
||||
worktree: Option<Arc<dyn WorktreeDelegate>>,
|
||||
) -> Result<SlashCommandOutput>;
|
||||
|
||||
async fn suggest_docs_packages(&self, provider: Arc<str>) -> Result<Vec<String>>;
|
||||
|
||||
async fn index_docs(
|
||||
&self,
|
||||
provider: Arc<str>,
|
||||
package_name: Arc<str>,
|
||||
kv_store: Arc<dyn KeyValueStoreDelegate>,
|
||||
) -> Result<()>;
|
||||
}
|
||||
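An illustrative implementation (not part of the diff) of the `KeyValueStoreDelegate` trait declared above, backed by an in-memory map; `gpui::Task::ready` wraps the synchronous result.

    use std::{collections::HashMap, sync::Mutex};

    struct InMemoryDocsStore(Mutex<HashMap<String, String>>);

    impl KeyValueStoreDelegate for InMemoryDocsStore {
        fn insert(&self, key: String, docs: String) -> Task<Result<()>> {
            self.0.lock().unwrap().insert(key, docs);
            Task::ready(Ok(()))
        }
    }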
|
||||
pub fn parse_wasm_extension_version(
|
||||
extension_id: &str,
|
||||
|
||||
crates/extension/src/types.rs (new file, 49 lines)
@@ -0,0 +1,49 @@
|
||||
mod lsp;
|
||||
mod slash_command;
|
||||
|
||||
use std::ops::Range;
|
||||
|
||||
pub use lsp::*;
|
||||
pub use slash_command::*;
|
||||
|
||||
/// A list of environment variables.
|
||||
pub type EnvVars = Vec<(String, String)>;
|
||||
|
||||
/// A command.
|
||||
pub struct Command {
|
||||
/// The command to execute.
|
||||
pub command: String,
|
||||
/// The arguments to pass to the command.
|
||||
pub args: Vec<String>,
|
||||
/// The environment variables to set for the command.
|
||||
pub env: EnvVars,
|
||||
}
|
||||
|
||||
/// A label containing some code.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct CodeLabel {
|
||||
/// The source code to parse with Tree-sitter.
|
||||
pub code: String,
|
||||
/// The spans to display in the label.
|
||||
pub spans: Vec<CodeLabelSpan>,
|
||||
/// The range of the displayed label to include when filtering.
|
||||
pub filter_range: Range<usize>,
|
||||
}
|
||||
|
||||
/// A span within a code label.
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum CodeLabelSpan {
|
||||
/// A range into the parsed code.
|
||||
CodeRange(Range<usize>),
|
||||
/// A span containing a code literal.
|
||||
Literal(CodeLabelSpanLiteral),
|
||||
}
|
||||
|
||||
/// A span containing a code literal.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct CodeLabelSpanLiteral {
|
||||
/// The literal text.
|
||||
pub text: String,
|
||||
/// The name of the highlight to use for this literal.
|
||||
pub highlight_name: Option<String>,
|
||||
}
|
||||
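A short, hedged example of constructing the types defined above: a label for a completion such as `fn foo()`, where the highlighted name comes from a range into the parsed code and the parentheses are supplied literally.

    let label = CodeLabel {
        code: "fn foo() {}".to_string(),
        spans: vec![
            // Bytes 3..6 of `code` are "foo".
            CodeLabelSpan::CodeRange(3..6),
            CodeLabelSpan::Literal(CodeLabelSpanLiteral {
                text: "()".to_string(),
                highlight_name: None,
            }),
        ],
        // The displayed label is "foo()"; filter on the leading "foo".
        filter_range: 0..3,
    };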
crates/extension/src/types/lsp.rs (new file, 96 lines)
@@ -0,0 +1,96 @@
|
||||
use std::option::Option;
|
||||
|
||||
/// An LSP completion.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct Completion {
|
||||
pub label: String,
|
||||
pub label_details: Option<CompletionLabelDetails>,
|
||||
pub detail: Option<String>,
|
||||
pub kind: Option<CompletionKind>,
|
||||
pub insert_text_format: Option<InsertTextFormat>,
|
||||
}
|
||||
|
||||
/// The kind of an LSP completion.
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub enum CompletionKind {
|
||||
Text,
|
||||
Method,
|
||||
Function,
|
||||
Constructor,
|
||||
Field,
|
||||
Variable,
|
||||
Class,
|
||||
Interface,
|
||||
Module,
|
||||
Property,
|
||||
Unit,
|
||||
Value,
|
||||
Enum,
|
||||
Keyword,
|
||||
Snippet,
|
||||
Color,
|
||||
File,
|
||||
Reference,
|
||||
Folder,
|
||||
EnumMember,
|
||||
Constant,
|
||||
Struct,
|
||||
Event,
|
||||
Operator,
|
||||
TypeParameter,
|
||||
Other(i32),
|
||||
}
|
||||
|
||||
/// Label details for an LSP completion.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct CompletionLabelDetails {
|
||||
pub detail: Option<String>,
|
||||
pub description: Option<String>,
|
||||
}
|
||||
|
||||
/// Defines how to interpret the insert text in a completion item.
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub enum InsertTextFormat {
|
||||
PlainText,
|
||||
Snippet,
|
||||
Other(i32),
|
||||
}
|
||||
|
||||
/// An LSP symbol.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct Symbol {
|
||||
pub kind: SymbolKind,
|
||||
pub name: String,
|
||||
}
|
||||
|
||||
/// The kind of an LSP symbol.
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub enum SymbolKind {
|
||||
File,
|
||||
Module,
|
||||
Namespace,
|
||||
Package,
|
||||
Class,
|
||||
Method,
|
||||
Property,
|
||||
Field,
|
||||
Constructor,
|
||||
Enum,
|
||||
Interface,
|
||||
Function,
|
||||
Variable,
|
||||
Constant,
|
||||
String,
|
||||
Number,
|
||||
Boolean,
|
||||
Array,
|
||||
Object,
|
||||
Key,
|
||||
Null,
|
||||
EnumMember,
|
||||
Struct,
|
||||
Event,
|
||||
Operator,
|
||||
TypeParameter,
|
||||
Other(i32),
|
||||
}
|
||||
crates/extension/src/types/slash_command.rs (new file, 43 lines)
@@ -0,0 +1,43 @@
|
||||
use std::ops::Range;
|
||||
|
||||
/// A slash command for use in the Assistant.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct SlashCommand {
|
||||
/// The name of the slash command.
|
||||
pub name: String,
|
||||
/// The description of the slash command.
|
||||
pub description: String,
|
||||
/// The tooltip text to display for the run button.
|
||||
pub tooltip_text: String,
|
||||
/// Whether this slash command requires an argument.
|
||||
pub requires_argument: bool,
|
||||
}
|
||||
|
||||
/// The output of a slash command.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct SlashCommandOutput {
|
||||
/// The text produced by the slash command.
|
||||
pub text: String,
|
||||
/// The list of sections to show in the slash command placeholder.
|
||||
pub sections: Vec<SlashCommandOutputSection>,
|
||||
}
|
||||
|
||||
/// A section in the slash command output.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct SlashCommandOutputSection {
|
||||
/// The range this section occupies.
|
||||
pub range: Range<usize>,
|
||||
/// The label to display in the placeholder for this section.
|
||||
pub label: String,
|
||||
}
|
||||
|
||||
/// A completion for a slash command argument.
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct SlashCommandArgumentCompletion {
|
||||
/// The label to display for this completion.
|
||||
pub label: String,
|
||||
/// The new text that should be inserted into the command when this completion is accepted.
|
||||
pub new_text: String,
|
||||
/// Whether the command should be run when accepting this completion.
|
||||
pub run_command: bool,
|
||||
}
|
||||
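A small, self-contained example (illustrative only) of a command result built from the output types above, with one section covering the entire text:

    let text = String::from("Hello from the extension");
    let output = SlashCommandOutput {
        sections: vec![SlashCommandOutputSection {
            range: 0..text.len(),
            label: "Greeting".to_string(),
        }],
        text,
    };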
@@ -8,9 +8,6 @@ keywords = ["zed", "extension"]
|
||||
edition = "2021"
|
||||
license = "Apache-2.0"
|
||||
|
||||
# Remove when we're ready to publish v0.2.0.
|
||||
publish = false
|
||||
|
||||
[lints]
|
||||
workspace = true
|
||||
|
||||
|
||||
@@ -63,6 +63,7 @@ Here is the compatibility of the `zed_extension_api` with versions of Zed:
|
||||
|
||||
| Zed version | `zed_extension_api` version |
|
||||
| ----------- | --------------------------- |
|
||||
| `0.162.x` | `0.0.1` - `0.2.0` |
|
||||
| `0.149.x` | `0.0.1` - `0.1.0` |
|
||||
| `0.131.x` | `0.0.1` - `0.0.6` |
|
||||
| `0.130.x` | `0.0.1` - `0.0.5` |
|
||||
|
||||
@@ -58,3 +58,4 @@ language = { workspace = true, features = ["test-support"] }
|
||||
parking_lot.workspace = true
|
||||
project = { workspace = true, features = ["test-support"] }
|
||||
reqwest_client.workspace = true
|
||||
theme = { workspace = true, features = ["test-support"] }
|
||||
|
||||
@@ -2,13 +2,17 @@ pub mod extension_lsp_adapter;
|
||||
pub mod extension_settings;
|
||||
pub mod wasm_host;
|
||||
|
||||
use crate::{extension_lsp_adapter::ExtensionLspAdapter, wasm_host::wit};
|
||||
#[cfg(test)]
|
||||
mod extension_store_test;
|
||||
|
||||
use crate::extension_lsp_adapter::ExtensionLspAdapter;
|
||||
use anyhow::{anyhow, bail, Context as _, Result};
|
||||
use async_compression::futures::bufread::GzipDecoder;
|
||||
use async_tar::Archive;
|
||||
use client::{telemetry::Telemetry, Client, ExtensionMetadata, GetExtensionsResponse};
|
||||
use collections::{btree_map, BTreeMap, HashSet};
|
||||
use extension::extension_builder::{CompileExtensionOptions, ExtensionBuilder};
|
||||
use extension::Extension;
|
||||
pub use extension::ExtensionManifest;
|
||||
use fs::{Fs, RemoveOptions};
|
||||
use futures::{
|
||||
@@ -90,10 +94,6 @@ pub fn is_version_compatible(
|
||||
true
|
||||
}
|
||||
|
||||
pub trait DocsDatabase: Send + Sync + 'static {
|
||||
fn insert(&self, key: String, docs: String) -> Task<Result<()>>;
|
||||
}
|
||||
|
||||
pub trait ExtensionRegistrationHooks: Send + Sync + 'static {
|
||||
fn remove_user_themes(&self, _themes: Vec<SharedString>) {}
|
||||
|
||||
@@ -135,9 +135,8 @@ pub trait ExtensionRegistrationHooks: Send + Sync + 'static {
|
||||
|
||||
fn register_slash_command(
|
||||
&self,
|
||||
_slash_command: wit::SlashCommand,
|
||||
_extension: WasmExtension,
|
||||
_host: Arc<WasmHost>,
|
||||
_extension: Arc<dyn Extension>,
|
||||
_command: extension::SlashCommand,
|
||||
) {
|
||||
}
|
||||
|
||||
@@ -145,17 +144,11 @@ pub trait ExtensionRegistrationHooks: Send + Sync + 'static {
|
||||
&self,
|
||||
_id: Arc<str>,
|
||||
_extension: WasmExtension,
|
||||
_host: Arc<WasmHost>,
|
||||
_cx: &mut AppContext,
|
||||
) {
|
||||
}
|
||||
|
||||
fn register_docs_provider(
|
||||
&self,
|
||||
_extension: WasmExtension,
|
||||
_host: Arc<WasmHost>,
|
||||
_provider_id: Arc<str>,
|
||||
) {
|
||||
}
|
||||
fn register_docs_provider(&self, _extension: Arc<dyn Extension>, _provider_id: Arc<str>) {}
|
||||
|
||||
fn register_snippets(&self, _path: &PathBuf, _snippet_contents: &str) -> Result<()> {
|
||||
Ok(())
|
||||
@@ -1238,18 +1231,16 @@ impl ExtensionStore {
|
||||
this.reload_complete_senders.clear();
|
||||
|
||||
for (manifest, wasm_extension) in &wasm_extensions {
|
||||
let extension = Arc::new(wasm_extension.clone());
|
||||
|
||||
for (language_server_id, language_server_config) in &manifest.language_servers {
|
||||
for language in language_server_config.languages() {
|
||||
this.registration_hooks.register_lsp_adapter(
|
||||
language.clone(),
|
||||
ExtensionLspAdapter {
|
||||
extension: wasm_extension.clone(),
|
||||
host: this.wasm_host.clone(),
|
||||
extension: extension.clone(),
|
||||
language_server_id: language_server_id.clone(),
|
||||
config: wit::LanguageServerConfig {
|
||||
name: language_server_id.0.to_string(),
|
||||
language_name: language.to_string(),
|
||||
},
|
||||
language_name: language.clone(),
|
||||
},
|
||||
);
|
||||
}
|
||||
@@ -1257,7 +1248,8 @@ impl ExtensionStore {
|
||||
|
||||
for (slash_command_name, slash_command) in &manifest.slash_commands {
|
||||
this.registration_hooks.register_slash_command(
|
||||
crate::wit::SlashCommand {
|
||||
extension.clone(),
|
||||
extension::SlashCommand {
|
||||
name: slash_command_name.to_string(),
|
||||
description: slash_command.description.to_string(),
|
||||
// We don't currently expose this as a configurable option, as it currently drives
|
||||
@@ -1266,8 +1258,6 @@ impl ExtensionStore {
|
||||
tooltip_text: String::new(),
|
||||
requires_argument: slash_command.requires_argument,
|
||||
},
|
||||
wasm_extension.clone(),
|
||||
this.wasm_host.clone(),
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1275,16 +1265,13 @@ impl ExtensionStore {
|
||||
this.registration_hooks.register_context_server(
|
||||
id.clone(),
|
||||
wasm_extension.clone(),
|
||||
this.wasm_host.clone(),
|
||||
cx,
|
||||
);
|
||||
}
|
||||
|
||||
for (provider_id, _provider) in &manifest.indexed_docs_providers {
|
||||
this.registration_hooks.register_docs_provider(
|
||||
wasm_extension.clone(),
|
||||
this.wasm_host.clone(),
|
||||
provider_id.clone(),
|
||||
);
|
||||
this.registration_hooks
|
||||
.register_docs_provider(extension.clone(), provider_id.clone());
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,14 +1,12 @@
|
||||
use crate::wasm_host::{
|
||||
wit::{self, LanguageServerConfig},
|
||||
WasmExtension, WasmHost,
|
||||
};
|
||||
use anyhow::{anyhow, Context, Result};
|
||||
use anyhow::{Context, Result};
|
||||
use async_trait::async_trait;
|
||||
use collections::HashMap;
|
||||
use extension::{Extension, WorktreeDelegate};
|
||||
use futures::{Future, FutureExt};
|
||||
use gpui::AsyncAppContext;
|
||||
use language::{
|
||||
CodeLabel, HighlightId, Language, LanguageToolchainStore, LspAdapter, LspAdapterDelegate,
|
||||
CodeLabel, HighlightId, Language, LanguageName, LanguageToolchainStore, LspAdapter,
|
||||
LspAdapterDelegate,
|
||||
};
|
||||
use lsp::{CodeActionKind, LanguageServerBinary, LanguageServerBinaryOptions, LanguageServerName};
|
||||
use serde::Serialize;
|
||||
@@ -16,19 +14,46 @@ use serde_json::Value;
|
||||
use std::ops::Range;
|
||||
use std::{any::Any, path::PathBuf, pin::Pin, sync::Arc};
|
||||
use util::{maybe, ResultExt};
|
||||
use wasmtime_wasi::WasiView as _;
|
||||
|
||||
/// An adapter that allows an [`LspAdapterDelegate`] to be used as a [`WorktreeDelegate`].
|
||||
pub struct WorktreeDelegateAdapter(pub Arc<dyn LspAdapterDelegate>);
|
||||
|
||||
#[async_trait]
|
||||
impl WorktreeDelegate for WorktreeDelegateAdapter {
|
||||
fn id(&self) -> u64 {
|
||||
self.0.worktree_id().to_proto()
|
||||
}
|
||||
|
||||
fn root_path(&self) -> String {
|
||||
self.0.worktree_root_path().to_string_lossy().to_string()
|
||||
}
|
||||
|
||||
async fn read_text_file(&self, path: PathBuf) -> Result<String> {
|
||||
self.0.read_text_file(path).await
|
||||
}
|
||||
|
||||
async fn which(&self, binary_name: String) -> Option<String> {
|
||||
self.0
|
||||
.which(binary_name.as_ref())
|
||||
.await
|
||||
.map(|path| path.to_string_lossy().to_string())
|
||||
}
|
||||
|
||||
async fn shell_env(&self) -> Vec<(String, String)> {
|
||||
self.0.shell_env().await.into_iter().collect()
|
||||
}
|
||||
}
|
||||
|
||||
pub struct ExtensionLspAdapter {
|
||||
pub(crate) extension: WasmExtension,
|
||||
pub(crate) extension: Arc<dyn Extension>,
|
||||
pub(crate) language_server_id: LanguageServerName,
|
||||
pub(crate) config: LanguageServerConfig,
|
||||
pub(crate) host: Arc<WasmHost>,
|
||||
pub(crate) language_name: LanguageName,
|
||||
}
|
||||
|
||||
#[async_trait(?Send)]
|
||||
impl LspAdapter for ExtensionLspAdapter {
|
||||
fn name(&self) -> LanguageServerName {
|
||||
LanguageServerName(self.config.name.clone().into())
|
||||
self.language_server_id.clone()
|
||||
}
|
||||
|
||||
fn get_language_server_command<'a>(
|
||||
@@ -39,32 +64,17 @@ impl LspAdapter for ExtensionLspAdapter {
|
||||
_: &'a mut AsyncAppContext,
|
||||
) -> Pin<Box<dyn 'a + Future<Output = Result<LanguageServerBinary>>>> {
|
||||
async move {
|
||||
let delegate = Arc::new(WorktreeDelegateAdapter(delegate.clone())) as _;
|
||||
let command = self
|
||||
.extension
|
||||
.call({
|
||||
let this = self.clone();
|
||||
|extension, store| {
|
||||
async move {
|
||||
let resource = store.data_mut().table().push(delegate)?;
|
||||
let command = extension
|
||||
.call_language_server_command(
|
||||
store,
|
||||
&this.language_server_id,
|
||||
&this.config,
|
||||
resource,
|
||||
)
|
||||
.await?
|
||||
.map_err(|e| anyhow!("{}", e))?;
|
||||
anyhow::Ok(command)
|
||||
}
|
||||
.boxed()
|
||||
}
|
||||
})
|
||||
.language_server_command(
|
||||
self.language_server_id.clone(),
|
||||
self.language_name.clone(),
|
||||
delegate,
|
||||
)
|
||||
.await?;
|
||||
|
||||
let path = self
|
||||
.host
|
||||
.path_from_extension(&self.extension.manifest.id, command.command.as_ref());
|
||||
let path = self.extension.path_from_extension(command.command.as_ref());
|
||||
|
||||
// TODO: This should now be done via the `zed::make_file_executable` function in
|
||||
// Zed extension API, but we're leaving these existing usages in place temporarily
|
||||
@@ -73,8 +83,8 @@ impl LspAdapter for ExtensionLspAdapter {
|
||||
// We can remove once the following extension versions no longer see any use:
|
||||
// - toml@0.0.2
|
||||
// - zig@0.0.1
|
||||
if ["toml", "zig"].contains(&self.extension.manifest.id.as_ref())
|
||||
&& path.starts_with(&self.host.work_dir)
|
||||
if ["toml", "zig"].contains(&self.extension.manifest().id.as_ref())
|
||||
&& path.starts_with(&self.extension.work_dir())
|
||||
{
|
||||
#[cfg(not(windows))]
|
||||
{
|
||||
@@ -122,7 +132,7 @@ impl LspAdapter for ExtensionLspAdapter {
|
||||
fn code_action_kinds(&self) -> Option<Vec<CodeActionKind>> {
|
||||
let code_action_kinds = self
|
||||
.extension
|
||||
.manifest
|
||||
.manifest()
|
||||
.language_servers
|
||||
.get(&self.language_server_id)
|
||||
.and_then(|server| server.code_action_kinds.clone());
|
||||
@@ -143,14 +153,14 @@ impl LspAdapter for ExtensionLspAdapter {
|
||||
//
|
||||
// We can remove once the following extension versions no longer see any use:
|
||||
// - php@0.0.1
|
||||
if self.extension.manifest.id.as_ref() == "php" {
|
||||
if self.extension.manifest().id.as_ref() == "php" {
|
||||
return HashMap::from_iter([("PHP".into(), "php".into())]);
|
||||
}
|
||||
|
||||
self.extension
|
||||
.manifest
|
||||
.manifest()
|
||||
.language_servers
|
||||
.get(&LanguageServerName(self.config.name.clone().into()))
|
||||
.get(&self.language_server_id)
|
||||
.map(|server| server.language_ids.clone())
|
||||
.unwrap_or_default()
|
||||
}
|
||||
@@ -159,28 +169,14 @@ impl LspAdapter for ExtensionLspAdapter {
|
||||
self: Arc<Self>,
|
||||
delegate: &Arc<dyn LspAdapterDelegate>,
|
||||
) -> Result<Option<serde_json::Value>> {
|
||||
let delegate = delegate.clone();
|
||||
let delegate = Arc::new(WorktreeDelegateAdapter(delegate.clone())) as _;
|
||||
let json_options = self
|
||||
.extension
|
||||
.call({
|
||||
let this = self.clone();
|
||||
|extension, store| {
|
||||
async move {
|
||||
let resource = store.data_mut().table().push(delegate)?;
|
||||
let options = extension
|
||||
.call_language_server_initialization_options(
|
||||
store,
|
||||
&this.language_server_id,
|
||||
&this.config,
|
||||
resource,
|
||||
)
|
||||
.await?
|
||||
.map_err(|e| anyhow!("{}", e))?;
|
||||
anyhow::Ok(options)
|
||||
}
|
||||
.boxed()
|
||||
}
|
||||
})
|
||||
.language_server_initialization_options(
|
||||
self.language_server_id.clone(),
|
||||
self.language_name.clone(),
|
||||
delegate,
|
||||
)
|
||||
.await?;
|
||||
Ok(if let Some(json_options) = json_options {
|
||||
serde_json::from_str(&json_options).with_context(|| {
|
||||
@@ -197,31 +193,14 @@ impl LspAdapter for ExtensionLspAdapter {
|
||||
_: Arc<dyn LanguageToolchainStore>,
|
||||
_cx: &mut AsyncAppContext,
|
||||
) -> Result<Value> {
|
||||
let delegate = delegate.clone();
|
||||
let delegate = Arc::new(WorktreeDelegateAdapter(delegate.clone())) as _;
|
||||
let json_options: Option<String> = self
|
||||
.extension
|
||||
.call({
|
||||
let this = self.clone();
|
||||
|extension, store| {
|
||||
async move {
|
||||
let resource = store.data_mut().table().push(delegate)?;
|
||||
let options = extension
|
||||
.call_language_server_workspace_configuration(
|
||||
store,
|
||||
&this.language_server_id,
|
||||
resource,
|
||||
)
|
||||
.await?
|
||||
.map_err(|e| anyhow!("{}", e))?;
|
||||
anyhow::Ok(options)
|
||||
}
|
||||
.boxed()
|
||||
}
|
||||
})
|
||||
.language_server_workspace_configuration(self.language_server_id.clone(), delegate)
|
||||
.await?;
|
||||
Ok(if let Some(json_options) = json_options {
|
||||
serde_json::from_str(&json_options).with_context(|| {
|
||||
format!("failed to parse initialization_options from extension: {json_options}")
|
||||
format!("failed to parse workspace_configuration from extension: {json_options}")
|
||||
})?
|
||||
} else {
|
||||
serde_json::json!({})
|
||||
@@ -235,30 +214,16 @@ impl LspAdapter for ExtensionLspAdapter {
|
||||
) -> Result<Vec<Option<CodeLabel>>> {
|
||||
let completions = completions
|
||||
.iter()
|
||||
.map(|completion| wit::Completion::from(completion.clone()))
|
||||
.cloned()
|
||||
.map(lsp_completion_to_extension)
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let labels = self
|
||||
.extension
|
||||
.call({
|
||||
let this = self.clone();
|
||||
|extension, store| {
|
||||
async move {
|
||||
extension
|
||||
.call_labels_for_completions(
|
||||
store,
|
||||
&this.language_server_id,
|
||||
completions,
|
||||
)
|
||||
.await?
|
||||
.map_err(|e| anyhow!("{}", e))
|
||||
}
|
||||
.boxed()
|
||||
}
|
||||
})
|
||||
.labels_for_completions(self.language_server_id.clone(), completions)
|
||||
.await?;
|
||||
|
||||
Ok(labels_from_wit(labels, language))
|
||||
Ok(labels_from_extension(labels, language))
|
||||
}
|
||||
|
||||
async fn labels_for_symbols(
|
||||
@@ -269,34 +234,29 @@ impl LspAdapter for ExtensionLspAdapter {
|
||||
let symbols = symbols
|
||||
.iter()
|
||||
.cloned()
|
||||
.map(|(name, kind)| wit::Symbol {
|
||||
.map(|(name, kind)| extension::Symbol {
|
||||
name,
|
||||
kind: kind.into(),
|
||||
kind: lsp_symbol_kind_to_extension(kind),
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let labels = self
|
||||
.extension
|
||||
.call({
|
||||
let this = self.clone();
|
||||
|extension, store| {
|
||||
async move {
|
||||
extension
|
||||
.call_labels_for_symbols(store, &this.language_server_id, symbols)
|
||||
.await?
|
||||
.map_err(|e| anyhow!("{}", e))
|
||||
}
|
||||
.boxed()
|
||||
}
|
||||
})
|
||||
.labels_for_symbols(self.language_server_id.clone(), symbols)
|
||||
.await?;
|
||||
|
||||
Ok(labels_from_wit(labels, language))
|
||||
Ok(labels_from_extension(
|
||||
labels
|
||||
.into_iter()
|
||||
.map(|label| label.map(Into::into))
|
||||
.collect(),
|
||||
language,
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
fn labels_from_wit(
|
||||
labels: Vec<Option<wit::CodeLabel>>,
|
||||
fn labels_from_extension(
|
||||
labels: Vec<Option<extension::CodeLabel>>,
|
||||
language: &Arc<Language>,
|
||||
) -> Vec<Option<CodeLabel>> {
|
||||
labels
|
||||
@@ -314,7 +274,7 @@ fn labels_from_wit(
|
||||
}
|
||||
|
||||
fn build_code_label(
|
||||
label: &wit::CodeLabel,
|
||||
label: &extension::CodeLabel,
|
||||
parsed_runs: &[(Range<usize>, HighlightId)],
|
||||
language: &Arc<Language>,
|
||||
) -> Option<CodeLabel> {
|
||||
@@ -323,8 +283,7 @@ fn build_code_label(
|
||||
|
||||
for span in &label.spans {
|
||||
match span {
|
||||
wit::CodeLabelSpan::CodeRange(range) => {
|
||||
let range = Range::from(*range);
|
||||
extension::CodeLabelSpan::CodeRange(range) => {
|
||||
let code_span = &label.code.get(range.clone())?;
|
||||
let mut input_ix = range.start;
|
||||
let mut output_ix = text.len();
|
||||
@@ -350,7 +309,7 @@ fn build_code_label(
|
||||
|
||||
text.push_str(code_span);
|
||||
}
|
||||
wit::CodeLabelSpan::Literal(span) => {
|
||||
extension::CodeLabelSpan::Literal(span) => {
|
||||
let highlight_id = language
|
||||
.grammar()
|
||||
.zip(span.highlight_name.as_ref())
|
||||
@@ -365,7 +324,7 @@ fn build_code_label(
|
||||
}
|
||||
}
|
||||
|
||||
let filter_range = Range::from(label.filter_range);
|
||||
let filter_range = label.filter_range.clone();
|
||||
text.get(filter_range.clone())?;
|
||||
Some(CodeLabel {
|
||||
text,
|
||||
@@ -374,109 +333,101 @@ fn build_code_label(
|
||||
})
|
||||
}
|
||||
|
||||
impl From<wit::Range> for Range<usize> {
|
||||
fn from(range: wit::Range) -> Self {
|
||||
let start = range.start as usize;
|
||||
let end = range.end as usize;
|
||||
start..end
|
||||
fn lsp_completion_to_extension(value: lsp::CompletionItem) -> extension::Completion {
|
||||
extension::Completion {
|
||||
label: value.label,
|
||||
label_details: value
|
||||
.label_details
|
||||
.map(lsp_completion_item_label_details_to_extension),
|
||||
detail: value.detail,
|
||||
kind: value.kind.map(lsp_completion_item_kind_to_extension),
|
||||
insert_text_format: value
|
||||
.insert_text_format
|
||||
.map(lsp_insert_text_format_to_extension),
|
||||
}
|
||||
}
|
||||
|
||||
impl From<lsp::CompletionItem> for wit::Completion {
|
||||
fn from(value: lsp::CompletionItem) -> Self {
|
||||
Self {
|
||||
label: value.label,
|
||||
label_details: value.label_details.map(Into::into),
|
||||
detail: value.detail,
|
||||
kind: value.kind.map(Into::into),
|
||||
insert_text_format: value.insert_text_format.map(Into::into),
|
||||
}
|
||||
fn lsp_completion_item_label_details_to_extension(
|
||||
value: lsp::CompletionItemLabelDetails,
|
||||
) -> extension::CompletionLabelDetails {
|
||||
extension::CompletionLabelDetails {
|
||||
detail: value.detail,
|
||||
description: value.description,
|
||||
}
|
||||
}
|
||||
|
||||
impl From<lsp::CompletionItemLabelDetails> for wit::CompletionLabelDetails {
|
||||
fn from(value: lsp::CompletionItemLabelDetails) -> Self {
|
||||
Self {
|
||||
detail: value.detail,
|
||||
description: value.description,
|
||||
}
|
||||
fn lsp_completion_item_kind_to_extension(
|
||||
value: lsp::CompletionItemKind,
|
||||
) -> extension::CompletionKind {
|
||||
match value {
|
||||
lsp::CompletionItemKind::TEXT => extension::CompletionKind::Text,
|
||||
lsp::CompletionItemKind::METHOD => extension::CompletionKind::Method,
|
||||
lsp::CompletionItemKind::FUNCTION => extension::CompletionKind::Function,
|
||||
lsp::CompletionItemKind::CONSTRUCTOR => extension::CompletionKind::Constructor,
|
||||
lsp::CompletionItemKind::FIELD => extension::CompletionKind::Field,
|
||||
lsp::CompletionItemKind::VARIABLE => extension::CompletionKind::Variable,
|
||||
lsp::CompletionItemKind::CLASS => extension::CompletionKind::Class,
|
||||
lsp::CompletionItemKind::INTERFACE => extension::CompletionKind::Interface,
|
||||
lsp::CompletionItemKind::MODULE => extension::CompletionKind::Module,
|
||||
lsp::CompletionItemKind::PROPERTY => extension::CompletionKind::Property,
|
||||
lsp::CompletionItemKind::UNIT => extension::CompletionKind::Unit,
|
||||
lsp::CompletionItemKind::VALUE => extension::CompletionKind::Value,
|
||||
lsp::CompletionItemKind::ENUM => extension::CompletionKind::Enum,
|
||||
lsp::CompletionItemKind::KEYWORD => extension::CompletionKind::Keyword,
|
||||
lsp::CompletionItemKind::SNIPPET => extension::CompletionKind::Snippet,
|
||||
lsp::CompletionItemKind::COLOR => extension::CompletionKind::Color,
|
||||
lsp::CompletionItemKind::FILE => extension::CompletionKind::File,
|
||||
lsp::CompletionItemKind::REFERENCE => extension::CompletionKind::Reference,
|
||||
lsp::CompletionItemKind::FOLDER => extension::CompletionKind::Folder,
|
||||
lsp::CompletionItemKind::ENUM_MEMBER => extension::CompletionKind::EnumMember,
|
||||
lsp::CompletionItemKind::CONSTANT => extension::CompletionKind::Constant,
|
||||
lsp::CompletionItemKind::STRUCT => extension::CompletionKind::Struct,
|
||||
lsp::CompletionItemKind::EVENT => extension::CompletionKind::Event,
|
||||
lsp::CompletionItemKind::OPERATOR => extension::CompletionKind::Operator,
|
||||
lsp::CompletionItemKind::TYPE_PARAMETER => extension::CompletionKind::TypeParameter,
|
||||
_ => extension::CompletionKind::Other(extract_int(value)),
|
||||
}
|
||||
}
|
||||
|
||||
impl From<lsp::CompletionItemKind> for wit::CompletionKind {
|
||||
fn from(value: lsp::CompletionItemKind) -> Self {
|
||||
match value {
|
||||
lsp::CompletionItemKind::TEXT => Self::Text,
|
||||
lsp::CompletionItemKind::METHOD => Self::Method,
|
||||
lsp::CompletionItemKind::FUNCTION => Self::Function,
|
||||
lsp::CompletionItemKind::CONSTRUCTOR => Self::Constructor,
|
||||
lsp::CompletionItemKind::FIELD => Self::Field,
|
||||
lsp::CompletionItemKind::VARIABLE => Self::Variable,
|
||||
lsp::CompletionItemKind::CLASS => Self::Class,
|
||||
lsp::CompletionItemKind::INTERFACE => Self::Interface,
|
||||
lsp::CompletionItemKind::MODULE => Self::Module,
|
||||
lsp::CompletionItemKind::PROPERTY => Self::Property,
|
||||
lsp::CompletionItemKind::UNIT => Self::Unit,
|
||||
lsp::CompletionItemKind::VALUE => Self::Value,
|
||||
lsp::CompletionItemKind::ENUM => Self::Enum,
|
||||
lsp::CompletionItemKind::KEYWORD => Self::Keyword,
|
||||
lsp::CompletionItemKind::SNIPPET => Self::Snippet,
|
||||
lsp::CompletionItemKind::COLOR => Self::Color,
|
||||
lsp::CompletionItemKind::FILE => Self::File,
|
||||
lsp::CompletionItemKind::REFERENCE => Self::Reference,
|
||||
lsp::CompletionItemKind::FOLDER => Self::Folder,
|
||||
lsp::CompletionItemKind::ENUM_MEMBER => Self::EnumMember,
|
||||
lsp::CompletionItemKind::CONSTANT => Self::Constant,
|
||||
lsp::CompletionItemKind::STRUCT => Self::Struct,
|
||||
lsp::CompletionItemKind::EVENT => Self::Event,
|
||||
lsp::CompletionItemKind::OPERATOR => Self::Operator,
|
||||
lsp::CompletionItemKind::TYPE_PARAMETER => Self::TypeParameter,
|
||||
_ => Self::Other(extract_int(value)),
|
||||
}
|
||||
fn lsp_insert_text_format_to_extension(
|
||||
value: lsp::InsertTextFormat,
|
||||
) -> extension::InsertTextFormat {
|
||||
match value {
|
||||
lsp::InsertTextFormat::PLAIN_TEXT => extension::InsertTextFormat::PlainText,
|
||||
lsp::InsertTextFormat::SNIPPET => extension::InsertTextFormat::Snippet,
|
||||
_ => extension::InsertTextFormat::Other(extract_int(value)),
|
||||
}
|
||||
}
|
||||
|
||||
impl From<lsp::InsertTextFormat> for wit::InsertTextFormat {
|
||||
fn from(value: lsp::InsertTextFormat) -> Self {
|
||||
match value {
|
||||
lsp::InsertTextFormat::PLAIN_TEXT => Self::PlainText,
|
||||
lsp::InsertTextFormat::SNIPPET => Self::Snippet,
|
||||
_ => Self::Other(extract_int(value)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<lsp::SymbolKind> for wit::SymbolKind {
|
||||
fn from(value: lsp::SymbolKind) -> Self {
|
||||
match value {
|
||||
lsp::SymbolKind::FILE => Self::File,
|
||||
lsp::SymbolKind::MODULE => Self::Module,
|
||||
lsp::SymbolKind::NAMESPACE => Self::Namespace,
|
||||
lsp::SymbolKind::PACKAGE => Self::Package,
|
||||
lsp::SymbolKind::CLASS => Self::Class,
|
||||
lsp::SymbolKind::METHOD => Self::Method,
|
||||
lsp::SymbolKind::PROPERTY => Self::Property,
|
||||
lsp::SymbolKind::FIELD => Self::Field,
|
||||
lsp::SymbolKind::CONSTRUCTOR => Self::Constructor,
|
||||
lsp::SymbolKind::ENUM => Self::Enum,
|
||||
lsp::SymbolKind::INTERFACE => Self::Interface,
|
||||
lsp::SymbolKind::FUNCTION => Self::Function,
|
||||
lsp::SymbolKind::VARIABLE => Self::Variable,
|
||||
lsp::SymbolKind::CONSTANT => Self::Constant,
|
||||
lsp::SymbolKind::STRING => Self::String,
|
||||
lsp::SymbolKind::NUMBER => Self::Number,
|
||||
lsp::SymbolKind::BOOLEAN => Self::Boolean,
|
||||
lsp::SymbolKind::ARRAY => Self::Array,
|
||||
lsp::SymbolKind::OBJECT => Self::Object,
|
||||
lsp::SymbolKind::KEY => Self::Key,
|
||||
lsp::SymbolKind::NULL => Self::Null,
|
||||
lsp::SymbolKind::ENUM_MEMBER => Self::EnumMember,
|
||||
lsp::SymbolKind::STRUCT => Self::Struct,
|
||||
lsp::SymbolKind::EVENT => Self::Event,
|
||||
lsp::SymbolKind::OPERATOR => Self::Operator,
|
||||
lsp::SymbolKind::TYPE_PARAMETER => Self::TypeParameter,
|
||||
_ => Self::Other(extract_int(value)),
|
||||
}
|
||||
fn lsp_symbol_kind_to_extension(value: lsp::SymbolKind) -> extension::SymbolKind {
|
||||
match value {
|
||||
lsp::SymbolKind::FILE => extension::SymbolKind::File,
|
||||
lsp::SymbolKind::MODULE => extension::SymbolKind::Module,
|
||||
lsp::SymbolKind::NAMESPACE => extension::SymbolKind::Namespace,
|
||||
lsp::SymbolKind::PACKAGE => extension::SymbolKind::Package,
|
||||
lsp::SymbolKind::CLASS => extension::SymbolKind::Class,
|
||||
lsp::SymbolKind::METHOD => extension::SymbolKind::Method,
|
||||
lsp::SymbolKind::PROPERTY => extension::SymbolKind::Property,
|
||||
lsp::SymbolKind::FIELD => extension::SymbolKind::Field,
|
||||
lsp::SymbolKind::CONSTRUCTOR => extension::SymbolKind::Constructor,
|
||||
lsp::SymbolKind::ENUM => extension::SymbolKind::Enum,
|
||||
lsp::SymbolKind::INTERFACE => extension::SymbolKind::Interface,
|
||||
lsp::SymbolKind::FUNCTION => extension::SymbolKind::Function,
|
||||
lsp::SymbolKind::VARIABLE => extension::SymbolKind::Variable,
|
||||
lsp::SymbolKind::CONSTANT => extension::SymbolKind::Constant,
|
||||
lsp::SymbolKind::STRING => extension::SymbolKind::String,
|
||||
lsp::SymbolKind::NUMBER => extension::SymbolKind::Number,
|
||||
lsp::SymbolKind::BOOLEAN => extension::SymbolKind::Boolean,
|
||||
lsp::SymbolKind::ARRAY => extension::SymbolKind::Array,
|
||||
lsp::SymbolKind::OBJECT => extension::SymbolKind::Object,
|
||||
lsp::SymbolKind::KEY => extension::SymbolKind::Key,
|
||||
lsp::SymbolKind::NULL => extension::SymbolKind::Null,
|
||||
lsp::SymbolKind::ENUM_MEMBER => extension::SymbolKind::EnumMember,
|
||||
lsp::SymbolKind::STRUCT => extension::SymbolKind::Struct,
|
||||
lsp::SymbolKind::EVENT => extension::SymbolKind::Event,
|
||||
lsp::SymbolKind::OPERATOR => extension::SymbolKind::Operator,
|
||||
lsp::SymbolKind::TYPE_PARAMETER => extension::SymbolKind::TypeParameter,
|
||||
_ => extension::SymbolKind::Other(extract_int(value)),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -503,21 +454,14 @@ fn test_build_code_label() {
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let label = build_code_label(
|
||||
&wit::CodeLabel {
|
||||
&extension::CodeLabel {
|
||||
spans: vec![
|
||||
wit::CodeLabelSpan::CodeRange(wit::Range {
|
||||
start: code.find("pqrs").unwrap() as u32,
|
||||
end: code.len() as u32,
|
||||
}),
|
||||
wit::CodeLabelSpan::CodeRange(wit::Range {
|
||||
start: code.find(": fn").unwrap() as u32,
|
||||
end: code.find(" = ").unwrap() as u32,
|
||||
}),
|
||||
extension::CodeLabelSpan::CodeRange(code.find("pqrs").unwrap()..code.len()),
|
||||
extension::CodeLabelSpan::CodeRange(
|
||||
code.find(": fn").unwrap()..code.find(" = ").unwrap(),
|
||||
),
|
||||
],
|
||||
filter_range: wit::Range {
|
||||
start: 0,
|
||||
end: "pqrs.tuv".len() as u32,
|
||||
},
|
||||
filter_range: 0.."pqrs.tuv".len(),
|
||||
code,
|
||||
},
|
||||
&code_runs,
|
||||
@@ -555,21 +499,14 @@ fn test_build_code_label_with_invalid_ranges() {
|
||||
// A span uses a code range that is invalid because it starts inside of
|
||||
// a multi-byte character.
|
||||
let label = build_code_label(
|
||||
&wit::CodeLabel {
|
||||
&extension::CodeLabel {
|
||||
spans: vec![
|
||||
wit::CodeLabelSpan::CodeRange(wit::Range {
|
||||
start: code.find('B').unwrap() as u32,
|
||||
end: code.find(" = ").unwrap() as u32,
|
||||
}),
|
||||
wit::CodeLabelSpan::CodeRange(wit::Range {
|
||||
start: code.find('🏀').unwrap() as u32 + 1,
|
||||
end: code.len() as u32,
|
||||
}),
|
||||
extension::CodeLabelSpan::CodeRange(
|
||||
code.find('B').unwrap()..code.find(" = ").unwrap(),
|
||||
),
|
||||
extension::CodeLabelSpan::CodeRange((code.find('🏀').unwrap() + 1)..code.len()),
|
||||
],
|
||||
filter_range: wit::Range {
|
||||
start: 0,
|
||||
end: "B".len() as u32,
|
||||
},
|
||||
filter_range: 0.."B".len(),
|
||||
code,
|
||||
},
|
||||
&code_runs,
|
||||
@@ -579,12 +516,14 @@ fn test_build_code_label_with_invalid_ranges() {
|
||||
|
||||
// Filter range extends beyond actual text
|
||||
let label = build_code_label(
|
||||
&wit::CodeLabel {
|
||||
spans: vec![wit::CodeLabelSpan::Literal(wit::CodeLabelSpanLiteral {
|
||||
text: "abc".into(),
|
||||
highlight_name: Some("type".into()),
|
||||
})],
|
||||
filter_range: wit::Range { start: 0, end: 5 },
|
||||
&extension::CodeLabel {
|
||||
spans: vec![extension::CodeLabelSpan::Literal(
|
||||
extension::CodeLabelSpanLiteral {
|
||||
text: "abc".into(),
|
||||
highlight_name: Some("type".into()),
|
||||
},
|
||||
)],
|
||||
filter_range: 0..5,
|
||||
code: String::new(),
|
||||
},
|
||||
&code_runs,
|
||||
|
||||
@@ -1,20 +1,17 @@
|
||||
use assistant_slash_command::SlashCommandRegistry;
|
||||
use crate::extension_lsp_adapter::ExtensionLspAdapter;
|
||||
use crate::{
|
||||
Event, ExtensionIndex, ExtensionIndexEntry, ExtensionIndexLanguageEntry,
|
||||
ExtensionIndexThemeEntry, ExtensionManifest, ExtensionSettings, ExtensionStore,
|
||||
GrammarManifestEntry, SchemaVersion, RELOAD_DEBOUNCE_DURATION,
|
||||
};
|
||||
use anyhow::Result;
|
||||
use async_compression::futures::bufread::GzipEncoder;
|
||||
use collections::BTreeMap;
|
||||
use context_servers::ContextServerFactoryRegistry;
|
||||
use extension_host::ExtensionSettings;
|
||||
use extension_host::SchemaVersion;
|
||||
use extension_host::{
|
||||
Event, ExtensionIndex, ExtensionIndexEntry, ExtensionIndexLanguageEntry,
|
||||
ExtensionIndexThemeEntry, ExtensionManifest, ExtensionStore, GrammarManifestEntry,
|
||||
RELOAD_DEBOUNCE_DURATION,
|
||||
};
|
||||
use fs::{FakeFs, Fs, RealFs};
|
||||
use futures::{io::BufReader, AsyncReadExt, StreamExt};
|
||||
use gpui::{Context, SemanticVersion, TestAppContext};
|
||||
use gpui::{BackgroundExecutor, Context, SemanticVersion, SharedString, Task, TestAppContext};
|
||||
use http_client::{FakeHttpClient, Response};
|
||||
use indexed_docs::IndexedDocsRegistry;
|
||||
use language::{LanguageMatcher, LanguageRegistry, LanguageServerBinaryStatus};
|
||||
use language::{LanguageMatcher, LanguageRegistry, LanguageServerBinaryStatus, LoadedLanguage};
|
||||
use lsp::LanguageServerName;
|
||||
use node_runtime::NodeRuntime;
|
||||
use parking_lot::Mutex;
|
||||
@@ -23,7 +20,6 @@ use release_channel::AppVersion;
|
||||
use reqwest_client::ReqwestClient;
|
||||
use serde_json::json;
|
||||
use settings::{Settings as _, SettingsStore};
|
||||
use snippet_provider::SnippetRegistry;
|
||||
use std::{
|
||||
ffi::OsString,
|
||||
path::{Path, PathBuf},
|
||||
@@ -32,6 +28,84 @@ use std::{
|
||||
use theme::ThemeRegistry;
|
||||
use util::test::temp_tree;
|
||||
|
||||
use crate::ExtensionRegistrationHooks;
|
||||
|
||||
struct TestExtensionRegistrationHooks {
|
||||
executor: BackgroundExecutor,
|
||||
language_registry: Arc<LanguageRegistry>,
|
||||
theme_registry: Arc<ThemeRegistry>,
|
||||
}
|
||||
|
||||
impl ExtensionRegistrationHooks for TestExtensionRegistrationHooks {
|
||||
fn list_theme_names(&self, path: PathBuf, fs: Arc<dyn Fs>) -> Task<Result<Vec<String>>> {
|
||||
self.executor.spawn(async move {
|
||||
let themes = theme::read_user_theme(&path, fs).await?;
|
||||
Ok(themes.themes.into_iter().map(|theme| theme.name).collect())
|
||||
})
|
||||
}
|
||||
|
||||
fn load_user_theme(&self, theme_path: PathBuf, fs: Arc<dyn fs::Fs>) -> Task<Result<()>> {
|
||||
let theme_registry = self.theme_registry.clone();
|
||||
self.executor
|
||||
.spawn(async move { theme_registry.load_user_theme(&theme_path, fs).await })
|
||||
}
|
||||
|
||||
fn remove_user_themes(&self, themes: Vec<SharedString>) {
|
||||
self.theme_registry.remove_user_themes(&themes);
|
||||
}
|
||||
|
||||
fn register_language(
|
||||
&self,
|
||||
language: language::LanguageName,
|
||||
grammar: Option<Arc<str>>,
|
||||
matcher: language::LanguageMatcher,
|
||||
load: Arc<dyn Fn() -> Result<LoadedLanguage> + 'static + Send + Sync>,
|
||||
) {
|
||||
self.language_registry
|
||||
.register_language(language, grammar, matcher, load)
|
||||
}
|
||||
|
||||
fn remove_languages(
|
||||
&self,
|
||||
languages_to_remove: &[language::LanguageName],
|
||||
grammars_to_remove: &[Arc<str>],
|
||||
) {
|
||||
self.language_registry
|
||||
.remove_languages(&languages_to_remove, &grammars_to_remove);
|
||||
}
|
||||
|
||||
fn register_wasm_grammars(&self, grammars: Vec<(Arc<str>, PathBuf)>) {
|
||||
self.language_registry.register_wasm_grammars(grammars)
|
||||
}
|
||||
|
||||
fn register_lsp_adapter(
|
||||
&self,
|
||||
language_name: language::LanguageName,
|
||||
adapter: ExtensionLspAdapter,
|
||||
) {
|
||||
self.language_registry
|
||||
.register_lsp_adapter(language_name, Arc::new(adapter));
|
||||
}
|
||||
|
||||
fn update_lsp_status(
|
||||
&self,
|
||||
server_name: lsp::LanguageServerName,
|
||||
status: LanguageServerBinaryStatus,
|
||||
) {
|
||||
self.language_registry
|
||||
.update_lsp_status(server_name, status);
|
||||
}
|
||||
|
||||
fn remove_lsp_adapter(
|
||||
&self,
|
||||
language_name: &language::LanguageName,
|
||||
server_name: &lsp::LanguageServerName,
|
||||
) {
|
||||
self.language_registry
|
||||
.remove_lsp_adapter(language_name, server_name);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
#[ctor::ctor]
|
||||
fn init_logger() {
|
||||
@@ -265,27 +339,18 @@ async fn test_extension_store(cx: &mut TestAppContext) {
|
||||
|
||||
let language_registry = Arc::new(LanguageRegistry::test(cx.executor()));
|
||||
let theme_registry = Arc::new(ThemeRegistry::new(Box::new(())));
|
||||
let slash_command_registry = SlashCommandRegistry::new();
|
||||
let indexed_docs_registry = Arc::new(IndexedDocsRegistry::new(cx.executor()));
|
||||
let snippet_registry = Arc::new(SnippetRegistry::new());
|
||||
let context_server_factory_registry = ContextServerFactoryRegistry::new();
|
||||
let registration_hooks = Arc::new(TestExtensionRegistrationHooks {
|
||||
executor: cx.executor(),
|
||||
language_registry: language_registry.clone(),
|
||||
theme_registry: theme_registry.clone(),
|
||||
});
|
||||
let node_runtime = NodeRuntime::unavailable();
|
||||
|
||||
let store = cx.new_model(|cx| {
|
||||
let extension_registration_hooks = crate::ConcreteExtensionRegistrationHooks::new(
|
||||
theme_registry.clone(),
|
||||
slash_command_registry.clone(),
|
||||
indexed_docs_registry.clone(),
|
||||
snippet_registry.clone(),
|
||||
language_registry.clone(),
|
||||
context_server_factory_registry.clone(),
|
||||
cx,
|
||||
);
|
||||
|
||||
ExtensionStore::new(
|
||||
PathBuf::from("/the-extension-dir"),
|
||||
None,
|
||||
extension_registration_hooks,
|
||||
registration_hooks.clone(),
|
||||
fs.clone(),
|
||||
http_client.clone(),
|
||||
http_client.clone(),
|
||||
@@ -407,20 +472,10 @@ async fn test_extension_store(cx: &mut TestAppContext) {
|
||||
// Create new extension store, as if Zed were restarting.
|
||||
drop(store);
|
||||
let store = cx.new_model(|cx| {
|
||||
let extension_api = crate::ConcreteExtensionRegistrationHooks::new(
|
||||
theme_registry.clone(),
|
||||
slash_command_registry,
|
||||
indexed_docs_registry,
|
||||
snippet_registry,
|
||||
language_registry.clone(),
|
||||
context_server_factory_registry.clone(),
|
||||
cx,
|
||||
);
|
||||
|
||||
ExtensionStore::new(
|
||||
PathBuf::from("/the-extension-dir"),
|
||||
None,
|
||||
extension_api,
|
||||
registration_hooks,
|
||||
fs.clone(),
|
||||
http_client.clone(),
|
||||
http_client.clone(),
|
||||
@@ -505,10 +560,11 @@ async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) {
|
||||
|
||||
let language_registry = project.read_with(cx, |project, _cx| project.languages().clone());
|
||||
let theme_registry = Arc::new(ThemeRegistry::new(Box::new(())));
|
||||
let slash_command_registry = SlashCommandRegistry::new();
|
||||
let indexed_docs_registry = Arc::new(IndexedDocsRegistry::new(cx.executor()));
|
||||
let snippet_registry = Arc::new(SnippetRegistry::new());
|
||||
let context_server_factory_registry = ContextServerFactoryRegistry::new();
|
||||
let registration_hooks = Arc::new(TestExtensionRegistrationHooks {
|
||||
executor: cx.executor(),
|
||||
language_registry: language_registry.clone(),
|
||||
theme_registry: theme_registry.clone(),
|
||||
});
|
||||
let node_runtime = NodeRuntime::unavailable();
|
||||
|
||||
let mut status_updates = language_registry.language_server_binary_statuses();
|
||||
@@ -599,19 +655,10 @@ async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) {
|
||||
Arc::new(ReqwestClient::user_agent(&user_agent).expect("Could not create HTTP client"));
|
||||
|
||||
let extension_store = cx.new_model(|cx| {
|
||||
let extension_api = crate::ConcreteExtensionRegistrationHooks::new(
|
||||
theme_registry.clone(),
|
||||
slash_command_registry,
|
||||
indexed_docs_registry,
|
||||
snippet_registry,
|
||||
language_registry.clone(),
|
||||
context_server_factory_registry.clone(),
|
||||
cx,
|
||||
);
|
||||
ExtensionStore::new(
|
||||
extensions_dir.clone(),
|
||||
Some(cache_dir),
|
||||
extension_api,
|
||||
registration_hooks,
|
||||
fs.clone(),
|
||||
extension_client.clone(),
|
||||
builder_client,
|
||||
@@ -626,7 +673,7 @@ async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) {
|
||||
let executor = cx.executor();
|
||||
let _task = cx.executor().spawn(async move {
|
||||
while let Some(event) = events.next().await {
|
||||
if let extension_host::Event::StartedReloading = event {
|
||||
if let Event::StartedReloading = event {
|
||||
executor.advance_clock(RELOAD_DEBOUNCE_DURATION);
|
||||
}
|
||||
}
|
||||
@@ -2,6 +2,11 @@ pub mod wit;
|
||||
|
||||
use crate::{ExtensionManifest, ExtensionRegistrationHooks};
|
||||
use anyhow::{anyhow, bail, Context as _, Result};
|
||||
use async_trait::async_trait;
|
||||
use extension::{
|
||||
CodeLabel, Command, Completion, KeyValueStoreDelegate, SlashCommand,
|
||||
SlashCommandArgumentCompletion, SlashCommandOutput, Symbol, WorktreeDelegate,
|
||||
};
|
||||
use fs::{normalize_path, Fs};
|
||||
use futures::future::LocalBoxFuture;
|
||||
use futures::{
|
||||
@@ -14,6 +19,8 @@ use futures::{
|
||||
};
|
||||
use gpui::{AppContext, AsyncAppContext, BackgroundExecutor, Task};
|
||||
use http_client::HttpClient;
|
||||
use language::LanguageName;
|
||||
use lsp::LanguageServerName;
|
||||
use node_runtime::NodeRuntime;
|
||||
use release_channel::ReleaseChannel;
|
||||
use semantic_version::SemanticVersion;
|
||||
@@ -25,9 +32,9 @@ use wasmtime::{
|
||||
component::{Component, ResourceTable},
|
||||
Engine, Store,
|
||||
};
|
||||
use wasmtime_wasi as wasi;
|
||||
use wasmtime_wasi::{self as wasi, WasiView};
|
||||
use wit::Extension;
|
||||
pub use wit::{ExtensionProject, SlashCommand};
|
||||
pub use wit::ExtensionProject;
|
||||
|
||||
pub struct WasmHost {
|
||||
engine: Engine,
|
||||
@@ -45,10 +52,234 @@ pub struct WasmHost {
|
||||
pub struct WasmExtension {
|
||||
tx: UnboundedSender<ExtensionCall>,
|
||||
pub manifest: Arc<ExtensionManifest>,
|
||||
pub work_dir: Arc<Path>,
|
||||
#[allow(unused)]
|
||||
pub zed_api_version: SemanticVersion,
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl extension::Extension for WasmExtension {
|
||||
fn manifest(&self) -> Arc<ExtensionManifest> {
|
||||
self.manifest.clone()
|
||||
}
|
||||
|
||||
fn work_dir(&self) -> Arc<Path> {
|
||||
self.work_dir.clone()
|
||||
}
|
||||
|
||||
async fn language_server_command(
|
||||
&self,
|
||||
language_server_id: LanguageServerName,
|
||||
language_name: LanguageName,
|
||||
worktree: Arc<dyn WorktreeDelegate>,
|
||||
) -> Result<Command> {
|
||||
self.call(|extension, store| {
|
||||
async move {
|
||||
let resource = store.data_mut().table().push(worktree)?;
|
||||
let command = extension
|
||||
.call_language_server_command(
|
||||
store,
|
||||
&language_server_id,
|
||||
&language_name,
|
||||
resource,
|
||||
)
|
||||
.await?
|
||||
.map_err(|err| anyhow!("{err}"))?;
|
||||
|
||||
Ok(command.into())
|
||||
}
|
||||
.boxed()
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
async fn language_server_initialization_options(
|
||||
&self,
|
||||
language_server_id: LanguageServerName,
|
||||
language_name: LanguageName,
|
||||
worktree: Arc<dyn WorktreeDelegate>,
|
||||
) -> Result<Option<String>> {
|
||||
self.call(|extension, store| {
|
||||
async move {
|
||||
let resource = store.data_mut().table().push(worktree)?;
|
||||
let options = extension
|
||||
.call_language_server_initialization_options(
|
||||
store,
|
||||
&language_server_id,
|
||||
&language_name,
|
||||
resource,
|
||||
)
|
||||
.await?
|
||||
.map_err(|err| anyhow!("{err}"))?;
|
||||
anyhow::Ok(options)
|
||||
}
|
||||
.boxed()
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
async fn language_server_workspace_configuration(
|
||||
&self,
|
||||
language_server_id: LanguageServerName,
|
||||
worktree: Arc<dyn WorktreeDelegate>,
|
||||
) -> Result<Option<String>> {
|
||||
self.call(|extension, store| {
|
||||
async move {
|
||||
let resource = store.data_mut().table().push(worktree)?;
|
||||
let options = extension
|
||||
.call_language_server_workspace_configuration(
|
||||
store,
|
||||
&language_server_id,
|
||||
resource,
|
||||
)
|
||||
.await?
|
||||
.map_err(|err| anyhow!("{err}"))?;
|
||||
anyhow::Ok(options)
|
||||
}
|
||||
.boxed()
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
async fn labels_for_completions(
|
||||
&self,
|
||||
language_server_id: LanguageServerName,
|
||||
completions: Vec<Completion>,
|
||||
) -> Result<Vec<Option<CodeLabel>>> {
|
||||
self.call(|extension, store| {
|
||||
async move {
|
||||
let labels = extension
|
||||
.call_labels_for_completions(
|
||||
store,
|
||||
&language_server_id,
|
||||
completions.into_iter().map(Into::into).collect(),
|
||||
)
|
||||
.await?
|
||||
.map_err(|err| anyhow!("{err}"))?;
|
||||
|
||||
Ok(labels
|
||||
.into_iter()
|
||||
.map(|label| label.map(Into::into))
|
||||
.collect())
|
||||
}
|
||||
.boxed()
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
async fn labels_for_symbols(
|
||||
&self,
|
||||
language_server_id: LanguageServerName,
|
||||
symbols: Vec<Symbol>,
|
||||
) -> Result<Vec<Option<CodeLabel>>> {
|
||||
self.call(|extension, store| {
|
||||
async move {
|
||||
let labels = extension
|
||||
.call_labels_for_symbols(
|
||||
store,
|
||||
&language_server_id,
|
||||
symbols.into_iter().map(Into::into).collect(),
|
||||
)
|
||||
.await?
|
||||
.map_err(|err| anyhow!("{err}"))?;
|
||||
|
||||
Ok(labels
|
||||
.into_iter()
|
||||
.map(|label| label.map(Into::into))
|
||||
.collect())
|
||||
}
|
||||
.boxed()
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
async fn complete_slash_command_argument(
|
||||
&self,
|
||||
command: SlashCommand,
|
||||
arguments: Vec<String>,
|
||||
) -> Result<Vec<SlashCommandArgumentCompletion>> {
|
||||
self.call(|extension, store| {
|
||||
async move {
|
||||
let completions = extension
|
||||
.call_complete_slash_command_argument(store, &command.into(), &arguments)
|
||||
.await?
|
||||
.map_err(|err| anyhow!("{err}"))?;
|
||||
|
||||
Ok(completions.into_iter().map(Into::into).collect())
|
||||
}
|
||||
.boxed()
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
async fn run_slash_command(
|
||||
&self,
|
||||
command: SlashCommand,
|
||||
arguments: Vec<String>,
|
||||
delegate: Option<Arc<dyn WorktreeDelegate>>,
|
||||
) -> Result<SlashCommandOutput> {
|
||||
self.call(|extension, store| {
|
||||
async move {
|
||||
let resource = if let Some(delegate) = delegate {
|
||||
Some(store.data_mut().table().push(delegate)?)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let output = extension
|
||||
.call_run_slash_command(store, &command.into(), &arguments, resource)
|
||||
.await?
|
||||
.map_err(|err| anyhow!("{err}"))?;
|
||||
|
||||
Ok(output.into())
|
||||
}
|
||||
.boxed()
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
async fn suggest_docs_packages(&self, provider: Arc<str>) -> Result<Vec<String>> {
|
||||
self.call(|extension, store| {
|
||||
async move {
|
||||
let packages = extension
|
||||
.call_suggest_docs_packages(store, provider.as_ref())
|
||||
.await?
|
||||
.map_err(|err| anyhow!("{err:?}"))?;
|
||||
|
||||
Ok(packages)
|
||||
}
|
||||
.boxed()
|
||||
})
|
||||
.await
|
||||
}
|
||||
|
||||
async fn index_docs(
|
||||
&self,
|
||||
provider: Arc<str>,
|
||||
package_name: Arc<str>,
|
||||
kv_store: Arc<dyn KeyValueStoreDelegate>,
|
||||
) -> Result<()> {
|
||||
self.call(|extension, store| {
|
||||
async move {
|
||||
let kv_store_resource = store.data_mut().table().push(kv_store)?;
|
||||
extension
|
||||
.call_index_docs(
|
||||
store,
|
||||
provider.as_ref(),
|
||||
package_name.as_ref(),
|
||||
kv_store_resource,
|
||||
)
|
||||
.await?
|
||||
.map_err(|err| anyhow!("{err:?}"))?;
|
||||
|
||||
anyhow::Ok(())
|
||||
}
|
||||
.boxed()
|
||||
})
|
||||
.await
|
||||
}
|
||||
}
|
||||
|
||||
pub struct WasmState {
|
||||
manifest: Arc<ExtensionManifest>,
|
||||
pub table: ResourceTable,
|
||||
@@ -152,6 +383,7 @@ impl WasmHost {
|
||||
|
||||
Ok(WasmExtension {
|
||||
manifest: manifest.clone(),
|
||||
work_dir: this.work_dir.join(manifest.id.as_ref()).into(),
|
||||
tx,
|
||||
zed_api_version,
|
||||
})
|
||||
@@ -182,11 +414,6 @@ impl WasmHost {
|
||||
.build())
|
||||
}
|
||||
|
||||
pub fn path_from_extension(&self, id: &Arc<str>, path: &Path) -> PathBuf {
|
||||
let extension_work_dir = self.work_dir.join(id.as_ref());
|
||||
normalize_path(&extension_work_dir.join(path))
|
||||
}
|
||||
|
||||
pub fn writeable_path_from_extension(&self, id: &Arc<str>, path: &Path) -> Result<PathBuf> {
|
||||
let extension_work_dir = self.work_dir.join(id.as_ref());
|
||||
let path = normalize_path(&extension_work_dir.join(path));
|
||||
|
||||
@@ -3,15 +3,14 @@ mod since_v0_0_4;
|
||||
mod since_v0_0_6;
|
||||
mod since_v0_1_0;
|
||||
mod since_v0_2_0;
|
||||
use extension::{KeyValueStoreDelegate, WorktreeDelegate};
|
||||
use language::LanguageName;
|
||||
use lsp::LanguageServerName;
|
||||
use release_channel::ReleaseChannel;
|
||||
use since_v0_2_0 as latest;
|
||||
|
||||
use crate::DocsDatabase;
|
||||
|
||||
use super::{wasm_engine, WasmState};
|
||||
use anyhow::{anyhow, Context, Result};
|
||||
use language::LspAdapterDelegate;
|
||||
use semantic_version::SemanticVersion;
|
||||
use std::{ops::RangeInclusive, sync::Arc};
|
||||
use wasmtime::{
|
||||
@@ -59,12 +58,35 @@ pub fn wasm_api_version_range(release_channel: ReleaseChannel) -> RangeInclusive
|
||||
|
||||
let max_version = match release_channel {
|
||||
ReleaseChannel::Dev | ReleaseChannel::Nightly => latest::MAX_VERSION,
|
||||
ReleaseChannel::Stable | ReleaseChannel::Preview => since_v0_1_0::MAX_VERSION,
|
||||
ReleaseChannel::Stable | ReleaseChannel::Preview => latest::MAX_VERSION,
|
||||
};
|
||||
|
||||
since_v0_0_1::MIN_VERSION..=max_version
|
||||
}
|
||||
|
||||
/// Authorizes access to use unreleased versions of the Wasm API, based on the provided [`ReleaseChannel`].
|
||||
///
|
||||
/// Note: If there isn't currently an unreleased Wasm API version this function may be unused. Don't delete it!
|
||||
pub fn authorize_access_to_unreleased_wasm_api_version(
|
||||
release_channel: ReleaseChannel,
|
||||
) -> Result<()> {
|
||||
let allow_unreleased_version = match release_channel {
|
||||
ReleaseChannel::Dev | ReleaseChannel::Nightly => true,
|
||||
ReleaseChannel::Stable | ReleaseChannel::Preview => {
|
||||
// We always allow the latest in tests so that the extension tests pass on release branches.
|
||||
cfg!(any(test, feature = "test-support"))
|
||||
}
|
||||
};
|
||||
|
||||
if !allow_unreleased_version {
|
||||
Err(anyhow!(
|
||||
"unreleased versions of the extension API can only be used on development builds of Zed"
|
||||
))?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub enum Extension {
|
||||
V020(since_v0_2_0::Extension),
|
||||
V010(since_v0_1_0::Extension),
|
||||
@@ -80,20 +102,10 @@ impl Extension {
|
||||
version: SemanticVersion,
|
||||
component: &Component,
|
||||
) -> Result<Self> {
|
||||
// Note: The release channel can be used to stage a new version of the extension API.
|
||||
let _ = release_channel;
|
||||
|
||||
if version >= latest::MIN_VERSION {
|
||||
// Note: The release channel can be used to stage a new version of the extension API.
|
||||
// We always allow the latest in tests so that the extension tests pass on release branches.
|
||||
let allow_latest_version = match release_channel {
|
||||
ReleaseChannel::Dev | ReleaseChannel::Nightly => true,
|
||||
ReleaseChannel::Stable | ReleaseChannel::Preview => {
|
||||
cfg!(any(test, feature = "test-support"))
|
||||
}
|
||||
};
|
||||
if !allow_latest_version {
|
||||
Err(anyhow!(
|
||||
"unreleased versions of the extension API can only be used on development builds of Zed"
|
||||
))?;
|
||||
}
|
||||
let extension =
|
||||
latest::Extension::instantiate_async(store, component, latest::linker())
|
||||
.await
|
||||
@@ -152,8 +164,8 @@ impl Extension {
|
||||
&self,
|
||||
store: &mut Store<WasmState>,
|
||||
language_server_id: &LanguageServerName,
|
||||
config: &LanguageServerConfig,
|
||||
resource: Resource<Arc<dyn LspAdapterDelegate>>,
|
||||
language_name: &LanguageName,
|
||||
resource: Resource<Arc<dyn WorktreeDelegate>>,
|
||||
) -> Result<Result<Command, String>> {
|
||||
match self {
|
||||
Extension::V020(ext) => {
|
||||
@@ -169,11 +181,26 @@ impl Extension {
|
||||
.await?
|
||||
.map(|command| command.into())),
|
||||
Extension::V004(ext) => Ok(ext
|
||||
.call_language_server_command(store, config, resource)
|
||||
.call_language_server_command(
|
||||
store,
|
||||
&LanguageServerConfig {
|
||||
name: language_server_id.0.to_string(),
|
||||
language_name: language_name.to_string(),
|
||||
},
|
||||
resource,
|
||||
)
|
||||
.await?
|
||||
.map(|command| command.into())),
|
||||
Extension::V001(ext) => Ok(ext
|
||||
.call_language_server_command(store, &config.clone().into(), resource)
|
||||
.call_language_server_command(
|
||||
store,
|
||||
&LanguageServerConfig {
|
||||
name: language_server_id.0.to_string(),
|
||||
language_name: language_name.to_string(),
|
||||
}
|
||||
.into(),
|
||||
resource,
|
||||
)
|
||||
.await?
|
||||
.map(|command| command.into())),
|
||||
}
|
||||
@@ -183,8 +210,8 @@ impl Extension {
|
||||
&self,
|
||||
store: &mut Store<WasmState>,
|
||||
language_server_id: &LanguageServerName,
|
||||
config: &LanguageServerConfig,
|
||||
resource: Resource<Arc<dyn LspAdapterDelegate>>,
|
||||
language_name: &LanguageName,
|
||||
resource: Resource<Arc<dyn WorktreeDelegate>>,
|
||||
) -> Result<Result<Option<String>, String>> {
|
||||
match self {
|
||||
Extension::V020(ext) => {
|
||||
@@ -212,13 +239,24 @@ impl Extension {
|
||||
.await
|
||||
}
|
||||
Extension::V004(ext) => {
|
||||
ext.call_language_server_initialization_options(store, config, resource)
|
||||
.await
|
||||
ext.call_language_server_initialization_options(
|
||||
store,
|
||||
&LanguageServerConfig {
|
||||
name: language_server_id.0.to_string(),
|
||||
language_name: language_name.to_string(),
|
||||
},
|
||||
resource,
|
||||
)
|
||||
.await
|
||||
}
|
||||
Extension::V001(ext) => {
|
||||
ext.call_language_server_initialization_options(
|
||||
store,
|
||||
&config.clone().into(),
|
||||
&LanguageServerConfig {
|
||||
name: language_server_id.0.to_string(),
|
||||
language_name: language_name.to_string(),
|
||||
}
|
||||
.into(),
|
||||
resource,
|
||||
)
|
||||
.await
|
||||
@@ -230,7 +268,7 @@ impl Extension {
|
||||
&self,
|
||||
store: &mut Store<WasmState>,
|
||||
language_server_id: &LanguageServerName,
|
||||
resource: Resource<Arc<dyn LspAdapterDelegate>>,
|
||||
resource: Resource<Arc<dyn WorktreeDelegate>>,
|
||||
) -> Result<Result<Option<String>, String>> {
|
||||
match self {
|
||||
Extension::V020(ext) => {
|
||||
@@ -367,7 +405,7 @@ impl Extension {
|
||||
store: &mut Store<WasmState>,
|
||||
command: &SlashCommand,
|
||||
arguments: &[String],
|
||||
resource: Option<Resource<Arc<dyn LspAdapterDelegate>>>,
|
||||
resource: Option<Resource<Arc<dyn WorktreeDelegate>>>,
|
||||
) -> Result<Result<SlashCommandOutput, String>> {
|
||||
match self {
|
||||
Extension::V020(ext) => {
|
||||
@@ -422,15 +460,15 @@ impl Extension {
|
||||
store: &mut Store<WasmState>,
|
||||
provider: &str,
|
||||
package_name: &str,
|
||||
database: Resource<Arc<dyn DocsDatabase>>,
|
||||
kv_store: Resource<Arc<dyn KeyValueStoreDelegate>>,
|
||||
) -> Result<Result<(), String>> {
|
||||
match self {
|
||||
Extension::V020(ext) => {
|
||||
ext.call_index_docs(store, provider, package_name, database)
|
||||
ext.call_index_docs(store, provider, package_name, kv_store)
|
||||
.await
|
||||
}
|
||||
Extension::V010(ext) => {
|
||||
ext.call_index_docs(store, provider, package_name, database)
|
||||
ext.call_index_docs(store, provider, package_name, kv_store)
|
||||
.await
|
||||
}
|
||||
Extension::V001(_) | Extension::V004(_) | Extension::V006(_) => {
|
||||
|
||||
@@ -3,7 +3,8 @@ use crate::wasm_host::wit::since_v0_0_4;
|
||||
use crate::wasm_host::WasmState;
|
||||
use anyhow::Result;
|
||||
use async_trait::async_trait;
|
||||
use language::{LanguageServerBinaryStatus, LspAdapterDelegate};
|
||||
use extension::WorktreeDelegate;
|
||||
use language::LanguageServerBinaryStatus;
|
||||
use semantic_version::SemanticVersion;
|
||||
use std::sync::{Arc, OnceLock};
|
||||
use wasmtime::component::{Linker, Resource};
|
||||
@@ -21,7 +22,7 @@ wasmtime::component::bindgen!({
|
||||
},
|
||||
});
|
||||
|
||||
pub type ExtensionWorktree = Arc<dyn LspAdapterDelegate>;
|
||||
pub type ExtensionWorktree = Arc<dyn WorktreeDelegate>;
|
||||
|
||||
pub fn linker() -> &'static Linker<WasmState> {
|
||||
static LINKER: OnceLock<Linker<WasmState>> = OnceLock::new();
|
||||
@@ -62,7 +63,7 @@ impl From<Command> for latest::Command {
|
||||
impl HostWorktree for WasmState {
|
||||
async fn read_text_file(
|
||||
&mut self,
|
||||
delegate: Resource<Arc<dyn LspAdapterDelegate>>,
|
||||
delegate: Resource<Arc<dyn WorktreeDelegate>>,
|
||||
path: String,
|
||||
) -> wasmtime::Result<Result<String, String>> {
|
||||
latest::HostWorktree::read_text_file(self, delegate, path).await
|
||||
@@ -70,14 +71,14 @@ impl HostWorktree for WasmState {
|
||||
|
||||
async fn shell_env(
|
||||
&mut self,
|
||||
delegate: Resource<Arc<dyn LspAdapterDelegate>>,
|
||||
delegate: Resource<Arc<dyn WorktreeDelegate>>,
|
||||
) -> wasmtime::Result<EnvVars> {
|
||||
latest::HostWorktree::shell_env(self, delegate).await
|
||||
}
|
||||
|
||||
async fn which(
|
||||
&mut self,
|
||||
delegate: Resource<Arc<dyn LspAdapterDelegate>>,
|
||||
delegate: Resource<Arc<dyn WorktreeDelegate>>,
|
||||
binary_name: String,
|
||||
) -> wasmtime::Result<Option<String>> {
|
||||
latest::HostWorktree::which(self, delegate, binary_name).await
|
||||
|
||||
@@ -2,7 +2,7 @@ use super::latest;
|
||||
use crate::wasm_host::WasmState;
|
||||
use anyhow::Result;
|
||||
use async_trait::async_trait;
|
||||
use language::LspAdapterDelegate;
|
||||
use extension::WorktreeDelegate;
|
||||
use semantic_version::SemanticVersion;
|
||||
use std::sync::{Arc, OnceLock};
|
||||
use wasmtime::component::{Linker, Resource};
|
||||
@@ -20,7 +20,7 @@ wasmtime::component::bindgen!({
|
||||
},
|
||||
});
|
||||
|
||||
pub type ExtensionWorktree = Arc<dyn LspAdapterDelegate>;
|
||||
pub type ExtensionWorktree = Arc<dyn WorktreeDelegate>;
|
||||
|
||||
pub fn linker() -> &'static Linker<WasmState> {
|
||||
static LINKER: OnceLock<Linker<WasmState>> = OnceLock::new();
|
||||
@@ -71,7 +71,7 @@ impl From<Command> for latest::Command {
|
||||
impl HostWorktree for WasmState {
|
||||
async fn read_text_file(
|
||||
&mut self,
|
||||
delegate: Resource<Arc<dyn LspAdapterDelegate>>,
|
||||
delegate: Resource<Arc<dyn WorktreeDelegate>>,
|
||||
path: String,
|
||||
) -> wasmtime::Result<Result<String, String>> {
|
||||
latest::HostWorktree::read_text_file(self, delegate, path).await
|
||||
@@ -79,14 +79,14 @@ impl HostWorktree for WasmState {
|
||||
|
||||
async fn shell_env(
|
||||
&mut self,
|
||||
delegate: Resource<Arc<dyn LspAdapterDelegate>>,
|
||||
delegate: Resource<Arc<dyn WorktreeDelegate>>,
|
||||
) -> wasmtime::Result<EnvVars> {
|
||||
latest::HostWorktree::shell_env(self, delegate).await
|
||||
}
|
||||
|
||||
async fn which(
|
||||
&mut self,
|
||||
delegate: Resource<Arc<dyn LspAdapterDelegate>>,
|
||||
delegate: Resource<Arc<dyn WorktreeDelegate>>,
|
||||
binary_name: String,
|
||||
) -> wasmtime::Result<Option<String>> {
|
||||
latest::HostWorktree::which(self, delegate, binary_name).await
|
||||
|
||||
@@ -2,7 +2,7 @@ use super::{latest, since_v0_1_0};
|
||||
use crate::wasm_host::WasmState;
|
||||
use anyhow::Result;
|
||||
use async_trait::async_trait;
|
||||
use language::LspAdapterDelegate;
|
||||
use extension::WorktreeDelegate;
|
||||
use semantic_version::SemanticVersion;
|
||||
use std::sync::{Arc, OnceLock};
|
||||
use wasmtime::component::{Linker, Resource};
|
||||
@@ -26,7 +26,7 @@ mod settings {
|
||||
include!(concat!(env!("OUT_DIR"), "/since_v0.0.6/settings.rs"));
|
||||
}
|
||||
|
||||
pub type ExtensionWorktree = Arc<dyn LspAdapterDelegate>;
|
||||
pub type ExtensionWorktree = Arc<dyn WorktreeDelegate>;
|
||||
|
||||
pub fn linker() -> &'static Linker<WasmState> {
|
||||
static LINKER: OnceLock<Linker<WasmState>> = OnceLock::new();
|
||||
@@ -113,23 +113,20 @@ impl From<CodeLabel> for latest::CodeLabel {
|
||||
|
||||
#[async_trait]
|
||||
impl HostWorktree for WasmState {
|
||||
async fn id(
|
||||
&mut self,
|
||||
delegate: Resource<Arc<dyn LspAdapterDelegate>>,
|
||||
) -> wasmtime::Result<u64> {
|
||||
async fn id(&mut self, delegate: Resource<Arc<dyn WorktreeDelegate>>) -> wasmtime::Result<u64> {
|
||||
latest::HostWorktree::id(self, delegate).await
|
||||
}
|
||||
|
||||
async fn root_path(
|
||||
&mut self,
|
||||
delegate: Resource<Arc<dyn LspAdapterDelegate>>,
|
||||
delegate: Resource<Arc<dyn WorktreeDelegate>>,
|
||||
) -> wasmtime::Result<String> {
|
||||
latest::HostWorktree::root_path(self, delegate).await
|
||||
}
|
||||
|
||||
async fn read_text_file(
|
||||
&mut self,
|
||||
delegate: Resource<Arc<dyn LspAdapterDelegate>>,
|
||||
delegate: Resource<Arc<dyn WorktreeDelegate>>,
|
||||
path: String,
|
||||
) -> wasmtime::Result<Result<String, String>> {
|
||||
latest::HostWorktree::read_text_file(self, delegate, path).await
|
||||
@@ -137,14 +134,14 @@ impl HostWorktree for WasmState {
|
||||
|
||||
async fn shell_env(
|
||||
&mut self,
|
||||
delegate: Resource<Arc<dyn LspAdapterDelegate>>,
|
||||
delegate: Resource<Arc<dyn WorktreeDelegate>>,
|
||||
) -> wasmtime::Result<EnvVars> {
|
||||
latest::HostWorktree::shell_env(self, delegate).await
|
||||
}
|
||||
|
||||
async fn which(
|
||||
&mut self,
|
||||
delegate: Resource<Arc<dyn LspAdapterDelegate>>,
|
||||
delegate: Resource<Arc<dyn WorktreeDelegate>>,
|
||||
binary_name: String,
|
||||
) -> wasmtime::Result<Option<String>> {
|
||||
latest::HostWorktree::which(self, delegate, binary_name).await
|
||||
|
||||
@@ -1,17 +1,15 @@
|
||||
use crate::wasm_host::{wit::ToWasmtimeResult, WasmState};
|
||||
use crate::DocsDatabase;
|
||||
use ::http_client::{AsyncBody, HttpRequestExt};
|
||||
use ::settings::{Settings, WorktreeId};
|
||||
use anyhow::{anyhow, bail, Context, Result};
|
||||
use async_compression::futures::bufread::GzipDecoder;
|
||||
use async_tar::Archive;
|
||||
use async_trait::async_trait;
|
||||
use extension::{KeyValueStoreDelegate, WorktreeDelegate};
|
||||
use futures::{io::BufReader, FutureExt as _};
|
||||
use futures::{lock::Mutex, AsyncReadExt};
|
||||
use language::LanguageName;
|
||||
use language::{
|
||||
language_settings::AllLanguageSettings, LanguageServerBinaryStatus, LspAdapterDelegate,
|
||||
};
|
||||
use language::{language_settings::AllLanguageSettings, LanguageServerBinaryStatus};
|
||||
use project::project_settings::ProjectSettings;
|
||||
use semantic_version::SemanticVersion;
|
||||
use std::{
|
||||
@@ -24,7 +22,6 @@ use wasmtime::component::{Linker, Resource};
|
||||
use super::latest;
|
||||
|
||||
pub const MIN_VERSION: SemanticVersion = SemanticVersion::new(0, 1, 0);
|
||||
pub const MAX_VERSION: SemanticVersion = SemanticVersion::new(0, 1, 0);
|
||||
|
||||
wasmtime::component::bindgen!({
|
||||
async: true,
|
||||
@@ -47,8 +44,8 @@ mod settings {
|
||||
include!(concat!(env!("OUT_DIR"), "/since_v0.1.0/settings.rs"));
|
||||
}
|
||||
|
||||
pub type ExtensionWorktree = Arc<dyn LspAdapterDelegate>;
|
||||
pub type ExtensionKeyValueStore = Arc<dyn DocsDatabase>;
|
||||
pub type ExtensionWorktree = Arc<dyn WorktreeDelegate>;
|
||||
pub type ExtensionKeyValueStore = Arc<dyn KeyValueStoreDelegate>;
|
||||
pub type ExtensionHttpResponseStream = Arc<Mutex<::http_client::Response<AsyncBody>>>;
|
||||
|
||||
pub fn linker() -> &'static Linker<WasmState> {
|
||||
@@ -251,52 +248,38 @@ impl HostKeyValueStore for WasmState {
|
||||
|
||||
#[async_trait]
|
||||
impl HostWorktree for WasmState {
|
||||
async fn id(
|
||||
&mut self,
|
||||
delegate: Resource<Arc<dyn LspAdapterDelegate>>,
|
||||
) -> wasmtime::Result<u64> {
|
||||
let delegate = self.table.get(&delegate)?;
|
||||
Ok(delegate.worktree_id().to_proto())
|
||||
async fn id(&mut self, delegate: Resource<Arc<dyn WorktreeDelegate>>) -> wasmtime::Result<u64> {
|
||||
latest::HostWorktree::id(self, delegate).await
|
||||
}
|
||||
|
||||
async fn root_path(
|
||||
&mut self,
|
||||
delegate: Resource<Arc<dyn LspAdapterDelegate>>,
|
||||
delegate: Resource<Arc<dyn WorktreeDelegate>>,
|
||||
) -> wasmtime::Result<String> {
|
||||
let delegate = self.table.get(&delegate)?;
|
||||
Ok(delegate.worktree_root_path().to_string_lossy().to_string())
|
||||
latest::HostWorktree::root_path(self, delegate).await
|
||||
}
|
||||
|
||||
async fn read_text_file(
|
||||
&mut self,
|
||||
delegate: Resource<Arc<dyn LspAdapterDelegate>>,
|
||||
delegate: Resource<Arc<dyn WorktreeDelegate>>,
|
||||
path: String,
|
||||
) -> wasmtime::Result<Result<String, String>> {
|
||||
let delegate = self.table.get(&delegate)?;
|
||||
Ok(delegate
|
||||
.read_text_file(path.into())
|
||||
.await
|
||||
.map_err(|error| error.to_string()))
|
||||
latest::HostWorktree::read_text_file(self, delegate, path).await
|
||||
}
|
||||
|
||||
async fn shell_env(
|
||||
&mut self,
|
||||
delegate: Resource<Arc<dyn LspAdapterDelegate>>,
|
||||
delegate: Resource<Arc<dyn WorktreeDelegate>>,
|
||||
) -> wasmtime::Result<EnvVars> {
|
||||
let delegate = self.table.get(&delegate)?;
|
||||
Ok(delegate.shell_env().await.into_iter().collect())
|
||||
latest::HostWorktree::shell_env(self, delegate).await
|
||||
}
|
||||
|
||||
async fn which(
|
||||
&mut self,
|
||||
delegate: Resource<Arc<dyn LspAdapterDelegate>>,
|
||||
delegate: Resource<Arc<dyn WorktreeDelegate>>,
|
||||
binary_name: String,
|
||||
) -> wasmtime::Result<Option<String>> {
|
||||
let delegate = self.table.get(&delegate)?;
|
||||
Ok(delegate
|
||||
.which(binary_name.as_ref())
|
||||
.await
|
||||
.map(|path| path.to_string_lossy().to_string()))
|
||||
latest::HostWorktree::which(self, delegate, binary_name).await
|
||||
}
|
||||
|
||||
fn drop(&mut self, _worktree: Resource<Worktree>) -> Result<()> {
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
use crate::wasm_host::wit::since_v0_2_0::slash_command::SlashCommandOutputSection;
|
||||
use crate::wasm_host::wit::{CompletionKind, CompletionLabelDetails, InsertTextFormat, SymbolKind};
|
||||
use crate::wasm_host::{wit::ToWasmtimeResult, WasmState};
|
||||
use crate::DocsDatabase;
|
||||
use ::http_client::{AsyncBody, HttpRequestExt};
|
||||
use ::settings::{Settings, WorktreeId};
|
||||
use anyhow::{anyhow, bail, Context, Result};
|
||||
@@ -7,12 +8,10 @@ use async_compression::futures::bufread::GzipDecoder;
|
||||
use async_tar::Archive;
|
||||
use async_trait::async_trait;
|
||||
use context_servers::manager::ContextServerSettings;
|
||||
use extension::{KeyValueStoreDelegate, WorktreeDelegate};
|
||||
use futures::{io::BufReader, FutureExt as _};
|
||||
use futures::{lock::Mutex, AsyncReadExt};
|
||||
use language::{
|
||||
language_settings::AllLanguageSettings, LanguageName, LanguageServerBinaryStatus,
|
||||
LspAdapterDelegate,
|
||||
};
|
||||
use language::{language_settings::AllLanguageSettings, LanguageName, LanguageServerBinaryStatus};
|
||||
use project::project_settings::ProjectSettings;
|
||||
use semantic_version::SemanticVersion;
|
||||
use std::{
|
||||
@@ -44,8 +43,8 @@ mod settings {
|
||||
include!(concat!(env!("OUT_DIR"), "/since_v0.2.0/settings.rs"));
|
||||
}
|
||||
|
||||
pub type ExtensionWorktree = Arc<dyn LspAdapterDelegate>;
|
||||
pub type ExtensionKeyValueStore = Arc<dyn DocsDatabase>;
|
||||
pub type ExtensionWorktree = Arc<dyn WorktreeDelegate>;
|
||||
pub type ExtensionKeyValueStore = Arc<dyn KeyValueStoreDelegate>;
|
||||
pub type ExtensionHttpResponseStream = Arc<Mutex<::http_client::Response<AsyncBody>>>;
|
||||
|
||||
pub struct ExtensionProject {
|
||||
@@ -57,6 +56,198 @@ pub fn linker() -> &'static Linker<WasmState> {
|
||||
LINKER.get_or_init(|| super::new_linker(Extension::add_to_linker))
|
||||
}
|
||||
|
||||
impl From<Range> for std::ops::Range<usize> {
|
||||
fn from(range: Range) -> Self {
|
||||
let start = range.start as usize;
|
||||
let end = range.end as usize;
|
||||
start..end
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Command> for extension::Command {
|
||||
fn from(value: Command) -> Self {
|
||||
Self {
|
||||
command: value.command,
|
||||
args: value.args,
|
||||
env: value.env,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<CodeLabel> for extension::CodeLabel {
|
||||
fn from(value: CodeLabel) -> Self {
|
||||
Self {
|
||||
code: value.code,
|
||||
spans: value.spans.into_iter().map(Into::into).collect(),
|
||||
filter_range: value.filter_range.into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<CodeLabelSpan> for extension::CodeLabelSpan {
|
||||
fn from(value: CodeLabelSpan) -> Self {
|
||||
match value {
|
||||
CodeLabelSpan::CodeRange(range) => Self::CodeRange(range.into()),
|
||||
CodeLabelSpan::Literal(literal) => Self::Literal(literal.into()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<CodeLabelSpanLiteral> for extension::CodeLabelSpanLiteral {
|
||||
fn from(value: CodeLabelSpanLiteral) -> Self {
|
||||
Self {
|
||||
text: value.text,
|
||||
highlight_name: value.highlight_name,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<extension::Completion> for Completion {
|
||||
fn from(value: extension::Completion) -> Self {
|
||||
Self {
|
||||
label: value.label,
|
||||
label_details: value.label_details.map(Into::into),
|
||||
detail: value.detail,
|
||||
kind: value.kind.map(Into::into),
|
||||
insert_text_format: value.insert_text_format.map(Into::into),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<extension::CompletionLabelDetails> for CompletionLabelDetails {
|
||||
fn from(value: extension::CompletionLabelDetails) -> Self {
|
||||
Self {
|
||||
detail: value.detail,
|
||||
description: value.description,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<extension::CompletionKind> for CompletionKind {
|
||||
fn from(value: extension::CompletionKind) -> Self {
|
||||
match value {
|
||||
extension::CompletionKind::Text => Self::Text,
|
||||
            extension::CompletionKind::Method => Self::Method,
            extension::CompletionKind::Function => Self::Function,
            extension::CompletionKind::Constructor => Self::Constructor,
            extension::CompletionKind::Field => Self::Field,
            extension::CompletionKind::Variable => Self::Variable,
            extension::CompletionKind::Class => Self::Class,
            extension::CompletionKind::Interface => Self::Interface,
            extension::CompletionKind::Module => Self::Module,
            extension::CompletionKind::Property => Self::Property,
            extension::CompletionKind::Unit => Self::Unit,
            extension::CompletionKind::Value => Self::Value,
            extension::CompletionKind::Enum => Self::Enum,
            extension::CompletionKind::Keyword => Self::Keyword,
            extension::CompletionKind::Snippet => Self::Snippet,
            extension::CompletionKind::Color => Self::Color,
            extension::CompletionKind::File => Self::File,
            extension::CompletionKind::Reference => Self::Reference,
            extension::CompletionKind::Folder => Self::Folder,
            extension::CompletionKind::EnumMember => Self::EnumMember,
            extension::CompletionKind::Constant => Self::Constant,
            extension::CompletionKind::Struct => Self::Struct,
            extension::CompletionKind::Event => Self::Event,
            extension::CompletionKind::Operator => Self::Operator,
            extension::CompletionKind::TypeParameter => Self::TypeParameter,
            extension::CompletionKind::Other(value) => Self::Other(value),
        }
    }
}

impl From<extension::InsertTextFormat> for InsertTextFormat {
    fn from(value: extension::InsertTextFormat) -> Self {
        match value {
            extension::InsertTextFormat::PlainText => Self::PlainText,
            extension::InsertTextFormat::Snippet => Self::Snippet,
            extension::InsertTextFormat::Other(value) => Self::Other(value),
        }
    }
}

impl From<extension::Symbol> for Symbol {
    fn from(value: extension::Symbol) -> Self {
        Self {
            kind: value.kind.into(),
            name: value.name,
        }
    }
}

impl From<extension::SymbolKind> for SymbolKind {
    fn from(value: extension::SymbolKind) -> Self {
        match value {
            extension::SymbolKind::File => Self::File,
            extension::SymbolKind::Module => Self::Module,
            extension::SymbolKind::Namespace => Self::Namespace,
            extension::SymbolKind::Package => Self::Package,
            extension::SymbolKind::Class => Self::Class,
            extension::SymbolKind::Method => Self::Method,
            extension::SymbolKind::Property => Self::Property,
            extension::SymbolKind::Field => Self::Field,
            extension::SymbolKind::Constructor => Self::Constructor,
            extension::SymbolKind::Enum => Self::Enum,
            extension::SymbolKind::Interface => Self::Interface,
            extension::SymbolKind::Function => Self::Function,
            extension::SymbolKind::Variable => Self::Variable,
            extension::SymbolKind::Constant => Self::Constant,
            extension::SymbolKind::String => Self::String,
            extension::SymbolKind::Number => Self::Number,
            extension::SymbolKind::Boolean => Self::Boolean,
            extension::SymbolKind::Array => Self::Array,
            extension::SymbolKind::Object => Self::Object,
            extension::SymbolKind::Key => Self::Key,
            extension::SymbolKind::Null => Self::Null,
            extension::SymbolKind::EnumMember => Self::EnumMember,
            extension::SymbolKind::Struct => Self::Struct,
            extension::SymbolKind::Event => Self::Event,
            extension::SymbolKind::Operator => Self::Operator,
            extension::SymbolKind::TypeParameter => Self::TypeParameter,
            extension::SymbolKind::Other(value) => Self::Other(value),
        }
    }
}

impl From<extension::SlashCommand> for SlashCommand {
    fn from(value: extension::SlashCommand) -> Self {
        Self {
            name: value.name,
            description: value.description,
            tooltip_text: value.tooltip_text,
            requires_argument: value.requires_argument,
        }
    }
}

impl From<SlashCommandOutput> for extension::SlashCommandOutput {
    fn from(value: SlashCommandOutput) -> Self {
        Self {
            text: value.text,
            sections: value.sections.into_iter().map(Into::into).collect(),
        }
    }
}

impl From<SlashCommandOutputSection> for extension::SlashCommandOutputSection {
    fn from(value: SlashCommandOutputSection) -> Self {
        Self {
            range: value.range.start as usize..value.range.end as usize,
            label: value.label,
        }
    }
}

impl From<SlashCommandArgumentCompletion> for extension::SlashCommandArgumentCompletion {
    fn from(value: SlashCommandArgumentCompletion) -> Self {
        Self {
            label: value.label,
            new_text: value.new_text,
            run_command: value.run_command,
        }
    }
}

#[async_trait]
impl HostKeyValueStore for WasmState {
    async fn insert(
@@ -93,25 +284,22 @@ impl HostProject for WasmState {

#[async_trait]
impl HostWorktree for WasmState {
    async fn id(
        &mut self,
        delegate: Resource<Arc<dyn LspAdapterDelegate>>,
    ) -> wasmtime::Result<u64> {
    async fn id(&mut self, delegate: Resource<Arc<dyn WorktreeDelegate>>) -> wasmtime::Result<u64> {
        let delegate = self.table.get(&delegate)?;
        Ok(delegate.worktree_id().to_proto())
        Ok(delegate.id())
    }

    async fn root_path(
        &mut self,
        delegate: Resource<Arc<dyn LspAdapterDelegate>>,
        delegate: Resource<Arc<dyn WorktreeDelegate>>,
    ) -> wasmtime::Result<String> {
        let delegate = self.table.get(&delegate)?;
        Ok(delegate.worktree_root_path().to_string_lossy().to_string())
        Ok(delegate.root_path())
    }

    async fn read_text_file(
        &mut self,
        delegate: Resource<Arc<dyn LspAdapterDelegate>>,
        delegate: Resource<Arc<dyn WorktreeDelegate>>,
        path: String,
    ) -> wasmtime::Result<Result<String, String>> {
        let delegate = self.table.get(&delegate)?;
@@ -123,7 +311,7 @@ impl HostWorktree for WasmState {

    async fn shell_env(
        &mut self,
        delegate: Resource<Arc<dyn LspAdapterDelegate>>,
        delegate: Resource<Arc<dyn WorktreeDelegate>>,
    ) -> wasmtime::Result<EnvVars> {
        let delegate = self.table.get(&delegate)?;
        Ok(delegate.shell_env().await.into_iter().collect())
@@ -131,14 +319,11 @@ impl HostWorktree for WasmState {

    async fn which(
        &mut self,
        delegate: Resource<Arc<dyn LspAdapterDelegate>>,
        delegate: Resource<Arc<dyn WorktreeDelegate>>,
        binary_name: String,
    ) -> wasmtime::Result<Option<String>> {
        let delegate = self.table.get(&delegate)?;
        Ok(delegate
            .which(binary_name.as_ref())
            .await
            .map(|path| path.to_string_lossy().to_string()))
        Ok(delegate.which(binary_name).await)
    }

    fn drop(&mut self, _worktree: Resource<Worktree>) -> Result<()> {

@@ -11,25 +11,22 @@ workspace = true
[lib]
path = "src/extensions_ui.rs"

[features]
test-support = []

[dependencies]
anyhow.workspace = true
assistant_slash_command.workspace = true
async-trait.workspace = true
client.workspace = true
collections.workspace = true
context_servers.workspace = true
db.workspace = true
editor.workspace = true
extension.workspace = true
extension_host.workspace = true
fs.workspace = true
futures.workspace = true
fuzzy.workspace = true
gpui.workspace = true
indexed_docs.workspace = true
language.workspace = true
log.workspace = true
lsp.workspace = true
num-format.workspace = true
picker.workspace = true
@@ -47,23 +44,7 @@ util.workspace = true
vim.workspace = true
wasmtime-wasi.workspace = true
workspace.workspace = true
zed_actions.workspace = true

[dev-dependencies]
async-compression.workspace = true
async-tar.workspace = true
ctor.workspace = true
editor = { workspace = true, features = ["test-support"] }
env_logger.workspace = true
extension_host = {workspace = true, features = ["test-support"] }
fs = { workspace = true, features = ["test-support"] }
gpui = { workspace = true, features = ["test-support"] }
http_client.workspace = true
indexed_docs.workspace = true
language = { workspace = true, features = ["test-support"] }
lsp.workspace = true
node_runtime.workspace = true
parking_lot.workspace = true
project = { workspace = true, features = ["test-support"] }
reqwest_client.workspace = true
serde_json.workspace = true
workspace = { workspace = true, features = ["test-support"] }

@@ -1,97 +0,0 @@
use std::pin::Pin;
use std::sync::Arc;

use anyhow::{anyhow, Result};
use async_trait::async_trait;
use context_servers::manager::{NativeContextServer, ServerCommand, ServerConfig};
use context_servers::protocol::InitializedContextServerProtocol;
use context_servers::ContextServer;
use extension_host::wasm_host::{ExtensionProject, WasmExtension, WasmHost};
use futures::{Future, FutureExt};
use gpui::{AsyncAppContext, Model};
use project::Project;
use wasmtime_wasi::WasiView as _;

pub struct ExtensionContextServer {
    #[allow(unused)]
    pub(crate) extension: WasmExtension,
    #[allow(unused)]
    pub(crate) host: Arc<WasmHost>,
    id: Arc<str>,
    context_server: Arc<NativeContextServer>,
}

impl ExtensionContextServer {
    pub async fn new(
        extension: WasmExtension,
        host: Arc<WasmHost>,
        id: Arc<str>,
        project: Model<Project>,
        mut cx: AsyncAppContext,
    ) -> Result<Self> {
        let extension_project = project.update(&mut cx, |project, cx| ExtensionProject {
            worktree_ids: project
                .visible_worktrees(cx)
                .map(|worktree| worktree.read(cx).id().to_proto())
                .collect(),
        })?;
        let command = extension
            .call({
                let id = id.clone();
                |extension, store| {
                    async move {
                        let project = store.data_mut().table().push(extension_project)?;
                        let command = extension
                            .call_context_server_command(store, id.clone(), project)
                            .await?
                            .map_err(|e| anyhow!("{}", e))?;
                        anyhow::Ok(command)
                    }
                    .boxed()
                }
            })
            .await?;

        let config = Arc::new(ServerConfig {
            settings: None,
            command: Some(ServerCommand {
                path: command.command,
                args: command.args,
                env: Some(command.env.into_iter().collect()),
            }),
        });

        anyhow::Ok(Self {
            extension,
            host,
            id: id.clone(),
            context_server: Arc::new(NativeContextServer::new(id, config)),
        })
    }
}

#[async_trait(?Send)]
impl ContextServer for ExtensionContextServer {
    fn id(&self) -> Arc<str> {
        self.id.clone()
    }

    fn config(&self) -> Arc<ServerConfig> {
        self.context_server.config()
    }

    fn client(&self) -> Option<Arc<InitializedContextServerProtocol>> {
        self.context_server.client()
    }

    fn start<'a>(
        self: Arc<Self>,
        cx: &'a AsyncAppContext,
    ) -> Pin<Box<dyn 'a + Future<Output = Result<()>>>> {
        self.context_server.clone().start(cx)
    }

    fn stop(&self) -> Result<()> {
        self.context_server.stop()
    }
}
@@ -1,79 +0,0 @@
use std::path::PathBuf;
use std::sync::Arc;

use anyhow::{anyhow, Result};
use async_trait::async_trait;
use futures::FutureExt;
use indexed_docs::{IndexedDocsDatabase, IndexedDocsProvider, PackageName, ProviderId};
use wasmtime_wasi::WasiView;

use extension_host::wasm_host::{WasmExtension, WasmHost};

pub struct ExtensionIndexedDocsProvider {
    pub(crate) extension: WasmExtension,
    pub(crate) host: Arc<WasmHost>,
    pub(crate) id: ProviderId,
}

#[async_trait]
impl IndexedDocsProvider for ExtensionIndexedDocsProvider {
    fn id(&self) -> ProviderId {
        self.id.clone()
    }

    fn database_path(&self) -> PathBuf {
        let mut database_path = self.host.work_dir.clone();
        database_path.push(self.extension.manifest.id.as_ref());
        database_path.push("docs");
        database_path.push(format!("{}.0.mdb", self.id));

        database_path
    }

    async fn suggest_packages(&self) -> Result<Vec<PackageName>> {
        self.extension
            .call({
                let id = self.id.clone();
                |extension, store| {
                    async move {
                        let packages = extension
                            .call_suggest_docs_packages(store, id.as_ref())
                            .await?
                            .map_err(|err| anyhow!("{err:?}"))?;

                        Ok(packages
                            .into_iter()
                            .map(|package| PackageName::from(package.as_str()))
                            .collect())
                    }
                    .boxed()
                }
            })
            .await
    }

    async fn index(&self, package: PackageName, database: Arc<IndexedDocsDatabase>) -> Result<()> {
        self.extension
            .call({
                let id = self.id.clone();
                |extension, store| {
                    async move {
                        let database_resource = store.data_mut().table().push(database as _)?;
                        extension
                            .call_index_docs(
                                store,
                                id.as_ref(),
                                package.as_ref(),
                                database_resource,
                            )
                            .await?
                            .map_err(|err| anyhow!("{err:?}"))?;

                        anyhow::Ok(())
                    }
                    .boxed()
                }
            })
            .await
    }
}
@@ -1,19 +1,21 @@
use std::{path::PathBuf, sync::Arc};

use anyhow::Result;
use assistant_slash_command::SlashCommandRegistry;
use anyhow::{anyhow, Result};
use assistant_slash_command::{ExtensionSlashCommand, SlashCommandRegistry};
use context_servers::manager::ServerCommand;
use context_servers::ContextServerFactoryRegistry;
use db::smol::future::FutureExt as _;
use extension::Extension;
use extension_host::wasm_host::ExtensionProject;
use extension_host::{extension_lsp_adapter::ExtensionLspAdapter, wasm_host};
use fs::Fs;
use gpui::{AppContext, BackgroundExecutor, Task};
use indexed_docs::{IndexedDocsRegistry, ProviderId};
use gpui::{AppContext, BackgroundExecutor, Model, Task};
use indexed_docs::{ExtensionIndexedDocsProvider, IndexedDocsRegistry, ProviderId};
use language::{LanguageRegistry, LanguageServerBinaryStatus, LoadedLanguage};
use snippet_provider::SnippetRegistry;
use theme::{ThemeRegistry, ThemeSettings};
use ui::SharedString;

use crate::extension_context_server::ExtensionContextServer;
use crate::{extension_indexed_docs_provider, extension_slash_command::ExtensionSlashCommand};
use wasmtime_wasi::WasiView as _;

pub struct ConcreteExtensionRegistrationHooks {
    slash_command_registry: Arc<SlashCommandRegistry>,
@@ -21,7 +23,7 @@ pub struct ConcreteExtensionRegistrationHooks {
    indexed_docs_registry: Arc<IndexedDocsRegistry>,
    snippet_registry: Arc<SnippetRegistry>,
    language_registry: Arc<LanguageRegistry>,
    context_server_factory_registry: Arc<ContextServerFactoryRegistry>,
    context_server_factory_registry: Model<ContextServerFactoryRegistry>,
    executor: BackgroundExecutor,
}

@@ -32,7 +34,7 @@ impl ConcreteExtensionRegistrationHooks {
        indexed_docs_registry: Arc<IndexedDocsRegistry>,
        snippet_registry: Arc<SnippetRegistry>,
        language_registry: Arc<LanguageRegistry>,
        context_server_factory_registry: Arc<ContextServerFactoryRegistry>,
        context_server_factory_registry: Model<ContextServerFactoryRegistry>,
        cx: &AppContext,
    ) -> Arc<dyn extension_host::ExtensionRegistrationHooks> {
        Arc::new(Self {
@@ -60,58 +62,85 @@ impl extension_host::ExtensionRegistrationHooks for ConcreteExtensionRegistratio

    fn register_slash_command(
        &self,
        command: wasm_host::SlashCommand,
        extension: wasm_host::WasmExtension,
        host: Arc<wasm_host::WasmHost>,
        extension: Arc<dyn Extension>,
        command: extension::SlashCommand,
    ) {
        self.slash_command_registry.register_command(
            ExtensionSlashCommand {
                command,
                extension,
                host,
            },
            false,
        )
        self.slash_command_registry
            .register_command(ExtensionSlashCommand::new(extension, command), false)
    }

    fn register_context_server(
        &self,
        id: Arc<str>,
        extension: wasm_host::WasmExtension,
        host: Arc<wasm_host::WasmHost>,
        cx: &mut AppContext,
    ) {
        self.context_server_factory_registry
            .register_server_factory(
                id.clone(),
                Arc::new({
                    move |project, cx| {
                        let id = id.clone();
                        let extension = extension.clone();
                        let host = host.clone();
                        cx.spawn(|cx| async move {
                            let context_server =
                                ExtensionContextServer::new(extension, host, id, project, cx)
            .update(cx, |registry, _| {
                registry.register_server_factory(
                    id.clone(),
                    Arc::new({
                        move |project, cx| {
                            log::info!(
                                "loading command for context server {id} from extension {}",
                                extension.manifest.id
                            );

                            let id = id.clone();
                            let extension = extension.clone();
                            cx.spawn(|mut cx| async move {
                                let extension_project =
                                    project.update(&mut cx, |project, cx| ExtensionProject {
                                        worktree_ids: project
                                            .visible_worktrees(cx)
                                            .map(|worktree| worktree.read(cx).id().to_proto())
                                            .collect(),
                                    })?;

                                let command = extension
                                    .call({
                                        let id = id.clone();
                                        |extension, store| {
                                            async move {
                                                let project = store
                                                    .data_mut()
                                                    .table()
                                                    .push(extension_project)?;
                                                let command = extension
                                                    .call_context_server_command(
                                                        store,
                                                        id.clone(),
                                                        project,
                                                    )
                                                    .await?
                                                    .map_err(|e| anyhow!("{}", e))?;
                                                anyhow::Ok(command)
                                            }
                                            .boxed()
                                        }
                                    })
                                    .await?;
                            anyhow::Ok(Arc::new(context_server) as _)
                        })
                    }
                }),
            );

                                log::info!("loaded command for context server {id}: {command:?}");

                                Ok(ServerCommand {
                                    path: command.command,
                                    args: command.args,
                                    env: Some(command.env.into_iter().collect()),
                                })
                            })
                        }
                    }),
                )
            });
    }

    fn register_docs_provider(
        &self,
        extension: wasm_host::WasmExtension,
        host: Arc<wasm_host::WasmHost>,
        provider_id: Arc<str>,
    ) {
        self.indexed_docs_registry.register_provider(Box::new(
            extension_indexed_docs_provider::ExtensionIndexedDocsProvider {
    fn register_docs_provider(&self, extension: Arc<dyn Extension>, provider_id: Arc<str>) {
        self.indexed_docs_registry
            .register_provider(Box::new(ExtensionIndexedDocsProvider::new(
                extension,
                host,
                id: ProviderId(provider_id),
            },
        ));
                ProviderId(provider_id),
            )));
    }

    fn register_snippets(&self, path: &PathBuf, snippet_contents: &str) -> Result<()> {

@@ -1,135 +0,0 @@
use std::sync::{atomic::AtomicBool, Arc};

use anyhow::{anyhow, Result};
use assistant_slash_command::{
    ArgumentCompletion, SlashCommand, SlashCommandOutput, SlashCommandOutputSection,
    SlashCommandResult,
};
use futures::FutureExt as _;
use gpui::{Task, WeakView, WindowContext};
use language::{BufferSnapshot, LspAdapterDelegate};
use ui::prelude::*;
use wasmtime_wasi::WasiView;
use workspace::Workspace;

use extension_host::wasm_host::{WasmExtension, WasmHost};

pub struct ExtensionSlashCommand {
    pub(crate) extension: WasmExtension,
    #[allow(unused)]
    pub(crate) host: Arc<WasmHost>,
    pub(crate) command: extension_host::wasm_host::SlashCommand,
}

impl SlashCommand for ExtensionSlashCommand {
    fn name(&self) -> String {
        self.command.name.clone()
    }

    fn description(&self) -> String {
        self.command.description.clone()
    }

    fn menu_text(&self) -> String {
        self.command.tooltip_text.clone()
    }

    fn requires_argument(&self) -> bool {
        self.command.requires_argument
    }

    fn complete_argument(
        self: Arc<Self>,
        arguments: &[String],
        _cancel: Arc<AtomicBool>,
        _workspace: Option<WeakView<Workspace>>,
        cx: &mut WindowContext,
    ) -> Task<Result<Vec<ArgumentCompletion>>> {
        let arguments = arguments.to_owned();
        cx.background_executor().spawn(async move {
            self.extension
                .call({
                    let this = self.clone();
                    move |extension, store| {
                        async move {
                            let completions = extension
                                .call_complete_slash_command_argument(
                                    store,
                                    &this.command,
                                    &arguments,
                                )
                                .await?
                                .map_err(|e| anyhow!("{}", e))?;

                            anyhow::Ok(
                                completions
                                    .into_iter()
                                    .map(|completion| ArgumentCompletion {
                                        label: completion.label.into(),
                                        new_text: completion.new_text,
                                        replace_previous_arguments: false,
                                        after_completion: completion.run_command.into(),
                                    })
                                    .collect(),
                            )
                        }
                        .boxed()
                    }
                })
                .await
        })
    }

    fn run(
        self: Arc<Self>,
        arguments: &[String],
        _context_slash_command_output_sections: &[SlashCommandOutputSection<language::Anchor>],
        _context_buffer: BufferSnapshot,
        _workspace: WeakView<Workspace>,
        delegate: Option<Arc<dyn LspAdapterDelegate>>,
        cx: &mut WindowContext,
    ) -> Task<SlashCommandResult> {
        let arguments = arguments.to_owned();
        let output = cx.background_executor().spawn(async move {
            self.extension
                .call({
                    let this = self.clone();
                    move |extension, store| {
                        async move {
                            let resource = if let Some(delegate) = delegate {
                                Some(store.data_mut().table().push(delegate)?)
                            } else {
                                None
                            };
                            let output = extension
                                .call_run_slash_command(store, &this.command, &arguments, resource)
                                .await?
                                .map_err(|e| anyhow!("{}", e))?;

                            anyhow::Ok(output)
                        }
                        .boxed()
                    }
                })
                .await
        });
        cx.foreground_executor().spawn(async move {
            let output = output.await?;
            Ok(SlashCommandOutput {
                text: output.text,
                sections: output
                    .sections
                    .into_iter()
                    .map(|section| SlashCommandOutputSection {
                        range: section.range.into(),
                        icon: IconName::Code,
                        label: section.label.into(),
                        metadata: None,
                    })
                    .collect(),
                run_commands_in_text: false,
            }
            .to_event_stream())
        })
    }
}
@@ -1,14 +1,8 @@
mod components;
mod extension_context_server;
mod extension_indexed_docs_provider;
mod extension_registration_hooks;
mod extension_slash_command;
mod extension_suggest;
mod extension_version_selector;

#[cfg(test)]
mod extension_store_test;

pub use extension_registration_hooks::ConcreteExtensionRegistrationHooks;

use std::ops::DerefMut;
@@ -44,12 +38,12 @@ use crate::extension_version_selector::{
    ExtensionVersionSelector, ExtensionVersionSelectorDelegate,
};

actions!(zed, [Extensions, InstallDevExtension]);
actions!(zed, [InstallDevExtension]);

pub fn init(cx: &mut AppContext) {
    cx.observe_new_views(move |workspace: &mut Workspace, cx| {
        workspace
            .register_action(move |workspace, _: &Extensions, cx| {
            .register_action(move |workspace, _: &zed_actions::Extensions, cx| {
                let existing = workspace
                    .active_pane()
                    .read(cx)

@@ -10,13 +10,13 @@ pub use open_path_prompt::OpenPathDelegate;

use collections::HashMap;
use editor::{scroll::Autoscroll, Bias, Editor};
use file_finder_settings::FileFinderSettings;
use file_finder_settings::{FileFinderSettings, FileFinderWidth};
use file_icons::FileIcons;
use fuzzy::{CharBag, PathMatch, PathMatchCandidate};
use gpui::{
    actions, rems, Action, AnyElement, AppContext, DismissEvent, EventEmitter, FocusHandle,
    FocusableView, Model, Modifiers, ModifiersChangedEvent, ParentElement, Render, Styled, Task,
    View, ViewContext, VisualContext, WeakView,
    actions, Action, AnyElement, AppContext, DismissEvent, EventEmitter, FocusHandle,
    FocusableView, KeyContext, Model, Modifiers, ModifiersChangedEvent, ParentElement, Render,
    Styled, Task, View, ViewContext, VisualContext, WeakView,
};
use new_path_prompt::NewPathPrompt;
use open_path_prompt::OpenPathPrompt;
@@ -32,16 +32,30 @@ use std::{
    },
};
use text::Point;
use ui::{prelude::*, HighlightedLabel, ListItem, ListItemSpacing};
use ui::{
    prelude::*, ContextMenu, HighlightedLabel, KeyBinding, ListItem, ListItemSpacing, PopoverMenu,
    PopoverMenuHandle,
};
use util::{paths::PathWithPosition, post_inc, ResultExt};
use workspace::{item::PreviewTabsSettings, notifications::NotifyResultExt, ModalView, Workspace};
use workspace::{
    item::PreviewTabsSettings, notifications::NotifyResultExt, pane, ModalView, SplitDirection,
    Workspace,
};

actions!(file_finder, [SelectPrev]);
actions!(file_finder, [SelectPrev, OpenMenu]);

impl ModalView for FileFinder {}
impl ModalView for FileFinder {
    fn on_before_dismiss(&mut self, cx: &mut ViewContext<Self>) -> workspace::DismissDecision {
        let submenu_focused = self.picker.update(cx, |picker, cx| {
            picker.delegate.popover_menu_handle.is_focused(cx)
        });
        workspace::DismissDecision::Dismiss(!submenu_focused)
    }
}

pub struct FileFinder {
    picker: View<Picker<FileFinderDelegate>>,
    picker_focus_handle: FocusHandle,
    init_modifiers: Option<Modifiers>,
}

@@ -142,8 +156,14 @@ impl FileFinder {
    }

    fn new(delegate: FileFinderDelegate, cx: &mut ViewContext<Self>) -> Self {
        let picker = cx.new_view(|cx| Picker::uniform_list(delegate, cx));
        let picker_focus_handle = picker.focus_handle(cx);
        picker.update(cx, |picker, _| {
            picker.delegate.focus_handle = picker_focus_handle.clone();
        });
        Self {
            picker: cx.new_view(|cx| Picker::uniform_list(delegate, cx)),
            picker,
            picker_focus_handle,
            init_modifiers: cx.modifiers().modified().then_some(cx.modifiers()),
        }
    }
@@ -168,23 +188,105 @@ impl FileFinder {
        self.init_modifiers = Some(cx.modifiers());
        cx.dispatch_action(Box::new(menu::SelectPrev));
    }

    fn handle_open_menu(&mut self, _: &OpenMenu, cx: &mut ViewContext<Self>) {
        self.picker.update(cx, |picker, cx| {
            let menu_handle = &picker.delegate.popover_menu_handle;
            if !menu_handle.is_deployed() {
                menu_handle.show(cx);
            }
        });
    }

    fn go_to_file_split_left(&mut self, _: &pane::SplitLeft, cx: &mut ViewContext<Self>) {
        self.go_to_file_split_inner(SplitDirection::Left, cx)
    }

    fn go_to_file_split_right(&mut self, _: &pane::SplitRight, cx: &mut ViewContext<Self>) {
        self.go_to_file_split_inner(SplitDirection::Right, cx)
    }

    fn go_to_file_split_up(&mut self, _: &pane::SplitUp, cx: &mut ViewContext<Self>) {
        self.go_to_file_split_inner(SplitDirection::Up, cx)
    }

    fn go_to_file_split_down(&mut self, _: &pane::SplitDown, cx: &mut ViewContext<Self>) {
        self.go_to_file_split_inner(SplitDirection::Down, cx)
    }

    fn go_to_file_split_inner(
        &mut self,
        split_direction: SplitDirection,
        cx: &mut ViewContext<Self>,
    ) {
        self.picker.update(cx, |picker, cx| {
            let delegate = &mut picker.delegate;
            if let Some(workspace) = delegate.workspace.upgrade() {
                if let Some(m) = delegate.matches.get(delegate.selected_index()) {
                    let path = match &m {
                        Match::History { path, .. } => {
                            let worktree_id = path.project.worktree_id;
                            ProjectPath {
                                worktree_id,
                                path: Arc::clone(&path.project.path),
                            }
                        }
                        Match::Search(m) => ProjectPath {
                            worktree_id: WorktreeId::from_usize(m.0.worktree_id),
                            path: m.0.path.clone(),
                        },
                    };
                    let open_task = workspace.update(cx, move |workspace, cx| {
                        workspace.split_path_preview(path, false, Some(split_direction), cx)
                    });
                    open_task.detach_and_log_err(cx);
                }
            }
        })
    }

    pub fn modal_max_width(
        width_setting: Option<FileFinderWidth>,
        cx: &mut ViewContext<Self>,
    ) -> Pixels {
        let window_width = cx.viewport_size().width;
        let small_width = Pixels(545.);

        match width_setting {
            None | Some(FileFinderWidth::Small) => small_width,
            Some(FileFinderWidth::Full) => window_width,
            Some(FileFinderWidth::XLarge) => (window_width - Pixels(512.)).max(small_width),
            Some(FileFinderWidth::Large) => (window_width - Pixels(768.)).max(small_width),
            Some(FileFinderWidth::Medium) => (window_width - Pixels(1024.)).max(small_width),
        }
    }
}

impl EventEmitter<DismissEvent> for FileFinder {}

impl FocusableView for FileFinder {
    fn focus_handle(&self, cx: &AppContext) -> FocusHandle {
        self.picker.focus_handle(cx)
    fn focus_handle(&self, _: &AppContext) -> FocusHandle {
        self.picker_focus_handle.clone()
    }
}

impl Render for FileFinder {
    fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
        let key_context = self.picker.read(cx).delegate.key_context(cx);

        let file_finder_settings = FileFinderSettings::get_global(cx);
        let modal_max_width = Self::modal_max_width(file_finder_settings.modal_max_width, cx);

        v_flex()
            .key_context("FileFinder")
            .w(rems(34.))
            .key_context(key_context)
            .w(modal_max_width)
            .on_modifiers_changed(cx.listener(Self::handle_modifiers_changed))
            .on_action(cx.listener(Self::handle_select_prev))
            .on_action(cx.listener(Self::handle_open_menu))
            .on_action(cx.listener(Self::go_to_file_split_left))
            .on_action(cx.listener(Self::go_to_file_split_right))
            .on_action(cx.listener(Self::go_to_file_split_up))
            .on_action(cx.listener(Self::go_to_file_split_down))
            .child(self.picker.clone())
    }
}
@@ -205,6 +307,8 @@ pub struct FileFinderDelegate {
    history_items: Vec<FoundPath>,
    separate_history: bool,
    first_update: bool,
    popover_menu_handle: PopoverMenuHandle<ContextMenu>,
    focus_handle: FocusHandle,
}

/// Use a custom ordering for file finder: the regular one
@@ -533,6 +637,8 @@ impl FileFinderDelegate {
            history_items,
            separate_history,
            first_update: true,
            popover_menu_handle: PopoverMenuHandle::default(),
            focus_handle: cx.focus_handle(),
        }
    }

@@ -845,6 +951,15 @@ impl FileFinderDelegate {

        0
    }

    fn key_context(&self, cx: &WindowContext) -> KeyContext {
        let mut key_context = KeyContext::new_with_defaults();
        key_context.add("FileFinder");
        if self.popover_menu_handle.is_focused(cx) {
            key_context.add("menu_open");
        }
        key_context
    }
}

impl PickerDelegate for FileFinderDelegate {
@@ -958,7 +1073,7 @@ impl PickerDelegate for FileFinderDelegate {
                    let allow_preview =
                        PreviewTabsSettings::get_global(cx).enable_preview_from_file_finder;
                    if secondary {
                        workspace.split_path_preview(project_path, allow_preview, cx)
                        workspace.split_path_preview(project_path, allow_preview, None, cx)
                    } else {
                        workspace.open_path_preview(
                            project_path,
@@ -1125,6 +1240,49 @@ impl PickerDelegate for FileFinderDelegate {
            ),
        )
    }

    fn render_footer(&self, cx: &mut ViewContext<Picker<Self>>) -> Option<AnyElement> {
        Some(
            h_flex()
                .w_full()
                .p_2()
                .gap_2()
                .justify_end()
                .border_t_1()
                .border_color(cx.theme().colors().border_variant)
                .child(
                    Button::new("open-selection", "Open")
                        .key_binding(KeyBinding::for_action(&menu::Confirm, cx))
                        .on_click(|_, cx| cx.dispatch_action(menu::Confirm.boxed_clone())),
                )
                .child(
                    PopoverMenu::new("menu-popover")
                        .with_handle(self.popover_menu_handle.clone())
                        .attach(gpui::AnchorCorner::TopRight)
                        .anchor(gpui::AnchorCorner::BottomRight)
                        .trigger(
                            Button::new("actions-trigger", "Split Options")
                                .selected_label_color(Color::Accent)
                                .key_binding(KeyBinding::for_action_in(
                                    &OpenMenu,
                                    &self.focus_handle,
                                    cx,
                                )),
                        )
                        .menu({
                            move |cx| {
                                Some(ContextMenu::build(cx, move |menu, _| {
                                    menu.action("Split Left", pane::SplitLeft.boxed_clone())
                                        .action("Split Right", pane::SplitRight.boxed_clone())
                                        .action("Split Up", pane::SplitUp.boxed_clone())
                                        .action("Split Down", pane::SplitDown.boxed_clone())
                                }))
                            }
                        }),
                )
                .into_any(),
        )
    }
}

#[cfg(test)]

@@ -6,6 +6,7 @@ use settings::{Settings, SettingsSources};
#[derive(Deserialize, Debug, Clone, Copy, PartialEq)]
pub struct FileFinderSettings {
    pub file_icons: bool,
    pub modal_max_width: Option<FileFinderWidth>,
}

#[derive(Clone, Default, Serialize, Deserialize, JsonSchema, Debug)]
@@ -14,6 +15,10 @@ pub struct FileFinderSettingsContent {
    ///
    /// Default: true
    pub file_icons: Option<bool>,
    /// Determines how much space the file finder can take up in relation to the available window width.
    ///
    /// Default: small
    pub modal_max_width: Option<FileFinderWidth>,
}

impl Settings for FileFinderSettings {
@@ -25,3 +30,14 @@ impl Settings for FileFinderSettings {
        sources.json_merge()
    }
}

#[derive(Debug, PartialEq, Eq, Clone, Copy, Default, Serialize, Deserialize, JsonSchema)]
#[serde(rename_all = "lowercase")]
pub enum FileFinderWidth {
    #[default]
    Small,
    Medium,
    Large,
    XLarge,
    Full,
}

@@ -2,9 +2,8 @@ mod supported_countries;

use anyhow::{anyhow, Result};
use futures::{io::BufReader, stream::BoxStream, AsyncBufReadExt, AsyncReadExt, Stream, StreamExt};
use http_client::{AsyncBody, HttpClient, HttpRequestExt, Method, Request as HttpRequest};
use http_client::{AsyncBody, HttpClient, Method, Request as HttpRequest};
use serde::{Deserialize, Serialize};
use std::time::Duration;

pub use supported_countries::*;

@@ -15,7 +14,6 @@ pub async fn stream_generate_content(
    api_url: &str,
    api_key: &str,
    mut request: GenerateContentRequest,
    low_speed_timeout: Option<Duration>,
) -> Result<BoxStream<'static, Result<GenerateContentResponse>>> {
    let uri = format!(
        "{api_url}/v1beta/models/{model}:streamGenerateContent?alt=sse&key={api_key}",
@@ -23,15 +21,11 @@
    );
    request.model.clear();

    let mut request_builder = HttpRequest::builder()
    let request_builder = HttpRequest::builder()
        .method(Method::POST)
        .uri(uri)
        .header("Content-Type", "application/json");

    if let Some(low_speed_timeout) = low_speed_timeout {
        request_builder = request_builder.read_timeout(low_speed_timeout);
    };

    let request = request_builder.body(AsyncBody::from(serde_json::to_string(&request)?))?;
    let mut response = client.send(request).await?;
    if response.status().is_success() {
@@ -70,7 +64,6 @@ pub async fn count_tokens(
    api_url: &str,
    api_key: &str,
    request: CountTokensRequest,
    low_speed_timeout: Option<Duration>,
) -> Result<CountTokensResponse> {
    let uri = format!(
        "{}/v1beta/models/gemini-pro:countTokens?key={}",
@@ -78,15 +71,11 @@
    );
    let request = serde_json::to_string(&request)?;

    let mut request_builder = HttpRequest::builder()
    let request_builder = HttpRequest::builder()
        .method(Method::POST)
        .uri(&uri)
        .header("Content-Type", "application/json");

    if let Some(low_speed_timeout) = low_speed_timeout {
        request_builder = request_builder.read_timeout(low_speed_timeout);
    }

    let http_request = request_builder.body(AsyncBody::from(request))?;
    let mut response = client.send(http_request).await?;
    let mut text = String::new();

@@ -1,6 +1,4 @@
use gpui::{
    div, img, prelude::*, App, AppContext, ImageSource, Render, ViewContext, WindowOptions,
};
use gpui::{div, img, prelude::*, App, AppContext, Render, ViewContext, WindowOptions};
use std::path::PathBuf;

struct GifViewer {
@@ -16,7 +14,7 @@ impl GifViewer {
impl Render for GifViewer {
    fn render(&mut self, _cx: &mut ViewContext<Self>) -> impl IntoElement {
        div().size_full().child(
            img(ImageSource::File(self.gif_path.clone().into()))
            img(self.gif_path.clone())
                .size_full()
                .object_fit(gpui::ObjectFit::Contain)
                .id("gif"),

@@ -6,8 +6,8 @@
<circle cx="240" cy="100" r="30" stroke="#dc2626" />
<circle cx="380" cy="100" r="20" stroke="#d97706" />
<circle cx="380" cy="240" r="30" stroke="#06b6d4" />
<circle cx="100" cy="240" r="30" stroke="#3b82f6" />
<circle cx="100" cy="240" r="30" stroke="#3b82f666" />
<circle cx="240" cy="380" r="30" stroke="#7c3aed" />
<circle cx="380" cy="380" r="20" stroke="#c026d3" />
<circle cx="100" cy="380" r="20" stroke="#e11d48" />
</svg>
<circle cx="100" cy="380" r="20" stroke="#e11d4866" />
</svg>

SVG image diff: 741 B before, 746 B after.
Some files were not shown because too many files have changed in this diff.