Compare commits

..

7 Commits

Author SHA1 Message Date
Marshall Bowers
9641ae0755 Remove basic.conf (#10120)
This PR removes the `basic.conf` file.

In #10099 we suppressed some typo warnings that had cropped up in this
file, but it turns out we don't need the file at all.

Release Notes:

- N/A
2024-04-03 12:32:47 -04:00
Kirill Bulatov
ce73ff9808 Avoid failing format test with current date (#10068)
Replace the test that used `chrono::offset::Local::now().naive_local()` as its
input, which made the formatting assertion fail at least once per year (a
minimal sketch follows this commit entry).


Release Notes:

- N/A
2024-04-03 12:32:40 -04:00
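
For context, a minimal sketch of the idea behind this fix — not the actual Zed test; `format_timestamp` is a hypothetical stand-in — showing a fixed date replacing a wall-clock value so the expected string can never drift across a year (or DST) boundary:

```rust
use chrono::NaiveDate;

/// Hypothetical formatter standing in for the function under test.
fn format_timestamp(timestamp: chrono::NaiveDateTime) -> String {
    timestamp.format("%Y-%m-%d %H:%M:%S").to_string()
}

#[test]
fn formats_timestamps_deterministically() {
    // Pinning a fixed date keeps the expected string stable, unlike
    // `chrono::offset::Local::now().naive_local()`, which can cross a year
    // boundary between producing the input and the expectation.
    let timestamp = NaiveDate::from_ymd_opt(2024, 4, 3)
        .unwrap()
        .and_hms_opt(12, 0, 0)
        .unwrap();
    assert_eq!(format_timestamp(timestamp), "2024-04-03 12:00:00");
}
```
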
Joseph T. Lyons
240db73199 v0.129.x stable 2024-04-03 12:11:10 -04:00
gcp-cherry-pick-bot[bot]
6b52917e75 Don't update active completion for editors that are not focused (cherry-pick #9904) (#9907)
Cherry-picked Don't update active completion for editors that are not
focused (#9904)

Release Notes:

- N/A

Co-authored-by: Antonio Scandurra <me@as-cii.com>
2024-03-28 10:52:33 +01:00
Marshall Bowers
f226a9932a zed 0.129.1 2024-03-27 13:50:53 -04:00
Marshall Bowers
a7915cb848 Look up extensions in the new index when reporting extension events (#9879)
This PR fixes a bug that prevented extension telemetry events from being
reported.

We need to look up the extensions in the new index, because the extensions
about to be loaded won't be found in the old one (see the sketch after this
entry).

Release Notes:

- N/A
2024-03-27 13:48:45 -04:00
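
For illustration only, a sketch of the lookup distinction with hypothetical names — `ExtensionIndex` and `report_extension_events` are stand-ins, not Zed's actual `ExtensionStore` API:

```rust
use std::collections::HashMap;

/// Hypothetical, simplified stand-in for an extension index
/// (extension id -> version).
type ExtensionIndex = HashMap<String, String>;

fn report_extension_events(new_index: &ExtensionIndex, extensions_to_load: &[String]) {
    for id in extensions_to_load {
        // Consult the index that was just rebuilt; the previous index has no
        // entries yet for the extensions about to be loaded, so looking there
        // silently drops the event.
        if let Some(version) = new_index.get(id) {
            println!("telemetry: extension {id} v{version} loaded");
        }
    }
}

fn main() {
    let mut index = ExtensionIndex::new();
    index.insert("gleam".into(), "1.0.0".into());
    report_extension_events(&index, &["gleam".to_string()]);
}
```
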
Joseph T. Lyons
2d8288f076 v0.129.x preview 2024-03-27 10:52:55 -04:00
492 changed files with 7916 additions and 21617 deletions

View File

@@ -23,6 +23,12 @@ body:
description: Run the `copy system specs into clipboard` command palette action and paste the output in the field below.
validations:
required: true
- type: textarea
attributes:
label: If applicable, add mockups / screenshots to help explain present your vision of the feature
description: Drag issues into the text input below
validations:
required: false
- type: textarea
attributes:
label: If applicable, attach your `~/Library/Logs/Zed/Zed.log` file to this issue.

View File

@@ -54,9 +54,6 @@ jobs:
- name: Check unused dependencies
uses: bnjbvr/cargo-machete@main
- name: Check license generation
run: script/generate-licenses /tmp/zed_licenses_output
- name: Ensure fresh merge
shell: bash -euxo pipefail {0}
run: |

View File

@@ -9,10 +9,10 @@ jobs:
if: github.repository_owner == 'zed-industries'
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
- uses: actions/setup-python@v4
with:
python-version: "3.11"
python-version: "3.10.5"
architecture: "x64"
cache: "pip"
- run: pip install -r script/update_top_ranking_issues/requirements.txt
- run: python script/update_top_ranking_issues/main.py --github-token ${{ secrets.GITHUB_TOKEN }} --issue-reference-number 5393
- run: python script/update_top_ranking_issues/main.py 5393 --github-token ${{ secrets.GITHUB_TOKEN }} --prod

View File

@@ -9,10 +9,10 @@ jobs:
if: github.repository_owner == 'zed-industries'
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
- uses: actions/setup-python@v4
with:
python-version: "3.11"
python-version: "3.10.5"
architecture: "x64"
cache: "pip"
- run: pip install -r script/update_top_ranking_issues/requirements.txt
- run: python script/update_top_ranking_issues/main.py --github-token ${{ secrets.GITHUB_TOKEN }} --issue-reference-number 6952 --query-day-interval 7
- run: python script/update_top_ranking_issues/main.py 6952 --github-token ${{ secrets.GITHUB_TOKEN }} --prod --query-day-interval 7

Cargo.lock (generated): 514 changes

File diff suppressed because it is too large

View File

@@ -1,7 +1,6 @@
[workspace]
members = [
"crates/activity_indicator",
"crates/anthropic",
"crates/assets",
"crates/assistant",
"crates/audio",
@@ -29,7 +28,6 @@ members = [
"crates/feature_flags",
"crates/feedback",
"crates/file_finder",
"crates/file_icons",
"crates/fs",
"crates/fsevent",
"crates/fuzzy",
@@ -72,7 +70,6 @@ members = [
"crates/task",
"crates/tasks_ui",
"crates/search",
"crates/semantic_version",
"crates/settings",
"crates/snippet",
"crates/sqlez",
@@ -80,7 +77,6 @@ members = [
"crates/story",
"crates/storybook",
"crates/sum_tree",
"crates/tab_switcher",
"crates/terminal",
"crates/terminal_view",
"crates/text",
@@ -100,21 +96,12 @@ members = [
"crates/zed_actions",
"extensions/astro",
"extensions/clojure",
"extensions/csharp",
"extensions/dart",
"extensions/emmet",
"extensions/erlang",
"extensions/gleam",
"extensions/haskell",
"extensions/html",
"extensions/php",
"extensions/prisma",
"extensions/purescript",
"extensions/svelte",
"extensions/toml",
"extensions/uiua",
"extensions/zig",
"tooling/xtask",
]
@@ -124,7 +111,6 @@ resolver = "2"
[workspace.dependencies]
activity_indicator = { path = "crates/activity_indicator" }
ai = { path = "crates/ai" }
anthropic = { path = "crates/anthropic" }
assets = { path = "crates/assets" }
assistant = { path = "crates/assistant" }
audio = { path = "crates/audio" }
@@ -152,7 +138,6 @@ extensions_ui = { path = "crates/extensions_ui" }
feature_flags = { path = "crates/feature_flags" }
feedback = { path = "crates/feedback" }
file_finder = { path = "crates/file_finder" }
file_icons = { path = "crates/file_icons" }
fs = { path = "crates/fs" }
fsevent = { path = "crates/fsevent" }
fuzzy = { path = "crates/fuzzy" }
@@ -196,7 +181,6 @@ rpc = { path = "crates/rpc" }
task = { path = "crates/task" }
tasks_ui = { path = "crates/tasks_ui" }
search = { path = "crates/search" }
semantic_version = { path = "crates/semantic_version" }
settings = { path = "crates/settings" }
snippet = { path = "crates/snippet" }
sqlez = { path = "crates/sqlez" }
@@ -204,7 +188,6 @@ sqlez_macros = { path = "crates/sqlez_macros" }
story = { path = "crates/story" }
storybook = { path = "crates/storybook" }
sum_tree = { path = "crates/sum_tree" }
tab_switcher = { path = "crates/tab_switcher" }
terminal = { path = "crates/terminal" }
terminal_view = { path = "crates/terminal_view" }
text = { path = "crates/text" }
@@ -223,16 +206,14 @@ zed = { path = "crates/zed" }
zed_actions = { path = "crates/zed_actions" }
anyhow = "1.0.57"
any_vec = "0.13"
async-compression = { version = "0.4", features = ["gzip", "futures-io"] }
async-fs = "1.6"
async-recursion = "1.0.0"
async-tar = "0.4.2"
async-trait = "0.1"
bitflags = "2.4.2"
# todo(linux): Remove these once https://github.com/kvark/blade/pull/107 is merged and we've upgraded our renderer
blade-graphics = { git = "https://github.com/zed-industries/blade", rev = "85981c0f4890a5fcd08da2a53cc4a0459247af44" }
blade-macros = { git = "https://github.com/zed-industries/blade", rev = "85981c0f4890a5fcd08da2a53cc4a0459247af44" }
blade-graphics = { git = "https://github.com/kvark/blade", rev = "61cbd6b2c224791d52b150fe535cee665cc91bb2" }
blade-macros = { git = "https://github.com/kvark/blade", rev = "61cbd6b2c224791d52b150fe535cee665cc91bb2" }
blade-rwh = { package = "raw-window-handle", version = "0.5" }
cap-std = "3.0"
chrono = { version = "0.4", features = ["serde"] }
@@ -296,8 +277,6 @@ tempfile = "3.9.0"
thiserror = "1.0.29"
tiktoken-rs = "0.5.7"
time = { version = "0.3", features = [
"macros",
"parsing",
"serde",
"serde-well-known",
"formatting",
@@ -308,11 +287,15 @@ tower-http = "0.4.4"
tree-sitter = { version = "0.20", features = ["wasm"] }
tree-sitter-bash = { git = "https://github.com/tree-sitter/tree-sitter-bash", rev = "7331995b19b8f8aba2d5e26deb51d2195c18bc94" }
tree-sitter-c = "0.20.1"
tree-sitter-clojure = { git = "https://github.com/prcastro/tree-sitter-clojure", branch = "update-ts" }
tree-sitter-c-sharp = { git = "https://github.com/tree-sitter/tree-sitter-c-sharp", rev = "dd5e59721a5f8dae34604060833902b882023aaf" }
tree-sitter-cpp = { git = "https://github.com/tree-sitter/tree-sitter-cpp", rev = "f44509141e7e483323d2ec178f2d2e6c0fc041c1" }
tree-sitter-css = { git = "https://github.com/tree-sitter/tree-sitter-css", rev = "769203d0f9abe1a9a691ac2b9fe4bb4397a73c51" }
tree-sitter-dart = { git = "https://github.com/agent3bood/tree-sitter-dart", rev = "48934e3bf757a9b78f17bdfaa3e2b4284656fdc7" }
tree-sitter-elixir = { git = "https://github.com/elixir-lang/tree-sitter-elixir", rev = "a2861e88a730287a60c11ea9299c033c7d076e30" }
tree-sitter-elm = { git = "https://github.com/elm-tooling/tree-sitter-elm", rev = "692c50c0b961364c40299e73c1306aecb5d20f40" }
tree-sitter-embedded-template = "0.20.0"
tree-sitter-erlang = "0.4.0"
tree-sitter-glsl = { git = "https://github.com/theHamsta/tree-sitter-glsl", rev = "2a56fb7bc8bb03a1892b4741279dd0a8758b7fb3" }
tree-sitter-go = { git = "https://github.com/tree-sitter/tree-sitter-go", rev = "aeb2f33b366fd78d5789ff104956ce23508b85db" }
tree-sitter-gomod = { git = "https://github.com/camdencheek/tree-sitter-go-mod" }
@@ -328,6 +311,7 @@ tree-sitter-markdown = { git = "https://github.com/MDeiml/tree-sitter-markdown",
tree-sitter-nix = { git = "https://github.com/nix-community/tree-sitter-nix", rev = "66e3e9ce9180ae08fc57372061006ef83f0abde7" }
tree-sitter-nu = { git = "https://github.com/nushell/tree-sitter-nu", rev = "7dd29f9616822e5fc259f5b4ae6c4ded9a71a132" }
tree-sitter-ocaml = { git = "https://github.com/tree-sitter/tree-sitter-ocaml", rev = "4abfdc1c7af2c6c77a370aee974627be1c285b3b" }
tree-sitter-php = "0.21.1"
tree-sitter-proto = { git = "https://github.com/rewinfrey/tree-sitter-proto", rev = "36d54f288aee112f13a67b550ad32634d0c2cb52" }
tree-sitter-python = "0.20.2"
tree-sitter-racket = { git = "https://github.com/zed-industries/tree-sitter-racket", rev = "eb010cf2c674c6fd9a6316a84e28ef90190fe51a" }
@@ -335,9 +319,11 @@ tree-sitter-regex = "0.20.0"
tree-sitter-ruby = "0.20.0"
tree-sitter-rust = "0.20.3"
tree-sitter-scheme = { git = "https://github.com/6cdh/tree-sitter-scheme", rev = "af0fd1fa452cb2562dc7b5c8a8c55551c39273b9" }
tree-sitter-toml = { git = "https://github.com/tree-sitter/tree-sitter-toml", rev = "342d9be207c2dba869b9967124c679b5e6fd0ebe" }
tree-sitter-typescript = { git = "https://github.com/tree-sitter/tree-sitter-typescript", rev = "5d20856f34315b068c41edaee2ac8a100081d259" }
tree-sitter-vue = { git = "https://github.com/zed-industries/tree-sitter-vue", rev = "6608d9d60c386f19d80af7d8132322fa11199c42" }
tree-sitter-yaml = { git = "https://github.com/zed-industries/tree-sitter-yaml", rev = "f545a41f57502e1b5ddf2a6668896c1b0620f930" }
tree-sitter-zig = { git = "https://github.com/maxxnino/tree-sitter-zig", rev = "0d08703e4c3f426ec61695d7617415fff97029bd" }
unindent = "0.1.7"
unicase = "2.6"
url = "2.2"
@@ -398,7 +384,6 @@ debug = "limited"
[profile.dev.package]
taffy = { opt-level = 3 }
cranelift-codegen = { opt-level = 3 }
resvg = { opt-level = 3 }
rustybuzz = { opt-level = 3 }
ttf-parser = { opt-level = 3 }
wasmtime-cranelift = { opt-level = 3 }

View File

@@ -16,9 +16,7 @@
"escape": "menu::Cancel",
"ctrl-escape": "menu::Cancel",
"ctrl-c": "menu::Cancel",
"shift-enter": "picker::UseSelectedQuery",
"alt-enter": ["picker::ConfirmInput", { "secondary": false }],
"ctrl-alt-enter": ["picker::ConfirmInput", { "secondary": true }],
"shift-enter": "menu::UseSelectedQuery",
"ctrl-shift-w": "workspace::CloseWindow",
"shift-escape": "workspace::ToggleZoom",
"ctrl-o": "workspace::Open",
@@ -28,7 +26,7 @@
"ctrl-0": "zed::ResetBufferFontSize",
"ctrl-,": "zed::OpenSettings",
"ctrl-q": "zed::Quit",
"alt-f9": "zed::Hide",
"ctrl-h": "zed::Hide",
"f11": "zed::ToggleFullScreen"
}
},
@@ -38,6 +36,7 @@
"escape": "editor::Cancel",
"backspace": "editor::Backspace",
"shift-backspace": "editor::Backspace",
"ctrl-h": "editor::Backspace",
"delete": "editor::Delete",
"ctrl-d": "editor::Delete",
"tab": "editor::Tab",
@@ -137,8 +136,7 @@
// ],
"ctrl-alt-space": "editor::ShowCharacterPalette",
"ctrl-;": "editor::ToggleLineNumbers",
"ctrl-k ctrl-r": "editor::RevertSelectedHunks",
"ctrl-alt-g b": "editor::ToggleGitBlame"
"ctrl-k ctrl-r": "editor::RevertSelectedHunks"
}
},
{
@@ -149,11 +147,10 @@
"ctrl-shift-enter": "editor::NewlineBelow",
"ctrl-enter": "editor::NewlineAbove",
"alt-z": "editor::ToggleSoftWrap",
"ctrl-f": "buffer_search::Deploy",
"ctrl-h": [
"ctrl-f": [
"buffer_search::Deploy",
{
"replace_enabled": true
"focus": true
}
],
// "cmd-e": [
@@ -212,16 +209,14 @@
"enter": "search::SelectNextMatch",
"shift-enter": "search::SelectPrevMatch",
"alt-enter": "search::SelectAllMatches",
"alt-tab": "search::CycleMode",
"ctrl-f": "search::FocusSearch",
"ctrl-h": "search::ToggleReplace"
"alt-tab": "search::CycleMode"
}
},
{
"context": "BufferSearchBar && in_replace",
"bindings": {
"enter": "search::ReplaceNext",
"ctrl-enter": "search::ReplaceAll"
"cmd-enter": "search::ReplaceAll"
}
},
{
@@ -236,7 +231,6 @@
"bindings": {
"escape": "project_search::ToggleFocus",
"alt-tab": "search::CycleMode",
"ctrl-shift-f": "search::FocusSearch",
"ctrl-shift-h": "search::ToggleReplace",
"alt-ctrl-g": "search::ActivateRegexMode",
"alt-ctrl-x": "search::ActivateTextMode"
@@ -261,7 +255,7 @@
"bindings": {
"escape": "project_search::ToggleFocus",
"alt-tab": "search::CycleMode",
"ctrl-shift-h": "search::ToggleReplace",
"cmd-shift-h": "search::ToggleReplace",
"alt-ctrl-g": "search::ActivateRegexMode",
"alt-ctrl-x": "search::ActivateTextMode"
}
@@ -269,7 +263,9 @@
{
"context": "Pane",
"bindings": {
"ctrl-shift-tab": "pane::ActivatePrevItem",
"ctrl-pageup": "pane::ActivatePrevItem",
"ctrl-tab": "pane::ActivateNextItem",
"ctrl-pagedown": "pane::ActivateNextItem",
"ctrl-w": "pane::CloseActiveItem",
"alt-ctrl-t": "pane::CloseInactiveItems",
@@ -307,10 +303,8 @@
}
],
"ctrl-alt-shift-down": "editor::DuplicateLine",
"ctrl-shift-left": "editor::SelectToPreviousWordStart",
"ctrl-shift-right": "editor::SelectToNextWordEnd",
"ctrl-shift-up": "editor::SelectLargerSyntaxNode", //todo(linux) tmp keybinding
"ctrl-shift-down": "editor::SelectSmallerSyntaxNode", //todo(linux) tmp keybinding
"ctrl-shift-right": "editor::SelectLargerSyntaxNode",
"ctrl-shift-left": "editor::SelectSmallerSyntaxNode",
"ctrl-d": [
"editor::SelectNext",
{
@@ -359,14 +353,14 @@
"ctrl-shift-]": "editor::UnfoldLines",
"ctrl-space": "editor::ShowCompletions",
"ctrl-.": "editor::ToggleCodeActions",
"alt-ctrl-r": "editor::RevealInFinder",
"alt-cmd-r": "editor::RevealInFinder",
"ctrl-alt-shift-c": "editor::DisplayCursorNames"
}
},
{
"context": "Editor && mode == full",
"bindings": {
"ctrl-shift-o": "outline::Toggle",
"cmd-shift-o": "outline::Toggle",
"ctrl-g": "go_to_line::Toggle"
}
},
@@ -422,18 +416,10 @@
"ctrl-j": "workspace::ToggleBottomDock",
"ctrl-alt-y": "workspace::CloseAllDocks",
"ctrl-shift-f": "pane::DeploySearch",
"ctrl-shift-h": [
"pane::DeploySearch",
{
"replace_enabled": true
}
],
"ctrl-k ctrl-s": "zed::OpenKeymap",
"ctrl-k ctrl-t": "theme_selector::Toggle",
"ctrl-shift-t": "project_symbols::Toggle",
"ctrl-t": "project_symbols::Toggle",
"ctrl-p": "file_finder::Toggle",
"ctrl-tab": "tab_switcher::Toggle",
"ctrl-shift-tab": ["tab_switcher::Toggle", { "select_last": true }],
"ctrl-e": "file_finder::Toggle",
"ctrl-shift-p": "command_palette::Toggle",
"ctrl-shift-m": "diagnostics::Deploy",
@@ -458,8 +444,6 @@
{
"context": "Editor",
"bindings": {
"ctrl-shift-k": "editor::DeleteLine",
"ctrl-shift-d": "editor::DuplicateLineDown",
"ctrl-j": "editor::JoinLines",
"ctrl-alt-backspace": "editor::DeleteToPreviousSubwordStart",
"ctrl-alt-h": "editor::DeleteToPreviousSubwordStart",
@@ -560,7 +544,7 @@
"delete": "project_panel::Delete",
"ctrl-backspace": ["project_panel::Delete", { "skip_prompt": true }],
"ctrl-delete": ["project_panel::Delete", { "skip_prompt": true }],
"alt-ctrl-r": "project_panel::RevealInFinder",
"alt-cmd-r": "project_panel::RevealInFinder",
"alt-shift-f": "project_panel::NewSearchInDirectory"
}
},
@@ -605,13 +589,6 @@
"context": "FileFinder",
"bindings": { "ctrl-shift-p": "file_finder::SelectPrev" }
},
{
"context": "TabSwitcher",
"bindings": {
"ctrl-shift-tab": "menu::SelectPrev",
"ctrl-backspace": "tab_switcher::CloseSelectedItem"
}
},
{
"context": "Terminal",
"bindings": {
@@ -624,12 +601,7 @@
"pagedown": ["terminal::SendKeystroke", "pagedown"],
"escape": ["terminal::SendKeystroke", "escape"],
"enter": ["terminal::SendKeystroke", "enter"],
"ctrl-c": ["terminal::SendKeystroke", "ctrl-c"],
// Some nice conveniences
"ctrl-backspace": ["terminal::SendText", "\u0015"],
"ctrl-right": ["terminal::SendText", "\u0005"],
"ctrl-left": ["terminal::SendText", "\u0001"]
"ctrl-c": ["terminal::SendKeystroke", "ctrl-c"]
}
}
]

View File

@@ -17,11 +17,8 @@
"cmd-enter": "menu::SecondaryConfirm",
"escape": "menu::Cancel",
"cmd-escape": "menu::Cancel",
"ctrl-escape": "menu::Cancel",
"ctrl-c": "menu::Cancel",
"shift-enter": "picker::UseSelectedQuery",
"alt-enter": ["picker::ConfirmInput", { "secondary": false }],
"cmd-alt-enter": ["picker::ConfirmInput", { "secondary": true }],
"shift-enter": "menu::UseSelectedQuery",
"cmd-shift-w": "workspace::CloseWindow",
"shift-escape": "workspace::ToggleZoom",
"cmd-o": "workspace::Open",
@@ -158,8 +155,7 @@
],
"ctrl-cmd-space": "editor::ShowCharacterPalette",
"cmd-;": "editor::ToggleLineNumbers",
"cmd-alt-z": "editor::RevertSelectedHunks",
"cmd-alt-g b": "editor::ToggleGitBlame"
"cmd-alt-z": "editor::RevertSelectedHunks"
}
},
{
@@ -170,11 +166,10 @@
"cmd-shift-enter": "editor::NewlineAbove",
"cmd-enter": "editor::NewlineBelow",
"alt-z": "editor::ToggleSoftWrap",
"cmd-f": "buffer_search::Deploy",
"cmd-alt-f": [
"cmd-f": [
"buffer_search::Deploy",
{
"replace_enabled": true
"focus": true
}
],
"cmd-e": [
@@ -233,9 +228,7 @@
"enter": "search::SelectNextMatch",
"shift-enter": "search::SelectPrevMatch",
"alt-enter": "search::SelectAllMatches",
"alt-tab": "search::CycleMode",
"cmd-f": "search::FocusSearch",
"cmd-alt-f": "search::ToggleReplace"
"alt-tab": "search::CycleMode"
}
},
{
@@ -257,7 +250,6 @@
"bindings": {
"escape": "project_search::ToggleFocus",
"alt-tab": "search::CycleMode",
"cmd-shift-f": "search::FocusSearch",
"cmd-shift-h": "search::ToggleReplace",
"alt-cmd-g": "search::ActivateRegexMode",
"alt-cmd-x": "search::ActivateTextMode"
@@ -325,8 +317,13 @@
"cmd-shift-k": "editor::DeleteLine",
"alt-up": "editor::MoveLineUp",
"alt-down": "editor::MoveLineDown",
"alt-shift-up": "editor::DuplicateLineUp",
"alt-shift-down": "editor::DuplicateLineDown",
"alt-shift-up": [
"editor::DuplicateLine",
{
"move_upwards": true
}
],
"alt-shift-down": "editor::DuplicateLine",
"ctrl-shift-right": "editor::SelectLargerSyntaxNode",
"ctrl-shift-left": "editor::SelectSmallerSyntaxNode",
"cmd-d": [
@@ -440,18 +437,10 @@
"cmd-j": "workspace::ToggleBottomDock",
"alt-cmd-y": "workspace::CloseAllDocks",
"cmd-shift-f": "pane::DeploySearch",
"cmd-shift-h": [
"pane::DeploySearch",
{
"replace_enabled": true
}
],
"cmd-k cmd-s": "zed::OpenKeymap",
"cmd-k cmd-t": "theme_selector::Toggle",
"cmd-t": "project_symbols::Toggle",
"cmd-p": "file_finder::Toggle",
"ctrl-tab": "tab_switcher::Toggle",
"ctrl-shift-tab": ["tab_switcher::Toggle", { "select_last": true }],
"cmd-shift-p": "command_palette::Toggle",
"cmd-shift-m": "diagnostics::Deploy",
"cmd-shift-e": "project_panel::ToggleFocus",
@@ -614,13 +603,6 @@
"context": "FileFinder",
"bindings": { "cmd-shift-p": "file_finder::SelectPrev" }
},
{
"context": "TabSwitcher",
"bindings": {
"ctrl-shift-tab": "menu::SelectPrev",
"ctrl-backspace": "tab_switcher::CloseSelectedItem"
}
},
{
"context": "Terminal",
"bindings": {

View File

@@ -11,7 +11,7 @@
"ctrl->": "zed::IncreaseBufferFontSize",
"ctrl-<": "zed::DecreaseBufferFontSize",
"ctrl-shift-j": "editor::JoinLines",
"cmd-d": "editor::DuplicateLineDown",
"cmd-d": "editor::DuplicateLine",
"cmd-backspace": "editor::DeleteLine",
"cmd-pagedown": "editor::MovePageDown",
"cmd-pageup": "editor::MovePageUp",

View File

@@ -9,7 +9,7 @@
"context": "Editor",
"bindings": {
"cmd-l": "go_to_line::Toggle",
"ctrl-shift-d": "editor::DuplicateLineDown",
"ctrl-shift-d": "editor::DuplicateLine",
"cmd-b": "editor::GoToDefinition",
"cmd-j": "editor::ScrollCursorCenter",
"cmd-enter": "editor::NewlineBelow",

View File

@@ -73,17 +73,8 @@
],
"g shift-e": ["vim::PreviousWordEnd", { "ignorePunctuation": true }],
"/": "vim::Search",
"?": [
"vim::Search",
{
"backwards": true
}
],
"*": "vim::MoveToNext",
"#": "vim::MoveToPrev",
"n": "vim::MoveToNextMatch",
"shift-n": "vim::MoveToPrevMatch",
"n": "search::SelectNextMatch",
"shift-n": "search::SelectPrevMatch",
"%": "vim::Matching",
"f": [
"vim::PushOperator",
@@ -146,10 +137,8 @@
"g d": "editor::GoToDefinition",
"g shift-d": "editor::GoToTypeDefinition",
"g x": "editor::OpenUrl",
"g n": "vim::SelectNextMatch",
"g shift-n": "vim::SelectPreviousMatch",
"g l": "vim::SelectNext",
"g shift-l": "vim::SelectPrevious",
"g n": "vim::SelectNext",
"g shift-n": "vim::SelectPrevious",
"g >": [
"editor::SelectNext",
{
@@ -360,6 +349,15 @@
],
"u": "editor::Undo",
"ctrl-r": "editor::Redo",
"/": "vim::Search",
"?": [
"vim::Search",
{
"backwards": true
}
],
"*": "vim::MoveToNext",
"#": "vim::MoveToPrev",
"r": ["vim::PushOperator", "Replace"],
"s": "vim::Substitute",
"shift-s": "vim::SubstituteLine",
@@ -384,46 +382,18 @@
"d": "editor::Rename" // zed specific
}
},
{
"context": "Editor && vim_mode == normal && vim_operator == c",
"bindings": {
"s": [
"vim::PushOperator",
{
"ChangeSurrounds": {}
}
]
}
},
{
"context": "Editor && vim_operator == d",
"bindings": {
"d": "vim::CurrentLine"
}
},
{
"context": "Editor && vim_mode == normal && vim_operator == d",
"bindings": {
"s": ["vim::PushOperator", "DeleteSurrounds"]
}
},
{
"context": "Editor && vim_operator == y",
"bindings": {
"y": "vim::CurrentLine"
}
},
{
"context": "Editor && vim_mode == normal && vim_operator == y",
"bindings": {
"s": [
"vim::PushOperator",
{
"AddSurrounds": {}
}
]
}
},
{
"context": "Editor && VimObject",
"bindings": {
@@ -576,12 +546,6 @@
"escape": "buffer_search::Dismiss"
}
},
{
"context": "EmptyPane || SharedScreen",
"bindings": {
":": "command_palette::Toggle"
}
},
{
// netrw compatibility
"context": "ProjectPanel && not_editing",

View File

@@ -48,8 +48,7 @@
// which gives the same size as all other panes.
"active_pane_magnification": 1.0,
// The key to use for adding multiple cursors
// Currently "alt" or "cmd_or_ctrl" (also aliased as
// "cmd" and "ctrl") are supported.
// Currently "alt" or "cmd" are supported.
"multi_cursor_modifier": "alt",
// Whether to enable vim modes and key bindings
"vim_mode": false,
@@ -70,7 +69,7 @@
// documentation when not included in original completion list.
"completion_documentation_secondary_query_debounce": 300,
// Whether to show wrap guides in the editor. Setting this to true will
// show a guide at the 'preferred_line_length' value if 'soft_wrap' is set to
// show a guide at the 'preferred_line_length' value if softwrap is set to
// 'preferred_line_length', and will show any additional guides as specified
// by the 'wrap_guides' setting.
"show_wrap_guides": true,
@@ -284,11 +283,6 @@
// 4. Save when idle for a certain amount of time:
// "autosave": { "after_delay": {"milliseconds": 500} },
"autosave": "off",
// Settings related to the editor's tab bar.
"tab_bar": {
// Whether or not to show the navigation history buttons.
"show_nav_history_buttons": true
},
// Settings related to the editor's tabs
"tabs": {
// Show git status colors in the editor tabs.
@@ -550,29 +544,56 @@
"file_types": {},
// Different settings for specific languages.
"languages": {
"C++": {
"format_on_save": "off"
"Plain Text": {
"soft_wrap": "preferred_line_length"
},
"C": {
"format_on_save": "off"
"Elixir": {
"tab_size": 2
},
"Gleam": {
"tab_size": 2
},
"Go": {
"tab_size": 4,
"hard_tabs": true,
"code_actions_on_format": {
"source.organizeImports": true
}
},
"Make": {
"hard_tabs": true
"Markdown": {
"tab_size": 2,
"soft_wrap": "preferred_line_length"
},
"JavaScript": {
"tab_size": 2
},
"Terraform": {
"tab_size": 2
},
"TypeScript": {
"tab_size": 2
},
"TSX": {
"tab_size": 2
},
"YAML": {
"tab_size": 2
},
"JSON": {
"tab_size": 2
},
"OCaml": {
"tab_size": 2
},
"OCaml Interface": {
"tab_size": 2
},
"Prisma": {
"tab_size": 2
}
},
// Zed's Prettier integration settings.
// If Prettier is enabled, Zed will use this for its Prettier instance for any applicable file, if
// If Prettier is enabled, Zed will use this its Prettier instance for any applicable file, if
// project has no other Prettier installed.
"prettier": {
// Use regular Prettier json configuration:
@@ -621,10 +642,5 @@
// Mostly useful for developers who are managing multiple instances of Zed.
"dev": {
// "theme": "Andromeda"
},
// Task-related settings.
"task": {
// Whether to show task status indicator in the status bar. Default: true
"show_status_indicator": true
}
}

View File

@@ -111,7 +111,7 @@
"hint": "#618399ff",
"hint.background": "#12231fff",
"hint.border": "#183934ff",
"ignored": "#6b6b73ff",
"ignored": "#aca8aeff",
"ignored.background": "#262933ff",
"ignored.border": "#2b2f38ff",
"info": "#10a793ff",

View File

@@ -111,7 +111,7 @@
"hint": "#706897ff",
"hint.background": "#161a35ff",
"hint.border": "#222953ff",
"ignored": "#756f7eff",
"ignored": "#898591ff",
"ignored.background": "#3a353fff",
"ignored.border": "#56505eff",
"info": "#566ddaff",
@@ -495,7 +495,7 @@
"hint": "#776d9dff",
"hint.background": "#e1e0f9ff",
"hint.border": "#c8c7f2ff",
"ignored": "#6e6876ff",
"ignored": "#5a5462ff",
"ignored.background": "#bfbcc5ff",
"ignored.border": "#8f8b96ff",
"info": "#586cdaff",
@@ -879,7 +879,7 @@
"hint": "#b17272ff",
"hint.background": "#171e38ff",
"hint.border": "#262f56ff",
"ignored": "#8f8b77ff",
"ignored": "#a4a08bff",
"ignored.background": "#45433bff",
"ignored.border": "#6c695cff",
"info": "#6684e0ff",
@@ -1263,7 +1263,7 @@
"hint": "#b37979ff",
"hint.background": "#e3e5faff",
"hint.border": "#cdd1f5ff",
"ignored": "#878471ff",
"ignored": "#706d5fff",
"ignored.background": "#cecab4ff",
"ignored.border": "#a8a48eff",
"info": "#6684dfff",
@@ -1647,7 +1647,7 @@
"hint": "#6f815aff",
"hint.background": "#142319ff",
"hint.border": "#1c3927ff",
"ignored": "#7d7c6aff",
"ignored": "#91907fff",
"ignored.background": "#424136ff",
"ignored.border": "#5d5c4cff",
"info": "#36a165ff",
@@ -2031,7 +2031,7 @@
"hint": "#758961ff",
"hint.background": "#d9ecdfff",
"hint.border": "#bbddc6ff",
"ignored": "#767463ff",
"ignored": "#61604fff",
"ignored.background": "#c5c4b9ff",
"ignored.border": "#969585ff",
"info": "#37a165ff",
@@ -2415,7 +2415,7 @@
"hint": "#a77087ff",
"hint.background": "#0f1c3dff",
"hint.border": "#182d5bff",
"ignored": "#8e8683ff",
"ignored": "#a79f9dff",
"ignored.background": "#443c39ff",
"ignored.border": "#665f5cff",
"info": "#407ee6ff",
@@ -2799,7 +2799,7 @@
"hint": "#a67287ff",
"hint.background": "#dfe3fbff",
"hint.border": "#c6cef7ff",
"ignored": "#837b78ff",
"ignored": "#6a6360ff",
"ignored.background": "#ccc7c5ff",
"ignored.border": "#aaa3a1ff",
"info": "#407ee6ff",
@@ -3183,7 +3183,7 @@
"hint": "#8d70a8ff",
"hint.background": "#0d1a43ff",
"hint.border": "#192961ff",
"ignored": "#908190ff",
"ignored": "#a899a8ff",
"ignored.background": "#433a43ff",
"ignored.border": "#675b67ff",
"info": "#5169ebff",
@@ -3567,7 +3567,7 @@
"hint": "#8c70a6ff",
"hint.background": "#e2dffcff",
"hint.border": "#cac7faff",
"ignored": "#857785ff",
"ignored": "#6b5e6bff",
"ignored.background": "#c6b8c6ff",
"ignored.border": "#ad9dadff",
"info": "#5169ebff",
@@ -3951,7 +3951,7 @@
"hint": "#52809aff",
"hint.background": "#121c24ff",
"hint.border": "#1a2f3cff",
"ignored": "#688c9dff",
"ignored": "#7c9fb3ff",
"ignored.background": "#33444dff",
"ignored.border": "#4f6a78ff",
"info": "#267eadff",
@@ -4335,7 +4335,7 @@
"hint": "#5a87a0ff",
"hint.background": "#d8e4eeff",
"hint.border": "#b9cee0ff",
"ignored": "#628496ff",
"ignored": "#526f7dff",
"ignored.background": "#a6cadcff",
"ignored.border": "#80a4b6ff",
"info": "#267eadff",
@@ -4719,7 +4719,7 @@
"hint": "#8a647aff",
"hint.background": "#1c1b29ff",
"hint.border": "#2c2b45ff",
"ignored": "#756e6eff",
"ignored": "#898383ff",
"ignored.background": "#3b3535ff",
"ignored.border": "#564e4eff",
"info": "#7272caff",
@@ -5103,7 +5103,7 @@
"hint": "#91697fff",
"hint.background": "#e4e1f5ff",
"hint.border": "#cecaecff",
"ignored": "#6e6666ff",
"ignored": "#5a5252ff",
"ignored.background": "#c1bbbbff",
"ignored.border": "#8e8989ff",
"info": "#7272caff",
@@ -5487,7 +5487,7 @@
"hint": "#607e76ff",
"hint.background": "#151e20ff",
"hint.border": "#1f3233ff",
"ignored": "#6f7e74ff",
"ignored": "#859188ff",
"ignored.background": "#353f39ff",
"ignored.border": "#505e55ff",
"info": "#468b8fff",
@@ -5871,7 +5871,7 @@
"hint": "#66847cff",
"hint.background": "#dae7e8ff",
"hint.border": "#bed4d6ff",
"ignored": "#68766dff",
"ignored": "#546259ff",
"ignored.background": "#bcc5bfff",
"ignored.border": "#8b968eff",
"info": "#488b90ff",
@@ -6255,7 +6255,7 @@
"hint": "#008b9fff",
"hint.background": "#051949ff",
"hint.border": "#102667ff",
"ignored": "#778f77ff",
"ignored": "#8ba48bff",
"ignored.background": "#3b453bff",
"ignored.border": "#5c6c5cff",
"info": "#3e62f4ff",
@@ -6639,7 +6639,7 @@
"hint": "#008fa1ff",
"hint.background": "#e1ddfeff",
"hint.border": "#c9c4fdff",
"ignored": "#718771ff",
"ignored": "#5f705fff",
"ignored.background": "#b4ceb4ff",
"ignored.border": "#8ea88eff",
"info": "#3e61f4ff",
@@ -7023,7 +7023,7 @@
"hint": "#6c81a5ff",
"hint.background": "#161f2bff",
"hint.border": "#203348ff",
"ignored": "#7e849eff",
"ignored": "#959bb2ff",
"ignored.background": "#3e4769ff",
"ignored.border": "#5b6385ff",
"info": "#3e8ed0ff",
@@ -7407,7 +7407,7 @@
"hint": "#7087b2ff",
"hint.background": "#dde7f6ff",
"hint.border": "#c2d5efff",
"ignored": "#767d9aff",
"ignored": "#5f6789ff",
"ignored.background": "#c1c5d8ff",
"ignored.border": "#9a9fb6ff",
"info": "#3e8fd0ff",

View File

@@ -111,7 +111,7 @@
"hint": "#628b80ff",
"hint.background": "#0d2f4eff",
"hint.border": "#1b4a6eff",
"ignored": "#696a6aff",
"ignored": "#8a8986ff",
"ignored.background": "#313337ff",
"ignored.border": "#3f4043ff",
"info": "#5ac1feff",
@@ -480,7 +480,7 @@
"hint": "#8ca7c2ff",
"hint.background": "#deebfaff",
"hint.border": "#c4daf6ff",
"ignored": "#a9acaeff",
"ignored": "#8b8e92ff",
"ignored.background": "#dcdddeff",
"ignored.border": "#cfd1d2ff",
"info": "#3b9ee5ff",
@@ -849,7 +849,7 @@
"hint": "#7399a3ff",
"hint.background": "#123950ff",
"hint.border": "#24556fff",
"ignored": "#7b7d7fff",
"ignored": "#9a9a98ff",
"ignored.background": "#464a52ff",
"ignored.border": "#53565dff",
"info": "#72cffeff",

View File

@@ -111,7 +111,7 @@
"hint": "#8c957dff",
"hint.background": "#1e2321ff",
"hint.border": "#303a36ff",
"ignored": "#998b78ff",
"ignored": "#c5b597ff",
"ignored.background": "#4c4642ff",
"ignored.border": "#5b534dff",
"info": "#83a598ff",
@@ -485,7 +485,7 @@
"hint": "#6a695bff",
"hint.background": "#1e2321ff",
"hint.border": "#303a36ff",
"ignored": "#998b78ff",
"ignored": "#c5b597ff",
"ignored.background": "#4c4642ff",
"ignored.border": "#5b534dff",
"info": "#83a598ff",
@@ -859,7 +859,7 @@
"hint": "#8c957dff",
"hint.background": "#1e2321ff",
"hint.border": "#303a36ff",
"ignored": "#998b78ff",
"ignored": "#c5b597ff",
"ignored.background": "#4c4642ff",
"ignored.border": "#5b534dff",
"info": "#83a598ff",
@@ -1233,7 +1233,7 @@
"hint": "#677562ff",
"hint.background": "#d2dee2ff",
"hint.border": "#adc5ccff",
"ignored": "#897b6eff",
"ignored": "#5f5650ff",
"ignored.background": "#d9c8a4ff",
"ignored.border": "#c8b899ff",
"info": "#0b6678ff",
@@ -1607,7 +1607,7 @@
"hint": "#677562ff",
"hint.background": "#d2dee2ff",
"hint.border": "#adc5ccff",
"ignored": "#897b6eff",
"ignored": "#5f5650ff",
"ignored.background": "#d9c8a4ff",
"ignored.border": "#c8b899ff",
"info": "#0b6678ff",
@@ -1981,7 +1981,7 @@
"hint": "#677562ff",
"hint.background": "#d2dee2ff",
"hint.border": "#adc5ccff",
"ignored": "#897b6eff",
"ignored": "#5f5650ff",
"ignored.background": "#d9c8a4ff",
"ignored.border": "#c8b899ff",
"info": "#0b6678ff",

View File

@@ -111,7 +111,7 @@
"hint": "#5a6f89ff",
"hint.background": "#18243dff",
"hint.border": "#293b5bff",
"ignored": "#555a63ff",
"ignored": "#838994ff",
"ignored.background": "#3b414dff",
"ignored.border": "#464b57ff",
"info": "#74ade8ff",
@@ -485,7 +485,7 @@
"hint": "#9294beff",
"hint.background": "#e2e2faff",
"hint.border": "#cbcdf6ff",
"ignored": "#a1a1a3ff",
"ignored": "#7e8087ff",
"ignored.background": "#dcdcddff",
"ignored.border": "#c9c9caff",
"info": "#5c78e2ff",

View File

@@ -111,7 +111,7 @@
"hint": "#5e768cff",
"hint.background": "#2f3639ff",
"hint.border": "#435255ff",
"ignored": "#2f2b43ff",
"ignored": "#74708dff",
"ignored.background": "#292738ff",
"ignored.border": "#423f55ff",
"info": "#9bced6ff",
@@ -490,7 +490,7 @@
"hint": "#7a92aaff",
"hint.background": "#dde9ebff",
"hint.border": "#c3d7dbff",
"ignored": "#938fa3ff",
"ignored": "#706c8cff",
"ignored.background": "#dcd8d8ff",
"ignored.border": "#dcd6d5ff",
"info": "#57949fff",
@@ -869,7 +869,7 @@
"hint": "#728aa2ff",
"hint.background": "#2f3639ff",
"hint.border": "#435255ff",
"ignored": "#605d7aff",
"ignored": "#85819eff",
"ignored.background": "#38354eff",
"ignored.border": "#504c68ff",
"info": "#9bced6ff",

View File

@@ -111,7 +111,7 @@
"hint": "#727d68ff",
"hint.background": "#171e1eff",
"hint.border": "#223131ff",
"ignored": "#827568ff",
"ignored": "#a69782ff",
"ignored.background": "#333944ff",
"ignored.border": "#3d4350ff",
"info": "#518b8bff",

View File

@@ -111,7 +111,7 @@
"hint": "#4f8297ff",
"hint.background": "#141f2cff",
"hint.border": "#1b3149ff",
"ignored": "#6f8389ff",
"ignored": "#93a1a1ff",
"ignored.background": "#073743ff",
"ignored.border": "#2b4e58ff",
"info": "#278ad1ff",
@@ -480,7 +480,7 @@
"hint": "#5789a3ff",
"hint.background": "#dbe6f6ff",
"hint.border": "#bfd3efff",
"ignored": "#6a7f86ff",
"ignored": "#34555eff",
"ignored.background": "#cfd0c4ff",
"ignored.border": "#9faaa8ff",
"info": "#288bd1ff",

View File

@@ -111,7 +111,7 @@
"hint": "#246e61ff",
"hint.background": "#0e2242ff",
"hint.border": "#193760ff",
"ignored": "#4c4735ff",
"ignored": "#736e55ff",
"ignored.background": "#2a261cff",
"ignored.border": "#302c21ff",
"info": "#499befff",

View File

@@ -16,7 +16,6 @@ doctest = false
anyhow.workspace = true
auto_update.workspace = true
editor.workspace = true
extension.workspace = true
futures.workspace = true
gpui.workspace = true
language.workspace = true

View File

@@ -1,6 +1,5 @@
use auto_update::{AutoUpdateStatus, AutoUpdater, DismissErrorMessage};
use editor::Editor;
use extension::ExtensionStore;
use futures::StreamExt;
use gpui::{
actions, svg, AppContext, CursorStyle, EventEmitter, InteractiveElement as _, Model,
@@ -289,18 +288,6 @@ impl ActivityIndicator {
};
}
if let Some(extension_store) =
ExtensionStore::try_global(cx).map(|extension_store| extension_store.read(cx))
{
if let Some(extension_id) = extension_store.outstanding_operations().keys().next() {
return Content {
icon: Some(DOWNLOAD_ICON),
message: format!("Updating {extension_id} extension…"),
on_click: None,
};
}
}
Default::default()
}
}

View File

@@ -1,22 +0,0 @@
[package]
name = "anthropic"
version = "0.1.0"
edition = "2021"
publish = false
license = "AGPL-3.0-or-later"
[lib]
path = "src/anthropic.rs"
[dependencies]
anyhow.workspace = true
futures.workspace = true
serde.workspace = true
serde_json.workspace = true
util.workspace = true
[dev-dependencies]
tokio.workspace = true
[lints]
workspace = true

View File

@@ -1 +0,0 @@
../../LICENSE-AGPL

View File

@@ -1,234 +0,0 @@
use anyhow::{anyhow, Result};
use futures::{io::BufReader, stream::BoxStream, AsyncBufReadExt, AsyncReadExt, StreamExt};
use serde::{Deserialize, Serialize};
use std::convert::TryFrom;
use util::http::{AsyncBody, HttpClient, Method, Request as HttpRequest};
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq)]
pub enum Model {
#[default]
#[serde(rename = "claude-3-opus-20240229")]
Claude3Opus,
#[serde(rename = "claude-3-sonnet-20240229")]
Claude3Sonnet,
#[serde(rename = "claude-3-haiku-20240307")]
Claude3Haiku,
}
impl Model {
pub fn from_id(id: &str) -> Result<Self> {
if id.starts_with("claude-3-opus") {
Ok(Self::Claude3Opus)
} else if id.starts_with("claude-3-sonnet") {
Ok(Self::Claude3Sonnet)
} else if id.starts_with("claude-3-haiku") {
Ok(Self::Claude3Haiku)
} else {
Err(anyhow!("Invalid model id: {}", id))
}
}
pub fn display_name(&self) -> &'static str {
match self {
Self::Claude3Opus => "Claude 3 Opus",
Self::Claude3Sonnet => "Claude 3 Sonnet",
Self::Claude3Haiku => "Claude 3 Haiku",
}
}
pub fn max_token_count(&self) -> usize {
200_000
}
}
#[derive(Clone, Copy, Serialize, Deserialize, Debug, Eq, PartialEq)]
#[serde(rename_all = "lowercase")]
pub enum Role {
User,
Assistant,
}
impl TryFrom<String> for Role {
type Error = anyhow::Error;
fn try_from(value: String) -> Result<Self> {
match value.as_str() {
"user" => Ok(Self::User),
"assistant" => Ok(Self::Assistant),
_ => Err(anyhow!("invalid role '{value}'")),
}
}
}
impl From<Role> for String {
fn from(val: Role) -> Self {
match val {
Role::User => "user".to_owned(),
Role::Assistant => "assistant".to_owned(),
}
}
}
#[derive(Debug, Serialize)]
pub struct Request {
pub model: Model,
pub messages: Vec<RequestMessage>,
pub stream: bool,
pub system: String,
pub max_tokens: u32,
}
#[derive(Serialize, Deserialize, Debug, Eq, PartialEq)]
pub struct RequestMessage {
pub role: Role,
pub content: String,
}
#[derive(Deserialize, Debug)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum ResponseEvent {
MessageStart {
message: ResponseMessage,
},
ContentBlockStart {
index: u32,
content_block: ContentBlock,
},
Ping {},
ContentBlockDelta {
index: u32,
delta: TextDelta,
},
ContentBlockStop {
index: u32,
},
MessageDelta {
delta: ResponseMessage,
usage: Usage,
},
MessageStop {},
}
#[derive(Deserialize, Debug)]
pub struct ResponseMessage {
#[serde(rename = "type")]
pub message_type: Option<String>,
pub id: Option<String>,
pub role: Option<String>,
pub content: Option<Vec<String>>,
pub model: Option<String>,
pub stop_reason: Option<String>,
pub stop_sequence: Option<String>,
pub usage: Option<Usage>,
}
#[derive(Deserialize, Debug)]
pub struct Usage {
pub input_tokens: Option<u32>,
pub output_tokens: Option<u32>,
}
#[derive(Deserialize, Debug)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum ContentBlock {
Text { text: String },
}
#[derive(Deserialize, Debug)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum TextDelta {
TextDelta { text: String },
}
pub async fn stream_completion(
client: &dyn HttpClient,
api_url: &str,
api_key: &str,
request: Request,
) -> Result<BoxStream<'static, Result<ResponseEvent>>> {
let uri = format!("{api_url}/v1/messages");
let request = HttpRequest::builder()
.method(Method::POST)
.uri(uri)
.header("Anthropic-Version", "2023-06-01")
.header("Anthropic-Beta", "messages-2023-12-15")
.header("X-Api-Key", api_key)
.header("Content-Type", "application/json")
.body(AsyncBody::from(serde_json::to_string(&request)?))?;
let mut response = client.send(request).await?;
if response.status().is_success() {
let reader = BufReader::new(response.into_body());
Ok(reader
.lines()
.filter_map(|line| async move {
match line {
Ok(line) => {
let line = line.strip_prefix("data: ")?;
match serde_json::from_str(line) {
Ok(response) => Some(Ok(response)),
Err(error) => Some(Err(anyhow!(error))),
}
}
Err(error) => Some(Err(anyhow!(error))),
}
})
.boxed())
} else {
let mut body = Vec::new();
response.body_mut().read_to_end(&mut body).await?;
let body_str = std::str::from_utf8(&body)?;
match serde_json::from_str::<ResponseEvent>(body_str) {
Ok(_) => Err(anyhow!(
"Unexpected success response while expecting an error: {}",
body_str,
)),
Err(_) => Err(anyhow!(
"Failed to connect to API: {} {}",
response.status(),
body_str,
)),
}
}
}
// #[cfg(test)]
// mod tests {
// use super::*;
// use util::http::IsahcHttpClient;
// #[tokio::test]
// async fn stream_completion_success() {
// let http_client = IsahcHttpClient::new().unwrap();
// let request = Request {
// model: Model::Claude3Opus,
// messages: vec![RequestMessage {
// role: Role::User,
// content: "Ping".to_string(),
// }],
// stream: true,
// system: "Respond to ping with pong".to_string(),
// max_tokens: 4096,
// };
// let stream = stream_completion(
// &http_client,
// "https://api.anthropic.com",
// &std::env::var("ANTHROPIC_API_KEY").expect("ANTHROPIC_API_KEY not set"),
// request,
// )
// .await
// .unwrap();
// stream
// .for_each(|event| async {
// match event {
// Ok(event) => println!("{:?}", event),
// Err(e) => eprintln!("Error: {:?}", e),
// }
// })
// .await;
// }
// }

View File

@@ -16,7 +16,6 @@ client.workspace = true
collections.workspace = true
command_palette_hooks.workspace = true
editor.workspace = true
file_icons.workspace = true
fs.workspace = true
futures.workspace = true
gpui.workspace = true

View File

@@ -6,8 +6,6 @@ mod prompts;
mod saved_conversation;
mod streaming_diff;
mod embedded_scope;
pub use assistant_panel::AssistantPanel;
use assistant_settings::{AssistantSettings, OpenAiModel, ZedDotDevModel};
use chrono::{DateTime, Local};

View File

@@ -1,14 +1,13 @@
use crate::{
assistant_settings::{AssistantDockPosition, AssistantSettings, ZedDotDevModel},
codegen::{self, Codegen, CodegenKind},
embedded_scope::EmbeddedScope,
prompts::generate_content_prompt,
Assist, CompletionProvider, CycleMessageRole, InlineAssist, LanguageModel,
LanguageModelRequest, LanguageModelRequestMessage, MessageId, MessageMetadata, MessageStatus,
NewConversation, QuoteSelection, ResetKey, Role, SavedConversation, SavedConversationMetadata,
SavedMessage, Split, ToggleFocus, ToggleIncludeConversation,
};
use anyhow::{anyhow, Result};
use anyhow::Result;
use chrono::{DateTime, Local};
use collections::{hash_map, HashMap, HashSet, VecDeque};
use editor::{
@@ -17,10 +16,9 @@ use editor::{
BlockContext, BlockDisposition, BlockId, BlockProperties, BlockStyle, ToDisplayPoint,
},
scroll::{Autoscroll, AutoscrollStrategy},
Anchor, Editor, EditorElement, EditorEvent, EditorStyle, MultiBuffer, MultiBufferSnapshot,
ToOffset as _, ToPoint,
Anchor, Editor, EditorElement, EditorEvent, EditorStyle, MultiBufferSnapshot, ToOffset as _,
ToPoint,
};
use file_icons::FileIcons;
use fs::Fs;
use futures::StreamExt;
use gpui::{
@@ -49,7 +47,7 @@ use uuid::Uuid;
use workspace::{
dock::{DockPosition, Panel, PanelEvent},
searchable::Direction,
Event as WorkspaceEvent, Save, Toast, ToggleZoom, Toolbar, Workspace,
Save, Toast, ToggleZoom, Toolbar, Workspace,
};
pub fn init(cx: &mut AppContext) {
@@ -162,11 +160,6 @@ impl AssistantPanel {
];
let model = CompletionProvider::global(cx).default_model();
cx.observe_global::<FileIcons>(|_, cx| {
cx.notify();
})
.detach();
Self {
workspace: workspace_handle,
active_conversation_editor: None,
@@ -345,7 +338,7 @@ impl AssistantPanel {
style: BlockStyle::Flex,
position: snapshot.anchor_before(point_selection.head()),
height: 2,
render: Box::new({
render: Arc::new({
let inline_assistant = inline_assistant.clone();
move |cx: &mut BlockContext| {
*measurements.lock() = BlockMeasurements {
@@ -695,7 +688,7 @@ impl AssistantPanel {
editor.clear_background_highlights::<PendingInlineAssist>(cx);
} else {
editor.highlight_background::<PendingInlineAssist>(
&background_ranges,
background_ranges,
|theme| theme.editor_active_line_background, // todo!("use the appropriate color")
cx,
);
@@ -716,20 +709,18 @@ impl AssistantPanel {
});
}
fn new_conversation(&mut self, cx: &mut ViewContext<Self>) -> Option<View<ConversationEditor>> {
let workspace = self.workspace.upgrade()?;
fn new_conversation(&mut self, cx: &mut ViewContext<Self>) -> View<ConversationEditor> {
let editor = cx.new_view(|cx| {
ConversationEditor::new(
self.model.clone(),
self.languages.clone(),
self.fs.clone(),
workspace,
self.workspace.clone(),
cx,
)
});
self.show_conversation(editor.clone(), cx);
Some(editor)
editor
}
fn show_conversation(
@@ -768,18 +759,15 @@ impl AssistantPanel {
open_ai::Model::FourTurbo => open_ai::Model::ThreePointFiveTurbo,
}),
LanguageModel::ZedDotDev(model) => LanguageModel::ZedDotDev(match &model {
ZedDotDevModel::Gpt3Point5Turbo => ZedDotDevModel::Gpt4,
ZedDotDevModel::Gpt4 => ZedDotDevModel::Gpt4Turbo,
ZedDotDevModel::Gpt4Turbo => ZedDotDevModel::Claude3Opus,
ZedDotDevModel::Claude3Opus => ZedDotDevModel::Claude3Sonnet,
ZedDotDevModel::Claude3Sonnet => ZedDotDevModel::Claude3Haiku,
ZedDotDevModel::Claude3Haiku => {
ZedDotDevModel::GptThreePointFiveTurbo => ZedDotDevModel::GptFour,
ZedDotDevModel::GptFour => ZedDotDevModel::GptFourTurbo,
ZedDotDevModel::GptFourTurbo => {
match CompletionProvider::global(cx).default_model() {
LanguageModel::ZedDotDev(custom) => custom,
_ => ZedDotDevModel::Gpt3Point5Turbo,
_ => ZedDotDevModel::GptThreePointFiveTurbo,
}
}
ZedDotDevModel::Custom(_) => ZedDotDevModel::Gpt3Point5Turbo,
ZedDotDevModel::Custom(_) => ZedDotDevModel::GptThreePointFiveTurbo,
}),
};
@@ -1001,15 +989,11 @@ impl AssistantPanel {
.await?;
this.update(&mut cx, |this, cx| {
let workspace = workspace
.upgrade()
.ok_or_else(|| anyhow!("workspace dropped"))?;
let editor = cx.new_view(|cx| {
ConversationEditor::for_conversation(conversation, fs, workspace, cx)
});
this.show_conversation(editor, cx);
anyhow::Ok(())
})??;
})?;
Ok(())
})
}
@@ -1280,10 +1264,9 @@ struct Summary {
done: bool,
}
pub struct Conversation {
struct Conversation {
id: Option<String>,
buffer: Model<Buffer>,
embedded_scope: EmbeddedScope,
message_anchors: Vec<MessageAnchor>,
messages_metadata: HashMap<MessageId, MessageMetadata>,
next_message_id: MessageId,
@@ -1305,7 +1288,6 @@ impl Conversation {
fn new(
model: LanguageModel,
language_registry: Arc<LanguageRegistry>,
embedded_scope: EmbeddedScope,
cx: &mut ModelContext<Self>,
) -> Self {
let markdown = language_registry.language_for_name("Markdown");
@@ -1339,9 +1321,7 @@ impl Conversation {
pending_save: Task::ready(Ok(())),
path: None,
buffer,
embedded_scope,
};
let message = MessageAnchor {
id: MessageId(post_inc(&mut this.next_message_id.0)),
start: language::Anchor::MIN,
@@ -1442,7 +1422,6 @@ impl Conversation {
pending_save: Task::ready(Ok(())),
path: Some(path),
buffer,
embedded_scope: EmbeddedScope::new(),
};
this.count_remaining_tokens(cx);
this
@@ -1461,7 +1440,7 @@ impl Conversation {
}
}
pub(crate) fn count_remaining_tokens(&mut self, cx: &mut ModelContext<Self>) {
fn count_remaining_tokens(&mut self, cx: &mut ModelContext<Self>) {
let request = self.to_completion_request(cx);
self.pending_token_count = cx.spawn(|this, mut cx| {
async move {
@@ -1624,7 +1603,7 @@ impl Conversation {
}
fn to_completion_request(&self, cx: &mut ModelContext<Conversation>) -> LanguageModelRequest {
let mut request = LanguageModelRequest {
let request = LanguageModelRequest {
model: self.model.clone(),
messages: self
.messages(cx)
@@ -1634,9 +1613,6 @@ impl Conversation {
stop: vec![],
temperature: 1.0,
};
let context_message = self.embedded_scope.message(cx);
request.messages.extend(context_message);
request
}
@@ -2026,18 +2002,17 @@ impl ConversationEditor {
model: LanguageModel,
language_registry: Arc<LanguageRegistry>,
fs: Arc<dyn Fs>,
workspace: View<Workspace>,
workspace: WeakView<Workspace>,
cx: &mut ViewContext<Self>,
) -> Self {
let conversation = cx
.new_model(|cx| Conversation::new(model, language_registry, EmbeddedScope::new(), cx));
let conversation = cx.new_model(|cx| Conversation::new(model, language_registry, cx));
Self::for_conversation(conversation, fs, workspace, cx)
}
fn for_conversation(
conversation: Model<Conversation>,
fs: Arc<dyn Fs>,
workspace: View<Workspace>,
workspace: WeakView<Workspace>,
cx: &mut ViewContext<Self>,
) -> Self {
let editor = cx.new_view(|cx| {
@@ -2052,7 +2027,6 @@ impl ConversationEditor {
cx.observe(&conversation, |_, _, cx| cx.notify()),
cx.subscribe(&conversation, Self::handle_conversation_event),
cx.subscribe(&editor, Self::handle_editor_event),
cx.subscribe(&workspace, Self::handle_workspace_event),
];
let mut this = Self {
@@ -2061,10 +2035,9 @@ impl ConversationEditor {
blocks: Default::default(),
scroll_position: None,
fs,
workspace: workspace.downgrade(),
workspace,
_subscriptions,
};
this.update_active_buffer(workspace, cx);
this.update_message_headers(cx);
this
}
@@ -2198,37 +2171,6 @@ impl ConversationEditor {
}
}
fn handle_workspace_event(
&mut self,
workspace: View<Workspace>,
event: &WorkspaceEvent,
cx: &mut ViewContext<Self>,
) {
if let WorkspaceEvent::ActiveItemChanged = event {
self.update_active_buffer(workspace, cx);
}
}
fn update_active_buffer(
&mut self,
workspace: View<Workspace>,
cx: &mut ViewContext<'_, ConversationEditor>,
) {
let active_buffer = workspace
.read(cx)
.active_item(cx)
.and_then(|item| Some(item.act_as::<Editor>(cx)?.read(cx).buffer().clone()));
self.conversation.update(cx, |conversation, cx| {
conversation
.embedded_scope
.set_active_buffer(active_buffer.clone(), cx);
conversation.count_remaining_tokens(cx);
cx.notify();
});
}
fn cursor_scroll_position(&self, cx: &mut ViewContext<Self>) -> Option<ScrollPosition> {
self.editor.update(cx, |editor, cx| {
let snapshot = editor.snapshot(cx);
@@ -2266,7 +2208,7 @@ impl ConversationEditor {
.unwrap(),
height: 2,
style: BlockStyle::Sticky,
render: Box::new({
render: Arc::new({
let conversation = self.conversation.clone();
move |_cx| {
let message_id = message.id;
@@ -2362,11 +2304,11 @@ impl ConversationEditor {
let start_language = buffer.language_at(range.start);
let end_language = buffer.language_at(range.end);
let language_name = if start_language == end_language {
start_language.map(|language| language.code_fence_block_name())
start_language.map(|language| language.name())
} else {
None
};
let language_name = language_name.as_deref().unwrap_or("");
let language_name = language_name.as_deref().unwrap_or("").to_lowercase();
let selected_text = buffer.text_for_range(range).collect::<String>();
let text = if selected_text.is_empty() {
@@ -2390,17 +2332,15 @@ impl ConversationEditor {
if let Some(text) = text {
panel.update(cx, |panel, cx| {
if let Some(conversation) = panel
let conversation = panel
.active_conversation_editor()
.cloned()
.or_else(|| panel.new_conversation(cx))
{
conversation.update(cx, |conversation, cx| {
conversation
.editor
.update(cx, |editor, cx| editor.insert(&text, cx))
});
};
.unwrap_or_else(|| panel.new_conversation(cx));
conversation.update(cx, |conversation, cx| {
conversation
.editor
.update(cx, |editor, cx| editor.insert(&text, cx))
});
});
}
}
@@ -2465,120 +2405,12 @@ impl ConversationEditor {
.map(|summary| summary.text.clone())
.unwrap_or_else(|| "New Conversation".into())
}
fn render_embedded_scope(&self, cx: &mut ViewContext<Self>) -> Option<impl Element> {
let active_buffer = self
.conversation
.read(cx)
.embedded_scope
.active_buffer()?
.clone();
Some(
div()
.p_4()
.v_flex()
.child(
div()
.h_flex()
.items_center()
.child(Icon::new(IconName::File))
.child(
div()
.h_6()
.child(Label::new("File Contexts"))
.ml_1()
.font_weight(FontWeight::SEMIBOLD),
),
)
.child(
div()
.ml_4()
.child(self.render_active_buffer(active_buffer, cx)),
),
)
}
fn render_active_buffer(
&self,
buffer: Model<MultiBuffer>,
cx: &mut ViewContext<Self>,
) -> impl Element {
let buffer = buffer.read(cx);
let icon_path;
let path;
if let Some(singleton) = buffer.as_singleton() {
let singleton = singleton.read(cx);
path = singleton.file().map(|file| file.full_path(cx));
icon_path = path
.as_ref()
.and_then(|path| FileIcons::get_icon(path.as_path(), cx))
.map(SharedString::from)
.unwrap_or_else(|| SharedString::from("icons/file_icons/file.svg"));
} else {
icon_path = SharedString::from("icons/file_icons/file.svg");
path = None;
}
let file_name = path.map_or("Untitled".to_string(), |path| {
path.to_string_lossy().to_string()
});
let enabled = self
.conversation
.read(cx)
.embedded_scope
.active_buffer_enabled();
let file_name_text_color = if enabled {
Color::Default
} else {
Color::Disabled
};
div()
.id("active-buffer")
.h_flex()
.cursor_pointer()
.child(Icon::from_path(icon_path).color(file_name_text_color))
.child(
div()
.h_6()
.child(Label::new(file_name).color(file_name_text_color))
.ml_1(),
)
.children(enabled.then(|| {
div()
.child(Icon::new(IconName::Check).color(file_name_text_color))
.ml_1()
}))
.on_click(cx.listener(move |this, _, cx| {
this.conversation.update(cx, |conversation, cx| {
conversation
.embedded_scope
.set_active_buffer_enabled(!enabled);
cx.notify();
})
}))
}
}
impl EventEmitter<ConversationEditorEvent> for ConversationEditor {}
impl Render for ConversationEditor {
fn render(&mut self, cx: &mut ViewContext<Self>) -> impl Element {
//
// The ConversationEditor has two main segments
//
// 1. Messages Editor
// 2. Context
// - File Context (currently only the active file)
// - Project Diagnostics (Planned)
// - Deep Code Context (Planned, for query and other tools for the model)
//
div()
.key_context("ConversationEditor")
.capture_action(cx.listener(ConversationEditor::cancel_last_assist))
@@ -2588,15 +2420,14 @@ impl Render for ConversationEditor {
.on_action(cx.listener(ConversationEditor::assist))
.on_action(cx.listener(ConversationEditor::split))
.size_full()
.v_flex()
.relative()
.child(
div()
.flex_grow()
.size_full()
.pl_4()
.bg(cx.theme().colors().editor_background)
.child(self.editor.clone()),
)
.child(div().flex_shrink().children(self.render_embedded_scope(cx)))
}
}
@@ -2968,9 +2799,8 @@ mod tests {
init(cx);
let registry = Arc::new(LanguageRegistry::test(cx.background_executor().clone()));
let conversation = cx.new_model(|cx| {
Conversation::new(LanguageModel::default(), registry, EmbeddedScope::new(), cx)
});
let conversation =
cx.new_model(|cx| Conversation::new(LanguageModel::default(), registry, cx));
let buffer = conversation.read(cx).buffer.clone();
let message_1 = conversation.read(cx).message_anchors[0].clone();
@@ -3101,9 +2931,8 @@ mod tests {
init(cx);
let registry = Arc::new(LanguageRegistry::test(cx.background_executor().clone()));
let conversation = cx.new_model(|cx| {
Conversation::new(LanguageModel::default(), registry, EmbeddedScope::new(), cx)
});
let conversation =
cx.new_model(|cx| Conversation::new(LanguageModel::default(), registry, cx));
let buffer = conversation.read(cx).buffer.clone();
let message_1 = conversation.read(cx).message_anchors[0].clone();
@@ -3201,9 +3030,8 @@ mod tests {
cx.set_global(settings_store);
init(cx);
let registry = Arc::new(LanguageRegistry::test(cx.background_executor().clone()));
let conversation = cx.new_model(|cx| {
Conversation::new(LanguageModel::default(), registry, EmbeddedScope::new(), cx)
});
let conversation =
cx.new_model(|cx| Conversation::new(LanguageModel::default(), registry, cx));
let buffer = conversation.read(cx).buffer.clone();
let message_1 = conversation.read(cx).message_anchors[0].clone();
@@ -3287,14 +3115,8 @@ mod tests {
cx.set_global(CompletionProvider::Fake(FakeCompletionProvider::default()));
cx.update(init);
let registry = Arc::new(LanguageRegistry::test(cx.executor()));
let conversation = cx.new_model(|cx| {
Conversation::new(
LanguageModel::default(),
registry.clone(),
EmbeddedScope::new(),
cx,
)
});
let conversation =
cx.new_model(|cx| Conversation::new(LanguageModel::default(), registry.clone(), cx));
let buffer = conversation.read_with(cx, |conversation, _| conversation.buffer.clone());
let message_0 =
conversation.read_with(cx, |conversation, _| conversation.message_anchors[0].id);

View File

@@ -10,17 +10,14 @@ use serde::{
de::{self, Visitor},
Deserialize, Deserializer, Serialize, Serializer,
};
use settings::{Settings, SettingsSources};
use settings::Settings;
#[derive(Clone, Debug, Default, PartialEq)]
pub enum ZedDotDevModel {
Gpt3Point5Turbo,
Gpt4,
GptThreePointFiveTurbo,
GptFour,
#[default]
Gpt4Turbo,
Claude3Opus,
Claude3Sonnet,
Claude3Haiku,
GptFourTurbo,
Custom(String),
}
@@ -52,9 +49,9 @@ impl<'de> Deserialize<'de> for ZedDotDevModel {
E: de::Error,
{
match value {
"gpt-3.5-turbo" => Ok(ZedDotDevModel::Gpt3Point5Turbo),
"gpt-4" => Ok(ZedDotDevModel::Gpt4),
"gpt-4-turbo-preview" => Ok(ZedDotDevModel::Gpt4Turbo),
"gpt-3.5-turbo" => Ok(ZedDotDevModel::GptThreePointFiveTurbo),
"gpt-4" => Ok(ZedDotDevModel::GptFour),
"gpt-4-turbo-preview" => Ok(ZedDotDevModel::GptFourTurbo),
_ => Ok(ZedDotDevModel::Custom(value.to_owned())),
}
}
@@ -97,34 +94,27 @@ impl JsonSchema for ZedDotDevModel {
impl ZedDotDevModel {
pub fn id(&self) -> &str {
match self {
Self::Gpt3Point5Turbo => "gpt-3.5-turbo",
Self::Gpt4 => "gpt-4",
Self::Gpt4Turbo => "gpt-4-turbo-preview",
Self::Claude3Opus => "claude-3-opus",
Self::Claude3Sonnet => "claude-3-sonnet",
Self::Claude3Haiku => "claude-3-haiku",
Self::GptThreePointFiveTurbo => "gpt-3.5-turbo",
Self::GptFour => "gpt-4",
Self::GptFourTurbo => "gpt-4-turbo-preview",
Self::Custom(id) => id,
}
}
pub fn display_name(&self) -> &str {
match self {
Self::Gpt3Point5Turbo => "GPT 3.5 Turbo",
Self::Gpt4 => "GPT 4",
Self::Gpt4Turbo => "GPT 4 Turbo",
Self::Claude3Opus => "Claude 3 Opus",
Self::Claude3Sonnet => "Claude 3 Sonnet",
Self::Claude3Haiku => "Claude 3 Haiku",
Self::GptThreePointFiveTurbo => "gpt-3.5-turbo",
Self::GptFour => "gpt-4",
Self::GptFourTurbo => "gpt-4-turbo",
Self::Custom(id) => id.as_str(),
}
}
pub fn max_token_count(&self) -> usize {
match self {
Self::Gpt3Point5Turbo => 2048,
Self::Gpt4 => 4096,
Self::Gpt4Turbo => 128000,
Self::Claude3Opus | Self::Claude3Sonnet | Self::Claude3Haiku => 200000,
Self::GptThreePointFiveTurbo => 2048,
Self::GptFour => 4096,
Self::GptFourTurbo => 128000,
Self::Custom(_) => 4096, // TODO: Make this configurable
}
}
@@ -332,12 +322,13 @@ impl Settings for AssistantSettings {
type FileContent = AssistantSettingsContent;
fn load(
sources: SettingsSources<Self::FileContent>,
default_value: &Self::FileContent,
user_values: &[&Self::FileContent],
_: &mut gpui::AppContext,
) -> anyhow::Result<Self> {
let mut settings = AssistantSettings::default();
for value in sources.defaults_and_customizations() {
for value in [default_value].iter().chain(user_values) {
let value = value.upgrade();
merge(&mut settings.enabled, value.enabled);
merge(&mut settings.button, value.button);

View File

@@ -1,5 +1,5 @@
use crate::{
assistant_settings::ZedDotDevModel, count_open_ai_tokens, CompletionProvider, LanguageModel,
assistant_settings::ZedDotDevModel, count_open_ai_tokens, CompletionProvider,
LanguageModelRequest,
};
use anyhow::{anyhow, Result};
@@ -78,21 +78,13 @@ impl ZedDotDevCompletionProvider {
cx: &AppContext,
) -> BoxFuture<'static, Result<usize>> {
match request.model {
LanguageModel::OpenAi(_) => future::ready(Err(anyhow!("invalid model"))).boxed(),
LanguageModel::ZedDotDev(ZedDotDevModel::Gpt4)
| LanguageModel::ZedDotDev(ZedDotDevModel::Gpt4Turbo)
| LanguageModel::ZedDotDev(ZedDotDevModel::Gpt3Point5Turbo) => {
crate::LanguageModel::OpenAi(_) => future::ready(Err(anyhow!("invalid model"))).boxed(),
crate::LanguageModel::ZedDotDev(ZedDotDevModel::GptFour)
| crate::LanguageModel::ZedDotDev(ZedDotDevModel::GptFourTurbo)
| crate::LanguageModel::ZedDotDev(ZedDotDevModel::GptThreePointFiveTurbo) => {
count_open_ai_tokens(request, cx.background_executor())
}
LanguageModel::ZedDotDev(
ZedDotDevModel::Claude3Opus
| ZedDotDevModel::Claude3Sonnet
| ZedDotDevModel::Claude3Haiku,
) => {
// Can't find a tokenizer for Claude 3, so for now just use the same as OpenAI's as an approximation.
count_open_ai_tokens(request, cx.background_executor())
}
LanguageModel::ZedDotDev(ZedDotDevModel::Custom(model)) => {
crate::LanguageModel::ZedDotDev(ZedDotDevModel::Custom(model)) => {
let request = self.client.request(proto::CountTokensWithLanguageModel {
model,
messages: request

View File

@@ -1,91 +0,0 @@
use editor::MultiBuffer;
use gpui::{AppContext, Model, ModelContext, Subscription};
use crate::{assistant_panel::Conversation, LanguageModelRequestMessage, Role};
#[derive(Default)]
pub struct EmbeddedScope {
active_buffer: Option<Model<MultiBuffer>>,
active_buffer_enabled: bool,
active_buffer_subscription: Option<Subscription>,
}
impl EmbeddedScope {
pub fn new() -> Self {
Self {
active_buffer: None,
active_buffer_enabled: true,
active_buffer_subscription: None,
}
}
pub fn set_active_buffer(
&mut self,
buffer: Option<Model<MultiBuffer>>,
cx: &mut ModelContext<Conversation>,
) {
self.active_buffer_subscription.take();
if let Some(active_buffer) = buffer.clone() {
self.active_buffer_subscription =
Some(cx.subscribe(&active_buffer, |conversation, _, e, cx| {
if let multi_buffer::Event::Edited { .. } = e {
conversation.count_remaining_tokens(cx)
}
}));
}
self.active_buffer = buffer;
}
pub fn active_buffer(&self) -> Option<&Model<MultiBuffer>> {
self.active_buffer.as_ref()
}
pub fn active_buffer_enabled(&self) -> bool {
self.active_buffer_enabled
}
pub fn set_active_buffer_enabled(&mut self, enabled: bool) {
self.active_buffer_enabled = enabled;
}
/// Provide a message for the language model based on the active buffer.
pub fn message(&self, cx: &AppContext) -> Option<LanguageModelRequestMessage> {
if !self.active_buffer_enabled {
return None;
}
let active_buffer = self.active_buffer.as_ref()?;
let buffer = active_buffer.read(cx);
if let Some(singleton) = buffer.as_singleton() {
let singleton = singleton.read(cx);
let filename = singleton
.file()
.map(|file| file.path().to_string_lossy())
.unwrap_or("Untitled".into());
let text = singleton.text();
let language = singleton
.language()
.map(|l| {
let name = l.code_fence_block_name();
name.to_string()
})
.unwrap_or_default();
let markdown =
format!("User's active file `{filename}`:\n\n```{language}\n{text}```\n\n");
return Some(LanguageModelRequestMessage {
role: Role::System,
content: markdown,
});
}
None
}
}
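
For reference, a tiny self-contained sketch of the prompt string that the `message` method above assembles for a singleton buffer; the file name, language, and buffer contents below are invented for illustration.

// Minimal reproduction of the markdown built by `EmbeddedScope::message`.
// The file name, language, and buffer text are placeholder values.
fn main() {
    let filename = "main.rs";
    let language = "rust";
    let text = "fn main() {\n    println!(\"hello\");\n}\n";

    // Same format string as the method above.
    let markdown =
        format!("User's active file `{filename}`:\n\n```{language}\n{text}```\n\n");

    println!("{markdown}");
}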

View File

@@ -11,13 +11,13 @@ use gpui::{
};
use isahc::AsyncBody;
use markdown_preview::markdown_preview_view::{MarkdownPreviewMode, MarkdownPreviewView};
use markdown_preview::markdown_preview_view::MarkdownPreviewView;
use schemars::JsonSchema;
use serde::Deserialize;
use serde_derive::Serialize;
use smol::io::AsyncReadExt;
use settings::{Settings, SettingsSources, SettingsStore};
use settings::{Settings, SettingsStore};
use smol::{fs::File, process::Command};
use release_channel::{AppCommitSha, AppVersion, ReleaseChannel};
@@ -82,22 +82,25 @@ struct AutoUpdateSetting(bool);
/// Whether or not to automatically check for updates.
///
/// Default: true
#[derive(Clone, Copy, Default, JsonSchema, Deserialize, Serialize)]
#[derive(Clone, Default, JsonSchema, Deserialize, Serialize)]
#[serde(transparent)]
struct AutoUpdateSettingContent(bool);
struct AutoUpdateSettingOverride(Option<bool>);
impl Settings for AutoUpdateSetting {
const KEY: Option<&'static str> = Some("auto_update");
type FileContent = Option<AutoUpdateSettingContent>;
type FileContent = AutoUpdateSettingOverride;
fn load(sources: SettingsSources<Self::FileContent>, _: &mut AppContext) -> Result<Self> {
let auto_update = [sources.release_channel, sources.user]
.into_iter()
.find_map(|value| value.copied().flatten())
.unwrap_or(sources.default.ok_or_else(Self::missing_default)?);
Ok(Self(auto_update.0))
fn load(
default_value: &Self::FileContent,
user_values: &[&Self::FileContent],
_: &mut AppContext,
) -> Result<Self> {
Ok(Self(
Self::json_merge(default_value, user_values)?
.0
.ok_or_else(Self::missing_default)?,
))
}
}
@@ -235,11 +238,10 @@ fn view_release_notes_locally(workspace: &mut Workspace, cx: &mut ViewContext<Wo
.new_view(|cx| Editor::for_multibuffer(buffer, Some(project), cx));
let workspace_handle = workspace.weak_handle();
let view: View<MarkdownPreviewView> = MarkdownPreviewView::new(
MarkdownPreviewMode::Default,
editor,
workspace_handle,
language_registry,
Some(tab_description),
language_registry,
cx,
);
workspace.add_item_to_active_pane(Box::new(view.clone()), cx);

View File

@@ -373,10 +373,7 @@ impl ActiveCall {
self.report_call_event("hang up", cx);
Audio::end_call(cx);
let channel_id = self.channel_id(cx);
if let Some((room, _)) = self.room.take() {
cx.emit(Event::RoomLeft { channel_id });
room.update(cx, |room, cx| room.leave(cx))
} else {
Task::ready(Ok(()))

View File

@@ -2,7 +2,7 @@ use anyhow::Result;
use gpui::AppContext;
use schemars::JsonSchema;
use serde_derive::{Deserialize, Serialize};
use settings::{Settings, SettingsSources};
use settings::Settings;
#[derive(Deserialize, Debug)]
pub struct CallSettings {
@@ -29,7 +29,14 @@ impl Settings for CallSettings {
type FileContent = CallSettingsContent;
fn load(sources: SettingsSources<Self::FileContent>, _: &mut AppContext) -> Result<Self> {
sources.json_merge()
fn load(
default_value: &Self::FileContent,
user_values: &[&Self::FileContent],
_cx: &mut AppContext,
) -> Result<Self>
where
Self: Sized,
{
Self::load_via_json_merge(default_value, user_values)
}
}
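
Several of the `Settings::load` changes in this diff revert to the `load(default_value, user_values, cx)` signature, which follows a simple layering rule: start from the default content and let each user value override it field by field. Below is a rough, self-contained sketch of that merge using serde_json directly rather than the real `settings` crate helpers, with a made-up one-field settings struct; it is an illustration of the pattern, not the actual implementation.

use serde::Deserialize;
use serde_json::Value;

// Recursively overlay `overlay` onto `base`; later sources win per field.
fn merge_json(base: &mut Value, overlay: &Value) {
    match (base, overlay) {
        (Value::Object(base_map), Value::Object(overlay_map)) => {
            for (key, value) in overlay_map {
                merge_json(base_map.entry(key.clone()).or_insert(Value::Null), value);
            }
        }
        (base_slot, overlay_value) => *base_slot = overlay_value.clone(),
    }
}

// Stand-in for a settings type; the field name is hypothetical.
#[derive(Deserialize, Debug)]
struct ExampleCallSettings {
    mute_on_join: bool,
}

fn load(default_value: &Value, user_values: &[&Value]) -> serde_json::Result<ExampleCallSettings> {
    let mut merged = default_value.clone();
    for value in user_values {
        merge_json(&mut merged, value);
    }
    serde_json::from_value(merged)
}

fn main() -> serde_json::Result<()> {
    let defaults = serde_json::json!({ "mute_on_join": false });
    let user = serde_json::json!({ "mute_on_join": true });
    let settings = load(&defaults, &[&user])?;
    println!("{settings:?}"); // ExampleCallSettings { mute_on_join: true }
    Ok(())
}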

View File

@@ -52,7 +52,7 @@ pub enum Event {
RemoteProjectInvitationDiscarded {
project_id: u64,
},
RoomLeft {
Left {
channel_id: Option<ChannelId>,
},
}
@@ -366,6 +366,9 @@ impl Room {
pub(crate) fn leave(&mut self, cx: &mut ModelContext<Self>) -> Task<Result<()>> {
cx.notify();
cx.emit(Event::Left {
channel_id: self.channel_id(),
});
self.leave_internal(cx)
}

View File

@@ -222,9 +222,6 @@ impl ChannelChat {
let message = ChannelMessage::from_proto(response, &user_store, &mut cx).await?;
this.update(&mut cx, |this, cx| {
this.insert_messages(SumTree::from_item(message, &()), cx);
if this.first_loaded_message_id.is_none() {
this.first_loaded_message_id = Some(id);
}
})?;
Ok(id)
}))

View File

@@ -1,4 +1,4 @@
#![cfg_attr(any(target_os = "linux", target_os = "windows"), allow(dead_code))]
#![cfg_attr(target_os = "linux", allow(dead_code))]
use anyhow::{anyhow, Context, Result};
use clap::Parser;

View File

@@ -28,7 +28,7 @@ use release_channel::{AppVersion, ReleaseChannel};
use rpc::proto::{AnyTypedEnvelope, EntityMessage, EnvelopedMessage, PeerId, RequestMessage};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsSources, SettingsStore};
use settings::{Settings, SettingsStore};
use std::fmt;
use std::{
any::TypeId,
@@ -97,8 +97,15 @@ impl Settings for ClientSettings {
type FileContent = ClientSettingsContent;
fn load(sources: SettingsSources<Self::FileContent>, _: &mut AppContext) -> Result<Self> {
let mut result = sources.json_merge::<Self>()?;
fn load(
default_value: &Self::FileContent,
user_values: &[&Self::FileContent],
_: &mut AppContext,
) -> Result<Self>
where
Self: Sized,
{
let mut result = Self::load_via_json_merge(default_value, user_values)?;
if let Some(server_url) = &*ZED_SERVER_URL {
result.server_url = server_url.clone()
}
@@ -420,19 +427,21 @@ impl settings::Settings for TelemetrySettings {
type FileContent = TelemetrySettingsContent;
fn load(sources: SettingsSources<Self::FileContent>, _: &mut AppContext) -> Result<Self> {
fn load(
default_value: &Self::FileContent,
user_values: &[&Self::FileContent],
_: &mut AppContext,
) -> Result<Self> {
Ok(Self {
diagnostics: sources.user.as_ref().and_then(|v| v.diagnostics).unwrap_or(
sources
.default
diagnostics: user_values.first().and_then(|v| v.diagnostics).unwrap_or(
default_value
.diagnostics
.ok_or_else(Self::missing_default)?,
),
metrics: sources
.user
.as_ref()
metrics: user_values
.first()
.and_then(|v| v.metrics)
.unwrap_or(sources.default.metrics.ok_or_else(Self::missing_default)?),
.unwrap_or(default_value.metrics.ok_or_else(Self::missing_default)?),
})
}
}
@@ -781,6 +790,7 @@ impl Client {
}
Status::UpgradeRequired => return Err(EstablishConnectionError::UpgradeRequired)?,
};
if was_disconnected {
self.set_status(Status::Authenticating, cx);
} else {

View File

@@ -590,10 +590,7 @@ mod tests {
}
#[gpui::test]
async fn test_telemetry_flush_on_flush_interval(
executor: BackgroundExecutor,
cx: &mut TestAppContext,
) {
async fn test_connection_timeout(executor: BackgroundExecutor, cx: &mut TestAppContext) {
init_test(cx);
let clock = Arc::new(FakeSystemClock::new(
Utc.with_ymd_and_hms(1990, 4, 12, 12, 0, 0).unwrap(),

View File

@@ -18,7 +18,6 @@ sqlite = ["sea-orm/sqlx-sqlite", "sqlx/sqlite"]
test-support = ["sqlite"]
[dependencies]
anthropic.workspace = true
anyhow.workspace = true
async-tungstenite = "0.16"
aws-config = { version = "1.1.5" }
@@ -47,7 +46,6 @@ reqwest = { version = "0.11", features = ["json"] }
rpc.workspace = true
scrypt = "0.7"
sea-orm = { version = "0.12.x", features = ["sqlx-postgres", "postgres-array", "runtime-tokio-rustls", "with-uuid"] }
semantic_version.workspace = true
semver.workspace = true
serde.workspace = true
serde_derive.workspace = true

View File

@@ -47,6 +47,19 @@ spec:
metadata:
labels:
app: ${ZED_SERVICE_NAME}
annotations:
ad.datadoghq.com/collab.check_names: |
["openmetrics"]
ad.datadoghq.com/collab.init_configs: |
[{}]
ad.datadoghq.com/collab.instances: |
[
{
"openmetrics_endpoint": "http://%%host%%:%%port%%/metrics",
"namespace": "collab_${ZED_KUBE_NAMESPACE}",
"metrics": [".*"]
}
]
spec:
containers:
- name: ${ZED_SERVICE_NAME}
@@ -117,11 +130,6 @@ spec:
secretKeyRef:
name: openai
key: api_key
- name: ANTHROPIC_API_KEY
valueFrom:
secretKeyRef:
name: anthropic
key: api_key
- name: BLOB_STORE_ACCESS_KEY
valueFrom:
secretKeyRef:

View File

@@ -10,7 +10,6 @@ use axum::{
Extension, Router, TypedHeader,
};
use rpc::ExtensionMetadata;
use semantic_version::SemanticVersion;
use serde::{Serialize, Serializer};
use sha2::{Digest, Sha256};
use std::sync::{Arc, OnceLock};
@@ -18,6 +17,7 @@ use telemetry_events::{
ActionEvent, AppEvent, AssistantEvent, CallEvent, CopilotEvent, CpuEvent, EditEvent,
EditorEvent, Event, EventRequestBody, EventWrapper, ExtensionEvent, MemoryEvent, SettingEvent,
};
use util::SemanticVersion;
pub fn router() -> Router {
Router::new()
@@ -459,12 +459,6 @@ impl ToUpload {
}
insert.end().await?;
let event_count = rows.len();
log::info!(
"wrote {event_count} {event_specifier} to '{table}'",
event_specifier = if event_count == 1 { "event" } else { "events" }
);
}
Ok(())
@@ -528,9 +522,9 @@ impl EditorEventRow {
Self {
app_version: body.app_version.clone(),
major: semver.map(|v| v.major() as i32),
minor: semver.map(|v| v.minor() as i32),
patch: semver.map(|v| v.patch() as i32),
major: semver.map(|s| s.major as i32),
minor: semver.map(|s| s.minor as i32),
patch: semver.map(|s| s.patch as i32),
release_channel: body.release_channel.clone().unwrap_or_default(),
os_name: body.os_name.clone(),
os_version: body.os_version.clone().unwrap_or_default(),
@@ -590,9 +584,9 @@ impl CopilotEventRow {
Self {
app_version: body.app_version.clone(),
major: semver.map(|v| v.major() as i32),
minor: semver.map(|v| v.minor() as i32),
patch: semver.map(|v| v.patch() as i32),
major: semver.map(|s| s.major as i32),
minor: semver.map(|s| s.minor as i32),
patch: semver.map(|s| s.patch as i32),
release_channel: body.release_channel.clone().unwrap_or_default(),
os_name: body.os_name.clone(),
os_version: body.os_version.clone().unwrap_or_default(),
@@ -645,9 +639,9 @@ impl CallEventRow {
Self {
app_version: body.app_version.clone(),
major: semver.map(|v| v.major() as i32),
minor: semver.map(|v| v.minor() as i32),
patch: semver.map(|v| v.patch() as i32),
major: semver.map(|s| s.major as i32),
minor: semver.map(|s| s.minor as i32),
patch: semver.map(|s| s.patch as i32),
release_channel: body.release_channel.clone().unwrap_or_default(),
installation_id: body.installation_id.clone().unwrap_or_default(),
session_id: body.session_id.clone(),
@@ -694,9 +688,9 @@ impl AssistantEventRow {
Self {
app_version: body.app_version.clone(),
major: semver.map(|v| v.major() as i32),
minor: semver.map(|v| v.minor() as i32),
patch: semver.map(|v| v.patch() as i32),
major: semver.map(|s| s.major as i32),
minor: semver.map(|s| s.minor as i32),
patch: semver.map(|s| s.patch as i32),
release_channel: body.release_channel.clone().unwrap_or_default(),
installation_id: body.installation_id.clone(),
session_id: body.session_id.clone(),
@@ -738,9 +732,9 @@ impl CpuEventRow {
Self {
app_version: body.app_version.clone(),
major: semver.map(|v| v.major() as i32),
minor: semver.map(|v| v.minor() as i32),
patch: semver.map(|v| v.patch() as i32),
major: semver.map(|s| s.major as i32),
minor: semver.map(|s| s.minor as i32),
patch: semver.map(|s| s.patch as i32),
release_channel: body.release_channel.clone().unwrap_or_default(),
installation_id: body.installation_id.clone(),
session_id: body.session_id.clone(),
@@ -785,9 +779,9 @@ impl MemoryEventRow {
Self {
app_version: body.app_version.clone(),
major: semver.map(|v| v.major() as i32),
minor: semver.map(|v| v.minor() as i32),
patch: semver.map(|v| v.patch() as i32),
major: semver.map(|s| s.major as i32),
minor: semver.map(|s| s.minor as i32),
patch: semver.map(|s| s.patch as i32),
release_channel: body.release_channel.clone().unwrap_or_default(),
installation_id: body.installation_id.clone(),
session_id: body.session_id.clone(),
@@ -831,9 +825,9 @@ impl AppEventRow {
Self {
app_version: body.app_version.clone(),
major: semver.map(|v| v.major() as i32),
minor: semver.map(|v| v.minor() as i32),
patch: semver.map(|v| v.patch() as i32),
major: semver.map(|s| s.major as i32),
minor: semver.map(|s| s.minor as i32),
patch: semver.map(|s| s.patch as i32),
release_channel: body.release_channel.clone().unwrap_or_default(),
installation_id: body.installation_id.clone(),
session_id: body.session_id.clone(),
@@ -876,9 +870,9 @@ impl SettingEventRow {
Self {
app_version: body.app_version.clone(),
major: semver.map(|v| v.major() as i32),
minor: semver.map(|v| v.minor() as i32),
patch: semver.map(|v| v.patch() as i32),
major: semver.map(|s| s.major as i32),
minor: semver.map(|s| s.minor as i32),
patch: semver.map(|s| s.patch as i32),
release_channel: body.release_channel.clone().unwrap_or_default(),
installation_id: body.installation_id.clone(),
session_id: body.session_id.clone(),
@@ -927,9 +921,9 @@ impl ExtensionEventRow {
Self {
app_version: body.app_version.clone(),
major: semver.map(|v| v.major() as i32),
minor: semver.map(|v| v.minor() as i32),
patch: semver.map(|v| v.patch() as i32),
major: semver.map(|s| s.major as i32),
minor: semver.map(|s| s.minor as i32),
patch: semver.map(|s| s.patch as i32),
release_channel: body.release_channel.clone().unwrap_or_default(),
installation_id: body.installation_id.clone(),
session_id: body.session_id.clone(),
@@ -991,9 +985,9 @@ impl EditEventRow {
Self {
app_version: body.app_version.clone(),
major: semver.map(|v| v.major() as i32),
minor: semver.map(|v| v.minor() as i32),
patch: semver.map(|v| v.patch() as i32),
major: semver.map(|s| s.major as i32),
minor: semver.map(|s| s.minor as i32),
patch: semver.map(|s| s.patch as i32),
release_channel: body.release_channel.clone().unwrap_or_default(),
installation_id: body.installation_id.clone(),
session_id: body.session_id.clone(),
@@ -1040,9 +1034,9 @@ impl ActionEventRow {
Self {
app_version: body.app_version.clone(),
major: semver.map(|v| v.major() as i32),
minor: semver.map(|v| v.minor() as i32),
patch: semver.map(|v| v.patch() as i32),
major: semver.map(|s| s.major as i32),
minor: semver.map(|s| s.minor as i32),
patch: semver.map(|s| s.patch as i32),
release_channel: body.release_channel.clone().unwrap_or_default(),
installation_id: body.installation_id.clone(),
session_id: body.session_id.clone(),

View File

@@ -1,4 +1,3 @@
use crate::db::ExtensionVersionConstraints;
use crate::{db::NewExtensionVersion, AppState, Error, Result};
use anyhow::{anyhow, Context as _};
use aws_sdk_s3::presigning::PresigningConfig;
@@ -11,17 +10,14 @@ use axum::{
};
use collections::HashMap;
use rpc::{ExtensionApiManifest, GetExtensionsResponse};
use semantic_version::SemanticVersion;
use serde::Deserialize;
use std::{sync::Arc, time::Duration};
use time::PrimitiveDateTime;
use util::{maybe, ResultExt};
use util::ResultExt;
pub fn router() -> Router {
Router::new()
.route("/extensions", get(get_extensions))
.route("/extensions/updates", get(get_extension_updates))
.route("/extensions/:extension_id", get(get_extension_versions))
.route(
"/extensions/:extension_id/download",
get(download_latest_extension),
@@ -36,103 +32,38 @@ pub fn router() -> Router {
struct GetExtensionsParams {
filter: Option<String>,
#[serde(default)]
ids: Option<String>,
#[serde(default)]
max_schema_version: i32,
}
#[derive(Debug, Deserialize)]
struct DownloadLatestExtensionParams {
extension_id: String,
}
#[derive(Debug, Deserialize)]
struct DownloadExtensionParams {
extension_id: String,
version: String,
}
async fn get_extensions(
Extension(app): Extension<Arc<AppState>>,
Query(params): Query<GetExtensionsParams>,
) -> Result<Json<GetExtensionsResponse>> {
let extension_ids = params
.ids
.as_ref()
.map(|s| s.split(',').map(|s| s.trim()).collect::<Vec<_>>());
let extensions = if let Some(extension_ids) = extension_ids {
app.db.get_extensions_by_ids(&extension_ids, None).await?
} else {
app.db
.get_extensions(params.filter.as_deref(), params.max_schema_version, 500)
.await?
};
Ok(Json(GetExtensionsResponse { data: extensions }))
}
#[derive(Debug, Deserialize)]
struct GetExtensionUpdatesParams {
ids: String,
min_schema_version: i32,
max_schema_version: i32,
min_wasm_api_version: SemanticVersion,
max_wasm_api_version: SemanticVersion,
}
async fn get_extension_updates(
Extension(app): Extension<Arc<AppState>>,
Query(params): Query<GetExtensionUpdatesParams>,
) -> Result<Json<GetExtensionsResponse>> {
let constraints = ExtensionVersionConstraints {
schema_versions: params.min_schema_version..=params.max_schema_version,
wasm_api_versions: params.min_wasm_api_version..=params.max_wasm_api_version,
};
let extension_ids = params.ids.split(',').map(|s| s.trim()).collect::<Vec<_>>();
let extensions = app
.db
.get_extensions_by_ids(&extension_ids, Some(&constraints))
.get_extensions(params.filter.as_deref(), params.max_schema_version, 500)
.await?;
Ok(Json(GetExtensionsResponse { data: extensions }))
}
#[derive(Debug, Deserialize)]
struct GetExtensionVersionsParams {
extension_id: String,
}
async fn get_extension_versions(
Extension(app): Extension<Arc<AppState>>,
Path(params): Path<GetExtensionVersionsParams>,
) -> Result<Json<GetExtensionsResponse>> {
let extension_versions = app.db.get_extension_versions(&params.extension_id).await?;
Ok(Json(GetExtensionsResponse {
data: extension_versions,
}))
}
#[derive(Debug, Deserialize)]
struct DownloadLatestExtensionParams {
extension_id: String,
min_schema_version: Option<i32>,
max_schema_version: Option<i32>,
min_wasm_api_version: Option<SemanticVersion>,
max_wasm_api_version: Option<SemanticVersion>,
}
async fn download_latest_extension(
Extension(app): Extension<Arc<AppState>>,
Path(params): Path<DownloadLatestExtensionParams>,
) -> Result<Redirect> {
let constraints = maybe!({
let min_schema_version = params.min_schema_version?;
let max_schema_version = params.max_schema_version?;
let min_wasm_api_version = params.min_wasm_api_version?;
let max_wasm_api_version = params.max_wasm_api_version?;
Some(ExtensionVersionConstraints {
schema_versions: min_schema_version..=max_schema_version,
wasm_api_versions: min_wasm_api_version..=max_wasm_api_version,
})
});
let extension = app
.db
.get_extension(&params.extension_id, constraints.as_ref())
.get_extension(&params.extension_id)
.await?
.ok_or_else(|| anyhow!("unknown extension"))?;
download_extension(
@@ -145,12 +76,6 @@ async fn download_latest_extension(
.await
}
#[derive(Debug, Deserialize)]
struct DownloadExtensionParams {
extension_id: String,
version: String,
}
async fn download_extension(
Extension(app): Extension<Arc<AppState>>,
Path(params): Path<DownloadExtensionParams>,

View File

@@ -1,8 +1,9 @@
use collections::HashMap;
use semantic_version::SemanticVersion;
use serde::{Deserialize, Serialize};
use serde_derive::Deserialize;
use serde_derive::Serialize;
use serde_json::Value;
use util::SemanticVersion;
#[derive(Debug)]
pub struct IpsFile {

View File

@@ -21,13 +21,11 @@ use sea_orm::{
FromQueryResult, IntoActiveModel, IsolationLevel, JoinType, QueryOrder, QuerySelect, Statement,
TransactionTrait,
};
use semantic_version::SemanticVersion;
use serde::{Deserialize, Serialize};
use serde::{ser::Error as _, Deserialize, Serialize, Serializer};
use sqlx::{
migrate::{Migrate, Migration, MigrationSource},
Connection,
};
use std::ops::RangeInclusive;
use std::{
fmt::Write as _,
future::Future,
@@ -38,7 +36,7 @@ use std::{
sync::Arc,
time::Duration,
};
use time::PrimitiveDateTime;
use time::{format_description::well_known::iso8601, PrimitiveDateTime};
use tokio::sync::{Mutex, OwnedMutexGuard};
#[cfg(test)]
@@ -460,8 +458,6 @@ pub struct UpdatedChannelMessage {
pub notifications: NotificationBatch,
pub reply_to_message_id: Option<MessageId>,
pub timestamp: PrimitiveDateTime,
pub deleted_mention_notification_ids: Vec<NotificationId>,
pub updated_mention_notifications: Vec<rpc::proto::Notification>,
}
#[derive(Clone, Debug, PartialEq, Eq, FromQueryResult, Serialize, Deserialize)]
@@ -734,7 +730,20 @@ pub struct NewExtensionVersion {
pub published_at: PrimitiveDateTime,
}
pub struct ExtensionVersionConstraints {
pub schema_versions: RangeInclusive<i32>,
pub wasm_api_versions: RangeInclusive<SemanticVersion>,
pub fn serialize_iso8601<S: Serializer>(
datetime: &PrimitiveDateTime,
serializer: S,
) -> Result<S::Ok, S::Error> {
const SERDE_CONFIG: iso8601::EncodedConfig = iso8601::Config::DEFAULT
.set_year_is_six_digits(false)
.set_time_precision(iso8601::TimePrecision::Second {
decimal_digits: None,
})
.encode();
datetime
.assume_utc()
.format(&time::format_description::well_known::Iso8601::<SERDE_CONFIG>)
.map_err(S::Error::custom)?
.serialize(serializer)
}
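
As a usage note, a serializer helper with this signature is typically attached to a field through serde's `serialize_with` attribute. The sketch below shows that wiring with an invented row type; it uses the default ISO-8601 config for brevity instead of the custom `SERDE_CONFIG` above, and assumes the `time` crate's `formatting` and `macros` features.

use serde::{ser::Error as _, Serialize, Serializer};
use time::format_description::well_known::Iso8601;
use time::macros::datetime;
use time::PrimitiveDateTime;

// Same shape as the helper above, simplified to the default ISO-8601 config.
fn serialize_iso8601<S: Serializer>(
    datetime: &PrimitiveDateTime,
    serializer: S,
) -> Result<S::Ok, S::Error> {
    datetime
        .assume_utc()
        .format(&Iso8601::DEFAULT)
        .map_err(S::Error::custom)?
        .serialize(serializer)
}

// Hypothetical row type showing how the helper is attached to a field.
#[derive(Serialize)]
struct ExampleMessageRow {
    id: i64,
    #[serde(serialize_with = "serialize_iso8601")]
    sent_at: PrimitiveDateTime,
}

fn main() -> Result<(), serde_json::Error> {
    let row = ExampleMessageRow {
        id: 1,
        sent_at: datetime!(2024-03-27 13:50:53),
    };
    println!("{}", serde_json::to_string(&row)?);
    Ok(())
}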

View File

@@ -1,8 +1,4 @@
use std::str::FromStr;
use chrono::Utc;
use sea_orm::sea_query::IntoCondition;
use util::ResultExt;
use super::*;
@@ -14,163 +10,53 @@ impl Database {
limit: usize,
) -> Result<Vec<ExtensionMetadata>> {
self.transaction(|tx| async move {
let mut condition = Condition::all()
.add(
extension::Column::LatestVersion
.into_expr()
.eq(extension_version::Column::Version.into_expr()),
)
.add(extension_version::Column::SchemaVersion.lte(max_schema_version));
let mut condition = Condition::all().add(
extension::Column::LatestVersion
.into_expr()
.eq(extension_version::Column::Version.into_expr()),
);
if let Some(filter) = filter {
let fuzzy_name_filter = Self::fuzzy_like_string(filter);
condition = condition.add(Expr::cust_with_expr("name ILIKE $1", fuzzy_name_filter));
}
self.get_extensions_where(condition, Some(limit as u64), &tx)
.await
})
.await
}
pub async fn get_extensions_by_ids(
&self,
ids: &[&str],
constraints: Option<&ExtensionVersionConstraints>,
) -> Result<Vec<ExtensionMetadata>> {
self.transaction(|tx| async move {
let extensions = extension::Entity::find()
.filter(extension::Column::ExternalId.is_in(ids.iter().copied()))
.inner_join(extension_version::Entity)
.select_also(extension_version::Entity)
.filter(condition)
.filter(extension_version::Column::SchemaVersion.lte(max_schema_version))
.order_by_desc(extension::Column::TotalDownloadCount)
.order_by_asc(extension::Column::Name)
.limit(Some(limit as u64))
.all(&*tx)
.await?;
let mut max_versions = self
.get_latest_versions_for_extensions(&extensions, constraints, &tx)
.await?;
Ok(extensions
.into_iter()
.filter_map(|extension| {
let (version, _) = max_versions.remove(&extension.id)?;
Some(metadata_from_extension_and_version(extension, version))
.filter_map(|(extension, version)| {
Some(metadata_from_extension_and_version(extension, version?))
})
.collect())
})
.await
}
async fn get_latest_versions_for_extensions(
&self,
extensions: &[extension::Model],
constraints: Option<&ExtensionVersionConstraints>,
tx: &DatabaseTransaction,
) -> Result<HashMap<ExtensionId, (extension_version::Model, SemanticVersion)>> {
let mut versions = extension_version::Entity::find()
.filter(
extension_version::Column::ExtensionId
.is_in(extensions.iter().map(|extension| extension.id)),
)
.stream(tx)
.await?;
let mut max_versions =
HashMap::<ExtensionId, (extension_version::Model, SemanticVersion)>::default();
while let Some(version) = versions.next().await {
let version = version?;
let Some(extension_version) = SemanticVersion::from_str(&version.version).log_err()
else {
continue;
};
if let Some((_, max_extension_version)) = &max_versions.get(&version.extension_id) {
if max_extension_version > &extension_version {
continue;
}
}
if let Some(constraints) = constraints {
if !constraints
.schema_versions
.contains(&version.schema_version)
{
continue;
}
if let Some(wasm_api_version) = version.wasm_api_version.as_ref() {
if let Some(version) = SemanticVersion::from_str(wasm_api_version).log_err() {
if !constraints.wasm_api_versions.contains(&version) {
continue;
}
} else {
continue;
}
}
}
max_versions.insert(version.extension_id, (version, extension_version));
}
Ok(max_versions)
}
/// Returns all of the versions for the extension with the given ID.
pub async fn get_extension_versions(
&self,
extension_id: &str,
) -> Result<Vec<ExtensionMetadata>> {
self.transaction(|tx| async move {
let condition = extension::Column::ExternalId
.eq(extension_id)
.into_condition();
self.get_extensions_where(condition, None, &tx).await
})
.await
}
async fn get_extensions_where(
&self,
condition: Condition,
limit: Option<u64>,
tx: &DatabaseTransaction,
) -> Result<Vec<ExtensionMetadata>> {
let extensions = extension::Entity::find()
.inner_join(extension_version::Entity)
.select_also(extension_version::Entity)
.filter(condition)
.order_by_desc(extension::Column::TotalDownloadCount)
.order_by_asc(extension::Column::Name)
.limit(limit)
.all(tx)
.await?;
Ok(extensions
.into_iter()
.filter_map(|(extension, version)| {
Some(metadata_from_extension_and_version(extension, version?))
})
.collect())
}
pub async fn get_extension(
&self,
extension_id: &str,
constraints: Option<&ExtensionVersionConstraints>,
) -> Result<Option<ExtensionMetadata>> {
pub async fn get_extension(&self, extension_id: &str) -> Result<Option<ExtensionMetadata>> {
self.transaction(|tx| async move {
let extension = extension::Entity::find()
.filter(extension::Column::ExternalId.eq(extension_id))
.filter(
extension::Column::LatestVersion
.into_expr()
.eq(extension_version::Column::Version.into_expr()),
)
.inner_join(extension_version::Entity)
.select_also(extension_version::Entity)
.one(&*tx)
.await?
.ok_or_else(|| anyhow!("no such extension: {extension_id}"))?;
let extensions = [extension];
let mut versions = self
.get_latest_versions_for_extensions(&extensions, constraints, &tx)
.await?;
let [extension] = extensions;
Ok(versions.remove(&extension.id).map(|(max_version, _)| {
metadata_from_extension_and_version(extension, max_version)
Ok(extension.and_then(|(extension, version)| {
Some(metadata_from_extension_and_version(extension, version?))
}))
})
.await
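
The `get_latest_versions_for_extensions` logic above keeps, for each extension, the highest semver version whose schema version and wasm API version fall inside the requested constraints. Here is a simplified, self-contained sketch of that selection using the `semver` crate and toy stand-ins for the entity models; the example values are illustrative, and unlike the real query this sketch simply skips rows with no wasm API version.

use std::collections::HashMap;
use std::ops::RangeInclusive;

use semver::Version;

// Simplified stand-ins for the database models and constraint struct above.
struct ExtensionVersionRow {
    extension_id: u32,
    version: Version,
    schema_version: i32,
    wasm_api_version: Option<Version>,
}

struct Constraints {
    schema_versions: RangeInclusive<i32>,
    wasm_api_versions: RangeInclusive<Version>,
}

// Keep, per extension, the highest version that satisfies the constraints.
fn latest_within_constraints(
    rows: &[ExtensionVersionRow],
    constraints: &Constraints,
) -> HashMap<u32, Version> {
    let mut max_versions: HashMap<u32, Version> = HashMap::new();
    for row in rows {
        // Skip versions outside the allowed schema range.
        if !constraints.schema_versions.contains(&row.schema_version) {
            continue;
        }
        // Skip versions whose wasm API version is missing or out of range.
        match &row.wasm_api_version {
            Some(api) if constraints.wasm_api_versions.contains(api) => {}
            _ => continue,
        }
        // Keep only the highest surviving version per extension.
        let entry = max_versions
            .entry(row.extension_id)
            .or_insert_with(|| row.version.clone());
        if row.version > *entry {
            *entry = row.version.clone();
        }
    }
    max_versions
}

fn main() {
    let rows = vec![
        ExtensionVersionRow {
            extension_id: 1,
            version: Version::parse("0.0.2").unwrap(),
            schema_version: 1,
            wasm_api_version: Some(Version::parse("0.0.4").unwrap()),
        },
        ExtensionVersionRow {
            extension_id: 1,
            version: Version::parse("0.0.3").unwrap(),
            schema_version: 1,
            wasm_api_version: Some(Version::parse("0.0.5").unwrap()),
        },
    ];
    let constraints = Constraints {
        schema_versions: 1..=1,
        wasm_api_versions: Version::parse("0.0.1").unwrap()..=Version::parse("0.0.4").unwrap(),
    };
    // Only 0.0.2 survives: 0.0.3 needs wasm API 0.0.5, outside the allowed range.
    println!("{:?}", latest_within_constraints(&rows, &constraints));
}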

View File

@@ -1,8 +1,7 @@
use super::*;
use rpc::Notification;
use sea_orm::{SelectColumns, TryInsertResult};
use sea_orm::TryInsertResult;
use time::OffsetDateTime;
use util::ResultExt;
impl Database {
/// Inserts a record representing a user joining the chat for a given channel.
@@ -481,20 +480,13 @@ impl Database {
Ok(results)
}
fn get_notification_kind_id_by_name(&self, notification_kind: &str) -> Option<i32> {
self.notification_kinds_by_id
.iter()
.find(|(_, kind)| **kind == notification_kind)
.map(|kind| kind.0 .0)
}
/// Removes the channel message with the given ID.
pub async fn remove_channel_message(
&self,
channel_id: ChannelId,
message_id: MessageId,
user_id: UserId,
) -> Result<(Vec<ConnectionId>, Vec<NotificationId>)> {
) -> Result<Vec<ConnectionId>> {
self.transaction(|tx| async move {
let mut rows = channel_chat_participant::Entity::find()
.filter(channel_chat_participant::Column::ChannelId.eq(channel_id))
@@ -539,29 +531,7 @@ impl Database {
}
}
let notification_kind_id =
self.get_notification_kind_id_by_name("ChannelMessageMention");
let existing_notifications = notification::Entity::find()
.filter(notification::Column::EntityId.eq(message_id))
.filter(notification::Column::Kind.eq(notification_kind_id))
.select_column(notification::Column::Id)
.all(&*tx)
.await?;
let existing_notification_ids = existing_notifications
.into_iter()
.map(|notification| notification.id)
.collect();
// remove all the mention notifications for this message
notification::Entity::delete_many()
.filter(notification::Column::EntityId.eq(message_id))
.filter(notification::Column::Kind.eq(notification_kind_id))
.exec(&*tx)
.await?;
Ok((participant_connection_ids, existing_notification_ids))
Ok(participant_connection_ids)
})
.await
}
@@ -659,44 +629,14 @@ impl Database {
.await?;
}
let mut update_mention_user_ids = HashSet::default();
let mut new_mention_user_ids =
mentions.iter().map(|m| m.user_id).collect::<HashSet<_>>();
let mut mentioned_user_ids = mentions.iter().map(|m| m.user_id).collect::<HashSet<_>>();
// Filter out users that were mentioned before
for mention in &old_mentions {
if new_mention_user_ids.contains(&mention.user_id.to_proto()) {
update_mention_user_ids.insert(mention.user_id.to_proto());
}
new_mention_user_ids.remove(&mention.user_id.to_proto());
}
let notification_kind_id =
self.get_notification_kind_id_by_name("ChannelMessageMention");
let existing_notifications = notification::Entity::find()
.filter(notification::Column::EntityId.eq(message_id))
.filter(notification::Column::Kind.eq(notification_kind_id))
.all(&*tx)
.await?;
// determine which notifications should be updated or deleted
let mut deleted_notification_ids = HashSet::default();
let mut updated_mention_notifications = Vec::new();
for notification in existing_notifications {
if update_mention_user_ids.contains(&notification.recipient_id.to_proto()) {
if let Some(notification) =
self::notifications::model_to_proto(self, notification).log_err()
{
updated_mention_notifications.push(notification);
}
} else {
deleted_notification_ids.insert(notification.id);
}
for mention in old_mentions {
mentioned_user_ids.remove(&mention.user_id.to_proto());
}
let mut notifications = Vec::new();
for mentioned_user in new_mention_user_ids {
for mentioned_user in mentioned_user_ids {
notifications.extend(
self.create_notification(
UserId::from_proto(mentioned_user),
@@ -718,10 +658,6 @@ impl Database {
notifications,
reply_to_message_id: channel_message.reply_to_message_id,
timestamp: channel_message.sent_at,
deleted_mention_notification_ids: deleted_notification_ids
.into_iter()
.collect::<Vec<_>>(),
updated_mention_notifications,
})
})
.await

View File

@@ -1,6 +1,5 @@
use super::*;
use rpc::Notification;
use util::ResultExt;
impl Database {
/// Initializes the different kinds of notifications by upserting records for them.
@@ -54,8 +53,11 @@ impl Database {
.await?;
while let Some(row) = rows.next().await {
let row = row?;
if let Some(proto) = model_to_proto(self, row).log_err() {
let kind = row.kind;
if let Some(proto) = model_to_proto(self, row) {
result.push(proto);
} else {
log::warn!("unknown notification kind {:?}", kind);
}
}
result.reverse();
@@ -198,9 +200,7 @@ impl Database {
})
.exec(tx)
.await?;
Ok(model_to_proto(self, row)
.map(|notification| (recipient_id, notification))
.ok())
Ok(model_to_proto(self, row).map(|notification| (recipient_id, notification)))
} else {
Ok(None)
}
@@ -241,12 +241,9 @@ impl Database {
}
}
pub fn model_to_proto(this: &Database, row: notification::Model) -> Result<proto::Notification> {
let kind = this
.notification_kinds_by_id
.get(&row.kind)
.ok_or_else(|| anyhow!("Unknown notification kind"))?;
Ok(proto::Notification {
fn model_to_proto(this: &Database, row: notification::Model) -> Option<proto::Notification> {
let kind = this.notification_kinds_by_id.get(&row.kind)?;
Some(proto::Notification {
id: row.id.to_proto(),
kind: kind.to_string(),
timestamp: row.created_at.assume_utc().unix_timestamp() as u64,

View File

@@ -349,17 +349,6 @@ impl Database {
.await
}
pub async fn stale_room_connection(&self, user_id: UserId) -> Result<Option<ConnectionId>> {
self.transaction(|tx| async move {
let participant = room_participant::Entity::find()
.filter(room_participant::Column::UserId.eq(user_id))
.one(&*tx)
.await?;
Ok(participant.and_then(|p| p.answering_connection()))
})
.await
}
async fn get_next_participant_index_internal(
&self,
room_id: RoomId,
@@ -414,50 +403,39 @@ impl Database {
.get_next_participant_index_internal(room_id, tx)
.await?;
// If someone has been invited into the room, accept the invite instead of inserting
let result = room_participant::Entity::update_many()
.filter(
Condition::all()
.add(room_participant::Column::RoomId.eq(room_id))
.add(room_participant::Column::UserId.eq(user_id))
.add(room_participant::Column::AnsweringConnectionId.is_null()),
)
.set(room_participant::ActiveModel {
participant_index: ActiveValue::Set(Some(participant_index)),
answering_connection_id: ActiveValue::set(Some(connection.id as i32)),
answering_connection_server_id: ActiveValue::set(Some(ServerId(
connection.owner_id as i32,
))),
answering_connection_lost: ActiveValue::set(false),
..Default::default()
})
.exec(tx)
.await?;
if result.rows_affected == 0 {
room_participant::Entity::insert(room_participant::ActiveModel {
room_id: ActiveValue::set(room_id),
user_id: ActiveValue::set(user_id),
answering_connection_id: ActiveValue::set(Some(connection.id as i32)),
answering_connection_server_id: ActiveValue::set(Some(ServerId(
connection.owner_id as i32,
))),
answering_connection_lost: ActiveValue::set(false),
calling_user_id: ActiveValue::set(user_id),
calling_connection_id: ActiveValue::set(connection.id as i32),
calling_connection_server_id: ActiveValue::set(Some(ServerId(
connection.owner_id as i32,
))),
participant_index: ActiveValue::Set(Some(participant_index)),
role: ActiveValue::set(Some(role)),
id: ActiveValue::NotSet,
location_kind: ActiveValue::NotSet,
location_project_id: ActiveValue::NotSet,
initial_project_id: ActiveValue::NotSet,
})
.exec(tx)
.await?;
}
room_participant::Entity::insert_many([room_participant::ActiveModel {
room_id: ActiveValue::set(room_id),
user_id: ActiveValue::set(user_id),
answering_connection_id: ActiveValue::set(Some(connection.id as i32)),
answering_connection_server_id: ActiveValue::set(Some(ServerId(
connection.owner_id as i32,
))),
answering_connection_lost: ActiveValue::set(false),
calling_user_id: ActiveValue::set(user_id),
calling_connection_id: ActiveValue::set(connection.id as i32),
calling_connection_server_id: ActiveValue::set(Some(ServerId(
connection.owner_id as i32,
))),
participant_index: ActiveValue::Set(Some(participant_index)),
role: ActiveValue::set(Some(role)),
id: ActiveValue::NotSet,
location_kind: ActiveValue::NotSet,
location_project_id: ActiveValue::NotSet,
initial_project_id: ActiveValue::NotSet,
}])
.on_conflict(
OnConflict::columns([room_participant::Column::UserId])
.update_columns([
room_participant::Column::AnsweringConnectionId,
room_participant::Column::AnsweringConnectionServerId,
room_participant::Column::AnsweringConnectionLost,
room_participant::Column::ParticipantIndex,
room_participant::Column::Role,
])
.to_owned(),
)
.exec(tx)
.await?;
let (channel, room) = self.get_channel_room(room_id, &tx).await?;
let channel = channel.ok_or_else(|| anyhow!("no channel for room"))?;

View File

@@ -1,5 +1,4 @@
use super::Database;
use crate::db::ExtensionVersionConstraints;
use crate::{
db::{queries::extensions::convert_time_to_chrono, ExtensionMetadata, NewExtensionVersion},
test_both_dbs,
@@ -279,108 +278,3 @@ async fn test_extensions(db: &Arc<Database>) {
]
);
}
test_both_dbs!(
test_extensions_by_id,
test_extensions_by_id_postgres,
test_extensions_by_id_sqlite
);
async fn test_extensions_by_id(db: &Arc<Database>) {
let versions = db.get_known_extension_versions().await.unwrap();
assert!(versions.is_empty());
let extensions = db.get_extensions(None, 1, 5).await.unwrap();
assert!(extensions.is_empty());
let t0 = time::OffsetDateTime::from_unix_timestamp_nanos(0).unwrap();
let t0 = time::PrimitiveDateTime::new(t0.date(), t0.time());
let t0_chrono = convert_time_to_chrono(t0);
db.insert_extension_versions(
&[
(
"ext1",
vec![
NewExtensionVersion {
name: "Extension 1".into(),
version: semver::Version::parse("0.0.1").unwrap(),
description: "an extension".into(),
authors: vec!["max".into()],
repository: "ext1/repo".into(),
schema_version: 1,
wasm_api_version: Some("0.0.4".into()),
published_at: t0,
},
NewExtensionVersion {
name: "Extension 1".into(),
version: semver::Version::parse("0.0.2").unwrap(),
description: "a good extension".into(),
authors: vec!["max".into()],
repository: "ext1/repo".into(),
schema_version: 1,
wasm_api_version: Some("0.0.4".into()),
published_at: t0,
},
NewExtensionVersion {
name: "Extension 1".into(),
version: semver::Version::parse("0.0.3").unwrap(),
description: "a real good extension".into(),
authors: vec!["max".into(), "marshall".into()],
repository: "ext1/repo".into(),
schema_version: 1,
wasm_api_version: Some("0.0.5".into()),
published_at: t0,
},
],
),
(
"ext2",
vec![NewExtensionVersion {
name: "Extension 2".into(),
version: semver::Version::parse("0.2.0").unwrap(),
description: "a great extension".into(),
authors: vec!["marshall".into()],
repository: "ext2/repo".into(),
schema_version: 0,
wasm_api_version: None,
published_at: t0,
}],
),
]
.into_iter()
.collect(),
)
.await
.unwrap();
let extensions = db
.get_extensions_by_ids(
&["ext1"],
Some(&ExtensionVersionConstraints {
schema_versions: 1..=1,
wasm_api_versions: "0.0.1".parse().unwrap()..="0.0.4".parse().unwrap(),
}),
)
.await
.unwrap();
assert_eq!(
extensions,
&[ExtensionMetadata {
id: "ext1".into(),
manifest: rpc::ExtensionApiManifest {
name: "Extension 1".into(),
version: "0.0.2".into(),
authors: vec!["max".into()],
description: Some("a good extension".into()),
repository: "ext1/repo".into(),
schema_version: Some(1),
wasm_api_version: Some("0.0.4".into()),
},
published_at: t0_chrono,
download_count: 0,
}]
);
}

View File

@@ -134,7 +134,6 @@ pub struct Config {
pub zed_environment: Arc<str>,
pub openai_api_key: Option<Arc<str>>,
pub google_ai_api_key: Option<Arc<str>>,
pub anthropic_api_key: Option<Arc<str>>,
pub zed_client_checksum_seed: Option<String>,
pub slack_panics_webhook: Option<String>,
pub auto_join_channel_id: Option<ChannelId>,

View File

@@ -137,38 +137,18 @@ async fn main() -> Result<()> {
);
#[cfg(unix)]
let signal = async move {
let mut sigterm = tokio::signal::unix::signal(SignalKind::terminate())
.expect("failed to listen for interrupt signal");
let mut sigint = tokio::signal::unix::signal(SignalKind::interrupt())
.expect("failed to listen for interrupt signal");
let sigterm = sigterm.recv();
let sigint = sigint.recv();
futures::pin_mut!(sigterm, sigint);
futures::future::select(sigterm, sigint).await;
};
#[cfg(windows)]
let signal = async move {
// todo(windows):
// `ctrl_close` does not work well, because tokio's signal handler always returns soon,
// but system termiates the application soon after returning CTRL+CLOSE handler.
// So we should implement blocking handler to treat CTRL+CLOSE signal.
let mut ctrl_break = tokio::signal::windows::ctrl_break()
.expect("failed to listen for interrupt signal");
let mut ctrl_c = tokio::signal::windows::ctrl_c()
.expect("failed to listen for interrupt signal");
let ctrl_break = ctrl_break.recv();
let ctrl_c = ctrl_c.recv();
futures::pin_mut!(ctrl_break, ctrl_c);
futures::future::select(ctrl_break, ctrl_c).await;
};
axum::Server::from_tcp(listener)
.map_err(|e| anyhow!(e))?
.serve(app.into_make_service_with_connect_info::<SocketAddr>())
.with_graceful_shutdown(async move {
signal.await;
let mut sigterm = tokio::signal::unix::signal(SignalKind::terminate())
.expect("failed to listen for interrupt signal");
let mut sigint = tokio::signal::unix::signal(SignalKind::interrupt())
.expect("failed to listen for interrupt signal");
let sigterm = sigterm.recv();
let sigint = sigint.recv();
futures::pin_mut!(sigterm, sigint);
futures::future::select(sigterm, sigint).await;
tracing::info!("Received interrupt signal");
if let Some(rpc_server) = rpc_server {
@@ -177,6 +157,10 @@ async fn main() -> Result<()> {
})
.await
.map_err(|e| anyhow!(e))?;
// todo("windows")
#[cfg(windows)]
unimplemented!();
}
_ => {
Err(anyhow!(

View File

@@ -46,7 +46,6 @@ use rpc::{
},
Connection, ConnectionId, ErrorCode, ErrorCodeExt, ErrorExt, Peer, Receipt, TypedEnvelope,
};
use semantic_version::SemanticVersion;
use serde::{Serialize, Serializer};
use std::{
any::TypeId,
@@ -69,7 +68,7 @@ use tracing::{
field::{self},
info_span, instrument, Instrument,
};
use util::http::IsahcHttpClient;
use util::{http::IsahcHttpClient, SemanticVersion};
pub const RECONNECT_TIMEOUT: Duration = Duration::from_secs(30);
@@ -367,8 +366,6 @@ impl Server {
.add_request_handler(forward_mutating_project_request::<proto::ExpandProjectEntry>)
.add_request_handler(forward_mutating_project_request::<proto::OnTypeFormatting>)
.add_request_handler(forward_mutating_project_request::<proto::SaveBuffer>)
.add_request_handler(forward_mutating_project_request::<proto::BlameBuffer>)
.add_request_handler(forward_mutating_project_request::<proto::MultiLspQuery>)
.add_message_handler(create_buffer_for_peer)
.add_request_handler(update_buffer)
.add_message_handler(broadcast_project_message_from_host::<proto::RefreshInlayHints>)
@@ -420,7 +417,6 @@ impl Server {
session,
app_state.config.openai_api_key.clone(),
app_state.config.google_ai_api_key.clone(),
app_state.config.anthropic_api_key.clone(),
)
}
})
@@ -735,13 +731,7 @@ impl Server {
executor: Executor,
) -> impl Future<Output = ()> {
let this = self.clone();
let span = info_span!("handle connection", %address,
connection_id=field::Empty,
user_id=field::Empty,
login=field::Empty,
impersonator=field::Empty,
dev_server_id=field::Empty
);
let span = info_span!("handle connection", %address, impersonator = field::Empty, connection_id = field::Empty);
principal.update_span(&span);
let mut teardown = self.teardown.subscribe();
@@ -819,12 +809,7 @@ impl Server {
let type_name = message.payload_type_name();
// note: we copy all the fields from the parent span so we can query them in the logs.
// (https://github.com/tokio-rs/tracing/issues/2670).
let span = tracing::info_span!("receive message", %connection_id, %address, type_name,
user_id=field::Empty,
login=field::Empty,
impersonator=field::Empty,
dev_server_id=field::Empty
);
let span = tracing::info_span!("receive message", %connection_id, %address, type_name);
principal.update_span(&span);
let span_enter = span.enter();
if let Some(handler) = this.handlers.get(&message.payload_type_id()) {
@@ -1204,7 +1189,7 @@ async fn connection_lost(
_ = executor.sleep(RECONNECT_TIMEOUT).fuse() => {
if let Some(session) = session.for_user() {
log::info!("connection lost, removing all resources for user:{}, connection:{:?}", session.user_id(), session.connection_id);
leave_room_for_session(&session, session.connection_id).await.trace_err();
leave_room_for_session(&session).await.trace_err();
leave_channel_buffers_for_session(&session)
.await
.trace_err();
@@ -1540,7 +1525,7 @@ async fn leave_room(
response: Response<proto::LeaveRoom>,
session: UserSession,
) -> Result<()> {
leave_room_for_session(&session, session.connection_id).await?;
leave_room_for_session(&session).await?;
response.send(proto::Ack {})?;
Ok(())
}
@@ -3024,19 +3009,8 @@ async fn join_channel_internal(
session: UserSession,
) -> Result<()> {
let joined_room = {
let mut db = session.db().await;
// If zed quits without leaving the room, and the user re-opens zed before the
// RECONNECT_TIMEOUT, we need to make sure that we kick the user out of the previous
// room they were in.
if let Some(connection) = db.stale_room_connection(session.user_id()).await? {
tracing::info!(
stale_connection_id = %connection,
"cleaning up stale connection",
);
drop(db);
leave_room_for_session(&session, connection).await?;
db = session.db().await;
}
leave_room_for_session(&session).await?;
let db = session.db().await;
let (joined_room, membership_updated, role) = db
.join_channel(channel_id, session.user_id(), session.connection_id)
@@ -3389,30 +3363,14 @@ async fn remove_channel_message(
) -> Result<()> {
let channel_id = ChannelId::from_proto(request.channel_id);
let message_id = MessageId::from_proto(request.message_id);
let (connection_ids, existing_notification_ids) = session
let connection_ids = session
.db()
.await
.remove_channel_message(channel_id, message_id, session.user_id())
.await?;
broadcast(
Some(session.connection_id),
connection_ids,
move |connection| {
session.peer.send(connection, request.clone())?;
for notification_id in &existing_notification_ids {
session.peer.send(
connection,
proto::DeleteNotification {
notification_id: (*notification_id).to_proto(),
},
)?;
}
Ok(())
},
);
broadcast(Some(session.connection_id), connection_ids, |connection| {
session.peer.send(connection, request.clone())
});
response.send(proto::Ack {})?;
Ok(())
}
@@ -3431,8 +3389,6 @@ async fn update_channel_message(
notifications,
reply_to_message_id,
timestamp,
deleted_mention_notification_ids,
updated_mention_notifications,
} = session
.db()
.await
@@ -3475,27 +3431,7 @@ async fn update_channel_message(
channel_id: channel_id.to_proto(),
message: Some(message.clone()),
},
)?;
for notification_id in &deleted_mention_notification_ids {
session.peer.send(
connection,
proto::DeleteNotification {
notification_id: (*notification_id).to_proto(),
},
)?;
}
for notification in &updated_mention_notifications {
session.peer.send(
connection,
proto::UpdateNotification {
notification: Some(notification.clone()),
},
)?;
}
Ok(())
)
},
);
@@ -3568,7 +3504,6 @@ async fn complete_with_language_model(
session: Session,
open_ai_api_key: Option<Arc<str>>,
google_ai_api_key: Option<Arc<str>>,
anthropic_api_key: Option<Arc<str>>,
) -> Result<()> {
let Some(session) = session.for_user() else {
return Err(anyhow!("user not found"))?;
@@ -3587,10 +3522,6 @@ async fn complete_with_language_model(
let api_key = google_ai_api_key
.ok_or_else(|| anyhow!("no Google AI API key configured on the server"))?;
complete_with_google_ai(request, response, session, api_key).await?;
} else if request.model.starts_with("claude") {
let api_key = anthropic_api_key
.ok_or_else(|| anyhow!("no Anthropic AI API key configured on the server"))?;
complete_with_anthropic(request, response, session, api_key).await?;
}
Ok(())
@@ -3688,121 +3619,6 @@ async fn complete_with_google_ai(
Ok(())
}
async fn complete_with_anthropic(
request: proto::CompleteWithLanguageModel,
response: StreamingResponse<proto::CompleteWithLanguageModel>,
session: UserSession,
api_key: Arc<str>,
) -> Result<()> {
let model = anthropic::Model::from_id(&request.model)?;
let mut system_message = String::new();
let messages = request
.messages
.into_iter()
.filter_map(|message| match message.role() {
LanguageModelRole::LanguageModelUser => Some(anthropic::RequestMessage {
role: anthropic::Role::User,
content: message.content,
}),
LanguageModelRole::LanguageModelAssistant => Some(anthropic::RequestMessage {
role: anthropic::Role::Assistant,
content: message.content,
}),
// Anthropic's API breaks system instructions out as a separate field rather
// than having a system message role.
LanguageModelRole::LanguageModelSystem => {
if !system_message.is_empty() {
system_message.push_str("\n\n");
}
system_message.push_str(&message.content);
None
}
})
.collect();
let mut stream = anthropic::stream_completion(
&session.http_client,
"https://api.anthropic.com",
&api_key,
anthropic::Request {
model,
messages,
stream: true,
system: system_message,
max_tokens: 4092,
},
)
.await?;
let mut current_role = proto::LanguageModelRole::LanguageModelAssistant;
while let Some(event) = stream.next().await {
let event = event?;
match event {
anthropic::ResponseEvent::MessageStart { message } => {
if let Some(role) = message.role {
if role == "assistant" {
current_role = proto::LanguageModelRole::LanguageModelAssistant;
} else if role == "user" {
current_role = proto::LanguageModelRole::LanguageModelUser;
}
}
}
anthropic::ResponseEvent::ContentBlockStart { content_block, .. } => {
match content_block {
anthropic::ContentBlock::Text { text } => {
if !text.is_empty() {
response.send(proto::LanguageModelResponse {
choices: vec![proto::LanguageModelChoiceDelta {
index: 0,
delta: Some(proto::LanguageModelResponseMessage {
role: Some(current_role as i32),
content: Some(text),
}),
finish_reason: None,
}],
})?;
}
}
}
}
anthropic::ResponseEvent::ContentBlockDelta { delta, .. } => match delta {
anthropic::TextDelta::TextDelta { text } => {
response.send(proto::LanguageModelResponse {
choices: vec![proto::LanguageModelChoiceDelta {
index: 0,
delta: Some(proto::LanguageModelResponseMessage {
role: Some(current_role as i32),
content: Some(text),
}),
finish_reason: None,
}],
})?;
}
},
anthropic::ResponseEvent::MessageDelta { delta, .. } => {
if let Some(stop_reason) = delta.stop_reason {
response.send(proto::LanguageModelResponse {
choices: vec![proto::LanguageModelChoiceDelta {
index: 0,
delta: None,
finish_reason: Some(stop_reason),
}],
})?;
}
}
anthropic::ResponseEvent::ContentBlockStop { .. } => {}
anthropic::ResponseEvent::MessageStop {} => {}
anthropic::ResponseEvent::Ping {} => {}
}
}
Ok(())
}
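
A small self-contained illustration of the system-prompt handling noted in the comment above: system-role turns are concatenated into a single `system` string and only user/assistant turns remain as chat messages. The types here are simplified stand-ins for the proto and anthropic types used in the real handler.

#[derive(Debug, PartialEq)]
enum Role {
    System,
    User,
    Assistant,
}

#[derive(Debug)]
struct Message {
    role: Role,
    content: String,
}

// Mirrors the filter_map above: pull system turns out into a separate string.
fn split_system_prompt(messages: Vec<Message>) -> (String, Vec<Message>) {
    let mut system_message = String::new();
    let mut chat_messages = Vec::new();
    for message in messages {
        match message.role {
            Role::System => {
                if !system_message.is_empty() {
                    system_message.push_str("\n\n");
                }
                system_message.push_str(&message.content);
            }
            _ => chat_messages.push(message),
        }
    }
    (system_message, chat_messages)
}

fn main() {
    let (system, chat) = split_system_prompt(vec![
        Message { role: Role::System, content: "You are a coding assistant.".into() },
        Message { role: Role::User, content: "Hello".into() },
        Message { role: Role::Assistant, content: "Hi!".into() },
    ]);
    println!("system: {system:?}");
    println!("chat: {chat:?}");
}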
struct CountTokensWithLanguageModelRateLimit;
impl RateLimit for CountTokensWithLanguageModelRateLimit {
@@ -4249,7 +4065,7 @@ async fn update_user_contacts(user_id: UserId, session: &Session) -> Result<()>
Ok(())
}
async fn leave_room_for_session(session: &UserSession, connection_id: ConnectionId) -> Result<()> {
async fn leave_room_for_session(session: &UserSession) -> Result<()> {
let mut contacts_to_update = HashSet::default();
let room_id;
@@ -4259,7 +4075,7 @@ async fn leave_room_for_session(session: &UserSession, connection_id: Connection
let room;
let channel;
if let Some(mut left_room) = session.db().await.leave_room(connection_id).await? {
if let Some(mut left_room) = session.db().await.leave_room(session.connection_id).await? {
contacts_to_update.insert(session.user_id());
for project in left_room.left_projects.values() {

View File

@@ -2,10 +2,9 @@ use crate::db::{ChannelId, ChannelRole, UserId};
use anyhow::{anyhow, Result};
use collections::{BTreeMap, HashMap, HashSet};
use rpc::ConnectionId;
use semantic_version::SemanticVersion;
use serde::Serialize;
use std::fmt;
use tracing::instrument;
use util::{semver, SemanticVersion};
#[derive(Default, Serialize)]
pub struct ConnectionPool {
@@ -21,6 +20,7 @@ struct ConnectedUser {
#[derive(Debug, Serialize)]
pub struct ZedVersion(pub SemanticVersion);
use std::fmt;
impl fmt::Display for ZedVersion {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
@@ -30,7 +30,7 @@ impl fmt::Display for ZedVersion {
impl ZedVersion {
pub fn can_collaborate(&self) -> bool {
self.0 >= SemanticVersion::new(0, 127, 3)
self.0 >= semver(0, 127, 3)
}
}

View File

@@ -222,18 +222,8 @@ async fn test_remove_channel_message(
.update(cx_a, |c, cx| c.send_message("one".into(), cx).unwrap())
.await
.unwrap();
let msg_id_2 = channel_chat_a
.update(cx_a, |c, cx| {
c.send_message(
MessageParams {
text: "two @user_b".to_string(),
mentions: vec![(4..12, client_b.id())],
reply_to_message_id: None,
},
cx,
)
.unwrap()
})
channel_chat_a
.update(cx_a, |c, cx| c.send_message("two".into(), cx).unwrap())
.await
.unwrap();
channel_chat_a
@@ -243,24 +233,10 @@ async fn test_remove_channel_message(
// Clients A and B see all of the messages.
executor.run_until_parked();
let expected_messages = &["one", "two @user_b", "three"];
let expected_messages = &["one", "two", "three"];
assert_messages(&channel_chat_a, expected_messages, cx_a);
assert_messages(&channel_chat_b, expected_messages, cx_b);
// Ensure that client B received a notification for the mention.
client_b.notification_store().read_with(cx_b, |store, _| {
assert_eq!(store.notification_count(), 2);
let entry = store.notification_at(0).unwrap();
assert_eq!(
entry.notification,
Notification::ChannelMessageMention {
message_id: msg_id_2,
sender_id: client_a.id(),
channel_id: channel_id.0,
}
);
});
// Client A deletes one of their messages.
channel_chat_a
.update(cx_a, |c, cx| {
@@ -285,13 +261,6 @@ async fn test_remove_channel_message(
.await
.unwrap();
assert_messages(&channel_chat_c, expected_messages, cx_c);
// Ensure we remove the notifications when the message is removed
client_b.notification_store().read_with(cx_b, |store, _| {
// First notification is the channel invitation, second would be the mention
// notification, which should now be removed.
assert_eq!(store.notification_count(), 1);
});
}
#[track_caller]
@@ -629,97 +598,4 @@ async fn test_chat_editing(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext)
}
);
});
// Test update message and keep the mention and check that the body is updated correctly
channel_chat_a
.update(cx_a, |c, cx| {
c.update_message(
msg_id,
MessageParams {
text: "Updated body v2 including a mention for @user_b".into(),
reply_to_message_id: None,
mentions: vec![(37..45, client_b.id())],
},
cx,
)
.unwrap()
})
.await
.unwrap();
cx_a.run_until_parked();
cx_b.run_until_parked();
channel_chat_a.update(cx_a, |channel_chat, _| {
assert_eq!(
channel_chat.find_loaded_message(msg_id).unwrap().body,
"Updated body v2 including a mention for @user_b",
)
});
channel_chat_b.update(cx_b, |channel_chat, _| {
assert_eq!(
channel_chat.find_loaded_message(msg_id).unwrap().body,
"Updated body v2 including a mention for @user_b",
)
});
client_b.notification_store().read_with(cx_b, |store, _| {
let message = store.channel_message_for_id(msg_id);
assert!(message.is_some());
assert_eq!(
message.unwrap().body,
"Updated body v2 including a mention for @user_b"
);
assert_eq!(store.notification_count(), 2);
let entry = store.notification_at(0).unwrap();
assert_eq!(
entry.notification,
Notification::ChannelMessageMention {
message_id: msg_id,
sender_id: client_a.id(),
channel_id: channel_id.0,
}
);
});
// If we remove a mention from a message the corresponding mention notification
// should also be removed.
channel_chat_a
.update(cx_a, |c, cx| {
c.update_message(
msg_id,
MessageParams {
text: "Updated body without a mention".into(),
reply_to_message_id: None,
mentions: vec![],
},
cx,
)
.unwrap()
})
.await
.unwrap();
cx_a.run_until_parked();
cx_b.run_until_parked();
channel_chat_a.update(cx_a, |channel_chat, _| {
assert_eq!(
channel_chat.find_loaded_message(msg_id).unwrap().body,
"Updated body without a mention",
)
});
channel_chat_b.update(cx_b, |channel_chat, _| {
assert_eq!(
channel_chat.find_loaded_message(msg_id).unwrap().body,
"Updated body without a mention",
)
});
client_b.notification_store().read_with(cx_b, |store, _| {
// First notification is the channel invitation, second would be the mention
// notification, which should now be removed.
assert_eq!(store.notification_count(), 1);
});
}


@@ -23,7 +23,6 @@ use rpc::RECEIVE_TIMEOUT;
use serde_json::json;
use settings::SettingsStore;
use std::{
ops::Range,
path::Path,
sync::{
atomic::{self, AtomicBool, AtomicUsize},
@@ -1987,187 +1986,6 @@ struct Row10;"#};
struct Row1220;"#});
}
#[gpui::test(iterations = 10)]
async fn test_git_blame_is_forwarded(cx_a: &mut TestAppContext, cx_b: &mut TestAppContext) {
let mut server = TestServer::start(cx_a.executor()).await;
let client_a = server.create_client(cx_a, "user_a").await;
let client_b = server.create_client(cx_b, "user_b").await;
server
.create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
.await;
let active_call_a = cx_a.read(ActiveCall::global);
cx_a.update(editor::init);
cx_b.update(editor::init);
client_a
.fs()
.insert_tree(
"/my-repo",
json!({
".git": {},
"file.txt": "line1\nline2\nline3\nline\n",
}),
)
.await;
let blame = git::blame::Blame {
entries: vec![
blame_entry("1b1b1b", 0..1),
blame_entry("0d0d0d", 1..2),
blame_entry("3a3a3a", 2..3),
blame_entry("4c4c4c", 3..4),
],
permalinks: [
("1b1b1b", "http://example.com/codehost/idx-0"),
("0d0d0d", "http://example.com/codehost/idx-1"),
("3a3a3a", "http://example.com/codehost/idx-2"),
("4c4c4c", "http://example.com/codehost/idx-3"),
]
.into_iter()
.map(|(sha, url)| (sha.parse().unwrap(), url.parse().unwrap()))
.collect(),
messages: [
("1b1b1b", "message for idx-0"),
("0d0d0d", "message for idx-1"),
("3a3a3a", "message for idx-2"),
("4c4c4c", "message for idx-3"),
]
.into_iter()
.map(|(sha, message)| (sha.parse().unwrap(), message.into()))
.collect(),
};
client_a.fs().set_blame_for_repo(
Path::new("/my-repo/.git"),
vec![(Path::new("file.txt"), blame)],
);
let (project_a, worktree_id) = client_a.build_local_project("/my-repo", cx_a).await;
let project_id = active_call_a
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
// Create editor_a
let (workspace_a, cx_a) = client_a.build_workspace(&project_a, cx_a);
let editor_a = workspace_a
.update(cx_a, |workspace, cx| {
workspace.open_path((worktree_id, "file.txt"), None, true, cx)
})
.await
.unwrap()
.downcast::<Editor>()
.unwrap();
// Join the project as client B.
let project_b = client_b.build_remote_project(project_id, cx_b).await;
let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b);
let editor_b = workspace_b
.update(cx_b, |workspace, cx| {
workspace.open_path((worktree_id, "file.txt"), None, true, cx)
})
.await
.unwrap()
.downcast::<Editor>()
.unwrap();
// client_b now requests git blame for the open buffer
editor_b.update(cx_b, |editor_b, cx| {
assert!(editor_b.blame().is_none());
editor_b.toggle_git_blame(&editor::actions::ToggleGitBlame {}, cx);
});
cx_a.executor().run_until_parked();
cx_b.executor().run_until_parked();
editor_b.update(cx_b, |editor_b, cx| {
let blame = editor_b.blame().expect("editor_b should have blame now");
let entries = blame.update(cx, |blame, cx| {
blame
.blame_for_rows((0..4).map(Some), cx)
.collect::<Vec<_>>()
});
assert_eq!(
entries,
vec![
Some(blame_entry("1b1b1b", 0..1)),
Some(blame_entry("0d0d0d", 1..2)),
Some(blame_entry("3a3a3a", 2..3)),
Some(blame_entry("4c4c4c", 3..4)),
]
);
blame.update(cx, |blame, _| {
for (idx, entry) in entries.iter().flatten().enumerate() {
assert_eq!(
blame.permalink_for_entry(entry).unwrap().to_string(),
format!("http://example.com/codehost/idx-{}", idx)
);
assert_eq!(
blame.message_for_entry(entry).unwrap(),
format!("message for idx-{}", idx)
);
}
});
});
// editor_b updates the file, which gets sent to client_a, which updates git blame,
// which gets back to client_b.
editor_b.update(cx_b, |editor_b, cx| {
editor_b.edit([(Point::new(0, 3)..Point::new(0, 3), "FOO")], cx);
});
cx_a.executor().run_until_parked();
cx_b.executor().run_until_parked();
editor_b.update(cx_b, |editor_b, cx| {
let blame = editor_b.blame().expect("editor_b should have blame now");
let entries = blame.update(cx, |blame, cx| {
blame
.blame_for_rows((0..4).map(Some), cx)
.collect::<Vec<_>>()
});
assert_eq!(
entries,
vec![
None,
Some(blame_entry("0d0d0d", 1..2)),
Some(blame_entry("3a3a3a", 2..3)),
Some(blame_entry("4c4c4c", 3..4)),
]
);
});
// Now editor_a also updates the file
editor_a.update(cx_a, |editor_a, cx| {
editor_a.edit([(Point::new(1, 3)..Point::new(1, 3), "FOO")], cx);
});
cx_a.executor().run_until_parked();
cx_b.executor().run_until_parked();
editor_b.update(cx_b, |editor_b, cx| {
let blame = editor_b.blame().expect("editor_b should have blame now");
let entries = blame.update(cx, |blame, cx| {
blame
.blame_for_rows((0..4).map(Some), cx)
.collect::<Vec<_>>()
});
assert_eq!(
entries,
vec![
None,
None,
Some(blame_entry("3a3a3a", 2..3)),
Some(blame_entry("4c4c4c", 3..4)),
]
);
});
}
fn extract_hint_labels(editor: &Editor) -> Vec<String> {
let mut labels = Vec::new();
for hint in editor.inlay_hint_cache().hints() {
@@ -2178,11 +1996,3 @@ fn extract_hint_labels(editor: &Editor) -> Vec<String> {
}
labels
}
fn blame_entry(sha: &str, range: Range<u32>) -> git::blame::BlameEntry {
git::blame::BlameEntry {
sha: sha.parse().unwrap(),
range,
..Default::default()
}
}


@@ -2007,7 +2007,7 @@ async fn test_following_to_channel_notes_without_a_shared_project(
});
}
pub(crate) async fn join_channel(
async fn join_channel(
channel_id: ChannelId,
client: &TestClient,
cx: &mut TestAppContext,


@@ -1,9 +1,6 @@
use crate::{
rpc::{CLEANUP_TIMEOUT, RECONNECT_TIMEOUT},
tests::{
channel_id, following_tests::join_channel, room_participants, rust_lang, RoomParticipants,
TestClient, TestServer,
},
tests::{channel_id, room_participants, rust_lang, RoomParticipants, TestClient, TestServer},
};
use call::{room, ActiveCall, ParticipantLocation, Room};
use client::{User, RECEIVE_TIMEOUT};
@@ -1866,24 +1863,6 @@ async fn test_active_call_events(
executor.run_until_parked();
assert_eq!(mem::take(&mut *events_a.borrow_mut()), vec![]);
assert_eq!(mem::take(&mut *events_b.borrow_mut()), vec![]);
// Unsharing a project should dispatch the RemoteProjectUnshared event.
active_call_a
.update(cx_a, |call, cx| call.hang_up(cx))
.await
.unwrap();
executor.run_until_parked();
assert_eq!(
mem::take(&mut *events_a.borrow_mut()),
vec![room::Event::RoomLeft { channel_id: None }]
);
assert_eq!(
mem::take(&mut *events_b.borrow_mut()),
vec![room::Event::RemoteProjectUnshared {
project_id: project_a_id,
}]
);
}
fn active_call_events(cx: &mut TestAppContext) -> Rc<RefCell<Vec<room::Event>>> {
@@ -4659,16 +4638,9 @@ async fn test_references(
let active_call_a = cx_a.read(ActiveCall::global);
client_a.language_registry().add(rust_lang());
let mut fake_language_servers = client_a.language_registry().register_fake_lsp_adapter(
"Rust",
FakeLspAdapter {
capabilities: lsp::ServerCapabilities {
references_provider: Some(lsp::OneOf::Left(true)),
..Default::default()
},
..Default::default()
},
);
let mut fake_language_servers = client_a
.language_registry()
.register_fake_lsp_adapter("Rust", Default::default());
client_a
.fs()
@@ -4959,35 +4931,9 @@ async fn test_lsp_hover(
.await;
client_a.language_registry().add(rust_lang());
let language_server_names = ["rust-analyzer", "CrabLang-ls"];
let mut fake_language_servers = client_a
.language_registry()
.register_specific_fake_lsp_adapter(
"Rust",
true,
FakeLspAdapter {
name: "rust-analyzer",
capabilities: lsp::ServerCapabilities {
hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
..lsp::ServerCapabilities::default()
},
..FakeLspAdapter::default()
},
);
let _other_server = client_a
.language_registry()
.register_specific_fake_lsp_adapter(
"Rust",
false,
FakeLspAdapter {
name: "CrabLang-ls",
capabilities: lsp::ServerCapabilities {
hover_provider: Some(lsp::HoverProviderCapability::Simple(true)),
..lsp::ServerCapabilities::default()
},
..FakeLspAdapter::default()
},
);
.register_fake_lsp_adapter("Rust", Default::default());
let (project_a, worktree_id) = client_a.build_local_project("/root-1", cx_a).await;
let project_id = active_call_a
@@ -5000,133 +4946,62 @@ async fn test_lsp_hover(
let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "main.rs"), cx));
let buffer_b = cx_b.executor().spawn(open_buffer).await.unwrap();
let mut servers_with_hover_requests = HashMap::default();
for i in 0..language_server_names.len() {
let new_server = fake_language_servers.next().await.unwrap_or_else(|| {
panic!(
"Failed to get language server #{i} with name {}",
&language_server_names[i]
)
});
let new_server_name = new_server.server.name();
assert!(
!servers_with_hover_requests.contains_key(new_server_name),
"Unexpected: initialized server with the same name twice. Name: `{new_server_name}`"
);
let new_server_name = new_server_name.to_string();
match new_server_name.as_str() {
"CrabLang-ls" => {
servers_with_hover_requests.insert(
new_server_name.clone(),
new_server.handle_request::<lsp::request::HoverRequest, _, _>(
move |params, _| {
assert_eq!(
params
.text_document_position_params
.text_document
.uri
.as_str(),
"file:///root-1/main.rs"
);
let name = new_server_name.clone();
async move {
Ok(Some(lsp::Hover {
contents: lsp::HoverContents::Scalar(
lsp::MarkedString::String(format!("{name} hover")),
),
range: None,
}))
}
},
),
);
}
"rust-analyzer" => {
servers_with_hover_requests.insert(
new_server_name.clone(),
new_server.handle_request::<lsp::request::HoverRequest, _, _>(
|params, _| async move {
assert_eq!(
params
.text_document_position_params
.text_document
.uri
.as_str(),
"file:///root-1/main.rs"
);
assert_eq!(
params.text_document_position_params.position,
lsp::Position::new(0, 22)
);
Ok(Some(lsp::Hover {
contents: lsp::HoverContents::Array(vec![
lsp::MarkedString::String("Test hover content.".to_string()),
lsp::MarkedString::LanguageString(lsp::LanguageString {
language: "Rust".to_string(),
value: "let foo = 42;".to_string(),
}),
]),
range: Some(lsp::Range::new(
lsp::Position::new(0, 22),
lsp::Position::new(0, 29),
)),
}))
},
),
);
}
unexpected => panic!("Unexpected server name: {unexpected}"),
}
}
// Request hover information as the guest.
let mut hovers = project_b
.update(cx_b, |p, cx| p.hover(&buffer_b, 22, cx))
.await;
assert_eq!(
hovers.len(),
2,
"Expected two hovers from both language servers, but got: {hovers:?}"
);
let _: Vec<()> = futures::future::join_all(servers_with_hover_requests.into_values().map(
|mut hover_request| async move {
hover_request
.next()
.await
.expect("All hover requests should have been triggered")
let fake_language_server = fake_language_servers.next().await.unwrap();
fake_language_server.handle_request::<lsp::request::HoverRequest, _, _>(
|params, _| async move {
assert_eq!(
params
.text_document_position_params
.text_document
.uri
.as_str(),
"file:///root-1/main.rs"
);
assert_eq!(
params.text_document_position_params.position,
lsp::Position::new(0, 22)
);
Ok(Some(lsp::Hover {
contents: lsp::HoverContents::Array(vec![
lsp::MarkedString::String("Test hover content.".to_string()),
lsp::MarkedString::LanguageString(lsp::LanguageString {
language: "Rust".to_string(),
value: "let foo = 42;".to_string(),
}),
]),
range: Some(lsp::Range::new(
lsp::Position::new(0, 22),
lsp::Position::new(0, 29),
)),
}))
},
))
.await;
);
let hover_info = project_b
.update(cx_b, |p, cx| p.hover(&buffer_b, 22, cx))
.await
.unwrap()
.unwrap();
hovers.sort_by_key(|hover| hover.contents.len());
let first_hover = hovers.first().cloned().unwrap();
assert_eq!(
first_hover.contents,
vec![project::HoverBlock {
text: "CrabLang-ls hover".to_string(),
kind: HoverBlockKind::Markdown,
},]
);
let second_hover = hovers.last().cloned().unwrap();
assert_eq!(
second_hover.contents,
vec![
project::HoverBlock {
text: "Test hover content.".to_string(),
kind: HoverBlockKind::Markdown,
},
project::HoverBlock {
text: "let foo = 42;".to_string(),
kind: HoverBlockKind::Code {
language: "Rust".to_string()
},
}
]
);
buffer_b.read_with(cx_b, |buffer, _| {
let snapshot = buffer.snapshot();
assert_eq!(second_hover.range.unwrap().to_offset(&snapshot), 22..29);
assert_eq!(hover_info.range.unwrap().to_offset(&snapshot), 22..29);
assert_eq!(
hover_info.contents,
vec![
project::HoverBlock {
text: "Test hover content.".to_string(),
kind: HoverBlockKind::Markdown,
},
project::HoverBlock {
text: "let foo = 42;".to_string(),
kind: HoverBlockKind::Code {
language: "Rust".to_string()
},
}
]
);
});
}
@@ -6035,7 +5910,7 @@ async fn test_right_click_menu_behind_collab_panel(cx: &mut TestAppContext) {
#[gpui::test]
async fn test_cmd_k_left(cx: &mut TestAppContext) {
let (_, client) = TestServer::start1(cx).await;
let client = TestServer::start1(cx).await;
let (workspace, cx) = client.build_test_workspace(cx).await;
cx.simulate_keystrokes("cmd-n");
@@ -6055,16 +5930,3 @@ async fn test_cmd_k_left(cx: &mut TestAppContext) {
assert!(workspace.items(cx).collect::<Vec<_>>().len() == 2);
});
}
#[gpui::test]
async fn test_join_after_restart(cx1: &mut TestAppContext, cx2: &mut TestAppContext) {
let (mut server, client) = TestServer::start1(cx1).await;
let channel1 = server.make_public_channel("channel1", &client, cx1).await;
let channel2 = server.make_public_channel("channel2", &client, cx1).await;
join_channel(channel1, &client, cx1).await.unwrap();
drop(client);
let client2 = server.create_client(cx2, "user_a").await;
join_channel(channel2, &client2, cx2).await.unwrap();
}


@@ -832,7 +832,7 @@ impl RandomizedTest for ProjectCollaborationTest {
.boxed(),
LspRequestKind::CodeAction => project
.code_actions(&buffer, offset..offset, cx)
.map(|_| Ok(()))
.map_ok(|_| ())
.boxed(),
LspRequestKind::Definition => project
.definition(&buffer, offset, cx)


@@ -19,6 +19,7 @@ use futures::{channel::oneshot, StreamExt as _};
use gpui::{BackgroundExecutor, Context, Model, Task, TestAppContext, View, VisualTestContext};
use language::LanguageRegistry;
use node_runtime::FakeNodeRuntime;
use notifications::NotificationStore;
use parking_lot::Mutex;
use project::{Project, WorktreeId};
@@ -26,7 +27,6 @@ use rpc::{
proto::{self, ChannelRole},
RECEIVE_TIMEOUT,
};
use semantic_version::SemanticVersion;
use serde_json::json;
use settings::SettingsStore;
use std::{
@@ -39,7 +39,7 @@ use std::{
Arc,
},
};
use util::http::FakeHttpClient;
use util::{http::FakeHttpClient, SemanticVersion};
use workspace::{Workspace, WorkspaceId, WorkspaceStore};
pub struct TestServer {
@@ -135,10 +135,9 @@ impl TestServer {
(server, client_a, client_b, channel_id)
}
pub async fn start1(cx: &mut TestAppContext) -> (TestServer, TestClient) {
pub async fn start1(cx: &mut TestAppContext) -> TestClient {
let mut server = Self::start(cx.executor().clone()).await;
let client = server.create_client(cx, "user_a").await;
(server, client)
server.create_client(cx, "user_a").await
}
pub async fn reset(&self) {
@@ -513,7 +512,6 @@ impl TestServer {
blob_store_bucket: None,
openai_api_key: None,
google_ai_api_key: None,
anthropic_api_key: None,
clickhouse_url: None,
clickhouse_user: None,
clickhouse_password: None,


@@ -156,7 +156,7 @@ impl ChatPanel {
}
}
}
room::Event::RoomLeft { channel_id } => {
room::Event::Left { channel_id } => {
if channel_id == &this.channel_id(cx) {
cx.emit(PanelEvent::Close)
}
@@ -615,8 +615,6 @@ impl ChatPanel {
.child(
IconButton::new(("reply", message_id), IconName::ReplyArrowRight)
.on_click(cx.listener(move |this, _, cx| {
this.cancel_edit_message(cx);
this.message_editor.update(cx, |editor, cx| {
editor.set_reply_to_message_id(message_id);
editor.focus_handle(cx).focus(cx);
@@ -638,8 +636,6 @@ impl ChatPanel {
IconButton::new(("edit", message_id), IconName::Pencil)
.on_click(cx.listener(move |this, _, cx| {
this.message_editor.update(cx, |editor, cx| {
editor.clear_reply_to_message_id();
let message = this
.active_chat()
.and_then(|active_chat| {


@@ -9,12 +9,12 @@ use gpui::{
Render, SharedString, Task, TextStyle, View, ViewContext, WeakView, WhiteSpace,
};
use language::{
language_settings::SoftWrap, Anchor, Buffer, BufferSnapshot, CodeLabel, LanguageRegistry,
LanguageServerId, ToOffset,
language_settings::SoftWrap, Anchor, Buffer, BufferSnapshot, CodeLabel, Completion,
LanguageRegistry, LanguageServerId, ToOffset,
};
use lazy_static::lazy_static;
use parking_lot::RwLock;
use project::{search::SearchQuery, Completion};
use project::search::SearchQuery;
use settings::Settings;
use std::{ops::Range, sync::Arc, time::Duration};
use theme::ThemeSettings;
@@ -48,7 +48,7 @@ impl CompletionProvider for MessageEditorCompletionProvider {
buffer: &Model<Buffer>,
buffer_position: language::Anchor,
cx: &mut ViewContext<Editor>,
) -> Task<anyhow::Result<Vec<Completion>>> {
) -> Task<anyhow::Result<Vec<language::Completion>>> {
let Some(handle) = self.0.upgrade() else {
return Task::ready(Ok(Vec::new()));
};
@@ -60,7 +60,7 @@ impl CompletionProvider for MessageEditorCompletionProvider {
fn resolve_completions(
&self,
_completion_indices: Vec<usize>,
_completions: Arc<RwLock<Box<[Completion]>>>,
_completions: Arc<RwLock<Box<[language::Completion]>>>,
_cx: &mut ViewContext<Editor>,
) -> Task<anyhow::Result<bool>> {
Task::ready(Ok(false))


@@ -14,12 +14,12 @@ use db::kvp::KEY_VALUE_STORE;
use editor::{Editor, EditorElement, EditorStyle};
use fuzzy::{match_strings, StringMatchCandidate};
use gpui::{
actions, anchored, canvas, deferred, div, fill, list, point, prelude::*, px, AnyElement,
AppContext, AsyncWindowContext, Bounds, ClickEvent, ClipboardItem, DismissEvent, Div,
EventEmitter, FocusHandle, FocusableView, FontStyle, FontWeight, InteractiveElement,
IntoElement, ListOffset, ListState, Model, MouseDownEvent, ParentElement, Pixels, Point,
PromptLevel, Render, SharedString, Styled, Subscription, Task, TextStyle, View, ViewContext,
VisualContext, WeakView, WhiteSpace,
actions, canvas, div, fill, list, overlay, point, prelude::*, px, AnyElement, AppContext,
AsyncWindowContext, Bounds, ClickEvent, ClipboardItem, DismissEvent, Div, EventEmitter,
FocusHandle, FocusableView, FontStyle, FontWeight, InteractiveElement, IntoElement, ListOffset,
ListState, Model, MouseDownEvent, ParentElement, Pixels, Point, PromptLevel, Render,
SharedString, Styled, Subscription, Task, TextStyle, View, ViewContext, VisualContext,
WeakView, WhiteSpace,
};
use menu::{Cancel, Confirm, SecondaryConfirm, SelectNext, SelectPrev};
use project::{Fs, Project};
@@ -2767,13 +2767,10 @@ impl Render for CollabPanel {
self.render_signed_in(cx)
})
.children(self.context_menu.as_ref().map(|(menu, position, _)| {
deferred(
anchored()
.position(*position)
.anchor(gpui::AnchorCorner::TopLeft)
.child(menu.clone()),
)
.with_priority(1)
overlay()
.position(*position)
.anchor(gpui::AnchorCorner::TopLeft)
.child(menu.clone())
}))
}
}


@@ -5,9 +5,9 @@ use client::{
};
use fuzzy::{match_strings, StringMatchCandidate};
use gpui::{
actions, anchored, deferred, div, AppContext, ClipboardItem, DismissEvent, EventEmitter,
FocusableView, Model, ParentElement, Render, Styled, Subscription, Task, View, ViewContext,
VisualContext, WeakView,
actions, div, overlay, AppContext, ClipboardItem, DismissEvent, EventEmitter, FocusableView,
Model, ParentElement, Render, Styled, Subscription, Task, View, ViewContext, VisualContext,
WeakView,
};
use picker::{Picker, PickerDelegate};
use std::sync::Arc;
@@ -409,12 +409,9 @@ impl PickerDelegate for ChannelModalDelegate {
.children(
if let (Some((menu, _)), true) = (&self.context_menu, selected) {
Some(
deferred(
anchored()
.anchor(gpui::AnchorCorner::TopRight)
.child(menu.clone()),
)
.with_priority(1),
overlay()
.anchor(gpui::AnchorCorner::TopRight)
.child(menu.clone()),
)
} else {
None


@@ -13,8 +13,8 @@ use call::{report_call_event_for_room, ActiveCall};
pub use collab_panel::CollabPanel;
pub use collab_titlebar_item::CollabTitlebarItem;
use gpui::{
actions, point, AppContext, DevicePixels, Pixels, PlatformDisplay, Size, Task,
WindowBackgroundAppearance, WindowContext, WindowKind, WindowOptions,
actions, point, AppContext, DevicePixels, Pixels, PlatformDisplay, Size, Task, WindowContext,
WindowKind, WindowOptions,
};
use panel_settings::MessageEditorSettings;
pub use panel_settings::{
@@ -121,6 +121,5 @@ fn notification_window_options(
is_movable: false,
display_id: Some(screen.id()),
fullscreen: false,
window_background: WindowBackgroundAppearance::default(),
}
}


@@ -58,7 +58,7 @@ pub fn init(app_state: &Arc<AppState>, cx: &mut AppContext) {
}
}
room::Event::RoomLeft { .. } => {
room::Event::Left { .. } => {
for (_, windows) in notification_windows.drain() {
for window in windows {
window


@@ -2,7 +2,7 @@ use anyhow;
use gpui::Pixels;
use schemars::JsonSchema;
use serde_derive::{Deserialize, Serialize};
use settings::{Settings, SettingsSources};
use settings::Settings;
use workspace::dock::DockPosition;
#[derive(Deserialize, Debug)]
@@ -53,52 +53,48 @@ pub struct MessageEditorSettings {
impl Settings for CollaborationPanelSettings {
const KEY: Option<&'static str> = Some("collaboration_panel");
type FileContent = PanelSettingsContent;
fn load(
sources: SettingsSources<Self::FileContent>,
default_value: &Self::FileContent,
user_values: &[&Self::FileContent],
_: &mut gpui::AppContext,
) -> anyhow::Result<Self> {
sources.json_merge()
Self::load_via_json_merge(default_value, user_values)
}
}
impl Settings for ChatPanelSettings {
const KEY: Option<&'static str> = Some("chat_panel");
type FileContent = PanelSettingsContent;
fn load(
sources: SettingsSources<Self::FileContent>,
default_value: &Self::FileContent,
user_values: &[&Self::FileContent],
_: &mut gpui::AppContext,
) -> anyhow::Result<Self> {
sources.json_merge()
Self::load_via_json_merge(default_value, user_values)
}
}
impl Settings for NotificationPanelSettings {
const KEY: Option<&'static str> = Some("notification_panel");
type FileContent = PanelSettingsContent;
fn load(
sources: SettingsSources<Self::FileContent>,
default_value: &Self::FileContent,
user_values: &[&Self::FileContent],
_: &mut gpui::AppContext,
) -> anyhow::Result<Self> {
sources.json_merge()
Self::load_via_json_merge(default_value, user_values)
}
}
impl Settings for MessageEditorSettings {
const KEY: Option<&'static str> = Some("message_editor");
type FileContent = MessageEditorSettings;
fn load(
sources: SettingsSources<Self::FileContent>,
default_value: &Self::FileContent,
user_values: &[&Self::FileContent],
_: &mut gpui::AppContext,
) -> anyhow::Result<Self> {
sources.json_merge()
Self::load_via_json_merge(default_value, user_values)
}
}


@@ -10,6 +10,5 @@ pub type HashMap<K, V> = std::collections::HashMap<K, V>;
#[cfg(not(feature = "test-support"))]
pub type HashSet<T> = std::collections::HashSet<T>;
pub use rustc_hash::FxHasher;
pub use rustc_hash::{FxHashMap, FxHashSet};
pub use std::collections::*;


@@ -376,7 +376,6 @@ impl Copilot {
use node_runtime::FakeNodeRuntime;
let (server, fake_server) = FakeLanguageServer::new(
LanguageServerId(0),
LanguageServerBinary {
path: "path/to/copilot".into(),
arguments: vec![],
@@ -798,7 +797,7 @@ impl Copilot {
) -> Task<Result<()>> {
let server = match self.server.as_authenticated() {
Ok(server) => server,
Err(_) => return Task::ready(Ok(())),
Err(error) => return Task::ready(Err(error)),
};
let request =
server


@@ -32,6 +32,7 @@ use std::{
mem,
ops::Range,
path::PathBuf,
sync::Arc,
};
use theme::ActiveTheme;
pub use toolbar_controls::ToolbarControls;
@@ -804,7 +805,7 @@ impl Item for ProjectDiagnosticsEditor {
fn diagnostic_header_renderer(diagnostic: Diagnostic) -> RenderBlock {
let (message, code_ranges) = highlight_diagnostic_message(&diagnostic);
let message: SharedString = message;
Box::new(move |cx| {
Arc::new(move |cx| {
let highlight_style: HighlightStyle = cx.theme().colors().text_accent.into();
h_flex()
.id("diagnostic header")


@@ -1,8 +1,5 @@
use anyhow::Result;
use gpui::AppContext;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsSources};
#[derive(Deserialize, Debug)]
pub struct ProjectDiagnosticsSettings {
@@ -18,11 +15,18 @@ pub struct ProjectDiagnosticsSettingsContent {
include_warnings: Option<bool>,
}
impl Settings for ProjectDiagnosticsSettings {
impl settings::Settings for ProjectDiagnosticsSettings {
const KEY: Option<&'static str> = Some("diagnostics");
type FileContent = ProjectDiagnosticsSettingsContent;
fn load(sources: SettingsSources<Self::FileContent>, _: &mut AppContext) -> Result<Self> {
sources.json_merge()
fn load(
default_value: &Self::FileContent,
user_values: &[&Self::FileContent],
_cx: &mut gpui::AppContext,
) -> anyhow::Result<Self>
where
Self: Sized,
{
Self::load_via_json_merge(default_value, user_values)
}
}


@@ -61,8 +61,6 @@ smol.workspace = true
snippet.workspace = true
sum_tree.workspace = true
text.workspace = true
time.workspace = true
time_format.workspace = true
theme.workspace = true
tree-sitter-html = { workspace = true, optional = true }
tree-sitter-rust = { workspace = true, optional = true }


@@ -94,6 +94,12 @@ pub struct SelectDownByLines {
pub(super) lines: u32,
}
#[derive(PartialEq, Clone, Deserialize, Default)]
pub struct DuplicateLine {
#[serde(default)]
pub move_upwards: bool,
}
impl_actions!(
editor,
[
@@ -113,6 +119,7 @@ impl_actions!(
MoveDownByLines,
SelectUpByLines,
SelectDownByLines,
DuplicateLine
]
);
@@ -153,8 +160,6 @@ gpui::actions!(
DeleteToPreviousSubwordStart,
DeleteToPreviousWordStart,
DisplayCursorNames,
DuplicateLineUp,
DuplicateLineDown,
ExpandMacroRecursively,
FindAllReferences,
Fold,
@@ -244,7 +249,6 @@ gpui::actions!(
SplitSelectionIntoLines,
Tab,
TabPrev,
ToggleGitBlame,
ToggleInlayHints,
ToggleLineNumbers,
ToggleSoftWrap,


@@ -26,7 +26,7 @@ mod wrap_map;
use crate::EditorStyle;
use crate::{hover_links::InlayHighlight, movement::TextLayoutDetails, InlayId};
pub use block_map::{BlockMap, BlockPoint};
use collections::{HashMap, HashSet};
use collections::{BTreeMap, HashMap, HashSet};
use fold_map::FoldMap;
use gpui::{Font, HighlightStyle, Hsla, LineLayout, Model, ModelContext, Pixels, UnderlineStyle};
use inlay_map::InlayMap;
@@ -63,7 +63,7 @@ pub trait ToDisplayPoint {
}
type TextHighlights = TreeMap<Option<TypeId>, Arc<(HighlightStyle, Vec<Range<Anchor>>)>>;
type InlayHighlights = TreeMap<TypeId, TreeMap<InlayId, (HighlightStyle, InlayHighlight)>>;
type InlayHighlights = BTreeMap<TypeId, HashMap<InlayId, (HighlightStyle, InlayHighlight)>>;
/// Decides how text in a [`MultiBuffer`] should be displayed in a buffer, handling inlay hints,
/// folding, hard tabs, soft wrapping, custom blocks (like diagnostics), and highlighting.
@@ -157,7 +157,7 @@ impl DisplayMap {
);
}
pub fn fold<T: ToOffset<MultiBuffer>>(
pub fn fold<T: ToOffset>(
&mut self,
ranges: impl IntoIterator<Item = Range<T>>,
cx: &mut ModelContext<Self>,
@@ -180,7 +180,7 @@ impl DisplayMap {
self.block_map.read(snapshot, edits);
}
pub fn unfold<T: ToOffset<MultiBuffer>>(
pub fn unfold<T: ToOffset>(
&mut self,
ranges: impl IntoIterator<Item = Range<T>>,
inclusive: bool,
@@ -257,15 +257,10 @@ impl DisplayMap {
style: HighlightStyle,
) {
for highlight in highlights {
let update = self.inlay_highlights.update(&type_id, |highlights| {
highlights.insert(highlight.inlay, (style, highlight.clone()))
});
if update.is_none() {
self.inlay_highlights.insert(
type_id,
TreeMap::from_ordered_entries([(highlight.inlay, (style, highlight))]),
);
}
self.inlay_highlights
.entry(type_id)
.or_default()
.insert(highlight.inlay, (style, highlight));
}
}
@@ -359,7 +354,6 @@ pub struct HighlightedChunk<'a> {
pub is_tab: bool,
}
#[derive(Clone)]
pub struct DisplaySnapshot {
pub buffer_snapshot: MultiBufferSnapshot,
pub fold_snapshot: fold_map::FoldSnapshot,
@@ -433,10 +427,7 @@ impl DisplaySnapshot {
} else if range.start.row == self.max_buffer_row()
|| (range.end.column > 0 && range.end.row == self.max_buffer_row())
{
Point::new(
range.start.row - 1,
self.buffer_snapshot.line_len(range.start.row - 1),
)
Point::new(range.start.row - 1, self.line_len(range.start.row - 1))
} else {
self.prev_line_boundary(range.start).0
};
@@ -666,7 +657,7 @@ impl DisplaySnapshot {
layout_line.closest_index_for_x(x) as u32
}
pub fn display_chars_at(
pub fn chars_at(
&self,
mut point: DisplayPoint,
) -> impl Iterator<Item = (char, DisplayPoint)> + '_ {
@@ -693,27 +684,60 @@ impl DisplaySnapshot {
})
}
pub fn buffer_chars_at(
pub fn reverse_chars_at(
&self,
mut offset: Offset<MultiBuffer>,
) -> impl Iterator<Item = (char, usize)> + '_ {
self.buffer_snapshot.chars_at(offset).map(move |ch| {
let ret = (ch, offset);
offset += ch.len_utf8();
ret
})
mut point: DisplayPoint,
) -> impl Iterator<Item = (char, DisplayPoint)> + '_ {
point = DisplayPoint(self.block_snapshot.clip_point(point.0, Bias::Left));
self.reverse_text_chunks(point.row())
.flat_map(|chunk| chunk.chars().rev())
.skip_while({
let mut column = self.line_len(point.row());
if self.max_point().row() > point.row() {
column += 1;
}
move |char| {
let at_point = column <= point.column();
column = column.saturating_sub(char.len_utf8() as u32);
!at_point
}
})
.map(move |ch| {
if ch == '\n' {
*point.row_mut() -= 1;
*point.column_mut() = self.line_len(point.row());
} else {
*point.column_mut() = point.column().saturating_sub(ch.len_utf8() as u32);
}
(ch, point)
})
}
pub fn reverse_buffer_chars_at(
&self,
mut offset: Offset<MultiBuffer>,
) -> impl Iterator<Item = (char, usize)> + '_ {
self.buffer_snapshot
.reversed_chars_at(offset)
.map(move |ch| {
offset -= ch.len_utf8();
(ch, offset)
})
pub fn column_to_chars(&self, display_row: u32, target: u32) -> u32 {
let mut count = 0;
let mut column = 0;
for (c, _) in self.chars_at(DisplayPoint::new(display_row, 0)) {
if column >= target {
break;
}
count += 1;
column += c.len_utf8() as u32;
}
count
}
pub fn column_from_chars(&self, display_row: u32, char_count: u32) -> u32 {
let mut column = 0;
for (count, (c, _)) in self.chars_at(DisplayPoint::new(display_row, 0)).enumerate() {
if c == '\n' || count >= char_count as usize {
break;
}
column += c.len_utf8() as u32;
}
column
}
pub fn clip_point(&self, point: DisplayPoint, bias: Bias) -> DisplayPoint {
@@ -735,7 +759,7 @@ impl DisplaySnapshot {
pub fn folds_in_range<T>(&self, range: Range<T>) -> impl Iterator<Item = &Fold>
where
T: ToOffset<MultiBuffer>,
T: ToOffset,
{
self.fold_snapshot.folds_in_range(range)
}
@@ -747,7 +771,7 @@ impl DisplaySnapshot {
self.block_snapshot.blocks_in_range(rows)
}
pub fn intersects_fold<T: ToOffset<MultiBuffer>>(&self, offset: T) -> bool {
pub fn intersects_fold<T: ToOffset>(&self, offset: T) -> bool {
self.fold_snapshot.intersects_fold(offset)
}
@@ -784,6 +808,20 @@ impl DisplaySnapshot {
result
}
pub fn line_indent(&self, display_row: u32) -> (u32, bool) {
let mut indent = 0;
let mut is_blank = true;
for (c, _) in self.chars_at(DisplayPoint::new(display_row, 0)) {
if c == ' ' {
indent += 1;
} else {
is_blank = c == '\n';
break;
}
}
(indent, is_blank)
}
pub fn line_indent_for_buffer_row(&self, buffer_row: u32) -> (u32, bool) {
let (buffer, range) = self
.buffer_snapshot
@@ -884,7 +922,7 @@ impl DisplaySnapshot {
#[cfg(any(test, feature = "test-support"))]
pub(crate) fn inlay_highlights<Tag: ?Sized + 'static>(
&self,
) -> Option<&TreeMap<InlayId, (HighlightStyle, InlayHighlight)>> {
) -> Option<&HashMap<InlayId, (HighlightStyle, InlayHighlight)>> {
let type_id = TypeId::of::<Tag>();
self.inlay_highlights.get(&type_id)
}
@@ -1105,7 +1143,7 @@ pub mod tests {
position,
height,
disposition,
render: Box::new(|_| div().into_any()),
render: Arc::new(|_| div().into_any()),
}
})
.collect::<Vec<_>>();


@@ -37,7 +37,6 @@ pub struct BlockMap {
pub struct BlockMapWriter<'a>(&'a mut BlockMap);
#[derive(Clone)]
pub struct BlockSnapshot {
wrap_snapshot: WrapSnapshot,
transforms: SumTree<Transform>,
@@ -55,7 +54,7 @@ struct BlockRow(u32);
#[derive(Copy, Clone, Debug, Default, Eq, Ord, PartialOrd, PartialEq)]
struct WrapRow(u32);
pub type RenderBlock = Box<dyn Send + Fn(&mut BlockContext) -> AnyElement>;
pub type RenderBlock = Arc<dyn Fn(&mut BlockContext) -> AnyElement>;
pub struct Block {
id: BlockId,
@@ -66,11 +65,15 @@ pub struct Block {
disposition: BlockDisposition,
}
pub struct BlockProperties<P> {
#[derive(Clone)]
pub struct BlockProperties<P>
where
P: Clone,
{
pub position: P,
pub height: u8,
pub style: BlockStyle,
pub render: Box<dyn Send + Fn(&mut BlockContext) -> AnyElement>,
pub render: Arc<dyn Fn(&mut BlockContext) -> AnyElement>,
pub disposition: BlockDisposition,
}
@@ -1038,21 +1041,21 @@ mod tests {
position: buffer_snapshot.anchor_after(Point::new(1, 0)),
height: 1,
disposition: BlockDisposition::Above,
render: Box::new(|_| div().into_any()),
render: Arc::new(|_| div().into_any()),
},
BlockProperties {
style: BlockStyle::Fixed,
position: buffer_snapshot.anchor_after(Point::new(1, 2)),
height: 2,
disposition: BlockDisposition::Above,
render: Box::new(|_| div().into_any()),
render: Arc::new(|_| div().into_any()),
},
BlockProperties {
style: BlockStyle::Fixed,
position: buffer_snapshot.anchor_after(Point::new(3, 3)),
height: 3,
disposition: BlockDisposition::Below,
render: Box::new(|_| div().into_any()),
render: Arc::new(|_| div().into_any()),
},
]);
@@ -1206,14 +1209,14 @@ mod tests {
style: BlockStyle::Fixed,
position: buffer_snapshot.anchor_after(Point::new(1, 12)),
disposition: BlockDisposition::Above,
render: Box::new(|_| div().into_any()),
render: Arc::new(|_| div().into_any()),
height: 1,
},
BlockProperties {
style: BlockStyle::Fixed,
position: buffer_snapshot.anchor_after(Point::new(1, 1)),
disposition: BlockDisposition::Below,
render: Box::new(|_| div().into_any()),
render: Arc::new(|_| div().into_any()),
height: 1,
},
]);
@@ -1308,7 +1311,7 @@ mod tests {
position,
height,
disposition,
render: Box::new(|_| div().into_any()),
render: Arc::new(|_| div().into_any()),
}
})
.collect::<Vec<_>>();
@@ -1322,14 +1325,7 @@ mod tests {
wrap_map.sync(tab_snapshot, tab_edits, cx)
});
let mut block_map = block_map.write(wraps_snapshot, wrap_edits);
let block_ids =
block_map.insert(block_properties.iter().map(|props| BlockProperties {
position: props.position,
height: props.height,
style: props.style,
render: Box::new(|_| div().into_any()),
disposition: props.disposition,
}));
let block_ids = block_map.insert(block_properties.clone());
for (block_id, props) in block_ids.into_iter().zip(block_properties) {
custom_blocks.push((block_id, props));
}


@@ -4,7 +4,7 @@ use super::{
};
use gpui::{ElementId, HighlightStyle, Hsla};
use language::{Chunk, Edit, Point, TextSummary};
use multi_buffer::{Anchor, AnchorRangeExt, MultiBuffer, MultiBufferSnapshot, ToOffset};
use multi_buffer::{Anchor, AnchorRangeExt, MultiBufferSnapshot, ToOffset};
use std::{
any::TypeId,
cmp::{self, Ordering},
@@ -74,7 +74,7 @@ impl<'a> sum_tree::Dimension<'a, TransformSummary> for FoldPoint {
pub(crate) struct FoldMapWriter<'a>(&'a mut FoldMap);
impl<'a> FoldMapWriter<'a> {
pub(crate) fn fold<T: ToOffset<MultiBuffer>>(
pub(crate) fn fold<T: ToOffset>(
&mut self,
ranges: impl IntoIterator<Item = Range<T>>,
) -> (FoldSnapshot, Vec<FoldEdit>) {
@@ -129,7 +129,7 @@ impl<'a> FoldMapWriter<'a> {
(self.0.snapshot.clone(), edits)
}
pub(crate) fn unfold<T: ToOffset<MultiBuffer>>(
pub(crate) fn unfold<T: ToOffset>(
&mut self,
ranges: impl IntoIterator<Item = Range<T>>,
inclusive: bool,
@@ -609,7 +609,7 @@ impl FoldSnapshot {
pub fn folds_in_range<T>(&self, range: Range<T>) -> impl Iterator<Item = &Fold>
where
T: ToOffset<MultiBuffer>,
T: ToOffset,
{
let mut folds = intersecting_folds(&self.inlay_snapshot, &self.folds, range, false);
iter::from_fn(move || {
@@ -621,7 +621,7 @@ impl FoldSnapshot {
pub fn intersects_fold<T>(&self, offset: T) -> bool
where
T: ToOffset<MultiBuffer>,
T: ToOffset,
{
let buffer_offset = offset.to_offset(&self.inlay_snapshot.buffer);
let inlay_offset = self.inlay_snapshot.to_inlay_offset(buffer_offset);
@@ -741,7 +741,7 @@ fn intersecting_folds<'a, T>(
inclusive: bool,
) -> FilterCursor<'a, impl 'a + FnMut(&FoldSummary) -> bool, Fold, usize>
where
T: ToOffset<MultiBuffer>,
T: ToOffset,
{
let buffer = &inlay_snapshot.buffer;
let start = buffer.anchor_before(range.start.to_offset(buffer));


@@ -1695,39 +1695,38 @@ mod tests {
while inlay_indices.len() < inlay_highlight_count {
inlay_indices.insert(rng.gen_range(0..inlays.len()));
}
let new_highlights = TreeMap::from_ordered_entries(
inlay_indices
.into_iter()
.filter_map(|i| {
let (_, inlay) = &inlays[i];
let inlay_text_len = inlay.text.len();
match inlay_text_len {
0 => None,
1 => Some(InlayHighlight {
let new_highlights = inlay_indices
.into_iter()
.filter_map(|i| {
let (_, inlay) = &inlays[i];
let inlay_text_len = inlay.text.len();
match inlay_text_len {
0 => None,
1 => Some(InlayHighlight {
inlay: inlay.id,
inlay_position: inlay.position,
range: 0..1,
}),
n => {
let inlay_text = inlay.text.to_string();
let mut highlight_end = rng.gen_range(1..n);
let mut highlight_start = rng.gen_range(0..highlight_end);
while !inlay_text.is_char_boundary(highlight_end) {
highlight_end += 1;
}
while !inlay_text.is_char_boundary(highlight_start) {
highlight_start -= 1;
}
Some(InlayHighlight {
inlay: inlay.id,
inlay_position: inlay.position,
range: 0..1,
}),
n => {
let inlay_text = inlay.text.to_string();
let mut highlight_end = rng.gen_range(1..n);
let mut highlight_start = rng.gen_range(0..highlight_end);
while !inlay_text.is_char_boundary(highlight_end) {
highlight_end += 1;
}
while !inlay_text.is_char_boundary(highlight_start) {
highlight_start -= 1;
}
Some(InlayHighlight {
inlay: inlay.id,
inlay_position: inlay.position,
range: highlight_start..highlight_end,
})
}
range: highlight_start..highlight_end,
})
}
})
.map(|highlight| (highlight.inlay, (HighlightStyle::default(), highlight))),
);
}
})
.map(|highlight| (highlight.inlay, (HighlightStyle::default(), highlight)))
.collect();
log::info!("highlighting inlay ranges {new_highlights:?}");
inlay_highlights.insert(TypeId::of::<()>(), new_highlights);
}


@@ -38,7 +38,6 @@ mod editor_tests;
#[cfg(any(test, feature = "test-support"))]
pub mod test;
use ::git::diff::{DiffHunk, DiffHunkStatus};
use ::git::permalink::{build_permalink, BuildPermalinkParams};
pub(crate) use actions::*;
use aho_corasick::AhoCorasick;
use anyhow::{anyhow, Context as _, Result};
@@ -57,16 +56,15 @@ pub use element::{
};
use futures::FutureExt;
use fuzzy::{StringMatch, StringMatchCandidate};
use git::blame::GitBlame;
use git::diff_hunk_to_display;
use gpui::{
div, impl_actions, point, prelude::*, px, relative, rems, size, uniform_list, Action,
AnyElement, AppContext, AsyncWindowContext, AvailableSpace, BackgroundExecutor, Bounds,
ClipboardItem, Context, DispatchPhase, ElementId, EventEmitter, FocusHandle, FocusableView,
FontId, FontStyle, FontWeight, HighlightStyle, Hsla, InteractiveText, KeyContext, Model,
MouseButton, PaintQuad, ParentElement, Pixels, Render, SharedString, Size, StrikethroughStyle,
Styled, StyledText, Subscription, Task, TextStyle, UnderlineStyle, UniformListScrollHandle,
View, ViewContext, ViewInputHandler, VisualContext, WeakView, WhiteSpace, WindowContext,
AnyElement, AppContext, AsyncWindowContext, BackgroundExecutor, Bounds, ClipboardItem, Context,
DispatchPhase, ElementId, EventEmitter, FocusHandle, FocusableView, FontId, FontStyle,
FontWeight, HighlightStyle, Hsla, InteractiveText, KeyContext, Model, MouseButton,
ParentElement, Pixels, Render, SharedString, StrikethroughStyle, Styled, StyledText,
Subscription, Task, TextStyle, UnderlineStyle, UniformListScrollHandle, View, ViewContext,
ViewInputHandler, VisualContext, WeakView, WhiteSpace, WindowContext,
};
use highlight_matching_bracket::refresh_matching_bracket_highlights;
use hover_popover::{hide_hover, HoverState};
@@ -74,12 +72,12 @@ use inlay_hint_cache::{InlayHintCache, InlaySplice, InvalidationStrategy};
pub use inline_completion_provider::*;
pub use items::MAX_TAB_TITLE_LEN;
use itertools::Itertools;
use language::{char_kind, CharKind};
use language::{
char_kind,
language_settings::{self, all_language_settings, InlayHintSettings},
markdown, point_from_lsp, AutoindentMode, BracketPair, Buffer, Capability, CharKind, CodeLabel,
CursorShape, Diagnostic, Documentation, IndentKind, IndentSize, Language, OffsetRangeExt,
Point, Selection, SelectionGoal, TransactionId,
markdown, point_from_lsp, AutoindentMode, BracketPair, Buffer, Capability, CodeAction,
CodeLabel, Completion, CursorShape, Diagnostic, Documentation, IndentKind, IndentSize,
Language, OffsetRangeExt, Point, Selection, SelectionGoal, TransactionId,
};
use hover_links::{HoverLink, HoveredLinkState, InlayHighlight};
@@ -94,9 +92,8 @@ pub use multi_buffer::{
use ordered_float::OrderedFloat;
use parking_lot::{Mutex, RwLock};
use project::project_settings::{GitGutterSetting, ProjectSettings};
use project::{
CodeAction, Completion, FormatTrigger, Item, Location, Project, ProjectPath, ProjectTransaction,
};
use project::Item;
use project::{FormatTrigger, Location, Project, ProjectPath, ProjectTransaction};
use rand::prelude::*;
use rpc::proto::*;
use scroll::{Autoscroll, OngoingScroll, ScrollAnchor, ScrollManager, ScrollbarAutoHide};
@@ -118,7 +115,6 @@ use std::{
time::{Duration, Instant},
};
pub use sum_tree::Bias;
use sum_tree::TreeMap;
use text::{BufferId, OffsetUtf16, Rope};
use theme::{
observe_buffer_font_size_adjustment, ActiveTheme, PlayerColor, StatusColors, SyntaxTheme,
@@ -128,7 +124,7 @@ use ui::{
h_flex, prelude::*, ButtonSize, ButtonStyle, IconButton, IconName, IconSize, ListItem, Popover,
Tooltip,
};
use util::{defer, maybe, post_inc, RangeExt, ResultExt, TryFutureExt};
use util::{maybe, post_inc, RangeExt, ResultExt, TryFutureExt};
use workspace::Toast;
use workspace::{
searchable::SearchEvent, ItemNavHistory, SplitDirection, ViewId, Workspace, WorkspaceId,
@@ -358,31 +354,7 @@ type CompletionId = usize;
// type GetFieldEditorTheme = dyn Fn(&theme::Theme) -> theme::FieldEditor;
// type OverrideTextStyle = dyn Fn(&EditorStyle) -> Option<HighlightStyle>;
type BackgroundHighlight = (fn(&ThemeColors) -> Hsla, Arc<[Range<Anchor>]>);
struct ScrollbarMarkerState {
scrollbar_size: Size<Pixels>,
dirty: bool,
markers: Arc<[PaintQuad]>,
pending_refresh: Option<Task<Result<()>>>,
}
impl ScrollbarMarkerState {
fn should_refresh(&self, scrollbar_size: Size<Pixels>) -> bool {
self.pending_refresh.is_none() && (self.scrollbar_size != scrollbar_size || self.dirty)
}
}
impl Default for ScrollbarMarkerState {
fn default() -> Self {
Self {
scrollbar_size: Size::default(),
dirty: false,
markers: Arc::from([]),
pending_refresh: None,
}
}
}
type BackgroundHighlight = (fn(&ThemeColors) -> Hsla, Vec<Range<Anchor>>);
/// Zed's primary text input `View`, allowing users to edit a [`MultiBuffer`]
///
@@ -421,8 +393,7 @@ pub struct Editor {
placeholder_text: Option<Arc<str>>,
highlight_order: usize,
highlighted_rows: HashMap<TypeId, Vec<(usize, Range<Anchor>, Hsla)>>,
background_highlights: TreeMap<TypeId, BackgroundHighlight>,
scrollbar_marker_state: ScrollbarMarkerState,
background_highlights: BTreeMap<TypeId, BackgroundHighlight>,
nav_history: Option<ItemNavHistory>,
context_menu: RwLock<Option<ContextMenu>>,
mouse_context_menu: Option<MouseContextMenu>,
@@ -461,9 +432,6 @@ pub struct Editor {
editor_actions: Vec<Box<dyn Fn(&mut ViewContext<Self>)>>,
use_autoclose: bool,
auto_replace_emoji_shortcode: bool,
show_git_blame: bool,
blame: Option<Model<GitBlame>>,
blame_subscription: Option<Subscription>,
custom_context_menu: Option<
Box<
dyn 'static
@@ -472,11 +440,9 @@ pub struct Editor {
>,
}
#[derive(Clone)]
pub struct EditorSnapshot {
pub mode: EditorMode,
show_gutter: bool,
show_git_blame: bool,
pub display_snapshot: DisplaySnapshot,
pub placeholder_text: Option<Arc<str>>,
is_focused: bool,
@@ -484,14 +450,11 @@ pub struct EditorSnapshot {
ongoing_scroll: OngoingScroll,
}
const GIT_BLAME_GUTTER_WIDTH_CHARS: f32 = 53.;
pub struct GutterDimensions {
pub left_padding: Pixels,
pub right_padding: Pixels,
pub width: Pixels,
pub margin: Pixels,
pub git_blame_entries_width: Option<Pixels>,
}
impl Default for GutterDimensions {
@@ -501,7 +464,6 @@ impl Default for GutterDimensions {
right_padding: Pixels::ZERO,
width: Pixels::ZERO,
margin: Pixels::ZERO,
git_blame_entries_width: None,
}
}
}
@@ -1469,7 +1431,6 @@ impl Editor {
highlight_order: 0,
highlighted_rows: HashMap::default(),
background_highlights: Default::default(),
scrollbar_marker_state: ScrollbarMarkerState::default(),
nav_history: None,
context_menu: RwLock::new(None),
mouse_context_menu: None,
@@ -1510,9 +1471,6 @@ impl Editor {
vim_replace_map: Default::default(),
show_inline_completions: mode == EditorMode::Full,
custom_context_menu: None,
show_git_blame: false,
blame: None,
blame_subscription: None,
_subscriptions: vec![
cx.observe(&buffer, Self::on_buffer_changed),
cx.subscribe(&buffer, Self::on_buffer_event),
@@ -1658,10 +1616,6 @@ impl Editor {
EditorSnapshot {
mode: self.mode,
show_gutter: self.show_gutter,
show_git_blame: self
.blame
.as_ref()
.map_or(false, |blame| blame.read(cx).has_generated_entries()),
display_snapshot: self.display_map.update(cx, |map, cx| map.snapshot(cx)),
scroll_anchor: self.scroll_manager.anchor(),
ongoing_scroll: self.scroll_manager.ongoing_scroll(),
@@ -1670,15 +1624,11 @@ impl Editor {
}
}
pub fn language_at<T: ToOffset<MultiBuffer>>(
&self,
point: T,
cx: &AppContext,
) -> Option<Arc<Language>> {
pub fn language_at<T: ToOffset>(&self, point: T, cx: &AppContext) -> Option<Arc<Language>> {
self.buffer.read(cx).language_at(point, cx)
}
pub fn file_at<T: ToOffset<MultiBuffer>>(
pub fn file_at<T: ToOffset>(
&self,
point: T,
cx: &AppContext,
@@ -1967,7 +1917,7 @@ impl Editor {
pub fn edit<I, S, T>(&mut self, edits: I, cx: &mut ViewContext<Self>)
where
I: IntoIterator<Item = (Range<S>, T)>,
S: ToOffset<MultiBuffer>,
S: ToOffset,
T: Into<Arc<str>>,
{
if self.read_only(cx) {
@@ -1981,7 +1931,7 @@ impl Editor {
pub fn edit_with_autoindent<I, S, T>(&mut self, edits: I, cx: &mut ViewContext<Self>)
where
I: IntoIterator<Item = (Range<S>, T)>,
S: ToOffset<MultiBuffer>,
S: ToOffset,
T: Into<Arc<str>>,
{
if self.read_only(cx) {
@@ -2000,7 +1950,7 @@ impl Editor {
cx: &mut ViewContext<Self>,
) where
I: IntoIterator<Item = (Range<S>, T)>,
S: ToOffset<MultiBuffer>,
S: ToOffset,
T: Into<Arc<str>>,
{
if self.read_only(cx) {
@@ -3071,7 +3021,7 @@ impl Editor {
/// Iterate the given selections, and for each one, find the smallest surrounding
/// autoclose region. This uses the ordering of the selections and the autoclose
/// regions to avoid repeated comparisons.
fn selections_with_autoclose_regions<'a, D: ToOffset<MultiBuffer> + Clone>(
fn selections_with_autoclose_regions<'a, D: ToOffset + Clone>(
&'a self,
selections: impl IntoIterator<Item = Selection<D>>,
buffer: &'a MultiBufferSnapshot,
@@ -3126,10 +3076,7 @@ impl Editor {
});
}
fn completion_query(
buffer: &MultiBufferSnapshot,
position: impl ToOffset<MultiBuffer>,
) -> Option<String> {
fn completion_query(buffer: &MultiBufferSnapshot, position: impl ToOffset) -> Option<String> {
let offset = position.to_offset(buffer);
let (word_range, kind) = buffer.surrounding_word(offset);
if offset > word_range.start && kind == Some(CharKind::Word) {
@@ -3767,7 +3714,7 @@ impl Editor {
workspace.add_item_to_active_pane(Box::new(editor.clone()), cx);
editor.update(cx, |editor, cx| {
editor.highlight_background::<Self>(
&ranges_to_highlight,
ranges_to_highlight,
|theme| theme.editor_highlighted_line_background,
cx,
);
@@ -3795,17 +3742,19 @@ impl Editor {
let actions = if let Ok(code_actions) = project.update(&mut cx, |project, cx| {
project.code_actions(&start_buffer, start..end, cx)
}) {
code_actions.await
code_actions.await.log_err()
} else {
Vec::new()
None
};
this.update(&mut cx, |this, cx| {
this.available_code_actions = if actions.is_empty() {
None
} else {
Some((start_buffer, actions.into()))
};
this.available_code_actions = actions.and_then(|actions| {
if actions.is_empty() {
None
} else {
Some((start_buffer, actions.into()))
}
});
cx.notify();
})
.log_err();
@@ -3897,12 +3846,12 @@ impl Editor {
}
this.highlight_background::<DocumentHighlightRead>(
&read_ranges,
read_ranges,
|theme| theme.editor_document_highlight_read_background,
cx,
);
this.highlight_background::<DocumentHighlightWrite>(
&write_ranges,
write_ranges,
|theme| theme.editor_document_highlight_write_background,
cx,
);
@@ -4606,7 +4555,6 @@ impl Editor {
}
let mut delta_for_end_row = 0;
let has_multiple_rows = start_row + 1 != end_row;
for row in start_row..end_row {
let current_indent = snapshot.indent_size_for_line(row);
let indent_delta = match (current_indent.kind, indent_kind) {
@@ -4618,12 +4566,7 @@ impl Editor {
(_, IndentKind::Tab) => IndentSize::tab(),
};
let start = if has_multiple_rows || current_indent.len < selection.start.column {
0
} else {
selection.start.column
};
let row_start = Point::new(row, start);
let row_start = Point::new(row, 0);
edits.push((
row_start..row_start,
indent_delta.chars().collect::<String>(),
@@ -4669,7 +4612,7 @@ impl Editor {
rows.start += 1;
}
}
let has_multiple_rows = rows.len() > 1;
for row in rows {
let indent_size = snapshot.indent_size_for_line(row);
if indent_size.len > 0 {
@@ -4684,16 +4627,7 @@ impl Editor {
}
IndentKind::Tab => 1,
};
let start = if has_multiple_rows
|| deletion_len > selection.start.column
|| indent_size.len < selection.start.column
{
0
} else {
selection.start.column - deletion_len
};
deletion_ranges
.push(Point::new(row, start)..Point::new(row, start + deletion_len));
deletion_ranges.push(Point::new(row, 0)..Point::new(row, deletion_len));
last_outdent = Some(row);
}
}
@@ -5189,7 +5123,7 @@ impl Editor {
});
}
pub fn duplicate_line(&mut self, upwards: bool, cx: &mut ViewContext<Self>) {
pub fn duplicate_line(&mut self, action: &DuplicateLine, cx: &mut ViewContext<Self>) {
let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
let buffer = &display_map.buffer_snapshot;
let selections = self.selections.all::<Point>(cx);
@@ -5218,7 +5152,7 @@ impl Editor {
.text_for_range(start..end)
.chain(Some("\n"))
.collect::<String>();
let insert_location = if upwards {
let insert_location = if action.move_upwards {
Point::new(rows.end, 0)
} else {
start
@@ -5235,14 +5169,6 @@ impl Editor {
});
}
pub fn duplicate_line_up(&mut self, _: &DuplicateLineUp, cx: &mut ViewContext<Self>) {
self.duplicate_line(true, cx);
}
pub fn duplicate_line_down(&mut self, _: &DuplicateLineDown, cx: &mut ViewContext<Self>) {
self.duplicate_line(false, cx);
}
pub fn move_line_up(&mut self, _: &MoveLineUp, cx: &mut ViewContext<Self>) {
let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
let buffer = self.buffer.read(cx).snapshot(cx);
@@ -7101,27 +7027,14 @@ impl Editor {
}
// If the language has line comments, toggle those.
if let Some(full_comment_prefixes) = language
if let Some(full_comment_prefix) = language
.line_comment_prefixes()
.filter(|prefixes| !prefixes.is_empty())
.and_then(|prefixes| prefixes.first())
{
// Split the comment prefix's trailing whitespace into a separate string,
// as that portion won't be used for detecting if a line is a comment.
struct Comment {
full_prefix: Arc<str>,
trimmed_prefix_len: usize,
}
let prefixes: SmallVec<[Comment; 4]> = full_comment_prefixes
.iter()
.map(|full_prefix| {
let trimmed_prefix_len = full_prefix.trim_end_matches(' ').len();
Comment {
trimmed_prefix_len,
full_prefix: full_prefix.clone(),
}
})
.collect();
let comment_prefix = full_comment_prefix.trim_end_matches(' ');
let comment_prefix_whitespace = &full_comment_prefix[comment_prefix.len()..];
let mut all_selection_lines_are_comments = true;
for row in start_row..=end_row {
@@ -7129,28 +7042,16 @@ impl Editor {
continue;
}
let Some((prefix, prefix_range)) = prefixes
.iter()
.map(|prefix| {
(
prefix,
comment_prefix_range(
snapshot.deref(),
row,
&prefix.full_prefix[..prefix.trimmed_prefix_len],
&prefix.full_prefix[prefix.trimmed_prefix_len..],
),
)
})
.max_by_key(|(_, range)| range.end.column - range.start.column)
else {
// There has to be at least one prefix.
break;
};
let prefix_range = comment_prefix_range(
snapshot.deref(),
row,
comment_prefix,
comment_prefix_whitespace,
);
if prefix_range.is_empty() {
all_selection_lines_are_comments = false;
}
selection_edit_ranges.push((prefix_range, prefix.full_prefix.clone()));
selection_edit_ranges.push(prefix_range);
}
if all_selection_lines_are_comments {
@@ -7158,17 +7059,17 @@ impl Editor {
selection_edit_ranges
.iter()
.cloned()
.map(|(range, _)| (range, empty_str.clone())),
.map(|range| (range, empty_str.clone())),
);
} else {
let min_column = selection_edit_ranges
.iter()
.map(|(range, _)| range.start.column)
.map(|r| r.start.column)
.min()
.unwrap_or(0);
edits.extend(selection_edit_ranges.iter().map(|(range, prefix)| {
edits.extend(selection_edit_ranges.iter().map(|range| {
let position = Point::new(range.start.row, min_column);
(position..position, prefix.clone())
(position..position, full_comment_prefix.clone())
}));
}
} else if let Some((full_comment_prefix, comment_suffix)) =
@@ -7745,7 +7646,7 @@ impl Editor {
let range = target.range.to_offset(target.buffer.read(cx));
let range = editor.range_for_match(&range);
if Some(&target.buffer) == editor.buffer.read(cx).as_singleton().as_ref() {
editor.change_selections(Some(Autoscroll::focused()), cx, |s| {
editor.change_selections(Some(Autoscroll::fit()), cx, |s| {
s.select_ranges([range]);
});
} else {
@@ -7765,7 +7666,7 @@ impl Editor {
// to avoid creating a history entry at the previous cursor location.
pane.update(cx, |pane, _| pane.disable_history());
target_editor.change_selections(
Some(Autoscroll::focused()),
Some(Autoscroll::fit()),
cx,
|s| {
s.select_ranges([range]);
@@ -7915,10 +7816,9 @@ impl Editor {
Bias::Left
},
);
match self
.find_all_references_task_sources
.binary_search_by(|anchor| anchor.cmp(&head_anchor, &multi_buffer_snapshot))
.binary_search_by(|task_anchor| task_anchor.cmp(&head_anchor, &multi_buffer_snapshot))
{
Ok(_) => {
log::info!(
@@ -7936,27 +7836,66 @@ impl Editor {
let workspace = self.workspace()?;
let project = workspace.read(cx).project().clone();
let references = project.update(cx, |project, cx| project.references(&buffer, head, cx));
Some(cx.spawn(|editor, mut cx| async move {
let _cleanup = defer({
let mut cx = cx.clone();
move || {
let _ = editor.update(&mut cx, |editor, _| {
if let Ok(i) =
editor
.find_all_references_task_sources
.binary_search_by(|anchor| {
anchor.cmp(&head_anchor, &multi_buffer_snapshot)
})
{
editor.find_all_references_task_sources.remove(i);
}
});
}
});
let open_task = cx.spawn(|editor, mut cx| async move {
let mut locations = references.await?;
let snapshot = buffer.update(&mut cx, |buffer, _| buffer.snapshot())?;
let head_offset = text::ToOffset::to_offset(&head, &snapshot);
// LSP may return references that contain the item itself we requested `find_all_references` for (eg. rust-analyzer)
// So we will remove it from locations
// If there is only one reference, we will not do this filter cause it may make locations empty
if locations.len() > 1 {
cx.update(|cx| {
locations.retain(|location| {
// fn foo(x : i64) {
// ^
// println!(x);
// }
// It is ok to find reference when caret being at ^ (the end of the word)
// So we turn offset into inclusive to include the end of the word
!location
.range
.to_offset(location.buffer.read(cx))
.to_inclusive()
.contains(&head_offset)
});
})?;
}
let locations = references.await?;
if locations.is_empty() {
return anyhow::Ok(());
return Ok(());
}
// If there is one reference, just open it directly
if locations.len() == 1 {
let target = locations.pop().unwrap();
return editor.update(&mut cx, |editor, cx| {
let range = target.range.to_offset(target.buffer.read(cx));
let range = editor.range_for_match(&range);
if Some(&target.buffer) == editor.buffer().read(cx).as_singleton().as_ref() {
editor.change_selections(Some(Autoscroll::fit()), cx, |s| {
s.select_ranges([range]);
});
} else {
cx.window_context().defer(move |cx| {
let target_editor: View<Self> =
workspace.update(cx, |workspace, cx| {
workspace.open_project_item(
workspace.active_pane().clone(),
target.buffer.clone(),
cx,
)
});
target_editor.update(cx, |target_editor, cx| {
target_editor.change_selections(Some(Autoscroll::fit()), cx, |s| {
s.select_ranges([range]);
})
})
})
}
});
}
workspace.update(&mut cx, |workspace, cx| {
@@ -7976,7 +7915,24 @@ impl Editor {
Self::open_locations_in_multibuffer(
workspace, locations, replica_id, title, false, cx,
);
})
})?;
Ok(())
});
Some(cx.spawn(|editor, mut cx| async move {
open_task.await?;
editor.update(&mut cx, |editor, _| {
if let Ok(i) =
editor
.find_all_references_task_sources
.binary_search_by(|task_anchor| {
task_anchor.cmp(&head_anchor, &multi_buffer_snapshot)
})
{
editor.find_all_references_task_sources.remove(i);
}
})?;
anyhow::Ok(())
}))
}
@@ -8029,7 +7985,7 @@ impl Editor {
});
editor.update(cx, |editor, cx| {
editor.highlight_background::<Self>(
&ranges_to_highlight,
ranges_to_highlight,
|theme| theme.editor_highlighted_line_background,
cx,
);
@@ -8120,15 +8076,15 @@ impl Editor {
editor
});
let write_highlights =
this.clear_background_highlights::<DocumentHighlightWrite>(cx);
let read_highlights =
this.clear_background_highlights::<DocumentHighlightRead>(cx);
let ranges = write_highlights
.iter()
.flat_map(|(_, ranges)| ranges.iter())
.chain(read_highlights.iter().flat_map(|(_, ranges)| ranges.iter()))
.cloned()
let ranges = this
.clear_background_highlights::<DocumentHighlightWrite>(cx)
.into_iter()
.flat_map(|(_, ranges)| ranges.into_iter())
.chain(
this.clear_background_highlights::<DocumentHighlightRead>(cx)
.into_iter()
.flat_map(|(_, ranges)| ranges.into_iter()),
)
.collect();
this.highlight_text::<Rename>(
@@ -8146,7 +8102,7 @@ impl Editor {
style: BlockStyle::Flex,
position: range.start,
height: 1,
render: Box::new({
render: Arc::new({
let rename_editor = rename_editor.clone();
move |cx: &mut BlockContext| {
let mut text_style = cx.editor_style.text.clone();
@@ -8617,7 +8573,7 @@ impl Editor {
self.fold_ranges(ranges, true, cx);
}
pub fn fold_ranges<T: ToOffset<MultiBuffer> + Clone>(
pub fn fold_ranges<T: ToOffset + Clone>(
&mut self,
ranges: impl IntoIterator<Item = Range<T>>,
auto_scroll: bool,
@@ -8635,7 +8591,7 @@ impl Editor {
}
}
pub fn unfold_ranges<T: ToOffset<MultiBuffer> + Clone>(
pub fn unfold_ranges<T: ToOffset + Clone>(
&mut self,
ranges: impl IntoIterator<Item = Range<T>>,
inclusive: bool,
@@ -8868,42 +8824,9 @@ impl Editor {
}
}
pub fn toggle_git_blame(&mut self, _: &ToggleGitBlame, cx: &mut ViewContext<Self>) {
if !self.show_git_blame {
if let Err(error) = self.show_git_blame_internal(cx) {
log::error!("failed to toggle on 'git blame': {}", error);
return;
}
self.show_git_blame = true
} else {
self.blame_subscription.take();
self.blame.take();
self.show_git_blame = false
}
cx.notify();
}
fn show_git_blame_internal(&mut self, cx: &mut ViewContext<Self>) -> Result<()> {
if let Some(project) = self.project.as_ref() {
let Some(buffer) = self.buffer().read(cx).as_singleton() else {
anyhow::bail!("git blame not available in multi buffers")
};
let project = project.clone();
let blame = cx.new_model(|cx| GitBlame::new(buffer, project, cx));
self.blame_subscription = Some(cx.observe(&blame, |_, _, cx| cx.notify()));
self.blame = Some(blame);
}
Ok(())
}
pub fn blame(&self) -> Option<&Model<GitBlame>> {
self.blame.as_ref()
}
fn get_permalink_to_line(&mut self, cx: &mut ViewContext<Self>) -> Result<url::Url> {
use git::permalink::{build_permalink, BuildPermalinkParams};
let (path, repo) = maybe!({
let project_handle = self.project.as_ref()?.clone();
let project = project_handle.read(cx);
@@ -8936,12 +8859,7 @@ impl Editor {
remote_url: &origin_url,
sha: &sha,
path: &path,
selection: selection.map(|selection| {
let range = selection.range();
let start = range.start.row;
let end = range.end.row;
start..end
}),
selection: selection.map(|selection| selection.range()),
})
}
@@ -9078,13 +8996,13 @@ impl Editor {
pub fn highlight_background<T: 'static>(
&mut self,
ranges: &[Range<Anchor>],
ranges: Vec<Range<Anchor>>,
color_fetcher: fn(&ThemeColors) -> Hsla,
cx: &mut ViewContext<Self>,
) {
let snapshot = self.snapshot(cx);
// this is to try and catch a panic sooner
for range in ranges {
for range in &ranges {
snapshot
.buffer_snapshot
.summary_for_anchor::<usize>(&range.start);
@@ -9094,21 +9012,16 @@ impl Editor {
}
self.background_highlights
.insert(TypeId::of::<T>(), (color_fetcher, Arc::from(ranges)));
self.scrollbar_marker_state.dirty = true;
.insert(TypeId::of::<T>(), (color_fetcher, ranges));
cx.notify();
}
pub fn clear_background_highlights<T: 'static>(
&mut self,
cx: &mut ViewContext<Self>,
_cx: &mut ViewContext<Self>,
) -> Option<BackgroundHighlight> {
let text_highlights = self.background_highlights.remove(&TypeId::of::<T>())?;
if !text_highlights.1.is_empty() {
self.scrollbar_marker_state.dirty = true;
cx.notify();
}
Some(text_highlights)
let text_highlights = self.background_highlights.remove(&TypeId::of::<T>());
text_highlights
}
#[cfg(feature = "test-support")]
@@ -9362,7 +9275,6 @@ impl Editor {
multi_buffer::Event::Edited {
singleton_buffer_edited,
} => {
self.scrollbar_marker_state.dirty = true;
self.refresh_active_diagnostics(cx);
self.refresh_code_actions(cx);
if self.has_active_inline_completion(cx) {
@@ -9430,16 +9342,10 @@ impl Editor {
multi_buffer::Event::FileHandleChanged | multi_buffer::Event::Reloaded => {
cx.emit(EditorEvent::TitleChanged)
}
multi_buffer::Event::DiffBaseChanged => {
self.scrollbar_marker_state.dirty = true;
cx.emit(EditorEvent::DiffBaseChanged);
cx.notify();
}
multi_buffer::Event::DiffBaseChanged => cx.emit(EditorEvent::DiffBaseChanged),
multi_buffer::Event::Closed => cx.emit(EditorEvent::Closed),
multi_buffer::Event::DiagnosticsUpdated => {
self.refresh_active_diagnostics(cx);
self.scrollbar_marker_state.dirty = true;
cx.notify();
}
_ => {}
};
@@ -9536,7 +9442,6 @@ impl Editor {
path: ProjectPath,
position: Point,
anchor: language::Anchor,
offset_from_top: u32,
cx: &mut ViewContext<Self>,
) {
let workspace = self.workspace();
@@ -9564,13 +9469,9 @@ impl Editor {
};
let nav_history = editor.nav_history.take();
editor.change_selections(
Some(Autoscroll::top_relative(offset_from_top as usize)),
cx,
|s| {
s.select_ranges([cursor..cursor]);
},
);
editor.change_selections(Some(Autoscroll::newest()), cx, |s| {
s.select_ranges([cursor..cursor]);
});
editor.nav_history = nav_history;
anyhow::Ok(())
@@ -10026,7 +9927,7 @@ impl EditorSnapshot {
})
}
pub fn language_at<T: ToOffset<MultiBuffer>>(&self, position: T) -> Option<&Arc<Language>> {
pub fn language_at<T: ToOffset>(&self, position: T) -> Option<&Arc<Language>> {
self.display_snapshot.buffer_snapshot.language_at(position)
}
@@ -10069,12 +9970,7 @@ impl EditorSnapshot {
0.0.into()
};
let git_blame_entries_width = self
.show_git_blame
.then_some(em_width * GIT_BLAME_GUTTER_WIDTH_CHARS);
let mut left_padding = git_blame_entries_width.unwrap_or(Pixels::ZERO);
left_padding += if gutter_settings.code_actions {
let left_padding = if gutter_settings.code_actions {
em_width * 3.0
} else if show_git_gutter && gutter_settings.line_numbers {
em_width * 2.0
@@ -10099,7 +9995,6 @@ impl EditorSnapshot {
right_padding,
width: line_gutter_width + left_padding + right_padding,
margin: -descent,
git_blame_entries_width,
}
}
}
@@ -10207,7 +10102,7 @@ impl Render for Editor {
background,
local_player: cx.theme().players().local(),
text: text_style,
scrollbar_width: px(13.),
scrollbar_width: px(12.),
syntax: cx.theme().syntax().clone(),
status: cx.theme().status().clone(),
inlay_hints_style: HighlightStyle {
@@ -10489,7 +10384,7 @@ trait SelectionExt {
-> Range<u32>;
}
impl<T: ToPoint + ToOffset<MultiBuffer>> SelectionExt for Selection<T> {
impl<T: ToPoint + ToOffset> SelectionExt for Selection<T> {
fn point_range(&self, buffer: &MultiBufferSnapshot) -> Range<Point> {
let start = self.start.to_point(buffer);
let end = self.end.to_point(buffer);
@@ -10546,7 +10441,7 @@ impl<T: ToPoint + ToOffset<MultiBuffer>> SelectionExt for Selection<T> {
impl<T: InvalidationRegion> InvalidationStack<T> {
fn invalidate<S>(&mut self, selections: &[Selection<S>], buffer: &MultiBufferSnapshot)
where
S: Clone + ToOffset<MultiBuffer>,
S: Clone + ToOffset,
{
while let Some(region) = self.last() {
let all_selections_inside_invalidation_ranges =
@@ -10600,47 +10495,12 @@ impl InvalidationRegion for SnippetState {
pub fn diagnostic_block_renderer(diagnostic: Diagnostic, _is_valid: bool) -> RenderBlock {
let (text_without_backticks, code_ranges) = highlight_diagnostic_message(&diagnostic);
Box::new(move |cx: &mut BlockContext| {
Arc::new(move |cx: &mut BlockContext| {
let group_id: SharedString = cx.block_id.to_string().into();
let mut text_style = cx.text_style().clone();
text_style.color = diagnostic_style(diagnostic.severity, true, cx.theme().status());
let multi_line_diagnostic = diagnostic.message.contains('\n');
let buttons = |diagnostic: &Diagnostic, block_id: usize| {
if multi_line_diagnostic {
v_flex()
} else {
h_flex()
}
.children(diagnostic.is_primary.then(|| {
IconButton::new(("close-block", block_id), IconName::XCircle)
.icon_color(Color::Muted)
.size(ButtonSize::Compact)
.style(ButtonStyle::Transparent)
.visible_on_hover(group_id.clone())
.on_click(move |_click, cx| cx.dispatch_action(Box::new(Cancel)))
.tooltip(|cx| Tooltip::for_action("Close Diagnostics", &Cancel, cx))
}))
.child(
IconButton::new(("copy-block", block_id), IconName::Copy)
.icon_color(Color::Muted)
.size(ButtonSize::Compact)
.style(ButtonStyle::Transparent)
.visible_on_hover(group_id.clone())
.on_click({
let message = diagnostic.message.clone();
move |_click, cx| cx.write_to_clipboard(ClipboardItem::new(message.clone()))
})
.tooltip(|cx| Tooltip::text("Copy diagnostic message", cx)),
)
};
let icon_size = buttons(&diagnostic, cx.block_id)
.into_any_element()
.measure(AvailableSpace::min_size(), cx);
h_flex()
.id(cx.block_id)
.group(group_id.clone())
@@ -10651,10 +10511,9 @@ pub fn diagnostic_block_renderer(diagnostic: Diagnostic, _is_valid: bool) -> Ren
.child(
div()
.flex()
.w(cx.anchor_x - cx.gutter_dimensions.width - icon_size.width)
.w(cx.anchor_x - cx.gutter_dimensions.width)
.flex_shrink(),
)
.child(buttons(&diagnostic, cx.block_id))
.child(div().flex().flex_shrink_0().child(
StyledText::new(text_without_backticks.clone()).with_highlights(
&text_style,
@@ -10669,6 +10528,18 @@ pub fn diagnostic_block_renderer(diagnostic: Diagnostic, _is_valid: bool) -> Ren
}),
),
))
.child(
IconButton::new(("copy-block", cx.block_id), IconName::Copy)
.icon_color(Color::Muted)
.size(ButtonSize::Compact)
.style(ButtonStyle::Transparent)
.visible_on_hover(group_id)
.on_click({
let message = diagnostic.message.clone();
move |_click, cx| cx.write_to_clipboard(ClipboardItem::new(message.clone()))
})
.tooltip(|cx| Tooltip::text("Copy diagnostic message", cx)),
)
.into_any_element()
})
}
@@ -10787,7 +10658,7 @@ trait RangeToAnchorExt {
fn to_anchors(self, snapshot: &MultiBufferSnapshot) -> Range<Anchor>;
}
impl<T: ToOffset<MultiBuffer>> RangeToAnchorExt for Range<T> {
impl<T: ToOffset> RangeToAnchorExt for Range<T> {
fn to_anchors(self, snapshot: &MultiBufferSnapshot) -> Range<Anchor> {
snapshot.anchor_after(self.start)..snapshot.anchor_before(self.end)
}

View File

@@ -1,7 +1,6 @@
use gpui::AppContext;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsSources};
use settings::Settings;
#[derive(Deserialize, Clone)]
pub struct EditorSettings {
@@ -93,8 +92,7 @@ pub enum ShowScrollbar {
#[serde(rename_all = "snake_case")]
pub enum MultiCursorModifier {
Alt,
#[serde(alias = "cmd", alias = "ctrl")]
CmdOrCtrl,
Cmd,
}
#[derive(Clone, Default, Serialize, Deserialize, JsonSchema)]
@@ -225,9 +223,10 @@ impl Settings for EditorSettings {
type FileContent = EditorSettingsContent;
fn load(
sources: SettingsSources<Self::FileContent>,
_: &mut AppContext,
default_value: &Self::FileContent,
user_values: &[&Self::FileContent],
_: &mut gpui::AppContext,
) -> anyhow::Result<Self> {
sources.json_merge()
Self::load_via_json_merge(default_value, user_values)
}
}

View File

@@ -2685,65 +2685,6 @@ fn test_join_lines_with_multi_selection(cx: &mut TestAppContext) {
});
}
#[gpui::test]
async fn test_join_lines_with_git_diff_base(
executor: BackgroundExecutor,
cx: &mut gpui::TestAppContext,
) {
init_test(cx, |_| {});
let mut cx = EditorTestContext::new(cx).await;
let diff_base = r#"
Line 0
Line 1
Line 2
Line 3
"#
.unindent();
cx.set_state(
&r#"
ˇLine 0
Line 1
Line 2
Line 3
"#
.unindent(),
);
cx.set_diff_base(Some(&diff_base));
executor.run_until_parked();
// Join lines
cx.update_editor(|editor, cx| {
editor.join_lines(&JoinLines, cx);
});
executor.run_until_parked();
cx.assert_editor_state(
&r#"
Line 0ˇ Line 1
Line 2
Line 3
"#
.unindent(),
);
// Join again
cx.update_editor(|editor, cx| {
editor.join_lines(&JoinLines, cx);
});
executor.run_until_parked();
cx.assert_editor_state(
&r#"
Line 0 Line 1ˇ Line 2
Line 3
"#
.unindent(),
);
}
#[gpui::test]
async fn test_manipulate_lines_with_single_selection(cx: &mut TestAppContext) {
init_test(cx, |_| {});
@@ -3175,7 +3116,7 @@ fn test_duplicate_line(cx: &mut TestAppContext) {
DisplayPoint::new(3, 0)..DisplayPoint::new(3, 0),
])
});
view.duplicate_line_down(&DuplicateLineDown, cx);
view.duplicate_line(&DuplicateLine::default(), cx);
assert_eq!(view.display_text(cx), "abc\nabc\ndef\ndef\nghi\n\n");
assert_eq!(
view.selections.display_ranges(cx),
@@ -3199,7 +3140,7 @@ fn test_duplicate_line(cx: &mut TestAppContext) {
DisplayPoint::new(1, 2)..DisplayPoint::new(2, 1),
])
});
view.duplicate_line_down(&DuplicateLineDown, cx);
view.duplicate_line(&DuplicateLine::default(), cx);
assert_eq!(view.display_text(cx), "abc\ndef\nghi\nabc\ndef\nghi\n");
assert_eq!(
view.selections.display_ranges(cx),
@@ -3225,7 +3166,7 @@ fn test_duplicate_line(cx: &mut TestAppContext) {
DisplayPoint::new(3, 0)..DisplayPoint::new(3, 0),
])
});
view.duplicate_line_up(&DuplicateLineUp, cx);
view.duplicate_line(&DuplicateLine { move_upwards: true }, cx);
assert_eq!(view.display_text(cx), "abc\nabc\ndef\ndef\nghi\n\n");
assert_eq!(
view.selections.display_ranges(cx),
@@ -3249,7 +3190,7 @@ fn test_duplicate_line(cx: &mut TestAppContext) {
DisplayPoint::new(1, 2)..DisplayPoint::new(2, 1),
])
});
view.duplicate_line_up(&DuplicateLineUp, cx);
view.duplicate_line(&DuplicateLine { move_upwards: true }, cx);
assert_eq!(view.display_text(cx), "abc\ndef\nghi\nabc\ndef\nghi\n");
assert_eq!(
view.selections.display_ranges(cx),
@@ -3376,7 +3317,7 @@ fn test_move_line_up_down_with_blocks(cx: &mut TestAppContext) {
position: snapshot.anchor_after(Point::new(2, 0)),
disposition: BlockDisposition::Below,
height: 1,
render: Box::new(|_| div().into_any()),
render: Arc::new(|_| div().into_any()),
}],
Some(Autoscroll::fit()),
cx,
@@ -4146,47 +4087,6 @@ let foo = «2ˇ»;"#,
);
}
#[gpui::test]
async fn test_select_previous_multibuffer(cx: &mut gpui::TestAppContext) {
init_test(cx, |_| {});
let mut cx = EditorTestContext::new_multibuffer(
cx,
[
indoc! {
"aaa\n«bbb\nccc\n»ddd"
},
indoc! {
"aaa\n«bbb\nccc\n»ddd"
},
],
);
cx.assert_editor_state(indoc! {"
ˇbbb
ccc
bbb
ccc
"});
cx.dispatch_action(SelectPrevious::default());
cx.assert_editor_state(indoc! {"
«bbbˇ»
ccc
bbb
ccc
"});
cx.dispatch_action(SelectPrevious::default());
cx.assert_editor_state(indoc! {"
«bbbˇ»
ccc
«bbbˇ»
ccc
"});
}
#[gpui::test]
async fn test_select_previous_with_single_caret(cx: &mut gpui::TestAppContext) {
init_test(cx, |_| {});
@@ -7322,7 +7222,7 @@ fn test_highlighted_ranges(cx: &mut TestAppContext) {
|range: Range<Point>| buffer.anchor_after(range.start)..buffer.anchor_after(range.end);
editor.highlight_background::<Type1>(
&[
vec![
anchor_range(Point::new(2, 1)..Point::new(2, 3)),
anchor_range(Point::new(4, 2)..Point::new(4, 4)),
anchor_range(Point::new(6, 3)..Point::new(6, 5)),
@@ -7332,7 +7232,7 @@ fn test_highlighted_ranges(cx: &mut TestAppContext) {
cx,
);
editor.highlight_background::<Type2>(
&[
vec![
anchor_range(Point::new(3, 2)..Point::new(3, 5)),
anchor_range(Point::new(5, 3)..Point::new(5, 6)),
anchor_range(Point::new(7, 4)..Point::new(7, 7)),
@@ -8557,6 +8457,105 @@ async fn test_document_format_with_prettier(cx: &mut gpui::TestAppContext) {
);
}
#[gpui::test]
async fn test_find_all_references(cx: &mut gpui::TestAppContext) {
init_test(cx, |_| {});
let mut cx = EditorLspTestContext::new_rust(
lsp::ServerCapabilities {
document_formatting_provider: Some(lsp::OneOf::Left(true)),
..Default::default()
},
cx,
)
.await;
cx.set_state(indoc! {"
fn foo(«paramˇ»: i64) {
println!(param);
}
"});
cx.lsp
.handle_request::<lsp::request::References, _, _>(move |_, _| async move {
Ok(Some(vec![
lsp::Location {
uri: lsp::Url::from_file_path("/root/dir/file.rs").unwrap(),
range: lsp::Range::new(lsp::Position::new(0, 7), lsp::Position::new(0, 12)),
},
lsp::Location {
uri: lsp::Url::from_file_path("/root/dir/file.rs").unwrap(),
range: lsp::Range::new(lsp::Position::new(1, 13), lsp::Position::new(1, 18)),
},
]))
});
let references = cx
.update_editor(|editor, cx| editor.find_all_references(&FindAllReferences, cx))
.unwrap();
cx.executor().run_until_parked();
cx.executor().start_waiting();
references.await.unwrap();
cx.assert_editor_state(indoc! {"
fn foo(param: i64) {
println!(«paramˇ»);
}
"});
let references = cx
.update_editor(|editor, cx| editor.find_all_references(&FindAllReferences, cx))
.unwrap();
cx.executor().run_until_parked();
cx.executor().start_waiting();
references.await.unwrap();
cx.assert_editor_state(indoc! {"
fn foo(«paramˇ»: i64) {
println!(param);
}
"});
cx.set_state(indoc! {"
fn foo(param: i64) {
let a = param;
let aˇ = param;
let a = param;
println!(param);
}
"});
cx.lsp
.handle_request::<lsp::request::References, _, _>(move |_, _| async move {
Ok(Some(vec![lsp::Location {
uri: lsp::Url::from_file_path("/root/dir/file.rs").unwrap(),
range: lsp::Range::new(lsp::Position::new(2, 8), lsp::Position::new(2, 9)),
}]))
});
let references = cx
.update_editor(|editor, cx| editor.find_all_references(&FindAllReferences, cx))
.unwrap();
cx.executor().run_until_parked();
cx.executor().start_waiting();
references.await.unwrap();
cx.assert_editor_state(indoc! {"
fn foo(param: i64) {
let a = param;
let «aˇ» = param;
let a = param;
println!(param);
}
"});
}
#[gpui::test]
async fn test_addition_reverts(cx: &mut gpui::TestAppContext) {
init_test(cx, |_| {});

File diff suppressed because it is too large

View File

@@ -1,4 +1,4 @@
pub mod blame;
pub mod permalink;
use std::ops::Range;

View File

@@ -1,706 +0,0 @@
use anyhow::Result;
use collections::HashMap;
use git::{
blame::{Blame, BlameEntry},
Oid,
};
use gpui::{Model, ModelContext, Subscription, Task};
use language::{Bias, Buffer, BufferSnapshot, Edit};
use project::{Item, Project};
use smallvec::SmallVec;
use sum_tree::SumTree;
use url::Url;
#[derive(Clone, Debug, Default)]
pub struct GitBlameEntry {
pub rows: u32,
pub blame: Option<BlameEntry>,
}
#[derive(Clone, Debug, Default)]
pub struct GitBlameEntrySummary {
rows: u32,
}
impl sum_tree::Item for GitBlameEntry {
type Summary = GitBlameEntrySummary;
fn summary(&self) -> Self::Summary {
GitBlameEntrySummary { rows: self.rows }
}
}
impl sum_tree::Summary for GitBlameEntrySummary {
type Context = ();
fn add_summary(&mut self, summary: &Self, _cx: &()) {
self.rows += summary.rows;
}
}
impl<'a> sum_tree::Dimension<'a, GitBlameEntrySummary> for u32 {
fn add_summary(&mut self, summary: &'a GitBlameEntrySummary, _cx: &()) {
*self += summary.rows;
}
}
pub struct GitBlame {
project: Model<Project>,
buffer: Model<Buffer>,
entries: SumTree<GitBlameEntry>,
permalinks: HashMap<Oid, Url>,
messages: HashMap<Oid, String>,
buffer_snapshot: BufferSnapshot,
buffer_edits: text::Subscription,
task: Task<Result<()>>,
generated: bool,
_refresh_subscription: Subscription,
}
impl GitBlame {
pub fn new(
buffer: Model<Buffer>,
project: Model<Project>,
cx: &mut ModelContext<Self>,
) -> Self {
let entries = SumTree::from_item(
GitBlameEntry {
rows: buffer.read(cx).max_point().row + 1,
blame: None,
},
&(),
);
let refresh_subscription = cx.subscribe(&project, {
let buffer = buffer.clone();
move |this, _, event, cx| match event {
project::Event::WorktreeUpdatedEntries(_, updated) => {
let project_entry_id = buffer.read(cx).entry_id(cx);
if updated
.iter()
.any(|(_, entry_id, _)| project_entry_id == Some(*entry_id))
{
log::debug!("Updated buffers. Regenerating blame data...",);
this.generate(cx);
}
}
project::Event::WorktreeUpdatedGitRepositories => {
log::debug!("Status of git repositories updated. Regenerating blame data...",);
this.generate(cx);
}
_ => {}
}
});
let buffer_snapshot = buffer.read(cx).snapshot();
let buffer_edits = buffer.update(cx, |buffer, _| buffer.subscribe());
let mut this = Self {
project,
buffer,
buffer_snapshot,
entries,
buffer_edits,
permalinks: HashMap::default(),
messages: HashMap::default(),
task: Task::ready(Ok(())),
generated: false,
_refresh_subscription: refresh_subscription,
};
this.generate(cx);
this
}
pub fn has_generated_entries(&self) -> bool {
self.generated
}
pub fn permalink_for_entry(&self, entry: &BlameEntry) -> Option<Url> {
self.permalinks.get(&entry.sha).cloned()
}
pub fn message_for_entry(&self, entry: &BlameEntry) -> Option<String> {
self.messages.get(&entry.sha).cloned()
}
pub fn blame_for_rows<'a>(
&'a mut self,
rows: impl 'a + IntoIterator<Item = Option<u32>>,
cx: &mut ModelContext<Self>,
) -> impl 'a + Iterator<Item = Option<BlameEntry>> {
self.sync(cx);
let mut cursor = self.entries.cursor::<u32>();
rows.into_iter().map(move |row| {
let row = row?;
cursor.seek_forward(&row, Bias::Right, &());
cursor.item()?.blame.clone()
})
}
fn sync(&mut self, cx: &mut ModelContext<Self>) {
let edits = self.buffer_edits.consume();
let new_snapshot = self.buffer.read(cx).snapshot();
let mut row_edits = edits
.into_iter()
.map(|edit| {
let old_point_range = self.buffer_snapshot.offset_to_point(edit.old.start)
..self.buffer_snapshot.offset_to_point(edit.old.end);
let new_point_range = new_snapshot.offset_to_point(edit.new.start)
..new_snapshot.offset_to_point(edit.new.end);
if old_point_range.start.column
== self.buffer_snapshot.line_len(old_point_range.start.row)
&& (new_snapshot.chars_at(edit.new.start).next() == Some('\n')
|| self.buffer_snapshot.line_len(old_point_range.end.row) == 0)
{
Edit {
old: old_point_range.start.row + 1..old_point_range.end.row + 1,
new: new_point_range.start.row + 1..new_point_range.end.row + 1,
}
} else if old_point_range.start.column == 0
&& old_point_range.end.column == 0
&& new_point_range.end.column == 0
{
Edit {
old: old_point_range.start.row..old_point_range.end.row,
new: new_point_range.start.row..new_point_range.end.row,
}
} else {
Edit {
old: old_point_range.start.row..old_point_range.end.row + 1,
new: new_point_range.start.row..new_point_range.end.row + 1,
}
}
})
.peekable();
let mut new_entries = SumTree::new();
let mut cursor = self.entries.cursor::<u32>();
while let Some(mut edit) = row_edits.next() {
while let Some(next_edit) = row_edits.peek() {
if edit.old.end >= next_edit.old.start {
edit.old.end = next_edit.old.end;
edit.new.end = next_edit.new.end;
row_edits.next();
} else {
break;
}
}
new_entries.append(cursor.slice(&edit.old.start, Bias::Right, &()), &());
if edit.new.start > new_entries.summary().rows {
new_entries.push(
GitBlameEntry {
rows: edit.new.start - new_entries.summary().rows,
blame: cursor.item().and_then(|entry| entry.blame.clone()),
},
&(),
);
}
cursor.seek(&edit.old.end, Bias::Right, &());
if !edit.new.is_empty() {
new_entries.push(
GitBlameEntry {
rows: edit.new.len() as u32,
blame: None,
},
&(),
);
}
let old_end = cursor.end(&());
if row_edits
.peek()
.map_or(true, |next_edit| next_edit.old.start >= old_end)
{
if let Some(entry) = cursor.item() {
if old_end > edit.old.end {
new_entries.push(
GitBlameEntry {
rows: cursor.end(&()) - edit.old.end,
blame: entry.blame.clone(),
},
&(),
);
}
cursor.next(&());
}
}
}
new_entries.append(cursor.suffix(&()), &());
drop(cursor);
self.buffer_snapshot = new_snapshot;
self.entries = new_entries;
}
#[cfg(test)]
fn check_invariants(&mut self, cx: &mut ModelContext<Self>) {
self.sync(cx);
assert_eq!(
self.entries.summary().rows,
self.buffer.read(cx).max_point().row + 1
);
}
fn generate(&mut self, cx: &mut ModelContext<Self>) {
let buffer_edits = self.buffer.update(cx, |buffer, _| buffer.subscribe());
let snapshot = self.buffer.read(cx).snapshot();
let blame = self.project.read(cx).blame_buffer(&self.buffer, None, cx);
self.task = cx.spawn(|this, mut cx| async move {
let (entries, permalinks, messages) = cx
.background_executor()
.spawn({
let snapshot = snapshot.clone();
async move {
let Blame {
entries,
permalinks,
messages,
} = blame.await?;
let mut current_row = 0;
let mut entries = SumTree::from_iter(
entries.into_iter().flat_map(|entry| {
let mut entries = SmallVec::<[GitBlameEntry; 2]>::new();
if entry.range.start > current_row {
let skipped_rows = entry.range.start - current_row;
entries.push(GitBlameEntry {
rows: skipped_rows,
blame: None,
});
}
entries.push(GitBlameEntry {
rows: entry.range.len() as u32,
blame: Some(entry.clone()),
});
current_row = entry.range.end;
entries
}),
&(),
);
let max_row = snapshot.max_point().row;
if max_row >= current_row {
entries.push(
GitBlameEntry {
rows: (max_row + 1) - current_row,
blame: None,
},
&(),
);
}
anyhow::Ok((entries, permalinks, messages))
}
})
.await?;
this.update(&mut cx, |this, cx| {
this.buffer_edits = buffer_edits;
this.buffer_snapshot = snapshot;
this.entries = entries;
this.permalinks = permalinks;
this.messages = messages;
this.generated = true;
cx.notify();
})
});
}
}
#[cfg(test)]
mod tests {
use super::*;
use gpui::Context;
use language::{Point, Rope};
use project::FakeFs;
use rand::prelude::*;
use serde_json::json;
use settings::SettingsStore;
use std::{cmp, env, ops::Range, path::Path};
use unindent::Unindent as _;
use util::RandomCharIter;
macro_rules! assert_blame_rows {
($blame:expr, $rows:expr, $expected:expr, $cx:expr) => {
assert_eq!(
$blame
.blame_for_rows($rows.map(Some), $cx)
.collect::<Vec<_>>(),
$expected
);
};
}
fn init_test(cx: &mut gpui::TestAppContext) {
cx.update(|cx| {
let settings = SettingsStore::test(cx);
cx.set_global(settings);
theme::init(theme::LoadThemes::JustBase, cx);
language::init(cx);
client::init_settings(cx);
workspace::init_settings(cx);
Project::init_settings(cx);
crate::init(cx);
});
}
#[gpui::test]
async fn test_blame_for_rows(cx: &mut gpui::TestAppContext) {
init_test(cx);
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
"/my-repo",
json!({
".git": {},
"file.txt": r#"
AAA Line 1
BBB Line 2 - Modified 1
CCC Line 3 - Modified 2
modified in memory 1
modified in memory 1
DDD Line 4 - Modified 2
EEE Line 5 - Modified 1
FFF Line 6 - Modified 2
"#
.unindent()
}),
)
.await;
fs.set_blame_for_repo(
Path::new("/my-repo/.git"),
vec![(
Path::new("file.txt"),
Blame {
entries: vec![
blame_entry("1b1b1b", 0..1),
blame_entry("0d0d0d", 1..2),
blame_entry("3a3a3a", 2..3),
blame_entry("3a3a3a", 5..6),
blame_entry("0d0d0d", 6..7),
blame_entry("3a3a3a", 7..8),
],
..Default::default()
},
)],
);
let project = Project::test(fs, ["/my-repo".as_ref()], cx).await;
let buffer = project
.update(cx, |project, cx| {
project.open_local_buffer("/my-repo/file.txt", cx)
})
.await
.unwrap();
let git_blame = cx.new_model(|cx| GitBlame::new(buffer.clone(), project, cx));
cx.executor().run_until_parked();
git_blame.update(cx, |blame, cx| {
// All lines
assert_eq!(
blame
.blame_for_rows((0..8).map(Some), cx)
.collect::<Vec<_>>(),
vec![
Some(blame_entry("1b1b1b", 0..1)),
Some(blame_entry("0d0d0d", 1..2)),
Some(blame_entry("3a3a3a", 2..3)),
None,
None,
Some(blame_entry("3a3a3a", 5..6)),
Some(blame_entry("0d0d0d", 6..7)),
Some(blame_entry("3a3a3a", 7..8)),
]
);
// Subset of lines
assert_eq!(
blame
.blame_for_rows((1..4).map(Some), cx)
.collect::<Vec<_>>(),
vec![
Some(blame_entry("0d0d0d", 1..2)),
Some(blame_entry("3a3a3a", 2..3)),
None
]
);
// Subset of lines, with some not displayed
assert_eq!(
blame
.blame_for_rows(vec![Some(1), None, None], cx)
.collect::<Vec<_>>(),
vec![Some(blame_entry("0d0d0d", 1..2)), None, None]
);
});
}
#[gpui::test]
async fn test_blame_for_rows_with_edits(cx: &mut gpui::TestAppContext) {
init_test(cx);
let fs = FakeFs::new(cx.executor());
fs.insert_tree(
"/my-repo",
json!({
".git": {},
"file.txt": r#"
Line 1
Line 2
Line 3
"#
.unindent()
}),
)
.await;
fs.set_blame_for_repo(
Path::new("/my-repo/.git"),
vec![(
Path::new("file.txt"),
Blame {
entries: vec![blame_entry("1b1b1b", 0..4)],
..Default::default()
},
)],
);
let project = Project::test(fs, ["/my-repo".as_ref()], cx).await;
let buffer = project
.update(cx, |project, cx| {
project.open_local_buffer("/my-repo/file.txt", cx)
})
.await
.unwrap();
let git_blame = cx.new_model(|cx| GitBlame::new(buffer.clone(), project, cx));
cx.executor().run_until_parked();
git_blame.update(cx, |blame, cx| {
// Sanity check before edits: make sure that we get the same blame entry for all
// lines.
assert_blame_rows!(
blame,
(0..4),
vec![
Some(blame_entry("1b1b1b", 0..4)),
Some(blame_entry("1b1b1b", 0..4)),
Some(blame_entry("1b1b1b", 0..4)),
Some(blame_entry("1b1b1b", 0..4)),
],
cx
);
});
// Modify a single line, at the start of the line
buffer.update(cx, |buffer, cx| {
buffer.edit([(Point::new(0, 0)..Point::new(0, 0), "X")], None, cx);
});
git_blame.update(cx, |blame, cx| {
assert_blame_rows!(
blame,
(0..2),
vec![None, Some(blame_entry("1b1b1b", 0..4))],
cx
);
});
// Modify a single line, in the middle of the line
buffer.update(cx, |buffer, cx| {
buffer.edit([(Point::new(1, 2)..Point::new(1, 2), "X")], None, cx);
});
git_blame.update(cx, |blame, cx| {
assert_blame_rows!(
blame,
(1..4),
vec![
None,
Some(blame_entry("1b1b1b", 0..4)),
Some(blame_entry("1b1b1b", 0..4))
],
cx
);
});
// Before we insert a newline at the end, sanity check:
git_blame.update(cx, |blame, cx| {
assert_blame_rows!(blame, (3..4), vec![Some(blame_entry("1b1b1b", 0..4))], cx);
});
// Insert a newline at the end
buffer.update(cx, |buffer, cx| {
buffer.edit([(Point::new(3, 6)..Point::new(3, 6), "\n")], None, cx);
});
// Only the new line is marked as edited:
git_blame.update(cx, |blame, cx| {
assert_blame_rows!(
blame,
(3..5),
vec![Some(blame_entry("1b1b1b", 0..4)), None],
cx
);
});
// Before we insert a newline at the start, sanity check:
git_blame.update(cx, |blame, cx| {
assert_blame_rows!(blame, (2..3), vec![Some(blame_entry("1b1b1b", 0..4)),], cx);
});
// Usage example
// Insert a newline at the start of the row
buffer.update(cx, |buffer, cx| {
buffer.edit([(Point::new(2, 0)..Point::new(2, 0), "\n")], None, cx);
});
// Only the new line is marked as edited:
git_blame.update(cx, |blame, cx| {
assert_blame_rows!(
blame,
(2..4),
vec![None, Some(blame_entry("1b1b1b", 0..4)),],
cx
);
});
}
#[gpui::test(iterations = 100)]
async fn test_blame_random(mut rng: StdRng, cx: &mut gpui::TestAppContext) {
let operations = env::var("OPERATIONS")
.map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
.unwrap_or(10);
let max_edits_per_operation = env::var("MAX_EDITS_PER_OPERATION")
.map(|i| {
i.parse()
.expect("invalid `MAX_EDITS_PER_OPERATION` variable")
})
.unwrap_or(5);
init_test(cx);
let fs = FakeFs::new(cx.executor());
let buffer_initial_text_len = rng.gen_range(5..15);
let mut buffer_initial_text = Rope::from(
RandomCharIter::new(&mut rng)
.take(buffer_initial_text_len)
.collect::<String>()
.as_str(),
);
let mut newline_ixs = (0..buffer_initial_text_len).choose_multiple(&mut rng, 5);
newline_ixs.sort_unstable();
for newline_ix in newline_ixs.into_iter().rev() {
let newline_ix = buffer_initial_text.clip_offset(newline_ix, Bias::Right);
buffer_initial_text.replace(newline_ix..newline_ix, "\n");
}
log::info!("initial buffer text: {:?}", buffer_initial_text);
fs.insert_tree(
"/my-repo",
json!({
".git": {},
"file.txt": buffer_initial_text.to_string()
}),
)
.await;
let blame_entries = gen_blame_entries(buffer_initial_text.max_point().row, &mut rng);
log::info!("initial blame entries: {:?}", blame_entries);
fs.set_blame_for_repo(
Path::new("/my-repo/.git"),
vec![(
Path::new("file.txt"),
Blame {
entries: blame_entries,
..Default::default()
},
)],
);
let project = Project::test(fs.clone(), ["/my-repo".as_ref()], cx).await;
let buffer = project
.update(cx, |project, cx| {
project.open_local_buffer("/my-repo/file.txt", cx)
})
.await
.unwrap();
let git_blame = cx.new_model(|cx| GitBlame::new(buffer.clone(), project, cx));
cx.executor().run_until_parked();
git_blame.update(cx, |blame, cx| blame.check_invariants(cx));
for _ in 0..operations {
match rng.gen_range(0..100) {
0..=19 => {
log::info!("quiescing");
cx.executor().run_until_parked();
}
20..=69 => {
log::info!("editing buffer");
buffer.update(cx, |buffer, cx| {
buffer.randomly_edit(&mut rng, max_edits_per_operation, cx);
log::info!("buffer text: {:?}", buffer.text());
});
let blame_entries = gen_blame_entries(
buffer.read_with(cx, |buffer, _| buffer.max_point().row),
&mut rng,
);
log::info!("regenerating blame entries: {:?}", blame_entries);
fs.set_blame_for_repo(
Path::new("/my-repo/.git"),
vec![(
Path::new("file.txt"),
Blame {
entries: blame_entries,
..Default::default()
},
)],
);
}
_ => {
git_blame.update(cx, |blame, cx| blame.check_invariants(cx));
}
}
}
git_blame.update(cx, |blame, cx| blame.check_invariants(cx));
}
fn gen_blame_entries(max_row: u32, rng: &mut StdRng) -> Vec<BlameEntry> {
let mut last_row = 0;
let mut blame_entries = Vec::new();
for ix in 0..5 {
if last_row < max_row {
let row_start = rng.gen_range(last_row..max_row);
let row_end = rng.gen_range(row_start + 1..cmp::min(row_start + 3, max_row) + 1);
blame_entries.push(blame_entry(&ix.to_string(), row_start..row_end));
last_row = row_end;
} else {
break;
}
}
blame_entries
}
fn blame_entry(sha: &str, range: Range<u32>) -> BlameEntry {
BlameEntry {
sha: sha.parse().unwrap(),
range,
..Default::default()
}
}
}
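
Not part of the diff: a simplified, hypothetical sketch of the row-invalidation rule in `GitBlame::sync` above. All names are made up and the special case for a newline inserted at the end of a line is omitted; it only illustrates which buffer rows lose their cached blame entry for a given edit.

use std::ops::Range;

#[derive(Clone, Copy)]
struct RowCol {
    row: u32,
    column: u32,
}

// Hypothetical helper: given the old point range of an edit, return the rows whose
// blame entries must be discarded (simplified; the end-of-line newline case from
// `sync` is not handled here).
fn rows_to_invalidate(old: Range<RowCol>, new_end_column: u32) -> Range<u32> {
    if old.start.column == 0 && old.end.column == 0 && new_end_column == 0 {
        // Whole lines were replaced by whole lines: only those rows are affected.
        old.start.row..old.end.row
    } else {
        // The edit cuts into a line, so the row containing the end is affected too.
        old.start.row..old.end.row + 1
    }
}

fn main() {
    // Typing in the middle of row 1 invalidates row 1 only.
    let edit = RowCol { row: 1, column: 2 }..RowCol { row: 1, column: 3 };
    assert_eq!(rows_to_invalidate(edit, 3), 1..2);
    // Deleting rows 2..4 entirely (column 0 to column 0) invalidates rows 2..4.
    let edit = RowCol { row: 2, column: 0 }..RowCol { row: 4, column: 0 };
    assert_eq!(rows_to_invalidate(edit, 0), 2..4);
}
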

View File

@@ -1,9 +1,10 @@
use std::ops::Range;
use anyhow::{anyhow, Result};
use language::Point;
use url::Url;
pub(crate) enum GitHostingProvider {
enum GitHostingProvider {
Github,
Gitlab,
Gitee,
@@ -28,9 +29,9 @@ impl GitHostingProvider {
/// Returns the fragment portion of the URL for the selected lines in
/// the representation the [`GitHostingProvider`] expects.
fn line_fragment(&self, selection: &Range<u32>) -> String {
if selection.start == selection.end {
let line = selection.start + 1;
fn line_fragment(&self, selection: &Range<Point>) -> String {
if selection.start.row == selection.end.row {
let line = selection.start.row + 1;
match self {
Self::Github | Self::Gitlab | Self::Gitee | Self::Sourcehut | Self::Codeberg => {
@@ -39,8 +40,8 @@ impl GitHostingProvider {
Self::Bitbucket => format!("lines-{}", line),
}
} else {
let start_line = selection.start + 1;
let end_line = selection.end + 1;
let start_line = selection.start.row + 1;
let end_line = selection.end.row + 1;
match self {
Self::Github | Self::Codeberg => format!("L{}-L{}", start_line, end_line),
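
Not part of the diff: a rough sketch of what the GitHub branch of `line_fragment` above produces once zero-based selection rows become one-based line numbers. The free function is hypothetical, and the single-line format is an assumption since it is not shown in this hunk.

use std::ops::Range;

// Hypothetical mirror of the GitHub case of `line_fragment`; rows are zero-based
// and become one-based line numbers in the URL fragment.
fn github_line_fragment(selection: &Range<u32>) -> String {
    if selection.start == selection.end {
        format!("L{}", selection.start + 1) // single line, e.g. row 6 -> "L7" (assumed format)
    } else {
        format!("L{}-L{}", selection.start + 1, selection.end + 1) // e.g. rows 23..47 -> "L24-L48"
    }
}

fn main() {
    assert_eq!(github_line_fragment(&(6..6)), "L7");
    assert_eq!(github_line_fragment(&(23..47)), "L24-L48");
}
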
@@ -57,7 +58,7 @@ pub struct BuildPermalinkParams<'a> {
pub remote_url: &'a str,
pub sha: &'a str,
pub path: &'a str,
pub selection: Option<Range<u32>>,
pub selection: Option<Range<Point>>,
}
pub fn build_permalink(params: BuildPermalinkParams) -> Result<Url> {
@@ -87,42 +88,17 @@ pub fn build_permalink(params: BuildPermalinkParams) -> Result<Url> {
let mut permalink = provider.base_url().join(&path).unwrap();
permalink.set_fragment(line_fragment.as_deref());
Ok(permalink)
}
pub(crate) struct ParsedGitRemote<'a> {
struct ParsedGitRemote<'a> {
pub provider: GitHostingProvider,
pub owner: &'a str,
pub repo: &'a str,
}
pub(crate) struct BuildCommitPermalinkParams<'a> {
pub remote: &'a ParsedGitRemote<'a>,
pub sha: &'a str,
}
pub(crate) fn build_commit_permalink(params: BuildCommitPermalinkParams) -> Url {
let BuildCommitPermalinkParams { sha, remote } = params;
let ParsedGitRemote {
provider,
owner,
repo,
} = remote;
let path = match provider {
GitHostingProvider::Github => format!("{owner}/{repo}/commit/{sha}"),
GitHostingProvider::Gitlab => format!("{owner}/{repo}/-/commit/{sha}"),
GitHostingProvider::Gitee => format!("{owner}/{repo}/commit/{sha}"),
GitHostingProvider::Bitbucket => format!("{owner}/{repo}/commits/{sha}"),
GitHostingProvider::Sourcehut => format!("~{owner}/{repo}/commit/{sha}"),
GitHostingProvider::Codeberg => format!("{owner}/{repo}/commit/{sha}"),
};
provider.base_url().join(&path).unwrap()
}
pub(crate) fn parse_git_remote_url(url: &str) -> Option<ParsedGitRemote> {
fn parse_git_remote_url(url: &str) -> Option<ParsedGitRemote> {
if url.starts_with("git@github.com:") || url.starts_with("https://github.com/") {
let repo_with_owner = url
.trim_start_matches("git@github.com:")
@@ -241,7 +217,7 @@ mod tests {
remote_url: "git@github.com:zed-industries/zed.git",
sha: "e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7",
path: "crates/editor/src/git/permalink.rs",
selection: Some(6..6),
selection: Some(Point::new(6, 1)..Point::new(6, 10)),
})
.unwrap();
@@ -255,7 +231,7 @@ mod tests {
remote_url: "git@github.com:zed-industries/zed.git",
sha: "e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7",
path: "crates/editor/src/git/permalink.rs",
selection: Some(23..47),
selection: Some(Point::new(23, 1)..Point::new(47, 10)),
})
.unwrap();
@@ -283,7 +259,7 @@ mod tests {
remote_url: "https://github.com/zed-industries/zed.git",
sha: "b2efec9824c45fcc90c9a7eb107a50d1772a60aa",
path: "crates/zed/src/main.rs",
selection: Some(6..6),
selection: Some(Point::new(6, 1)..Point::new(6, 10)),
})
.unwrap();
@@ -297,7 +273,7 @@ mod tests {
remote_url: "https://github.com/zed-industries/zed.git",
sha: "b2efec9824c45fcc90c9a7eb107a50d1772a60aa",
path: "crates/zed/src/main.rs",
selection: Some(23..47),
selection: Some(Point::new(23, 1)..Point::new(47, 10)),
})
.unwrap();
@@ -325,7 +301,7 @@ mod tests {
remote_url: "git@gitlab.com:zed-industries/zed.git",
sha: "e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7",
path: "crates/editor/src/git/permalink.rs",
selection: Some(6..6),
selection: Some(Point::new(6, 1)..Point::new(6, 10)),
})
.unwrap();
@@ -339,7 +315,7 @@ mod tests {
remote_url: "git@gitlab.com:zed-industries/zed.git",
sha: "e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7",
path: "crates/editor/src/git/permalink.rs",
selection: Some(23..47),
selection: Some(Point::new(23, 1)..Point::new(47, 10)),
})
.unwrap();
@@ -367,7 +343,7 @@ mod tests {
remote_url: "https://gitlab.com/zed-industries/zed.git",
sha: "b2efec9824c45fcc90c9a7eb107a50d1772a60aa",
path: "crates/zed/src/main.rs",
selection: Some(6..6),
selection: Some(Point::new(6, 1)..Point::new(6, 10)),
})
.unwrap();
@@ -381,7 +357,7 @@ mod tests {
remote_url: "https://gitlab.com/zed-industries/zed.git",
sha: "b2efec9824c45fcc90c9a7eb107a50d1772a60aa",
path: "crates/zed/src/main.rs",
selection: Some(23..47),
selection: Some(Point::new(23, 1)..Point::new(47, 10)),
})
.unwrap();
@@ -409,7 +385,7 @@ mod tests {
remote_url: "git@gitee.com:libkitten/zed.git",
sha: "e5fe811d7ad0fc26934edd76f891d20bdc3bb194",
path: "crates/editor/src/git/permalink.rs",
selection: Some(6..6),
selection: Some(Point::new(6, 1)..Point::new(6, 10)),
})
.unwrap();
@@ -423,7 +399,7 @@ mod tests {
remote_url: "git@gitee.com:libkitten/zed.git",
sha: "e5fe811d7ad0fc26934edd76f891d20bdc3bb194",
path: "crates/editor/src/git/permalink.rs",
selection: Some(23..47),
selection: Some(Point::new(23, 1)..Point::new(47, 10)),
})
.unwrap();
@@ -451,7 +427,7 @@ mod tests {
remote_url: "https://gitee.com/libkitten/zed.git",
sha: "e5fe811d7ad0fc26934edd76f891d20bdc3bb194",
path: "crates/zed/src/main.rs",
selection: Some(6..6),
selection: Some(Point::new(6, 1)..Point::new(6, 10)),
})
.unwrap();
@@ -465,7 +441,7 @@ mod tests {
remote_url: "https://gitee.com/libkitten/zed.git",
sha: "e5fe811d7ad0fc26934edd76f891d20bdc3bb194",
path: "crates/zed/src/main.rs",
selection: Some(23..47),
selection: Some(Point::new(23, 1)..Point::new(47, 10)),
})
.unwrap();
let expected_url = "https://gitee.com/libkitten/zed/blob/e5fe811d7ad0fc26934edd76f891d20bdc3bb194/crates/zed/src/main.rs#L24-48";
@@ -519,7 +495,7 @@ mod tests {
remote_url: "git@bitbucket.org:thorstenzed/testingrepo.git",
sha: "f00b4r",
path: "main.rs",
selection: Some(6..6),
selection: Some(Point::new(6, 1)..Point::new(6, 10)),
})
.unwrap();
@@ -534,7 +510,7 @@ mod tests {
remote_url: "git@bitbucket.org:thorstenzed/testingrepo.git",
sha: "f00b4r",
path: "main.rs",
selection: Some(23..47),
selection: Some(Point::new(23, 1)..Point::new(47, 10)),
})
.unwrap();
@@ -577,7 +553,7 @@ mod tests {
remote_url: "git@git.sr.ht:~rajveermalviya/zed",
sha: "faa6f979be417239b2e070dbbf6392b909224e0b",
path: "crates/editor/src/git/permalink.rs",
selection: Some(6..6),
selection: Some(Point::new(6, 1)..Point::new(6, 10)),
})
.unwrap();
@@ -591,7 +567,7 @@ mod tests {
remote_url: "git@git.sr.ht:~rajveermalviya/zed",
sha: "faa6f979be417239b2e070dbbf6392b909224e0b",
path: "crates/editor/src/git/permalink.rs",
selection: Some(23..47),
selection: Some(Point::new(23, 1)..Point::new(47, 10)),
})
.unwrap();
@@ -619,7 +595,7 @@ mod tests {
remote_url: "https://git.sr.ht/~rajveermalviya/zed",
sha: "faa6f979be417239b2e070dbbf6392b909224e0b",
path: "crates/zed/src/main.rs",
selection: Some(6..6),
selection: Some(Point::new(6, 1)..Point::new(6, 10)),
})
.unwrap();
@@ -633,7 +609,7 @@ mod tests {
remote_url: "https://git.sr.ht/~rajveermalviya/zed",
sha: "faa6f979be417239b2e070dbbf6392b909224e0b",
path: "crates/zed/src/main.rs",
selection: Some(23..47),
selection: Some(Point::new(23, 1)..Point::new(47, 10)),
})
.unwrap();
@@ -661,7 +637,7 @@ mod tests {
remote_url: "git@codeberg.org:rajveermalviya/zed.git",
sha: "faa6f979be417239b2e070dbbf6392b909224e0b",
path: "crates/editor/src/git/permalink.rs",
selection: Some(6..6),
selection: Some(Point::new(6, 1)..Point::new(6, 10)),
})
.unwrap();
@@ -675,7 +651,7 @@ mod tests {
remote_url: "git@codeberg.org:rajveermalviya/zed.git",
sha: "faa6f979be417239b2e070dbbf6392b909224e0b",
path: "crates/editor/src/git/permalink.rs",
selection: Some(23..47),
selection: Some(Point::new(23, 1)..Point::new(47, 10)),
})
.unwrap();
@@ -703,7 +679,7 @@ mod tests {
remote_url: "https://codeberg.org/rajveermalviya/zed.git",
sha: "faa6f979be417239b2e070dbbf6392b909224e0b",
path: "crates/zed/src/main.rs",
selection: Some(6..6),
selection: Some(Point::new(6, 1)..Point::new(6, 10)),
})
.unwrap();
@@ -717,7 +693,7 @@ mod tests {
remote_url: "https://codeberg.org/rajveermalviya/zed.git",
sha: "faa6f979be417239b2e070dbbf6392b909224e0b",
path: "crates/zed/src/main.rs",
selection: Some(23..47),
selection: Some(Point::new(23, 1)..Point::new(47, 10)),
})
.unwrap();

View File

@@ -20,7 +20,7 @@ pub fn refresh_matching_bracket_highlights(editor: &mut Editor, cx: &mut ViewCon
.innermost_enclosing_bracket_ranges(head..head, None)
{
editor.highlight_background::<MatchingBracketHighlight>(
&[
vec![
opening_range.to_anchors(&snapshot.buffer_snapshot),
closing_range.to_anchors(&snapshot.buffer_snapshot),
],

View File

@@ -93,7 +93,7 @@ impl Editor {
modifiers: Modifiers,
cx: &mut ViewContext<Self>,
) {
if !modifiers.secondary() || self.has_pending_selection() {
if !modifiers.command || self.has_pending_selection() {
self.hide_hovered_link(cx);
return;
}
@@ -113,7 +113,7 @@ impl Editor {
&snapshot,
point_for_position,
self,
modifiers.secondary(),
modifiers.command,
modifiers.shift,
cx,
);
@@ -256,7 +256,7 @@ pub fn update_inlay_link_and_hover_points(
snapshot: &EditorSnapshot,
point_for_position: PointForPosition,
editor: &mut Editor,
secondary_held: bool,
cmd_held: bool,
shift_held: bool,
cx: &mut ViewContext<'_, Editor>,
) {
@@ -394,9 +394,7 @@ pub fn update_inlay_link_and_hover_points(
if let Some((language_server_id, location)) =
hovered_hint_part.location
{
if secondary_held
&& !editor.has_pending_nonempty_selection()
{
if cmd_held && !editor.has_pending_nonempty_selection() {
go_to_definition_updated = true;
show_link_definition(
shift_held,
@@ -702,7 +700,10 @@ mod tests {
use gpui::Modifiers;
use indoc::indoc;
use language::language_settings::InlayHintSettings;
use lsp::request::{GotoDefinition, GotoTypeDefinition};
use lsp::{
request::{GotoDefinition, GotoTypeDefinition},
References,
};
use util::assert_set_eq;
use workspace::item::Item;
@@ -761,7 +762,7 @@ mod tests {
let «variable» = A;
"});
cx.simulate_modifiers_change(Modifiers::secondary_key());
cx.simulate_modifiers_change(Modifiers::command());
cx.run_until_parked();
// Assert no link highlights
cx.assert_editor_text_highlights::<HoveredLinkState>(indoc! {"
@@ -822,7 +823,7 @@ mod tests {
])))
});
cx.simulate_mouse_move(hover_point, Modifiers::secondary_key());
cx.simulate_mouse_move(hover_point, Modifiers::command());
requests.next().await;
cx.background_executor.run_until_parked();
cx.assert_editor_text_highlights::<HoveredLinkState>(indoc! {"
@@ -848,7 +849,7 @@ mod tests {
])))
});
cx.simulate_mouse_move(hover_point, Modifiers::secondary_key());
cx.simulate_mouse_move(hover_point, Modifiers::command());
requests.next().await;
cx.background_executor.run_until_parked();
cx.assert_editor_text_highlights::<HoveredLinkState>(indoc! {"
@@ -867,7 +868,7 @@ mod tests {
// No definitions returned
Ok(Some(lsp::GotoDefinitionResponse::Link(vec![])))
});
cx.simulate_mouse_move(hover_point, Modifiers::secondary_key());
cx.simulate_mouse_move(hover_point, Modifiers::command());
requests.next().await;
cx.background_executor.run_until_parked();
@@ -911,7 +912,7 @@ mod tests {
])))
});
cx.simulate_modifiers_change(Modifiers::secondary_key());
cx.simulate_modifiers_change(Modifiers::command());
requests.next().await;
cx.background_executor.run_until_parked();
@@ -927,7 +928,7 @@ mod tests {
fn do_work() { test(); }
"});
cx.simulate_mouse_move(hover_point, Modifiers::secondary_key());
cx.simulate_mouse_move(hover_point, Modifiers::command());
cx.background_executor.run_until_parked();
cx.assert_editor_text_highlights::<HoveredLinkState>(indoc! {"
fn test() { do_work(); }
@@ -939,7 +940,7 @@ mod tests {
fn test() { do_work(); }
fn do_work() { tesˇt(); }
"});
cx.simulate_mouse_move(hover_point, Modifiers::secondary_key());
cx.simulate_mouse_move(hover_point, Modifiers::command());
cx.background_executor.run_until_parked();
cx.assert_editor_text_highlights::<HoveredLinkState>(indoc! {"
fn test() { do_work(); }
@@ -947,7 +948,7 @@ mod tests {
"});
// Cmd click with existing definition doesn't re-request and dismisses highlight
cx.simulate_click(hover_point, Modifiers::secondary_key());
cx.simulate_click(hover_point, Modifiers::command());
cx.lsp
.handle_request::<GotoDefinition, _, _>(move |_, _| async move {
// Empty definition response to make sure we aren't hitting the lsp and using
@@ -986,7 +987,7 @@ mod tests {
},
])))
});
cx.simulate_click(hover_point, Modifiers::secondary_key());
cx.simulate_click(hover_point, Modifiers::command());
requests.next().await;
cx.background_executor.run_until_parked();
cx.assert_editor_state(indoc! {"
@@ -1029,7 +1030,7 @@ mod tests {
s.set_pending_anchor_range(anchor_range, crate::SelectMode::Character)
});
});
cx.simulate_mouse_move(hover_point, Modifiers::secondary_key());
cx.simulate_mouse_move(hover_point, Modifiers::command());
cx.background_executor.run_until_parked();
assert!(requests.try_next().is_err());
cx.assert_editor_text_highlights::<HoveredLinkState>(indoc! {"
@@ -1143,7 +1144,7 @@ mod tests {
});
// Press cmd to trigger highlight
let hover_point = cx.pixel_position_for(midpoint);
cx.simulate_mouse_move(hover_point, Modifiers::secondary_key());
cx.simulate_mouse_move(hover_point, Modifiers::command());
cx.background_executor.run_until_parked();
cx.update_editor(|editor, cx| {
let snapshot = editor.snapshot(cx);
@@ -1174,9 +1175,9 @@ mod tests {
assert!(actual_ranges.is_empty(), "When no cmd is pressed, should have no hint label selected, but got: {actual_ranges:?}");
});
cx.simulate_modifiers_change(Modifiers::secondary_key());
cx.simulate_modifiers_change(Modifiers::command());
cx.background_executor.run_until_parked();
cx.simulate_click(hover_point, Modifiers::secondary_key());
cx.simulate_click(hover_point, Modifiers::command());
cx.background_executor.run_until_parked();
cx.assert_editor_state(indoc! {"
struct «TestStructˇ»;
@@ -1206,12 +1207,12 @@ mod tests {
Let's test a [complex](https://zed.dev/channel/had-(ˇoops)) case.
"});
cx.simulate_mouse_move(screen_coord, Modifiers::secondary_key());
cx.simulate_mouse_move(screen_coord, Modifiers::command());
cx.assert_editor_text_highlights::<HoveredLinkState>(indoc! {"
Let's test a [complex](«https://zed.dev/channel/had-(oops)ˇ») case.
"});
cx.simulate_click(screen_coord, Modifiers::secondary_key());
cx.simulate_click(screen_coord, Modifiers::command());
assert_eq!(
cx.opened_url(),
Some("https://zed.dev/channel/had-(oops)".into())
@@ -1234,12 +1235,12 @@ mod tests {
let screen_coord =
cx.pixel_position(indoc! {"https://zed.dev/relˇeases is a cool webpage."});
cx.simulate_mouse_move(screen_coord, Modifiers::secondary_key());
cx.simulate_mouse_move(screen_coord, Modifiers::command());
cx.assert_editor_text_highlights::<HoveredLinkState>(
indoc! {"«https://zed.dev/releasesˇ» is a cool webpage."},
);
cx.simulate_click(screen_coord, Modifiers::secondary_key());
cx.simulate_click(screen_coord, Modifiers::command());
assert_eq!(cx.opened_url(), Some("https://zed.dev/releases".into()));
}
@@ -1259,12 +1260,155 @@ mod tests {
let screen_coord =
cx.pixel_position(indoc! {"A cool webpage is https://zed.dev/releˇases"});
cx.simulate_mouse_move(screen_coord, Modifiers::secondary_key());
cx.simulate_mouse_move(screen_coord, Modifiers::command());
cx.assert_editor_text_highlights::<HoveredLinkState>(
indoc! {"A cool webpage is «https://zed.dev/releasesˇ»"},
);
cx.simulate_click(screen_coord, Modifiers::secondary_key());
cx.simulate_click(screen_coord, Modifiers::command());
assert_eq!(cx.opened_url(), Some("https://zed.dev/releases".into()));
}
#[gpui::test]
async fn test_cmd_click_back_and_forth(cx: &mut gpui::TestAppContext) {
init_test(cx, |_| {});
let mut cx = EditorLspTestContext::new_rust(lsp::ServerCapabilities::default(), cx).await;
cx.set_state(indoc! {"
fn test() {
do_work();
fn do_work() {
test();
}
"});
// cmd-click on `test` definition and usage, and expect Zed to allow going back and forth,
// because cmd-click first searches for definitions to go to, and then falls back to symbol usages.
let definition_hover_point = cx.pixel_position(indoc! {"
fn testˇ() {
do_work();
}
fn do_work() {
test();
}
"});
let definition_display_point = cx.display_point(indoc! {"
fn testˇ() {
do_work();
}
fn do_work() {
test();
}
"});
let definition_range = cx.lsp_range(indoc! {"
fn «test»() {
do_work();
}
fn do_work() {
test();
}
"});
let reference_hover_point = cx.pixel_position(indoc! {"
fn test() {
do_work();
}
fn do_work() {
testˇ();
}
"});
let reference_display_point = cx.display_point(indoc! {"
fn test() {
do_work();
}
fn do_work() {
testˇ();
}
"});
let reference_range = cx.lsp_range(indoc! {"
fn test() {
do_work();
}
fn do_work() {
«test»();
}
"});
let expected_uri = cx.buffer_lsp_url.clone();
cx.lsp
.handle_request::<GotoDefinition, _, _>(move |params, _| {
let expected_uri = expected_uri.clone();
async move {
assert_eq!(
params.text_document_position_params.text_document.uri,
expected_uri
);
let position = params.text_document_position_params.position;
Ok(Some(lsp::GotoDefinitionResponse::Link(
if position.line == reference_display_point.row()
&& position.character == reference_display_point.column()
{
vec![lsp::LocationLink {
origin_selection_range: None,
target_uri: params.text_document_position_params.text_document.uri,
target_range: definition_range,
target_selection_range: definition_range,
}]
} else {
// We cannot navigate to the definition from anywhere other than its reference point
Vec::new()
},
)))
}
});
let expected_uri = cx.buffer_lsp_url.clone();
cx.lsp.handle_request::<References, _, _>(move |params, _| {
let expected_uri = expected_uri.clone();
async move {
assert_eq!(
params.text_document_position.text_document.uri,
expected_uri
);
let position = params.text_document_position.position;
// Zed should not look for references if GotoDefinition works and returns a non-empty result
assert_eq!(position.line, definition_display_point.row());
assert_eq!(position.character, definition_display_point.column());
Ok(Some(vec![lsp::Location {
uri: params.text_document_position.text_document.uri,
range: reference_range,
}]))
}
});
for _ in 0..5 {
cx.simulate_click(definition_hover_point, Modifiers::command());
cx.background_executor.run_until_parked();
cx.assert_editor_state(indoc! {"
fn test() {
do_work();
}
fn do_work() {
«testˇ»();
}
"});
cx.simulate_click(reference_hover_point, Modifiers::command());
cx.background_executor.run_until_parked();
cx.assert_editor_state(indoc! {"
fn «testˇ»() {
do_work();
}
fn do_work() {
test();
}
"});
}
}
}

View File

@@ -4,18 +4,17 @@ use crate::{
Anchor, AnchorRangeExt, DisplayPoint, Editor, EditorSettings, EditorSnapshot, EditorStyle,
ExcerptId, Hover, RangeToAnchorExt,
};
use futures::{stream::FuturesUnordered, FutureExt};
use futures::FutureExt;
use gpui::{
div, px, AnyElement, CursorStyle, Hsla, InteractiveElement, IntoElement, MouseButton,
div, px, AnyElement, CursorStyle, Hsla, InteractiveElement, IntoElement, Model, MouseButton,
ParentElement, Pixels, SharedString, Size, StatefulInteractiveElement, Styled, Task,
ViewContext, WeakView,
};
use language::{markdown, Bias, DiagnosticEntry, Language, LanguageRegistry, ParsedMarkdown};
use lsp::DiagnosticSeverity;
use project::{HoverBlock, HoverBlockKind, InlayHintLabelPart};
use project::{HoverBlock, HoverBlockKind, InlayHintLabelPart, Project};
use settings::Settings;
use smol::stream::StreamExt;
use std::{ops::Range, sync::Arc, time::Duration};
use ui::{prelude::*, Tooltip};
use util::TryFutureExt;
@@ -84,20 +83,13 @@ pub fn hover_at_inlay(editor: &mut Editor, inlay_hover: InlayHover, cx: &mut Vie
return;
};
if editor
.hover_state
.info_popovers
.iter()
.any(|InfoPopover { symbol_range, .. }| {
if let RangeInEditor::Inlay(range) = symbol_range {
if range == &inlay_hover.range {
// Hover triggered from same location as last time. Don't show again.
return true;
}
if let Some(InfoPopover { symbol_range, .. }) = &editor.hover_state.info_popover {
if let RangeInEditor::Inlay(range) = symbol_range {
if range == &inlay_hover.range {
// Hover triggered from same location as last time. Don't show again.
return;
}
false
})
{
}
hide_hover(editor, cx);
}
@@ -115,13 +107,15 @@ pub fn hover_at_inlay(editor: &mut Editor, inlay_hover: InlayHover, cx: &mut Vie
let parsed_content = parse_blocks(&blocks, &language_registry, None).await;
let hover_popover = InfoPopover {
project: project.clone(),
symbol_range: RangeInEditor::Inlay(inlay_hover.range.clone()),
blocks,
parsed_content,
};
this.update(&mut cx, |this, cx| {
// TODO: no background highlights happen for inlays currently
this.hover_state.info_popovers = vec![hover_popover];
this.hover_state.info_popover = Some(hover_popover);
cx.notify();
})?;
@@ -138,9 +132,8 @@ pub fn hover_at_inlay(editor: &mut Editor, inlay_hover: InlayHover, cx: &mut Vie
/// Triggered by the `Hover` action when the cursor is not over a symbol or when the
/// selections changed.
pub fn hide_hover(editor: &mut Editor, cx: &mut ViewContext<Editor>) -> bool {
let info_popovers = editor.hover_state.info_popovers.drain(..);
let diagnostics_popover = editor.hover_state.diagnostic_popover.take();
let did_hide = info_popovers.count() > 0 || diagnostics_popover.is_some();
let did_hide = editor.hover_state.info_popover.take().is_some()
| editor.hover_state.diagnostic_popover.take().is_some();
editor.hover_state.info_task = None;
editor.hover_state.triggered_from = None;
@@ -197,26 +190,22 @@ fn show_hover(
};
if !ignore_timeout {
if editor
.hover_state
.info_popovers
.iter()
.any(|InfoPopover { symbol_range, .. }| {
symbol_range
.as_text_range()
.map(|range| {
let hover_range = range.to_offset(&snapshot.buffer_snapshot);
// LSP returns a hover result for the end index of ranges that should be hovered, so we need to
// use an inclusive range here to check if we should dismiss the popover
(hover_range.start..=hover_range.end).contains(&multibuffer_offset)
})
.unwrap_or(false)
})
{
// Hover triggered from same location as last time. Don't show again.
return;
} else {
hide_hover(editor, cx);
if let Some(InfoPopover { symbol_range, .. }) = &editor.hover_state.info_popover {
if symbol_range
.as_text_range()
.map(|range| {
let hover_range = range.to_offset(&snapshot.buffer_snapshot);
// LSP returns a hover result for the end index of ranges that should be hovered, so we need to
// use an inclusive range here to check if we should dismiss the popover
(hover_range.start..=hover_range.end).contains(&multibuffer_offset)
})
.unwrap_or(false)
{
// Hover triggered from same location as last time. Don't show again.
return;
} else {
hide_hover(editor, cx);
}
}
}
@@ -295,14 +284,10 @@ fn show_hover(
});
})?;
let hovers_response = hover_request.await;
let language_registry = project.update(&mut cx, |p, _| p.languages().clone())?;
let hover_result = hover_request.await.ok().flatten();
let snapshot = this.update(&mut cx, |this, cx| this.snapshot(cx))?;
let mut hover_highlights = Vec::with_capacity(hovers_response.len());
let mut info_popovers = Vec::with_capacity(hovers_response.len());
let mut info_popover_tasks = hovers_response
.into_iter()
.map(|hover_result| async {
let hover_popover = match hover_result {
Some(hover_result) if !hover_result.is_empty() => {
// Create symbol range of anchors for highlighting and filtering of future requests.
let range = hover_result
.range
@@ -318,42 +303,44 @@ fn show_hover(
})
.unwrap_or_else(|| anchor..anchor);
let language_registry =
project.update(&mut cx, |p, _| p.languages().clone())?;
let blocks = hover_result.contents;
let language = hover_result.language;
let parsed_content = parse_blocks(&blocks, &language_registry, language).await;
(
range.clone(),
InfoPopover {
symbol_range: RangeInEditor::Text(range),
parsed_content,
},
)
})
.collect::<FuturesUnordered<_>>();
while let Some((highlight_range, info_popover)) = info_popover_tasks.next().await {
hover_highlights.push(highlight_range);
info_popovers.push(info_popover);
}
Some(InfoPopover {
project: project.clone(),
symbol_range: RangeInEditor::Text(range),
blocks,
parsed_content,
})
}
this.update(&mut cx, |editor, cx| {
if hover_highlights.is_empty() {
editor.clear_background_highlights::<HoverState>(cx);
} else {
_ => None,
};
this.update(&mut cx, |this, cx| {
if let Some(symbol_range) = hover_popover
.as_ref()
.and_then(|hover_popover| hover_popover.symbol_range.as_text_range())
{
// Highlight the selected symbol using a background highlight
editor.highlight_background::<HoverState>(
&hover_highlights,
this.highlight_background::<HoverState>(
vec![symbol_range],
|theme| theme.element_hover, // todo update theme
cx,
);
} else {
this.clear_background_highlights::<HoverState>(cx);
}
editor.hover_state.info_popovers = info_popovers;
this.hover_state.info_popover = hover_popover;
cx.notify();
cx.refresh();
})?;
anyhow::Ok(())
Ok::<_, anyhow::Error>(())
}
.log_err()
});
@@ -375,12 +362,12 @@ async fn parse_blocks(
match &block.kind {
HoverBlockKind::PlainText => {
markdown::new_paragraph(&mut text, &mut Vec::new());
text.push_str(&block.text.replace("\\n", "\n"));
text.push_str(&block.text);
}
HoverBlockKind::Markdown => {
markdown::parse_markdown_block(
&block.text.replace("\\n", "\n"),
&block.text,
language_registry,
language.clone(),
&mut text,
@@ -435,7 +422,7 @@ async fn parse_blocks(
#[derive(Default)]
pub struct HoverState {
pub info_popovers: Vec<InfoPopover>,
pub info_popover: Option<InfoPopover>,
pub diagnostic_popover: Option<DiagnosticPopover>,
pub triggered_from: Option<Anchor>,
pub info_task: Option<Task<Option<()>>>,
@@ -443,7 +430,7 @@ pub struct HoverState {
impl HoverState {
pub fn visible(&self) -> bool {
!self.info_popovers.is_empty() || self.diagnostic_popover.is_some()
self.info_popover.is_some() || self.diagnostic_popover.is_some()
}
pub fn render(
@@ -462,20 +449,12 @@ impl HoverState {
.as_ref()
.map(|diagnostic_popover| &diagnostic_popover.local_diagnostic.range.start)
.or_else(|| {
self.info_popovers.iter().find_map(|info_popover| {
match &info_popover.symbol_range {
RangeInEditor::Text(range) => Some(&range.start),
RangeInEditor::Inlay(_) => None,
}
})
})
.or_else(|| {
self.info_popovers.iter().find_map(|info_popover| {
match &info_popover.symbol_range {
RangeInEditor::Text(_) => None,
RangeInEditor::Inlay(range) => Some(&range.inlay_position),
}
})
self.info_popover
.as_ref()
.map(|info_popover| match &info_popover.symbol_range {
RangeInEditor::Text(range) => &range.start,
RangeInEditor::Inlay(range) => &range.inlay_position,
})
})?;
let point = anchor.to_display_point(&snapshot.display_snapshot);
@@ -489,8 +468,8 @@ impl HoverState {
if let Some(diagnostic_popover) = self.diagnostic_popover.as_ref() {
elements.push(diagnostic_popover.render(style, max_size, cx));
}
for info_popover in &mut self.info_popovers {
elements.push(info_popover.render(style, max_size, workspace.clone(), cx));
if let Some(info_popover) = self.info_popover.as_mut() {
elements.push(info_popover.render(style, max_size, workspace, cx));
}
Some((point, elements))
@@ -499,7 +478,9 @@ impl HoverState {
#[derive(Debug, Clone)]
pub struct InfoPopover {
pub project: Model<Project>,
symbol_range: RangeInEditor,
pub blocks: Vec<HoverBlock>,
parsed_content: ParsedMarkdown,
}
@@ -683,19 +664,12 @@ mod tests {
cx.editor(|editor, _| {
assert!(editor.hover_state.visible());
assert_eq!(
editor.hover_state.info_popovers.len(),
1,
"Expected exactly one hover but got: {:?}",
editor.hover_state.info_popovers
);
let rendered = editor
.hover_state
.info_popovers
.first()
.cloned()
.unwrap()
.parsed_content;
assert_eq!(rendered.text, "some basic docs".to_string())
editor.hover_state.info_popover.clone().unwrap().blocks,
vec![HoverBlock {
text: "some basic docs".to_string(),
kind: HoverBlockKind::Markdown,
},]
)
});
// Mouse moved with no hover response dismisses
@@ -750,19 +724,12 @@ mod tests {
cx.condition(|editor, _| editor.hover_state.visible()).await;
cx.editor(|editor, _| {
assert_eq!(
editor.hover_state.info_popovers.len(),
1,
"Expected exactly one hover but got: {:?}",
editor.hover_state.info_popovers
);
let rendered = editor
.hover_state
.info_popovers
.first()
.cloned()
.unwrap()
.parsed_content;
assert_eq!(rendered.text, "some other basic docs".to_string())
editor.hover_state.info_popover.clone().unwrap().blocks,
vec![HoverBlock {
text: "some other basic docs".to_string(),
kind: HoverBlockKind::Markdown,
}]
)
});
}
@@ -806,21 +773,11 @@ mod tests {
cx.condition(|editor, _| editor.hover_state.visible()).await;
cx.editor(|editor, _| {
assert_eq!(
editor.hover_state.info_popovers.len(),
1,
"Expected exactly one hover but got: {:?}",
editor.hover_state.info_popovers
);
let rendered = editor
.hover_state
.info_popovers
.first()
.cloned()
.unwrap()
.parsed_content;
assert_eq!(
rendered.text,
"regular text for hover to show".to_string(),
editor.hover_state.info_popover.clone().unwrap().blocks,
vec![HoverBlock {
text: "regular text for hover to show".to_string(),
kind: HoverBlockKind::Markdown,
}],
"No empty string hovers should be shown"
);
});
@@ -867,21 +824,20 @@ mod tests {
.next()
.await;
let languages = cx.language_registry().clone();
cx.condition(|editor, _| editor.hover_state.visible()).await;
cx.editor(|editor, _| {
let blocks = editor.hover_state.info_popover.clone().unwrap().blocks;
assert_eq!(
editor.hover_state.info_popovers.len(),
1,
"Expected exactly one hover but got: {:?}",
editor.hover_state.info_popovers
blocks,
vec![HoverBlock {
text: markdown_string,
kind: HoverBlockKind::Markdown,
}],
);
let rendered = editor
.hover_state
.info_popovers
.first()
.cloned()
.unwrap()
.parsed_content;
let rendered = smol::block_on(parse_blocks(&blocks, &languages, None));
assert_eq!(
rendered.text,
code_str.trim(),
@@ -933,9 +889,7 @@ mod tests {
cx.background_executor.run_until_parked();
cx.editor(|Editor { hover_state, .. }, _| {
assert!(
hover_state.diagnostic_popover.is_some() && hover_state.info_popovers.is_empty()
)
assert!(hover_state.diagnostic_popover.is_some() && hover_state.info_popover.is_none())
});
// Info Popover shows after request responded to
@@ -1335,10 +1289,8 @@ mod tests {
cx.background_executor.run_until_parked();
cx.update_editor(|editor, cx| {
let hover_state = &editor.hover_state;
assert!(
hover_state.diagnostic_popover.is_none() && hover_state.info_popovers.len() == 1
);
let popover = hover_state.info_popovers.first().cloned().unwrap();
assert!(hover_state.diagnostic_popover.is_none() && hover_state.info_popover.is_some());
let popover = hover_state.info_popover.as_ref().unwrap();
let buffer_snapshot = editor.buffer().update(cx, |buffer, cx| buffer.snapshot(cx));
assert_eq!(
popover.symbol_range,
@@ -1390,10 +1342,8 @@ mod tests {
cx.background_executor.run_until_parked();
cx.update_editor(|editor, cx| {
let hover_state = &editor.hover_state;
assert!(
hover_state.diagnostic_popover.is_none() && hover_state.info_popovers.len() == 1
);
let popover = hover_state.info_popovers.first().cloned().unwrap();
assert!(hover_state.diagnostic_popover.is_none() && hover_state.info_popover.is_some());
let popover = hover_state.info_popover.as_ref().unwrap();
let buffer_snapshot = editor.buffer().update(cx, |buffer, cx| buffer.snapshot(cx));
assert_eq!(
popover.symbol_range,

View File

@@ -705,38 +705,31 @@ impl Item for Editor {
.await?;
}
if buffers.len() == 1 {
// Apply the full save routine for singleton buffers, to allow `touch`ing the file via the editor.
project
.update(&mut cx, |project, cx| project.save_buffers(buffers, cx))?
.await?;
} else {
// For multi-buffers, only format and save the buffers with changes.
// For clean buffers, we simulate saving by calling `Buffer::did_save`,
// so that language servers or other downstream listeners of save events get notified.
let (dirty_buffers, clean_buffers) = buffers.into_iter().partition(|buffer| {
buffer
.update(&mut cx, |buffer, _| {
buffer.is_dirty() || buffer.has_conflict()
})
.unwrap_or(false)
});
// Only format and save the buffers with changes. For clean buffers,
// we simulate saving by calling `Buffer::did_save`, so that language servers or
// other downstream listeners of save events get notified.
let (dirty_buffers, clean_buffers) = buffers.into_iter().partition(|buffer| {
buffer
.update(&mut cx, |buffer, _| {
buffer.is_dirty() || buffer.has_conflict()
})
.unwrap_or(false)
});
project
.update(&mut cx, |project, cx| {
project.save_buffers(dirty_buffers, cx)
})?
.await?;
for buffer in clean_buffers {
buffer
.update(&mut cx, |buffer, cx| {
let version = buffer.saved_version().clone();
let fingerprint = buffer.saved_version_fingerprint();
let mtime = buffer.saved_mtime();
buffer.did_save(version, fingerprint, mtime, cx);
})
.ok();
}
project
.update(&mut cx, |project, cx| {
project.save_buffers(dirty_buffers, cx)
})?
.await?;
for buffer in clean_buffers {
buffer
.update(&mut cx, |buffer, cx| {
let version = buffer.saved_version().clone();
let fingerprint = buffer.saved_version_fingerprint();
let mtime = buffer.saved_mtime();
buffer.did_save(version, fingerprint, mtime, cx);
})
.ok();
}
Ok(())
@@ -983,7 +976,7 @@ impl SearchableItem for Editor {
self.clear_background_highlights::<BufferSearchHighlights>(cx);
}
fn update_matches(&mut self, matches: &[Range<Anchor>], cx: &mut ViewContext<Self>) {
fn update_matches(&mut self, matches: Vec<Range<Anchor>>, cx: &mut ViewContext<Self>) {
self.highlight_background::<BufferSearchHighlights>(
matches,
|theme| theme.search_match_background,
@@ -1020,7 +1013,7 @@ impl SearchableItem for Editor {
fn activate_match(
&mut self,
index: usize,
matches: &[Range<Anchor>],
matches: Vec<Range<Anchor>>,
cx: &mut ViewContext<Self>,
) {
self.unfold_ranges([matches[index].clone()], false, true, cx);
@@ -1030,10 +1023,10 @@ impl SearchableItem for Editor {
})
}
fn select_matches(&mut self, matches: &[Self::Match], cx: &mut ViewContext<Self>) {
self.unfold_ranges(matches.to_vec(), false, false, cx);
fn select_matches(&mut self, matches: Vec<Self::Match>, cx: &mut ViewContext<Self>) {
self.unfold_ranges(matches.clone(), false, false, cx);
let mut ranges = Vec::new();
for m in matches {
for m in &matches {
ranges.push(self.range_for_match(&m))
}
self.change_selections(None, cx, |s| s.select_ranges(ranges));
@@ -1062,7 +1055,7 @@ impl SearchableItem for Editor {
}
fn match_index_for_direction(
&mut self,
matches: &[Range<Anchor>],
matches: &Vec<Range<Anchor>>,
current_index: usize,
direction: Direction,
count: usize,
@@ -1154,11 +1147,11 @@ impl SearchableItem for Editor {
fn active_match_index(
&mut self,
matches: &[Range<Anchor>],
matches: Vec<Range<Anchor>>,
cx: &mut ViewContext<Self>,
) -> Option<usize> {
active_match_index(
matches,
&matches,
&self.selections.newest_anchor().head(),
&self.buffer().read(cx).snapshot(cx),
)
@@ -1201,7 +1194,7 @@ pub fn entry_git_aware_label_color(
selected: bool,
) -> Color {
if ignored {
Color::Ignored
Color::Disabled
} else {
match git_status {
Some(GitFileStatus::Added) => Color::Created,

View File

@@ -10,31 +10,6 @@ pub struct MouseContextMenu {
_subscription: Subscription,
}
impl MouseContextMenu {
pub(crate) fn new(
position: Point<Pixels>,
context_menu: View<ui::ContextMenu>,
cx: &mut ViewContext<Editor>,
) -> Self {
let context_menu_focus = context_menu.focus_handle(cx);
cx.focus(&context_menu_focus);
let _subscription =
cx.subscribe(&context_menu, move |this, _, _event: &DismissEvent, cx| {
this.mouse_context_menu.take();
if context_menu_focus.contains_focused(cx) {
this.focus(cx);
}
});
Self {
position,
context_menu,
_subscription,
}
}
}
pub fn deploy_context_menu(
editor: &mut Editor,
position: Point<Pixels>,
@@ -85,8 +60,21 @@ pub fn deploy_context_menu(
.action("Reveal in Finder", Box::new(RevealInFinder))
})
};
let mouse_context_menu = MouseContextMenu::new(position, context_menu, cx);
editor.mouse_context_menu = Some(mouse_context_menu);
let context_menu_focus = context_menu.focus_handle(cx);
cx.focus(&context_menu_focus);
let _subscription = cx.subscribe(&context_menu, move |this, _, _event: &DismissEvent, cx| {
this.mouse_context_menu.take();
if context_menu_focus.contains_focused(cx) {
this.focus(cx);
}
});
editor.mouse_context_menu = Some(MouseContextMenu {
position,
context_menu,
_subscription,
});
cx.notify();
}

View File

@@ -5,15 +5,14 @@ use super::{Bias, DisplayPoint, DisplaySnapshot, SelectionGoal, ToDisplayPoint};
use crate::{char_kind, scroll::ScrollAnchor, CharKind, EditorStyle, ToOffset, ToPoint};
use gpui::{px, Pixels, WindowTextSystem};
use language::Point;
use multi_buffer::{MultiBufferSnapshot, Offset};
use serde::Deserialize;
use multi_buffer::MultiBufferSnapshot;
use std::{ops::Range, sync::Arc};
/// Defines the search strategy for items in the `movement` module.
/// `FindRange::SingleLine` only looks for a match on a single line at a time, whereas
/// `FindRange::MultiLine` keeps going until the end of a string.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Deserialize)]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum FindRange {
SingleLine,
MultiLine,
@@ -472,8 +471,8 @@ pub fn find_boundary_exclusive(
/// the [`DisplaySnapshot`]. The offsets are relative to the start of a buffer.
pub fn chars_after(
map: &DisplaySnapshot,
mut offset: Offset<multi_buffer::MultiBuffer>,
) -> impl Iterator<Item = (char, Range<Offset<multi_buffer::MultiBuffer>>)> + '_ {
mut offset: usize,
) -> impl Iterator<Item = (char, Range<usize>)> + '_ {
map.buffer_snapshot.chars_at(offset).map(move |ch| {
let before = offset;
offset = offset + ch.len_utf8();
@@ -486,13 +485,13 @@ pub fn chars_after(
/// the [`DisplaySnapshot`]. The offsets are relative to the start of a buffer.
pub fn chars_before(
map: &DisplaySnapshot,
mut offset: Offset<multi_buffer::MultiBuffer>,
) -> impl Iterator<Item = (char, Range<Offset<multi_buffer::MultiBuffer>>)> + '_ {
mut offset: usize,
) -> impl Iterator<Item = (char, Range<usize>)> + '_ {
map.buffer_snapshot
.reversed_chars_at(offset)
.map(move |ch| {
let after = offset;
offset = Offset::new(offset.0 - ch.len_utf8());
offset = offset - ch.len_utf8();
(ch, offset..after)
})
}

View File

@@ -32,10 +32,6 @@ impl Autoscroll {
pub fn focused() -> Self {
Self::Strategy(AutoscrollStrategy::Focused)
}
/// Scrolls so that the newest cursor is roughly `n` lines from the top.
pub fn top_relative(n: usize) -> Self {
Self::Strategy(AutoscrollStrategy::TopRelative(n))
}
}
#[derive(PartialEq, Eq, Default, Clone, Copy)]
@@ -47,7 +43,6 @@ pub enum AutoscrollStrategy {
Focused,
Top,
Bottom,
TopRelative(usize),
}
impl AutoscrollStrategy {
@@ -183,10 +178,6 @@ impl Editor {
scroll_position.y = (target_bottom - visible_lines).max(0.0);
self.set_scroll_position_internal(scroll_position, local, true, cx);
}
AutoscrollStrategy::TopRelative(lines) => {
scroll_position.y = target_top - lines as f32;
self.set_scroll_position_internal(scroll_position, local, true, cx);
}
}
self.scroll_manager.last_autoscroll = Some((

View File

@@ -490,13 +490,7 @@ impl<'a> MutableSelectionsCollection<'a> {
pub fn insert_range<T>(&mut self, range: Range<T>)
where
T: 'a
+ ToOffset<multi_buffer::MultiBuffer>
+ ToPoint
+ TextDimension
+ Ord
+ Sub<T, Output = T>
+ std::marker::Copy,
T: 'a + ToOffset + ToPoint + TextDimension + Ord + Sub<T, Output = T> + std::marker::Copy,
{
let mut selections = self.all(self.cx);
let mut start = range.start.to_offset(&self.buffer());
@@ -519,11 +513,7 @@ impl<'a> MutableSelectionsCollection<'a> {
pub fn select<T>(&mut self, mut selections: Vec<Selection<T>>)
where
T: ToOffset<multi_buffer::MultiBuffer>
+ ToPoint
+ Ord
+ std::marker::Copy
+ std::fmt::Debug,
T: ToOffset + ToPoint + Ord + std::marker::Copy + std::fmt::Debug,
{
let buffer = self.buffer.read(self.cx).snapshot(self.cx);
selections.sort_unstable_by_key(|s| s.start);
@@ -572,7 +562,7 @@ impl<'a> MutableSelectionsCollection<'a> {
pub fn select_ranges<I, T>(&mut self, ranges: I)
where
I: IntoIterator<Item = Range<T>>,
T: ToOffset<multi_buffer::MultiBuffer>,
T: ToOffset,
{
let buffer = self.buffer.read(self.cx).snapshot(self.cx);
let ranges = ranges

View File

@@ -10,7 +10,6 @@ use gpui::{
use indoc::indoc;
use itertools::Itertools;
use language::{Buffer, BufferSnapshot, LanguageRegistry};
use multi_buffer::ExcerptRange;
use parking_lot::RwLock;
use project::{FakeFs, Project};
use std::{
@@ -21,14 +20,12 @@ use std::{
Arc,
},
};
use text::BufferId;
use ui::Context;
use util::{
assert_set_eq,
test::{generate_marked_text, marked_text_ranges},
};
use super::{build_editor, build_editor_with_project};
use super::build_editor_with_project;
pub struct EditorTestContext {
pub cx: gpui::VisualTestContext,
@@ -70,43 +67,6 @@ impl EditorTestContext {
}
}
pub fn new_multibuffer<const COUNT: usize>(
cx: &mut gpui::TestAppContext,
excerpts: [&str; COUNT],
) -> EditorTestContext {
let mut multibuffer = MultiBuffer::new(0, language::Capability::ReadWrite);
let buffer = cx.new_model(|cx| {
for (i, excerpt) in excerpts.into_iter().enumerate() {
let (text, ranges) = marked_text_ranges(excerpt, false);
let buffer =
cx.new_model(|_| Buffer::new(0, BufferId::new(i as u64 + 1).unwrap(), text));
multibuffer.push_excerpts(
buffer,
ranges.into_iter().map(|range| ExcerptRange {
context: range,
primary: None,
}),
cx,
);
}
multibuffer
});
let editor = cx.add_window(|cx| {
let editor = build_editor(buffer, cx);
editor.focus(cx);
editor
});
let editor_view = editor.root_view(cx).unwrap();
Self {
cx: VisualTestContext::from_window(*editor.deref(), cx),
window: editor.into(),
editor: editor_view,
assertion_cx: AssertionContextManager::new(),
}
}
pub fn condition(
&self,
predicate: impl FnMut(&Editor, &AppContext) -> bool,
@@ -345,7 +305,7 @@ impl EditorTestContext {
.background_highlights
.get(&TypeId::of::<Tag>())
.map(|h| h.1.clone())
.unwrap_or_else(|| Arc::from([]))
.unwrap_or_default()
.into_iter()
.map(|range| range.to_offset(&snapshot.buffer_snapshot))
.collect()

View File

@@ -12,6 +12,10 @@ workspace = true
path = "src/extension_store.rs"
doctest = false
[[bin]]
name = "extension_json_schemas"
path = "src/extension_json_schemas.rs"
[dependencies]
anyhow.workspace = true
async-compression.workspace = true
@@ -29,7 +33,6 @@ lsp.workspace = true
node_runtime.workspace = true
project.workspace = true
schemars.workspace = true
semantic_version.workspace = true
serde.workspace = true
serde_json.workspace = true
settings.workspace = true

View File

@@ -195,13 +195,7 @@ impl ExtensionBuilder {
&grammar_metadata.rev,
)?;
let base_grammar_path = grammar_metadata
.path
.as_ref()
.map(|path| grammar_repo_dir.join(path))
.unwrap_or(grammar_repo_dir);
let src_path = base_grammar_path.join("src");
let src_path = grammar_repo_dir.join("src");
let parser_path = src_path.join("parser.c");
let scanner_path = src_path.join("scanner.c");
@@ -485,7 +479,7 @@ impl ExtensionBuilder {
fn populate_defaults(manifest: &mut ExtensionManifest, extension_path: &Path) -> Result<()> {
// For legacy extensions on the v0 schema (aka, using `extension.json`), clear out any existing
// contents of the computed fields, since we don't care what the existing values are.
if manifest.schema_version.is_v0() {
if manifest.schema_version == 0 {
manifest.languages.clear();
manifest.grammars.clear();
manifest.themes.clear();
@@ -528,7 +522,7 @@ fn populate_defaults(manifest: &mut ExtensionManifest, extension_path: &Path) ->
// For legacy extensions on the v0 schema (aka, using `extension.json`), we want to populate the grammars in
// the manifest using the contents of the `grammars` directory.
if manifest.schema_version.is_v0() {
if manifest.schema_version == 0 {
let grammars_dir = extension_path.join("grammars");
if grammars_dir.exists() {
for entry in fs::read_dir(&grammars_dir).context("failed to list grammars dir")? {
@@ -539,8 +533,6 @@ fn populate_defaults(manifest: &mut ExtensionManifest, extension_path: &Path) ->
struct GrammarConfigToml {
pub repository: String,
pub commit: String,
#[serde(default)]
pub path: Option<String>,
}
let grammar_config = fs::read_to_string(&grammar_path)?;
@@ -556,7 +548,6 @@ fn populate_defaults(manifest: &mut ExtensionManifest, extension_path: &Path) ->
GrammarManifestEntry {
repository: grammar_config.repository,
rev: grammar_config.commit,
path: grammar_config.path,
},
);
}

View File

@@ -0,0 +1,17 @@
use language::LanguageConfig;
use schemars::schema_for;
use theme::ThemeFamilyContent;
fn main() {
let theme_family_schema = schema_for!(ThemeFamilyContent);
let language_config_schema = schema_for!(LanguageConfig);
println!(
"{}",
serde_json::to_string_pretty(&theme_family_schema).unwrap()
);
println!(
"{}",
serde_json::to_string_pretty(&language_config_schema).unwrap()
);
}

View File

@@ -1,31 +1,20 @@
use crate::wasm_host::{
wit::{self, LanguageServerConfig},
WasmExtension, WasmHost,
};
use crate::wasm_host::{wit::LanguageServerConfig, WasmExtension, WasmHost};
use anyhow::{anyhow, Context, Result};
use async_trait::async_trait;
use collections::HashMap;
use futures::{Future, FutureExt};
use gpui::AsyncAppContext;
use language::{
CodeLabel, HighlightId, Language, LanguageServerName, LspAdapter, LspAdapterDelegate,
};
use language::{Language, LanguageServerName, LspAdapter, LspAdapterDelegate};
use lsp::LanguageServerBinary;
use serde::Serialize;
use serde_json::Value;
use std::ops::Range;
use std::{
any::Any,
path::{Path, PathBuf},
pin::Pin,
sync::Arc,
};
use util::{maybe, ResultExt};
use wasmtime_wasi::WasiView as _;
pub struct ExtensionLspAdapter {
pub(crate) extension: WasmExtension,
pub(crate) language_server_id: LanguageServerName,
pub(crate) config: LanguageServerConfig,
pub(crate) host: Arc<WasmHost>,
}
@@ -53,12 +42,7 @@ impl LspAdapter for ExtensionLspAdapter {
async move {
let resource = store.data_mut().table().push(delegate)?;
let command = extension
.call_language_server_command(
store,
&this.language_server_id,
&this.config,
resource,
)
.call_language_server_command(store, &this.config, resource)
.await?
.map_err(|e| anyhow!("{}", e))?;
anyhow::Ok(command)
@@ -72,24 +56,6 @@ impl LspAdapter for ExtensionLspAdapter {
.host
.path_from_extension(&self.extension.manifest.id, command.command.as_ref());
// TODO: This should now be done via the `zed::make_file_executable` function in
// the Zed extension API, but we're leaving these existing usages in place temporarily
// to avoid any compatibility issues between Zed and the extension versions.
//
// We can remove once the following extension versions no longer see any use:
// - toml@0.0.2
// - zig@0.0.1
if ["toml", "zig"].contains(&self.extension.manifest.id.as_ref()) {
#[cfg(not(windows))]
{
use std::fs::{self, Permissions};
use std::os::unix::fs::PermissionsExt;
fs::set_permissions(&path, Permissions::from_mode(0o755))
.context("failed to set file permissions")?;
}
}
Ok(LanguageServerBinary {
path,
arguments: command.args.into_iter().map(|arg| arg.into()).collect(),
@@ -127,25 +93,6 @@ impl LspAdapter for ExtensionLspAdapter {
None
}
fn language_ids(&self) -> HashMap<String, String> {
// TODO: The language IDs can be provided via the language server options
// in `extension.toml` now, but we're leaving these existing usages in place temporarily
// to avoid any compatibility issues between Zed and the extension versions.
//
// We can remove once the following extension versions no longer see any use:
// - php@0.0.1
if self.extension.manifest.id.as_ref() == "php" {
return HashMap::from_iter([("PHP".into(), "php".into())]);
}
self.extension
.manifest
.language_servers
.get(&LanguageServerName(self.config.name.clone().into()))
.map(|server| server.language_ids.clone())
.unwrap_or_default()
}
async fn initialization_options(
self: Arc<Self>,
delegate: &Arc<dyn LspAdapterDelegate>,
@@ -161,7 +108,6 @@ impl LspAdapter for ExtensionLspAdapter {
let options = extension
.call_language_server_initialization_options(
store,
&this.language_server_id,
&this.config,
resource,
)
@@ -181,394 +127,4 @@ impl LspAdapter for ExtensionLspAdapter {
None
})
}
async fn workspace_configuration(
self: Arc<Self>,
delegate: &Arc<dyn LspAdapterDelegate>,
_cx: &mut AsyncAppContext,
) -> Result<Value> {
let delegate = delegate.clone();
let json_options: Option<String> = self
.extension
.call({
let this = self.clone();
|extension, store| {
async move {
let resource = store.data_mut().table().push(delegate)?;
let options = extension
.call_language_server_workspace_configuration(
store,
&this.language_server_id,
resource,
)
.await?
.map_err(|e| anyhow!("{}", e))?;
anyhow::Ok(options)
}
.boxed()
}
})
.await?;
Ok(if let Some(json_options) = json_options {
serde_json::from_str(&json_options).with_context(|| {
format!("failed to parse initialization_options from extension: {json_options}")
})?
} else {
serde_json::json!({})
})
}
async fn labels_for_completions(
self: Arc<Self>,
completions: &[lsp::CompletionItem],
language: &Arc<Language>,
) -> Result<Vec<Option<CodeLabel>>> {
let completions = completions
.into_iter()
.map(|completion| wit::Completion::from(completion.clone()))
.collect::<Vec<_>>();
let labels = self
.extension
.call({
let this = self.clone();
|extension, store| {
async move {
extension
.call_labels_for_completions(
store,
&this.language_server_id,
completions,
)
.await?
.map_err(|e| anyhow!("{}", e))
}
.boxed()
}
})
.await?;
Ok(labels_from_wit(labels, language))
}
async fn labels_for_symbols(
self: Arc<Self>,
symbols: &[(String, lsp::SymbolKind)],
language: &Arc<Language>,
) -> Result<Vec<Option<CodeLabel>>> {
let symbols = symbols
.into_iter()
.cloned()
.map(|(name, kind)| wit::Symbol {
name,
kind: kind.into(),
})
.collect::<Vec<_>>();
let labels = self
.extension
.call({
let this = self.clone();
|extension, store| {
async move {
extension
.call_labels_for_symbols(store, &this.language_server_id, symbols)
.await?
.map_err(|e| anyhow!("{}", e))
}
.boxed()
}
})
.await?;
Ok(labels_from_wit(labels, language))
}
}
fn labels_from_wit(
labels: Vec<Option<wit::CodeLabel>>,
language: &Arc<Language>,
) -> Vec<Option<CodeLabel>> {
labels
.into_iter()
.map(|label| {
let label = label?;
let runs = if !label.code.is_empty() {
language.highlight_text(&label.code.as_str().into(), 0..label.code.len())
} else {
Vec::new()
};
build_code_label(&label, &runs, &language)
})
.collect()
}
fn build_code_label(
label: &wit::CodeLabel,
parsed_runs: &[(Range<usize>, HighlightId)],
language: &Arc<Language>,
) -> Option<CodeLabel> {
let mut text = String::new();
let mut runs = vec![];
for span in &label.spans {
match span {
wit::CodeLabelSpan::CodeRange(range) => {
let range = Range::from(*range);
let code_span = &label.code.get(range.clone())?;
let mut input_ix = range.start;
let mut output_ix = text.len();
for (run_range, id) in parsed_runs {
if run_range.start >= range.end {
break;
}
if run_range.end <= input_ix {
continue;
}
if run_range.start > input_ix {
let len = run_range.start - input_ix;
output_ix += len;
input_ix += len;
}
let len = range.end.min(run_range.end) - input_ix;
runs.push((output_ix..output_ix + len, *id));
output_ix += len;
input_ix += len;
}
text.push_str(code_span);
}
wit::CodeLabelSpan::Literal(span) => {
let highlight_id = language
.grammar()
.zip(span.highlight_name.as_ref())
.and_then(|(grammar, highlight_name)| {
grammar.highlight_id_for_name(&highlight_name)
})
.unwrap_or_default();
let ix = text.len();
runs.push((ix..ix + span.text.len(), highlight_id));
text.push_str(&span.text);
}
}
}
let filter_range = Range::from(label.filter_range);
text.get(filter_range.clone())?;
Some(CodeLabel {
text,
runs,
filter_range,
})
}
impl From<wit::Range> for Range<usize> {
fn from(range: wit::Range) -> Self {
let start = range.start as usize;
let end = range.end as usize;
start..end
}
}
impl From<lsp::CompletionItem> for wit::Completion {
fn from(value: lsp::CompletionItem) -> Self {
Self {
label: value.label,
detail: value.detail,
kind: value.kind.map(Into::into),
insert_text_format: value.insert_text_format.map(Into::into),
}
}
}
impl From<lsp::CompletionItemKind> for wit::CompletionKind {
fn from(value: lsp::CompletionItemKind) -> Self {
match value {
lsp::CompletionItemKind::TEXT => Self::Text,
lsp::CompletionItemKind::METHOD => Self::Method,
lsp::CompletionItemKind::FUNCTION => Self::Function,
lsp::CompletionItemKind::CONSTRUCTOR => Self::Constructor,
lsp::CompletionItemKind::FIELD => Self::Field,
lsp::CompletionItemKind::VARIABLE => Self::Variable,
lsp::CompletionItemKind::CLASS => Self::Class,
lsp::CompletionItemKind::INTERFACE => Self::Interface,
lsp::CompletionItemKind::MODULE => Self::Module,
lsp::CompletionItemKind::PROPERTY => Self::Property,
lsp::CompletionItemKind::UNIT => Self::Unit,
lsp::CompletionItemKind::VALUE => Self::Value,
lsp::CompletionItemKind::ENUM => Self::Enum,
lsp::CompletionItemKind::KEYWORD => Self::Keyword,
lsp::CompletionItemKind::SNIPPET => Self::Snippet,
lsp::CompletionItemKind::COLOR => Self::Color,
lsp::CompletionItemKind::FILE => Self::File,
lsp::CompletionItemKind::REFERENCE => Self::Reference,
lsp::CompletionItemKind::FOLDER => Self::Folder,
lsp::CompletionItemKind::ENUM_MEMBER => Self::EnumMember,
lsp::CompletionItemKind::CONSTANT => Self::Constant,
lsp::CompletionItemKind::STRUCT => Self::Struct,
lsp::CompletionItemKind::EVENT => Self::Event,
lsp::CompletionItemKind::OPERATOR => Self::Operator,
lsp::CompletionItemKind::TYPE_PARAMETER => Self::TypeParameter,
_ => Self::Other(extract_int(value)),
}
}
}
impl From<lsp::InsertTextFormat> for wit::InsertTextFormat {
fn from(value: lsp::InsertTextFormat) -> Self {
match value {
lsp::InsertTextFormat::PLAIN_TEXT => Self::PlainText,
lsp::InsertTextFormat::SNIPPET => Self::Snippet,
_ => Self::Other(extract_int(value)),
}
}
}
impl From<lsp::SymbolKind> for wit::SymbolKind {
fn from(value: lsp::SymbolKind) -> Self {
match value {
lsp::SymbolKind::FILE => Self::File,
lsp::SymbolKind::MODULE => Self::Module,
lsp::SymbolKind::NAMESPACE => Self::Namespace,
lsp::SymbolKind::PACKAGE => Self::Package,
lsp::SymbolKind::CLASS => Self::Class,
lsp::SymbolKind::METHOD => Self::Method,
lsp::SymbolKind::PROPERTY => Self::Property,
lsp::SymbolKind::FIELD => Self::Field,
lsp::SymbolKind::CONSTRUCTOR => Self::Constructor,
lsp::SymbolKind::ENUM => Self::Enum,
lsp::SymbolKind::INTERFACE => Self::Interface,
lsp::SymbolKind::FUNCTION => Self::Function,
lsp::SymbolKind::VARIABLE => Self::Variable,
lsp::SymbolKind::CONSTANT => Self::Constant,
lsp::SymbolKind::STRING => Self::String,
lsp::SymbolKind::NUMBER => Self::Number,
lsp::SymbolKind::BOOLEAN => Self::Boolean,
lsp::SymbolKind::ARRAY => Self::Array,
lsp::SymbolKind::OBJECT => Self::Object,
lsp::SymbolKind::KEY => Self::Key,
lsp::SymbolKind::NULL => Self::Null,
lsp::SymbolKind::ENUM_MEMBER => Self::EnumMember,
lsp::SymbolKind::STRUCT => Self::Struct,
lsp::SymbolKind::EVENT => Self::Event,
lsp::SymbolKind::OPERATOR => Self::Operator,
lsp::SymbolKind::TYPE_PARAMETER => Self::TypeParameter,
_ => Self::Other(extract_int(value)),
}
}
}
fn extract_int<T: Serialize>(value: T) -> i32 {
maybe!({
let kind = serde_json::to_value(&value)?;
serde_json::from_value(kind)
})
.log_err()
.unwrap_or(-1)
}
#[test]
fn test_build_code_label() {
use util::test::marked_text_ranges;
let (code, code_ranges) = marked_text_ranges(
"«const» «a»: «fn»(«Bcd»(«Efgh»)) -> «Ijklm» = pqrs.tuv",
false,
);
let code_runs = code_ranges
.into_iter()
.map(|range| (range, HighlightId(0)))
.collect::<Vec<_>>();
let label = build_code_label(
&wit::CodeLabel {
spans: vec![
wit::CodeLabelSpan::CodeRange(wit::Range {
start: code.find("pqrs").unwrap() as u32,
end: code.len() as u32,
}),
wit::CodeLabelSpan::CodeRange(wit::Range {
start: code.find(": fn").unwrap() as u32,
end: code.find(" = ").unwrap() as u32,
}),
],
filter_range: wit::Range {
start: 0,
end: "pqrs.tuv".len() as u32,
},
code,
},
&code_runs,
&language::PLAIN_TEXT,
)
.unwrap();
let (label_text, label_ranges) =
marked_text_ranges("pqrs.tuv: «fn»(«Bcd»(«Efgh»)) -> «Ijklm»", false);
let label_runs = label_ranges
.into_iter()
.map(|range| (range, HighlightId(0)))
.collect::<Vec<_>>();
assert_eq!(
label,
CodeLabel {
text: label_text,
runs: label_runs,
filter_range: label.filter_range.clone()
}
)
}
#[test]
fn test_build_code_label_with_invalid_ranges() {
use util::test::marked_text_ranges;
let (code, code_ranges) = marked_text_ranges("const «a»: «B» = '🏀'", false);
let code_runs = code_ranges
.into_iter()
.map(|range| (range, HighlightId(0)))
.collect::<Vec<_>>();
// A span uses a code range that is invalid because it starts inside of
// a multi-byte character.
let label = build_code_label(
&wit::CodeLabel {
spans: vec![
wit::CodeLabelSpan::CodeRange(wit::Range {
start: code.find('B').unwrap() as u32,
end: code.find(" = ").unwrap() as u32,
}),
wit::CodeLabelSpan::CodeRange(wit::Range {
start: code.find('🏀').unwrap() as u32 + 1,
end: code.len() as u32,
}),
],
filter_range: wit::Range {
start: 0,
end: "B".len() as u32,
},
code,
},
&code_runs,
&language::PLAIN_TEXT,
);
assert!(label.is_none());
// Filter range extends beyond actual text
let label = build_code_label(
&wit::CodeLabel {
spans: vec![wit::CodeLabelSpan::Literal(wit::CodeLabelSpanLiteral {
text: "abc".into(),
highlight_name: Some("type".into()),
})],
filter_range: wit::Range { start: 0, end: 5 },
code: String::new(),
},
&code_runs,
&language::PLAIN_TEXT,
);
assert!(label.is_none());
}

View File

@@ -1,18 +1,17 @@
use anyhow::{anyhow, Context, Result};
use collections::{BTreeMap, HashMap};
use collections::BTreeMap;
use fs::Fs;
use language::LanguageServerName;
use semantic_version::SemanticVersion;
use serde::{Deserialize, Serialize};
use std::{
ffi::OsStr,
fmt,
path::{Path, PathBuf},
sync::Arc,
};
use util::SemanticVersion;
/// This is the old version of the extension manifest, from when it was `extension.json`.
#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
#[derive(Clone, PartialEq, Eq, Debug, Deserialize, Serialize)]
pub struct OldExtensionManifest {
pub name: String,
pub version: Arc<str>,
@@ -32,30 +31,12 @@ pub struct OldExtensionManifest {
pub grammars: BTreeMap<Arc<str>, PathBuf>,
}
/// The schema version of the [`ExtensionManifest`].
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone, Copy, Serialize, Deserialize)]
pub struct SchemaVersion(pub i32);
impl fmt::Display for SchemaVersion {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.0)
}
}
impl SchemaVersion {
pub const ZERO: Self = Self(0);
pub fn is_v0(&self) -> bool {
self == &Self::ZERO
}
}
#[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize)]
#[derive(Clone, PartialEq, Eq, Debug, Deserialize, Serialize)]
pub struct ExtensionManifest {
pub id: Arc<str>,
pub name: String,
pub version: Arc<str>,
pub schema_version: SchemaVersion,
pub schema_version: i32,
#[serde(default)]
pub description: Option<String>,
@@ -92,38 +73,11 @@ pub struct GrammarManifestEntry {
pub repository: String,
#[serde(alias = "commit")]
pub rev: String,
#[serde(default)]
pub path: Option<String>,
}
#[derive(Clone, PartialEq, Eq, Debug, Deserialize, Serialize)]
pub struct LanguageServerManifestEntry {
/// Deprecated in favor of `languages`.
#[serde(default)]
language: Option<Arc<str>>,
/// The list of languages this language server should work with.
#[serde(default)]
languages: Vec<Arc<str>>,
#[serde(default)]
pub language_ids: HashMap<String, String>,
}
impl LanguageServerManifestEntry {
/// Returns the list of languages for the language server.
///
/// Prefer this over accessing the `language` or `languages` fields directly,
/// as we currently support both.
///
/// We can replace this with just field access for the `languages` field once
/// we have removed `language`.
pub fn languages(&self) -> impl IntoIterator<Item = Arc<str>> + '_ {
let language = if self.languages.is_empty() {
self.language.clone()
} else {
None
};
self.languages.iter().cloned().chain(language)
}
pub language: Arc<str>,
}
impl ExtensionManifest {
@@ -168,7 +122,7 @@ fn manifest_from_old_manifest(
description: manifest_json.description,
repository: manifest_json.repository,
authors: manifest_json.authors,
schema_version: SchemaVersion::ZERO,
schema_version: 0,
lib: Default::default(),
themes: {
let mut themes = manifest_json.themes.into_values().collect::<Vec<_>>();

View File

@@ -1,32 +0,0 @@
use anyhow::Result;
use collections::HashMap;
use gpui::AppContext;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsSources};
use std::sync::Arc;
#[derive(Deserialize, Serialize, Debug, Default, Clone, JsonSchema)]
pub struct ExtensionSettings {
#[serde(default)]
pub auto_update_extensions: HashMap<Arc<str>, bool>,
}
impl ExtensionSettings {
pub fn should_auto_update(&self, extension_id: &str) -> bool {
self.auto_update_extensions
.get(extension_id)
.copied()
.unwrap_or(true)
}
}
impl Settings for ExtensionSettings {
const KEY: Option<&'static str> = None;
type FileContent = Self;
fn load(sources: SettingsSources<Self::FileContent>, _cx: &mut AppContext) -> Result<Self> {
Ok(sources.user.cloned().unwrap_or_default())
}
}

Some files were not shown because too many files have changed in this diff.