Compare commits
44 Commits
debug-shel ... uniform-ta
| Author | SHA1 | Date |
|---|---|---|
| | a93e175db5 | |
| | bf1625e100 | |
| | a2b86e3dfc | |
| | b9fae2729b | |
| | e66f6c42e2 | |
| | 3dff31e3c8 | |
| | 9301856f6f | |
| | a8a125a444 | |
| | 253883fa4d | |
| | bff55228fb | |
| | 91169b8a4c | |
| | 0fdc162f8f | |
| | 1ee034751a | |
| | 01b2b62d08 | |
| | ddcaf08834 | |
| | 16c02ea28a | |
| | 73fefe4fec | |
| | 42059d6ca8 | |
| | 10be45d0db | |
| | cadc9aac95 | |
| | 8895ddc690 | |
| | 4550e5275b | |
| | c0f704e48b | |
| | fcacc4153c | |
| | 8c1bcacd5e | |
| | 5885ee5e2f | |
| | 5295cfaf59 | |
| | 034401da4a | |
| | bbf4679956 | |
| | e7d0c15b53 | |
| | 4530d60796 | |
| | e564f635ef | |
| | f2fff54207 | |
| | 6e66ad851e | |
| | c839ee65ad | |
| | 29f3bdfbcc | |
| | 26f89b45d4 | |
| | 87f98382e1 | |
| | 0b0b7d0ad8 | |
| | b1e4b8d767 | |
| | fc6d3c8fc9 | |
| | 08ba877df2 | |
| | 9ca91c34bf | |
| | adca3a059a | |
10  .github/workflows/ci.yml  vendored
@@ -29,7 +29,6 @@ jobs:
outputs:
run_tests: ${{ steps.filter.outputs.run_tests }}
run_license: ${{ steps.filter.outputs.run_license }}
run_docs: ${{ steps.filter.outputs.run_docs }}
runs-on:
- ubuntu-latest
steps:
@@ -59,11 +58,6 @@ jobs:
else
echo "run_tests=false" >> $GITHUB_OUTPUT
fi
if [[ $(git diff --name-only $COMPARE_REV ${{ github.sha }} | grep '^docs/') ]]; then
echo "run_docs=true" >> $GITHUB_OUTPUT
else
echo "run_docs=false" >> $GITHUB_OUTPUT
fi
if [[ $(git diff --name-only $COMPARE_REV ${{ github.sha }} | grep '^Cargo.lock') ]]; then
echo "run_license=true" >> $GITHUB_OUTPUT
else
@@ -204,9 +198,7 @@ jobs:
timeout-minutes: 60
name: Check docs
needs: [job_spec]
if: |
github.repository_owner == 'zed-industries' &&
(needs.job_spec.outputs.run_tests == 'true' || needs.job_spec.outputs.run_docs == 'true')
if: github.repository_owner == 'zed-industries'
runs-on:
- buildjet-8vcpu-ubuntu-2204
steps:
34  .github/workflows/community_delete_comments.yml  vendored  Normal file
@@ -0,0 +1,34 @@
name: Delete Mediafire Comments

on:
  issue_comment:
    types: [created]

permissions:
  issues: write

jobs:
  delete_comment:
    if: github.repository_owner == 'zed-industries'
    runs-on: ubuntu-latest
    steps:
      - name: Check for specific strings in comment
        id: check_comment
        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7
        with:
          script: |
            const comment = context.payload.comment.body;
            const triggerStrings = ['www.mediafire.com'];
            return triggerStrings.some(triggerString => comment.includes(triggerString));

      - name: Delete comment if it contains any of the specific strings
        if: steps.check_comment.outputs.result == 'true'
        uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7
        with:
          script: |
            const commentId = context.payload.comment.id;
            await github.rest.issues.deleteComment({
              owner: context.repo.owner,
              repo: context.repo.repo,
              comment_id: commentId
            });
@@ -40,7 +40,6 @@
},
"file_types": {
"Dockerfile": ["Dockerfile*[!dockerignore]"],
"JSONC": ["assets/**/*.json", "renovate.json"],
"Git Ignore": ["dockerignore"]
},
"hard_tabs": false,
26  Cargo.lock  generated
@@ -14,7 +14,6 @@ dependencies = [
"gpui",
"language",
"project",
"proto",
"release_channel",
"smallvec",
"ui",
@@ -110,11 +109,18 @@ dependencies = [
name = "agent_settings"
version = "0.1.0"
dependencies = [
"anthropic",
"anyhow",
"collections",
"deepseek",
"fs",
"gpui",
"language_model",
"lmstudio",
"log",
"mistral",
"ollama",
"open_ai",
"paths",
"schemars",
"serde",
@@ -4342,7 +4348,6 @@ dependencies = [
"terminal_view",
"theme",
"tree-sitter",
"tree-sitter-go",
"tree-sitter-json",
"ui",
"unindent",
@@ -9019,7 +9024,6 @@ dependencies = [
"itertools 0.14.0",
"language",
"lsp",
"picker",
"project",
"release_channel",
"serde_json",
@@ -13163,7 +13167,6 @@ dependencies = [
"thiserror 2.0.12",
"urlencoding",
"util",
"which 6.0.3",
"workspace-hack",
]

@@ -14563,13 +14566,22 @@ dependencies = [
name = "settings_ui"
version = "0.1.0"
dependencies = [
"collections",
"command_palette",
"command_palette_hooks",
"component",
"db",
"editor",
"feature_flags",
"fs",
"fuzzy",
"gpui",
"log",
"menu",
"paths",
"project",
"schemars",
"search",
"serde",
"settings",
"theme",
@@ -17045,6 +17057,7 @@ dependencies = [
"gpui_macros",
"icons",
"itertools 0.14.0",
"log",
"menu",
"serde",
"settings",
@@ -17427,8 +17440,11 @@ name = "vercel"
version = "0.1.0"
dependencies = [
"anyhow",
"futures 0.3.31",
"http_client",
"schemars",
"serde",
"serde_json",
"strum 0.27.1",
"workspace-hack",
]
@@ -19915,7 +19931,7 @@ dependencies = [

[[package]]
name = "zed"
version = "0.194.0"
version = "0.193.0"
dependencies = [
"activity_indicator",
"agent",
@@ -41,8 +41,7 @@
"shift-f11": "debugger::StepOut",
"f11": "zed::ToggleFullScreen",
"ctrl-alt-z": "edit_prediction::RateCompletions",
"ctrl-shift-i": "edit_prediction::ToggleMenu",
"ctrl-alt-l": "lsp_tool::ToggleMenu"
"ctrl-shift-i": "edit_prediction::ToggleMenu"
}
},
{
@@ -244,7 +243,8 @@
"ctrl-alt-e": "agent::RemoveAllContext",
"ctrl-shift-e": "project_panel::ToggleFocus",
"ctrl-shift-enter": "agent::ContinueThread",
"alt-enter": "agent::ContinueWithBurnMode"
"alt-enter": "agent::ContinueWithBurnMode",
"ctrl-alt-b": "agent::ToggleBurnMode"
}
},
{
@@ -1050,5 +1050,12 @@
"ctrl-tab": "pane::ActivateNextItem",
"ctrl-shift-tab": "pane::ActivatePreviousItem"
}
},
{
"context": "KeymapEditor",
"use_key_equivalents": true,
"bindings": {
"ctrl-f": "search::FocusSearch"
}
}
]

@@ -47,8 +47,7 @@
"fn-f": "zed::ToggleFullScreen",
"ctrl-cmd-f": "zed::ToggleFullScreen",
"ctrl-cmd-z": "edit_prediction::RateCompletions",
"ctrl-cmd-i": "edit_prediction::ToggleMenu",
"ctrl-cmd-l": "lsp_tool::ToggleMenu"
"ctrl-cmd-i": "edit_prediction::ToggleMenu"
}
},
{
@@ -284,7 +283,8 @@
"cmd-alt-e": "agent::RemoveAllContext",
"cmd-shift-e": "project_panel::ToggleFocus",
"cmd-shift-enter": "agent::ContinueThread",
"alt-enter": "agent::ContinueWithBurnMode"
"alt-enter": "agent::ContinueWithBurnMode",
"cmd-alt-b": "agent::ToggleBurnMode"
}
},
{
@@ -587,6 +587,7 @@
"alt-cmd-o": ["projects::OpenRecent", { "create_new_window": false }],
"ctrl-cmd-o": ["projects::OpenRemote", { "from_existing_connection": false, "create_new_window": false }],
"ctrl-cmd-shift-o": ["projects::OpenRemote", { "from_existing_connection": true, "create_new_window": false }],
"alt-cmd-b": "branches::OpenRecent",
"ctrl-~": "workspace::NewTerminal",
"cmd-s": "workspace::Save",
"cmd-k s": "workspace::SaveWithoutFormat",
@@ -610,7 +611,7 @@
"cmd-shift-f": "pane::DeploySearch",
"cmd-shift-h": ["pane::DeploySearch", { "replace_enabled": true }],
"cmd-shift-t": "pane::ReopenClosedItem",
"cmd-k cmd-s": "zed::OpenKeymap",
"cmd-k cmd-s": "zed::OpenKeymapEditor",
"cmd-k cmd-t": "theme_selector::Toggle",
"cmd-t": "project_symbols::Toggle",
"cmd-p": "file_finder::Toggle",
@@ -1148,5 +1149,12 @@
"ctrl-tab": "pane::ActivateNextItem",
"ctrl-shift-tab": "pane::ActivatePreviousItem"
}
},
{
"context": "KeymapEditor",
"use_key_equivalents": true,
"bindings": {
"cmd-f": "search::FocusSearch"
}
}
]
@@ -9,13 +9,6 @@
},
{
"context": "Editor",
"bindings": {
"ctrl-k ctrl-u": "editor::ConvertToUpperCase", // editor:upper-case
"ctrl-k ctrl-l": "editor::ConvertToLowerCase" // editor:lower-case
}
},
{
"context": "Editor && mode == full",
"bindings": {
"ctrl-shift-l": "language_selector::Toggle", // grammar-selector:show
"ctrl-|": "pane::RevealInProjectPanel", // tree-view:reveal-active-file
@@ -26,20 +19,25 @@
"shift-f3": ["editor::SelectPrevious", { "replace_newest": true }], //find-and-replace:find-previous
"alt-shift-down": "editor::AddSelectionBelow", // editor:add-selection-below
"alt-shift-up": "editor::AddSelectionAbove", // editor:add-selection-above
"ctrl-k ctrl-u": "editor::ConvertToUpperCase", // editor:upper-case
"ctrl-k ctrl-l": "editor::ConvertToLowerCase", // editor:lower-case
"ctrl-j": "editor::JoinLines", // editor:join-lines
"ctrl-shift-d": "editor::DuplicateLineDown", // editor:duplicate-lines
"ctrl-up": "editor::MoveLineUp", // editor:move-line-up
"ctrl-down": "editor::MoveLineDown", // editor:move-line-down
"ctrl-\\": "workspace::ToggleLeftDock", // tree-view:toggle
"ctrl-shift-m": "markdown::OpenPreviewToTheSide", // markdown-preview:toggle
"ctrl-shift-m": "markdown::OpenPreviewToTheSide" // markdown-preview:toggle
}
},
{
"context": "Editor && mode == full",
"bindings": {
"ctrl-r": "outline::Toggle" // symbols-view:toggle-project-symbols
}
},
{
"context": "BufferSearchBar",
"bindings": {
"f3": ["editor::SelectNext", { "replace_newest": true }], // find-and-replace:find-next
"shift-f3": ["editor::SelectPrevious", { "replace_newest": true }], //find-and-replace:find-previous
"ctrl-f3": "search::SelectNextMatch", // find-and-replace:find-next-selected
"ctrl-shift-f3": "search::SelectPreviousMatch" // find-and-replace:find-previous-selected
}

@@ -8,6 +8,7 @@
"ctrl-shift-i": "agent::ToggleFocus",
"ctrl-l": "agent::ToggleFocus",
"ctrl-shift-l": "agent::ToggleFocus",
"ctrl-alt-b": "agent::ToggleFocus",
"ctrl-shift-j": "agent::OpenConfiguration"
}
},
@@ -41,6 +42,7 @@
"ctrl-shift-i": "workspace::ToggleRightDock",
"ctrl-l": "workspace::ToggleRightDock",
"ctrl-shift-l": "workspace::ToggleRightDock",
"ctrl-alt-b": "workspace::ToggleRightDock",
"ctrl-w": "workspace::ToggleRightDock", // technically should close chat
"ctrl-.": "agent::ToggleProfileSelector",
"ctrl-/": "agent::ToggleModelSelector",
@@ -9,14 +9,6 @@
},
{
"context": "Editor",
"bindings": {
"cmd-shift-backspace": "editor::DeleteToBeginningOfLine",
"cmd-k cmd-u": "editor::ConvertToUpperCase",
"cmd-k cmd-l": "editor::ConvertToLowerCase"
}
},
{
"context": "Editor && mode == full",
"bindings": {
"ctrl-shift-l": "language_selector::Toggle",
"cmd-|": "pane::RevealInProjectPanel",
@@ -27,20 +19,26 @@
"cmd-shift-g": ["editor::SelectPrevious", { "replace_newest": true }],
"ctrl-shift-down": "editor::AddSelectionBelow",
"ctrl-shift-up": "editor::AddSelectionAbove",
"cmd-shift-backspace": "editor::DeleteToBeginningOfLine",
"cmd-k cmd-u": "editor::ConvertToUpperCase",
"cmd-k cmd-l": "editor::ConvertToLowerCase",
"alt-enter": "editor::Newline",
"cmd-shift-d": "editor::DuplicateLineDown",
"ctrl-cmd-up": "editor::MoveLineUp",
"ctrl-cmd-down": "editor::MoveLineDown",
"cmd-\\": "workspace::ToggleLeftDock",
"ctrl-shift-m": "markdown::OpenPreviewToTheSide",
"ctrl-shift-m": "markdown::OpenPreviewToTheSide"
}
},
{
"context": "Editor && mode == full",
"bindings": {
"cmd-r": "outline::Toggle"
}
},
{
"context": "BufferSearchBar",
"bindings": {
"cmd-g": ["editor::SelectNext", { "replace_newest": true }],
"cmd-shift-g": ["editor::SelectPrevious", { "replace_newest": true }],
"cmd-f3": "search::SelectNextMatch",
"cmd-shift-f3": "search::SelectPreviousMatch"
}

@@ -8,6 +8,7 @@
"cmd-shift-i": "agent::ToggleFocus",
"cmd-l": "agent::ToggleFocus",
"cmd-shift-l": "agent::ToggleFocus",
"cmd-alt-b": "agent::ToggleFocus",
"cmd-shift-j": "agent::OpenConfiguration"
}
},
@@ -42,6 +43,7 @@
"cmd-shift-i": "workspace::ToggleRightDock",
"cmd-l": "workspace::ToggleRightDock",
"cmd-shift-l": "workspace::ToggleRightDock",
"cmd-alt-b": "workspace::ToggleRightDock",
"cmd-w": "workspace::ToggleRightDock", // technically should close chat
"cmd-.": "agent::ToggleProfileSelector",
"cmd-/": "agent::ToggleModelSelector",
@@ -85,10 +85,10 @@
"[ {": ["vim::UnmatchedBackward", { "char": "{" }],
"] )": ["vim::UnmatchedForward", { "char": ")" }],
"[ (": ["vim::UnmatchedBackward", { "char": "(" }],
"f": ["vim::PushFindForward", { "before": false, "multiline": false }],
"t": ["vim::PushFindForward", { "before": true, "multiline": false }],
"shift-f": ["vim::PushFindBackward", { "after": false, "multiline": false }],
"shift-t": ["vim::PushFindBackward", { "after": true, "multiline": false }],
"f": ["vim::PushFindForward", { "before": false }],
"t": ["vim::PushFindForward", { "before": true }],
"shift-f": ["vim::PushFindBackward", { "after": false }],
"shift-t": ["vim::PushFindBackward", { "after": true }],
"m": "vim::PushMark",
"'": ["vim::PushJump", { "line": true }],
"`": ["vim::PushJump", { "line": false }],
@@ -368,10 +368,6 @@
"escape": "editor::Cancel",
"ctrl-[": "editor::Cancel",
":": "command_palette::Toggle",
"left": "vim::WrappingLeft",
"right": "vim::WrappingRight",
"h": "vim::WrappingLeft",
"l": "vim::WrappingRight",
"shift-d": "vim::DeleteToEndOfLine",
"shift-j": "vim::JoinLines",
"y": "editor::Copy",
@@ -389,10 +385,6 @@
"shift-p": ["vim::Paste", { "before": true }],
"u": "vim::Undo",
"ctrl-r": "vim::Redo",
"f": ["vim::PushFindForward", { "before": false, "multiline": true }],
"t": ["vim::PushFindForward", { "before": true, "multiline": true }],
"shift-f": ["vim::PushFindBackward", { "after": false, "multiline": true }],
"shift-t": ["vim::PushFindBackward", { "after": true, "multiline": true }],
"r": "vim::PushReplace",
"s": "vim::Substitute",
"shift-s": "vim::SubstituteLine",
@@ -1720,11 +1720,6 @@
// }
// }
},
// Common language server settings.
"global_lsp_settings": {
// Whether to show the LSP servers button in the status bar.
"button": true
},
// Jupyter settings
"jupyter": {
"enabled": true
@@ -1739,6 +1734,7 @@
"default_mode": "normal",
"toggle_relative_line_numbers": false,
"use_system_clipboard": "always",
"use_multiline_find": false,
"use_smartcase_find": false,
"highlight_on_yank_duration": 200,
"custom_digraphs": {},
@@ -21,7 +21,6 @@ futures.workspace = true
gpui.workspace = true
language.workspace = true
project.workspace = true
proto.workspace = true
smallvec.workspace = true
ui.workspace = true
util.workspace = true
@@ -80,13 +80,10 @@ impl ActivityIndicator {
let this = cx.new(|cx| {
let mut status_events = languages.language_server_binary_statuses();
cx.spawn(async move |this, cx| {
while let Some((name, binary_status)) = status_events.next().await {
while let Some((name, status)) = status_events.next().await {
this.update(cx, |this: &mut ActivityIndicator, cx| {
this.statuses.retain(|s| s.name != name);
this.statuses.push(ServerStatus {
name,
status: LanguageServerStatusUpdate::Binary(binary_status),
});
this.statuses.push(ServerStatus { name, status });
cx.notify();
})?;
}
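
In the hunk above the status stream now hands back a value that can be stored directly, and the indicator keeps at most one entry per server: drop any stale entry for that name, then push the latest. A minimal, self-contained sketch of that upsert pattern, using toy stand-ins rather than Zed's real `ServerStatus`/`LanguageServerName` types:

```rust
#[derive(Debug, Clone, PartialEq)]
struct ServerStatus {
    name: String,   // stand-in for LanguageServerName
    status: String, // stand-in for LanguageServerStatusUpdate
}

// Keep at most one status per server: drop the stale entry, append the latest.
fn upsert_status(statuses: &mut Vec<ServerStatus>, name: &str, status: &str) {
    statuses.retain(|s| s.name != name);
    statuses.push(ServerStatus {
        name: name.to_string(),
        status: status.to_string(),
    });
}

fn main() {
    let mut statuses = Vec::new();
    upsert_status(&mut statuses, "rust-analyzer", "downloading");
    upsert_status(&mut statuses, "rust-analyzer", "starting");
    assert_eq!(statuses.len(), 1);
    assert_eq!(statuses[0].status, "starting");
}
```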
@@ -115,76 +112,8 @@ impl ActivityIndicator {

cx.subscribe(
&project.read(cx).lsp_store(),
|activity_indicator, _, event, cx| match event {
LspStoreEvent::LanguageServerUpdate { name, message, .. } => {
if let proto::update_language_server::Variant::StatusUpdate(status_update) =
message
{
let Some(name) = name.clone() else {
return;
};
let status = match &status_update.status {
Some(proto::status_update::Status::Binary(binary_status)) => {
if let Some(binary_status) =
proto::ServerBinaryStatus::from_i32(*binary_status)
{
let binary_status = match binary_status {
proto::ServerBinaryStatus::None => BinaryStatus::None,
proto::ServerBinaryStatus::CheckingForUpdate => {
BinaryStatus::CheckingForUpdate
}
proto::ServerBinaryStatus::Downloading => {
BinaryStatus::Downloading
}
proto::ServerBinaryStatus::Starting => {
BinaryStatus::Starting
}
proto::ServerBinaryStatus::Stopping => {
BinaryStatus::Stopping
}
proto::ServerBinaryStatus::Stopped => {
BinaryStatus::Stopped
}
proto::ServerBinaryStatus::Failed => {
let Some(error) = status_update.message.clone()
else {
return;
};
BinaryStatus::Failed { error }
}
};
LanguageServerStatusUpdate::Binary(binary_status)
} else {
return;
}
}
Some(proto::status_update::Status::Health(health_status)) => {
if let Some(health) =
proto::ServerHealth::from_i32(*health_status)
{
let health = match health {
proto::ServerHealth::Ok => ServerHealth::Ok,
proto::ServerHealth::Warning => ServerHealth::Warning,
proto::ServerHealth::Error => ServerHealth::Error,
};
LanguageServerStatusUpdate::Health(
health,
status_update.message.clone().map(SharedString::from),
)
} else {
return;
}
}
None => return,
};

activity_indicator.statuses.retain(|s| s.name != name);
activity_indicator
.statuses
.push(ServerStatus { name, status });
}
cx.notify()
}
|_, _, event, cx| match event {
LspStoreEvent::LanguageServerUpdate { .. } => cx.notify(),
_ => {}
},
)
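
Most of the block above (the hunk header shows 76 lines shrinking to 8) is a hand-rolled conversion from wire-format enums (`proto::ServerBinaryStatus`, `proto::ServerHealth`) into local status types, bailing out when a value is unknown or a failure carries no message. A compact sketch of that mapping style with made-up enums, not Zed's real proto types:

```rust
// Wire-format status, standing in for what a protobuf field decodes to.
#[derive(Debug, Clone, Copy)]
enum WireBinaryStatus {
    None,
    Downloading,
    Starting,
    Failed,
}

// Domain-level status used by the UI layer.
#[derive(Debug, PartialEq)]
enum BinaryStatus {
    None,
    Downloading,
    Starting,
    Failed { error: String },
}

// Convert a wire status (plus optional message) into the domain type,
// dropping combinations that carry too little information.
fn to_domain(status: WireBinaryStatus, message: Option<String>) -> Option<BinaryStatus> {
    Some(match status {
        WireBinaryStatus::None => BinaryStatus::None,
        WireBinaryStatus::Downloading => BinaryStatus::Downloading,
        WireBinaryStatus::Starting => BinaryStatus::Starting,
        WireBinaryStatus::Failed => BinaryStatus::Failed {
            // A failure without a message is discarded, like the early `return` above.
            error: message?,
        },
    })
}

fn main() {
    assert_eq!(
        to_domain(WireBinaryStatus::Failed, Some("binary not found".into())),
        Some(BinaryStatus::Failed { error: "binary not found".into() })
    );
    assert_eq!(to_domain(WireBinaryStatus::Failed, None), None);
}
```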
@@ -299,23 +228,9 @@ impl ActivityIndicator {
_: &mut Window,
cx: &mut Context<Self>,
) {
let error_dismissed = if let Some(updater) = &self.auto_updater {
updater.update(cx, |updater, cx| updater.dismiss_error(cx))
} else {
false
};
if error_dismissed {
return;
if let Some(updater) = &self.auto_updater {
updater.update(cx, |updater, cx| updater.dismiss_error(cx));
}

self.project.update(cx, |project, cx| {
if project.last_formatting_failure(cx).is_some() {
project.reset_last_formatting_failure(cx);
true
} else {
false
}
});
}

fn pending_language_server_work<'a>(
@@ -484,12 +399,6 @@ impl ActivityIndicator {
let mut servers_to_clear_statuses = HashSet::<LanguageServerName>::default();
for status in &self.statuses {
match &status.status {
LanguageServerStatusUpdate::Binary(
BinaryStatus::Starting | BinaryStatus::Stopping,
) => {}
LanguageServerStatusUpdate::Binary(BinaryStatus::Stopped) => {
servers_to_clear_statuses.insert(status.name.clone());
}
LanguageServerStatusUpdate::Binary(BinaryStatus::CheckingForUpdate) => {
checking_for_update.push(status.name.clone());
}
@@ -85,14 +85,6 @@ impl AgentProfile {
.collect()
}

pub fn is_tool_enabled(&self, source: ToolSource, tool_name: String, cx: &App) -> bool {
let Some(settings) = AgentSettings::get_global(cx).profiles.get(&self.id) else {
return false;
};

return Self::is_enabled(settings, source, tool_name);
}

fn is_enabled(settings: &AgentProfileSettings, source: ToolSource, name: String) -> bool {
match source {
ToolSource::Native => *settings.tools.get(name.as_str()).unwrap_or(&false),
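
`is_tool_enabled` is a thin public wrapper: look up the active profile in the global settings, then defer to the per-profile lookup, which treats anything missing as disabled. A simplified, runnable sketch of that default-deny lookup, with plain `HashMap`s standing in for `AgentSettings`/`AgentProfileSettings` and made-up profile and tool names:

```rust
use std::collections::HashMap;

// Stand-in for AgentProfileSettings: tool name -> explicitly enabled?
type ProfileSettings = HashMap<String, bool>;
// Stand-in for AgentSettings::profiles: profile id -> profile settings.
type Profiles = HashMap<String, ProfileSettings>;

// Mirrors the lookup above: an unknown profile or an unlisted tool means "disabled".
fn is_tool_enabled(profiles: &Profiles, profile_id: &str, tool_name: &str) -> bool {
    let Some(settings) = profiles.get(profile_id) else {
        return false;
    };
    *settings.get(tool_name).unwrap_or(&false)
}

fn main() {
    let mut profiles = Profiles::new();
    profiles.insert(
        "write".to_string(),
        HashMap::from([("edit_file".to_string(), true)]),
    );

    assert!(is_tool_enabled(&profiles, "write", "edit_file"));
    assert!(!is_tool_enabled(&profiles, "write", "terminal")); // tool not listed
    assert!(!is_tool_enabled(&profiles, "ask", "edit_file")); // unknown profile
}
```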
@@ -145,10 +145,6 @@ impl Message {
}
}

pub fn push_redacted_thinking(&mut self, data: String) {
self.segments.push(MessageSegment::RedactedThinking(data));
}

pub fn push_text(&mut self, text: &str) {
if let Some(MessageSegment::Text(segment)) = self.segments.last_mut() {
segment.push_str(text);
@@ -187,7 +183,7 @@ pub enum MessageSegment {
text: String,
signature: Option<String>,
},
RedactedThinking(String),
RedactedThinking(Vec<u8>),
}

impl MessageSegment {
@@ -198,13 +194,6 @@ impl MessageSegment {
Self::RedactedThinking(_) => false,
}
}

pub fn text(&self) -> Option<&str> {
match self {
MessageSegment::Text(text) => Some(text),
_ => None,
}
}
}

#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
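
`push_text` above shows a common accumulation pattern for streamed output: when the last segment is already text, append to it in place; otherwise push a fresh segment. A self-contained sketch of that pattern with a toy segment enum (not the real `MessageSegment`, whose other branches are outside this hunk):

```rust
#[derive(Debug, PartialEq)]
enum Segment {
    Text(String),
    Thinking(String),
}

// Append streamed text to the last Text segment, or start a new one.
fn push_text(segments: &mut Vec<Segment>, text: &str) {
    if let Some(Segment::Text(last)) = segments.last_mut() {
        last.push_str(text);
    } else {
        segments.push(Segment::Text(text.to_string()));
    }
}

fn main() {
    let mut segments = vec![Segment::Thinking("planning...".to_string())];
    push_text(&mut segments, "Hello, ");
    push_text(&mut segments, "world!");
    assert_eq!(segments.len(), 2);
    assert_eq!(segments[1], Segment::Text("Hello, world!".to_string()));
}
```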
@@ -1654,25 +1643,6 @@ impl Thread {
|
||||
};
|
||||
}
|
||||
}
|
||||
LanguageModelCompletionEvent::RedactedThinking {
|
||||
data
|
||||
} => {
|
||||
thread.received_chunk();
|
||||
|
||||
if let Some(last_message) = thread.messages.last_mut() {
|
||||
if last_message.role == Role::Assistant
|
||||
&& !thread.tool_use.has_tool_results(last_message.id)
|
||||
{
|
||||
last_message.push_redacted_thinking(data);
|
||||
} else {
|
||||
request_assistant_message_id =
|
||||
Some(thread.insert_assistant_message(
|
||||
vec![MessageSegment::RedactedThinking(data)],
|
||||
cx,
|
||||
));
|
||||
};
|
||||
}
|
||||
}
|
||||
LanguageModelCompletionEvent::ToolUse(tool_use) => {
|
||||
let last_assistant_message_id = request_assistant_message_id
|
||||
.unwrap_or_else(|| {
|
||||
@@ -1777,7 +1747,7 @@ impl Thread {
|
||||
match result.as_ref() {
|
||||
Ok(stop_reason) => match stop_reason {
|
||||
StopReason::ToolUse => {
|
||||
let tool_uses = thread.use_pending_tools(window, model.clone(), cx);
|
||||
let tool_uses = thread.use_pending_tools(window, cx, model.clone());
|
||||
cx.emit(ThreadEvent::UsePendingTools { tool_uses });
|
||||
}
|
||||
StopReason::EndTurn | StopReason::MaxTokens => {
|
||||
@@ -2127,8 +2097,8 @@ impl Thread {
|
||||
pub fn use_pending_tools(
|
||||
&mut self,
|
||||
window: Option<AnyWindowHandle>,
|
||||
model: Arc<dyn LanguageModel>,
|
||||
cx: &mut Context<Self>,
|
||||
model: Arc<dyn LanguageModel>,
|
||||
) -> Vec<PendingToolUse> {
|
||||
self.auto_capture_telemetry(cx);
|
||||
let request =
|
||||
@@ -2142,53 +2112,43 @@ impl Thread {
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
for tool_use in pending_tool_uses.iter() {
|
||||
self.use_pending_tool(tool_use.clone(), request.clone(), model.clone(), window, cx);
|
||||
if let Some(tool) = self.tools.read(cx).tool(&tool_use.name, cx) {
|
||||
if tool.needs_confirmation(&tool_use.input, cx)
|
||||
&& !AgentSettings::get_global(cx).always_allow_tool_actions
|
||||
{
|
||||
self.tool_use.confirm_tool_use(
|
||||
tool_use.id.clone(),
|
||||
tool_use.ui_text.clone(),
|
||||
tool_use.input.clone(),
|
||||
request.clone(),
|
||||
tool,
|
||||
);
|
||||
cx.emit(ThreadEvent::ToolConfirmationNeeded);
|
||||
} else {
|
||||
self.run_tool(
|
||||
tool_use.id.clone(),
|
||||
tool_use.ui_text.clone(),
|
||||
tool_use.input.clone(),
|
||||
request.clone(),
|
||||
tool,
|
||||
model.clone(),
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
}
|
||||
} else {
|
||||
self.handle_hallucinated_tool_use(
|
||||
tool_use.id.clone(),
|
||||
tool_use.name.clone(),
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
pending_tool_uses
|
||||
}
|
||||
|
||||
fn use_pending_tool(
|
||||
&mut self,
|
||||
tool_use: PendingToolUse,
|
||||
request: Arc<LanguageModelRequest>,
|
||||
model: Arc<dyn LanguageModel>,
|
||||
window: Option<AnyWindowHandle>,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let Some(tool) = self.tools.read(cx).tool(&tool_use.name, cx) else {
|
||||
return self.handle_hallucinated_tool_use(tool_use.id, tool_use.name, window, cx);
|
||||
};
|
||||
|
||||
if !self.profile.is_tool_enabled(tool.source(), tool.name(), cx) {
|
||||
return self.handle_hallucinated_tool_use(tool_use.id, tool_use.name, window, cx);
|
||||
}
|
||||
|
||||
if tool.needs_confirmation(&tool_use.input, cx)
|
||||
&& !AgentSettings::get_global(cx).always_allow_tool_actions
|
||||
{
|
||||
self.tool_use.confirm_tool_use(
|
||||
tool_use.id,
|
||||
tool_use.ui_text,
|
||||
tool_use.input,
|
||||
request,
|
||||
tool,
|
||||
);
|
||||
cx.emit(ThreadEvent::ToolConfirmationNeeded);
|
||||
} else {
|
||||
self.run_tool(
|
||||
tool_use.id,
|
||||
tool_use.ui_text,
|
||||
tool_use.input,
|
||||
request,
|
||||
tool,
|
||||
model,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn handle_hallucinated_tool_use(
|
||||
&mut self,
|
||||
tool_use_id: LanguageModelToolUseId,
|
||||
|
||||
@@ -71,7 +71,7 @@ impl Column for DataType {
|
||||
}
|
||||
}
|
||||
|
||||
const RULES_FILE_NAMES: [&'static str; 9] = [
|
||||
const RULES_FILE_NAMES: [&'static str; 8] = [
|
||||
".rules",
|
||||
".cursorrules",
|
||||
".windsurfrules",
|
||||
@@ -80,7 +80,6 @@ const RULES_FILE_NAMES: [&'static str; 9] = [
|
||||
"CLAUDE.md",
|
||||
"AGENT.md",
|
||||
"AGENTS.md",
|
||||
"GEMINI.md",
|
||||
];
|
||||
|
||||
pub fn init(cx: &mut App) {
|
||||
@@ -732,7 +731,7 @@ pub enum SerializedMessageSegment {
|
||||
signature: Option<String>,
|
||||
},
|
||||
RedactedThinking {
|
||||
data: String,
|
||||
data: Vec<u8>,
|
||||
},
|
||||
}
|
||||
|
||||
|
||||
@@ -12,10 +12,17 @@ workspace = true
|
||||
path = "src/agent_settings.rs"
|
||||
|
||||
[dependencies]
|
||||
anthropic = { workspace = true, features = ["schemars"] }
|
||||
anyhow.workspace = true
|
||||
collections.workspace = true
|
||||
gpui.workspace = true
|
||||
language_model.workspace = true
|
||||
lmstudio = { workspace = true, features = ["schemars"] }
|
||||
log.workspace = true
|
||||
ollama = { workspace = true, features = ["schemars"] }
|
||||
open_ai = { workspace = true, features = ["schemars"] }
|
||||
deepseek = { workspace = true, features = ["schemars"] }
|
||||
mistral = { workspace = true, features = ["schemars"] }
|
||||
schemars.workspace = true
|
||||
serde.workspace = true
|
||||
settings.workspace = true
|
||||
|
||||
@@ -2,10 +2,16 @@ mod agent_profile;
|
||||
|
||||
use std::sync::Arc;
|
||||
|
||||
use ::open_ai::Model as OpenAiModel;
|
||||
use anthropic::Model as AnthropicModel;
|
||||
use anyhow::{Result, bail};
|
||||
use collections::IndexMap;
|
||||
use deepseek::Model as DeepseekModel;
|
||||
use gpui::{App, Pixels, SharedString};
|
||||
use language_model::LanguageModel;
|
||||
use lmstudio::Model as LmStudioModel;
|
||||
use mistral::Model as MistralModel;
|
||||
use ollama::Model as OllamaModel;
|
||||
use schemars::{JsonSchema, schema::Schema};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::{Settings, SettingsSources};
|
||||
@@ -42,6 +48,45 @@ pub enum NotifyWhenAgentWaiting {
|
||||
Never,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq)]
|
||||
#[serde(tag = "name", rename_all = "snake_case")]
|
||||
#[schemars(deny_unknown_fields)]
|
||||
pub enum AgentProviderContentV1 {
|
||||
#[serde(rename = "zed.dev")]
|
||||
ZedDotDev { default_model: Option<String> },
|
||||
#[serde(rename = "openai")]
|
||||
OpenAi {
|
||||
default_model: Option<OpenAiModel>,
|
||||
api_url: Option<String>,
|
||||
available_models: Option<Vec<OpenAiModel>>,
|
||||
},
|
||||
#[serde(rename = "anthropic")]
|
||||
Anthropic {
|
||||
default_model: Option<AnthropicModel>,
|
||||
api_url: Option<String>,
|
||||
},
|
||||
#[serde(rename = "ollama")]
|
||||
Ollama {
|
||||
default_model: Option<OllamaModel>,
|
||||
api_url: Option<String>,
|
||||
},
|
||||
#[serde(rename = "lmstudio")]
|
||||
LmStudio {
|
||||
default_model: Option<LmStudioModel>,
|
||||
api_url: Option<String>,
|
||||
},
|
||||
#[serde(rename = "deepseek")]
|
||||
DeepSeek {
|
||||
default_model: Option<DeepseekModel>,
|
||||
api_url: Option<String>,
|
||||
},
|
||||
#[serde(rename = "mistral")]
|
||||
Mistral {
|
||||
default_model: Option<MistralModel>,
|
||||
api_url: Option<String>,
|
||||
},
|
||||
}
|
||||
|
||||
#[derive(Default, Clone, Debug)]
|
||||
pub struct AgentSettings {
|
||||
pub enabled: bool,
|
||||
@@ -123,56 +168,366 @@ impl LanguageModelParameters {
|
||||
}
|
||||
}
|
||||
|
||||
/// Agent panel settings
|
||||
#[derive(Clone, Serialize, Deserialize, Debug, Default)]
|
||||
pub struct AgentSettingsContent {
|
||||
#[serde(flatten)]
|
||||
pub inner: Option<AgentSettingsContentInner>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Serialize, Deserialize, Debug)]
|
||||
#[serde(untagged)]
|
||||
pub enum AgentSettingsContentInner {
|
||||
Versioned(Box<VersionedAgentSettingsContent>),
|
||||
Legacy(LegacyAgentSettingsContent),
|
||||
}
|
||||
|
||||
impl AgentSettingsContentInner {
|
||||
fn for_v2(content: AgentSettingsContentV2) -> Self {
|
||||
AgentSettingsContentInner::Versioned(Box::new(VersionedAgentSettingsContent::V2(content)))
|
||||
}
|
||||
}
|
||||
|
||||
impl JsonSchema for AgentSettingsContent {
|
||||
fn schema_name() -> String {
|
||||
VersionedAgentSettingsContent::schema_name()
|
||||
}
|
||||
|
||||
fn json_schema(r#gen: &mut schemars::r#gen::SchemaGenerator) -> Schema {
|
||||
VersionedAgentSettingsContent::json_schema(r#gen)
|
||||
}
|
||||
|
||||
fn is_referenceable() -> bool {
|
||||
VersionedAgentSettingsContent::is_referenceable()
|
||||
}
|
||||
}
|
||||
|
||||
impl AgentSettingsContent {
|
||||
pub fn is_version_outdated(&self) -> bool {
|
||||
match &self.inner {
|
||||
Some(AgentSettingsContentInner::Versioned(settings)) => match **settings {
|
||||
VersionedAgentSettingsContent::V1(_) => true,
|
||||
VersionedAgentSettingsContent::V2(_) => false,
|
||||
},
|
||||
Some(AgentSettingsContentInner::Legacy(_)) => true,
|
||||
None => false,
|
||||
}
|
||||
}
|
||||
|
||||
fn upgrade(&self) -> AgentSettingsContentV2 {
|
||||
match &self.inner {
|
||||
Some(AgentSettingsContentInner::Versioned(settings)) => match **settings {
|
||||
VersionedAgentSettingsContent::V1(ref settings) => AgentSettingsContentV2 {
|
||||
enabled: settings.enabled,
|
||||
button: settings.button,
|
||||
dock: settings.dock,
|
||||
default_width: settings.default_width,
|
||||
default_height: settings.default_width,
|
||||
default_model: settings
|
||||
.provider
|
||||
.clone()
|
||||
.and_then(|provider| match provider {
|
||||
AgentProviderContentV1::ZedDotDev { default_model } => default_model
|
||||
.map(|model| LanguageModelSelection {
|
||||
provider: "zed.dev".into(),
|
||||
model,
|
||||
}),
|
||||
AgentProviderContentV1::OpenAi { default_model, .. } => default_model
|
||||
.map(|model| LanguageModelSelection {
|
||||
provider: "openai".into(),
|
||||
model: model.id().to_string(),
|
||||
}),
|
||||
AgentProviderContentV1::Anthropic { default_model, .. } => {
|
||||
default_model.map(|model| LanguageModelSelection {
|
||||
provider: "anthropic".into(),
|
||||
model: model.id().to_string(),
|
||||
})
|
||||
}
|
||||
AgentProviderContentV1::Ollama { default_model, .. } => default_model
|
||||
.map(|model| LanguageModelSelection {
|
||||
provider: "ollama".into(),
|
||||
model: model.id().to_string(),
|
||||
}),
|
||||
AgentProviderContentV1::LmStudio { default_model, .. } => default_model
|
||||
.map(|model| LanguageModelSelection {
|
||||
provider: "lmstudio".into(),
|
||||
model: model.id().to_string(),
|
||||
}),
|
||||
AgentProviderContentV1::DeepSeek { default_model, .. } => default_model
|
||||
.map(|model| LanguageModelSelection {
|
||||
provider: "deepseek".into(),
|
||||
model: model.id().to_string(),
|
||||
}),
|
||||
AgentProviderContentV1::Mistral { default_model, .. } => default_model
|
||||
.map(|model| LanguageModelSelection {
|
||||
provider: "mistral".into(),
|
||||
model: model.id().to_string(),
|
||||
}),
|
||||
}),
|
||||
inline_assistant_model: None,
|
||||
commit_message_model: None,
|
||||
thread_summary_model: None,
|
||||
inline_alternatives: None,
|
||||
default_profile: None,
|
||||
default_view: None,
|
||||
profiles: None,
|
||||
always_allow_tool_actions: None,
|
||||
notify_when_agent_waiting: None,
|
||||
stream_edits: None,
|
||||
single_file_review: None,
|
||||
model_parameters: Vec::new(),
|
||||
preferred_completion_mode: None,
|
||||
enable_feedback: None,
|
||||
play_sound_when_agent_done: None,
|
||||
},
|
||||
VersionedAgentSettingsContent::V2(ref settings) => settings.clone(),
|
||||
},
|
||||
Some(AgentSettingsContentInner::Legacy(settings)) => AgentSettingsContentV2 {
|
||||
enabled: None,
|
||||
button: settings.button,
|
||||
dock: settings.dock,
|
||||
default_width: settings.default_width,
|
||||
default_height: settings.default_height,
|
||||
default_model: Some(LanguageModelSelection {
|
||||
provider: "openai".into(),
|
||||
model: settings
|
||||
.default_open_ai_model
|
||||
.clone()
|
||||
.unwrap_or_default()
|
||||
.id()
|
||||
.to_string(),
|
||||
}),
|
||||
inline_assistant_model: None,
|
||||
commit_message_model: None,
|
||||
thread_summary_model: None,
|
||||
inline_alternatives: None,
|
||||
default_profile: None,
|
||||
default_view: None,
|
||||
profiles: None,
|
||||
always_allow_tool_actions: None,
|
||||
notify_when_agent_waiting: None,
|
||||
stream_edits: None,
|
||||
single_file_review: None,
|
||||
model_parameters: Vec::new(),
|
||||
preferred_completion_mode: None,
|
||||
enable_feedback: None,
|
||||
play_sound_when_agent_done: None,
|
||||
},
|
||||
None => AgentSettingsContentV2::default(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn set_dock(&mut self, dock: AgentDockPosition) {
|
||||
self.dock = Some(dock);
|
||||
match &mut self.inner {
|
||||
Some(AgentSettingsContentInner::Versioned(settings)) => match **settings {
|
||||
VersionedAgentSettingsContent::V1(ref mut settings) => {
|
||||
settings.dock = Some(dock);
|
||||
}
|
||||
VersionedAgentSettingsContent::V2(ref mut settings) => {
|
||||
settings.dock = Some(dock);
|
||||
}
|
||||
},
|
||||
Some(AgentSettingsContentInner::Legacy(settings)) => {
|
||||
settings.dock = Some(dock);
|
||||
}
|
||||
None => {
|
||||
self.inner = Some(AgentSettingsContentInner::for_v2(AgentSettingsContentV2 {
|
||||
dock: Some(dock),
|
||||
..Default::default()
|
||||
}))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn set_model(&mut self, language_model: Arc<dyn LanguageModel>) {
|
||||
let model = language_model.id().0.to_string();
|
||||
let provider = language_model.provider_id().0.to_string();
|
||||
|
||||
self.default_model = Some(LanguageModelSelection {
|
||||
provider: provider.into(),
|
||||
model,
|
||||
});
|
||||
match &mut self.inner {
|
||||
Some(AgentSettingsContentInner::Versioned(settings)) => match **settings {
|
||||
VersionedAgentSettingsContent::V1(ref mut settings) => match provider.as_ref() {
|
||||
"zed.dev" => {
|
||||
log::warn!("attempted to set zed.dev model on outdated settings");
|
||||
}
|
||||
"anthropic" => {
|
||||
let api_url = match &settings.provider {
|
||||
Some(AgentProviderContentV1::Anthropic { api_url, .. }) => {
|
||||
api_url.clone()
|
||||
}
|
||||
_ => None,
|
||||
};
|
||||
settings.provider = Some(AgentProviderContentV1::Anthropic {
|
||||
default_model: AnthropicModel::from_id(&model).ok(),
|
||||
api_url,
|
||||
});
|
||||
}
|
||||
"ollama" => {
|
||||
let api_url = match &settings.provider {
|
||||
Some(AgentProviderContentV1::Ollama { api_url, .. }) => api_url.clone(),
|
||||
_ => None,
|
||||
};
|
||||
settings.provider = Some(AgentProviderContentV1::Ollama {
|
||||
default_model: Some(ollama::Model::new(
|
||||
&model,
|
||||
None,
|
||||
None,
|
||||
Some(language_model.supports_tools()),
|
||||
Some(language_model.supports_images()),
|
||||
None,
|
||||
)),
|
||||
api_url,
|
||||
});
|
||||
}
|
||||
"lmstudio" => {
|
||||
let api_url = match &settings.provider {
|
||||
Some(AgentProviderContentV1::LmStudio { api_url, .. }) => {
|
||||
api_url.clone()
|
||||
}
|
||||
_ => None,
|
||||
};
|
||||
settings.provider = Some(AgentProviderContentV1::LmStudio {
|
||||
default_model: Some(lmstudio::Model::new(
|
||||
&model, None, None, false, false,
|
||||
)),
|
||||
api_url,
|
||||
});
|
||||
}
|
||||
"openai" => {
|
||||
let (api_url, available_models) = match &settings.provider {
|
||||
Some(AgentProviderContentV1::OpenAi {
|
||||
api_url,
|
||||
available_models,
|
||||
..
|
||||
}) => (api_url.clone(), available_models.clone()),
|
||||
_ => (None, None),
|
||||
};
|
||||
settings.provider = Some(AgentProviderContentV1::OpenAi {
|
||||
default_model: OpenAiModel::from_id(&model).ok(),
|
||||
api_url,
|
||||
available_models,
|
||||
});
|
||||
}
|
||||
"deepseek" => {
|
||||
let api_url = match &settings.provider {
|
||||
Some(AgentProviderContentV1::DeepSeek { api_url, .. }) => {
|
||||
api_url.clone()
|
||||
}
|
||||
_ => None,
|
||||
};
|
||||
settings.provider = Some(AgentProviderContentV1::DeepSeek {
|
||||
default_model: DeepseekModel::from_id(&model).ok(),
|
||||
api_url,
|
||||
});
|
||||
}
|
||||
_ => {}
|
||||
},
|
||||
VersionedAgentSettingsContent::V2(ref mut settings) => {
|
||||
settings.default_model = Some(LanguageModelSelection {
|
||||
provider: provider.into(),
|
||||
model,
|
||||
});
|
||||
}
|
||||
},
|
||||
Some(AgentSettingsContentInner::Legacy(settings)) => {
|
||||
if let Ok(model) = OpenAiModel::from_id(&language_model.id().0) {
|
||||
settings.default_open_ai_model = Some(model);
|
||||
}
|
||||
}
|
||||
None => {
|
||||
self.inner = Some(AgentSettingsContentInner::for_v2(AgentSettingsContentV2 {
|
||||
default_model: Some(LanguageModelSelection {
|
||||
provider: provider.into(),
|
||||
model,
|
||||
}),
|
||||
..Default::default()
|
||||
}));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn set_inline_assistant_model(&mut self, provider: String, model: String) {
|
||||
self.inline_assistant_model = Some(LanguageModelSelection {
|
||||
provider: provider.into(),
|
||||
model,
|
||||
});
|
||||
self.v2_setting(|setting| {
|
||||
setting.inline_assistant_model = Some(LanguageModelSelection {
|
||||
provider: provider.into(),
|
||||
model,
|
||||
});
|
||||
Ok(())
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
|
||||
pub fn set_commit_message_model(&mut self, provider: String, model: String) {
|
||||
self.commit_message_model = Some(LanguageModelSelection {
|
||||
provider: provider.into(),
|
||||
model,
|
||||
});
|
||||
self.v2_setting(|setting| {
|
||||
setting.commit_message_model = Some(LanguageModelSelection {
|
||||
provider: provider.into(),
|
||||
model,
|
||||
});
|
||||
Ok(())
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
|
||||
pub fn v2_setting(
|
||||
&mut self,
|
||||
f: impl FnOnce(&mut AgentSettingsContentV2) -> anyhow::Result<()>,
|
||||
) -> anyhow::Result<()> {
|
||||
match self.inner.get_or_insert_with(|| {
|
||||
AgentSettingsContentInner::for_v2(AgentSettingsContentV2 {
|
||||
..Default::default()
|
||||
})
|
||||
}) {
|
||||
AgentSettingsContentInner::Versioned(boxed) => {
|
||||
if let VersionedAgentSettingsContent::V2(ref mut settings) = **boxed {
|
||||
f(settings)
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
_ => Ok(()),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn set_thread_summary_model(&mut self, provider: String, model: String) {
|
||||
self.thread_summary_model = Some(LanguageModelSelection {
|
||||
provider: provider.into(),
|
||||
model,
|
||||
});
|
||||
self.v2_setting(|setting| {
|
||||
setting.thread_summary_model = Some(LanguageModelSelection {
|
||||
provider: provider.into(),
|
||||
model,
|
||||
});
|
||||
Ok(())
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
|
||||
pub fn set_always_allow_tool_actions(&mut self, allow: bool) {
|
||||
self.always_allow_tool_actions = Some(allow);
|
||||
self.v2_setting(|setting| {
|
||||
setting.always_allow_tool_actions = Some(allow);
|
||||
Ok(())
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
|
||||
pub fn set_play_sound_when_agent_done(&mut self, allow: bool) {
|
||||
self.play_sound_when_agent_done = Some(allow);
|
||||
self.v2_setting(|setting| {
|
||||
setting.play_sound_when_agent_done = Some(allow);
|
||||
Ok(())
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
|
||||
pub fn set_single_file_review(&mut self, allow: bool) {
|
||||
self.single_file_review = Some(allow);
|
||||
self.v2_setting(|setting| {
|
||||
setting.single_file_review = Some(allow);
|
||||
Ok(())
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
|
||||
pub fn set_profile(&mut self, profile_id: AgentProfileId) {
|
||||
self.default_profile = Some(profile_id);
|
||||
self.v2_setting(|setting| {
|
||||
setting.default_profile = Some(profile_id);
|
||||
Ok(())
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
|
||||
pub fn create_profile(
|
||||
@@ -180,39 +535,79 @@ impl AgentSettingsContent {
|
||||
profile_id: AgentProfileId,
|
||||
profile_settings: AgentProfileSettings,
|
||||
) -> Result<()> {
|
||||
let profiles = self.profiles.get_or_insert_default();
|
||||
if profiles.contains_key(&profile_id) {
|
||||
bail!("profile with ID '{profile_id}' already exists");
|
||||
}
|
||||
self.v2_setting(|settings| {
|
||||
let profiles = settings.profiles.get_or_insert_default();
|
||||
if profiles.contains_key(&profile_id) {
|
||||
bail!("profile with ID '{profile_id}' already exists");
|
||||
}
|
||||
|
||||
profiles.insert(
|
||||
profile_id,
|
||||
AgentProfileContent {
|
||||
name: profile_settings.name.into(),
|
||||
tools: profile_settings.tools,
|
||||
enable_all_context_servers: Some(profile_settings.enable_all_context_servers),
|
||||
context_servers: profile_settings
|
||||
.context_servers
|
||||
.into_iter()
|
||||
.map(|(server_id, preset)| {
|
||||
(
|
||||
server_id,
|
||||
ContextServerPresetContent {
|
||||
tools: preset.tools,
|
||||
},
|
||||
)
|
||||
})
|
||||
.collect(),
|
||||
},
|
||||
);
|
||||
profiles.insert(
|
||||
profile_id,
|
||||
AgentProfileContent {
|
||||
name: profile_settings.name.into(),
|
||||
tools: profile_settings.tools,
|
||||
enable_all_context_servers: Some(profile_settings.enable_all_context_servers),
|
||||
context_servers: profile_settings
|
||||
.context_servers
|
||||
.into_iter()
|
||||
.map(|(server_id, preset)| {
|
||||
(
|
||||
server_id,
|
||||
ContextServerPresetContent {
|
||||
tools: preset.tools,
|
||||
},
|
||||
)
|
||||
})
|
||||
.collect(),
|
||||
},
|
||||
);
|
||||
|
||||
Ok(())
|
||||
Ok(())
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
|
||||
#[serde(tag = "version")]
|
||||
#[schemars(deny_unknown_fields)]
|
||||
pub enum VersionedAgentSettingsContent {
|
||||
#[serde(rename = "1")]
|
||||
V1(AgentSettingsContentV1),
|
||||
#[serde(rename = "2")]
|
||||
V2(AgentSettingsContentV2),
|
||||
}
|
||||
|
||||
impl Default for VersionedAgentSettingsContent {
|
||||
fn default() -> Self {
|
||||
Self::V2(AgentSettingsContentV2 {
|
||||
enabled: None,
|
||||
button: None,
|
||||
dock: None,
|
||||
default_width: None,
|
||||
default_height: None,
|
||||
default_model: None,
|
||||
inline_assistant_model: None,
|
||||
commit_message_model: None,
|
||||
thread_summary_model: None,
|
||||
inline_alternatives: None,
|
||||
default_profile: None,
|
||||
default_view: None,
|
||||
profiles: None,
|
||||
always_allow_tool_actions: None,
|
||||
notify_when_agent_waiting: None,
|
||||
stream_edits: None,
|
||||
single_file_review: None,
|
||||
model_parameters: Vec::new(),
|
||||
preferred_completion_mode: None,
|
||||
enable_feedback: None,
|
||||
play_sound_when_agent_done: None,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug, Default)]
|
||||
#[schemars(deny_unknown_fields)]
|
||||
pub struct AgentSettingsContent {
|
||||
pub struct AgentSettingsContentV2 {
|
||||
/// Whether the Agent is enabled.
|
||||
///
|
||||
/// Default: true
|
||||
@@ -339,7 +734,6 @@ impl JsonSchema for LanguageModelProviderSetting {
|
||||
"deepseek".into(),
|
||||
"openrouter".into(),
|
||||
"mistral".into(),
|
||||
"vercel".into(),
|
||||
]),
|
||||
..Default::default()
|
||||
}
|
||||
@@ -384,6 +778,65 @@ pub struct ContextServerPresetContent {
|
||||
pub tools: IndexMap<Arc<str>, bool>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
|
||||
#[schemars(deny_unknown_fields)]
|
||||
pub struct AgentSettingsContentV1 {
|
||||
/// Whether the Agent is enabled.
|
||||
///
|
||||
/// Default: true
|
||||
enabled: Option<bool>,
|
||||
/// Whether to show the Agent panel button in the status bar.
|
||||
///
|
||||
/// Default: true
|
||||
button: Option<bool>,
|
||||
/// Where to dock the Agent.
|
||||
///
|
||||
/// Default: right
|
||||
dock: Option<AgentDockPosition>,
|
||||
/// Default width in pixels when the Agent is docked to the left or right.
|
||||
///
|
||||
/// Default: 640
|
||||
default_width: Option<f32>,
|
||||
/// Default height in pixels when the Agent is docked to the bottom.
|
||||
///
|
||||
/// Default: 320
|
||||
default_height: Option<f32>,
|
||||
/// The provider of the Agent service.
|
||||
///
|
||||
/// This can be "openai", "anthropic", "ollama", "lmstudio", "deepseek", "zed.dev"
|
||||
/// each with their respective default models and configurations.
|
||||
provider: Option<AgentProviderContentV1>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Serialize, Deserialize, JsonSchema, Debug)]
|
||||
#[schemars(deny_unknown_fields)]
|
||||
pub struct LegacyAgentSettingsContent {
|
||||
/// Whether to show the Agent panel button in the status bar.
|
||||
///
|
||||
/// Default: true
|
||||
pub button: Option<bool>,
|
||||
/// Where to dock the Agent.
|
||||
///
|
||||
/// Default: right
|
||||
pub dock: Option<AgentDockPosition>,
|
||||
/// Default width in pixels when the Agent is docked to the left or right.
|
||||
///
|
||||
/// Default: 640
|
||||
pub default_width: Option<f32>,
|
||||
/// Default height in pixels when the Agent is docked to the bottom.
|
||||
///
|
||||
/// Default: 320
|
||||
pub default_height: Option<f32>,
|
||||
/// The default OpenAI model to use when creating new chats.
|
||||
///
|
||||
/// Default: gpt-4-1106-preview
|
||||
pub default_open_ai_model: Option<OpenAiModel>,
|
||||
/// OpenAI API base URL to use when creating new chats.
|
||||
///
|
||||
/// Default: <https://api.openai.com/v1>
|
||||
pub openai_api_url: Option<String>,
|
||||
}
|
||||
|
||||
impl Settings for AgentSettings {
|
||||
const KEY: Option<&'static str> = Some("agent");
|
||||
|
||||
@@ -400,6 +853,11 @@ impl Settings for AgentSettings {
|
||||
let mut settings = AgentSettings::default();
|
||||
|
||||
for value in sources.defaults_and_customizations() {
|
||||
if value.is_version_outdated() {
|
||||
settings.using_outdated_settings_version = true;
|
||||
}
|
||||
|
||||
let value = value.upgrade();
|
||||
merge(&mut settings.enabled, value.enabled);
|
||||
merge(&mut settings.button, value.button);
|
||||
merge(&mut settings.dock, value.dock);
|
||||
@@ -411,23 +869,17 @@ impl Settings for AgentSettings {
|
||||
&mut settings.default_height,
|
||||
value.default_height.map(Into::into),
|
||||
);
|
||||
merge(&mut settings.default_model, value.default_model.clone());
|
||||
merge(&mut settings.default_model, value.default_model);
|
||||
settings.inline_assistant_model = value
|
||||
.inline_assistant_model
|
||||
.clone()
|
||||
.or(settings.inline_assistant_model.take());
|
||||
settings.commit_message_model = value
|
||||
.clone()
|
||||
.commit_message_model
|
||||
.or(settings.commit_message_model.take());
|
||||
settings.thread_summary_model = value
|
||||
.clone()
|
||||
.thread_summary_model
|
||||
.or(settings.thread_summary_model.take());
|
||||
merge(
|
||||
&mut settings.inline_alternatives,
|
||||
value.inline_alternatives.clone(),
|
||||
);
|
||||
merge(&mut settings.inline_alternatives, value.inline_alternatives);
|
||||
merge(
|
||||
&mut settings.always_allow_tool_actions,
|
||||
value.always_allow_tool_actions,
|
||||
@@ -442,7 +894,7 @@ impl Settings for AgentSettings {
|
||||
);
|
||||
merge(&mut settings.stream_edits, value.stream_edits);
|
||||
merge(&mut settings.single_file_review, value.single_file_review);
|
||||
merge(&mut settings.default_profile, value.default_profile.clone());
|
||||
merge(&mut settings.default_profile, value.default_profile);
|
||||
merge(&mut settings.default_view, value.default_view);
|
||||
merge(
|
||||
&mut settings.preferred_completion_mode,
|
||||
@@ -454,24 +906,24 @@ impl Settings for AgentSettings {
|
||||
.model_parameters
|
||||
.extend_from_slice(&value.model_parameters);
|
||||
|
||||
if let Some(profiles) = value.profiles.as_ref() {
|
||||
if let Some(profiles) = value.profiles {
|
||||
settings
|
||||
.profiles
|
||||
.extend(profiles.into_iter().map(|(id, profile)| {
|
||||
(
|
||||
id.clone(),
|
||||
id,
|
||||
AgentProfileSettings {
|
||||
name: profile.name.clone().into(),
|
||||
tools: profile.tools.clone(),
|
||||
name: profile.name.into(),
|
||||
tools: profile.tools,
|
||||
enable_all_context_servers: profile
|
||||
.enable_all_context_servers
|
||||
.unwrap_or_default(),
|
||||
context_servers: profile
|
||||
.context_servers
|
||||
.iter()
|
||||
.into_iter()
|
||||
.map(|(context_server_id, preset)| {
|
||||
(
|
||||
context_server_id.clone(),
|
||||
context_server_id,
|
||||
ContextServerPreset {
|
||||
tools: preset.tools.clone(),
|
||||
},
|
||||
@@ -492,8 +944,28 @@ impl Settings for AgentSettings {
|
||||
.read_value("chat.agent.enabled")
|
||||
.and_then(|b| b.as_bool())
|
||||
{
|
||||
current.enabled = Some(b);
|
||||
current.button = Some(b);
|
||||
match &mut current.inner {
|
||||
Some(AgentSettingsContentInner::Versioned(versioned)) => match versioned.as_mut() {
|
||||
VersionedAgentSettingsContent::V1(setting) => {
|
||||
setting.enabled = Some(b);
|
||||
setting.button = Some(b);
|
||||
}
|
||||
|
||||
VersionedAgentSettingsContent::V2(setting) => {
|
||||
setting.enabled = Some(b);
|
||||
setting.button = Some(b);
|
||||
}
|
||||
},
|
||||
Some(AgentSettingsContentInner::Legacy(setting)) => setting.button = Some(b),
|
||||
None => {
|
||||
current.inner =
|
||||
Some(AgentSettingsContentInner::for_v2(AgentSettingsContentV2 {
|
||||
enabled: Some(b),
|
||||
button: Some(b),
|
||||
..Default::default()
|
||||
}));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -503,3 +975,149 @@ fn merge<T>(target: &mut T, value: Option<T>) {
|
||||
*target = value;
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use fs::Fs;
|
||||
use gpui::{ReadGlobal, TestAppContext};
|
||||
use settings::SettingsStore;
|
||||
|
||||
use super::*;
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_deserialize_agent_settings_with_version(cx: &mut TestAppContext) {
|
||||
let fs = fs::FakeFs::new(cx.executor().clone());
|
||||
fs.create_dir(paths::settings_file().parent().unwrap())
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
cx.update(|cx| {
|
||||
let test_settings = settings::SettingsStore::test(cx);
|
||||
cx.set_global(test_settings);
|
||||
AgentSettings::register(cx);
|
||||
});
|
||||
|
||||
cx.update(|cx| {
|
||||
assert!(!AgentSettings::get_global(cx).using_outdated_settings_version);
|
||||
assert_eq!(
|
||||
AgentSettings::get_global(cx).default_model,
|
||||
LanguageModelSelection {
|
||||
provider: "zed.dev".into(),
|
||||
model: "claude-sonnet-4".into(),
|
||||
}
|
||||
);
|
||||
});
|
||||
|
||||
cx.update(|cx| {
|
||||
settings::SettingsStore::global(cx).update_settings_file::<AgentSettings>(
|
||||
fs.clone(),
|
||||
|settings, _| {
|
||||
*settings = AgentSettingsContent {
|
||||
inner: Some(AgentSettingsContentInner::for_v2(AgentSettingsContentV2 {
|
||||
default_model: Some(LanguageModelSelection {
|
||||
provider: "test-provider".into(),
|
||||
model: "gpt-99".into(),
|
||||
}),
|
||||
inline_assistant_model: None,
|
||||
commit_message_model: None,
|
||||
thread_summary_model: None,
|
||||
inline_alternatives: None,
|
||||
enabled: None,
|
||||
button: None,
|
||||
dock: None,
|
||||
default_width: None,
|
||||
default_height: None,
|
||||
default_profile: None,
|
||||
default_view: None,
|
||||
profiles: None,
|
||||
always_allow_tool_actions: None,
|
||||
play_sound_when_agent_done: None,
|
||||
notify_when_agent_waiting: None,
|
||||
stream_edits: None,
|
||||
single_file_review: None,
|
||||
enable_feedback: None,
|
||||
model_parameters: Vec::new(),
|
||||
preferred_completion_mode: None,
|
||||
})),
|
||||
}
|
||||
},
|
||||
);
|
||||
});
|
||||
|
||||
cx.run_until_parked();
|
||||
|
||||
let raw_settings_value = fs.load(paths::settings_file()).await.unwrap();
|
||||
assert!(raw_settings_value.contains(r#""version": "2""#));
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct AgentSettingsTest {
|
||||
agent: AgentSettingsContent,
|
||||
}
|
||||
|
||||
let agent_settings: AgentSettingsTest =
|
||||
serde_json_lenient::from_str(&raw_settings_value).unwrap();
|
||||
|
||||
assert!(!agent_settings.agent.is_version_outdated());
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_load_settings_from_old_key(cx: &mut TestAppContext) {
|
||||
let fs = fs::FakeFs::new(cx.executor().clone());
|
||||
fs.create_dir(paths::settings_file().parent().unwrap())
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
cx.update(|cx| {
|
||||
let mut test_settings = settings::SettingsStore::test(cx);
|
||||
let user_settings_content = r#"{
|
||||
"assistant": {
|
||||
"enabled": true,
|
||||
"version": "2",
|
||||
"default_model": {
|
||||
"provider": "zed.dev",
|
||||
"model": "gpt-99"
|
||||
},
|
||||
}}"#;
|
||||
test_settings
|
||||
.set_user_settings(user_settings_content, cx)
|
||||
.unwrap();
|
||||
cx.set_global(test_settings);
|
||||
AgentSettings::register(cx);
|
||||
});
|
||||
|
||||
cx.run_until_parked();
|
||||
|
||||
let agent_settings = cx.update(|cx| AgentSettings::get_global(cx).clone());
|
||||
assert!(agent_settings.enabled);
|
||||
assert!(!agent_settings.using_outdated_settings_version);
|
||||
assert_eq!(agent_settings.default_model.model, "gpt-99");
|
||||
|
||||
cx.update_global::<SettingsStore, _>(|settings_store, cx| {
|
||||
settings_store.update_user_settings::<AgentSettings>(cx, |settings| {
|
||||
*settings = AgentSettingsContent {
|
||||
inner: Some(AgentSettingsContentInner::for_v2(AgentSettingsContentV2 {
|
||||
enabled: Some(false),
|
||||
default_model: Some(LanguageModelSelection {
|
||||
provider: "xai".to_owned().into(),
|
||||
model: "grok".to_owned(),
|
||||
}),
|
||||
..Default::default()
|
||||
})),
|
||||
};
|
||||
});
|
||||
});
|
||||
|
||||
cx.run_until_parked();
|
||||
|
||||
let settings = cx.update(|cx| SettingsStore::global(cx).raw_user_settings().clone());
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
struct AgentSettingsTest {
|
||||
assistant: AgentSettingsContent,
|
||||
agent: Option<serde_json_lenient::Value>,
|
||||
}
|
||||
|
||||
let agent_settings: AgentSettingsTest = serde_json::from_value(settings).unwrap();
|
||||
assert!(agent_settings.agent.is_none());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -809,12 +809,7 @@ impl ActiveThread {
};

for message in thread.read(cx).messages().cloned().collect::<Vec<_>>() {
let rendered_message = RenderedMessage::from_segments(
&message.segments,
this.language_registry.clone(),
cx,
);
this.push_rendered_message(message.id, rendered_message);
this.push_message(&message.id, &message.segments, window, cx);

for tool_use in thread.read(cx).tool_uses_for_message(message.id, cx) {
this.render_tool_use_markdown(
@@ -880,11 +875,36 @@ impl ActiveThread {
&self.text_thread_store
}

fn push_rendered_message(&mut self, id: MessageId, rendered_message: RenderedMessage) {
fn push_message(
&mut self,
id: &MessageId,
segments: &[MessageSegment],
_window: &mut Window,
cx: &mut Context<Self>,
) {
let old_len = self.messages.len();
self.messages.push(id);
self.messages.push(*id);
self.list_state.splice(old_len..old_len, 1);
self.rendered_messages_by_id.insert(id, rendered_message);

let rendered_message =
RenderedMessage::from_segments(segments, self.language_registry.clone(), cx);
self.rendered_messages_by_id.insert(*id, rendered_message);
}
|
||||
|
||||
fn edited_message(
|
||||
&mut self,
|
||||
id: &MessageId,
|
||||
segments: &[MessageSegment],
|
||||
_window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let Some(index) = self.messages.iter().position(|message_id| message_id == id) else {
|
||||
return;
|
||||
};
|
||||
self.list_state.splice(index..index + 1, 1);
|
||||
let rendered_message =
|
||||
RenderedMessage::from_segments(segments, self.language_registry.clone(), cx);
|
||||
self.rendered_messages_by_id.insert(*id, rendered_message);
|
||||
}
|
||||
|
||||
fn deleted_message(&mut self, id: &MessageId) {
|
||||
@@ -1017,43 +1037,31 @@ impl ActiveThread {
|
||||
}
|
||||
}
|
||||
ThreadEvent::MessageAdded(message_id) => {
|
||||
if let Some(rendered_message) = self.thread.update(cx, |thread, cx| {
|
||||
thread.message(*message_id).map(|message| {
|
||||
RenderedMessage::from_segments(
|
||||
&message.segments,
|
||||
self.language_registry.clone(),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
}) {
|
||||
self.push_rendered_message(*message_id, rendered_message);
|
||||
if let Some(message_segments) = self
|
||||
.thread
|
||||
.read(cx)
|
||||
.message(*message_id)
|
||||
.map(|message| message.segments.clone())
|
||||
{
|
||||
self.push_message(message_id, &message_segments, window, cx);
|
||||
}
|
||||
|
||||
self.save_thread(cx);
|
||||
cx.notify();
|
||||
}
|
||||
ThreadEvent::MessageEdited(message_id) => {
|
||||
if let Some(index) = self.messages.iter().position(|id| id == message_id) {
|
||||
if let Some(rendered_message) = self.thread.update(cx, |thread, cx| {
|
||||
thread.message(*message_id).map(|message| {
|
||||
let mut rendered_message = RenderedMessage {
|
||||
language_registry: self.language_registry.clone(),
|
||||
segments: Vec::with_capacity(message.segments.len()),
|
||||
};
|
||||
for segment in &message.segments {
|
||||
rendered_message.push_segment(segment, cx);
|
||||
}
|
||||
rendered_message
|
||||
})
|
||||
}) {
|
||||
self.list_state.splice(index..index + 1, 1);
|
||||
self.rendered_messages_by_id
|
||||
.insert(*message_id, rendered_message);
|
||||
self.scroll_to_bottom(cx);
|
||||
self.save_thread(cx);
|
||||
cx.notify();
|
||||
}
|
||||
if let Some(message_segments) = self
|
||||
.thread
|
||||
.read(cx)
|
||||
.message(*message_id)
|
||||
.map(|message| message.segments.clone())
|
||||
{
|
||||
self.edited_message(message_id, &message_segments, window, cx);
|
||||
}
|
||||
|
||||
self.scroll_to_bottom(cx);
|
||||
self.save_thread(cx);
|
||||
cx.notify();
|
||||
}
|
||||
ThreadEvent::MessageDeleted(message_id) => {
|
||||
self.deleted_message(message_id);
|
||||
@@ -1303,11 +1311,17 @@ impl ActiveThread {
|
||||
fn start_editing_message(
|
||||
&mut self,
|
||||
message_id: MessageId,
|
||||
message_text: impl Into<Arc<str>>,
|
||||
message_segments: &[MessageSegment],
|
||||
message_creases: &[MessageCrease],
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
// User message should always consist of a single text segment,
|
||||
// therefore we can simply return early if it's not a text segment.
|
||||
let Some(MessageSegment::Text(message_text)) = message_segments.first() else {
|
||||
return;
|
||||
};
|
||||
|
||||
let editor = crate::message_editor::create_editor(
|
||||
self.workspace.clone(),
|
||||
self.context_store.downgrade(),
|
||||
@@ -1319,7 +1333,7 @@ impl ActiveThread {
|
||||
cx,
|
||||
);
|
||||
editor.update(cx, |editor, cx| {
|
||||
editor.set_text(message_text, window, cx);
|
||||
editor.set_text(message_text.clone(), window, cx);
|
||||
insert_message_creases(editor, message_creases, &self.context_store, window, cx);
|
||||
editor.focus_handle(cx).focus(window);
|
||||
editor.move_to_end(&editor::actions::MoveToEnd, window, cx);
|
||||
@@ -1814,6 +1828,8 @@ impl ActiveThread {
|
||||
return div().children(loading_dots).into_any();
|
||||
}
|
||||
|
||||
let message_creases = message.creases.clone();
|
||||
|
||||
let Some(rendered_message) = self.rendered_messages_by_id.get(&message_id) else {
|
||||
return Empty.into_any();
|
||||
};
|
||||
@@ -2128,30 +2144,15 @@ impl ActiveThread {
|
||||
}),
|
||||
)
|
||||
.on_click(cx.listener({
|
||||
let message_creases = message.creases.clone();
|
||||
let message_segments = message.segments.clone();
|
||||
move |this, _, window, cx| {
|
||||
if let Some(message_text) =
|
||||
this.thread.read(cx).message(message_id).and_then(|message| {
|
||||
message.segments.first().and_then(|segment| {
|
||||
match segment {
|
||||
MessageSegment::Text(message_text) => {
|
||||
Some(Into::<Arc<str>>::into(message_text.as_str()))
|
||||
}
|
||||
_ => {
|
||||
None
|
||||
}
|
||||
}
|
||||
})
|
||||
})
|
||||
{
|
||||
this.start_editing_message(
|
||||
message_id,
|
||||
message_text,
|
||||
&message_creases,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
}
|
||||
this.start_editing_message(
|
||||
message_id,
|
||||
&message_segments,
|
||||
&message_creases,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
}
|
||||
})),
|
||||
),
|
||||
@@ -3825,15 +3826,13 @@ mod tests {
|
||||
});
|
||||
|
||||
active_thread.update_in(cx, |active_thread, window, cx| {
|
||||
if let Some(message_text) = message.segments.first().and_then(MessageSegment::text) {
|
||||
active_thread.start_editing_message(
|
||||
message.id,
|
||||
message_text,
|
||||
message.creases.as_slice(),
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
}
|
||||
active_thread.start_editing_message(
|
||||
message.id,
|
||||
message.segments.as_slice(),
|
||||
message.creases.as_slice(),
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
let editor = active_thread
|
||||
.editing_message
|
||||
.as_ref()
|
||||
@@ -3848,15 +3847,13 @@ mod tests {
|
||||
|
||||
let message = thread.update(cx, |thread, _| thread.message(message.id).cloned().unwrap());
|
||||
active_thread.update_in(cx, |active_thread, window, cx| {
|
||||
if let Some(message_text) = message.segments.first().and_then(MessageSegment::text) {
|
||||
active_thread.start_editing_message(
|
||||
message.id,
|
||||
message_text,
|
||||
message.creases.as_slice(),
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
}
|
||||
active_thread.start_editing_message(
|
||||
message.id,
|
||||
message.segments.as_slice(),
|
||||
message.creases.as_slice(),
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
let editor = active_thread
|
||||
.editing_message
|
||||
.as_ref()
|
||||
@@ -3938,15 +3935,13 @@ mod tests {
|
||||
|
||||
// Edit the message while the completion is still running
|
||||
active_thread.update_in(cx, |active_thread, window, cx| {
|
||||
if let Some(message_text) = message.segments.first().and_then(MessageSegment::text) {
|
||||
active_thread.start_editing_message(
|
||||
message.id,
|
||||
message_text,
|
||||
message.creases.as_slice(),
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
}
|
||||
active_thread.start_editing_message(
|
||||
message.id,
|
||||
message.segments.as_slice(),
|
||||
message.creases.as_slice(),
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
let editor = active_thread
|
||||
.editing_message
|
||||
.as_ref()
|
||||
|
||||
@@ -295,7 +295,10 @@ impl ConfigureContextServerModal {
ContextServerDescriptorRegistry::default_global(cx)
.read(cx)
.context_server_descriptor(&server_id.0)
.map(|_| ContextServerSettings::default_extension())
.map(|_| ContextServerSettings::Extension {
enabled: true,
settings: serde_json::json!({}),
})
})
else {
return Task::ready(Err(anyhow::anyhow!("Context server not found")));

@@ -272,35 +272,42 @@ impl PickerDelegate for ToolPickerDelegate {
let server_id = server_id.clone();
let tool_name = tool_name.clone();
move |settings: &mut AgentSettingsContent, _cx| {
let profiles = settings.profiles.get_or_insert_default();
let profile = profiles
.entry(profile_id)
.or_insert_with(|| AgentProfileContent {
name: default_profile.name.into(),
tools: default_profile.tools,
enable_all_context_servers: Some(
default_profile.enable_all_context_servers,
),
context_servers: default_profile
.context_servers
.into_iter()
.map(|(server_id, preset)| {
(
server_id,
ContextServerPresetContent {
tools: preset.tools,
},
)
})
.collect(),
});
settings
.v2_setting(|v2_settings| {
let profiles = v2_settings.profiles.get_or_insert_default();
let profile =
profiles
.entry(profile_id)
.or_insert_with(|| AgentProfileContent {
name: default_profile.name.into(),
tools: default_profile.tools,
enable_all_context_servers: Some(
default_profile.enable_all_context_servers,
),
context_servers: default_profile
.context_servers
.into_iter()
.map(|(server_id, preset)| {
(
server_id,
ContextServerPresetContent {
tools: preset.tools,
},
)
})
.collect(),
});

if let Some(server_id) = server_id {
let preset = profile.context_servers.entry(server_id).or_default();
*preset.tools.entry(tool_name).or_default() = !is_currently_enabled;
} else {
*profile.tools.entry(tool_name).or_default() = !is_currently_enabled;
}
if let Some(server_id) = server_id {
let preset = profile.context_servers.entry(server_id).or_default();
*preset.tools.entry(tool_name).or_default() = !is_currently_enabled;
} else {
*profile.tools.entry(tool_name).or_default() = !is_currently_enabled;
}

Ok(())
})
.ok();
}
});
}

@@ -2117,7 +2117,6 @@ impl AssistantContext {
);
}
}
LanguageModelCompletionEvent::RedactedThinking { .. } => {},
LanguageModelCompletionEvent::Text(mut chunk) => {
if let Some(start) = thought_process_stack.pop() {
let end = buffer.anchor_before(message_old_end_offset);
@@ -2523,12 +2522,6 @@ impl AssistantContext {
}

let message = start_message;
let at_end = range.end >= message.offset_range.end.saturating_sub(1);
let role_after = if range.start == range.end || at_end {
Role::User
} else {
message.role
};
let role = message.role;
let mut edited_buffer = false;

@@ -2563,7 +2556,7 @@ impl AssistantContext {
};

let suffix_metadata = MessageMetadata {
role: role_after,
role,
status: MessageStatus::Done,
timestamp: suffix.id.0,
cache: None,

@@ -1470,7 +1470,7 @@ impl EditAgentTest {
Project::init_settings(cx);
language::init(cx);
language_model::init(client.clone(), cx);
language_models::init(user_store.clone(), client.clone(), cx);
language_models::init(user_store.clone(), client.clone(), fs.clone(), cx);
crate::init(client.http_client(), cx);
});

@@ -9132,7 +9132,7 @@ impl Editor {
window: &mut Window,
cx: &mut Context<Self>,
) {
self.manipulate_immutable_lines(window, cx, |lines| lines.sort())
self.manipulate_lines(window, cx, |lines| lines.sort())
}

pub fn sort_lines_case_insensitive(
@@ -9141,7 +9141,7 @@ impl Editor {
window: &mut Window,
cx: &mut Context<Self>,
) {
self.manipulate_immutable_lines(window, cx, |lines| {
self.manipulate_lines(window, cx, |lines| {
lines.sort_by_key(|line| line.to_lowercase())
})
}
@@ -9152,7 +9152,7 @@ impl Editor {
window: &mut Window,
cx: &mut Context<Self>,
) {
self.manipulate_immutable_lines(window, cx, |lines| {
self.manipulate_lines(window, cx, |lines| {
let mut seen = HashSet::default();
lines.retain(|line| seen.insert(line.to_lowercase()));
})
@@ -9164,7 +9164,7 @@ impl Editor {
window: &mut Window,
cx: &mut Context<Self>,
) {
self.manipulate_immutable_lines(window, cx, |lines| {
self.manipulate_lines(window, cx, |lines| {
let mut seen = HashSet::default();
lines.retain(|line| seen.insert(*line));
})
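The closures passed to `manipulate_lines` in the hunks above receive a `&mut Vec<&str>` of the selected lines and edit it in place, so each of these commands is just a small vector transformation. A minimal standalone sketch of the case-insensitive dedup used above (the function name and the test are illustrative only, not part of the codebase):

use std::collections::HashSet;

/// In-place, case-insensitive dedup over a vector of line slices,
/// mirroring the closure passed to `manipulate_lines` above.
fn unique_lines_case_insensitive(lines: &mut Vec<&str>) {
    let mut seen = HashSet::new();
    // `insert` returns true only for the first occurrence, so later
    // duplicates (ignoring case) are dropped while order is preserved.
    lines.retain(|line| seen.insert(line.to_lowercase()));
}

#[test]
fn dedups_ignoring_case() {
    let mut lines = vec!["Foo", "foo", "Bar", "FOO", "bar"];
    unique_lines_case_insensitive(&mut lines);
    assert_eq!(lines, vec!["Foo", "Bar"]);
}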
@@ -9606,20 +9606,20 @@ impl Editor {
|
||||
}
|
||||
|
||||
pub fn reverse_lines(&mut self, _: &ReverseLines, window: &mut Window, cx: &mut Context<Self>) {
|
||||
self.manipulate_immutable_lines(window, cx, |lines| lines.reverse())
|
||||
self.manipulate_lines(window, cx, |lines| lines.reverse())
|
||||
}
|
||||
|
||||
pub fn shuffle_lines(&mut self, _: &ShuffleLines, window: &mut Window, cx: &mut Context<Self>) {
|
||||
self.manipulate_immutable_lines(window, cx, |lines| lines.shuffle(&mut thread_rng()))
|
||||
self.manipulate_lines(window, cx, |lines| lines.shuffle(&mut thread_rng()))
|
||||
}
|
||||
|
||||
fn manipulate_lines<M>(
|
||||
fn manipulate_lines<Fn>(
|
||||
&mut self,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
mut manipulate: M,
|
||||
mut callback: Fn,
|
||||
) where
|
||||
M: FnMut(&str) -> LineManipulationResult,
|
||||
Fn: FnMut(&mut Vec<&str>),
|
||||
{
|
||||
self.hide_mouse_cursor(&HideMouseCursorOrigin::TypingAction);
|
||||
|
||||
@@ -9652,14 +9652,18 @@ impl Editor {
|
||||
.text_for_range(start_point..end_point)
|
||||
.collect::<String>();
|
||||
|
||||
let LineManipulationResult { new_text, line_count_before, line_count_after} = manipulate(&text);
|
||||
let mut lines = text.split('\n').collect_vec();
|
||||
|
||||
edits.push((start_point..end_point, new_text));
|
||||
let lines_before = lines.len();
|
||||
callback(&mut lines);
|
||||
let lines_after = lines.len();
|
||||
|
||||
edits.push((start_point..end_point, lines.join("\n")));
|
||||
|
||||
// Selections must change based on added and removed line count
|
||||
let start_row =
|
||||
MultiBufferRow(start_point.row + added_lines as u32 - removed_lines as u32);
|
||||
let end_row = MultiBufferRow(start_row.0 + line_count_after.saturating_sub(1) as u32);
|
||||
let end_row = MultiBufferRow(start_row.0 + lines_after.saturating_sub(1) as u32);
|
||||
new_selections.push(Selection {
|
||||
id: selection.id,
|
||||
start: start_row,
|
||||
@@ -9668,10 +9672,10 @@ impl Editor {
|
||||
reversed: selection.reversed,
|
||||
});
|
||||
|
||||
if line_count_after > line_count_before {
|
||||
added_lines += line_count_after - line_count_before;
|
||||
} else if line_count_before > line_count_after {
|
||||
removed_lines += line_count_before - line_count_after;
|
||||
if lines_after > lines_before {
|
||||
added_lines += lines_after - lines_before;
|
||||
} else if lines_before > lines_after {
|
||||
removed_lines += lines_before - lines_after;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -9716,171 +9720,6 @@ impl Editor {
|
||||
})
|
||||
}
|
||||
|
||||
fn manipulate_immutable_lines<Fn>(
|
||||
&mut self,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
mut callback: Fn,
|
||||
) where
|
||||
Fn: FnMut(&mut Vec<&str>),
|
||||
{
|
||||
self.manipulate_lines(window, cx, |text| {
|
||||
let mut lines: Vec<&str> = text.split('\n').collect();
|
||||
let line_count_before = lines.len();
|
||||
|
||||
callback(&mut lines);
|
||||
|
||||
LineManipulationResult {
|
||||
new_text: lines.join("\n"),
|
||||
line_count_before,
|
||||
line_count_after: lines.len(),
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
fn manipulate_mutable_lines<Fn>(
|
||||
&mut self,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
mut callback: Fn,
|
||||
) where
|
||||
Fn: FnMut(&mut Vec<Cow<'_, str>>),
|
||||
{
|
||||
self.manipulate_lines(window, cx, |text| {
|
||||
let mut lines: Vec<Cow<str>> = text.split('\n').map(Cow::from).collect();
|
||||
let line_count_before = lines.len();
|
||||
|
||||
callback(&mut lines);
|
||||
|
||||
LineManipulationResult {
|
||||
new_text: lines.join("\n"),
|
||||
line_count_before,
|
||||
line_count_after: lines.len(),
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
pub fn convert_indentation_to_spaces(
|
||||
&mut self,
|
||||
_: &ConvertIndentationToSpaces,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let settings = self.buffer.read(cx).language_settings(cx);
|
||||
let tab_size = settings.tab_size.get() as usize;
|
||||
|
||||
self.manipulate_mutable_lines(window, cx, |lines| {
|
||||
// Allocates a reasonably sized scratch buffer once for the whole loop
|
||||
let mut reindented_line = String::with_capacity(MAX_LINE_LEN);
|
||||
// Avoids recomputing spaces that could be inserted many times
|
||||
let space_cache: Vec<Vec<char>> = (1..=tab_size)
|
||||
.map(|n| IndentSize::spaces(n as u32).chars().collect())
|
||||
.collect();
|
||||
|
||||
for line in lines.iter_mut().filter(|line| !line.is_empty()) {
|
||||
let mut chars = line.as_ref().chars();
|
||||
let mut col = 0;
|
||||
let mut changed = false;
|
||||
|
||||
while let Some(ch) = chars.next() {
|
||||
match ch {
|
||||
' ' => {
|
||||
reindented_line.push(' ');
|
||||
col += 1;
|
||||
}
|
||||
'\t' => {
|
||||
// \t are converted to spaces depending on the current column
|
||||
let spaces_len = tab_size - (col % tab_size);
|
||||
reindented_line.extend(&space_cache[spaces_len - 1]);
|
||||
col += spaces_len;
|
||||
changed = true;
|
||||
}
|
||||
_ => {
|
||||
// If we don't append before break, the character is consumed
|
||||
reindented_line.push(ch);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if !changed {
|
||||
reindented_line.clear();
|
||||
continue;
|
||||
}
|
||||
// Append the rest of the line and replace old reference with new one
|
||||
reindented_line.extend(chars);
|
||||
*line = Cow::Owned(reindented_line.clone());
|
||||
reindented_line.clear();
|
||||
}
|
||||
});
|
||||
}
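The expression `tab_size - (col % tab_size)` above pads each tab to the next tab stop rather than by a fixed width. A standalone sketch of that arithmetic, separate from the editor types (the helper name is illustrative, and it converts every tab for simplicity, whereas the code above only rewrites leading indentation):

/// Expand tabs to spaces, padding each tab to the next multiple of `tab_size`.
/// Illustrative sketch of the column arithmetic used above; not the editor's API.
fn expand_tabs(line: &str, tab_size: usize) -> String {
    let mut out = String::with_capacity(line.len());
    let mut col = 0;
    for ch in line.chars() {
        if ch == '\t' {
            // A tab at column `col` advances to the next tab stop.
            let spaces = tab_size - (col % tab_size);
            out.extend(std::iter::repeat(' ').take(spaces));
            col += spaces;
        } else {
            out.push(ch);
            col += 1;
        }
    }
    out
}

#[test]
fn expand_tabs_pads_to_tab_stops() {
    // "ab" occupies columns 0..2, so the tab expands to 2 spaces with tab_size = 4.
    assert_eq!(expand_tabs("ab\tc", 4), "ab  c");
    // A tab at column 0 expands to a full tab stop.
    assert_eq!(expand_tabs("\tx", 4), "    x");
}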
|
||||
|
||||
pub fn convert_indentation_to_tabs(
|
||||
&mut self,
|
||||
_: &ConvertIndentationToTabs,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let settings = self.buffer.read(cx).language_settings(cx);
|
||||
let tab_size = settings.tab_size.get() as usize;
|
||||
|
||||
self.manipulate_mutable_lines(window, cx, |lines| {
|
||||
// Allocates a reasonably sized buffer once for the whole loop
|
||||
let mut reindented_line = String::with_capacity(MAX_LINE_LEN);
|
||||
// Avoids recomputing spaces that could be inserted many times
|
||||
let space_cache: Vec<Vec<char>> = (1..=tab_size)
|
||||
.map(|n| IndentSize::spaces(n as u32).chars().collect())
|
||||
.collect();
|
||||
|
||||
for line in lines.iter_mut().filter(|line| !line.is_empty()) {
|
||||
let mut chars = line.chars();
|
||||
let mut spaces_count = 0;
|
||||
let mut first_non_indent_char = None;
|
||||
let mut changed = false;
|
||||
|
||||
while let Some(ch) = chars.next() {
|
||||
match ch {
|
||||
' ' => {
|
||||
// Keep track of spaces. Append \t when we reach tab_size
|
||||
spaces_count += 1;
|
||||
changed = true;
|
||||
if spaces_count == tab_size {
|
||||
reindented_line.push('\t');
|
||||
spaces_count = 0;
|
||||
}
|
||||
}
|
||||
'\t' => {
|
||||
reindented_line.push('\t');
|
||||
spaces_count = 0;
|
||||
}
|
||||
_ => {
|
||||
// Don't append it yet, we might have remaining spaces
|
||||
first_non_indent_char = Some(ch);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if !changed {
|
||||
reindented_line.clear();
|
||||
continue;
|
||||
}
|
||||
// Remaining spaces that didn't make a full tab stop
|
||||
if spaces_count > 0 {
|
||||
reindented_line.extend(&space_cache[spaces_count - 1]);
|
||||
}
|
||||
// If we consume an extra character that was not indentation, add it back
|
||||
if let Some(extra_char) = first_non_indent_char {
|
||||
reindented_line.push(extra_char);
|
||||
}
|
||||
// Append the rest of the line and replace old reference with new one
|
||||
reindented_line.extend(chars);
|
||||
*line = Cow::Owned(reindented_line.clone());
|
||||
reindented_line.clear();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
pub fn convert_to_upper_case(
|
||||
&mut self,
|
||||
_: &ConvertToUpperCase,
|
||||
@@ -21318,13 +21157,6 @@ pub struct LineHighlight {
|
||||
pub type_id: Option<TypeId>,
|
||||
}
|
||||
|
||||
struct LineManipulationResult {
|
||||
pub new_text: String,
|
||||
pub line_count_before: usize,
|
||||
pub line_count_after: usize,
|
||||
}
|
||||
|
||||
|
||||
fn render_diff_hunk_controls(
|
||||
row: u32,
|
||||
status: &DiffHunkStatus,
|
||||
|
||||
@@ -132,11 +132,6 @@ pub fn notify_if_app_was_updated(cx: &mut App) {
let Some(updater) = AutoUpdater::get(cx) else {
return;
};

if let ReleaseChannel::Nightly = ReleaseChannel::global(cx) {
return;
}

let should_show_notification = updater.read(cx).should_show_update_notification(cx);
cx.spawn(async move |cx| {
let should_show_notification = should_show_notification.await?;

@@ -11,13 +11,6 @@ pub enum BedrockModelMode {
},
}

#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq)]
pub struct BedrockModelCacheConfiguration {
pub max_cache_anchors: usize,
pub min_total_token: u64,
}

#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[derive(Clone, Debug, Default, Serialize, Deserialize, PartialEq, EnumIter)]
pub enum Model {
@@ -111,7 +104,6 @@ pub enum Model {
display_name: Option<String>,
max_output_tokens: Option<u64>,
default_temperature: Option<f32>,
cache_configuration: Option<BedrockModelCacheConfiguration>,
},
}

@@ -409,56 +401,6 @@ impl Model {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn supports_caching(&self) -> bool {
|
||||
match self {
|
||||
// Only Claude models on Bedrock support caching
|
||||
// Nova models support only text caching
|
||||
// https://docs.aws.amazon.com/bedrock/latest/userguide/prompt-caching.html#prompt-caching-models
|
||||
Self::Claude3_5Haiku
|
||||
| Self::Claude3_7Sonnet
|
||||
| Self::Claude3_7SonnetThinking
|
||||
| Self::ClaudeSonnet4
|
||||
| Self::ClaudeSonnet4Thinking
|
||||
| Self::ClaudeOpus4
|
||||
| Self::ClaudeOpus4Thinking => true,
|
||||
|
||||
// Custom models - check if they have cache configuration
|
||||
Self::Custom {
|
||||
cache_configuration,
|
||||
..
|
||||
} => cache_configuration.is_some(),
|
||||
|
||||
// All other models don't support caching
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn cache_configuration(&self) -> Option<BedrockModelCacheConfiguration> {
|
||||
match self {
|
||||
Self::Claude3_7Sonnet
|
||||
| Self::Claude3_7SonnetThinking
|
||||
| Self::ClaudeSonnet4
|
||||
| Self::ClaudeSonnet4Thinking
|
||||
| Self::ClaudeOpus4
|
||||
| Self::ClaudeOpus4Thinking => Some(BedrockModelCacheConfiguration {
|
||||
max_cache_anchors: 4,
|
||||
min_total_token: 1024,
|
||||
}),
|
||||
|
||||
Self::Claude3_5Haiku => Some(BedrockModelCacheConfiguration {
|
||||
max_cache_anchors: 4,
|
||||
min_total_token: 2048,
|
||||
}),
|
||||
|
||||
Self::Custom {
|
||||
cache_configuration,
|
||||
..
|
||||
} => cache_configuration.clone(),
|
||||
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn mode(&self) -> BedrockModelMode {
|
||||
match self {
|
||||
Model::Claude3_7SonnetThinking => BedrockModelMode::Thinking {
|
||||
@@ -718,7 +660,6 @@ mod tests {
|
||||
display_name: Some("My Custom Model".to_string()),
|
||||
max_output_tokens: Some(8192),
|
||||
default_temperature: Some(0.7),
|
||||
cache_configuration: None,
|
||||
};
|
||||
|
||||
// Custom model should return its name unchanged
|
||||
|
||||
@@ -2008,7 +2008,6 @@ async fn join_project(
session.connection_id,
proto::UpdateLanguageServer {
project_id: project_id.to_proto(),
server_name: Some(language_server.name.clone()),
language_server_id: language_server.id,
variant: Some(
proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(

@@ -16,9 +16,9 @@ use crate::stripe_client::{
StripeCreateCheckoutSessionLineItems, StripeCreateCheckoutSessionParams,
StripeCreateCheckoutSessionSubscriptionData, StripeCreateMeterEventParams,
StripeCreateMeterEventPayload, StripeCreateSubscriptionItems, StripeCreateSubscriptionParams,
StripeCustomerId, StripeCustomerUpdate, StripeCustomerUpdateAddress, StripeCustomerUpdateName,
StripeMeter, StripePrice, StripePriceId, StripeSubscription, StripeSubscriptionId,
StripeSubscriptionTrialSettings, StripeSubscriptionTrialSettingsEndBehavior,
StripeCustomerId, StripeMeter, StripePrice, StripePriceId, StripeSubscription,
StripeSubscriptionId, StripeSubscriptionTrialSettings,
StripeSubscriptionTrialSettingsEndBehavior,
StripeSubscriptionTrialSettingsEndBehaviorMissingPaymentMethod, UpdateSubscriptionItems,
UpdateSubscriptionParams,
};
@@ -247,11 +247,6 @@ impl StripeBilling {
|
||||
}]);
|
||||
params.success_url = Some(success_url);
|
||||
params.billing_address_collection = Some(StripeBillingAddressCollection::Required);
|
||||
params.customer_update = Some(StripeCustomerUpdate {
|
||||
address: Some(StripeCustomerUpdateAddress::Auto),
|
||||
name: Some(StripeCustomerUpdateName::Auto),
|
||||
shipping: None,
|
||||
});
|
||||
|
||||
let session = self.client.create_checkout_session(params).await?;
|
||||
Ok(session.url.context("no checkout session URL")?)
|
||||
@@ -306,11 +301,6 @@ impl StripeBilling {
|
||||
}]);
|
||||
params.success_url = Some(success_url);
|
||||
params.billing_address_collection = Some(StripeBillingAddressCollection::Required);
|
||||
params.customer_update = Some(StripeCustomerUpdate {
|
||||
address: Some(StripeCustomerUpdateAddress::Auto),
|
||||
name: Some(StripeCustomerUpdateName::Auto),
|
||||
shipping: None,
|
||||
});
|
||||
|
||||
let session = self.client.create_checkout_session(params).await?;
|
||||
Ok(session.url.context("no checkout session URL")?)
|
||||
|
||||
@@ -154,31 +154,6 @@ pub enum StripeBillingAddressCollection {
|
||||
Required,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Clone)]
|
||||
pub struct StripeCustomerUpdate {
|
||||
pub address: Option<StripeCustomerUpdateAddress>,
|
||||
pub name: Option<StripeCustomerUpdateName>,
|
||||
pub shipping: Option<StripeCustomerUpdateShipping>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
|
||||
pub enum StripeCustomerUpdateAddress {
|
||||
Auto,
|
||||
Never,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
|
||||
pub enum StripeCustomerUpdateName {
|
||||
Auto,
|
||||
Never,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
|
||||
pub enum StripeCustomerUpdateShipping {
|
||||
Auto,
|
||||
Never,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub struct StripeCreateCheckoutSessionParams<'a> {
|
||||
pub customer: Option<&'a StripeCustomerId>,
|
||||
@@ -189,7 +164,6 @@ pub struct StripeCreateCheckoutSessionParams<'a> {
|
||||
pub subscription_data: Option<StripeCreateCheckoutSessionSubscriptionData>,
|
||||
pub success_url: Option<&'a str>,
|
||||
pub billing_address_collection: Option<StripeBillingAddressCollection>,
|
||||
pub customer_update: Option<StripeCustomerUpdate>,
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
|
||||
|
||||
@@ -12,10 +12,9 @@ use crate::stripe_client::{
|
||||
StripeCheckoutSessionMode, StripeCheckoutSessionPaymentMethodCollection, StripeClient,
|
||||
StripeCreateCheckoutSessionLineItems, StripeCreateCheckoutSessionParams,
|
||||
StripeCreateCheckoutSessionSubscriptionData, StripeCreateMeterEventParams,
|
||||
StripeCreateSubscriptionParams, StripeCustomer, StripeCustomerId, StripeCustomerUpdate,
|
||||
StripeMeter, StripeMeterId, StripePrice, StripePriceId, StripeSubscription,
|
||||
StripeSubscriptionId, StripeSubscriptionItem, StripeSubscriptionItemId, UpdateCustomerParams,
|
||||
UpdateSubscriptionParams,
|
||||
StripeCreateSubscriptionParams, StripeCustomer, StripeCustomerId, StripeMeter, StripeMeterId,
|
||||
StripePrice, StripePriceId, StripeSubscription, StripeSubscriptionId, StripeSubscriptionItem,
|
||||
StripeSubscriptionItemId, UpdateCustomerParams, UpdateSubscriptionParams,
|
||||
};
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
@@ -37,7 +36,6 @@ pub struct StripeCreateCheckoutSessionCall {
|
||||
pub subscription_data: Option<StripeCreateCheckoutSessionSubscriptionData>,
|
||||
pub success_url: Option<String>,
|
||||
pub billing_address_collection: Option<StripeBillingAddressCollection>,
|
||||
pub customer_update: Option<StripeCustomerUpdate>,
|
||||
}
|
||||
|
||||
pub struct FakeStripeClient {
|
||||
@@ -235,7 +233,6 @@ impl StripeClient for FakeStripeClient {
|
||||
subscription_data: params.subscription_data,
|
||||
success_url: params.success_url.map(|url| url.to_string()),
|
||||
billing_address_collection: params.billing_address_collection,
|
||||
customer_update: params.customer_update,
|
||||
});
|
||||
|
||||
Ok(StripeCheckoutSession {
|
||||
|
||||
@@ -22,11 +22,10 @@ use crate::stripe_client::{
|
||||
StripeCheckoutSessionPaymentMethodCollection, StripeClient,
|
||||
StripeCreateCheckoutSessionLineItems, StripeCreateCheckoutSessionParams,
|
||||
StripeCreateCheckoutSessionSubscriptionData, StripeCreateMeterEventParams,
|
||||
StripeCreateSubscriptionParams, StripeCustomer, StripeCustomerId, StripeCustomerUpdate,
|
||||
StripeCustomerUpdateAddress, StripeCustomerUpdateName, StripeCustomerUpdateShipping,
|
||||
StripeMeter, StripePrice, StripePriceId, StripePriceRecurring, StripeSubscription,
|
||||
StripeSubscriptionId, StripeSubscriptionItem, StripeSubscriptionItemId,
|
||||
StripeSubscriptionTrialSettings, StripeSubscriptionTrialSettingsEndBehavior,
|
||||
StripeCreateSubscriptionParams, StripeCustomer, StripeCustomerId, StripeMeter, StripePrice,
|
||||
StripePriceId, StripePriceRecurring, StripeSubscription, StripeSubscriptionId,
|
||||
StripeSubscriptionItem, StripeSubscriptionItemId, StripeSubscriptionTrialSettings,
|
||||
StripeSubscriptionTrialSettingsEndBehavior,
|
||||
StripeSubscriptionTrialSettingsEndBehaviorMissingPaymentMethod, UpdateCustomerParams,
|
||||
UpdateSubscriptionParams,
|
||||
};
|
||||
@@ -447,7 +446,6 @@ impl<'a> TryFrom<StripeCreateCheckoutSessionParams<'a>> for CreateCheckoutSessio
|
||||
subscription_data: value.subscription_data.map(Into::into),
|
||||
success_url: value.success_url,
|
||||
billing_address_collection: value.billing_address_collection.map(Into::into),
|
||||
customer_update: value.customer_update.map(Into::into),
|
||||
..Default::default()
|
||||
})
|
||||
}
|
||||
@@ -543,50 +541,3 @@ impl From<StripeBillingAddressCollection> for stripe::CheckoutSessionBillingAddr
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<StripeCustomerUpdateAddress> for stripe::CreateCheckoutSessionCustomerUpdateAddress {
|
||||
fn from(value: StripeCustomerUpdateAddress) -> Self {
|
||||
match value {
|
||||
StripeCustomerUpdateAddress::Auto => {
|
||||
stripe::CreateCheckoutSessionCustomerUpdateAddress::Auto
|
||||
}
|
||||
StripeCustomerUpdateAddress::Never => {
|
||||
stripe::CreateCheckoutSessionCustomerUpdateAddress::Never
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<StripeCustomerUpdateName> for stripe::CreateCheckoutSessionCustomerUpdateName {
|
||||
fn from(value: StripeCustomerUpdateName) -> Self {
|
||||
match value {
|
||||
StripeCustomerUpdateName::Auto => stripe::CreateCheckoutSessionCustomerUpdateName::Auto,
|
||||
StripeCustomerUpdateName::Never => {
|
||||
stripe::CreateCheckoutSessionCustomerUpdateName::Never
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<StripeCustomerUpdateShipping> for stripe::CreateCheckoutSessionCustomerUpdateShipping {
|
||||
fn from(value: StripeCustomerUpdateShipping) -> Self {
|
||||
match value {
|
||||
StripeCustomerUpdateShipping::Auto => {
|
||||
stripe::CreateCheckoutSessionCustomerUpdateShipping::Auto
|
||||
}
|
||||
StripeCustomerUpdateShipping::Never => {
|
||||
stripe::CreateCheckoutSessionCustomerUpdateShipping::Never
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<StripeCustomerUpdate> for stripe::CreateCheckoutSessionCustomerUpdate {
|
||||
fn from(value: StripeCustomerUpdate) -> Self {
|
||||
stripe::CreateCheckoutSessionCustomerUpdate {
|
||||
address: value.address.map(Into::into),
|
||||
name: value.name.map(Into::into),
|
||||
shipping: value.shipping.map(Into::into),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -8,9 +8,8 @@ use crate::stripe_billing::StripeBilling;
|
||||
use crate::stripe_client::{
|
||||
FakeStripeClient, StripeBillingAddressCollection, StripeCheckoutSessionMode,
|
||||
StripeCheckoutSessionPaymentMethodCollection, StripeCreateCheckoutSessionLineItems,
|
||||
StripeCreateCheckoutSessionSubscriptionData, StripeCustomerId, StripeCustomerUpdate,
|
||||
StripeCustomerUpdateAddress, StripeCustomerUpdateName, StripeMeter, StripeMeterId, StripePrice,
|
||||
StripePriceId, StripePriceRecurring, StripeSubscription, StripeSubscriptionId,
|
||||
StripeCreateCheckoutSessionSubscriptionData, StripeCustomerId, StripeMeter, StripeMeterId,
|
||||
StripePrice, StripePriceId, StripePriceRecurring, StripeSubscription, StripeSubscriptionId,
|
||||
StripeSubscriptionItem, StripeSubscriptionItemId, StripeSubscriptionTrialSettings,
|
||||
StripeSubscriptionTrialSettingsEndBehavior,
|
||||
StripeSubscriptionTrialSettingsEndBehaviorMissingPaymentMethod, UpdateSubscriptionItems,
|
||||
@@ -432,14 +431,6 @@ async fn test_checkout_with_zed_pro() {
|
||||
call.billing_address_collection,
|
||||
Some(StripeBillingAddressCollection::Required)
|
||||
);
|
||||
assert_eq!(
|
||||
call.customer_update,
|
||||
Some(StripeCustomerUpdate {
|
||||
address: Some(StripeCustomerUpdateAddress::Auto),
|
||||
name: Some(StripeCustomerUpdateName::Auto),
|
||||
shipping: None,
|
||||
})
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -525,14 +516,6 @@ async fn test_checkout_with_zed_pro_trial() {
|
||||
call.billing_address_collection,
|
||||
Some(StripeBillingAddressCollection::Required)
|
||||
);
|
||||
assert_eq!(
|
||||
call.customer_update,
|
||||
Some(StripeCustomerUpdate {
|
||||
address: Some(StripeCustomerUpdateAddress::Auto),
|
||||
name: Some(StripeCustomerUpdateName::Auto),
|
||||
shipping: None,
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
// Successful checkout with extended trial.
|
||||
@@ -591,13 +574,5 @@ async fn test_checkout_with_zed_pro_trial() {
|
||||
call.billing_address_collection,
|
||||
Some(StripeBillingAddressCollection::Required)
|
||||
);
|
||||
assert_eq!(
|
||||
call.customer_update,
|
||||
Some(StripeCustomerUpdate {
|
||||
address: Some(StripeCustomerUpdateAddress::Auto),
|
||||
name: Some(StripeCustomerUpdateName::Auto),
|
||||
shipping: None,
|
||||
})
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -41,7 +41,7 @@ pub struct CommandPalette {
/// Collapses repeated whitespace characters and double colons in the query.
///
/// This improves the likelihood of a match by either the humanized name or the keymap-style name.
fn normalize_query(input: &str) -> String {
pub fn normalize_action_query(input: &str) -> String {
let mut result = String::with_capacity(input.len());
let mut last_char = None;

@@ -297,7 +297,7 @@ impl PickerDelegate for CommandPaletteDelegate {
|
||||
let mut commands = self.all_commands.clone();
|
||||
let hit_counts = self.hit_counts();
|
||||
let executor = cx.background_executor().clone();
|
||||
let query = normalize_query(query.as_str());
|
||||
let query = normalize_action_query(query.as_str());
|
||||
async move {
|
||||
commands.sort_by_key(|action| {
|
||||
(
|
||||
@@ -311,29 +311,17 @@ impl PickerDelegate for CommandPaletteDelegate {
|
||||
.enumerate()
|
||||
.map(|(ix, command)| StringMatchCandidate::new(ix, &command.name))
|
||||
.collect::<Vec<_>>();
|
||||
let matches = if query.is_empty() {
|
||||
candidates
|
||||
.into_iter()
|
||||
.enumerate()
|
||||
.map(|(index, candidate)| StringMatch {
|
||||
candidate_id: index,
|
||||
string: candidate.string,
|
||||
positions: Vec::new(),
|
||||
score: 0.0,
|
||||
})
|
||||
.collect()
|
||||
} else {
|
||||
fuzzy::match_strings(
|
||||
&candidates,
|
||||
&query,
|
||||
true,
|
||||
true,
|
||||
10000,
|
||||
&Default::default(),
|
||||
executor,
|
||||
)
|
||||
.await
|
||||
};
|
||||
|
||||
let matches = fuzzy::match_strings(
|
||||
&candidates,
|
||||
&query,
|
||||
true,
|
||||
true,
|
||||
10000,
|
||||
&Default::default(),
|
||||
executor,
|
||||
)
|
||||
.await;
|
||||
|
||||
tx.send((commands, matches)).await.log_err();
|
||||
}
|
||||
@@ -422,8 +410,8 @@ impl PickerDelegate for CommandPaletteDelegate {
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Picker<Self>>,
|
||||
) -> Option<Self::ListItem> {
|
||||
let r#match = self.matches.get(ix)?;
|
||||
let command = self.commands.get(r#match.candidate_id)?;
|
||||
let matching_command = self.matches.get(ix)?;
|
||||
let command = self.commands.get(matching_command.candidate_id)?;
|
||||
Some(
|
||||
ListItem::new(ix)
|
||||
.inset(true)
|
||||
@@ -436,7 +424,7 @@ impl PickerDelegate for CommandPaletteDelegate {
|
||||
.justify_between()
|
||||
.child(HighlightedLabel::new(
|
||||
command.name.clone(),
|
||||
r#match.positions.clone(),
|
||||
matching_command.positions.clone(),
|
||||
))
|
||||
.children(KeyBinding::for_action_in(
|
||||
&*command.action,
|
||||
@@ -512,19 +500,28 @@ mod tests {
|
||||
|
||||
#[test]
|
||||
fn test_normalize_query() {
|
||||
assert_eq!(normalize_query("editor: backspace"), "editor: backspace");
|
||||
assert_eq!(normalize_query("editor: backspace"), "editor: backspace");
|
||||
assert_eq!(normalize_query("editor: backspace"), "editor: backspace");
|
||||
assert_eq!(
|
||||
normalize_query("editor::GoToDefinition"),
|
||||
normalize_action_query("editor: backspace"),
|
||||
"editor: backspace"
|
||||
);
|
||||
assert_eq!(
|
||||
normalize_action_query("editor: backspace"),
|
||||
"editor: backspace"
|
||||
);
|
||||
assert_eq!(
|
||||
normalize_action_query("editor: backspace"),
|
||||
"editor: backspace"
|
||||
);
|
||||
assert_eq!(
|
||||
normalize_action_query("editor::GoToDefinition"),
|
||||
"editor:GoToDefinition"
|
||||
);
|
||||
assert_eq!(
|
||||
normalize_query("editor::::GoToDefinition"),
|
||||
normalize_action_query("editor::::GoToDefinition"),
|
||||
"editor:GoToDefinition"
|
||||
);
|
||||
assert_eq!(
|
||||
normalize_query("editor: :GoToDefinition"),
|
||||
normalize_action_query("editor: :GoToDefinition"),
|
||||
"editor: :GoToDefinition"
|
||||
);
|
||||
}
|
||||
|
||||
@@ -1,3 +1,5 @@
use std::collections::{HashMap, HashSet};

#[derive(Debug, Clone, PartialEq, Eq)]
pub enum VariableLookupKind {
Variable,
@@ -18,3 +20,641 @@ pub struct InlineValueLocation {
pub row: usize,
pub column: usize,
}

/// A trait for providing inline values for debugging purposes.
///
/// Implementors of this trait are responsible for analyzing a given node in the
/// source code and extracting variable information, including their names,
/// scopes, and positions. This information is used to display inline values
/// during debugging sessions. Implementors must also handle variable scoping
/// themselves by traversing the syntax tree upwards to determine whether a
/// variable is local or global.
pub trait InlineValueProvider: 'static + Send + Sync {
/// Provides a list of inline value locations based on the given node and source code.
///
/// # Parameters
/// - `node`: The root node of the active debug line. Implementors should traverse
/// upwards from this node to gather variable information and determine their scope.
/// - `source`: The source code as a string slice, used to extract variable names.
/// - `max_row`: The maximum row to consider when collecting variables. Variables
/// declared beyond this row should be ignored.
///
/// # Returns
/// A vector of `InlineValueLocation` instances, each representing a variable's
/// name, scope, and the position where the inline value should be shown.
fn provide(
&self,
node: language::Node,
source: &str,
max_row: usize,
) -> Vec<InlineValueLocation>;
}
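For a sense of the contract, a deliberately trivial provider that reports a single variable at the node's own position could look like the sketch below; it relies on the `InlineValueLocation`, `VariableScope`, and `VariableLookupKind` types above, and the provider itself is made up for illustration rather than taken from the codebase.

// Illustrative only: reports one local variable anchored at the active debug line.
pub struct SingleVariableProvider {
    variable_name: String,
}

impl InlineValueProvider for SingleVariableProvider {
    fn provide(
        &self,
        node: language::Node,
        _source: &str,
        max_row: usize,
    ) -> Vec<InlineValueLocation> {
        // Honor the `max_row` contract: ignore anything declared beyond it.
        if node.start_position().row >= max_row {
            return Vec::new();
        }
        vec![InlineValueLocation {
            variable_name: self.variable_name.clone(),
            scope: VariableScope::Local,
            lookup: VariableLookupKind::Variable,
            row: node.start_position().row,
            column: node.start_position().column,
        }]
    }
}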
|
||||
|
||||
pub struct RustInlineValueProvider;
|
||||
|
||||
impl InlineValueProvider for RustInlineValueProvider {
|
||||
fn provide(
|
||||
&self,
|
||||
mut node: language::Node,
|
||||
source: &str,
|
||||
max_row: usize,
|
||||
) -> Vec<InlineValueLocation> {
|
||||
let mut variables = Vec::new();
|
||||
let mut variable_names = HashSet::new();
|
||||
let mut scope = VariableScope::Local;
|
||||
|
||||
loop {
|
||||
let mut variable_names_in_scope = HashMap::new();
|
||||
for child in node.named_children(&mut node.walk()) {
|
||||
if child.start_position().row >= max_row {
|
||||
break;
|
||||
}
|
||||
|
||||
if scope == VariableScope::Local && child.kind() == "let_declaration" {
|
||||
if let Some(identifier) = child.child_by_field_name("pattern") {
|
||||
let variable_name = source[identifier.byte_range()].to_string();
|
||||
|
||||
if variable_names.contains(&variable_name) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if let Some(index) = variable_names_in_scope.get(&variable_name) {
|
||||
variables.remove(*index);
|
||||
}
|
||||
|
||||
variable_names_in_scope.insert(variable_name.clone(), variables.len());
|
||||
variables.push(InlineValueLocation {
|
||||
variable_name,
|
||||
scope: VariableScope::Local,
|
||||
lookup: VariableLookupKind::Variable,
|
||||
row: identifier.end_position().row,
|
||||
column: identifier.end_position().column,
|
||||
});
|
||||
}
|
||||
} else if child.kind() == "static_item" {
|
||||
if let Some(name) = child.child_by_field_name("name") {
|
||||
let variable_name = source[name.byte_range()].to_string();
|
||||
variables.push(InlineValueLocation {
|
||||
variable_name,
|
||||
scope: scope.clone(),
|
||||
lookup: VariableLookupKind::Expression,
|
||||
row: name.end_position().row,
|
||||
column: name.end_position().column,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
variable_names.extend(variable_names_in_scope.keys().cloned());
|
||||
|
||||
if matches!(node.kind(), "function_item" | "closure_expression") {
|
||||
scope = VariableScope::Global;
|
||||
}
|
||||
|
||||
if let Some(parent) = node.parent() {
|
||||
node = parent;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
variables
|
||||
}
|
||||
}
|
||||
|
||||
pub struct PythonInlineValueProvider;
|
||||
|
||||
impl InlineValueProvider for PythonInlineValueProvider {
|
||||
fn provide(
|
||||
&self,
|
||||
mut node: language::Node,
|
||||
source: &str,
|
||||
max_row: usize,
|
||||
) -> Vec<InlineValueLocation> {
|
||||
let mut variables = Vec::new();
|
||||
let mut variable_names = HashSet::new();
|
||||
let mut scope = VariableScope::Local;
|
||||
|
||||
loop {
|
||||
let mut variable_names_in_scope = HashMap::new();
|
||||
for child in node.named_children(&mut node.walk()) {
|
||||
if child.start_position().row >= max_row {
|
||||
break;
|
||||
}
|
||||
|
||||
if scope == VariableScope::Local {
|
||||
match child.kind() {
|
||||
"expression_statement" => {
|
||||
if let Some(expr) = child.child(0) {
|
||||
if expr.kind() == "assignment" {
|
||||
if let Some(param) = expr.child(0) {
|
||||
let param_identifier = if param.kind() == "identifier" {
|
||||
Some(param)
|
||||
} else if param.kind() == "typed_parameter" {
|
||||
param.child(0)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
if let Some(identifier) = param_identifier {
|
||||
if identifier.kind() == "identifier" {
|
||||
let variable_name =
|
||||
source[identifier.byte_range()].to_string();
|
||||
|
||||
if variable_names.contains(&variable_name) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if let Some(index) =
|
||||
variable_names_in_scope.get(&variable_name)
|
||||
{
|
||||
variables.remove(*index);
|
||||
}
|
||||
|
||||
variable_names_in_scope
|
||||
.insert(variable_name.clone(), variables.len());
|
||||
variables.push(InlineValueLocation {
|
||||
variable_name,
|
||||
scope: VariableScope::Local,
|
||||
lookup: VariableLookupKind::Variable,
|
||||
row: identifier.end_position().row,
|
||||
column: identifier.end_position().column,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
"function_definition" => {
|
||||
if let Some(params) = child.child_by_field_name("parameters") {
|
||||
for param in params.named_children(&mut params.walk()) {
|
||||
let param_identifier = if param.kind() == "identifier" {
|
||||
Some(param)
|
||||
} else if param.kind() == "typed_parameter" {
|
||||
param.child(0)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
if let Some(identifier) = param_identifier {
|
||||
if identifier.kind() == "identifier" {
|
||||
let variable_name =
|
||||
source[identifier.byte_range()].to_string();
|
||||
|
||||
if variable_names.contains(&variable_name) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if let Some(index) =
|
||||
variable_names_in_scope.get(&variable_name)
|
||||
{
|
||||
variables.remove(*index);
|
||||
}
|
||||
|
||||
variable_names_in_scope
|
||||
.insert(variable_name.clone(), variables.len());
|
||||
variables.push(InlineValueLocation {
|
||||
variable_name,
|
||||
scope: VariableScope::Local,
|
||||
lookup: VariableLookupKind::Variable,
|
||||
row: identifier.end_position().row,
|
||||
column: identifier.end_position().column,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
"for_statement" => {
|
||||
if let Some(target) = child.child_by_field_name("left") {
|
||||
if target.kind() == "identifier" {
|
||||
let variable_name = source[target.byte_range()].to_string();
|
||||
|
||||
if variable_names.contains(&variable_name) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if let Some(index) = variable_names_in_scope.get(&variable_name)
|
||||
{
|
||||
variables.remove(*index);
|
||||
}
|
||||
|
||||
variable_names_in_scope
|
||||
.insert(variable_name.clone(), variables.len());
|
||||
variables.push(InlineValueLocation {
|
||||
variable_name,
|
||||
scope: VariableScope::Local,
|
||||
lookup: VariableLookupKind::Variable,
|
||||
row: target.end_position().row,
|
||||
column: target.end_position().column,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
variable_names.extend(variable_names_in_scope.keys().cloned());
|
||||
|
||||
if matches!(node.kind(), "function_definition" | "module")
|
||||
&& node.range().end_point.row < max_row
|
||||
{
|
||||
scope = VariableScope::Global;
|
||||
}
|
||||
|
||||
if let Some(parent) = node.parent() {
|
||||
node = parent;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
variables
|
||||
}
|
||||
}
|
||||
|
||||
pub struct GoInlineValueProvider;
|
||||
|
||||
impl InlineValueProvider for GoInlineValueProvider {
|
||||
fn provide(
|
||||
&self,
|
||||
mut node: language::Node,
|
||||
source: &str,
|
||||
max_row: usize,
|
||||
) -> Vec<InlineValueLocation> {
|
||||
let mut variables = Vec::new();
|
||||
let mut variable_names = HashSet::new();
|
||||
let mut scope = VariableScope::Local;
|
||||
|
||||
loop {
|
||||
let mut variable_names_in_scope = HashMap::new();
|
||||
for child in node.named_children(&mut node.walk()) {
|
||||
if child.start_position().row >= max_row {
|
||||
break;
|
||||
}
|
||||
|
||||
if scope == VariableScope::Local {
|
||||
match child.kind() {
|
||||
"var_declaration" => {
|
||||
for var_spec in child.named_children(&mut child.walk()) {
|
||||
if var_spec.kind() == "var_spec" {
|
||||
if let Some(name_node) = var_spec.child_by_field_name("name") {
|
||||
let variable_name =
|
||||
source[name_node.byte_range()].to_string();
|
||||
|
||||
if variable_names.contains(&variable_name) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if let Some(index) =
|
||||
variable_names_in_scope.get(&variable_name)
|
||||
{
|
||||
variables.remove(*index);
|
||||
}
|
||||
|
||||
variable_names_in_scope
|
||||
.insert(variable_name.clone(), variables.len());
|
||||
variables.push(InlineValueLocation {
|
||||
variable_name,
|
||||
scope: VariableScope::Local,
|
||||
lookup: VariableLookupKind::Variable,
|
||||
row: name_node.end_position().row,
|
||||
column: name_node.end_position().column,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
"short_var_declaration" => {
|
||||
if let Some(left_side) = child.child_by_field_name("left") {
|
||||
for identifier in left_side.named_children(&mut left_side.walk()) {
|
||||
if identifier.kind() == "identifier" {
|
||||
let variable_name =
|
||||
source[identifier.byte_range()].to_string();
|
||||
|
||||
if variable_names.contains(&variable_name) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if let Some(index) =
|
||||
variable_names_in_scope.get(&variable_name)
|
||||
{
|
||||
variables.remove(*index);
|
||||
}
|
||||
|
||||
variable_names_in_scope
|
||||
.insert(variable_name.clone(), variables.len());
|
||||
variables.push(InlineValueLocation {
|
||||
variable_name,
|
||||
scope: VariableScope::Local,
|
||||
lookup: VariableLookupKind::Variable,
|
||||
row: identifier.end_position().row,
|
||||
column: identifier.end_position().column,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
"assignment_statement" => {
|
||||
if let Some(left_side) = child.child_by_field_name("left") {
|
||||
for identifier in left_side.named_children(&mut left_side.walk()) {
|
||||
if identifier.kind() == "identifier" {
|
||||
let variable_name =
|
||||
source[identifier.byte_range()].to_string();
|
||||
|
||||
if variable_names.contains(&variable_name) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if let Some(index) =
|
||||
variable_names_in_scope.get(&variable_name)
|
||||
{
|
||||
variables.remove(*index);
|
||||
}
|
||||
|
||||
variable_names_in_scope
|
||||
.insert(variable_name.clone(), variables.len());
|
||||
variables.push(InlineValueLocation {
|
||||
variable_name,
|
||||
scope: VariableScope::Local,
|
||||
lookup: VariableLookupKind::Variable,
|
||||
row: identifier.end_position().row,
|
||||
column: identifier.end_position().column,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
"function_declaration" | "method_declaration" => {
|
||||
if let Some(params) = child.child_by_field_name("parameters") {
|
||||
for param in params.named_children(&mut params.walk()) {
|
||||
if param.kind() == "parameter_declaration" {
|
||||
if let Some(name_node) = param.child_by_field_name("name") {
|
||||
let variable_name =
|
||||
source[name_node.byte_range()].to_string();
|
||||
|
||||
if variable_names.contains(&variable_name) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if let Some(index) =
|
||||
variable_names_in_scope.get(&variable_name)
|
||||
{
|
||||
variables.remove(*index);
|
||||
}
|
||||
|
||||
variable_names_in_scope
|
||||
.insert(variable_name.clone(), variables.len());
|
||||
variables.push(InlineValueLocation {
|
||||
variable_name,
|
||||
scope: VariableScope::Local,
|
||||
lookup: VariableLookupKind::Variable,
|
||||
row: name_node.end_position().row,
|
||||
column: name_node.end_position().column,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
"for_statement" => {
|
||||
if let Some(clause) = child.named_child(0) {
|
||||
if clause.kind() == "for_clause" {
|
||||
if let Some(init) = clause.named_child(0) {
|
||||
if init.kind() == "short_var_declaration" {
|
||||
if let Some(left_side) =
|
||||
init.child_by_field_name("left")
|
||||
{
|
||||
if left_side.kind() == "expression_list" {
|
||||
for identifier in left_side
|
||||
.named_children(&mut left_side.walk())
|
||||
{
|
||||
if identifier.kind() == "identifier" {
|
||||
let variable_name = source
|
||||
[identifier.byte_range()]
|
||||
.to_string();
|
||||
|
||||
if variable_names
|
||||
.contains(&variable_name)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
if let Some(index) =
|
||||
variable_names_in_scope
|
||||
.get(&variable_name)
|
||||
{
|
||||
variables.remove(*index);
|
||||
}
|
||||
|
||||
variable_names_in_scope.insert(
|
||||
variable_name.clone(),
|
||||
variables.len(),
|
||||
);
|
||||
variables.push(InlineValueLocation {
|
||||
variable_name,
|
||||
scope: VariableScope::Local,
|
||||
lookup:
|
||||
VariableLookupKind::Variable,
|
||||
row: identifier.end_position().row,
|
||||
column: identifier
|
||||
.end_position()
|
||||
.column,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} else if clause.kind() == "range_clause" {
|
||||
if let Some(left) = clause.child_by_field_name("left") {
|
||||
if left.kind() == "expression_list" {
|
||||
for identifier in left.named_children(&mut left.walk())
|
||||
{
|
||||
if identifier.kind() == "identifier" {
|
||||
let variable_name =
|
||||
source[identifier.byte_range()].to_string();
|
||||
|
||||
if variable_name == "_" {
|
||||
continue;
|
||||
}
|
||||
|
||||
if variable_names.contains(&variable_name) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if let Some(index) =
|
||||
variable_names_in_scope.get(&variable_name)
|
||||
{
|
||||
variables.remove(*index);
|
||||
}
|
||||
variable_names_in_scope.insert(
|
||||
variable_name.clone(),
|
||||
variables.len(),
|
||||
);
|
||||
variables.push(InlineValueLocation {
|
||||
variable_name,
|
||||
scope: VariableScope::Local,
|
||||
lookup: VariableLookupKind::Variable,
|
||||
row: identifier.end_position().row,
|
||||
column: identifier.end_position().column,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
} else if child.kind() == "var_declaration" {
|
||||
for var_spec in child.named_children(&mut child.walk()) {
|
||||
if var_spec.kind() == "var_spec" {
|
||||
if let Some(name_node) = var_spec.child_by_field_name("name") {
|
||||
let variable_name = source[name_node.byte_range()].to_string();
|
||||
variables.push(InlineValueLocation {
|
||||
variable_name,
|
||||
scope: VariableScope::Global,
|
||||
lookup: VariableLookupKind::Expression,
|
||||
row: name_node.end_position().row,
|
||||
column: name_node.end_position().column,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
variable_names.extend(variable_names_in_scope.keys().cloned());
|
||||
|
||||
if matches!(node.kind(), "function_declaration" | "method_declaration") {
|
||||
scope = VariableScope::Global;
|
||||
}
|
||||
|
||||
if let Some(parent) = node.parent() {
|
||||
node = parent;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
variables
|
||||
}
|
||||
}
|
#[cfg(test)]
mod tests {
    use super::*;
    use tree_sitter::Parser;

    #[test]
    fn test_go_inline_value_provider() {
        let provider = GoInlineValueProvider;
        let source = r#"
package main

func main() {
    items := []int{1, 2, 3, 4, 5}
    for i, v := range items {
        println(i, v)
    }
    for j := 0; j < 10; j++ {
        println(j)
    }
}
"#;

        let mut parser = Parser::new();
        if parser
            .set_language(&tree_sitter_go::LANGUAGE.into())
            .is_err()
        {
            return;
        }
        let Some(tree) = parser.parse(source, None) else {
            return;
        };
        let root_node = tree.root_node();

        let mut main_body = None;
        for child in root_node.named_children(&mut root_node.walk()) {
            if child.kind() == "function_declaration" {
                if let Some(name) = child.child_by_field_name("name") {
                    if &source[name.byte_range()] == "main" {
                        if let Some(body) = child.child_by_field_name("body") {
                            main_body = Some(body);
                            break;
                        }
                    }
                }
            }
        }

        let Some(main_body) = main_body else {
            return;
        };

        let variables = provider.provide(main_body, source, 100);
        assert!(variables.len() >= 2);

        let variable_names: Vec<&str> =
            variables.iter().map(|v| v.variable_name.as_str()).collect();
        assert!(variable_names.contains(&"items"));
        assert!(variable_names.contains(&"j"));
    }

    #[test]
    fn test_go_inline_value_provider_counter_pattern() {
        let provider = GoInlineValueProvider;
        let source = r#"
package main

func main() {
    N := 10
    for i := range N {
        println(i)
    }
}
"#;

        let mut parser = Parser::new();
        if parser
            .set_language(&tree_sitter_go::LANGUAGE.into())
            .is_err()
        {
            return;
        }
        let Some(tree) = parser.parse(source, None) else {
            return;
        };
        let root_node = tree.root_node();

        let mut main_body = None;
        for child in root_node.named_children(&mut root_node.walk()) {
            if child.kind() == "function_declaration" {
                if let Some(name) = child.child_by_field_name("name") {
                    if &source[name.byte_range()] == "main" {
                        if let Some(body) = child.child_by_field_name("body") {
                            main_body = Some(body);
                            break;
                        }
                    }
                }
            }
        }

        let Some(main_body) = main_body else {
            return;
        };
        let variables = provider.provide(main_body, source, 100);

        let variable_names: Vec<&str> =
            variables.iter().map(|v| v.variable_name.as_str()).collect();
        assert!(variable_names.contains(&"N"));
        assert!(variable_names.contains(&"i"));
    }
}
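For orientation, here is a minimal sketch of how a consumer might turn the `InlineValueLocation` entries returned by `provide` into displayable hints. This is illustrative only and not Zed's actual rendering path; `FrameValues` and `resolve_inline_value` are hypothetical names, while the `lookup`/`scope` semantics follow the provider code and the debugger tests later in this diff (locals come from the stopped frame's scope, expression lookups go through an evaluate request):

use std::collections::HashMap;

// Hypothetical stand-ins for whatever the editor side does with the provider output.
struct FrameValues {
    locals: HashMap<String, String>,  // from the stopped frame's Local scope
    globals: HashMap<String, String>, // resolved via evaluate requests
}

fn resolve_inline_value(loc: &InlineValueLocation, frame: &FrameValues) -> Option<String> {
    let value = match loc.lookup {
        // A plain variable lookup can be answered from the scope's variable list.
        VariableLookupKind::Variable => frame.locals.get(&loc.variable_name),
        // An expression lookup (used for Go package-level vars above) would instead
        // be sent to the adapter as an evaluate request.
        VariableLookupKind::Expression => frame.globals.get(&loc.variable_name),
    }?;
    // loc.row / loc.column say where the hint should be anchored in the buffer.
    Some(format!("{}: {}", loc.variable_name, value))
}
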
@@ -8,7 +8,10 @@ use task::{
    AdapterSchema, AdapterSchemas, DebugRequest, DebugScenario, SpawnInTerminal, TaskTemplate,
};

use crate::adapters::{DebugAdapter, DebugAdapterName};
use crate::{
    adapters::{DebugAdapter, DebugAdapterName},
    inline_value::InlineValueProvider,
};
use std::{collections::BTreeMap, sync::Arc};

/// Given a user build configuration, locator creates a fill-in debug target ([DebugScenario]) on behalf of the user.
@@ -30,6 +33,7 @@ pub trait DapLocator: Send + Sync {
struct DapRegistryState {
    adapters: BTreeMap<DebugAdapterName, Arc<dyn DebugAdapter>>,
    locators: FxHashMap<SharedString, Arc<dyn DapLocator>>,
    inline_value_providers: FxHashMap<String, Arc<dyn InlineValueProvider>>,
}

#[derive(Clone, Default)]
@@ -78,6 +82,22 @@ impl DapRegistry {
        schemas
    }

    pub fn add_inline_value_provider(
        &self,
        language: String,
        provider: Arc<dyn InlineValueProvider>,
    ) {
        let _previous_value = self
            .0
            .write()
            .inline_value_providers
            .insert(language, provider);
        debug_assert!(
            _previous_value.is_none(),
            "Attempted to insert a new inline value provider when one is already registered"
        );
    }

    pub fn locators(&self) -> FxHashMap<SharedString, Arc<dyn DapLocator>> {
        self.0.read().locators.clone()
    }
@@ -86,6 +106,10 @@ impl DapRegistry {
        self.0.read().adapters.get(name).cloned()
    }

    pub fn inline_value_provider(&self, language: &str) -> Option<Arc<dyn InlineValueProvider>> {
        self.0.read().inline_value_providers.get(language).cloned()
    }

    pub fn enumerate_adapters(&self) -> Vec<DebugAdapterName> {
        self.0.read().adapters.keys().cloned().collect()
    }

@@ -18,6 +18,7 @@ use dap::{
        GithubRepo,
    },
    configure_tcp_connection,
    inline_value::{GoInlineValueProvider, PythonInlineValueProvider, RustInlineValueProvider},
};
use gdb::GdbDebugAdapter;
use go::GoDebugAdapter;
@@ -43,5 +44,10 @@ pub fn init(cx: &mut App) {
        {
            registry.add_adapter(Arc::from(dap::FakeAdapter {}));
        }

        registry.add_inline_value_provider("Rust".to_string(), Arc::from(RustInlineValueProvider));
        registry
            .add_inline_value_provider("Python".to_string(), Arc::from(PythonInlineValueProvider));
        registry.add_inline_value_provider("Go".to_string(), Arc::from(GoInlineValueProvider));
    })
}

@@ -81,4 +81,3 @@ unindent.workspace = true
util = { workspace = true, features = ["test-support"] }
workspace = { workspace = true, features = ["test-support"] }
zlog.workspace = true
tree-sitter-go.workspace = true
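As a usage illustration of the registry API shown in the hunks above (not part of the diff itself), registering and later looking up a provider keyed by language name might look like the following; `register_and_lookup` is a hypothetical helper, while the method names and the "Go" key mirror the init code above:

use std::sync::Arc;

// Illustrative sketch of the DapRegistry inline-value-provider API shown above.
fn register_and_lookup(registry: &DapRegistry) {
    // Providers are keyed by language name; inserting a second provider for the same
    // language trips the debug_assert! inside add_inline_value_provider.
    registry.add_inline_value_provider("Go".to_string(), Arc::from(GoInlineValueProvider));

    // Later, the editor side asks for the provider matching the buffer's language.
    if let Some(provider) = registry.inline_value_provider("Go") {
        // `provider` can now be handed a syntax node plus source text, as in the tests above.
        let _ = provider;
    }
}
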
||||
@@ -695,6 +695,30 @@ impl DebugPanel {
|
||||
}
|
||||
}),
|
||||
)
|
||||
.child(
|
||||
IconButton::new("debug-step-out", IconName::ArrowUpRight)
|
||||
.icon_size(IconSize::XSmall)
|
||||
.shape(ui::IconButtonShape::Square)
|
||||
.on_click(window.listener_for(
|
||||
&running_state,
|
||||
|this, _, _window, cx| {
|
||||
this.step_out(cx);
|
||||
},
|
||||
))
|
||||
.disabled(thread_status != ThreadStatus::Stopped)
|
||||
.tooltip({
|
||||
let focus_handle = focus_handle.clone();
|
||||
move |window, cx| {
|
||||
Tooltip::for_action_in(
|
||||
"Step out",
|
||||
&StepOut,
|
||||
&focus_handle,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
}
|
||||
}),
|
||||
)
|
||||
.child(
|
||||
IconButton::new(
|
||||
"debug-step-into",
|
||||
@@ -722,30 +746,6 @@ impl DebugPanel {
|
||||
}
|
||||
}),
|
||||
)
|
||||
.child(
|
||||
IconButton::new("debug-step-out", IconName::ArrowUpRight)
|
||||
.icon_size(IconSize::XSmall)
|
||||
.shape(ui::IconButtonShape::Square)
|
||||
.on_click(window.listener_for(
|
||||
&running_state,
|
||||
|this, _, _window, cx| {
|
||||
this.step_out(cx);
|
||||
},
|
||||
))
|
||||
.disabled(thread_status != ThreadStatus::Stopped)
|
||||
.tooltip({
|
||||
let focus_handle = focus_handle.clone();
|
||||
move |window, cx| {
|
||||
Tooltip::for_action_in(
|
||||
"Step out",
|
||||
&StepOut,
|
||||
&focus_handle,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
}
|
||||
}),
|
||||
)
|
||||
.child(Divider::vertical())
|
||||
.child(
|
||||
IconButton::new("debug-restart", IconName::DebugRestart)
|
||||
|
||||
@@ -11,7 +11,7 @@ use project::worktree_store::WorktreeStore;
|
||||
use rpc::proto;
|
||||
use running::RunningState;
|
||||
use std::{cell::OnceCell, sync::OnceLock};
|
||||
use ui::{Indicator, Tooltip, prelude::*};
|
||||
use ui::{Indicator, prelude::*};
|
||||
use workspace::{
|
||||
CollaboratorId, FollowableItem, ViewId, Workspace,
|
||||
item::{self, Item},
|
||||
@@ -153,8 +153,6 @@ impl DebugSession {
|
||||
};
|
||||
|
||||
h_flex()
|
||||
.id("session-label")
|
||||
.tooltip(Tooltip::text(format!("Session {}", self.session_id(cx).0,)))
|
||||
.ml(depth * px(16.0))
|
||||
.gap_2()
|
||||
.when_some(icon, |this, indicator| this.child(indicator))
|
||||
|
||||
@@ -582,31 +582,14 @@ impl CompletionProvider for ConsoleQueryBarCompletionProvider {

    fn is_completion_trigger(
        &self,
        buffer: &Entity<Buffer>,
        position: language::Anchor,
        text: &str,
        _buffer: &Entity<Buffer>,
        _position: language::Anchor,
        _text: &str,
        _trigger_in_words: bool,
        menu_is_open: bool,
        cx: &mut Context<Editor>,
        _menu_is_open: bool,
        _cx: &mut Context<Editor>,
    ) -> bool {
        let snapshot = buffer.read(cx).snapshot();
        if !menu_is_open && !snapshot.settings_at(position, cx).show_completions_on_input {
            return false;
        }

        self.0
            .read_with(cx, |console, cx| {
                console
                    .session
                    .read(cx)
                    .capabilities()
                    .completion_trigger_characters
                    .as_ref()
                    .map(|triggers| triggers.contains(&text.to_string()))
            })
            .ok()
            .flatten()
            .unwrap_or(true)
        true
    }
}
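For context, the branch being dropped here consulted the adapter's advertised trigger characters before deciding whether to pop completions. A minimal sketch of that membership check, with a hypothetical `trigger_characters` slice standing in for `capabilities().completion_trigger_characters`:

// Hedged sketch: demonstrates the trigger-character check the removed code performed.
// `text` is the just-typed string passed to is_completion_trigger.
fn is_trigger(text: &str, trigger_characters: Option<&[String]>) -> bool {
    trigger_characters
        .map(|triggers| triggers.iter().any(|t| t == text))
        // When the adapter does not advertise trigger characters, fall back to
        // triggering on every keystroke, matching the old `.unwrap_or(true)`.
        .unwrap_or(true)
}
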
|
||||
|
||||
@@ -646,23 +629,8 @@ impl ConsoleQueryBarCompletionProvider {
|
||||
(variables, string_matches)
|
||||
});
|
||||
|
||||
let snapshot = buffer.read(cx).text_snapshot();
|
||||
let query = snapshot.text();
|
||||
let replace_range = {
|
||||
let buffer_offset = buffer_position.to_offset(&snapshot);
|
||||
let reversed_chars = snapshot.reversed_chars_for_range(0..buffer_offset);
|
||||
let mut word_len = 0;
|
||||
for ch in reversed_chars {
|
||||
if ch.is_alphanumeric() || ch == '_' {
|
||||
word_len += 1;
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
let word_start_offset = buffer_offset - word_len;
|
||||
let start_anchor = snapshot.anchor_at(word_start_offset, Bias::Left);
|
||||
start_anchor..buffer_position
|
||||
};
|
||||
let query = buffer.read(cx).text();
|
||||
|
||||
cx.spawn(async move |_, cx| {
|
||||
const LIMIT: usize = 10;
|
||||
let matches = fuzzy::match_strings(
|
||||
@@ -682,7 +650,7 @@ impl ConsoleQueryBarCompletionProvider {
|
||||
let variable_value = variables.get(&string_match.string)?;
|
||||
|
||||
Some(project::Completion {
|
||||
replace_range: replace_range.clone(),
|
||||
replace_range: buffer_position..buffer_position,
|
||||
new_text: string_match.string.clone(),
|
||||
label: CodeLabel {
|
||||
filter_range: 0..string_match.string.len(),
|
||||
|
||||
@@ -246,10 +246,10 @@ fn main() {
|
||||
editor.update_in(cx, |editor, window, cx| {
|
||||
pretty_assertions::assert_eq!(
|
||||
r#"
|
||||
static mut GLOBAL: usize = 1;
|
||||
static mut GLOBAL: 1: usize = 1;
|
||||
|
||||
fn main() {
|
||||
let x: 10 = 10;
|
||||
let x = 10;
|
||||
let value = 42;
|
||||
let y = 4;
|
||||
let tester = {
|
||||
@@ -303,11 +303,11 @@ fn main() {
|
||||
editor.update_in(cx, |editor, window, cx| {
|
||||
pretty_assertions::assert_eq!(
|
||||
r#"
|
||||
static mut GLOBAL: usize = 1;
|
||||
static mut GLOBAL: 1: usize = 1;
|
||||
|
||||
fn main() {
|
||||
let x: 10 = 10;
|
||||
let value: 42 = 42;
|
||||
let value = 42;
|
||||
let y = 4;
|
||||
let tester = {
|
||||
let y = 10;
|
||||
@@ -360,12 +360,12 @@ fn main() {
|
||||
editor.update_in(cx, |editor, window, cx| {
|
||||
pretty_assertions::assert_eq!(
|
||||
r#"
|
||||
static mut GLOBAL: usize = 1;
|
||||
static mut GLOBAL: 1: usize = 1;
|
||||
|
||||
fn main() {
|
||||
let x: 10 = 10;
|
||||
let value: 42 = 42;
|
||||
let y: 4 = 4;
|
||||
let y = 4;
|
||||
let tester = {
|
||||
let y = 10;
|
||||
let y = 5;
|
||||
@@ -417,7 +417,7 @@ fn main() {
|
||||
editor.update_in(cx, |editor, window, cx| {
|
||||
pretty_assertions::assert_eq!(
|
||||
r#"
|
||||
static mut GLOBAL: usize = 1;
|
||||
static mut GLOBAL: 1: usize = 1;
|
||||
|
||||
fn main() {
|
||||
let x: 10 = 10;
|
||||
@@ -474,14 +474,14 @@ fn main() {
|
||||
editor.update_in(cx, |editor, window, cx| {
|
||||
pretty_assertions::assert_eq!(
|
||||
r#"
|
||||
static mut GLOBAL: usize = 1;
|
||||
static mut GLOBAL: 1: usize = 1;
|
||||
|
||||
fn main() {
|
||||
let x: 10 = 10;
|
||||
let value: 42 = 42;
|
||||
let y: 4 = 4;
|
||||
let tester = {
|
||||
let y: 4 = 10;
|
||||
let y = 10;
|
||||
let y = 5;
|
||||
let b = 3;
|
||||
vec![y, 20, 30]
|
||||
@@ -581,15 +581,15 @@ fn main() {
|
||||
editor.update_in(cx, |editor, window, cx| {
|
||||
pretty_assertions::assert_eq!(
|
||||
r#"
|
||||
static mut GLOBAL: usize = 1;
|
||||
static mut GLOBAL: 1: usize = 1;
|
||||
|
||||
fn main() {
|
||||
let x: 10 = 10;
|
||||
let value: 42 = 42;
|
||||
let y: 10 = 4;
|
||||
let y = 4;
|
||||
let tester = {
|
||||
let y: 10 = 10;
|
||||
let y: 10 = 5;
|
||||
let y = 5;
|
||||
let b = 3;
|
||||
vec![y, 20, 30]
|
||||
};
|
||||
@@ -688,14 +688,14 @@ fn main() {
|
||||
editor.update_in(cx, |editor, window, cx| {
|
||||
pretty_assertions::assert_eq!(
|
||||
r#"
|
||||
static mut GLOBAL: usize = 1;
|
||||
static mut GLOBAL: 1: usize = 1;
|
||||
|
||||
fn main() {
|
||||
let x: 10 = 10;
|
||||
let value: 42 = 42;
|
||||
let y: 5 = 4;
|
||||
let y = 4;
|
||||
let tester = {
|
||||
let y: 5 = 10;
|
||||
let y = 10;
|
||||
let y: 5 = 5;
|
||||
let b = 3;
|
||||
vec![y, 20, 30]
|
||||
@@ -807,17 +807,17 @@ fn main() {
|
||||
editor.update_in(cx, |editor, window, cx| {
|
||||
pretty_assertions::assert_eq!(
|
||||
r#"
|
||||
static mut GLOBAL: usize = 1;
|
||||
static mut GLOBAL: 1: usize = 1;
|
||||
|
||||
fn main() {
|
||||
let x: 10 = 10;
|
||||
let value: 42 = 42;
|
||||
let y: 5 = 4;
|
||||
let y = 4;
|
||||
let tester = {
|
||||
let y: 5 = 10;
|
||||
let y = 10;
|
||||
let y: 5 = 5;
|
||||
let b: 3 = 3;
|
||||
vec![y: 5, 20, 30]
|
||||
vec![y, 20, 30]
|
||||
};
|
||||
|
||||
let caller = || {
|
||||
@@ -926,7 +926,7 @@ fn main() {
|
||||
editor.update_in(cx, |editor, window, cx| {
|
||||
pretty_assertions::assert_eq!(
|
||||
r#"
|
||||
static mut GLOBAL: usize = 1;
|
||||
static mut GLOBAL: 1: usize = 1;
|
||||
|
||||
fn main() {
|
||||
let x: 10 = 10;
|
||||
@@ -1058,7 +1058,7 @@ fn main() {
|
||||
editor.update_in(cx, |editor, window, cx| {
|
||||
pretty_assertions::assert_eq!(
|
||||
r#"
|
||||
static mut GLOBAL: usize = 1;
|
||||
static mut GLOBAL: 1: usize = 1;
|
||||
|
||||
fn main() {
|
||||
let x: 10 = 10;
|
||||
@@ -1115,21 +1115,21 @@ fn main() {
|
||||
editor.update_in(cx, |editor, window, cx| {
|
||||
pretty_assertions::assert_eq!(
|
||||
r#"
|
||||
static mut GLOBAL: usize = 1;
|
||||
static mut GLOBAL: 1: usize = 1;
|
||||
|
||||
fn main() {
|
||||
let x: 10 = 10;
|
||||
let value: 42 = 42;
|
||||
let y: 4 = 4;
|
||||
let tester: size=3 = {
|
||||
let x = 10;
|
||||
let value = 42;
|
||||
let y = 4;
|
||||
let tester = {
|
||||
let y = 10;
|
||||
let y = 5;
|
||||
let b = 3;
|
||||
vec![y, 20, 30]
|
||||
};
|
||||
|
||||
let caller: <not available> = || {
|
||||
let x: 10 = 3;
|
||||
let caller = || {
|
||||
let x = 3;
|
||||
println!("x={}", x);
|
||||
};
|
||||
|
||||
@@ -1193,10 +1193,10 @@ fn main() {
|
||||
editor.update_in(cx, |editor, window, cx| {
|
||||
pretty_assertions::assert_eq!(
|
||||
r#"
|
||||
static mut GLOBAL: usize = 1;
|
||||
static mut GLOBAL: 1: usize = 1;
|
||||
|
||||
fn main() {
|
||||
let x: 3 = 10;
|
||||
let x = 10;
|
||||
let value = 42;
|
||||
let y = 4;
|
||||
let tester = {
|
||||
@@ -1208,7 +1208,7 @@ fn main() {
|
||||
|
||||
let caller = || {
|
||||
let x: 3 = 3;
|
||||
println!("x={}", x: 3);
|
||||
println!("x={}", x);
|
||||
};
|
||||
|
||||
caller();
|
||||
@@ -1338,7 +1338,7 @@ fn main() {
|
||||
editor.update_in(cx, |editor, window, cx| {
|
||||
pretty_assertions::assert_eq!(
|
||||
r#"
|
||||
static mut GLOBAL: usize = 1;
|
||||
static mut GLOBAL: 2: usize = 1;
|
||||
|
||||
fn main() {
|
||||
let x: 10 = 10;
|
||||
@@ -1362,7 +1362,7 @@ fn main() {
|
||||
GLOBAL = 2;
|
||||
}
|
||||
|
||||
let result = value: 42 * 2 * x: 10;
|
||||
let result = value * 2 * x;
|
||||
println!("Simple test executed: value={}, result={}", value, result);
|
||||
assert!(true);
|
||||
}
|
||||
@@ -1483,7 +1483,7 @@ fn main() {
|
||||
editor.update_in(cx, |editor, window, cx| {
|
||||
pretty_assertions::assert_eq!(
|
||||
r#"
|
||||
static mut GLOBAL: usize = 1;
|
||||
static mut GLOBAL: 2: usize = 1;
|
||||
|
||||
fn main() {
|
||||
let x: 10 = 10;
|
||||
@@ -1507,8 +1507,8 @@ fn main() {
|
||||
GLOBAL = 2;
|
||||
}
|
||||
|
||||
let result: 840 = value: 42 * 2 * x: 10;
|
||||
println!("Simple test executed: value={}, result={}", value: 42, result: 840);
|
||||
let result: 840 = value * 2 * x;
|
||||
println!("Simple test executed: value={}, result={}", value, result);
|
||||
assert!(true);
|
||||
}
|
||||
"#
|
||||
@@ -1519,7 +1519,6 @@ fn main() {
|
||||
}
|
||||
|
||||
fn rust_lang() -> Language {
|
||||
let debug_variables_query = include_str!("../../../languages/src/rust/debugger.scm");
|
||||
Language::new(
|
||||
LanguageConfig {
|
||||
name: "Rust".into(),
|
||||
@@ -1531,8 +1530,6 @@ fn rust_lang() -> Language {
|
||||
},
|
||||
Some(tree_sitter_rust::LANGUAGE.into()),
|
||||
)
|
||||
.with_debug_variables_query(debug_variables_query)
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
@@ -1821,8 +1818,8 @@ def process_data(untyped_param, typed_param: int, another_typed: str):
|
||||
def process_data(untyped_param: test_value, typed_param: 42: int, another_typed: world: str):
|
||||
# Local variables
|
||||
x: 10 = 10
|
||||
result: 84 = typed_param: 42 * 2
|
||||
text: Hello, world = "Hello, " + another_typed: world
|
||||
result: 84 = typed_param * 2
|
||||
text: Hello, world = "Hello, " + another_typed
|
||||
|
||||
# For loop with range
|
||||
sum_value: 10 = 0
|
||||
@@ -1840,7 +1837,6 @@ def process_data(untyped_param, typed_param: int, another_typed: str):
|
||||
}
|
||||
|
||||
fn python_lang() -> Language {
|
||||
let debug_variables_query = include_str!("../../../languages/src/python/debugger.scm");
|
||||
Language::new(
|
||||
LanguageConfig {
|
||||
name: "Python".into(),
|
||||
@@ -1852,392 +1848,4 @@ fn python_lang() -> Language {
|
||||
},
|
||||
Some(tree_sitter_python::LANGUAGE.into()),
|
||||
)
|
||||
.with_debug_variables_query(debug_variables_query)
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
fn go_lang() -> Language {
|
||||
let debug_variables_query = include_str!("../../../languages/src/go/debugger.scm");
|
||||
Language::new(
|
||||
LanguageConfig {
|
||||
name: "Go".into(),
|
||||
matcher: LanguageMatcher {
|
||||
path_suffixes: vec!["go".to_string()],
|
||||
..Default::default()
|
||||
},
|
||||
..Default::default()
|
||||
},
|
||||
Some(tree_sitter_go::LANGUAGE.into()),
|
||||
)
|
||||
.with_debug_variables_query(debug_variables_query)
|
||||
.unwrap()
|
||||
}
|
||||
|
||||
/// Test utility function for inline values testing
|
||||
///
|
||||
/// # Arguments
|
||||
/// * `variables` - List of tuples containing (variable_name, variable_value)
|
||||
/// * `before` - Source code before inline values are applied
|
||||
/// * `after` - Expected source code after inline values are applied
|
||||
/// * `language` - Language configuration to use for parsing
|
||||
/// * `executor` - Background executor for async operations
|
||||
/// * `cx` - Test app context
|
||||
async fn test_inline_values_util(
|
||||
local_variables: &[(&str, &str)],
|
||||
global_variables: &[(&str, &str)],
|
||||
before: &str,
|
||||
after: &str,
|
||||
active_debug_line: Option<usize>,
|
||||
language: Language,
|
||||
executor: BackgroundExecutor,
|
||||
cx: &mut TestAppContext,
|
||||
) {
|
||||
init_test(cx);
|
||||
|
||||
let lines_count = before.lines().count();
|
||||
let stop_line =
|
||||
active_debug_line.unwrap_or_else(|| if lines_count > 6 { 6 } else { lines_count - 1 });
|
||||
|
||||
let fs = FakeFs::new(executor.clone());
|
||||
fs.insert_tree(path!("/project"), json!({ "main.rs": before.to_string() }))
|
||||
.await;
|
||||
|
||||
let project = Project::test(fs.clone(), [path!("/project").as_ref()], cx).await;
|
||||
let workspace = init_test_workspace(&project, cx).await;
|
||||
workspace
|
||||
.update(cx, |workspace, window, cx| {
|
||||
workspace.focus_panel::<DebugPanel>(window, cx);
|
||||
})
|
||||
.unwrap();
|
||||
let cx = &mut VisualTestContext::from_window(*workspace, cx);
|
||||
|
||||
let session = start_debug_session(&workspace, cx, |_| {}).unwrap();
|
||||
let client = session.update(cx, |session, _| session.adapter_client().unwrap());
|
||||
|
||||
client.on_request::<dap::requests::Threads, _>(|_, _| {
|
||||
Ok(dap::ThreadsResponse {
|
||||
threads: vec![dap::Thread {
|
||||
id: 1,
|
||||
name: "main".into(),
|
||||
}],
|
||||
})
|
||||
});
|
||||
|
||||
client.on_request::<dap::requests::StackTrace, _>(move |_, _| {
|
||||
Ok(dap::StackTraceResponse {
|
||||
stack_frames: vec![dap::StackFrame {
|
||||
id: 1,
|
||||
name: "main".into(),
|
||||
source: Some(dap::Source {
|
||||
name: Some("main.rs".into()),
|
||||
path: Some(path!("/project/main.rs").into()),
|
||||
source_reference: None,
|
||||
presentation_hint: None,
|
||||
origin: None,
|
||||
sources: None,
|
||||
adapter_data: None,
|
||||
checksums: None,
|
||||
}),
|
||||
line: stop_line as u64,
|
||||
column: 1,
|
||||
end_line: None,
|
||||
end_column: None,
|
||||
can_restart: None,
|
||||
instruction_pointer_reference: None,
|
||||
module_id: None,
|
||||
presentation_hint: None,
|
||||
}],
|
||||
total_frames: None,
|
||||
})
|
||||
});
|
||||
|
||||
let local_vars: Vec<Variable> = local_variables
|
||||
.iter()
|
||||
.map(|(name, value)| Variable {
|
||||
name: (*name).into(),
|
||||
value: (*value).into(),
|
||||
type_: None,
|
||||
presentation_hint: None,
|
||||
evaluate_name: None,
|
||||
variables_reference: 0,
|
||||
named_variables: None,
|
||||
indexed_variables: None,
|
||||
memory_reference: None,
|
||||
declaration_location_reference: None,
|
||||
value_location_reference: None,
|
||||
})
|
||||
.collect();
|
||||
|
||||
let global_vars: Vec<Variable> = global_variables
|
||||
.iter()
|
||||
.map(|(name, value)| Variable {
|
||||
name: (*name).into(),
|
||||
value: (*value).into(),
|
||||
type_: None,
|
||||
presentation_hint: None,
|
||||
evaluate_name: None,
|
||||
variables_reference: 0,
|
||||
named_variables: None,
|
||||
indexed_variables: None,
|
||||
memory_reference: None,
|
||||
declaration_location_reference: None,
|
||||
value_location_reference: None,
|
||||
})
|
||||
.collect();
|
||||
|
||||
client.on_request::<Variables, _>({
|
||||
let local_vars = Arc::new(local_vars.clone());
|
||||
let global_vars = Arc::new(global_vars.clone());
|
||||
move |_, args| {
|
||||
let variables = match args.variables_reference {
|
||||
2 => (*local_vars).clone(),
|
||||
3 => (*global_vars).clone(),
|
||||
_ => vec![],
|
||||
};
|
||||
Ok(dap::VariablesResponse { variables })
|
||||
}
|
||||
});
|
||||
|
||||
client.on_request::<dap::requests::Scopes, _>(move |_, _| {
|
||||
Ok(dap::ScopesResponse {
|
||||
scopes: vec![
|
||||
Scope {
|
||||
name: "Local".into(),
|
||||
presentation_hint: None,
|
||||
variables_reference: 2,
|
||||
named_variables: None,
|
||||
indexed_variables: None,
|
||||
expensive: false,
|
||||
source: None,
|
||||
line: None,
|
||||
column: None,
|
||||
end_line: None,
|
||||
end_column: None,
|
||||
},
|
||||
Scope {
|
||||
name: "Global".into(),
|
||||
presentation_hint: None,
|
||||
variables_reference: 3,
|
||||
named_variables: None,
|
||||
indexed_variables: None,
|
||||
expensive: false,
|
||||
source: None,
|
||||
line: None,
|
||||
column: None,
|
||||
end_line: None,
|
||||
end_column: None,
|
||||
},
|
||||
],
|
||||
})
|
||||
});
|
||||
|
||||
if !global_variables.is_empty() {
|
||||
let global_evaluate_map: std::collections::HashMap<String, String> = global_variables
|
||||
.iter()
|
||||
.map(|(name, value)| (name.to_string(), value.to_string()))
|
||||
.collect();
|
||||
|
||||
client.on_request::<dap::requests::Evaluate, _>(move |_, args| {
|
||||
let value = global_evaluate_map
|
||||
.get(&args.expression)
|
||||
.unwrap_or(&"undefined".to_string())
|
||||
.clone();
|
||||
|
||||
Ok(dap::EvaluateResponse {
|
||||
result: value,
|
||||
type_: None,
|
||||
presentation_hint: None,
|
||||
variables_reference: 0,
|
||||
named_variables: None,
|
||||
indexed_variables: None,
|
||||
memory_reference: None,
|
||||
value_location_reference: None,
|
||||
})
|
||||
});
|
||||
}
|
||||
|
||||
client
|
||||
.fake_event(dap::messages::Events::Stopped(dap::StoppedEvent {
|
||||
reason: dap::StoppedEventReason::Pause,
|
||||
description: None,
|
||||
thread_id: Some(1),
|
||||
preserve_focus_hint: None,
|
||||
text: None,
|
||||
all_threads_stopped: None,
|
||||
hit_breakpoint_ids: None,
|
||||
}))
|
||||
.await;
|
||||
|
||||
cx.run_until_parked();
|
||||
|
||||
let project_path = Path::new(path!("/project"));
|
||||
let worktree = project
|
||||
.update(cx, |project, cx| project.find_worktree(project_path, cx))
|
||||
.expect("This worktree should exist in project")
|
||||
.0;
|
||||
|
||||
let worktree_id = workspace
|
||||
.update(cx, |_, _, cx| worktree.read(cx).id())
|
||||
.unwrap();
|
||||
|
||||
let buffer = project
|
||||
.update(cx, |project, cx| {
|
||||
project.open_buffer((worktree_id, "main.rs"), cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
buffer.update(cx, |buffer, cx| {
|
||||
buffer.set_language(Some(Arc::new(language)), cx);
|
||||
});
|
||||
|
||||
let (editor, cx) = cx.add_window_view(|window, cx| {
|
||||
Editor::new(
|
||||
EditorMode::full(),
|
||||
MultiBuffer::build_from_buffer(buffer, cx),
|
||||
Some(project),
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
});
|
||||
|
||||
active_debug_session_panel(workspace, cx).update_in(cx, |_, window, cx| {
|
||||
cx.focus_self(window);
|
||||
});
|
||||
cx.run_until_parked();
|
||||
|
||||
editor.update(cx, |editor, cx| editor.refresh_inline_values(cx));
|
||||
|
||||
cx.run_until_parked();
|
||||
|
||||
editor.update_in(cx, |editor, window, cx| {
|
||||
pretty_assertions::assert_eq!(after, editor.snapshot(window, cx).text());
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_inline_values_example(executor: BackgroundExecutor, cx: &mut TestAppContext) {
|
||||
let variables = [("x", "10"), ("y", "20"), ("result", "30")];
|
||||
|
||||
let before = r#"
|
||||
fn main() {
|
||||
let x = 10;
|
||||
let y = 20;
|
||||
let result = x + y;
|
||||
println!("Result: {}", result);
|
||||
}
|
||||
"#
|
||||
.unindent();
|
||||
|
||||
let after = r#"
|
||||
fn main() {
|
||||
let x: 10 = 10;
|
||||
let y: 20 = 20;
|
||||
let result: 30 = x: 10 + y: 20;
|
||||
println!("Result: {}", result: 30);
|
||||
}
|
||||
"#
|
||||
.unindent();
|
||||
|
||||
test_inline_values_util(
|
||||
&variables,
|
||||
&[],
|
||||
&before,
|
||||
&after,
|
||||
None,
|
||||
rust_lang(),
|
||||
executor,
|
||||
cx,
|
||||
)
|
||||
.await;
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_inline_values_with_globals(executor: BackgroundExecutor, cx: &mut TestAppContext) {
|
||||
let variables = [("x", "5"), ("y", "10")];
|
||||
|
||||
let before = r#"
|
||||
static mut GLOBAL_COUNTER: usize = 42;
|
||||
|
||||
fn main() {
|
||||
let x = 5;
|
||||
let y = 10;
|
||||
unsafe {
|
||||
GLOBAL_COUNTER += 1;
|
||||
}
|
||||
println!("x={}, y={}, global={}", x, y, unsafe { GLOBAL_COUNTER });
|
||||
}
|
||||
"#
|
||||
.unindent();
|
||||
|
||||
let after = r#"
|
||||
static mut GLOBAL_COUNTER: 42: usize = 42;
|
||||
|
||||
fn main() {
|
||||
let x: 5 = 5;
|
||||
let y: 10 = 10;
|
||||
unsafe {
|
||||
GLOBAL_COUNTER += 1;
|
||||
}
|
||||
println!("x={}, y={}, global={}", x, y, unsafe { GLOBAL_COUNTER });
|
||||
}
|
||||
"#
|
||||
.unindent();
|
||||
|
||||
test_inline_values_util(
|
||||
&variables,
|
||||
&[("GLOBAL_COUNTER", "42")],
|
||||
&before,
|
||||
&after,
|
||||
None,
|
||||
rust_lang(),
|
||||
executor,
|
||||
cx,
|
||||
)
|
||||
.await;
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_go_inline_values(executor: BackgroundExecutor, cx: &mut TestAppContext) {
|
||||
let variables = [("x", "42"), ("y", "hello")];
|
||||
|
||||
let before = r#"
|
||||
package main
|
||||
|
||||
var globalCounter int = 100
|
||||
|
||||
func main() {
|
||||
x := 42
|
||||
y := "hello"
|
||||
z := x + 10
|
||||
println(x, y, z)
|
||||
}
|
||||
"#
|
||||
.unindent();
|
||||
|
||||
let after = r#"
|
||||
package main
|
||||
|
||||
var globalCounter: 100 int = 100
|
||||
|
||||
func main() {
|
||||
x: 42 := 42
|
||||
y := "hello"
|
||||
z := x + 10
|
||||
println(x, y, z)
|
||||
}
|
||||
"#
|
||||
.unindent();
|
||||
|
||||
test_inline_values_util(
|
||||
&variables,
|
||||
&[("globalCounter", "100")],
|
||||
&before,
|
||||
&after,
|
||||
None,
|
||||
go_lang(),
|
||||
executor,
|
||||
cx,
|
||||
)
|
||||
.await;
|
||||
}
|
||||
|
||||
@@ -201,13 +201,13 @@ pub struct Response {

#[derive(Serialize, Deserialize, Debug)]
pub struct Usage {
    pub prompt_tokens: u64,
    pub completion_tokens: u64,
    pub total_tokens: u64,
    pub prompt_tokens: u32,
    pub completion_tokens: u32,
    pub total_tokens: u32,
    #[serde(default)]
    pub prompt_cache_hit_tokens: u64,
    pub prompt_cache_hit_tokens: u32,
    #[serde(default)]
    pub prompt_cache_miss_tokens: u64,
    pub prompt_cache_miss_tokens: u32,
}

#[derive(Serialize, Deserialize, Debug)]
@@ -224,7 +224,6 @@ pub struct StreamResponse {
    pub created: u64,
    pub model: String,
    pub choices: Vec<StreamChoice>,
    pub usage: Option<Usage>,
}
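As a quick illustration of what this struct accepts (not part of the diff), here is deserializing a typical usage payload with serde_json; the struct below mirrors the narrower-width side of the hunk, and the JSON literal is invented for the example:

use serde::{Deserialize, Serialize};

// Shape mirroring the Usage struct in the hunk above, for illustration only.
#[derive(Serialize, Deserialize, Debug)]
struct Usage {
    prompt_tokens: u32,
    completion_tokens: u32,
    total_tokens: u32,
    #[serde(default)]
    prompt_cache_hit_tokens: u32,
    #[serde(default)]
    prompt_cache_miss_tokens: u32,
}

fn main() -> serde_json::Result<()> {
    // The cache-token fields are optional thanks to #[serde(default)].
    let json = r#"{"prompt_tokens": 12, "completion_tokens": 34, "total_tokens": 46}"#;
    let usage: Usage = serde_json::from_str(json)?;
    assert_eq!(usage.prompt_cache_hit_tokens, 0);
    Ok(())
}
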
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
|
||||
@@ -270,8 +270,6 @@ actions!(
|
||||
ContextMenuLast,
|
||||
ContextMenuNext,
|
||||
ContextMenuPrevious,
|
||||
ConvertIndentationToSpaces,
|
||||
ConvertIndentationToTabs,
|
||||
ConvertToKebabCase,
|
||||
ConvertToLowerCamelCase,
|
||||
ConvertToLowerCase,
|
||||
|
||||
@@ -1205,7 +1205,7 @@ impl CodeActionContents {
|
||||
tasks_len + code_actions_len + self.debug_scenarios.len()
|
||||
}
|
||||
|
||||
pub fn is_empty(&self) -> bool {
|
||||
fn is_empty(&self) -> bool {
|
||||
self.len() == 0
|
||||
}
|
||||
|
||||
|
||||
@@ -3388,12 +3388,9 @@ impl Editor {
|
||||
auto_scroll = true;
|
||||
}
|
||||
2 => {
|
||||
let position = display_map
|
||||
.clip_point(position, Bias::Left)
|
||||
.to_offset(&display_map, Bias::Left);
|
||||
let (range, _) = buffer.surrounding_word(position, false);
|
||||
start = buffer.anchor_before(range.start);
|
||||
end = buffer.anchor_before(range.end);
|
||||
let range = movement::surrounding_word(&display_map, position);
|
||||
start = buffer.anchor_before(range.start.to_point(&display_map));
|
||||
end = buffer.anchor_before(range.end.to_point(&display_map));
|
||||
mode = SelectMode::Word(start..end);
|
||||
auto_scroll = true;
|
||||
}
|
||||
@@ -3526,39 +3523,37 @@ impl Editor {
|
||||
if self.columnar_selection_state.is_some() {
|
||||
self.select_columns(position, goal_column, &display_map, window, cx);
|
||||
} else if let Some(mut pending) = self.selections.pending_anchor() {
|
||||
let buffer = &display_map.buffer_snapshot;
|
||||
let buffer = self.buffer.read(cx).snapshot(cx);
|
||||
let head;
|
||||
let tail;
|
||||
let mode = self.selections.pending_mode().unwrap();
|
||||
match &mode {
|
||||
SelectMode::Character => {
|
||||
head = position.to_point(&display_map);
|
||||
tail = pending.tail().to_point(buffer);
|
||||
tail = pending.tail().to_point(&buffer);
|
||||
}
|
||||
SelectMode::Word(original_range) => {
|
||||
let offset = display_map
|
||||
.clip_point(position, Bias::Left)
|
||||
.to_offset(&display_map, Bias::Left);
|
||||
let original_range = original_range.to_offset(buffer);
|
||||
|
||||
let head_offset = if buffer.is_inside_word(offset, false)
|
||||
|| original_range.contains(&offset)
|
||||
let original_display_range = original_range.start.to_display_point(&display_map)
|
||||
..original_range.end.to_display_point(&display_map);
|
||||
let original_buffer_range = original_display_range.start.to_point(&display_map)
|
||||
..original_display_range.end.to_point(&display_map);
|
||||
if movement::is_inside_word(&display_map, position)
|
||||
|| original_display_range.contains(&position)
|
||||
{
|
||||
let (word_range, _) = buffer.surrounding_word(offset, false);
|
||||
if word_range.start < original_range.start {
|
||||
word_range.start
|
||||
let word_range = movement::surrounding_word(&display_map, position);
|
||||
if word_range.start < original_display_range.start {
|
||||
head = word_range.start.to_point(&display_map);
|
||||
} else {
|
||||
word_range.end
|
||||
head = word_range.end.to_point(&display_map);
|
||||
}
|
||||
} else {
|
||||
offset
|
||||
};
|
||||
head = position.to_point(&display_map);
|
||||
}
|
||||
|
||||
head = head_offset.to_point(buffer);
|
||||
if head_offset <= original_range.start {
|
||||
tail = original_range.end.to_point(buffer);
|
||||
if head <= original_buffer_range.start {
|
||||
tail = original_buffer_range.end;
|
||||
} else {
|
||||
tail = original_range.start.to_point(buffer);
|
||||
tail = original_buffer_range.start;
|
||||
}
|
||||
}
|
||||
SelectMode::Line(original_range) => {
|
||||
@@ -5976,23 +5971,15 @@ impl Editor {
|
||||
|
||||
editor.update_in(cx, |editor, window, cx| {
|
||||
crate::hover_popover::hide_hover(editor, cx);
|
||||
let actions = CodeActionContents::new(
|
||||
resolved_tasks,
|
||||
code_actions,
|
||||
debug_scenarios,
|
||||
task_context.unwrap_or_default(),
|
||||
);
|
||||
|
||||
// Don't show the menu if there are no actions available
|
||||
if actions.is_empty() {
|
||||
cx.notify();
|
||||
return Task::ready(Ok(()));
|
||||
}
|
||||
|
||||
*editor.context_menu.borrow_mut() =
|
||||
Some(CodeContextMenu::CodeActions(CodeActionsMenu {
|
||||
buffer,
|
||||
actions,
|
||||
actions: CodeActionContents::new(
|
||||
resolved_tasks,
|
||||
code_actions,
|
||||
debug_scenarios,
|
||||
task_context.unwrap_or_default(),
|
||||
),
|
||||
selected_item: Default::default(),
|
||||
scroll_handle: UniformListScrollHandle::default(),
|
||||
deployed_from,
|
||||
@@ -10088,7 +10075,7 @@ impl Editor {
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.manipulate_immutable_lines(window, cx, |lines| lines.sort())
|
||||
self.manipulate_lines(window, cx, |lines| lines.sort())
|
||||
}
|
||||
|
||||
pub fn sort_lines_case_insensitive(
|
||||
@@ -10097,7 +10084,7 @@ impl Editor {
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.manipulate_immutable_lines(window, cx, |lines| {
|
||||
self.manipulate_lines(window, cx, |lines| {
|
||||
lines.sort_by_key(|line| line.to_lowercase())
|
||||
})
|
||||
}
|
||||
@@ -10108,7 +10095,7 @@ impl Editor {
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.manipulate_immutable_lines(window, cx, |lines| {
|
||||
self.manipulate_lines(window, cx, |lines| {
|
||||
let mut seen = HashSet::default();
|
||||
lines.retain(|line| seen.insert(line.to_lowercase()));
|
||||
})
|
||||
@@ -10120,7 +10107,7 @@ impl Editor {
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.manipulate_immutable_lines(window, cx, |lines| {
|
||||
self.manipulate_lines(window, cx, |lines| {
|
||||
let mut seen = HashSet::default();
|
||||
lines.retain(|line| seen.insert(*line));
|
||||
})
|
||||
@@ -10563,20 +10550,20 @@ impl Editor {
|
||||
}
|
||||
|
||||
pub fn reverse_lines(&mut self, _: &ReverseLines, window: &mut Window, cx: &mut Context<Self>) {
|
||||
self.manipulate_immutable_lines(window, cx, |lines| lines.reverse())
|
||||
self.manipulate_lines(window, cx, |lines| lines.reverse())
|
||||
}
|
||||
|
||||
pub fn shuffle_lines(&mut self, _: &ShuffleLines, window: &mut Window, cx: &mut Context<Self>) {
|
||||
self.manipulate_immutable_lines(window, cx, |lines| lines.shuffle(&mut thread_rng()))
|
||||
self.manipulate_lines(window, cx, |lines| lines.shuffle(&mut thread_rng()))
|
||||
}
|
||||
|
||||
fn manipulate_lines<M>(
|
||||
fn manipulate_lines<Fn>(
|
||||
&mut self,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
mut manipulate: M,
|
||||
mut callback: Fn,
|
||||
) where
|
||||
M: FnMut(&str) -> LineManipulationResult,
|
||||
Fn: FnMut(&mut Vec<&str>),
|
||||
{
|
||||
self.hide_mouse_cursor(HideMouseCursorOrigin::TypingAction, cx);
|
||||
|
||||
@@ -10609,18 +10596,18 @@ impl Editor {
|
||||
.text_for_range(start_point..end_point)
|
||||
.collect::<String>();
|
||||
|
||||
let LineManipulationResult {
|
||||
new_text,
|
||||
line_count_before,
|
||||
line_count_after,
|
||||
} = manipulate(&text);
|
||||
let mut lines = text.split('\n').collect_vec();
|
||||
|
||||
edits.push((start_point..end_point, new_text));
|
||||
let lines_before = lines.len();
|
||||
callback(&mut lines);
|
||||
let lines_after = lines.len();
|
||||
|
||||
edits.push((start_point..end_point, lines.join("\n")));
|
||||
|
||||
// Selections must change based on added and removed line count
|
||||
let start_row =
|
||||
MultiBufferRow(start_point.row + added_lines as u32 - removed_lines as u32);
|
||||
let end_row = MultiBufferRow(start_row.0 + line_count_after.saturating_sub(1) as u32);
|
||||
let end_row = MultiBufferRow(start_row.0 + lines_after.saturating_sub(1) as u32);
|
||||
new_selections.push(Selection {
|
||||
id: selection.id,
|
||||
start: start_row,
|
||||
@@ -10629,10 +10616,10 @@ impl Editor {
|
||||
reversed: selection.reversed,
|
||||
});
|
||||
|
||||
if line_count_after > line_count_before {
|
||||
added_lines += line_count_after - line_count_before;
|
||||
} else if line_count_before > line_count_after {
|
||||
removed_lines += line_count_before - line_count_after;
|
||||
if lines_after > lines_before {
|
||||
added_lines += lines_after - lines_before;
|
||||
} else if lines_before > lines_after {
|
||||
removed_lines += lines_before - lines_after;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -10677,171 +10664,6 @@ impl Editor {
|
||||
})
|
||||
}
|
||||
|
||||
fn manipulate_immutable_lines<Fn>(
|
||||
&mut self,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
mut callback: Fn,
|
||||
) where
|
||||
Fn: FnMut(&mut Vec<&str>),
|
||||
{
|
||||
self.manipulate_lines(window, cx, |text| {
|
||||
let mut lines: Vec<&str> = text.split('\n').collect();
|
||||
let line_count_before = lines.len();
|
||||
|
||||
callback(&mut lines);
|
||||
|
||||
LineManipulationResult {
|
||||
new_text: lines.join("\n"),
|
||||
line_count_before,
|
||||
line_count_after: lines.len(),
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
fn manipulate_mutable_lines<Fn>(
|
||||
&mut self,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
mut callback: Fn,
|
||||
) where
|
||||
Fn: FnMut(&mut Vec<Cow<'_, str>>),
|
||||
{
|
||||
self.manipulate_lines(window, cx, |text| {
|
||||
let mut lines: Vec<Cow<str>> = text.split('\n').map(Cow::from).collect();
|
||||
let line_count_before = lines.len();
|
||||
|
||||
callback(&mut lines);
|
||||
|
||||
LineManipulationResult {
|
||||
new_text: lines.join("\n"),
|
||||
line_count_before,
|
||||
line_count_after: lines.len(),
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
pub fn convert_indentation_to_spaces(
|
||||
&mut self,
|
||||
_: &ConvertIndentationToSpaces,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let settings = self.buffer.read(cx).language_settings(cx);
|
||||
let tab_size = settings.tab_size.get() as usize;
|
||||
|
||||
self.manipulate_mutable_lines(window, cx, |lines| {
|
||||
// Allocates a reasonably sized scratch buffer once for the whole loop
|
||||
let mut reindented_line = String::with_capacity(MAX_LINE_LEN);
|
||||
// Avoids recomputing spaces that could be inserted many times
|
||||
let space_cache: Vec<Vec<char>> = (1..=tab_size)
|
||||
.map(|n| IndentSize::spaces(n as u32).chars().collect())
|
||||
.collect();
|
||||
|
||||
for line in lines.iter_mut().filter(|line| !line.is_empty()) {
|
||||
let mut chars = line.as_ref().chars();
|
||||
let mut col = 0;
|
||||
let mut changed = false;
|
||||
|
||||
while let Some(ch) = chars.next() {
|
||||
match ch {
|
||||
' ' => {
|
||||
reindented_line.push(' ');
|
||||
col += 1;
|
||||
}
|
||||
'\t' => {
|
||||
// \t are converted to spaces depending on the current column
|
||||
let spaces_len = tab_size - (col % tab_size);
|
||||
reindented_line.extend(&space_cache[spaces_len - 1]);
|
||||
col += spaces_len;
|
||||
changed = true;
|
||||
}
|
||||
_ => {
|
||||
// If we dont append before break, the character is consumed
|
||||
reindented_line.push(ch);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if !changed {
|
||||
reindented_line.clear();
|
||||
continue;
|
||||
}
|
||||
// Append the rest of the line and replace old reference with new one
|
||||
reindented_line.extend(chars);
|
||||
*line = Cow::Owned(reindented_line.clone());
|
||||
reindented_line.clear();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
pub fn convert_indentation_to_tabs(
|
||||
&mut self,
|
||||
_: &ConvertIndentationToTabs,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let settings = self.buffer.read(cx).language_settings(cx);
|
||||
let tab_size = settings.tab_size.get() as usize;
|
||||
|
||||
self.manipulate_mutable_lines(window, cx, |lines| {
|
||||
// Allocates a reasonably sized buffer once for the whole loop
|
||||
let mut reindented_line = String::with_capacity(MAX_LINE_LEN);
|
||||
// Avoids recomputing spaces that could be inserted many times
|
||||
let space_cache: Vec<Vec<char>> = (1..=tab_size)
|
||||
.map(|n| IndentSize::spaces(n as u32).chars().collect())
|
||||
.collect();
|
||||
|
||||
for line in lines.iter_mut().filter(|line| !line.is_empty()) {
|
||||
let mut chars = line.chars();
|
||||
let mut spaces_count = 0;
|
||||
let mut first_non_indent_char = None;
|
||||
let mut changed = false;
|
||||
|
||||
while let Some(ch) = chars.next() {
|
||||
match ch {
|
||||
' ' => {
|
||||
// Keep track of spaces. Append \t when we reach tab_size
|
||||
spaces_count += 1;
|
||||
changed = true;
|
||||
if spaces_count == tab_size {
|
||||
reindented_line.push('\t');
|
||||
spaces_count = 0;
|
||||
}
|
||||
}
|
||||
'\t' => {
|
||||
reindented_line.push('\t');
|
||||
spaces_count = 0;
|
||||
}
|
||||
_ => {
|
||||
// Dont append it yet, we might have remaining spaces
|
||||
first_non_indent_char = Some(ch);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if !changed {
|
||||
reindented_line.clear();
|
||||
continue;
|
||||
}
|
||||
// Remaining spaces that didn't make a full tab stop
|
||||
if spaces_count > 0 {
|
||||
reindented_line.extend(&space_cache[spaces_count - 1]);
|
||||
}
|
||||
// If we consume an extra character that was not indentation, add it back
|
||||
if let Some(extra_char) = first_non_indent_char {
|
||||
reindented_line.push(extra_char);
|
||||
}
|
||||
// Append the rest of the line and replace old reference with new one
|
||||
reindented_line.extend(chars);
|
||||
*line = Cow::Owned(reindented_line.clone());
|
||||
reindented_line.clear();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
pub fn convert_to_upper_case(
|
||||
&mut self,
|
||||
_: &ConvertToUpperCase,
|
||||
@@ -10972,6 +10794,7 @@ impl Editor {
|
||||
where
|
||||
Fn: FnMut(&str) -> String,
|
||||
{
|
||||
let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
|
||||
let buffer = self.buffer.read(cx).snapshot(cx);
|
||||
|
||||
let mut new_selections = Vec::new();
|
||||
@@ -10982,8 +10805,13 @@ impl Editor {
|
||||
let selection_is_empty = selection.is_empty();
|
||||
|
||||
let (start, end) = if selection_is_empty {
|
||||
let (word_range, _) = buffer.surrounding_word(selection.start, false);
|
||||
(word_range.start, word_range.end)
|
||||
let word_range = movement::surrounding_word(
|
||||
&display_map,
|
||||
selection.start.to_display_point(&display_map),
|
||||
);
|
||||
let start = word_range.start.to_offset(&display_map, Bias::Left);
|
||||
let end = word_range.end.to_offset(&display_map, Bias::Left);
|
||||
(start, end)
|
||||
} else {
|
||||
(selection.start, selection.end)
|
||||
};
|
||||
@@ -13427,10 +13255,12 @@ impl Editor {
|
||||
let query_match = query_match.unwrap(); // can only fail due to I/O
|
||||
let offset_range =
|
||||
start_offset + query_match.start()..start_offset + query_match.end();
|
||||
let display_range = offset_range.start.to_display_point(display_map)
|
||||
..offset_range.end.to_display_point(display_map);
|
||||
|
||||
if !select_next_state.wordwise
|
||||
|| (!buffer.is_inside_word(offset_range.start, false)
|
||||
&& !buffer.is_inside_word(offset_range.end, false))
|
||||
|| (!movement::is_inside_word(display_map, display_range.start)
|
||||
&& !movement::is_inside_word(display_map, display_range.end))
|
||||
{
|
||||
// TODO: This is n^2, because we might check all the selections
|
||||
if !selections
|
||||
@@ -13494,9 +13324,12 @@ impl Editor {
|
||||
|
||||
if only_carets {
|
||||
for selection in &mut selections {
|
||||
let (word_range, _) = buffer.surrounding_word(selection.start, false);
|
||||
selection.start = word_range.start;
|
||||
selection.end = word_range.end;
|
||||
let word_range = movement::surrounding_word(
|
||||
display_map,
|
||||
selection.start.to_display_point(display_map),
|
||||
);
|
||||
selection.start = word_range.start.to_offset(display_map, Bias::Left);
|
||||
selection.end = word_range.end.to_offset(display_map, Bias::Left);
|
||||
selection.goal = SelectionGoal::None;
|
||||
selection.reversed = false;
|
||||
self.select_match_ranges(
|
||||
@@ -13577,22 +13410,18 @@ impl Editor {
|
||||
} else {
|
||||
query_match.start()..query_match.end()
|
||||
};
|
||||
let display_range = offset_range.start.to_display_point(&display_map)
|
||||
..offset_range.end.to_display_point(&display_map);
|
||||
|
||||
if !select_next_state.wordwise
|
||||
|| (!buffer.is_inside_word(offset_range.start, false)
|
||||
&& !buffer.is_inside_word(offset_range.end, false))
|
||||
|| (!movement::is_inside_word(&display_map, display_range.start)
|
||||
&& !movement::is_inside_word(&display_map, display_range.end))
|
||||
{
|
||||
new_selections.push(offset_range.start..offset_range.end);
|
||||
}
|
||||
}
|
||||
|
||||
select_next_state.done = true;
|
||||
|
||||
if new_selections.is_empty() {
|
||||
log::error!("bug: new_selections is empty in select_all_matches");
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
self.unfold_ranges(&new_selections.clone(), false, false, cx);
|
||||
self.change_selections(None, window, cx, |selections| {
|
||||
selections.select_ranges(new_selections)
|
||||
@@ -13652,10 +13481,12 @@ impl Editor {
|
||||
let query_match = query_match.unwrap(); // can only fail due to I/O
|
||||
let offset_range =
|
||||
end_offset - query_match.end()..end_offset - query_match.start();
|
||||
let display_range = offset_range.start.to_display_point(&display_map)
|
||||
..offset_range.end.to_display_point(&display_map);
|
||||
|
||||
if !select_prev_state.wordwise
|
||||
|| (!buffer.is_inside_word(offset_range.start, false)
|
||||
&& !buffer.is_inside_word(offset_range.end, false))
|
||||
|| (!movement::is_inside_word(&display_map, display_range.start)
|
||||
&& !movement::is_inside_word(&display_map, display_range.end))
|
||||
{
|
||||
next_selected_range = Some(offset_range);
|
||||
break;
|
||||
@@ -13713,9 +13544,12 @@ impl Editor {
|
||||
|
||||
if only_carets {
|
||||
for selection in &mut selections {
|
||||
let (word_range, _) = buffer.surrounding_word(selection.start, false);
|
||||
selection.start = word_range.start;
|
||||
selection.end = word_range.end;
|
||||
let word_range = movement::surrounding_word(
|
||||
&display_map,
|
||||
selection.start.to_display_point(&display_map),
|
||||
);
|
||||
selection.start = word_range.start.to_offset(&display_map, Bias::Left);
|
||||
selection.end = word_range.end.to_offset(&display_map, Bias::Left);
|
||||
selection.goal = SelectionGoal::None;
|
||||
selection.reversed = false;
|
||||
self.select_match_ranges(
|
||||
@@ -14190,11 +14024,26 @@ impl Editor {
|
||||
if let Some((node, _)) = buffer.syntax_ancestor(old_range.clone()) {
|
||||
// manually select word at selection
|
||||
if ["string_content", "inline"].contains(&node.kind()) {
|
||||
let (word_range, _) = buffer.surrounding_word(old_range.start, false);
|
||||
let word_range = {
|
||||
let display_point = buffer
|
||||
.offset_to_point(old_range.start)
|
||||
.to_display_point(&display_map);
|
||||
let Range { start, end } =
|
||||
movement::surrounding_word(&display_map, display_point);
|
||||
start.to_point(&display_map).to_offset(&buffer)
|
||||
..end.to_point(&display_map).to_offset(&buffer)
|
||||
};
|
||||
// ignore if word is already selected
|
||||
if !word_range.is_empty() && old_range != word_range {
|
||||
let (last_word_range, _) =
|
||||
buffer.surrounding_word(old_range.end, false);
|
||||
let last_word_range = {
|
||||
let display_point = buffer
|
||||
.offset_to_point(old_range.end)
|
||||
.to_display_point(&display_map);
|
||||
let Range { start, end } =
|
||||
movement::surrounding_word(&display_map, display_point);
|
||||
start.to_point(&display_map).to_offset(&buffer)
|
||||
..end.to_point(&display_map).to_offset(&buffer)
|
||||
};
|
||||
// only select word if start and end point belongs to same word
|
||||
if word_range == last_word_range {
|
||||
selected_larger_node = true;
|
||||
@@ -16164,7 +16013,7 @@ impl Editor {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn restart_language_server(
|
||||
fn restart_language_server(
|
||||
&mut self,
|
||||
_: &RestartLanguageServer,
|
||||
_: &mut Window,
|
||||
@@ -16175,7 +16024,6 @@ impl Editor {
|
||||
project.update(cx, |project, cx| {
|
||||
project.restart_language_servers_for_buffers(
|
||||
multi_buffer.all_buffers().into_iter().collect(),
|
||||
HashSet::default(),
|
||||
cx,
|
||||
);
|
||||
});
|
||||
@@ -16183,7 +16031,7 @@ impl Editor {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn stop_language_server(
|
||||
fn stop_language_server(
|
||||
&mut self,
|
||||
_: &StopLanguageServer,
|
||||
_: &mut Window,
|
||||
@@ -16194,7 +16042,6 @@ impl Editor {
|
||||
project.update(cx, |project, cx| {
|
||||
project.stop_language_servers_for_buffers(
|
||||
multi_buffer.all_buffers().into_iter().collect(),
|
||||
HashSet::default(),
|
||||
cx,
|
||||
);
|
||||
cx.emit(project::Event::RefreshInlayHints);
|
||||
@@ -19320,7 +19167,7 @@ impl Editor {
|
||||
let current_execution_position = self
|
||||
.highlighted_rows
|
||||
.get(&TypeId::of::<ActiveDebugLine>())
|
||||
.and_then(|lines| lines.last().map(|line| line.range.end));
|
||||
.and_then(|lines| lines.last().map(|line| line.range.start));
|
||||
|
||||
self.inline_value_cache.refresh_task = cx.spawn(async move |editor, cx| {
|
||||
let inline_values = editor
|
||||
@@ -21706,6 +21553,7 @@ impl SemanticsProvider for Entity<Project> {
|
||||
fn inline_values(
|
||||
&self,
|
||||
buffer_handle: Entity<Buffer>,
|
||||
|
||||
range: Range<text::Anchor>,
|
||||
cx: &mut App,
|
||||
) -> Option<Task<anyhow::Result<Vec<InlayHint>>>> {
|
||||
@@ -23116,12 +22964,6 @@ pub struct LineHighlight {
|
||||
pub type_id: Option<TypeId>,
|
||||
}
|
||||
|
||||
struct LineManipulationResult {
|
||||
pub new_text: String,
|
||||
pub line_count_before: usize,
|
||||
pub line_count_after: usize,
|
||||
}
|
||||
|
||||
fn render_diff_hunk_controls(
|
||||
row: u32,
|
||||
status: &DiffHunkStatus,
|
||||
|
||||
@@ -3976,7 +3976,7 @@ async fn test_custom_newlines_cause_no_false_positive_diffs(
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_manipulate_immutable_lines_with_single_selection(cx: &mut TestAppContext) {
|
||||
async fn test_manipulate_lines_with_single_selection(cx: &mut TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
|
||||
let mut cx = EditorTestContext::new(cx).await;
|
||||
@@ -4021,8 +4021,8 @@ async fn test_manipulate_immutable_lines_with_single_selection(cx: &mut TestAppC
|
||||
|
||||
// Skip testing shuffle_line()
|
||||
|
||||
// From here on out, test more complex cases of manipulate_immutable_lines() with a single driver method: sort_lines_case_sensitive()
|
||||
// Since all methods calling manipulate_immutable_lines() are doing the exact same general thing (reordering lines)
|
||||
// From here on out, test more complex cases of manipulate_lines() with a single driver method: sort_lines_case_sensitive()
|
||||
// Since all methods calling manipulate_lines() are doing the exact same general thing (reordering lines)
|
||||
|
||||
// Don't manipulate when cursor is on single line, but expand the selection
|
||||
cx.set_state(indoc! {"
|
||||
@@ -4089,7 +4089,7 @@ async fn test_manipulate_immutable_lines_with_single_selection(cx: &mut TestAppC
|
||||
bbˇ»b
|
||||
"});
|
||||
cx.update_editor(|e, window, cx| {
|
||||
e.manipulate_immutable_lines(window, cx, |lines| lines.push("added_line"))
|
||||
e.manipulate_lines(window, cx, |lines| lines.push("added_line"))
|
||||
});
|
||||
cx.assert_editor_state(indoc! {"
|
||||
«aaa
|
||||
@@ -4103,7 +4103,7 @@ async fn test_manipulate_immutable_lines_with_single_selection(cx: &mut TestAppC
|
||||
bbbˇ»
|
||||
"});
|
||||
cx.update_editor(|e, window, cx| {
|
||||
e.manipulate_immutable_lines(window, cx, |lines| {
|
||||
e.manipulate_lines(window, cx, |lines| {
|
||||
lines.pop();
|
||||
})
|
||||
});
|
||||
@@ -4117,7 +4117,7 @@ async fn test_manipulate_immutable_lines_with_single_selection(cx: &mut TestAppC
|
||||
bbbˇ»
|
||||
"});
|
||||
cx.update_editor(|e, window, cx| {
|
||||
e.manipulate_immutable_lines(window, cx, |lines| {
|
||||
e.manipulate_lines(window, cx, |lines| {
|
||||
lines.drain(..);
|
||||
})
|
||||
});
|
||||
@@ -4217,7 +4217,7 @@ async fn test_unique_lines_single_selection(cx: &mut TestAppContext) {
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_manipulate_immutable_lines_with_multi_selection(cx: &mut TestAppContext) {
|
||||
async fn test_manipulate_lines_with_multi_selection(cx: &mut TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
|
||||
let mut cx = EditorTestContext::new(cx).await;
|
||||
@@ -4277,7 +4277,7 @@ async fn test_manipulate_immutable_lines_with_multi_selection(cx: &mut TestAppCo
|
||||
aaaˇ»aa
|
||||
"});
|
||||
cx.update_editor(|e, window, cx| {
|
||||
e.manipulate_immutable_lines(window, cx, |lines| lines.push("added line"))
|
||||
e.manipulate_lines(window, cx, |lines| lines.push("added line"))
|
||||
});
|
||||
cx.assert_editor_state(indoc! {"
|
||||
«2
|
||||
@@ -4298,7 +4298,7 @@ async fn test_manipulate_immutable_lines_with_multi_selection(cx: &mut TestAppCo
|
||||
aaaˇ»aa
|
||||
"});
|
||||
cx.update_editor(|e, window, cx| {
|
||||
e.manipulate_immutable_lines(window, cx, |lines| {
|
||||
e.manipulate_lines(window, cx, |lines| {
|
||||
lines.pop();
|
||||
})
|
||||
});
|
||||
@@ -4309,222 +4309,6 @@ async fn test_manipulate_immutable_lines_with_multi_selection(cx: &mut TestAppCo
|
||||
"});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_convert_indentation_to_spaces(cx: &mut TestAppContext) {
|
||||
init_test(cx, |settings| {
|
||||
settings.defaults.tab_size = NonZeroU32::new(3)
|
||||
});
|
||||
|
||||
let mut cx = EditorTestContext::new(cx).await;
|
||||
|
||||
// MULTI SELECTION
|
||||
// Ln.1 "«" tests empty lines
|
||||
// Ln.9 tests just leading whitespace
|
||||
cx.set_state(indoc! {"
|
||||
«
|
||||
abc // No indentationˇ»
|
||||
«\tabc // 1 tabˇ»
|
||||
\t\tabc « ˇ» // 2 tabs
|
||||
\t ab«c // Tab followed by space
|
||||
\tabc // Space followed by tab (3 spaces should be the result)
|
||||
\t \t \t \tabc // Mixed indentation (tab conversion depends on the column)
|
||||
abˇ»ˇc ˇ ˇ // Already space indented«
|
||||
\t
|
||||
\tabc\tdef // Only the leading tab is manipulatedˇ»
|
||||
"});
|
||||
cx.update_editor(|e, window, cx| {
|
||||
e.convert_indentation_to_spaces(&ConvertIndentationToSpaces, window, cx);
|
||||
});
|
||||
cx.assert_editor_state(indoc! {"
|
||||
«
|
||||
abc // No indentation
|
||||
abc // 1 tab
|
||||
abc // 2 tabs
|
||||
abc // Tab followed by space
|
||||
abc // Space followed by tab (3 spaces should be the result)
|
||||
abc // Mixed indentation (tab conversion depends on the column)
|
||||
abc // Already space indented
|
||||
|
||||
abc\tdef // Only the leading tab is manipulatedˇ»
|
||||
"});
|
||||
|
||||
// Test on just a few lines, the others should remain unchanged
|
||||
// Only lines (3, 5, 10, 11) should change
|
||||
cx.set_state(indoc! {"
|
||||
|
||||
abc // No indentation
|
||||
\tabcˇ // 1 tab
|
||||
\t\tabc // 2 tabs
|
||||
\t abcˇ // Tab followed by space
|
||||
\tabc // Space followed by tab (3 spaces should be the result)
|
||||
\t \t \t \tabc // Mixed indentation (tab conversion depends on the column)
|
||||
abc // Already space indented
|
||||
«\t
|
||||
\tabc\tdef // Only the leading tab is manipulatedˇ»
|
||||
"});
|
||||
cx.update_editor(|e, window, cx| {
|
||||
e.convert_indentation_to_spaces(&ConvertIndentationToSpaces, window, cx);
|
||||
});
|
||||
cx.assert_editor_state(indoc! {"
|
||||
|
||||
abc // No indentation
|
||||
« abc // 1 tabˇ»
|
||||
\t\tabc // 2 tabs
|
||||
« abc // Tab followed by spaceˇ»
|
||||
\tabc // Space followed by tab (3 spaces should be the result)
|
||||
\t \t \t \tabc // Mixed indentation (tab conversion depends on the column)
|
||||
abc // Already space indented
|
||||
«
|
||||
abc\tdef // Only the leading tab is manipulatedˇ»
|
||||
"});
|
||||
|
||||
// SINGLE SELECTION
|
||||
// Ln.1 "«" tests empty lines
|
||||
// Ln.9 tests just leading whitespace
|
||||
cx.set_state(indoc! {"
|
||||
«
|
||||
abc // No indentation
|
||||
\tabc // 1 tab
|
||||
\t\tabc // 2 tabs
|
||||
\t abc // Tab followed by space
|
||||
\tabc // Space followed by tab (3 spaces should be the result)
|
||||
\t \t \t \tabc // Mixed indentation (tab conversion depends on the column)
|
||||
abc // Already space indented
|
||||
\t
|
||||
\tabc\tdef // Only the leading tab is manipulatedˇ»
|
||||
"});
|
||||
cx.update_editor(|e, window, cx| {
|
||||
e.convert_indentation_to_spaces(&ConvertIndentationToSpaces, window, cx);
|
||||
});
|
||||
cx.assert_editor_state(indoc! {"
|
||||
«
|
||||
abc // No indentation
|
||||
abc // 1 tab
|
||||
abc // 2 tabs
|
||||
abc // Tab followed by space
|
||||
abc // Space followed by tab (3 spaces should be the result)
|
||||
abc // Mixed indentation (tab conversion depends on the column)
|
||||
abc // Already space indented
|
||||
|
||||
abc\tdef // Only the leading tab is manipulatedˇ»
|
||||
"});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_convert_indentation_to_tabs(cx: &mut TestAppContext) {
|
||||
init_test(cx, |settings| {
|
||||
settings.defaults.tab_size = NonZeroU32::new(3)
|
||||
});
|
||||
|
||||
let mut cx = EditorTestContext::new(cx).await;
|
||||
|
||||
// MULTI SELECTION
|
||||
// Ln.1 "«" tests empty lines
|
||||
// Ln.11 tests just leading whitespace
|
||||
cx.set_state(indoc! {"
|
||||
«
|
||||
abˇ»ˇc // No indentation
|
||||
abc ˇ ˇ // 1 space (< 3 so dont convert)
|
||||
abc « // 2 spaces (< 3 so dont convert)
|
||||
abc // 3 spaces (convert)
|
||||
abc ˇ» // 5 spaces (1 tab + 2 spaces)
|
||||
«\tˇ»\t«\tˇ»abc // Already tab indented
|
||||
«\t abc // Tab followed by space
|
||||
\tabc // Space followed by tab (should be consumed due to tab)
|
||||
\t \t \t \tabc // Mixed indentation (first 3 spaces are consumed, the others are converted)
|
||||
\tˇ» «\t
|
||||
abcˇ» \t ˇˇˇ // Only the leading spaces should be converted
|
||||
"});
|
||||
cx.update_editor(|e, window, cx| {
|
||||
e.convert_indentation_to_tabs(&ConvertIndentationToTabs, window, cx);
|
||||
});
|
||||
cx.assert_editor_state(indoc! {"
|
||||
«
|
||||
abc // No indentation
|
||||
abc // 1 space (< 3 so dont convert)
|
||||
abc // 2 spaces (< 3 so dont convert)
|
||||
\tabc // 3 spaces (convert)
|
||||
\t abc // 5 spaces (1 tab + 2 spaces)
|
||||
\t\t\tabc // Already tab indented
|
||||
\t abc // Tab followed by space
|
||||
\tabc // Space followed by tab (should be consumed due to tab)
|
||||
\t\t\t\t\tabc // Mixed indentation (first 3 spaces are consumed, the others are converted)
|
||||
\t\t\t
|
||||
\tabc \t // Only the leading spaces should be convertedˇ»
|
||||
"});
|
||||
|
||||
// Test on just a few lines, the other should remain unchanged
|
||||
// Only lines (4, 8, 11, 12) should change
|
||||
cx.set_state(indoc! {"
|
||||
|
||||
abc // No indentation
|
||||
abc // 1 space (< 3 so dont convert)
|
||||
abc // 2 spaces (< 3 so dont convert)
|
||||
« abc // 3 spaces (convert)ˇ»
|
||||
abc // 5 spaces (1 tab + 2 spaces)
|
||||
\t\t\tabc // Already tab indented
|
||||
\t abc // Tab followed by space
|
||||
\tabc ˇ // Space followed by tab (should be consumed due to tab)
|
||||
\t\t \tabc // Mixed indentation
|
||||
\t \t \t \tabc // Mixed indentation
|
||||
\t \tˇ
|
||||
« abc \t // Only the leading spaces should be convertedˇ»
|
||||
"});
|
||||
cx.update_editor(|e, window, cx| {
|
||||
e.convert_indentation_to_tabs(&ConvertIndentationToTabs, window, cx);
|
||||
});
|
||||
cx.assert_editor_state(indoc! {"
|
||||
|
||||
abc // No indentation
|
||||
abc // 1 space (< 3 so dont convert)
|
||||
abc // 2 spaces (< 3 so dont convert)
|
||||
«\tabc // 3 spaces (convert)ˇ»
|
||||
abc // 5 spaces (1 tab + 2 spaces)
|
||||
\t\t\tabc // Already tab indented
|
||||
\t abc // Tab followed by space
|
||||
«\tabc // Space followed by tab (should be consumed due to tab)ˇ»
|
||||
\t\t \tabc // Mixed indentation
|
||||
\t \t \t \tabc // Mixed indentation
|
||||
«\t\t\t
|
||||
\tabc \t // Only the leading spaces should be convertedˇ»
|
||||
"});
|
||||
|
||||
// SINGLE SELECTION
|
||||
// Ln.1 "«" tests empty lines
|
||||
// Ln.11 tests just leading whitespace
|
||||
cx.set_state(indoc! {"
|
||||
«
|
||||
abc // No indentation
|
||||
abc // 1 space (< 3 so dont convert)
|
||||
abc // 2 spaces (< 3 so dont convert)
|
||||
abc // 3 spaces (convert)
|
||||
abc // 5 spaces (1 tab + 2 spaces)
|
||||
\t\t\tabc // Already tab indented
|
||||
\t abc // Tab followed by space
|
||||
\tabc // Space followed by tab (should be consumed due to tab)
|
||||
\t \t \t \tabc // Mixed indentation (first 3 spaces are consumed, the others are converted)
|
||||
\t \t
|
||||
abc \t // Only the leading spaces should be convertedˇ»
|
||||
"});
|
||||
cx.update_editor(|e, window, cx| {
|
||||
e.convert_indentation_to_tabs(&ConvertIndentationToTabs, window, cx);
|
||||
});
|
||||
cx.assert_editor_state(indoc! {"
|
||||
«
|
||||
abc // No indentation
|
||||
abc // 1 space (< 3 so dont convert)
|
||||
abc // 2 spaces (< 3 so dont convert)
|
||||
\tabc // 3 spaces (convert)
|
||||
\t abc // 5 spaces (1 tab + 2 spaces)
|
||||
\t\t\tabc // Already tab indented
|
||||
\t abc // Tab followed by space
|
||||
\tabc // Space followed by tab (should be consumed due to tab)
|
||||
\t\t\t\t\tabc // Mixed indentation (first 3 spaces are consumed, the others are converted)
|
||||
\t\t\t
|
||||
\tabc \t // Only the leading spaces should be convertedˇ»
|
||||
"});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_toggle_case(cx: &mut TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
@@ -6883,15 +6667,6 @@ async fn test_select_all_matches(cx: &mut TestAppContext) {
|
||||
cx.update_editor(|e, window, cx| e.select_all_matches(&SelectAllMatches, window, cx))
|
||||
.unwrap();
|
||||
cx.assert_editor_state("abc\n« ˇ»abc\nabc");
|
||||
|
||||
// Test with a single word and clip_at_line_ends=true (#29823)
|
||||
cx.set_state("aˇbc");
|
||||
cx.update_editor(|e, window, cx| {
|
||||
e.set_clip_at_line_ends(true, cx);
|
||||
e.select_all_matches(&SelectAllMatches, window, cx).unwrap();
|
||||
e.set_clip_at_line_ends(false, cx);
|
||||
});
|
||||
cx.assert_editor_state("«abcˇ»");
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
@@ -15665,7 +15440,7 @@ async fn test_completions_default_resolve_data_handling(cx: &mut TestAppContext)
|
||||
// Completions that have already been resolved are skipped.
|
||||
assert_eq!(
|
||||
*resolved_items.lock(),
|
||||
items[items.len() - 17..items.len() - 4]
|
||||
items[items.len() - 16..items.len() - 4]
|
||||
.iter()
|
||||
.cloned()
|
||||
.map(|mut item| {
|
||||
|
||||
@@ -230,8 +230,6 @@ impl EditorElement {
|
||||
register_action(editor, window, Editor::reverse_lines);
|
||||
register_action(editor, window, Editor::shuffle_lines);
|
||||
register_action(editor, window, Editor::toggle_case);
|
||||
register_action(editor, window, Editor::convert_indentation_to_spaces);
|
||||
register_action(editor, window, Editor::convert_indentation_to_tabs);
|
||||
register_action(editor, window, Editor::convert_to_upper_case);
|
||||
register_action(editor, window, Editor::convert_to_lower_case);
|
||||
register_action(editor, window, Editor::convert_to_title_case);
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
//! in editor given a given motion (e.g. it handles converting a "move left" command into coordinates in editor). It is exposed mostly for use by vim crate.
|
||||
|
||||
use super::{Bias, DisplayPoint, DisplaySnapshot, SelectionGoal, ToDisplayPoint};
|
||||
use crate::{DisplayRow, EditorStyle, ToOffset, ToPoint, scroll::ScrollAnchor};
|
||||
use crate::{CharKind, DisplayRow, EditorStyle, ToOffset, ToPoint, scroll::ScrollAnchor};
|
||||
use gpui::{Pixels, WindowTextSystem};
|
||||
use language::Point;
|
||||
use multi_buffer::{MultiBufferRow, MultiBufferSnapshot};
|
||||
@@ -721,6 +721,38 @@ pub fn chars_before(
    })
}

pub(crate) fn is_inside_word(map: &DisplaySnapshot, point: DisplayPoint) -> bool {
    let raw_point = point.to_point(map);
    let classifier = map.buffer_snapshot.char_classifier_at(raw_point);
    let ix = map.clip_point(point, Bias::Left).to_offset(map, Bias::Left);
    let text = &map.buffer_snapshot;
    let next_char_kind = text.chars_at(ix).next().map(|c| classifier.kind(c));
    let prev_char_kind = text
        .reversed_chars_at(ix)
        .next()
        .map(|c| classifier.kind(c));
    prev_char_kind.zip(next_char_kind) == Some((CharKind::Word, CharKind::Word))
}

pub(crate) fn surrounding_word(
    map: &DisplaySnapshot,
    position: DisplayPoint,
) -> Range<DisplayPoint> {
    let position = map
        .clip_point(position, Bias::Left)
        .to_offset(map, Bias::Left);
    let (range, _) = map.buffer_snapshot.surrounding_word(position, false);
    let start = range
        .start
        .to_point(&map.buffer_snapshot)
        .to_display_point(map);
    let end = range
        .end
        .to_point(&map.buffer_snapshot)
        .to_display_point(map);
    start..end
}

/// Returns a list of lines (represented as a [`DisplayPoint`] range) contained
/// within a passed range.
///
@@ -1059,6 +1091,30 @@ mod tests {
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_surrounding_word(cx: &mut gpui::App) {
|
||||
init_test(cx);
|
||||
|
||||
fn assert(marked_text: &str, cx: &mut gpui::App) {
|
||||
let (snapshot, display_points) = marked_display_snapshot(marked_text, cx);
|
||||
assert_eq!(
|
||||
surrounding_word(&snapshot, display_points[1]),
|
||||
display_points[0]..display_points[2],
|
||||
"{}",
|
||||
marked_text
|
||||
);
|
||||
}
|
||||
|
||||
assert("ˇˇloremˇ ipsum", cx);
|
||||
assert("ˇloˇremˇ ipsum", cx);
|
||||
assert("ˇloremˇˇ ipsum", cx);
|
||||
assert("loremˇ ˇ ˇipsum", cx);
|
||||
assert("lorem\nˇˇˇ\nipsum", cx);
|
||||
assert("lorem\nˇˇipsumˇ", cx);
|
||||
assert("loremˇ,ˇˇ ipsum", cx);
|
||||
assert("ˇloremˇˇ, ipsum", cx);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_move_up_and_down_with_excerpts(cx: &mut gpui::TestAppContext) {
|
||||
cx.update(|cx| {
|
||||
|
||||
@@ -417,7 +417,7 @@ pub fn init(cx: &mut App) -> Arc<AgentAppState> {
|
||||
debug_adapter_extension::init(extension_host_proxy.clone(), cx);
|
||||
language_extension::init(extension_host_proxy.clone(), languages.clone());
|
||||
language_model::init(client.clone(), cx);
|
||||
language_models::init(user_store.clone(), client.clone(), cx);
|
||||
language_models::init(user_store.clone(), client.clone(), fs.clone(), cx);
|
||||
languages::init(languages.clone(), node_runtime.clone(), cx);
|
||||
prompt_store::init(cx);
|
||||
terminal_view::init(cx);
|
||||
|
||||
@@ -1030,7 +1030,6 @@ pub fn response_events_to_markdown(
|
||||
Ok(LanguageModelCompletionEvent::Thinking { text, .. }) => {
|
||||
thinking_buffer.push_str(text);
|
||||
}
|
||||
Ok(LanguageModelCompletionEvent::RedactedThinking { .. }) => {}
|
||||
Ok(LanguageModelCompletionEvent::Stop(reason)) => {
|
||||
flush_buffers(&mut response, &mut text_buffer, &mut thinking_buffer);
|
||||
response.push_str(&format!("**Stop**: {:?}\n\n", reason));
|
||||
@@ -1127,7 +1126,6 @@ impl ThreadDialog {
|
||||
|
||||
// Skip these
|
||||
Ok(LanguageModelCompletionEvent::UsageUpdate(_))
|
||||
| Ok(LanguageModelCompletionEvent::RedactedThinking { .. })
|
||||
| Ok(LanguageModelCompletionEvent::StatusUpdate { .. })
|
||||
| Ok(LanguageModelCompletionEvent::StartMessage { .. })
|
||||
| Ok(LanguageModelCompletionEvent::Stop(_)) => {}
|
||||
|
||||
@@ -4,13 +4,13 @@ use crate::{
|
||||
GrammarManifestEntry, RELOAD_DEBOUNCE_DURATION, SchemaVersion,
|
||||
};
|
||||
use async_compression::futures::bufread::GzipEncoder;
|
||||
use collections::{BTreeMap, HashSet};
|
||||
use collections::BTreeMap;
|
||||
use extension::ExtensionHostProxy;
|
||||
use fs::{FakeFs, Fs, RealFs};
|
||||
use futures::{AsyncReadExt, StreamExt, io::BufReader};
|
||||
use gpui::{AppContext as _, SemanticVersion, TestAppContext};
|
||||
use http_client::{FakeHttpClient, Response};
|
||||
use language::{BinaryStatus, LanguageMatcher, LanguageRegistry};
|
||||
use language::{BinaryStatus, LanguageMatcher, LanguageRegistry, LanguageServerStatusUpdate};
|
||||
use lsp::LanguageServerName;
|
||||
use node_runtime::NodeRuntime;
|
||||
use parking_lot::Mutex;
|
||||
@@ -720,22 +720,20 @@ async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) {
|
||||
status_updates.next().await.unwrap(),
|
||||
status_updates.next().await.unwrap(),
|
||||
status_updates.next().await.unwrap(),
|
||||
status_updates.next().await.unwrap(),
|
||||
],
|
||||
[
|
||||
(
|
||||
LanguageServerName::new_static("gleam"),
|
||||
BinaryStatus::Starting
|
||||
LanguageServerStatusUpdate::Binary(BinaryStatus::CheckingForUpdate)
|
||||
),
|
||||
(
|
||||
LanguageServerName::new_static("gleam"),
|
||||
BinaryStatus::CheckingForUpdate
|
||||
LanguageServerStatusUpdate::Binary(BinaryStatus::Downloading)
|
||||
),
|
||||
(
|
||||
LanguageServerName::new_static("gleam"),
|
||||
BinaryStatus::Downloading
|
||||
),
|
||||
(LanguageServerName::new_static("gleam"), BinaryStatus::None)
|
||||
LanguageServerStatusUpdate::Binary(BinaryStatus::None)
|
||||
)
|
||||
]
|
||||
);
|
||||
|
||||
@@ -796,7 +794,7 @@ async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) {
|
||||
|
||||
// Start a new instance of the language server.
|
||||
project.update(cx, |project, cx| {
|
||||
project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx)
|
||||
project.restart_language_servers_for_buffers(vec![buffer.clone()], cx)
|
||||
});
|
||||
cx.executor().run_until_parked();
|
||||
|
||||
@@ -818,7 +816,7 @@ async fn test_extension_store_with_test_extension(cx: &mut TestAppContext) {
|
||||
|
||||
cx.executor().run_until_parked();
|
||||
project.update(cx, |project, cx| {
|
||||
project.restart_language_servers_for_buffers(vec![buffer.clone()], HashSet::default(), cx)
|
||||
project.restart_language_servers_for_buffers(vec![buffer.clone()], cx)
|
||||
});
|
||||
|
||||
// The extension re-fetches the latest version of the language server.
|
||||
|
||||
@@ -945,10 +945,7 @@ impl ExtensionImports for WasmState {
|
||||
.get(key.as_str())
|
||||
})
|
||||
.cloned()
|
||||
.unwrap_or_else(|| {
|
||||
project::project_settings::ContextServerSettings::default_extension(
|
||||
)
|
||||
});
|
||||
.context("Failed to get context server configuration")?;
|
||||
|
||||
match settings {
|
||||
project::project_settings::ContextServerSettings::Custom {
|
||||
|
||||
@@ -939,47 +939,20 @@ impl FileFinderDelegate {
|
||||
matches.into_iter(),
|
||||
extend_old_matches,
|
||||
);
|
||||
let worktree = self.project.read(cx).visible_worktrees(cx).next();
|
||||
let filename = query.raw_query.to_string();
|
||||
let path = Path::new(&filename);
|
||||
|
||||
let filename = &query.raw_query;
|
||||
let mut query_path = Path::new(filename);
|
||||
// add option of creating new file only if path is relative
|
||||
let available_worktree = self
|
||||
.project
|
||||
.read(cx)
|
||||
.visible_worktrees(cx)
|
||||
.filter(|worktree| !worktree.read(cx).is_single_file())
|
||||
.collect::<Vec<_>>();
|
||||
let worktree_count = available_worktree.len();
|
||||
let mut expect_worktree = available_worktree.first().cloned();
|
||||
for worktree in available_worktree {
|
||||
let worktree_root = worktree
|
||||
.read(cx)
|
||||
.abs_path()
|
||||
.file_name()
|
||||
.map_or(String::new(), |f| f.to_string_lossy().to_string());
|
||||
if worktree_count > 1 && query_path.starts_with(&worktree_root) {
|
||||
query_path = query_path
|
||||
.strip_prefix(&worktree_root)
|
||||
.unwrap_or(query_path);
|
||||
expect_worktree = Some(worktree);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(FoundPath { ref project, .. }) = self.currently_opened_path {
|
||||
let worktree_id = project.worktree_id;
|
||||
expect_worktree = self.project.read(cx).worktree_for_id(worktree_id, cx);
|
||||
}
|
||||
|
||||
if let Some(worktree) = expect_worktree {
|
||||
if let Some(worktree) = worktree {
|
||||
let worktree = worktree.read(cx);
|
||||
if query_path.is_relative()
|
||||
&& worktree.entry_for_path(&query_path).is_none()
|
||||
if path.is_relative()
|
||||
&& worktree.entry_for_path(&path).is_none()
|
||||
&& !filename.ends_with("/")
|
||||
{
|
||||
self.matches.matches.push(Match::CreateNew(ProjectPath {
|
||||
worktree_id: worktree.id(),
|
||||
path: Arc::from(query_path),
|
||||
path: Arc::from(path),
|
||||
}));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -881,148 +881,6 @@ async fn test_single_file_worktrees(cx: &mut TestAppContext) {
|
||||
picker.update(cx, |f, _| assert_eq!(f.delegate.matches.len(), 0));
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_create_file_for_multiple_worktrees(cx: &mut TestAppContext) {
|
||||
let app_state = init_test(cx);
|
||||
app_state
|
||||
.fs
|
||||
.as_fake()
|
||||
.insert_tree(
|
||||
path!("/roota"),
|
||||
json!({ "the-parent-dira": { "filea": "" } }),
|
||||
)
|
||||
.await;
|
||||
|
||||
app_state
|
||||
.fs
|
||||
.as_fake()
|
||||
.insert_tree(
|
||||
path!("/rootb"),
|
||||
json!({ "the-parent-dirb": { "fileb": "" } }),
|
||||
)
|
||||
.await;
|
||||
|
||||
let project = Project::test(
|
||||
app_state.fs.clone(),
|
||||
[path!("/roota").as_ref(), path!("/rootb").as_ref()],
|
||||
cx,
|
||||
)
|
||||
.await;
|
||||
|
||||
let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx));
|
||||
let (_worktree_id1, worktree_id2) = cx.read(|cx| {
|
||||
let worktrees = workspace.read(cx).worktrees(cx).collect::<Vec<_>>();
|
||||
(
|
||||
WorktreeId::from_usize(worktrees[0].entity_id().as_u64() as usize),
|
||||
WorktreeId::from_usize(worktrees[1].entity_id().as_u64() as usize),
|
||||
)
|
||||
});
|
||||
|
||||
let b_path = ProjectPath {
|
||||
worktree_id: worktree_id2,
|
||||
path: Arc::from(Path::new(path!("the-parent-dirb/fileb"))),
|
||||
};
|
||||
workspace
|
||||
.update_in(cx, |workspace, window, cx| {
|
||||
workspace.open_path(b_path, None, true, window, cx)
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let finder = open_file_picker(&workspace, cx);
|
||||
|
||||
finder
|
||||
.update_in(cx, |f, window, cx| {
|
||||
f.delegate.spawn_search(
|
||||
test_path_position(path!("the-parent-dirb/filec")),
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
})
|
||||
.await;
|
||||
cx.run_until_parked();
|
||||
finder.update_in(cx, |picker, window, cx| {
|
||||
assert_eq!(picker.delegate.matches.len(), 1);
|
||||
picker.delegate.confirm(false, window, cx)
|
||||
});
|
||||
cx.run_until_parked();
|
||||
cx.read(|cx| {
|
||||
let active_editor = workspace.read(cx).active_item_as::<Editor>(cx).unwrap();
|
||||
let project_path = active_editor.read(cx).project_path(cx);
|
||||
assert_eq!(
|
||||
project_path,
|
||||
Some(ProjectPath {
|
||||
worktree_id: worktree_id2,
|
||||
path: Arc::from(Path::new(path!("the-parent-dirb/filec")))
|
||||
})
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_create_file_no_focused_with_multiple_worktrees(cx: &mut TestAppContext) {
|
||||
let app_state = init_test(cx);
|
||||
app_state
|
||||
.fs
|
||||
.as_fake()
|
||||
.insert_tree(
|
||||
path!("/roota"),
|
||||
json!({ "the-parent-dira": { "filea": "" } }),
|
||||
)
|
||||
.await;
|
||||
|
||||
app_state
|
||||
.fs
|
||||
.as_fake()
|
||||
.insert_tree(
|
||||
path!("/rootb"),
|
||||
json!({ "the-parent-dirb": { "fileb": "" } }),
|
||||
)
|
||||
.await;
|
||||
|
||||
let project = Project::test(
|
||||
app_state.fs.clone(),
|
||||
[path!("/roota").as_ref(), path!("/rootb").as_ref()],
|
||||
cx,
|
||||
)
|
||||
.await;
|
||||
|
||||
let (workspace, cx) = cx.add_window_view(|window, cx| Workspace::test_new(project, window, cx));
|
||||
let (_worktree_id1, worktree_id2) = cx.read(|cx| {
|
||||
let worktrees = workspace.read(cx).worktrees(cx).collect::<Vec<_>>();
|
||||
(
|
||||
WorktreeId::from_usize(worktrees[0].entity_id().as_u64() as usize),
|
||||
WorktreeId::from_usize(worktrees[1].entity_id().as_u64() as usize),
|
||||
)
|
||||
});
|
||||
|
||||
let finder = open_file_picker(&workspace, cx);
|
||||
|
||||
finder
|
||||
.update_in(cx, |f, window, cx| {
|
||||
f.delegate
|
||||
.spawn_search(test_path_position(path!("rootb/filec")), window, cx)
|
||||
})
|
||||
.await;
|
||||
cx.run_until_parked();
|
||||
finder.update_in(cx, |picker, window, cx| {
|
||||
assert_eq!(picker.delegate.matches.len(), 1);
|
||||
picker.delegate.confirm(false, window, cx)
|
||||
});
|
||||
cx.run_until_parked();
|
||||
cx.read(|cx| {
|
||||
let active_editor = workspace.read(cx).active_item_as::<Editor>(cx).unwrap();
|
||||
let project_path = active_editor.read(cx).project_path(cx);
|
||||
assert_eq!(
|
||||
project_path,
|
||||
Some(ProjectPath {
|
||||
worktree_id: worktree_id2,
|
||||
path: Arc::from(Path::new("filec"))
|
||||
})
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_path_distance_ordering(cx: &mut TestAppContext) {
|
||||
let app_state = init_test(cx);
|
||||
|
||||
@@ -413,6 +413,10 @@ impl PickerDelegate for BranchListDelegate {
        cx.emit(DismissEvent);
    }

    fn render_header(&self, _: &mut Window, _cx: &mut Context<Picker<Self>>) -> Option<AnyElement> {
        None
    }

    fn render_match(
        &self,
        ix: usize,

@@ -1,4 +1,6 @@
use gpui::{App, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Task, WeakEntity};
use gpui::{
    AnyElement, App, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Task, WeakEntity,
};
use itertools::Itertools;
use picker::{Picker, PickerDelegate};
use project::{Project, git_store::Repository};
@@ -205,6 +207,15 @@ impl PickerDelegate for RepositorySelectorDelegate {
            .ok();
    }

    fn render_header(
        &self,
        _window: &mut Window,
        _cx: &mut Context<Picker<Self>>,
    ) -> Option<AnyElement> {
        // TODO: Implement header rendering if needed
        None
    }

    fn render_match(
        &self,
        ix: usize,

@@ -298,3 +298,7 @@ path = "examples/uniform_list.rs"
[[example]]
name = "window_shadow"
path = "examples/window_shadow.rs"

[[example]]
name = "uniform_table"
path = "examples/uniform_table.rs"

@@ -1,6 +1,6 @@
|
||||
use gpui::{
|
||||
App, Application, Bounds, Context, Window, WindowBounds, WindowOptions, div, prelude::*, px,
|
||||
rgb, size, uniform_list,
|
||||
App, Application, Bounds, Context, ListSizingBehavior, Window, WindowBounds, WindowOptions,
|
||||
div, prelude::*, px, rgb, size, uniform_list,
|
||||
};
|
||||
|
||||
struct UniformListExample {}
|
||||
@@ -12,6 +12,7 @@ impl Render for UniformListExample {
|
||||
"entries",
|
||||
50,
|
||||
cx.processor(|_this, range, _window, _cx| {
|
||||
dbg!(&range);
|
||||
let mut items = Vec::new();
|
||||
for ix in range {
|
||||
let item = ix + 1;
|
||||
@@ -30,6 +31,7 @@ impl Render for UniformListExample {
|
||||
items
|
||||
}),
|
||||
)
|
||||
.with_sizing_behavior(ListSizingBehavior::Infer)
|
||||
.h_full(),
|
||||
)
|
||||
}
|
||||
|
||||
54
crates/gpui/examples/uniform_table.rs
Normal file
@@ -0,0 +1,54 @@
use gpui::{
    App, Application, Bounds, Context, Window, WindowBounds, WindowOptions, div, prelude::*, px,
    rgb, size,
};

struct UniformTableExample {}

impl Render for UniformTableExample {
    fn render(&mut self, _window: &mut Window, _: &mut Context<Self>) -> impl IntoElement {
        const COLS: usize = 24;
        const ROWS: usize = 100;
        let mut headers = [0; COLS];

        for column in 0..COLS {
            headers[column] = column;
        }

        div().bg(rgb(0xffffff)).size_full().child(
            gpui::uniform_table("simple table", ROWS, move |range, _, _| {
                dbg!(&range);
                range
                    .map(|row_index| {
                        let mut row = [0; COLS];
                        for col in 0..COLS {
                            row[col] = (row_index + 1) * (col + 1);
                        }
                        row.map(|cell| ToString::to_string(&cell))
                            .map(|cell| div().flex().flex_row().child(cell))
                            .map(IntoElement::into_any_element)
                    })
                    .collect()
            })
            .with_width_from_item(Some(ROWS - 1))
            // todo! without this, the AvailableSpace passed in window.request_measured_layout is a Definite(2600px) on Anthony's machine
            // this doesn't make sense, and results in the full range of elements getting rendered. This also occurs on uniform_list
            // This is resulting from windows.bounds() being called
            .h_full(),
        )
    }
}

fn main() {
    Application::new().run(|cx: &mut App| {
        let bounds = Bounds::centered(None, size(px(300.0), px(300.0)), cx);
        cx.open_window(
            WindowOptions {
                window_bounds: Some(WindowBounds::Windowed(bounds)),
                ..Default::default()
            },
            |_, cx| cx.new(|_| UniformTableExample {}),
        )
        .unwrap();
    });
}

@@ -909,7 +909,7 @@ impl App {
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
{
|
||||
self.update_window(window, |_, window, cx| window.draw(cx).clear())
|
||||
self.update_window(window, |_, window, cx| window.draw(cx))
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
@@ -1334,6 +1334,11 @@ impl App {
|
||||
self.pending_effects.push_back(Effect::RefreshWindows);
|
||||
}
|
||||
|
||||
/// Get all key bindings in the app.
|
||||
pub fn key_bindings(&self) -> Rc<RefCell<Keymap>> {
|
||||
self.keymap.clone()
|
||||
}
|
||||
|
||||
/// Register a global listener for actions invoked via the keyboard.
|
||||
pub fn on_action<A: Action>(&mut self, listener: impl Fn(&A, &mut Self) + 'static) {
|
||||
self.global_action_listeners
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
use std::{
|
||||
alloc::{self, handle_alloc_error},
|
||||
alloc,
|
||||
cell::Cell,
|
||||
ops::{Deref, DerefMut},
|
||||
ptr,
|
||||
@@ -20,98 +20,43 @@ impl Drop for ArenaElement {
|
||||
}
|
||||
}
|
||||
|
||||
struct Chunk {
|
||||
pub struct Arena {
|
||||
start: *mut u8,
|
||||
end: *mut u8,
|
||||
offset: *mut u8,
|
||||
elements: Vec<ArenaElement>,
|
||||
valid: Rc<Cell<bool>>,
|
||||
}
|
||||
|
||||
impl Drop for Chunk {
|
||||
fn drop(&mut self) {
|
||||
impl Arena {
|
||||
pub fn new(size_in_bytes: usize) -> Self {
|
||||
unsafe {
|
||||
let chunk_size = self.end.offset_from_unsigned(self.start);
|
||||
// this never fails as it succeeded during allocation
|
||||
let layout = alloc::Layout::from_size_align(chunk_size, 1).unwrap();
|
||||
alloc::dealloc(self.start, layout);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Chunk {
|
||||
fn new(chunk_size: usize) -> Self {
|
||||
unsafe {
|
||||
// this only fails if chunk_size is unreasonably huge
|
||||
let layout = alloc::Layout::from_size_align(chunk_size, 1).unwrap();
|
||||
let layout = alloc::Layout::from_size_align(size_in_bytes, 1).unwrap();
|
||||
let start = alloc::alloc(layout);
|
||||
if start.is_null() {
|
||||
handle_alloc_error(layout);
|
||||
}
|
||||
let end = start.add(chunk_size);
|
||||
let end = start.add(size_in_bytes);
|
||||
Self {
|
||||
start,
|
||||
end,
|
||||
offset: start,
|
||||
elements: Vec::new(),
|
||||
valid: Rc::new(Cell::new(true)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn allocate(&mut self, layout: alloc::Layout) -> Option<*mut u8> {
|
||||
unsafe {
|
||||
let aligned = self.offset.add(self.offset.align_offset(layout.align()));
|
||||
let next = aligned.add(layout.size());
|
||||
|
||||
if next <= self.end {
|
||||
self.offset = next;
|
||||
Some(aligned)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn reset(&mut self) {
|
||||
self.offset = self.start;
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Arena {
|
||||
chunks: Vec<Chunk>,
|
||||
elements: Vec<ArenaElement>,
|
||||
valid: Rc<Cell<bool>>,
|
||||
current_chunk_index: usize,
|
||||
chunk_size: usize,
|
||||
}
|
||||
|
||||
impl Drop for Arena {
|
||||
fn drop(&mut self) {
|
||||
self.clear();
|
||||
}
|
||||
}
|
||||
|
||||
impl Arena {
|
||||
pub fn new(chunk_size: usize) -> Self {
|
||||
assert!(chunk_size > 0);
|
||||
Self {
|
||||
chunks: vec![Chunk::new(chunk_size)],
|
||||
elements: Vec::new(),
|
||||
valid: Rc::new(Cell::new(true)),
|
||||
current_chunk_index: 0,
|
||||
chunk_size,
|
||||
}
|
||||
pub fn len(&self) -> usize {
|
||||
self.offset as usize - self.start as usize
|
||||
}
|
||||
|
||||
pub fn capacity(&self) -> usize {
|
||||
self.chunks.len() * self.chunk_size
|
||||
self.end as usize - self.start as usize
|
||||
}
|
||||
|
||||
pub fn clear(&mut self) {
|
||||
self.valid.set(false);
|
||||
self.valid = Rc::new(Cell::new(true));
|
||||
self.elements.clear();
|
||||
for chunk_index in 0..=self.current_chunk_index {
|
||||
self.chunks[chunk_index].reset();
|
||||
}
|
||||
self.current_chunk_index = 0;
|
||||
self.offset = self.start;
|
||||
}
|
||||
|
||||
#[inline(always)]
|
||||
@@ -134,45 +79,33 @@ impl Arena {
|
||||
|
||||
unsafe {
|
||||
let layout = alloc::Layout::new::<T>();
|
||||
let mut current_chunk = &mut self.chunks[self.current_chunk_index];
|
||||
let ptr = if let Some(ptr) = current_chunk.allocate(layout) {
|
||||
ptr
|
||||
} else {
|
||||
self.current_chunk_index += 1;
|
||||
if self.current_chunk_index >= self.chunks.len() {
|
||||
self.chunks.push(Chunk::new(self.chunk_size));
|
||||
assert_eq!(self.current_chunk_index, self.chunks.len() - 1);
|
||||
log::info!(
|
||||
"increased element arena capacity to {}kb",
|
||||
self.capacity() / 1024,
|
||||
);
|
||||
}
|
||||
current_chunk = &mut self.chunks[self.current_chunk_index];
|
||||
if let Some(ptr) = current_chunk.allocate(layout) {
|
||||
ptr
|
||||
} else {
|
||||
panic!(
|
||||
"Arena chunk_size of {} is too small to allocate {} bytes",
|
||||
self.chunk_size,
|
||||
layout.size()
|
||||
);
|
||||
}
|
||||
let offset = self.offset.add(self.offset.align_offset(layout.align()));
|
||||
let next_offset = offset.add(layout.size());
|
||||
assert!(next_offset <= self.end, "not enough space in Arena");
|
||||
|
||||
let result = ArenaBox {
|
||||
ptr: offset.cast(),
|
||||
valid: self.valid.clone(),
|
||||
};
|
||||
|
||||
inner_writer(ptr.cast(), f);
|
||||
inner_writer(result.ptr, f);
|
||||
self.elements.push(ArenaElement {
|
||||
value: ptr,
|
||||
value: offset,
|
||||
drop: drop::<T>,
|
||||
});
|
||||
self.offset = next_offset;
|
||||
|
||||
ArenaBox {
|
||||
ptr: ptr.cast(),
|
||||
valid: self.valid.clone(),
|
||||
}
|
||||
result
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for Arena {
|
||||
fn drop(&mut self) {
|
||||
self.clear();
|
||||
}
|
||||
}
|
||||
|
||||
pub struct ArenaBox<T: ?Sized> {
|
||||
ptr: *mut T,
|
||||
valid: Rc<Cell<bool>>,
|
||||
@@ -282,17 +215,13 @@ mod tests {
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_arena_grow() {
|
||||
let mut arena = Arena::new(8);
|
||||
#[should_panic(expected = "not enough space in Arena")]
|
||||
fn test_arena_overflow() {
|
||||
let mut arena = Arena::new(16);
|
||||
arena.alloc(|| 1u64);
|
||||
arena.alloc(|| 2u64);
|
||||
|
||||
assert_eq!(arena.capacity(), 16);
|
||||
|
||||
arena.alloc(|| 3u32);
|
||||
arena.alloc(|| 4u32);
|
||||
|
||||
assert_eq!(arena.capacity(), 24);
|
||||
// This should panic.
|
||||
arena.alloc(|| 3u64);
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
||||
@@ -613,10 +613,10 @@ pub trait InteractiveElement: Sized {
|
||||
/// Track the focus state of the given focus handle on this element.
|
||||
/// If the focus handle is focused by the application, this element will
|
||||
/// apply its focused styles.
|
||||
fn track_focus(mut self, focus_handle: &FocusHandle) -> FocusableWrapper<Self> {
|
||||
fn track_focus(mut self, focus_handle: &FocusHandle) -> Self {
|
||||
self.interactivity().focusable = true;
|
||||
self.interactivity().tracked_focus_handle = Some(focus_handle.clone());
|
||||
FocusableWrapper { element: self }
|
||||
self
|
||||
}
|
||||
|
||||
/// Set the keymap context for this element. This will be used to determine
|
||||
@@ -980,15 +980,35 @@ pub trait InteractiveElement: Sized {
|
||||
self.interactivity().block_mouse_except_scroll();
|
||||
self
|
||||
}
|
||||
|
||||
/// Set the given styles to be applied when this element, specifically, is focused.
|
||||
/// Requires that the element is focusable. Elements can be made focusable using [`InteractiveElement::track_focus`].
|
||||
fn focus(mut self, f: impl FnOnce(StyleRefinement) -> StyleRefinement) -> Self
|
||||
where
|
||||
Self: Sized,
|
||||
{
|
||||
self.interactivity().focus_style = Some(Box::new(f(StyleRefinement::default())));
|
||||
self
|
||||
}
|
||||
|
||||
/// Set the given styles to be applied when this element is inside another element that is focused.
|
||||
/// Requires that the element is focusable. Elements can be made focusable using [`InteractiveElement::track_focus`].
|
||||
fn in_focus(mut self, f: impl FnOnce(StyleRefinement) -> StyleRefinement) -> Self
|
||||
where
|
||||
Self: Sized,
|
||||
{
|
||||
self.interactivity().in_focus_style = Some(Box::new(f(StyleRefinement::default())));
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
/// A trait for elements that want to use the standard GPUI interactivity features
|
||||
/// that require state.
|
||||
pub trait StatefulInteractiveElement: InteractiveElement {
|
||||
/// Set this element to focusable.
|
||||
fn focusable(mut self) -> FocusableWrapper<Self> {
|
||||
fn focusable(mut self) -> Self {
|
||||
self.interactivity().focusable = true;
|
||||
FocusableWrapper { element: self }
|
||||
self
|
||||
}
|
||||
|
||||
/// Set the overflow x and y to scroll.
|
||||
@@ -1118,27 +1138,6 @@ pub trait StatefulInteractiveElement: InteractiveElement {
|
||||
}
|
||||
}
|
||||
|
||||
/// A trait for providing focus related APIs to interactive elements
|
||||
pub trait FocusableElement: InteractiveElement {
|
||||
/// Set the given styles to be applied when this element, specifically, is focused.
|
||||
fn focus(mut self, f: impl FnOnce(StyleRefinement) -> StyleRefinement) -> Self
|
||||
where
|
||||
Self: Sized,
|
||||
{
|
||||
self.interactivity().focus_style = Some(Box::new(f(StyleRefinement::default())));
|
||||
self
|
||||
}
|
||||
|
||||
/// Set the given styles to be applied when this element is inside another element that is focused.
|
||||
fn in_focus(mut self, f: impl FnOnce(StyleRefinement) -> StyleRefinement) -> Self
|
||||
where
|
||||
Self: Sized,
|
||||
{
|
||||
self.interactivity().in_focus_style = Some(Box::new(f(StyleRefinement::default())));
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) type MouseDownListener =
|
||||
Box<dyn Fn(&MouseDownEvent, DispatchPhase, &Hitbox, &mut Window, &mut App) + 'static>;
|
||||
pub(crate) type MouseUpListener =
|
||||
@@ -2777,126 +2776,6 @@ impl GroupHitboxes {
|
||||
}
|
||||
}
|
||||
|
||||
/// A wrapper around an element that can be focused.
|
||||
pub struct FocusableWrapper<E> {
|
||||
/// The element that is focusable
|
||||
pub element: E,
|
||||
}
|
||||
|
||||
impl<E: InteractiveElement> FocusableElement for FocusableWrapper<E> {}
|
||||
|
||||
impl<E> InteractiveElement for FocusableWrapper<E>
|
||||
where
|
||||
E: InteractiveElement,
|
||||
{
|
||||
fn interactivity(&mut self) -> &mut Interactivity {
|
||||
self.element.interactivity()
|
||||
}
|
||||
}
|
||||
|
||||
impl<E: StatefulInteractiveElement> StatefulInteractiveElement for FocusableWrapper<E> {}
|
||||
|
||||
impl<E> Styled for FocusableWrapper<E>
|
||||
where
|
||||
E: Styled,
|
||||
{
|
||||
fn style(&mut self) -> &mut StyleRefinement {
|
||||
self.element.style()
|
||||
}
|
||||
}
|
||||
|
||||
impl FocusableWrapper<Div> {
|
||||
/// Add a listener to be called when the children of this `Div` are prepainted.
|
||||
/// This allows you to store the [`Bounds`] of the children for later use.
|
||||
pub fn on_children_prepainted(
|
||||
mut self,
|
||||
listener: impl Fn(Vec<Bounds<Pixels>>, &mut Window, &mut App) + 'static,
|
||||
) -> Self {
|
||||
self.element = self.element.on_children_prepainted(listener);
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl<E> Element for FocusableWrapper<E>
|
||||
where
|
||||
E: Element,
|
||||
{
|
||||
type RequestLayoutState = E::RequestLayoutState;
|
||||
type PrepaintState = E::PrepaintState;
|
||||
|
||||
fn id(&self) -> Option<ElementId> {
|
||||
self.element.id()
|
||||
}
|
||||
|
||||
fn source_location(&self) -> Option<&'static core::panic::Location<'static>> {
|
||||
self.element.source_location()
|
||||
}
|
||||
|
||||
fn request_layout(
|
||||
&mut self,
|
||||
id: Option<&GlobalElementId>,
|
||||
inspector_id: Option<&InspectorElementId>,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) -> (LayoutId, Self::RequestLayoutState) {
|
||||
self.element.request_layout(id, inspector_id, window, cx)
|
||||
}
|
||||
|
||||
fn prepaint(
|
||||
&mut self,
|
||||
id: Option<&GlobalElementId>,
|
||||
inspector_id: Option<&InspectorElementId>,
|
||||
bounds: Bounds<Pixels>,
|
||||
state: &mut Self::RequestLayoutState,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) -> E::PrepaintState {
|
||||
self.element
|
||||
.prepaint(id, inspector_id, bounds, state, window, cx)
|
||||
}
|
||||
|
||||
fn paint(
|
||||
&mut self,
|
||||
id: Option<&GlobalElementId>,
|
||||
inspector_id: Option<&InspectorElementId>,
|
||||
bounds: Bounds<Pixels>,
|
||||
request_layout: &mut Self::RequestLayoutState,
|
||||
prepaint: &mut Self::PrepaintState,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) {
|
||||
self.element.paint(
|
||||
id,
|
||||
inspector_id,
|
||||
bounds,
|
||||
request_layout,
|
||||
prepaint,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
impl<E> IntoElement for FocusableWrapper<E>
|
||||
where
|
||||
E: IntoElement,
|
||||
{
|
||||
type Element = E::Element;
|
||||
|
||||
fn into_element(self) -> Self::Element {
|
||||
self.element.into_element()
|
||||
}
|
||||
}
|
||||
|
||||
impl<E> ParentElement for FocusableWrapper<E>
|
||||
where
|
||||
E: ParentElement,
|
||||
{
|
||||
fn extend(&mut self, elements: impl IntoIterator<Item = AnyElement>) {
|
||||
self.element.extend(elements)
|
||||
}
|
||||
}
|
||||
|
||||
/// A wrapper around an element that can store state, produced after assigning an ElementId.
|
||||
pub struct Stateful<E> {
|
||||
pub(crate) element: E,
|
||||
@@ -2927,8 +2806,6 @@ where
|
||||
}
|
||||
}
|
||||
|
||||
impl<E: FocusableElement> FocusableElement for Stateful<E> {}
|
||||
|
||||
impl<E> Element for Stateful<E>
|
||||
where
|
||||
E: Element,
|
||||
|
||||
@@ -25,7 +25,7 @@ use std::{
|
||||
use thiserror::Error;
|
||||
use util::ResultExt;
|
||||
|
||||
use super::{FocusableElement, Stateful, StatefulInteractiveElement};
|
||||
use super::{Stateful, StatefulInteractiveElement};
|
||||
|
||||
/// The delay before showing the loading state.
|
||||
pub const LOADING_DELAY: Duration = Duration::from_millis(200);
|
||||
@@ -509,8 +509,6 @@ impl IntoElement for Img {
|
||||
}
|
||||
}
|
||||
|
||||
impl FocusableElement for Img {}
|
||||
|
||||
impl StatefulInteractiveElement for Img {}
|
||||
|
||||
impl ImageSource {
|
||||
|
||||
@@ -10,6 +10,7 @@ mod surface;
mod svg;
mod text;
mod uniform_list;
mod uniform_table;

pub use anchored::*;
pub use animation::*;
@@ -23,3 +24,4 @@ pub use surface::*;
pub use svg::*;
pub use text::*;
pub use uniform_list::*;
pub use uniform_table::*;

516
crates/gpui/src/elements/uniform_table.rs
Normal file
@@ -0,0 +1,516 @@
|
||||
use std::{cell::RefCell, cmp, ops::Range, rc::Rc};
|
||||
|
||||
use smallvec::SmallVec;
|
||||
|
||||
use crate::{
|
||||
AnyElement, App, AvailableSpace, Bounds, ContentMask, Div, Element, ElementId, GlobalElementId,
|
||||
Hitbox, InspectorElementId, Interactivity, IntoElement, IsZero as _, LayoutId, Length,
|
||||
Overflow, Pixels, ScrollHandle, Size, StyleRefinement, Styled, Window, point, px, size,
|
||||
};
|
||||
|
||||
/// todo!
|
||||
pub struct UniformTable<const COLS: usize> {
|
||||
id: ElementId,
|
||||
row_count: usize,
|
||||
render_rows:
|
||||
Rc<dyn Fn(Range<usize>, &mut Window, &mut App) -> Vec<[AnyElement; COLS]> + 'static>,
|
||||
interactivity: Interactivity,
|
||||
source_location: &'static std::panic::Location<'static>,
|
||||
item_to_measure_index: usize,
|
||||
scroll_handle: Option<UniformTableScrollHandle>, // todo! we either want to make our own or make a shared scroll handle between list and table
|
||||
sizings: [Length; COLS],
|
||||
}
|
||||
|
||||
/// TODO
|
||||
#[track_caller]
|
||||
pub fn uniform_table<const COLS: usize, F>(
|
||||
id: impl Into<ElementId>,
|
||||
row_count: usize,
|
||||
render_rows: F,
|
||||
) -> UniformTable<COLS>
|
||||
where
|
||||
F: 'static + Fn(Range<usize>, &mut Window, &mut App) -> Vec<[AnyElement; COLS]>,
|
||||
{
|
||||
let mut base_style = StyleRefinement::default();
|
||||
base_style.overflow.y = Some(Overflow::Scroll);
|
||||
let id = id.into();
|
||||
|
||||
let mut interactivity = Interactivity::new();
|
||||
interactivity.element_id = Some(id.clone());
|
||||
|
||||
UniformTable {
|
||||
id: id.clone(),
|
||||
row_count,
|
||||
render_rows: Rc::new(render_rows),
|
||||
interactivity: Interactivity {
|
||||
element_id: Some(id),
|
||||
base_style: Box::new(base_style),
|
||||
..Interactivity::new()
|
||||
},
|
||||
source_location: core::panic::Location::caller(),
|
||||
item_to_measure_index: 0,
|
||||
scroll_handle: None,
|
||||
sizings: [Length::Auto; COLS],
|
||||
}
|
||||
}
|
||||
|
||||
impl<const COLS: usize> UniformTable<COLS> {
|
||||
/// todo!
|
||||
pub fn with_width_from_item(mut self, item_index: Option<usize>) -> Self {
|
||||
self.item_to_measure_index = item_index.unwrap_or(0);
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl<const COLS: usize> IntoElement for UniformTable<COLS> {
|
||||
type Element = Self;
|
||||
|
||||
fn into_element(self) -> Self::Element {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl<const COLS: usize> Styled for UniformTable<COLS> {
|
||||
fn style(&mut self) -> &mut StyleRefinement {
|
||||
&mut self.interactivity.base_style
|
||||
}
|
||||
}
|
||||
|
||||
impl<const COLS: usize> Element for UniformTable<COLS> {
|
||||
type RequestLayoutState = ();
|
||||
|
||||
type PrepaintState = (Option<Hitbox>, SmallVec<[AnyElement; 32]>);
|
||||
|
||||
fn id(&self) -> Option<ElementId> {
|
||||
Some(self.id.clone())
|
||||
}
|
||||
|
||||
fn source_location(&self) -> Option<&'static std::panic::Location<'static>> {
|
||||
Some(self.source_location)
|
||||
}
|
||||
|
||||
fn request_layout(
|
||||
&mut self,
|
||||
global_id: Option<&GlobalElementId>,
|
||||
inspector_id: Option<&InspectorElementId>,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) -> (LayoutId, Self::RequestLayoutState) {
|
||||
let measure_cx = MeasureContext::new(self);
|
||||
let item_size = measure_cx.measure_item(AvailableSpace::MinContent, None, window, cx);
|
||||
let layout_id =
|
||||
self.interactivity.request_layout(
|
||||
global_id,
|
||||
inspector_id,
|
||||
window,
|
||||
cx,
|
||||
|style, window, _cx| {
|
||||
window.with_text_style(style.text_style().cloned(), |window| {
|
||||
window.request_measured_layout(
|
||||
style,
|
||||
move |known_dimensions, available_space, window, cx| {
|
||||
let desired_height = item_size.height * measure_cx.row_count;
|
||||
let width = known_dimensions.width.unwrap_or(match available_space
|
||||
.width
|
||||
{
|
||||
AvailableSpace::Definite(x) => x,
|
||||
AvailableSpace::MinContent | AvailableSpace::MaxContent => {
|
||||
item_size.width
|
||||
}
|
||||
});
|
||||
let height =
|
||||
known_dimensions.height.unwrap_or(
|
||||
match available_space.height {
|
||||
AvailableSpace::Definite(height) => desired_height
|
||||
.min(dbg!(window.bounds()).size.height),
|
||||
AvailableSpace::MinContent
|
||||
| AvailableSpace::MaxContent => desired_height,
|
||||
},
|
||||
);
|
||||
size(width, height)
|
||||
},
|
||||
)
|
||||
})
|
||||
},
|
||||
);
|
||||
|
||||
(layout_id, ())
|
||||
}
|
||||
|
||||
fn prepaint(
|
||||
&mut self,
|
||||
global_id: Option<&GlobalElementId>,
|
||||
inspector_id: Option<&InspectorElementId>,
|
||||
bounds: Bounds<Pixels>,
|
||||
_request_layout: &mut Self::RequestLayoutState,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) -> Self::PrepaintState {
|
||||
let style = self
|
||||
.interactivity
|
||||
.compute_style(global_id, None, window, cx);
|
||||
let border = style.border_widths.to_pixels(window.rem_size());
|
||||
let padding = style
|
||||
.padding
|
||||
.to_pixels(bounds.size.into(), window.rem_size());
|
||||
|
||||
let padded_bounds = Bounds::from_corners(
|
||||
bounds.origin + point(border.left + padding.left, border.top + padding.top),
|
||||
bounds.bottom_right()
|
||||
- point(border.right + padding.right, border.bottom + padding.bottom),
|
||||
);
|
||||
|
||||
let can_scroll_horizontally = true;
|
||||
|
||||
let mut column_widths = [Pixels::default(); COLS];
|
||||
let longest_row_size = MeasureContext::new(self).measure_item(
|
||||
AvailableSpace::Definite(bounds.size.width),
|
||||
Some(&mut column_widths),
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
|
||||
// We need to run this for each column:
|
||||
let content_width = padded_bounds.size.width.max(longest_row_size.width);
|
||||
|
||||
let content_size = Size {
|
||||
width: content_width,
|
||||
height: longest_row_size.height * self.row_count + padding.top + padding.bottom,
|
||||
};
|
||||
|
||||
let shared_scroll_offset = self.interactivity.scroll_offset.clone().unwrap();
|
||||
let row_height = longest_row_size.height;
|
||||
let shared_scroll_to_item = self.scroll_handle.as_mut().and_then(|handle| {
|
||||
let mut handle = handle.0.borrow_mut();
|
||||
handle.last_row_size = Some(RowSize {
|
||||
row: padded_bounds.size,
|
||||
contents: content_size,
|
||||
});
|
||||
handle.deferred_scroll_to_item.take()
|
||||
});
|
||||
|
||||
let mut rendered_rows = SmallVec::default();
|
||||
|
||||
let hitbox = self.interactivity.prepaint(
|
||||
global_id,
|
||||
inspector_id,
|
||||
bounds,
|
||||
content_size,
|
||||
window,
|
||||
cx,
|
||||
|style, mut scroll_offset, hitbox, window, cx| {
|
||||
dbg!(bounds, window.bounds());
|
||||
let border = style.border_widths.to_pixels(window.rem_size());
|
||||
let padding = style
|
||||
.padding
|
||||
.to_pixels(bounds.size.into(), window.rem_size());
|
||||
|
||||
let padded_bounds = Bounds::from_corners(
|
||||
bounds.origin + point(border.left + padding.left, border.top),
|
||||
bounds.bottom_right() - point(border.right + padding.right, border.bottom),
|
||||
);
|
||||
|
||||
let y_flipped = if let Some(scroll_handle) = self.scroll_handle.as_mut() {
|
||||
let mut scroll_state = scroll_handle.0.borrow_mut();
|
||||
scroll_state.base_handle.set_bounds(bounds);
|
||||
scroll_state.y_flipped
|
||||
} else {
|
||||
false
|
||||
};
|
||||
|
||||
if self.row_count > 0 {
|
||||
let content_height = row_height * self.row_count + padding.top + padding.bottom;
|
||||
let is_scrolled_vertically = !scroll_offset.y.is_zero();
|
||||
let min_vertical_scroll_offset = padded_bounds.size.height - content_height;
|
||||
if is_scrolled_vertically && scroll_offset.y < min_vertical_scroll_offset {
|
||||
shared_scroll_offset.borrow_mut().y = min_vertical_scroll_offset;
|
||||
scroll_offset.y = min_vertical_scroll_offset;
|
||||
}
|
||||
|
||||
let content_width = content_size.width + padding.left + padding.right;
|
||||
let is_scrolled_horizontally =
|
||||
can_scroll_horizontally && !scroll_offset.x.is_zero();
|
||||
if is_scrolled_horizontally && content_width <= padded_bounds.size.width {
|
||||
shared_scroll_offset.borrow_mut().x = Pixels::ZERO;
|
||||
scroll_offset.x = Pixels::ZERO;
|
||||
}
|
||||
|
||||
if let Some((mut ix, scroll_strategy)) = shared_scroll_to_item {
|
||||
if y_flipped {
|
||||
ix = self.row_count.saturating_sub(ix + 1);
|
||||
}
|
||||
let list_height = dbg!(padded_bounds.size.height);
|
||||
let mut updated_scroll_offset = shared_scroll_offset.borrow_mut();
|
||||
let item_top = row_height * ix + padding.top;
|
||||
let item_bottom = item_top + row_height;
|
||||
let scroll_top = -updated_scroll_offset.y;
|
||||
let mut scrolled_to_top = false;
|
||||
if item_top < scroll_top + padding.top {
|
||||
scrolled_to_top = true;
|
||||
updated_scroll_offset.y = -(item_top) + padding.top;
|
||||
} else if item_bottom > scroll_top + list_height - padding.bottom {
|
||||
scrolled_to_top = true;
|
||||
updated_scroll_offset.y = -(item_bottom - list_height) - padding.bottom;
|
||||
}
|
||||
|
||||
match scroll_strategy {
|
||||
ScrollStrategy::Top => {}
|
||||
ScrollStrategy::Center => {
|
||||
if scrolled_to_top {
|
||||
let item_center = item_top + row_height / 2.0;
|
||||
let target_scroll_top = item_center - list_height / 2.0;
|
||||
|
||||
if item_top < scroll_top
|
||||
|| item_bottom > scroll_top + list_height
|
||||
{
|
||||
updated_scroll_offset.y = -target_scroll_top
|
||||
.max(Pixels::ZERO)
|
||||
.min(content_height - list_height)
|
||||
.max(Pixels::ZERO);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
scroll_offset = *updated_scroll_offset
|
||||
}
|
||||
|
||||
let first_visible_element_ix =
|
||||
(-(scroll_offset.y + padding.top) / row_height).floor() as usize;
|
||||
let last_visible_element_ix = ((-scroll_offset.y + padded_bounds.size.height)
|
||||
/ row_height)
|
||||
.ceil() as usize;
|
||||
let visible_range =
|
||||
first_visible_element_ix..cmp::min(last_visible_element_ix, self.row_count);
|
||||
let rows = if y_flipped {
|
||||
let flipped_range = self.row_count.saturating_sub(visible_range.end)
|
||||
..self.row_count.saturating_sub(visible_range.start);
|
||||
let mut items = (self.render_rows)(flipped_range, window, cx);
|
||||
items.reverse();
|
||||
items
|
||||
} else {
|
||||
(self.render_rows)(visible_range.clone(), window, cx)
|
||||
};
|
||||
|
||||
let content_mask = ContentMask { bounds };
|
||||
window.with_content_mask(Some(content_mask), |window| {
|
||||
let available_width = if can_scroll_horizontally {
|
||||
padded_bounds.size.width + scroll_offset.x.abs()
|
||||
} else {
|
||||
padded_bounds.size.width
|
||||
};
|
||||
let available_space = size(
|
||||
AvailableSpace::Definite(available_width),
|
||||
AvailableSpace::Definite(row_height),
|
||||
);
|
||||
for (mut row, ix) in rows.into_iter().zip(visible_range.clone()) {
|
||||
let row_origin = padded_bounds.origin
|
||||
+ point(
|
||||
if can_scroll_horizontally {
|
||||
scroll_offset.x + padding.left
|
||||
} else {
|
||||
scroll_offset.x
|
||||
},
|
||||
row_height * ix + scroll_offset.y + padding.top,
|
||||
);
|
||||
|
||||
let mut item = render_row(row, column_widths, row_height).into_any();
|
||||
|
||||
item.layout_as_root(available_space, window, cx);
|
||||
item.prepaint_at(row_origin, window, cx);
|
||||
rendered_rows.push(item);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
hitbox
|
||||
},
|
||||
);
|
||||
return (hitbox, rendered_rows);
|
||||
}
|
||||
|
||||
fn paint(
|
||||
&mut self,
|
||||
global_id: Option<&GlobalElementId>,
|
||||
inspector_id: Option<&InspectorElementId>,
|
||||
bounds: Bounds<Pixels>,
|
||||
_: &mut Self::RequestLayoutState,
|
||||
(hitbox, rendered_rows): &mut Self::PrepaintState,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) {
|
||||
self.interactivity.paint(
|
||||
global_id,
|
||||
inspector_id,
|
||||
bounds,
|
||||
hitbox.as_ref(),
|
||||
window,
|
||||
cx,
|
||||
|_, window, cx| {
|
||||
for item in rendered_rows {
|
||||
item.paint(window, cx);
|
||||
}
|
||||
},
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
const DIVIDER_PADDING_PX: Pixels = px(2.0);
|
||||
|
||||
fn render_row<const COLS: usize>(
|
||||
row: [AnyElement; COLS],
|
||||
column_widths: [Pixels; COLS],
|
||||
row_height: Pixels,
|
||||
) -> Div {
|
||||
use crate::ParentElement;
|
||||
let mut div = crate::div().flex().flex_row().gap(DIVIDER_PADDING_PX);
|
||||
|
||||
for (ix, cell) in row.into_iter().enumerate() {
|
||||
div = div.child(
|
||||
crate::div()
|
||||
.w(column_widths[ix])
|
||||
.h(row_height)
|
||||
.overflow_hidden()
|
||||
.child(cell),
|
||||
)
|
||||
}
|
||||
|
||||
div
|
||||
}
|
||||
|
||||
struct MeasureContext<const COLS: usize> {
|
||||
row_count: usize,
|
||||
item_to_measure_index: usize,
|
||||
render_rows:
|
||||
Rc<dyn Fn(Range<usize>, &mut Window, &mut App) -> Vec<[AnyElement; COLS]> + 'static>,
|
||||
sizings: [Length; COLS],
|
||||
}
|
||||
|
||||
impl<const COLS: usize> MeasureContext<COLS> {
|
||||
fn new(table: &UniformTable<COLS>) -> Self {
|
||||
Self {
|
||||
row_count: table.row_count,
|
||||
item_to_measure_index: table.item_to_measure_index,
|
||||
render_rows: table.render_rows.clone(),
|
||||
sizings: table.sizings,
|
||||
}
|
||||
}
|
||||
|
||||
fn measure_item(
|
||||
&self,
|
||||
table_width: AvailableSpace,
|
||||
column_sizes: Option<&mut [Pixels; COLS]>,
|
||||
window: &mut Window,
|
||||
cx: &mut App,
|
||||
) -> Size<Pixels> {
|
||||
if self.row_count == 0 {
|
||||
return Size::default();
|
||||
}
|
||||
|
||||
let item_ix = cmp::min(self.item_to_measure_index, self.row_count - 1);
|
||||
let mut items = (self.render_rows)(item_ix..item_ix + 1, window, cx);
|
||||
let Some(mut item_to_measure) = items.pop() else {
|
||||
return Size::default();
|
||||
};
|
||||
let mut default_column_sizes = [Pixels::default(); COLS];
|
||||
let column_sizes = column_sizes.unwrap_or(&mut default_column_sizes);

let mut row_height = px(0.0);
for i in 0..COLS {
let column_available_width = match self.sizings[i] {
Length::Definite(definite_length) => match table_width {
AvailableSpace::Definite(pixels) => AvailableSpace::Definite(
definite_length.to_pixels(pixels.into(), window.rem_size()),
),
AvailableSpace::MinContent => AvailableSpace::MinContent,
AvailableSpace::MaxContent => AvailableSpace::MaxContent,
},
Length::Auto => AvailableSpace::MaxContent,
};

let column_available_space = size(column_available_width, AvailableSpace::MinContent);

// todo!: Adjust row sizing to account for inter-column spacing
let cell_size = item_to_measure[i].layout_as_root(column_available_space, window, cx);
column_sizes[i] = cell_size.width;
row_height = row_height.max(cell_size.height);
}

let mut width = Pixels::ZERO;

for size in *column_sizes {
width += size;
}

Size::new(width + (COLS - 1) * DIVIDER_PADDING_PX, row_height)
}
}

impl<const COLS: usize> UniformTable<COLS> {}

/// A handle for controlling the scroll position of a uniform list.
/// This should be stored in your view and passed to the uniform_list on each frame.
#[derive(Clone, Debug, Default)]
pub struct UniformTableScrollHandle(pub Rc<RefCell<UniformTableScrollState>>);

/// Where to place the element scrolled to.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum ScrollStrategy {
/// Place the element at the top of the list's viewport.
Top,
/// Attempt to place the element in the middle of the list's viewport.
/// May not be possible if there's not enough list items above the item scrolled to:
/// in this case, the element will be placed at the closest possible position.
Center,
}

#[derive(Copy, Clone, Debug, Default)]
/// The size of the item and its contents.
pub struct RowSize {
/// The size of the item.
pub row: Size<Pixels>,
/// The size of the item's contents, which may be larger than the item itself,
/// if the item was bounded by a parent element.
pub contents: Size<Pixels>,
}

#[derive(Clone, Debug, Default)]
#[allow(missing_docs)]
pub struct UniformTableScrollState {
pub base_handle: ScrollHandle,
pub deferred_scroll_to_item: Option<(usize, ScrollStrategy)>,
/// Size of the item, captured during last layout.
pub last_row_size: Option<RowSize>,
/// Whether the list was vertically flipped during last layout.
pub y_flipped: bool,
}

impl UniformTableScrollHandle {
/// Create a new scroll handle to bind to a uniform list.
pub fn new() -> Self {
Self(Rc::new(RefCell::new(UniformTableScrollState {
base_handle: ScrollHandle::new(),
deferred_scroll_to_item: None,
last_row_size: None,
y_flipped: false,
})))
}

/// Scroll the list to the given item index.
pub fn scroll_to_item(&self, ix: usize, strategy: ScrollStrategy) {
self.0.borrow_mut().deferred_scroll_to_item = Some((ix, strategy));
}

/// Check if the list is flipped vertically.
pub fn y_flipped(&self) -> bool {
self.0.borrow().y_flipped
}

/// Get the index of the topmost visible child.
#[cfg(any(test, feature = "test-support"))]
pub fn logical_scroll_top_index(&self) -> usize {
let this = self.0.borrow();
this.deferred_scroll_to_item
.map(|(ix, _)| ix)
.unwrap_or_else(|| this.base_handle.logical_scroll_top().0)
}
}
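A minimal usage sketch of the scroll handle defined above; the surrounding view type and the way the handle is attached to the table are assumptions for illustration, not part of this change:

// Sketch only: `TableView` is a made-up view struct. In practice the handle lives on
// whatever view renders the table and is handed to it on each frame.
struct TableView {
    scroll_handle: UniformTableScrollHandle,
}

impl TableView {
    fn new() -> Self {
        Self {
            scroll_handle: UniformTableScrollHandle::new(),
        }
    }

    fn jump_to_row(&self, row: usize) {
        // The target is recorded in `deferred_scroll_to_item` and applied on the next layout pass.
        self.scroll_handle.scroll_to_item(row, ScrollStrategy::Center);
    }
}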
@@ -2,7 +2,7 @@ use std::rc::Rc;

use collections::HashMap;

use crate::{Action, InvalidKeystrokeError, KeyBindingContextPredicate, Keystroke};
use crate::{Action, InvalidKeystrokeError, KeyBindingContextPredicate, Keystroke, SharedString};
use smallvec::SmallVec;

/// A keybinding and its associated metadata, from the keymap.
@@ -11,6 +11,8 @@ pub struct KeyBinding {
pub(crate) keystrokes: SmallVec<[Keystroke; 2]>,
pub(crate) context_predicate: Option<Rc<KeyBindingContextPredicate>>,
pub(crate) meta: Option<KeyBindingMetaIndex>,
/// The json input string used when building the keybinding, if any
pub(crate) action_input: Option<SharedString>,
}

impl Clone for KeyBinding {
@@ -20,6 +22,7 @@ impl Clone for KeyBinding {
keystrokes: self.keystrokes.clone(),
context_predicate: self.context_predicate.clone(),
meta: self.meta,
action_input: self.action_input.clone(),
}
}
}
@@ -32,7 +35,7 @@ impl KeyBinding {
} else {
None
};
Self::load(keystrokes, Box::new(action), context_predicate, None).unwrap()
Self::load(keystrokes, Box::new(action), context_predicate, None, None).unwrap()
}

/// Load a keybinding from the given raw data.
@@ -41,6 +44,7 @@ impl KeyBinding {
action: Box<dyn Action>,
context_predicate: Option<Rc<KeyBindingContextPredicate>>,
key_equivalents: Option<&HashMap<char, char>>,
action_input: Option<SharedString>,
) -> std::result::Result<Self, InvalidKeystrokeError> {
let mut keystrokes: SmallVec<[Keystroke; 2]> = keystrokes
.split_whitespace()
@@ -62,6 +66,7 @@ impl KeyBinding {
action,
context_predicate,
meta: None,
action_input,
})
}

@@ -110,6 +115,11 @@ impl KeyBinding {
pub fn meta(&self) -> Option<KeyBindingMetaIndex> {
self.meta
}

/// Get the action input associated with the action for this binding
pub fn action_input(&self) -> Option<SharedString> {
self.action_input.clone()
}
}

impl std::fmt::Debug for KeyBinding {
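The extra `action_input` parameter threads the JSON used to build the action through to the binding. A hedged sketch of a call to the new five-argument `load`, using a placeholder action type (`ExampleAction` is not part of this diff):

// Sketch: the last argument is the raw JSON input the action was built from.
let binding = KeyBinding::load(
    "ctrl-k ctrl-t",
    Box::new(ExampleAction),          // any type implementing `Action`
    None,                             // no context predicate
    None,                             // no macOS key equivalents
    Some(SharedString::from(r#"{"mode":"fast"}"#)),
)
.expect("keystrokes should parse");

assert_eq!(
    binding.action_input(),
    Some(SharedString::from(r#"{"mode":"fast"}"#))
);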
@@ -1,4 +1,4 @@
|
||||
use crate::{Capslock, xcb_flush};
|
||||
use crate::Capslock;
|
||||
use core::str;
|
||||
use std::{
|
||||
cell::RefCell,
|
||||
@@ -378,7 +378,6 @@ impl X11Client {
|
||||
xcb_connection
|
||||
.xkb_use_extension(XKB_X11_MIN_MAJOR_XKB_VERSION, XKB_X11_MIN_MINOR_XKB_VERSION),
|
||||
)?;
|
||||
assert!(xkb.supported);
|
||||
|
||||
let events = xkb::EventType::STATE_NOTIFY
|
||||
| xkb::EventType::MAP_NOTIFY
|
||||
@@ -402,6 +401,7 @@ impl X11Client {
|
||||
&xkb::SelectEventsAux::new(),
|
||||
),
|
||||
)?;
|
||||
assert!(xkb.supported);
|
||||
|
||||
let xkb_context = xkbc::Context::new(xkbc::CONTEXT_NO_FLAGS);
|
||||
let xkb_device_id = xkbc::x11::get_core_keyboard_device_id(&xcb_connection);
|
||||
@@ -484,8 +484,6 @@ impl X11Client {
|
||||
})
|
||||
.map_err(|err| anyhow!("Failed to initialize XDP event source: {err:?}"))?;
|
||||
|
||||
xcb_flush(&xcb_connection);
|
||||
|
||||
Ok(X11Client(Rc::new(RefCell::new(X11ClientState {
|
||||
modifiers: Modifiers::default(),
|
||||
capslock: Capslock::default(),
|
||||
@@ -1525,7 +1523,6 @@ impl LinuxClient for X11Client {
|
||||
),
|
||||
)
|
||||
.log_err();
|
||||
xcb_flush(&state.xcb_connection);
|
||||
|
||||
let window_ref = WindowRef {
|
||||
window: window.0.clone(),
|
||||
@@ -1557,18 +1554,19 @@ impl LinuxClient for X11Client {
|
||||
};
|
||||
|
||||
state.cursor_styles.insert(focused_window, style);
|
||||
check_reply(
|
||||
|| "Failed to set cursor style",
|
||||
state.xcb_connection.change_window_attributes(
|
||||
state
|
||||
.xcb_connection
|
||||
.change_window_attributes(
|
||||
focused_window,
|
||||
&ChangeWindowAttributesAux {
|
||||
cursor: Some(cursor),
|
||||
..Default::default()
|
||||
},
|
||||
),
|
||||
)
|
||||
.log_err();
|
||||
state.xcb_connection.flush().log_err();
|
||||
)
|
||||
.anyhow()
|
||||
.and_then(|cookie| cookie.check().anyhow())
|
||||
.context("X11: Failed to set cursor style")
|
||||
.log_err();
|
||||
}
|
||||
|
||||
fn open_uri(&self, uri: &str) {
|
||||
@@ -2089,7 +2087,6 @@ fn xdnd_send_finished(
|
||||
xcb_connection.send_event(false, target, EventMask::default(), message),
|
||||
)
|
||||
.log_err();
|
||||
xcb_connection.flush().log_err();
|
||||
}
|
||||
|
||||
fn xdnd_send_status(
|
||||
@@ -2112,7 +2109,6 @@ fn xdnd_send_status(
|
||||
xcb_connection.send_event(false, target, EventMask::default(), message),
|
||||
)
|
||||
.log_err();
|
||||
xcb_connection.flush().log_err();
|
||||
}
|
||||
|
||||
/// Recomputes `pointer_device_states` by querying all pointer devices.
|
||||
@@ -2266,6 +2262,6 @@ fn create_invisible_cursor(
|
||||
|
||||
connection.free_pixmap(empty_pixmap)?;
|
||||
|
||||
xcb_flush(connection);
|
||||
connection.flush()?;
|
||||
Ok(cursor)
|
||||
}
|
||||
|
||||
@@ -320,13 +320,6 @@ impl rwh::HasDisplayHandle for X11Window {
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn xcb_flush(xcb: &XCBConnection) {
|
||||
xcb.flush()
|
||||
.map_err(handle_connection_error)
|
||||
.context("X11 flush failed")
|
||||
.log_err();
|
||||
}
|
||||
|
||||
pub(crate) fn check_reply<E, F, C>(
|
||||
failure_context: F,
|
||||
result: Result<VoidCookie<'_, C>, ConnectionError>,
|
||||
@@ -604,7 +597,7 @@ impl X11WindowState {
|
||||
),
|
||||
)?;
|
||||
|
||||
xcb_flush(&xcb);
|
||||
xcb.flush()?;
|
||||
|
||||
let renderer = {
|
||||
let raw_window = RawWindow {
|
||||
@@ -664,7 +657,7 @@ impl X11WindowState {
|
||||
|| "X11 DestroyWindow failed while cleaning it up after setup failure.",
|
||||
xcb.destroy_window(x_window),
|
||||
)?;
|
||||
xcb_flush(&xcb);
|
||||
xcb.flush()?;
|
||||
}
|
||||
|
||||
setup_result
|
||||
@@ -692,7 +685,7 @@ impl Drop for X11WindowHandle {
|
||||
|| "X11 DestroyWindow failed while dropping X11WindowHandle.",
|
||||
self.xcb.destroy_window(self.id),
|
||||
)?;
|
||||
xcb_flush(&self.xcb);
|
||||
self.xcb.flush()?;
|
||||
anyhow::Ok(())
|
||||
})
|
||||
.log_err();
|
||||
@@ -711,7 +704,7 @@ impl Drop for X11Window {
|
||||
|| "X11 DestroyWindow failure.",
|
||||
self.0.xcb.destroy_window(self.0.x_window),
|
||||
)?;
|
||||
xcb_flush(&self.0.xcb);
|
||||
self.0.xcb.flush()?;
|
||||
|
||||
anyhow::Ok(())
|
||||
})
|
||||
@@ -806,9 +799,7 @@ impl X11Window {
|
||||
xproto::EventMask::SUBSTRUCTURE_REDIRECT | xproto::EventMask::SUBSTRUCTURE_NOTIFY,
|
||||
message,
|
||||
),
|
||||
)?;
|
||||
xcb_flush(&self.0.xcb);
|
||||
Ok(())
|
||||
)
|
||||
}
|
||||
|
||||
fn get_root_position(
|
||||
@@ -861,8 +852,15 @@ impl X11Window {
|
||||
),
|
||||
)?;
|
||||
|
||||
xcb_flush(&self.0.xcb);
Ok(())
self.flush()
}

fn flush(&self) -> anyhow::Result<()> {
self.0
.xcb
.flush()
.map_err(handle_connection_error)
.context("X11 flush failed")
}
}
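With the helper above, call sites stop reaching for the removed free-standing xcb_flush and instead either propagate or log the flush result. A standalone sketch of the two call styles (types simplified; this is not the gpui code itself):

// Illustrative only: `Conn::flush` stands in for the X11Window::flush helper above.
use anyhow::Result;

struct Conn;

impl Conn {
    fn flush(&self) -> Result<()> {
        Ok(()) // real code maps the xcb ConnectionError and adds context here
    }
}

fn resize(conn: &Conn) -> Result<()> {
    conn.flush() // fallible paths propagate the error to the caller
}

fn set_cursor(conn: &Conn) {
    if let Err(err) = conn.flush() {
        log::warn!("{err:?}"); // fire-and-forget paths mirror `.log_err()` in the diff
    }
}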
|
||||
|
||||
@@ -1200,7 +1198,7 @@ impl PlatformWindow for X11Window {
|
||||
),
|
||||
)
|
||||
.log_err();
|
||||
xcb_flush(&self.0.xcb);
|
||||
self.flush().log_err();
|
||||
}
|
||||
|
||||
fn scale_factor(&self) -> f32 {
|
||||
@@ -1291,7 +1289,7 @@ impl PlatformWindow for X11Window {
|
||||
xproto::Time::CURRENT_TIME,
|
||||
)
|
||||
.log_err();
|
||||
xcb_flush(&self.0.xcb);
|
||||
self.flush().log_err();
|
||||
}
|
||||
|
||||
fn is_active(&self) -> bool {
|
||||
@@ -1326,7 +1324,7 @@ impl PlatformWindow for X11Window {
|
||||
),
|
||||
)
|
||||
.log_err();
|
||||
xcb_flush(&self.0.xcb);
|
||||
self.flush().log_err();
|
||||
}
|
||||
|
||||
fn set_app_id(&mut self, app_id: &str) {
|
||||
|
||||
@@ -3,7 +3,7 @@
//! application to avoid having to import each trait individually.

pub use crate::{
AppContext as _, BorrowAppContext, Context, Element, FocusableElement, InteractiveElement,
IntoElement, ParentElement, Refineable, Render, RenderOnce, StatefulInteractiveElement, Styled,
StyledImage, VisualContext, util::FluentBuilder,
AppContext as _, BorrowAppContext, Context, Element, InteractiveElement, IntoElement,
ParentElement, Refineable, Render, RenderOnce, StatefulInteractiveElement, Styled, StyledImage,
VisualContext, util::FluentBuilder,
};

@@ -28,10 +28,8 @@ const EXPECT_MESSAGE: &str = "we should avoid taffy layout errors by constructio

impl TaffyLayoutEngine {
pub fn new() -> Self {
let mut taffy = TaffyTree::new();
taffy.disable_rounding();
TaffyLayoutEngine {
taffy,
taffy: TaffyTree::new(),
absolute_layout_bounds: FxHashMap::default(),
computed_layouts: FxHashSet::default(),
}

@@ -206,20 +206,8 @@ slotmap::new_key_type! {
}

thread_local! {
pub(crate) static ELEMENT_ARENA: RefCell<Arena> = RefCell::new(Arena::new(1024 * 1024));
}

/// Returned when the element arena has been used and so must be cleared before the next draw.
#[must_use]
pub struct ArenaClearNeeded;

impl ArenaClearNeeded {
/// Clear the element arena.
pub fn clear(self) {
ELEMENT_ARENA.with_borrow_mut(|element_arena| {
element_arena.clear();
});
}
/// 8MB wasn't quite enough...
pub(crate) static ELEMENT_ARENA: RefCell<Arena> = RefCell::new(Arena::new(32 * 1024 * 1024));
}

pub(crate) type FocusMap = RwLock<SlotMap<FocusId, AtomicUsize>>;
@@ -980,10 +968,8 @@ impl Window {
measure("frame duration", || {
handle
.update(&mut cx, |_, window, cx| {
let arena_clear_needed = window.draw(cx);
window.draw(cx);
window.present();
// drop the arena elements after present to reduce latency
arena_clear_needed.clear();
})
.log_err();
})
@@ -1744,7 +1730,7 @@ impl Window {
/// Produces a new frame and assigns it to `rendered_frame`. To actually show
/// the contents of the new [Scene], use [present].
#[profiling::function]
pub fn draw(&mut self, cx: &mut App) -> ArenaClearNeeded {
pub fn draw(&mut self, cx: &mut App) {
self.invalidate_entities();
cx.entities.clear_accessed();
debug_assert!(self.rendered_entity_stack.is_empty());
@@ -1768,6 +1754,13 @@ impl Window {
self.layout_engine.as_mut().unwrap().clear();
self.text_system().finish_frame();
self.next_frame.finish(&mut self.rendered_frame);
ELEMENT_ARENA.with_borrow_mut(|element_arena| {
let percentage = (element_arena.len() as f32 / element_arena.capacity() as f32) * 100.;
if percentage >= 80. {
log::warn!("elevated element arena occupation: {}.", percentage);
}
element_arena.clear();
});

self.invalidator.set_phase(DrawPhase::Focus);
let previous_focus_path = self.rendered_frame.focus_path();
@@ -1809,8 +1802,6 @@ impl Window {
self.refreshing = false;
self.invalidator.set_phase(DrawPhase::None);
self.needs_present.set(true);

ArenaClearNeeded
}

fn record_entities_accessed(&mut self, cx: &mut App) {
@@ -3476,7 +3467,7 @@ impl Window {

fn dispatch_key_event(&mut self, event: &dyn Any, cx: &mut App) {
if self.invalidator.is_dirty() {
self.draw(cx).clear();
self.draw(cx);
}

let node_id = self.focus_node_id_in_rendered_frame(self.focus);

@@ -20,7 +20,6 @@ test-support = [
"text/test-support",
"tree-sitter-rust",
"tree-sitter-python",
"tree-sitter-rust",
"tree-sitter-typescript",
"settings/test-support",
"util/test-support",
@@ -1,6 +1,12 @@
|
||||
pub use crate::{
|
||||
Grammar, Language, LanguageRegistry,
|
||||
diagnostic_set::DiagnosticSet,
|
||||
highlight_map::{HighlightId, HighlightMap},
|
||||
proto,
|
||||
};
|
||||
use crate::{
|
||||
DebuggerTextObject, LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag,
|
||||
TextObject, TreeSitterOptions,
|
||||
LanguageScope, Outline, OutlineConfig, RunnableCapture, RunnableTag, TextObject,
|
||||
TreeSitterOptions,
|
||||
diagnostic_set::{DiagnosticEntry, DiagnosticGroup},
|
||||
language_settings::{LanguageSettings, language_settings},
|
||||
outline::OutlineItem,
|
||||
@@ -11,12 +17,6 @@ use crate::{
|
||||
task_context::RunnableRange,
|
||||
text_diff::text_diff,
|
||||
};
|
||||
pub use crate::{
|
||||
Grammar, Language, LanguageRegistry,
|
||||
diagnostic_set::DiagnosticSet,
|
||||
highlight_map::{HighlightId, HighlightMap},
|
||||
proto,
|
||||
};
|
||||
use anyhow::{Context as _, Result};
|
||||
pub use clock::ReplicaId;
|
||||
use clock::{AGENT_REPLICA_ID, Lamport};
|
||||
@@ -3848,74 +3848,6 @@ impl BufferSnapshot {
|
||||
.filter(|pair| !pair.newline_only)
|
||||
}
|
||||
|
||||
pub fn debug_variables_query<T: ToOffset>(
|
||||
&self,
|
||||
range: Range<T>,
|
||||
) -> impl Iterator<Item = (Range<usize>, DebuggerTextObject)> + '_ {
|
||||
let range = range.start.to_offset(self).saturating_sub(1)
|
||||
..self.len().min(range.end.to_offset(self) + 1);
|
||||
|
||||
let mut matches = self.syntax.matches_with_options(
|
||||
range.clone(),
|
||||
&self.text,
|
||||
TreeSitterOptions::default(),
|
||||
|grammar| grammar.debug_variables_config.as_ref().map(|c| &c.query),
|
||||
);
|
||||
|
||||
let configs = matches
|
||||
.grammars()
|
||||
.iter()
|
||||
.map(|grammar| grammar.debug_variables_config.as_ref())
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let mut captures = Vec::<(Range<usize>, DebuggerTextObject)>::new();
|
||||
|
||||
iter::from_fn(move || {
|
||||
loop {
|
||||
while let Some(capture) = captures.pop() {
|
||||
if capture.0.overlaps(&range) {
|
||||
return Some(capture);
|
||||
}
|
||||
}
|
||||
|
||||
let mat = matches.peek()?;
|
||||
|
||||
let Some(config) = configs[mat.grammar_index].as_ref() else {
|
||||
matches.advance();
|
||||
continue;
|
||||
};
|
||||
|
||||
for capture in mat.captures {
|
||||
let Some(ix) = config
|
||||
.objects_by_capture_ix
|
||||
.binary_search_by_key(&capture.index, |e| e.0)
|
||||
.ok()
|
||||
else {
|
||||
continue;
|
||||
};
|
||||
let text_object = config.objects_by_capture_ix[ix].1;
|
||||
let byte_range = capture.node.byte_range();
|
||||
|
||||
let mut found = false;
|
||||
for (range, existing) in captures.iter_mut() {
|
||||
if existing == &text_object {
|
||||
range.start = range.start.min(byte_range.start);
|
||||
range.end = range.end.max(byte_range.end);
|
||||
found = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if !found {
|
||||
captures.push((byte_range, text_object));
|
||||
}
|
||||
}
|
||||
|
||||
matches.advance();
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
pub fn text_object_ranges<T: ToOffset>(
|
||||
&self,
|
||||
range: Range<T>,
|
||||
|
||||
@@ -1082,7 +1082,6 @@ pub struct Grammar {
|
||||
pub embedding_config: Option<EmbeddingConfig>,
|
||||
pub(crate) injection_config: Option<InjectionConfig>,
|
||||
pub(crate) override_config: Option<OverrideConfig>,
|
||||
pub(crate) debug_variables_config: Option<DebugVariablesConfig>,
|
||||
pub(crate) highlight_map: Mutex<HighlightMap>,
|
||||
}
|
||||
|
||||
@@ -1105,22 +1104,6 @@ pub struct OutlineConfig {
|
||||
pub annotation_capture_ix: Option<u32>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum DebuggerTextObject {
Variable,
Scope,
}

impl DebuggerTextObject {
pub fn from_capture_name(name: &str) -> Option<DebuggerTextObject> {
match name {
"debug-variable" => Some(DebuggerTextObject::Variable),
"debug-scope" => Some(DebuggerTextObject::Scope),
_ => None,
}
}
}
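For reference, the mapping dropped by this hunk translated tree-sitter capture names into debugger text objects; any other capture name fell through to None. A short sketch of how it behaved (kept only as documentation of the removed API):

// Behaviour of the removed helper, shown as assertions.
assert_eq!(
    DebuggerTextObject::from_capture_name("debug-variable"),
    Some(DebuggerTextObject::Variable)
);
assert_eq!(
    DebuggerTextObject::from_capture_name("debug-scope"),
    Some(DebuggerTextObject::Scope)
);
assert_eq!(DebuggerTextObject::from_capture_name("locals"), None);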
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq)]
|
||||
pub enum TextObject {
|
||||
InsideFunction,
|
||||
@@ -1223,11 +1206,6 @@ struct BracketsPatternConfig {
|
||||
newline_only: bool,
|
||||
}
|
||||
|
||||
pub struct DebugVariablesConfig {
|
||||
pub query: Query,
|
||||
pub objects_by_capture_ix: Vec<(u32, DebuggerTextObject)>,
|
||||
}
|
||||
|
||||
impl Language {
|
||||
pub fn new(config: LanguageConfig, ts_language: Option<tree_sitter::Language>) -> Self {
|
||||
Self::new_with_id(LanguageId::new(), config, ts_language)
|
||||
@@ -1259,7 +1237,6 @@ impl Language {
|
||||
redactions_config: None,
|
||||
runnable_config: None,
|
||||
error_query: Query::new(&ts_language, "(ERROR) @error").ok(),
|
||||
debug_variables_config: None,
|
||||
ts_language,
|
||||
highlight_map: Default::default(),
|
||||
})
|
||||
@@ -1330,11 +1307,6 @@ impl Language {
|
||||
.with_text_object_query(query.as_ref())
|
||||
.context("Error loading textobject query")?;
|
||||
}
|
||||
if let Some(query) = queries.debugger {
|
||||
self = self
|
||||
.with_debug_variables_query(query.as_ref())
|
||||
.context("Error loading debug variables query")?;
|
||||
}
|
||||
Ok(self)
|
||||
}
|
||||
|
||||
@@ -1453,24 +1425,6 @@ impl Language {
|
||||
Ok(self)
|
||||
}
|
||||
|
||||
pub fn with_debug_variables_query(mut self, source: &str) -> Result<Self> {
|
||||
let grammar = self.grammar_mut().context("cannot mutate grammar")?;
|
||||
let query = Query::new(&grammar.ts_language, source)?;
|
||||
|
||||
let mut objects_by_capture_ix = Vec::new();
|
||||
for (ix, name) in query.capture_names().iter().enumerate() {
|
||||
if let Some(text_object) = DebuggerTextObject::from_capture_name(name) {
|
||||
objects_by_capture_ix.push((ix as u32, text_object));
|
||||
}
|
||||
}
|
||||
|
||||
grammar.debug_variables_config = Some(DebugVariablesConfig {
|
||||
query,
|
||||
objects_by_capture_ix,
|
||||
});
|
||||
Ok(self)
|
||||
}
|
||||
|
||||
pub fn with_brackets_query(mut self, source: &str) -> Result<Self> {
|
||||
let grammar = self.grammar_mut().context("cannot mutate grammar")?;
|
||||
let query = Query::new(&grammar.ts_language, source)?;
|
||||
@@ -1976,10 +1930,6 @@ impl Grammar {
|
||||
.capture_index_for_name(name)?;
|
||||
Some(self.highlight_map.lock().get(capture_id))
|
||||
}
|
||||
|
||||
pub fn debug_variables_config(&self) -> Option<&DebugVariablesConfig> {
|
||||
self.debug_variables_config.as_ref()
|
||||
}
|
||||
}
|
||||
|
||||
impl CodeLabel {
|
||||
|
||||
@@ -157,9 +157,6 @@ pub enum BinaryStatus {
|
||||
None,
|
||||
CheckingForUpdate,
|
||||
Downloading,
|
||||
Starting,
|
||||
Stopping,
|
||||
Stopped,
|
||||
Failed { error: String },
|
||||
}
|
||||
|
||||
@@ -229,7 +226,7 @@ pub const QUERY_FILENAME_PREFIXES: &[(
|
||||
("overrides", |q| &mut q.overrides),
|
||||
("redactions", |q| &mut q.redactions),
|
||||
("runnables", |q| &mut q.runnables),
|
||||
("debugger", |q| &mut q.debugger),
|
||||
("debug_variables", |q| &mut q.debug_variables),
|
||||
("textobjects", |q| &mut q.text_objects),
|
||||
];
|
||||
|
||||
@@ -246,12 +243,12 @@ pub struct LanguageQueries {
|
||||
pub redactions: Option<Cow<'static, str>>,
|
||||
pub runnables: Option<Cow<'static, str>>,
|
||||
pub text_objects: Option<Cow<'static, str>>,
|
||||
pub debugger: Option<Cow<'static, str>>,
|
||||
pub debug_variables: Option<Cow<'static, str>>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Default)]
|
||||
struct ServerStatusSender {
|
||||
txs: Arc<Mutex<Vec<mpsc::UnboundedSender<(LanguageServerName, BinaryStatus)>>>>,
|
||||
txs: Arc<Mutex<Vec<mpsc::UnboundedSender<(LanguageServerName, LanguageServerStatusUpdate)>>>>,
|
||||
}
|
||||
|
||||
pub struct LoadedLanguage {
|
||||
@@ -1088,7 +1085,11 @@ impl LanguageRegistry {
|
||||
self.state.read().all_lsp_adapters.get(name).cloned()
|
||||
}
|
||||
|
||||
pub fn update_lsp_binary_status(&self, server_name: LanguageServerName, status: BinaryStatus) {
pub fn update_lsp_status(
&self,
server_name: LanguageServerName,
status: LanguageServerStatusUpdate,
) {
self.lsp_binary_status_tx.send(server_name, status);
}
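A hedged sketch of a call to the renamed method; the server name is made up, and `LanguageServerStatusUpdate::Binary` is assumed from the extension-proxy hunk further down in this diff:

// Illustrative call site: wrap the old BinaryStatus in the new status-update enum.
registry.update_lsp_status(
    LanguageServerName("example-language-server".into()),
    LanguageServerStatusUpdate::Binary(BinaryStatus::CheckingForUpdate),
);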
|
||||
|
||||
@@ -1144,7 +1145,7 @@ impl LanguageRegistry {
|
||||
|
||||
pub fn language_server_binary_statuses(
|
||||
&self,
|
||||
) -> mpsc::UnboundedReceiver<(LanguageServerName, BinaryStatus)> {
|
||||
) -> mpsc::UnboundedReceiver<(LanguageServerName, LanguageServerStatusUpdate)> {
|
||||
self.lsp_binary_status_tx.subscribe()
|
||||
}
|
||||
|
||||
@@ -1259,13 +1260,15 @@ impl LanguageRegistryState {
|
||||
}
|
||||
|
||||
impl ServerStatusSender {
|
||||
fn subscribe(&self) -> mpsc::UnboundedReceiver<(LanguageServerName, BinaryStatus)> {
|
||||
fn subscribe(
|
||||
&self,
|
||||
) -> mpsc::UnboundedReceiver<(LanguageServerName, LanguageServerStatusUpdate)> {
|
||||
let (tx, rx) = mpsc::unbounded();
|
||||
self.txs.lock().push(tx);
|
||||
rx
|
||||
}
|
||||
|
||||
fn send(&self, name: LanguageServerName, status: BinaryStatus) {
|
||||
fn send(&self, name: LanguageServerName, status: LanguageServerStatusUpdate) {
|
||||
let mut txs = self.txs.lock();
|
||||
txs.retain(|tx| tx.unbounded_send((name.clone(), status.clone())).is_ok());
|
||||
}
|
||||
|
||||
@@ -12,8 +12,8 @@ use fs::Fs;
|
||||
use futures::{Future, FutureExt};
|
||||
use gpui::AsyncApp;
|
||||
use language::{
|
||||
BinaryStatus, CodeLabel, HighlightId, Language, LanguageName, LanguageToolchainStore,
|
||||
LspAdapter, LspAdapterDelegate,
|
||||
BinaryStatus, CodeLabel, HighlightId, Language, LanguageName, LanguageServerStatusUpdate,
|
||||
LanguageToolchainStore, LspAdapter, LspAdapterDelegate,
|
||||
};
|
||||
use lsp::{CodeActionKind, LanguageServerBinary, LanguageServerBinaryOptions, LanguageServerName};
|
||||
use serde::Serialize;
|
||||
@@ -82,8 +82,10 @@ impl ExtensionLanguageServerProxy for LanguageServerRegistryProxy {
|
||||
language_server_id: LanguageServerName,
|
||||
status: BinaryStatus,
|
||||
) {
|
||||
self.language_registry
|
||||
.update_lsp_binary_status(language_server_id, status);
|
||||
self.language_registry.update_lsp_status(
|
||||
language_server_id,
|
||||
LanguageServerStatusUpdate::Binary(status),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -67,9 +67,6 @@ pub enum LanguageModelCompletionEvent {
|
||||
text: String,
|
||||
signature: Option<String>,
|
||||
},
|
||||
RedactedThinking {
|
||||
data: String,
|
||||
},
|
||||
ToolUse(LanguageModelToolUse),
|
||||
StartMessage {
|
||||
message_id: String,
|
||||
@@ -362,7 +359,6 @@ pub trait LanguageModel: Send + Sync {
|
||||
Ok(LanguageModelCompletionEvent::StartMessage { .. }) => None,
|
||||
Ok(LanguageModelCompletionEvent::Text(text)) => Some(Ok(text)),
|
||||
Ok(LanguageModelCompletionEvent::Thinking { .. }) => None,
|
||||
Ok(LanguageModelCompletionEvent::RedactedThinking { .. }) => None,
|
||||
Ok(LanguageModelCompletionEvent::Stop(_)) => None,
|
||||
Ok(LanguageModelCompletionEvent::ToolUse(_)) => None,
|
||||
Ok(LanguageModelCompletionEvent::UsageUpdate(token_usage)) => {
|
||||
|
||||
@@ -303,7 +303,7 @@ pub enum MessageContent {
|
||||
text: String,
|
||||
signature: Option<String>,
|
||||
},
|
||||
RedactedThinking(String),
|
||||
RedactedThinking(Vec<u8>),
|
||||
Image(LanguageModelImage),
|
||||
ToolUse(LanguageModelToolUse),
|
||||
ToolResult(LanguageModelToolResult),
|
||||
|
||||
@@ -42,6 +42,7 @@ open_ai = { workspace = true, features = ["schemars"] }
|
||||
open_router = { workspace = true, features = ["schemars"] }
|
||||
vercel = { workspace = true, features = ["schemars"] }
|
||||
partial-json-fixer.workspace = true
|
||||
project.workspace = true
|
||||
proto.workspace = true
|
||||
release_channel.workspace = true
|
||||
schemars.workspace = true
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use client::{Client, UserStore};
|
||||
use fs::Fs;
|
||||
use gpui::{App, Context, Entity};
|
||||
use language_model::LanguageModelRegistry;
|
||||
use provider::deepseek::DeepSeekLanguageModelProvider;
|
||||
@@ -22,8 +23,8 @@ use crate::provider::open_router::OpenRouterLanguageModelProvider;
|
||||
use crate::provider::vercel::VercelLanguageModelProvider;
|
||||
pub use crate::settings::*;
|
||||
|
||||
pub fn init(user_store: Entity<UserStore>, client: Arc<Client>, cx: &mut App) {
|
||||
crate::settings::init(cx);
|
||||
pub fn init(user_store: Entity<UserStore>, client: Arc<Client>, fs: Arc<dyn Fs>, cx: &mut App) {
|
||||
crate::settings::init(fs, cx);
|
||||
let registry = LanguageModelRegistry::global(cx);
|
||||
registry.update(cx, |registry, cx| {
|
||||
register_language_model_providers(registry, user_store, client, cx);
|
||||
|
||||
@@ -41,6 +41,7 @@ pub struct AnthropicSettings {
|
||||
pub api_url: String,
|
||||
/// Extend Zed's list of Anthropic models.
|
||||
pub available_models: Vec<AvailableModel>,
|
||||
pub needs_setting_migration: bool,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
|
||||
@@ -553,7 +554,9 @@ pub fn into_anthropic(
|
||||
}
|
||||
MessageContent::RedactedThinking(data) => {
|
||||
if !data.is_empty() {
|
||||
Some(anthropic::RequestContent::RedactedThinking { data })
|
||||
Some(anthropic::RequestContent::RedactedThinking {
|
||||
data: String::from_utf8(data).ok()?,
|
||||
})
|
||||
} else {
|
||||
None
|
||||
}
|
||||
@@ -727,8 +730,10 @@ impl AnthropicEventMapper {
|
||||
signature: None,
|
||||
})]
|
||||
}
|
||||
ResponseContent::RedactedThinking { data } => {
|
||||
vec![Ok(LanguageModelCompletionEvent::RedactedThinking { data })]
|
||||
ResponseContent::RedactedThinking { .. } => {
|
||||
// Redacted thinking is encrypted and not accessible to the user, see:
|
||||
// https://docs.anthropic.com/en/docs/build-with-claude/extended-thinking#suggestions-for-handling-redacted-thinking-in-production
|
||||
Vec::new()
|
||||
}
|
||||
ResponseContent::ToolUse { id, name, .. } => {
|
||||
self.tool_uses_by_index.insert(
|
||||
|
||||
@@ -11,8 +11,8 @@ use aws_http_client::AwsHttpClient;
|
||||
use bedrock::bedrock_client::Client as BedrockClient;
|
||||
use bedrock::bedrock_client::config::timeout::TimeoutConfig;
|
||||
use bedrock::bedrock_client::types::{
|
||||
CachePointBlock, CachePointType, ContentBlockDelta, ContentBlockStart, ConverseStreamOutput,
|
||||
ReasoningContentBlockDelta, StopReason,
|
||||
ContentBlockDelta, ContentBlockStart, ConverseStreamOutput, ReasoningContentBlockDelta,
|
||||
StopReason,
|
||||
};
|
||||
use bedrock::{
|
||||
BedrockAnyToolChoice, BedrockAutoToolChoice, BedrockBlob, BedrockError, BedrockInnerContent,
|
||||
@@ -48,7 +48,7 @@ use strum::{EnumIter, IntoEnumIterator, IntoStaticStr};
|
||||
use theme::ThemeSettings;
|
||||
use tokio::runtime::Handle;
|
||||
use ui::{Icon, IconName, List, Tooltip, prelude::*};
|
||||
use util::ResultExt;
|
||||
use util::{ResultExt, default};
|
||||
|
||||
use crate::AllLanguageModelSettings;
|
||||
|
||||
@@ -329,12 +329,6 @@ impl LanguageModelProvider for BedrockLanguageModelProvider {
|
||||
max_tokens: model.max_tokens,
|
||||
max_output_tokens: model.max_output_tokens,
|
||||
default_temperature: model.default_temperature,
|
||||
cache_configuration: model.cache_configuration.as_ref().map(|config| {
|
||||
bedrock::BedrockModelCacheConfiguration {
|
||||
max_cache_anchors: config.max_cache_anchors,
|
||||
min_total_token: config.min_total_token,
|
||||
}
|
||||
}),
|
||||
},
|
||||
);
|
||||
}
|
||||
@@ -509,8 +503,7 @@ impl LanguageModel for BedrockModel {
|
||||
LanguageModelToolChoice::Auto | LanguageModelToolChoice::Any => {
|
||||
self.model.supports_tool_use()
|
||||
}
|
||||
// Add support for None - we'll filter tool calls at response
|
||||
LanguageModelToolChoice::None => self.model.supports_tool_use(),
|
||||
LanguageModelToolChoice::None => false,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -556,15 +549,12 @@ impl LanguageModel for BedrockModel {
|
||||
}
|
||||
};
|
||||
|
||||
let deny_tool_calls = request.tool_choice == Some(LanguageModelToolChoice::None);
|
||||
|
||||
let request = match into_bedrock(
|
||||
request,
|
||||
model_id,
|
||||
self.model.default_temperature(),
|
||||
self.model.max_output_tokens(),
|
||||
self.model.mode(),
|
||||
self.model.supports_caching(),
|
||||
) {
|
||||
Ok(request) => request,
|
||||
Err(err) => return futures::future::ready(Err(err.into())).boxed(),
|
||||
@@ -575,53 +565,25 @@ impl LanguageModel for BedrockModel {
|
||||
let request = self.stream_completion(request, cx);
|
||||
let future = self.request_limiter.stream(async move {
|
||||
let response = request.map_err(|err| anyhow!(err))?.await;
|
||||
let events = map_to_language_model_completion_events(response, owned_handle);
|
||||
|
||||
if deny_tool_calls {
|
||||
Ok(deny_tool_use_events(events).boxed())
|
||||
} else {
|
||||
Ok(events.boxed())
|
||||
}
|
||||
Ok(map_to_language_model_completion_events(
|
||||
response,
|
||||
owned_handle,
|
||||
))
|
||||
});
|
||||
|
||||
async move { Ok(future.await?.boxed()) }.boxed()
|
||||
}
|
||||
|
||||
fn cache_configuration(&self) -> Option<LanguageModelCacheConfiguration> {
|
||||
self.model
|
||||
.cache_configuration()
|
||||
.map(|config| LanguageModelCacheConfiguration {
|
||||
max_cache_anchors: config.max_cache_anchors,
|
||||
should_speculate: false,
|
||||
min_total_token: config.min_total_token,
|
||||
})
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
fn deny_tool_use_events(
|
||||
events: impl Stream<Item = Result<LanguageModelCompletionEvent, LanguageModelCompletionError>>,
|
||||
) -> impl Stream<Item = Result<LanguageModelCompletionEvent, LanguageModelCompletionError>> {
|
||||
events.map(|event| {
|
||||
match event {
|
||||
Ok(LanguageModelCompletionEvent::ToolUse(tool_use)) => {
|
||||
// Convert tool use to an error message if model decided to call it
|
||||
Ok(LanguageModelCompletionEvent::Text(format!(
|
||||
"\n\n[Error: Tool calls are disabled in this context. Attempted to call '{}']",
|
||||
tool_use.name
|
||||
)))
|
||||
}
|
||||
other => other,
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
pub fn into_bedrock(
|
||||
request: LanguageModelRequest,
|
||||
model: String,
|
||||
default_temperature: f32,
|
||||
max_output_tokens: u64,
|
||||
mode: BedrockModelMode,
|
||||
supports_caching: bool,
|
||||
) -> Result<bedrock::Request> {
|
||||
let mut new_messages: Vec<BedrockMessage> = Vec::new();
|
||||
let mut system_message = String::new();
|
||||
@@ -633,7 +595,7 @@ pub fn into_bedrock(
|
||||
|
||||
match message.role {
|
||||
Role::User | Role::Assistant => {
|
||||
let mut bedrock_message_content: Vec<BedrockInnerContent> = message
|
||||
let bedrock_message_content: Vec<BedrockInnerContent> = message
|
||||
.content
|
||||
.into_iter()
|
||||
.filter_map(|content| match content {
|
||||
@@ -645,11 +607,6 @@ pub fn into_bedrock(
|
||||
}
|
||||
}
|
||||
MessageContent::Thinking { text, signature } => {
|
||||
if model.contains(Model::DeepSeekR1.request_id()) {
|
||||
// DeepSeekR1 doesn't support thinking blocks
|
||||
// And the AWS API demands that you strip them
|
||||
return None;
|
||||
}
|
||||
let thinking = BedrockThinkingTextBlock::builder()
|
||||
.text(text)
|
||||
.set_signature(signature)
|
||||
@@ -662,32 +619,19 @@ pub fn into_bedrock(
|
||||
))
|
||||
}
|
||||
MessageContent::RedactedThinking(blob) => {
|
||||
if model.contains(Model::DeepSeekR1.request_id()) {
|
||||
// DeepSeekR1 doesn't support thinking blocks
|
||||
// And the AWS API demands that you strip them
|
||||
return None;
|
||||
}
|
||||
let redacted =
|
||||
BedrockThinkingBlock::RedactedContent(BedrockBlob::new(blob));
|
||||
|
||||
Some(BedrockInnerContent::ReasoningContent(redacted))
|
||||
}
|
||||
MessageContent::ToolUse(tool_use) => {
|
||||
let input = if tool_use.input.is_null() {
|
||||
// Bedrock API requires valid JsonValue, not null, for tool use input
|
||||
value_to_aws_document(&serde_json::json!({}))
|
||||
} else {
|
||||
value_to_aws_document(&tool_use.input)
|
||||
};
|
||||
BedrockToolUseBlock::builder()
|
||||
.name(tool_use.name.to_string())
|
||||
.tool_use_id(tool_use.id.to_string())
|
||||
.input(input)
|
||||
.build()
|
||||
.context("failed to build Bedrock tool use block")
|
||||
.log_err()
|
||||
.map(BedrockInnerContent::ToolUse)
|
||||
},
|
||||
MessageContent::ToolUse(tool_use) => BedrockToolUseBlock::builder()
|
||||
.name(tool_use.name.to_string())
|
||||
.tool_use_id(tool_use.id.to_string())
|
||||
.input(value_to_aws_document(&tool_use.input))
|
||||
.build()
|
||||
.context("failed to build Bedrock tool use block")
|
||||
.log_err()
|
||||
.map(BedrockInnerContent::ToolUse),
|
||||
MessageContent::ToolResult(tool_result) => {
|
||||
BedrockToolResultBlock::builder()
|
||||
.tool_use_id(tool_result.tool_use_id.to_string())
|
||||
@@ -717,14 +661,6 @@ pub fn into_bedrock(
|
||||
_ => None,
|
||||
})
|
||||
.collect();
|
||||
if message.cache && supports_caching {
|
||||
bedrock_message_content.push(BedrockInnerContent::CachePoint(
|
||||
CachePointBlock::builder()
|
||||
.r#type(CachePointType::Default)
|
||||
.build()
|
||||
.context("failed to build cache point block")?,
|
||||
));
|
||||
}
|
||||
let bedrock_role = match message.role {
|
||||
Role::User => bedrock::BedrockRole::User,
|
||||
Role::Assistant => bedrock::BedrockRole::Assistant,
|
||||
@@ -753,7 +689,7 @@ pub fn into_bedrock(
|
||||
}
|
||||
}
|
||||
|
||||
let mut tool_spec: Vec<BedrockTool> = request
|
||||
let tool_spec: Vec<BedrockTool> = request
|
||||
.tools
|
||||
.iter()
|
||||
.filter_map(|tool| {
|
||||
@@ -770,15 +706,6 @@ pub fn into_bedrock(
|
||||
})
|
||||
.collect();
|
||||
|
||||
if !tool_spec.is_empty() && supports_caching {
|
||||
tool_spec.push(BedrockTool::CachePoint(
|
||||
CachePointBlock::builder()
|
||||
.r#type(CachePointType::Default)
|
||||
.build()
|
||||
.context("failed to build cache point block")?,
|
||||
));
|
||||
}
|
||||
|
||||
let tool_choice = match request.tool_choice {
|
||||
Some(LanguageModelToolChoice::Auto) | None => {
|
||||
BedrockToolChoice::Auto(BedrockAutoToolChoice::builder().build())
|
||||
@@ -787,8 +714,7 @@ pub fn into_bedrock(
|
||||
BedrockToolChoice::Any(BedrockAnyToolChoice::builder().build())
|
||||
}
|
||||
Some(LanguageModelToolChoice::None) => {
|
||||
// For None, we still use Auto but will filter out tool calls in the response
|
||||
BedrockToolChoice::Auto(BedrockAutoToolChoice::builder().build())
|
||||
anyhow::bail!("LanguageModelToolChoice::None is not supported");
|
||||
}
|
||||
};
|
||||
let tool_config: BedrockToolConfig = BedrockToolConfig::builder()
|
||||
@@ -1021,11 +947,10 @@ pub fn map_to_language_model_completion_events(
|
||||
LanguageModelCompletionEvent::UsageUpdate(
|
||||
TokenUsage {
|
||||
input_tokens: metadata.input_tokens as u64,
|
||||
output_tokens: metadata.output_tokens as u64,
|
||||
cache_creation_input_tokens:
|
||||
metadata.cache_write_input_tokens.unwrap_or_default() as u64,
|
||||
cache_read_input_tokens:
|
||||
metadata.cache_read_input_tokens.unwrap_or_default() as u64,
|
||||
output_tokens: metadata.output_tokens
|
||||
as u64,
|
||||
cache_creation_input_tokens: default(),
|
||||
cache_read_input_tokens: default(),
|
||||
},
|
||||
);
|
||||
return Some((Some(Ok(completion_event)), state));
|
||||
|
||||
@@ -888,12 +888,7 @@ impl LanguageModel for CloudLanguageModel {
|
||||
Ok(model) => model,
|
||||
Err(err) => return async move { Err(anyhow!(err).into()) }.boxed(),
|
||||
};
|
||||
let request = into_open_ai(
|
||||
request,
|
||||
model.id(),
|
||||
model.supports_parallel_tool_calls(),
|
||||
None,
|
||||
);
|
||||
let request = into_open_ai(request, &model, None);
|
||||
let llm_api_token = self.llm_api_token.clone();
|
||||
let future = self.request_limiter.stream(async move {
|
||||
let PerformLlmCompletionResponse {
|
||||
|
||||
@@ -14,7 +14,7 @@ use language_model::{
|
||||
LanguageModelId, LanguageModelName, LanguageModelProvider, LanguageModelProviderId,
|
||||
LanguageModelProviderName, LanguageModelProviderState, LanguageModelRequest,
|
||||
LanguageModelToolChoice, LanguageModelToolResultContent, LanguageModelToolUse, MessageContent,
|
||||
RateLimiter, Role, StopReason, TokenUsage,
|
||||
RateLimiter, Role, StopReason,
|
||||
};
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
@@ -513,15 +513,6 @@ impl DeepSeekEventMapper {
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(usage) = event.usage {
|
||||
events.push(Ok(LanguageModelCompletionEvent::UsageUpdate(TokenUsage {
|
||||
input_tokens: usage.prompt_tokens,
|
||||
output_tokens: usage.completion_tokens,
|
||||
cache_creation_input_tokens: 0,
|
||||
cache_read_input_tokens: 0,
|
||||
})));
|
||||
}
|
||||
|
||||
match choice.finish_reason.as_deref() {
|
||||
Some("stop") => {
|
||||
events.push(Ok(LanguageModelCompletionEvent::Stop(StopReason::EndTurn)));
|
||||
|
||||
@@ -36,6 +36,7 @@ const PROVIDER_NAME: &str = "Mistral";
|
||||
pub struct MistralSettings {
|
||||
pub api_url: String,
|
||||
pub available_models: Vec<AvailableModel>,
|
||||
pub needs_setting_migration: bool,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
|
||||
|
||||
@@ -12,7 +12,7 @@ use language_model::{
|
||||
LanguageModelId, LanguageModelName, LanguageModelProvider, LanguageModelProviderId,
|
||||
LanguageModelProviderName, LanguageModelProviderState, LanguageModelRequest,
|
||||
LanguageModelToolChoice, LanguageModelToolResultContent, LanguageModelToolUse, MessageContent,
|
||||
RateLimiter, Role, StopReason, TokenUsage,
|
||||
RateLimiter, Role, StopReason,
|
||||
};
|
||||
use menu;
|
||||
use open_ai::{ImageUrl, Model, ResponseStreamEvent, stream_completion};
|
||||
@@ -28,7 +28,6 @@ use ui::{ElevationIndex, List, Tooltip, prelude::*};
|
||||
use ui_input::SingleLineInput;
|
||||
use util::ResultExt;
|
||||
|
||||
use crate::OpenAiSettingsContent;
|
||||
use crate::{AllLanguageModelSettings, ui::InstructionListItem};
|
||||
|
||||
const PROVIDER_ID: &str = "openai";
|
||||
@@ -38,6 +37,7 @@ const PROVIDER_NAME: &str = "OpenAI";
|
||||
pub struct OpenAiSettings {
|
||||
pub api_url: String,
|
||||
pub available_models: Vec<AvailableModel>,
|
||||
pub needs_setting_migration: bool,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
|
||||
@@ -344,12 +344,7 @@ impl LanguageModel for OpenAiLanguageModel {
|
||||
LanguageModelCompletionError,
|
||||
>,
|
||||
> {
|
||||
let request = into_open_ai(
|
||||
request,
|
||||
self.model.id(),
|
||||
self.model.supports_parallel_tool_calls(),
|
||||
self.max_output_tokens(),
|
||||
);
|
||||
let request = into_open_ai(request, &self.model, self.max_output_tokens());
|
||||
let completions = self.stream_completion(request, cx);
|
||||
async move {
|
||||
let mapper = OpenAiEventMapper::new();
|
||||
@@ -361,11 +356,10 @@ impl LanguageModel for OpenAiLanguageModel {
|
||||
|
||||
pub fn into_open_ai(
|
||||
request: LanguageModelRequest,
|
||||
model_id: &str,
|
||||
supports_parallel_tool_calls: bool,
|
||||
model: &Model,
|
||||
max_output_tokens: Option<u64>,
|
||||
) -> open_ai::Request {
|
||||
let stream = !model_id.starts_with("o1-");
|
||||
let stream = !model.id().starts_with("o1-");
|
||||
|
||||
let mut messages = Vec::new();
|
||||
for message in request.messages {
|
||||
@@ -441,13 +435,13 @@ pub fn into_open_ai(
|
||||
}
|
||||
|
||||
open_ai::Request {
model: model_id.into(),
model: model.id().into(),
messages,
stream,
stop: request.stop,
temperature: request.temperature.unwrap_or(1.0),
max_completion_tokens: max_output_tokens,
parallel_tool_calls: if supports_parallel_tool_calls && !request.tools.is_empty() {
parallel_tool_calls: if model.supports_parallel_tool_calls() && !request.tools.is_empty() {
// Disable parallel tool calls, as the Agent currently expects a maximum of one per turn.
Some(false)
} else {
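After the signature change above, callers no longer pass the model id and the parallel-tool-call flag separately; the `&Model` argument carries both. A sketch of an updated call site (the `request`, `model`, and token limit values are assumed to be in scope):

// Before: into_open_ai(request, model.id(), model.supports_parallel_tool_calls(), max_tokens)
// After:  the model itself supplies the id and the parallel-tool-call capability.
let request = into_open_ai(request, &model, Some(4096));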
|
||||
@@ -534,20 +528,11 @@ impl OpenAiEventMapper {
|
||||
&mut self,
|
||||
event: ResponseStreamEvent,
|
||||
) -> Vec<Result<LanguageModelCompletionEvent, LanguageModelCompletionError>> {
|
||||
let mut events = Vec::new();
|
||||
if let Some(usage) = event.usage {
|
||||
events.push(Ok(LanguageModelCompletionEvent::UsageUpdate(TokenUsage {
|
||||
input_tokens: usage.prompt_tokens,
|
||||
output_tokens: usage.completion_tokens,
|
||||
cache_creation_input_tokens: 0,
|
||||
cache_read_input_tokens: 0,
|
||||
})));
|
||||
}
|
||||
|
||||
let Some(choice) = event.choices.first() else {
|
||||
return events;
|
||||
return Vec::new();
|
||||
};
|
||||
|
||||
let mut events = Vec::new();
|
||||
if let Some(content) = choice.delta.content.clone() {
|
||||
events.push(Ok(LanguageModelCompletionEvent::Text(content)));
|
||||
}
|
||||
@@ -803,13 +788,30 @@ impl ConfigurationView {
|
||||
if !api_url.is_empty() && api_url != effective_current_url {
|
||||
let fs = <dyn Fs>::global(cx);
|
||||
update_settings_file::<AllLanguageModelSettings>(fs, cx, move |settings, _| {
|
||||
if let Some(settings) = settings.openai.as_mut() {
|
||||
settings.api_url = Some(api_url.clone());
|
||||
use crate::settings::{OpenAiSettingsContent, VersionedOpenAiSettingsContent};
|
||||
|
||||
if settings.openai.is_none() {
|
||||
settings.openai = Some(OpenAiSettingsContent::Versioned(
|
||||
VersionedOpenAiSettingsContent::V1(
|
||||
crate::settings::OpenAiSettingsContentV1 {
|
||||
api_url: Some(api_url.clone()),
|
||||
available_models: None,
|
||||
},
|
||||
),
|
||||
));
|
||||
} else {
|
||||
settings.openai = Some(OpenAiSettingsContent {
|
||||
api_url: Some(api_url.clone()),
|
||||
available_models: None,
|
||||
});
|
||||
if let Some(openai) = settings.openai.as_mut() {
|
||||
match openai {
|
||||
OpenAiSettingsContent::Versioned(versioned) => match versioned {
|
||||
VersionedOpenAiSettingsContent::V1(v1) => {
|
||||
v1.api_url = Some(api_url.clone());
|
||||
}
|
||||
},
|
||||
OpenAiSettingsContent::Legacy(legacy) => {
|
||||
legacy.api_url = Some(api_url.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
@@ -823,8 +825,19 @@ impl ConfigurationView {
|
||||
});
|
||||
let fs = <dyn Fs>::global(cx);
|
||||
update_settings_file::<AllLanguageModelSettings>(fs, cx, |settings, _cx| {
|
||||
if let Some(settings) = settings.openai.as_mut() {
|
||||
settings.api_url = None;
|
||||
use crate::settings::{OpenAiSettingsContent, VersionedOpenAiSettingsContent};
|
||||
|
||||
if let Some(openai) = settings.openai.as_mut() {
|
||||
match openai {
|
||||
OpenAiSettingsContent::Versioned(versioned) => match versioned {
|
||||
VersionedOpenAiSettingsContent::V1(v1) => {
|
||||
v1.api_url = None;
|
||||
}
|
||||
},
|
||||
OpenAiSettingsContent::Legacy(legacy) => {
|
||||
legacy.api_url = None;
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
cx.notify();
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use collections::BTreeMap;
|
||||
use collections::{BTreeMap, HashMap};
|
||||
use credentials_provider::CredentialsProvider;
|
||||
|
||||
use futures::Stream;
|
||||
use futures::{FutureExt, StreamExt, future::BoxFuture};
|
||||
use gpui::{AnyView, App, AsyncApp, Context, Entity, Subscription, Task, Window};
|
||||
use http_client::HttpClient;
|
||||
@@ -8,13 +10,16 @@ use language_model::{
|
||||
AuthenticateError, LanguageModel, LanguageModelCompletionError, LanguageModelCompletionEvent,
|
||||
LanguageModelId, LanguageModelName, LanguageModelProvider, LanguageModelProviderId,
|
||||
LanguageModelProviderName, LanguageModelProviderState, LanguageModelRequest,
|
||||
LanguageModelToolChoice, RateLimiter, Role,
|
||||
LanguageModelToolChoice, LanguageModelToolResultContent, LanguageModelToolUse, MessageContent,
|
||||
RateLimiter, Role, StopReason,
|
||||
};
|
||||
use menu;
|
||||
use open_ai::ResponseStreamEvent;
|
||||
use open_ai::{ImageUrl, ResponseStreamEvent, stream_completion};
|
||||
use schemars::JsonSchema;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use settings::{Settings, SettingsStore};
|
||||
use std::pin::Pin;
|
||||
use std::str::FromStr as _;
|
||||
use std::sync::Arc;
|
||||
use strum::IntoEnumIterator;
|
||||
use vercel::Model;
|
||||
@@ -32,6 +37,7 @@ const PROVIDER_NAME: &str = "Vercel";
|
||||
pub struct VercelSettings {
|
||||
pub api_url: String,
|
||||
pub available_models: Vec<AvailableModel>,
|
||||
pub needs_setting_migration: bool,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, JsonSchema)]
|
||||
@@ -194,12 +200,14 @@ impl LanguageModelProvider for VercelLanguageModelProvider {
|
||||
fn provided_models(&self, cx: &App) -> Vec<Arc<dyn LanguageModel>> {
|
||||
let mut models = BTreeMap::default();
|
||||
|
||||
// Add base models from vercel::Model::iter()
|
||||
for model in vercel::Model::iter() {
|
||||
if !matches!(model, vercel::Model::Custom { .. }) {
|
||||
models.insert(model.id().to_string(), model);
|
||||
}
|
||||
}
|
||||
|
||||
// Override with available models from settings
|
||||
for model in &AllLanguageModelSettings::get_global(cx)
|
||||
.vercel
|
||||
.available_models
|
||||
@@ -270,8 +278,7 @@ impl VercelLanguageModel {
|
||||
|
||||
let future = self.request_limiter.stream(async move {
|
||||
let api_key = api_key.context("Missing Vercel API Key")?;
|
||||
let request =
|
||||
open_ai::stream_completion(http_client.as_ref(), &api_url, &api_key, request);
|
||||
let request = stream_completion(http_client.as_ref(), &api_url, &api_key, request);
|
||||
let response = request.await?;
|
||||
Ok(response)
|
||||
});
|
||||
@@ -302,14 +309,14 @@ impl LanguageModel for VercelLanguageModel {
|
||||
}
|
||||
|
||||
fn supports_images(&self) -> bool {
|
||||
true
|
||||
false
|
||||
}
|
||||
|
||||
fn supports_tool_choice(&self, choice: LanguageModelToolChoice) -> bool {
|
||||
match choice {
|
||||
LanguageModelToolChoice::Auto
|
||||
| LanguageModelToolChoice::Any
|
||||
| LanguageModelToolChoice::None => true,
|
||||
LanguageModelToolChoice::Auto => true,
|
||||
LanguageModelToolChoice::Any => true,
|
||||
LanguageModelToolChoice::None => true,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -347,21 +354,264 @@ impl LanguageModel for VercelLanguageModel {
|
||||
LanguageModelCompletionError,
|
||||
>,
|
||||
> {
|
||||
let request = crate::provider::open_ai::into_open_ai(
|
||||
request,
|
||||
self.model.id(),
|
||||
self.model.supports_parallel_tool_calls(),
|
||||
self.max_output_tokens(),
|
||||
);
|
||||
let request = into_vercel(request, &self.model, self.max_output_tokens());
|
||||
let completions = self.stream_completion(request, cx);
|
||||
async move {
|
||||
let mapper = crate::provider::open_ai::OpenAiEventMapper::new();
|
||||
let mapper = VercelEventMapper::new();
|
||||
Ok(mapper.map_stream(completions.await?).boxed())
|
||||
}
|
||||
.boxed()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn into_vercel(
|
||||
request: LanguageModelRequest,
|
||||
model: &vercel::Model,
|
||||
max_output_tokens: Option<u64>,
|
||||
) -> open_ai::Request {
|
||||
let stream = !model.id().starts_with("o1-");
|
||||
|
||||
let mut messages = Vec::new();
|
||||
for message in request.messages {
|
||||
for content in message.content {
|
||||
match content {
|
||||
MessageContent::Text(text) | MessageContent::Thinking { text, .. } => {
|
||||
add_message_content_part(
|
||||
open_ai::MessagePart::Text { text: text },
|
||||
message.role,
|
||||
&mut messages,
|
||||
)
|
||||
}
|
||||
MessageContent::RedactedThinking(_) => {}
|
||||
MessageContent::Image(image) => {
|
||||
add_message_content_part(
|
||||
open_ai::MessagePart::Image {
|
||||
image_url: ImageUrl {
|
||||
url: image.to_base64_url(),
|
||||
detail: None,
|
||||
},
|
||||
},
|
||||
message.role,
|
||||
&mut messages,
|
||||
);
|
||||
}
|
||||
MessageContent::ToolUse(tool_use) => {
|
||||
let tool_call = open_ai::ToolCall {
|
||||
id: tool_use.id.to_string(),
|
||||
content: open_ai::ToolCallContent::Function {
|
||||
function: open_ai::FunctionContent {
|
||||
name: tool_use.name.to_string(),
|
||||
arguments: serde_json::to_string(&tool_use.input)
|
||||
.unwrap_or_default(),
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
if let Some(open_ai::RequestMessage::Assistant { tool_calls, .. }) =
|
||||
messages.last_mut()
|
||||
{
|
||||
tool_calls.push(tool_call);
|
||||
} else {
|
||||
messages.push(open_ai::RequestMessage::Assistant {
|
||||
content: None,
|
||||
tool_calls: vec![tool_call],
|
||||
});
|
||||
}
|
||||
}
|
||||
MessageContent::ToolResult(tool_result) => {
|
||||
let content = match &tool_result.content {
|
||||
LanguageModelToolResultContent::Text(text) => {
|
||||
vec![open_ai::MessagePart::Text {
|
||||
text: text.to_string(),
|
||||
}]
|
||||
}
|
||||
LanguageModelToolResultContent::Image(image) => {
|
||||
vec![open_ai::MessagePart::Image {
|
||||
image_url: ImageUrl {
|
||||
url: image.to_base64_url(),
|
||||
detail: None,
|
||||
},
|
||||
}]
|
||||
}
|
||||
};
|
||||
|
||||
messages.push(open_ai::RequestMessage::Tool {
|
||||
content: content.into(),
|
||||
tool_call_id: tool_result.tool_use_id.to_string(),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
open_ai::Request {
|
||||
model: model.id().into(),
|
||||
messages,
|
||||
stream,
|
||||
stop: request.stop,
|
||||
temperature: request.temperature.unwrap_or(1.0),
|
||||
max_completion_tokens: max_output_tokens,
|
||||
parallel_tool_calls: if model.supports_parallel_tool_calls() && !request.tools.is_empty() {
|
||||
// Disable parallel tool calls, as the Agent currently expects a maximum of one per turn.
|
||||
Some(false)
|
||||
} else {
|
||||
None
|
||||
},
|
||||
tools: request
|
||||
.tools
|
||||
.into_iter()
|
||||
.map(|tool| open_ai::ToolDefinition::Function {
|
||||
function: open_ai::FunctionDefinition {
|
||||
name: tool.name,
|
||||
description: Some(tool.description),
|
||||
parameters: Some(tool.input_schema),
|
||||
},
|
||||
})
|
||||
.collect(),
|
||||
tool_choice: request.tool_choice.map(|choice| match choice {
|
||||
LanguageModelToolChoice::Auto => open_ai::ToolChoice::Auto,
|
||||
LanguageModelToolChoice::Any => open_ai::ToolChoice::Required,
|
||||
LanguageModelToolChoice::None => open_ai::ToolChoice::None,
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
fn add_message_content_part(
|
||||
new_part: open_ai::MessagePart,
|
||||
role: Role,
|
||||
messages: &mut Vec<open_ai::RequestMessage>,
|
||||
) {
|
||||
match (role, messages.last_mut()) {
|
||||
(Role::User, Some(open_ai::RequestMessage::User { content }))
|
||||
| (
|
||||
Role::Assistant,
|
||||
Some(open_ai::RequestMessage::Assistant {
|
||||
content: Some(content),
|
||||
..
|
||||
}),
|
||||
)
|
||||
| (Role::System, Some(open_ai::RequestMessage::System { content, .. })) => {
|
||||
content.push_part(new_part);
|
||||
}
|
||||
_ => {
|
||||
messages.push(match role {
|
||||
Role::User => open_ai::RequestMessage::User {
|
||||
content: open_ai::MessageContent::from(vec![new_part]),
|
||||
},
|
||||
Role::Assistant => open_ai::RequestMessage::Assistant {
|
||||
content: Some(open_ai::MessageContent::from(vec![new_part])),
|
||||
tool_calls: Vec::new(),
|
||||
},
|
||||
Role::System => open_ai::RequestMessage::System {
|
||||
content: open_ai::MessageContent::from(vec![new_part]),
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct VercelEventMapper {
|
||||
tool_calls_by_index: HashMap<usize, RawToolCall>,
|
||||
}
|
||||
|
||||
impl VercelEventMapper {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
tool_calls_by_index: HashMap::default(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn map_stream(
|
||||
mut self,
|
||||
events: Pin<Box<dyn Send + Stream<Item = Result<ResponseStreamEvent>>>>,
|
||||
) -> impl Stream<Item = Result<LanguageModelCompletionEvent, LanguageModelCompletionError>>
|
||||
{
|
||||
events.flat_map(move |event| {
|
||||
futures::stream::iter(match event {
|
||||
Ok(event) => self.map_event(event),
|
||||
Err(error) => vec![Err(LanguageModelCompletionError::Other(anyhow!(error)))],
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
pub fn map_event(
|
||||
&mut self,
|
||||
event: ResponseStreamEvent,
|
||||
) -> Vec<Result<LanguageModelCompletionEvent, LanguageModelCompletionError>> {
|
||||
let Some(choice) = event.choices.first() else {
|
||||
return Vec::new();
|
||||
};
|
||||
|
||||
let mut events = Vec::new();
|
||||
if let Some(content) = choice.delta.content.clone() {
|
||||
events.push(Ok(LanguageModelCompletionEvent::Text(content)));
|
||||
}
|
||||
|
||||
if let Some(tool_calls) = choice.delta.tool_calls.as_ref() {
|
||||
for tool_call in tool_calls {
|
||||
let entry = self.tool_calls_by_index.entry(tool_call.index).or_default();
|
||||
|
||||
if let Some(tool_id) = tool_call.id.clone() {
|
||||
entry.id = tool_id;
|
||||
}
|
||||
|
||||
if let Some(function) = tool_call.function.as_ref() {
|
||||
if let Some(name) = function.name.clone() {
|
||||
entry.name = name;
|
||||
}
|
||||
|
||||
if let Some(arguments) = function.arguments.clone() {
|
||||
entry.arguments.push_str(&arguments);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
match choice.finish_reason.as_deref() {
|
||||
Some("stop") => {
|
||||
events.push(Ok(LanguageModelCompletionEvent::Stop(StopReason::EndTurn)));
|
||||
}
|
||||
Some("tool_calls") => {
|
||||
events.extend(self.tool_calls_by_index.drain().map(|(_, tool_call)| {
|
||||
match serde_json::Value::from_str(&tool_call.arguments) {
|
||||
Ok(input) => Ok(LanguageModelCompletionEvent::ToolUse(
|
||||
LanguageModelToolUse {
|
||||
id: tool_call.id.clone().into(),
|
||||
name: tool_call.name.as_str().into(),
|
||||
is_input_complete: true,
|
||||
input,
|
||||
raw_input: tool_call.arguments.clone(),
|
||||
},
|
||||
)),
|
||||
Err(error) => Err(LanguageModelCompletionError::BadInputJson {
|
||||
id: tool_call.id.into(),
|
||||
tool_name: tool_call.name.as_str().into(),
|
||||
raw_input: tool_call.arguments.into(),
|
||||
json_parse_error: error.to_string(),
|
||||
}),
|
||||
}
|
||||
}));
|
||||
|
||||
events.push(Ok(LanguageModelCompletionEvent::Stop(StopReason::ToolUse)));
}
Some(stop_reason) => {
log::error!("Unexpected Vercel stop_reason: {stop_reason:?}",);
events.push(Ok(LanguageModelCompletionEvent::Stop(StopReason::EndTurn)));
}
None => {}
}

events
}
}
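A minimal sketch of how the mapper above is driven, mirroring the call shape in `stream_completion` earlier in this file (the `completions` future and its item type are assumed):

// Illustrative wiring: turn the raw ResponseStreamEvent stream into completion events.
let mapper = VercelEventMapper::new();
let events = mapper.map_stream(completions.await?).boxed();
// `events` now yields Text, ToolUse, Stop, and error items ready for the agent loop.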
|
||||
|
||||
#[derive(Default)]
|
||||
struct RawToolCall {
|
||||
id: String,
|
||||
name: String,
|
||||
arguments: String,
|
||||
}
|
||||
|
||||
pub fn count_vercel_tokens(
|
||||
request: LanguageModelRequest,
|
||||
model: Model,
|
||||
@@ -397,7 +647,7 @@ pub fn count_vercel_tokens(
|
||||
}
|
||||
// Map Vercel models to appropriate OpenAI models for token counting
|
||||
// since Vercel uses OpenAI-compatible API
|
||||
Model::VZeroOnePointFiveMedium => {
|
||||
Model::VZero => {
|
||||
// Vercel v0 is similar to GPT-4o, so use gpt-4o for token counting
|
||||
tiktoken_rs::num_tokens_from_messages("gpt-4o", &messages)
|
||||
}
|
||||
@@ -575,3 +825,43 @@ impl Render for ConfigurationView {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use gpui::TestAppContext;
|
||||
use language_model::LanguageModelRequestMessage;
|
||||
|
||||
use super::*;
|
||||
|
||||
#[gpui::test]
|
||||
fn tiktoken_rs_support(cx: &TestAppContext) {
|
||||
let request = LanguageModelRequest {
|
||||
thread_id: None,
|
||||
prompt_id: None,
|
||||
intent: None,
|
||||
mode: None,
|
||||
messages: vec![LanguageModelRequestMessage {
|
||||
role: Role::User,
|
||||
content: vec![MessageContent::Text("message".into())],
|
||||
cache: false,
|
||||
}],
|
||||
tools: vec![],
|
||||
tool_choice: None,
|
||||
stop: vec![],
|
||||
temperature: None,
|
||||
};
|
||||
|
||||
// Validate that all models are supported by tiktoken-rs
|
||||
for model in Model::iter() {
|
||||
let count = cx
|
||||
.executor()
|
||||
.block(count_vercel_tokens(
|
||||
request.clone(),
|
||||
model,
|
||||
&cx.app.borrow(),
|
||||
))
|
||||
.unwrap();
|
||||
assert!(count > 0);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,8 +1,12 @@
use std::sync::Arc;

use anyhow::Result;
use gpui::App;
use language_model::LanguageModelCacheConfiguration;
use project::Fs;
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
use settings::{Settings, SettingsSources};
use settings::{Settings, SettingsSources, update_settings_file};

use crate::provider::{
self,
@@ -20,8 +24,36 @@ use crate::provider::{
};

/// Initializes the language model settings.
pub fn init(cx: &mut App) {
pub fn init(fs: Arc<dyn Fs>, cx: &mut App) {
AllLanguageModelSettings::register(cx);

if AllLanguageModelSettings::get_global(cx)
.openai
.needs_setting_migration
{
update_settings_file::<AllLanguageModelSettings>(fs.clone(), cx, move |setting, _| {
if let Some(settings) = setting.openai.clone() {
let (newest_version, _) = settings.upgrade();
setting.openai = Some(OpenAiSettingsContent::Versioned(
VersionedOpenAiSettingsContent::V1(newest_version),
));
}
});
}

if AllLanguageModelSettings::get_global(cx)
.anthropic
.needs_setting_migration
{
update_settings_file::<AllLanguageModelSettings>(fs, cx, move |setting, _| {
if let Some(settings) = setting.anthropic.clone() {
let (newest_version, _) = settings.upgrade();
setting.anthropic = Some(AnthropicSettingsContent::Versioned(
VersionedAnthropicSettingsContent::V1(newest_version),
));
}
});
}
}

#[derive(Default)]
|
||||
@@ -58,7 +90,78 @@ pub struct AllLanguageModelSettingsContent {
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
|
||||
pub struct AnthropicSettingsContent {
|
||||
#[serde(untagged)]
|
||||
pub enum AnthropicSettingsContent {
|
||||
Versioned(VersionedAnthropicSettingsContent),
|
||||
Legacy(LegacyAnthropicSettingsContent),
|
||||
}
|
||||
|
||||
impl AnthropicSettingsContent {
|
||||
pub fn upgrade(self) -> (AnthropicSettingsContentV1, bool) {
|
||||
match self {
|
||||
AnthropicSettingsContent::Legacy(content) => (
|
||||
AnthropicSettingsContentV1 {
|
||||
api_url: content.api_url,
|
||||
available_models: content.available_models.map(|models| {
|
||||
models
|
||||
.into_iter()
|
||||
.filter_map(|model| match model {
|
||||
anthropic::Model::Custom {
|
||||
name,
|
||||
display_name,
|
||||
max_tokens,
|
||||
tool_override,
|
||||
cache_configuration,
|
||||
max_output_tokens,
|
||||
default_temperature,
|
||||
extra_beta_headers,
|
||||
mode,
|
||||
} => Some(provider::anthropic::AvailableModel {
|
||||
name,
|
||||
display_name,
|
||||
max_tokens,
|
||||
tool_override,
|
||||
cache_configuration: cache_configuration.as_ref().map(
|
||||
|config| LanguageModelCacheConfiguration {
|
||||
max_cache_anchors: config.max_cache_anchors,
|
||||
should_speculate: config.should_speculate,
|
||||
min_total_token: config.min_total_token,
|
||||
},
|
||||
),
|
||||
max_output_tokens,
|
||||
default_temperature,
|
||||
extra_beta_headers,
|
||||
mode: Some(mode.into()),
|
||||
}),
|
||||
_ => None,
|
||||
})
|
||||
.collect()
|
||||
}),
|
||||
},
|
||||
true,
|
||||
),
|
||||
AnthropicSettingsContent::Versioned(content) => match content {
|
||||
VersionedAnthropicSettingsContent::V1(content) => (content, false),
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
|
||||
pub struct LegacyAnthropicSettingsContent {
|
||||
pub api_url: Option<String>,
|
||||
pub available_models: Option<Vec<anthropic::Model>>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
|
||||
#[serde(tag = "version")]
|
||||
pub enum VersionedAnthropicSettingsContent {
|
||||
#[serde(rename = "1")]
|
||||
V1(AnthropicSettingsContentV1),
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
|
||||
pub struct AnthropicSettingsContentV1 {
|
||||
pub api_url: Option<String>,
|
||||
pub available_models: Option<Vec<provider::anthropic::AvailableModel>>,
|
||||
}
|
||||
@@ -97,7 +200,64 @@ pub struct MistralSettingsContent {
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
|
||||
pub struct OpenAiSettingsContent {
|
||||
#[serde(untagged)]
|
||||
pub enum OpenAiSettingsContent {
|
||||
Versioned(VersionedOpenAiSettingsContent),
|
||||
Legacy(LegacyOpenAiSettingsContent),
|
||||
}
|
||||
|
||||
impl OpenAiSettingsContent {
|
||||
pub fn upgrade(self) -> (OpenAiSettingsContentV1, bool) {
|
||||
match self {
|
||||
OpenAiSettingsContent::Legacy(content) => (
|
||||
OpenAiSettingsContentV1 {
|
||||
api_url: content.api_url,
|
||||
available_models: content.available_models.map(|models| {
|
||||
models
|
||||
.into_iter()
|
||||
.filter_map(|model| match model {
|
||||
open_ai::Model::Custom {
|
||||
name,
|
||||
display_name,
|
||||
max_tokens,
|
||||
max_output_tokens,
|
||||
max_completion_tokens,
|
||||
} => Some(provider::open_ai::AvailableModel {
|
||||
name,
|
||||
max_tokens,
|
||||
max_output_tokens,
|
||||
display_name,
|
||||
max_completion_tokens,
|
||||
}),
|
||||
_ => None,
|
||||
})
|
||||
.collect()
|
||||
}),
|
||||
},
|
||||
true,
|
||||
),
|
||||
OpenAiSettingsContent::Versioned(content) => match content {
|
||||
VersionedOpenAiSettingsContent::V1(content) => (content, false),
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
|
||||
pub struct LegacyOpenAiSettingsContent {
|
||||
pub api_url: Option<String>,
|
||||
pub available_models: Option<Vec<open_ai::Model>>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
|
||||
#[serde(tag = "version")]
|
||||
pub enum VersionedOpenAiSettingsContent {
|
||||
#[serde(rename = "1")]
|
||||
V1(OpenAiSettingsContentV1),
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema)]
|
||||
pub struct OpenAiSettingsContentV1 {
|
||||
pub api_url: Option<String>,
|
||||
pub available_models: Option<Vec<provider::open_ai::AvailableModel>>,
|
||||
}
|
||||
@@ -143,7 +303,15 @@ impl settings::Settings for AllLanguageModelSettings {
|
||||
|
||||
for value in sources.defaults_and_customizations() {
|
||||
// Anthropic
|
||||
let anthropic = value.anthropic.clone();
|
||||
let (anthropic, upgraded) = match value.anthropic.clone().map(|s| s.upgrade()) {
|
||||
Some((content, upgraded)) => (Some(content), upgraded),
|
||||
None => (None, false),
|
||||
};
|
||||
|
||||
if upgraded {
|
||||
settings.anthropic.needs_setting_migration = true;
|
||||
}
|
||||
|
||||
merge(
|
||||
&mut settings.anthropic.api_url,
|
||||
anthropic.as_ref().and_then(|s| s.api_url.clone()),
|
||||
@@ -209,7 +377,15 @@ impl settings::Settings for AllLanguageModelSettings {
|
||||
);
|
||||
|
||||
// OpenAI
|
||||
let openai = value.openai.clone();
|
||||
let (openai, upgraded) = match value.openai.clone().map(|s| s.upgrade()) {
|
||||
Some((content, upgraded)) => (Some(content), upgraded),
|
||||
None => (None, false),
|
||||
};
|
||||
|
||||
if upgraded {
|
||||
settings.openai.needs_setting_migration = true;
|
||||
}
|
||||
|
||||
merge(
|
||||
&mut settings.openai.api_url,
|
||||
openai.as_ref().and_then(|s| s.api_url.clone()),
|
||||
|
||||
@@ -14,7 +14,6 @@ doctest = false

[dependencies]
anyhow.workspace = true
client.workspace = true
collections.workspace = true
copilot.workspace = true
editor.workspace = true
@@ -23,19 +22,18 @@ gpui.workspace = true
itertools.workspace = true
language.workspace = true
lsp.workspace = true
picker.workspace = true
project.workspace = true
serde_json.workspace = true
settings.workspace = true
theme.workspace = true
tree-sitter.workspace = true
ui.workspace = true
util.workspace = true
workspace.workspace = true
zed_actions.workspace = true
workspace-hack.workspace = true

[dev-dependencies]
client = { workspace = true, features = ["test-support"] }
editor = { workspace = true, features = ["test-support"] }
release_channel.workspace = true
gpui = { workspace = true, features = ["test-support"] }

@@ -1,54 +1,17 @@
mod key_context_view;
mod lsp_log;
pub mod lsp_tool;
mod syntax_tree_view;

#[cfg(test)]
mod lsp_log_tests;

use gpui::{App, AppContext, Entity};
use gpui::App;

pub use lsp_log::{LogStore, LspLogToolbarItemView, LspLogView};
pub use syntax_tree_view::{SyntaxTreeToolbarItemView, SyntaxTreeView};
use ui::{Context, Window};
use workspace::{Item, ItemHandle, SplitDirection, Workspace};

pub fn init(cx: &mut App) {
lsp_log::init(cx);
syntax_tree_view::init(cx);
key_context_view::init(cx);
}

fn get_or_create_tool<T>(
workspace: &mut Workspace,
destination: SplitDirection,
window: &mut Window,
cx: &mut Context<Workspace>,
new_tool: impl FnOnce(&mut Window, &mut Context<T>) -> T,
) -> Entity<T>
where
T: Item,
{
if let Some(item) = workspace.item_of_type::<T>(cx) {
return item;
}

let new_tool = cx.new(|cx| new_tool(window, cx));
match workspace.find_pane_in_direction(destination, cx) {
Some(right_pane) => {
workspace.add_item(
right_pane,
new_tool.boxed_clone(),
None,
true,
true,
window,
cx,
);
}
None => {
workspace.split_item(destination, new_tool.boxed_clone(), window, cx);
}
}
new_tool
}

@@ -3,14 +3,14 @@ use copilot::Copilot;
|
||||
use editor::{Editor, EditorEvent, actions::MoveToEnd, scroll::Autoscroll};
|
||||
use futures::{StreamExt, channel::mpsc};
|
||||
use gpui::{
|
||||
AnyView, App, Context, Corner, Entity, EventEmitter, FocusHandle, Focusable, Global,
|
||||
IntoElement, ParentElement, Render, Styled, Subscription, WeakEntity, Window, actions, div,
|
||||
AnyView, App, Context, Corner, Entity, EventEmitter, FocusHandle, Focusable, IntoElement,
|
||||
ParentElement, Render, Styled, Subscription, WeakEntity, Window, actions, div,
|
||||
};
|
||||
use itertools::Itertools;
|
||||
use language::{LanguageServerId, language_settings::SoftWrap};
|
||||
use lsp::{
|
||||
IoKind, LanguageServer, LanguageServerName, LanguageServerSelector, MessageType,
|
||||
SetTraceParams, TraceValue, notification::SetTrace,
|
||||
IoKind, LanguageServer, LanguageServerName, MessageType, SetTraceParams, TraceValue,
|
||||
notification::SetTrace,
|
||||
};
|
||||
use project::{Project, WorktreeId, search::SearchQuery};
|
||||
use std::{any::TypeId, borrow::Cow, sync::Arc};
|
||||
@@ -21,8 +21,6 @@ use workspace::{
|
||||
searchable::{Direction, SearchEvent, SearchableItem, SearchableItemHandle},
|
||||
};
|
||||
|
||||
use crate::get_or_create_tool;
|
||||
|
||||
const SEND_LINE: &str = "\n// Send:";
|
||||
const RECEIVE_LINE: &str = "\n// Receive:";
|
||||
const MAX_STORED_LOG_ENTRIES: usize = 2000;
|
||||
@@ -46,7 +44,7 @@ trait Message: AsRef<str> {
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) struct LogMessage {
|
||||
struct LogMessage {
|
||||
message: String,
|
||||
typ: MessageType,
|
||||
}
|
||||
@@ -73,7 +71,7 @@ impl Message for LogMessage {
|
||||
}
|
||||
}
|
||||
|
||||
pub(super) struct TraceMessage {
|
||||
struct TraceMessage {
|
||||
message: String,
|
||||
}
|
||||
|
||||
@@ -101,7 +99,7 @@ impl Message for RpcMessage {
|
||||
type Level = ();
|
||||
}
|
||||
|
||||
pub(super) struct LanguageServerState {
|
||||
struct LanguageServerState {
|
||||
name: Option<LanguageServerName>,
|
||||
worktree_id: Option<WorktreeId>,
|
||||
kind: LanguageServerKind,
|
||||
@@ -206,13 +204,8 @@ pub(crate) struct LogMenuItem {
|
||||
|
||||
actions!(dev, [OpenLanguageServerLogs]);
|
||||
|
||||
pub(super) struct GlobalLogStore(pub WeakEntity<LogStore>);
|
||||
|
||||
impl Global for GlobalLogStore {}
|
||||
|
||||
pub fn init(cx: &mut App) {
|
||||
let log_store = cx.new(LogStore::new);
|
||||
cx.set_global(GlobalLogStore(log_store.downgrade()));
|
||||
|
||||
cx.observe_new(move |workspace: &mut Workspace, _, cx| {
|
||||
let project = workspace.project();
|
||||
@@ -226,14 +219,13 @@ pub fn init(cx: &mut App) {
|
||||
workspace.register_action(move |workspace, _: &OpenLanguageServerLogs, window, cx| {
|
||||
let project = workspace.project().read(cx);
|
||||
if project.is_local() || project.is_via_ssh() {
|
||||
let project = workspace.project().clone();
|
||||
let log_store = log_store.clone();
|
||||
get_or_create_tool(
|
||||
workspace,
|
||||
workspace.split_item(
|
||||
SplitDirection::Right,
|
||||
Box::new(cx.new(|cx| {
|
||||
LspLogView::new(workspace.project().clone(), log_store.clone(), window, cx)
|
||||
})),
|
||||
window,
|
||||
cx,
|
||||
move |window, cx| LspLogView::new(project, log_store, window, cx),
|
||||
);
|
||||
}
|
||||
});
|
||||
@@ -362,7 +354,7 @@ impl LogStore {
|
||||
);
|
||||
}
|
||||
|
||||
pub(super) fn get_language_server_state(
|
||||
fn get_language_server_state(
|
||||
&mut self,
|
||||
id: LanguageServerId,
|
||||
) -> Option<&mut LanguageServerState> {
|
||||
@@ -488,14 +480,11 @@ impl LogStore {
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
pub(super) fn server_logs(&self, server_id: LanguageServerId) -> Option<&VecDeque<LogMessage>> {
|
||||
fn server_logs(&self, server_id: LanguageServerId) -> Option<&VecDeque<LogMessage>> {
|
||||
Some(&self.language_servers.get(&server_id)?.log_messages)
|
||||
}
|
||||
|
||||
pub(super) fn server_trace(
|
||||
&self,
|
||||
server_id: LanguageServerId,
|
||||
) -> Option<&VecDeque<TraceMessage>> {
|
||||
fn server_trace(&self, server_id: LanguageServerId) -> Option<&VecDeque<TraceMessage>> {
|
||||
Some(&self.language_servers.get(&server_id)?.trace_messages)
|
||||
}
|
||||
|
||||
@@ -540,110 +529,6 @@ impl LogStore {
|
||||
Some(())
|
||||
}
|
||||
|
||||
pub fn has_server_logs(&self, server: &LanguageServerSelector) -> bool {
|
||||
match server {
|
||||
LanguageServerSelector::Id(id) => self.language_servers.contains_key(id),
|
||||
LanguageServerSelector::Name(name) => self
|
||||
.language_servers
|
||||
.iter()
|
||||
.any(|(_, state)| state.name.as_ref() == Some(name)),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn open_server_log(
|
||||
&mut self,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
server: LanguageServerSelector,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
cx.spawn_in(window, async move |log_store, cx| {
|
||||
let Some(log_store) = log_store.upgrade() else {
|
||||
return;
|
||||
};
|
||||
workspace
|
||||
.update_in(cx, |workspace, window, cx| {
|
||||
let project = workspace.project().clone();
|
||||
let tool_log_store = log_store.clone();
|
||||
let log_view = get_or_create_tool(
|
||||
workspace,
|
||||
SplitDirection::Right,
|
||||
window,
|
||||
cx,
|
||||
move |window, cx| LspLogView::new(project, tool_log_store, window, cx),
|
||||
);
|
||||
log_view.update(cx, |log_view, cx| {
|
||||
let server_id = match server {
|
||||
LanguageServerSelector::Id(id) => Some(id),
|
||||
LanguageServerSelector::Name(name) => {
|
||||
log_store.read(cx).language_servers.iter().find_map(
|
||||
|(id, state)| {
|
||||
if state.name.as_ref() == Some(&name) {
|
||||
Some(*id)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
},
|
||||
)
|
||||
}
|
||||
};
|
||||
if let Some(server_id) = server_id {
|
||||
log_view.show_logs_for_server(server_id, window, cx);
|
||||
}
|
||||
});
|
||||
})
|
||||
.ok();
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
|
||||
pub fn open_server_trace(
|
||||
&mut self,
|
||||
workspace: WeakEntity<Workspace>,
|
||||
server: LanguageServerSelector,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
cx.spawn_in(window, async move |log_store, cx| {
|
||||
let Some(log_store) = log_store.upgrade() else {
|
||||
return;
|
||||
};
|
||||
workspace
|
||||
.update_in(cx, |workspace, window, cx| {
|
||||
let project = workspace.project().clone();
|
||||
let tool_log_store = log_store.clone();
|
||||
let log_view = get_or_create_tool(
|
||||
workspace,
|
||||
SplitDirection::Right,
|
||||
window,
|
||||
cx,
|
||||
move |window, cx| LspLogView::new(project, tool_log_store, window, cx),
|
||||
);
|
||||
log_view.update(cx, |log_view, cx| {
|
||||
let server_id = match server {
|
||||
LanguageServerSelector::Id(id) => Some(id),
|
||||
LanguageServerSelector::Name(name) => {
|
||||
log_store.read(cx).language_servers.iter().find_map(
|
||||
|(id, state)| {
|
||||
if state.name.as_ref() == Some(&name) {
|
||||
Some(*id)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
},
|
||||
)
|
||||
}
|
||||
};
|
||||
if let Some(server_id) = server_id {
|
||||
log_view.show_rpc_trace_for_server(server_id, window, cx);
|
||||
}
|
||||
});
|
||||
})
|
||||
.ok();
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
|
||||
fn on_io(
|
||||
&mut self,
|
||||
language_server_id: LanguageServerId,
|
||||
@@ -971,7 +856,7 @@ impl LspLogView {
|
||||
self.editor_subscriptions = editor_subscriptions;
|
||||
cx.notify();
|
||||
}
|
||||
self.editor.read(cx).focus_handle(cx).focus(window);
|
||||
window.focus(&self.focus_handle);
|
||||
}
|
||||
|
||||
fn update_log_level(
|
||||
@@ -997,7 +882,7 @@ impl LspLogView {
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
self.editor.read(cx).focus_handle(cx).focus(window);
|
||||
window.focus(&self.focus_handle);
|
||||
}
|
||||
|
||||
fn show_trace_for_server(
|
||||
@@ -1019,7 +904,7 @@ impl LspLogView {
|
||||
self.editor_subscriptions = editor_subscriptions;
|
||||
cx.notify();
|
||||
}
|
||||
self.editor.read(cx).focus_handle(cx).focus(window);
|
||||
window.focus(&self.focus_handle);
|
||||
}
|
||||
|
||||
fn show_rpc_trace_for_server(
|
||||
@@ -1062,7 +947,7 @@ impl LspLogView {
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
self.editor.read(cx).focus_handle(cx).focus(window);
|
||||
window.focus(&self.focus_handle);
|
||||
}
|
||||
|
||||
fn toggle_rpc_trace_for_server(
|
||||
@@ -1126,7 +1011,7 @@ impl LspLogView {
|
||||
self.editor = editor;
|
||||
self.editor_subscriptions = editor_subscriptions;
|
||||
cx.notify();
|
||||
self.editor.read(cx).focus_handle(cx).focus(window);
|
||||
window.focus(&self.focus_handle);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,917 +0,0 @@
|
||||
use std::{collections::hash_map, path::PathBuf, sync::Arc, time::Duration};
|
||||
|
||||
use client::proto;
|
||||
use collections::{HashMap, HashSet};
|
||||
use editor::{Editor, EditorEvent};
|
||||
use gpui::{Corner, DismissEvent, Entity, Focusable as _, Subscription, Task, WeakEntity, actions};
|
||||
use language::{BinaryStatus, BufferId, LocalFile, ServerHealth};
|
||||
use lsp::{LanguageServerId, LanguageServerName, LanguageServerSelector};
|
||||
use picker::{Picker, PickerDelegate, popover_menu::PickerPopoverMenu};
|
||||
use project::{LspStore, LspStoreEvent, project_settings::ProjectSettings};
|
||||
use settings::{Settings as _, SettingsStore};
|
||||
use ui::{Context, IconButtonShape, Indicator, Tooltip, Window, prelude::*};
|
||||
|
||||
use workspace::{StatusItemView, Workspace};
|
||||
|
||||
use crate::lsp_log::GlobalLogStore;
|
||||
|
||||
actions!(lsp_tool, [ToggleMenu]);
|
||||
|
||||
pub struct LspTool {
|
||||
state: Entity<PickerState>,
|
||||
lsp_picker: Option<Entity<Picker<LspPickerDelegate>>>,
|
||||
_subscriptions: Vec<Subscription>,
|
||||
}
|
||||
|
||||
struct PickerState {
|
||||
workspace: WeakEntity<Workspace>,
|
||||
lsp_store: WeakEntity<LspStore>,
|
||||
active_editor: Option<ActiveEditor>,
|
||||
language_servers: LanguageServers,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
struct LspPickerDelegate {
|
||||
state: Entity<PickerState>,
|
||||
selected_index: usize,
|
||||
items: Vec<LspItem>,
|
||||
other_servers_start_index: Option<usize>,
|
||||
}
|
||||
|
||||
struct ActiveEditor {
|
||||
editor: WeakEntity<Editor>,
|
||||
_editor_subscription: Subscription,
|
||||
editor_buffers: HashSet<BufferId>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default, Clone)]
|
||||
struct LanguageServers {
|
||||
health_statuses: HashMap<LanguageServerId, LanguageServerHealthStatus>,
|
||||
binary_statuses: HashMap<LanguageServerName, LanguageServerBinaryStatus>,
|
||||
servers_per_buffer_abs_path:
|
||||
HashMap<PathBuf, HashMap<LanguageServerId, Option<LanguageServerName>>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
struct LanguageServerHealthStatus {
|
||||
name: LanguageServerName,
|
||||
health: Option<(Option<SharedString>, ServerHealth)>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
struct LanguageServerBinaryStatus {
|
||||
status: BinaryStatus,
|
||||
message: Option<SharedString>,
|
||||
}
|
||||
|
||||
impl LanguageServerHealthStatus {
|
||||
fn health(&self) -> Option<ServerHealth> {
|
||||
self.health.as_ref().map(|(_, health)| *health)
|
||||
}
|
||||
|
||||
fn message(&self) -> Option<SharedString> {
|
||||
self.health
|
||||
.as_ref()
|
||||
.and_then(|(message, _)| message.clone())
|
||||
}
|
||||
}
|
||||
|
||||
impl LspPickerDelegate {
|
||||
fn regenerate_items(&mut self, cx: &mut Context<Picker<Self>>) {
|
||||
self.state.update(cx, |state, cx| {
|
||||
let editor_buffers = state
|
||||
.active_editor
|
||||
.as_ref()
|
||||
.map(|active_editor| active_editor.editor_buffers.clone())
|
||||
.unwrap_or_default();
|
||||
let editor_buffer_paths = editor_buffers
|
||||
.iter()
|
||||
.filter_map(|buffer_id| {
|
||||
let buffer_path = state
|
||||
.lsp_store
|
||||
.update(cx, |lsp_store, cx| {
|
||||
Some(
|
||||
project::File::from_dyn(
|
||||
lsp_store
|
||||
.buffer_store()
|
||||
.read(cx)
|
||||
.get(*buffer_id)?
|
||||
.read(cx)
|
||||
.file(),
|
||||
)?
|
||||
.abs_path(cx),
|
||||
)
|
||||
})
|
||||
.ok()??;
|
||||
Some(buffer_path)
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let mut servers_with_health_checks = HashSet::default();
|
||||
let mut server_ids_with_health_checks = HashSet::default();
|
||||
let mut buffer_servers =
|
||||
Vec::with_capacity(state.language_servers.health_statuses.len());
|
||||
let mut other_servers =
|
||||
Vec::with_capacity(state.language_servers.health_statuses.len());
|
||||
let buffer_server_ids = editor_buffer_paths
|
||||
.iter()
|
||||
.filter_map(|buffer_path| {
|
||||
state
|
||||
.language_servers
|
||||
.servers_per_buffer_abs_path
|
||||
.get(buffer_path)
|
||||
})
|
||||
.flatten()
|
||||
.fold(HashMap::default(), |mut acc, (server_id, name)| {
|
||||
match acc.entry(*server_id) {
|
||||
hash_map::Entry::Occupied(mut o) => {
|
||||
let old_name: &mut Option<&LanguageServerName> = o.get_mut();
|
||||
if old_name.is_none() {
|
||||
*old_name = name.as_ref();
|
||||
}
|
||||
}
|
||||
hash_map::Entry::Vacant(v) => {
|
||||
v.insert(name.as_ref());
|
||||
}
|
||||
}
|
||||
acc
|
||||
});
|
||||
for (server_id, server_state) in &state.language_servers.health_statuses {
|
||||
let binary_status = state
|
||||
.language_servers
|
||||
.binary_statuses
|
||||
.get(&server_state.name);
|
||||
servers_with_health_checks.insert(&server_state.name);
|
||||
server_ids_with_health_checks.insert(*server_id);
|
||||
if buffer_server_ids.contains_key(server_id) {
|
||||
buffer_servers.push(ServerData::WithHealthCheck(
|
||||
*server_id,
|
||||
server_state,
|
||||
binary_status,
|
||||
));
|
||||
} else {
|
||||
other_servers.push(ServerData::WithHealthCheck(
|
||||
*server_id,
|
||||
server_state,
|
||||
binary_status,
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
for (server_name, status) in state
|
||||
.language_servers
|
||||
.binary_statuses
|
||||
.iter()
|
||||
.filter(|(name, _)| !servers_with_health_checks.contains(name))
|
||||
{
|
||||
let has_matching_server = state
|
||||
.language_servers
|
||||
.servers_per_buffer_abs_path
|
||||
.iter()
|
||||
.filter(|(path, _)| editor_buffer_paths.contains(path))
|
||||
.flat_map(|(_, server_associations)| server_associations.iter())
|
||||
.any(|(_, name)| name.as_ref() == Some(server_name));
|
||||
if has_matching_server {
|
||||
buffer_servers.push(ServerData::WithBinaryStatus(server_name, status));
|
||||
} else {
|
||||
other_servers.push(ServerData::WithBinaryStatus(server_name, status));
|
||||
}
|
||||
}
|
||||
|
||||
buffer_servers.sort_by_key(|data| data.name().clone());
|
||||
other_servers.sort_by_key(|data| data.name().clone());
|
||||
let mut other_servers_start_index = None;
|
||||
let mut new_lsp_items =
|
||||
Vec::with_capacity(buffer_servers.len() + other_servers.len() + 2);
|
||||
if !buffer_servers.is_empty() {
|
||||
new_lsp_items.push(LspItem::Header(SharedString::new("Current Buffer")));
|
||||
new_lsp_items.extend(buffer_servers.into_iter().map(ServerData::into_lsp_item));
|
||||
}
|
||||
if !other_servers.is_empty() {
|
||||
other_servers_start_index = Some(new_lsp_items.len());
|
||||
new_lsp_items.push(LspItem::Header(SharedString::new("Other Active Servers")));
|
||||
new_lsp_items.extend(other_servers.into_iter().map(ServerData::into_lsp_item));
|
||||
}
|
||||
|
||||
self.items = new_lsp_items;
|
||||
self.other_servers_start_index = other_servers_start_index;
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
impl LanguageServers {
|
||||
fn update_binary_status(
|
||||
&mut self,
|
||||
binary_status: BinaryStatus,
|
||||
message: Option<&str>,
|
||||
name: LanguageServerName,
|
||||
) {
|
||||
let binary_status_message = message.map(SharedString::new);
|
||||
if matches!(
|
||||
binary_status,
|
||||
BinaryStatus::Stopped | BinaryStatus::Failed { .. }
|
||||
) {
|
||||
self.health_statuses.retain(|_, server| server.name != name);
|
||||
}
|
||||
self.binary_statuses.insert(
|
||||
name,
|
||||
LanguageServerBinaryStatus {
|
||||
status: binary_status,
|
||||
message: binary_status_message,
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
fn update_server_health(
|
||||
&mut self,
|
||||
id: LanguageServerId,
|
||||
health: ServerHealth,
|
||||
message: Option<&str>,
|
||||
name: Option<LanguageServerName>,
|
||||
) {
|
||||
if let Some(state) = self.health_statuses.get_mut(&id) {
|
||||
state.health = Some((message.map(SharedString::new), health));
|
||||
if let Some(name) = name {
|
||||
state.name = name;
|
||||
}
|
||||
} else if let Some(name) = name {
|
||||
self.health_statuses.insert(
|
||||
id,
|
||||
LanguageServerHealthStatus {
|
||||
health: Some((message.map(SharedString::new), health)),
|
||||
name,
|
||||
},
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
enum ServerData<'a> {
|
||||
WithHealthCheck(
|
||||
LanguageServerId,
|
||||
&'a LanguageServerHealthStatus,
|
||||
Option<&'a LanguageServerBinaryStatus>,
|
||||
),
|
||||
WithBinaryStatus(&'a LanguageServerName, &'a LanguageServerBinaryStatus),
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
enum LspItem {
|
||||
WithHealthCheck(
|
||||
LanguageServerId,
|
||||
LanguageServerHealthStatus,
|
||||
Option<LanguageServerBinaryStatus>,
|
||||
),
|
||||
WithBinaryStatus(LanguageServerName, LanguageServerBinaryStatus),
|
||||
Header(SharedString),
|
||||
}
|
||||
|
||||
impl ServerData<'_> {
|
||||
fn name(&self) -> &LanguageServerName {
|
||||
match self {
|
||||
Self::WithHealthCheck(_, state, _) => &state.name,
|
||||
Self::WithBinaryStatus(name, ..) => name,
|
||||
}
|
||||
}
|
||||
|
||||
fn into_lsp_item(self) -> LspItem {
|
||||
match self {
|
||||
Self::WithHealthCheck(id, name, status) => {
|
||||
LspItem::WithHealthCheck(id, name.clone(), status.cloned())
|
||||
}
|
||||
Self::WithBinaryStatus(name, status) => {
|
||||
LspItem::WithBinaryStatus(name.clone(), status.clone())
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl PickerDelegate for LspPickerDelegate {
|
||||
type ListItem = AnyElement;
|
||||
|
||||
fn match_count(&self) -> usize {
|
||||
self.items.len()
|
||||
}
|
||||
|
||||
fn selected_index(&self) -> usize {
|
||||
self.selected_index
|
||||
}
|
||||
|
||||
fn set_selected_index(&mut self, ix: usize, _: &mut Window, cx: &mut Context<Picker<Self>>) {
|
||||
self.selected_index = ix;
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
fn update_matches(
|
||||
&mut self,
|
||||
_: String,
|
||||
_: &mut Window,
|
||||
cx: &mut Context<Picker<Self>>,
|
||||
) -> Task<()> {
|
||||
cx.spawn(async move |lsp_picker, cx| {
|
||||
cx.background_executor()
|
||||
.timer(Duration::from_millis(30))
|
||||
.await;
|
||||
lsp_picker
|
||||
.update(cx, |lsp_picker, cx| {
|
||||
lsp_picker.delegate.regenerate_items(cx);
|
||||
})
|
||||
.ok();
|
||||
})
|
||||
}
|
||||
|
||||
fn placeholder_text(&self, _window: &mut Window, _cx: &mut App) -> Arc<str> {
|
||||
Arc::default()
|
||||
}
|
||||
|
||||
fn confirm(&mut self, _: bool, _: &mut Window, _: &mut Context<Picker<Self>>) {}
|
||||
|
||||
fn dismissed(&mut self, _: &mut Window, cx: &mut Context<Picker<Self>>) {
|
||||
cx.emit(DismissEvent);
|
||||
}
|
||||
|
||||
fn render_match(
|
||||
&self,
|
||||
ix: usize,
|
||||
_: bool,
|
||||
_: &mut Window,
|
||||
cx: &mut Context<Picker<Self>>,
|
||||
) -> Option<Self::ListItem> {
|
||||
let is_other_server = self
|
||||
.other_servers_start_index
|
||||
.map_or(false, |start| ix >= start);
|
||||
let server_binary_status;
|
||||
let server_health;
|
||||
let server_message;
|
||||
let server_id;
|
||||
let server_name;
|
||||
match self.items.get(ix)? {
|
||||
LspItem::WithHealthCheck(
|
||||
language_server_id,
|
||||
language_server_health_status,
|
||||
language_server_binary_status,
|
||||
) => {
|
||||
server_binary_status = language_server_binary_status.as_ref();
|
||||
server_health = language_server_health_status.health();
|
||||
server_message = language_server_health_status.message();
|
||||
server_id = Some(*language_server_id);
|
||||
server_name = language_server_health_status.name.clone();
|
||||
}
|
||||
LspItem::WithBinaryStatus(language_server_name, language_server_binary_status) => {
|
||||
server_binary_status = Some(language_server_binary_status);
|
||||
server_health = None;
|
||||
server_message = language_server_binary_status.message.clone();
|
||||
server_id = None;
|
||||
server_name = language_server_name.clone();
|
||||
}
|
||||
LspItem::Header(header) => {
|
||||
return Some(
|
||||
h_flex()
|
||||
.justify_center()
|
||||
.child(Label::new(header.clone()))
|
||||
.into_any_element(),
|
||||
);
|
||||
}
|
||||
};
|
||||
|
||||
let workspace = self.state.read(cx).workspace.clone();
|
||||
let lsp_logs = cx.global::<GlobalLogStore>().0.upgrade()?;
|
||||
let lsp_store = self.state.read(cx).lsp_store.upgrade()?;
|
||||
let server_selector = server_id
|
||||
.map(LanguageServerSelector::Id)
|
||||
.unwrap_or_else(|| LanguageServerSelector::Name(server_name.clone()));
|
||||
let can_stop = server_binary_status.is_none_or(|status| {
|
||||
matches!(status.status, BinaryStatus::None | BinaryStatus::Starting)
|
||||
});
|
||||
// TODO currently, Zed remote does not work well with the LSP logs
|
||||
// https://github.com/zed-industries/zed/issues/28557
|
||||
let has_logs = lsp_store.read(cx).as_local().is_some()
|
||||
&& lsp_logs.read(cx).has_server_logs(&server_selector);
|
||||
let status_color = server_binary_status
|
||||
.and_then(|binary_status| match binary_status.status {
|
||||
BinaryStatus::None => None,
|
||||
BinaryStatus::CheckingForUpdate
|
||||
| BinaryStatus::Downloading
|
||||
| BinaryStatus::Starting => Some(Color::Modified),
|
||||
BinaryStatus::Stopping => Some(Color::Disabled),
|
||||
BinaryStatus::Stopped => Some(Color::Disabled),
|
||||
BinaryStatus::Failed { .. } => Some(Color::Error),
|
||||
})
|
||||
.or_else(|| {
|
||||
Some(match server_health? {
|
||||
ServerHealth::Ok => Color::Success,
|
||||
ServerHealth::Warning => Color::Warning,
|
||||
ServerHealth::Error => Color::Error,
|
||||
})
|
||||
})
|
||||
.unwrap_or(Color::Success);
|
||||
|
||||
Some(
|
||||
h_flex()
|
||||
.w_full()
|
||||
.justify_between()
|
||||
.gap_2()
|
||||
.child(
|
||||
h_flex()
|
||||
.id("server-status-indicator")
|
||||
.gap_2()
|
||||
.child(Indicator::dot().color(status_color))
|
||||
.child(Label::new(server_name.0.clone()))
|
||||
.when_some(server_message.clone(), |div, server_message| {
|
||||
div.tooltip(move |_, cx| Tooltip::simple(server_message.clone(), cx))
|
||||
}),
|
||||
)
|
||||
.child(
|
||||
h_flex()
|
||||
.gap_1()
|
||||
.when(has_logs, |div| {
|
||||
div.child(
|
||||
IconButton::new("debug-language-server", IconName::MessageBubbles)
|
||||
.icon_size(IconSize::XSmall)
|
||||
.tooltip(|_, cx| Tooltip::simple("Debug Language Server", cx))
|
||||
.on_click({
|
||||
let workspace = workspace.clone();
|
||||
let lsp_logs = lsp_logs.downgrade();
|
||||
let server_selector = server_selector.clone();
|
||||
move |_, window, cx| {
|
||||
lsp_logs
|
||||
.update(cx, |lsp_logs, cx| {
|
||||
lsp_logs.open_server_trace(
|
||||
workspace.clone(),
|
||||
server_selector.clone(),
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
}),
|
||||
)
|
||||
})
|
||||
.when(can_stop, |div| {
|
||||
div.child(
|
||||
IconButton::new("stop-server", IconName::Stop)
|
||||
.icon_size(IconSize::Small)
|
||||
.tooltip(|_, cx| Tooltip::simple("Stop server", cx))
|
||||
.on_click({
|
||||
let lsp_store = lsp_store.downgrade();
|
||||
let server_selector = server_selector.clone();
|
||||
move |_, _, cx| {
|
||||
lsp_store
|
||||
.update(cx, |lsp_store, cx| {
|
||||
lsp_store.stop_language_servers_for_buffers(
|
||||
Vec::new(),
|
||||
HashSet::from_iter([
|
||||
server_selector.clone()
|
||||
]),
|
||||
cx,
|
||||
);
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
}),
|
||||
)
|
||||
})
|
||||
.child(
|
||||
IconButton::new("restart-server", IconName::Rerun)
|
||||
.icon_size(IconSize::XSmall)
|
||||
.tooltip(|_, cx| Tooltip::simple("Restart server", cx))
|
||||
.on_click({
|
||||
let state = self.state.clone();
|
||||
let workspace = workspace.clone();
|
||||
let lsp_store = lsp_store.downgrade();
|
||||
let editor_buffers = state
|
||||
.read(cx)
|
||||
.active_editor
|
||||
.as_ref()
|
||||
.map(|active_editor| active_editor.editor_buffers.clone())
|
||||
.unwrap_or_default();
|
||||
let server_selector = server_selector.clone();
|
||||
move |_, _, cx| {
|
||||
if let Some(workspace) = workspace.upgrade() {
|
||||
let project = workspace.read(cx).project().clone();
|
||||
let buffer_store =
|
||||
project.read(cx).buffer_store().clone();
|
||||
let buffers = if is_other_server {
|
||||
let worktree_store =
|
||||
project.read(cx).worktree_store();
|
||||
state
|
||||
.read(cx)
|
||||
.language_servers
|
||||
.servers_per_buffer_abs_path
|
||||
.iter()
|
||||
.filter_map(|(abs_path, servers)| {
|
||||
if servers.values().any(|server| {
|
||||
server.as_ref() == Some(&server_name)
|
||||
}) {
|
||||
worktree_store
|
||||
.read(cx)
|
||||
.find_worktree(abs_path, cx)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.filter_map(|(worktree, relative_path)| {
|
||||
let entry = worktree
|
||||
.read(cx)
|
||||
.entry_for_path(&relative_path)?;
|
||||
project
|
||||
.read(cx)
|
||||
.path_for_entry(entry.id, cx)
|
||||
})
|
||||
.filter_map(|project_path| {
|
||||
buffer_store
|
||||
.read(cx)
|
||||
.get_by_path(&project_path)
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
} else {
|
||||
editor_buffers
|
||||
.iter()
|
||||
.flat_map(|buffer_id| {
|
||||
buffer_store.read(cx).get(*buffer_id)
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
};
|
||||
if !buffers.is_empty() {
|
||||
lsp_store
|
||||
.update(cx, |lsp_store, cx| {
|
||||
lsp_store
|
||||
.restart_language_servers_for_buffers(
|
||||
buffers,
|
||||
HashSet::from_iter([
|
||||
server_selector.clone(),
|
||||
]),
|
||||
cx,
|
||||
);
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
}
|
||||
}
|
||||
}),
|
||||
),
|
||||
)
|
||||
.cursor_default()
|
||||
.into_any_element(),
|
||||
)
|
||||
}
|
||||
|
||||
fn render_editor(
|
||||
&self,
|
||||
editor: &Entity<Editor>,
|
||||
_: &mut Window,
|
||||
cx: &mut Context<Picker<Self>>,
|
||||
) -> Div {
|
||||
div().child(div().track_focus(&editor.focus_handle(cx)))
|
||||
}
|
||||
|
||||
fn render_footer(&self, _: &mut Window, cx: &mut Context<Picker<Self>>) -> Option<AnyElement> {
|
||||
if self.items.is_empty() {
|
||||
Some(
|
||||
h_flex()
|
||||
.w_full()
|
||||
.border_color(cx.theme().colors().border_variant)
|
||||
.child(
|
||||
Button::new("stop-all-servers", "Stop all servers")
|
||||
.disabled(true)
|
||||
.on_click(move |_, _, _| {})
|
||||
.full_width(),
|
||||
)
|
||||
.into_any_element(),
|
||||
)
|
||||
} else {
|
||||
let lsp_store = self.state.read(cx).lsp_store.clone();
|
||||
Some(
|
||||
h_flex()
|
||||
.w_full()
|
||||
.border_color(cx.theme().colors().border_variant)
|
||||
.child(
|
||||
Button::new("stop-all-servers", "Stop all servers")
|
||||
.on_click({
|
||||
move |_, _, cx| {
|
||||
lsp_store
|
||||
.update(cx, |lsp_store, cx| {
|
||||
lsp_store.stop_all_language_servers(cx);
|
||||
})
|
||||
.ok();
|
||||
}
|
||||
})
|
||||
.full_width(),
|
||||
)
|
||||
.into_any_element(),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
fn separators_after_indices(&self) -> Vec<usize> {
|
||||
if self.items.is_empty() {
|
||||
Vec::new()
|
||||
} else {
|
||||
vec![self.items.len() - 1]
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// TODO kb keyboard story
|
||||
impl LspTool {
|
||||
pub fn new(workspace: &Workspace, window: &mut Window, cx: &mut Context<Self>) -> Self {
|
||||
let settings_subscription =
|
||||
cx.observe_global_in::<SettingsStore>(window, move |lsp_tool, window, cx| {
|
||||
if ProjectSettings::get_global(cx).global_lsp_settings.button {
|
||||
if lsp_tool.lsp_picker.is_none() {
|
||||
lsp_tool.lsp_picker =
|
||||
Some(Self::new_lsp_picker(lsp_tool.state.clone(), window, cx));
|
||||
cx.notify();
|
||||
return;
|
||||
}
|
||||
} else if lsp_tool.lsp_picker.take().is_some() {
|
||||
cx.notify();
|
||||
}
|
||||
});
|
||||
|
||||
let lsp_store = workspace.project().read(cx).lsp_store();
|
||||
let lsp_store_subscription =
|
||||
cx.subscribe_in(&lsp_store, window, |lsp_tool, _, e, window, cx| {
|
||||
lsp_tool.on_lsp_store_event(e, window, cx)
|
||||
});
|
||||
|
||||
let state = cx.new(|_| PickerState {
|
||||
workspace: workspace.weak_handle(),
|
||||
lsp_store: lsp_store.downgrade(),
|
||||
active_editor: None,
|
||||
language_servers: LanguageServers::default(),
|
||||
});
|
||||
|
||||
Self {
|
||||
state,
|
||||
lsp_picker: None,
|
||||
_subscriptions: vec![settings_subscription, lsp_store_subscription],
|
||||
}
|
||||
}
|
||||
|
||||
fn on_lsp_store_event(
|
||||
&mut self,
|
||||
e: &LspStoreEvent,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
let Some(lsp_picker) = self.lsp_picker.clone() else {
|
||||
return;
|
||||
};
|
||||
let mut updated = false;
|
||||
|
||||
match e {
|
||||
LspStoreEvent::LanguageServerUpdate {
|
||||
language_server_id,
|
||||
name,
|
||||
message: proto::update_language_server::Variant::StatusUpdate(status_update),
|
||||
} => match &status_update.status {
|
||||
Some(proto::status_update::Status::Binary(binary_status)) => {
|
||||
let Some(name) = name.as_ref() else {
|
||||
return;
|
||||
};
|
||||
if let Some(binary_status) = proto::ServerBinaryStatus::from_i32(*binary_status)
|
||||
{
|
||||
let binary_status = match binary_status {
|
||||
proto::ServerBinaryStatus::None => BinaryStatus::None,
|
||||
proto::ServerBinaryStatus::CheckingForUpdate => {
|
||||
BinaryStatus::CheckingForUpdate
|
||||
}
|
||||
proto::ServerBinaryStatus::Downloading => BinaryStatus::Downloading,
|
||||
proto::ServerBinaryStatus::Starting => BinaryStatus::Starting,
|
||||
proto::ServerBinaryStatus::Stopping => BinaryStatus::Stopping,
|
||||
proto::ServerBinaryStatus::Stopped => BinaryStatus::Stopped,
|
||||
proto::ServerBinaryStatus::Failed => {
|
||||
let Some(error) = status_update.message.clone() else {
|
||||
return;
|
||||
};
|
||||
BinaryStatus::Failed { error }
|
||||
}
|
||||
};
|
||||
self.state.update(cx, |state, _| {
|
||||
state.language_servers.update_binary_status(
|
||||
binary_status,
|
||||
status_update.message.as_deref(),
|
||||
name.clone(),
|
||||
);
|
||||
});
|
||||
updated = true;
|
||||
};
|
||||
}
|
||||
Some(proto::status_update::Status::Health(health_status)) => {
|
||||
if let Some(health) = proto::ServerHealth::from_i32(*health_status) {
|
||||
let health = match health {
|
||||
proto::ServerHealth::Ok => ServerHealth::Ok,
|
||||
proto::ServerHealth::Warning => ServerHealth::Warning,
|
||||
proto::ServerHealth::Error => ServerHealth::Error,
|
||||
};
|
||||
self.state.update(cx, |state, _| {
|
||||
state.language_servers.update_server_health(
|
||||
*language_server_id,
|
||||
health,
|
||||
status_update.message.as_deref(),
|
||||
name.clone(),
|
||||
);
|
||||
});
|
||||
updated = true;
|
||||
}
|
||||
}
|
||||
None => {}
|
||||
},
|
||||
LspStoreEvent::LanguageServerUpdate {
|
||||
language_server_id,
|
||||
name,
|
||||
message: proto::update_language_server::Variant::RegisteredForBuffer(update),
|
||||
..
|
||||
} => {
|
||||
self.state.update(cx, |state, _| {
|
||||
state
|
||||
.language_servers
|
||||
.servers_per_buffer_abs_path
|
||||
.entry(PathBuf::from(&update.buffer_abs_path))
|
||||
.or_default()
|
||||
.insert(*language_server_id, name.clone());
|
||||
});
|
||||
updated = true;
|
||||
}
|
||||
_ => {}
|
||||
};
|
||||
|
||||
if updated {
|
||||
lsp_picker.update(cx, |lsp_picker, cx| {
|
||||
lsp_picker.refresh(window, cx);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
fn new_lsp_picker(
|
||||
state: Entity<PickerState>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Entity<Picker<LspPickerDelegate>> {
|
||||
cx.new(|cx| {
|
||||
let mut delegate = LspPickerDelegate {
|
||||
selected_index: 0,
|
||||
other_servers_start_index: None,
|
||||
items: Vec::new(),
|
||||
state,
|
||||
};
|
||||
delegate.regenerate_items(cx);
|
||||
Picker::list(delegate, window, cx)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl StatusItemView for LspTool {
|
||||
fn set_active_pane_item(
|
||||
&mut self,
|
||||
active_pane_item: Option<&dyn workspace::ItemHandle>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
if ProjectSettings::get_global(cx).global_lsp_settings.button {
|
||||
if let Some(editor) = active_pane_item.and_then(|item| item.downcast::<Editor>()) {
|
||||
if Some(&editor)
|
||||
!= self
|
||||
.state
|
||||
.read(cx)
|
||||
.active_editor
|
||||
.as_ref()
|
||||
.and_then(|active_editor| active_editor.editor.upgrade())
|
||||
.as_ref()
|
||||
{
|
||||
let editor_buffers =
|
||||
HashSet::from_iter(editor.read(cx).buffer().read(cx).excerpt_buffer_ids());
|
||||
let _editor_subscription = cx.subscribe_in(
|
||||
&editor,
|
||||
window,
|
||||
|lsp_tool, _, e: &EditorEvent, window, cx| match e {
|
||||
EditorEvent::ExcerptsAdded { buffer, .. } => {
|
||||
lsp_tool.state.update(cx, |state, cx| {
|
||||
if let Some(active_editor) = state.active_editor.as_mut() {
|
||||
let buffer_id = buffer.read(cx).remote_id();
|
||||
if active_editor.editor_buffers.insert(buffer_id) {
|
||||
if let Some(picker) = &lsp_tool.lsp_picker {
|
||||
picker.update(cx, |picker, cx| {
|
||||
picker.refresh(window, cx)
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
EditorEvent::ExcerptsRemoved {
|
||||
removed_buffer_ids, ..
|
||||
} => {
|
||||
lsp_tool.state.update(cx, |state, cx| {
|
||||
if let Some(active_editor) = state.active_editor.as_mut() {
|
||||
let mut removed = false;
|
||||
for id in removed_buffer_ids {
|
||||
active_editor.editor_buffers.retain(|buffer_id| {
|
||||
let retain = buffer_id != id;
|
||||
removed |= !retain;
|
||||
retain
|
||||
});
|
||||
}
|
||||
if removed {
|
||||
if let Some(picker) = &lsp_tool.lsp_picker {
|
||||
picker.update(cx, |picker, cx| {
|
||||
picker.refresh(window, cx)
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
_ => {}
|
||||
},
|
||||
);
|
||||
self.state.update(cx, |state, _| {
|
||||
state.active_editor = Some(ActiveEditor {
|
||||
editor: editor.downgrade(),
|
||||
_editor_subscription,
|
||||
editor_buffers,
|
||||
});
|
||||
});
|
||||
|
||||
let lsp_picker = Self::new_lsp_picker(self.state.clone(), window, cx);
|
||||
self.lsp_picker = Some(lsp_picker.clone());
|
||||
lsp_picker.update(cx, |lsp_picker, cx| lsp_picker.refresh(window, cx));
|
||||
}
|
||||
} else if self.state.read(cx).active_editor.is_some() {
|
||||
self.state.update(cx, |state, _| {
|
||||
state.active_editor = None;
|
||||
});
|
||||
if let Some(lsp_picker) = self.lsp_picker.as_ref() {
|
||||
lsp_picker.update(cx, |lsp_picker, cx| {
|
||||
lsp_picker.refresh(window, cx);
|
||||
});
|
||||
};
|
||||
}
|
||||
} else if self.state.read(cx).active_editor.is_some() {
|
||||
self.state.update(cx, |state, _| {
|
||||
state.active_editor = None;
|
||||
});
|
||||
if let Some(lsp_picker) = self.lsp_picker.as_ref() {
|
||||
lsp_picker.update(cx, |lsp_picker, cx| {
|
||||
lsp_picker.refresh(window, cx);
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Render for LspTool {
|
||||
fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl ui::IntoElement {
|
||||
let Some(lsp_picker) = self.lsp_picker.clone() else {
|
||||
return div();
|
||||
};
|
||||
|
||||
let mut has_errors = false;
|
||||
let mut has_warnings = false;
|
||||
let mut has_other_notifications = false;
|
||||
let state = self.state.read(cx);
|
||||
for server in state.language_servers.health_statuses.values() {
|
||||
if let Some(binary_status) = &state.language_servers.binary_statuses.get(&server.name) {
|
||||
has_errors |= matches!(binary_status.status, BinaryStatus::Failed { .. });
|
||||
has_other_notifications |= binary_status.message.is_some();
|
||||
}
|
||||
|
||||
if let Some((message, health)) = &server.health {
|
||||
has_other_notifications |= message.is_some();
|
||||
match health {
|
||||
ServerHealth::Ok => {}
|
||||
ServerHealth::Warning => has_warnings = true,
|
||||
ServerHealth::Error => has_errors = true,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let indicator = if has_errors {
|
||||
Some(Indicator::dot().color(Color::Error))
|
||||
} else if has_warnings {
|
||||
Some(Indicator::dot().color(Color::Warning))
|
||||
} else if has_other_notifications {
|
||||
Some(Indicator::dot().color(Color::Modified))
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
div().child(
|
||||
PickerPopoverMenu::new(
|
||||
lsp_picker.clone(),
|
||||
IconButton::new("zed-lsp-tool-button", IconName::Bolt)
|
||||
.when_some(indicator, IconButton::indicator)
|
||||
.shape(IconButtonShape::Square)
|
||||
.icon_size(IconSize::XSmall)
|
||||
.indicator_border_color(Some(cx.theme().colors().status_bar_background)),
|
||||
move |_, cx| Tooltip::simple("Language servers", cx),
|
||||
Corner::BottomRight,
|
||||
cx,
|
||||
)
|
||||
.render(window, cx),
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -1,26 +0,0 @@
|
||||
(parameter_declaration (identifier) @debug-variable)
|
||||
|
||||
(short_var_declaration (expression_list (identifier) @debug-variable))
|
||||
|
||||
(var_declaration (var_spec (identifier) @debug-variable))
|
||||
|
||||
(const_declaration (const_spec (identifier) @debug-variable))
|
||||
|
||||
(assignment_statement (expression_list (identifier) @debug-variable))
|
||||
|
||||
(binary_expression (identifier) @debug-variable
|
||||
(#not-match? @debug-variable "^[A-Z]"))
|
||||
|
||||
(call_expression (argument_list (identifier) @debug-variable
|
||||
(#not-match? @debug-variable "^[A-Z]")))
|
||||
|
||||
(return_statement (expression_list (identifier) @debug-variable
|
||||
(#not-match? @debug-variable "^[A-Z]")))
|
||||
|
||||
(range_clause (expression_list (identifier) @debug-variable))
|
||||
|
||||
(parenthesized_expression (identifier) @debug-variable
|
||||
(#not-match? @debug-variable "^[A-Z]"))
|
||||
|
||||
(block) @debug-scope
|
||||
(function_declaration) @debug-scope
|
||||
@@ -1,43 +0,0 @@
|
||||
(identifier) @debug-variable
|
||||
(#eq? @debug-variable "self")
|
||||
|
||||
(assignment left: (identifier) @debug-variable)
|
||||
(assignment left: (pattern_list (identifier) @debug-variable))
|
||||
(assignment left: (tuple_pattern (identifier) @debug-variable))
|
||||
|
||||
(augmented_assignment left: (identifier) @debug-variable)
|
||||
|
||||
(for_statement left: (identifier) @debug-variable)
|
||||
(for_statement left: (pattern_list (identifier) @debug-variable))
|
||||
(for_statement left: (tuple_pattern (identifier) @debug-variable))
|
||||
|
||||
(for_in_clause left: (identifier) @debug-variable)
|
||||
(for_in_clause left: (pattern_list (identifier) @debug-variable))
|
||||
(for_in_clause left: (tuple_pattern (identifier) @debug-variable))
|
||||
|
||||
(as_pattern (identifier) @debug-variable)
|
||||
|
||||
(binary_operator left: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]"))
|
||||
(binary_operator right: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]"))
|
||||
(comparison_operator (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]"))
|
||||
|
||||
(list (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]"))
|
||||
(tuple (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]"))
|
||||
(set (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]"))
|
||||
|
||||
(subscript value: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]"))
|
||||
|
||||
(attribute object: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]"))
|
||||
|
||||
(return_statement (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]"))
|
||||
|
||||
(parenthesized_expression (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]"))
|
||||
|
||||
(argument_list (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]"))
|
||||
|
||||
(if_statement condition: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]"))
|
||||
|
||||
(while_statement condition: (identifier) @debug-variable (#not-match? @debug-variable "^[A-Z]"))
|
||||
|
||||
(block) @debug-scope
|
||||
(module) @debug-scope
|
||||
@@ -1,50 +0,0 @@
|
||||
(metavariable) @debug-variable
|
||||
|
||||
(parameter (identifier) @debug-variable)
|
||||
|
||||
(self) @debug-variable
|
||||
|
||||
(static_item (identifier) @debug-variable)
|
||||
(const_item (identifier) @debug-variable)
|
||||
|
||||
(let_declaration pattern: (identifier) @debug-variable)
|
||||
|
||||
(let_condition (identifier) @debug-variable)
|
||||
|
||||
(match_arm (identifier) @debug-variable)
|
||||
|
||||
(for_expression (identifier) @debug-variable)
|
||||
|
||||
(closure_parameters (identifier) @debug-variable)
|
||||
|
||||
(assignment_expression (identifier) @debug-variable)
|
||||
|
||||
(field_expression (identifier) @debug-variable)
|
||||
|
||||
(binary_expression (identifier) @debug-variable
|
||||
(#not-match? @debug-variable "^[A-Z]"))
|
||||
|
||||
(reference_expression (identifier) @debug-variable
|
||||
(#not-match? @debug-variable "^[A-Z]"))
|
||||
|
||||
(array_expression (identifier) @debug-variable)
|
||||
(tuple_expression (identifier) @debug-variable)
|
||||
(return_expression (identifier) @debug-variable)
|
||||
(await_expression (identifier) @debug-variable)
|
||||
(try_expression (identifier) @debug-variable)
|
||||
(index_expression (identifier) @debug-variable)
|
||||
(range_expression (identifier) @debug-variable)
|
||||
(unary_expression (identifier) @debug-variable)
|
||||
|
||||
(if_expression (identifier) @debug-variable)
|
||||
(while_expression (identifier) @debug-variable)
|
||||
|
||||
(parenthesized_expression (identifier) @debug-variable)
|
||||
|
||||
(arguments (identifier) @debug-variable
|
||||
(#not-match? @debug-variable "^[A-Z]"))
|
||||
|
||||
(macro_invocation (token_tree (identifier) @debug-variable
|
||||
(#not-match? @debug-variable "^[A-Z]")))
|
||||
|
||||
(block) @debug-scope
|
||||
@@ -108,12 +108,6 @@ pub struct LanguageServer {
root_uri: Url,
}

#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub enum LanguageServerSelector {
Id(LanguageServerId),
Name(LanguageServerName),
}

/// Identifies a running language server.
#[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[repr(transparent)]

@@ -6,10 +6,7 @@ pub mod markdown_parser;
pub mod markdown_preview_view;
pub mod markdown_renderer;

actions!(
markdown,
[OpenPreview, OpenPreviewToTheSide, OpenFollowingPreview]
);
actions!(markdown, [OpenPreview, OpenPreviewToTheSide]);

pub fn init(cx: &mut App) {
cx.observe_new(|workspace: &mut Workspace, window, cx| {

||||
use crate::OpenPreviewToTheSide;
|
||||
use crate::markdown_elements::ParsedMarkdownElement;
|
||||
use crate::{
|
||||
OpenFollowingPreview, OpenPreview,
|
||||
OpenPreview,
|
||||
markdown_elements::ParsedMarkdown,
|
||||
markdown_parser::parse_markdown,
|
||||
markdown_renderer::{RenderContext, render_markdown_block},
|
||||
@@ -39,7 +39,6 @@ pub struct MarkdownPreviewView {
|
||||
tab_content_text: Option<SharedString>,
|
||||
language_registry: Arc<LanguageRegistry>,
|
||||
parsing_markdown_task: Option<Task<Result<()>>>,
|
||||
mode: MarkdownPreviewMode,
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, Debug, PartialEq)]
|
||||
@@ -59,11 +58,9 @@ impl MarkdownPreviewView {
|
||||
pub fn register(workspace: &mut Workspace, _window: &mut Window, _cx: &mut Context<Workspace>) {
|
||||
workspace.register_action(move |workspace, _: &OpenPreview, window, cx| {
|
||||
if let Some(editor) = Self::resolve_active_item_as_markdown_editor(workspace, cx) {
|
||||
let view = Self::create_markdown_view(workspace, editor.clone(), window, cx);
|
||||
let view = Self::create_markdown_view(workspace, editor, window, cx);
|
||||
workspace.active_pane().update(cx, |pane, cx| {
|
||||
if let Some(existing_view_idx) =
|
||||
Self::find_existing_independent_preview_item_idx(pane, &editor, cx)
|
||||
{
|
||||
if let Some(existing_view_idx) = Self::find_existing_preview_item_idx(pane) {
|
||||
pane.activate_item(existing_view_idx, true, true, window, cx);
|
||||
} else {
|
||||
pane.add_item(Box::new(view.clone()), true, true, None, window, cx)
|
||||
@@ -87,9 +84,7 @@ impl MarkdownPreviewView {
|
||||
)
|
||||
});
|
||||
pane.update(cx, |pane, cx| {
|
||||
if let Some(existing_view_idx) =
|
||||
Self::find_existing_independent_preview_item_idx(pane, &editor, cx)
|
||||
{
|
||||
if let Some(existing_view_idx) = Self::find_existing_preview_item_idx(pane) {
|
||||
pane.activate_item(existing_view_idx, true, true, window, cx);
|
||||
} else {
|
||||
pane.add_item(Box::new(view.clone()), false, false, None, window, cx)
|
||||
@@ -99,49 +94,11 @@ impl MarkdownPreviewView {
|
||||
cx.notify();
|
||||
}
|
||||
});
|
||||
|
||||
workspace.register_action(move |workspace, _: &OpenFollowingPreview, window, cx| {
|
||||
if let Some(editor) = Self::resolve_active_item_as_markdown_editor(workspace, cx) {
|
||||
// Check if there's already a following preview
|
||||
let existing_follow_view_idx = {
|
||||
let active_pane = workspace.active_pane().read(cx);
|
||||
active_pane
|
||||
.items_of_type::<MarkdownPreviewView>()
|
||||
.find(|view| view.read(cx).mode == MarkdownPreviewMode::Follow)
|
||||
.and_then(|view| active_pane.index_for_item(&view))
|
||||
};
|
||||
|
||||
if let Some(existing_follow_view_idx) = existing_follow_view_idx {
|
||||
workspace.active_pane().update(cx, |pane, cx| {
|
||||
pane.activate_item(existing_follow_view_idx, true, true, window, cx);
|
||||
});
|
||||
} else {
|
||||
let view =
|
||||
Self::create_following_markdown_view(workspace, editor.clone(), window, cx);
|
||||
workspace.active_pane().update(cx, |pane, cx| {
|
||||
pane.add_item(Box::new(view.clone()), true, true, None, window, cx)
|
||||
});
|
||||
}
|
||||
cx.notify();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
fn find_existing_independent_preview_item_idx(
|
||||
pane: &Pane,
|
||||
editor: &Entity<Editor>,
|
||||
cx: &App,
|
||||
) -> Option<usize> {
|
||||
fn find_existing_preview_item_idx(pane: &Pane) -> Option<usize> {
|
||||
pane.items_of_type::<MarkdownPreviewView>()
|
||||
.find(|view| {
|
||||
let view_read = view.read(cx);
|
||||
// Only look for independent (Default mode) previews, not Follow previews
|
||||
view_read.mode == MarkdownPreviewMode::Default
|
||||
&& view_read
|
||||
.active_editor
|
||||
.as_ref()
|
||||
.is_some_and(|active_editor| active_editor.editor == *editor)
|
||||
})
|
||||
.nth(0)
|
||||
.and_then(|view| pane.index_for_item(&view))
|
||||
}
|
||||
|
||||
@@ -165,25 +122,6 @@ impl MarkdownPreviewView {
|
||||
editor: Entity<Editor>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Workspace>,
|
||||
) -> Entity<MarkdownPreviewView> {
|
||||
let language_registry = workspace.project().read(cx).languages().clone();
|
||||
let workspace_handle = workspace.weak_handle();
|
||||
MarkdownPreviewView::new(
|
||||
MarkdownPreviewMode::Default,
|
||||
editor,
|
||||
workspace_handle,
|
||||
language_registry,
|
||||
None,
|
||||
window,
|
||||
cx,
|
||||
)
|
||||
}
|
||||
|
||||
fn create_following_markdown_view(
|
||||
workspace: &mut Workspace,
|
||||
editor: Entity<Editor>,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Workspace>,
|
||||
) -> Entity<MarkdownPreviewView> {
|
||||
let language_registry = workspace.project().read(cx).languages().clone();
|
||||
let workspace_handle = workspace.weak_handle();
|
||||
@@ -328,7 +266,6 @@ impl MarkdownPreviewView {
|
||||
language_registry,
|
||||
parsing_markdown_task: None,
|
||||
image_cache: RetainAllImageCache::new(cx),
|
||||
mode,
|
||||
};
|
||||
|
||||
this.set_editor(active_editor, window, cx);
|
||||
@@ -406,7 +343,6 @@ impl MarkdownPreviewView {
|
||||
);
|
||||
|
||||
let tab_content = editor.read(cx).tab_content_text(0, cx);
|
||||
|
||||
if self.tab_content_text.is_none() {
|
||||
self.tab_content_text = Some(format!("Preview {}", tab_content).into());
|
||||
}
|
||||
|
||||
@@ -81,9 +81,3 @@ pub(crate) mod m_2025_06_16 {

pub(crate) use settings::SETTINGS_PATTERNS;
}

pub(crate) mod m_2025_06_25 {
mod settings;

pub(crate) use settings::SETTINGS_PATTERNS;
}

Some files were not shown because too many files have changed in this diff.