Compare commits
4 Commits
test-test
...
static-rel
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
f6946ad4e8 | ||
|
|
c9972c2972 | ||
|
|
afdc53fdb7 | ||
|
|
d2e5947cf3 |
35
.github/ISSUE_TEMPLATE/07_bug_windows.yml
vendored
35
.github/ISSUE_TEMPLATE/07_bug_windows.yml
vendored
@@ -1,35 +0,0 @@
|
||||
name: Bug Report (Windows)
|
||||
description: Zed Windows Related Bugs
|
||||
type: "Bug"
|
||||
labels: ["windows"]
|
||||
title: "Windows: <a short description of the Windows bug>"
|
||||
body:
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Summary
|
||||
description: Describe the bug with a one-line summary, and provide detailed reproduction steps
|
||||
value: |
|
||||
<!-- Please insert a one-line summary of the issue below -->
|
||||
SUMMARY_SENTENCE_HERE
|
||||
|
||||
### Description
|
||||
<!-- Describe with sufficient detail to reproduce from a clean Zed install. -->
|
||||
Steps to trigger the problem:
|
||||
1.
|
||||
2.
|
||||
3.
|
||||
|
||||
**Expected Behavior**:
|
||||
**Actual Behavior**:
|
||||
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: environment
|
||||
attributes:
|
||||
label: Zed Version and System Specs
|
||||
description: 'Open Zed, and in the command palette select "zed: copy system specs into clipboard"'
|
||||
placeholder: |
|
||||
Output of "zed: copy system specs into clipboard"
|
||||
validations:
|
||||
required: true
|
||||
2
.github/workflows/ci.yml
vendored
2
.github/workflows/ci.yml
vendored
@@ -866,7 +866,7 @@ jobs:
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
|
||||
if: contains(github.event.pull_request.labels.*.name, 'run-bundling')
|
||||
with:
|
||||
name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.exe
|
||||
name: ZedEditorUserSetup-x64-${{ github.event.pull_request.head.sha || github.sha }}.exe
|
||||
path: ${{ env.SETUP_PATH }}
|
||||
|
||||
- name: Upload Artifacts to release
|
||||
|
||||
2204
Cargo.lock
generated
2204
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
35
Cargo.toml
35
Cargo.toml
@@ -164,7 +164,6 @@ members = [
|
||||
"crates/sum_tree",
|
||||
"crates/supermaven",
|
||||
"crates/supermaven_api",
|
||||
"crates/codestral",
|
||||
"crates/svg_preview",
|
||||
"crates/system_specs",
|
||||
"crates/tab_switcher",
|
||||
@@ -274,7 +273,7 @@ cloud_llm_client = { path = "crates/cloud_llm_client" }
|
||||
cloud_zeta2_prompt = { path = "crates/cloud_zeta2_prompt" }
|
||||
collab = { path = "crates/collab" }
|
||||
collab_ui = { path = "crates/collab_ui" }
|
||||
collections = { path = "crates/collections", version = "0.1.0" }
|
||||
collections = { path = "crates/collections", package = "zed-collections", version = "0.1.0" }
|
||||
command_palette = { path = "crates/command_palette" }
|
||||
command_palette_hooks = { path = "crates/command_palette_hooks" }
|
||||
component = { path = "crates/component" }
|
||||
@@ -290,7 +289,7 @@ debug_adapter_extension = { path = "crates/debug_adapter_extension" }
|
||||
debugger_tools = { path = "crates/debugger_tools" }
|
||||
debugger_ui = { path = "crates/debugger_ui" }
|
||||
deepseek = { path = "crates/deepseek" }
|
||||
derive_refineable = { path = "crates/refineable/derive_refineable" }
|
||||
derive_refineable = { path = "crates/refineable/derive_refineable", package = "zed-derive-refineable", version = "0.1.0" }
|
||||
diagnostics = { path = "crates/diagnostics" }
|
||||
editor = { path = "crates/editor" }
|
||||
extension = { path = "crates/extension" }
|
||||
@@ -309,10 +308,10 @@ git_ui = { path = "crates/git_ui" }
|
||||
go_to_line = { path = "crates/go_to_line" }
|
||||
google_ai = { path = "crates/google_ai" }
|
||||
gpui = { path = "crates/gpui", default-features = false }
|
||||
gpui_macros = { path = "crates/gpui_macros" }
|
||||
gpui_macros = { path = "crates/gpui_macros", package = "gpui-macros", version = "0.1.0" }
|
||||
gpui_tokio = { path = "crates/gpui_tokio" }
|
||||
html_to_markdown = { path = "crates/html_to_markdown" }
|
||||
http_client = { path = "crates/http_client" }
|
||||
http_client = { path = "crates/http_client", package = "zed-http-client", version = "0.1.0" }
|
||||
http_client_tls = { path = "crates/http_client_tls" }
|
||||
icons = { path = "crates/icons" }
|
||||
image_viewer = { path = "crates/image_viewer" }
|
||||
@@ -341,7 +340,7 @@ lsp = { path = "crates/lsp" }
|
||||
markdown = { path = "crates/markdown" }
|
||||
markdown_preview = { path = "crates/markdown_preview" }
|
||||
svg_preview = { path = "crates/svg_preview" }
|
||||
media = { path = "crates/media" }
|
||||
media = { path = "crates/media", package = "zed-media", version = "0.1.0" }
|
||||
menu = { path = "crates/menu" }
|
||||
migrator = { path = "crates/migrator" }
|
||||
mistral = { path = "crates/mistral" }
|
||||
@@ -358,7 +357,7 @@ outline = { path = "crates/outline" }
|
||||
outline_panel = { path = "crates/outline_panel" }
|
||||
panel = { path = "crates/panel" }
|
||||
paths = { path = "crates/paths" }
|
||||
perf = { path = "tooling/perf" }
|
||||
perf = { path = "tooling/perf", package = "zed-perf", version = "0.1.0" }
|
||||
picker = { path = "crates/picker" }
|
||||
plugin = { path = "crates/plugin" }
|
||||
plugin_macros = { path = "crates/plugin_macros" }
|
||||
@@ -370,7 +369,7 @@ project_symbols = { path = "crates/project_symbols" }
|
||||
prompt_store = { path = "crates/prompt_store" }
|
||||
proto = { path = "crates/proto" }
|
||||
recent_projects = { path = "crates/recent_projects" }
|
||||
refineable = { path = "crates/refineable" }
|
||||
refineable = { path = "crates/refineable", package = "zed-refineable", version = "0.1.0" }
|
||||
release_channel = { path = "crates/release_channel" }
|
||||
scheduler = { path = "crates/scheduler" }
|
||||
remote = { path = "crates/remote" }
|
||||
@@ -383,7 +382,7 @@ rope = { path = "crates/rope" }
|
||||
rpc = { path = "crates/rpc" }
|
||||
rules_library = { path = "crates/rules_library" }
|
||||
search = { path = "crates/search" }
|
||||
semantic_version = { path = "crates/semantic_version" }
|
||||
semantic_version = { path = "crates/semantic_version", package = "zed-semantic-version", version = "0.1.0" }
|
||||
session = { path = "crates/session" }
|
||||
settings = { path = "crates/settings" }
|
||||
settings_macros = { path = "crates/settings_macros" }
|
||||
@@ -396,10 +395,9 @@ sqlez_macros = { path = "crates/sqlez_macros" }
|
||||
story = { path = "crates/story" }
|
||||
storybook = { path = "crates/storybook" }
|
||||
streaming_diff = { path = "crates/streaming_diff" }
|
||||
sum_tree = { path = "crates/sum_tree" }
|
||||
sum_tree = { path = "crates/sum_tree", package = "zed-sum-tree", version = "0.1.0" }
|
||||
supermaven = { path = "crates/supermaven" }
|
||||
supermaven_api = { path = "crates/supermaven_api" }
|
||||
codestral = { path = "crates/codestral" }
|
||||
system_specs = { path = "crates/system_specs" }
|
||||
tab_switcher = { path = "crates/tab_switcher" }
|
||||
task = { path = "crates/task" }
|
||||
@@ -420,8 +418,8 @@ ui = { path = "crates/ui" }
|
||||
ui_input = { path = "crates/ui_input" }
|
||||
ui_macros = { path = "crates/ui_macros" }
|
||||
ui_prompt = { path = "crates/ui_prompt" }
|
||||
util = { path = "crates/util" }
|
||||
util_macros = { path = "crates/util_macros" }
|
||||
util = { path = "crates/util", package = "zed-util", version = "0.1.0" }
|
||||
util_macros = { path = "crates/util_macros", package = "zed-util-macros", version = "0.1.0" }
|
||||
vercel = { path = "crates/vercel" }
|
||||
vim = { path = "crates/vim" }
|
||||
vim_mode_setting = { path = "crates/vim_mode_setting" }
|
||||
@@ -478,6 +476,7 @@ bitflags = "2.6.0"
|
||||
blade-graphics = { version = "0.7.0" }
|
||||
blade-macros = { version = "0.3.0" }
|
||||
blade-util = { version = "0.3.0" }
|
||||
blake3 = "1.5.3"
|
||||
bytes = "1.0"
|
||||
cargo_metadata = "0.19"
|
||||
cargo_toml = "0.21"
|
||||
@@ -654,7 +653,7 @@ strum = { version = "0.27.0", features = ["derive"] }
|
||||
subtle = "2.5.0"
|
||||
syn = { version = "2.0.101", features = ["full", "extra-traits", "visit-mut"] }
|
||||
sys-locale = "0.3.1"
|
||||
sysinfo = "0.37.0"
|
||||
sysinfo = "0.31.0"
|
||||
take-until = "0.2.0"
|
||||
tempfile = "3.20.0"
|
||||
thiserror = "2.0.12"
|
||||
@@ -693,7 +692,7 @@ tree-sitter-python = "0.25"
|
||||
tree-sitter-regex = "0.24"
|
||||
tree-sitter-ruby = "0.23"
|
||||
tree-sitter-rust = "0.24"
|
||||
tree-sitter-typescript = { git = "https://github.com/zed-industries/tree-sitter-typescript", rev = "e2c53597d6a5d9cf7bbe8dccde576fe1e46c5899" } # https://github.com/tree-sitter/tree-sitter-typescript/pull/347
|
||||
tree-sitter-typescript = "0.23"
|
||||
tree-sitter-yaml = { git = "https://github.com/zed-industries/tree-sitter-yaml", rev = "baff0b51c64ef6a1fb1f8390f3ad6015b83ec13a" }
|
||||
unicase = "2.6"
|
||||
unicode-script = "0.5.7"
|
||||
@@ -805,7 +804,7 @@ wasmtime = { opt-level = 3 }
|
||||
activity_indicator = { codegen-units = 1 }
|
||||
assets = { codegen-units = 1 }
|
||||
breadcrumbs = { codegen-units = 1 }
|
||||
collections = { codegen-units = 1 }
|
||||
zed-collections = { codegen-units = 1 }
|
||||
command_palette = { codegen-units = 1 }
|
||||
command_palette_hooks = { codegen-units = 1 }
|
||||
extension_cli = { codegen-units = 1 }
|
||||
@@ -825,11 +824,11 @@ outline = { codegen-units = 1 }
|
||||
paths = { codegen-units = 1 }
|
||||
prettier = { codegen-units = 1 }
|
||||
project_symbols = { codegen-units = 1 }
|
||||
refineable = { codegen-units = 1 }
|
||||
zed-refineable = { codegen-units = 1 }
|
||||
release_channel = { codegen-units = 1 }
|
||||
reqwest_client = { codegen-units = 1 }
|
||||
rich_text = { codegen-units = 1 }
|
||||
semantic_version = { codegen-units = 1 }
|
||||
zed-semantic-version = { codegen-units = 1 }
|
||||
session = { codegen-units = 1 }
|
||||
snippet = { codegen-units = 1 }
|
||||
snippets_ui = { codegen-units = 1 }
|
||||
|
||||
2
Cross.toml
Normal file
2
Cross.toml
Normal file
@@ -0,0 +1,2 @@
|
||||
[build]
|
||||
dockerfile = "Dockerfile-cross"
|
||||
17
Dockerfile-cross
Normal file
17
Dockerfile-cross
Normal file
@@ -0,0 +1,17 @@
|
||||
# syntax=docker/dockerfile:1
|
||||
|
||||
ARG CROSS_BASE_IMAGE
|
||||
FROM ${CROSS_BASE_IMAGE}
|
||||
WORKDIR /app
|
||||
ARG TZ=Etc/UTC \
|
||||
LANG=C.UTF-8 \
|
||||
LC_ALL=C.UTF-8 \
|
||||
DEBIAN_FRONTEND=noninteractive
|
||||
ENV CARGO_TERM_COLOR=always
|
||||
|
||||
COPY script/install-mold script/
|
||||
RUN ./script/install-mold "2.34.0"
|
||||
COPY script/remote-server script/
|
||||
RUN ./script/remote-server
|
||||
|
||||
COPY . .
|
||||
@@ -1,3 +1,9 @@
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M13.2806 4.66818L8.26042 1.76982C8.09921 1.67673 7.9003 1.67673 7.73909 1.76982L2.71918 4.66818C2.58367 4.74642 2.5 4.89112 2.5 5.04785V10.8924C2.5 11.0489 2.58367 11.1938 2.71918 11.2721L7.73934 14.1704C7.90054 14.2635 8.09946 14.2635 8.26066 14.1704L13.2808 11.2721C13.4163 11.1938 13.5 11.0491 13.5 10.8924V5.04785C13.5 4.89136 13.4163 4.74642 13.2808 4.66818H13.2806ZM12.9653 5.28212L8.11901 13.676C8.08626 13.7326 7.99977 13.7095 7.99977 13.6439V8.14771C7.99977 8.03788 7.94107 7.9363 7.84586 7.88115L3.08613 5.13317C3.02957 5.10041 3.05266 5.0139 3.11818 5.0139H12.8106C12.9483 5.0139 13.0343 5.1631 12.9655 5.28236H12.9653V5.28212Z" fill="#C4CAD4"/>
|
||||
<path opacity="0.6" d="M3.5 11V5.5L8.5 8L3.5 11Z" fill="black"/>
|
||||
<path opacity="0.4" d="M8.5 14L3.5 11L8.5 8V14Z" fill="black"/>
|
||||
<path opacity="0.6" d="M8.5 5.5H3.5L8.5 2.5L8.5 5.5Z" fill="black"/>
|
||||
<path opacity="0.8" d="M8.5 5.5V2.5L13.5 5.5H8.5Z" fill="black"/>
|
||||
<path opacity="0.2" d="M13.5 11L8.5 14L11 9.5L13.5 11Z" fill="black"/>
|
||||
<path opacity="0.5" d="M13.5 11L11 9.5L13.5 5V11Z" fill="black"/>
|
||||
<path d="M3.5 11V5L8.5 2.11325L13.5 5V11L8.5 13.8868L3.5 11Z" stroke="black"/>
|
||||
</svg>
|
||||
|
||||
|
Before Width: | Height: | Size: 769 B After Width: | Height: | Size: 583 B |
@@ -1,4 +1 @@
|
||||
<svg width="16" height="16" viewBox="0 0 16 16" fill="none" xmlns="http://www.w3.org/2000/svg">
|
||||
<path d="M6.125 9.25001L3 6.125L6.125 3" stroke="#C4CAD4" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
<path d="M3 6.125H9.56251C10.0139 6.125 10.4609 6.21391 10.878 6.38666C11.295 6.55942 11.674 6.81262 11.9932 7.13182C12.3124 7.45102 12.5656 7.82997 12.7383 8.24703C12.9111 8.66408 13 9.11108 13 9.5625C13 10.0139 12.9111 10.4609 12.7383 10.878C12.5656 11.295 12.3124 11.674 11.9932 11.9932C11.674 12.3124 11.295 12.5656 10.878 12.7383C10.4609 12.9111 10.0139 13 9.56251 13H7.375" stroke="black" stroke-width="1.2" stroke-linecap="round" stroke-linejoin="round"/>
|
||||
</svg>
|
||||
<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" fill="none"><path stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.2" d="M2.6 5v3.6h3.6"/><path stroke="#000" stroke-linecap="round" stroke-linejoin="round" stroke-width="1.2" d="M13.4 11A5.4 5.4 0 0 0 8 5.6a5.4 5.4 0 0 0-3.6 1.38L2.6 8.6"/></svg>
|
||||
|
||||
|
Before Width: | Height: | Size: 692 B After Width: | Height: | Size: 339 B |
@@ -30,8 +30,8 @@
|
||||
"ctrl-+": ["zed::IncreaseBufferFontSize", { "persist": false }],
|
||||
"ctrl--": ["zed::DecreaseBufferFontSize", { "persist": false }],
|
||||
"ctrl-0": ["zed::ResetBufferFontSize", { "persist": false }],
|
||||
"ctrl-,": "zed::OpenSettings",
|
||||
"ctrl-alt-,": "zed::OpenSettingsFile",
|
||||
"ctrl-,": "zed::OpenSettingsEditor",
|
||||
"ctrl-alt-,": "zed::OpenSettings",
|
||||
"ctrl-q": "zed::Quit",
|
||||
"f4": "debugger::Start",
|
||||
"shift-f5": "debugger::Stop",
|
||||
@@ -374,6 +374,13 @@
|
||||
"ctrl-w": "workspace::CloseWindow"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "SettingsWindow",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"ctrl-w": "workspace::CloseWindow"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "BufferSearchBar",
|
||||
"bindings": {
|
||||
@@ -527,15 +534,15 @@
|
||||
"ctrl-k ctrl-l": "editor::ToggleFold",
|
||||
"ctrl-k ctrl-[": "editor::FoldRecursive",
|
||||
"ctrl-k ctrl-]": "editor::UnfoldRecursive",
|
||||
"ctrl-k ctrl-1": "editor::FoldAtLevel_1",
|
||||
"ctrl-k ctrl-2": "editor::FoldAtLevel_2",
|
||||
"ctrl-k ctrl-3": "editor::FoldAtLevel_3",
|
||||
"ctrl-k ctrl-4": "editor::FoldAtLevel_4",
|
||||
"ctrl-k ctrl-5": "editor::FoldAtLevel_5",
|
||||
"ctrl-k ctrl-6": "editor::FoldAtLevel_6",
|
||||
"ctrl-k ctrl-7": "editor::FoldAtLevel_7",
|
||||
"ctrl-k ctrl-8": "editor::FoldAtLevel_8",
|
||||
"ctrl-k ctrl-9": "editor::FoldAtLevel_9",
|
||||
"ctrl-k ctrl-1": ["editor::FoldAtLevel", 1],
|
||||
"ctrl-k ctrl-2": ["editor::FoldAtLevel", 2],
|
||||
"ctrl-k ctrl-3": ["editor::FoldAtLevel", 3],
|
||||
"ctrl-k ctrl-4": ["editor::FoldAtLevel", 4],
|
||||
"ctrl-k ctrl-5": ["editor::FoldAtLevel", 5],
|
||||
"ctrl-k ctrl-6": ["editor::FoldAtLevel", 6],
|
||||
"ctrl-k ctrl-7": ["editor::FoldAtLevel", 7],
|
||||
"ctrl-k ctrl-8": ["editor::FoldAtLevel", 8],
|
||||
"ctrl-k ctrl-9": ["editor::FoldAtLevel", 9],
|
||||
"ctrl-k ctrl-0": "editor::FoldAll",
|
||||
"ctrl-k ctrl-j": "editor::UnfoldAll",
|
||||
"ctrl-space": "editor::ShowCompletions",
|
||||
@@ -621,7 +628,7 @@
|
||||
"ctrl-shift-f": "pane::DeploySearch",
|
||||
"ctrl-shift-h": ["pane::DeploySearch", { "replace_enabled": true }],
|
||||
"ctrl-shift-t": "pane::ReopenClosedItem",
|
||||
"ctrl-k ctrl-s": "zed::OpenKeymap",
|
||||
"ctrl-k ctrl-s": "zed::OpenKeymapEditor",
|
||||
"ctrl-k ctrl-t": "theme_selector::Toggle",
|
||||
"ctrl-alt-super-p": "settings_profile_selector::Toggle",
|
||||
"ctrl-t": "project_symbols::Toggle",
|
||||
@@ -1229,6 +1236,9 @@
|
||||
"context": "Onboarding",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"ctrl-1": "onboarding::ActivateBasicsPage",
|
||||
"ctrl-2": "onboarding::ActivateEditingPage",
|
||||
"ctrl-3": "onboarding::ActivateAISetupPage",
|
||||
"ctrl-enter": "onboarding::Finish",
|
||||
"alt-shift-l": "onboarding::SignIn",
|
||||
"alt-shift-a": "onboarding::OpenAccount"
|
||||
@@ -1240,44 +1250,5 @@
|
||||
"bindings": {
|
||||
"ctrl-shift-enter": "workspace::OpenWithSystem"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "SettingsWindow",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"ctrl-w": "workspace::CloseWindow",
|
||||
"escape": "workspace::CloseWindow",
|
||||
"ctrl-m": "settings_editor::Minimize",
|
||||
"ctrl-f": "search::FocusSearch",
|
||||
"left": "settings_editor::ToggleFocusNav",
|
||||
"ctrl-shift-e": "settings_editor::ToggleFocusNav",
|
||||
// todo(settings_ui): cut this down based on the max files and overflow UI
|
||||
"ctrl-1": ["settings_editor::FocusFile", 0],
|
||||
"ctrl-2": ["settings_editor::FocusFile", 1],
|
||||
"ctrl-3": ["settings_editor::FocusFile", 2],
|
||||
"ctrl-4": ["settings_editor::FocusFile", 3],
|
||||
"ctrl-5": ["settings_editor::FocusFile", 4],
|
||||
"ctrl-6": ["settings_editor::FocusFile", 5],
|
||||
"ctrl-7": ["settings_editor::FocusFile", 6],
|
||||
"ctrl-8": ["settings_editor::FocusFile", 7],
|
||||
"ctrl-9": ["settings_editor::FocusFile", 8],
|
||||
"ctrl-0": ["settings_editor::FocusFile", 9],
|
||||
"ctrl-pageup": "settings_editor::FocusPreviousFile",
|
||||
"ctrl-pagedown": "settings_editor::FocusNextFile"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "SettingsWindow > NavigationMenu",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"up": "settings_editor::FocusPreviousNavEntry",
|
||||
"down": "settings_editor::FocusNextNavEntry",
|
||||
"right": "settings_editor::ExpandNavEntry",
|
||||
"left": "settings_editor::CollapseNavEntry",
|
||||
"pageup": "settings_editor::FocusPreviousRootNavEntry",
|
||||
"pagedown": "settings_editor::FocusNextRootNavEntry",
|
||||
"home": "settings_editor::FocusFirstNavEntry",
|
||||
"end": "settings_editor::FocusLastNavEntry"
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
@@ -39,8 +39,8 @@
|
||||
"cmd-+": ["zed::IncreaseBufferFontSize", { "persist": false }],
|
||||
"cmd--": ["zed::DecreaseBufferFontSize", { "persist": false }],
|
||||
"cmd-0": ["zed::ResetBufferFontSize", { "persist": false }],
|
||||
"cmd-,": "zed::OpenSettings",
|
||||
"cmd-alt-,": "zed::OpenSettingsFile",
|
||||
"cmd-,": "zed::OpenSettingsEditor",
|
||||
"cmd-alt-,": "zed::OpenSettings",
|
||||
"cmd-q": "zed::Quit",
|
||||
"cmd-h": "zed::Hide",
|
||||
"alt-cmd-h": "zed::HideOthers",
|
||||
@@ -431,6 +431,13 @@
|
||||
"cmd-w": "workspace::CloseWindow"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "SettingsWindow",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"cmd-w": "workspace::CloseWindow"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "BufferSearchBar",
|
||||
"use_key_equivalents": true,
|
||||
@@ -582,15 +589,15 @@
|
||||
"cmd-k cmd-l": "editor::ToggleFold",
|
||||
"cmd-k cmd-[": "editor::FoldRecursive",
|
||||
"cmd-k cmd-]": "editor::UnfoldRecursive",
|
||||
"cmd-k cmd-1": "editor::FoldAtLevel_1",
|
||||
"cmd-k cmd-2": "editor::FoldAtLevel_2",
|
||||
"cmd-k cmd-3": "editor::FoldAtLevel_3",
|
||||
"cmd-k cmd-4": "editor::FoldAtLevel_4",
|
||||
"cmd-k cmd-5": "editor::FoldAtLevel_5",
|
||||
"cmd-k cmd-6": "editor::FoldAtLevel_6",
|
||||
"cmd-k cmd-7": "editor::FoldAtLevel_7",
|
||||
"cmd-k cmd-8": "editor::FoldAtLevel_8",
|
||||
"cmd-k cmd-9": "editor::FoldAtLevel_9",
|
||||
"cmd-k cmd-1": ["editor::FoldAtLevel", 1],
|
||||
"cmd-k cmd-2": ["editor::FoldAtLevel", 2],
|
||||
"cmd-k cmd-3": ["editor::FoldAtLevel", 3],
|
||||
"cmd-k cmd-4": ["editor::FoldAtLevel", 4],
|
||||
"cmd-k cmd-5": ["editor::FoldAtLevel", 5],
|
||||
"cmd-k cmd-6": ["editor::FoldAtLevel", 6],
|
||||
"cmd-k cmd-7": ["editor::FoldAtLevel", 7],
|
||||
"cmd-k cmd-8": ["editor::FoldAtLevel", 8],
|
||||
"cmd-k cmd-9": ["editor::FoldAtLevel", 9],
|
||||
"cmd-k cmd-0": "editor::FoldAll",
|
||||
"cmd-k cmd-j": "editor::UnfoldAll",
|
||||
// Using `ctrl-space` / `ctrl-shift-space` in Zed requires disabling the macOS global shortcut.
|
||||
@@ -690,7 +697,7 @@
|
||||
"cmd-shift-f": "pane::DeploySearch",
|
||||
"cmd-shift-h": ["pane::DeploySearch", { "replace_enabled": true }],
|
||||
"cmd-shift-t": "pane::ReopenClosedItem",
|
||||
"cmd-k cmd-s": "zed::OpenKeymap",
|
||||
"cmd-k cmd-s": "zed::OpenKeymapEditor",
|
||||
"cmd-k cmd-t": "theme_selector::Toggle",
|
||||
"ctrl-alt-cmd-p": "settings_profile_selector::Toggle",
|
||||
"cmd-t": "project_symbols::Toggle",
|
||||
@@ -1334,7 +1341,10 @@
|
||||
"context": "Onboarding",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"cmd-enter": "onboarding::Finish",
|
||||
"cmd-1": "onboarding::ActivateBasicsPage",
|
||||
"cmd-2": "onboarding::ActivateEditingPage",
|
||||
"cmd-3": "onboarding::ActivateAISetupPage",
|
||||
"cmd-escape": "onboarding::Finish",
|
||||
"alt-tab": "onboarding::SignIn",
|
||||
"alt-shift-a": "onboarding::OpenAccount"
|
||||
}
|
||||
@@ -1345,44 +1355,5 @@
|
||||
"bindings": {
|
||||
"ctrl-shift-enter": "workspace::OpenWithSystem"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "SettingsWindow",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"cmd-w": "workspace::CloseWindow",
|
||||
"escape": "workspace::CloseWindow",
|
||||
"cmd-m": "settings_editor::Minimize",
|
||||
"cmd-f": "search::FocusSearch",
|
||||
"left": "settings_editor::ToggleFocusNav",
|
||||
"cmd-shift-e": "settings_editor::ToggleFocusNav",
|
||||
// todo(settings_ui): cut this down based on the max files and overflow UI
|
||||
"ctrl-1": ["settings_editor::FocusFile", 0],
|
||||
"ctrl-2": ["settings_editor::FocusFile", 1],
|
||||
"ctrl-3": ["settings_editor::FocusFile", 2],
|
||||
"ctrl-4": ["settings_editor::FocusFile", 3],
|
||||
"ctrl-5": ["settings_editor::FocusFile", 4],
|
||||
"ctrl-6": ["settings_editor::FocusFile", 5],
|
||||
"ctrl-7": ["settings_editor::FocusFile", 6],
|
||||
"ctrl-8": ["settings_editor::FocusFile", 7],
|
||||
"ctrl-9": ["settings_editor::FocusFile", 8],
|
||||
"ctrl-0": ["settings_editor::FocusFile", 9],
|
||||
"cmd-{": "settings_editor::FocusPreviousFile",
|
||||
"cmd-}": "settings_editor::FocusNextFile"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "SettingsWindow > NavigationMenu",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"up": "settings_editor::FocusPreviousNavEntry",
|
||||
"down": "settings_editor::FocusNextNavEntry",
|
||||
"right": "settings_editor::ExpandNavEntry",
|
||||
"left": "settings_editor::CollapseNavEntry",
|
||||
"pageup": "settings_editor::FocusPreviousRootNavEntry",
|
||||
"pagedown": "settings_editor::FocusNextRootNavEntry",
|
||||
"home": "settings_editor::FocusFirstNavEntry",
|
||||
"end": "settings_editor::FocusLastNavEntry"
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
@@ -29,8 +29,8 @@
|
||||
"ctrl-shift-=": ["zed::IncreaseBufferFontSize", { "persist": false }],
|
||||
"ctrl--": ["zed::DecreaseBufferFontSize", { "persist": false }],
|
||||
"ctrl-0": ["zed::ResetBufferFontSize", { "persist": false }],
|
||||
"ctrl-,": "zed::OpenSettings",
|
||||
"ctrl-alt-,": "zed::OpenSettingsFile",
|
||||
"ctrl-,": "zed::OpenSettingsEditor",
|
||||
"ctrl-alt-,": "zed::OpenSettings",
|
||||
"ctrl-q": "zed::Quit",
|
||||
"f4": "debugger::Start",
|
||||
"shift-f5": "debugger::Stop",
|
||||
@@ -134,7 +134,7 @@
|
||||
"ctrl-k z": "editor::ToggleSoftWrap",
|
||||
"ctrl-f": "buffer_search::Deploy",
|
||||
"ctrl-h": "buffer_search::DeployReplace",
|
||||
"ctrl-shift-.": "agent::QuoteSelection",
|
||||
"ctrl-shift-.": "assistant::QuoteSelection",
|
||||
"ctrl-shift-,": "assistant::InsertIntoEditor",
|
||||
"shift-alt-e": "editor::SelectEnclosingSymbol",
|
||||
"ctrl-shift-backspace": "editor::GoToPreviousChange",
|
||||
@@ -244,7 +244,7 @@
|
||||
"ctrl-shift-i": "agent::ToggleOptionsMenu",
|
||||
// "ctrl-shift-alt-n": "agent::ToggleNewThreadMenu",
|
||||
"shift-alt-escape": "agent::ExpandMessageEditor",
|
||||
"ctrl-shift-.": "agent::QuoteSelection",
|
||||
"ctrl-shift-.": "assistant::QuoteSelection",
|
||||
"shift-alt-e": "agent::RemoveAllContext",
|
||||
"ctrl-shift-e": "project_panel::ToggleFocus",
|
||||
"ctrl-shift-enter": "agent::ContinueThread",
|
||||
@@ -383,6 +383,13 @@
|
||||
"ctrl-w": "workspace::CloseWindow"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "SettingsWindow",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"ctrl-w": "workspace::CloseWindow"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "BufferSearchBar",
|
||||
"use_key_equivalents": true,
|
||||
@@ -536,15 +543,15 @@
|
||||
"ctrl-k ctrl-l": "editor::ToggleFold",
|
||||
"ctrl-k ctrl-[": "editor::FoldRecursive",
|
||||
"ctrl-k ctrl-]": "editor::UnfoldRecursive",
|
||||
"ctrl-k ctrl-1": "editor::FoldAtLevel_1",
|
||||
"ctrl-k ctrl-2": "editor::FoldAtLevel_2",
|
||||
"ctrl-k ctrl-3": "editor::FoldAtLevel_3",
|
||||
"ctrl-k ctrl-4": "editor::FoldAtLevel_4",
|
||||
"ctrl-k ctrl-5": "editor::FoldAtLevel_5",
|
||||
"ctrl-k ctrl-6": "editor::FoldAtLevel_6",
|
||||
"ctrl-k ctrl-7": "editor::FoldAtLevel_7",
|
||||
"ctrl-k ctrl-8": "editor::FoldAtLevel_8",
|
||||
"ctrl-k ctrl-9": "editor::FoldAtLevel_9",
|
||||
"ctrl-k ctrl-1": ["editor::FoldAtLevel", 1],
|
||||
"ctrl-k ctrl-2": ["editor::FoldAtLevel", 2],
|
||||
"ctrl-k ctrl-3": ["editor::FoldAtLevel", 3],
|
||||
"ctrl-k ctrl-4": ["editor::FoldAtLevel", 4],
|
||||
"ctrl-k ctrl-5": ["editor::FoldAtLevel", 5],
|
||||
"ctrl-k ctrl-6": ["editor::FoldAtLevel", 6],
|
||||
"ctrl-k ctrl-7": ["editor::FoldAtLevel", 7],
|
||||
"ctrl-k ctrl-8": ["editor::FoldAtLevel", 8],
|
||||
"ctrl-k ctrl-9": ["editor::FoldAtLevel", 9],
|
||||
"ctrl-k ctrl-0": "editor::FoldAll",
|
||||
"ctrl-k ctrl-j": "editor::UnfoldAll",
|
||||
"ctrl-space": "editor::ShowCompletions",
|
||||
@@ -623,7 +630,7 @@
|
||||
"ctrl-shift-f": "pane::DeploySearch",
|
||||
"ctrl-shift-h": ["pane::DeploySearch", { "replace_enabled": true }],
|
||||
"ctrl-shift-t": "pane::ReopenClosedItem",
|
||||
"ctrl-k ctrl-s": "zed::OpenKeymap",
|
||||
"ctrl-k ctrl-s": "zed::OpenKeymapEditor",
|
||||
"ctrl-k ctrl-t": "theme_selector::Toggle",
|
||||
"ctrl-alt-super-p": "settings_profile_selector::Toggle",
|
||||
"ctrl-t": "project_symbols::Toggle",
|
||||
@@ -1257,48 +1264,12 @@
|
||||
"context": "Onboarding",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"ctrl-1": "onboarding::ActivateBasicsPage",
|
||||
"ctrl-2": "onboarding::ActivateEditingPage",
|
||||
"ctrl-3": "onboarding::ActivateAISetupPage",
|
||||
"ctrl-enter": "onboarding::Finish",
|
||||
"alt-shift-l": "onboarding::SignIn",
|
||||
"shift-alt-a": "onboarding::OpenAccount"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "SettingsWindow",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"ctrl-w": "workspace::CloseWindow",
|
||||
"escape": "workspace::CloseWindow",
|
||||
"ctrl-m": "settings_editor::Minimize",
|
||||
"ctrl-f": "search::FocusSearch",
|
||||
"left": "settings_editor::ToggleFocusNav",
|
||||
"ctrl-shift-e": "settings_editor::ToggleFocusNav",
|
||||
// todo(settings_ui): cut this down based on the max files and overflow UI
|
||||
"ctrl-1": ["settings_editor::FocusFile", 0],
|
||||
"ctrl-2": ["settings_editor::FocusFile", 1],
|
||||
"ctrl-3": ["settings_editor::FocusFile", 2],
|
||||
"ctrl-4": ["settings_editor::FocusFile", 3],
|
||||
"ctrl-5": ["settings_editor::FocusFile", 4],
|
||||
"ctrl-6": ["settings_editor::FocusFile", 5],
|
||||
"ctrl-7": ["settings_editor::FocusFile", 6],
|
||||
"ctrl-8": ["settings_editor::FocusFile", 7],
|
||||
"ctrl-9": ["settings_editor::FocusFile", 8],
|
||||
"ctrl-0": ["settings_editor::FocusFile", 9],
|
||||
"ctrl-pageup": "settings_editor::FocusPreviousFile",
|
||||
"ctrl-pagedown": "settings_editor::FocusNextFile"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "SettingsWindow > NavigationMenu",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"up": "settings_editor::FocusPreviousNavEntry",
|
||||
"down": "settings_editor::FocusNextNavEntry",
|
||||
"right": "settings_editor::ExpandNavEntry",
|
||||
"left": "settings_editor::CollapseNavEntry",
|
||||
"pageup": "settings_editor::FocusPreviousRootNavEntry",
|
||||
"pagedown": "settings_editor::FocusNextRootNavEntry",
|
||||
"home": "settings_editor::FocusFirstNavEntry",
|
||||
"end": "settings_editor::FocusLastNavEntry"
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
[
|
||||
{
|
||||
"bindings": {
|
||||
"ctrl-alt-s": "zed::OpenSettingsFile",
|
||||
"ctrl-alt-s": "zed::OpenSettings",
|
||||
"ctrl-{": "pane::ActivatePreviousItem",
|
||||
"ctrl-}": "pane::ActivateNextItem",
|
||||
"shift-escape": null, // Unmap workspace::zoom
|
||||
|
||||
@@ -580,18 +580,18 @@
|
||||
// "q": "vim::AnyQuotes",
|
||||
"q": "vim::MiniQuotes",
|
||||
"|": "vim::VerticalBars",
|
||||
"(": ["vim::Parentheses", { "opening": true }],
|
||||
"(": "vim::Parentheses",
|
||||
")": "vim::Parentheses",
|
||||
"b": "vim::Parentheses",
|
||||
// "b": "vim::AnyBrackets",
|
||||
// "b": "vim::MiniBrackets",
|
||||
"[": ["vim::SquareBrackets", { "opening": true }],
|
||||
"[": "vim::SquareBrackets",
|
||||
"]": "vim::SquareBrackets",
|
||||
"r": "vim::SquareBrackets",
|
||||
"{": ["vim::CurlyBrackets", { "opening": true }],
|
||||
"{": "vim::CurlyBrackets",
|
||||
"}": "vim::CurlyBrackets",
|
||||
"shift-b": "vim::CurlyBrackets",
|
||||
"<": ["vim::AngleBrackets", { "opening": true }],
|
||||
"<": "vim::AngleBrackets",
|
||||
">": "vim::AngleBrackets",
|
||||
"a": "vim::Argument",
|
||||
"i": "vim::IndentObj",
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
{
|
||||
"$schema": "zed://schemas/settings",
|
||||
/// The displayed name of this project. If not set or empty, the root directory name
|
||||
/// will be displayed.
|
||||
"project_name": "",
|
||||
@@ -77,7 +76,7 @@
|
||||
"ui_font_size": 16,
|
||||
// The default font size for agent responses in the agent panel. Falls back to the UI font size if unset.
|
||||
"agent_ui_font_size": null,
|
||||
// The default font size for user messages in the agent panel.
|
||||
// The default font size for user messages in the agent panel. Falls back to the buffer font size if unset.
|
||||
"agent_buffer_font_size": 12,
|
||||
// How much to fade out unused code.
|
||||
"unnecessary_code_fade": 0.3,
|
||||
@@ -722,9 +721,7 @@
|
||||
// Whether to enable drag-and-drop operations in the project panel.
|
||||
"drag_and_drop": true,
|
||||
// Whether to hide the root entry when only one folder is open in the window.
|
||||
"hide_root": false,
|
||||
// Whether to hide the hidden entries in the project panel.
|
||||
"hide_hidden": false
|
||||
"hide_root": false
|
||||
},
|
||||
"outline_panel": {
|
||||
// Whether to show the outline panel button in the status bar
|
||||
@@ -906,7 +903,6 @@
|
||||
"now": true,
|
||||
"find_path": true,
|
||||
"read_file": true,
|
||||
"open": true,
|
||||
"grep": true,
|
||||
"terminal": true,
|
||||
"thinking": true,
|
||||
@@ -918,6 +914,7 @@
|
||||
// We don't know which of the context server tools are safe for the "Ask" profile, so we don't enable them by default.
|
||||
// "enable_all_context_servers": true,
|
||||
"tools": {
|
||||
"contents": true,
|
||||
"diagnostics": true,
|
||||
"fetch": true,
|
||||
"list_directory": true,
|
||||
@@ -1104,31 +1101,25 @@
|
||||
// Removes any lines containing only whitespace at the end of the file and
|
||||
// ensures just one newline at the end.
|
||||
"ensure_final_newline_on_save": true,
|
||||
// Whether or not to perform a buffer format before saving: [on, off]
|
||||
// Whether or not to perform a buffer format before saving: [on, off, prettier, language_server]
|
||||
// Keep in mind, if the autosave with delay is enabled, format_on_save will be ignored
|
||||
"format_on_save": "on",
|
||||
// How to perform a buffer format. This setting can take multiple values:
|
||||
// How to perform a buffer format. This setting can take 4 values:
|
||||
//
|
||||
// 1. Default. Format files using Zed's Prettier integration (if applicable),
|
||||
// or falling back to formatting via language server:
|
||||
// "formatter": "auto"
|
||||
// 2. Format code using the current language server:
|
||||
// 1. Format code using the current language server:
|
||||
// "formatter": "language_server"
|
||||
// 3. Format code using a specific language server:
|
||||
// "formatter": {"language_server": {"name": "ruff"}}
|
||||
// 4. Format code using an external command:
|
||||
// 2. Format code using an external command:
|
||||
// "formatter": {
|
||||
// "external": {
|
||||
// "command": "prettier",
|
||||
// "arguments": ["--stdin-filepath", "{buffer_path}"]
|
||||
// }
|
||||
// }
|
||||
// 5. Format code using Zed's Prettier integration:
|
||||
// 3. Format code using Zed's Prettier integration:
|
||||
// "formatter": "prettier"
|
||||
// 6. Format code using a code action
|
||||
// "formatter": {"code_action": "source.fixAll.eslint"}
|
||||
// 7. An array of any format step specified above to apply in order
|
||||
// "formatter": [{"code_action": "source.fixAll.eslint"}, "prettier"]
|
||||
// 4. Default. Format files using Zed's Prettier integration (if applicable),
|
||||
// or falling back to formatting via language server:
|
||||
// "formatter": "auto"
|
||||
"formatter": "auto",
|
||||
// How to soft-wrap long lines of text.
|
||||
// Possible values:
|
||||
@@ -1242,8 +1233,8 @@
|
||||
"git_gutter": "tracked_files",
|
||||
/// Sets the debounce threshold (in milliseconds) after which changes are reflected in the git gutter.
|
||||
///
|
||||
/// Default: 0
|
||||
"gutter_debounce": 0,
|
||||
/// Default: null
|
||||
"gutter_debounce": null,
|
||||
// Control whether the git blame information is shown inline,
|
||||
// in the currently focused line.
|
||||
"inline_blame": {
|
||||
@@ -1320,18 +1311,15 @@
|
||||
// "proxy": "",
|
||||
// "proxy_no_verify": false
|
||||
// },
|
||||
// Whether edit predictions are enabled when editing text threads.
|
||||
// This setting has no effect if globally disabled.
|
||||
"enabled_in_text_threads": true,
|
||||
|
||||
"copilot": {
|
||||
"enterprise_uri": null,
|
||||
"proxy": null,
|
||||
"proxy_no_verify": null
|
||||
},
|
||||
"codestral": {
|
||||
"model": null,
|
||||
"max_tokens": null
|
||||
},
|
||||
// Whether edit predictions are enabled when editing text threads.
|
||||
// This setting has no effect if globally disabled.
|
||||
"enabled_in_text_threads": true
|
||||
}
|
||||
},
|
||||
// Settings specific to journaling
|
||||
"journal": {
|
||||
@@ -1413,8 +1401,8 @@
|
||||
// 4. A box drawn around the following character
|
||||
// "hollow"
|
||||
//
|
||||
// Default: "block"
|
||||
"cursor_shape": "block",
|
||||
// Default: not set, defaults to "block"
|
||||
"cursor_shape": null,
|
||||
// Set whether Alternate Scroll mode (code: ?1007) is active by default.
|
||||
// Alternate Scroll mode converts mouse scroll events into up / down key
|
||||
// presses when in the alternate screen (e.g. when running applications
|
||||
@@ -1436,8 +1424,8 @@
|
||||
// Whether or not selecting text in the terminal will automatically
|
||||
// copy to the system clipboard.
|
||||
"copy_on_select": false,
|
||||
// Whether to keep the text selection after copying it to the clipboard.
|
||||
"keep_selection_on_copy": true,
|
||||
// Whether to keep the text selection after copying it to the clipboard
|
||||
"keep_selection_on_copy": false,
|
||||
// Whether to show the terminal button in the status bar
|
||||
"button": true,
|
||||
// Any key-value pairs added to this list will be added to the terminal's
|
||||
@@ -1527,6 +1515,7 @@
|
||||
// A value of 45 preserves colorful themes while ensuring legibility.
|
||||
"minimum_contrast": 45
|
||||
},
|
||||
"code_actions_on_format": {},
|
||||
// Settings related to running tasks.
|
||||
"tasks": {
|
||||
"variables": {},
|
||||
@@ -1696,7 +1685,9 @@
|
||||
"preferred_line_length": 72
|
||||
},
|
||||
"Go": {
|
||||
"formatter": [{ "code_action": "source.organizeImports" }, "language_server"],
|
||||
"code_actions_on_format": {
|
||||
"source.organizeImports": true
|
||||
},
|
||||
"debuggers": ["Delve"]
|
||||
},
|
||||
"GraphQL": {
|
||||
@@ -2060,7 +2051,7 @@
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
"profiles": {},
|
||||
"profiles": [],
|
||||
|
||||
// A map of log scopes to the desired log level.
|
||||
// Useful for filtering out noisy logs or enabling more verbose logging.
|
||||
|
||||
@@ -9,8 +9,6 @@ disallowed-methods = [
|
||||
{ path = "std::process::Command::spawn", reason = "Spawning `std::process::Command` can block the current thread for an unknown duration", replacement = "smol::process::Command::spawn" },
|
||||
{ path = "std::process::Command::output", reason = "Spawning `std::process::Command` can block the current thread for an unknown duration", replacement = "smol::process::Command::output" },
|
||||
{ path = "std::process::Command::status", reason = "Spawning `std::process::Command` can block the current thread for an unknown duration", replacement = "smol::process::Command::status" },
|
||||
{ path = "serde_json::from_reader", reason = "Parsing from a buffer is much slower than first reading the buffer into a Vec/String, see https://github.com/serde-rs/json/issues/160#issuecomment-253446892. Use `serde_json::from_slice` instead." },
|
||||
{ path = "serde_json_lenient::from_reader", reason = "Parsing from a buffer is much slower than first reading the buffer into a Vec/String, see https://github.com/serde-rs/json/issues/160#issuecomment-253446892, Use `serde_json_lenient::from_slice` instead." },
|
||||
]
|
||||
disallowed-types = [
|
||||
# { path = "std::collections::HashMap", replacement = "collections::HashMap" },
|
||||
|
||||
@@ -2112,7 +2112,6 @@ impl AcpThread {
|
||||
|
||||
let project = self.project.clone();
|
||||
let language_registry = project.read(cx).languages().clone();
|
||||
let is_windows = project.read(cx).path_style(cx).is_windows();
|
||||
|
||||
let terminal_id = acp::TerminalId(Uuid::new_v4().to_string().into());
|
||||
let terminal_task = cx.spawn({
|
||||
@@ -2126,10 +2125,9 @@ impl AcpThread {
|
||||
.and_then(|r| r.read(cx).default_system_shell())
|
||||
})?
|
||||
.unwrap_or_else(|| get_default_system_shell_preferring_bash());
|
||||
let (task_command, task_args) =
|
||||
ShellBuilder::new(&Shell::Program(shell), is_windows)
|
||||
.redirect_stdin_to_dev_null()
|
||||
.build(Some(command.clone()), &args);
|
||||
let (task_command, task_args) = ShellBuilder::new(&Shell::Program(shell))
|
||||
.redirect_stdin_to_dev_null()
|
||||
.build(Some(command.clone()), &args);
|
||||
let terminal = project
|
||||
.update(cx, |project, cx| {
|
||||
project.create_terminal_task(
|
||||
|
||||
@@ -4,26 +4,22 @@ use std::{
|
||||
fmt::Display,
|
||||
rc::{Rc, Weak},
|
||||
sync::Arc,
|
||||
time::Duration,
|
||||
};
|
||||
|
||||
use agent_client_protocol as acp;
|
||||
use collections::HashMap;
|
||||
use gpui::{
|
||||
App, ClipboardItem, Empty, Entity, EventEmitter, FocusHandle, Focusable, Global, ListAlignment,
|
||||
ListState, StyleRefinement, Subscription, Task, TextStyleRefinement, Window, actions, list,
|
||||
prelude::*,
|
||||
App, Empty, Entity, EventEmitter, FocusHandle, Focusable, Global, ListAlignment, ListState,
|
||||
StyleRefinement, Subscription, Task, TextStyleRefinement, Window, actions, list, prelude::*,
|
||||
};
|
||||
use language::LanguageRegistry;
|
||||
use markdown::{CodeBlockRenderer, Markdown, MarkdownElement, MarkdownStyle};
|
||||
use project::Project;
|
||||
use settings::Settings;
|
||||
use theme::ThemeSettings;
|
||||
use ui::{Tooltip, prelude::*};
|
||||
use ui::prelude::*;
|
||||
use util::ResultExt as _;
|
||||
use workspace::{
|
||||
Item, ItemHandle, ToolbarItemEvent, ToolbarItemLocation, ToolbarItemView, Workspace,
|
||||
};
|
||||
use workspace::{Item, Workspace};
|
||||
|
||||
actions!(dev, [OpenAcpLogs]);
|
||||
|
||||
@@ -231,34 +227,6 @@ impl AcpTools {
|
||||
cx.notify();
|
||||
}
|
||||
|
||||
fn serialize_observed_messages(&self) -> Option<String> {
|
||||
let connection = self.watched_connection.as_ref()?;
|
||||
|
||||
let messages: Vec<serde_json::Value> = connection
|
||||
.messages
|
||||
.iter()
|
||||
.filter_map(|message| {
|
||||
let params = match &message.params {
|
||||
Ok(Some(params)) => params.clone(),
|
||||
Ok(None) => serde_json::Value::Null,
|
||||
Err(err) => serde_json::to_value(err).ok()?,
|
||||
};
|
||||
Some(serde_json::json!({
|
||||
"_direction": match message.direction {
|
||||
acp::StreamMessageDirection::Incoming => "incoming",
|
||||
acp::StreamMessageDirection::Outgoing => "outgoing",
|
||||
},
|
||||
"_type": message.message_type.to_string().to_lowercase(),
|
||||
"id": message.request_id,
|
||||
"method": message.name.to_string(),
|
||||
"params": params,
|
||||
}))
|
||||
})
|
||||
.collect();
|
||||
|
||||
serde_json::to_string_pretty(&messages).ok()
|
||||
}
|
||||
|
||||
fn render_message(
|
||||
&mut self,
|
||||
index: usize,
|
||||
@@ -524,92 +492,3 @@ impl Render for AcpTools {
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub struct AcpToolsToolbarItemView {
|
||||
acp_tools: Option<Entity<AcpTools>>,
|
||||
just_copied: bool,
|
||||
}
|
||||
|
||||
impl AcpToolsToolbarItemView {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
acp_tools: None,
|
||||
just_copied: false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Render for AcpToolsToolbarItemView {
|
||||
fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
let Some(acp_tools) = self.acp_tools.as_ref() else {
|
||||
return Empty.into_any_element();
|
||||
};
|
||||
|
||||
let acp_tools = acp_tools.clone();
|
||||
|
||||
h_flex()
|
||||
.gap_2()
|
||||
.child(
|
||||
IconButton::new(
|
||||
"copy_all_messages",
|
||||
if self.just_copied {
|
||||
IconName::Check
|
||||
} else {
|
||||
IconName::Copy
|
||||
},
|
||||
)
|
||||
.icon_size(IconSize::Small)
|
||||
.tooltip(Tooltip::text(if self.just_copied {
|
||||
"Copied!"
|
||||
} else {
|
||||
"Copy All Messages"
|
||||
}))
|
||||
.disabled(
|
||||
acp_tools
|
||||
.read(cx)
|
||||
.watched_connection
|
||||
.as_ref()
|
||||
.is_none_or(|connection| connection.messages.is_empty()),
|
||||
)
|
||||
.on_click(cx.listener(move |this, _, _window, cx| {
|
||||
if let Some(content) = acp_tools.read(cx).serialize_observed_messages() {
|
||||
cx.write_to_clipboard(ClipboardItem::new_string(content));
|
||||
|
||||
this.just_copied = true;
|
||||
cx.spawn(async move |this, cx| {
|
||||
cx.background_executor().timer(Duration::from_secs(2)).await;
|
||||
this.update(cx, |this, cx| {
|
||||
this.just_copied = false;
|
||||
cx.notify();
|
||||
})
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
})),
|
||||
)
|
||||
.into_any()
|
||||
}
|
||||
}
|
||||
|
||||
impl EventEmitter<ToolbarItemEvent> for AcpToolsToolbarItemView {}
|
||||
|
||||
impl ToolbarItemView for AcpToolsToolbarItemView {
|
||||
fn set_active_pane_item(
|
||||
&mut self,
|
||||
active_pane_item: Option<&dyn ItemHandle>,
|
||||
_window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> ToolbarItemLocation {
|
||||
if let Some(item) = active_pane_item
|
||||
&& let Some(acp_tools) = item.downcast::<AcpTools>()
|
||||
{
|
||||
self.acp_tools = Some(acp_tools);
|
||||
cx.notify();
|
||||
return ToolbarItemLocation::PrimaryRight;
|
||||
}
|
||||
if self.acp_tools.take().is_some() {
|
||||
cx.notify();
|
||||
}
|
||||
ToolbarItemLocation::Hidden
|
||||
}
|
||||
}
|
||||
|
||||
@@ -20,6 +20,7 @@ use std::{
|
||||
cmp::Reverse,
|
||||
collections::HashSet,
|
||||
fmt::Write,
|
||||
path::Path,
|
||||
sync::Arc,
|
||||
time::{Duration, Instant},
|
||||
};
|
||||
@@ -327,13 +328,17 @@ impl ActivityIndicator {
|
||||
.flatten()
|
||||
}
|
||||
|
||||
fn pending_environment_error<'a>(&'a self, cx: &'a App) -> Option<&'a EnvironmentErrorMessage> {
|
||||
self.project.read(cx).peek_environment_error(cx)
|
||||
fn pending_environment_errors<'a>(
|
||||
&'a self,
|
||||
cx: &'a App,
|
||||
) -> impl Iterator<Item = (&'a Arc<Path>, &'a EnvironmentErrorMessage)> {
|
||||
self.project.read(cx).shell_environment_errors(cx)
|
||||
}
|
||||
|
||||
fn content_to_render(&mut self, cx: &mut Context<Self>) -> Option<Content> {
|
||||
// Show if any direnv calls failed
|
||||
if let Some(error) = self.pending_environment_error(cx) {
|
||||
if let Some((abs_path, error)) = self.pending_environment_errors(cx).next() {
|
||||
let abs_path = abs_path.clone();
|
||||
return Some(Content {
|
||||
icon: Some(
|
||||
Icon::new(IconName::Warning)
|
||||
@@ -343,7 +348,7 @@ impl ActivityIndicator {
|
||||
message: error.0.clone(),
|
||||
on_click: Some(Arc::new(move |this, window, cx| {
|
||||
this.project.update(cx, |project, cx| {
|
||||
project.pop_environment_error(cx);
|
||||
project.remove_environment_error(&abs_path, cx);
|
||||
});
|
||||
window.dispatch_action(Box::new(workspace::OpenLog), cx);
|
||||
})),
|
||||
|
||||
@@ -39,6 +39,7 @@ heed.workspace = true
|
||||
http_client.workspace = true
|
||||
icons.workspace = true
|
||||
indoc.workspace = true
|
||||
itertools.workspace = true
|
||||
language.workspace = true
|
||||
language_model.workspace = true
|
||||
log.workspace = true
|
||||
|
||||
@@ -2,6 +2,7 @@ pub mod agent_profile;
|
||||
pub mod context;
|
||||
pub mod context_server_tool;
|
||||
pub mod context_store;
|
||||
pub mod history_store;
|
||||
pub mod thread;
|
||||
pub mod thread_store;
|
||||
pub mod tool_use;
|
||||
|
||||
253
crates/agent/src/history_store.rs
Normal file
253
crates/agent/src/history_store.rs
Normal file
@@ -0,0 +1,253 @@
|
||||
use crate::{ThreadId, thread_store::SerializedThreadMetadata};
|
||||
use anyhow::{Context as _, Result};
|
||||
use assistant_context::SavedContextMetadata;
|
||||
use chrono::{DateTime, Utc};
|
||||
use gpui::{App, AsyncApp, Entity, SharedString, Task, prelude::*};
|
||||
use itertools::Itertools;
|
||||
use paths::contexts_dir;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{collections::VecDeque, path::Path, sync::Arc, time::Duration};
|
||||
use util::ResultExt as _;
|
||||
|
||||
const MAX_RECENTLY_OPENED_ENTRIES: usize = 6;
|
||||
const NAVIGATION_HISTORY_PATH: &str = "agent-navigation-history.json";
|
||||
const SAVE_RECENTLY_OPENED_ENTRIES_DEBOUNCE: Duration = Duration::from_millis(50);
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub enum HistoryEntry {
|
||||
Thread(SerializedThreadMetadata),
|
||||
Context(SavedContextMetadata),
|
||||
}
|
||||
|
||||
impl HistoryEntry {
|
||||
pub fn updated_at(&self) -> DateTime<Utc> {
|
||||
match self {
|
||||
HistoryEntry::Thread(thread) => thread.updated_at,
|
||||
HistoryEntry::Context(context) => context.mtime.to_utc(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn id(&self) -> HistoryEntryId {
|
||||
match self {
|
||||
HistoryEntry::Thread(thread) => HistoryEntryId::Thread(thread.id.clone()),
|
||||
HistoryEntry::Context(context) => HistoryEntryId::Context(context.path.clone()),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn title(&self) -> &SharedString {
|
||||
match self {
|
||||
HistoryEntry::Thread(thread) => &thread.summary,
|
||||
HistoryEntry::Context(context) => &context.title,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Generic identifier for a history entry.
|
||||
#[derive(Clone, PartialEq, Eq, Debug)]
|
||||
pub enum HistoryEntryId {
|
||||
Thread(ThreadId),
|
||||
Context(Arc<Path>),
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
enum SerializedRecentOpen {
|
||||
Thread(String),
|
||||
ContextName(String),
|
||||
/// Old format which stores the full path
|
||||
Context(String),
|
||||
}
|
||||
|
||||
pub struct HistoryStore {
|
||||
context_store: Entity<assistant_context::ContextStore>,
|
||||
recently_opened_entries: VecDeque<HistoryEntryId>,
|
||||
_subscriptions: Vec<gpui::Subscription>,
|
||||
_save_recently_opened_entries_task: Task<()>,
|
||||
}
|
||||
|
||||
impl HistoryStore {
|
||||
pub fn new(
|
||||
context_store: Entity<assistant_context::ContextStore>,
|
||||
initial_recent_entries: impl IntoIterator<Item = HistoryEntryId>,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Self {
|
||||
let subscriptions = vec![cx.observe(&context_store, |_, _, cx| cx.notify())];
|
||||
|
||||
cx.spawn(async move |this, cx| {
|
||||
let entries = Self::load_recently_opened_entries(cx).await.log_err()?;
|
||||
this.update(cx, |this, _| {
|
||||
this.recently_opened_entries
|
||||
.extend(
|
||||
entries.into_iter().take(
|
||||
MAX_RECENTLY_OPENED_ENTRIES
|
||||
.saturating_sub(this.recently_opened_entries.len()),
|
||||
),
|
||||
);
|
||||
})
|
||||
.ok()
|
||||
})
|
||||
.detach();
|
||||
|
||||
Self {
|
||||
context_store,
|
||||
recently_opened_entries: initial_recent_entries.into_iter().collect(),
|
||||
_subscriptions: subscriptions,
|
||||
_save_recently_opened_entries_task: Task::ready(()),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn entries(&self, cx: &mut Context<Self>) -> Vec<HistoryEntry> {
|
||||
let mut history_entries = Vec::new();
|
||||
|
||||
#[cfg(debug_assertions)]
|
||||
if std::env::var("ZED_SIMULATE_NO_THREAD_HISTORY").is_ok() {
|
||||
return history_entries;
|
||||
}
|
||||
|
||||
history_entries.extend(
|
||||
self.context_store
|
||||
.read(cx)
|
||||
.unordered_contexts()
|
||||
.cloned()
|
||||
.map(HistoryEntry::Context),
|
||||
);
|
||||
|
||||
history_entries.sort_unstable_by_key(|entry| std::cmp::Reverse(entry.updated_at()));
|
||||
history_entries
|
||||
}
|
||||
|
||||
pub fn recent_entries(&self, limit: usize, cx: &mut Context<Self>) -> Vec<HistoryEntry> {
|
||||
self.entries(cx).into_iter().take(limit).collect()
|
||||
}
|
||||
|
||||
pub fn recently_opened_entries(&self, cx: &App) -> Vec<HistoryEntry> {
|
||||
#[cfg(debug_assertions)]
|
||||
if std::env::var("ZED_SIMULATE_NO_THREAD_HISTORY").is_ok() {
|
||||
return Vec::new();
|
||||
}
|
||||
|
||||
let context_entries =
|
||||
self.context_store
|
||||
.read(cx)
|
||||
.unordered_contexts()
|
||||
.flat_map(|context| {
|
||||
self.recently_opened_entries
|
||||
.iter()
|
||||
.enumerate()
|
||||
.flat_map(|(index, entry)| match entry {
|
||||
HistoryEntryId::Context(path) if &context.path == path => {
|
||||
Some((index, HistoryEntry::Context(context.clone())))
|
||||
}
|
||||
_ => None,
|
||||
})
|
||||
});
|
||||
|
||||
context_entries
|
||||
// optimization to halt iteration early
|
||||
.take(self.recently_opened_entries.len())
|
||||
.sorted_unstable_by_key(|(index, _)| *index)
|
||||
.map(|(_, entry)| entry)
|
||||
.collect()
|
||||
}
|
||||
|
||||
fn save_recently_opened_entries(&mut self, cx: &mut Context<Self>) {
|
||||
let serialized_entries = self
|
||||
.recently_opened_entries
|
||||
.iter()
|
||||
.filter_map(|entry| match entry {
|
||||
HistoryEntryId::Context(path) => path.file_name().map(|file| {
|
||||
SerializedRecentOpen::ContextName(file.to_string_lossy().into_owned())
|
||||
}),
|
||||
HistoryEntryId::Thread(id) => Some(SerializedRecentOpen::Thread(id.to_string())),
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
self._save_recently_opened_entries_task = cx.spawn(async move |_, cx| {
|
||||
cx.background_executor()
|
||||
.timer(SAVE_RECENTLY_OPENED_ENTRIES_DEBOUNCE)
|
||||
.await;
|
||||
cx.background_spawn(async move {
|
||||
let path = paths::data_dir().join(NAVIGATION_HISTORY_PATH);
|
||||
let content = serde_json::to_string(&serialized_entries)?;
|
||||
std::fs::write(path, content)?;
|
||||
anyhow::Ok(())
|
||||
})
|
||||
.await
|
||||
.log_err();
|
||||
});
|
||||
}
|
||||
|
||||
fn load_recently_opened_entries(cx: &AsyncApp) -> Task<Result<Vec<HistoryEntryId>>> {
|
||||
cx.background_spawn(async move {
|
||||
let path = paths::data_dir().join(NAVIGATION_HISTORY_PATH);
|
||||
let contents = match smol::fs::read_to_string(path).await {
|
||||
Ok(it) => it,
|
||||
Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
|
||||
return Ok(Vec::new());
|
||||
}
|
||||
Err(e) => {
|
||||
return Err(e)
|
||||
.context("deserializing persisted agent panel navigation history");
|
||||
}
|
||||
};
|
||||
let entries = serde_json::from_str::<Vec<SerializedRecentOpen>>(&contents)
|
||||
.context("deserializing persisted agent panel navigation history")?
|
||||
.into_iter()
|
||||
.take(MAX_RECENTLY_OPENED_ENTRIES)
|
||||
.flat_map(|entry| match entry {
|
||||
SerializedRecentOpen::Thread(id) => {
|
||||
Some(HistoryEntryId::Thread(id.as_str().into()))
|
||||
}
|
||||
SerializedRecentOpen::ContextName(file_name) => Some(HistoryEntryId::Context(
|
||||
contexts_dir().join(file_name).into(),
|
||||
)),
|
||||
SerializedRecentOpen::Context(path) => {
|
||||
Path::new(&path).file_name().map(|file_name| {
|
||||
HistoryEntryId::Context(contexts_dir().join(file_name).into())
|
||||
})
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
Ok(entries)
|
||||
})
|
||||
}
|
||||
|
||||
pub fn push_recently_opened_entry(&mut self, entry: HistoryEntryId, cx: &mut Context<Self>) {
|
||||
self.recently_opened_entries
|
||||
.retain(|old_entry| old_entry != &entry);
|
||||
self.recently_opened_entries.push_front(entry);
|
||||
self.recently_opened_entries
|
||||
.truncate(MAX_RECENTLY_OPENED_ENTRIES);
|
||||
self.save_recently_opened_entries(cx);
|
||||
}
|
||||
|
||||
pub fn remove_recently_opened_thread(&mut self, id: ThreadId, cx: &mut Context<Self>) {
|
||||
self.recently_opened_entries.retain(
|
||||
|entry| !matches!(entry, HistoryEntryId::Thread(thread_id) if thread_id == &id),
|
||||
);
|
||||
self.save_recently_opened_entries(cx);
|
||||
}
|
||||
|
||||
pub fn replace_recently_opened_text_thread(
|
||||
&mut self,
|
||||
old_path: &Path,
|
||||
new_path: &Arc<Path>,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
for entry in &mut self.recently_opened_entries {
|
||||
match entry {
|
||||
HistoryEntryId::Context(path) if path.as_ref() == old_path => {
|
||||
*entry = HistoryEntryId::Context(new_path.clone());
|
||||
break;
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
self.save_recently_opened_entries(cx);
|
||||
}
|
||||
|
||||
pub fn remove_recently_opened_entry(&mut self, entry: &HistoryEntryId, cx: &mut Context<Self>) {
|
||||
self.recently_opened_entries
|
||||
.retain(|old_entry| old_entry != entry);
|
||||
self.save_recently_opened_entries(cx);
|
||||
}
|
||||
}
|
||||
@@ -1276,6 +1276,62 @@ impl Thread {
|
||||
);
|
||||
}
|
||||
|
||||
pub fn retry_last_completion(
|
||||
&mut self,
|
||||
window: Option<AnyWindowHandle>,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
// Clear any existing error state
|
||||
self.retry_state = None;
|
||||
|
||||
// Use the last error context if available, otherwise fall back to configured model
|
||||
let (model, intent) = if let Some((model, intent)) = self.last_error_context.take() {
|
||||
(model, intent)
|
||||
} else if let Some(configured_model) = self.configured_model.as_ref() {
|
||||
let model = configured_model.model.clone();
|
||||
let intent = if self.has_pending_tool_uses() {
|
||||
CompletionIntent::ToolResults
|
||||
} else {
|
||||
CompletionIntent::UserPrompt
|
||||
};
|
||||
(model, intent)
|
||||
} else if let Some(configured_model) = self.get_or_init_configured_model(cx) {
|
||||
let model = configured_model.model.clone();
|
||||
let intent = if self.has_pending_tool_uses() {
|
||||
CompletionIntent::ToolResults
|
||||
} else {
|
||||
CompletionIntent::UserPrompt
|
||||
};
|
||||
(model, intent)
|
||||
} else {
|
||||
return;
|
||||
};
|
||||
|
||||
self.send_to_model(model, intent, window, cx);
|
||||
}
|
||||
|
||||
pub fn enable_burn_mode_and_retry(
|
||||
&mut self,
|
||||
window: Option<AnyWindowHandle>,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.completion_mode = CompletionMode::Burn;
|
||||
cx.emit(ThreadEvent::ProfileChanged);
|
||||
self.retry_last_completion(window, cx);
|
||||
}
|
||||
|
||||
pub fn used_tools_since_last_user_message(&self) -> bool {
|
||||
for message in self.messages.iter().rev() {
|
||||
if self.tool_use.message_has_tool_results(message.id) {
|
||||
return true;
|
||||
} else if message.role == Role::User {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
false
|
||||
}
|
||||
|
||||
pub fn to_completion_request(
|
||||
&self,
|
||||
model: Arc<dyn LanguageModel>,
|
||||
|
||||
@@ -25,21 +25,23 @@ use std::any::Any;
use std::collections::HashMap;
use std::path::{Path, PathBuf};
use std::rc::Rc;
use std::sync::Arc;
use std::sync::{Arc, LazyLock};
use util::ResultExt;
use util::rel_path::RelPath;

const RULES_FILE_NAMES: [&str; 9] = [
".rules",
".cursorrules",
".windsurfrules",
".clinerules",
".github/copilot-instructions.md",
"CLAUDE.md",
"AGENT.md",
"AGENTS.md",
"GEMINI.md",
];
static RULES_FILE_NAMES: LazyLock<[&RelPath; 9]> = LazyLock::new(|| {
[
RelPath::unix(".rules").unwrap(),
RelPath::unix(".cursorrules").unwrap(),
RelPath::unix(".windsurfrules").unwrap(),
RelPath::unix(".clinerules").unwrap(),
RelPath::unix(".github/copilot-instructions.md").unwrap(),
RelPath::unix("CLAUDE.md").unwrap(),
RelPath::unix("AGENT.md").unwrap(),
RelPath::unix("AGENTS.md").unwrap(),
RelPath::unix("GEMINI.md").unwrap(),
]
});

pub struct RulesLoadingError {
pub message: SharedString,
@@ -475,7 +477,7 @@ impl NativeAgent {
.into_iter()
.filter_map(|name| {
worktree
.entry_for_path(RelPath::unix(name).unwrap())
.entry_for_path(name)
.filter(|entry| entry.is_file())
.map(|entry| entry.path.clone())
})
@@ -556,11 +558,10 @@ impl NativeAgent {
self.project_context_needs_refresh.send(()).ok();
}
project::Event::WorktreeUpdatedEntries(_, items) => {
if items.iter().any(|(path, _, _)| {
RULES_FILE_NAMES
.iter()
.any(|name| path.as_ref() == RelPath::unix(name).unwrap())
}) {
if items
.iter()
.any(|(path, _, _)| RULES_FILE_NAMES.iter().any(|name| path.as_ref() == *name))
{
self.project_context_needs_refresh.send(()).ok();
}
}
@@ -1418,6 +1419,7 @@ mod tests {
}

#[gpui::test]
#[cfg_attr(target_os = "windows", ignore)] // TODO: Fix this test on Windows
async fn test_save_load_thread(cx: &mut TestAppContext) {
init_test(cx);
let fs = FakeFs::new(cx.executor());
@@ -1497,8 +1499,7 @@ mod tests {
model.send_last_completion_stream_text_chunk("Lorem.");
model.end_last_completion_stream();
cx.run_until_parked();
summary_model
.send_last_completion_stream_text_chunk(&format!("Explaining {}", path!("/a/b.md")));
summary_model.send_last_completion_stream_text_chunk("Explaining /a/b.md");
summary_model.end_last_completion_stream();

send.await.unwrap();
@@ -1538,7 +1539,7 @@ mod tests {
history_entries(&history_store, cx),
vec![(
HistoryEntryId::AcpThread(session_id.clone()),
format!("Explaining {}", path!("/a/b.md"))
"Explaining /a/b.md".into()
)]
);
let acp_thread = agent
@@ -15,11 +15,10 @@ use agent_settings::{
use anyhow::{Context as _, Result, anyhow};
use assistant_tool::adapt_schema_to_format;
use chrono::{DateTime, Utc};
use client::{ModelRequestUsage, RequestUsage, UserStore};
use cloud_llm_client::{CompletionIntent, CompletionRequestStatus, Plan, UsageLimit};
use client::{ModelRequestUsage, RequestUsage};
use cloud_llm_client::{CompletionIntent, CompletionRequestStatus, UsageLimit};
use collections::{HashMap, HashSet, IndexMap};
use fs::Fs;
use futures::stream;
use futures::{
FutureExt,
channel::{mpsc, oneshot},
@@ -35,7 +34,7 @@ use language_model::{
LanguageModelImage, LanguageModelProviderId, LanguageModelRegistry, LanguageModelRequest,
LanguageModelRequestMessage, LanguageModelRequestTool, LanguageModelToolResult,
LanguageModelToolResultContent, LanguageModelToolSchemaFormat, LanguageModelToolUse,
LanguageModelToolUseId, Role, SelectedModel, StopReason, TokenUsage, ZED_CLOUD_PROVIDER_ID,
LanguageModelToolUseId, Role, SelectedModel, StopReason, TokenUsage,
};
use project::{
Project,
@@ -586,7 +585,6 @@ pub struct Thread {
pending_title_generation: Option<Task<()>>,
summary: Option<SharedString>,
messages: Vec<Message>,
user_store: Entity<UserStore>,
completion_mode: CompletionMode,
/// Holds the task that handles agent interaction until the end of the turn.
/// Survives across multiple requests as the model performs tool calls and
@@ -643,7 +641,6 @@ impl Thread {
pending_title_generation: None,
summary: None,
messages: Vec::new(),
user_store: project.read(cx).user_store(),
completion_mode: AgentSettings::get_global(cx).preferred_completion_mode,
running_turn: None,
pending_message: None,
@@ -823,7 +820,6 @@ impl Thread {
pending_title_generation: None,
summary: db_thread.detailed_summary,
messages: db_thread.messages,
user_store: project.read(cx).user_store(),
completion_mode: db_thread.completion_mode.unwrap_or_default(),
running_turn: None,
pending_message: None,
@@ -1253,12 +1249,12 @@ impl Thread {
);

log::debug!("Calling model.stream_completion, attempt {}", attempt);

let (mut events, mut error) = match model.stream_completion(request, cx).await {
Ok(events) => (events, None),
Err(err) => (stream::empty().boxed(), Some(err)),
};
let mut events = model
.stream_completion(request, cx)
.await
.map_err(|error| anyhow!(error))?;
let mut tool_results = FuturesUnordered::new();
let mut error = None;
while let Some(event) = events.next().await {
log::trace!("Received completion event: {:?}", event);
match event {
@@ -1306,10 +1302,8 @@ impl Thread {

if let Some(error) = error {
attempt += 1;
let retry = this.update(cx, |this, cx| {
let user_store = this.user_store.read(cx);
this.handle_completion_error(error, attempt, user_store.plan())
})??;
let retry =
this.update(cx, |this, _| this.handle_completion_error(error, attempt))??;
let timer = cx.background_executor().timer(retry.duration);
event_stream.send_retry(retry);
timer.await;
@@ -1336,23 +1330,8 @@ impl Thread {
&mut self,
error: LanguageModelCompletionError,
attempt: u8,
plan: Option<Plan>,
) -> Result<acp_thread::RetryStatus> {
let Some(model) = self.model.as_ref() else {
return Err(anyhow!(error));
};

let auto_retry = if model.provider_id() == ZED_CLOUD_PROVIDER_ID {
match plan {
Some(Plan::V2(_)) => true,
Some(Plan::V1(_)) => self.completion_mode == CompletionMode::Burn,
None => false,
}
} else {
true
};

if !auto_retry {
if self.completion_mode == CompletionMode::Normal {
return Err(anyhow!(error));
}

@@ -790,7 +790,7 @@ mod tests {
store.update_user_settings(cx, |settings| {
settings.project.all_languages.defaults.format_on_save = Some(FormatOnSave::On);
settings.project.all_languages.defaults.formatter =
Some(language::language_settings::FormatterList::default());
Some(language::language_settings::SelectedFormatter::Auto);
});
});
});
@@ -9,9 +9,8 @@ use futures::io::BufReader;
use project::Project;
use project::agent_server_store::AgentServerCommand;
use serde::Deserialize;
use settings::{Settings as _, SettingsLocation};
use task::Shell;
use util::{ResultExt as _, get_default_system_shell_preferring_bash};
use util::ResultExt as _;

use std::path::PathBuf;
use std::{any::Any, cell::RefCell};
@@ -23,7 +22,7 @@ use gpui::{App, AppContext as _, AsyncApp, Entity, SharedString, Task, WeakEntit

use acp_thread::{AcpThread, AuthRequired, LoadError, TerminalProviderEvent};
use terminal::TerminalBuilder;
use terminal::terminal_settings::{AlternateScroll, CursorShape, TerminalSettings};
use terminal::terminal_settings::{AlternateScroll, CursorShape};

#[derive(Debug, Error)]
#[error("Unsupported version")]
@@ -169,10 +168,7 @@ impl AcpConnection {
meta: None,
},
terminal: true,
meta: Some(serde_json::json!({
// Experimental: Allow for rendering terminal output from the agents
"terminal_output": true,
})),
meta: None,
},
meta: None,
})
@@ -819,25 +815,13 @@ impl acp::Client for ClientDelegate {
let mut env = if let Some(dir) = &args.cwd {
project
.update(&mut self.cx.clone(), |project, cx| {
let worktree = project.find_worktree(dir.as_path(), cx);
let shell = TerminalSettings::get(
worktree.as_ref().map(|(worktree, path)| SettingsLocation {
worktree_id: worktree.read(cx).id(),
path: &path,
}),
cx,
)
.shell
.clone();
project.directory_environment(&shell, dir.clone().into(), cx)
project.directory_environment(&task::Shell::System, dir.clone().into(), cx)
})?
.await
.unwrap_or_default()
} else {
Default::default()
};
// Disables paging for `git` and hopefully other commands
env.insert("PAGER".into(), "".into());
for var in args.env {
env.insert(var.name, var.value);
}
@@ -850,11 +834,8 @@ impl acp::Client for ClientDelegate {
.and_then(|r| r.read(cx).default_system_shell())
.map(Shell::Program)
})?
.unwrap_or_else(|| Shell::Program(get_default_system_shell_preferring_bash()));
let is_windows = project
.read_with(&self.cx, |project, cx| project.path_style(cx).is_windows())
.unwrap_or(cfg!(windows));
let (task_command, task_args) = task::ShellBuilder::new(&shell, is_windows)
.unwrap_or(task::Shell::System);
let (task_command, task_args) = task::ShellBuilder::new(&shell)
.redirect_stdin_to_dev_null()
.build(Some(args.command.clone()), &args.args);
@@ -1,16 +1,11 @@
use std::rc::Rc;
use std::sync::Arc;
use std::{any::Any, path::Path};

use acp_thread::AgentConnection;
use agent_client_protocol as acp;
use anyhow::{Context as _, Result};
use fs::Fs;
use gpui::{App, AppContext as _, SharedString, Task};
use project::agent_server_store::{AllAgentServersSettings, CODEX_NAME};
use settings::{SettingsStore, update_settings_file};

use crate::{AgentServer, AgentServerDelegate, load_proxy_env};
use acp_thread::AgentConnection;
use anyhow::{Context as _, Result};
use gpui::{App, SharedString, Task};
use project::agent_server_store::CODEX_NAME;

#[derive(Clone)]
pub struct Codex;
@@ -35,27 +30,6 @@ impl AgentServer for Codex {
ui::IconName::AiOpenAi
}

fn default_mode(&self, cx: &mut App) -> Option<acp::SessionModeId> {
let settings = cx.read_global(|settings: &SettingsStore, _| {
settings.get::<AllAgentServersSettings>(None).codex.clone()
});

settings
.as_ref()
.and_then(|s| s.default_mode.clone().map(|m| acp::SessionModeId(m.into())))
}

fn set_default_mode(&self, mode_id: Option<acp::SessionModeId>, fs: Arc<dyn Fs>, cx: &mut App) {
update_settings_file(fs, cx, |settings, _| {
settings
.agent_servers
.get_or_insert_default()
.codex
.get_or_insert_default()
.default_mode = mode_id.map(|m| m.to_string())
});
}

fn connect(
&self,
root_dir: Option<&Path>,
@@ -12,7 +12,7 @@ use anyhow::Result;
use editor::{CompletionProvider, Editor, ExcerptId};
use fuzzy::{StringMatch, StringMatchCandidate};
use gpui::{App, Entity, Task, WeakEntity};
use language::{Buffer, CodeLabel, CodeLabelBuilder, HighlightId};
use language::{Buffer, CodeLabel, HighlightId};
use lsp::CompletionContext;
use project::lsp_store::{CompletionDocumentation, SymbolLocation};
use project::{
@@ -27,7 +27,7 @@ use util::rel_path::RelPath;
use workspace::Workspace;

use crate::AgentPanel;
use crate::acp::message_editor::MessageEditor;
use crate::acp::message_editor::{MessageEditor, MessageEditorEvent};
use crate::context_picker::file_context_picker::{FileMatch, search_files};
use crate::context_picker::rules_context_picker::{RulesContextEntry, search_rules};
use crate::context_picker::symbol_context_picker::SymbolMatch;
@@ -673,7 +673,7 @@ impl ContextPickerCompletionProvider {

fn build_code_label_for_full_path(file_name: &str, directory: Option<&str>, cx: &App) -> CodeLabel {
let comment_id = cx.theme().syntax().highlight_id("comment").map(HighlightId);
let mut label = CodeLabelBuilder::default();
let mut label = CodeLabel::default();

label.push_str(file_name, None);
label.push_str(" ", None);
@@ -682,7 +682,9 @@ fn build_code_label_for_full_path(file_name: &str, directory: Option<&str>, cx:
label.push_str(directory, comment_id);
}

label.build()
label.filter_range = 0..label.text().len();

label
}

impl CompletionProvider for ContextPickerCompletionProvider {
@@ -757,13 +759,13 @@ impl CompletionProvider for ContextPickerCompletionProvider {
let editor = editor.clone();
move |cx| {
editor
.update(cx, |editor, cx| {
.update(cx, |_editor, cx| {
match intent {
CompletionIntent::Complete
| CompletionIntent::CompleteWithInsert
| CompletionIntent::CompleteWithReplace => {
if !is_missing_argument {
editor.send(cx);
cx.emit(MessageEditorEvent::Send);
}
}
CompletionIntent::Compose => {}
@@ -773,7 +775,7 @@ impl CompletionProvider for ContextPickerCompletionProvider {
}
});
}
false
is_missing_argument
}
})),
}
@@ -908,17 +910,6 @@ impl CompletionProvider for ContextPickerCompletionProvider {
offset_to_line,
self.prompt_capabilities.borrow().embedded_context,
)
.filter(|completion| {
// Right now we don't support completing arguments of slash commands
let is_slash_command_with_argument = matches!(
completion,
ContextCompletion::SlashCommand(SlashCommandCompletion {
argument: Some(_),
..
})
);
!is_slash_command_with_argument
})
.map(|completion| {
completion.source_range().start <= offset_to_line + position.column as usize
&& completion.source_range().end >= offset_to_line + position.column as usize
@@ -141,9 +141,7 @@ impl MessageEditor {

subscriptions.push(cx.subscribe_in(&editor, window, {
move |this, editor, event, window, cx| {
if let EditorEvent::Edited { .. } = event
&& !editor.read(cx).read_only(cx)
{
if let EditorEvent::Edited { .. } = event {
let snapshot = editor.update(cx, |editor, cx| {
let new_hints = this
.command_hint(editor.buffer(), cx)
@@ -825,20 +823,13 @@ impl MessageEditor {
});
}

pub fn send(&mut self, cx: &mut Context<Self>) {
fn send(&mut self, _: &Chat, _: &mut Window, cx: &mut Context<Self>) {
if self.is_empty(cx) {
return;
}
self.editor.update(cx, |editor, cx| {
editor.clear_inlay_hints(cx);
});
cx.emit(MessageEditorEvent::Send)
}

fn chat(&mut self, _: &Chat, _: &mut Window, cx: &mut Context<Self>) {
self.send(cx);
}

fn cancel(&mut self, _: &editor::actions::Cancel, _: &mut Window, cx: &mut Context<Self>) {
cx.emit(MessageEditorEvent::Cancel)
}
@@ -1039,7 +1030,6 @@ impl MessageEditor {
) else {
return;
};

self.editor.update(cx, |message_editor, cx| {
message_editor.edit([(cursor_anchor..cursor_anchor, completion.new_text)], cx);
});
@@ -1297,7 +1287,7 @@ impl Render for MessageEditor {
fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
div()
.key_context("MessageEditor")
.on_action(cx.listener(Self::chat))
.on_action(cx.listener(Self::send))
.on_action(cx.listener(Self::cancel))
.capture_action(cx.listener(Self::paste))
.flex_1()
@@ -2021,11 +2011,21 @@ mod tests {
editor.update_in(&mut cx, |editor, _window, cx| {
assert_eq!(editor.text(cx), "/say-hello ");
assert_eq!(editor.display_text(cx), "/say-hello <name>");
assert!(!editor.has_visible_completions_menu());
assert!(editor.has_visible_completions_menu());

assert_eq!(
current_completion_labels_with_documentation(editor),
&[("say-hello".into(), "Say hello to whoever you want".into())]
);
});

cx.simulate_input("GPT5");

editor.update_in(&mut cx, |editor, window, cx| {
assert!(editor.has_visible_completions_menu());
editor.confirm_completion(&editor::actions::ConfirmCompletion::default(), window, cx);
});

cx.run_until_parked();

editor.update_in(&mut cx, |editor, window, cx| {
@@ -2034,7 +2034,7 @@ mod tests {
assert!(!editor.has_visible_completions_menu());

// Delete argument
for _ in 0..5 {
for _ in 0..4 {
editor.backspace(&editor::actions::Backspace, window, cx);
}
});
@@ -2042,12 +2042,13 @@ mod tests {
cx.run_until_parked();

editor.update_in(&mut cx, |editor, window, cx| {
assert_eq!(editor.text(cx), "/say-hello");
assert_eq!(editor.text(cx), "/say-hello ");
// Hint is visible because argument was deleted
assert_eq!(editor.display_text(cx), "/say-hello <name>");

// Delete last command letter
editor.backspace(&editor::actions::Backspace, window, cx);
editor.backspace(&editor::actions::Backspace, window, cx);
});

cx.run_until_parked();
@@ -278,7 +278,7 @@ pub struct AcpThreadView {
thread_feedback: ThreadFeedbackState,
list_state: ListState,
auth_task: Option<Task<()>>,
collapsed_tool_calls: HashSet<acp::ToolCallId>,
expanded_tool_calls: HashSet<acp::ToolCallId>,
expanded_thinking_blocks: HashSet<(usize, usize)>,
edits_expanded: bool,
plan_expanded: bool,
@@ -419,7 +419,7 @@ impl AcpThreadView {
thread_error: None,
thread_feedback: Default::default(),
auth_task: None,
collapsed_tool_calls: HashSet::default(),
expanded_tool_calls: HashSet::default(),
expanded_thinking_blocks: HashSet::default(),
editing_message: None,
edits_expanded: false,
@@ -954,17 +954,17 @@ impl AcpThreadView {
) {
match &event.view_event {
ViewEvent::NewDiff(tool_call_id) => {
if !AgentSettings::get_global(cx).expand_edit_card {
self.collapsed_tool_calls.insert(tool_call_id.clone());
if AgentSettings::get_global(cx).expand_edit_card {
self.expanded_tool_calls.insert(tool_call_id.clone());
}
}
ViewEvent::NewTerminal(tool_call_id) => {
if !AgentSettings::get_global(cx).expand_terminal_card {
self.collapsed_tool_calls.insert(tool_call_id.clone());
if AgentSettings::get_global(cx).expand_terminal_card {
self.expanded_tool_calls.insert(tool_call_id.clone());
}
}
ViewEvent::TerminalMovedToBackground(tool_call_id) => {
self.collapsed_tool_calls.insert(tool_call_id.clone());
self.expanded_tool_calls.remove(tool_call_id);
}
ViewEvent::MessageEditorEvent(_editor, MessageEditorEvent::Focus) => {
if let Some(thread) = self.thread()
@@ -1046,36 +1046,32 @@ impl AcpThreadView {
};

let connection = thread.read(cx).connection().clone();
let can_login = !connection.auth_methods().is_empty() || self.login.is_some();
// Does the agent have a specific logout command? Prefer that in case they need to reset internal state.
let logout_supported = text == "/logout"
&& self
.available_commands
.borrow()
.iter()
.any(|command| command.name == "logout");
if can_login && !logout_supported {
self.message_editor
.update(cx, |editor, cx| editor.clear(window, cx));

let this = cx.weak_entity();
let agent = self.agent.clone();
window.defer(cx, |window, cx| {
Self::handle_auth_required(
this,
AuthRequired {
description: None,
provider_id: None,
},
agent,
connection,
window,
cx,
);
});
cx.notify();
let auth_methods = connection.auth_methods();
let has_supported_auth = auth_methods.iter().any(|method| {
let id = method.id.0.as_ref();
id == "claude-login" || id == "spawn-gemini-cli"
});
let can_login = has_supported_auth || auth_methods.is_empty() || self.login.is_some();
if !can_login {
return;
}
};
let this = cx.weak_entity();
let agent = self.agent.clone();
window.defer(cx, |window, cx| {
Self::handle_auth_required(
this,
AuthRequired {
description: None,
provider_id: None,
},
agent,
connection,
window,
cx,
);
});
cx.notify();
return;
}
self.send_impl(self.message_editor.clone(), window, cx)
@@ -1254,6 +1250,12 @@ impl AcpThreadView {
.detach();
}

fn open_agent_diff(&mut self, _: &OpenAgentDiff, window: &mut Window, cx: &mut Context<Self>) {
if let Some(thread) = self.thread() {
AgentDiffPane::deploy(thread.clone(), self.workspace.clone(), window, cx).log_err();
}
}

fn open_edited_buffer(
&mut self,
buffer: &Entity<Buffer>,
@@ -2119,7 +2121,7 @@ impl AcpThreadView {

let is_collapsible = !tool_call.content.is_empty() && !needs_confirmation;

let is_open = needs_confirmation || !self.collapsed_tool_calls.contains(&tool_call.id);
let is_open = needs_confirmation || self.expanded_tool_calls.contains(&tool_call.id);

let tool_output_display =
if is_open {
@@ -2269,9 +2271,9 @@ impl AcpThreadView {
let id = tool_call.id.clone();
move |this: &mut Self, _, _, cx: &mut Context<Self>| {
if is_open {
this.collapsed_tool_calls.insert(id.clone());
this.expanded_tool_calls.remove(&id);
} else {
this.collapsed_tool_calls.remove(&id);
this.expanded_tool_calls.insert(id.clone());
}
cx.notify();
}
@@ -2473,7 +2475,7 @@ impl AcpThreadView {
.icon_color(Color::Muted)
.on_click(cx.listener({
move |this: &mut Self, _, _, cx: &mut Context<Self>| {
this.collapsed_tool_calls.insert(tool_call_id.clone());
this.expanded_tool_calls.remove(&tool_call_id);
cx.notify();
}
})),
@@ -2725,7 +2727,7 @@ impl AcpThreadView {
let output_line_count = output.map(|output| output.content_line_count).unwrap_or(0);

let command_failed = command_finished
&& output.is_some_and(|o| o.exit_status.is_some_and(|status| !status.success()));
&& output.is_some_and(|o| o.exit_status.is_none_or(|status| !status.success()));

let time_elapsed = if let Some(output) = output {
output.ended_at.duration_since(started_at)
@@ -2751,7 +2753,7 @@ impl AcpThreadView {
.map(|path| path.display().to_string())
.unwrap_or_else(|| "current directory".to_string());

let is_expanded = !self.collapsed_tool_calls.contains(&tool_call.id);
let is_expanded = self.expanded_tool_calls.contains(&tool_call.id);

let header = h_flex()
.id(header_id)
@@ -2886,9 +2888,9 @@ impl AcpThreadView {
let id = tool_call.id.clone();
move |this, _event, _window, _cx| {
if is_expanded {
this.collapsed_tool_calls.insert(id.clone());
this.expanded_tool_calls.remove(&id);
} else {
this.collapsed_tool_calls.remove(&id);
this.expanded_tool_calls.insert(id.clone());
}
}
})),
@@ -3280,12 +3282,6 @@ impl AcpThreadView {
this.style(ButtonStyle::Outlined)
}
})
.when_some(
method.description.clone(),
|this, description| {
this.tooltip(Tooltip::text(description))
},
)
.on_click({
cx.listener(move |this, _, window, cx| {
telemetry::event!(
@@ -4975,12 +4971,10 @@ impl AcpThreadView {
})
}

/// Inserts the selected text into the message editor or the message being
/// edited, if any.
pub(crate) fn insert_selections(&self, window: &mut Window, cx: &mut Context<Self>) {
self.active_editor(cx).update(cx, |editor, cx| {
editor.insert_selections(window, cx);
});
self.message_editor.update(cx, |message_editor, cx| {
message_editor.insert_selections(window, cx);
})
}

fn render_thread_retry_status_callout(
@@ -5391,23 +5385,6 @@ impl AcpThreadView {
};
task.detach_and_log_err(cx);
}

/// Returns the currently active editor, either for a message that is being
/// edited or the editor for a new message.
fn active_editor(&self, cx: &App) -> Entity<MessageEditor> {
if let Some(index) = self.editing_message
&& let Some(editor) = self
.entry_view_state
.read(cx)
.entry(index)
.and_then(|e| e.message_editor())
.cloned()
{
editor
} else {
self.message_editor.clone()
}
}
}

fn loading_contents_spinner(size: IconSize) -> AnyElement {
@@ -5422,7 +5399,7 @@ impl Focusable for AcpThreadView {
fn focus_handle(&self, cx: &App) -> FocusHandle {
match self.thread_state {
ThreadState::Loading { .. } | ThreadState::Ready { .. } => {
self.active_editor(cx).focus_handle(cx)
self.message_editor.focus_handle(cx)
}
ThreadState::LoadError(_) | ThreadState::Unauthenticated { .. } => {
self.focus_handle.clone()
@@ -5439,6 +5416,7 @@ impl Render for AcpThreadView {
v_flex()
.size_full()
.key_context("AcpThread")
.on_action(cx.listener(Self::open_agent_diff))
.on_action(cx.listener(Self::toggle_burn_mode))
.on_action(cx.listener(Self::keep_all))
.on_action(cx.listener(Self::reject_all))
@@ -6682,146 +6660,4 @@ pub(crate) mod tests {
)
});
}

#[gpui::test]
async fn test_message_editing_insert_selections(cx: &mut TestAppContext) {
init_test(cx);

let connection = StubAgentConnection::new();
connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk {
content: acp::ContentBlock::Text(acp::TextContent {
text: "Response".into(),
annotations: None,
meta: None,
}),
}]);

let (thread_view, cx) = setup_thread_view(StubAgentServer::new(connection), cx).await;
add_to_workspace(thread_view.clone(), cx);

let message_editor = cx.read(|cx| thread_view.read(cx).message_editor.clone());
message_editor.update_in(cx, |editor, window, cx| {
editor.set_text("Original message to edit", window, cx)
});
thread_view.update_in(cx, |thread_view, window, cx| thread_view.send(window, cx));
cx.run_until_parked();

let user_message_editor = thread_view.read_with(cx, |thread_view, cx| {
thread_view
.entry_view_state
.read(cx)
.entry(0)
.expect("Should have at least one entry")
.message_editor()
.expect("Should have message editor")
.clone()
});

cx.focus(&user_message_editor);
thread_view.read_with(cx, |thread_view, _cx| {
assert_eq!(thread_view.editing_message, Some(0));
});

// Ensure to edit the focused message before proceeding otherwise, since
// its content is not different from what was sent, focus will be lost.
user_message_editor.update_in(cx, |editor, window, cx| {
editor.set_text("Original message to edit with ", window, cx)
});

// Create a simple buffer with some text so we can create a selection
// that will then be added to the message being edited.
let (workspace, project) = thread_view.read_with(cx, |thread_view, _cx| {
(thread_view.workspace.clone(), thread_view.project.clone())
});
let buffer = project.update(cx, |project, cx| {
project.create_local_buffer("let a = 10 + 10;", None, false, cx)
});

workspace
.update_in(cx, |workspace, window, cx| {
let editor = cx.new(|cx| {
let mut editor =
Editor::for_buffer(buffer.clone(), Some(project.clone()), window, cx);

editor.change_selections(Default::default(), window, cx, |selections| {
selections.select_ranges([8..15]);
});

editor
});
workspace.add_item_to_active_pane(Box::new(editor), None, false, window, cx);
})
.unwrap();

thread_view.update_in(cx, |thread_view, window, cx| {
assert_eq!(thread_view.editing_message, Some(0));
thread_view.insert_selections(window, cx);
});

user_message_editor.read_with(cx, |editor, cx| {
let text = editor.editor().read(cx).text(cx);
let expected_text = String::from("Original message to edit with selection ");

assert_eq!(text, expected_text);
});
}

#[gpui::test]
async fn test_insert_selections(cx: &mut TestAppContext) {
init_test(cx);

let connection = StubAgentConnection::new();
connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk {
content: acp::ContentBlock::Text(acp::TextContent {
text: "Response".into(),
annotations: None,
meta: None,
}),
}]);

let (thread_view, cx) = setup_thread_view(StubAgentServer::new(connection), cx).await;
add_to_workspace(thread_view.clone(), cx);

let message_editor = cx.read(|cx| thread_view.read(cx).message_editor.clone());
message_editor.update_in(cx, |editor, window, cx| {
editor.set_text("Can you review this snippet ", window, cx)
});

// Create a simple buffer with some text so we can create a selection
// that will then be added to the message being edited.
let (workspace, project) = thread_view.read_with(cx, |thread_view, _cx| {
(thread_view.workspace.clone(), thread_view.project.clone())
});
let buffer = project.update(cx, |project, cx| {
project.create_local_buffer("let a = 10 + 10;", None, false, cx)
});

workspace
.update_in(cx, |workspace, window, cx| {
let editor = cx.new(|cx| {
let mut editor =
Editor::for_buffer(buffer.clone(), Some(project.clone()), window, cx);

editor.change_selections(Default::default(), window, cx, |selections| {
selections.select_ranges([8..15]);
});

editor
});
workspace.add_item_to_active_pane(Box::new(editor), None, false, window, cx);
})
.unwrap();

thread_view.update_in(cx, |thread_view, window, cx| {
assert_eq!(thread_view.editing_message, None);
thread_view.insert_selections(window, cx);
});

thread_view.read_with(cx, |thread_view, cx| {
let text = thread_view.message_editor.read(cx).text(cx);
let expected_txt = String::from("Can you review this snippet selection ");

assert_eq!(text, expected_txt);
})
}
}
@@ -6,6 +6,7 @@ mod tool_picker;

use std::{ops::Range, sync::Arc};

use agent_settings::AgentSettings;
use anyhow::Result;
use assistant_tool::{ToolSource, ToolWorkingSet};
use cloud_llm_client::{Plan, PlanV1, PlanV2};
@@ -14,6 +15,7 @@ use context_server::ContextServerId;
use editor::{Editor, SelectionEffects, scroll::Autoscroll};
use extension::ExtensionManifest;
use extension_host::ExtensionStore;
use feature_flags::{CodexAcpFeatureFlag, FeatureFlagAppExt as _};
use fs::Fs;
use gpui::{
Action, AnyView, App, AsyncWindowContext, Corner, Entity, EventEmitter, FocusHandle, Focusable,
@@ -28,10 +30,10 @@ use project::{
agent_server_store::{AgentServerStore, CLAUDE_CODE_NAME, CODEX_NAME, GEMINI_NAME},
context_server_store::{ContextServerConfiguration, ContextServerStatus, ContextServerStore},
};
use settings::{SettingsStore, update_settings_file};
use settings::{Settings, SettingsStore, update_settings_file};
use ui::{
Chip, CommonAnimationExt, ContextMenu, Disclosure, Divider, DividerColor, ElevationIndex,
Indicator, PopoverMenu, Switch, SwitchColor, Tooltip, WithScrollbar, prelude::*,
Indicator, PopoverMenu, Switch, SwitchColor, SwitchField, Tooltip, WithScrollbar, prelude::*,
};
use util::ResultExt as _;
use workspace::{Workspace, create_and_open_local_file};
@@ -401,6 +403,101 @@ impl AgentConfiguration {
)
}

fn render_command_permission(&mut self, cx: &mut Context<Self>) -> impl IntoElement {
let always_allow_tool_actions = AgentSettings::get_global(cx).always_allow_tool_actions;
let fs = self.fs.clone();

SwitchField::new(
"always-allow-tool-actions-switch",
"Allow running commands without asking for confirmation",
Some(
"The agent can perform potentially destructive actions without asking for your confirmation.".into(),
),
always_allow_tool_actions,
move |state, _window, cx| {
let allow = state == &ToggleState::Selected;
update_settings_file(fs.clone(), cx, move |settings, _| {
settings.agent.get_or_insert_default().set_always_allow_tool_actions(allow);
});
},
)
}

fn render_single_file_review(&mut self, cx: &mut Context<Self>) -> impl IntoElement {
let single_file_review = AgentSettings::get_global(cx).single_file_review;
let fs = self.fs.clone();

SwitchField::new(
"single-file-review",
"Enable single-file agent reviews",
Some("Agent edits are also displayed in single-file editors for review.".into()),
single_file_review,
move |state, _window, cx| {
let allow = state == &ToggleState::Selected;
update_settings_file(fs.clone(), cx, move |settings, _| {
settings
.agent
.get_or_insert_default()
.set_single_file_review(allow);
});
},
)
}

fn render_sound_notification(&mut self, cx: &mut Context<Self>) -> impl IntoElement {
let play_sound_when_agent_done = AgentSettings::get_global(cx).play_sound_when_agent_done;
let fs = self.fs.clone();

SwitchField::new(
"sound-notification",
"Play sound when finished generating",
Some(
"Hear a notification sound when the agent is done generating changes or needs your input.".into(),
),
play_sound_when_agent_done,
move |state, _window, cx| {
let allow = state == &ToggleState::Selected;
update_settings_file(fs.clone(), cx, move |settings, _| {
settings.agent.get_or_insert_default().set_play_sound_when_agent_done(allow);
});
},
)
}

fn render_modifier_to_send(&mut self, cx: &mut Context<Self>) -> impl IntoElement {
let use_modifier_to_send = AgentSettings::get_global(cx).use_modifier_to_send;
let fs = self.fs.clone();

SwitchField::new(
"modifier-send",
"Use modifier to submit a message",
Some(
"Make a modifier (cmd-enter on macOS, ctrl-enter on Linux or Windows) required to send messages.".into(),
),
use_modifier_to_send,
move |state, _window, cx| {
let allow = state == &ToggleState::Selected;
update_settings_file(fs.clone(), cx, move |settings, _| {
settings.agent.get_or_insert_default().set_use_modifier_to_send(allow);
});
},
)
}

fn render_general_settings_section(&mut self, cx: &mut Context<Self>) -> impl IntoElement {
v_flex()
.p(DynamicSpacing::Base16.rems(cx))
.pr(DynamicSpacing::Base20.rems(cx))
.gap_2p5()
.border_b_1()
.border_color(cx.theme().colors().border)
.child(Headline::new("General Settings"))
.child(self.render_command_permission(cx))
.child(self.render_single_file_review(cx))
.child(self.render_sound_notification(cx))
.child(self.render_modifier_to_send(cx))
}

fn render_zed_plan_info(&self, plan: Option<Plan>, cx: &mut Context<Self>) -> impl IntoElement {
if let Some(plan) = plan {
let free_chip_bg = cx
@@ -988,11 +1085,14 @@ impl AgentConfiguration {
"Claude Code",
))
.child(Divider::horizontal().color(DividerColor::BorderFaded))
.child(self.render_agent_server(
IconName::AiOpenAi,
"Codex",
))
.child(Divider::horizontal().color(DividerColor::BorderFaded))
.when(cx.has_flag::<CodexAcpFeatureFlag>(), |this| {
this
.child(self.render_agent_server(
IconName::AiOpenAi,
"Codex",
))
.child(Divider::horizontal().color(DividerColor::BorderFaded))
})
.child(self.render_agent_server(
IconName::AiGemini,
"Gemini CLI",
@@ -1045,6 +1145,7 @@ impl Render for AgentConfiguration {
.track_scroll(&self.scroll_handle)
.size_full()
.overflow_y_scroll()
.child(self.render_general_settings_section(cx))
.child(self.render_agent_servers_section(cx))
.child(self.render_context_servers_section(window, cx))
.child(self.render_provider_configuration_section(cx)),
@@ -619,10 +619,10 @@ mod tests {
cx.update(|_window, cx| {
LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
registry.register_provider(
Arc::new(FakeLanguageModelProvider::new(
FakeLanguageModelProvider::new(
LanguageModelProviderId::new("someprovider"),
LanguageModelProviderName::new("Some Provider"),
)),
),
cx,
);
});

@@ -19,10 +19,9 @@ use zed_actions::agent::{OpenClaudeCodeOnboardingModal, ReauthenticateAgent};
use crate::acp::{AcpThreadHistory, ThreadHistoryEvent};
use crate::ui::{AcpOnboardingModal, ClaudeCodeOnboardingModal};
use crate::{
AddContextServer, AgentDiffPane, DeleteRecentlyOpenThread, Follow, InlineAssistant,
NewTextThread, NewThread, OpenActiveThreadAsMarkdown, OpenAgentDiff, OpenHistory,
ResetTrialEndUpsell, ResetTrialUpsell, ToggleNavigationMenu, ToggleNewThreadMenu,
ToggleOptionsMenu,
AddContextServer, DeleteRecentlyOpenThread, Follow, InlineAssistant, NewTextThread, NewThread,
OpenActiveThreadAsMarkdown, OpenHistory, ResetTrialEndUpsell, ResetTrialUpsell,
ToggleNavigationMenu, ToggleNewThreadMenu, ToggleOptionsMenu,
acp::AcpThreadView,
agent_configuration::{AgentConfiguration, AssistantConfigurationEvent},
slash_command::SlashCommandCompletionProvider,
@@ -34,6 +33,7 @@ use crate::{
};
use agent::{
context_store::ContextStore,
history_store::{HistoryEntryId, HistoryStore},
thread_store::{TextThreadStore, ThreadStore},
};
use agent_settings::AgentSettings;
@@ -48,12 +48,12 @@ use editor::{Anchor, AnchorRangeExt as _, Editor, EditorEvent, MultiBuffer};
use fs::Fs;
use gpui::{
Action, AnyElement, App, AsyncWindowContext, Corner, DismissEvent, Entity, EventEmitter,
ExternalPaths, FocusHandle, Focusable, KeyContext, Pixels, Subscription, Task, UpdateGlobal,
WeakEntity, prelude::*,
ExternalPaths, FocusHandle, Focusable, KeyContext, Pixels, ReadGlobal as _, Subscription, Task,
UpdateGlobal, WeakEntity, prelude::*,
};
use language::LanguageRegistry;
use language_model::{ConfigurationError, LanguageModelRegistry};
use project::{Project, ProjectPath, Worktree};
use project::{DisableAiSettings, Project, ProjectPath, Worktree};
use prompt_store::{PromptBuilder, PromptStore, UserPromptId};
use rules_library::{RulesLibrary, open_rules_library};
use search::{BufferSearchBar, buffer_search};
@@ -75,6 +75,7 @@ use zed_actions::{
assistant::{OpenRulesLibrary, ToggleFocus},
};

use feature_flags::{CodexAcpFeatureFlag, FeatureFlagAppExt as _};
const AGENT_PANEL_KEY: &str = "agent_panel";

#[derive(Serialize, Deserialize, Debug)]
@@ -140,16 +141,6 @@ pub fn init(cx: &mut App) {
.register_action(|workspace, _: &Follow, window, cx| {
workspace.follow(CollaboratorId::Agent, window, cx);
})
.register_action(|workspace, _: &OpenAgentDiff, window, cx| {
let thread = workspace
.panel::<AgentPanel>(cx)
.and_then(|panel| panel.read(cx).active_thread_view().cloned())
.and_then(|thread_view| thread_view.read(cx).thread().cloned());

if let Some(thread) = thread {
AgentDiffPane::deploy_in_workspace(thread, workspace, window, cx);
}
})
.register_action(|workspace, _: &ToggleNavigationMenu, window, cx| {
if let Some(panel) = workspace.panel::<AgentPanel>(cx) {
workspace.focus_panel::<AgentPanel>(window, cx);
@@ -308,6 +299,7 @@ impl ActiveView {

pub fn prompt_editor(
context_editor: Entity<TextThreadEditor>,
history_store: Entity<HistoryStore>,
acp_history_store: Entity<agent2::HistoryStore>,
language_registry: Arc<LanguageRegistry>,
window: &mut Window,
@@ -375,6 +367,18 @@ impl ActiveView {
})
}
ContextEvent::PathChanged { old_path, new_path } => {
history_store.update(cx, |history_store, cx| {
if let Some(old_path) = old_path {
history_store
.replace_recently_opened_text_thread(old_path, new_path, cx);
} else {
history_store.push_recently_opened_entry(
HistoryEntryId::Context(new_path.clone()),
cx,
);
}
});

acp_history_store.update(cx, |history_store, cx| {
if let Some(old_path) = old_path {
history_store
@@ -416,7 +420,7 @@ pub struct AgentPanel {
language_registry: Arc<LanguageRegistry>,
thread_store: Entity<ThreadStore>,
acp_history: Entity<AcpThreadHistory>,
history_store: Entity<agent2::HistoryStore>,
acp_history_store: Entity<agent2::HistoryStore>,
context_store: Entity<TextThreadStore>,
prompt_store: Option<Entity<PromptStore>>,
inline_assist_context_store: Entity<ContextStore>,
@@ -424,6 +428,7 @@ pub struct AgentPanel {
configuration_subscription: Option<Subscription>,
active_view: ActiveView,
previous_view: Option<ActiveView>,
history_store: Entity<HistoryStore>,
new_thread_menu_handle: PopoverMenuHandle<ContextMenu>,
agent_panel_menu_handle: PopoverMenuHandle<ContextMenu>,
assistant_navigation_menu_handle: PopoverMenuHandle<ContextMenu>,
@@ -515,6 +520,13 @@ impl AgentPanel {
)
});

if SettingsStore::global(cx)
.get::<DisableAiSettings>(None)
.disable_ai
{
return panel;
}

panel.as_mut(cx).loading = true;
if let Some(serialized_panel) = serialized_panel {
panel.update(cx, |panel, cx| {
@@ -556,8 +568,10 @@ impl AgentPanel {
let inline_assist_context_store =
cx.new(|_cx| ContextStore::new(project.downgrade(), Some(thread_store.downgrade())));

let history_store = cx.new(|cx| agent2::HistoryStore::new(context_store.clone(), cx));
let acp_history = cx.new(|cx| AcpThreadHistory::new(history_store.clone(), window, cx));
let history_store = cx.new(|cx| HistoryStore::new(context_store.clone(), [], cx));

let acp_history_store = cx.new(|cx| agent2::HistoryStore::new(context_store.clone(), cx));
let acp_history = cx.new(|cx| AcpThreadHistory::new(acp_history_store.clone(), window, cx));
cx.subscribe_in(
&acp_history,
window,
@@ -579,12 +593,14 @@ impl AgentPanel {
)
.detach();
cx.observe(&history_store, |_, _, cx| cx.notify()).detach();

let panel_type = AgentSettings::get_global(cx).default_view;
let active_view = match panel_type {
DefaultView::Thread => ActiveView::native_agent(
fs.clone(),
prompt_store.clone(),
history_store.clone(),
acp_history_store.clone(),
project.clone(),
workspace.clone(),
window,
@@ -610,6 +626,7 @@ impl AgentPanel {
ActiveView::prompt_editor(
context_editor,
history_store.clone(),
acp_history_store.clone(),
language_registry.clone(),
window,
cx,
@@ -661,6 +678,43 @@ impl AgentPanel {
)
});

let mut old_disable_ai = false;
cx.observe_global_in::<SettingsStore>(window, move |panel, window, cx| {
let disable_ai = DisableAiSettings::get_global(cx).disable_ai;
if old_disable_ai != disable_ai {
let agent_panel_id = cx.entity_id();
let agent_panel_visible = panel
.workspace
.update(cx, |workspace, cx| {
let agent_dock_position = panel.position(window, cx);
let agent_dock = workspace.dock_at_position(agent_dock_position);
let agent_panel_focused = agent_dock
.read(cx)
.active_panel()
.is_some_and(|panel| panel.panel_id() == agent_panel_id);

let active_panel_visible = agent_dock
.read(cx)
.visible_panel()
.is_some_and(|panel| panel.panel_id() == agent_panel_id);

if agent_panel_focused {
cx.dispatch_action(&ToggleFocus);
}

active_panel_visible
})
.unwrap_or_default();

if agent_panel_visible {
cx.emit(PanelEvent::Close);
}

old_disable_ai = disable_ai;
}
})
.detach();

Self {
active_view,
workspace,
@@ -675,6 +729,7 @@ impl AgentPanel {
configuration_subscription: None,
inline_assist_context_store,
previous_view: None,
history_store: history_store.clone(),
new_thread_menu_handle: PopoverMenuHandle::default(),
agent_panel_menu_handle: PopoverMenuHandle::default(),
assistant_navigation_menu_handle: PopoverMenuHandle::default(),
@@ -685,7 +740,7 @@ impl AgentPanel {
pending_serialization: None,
onboarding,
acp_history,
history_store,
acp_history_store,
selected_agent: AgentType::default(),
loading: false,
}
@@ -739,7 +794,7 @@ impl AgentPanel {
cx: &mut Context<Self>,
) {
let Some(thread) = self
.history_store
.acp_history_store
.read(cx)
.thread_from_session_id(&action.from_session_id)
else {
@@ -788,6 +843,7 @@ impl AgentPanel {
ActiveView::prompt_editor(
context_editor.clone(),
self.history_store.clone(),
self.acp_history_store.clone(),
self.language_registry.clone(),
window,
cx,
@@ -819,7 +875,7 @@ impl AgentPanel {
}

let loading = self.loading;
let history = self.history_store.clone();
let history = self.acp_history_store.clone();

cx.spawn_in(window, async move |this, cx| {
let ext_agent = match agent_choice {
@@ -880,7 +936,7 @@ impl AgentPanel {
summarize_thread,
workspace.clone(),
project,
this.history_store.clone(),
this.acp_history_store.clone(),
this.prompt_store.clone(),
window,
cx,
@@ -978,6 +1034,7 @@ impl AgentPanel {
ActiveView::prompt_editor(
editor,
self.history_store.clone(),
self.acp_history_store.clone(),
self.language_registry.clone(),
window,
cx,
@@ -1241,6 +1298,11 @@ impl AgentPanel {
match &new_view {
ActiveView::TextThread { context_editor, .. } => {
self.history_store.update(cx, |store, cx| {
if let Some(path) = context_editor.read(cx).context().read(cx).path() {
store.push_recently_opened_entry(HistoryEntryId::Context(path.clone()), cx)
}
});
self.acp_history_store.update(cx, |store, cx| {
if let Some(path) = context_editor.read(cx).context().read(cx).path() {
store.push_recently_opened_entry(
agent2::HistoryEntryId::TextThread(path.clone()),
@@ -1274,7 +1336,7 @@ impl AgentPanel {
) -> ContextMenu {
let entries = panel
.read(cx)
.history_store
.acp_history_store
.read(cx)
.recently_opened_entries(cx);
@@ -1319,7 +1381,7 @@ impl AgentPanel {
move |_window, cx| {
panel
.update(cx, |this, cx| {
this.history_store.update(cx, |history_store, cx| {
this.acp_history_store.update(cx, |history_store, cx| {
history_store.remove_recently_opened_entry(&id, cx);
});
})
@@ -1921,32 +1983,34 @@ impl AgentPanel {
}
}),
)
.item(
ContextMenuEntry::new("New Codex Thread")
.icon(IconName::AiOpenAi)
.disabled(is_via_collab)
.icon_color(Color::Muted)
.handler({
let workspace = workspace.clone();
move |window, cx| {
if let Some(workspace) = workspace.upgrade() {
workspace.update(cx, |workspace, cx| {
if let Some(panel) =
workspace.panel::<AgentPanel>(cx)
{
panel.update(cx, |panel, cx| {
panel.new_agent_thread(
AgentType::Codex,
window,
cx,
);
});
}
});
.when(cx.has_flag::<CodexAcpFeatureFlag>(), |this| {
this.item(
ContextMenuEntry::new("New Codex Thread")
.icon(IconName::AiOpenAi)
.disabled(is_via_collab)
.icon_color(Color::Muted)
.handler({
let workspace = workspace.clone();
move |window, cx| {
if let Some(workspace) = workspace.upgrade() {
workspace.update(cx, |workspace, cx| {
if let Some(panel) =
workspace.panel::<AgentPanel>(cx)
{
panel.update(cx, |panel, cx| {
panel.new_agent_thread(
AgentType::Codex,
window,
cx,
);
});
}
});
}
}
}
}),
)
}),
)
})
.item(
ContextMenuEntry::new("New Gemini CLI Thread")
.icon(IconName::AiGemini)
@@ -2150,7 +2214,10 @@ impl AgentPanel {
false
}
_ => {
let history_is_empty = self.history_store.read(cx).is_empty(cx);
let history_is_empty = self.acp_history_store.read(cx).is_empty(cx)
&& self
.history_store
.update(cx, |store, cx| store.recent_entries(1, cx).is_empty());

let has_configured_non_zed_providers = LanguageModelRegistry::read_global(cx)
.providers()
@@ -11,7 +11,7 @@ use fuzzy::{StringMatch, StringMatchCandidate};
use gpui::{App, Entity, Task, WeakEntity};
use http_client::HttpClientWithUrl;
use itertools::Itertools;
use language::{Buffer, CodeLabel, CodeLabelBuilder, HighlightId};
use language::{Buffer, CodeLabel, HighlightId};
use lsp::CompletionContext;
use project::lsp_store::SymbolLocation;
use project::{
@@ -686,8 +686,7 @@ impl ContextPickerCompletionProvider {
};

let comment_id = cx.theme().syntax().highlight_id("comment").map(HighlightId);
let mut label = CodeLabelBuilder::default();
label.push_str(&symbol.name, None);
let mut label = CodeLabel::plain(symbol.name.clone(), None);
label.push_str(" ", None);
label.push_str(&file_name, comment_id);
label.push_str(&format!(" L{}", symbol.range.start.0.row + 1), comment_id);
@@ -697,7 +696,7 @@ impl ContextPickerCompletionProvider {
Some(Completion {
replace_range: source_range.clone(),
new_text,
label: label.build(),
label,
documentation: None,
source: project::CompletionSource::Custom,
icon_path: Some(IconName::Code.path().into()),
@@ -730,7 +729,7 @@ impl ContextPickerCompletionProvider {

fn build_code_label_for_full_path(file_name: &str, directory: Option<&str>, cx: &App) -> CodeLabel {
let comment_id = cx.theme().syntax().highlight_id("comment").map(HighlightId);
let mut label = CodeLabelBuilder::default();
let mut label = CodeLabel::default();

label.push_str(file_name, None);
label.push_str(" ", None);
@@ -739,7 +738,9 @@ fn build_code_label_for_full_path(file_name: &str, directory: Option<&str>, cx:
label.push_str(directory, comment_id);
}

label.build()
label.filter_range = 0..label.text().len();

label
}

impl CompletionProvider for ContextPickerCompletionProvider {

@@ -9,7 +9,6 @@ use anyhow::Result;
use futures::StreamExt;
use futures::stream::{self, BoxStream};
use gpui::{App, SharedString, Task, WeakEntity, Window};
use language::CodeLabelBuilder;
use language::HighlightId;
use language::{BufferSnapshot, CodeLabel, LspAdapterDelegate, OffsetRangeExt};
pub use language_model::Role;
@@ -329,15 +328,15 @@ impl SlashCommandLine {
}

pub fn create_label_for_command(command_name: &str, arguments: &[&str], cx: &App) -> CodeLabel {
let mut label = CodeLabelBuilder::default();
let mut label = CodeLabel::default();
label.push_str(command_name, None);
label.respan_filter_range(None);
label.push_str(" ", None);
label.push_str(
&arguments.join(" "),
cx.theme().syntax().highlight_id("comment").map(HighlightId),
);
label.build()
label.filter_range = 0..command_name.len();
label
}

#[cfg(test)]
@@ -7,7 +7,7 @@ use futures::Stream;
|
||||
use futures::channel::mpsc;
|
||||
use fuzzy::PathMatch;
|
||||
use gpui::{App, Entity, Task, WeakEntity};
|
||||
use language::{BufferSnapshot, CodeLabelBuilder, HighlightId, LineEnding, LspAdapterDelegate};
|
||||
use language::{BufferSnapshot, CodeLabel, HighlightId, LineEnding, LspAdapterDelegate};
|
||||
use project::{PathMatchCandidateSet, Project};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use smol::stream::StreamExt;
|
||||
@@ -168,7 +168,7 @@ impl SlashCommand for FileSlashCommand {
|
||||
.display(path_style)
|
||||
.to_string();
|
||||
|
||||
let mut label = CodeLabelBuilder::default();
|
||||
let mut label = CodeLabel::default();
|
||||
let file_name = path_match.path.file_name()?;
|
||||
let label_text = if path_match.is_dir {
|
||||
format!("{}/ ", file_name)
|
||||
@@ -178,10 +178,10 @@ impl SlashCommand for FileSlashCommand {
|
||||
|
||||
label.push_str(label_text.as_str(), None);
|
||||
label.push_str(&text, comment_id);
|
||||
label.respan_filter_range(Some(file_name));
|
||||
label.filter_range = 0..file_name.len();
|
||||
|
||||
Some(ArgumentCompletion {
|
||||
label: label.build(),
|
||||
label,
|
||||
new_text: text,
|
||||
after_completion: AfterCompletion::Compose,
|
||||
replace_previous_arguments: false,
|
||||
|
||||
@@ -7,7 +7,7 @@ use collections::{HashMap, HashSet};
|
||||
use editor::Editor;
|
||||
use futures::future::join_all;
|
||||
use gpui::{Task, WeakEntity};
|
||||
use language::{BufferSnapshot, CodeLabel, CodeLabelBuilder, HighlightId, LspAdapterDelegate};
|
||||
use language::{BufferSnapshot, CodeLabel, HighlightId, LspAdapterDelegate};
|
||||
use std::sync::{Arc, atomic::AtomicBool};
|
||||
use ui::{ActiveTheme, App, Window, prelude::*};
|
||||
use util::{ResultExt, paths::PathStyle};
|
||||
@@ -308,10 +308,10 @@ fn create_tab_completion_label(
|
||||
comment_id: Option<HighlightId>,
|
||||
) -> CodeLabel {
|
||||
let (parent_path, file_name) = path_style.split(path);
|
||||
let mut label = CodeLabelBuilder::default();
|
||||
let mut label = CodeLabel::default();
|
||||
label.push_str(file_name, None);
|
||||
label.push_str(" ", None);
|
||||
label.push_str(parent_path.unwrap_or_default(), comment_id);
|
||||
label.respan_filter_range(Some(file_name));
|
||||
label.build()
|
||||
label.filter_range = 0..file_name.len();
|
||||
label
|
||||
}
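
The hunks above swap direct `CodeLabel` construction for the new `CodeLabelBuilder`. As a minimal sketch of that builder flow, with the API inferred only from the call sites in these hunks (not checked against the `language` crate), the pattern looks like:

```rust
// Sketch only: mirrors the call sites in the diff above. `CodeLabelBuilder`'s
// exact signatures are an assumption drawn from this diff.
use language::{CodeLabel, CodeLabelBuilder};

fn label_for_path(file_name: &str, parent_path: Option<&str>) -> CodeLabel {
    let mut label = CodeLabelBuilder::default();
    // Push the fuzzy-matchable file name first, then the dimmed parent path
    // (the real code passes a comment highlight id here; None keeps it plain).
    label.push_str(file_name, None);
    label.push_str(" ", None);
    label.push_str(parent_path.unwrap_or_default(), None);
    // Restrict matching to the file name, replacing the old manual
    // `filter_range = 0..file_name.len()` assignment.
    label.respan_filter_range(Some(file_name));
    label.build()
}
```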

@@ -1538,7 +1538,7 @@ mod tests {
store.update_user_settings(cx, |settings| {
settings.project.all_languages.defaults.format_on_save = Some(FormatOnSave::On);
settings.project.all_languages.defaults.formatter =
Some(language::language_settings::FormatterList::default());
Some(language::language_settings::SelectedFormatter::Auto);
});
});
});

@@ -136,7 +136,6 @@ impl Tool for TerminalTool {
}),
None => Task::ready(None).shared(),
};
let is_windows = project.read(cx).path_style(cx).is_windows();
let shell = project
.update(cx, |project, cx| {
project
@@ -156,7 +155,7 @@ impl Tool for TerminalTool {
let build_cmd = {
let input_command = input.command.clone();
move || {
ShellBuilder::new(&Shell::Program(shell), is_windows)
ShellBuilder::new(&Shell::Program(shell))
.redirect_stdin_to_dev_null()
.build(Some(input_command), &[])
}

@@ -1,12 +1,12 @@
use anyhow::{Context as _, Result};
use collections::HashMap;
use gpui::{App, BackgroundExecutor, BorrowAppContext, Global};
use log::info;

#[cfg(not(any(all(target_os = "windows", target_env = "gnu"), target_os = "freebsd")))]
mod non_windows_and_freebsd_deps {
pub(super) use gpui::AsyncApp;
pub(super) use libwebrtc::native::apm;
pub(super) use log::info;
pub(super) use parking_lot::Mutex;
pub(super) use rodio::cpal::Sample;
pub(super) use rodio::source::LimitSettings;

@@ -649,7 +649,7 @@ impl AutoUpdater {
#[cfg(not(target_os = "windows"))]
anyhow::ensure!(
which::which("rsync").is_ok(),
"Could not auto-update because the required rsync utility was not found."
"Aborting. Could not find rsync which is required for auto-updates."
);
Ok(())
}
@@ -658,7 +658,7 @@ impl AutoUpdater {
let filename = match OS {
"macos" => anyhow::Ok("Zed.dmg"),
"linux" => Ok("zed.tar.gz"),
"windows" => Ok("Zed.exe"),
"windows" => Ok("zed_editor_installer.exe"),
unsupported_os => anyhow::bail!("not supported: {unsupported_os}"),
}?;

@@ -68,13 +68,10 @@ struct Args {
#[arg(short, long, overrides_with = "add")]
new: bool,
/// Sets a custom directory for all user data (e.g., database, extensions, logs).
/// This overrides the default platform-specific data directory location:
#[cfg_attr(target_os = "macos", doc = "`~/Library/Application Support/Zed`.")]
#[cfg_attr(target_os = "windows", doc = "`%LOCALAPPDATA%\\Zed`.")]
#[cfg_attr(
not(any(target_os = "windows", target_os = "macos")),
doc = "`$XDG_DATA_HOME/zed`."
)]
/// This overrides the default platform-specific data directory location.
/// On macOS, the default is `~/Library/Application Support/Zed`.
/// On Linux/FreeBSD, the default is `$XDG_DATA_HOME/zed`.
/// On Windows, the default is `%LOCALAPPDATA%\Zed`.
#[arg(long, value_name = "DIR")]
user_data_dir: Option<String>,
/// The paths to open in Zed (space-separated).

@@ -1,7 +1,7 @@
use chrono::Duration;
use serde::{Deserialize, Serialize};
use std::{
ops::{Add, Range, Sub},
ops::Range,
path::{Path, PathBuf},
sync::Arc,
};
@@ -18,8 +18,8 @@ pub struct PredictEditsRequest {
pub excerpt_path: Arc<Path>,
/// Within file
pub excerpt_range: Range<usize>,
pub excerpt_line_range: Range<Line>,
pub cursor_point: Point,
/// Within `excerpt`
pub cursor_offset: usize,
/// Within `signatures`
pub excerpt_parent: Option<usize>,
pub signatures: Vec<Signature>,
@@ -47,13 +47,12 @@ pub struct PredictEditsRequest {
pub enum PromptFormat {
MarkedExcerpt,
LabeledSections,
NumberedLines,
/// Prompt format intended for use via zeta_cli
OnlySnippets,
}

impl PromptFormat {
pub const DEFAULT: PromptFormat = PromptFormat::NumberedLines;
pub const DEFAULT: PromptFormat = PromptFormat::LabeledSections;
}

impl Default for PromptFormat {
@@ -74,7 +73,6 @@ impl std::fmt::Display for PromptFormat {
PromptFormat::MarkedExcerpt => write!(f, "Marked Excerpt"),
PromptFormat::LabeledSections => write!(f, "Labeled Sections"),
PromptFormat::OnlySnippets => write!(f, "Only Snippets"),
PromptFormat::NumberedLines => write!(f, "Numbered Lines"),
}
}
}
@@ -99,7 +97,7 @@ pub struct Signature {
pub parent_index: Option<usize>,
/// Range of `text` within the file, possibly truncated according to `text_is_truncated`. The
/// file is implicitly the file that contains the descendant declaration or excerpt.
pub range: Range<Line>,
pub range: Range<usize>,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
@@ -108,7 +106,7 @@ pub struct ReferencedDeclaration {
pub text: String,
pub text_is_truncated: bool,
/// Range of `text` within file, possibly truncated according to `text_is_truncated`
pub range: Range<Line>,
pub range: Range<usize>,
/// Range within `text`
pub signature_range: Range<usize>,
/// Index within `signatures`.
@@ -129,6 +127,7 @@ pub struct DeclarationScoreComponents {
pub declaration_count: usize,
pub reference_line_distance: u32,
pub declaration_line_distance: u32,
pub declaration_line_distance_rank: usize,
pub excerpt_vs_item_jaccard: f32,
pub excerpt_vs_signature_jaccard: f32,
pub adjacent_vs_item_jaccard: f32,
@@ -137,15 +136,6 @@ pub struct DeclarationScoreComponents {
pub excerpt_vs_signature_weighted_overlap: f32,
pub adjacent_vs_item_weighted_overlap: f32,
pub adjacent_vs_signature_weighted_overlap: f32,
pub path_import_match_count: usize,
pub wildcard_path_import_match_count: usize,
pub import_similarity: f32,
pub max_import_similarity: f32,
pub normalized_import_similarity: f32,
pub wildcard_import_similarity: f32,
pub normalized_wildcard_import_similarity: f32,
pub included_by_others: usize,
pub includes_others: usize,
}

#[derive(Debug, Clone, Serialize, Deserialize)]
@@ -171,36 +161,10 @@ pub struct DebugInfo {
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Edit {
pub path: Arc<Path>,
pub range: Range<Line>,
pub range: Range<usize>,
pub content: String,
}

fn is_default<T: Default + PartialEq>(value: &T) -> bool {
*value == T::default()
}

#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, PartialOrd, Eq, Ord)]
pub struct Point {
pub line: Line,
pub column: u32,
}

#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, PartialOrd, Eq, Ord)]
#[serde(transparent)]
pub struct Line(pub u32);

impl Add for Line {
type Output = Self;

fn add(self, rhs: Self) -> Self::Output {
Self(self.0 + rhs.0)
}
}

impl Sub for Line {
type Output = Self;

fn sub(self, rhs: Self) -> Self::Output {
Self(self.0 - rhs.0)
}
}
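
A quick illustration of the `Line` newtype arithmetic these impls enable, as it is used later in `PlannedPrompt` to derive a signature's line range. This is a self-contained sketch; the `Line` definition is copied from the hunk above.

```rust
// Self-contained sketch of the Line arithmetic from the hunk above.
#[derive(Debug, Clone, Copy, PartialEq, PartialOrd, Eq, Ord)]
struct Line(u32);

impl std::ops::Add for Line {
    type Output = Self;
    fn add(self, rhs: Self) -> Self {
        Self(self.0 + rhs.0)
    }
}

fn main() {
    // Mirrors how PlannedPrompt adds a line count within a declaration's text
    // to the declaration's starting line to get a signature's start line.
    let declaration_start = Line(120);
    let text_before_signature = "fn outer() {\n    // ...\n";
    let signature_start =
        declaration_start + Line(text_before_signature.lines().count() as u32);
    assert_eq!(signature_start, Line(122));
}
```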

@@ -1,9 +1,7 @@
//! Zeta2 prompt planning and generation code shared with cloud.

use anyhow::{Context as _, Result, anyhow};
use cloud_llm_client::predict_edits_v3::{
self, Event, Line, Point, PromptFormat, ReferencedDeclaration,
};
use cloud_llm_client::predict_edits_v3::{self, Event, PromptFormat, ReferencedDeclaration};
use indoc::indoc;
use ordered_float::OrderedFloat;
use rustc_hash::{FxHashMap, FxHashSet};
@@ -45,42 +43,6 @@ const LABELED_SECTIONS_SYSTEM_PROMPT: &str = indoc! {r#"
}
"#};

const NUMBERED_LINES_SYSTEM_PROMPT: &str = indoc! {r#"
# Instructions

You are a code completion assistant helping a programmer finish their work. Your task is to:

1. Analyze the edit history to understand what the programmer is trying to achieve
2. Identify any incomplete refactoring or changes that need to be finished
3. Make the remaining edits that a human programmer would logically make next
4. Apply systematic changes consistently across the entire codebase - if you see a pattern starting, complete it everywhere.

Focus on:
- Understanding the intent behind the changes (e.g., improving error handling, refactoring APIs, fixing bugs)
- Completing any partially-applied changes across the codebase
- Ensuring consistency with the programming style and patterns already established
- Making edits that maintain or improve code quality
- If the programmer started refactoring one instance of a pattern, find and update ALL similar instances
- Don't write a lot of code if you're not sure what to do

Rules:
- Do not just mechanically apply patterns - reason about what changes make sense given the context and the programmer's apparent goals.
- Do not just fix syntax errors - look for the broader refactoring pattern and apply it systematically throughout the code.
- Write the edits in the unified diff format as shown in the example.

# Example output:

```
--- a/distill-claude/tmp-outs/edits_history.txt
+++ b/distill-claude/tmp-outs/edits_history.txt
@@ -1,3 +1,3 @@
-
-
-import sys
+import json
```
"#};

pub struct PlannedPrompt<'a> {
request: &'a predict_edits_v3::PredictEditsRequest,
/// Snippets to include in the prompt. These may overlap - they are merged / deduplicated in
@@ -93,7 +55,6 @@ pub fn system_prompt(format: PromptFormat) -> &'static str {
match format {
PromptFormat::MarkedExcerpt => MARKED_EXCERPT_SYSTEM_PROMPT,
PromptFormat::LabeledSections => LABELED_SECTIONS_SYSTEM_PROMPT,
PromptFormat::NumberedLines => NUMBERED_LINES_SYSTEM_PROMPT,
// only intended for use via zeta_cli
PromptFormat::OnlySnippets => "",
}
@@ -102,7 +63,7 @@ pub fn system_prompt(format: PromptFormat) -> &'static str {
#[derive(Clone, Debug)]
pub struct PlannedSnippet<'a> {
path: Arc<Path>,
range: Range<Line>,
range: Range<usize>,
text: &'a str,
// TODO: Indicate this in the output
#[allow(dead_code)]
@@ -118,7 +79,7 @@ pub enum DeclarationStyle {
#[derive(Clone, Debug, Serialize)]
pub struct SectionLabels {
pub excerpt_index: usize,
pub section_ranges: Vec<(Arc<Path>, Range<Line>)>,
pub section_ranges: Vec<(Arc<Path>, Range<usize>)>,
}

impl<'a> PlannedPrompt<'a> {
@@ -235,24 +196,10 @@ impl<'a> PlannedPrompt<'a> {
declaration.text.len()
));
};
let signature_start_line = declaration.range.start
+ Line(
declaration.text[..declaration.signature_range.start]
.lines()
.count() as u32,
);
let signature_end_line = signature_start_line
+ Line(
declaration.text
[declaration.signature_range.start..declaration.signature_range.end]
.lines()
.count() as u32,
);
let range = signature_start_line..signature_end_line;

PlannedSnippet {
path: declaration.path.clone(),
range,
range: (declaration.signature_range.start + declaration.range.start)
..(declaration.signature_range.end + declaration.range.start),
text,
text_is_truncated: declaration.text_is_truncated,
}
@@ -371,7 +318,7 @@ impl<'a> PlannedPrompt<'a> {
}
let excerpt_snippet = PlannedSnippet {
path: self.request.excerpt_path.clone(),
range: self.request.excerpt_line_range.clone(),
range: self.request.excerpt_range.clone(),
text: &self.request.excerpt,
text_is_truncated: false,
};
@@ -381,33 +328,32 @@ impl<'a> PlannedPrompt<'a> {
let mut excerpt_file_insertions = match self.request.prompt_format {
PromptFormat::MarkedExcerpt => vec![
(
Point {
line: self.request.excerpt_line_range.start,
column: 0,
},
self.request.excerpt_range.start,
EDITABLE_REGION_START_MARKER_WITH_NEWLINE,
),
(self.request.cursor_point, CURSOR_MARKER),
(
Point {
line: self.request.excerpt_line_range.end,
column: 0,
},
self.request.excerpt_range.start + self.request.cursor_offset,
CURSOR_MARKER,
),
(
self.request
.excerpt_range
.end
.saturating_sub(0)
.max(self.request.excerpt_range.start),
EDITABLE_REGION_END_MARKER_WITH_NEWLINE,
),
],
PromptFormat::LabeledSections => vec![(self.request.cursor_point, CURSOR_MARKER)],
PromptFormat::NumberedLines => vec![(self.request.cursor_point, CURSOR_MARKER)],
PromptFormat::LabeledSections => vec![(
self.request.excerpt_range.start + self.request.cursor_offset,
CURSOR_MARKER,
)],
PromptFormat::OnlySnippets => vec![],
};

let mut prompt = String::new();
prompt.push_str("## User Edits\n\n");
if self.request.events.is_empty() {
prompt.push_str("No edits yet.\n");
} else {
Self::push_events(&mut prompt, &self.request.events);
}
Self::push_events(&mut prompt, &self.request.events);

prompt.push_str("\n## Code\n\n");
let section_labels =
@@ -445,17 +391,13 @@ impl<'a> PlannedPrompt<'a> {
if *predicted {
writeln!(
output,
"User accepted prediction {:?}:\n`````diff\n{}\n`````\n",
"User accepted prediction {:?}:\n```diff\n{}\n```\n",
path, diff
)
.unwrap();
} else {
writeln!(
output,
"User edited {:?}:\n`````diff\n{}\n`````\n",
path, diff
)
.unwrap();
writeln!(output, "User edited {:?}:\n```diff\n{}\n```\n", path, diff)
.unwrap();
}
}
}
@@ -465,7 +407,7 @@ impl<'a> PlannedPrompt<'a> {
fn push_file_snippets(
&self,
output: &mut String,
excerpt_file_insertions: &mut Vec<(Point, &'static str)>,
excerpt_file_insertions: &mut Vec<(usize, &'static str)>,
file_snippets: Vec<(&'a Path, Vec<&'a PlannedSnippet>, bool)>,
) -> Result<SectionLabels> {
let mut section_ranges = Vec::new();
@@ -475,13 +417,15 @@ impl<'a> PlannedPrompt<'a> {
snippets.sort_by_key(|s| (s.range.start, Reverse(s.range.end)));

// TODO: What if the snippets get expanded too large to be editable?
let mut current_snippet: Option<(&PlannedSnippet, Range<Line>)> = None;
let mut disjoint_snippets: Vec<(&PlannedSnippet, Range<Line>)> = Vec::new();
let mut current_snippet: Option<(&PlannedSnippet, Range<usize>)> = None;
let mut disjoint_snippets: Vec<(&PlannedSnippet, Range<usize>)> = Vec::new();
for snippet in snippets {
if let Some((_, current_snippet_range)) = current_snippet.as_mut()
&& snippet.range.start <= current_snippet_range.end
&& snippet.range.start < current_snippet_range.end
{
current_snippet_range.end = current_snippet_range.end.max(snippet.range.end);
if snippet.range.end > current_snippet_range.end {
current_snippet_range.end = snippet.range.end;
}
continue;
}
if let Some(current_snippet) = current_snippet.take() {
@@ -493,24 +437,21 @@ impl<'a> PlannedPrompt<'a> {
disjoint_snippets.push(current_snippet);
}

// TODO: remove filename=?
writeln!(output, "`````filename={}", file_path.display()).ok();
writeln!(output, "```{}", file_path.display()).ok();
let mut skipped_last_snippet = false;
for (snippet, range) in disjoint_snippets {
let section_index = section_ranges.len();

match self.request.prompt_format {
PromptFormat::MarkedExcerpt
| PromptFormat::OnlySnippets
| PromptFormat::NumberedLines => {
if range.start.0 > 0 && !skipped_last_snippet {
PromptFormat::MarkedExcerpt | PromptFormat::OnlySnippets => {
if range.start > 0 && !skipped_last_snippet {
output.push_str("…\n");
}
}
PromptFormat::LabeledSections => {
if is_excerpt_file
&& range.start <= self.request.excerpt_line_range.start
&& range.end >= self.request.excerpt_line_range.end
&& range.start <= self.request.excerpt_range.start
&& range.end >= self.request.excerpt_range.end
{
writeln!(output, "<|current_section|>").ok();
} else {
@@ -519,83 +460,46 @@ impl<'a> PlannedPrompt<'a> {
}
}

let push_full_snippet = |output: &mut String| {
if self.request.prompt_format == PromptFormat::NumberedLines {
for (i, line) in snippet.text.lines().enumerate() {
writeln!(output, "{}|{}", i as u32 + range.start.0 + 1, line)?;
}
} else {
output.push_str(&snippet.text);
}
anyhow::Ok(())
};

if is_excerpt_file {
if self.request.prompt_format == PromptFormat::OnlySnippets {
if range.start >= self.request.excerpt_line_range.start
&& range.end <= self.request.excerpt_line_range.end
if range.start >= self.request.excerpt_range.start
&& range.end <= self.request.excerpt_range.end
{
skipped_last_snippet = true;
} else {
skipped_last_snippet = false;
output.push_str(snippet.text);
}
} else if !excerpt_file_insertions.is_empty() {
let lines = snippet.text.lines().collect::<Vec<_>>();
let push_line = |output: &mut String, line_ix: usize| {
if self.request.prompt_format == PromptFormat::NumberedLines {
write!(output, "{}|", line_ix as u32 + range.start.0 + 1)?;
}
anyhow::Ok(writeln!(output, "{}", lines[line_ix])?)
};
let mut last_line_ix = 0;
let mut insertion_ix = 0;
while insertion_ix < excerpt_file_insertions.len() {
let (point, insertion) = &excerpt_file_insertions[insertion_ix];
let found = point.line >= range.start && point.line <= range.end;
} else {
let mut last_offset = range.start;
let mut i = 0;
while i < excerpt_file_insertions.len() {
let (offset, insertion) = &excerpt_file_insertions[i];
let found = *offset >= range.start && *offset <= range.end;
if found {
excerpt_index = Some(section_index);
let insertion_line_ix = (point.line.0 - range.start.0) as usize;
for line_ix in last_line_ix..insertion_line_ix {
push_line(output, line_ix)?;
}
if let Some(next_line) = lines.get(insertion_line_ix) {
if self.request.prompt_format == PromptFormat::NumberedLines {
write!(
output,
"{}|",
insertion_line_ix as u32 + range.start.0 + 1
)?
}
output.push_str(&next_line[..point.column as usize]);
output.push_str(insertion);
writeln!(output, "{}", &next_line[point.column as usize..])?;
} else {
writeln!(output, "{}", insertion)?;
}
last_line_ix = insertion_line_ix + 1;
excerpt_file_insertions.remove(insertion_ix);
output.push_str(
&snippet.text[last_offset - range.start..offset - range.start],
);
output.push_str(insertion);
last_offset = *offset;
excerpt_file_insertions.remove(i);
continue;
}
insertion_ix += 1;
i += 1;
}
skipped_last_snippet = false;
for line_ix in last_line_ix..lines.len() {
push_line(output, line_ix)?;
}
} else {
skipped_last_snippet = false;
push_full_snippet(output)?;
output.push_str(&snippet.text[last_offset - range.start..]);
}
} else {
skipped_last_snippet = false;
push_full_snippet(output)?;
output.push_str(snippet.text);
}

section_ranges.push((snippet.path.clone(), range));
}

output.push_str("`````\n\n");
output.push_str("```\n\n");
}

Ok(SectionLabels {

@@ -1,28 +0,0 @@
[package]
name = "codestral"
version = "0.1.0"
edition = "2021"
publish = false
license = "GPL-3.0-or-later"

[lib]
path = "src/codestral.rs"

[dependencies]
anyhow.workspace = true
edit_prediction.workspace = true
edit_prediction_context.workspace = true
futures.workspace = true
gpui.workspace = true
http_client.workspace = true
language.workspace = true
language_models.workspace = true
log.workspace = true
mistral.workspace = true
serde.workspace = true
serde_json.workspace = true
smol.workspace = true
text.workspace = true
workspace-hack.workspace = true

[dev-dependencies]
@@ -1 +0,0 @@
../../LICENSE-GPL
@@ -1,381 +0,0 @@
use anyhow::{Context as _, Result};
use edit_prediction::{Direction, EditPrediction, EditPredictionProvider};
use edit_prediction_context::{EditPredictionExcerpt, EditPredictionExcerptOptions};
use futures::AsyncReadExt;
use gpui::{App, Context, Entity, Task};
use http_client::HttpClient;
use language::{
language_settings::all_language_settings, Anchor, Buffer, BufferSnapshot, EditPreview, ToPoint,
};
use language_models::MistralLanguageModelProvider;
use mistral::CODESTRAL_API_URL;
use serde::{Deserialize, Serialize};
use std::{
ops::Range,
sync::Arc,
time::{Duration, Instant},
};
use text::ToOffset;

pub const DEBOUNCE_TIMEOUT: Duration = Duration::from_millis(150);

const EXCERPT_OPTIONS: EditPredictionExcerptOptions = EditPredictionExcerptOptions {
max_bytes: 1050,
min_bytes: 525,
target_before_cursor_over_total_bytes: 0.66,
};

/// Represents a completion that has been received and processed from Codestral.
/// This struct maintains the state needed to interpolate the completion as the user types.
#[derive(Clone)]
struct CurrentCompletion {
/// The buffer snapshot at the time the completion was generated.
/// Used to detect changes and interpolate edits.
snapshot: BufferSnapshot,
/// The edits that should be applied to transform the original text into the predicted text.
/// Each edit is a range in the buffer and the text to replace it with.
edits: Arc<[(Range<Anchor>, String)]>,
/// Preview of how the buffer will look after applying the edits.
edit_preview: EditPreview,
}

impl CurrentCompletion {
/// Attempts to adjust the edits based on changes made to the buffer since the completion was generated.
/// Returns None if the user's edits conflict with the predicted edits.
fn interpolate(&self, new_snapshot: &BufferSnapshot) -> Option<Vec<(Range<Anchor>, String)>> {
edit_prediction::interpolate_edits(&self.snapshot, new_snapshot, &self.edits)
}
}

pub struct CodestralCompletionProvider {
http_client: Arc<dyn HttpClient>,
pending_request: Option<Task<Result<()>>>,
current_completion: Option<CurrentCompletion>,
}

impl CodestralCompletionProvider {
pub fn new(http_client: Arc<dyn HttpClient>) -> Self {
Self {
http_client,
pending_request: None,
current_completion: None,
}
}

pub fn has_api_key(cx: &App) -> bool {
Self::api_key(cx).is_some()
}

fn api_key(cx: &App) -> Option<Arc<str>> {
MistralLanguageModelProvider::try_global(cx)
.and_then(|provider| provider.codestral_api_key(CODESTRAL_API_URL, cx))
}

/// Uses Codestral's Fill-in-the-Middle API for code completion.
async fn fetch_completion(
http_client: Arc<dyn HttpClient>,
api_key: &str,
prompt: String,
suffix: String,
model: String,
max_tokens: Option<u32>,
) -> Result<String> {
let start_time = Instant::now();

log::debug!(
"Codestral: Requesting completion (model: {}, max_tokens: {:?})",
model,
max_tokens
);

let request = CodestralRequest {
model,
prompt,
suffix: if suffix.is_empty() {
None
} else {
Some(suffix)
},
max_tokens: max_tokens.or(Some(350)),
temperature: Some(0.2),
top_p: Some(1.0),
stream: Some(false),
stop: None,
random_seed: None,
min_tokens: None,
};

let request_body = serde_json::to_string(&request)?;

log::debug!("Codestral: Sending FIM request");

let http_request = http_client::Request::builder()
.method(http_client::Method::POST)
.uri(format!("{}/v1/fim/completions", CODESTRAL_API_URL))
.header("Content-Type", "application/json")
.header("Authorization", format!("Bearer {}", api_key))
.body(http_client::AsyncBody::from(request_body))?;

let mut response = http_client.send(http_request).await?;
let status = response.status();

log::debug!("Codestral: Response status: {}", status);

if !status.is_success() {
let mut body = String::new();
response.body_mut().read_to_string(&mut body).await?;
return Err(anyhow::anyhow!(
"Codestral API error: {} - {}",
status,
body
));
}

let mut body = String::new();
response.body_mut().read_to_string(&mut body).await?;

let codestral_response: CodestralResponse = serde_json::from_str(&body)?;

let elapsed = start_time.elapsed();

if let Some(choice) = codestral_response.choices.first() {
let completion = &choice.message.content;

log::debug!(
"Codestral: Completion received ({} tokens, {:.2}s)",
codestral_response.usage.completion_tokens,
elapsed.as_secs_f64()
);

// Return just the completion text for insertion at cursor
Ok(completion.clone())
} else {
log::error!("Codestral: No completion returned in response");
Err(anyhow::anyhow!("No completion returned from Codestral"))
}
}
}

impl EditPredictionProvider for CodestralCompletionProvider {
fn name() -> &'static str {
"codestral"
}

fn display_name() -> &'static str {
"Codestral"
}

fn show_completions_in_menu() -> bool {
true
}

fn is_enabled(&self, _buffer: &Entity<Buffer>, _cursor_position: Anchor, cx: &App) -> bool {
Self::api_key(cx).is_some()
}

fn is_refreshing(&self) -> bool {
self.pending_request.is_some()
}

fn refresh(
&mut self,
buffer: Entity<Buffer>,
cursor_position: language::Anchor,
debounce: bool,
cx: &mut Context<Self>,
) {
log::debug!("Codestral: Refresh called (debounce: {})", debounce);

let Some(api_key) = Self::api_key(cx) else {
log::warn!("Codestral: No API key configured, skipping refresh");
return;
};

let snapshot = buffer.read(cx).snapshot();

// Check if current completion is still valid
if let Some(current_completion) = self.current_completion.as_ref() {
if current_completion.interpolate(&snapshot).is_some() {
return;
}
}

let http_client = self.http_client.clone();

// Get settings
let settings = all_language_settings(None, cx);
let model = settings
.edit_predictions
.codestral
.model
.clone()
.unwrap_or_else(|| "codestral-latest".to_string());
let max_tokens = settings.edit_predictions.codestral.max_tokens;

self.pending_request = Some(cx.spawn(async move |this, cx| {
if debounce {
log::debug!("Codestral: Debouncing for {:?}", DEBOUNCE_TIMEOUT);
smol::Timer::after(DEBOUNCE_TIMEOUT).await;
}

let cursor_offset = cursor_position.to_offset(&snapshot);
let cursor_point = cursor_offset.to_point(&snapshot);
let excerpt = EditPredictionExcerpt::select_from_buffer(
cursor_point,
&snapshot,
&EXCERPT_OPTIONS,
None,
)
.context("Line containing cursor doesn't fit in excerpt max bytes")?;

let excerpt_text = excerpt.text(&snapshot);
let cursor_within_excerpt = cursor_offset
.saturating_sub(excerpt.range.start)
.min(excerpt_text.body.len());
let prompt = excerpt_text.body[..cursor_within_excerpt].to_string();
let suffix = excerpt_text.body[cursor_within_excerpt..].to_string();

let completion_text = match Self::fetch_completion(
http_client,
&api_key,
prompt,
suffix,
model,
max_tokens,
)
.await
{
Ok(completion) => completion,
Err(e) => {
log::error!("Codestral: Failed to fetch completion: {}", e);
this.update(cx, |this, cx| {
this.pending_request = None;
cx.notify();
})?;
return Err(e);
}
};

if completion_text.trim().is_empty() {
log::debug!("Codestral: Completion was empty after trimming; ignoring");
this.update(cx, |this, cx| {
this.pending_request = None;
cx.notify();
})?;
return Ok(());
}

let edits: Arc<[(Range<Anchor>, String)]> =
vec![(cursor_position..cursor_position, completion_text)].into();
let edit_preview = buffer
.read_with(cx, |buffer, cx| buffer.preview_edits(edits.clone(), cx))?
.await;

this.update(cx, |this, cx| {
this.current_completion = Some(CurrentCompletion {
snapshot,
edits,
edit_preview,
});
this.pending_request = None;
cx.notify();
})?;

Ok(())
}));
}

fn cycle(
&mut self,
_buffer: Entity<Buffer>,
_cursor_position: Anchor,
_direction: Direction,
_cx: &mut Context<Self>,
) {
// Codestral doesn't support multiple completions, so cycling does nothing
}

fn accept(&mut self, _cx: &mut Context<Self>) {
log::debug!("Codestral: Completion accepted");
self.pending_request = None;
self.current_completion = None;
}

fn discard(&mut self, _cx: &mut Context<Self>) {
log::debug!("Codestral: Completion discarded");
self.pending_request = None;
self.current_completion = None;
}

/// Returns the completion suggestion, adjusted or invalidated based on user edits
fn suggest(
&mut self,
buffer: &Entity<Buffer>,
_cursor_position: Anchor,
cx: &mut Context<Self>,
) -> Option<EditPrediction> {
let current_completion = self.current_completion.as_ref()?;
let buffer = buffer.read(cx);
let edits = current_completion.interpolate(&buffer.snapshot())?;
if edits.is_empty() {
return None;
}
Some(EditPrediction::Local {
id: None,
edits,
edit_preview: Some(current_completion.edit_preview.clone()),
})
}
}

#[derive(Debug, Serialize, Deserialize)]
pub struct CodestralRequest {
pub model: String,
pub prompt: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub suffix: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub max_tokens: Option<u32>,
#[serde(skip_serializing_if = "Option::is_none")]
pub temperature: Option<f32>,
#[serde(skip_serializing_if = "Option::is_none")]
pub top_p: Option<f32>,
#[serde(skip_serializing_if = "Option::is_none")]
pub stream: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub stop: Option<Vec<String>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub random_seed: Option<u32>,
#[serde(skip_serializing_if = "Option::is_none")]
pub min_tokens: Option<u32>,
}

#[derive(Debug, Deserialize)]
pub struct CodestralResponse {
pub id: String,
pub object: String,
pub model: String,
pub usage: Usage,
pub created: u64,
pub choices: Vec<Choice>,
}

#[derive(Debug, Deserialize)]
pub struct Usage {
pub prompt_tokens: u32,
pub completion_tokens: u32,
pub total_tokens: u32,
}

#[derive(Debug, Deserialize)]
pub struct Choice {
pub index: u32,
pub message: Message,
pub finish_reason: String,
}

#[derive(Debug, Deserialize)]
pub struct Message {
pub content: String,
pub role: String,
}
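
For reference, a minimal sketch of the request body the removed provider serialized from `CodestralRequest` above. The field values here are illustrative, not defaults taken from the crate; only the field names, the `skip_serializing_if` behavior, and the `/v1/fim/completions` path come from the deleted code.

```rust
// Illustrative only: shows how the skip_serializing_if attributes above shape
// the FIM payload that fetch_completion POSTed to
// `{CODESTRAL_API_URL}/v1/fim/completions`.
fn example_fim_body() -> serde_json::Result<String> {
    let request = CodestralRequest {
        model: "codestral-latest".to_string(),
        prompt: "fn add(a: i32, b: i32) -> i32 {\n    ".to_string(),
        suffix: Some("\n}".to_string()),
        max_tokens: Some(350),
        temperature: Some(0.2),
        top_p: Some(1.0),
        stream: Some(false),
        stop: None,
        random_seed: None,
        min_tokens: None,
    };
    // The None fields (`stop`, `random_seed`, `min_tokens`) are omitted from
    // the serialized JSON because of the skip_serializing_if attributes.
    serde_json::to_string(&request)
}
```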
@@ -97,7 +97,6 @@ CREATE TABLE "worktree_entries" (
"is_external" BOOL NOT NULL,
"is_ignored" BOOL NOT NULL,
"is_deleted" BOOL NOT NULL,
"is_hidden" BOOL NOT NULL,
"git_status" INTEGER,
"is_fifo" BOOL NOT NULL,
PRIMARY KEY (project_id, worktree_id, id),

@@ -1,2 +0,0 @@
ALTER TABLE "worktree_entries"
ADD "is_hidden" BOOL NOT NULL DEFAULT FALSE;
@@ -282,7 +282,6 @@ impl Database {
git_status: ActiveValue::set(None),
is_external: ActiveValue::set(entry.is_external),
is_deleted: ActiveValue::set(false),
is_hidden: ActiveValue::set(entry.is_hidden),
scan_id: ActiveValue::set(update.scan_id as i64),
is_fifo: ActiveValue::set(entry.is_fifo),
}
@@ -301,7 +300,6 @@ impl Database {
worktree_entry::Column::MtimeNanos,
worktree_entry::Column::CanonicalPath,
worktree_entry::Column::IsIgnored,
worktree_entry::Column::IsHidden,
worktree_entry::Column::ScanId,
])
.to_owned(),
@@ -907,7 +905,6 @@ impl Database {
canonical_path: db_entry.canonical_path,
is_ignored: db_entry.is_ignored,
is_external: db_entry.is_external,
is_hidden: db_entry.is_hidden,
// This is only used in the summarization backlog, so if it's None,
// that just means we won't be able to detect when to resummarize
// based on total number of backlogged bytes - instead, we'd go

@@ -671,7 +671,6 @@ impl Database {
canonical_path: db_entry.canonical_path,
is_ignored: db_entry.is_ignored,
is_external: db_entry.is_external,
is_hidden: db_entry.is_hidden,
// This is only used in the summarization backlog, so if it's None,
// that just means we won't be able to detect when to resummarize
// based on total number of backlogged bytes - instead, we'd go

@@ -19,7 +19,6 @@ pub struct Model {
pub is_ignored: bool,
pub is_external: bool,
pub is_deleted: bool,
pub is_hidden: bool,
pub scan_id: i64,
pub is_fifo: bool,
pub canonical_path: Option<String>,

@@ -30,9 +30,9 @@ impl fmt::Display for ZedVersion {

impl ZedVersion {
pub fn can_collaborate(&self) -> bool {
// v0.204.1 was the first version after the auto-update bug.
// We reject any clients older than that to hope we can persuade them to upgrade.
if self.0 < SemanticVersion::new(0, 204, 1) {
// v0.198.4 is the first version where we no longer connect to Collab automatically.
// We reject any clients older than that to prevent them from connecting to Collab just for authentication.
if self.0 < SemanticVersion::new(0, 198, 4) {
return false;
}

@@ -4,7 +4,7 @@ use crate::{
};
use call::ActiveCall;
use editor::{
DocumentColorsRenderMode, Editor, FETCH_COLORS_DEBOUNCE_TIMEOUT, RowInfo, SelectionEffects,
DocumentColorsRenderMode, Editor, RowInfo, SelectionEffects,
actions::{
ConfirmCodeAction, ConfirmCompletion, ConfirmRename, ContextMenuFirst,
ExpandMacroRecursively, MoveToEnd, Redo, Rename, SelectAll, ToggleCodeActions, Undo,
@@ -1272,7 +1272,7 @@ async fn test_language_server_statuses(cx_a: &mut TestAppContext, cx_b: &mut Tes
fake_language_server.start_progress("the-token").await;

executor.advance_clock(SERVER_PROGRESS_THROTTLE_TIMEOUT);
fake_language_server.notify::<lsp::notification::Progress>(lsp::ProgressParams {
fake_language_server.notify::<lsp::notification::Progress>(&lsp::ProgressParams {
token: lsp::NumberOrString::String("the-token".to_string()),
value: lsp::ProgressParamsValue::WorkDone(lsp::WorkDoneProgress::Report(
lsp::WorkDoneProgressReport {
@@ -1306,7 +1306,7 @@ async fn test_language_server_statuses(cx_a: &mut TestAppContext, cx_b: &mut Tes
});

executor.advance_clock(SERVER_PROGRESS_THROTTLE_TIMEOUT);
fake_language_server.notify::<lsp::notification::Progress>(lsp::ProgressParams {
fake_language_server.notify::<lsp::notification::Progress>(&lsp::ProgressParams {
token: lsp::NumberOrString::String("the-token".to_string()),
value: lsp::ProgressParamsValue::WorkDone(lsp::WorkDoneProgress::Report(
lsp::WorkDoneProgressReport {
@@ -2409,7 +2409,6 @@ async fn test_lsp_document_color(cx_a: &mut TestAppContext, cx_b: &mut TestAppCo
.unwrap();

color_request_handle.next().await.unwrap();
executor.advance_clock(FETCH_COLORS_DEBOUNCE_TIMEOUT);
executor.run_until_parked();

assert_eq!(
@@ -2849,7 +2848,7 @@ async fn test_lsp_pull_diagnostics(
});

fake_language_server.notify::<lsp::notification::PublishDiagnostics>(
lsp::PublishDiagnosticsParams {
&lsp::PublishDiagnosticsParams {
uri: lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(),
diagnostics: vec![lsp::Diagnostic {
range: lsp::Range {
@@ -2870,7 +2869,7 @@ async fn test_lsp_pull_diagnostics(
},
);
fake_language_server.notify::<lsp::notification::PublishDiagnostics>(
lsp::PublishDiagnosticsParams {
&lsp::PublishDiagnosticsParams {
uri: lsp::Uri::from_file_path(path!("/a/lib.rs")).unwrap(),
diagnostics: vec![lsp::Diagnostic {
range: lsp::Range {
@@ -2892,7 +2891,7 @@ async fn test_lsp_pull_diagnostics(
);

if should_stream_workspace_diagnostic {
fake_language_server.notify::<lsp::notification::Progress>(lsp::ProgressParams {
fake_language_server.notify::<lsp::notification::Progress>(&lsp::ProgressParams {
token: expected_workspace_diagnostic_token.clone(),
value: lsp::ProgressParamsValue::WorkspaceDiagnostic(
lsp::WorkspaceDiagnosticReportResult::Report(lsp::WorkspaceDiagnosticReport {
@@ -3074,7 +3073,7 @@ async fn test_lsp_pull_diagnostics(
});

if should_stream_workspace_diagnostic {
fake_language_server.notify::<lsp::notification::Progress>(lsp::ProgressParams {
fake_language_server.notify::<lsp::notification::Progress>(&lsp::ProgressParams {
token: expected_workspace_diagnostic_token.clone(),
value: lsp::ProgressParamsValue::WorkspaceDiagnostic(
lsp::WorkspaceDiagnosticReportResult::Report(lsp::WorkspaceDiagnosticReport {

@@ -25,7 +25,7 @@ use gpui::{
use language::{
Diagnostic, DiagnosticEntry, DiagnosticSourceKind, FakeLspAdapter, Language, LanguageConfig,
LanguageMatcher, LineEnding, OffsetRangeExt, Point, Rope,
language_settings::{Formatter, FormatterList},
language_settings::{Formatter, FormatterList, SelectedFormatter},
tree_sitter_rust, tree_sitter_typescript,
};
use lsp::{LanguageServerId, OneOf};
@@ -39,7 +39,7 @@ use project::{
use prompt_store::PromptBuilder;
use rand::prelude::*;
use serde_json::json;
use settings::{LanguageServerFormatterSpecifier, PrettierSettingsContent, SettingsStore};
use settings::{PrettierSettingsContent, SettingsStore};
use std::{
cell::{Cell, RefCell},
env, future, mem,
@@ -4077,7 +4077,7 @@ async fn test_collaborating_with_diagnostics(
.receive_notification::<lsp::notification::DidOpenTextDocument>()
.await;
fake_language_server.notify::<lsp::notification::PublishDiagnostics>(
lsp::PublishDiagnosticsParams {
&lsp::PublishDiagnosticsParams {
uri: lsp::Uri::from_file_path(path!("/a/a.rs")).unwrap(),
version: None,
diagnostics: vec![lsp::Diagnostic {
@@ -4097,7 +4097,7 @@ async fn test_collaborating_with_diagnostics(
.await
.unwrap();
fake_language_server.notify::<lsp::notification::PublishDiagnostics>(
lsp::PublishDiagnosticsParams {
&lsp::PublishDiagnosticsParams {
uri: lsp::Uri::from_file_path(path!("/a/a.rs")).unwrap(),
version: None,
diagnostics: vec![lsp::Diagnostic {
@@ -4171,7 +4171,7 @@ async fn test_collaborating_with_diagnostics(

// Simulate a language server reporting more errors for a file.
fake_language_server.notify::<lsp::notification::PublishDiagnostics>(
lsp::PublishDiagnosticsParams {
&lsp::PublishDiagnosticsParams {
uri: lsp::Uri::from_file_path(path!("/a/a.rs")).unwrap(),
version: None,
diagnostics: vec![
@@ -4269,7 +4269,7 @@ async fn test_collaborating_with_diagnostics(

// Simulate a language server reporting no errors for a file.
fake_language_server.notify::<lsp::notification::PublishDiagnostics>(
lsp::PublishDiagnosticsParams {
&lsp::PublishDiagnosticsParams {
uri: lsp::Uri::from_file_path(path!("/a/a.rs")).unwrap(),
version: None,
diagnostics: Vec::new(),
@@ -4365,7 +4365,7 @@ async fn test_collaborating_with_lsp_progress_updates_and_diagnostics_ordering(
.await
.into_response()
.unwrap();
fake_language_server.notify::<lsp::notification::Progress>(lsp::ProgressParams {
fake_language_server.notify::<lsp::notification::Progress>(&lsp::ProgressParams {
token: lsp::NumberOrString::String("the-disk-based-token".to_string()),
value: lsp::ProgressParamsValue::WorkDone(lsp::WorkDoneProgress::Begin(
lsp::WorkDoneProgressBegin {
@@ -4376,7 +4376,7 @@ async fn test_collaborating_with_lsp_progress_updates_and_diagnostics_ordering(
});
for file_name in file_names {
fake_language_server.notify::<lsp::notification::PublishDiagnostics>(
lsp::PublishDiagnosticsParams {
&lsp::PublishDiagnosticsParams {
uri: lsp::Uri::from_file_path(Path::new(path!("/test")).join(file_name)).unwrap(),
version: None,
diagnostics: vec![lsp::Diagnostic {
@@ -4389,7 +4389,7 @@ async fn test_collaborating_with_lsp_progress_updates_and_diagnostics_ordering(
},
);
}
fake_language_server.notify::<lsp::notification::Progress>(lsp::ProgressParams {
fake_language_server.notify::<lsp::notification::Progress>(&lsp::ProgressParams {
token: lsp::NumberOrString::String("the-disk-based-token".to_string()),
value: lsp::ProgressParamsValue::WorkDone(lsp::WorkDoneProgress::End(
lsp::WorkDoneProgressEnd { message: None },
@@ -4610,13 +4610,14 @@ async fn test_formatting_buffer(
cx_a.update(|cx| {
SettingsStore::update_global(cx, |store, cx| {
store.update_user_settings(cx, |file| {
file.project.all_languages.defaults.formatter =
Some(FormatterList::Single(Formatter::External {
file.project.all_languages.defaults.formatter = Some(SelectedFormatter::List(
FormatterList::Single(Formatter::External {
command: "awk".into(),
arguments: Some(
vec!["{sub(/two/,\"{buffer_path}\")}1".to_string()].into(),
),
}));
}),
));
});
});
});
@@ -4707,7 +4708,7 @@ async fn test_prettier_formatting_buffer(
cx_a.update(|cx| {
SettingsStore::update_global(cx, |store, cx| {
store.update_user_settings(cx, |file| {
file.project.all_languages.defaults.formatter = Some(FormatterList::default());
file.project.all_languages.defaults.formatter = Some(SelectedFormatter::Auto);
file.project.all_languages.defaults.prettier = Some(PrettierSettingsContent {
allowed: Some(true),
..Default::default()
@@ -4718,8 +4719,8 @@ async fn test_prettier_formatting_buffer(
cx_b.update(|cx| {
SettingsStore::update_global(cx, |store, cx| {
store.update_user_settings(cx, |file| {
file.project.all_languages.defaults.formatter = Some(FormatterList::Single(
Formatter::LanguageServer(LanguageServerFormatterSpecifier::Current),
file.project.all_languages.defaults.formatter = Some(SelectedFormatter::List(
FormatterList::Single(Formatter::LanguageServer { name: None }),
));
file.project.all_languages.defaults.prettier = Some(PrettierSettingsContent {
allowed: Some(true),

@@ -14,7 +14,7 @@ use gpui::{
use http_client::BlockedHttpClient;
use language::{
FakeLspAdapter, Language, LanguageConfig, LanguageMatcher, LanguageRegistry,
language_settings::{Formatter, FormatterList, language_settings},
language_settings::{Formatter, FormatterList, SelectedFormatter, language_settings},
tree_sitter_typescript,
};
use node_runtime::NodeRuntime;
@@ -27,7 +27,7 @@ use remote::RemoteClient;
use remote_server::{HeadlessAppState, HeadlessProject};
use rpc::proto;
use serde_json::json;
use settings::{LanguageServerFormatterSpecifier, PrettierSettingsContent, SettingsStore};
use settings::{PrettierSettingsContent, SettingsStore};
use std::{
path::Path,
sync::{Arc, atomic::AtomicUsize},
@@ -491,7 +491,7 @@ async fn test_ssh_collaboration_formatting_with_prettier(
cx_a.update(|cx| {
SettingsStore::update_global(cx, |store, cx| {
store.update_user_settings(cx, |file| {
file.project.all_languages.defaults.formatter = Some(FormatterList::default());
file.project.all_languages.defaults.formatter = Some(SelectedFormatter::Auto);
file.project.all_languages.defaults.prettier = Some(PrettierSettingsContent {
allowed: Some(true),
..Default::default()
@@ -502,8 +502,8 @@ async fn test_ssh_collaboration_formatting_with_prettier(
cx_b.update(|cx| {
SettingsStore::update_global(cx, |store, cx| {
store.update_user_settings(cx, |file| {
file.project.all_languages.defaults.formatter = Some(FormatterList::Single(
Formatter::LanguageServer(LanguageServerFormatterSpecifier::Current),
file.project.all_languages.defaults.formatter = Some(SelectedFormatter::List(
FormatterList::Single(Formatter::LanguageServer { name: None }),
));
file.project.all_languages.defaults.prettier = Some(PrettierSettingsContent {
allowed: Some(true),
@@ -550,7 +550,7 @@ async fn test_ssh_collaboration_formatting_with_prettier(
cx_a.update(|cx| {
SettingsStore::update_global(cx, |store, cx| {
store.update_user_settings(cx, |file| {
file.project.all_languages.defaults.formatter = Some(FormatterList::default());
file.project.all_languages.defaults.formatter = Some(SelectedFormatter::Auto);
file.project.all_languages.defaults.prettier = Some(PrettierSettingsContent {
allowed: Some(true),
..Default::default()

@@ -2250,7 +2250,7 @@ impl CollabPanel {
})),
)
.child(
v_flex().w_full().items_center().child(
div().flex().w_full().items_center().child(
Label::new("Sign in to enable collaboration.")
.color(Color::Muted)
.size(LabelSize::Small),

@@ -1,8 +1,8 @@
|
||||
[package]
|
||||
name = "collections"
|
||||
name = "zed-collections"
|
||||
version = "0.1.0"
|
||||
edition.workspace = true
|
||||
publish = false
|
||||
publish = true
|
||||
license = "Apache-2.0"
|
||||
description = "Standard collection type re-exports used by Zed and GPUI"
|
||||
|
||||
|
||||
@@ -97,10 +97,11 @@ impl CommandPaletteFilter {
|
||||
pub struct CommandInterceptResult {
|
||||
/// The action produced as a result of the interception.
|
||||
pub action: Box<dyn Action>,
|
||||
/// The display string to show in the command palette for this result.
|
||||
// TODO: Document this field.
|
||||
#[allow(missing_docs)]
|
||||
pub string: String,
|
||||
/// The character positions in the string that match the query.
|
||||
/// Used for highlighting matched characters in the command palette UI.
|
||||
// TODO: Document this field.
|
||||
#[allow(missing_docs)]
|
||||
pub positions: Vec<usize>,
|
||||
}
|
||||
|
||||
|
||||
@@ -41,9 +41,12 @@ impl StdioTransport {
|
||||
command.current_dir(working_directory);
|
||||
}
|
||||
|
||||
let mut server = command
|
||||
.spawn()
|
||||
.with_context(|| format!("failed to spawn command {command:?})",))?;
|
||||
let mut server = command.spawn().with_context(|| {
|
||||
format!(
|
||||
"failed to spawn command. (path={:?}, args={:?})",
|
||||
binary.executable, &binary.args
|
||||
)
|
||||
})?;
|
||||
|
||||
let stdin = server.stdin.take().unwrap();
|
||||
let stdout = server.stdout.take().unwrap();
|
||||
|
||||
@@ -270,7 +270,7 @@ impl RegisteredBuffer {
|
||||
server
|
||||
.lsp
|
||||
.notify::<lsp::notification::DidChangeTextDocument>(
|
||||
lsp::DidChangeTextDocumentParams {
|
||||
&lsp::DidChangeTextDocumentParams {
|
||||
text_document: lsp::VersionedTextDocumentIdentifier::new(
|
||||
buffer.uri.clone(),
|
||||
buffer.snapshot_version,
|
||||
@@ -744,7 +744,7 @@ impl Copilot {
|
||||
let snapshot = buffer.read(cx).snapshot();
|
||||
server
|
||||
.notify::<lsp::notification::DidOpenTextDocument>(
|
||||
lsp::DidOpenTextDocumentParams {
|
||||
&lsp::DidOpenTextDocumentParams {
|
||||
text_document: lsp::TextDocumentItem {
|
||||
uri: uri.clone(),
|
||||
language_id: language_id.clone(),
|
||||
@@ -792,14 +792,13 @@ impl Copilot {
|
||||
server
|
||||
.lsp
|
||||
.notify::<lsp::notification::DidSaveTextDocument>(
|
||||
lsp::DidSaveTextDocumentParams {
|
||||
&lsp::DidSaveTextDocumentParams {
|
||||
text_document: lsp::TextDocumentIdentifier::new(
|
||||
registered_buffer.uri.clone(),
|
||||
),
|
||||
text: None,
|
||||
},
|
||||
)
|
||||
.ok();
|
||||
)?;
|
||||
}
|
||||
language::BufferEvent::FileHandleChanged
|
||||
| language::BufferEvent::LanguageChanged => {
|
||||
@@ -815,15 +814,14 @@ impl Copilot {
|
||||
server
|
||||
.lsp
|
||||
.notify::<lsp::notification::DidCloseTextDocument>(
|
||||
lsp::DidCloseTextDocumentParams {
|
||||
&lsp::DidCloseTextDocumentParams {
|
||||
text_document: lsp::TextDocumentIdentifier::new(old_uri),
|
||||
},
|
||||
)
|
||||
.ok();
|
||||
)?;
|
||||
server
|
||||
.lsp
|
||||
.notify::<lsp::notification::DidOpenTextDocument>(
|
||||
lsp::DidOpenTextDocumentParams {
|
||||
&lsp::DidOpenTextDocumentParams {
|
||||
text_document: lsp::TextDocumentItem::new(
|
||||
registered_buffer.uri.clone(),
|
||||
registered_buffer.language_id.clone(),
|
||||
@@ -831,8 +829,7 @@ impl Copilot {
|
||||
registered_buffer.snapshot.text(),
|
||||
),
|
||||
},
|
||||
)
|
||||
.ok();
|
||||
)?;
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
@@ -849,7 +846,7 @@ impl Copilot {
|
||||
server
|
||||
.lsp
|
||||
.notify::<lsp::notification::DidCloseTextDocument>(
|
||||
lsp::DidCloseTextDocumentParams {
|
||||
&lsp::DidCloseTextDocumentParams {
|
||||
text_document: lsp::TextDocumentIdentifier::new(buffer.uri),
|
||||
},
|
||||
)
|
||||
@@ -1154,12 +1151,9 @@ fn notify_did_change_config_to_server(
|
||||
}
|
||||
});
|
||||
|
||||
server
|
||||
.notify::<lsp::notification::DidChangeConfiguration>(lsp::DidChangeConfigurationParams {
|
||||
settings,
|
||||
})
|
||||
.ok();
|
||||
Ok(())
|
||||
server.notify::<lsp::notification::DidChangeConfiguration>(&lsp::DidChangeConfigurationParams {
|
||||
settings,
|
||||
})
|
||||
}
|
||||
|
||||
async fn clear_copilot_dir() {
|
||||
|
||||
@@ -92,10 +92,7 @@ pub async fn init(crash_init: InitCrashHandler) {
|
||||
#[cfg(target_os = "macos")]
|
||||
suspend_all_other_threads();
|
||||
|
||||
// on macos this "ping" is needed to ensure that all our
|
||||
// `client.send_message` calls have been processed before we trigger the
|
||||
// minidump request.
|
||||
client.ping().ok();
|
||||
client.ping().unwrap();
|
||||
client.request_dump(crash_context).is_ok()
|
||||
} else {
|
||||
true
|
||||
|
||||
@@ -46,7 +46,6 @@ pub trait DapDelegate: Send + Sync + 'static {
|
||||
async fn which(&self, command: &OsStr) -> Option<PathBuf>;
|
||||
async fn read_text_file(&self, path: &RelPath) -> Result<String>;
|
||||
async fn shell_env(&self) -> collections::HashMap<String, String>;
|
||||
fn is_headless(&self) -> bool;
|
||||
}
|
||||
|
||||
#[derive(
|
||||
|
||||
@@ -674,7 +674,13 @@ impl StdioTransport {
|
||||
command.args(&binary.arguments);
|
||||
command.envs(&binary.envs);
|
||||
|
||||
let mut process = Child::spawn(command, Stdio::piped())?;
|
||||
let mut process = Child::spawn(command, Stdio::piped()).with_context(|| {
|
||||
format!(
|
||||
"failed to spawn command `{} {}`.",
|
||||
binary_command,
|
||||
binary.arguments.join(" ")
|
||||
)
|
||||
})?;
|
||||
|
||||
let err_task = process.stderr.take().map(|stderr| {
|
||||
cx.background_spawn(TransportDelegate::handle_adapter_log(
|
||||
@@ -1052,13 +1058,11 @@ impl Child {
#[cfg(not(windows))]
fn spawn(mut command: std::process::Command, stdin: Stdio) -> Result<Self> {
util::set_pre_exec_to_start_new_session(&mut command);
let mut command = smol::process::Command::from(command);
let process = command
let process = smol::process::Command::from(command)
.stdin(stdin)
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.spawn()
.with_context(|| format!("failed to spawn command `{command:?}`",))?;
.spawn()?;
Ok(Self { process })
}

@@ -1066,13 +1070,11 @@ impl Child {
fn spawn(command: std::process::Command, stdin: Stdio) -> Result<Self> {
// TODO(windows): create a job object and add the child process handle to it,
// see https://learn.microsoft.com/en-us/windows/win32/procthread/job-objects
let mut command = smol::process::Command::from(command);
let process = command
let process = smol::process::Command::from(command)
.stdin(stdin)
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.spawn()
.with_context(|| format!("failed to spawn command `{command:?}`",))?;
.spawn()?;
Ok(Self { process })
}

@@ -120,13 +120,6 @@ impl JsDebugAdapter {
|
||||
configuration
|
||||
.entry("sourceMapRenames")
|
||||
.or_insert(true.into());
|
||||
|
||||
// Set up remote browser debugging
|
||||
if delegate.is_headless() {
|
||||
configuration
|
||||
.entry("browserLaunchLocation")
|
||||
.or_insert("ui".into());
|
||||
}
|
||||
}
|
||||
|
||||
let adapter_path = if let Some(user_installed_path) = user_installed_path {
|
||||
|
||||
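The adapter-configuration code above uses `entry(...).or_insert(...)` so a default only fills a key the user has not set. A small sketch of that pattern on a plain `serde_json::Map` (the key names mirror the diff; the surrounding adapter plumbing is omitted):

```rust
use serde_json::{Map, Value, json};

fn apply_defaults(configuration: &mut Map<String, Value>, is_headless: bool) {
    // Keep whatever the user configured; otherwise enable source-map renames.
    configuration
        .entry("sourceMapRenames")
        .or_insert(Value::Bool(true));

    // When the project is remote/headless, ask for the browser UI locally.
    if is_headless {
        configuration
            .entry("browserLaunchLocation")
            .or_insert(json!("ui"));
    }
}

fn main() {
    let mut configuration = Map::new();
    configuration.insert("sourceMapRenames".into(), Value::Bool(false));
    apply_defaults(&mut configuration, true);
    // The user's explicit `false` survives; the missing key gets the default.
    assert_eq!(configuration["sourceMapRenames"], Value::Bool(false));
    assert_eq!(configuration["browserLaunchLocation"], json!("ui"));
}
```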
@@ -1,12 +1,12 @@
|
||||
use crate::*;
|
||||
use anyhow::{Context as _, bail};
|
||||
use anyhow::Context as _;
|
||||
use dap::{DebugRequest, StartDebuggingRequestArguments, adapters::DebugTaskDefinition};
|
||||
use fs::RemoveOptions;
|
||||
use futures::{StreamExt, TryStreamExt};
|
||||
use gpui::http_client::AsyncBody;
|
||||
use gpui::{AsyncApp, SharedString};
|
||||
use json_dotpath::DotPaths;
|
||||
use language::{LanguageName, Toolchain};
|
||||
use language::LanguageName;
|
||||
use paths::debug_adapters_dir;
|
||||
use serde_json::Value;
|
||||
use smol::fs::File;
|
||||
@@ -20,8 +20,7 @@ use std::{
|
||||
ffi::OsStr,
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
use util::command::new_smol_command;
|
||||
use util::{ResultExt, paths::PathStyle, rel_path::RelPath};
|
||||
use util::{ResultExt, maybe, paths::PathStyle, rel_path::RelPath};
|
||||
|
||||
#[derive(Default)]
|
||||
pub(crate) struct PythonDebugAdapter {
|
||||
@@ -93,16 +92,12 @@ impl PythonDebugAdapter {
|
||||
})
|
||||
}
|
||||
|
||||
async fn fetch_wheel(
|
||||
&self,
|
||||
toolchain: Option<Toolchain>,
|
||||
delegate: &Arc<dyn DapDelegate>,
|
||||
) -> Result<Arc<Path>> {
|
||||
async fn fetch_wheel(&self, delegate: &Arc<dyn DapDelegate>) -> Result<Arc<Path>, String> {
|
||||
let download_dir = debug_adapters_dir().join(Self::ADAPTER_NAME).join("wheels");
|
||||
std::fs::create_dir_all(&download_dir)?;
|
||||
let venv_python = self.base_venv_path(toolchain, delegate).await?;
|
||||
std::fs::create_dir_all(&download_dir).map_err(|e| e.to_string())?;
|
||||
let system_python = self.base_venv_path(delegate).await?;
|
||||
|
||||
let installation_succeeded = util::command::new_smol_command(venv_python.as_ref())
|
||||
let installation_succeeded = util::command::new_smol_command(system_python.as_ref())
|
||||
.args([
|
||||
"-m",
|
||||
"pip",
|
||||
@@ -114,36 +109,36 @@ impl PythonDebugAdapter {
|
||||
])
|
||||
.output()
|
||||
.await
|
||||
.context("spawn system python")?
|
||||
.map_err(|e| format!("{e}"))?
|
||||
.status
|
||||
.success();
|
||||
if !installation_succeeded {
|
||||
bail!("debugpy installation failed (could not fetch Debugpy's wheel)");
|
||||
return Err("debugpy installation failed (could not fetch Debugpy's wheel)".into());
|
||||
}
|
||||
|
||||
let wheel_path = std::fs::read_dir(&download_dir)?
|
||||
let wheel_path = std::fs::read_dir(&download_dir)
|
||||
.map_err(|e| e.to_string())?
|
||||
.find_map(|entry| {
|
||||
entry.ok().filter(|e| {
|
||||
e.file_type().is_ok_and(|typ| typ.is_file())
|
||||
&& Path::new(&e.file_name()).extension() == Some("whl".as_ref())
|
||||
})
|
||||
})
|
||||
.with_context(|| format!("Did not find a .whl in {download_dir:?}"))?;
|
||||
.ok_or_else(|| String::from("Did not find a .whl in {download_dir}"))?;
|
||||
|
||||
util::archive::extract_zip(
|
||||
&debug_adapters_dir().join(Self::ADAPTER_NAME),
|
||||
File::open(&wheel_path.path()).await?,
|
||||
File::open(&wheel_path.path())
|
||||
.await
|
||||
.map_err(|e| e.to_string())?,
|
||||
)
|
||||
.await?;
|
||||
.await
|
||||
.map_err(|e| e.to_string())?;
|
||||
|
||||
Ok(Arc::from(wheel_path.path()))
|
||||
}
|
||||
|
||||
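The `fetch_wheel` function above scans the download directory for the first `.whl` file after running pip. A standalone sketch of that directory scan, using only the standard library:

```rust
use std::fs;
use std::path::{Path, PathBuf};

fn find_first_wheel(download_dir: &Path) -> Result<PathBuf, String> {
    fs::read_dir(download_dir)
        .map_err(|e| e.to_string())?
        .filter_map(|entry| entry.ok())
        .find(|entry| {
            // Only plain files whose extension is `.whl` qualify.
            entry.file_type().is_ok_and(|ty| ty.is_file())
                && entry.path().extension() == Some("whl".as_ref())
        })
        .map(|entry| entry.path())
        .ok_or_else(|| format!("Did not find a .whl in {download_dir:?}"))
}
```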
async fn maybe_fetch_new_wheel(
|
||||
&self,
|
||||
toolchain: Option<Toolchain>,
|
||||
delegate: &Arc<dyn DapDelegate>,
|
||||
) -> Result<()> {
|
||||
async fn maybe_fetch_new_wheel(&self, delegate: &Arc<dyn DapDelegate>) {
|
||||
let latest_release = delegate
|
||||
.http_client()
|
||||
.get(
|
||||
@@ -153,61 +148,62 @@ impl PythonDebugAdapter {
|
||||
)
|
||||
.await
|
||||
.log_err();
|
||||
let response = latest_release
|
||||
.filter(|response| response.status().is_success())
|
||||
.context("getting latest release")?;
|
||||
maybe!(async move {
|
||||
let response = latest_release.filter(|response| response.status().is_success())?;
|
||||
|
||||
let download_dir = debug_adapters_dir().join(Self::ADAPTER_NAME);
|
||||
std::fs::create_dir_all(&download_dir)?;
|
||||
let download_dir = debug_adapters_dir().join(Self::ADAPTER_NAME);
|
||||
std::fs::create_dir_all(&download_dir).ok()?;
|
||||
|
||||
let mut output = String::new();
|
||||
response.into_body().read_to_string(&mut output).await?;
|
||||
let as_json = serde_json::Value::from_str(&output)?;
|
||||
let latest_version = as_json
|
||||
.get("info")
|
||||
.and_then(|info| {
|
||||
let mut output = String::new();
|
||||
response
|
||||
.into_body()
|
||||
.read_to_string(&mut output)
|
||||
.await
|
||||
.ok()?;
|
||||
let as_json = serde_json::Value::from_str(&output).ok()?;
|
||||
let latest_version = as_json.get("info").and_then(|info| {
|
||||
info.get("version")
|
||||
.and_then(|version| version.as_str())
|
||||
.map(ToOwned::to_owned)
|
||||
})
|
||||
.context("parsing latest release information")?;
|
||||
let dist_info_dirname: OsString = format!("debugpy-{latest_version}.dist-info").into();
|
||||
let is_up_to_date = delegate
|
||||
.fs()
|
||||
.read_dir(&debug_adapters_dir().join(Self::ADAPTER_NAME))
|
||||
.await?
|
||||
.into_stream()
|
||||
.any(async |entry| {
|
||||
entry.is_ok_and(|e| e.file_name().is_some_and(|name| name == dist_info_dirname))
|
||||
})
|
||||
.await;
|
||||
|
||||
if !is_up_to_date {
|
||||
delegate
|
||||
})?;
|
||||
let dist_info_dirname: OsString = format!("debugpy-{latest_version}.dist-info").into();
|
||||
let is_up_to_date = delegate
|
||||
.fs()
|
||||
.remove_dir(
|
||||
&debug_adapters_dir().join(Self::ADAPTER_NAME),
|
||||
RemoveOptions {
|
||||
recursive: true,
|
||||
ignore_if_not_exists: true,
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
self.fetch_wheel(toolchain, delegate).await?;
|
||||
}
|
||||
anyhow::Ok(())
|
||||
.read_dir(&debug_adapters_dir().join(Self::ADAPTER_NAME))
|
||||
.await
|
||||
.ok()?
|
||||
.into_stream()
|
||||
.any(async |entry| {
|
||||
entry.is_ok_and(|e| e.file_name().is_some_and(|name| name == dist_info_dirname))
|
||||
})
|
||||
.await;
|
||||
|
||||
if !is_up_to_date {
|
||||
delegate
|
||||
.fs()
|
||||
.remove_dir(
|
||||
&debug_adapters_dir().join(Self::ADAPTER_NAME),
|
||||
RemoveOptions {
|
||||
recursive: true,
|
||||
ignore_if_not_exists: true,
|
||||
},
|
||||
)
|
||||
.await
|
||||
.ok()?;
|
||||
self.fetch_wheel(delegate).await.ok()?;
|
||||
}
|
||||
Some(())
|
||||
})
|
||||
.await;
|
||||
}
|
||||
|
||||
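The rewritten `maybe_fetch_new_wheel` wraps its body in `util::maybe!` so every fallible step can bail out with `?` on an `Option` without changing the enclosing function's signature. A minimal stand-in for that helper (an assumption; Zed's real `maybe!` may differ in details), plus a usage example:

```rust
// Turn a block into an immediately-invoked closure so `?` on an `Option`
// returns `None` from the block instead of from the enclosing function.
macro_rules! maybe {
    ($block:block) => {
        (|| $block)()
    };
}

fn port_of(addr: &str) -> Option<u16> {
    maybe!({
        let (_, port) = addr.rsplit_once(':')?;
        port.parse().ok()
    })
}

fn main() {
    assert_eq!(port_of("127.0.0.1:8080"), Some(8080));
    assert_eq!(port_of("localhost"), None);
}
```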
async fn fetch_debugpy_whl(
|
||||
&self,
|
||||
toolchain: Option<Toolchain>,
|
||||
delegate: &Arc<dyn DapDelegate>,
|
||||
) -> Result<Arc<Path>, String> {
|
||||
self.debugpy_whl_base_path
|
||||
.get_or_init(|| async move {
|
||||
self.maybe_fetch_new_wheel(toolchain, delegate)
|
||||
.await
|
||||
.map_err(|e| format!("{e}"))?;
|
||||
self.maybe_fetch_new_wheel(delegate).await;
|
||||
Ok(Arc::from(
|
||||
debug_adapters_dir()
|
||||
.join(Self::ADAPTER_NAME)
|
||||
@@ -220,24 +216,12 @@ impl PythonDebugAdapter {
|
||||
.clone()
|
||||
}
|
||||
|
||||
async fn base_venv_path(
|
||||
&self,
|
||||
toolchain: Option<Toolchain>,
|
||||
delegate: &Arc<dyn DapDelegate>,
|
||||
) -> Result<Arc<Path>> {
|
||||
let result = self.base_venv_path
|
||||
async fn base_venv_path(&self, delegate: &Arc<dyn DapDelegate>) -> Result<Arc<Path>, String> {
|
||||
self.base_venv_path
|
||||
.get_or_init(|| async {
|
||||
let base_python = if let Some(toolchain) = toolchain {
|
||||
toolchain.path.to_string()
|
||||
} else {
|
||||
Self::system_python_name(delegate).await.ok_or_else(|| {
|
||||
let mut message = "Could not find a Python installation".to_owned();
|
||||
if cfg!(windows){
|
||||
message.push_str(". Install Python from the Microsoft Store, or manually from https://www.python.org/downloads/windows.")
|
||||
}
|
||||
message
|
||||
})?
|
||||
};
|
||||
let base_python = Self::system_python_name(delegate)
|
||||
.await
|
||||
.ok_or_else(|| String::from("Could not find a Python installation"))?;
|
||||
|
||||
let did_succeed = util::command::new_smol_command(base_python)
|
||||
.args(["-m", "venv", "zed_base_venv"])
|
||||
@@ -255,50 +239,35 @@ impl PythonDebugAdapter {
|
||||
return Err("Failed to create base virtual environment".into());
|
||||
}
|
||||
|
||||
const PYTHON_PATH: &str = if cfg!(target_os = "windows") {
|
||||
"Scripts/python.exe"
|
||||
const DIR: &str = if cfg!(target_os = "windows") {
|
||||
"Scripts"
|
||||
} else {
|
||||
"bin/python3"
|
||||
"bin"
|
||||
};
|
||||
Ok(Arc::from(
|
||||
paths::debug_adapters_dir()
|
||||
.join(Self::DEBUG_ADAPTER_NAME.as_ref())
|
||||
.join("zed_base_venv")
|
||||
.join(PYTHON_PATH)
|
||||
.join(DIR)
|
||||
.join("python3")
|
||||
.as_ref(),
|
||||
))
|
||||
})
|
||||
.await
|
||||
.clone();
|
||||
match result {
|
||||
Ok(path) => Ok(path),
|
||||
Err(e) => Err(anyhow::anyhow!("{e}")),
|
||||
}
|
||||
.clone()
|
||||
}
|
||||
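The refactor above swaps a single OS-specific relative path (`Scripts/python.exe` vs `bin/python3`) for a per-OS directory joined with a hard-coded `python3`, which may not resolve on Windows, where a venv conventionally exposes `Scripts\python.exe`. For reference, a small sketch of the conventional `python -m venv` layout (not Zed-specific):

```rust
use std::path::{Path, PathBuf};

/// Conventional location of the interpreter inside a `python -m venv` tree.
fn venv_python(venv_root: &Path) -> PathBuf {
    if cfg!(target_os = "windows") {
        venv_root.join("Scripts").join("python.exe")
    } else {
        venv_root.join("bin").join("python3")
    }
}

fn main() {
    let root = Path::new("zed_base_venv");
    println!("{}", venv_python(root).display());
}
```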
async fn system_python_name(delegate: &Arc<dyn DapDelegate>) -> Option<String> {
|
||||
const BINARY_NAMES: [&str; 3] = ["python3", "python", "py"];
|
||||
let mut name = None;
|
||||
|
||||
for cmd in BINARY_NAMES {
|
||||
let Some(path) = delegate.which(OsStr::new(cmd)).await else {
|
||||
continue;
|
||||
};
|
||||
// Try to detect situations where `python3` exists but is not a real Python interpreter.
|
||||
// Notably, on fresh Windows installs, `python3` is a shim that opens the Microsoft Store app
|
||||
// when run with no arguments, and just fails otherwise.
|
||||
let Some(output) = new_smol_command(&path)
|
||||
.args(["-c", "print(1 + 2)"])
|
||||
.output()
|
||||
name = delegate
|
||||
.which(OsStr::new(cmd))
|
||||
.await
|
||||
.ok()
|
||||
else {
|
||||
continue;
|
||||
};
|
||||
if output.stdout.trim_ascii() != b"3" {
|
||||
continue;
|
||||
.map(|path| path.to_string_lossy().into_owned());
|
||||
if name.is_some() {
|
||||
break;
|
||||
}
|
||||
name = Some(path.to_string_lossy().into_owned());
|
||||
break;
|
||||
}
|
||||
name
|
||||
}
|
||||
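One side of the hunk above probes each candidate interpreter by running a trivial expression through it, to weed out the Windows Store `python3` shim that `which` happily reports; the other simply takes the first hit from `which`. A self-contained sketch of the probe (blocking `std::process` here for brevity, whereas the real code uses Zed's async command helpers):

```rust
use std::process::Command;

const CANDIDATES: [&str; 3] = ["python3", "python", "py"];

/// Return the first candidate that behaves like a real interpreter.
fn working_python() -> Option<&'static str> {
    CANDIDATES.into_iter().find_map(|name| {
        let output = Command::new(name)
            .args(["-c", "print(1 + 2)"])
            .output()
            .ok()?;
        // The Microsoft Store shim fails (or prints nothing) here, while a
        // real interpreter prints `3`.
        if output.status.success() && output.stdout.trim_ascii() == b"3" {
            Some(name)
        } else {
            None
        }
    })
}

fn main() {
    match working_python() {
        Some(python) => println!("using {python}"),
        None => eprintln!("Could not find a Python installation"),
    }
}
```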
@@ -777,10 +746,15 @@ impl DebugAdapter for PythonDebugAdapter {
|
||||
)
|
||||
.await;
|
||||
|
||||
self.fetch_debugpy_whl(toolchain.clone(), delegate)
|
||||
let debugpy_path = self
|
||||
.fetch_debugpy_whl(delegate)
|
||||
.await
|
||||
.map_err(|e| anyhow::anyhow!("{e}"))?;
|
||||
if let Some(toolchain) = &toolchain {
|
||||
log::debug!(
|
||||
"Found debugpy in toolchain environment: {}",
|
||||
debugpy_path.display()
|
||||
);
|
||||
return self
|
||||
.get_installed_binary(
|
||||
delegate,
|
||||
|
||||
@@ -963,21 +963,26 @@ pub fn init(cx: &mut App) {
|
||||
};
|
||||
|
||||
let project = workspace.project();
|
||||
log_store.update(cx, |store, cx| {
|
||||
store.add_project(project, cx);
|
||||
});
|
||||
if project.read(cx).is_local() {
|
||||
log_store.update(cx, |store, cx| {
|
||||
store.add_project(project, cx);
|
||||
});
|
||||
}
|
||||
|
||||
let log_store = log_store.clone();
|
||||
workspace.register_action(move |workspace, _: &OpenDebugAdapterLogs, window, cx| {
|
||||
workspace.add_item_to_active_pane(
|
||||
Box::new(cx.new(|cx| {
|
||||
DapLogView::new(workspace.project().clone(), log_store.clone(), window, cx)
|
||||
})),
|
||||
None,
|
||||
true,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
let project = workspace.project().read(cx);
|
||||
if project.is_local() {
|
||||
workspace.add_item_to_active_pane(
|
||||
Box::new(cx.new(|cx| {
|
||||
DapLogView::new(workspace.project().clone(), log_store.clone(), window, cx)
|
||||
})),
|
||||
None,
|
||||
true,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
}
|
||||
});
|
||||
})
|
||||
.detach();
|
||||
|
||||
@@ -268,12 +268,12 @@ impl DebugPanel {
|
||||
|
||||
async move |_, cx| {
|
||||
if let Err(error) = task.await {
|
||||
log::error!("{error:#}");
|
||||
log::error!("{error}");
|
||||
session
|
||||
.update(cx, |session, cx| {
|
||||
session
|
||||
.console_output(cx)
|
||||
.unbounded_send(format!("error: {:#}", error))
|
||||
.unbounded_send(format!("error: {}", error))
|
||||
.ok();
|
||||
session.shutdown(cx)
|
||||
})?
|
||||
|
||||
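The hunk above toggles between `{error}` and anyhow's alternate `{error:#}` formatting; the difference matters because the alternate form prints the whole context chain on one line rather than just the outermost message. A tiny demonstration:

```rust
use anyhow::{Context as _, Result};

fn load_config() -> Result<String> {
    std::fs::read_to_string("/nonexistent/launch.json")
        .context("failed to read launch configuration")
}

fn main() {
    if let Err(error) = load_config() {
        // `{error}` prints only the outermost context:
        //   failed to read launch configuration
        eprintln!("{error}");
        // `{error:#}` prints the whole chain on one line:
        //   failed to read launch configuration: No such file or directory (os error 2)
        eprintln!("{error:#}");
    }
}
```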
@@ -1,7 +1,7 @@
|
||||
use std::rc::Rc;
|
||||
|
||||
use collections::HashMap;
|
||||
use gpui::{Corner, Entity, WeakEntity};
|
||||
use gpui::{Entity, WeakEntity};
|
||||
use project::debugger::session::{ThreadId, ThreadStatus};
|
||||
use ui::{CommonAnimationExt, ContextMenu, DropdownMenu, DropdownStyle, Indicator, prelude::*};
|
||||
use util::{maybe, truncate_and_trailoff};
|
||||
@@ -211,7 +211,6 @@ impl DebugPanel {
|
||||
this
|
||||
}),
|
||||
)
|
||||
.attach(Corner::BottomLeft)
|
||||
.style(DropdownStyle::Ghost)
|
||||
.handle(self.session_picker_menu_handle.clone());
|
||||
|
||||
@@ -323,7 +322,6 @@ impl DebugPanel {
|
||||
this
|
||||
}),
|
||||
)
|
||||
.attach(Corner::BottomLeft)
|
||||
.disabled(session_terminated)
|
||||
.style(DropdownStyle::Ghost)
|
||||
.handle(self.thread_picker_menu_handle.clone()),
|
||||
|
||||
@@ -937,7 +937,6 @@ impl RunningState {
|
||||
let task_store = project.read(cx).task_store().downgrade();
|
||||
let weak_project = project.downgrade();
|
||||
let weak_workspace = workspace.downgrade();
|
||||
let is_windows = project.read(cx).path_style(cx).is_windows();
|
||||
let remote_shell = project
|
||||
.read(cx)
|
||||
.remote_client()
|
||||
@@ -1030,7 +1029,7 @@ impl RunningState {
|
||||
task.resolved.shell = Shell::Program(remote_shell);
|
||||
}
|
||||
|
||||
let builder = ShellBuilder::new(&task.resolved.shell, is_windows);
|
||||
let builder = ShellBuilder::new(&task.resolved.shell);
|
||||
let command_label = builder.command_label(task.resolved.command.as_deref().unwrap_or(""));
|
||||
let (command, args) =
|
||||
builder.build(task.resolved.command.clone(), &task.resolved.args);
|
||||
|
||||
@@ -669,7 +669,11 @@ impl ConsoleQueryBarCompletionProvider {
|
||||
&snapshot,
|
||||
),
|
||||
new_text: string_match.string.clone(),
|
||||
label: CodeLabel::plain(string_match.string.clone(), None),
|
||||
label: CodeLabel {
|
||||
filter_range: 0..string_match.string.len(),
|
||||
text: string_match.string.clone(),
|
||||
runs: Vec::new(),
|
||||
},
|
||||
icon_path: None,
|
||||
documentation: Some(CompletionDocumentation::MultiLineMarkdown(
|
||||
variable_value.into(),
|
||||
@@ -778,7 +782,11 @@ impl ConsoleQueryBarCompletionProvider {
|
||||
&snapshot,
|
||||
),
|
||||
new_text,
|
||||
label: CodeLabel::plain(completion.label, None),
|
||||
label: CodeLabel {
|
||||
filter_range: 0..completion.label.len(),
|
||||
text: completion.label,
|
||||
runs: Vec::new(),
|
||||
},
|
||||
icon_path: None,
|
||||
documentation: completion.detail.map(|detail| {
|
||||
CompletionDocumentation::MultiLineMarkdown(detail.into())
|
||||
|
||||
@@ -965,11 +965,10 @@ async fn heuristic_syntactic_expand(
|
||||
let row_count = node_end.row - node_start.row + 1;
|
||||
let mut ancestor_range = None;
|
||||
let reached_outline_node = cx.background_executor().scoped({
|
||||
let node_range = node_range.clone();
|
||||
let outline_range = outline_range.clone();
|
||||
let ancestor_range = &mut ancestor_range;
|
||||
|scope| {
|
||||
scope.spawn(async move {
|
||||
let node_range = node_range.clone();
|
||||
let outline_range = outline_range.clone();
|
||||
let ancestor_range = &mut ancestor_range;
|
||||
|scope| {scope.spawn(async move {
|
||||
// Stop if we've exceeded the row count or reached an outline node. Then, find the interval
|
||||
// of node children which contains the query range. For example, this allows just returning
|
||||
// the header of a declaration rather than the entire declaration.
|
||||
@@ -981,11 +980,8 @@ async fn heuristic_syntactic_expand(
|
||||
if cursor.goto_first_child() {
|
||||
loop {
|
||||
let child_node = cursor.node();
|
||||
let child_range =
|
||||
previous_end..Point::from_ts_point(child_node.end_position());
|
||||
if included_child_start.is_none()
|
||||
&& child_range.contains(&input_range.start)
|
||||
{
|
||||
let child_range = previous_end..Point::from_ts_point(child_node.end_position());
|
||||
if included_child_start.is_none() && child_range.contains(&input_range.start) {
|
||||
included_child_start = Some(child_range.start);
|
||||
}
|
||||
if child_range.contains(&input_range.end) {
|
||||
@@ -1001,22 +997,19 @@ async fn heuristic_syntactic_expand(
|
||||
if let Some(start) = included_child_start {
|
||||
let row_count = end.row - start.row;
|
||||
if row_count < max_row_count {
|
||||
*ancestor_range =
|
||||
Some(Some(RangeInclusive::new(start.row, end.row)));
|
||||
*ancestor_range = Some(Some(RangeInclusive::new(start.row, end.row)));
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
log::info!(
|
||||
"Expanding to ancestor started on {} node\
|
||||
exceeding row limit of {max_row_count}.",
|
||||
"Expanding to ancestor started on {} node exceeding row limit of {max_row_count}.",
|
||||
node.grammar_name()
|
||||
);
|
||||
*ancestor_range = Some(None);
|
||||
}
|
||||
})
|
||||
}
|
||||
});
|
||||
}});
|
||||
reached_outline_node.await;
|
||||
if let Some(node) = ancestor_range {
|
||||
return node;
|
||||
|
||||
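The `heuristic_syntactic_expand` hunk above funnels the result of a scoped background task back through a mutable borrow of `ancestor_range`; the scope guarantees the task finishes before the borrow is read. The same borrow-into-a-scoped-task pattern, shown with the standard library's scoped threads since GPUI's `scoped` executor API is internal to Zed:

```rust
use std::ops::RangeInclusive;

fn find_ancestor_range(rows: &[u32], max_row_count: u32) -> Option<RangeInclusive<u32>> {
    let mut ancestor_range: Option<RangeInclusive<u32>> = None;

    std::thread::scope(|scope| {
        // The spawned worker may mutably borrow `ancestor_range` because the
        // scope joins it before `ancestor_range` is read again below.
        let ancestor_range = &mut ancestor_range;
        scope.spawn(move || {
            if let (Some(first), Some(last)) = (rows.first(), rows.last()) {
                if last - first < max_row_count {
                    *ancestor_range = Some(*first..=*last);
                }
            }
        });
    });

    ancestor_range
}
```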
@@ -20,8 +20,6 @@ util.workspace = true
workspace-hack.workspace = true
zed.workspace = true
zlog.workspace = true
task.workspace = true
theme.workspace = true

[lints]
workspace = true

@@ -53,20 +53,9 @@ fn main() -> Result<()> {
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||
enum PreprocessorError {
|
||||
ActionNotFound {
|
||||
action_name: String,
|
||||
},
|
||||
DeprecatedActionUsed {
|
||||
used: String,
|
||||
should_be: String,
|
||||
},
|
||||
ActionNotFound { action_name: String },
|
||||
DeprecatedActionUsed { used: String, should_be: String },
|
||||
InvalidFrontmatterLine(String),
|
||||
InvalidSettingsJson {
|
||||
file: std::path::PathBuf,
|
||||
line: usize,
|
||||
snippet: String,
|
||||
error: String,
|
||||
},
|
||||
}
|
||||
|
||||
impl PreprocessorError {
|
||||
@@ -83,20 +72,6 @@ impl PreprocessorError {
|
||||
}
|
||||
PreprocessorError::ActionNotFound { action_name }
|
||||
}
|
||||
|
||||
fn new_for_invalid_settings_json(
|
||||
chapter: &Chapter,
|
||||
location: usize,
|
||||
snippet: String,
|
||||
error: String,
|
||||
) -> Self {
|
||||
PreprocessorError::InvalidSettingsJson {
|
||||
file: chapter.path.clone().expect("chapter has path"),
|
||||
line: chapter.content[..location].lines().count() + 1,
|
||||
snippet,
|
||||
error,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Display for PreprocessorError {
|
||||
@@ -113,21 +88,6 @@ impl std::fmt::Display for PreprocessorError {
|
||||
"Deprecated action used: {} should be {}",
|
||||
used, should_be
|
||||
),
|
||||
PreprocessorError::InvalidSettingsJson {
|
||||
file,
|
||||
line,
|
||||
snippet,
|
||||
error,
|
||||
} => {
|
||||
write!(
|
||||
f,
|
||||
"Invalid settings JSON at {}:{}\nError: {}\n\n{}",
|
||||
file.display(),
|
||||
line,
|
||||
error,
|
||||
snippet
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -140,11 +100,11 @@ fn handle_preprocessing() -> Result<()> {
|
||||
let (_ctx, mut book) = CmdPreprocessor::parse_input(input.as_bytes())?;
|
||||
|
||||
let mut errors = HashSet::<PreprocessorError>::new();
|
||||
|
||||
handle_frontmatter(&mut book, &mut errors);
|
||||
template_big_table_of_actions(&mut book);
|
||||
template_and_validate_keybindings(&mut book, &mut errors);
|
||||
template_and_validate_actions(&mut book, &mut errors);
|
||||
template_and_validate_json_snippets(&mut book, &mut errors);
|
||||
|
||||
if !errors.is_empty() {
|
||||
const ANSI_RED: &str = "\x1b[31m";
|
||||
@@ -275,161 +235,6 @@ fn find_binding(os: &str, action: &str) -> Option<String> {
|
||||
})
|
||||
}
|
||||
|
||||
fn template_and_validate_json_snippets(book: &mut Book, errors: &mut HashSet<PreprocessorError>) {
|
||||
fn for_each_labeled_code_block_mut(
|
||||
book: &mut Book,
|
||||
errors: &mut HashSet<PreprocessorError>,
|
||||
f: impl Fn(&str, &str) -> anyhow::Result<()>,
|
||||
) {
|
||||
const TAGGED_JSON_BLOCK_START: &'static str = "```json [";
|
||||
const JSON_BLOCK_END: &'static str = "```";
|
||||
|
||||
for_each_chapter_mut(book, |chapter| {
|
||||
let mut offset = 0;
|
||||
while let Some(loc) = chapter.content[offset..].find(TAGGED_JSON_BLOCK_START) {
|
||||
let loc = loc + offset;
|
||||
let tag_start = loc + TAGGED_JSON_BLOCK_START.len();
|
||||
offset = tag_start;
|
||||
let Some(tag_end) = chapter.content[tag_start..].find(']') else {
|
||||
errors.insert(PreprocessorError::new_for_invalid_settings_json(
|
||||
chapter,
|
||||
loc,
|
||||
chapter.content[loc..tag_start].to_string(),
|
||||
"Unclosed JSON block tag".to_string(),
|
||||
));
|
||||
continue;
|
||||
};
|
||||
let tag_end = tag_end + tag_start;
|
||||
|
||||
let tag = &chapter.content[tag_start..tag_end];
|
||||
|
||||
if tag.contains('\n') {
|
||||
errors.insert(PreprocessorError::new_for_invalid_settings_json(
|
||||
chapter,
|
||||
loc,
|
||||
chapter.content[loc..tag_start].to_string(),
|
||||
"Unclosed JSON block tag".to_string(),
|
||||
));
|
||||
continue;
|
||||
}
|
||||
|
||||
let snippet_start = tag_end + 1;
|
||||
offset = snippet_start;
|
||||
|
||||
let Some(snippet_end) = chapter.content[snippet_start..].find(JSON_BLOCK_END)
|
||||
else {
|
||||
errors.insert(PreprocessorError::new_for_invalid_settings_json(
|
||||
chapter,
|
||||
loc,
|
||||
chapter.content[loc..tag_end + 1].to_string(),
|
||||
"Missing closing code block".to_string(),
|
||||
));
|
||||
continue;
|
||||
};
|
||||
let snippet_end = snippet_start + snippet_end;
|
||||
let snippet_json = &chapter.content[snippet_start..snippet_end];
|
||||
offset = snippet_end + 3;
|
||||
|
||||
if let Err(err) = f(tag, snippet_json) {
|
||||
errors.insert(PreprocessorError::new_for_invalid_settings_json(
|
||||
chapter,
|
||||
loc,
|
||||
chapter.content[loc..snippet_end + 3].to_string(),
|
||||
err.to_string(),
|
||||
));
|
||||
continue;
|
||||
};
|
||||
let tag_range_complete = tag_start - 1..tag_end + 1;
|
||||
offset -= tag_range_complete.len();
|
||||
chapter.content.replace_range(tag_range_complete, "");
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
for_each_labeled_code_block_mut(book, errors, |label, snippet_json| {
|
||||
let mut snippet_json_fixed = snippet_json
|
||||
.to_string()
|
||||
.replace("\n>", "\n")
|
||||
.trim()
|
||||
.to_string();
|
||||
while snippet_json_fixed.starts_with("//") {
|
||||
if let Some(line_end) = snippet_json_fixed.find('\n') {
|
||||
snippet_json_fixed.replace_range(0..line_end, "");
|
||||
snippet_json_fixed = snippet_json_fixed.trim().to_string();
|
||||
}
|
||||
}
|
||||
match label {
|
||||
"settings" => {
|
||||
if !snippet_json_fixed.starts_with('{') || !snippet_json_fixed.ends_with('}') {
|
||||
snippet_json_fixed.insert(0, '{');
|
||||
snippet_json_fixed.push_str("\n}");
|
||||
}
|
||||
settings::parse_json_with_comments::<settings::SettingsContent>(
|
||||
&snippet_json_fixed,
|
||||
)?;
|
||||
}
|
||||
"keymap" => {
|
||||
if !snippet_json_fixed.starts_with('[') || !snippet_json_fixed.ends_with(']') {
|
||||
snippet_json_fixed.insert(0, '[');
|
||||
snippet_json_fixed.push_str("\n]");
|
||||
}
|
||||
|
||||
let keymap = settings::KeymapFile::parse(&snippet_json_fixed)
|
||||
.context("Failed to parse keymap JSON")?;
|
||||
for section in keymap.sections() {
|
||||
for (keystrokes, action) in section.bindings() {
|
||||
keystrokes
|
||||
.split_whitespace()
|
||||
.map(|source| gpui::Keystroke::parse(source))
|
||||
.collect::<std::result::Result<Vec<_>, _>>()
|
||||
.context("Failed to parse keystroke")?;
|
||||
if let Some((action_name, _)) = settings::KeymapFile::parse_action(action)
|
||||
.map_err(|err| anyhow::format_err!(err))
|
||||
.context("Failed to parse action")?
|
||||
{
|
||||
anyhow::ensure!(
|
||||
find_action_by_name(action_name).is_some(),
|
||||
"Action not found: {}",
|
||||
action_name
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
"debug" => {
|
||||
if !snippet_json_fixed.starts_with('[') || !snippet_json_fixed.ends_with(']') {
|
||||
snippet_json_fixed.insert(0, '[');
|
||||
snippet_json_fixed.push_str("\n]");
|
||||
}
|
||||
|
||||
settings::parse_json_with_comments::<task::DebugTaskFile>(&snippet_json_fixed)?;
|
||||
}
|
||||
"tasks" => {
|
||||
if !snippet_json_fixed.starts_with('[') || !snippet_json_fixed.ends_with(']') {
|
||||
snippet_json_fixed.insert(0, '[');
|
||||
snippet_json_fixed.push_str("\n]");
|
||||
}
|
||||
|
||||
settings::parse_json_with_comments::<task::TaskTemplates>(&snippet_json_fixed)?;
|
||||
}
|
||||
"icon-theme" => {
|
||||
if !snippet_json_fixed.starts_with('{') || !snippet_json_fixed.ends_with('}') {
|
||||
snippet_json_fixed.insert(0, '{');
|
||||
snippet_json_fixed.push_str("\n}");
|
||||
}
|
||||
|
||||
settings::parse_json_with_comments::<theme::IconThemeFamilyContent>(
|
||||
&snippet_json_fixed,
|
||||
)?;
|
||||
}
|
||||
label => {
|
||||
anyhow::bail!("Unexpected JSON code block tag: {}", label)
|
||||
}
|
||||
};
|
||||
Ok(())
|
||||
});
|
||||
}
|
||||
|
||||
/// Removes any configurable options from the stringified action if existing,
|
||||
/// ensuring that only the actual action name is returned. If the action consists
|
||||
/// only of a string and nothing else, the string is returned as-is.
|
||||
|
||||
@@ -2,7 +2,7 @@ use std::ops::Range;
|
||||
|
||||
use client::EditPredictionUsage;
|
||||
use gpui::{App, Context, Entity, SharedString};
|
||||
use language::{Anchor, Buffer, BufferSnapshot, OffsetRangeExt};
|
||||
use language::Buffer;
|
||||
|
||||
// TODO: Find a better home for `Direction`.
|
||||
//
|
||||
@@ -242,51 +242,3 @@ where
|
||||
self.update(cx, |this, cx| this.suggest(buffer, cursor_position, cx))
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns edits updated based on user edits since the old snapshot. None is returned if any user
|
||||
/// edit is not a prefix of a predicted insertion.
|
||||
pub fn interpolate_edits(
|
||||
old_snapshot: &BufferSnapshot,
|
||||
new_snapshot: &BufferSnapshot,
|
||||
current_edits: &[(Range<Anchor>, String)],
|
||||
) -> Option<Vec<(Range<Anchor>, String)>> {
|
||||
let mut edits = Vec::new();
|
||||
|
||||
let mut model_edits = current_edits.iter().peekable();
|
||||
for user_edit in new_snapshot.edits_since::<usize>(&old_snapshot.version) {
|
||||
while let Some((model_old_range, _)) = model_edits.peek() {
|
||||
let model_old_range = model_old_range.to_offset(old_snapshot);
|
||||
if model_old_range.end < user_edit.old.start {
|
||||
let (model_old_range, model_new_text) = model_edits.next().unwrap();
|
||||
edits.push((model_old_range.clone(), model_new_text.clone()));
|
||||
} else {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if let Some((model_old_range, model_new_text)) = model_edits.peek() {
|
||||
let model_old_offset_range = model_old_range.to_offset(old_snapshot);
|
||||
if user_edit.old == model_old_offset_range {
|
||||
let user_new_text = new_snapshot
|
||||
.text_for_range(user_edit.new.clone())
|
||||
.collect::<String>();
|
||||
|
||||
if let Some(model_suffix) = model_new_text.strip_prefix(&user_new_text) {
|
||||
if !model_suffix.is_empty() {
|
||||
let anchor = old_snapshot.anchor_after(user_edit.old.end);
|
||||
edits.push((anchor..anchor, model_suffix.to_string()));
|
||||
}
|
||||
|
||||
model_edits.next();
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return None;
|
||||
}
|
||||
|
||||
edits.extend(model_edits.cloned());
|
||||
|
||||
if edits.is_empty() { None } else { Some(edits) }
|
||||
}
|
||||
|
||||
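The `interpolate_edits` helper shown above keeps a prediction alive while the user types, as long as each user edit is a prefix of the corresponding predicted insertion; the untyped remainder becomes the new suggestion. The core prefix check, reduced to plain strings (buffer anchors and snapshots omitted):

```rust
/// If the user's typed text is a prefix of the predicted insertion, return
/// what is still left to suggest; otherwise the prediction is invalidated.
fn remaining_suggestion<'a>(predicted: &'a str, typed: &str) -> Option<&'a str> {
    match predicted.strip_prefix(typed) {
        Some(suffix) if !suffix.is_empty() => Some(suffix),
        Some(_) => None, // the user typed the whole prediction
        None => None,    // the user diverged from the prediction
    }
}

fn main() {
    assert_eq!(remaining_suggestion("foo_bar()", "foo_"), Some("bar()"));
    assert_eq!(remaining_suggestion("foo_bar()", "fizz"), None);
}
```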
@@ -16,7 +16,6 @@ doctest = false
anyhow.workspace = true
client.workspace = true
cloud_llm_client.workspace = true
codestral.workspace = true
copilot.workspace = true
editor.workspace = true
feature_flags.workspace = true

@@ -1,7 +1,6 @@
|
||||
use anyhow::Result;
|
||||
use client::{UserStore, zed_urls};
|
||||
use cloud_llm_client::UsageLimit;
|
||||
use codestral::CodestralCompletionProvider;
|
||||
use copilot::{Copilot, Status};
|
||||
use editor::{Editor, SelectionEffects, actions::ShowEditPrediction, scroll::Autoscroll};
|
||||
use feature_flags::{FeatureFlagAppExt, PredictEditsRateCompletionsFeatureFlag};
|
||||
@@ -235,67 +234,6 @@ impl Render for EditPredictionButton {
|
||||
)
|
||||
}
|
||||
|
||||
EditPredictionProvider::Codestral => {
|
||||
let enabled = self.editor_enabled.unwrap_or(true);
|
||||
let has_api_key = CodestralCompletionProvider::has_api_key(cx);
|
||||
let fs = self.fs.clone();
|
||||
let this = cx.entity();
|
||||
|
||||
div().child(
|
||||
PopoverMenu::new("codestral")
|
||||
.menu(move |window, cx| {
|
||||
if has_api_key {
|
||||
Some(this.update(cx, |this, cx| {
|
||||
this.build_codestral_context_menu(window, cx)
|
||||
}))
|
||||
} else {
|
||||
Some(ContextMenu::build(window, cx, |menu, _, _| {
|
||||
let fs = fs.clone();
|
||||
menu.entry("Use Zed AI instead", None, move |_, cx| {
|
||||
set_completion_provider(
|
||||
fs.clone(),
|
||||
cx,
|
||||
EditPredictionProvider::Zed,
|
||||
)
|
||||
})
|
||||
.separator()
|
||||
.entry(
|
||||
"Configure Codestral API Key",
|
||||
None,
|
||||
move |window, cx| {
|
||||
window.dispatch_action(
|
||||
zed_actions::agent::OpenSettings.boxed_clone(),
|
||||
cx,
|
||||
);
|
||||
},
|
||||
)
|
||||
}))
|
||||
}
|
||||
})
|
||||
.anchor(Corner::BottomRight)
|
||||
.trigger_with_tooltip(
|
||||
IconButton::new("codestral-icon", IconName::AiMistral)
|
||||
.shape(IconButtonShape::Square)
|
||||
.when(!has_api_key, |this| {
|
||||
this.indicator(Indicator::dot().color(Color::Error))
|
||||
.indicator_border_color(Some(
|
||||
cx.theme().colors().status_bar_background,
|
||||
))
|
||||
})
|
||||
.when(has_api_key && !enabled, |this| {
|
||||
this.indicator(Indicator::dot().color(Color::Ignored))
|
||||
.indicator_border_color(Some(
|
||||
cx.theme().colors().status_bar_background,
|
||||
))
|
||||
}),
|
||||
move |window, cx| {
|
||||
Tooltip::for_action("Codestral", &ToggleMenu, window, cx)
|
||||
},
|
||||
)
|
||||
.with_handle(self.popover_menu_handle.clone()),
|
||||
)
|
||||
}
|
||||
|
||||
EditPredictionProvider::Zed => {
|
||||
let enabled = self.editor_enabled.unwrap_or(true);
|
||||
|
||||
@@ -555,7 +493,6 @@ impl EditPredictionButton {
|
||||
EditPredictionProvider::Zed
|
||||
| EditPredictionProvider::Copilot
|
||||
| EditPredictionProvider::Supermaven
|
||||
| EditPredictionProvider::Codestral
|
||||
) {
|
||||
menu = menu
|
||||
.separator()
|
||||
@@ -782,25 +719,6 @@ impl EditPredictionButton {
|
||||
})
|
||||
}
|
||||
|
||||
fn build_codestral_context_menu(
|
||||
&self,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Entity<ContextMenu> {
|
||||
let fs = self.fs.clone();
|
||||
ContextMenu::build(window, cx, |menu, window, cx| {
|
||||
self.build_language_settings_menu(menu, window, cx)
|
||||
.separator()
|
||||
.entry("Use Zed AI instead", None, move |_, cx| {
|
||||
set_completion_provider(fs.clone(), cx, EditPredictionProvider::Zed)
|
||||
})
|
||||
.separator()
|
||||
.entry("Configure Codestral API Key", None, move |window, cx| {
|
||||
window.dispatch_action(zed_actions::agent::OpenSettings.boxed_clone(), cx);
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
fn build_zeta_context_menu(
|
||||
&self,
|
||||
window: &mut Window,
|
||||
|
||||
@@ -19,7 +19,6 @@ collections.workspace = true
futures.workspace = true
gpui.workspace = true
hashbrown.workspace = true
indoc.workspace = true
itertools.workspace = true
language.workspace = true
log.workspace = true
@@ -46,8 +45,5 @@ project = {workspace= true, features = ["test-support"]}
serde_json.workspace = true
settings = {workspace= true, features = ["test-support"]}
text = { workspace = true, features = ["test-support"] }
tree-sitter-c.workspace = true
tree-sitter-cpp.workspace = true
tree-sitter-go.workspace = true
util = { workspace = true, features = ["test-support"] }
zlog.workspace = true

@@ -1,12 +1,9 @@
|
||||
use cloud_llm_client::predict_edits_v3::{self, Line};
|
||||
use language::{Language, LanguageId};
|
||||
use language::LanguageId;
|
||||
use project::ProjectEntryId;
|
||||
use std::borrow::Cow;
|
||||
use std::ops::Range;
|
||||
use std::sync::Arc;
|
||||
use std::{borrow::Cow, path::Path};
|
||||
use text::{Bias, BufferId, Rope};
|
||||
use util::paths::{path_ends_with, strip_path_suffix};
|
||||
use util::rel_path::RelPath;
|
||||
|
||||
use crate::outline::OutlineDeclaration;
|
||||
|
||||
@@ -25,14 +22,12 @@ pub enum Declaration {
|
||||
File {
|
||||
project_entry_id: ProjectEntryId,
|
||||
declaration: FileDeclaration,
|
||||
cached_path: CachedDeclarationPath,
|
||||
},
|
||||
Buffer {
|
||||
project_entry_id: ProjectEntryId,
|
||||
buffer_id: BufferId,
|
||||
rope: Rope,
|
||||
declaration: BufferDeclaration,
|
||||
cached_path: CachedDeclarationPath,
|
||||
},
|
||||
}
|
||||
|
||||
@@ -78,13 +73,6 @@ impl Declaration {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn cached_path(&self) -> &CachedDeclarationPath {
|
||||
match self {
|
||||
Declaration::File { cached_path, .. } => cached_path,
|
||||
Declaration::Buffer { cached_path, .. } => cached_path,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn item_range(&self) -> Range<usize> {
|
||||
match self {
|
||||
Declaration::File { declaration, .. } => declaration.item_range.clone(),
|
||||
@@ -92,18 +80,6 @@ impl Declaration {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn item_line_range(&self) -> Range<Line> {
|
||||
match self {
|
||||
Declaration::File { declaration, .. } => declaration.item_line_range.clone(),
|
||||
Declaration::Buffer {
|
||||
declaration, rope, ..
|
||||
} => {
|
||||
Line(rope.offset_to_point(declaration.item_range.start).row)
|
||||
..Line(rope.offset_to_point(declaration.item_range.end).row)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn item_text(&self) -> (Cow<'_, str>, bool) {
|
||||
match self {
|
||||
Declaration::File { declaration, .. } => (
|
||||
@@ -143,18 +119,6 @@ impl Declaration {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn signature_line_range(&self) -> Range<Line> {
|
||||
match self {
|
||||
Declaration::File { declaration, .. } => declaration.signature_line_range.clone(),
|
||||
Declaration::Buffer {
|
||||
declaration, rope, ..
|
||||
} => {
|
||||
Line(rope.offset_to_point(declaration.signature_range.start).row)
|
||||
..Line(rope.offset_to_point(declaration.signature_range.end).row)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn signature_range_in_item_text(&self) -> Range<usize> {
|
||||
let signature_range = self.signature_range();
|
||||
let item_range = self.item_range();
|
||||
@@ -167,7 +131,7 @@ fn expand_range_to_line_boundaries_and_truncate(
|
||||
range: &Range<usize>,
|
||||
limit: usize,
|
||||
rope: &Rope,
|
||||
) -> (Range<usize>, Range<predict_edits_v3::Line>, bool) {
|
||||
) -> (Range<usize>, bool) {
|
||||
let mut point_range = rope.offset_to_point(range.start)..rope.offset_to_point(range.end);
|
||||
point_range.start.column = 0;
|
||||
point_range.end.row += 1;
|
||||
@@ -180,10 +144,7 @@ fn expand_range_to_line_boundaries_and_truncate(
|
||||
item_range.end = item_range.start + limit;
|
||||
}
|
||||
item_range.end = rope.clip_offset(item_range.end, Bias::Left);
|
||||
|
||||
let line_range =
|
||||
predict_edits_v3::Line(point_range.start.row)..predict_edits_v3::Line(point_range.end.row);
|
||||
(item_range, line_range, is_truncated)
|
||||
(item_range, is_truncated)
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
@@ -192,30 +153,25 @@ pub struct FileDeclaration {
|
||||
pub identifier: Identifier,
|
||||
/// offset range of the declaration in the file, expanded to line boundaries and truncated
|
||||
pub item_range: Range<usize>,
|
||||
/// line range of the declaration in the file, potentially truncated
|
||||
pub item_line_range: Range<predict_edits_v3::Line>,
|
||||
/// text of `item_range`
|
||||
pub text: Arc<str>,
|
||||
/// whether `text` was truncated
|
||||
pub text_is_truncated: bool,
|
||||
/// offset range of the signature in the file, expanded to line boundaries and truncated
|
||||
pub signature_range: Range<usize>,
|
||||
/// line range of the signature in the file, truncated
|
||||
pub signature_line_range: Range<Line>,
|
||||
/// whether `signature` was truncated
|
||||
pub signature_is_truncated: bool,
|
||||
}
|
||||
|
||||
impl FileDeclaration {
|
||||
pub fn from_outline(declaration: OutlineDeclaration, rope: &Rope) -> FileDeclaration {
|
||||
let (item_range_in_file, item_line_range_in_file, text_is_truncated) =
|
||||
expand_range_to_line_boundaries_and_truncate(
|
||||
&declaration.item_range,
|
||||
ITEM_TEXT_TRUNCATION_LENGTH,
|
||||
rope,
|
||||
);
|
||||
let (item_range_in_file, text_is_truncated) = expand_range_to_line_boundaries_and_truncate(
|
||||
&declaration.item_range,
|
||||
ITEM_TEXT_TRUNCATION_LENGTH,
|
||||
rope,
|
||||
);
|
||||
|
||||
let (mut signature_range_in_file, signature_line_range, mut signature_is_truncated) =
|
||||
let (mut signature_range_in_file, mut signature_is_truncated) =
|
||||
expand_range_to_line_boundaries_and_truncate(
|
||||
&declaration.signature_range,
|
||||
ITEM_TEXT_TRUNCATION_LENGTH,
|
||||
@@ -235,7 +191,6 @@ impl FileDeclaration {
|
||||
parent: None,
|
||||
identifier: declaration.identifier,
|
||||
signature_range: signature_range_in_file,
|
||||
signature_line_range,
|
||||
signature_is_truncated,
|
||||
text: rope
|
||||
.chunks_in_range(item_range_in_file.clone())
|
||||
@@ -243,7 +198,6 @@ impl FileDeclaration {
|
||||
.into(),
|
||||
text_is_truncated,
|
||||
item_range: item_range_in_file,
|
||||
item_line_range: item_line_range_in_file,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -260,13 +214,12 @@ pub struct BufferDeclaration {
|
||||
|
||||
impl BufferDeclaration {
|
||||
pub fn from_outline(declaration: OutlineDeclaration, rope: &Rope) -> Self {
|
||||
let (item_range, _item_line_range, item_range_is_truncated) =
|
||||
expand_range_to_line_boundaries_and_truncate(
|
||||
&declaration.item_range,
|
||||
ITEM_TEXT_TRUNCATION_LENGTH,
|
||||
rope,
|
||||
);
|
||||
let (signature_range, _signature_line_range, signature_range_is_truncated) =
|
||||
let (item_range, item_range_is_truncated) = expand_range_to_line_boundaries_and_truncate(
|
||||
&declaration.item_range,
|
||||
ITEM_TEXT_TRUNCATION_LENGTH,
|
||||
rope,
|
||||
);
|
||||
let (signature_range, signature_range_is_truncated) =
|
||||
expand_range_to_line_boundaries_and_truncate(
|
||||
&declaration.signature_range,
|
||||
ITEM_TEXT_TRUNCATION_LENGTH,
|
||||
@@ -282,69 +235,3 @@ impl BufferDeclaration {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct CachedDeclarationPath {
|
||||
pub worktree_abs_path: Arc<Path>,
|
||||
pub rel_path: Arc<RelPath>,
|
||||
/// The relative path of the file, possibly stripped according to `import_path_strip_regex`.
|
||||
pub rel_path_after_regex_stripping: Arc<RelPath>,
|
||||
}
|
||||
|
||||
impl CachedDeclarationPath {
|
||||
pub fn new(
|
||||
worktree_abs_path: Arc<Path>,
|
||||
path: &Arc<RelPath>,
|
||||
language: Option<&Arc<Language>>,
|
||||
) -> Self {
|
||||
let rel_path = path.clone();
|
||||
let rel_path_after_regex_stripping = if let Some(language) = language
|
||||
&& let Some(strip_regex) = language.config().import_path_strip_regex.as_ref()
|
||||
&& let Ok(stripped) = RelPath::unix(&Path::new(
|
||||
strip_regex.replace_all(rel_path.as_unix_str(), "").as_ref(),
|
||||
)) {
|
||||
Arc::from(stripped)
|
||||
} else {
|
||||
rel_path.clone()
|
||||
};
|
||||
CachedDeclarationPath {
|
||||
worktree_abs_path,
|
||||
rel_path,
|
||||
rel_path_after_regex_stripping,
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
pub fn new_for_test(worktree_abs_path: &str, rel_path: &str) -> Self {
|
||||
let rel_path: Arc<RelPath> = util::rel_path::rel_path(rel_path).into();
|
||||
CachedDeclarationPath {
|
||||
worktree_abs_path: std::path::PathBuf::from(worktree_abs_path).into(),
|
||||
rel_path_after_regex_stripping: rel_path.clone(),
|
||||
rel_path,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn ends_with_posix_path(&self, path: &Path) -> bool {
|
||||
if path.as_os_str().len() <= self.rel_path_after_regex_stripping.as_unix_str().len() {
|
||||
path_ends_with(self.rel_path_after_regex_stripping.as_std_path(), path)
|
||||
} else {
|
||||
if let Some(remaining) =
|
||||
strip_path_suffix(path, self.rel_path_after_regex_stripping.as_std_path())
|
||||
{
|
||||
path_ends_with(&self.worktree_abs_path, remaining)
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn equals_absolute_path(&self, path: &Path) -> bool {
|
||||
if let Some(remaining) =
|
||||
strip_path_suffix(path, &self.rel_path_after_regex_stripping.as_std_path())
|
||||
{
|
||||
self.worktree_abs_path.as_ref() == remaining
|
||||
} else {
|
||||
false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
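The `ends_with_posix_path` and `equals_absolute_path` helpers shown above test whether an import path lines up with a declaration's worktree-relative path, falling back to the worktree's absolute path when the import is longer than the relative path. A simplified, whole-component version of that comparison (Zed's `RelPath` type and `import_path_strip_regex` handling are omitted):

```rust
use std::path::Path;

/// Does the import path line up with this declaration's path? Comparison is by
/// whole components, so "utils/math.rs" matches but "ls/math.rs" does not.
fn import_matches(worktree_abs_path: &Path, rel_path: &Path, import_path: &Path) -> bool {
    if import_path.is_absolute() {
        // An absolute import must be exactly worktree root + relative path.
        worktree_abs_path.join(rel_path) == import_path
    } else {
        // A relative import only needs to be a suffix of the declaration path.
        worktree_abs_path.join(rel_path).ends_with(import_path)
    }
}

fn main() {
    let worktree = Path::new("/work/project");
    let rel = Path::new("src/utils/math.rs");
    assert!(import_matches(worktree, rel, Path::new("utils/math.rs")));
    assert!(!import_matches(worktree, rel, Path::new("other/math.rs")));
    assert!(import_matches(worktree, rel, Path::new("/work/project/src/utils/math.rs")));
}
```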
@@ -1,17 +1,15 @@
|
||||
use cloud_llm_client::predict_edits_v3::DeclarationScoreComponents;
|
||||
use collections::HashMap;
|
||||
use itertools::Itertools as _;
|
||||
use language::BufferSnapshot;
|
||||
use ordered_float::OrderedFloat;
|
||||
use project::ProjectEntryId;
|
||||
use serde::Serialize;
|
||||
use std::{cmp::Reverse, ops::Range, path::Path, sync::Arc};
|
||||
use std::{cmp::Reverse, ops::Range};
|
||||
use strum::EnumIter;
|
||||
use text::{Point, ToPoint};
|
||||
use util::RangeExt as _;
|
||||
|
||||
use crate::{
|
||||
CachedDeclarationPath, Declaration, EditPredictionExcerpt, Identifier,
|
||||
imports::{Import, Imports, Module},
|
||||
Declaration, EditPredictionExcerpt, Identifier,
|
||||
reference::{Reference, ReferenceRegion},
|
||||
syntax_index::SyntaxIndexState,
|
||||
text_similarity::{Occurrences, jaccard_similarity, weighted_overlap_coefficient},
|
||||
@@ -19,17 +17,12 @@ use crate::{
|
||||
|
||||
const MAX_IDENTIFIER_DECLARATION_COUNT: usize = 16;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub struct EditPredictionScoreOptions {
|
||||
pub omit_excerpt_overlaps: bool,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct ScoredDeclaration {
|
||||
/// identifier used by the local reference
|
||||
pub identifier: Identifier,
|
||||
pub declaration: Declaration,
|
||||
pub components: DeclarationScoreComponents,
|
||||
pub score_components: DeclarationScoreComponents,
|
||||
pub scores: DeclarationScores,
|
||||
}
|
||||
|
||||
#[derive(EnumIter, Clone, Copy, PartialEq, Eq, Hash, Debug)]
|
||||
@@ -38,61 +31,15 @@ pub enum DeclarationStyle {
|
||||
Declaration,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Default)]
|
||||
pub struct DeclarationScores {
|
||||
pub signature: f32,
|
||||
pub declaration: f32,
|
||||
pub retrieval: f32,
|
||||
}
|
||||
|
||||
impl ScoredDeclaration {
|
||||
/// Returns the score for this declaration with the specified style.
|
||||
pub fn score(&self, style: DeclarationStyle) -> f32 {
|
||||
// TODO: handle truncation
|
||||
|
||||
// Score related to how likely this is the correct declaration, range 0 to 1
|
||||
let retrieval = self.retrieval_score();
|
||||
|
||||
// Score related to the distance between the reference and cursor, range 0 to 1
|
||||
let distance_score = if self.components.is_referenced_nearby {
|
||||
1.0 / (1.0 + self.components.reference_line_distance as f32 / 10.0).powf(2.0)
|
||||
} else {
|
||||
// same score as ~14 lines away, rationale is to not overly penalize references from parent signatures
|
||||
0.5
|
||||
};
|
||||
|
||||
// For now instead of linear combination, the scores are just multiplied together.
|
||||
let combined_score = 10.0 * retrieval * distance_score;
|
||||
|
||||
match style {
|
||||
DeclarationStyle::Signature => {
|
||||
combined_score * self.components.excerpt_vs_signature_weighted_overlap
|
||||
}
|
||||
DeclarationStyle::Declaration => {
|
||||
2.0 * combined_score * self.components.excerpt_vs_item_weighted_overlap
|
||||
}
|
||||
DeclarationStyle::Signature => self.scores.signature,
|
||||
DeclarationStyle::Declaration => self.scores.declaration,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn retrieval_score(&self) -> f32 {
|
||||
let mut score = if self.components.is_same_file {
|
||||
10.0 / self.components.same_file_declaration_count as f32
|
||||
} else if self.components.path_import_match_count > 0 {
|
||||
3.0
|
||||
} else if self.components.wildcard_path_import_match_count > 0 {
|
||||
1.0
|
||||
} else if self.components.normalized_import_similarity > 0.0 {
|
||||
self.components.normalized_import_similarity
|
||||
} else if self.components.normalized_wildcard_import_similarity > 0.0 {
|
||||
0.5 * self.components.normalized_wildcard_import_similarity
|
||||
} else {
|
||||
1.0 / self.components.declaration_count as f32
|
||||
};
|
||||
score *= 1. + self.components.included_by_others as f32 / 2.;
|
||||
score *= 1. + self.components.includes_others as f32 / 4.;
|
||||
score
|
||||
}
|
||||
|
||||
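The scoring in `retrieval_score` above is combined elsewhere with Jaccard similarity and a weighted overlap coefficient between identifier occurrence sets (see the `text_similarity` imports earlier in this file). For reference, the plain set form of Jaccard similarity; Zed's `Occurrences` type additionally weights identifiers, so this only shows the unweighted idea:

```rust
use std::collections::HashSet;

/// |A ∩ B| / |A ∪ B|, in [0, 1]; 1.0 means identical identifier sets.
fn jaccard_similarity<'a>(a: &HashSet<&'a str>, b: &HashSet<&'a str>) -> f32 {
    if a.is_empty() && b.is_empty() {
        return 0.0;
    }
    let intersection = a.intersection(b).count() as f32;
    let union = a.union(b).count() as f32;
    intersection / union
}

fn main() {
    let excerpt: HashSet<_> = ["buffer", "cursor", "snapshot"].into_iter().collect();
    let declaration: HashSet<_> = ["buffer", "snapshot", "anchor"].into_iter().collect();
    assert_eq!(jaccard_similarity(&excerpt, &declaration), 0.5);
}
```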
pub fn size(&self, style: DeclarationStyle) -> usize {
|
||||
match &self.declaration {
|
||||
Declaration::File { declaration, .. } => match style {
|
||||
@@ -107,259 +54,110 @@ impl ScoredDeclaration {
|
||||
}
|
||||
|
||||
pub fn score_density(&self, style: DeclarationStyle) -> f32 {
|
||||
self.score(style) / self.size(style) as f32
|
||||
self.score(style) / (self.size(style)) as f32
|
||||
}
|
||||
}
|
||||
|
||||
pub fn scored_declarations(
|
||||
options: &EditPredictionScoreOptions,
|
||||
index: &SyntaxIndexState,
|
||||
excerpt: &EditPredictionExcerpt,
|
||||
excerpt_occurrences: &Occurrences,
|
||||
adjacent_occurrences: &Occurrences,
|
||||
imports: &Imports,
|
||||
identifier_to_references: HashMap<Identifier, Vec<Reference>>,
|
||||
cursor_offset: usize,
|
||||
current_buffer: &BufferSnapshot,
|
||||
) -> Vec<ScoredDeclaration> {
|
||||
let cursor_point = cursor_offset.to_point(¤t_buffer);
|
||||
|
||||
let mut wildcard_import_occurrences = Vec::new();
|
||||
let mut wildcard_import_paths = Vec::new();
|
||||
for wildcard_import in imports.wildcard_modules.iter() {
|
||||
match wildcard_import {
|
||||
Module::Namespace(namespace) => {
|
||||
wildcard_import_occurrences.push(namespace.occurrences())
|
||||
}
|
||||
Module::SourceExact(path) => wildcard_import_paths.push(path),
|
||||
Module::SourceFuzzy(path) => {
|
||||
wildcard_import_occurrences.push(Occurrences::from_path(&path))
|
||||
}
|
||||
}
|
||||
}
|
||||
let mut declarations = identifier_to_references
|
||||
.into_iter()
|
||||
.flat_map(|(identifier, references)| {
|
||||
let declarations =
|
||||
index.declarations_for_identifier::<MAX_IDENTIFIER_DECLARATION_COUNT>(&identifier);
|
||||
let declaration_count = declarations.len();
|
||||
|
||||
let mut scored_declarations = Vec::new();
|
||||
let mut project_entry_id_to_outline_ranges: HashMap<ProjectEntryId, Vec<Range<usize>>> =
|
||||
HashMap::default();
|
||||
for (identifier, references) in identifier_to_references {
|
||||
let mut import_occurrences = Vec::new();
|
||||
let mut import_paths = Vec::new();
|
||||
let mut found_external_identifier: Option<&Identifier> = None;
|
||||
declarations
|
||||
.into_iter()
|
||||
.filter_map(|(declaration_id, declaration)| match declaration {
|
||||
Declaration::Buffer {
|
||||
buffer_id,
|
||||
declaration: buffer_declaration,
|
||||
..
|
||||
} => {
|
||||
let is_same_file = buffer_id == ¤t_buffer.remote_id();
|
||||
|
||||
if let Some(imports) = imports.identifier_to_imports.get(&identifier) {
|
||||
// only use alias when it's the only import, could be generalized if some language
|
||||
// has overlapping aliases
|
||||
//
|
||||
// TODO: when an aliased declaration is included in the prompt, should include the
|
||||
// aliasing in the prompt.
|
||||
//
|
||||
// TODO: For SourceFuzzy consider having componentwise comparison that pays
|
||||
// attention to ordering.
|
||||
if let [
|
||||
Import::Alias {
|
||||
module,
|
||||
external_identifier,
|
||||
},
|
||||
] = imports.as_slice()
|
||||
{
|
||||
match module {
|
||||
Module::Namespace(namespace) => {
|
||||
import_occurrences.push(namespace.occurrences())
|
||||
}
|
||||
Module::SourceExact(path) => import_paths.push(path),
|
||||
Module::SourceFuzzy(path) => {
|
||||
import_occurrences.push(Occurrences::from_path(&path))
|
||||
}
|
||||
}
|
||||
found_external_identifier = Some(&external_identifier);
|
||||
} else {
|
||||
for import in imports {
|
||||
match import {
|
||||
Import::Direct { module } => match module {
|
||||
Module::Namespace(namespace) => {
|
||||
import_occurrences.push(namespace.occurrences())
|
||||
}
|
||||
Module::SourceExact(path) => import_paths.push(path),
|
||||
Module::SourceFuzzy(path) => {
|
||||
import_occurrences.push(Occurrences::from_path(&path))
|
||||
}
|
||||
},
|
||||
Import::Alias { .. } => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let identifier_to_lookup = found_external_identifier.unwrap_or(&identifier);
|
||||
// TODO: update this to be able to return more declarations? Especially if there is the
|
||||
// ability to quickly filter a large list (based on imports)
|
||||
let identifier_declarations = index
|
||||
.declarations_for_identifier::<MAX_IDENTIFIER_DECLARATION_COUNT>(&identifier_to_lookup);
|
||||
let declaration_count = identifier_declarations.len();
|
||||
|
||||
if declaration_count == 0 {
|
||||
continue;
|
||||
}
|
||||
|
||||
// TODO: option to filter out other candidates when same file / import match
|
||||
let mut checked_declarations = Vec::with_capacity(declaration_count);
|
||||
for (declaration_id, declaration) in identifier_declarations {
|
||||
match declaration {
|
||||
Declaration::Buffer {
|
||||
buffer_id,
|
||||
declaration: buffer_declaration,
|
||||
..
|
||||
} => {
|
||||
if buffer_id == ¤t_buffer.remote_id() {
|
||||
let already_included_in_prompt =
|
||||
range_intersection(&buffer_declaration.item_range, &excerpt.range)
|
||||
.is_some()
|
||||
if is_same_file {
|
||||
let overlaps_excerpt =
|
||||
range_intersection(&buffer_declaration.item_range, &excerpt.range)
|
||||
.is_some();
|
||||
if overlaps_excerpt
|
||||
|| excerpt
|
||||
.parent_declarations
|
||||
.iter()
|
||||
.any(|(excerpt_parent, _)| excerpt_parent == &declaration_id);
|
||||
if !options.omit_excerpt_overlaps || !already_included_in_prompt {
|
||||
let declaration_line = buffer_declaration
|
||||
.item_range
|
||||
.start
|
||||
.to_point(current_buffer)
|
||||
.row;
|
||||
let declaration_line_distance =
|
||||
(cursor_point.row as i32 - declaration_line as i32).unsigned_abs();
|
||||
checked_declarations.push(CheckedDeclaration {
|
||||
declaration,
|
||||
same_file_line_distance: Some(declaration_line_distance),
|
||||
path_import_match_count: 0,
|
||||
wildcard_path_import_match_count: 0,
|
||||
});
|
||||
.any(|(excerpt_parent, _)| excerpt_parent == &declaration_id)
|
||||
{
|
||||
None
|
||||
} else {
|
||||
let declaration_line = buffer_declaration
|
||||
.item_range
|
||||
.start
|
||||
.to_point(current_buffer)
|
||||
.row;
|
||||
Some((
|
||||
true,
|
||||
(cursor_point.row as i32 - declaration_line as i32)
|
||||
.unsigned_abs(),
|
||||
declaration,
|
||||
))
|
||||
}
|
||||
} else {
|
||||
Some((false, u32::MAX, declaration))
|
||||
}
|
||||
continue;
|
||||
} else {
|
||||
}
|
||||
}
|
||||
Declaration::File { .. } => {}
|
||||
}
|
||||
let declaration_path = declaration.cached_path();
|
||||
let path_import_match_count = import_paths
|
||||
.iter()
|
||||
.filter(|import_path| {
|
||||
declaration_path_matches_import(&declaration_path, import_path)
|
||||
Declaration::File { .. } => {
|
||||
// We can assume that a file declaration is in a different file,
|
||||
// because the current one must be open
|
||||
Some((false, u32::MAX, declaration))
|
||||
}
|
||||
})
|
||||
.count();
|
||||
let wildcard_path_import_match_count = wildcard_import_paths
|
||||
.iter()
|
||||
.filter(|import_path| {
|
||||
declaration_path_matches_import(&declaration_path, import_path)
|
||||
})
|
||||
.count();
|
||||
checked_declarations.push(CheckedDeclaration {
|
||||
declaration,
|
||||
same_file_line_distance: None,
|
||||
path_import_match_count,
|
||||
wildcard_path_import_match_count,
|
||||
});
|
||||
}
|
||||
.sorted_by_key(|&(_, distance, _)| distance)
|
||||
.enumerate()
|
||||
.map(
|
||||
|(
|
||||
declaration_line_distance_rank,
|
||||
(is_same_file, declaration_line_distance, declaration),
|
||||
)| {
|
||||
let same_file_declaration_count = index.file_declaration_count(declaration);
|
||||
|
||||
let mut max_import_similarity = 0.0;
|
||||
let mut max_wildcard_import_similarity = 0.0;
|
||||
score_declaration(
|
||||
&identifier,
|
||||
&references,
|
||||
declaration.clone(),
|
||||
is_same_file,
|
||||
declaration_line_distance,
|
||||
declaration_line_distance_rank,
|
||||
same_file_declaration_count,
|
||||
declaration_count,
|
||||
&excerpt_occurrences,
|
||||
&adjacent_occurrences,
|
||||
cursor_point,
|
||||
current_buffer,
|
||||
)
|
||||
},
|
||||
)
|
||||
.collect::<Vec<_>>()
|
||||
})
|
||||
.flatten()
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let mut scored_declarations_for_identifier = Vec::with_capacity(checked_declarations.len());
|
||||
for checked_declaration in checked_declarations {
|
||||
let same_file_declaration_count =
|
||||
index.file_declaration_count(checked_declaration.declaration);
|
||||
|
||||
let declaration = score_declaration(
|
||||
&identifier,
|
||||
&references,
|
||||
checked_declaration,
|
||||
same_file_declaration_count,
|
||||
declaration_count,
|
||||
&excerpt_occurrences,
|
||||
&adjacent_occurrences,
|
||||
&import_occurrences,
|
||||
&wildcard_import_occurrences,
|
||||
cursor_point,
|
||||
current_buffer,
|
||||
);
|
||||
|
||||
if declaration.components.import_similarity > max_import_similarity {
|
||||
max_import_similarity = declaration.components.import_similarity;
|
||||
}
|
||||
|
||||
if declaration.components.wildcard_import_similarity > max_wildcard_import_similarity {
|
||||
max_wildcard_import_similarity = declaration.components.wildcard_import_similarity;
|
||||
}
|
||||
|
||||
project_entry_id_to_outline_ranges
|
||||
.entry(declaration.declaration.project_entry_id())
|
||||
.or_default()
|
||||
.push(declaration.declaration.item_range());
|
||||
scored_declarations_for_identifier.push(declaration);
|
||||
}
|
||||
|
||||
if max_import_similarity > 0.0 || max_wildcard_import_similarity > 0.0 {
|
||||
for declaration in scored_declarations_for_identifier.iter_mut() {
|
||||
if max_import_similarity > 0.0 {
|
||||
declaration.components.max_import_similarity = max_import_similarity;
|
||||
declaration.components.normalized_import_similarity =
|
||||
declaration.components.import_similarity / max_import_similarity;
|
||||
}
|
||||
if max_wildcard_import_similarity > 0.0 {
|
||||
declaration.components.normalized_wildcard_import_similarity =
|
||||
declaration.components.wildcard_import_similarity
|
||||
/ max_wildcard_import_similarity;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
scored_declarations.extend(scored_declarations_for_identifier);
|
||||
}
|
||||
|
||||
// TODO: Inform this via import / retrieval scores of outline items
|
||||
// TODO: Consider using a sweepline
|
||||
for scored_declaration in scored_declarations.iter_mut() {
|
||||
let project_entry_id = scored_declaration.declaration.project_entry_id();
|
||||
let Some(ranges) = project_entry_id_to_outline_ranges.get(&project_entry_id) else {
|
||||
continue;
|
||||
};
|
||||
for range in ranges {
|
||||
if range.contains_inclusive(&scored_declaration.declaration.item_range()) {
|
||||
scored_declaration.components.included_by_others += 1
|
||||
} else if scored_declaration
|
||||
.declaration
|
||||
.item_range()
|
||||
.contains_inclusive(range)
|
||||
{
|
||||
scored_declaration.components.includes_others += 1
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
scored_declarations.sort_unstable_by_key(|declaration| {
|
||||
Reverse(OrderedFloat(
|
||||
declaration.score(DeclarationStyle::Declaration),
|
||||
))
|
||||
declarations.sort_unstable_by_key(|declaration| {
|
||||
let score_density = declaration
|
||||
.score_density(DeclarationStyle::Declaration)
|
||||
.max(declaration.score_density(DeclarationStyle::Signature));
|
||||
Reverse(OrderedFloat(score_density))
|
||||
});
|
||||
|
||||
scored_declarations
|
||||
}
|
||||
|
||||
struct CheckedDeclaration<'a> {
|
||||
declaration: &'a Declaration,
|
||||
same_file_line_distance: Option<u32>,
|
||||
path_import_match_count: usize,
|
||||
wildcard_path_import_match_count: usize,
|
||||
}
|
||||
|
||||
fn declaration_path_matches_import(
|
||||
declaration_path: &CachedDeclarationPath,
|
||||
import_path: &Arc<Path>,
|
||||
) -> bool {
|
||||
if import_path.is_absolute() {
|
||||
declaration_path.equals_absolute_path(import_path)
|
||||
} else {
|
||||
declaration_path.ends_with_posix_path(import_path)
|
||||
}
|
||||
declarations
|
||||
}
|
||||
|
||||
fn range_intersection<T: Ord + Clone>(a: &Range<T>, b: &Range<T>) -> Option<Range<T>> {
|
||||
@@ -375,23 +173,17 @@ fn range_intersection<T: Ord + Clone>(a: &Range<T>, b: &Range<T>) -> Option<Rang
|
||||
fn score_declaration(
|
||||
identifier: &Identifier,
|
||||
references: &[Reference],
|
||||
checked_declaration: CheckedDeclaration,
|
||||
declaration: Declaration,
|
||||
is_same_file: bool,
|
||||
declaration_line_distance: u32,
|
||||
declaration_line_distance_rank: usize,
|
||||
same_file_declaration_count: usize,
|
||||
declaration_count: usize,
|
||||
excerpt_occurrences: &Occurrences,
|
||||
adjacent_occurrences: &Occurrences,
|
||||
import_occurrences: &[Occurrences],
|
||||
wildcard_import_occurrences: &[Occurrences],
|
||||
cursor: Point,
|
||||
current_buffer: &BufferSnapshot,
|
||||
) -> ScoredDeclaration {
|
||||
let CheckedDeclaration {
|
||||
declaration,
|
||||
same_file_line_distance,
|
||||
path_import_match_count,
|
||||
wildcard_path_import_match_count,
|
||||
} = checked_declaration;
|
||||
|
||||
) -> Option<ScoredDeclaration> {
|
||||
let is_referenced_nearby = references
|
||||
.iter()
|
||||
.any(|r| r.region == ReferenceRegion::Nearby);
|
||||
@@ -408,9 +200,6 @@ fn score_declaration(
|
||||
.min()
|
||||
.unwrap();
|
||||
|
||||
let is_same_file = same_file_line_distance.is_some();
|
||||
let declaration_line_distance = same_file_line_distance.unwrap_or(u32::MAX);
|
||||
|
||||
let item_source_occurrences = Occurrences::within_string(&declaration.item_text().0);
|
||||
let item_signature_occurrences = Occurrences::within_string(&declaration.signature_text().0);
|
||||
let excerpt_vs_item_jaccard = jaccard_similarity(excerpt_occurrences, &item_source_occurrences);
|
||||
@@ -430,37 +219,6 @@ fn score_declaration(
|
||||
let adjacent_vs_signature_weighted_overlap =
|
||||
weighted_overlap_coefficient(adjacent_occurrences, &item_signature_occurrences);
|
||||
|
||||
let mut import_similarity = 0f32;
|
||||
let mut wildcard_import_similarity = 0f32;
|
||||
if !import_occurrences.is_empty() || !wildcard_import_occurrences.is_empty() {
|
||||
let cached_path = declaration.cached_path();
|
||||
let path_occurrences = Occurrences::from_worktree_path(
|
||||
cached_path
|
||||
.worktree_abs_path
|
||||
.file_name()
|
||||
.map(|f| f.to_string_lossy()),
|
||||
&cached_path.rel_path,
|
||||
);
|
||||
import_similarity = import_occurrences
|
||||
.iter()
|
||||
.map(|namespace_occurrences| {
|
||||
OrderedFloat(jaccard_similarity(namespace_occurrences, &path_occurrences))
|
||||
})
|
||||
.max()
|
||||
.map(|similarity| similarity.into_inner())
|
||||
.unwrap_or_default();
|
||||
|
||||
// TODO: Consider something other than max
|
||||
wildcard_import_similarity = wildcard_import_occurrences
|
||||
.iter()
|
||||
.map(|namespace_occurrences| {
|
||||
OrderedFloat(jaccard_similarity(namespace_occurrences, &path_occurrences))
|
||||
})
|
||||
.max()
|
||||
.map(|similarity| similarity.into_inner())
|
||||
.unwrap_or_default();
|
||||
}
|
||||
|
||||
// TODO: Consider adding declaration_file_count
|
||||
let score_components = DeclarationScoreComponents {
|
||||
is_same_file,
|
||||
@@ -468,6 +226,7 @@ fn score_declaration(
|
||||
is_referenced_in_breadcrumb,
|
||||
reference_line_distance,
|
||||
declaration_line_distance,
|
||||
declaration_line_distance_rank,
|
||||
reference_count,
|
||||
same_file_declaration_count,
|
||||
declaration_count,
|
||||
@@ -479,61 +238,52 @@ fn score_declaration(
|
||||
excerpt_vs_signature_weighted_overlap,
|
||||
adjacent_vs_item_weighted_overlap,
|
||||
adjacent_vs_signature_weighted_overlap,
|
||||
path_import_match_count,
|
||||
wildcard_path_import_match_count,
|
||||
import_similarity,
|
||||
max_import_similarity: 0.0,
|
||||
normalized_import_similarity: 0.0,
|
||||
wildcard_import_similarity,
|
||||
normalized_wildcard_import_similarity: 0.0,
|
||||
included_by_others: 0,
|
||||
includes_others: 0,
|
||||
};
|
||||
|
||||
ScoredDeclaration {
|
||||
Some(ScoredDeclaration {
|
||||
identifier: identifier.clone(),
|
||||
declaration: declaration.clone(),
|
||||
components: score_components,
|
||||
}
|
||||
declaration: declaration,
|
||||
scores: DeclarationScores::score(&score_components),
|
||||
score_components,
|
||||
})
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
#[derive(Clone, Debug, Serialize)]
|
||||
pub struct DeclarationScores {
|
||||
pub signature: f32,
|
||||
pub declaration: f32,
|
||||
pub retrieval: f32,
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_declaration_path_matches() {
|
||||
let declaration_path =
|
||||
CachedDeclarationPath::new_for_test("/home/user/project", "src/maths.ts");
|
||||
impl DeclarationScores {
|
||||
fn score(components: &DeclarationScoreComponents) -> DeclarationScores {
|
||||
// TODO: handle truncation
|
||||
|
||||
assert!(declaration_path_matches_import(
|
||||
&declaration_path,
|
||||
&Path::new("maths.ts").into()
|
||||
));
|
||||
// Score related to how likely this is the correct declaration, range 0 to 1
|
||||
let retrieval = if components.is_same_file {
|
||||
// TODO: use declaration_line_distance_rank
|
||||
1.0 / components.same_file_declaration_count as f32
|
||||
} else {
|
||||
1.0 / components.declaration_count as f32
|
||||
};
|
||||
|
||||
assert!(declaration_path_matches_import(
|
||||
&declaration_path,
|
||||
&Path::new("project/src/maths.ts").into()
|
||||
));
|
||||
// Score related to the distance between the reference and cursor, range 0 to 1
|
||||
let distance_score = if components.is_referenced_nearby {
|
||||
1.0 / (1.0 + components.reference_line_distance as f32 / 10.0).powf(2.0)
|
||||
} else {
|
||||
// same score as ~14 lines away, rationale is to not overly penalize references from parent signatures
|
||||
0.5
|
||||
};
|
||||
|
||||
assert!(declaration_path_matches_import(
|
||||
&declaration_path,
|
||||
&Path::new("user/project/src/maths.ts").into()
|
||||
));
|
||||
// For now instead of linear combination, the scores are just multiplied together.
|
||||
let combined_score = 10.0 * retrieval * distance_score;
|
||||
|
||||
assert!(declaration_path_matches_import(
|
||||
&declaration_path,
|
||||
&Path::new("/home/user/project/src/maths.ts").into()
|
||||
));
|
||||
|
||||
assert!(!declaration_path_matches_import(
|
||||
&declaration_path,
|
||||
&Path::new("other.ts").into()
|
||||
));
|
||||
|
||||
assert!(!declaration_path_matches_import(
|
||||
&declaration_path,
|
||||
&Path::new("/home/user/project/src/other.ts").into()
|
||||
));
|
||||
DeclarationScores {
|
||||
signature: combined_score * components.excerpt_vs_signature_weighted_overlap,
|
||||
// declaration score gets boosted both by being multiplied by 2 and by there being more
|
||||
// weighted overlap.
|
||||
declaration: 2.0 * combined_score * components.excerpt_vs_item_weighted_overlap,
|
||||
retrieval,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,15 +1,13 @@
mod declaration;
mod declaration_scoring;
mod excerpt;
mod imports;
mod outline;
mod reference;
mod syntax_index;
pub mod text_similarity;

use std::{path::Path, sync::Arc};
use std::sync::Arc;

use cloud_llm_client::predict_edits_v3;
use collections::HashMap;
use gpui::{App, AppContext as _, Entity, Task};
use language::BufferSnapshot;
@@ -18,24 +16,14 @@ use text::{Point, ToOffset as _};
pub use declaration::*;
pub use declaration_scoring::*;
pub use excerpt::*;
pub use imports::*;
pub use reference::*;
pub use syntax_index::*;

pub use predict_edits_v3::Line;

#[derive(Clone, Debug, PartialEq)]
pub struct EditPredictionContextOptions {
    pub use_imports: bool,
    pub excerpt: EditPredictionExcerptOptions,
    pub score: EditPredictionScoreOptions,
}

#[derive(Clone, Debug)]
pub struct EditPredictionContext {
    pub excerpt: EditPredictionExcerpt,
    pub excerpt_text: EditPredictionExcerptText,
    pub cursor_point: Point,
    pub cursor_offset_in_excerpt: usize,
    pub declarations: Vec<ScoredDeclaration>,
}

@@ -43,34 +31,21 @@ impl EditPredictionContext {
|
||||
pub fn gather_context_in_background(
|
||||
cursor_point: Point,
|
||||
buffer: BufferSnapshot,
|
||||
options: EditPredictionContextOptions,
|
||||
excerpt_options: EditPredictionExcerptOptions,
|
||||
syntax_index: Option<Entity<SyntaxIndex>>,
|
||||
cx: &mut App,
|
||||
) -> Task<Option<Self>> {
|
||||
let parent_abs_path = project::File::from_dyn(buffer.file()).and_then(|f| {
|
||||
let mut path = f.worktree.read(cx).absolutize(&f.path);
|
||||
if path.pop() { Some(path) } else { None }
|
||||
});
|
||||
|
||||
if let Some(syntax_index) = syntax_index {
|
||||
let index_state =
|
||||
syntax_index.read_with(cx, |index, _cx| Arc::downgrade(index.state()));
|
||||
cx.background_spawn(async move {
|
||||
let parent_abs_path = parent_abs_path.as_deref();
|
||||
let index_state = index_state.upgrade()?;
|
||||
let index_state = index_state.lock().await;
|
||||
Self::gather_context(
|
||||
cursor_point,
|
||||
&buffer,
|
||||
parent_abs_path,
|
||||
&options,
|
||||
Some(&index_state),
|
||||
)
|
||||
Self::gather_context(cursor_point, &buffer, &excerpt_options, Some(&index_state))
|
||||
})
|
||||
} else {
|
||||
cx.background_spawn(async move {
|
||||
let parent_abs_path = parent_abs_path.as_deref();
|
||||
Self::gather_context(cursor_point, &buffer, parent_abs_path, &options, None)
|
||||
Self::gather_context(cursor_point, &buffer, &excerpt_options, None)
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -78,20 +53,13 @@ impl EditPredictionContext {
|
||||
pub fn gather_context(
|
||||
cursor_point: Point,
|
||||
buffer: &BufferSnapshot,
|
||||
parent_abs_path: Option<&Path>,
|
||||
options: &EditPredictionContextOptions,
|
||||
excerpt_options: &EditPredictionExcerptOptions,
|
||||
index_state: Option<&SyntaxIndexState>,
|
||||
) -> Option<Self> {
|
||||
let imports = if options.use_imports {
|
||||
Imports::gather(&buffer, parent_abs_path)
|
||||
} else {
|
||||
Imports::default()
|
||||
};
|
||||
Self::gather_context_with_references_fn(
|
||||
cursor_point,
|
||||
buffer,
|
||||
&imports,
|
||||
options,
|
||||
excerpt_options,
|
||||
index_state,
|
||||
references_in_excerpt,
|
||||
)
|
||||
@@ -100,8 +68,7 @@ impl EditPredictionContext {
|
||||
pub fn gather_context_with_references_fn(
|
||||
cursor_point: Point,
|
||||
buffer: &BufferSnapshot,
|
||||
imports: &Imports,
|
||||
options: &EditPredictionContextOptions,
|
||||
excerpt_options: &EditPredictionExcerptOptions,
|
||||
index_state: Option<&SyntaxIndexState>,
|
||||
get_references: impl FnOnce(
|
||||
&EditPredictionExcerpt,
|
||||
@@ -112,7 +79,7 @@ impl EditPredictionContext {
|
||||
let excerpt = EditPredictionExcerpt::select_from_buffer(
|
||||
cursor_point,
|
||||
buffer,
|
||||
&options.excerpt,
|
||||
excerpt_options,
|
||||
index_state,
|
||||
)?;
|
||||
let excerpt_text = excerpt.text(buffer);
|
||||
@@ -127,17 +94,17 @@ impl EditPredictionContext {
|
||||
);
|
||||
|
||||
let cursor_offset_in_file = cursor_point.to_offset(buffer);
|
||||
// TODO fix this to not need saturating_sub
|
||||
let cursor_offset_in_excerpt = cursor_offset_in_file.saturating_sub(excerpt.range.start);
|
||||
|
||||
let declarations = if let Some(index_state) = index_state {
|
||||
let references = get_references(&excerpt, &excerpt_text, buffer);
|
||||
|
||||
scored_declarations(
|
||||
&options.score,
|
||||
&index_state,
|
||||
&excerpt,
|
||||
&excerpt_occurrences,
|
||||
&adjacent_occurrences,
|
||||
&imports,
|
||||
references,
|
||||
cursor_offset_in_file,
|
||||
buffer,
|
||||
@@ -149,7 +116,7 @@ impl EditPredictionContext {
|
||||
Some(Self {
|
||||
excerpt,
|
||||
excerpt_text,
|
||||
cursor_point,
|
||||
cursor_offset_in_excerpt,
|
||||
declarations,
|
||||
})
|
||||
}
|
||||
@@ -193,18 +160,12 @@ mod tests {
|
||||
EditPredictionContext::gather_context_in_background(
|
||||
cursor_point,
|
||||
buffer_snapshot,
|
||||
EditPredictionContextOptions {
|
||||
use_imports: true,
|
||||
excerpt: EditPredictionExcerptOptions {
|
||||
max_bytes: 60,
|
||||
min_bytes: 10,
|
||||
target_before_cursor_over_total_bytes: 0.5,
|
||||
},
|
||||
score: EditPredictionScoreOptions {
|
||||
omit_excerpt_overlaps: true,
|
||||
},
|
||||
EditPredictionExcerptOptions {
|
||||
max_bytes: 60,
|
||||
min_bytes: 10,
|
||||
target_before_cursor_over_total_bytes: 0.5,
|
||||
},
|
||||
Some(index.clone()),
|
||||
Some(index),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
|
||||
@@ -4,7 +4,7 @@ use text::{Point, ToOffset as _, ToPoint as _};
|
||||
use tree_sitter::{Node, TreeCursor};
|
||||
use util::RangeExt;
|
||||
|
||||
use crate::{BufferDeclaration, Line, declaration::DeclarationId, syntax_index::SyntaxIndexState};
|
||||
use crate::{BufferDeclaration, declaration::DeclarationId, syntax_index::SyntaxIndexState};
|
||||
|
||||
// TODO:
|
||||
//
|
||||
@@ -35,7 +35,6 @@ pub struct EditPredictionExcerptOptions {
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct EditPredictionExcerpt {
|
||||
pub range: Range<usize>,
|
||||
pub line_range: Range<Line>,
|
||||
pub parent_declarations: Vec<(DeclarationId, Range<usize>)>,
|
||||
pub size: usize,
|
||||
}
|
||||
@@ -87,19 +86,12 @@ impl EditPredictionExcerpt {
|
||||
buffer.len(),
|
||||
options.max_bytes
|
||||
);
|
||||
let offset_range = 0..buffer.len();
|
||||
let line_range = Line(0)..Line(buffer.max_point().row);
|
||||
return Some(EditPredictionExcerpt::new(
|
||||
offset_range,
|
||||
line_range,
|
||||
Vec::new(),
|
||||
));
|
||||
return Some(EditPredictionExcerpt::new(0..buffer.len(), Vec::new()));
|
||||
}
|
||||
|
||||
let query_offset = query_point.to_offset(buffer);
|
||||
let query_line_range = query_point.row..query_point.row + 1;
|
||||
let query_range = Point::new(query_line_range.start, 0).to_offset(buffer)
|
||||
..Point::new(query_line_range.end, 0).to_offset(buffer);
|
||||
let query_range = Point::new(query_point.row, 0).to_offset(buffer)
|
||||
..Point::new(query_point.row + 1, 0).to_offset(buffer);
|
||||
if query_range.len() >= options.max_bytes {
|
||||
return None;
|
||||
}
|
||||
@@ -115,7 +107,6 @@ impl EditPredictionExcerpt {
|
||||
let excerpt_selector = ExcerptSelector {
|
||||
query_offset,
|
||||
query_range,
|
||||
query_line_range: Line(query_line_range.start)..Line(query_line_range.end),
|
||||
parent_declarations: &parent_declarations,
|
||||
buffer,
|
||||
options,
|
||||
@@ -139,11 +130,7 @@ impl EditPredictionExcerpt {
|
||||
excerpt_selector.select_lines()
|
||||
}
|
||||
|
||||
fn new(
|
||||
range: Range<usize>,
|
||||
line_range: Range<Line>,
|
||||
parent_declarations: Vec<(DeclarationId, Range<usize>)>,
|
||||
) -> Self {
|
||||
fn new(range: Range<usize>, parent_declarations: Vec<(DeclarationId, Range<usize>)>) -> Self {
|
||||
let size = range.len()
|
||||
+ parent_declarations
|
||||
.iter()
|
||||
@@ -153,11 +140,10 @@ impl EditPredictionExcerpt {
|
||||
range,
|
||||
parent_declarations,
|
||||
size,
|
||||
line_range,
|
||||
}
|
||||
}
|
||||
|
||||
fn with_expanded_range(&self, new_range: Range<usize>, new_line_range: Range<Line>) -> Self {
|
||||
fn with_expanded_range(&self, new_range: Range<usize>) -> Self {
|
||||
if !new_range.contains_inclusive(&self.range) {
|
||||
// this is an issue because parent_signature_ranges may be incorrect
|
||||
log::error!("bug: with_expanded_range called with disjoint range");
|
||||
@@ -169,7 +155,7 @@ impl EditPredictionExcerpt {
|
||||
}
|
||||
parent_declarations.push((*declaration_id, range.clone()));
|
||||
}
|
||||
Self::new(new_range, new_line_range, parent_declarations)
|
||||
Self::new(new_range, parent_declarations)
|
||||
}
|
||||
|
||||
fn parent_signatures_size(&self) -> usize {
|
||||
@@ -180,7 +166,6 @@ impl EditPredictionExcerpt {
|
||||
struct ExcerptSelector<'a> {
|
||||
query_offset: usize,
|
||||
query_range: Range<usize>,
|
||||
query_line_range: Range<Line>,
|
||||
parent_declarations: &'a [(DeclarationId, &'a BufferDeclaration)],
|
||||
buffer: &'a BufferSnapshot,
|
||||
options: &'a EditPredictionExcerptOptions,
|
||||
@@ -193,13 +178,10 @@ impl<'a> ExcerptSelector<'a> {
|
||||
let mut cursor = selected_layer_root.walk();
|
||||
|
||||
loop {
|
||||
let line_start = node_line_start(cursor.node());
|
||||
let line_end = node_line_end(cursor.node());
|
||||
let line_range = Line(line_start.row)..Line(line_end.row);
|
||||
let excerpt_range =
|
||||
line_start.to_offset(&self.buffer)..line_end.to_offset(&self.buffer);
|
||||
let excerpt_range = node_line_start(cursor.node()).to_offset(&self.buffer)
|
||||
..node_line_end(cursor.node()).to_offset(&self.buffer);
|
||||
if excerpt_range.contains_inclusive(&self.query_range) {
|
||||
let excerpt = self.make_excerpt(excerpt_range, line_range);
|
||||
let excerpt = self.make_excerpt(excerpt_range);
|
||||
if excerpt.size <= self.options.max_bytes {
|
||||
return Some(self.expand_to_siblings(&mut cursor, excerpt));
|
||||
}
|
||||
@@ -290,13 +272,9 @@ impl<'a> ExcerptSelector<'a> {
|
||||
|
||||
let mut forward = None;
|
||||
while !forward_done {
|
||||
let new_end_point = node_line_end(forward_cursor.node());
|
||||
let new_end = new_end_point.to_offset(&self.buffer);
|
||||
let new_end = node_line_end(forward_cursor.node()).to_offset(&self.buffer);
|
||||
if new_end > excerpt.range.end {
|
||||
let new_excerpt = excerpt.with_expanded_range(
|
||||
excerpt.range.start..new_end,
|
||||
excerpt.line_range.start..Line(new_end_point.row),
|
||||
);
|
||||
let new_excerpt = excerpt.with_expanded_range(excerpt.range.start..new_end);
|
||||
if new_excerpt.size <= self.options.max_bytes {
|
||||
forward = Some(new_excerpt);
|
||||
break;
|
||||
@@ -311,13 +289,9 @@ impl<'a> ExcerptSelector<'a> {
|
||||
|
||||
let mut backward = None;
|
||||
while !backward_done {
|
||||
let new_start_point = node_line_start(backward_cursor.node());
|
||||
let new_start = new_start_point.to_offset(&self.buffer);
|
||||
let new_start = node_line_start(backward_cursor.node()).to_offset(&self.buffer);
|
||||
if new_start < excerpt.range.start {
|
||||
let new_excerpt = excerpt.with_expanded_range(
|
||||
new_start..excerpt.range.end,
|
||||
Line(new_start_point.row)..excerpt.line_range.end,
|
||||
);
|
||||
let new_excerpt = excerpt.with_expanded_range(new_start..excerpt.range.end);
|
||||
if new_excerpt.size <= self.options.max_bytes {
|
||||
backward = Some(new_excerpt);
|
||||
break;
|
||||
@@ -365,7 +339,7 @@ impl<'a> ExcerptSelector<'a> {
|
||||
|
||||
fn select_lines(&self) -> Option<EditPredictionExcerpt> {
|
||||
// early return if line containing query_offset is already too large
|
||||
let excerpt = self.make_excerpt(self.query_range.clone(), self.query_line_range.clone());
|
||||
let excerpt = self.make_excerpt(self.query_range.clone());
|
||||
if excerpt.size > self.options.max_bytes {
|
||||
log::debug!(
|
||||
"excerpt for cursor line is {} bytes, which exceeds the window",
|
||||
@@ -379,24 +353,24 @@ impl<'a> ExcerptSelector<'a> {
|
||||
let before_bytes =
|
||||
(self.options.target_before_cursor_over_total_bytes * bytes_remaining as f32) as usize;
|
||||
|
||||
let start_line = {
|
||||
let start_point = {
|
||||
let offset = self.query_offset.saturating_sub(before_bytes);
|
||||
let point = offset.to_point(self.buffer);
|
||||
Line(point.row + 1)
|
||||
Point::new(point.row + 1, 0)
|
||||
};
|
||||
let start_offset = Point::new(start_line.0, 0).to_offset(&self.buffer);
|
||||
let end_line = {
|
||||
let start_offset = start_point.to_offset(&self.buffer);
|
||||
let end_point = {
|
||||
let offset = start_offset + bytes_remaining;
|
||||
let point = offset.to_point(self.buffer);
|
||||
Line(point.row)
|
||||
Point::new(point.row, 0)
|
||||
};
|
||||
let end_offset = Point::new(end_line.0, 0).to_offset(&self.buffer);
|
||||
let end_offset = end_point.to_offset(&self.buffer);
|
||||
|
||||
// this could be expanded further since recalculated `signature_size` may be smaller, but
|
||||
// skipping that for now for simplicity
|
||||
//
|
||||
// TODO: could also consider checking if lines immediately before / after fit.
|
||||
let excerpt = self.make_excerpt(start_offset..end_offset, start_line..end_line);
|
||||
let excerpt = self.make_excerpt(start_offset..end_offset);
|
||||
if excerpt.size > self.options.max_bytes {
|
||||
log::error!(
|
||||
"bug: line-based excerpt selection has size {}, \
|
||||
@@ -408,14 +382,14 @@ impl<'a> ExcerptSelector<'a> {
|
||||
return Some(excerpt);
|
||||
}
|
||||
|
||||
fn make_excerpt(&self, range: Range<usize>, line_range: Range<Line>) -> EditPredictionExcerpt {
|
||||
fn make_excerpt(&self, range: Range<usize>) -> EditPredictionExcerpt {
|
||||
let parent_declarations = self
|
||||
.parent_declarations
|
||||
.iter()
|
||||
.filter(|(_, declaration)| declaration.item_range.contains_inclusive(&range))
|
||||
.map(|(id, declaration)| (*id, declaration.signature_range.clone()))
|
||||
.collect();
|
||||
EditPredictionExcerpt::new(range, line_range, parent_declarations)
|
||||
EditPredictionExcerpt::new(range, parent_declarations)
|
||||
}
|
||||
|
||||
/// Returns `true` if the `forward` excerpt is a better choice than the `backward` excerpt.
|
||||
|
||||
File diff suppressed because it is too large
@@ -5,7 +5,6 @@ use futures::lock::Mutex;
use futures::{FutureExt as _, StreamExt, future};
use gpui::{App, AppContext as _, AsyncApp, Context, Entity, Task, WeakEntity};
use itertools::Itertools;

use language::{Buffer, BufferEvent};
use postage::stream::Stream as _;
use project::buffer_store::{BufferStore, BufferStoreEvent};
@@ -18,7 +17,6 @@ use std::sync::Arc;
use text::BufferId;
use util::{RangeExt as _, debug_panic, some_or_debug_panic};

use crate::CachedDeclarationPath;
use crate::declaration::{
    BufferDeclaration, Declaration, DeclarationId, FileDeclaration, Identifier,
};
@@ -30,8 +28,6 @@ use crate::outline::declarations_in_buffer;
// `buffer_declarations_containing_range` assumes that the index is always immediately up to date.
//
// * Add a per language configuration for skipping indexing.
//
// * Handle tsx / ts / js referencing each-other

// Potential future improvements:
//
@@ -65,7 +61,6 @@ pub struct SyntaxIndex {
|
||||
state: Arc<Mutex<SyntaxIndexState>>,
|
||||
project: WeakEntity<Project>,
|
||||
initial_file_indexing_done_rx: postage::watch::Receiver<bool>,
|
||||
_file_indexing_task: Option<Task<()>>,
|
||||
}
|
||||
|
||||
pub struct SyntaxIndexState {
|
||||
@@ -75,6 +70,7 @@ pub struct SyntaxIndexState {
|
||||
buffers: HashMap<BufferId, BufferState>,
|
||||
dirty_files: HashMap<ProjectEntryId, ProjectPath>,
|
||||
dirty_files_tx: mpsc::Sender<()>,
|
||||
_file_indexing_task: Option<Task<()>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
@@ -106,12 +102,12 @@ impl SyntaxIndex {
|
||||
buffers: HashMap::default(),
|
||||
dirty_files: HashMap::default(),
|
||||
dirty_files_tx,
|
||||
_file_indexing_task: None,
|
||||
};
|
||||
let mut this = Self {
|
||||
let this = Self {
|
||||
project: project.downgrade(),
|
||||
state: Arc::new(Mutex::new(initial_state)),
|
||||
initial_file_indexing_done_rx,
|
||||
_file_indexing_task: None,
|
||||
};
|
||||
|
||||
let worktree_store = project.read(cx).worktree_store();
|
||||
@@ -120,77 +116,75 @@ impl SyntaxIndex {
|
||||
.worktrees()
|
||||
.map(|w| w.read(cx).snapshot())
|
||||
.collect::<Vec<_>>();
|
||||
this._file_indexing_task = Some(cx.spawn(async move |this, cx| {
|
||||
let snapshots_file_count = initial_worktree_snapshots
|
||||
.iter()
|
||||
.map(|worktree| worktree.file_count())
|
||||
.sum::<usize>();
|
||||
if snapshots_file_count > 0 {
|
||||
let chunk_size = snapshots_file_count.div_ceil(file_indexing_parallelism);
|
||||
let chunk_count = snapshots_file_count.div_ceil(chunk_size);
|
||||
let file_chunks = initial_worktree_snapshots
|
||||
.iter()
|
||||
.flat_map(|worktree| {
|
||||
let worktree_id = worktree.id();
|
||||
worktree.files(false, 0).map(move |entry| {
|
||||
(
|
||||
entry.id,
|
||||
ProjectPath {
|
||||
worktree_id,
|
||||
path: entry.path.clone(),
|
||||
},
|
||||
)
|
||||
if !initial_worktree_snapshots.is_empty() {
|
||||
this.state.try_lock().unwrap()._file_indexing_task =
|
||||
Some(cx.spawn(async move |this, cx| {
|
||||
let snapshots_file_count = initial_worktree_snapshots
|
||||
.iter()
|
||||
.map(|worktree| worktree.file_count())
|
||||
.sum::<usize>();
|
||||
let chunk_size = snapshots_file_count.div_ceil(file_indexing_parallelism);
|
||||
let chunk_count = snapshots_file_count.div_ceil(chunk_size);
|
||||
let file_chunks = initial_worktree_snapshots
|
||||
.iter()
|
||||
.flat_map(|worktree| {
|
||||
let worktree_id = worktree.id();
|
||||
worktree.files(false, 0).map(move |entry| {
|
||||
(
|
||||
entry.id,
|
||||
ProjectPath {
|
||||
worktree_id,
|
||||
path: entry.path.clone(),
|
||||
},
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
||||
.chunks(chunk_size);
|
||||
.chunks(chunk_size);
|
||||
|
||||
let mut tasks = Vec::with_capacity(chunk_count);
|
||||
for chunk in file_chunks.into_iter() {
|
||||
tasks.push(Self::update_dirty_files(
|
||||
&this,
|
||||
chunk.into_iter().collect(),
|
||||
cx.clone(),
|
||||
));
|
||||
}
|
||||
futures::future::join_all(tasks).await;
|
||||
log::info!("Finished initial file indexing");
|
||||
}
|
||||
let mut tasks = Vec::with_capacity(chunk_count);
|
||||
for chunk in file_chunks.into_iter() {
|
||||
tasks.push(Self::update_dirty_files(
|
||||
&this,
|
||||
chunk.into_iter().collect(),
|
||||
cx.clone(),
|
||||
));
|
||||
}
|
||||
futures::future::join_all(tasks).await;
|
||||
|
||||
*initial_file_indexing_done_tx.borrow_mut() = true;
|
||||
log::info!("Finished initial file indexing");
|
||||
*initial_file_indexing_done_tx.borrow_mut() = true;
|
||||
|
||||
let Ok(state) = this.read_with(cx, |this, _cx| Arc::downgrade(&this.state)) else {
|
||||
return;
|
||||
};
|
||||
while dirty_files_rx.next().await.is_some() {
|
||||
let Some(state) = state.upgrade() else {
|
||||
return;
|
||||
};
|
||||
let mut state = state.lock().await;
|
||||
let was_underused = state.dirty_files.capacity() > 255
|
||||
&& state.dirty_files.len() * 8 < state.dirty_files.capacity();
|
||||
let dirty_files = state.dirty_files.drain().collect::<Vec<_>>();
|
||||
if was_underused {
|
||||
state.dirty_files.shrink_to_fit();
|
||||
}
|
||||
drop(state);
|
||||
if dirty_files.is_empty() {
|
||||
continue;
|
||||
}
|
||||
let Ok(state) = this.read_with(cx, |this, _cx| this.state.clone()) else {
|
||||
return;
|
||||
};
|
||||
while dirty_files_rx.next().await.is_some() {
|
||||
let mut state = state.lock().await;
|
||||
let was_underused = state.dirty_files.capacity() > 255
|
||||
&& state.dirty_files.len() * 8 < state.dirty_files.capacity();
|
||||
let dirty_files = state.dirty_files.drain().collect::<Vec<_>>();
|
||||
if was_underused {
|
||||
state.dirty_files.shrink_to_fit();
|
||||
}
|
||||
drop(state);
|
||||
if dirty_files.is_empty() {
|
||||
continue;
|
||||
}
|
||||
|
||||
let chunk_size = dirty_files.len().div_ceil(file_indexing_parallelism);
|
||||
let chunk_count = dirty_files.len().div_ceil(chunk_size);
|
||||
let mut tasks = Vec::with_capacity(chunk_count);
|
||||
let chunks = dirty_files.into_iter().chunks(chunk_size);
|
||||
for chunk in chunks.into_iter() {
|
||||
tasks.push(Self::update_dirty_files(
|
||||
&this,
|
||||
chunk.into_iter().collect(),
|
||||
cx.clone(),
|
||||
));
|
||||
}
|
||||
futures::future::join_all(tasks).await;
|
||||
}
|
||||
}));
|
||||
let chunk_size = dirty_files.len().div_ceil(file_indexing_parallelism);
|
||||
let chunk_count = dirty_files.len().div_ceil(chunk_size);
|
||||
let mut tasks = Vec::with_capacity(chunk_count);
|
||||
let chunks = dirty_files.into_iter().chunks(chunk_size);
|
||||
for chunk in chunks.into_iter() {
|
||||
tasks.push(Self::update_dirty_files(
|
||||
&this,
|
||||
chunk.into_iter().collect(),
|
||||
cx.clone(),
|
||||
));
|
||||
}
|
||||
futures::future::join_all(tasks).await;
|
||||
}
|
||||
}));
|
||||
}
|
||||
|
||||
cx.subscribe(&worktree_store, Self::handle_worktree_store_event)
|
||||
.detach();
|
||||
@@ -370,9 +364,7 @@ impl SyntaxIndex {
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
match event {
|
||||
BufferEvent::Edited |
|
||||
// paths are cached and so should be updated
|
||||
BufferEvent::FileHandleChanged => self.update_buffer(buffer, cx),
|
||||
BufferEvent::Edited => self.update_buffer(buffer, cx),
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
@@ -383,16 +375,8 @@ impl SyntaxIndex {
|
||||
return;
|
||||
}
|
||||
|
||||
let Some((project_entry_id, cached_path)) = project::File::from_dyn(buffer.file())
|
||||
.and_then(|f| {
|
||||
let project_entry_id = f.project_entry_id()?;
|
||||
let cached_path = CachedDeclarationPath::new(
|
||||
f.worktree.read(cx).abs_path(),
|
||||
&f.path,
|
||||
buffer.language(),
|
||||
);
|
||||
Some((project_entry_id, cached_path))
|
||||
})
|
||||
let Some(project_entry_id) =
|
||||
project::File::from_dyn(buffer.file()).and_then(|f| f.project_entry_id(cx))
|
||||
else {
|
||||
return;
|
||||
};
|
||||
@@ -456,7 +440,6 @@ impl SyntaxIndex {
|
||||
buffer_id,
|
||||
declaration,
|
||||
project_entry_id,
|
||||
cached_path: cached_path.clone(),
|
||||
});
|
||||
new_ids.push(declaration_id);
|
||||
|
||||
@@ -524,14 +507,13 @@ impl SyntaxIndex {
|
||||
|
||||
let snapshot_task = worktree.update(cx, |worktree, cx| {
|
||||
let load_task = worktree.load_file(&project_path.path, cx);
|
||||
let worktree_abs_path = worktree.abs_path();
|
||||
cx.spawn(async move |_this, cx| {
|
||||
let loaded_file = load_task.await?;
|
||||
let language = language.await?;
|
||||
|
||||
let buffer = cx.new(|cx| {
|
||||
let mut buffer = Buffer::local(loaded_file.text, cx);
|
||||
buffer.set_language(Some(language.clone()), cx);
|
||||
buffer.set_language(Some(language), cx);
|
||||
buffer
|
||||
})?;
|
||||
|
||||
@@ -540,22 +522,14 @@ impl SyntaxIndex {
|
||||
parse_status.changed().await?;
|
||||
}
|
||||
|
||||
let cached_path = CachedDeclarationPath::new(
|
||||
worktree_abs_path,
|
||||
&project_path.path,
|
||||
Some(&language),
|
||||
);
|
||||
|
||||
let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?;
|
||||
|
||||
anyhow::Ok((snapshot, cached_path))
|
||||
buffer.read_with(cx, |buffer, _cx| buffer.snapshot())
|
||||
})
|
||||
});
|
||||
|
||||
let state = Arc::downgrade(&self.state);
|
||||
cx.background_spawn(async move {
|
||||
// TODO: How to handle errors?
|
||||
let Ok((snapshot, cached_path)) = snapshot_task.await else {
|
||||
let Ok(snapshot) = snapshot_task.await else {
|
||||
return;
|
||||
};
|
||||
let rope = snapshot.as_rope();
|
||||
@@ -593,7 +567,6 @@ impl SyntaxIndex {
|
||||
let declaration_id = state.declarations.insert(Declaration::File {
|
||||
project_entry_id: entry_id,
|
||||
declaration,
|
||||
cached_path: cached_path.clone(),
|
||||
});
|
||||
new_ids.push(declaration_id);
|
||||
|
||||
@@ -948,7 +921,6 @@ mod tests {
|
||||
if let Declaration::File {
|
||||
declaration,
|
||||
project_entry_id: file,
|
||||
..
|
||||
} = declaration
|
||||
{
|
||||
assert_eq!(
|
||||
|
||||
@@ -1,12 +1,9 @@
use hashbrown::HashTable;
use regex::Regex;
use std::{
    borrow::Cow,
    hash::{Hash, Hasher as _},
    path::Path,
    sync::LazyLock,
};
use util::rel_path::RelPath;

use crate::reference::Reference;

@@ -48,34 +45,19 @@ impl Occurrences {
|
||||
)
|
||||
}
|
||||
|
||||
pub fn from_identifiers(identifiers: impl IntoIterator<Item = impl AsRef<str>>) -> Self {
|
||||
pub fn from_identifiers<'a>(identifiers: impl IntoIterator<Item = &'a str>) -> Self {
|
||||
let mut this = Self::default();
|
||||
// TODO: Score matches that match case higher?
|
||||
//
|
||||
// TODO: Also include unsplit identifier?
|
||||
for identifier in identifiers {
|
||||
for identifier_part in split_identifier(identifier.as_ref()) {
|
||||
for identifier_part in split_identifier(identifier) {
|
||||
this.add_hash(fx_hash(&identifier_part.to_lowercase()));
|
||||
}
|
||||
}
|
||||
this
|
||||
}
|
||||
|
||||
pub fn from_worktree_path(worktree_name: Option<Cow<'_, str>>, rel_path: &RelPath) -> Self {
|
||||
if let Some(worktree_name) = worktree_name {
|
||||
Self::from_identifiers(
|
||||
std::iter::once(worktree_name)
|
||||
.chain(iter_path_without_extension(rel_path.as_std_path())),
|
||||
)
|
||||
} else {
|
||||
Self::from_path(rel_path.as_std_path())
|
||||
}
|
||||
}
|
||||
|
||||
pub fn from_path(path: &Path) -> Self {
|
||||
Self::from_identifiers(iter_path_without_extension(path))
|
||||
}
|
||||
|
||||
fn add_hash(&mut self, hash: u64) {
|
||||
self.table
|
||||
.entry(
|
||||
@@ -100,15 +82,6 @@ impl Occurrences {
|
||||
}
|
||||
}
|
||||
|
||||
fn iter_path_without_extension(path: &Path) -> impl Iterator<Item = Cow<'_, str>> {
|
||||
let last_component: Option<Cow<'_, str>> = path.file_stem().map(|stem| stem.to_string_lossy());
|
||||
let mut path_components = path.components();
|
||||
path_components.next_back();
|
||||
path_components
|
||||
.map(|component| component.as_os_str().to_string_lossy())
|
||||
.chain(last_component)
|
||||
}
|
||||
|
||||
pub fn fx_hash<T: Hash + ?Sized>(data: &T) -> u64 {
|
||||
let mut hasher = collections::FxHasher::default();
|
||||
data.hash(&mut hasher);
|
||||
@@ -296,19 +269,4 @@ mod test {
|
||||
// the smaller set, 10.
|
||||
assert_eq!(weighted_overlap_coefficient(&set_a, &set_b), 7.0 / 10.0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_iter_path_without_extension() {
|
||||
let mut iter = iter_path_without_extension(Path::new(""));
|
||||
assert_eq!(iter.next(), None);
|
||||
|
||||
let iter = iter_path_without_extension(Path::new("foo"));
|
||||
assert_eq!(iter.collect::<Vec<_>>(), ["foo"]);
|
||||
|
||||
let iter = iter_path_without_extension(Path::new("foo/bar.txt"));
|
||||
assert_eq!(iter.collect::<Vec<_>>(), ["foo", "bar"]);
|
||||
|
||||
let iter = iter_path_without_extension(Path::new("foo/bar/baz.txt"));
|
||||
assert_eq!(iter.collect::<Vec<_>>(), ["foo", "bar", "baz"]);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -456,33 +456,6 @@ actions!(
|
||||
Fold,
|
||||
/// Folds all foldable regions in the editor.
|
||||
FoldAll,
|
||||
/// Folds all code blocks at indentation level 1.
|
||||
#[action(name = "FoldAtLevel_1")]
|
||||
FoldAtLevel1,
|
||||
/// Folds all code blocks at indentation level 2.
|
||||
#[action(name = "FoldAtLevel_2")]
|
||||
FoldAtLevel2,
|
||||
/// Folds all code blocks at indentation level 3.
|
||||
#[action(name = "FoldAtLevel_3")]
|
||||
FoldAtLevel3,
|
||||
/// Folds all code blocks at indentation level 4.
|
||||
#[action(name = "FoldAtLevel_4")]
|
||||
FoldAtLevel4,
|
||||
/// Folds all code blocks at indentation level 5.
|
||||
#[action(name = "FoldAtLevel_5")]
|
||||
FoldAtLevel5,
|
||||
/// Folds all code blocks at indentation level 6.
|
||||
#[action(name = "FoldAtLevel_6")]
|
||||
FoldAtLevel6,
|
||||
/// Folds all code blocks at indentation level 7.
|
||||
#[action(name = "FoldAtLevel_7")]
|
||||
FoldAtLevel7,
|
||||
/// Folds all code blocks at indentation level 8.
|
||||
#[action(name = "FoldAtLevel_8")]
|
||||
FoldAtLevel8,
|
||||
/// Folds all code blocks at indentation level 9.
|
||||
#[action(name = "FoldAtLevel_9")]
|
||||
FoldAtLevel9,
|
||||
/// Folds all function bodies in the editor.
|
||||
FoldFunctionBodies,
|
||||
/// Folds the current code block and all its children.
|
||||
|
||||
@@ -328,7 +328,11 @@ impl CompletionsMenu {
    .map(|choice| Completion {
        replace_range: selection.start.text_anchor..selection.end.text_anchor,
        new_text: choice.to_string(),
        label: CodeLabel::plain(choice.to_string(), None),
        label: CodeLabel {
            text: choice.to_string(),
            runs: Default::default(),
            filter_range: Default::default(),
        },
        icon_path: None,
        documentation: None,
        confirm: None,
@@ -1514,7 +1518,6 @@ impl CodeActionsMenu {
    this.child(
        h_flex()
            .overflow_hidden()
            .when(is_quick_action_bar, |this| this.text_ui(cx))
            .child(task.resolved_label.replace("\n", ""))
            .when(selected, |this| {
                this.text_color(colors.text_accent)
@@ -1525,7 +1528,6 @@ impl CodeActionsMenu {
    this.child(
        h_flex()
            .overflow_hidden()
            .when(is_quick_action_bar, |this| this.text_ui(cx))
            .child("debug: ")
            .child(scenario.label.clone())
            .when(selected, |this| {
|
||||
|
||||
@@ -689,7 +689,6 @@ impl BlockMap {
|
||||
|
||||
// For each of these blocks, insert a new isomorphic transform preceding the block,
|
||||
// and then insert the block itself.
|
||||
let mut just_processed_folded_buffer = false;
|
||||
for (block_placement, block) in blocks_in_edit.drain(..) {
|
||||
let mut summary = TransformSummary {
|
||||
input_rows: 0,
|
||||
@@ -702,12 +701,8 @@ impl BlockMap {
|
||||
match block_placement {
|
||||
BlockPlacement::Above(position) => {
|
||||
rows_before_block = position.0 - new_transforms.summary().input_rows;
|
||||
just_processed_folded_buffer = false;
|
||||
}
|
||||
BlockPlacement::Near(position) | BlockPlacement::Below(position) => {
|
||||
if just_processed_folded_buffer {
|
||||
continue;
|
||||
}
|
||||
if position.0 + 1 < new_transforms.summary().input_rows {
|
||||
continue;
|
||||
}
|
||||
@@ -716,7 +711,6 @@ impl BlockMap {
|
||||
BlockPlacement::Replace(range) => {
|
||||
rows_before_block = range.start().0 - new_transforms.summary().input_rows;
|
||||
summary.input_rows = range.end().0 - range.start().0 + 1;
|
||||
just_processed_folded_buffer = matches!(block, Block::FoldedBuffer { .. });
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3572,96 +3566,6 @@ mod tests {
|
||||
assert_eq!(blocks_snapshot.text(), "abc\n\ndef\nghi\njkl\nmno");
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_folded_buffer_with_near_blocks(cx: &mut gpui::TestAppContext) {
|
||||
cx.update(init_test);
|
||||
|
||||
let text = "line 1\nline 2\nline 3";
|
||||
let buffer = cx.update(|cx| {
|
||||
MultiBuffer::build_multi([(text, vec![Point::new(0, 0)..Point::new(2, 6)])], cx)
|
||||
});
|
||||
let buffer_snapshot = cx.update(|cx| buffer.read(cx).snapshot(cx));
|
||||
let buffer_ids = buffer_snapshot
|
||||
.excerpts()
|
||||
.map(|(_, buffer_snapshot, _)| buffer_snapshot.remote_id())
|
||||
.dedup()
|
||||
.collect::<Vec<_>>();
|
||||
assert_eq!(buffer_ids.len(), 1);
|
||||
let buffer_id = buffer_ids[0];
|
||||
|
||||
let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
|
||||
let (_, fold_snapshot) = FoldMap::new(inlay_snapshot);
|
||||
let (_, tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap());
|
||||
let (_, wrap_snapshot) =
|
||||
cx.update(|cx| WrapMap::new(tab_snapshot, font("Helvetica"), px(14.0), None, cx));
|
||||
let mut block_map = BlockMap::new(wrap_snapshot.clone(), 1, 1);
|
||||
|
||||
let mut writer = block_map.write(wrap_snapshot.clone(), Patch::default());
|
||||
writer.insert(vec![BlockProperties {
|
||||
style: BlockStyle::Fixed,
|
||||
placement: BlockPlacement::Near(buffer_snapshot.anchor_after(Point::new(0, 0))),
|
||||
height: Some(1),
|
||||
render: Arc::new(|_| div().into_any()),
|
||||
priority: 0,
|
||||
}]);
|
||||
|
||||
let blocks_snapshot = block_map.read(wrap_snapshot.clone(), Patch::default());
|
||||
assert_eq!(blocks_snapshot.text(), "\nline 1\n\nline 2\nline 3");
|
||||
|
||||
let mut writer = block_map.write(wrap_snapshot.clone(), Patch::default());
|
||||
buffer.read_with(cx, |buffer, cx| {
|
||||
writer.fold_buffers([buffer_id], buffer, cx);
|
||||
});
|
||||
|
||||
let blocks_snapshot = block_map.read(wrap_snapshot, Patch::default());
|
||||
assert_eq!(blocks_snapshot.text(), "");
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_folded_buffer_with_near_blocks_on_last_line(cx: &mut gpui::TestAppContext) {
|
||||
cx.update(init_test);
|
||||
|
||||
let text = "line 1\nline 2\nline 3\nline 4";
|
||||
let buffer = cx.update(|cx| {
|
||||
MultiBuffer::build_multi([(text, vec![Point::new(0, 0)..Point::new(3, 6)])], cx)
|
||||
});
|
||||
let buffer_snapshot = cx.update(|cx| buffer.read(cx).snapshot(cx));
|
||||
let buffer_ids = buffer_snapshot
|
||||
.excerpts()
|
||||
.map(|(_, buffer_snapshot, _)| buffer_snapshot.remote_id())
|
||||
.dedup()
|
||||
.collect::<Vec<_>>();
|
||||
assert_eq!(buffer_ids.len(), 1);
|
||||
let buffer_id = buffer_ids[0];
|
||||
|
||||
let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
|
||||
let (_, fold_snapshot) = FoldMap::new(inlay_snapshot);
|
||||
let (_, tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap());
|
||||
let (_, wrap_snapshot) =
|
||||
cx.update(|cx| WrapMap::new(tab_snapshot, font("Helvetica"), px(14.0), None, cx));
|
||||
let mut block_map = BlockMap::new(wrap_snapshot.clone(), 1, 1);
|
||||
|
||||
let mut writer = block_map.write(wrap_snapshot.clone(), Patch::default());
|
||||
writer.insert(vec![BlockProperties {
|
||||
style: BlockStyle::Fixed,
|
||||
placement: BlockPlacement::Near(buffer_snapshot.anchor_after(Point::new(3, 6))),
|
||||
height: Some(1),
|
||||
render: Arc::new(|_| div().into_any()),
|
||||
priority: 0,
|
||||
}]);
|
||||
|
||||
let blocks_snapshot = block_map.read(wrap_snapshot.clone(), Patch::default());
|
||||
assert_eq!(blocks_snapshot.text(), "\nline 1\nline 2\nline 3\nline 4\n");
|
||||
|
||||
let mut writer = block_map.write(wrap_snapshot.clone(), Patch::default());
|
||||
buffer.read_with(cx, |buffer, cx| {
|
||||
writer.fold_buffers([buffer_id], buffer, cx);
|
||||
});
|
||||
|
||||
let blocks_snapshot = block_map.read(wrap_snapshot, Patch::default());
|
||||
assert_eq!(blocks_snapshot.text(), "");
|
||||
}
|
||||
|
||||
fn init_test(cx: &mut gpui::App) {
|
||||
let settings = SettingsStore::test(cx);
|
||||
cx.set_global(settings);
|
||||
|
||||
@@ -226,7 +226,6 @@ pub const SELECTION_HIGHLIGHT_DEBOUNCE_TIMEOUT: Duration = Duration::from_millis
pub(crate) const CODE_ACTION_TIMEOUT: Duration = Duration::from_secs(5);
pub(crate) const FORMAT_TIMEOUT: Duration = Duration::from_secs(5);
pub(crate) const SCROLL_CENTER_TOP_BOTTOM_DEBOUNCE_TIMEOUT: Duration = Duration::from_secs(1);
pub const FETCH_COLORS_DEBOUNCE_TIMEOUT: Duration = Duration::from_millis(150);

pub(crate) const EDIT_PREDICTION_KEY_CONTEXT: &str = "edit_prediction";
pub(crate) const EDIT_PREDICTION_CONFLICT_KEY_CONTEXT: &str = "edit_prediction_conflict";
@@ -1190,7 +1189,6 @@ pub struct Editor {
    inline_value_cache: InlineValueCache,
    selection_drag_state: SelectionDragState,
    colors: Option<LspColorData>,
    refresh_colors_task: Task<()>,
    folding_newlines: Task<()>,
    pub lookup_key: Option<Box<dyn Any + Send + Sync>>,
}
|
||||
@@ -2246,7 +2244,6 @@ impl Editor {
|
||||
tasks_update_task: None,
|
||||
pull_diagnostics_task: Task::ready(()),
|
||||
colors: None,
|
||||
refresh_colors_task: Task::ready(()),
|
||||
next_color_inlay_id: 0,
|
||||
linked_edit_ranges: Default::default(),
|
||||
in_project_search: false,
|
||||
@@ -3175,7 +3172,7 @@ impl Editor {
|
||||
self.refresh_code_actions(window, cx);
|
||||
self.refresh_document_highlights(cx);
|
||||
self.refresh_selected_text_highlights(false, window, cx);
|
||||
refresh_matching_bracket_highlights(self, cx);
|
||||
refresh_matching_bracket_highlights(self, window, cx);
|
||||
self.update_visible_edit_prediction(window, cx);
|
||||
self.edit_prediction_requires_modifier_in_indent_conflict = true;
|
||||
linked_editing_ranges::refresh_linked_ranges(self, window, cx);
|
||||
@@ -3514,46 +3511,26 @@ impl Editor {
|
||||
) {
|
||||
let display_map = self.display_map.update(cx, |map, cx| map.snapshot(cx));
|
||||
let tail = self.selections.newest::<usize>(cx).tail();
|
||||
let click_count = click_count.max(match self.selections.select_mode() {
|
||||
SelectMode::Character => 1,
|
||||
SelectMode::Word(_) => 2,
|
||||
SelectMode::Line(_) => 3,
|
||||
SelectMode::All => 4,
|
||||
});
|
||||
self.begin_selection(position, false, click_count, window, cx);
|
||||
|
||||
let position = position.to_offset(&display_map, Bias::Left);
|
||||
let tail_anchor = display_map.buffer_snapshot().anchor_before(tail);
|
||||
|
||||
let current_selection = match self.selections.select_mode() {
|
||||
SelectMode::Character | SelectMode::All => tail_anchor..tail_anchor,
|
||||
SelectMode::Word(range) | SelectMode::Line(range) => range.clone(),
|
||||
};
|
||||
|
||||
let mut pending_selection = self
|
||||
.selections
|
||||
.pending_anchor()
|
||||
.cloned()
|
||||
.expect("extend_selection not called with pending selection");
|
||||
|
||||
if pending_selection
|
||||
.start
|
||||
.cmp(¤t_selection.start, display_map.buffer_snapshot())
|
||||
== Ordering::Greater
|
||||
{
|
||||
pending_selection.start = current_selection.start;
|
||||
}
|
||||
if pending_selection
|
||||
.end
|
||||
.cmp(¤t_selection.end, display_map.buffer_snapshot())
|
||||
== Ordering::Less
|
||||
{
|
||||
pending_selection.end = current_selection.end;
|
||||
if position >= tail {
|
||||
pending_selection.start = tail_anchor;
|
||||
} else {
|
||||
pending_selection.end = tail_anchor;
|
||||
pending_selection.reversed = true;
|
||||
}
|
||||
|
||||
let mut pending_mode = self.selections.pending_mode().unwrap();
|
||||
match &mut pending_mode {
|
||||
SelectMode::Word(range) | SelectMode::Line(range) => *range = current_selection,
|
||||
SelectMode::Word(range) | SelectMode::Line(range) => *range = tail_anchor..tail_anchor,
|
||||
_ => {}
|
||||
}
|
||||
|
||||
@@ -3564,8 +3541,7 @@ impl Editor {
|
||||
};
|
||||
|
||||
self.change_selections(effects, window, cx, |s| {
|
||||
s.set_pending(pending_selection.clone(), pending_mode);
|
||||
s.set_is_extending(true);
|
||||
s.set_pending(pending_selection.clone(), pending_mode)
|
||||
});
|
||||
}
|
||||
|
||||
@@ -3834,16 +3810,11 @@ impl Editor {
|
||||
|
||||
fn end_selection(&mut self, window: &mut Window, cx: &mut Context<Self>) {
|
||||
self.columnar_selection_state.take();
|
||||
if let Some(pending_mode) = self.selections.pending_mode() {
|
||||
if self.selections.pending_anchor().is_some() {
|
||||
let selections = self.selections.all::<usize>(cx);
|
||||
self.change_selections(SelectionEffects::no_scroll(), window, cx, |s| {
|
||||
s.select(selections);
|
||||
s.clear_pending();
|
||||
if s.is_extending() {
|
||||
s.set_is_extending(false);
|
||||
} else {
|
||||
s.set_select_mode(pending_mode);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -5243,7 +5214,15 @@ impl Editor {
|
||||
if enabled {
|
||||
(InvalidationStrategy::RefreshRequested, None)
|
||||
} else {
|
||||
self.clear_inlay_hints(cx);
|
||||
self.splice_inlays(
|
||||
&self
|
||||
.visible_inlay_hints(cx)
|
||||
.iter()
|
||||
.map(|inlay| inlay.id)
|
||||
.collect::<Vec<InlayId>>(),
|
||||
Vec::new(),
|
||||
cx,
|
||||
);
|
||||
return;
|
||||
}
|
||||
}
|
||||
@@ -5255,7 +5234,15 @@ impl Editor {
|
||||
if enabled {
|
||||
(InvalidationStrategy::RefreshRequested, None)
|
||||
} else {
|
||||
self.clear_inlay_hints(cx);
|
||||
self.splice_inlays(
|
||||
&self
|
||||
.visible_inlay_hints(cx)
|
||||
.iter()
|
||||
.map(|inlay| inlay.id)
|
||||
.collect::<Vec<InlayId>>(),
|
||||
Vec::new(),
|
||||
cx,
|
||||
);
|
||||
return;
|
||||
}
|
||||
} else {
|
||||
@@ -5266,7 +5253,7 @@ impl Editor {
|
||||
match self.inlay_hint_cache.update_settings(
|
||||
&self.buffer,
|
||||
new_settings,
|
||||
self.visible_inlay_hints(cx).cloned().collect::<Vec<_>>(),
|
||||
self.visible_inlay_hints(cx),
|
||||
cx,
|
||||
) {
|
||||
ControlFlow::Break(Some(InlaySplice {
|
||||
@@ -5316,25 +5303,13 @@ impl Editor {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn clear_inlay_hints(&self, cx: &mut Context<Editor>) {
|
||||
self.splice_inlays(
|
||||
&self
|
||||
.visible_inlay_hints(cx)
|
||||
.map(|inlay| inlay.id)
|
||||
.collect::<Vec<_>>(),
|
||||
Vec::new(),
|
||||
cx,
|
||||
);
|
||||
}
|
||||
|
||||
fn visible_inlay_hints<'a>(
|
||||
&'a self,
|
||||
cx: &'a Context<Editor>,
|
||||
) -> impl Iterator<Item = &'a Inlay> {
|
||||
fn visible_inlay_hints(&self, cx: &Context<Editor>) -> Vec<Inlay> {
|
||||
self.display_map
|
||||
.read(cx)
|
||||
.current_inlays()
|
||||
.filter(move |inlay| matches!(inlay.id, InlayId::Hint(_)))
|
||||
.cloned()
|
||||
.collect()
|
||||
}
|
||||
|
||||
pub fn visible_excerpts(
|
||||
@@ -5368,7 +5343,7 @@ impl Editor {
|
||||
let buffer_worktree = project.worktree_for_id(buffer_file.worktree_id(cx), cx)?;
|
||||
let worktree_entry = buffer_worktree
|
||||
.read(cx)
|
||||
.entry_for_id(buffer_file.project_entry_id()?)?;
|
||||
.entry_for_id(buffer_file.project_entry_id(cx)?)?;
|
||||
if worktree_entry.is_ignored {
|
||||
return None;
|
||||
}
|
||||
@@ -6632,32 +6607,26 @@ impl Editor {
|
||||
&self.context_menu
|
||||
}
|
||||
|
||||
fn refresh_code_actions(&mut self, window: &mut Window, cx: &mut Context<Self>) {
|
||||
fn refresh_code_actions(&mut self, window: &mut Window, cx: &mut Context<Self>) -> Option<()> {
|
||||
let newest_selection = self.selections.newest_anchor().clone();
|
||||
let newest_selection_adjusted = self.selections.newest_adjusted(cx);
|
||||
let buffer = self.buffer.read(cx);
|
||||
if newest_selection.head().diff_base_anchor.is_some() {
|
||||
return None;
|
||||
}
|
||||
let (start_buffer, start) =
|
||||
buffer.text_anchor_for_position(newest_selection_adjusted.start, cx)?;
|
||||
let (end_buffer, end) =
|
||||
buffer.text_anchor_for_position(newest_selection_adjusted.end, cx)?;
|
||||
if start_buffer != end_buffer {
|
||||
return None;
|
||||
}
|
||||
|
||||
self.code_actions_task = Some(cx.spawn_in(window, async move |this, cx| {
|
||||
cx.background_executor()
|
||||
.timer(CODE_ACTIONS_DEBOUNCE_TIMEOUT)
|
||||
.await;
|
||||
|
||||
let (start_buffer, start, _, end, newest_selection) = this
|
||||
.update(cx, |this, cx| {
|
||||
let newest_selection = this.selections.newest_anchor().clone();
|
||||
if newest_selection.head().diff_base_anchor.is_some() {
|
||||
return None;
|
||||
}
|
||||
let newest_selection_adjusted = this.selections.newest_adjusted(cx);
|
||||
let buffer = this.buffer.read(cx);
|
||||
|
||||
let (start_buffer, start) =
|
||||
buffer.text_anchor_for_position(newest_selection_adjusted.start, cx)?;
|
||||
let (end_buffer, end) =
|
||||
buffer.text_anchor_for_position(newest_selection_adjusted.end, cx)?;
|
||||
|
||||
Some((start_buffer, start, end_buffer, end, newest_selection))
|
||||
})?
|
||||
.filter(|(start_buffer, _, end_buffer, _, _)| start_buffer == end_buffer)
|
||||
.context(
|
||||
"Expected selection to lie in a single buffer when refreshing code actions",
|
||||
)?;
|
||||
let (providers, tasks) = this.update_in(cx, |this, window, cx| {
|
||||
let providers = this.code_action_providers.clone();
|
||||
let tasks = this
|
||||
@@ -6698,6 +6667,7 @@ impl Editor {
|
||||
cx.notify();
|
||||
})
|
||||
}));
|
||||
None
|
||||
}
|
||||
|
||||
fn start_inline_blame_timer(&mut self, window: &mut Window, cx: &mut Context<Self>) {
|
||||
@@ -6947,24 +6917,19 @@ impl Editor {
|
||||
if self.selections.count() != 1 || self.selections.line_mode() {
|
||||
return None;
|
||||
}
|
||||
let selection = self.selections.newest_anchor();
|
||||
let multi_buffer_snapshot = self.buffer().read(cx).snapshot(cx);
|
||||
let selection_point_range = selection.start.to_point(&multi_buffer_snapshot)
|
||||
..selection.end.to_point(&multi_buffer_snapshot);
|
||||
// If the selection spans multiple rows OR it is empty
|
||||
if selection_point_range.start.row != selection_point_range.end.row
|
||||
|| selection_point_range.start.column == selection_point_range.end.column
|
||||
{
|
||||
let selection = self.selections.newest::<Point>(cx);
|
||||
if selection.is_empty() || selection.start.row != selection.end.row {
|
||||
return None;
|
||||
}
|
||||
|
||||
let multi_buffer_snapshot = self.buffer().read(cx).snapshot(cx);
|
||||
let selection_anchor_range = selection.range().to_anchors(&multi_buffer_snapshot);
|
||||
let query = multi_buffer_snapshot
|
||||
.text_for_range(selection.range())
|
||||
.text_for_range(selection_anchor_range.clone())
|
||||
.collect::<String>();
|
||||
if query.trim().is_empty() {
|
||||
return None;
|
||||
}
|
||||
Some((query, selection.range()))
|
||||
Some((query, selection_anchor_range))
|
||||
}
|
||||
|
||||
fn update_selection_occurrence_highlights(
|
||||
@@ -7001,7 +6966,6 @@ impl Editor {
|
||||
) else {
|
||||
return Vec::default();
|
||||
};
|
||||
let query_range = query_range.to_anchors(&multi_buffer_snapshot);
|
||||
for (buffer_snapshot, search_range, excerpt_id) in buffer_ranges {
|
||||
match_ranges.extend(
|
||||
regex
|
||||
@@ -10496,33 +10460,29 @@ impl Editor {
|
||||
|
||||
let buffer = display_map.buffer_snapshot();
|
||||
let mut edit_start = ToOffset::to_offset(&Point::new(rows.start.0, 0), buffer);
|
||||
let (edit_end, target_row) = if buffer.max_point().row >= rows.end.0 {
|
||||
let edit_end = if buffer.max_point().row >= rows.end.0 {
|
||||
// If there's a line after the range, delete the \n from the end of the row range
|
||||
(
|
||||
ToOffset::to_offset(&Point::new(rows.end.0, 0), buffer),
|
||||
rows.end,
|
||||
)
|
||||
ToOffset::to_offset(&Point::new(rows.end.0, 0), buffer)
|
||||
} else {
|
||||
// If there isn't a line after the range, delete the \n from the line before the
|
||||
// start of the row range
|
||||
edit_start = edit_start.saturating_sub(1);
|
||||
(buffer.len(), rows.start.previous_row())
|
||||
buffer.len()
|
||||
};
|
||||
|
||||
let text_layout_details = self.text_layout_details(window);
|
||||
let x = display_map.x_for_display_point(
|
||||
let (cursor, goal) = movement::down_by_rows(
|
||||
&display_map,
|
||||
selection.head().to_display_point(&display_map),
|
||||
&text_layout_details,
|
||||
rows.len() as u32,
|
||||
selection.goal,
|
||||
false,
|
||||
&self.text_layout_details(window),
|
||||
);
|
||||
let row = Point::new(target_row.0, 0)
|
||||
.to_display_point(&display_map)
|
||||
.row();
|
||||
let column = display_map.display_column_for_x(row, x, &text_layout_details);
|
||||
|
||||
new_cursors.push((
|
||||
selection.id,
|
||||
buffer.anchor_after(DisplayPoint::new(row, column).to_point(&display_map)),
|
||||
SelectionGoal::None,
|
||||
buffer.anchor_after(cursor.to_point(&display_map)),
|
||||
goal,
|
||||
));
|
||||
edit_ranges.push(edit_start..edit_end);
|
||||
}
|
||||
@@ -11727,26 +11687,13 @@ impl Editor {
|
||||
rows.end.previous_row().0,
|
||||
buffer.line_len(rows.end.previous_row()),
|
||||
);
|
||||
|
||||
let mut text = buffer.text_for_range(start..end).collect::<String>();
|
||||
|
||||
let text = buffer
|
||||
.text_for_range(start..end)
|
||||
.chain(Some("\n"))
|
||||
.collect::<String>();
|
||||
let insert_location = if upwards {
|
||||
// When duplicating upward, we need to insert before the current line.
|
||||
// If we're on the last line and it doesn't end with a newline,
|
||||
// we need to add a newline before the duplicated content.
|
||||
let needs_leading_newline = rows.end.0 >= buffer.max_point().row
|
||||
&& buffer.max_point().column > 0
|
||||
&& !text.ends_with('\n');
|
||||
|
||||
if needs_leading_newline {
|
||||
text.insert(0, '\n');
|
||||
end
|
||||
} else {
|
||||
text.push('\n');
|
||||
Point::new(rows.end.0, 0)
|
||||
}
|
||||
Point::new(rows.end.0, 0)
|
||||
} else {
|
||||
text.push('\n');
|
||||
start
|
||||
};
|
||||
edits.push((insert_location..insert_location, text));
|
||||
@@ -12556,18 +12503,9 @@ impl Editor {
|
||||
let mut start = selection.start;
|
||||
let mut end = selection.end;
|
||||
let is_entire_line = selection.is_empty() || self.selections.line_mode();
|
||||
let mut add_trailing_newline = false;
|
||||
if is_entire_line {
|
||||
start = Point::new(start.row, 0);
|
||||
let next_line_start = Point::new(end.row + 1, 0);
|
||||
if next_line_start <= max_point {
|
||||
end = next_line_start;
|
||||
} else {
|
||||
// We're on the last line without a trailing newline.
|
||||
// Copy to the end of the line and add a newline afterwards.
|
||||
end = Point::new(end.row, buffer.line_len(MultiBufferRow(end.row)));
|
||||
add_trailing_newline = true;
|
||||
}
|
||||
end = cmp::min(max_point, Point::new(end.row + 1, 0));
|
||||
}
|
||||
|
||||
let mut trimmed_selections = Vec::new();
|
||||
@@ -12618,10 +12556,6 @@ impl Editor {
|
||||
text.push_str(chunk);
|
||||
len += chunk.len();
|
||||
}
|
||||
if add_trailing_newline {
|
||||
text.push('\n');
|
||||
len += 1;
|
||||
}
|
||||
clipboard_selections.push(ClipboardSelection {
|
||||
len,
|
||||
is_entire_line,
|
||||
@@ -14449,10 +14383,6 @@ impl Editor {
|
||||
let last_selection = selections.iter().max_by_key(|s| s.id).unwrap();
|
||||
let mut next_selected_range = None;
|
||||
|
||||
// Collect and sort selection ranges for efficient overlap checking
|
||||
let mut selection_ranges: Vec<_> = selections.iter().map(|s| s.range()).collect();
|
||||
selection_ranges.sort_by_key(|r| r.start);
|
||||
|
||||
let bytes_after_last_selection =
|
||||
buffer.bytes_in_range(last_selection.end..buffer.len());
|
||||
let bytes_before_first_selection = buffer.bytes_in_range(0..first_selection.start);
|
||||
@@ -14474,20 +14404,11 @@ impl Editor {
|
||||
|| (!buffer.is_inside_word(offset_range.start, None)
|
||||
&& !buffer.is_inside_word(offset_range.end, None))
|
||||
{
|
||||
// Use binary search to check for overlap (O(log n))
|
||||
let overlaps = selection_ranges
|
||||
.binary_search_by(|range| {
|
||||
if range.end <= offset_range.start {
|
||||
std::cmp::Ordering::Less
|
||||
} else if range.start >= offset_range.end {
|
||||
std::cmp::Ordering::Greater
|
||||
} else {
|
||||
std::cmp::Ordering::Equal
|
||||
}
|
||||
})
|
||||
.is_ok();
|
||||
|
||||
if !overlaps {
|
||||
// TODO: This is n^2, because we might check all the selections
|
||||
if !selections
|
||||
.iter()
|
||||
.any(|selection| selection.range().overlaps(&offset_range))
|
||||
{
|
||||
next_selected_range = Some(offset_range);
|
||||
break;
|
||||
}
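The select-next hunk above replaces the per-candidate linear scan over selections (the old TODO called it n²) with one sorted `Vec` of ranges and a `binary_search_by` whose comparator reports `Equal` on any overlap, so each candidate costs O(log n). The same predicate in isolation (standalone function with illustrative names):

```rust
use std::ops::Range;

/// Returns true if `probe` overlaps any of the `sorted` ranges.
/// Assumes `sorted` is sorted by start and contains no overlapping entries.
fn overlaps_any(sorted: &[Range<usize>], probe: &Range<usize>) -> bool {
    sorted
        .binary_search_by(|range| {
            if range.end <= probe.start {
                std::cmp::Ordering::Less
            } else if range.start >= probe.end {
                std::cmp::Ordering::Greater
            } else {
                // Neither strictly before nor strictly after: it overlaps.
                std::cmp::Ordering::Equal
            }
        })
        .is_ok()
}

fn main() {
    let selections = vec![0..3, 10..14, 20..25];
    assert!(overlaps_any(&selections, &(12..13)));
    assert!(!overlaps_any(&selections, &(3..10))); // touching endpoints don't overlap
    assert!(overlaps_any(&selections, &(24..30)));
}
```

Keeping the stored ranges disjoint (which the `select` hunk later in this diff enforces by merging overlaps) is what makes this comparator consistent enough for binary search.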
@@ -18249,87 +18170,6 @@ impl Editor {
|
||||
self.fold_creases(to_fold, true, window, cx);
|
||||
}
|
||||
|
||||
pub fn fold_at_level_1(
|
||||
&mut self,
|
||||
_: &actions::FoldAtLevel1,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.fold_at_level(&actions::FoldAtLevel(1), window, cx);
|
||||
}
|
||||
|
||||
pub fn fold_at_level_2(
|
||||
&mut self,
|
||||
_: &actions::FoldAtLevel2,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.fold_at_level(&actions::FoldAtLevel(2), window, cx);
|
||||
}
|
||||
|
||||
pub fn fold_at_level_3(
|
||||
&mut self,
|
||||
_: &actions::FoldAtLevel3,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.fold_at_level(&actions::FoldAtLevel(3), window, cx);
|
||||
}
|
||||
|
||||
pub fn fold_at_level_4(
|
||||
&mut self,
|
||||
_: &actions::FoldAtLevel4,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.fold_at_level(&actions::FoldAtLevel(4), window, cx);
|
||||
}
|
||||
|
||||
pub fn fold_at_level_5(
|
||||
&mut self,
|
||||
_: &actions::FoldAtLevel5,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.fold_at_level(&actions::FoldAtLevel(5), window, cx);
|
||||
}
|
||||
|
||||
pub fn fold_at_level_6(
|
||||
&mut self,
|
||||
_: &actions::FoldAtLevel6,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.fold_at_level(&actions::FoldAtLevel(6), window, cx);
|
||||
}
|
||||
|
||||
pub fn fold_at_level_7(
|
||||
&mut self,
|
||||
_: &actions::FoldAtLevel7,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.fold_at_level(&actions::FoldAtLevel(7), window, cx);
|
||||
}
|
||||
|
||||
pub fn fold_at_level_8(
|
||||
&mut self,
|
||||
_: &actions::FoldAtLevel8,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.fold_at_level(&actions::FoldAtLevel(8), window, cx);
|
||||
}
|
||||
|
||||
pub fn fold_at_level_9(
|
||||
&mut self,
|
||||
_: &actions::FoldAtLevel9,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.fold_at_level(&actions::FoldAtLevel(9), window, cx);
|
||||
}
|
||||
|
||||
pub fn fold_all(&mut self, _: &actions::FoldAll, window: &mut Window, cx: &mut Context<Self>) {
|
||||
if self.buffer.read(cx).is_singleton() {
|
||||
let mut fold_ranges = Vec::new();
|
||||
@@ -20858,7 +20698,7 @@ impl Editor {
|
||||
self.refresh_code_actions(window, cx);
|
||||
self.refresh_selected_text_highlights(true, window, cx);
|
||||
self.refresh_single_line_folds(window, cx);
|
||||
refresh_matching_bracket_highlights(self, cx);
|
||||
refresh_matching_bracket_highlights(self, window, cx);
|
||||
if self.has_active_edit_prediction() {
|
||||
self.update_visible_edit_prediction(window, cx);
|
||||
}
|
||||
@@ -23077,7 +22917,11 @@ fn snippet_completions(
|
||||
}),
|
||||
lsp_defaults: None,
|
||||
},
|
||||
label: CodeLabel::plain(matching_prefix.clone(), None),
|
||||
label: CodeLabel {
|
||||
text: matching_prefix.clone(),
|
||||
runs: Vec::new(),
|
||||
filter_range: 0..matching_prefix.len(),
|
||||
},
|
||||
icon_path: None,
|
||||
documentation: Some(CompletionDocumentation::SingleLineAndMultiLinePlainText {
|
||||
single_line: snippet.name.clone().into(),
|
||||
@@ -24684,7 +24528,7 @@ impl Render for MissingEditPredictionKeybindingTooltip {
|
||||
.items_end()
|
||||
.w_full()
|
||||
.child(Button::new("open-keymap", "Assign Keybinding").size(ButtonSize::Compact).on_click(|_ev, window, cx| {
|
||||
window.dispatch_action(zed_actions::OpenKeymapFile.boxed_clone(), cx)
|
||||
window.dispatch_action(zed_actions::OpenKeymap.boxed_clone(), cx)
|
||||
}))
|
||||
.child(Button::new("see-docs", "See Docs").size(ButtonSize::Compact).on_click(|_ev, _window, cx| {
|
||||
cx.open_url("https://zed.dev/docs/completions#edit-predictions-missing-keybinding");
|
||||
|
||||
@@ -267,7 +267,7 @@ impl Settings for EditorSettings {
|
||||
delay: drag_and_drop_selection.delay.unwrap(),
|
||||
},
|
||||
lsp_document_colors: editor.lsp_document_colors.unwrap(),
|
||||
minimum_contrast_for_highlights: editor.minimum_contrast_for_highlights.unwrap().0,
|
||||
minimum_contrast_for_highlights: editor.minimum_contrast_for_highlights.unwrap(),
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -27,6 +27,7 @@ use language::{
|
||||
LanguageConfigOverride, LanguageMatcher, LanguageName, Override, Point,
|
||||
language_settings::{
|
||||
CompletionSettingsContent, FormatterList, LanguageSettingsContent, LspInsertMode,
|
||||
SelectedFormatter,
|
||||
},
|
||||
tree_sitter_python,
|
||||
};
|
||||
@@ -618,93 +619,6 @@ fn test_movement_actions_with_pending_selection(cx: &mut TestAppContext) {
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_extending_selection(cx: &mut TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
|
||||
let editor = cx.add_window(|window, cx| {
|
||||
let buffer = MultiBuffer::build_simple("aaa bbb ccc ddd eee", cx);
|
||||
build_editor(buffer, window, cx)
|
||||
});
|
||||
|
||||
_ = editor.update(cx, |editor, window, cx| {
|
||||
editor.begin_selection(DisplayPoint::new(DisplayRow(0), 5), false, 1, window, cx);
|
||||
editor.end_selection(window, cx);
|
||||
assert_eq!(
|
||||
editor.selections.display_ranges(cx),
|
||||
[DisplayPoint::new(DisplayRow(0), 5)..DisplayPoint::new(DisplayRow(0), 5)]
|
||||
);
|
||||
|
||||
editor.extend_selection(DisplayPoint::new(DisplayRow(0), 10), 1, window, cx);
|
||||
editor.end_selection(window, cx);
|
||||
assert_eq!(
|
||||
editor.selections.display_ranges(cx),
|
||||
[DisplayPoint::new(DisplayRow(0), 5)..DisplayPoint::new(DisplayRow(0), 10)]
|
||||
);
|
||||
|
||||
editor.extend_selection(DisplayPoint::new(DisplayRow(0), 10), 1, window, cx);
|
||||
editor.end_selection(window, cx);
|
||||
editor.extend_selection(DisplayPoint::new(DisplayRow(0), 10), 2, window, cx);
|
||||
assert_eq!(
|
||||
editor.selections.display_ranges(cx),
|
||||
[DisplayPoint::new(DisplayRow(0), 5)..DisplayPoint::new(DisplayRow(0), 11)]
|
||||
);
|
||||
|
||||
editor.update_selection(
|
||||
DisplayPoint::new(DisplayRow(0), 1),
|
||||
0,
|
||||
gpui::Point::<f32>::default(),
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
editor.end_selection(window, cx);
|
||||
assert_eq!(
|
||||
editor.selections.display_ranges(cx),
|
||||
[DisplayPoint::new(DisplayRow(0), 5)..DisplayPoint::new(DisplayRow(0), 0)]
|
||||
);
|
||||
|
||||
editor.begin_selection(DisplayPoint::new(DisplayRow(0), 5), true, 1, window, cx);
|
||||
editor.end_selection(window, cx);
|
||||
editor.begin_selection(DisplayPoint::new(DisplayRow(0), 5), true, 2, window, cx);
|
||||
editor.end_selection(window, cx);
|
||||
assert_eq!(
|
||||
editor.selections.display_ranges(cx),
|
||||
[DisplayPoint::new(DisplayRow(0), 4)..DisplayPoint::new(DisplayRow(0), 7)]
|
||||
);
|
||||
|
||||
editor.extend_selection(DisplayPoint::new(DisplayRow(0), 10), 1, window, cx);
|
||||
assert_eq!(
|
||||
editor.selections.display_ranges(cx),
|
||||
[DisplayPoint::new(DisplayRow(0), 4)..DisplayPoint::new(DisplayRow(0), 11)]
|
||||
);
|
||||
|
||||
editor.update_selection(
|
||||
DisplayPoint::new(DisplayRow(0), 6),
|
||||
0,
|
||||
gpui::Point::<f32>::default(),
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
assert_eq!(
|
||||
editor.selections.display_ranges(cx),
|
||||
[DisplayPoint::new(DisplayRow(0), 4)..DisplayPoint::new(DisplayRow(0), 7)]
|
||||
);
|
||||
|
||||
editor.update_selection(
|
||||
DisplayPoint::new(DisplayRow(0), 1),
|
||||
0,
|
||||
gpui::Point::<f32>::default(),
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
editor.end_selection(window, cx);
|
||||
assert_eq!(
|
||||
editor.selections.display_ranges(cx),
|
||||
[DisplayPoint::new(DisplayRow(0), 7)..DisplayPoint::new(DisplayRow(0), 0)]
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_clone(cx: &mut TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
@@ -4386,8 +4300,8 @@ fn test_delete_line(cx: &mut TestAppContext) {
|
||||
assert_eq!(
|
||||
editor.selections.display_ranges(cx),
|
||||
vec![
|
||||
DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(0), 0),
|
||||
DisplayPoint::new(DisplayRow(0), 1)..DisplayPoint::new(DisplayRow(0), 1),
|
||||
DisplayPoint::new(DisplayRow(0), 3)..DisplayPoint::new(DisplayRow(0), 3),
|
||||
]
|
||||
);
|
||||
});
|
||||
@@ -4409,24 +4323,6 @@ fn test_delete_line(cx: &mut TestAppContext) {
|
||||
vec![DisplayPoint::new(DisplayRow(0), 1)..DisplayPoint::new(DisplayRow(0), 1)]
|
||||
);
|
||||
});
|
||||
|
||||
let editor = cx.add_window(|window, cx| {
|
||||
let buffer = MultiBuffer::build_simple("abc\ndef\nghi\n\njkl\nmno", cx);
|
||||
build_editor(buffer, window, cx)
|
||||
});
|
||||
_ = editor.update(cx, |editor, window, cx| {
|
||||
editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| {
|
||||
s.select_display_ranges([
|
||||
DisplayPoint::new(DisplayRow(0), 1)..DisplayPoint::new(DisplayRow(2), 1)
|
||||
])
|
||||
});
|
||||
editor.delete_line(&DeleteLine, window, cx);
|
||||
assert_eq!(editor.display_text(cx), "\njkl\nmno");
|
||||
assert_eq!(
|
||||
editor.selections.display_ranges(cx),
|
||||
vec![DisplayPoint::new(DisplayRow(0), 0)..DisplayPoint::new(DisplayRow(0), 0)]
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
@@ -11907,8 +11803,8 @@ async fn test_range_format_respects_language_tab_size_override(cx: &mut TestAppC
|
||||
#[gpui::test]
|
||||
async fn test_document_format_manual_trigger(cx: &mut TestAppContext) {
|
||||
init_test(cx, |settings| {
|
||||
settings.defaults.formatter = Some(FormatterList::Single(Formatter::LanguageServer(
|
||||
settings::LanguageServerFormatterSpecifier::Current,
|
||||
settings.defaults.formatter = Some(SelectedFormatter::List(FormatterList::Single(
|
||||
Formatter::LanguageServer { name: None },
|
||||
)))
|
||||
});
|
||||
|
||||
@@ -12033,11 +11929,11 @@ async fn test_document_format_manual_trigger(cx: &mut TestAppContext) {
|
||||
async fn test_multiple_formatters(cx: &mut TestAppContext) {
|
||||
init_test(cx, |settings| {
|
||||
settings.defaults.remove_trailing_whitespace_on_save = Some(true);
|
||||
settings.defaults.formatter = Some(FormatterList::Vec(vec![
|
||||
Formatter::LanguageServer(settings::LanguageServerFormatterSpecifier::Current),
|
||||
settings.defaults.formatter = Some(SelectedFormatter::List(FormatterList::Vec(vec![
|
||||
Formatter::LanguageServer { name: None },
|
||||
Formatter::CodeAction("code-action-1".into()),
|
||||
Formatter::CodeAction("code-action-2".into()),
|
||||
]))
|
||||
])))
|
||||
});
|
||||
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
@@ -12292,9 +12188,9 @@ async fn test_multiple_formatters(cx: &mut TestAppContext) {
|
||||
#[gpui::test]
|
||||
async fn test_organize_imports_manual_trigger(cx: &mut TestAppContext) {
|
||||
init_test(cx, |settings| {
|
||||
settings.defaults.formatter = Some(FormatterList::Vec(vec![Formatter::LanguageServer(
|
||||
settings::LanguageServerFormatterSpecifier::Current,
|
||||
)]))
|
||||
settings.defaults.formatter = Some(SelectedFormatter::List(FormatterList::Vec(vec![
|
||||
Formatter::LanguageServer { name: None },
|
||||
])))
|
||||
});
|
||||
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
@@ -12497,7 +12393,7 @@ async fn test_concurrent_format_requests(cx: &mut TestAppContext) {
|
||||
#[gpui::test]
|
||||
async fn test_strip_whitespace_and_format_via_lsp(cx: &mut TestAppContext) {
|
||||
init_test(cx, |settings| {
|
||||
settings.defaults.formatter = Some(FormatterList::default())
|
||||
settings.defaults.formatter = Some(SelectedFormatter::Auto)
|
||||
});
|
||||
|
||||
let mut cx = EditorLspTestContext::new_rust(
|
||||
@@ -12520,6 +12416,11 @@ async fn test_strip_whitespace_and_format_via_lsp(cx: &mut TestAppContext) {
|
||||
.join("\n"),
|
||||
);
|
||||
|
||||
// Submit a format request.
|
||||
let format = cx
|
||||
.update_editor(|editor, window, cx| editor.format(&Format, window, cx))
|
||||
.unwrap();
|
||||
|
||||
// Record which buffer changes have been sent to the language server
|
||||
let buffer_changes = Arc::new(Mutex::new(Vec::new()));
|
||||
cx.lsp
|
||||
@@ -12540,29 +12441,28 @@ async fn test_strip_whitespace_and_format_via_lsp(cx: &mut TestAppContext) {
|
||||
.set_request_handler::<lsp::request::Formatting, _, _>({
|
||||
let buffer_changes = buffer_changes.clone();
|
||||
move |_, _| {
|
||||
let buffer_changes = buffer_changes.clone();
|
||||
// When formatting is requested, trailing whitespace has already been stripped,
|
||||
// and the trailing newline has already been added.
|
||||
assert_eq!(
|
||||
&buffer_changes.lock()[1..],
|
||||
&[
|
||||
(
|
||||
lsp::Range::new(lsp::Position::new(0, 3), lsp::Position::new(0, 4)),
|
||||
"".into()
|
||||
),
|
||||
(
|
||||
lsp::Range::new(lsp::Position::new(2, 5), lsp::Position::new(2, 6)),
|
||||
"".into()
|
||||
),
|
||||
(
|
||||
lsp::Range::new(lsp::Position::new(3, 4), lsp::Position::new(3, 4)),
|
||||
"\n".into()
|
||||
),
|
||||
]
|
||||
);
|
||||
|
||||
// Insert blank lines between each line of the buffer.
|
||||
async move {
|
||||
// When formatting is requested, trailing whitespace has already been stripped,
|
||||
// and the trailing newline has already been added.
|
||||
assert_eq!(
|
||||
&buffer_changes.lock()[1..],
|
||||
&[
|
||||
(
|
||||
lsp::Range::new(lsp::Position::new(0, 3), lsp::Position::new(0, 4)),
|
||||
"".into()
|
||||
),
|
||||
(
|
||||
lsp::Range::new(lsp::Position::new(2, 5), lsp::Position::new(2, 6)),
|
||||
"".into()
|
||||
),
|
||||
(
|
||||
lsp::Range::new(lsp::Position::new(3, 4), lsp::Position::new(3, 4)),
|
||||
"\n".into()
|
||||
),
|
||||
]
|
||||
);
|
||||
|
||||
Ok(Some(vec![
|
||||
lsp::TextEdit {
|
||||
range: lsp::Range::new(
|
||||
@@ -12583,17 +12483,10 @@ async fn test_strip_whitespace_and_format_via_lsp(cx: &mut TestAppContext) {
|
||||
}
|
||||
});
|
||||
|
||||
// Submit a format request.
|
||||
let format = cx
|
||||
.update_editor(|editor, window, cx| editor.format(&Format, window, cx))
|
||||
.unwrap();
|
||||
|
||||
cx.run_until_parked();
|
||||
// After formatting the buffer, the trailing whitespace is stripped,
|
||||
// a newline is appended, and the edits provided by the language server
|
||||
// have been applied.
|
||||
format.await.unwrap();
|
||||
|
||||
cx.assert_editor_state(
|
||||
&[
|
||||
"one", //
|
||||
@@ -14878,7 +14771,12 @@ async fn test_multiline_completion(cx: &mut TestAppContext) {
|
||||
} else {
|
||||
item.label.clone()
|
||||
};
|
||||
Some(language::CodeLabel::plain(text, None))
|
||||
let len = text.len();
|
||||
Some(language::CodeLabel {
|
||||
text,
|
||||
runs: Vec::new(),
|
||||
filter_range: 0..len,
|
||||
})
|
||||
})),
|
||||
..FakeLspAdapter::default()
|
||||
},
|
||||
@@ -16617,7 +16515,7 @@ async fn test_following_with_multiple_excerpts(cx: &mut TestAppContext) {
|
||||
leader.update(cx, |leader, cx| {
|
||||
leader.buffer.update(cx, |multibuffer, cx| {
|
||||
multibuffer.set_excerpts_for_path(
|
||||
PathKey::with_sort_prefix(1, rel_path("b.txt").into_arc()),
|
||||
PathKey::namespaced(1, rel_path("b.txt").into_arc()),
|
||||
buffer_1.clone(),
|
||||
vec![
|
||||
Point::row_range(0..3),
|
||||
@@ -16628,7 +16526,7 @@ async fn test_following_with_multiple_excerpts(cx: &mut TestAppContext) {
|
||||
cx,
|
||||
);
|
||||
multibuffer.set_excerpts_for_path(
|
||||
PathKey::with_sort_prefix(1, rel_path("a.txt").into_arc()),
|
||||
PathKey::namespaced(1, rel_path("a.txt").into_arc()),
|
||||
buffer_2.clone(),
|
||||
vec![Point::row_range(0..6), Point::row_range(8..12)],
|
||||
0,
|
||||
@@ -18181,7 +18079,9 @@ fn completion_menu_entries(menu: &CompletionsMenu) -> Vec<String> {
|
||||
#[gpui::test]
|
||||
async fn test_document_format_with_prettier(cx: &mut TestAppContext) {
|
||||
init_test(cx, |settings| {
|
||||
settings.defaults.formatter = Some(FormatterList::Single(Formatter::Prettier))
|
||||
settings.defaults.formatter = Some(SelectedFormatter::List(FormatterList::Single(
|
||||
Formatter::Prettier,
|
||||
)))
|
||||
});
|
||||
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
@@ -18248,7 +18148,7 @@ async fn test_document_format_with_prettier(cx: &mut TestAppContext) {
|
||||
);
|
||||
|
||||
update_test_language_settings(cx, |settings| {
|
||||
settings.defaults.formatter = Some(FormatterList::default())
|
||||
settings.defaults.formatter = Some(SelectedFormatter::Auto)
|
||||
});
|
||||
let format = editor.update_in(cx, |editor, window, cx| {
|
||||
editor.perform_format(
|
||||
@@ -21129,7 +21029,7 @@ async fn test_display_diff_hunks(cx: &mut TestAppContext) {
|
||||
for buffer in &buffers {
|
||||
let snapshot = buffer.read(cx).snapshot();
|
||||
multibuffer.set_excerpts_for_path(
|
||||
PathKey::with_sort_prefix(0, buffer.read(cx).file().unwrap().path().clone()),
|
||||
PathKey::namespaced(0, buffer.read(cx).file().unwrap().path().clone()),
|
||||
buffer.clone(),
|
||||
vec![text::Anchor::MIN.to_point(&snapshot)..text::Anchor::MAX.to_point(&snapshot)],
|
||||
2,
|
||||
@@ -25803,7 +25703,7 @@ async fn test_document_colors(cx: &mut TestAppContext) {
|
||||
.set_request_handler::<lsp::request::DocumentColor, _, _>(move |_, _| async move {
|
||||
panic!("Should not be called");
|
||||
});
|
||||
cx.executor().advance_clock(FETCH_COLORS_DEBOUNCE_TIMEOUT);
|
||||
cx.executor().advance_clock(Duration::from_millis(100));
|
||||
color_request_handle.next().await.unwrap();
|
||||
cx.run_until_parked();
|
||||
assert_eq!(
|
||||
@@ -25887,9 +25787,9 @@ async fn test_document_colors(cx: &mut TestAppContext) {
|
||||
color_request_handle.next().await.unwrap();
|
||||
cx.run_until_parked();
|
||||
assert_eq!(
|
||||
2,
|
||||
3,
|
||||
requests_made.load(atomic::Ordering::Acquire),
|
||||
"Should query for colors once per save (deduplicated) and once per formatting after save"
|
||||
"Should query for colors once per save and once per formatting after save"
|
||||
);
|
||||
|
||||
drop(editor);
|
||||
@@ -25910,7 +25810,7 @@ async fn test_document_colors(cx: &mut TestAppContext) {
|
||||
.unwrap();
|
||||
close.await.unwrap();
|
||||
assert_eq!(
|
||||
2,
|
||||
3,
|
||||
requests_made.load(atomic::Ordering::Acquire),
|
||||
"After saving and closing all editors, no extra requests should be made"
|
||||
);
|
||||
@@ -25930,7 +25830,7 @@ async fn test_document_colors(cx: &mut TestAppContext) {
|
||||
})
|
||||
})
|
||||
.unwrap();
|
||||
cx.executor().advance_clock(FETCH_COLORS_DEBOUNCE_TIMEOUT);
|
||||
cx.executor().advance_clock(Duration::from_millis(100));
|
||||
cx.run_until_parked();
|
||||
let editor = workspace
|
||||
.update(cx, |workspace, _, cx| {
|
||||
@@ -25941,9 +25841,9 @@ async fn test_document_colors(cx: &mut TestAppContext) {
|
||||
.expect("Should be an editor")
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
color_request_handle.next().await.unwrap();
|
||||
assert_eq!(
|
||||
2,
|
||||
3,
|
||||
requests_made.load(atomic::Ordering::Acquire),
|
||||
"Cache should be reused on buffer close and reopen"
|
||||
);
|
||||
@@ -25984,11 +25884,10 @@ async fn test_document_colors(cx: &mut TestAppContext) {
|
||||
});
|
||||
save.await.unwrap();
|
||||
|
||||
cx.executor().advance_clock(FETCH_COLORS_DEBOUNCE_TIMEOUT);
|
||||
empty_color_request_handle.next().await.unwrap();
|
||||
cx.run_until_parked();
|
||||
assert_eq!(
|
||||
3,
|
||||
4,
|
||||
requests_made.load(atomic::Ordering::Acquire),
|
||||
"Should query for colors once per save only, as formatting was not requested"
|
||||
);
|
||||
@@ -26576,64 +26475,3 @@ fn extract_color_inlays(editor: &Editor, cx: &App) -> Vec<Rgba> {
|
||||
.map(Rgba::from)
|
||||
.collect()
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_duplicate_line_up_on_last_line_without_newline(cx: &mut TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
|
||||
let editor = cx.add_window(|window, cx| {
|
||||
let buffer = MultiBuffer::build_simple("line1\nline2", cx);
|
||||
build_editor(buffer, window, cx)
|
||||
});
|
||||
|
||||
editor
|
||||
.update(cx, |editor, window, cx| {
|
||||
editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| {
|
||||
s.select_display_ranges([
|
||||
DisplayPoint::new(DisplayRow(1), 0)..DisplayPoint::new(DisplayRow(1), 0)
|
||||
])
|
||||
});
|
||||
|
||||
editor.duplicate_line_up(&DuplicateLineUp, window, cx);
|
||||
|
||||
assert_eq!(
|
||||
editor.display_text(cx),
|
||||
"line1\nline2\nline2",
|
||||
"Duplicating last line upward should create duplicate above, not on same line"
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
editor.selections.display_ranges(cx),
|
||||
vec![DisplayPoint::new(DisplayRow(1), 0)..DisplayPoint::new(DisplayRow(1), 0)],
|
||||
"Selection should remain on the original line"
|
||||
);
|
||||
})
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_copy_line_without_trailing_newline(cx: &mut TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
|
||||
let mut cx = EditorTestContext::new(cx).await;
|
||||
|
||||
cx.set_state("line1\nline2ˇ");
|
||||
|
||||
cx.update_editor(|e, window, cx| e.copy(&Copy, window, cx));
|
||||
|
||||
let clipboard_text = cx
|
||||
.read_from_clipboard()
|
||||
.and_then(|item| item.text().as_deref().map(str::to_string));
|
||||
|
||||
assert_eq!(
|
||||
clipboard_text,
|
||||
Some("line2\n".to_string()),
|
||||
"Copying a line without trailing newline should include a newline"
|
||||
);
|
||||
|
||||
cx.set_state("line1\nˇ");
|
||||
|
||||
cx.update_editor(|e, window, cx| e.paste(&Paste, window, cx));
|
||||
|
||||
cx.assert_editor_state("line1\nline2\nˇ");
|
||||
}
|
||||
|
||||
@@ -432,15 +432,6 @@ impl EditorElement {
|
||||
register_action(editor, window, Editor::open_selected_filename);
|
||||
register_action(editor, window, Editor::fold);
|
||||
register_action(editor, window, Editor::fold_at_level);
|
||||
register_action(editor, window, Editor::fold_at_level_1);
|
||||
register_action(editor, window, Editor::fold_at_level_2);
|
||||
register_action(editor, window, Editor::fold_at_level_3);
|
||||
register_action(editor, window, Editor::fold_at_level_4);
|
||||
register_action(editor, window, Editor::fold_at_level_5);
|
||||
register_action(editor, window, Editor::fold_at_level_6);
|
||||
register_action(editor, window, Editor::fold_at_level_7);
|
||||
register_action(editor, window, Editor::fold_at_level_8);
|
||||
register_action(editor, window, Editor::fold_at_level_9);
|
||||
register_action(editor, window, Editor::fold_all);
|
||||
register_action(editor, window, Editor::fold_function_bodies);
|
||||
register_action(editor, window, Editor::fold_recursive);
|
||||
@@ -681,7 +672,6 @@ impl EditorElement {
|
||||
.drag_and_drop_selection
|
||||
.enabled
|
||||
&& click_count == 1
|
||||
&& !modifiers.shift
|
||||
{
|
||||
let newest_anchor = editor.selections.newest_anchor();
|
||||
let snapshot = editor.snapshot(window, cx);
|
||||
@@ -740,35 +730,6 @@ impl EditorElement {
|
||||
}
|
||||
}
|
||||
|
||||
if !is_singleton {
|
||||
let display_row = (ScrollPixelOffset::from(
|
||||
(event.position - gutter_hitbox.bounds.origin).y / position_map.line_height,
|
||||
) + position_map.scroll_position.y) as u32;
|
||||
let multi_buffer_row = position_map
|
||||
.snapshot
|
||||
.display_point_to_point(DisplayPoint::new(DisplayRow(display_row), 0), Bias::Right)
|
||||
.row;
|
||||
if line_numbers
|
||||
.get(&MultiBufferRow(multi_buffer_row))
|
||||
.and_then(|line_number| line_number.hitbox.as_ref())
|
||||
.is_some_and(|hitbox| hitbox.contains(&event.position))
|
||||
{
|
||||
let line_offset_from_top = display_row - position_map.scroll_position.y as u32;
|
||||
|
||||
editor.open_excerpts_common(
|
||||
Some(JumpData::MultiBufferRow {
|
||||
row: MultiBufferRow(multi_buffer_row),
|
||||
line_offset_from_top,
|
||||
}),
|
||||
modifiers.alt,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
cx.stop_propagation();
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
let position = point_for_position.previous_valid;
|
||||
if let Some(mode) = Editor::columnar_selection_mode(&modifiers, cx) {
|
||||
editor.select(
|
||||
@@ -806,6 +767,34 @@ impl EditorElement {
|
||||
);
|
||||
}
|
||||
cx.stop_propagation();
|
||||
|
||||
if !is_singleton {
|
||||
let display_row = (ScrollPixelOffset::from(
|
||||
(event.position - gutter_hitbox.bounds.origin).y / position_map.line_height,
|
||||
) + position_map.scroll_position.y) as u32;
|
||||
let multi_buffer_row = position_map
|
||||
.snapshot
|
||||
.display_point_to_point(DisplayPoint::new(DisplayRow(display_row), 0), Bias::Right)
|
||||
.row;
|
||||
if line_numbers
|
||||
.get(&MultiBufferRow(multi_buffer_row))
|
||||
.and_then(|line_number| line_number.hitbox.as_ref())
|
||||
.is_some_and(|hitbox| hitbox.contains(&event.position))
|
||||
{
|
||||
let line_offset_from_top = display_row - position_map.scroll_position.y as u32;
|
||||
|
||||
editor.open_excerpts_common(
|
||||
Some(JumpData::MultiBufferRow {
|
||||
row: MultiBufferRow(multi_buffer_row),
|
||||
line_offset_from_top,
|
||||
}),
|
||||
modifiers.alt,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
cx.stop_propagation();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn mouse_right_down(
|
||||
|
||||
@@ -1,46 +1,47 @@
|
||||
use crate::{Editor, RangeToAnchorExt};
|
||||
use gpui::{Context, HighlightStyle};
|
||||
use gpui::{Context, HighlightStyle, Window};
|
||||
use language::CursorShape;
|
||||
use multi_buffer::ToOffset;
|
||||
use theme::ActiveTheme;
|
||||
|
||||
enum MatchingBracketHighlight {}
|
||||
|
||||
pub fn refresh_matching_bracket_highlights(editor: &mut Editor, cx: &mut Context<Editor>) {
|
||||
pub fn refresh_matching_bracket_highlights(
|
||||
editor: &mut Editor,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Editor>,
|
||||
) {
|
||||
editor.clear_highlights::<MatchingBracketHighlight>(cx);
|
||||
|
||||
let buffer_snapshot = editor.buffer.read(cx).snapshot(cx);
|
||||
let newest_selection = editor
|
||||
.selections
|
||||
.newest_anchor()
|
||||
.map(|anchor| anchor.to_offset(&buffer_snapshot));
|
||||
let newest_selection = editor.selections.newest::<usize>(cx);
|
||||
// Don't highlight brackets if the selection isn't empty
|
||||
if !newest_selection.is_empty() {
|
||||
return;
|
||||
}
|
||||
|
||||
let snapshot = editor.snapshot(window, cx);
|
||||
let head = newest_selection.head();
|
||||
if head > buffer_snapshot.len() {
|
||||
if head > snapshot.buffer_snapshot().len() {
|
||||
log::error!("bug: cursor offset is out of range while refreshing bracket highlights");
|
||||
return;
|
||||
}
|
||||
|
||||
let mut tail = head;
|
||||
if (editor.cursor_shape == CursorShape::Block || editor.cursor_shape == CursorShape::Hollow)
|
||||
&& head < buffer_snapshot.len()
|
||||
&& head < snapshot.buffer_snapshot().len()
|
||||
{
|
||||
if let Some(tail_ch) = buffer_snapshot.chars_at(tail).next() {
|
||||
if let Some(tail_ch) = snapshot.buffer_snapshot().chars_at(tail).next() {
|
||||
tail += tail_ch.len_utf8();
|
||||
}
|
||||
}
|
||||
|
||||
if let Some((opening_range, closing_range)) =
|
||||
buffer_snapshot.innermost_enclosing_bracket_ranges(head..tail, None)
|
||||
if let Some((opening_range, closing_range)) = snapshot
|
||||
.buffer_snapshot()
|
||||
.innermost_enclosing_bracket_ranges(head..tail, None)
|
||||
{
|
||||
editor.highlight_text::<MatchingBracketHighlight>(
|
||||
vec![
|
||||
opening_range.to_anchors(&buffer_snapshot),
|
||||
closing_range.to_anchors(&buffer_snapshot),
|
||||
opening_range.to_anchors(&snapshot.buffer_snapshot()),
|
||||
closing_range.to_anchors(&snapshot.buffer_snapshot()),
|
||||
],
|
||||
HighlightStyle {
|
||||
background_color: Some(
|
||||
|
||||
@@ -307,6 +307,7 @@ pub fn update_inlay_link_and_hover_points(
|
||||
buffer_snapshot.anchor_after(point_for_position.next_valid.to_point(snapshot));
|
||||
if let Some(hovered_hint) = editor
|
||||
.visible_inlay_hints(cx)
|
||||
.into_iter()
|
||||
.skip_while(|hint| {
|
||||
hint.position
|
||||
.cmp(&previous_valid_anchor, &buffer_snapshot)
|
||||
|
||||
@@ -1013,7 +1013,7 @@ fn fetch_and_update_hints(
|
||||
.cloned()
|
||||
})?;
|
||||
|
||||
let visible_hints = editor.update(cx, |editor, cx| editor.visible_inlay_hints(cx).cloned().collect::<Vec<_>>())?;
|
||||
let visible_hints = editor.update(cx, |editor, cx| editor.visible_inlay_hints(cx))?;
|
||||
let new_hints = match inlay_hints_fetch_task {
|
||||
Some(fetch_task) => {
|
||||
log::debug!(
|
||||
@@ -1495,7 +1495,7 @@ pub mod tests {
|
||||
.into_response()
|
||||
.expect("work done progress create request failed");
|
||||
cx.executor().run_until_parked();
|
||||
fake_server.notify::<lsp::notification::Progress>(lsp::ProgressParams {
|
||||
fake_server.notify::<lsp::notification::Progress>(&lsp::ProgressParams {
|
||||
token: lsp::ProgressToken::String(progress_token.to_string()),
|
||||
value: lsp::ProgressParamsValue::WorkDone(lsp::WorkDoneProgress::Begin(
|
||||
lsp::WorkDoneProgressBegin::default(),
|
||||
@@ -1515,7 +1515,7 @@ pub mod tests {
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
fake_server.notify::<lsp::notification::Progress>(lsp::ProgressParams {
|
||||
fake_server.notify::<lsp::notification::Progress>(&lsp::ProgressParams {
|
||||
token: lsp::ProgressToken::String(progress_token.to_string()),
|
||||
value: lsp::ProgressParamsValue::WorkDone(lsp::WorkDoneProgress::End(
|
||||
lsp::WorkDoneProgressEnd::default(),
|
||||
@@ -3570,6 +3570,7 @@ pub mod tests {
|
||||
pub fn visible_hint_labels(editor: &Editor, cx: &Context<Editor>) -> Vec<String> {
|
||||
editor
|
||||
.visible_inlay_hints(cx)
|
||||
.into_iter()
|
||||
.map(|hint| hint.text().to_string())
|
||||
.collect()
|
||||
}
|
||||
|
||||
@@ -13,8 +13,8 @@ use ui::{App, Context, Window};
|
||||
use util::post_inc;
|
||||
|
||||
use crate::{
|
||||
DisplayPoint, Editor, EditorSettings, EditorSnapshot, FETCH_COLORS_DEBOUNCE_TIMEOUT, InlayId,
|
||||
InlaySplice, RangeToAnchorExt, display_map::Inlay, editor_settings::DocumentColorsRenderMode,
|
||||
DisplayPoint, Editor, EditorSettings, EditorSnapshot, InlayId, InlaySplice, RangeToAnchorExt,
|
||||
display_map::Inlay, editor_settings::DocumentColorsRenderMode,
|
||||
};
|
||||
|
||||
#[derive(Debug)]
|
||||
@@ -193,12 +193,7 @@ impl Editor {
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
});
|
||||
|
||||
self.refresh_colors_task = cx.spawn(async move |editor, cx| {
|
||||
cx.background_executor()
|
||||
.timer(FETCH_COLORS_DEBOUNCE_TIMEOUT)
|
||||
.await;
|
||||
|
||||
cx.spawn(async move |editor, cx| {
|
||||
let all_colors = join_all(all_colors_task).await;
|
||||
if all_colors.is_empty() {
|
||||
return;
|
||||
@@ -425,6 +420,7 @@ impl Editor {
|
||||
}
|
||||
})
|
||||
.ok();
|
||||
});
|
||||
})
|
||||
.detach();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -35,8 +35,6 @@ pub struct SelectionsCollection {
|
||||
disjoint: Arc<[Selection<Anchor>]>,
|
||||
/// A pending selection, such as when the mouse is being dragged
|
||||
pending: Option<PendingSelection>,
|
||||
select_mode: SelectMode,
|
||||
is_extending: bool,
|
||||
}
|
||||
|
||||
impl SelectionsCollection {
|
||||
@@ -57,8 +55,6 @@ impl SelectionsCollection {
|
||||
},
|
||||
mode: SelectMode::Character,
|
||||
}),
|
||||
select_mode: SelectMode::Character,
|
||||
is_extending: false,
|
||||
}
|
||||
}
|
||||
|
||||
@@ -188,27 +184,6 @@ impl SelectionsCollection {
|
||||
selections
|
||||
}
|
||||
|
||||
/// Returns all of the selections, adjusted to take into account the selection line_mode. Uses a provided snapshot to resolve selections.
|
||||
pub fn all_adjusted_with_snapshot(
|
||||
&self,
|
||||
snapshot: &MultiBufferSnapshot,
|
||||
) -> Vec<Selection<Point>> {
|
||||
let mut selections = self
|
||||
.disjoint
|
||||
.iter()
|
||||
.chain(self.pending_anchor())
|
||||
.map(|anchor| anchor.map(|anchor| anchor.to_point(&snapshot)))
|
||||
.collect::<Vec<_>>();
|
||||
if self.line_mode {
|
||||
for selection in &mut selections {
|
||||
let new_range = snapshot.expand_to_line(selection.range());
|
||||
selection.start = new_range.start;
|
||||
selection.end = new_range.end;
|
||||
}
|
||||
}
|
||||
selections
|
||||
}
|
||||
|
||||
/// Returns the newest selection, adjusted to take into account the selection line_mode
|
||||
pub fn newest_adjusted(&self, cx: &mut App) -> Selection<Point> {
|
||||
let mut selection = self.newest::<Point>(cx);
|
||||
@@ -460,22 +435,6 @@ impl SelectionsCollection {
|
||||
pub fn set_line_mode(&mut self, line_mode: bool) {
|
||||
self.line_mode = line_mode;
|
||||
}
|
||||
|
||||
pub fn select_mode(&self) -> &SelectMode {
|
||||
&self.select_mode
|
||||
}
|
||||
|
||||
pub fn set_select_mode(&mut self, select_mode: SelectMode) {
|
||||
self.select_mode = select_mode;
|
||||
}
|
||||
|
||||
pub fn is_extending(&self) -> bool {
|
||||
self.is_extending
|
||||
}
|
||||
|
||||
pub fn set_is_extending(&mut self, is_extending: bool) {
|
||||
self.is_extending = is_extending;
|
||||
}
|
||||
}
|
||||
|
||||
pub struct MutableSelectionsCollection<'a> {
|
||||
@@ -610,32 +569,21 @@ impl<'a> MutableSelectionsCollection<'a> {
|
||||
self.select(selections);
|
||||
}
|
||||
|
||||
pub fn select<T>(&mut self, selections: Vec<Selection<T>>)
|
||||
pub fn select<T>(&mut self, mut selections: Vec<Selection<T>>)
|
||||
where
|
||||
T: ToOffset + std::marker::Copy + std::fmt::Debug,
|
||||
T: ToOffset + ToPoint + Ord + std::marker::Copy + std::fmt::Debug,
|
||||
{
|
||||
let buffer = self.buffer.read(self.cx).snapshot(self.cx);
|
||||
let mut selections = selections
|
||||
.into_iter()
|
||||
.map(|selection| selection.map(|it| it.to_offset(&buffer)))
|
||||
.map(|mut selection| {
|
||||
if selection.start > selection.end {
|
||||
mem::swap(&mut selection.start, &mut selection.end);
|
||||
selection.reversed = true
|
||||
}
|
||||
selection
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
selections.sort_unstable_by_key(|s| s.start);
|
||||
// Merge overlapping selections.
|
||||
let mut i = 1;
|
||||
while i < selections.len() {
|
||||
if selections[i].start <= selections[i - 1].end {
|
||||
if selections[i - 1].end >= selections[i].start {
|
||||
let removed = selections.remove(i);
|
||||
if removed.start < selections[i - 1].start {
|
||||
selections[i - 1].start = removed.start;
|
||||
}
|
||||
if selections[i - 1].end < removed.end {
|
||||
if removed.end > selections[i - 1].end {
|
||||
selections[i - 1].end = removed.end;
|
||||
}
|
||||
} else {
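The `select` hunk above normalizes incoming selections before storing them: reversed ranges are flipped, the list is sorted by start, and any selection starting at or before the previous one's end is merged into it. A standalone sketch of that merge pass over plain ranges (illustrative helper, not the editor's selection types):

```rust
use std::ops::Range;

/// Sort ranges by start and merge any that touch or overlap, mirroring the
/// normalization `select` performs on incoming selections.
fn merge_ranges(mut ranges: Vec<Range<usize>>) -> Vec<Range<usize>> {
    ranges.sort_unstable_by_key(|r| r.start);
    let mut i = 1;
    while i < ranges.len() {
        if ranges[i].start <= ranges[i - 1].end {
            // Overlapping (or touching) the previous range: absorb it.
            let removed = ranges.remove(i);
            if removed.end > ranges[i - 1].end {
                ranges[i - 1].end = removed.end;
            }
        } else {
            i += 1;
        }
    }
    ranges
}

fn main() {
    let merged = merge_ranges(vec![8..10, 0..3, 2..5, 10..12]);
    assert_eq!(merged, vec![0..5, 8..12]);
}
```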
@@ -979,10 +927,13 @@ impl DerefMut for MutableSelectionsCollection<'_> {
|
||||
}
|
||||
}
|
||||
|
||||
fn selection_to_anchor_selection(
|
||||
selection: Selection<usize>,
|
||||
fn selection_to_anchor_selection<T>(
|
||||
selection: Selection<T>,
|
||||
buffer: &MultiBufferSnapshot,
|
||||
) -> Selection<Anchor> {
|
||||
) -> Selection<Anchor>
|
||||
where
|
||||
T: ToOffset + Ord,
|
||||
{
|
||||
let end_bias = if selection.start == selection.end {
|
||||
Bias::Right
|
||||
} else {
|
||||
@@ -1020,7 +971,7 @@ fn resolve_selections_point<'a>(
|
||||
})
|
||||
}
|
||||
|
||||
/// Panics if passed selections are not in order
|
||||
// Panics if passed selections are not in order
|
||||
fn resolve_selections_display<'a>(
|
||||
selections: impl 'a + IntoIterator<Item = &'a Selection<Anchor>>,
|
||||
map: &'a DisplaySnapshot,
|
||||
@@ -1052,7 +1003,7 @@ fn resolve_selections_display<'a>(
|
||||
coalesce_selections(selections)
|
||||
}
|
||||
|
||||
/// Panics if passed selections are not in order
|
||||
// Panics if passed selections are not in order
|
||||
pub(crate) fn resolve_selections<'a, D, I>(
|
||||
selections: I,
|
||||
map: &'a DisplaySnapshot,
|
||||
|
||||
@@ -262,77 +262,6 @@ impl EditorLspTestContext {
|
||||
Self::new(language, capabilities, cx).await
|
||||
}
|
||||
|
||||
pub async fn new_tsx(
|
||||
capabilities: lsp::ServerCapabilities,
|
||||
cx: &mut gpui::TestAppContext,
|
||||
) -> EditorLspTestContext {
|
||||
let mut word_characters: HashSet<char> = Default::default();
|
||||
word_characters.insert('$');
|
||||
word_characters.insert('#');
|
||||
let language = Language::new(
|
||||
LanguageConfig {
|
||||
name: "TSX".into(),
|
||||
matcher: LanguageMatcher {
|
||||
path_suffixes: vec!["tsx".to_string()],
|
||||
..Default::default()
|
||||
},
|
||||
brackets: language::BracketPairConfig {
|
||||
pairs: vec![language::BracketPair {
|
||||
start: "{".to_string(),
|
||||
end: "}".to_string(),
|
||||
close: true,
|
||||
surround: true,
|
||||
newline: true,
|
||||
}],
|
||||
disabled_scopes_by_bracket_ix: Default::default(),
|
||||
},
|
||||
word_characters,
|
||||
..Default::default()
|
||||
},
|
||||
Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
|
||||
)
|
||||
.with_queries(LanguageQueries {
|
||||
brackets: Some(Cow::from(indoc! {r#"
|
||||
("(" @open ")" @close)
|
||||
("[" @open "]" @close)
|
||||
("{" @open "}" @close)
|
||||
("<" @open ">" @close)
|
||||
("<" @open "/>" @close)
|
||||
("</" @open ">" @close)
|
||||
("\"" @open "\"" @close)
|
||||
("'" @open "'" @close)
|
||||
("`" @open "`" @close)
|
||||
((jsx_element (jsx_opening_element) @open (jsx_closing_element) @close) (#set! newline.only))"#})),
|
||||
indents: Some(Cow::from(indoc! {r#"
|
||||
[
|
||||
(call_expression)
|
||||
(assignment_expression)
|
||||
(member_expression)
|
||||
(lexical_declaration)
|
||||
(variable_declaration)
|
||||
(assignment_expression)
|
||||
(if_statement)
|
||||
(for_statement)
|
||||
] @indent
|
||||
|
||||
(_ "[" "]" @end) @indent
|
||||
(_ "<" ">" @end) @indent
|
||||
(_ "{" "}" @end) @indent
|
||||
(_ "(" ")" @end) @indent
|
||||
|
||||
(jsx_opening_element ">" @end) @indent
|
||||
|
||||
(jsx_element
|
||||
(jsx_opening_element) @start
|
||||
(jsx_closing_element)? @end) @indent
|
||||
"#})),
|
||||
..Default::default()
|
||||
})
|
||||
.expect("Could not parse queries");
|
||||
|
||||
Self::new(language, capabilities, cx).await
|
||||
}
|
||||
|
||||
pub async fn new_html(cx: &mut gpui::TestAppContext) -> Self {
|
||||
let language = Language::new(
|
||||
LanguageConfig {
|
||||
@@ -440,7 +369,7 @@ impl EditorLspTestContext {
|
||||
}
|
||||
|
||||
pub fn notify<T: notification::Notification>(&self, params: T::Params) {
|
||||
self.lsp.notify::<T>(params);
|
||||
self.lsp.notify::<T>(¶ms);
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
use crate::{
|
||||
AnchorRangeExt, DisplayPoint, Editor, ExcerptId, MultiBuffer, MultiBufferSnapshot, RowExt,
|
||||
AnchorRangeExt, DisplayPoint, Editor, MultiBuffer, RowExt,
|
||||
display_map::{HighlightKey, ToDisplayPoint},
|
||||
};
|
||||
use buffer_diff::DiffHunkStatusKind;
|
||||
@@ -24,7 +24,6 @@ use std::{
|
||||
atomic::{AtomicUsize, Ordering},
|
||||
},
|
||||
};
|
||||
use text::Selection;
|
||||
use util::{
|
||||
assert_set_eq,
|
||||
test::{generate_marked_text, marked_text_ranges},
|
||||
@@ -389,23 +388,6 @@ impl EditorTestContext {
|
||||
|
||||
#[track_caller]
|
||||
pub fn assert_excerpts_with_selections(&mut self, marked_text: &str) {
|
||||
let actual_text = self.to_format_multibuffer_as_marked_text();
|
||||
let fmt_additional_notes = || {
|
||||
struct Format<'a, T: std::fmt::Display>(&'a str, &'a T);
|
||||
|
||||
impl<T: std::fmt::Display> std::fmt::Display for Format<'_, T> {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(
|
||||
f,
|
||||
"\n\n----- EXPECTED: -----\n\n{}\n\n----- ACTUAL: -----\n\n{}\n\n",
|
||||
self.0, self.1
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
Format(marked_text, &actual_text)
|
||||
};
|
||||
|
||||
let expected_excerpts = marked_text
|
||||
.strip_prefix("[EXCERPT]\n")
|
||||
.unwrap()
|
||||
@@ -426,10 +408,9 @@ impl EditorTestContext {
|
||||
|
||||
assert!(
|
||||
excerpts.len() == expected_excerpts.len(),
|
||||
"should have {} excerpts, got {}{}",
|
||||
"should have {} excerpts, got {}",
|
||||
expected_excerpts.len(),
|
||||
excerpts.len(),
|
||||
fmt_additional_notes(),
|
||||
excerpts.len()
|
||||
);
|
||||
|
||||
for (ix, (excerpt_id, snapshot, range)) in excerpts.into_iter().enumerate() {
|
||||
@@ -443,25 +424,18 @@ impl EditorTestContext {
|
||||
if !expected_selections.is_empty() {
|
||||
assert!(
|
||||
is_selected,
|
||||
"excerpt {ix} should contain selections. got {:?}{}",
|
||||
"excerpt {ix} should be selected. got {:?}",
|
||||
self.editor_state(),
|
||||
fmt_additional_notes(),
|
||||
);
|
||||
} else {
|
||||
assert!(
|
||||
!is_selected,
|
||||
"excerpt {ix} should not contain selections, got: {selections:?}{}",
|
||||
fmt_additional_notes(),
|
||||
"excerpt {ix} should not be selected, got: {selections:?}",
|
||||
);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
assert!(
|
||||
!is_folded,
|
||||
"excerpt {} should not be folded{}",
|
||||
ix,
|
||||
fmt_additional_notes()
|
||||
);
|
||||
assert!(!is_folded, "excerpt {} should not be folded", ix);
|
||||
assert_eq!(
|
||||
multibuffer_snapshot
|
||||
.text_for_range(Anchor::range_in_buffer(
|
||||
@@ -470,9 +444,7 @@ impl EditorTestContext {
|
||||
range.context.clone()
|
||||
))
|
||||
.collect::<String>(),
|
||||
expected_text,
|
||||
"{}",
|
||||
fmt_additional_notes(),
|
||||
expected_text
|
||||
);
|
||||
|
||||
let selections = selections
|
||||
@@ -488,38 +460,13 @@ impl EditorTestContext {
|
||||
.collect::<Vec<_>>();
|
||||
// todo: selections that cross excerpt boundaries..
|
||||
assert_eq!(
|
||||
selections,
|
||||
expected_selections,
|
||||
"excerpt {} has incorrect selections{}",
|
||||
selections, expected_selections,
|
||||
"excerpt {} has incorrect selections",
|
||||
ix,
|
||||
fmt_additional_notes()
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
fn to_format_multibuffer_as_marked_text(&mut self) -> FormatMultiBufferAsMarkedText {
|
||||
let (multibuffer_snapshot, selections, excerpts) = self.update_editor(|editor, _, cx| {
|
||||
let multibuffer_snapshot = editor.buffer.read(cx).snapshot(cx);
|
||||
|
||||
let selections = editor.selections.disjoint_anchors_arc().to_vec();
|
||||
let excerpts = multibuffer_snapshot
|
||||
.excerpts()
|
||||
.map(|(e_id, snapshot, range)| {
|
||||
let is_folded = editor.is_buffer_folded(snapshot.remote_id(), cx);
|
||||
(e_id, snapshot.clone(), range, is_folded)
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
(multibuffer_snapshot, selections, excerpts)
|
||||
});
|
||||
|
||||
FormatMultiBufferAsMarkedText {
|
||||
multibuffer_snapshot,
|
||||
selections,
|
||||
excerpts,
|
||||
}
|
||||
}
|
||||
|
||||
/// Make an assertion about the editor's text and the ranges and directions
|
||||
/// of its selections using a string containing embedded range markers.
|
||||
///
|
||||
@@ -624,63 +571,6 @@ impl EditorTestContext {
|
||||
}
|
||||
}
|
||||
|
||||
struct FormatMultiBufferAsMarkedText {
|
||||
multibuffer_snapshot: MultiBufferSnapshot,
|
||||
selections: Vec<Selection<Anchor>>,
|
||||
excerpts: Vec<(ExcerptId, BufferSnapshot, ExcerptRange<text::Anchor>, bool)>,
|
||||
}
|
||||
|
||||
impl std::fmt::Display for FormatMultiBufferAsMarkedText {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
let Self {
|
||||
multibuffer_snapshot,
|
||||
selections,
|
||||
excerpts,
|
||||
} = self;
|
||||
|
||||
for (excerpt_id, snapshot, range, is_folded) in excerpts.into_iter() {
|
||||
write!(f, "[EXCERPT]\n")?;
|
||||
if *is_folded {
|
||||
write!(f, "[FOLDED]\n")?;
|
||||
}
|
||||
|
||||
let mut text = multibuffer_snapshot
|
||||
.text_for_range(Anchor::range_in_buffer(
|
||||
*excerpt_id,
|
||||
snapshot.remote_id(),
|
||||
range.context.clone(),
|
||||
))
|
||||
.collect::<String>();
|
||||
|
||||
let selections = selections
|
||||
.iter()
|
||||
.filter(|&s| s.head().excerpt_id == *excerpt_id)
|
||||
.map(|s| {
|
||||
let head = text::ToOffset::to_offset(&s.head().text_anchor, &snapshot)
|
||||
- text::ToOffset::to_offset(&range.context.start, &snapshot);
|
||||
let tail = text::ToOffset::to_offset(&s.head().text_anchor, &snapshot)
|
||||
- text::ToOffset::to_offset(&range.context.start, &snapshot);
|
||||
tail..head
|
||||
})
|
||||
.rev()
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
for selection in selections {
|
||||
if selection.is_empty() {
|
||||
text.insert(selection.start, 'ˇ');
|
||||
continue;
|
||||
}
|
||||
text.insert(selection.end, '»');
|
||||
text.insert(selection.start, '«');
|
||||
}
|
||||
|
||||
write!(f, "{text}")?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
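The `Display` impl above renders excerpts as marked text: 'ˇ' marks an empty selection and '«'/'»' wrap a non-empty one, with markers inserted from the highest offset down so earlier byte offsets stay valid. A minimal standalone version for a single plain string (illustrative, with no excerpt handling):

```rust
use std::ops::Range;

/// Insert selection markers into `text`: 'ˇ' for an empty (cursor) selection,
/// '«'/'»' around a non-empty one. Ranges are byte offsets into `text`.
fn mark_selections(mut text: String, mut selections: Vec<Range<usize>>) -> String {
    // Work from the highest offset down so earlier offsets are not shifted
    // by the bytes we insert.
    selections.sort_by_key(|s| std::cmp::Reverse(s.start));
    for selection in selections {
        if selection.is_empty() {
            text.insert(selection.start, 'ˇ');
            continue;
        }
        // Insert the closing marker first; the opening one goes in below it.
        text.insert(selection.end, '»');
        text.insert(selection.start, '«');
    }
    text
}

fn main() {
    let marked = mark_selections("one two three".to_string(), vec![4..7, 13..13]);
    assert_eq!(marked, "one «two» threeˇ");
}
```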
#[track_caller]
|
||||
pub fn assert_state_with_diff(
|
||||
editor: &Entity<Editor>,
|
||||
Some files were not shown because too many files have changed in this diff.