Compare commits
112 Commits
static-rel
...
fix-rodio-
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
f28fd3a0fc | ||
|
|
c8f21ab95a | ||
|
|
1065762d03 | ||
|
|
71ced03a5d | ||
|
|
a78b560b8b | ||
|
|
b9a6660b93 | ||
|
|
a693d44553 | ||
|
|
41ee92e5f2 | ||
|
|
a9eb480f3c | ||
|
|
5698636c92 | ||
|
|
bbd735905f | ||
|
|
3d5ddcccf0 | ||
|
|
4dae3a15cc | ||
|
|
c6373cc26d | ||
|
|
a4ec693e34 | ||
|
|
08a2b6898b | ||
|
|
13b17b3a85 | ||
|
|
e4f0fbbf80 | ||
|
|
98d4c34199 | ||
|
|
c24f365b69 | ||
|
|
2dfde55367 | ||
|
|
e946a06efe | ||
|
|
75067c94ad | ||
|
|
d7143009fc | ||
|
|
a22c29c5f9 | ||
|
|
c543709d5f | ||
|
|
c58931ac04 | ||
|
|
dd5da592f0 | ||
|
|
f1d17fcfbe | ||
|
|
ccfc1ce387 | ||
|
|
3d4f488d46 | ||
|
|
ba2337ffb9 | ||
|
|
37d676e2c6 | ||
|
|
1bb6752e3e | ||
|
|
8c9b42dda8 | ||
|
|
15c4aadb57 | ||
|
|
3d200a5466 | ||
|
|
e077b63915 | ||
|
|
ef839cc207 | ||
|
|
3d0312f4c7 | ||
|
|
c1e3958c26 | ||
|
|
ba937d16e7 | ||
|
|
4dbd186485 | ||
|
|
88887fd292 | ||
|
|
31e75b2235 | ||
|
|
681c19899f | ||
|
|
439add3d23 | ||
|
|
81b98cdd4d | ||
|
|
ca89a40df2 | ||
|
|
f5884e99d0 | ||
|
|
fce931144e | ||
|
|
ef423148fc | ||
|
|
cd656485c8 | ||
|
|
1e149b755f | ||
|
|
e0eeda11ed | ||
|
|
bcef3b5010 | ||
|
|
5fd187769d | ||
|
|
096930817b | ||
|
|
c7d5afedc5 | ||
|
|
d6b1801fb3 | ||
|
|
7c55f7181d | ||
|
|
4684d6b50e | ||
|
|
578e7e4cbd | ||
|
|
a960db6a43 | ||
|
|
5a0f796a44 | ||
|
|
604d56659d | ||
|
|
1d1c799b4b | ||
|
|
70af11ef2a | ||
|
|
5fa4b3bfe8 | ||
|
|
93a5dffea1 | ||
|
|
9ac010043c | ||
|
|
dd3b65f707 | ||
|
|
057b7b1543 | ||
|
|
a9455eb947 | ||
|
|
db3c186af0 | ||
|
|
71856706c7 | ||
|
|
4ec24ebe01 | ||
|
|
4152942a8e | ||
|
|
bbf4bfad6f | ||
|
|
989d172cfc | ||
|
|
1265b229a9 | ||
|
|
294ca25f44 | ||
|
|
5c7907ad2f | ||
|
|
f652c3a14d | ||
|
|
69ac003bc9 | ||
|
|
d615525771 | ||
|
|
8bf37dd130 | ||
|
|
8cb67ec91c | ||
|
|
cd67941598 | ||
|
|
669db62e33 | ||
|
|
41f1835bbe | ||
|
|
791ba9ce4c | ||
|
|
e60a61f7e7 | ||
|
|
b8a6180b82 | ||
|
|
dfce57c7f8 | ||
|
|
15580a867b | ||
|
|
f7bb22fb83 | ||
|
|
7db7ad93a2 | ||
|
|
642643de01 | ||
|
|
391e304c9f | ||
|
|
3106472bf3 | ||
|
|
d04ac864b8 | ||
|
|
f9a2724a8b | ||
|
|
ded73c9d56 | ||
|
|
41cf114d8a | ||
|
|
e765818487 | ||
|
|
84f488879c | ||
|
|
85985fe960 | ||
|
|
3bec885536 | ||
|
|
9a5034ea6d | ||
|
|
64eec67a81 | ||
|
|
ffff56f7fe |
@@ -26,7 +26,7 @@ third-party = [
|
||||
# build of remote_server should not include scap / its x11 dependency
|
||||
{ name = "zed-scap", git = "https://github.com/zed-industries/scap", rev = "4afea48c3b002197176fb19cd0f9b180dd36eaac", version = "0.0.8-zed" },
|
||||
# build of remote_server should not need to include on libalsa through rodio
|
||||
{ name = "rodio", git = "https://github.com/RustAudio/rodio" },
|
||||
{ name = "rodio", git = "https://github.com/RustAudio/rodio", rev = "836c0133533ad89f6df08b584d1e4c7269744062" },
|
||||
]
|
||||
|
||||
[final-excludes]
|
||||
|
||||
35
.github/ISSUE_TEMPLATE/07_bug_windows.yml
vendored
Normal file
35
.github/ISSUE_TEMPLATE/07_bug_windows.yml
vendored
Normal file
@@ -0,0 +1,35 @@
|
||||
name: Bug Report (Windows)
|
||||
description: Zed Windows Related Bugs
|
||||
type: "Bug"
|
||||
labels: ["windows"]
|
||||
title: "Windows: <a short description of the Windows bug>"
|
||||
body:
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Summary
|
||||
description: Describe the bug with a one-line summary, and provide detailed reproduction steps
|
||||
value: |
|
||||
<!-- Please insert a one-line summary of the issue below -->
|
||||
SUMMARY_SENTENCE_HERE
|
||||
|
||||
### Description
|
||||
<!-- Describe with sufficient detail to reproduce from a clean Zed install. -->
|
||||
Steps to trigger the problem:
|
||||
1.
|
||||
2.
|
||||
3.
|
||||
|
||||
**Expected Behavior**:
|
||||
**Actual Behavior**:
|
||||
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: environment
|
||||
attributes:
|
||||
label: Zed Version and System Specs
|
||||
description: 'Open Zed, and in the command palette select "zed: copy system specs into clipboard"'
|
||||
placeholder: |
|
||||
Output of "zed: copy system specs into clipboard"
|
||||
validations:
|
||||
required: true
|
||||
2
.github/workflows/ci.yml
vendored
2
.github/workflows/ci.yml
vendored
@@ -866,7 +866,7 @@ jobs:
|
||||
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4
|
||||
if: contains(github.event.pull_request.labels.*.name, 'run-bundling')
|
||||
with:
|
||||
name: ZedEditorUserSetup-x64-${{ github.event.pull_request.head.sha || github.sha }}.exe
|
||||
name: Zed_${{ github.event.pull_request.head.sha || github.sha }}-x86_64.exe
|
||||
path: ${{ env.SETUP_PATH }}
|
||||
|
||||
- name: Upload Artifacts to release
|
||||
|
||||
81
Cargo.lock
generated
81
Cargo.lock
generated
@@ -778,6 +778,9 @@ dependencies = [
|
||||
"serde",
|
||||
"serde_repr",
|
||||
"url",
|
||||
"wayland-backend",
|
||||
"wayland-client",
|
||||
"wayland-protocols 0.32.6",
|
||||
"zbus",
|
||||
]
|
||||
|
||||
@@ -3313,6 +3316,27 @@ dependencies = [
|
||||
"unicode-width",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "codestral"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"edit_prediction",
|
||||
"edit_prediction_context",
|
||||
"futures 0.3.31",
|
||||
"gpui",
|
||||
"language",
|
||||
"language_models",
|
||||
"log",
|
||||
"mistral",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"smol",
|
||||
"text",
|
||||
"workspace-hack",
|
||||
"zed-http-client",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "collab"
|
||||
version = "0.44.0"
|
||||
@@ -5112,6 +5136,7 @@ dependencies = [
|
||||
"anyhow",
|
||||
"client",
|
||||
"cloud_llm_client",
|
||||
"codestral",
|
||||
"copilot",
|
||||
"edit_prediction",
|
||||
"editor",
|
||||
@@ -5164,6 +5189,9 @@ dependencies = [
|
||||
"strum 0.27.1",
|
||||
"text",
|
||||
"tree-sitter",
|
||||
"tree-sitter-c",
|
||||
"tree-sitter-cpp",
|
||||
"tree-sitter-go",
|
||||
"workspace-hack",
|
||||
"zed-collections",
|
||||
"zed-util",
|
||||
@@ -5861,9 +5889,7 @@ version = "0.1.0"
|
||||
dependencies = [
|
||||
"editor",
|
||||
"gpui",
|
||||
"menu",
|
||||
"system_specs",
|
||||
"ui",
|
||||
"urlencoding",
|
||||
"workspace",
|
||||
"workspace-hack",
|
||||
@@ -6747,6 +6773,7 @@ dependencies = [
|
||||
"futures 0.3.31",
|
||||
"git2",
|
||||
"gpui",
|
||||
"itertools 0.14.0",
|
||||
"log",
|
||||
"parking_lot",
|
||||
"pretty_assertions",
|
||||
@@ -6763,6 +6790,7 @@ dependencies = [
|
||||
"time",
|
||||
"unindent",
|
||||
"url",
|
||||
"urlencoding",
|
||||
"uuid",
|
||||
"workspace-hack",
|
||||
"zed-collections",
|
||||
@@ -6987,7 +7015,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "gpui"
|
||||
version = "0.1.0"
|
||||
version = "0.2.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"as-raw-xcb-connection",
|
||||
@@ -7062,7 +7090,7 @@ dependencies = [
|
||||
"wayland-backend",
|
||||
"wayland-client",
|
||||
"wayland-cursor",
|
||||
"wayland-protocols",
|
||||
"wayland-protocols 0.31.2",
|
||||
"wayland-protocols-plasma",
|
||||
"windows 0.61.1",
|
||||
"windows-core 0.61.0",
|
||||
@@ -8023,6 +8051,7 @@ dependencies = [
|
||||
"serde_json",
|
||||
"serde_json_lenient",
|
||||
"theme",
|
||||
"title_bar",
|
||||
"ui",
|
||||
"workspace",
|
||||
"workspace-hack",
|
||||
@@ -8775,7 +8804,6 @@ dependencies = [
|
||||
"serde_json",
|
||||
"serde_json_lenient",
|
||||
"settings",
|
||||
"shlex",
|
||||
"smol",
|
||||
"task",
|
||||
"text",
|
||||
@@ -10532,20 +10560,15 @@ dependencies = [
|
||||
name = "onboarding"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"ai_onboarding",
|
||||
"anyhow",
|
||||
"client",
|
||||
"component",
|
||||
"db",
|
||||
"documented",
|
||||
"editor",
|
||||
"fs",
|
||||
"fuzzy",
|
||||
"git",
|
||||
"gpui",
|
||||
"itertools 0.14.0",
|
||||
"language",
|
||||
"language_model",
|
||||
"menu",
|
||||
"notifications",
|
||||
"picker",
|
||||
@@ -12009,7 +12032,6 @@ dependencies = [
|
||||
"dap_adapters",
|
||||
"extension",
|
||||
"fancy-regex 0.14.0",
|
||||
"feature_flags",
|
||||
"fs",
|
||||
"futures 0.3.31",
|
||||
"fuzzy",
|
||||
@@ -13279,12 +13301,14 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "rodio"
|
||||
version = "0.21.1"
|
||||
source = "git+https://github.com/RustAudio/rodio#e2074c6c2acf07b57cf717e076bdda7a9ac6e70b"
|
||||
source = "git+https://github.com/RustAudio/rodio?rev=836c0133533ad89f6df08b584d1e4c7269744062#836c0133533ad89f6df08b584d1e4c7269744062"
|
||||
dependencies = [
|
||||
"cpal",
|
||||
"dasp_sample",
|
||||
"hound",
|
||||
"num-rational",
|
||||
"rand 0.9.1",
|
||||
"rand_distr",
|
||||
"rtrb",
|
||||
"symphonia",
|
||||
"thiserror 2.0.12",
|
||||
@@ -14359,24 +14383,28 @@ dependencies = [
|
||||
"anyhow",
|
||||
"assets",
|
||||
"client",
|
||||
"command_palette_hooks",
|
||||
"editor",
|
||||
"feature_flags",
|
||||
"fs",
|
||||
"futures 0.3.31",
|
||||
"fuzzy",
|
||||
"gpui",
|
||||
"heck 0.5.0",
|
||||
"language",
|
||||
"log",
|
||||
"menu",
|
||||
"node_runtime",
|
||||
"paths",
|
||||
"pretty_assertions",
|
||||
"project",
|
||||
"schemars 1.0.1",
|
||||
"search",
|
||||
"serde",
|
||||
"session",
|
||||
"settings",
|
||||
"strum 0.27.1",
|
||||
"theme",
|
||||
"title_bar",
|
||||
"ui",
|
||||
"ui_input",
|
||||
"workspace",
|
||||
@@ -16937,8 +16965,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "tree-sitter-typescript"
|
||||
version = "0.23.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6c5f76ed8d947a75cc446d5fccd8b602ebf0cde64ccf2ffa434d873d7a575eff"
|
||||
source = "git+https://github.com/zed-industries/tree-sitter-typescript?rev=e2c53597d6a5d9cf7bbe8dccde576fe1e46c5899#e2c53597d6a5d9cf7bbe8dccde576fe1e46c5899"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"tree-sitter-language",
|
||||
@@ -18167,6 +18194,18 @@ dependencies = [
|
||||
"wayland-scanner",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wayland-protocols"
|
||||
version = "0.32.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0781cf46869b37e36928f7b432273c0995aa8aed9552c556fb18754420541efc"
|
||||
dependencies = [
|
||||
"bitflags 2.9.0",
|
||||
"wayland-backend",
|
||||
"wayland-client",
|
||||
"wayland-scanner",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wayland-protocols-plasma"
|
||||
version = "0.2.0"
|
||||
@@ -18176,7 +18215,7 @@ dependencies = [
|
||||
"bitflags 2.9.0",
|
||||
"wayland-backend",
|
||||
"wayland-client",
|
||||
"wayland-protocols",
|
||||
"wayland-protocols 0.31.2",
|
||||
"wayland-scanner",
|
||||
]
|
||||
|
||||
@@ -19425,6 +19464,7 @@ dependencies = [
|
||||
"aho-corasick",
|
||||
"anstream",
|
||||
"arrayvec",
|
||||
"ashpd 0.11.0",
|
||||
"async-compression",
|
||||
"async-std",
|
||||
"async-tungstenite",
|
||||
@@ -19597,6 +19637,8 @@ dependencies = [
|
||||
"wasmtime",
|
||||
"wasmtime-cranelift",
|
||||
"wasmtime-environ",
|
||||
"wayland-backend",
|
||||
"wayland-sys",
|
||||
"winapi",
|
||||
"windows-core 0.61.0",
|
||||
"windows-numerics",
|
||||
@@ -19604,6 +19646,7 @@ dependencies = [
|
||||
"windows-sys 0.52.0",
|
||||
"windows-sys 0.59.0",
|
||||
"windows-sys 0.61.0",
|
||||
"zbus_macros",
|
||||
"zeroize",
|
||||
"zvariant",
|
||||
]
|
||||
@@ -19961,7 +20004,7 @@ dependencies = [
|
||||
|
||||
[[package]]
|
||||
name = "zed"
|
||||
version = "0.208.0"
|
||||
version = "0.209.0"
|
||||
dependencies = [
|
||||
"acp_tools",
|
||||
"activity_indicator",
|
||||
@@ -19984,6 +20027,7 @@ dependencies = [
|
||||
"clap",
|
||||
"cli",
|
||||
"client",
|
||||
"codestral",
|
||||
"collab_ui",
|
||||
"command_palette",
|
||||
"component",
|
||||
@@ -20688,6 +20732,8 @@ dependencies = [
|
||||
"indoc",
|
||||
"language",
|
||||
"log",
|
||||
"multi_buffer",
|
||||
"ordered-float 2.10.1",
|
||||
"pretty_assertions",
|
||||
"project",
|
||||
"serde",
|
||||
@@ -20741,6 +20787,7 @@ dependencies = [
|
||||
"terminal_view",
|
||||
"watch",
|
||||
"workspace-hack",
|
||||
"zed-collections",
|
||||
"zed-util",
|
||||
"zeta",
|
||||
"zeta2",
|
||||
|
||||
@@ -164,6 +164,7 @@ members = [
|
||||
"crates/sum_tree",
|
||||
"crates/supermaven",
|
||||
"crates/supermaven_api",
|
||||
"crates/codestral",
|
||||
"crates/svg_preview",
|
||||
"crates/system_specs",
|
||||
"crates/tab_switcher",
|
||||
@@ -377,7 +378,7 @@ remote_server = { path = "crates/remote_server" }
|
||||
repl = { path = "crates/repl" }
|
||||
reqwest_client = { path = "crates/reqwest_client" }
|
||||
rich_text = { path = "crates/rich_text" }
|
||||
rodio = { git = "https://github.com/RustAudio/rodio" }
|
||||
rodio = { git = "https://github.com/RustAudio/rodio" , rev = "836c0133533ad89f6df08b584d1e4c7269744062"}
|
||||
rope = { path = "crates/rope" }
|
||||
rpc = { path = "crates/rpc" }
|
||||
rules_library = { path = "crates/rules_library" }
|
||||
@@ -398,6 +399,7 @@ streaming_diff = { path = "crates/streaming_diff" }
|
||||
sum_tree = { path = "crates/sum_tree", package = "zed-sum-tree", version = "0.1.0" }
|
||||
supermaven = { path = "crates/supermaven" }
|
||||
supermaven_api = { path = "crates/supermaven_api" }
|
||||
codestral = { path = "crates/codestral" }
|
||||
system_specs = { path = "crates/system_specs" }
|
||||
tab_switcher = { path = "crates/tab_switcher" }
|
||||
task = { path = "crates/task" }
|
||||
@@ -476,7 +478,6 @@ bitflags = "2.6.0"
|
||||
blade-graphics = { version = "0.7.0" }
|
||||
blade-macros = { version = "0.3.0" }
|
||||
blade-util = { version = "0.3.0" }
|
||||
blake3 = "1.5.3"
|
||||
bytes = "1.0"
|
||||
cargo_metadata = "0.19"
|
||||
cargo_toml = "0.21"
|
||||
@@ -692,7 +693,7 @@ tree-sitter-python = "0.25"
|
||||
tree-sitter-regex = "0.24"
|
||||
tree-sitter-ruby = "0.23"
|
||||
tree-sitter-rust = "0.24"
|
||||
tree-sitter-typescript = "0.23"
|
||||
tree-sitter-typescript = { git = "https://github.com/zed-industries/tree-sitter-typescript", rev = "e2c53597d6a5d9cf7bbe8dccde576fe1e46c5899" } # https://github.com/tree-sitter/tree-sitter-typescript/pull/347
|
||||
tree-sitter-yaml = { git = "https://github.com/zed-industries/tree-sitter-yaml", rev = "baff0b51c64ef6a1fb1f8390f3ad6015b83ec13a" }
|
||||
unicase = "2.6"
|
||||
unicode-script = "0.5.7"
|
||||
|
||||
@@ -374,13 +374,6 @@
|
||||
"ctrl-w": "workspace::CloseWindow"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "SettingsWindow",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"ctrl-w": "workspace::CloseWindow"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "BufferSearchBar",
|
||||
"bindings": {
|
||||
@@ -534,15 +527,15 @@
|
||||
"ctrl-k ctrl-l": "editor::ToggleFold",
|
||||
"ctrl-k ctrl-[": "editor::FoldRecursive",
|
||||
"ctrl-k ctrl-]": "editor::UnfoldRecursive",
|
||||
"ctrl-k ctrl-1": ["editor::FoldAtLevel", 1],
|
||||
"ctrl-k ctrl-2": ["editor::FoldAtLevel", 2],
|
||||
"ctrl-k ctrl-3": ["editor::FoldAtLevel", 3],
|
||||
"ctrl-k ctrl-4": ["editor::FoldAtLevel", 4],
|
||||
"ctrl-k ctrl-5": ["editor::FoldAtLevel", 5],
|
||||
"ctrl-k ctrl-6": ["editor::FoldAtLevel", 6],
|
||||
"ctrl-k ctrl-7": ["editor::FoldAtLevel", 7],
|
||||
"ctrl-k ctrl-8": ["editor::FoldAtLevel", 8],
|
||||
"ctrl-k ctrl-9": ["editor::FoldAtLevel", 9],
|
||||
"ctrl-k ctrl-1": "editor::FoldAtLevel_1",
|
||||
"ctrl-k ctrl-2": "editor::FoldAtLevel_2",
|
||||
"ctrl-k ctrl-3": "editor::FoldAtLevel_3",
|
||||
"ctrl-k ctrl-4": "editor::FoldAtLevel_4",
|
||||
"ctrl-k ctrl-5": "editor::FoldAtLevel_5",
|
||||
"ctrl-k ctrl-6": "editor::FoldAtLevel_6",
|
||||
"ctrl-k ctrl-7": "editor::FoldAtLevel_7",
|
||||
"ctrl-k ctrl-8": "editor::FoldAtLevel_8",
|
||||
"ctrl-k ctrl-9": "editor::FoldAtLevel_9",
|
||||
"ctrl-k ctrl-0": "editor::FoldAll",
|
||||
"ctrl-k ctrl-j": "editor::UnfoldAll",
|
||||
"ctrl-space": "editor::ShowCompletions",
|
||||
@@ -1236,9 +1229,6 @@
|
||||
"context": "Onboarding",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"ctrl-1": "onboarding::ActivateBasicsPage",
|
||||
"ctrl-2": "onboarding::ActivateEditingPage",
|
||||
"ctrl-3": "onboarding::ActivateAISetupPage",
|
||||
"ctrl-enter": "onboarding::Finish",
|
||||
"alt-shift-l": "onboarding::SignIn",
|
||||
"alt-shift-a": "onboarding::OpenAccount"
|
||||
@@ -1250,5 +1240,41 @@
|
||||
"bindings": {
|
||||
"ctrl-shift-enter": "workspace::OpenWithSystem"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "SettingsWindow",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"ctrl-w": "workspace::CloseWindow",
|
||||
"escape": "workspace::CloseWindow",
|
||||
"ctrl-m": "settings_editor::Minimize",
|
||||
"ctrl-f": "search::FocusSearch",
|
||||
"ctrl-shift-e": "settings_editor::ToggleFocusNav",
|
||||
// todo(settings_ui): cut this down based on the max files and overflow UI
|
||||
"ctrl-1": ["settings_editor::FocusFile", 0],
|
||||
"ctrl-2": ["settings_editor::FocusFile", 1],
|
||||
"ctrl-3": ["settings_editor::FocusFile", 2],
|
||||
"ctrl-4": ["settings_editor::FocusFile", 3],
|
||||
"ctrl-5": ["settings_editor::FocusFile", 4],
|
||||
"ctrl-6": ["settings_editor::FocusFile", 5],
|
||||
"ctrl-7": ["settings_editor::FocusFile", 6],
|
||||
"ctrl-8": ["settings_editor::FocusFile", 7],
|
||||
"ctrl-9": ["settings_editor::FocusFile", 8],
|
||||
"ctrl-0": ["settings_editor::FocusFile", 9],
|
||||
"ctrl-pageup": "settings_editor::FocusPreviousFile",
|
||||
"ctrl-pagedown": "settings_editor::FocusNextFile"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "SettingsWindow > NavigationMenu",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"right": "settings_editor::ExpandNavEntry",
|
||||
"left": "settings_editor::CollapseNavEntry",
|
||||
"pageup": "settings_editor::FocusPreviousRootNavEntry",
|
||||
"pagedown": "settings_editor::FocusNextRootNavEntry",
|
||||
"home": "settings_editor::FocusFirstNavEntry",
|
||||
"end": "settings_editor::FocusLastNavEntry"
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
@@ -431,13 +431,6 @@
|
||||
"cmd-w": "workspace::CloseWindow"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "SettingsWindow",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"cmd-w": "workspace::CloseWindow"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "BufferSearchBar",
|
||||
"use_key_equivalents": true,
|
||||
@@ -589,15 +582,15 @@
|
||||
"cmd-k cmd-l": "editor::ToggleFold",
|
||||
"cmd-k cmd-[": "editor::FoldRecursive",
|
||||
"cmd-k cmd-]": "editor::UnfoldRecursive",
|
||||
"cmd-k cmd-1": ["editor::FoldAtLevel", 1],
|
||||
"cmd-k cmd-2": ["editor::FoldAtLevel", 2],
|
||||
"cmd-k cmd-3": ["editor::FoldAtLevel", 3],
|
||||
"cmd-k cmd-4": ["editor::FoldAtLevel", 4],
|
||||
"cmd-k cmd-5": ["editor::FoldAtLevel", 5],
|
||||
"cmd-k cmd-6": ["editor::FoldAtLevel", 6],
|
||||
"cmd-k cmd-7": ["editor::FoldAtLevel", 7],
|
||||
"cmd-k cmd-8": ["editor::FoldAtLevel", 8],
|
||||
"cmd-k cmd-9": ["editor::FoldAtLevel", 9],
|
||||
"cmd-k cmd-1": "editor::FoldAtLevel_1",
|
||||
"cmd-k cmd-2": "editor::FoldAtLevel_2",
|
||||
"cmd-k cmd-3": "editor::FoldAtLevel_3",
|
||||
"cmd-k cmd-4": "editor::FoldAtLevel_4",
|
||||
"cmd-k cmd-5": "editor::FoldAtLevel_5",
|
||||
"cmd-k cmd-6": "editor::FoldAtLevel_6",
|
||||
"cmd-k cmd-7": "editor::FoldAtLevel_7",
|
||||
"cmd-k cmd-8": "editor::FoldAtLevel_8",
|
||||
"cmd-k cmd-9": "editor::FoldAtLevel_9",
|
||||
"cmd-k cmd-0": "editor::FoldAll",
|
||||
"cmd-k cmd-j": "editor::UnfoldAll",
|
||||
// Using `ctrl-space` / `ctrl-shift-space` in Zed requires disabling the macOS global shortcut.
|
||||
@@ -1341,10 +1334,7 @@
|
||||
"context": "Onboarding",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"cmd-1": "onboarding::ActivateBasicsPage",
|
||||
"cmd-2": "onboarding::ActivateEditingPage",
|
||||
"cmd-3": "onboarding::ActivateAISetupPage",
|
||||
"cmd-escape": "onboarding::Finish",
|
||||
"cmd-enter": "onboarding::Finish",
|
||||
"alt-tab": "onboarding::SignIn",
|
||||
"alt-shift-a": "onboarding::OpenAccount"
|
||||
}
|
||||
@@ -1355,5 +1345,41 @@
|
||||
"bindings": {
|
||||
"ctrl-shift-enter": "workspace::OpenWithSystem"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "SettingsWindow",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"cmd-w": "workspace::CloseWindow",
|
||||
"escape": "workspace::CloseWindow",
|
||||
"cmd-m": "settings_editor::Minimize",
|
||||
"cmd-f": "search::FocusSearch",
|
||||
"cmd-shift-e": "settings_editor::ToggleFocusNav",
|
||||
// todo(settings_ui): cut this down based on the max files and overflow UI
|
||||
"ctrl-1": ["settings_editor::FocusFile", 0],
|
||||
"ctrl-2": ["settings_editor::FocusFile", 1],
|
||||
"ctrl-3": ["settings_editor::FocusFile", 2],
|
||||
"ctrl-4": ["settings_editor::FocusFile", 3],
|
||||
"ctrl-5": ["settings_editor::FocusFile", 4],
|
||||
"ctrl-6": ["settings_editor::FocusFile", 5],
|
||||
"ctrl-7": ["settings_editor::FocusFile", 6],
|
||||
"ctrl-8": ["settings_editor::FocusFile", 7],
|
||||
"ctrl-9": ["settings_editor::FocusFile", 8],
|
||||
"ctrl-0": ["settings_editor::FocusFile", 9],
|
||||
"cmd-{": "settings_editor::FocusPreviousFile",
|
||||
"cmd-}": "settings_editor::FocusNextFile"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "SettingsWindow > NavigationMenu",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"right": "settings_editor::ExpandNavEntry",
|
||||
"left": "settings_editor::CollapseNavEntry",
|
||||
"pageup": "settings_editor::FocusPreviousRootNavEntry",
|
||||
"pagedown": "settings_editor::FocusNextRootNavEntry",
|
||||
"home": "settings_editor::FocusFirstNavEntry",
|
||||
"end": "settings_editor::FocusLastNavEntry"
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
@@ -134,7 +134,7 @@
|
||||
"ctrl-k z": "editor::ToggleSoftWrap",
|
||||
"ctrl-f": "buffer_search::Deploy",
|
||||
"ctrl-h": "buffer_search::DeployReplace",
|
||||
"ctrl-shift-.": "assistant::QuoteSelection",
|
||||
"ctrl-shift-.": "agent::QuoteSelection",
|
||||
"ctrl-shift-,": "assistant::InsertIntoEditor",
|
||||
"shift-alt-e": "editor::SelectEnclosingSymbol",
|
||||
"ctrl-shift-backspace": "editor::GoToPreviousChange",
|
||||
@@ -244,7 +244,7 @@
|
||||
"ctrl-shift-i": "agent::ToggleOptionsMenu",
|
||||
// "ctrl-shift-alt-n": "agent::ToggleNewThreadMenu",
|
||||
"shift-alt-escape": "agent::ExpandMessageEditor",
|
||||
"ctrl-shift-.": "assistant::QuoteSelection",
|
||||
"ctrl-shift-.": "agent::QuoteSelection",
|
||||
"shift-alt-e": "agent::RemoveAllContext",
|
||||
"ctrl-shift-e": "project_panel::ToggleFocus",
|
||||
"ctrl-shift-enter": "agent::ContinueThread",
|
||||
@@ -383,13 +383,6 @@
|
||||
"ctrl-w": "workspace::CloseWindow"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "SettingsWindow",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"ctrl-w": "workspace::CloseWindow"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "BufferSearchBar",
|
||||
"use_key_equivalents": true,
|
||||
@@ -543,15 +536,15 @@
|
||||
"ctrl-k ctrl-l": "editor::ToggleFold",
|
||||
"ctrl-k ctrl-[": "editor::FoldRecursive",
|
||||
"ctrl-k ctrl-]": "editor::UnfoldRecursive",
|
||||
"ctrl-k ctrl-1": ["editor::FoldAtLevel", 1],
|
||||
"ctrl-k ctrl-2": ["editor::FoldAtLevel", 2],
|
||||
"ctrl-k ctrl-3": ["editor::FoldAtLevel", 3],
|
||||
"ctrl-k ctrl-4": ["editor::FoldAtLevel", 4],
|
||||
"ctrl-k ctrl-5": ["editor::FoldAtLevel", 5],
|
||||
"ctrl-k ctrl-6": ["editor::FoldAtLevel", 6],
|
||||
"ctrl-k ctrl-7": ["editor::FoldAtLevel", 7],
|
||||
"ctrl-k ctrl-8": ["editor::FoldAtLevel", 8],
|
||||
"ctrl-k ctrl-9": ["editor::FoldAtLevel", 9],
|
||||
"ctrl-k ctrl-1": "editor::FoldAtLevel_1",
|
||||
"ctrl-k ctrl-2": "editor::FoldAtLevel_2",
|
||||
"ctrl-k ctrl-3": "editor::FoldAtLevel_3",
|
||||
"ctrl-k ctrl-4": "editor::FoldAtLevel_4",
|
||||
"ctrl-k ctrl-5": "editor::FoldAtLevel_5",
|
||||
"ctrl-k ctrl-6": "editor::FoldAtLevel_6",
|
||||
"ctrl-k ctrl-7": "editor::FoldAtLevel_7",
|
||||
"ctrl-k ctrl-8": "editor::FoldAtLevel_8",
|
||||
"ctrl-k ctrl-9": "editor::FoldAtLevel_9",
|
||||
"ctrl-k ctrl-0": "editor::FoldAll",
|
||||
"ctrl-k ctrl-j": "editor::UnfoldAll",
|
||||
"ctrl-space": "editor::ShowCompletions",
|
||||
@@ -1264,12 +1257,45 @@
|
||||
"context": "Onboarding",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"ctrl-1": "onboarding::ActivateBasicsPage",
|
||||
"ctrl-2": "onboarding::ActivateEditingPage",
|
||||
"ctrl-3": "onboarding::ActivateAISetupPage",
|
||||
"ctrl-enter": "onboarding::Finish",
|
||||
"alt-shift-l": "onboarding::SignIn",
|
||||
"shift-alt-a": "onboarding::OpenAccount"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "SettingsWindow",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"ctrl-w": "workspace::CloseWindow",
|
||||
"escape": "workspace::CloseWindow",
|
||||
"ctrl-m": "settings_editor::Minimize",
|
||||
"ctrl-f": "search::FocusSearch",
|
||||
"ctrl-shift-e": "settings_editor::ToggleFocusNav",
|
||||
// todo(settings_ui): cut this down based on the max files and overflow UI
|
||||
"ctrl-1": ["settings_editor::FocusFile", 0],
|
||||
"ctrl-2": ["settings_editor::FocusFile", 1],
|
||||
"ctrl-3": ["settings_editor::FocusFile", 2],
|
||||
"ctrl-4": ["settings_editor::FocusFile", 3],
|
||||
"ctrl-5": ["settings_editor::FocusFile", 4],
|
||||
"ctrl-6": ["settings_editor::FocusFile", 5],
|
||||
"ctrl-7": ["settings_editor::FocusFile", 6],
|
||||
"ctrl-8": ["settings_editor::FocusFile", 7],
|
||||
"ctrl-9": ["settings_editor::FocusFile", 8],
|
||||
"ctrl-0": ["settings_editor::FocusFile", 9],
|
||||
"ctrl-pageup": "settings_editor::FocusPreviousFile",
|
||||
"ctrl-pagedown": "settings_editor::FocusNextFile"
|
||||
}
|
||||
},
|
||||
{
|
||||
"context": "SettingsWindow > NavigationMenu",
|
||||
"use_key_equivalents": true,
|
||||
"bindings": {
|
||||
"right": "settings_editor::ExpandNavEntry",
|
||||
"left": "settings_editor::CollapseNavEntry",
|
||||
"pageup": "settings_editor::FocusPreviousRootNavEntry",
|
||||
"pagedown": "settings_editor::FocusNextRootNavEntry",
|
||||
"home": "settings_editor::FocusFirstNavEntry",
|
||||
"end": "settings_editor::FocusLastNavEntry"
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
@@ -580,18 +580,18 @@
|
||||
// "q": "vim::AnyQuotes",
|
||||
"q": "vim::MiniQuotes",
|
||||
"|": "vim::VerticalBars",
|
||||
"(": "vim::Parentheses",
|
||||
"(": ["vim::Parentheses", { "opening": true }],
|
||||
")": "vim::Parentheses",
|
||||
"b": "vim::Parentheses",
|
||||
// "b": "vim::AnyBrackets",
|
||||
// "b": "vim::MiniBrackets",
|
||||
"[": "vim::SquareBrackets",
|
||||
"[": ["vim::SquareBrackets", { "opening": true }],
|
||||
"]": "vim::SquareBrackets",
|
||||
"r": "vim::SquareBrackets",
|
||||
"{": "vim::CurlyBrackets",
|
||||
"{": ["vim::CurlyBrackets", { "opening": true }],
|
||||
"}": "vim::CurlyBrackets",
|
||||
"shift-b": "vim::CurlyBrackets",
|
||||
"<": "vim::AngleBrackets",
|
||||
"<": ["vim::AngleBrackets", { "opening": true }],
|
||||
">": "vim::AngleBrackets",
|
||||
"a": "vim::Argument",
|
||||
"i": "vim::IndentObj",
|
||||
|
||||
@@ -76,7 +76,7 @@
|
||||
"ui_font_size": 16,
|
||||
// The default font size for agent responses in the agent panel. Falls back to the UI font size if unset.
|
||||
"agent_ui_font_size": null,
|
||||
// The default font size for user messages in the agent panel. Falls back to the buffer font size if unset.
|
||||
// The default font size for user messages in the agent panel.
|
||||
"agent_buffer_font_size": 12,
|
||||
// How much to fade out unused code.
|
||||
"unnecessary_code_fade": 0.3,
|
||||
@@ -1233,8 +1233,8 @@
|
||||
"git_gutter": "tracked_files",
|
||||
/// Sets the debounce threshold (in milliseconds) after which changes are reflected in the git gutter.
|
||||
///
|
||||
/// Default: null
|
||||
"gutter_debounce": null,
|
||||
/// Default: 0
|
||||
"gutter_debounce": 0,
|
||||
// Control whether the git blame information is shown inline,
|
||||
// in the currently focused line.
|
||||
"inline_blame": {
|
||||
@@ -1311,15 +1311,18 @@
|
||||
// "proxy": "",
|
||||
// "proxy_no_verify": false
|
||||
// },
|
||||
// Whether edit predictions are enabled when editing text threads.
|
||||
// This setting has no effect if globally disabled.
|
||||
"enabled_in_text_threads": true,
|
||||
|
||||
"copilot": {
|
||||
"enterprise_uri": null,
|
||||
"proxy": null,
|
||||
"proxy_no_verify": null
|
||||
}
|
||||
},
|
||||
"codestral": {
|
||||
"model": null,
|
||||
"max_tokens": null
|
||||
},
|
||||
// Whether edit predictions are enabled when editing text threads.
|
||||
// This setting has no effect if globally disabled.
|
||||
"enabled_in_text_threads": true
|
||||
},
|
||||
// Settings specific to journaling
|
||||
"journal": {
|
||||
@@ -1401,8 +1404,8 @@
|
||||
// 4. A box drawn around the following character
|
||||
// "hollow"
|
||||
//
|
||||
// Default: not set, defaults to "block"
|
||||
"cursor_shape": null,
|
||||
// Default: "block"
|
||||
"cursor_shape": "block",
|
||||
// Set whether Alternate Scroll mode (code: ?1007) is active by default.
|
||||
// Alternate Scroll mode converts mouse scroll events into up / down key
|
||||
// presses when in the alternate screen (e.g. when running applications
|
||||
@@ -1424,8 +1427,8 @@
|
||||
// Whether or not selecting text in the terminal will automatically
|
||||
// copy to the system clipboard.
|
||||
"copy_on_select": false,
|
||||
// Whether to keep the text selection after copying it to the clipboard
|
||||
"keep_selection_on_copy": false,
|
||||
// Whether to keep the text selection after copying it to the clipboard.
|
||||
"keep_selection_on_copy": true,
|
||||
// Whether to show the terminal button in the status bar
|
||||
"button": true,
|
||||
// Any key-value pairs added to this list will be added to the terminal's
|
||||
|
||||
@@ -1276,62 +1276,6 @@ impl Thread {
|
||||
);
|
||||
}
|
||||
|
||||
pub fn retry_last_completion(
|
||||
&mut self,
|
||||
window: Option<AnyWindowHandle>,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
// Clear any existing error state
|
||||
self.retry_state = None;
|
||||
|
||||
// Use the last error context if available, otherwise fall back to configured model
|
||||
let (model, intent) = if let Some((model, intent)) = self.last_error_context.take() {
|
||||
(model, intent)
|
||||
} else if let Some(configured_model) = self.configured_model.as_ref() {
|
||||
let model = configured_model.model.clone();
|
||||
let intent = if self.has_pending_tool_uses() {
|
||||
CompletionIntent::ToolResults
|
||||
} else {
|
||||
CompletionIntent::UserPrompt
|
||||
};
|
||||
(model, intent)
|
||||
} else if let Some(configured_model) = self.get_or_init_configured_model(cx) {
|
||||
let model = configured_model.model.clone();
|
||||
let intent = if self.has_pending_tool_uses() {
|
||||
CompletionIntent::ToolResults
|
||||
} else {
|
||||
CompletionIntent::UserPrompt
|
||||
};
|
||||
(model, intent)
|
||||
} else {
|
||||
return;
|
||||
};
|
||||
|
||||
self.send_to_model(model, intent, window, cx);
|
||||
}
|
||||
|
||||
pub fn enable_burn_mode_and_retry(
|
||||
&mut self,
|
||||
window: Option<AnyWindowHandle>,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.completion_mode = CompletionMode::Burn;
|
||||
cx.emit(ThreadEvent::ProfileChanged);
|
||||
self.retry_last_completion(window, cx);
|
||||
}
|
||||
|
||||
pub fn used_tools_since_last_user_message(&self) -> bool {
|
||||
for message in self.messages.iter().rev() {
|
||||
if self.tool_use.message_has_tool_results(message.id) {
|
||||
return true;
|
||||
} else if message.role == Role::User {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
false
|
||||
}
|
||||
|
||||
pub fn to_completion_request(
|
||||
&self,
|
||||
model: Arc<dyn LanguageModel>,
|
||||
|
||||
@@ -25,23 +25,21 @@ use std::any::Any;
|
||||
use std::collections::HashMap;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::rc::Rc;
|
||||
use std::sync::{Arc, LazyLock};
|
||||
use std::sync::Arc;
|
||||
use util::ResultExt;
|
||||
use util::rel_path::RelPath;
|
||||
|
||||
static RULES_FILE_NAMES: LazyLock<[&RelPath; 9]> = LazyLock::new(|| {
|
||||
[
|
||||
RelPath::unix(".rules").unwrap(),
|
||||
RelPath::unix(".cursorrules").unwrap(),
|
||||
RelPath::unix(".windsurfrules").unwrap(),
|
||||
RelPath::unix(".clinerules").unwrap(),
|
||||
RelPath::unix(".github/copilot-instructions.md").unwrap(),
|
||||
RelPath::unix("CLAUDE.md").unwrap(),
|
||||
RelPath::unix("AGENT.md").unwrap(),
|
||||
RelPath::unix("AGENTS.md").unwrap(),
|
||||
RelPath::unix("GEMINI.md").unwrap(),
|
||||
]
|
||||
});
|
||||
const RULES_FILE_NAMES: [&str; 9] = [
|
||||
".rules",
|
||||
".cursorrules",
|
||||
".windsurfrules",
|
||||
".clinerules",
|
||||
".github/copilot-instructions.md",
|
||||
"CLAUDE.md",
|
||||
"AGENT.md",
|
||||
"AGENTS.md",
|
||||
"GEMINI.md",
|
||||
];
|
||||
|
||||
pub struct RulesLoadingError {
|
||||
pub message: SharedString,
|
||||
@@ -477,7 +475,7 @@ impl NativeAgent {
|
||||
.into_iter()
|
||||
.filter_map(|name| {
|
||||
worktree
|
||||
.entry_for_path(name)
|
||||
.entry_for_path(RelPath::unix(name).unwrap())
|
||||
.filter(|entry| entry.is_file())
|
||||
.map(|entry| entry.path.clone())
|
||||
})
|
||||
@@ -558,10 +556,11 @@ impl NativeAgent {
|
||||
self.project_context_needs_refresh.send(()).ok();
|
||||
}
|
||||
project::Event::WorktreeUpdatedEntries(_, items) => {
|
||||
if items
|
||||
.iter()
|
||||
.any(|(path, _, _)| RULES_FILE_NAMES.iter().any(|name| path.as_ref() == *name))
|
||||
{
|
||||
if items.iter().any(|(path, _, _)| {
|
||||
RULES_FILE_NAMES
|
||||
.iter()
|
||||
.any(|name| path.as_ref() == RelPath::unix(name).unwrap())
|
||||
}) {
|
||||
self.project_context_needs_refresh.send(()).ok();
|
||||
}
|
||||
}
|
||||
@@ -1419,7 +1418,6 @@ mod tests {
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
#[cfg_attr(target_os = "windows", ignore)] // TODO: Fix this test on Windows
|
||||
async fn test_save_load_thread(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
let fs = FakeFs::new(cx.executor());
|
||||
@@ -1499,7 +1497,8 @@ mod tests {
|
||||
model.send_last_completion_stream_text_chunk("Lorem.");
|
||||
model.end_last_completion_stream();
|
||||
cx.run_until_parked();
|
||||
summary_model.send_last_completion_stream_text_chunk("Explaining /a/b.md");
|
||||
summary_model
|
||||
.send_last_completion_stream_text_chunk(&format!("Explaining {}", path!("/a/b.md")));
|
||||
summary_model.end_last_completion_stream();
|
||||
|
||||
send.await.unwrap();
|
||||
@@ -1539,7 +1538,7 @@ mod tests {
|
||||
history_entries(&history_store, cx),
|
||||
vec![(
|
||||
HistoryEntryId::AcpThread(session_id.clone()),
|
||||
"Explaining /a/b.md".into()
|
||||
format!("Explaining {}", path!("/a/b.md"))
|
||||
)]
|
||||
);
|
||||
let acp_thread = agent
|
||||
|
||||
@@ -15,10 +15,11 @@ use agent_settings::{
|
||||
use anyhow::{Context as _, Result, anyhow};
|
||||
use assistant_tool::adapt_schema_to_format;
|
||||
use chrono::{DateTime, Utc};
|
||||
use client::{ModelRequestUsage, RequestUsage};
|
||||
use cloud_llm_client::{CompletionIntent, CompletionRequestStatus, UsageLimit};
|
||||
use client::{ModelRequestUsage, RequestUsage, UserStore};
|
||||
use cloud_llm_client::{CompletionIntent, CompletionRequestStatus, Plan, UsageLimit};
|
||||
use collections::{HashMap, HashSet, IndexMap};
|
||||
use fs::Fs;
|
||||
use futures::stream;
|
||||
use futures::{
|
||||
FutureExt,
|
||||
channel::{mpsc, oneshot},
|
||||
@@ -34,7 +35,7 @@ use language_model::{
|
||||
LanguageModelImage, LanguageModelProviderId, LanguageModelRegistry, LanguageModelRequest,
|
||||
LanguageModelRequestMessage, LanguageModelRequestTool, LanguageModelToolResult,
|
||||
LanguageModelToolResultContent, LanguageModelToolSchemaFormat, LanguageModelToolUse,
|
||||
LanguageModelToolUseId, Role, SelectedModel, StopReason, TokenUsage,
|
||||
LanguageModelToolUseId, Role, SelectedModel, StopReason, TokenUsage, ZED_CLOUD_PROVIDER_ID,
|
||||
};
|
||||
use project::{
|
||||
Project,
|
||||
@@ -585,6 +586,7 @@ pub struct Thread {
|
||||
pending_title_generation: Option<Task<()>>,
|
||||
summary: Option<SharedString>,
|
||||
messages: Vec<Message>,
|
||||
user_store: Entity<UserStore>,
|
||||
completion_mode: CompletionMode,
|
||||
/// Holds the task that handles agent interaction until the end of the turn.
|
||||
/// Survives across multiple requests as the model performs tool calls and
|
||||
@@ -641,6 +643,7 @@ impl Thread {
|
||||
pending_title_generation: None,
|
||||
summary: None,
|
||||
messages: Vec::new(),
|
||||
user_store: project.read(cx).user_store(),
|
||||
completion_mode: AgentSettings::get_global(cx).preferred_completion_mode,
|
||||
running_turn: None,
|
||||
pending_message: None,
|
||||
@@ -820,6 +823,7 @@ impl Thread {
|
||||
pending_title_generation: None,
|
||||
summary: db_thread.detailed_summary,
|
||||
messages: db_thread.messages,
|
||||
user_store: project.read(cx).user_store(),
|
||||
completion_mode: db_thread.completion_mode.unwrap_or_default(),
|
||||
running_turn: None,
|
||||
pending_message: None,
|
||||
@@ -1249,12 +1253,12 @@ impl Thread {
|
||||
);
|
||||
|
||||
log::debug!("Calling model.stream_completion, attempt {}", attempt);
|
||||
let mut events = model
|
||||
.stream_completion(request, cx)
|
||||
.await
|
||||
.map_err(|error| anyhow!(error))?;
|
||||
|
||||
let (mut events, mut error) = match model.stream_completion(request, cx).await {
|
||||
Ok(events) => (events, None),
|
||||
Err(err) => (stream::empty().boxed(), Some(err)),
|
||||
};
|
||||
let mut tool_results = FuturesUnordered::new();
|
||||
let mut error = None;
|
||||
while let Some(event) = events.next().await {
|
||||
log::trace!("Received completion event: {:?}", event);
|
||||
match event {
|
||||
@@ -1302,8 +1306,10 @@ impl Thread {
|
||||
|
||||
if let Some(error) = error {
|
||||
attempt += 1;
|
||||
let retry =
|
||||
this.update(cx, |this, _| this.handle_completion_error(error, attempt))??;
|
||||
let retry = this.update(cx, |this, cx| {
|
||||
let user_store = this.user_store.read(cx);
|
||||
this.handle_completion_error(error, attempt, user_store.plan())
|
||||
})??;
|
||||
let timer = cx.background_executor().timer(retry.duration);
|
||||
event_stream.send_retry(retry);
|
||||
timer.await;
|
||||
@@ -1330,8 +1336,23 @@ impl Thread {
|
||||
&mut self,
|
||||
error: LanguageModelCompletionError,
|
||||
attempt: u8,
|
||||
plan: Option<Plan>,
|
||||
) -> Result<acp_thread::RetryStatus> {
|
||||
if self.completion_mode == CompletionMode::Normal {
|
||||
let Some(model) = self.model.as_ref() else {
|
||||
return Err(anyhow!(error));
|
||||
};
|
||||
|
||||
let auto_retry = if model.provider_id() == ZED_CLOUD_PROVIDER_ID {
|
||||
match plan {
|
||||
Some(Plan::V2(_)) => true,
|
||||
Some(Plan::V1(_)) => self.completion_mode == CompletionMode::Burn,
|
||||
None => false,
|
||||
}
|
||||
} else {
|
||||
true
|
||||
};
|
||||
|
||||
if !auto_retry {
|
||||
return Err(anyhow!(error));
|
||||
}
|
||||
|
||||
|
||||
@@ -1030,6 +1030,7 @@ impl MessageEditor {
|
||||
) else {
|
||||
return;
|
||||
};
|
||||
|
||||
self.editor.update(cx, |message_editor, cx| {
|
||||
message_editor.edit([(cursor_anchor..cursor_anchor, completion.new_text)], cx);
|
||||
});
|
||||
|
||||
@@ -1046,32 +1046,33 @@ impl AcpThreadView {
|
||||
};
|
||||
|
||||
let connection = thread.read(cx).connection().clone();
|
||||
let auth_methods = connection.auth_methods();
|
||||
let has_supported_auth = auth_methods.iter().any(|method| {
|
||||
let id = method.id.0.as_ref();
|
||||
id == "claude-login" || id == "spawn-gemini-cli"
|
||||
});
|
||||
let can_login = has_supported_auth || auth_methods.is_empty() || self.login.is_some();
|
||||
if !can_login {
|
||||
let can_login = !connection.auth_methods().is_empty() || self.login.is_some();
|
||||
// Does the agent have a specific logout command? Prefer that in case they need to reset internal state.
|
||||
let logout_supported = text == "/logout"
|
||||
&& self
|
||||
.available_commands
|
||||
.borrow()
|
||||
.iter()
|
||||
.any(|command| command.name == "logout");
|
||||
if can_login && !logout_supported {
|
||||
let this = cx.weak_entity();
|
||||
let agent = self.agent.clone();
|
||||
window.defer(cx, |window, cx| {
|
||||
Self::handle_auth_required(
|
||||
this,
|
||||
AuthRequired {
|
||||
description: None,
|
||||
provider_id: None,
|
||||
},
|
||||
agent,
|
||||
connection,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
});
|
||||
cx.notify();
|
||||
return;
|
||||
};
|
||||
let this = cx.weak_entity();
|
||||
let agent = self.agent.clone();
|
||||
window.defer(cx, |window, cx| {
|
||||
Self::handle_auth_required(
|
||||
this,
|
||||
AuthRequired {
|
||||
description: None,
|
||||
provider_id: None,
|
||||
},
|
||||
agent,
|
||||
connection,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
});
|
||||
cx.notify();
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
self.send_impl(self.message_editor.clone(), window, cx)
|
||||
@@ -2727,7 +2728,7 @@ impl AcpThreadView {
|
||||
let output_line_count = output.map(|output| output.content_line_count).unwrap_or(0);
|
||||
|
||||
let command_failed = command_finished
|
||||
&& output.is_some_and(|o| o.exit_status.is_none_or(|status| !status.success()));
|
||||
&& output.is_some_and(|o| o.exit_status.is_some_and(|status| !status.success()));
|
||||
|
||||
let time_elapsed = if let Some(output) = output {
|
||||
output.ended_at.duration_since(started_at)
|
||||
@@ -4971,10 +4972,12 @@ impl AcpThreadView {
|
||||
})
|
||||
}
|
||||
|
||||
/// Inserts the selected text into the message editor or the message being
|
||||
/// edited, if any.
|
||||
pub(crate) fn insert_selections(&self, window: &mut Window, cx: &mut Context<Self>) {
|
||||
self.message_editor.update(cx, |message_editor, cx| {
|
||||
message_editor.insert_selections(window, cx);
|
||||
})
|
||||
self.active_editor(cx).update(cx, |editor, cx| {
|
||||
editor.insert_selections(window, cx);
|
||||
});
|
||||
}
|
||||
|
||||
fn render_thread_retry_status_callout(
|
||||
@@ -5385,6 +5388,23 @@ impl AcpThreadView {
|
||||
};
|
||||
task.detach_and_log_err(cx);
|
||||
}
|
||||
|
||||
/// Returns the currently active editor, either for a message that is being
|
||||
/// edited or the editor for a new message.
|
||||
fn active_editor(&self, cx: &App) -> Entity<MessageEditor> {
|
||||
if let Some(index) = self.editing_message
|
||||
&& let Some(editor) = self
|
||||
.entry_view_state
|
||||
.read(cx)
|
||||
.entry(index)
|
||||
.and_then(|e| e.message_editor())
|
||||
.cloned()
|
||||
{
|
||||
editor
|
||||
} else {
|
||||
self.message_editor.clone()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn loading_contents_spinner(size: IconSize) -> AnyElement {
|
||||
@@ -5399,7 +5419,7 @@ impl Focusable for AcpThreadView {
|
||||
fn focus_handle(&self, cx: &App) -> FocusHandle {
|
||||
match self.thread_state {
|
||||
ThreadState::Loading { .. } | ThreadState::Ready { .. } => {
|
||||
self.message_editor.focus_handle(cx)
|
||||
self.active_editor(cx).focus_handle(cx)
|
||||
}
|
||||
ThreadState::LoadError(_) | ThreadState::Unauthenticated { .. } => {
|
||||
self.focus_handle.clone()
|
||||
@@ -6660,4 +6680,146 @@ pub(crate) mod tests {
|
||||
)
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_message_editing_insert_selections(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let connection = StubAgentConnection::new();
|
||||
connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk {
|
||||
content: acp::ContentBlock::Text(acp::TextContent {
|
||||
text: "Response".into(),
|
||||
annotations: None,
|
||||
meta: None,
|
||||
}),
|
||||
}]);
|
||||
|
||||
let (thread_view, cx) = setup_thread_view(StubAgentServer::new(connection), cx).await;
|
||||
add_to_workspace(thread_view.clone(), cx);
|
||||
|
||||
let message_editor = cx.read(|cx| thread_view.read(cx).message_editor.clone());
|
||||
message_editor.update_in(cx, |editor, window, cx| {
|
||||
editor.set_text("Original message to edit", window, cx)
|
||||
});
|
||||
thread_view.update_in(cx, |thread_view, window, cx| thread_view.send(window, cx));
|
||||
cx.run_until_parked();
|
||||
|
||||
let user_message_editor = thread_view.read_with(cx, |thread_view, cx| {
|
||||
thread_view
|
||||
.entry_view_state
|
||||
.read(cx)
|
||||
.entry(0)
|
||||
.expect("Should have at least one entry")
|
||||
.message_editor()
|
||||
.expect("Should have message editor")
|
||||
.clone()
|
||||
});
|
||||
|
||||
cx.focus(&user_message_editor);
|
||||
thread_view.read_with(cx, |thread_view, _cx| {
|
||||
assert_eq!(thread_view.editing_message, Some(0));
|
||||
});
|
||||
|
||||
// Ensure to edit the focused message before proceeding otherwise, since
|
||||
// its content is not different from what was sent, focus will be lost.
|
||||
user_message_editor.update_in(cx, |editor, window, cx| {
|
||||
editor.set_text("Original message to edit with ", window, cx)
|
||||
});
|
||||
|
||||
// Create a simple buffer with some text so we can create a selection
|
||||
// that will then be added to the message being edited.
|
||||
let (workspace, project) = thread_view.read_with(cx, |thread_view, _cx| {
|
||||
(thread_view.workspace.clone(), thread_view.project.clone())
|
||||
});
|
||||
let buffer = project.update(cx, |project, cx| {
|
||||
project.create_local_buffer("let a = 10 + 10;", None, false, cx)
|
||||
});
|
||||
|
||||
workspace
|
||||
.update_in(cx, |workspace, window, cx| {
|
||||
let editor = cx.new(|cx| {
|
||||
let mut editor =
|
||||
Editor::for_buffer(buffer.clone(), Some(project.clone()), window, cx);
|
||||
|
||||
editor.change_selections(Default::default(), window, cx, |selections| {
|
||||
selections.select_ranges([8..15]);
|
||||
});
|
||||
|
||||
editor
|
||||
});
|
||||
workspace.add_item_to_active_pane(Box::new(editor), None, false, window, cx);
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
thread_view.update_in(cx, |thread_view, window, cx| {
|
||||
assert_eq!(thread_view.editing_message, Some(0));
|
||||
thread_view.insert_selections(window, cx);
|
||||
});
|
||||
|
||||
user_message_editor.read_with(cx, |editor, cx| {
|
||||
let text = editor.editor().read(cx).text(cx);
|
||||
let expected_text = String::from("Original message to edit with selection ");
|
||||
|
||||
assert_eq!(text, expected_text);
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_insert_selections(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let connection = StubAgentConnection::new();
|
||||
connection.set_next_prompt_updates(vec![acp::SessionUpdate::AgentMessageChunk {
|
||||
content: acp::ContentBlock::Text(acp::TextContent {
|
||||
text: "Response".into(),
|
||||
annotations: None,
|
||||
meta: None,
|
||||
}),
|
||||
}]);
|
||||
|
||||
let (thread_view, cx) = setup_thread_view(StubAgentServer::new(connection), cx).await;
|
||||
add_to_workspace(thread_view.clone(), cx);
|
||||
|
||||
let message_editor = cx.read(|cx| thread_view.read(cx).message_editor.clone());
|
||||
message_editor.update_in(cx, |editor, window, cx| {
|
||||
editor.set_text("Can you review this snippet ", window, cx)
|
||||
});
|
||||
|
||||
// Create a simple buffer with some text so we can create a selection
|
||||
// that will then be added to the message being edited.
|
||||
let (workspace, project) = thread_view.read_with(cx, |thread_view, _cx| {
|
||||
(thread_view.workspace.clone(), thread_view.project.clone())
|
||||
});
|
||||
let buffer = project.update(cx, |project, cx| {
|
||||
project.create_local_buffer("let a = 10 + 10;", None, false, cx)
|
||||
});
|
||||
|
||||
workspace
|
||||
.update_in(cx, |workspace, window, cx| {
|
||||
let editor = cx.new(|cx| {
|
||||
let mut editor =
|
||||
Editor::for_buffer(buffer.clone(), Some(project.clone()), window, cx);
|
||||
|
||||
editor.change_selections(Default::default(), window, cx, |selections| {
|
||||
selections.select_ranges([8..15]);
|
||||
});
|
||||
|
||||
editor
|
||||
});
|
||||
workspace.add_item_to_active_pane(Box::new(editor), None, false, window, cx);
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
thread_view.update_in(cx, |thread_view, window, cx| {
|
||||
assert_eq!(thread_view.editing_message, None);
|
||||
thread_view.insert_selections(window, cx);
|
||||
});
|
||||
|
||||
thread_view.read_with(cx, |thread_view, cx| {
|
||||
let text = thread_view.message_editor.read(cx).text(cx);
|
||||
let expected_txt = String::from("Can you review this snippet selection ");
|
||||
|
||||
assert_eq!(text, expected_txt);
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -15,7 +15,6 @@ use context_server::ContextServerId;
|
||||
use editor::{Editor, SelectionEffects, scroll::Autoscroll};
|
||||
use extension::ExtensionManifest;
|
||||
use extension_host::ExtensionStore;
|
||||
use feature_flags::{CodexAcpFeatureFlag, FeatureFlagAppExt as _};
|
||||
use fs::Fs;
|
||||
use gpui::{
|
||||
Action, AnyView, App, AsyncWindowContext, Corner, Entity, EventEmitter, FocusHandle, Focusable,
|
||||
@@ -409,7 +408,7 @@ impl AgentConfiguration {
|
||||
|
||||
SwitchField::new(
|
||||
"always-allow-tool-actions-switch",
|
||||
"Allow running commands without asking for confirmation",
|
||||
Some("Allow running commands without asking for confirmation"),
|
||||
Some(
|
||||
"The agent can perform potentially destructive actions without asking for your confirmation.".into(),
|
||||
),
|
||||
@@ -429,7 +428,7 @@ impl AgentConfiguration {
|
||||
|
||||
SwitchField::new(
|
||||
"single-file-review",
|
||||
"Enable single-file agent reviews",
|
||||
Some("Enable single-file agent reviews"),
|
||||
Some("Agent edits are also displayed in single-file editors for review.".into()),
|
||||
single_file_review,
|
||||
move |state, _window, cx| {
|
||||
@@ -450,7 +449,7 @@ impl AgentConfiguration {
|
||||
|
||||
SwitchField::new(
|
||||
"sound-notification",
|
||||
"Play sound when finished generating",
|
||||
Some("Play sound when finished generating"),
|
||||
Some(
|
||||
"Hear a notification sound when the agent is done generating changes or needs your input.".into(),
|
||||
),
|
||||
@@ -470,7 +469,7 @@ impl AgentConfiguration {
|
||||
|
||||
SwitchField::new(
|
||||
"modifier-send",
|
||||
"Use modifier to submit a message",
|
||||
Some("Use modifier to submit a message"),
|
||||
Some(
|
||||
"Make a modifier (cmd-enter on macOS, ctrl-enter on Linux or Windows) required to send messages.".into(),
|
||||
),
|
||||
@@ -1085,14 +1084,11 @@ impl AgentConfiguration {
|
||||
"Claude Code",
|
||||
))
|
||||
.child(Divider::horizontal().color(DividerColor::BorderFaded))
|
||||
.when(cx.has_flag::<CodexAcpFeatureFlag>(), |this| {
|
||||
this
|
||||
.child(self.render_agent_server(
|
||||
IconName::AiOpenAi,
|
||||
"Codex",
|
||||
))
|
||||
.child(Divider::horizontal().color(DividerColor::BorderFaded))
|
||||
})
|
||||
.child(self.render_agent_server(
|
||||
IconName::AiOpenAi,
|
||||
"Codex",
|
||||
))
|
||||
.child(Divider::horizontal().color(DividerColor::BorderFaded))
|
||||
.child(self.render_agent_server(
|
||||
IconName::AiGemini,
|
||||
"Gemini CLI",
|
||||
|
||||
@@ -619,10 +619,10 @@ mod tests {
|
||||
cx.update(|_window, cx| {
|
||||
LanguageModelRegistry::global(cx).update(cx, |registry, cx| {
|
||||
registry.register_provider(
|
||||
FakeLanguageModelProvider::new(
|
||||
Arc::new(FakeLanguageModelProvider::new(
|
||||
LanguageModelProviderId::new("someprovider"),
|
||||
LanguageModelProviderName::new("Some Provider"),
|
||||
),
|
||||
)),
|
||||
cx,
|
||||
);
|
||||
});
|
||||
|
||||
@@ -48,12 +48,12 @@ use editor::{Anchor, AnchorRangeExt as _, Editor, EditorEvent, MultiBuffer};
|
||||
use fs::Fs;
|
||||
use gpui::{
|
||||
Action, AnyElement, App, AsyncWindowContext, Corner, DismissEvent, Entity, EventEmitter,
|
||||
ExternalPaths, FocusHandle, Focusable, KeyContext, Pixels, ReadGlobal as _, Subscription, Task,
|
||||
UpdateGlobal, WeakEntity, prelude::*,
|
||||
ExternalPaths, FocusHandle, Focusable, KeyContext, Pixels, Subscription, Task, UpdateGlobal,
|
||||
WeakEntity, prelude::*,
|
||||
};
|
||||
use language::LanguageRegistry;
|
||||
use language_model::{ConfigurationError, LanguageModelRegistry};
|
||||
use project::{DisableAiSettings, Project, ProjectPath, Worktree};
|
||||
use project::{Project, ProjectPath, Worktree};
|
||||
use prompt_store::{PromptBuilder, PromptStore, UserPromptId};
|
||||
use rules_library::{RulesLibrary, open_rules_library};
|
||||
use search::{BufferSearchBar, buffer_search};
|
||||
@@ -75,7 +75,6 @@ use zed_actions::{
|
||||
assistant::{OpenRulesLibrary, ToggleFocus},
|
||||
};
|
||||
|
||||
use feature_flags::{CodexAcpFeatureFlag, FeatureFlagAppExt as _};
|
||||
const AGENT_PANEL_KEY: &str = "agent_panel";
|
||||
|
||||
#[derive(Serialize, Deserialize, Debug)]
|
||||
@@ -520,13 +519,6 @@ impl AgentPanel {
|
||||
)
|
||||
});
|
||||
|
||||
if SettingsStore::global(cx)
|
||||
.get::<DisableAiSettings>(None)
|
||||
.disable_ai
|
||||
{
|
||||
return panel;
|
||||
}
|
||||
|
||||
panel.as_mut(cx).loading = true;
|
||||
if let Some(serialized_panel) = serialized_panel {
|
||||
panel.update(cx, |panel, cx| {
|
||||
@@ -678,43 +670,6 @@ impl AgentPanel {
|
||||
)
|
||||
});
|
||||
|
||||
let mut old_disable_ai = false;
|
||||
cx.observe_global_in::<SettingsStore>(window, move |panel, window, cx| {
|
||||
let disable_ai = DisableAiSettings::get_global(cx).disable_ai;
|
||||
if old_disable_ai != disable_ai {
|
||||
let agent_panel_id = cx.entity_id();
|
||||
let agent_panel_visible = panel
|
||||
.workspace
|
||||
.update(cx, |workspace, cx| {
|
||||
let agent_dock_position = panel.position(window, cx);
|
||||
let agent_dock = workspace.dock_at_position(agent_dock_position);
|
||||
let agent_panel_focused = agent_dock
|
||||
.read(cx)
|
||||
.active_panel()
|
||||
.is_some_and(|panel| panel.panel_id() == agent_panel_id);
|
||||
|
||||
let active_panel_visible = agent_dock
|
||||
.read(cx)
|
||||
.visible_panel()
|
||||
.is_some_and(|panel| panel.panel_id() == agent_panel_id);
|
||||
|
||||
if agent_panel_focused {
|
||||
cx.dispatch_action(&ToggleFocus);
|
||||
}
|
||||
|
||||
active_panel_visible
|
||||
})
|
||||
.unwrap_or_default();
|
||||
|
||||
if agent_panel_visible {
|
||||
cx.emit(PanelEvent::Close);
|
||||
}
|
||||
|
||||
old_disable_ai = disable_ai;
|
||||
}
|
||||
})
|
||||
.detach();
|
||||
|
||||
Self {
|
||||
active_view,
|
||||
workspace,
|
||||
@@ -1983,34 +1938,32 @@ impl AgentPanel {
|
||||
}
|
||||
}),
|
||||
)
|
||||
.when(cx.has_flag::<CodexAcpFeatureFlag>(), |this| {
|
||||
this.item(
|
||||
ContextMenuEntry::new("New Codex Thread")
|
||||
.icon(IconName::AiOpenAi)
|
||||
.disabled(is_via_collab)
|
||||
.icon_color(Color::Muted)
|
||||
.handler({
|
||||
let workspace = workspace.clone();
|
||||
move |window, cx| {
|
||||
if let Some(workspace) = workspace.upgrade() {
|
||||
workspace.update(cx, |workspace, cx| {
|
||||
if let Some(panel) =
|
||||
workspace.panel::<AgentPanel>(cx)
|
||||
{
|
||||
panel.update(cx, |panel, cx| {
|
||||
panel.new_agent_thread(
|
||||
AgentType::Codex,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
.item(
|
||||
ContextMenuEntry::new("New Codex Thread")
|
||||
.icon(IconName::AiOpenAi)
|
||||
.disabled(is_via_collab)
|
||||
.icon_color(Color::Muted)
|
||||
.handler({
|
||||
let workspace = workspace.clone();
|
||||
move |window, cx| {
|
||||
if let Some(workspace) = workspace.upgrade() {
|
||||
workspace.update(cx, |workspace, cx| {
|
||||
if let Some(panel) =
|
||||
workspace.panel::<AgentPanel>(cx)
|
||||
{
|
||||
panel.update(cx, |panel, cx| {
|
||||
panel.new_agent_thread(
|
||||
AgentType::Codex,
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
}),
|
||||
)
|
||||
})
|
||||
}
|
||||
}),
|
||||
)
|
||||
.item(
|
||||
ContextMenuEntry::new("New Gemini CLI Thread")
|
||||
.icon(IconName::AiGemini)
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
use anyhow::{Context as _, Result};
|
||||
use collections::HashMap;
|
||||
use gpui::{App, BackgroundExecutor, BorrowAppContext, Global};
|
||||
use log::info;
|
||||
|
||||
#[cfg(not(any(all(target_os = "windows", target_env = "gnu"), target_os = "freebsd")))]
|
||||
mod non_windows_and_freebsd_deps {
|
||||
pub(super) use gpui::AsyncApp;
|
||||
pub(super) use libwebrtc::native::apm;
|
||||
pub(super) use log::info;
|
||||
pub(super) use parking_lot::Mutex;
|
||||
pub(super) use rodio::cpal::Sample;
|
||||
pub(super) use rodio::source::LimitSettings;
|
||||
|
||||
@@ -658,7 +658,7 @@ impl AutoUpdater {
|
||||
let filename = match OS {
|
||||
"macos" => anyhow::Ok("Zed.dmg"),
|
||||
"linux" => Ok("zed.tar.gz"),
|
||||
"windows" => Ok("zed_editor_installer.exe"),
|
||||
"windows" => Ok("Zed.exe"),
|
||||
unsupported_os => anyhow::bail!("not supported: {unsupported_os}"),
|
||||
}?;
|
||||
|
||||
|
||||
@@ -127,7 +127,6 @@ pub struct DeclarationScoreComponents {
|
||||
pub declaration_count: usize,
|
||||
pub reference_line_distance: u32,
|
||||
pub declaration_line_distance: u32,
|
||||
pub declaration_line_distance_rank: usize,
|
||||
pub excerpt_vs_item_jaccard: f32,
|
||||
pub excerpt_vs_signature_jaccard: f32,
|
||||
pub adjacent_vs_item_jaccard: f32,
|
||||
@@ -136,6 +135,13 @@ pub struct DeclarationScoreComponents {
|
||||
pub excerpt_vs_signature_weighted_overlap: f32,
|
||||
pub adjacent_vs_item_weighted_overlap: f32,
|
||||
pub adjacent_vs_signature_weighted_overlap: f32,
|
||||
pub path_import_match_count: usize,
|
||||
pub wildcard_path_import_match_count: usize,
|
||||
pub import_similarity: f32,
|
||||
pub max_import_similarity: f32,
|
||||
pub normalized_import_similarity: f32,
|
||||
pub wildcard_import_similarity: f32,
|
||||
pub normalized_wildcard_import_similarity: f32,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
|
||||
28
crates/codestral/Cargo.toml
Normal file
28
crates/codestral/Cargo.toml
Normal file
@@ -0,0 +1,28 @@
|
||||
[package]
|
||||
name = "codestral"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
publish = false
|
||||
license = "GPL-3.0-or-later"
|
||||
|
||||
[lib]
|
||||
path = "src/codestral.rs"
|
||||
|
||||
[dependencies]
|
||||
anyhow.workspace = true
|
||||
edit_prediction.workspace = true
|
||||
edit_prediction_context.workspace = true
|
||||
futures.workspace = true
|
||||
gpui.workspace = true
|
||||
http_client.workspace = true
|
||||
language.workspace = true
|
||||
language_models.workspace = true
|
||||
log.workspace = true
|
||||
mistral.workspace = true
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
smol.workspace = true
|
||||
text.workspace = true
|
||||
workspace-hack.workspace = true
|
||||
|
||||
[dev-dependencies]
|
||||
1
crates/codestral/LICENSE-GPL
Symbolic link
1
crates/codestral/LICENSE-GPL
Symbolic link
@@ -0,0 +1 @@
|
||||
../../LICENSE-GPL
|
||||
381
crates/codestral/src/codestral.rs
Normal file
381
crates/codestral/src/codestral.rs
Normal file
@@ -0,0 +1,381 @@
|
||||
use anyhow::{Context as _, Result};
|
||||
use edit_prediction::{Direction, EditPrediction, EditPredictionProvider};
|
||||
use edit_prediction_context::{EditPredictionExcerpt, EditPredictionExcerptOptions};
|
||||
use futures::AsyncReadExt;
|
||||
use gpui::{App, Context, Entity, Task};
|
||||
use http_client::HttpClient;
|
||||
use language::{
|
||||
language_settings::all_language_settings, Anchor, Buffer, BufferSnapshot, EditPreview, ToPoint,
|
||||
};
|
||||
use language_models::MistralLanguageModelProvider;
|
||||
use mistral::CODESTRAL_API_URL;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{
|
||||
ops::Range,
|
||||
sync::Arc,
|
||||
time::{Duration, Instant},
|
||||
};
|
||||
use text::ToOffset;
|
||||
|
||||
pub const DEBOUNCE_TIMEOUT: Duration = Duration::from_millis(150);
|
||||
|
||||
const EXCERPT_OPTIONS: EditPredictionExcerptOptions = EditPredictionExcerptOptions {
|
||||
max_bytes: 1050,
|
||||
min_bytes: 525,
|
||||
target_before_cursor_over_total_bytes: 0.66,
|
||||
};
|
||||
|
||||
/// Represents a completion that has been received and processed from Codestral.
|
||||
/// This struct maintains the state needed to interpolate the completion as the user types.
|
||||
#[derive(Clone)]
|
||||
struct CurrentCompletion {
|
||||
/// The buffer snapshot at the time the completion was generated.
|
||||
/// Used to detect changes and interpolate edits.
|
||||
snapshot: BufferSnapshot,
|
||||
/// The edits that should be applied to transform the original text into the predicted text.
|
||||
/// Each edit is a range in the buffer and the text to replace it with.
|
||||
edits: Arc<[(Range<Anchor>, String)]>,
|
||||
/// Preview of how the buffer will look after applying the edits.
|
||||
edit_preview: EditPreview,
|
||||
}
|
||||
|
||||
impl CurrentCompletion {
|
||||
/// Attempts to adjust the edits based on changes made to the buffer since the completion was generated.
|
||||
/// Returns None if the user's edits conflict with the predicted edits.
|
||||
fn interpolate(&self, new_snapshot: &BufferSnapshot) -> Option<Vec<(Range<Anchor>, String)>> {
|
||||
edit_prediction::interpolate_edits(&self.snapshot, new_snapshot, &self.edits)
|
||||
}
|
||||
}
|
||||
|
||||
pub struct CodestralCompletionProvider {
|
||||
http_client: Arc<dyn HttpClient>,
|
||||
pending_request: Option<Task<Result<()>>>,
|
||||
current_completion: Option<CurrentCompletion>,
|
||||
}
|
||||
|
||||
impl CodestralCompletionProvider {
|
||||
pub fn new(http_client: Arc<dyn HttpClient>) -> Self {
|
||||
Self {
|
||||
http_client,
|
||||
pending_request: None,
|
||||
current_completion: None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn has_api_key(cx: &App) -> bool {
|
||||
Self::api_key(cx).is_some()
|
||||
}
|
||||
|
||||
fn api_key(cx: &App) -> Option<Arc<str>> {
|
||||
MistralLanguageModelProvider::try_global(cx)
|
||||
.and_then(|provider| provider.codestral_api_key(CODESTRAL_API_URL, cx))
|
||||
}
|
||||
|
||||
/// Uses Codestral's Fill-in-the-Middle API for code completion.
|
||||
async fn fetch_completion(
|
||||
http_client: Arc<dyn HttpClient>,
|
||||
api_key: &str,
|
||||
prompt: String,
|
||||
suffix: String,
|
||||
model: String,
|
||||
max_tokens: Option<u32>,
|
||||
) -> Result<String> {
|
||||
let start_time = Instant::now();
|
||||
|
||||
log::debug!(
|
||||
"Codestral: Requesting completion (model: {}, max_tokens: {:?})",
|
||||
model,
|
||||
max_tokens
|
||||
);
|
||||
|
||||
let request = CodestralRequest {
|
||||
model,
|
||||
prompt,
|
||||
suffix: if suffix.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(suffix)
|
||||
},
|
||||
max_tokens: max_tokens.or(Some(350)),
|
||||
temperature: Some(0.2),
|
||||
top_p: Some(1.0),
|
||||
stream: Some(false),
|
||||
stop: None,
|
||||
random_seed: None,
|
||||
min_tokens: None,
|
||||
};
|
||||
|
||||
let request_body = serde_json::to_string(&request)?;
|
||||
|
||||
log::debug!("Codestral: Sending FIM request");
|
||||
|
||||
let http_request = http_client::Request::builder()
|
||||
.method(http_client::Method::POST)
|
||||
.uri(format!("{}/v1/fim/completions", CODESTRAL_API_URL))
|
||||
.header("Content-Type", "application/json")
|
||||
.header("Authorization", format!("Bearer {}", api_key))
|
||||
.body(http_client::AsyncBody::from(request_body))?;
|
||||
|
||||
let mut response = http_client.send(http_request).await?;
|
||||
let status = response.status();
|
||||
|
||||
log::debug!("Codestral: Response status: {}", status);
|
||||
|
||||
if !status.is_success() {
|
||||
let mut body = String::new();
|
||||
response.body_mut().read_to_string(&mut body).await?;
|
||||
return Err(anyhow::anyhow!(
|
||||
"Codestral API error: {} - {}",
|
||||
status,
|
||||
body
|
||||
));
|
||||
}
|
||||
|
||||
let mut body = String::new();
|
||||
response.body_mut().read_to_string(&mut body).await?;
|
||||
|
||||
let codestral_response: CodestralResponse = serde_json::from_str(&body)?;
|
||||
|
||||
let elapsed = start_time.elapsed();
|
||||
|
||||
if let Some(choice) = codestral_response.choices.first() {
|
||||
let completion = &choice.message.content;
|
||||
|
||||
log::debug!(
|
||||
"Codestral: Completion received ({} tokens, {:.2}s)",
|
||||
codestral_response.usage.completion_tokens,
|
||||
elapsed.as_secs_f64()
|
||||
);
|
||||
|
||||
// Return just the completion text for insertion at cursor
|
||||
Ok(completion.clone())
|
||||
} else {
|
||||
log::error!("Codestral: No completion returned in response");
|
||||
Err(anyhow::anyhow!("No completion returned from Codestral"))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Edit-prediction provider backed by the Codestral FIM API.
impl EditPredictionProvider for CodestralCompletionProvider {
    /// Stable identifier for this provider.
    fn name() -> &'static str {
        "codestral"
    }

    /// Human-readable name shown in the UI.
    fn display_name() -> &'static str {
        "Codestral"
    }

    /// Predictions from this provider are shown alongside the completions menu.
    fn show_completions_in_menu() -> bool {
        true
    }

    /// Enabled whenever an API key is configured; the buffer and cursor
    /// position are not consulted.
    fn is_enabled(&self, _buffer: &Entity<Buffer>, _cursor_position: Anchor, cx: &App) -> bool {
        Self::api_key(cx).is_some()
    }

    /// A refresh is considered in flight while `pending_request` holds a task.
    fn is_refreshing(&self) -> bool {
        self.pending_request.is_some()
    }

    /// Starts a background request for a new completion at `cursor_position`,
    /// replacing any previously pending request. Returns early (without a
    /// request) when no API key is configured or when the current completion
    /// still interpolates cleanly against the latest buffer snapshot.
    fn refresh(
        &mut self,
        buffer: Entity<Buffer>,
        cursor_position: language::Anchor,
        debounce: bool,
        cx: &mut Context<Self>,
    ) {
        log::debug!("Codestral: Refresh called (debounce: {})", debounce);

        let Some(api_key) = Self::api_key(cx) else {
            log::warn!("Codestral: No API key configured, skipping refresh");
            return;
        };

        let snapshot = buffer.read(cx).snapshot();

        // Check if current completion is still valid
        if let Some(current_completion) = self.current_completion.as_ref() {
            if current_completion.interpolate(&snapshot).is_some() {
                return;
            }
        }

        let http_client = self.http_client.clone();

        // Get settings
        let settings = all_language_settings(None, cx);
        let model = settings
            .edit_predictions
            .codestral
            .model
            .clone()
            .unwrap_or_else(|| "codestral-latest".to_string());
        let max_tokens = settings.edit_predictions.codestral.max_tokens;

        // Dropping the previous task (if any) when replacing `pending_request`
        // cancels the prior in-flight refresh.
        self.pending_request = Some(cx.spawn(async move |this, cx| {
            if debounce {
                log::debug!("Codestral: Debouncing for {:?}", DEBOUNCE_TIMEOUT);
                smol::Timer::after(DEBOUNCE_TIMEOUT).await;
            }

            // Select an excerpt around the cursor and split it into the FIM
            // prompt (before cursor) and suffix (after cursor).
            let cursor_offset = cursor_position.to_offset(&snapshot);
            let cursor_point = cursor_offset.to_point(&snapshot);
            let excerpt = EditPredictionExcerpt::select_from_buffer(
                cursor_point,
                &snapshot,
                &EXCERPT_OPTIONS,
                None,
            )
            .context("Line containing cursor doesn't fit in excerpt max bytes")?;

            let excerpt_text = excerpt.text(&snapshot);
            // Clamp so slicing below can't go out of bounds even if the
            // cursor lies outside the excerpt range.
            let cursor_within_excerpt = cursor_offset
                .saturating_sub(excerpt.range.start)
                .min(excerpt_text.body.len());
            let prompt = excerpt_text.body[..cursor_within_excerpt].to_string();
            let suffix = excerpt_text.body[cursor_within_excerpt..].to_string();

            let completion_text = match Self::fetch_completion(
                http_client,
                &api_key,
                prompt,
                suffix,
                model,
                max_tokens,
            )
            .await
            {
                Ok(completion) => completion,
                Err(e) => {
                    // Clear the pending flag so the UI stops showing a
                    // refresh in progress, then propagate the error.
                    log::error!("Codestral: Failed to fetch completion: {}", e);
                    this.update(cx, |this, cx| {
                        this.pending_request = None;
                        cx.notify();
                    })?;
                    return Err(e);
                }
            };

            if completion_text.trim().is_empty() {
                log::debug!("Codestral: Completion was empty after trimming; ignoring");
                this.update(cx, |this, cx| {
                    this.pending_request = None;
                    cx.notify();
                })?;
                return Ok(());
            }

            // The prediction is a pure insertion at the cursor position.
            let edits: Arc<[(Range<Anchor>, String)]> =
                vec![(cursor_position..cursor_position, completion_text)].into();
            let edit_preview = buffer
                .read_with(cx, |buffer, cx| buffer.preview_edits(edits.clone(), cx))?
                .await;

            // Publish the completion and mark the request as finished.
            this.update(cx, |this, cx| {
                this.current_completion = Some(CurrentCompletion {
                    snapshot,
                    edits,
                    edit_preview,
                });
                this.pending_request = None;
                cx.notify();
            })?;

            Ok(())
        }));
    }

    /// No-op: Codestral returns a single completion per request.
    fn cycle(
        &mut self,
        _buffer: Entity<Buffer>,
        _cursor_position: Anchor,
        _direction: Direction,
        _cx: &mut Context<Self>,
    ) {
        // Codestral doesn't support multiple completions, so cycling does nothing
    }

    /// Clears all completion state after the user accepts the prediction.
    fn accept(&mut self, _cx: &mut Context<Self>) {
        log::debug!("Codestral: Completion accepted");
        self.pending_request = None;
        self.current_completion = None;
    }

    /// Clears all completion state after the user dismisses the prediction.
    fn discard(&mut self, _cx: &mut Context<Self>) {
        log::debug!("Codestral: Completion discarded");
        self.pending_request = None;
        self.current_completion = None;
    }

    /// Returns the completion suggestion, adjusted or invalidated based on user edits
    fn suggest(
        &mut self,
        buffer: &Entity<Buffer>,
        _cursor_position: Anchor,
        cx: &mut Context<Self>,
    ) -> Option<EditPrediction> {
        let current_completion = self.current_completion.as_ref()?;
        let buffer = buffer.read(cx);
        // Interpolation re-maps the stored edits onto the current buffer
        // state; it yields None once user edits have invalidated them.
        let edits = current_completion.interpolate(&buffer.snapshot())?;
        if edits.is_empty() {
            return None;
        }
        Some(EditPrediction::Local {
            id: None,
            edits,
            edit_preview: Some(current_completion.edit_preview.clone()),
        })
    }
}
|
||||
|
||||
/// Request payload for Codestral's `/v1/fim/completions` endpoint.
/// All `Option` fields are omitted from the serialized JSON when `None`.
#[derive(Debug, Serialize, Deserialize)]
pub struct CodestralRequest {
    /// Model identifier, e.g. `"codestral-latest"`.
    pub model: String,
    /// Text before the cursor (the fill-in-the-middle prefix).
    pub prompt: String,
    /// Text after the cursor; omitted when there is nothing after the cursor.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub suffix: Option<String>,
    /// Upper bound on the number of generated tokens.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub max_tokens: Option<u32>,
    /// Sampling temperature.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub temperature: Option<f32>,
    /// Nucleus-sampling probability mass.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub top_p: Option<f32>,
    /// Whether the API should stream the response; this client sends `false`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub stream: Option<bool>,
    /// Stop sequences that end generation early.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub stop: Option<Vec<String>>,
    /// Seed for reproducible sampling.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub random_seed: Option<u32>,
    /// Lower bound on the number of generated tokens.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub min_tokens: Option<u32>,
}
|
||||
|
||||
/// Response payload from Codestral's FIM completions endpoint.
#[derive(Debug, Deserialize)]
pub struct CodestralResponse {
    /// Server-assigned identifier for this completion.
    pub id: String,
    /// Object type tag returned by the API.
    pub object: String,
    /// Model that produced the completion.
    pub model: String,
    /// Token accounting for this request/response pair.
    pub usage: Usage,
    /// Creation timestamp (presumably Unix epoch seconds — per the API docs).
    pub created: u64,
    /// Candidate completions; only the first is used by this client.
    pub choices: Vec<Choice>,
}
|
||||
|
||||
/// Token usage statistics reported by the Codestral API.
#[derive(Debug, Deserialize)]
pub struct Usage {
    /// Tokens consumed by the prompt (and suffix).
    pub prompt_tokens: u32,
    /// Tokens generated for the completion.
    pub completion_tokens: u32,
    /// Total tokens billed: prompt plus completion.
    pub total_tokens: u32,
}
|
||||
|
||||
/// One candidate completion in a [`CodestralResponse`].
#[derive(Debug, Deserialize)]
pub struct Choice {
    /// Position of this choice within the response's `choices` array.
    pub index: u32,
    /// The generated message carrying the completion text.
    pub message: Message,
    /// Why generation stopped (e.g. length limit or natural stop).
    pub finish_reason: String,
}
|
||||
|
||||
/// Message body of a [`Choice`]; `content` holds the completion text
/// inserted at the cursor.
#[derive(Debug, Deserialize)]
pub struct Message {
    /// The completion text itself.
    pub content: String,
    /// Role tag returned by the API (chat-style field).
    pub role: String,
}
|
||||
@@ -1272,7 +1272,7 @@ async fn test_language_server_statuses(cx_a: &mut TestAppContext, cx_b: &mut Tes
|
||||
fake_language_server.start_progress("the-token").await;
|
||||
|
||||
executor.advance_clock(SERVER_PROGRESS_THROTTLE_TIMEOUT);
|
||||
fake_language_server.notify::<lsp::notification::Progress>(&lsp::ProgressParams {
|
||||
fake_language_server.notify::<lsp::notification::Progress>(lsp::ProgressParams {
|
||||
token: lsp::NumberOrString::String("the-token".to_string()),
|
||||
value: lsp::ProgressParamsValue::WorkDone(lsp::WorkDoneProgress::Report(
|
||||
lsp::WorkDoneProgressReport {
|
||||
@@ -1306,7 +1306,7 @@ async fn test_language_server_statuses(cx_a: &mut TestAppContext, cx_b: &mut Tes
|
||||
});
|
||||
|
||||
executor.advance_clock(SERVER_PROGRESS_THROTTLE_TIMEOUT);
|
||||
fake_language_server.notify::<lsp::notification::Progress>(&lsp::ProgressParams {
|
||||
fake_language_server.notify::<lsp::notification::Progress>(lsp::ProgressParams {
|
||||
token: lsp::NumberOrString::String("the-token".to_string()),
|
||||
value: lsp::ProgressParamsValue::WorkDone(lsp::WorkDoneProgress::Report(
|
||||
lsp::WorkDoneProgressReport {
|
||||
@@ -2848,7 +2848,7 @@ async fn test_lsp_pull_diagnostics(
|
||||
});
|
||||
|
||||
fake_language_server.notify::<lsp::notification::PublishDiagnostics>(
|
||||
&lsp::PublishDiagnosticsParams {
|
||||
lsp::PublishDiagnosticsParams {
|
||||
uri: lsp::Uri::from_file_path(path!("/a/main.rs")).unwrap(),
|
||||
diagnostics: vec![lsp::Diagnostic {
|
||||
range: lsp::Range {
|
||||
@@ -2869,7 +2869,7 @@ async fn test_lsp_pull_diagnostics(
|
||||
},
|
||||
);
|
||||
fake_language_server.notify::<lsp::notification::PublishDiagnostics>(
|
||||
&lsp::PublishDiagnosticsParams {
|
||||
lsp::PublishDiagnosticsParams {
|
||||
uri: lsp::Uri::from_file_path(path!("/a/lib.rs")).unwrap(),
|
||||
diagnostics: vec![lsp::Diagnostic {
|
||||
range: lsp::Range {
|
||||
@@ -2891,7 +2891,7 @@ async fn test_lsp_pull_diagnostics(
|
||||
);
|
||||
|
||||
if should_stream_workspace_diagnostic {
|
||||
fake_language_server.notify::<lsp::notification::Progress>(&lsp::ProgressParams {
|
||||
fake_language_server.notify::<lsp::notification::Progress>(lsp::ProgressParams {
|
||||
token: expected_workspace_diagnostic_token.clone(),
|
||||
value: lsp::ProgressParamsValue::WorkspaceDiagnostic(
|
||||
lsp::WorkspaceDiagnosticReportResult::Report(lsp::WorkspaceDiagnosticReport {
|
||||
@@ -3073,7 +3073,7 @@ async fn test_lsp_pull_diagnostics(
|
||||
});
|
||||
|
||||
if should_stream_workspace_diagnostic {
|
||||
fake_language_server.notify::<lsp::notification::Progress>(&lsp::ProgressParams {
|
||||
fake_language_server.notify::<lsp::notification::Progress>(lsp::ProgressParams {
|
||||
token: expected_workspace_diagnostic_token.clone(),
|
||||
value: lsp::ProgressParamsValue::WorkspaceDiagnostic(
|
||||
lsp::WorkspaceDiagnosticReportResult::Report(lsp::WorkspaceDiagnosticReport {
|
||||
|
||||
@@ -4077,7 +4077,7 @@ async fn test_collaborating_with_diagnostics(
|
||||
.receive_notification::<lsp::notification::DidOpenTextDocument>()
|
||||
.await;
|
||||
fake_language_server.notify::<lsp::notification::PublishDiagnostics>(
|
||||
&lsp::PublishDiagnosticsParams {
|
||||
lsp::PublishDiagnosticsParams {
|
||||
uri: lsp::Uri::from_file_path(path!("/a/a.rs")).unwrap(),
|
||||
version: None,
|
||||
diagnostics: vec![lsp::Diagnostic {
|
||||
@@ -4097,7 +4097,7 @@ async fn test_collaborating_with_diagnostics(
|
||||
.await
|
||||
.unwrap();
|
||||
fake_language_server.notify::<lsp::notification::PublishDiagnostics>(
|
||||
&lsp::PublishDiagnosticsParams {
|
||||
lsp::PublishDiagnosticsParams {
|
||||
uri: lsp::Uri::from_file_path(path!("/a/a.rs")).unwrap(),
|
||||
version: None,
|
||||
diagnostics: vec![lsp::Diagnostic {
|
||||
@@ -4171,7 +4171,7 @@ async fn test_collaborating_with_diagnostics(
|
||||
|
||||
// Simulate a language server reporting more errors for a file.
|
||||
fake_language_server.notify::<lsp::notification::PublishDiagnostics>(
|
||||
&lsp::PublishDiagnosticsParams {
|
||||
lsp::PublishDiagnosticsParams {
|
||||
uri: lsp::Uri::from_file_path(path!("/a/a.rs")).unwrap(),
|
||||
version: None,
|
||||
diagnostics: vec![
|
||||
@@ -4269,7 +4269,7 @@ async fn test_collaborating_with_diagnostics(
|
||||
|
||||
// Simulate a language server reporting no errors for a file.
|
||||
fake_language_server.notify::<lsp::notification::PublishDiagnostics>(
|
||||
&lsp::PublishDiagnosticsParams {
|
||||
lsp::PublishDiagnosticsParams {
|
||||
uri: lsp::Uri::from_file_path(path!("/a/a.rs")).unwrap(),
|
||||
version: None,
|
||||
diagnostics: Vec::new(),
|
||||
@@ -4365,7 +4365,7 @@ async fn test_collaborating_with_lsp_progress_updates_and_diagnostics_ordering(
|
||||
.await
|
||||
.into_response()
|
||||
.unwrap();
|
||||
fake_language_server.notify::<lsp::notification::Progress>(&lsp::ProgressParams {
|
||||
fake_language_server.notify::<lsp::notification::Progress>(lsp::ProgressParams {
|
||||
token: lsp::NumberOrString::String("the-disk-based-token".to_string()),
|
||||
value: lsp::ProgressParamsValue::WorkDone(lsp::WorkDoneProgress::Begin(
|
||||
lsp::WorkDoneProgressBegin {
|
||||
@@ -4376,7 +4376,7 @@ async fn test_collaborating_with_lsp_progress_updates_and_diagnostics_ordering(
|
||||
});
|
||||
for file_name in file_names {
|
||||
fake_language_server.notify::<lsp::notification::PublishDiagnostics>(
|
||||
&lsp::PublishDiagnosticsParams {
|
||||
lsp::PublishDiagnosticsParams {
|
||||
uri: lsp::Uri::from_file_path(Path::new(path!("/test")).join(file_name)).unwrap(),
|
||||
version: None,
|
||||
diagnostics: vec![lsp::Diagnostic {
|
||||
@@ -4389,7 +4389,7 @@ async fn test_collaborating_with_lsp_progress_updates_and_diagnostics_ordering(
|
||||
},
|
||||
);
|
||||
}
|
||||
fake_language_server.notify::<lsp::notification::Progress>(&lsp::ProgressParams {
|
||||
fake_language_server.notify::<lsp::notification::Progress>(lsp::ProgressParams {
|
||||
token: lsp::NumberOrString::String("the-disk-based-token".to_string()),
|
||||
value: lsp::ProgressParamsValue::WorkDone(lsp::WorkDoneProgress::End(
|
||||
lsp::WorkDoneProgressEnd { message: None },
|
||||
|
||||
@@ -97,11 +97,10 @@ impl CommandPaletteFilter {
|
||||
pub struct CommandInterceptResult {
|
||||
/// The action produced as a result of the interception.
|
||||
pub action: Box<dyn Action>,
|
||||
// TODO: Document this field.
|
||||
#[allow(missing_docs)]
|
||||
/// The display string to show in the command palette for this result.
|
||||
pub string: String,
|
||||
// TODO: Document this field.
|
||||
#[allow(missing_docs)]
|
||||
/// The character positions in the string that match the query.
|
||||
/// Used for highlighting matched characters in the command palette UI.
|
||||
pub positions: Vec<usize>,
|
||||
}
|
||||
|
||||
|
||||
@@ -41,12 +41,9 @@ impl StdioTransport {
|
||||
command.current_dir(working_directory);
|
||||
}
|
||||
|
||||
let mut server = command.spawn().with_context(|| {
|
||||
format!(
|
||||
"failed to spawn command. (path={:?}, args={:?})",
|
||||
binary.executable, &binary.args
|
||||
)
|
||||
})?;
|
||||
let mut server = command
|
||||
.spawn()
|
||||
.with_context(|| format!("failed to spawn command {command:?})",))?;
|
||||
|
||||
let stdin = server.stdin.take().unwrap();
|
||||
let stdout = server.stdout.take().unwrap();
|
||||
|
||||
@@ -270,7 +270,7 @@ impl RegisteredBuffer {
|
||||
server
|
||||
.lsp
|
||||
.notify::<lsp::notification::DidChangeTextDocument>(
|
||||
&lsp::DidChangeTextDocumentParams {
|
||||
lsp::DidChangeTextDocumentParams {
|
||||
text_document: lsp::VersionedTextDocumentIdentifier::new(
|
||||
buffer.uri.clone(),
|
||||
buffer.snapshot_version,
|
||||
@@ -744,7 +744,7 @@ impl Copilot {
|
||||
let snapshot = buffer.read(cx).snapshot();
|
||||
server
|
||||
.notify::<lsp::notification::DidOpenTextDocument>(
|
||||
&lsp::DidOpenTextDocumentParams {
|
||||
lsp::DidOpenTextDocumentParams {
|
||||
text_document: lsp::TextDocumentItem {
|
||||
uri: uri.clone(),
|
||||
language_id: language_id.clone(),
|
||||
@@ -792,13 +792,14 @@ impl Copilot {
|
||||
server
|
||||
.lsp
|
||||
.notify::<lsp::notification::DidSaveTextDocument>(
|
||||
&lsp::DidSaveTextDocumentParams {
|
||||
lsp::DidSaveTextDocumentParams {
|
||||
text_document: lsp::TextDocumentIdentifier::new(
|
||||
registered_buffer.uri.clone(),
|
||||
),
|
||||
text: None,
|
||||
},
|
||||
)?;
|
||||
)
|
||||
.ok();
|
||||
}
|
||||
language::BufferEvent::FileHandleChanged
|
||||
| language::BufferEvent::LanguageChanged => {
|
||||
@@ -814,14 +815,15 @@ impl Copilot {
|
||||
server
|
||||
.lsp
|
||||
.notify::<lsp::notification::DidCloseTextDocument>(
|
||||
&lsp::DidCloseTextDocumentParams {
|
||||
lsp::DidCloseTextDocumentParams {
|
||||
text_document: lsp::TextDocumentIdentifier::new(old_uri),
|
||||
},
|
||||
)?;
|
||||
)
|
||||
.ok();
|
||||
server
|
||||
.lsp
|
||||
.notify::<lsp::notification::DidOpenTextDocument>(
|
||||
&lsp::DidOpenTextDocumentParams {
|
||||
lsp::DidOpenTextDocumentParams {
|
||||
text_document: lsp::TextDocumentItem::new(
|
||||
registered_buffer.uri.clone(),
|
||||
registered_buffer.language_id.clone(),
|
||||
@@ -829,7 +831,8 @@ impl Copilot {
|
||||
registered_buffer.snapshot.text(),
|
||||
),
|
||||
},
|
||||
)?;
|
||||
)
|
||||
.ok();
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
@@ -846,7 +849,7 @@ impl Copilot {
|
||||
server
|
||||
.lsp
|
||||
.notify::<lsp::notification::DidCloseTextDocument>(
|
||||
&lsp::DidCloseTextDocumentParams {
|
||||
lsp::DidCloseTextDocumentParams {
|
||||
text_document: lsp::TextDocumentIdentifier::new(buffer.uri),
|
||||
},
|
||||
)
|
||||
@@ -1151,9 +1154,12 @@ fn notify_did_change_config_to_server(
|
||||
}
|
||||
});
|
||||
|
||||
server.notify::<lsp::notification::DidChangeConfiguration>(&lsp::DidChangeConfigurationParams {
|
||||
settings,
|
||||
})
|
||||
server
|
||||
.notify::<lsp::notification::DidChangeConfiguration>(lsp::DidChangeConfigurationParams {
|
||||
settings,
|
||||
})
|
||||
.ok();
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn clear_copilot_dir() {
|
||||
|
||||
@@ -46,6 +46,7 @@ pub trait DapDelegate: Send + Sync + 'static {
|
||||
async fn which(&self, command: &OsStr) -> Option<PathBuf>;
|
||||
async fn read_text_file(&self, path: &RelPath) -> Result<String>;
|
||||
async fn shell_env(&self) -> collections::HashMap<String, String>;
|
||||
fn is_headless(&self) -> bool;
|
||||
}
|
||||
|
||||
#[derive(
|
||||
|
||||
@@ -674,13 +674,7 @@ impl StdioTransport {
|
||||
command.args(&binary.arguments);
|
||||
command.envs(&binary.envs);
|
||||
|
||||
let mut process = Child::spawn(command, Stdio::piped()).with_context(|| {
|
||||
format!(
|
||||
"failed to spawn command `{} {}`.",
|
||||
binary_command,
|
||||
binary.arguments.join(" ")
|
||||
)
|
||||
})?;
|
||||
let mut process = Child::spawn(command, Stdio::piped())?;
|
||||
|
||||
let err_task = process.stderr.take().map(|stderr| {
|
||||
cx.background_spawn(TransportDelegate::handle_adapter_log(
|
||||
@@ -1058,11 +1052,13 @@ impl Child {
|
||||
#[cfg(not(windows))]
|
||||
fn spawn(mut command: std::process::Command, stdin: Stdio) -> Result<Self> {
|
||||
util::set_pre_exec_to_start_new_session(&mut command);
|
||||
let process = smol::process::Command::from(command)
|
||||
let mut command = smol::process::Command::from(command);
|
||||
let process = command
|
||||
.stdin(stdin)
|
||||
.stdout(Stdio::piped())
|
||||
.stderr(Stdio::piped())
|
||||
.spawn()?;
|
||||
.spawn()
|
||||
.with_context(|| format!("failed to spawn command `{command:?}`",))?;
|
||||
Ok(Self { process })
|
||||
}
|
||||
|
||||
@@ -1070,11 +1066,13 @@ impl Child {
|
||||
fn spawn(command: std::process::Command, stdin: Stdio) -> Result<Self> {
|
||||
// TODO(windows): create a job object and add the child process handle to it,
|
||||
// see https://learn.microsoft.com/en-us/windows/win32/procthread/job-objects
|
||||
let process = smol::process::Command::from(command)
|
||||
let mut command = smol::process::Command::from(command);
|
||||
let process = command
|
||||
.stdin(stdin)
|
||||
.stdout(Stdio::piped())
|
||||
.stderr(Stdio::piped())
|
||||
.spawn()?;
|
||||
.spawn()
|
||||
.with_context(|| format!("failed to spawn command `{command:?}`",))?;
|
||||
Ok(Self { process })
|
||||
}
|
||||
|
||||
|
||||
@@ -120,6 +120,13 @@ impl JsDebugAdapter {
|
||||
configuration
|
||||
.entry("sourceMapRenames")
|
||||
.or_insert(true.into());
|
||||
|
||||
// Set up remote browser debugging
|
||||
if delegate.is_headless() {
|
||||
configuration
|
||||
.entry("browserLaunchLocation")
|
||||
.or_insert("ui".into());
|
||||
}
|
||||
}
|
||||
|
||||
let adapter_path = if let Some(user_installed_path) = user_installed_path {
|
||||
|
||||
@@ -2,7 +2,7 @@ use std::ops::Range;
|
||||
|
||||
use client::EditPredictionUsage;
|
||||
use gpui::{App, Context, Entity, SharedString};
|
||||
use language::Buffer;
|
||||
use language::{Anchor, Buffer, BufferSnapshot, OffsetRangeExt};
|
||||
|
||||
// TODO: Find a better home for `Direction`.
|
||||
//
|
||||
@@ -242,3 +242,51 @@ where
|
||||
self.update(cx, |this, cx| this.suggest(buffer, cursor_position, cx))
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns edits updated based on user edits since the old snapshot. None is returned if any user
/// edit is not a prefix of a predicted insertion.
pub fn interpolate_edits(
    old_snapshot: &BufferSnapshot,
    new_snapshot: &BufferSnapshot,
    current_edits: &[(Range<Anchor>, String)],
) -> Option<Vec<(Range<Anchor>, String)>> {
    let mut edits = Vec::new();

    // Walk the user's edits and the model's predicted edits in parallel,
    // both ordered by position in the old snapshot.
    let mut model_edits = current_edits.iter().peekable();
    for user_edit in new_snapshot.edits_since::<usize>(&old_snapshot.version) {
        // Model edits strictly before this user edit are unaffected; keep
        // them as-is.
        while let Some((model_old_range, _)) = model_edits.peek() {
            let model_old_range = model_old_range.to_offset(old_snapshot);
            if model_old_range.end < user_edit.old.start {
                let (model_old_range, model_new_text) = model_edits.next().unwrap();
                edits.push((model_old_range.clone(), model_new_text.clone()));
            } else {
                break;
            }
        }

        if let Some((model_old_range, model_new_text)) = model_edits.peek() {
            let model_old_offset_range = model_old_range.to_offset(old_snapshot);
            if user_edit.old == model_old_offset_range {
                let user_new_text = new_snapshot
                    .text_for_range(user_edit.new.clone())
                    .collect::<String>();

                // The user typed a prefix of what the model predicted:
                // keep only the not-yet-typed remainder of the prediction.
                if let Some(model_suffix) = model_new_text.strip_prefix(&user_new_text) {
                    if !model_suffix.is_empty() {
                        let anchor = old_snapshot.anchor_after(user_edit.old.end);
                        edits.push((anchor..anchor, model_suffix.to_string()));
                    }

                    model_edits.next();
                    continue;
                }
            }
        }

        // The user edit neither precedes nor matches a predicted edit, so
        // the prediction is invalidated.
        return None;
    }

    // Any remaining model edits lie after all user edits and carry over
    // unchanged.
    edits.extend(model_edits.cloned());

    if edits.is_empty() { None } else { Some(edits) }
}
|
||||
|
||||
@@ -16,6 +16,7 @@ doctest = false
|
||||
anyhow.workspace = true
|
||||
client.workspace = true
|
||||
cloud_llm_client.workspace = true
|
||||
codestral.workspace = true
|
||||
copilot.workspace = true
|
||||
editor.workspace = true
|
||||
feature_flags.workspace = true
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
use anyhow::Result;
|
||||
use client::{UserStore, zed_urls};
|
||||
use cloud_llm_client::UsageLimit;
|
||||
use codestral::CodestralCompletionProvider;
|
||||
use copilot::{Copilot, Status};
|
||||
use editor::{Editor, SelectionEffects, actions::ShowEditPrediction, scroll::Autoscroll};
|
||||
use feature_flags::{FeatureFlagAppExt, PredictEditsRateCompletionsFeatureFlag};
|
||||
@@ -234,6 +235,67 @@ impl Render for EditPredictionButton {
|
||||
)
|
||||
}
|
||||
|
||||
EditPredictionProvider::Codestral => {
|
||||
let enabled = self.editor_enabled.unwrap_or(true);
|
||||
let has_api_key = CodestralCompletionProvider::has_api_key(cx);
|
||||
let fs = self.fs.clone();
|
||||
let this = cx.entity();
|
||||
|
||||
div().child(
|
||||
PopoverMenu::new("codestral")
|
||||
.menu(move |window, cx| {
|
||||
if has_api_key {
|
||||
Some(this.update(cx, |this, cx| {
|
||||
this.build_codestral_context_menu(window, cx)
|
||||
}))
|
||||
} else {
|
||||
Some(ContextMenu::build(window, cx, |menu, _, _| {
|
||||
let fs = fs.clone();
|
||||
menu.entry("Use Zed AI instead", None, move |_, cx| {
|
||||
set_completion_provider(
|
||||
fs.clone(),
|
||||
cx,
|
||||
EditPredictionProvider::Zed,
|
||||
)
|
||||
})
|
||||
.separator()
|
||||
.entry(
|
||||
"Configure Codestral API Key",
|
||||
None,
|
||||
move |window, cx| {
|
||||
window.dispatch_action(
|
||||
zed_actions::agent::OpenSettings.boxed_clone(),
|
||||
cx,
|
||||
);
|
||||
},
|
||||
)
|
||||
}))
|
||||
}
|
||||
})
|
||||
.anchor(Corner::BottomRight)
|
||||
.trigger_with_tooltip(
|
||||
IconButton::new("codestral-icon", IconName::AiMistral)
|
||||
.shape(IconButtonShape::Square)
|
||||
.when(!has_api_key, |this| {
|
||||
this.indicator(Indicator::dot().color(Color::Error))
|
||||
.indicator_border_color(Some(
|
||||
cx.theme().colors().status_bar_background,
|
||||
))
|
||||
})
|
||||
.when(has_api_key && !enabled, |this| {
|
||||
this.indicator(Indicator::dot().color(Color::Ignored))
|
||||
.indicator_border_color(Some(
|
||||
cx.theme().colors().status_bar_background,
|
||||
))
|
||||
}),
|
||||
move |window, cx| {
|
||||
Tooltip::for_action("Codestral", &ToggleMenu, window, cx)
|
||||
},
|
||||
)
|
||||
.with_handle(self.popover_menu_handle.clone()),
|
||||
)
|
||||
}
|
||||
|
||||
EditPredictionProvider::Zed => {
|
||||
let enabled = self.editor_enabled.unwrap_or(true);
|
||||
|
||||
@@ -493,6 +555,7 @@ impl EditPredictionButton {
|
||||
EditPredictionProvider::Zed
|
||||
| EditPredictionProvider::Copilot
|
||||
| EditPredictionProvider::Supermaven
|
||||
| EditPredictionProvider::Codestral
|
||||
) {
|
||||
menu = menu
|
||||
.separator()
|
||||
@@ -719,6 +782,25 @@ impl EditPredictionButton {
|
||||
})
|
||||
}
|
||||
|
||||
fn build_codestral_context_menu(
|
||||
&self,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) -> Entity<ContextMenu> {
|
||||
let fs = self.fs.clone();
|
||||
ContextMenu::build(window, cx, |menu, window, cx| {
|
||||
self.build_language_settings_menu(menu, window, cx)
|
||||
.separator()
|
||||
.entry("Use Zed AI instead", None, move |_, cx| {
|
||||
set_completion_provider(fs.clone(), cx, EditPredictionProvider::Zed)
|
||||
})
|
||||
.separator()
|
||||
.entry("Configure Codestral API Key", None, move |window, cx| {
|
||||
window.dispatch_action(zed_actions::agent::OpenSettings.boxed_clone(), cx);
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
fn build_zeta_context_menu(
|
||||
&self,
|
||||
window: &mut Window,
|
||||
|
||||
@@ -19,6 +19,7 @@ collections.workspace = true
|
||||
futures.workspace = true
|
||||
gpui.workspace = true
|
||||
hashbrown.workspace = true
|
||||
indoc.workspace = true
|
||||
itertools.workspace = true
|
||||
language.workspace = true
|
||||
log.workspace = true
|
||||
@@ -45,5 +46,8 @@ project = {workspace= true, features = ["test-support"]}
|
||||
serde_json.workspace = true
|
||||
settings = {workspace= true, features = ["test-support"]}
|
||||
text = { workspace = true, features = ["test-support"] }
|
||||
tree-sitter-c.workspace = true
|
||||
tree-sitter-cpp.workspace = true
|
||||
tree-sitter-go.workspace = true
|
||||
util = { workspace = true, features = ["test-support"] }
|
||||
zlog.workspace = true
|
||||
|
||||
@@ -1,9 +1,11 @@
|
||||
use language::LanguageId;
|
||||
use language::{Language, LanguageId};
|
||||
use project::ProjectEntryId;
|
||||
use std::borrow::Cow;
|
||||
use std::ops::Range;
|
||||
use std::sync::Arc;
|
||||
use std::{borrow::Cow, path::Path};
|
||||
use text::{Bias, BufferId, Rope};
|
||||
use util::paths::{path_ends_with, strip_path_suffix};
|
||||
use util::rel_path::RelPath;
|
||||
|
||||
use crate::outline::OutlineDeclaration;
|
||||
|
||||
@@ -22,12 +24,14 @@ pub enum Declaration {
|
||||
File {
|
||||
project_entry_id: ProjectEntryId,
|
||||
declaration: FileDeclaration,
|
||||
cached_path: CachedDeclarationPath,
|
||||
},
|
||||
Buffer {
|
||||
project_entry_id: ProjectEntryId,
|
||||
buffer_id: BufferId,
|
||||
rope: Rope,
|
||||
declaration: BufferDeclaration,
|
||||
cached_path: CachedDeclarationPath,
|
||||
},
|
||||
}
|
||||
|
||||
@@ -73,6 +77,13 @@ impl Declaration {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn cached_path(&self) -> &CachedDeclarationPath {
|
||||
match self {
|
||||
Declaration::File { cached_path, .. } => cached_path,
|
||||
Declaration::Buffer { cached_path, .. } => cached_path,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn item_range(&self) -> Range<usize> {
|
||||
match self {
|
||||
Declaration::File { declaration, .. } => declaration.item_range.clone(),
|
||||
@@ -235,3 +246,69 @@ impl BufferDeclaration {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Precomputed path information for a declaration, caching both the raw
/// worktree-relative path and its regex-stripped form used for import
/// matching.
#[derive(Debug, Clone)]
pub struct CachedDeclarationPath {
    /// Absolute path of the worktree containing the declaration's file.
    pub worktree_abs_path: Arc<Path>,
    /// Path of the file relative to the worktree root.
    pub rel_path: Arc<RelPath>,
    /// The relative path of the file, possibly stripped according to `import_path_strip_regex`.
    pub rel_path_after_regex_stripping: Arc<RelPath>,
}
|
||||
|
||||
impl CachedDeclarationPath {
    /// Builds a cached path, applying the language's
    /// `import_path_strip_regex` (when present) to produce the stripped
    /// variant; otherwise the stripped path equals `path`.
    pub fn new(
        worktree_abs_path: Arc<Path>,
        path: &Arc<RelPath>,
        language: Option<&Arc<Language>>,
    ) -> Self {
        let rel_path = path.clone();
        let rel_path_after_regex_stripping = if let Some(language) = language
            && let Some(strip_regex) = language.config().import_path_strip_regex.as_ref()
            && let Ok(stripped) = RelPath::unix(&Path::new(
                strip_regex.replace_all(rel_path.as_unix_str(), "").as_ref(),
            )) {
            Arc::from(stripped)
        } else {
            rel_path.clone()
        };
        CachedDeclarationPath {
            worktree_abs_path,
            rel_path,
            rel_path_after_regex_stripping,
        }
    }

    /// Test helper: builds a cached path with no regex stripping applied.
    #[cfg(test)]
    pub fn new_for_test(worktree_abs_path: &str, rel_path: &str) -> Self {
        let rel_path: Arc<RelPath> = util::rel_path::rel_path(rel_path).into();
        CachedDeclarationPath {
            worktree_abs_path: std::path::PathBuf::from(worktree_abs_path).into(),
            rel_path_after_regex_stripping: rel_path.clone(),
            rel_path,
        }
    }

    /// Returns whether the cached (stripped) path ends with `path`. When
    /// `path` is longer than the stripped relative path, the remainder is
    /// matched against the end of the worktree's absolute path instead.
    pub fn ends_with_posix_path(&self, path: &Path) -> bool {
        if path.as_os_str().len() <= self.rel_path_after_regex_stripping.as_unix_str().len() {
            path_ends_with(self.rel_path_after_regex_stripping.as_std_path(), path)
        } else {
            // `path` is longer than the relative path: strip the relative
            // path off its end and compare what's left to the worktree path.
            if let Some(remaining) =
                strip_path_suffix(path, self.rel_path_after_regex_stripping.as_std_path())
            {
                path_ends_with(&self.worktree_abs_path, remaining)
            } else {
                false
            }
        }
    }

    /// Returns whether `path` is exactly the worktree absolute path joined
    /// with the stripped relative path.
    pub fn equals_absolute_path(&self, path: &Path) -> bool {
        if let Some(remaining) =
            strip_path_suffix(path, &self.rel_path_after_regex_stripping.as_std_path())
        {
            self.worktree_abs_path.as_ref() == remaining
        } else {
            false
        }
    }
}
|
||||
|
||||
@@ -1,15 +1,15 @@
|
||||
use cloud_llm_client::predict_edits_v3::DeclarationScoreComponents;
|
||||
use collections::HashMap;
|
||||
use itertools::Itertools as _;
|
||||
use language::BufferSnapshot;
|
||||
use ordered_float::OrderedFloat;
|
||||
use serde::Serialize;
|
||||
use std::{cmp::Reverse, ops::Range};
|
||||
use std::{cmp::Reverse, ops::Range, path::Path, sync::Arc};
|
||||
use strum::EnumIter;
|
||||
use text::{Point, ToPoint};
|
||||
|
||||
use crate::{
|
||||
Declaration, EditPredictionExcerpt, Identifier,
|
||||
CachedDeclarationPath, Declaration, EditPredictionExcerpt, Identifier,
|
||||
imports::{Import, Imports, Module},
|
||||
reference::{Reference, ReferenceRegion},
|
||||
syntax_index::SyntaxIndexState,
|
||||
text_similarity::{Occurrences, jaccard_similarity, weighted_overlap_coefficient},
|
||||
@@ -17,12 +17,17 @@ use crate::{
|
||||
|
||||
const MAX_IDENTIFIER_DECLARATION_COUNT: usize = 16;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, Eq)]
|
||||
pub struct EditPredictionScoreOptions {
|
||||
pub omit_excerpt_overlaps: bool,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct ScoredDeclaration {
|
||||
/// identifier used by the local reference
|
||||
pub identifier: Identifier,
|
||||
pub declaration: Declaration,
|
||||
pub score_components: DeclarationScoreComponents,
|
||||
pub scores: DeclarationScores,
|
||||
pub components: DeclarationScoreComponents,
|
||||
}
|
||||
|
||||
#[derive(EnumIter, Clone, Copy, PartialEq, Eq, Hash, Debug)]
|
||||
@@ -31,12 +36,55 @@ pub enum DeclarationStyle {
|
||||
Declaration,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize, Default)]
|
||||
pub struct DeclarationScores {
|
||||
pub signature: f32,
|
||||
pub declaration: f32,
|
||||
pub retrieval: f32,
|
||||
}
|
||||
|
||||
impl ScoredDeclaration {
|
||||
/// Returns the score for this declaration with the specified style.
|
||||
pub fn score(&self, style: DeclarationStyle) -> f32 {
|
||||
// TODO: handle truncation
|
||||
|
||||
// Score related to how likely this is the correct declaration, range 0 to 1
|
||||
let retrieval = self.retrieval_score();
|
||||
|
||||
// Score related to the distance between the reference and cursor, range 0 to 1
|
||||
let distance_score = if self.components.is_referenced_nearby {
|
||||
1.0 / (1.0 + self.components.reference_line_distance as f32 / 10.0).powf(2.0)
|
||||
} else {
|
||||
// same score as ~14 lines away, rationale is to not overly penalize references from parent signatures
|
||||
0.5
|
||||
};
|
||||
|
||||
// For now instead of linear combination, the scores are just multiplied together.
|
||||
let combined_score = 10.0 * retrieval * distance_score;
|
||||
|
||||
match style {
|
||||
DeclarationStyle::Signature => self.scores.signature,
|
||||
DeclarationStyle::Declaration => self.scores.declaration,
|
||||
DeclarationStyle::Signature => {
|
||||
combined_score * self.components.excerpt_vs_signature_weighted_overlap
|
||||
}
|
||||
DeclarationStyle::Declaration => {
|
||||
2.0 * combined_score * self.components.excerpt_vs_item_weighted_overlap
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn retrieval_score(&self) -> f32 {
|
||||
if self.components.is_same_file {
|
||||
10.0 / self.components.same_file_declaration_count as f32
|
||||
} else if self.components.path_import_match_count > 0 {
|
||||
3.0
|
||||
} else if self.components.wildcard_path_import_match_count > 0 {
|
||||
1.0
|
||||
} else if self.components.normalized_import_similarity > 0.0 {
|
||||
self.components.normalized_import_similarity
|
||||
} else if self.components.normalized_wildcard_import_similarity > 0.0 {
|
||||
0.5 * self.components.normalized_wildcard_import_similarity
|
||||
} else {
|
||||
1.0 / self.components.declaration_count as f32
|
||||
}
|
||||
}
|
||||
|
||||
@@ -54,100 +102,215 @@ impl ScoredDeclaration {
|
||||
}
|
||||
|
||||
pub fn score_density(&self, style: DeclarationStyle) -> f32 {
|
||||
self.score(style) / (self.size(style)) as f32
|
||||
self.score(style) / self.size(style) as f32
|
||||
}
|
||||
}
|
||||
|
||||
pub fn scored_declarations(
|
||||
options: &EditPredictionScoreOptions,
|
||||
index: &SyntaxIndexState,
|
||||
excerpt: &EditPredictionExcerpt,
|
||||
excerpt_occurrences: &Occurrences,
|
||||
adjacent_occurrences: &Occurrences,
|
||||
imports: &Imports,
|
||||
identifier_to_references: HashMap<Identifier, Vec<Reference>>,
|
||||
cursor_offset: usize,
|
||||
current_buffer: &BufferSnapshot,
|
||||
) -> Vec<ScoredDeclaration> {
|
||||
let cursor_point = cursor_offset.to_point(¤t_buffer);
|
||||
|
||||
let mut wildcard_import_occurrences = Vec::new();
|
||||
let mut wildcard_import_paths = Vec::new();
|
||||
for wildcard_import in imports.wildcard_modules.iter() {
|
||||
match wildcard_import {
|
||||
Module::Namespace(namespace) => {
|
||||
wildcard_import_occurrences.push(namespace.occurrences())
|
||||
}
|
||||
Module::SourceExact(path) => wildcard_import_paths.push(path),
|
||||
Module::SourceFuzzy(path) => {
|
||||
wildcard_import_occurrences.push(Occurrences::from_path(&path))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut declarations = identifier_to_references
|
||||
.into_iter()
|
||||
.flat_map(|(identifier, references)| {
|
||||
let declarations =
|
||||
index.declarations_for_identifier::<MAX_IDENTIFIER_DECLARATION_COUNT>(&identifier);
|
||||
let mut import_occurrences = Vec::new();
|
||||
let mut import_paths = Vec::new();
|
||||
let mut found_external_identifier: Option<&Identifier> = None;
|
||||
|
||||
if let Some(imports) = imports.identifier_to_imports.get(&identifier) {
|
||||
// only use alias when it's the only import, could be generalized if some language
|
||||
// has overlapping aliases
|
||||
//
|
||||
// TODO: when an aliased declaration is included in the prompt, should include the
|
||||
// aliasing in the prompt.
|
||||
//
|
||||
// TODO: For SourceFuzzy consider having componentwise comparison that pays
|
||||
// attention to ordering.
|
||||
if let [
|
||||
Import::Alias {
|
||||
module,
|
||||
external_identifier,
|
||||
},
|
||||
] = imports.as_slice()
|
||||
{
|
||||
match module {
|
||||
Module::Namespace(namespace) => {
|
||||
import_occurrences.push(namespace.occurrences())
|
||||
}
|
||||
Module::SourceExact(path) => import_paths.push(path),
|
||||
Module::SourceFuzzy(path) => {
|
||||
import_occurrences.push(Occurrences::from_path(&path))
|
||||
}
|
||||
}
|
||||
found_external_identifier = Some(&external_identifier);
|
||||
} else {
|
||||
for import in imports {
|
||||
match import {
|
||||
Import::Direct { module } => match module {
|
||||
Module::Namespace(namespace) => {
|
||||
import_occurrences.push(namespace.occurrences())
|
||||
}
|
||||
Module::SourceExact(path) => import_paths.push(path),
|
||||
Module::SourceFuzzy(path) => {
|
||||
import_occurrences.push(Occurrences::from_path(&path))
|
||||
}
|
||||
},
|
||||
Import::Alias { .. } => {}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let identifier_to_lookup = found_external_identifier.unwrap_or(&identifier);
|
||||
// TODO: update this to be able to return more declarations? Especially if there is the
|
||||
// ability to quickly filter a large list (based on imports)
|
||||
let declarations = index
|
||||
.declarations_for_identifier::<MAX_IDENTIFIER_DECLARATION_COUNT>(
|
||||
&identifier_to_lookup,
|
||||
);
|
||||
let declaration_count = declarations.len();
|
||||
|
||||
declarations
|
||||
.into_iter()
|
||||
.filter_map(|(declaration_id, declaration)| match declaration {
|
||||
if declaration_count == 0 {
|
||||
return Vec::new();
|
||||
}
|
||||
|
||||
// TODO: option to filter out other candidates when same file / import match
|
||||
let mut checked_declarations = Vec::new();
|
||||
for (declaration_id, declaration) in declarations {
|
||||
match declaration {
|
||||
Declaration::Buffer {
|
||||
buffer_id,
|
||||
declaration: buffer_declaration,
|
||||
..
|
||||
} => {
|
||||
let is_same_file = buffer_id == ¤t_buffer.remote_id();
|
||||
|
||||
if is_same_file {
|
||||
let overlaps_excerpt =
|
||||
if buffer_id == ¤t_buffer.remote_id() {
|
||||
let already_included_in_prompt =
|
||||
range_intersection(&buffer_declaration.item_range, &excerpt.range)
|
||||
.is_some();
|
||||
if overlaps_excerpt
|
||||
|| excerpt
|
||||
.parent_declarations
|
||||
.iter()
|
||||
.any(|(excerpt_parent, _)| excerpt_parent == &declaration_id)
|
||||
{
|
||||
None
|
||||
} else {
|
||||
.is_some()
|
||||
|| excerpt.parent_declarations.iter().any(
|
||||
|(excerpt_parent, _)| excerpt_parent == &declaration_id,
|
||||
);
|
||||
if !options.omit_excerpt_overlaps || !already_included_in_prompt {
|
||||
let declaration_line = buffer_declaration
|
||||
.item_range
|
||||
.start
|
||||
.to_point(current_buffer)
|
||||
.row;
|
||||
Some((
|
||||
true,
|
||||
(cursor_point.row as i32 - declaration_line as i32)
|
||||
.unsigned_abs(),
|
||||
let declaration_line_distance = (cursor_point.row as i32
|
||||
- declaration_line as i32)
|
||||
.unsigned_abs();
|
||||
checked_declarations.push(CheckedDeclaration {
|
||||
declaration,
|
||||
))
|
||||
same_file_line_distance: Some(declaration_line_distance),
|
||||
path_import_match_count: 0,
|
||||
wildcard_path_import_match_count: 0,
|
||||
});
|
||||
}
|
||||
continue;
|
||||
} else {
|
||||
Some((false, u32::MAX, declaration))
|
||||
}
|
||||
}
|
||||
Declaration::File { .. } => {
|
||||
// We can assume that a file declaration is in a different file,
|
||||
// because the current one must be open
|
||||
Some((false, u32::MAX, declaration))
|
||||
}
|
||||
})
|
||||
.sorted_by_key(|&(_, distance, _)| distance)
|
||||
.enumerate()
|
||||
.map(
|
||||
|(
|
||||
declaration_line_distance_rank,
|
||||
(is_same_file, declaration_line_distance, declaration),
|
||||
)| {
|
||||
let same_file_declaration_count = index.file_declaration_count(declaration);
|
||||
Declaration::File { .. } => {}
|
||||
}
|
||||
let declaration_path = declaration.cached_path();
|
||||
let path_import_match_count = import_paths
|
||||
.iter()
|
||||
.filter(|import_path| {
|
||||
declaration_path_matches_import(&declaration_path, import_path)
|
||||
})
|
||||
.count();
|
||||
let wildcard_path_import_match_count = wildcard_import_paths
|
||||
.iter()
|
||||
.filter(|import_path| {
|
||||
declaration_path_matches_import(&declaration_path, import_path)
|
||||
})
|
||||
.count();
|
||||
checked_declarations.push(CheckedDeclaration {
|
||||
declaration,
|
||||
same_file_line_distance: None,
|
||||
path_import_match_count,
|
||||
wildcard_path_import_match_count,
|
||||
});
|
||||
}
|
||||
|
||||
score_declaration(
|
||||
&identifier,
|
||||
&references,
|
||||
declaration.clone(),
|
||||
is_same_file,
|
||||
declaration_line_distance,
|
||||
declaration_line_distance_rank,
|
||||
same_file_declaration_count,
|
||||
declaration_count,
|
||||
&excerpt_occurrences,
|
||||
&adjacent_occurrences,
|
||||
cursor_point,
|
||||
current_buffer,
|
||||
)
|
||||
},
|
||||
)
|
||||
.collect::<Vec<_>>()
|
||||
let mut max_import_similarity = 0.0;
|
||||
let mut max_wildcard_import_similarity = 0.0;
|
||||
|
||||
let mut scored_declarations_for_identifier = checked_declarations
|
||||
.into_iter()
|
||||
.map(|checked_declaration| {
|
||||
let same_file_declaration_count =
|
||||
index.file_declaration_count(checked_declaration.declaration);
|
||||
|
||||
let declaration = score_declaration(
|
||||
&identifier,
|
||||
&references,
|
||||
checked_declaration,
|
||||
same_file_declaration_count,
|
||||
declaration_count,
|
||||
&excerpt_occurrences,
|
||||
&adjacent_occurrences,
|
||||
&import_occurrences,
|
||||
&wildcard_import_occurrences,
|
||||
cursor_point,
|
||||
current_buffer,
|
||||
);
|
||||
|
||||
if declaration.components.import_similarity > max_import_similarity {
|
||||
max_import_similarity = declaration.components.import_similarity;
|
||||
}
|
||||
|
||||
if declaration.components.wildcard_import_similarity
|
||||
> max_wildcard_import_similarity
|
||||
{
|
||||
max_wildcard_import_similarity =
|
||||
declaration.components.wildcard_import_similarity;
|
||||
}
|
||||
|
||||
declaration
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
if max_import_similarity > 0.0 || max_wildcard_import_similarity > 0.0 {
|
||||
for declaration in scored_declarations_for_identifier.iter_mut() {
|
||||
if max_import_similarity > 0.0 {
|
||||
declaration.components.max_import_similarity = max_import_similarity;
|
||||
declaration.components.normalized_import_similarity =
|
||||
declaration.components.import_similarity / max_import_similarity;
|
||||
}
|
||||
if max_wildcard_import_similarity > 0.0 {
|
||||
declaration.components.normalized_wildcard_import_similarity =
|
||||
declaration.components.wildcard_import_similarity
|
||||
/ max_wildcard_import_similarity;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
scored_declarations_for_identifier
|
||||
})
|
||||
.flatten()
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
declarations.sort_unstable_by_key(|declaration| {
|
||||
@@ -160,6 +323,24 @@ pub fn scored_declarations(
|
||||
declarations
|
||||
}
|
||||
|
||||
struct CheckedDeclaration<'a> {
|
||||
declaration: &'a Declaration,
|
||||
same_file_line_distance: Option<u32>,
|
||||
path_import_match_count: usize,
|
||||
wildcard_path_import_match_count: usize,
|
||||
}
|
||||
|
||||
fn declaration_path_matches_import(
|
||||
declaration_path: &CachedDeclarationPath,
|
||||
import_path: &Arc<Path>,
|
||||
) -> bool {
|
||||
if import_path.is_absolute() {
|
||||
declaration_path.equals_absolute_path(import_path)
|
||||
} else {
|
||||
declaration_path.ends_with_posix_path(import_path)
|
||||
}
|
||||
}
|
||||
|
||||
fn range_intersection<T: Ord + Clone>(a: &Range<T>, b: &Range<T>) -> Option<Range<T>> {
|
||||
let start = a.start.clone().max(b.start.clone());
|
||||
let end = a.end.clone().min(b.end.clone());
|
||||
@@ -173,17 +354,23 @@ fn range_intersection<T: Ord + Clone>(a: &Range<T>, b: &Range<T>) -> Option<Rang
|
||||
fn score_declaration(
|
||||
identifier: &Identifier,
|
||||
references: &[Reference],
|
||||
declaration: Declaration,
|
||||
is_same_file: bool,
|
||||
declaration_line_distance: u32,
|
||||
declaration_line_distance_rank: usize,
|
||||
checked_declaration: CheckedDeclaration,
|
||||
same_file_declaration_count: usize,
|
||||
declaration_count: usize,
|
||||
excerpt_occurrences: &Occurrences,
|
||||
adjacent_occurrences: &Occurrences,
|
||||
import_occurrences: &[Occurrences],
|
||||
wildcard_import_occurrences: &[Occurrences],
|
||||
cursor: Point,
|
||||
current_buffer: &BufferSnapshot,
|
||||
) -> Option<ScoredDeclaration> {
|
||||
) -> ScoredDeclaration {
|
||||
let CheckedDeclaration {
|
||||
declaration,
|
||||
same_file_line_distance,
|
||||
path_import_match_count,
|
||||
wildcard_path_import_match_count,
|
||||
} = checked_declaration;
|
||||
|
||||
let is_referenced_nearby = references
|
||||
.iter()
|
||||
.any(|r| r.region == ReferenceRegion::Nearby);
|
||||
@@ -200,6 +387,9 @@ fn score_declaration(
|
||||
.min()
|
||||
.unwrap();
|
||||
|
||||
let is_same_file = same_file_line_distance.is_some();
|
||||
let declaration_line_distance = same_file_line_distance.unwrap_or(u32::MAX);
|
||||
|
||||
let item_source_occurrences = Occurrences::within_string(&declaration.item_text().0);
|
||||
let item_signature_occurrences = Occurrences::within_string(&declaration.signature_text().0);
|
||||
let excerpt_vs_item_jaccard = jaccard_similarity(excerpt_occurrences, &item_source_occurrences);
|
||||
@@ -219,6 +409,37 @@ fn score_declaration(
|
||||
let adjacent_vs_signature_weighted_overlap =
|
||||
weighted_overlap_coefficient(adjacent_occurrences, &item_signature_occurrences);
|
||||
|
||||
let mut import_similarity = 0f32;
|
||||
let mut wildcard_import_similarity = 0f32;
|
||||
if !import_occurrences.is_empty() || !wildcard_import_occurrences.is_empty() {
|
||||
let cached_path = declaration.cached_path();
|
||||
let path_occurrences = Occurrences::from_worktree_path(
|
||||
cached_path
|
||||
.worktree_abs_path
|
||||
.file_name()
|
||||
.map(|f| f.to_string_lossy()),
|
||||
&cached_path.rel_path,
|
||||
);
|
||||
import_similarity = import_occurrences
|
||||
.iter()
|
||||
.map(|namespace_occurrences| {
|
||||
OrderedFloat(jaccard_similarity(namespace_occurrences, &path_occurrences))
|
||||
})
|
||||
.max()
|
||||
.map(|similarity| similarity.into_inner())
|
||||
.unwrap_or_default();
|
||||
|
||||
// TODO: Consider something other than max
|
||||
wildcard_import_similarity = wildcard_import_occurrences
|
||||
.iter()
|
||||
.map(|namespace_occurrences| {
|
||||
OrderedFloat(jaccard_similarity(namespace_occurrences, &path_occurrences))
|
||||
})
|
||||
.max()
|
||||
.map(|similarity| similarity.into_inner())
|
||||
.unwrap_or_default();
|
||||
}
|
||||
|
||||
// TODO: Consider adding declaration_file_count
|
||||
let score_components = DeclarationScoreComponents {
|
||||
is_same_file,
|
||||
@@ -226,7 +447,6 @@ fn score_declaration(
|
||||
is_referenced_in_breadcrumb,
|
||||
reference_line_distance,
|
||||
declaration_line_distance,
|
||||
declaration_line_distance_rank,
|
||||
reference_count,
|
||||
same_file_declaration_count,
|
||||
declaration_count,
|
||||
@@ -238,52 +458,59 @@ fn score_declaration(
|
||||
excerpt_vs_signature_weighted_overlap,
|
||||
adjacent_vs_item_weighted_overlap,
|
||||
adjacent_vs_signature_weighted_overlap,
|
||||
path_import_match_count,
|
||||
wildcard_path_import_match_count,
|
||||
import_similarity,
|
||||
max_import_similarity: 0.0,
|
||||
normalized_import_similarity: 0.0,
|
||||
wildcard_import_similarity,
|
||||
normalized_wildcard_import_similarity: 0.0,
|
||||
};
|
||||
|
||||
Some(ScoredDeclaration {
|
||||
ScoredDeclaration {
|
||||
identifier: identifier.clone(),
|
||||
declaration: declaration,
|
||||
scores: DeclarationScores::score(&score_components),
|
||||
score_components,
|
||||
})
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Serialize)]
|
||||
pub struct DeclarationScores {
|
||||
pub signature: f32,
|
||||
pub declaration: f32,
|
||||
pub retrieval: f32,
|
||||
}
|
||||
|
||||
impl DeclarationScores {
|
||||
fn score(components: &DeclarationScoreComponents) -> DeclarationScores {
|
||||
// TODO: handle truncation
|
||||
|
||||
// Score related to how likely this is the correct declaration, range 0 to 1
|
||||
let retrieval = if components.is_same_file {
|
||||
// TODO: use declaration_line_distance_rank
|
||||
1.0 / components.same_file_declaration_count as f32
|
||||
} else {
|
||||
1.0 / components.declaration_count as f32
|
||||
};
|
||||
|
||||
// Score related to the distance between the reference and cursor, range 0 to 1
|
||||
let distance_score = if components.is_referenced_nearby {
|
||||
1.0 / (1.0 + components.reference_line_distance as f32 / 10.0).powf(2.0)
|
||||
} else {
|
||||
// same score as ~14 lines away, rationale is to not overly penalize references from parent signatures
|
||||
0.5
|
||||
};
|
||||
|
||||
// For now instead of linear combination, the scores are just multiplied together.
|
||||
let combined_score = 10.0 * retrieval * distance_score;
|
||||
|
||||
DeclarationScores {
|
||||
signature: combined_score * components.excerpt_vs_signature_weighted_overlap,
|
||||
// declaration score gets boosted both by being multiplied by 2 and by there being more
|
||||
// weighted overlap.
|
||||
declaration: 2.0 * combined_score * components.excerpt_vs_item_weighted_overlap,
|
||||
retrieval,
|
||||
}
|
||||
declaration: declaration.clone(),
|
||||
components: score_components,
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn test_declaration_path_matches() {
|
||||
let declaration_path =
|
||||
CachedDeclarationPath::new_for_test("/home/user/project", "src/maths.ts");
|
||||
|
||||
assert!(declaration_path_matches_import(
|
||||
&declaration_path,
|
||||
&Path::new("maths.ts").into()
|
||||
));
|
||||
|
||||
assert!(declaration_path_matches_import(
|
||||
&declaration_path,
|
||||
&Path::new("project/src/maths.ts").into()
|
||||
));
|
||||
|
||||
assert!(declaration_path_matches_import(
|
||||
&declaration_path,
|
||||
&Path::new("user/project/src/maths.ts").into()
|
||||
));
|
||||
|
||||
assert!(declaration_path_matches_import(
|
||||
&declaration_path,
|
||||
&Path::new("/home/user/project/src/maths.ts").into()
|
||||
));
|
||||
|
||||
assert!(!declaration_path_matches_import(
|
||||
&declaration_path,
|
||||
&Path::new("other.ts").into()
|
||||
));
|
||||
|
||||
assert!(!declaration_path_matches_import(
|
||||
&declaration_path,
|
||||
&Path::new("/home/user/project/src/other.ts").into()
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,12 +1,13 @@
|
||||
mod declaration;
|
||||
mod declaration_scoring;
|
||||
mod excerpt;
|
||||
mod imports;
|
||||
mod outline;
|
||||
mod reference;
|
||||
mod syntax_index;
|
||||
pub mod text_similarity;
|
||||
|
||||
use std::sync::Arc;
|
||||
use std::{path::Path, sync::Arc};
|
||||
|
||||
use collections::HashMap;
|
||||
use gpui::{App, AppContext as _, Entity, Task};
|
||||
@@ -16,9 +17,17 @@ use text::{Point, ToOffset as _};
|
||||
pub use declaration::*;
|
||||
pub use declaration_scoring::*;
|
||||
pub use excerpt::*;
|
||||
pub use imports::*;
|
||||
pub use reference::*;
|
||||
pub use syntax_index::*;
|
||||
|
||||
#[derive(Clone, Debug, PartialEq)]
|
||||
pub struct EditPredictionContextOptions {
|
||||
pub use_imports: bool,
|
||||
pub excerpt: EditPredictionExcerptOptions,
|
||||
pub score: EditPredictionScoreOptions,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub struct EditPredictionContext {
|
||||
pub excerpt: EditPredictionExcerpt,
|
||||
@@ -31,21 +40,34 @@ impl EditPredictionContext {
|
||||
pub fn gather_context_in_background(
|
||||
cursor_point: Point,
|
||||
buffer: BufferSnapshot,
|
||||
excerpt_options: EditPredictionExcerptOptions,
|
||||
options: EditPredictionContextOptions,
|
||||
syntax_index: Option<Entity<SyntaxIndex>>,
|
||||
cx: &mut App,
|
||||
) -> Task<Option<Self>> {
|
||||
let parent_abs_path = project::File::from_dyn(buffer.file()).and_then(|f| {
|
||||
let mut path = f.worktree.read(cx).absolutize(&f.path);
|
||||
if path.pop() { Some(path) } else { None }
|
||||
});
|
||||
|
||||
if let Some(syntax_index) = syntax_index {
|
||||
let index_state =
|
||||
syntax_index.read_with(cx, |index, _cx| Arc::downgrade(index.state()));
|
||||
cx.background_spawn(async move {
|
||||
let parent_abs_path = parent_abs_path.as_deref();
|
||||
let index_state = index_state.upgrade()?;
|
||||
let index_state = index_state.lock().await;
|
||||
Self::gather_context(cursor_point, &buffer, &excerpt_options, Some(&index_state))
|
||||
Self::gather_context(
|
||||
cursor_point,
|
||||
&buffer,
|
||||
parent_abs_path,
|
||||
&options,
|
||||
Some(&index_state),
|
||||
)
|
||||
})
|
||||
} else {
|
||||
cx.background_spawn(async move {
|
||||
Self::gather_context(cursor_point, &buffer, &excerpt_options, None)
|
||||
let parent_abs_path = parent_abs_path.as_deref();
|
||||
Self::gather_context(cursor_point, &buffer, parent_abs_path, &options, None)
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -53,13 +75,20 @@ impl EditPredictionContext {
|
||||
pub fn gather_context(
|
||||
cursor_point: Point,
|
||||
buffer: &BufferSnapshot,
|
||||
excerpt_options: &EditPredictionExcerptOptions,
|
||||
parent_abs_path: Option<&Path>,
|
||||
options: &EditPredictionContextOptions,
|
||||
index_state: Option<&SyntaxIndexState>,
|
||||
) -> Option<Self> {
|
||||
let imports = if options.use_imports {
|
||||
Imports::gather(&buffer, parent_abs_path)
|
||||
} else {
|
||||
Imports::default()
|
||||
};
|
||||
Self::gather_context_with_references_fn(
|
||||
cursor_point,
|
||||
buffer,
|
||||
excerpt_options,
|
||||
&imports,
|
||||
options,
|
||||
index_state,
|
||||
references_in_excerpt,
|
||||
)
|
||||
@@ -68,7 +97,8 @@ impl EditPredictionContext {
|
||||
pub fn gather_context_with_references_fn(
|
||||
cursor_point: Point,
|
||||
buffer: &BufferSnapshot,
|
||||
excerpt_options: &EditPredictionExcerptOptions,
|
||||
imports: &Imports,
|
||||
options: &EditPredictionContextOptions,
|
||||
index_state: Option<&SyntaxIndexState>,
|
||||
get_references: impl FnOnce(
|
||||
&EditPredictionExcerpt,
|
||||
@@ -79,7 +109,7 @@ impl EditPredictionContext {
|
||||
let excerpt = EditPredictionExcerpt::select_from_buffer(
|
||||
cursor_point,
|
||||
buffer,
|
||||
excerpt_options,
|
||||
&options.excerpt,
|
||||
index_state,
|
||||
)?;
|
||||
let excerpt_text = excerpt.text(buffer);
|
||||
@@ -101,10 +131,12 @@ impl EditPredictionContext {
|
||||
let references = get_references(&excerpt, &excerpt_text, buffer);
|
||||
|
||||
scored_declarations(
|
||||
&options.score,
|
||||
&index_state,
|
||||
&excerpt,
|
||||
&excerpt_occurrences,
|
||||
&adjacent_occurrences,
|
||||
&imports,
|
||||
references,
|
||||
cursor_offset_in_file,
|
||||
buffer,
|
||||
@@ -160,12 +192,18 @@ mod tests {
|
||||
EditPredictionContext::gather_context_in_background(
|
||||
cursor_point,
|
||||
buffer_snapshot,
|
||||
EditPredictionExcerptOptions {
|
||||
max_bytes: 60,
|
||||
min_bytes: 10,
|
||||
target_before_cursor_over_total_bytes: 0.5,
|
||||
EditPredictionContextOptions {
|
||||
use_imports: true,
|
||||
excerpt: EditPredictionExcerptOptions {
|
||||
max_bytes: 60,
|
||||
min_bytes: 10,
|
||||
target_before_cursor_over_total_bytes: 0.5,
|
||||
},
|
||||
score: EditPredictionScoreOptions {
|
||||
omit_excerpt_overlaps: true,
|
||||
},
|
||||
},
|
||||
Some(index),
|
||||
Some(index.clone()),
|
||||
cx,
|
||||
)
|
||||
})
|
||||
|
||||
1319
crates/edit_prediction_context/src/imports.rs
Normal file
1319
crates/edit_prediction_context/src/imports.rs
Normal file
File diff suppressed because it is too large
Load Diff
@@ -5,6 +5,7 @@ use futures::lock::Mutex;
|
||||
use futures::{FutureExt as _, StreamExt, future};
|
||||
use gpui::{App, AppContext as _, AsyncApp, Context, Entity, Task, WeakEntity};
|
||||
use itertools::Itertools;
|
||||
|
||||
use language::{Buffer, BufferEvent};
|
||||
use postage::stream::Stream as _;
|
||||
use project::buffer_store::{BufferStore, BufferStoreEvent};
|
||||
@@ -17,6 +18,7 @@ use std::sync::Arc;
|
||||
use text::BufferId;
|
||||
use util::{RangeExt as _, debug_panic, some_or_debug_panic};
|
||||
|
||||
use crate::CachedDeclarationPath;
|
||||
use crate::declaration::{
|
||||
BufferDeclaration, Declaration, DeclarationId, FileDeclaration, Identifier,
|
||||
};
|
||||
@@ -28,6 +30,8 @@ use crate::outline::declarations_in_buffer;
|
||||
// `buffer_declarations_containing_range` assumes that the index is always immediately up to date.
|
||||
//
|
||||
// * Add a per language configuration for skipping indexing.
|
||||
//
|
||||
// * Handle tsx / ts / js referencing each-other
|
||||
|
||||
// Potential future improvements:
|
||||
//
|
||||
@@ -61,6 +65,7 @@ pub struct SyntaxIndex {
|
||||
state: Arc<Mutex<SyntaxIndexState>>,
|
||||
project: WeakEntity<Project>,
|
||||
initial_file_indexing_done_rx: postage::watch::Receiver<bool>,
|
||||
_file_indexing_task: Option<Task<()>>,
|
||||
}
|
||||
|
||||
pub struct SyntaxIndexState {
|
||||
@@ -70,7 +75,6 @@ pub struct SyntaxIndexState {
|
||||
buffers: HashMap<BufferId, BufferState>,
|
||||
dirty_files: HashMap<ProjectEntryId, ProjectPath>,
|
||||
dirty_files_tx: mpsc::Sender<()>,
|
||||
_file_indexing_task: Option<Task<()>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
@@ -102,12 +106,12 @@ impl SyntaxIndex {
|
||||
buffers: HashMap::default(),
|
||||
dirty_files: HashMap::default(),
|
||||
dirty_files_tx,
|
||||
_file_indexing_task: None,
|
||||
};
|
||||
let this = Self {
|
||||
let mut this = Self {
|
||||
project: project.downgrade(),
|
||||
state: Arc::new(Mutex::new(initial_state)),
|
||||
initial_file_indexing_done_rx,
|
||||
_file_indexing_task: None,
|
||||
};
|
||||
|
||||
let worktree_store = project.read(cx).worktree_store();
|
||||
@@ -116,75 +120,77 @@ impl SyntaxIndex {
|
||||
.worktrees()
|
||||
.map(|w| w.read(cx).snapshot())
|
||||
.collect::<Vec<_>>();
|
||||
if !initial_worktree_snapshots.is_empty() {
|
||||
this.state.try_lock().unwrap()._file_indexing_task =
|
||||
Some(cx.spawn(async move |this, cx| {
|
||||
let snapshots_file_count = initial_worktree_snapshots
|
||||
.iter()
|
||||
.map(|worktree| worktree.file_count())
|
||||
.sum::<usize>();
|
||||
let chunk_size = snapshots_file_count.div_ceil(file_indexing_parallelism);
|
||||
let chunk_count = snapshots_file_count.div_ceil(chunk_size);
|
||||
let file_chunks = initial_worktree_snapshots
|
||||
.iter()
|
||||
.flat_map(|worktree| {
|
||||
let worktree_id = worktree.id();
|
||||
worktree.files(false, 0).map(move |entry| {
|
||||
(
|
||||
entry.id,
|
||||
ProjectPath {
|
||||
worktree_id,
|
||||
path: entry.path.clone(),
|
||||
},
|
||||
)
|
||||
})
|
||||
this._file_indexing_task = Some(cx.spawn(async move |this, cx| {
|
||||
let snapshots_file_count = initial_worktree_snapshots
|
||||
.iter()
|
||||
.map(|worktree| worktree.file_count())
|
||||
.sum::<usize>();
|
||||
if snapshots_file_count > 0 {
|
||||
let chunk_size = snapshots_file_count.div_ceil(file_indexing_parallelism);
|
||||
let chunk_count = snapshots_file_count.div_ceil(chunk_size);
|
||||
let file_chunks = initial_worktree_snapshots
|
||||
.iter()
|
||||
.flat_map(|worktree| {
|
||||
let worktree_id = worktree.id();
|
||||
worktree.files(false, 0).map(move |entry| {
|
||||
(
|
||||
entry.id,
|
||||
ProjectPath {
|
||||
worktree_id,
|
||||
path: entry.path.clone(),
|
||||
},
|
||||
)
|
||||
})
|
||||
.chunks(chunk_size);
|
||||
})
|
||||
.chunks(chunk_size);
|
||||
|
||||
let mut tasks = Vec::with_capacity(chunk_count);
|
||||
for chunk in file_chunks.into_iter() {
|
||||
tasks.push(Self::update_dirty_files(
|
||||
&this,
|
||||
chunk.into_iter().collect(),
|
||||
cx.clone(),
|
||||
));
|
||||
}
|
||||
futures::future::join_all(tasks).await;
|
||||
let mut tasks = Vec::with_capacity(chunk_count);
|
||||
for chunk in file_chunks.into_iter() {
|
||||
tasks.push(Self::update_dirty_files(
|
||||
&this,
|
||||
chunk.into_iter().collect(),
|
||||
cx.clone(),
|
||||
));
|
||||
}
|
||||
futures::future::join_all(tasks).await;
|
||||
log::info!("Finished initial file indexing");
|
||||
}
|
||||
|
||||
log::info!("Finished initial file indexing");
|
||||
*initial_file_indexing_done_tx.borrow_mut() = true;
|
||||
*initial_file_indexing_done_tx.borrow_mut() = true;
|
||||
|
||||
let Ok(state) = this.read_with(cx, |this, _cx| this.state.clone()) else {
|
||||
return;
|
||||
};
|
||||
while dirty_files_rx.next().await.is_some() {
|
||||
let mut state = state.lock().await;
|
||||
let was_underused = state.dirty_files.capacity() > 255
|
||||
&& state.dirty_files.len() * 8 < state.dirty_files.capacity();
|
||||
let dirty_files = state.dirty_files.drain().collect::<Vec<_>>();
|
||||
if was_underused {
|
||||
state.dirty_files.shrink_to_fit();
|
||||
}
|
||||
drop(state);
|
||||
if dirty_files.is_empty() {
|
||||
continue;
|
||||
}
|
||||
let Ok(state) = this.read_with(cx, |this, _cx| Arc::downgrade(&this.state)) else {
|
||||
return;
|
||||
};
|
||||
while dirty_files_rx.next().await.is_some() {
|
||||
let Some(state) = state.upgrade() else {
|
||||
return;
|
||||
};
|
||||
let mut state = state.lock().await;
|
||||
let was_underused = state.dirty_files.capacity() > 255
|
||||
&& state.dirty_files.len() * 8 < state.dirty_files.capacity();
|
||||
let dirty_files = state.dirty_files.drain().collect::<Vec<_>>();
|
||||
if was_underused {
|
||||
state.dirty_files.shrink_to_fit();
|
||||
}
|
||||
drop(state);
|
||||
if dirty_files.is_empty() {
|
||||
continue;
|
||||
}
|
||||
|
||||
let chunk_size = dirty_files.len().div_ceil(file_indexing_parallelism);
|
||||
let chunk_count = dirty_files.len().div_ceil(chunk_size);
|
||||
let mut tasks = Vec::with_capacity(chunk_count);
|
||||
let chunks = dirty_files.into_iter().chunks(chunk_size);
|
||||
for chunk in chunks.into_iter() {
|
||||
tasks.push(Self::update_dirty_files(
|
||||
&this,
|
||||
chunk.into_iter().collect(),
|
||||
cx.clone(),
|
||||
));
|
||||
}
|
||||
futures::future::join_all(tasks).await;
|
||||
}
|
||||
}));
|
||||
}
|
||||
let chunk_size = dirty_files.len().div_ceil(file_indexing_parallelism);
|
||||
let chunk_count = dirty_files.len().div_ceil(chunk_size);
|
||||
let mut tasks = Vec::with_capacity(chunk_count);
|
||||
let chunks = dirty_files.into_iter().chunks(chunk_size);
|
||||
for chunk in chunks.into_iter() {
|
||||
tasks.push(Self::update_dirty_files(
|
||||
&this,
|
||||
chunk.into_iter().collect(),
|
||||
cx.clone(),
|
||||
));
|
||||
}
|
||||
futures::future::join_all(tasks).await;
|
||||
}
|
||||
}));
|
||||
|
||||
cx.subscribe(&worktree_store, Self::handle_worktree_store_event)
|
||||
.detach();
|
||||
@@ -364,7 +370,9 @@ impl SyntaxIndex {
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
match event {
|
||||
BufferEvent::Edited => self.update_buffer(buffer, cx),
|
||||
BufferEvent::Edited |
|
||||
// paths are cached and so should be updated
|
||||
BufferEvent::FileHandleChanged => self.update_buffer(buffer, cx),
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
@@ -375,8 +383,16 @@ impl SyntaxIndex {
|
||||
return;
|
||||
}
|
||||
|
||||
let Some(project_entry_id) =
|
||||
project::File::from_dyn(buffer.file()).and_then(|f| f.project_entry_id(cx))
|
||||
let Some((project_entry_id, cached_path)) = project::File::from_dyn(buffer.file())
|
||||
.and_then(|f| {
|
||||
let project_entry_id = f.project_entry_id()?;
|
||||
let cached_path = CachedDeclarationPath::new(
|
||||
f.worktree.read(cx).abs_path(),
|
||||
&f.path,
|
||||
buffer.language(),
|
||||
);
|
||||
Some((project_entry_id, cached_path))
|
||||
})
|
||||
else {
|
||||
return;
|
||||
};
|
||||
@@ -440,6 +456,7 @@ impl SyntaxIndex {
|
||||
buffer_id,
|
||||
declaration,
|
||||
project_entry_id,
|
||||
cached_path: cached_path.clone(),
|
||||
});
|
||||
new_ids.push(declaration_id);
|
||||
|
||||
@@ -507,13 +524,14 @@ impl SyntaxIndex {
|
||||
|
||||
let snapshot_task = worktree.update(cx, |worktree, cx| {
|
||||
let load_task = worktree.load_file(&project_path.path, cx);
|
||||
let worktree_abs_path = worktree.abs_path();
|
||||
cx.spawn(async move |_this, cx| {
|
||||
let loaded_file = load_task.await?;
|
||||
let language = language.await?;
|
||||
|
||||
let buffer = cx.new(|cx| {
|
||||
let mut buffer = Buffer::local(loaded_file.text, cx);
|
||||
buffer.set_language(Some(language), cx);
|
||||
buffer.set_language(Some(language.clone()), cx);
|
||||
buffer
|
||||
})?;
|
||||
|
||||
@@ -522,14 +540,22 @@ impl SyntaxIndex {
|
||||
parse_status.changed().await?;
|
||||
}
|
||||
|
||||
buffer.read_with(cx, |buffer, _cx| buffer.snapshot())
|
||||
let cached_path = CachedDeclarationPath::new(
|
||||
worktree_abs_path,
|
||||
&project_path.path,
|
||||
Some(&language),
|
||||
);
|
||||
|
||||
let snapshot = buffer.read_with(cx, |buffer, _cx| buffer.snapshot())?;
|
||||
|
||||
anyhow::Ok((snapshot, cached_path))
|
||||
})
|
||||
});
|
||||
|
||||
let state = Arc::downgrade(&self.state);
|
||||
cx.background_spawn(async move {
|
||||
// TODO: How to handle errors?
|
||||
let Ok(snapshot) = snapshot_task.await else {
|
||||
let Ok((snapshot, cached_path)) = snapshot_task.await else {
|
||||
return;
|
||||
};
|
||||
let rope = snapshot.as_rope();
|
||||
@@ -567,6 +593,7 @@ impl SyntaxIndex {
|
||||
let declaration_id = state.declarations.insert(Declaration::File {
|
||||
project_entry_id: entry_id,
|
||||
declaration,
|
||||
cached_path: cached_path.clone(),
|
||||
});
|
||||
new_ids.push(declaration_id);
|
||||
|
||||
@@ -921,6 +948,7 @@ mod tests {
|
||||
if let Declaration::File {
|
||||
declaration,
|
||||
project_entry_id: file,
|
||||
..
|
||||
} = declaration
|
||||
{
|
||||
assert_eq!(
|
||||
|
||||
@@ -1,9 +1,12 @@
|
||||
use hashbrown::HashTable;
|
||||
use regex::Regex;
|
||||
use std::{
|
||||
borrow::Cow,
|
||||
hash::{Hash, Hasher as _},
|
||||
path::Path,
|
||||
sync::LazyLock,
|
||||
};
|
||||
use util::rel_path::RelPath;
|
||||
|
||||
use crate::reference::Reference;
|
||||
|
||||
@@ -45,19 +48,34 @@ impl Occurrences {
|
||||
)
|
||||
}
|
||||
|
||||
pub fn from_identifiers<'a>(identifiers: impl IntoIterator<Item = &'a str>) -> Self {
|
||||
pub fn from_identifiers(identifiers: impl IntoIterator<Item = impl AsRef<str>>) -> Self {
|
||||
let mut this = Self::default();
|
||||
// TODO: Score matches that match case higher?
|
||||
//
|
||||
// TODO: Also include unsplit identifier?
|
||||
for identifier in identifiers {
|
||||
for identifier_part in split_identifier(identifier) {
|
||||
for identifier_part in split_identifier(identifier.as_ref()) {
|
||||
this.add_hash(fx_hash(&identifier_part.to_lowercase()));
|
||||
}
|
||||
}
|
||||
this
|
||||
}
|
||||
|
||||
pub fn from_worktree_path(worktree_name: Option<Cow<'_, str>>, rel_path: &RelPath) -> Self {
|
||||
if let Some(worktree_name) = worktree_name {
|
||||
Self::from_identifiers(
|
||||
std::iter::once(worktree_name)
|
||||
.chain(iter_path_without_extension(rel_path.as_std_path())),
|
||||
)
|
||||
} else {
|
||||
Self::from_path(rel_path.as_std_path())
|
||||
}
|
||||
}
|
||||
|
||||
pub fn from_path(path: &Path) -> Self {
|
||||
Self::from_identifiers(iter_path_without_extension(path))
|
||||
}
|
||||
|
||||
fn add_hash(&mut self, hash: u64) {
|
||||
self.table
|
||||
.entry(
|
||||
@@ -82,6 +100,15 @@ impl Occurrences {
|
||||
}
|
||||
}
|
||||
|
||||
fn iter_path_without_extension(path: &Path) -> impl Iterator<Item = Cow<'_, str>> {
|
||||
let last_component: Option<Cow<'_, str>> = path.file_stem().map(|stem| stem.to_string_lossy());
|
||||
let mut path_components = path.components();
|
||||
path_components.next_back();
|
||||
path_components
|
||||
.map(|component| component.as_os_str().to_string_lossy())
|
||||
.chain(last_component)
|
||||
}
|
||||
|
||||
pub fn fx_hash<T: Hash + ?Sized>(data: &T) -> u64 {
|
||||
let mut hasher = collections::FxHasher::default();
|
||||
data.hash(&mut hasher);
|
||||
@@ -269,4 +296,19 @@ mod test {
|
||||
// the smaller set, 10.
|
||||
assert_eq!(weighted_overlap_coefficient(&set_a, &set_b), 7.0 / 10.0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_iter_path_without_extension() {
|
||||
let mut iter = iter_path_without_extension(Path::new(""));
|
||||
assert_eq!(iter.next(), None);
|
||||
|
||||
let iter = iter_path_without_extension(Path::new("foo"));
|
||||
assert_eq!(iter.collect::<Vec<_>>(), ["foo"]);
|
||||
|
||||
let iter = iter_path_without_extension(Path::new("foo/bar.txt"));
|
||||
assert_eq!(iter.collect::<Vec<_>>(), ["foo", "bar"]);
|
||||
|
||||
let iter = iter_path_without_extension(Path::new("foo/bar/baz.txt"));
|
||||
assert_eq!(iter.collect::<Vec<_>>(), ["foo", "bar", "baz"]);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -456,6 +456,33 @@ actions!(
|
||||
Fold,
|
||||
/// Folds all foldable regions in the editor.
|
||||
FoldAll,
|
||||
/// Folds all code blocks at indentation level 1.
|
||||
#[action(name = "FoldAtLevel_1")]
|
||||
FoldAtLevel1,
|
||||
/// Folds all code blocks at indentation level 2.
|
||||
#[action(name = "FoldAtLevel_2")]
|
||||
FoldAtLevel2,
|
||||
/// Folds all code blocks at indentation level 3.
|
||||
#[action(name = "FoldAtLevel_3")]
|
||||
FoldAtLevel3,
|
||||
/// Folds all code blocks at indentation level 4.
|
||||
#[action(name = "FoldAtLevel_4")]
|
||||
FoldAtLevel4,
|
||||
/// Folds all code blocks at indentation level 5.
|
||||
#[action(name = "FoldAtLevel_5")]
|
||||
FoldAtLevel5,
|
||||
/// Folds all code blocks at indentation level 6.
|
||||
#[action(name = "FoldAtLevel_6")]
|
||||
FoldAtLevel6,
|
||||
/// Folds all code blocks at indentation level 7.
|
||||
#[action(name = "FoldAtLevel_7")]
|
||||
FoldAtLevel7,
|
||||
/// Folds all code blocks at indentation level 8.
|
||||
#[action(name = "FoldAtLevel_8")]
|
||||
FoldAtLevel8,
|
||||
/// Folds all code blocks at indentation level 9.
|
||||
#[action(name = "FoldAtLevel_9")]
|
||||
FoldAtLevel9,
|
||||
/// Folds all function bodies in the editor.
|
||||
FoldFunctionBodies,
|
||||
/// Folds the current code block and all its children.
|
||||
|
||||
@@ -689,6 +689,7 @@ impl BlockMap {
|
||||
|
||||
// For each of these blocks, insert a new isomorphic transform preceding the block,
|
||||
// and then insert the block itself.
|
||||
let mut just_processed_folded_buffer = false;
|
||||
for (block_placement, block) in blocks_in_edit.drain(..) {
|
||||
let mut summary = TransformSummary {
|
||||
input_rows: 0,
|
||||
@@ -701,8 +702,12 @@ impl BlockMap {
|
||||
match block_placement {
|
||||
BlockPlacement::Above(position) => {
|
||||
rows_before_block = position.0 - new_transforms.summary().input_rows;
|
||||
just_processed_folded_buffer = false;
|
||||
}
|
||||
BlockPlacement::Near(position) | BlockPlacement::Below(position) => {
|
||||
if just_processed_folded_buffer {
|
||||
continue;
|
||||
}
|
||||
if position.0 + 1 < new_transforms.summary().input_rows {
|
||||
continue;
|
||||
}
|
||||
@@ -711,6 +716,7 @@ impl BlockMap {
|
||||
BlockPlacement::Replace(range) => {
|
||||
rows_before_block = range.start().0 - new_transforms.summary().input_rows;
|
||||
summary.input_rows = range.end().0 - range.start().0 + 1;
|
||||
just_processed_folded_buffer = matches!(block, Block::FoldedBuffer { .. });
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3566,6 +3572,96 @@ mod tests {
|
||||
assert_eq!(blocks_snapshot.text(), "abc\n\ndef\nghi\njkl\nmno");
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_folded_buffer_with_near_blocks(cx: &mut gpui::TestAppContext) {
|
||||
cx.update(init_test);
|
||||
|
||||
let text = "line 1\nline 2\nline 3";
|
||||
let buffer = cx.update(|cx| {
|
||||
MultiBuffer::build_multi([(text, vec![Point::new(0, 0)..Point::new(2, 6)])], cx)
|
||||
});
|
||||
let buffer_snapshot = cx.update(|cx| buffer.read(cx).snapshot(cx));
|
||||
let buffer_ids = buffer_snapshot
|
||||
.excerpts()
|
||||
.map(|(_, buffer_snapshot, _)| buffer_snapshot.remote_id())
|
||||
.dedup()
|
||||
.collect::<Vec<_>>();
|
||||
assert_eq!(buffer_ids.len(), 1);
|
||||
let buffer_id = buffer_ids[0];
|
||||
|
||||
let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
|
||||
let (_, fold_snapshot) = FoldMap::new(inlay_snapshot);
|
||||
let (_, tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap());
|
||||
let (_, wrap_snapshot) =
|
||||
cx.update(|cx| WrapMap::new(tab_snapshot, font("Helvetica"), px(14.0), None, cx));
|
||||
let mut block_map = BlockMap::new(wrap_snapshot.clone(), 1, 1);
|
||||
|
||||
let mut writer = block_map.write(wrap_snapshot.clone(), Patch::default());
|
||||
writer.insert(vec![BlockProperties {
|
||||
style: BlockStyle::Fixed,
|
||||
placement: BlockPlacement::Near(buffer_snapshot.anchor_after(Point::new(0, 0))),
|
||||
height: Some(1),
|
||||
render: Arc::new(|_| div().into_any()),
|
||||
priority: 0,
|
||||
}]);
|
||||
|
||||
let blocks_snapshot = block_map.read(wrap_snapshot.clone(), Patch::default());
|
||||
assert_eq!(blocks_snapshot.text(), "\nline 1\n\nline 2\nline 3");
|
||||
|
||||
let mut writer = block_map.write(wrap_snapshot.clone(), Patch::default());
|
||||
buffer.read_with(cx, |buffer, cx| {
|
||||
writer.fold_buffers([buffer_id], buffer, cx);
|
||||
});
|
||||
|
||||
let blocks_snapshot = block_map.read(wrap_snapshot, Patch::default());
|
||||
assert_eq!(blocks_snapshot.text(), "");
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_folded_buffer_with_near_blocks_on_last_line(cx: &mut gpui::TestAppContext) {
|
||||
cx.update(init_test);
|
||||
|
||||
let text = "line 1\nline 2\nline 3\nline 4";
|
||||
let buffer = cx.update(|cx| {
|
||||
MultiBuffer::build_multi([(text, vec![Point::new(0, 0)..Point::new(3, 6)])], cx)
|
||||
});
|
||||
let buffer_snapshot = cx.update(|cx| buffer.read(cx).snapshot(cx));
|
||||
let buffer_ids = buffer_snapshot
|
||||
.excerpts()
|
||||
.map(|(_, buffer_snapshot, _)| buffer_snapshot.remote_id())
|
||||
.dedup()
|
||||
.collect::<Vec<_>>();
|
||||
assert_eq!(buffer_ids.len(), 1);
|
||||
let buffer_id = buffer_ids[0];
|
||||
|
||||
let (_, inlay_snapshot) = InlayMap::new(buffer_snapshot.clone());
|
||||
let (_, fold_snapshot) = FoldMap::new(inlay_snapshot);
|
||||
let (_, tab_snapshot) = TabMap::new(fold_snapshot, 4.try_into().unwrap());
|
||||
let (_, wrap_snapshot) =
|
||||
cx.update(|cx| WrapMap::new(tab_snapshot, font("Helvetica"), px(14.0), None, cx));
|
||||
let mut block_map = BlockMap::new(wrap_snapshot.clone(), 1, 1);
|
||||
|
||||
let mut writer = block_map.write(wrap_snapshot.clone(), Patch::default());
|
||||
writer.insert(vec![BlockProperties {
|
||||
style: BlockStyle::Fixed,
|
||||
placement: BlockPlacement::Near(buffer_snapshot.anchor_after(Point::new(3, 6))),
|
||||
height: Some(1),
|
||||
render: Arc::new(|_| div().into_any()),
|
||||
priority: 0,
|
||||
}]);
|
||||
|
||||
let blocks_snapshot = block_map.read(wrap_snapshot.clone(), Patch::default());
|
||||
assert_eq!(blocks_snapshot.text(), "\nline 1\nline 2\nline 3\nline 4\n");
|
||||
|
||||
let mut writer = block_map.write(wrap_snapshot.clone(), Patch::default());
|
||||
buffer.read_with(cx, |buffer, cx| {
|
||||
writer.fold_buffers([buffer_id], buffer, cx);
|
||||
});
|
||||
|
||||
let blocks_snapshot = block_map.read(wrap_snapshot, Patch::default());
|
||||
assert_eq!(blocks_snapshot.text(), "");
|
||||
}
|
||||
|
||||
fn init_test(cx: &mut gpui::App) {
|
||||
let settings = SettingsStore::test(cx);
|
||||
cx.set_global(settings);
|
||||
|
||||
@@ -3172,7 +3172,7 @@ impl Editor {
|
||||
self.refresh_code_actions(window, cx);
|
||||
self.refresh_document_highlights(cx);
|
||||
self.refresh_selected_text_highlights(false, window, cx);
|
||||
refresh_matching_bracket_highlights(self, window, cx);
|
||||
refresh_matching_bracket_highlights(self, cx);
|
||||
self.update_visible_edit_prediction(window, cx);
|
||||
self.edit_prediction_requires_modifier_in_indent_conflict = true;
|
||||
linked_editing_ranges::refresh_linked_ranges(self, window, cx);
|
||||
@@ -5343,7 +5343,7 @@ impl Editor {
|
||||
let buffer_worktree = project.worktree_for_id(buffer_file.worktree_id(cx), cx)?;
|
||||
let worktree_entry = buffer_worktree
|
||||
.read(cx)
|
||||
.entry_for_id(buffer_file.project_entry_id(cx)?)?;
|
||||
.entry_for_id(buffer_file.project_entry_id()?)?;
|
||||
if worktree_entry.is_ignored {
|
||||
return None;
|
||||
}
|
||||
@@ -6607,26 +6607,32 @@ impl Editor {
|
||||
&self.context_menu
|
||||
}
|
||||
|
||||
fn refresh_code_actions(&mut self, window: &mut Window, cx: &mut Context<Self>) -> Option<()> {
|
||||
let newest_selection = self.selections.newest_anchor().clone();
|
||||
let newest_selection_adjusted = self.selections.newest_adjusted(cx);
|
||||
let buffer = self.buffer.read(cx);
|
||||
if newest_selection.head().diff_base_anchor.is_some() {
|
||||
return None;
|
||||
}
|
||||
let (start_buffer, start) =
|
||||
buffer.text_anchor_for_position(newest_selection_adjusted.start, cx)?;
|
||||
let (end_buffer, end) =
|
||||
buffer.text_anchor_for_position(newest_selection_adjusted.end, cx)?;
|
||||
if start_buffer != end_buffer {
|
||||
return None;
|
||||
}
|
||||
|
||||
fn refresh_code_actions(&mut self, window: &mut Window, cx: &mut Context<Self>) {
|
||||
self.code_actions_task = Some(cx.spawn_in(window, async move |this, cx| {
|
||||
cx.background_executor()
|
||||
.timer(CODE_ACTIONS_DEBOUNCE_TIMEOUT)
|
||||
.await;
|
||||
|
||||
let (start_buffer, start, _, end, newest_selection) = this
|
||||
.update(cx, |this, cx| {
|
||||
let newest_selection = this.selections.newest_anchor().clone();
|
||||
if newest_selection.head().diff_base_anchor.is_some() {
|
||||
return None;
|
||||
}
|
||||
let newest_selection_adjusted = this.selections.newest_adjusted(cx);
|
||||
let buffer = this.buffer.read(cx);
|
||||
|
||||
let (start_buffer, start) =
|
||||
buffer.text_anchor_for_position(newest_selection_adjusted.start, cx)?;
|
||||
let (end_buffer, end) =
|
||||
buffer.text_anchor_for_position(newest_selection_adjusted.end, cx)?;
|
||||
|
||||
Some((start_buffer, start, end_buffer, end, newest_selection))
|
||||
})?
|
||||
.filter(|(start_buffer, _, end_buffer, _, _)| start_buffer == end_buffer)
|
||||
.context(
|
||||
"Expected selection to lie in a single buffer when refreshing code actions",
|
||||
)?;
|
||||
let (providers, tasks) = this.update_in(cx, |this, window, cx| {
|
||||
let providers = this.code_action_providers.clone();
|
||||
let tasks = this
|
||||
@@ -6667,7 +6673,6 @@ impl Editor {
|
||||
cx.notify();
|
||||
})
|
||||
}));
|
||||
None
|
||||
}
|
||||
|
||||
fn start_inline_blame_timer(&mut self, window: &mut Window, cx: &mut Context<Self>) {
|
||||
@@ -6917,19 +6922,24 @@ impl Editor {
|
||||
if self.selections.count() != 1 || self.selections.line_mode() {
|
||||
return None;
|
||||
}
|
||||
let selection = self.selections.newest::<Point>(cx);
|
||||
if selection.is_empty() || selection.start.row != selection.end.row {
|
||||
let selection = self.selections.newest_anchor();
|
||||
let multi_buffer_snapshot = self.buffer().read(cx).snapshot(cx);
|
||||
let selection_point_range = selection.start.to_point(&multi_buffer_snapshot)
|
||||
..selection.end.to_point(&multi_buffer_snapshot);
|
||||
// If the selection spans multiple rows OR it is empty
|
||||
if selection_point_range.start.row != selection_point_range.end.row
|
||||
|| selection_point_range.start.column == selection_point_range.end.column
|
||||
{
|
||||
return None;
|
||||
}
|
||||
let multi_buffer_snapshot = self.buffer().read(cx).snapshot(cx);
|
||||
let selection_anchor_range = selection.range().to_anchors(&multi_buffer_snapshot);
|
||||
|
||||
let query = multi_buffer_snapshot
|
||||
.text_for_range(selection_anchor_range.clone())
|
||||
.text_for_range(selection.range())
|
||||
.collect::<String>();
|
||||
if query.trim().is_empty() {
|
||||
return None;
|
||||
}
|
||||
Some((query, selection_anchor_range))
|
||||
Some((query, selection.range()))
|
||||
}
|
||||
|
||||
fn update_selection_occurrence_highlights(
|
||||
@@ -11687,13 +11697,26 @@ impl Editor {
|
||||
rows.end.previous_row().0,
|
||||
buffer.line_len(rows.end.previous_row()),
|
||||
);
|
||||
let text = buffer
|
||||
.text_for_range(start..end)
|
||||
.chain(Some("\n"))
|
||||
.collect::<String>();
|
||||
|
||||
let mut text = buffer.text_for_range(start..end).collect::<String>();
|
||||
|
||||
let insert_location = if upwards {
|
||||
Point::new(rows.end.0, 0)
|
||||
// When duplicating upward, we need to insert before the current line.
|
||||
// If we're on the last line and it doesn't end with a newline,
|
||||
// we need to add a newline before the duplicated content.
|
||||
let needs_leading_newline = rows.end.0 >= buffer.max_point().row
|
||||
&& buffer.max_point().column > 0
|
||||
&& !text.ends_with('\n');
|
||||
|
||||
if needs_leading_newline {
|
||||
text.insert(0, '\n');
|
||||
end
|
||||
} else {
|
||||
text.push('\n');
|
||||
Point::new(rows.end.0, 0)
|
||||
}
|
||||
} else {
|
||||
text.push('\n');
|
||||
start
|
||||
};
|
||||
edits.push((insert_location..insert_location, text));
|
||||
@@ -12503,9 +12526,18 @@ impl Editor {
|
||||
let mut start = selection.start;
|
||||
let mut end = selection.end;
|
||||
let is_entire_line = selection.is_empty() || self.selections.line_mode();
|
||||
let mut add_trailing_newline = false;
|
||||
if is_entire_line {
|
||||
start = Point::new(start.row, 0);
|
||||
end = cmp::min(max_point, Point::new(end.row + 1, 0));
|
||||
let next_line_start = Point::new(end.row + 1, 0);
|
||||
if next_line_start <= max_point {
|
||||
end = next_line_start;
|
||||
} else {
|
||||
// We're on the last line without a trailing newline.
|
||||
// Copy to the end of the line and add a newline afterwards.
|
||||
end = Point::new(end.row, buffer.line_len(MultiBufferRow(end.row)));
|
||||
add_trailing_newline = true;
|
||||
}
|
||||
}
|
||||
|
||||
let mut trimmed_selections = Vec::new();
|
||||
@@ -12556,6 +12588,10 @@ impl Editor {
|
||||
text.push_str(chunk);
|
||||
len += chunk.len();
|
||||
}
|
||||
if add_trailing_newline {
|
||||
text.push('\n');
|
||||
len += 1;
|
||||
}
|
||||
clipboard_selections.push(ClipboardSelection {
|
||||
len,
|
||||
is_entire_line,
|
||||
@@ -18170,6 +18206,87 @@ impl Editor {
|
||||
self.fold_creases(to_fold, true, window, cx);
|
||||
}
|
||||
|
||||
pub fn fold_at_level_1(
|
||||
&mut self,
|
||||
_: &actions::FoldAtLevel1,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.fold_at_level(&actions::FoldAtLevel(1), window, cx);
|
||||
}
|
||||
|
||||
pub fn fold_at_level_2(
|
||||
&mut self,
|
||||
_: &actions::FoldAtLevel2,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.fold_at_level(&actions::FoldAtLevel(2), window, cx);
|
||||
}
|
||||
|
||||
pub fn fold_at_level_3(
|
||||
&mut self,
|
||||
_: &actions::FoldAtLevel3,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.fold_at_level(&actions::FoldAtLevel(3), window, cx);
|
||||
}
|
||||
|
||||
pub fn fold_at_level_4(
|
||||
&mut self,
|
||||
_: &actions::FoldAtLevel4,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.fold_at_level(&actions::FoldAtLevel(4), window, cx);
|
||||
}
|
||||
|
||||
pub fn fold_at_level_5(
|
||||
&mut self,
|
||||
_: &actions::FoldAtLevel5,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.fold_at_level(&actions::FoldAtLevel(5), window, cx);
|
||||
}
|
||||
|
||||
pub fn fold_at_level_6(
|
||||
&mut self,
|
||||
_: &actions::FoldAtLevel6,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.fold_at_level(&actions::FoldAtLevel(6), window, cx);
|
||||
}
|
||||
|
||||
pub fn fold_at_level_7(
|
||||
&mut self,
|
||||
_: &actions::FoldAtLevel7,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.fold_at_level(&actions::FoldAtLevel(7), window, cx);
|
||||
}
|
||||
|
||||
pub fn fold_at_level_8(
|
||||
&mut self,
|
||||
_: &actions::FoldAtLevel8,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.fold_at_level(&actions::FoldAtLevel(8), window, cx);
|
||||
}
|
||||
|
||||
pub fn fold_at_level_9(
|
||||
&mut self,
|
||||
_: &actions::FoldAtLevel9,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
self.fold_at_level(&actions::FoldAtLevel(9), window, cx);
|
||||
}
|
||||
|
||||
pub fn fold_all(&mut self, _: &actions::FoldAll, window: &mut Window, cx: &mut Context<Self>) {
|
||||
if self.buffer.read(cx).is_singleton() {
|
||||
let mut fold_ranges = Vec::new();
|
||||
@@ -20698,7 +20815,7 @@ impl Editor {
|
||||
self.refresh_code_actions(window, cx);
|
||||
self.refresh_selected_text_highlights(true, window, cx);
|
||||
self.refresh_single_line_folds(window, cx);
|
||||
refresh_matching_bracket_highlights(self, window, cx);
|
||||
refresh_matching_bracket_highlights(self, cx);
|
||||
if self.has_active_edit_prediction() {
|
||||
self.update_visible_edit_prediction(window, cx);
|
||||
}
|
||||
|
||||
@@ -267,7 +267,7 @@ impl Settings for EditorSettings {
|
||||
delay: drag_and_drop_selection.delay.unwrap(),
|
||||
},
|
||||
lsp_document_colors: editor.lsp_document_colors.unwrap(),
|
||||
minimum_contrast_for_highlights: editor.minimum_contrast_for_highlights.unwrap(),
|
||||
minimum_contrast_for_highlights: editor.minimum_contrast_for_highlights.unwrap().0,
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -12416,11 +12416,6 @@ async fn test_strip_whitespace_and_format_via_lsp(cx: &mut TestAppContext) {
|
||||
.join("\n"),
|
||||
);
|
||||
|
||||
// Submit a format request.
|
||||
let format = cx
|
||||
.update_editor(|editor, window, cx| editor.format(&Format, window, cx))
|
||||
.unwrap();
|
||||
|
||||
// Record which buffer changes have been sent to the language server
|
||||
let buffer_changes = Arc::new(Mutex::new(Vec::new()));
|
||||
cx.lsp
|
||||
@@ -12441,28 +12436,29 @@ async fn test_strip_whitespace_and_format_via_lsp(cx: &mut TestAppContext) {
|
||||
.set_request_handler::<lsp::request::Formatting, _, _>({
|
||||
let buffer_changes = buffer_changes.clone();
|
||||
move |_, _| {
|
||||
// When formatting is requested, trailing whitespace has already been stripped,
|
||||
// and the trailing newline has already been added.
|
||||
assert_eq!(
|
||||
&buffer_changes.lock()[1..],
|
||||
&[
|
||||
(
|
||||
lsp::Range::new(lsp::Position::new(0, 3), lsp::Position::new(0, 4)),
|
||||
"".into()
|
||||
),
|
||||
(
|
||||
lsp::Range::new(lsp::Position::new(2, 5), lsp::Position::new(2, 6)),
|
||||
"".into()
|
||||
),
|
||||
(
|
||||
lsp::Range::new(lsp::Position::new(3, 4), lsp::Position::new(3, 4)),
|
||||
"\n".into()
|
||||
),
|
||||
]
|
||||
);
|
||||
|
||||
let buffer_changes = buffer_changes.clone();
|
||||
// Insert blank lines between each line of the buffer.
|
||||
async move {
|
||||
// When formatting is requested, trailing whitespace has already been stripped,
|
||||
// and the trailing newline has already been added.
|
||||
assert_eq!(
|
||||
&buffer_changes.lock()[1..],
|
||||
&[
|
||||
(
|
||||
lsp::Range::new(lsp::Position::new(0, 3), lsp::Position::new(0, 4)),
|
||||
"".into()
|
||||
),
|
||||
(
|
||||
lsp::Range::new(lsp::Position::new(2, 5), lsp::Position::new(2, 6)),
|
||||
"".into()
|
||||
),
|
||||
(
|
||||
lsp::Range::new(lsp::Position::new(3, 4), lsp::Position::new(3, 4)),
|
||||
"\n".into()
|
||||
),
|
||||
]
|
||||
);
|
||||
|
||||
Ok(Some(vec![
|
||||
lsp::TextEdit {
|
||||
range: lsp::Range::new(
|
||||
@@ -12483,10 +12479,17 @@ async fn test_strip_whitespace_and_format_via_lsp(cx: &mut TestAppContext) {
|
||||
}
|
||||
});
|
||||
|
||||
// Submit a format request.
|
||||
let format = cx
|
||||
.update_editor(|editor, window, cx| editor.format(&Format, window, cx))
|
||||
.unwrap();
|
||||
|
||||
cx.run_until_parked();
|
||||
// After formatting the buffer, the trailing whitespace is stripped,
|
||||
// a newline is appended, and the edits provided by the language server
|
||||
// have been applied.
|
||||
format.await.unwrap();
|
||||
|
||||
cx.assert_editor_state(
|
||||
&[
|
||||
"one", //
|
||||
@@ -16515,7 +16518,7 @@ async fn test_following_with_multiple_excerpts(cx: &mut TestAppContext) {
|
||||
leader.update(cx, |leader, cx| {
|
||||
leader.buffer.update(cx, |multibuffer, cx| {
|
||||
multibuffer.set_excerpts_for_path(
|
||||
PathKey::namespaced(1, rel_path("b.txt").into_arc()),
|
||||
PathKey::with_sort_prefix(1, rel_path("b.txt").into_arc()),
|
||||
buffer_1.clone(),
|
||||
vec![
|
||||
Point::row_range(0..3),
|
||||
@@ -16526,7 +16529,7 @@ async fn test_following_with_multiple_excerpts(cx: &mut TestAppContext) {
|
||||
cx,
|
||||
);
|
||||
multibuffer.set_excerpts_for_path(
|
||||
PathKey::namespaced(1, rel_path("a.txt").into_arc()),
|
||||
PathKey::with_sort_prefix(1, rel_path("a.txt").into_arc()),
|
||||
buffer_2.clone(),
|
||||
vec![Point::row_range(0..6), Point::row_range(8..12)],
|
||||
0,
|
||||
@@ -21029,7 +21032,7 @@ async fn test_display_diff_hunks(cx: &mut TestAppContext) {
|
||||
for buffer in &buffers {
|
||||
let snapshot = buffer.read(cx).snapshot();
|
||||
multibuffer.set_excerpts_for_path(
|
||||
PathKey::namespaced(0, buffer.read(cx).file().unwrap().path().clone()),
|
||||
PathKey::with_sort_prefix(0, buffer.read(cx).file().unwrap().path().clone()),
|
||||
buffer.clone(),
|
||||
vec![text::Anchor::MIN.to_point(&snapshot)..text::Anchor::MAX.to_point(&snapshot)],
|
||||
2,
|
||||
@@ -26475,3 +26478,64 @@ fn extract_color_inlays(editor: &Editor, cx: &App) -> Vec<Rgba> {
|
||||
.map(Rgba::from)
|
||||
.collect()
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
fn test_duplicate_line_up_on_last_line_without_newline(cx: &mut TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
|
||||
let editor = cx.add_window(|window, cx| {
|
||||
let buffer = MultiBuffer::build_simple("line1\nline2", cx);
|
||||
build_editor(buffer, window, cx)
|
||||
});
|
||||
|
||||
editor
|
||||
.update(cx, |editor, window, cx| {
|
||||
editor.change_selections(SelectionEffects::no_scroll(), window, cx, |s| {
|
||||
s.select_display_ranges([
|
||||
DisplayPoint::new(DisplayRow(1), 0)..DisplayPoint::new(DisplayRow(1), 0)
|
||||
])
|
||||
});
|
||||
|
||||
editor.duplicate_line_up(&DuplicateLineUp, window, cx);
|
||||
|
||||
assert_eq!(
|
||||
editor.display_text(cx),
|
||||
"line1\nline2\nline2",
|
||||
"Duplicating last line upward should create duplicate above, not on same line"
|
||||
);
|
||||
|
||||
assert_eq!(
|
||||
editor.selections.display_ranges(cx),
|
||||
vec![DisplayPoint::new(DisplayRow(1), 0)..DisplayPoint::new(DisplayRow(1), 0)],
|
||||
"Selection should remain on the original line"
|
||||
);
|
||||
})
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_copy_line_without_trailing_newline(cx: &mut TestAppContext) {
|
||||
init_test(cx, |_| {});
|
||||
|
||||
let mut cx = EditorTestContext::new(cx).await;
|
||||
|
||||
cx.set_state("line1\nline2ˇ");
|
||||
|
||||
cx.update_editor(|e, window, cx| e.copy(&Copy, window, cx));
|
||||
|
||||
let clipboard_text = cx
|
||||
.read_from_clipboard()
|
||||
.and_then(|item| item.text().as_deref().map(str::to_string));
|
||||
|
||||
assert_eq!(
|
||||
clipboard_text,
|
||||
Some("line2\n".to_string()),
|
||||
"Copying a line without trailing newline should include a newline"
|
||||
);
|
||||
|
||||
cx.set_state("line1\nˇ");
|
||||
|
||||
cx.update_editor(|e, window, cx| e.paste(&Paste, window, cx));
|
||||
|
||||
cx.assert_editor_state("line1\nline2\nˇ");
|
||||
}
|
||||
|
||||
@@ -432,6 +432,15 @@ impl EditorElement {
|
||||
register_action(editor, window, Editor::open_selected_filename);
|
||||
register_action(editor, window, Editor::fold);
|
||||
register_action(editor, window, Editor::fold_at_level);
|
||||
register_action(editor, window, Editor::fold_at_level_1);
|
||||
register_action(editor, window, Editor::fold_at_level_2);
|
||||
register_action(editor, window, Editor::fold_at_level_3);
|
||||
register_action(editor, window, Editor::fold_at_level_4);
|
||||
register_action(editor, window, Editor::fold_at_level_5);
|
||||
register_action(editor, window, Editor::fold_at_level_6);
|
||||
register_action(editor, window, Editor::fold_at_level_7);
|
||||
register_action(editor, window, Editor::fold_at_level_8);
|
||||
register_action(editor, window, Editor::fold_at_level_9);
|
||||
register_action(editor, window, Editor::fold_all);
|
||||
register_action(editor, window, Editor::fold_function_bodies);
|
||||
register_action(editor, window, Editor::fold_recursive);
|
||||
|
||||
@@ -1,47 +1,46 @@
|
||||
use crate::{Editor, RangeToAnchorExt};
|
||||
use gpui::{Context, HighlightStyle, Window};
|
||||
use gpui::{Context, HighlightStyle};
|
||||
use language::CursorShape;
|
||||
use multi_buffer::ToOffset;
|
||||
use theme::ActiveTheme;
|
||||
|
||||
enum MatchingBracketHighlight {}
|
||||
|
||||
pub fn refresh_matching_bracket_highlights(
|
||||
editor: &mut Editor,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Editor>,
|
||||
) {
|
||||
pub fn refresh_matching_bracket_highlights(editor: &mut Editor, cx: &mut Context<Editor>) {
|
||||
editor.clear_highlights::<MatchingBracketHighlight>(cx);
|
||||
|
||||
let newest_selection = editor.selections.newest::<usize>(cx);
|
||||
let buffer_snapshot = editor.buffer.read(cx).snapshot(cx);
|
||||
let newest_selection = editor
|
||||
.selections
|
||||
.newest_anchor()
|
||||
.map(|anchor| anchor.to_offset(&buffer_snapshot));
|
||||
// Don't highlight brackets if the selection isn't empty
|
||||
if !newest_selection.is_empty() {
|
||||
return;
|
||||
}
|
||||
|
||||
let snapshot = editor.snapshot(window, cx);
|
||||
let head = newest_selection.head();
|
||||
if head > snapshot.buffer_snapshot().len() {
|
||||
if head > buffer_snapshot.len() {
|
||||
log::error!("bug: cursor offset is out of range while refreshing bracket highlights");
|
||||
return;
|
||||
}
|
||||
|
||||
let mut tail = head;
|
||||
if (editor.cursor_shape == CursorShape::Block || editor.cursor_shape == CursorShape::Hollow)
|
||||
&& head < snapshot.buffer_snapshot().len()
|
||||
&& head < buffer_snapshot.len()
|
||||
{
|
||||
if let Some(tail_ch) = snapshot.buffer_snapshot().chars_at(tail).next() {
|
||||
if let Some(tail_ch) = buffer_snapshot.chars_at(tail).next() {
|
||||
tail += tail_ch.len_utf8();
|
||||
}
|
||||
}
|
||||
|
||||
if let Some((opening_range, closing_range)) = snapshot
|
||||
.buffer_snapshot()
|
||||
.innermost_enclosing_bracket_ranges(head..tail, None)
|
||||
if let Some((opening_range, closing_range)) =
|
||||
buffer_snapshot.innermost_enclosing_bracket_ranges(head..tail, None)
|
||||
{
|
||||
editor.highlight_text::<MatchingBracketHighlight>(
|
||||
vec![
|
||||
opening_range.to_anchors(&snapshot.buffer_snapshot()),
|
||||
closing_range.to_anchors(&snapshot.buffer_snapshot()),
|
||||
opening_range.to_anchors(&buffer_snapshot),
|
||||
closing_range.to_anchors(&buffer_snapshot),
|
||||
],
|
||||
HighlightStyle {
|
||||
background_color: Some(
|
||||
|
||||
@@ -1495,7 +1495,7 @@ pub mod tests {
|
||||
.into_response()
|
||||
.expect("work done progress create request failed");
|
||||
cx.executor().run_until_parked();
|
||||
fake_server.notify::<lsp::notification::Progress>(&lsp::ProgressParams {
|
||||
fake_server.notify::<lsp::notification::Progress>(lsp::ProgressParams {
|
||||
token: lsp::ProgressToken::String(progress_token.to_string()),
|
||||
value: lsp::ProgressParamsValue::WorkDone(lsp::WorkDoneProgress::Begin(
|
||||
lsp::WorkDoneProgressBegin::default(),
|
||||
@@ -1515,7 +1515,7 @@ pub mod tests {
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
fake_server.notify::<lsp::notification::Progress>(&lsp::ProgressParams {
|
||||
fake_server.notify::<lsp::notification::Progress>(lsp::ProgressParams {
|
||||
token: lsp::ProgressToken::String(progress_token.to_string()),
|
||||
value: lsp::ProgressParamsValue::WorkDone(lsp::WorkDoneProgress::End(
|
||||
lsp::WorkDoneProgressEnd::default(),
|
||||
|
||||
@@ -184,6 +184,27 @@ impl SelectionsCollection {
|
||||
selections
|
||||
}
|
||||
|
||||
/// Returns all of the selections, adjusted to take into account the selection line_mode. Uses a provided snapshot to resolve selections.
|
||||
pub fn all_adjusted_with_snapshot(
|
||||
&self,
|
||||
snapshot: &MultiBufferSnapshot,
|
||||
) -> Vec<Selection<Point>> {
|
||||
let mut selections = self
|
||||
.disjoint
|
||||
.iter()
|
||||
.chain(self.pending_anchor())
|
||||
.map(|anchor| anchor.map(|anchor| anchor.to_point(&snapshot)))
|
||||
.collect::<Vec<_>>();
|
||||
if self.line_mode {
|
||||
for selection in &mut selections {
|
||||
let new_range = snapshot.expand_to_line(selection.range());
|
||||
selection.start = new_range.start;
|
||||
selection.end = new_range.end;
|
||||
}
|
||||
}
|
||||
selections
|
||||
}
|
||||
|
||||
/// Returns the newest selection, adjusted to take into account the selection line_mode
|
||||
pub fn newest_adjusted(&self, cx: &mut App) -> Selection<Point> {
|
||||
let mut selection = self.newest::<Point>(cx);
|
||||
|
||||
@@ -262,6 +262,77 @@ impl EditorLspTestContext {
|
||||
Self::new(language, capabilities, cx).await
|
||||
}
|
||||
|
||||
pub async fn new_tsx(
|
||||
capabilities: lsp::ServerCapabilities,
|
||||
cx: &mut gpui::TestAppContext,
|
||||
) -> EditorLspTestContext {
|
||||
let mut word_characters: HashSet<char> = Default::default();
|
||||
word_characters.insert('$');
|
||||
word_characters.insert('#');
|
||||
let language = Language::new(
|
||||
LanguageConfig {
|
||||
name: "TSX".into(),
|
||||
matcher: LanguageMatcher {
|
||||
path_suffixes: vec!["tsx".to_string()],
|
||||
..Default::default()
|
||||
},
|
||||
brackets: language::BracketPairConfig {
|
||||
pairs: vec![language::BracketPair {
|
||||
start: "{".to_string(),
|
||||
end: "}".to_string(),
|
||||
close: true,
|
||||
surround: true,
|
||||
newline: true,
|
||||
}],
|
||||
disabled_scopes_by_bracket_ix: Default::default(),
|
||||
},
|
||||
word_characters,
|
||||
..Default::default()
|
||||
},
|
||||
Some(tree_sitter_typescript::LANGUAGE_TSX.into()),
|
||||
)
|
||||
.with_queries(LanguageQueries {
|
||||
brackets: Some(Cow::from(indoc! {r#"
|
||||
("(" @open ")" @close)
|
||||
("[" @open "]" @close)
|
||||
("{" @open "}" @close)
|
||||
("<" @open ">" @close)
|
||||
("<" @open "/>" @close)
|
||||
("</" @open ">" @close)
|
||||
("\"" @open "\"" @close)
|
||||
("'" @open "'" @close)
|
||||
("`" @open "`" @close)
|
||||
((jsx_element (jsx_opening_element) @open (jsx_closing_element) @close) (#set! newline.only))"#})),
|
||||
indents: Some(Cow::from(indoc! {r#"
|
||||
[
|
||||
(call_expression)
|
||||
(assignment_expression)
|
||||
(member_expression)
|
||||
(lexical_declaration)
|
||||
(variable_declaration)
|
||||
(assignment_expression)
|
||||
(if_statement)
|
||||
(for_statement)
|
||||
] @indent
|
||||
|
||||
(_ "[" "]" @end) @indent
|
||||
(_ "<" ">" @end) @indent
|
||||
(_ "{" "}" @end) @indent
|
||||
(_ "(" ")" @end) @indent
|
||||
|
||||
(jsx_opening_element ">" @end) @indent
|
||||
|
||||
(jsx_element
|
||||
(jsx_opening_element) @start
|
||||
(jsx_closing_element)? @end) @indent
|
||||
"#})),
|
||||
..Default::default()
|
||||
})
|
||||
.expect("Could not parse queries");
|
||||
|
||||
Self::new(language, capabilities, cx).await
|
||||
}
|
||||
|
||||
pub async fn new_html(cx: &mut gpui::TestAppContext) -> Self {
|
||||
let language = Language::new(
|
||||
LanguageConfig {
|
||||
@@ -369,7 +440,7 @@ impl EditorLspTestContext {
|
||||
}
|
||||
|
||||
pub fn notify<T: notification::Notification>(&self, params: T::Params) {
|
||||
self.lsp.notify::<T>(¶ms);
|
||||
self.lsp.notify::<T>(params);
|
||||
}
|
||||
|
||||
#[cfg(target_os = "windows")]
|
||||
|
||||
@@ -29,7 +29,7 @@ use ui::{
|
||||
};
|
||||
use vim_mode_setting::VimModeSetting;
|
||||
use workspace::{
|
||||
Workspace, WorkspaceId,
|
||||
Workspace,
|
||||
item::{Item, ItemEvent},
|
||||
};
|
||||
use zed_actions::ExtensionCategoryFilter;
|
||||
@@ -1551,15 +1551,6 @@ impl Item for ExtensionsPage {
|
||||
false
|
||||
}
|
||||
|
||||
fn clone_on_split(
|
||||
&self,
|
||||
_workspace_id: Option<WorkspaceId>,
|
||||
_window: &mut Window,
|
||||
_: &mut Context<Self>,
|
||||
) -> Option<Entity<Self>> {
|
||||
None
|
||||
}
|
||||
|
||||
fn to_item_events(event: &Self::Event, mut f: impl FnMut(workspace::item::ItemEvent)) {
|
||||
f(*event)
|
||||
}
|
||||
|
||||
@@ -17,9 +17,3 @@ pub struct PanicFeatureFlag;
|
||||
impl FeatureFlag for PanicFeatureFlag {
|
||||
const NAME: &'static str = "panic";
|
||||
}
|
||||
|
||||
pub struct CodexAcpFeatureFlag;
|
||||
|
||||
impl FeatureFlag for CodexAcpFeatureFlag {
|
||||
const NAME: &'static str = "codex-acp";
|
||||
}
|
||||
|
||||
@@ -16,14 +16,12 @@ test-support = []
|
||||
|
||||
[dependencies]
|
||||
gpui.workspace = true
|
||||
menu.workspace = true
|
||||
system_specs.workspace = true
|
||||
ui.workspace = true
|
||||
urlencoding.workspace = true
|
||||
util.workspace = true
|
||||
workspace-hack.workspace = true
|
||||
workspace.workspace = true
|
||||
zed_actions.workspace = true
|
||||
workspace-hack.workspace = true
|
||||
|
||||
[dev-dependencies]
|
||||
editor = { workspace = true, features = ["test-support"] }
|
||||
|
||||
@@ -2,19 +2,13 @@ use gpui::{App, ClipboardItem, PromptLevel, actions};
|
||||
use system_specs::{CopySystemSpecsIntoClipboard, SystemSpecs};
|
||||
use util::ResultExt;
|
||||
use workspace::Workspace;
|
||||
use zed_actions::feedback::FileBugReport;
|
||||
|
||||
pub mod feedback_modal;
|
||||
use zed_actions::feedback::{EmailZed, FileBugReport, RequestFeature};
|
||||
|
||||
actions!(
|
||||
zed,
|
||||
[
|
||||
/// Opens email client to send feedback to Zed support.
|
||||
EmailZed,
|
||||
/// Opens the Zed repository on GitHub.
|
||||
OpenZedRepo,
|
||||
/// Opens the feature request form.
|
||||
RequestFeature,
|
||||
]
|
||||
);
|
||||
|
||||
@@ -48,11 +42,7 @@ fn email_body(specs: &SystemSpecs) -> String {
|
||||
}
|
||||
|
||||
pub fn init(cx: &mut App) {
|
||||
cx.observe_new(|workspace: &mut Workspace, window, cx| {
|
||||
let Some(window) = window else {
|
||||
return;
|
||||
};
|
||||
feedback_modal::FeedbackModal::register(workspace, window, cx);
|
||||
cx.observe_new(|workspace: &mut Workspace, _, _| {
|
||||
workspace
|
||||
.register_action(|_, _: &CopySystemSpecsIntoClipboard, window, cx| {
|
||||
let specs = SystemSpecs::new(window, cx);
|
||||
|
||||
@@ -1,113 +0,0 @@
|
||||
use gpui::{App, Context, DismissEvent, EventEmitter, FocusHandle, Focusable, Render, Window};
|
||||
use ui::{IconPosition, prelude::*};
|
||||
use workspace::{ModalView, Workspace};
|
||||
use zed_actions::feedback::GiveFeedback;
|
||||
|
||||
use crate::{EmailZed, FileBugReport, OpenZedRepo, RequestFeature};
|
||||
|
||||
pub struct FeedbackModal {
|
||||
focus_handle: FocusHandle,
|
||||
}
|
||||
|
||||
impl Focusable for FeedbackModal {
|
||||
fn focus_handle(&self, _: &App) -> FocusHandle {
|
||||
self.focus_handle.clone()
|
||||
}
|
||||
}
|
||||
impl EventEmitter<DismissEvent> for FeedbackModal {}
|
||||
|
||||
impl ModalView for FeedbackModal {}
|
||||
|
||||
impl FeedbackModal {
|
||||
pub fn register(workspace: &mut Workspace, _: &mut Window, cx: &mut Context<Workspace>) {
|
||||
let _handle = cx.entity().downgrade();
|
||||
workspace.register_action(move |workspace, _: &GiveFeedback, window, cx| {
|
||||
workspace.toggle_modal(window, cx, move |_, cx| FeedbackModal::new(cx));
|
||||
});
|
||||
}
|
||||
|
||||
pub fn new(cx: &mut Context<Self>) -> Self {
|
||||
Self {
|
||||
focus_handle: cx.focus_handle(),
|
||||
}
|
||||
}
|
||||
|
||||
fn cancel(&mut self, _: &menu::Cancel, _: &mut Window, cx: &mut Context<Self>) {
|
||||
cx.emit(DismissEvent)
|
||||
}
|
||||
}
|
||||
|
||||
impl Render for FeedbackModal {
|
||||
fn render(&mut self, _: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
|
||||
let open_zed_repo =
|
||||
cx.listener(|_, _, window, cx| window.dispatch_action(Box::new(OpenZedRepo), cx));
|
||||
|
||||
v_flex()
|
||||
.key_context("GiveFeedback")
|
||||
.on_action(cx.listener(Self::cancel))
|
||||
.elevation_3(cx)
|
||||
.w_96()
|
||||
.h_auto()
|
||||
.p_4()
|
||||
.gap_2()
|
||||
.child(
|
||||
h_flex()
|
||||
.w_full()
|
||||
.justify_between()
|
||||
.child(Headline::new("Give Feedback"))
|
||||
.child(
|
||||
IconButton::new("close-btn", IconName::Close)
|
||||
.icon_color(Color::Muted)
|
||||
.on_click(cx.listener(move |_, _, window, cx| {
|
||||
cx.spawn_in(window, async move |this, cx| {
|
||||
this.update(cx, |_, cx| cx.emit(DismissEvent)).ok();
|
||||
})
|
||||
.detach();
|
||||
})),
|
||||
),
|
||||
)
|
||||
.child(Label::new("Thanks for using Zed! To share your experience with us, reach for the channel that's the most appropriate:"))
|
||||
.child(
|
||||
Button::new("file-a-bug-report", "File a Bug Report")
|
||||
.full_width()
|
||||
.icon(IconName::Debug)
|
||||
.icon_size(IconSize::XSmall)
|
||||
.icon_color(Color::Muted)
|
||||
.icon_position(IconPosition::Start)
|
||||
.on_click(cx.listener(|_, _, window, cx| {
|
||||
window.dispatch_action(Box::new(FileBugReport), cx);
|
||||
})),
|
||||
)
|
||||
.child(
|
||||
Button::new("request-a-feature", "Request a Feature")
|
||||
.full_width()
|
||||
.icon(IconName::Sparkle)
|
||||
.icon_size(IconSize::XSmall)
|
||||
.icon_color(Color::Muted)
|
||||
.icon_position(IconPosition::Start)
|
||||
.on_click(cx.listener(|_, _, window, cx| {
|
||||
window.dispatch_action(Box::new(RequestFeature), cx);
|
||||
})),
|
||||
)
|
||||
.child(
|
||||
Button::new("send-us_an-email", "Send an Email")
|
||||
.full_width()
|
||||
.icon(IconName::Envelope)
|
||||
.icon_size(IconSize::XSmall)
|
||||
.icon_color(Color::Muted)
|
||||
.icon_position(IconPosition::Start)
|
||||
.on_click(cx.listener(|_, _, window, cx| {
|
||||
window.dispatch_action(Box::new(EmailZed), cx);
|
||||
})),
|
||||
)
|
||||
.child(
|
||||
Button::new("zed_repository", "GitHub Repository")
|
||||
.full_width()
|
||||
.icon(IconName::Github)
|
||||
.icon_size(IconSize::XSmall)
|
||||
.icon_color(Color::Muted)
|
||||
.icon_position(IconPosition::Start)
|
||||
.on_click(open_zed_repo),
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -1172,18 +1172,25 @@ impl FileFinderDelegate {
|
||||
)
|
||||
}
|
||||
|
||||
/// Attempts to resolve an absolute file path and update the search matches if found.
|
||||
///
|
||||
/// If the query path resolves to an absolute file that exists in the project,
|
||||
/// this method will find the corresponding worktree and relative path, create a
|
||||
/// match for it, and update the picker's search results.
|
||||
///
|
||||
/// Returns `true` if the absolute path exists, otherwise returns `false`.
|
||||
fn lookup_absolute_path(
|
||||
&self,
|
||||
query: FileSearchQuery,
|
||||
window: &mut Window,
|
||||
cx: &mut Context<Picker<Self>>,
|
||||
) -> Task<()> {
|
||||
) -> Task<bool> {
|
||||
cx.spawn_in(window, async move |picker, cx| {
|
||||
let Some(project) = picker
|
||||
.read_with(cx, |picker, _| picker.delegate.project.clone())
|
||||
.log_err()
|
||||
else {
|
||||
return;
|
||||
return false;
|
||||
};
|
||||
|
||||
let query_path = Path::new(query.path_query());
|
||||
@@ -1216,7 +1223,7 @@ impl FileFinderDelegate {
|
||||
})
|
||||
.log_err();
|
||||
if update_result.is_none() {
|
||||
return;
|
||||
return abs_file_exists;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1229,6 +1236,7 @@ impl FileFinderDelegate {
|
||||
anyhow::Ok(())
|
||||
})
|
||||
.log_err();
|
||||
abs_file_exists
|
||||
})
|
||||
}
|
||||
|
||||
@@ -1377,13 +1385,14 @@ impl PickerDelegate for FileFinderDelegate {
|
||||
} else {
|
||||
let path_position = PathWithPosition::parse_str(raw_query);
|
||||
let raw_query = raw_query.trim().trim_end_matches(':').to_owned();
|
||||
let path = path_position.path.to_str();
|
||||
let path_trimmed = path.unwrap_or(&raw_query).trim_end_matches(':');
|
||||
let path = path_position.path.clone();
|
||||
let path_str = path_position.path.to_str();
|
||||
let path_trimmed = path_str.unwrap_or(&raw_query).trim_end_matches(':');
|
||||
let file_query_end = if path_trimmed == raw_query {
|
||||
None
|
||||
} else {
|
||||
// Safe to unwrap as we won't get here when the unwrap in if fails
|
||||
Some(path.unwrap().len())
|
||||
Some(path_str.unwrap().len())
|
||||
};
|
||||
|
||||
let query = FileSearchQuery {
|
||||
@@ -1392,11 +1401,29 @@ impl PickerDelegate for FileFinderDelegate {
|
||||
path_position,
|
||||
};
|
||||
|
||||
if Path::new(query.path_query()).is_absolute() {
|
||||
self.lookup_absolute_path(query, window, cx)
|
||||
} else {
|
||||
self.spawn_search(query, window, cx)
|
||||
}
|
||||
cx.spawn_in(window, async move |this, cx| {
|
||||
let _ = maybe!(async move {
|
||||
let is_absolute_path = path.is_absolute();
|
||||
let did_resolve_abs_path = is_absolute_path
|
||||
&& this
|
||||
.update_in(cx, |this, window, cx| {
|
||||
this.delegate
|
||||
.lookup_absolute_path(query.clone(), window, cx)
|
||||
})?
|
||||
.await;
|
||||
|
||||
// Only check for relative paths if no absolute paths were
|
||||
// found.
|
||||
if !did_resolve_abs_path {
|
||||
this.update_in(cx, |this, window, cx| {
|
||||
this.delegate.spawn_search(query, window, cx)
|
||||
})?
|
||||
.await;
|
||||
}
|
||||
anyhow::Ok(())
|
||||
})
|
||||
.await;
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -3069,3 +3069,49 @@ async fn test_filename_precedence(cx: &mut TestAppContext) {
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_paths_with_starting_slash(cx: &mut TestAppContext) {
|
||||
let app_state = init_test(cx);
|
||||
app_state
|
||||
.fs
|
||||
.as_fake()
|
||||
.insert_tree(
|
||||
path!("/root"),
|
||||
json!({
|
||||
"a": {
|
||||
"file1.txt": "",
|
||||
"b": {
|
||||
"file2.txt": "",
|
||||
},
|
||||
}
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
|
||||
let project = Project::test(app_state.fs.clone(), [path!("/root").as_ref()], cx).await;
|
||||
|
||||
let (picker, workspace, cx) = build_find_picker(project, cx);
|
||||
|
||||
let matching_abs_path = "/file1.txt".to_string();
|
||||
picker
|
||||
.update_in(cx, |picker, window, cx| {
|
||||
picker
|
||||
.delegate
|
||||
.update_matches(matching_abs_path, window, cx)
|
||||
})
|
||||
.await;
|
||||
picker.update(cx, |picker, _| {
|
||||
assert_eq!(
|
||||
collect_search_matches(picker).search_paths_only(),
|
||||
vec![rel_path("a/file1.txt").into()],
|
||||
"Relative path starting with slash should match"
|
||||
)
|
||||
});
|
||||
cx.dispatch_action(SelectNext);
|
||||
cx.dispatch_action(Confirm);
|
||||
cx.read(|cx| {
|
||||
let active_editor = workspace.read(cx).active_item_as::<Editor>(cx).unwrap();
|
||||
assert_eq!(active_editor.read(cx).title(cx), "file1.txt");
|
||||
});
|
||||
}
|
||||
|
||||
@@ -755,7 +755,7 @@ impl PickerDelegate for OpenPathDelegate {
|
||||
.with_default_highlights(
|
||||
&window.text_style(),
|
||||
vec![(
|
||||
delta..label_len,
|
||||
delta..delta + label_len,
|
||||
HighlightStyle::color(Color::Conflict.color(cx)),
|
||||
)],
|
||||
)
|
||||
@@ -765,7 +765,7 @@ impl PickerDelegate for OpenPathDelegate {
|
||||
.with_default_highlights(
|
||||
&window.text_style(),
|
||||
vec![(
|
||||
delta..label_len,
|
||||
delta..delta + label_len,
|
||||
HighlightStyle::color(Color::Created.color(cx)),
|
||||
)],
|
||||
)
|
||||
|
||||
@@ -23,6 +23,7 @@ derive_more.workspace = true
|
||||
git2.workspace = true
|
||||
gpui.workspace = true
|
||||
http_client.workspace = true
|
||||
itertools.workspace = true
|
||||
log.workspace = true
|
||||
parking_lot.workspace = true
|
||||
regex.workspace = true
|
||||
@@ -36,6 +37,7 @@ text.workspace = true
|
||||
thiserror.workspace = true
|
||||
time.workspace = true
|
||||
url.workspace = true
|
||||
urlencoding.workspace = true
|
||||
util.workspace = true
|
||||
uuid.workspace = true
|
||||
futures.workspace = true
|
||||
|
||||
@@ -94,6 +94,8 @@ actions!(
|
||||
OpenModifiedFiles,
|
||||
/// Clones a repository.
|
||||
Clone,
|
||||
/// Adds a file to .gitignore.
|
||||
AddToGitignore,
|
||||
]
|
||||
);
|
||||
|
||||
|
||||
@@ -5,9 +5,12 @@ use async_trait::async_trait;
|
||||
use derive_more::{Deref, DerefMut};
|
||||
use gpui::{App, Global, SharedString};
|
||||
use http_client::HttpClient;
|
||||
use itertools::Itertools;
|
||||
use parking_lot::RwLock;
|
||||
use url::Url;
|
||||
|
||||
use crate::repository::RepoPath;
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Clone)]
|
||||
pub struct PullRequest {
|
||||
pub number: u32,
|
||||
@@ -55,10 +58,21 @@ pub struct BuildCommitPermalinkParams<'a> {
|
||||
|
||||
pub struct BuildPermalinkParams<'a> {
|
||||
pub sha: &'a str,
|
||||
pub path: &'a str,
|
||||
/// URL-escaped path using unescaped `/` as the directory separator.
|
||||
pub path: String,
|
||||
pub selection: Option<Range<u32>>,
|
||||
}
|
||||
|
||||
impl<'a> BuildPermalinkParams<'a> {
|
||||
pub fn new(sha: &'a str, path: &RepoPath, selection: Option<Range<u32>>) -> Self {
|
||||
Self {
|
||||
sha,
|
||||
path: path.components().map(urlencoding::encode).join("/"),
|
||||
selection,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A Git hosting provider.
|
||||
#[async_trait]
|
||||
pub trait GitHostingProvider {
|
||||
|
||||
@@ -30,3 +30,4 @@ workspace-hack.workspace = true
|
||||
indoc.workspace = true
|
||||
serde_json.workspace = true
|
||||
pretty_assertions.workspace = true
|
||||
git = { workspace = true, features = ["test-support"] }
|
||||
|
||||
@@ -126,6 +126,7 @@ impl GitHostingProvider for Bitbucket {
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use git::repository::repo_path;
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
use super::*;
|
||||
@@ -182,11 +183,7 @@ mod tests {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
},
|
||||
BuildPermalinkParams {
|
||||
sha: "f00b4r",
|
||||
path: "main.rs",
|
||||
selection: None,
|
||||
},
|
||||
BuildPermalinkParams::new("f00b4r", &repo_path("main.rs"), None),
|
||||
);
|
||||
|
||||
let expected_url = "https://bitbucket.org/zed-industries/zed/src/f00b4r/main.rs";
|
||||
@@ -200,11 +197,7 @@ mod tests {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
},
|
||||
BuildPermalinkParams {
|
||||
sha: "f00b4r",
|
||||
path: "main.rs",
|
||||
selection: Some(6..6),
|
||||
},
|
||||
BuildPermalinkParams::new("f00b4r", &repo_path("main.rs"), Some(6..6)),
|
||||
);
|
||||
|
||||
let expected_url = "https://bitbucket.org/zed-industries/zed/src/f00b4r/main.rs#lines-7";
|
||||
@@ -218,11 +211,7 @@ mod tests {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
},
|
||||
BuildPermalinkParams {
|
||||
sha: "f00b4r",
|
||||
path: "main.rs",
|
||||
selection: Some(23..47),
|
||||
},
|
||||
BuildPermalinkParams::new("f00b4r", &repo_path("main.rs"), Some(23..47)),
|
||||
);
|
||||
|
||||
let expected_url =
|
||||
|
||||
@@ -191,6 +191,7 @@ impl GitHostingProvider for Chromium {
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use git::repository::repo_path;
|
||||
use indoc::indoc;
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
@@ -218,11 +219,11 @@ mod tests {
|
||||
owner: Arc::from(""),
|
||||
repo: "chromium/src".into(),
|
||||
},
|
||||
BuildPermalinkParams {
|
||||
sha: "fea5080b182fc92e3be0c01c5dece602fe70b588",
|
||||
path: "ui/base/cursor/cursor.h",
|
||||
selection: None,
|
||||
},
|
||||
BuildPermalinkParams::new(
|
||||
"fea5080b182fc92e3be0c01c5dece602fe70b588",
|
||||
&repo_path("ui/base/cursor/cursor.h"),
|
||||
None,
|
||||
),
|
||||
);
|
||||
|
||||
let expected_url = "https://chromium.googlesource.com/chromium/src/+/fea5080b182fc92e3be0c01c5dece602fe70b588/ui/base/cursor/cursor.h";
|
||||
@@ -236,11 +237,11 @@ mod tests {
|
||||
owner: Arc::from(""),
|
||||
repo: "chromium/src".into(),
|
||||
},
|
||||
BuildPermalinkParams {
|
||||
sha: "fea5080b182fc92e3be0c01c5dece602fe70b588",
|
||||
path: "ui/base/cursor/cursor.h",
|
||||
selection: Some(18..18),
|
||||
},
|
||||
BuildPermalinkParams::new(
|
||||
"fea5080b182fc92e3be0c01c5dece602fe70b588",
|
||||
&repo_path("ui/base/cursor/cursor.h"),
|
||||
Some(18..18),
|
||||
),
|
||||
);
|
||||
|
||||
let expected_url = "https://chromium.googlesource.com/chromium/src/+/fea5080b182fc92e3be0c01c5dece602fe70b588/ui/base/cursor/cursor.h#19";
|
||||
@@ -254,11 +255,11 @@ mod tests {
|
||||
owner: Arc::from(""),
|
||||
repo: "chromium/src".into(),
|
||||
},
|
||||
BuildPermalinkParams {
|
||||
sha: "fea5080b182fc92e3be0c01c5dece602fe70b588",
|
||||
path: "ui/base/cursor/cursor.h",
|
||||
selection: Some(18..30),
|
||||
},
|
||||
BuildPermalinkParams::new(
|
||||
"fea5080b182fc92e3be0c01c5dece602fe70b588",
|
||||
&repo_path("ui/base/cursor/cursor.h"),
|
||||
Some(18..30),
|
||||
),
|
||||
);
|
||||
|
||||
let expected_url = "https://chromium.googlesource.com/chromium/src/+/fea5080b182fc92e3be0c01c5dece602fe70b588/ui/base/cursor/cursor.h#19";
|
||||
|
||||
@@ -204,6 +204,7 @@ impl GitHostingProvider for Codeberg {
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use git::repository::repo_path;
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
use super::*;
|
||||
@@ -245,11 +246,11 @@ mod tests {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
},
|
||||
BuildPermalinkParams {
|
||||
sha: "faa6f979be417239b2e070dbbf6392b909224e0b",
|
||||
path: "crates/editor/src/git/permalink.rs",
|
||||
selection: None,
|
||||
},
|
||||
BuildPermalinkParams::new(
|
||||
"faa6f979be417239b2e070dbbf6392b909224e0b",
|
||||
&repo_path("crates/editor/src/git/permalink.rs"),
|
||||
None,
|
||||
),
|
||||
);
|
||||
|
||||
let expected_url = "https://codeberg.org/zed-industries/zed/src/commit/faa6f979be417239b2e070dbbf6392b909224e0b/crates/editor/src/git/permalink.rs";
|
||||
@@ -263,11 +264,11 @@ mod tests {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
},
|
||||
BuildPermalinkParams {
|
||||
sha: "faa6f979be417239b2e070dbbf6392b909224e0b",
|
||||
path: "crates/editor/src/git/permalink.rs",
|
||||
selection: Some(6..6),
|
||||
},
|
||||
BuildPermalinkParams::new(
|
||||
"faa6f979be417239b2e070dbbf6392b909224e0b",
|
||||
&repo_path("crates/editor/src/git/permalink.rs"),
|
||||
Some(6..6),
|
||||
),
|
||||
);
|
||||
|
||||
let expected_url = "https://codeberg.org/zed-industries/zed/src/commit/faa6f979be417239b2e070dbbf6392b909224e0b/crates/editor/src/git/permalink.rs#L7";
|
||||
@@ -281,11 +282,11 @@ mod tests {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
},
|
||||
BuildPermalinkParams {
|
||||
sha: "faa6f979be417239b2e070dbbf6392b909224e0b",
|
||||
path: "crates/editor/src/git/permalink.rs",
|
||||
selection: Some(23..47),
|
||||
},
|
||||
BuildPermalinkParams::new(
|
||||
"faa6f979be417239b2e070dbbf6392b909224e0b",
|
||||
&repo_path("crates/editor/src/git/permalink.rs"),
|
||||
Some(23..47),
|
||||
),
|
||||
);
|
||||
|
||||
let expected_url = "https://codeberg.org/zed-industries/zed/src/commit/faa6f979be417239b2e070dbbf6392b909224e0b/crates/editor/src/git/permalink.rs#L24-L48";
|
||||
|
||||
@@ -84,6 +84,7 @@ impl GitHostingProvider for Gitee {
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use git::repository::repo_path;
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
use super::*;
|
||||
@@ -125,11 +126,11 @@ mod tests {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
},
|
||||
BuildPermalinkParams {
|
||||
sha: "e5fe811d7ad0fc26934edd76f891d20bdc3bb194",
|
||||
path: "crates/editor/src/git/permalink.rs",
|
||||
selection: None,
|
||||
},
|
||||
BuildPermalinkParams::new(
|
||||
"e5fe811d7ad0fc26934edd76f891d20bdc3bb194",
|
||||
&repo_path("crates/editor/src/git/permalink.rs"),
|
||||
None,
|
||||
),
|
||||
);
|
||||
|
||||
let expected_url = "https://gitee.com/zed-industries/zed/blob/e5fe811d7ad0fc26934edd76f891d20bdc3bb194/crates/editor/src/git/permalink.rs";
|
||||
@@ -143,11 +144,11 @@ mod tests {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
},
|
||||
BuildPermalinkParams {
|
||||
sha: "e5fe811d7ad0fc26934edd76f891d20bdc3bb194",
|
||||
path: "crates/editor/src/git/permalink.rs",
|
||||
selection: Some(6..6),
|
||||
},
|
||||
BuildPermalinkParams::new(
|
||||
"e5fe811d7ad0fc26934edd76f891d20bdc3bb194",
|
||||
&repo_path("crates/editor/src/git/permalink.rs"),
|
||||
Some(6..6),
|
||||
),
|
||||
);
|
||||
|
||||
let expected_url = "https://gitee.com/zed-industries/zed/blob/e5fe811d7ad0fc26934edd76f891d20bdc3bb194/crates/editor/src/git/permalink.rs#L7";
|
||||
@@ -161,11 +162,11 @@ mod tests {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
},
|
||||
BuildPermalinkParams {
|
||||
sha: "e5fe811d7ad0fc26934edd76f891d20bdc3bb194",
|
||||
path: "crates/editor/src/git/permalink.rs",
|
||||
selection: Some(23..47),
|
||||
},
|
||||
BuildPermalinkParams::new(
|
||||
"e5fe811d7ad0fc26934edd76f891d20bdc3bb194",
|
||||
&repo_path("crates/editor/src/git/permalink.rs"),
|
||||
Some(23..47),
|
||||
),
|
||||
);
|
||||
|
||||
let expected_url = "https://gitee.com/zed-industries/zed/blob/e5fe811d7ad0fc26934edd76f891d20bdc3bb194/crates/editor/src/git/permalink.rs#L24-48";
|
||||
|
||||
@@ -259,6 +259,7 @@ impl GitHostingProvider for Github {
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use git::repository::repo_path;
|
||||
use indoc::indoc;
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
@@ -400,11 +401,11 @@ mod tests {
|
||||
};
|
||||
let permalink = Github::public_instance().build_permalink(
|
||||
remote,
|
||||
BuildPermalinkParams {
|
||||
sha: "e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7",
|
||||
path: "crates/editor/src/git/permalink.rs",
|
||||
selection: None,
|
||||
},
|
||||
BuildPermalinkParams::new(
|
||||
"e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7",
|
||||
&repo_path("crates/editor/src/git/permalink.rs"),
|
||||
None,
|
||||
),
|
||||
);
|
||||
|
||||
let expected_url = "https://github.com/zed-industries/zed/blob/e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7/crates/editor/src/git/permalink.rs";
|
||||
@@ -418,11 +419,11 @@ mod tests {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
},
|
||||
BuildPermalinkParams {
|
||||
sha: "b2efec9824c45fcc90c9a7eb107a50d1772a60aa",
|
||||
path: "crates/zed/src/main.rs",
|
||||
selection: None,
|
||||
},
|
||||
BuildPermalinkParams::new(
|
||||
"b2efec9824c45fcc90c9a7eb107a50d1772a60aa",
|
||||
&repo_path("crates/zed/src/main.rs"),
|
||||
None,
|
||||
),
|
||||
);
|
||||
|
||||
let expected_url = "https://github.com/zed-industries/zed/blob/b2efec9824c45fcc90c9a7eb107a50d1772a60aa/crates/zed/src/main.rs";
|
||||
@@ -436,11 +437,11 @@ mod tests {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
},
|
||||
BuildPermalinkParams {
|
||||
sha: "e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7",
|
||||
path: "crates/editor/src/git/permalink.rs",
|
||||
selection: Some(6..6),
|
||||
},
|
||||
BuildPermalinkParams::new(
|
||||
"e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7",
|
||||
&repo_path("crates/editor/src/git/permalink.rs"),
|
||||
Some(6..6),
|
||||
),
|
||||
);
|
||||
|
||||
let expected_url = "https://github.com/zed-industries/zed/blob/e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7/crates/editor/src/git/permalink.rs#L7";
|
||||
@@ -454,11 +455,11 @@ mod tests {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
},
|
||||
BuildPermalinkParams {
|
||||
sha: "e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7",
|
||||
path: "crates/editor/src/git/permalink.rs",
|
||||
selection: Some(23..47),
|
||||
},
|
||||
BuildPermalinkParams::new(
|
||||
"e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7",
|
||||
&repo_path("crates/editor/src/git/permalink.rs"),
|
||||
Some(23..47),
|
||||
),
|
||||
);
|
||||
|
||||
let expected_url = "https://github.com/zed-industries/zed/blob/e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7/crates/editor/src/git/permalink.rs#L24-L48";
|
||||
@@ -506,4 +507,23 @@ mod tests {
|
||||
};
|
||||
assert_eq!(github.extract_pull_request(&remote, message), None);
|
||||
}
|
||||
|
||||
/// Regression test for issue #39875
|
||||
#[test]
|
||||
fn test_git_permalink_url_escaping() {
|
||||
let permalink = Github::public_instance().build_permalink(
|
||||
ParsedGitRemote {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "nonexistent".into(),
|
||||
},
|
||||
BuildPermalinkParams::new(
|
||||
"3ef1539900037dd3601be7149b2b39ed6d0ce3db",
|
||||
&repo_path("app/blog/[slug]/page.tsx"),
|
||||
Some(7..7),
|
||||
),
|
||||
);
|
||||
|
||||
let expected_url = "https://github.com/zed-industries/nonexistent/blob/3ef1539900037dd3601be7149b2b39ed6d0ce3db/app/blog/%5Bslug%5D/page.tsx#L8";
|
||||
assert_eq!(permalink.to_string(), expected_url.to_string())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -126,6 +126,7 @@ impl GitHostingProvider for Gitlab {
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use git::repository::repo_path;
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
use super::*;
|
||||
@@ -209,11 +210,11 @@ mod tests {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
},
|
||||
BuildPermalinkParams {
|
||||
sha: "e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7",
|
||||
path: "crates/editor/src/git/permalink.rs",
|
||||
selection: None,
|
||||
},
|
||||
BuildPermalinkParams::new(
|
||||
"e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7",
|
||||
&repo_path("crates/editor/src/git/permalink.rs"),
|
||||
None,
|
||||
),
|
||||
);
|
||||
|
||||
let expected_url = "https://gitlab.com/zed-industries/zed/-/blob/e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7/crates/editor/src/git/permalink.rs";
|
||||
@@ -227,11 +228,11 @@ mod tests {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
},
|
||||
BuildPermalinkParams {
|
||||
sha: "e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7",
|
||||
path: "crates/editor/src/git/permalink.rs",
|
||||
selection: Some(6..6),
|
||||
},
|
||||
BuildPermalinkParams::new(
|
||||
"e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7",
|
||||
&repo_path("crates/editor/src/git/permalink.rs"),
|
||||
Some(6..6),
|
||||
),
|
||||
);
|
||||
|
||||
let expected_url = "https://gitlab.com/zed-industries/zed/-/blob/e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7/crates/editor/src/git/permalink.rs#L7";
|
||||
@@ -245,11 +246,11 @@ mod tests {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
},
|
||||
BuildPermalinkParams {
|
||||
sha: "e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7",
|
||||
path: "crates/editor/src/git/permalink.rs",
|
||||
selection: Some(23..47),
|
||||
},
|
||||
BuildPermalinkParams::new(
|
||||
"e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7",
|
||||
&repo_path("crates/editor/src/git/permalink.rs"),
|
||||
Some(23..47),
|
||||
),
|
||||
);
|
||||
|
||||
let expected_url = "https://gitlab.com/zed-industries/zed/-/blob/e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7/crates/editor/src/git/permalink.rs#L24-48";
|
||||
@@ -266,11 +267,11 @@ mod tests {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
},
|
||||
BuildPermalinkParams {
|
||||
sha: "e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7",
|
||||
path: "crates/editor/src/git/permalink.rs",
|
||||
selection: None,
|
||||
},
|
||||
BuildPermalinkParams::new(
|
||||
"e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7",
|
||||
&repo_path("crates/editor/src/git/permalink.rs"),
|
||||
None,
|
||||
),
|
||||
);
|
||||
|
||||
let expected_url = "https://gitlab.some-enterprise.com/zed-industries/zed/-/blob/e6ebe7974deb6bb6cc0e2595c8ec31f0c71084b7/crates/editor/src/git/permalink.rs";
|
||||
@@ -287,11 +288,11 @@ mod tests {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
},
|
||||
BuildPermalinkParams {
|
||||
sha: "b2efec9824c45fcc90c9a7eb107a50d1772a60aa",
|
||||
path: "crates/zed/src/main.rs",
|
||||
selection: None,
|
||||
},
|
||||
BuildPermalinkParams::new(
|
||||
"b2efec9824c45fcc90c9a7eb107a50d1772a60aa",
|
||||
&repo_path("crates/zed/src/main.rs"),
|
||||
None,
|
||||
),
|
||||
);
|
||||
|
||||
let expected_url = "https://gitlab-instance.big-co.com/zed-industries/zed/-/blob/b2efec9824c45fcc90c9a7eb107a50d1772a60aa/crates/zed/src/main.rs";
|
||||
|
||||
@@ -89,6 +89,7 @@ impl GitHostingProvider for Sourcehut {
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use git::repository::repo_path;
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
use super::*;
|
||||
@@ -145,11 +146,11 @@ mod tests {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
},
|
||||
BuildPermalinkParams {
|
||||
sha: "faa6f979be417239b2e070dbbf6392b909224e0b",
|
||||
path: "crates/editor/src/git/permalink.rs",
|
||||
selection: None,
|
||||
},
|
||||
BuildPermalinkParams::new(
|
||||
"faa6f979be417239b2e070dbbf6392b909224e0b",
|
||||
&repo_path("crates/editor/src/git/permalink.rs"),
|
||||
None,
|
||||
),
|
||||
);
|
||||
|
||||
let expected_url = "https://git.sr.ht/~zed-industries/zed/tree/faa6f979be417239b2e070dbbf6392b909224e0b/item/crates/editor/src/git/permalink.rs";
|
||||
@@ -163,11 +164,11 @@ mod tests {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed.git".into(),
|
||||
},
|
||||
BuildPermalinkParams {
|
||||
sha: "faa6f979be417239b2e070dbbf6392b909224e0b",
|
||||
path: "crates/editor/src/git/permalink.rs",
|
||||
selection: None,
|
||||
},
|
||||
BuildPermalinkParams::new(
|
||||
"faa6f979be417239b2e070dbbf6392b909224e0b",
|
||||
&repo_path("crates/editor/src/git/permalink.rs"),
|
||||
None,
|
||||
),
|
||||
);
|
||||
|
||||
let expected_url = "https://git.sr.ht/~zed-industries/zed.git/tree/faa6f979be417239b2e070dbbf6392b909224e0b/item/crates/editor/src/git/permalink.rs";
|
||||
@@ -181,11 +182,11 @@ mod tests {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
},
|
||||
BuildPermalinkParams {
|
||||
sha: "faa6f979be417239b2e070dbbf6392b909224e0b",
|
||||
path: "crates/editor/src/git/permalink.rs",
|
||||
selection: Some(6..6),
|
||||
},
|
||||
BuildPermalinkParams::new(
|
||||
"faa6f979be417239b2e070dbbf6392b909224e0b",
|
||||
&repo_path("crates/editor/src/git/permalink.rs"),
|
||||
Some(6..6),
|
||||
),
|
||||
);
|
||||
|
||||
let expected_url = "https://git.sr.ht/~zed-industries/zed/tree/faa6f979be417239b2e070dbbf6392b909224e0b/item/crates/editor/src/git/permalink.rs#L7";
|
||||
@@ -199,11 +200,11 @@ mod tests {
|
||||
owner: "zed-industries".into(),
|
||||
repo: "zed".into(),
|
||||
},
|
||||
BuildPermalinkParams {
|
||||
sha: "faa6f979be417239b2e070dbbf6392b909224e0b",
|
||||
path: "crates/editor/src/git/permalink.rs",
|
||||
selection: Some(23..47),
|
||||
},
|
||||
BuildPermalinkParams::new(
|
||||
"faa6f979be417239b2e070dbbf6392b909224e0b",
|
||||
&repo_path("crates/editor/src/git/permalink.rs"),
|
||||
Some(23..47),
|
||||
),
|
||||
);
|
||||
|
||||
let expected_url = "https://git.sr.ht/~zed-industries/zed/tree/faa6f979be417239b2e070dbbf6392b909224e0b/item/crates/editor/src/git/permalink.rs#L24-48";
|
||||
|
||||
@@ -43,8 +43,8 @@ struct CommitMetadataFile {
|
||||
worktree_id: WorktreeId,
|
||||
}
|
||||
|
||||
const COMMIT_METADATA_NAMESPACE: u64 = 0;
|
||||
const FILE_NAMESPACE: u64 = 1;
|
||||
const COMMIT_METADATA_SORT_PREFIX: u64 = 0;
|
||||
const FILE_NAMESPACE_SORT_PREFIX: u64 = 1;
|
||||
|
||||
impl CommitView {
|
||||
pub fn open(
|
||||
@@ -145,7 +145,7 @@ impl CommitView {
|
||||
});
|
||||
multibuffer.update(cx, |multibuffer, cx| {
|
||||
multibuffer.set_excerpts_for_path(
|
||||
PathKey::namespaced(COMMIT_METADATA_NAMESPACE, file.title.clone()),
|
||||
PathKey::with_sort_prefix(COMMIT_METADATA_SORT_PREFIX, file.title.clone()),
|
||||
buffer.clone(),
|
||||
vec![Point::zero()..buffer.read(cx).max_point()],
|
||||
0,
|
||||
@@ -193,7 +193,7 @@ impl CommitView {
|
||||
.collect::<Vec<_>>();
|
||||
let path = snapshot.file().unwrap().path().clone();
|
||||
let _is_newly_added = multibuffer.set_excerpts_for_path(
|
||||
PathKey::namespaced(FILE_NAMESPACE, path),
|
||||
PathKey::with_sort_prefix(FILE_NAMESPACE_SORT_PREFIX, path),
|
||||
buffer,
|
||||
diff_hunk_ranges,
|
||||
multibuffer_context_lines(cx),
|
||||
|
||||
@@ -386,6 +386,7 @@ impl GitPanel {
|
||||
cx.observe_global_in::<SettingsStore>(window, move |this, window, cx| {
|
||||
let is_sort_by_path = GitPanelSettings::get_global(cx).sort_by_path;
|
||||
if is_sort_by_path != was_sort_by_path {
|
||||
this.entries.clear();
|
||||
this.update_visible_entries(window, cx);
|
||||
}
|
||||
was_sort_by_path = is_sort_by_path
|
||||
@@ -869,6 +870,77 @@ impl GitPanel {
|
||||
});
|
||||
}
|
||||
|
||||
fn add_to_gitignore(
|
||||
&mut self,
|
||||
_: &git::AddToGitignore,
|
||||
_window: &mut Window,
|
||||
cx: &mut Context<Self>,
|
||||
) {
|
||||
maybe!({
|
||||
let list_entry = self.entries.get(self.selected_entry?)?.clone();
|
||||
let entry = list_entry.status_entry()?.to_owned();
|
||||
|
||||
if !entry.status.is_created() {
|
||||
return Some(());
|
||||
}
|
||||
|
||||
let project = self.project.downgrade();
|
||||
let repo_path = entry.repo_path;
|
||||
let active_repository = self.active_repository.as_ref()?.downgrade();
|
||||
|
||||
cx.spawn(async move |_, cx| {
|
||||
let file_path_str = repo_path.0.display(PathStyle::Posix);
|
||||
|
||||
let repo_root = active_repository.read_with(cx, |repository, _| {
|
||||
repository.snapshot().work_directory_abs_path
|
||||
})?;
|
||||
|
||||
let gitignore_abs_path = repo_root.join(".gitignore");
|
||||
|
||||
let buffer = project
|
||||
.update(cx, |project, cx| {
|
||||
project.open_local_buffer(gitignore_abs_path, cx)
|
||||
})?
|
||||
.await?;
|
||||
|
||||
let mut should_save = false;
|
||||
buffer.update(cx, |buffer, cx| {
|
||||
let existing_content = buffer.text();
|
||||
|
||||
if existing_content
|
||||
.lines()
|
||||
.any(|line| line.trim() == file_path_str)
|
||||
{
|
||||
return;
|
||||
}
|
||||
|
||||
let insert_position = existing_content.len();
|
||||
let new_entry = if existing_content.is_empty() {
|
||||
format!("{}\n", file_path_str)
|
||||
} else if existing_content.ends_with('\n') {
|
||||
format!("{}\n", file_path_str)
|
||||
} else {
|
||||
format!("\n{}\n", file_path_str)
|
||||
};
|
||||
|
||||
buffer.edit([(insert_position..insert_position, new_entry)], None, cx);
|
||||
should_save = true;
|
||||
})?;
|
||||
|
||||
if should_save {
|
||||
project
|
||||
.update(cx, |project, cx| project.save_buffer(buffer, cx))?
|
||||
.await?;
|
||||
}
|
||||
|
||||
anyhow::Ok(())
|
||||
})
|
||||
.detach_and_log_err(cx);
|
||||
|
||||
Some(())
|
||||
});
|
||||
}
|
||||
|
||||
fn revert_entry(
|
||||
&mut self,
|
||||
entry: &GitStatusEntry,
|
||||
@@ -3816,10 +3888,17 @@ impl GitPanel {
|
||||
"Restore File"
|
||||
};
|
||||
let context_menu = ContextMenu::build(window, cx, |context_menu, _, _| {
|
||||
context_menu
|
||||
let mut context_menu = context_menu
|
||||
.context(self.focus_handle.clone())
|
||||
.action(stage_title, ToggleStaged.boxed_clone())
|
||||
.action(restore_title, git::RestoreFile::default().boxed_clone())
|
||||
.action(restore_title, git::RestoreFile::default().boxed_clone());
|
||||
|
||||
if entry.status.is_created() {
|
||||
context_menu =
|
||||
context_menu.action("Add to .gitignore", git::AddToGitignore.boxed_clone());
|
||||
}
|
||||
|
||||
context_menu
|
||||
.separator()
|
||||
.action("Open Diff", Confirm.boxed_clone())
|
||||
.action("Open File", SecondaryConfirm.boxed_clone())
|
||||
@@ -4242,6 +4321,7 @@ impl Render for GitPanel {
|
||||
.on_action(cx.listener(Self::unstage_selected))
|
||||
.on_action(cx.listener(Self::restore_tracked_files))
|
||||
.on_action(cx.listener(Self::revert_selected))
|
||||
.on_action(cx.listener(Self::add_to_gitignore))
|
||||
.on_action(cx.listener(Self::clean_all))
|
||||
.on_action(cx.listener(Self::generate_commit_message_action))
|
||||
.on_action(cx.listener(Self::stash_all))
|
||||
@@ -4887,12 +4967,13 @@ mod tests {
|
||||
repository::repo_path,
|
||||
status::{StatusCode, UnmergedStatus, UnmergedStatusCode},
|
||||
};
|
||||
use gpui::{TestAppContext, VisualTestContext};
|
||||
use gpui::{TestAppContext, UpdateGlobal, VisualTestContext};
|
||||
use project::{FakeFs, WorktreeSettings};
|
||||
use serde_json::json;
|
||||
use settings::SettingsStore;
|
||||
use theme::LoadThemes;
|
||||
use util::path;
|
||||
use util::rel_path::rel_path;
|
||||
|
||||
use super::*;
|
||||
|
||||
@@ -5210,6 +5291,242 @@ mod tests {
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_bulk_staging_with_sort_by_paths(cx: &mut TestAppContext) {
|
||||
use GitListEntry::*;
|
||||
|
||||
init_test(cx);
|
||||
let fs = FakeFs::new(cx.background_executor.clone());
|
||||
fs.insert_tree(
|
||||
"/root",
|
||||
json!({
|
||||
"project": {
|
||||
".git": {},
|
||||
"src": {
|
||||
"main.rs": "fn main() {}",
|
||||
"lib.rs": "pub fn hello() {}",
|
||||
"utils.rs": "pub fn util() {}"
|
||||
},
|
||||
"tests": {
|
||||
"test.rs": "fn test() {}"
|
||||
},
|
||||
"new_file.txt": "new content",
|
||||
"another_new.rs": "// new file",
|
||||
"conflict.txt": "conflicted content"
|
||||
}
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
|
||||
fs.set_status_for_repo(
|
||||
Path::new(path!("/root/project/.git")),
|
||||
&[
|
||||
("src/main.rs", StatusCode::Modified.worktree()),
|
||||
("src/lib.rs", StatusCode::Modified.worktree()),
|
||||
("tests/test.rs", StatusCode::Modified.worktree()),
|
||||
("new_file.txt", FileStatus::Untracked),
|
||||
("another_new.rs", FileStatus::Untracked),
|
||||
("src/utils.rs", FileStatus::Untracked),
|
||||
(
|
||||
"conflict.txt",
|
||||
UnmergedStatus {
|
||||
first_head: UnmergedStatusCode::Updated,
|
||||
second_head: UnmergedStatusCode::Updated,
|
||||
}
|
||||
.into(),
|
||||
),
|
||||
],
|
||||
);
|
||||
|
||||
let project = Project::test(fs.clone(), [Path::new(path!("/root/project"))], cx).await;
|
||||
let workspace =
|
||||
cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
|
||||
let cx = &mut VisualTestContext::from_window(*workspace, cx);
|
||||
|
||||
cx.read(|cx| {
|
||||
project
|
||||
.read(cx)
|
||||
.worktrees(cx)
|
||||
.next()
|
||||
.unwrap()
|
||||
.read(cx)
|
||||
.as_local()
|
||||
.unwrap()
|
||||
.scan_complete()
|
||||
})
|
||||
.await;
|
||||
|
||||
cx.executor().run_until_parked();
|
||||
|
||||
let panel = workspace.update(cx, GitPanel::new).unwrap();
|
||||
|
||||
let handle = cx.update_window_entity(&panel, |panel, _, _| {
|
||||
std::mem::replace(&mut panel.update_visible_entries_task, Task::ready(()))
|
||||
});
|
||||
cx.executor().advance_clock(2 * UPDATE_DEBOUNCE);
|
||||
handle.await;
|
||||
|
||||
let entries = panel.read_with(cx, |panel, _| panel.entries.clone());
|
||||
#[rustfmt::skip]
|
||||
pretty_assertions::assert_matches!(
|
||||
entries.as_slice(),
|
||||
&[
|
||||
Header(GitHeaderEntry { header: Section::Conflict }),
|
||||
Status(GitStatusEntry { staging: StageStatus::Unstaged, .. }),
|
||||
Header(GitHeaderEntry { header: Section::Tracked }),
|
||||
Status(GitStatusEntry { staging: StageStatus::Unstaged, .. }),
|
||||
Status(GitStatusEntry { staging: StageStatus::Unstaged, .. }),
|
||||
Status(GitStatusEntry { staging: StageStatus::Unstaged, .. }),
|
||||
Header(GitHeaderEntry { header: Section::New }),
|
||||
Status(GitStatusEntry { staging: StageStatus::Unstaged, .. }),
|
||||
Status(GitStatusEntry { staging: StageStatus::Unstaged, .. }),
|
||||
Status(GitStatusEntry { staging: StageStatus::Unstaged, .. }),
|
||||
],
|
||||
);
|
||||
|
||||
assert_entry_paths(
|
||||
&entries,
|
||||
&[
|
||||
None,
|
||||
Some("conflict.txt"),
|
||||
None,
|
||||
Some("src/lib.rs"),
|
||||
Some("src/main.rs"),
|
||||
Some("tests/test.rs"),
|
||||
None,
|
||||
Some("another_new.rs"),
|
||||
Some("new_file.txt"),
|
||||
Some("src/utils.rs"),
|
||||
],
|
||||
);
|
||||
|
||||
let second_status_entry = entries[3].clone();
|
||||
panel.update_in(cx, |panel, window, cx| {
|
||||
panel.toggle_staged_for_entry(&second_status_entry, window, cx);
|
||||
});
|
||||
|
||||
cx.update(|_window, cx| {
|
||||
SettingsStore::update_global(cx, |store, cx| {
|
||||
store.update_user_settings(cx, |settings| {
|
||||
settings.git_panel.get_or_insert_default().sort_by_path = Some(true);
|
||||
})
|
||||
});
|
||||
});
|
||||
|
||||
panel.update_in(cx, |panel, window, cx| {
|
||||
panel.selected_entry = Some(7);
|
||||
panel.stage_range(&git::StageRange, window, cx);
|
||||
});
|
||||
|
||||
cx.read(|cx| {
|
||||
project
|
||||
.read(cx)
|
||||
.worktrees(cx)
|
||||
.next()
|
||||
.unwrap()
|
||||
.read(cx)
|
||||
.as_local()
|
||||
.unwrap()
|
||||
.scan_complete()
|
||||
})
|
||||
.await;
|
||||
|
||||
cx.executor().run_until_parked();
|
||||
|
||||
let handle = cx.update_window_entity(&panel, |panel, _, _| {
|
||||
std::mem::replace(&mut panel.update_visible_entries_task, Task::ready(()))
|
||||
});
|
||||
cx.executor().advance_clock(2 * UPDATE_DEBOUNCE);
|
||||
handle.await;
|
||||
|
||||
let entries = panel.read_with(cx, |panel, _| panel.entries.clone());
|
||||
#[rustfmt::skip]
|
||||
pretty_assertions::assert_matches!(
|
||||
entries.as_slice(),
|
||||
&[
|
||||
Status(GitStatusEntry { status: FileStatus::Untracked, staging: StageStatus::Unstaged, .. }),
|
||||
Status(GitStatusEntry { status: FileStatus::Unmerged(..), staging: StageStatus::Unstaged, .. }),
|
||||
Status(GitStatusEntry { status: FileStatus::Untracked, staging: StageStatus::Unstaged, .. }),
|
||||
Status(GitStatusEntry { status: FileStatus::Tracked(..), staging: StageStatus::Staged, .. }),
|
||||
Status(GitStatusEntry { status: FileStatus::Tracked(..), staging: StageStatus::Unstaged, .. }),
|
||||
Status(GitStatusEntry { status: FileStatus::Untracked, staging: StageStatus::Unstaged, .. }),
|
||||
Status(GitStatusEntry { status: FileStatus::Tracked(..), staging: StageStatus::Unstaged, .. }),
|
||||
],
|
||||
);
|
||||
|
||||
assert_entry_paths(
|
||||
&entries,
|
||||
&[
|
||||
Some("another_new.rs"),
|
||||
Some("conflict.txt"),
|
||||
Some("new_file.txt"),
|
||||
Some("src/lib.rs"),
|
||||
Some("src/main.rs"),
|
||||
Some("src/utils.rs"),
|
||||
Some("tests/test.rs"),
|
||||
],
|
||||
);
|
||||
|
||||
let third_status_entry = entries[4].clone();
|
||||
panel.update_in(cx, |panel, window, cx| {
|
||||
panel.toggle_staged_for_entry(&third_status_entry, window, cx);
|
||||
});
|
||||
|
||||
panel.update_in(cx, |panel, window, cx| {
|
||||
panel.selected_entry = Some(9);
|
||||
panel.stage_range(&git::StageRange, window, cx);
|
||||
});
|
||||
|
||||
cx.read(|cx| {
|
||||
project
|
||||
.read(cx)
|
||||
.worktrees(cx)
|
||||
.next()
|
||||
.unwrap()
|
||||
.read(cx)
|
||||
.as_local()
|
||||
.unwrap()
|
||||
.scan_complete()
|
||||
})
|
||||
.await;
|
||||
|
||||
cx.executor().run_until_parked();
|
||||
|
||||
let handle = cx.update_window_entity(&panel, |panel, _, _| {
|
||||
std::mem::replace(&mut panel.update_visible_entries_task, Task::ready(()))
|
||||
});
|
||||
cx.executor().advance_clock(2 * UPDATE_DEBOUNCE);
|
||||
handle.await;
|
||||
|
||||
let entries = panel.read_with(cx, |panel, _| panel.entries.clone());
|
||||
#[rustfmt::skip]
|
||||
pretty_assertions::assert_matches!(
|
||||
entries.as_slice(),
|
||||
&[
|
||||
Status(GitStatusEntry { status: FileStatus::Untracked, staging: StageStatus::Unstaged, .. }),
|
||||
Status(GitStatusEntry { status: FileStatus::Unmerged(..), staging: StageStatus::Unstaged, .. }),
|
||||
Status(GitStatusEntry { status: FileStatus::Untracked, staging: StageStatus::Unstaged, .. }),
|
||||
Status(GitStatusEntry { status: FileStatus::Tracked(..), staging: StageStatus::Staged, .. }),
|
||||
Status(GitStatusEntry { status: FileStatus::Tracked(..), staging: StageStatus::Staged, .. }),
|
||||
Status(GitStatusEntry { status: FileStatus::Untracked, staging: StageStatus::Unstaged, .. }),
|
||||
Status(GitStatusEntry { status: FileStatus::Tracked(..), staging: StageStatus::Unstaged, .. }),
|
||||
],
|
||||
);
|
||||
|
||||
assert_entry_paths(
|
||||
&entries,
|
||||
&[
|
||||
Some("another_new.rs"),
|
||||
Some("conflict.txt"),
|
||||
Some("new_file.txt"),
|
||||
Some("src/lib.rs"),
|
||||
Some("src/main.rs"),
|
||||
Some("src/utils.rs"),
|
||||
Some("tests/test.rs"),
|
||||
],
|
||||
);
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_amend_commit_message_handling(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
@@ -5278,4 +5595,81 @@ mod tests {
|
||||
assert_eq!(current_message, "");
|
||||
});
|
||||
}
|
||||
|
||||
#[gpui::test]
|
||||
async fn test_open_diff(cx: &mut TestAppContext) {
|
||||
init_test(cx);
|
||||
|
||||
let fs = FakeFs::new(cx.background_executor.clone());
|
||||
fs.insert_tree(
|
||||
path!("/project"),
|
||||
json!({
|
||||
".git": {},
|
||||
"tracked": "tracked\n",
|
||||
"untracked": "\n",
|
||||
}),
|
||||
)
|
||||
.await;
|
||||
|
||||
fs.set_head_and_index_for_repo(
|
||||
path!("/project/.git").as_ref(),
|
||||
&[("tracked", "old tracked\n".into())],
|
||||
);
|
||||
|
||||
let project = Project::test(fs.clone(), [Path::new(path!("/project"))], cx).await;
|
||||
let workspace =
|
||||
cx.add_window(|window, cx| Workspace::test_new(project.clone(), window, cx));
|
||||
let cx = &mut VisualTestContext::from_window(*workspace, cx);
|
||||
let panel = workspace.update(cx, GitPanel::new).unwrap();
|
||||
|
||||
// Enable the `sort_by_path` setting and wait for entries to be updated,
|
||||
// as there should no longer be separators between Tracked and Untracked
|
||||
// files.
|
||||
cx.update(|_window, cx| {
|
||||
SettingsStore::update_global(cx, |store, cx| {
|
||||
store.update_user_settings(cx, |settings| {
|
||||
settings.git_panel.get_or_insert_default().sort_by_path = Some(true);
|
||||
})
|
||||
});
|
||||
});
|
||||
|
||||
cx.update_window_entity(&panel, |panel, _, _| {
|
||||
std::mem::replace(&mut panel.update_visible_entries_task, Task::ready(()))
|
||||
})
|
||||
.await;
|
||||
|
||||
// Confirm that `Open Diff` still works for the untracked file, updating
|
||||
// the Project Diff's active path.
|
||||
panel.update_in(cx, |panel, window, cx| {
|
||||
panel.selected_entry = Some(1);
|
||||
panel.open_diff(&Confirm, window, cx);
|
||||
});
|
||||
cx.run_until_parked();
|
||||
|
||||
let _ = workspace.update(cx, |workspace, _window, cx| {
|
||||
let active_path = workspace
|
||||
.item_of_type::<ProjectDiff>(cx)
|
||||
.expect("ProjectDiff should exist")
|
||||
.read(cx)
|
||||
.active_path(cx)
|
||||
.expect("active_path should exist");
|
||||
|
||||
assert_eq!(active_path.path, rel_path("untracked").into_arc());
|
||||
});
|
||||
}
|
||||
|
||||
fn assert_entry_paths(entries: &[GitListEntry], expected_paths: &[Option<&str>]) {
|
||||
assert_eq!(entries.len(), expected_paths.len());
|
||||
for (entry, expected_path) in entries.iter().zip(expected_paths) {
|
||||
assert_eq!(
|
||||
entry.status_entry().map(|status| status
|
||||
.repo_path
|
||||
.0
|
||||
.as_std_path()
|
||||
.to_string_lossy()
|
||||
.to_string()),
|
||||
expected_path.map(|s| s.to_string())
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -16,7 +16,7 @@ use editor::{
|
||||
use futures::StreamExt;
|
||||
use git::{
|
||||
Commit, StageAll, StageAndNext, ToggleStaged, UnstageAll, UnstageAndNext,
|
||||
repository::{Branch, Upstream, UpstreamTracking, UpstreamTrackingStatus},
|
||||
repository::{Branch, RepoPath, Upstream, UpstreamTracking, UpstreamTrackingStatus},
|
||||
status::FileStatus,
|
||||
};
|
||||
use gpui::{
|
||||
@@ -27,7 +27,7 @@ use language::{Anchor, Buffer, Capability, OffsetRangeExt};
|
||||
use multi_buffer::{MultiBuffer, PathKey};
|
||||
use project::{
|
||||
Project, ProjectPath,
|
||||
git_store::{GitStore, GitStoreEvent},
|
||||
git_store::{GitStore, GitStoreEvent, Repository},
|
||||
};
|
||||
use settings::{Settings, SettingsStore};
|
||||
use std::any::{Any, TypeId};
|
||||
@@ -73,9 +73,9 @@ struct DiffBuffer {
|
||||
file_status: FileStatus,
|
||||
}
|
||||
|
||||
const CONFLICT_NAMESPACE: u64 = 1;
|
||||
const TRACKED_NAMESPACE: u64 = 2;
|
||||
const NEW_NAMESPACE: u64 = 3;
|
||||
const CONFLICT_SORT_PREFIX: u64 = 1;
|
||||
const TRACKED_SORT_PREFIX: u64 = 2;
|
||||
const NEW_SORT_PREFIX: u64 = 3;
|
||||
|
||||
impl ProjectDiff {
|
||||
pub(crate) fn register(workspace: &mut Workspace, cx: &mut Context<Workspace>) {
|
||||
@@ -234,16 +234,8 @@ impl ProjectDiff {
|
||||
return;
|
||||
};
|
||||
let repo = git_repo.read(cx);
|
||||
|
||||
let namespace = if repo.had_conflict_on_last_merge_head_change(&entry.repo_path) {
|
||||
CONFLICT_NAMESPACE
|
||||
} else if entry.status.is_created() {
|
||||
NEW_NAMESPACE
|
||||
} else {
|
||||
TRACKED_NAMESPACE
|
||||
};
|
||||
|
||||
let path_key = PathKey::namespaced(namespace, entry.repo_path.0);
|
||||
let sort_prefix = sort_prefix(repo, &entry.repo_path, entry.status, cx);
|
||||
let path_key = PathKey::with_sort_prefix(sort_prefix, entry.repo_path.0);
|
||||
|
||||
self.move_to_path(path_key, window, cx)
|
||||
}
|
||||
@@ -388,16 +380,8 @@ impl ProjectDiff {
|
||||
else {
|
||||
continue;
|
||||
};
|
||||
let namespace = if GitPanelSettings::get_global(cx).sort_by_path {
|
||||
TRACKED_NAMESPACE
|
||||
} else if repo.had_conflict_on_last_merge_head_change(&entry.repo_path) {
|
||||
CONFLICT_NAMESPACE
|
||||
} else if entry.status.is_created() {
|
||||
NEW_NAMESPACE
|
||||
} else {
|
||||
TRACKED_NAMESPACE
|
||||
};
|
||||
let path_key = PathKey::namespaced(namespace, entry.repo_path.0.clone());
|
||||
let sort_prefix = sort_prefix(repo, &entry.repo_path, entry.status, cx);
|
||||
let path_key = PathKey::with_sort_prefix(sort_prefix, entry.repo_path.0.clone());
|
||||
|
||||
previous_paths.remove(&path_key);
|
||||
let load_buffer = self
|
||||
@@ -541,6 +525,18 @@ impl ProjectDiff {
|
||||
}
|
||||
}
|
||||
|
||||
fn sort_prefix(repo: &Repository, repo_path: &RepoPath, status: FileStatus, cx: &App) -> u64 {
|
||||
if GitPanelSettings::get_global(cx).sort_by_path {
|
||||
TRACKED_SORT_PREFIX
|
||||
} else if repo.had_conflict_on_last_merge_head_change(repo_path) {
|
||||
CONFLICT_SORT_PREFIX
|
||||
} else if status.is_created() {
|
||||
NEW_SORT_PREFIX
|
||||
} else {
|
||||
TRACKED_SORT_PREFIX
|
||||
}
|
||||
}
|
||||
|
||||
impl EventEmitter<EditorEvent> for ProjectDiff {}
|
||||
|
||||
impl Focusable for ProjectDiff {
|
||||
@@ -1463,7 +1459,7 @@ mod tests {
|
||||
|
||||
let editor = cx.update_window_entity(&diff, |diff, window, cx| {
|
||||
diff.move_to_path(
|
||||
PathKey::namespaced(TRACKED_NAMESPACE, rel_path("foo").into_arc()),
|
||||
PathKey::with_sort_prefix(TRACKED_SORT_PREFIX, rel_path("foo").into_arc()),
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
@@ -1484,7 +1480,7 @@ mod tests {
|
||||
|
||||
let editor = cx.update_window_entity(&diff, |diff, window, cx| {
|
||||
diff.move_to_path(
|
||||
PathKey::namespaced(TRACKED_NAMESPACE, rel_path("bar").into_arc()),
|
||||
PathKey::with_sort_prefix(TRACKED_SORT_PREFIX, rel_path("bar").into_arc()),
|
||||
window,
|
||||
cx,
|
||||
);
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
use gpui::{App, DismissEvent, Entity, EventEmitter, FocusHandle, Focusable, Task, WeakEntity};
|
||||
use itertools::Itertools;
|
||||
use picker::{Picker, PickerDelegate};
|
||||
use picker::{Picker, PickerDelegate, PickerEditorPosition};
|
||||
use project::{Project, git_store::Repository};
|
||||
use std::sync::Arc;
|
||||
use ui::{ListItem, ListItemSpacing, prelude::*};
|
||||
@@ -36,11 +36,11 @@ impl RepositorySelector {
|
||||
) -> Self {
|
||||
let git_store = project_handle.read(cx).git_store().clone();
|
||||
let repository_entries = git_store.update(cx, |git_store, _cx| {
|
||||
git_store
|
||||
.repositories()
|
||||
.values()
|
||||
.cloned()
|
||||
.collect::<Vec<_>>()
|
||||
let mut repos: Vec<_> = git_store.repositories().values().cloned().collect();
|
||||
|
||||
repos.sort_by_key(|a| a.read(_cx).display_name());
|
||||
|
||||
repos
|
||||
});
|
||||
let filtered_repositories = repository_entries.clone();
|
||||
|
||||
@@ -59,7 +59,7 @@ impl RepositorySelector {
|
||||
};
|
||||
|
||||
let picker = cx.new(|cx| {
|
||||
Picker::nonsearchable_uniform_list(delegate, window, cx)
|
||||
Picker::uniform_list(delegate, window, cx)
|
||||
.widest_item(widest_item_ix)
|
||||
.max_height(Some(rems(20.).into()))
|
||||
});
|
||||
@@ -158,6 +158,10 @@ impl PickerDelegate for RepositorySelectorDelegate {
|
||||
"Select a repository...".into()
|
||||
}
|
||||
|
||||
fn editor_position(&self) -> PickerEditorPosition {
|
||||
PickerEditorPosition::End
|
||||
}
|
||||
|
||||
fn update_matches(
|
||||
&mut self,
|
||||
query: String,
|
||||
@@ -166,25 +170,31 @@ impl PickerDelegate for RepositorySelectorDelegate {
|
||||
) -> Task<()> {
|
||||
let all_repositories = self.repository_entries.clone();
|
||||
|
||||
let repo_names: Vec<(Entity<Repository>, String)> = all_repositories
|
||||
.iter()
|
||||
.map(|repo| (repo.clone(), repo.read(cx).display_name().to_lowercase()))
|
||||
.collect();
|
||||
|
||||
cx.spawn_in(window, async move |this, cx| {
|
||||
let filtered_repositories = cx
|
||||
.background_spawn(async move {
|
||||
if query.is_empty() {
|
||||
all_repositories
|
||||
} else {
|
||||
all_repositories
|
||||
let query_lower = query.to_lowercase();
|
||||
repo_names
|
||||
.into_iter()
|
||||
.filter(|_repo_info| {
|
||||
// TODO: Implement repository filtering logic
|
||||
true
|
||||
})
|
||||
.filter(|(_, display_name)| display_name.contains(&query_lower))
|
||||
.map(|(repo, _)| repo)
|
||||
.collect()
|
||||
}
|
||||
})
|
||||
.await;
|
||||
|
||||
this.update_in(cx, |this, window, cx| {
|
||||
this.delegate.filtered_repositories = filtered_repositories;
|
||||
let mut sorted_repositories = filtered_repositories;
|
||||
sorted_repositories.sort_by_key(|a| a.read(cx).display_name());
|
||||
this.delegate.filtered_repositories = sorted_repositories;
|
||||
this.delegate.set_selected_index(0, window, cx);
|
||||
cx.notify();
|
||||
})
|
||||
|
||||
@@ -113,7 +113,9 @@ impl CursorPosition {
|
||||
let mut last_selection = None::<Selection<Point>>;
|
||||
let snapshot = editor.buffer().read(cx).snapshot(cx);
|
||||
if snapshot.excerpts().count() > 0 {
|
||||
for selection in editor.selections.all_adjusted(cx) {
|
||||
for selection in
|
||||
editor.selections.all_adjusted_with_snapshot(&snapshot)
|
||||
{
|
||||
let selection_summary = snapshot
|
||||
.text_summary_for_range::<text::TextSummary, _>(
|
||||
selection.start..selection.end,
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[package]
|
||||
name = "gpui"
|
||||
version = "0.1.0"
|
||||
version = "0.2.0"
|
||||
edition.workspace = true
|
||||
authors = ["Nathan Sobo <nathan@zed.dev>"]
|
||||
description = "Zed's GPU-accelerated UI framework"
|
||||
@@ -43,7 +43,7 @@ wayland = [
|
||||
"blade-macros",
|
||||
"blade-util",
|
||||
"bytemuck",
|
||||
"ashpd",
|
||||
"ashpd/wayland",
|
||||
"cosmic-text",
|
||||
"font-kit",
|
||||
"calloop-wayland-source",
|
||||
|
||||
@@ -11,6 +11,8 @@ GPUI is still in active development as we work on the Zed code editor, and is st
|
||||
gpui = { version = "*" }
|
||||
```
|
||||
|
||||
- [Ownership and data flow](_ownership_and_data_flow)
|
||||
|
||||
Everything in GPUI starts with an `Application`. You can create one with `Application::new()`, and kick off your application by passing a callback to `Application::run()`. Inside this callback, you can create a new window with `App::open_window()`, and register your first root view. See [gpui.rs](https://www.gpui.rs/) for a complete example.
|
||||
|
||||
### Dependencies
|
||||
|
||||
@@ -3,8 +3,8 @@ use std::time::Duration;
|
||||
use anyhow::Result;
|
||||
use gpui::{
|
||||
Animation, AnimationExt as _, App, Application, AssetSource, Bounds, Context, SharedString,
|
||||
Transformation, Window, WindowBounds, WindowOptions, black, bounce, div, ease_in_out,
|
||||
percentage, prelude::*, px, rgb, size, svg,
|
||||
Transformation, Window, WindowBounds, WindowOptions, bounce, div, ease_in_out, percentage,
|
||||
prelude::*, px, size, svg,
|
||||
};
|
||||
|
||||
struct Assets {}
|
||||
@@ -37,37 +37,66 @@ struct AnimationExample {}
|
||||
|
||||
impl Render for AnimationExample {
|
||||
fn render(&mut self, _window: &mut Window, _cx: &mut Context<Self>) -> impl IntoElement {
|
||||
div().flex().flex_col().size_full().justify_around().child(
|
||||
div().flex().flex_row().w_full().justify_around().child(
|
||||
div()
|
||||
.flex()
|
||||
.flex_col()
|
||||
.size_full()
|
||||
.bg(gpui::white())
|
||||
.text_color(gpui::black())
|
||||
.justify_around()
|
||||
.child(
|
||||
div()
|
||||
.flex()
|
||||
.bg(rgb(0x2e7d32))
|
||||
.size(px(300.0))
|
||||
.justify_center()
|
||||
.items_center()
|
||||
.shadow_lg()
|
||||
.text_xl()
|
||||
.text_color(black())
|
||||
.child("hello")
|
||||
.flex_col()
|
||||
.size_full()
|
||||
.justify_around()
|
||||
.child(
|
||||
svg()
|
||||
.size_8()
|
||||
.path(ARROW_CIRCLE_SVG)
|
||||
.text_color(black())
|
||||
.with_animation(
|
||||
"image_circle",
|
||||
Animation::new(Duration::from_secs(2))
|
||||
.repeat()
|
||||
.with_easing(bounce(ease_in_out)),
|
||||
|svg, delta| {
|
||||
svg.with_transformation(Transformation::rotate(percentage(
|
||||
delta,
|
||||
)))
|
||||
},
|
||||
div()
|
||||
.id("content")
|
||||
.flex()
|
||||
.flex_col()
|
||||
.h(px(150.))
|
||||
.overflow_y_scroll()
|
||||
.w_full()
|
||||
.flex_1()
|
||||
.justify_center()
|
||||
.items_center()
|
||||
.text_xl()
|
||||
.gap_4()
|
||||
.child("Hello Animation")
|
||||
.child(
|
||||
svg()
|
||||
.size_20()
|
||||
.overflow_hidden()
|
||||
.path(ARROW_CIRCLE_SVG)
|
||||
.text_color(gpui::black())
|
||||
.with_animation(
|
||||
"image_circle",
|
||||
Animation::new(Duration::from_secs(2))
|
||||
.repeat()
|
||||
.with_easing(bounce(ease_in_out)),
|
||||
|svg, delta| {
|
||||
svg.with_transformation(Transformation::rotate(
|
||||
percentage(delta),
|
||||
))
|
||||
},
|
||||
),
|
||||
),
|
||||
)
|
||||
.child(
|
||||
div()
|
||||
.flex()
|
||||
.h(px(64.))
|
||||
.w_full()
|
||||
.p_2()
|
||||
.justify_center()
|
||||
.items_center()
|
||||
.border_t_1()
|
||||
.border_color(gpui::black().opacity(0.1))
|
||||
.bg(gpui::black().opacity(0.05))
|
||||
.child("Other Panel"),
|
||||
),
|
||||
),
|
||||
)
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -618,17 +618,25 @@ pub trait InteractiveElement: Sized {
|
||||
self
|
||||
}
|
||||
|
||||
/// Designate this element as a tab stop, equivalent to `tab_index(0)`.
|
||||
/// This should be the primary mechanism for tab navigation within the application.
|
||||
fn tab_stop(mut self) -> Self {
|
||||
self.tab_index(0)
|
||||
/// Set whether this element is a tab stop.
|
||||
///
|
||||
/// When false, the element remains in tab-index order but cannot be reached via keyboard navigation.
|
||||
/// Useful for container elements: focus the container, then call `window.focus_next()` to focus
|
||||
/// the first tab stop inside it while having the container element itself be unreachable via the keyboard.
|
||||
/// Should only be used with `tab_index`.
|
||||
fn tab_stop(mut self, tab_stop: bool) -> Self {
|
||||
self.interactivity().tab_stop = tab_stop;
|
||||
self
|
||||
}
|
||||
|
||||
/// Set index of the tab stop order. This should only be used in conjunction with `tab_group`
|
||||
/// Set index of the tab stop order, and set this node as a tab stop.
|
||||
/// This will default the element to being a tab stop. See [`Self::tab_stop`] for more information.
|
||||
/// This should only be used in conjunction with `tab_group`
|
||||
/// in order to not interfere with the tab index of other elements.
|
||||
fn tab_index(mut self, index: isize) -> Self {
|
||||
self.interactivity().focusable = true;
|
||||
self.interactivity().tab_index = Some(index);
|
||||
self.interactivity().tab_stop = true;
|
||||
self
|
||||
}
|
||||
|
||||
@@ -1505,6 +1513,7 @@ pub struct Interactivity {
|
||||
pub(crate) hitbox_behavior: HitboxBehavior,
|
||||
pub(crate) tab_index: Option<isize>,
|
||||
pub(crate) tab_group: bool,
|
||||
pub(crate) tab_stop: bool,
|
||||
|
||||
#[cfg(any(feature = "inspector", debug_assertions))]
|
||||
pub(crate) source_location: Option<&'static core::panic::Location<'static>>,
|
||||
@@ -1569,10 +1578,10 @@ impl Interactivity {
|
||||
.focus_handle
|
||||
.get_or_insert_with(|| cx.focus_handle())
|
||||
.clone()
|
||||
.tab_stop(false);
|
||||
.tab_stop(self.tab_stop);
|
||||
|
||||
if let Some(index) = self.tab_index {
|
||||
handle = handle.tab_index(index).tab_stop(true);
|
||||
handle = handle.tab_index(index);
|
||||
}
|
||||
|
||||
self.tracked_focus_handle = Some(handle);
|
||||
@@ -3025,7 +3034,20 @@ struct ScrollHandleState {
|
||||
child_bounds: Vec<Bounds<Pixels>>,
|
||||
scroll_to_bottom: bool,
|
||||
overflow: Point<Overflow>,
|
||||
active_item: Option<usize>,
|
||||
active_item: Option<ScrollActiveItem>,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, Copy)]
|
||||
struct ScrollActiveItem {
|
||||
index: usize,
|
||||
strategy: ScrollStrategy,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone, Copy)]
|
||||
enum ScrollStrategy {
|
||||
#[default]
|
||||
FirstVisible,
|
||||
Top,
|
||||
}
|
||||
|
||||
/// A handle to the scrollable aspects of an element.
|
||||
@@ -3075,6 +3097,25 @@ impl ScrollHandle {
|
||||
}
|
||||
}
|
||||
|
||||
/// Get the bottom child that's scrolled into view.
|
||||
pub fn bottom_item(&self) -> usize {
|
||||
let state = self.0.borrow();
|
||||
let bottom = state.bounds.bottom() - state.offset.borrow().y;
|
||||
|
||||
match state.child_bounds.binary_search_by(|bounds| {
|
||||
if bottom < bounds.top() {
|
||||
Ordering::Greater
|
||||
} else if bottom > bounds.bottom() {
|
||||
Ordering::Less
|
||||
} else {
|
||||
Ordering::Equal
|
||||
}
|
||||
}) {
|
||||
Ok(ix) => ix,
|
||||
Err(ix) => ix.min(state.child_bounds.len().saturating_sub(1)),
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the bounds into which this child is painted
|
||||
pub fn bounds(&self) -> Bounds<Pixels> {
|
||||
self.0.borrow().bounds
|
||||
@@ -3088,26 +3129,48 @@ impl ScrollHandle {
|
||||
/// Update [ScrollHandleState]'s active item for scrolling to in prepaint
|
||||
pub fn scroll_to_item(&self, ix: usize) {
|
||||
let mut state = self.0.borrow_mut();
|
||||
state.active_item = Some(ix);
|
||||
state.active_item = Some(ScrollActiveItem {
|
||||
index: ix,
|
||||
strategy: ScrollStrategy::default(),
|
||||
});
|
||||
}
|
||||
|
||||
/// Scrolls the minimal amount to ensure that the child is
|
||||
/// fully visible
|
||||
/// Update [ScrollHandleState]'s active item for scrolling to in prepaint
|
||||
/// This scrolls the minimal amount to ensure that the child is the first visible element
|
||||
pub fn scroll_to_top_of_item(&self, ix: usize) {
|
||||
let mut state = self.0.borrow_mut();
|
||||
state.active_item = Some(ScrollActiveItem {
|
||||
index: ix,
|
||||
strategy: ScrollStrategy::Top,
|
||||
});
|
||||
}
|
||||
|
||||
/// Scrolls the minimal amount to either ensure that the child is
|
||||
/// fully visible or the top element of the view depends on the
|
||||
/// scroll strategy
|
||||
fn scroll_to_active_item(&self) {
|
||||
let mut state = self.0.borrow_mut();
|
||||
|
||||
let Some(active_item_index) = state.active_item else {
|
||||
let Some(active_item) = state.active_item else {
|
||||
return;
|
||||
};
|
||||
let active_item = match state.child_bounds.get(active_item_index) {
|
||||
|
||||
let active_item = match state.child_bounds.get(active_item.index) {
|
||||
Some(bounds) => {
|
||||
let mut scroll_offset = state.offset.borrow_mut();
|
||||
|
||||
if state.overflow.y == Overflow::Scroll {
|
||||
if bounds.top() + scroll_offset.y < state.bounds.top() {
|
||||
match active_item.strategy {
|
||||
ScrollStrategy::FirstVisible => {
|
||||
if state.overflow.y == Overflow::Scroll {
|
||||
if bounds.top() + scroll_offset.y < state.bounds.top() {
|
||||
scroll_offset.y = state.bounds.top() - bounds.top();
|
||||
} else if bounds.bottom() + scroll_offset.y > state.bounds.bottom() {
|
||||
scroll_offset.y = state.bounds.bottom() - bounds.bottom();
|
||||
}
|
||||
}
|
||||
}
|
||||
ScrollStrategy::Top => {
|
||||
scroll_offset.y = state.bounds.top() - bounds.top();
|
||||
} else if bounds.bottom() + scroll_offset.y > state.bounds.bottom() {
|
||||
scroll_offset.y = state.bounds.bottom() - bounds.bottom();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -3120,7 +3183,7 @@ impl ScrollHandle {
|
||||
}
|
||||
None
|
||||
}
|
||||
None => Some(active_item_index),
|
||||
None => Some(active_item),
|
||||
};
|
||||
state.active_item = active_item;
|
||||
}
|
||||
@@ -3154,6 +3217,21 @@ impl ScrollHandle {
|
||||
}
|
||||
}
|
||||
|
||||
/// Get the logical scroll bottom, based on a child index and a pixel offset.
|
||||
pub fn logical_scroll_bottom(&self) -> (usize, Pixels) {
|
||||
let ix = self.bottom_item();
|
||||
let state = self.0.borrow();
|
||||
|
||||
if let Some(child_bounds) = state.child_bounds.get(ix) {
|
||||
(
|
||||
ix,
|
||||
child_bounds.bottom() + state.offset.borrow().y - state.bounds.bottom(),
|
||||
)
|
||||
} else {
|
||||
(ix, px(0.))
|
||||
}
|
||||
}
|
||||
|
||||
/// Get the count of children for scrollable item.
|
||||
pub fn children_count(&self) -> usize {
|
||||
self.0.borrow().child_bounds.len()
|
||||
|
||||
@@ -180,7 +180,8 @@ impl StyledText {
|
||||
"Can't use `with_default_highlights` and `with_highlights`"
|
||||
);
|
||||
let runs = Self::compute_runs(&self.text, default_style, highlights);
|
||||
self.with_runs(runs)
|
||||
self.runs = Some(runs);
|
||||
self
|
||||
}
|
||||
|
||||
/// Set the styling attributes for the given text, as well as
|
||||
@@ -193,15 +194,7 @@ impl StyledText {
|
||||
self.runs.is_none(),
|
||||
"Can't use `with_highlights` and `with_default_highlights`"
|
||||
);
|
||||
self.delayed_highlights = Some(
|
||||
highlights
|
||||
.into_iter()
|
||||
.inspect(|(run, _)| {
|
||||
debug_assert!(self.text.is_char_boundary(run.start));
|
||||
debug_assert!(self.text.is_char_boundary(run.end));
|
||||
})
|
||||
.collect::<Vec<_>>(),
|
||||
);
|
||||
self.delayed_highlights = Some(highlights.into_iter().collect::<Vec<_>>());
|
||||
self
|
||||
}
|
||||
|
||||
@@ -214,10 +207,8 @@ impl StyledText {
|
||||
let mut ix = 0;
|
||||
for (range, highlight) in highlights {
|
||||
if ix < range.start {
|
||||
debug_assert!(text.is_char_boundary(range.start));
|
||||
runs.push(default_style.clone().to_run(range.start - ix));
|
||||
}
|
||||
debug_assert!(text.is_char_boundary(range.end));
|
||||
runs.push(
|
||||
default_style
|
||||
.clone()
|
||||
@@ -234,11 +225,6 @@ impl StyledText {
|
||||
|
||||
/// Set the text runs for this piece of text.
|
||||
pub fn with_runs(mut self, runs: Vec<TextRun>) -> Self {
|
||||
let mut text = &**self.text;
|
||||
for run in &runs {
|
||||
text = text.get(run.len..).expect("invalid text run");
|
||||
}
|
||||
assert!(text.is_empty(), "invalid text run");
|
||||
self.runs = Some(runs);
|
||||
self
|
||||
}
|
||||
|
||||
@@ -138,7 +138,11 @@ impl UniformListScrollHandle {
|
||||
})))
|
||||
}
|
||||
|
||||
/// Scroll the list so that the given item index is onscreen.
|
||||
/// Scroll the list so that the given item index is visible.
|
||||
///
|
||||
/// This uses non-strict scrolling: if the item is already fully visible, no scrolling occurs.
|
||||
/// If the item is out of view, it scrolls the minimum amount to bring it into view according
|
||||
/// to the strategy.
|
||||
pub fn scroll_to_item(&self, ix: usize, strategy: ScrollStrategy) {
|
||||
self.0.borrow_mut().deferred_scroll_to_item = Some(DeferredScrollToItem {
|
||||
item_index: ix,
|
||||
@@ -149,6 +153,9 @@ impl UniformListScrollHandle {
|
||||
}
|
||||
|
||||
/// Scroll the list so that the given item index is at scroll strategy position.
|
||||
///
|
||||
/// This uses strict scrolling: the item will always be scrolled to match the strategy position,
|
||||
/// even if it's already visible. Use this when you need precise positioning.
|
||||
pub fn scroll_to_item_strict(&self, ix: usize, strategy: ScrollStrategy) {
|
||||
self.0.borrow_mut().deferred_scroll_to_item = Some(DeferredScrollToItem {
|
||||
item_index: ix,
|
||||
@@ -158,11 +165,16 @@ impl UniformListScrollHandle {
|
||||
});
|
||||
}
|
||||
|
||||
/// Scroll the list to the given item index with an offset.
|
||||
/// Scroll the list to the given item index with an offset in number of items.
|
||||
///
|
||||
/// For ScrollStrategy::Top, the item will be placed at the offset position from the top.
|
||||
/// This uses non-strict scrolling: if the item is already visible within the offset region,
|
||||
/// no scrolling occurs.
|
||||
///
|
||||
/// For ScrollStrategy::Center, the item will be centered between offset and the last visible item.
|
||||
/// The offset parameter shrinks the effective viewport by the specified number of items
|
||||
/// from the corresponding edge, then applies the scroll strategy within that reduced viewport:
|
||||
/// - `ScrollStrategy::Top`: Shrinks from top, positions item at the new top
|
||||
/// - `ScrollStrategy::Center`: Shrinks from top, centers item in the reduced viewport
|
||||
/// - `ScrollStrategy::Bottom`: Shrinks from bottom, positions item at the new bottom
|
||||
pub fn scroll_to_item_with_offset(&self, ix: usize, strategy: ScrollStrategy, offset: usize) {
|
||||
self.0.borrow_mut().deferred_scroll_to_item = Some(DeferredScrollToItem {
|
||||
item_index: ix,
|
||||
@@ -172,6 +184,30 @@ impl UniformListScrollHandle {
|
||||
});
|
||||
}
|
||||
|
||||
/// Scroll the list so that the given item index is at the exact scroll strategy position with an offset.
|
||||
///
|
||||
/// This uses strict scrolling: the item will always be scrolled to match the strategy position,
|
||||
/// even if it's already visible.
|
||||
///
|
||||
/// The offset parameter shrinks the effective viewport by the specified number of items
|
||||
/// from the corresponding edge, then applies the scroll strategy within that reduced viewport:
|
||||
/// - `ScrollStrategy::Top`: Shrinks from top, positions item at the new top
|
||||
/// - `ScrollStrategy::Center`: Shrinks from top, centers item in the reduced viewport
|
||||
/// - `ScrollStrategy::Bottom`: Shrinks from bottom, positions item at the new bottom
|
||||
pub fn scroll_to_item_strict_with_offset(
|
||||
&self,
|
||||
ix: usize,
|
||||
strategy: ScrollStrategy,
|
||||
offset: usize,
|
||||
) {
|
||||
self.0.borrow_mut().deferred_scroll_to_item = Some(DeferredScrollToItem {
|
||||
item_index: ix,
|
||||
strategy,
|
||||
offset,
|
||||
scroll_strict: true,
|
||||
});
|
||||
}
|
||||
|
||||
/// Check if the list is flipped vertically.
|
||||
pub fn y_flipped(&self) -> bool {
|
||||
self.0.borrow().y_flipped
|
||||
@@ -392,7 +428,7 @@ impl Element for UniformList {
|
||||
{
|
||||
match deferred_scroll.strategy {
|
||||
ScrollStrategy::Top => {
|
||||
updated_scroll_offset.y = -item_top
|
||||
updated_scroll_offset.y = -(item_top - offset_pixels)
|
||||
.max(Pixels::ZERO)
|
||||
.min(content_height - list_height)
|
||||
.max(Pixels::ZERO);
|
||||
@@ -410,7 +446,8 @@ impl Element for UniformList {
|
||||
.max(Pixels::ZERO);
|
||||
}
|
||||
ScrollStrategy::Bottom => {
|
||||
updated_scroll_offset.y = -(item_bottom - list_height)
|
||||
updated_scroll_offset.y = -(item_bottom - list_height
|
||||
+ offset_pixels)
|
||||
.max(Pixels::ZERO)
|
||||
.min(content_height - list_height)
|
||||
.max(Pixels::ZERO);
|
||||
|
||||
@@ -1,67 +1,4 @@
|
||||
//! # Welcome to GPUI!
|
||||
//!
|
||||
//! GPUI is a hybrid immediate and retained mode, GPU accelerated, UI framework
|
||||
//! for Rust, designed to support a wide variety of applications.
|
||||
//!
|
||||
//! ## Getting Started
|
||||
//!
|
||||
//! GPUI is still in active development as we work on the Zed code editor and isn't yet on crates.io.
|
||||
//! You'll also need to use the latest version of stable rust. Add the following to your Cargo.toml:
|
||||
//!
|
||||
//! ```toml
|
||||
//! [dependencies]
|
||||
//! gpui = { git = "https://github.com/zed-industries/zed" }
|
||||
//! ```
|
||||
//!
|
||||
//! - [Ownership and data flow](_ownership_and_data_flow)
|
||||
//!
|
||||
//! Everything in GPUI starts with an [`Application`]. You can create one with [`Application::new`], and
|
||||
//! kick off your application by passing a callback to [`Application::run`]. Inside this callback,
|
||||
//! you can create a new window with [`App::open_window`], and register your first root
|
||||
//! view. See [gpui.rs](https://www.gpui.rs/) for a complete example.
|
||||
//!
|
||||
//! ## The Big Picture
|
||||
//!
|
||||
//! GPUI offers three different [registers](https://en.wikipedia.org/wiki/Register_(sociolinguistics)) depending on your needs:
|
||||
//!
|
||||
//! - State management and communication with [`Entity`]'s. Whenever you need to store application state
|
||||
//! that communicates between different parts of your application, you'll want to use GPUI's
|
||||
//! entities. Entities are owned by GPUI and are only accessible through an owned smart pointer
|
||||
//! similar to an [`std::rc::Rc`]. See [`app::Context`] for more information.
|
||||
//!
|
||||
//! - High level, declarative UI with views. All UI in GPUI starts with a view. A view is simply
|
||||
//! a [`Entity`] that can be rendered, by implementing the [`Render`] trait. At the start of each frame, GPUI
|
||||
//! will call this render method on the root view of a given window. Views build a tree of
|
||||
//! [`Element`]s, lay them out and style them with a tailwind-style API, and then give them to
|
||||
//! GPUI to turn into pixels. See the [`elements::Div`] element for an all purpose swiss-army
|
||||
//! knife for UI.
|
||||
//!
|
||||
//! - Low level, imperative UI with Elements. Elements are the building blocks of UI in GPUI, and they
|
||||
//! provide a nice wrapper around an imperative API that provides as much flexibility and control as
|
||||
//! you need. Elements have total control over how they and their child elements are rendered and
|
||||
//! can be used for making efficient views into large lists, implement custom layouting for a code editor,
|
||||
//! and anything else you can think of. See the [`elements`] module for more information.
|
||||
//!
|
||||
//! Each of these registers has one or more corresponding contexts that can be accessed from all GPUI services.
|
||||
//! This context is your main interface to GPUI, and is used extensively throughout the framework.
|
||||
//!
|
||||
//! ## Other Resources
|
||||
//!
|
||||
//! In addition to the systems above, GPUI provides a range of smaller services that are useful for building
|
||||
//! complex applications:
|
||||
//!
|
||||
//! - Actions are user-defined structs that are used for converting keystrokes into logical operations in your UI.
|
||||
//! Use this for implementing keyboard shortcuts, such as cmd-q (See `action` module for more information).
|
||||
//! - Platform services, such as `quit the app` or `open a URL` are available as methods on the [`app::App`].
|
||||
//! - An async executor that is integrated with the platform's event loop. See the [`executor`] module for more information.,
|
||||
//! - The [`gpui::test`](macro@test) macro provides a convenient way to write tests for your GPUI applications. Tests also have their
|
||||
//! own kind of context, a [`TestAppContext`] which provides ways of simulating common platform input. See [`TestAppContext`]
|
||||
//! and [`mod@test`] modules for more details.
|
||||
//!
|
||||
//! Currently, the best way to learn about these APIs is to read the Zed source code, ask us about it at a fireside hack, or drop
|
||||
//! a question in the [Zed Discord](https://zed.dev/community-links). We're working on improving the documentation, creating more examples,
|
||||
//! and will be publishing more guides to GPUI on our [blog](https://zed.dev/blog).
|
||||
|
||||
#![doc = include_str!("../README.md")]
|
||||
#![deny(missing_docs)]
|
||||
#![allow(clippy::type_complexity)] // Not useful, GPUI makes heavy use of callbacks
|
||||
#![allow(clippy::collapsible_else_if)] // False positives in platform specific code
|
||||
|
||||
@@ -1213,6 +1213,11 @@ impl WindowBounds {
|
||||
WindowBounds::Fullscreen(bounds) => *bounds,
|
||||
}
|
||||
}
|
||||
|
||||
/// Creates a new window bounds that centers the window on the screen.
|
||||
pub fn centered(size: Size<Pixels>, cx: &App) -> Self {
|
||||
WindowBounds::Windowed(Bounds::centered(None, size, cx))
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for WindowOptions {
|
||||
@@ -1263,6 +1268,9 @@ pub enum WindowKind {
|
||||
/// A window that appears above all other windows, usually used for alerts or popups
|
||||
/// use sparingly!
|
||||
PopUp,
|
||||
|
||||
/// A floating window that appears on top of its parent window
|
||||
Floating,
|
||||
}
|
||||
|
||||
/// The appearance of the window, as defined by the operating system.
|
||||
|
||||
@@ -172,6 +172,12 @@ fn distance_from_clip_rect(unit_vertex: vec2<f32>, bounds: Bounds, clip_bounds:
|
||||
return distance_from_clip_rect_impl(position, clip_bounds);
|
||||
}
|
||||
|
||||
fn distance_from_clip_rect_transformed(unit_vertex: vec2<f32>, bounds: Bounds, clip_bounds: Bounds, transform: TransformationMatrix) -> vec4<f32> {
|
||||
let position = unit_vertex * vec2<f32>(bounds.size) + bounds.origin;
|
||||
let transformed = transpose(transform.rotation_scale) * position + transform.translation;
|
||||
return distance_from_clip_rect_impl(transformed, clip_bounds);
|
||||
}
|
||||
|
||||
// https://gamedev.stackexchange.com/questions/92015/optimized-linear-to-srgb-glsl
|
||||
fn srgb_to_linear(srgb: vec3<f32>) -> vec3<f32> {
|
||||
let cutoff = srgb < vec3<f32>(0.04045);
|
||||
@@ -677,7 +683,24 @@ fn fs_quad(input: QuadVarying) -> @location(0) vec4<f32> {
|
||||
let is_horizontal =
|
||||
corner_center_to_point.x <
|
||||
corner_center_to_point.y;
|
||||
let border_width = select(border.y, border.x, is_horizontal);
|
||||
|
||||
// When applying dashed borders to just some, not all, the sides.
|
||||
// The way we chose border widths above sometimes comes with a 0 width value.
|
||||
// So we choose again to avoid division by zero.
|
||||
// TODO: A better solution exists taking a look at the whole file.
|
||||
// this does not fix single dashed borders at the corners
|
||||
let dashed_border = vec2<f32>(
|
||||
max(
|
||||
quad.border_widths.bottom,
|
||||
quad.border_widths.top,
|
||||
),
|
||||
max(
|
||||
quad.border_widths.right,
|
||||
quad.border_widths.left,
|
||||
)
|
||||
);
|
||||
|
||||
let border_width = select(dashed_border.y, dashed_border.x, is_horizontal);
|
||||
dash_velocity = dv_numerator / border_width;
|
||||
t = select(point.y, point.x, is_horizontal) * dash_velocity;
|
||||
max_t = select(size.y, size.x, is_horizontal) * dash_velocity;
|
||||
@@ -1150,7 +1173,7 @@ fn vs_mono_sprite(@builtin(vertex_index) vertex_id: u32, @builtin(instance_index
|
||||
|
||||
out.tile_position = to_tile_position(unit_vertex, sprite.tile);
|
||||
out.color = hsla_to_rgba(sprite.color);
|
||||
out.clip_distances = distance_from_clip_rect(unit_vertex, sprite.bounds, sprite.content_mask);
|
||||
out.clip_distances = distance_from_clip_rect_transformed(unit_vertex, sprite.bounds, sprite.content_mask, sprite.transformation);
|
||||
return out;
|
||||
}
|
||||
|
||||
|
||||
@@ -73,6 +73,13 @@ pub trait LinuxClient {
|
||||
fn active_window(&self) -> Option<AnyWindowHandle>;
|
||||
fn window_stack(&self) -> Option<Vec<AnyWindowHandle>>;
|
||||
fn run(&self);
|
||||
|
||||
#[cfg(any(feature = "wayland", feature = "x11"))]
|
||||
fn window_identifier(
|
||||
&self,
|
||||
) -> impl Future<Output = Option<ashpd::WindowIdentifier>> + Send + 'static {
|
||||
std::future::ready::<Option<ashpd::WindowIdentifier>>(None)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
@@ -290,6 +297,9 @@ impl<P: LinuxClient + 'static> Platform for P {
|
||||
#[cfg(not(any(feature = "wayland", feature = "x11")))]
|
||||
let _ = (done_tx.send(Ok(None)), options);
|
||||
|
||||
#[cfg(any(feature = "wayland", feature = "x11"))]
|
||||
let identifier = self.window_identifier();
|
||||
|
||||
#[cfg(any(feature = "wayland", feature = "x11"))]
|
||||
self.foreground_executor()
|
||||
.spawn(async move {
|
||||
@@ -300,6 +310,7 @@ impl<P: LinuxClient + 'static> Platform for P {
|
||||
};
|
||||
|
||||
let request = match ashpd::desktop::file_chooser::OpenFileRequest::default()
|
||||
.identifier(identifier.await)
|
||||
.modal(true)
|
||||
.title(title)
|
||||
.accept_label(options.prompt.as_ref().map(crate::SharedString::as_str))
|
||||
@@ -346,6 +357,9 @@ impl<P: LinuxClient + 'static> Platform for P {
|
||||
#[cfg(not(any(feature = "wayland", feature = "x11")))]
|
||||
let _ = (done_tx.send(Ok(None)), directory, suggested_name);
|
||||
|
||||
#[cfg(any(feature = "wayland", feature = "x11"))]
|
||||
let identifier = self.window_identifier();
|
||||
|
||||
#[cfg(any(feature = "wayland", feature = "x11"))]
|
||||
self.foreground_executor()
|
||||
.spawn({
|
||||
@@ -355,6 +369,7 @@ impl<P: LinuxClient + 'static> Platform for P {
|
||||
async move {
|
||||
let mut request_builder =
|
||||
ashpd::desktop::file_chooser::SaveFileRequest::default()
|
||||
.identifier(identifier.await)
|
||||
.modal(true)
|
||||
.title("Save File")
|
||||
.current_folder(directory)
|
||||
|
||||
@@ -7,6 +7,7 @@ use std::{
|
||||
time::{Duration, Instant},
|
||||
};
|
||||
|
||||
use ashpd::WindowIdentifier;
|
||||
use calloop::{
|
||||
EventLoop, LoopHandle,
|
||||
timer::{TimeoutAction, Timer},
|
||||
@@ -694,6 +695,8 @@ impl LinuxClient for WaylandClient {
|
||||
) -> anyhow::Result<Box<dyn PlatformWindow>> {
|
||||
let mut state = self.0.borrow_mut();
|
||||
|
||||
let parent = state.keyboard_focused_window.as_ref().map(|w| w.toplevel());
|
||||
|
||||
let (window, surface_id) = WaylandWindow::new(
|
||||
handle,
|
||||
state.globals.clone(),
|
||||
@@ -701,6 +704,7 @@ impl LinuxClient for WaylandClient {
|
||||
WaylandClientStatePtr(Rc::downgrade(&self.0)),
|
||||
params,
|
||||
state.common.appearance,
|
||||
parent,
|
||||
)?;
|
||||
state.windows.insert(surface_id, window.0.clone());
|
||||
|
||||
@@ -858,6 +862,20 @@ impl LinuxClient for WaylandClient {
|
||||
fn compositor_name(&self) -> &'static str {
|
||||
"Wayland"
|
||||
}
|
||||
|
||||
fn window_identifier(&self) -> impl Future<Output = Option<WindowIdentifier>> + Send + 'static {
|
||||
async fn inner(surface: Option<wl_surface::WlSurface>) -> Option<WindowIdentifier> {
|
||||
if let Some(surface) = surface {
|
||||
ashpd::WindowIdentifier::from_wayland(&surface).await
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
let client_state = self.0.borrow();
|
||||
let active_window = client_state.keyboard_focused_window.as_ref();
|
||||
inner(active_window.map(|aw| aw.surface()))
|
||||
}
|
||||
}
|
||||
|
||||
impl Dispatch<wl_registry::WlRegistry, GlobalListContents> for WaylandClientStatePtr {
|
||||
|
||||
@@ -14,14 +14,16 @@ use raw_window_handle as rwh;
|
||||
use wayland_backend::client::ObjectId;
|
||||
use wayland_client::WEnum;
|
||||
use wayland_client::{Proxy, protocol::wl_surface};
|
||||
use wayland_protocols::wp::fractional_scale::v1::client::wp_fractional_scale_v1;
|
||||
use wayland_protocols::wp::viewporter::client::wp_viewport;
|
||||
use wayland_protocols::xdg::decoration::zv1::client::zxdg_toplevel_decoration_v1;
|
||||
use wayland_protocols::xdg::shell::client::xdg_surface;
|
||||
use wayland_protocols::xdg::shell::client::xdg_toplevel::{self};
|
||||
use wayland_protocols::{
|
||||
wp::fractional_scale::v1::client::wp_fractional_scale_v1,
|
||||
xdg::shell::client::xdg_toplevel::XdgToplevel,
|
||||
};
|
||||
use wayland_protocols_plasma::blur::client::org_kde_kwin_blur;
|
||||
|
||||
use crate::scene::Scene;
|
||||
use crate::{
|
||||
AnyWindowHandle, Bounds, Decorations, Globals, GpuSpecs, Modifiers, Output, Pixels,
|
||||
PlatformDisplay, PlatformInput, Point, PromptButton, PromptLevel, RequestFrameOptions,
|
||||
@@ -36,6 +38,7 @@ use crate::{
|
||||
linux::wayland::{display::WaylandDisplay, serial::SerialKind},
|
||||
},
|
||||
};
|
||||
use crate::{WindowKind, scene::Scene};
|
||||
|
||||
#[derive(Default)]
|
||||
pub(crate) struct Callbacks {
|
||||
@@ -276,6 +279,7 @@ impl WaylandWindow {
|
||||
client: WaylandClientStatePtr,
|
||||
params: WindowParams,
|
||||
appearance: WindowAppearance,
|
||||
parent: Option<XdgToplevel>,
|
||||
) -> anyhow::Result<(Self, ObjectId)> {
|
||||
let surface = globals.compositor.create_surface(&globals.qh, ());
|
||||
let xdg_surface = globals
|
||||
@@ -283,6 +287,10 @@ impl WaylandWindow {
|
||||
.get_xdg_surface(&surface, &globals.qh, surface.id());
|
||||
let toplevel = xdg_surface.get_toplevel(&globals.qh, surface.id());
|
||||
|
||||
if params.kind == WindowKind::Floating {
|
||||
toplevel.set_parent(parent.as_ref());
|
||||
}
|
||||
|
||||
if let Some(size) = params.window_min_size {
|
||||
toplevel.set_min_size(size.width.0 as i32, size.height.0 as i32);
|
||||
}
|
||||
@@ -337,6 +345,10 @@ impl WaylandWindowStatePtr {
|
||||
self.state.borrow().surface.clone()
|
||||
}
|
||||
|
||||
pub fn toplevel(&self) -> xdg_toplevel::XdgToplevel {
|
||||
self.state.borrow().toplevel.clone()
|
||||
}
|
||||
|
||||
pub fn ptr_eq(&self, other: &Self) -> bool {
|
||||
Rc::ptr_eq(&self.state, &other.state)
|
||||
}
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
use crate::{Capslock, xcb_flush};
|
||||
use anyhow::{Context as _, anyhow};
|
||||
use ashpd::WindowIdentifier;
|
||||
use calloop::{
|
||||
EventLoop, LoopHandle, RegistrationToken,
|
||||
generic::{FdWrapper, Generic},
|
||||
@@ -1447,6 +1448,10 @@ impl LinuxClient for X11Client {
|
||||
params: WindowParams,
|
||||
) -> anyhow::Result<Box<dyn PlatformWindow>> {
|
||||
let mut state = self.0.borrow_mut();
|
||||
let parent_window = state
|
||||
.keyboard_focused_window
|
||||
.and_then(|focused_window| state.windows.get(&focused_window))
|
||||
.map(|window| window.window.x_window);
|
||||
let x_window = state
|
||||
.xcb_connection
|
||||
.generate_id()
|
||||
@@ -1465,6 +1470,7 @@ impl LinuxClient for X11Client {
|
||||
&state.atoms,
|
||||
state.scale_factor,
|
||||
state.common.appearance,
|
||||
parent_window,
|
||||
)?;
|
||||
check_reply(
|
||||
|| "Failed to set XdndAware property",
|
||||
@@ -1652,6 +1658,16 @@ impl LinuxClient for X11Client {
|
||||
|
||||
Some(handles)
|
||||
}
|
||||
|
||||
fn window_identifier(&self) -> impl Future<Output = Option<WindowIdentifier>> + Send + 'static {
|
||||
let state = self.0.borrow();
|
||||
state
|
||||
.keyboard_focused_window
|
||||
.and_then(|focused_window| state.windows.get(&focused_window))
|
||||
.map(|window| window.window.x_window as u64)
|
||||
.map(|x_window| std::future::ready(Some(WindowIdentifier::from_xid(x_window))))
|
||||
.unwrap_or(std::future::ready(None))
|
||||
}
|
||||
}
|
||||
|
||||
impl X11ClientState {
|
||||
|
||||
@@ -57,6 +57,7 @@ x11rb::atom_manager! {
|
||||
WM_PROTOCOLS,
|
||||
WM_DELETE_WINDOW,
|
||||
WM_CHANGE_STATE,
|
||||
WM_TRANSIENT_FOR,
|
||||
_NET_WM_PID,
|
||||
_NET_WM_NAME,
|
||||
_NET_WM_STATE,
|
||||
@@ -72,6 +73,7 @@ x11rb::atom_manager! {
|
||||
_NET_WM_MOVERESIZE,
|
||||
_NET_WM_WINDOW_TYPE,
|
||||
_NET_WM_WINDOW_TYPE_NOTIFICATION,
|
||||
_NET_WM_WINDOW_TYPE_DIALOG,
|
||||
_NET_WM_SYNC,
|
||||
_NET_SUPPORTED,
|
||||
_MOTIF_WM_HINTS,
|
||||
@@ -284,7 +286,7 @@ pub(crate) struct X11WindowStatePtr {
|
||||
pub state: Rc<RefCell<X11WindowState>>,
|
||||
pub(crate) callbacks: Rc<RefCell<Callbacks>>,
|
||||
xcb: Rc<XCBConnection>,
|
||||
x_window: xproto::Window,
|
||||
pub(crate) x_window: xproto::Window,
|
||||
}
|
||||
|
||||
impl rwh::HasWindowHandle for RawWindow {
|
||||
@@ -392,6 +394,7 @@ impl X11WindowState {
|
||||
atoms: &XcbAtoms,
|
||||
scale_factor: f32,
|
||||
appearance: WindowAppearance,
|
||||
parent_window: Option<xproto::Window>,
|
||||
) -> anyhow::Result<Self> {
|
||||
let x_screen_index = params
|
||||
.display_id
|
||||
@@ -529,6 +532,7 @@ impl X11WindowState {
|
||||
),
|
||||
)?;
|
||||
}
|
||||
|
||||
if params.kind == WindowKind::PopUp {
|
||||
check_reply(
|
||||
|| "X11 ChangeProperty32 setting window type for pop-up failed.",
|
||||
@@ -542,6 +546,38 @@ impl X11WindowState {
|
||||
)?;
|
||||
}
|
||||
|
||||
if params.kind == WindowKind::Floating {
|
||||
if let Some(parent_window) = parent_window {
|
||||
// WM_TRANSIENT_FOR hint indicating the main application window. For floating windows, we set
|
||||
// a parent window (WM_TRANSIENT_FOR) such that the window manager knows where to
|
||||
// place the floating window in relation to the main window.
|
||||
// https://specifications.freedesktop.org/wm-spec/1.4/ar01s05.html
|
||||
check_reply(
|
||||
|| "X11 ChangeProperty32 setting WM_TRANSIENT_FOR for floating window failed.",
|
||||
xcb.change_property32(
|
||||
xproto::PropMode::REPLACE,
|
||||
x_window,
|
||||
atoms.WM_TRANSIENT_FOR,
|
||||
xproto::AtomEnum::WINDOW,
|
||||
&[parent_window],
|
||||
),
|
||||
)?;
|
||||
}
|
||||
|
||||
// _NET_WM_WINDOW_TYPE_DIALOG indicates that this is a dialog (floating) window
|
||||
// https://specifications.freedesktop.org/wm-spec/1.4/ar01s05.html
|
||||
check_reply(
|
||||
|| "X11 ChangeProperty32 setting window type for floating window failed.",
|
||||
xcb.change_property32(
|
||||
xproto::PropMode::REPLACE,
|
||||
x_window,
|
||||
atoms._NET_WM_WINDOW_TYPE,
|
||||
xproto::AtomEnum::ATOM,
|
||||
&[atoms._NET_WM_WINDOW_TYPE_DIALOG],
|
||||
),
|
||||
)?;
|
||||
}
|
||||
|
||||
check_reply(
|
||||
|| "X11 ChangeProperty32 setting protocols failed.",
|
||||
xcb.change_property32(
|
||||
@@ -737,6 +773,7 @@ impl X11Window {
|
||||
atoms: &XcbAtoms,
|
||||
scale_factor: f32,
|
||||
appearance: WindowAppearance,
|
||||
parent_window: Option<xproto::Window>,
|
||||
) -> anyhow::Result<Self> {
|
||||
let ptr = X11WindowStatePtr {
|
||||
state: Rc::new(RefCell::new(X11WindowState::new(
|
||||
@@ -752,6 +789,7 @@ impl X11Window {
|
||||
atoms,
|
||||
scale_factor,
|
||||
appearance,
|
||||
parent_window,
|
||||
)?)),
|
||||
callbacks: Rc::new(RefCell::new(Callbacks::default())),
|
||||
xcb: xcb.clone(),
|
||||
|
||||
@@ -18,6 +18,8 @@ float2 to_tile_position(float2 unit_vertex, AtlasTile tile,
|
||||
constant Size_DevicePixels *atlas_size);
|
||||
float4 distance_from_clip_rect(float2 unit_vertex, Bounds_ScaledPixels bounds,
|
||||
Bounds_ScaledPixels clip_bounds);
|
||||
float4 distance_from_clip_rect_transformed(float2 unit_vertex, Bounds_ScaledPixels bounds,
|
||||
Bounds_ScaledPixels clip_bounds, TransformationMatrix transformation);
|
||||
float corner_dash_velocity(float dv1, float dv2);
|
||||
float dash_alpha(float t, float period, float length, float dash_velocity,
|
||||
float antialias_threshold);
|
||||
@@ -243,7 +245,15 @@ fragment float4 quad_fragment(QuadFragmentInput input [[stage_in]],
|
||||
// out on each straight line, rather than around the whole
|
||||
// perimeter. This way each line starts and ends with a dash.
|
||||
bool is_horizontal = corner_center_to_point.x < corner_center_to_point.y;
|
||||
float border_width = is_horizontal ? border.x : border.y;
|
||||
|
||||
// Choosing the right border width for dashed borders.
|
||||
// TODO: A better solution exists taking a look at the whole file.
|
||||
// this does not fix single dashed borders at the corners
|
||||
float2 dashed_border = float2(
|
||||
fmax(quad.border_widths.bottom, quad.border_widths.top),
|
||||
fmax(quad.border_widths.right, quad.border_widths.left));
|
||||
|
||||
float border_width = is_horizontal ? dashed_border.x : dashed_border.y;
|
||||
dash_velocity = dv_numerator / border_width;
|
||||
t = is_horizontal ? point.x : point.y;
|
||||
t *= dash_velocity;
|
||||
@@ -599,13 +609,14 @@ struct MonochromeSpriteVertexOutput {
|
||||
float4 position [[position]];
|
||||
float2 tile_position;
|
||||
float4 color [[flat]];
|
||||
float clip_distance [[clip_distance]][4];
|
||||
float4 clip_distance;
|
||||
};
|
||||
|
||||
struct MonochromeSpriteFragmentInput {
|
||||
float4 position [[position]];
|
||||
float2 tile_position;
|
||||
float4 color [[flat]];
|
||||
float4 clip_distance;
|
||||
};
|
||||
|
||||
vertex MonochromeSpriteVertexOutput monochrome_sprite_vertex(
|
||||
@@ -620,8 +631,8 @@ vertex MonochromeSpriteVertexOutput monochrome_sprite_vertex(
|
||||
MonochromeSprite sprite = sprites[sprite_id];
|
||||
float4 device_position =
|
||||
to_device_position_transformed(unit_vertex, sprite.bounds, sprite.transformation, viewport_size);
|
||||
float4 clip_distance = distance_from_clip_rect(unit_vertex, sprite.bounds,
|
||||
sprite.content_mask.bounds);
|
||||
float4 clip_distance = distance_from_clip_rect_transformed(unit_vertex, sprite.bounds,
|
||||
sprite.content_mask.bounds, sprite.transformation);
|
||||
float2 tile_position = to_tile_position(unit_vertex, sprite.tile, atlas_size);
|
||||
float4 color = hsla_to_rgba(sprite.color);
|
||||
return MonochromeSpriteVertexOutput{
|
||||
@@ -635,6 +646,10 @@ fragment float4 monochrome_sprite_fragment(
|
||||
MonochromeSpriteFragmentInput input [[stage_in]],
|
||||
constant MonochromeSprite *sprites [[buffer(SpriteInputIndex_Sprites)]],
|
||||
texture2d<float> atlas_texture [[texture(SpriteInputIndex_AtlasTexture)]]) {
|
||||
if (any(input.clip_distance < float4(0.0))) {
|
||||
return float4(0.0);
|
||||
}
|
||||
|
||||
constexpr sampler atlas_texture_sampler(mag_filter::linear,
|
||||
min_filter::linear);
|
||||
float4 sample =
|
||||
@@ -1096,6 +1111,23 @@ float4 distance_from_clip_rect(float2 unit_vertex, Bounds_ScaledPixels bounds,
|
||||
clip_bounds.origin.y + clip_bounds.size.height - position.y);
|
||||
}
|
||||
|
||||
float4 distance_from_clip_rect_transformed(float2 unit_vertex, Bounds_ScaledPixels bounds,
|
||||
Bounds_ScaledPixels clip_bounds, TransformationMatrix transformation) {
|
||||
float2 position =
|
||||
unit_vertex * float2(bounds.size.width, bounds.size.height) +
|
||||
float2(bounds.origin.x, bounds.origin.y);
|
||||
float2 transformed_position = float2(0, 0);
|
||||
transformed_position[0] = position[0] * transformation.rotation_scale[0][0] + position[1] * transformation.rotation_scale[0][1];
|
||||
transformed_position[1] = position[0] * transformation.rotation_scale[1][0] + position[1] * transformation.rotation_scale[1][1];
|
||||
transformed_position[0] += transformation.translation[0];
|
||||
transformed_position[1] += transformation.translation[1];
|
||||
|
||||
return float4(transformed_position.x - clip_bounds.origin.x,
|
||||
clip_bounds.origin.x + clip_bounds.size.width - transformed_position.x,
|
||||
transformed_position.y - clip_bounds.origin.y,
|
||||
clip_bounds.origin.y + clip_bounds.size.height - transformed_position.y);
|
||||
}
|
||||
|
||||
float4 over(float4 below, float4 above) {
|
||||
float4 result;
|
||||
float alpha = above.a + below.a * (1.0 - above.a);
|
||||
|
||||
@@ -430,6 +430,8 @@ impl MacTextSystemState {
|
||||
fn layout_line(&mut self, text: &str, font_size: Pixels, font_runs: &[FontRun]) -> LineLayout {
|
||||
// Construct the attributed string, converting UTF8 ranges to UTF16 ranges.
|
||||
let mut string = CFMutableAttributedString::new();
|
||||
let mut max_ascent = 0.0f32;
|
||||
let mut max_descent = 0.0f32;
|
||||
{
|
||||
string.replace_str(&CFString::new(text), CFRange::init(0, 0));
|
||||
let utf16_line_len = string.char_len() as usize;
|
||||
@@ -451,6 +453,11 @@ impl MacTextSystemState {
|
||||
|
||||
let font: &FontKitFont = &self.fonts[run.font_id.0];
|
||||
|
||||
let font_metrics = font.metrics();
|
||||
let font_scale = font_size.0 / font_metrics.units_per_em as f32;
|
||||
max_ascent = max_ascent.max(font_metrics.ascent * font_scale);
|
||||
max_descent = max_descent.max(-font_metrics.descent * font_scale);
|
||||
|
||||
unsafe {
|
||||
string.set_attribute(
|
||||
cf_range,
|
||||
@@ -508,8 +515,8 @@ impl MacTextSystemState {
|
||||
runs,
|
||||
font_size,
|
||||
width: typographic_bounds.width.into(),
|
||||
ascent: typographic_bounds.ascent.into(),
|
||||
descent: typographic_bounds.descent.into(),
|
||||
ascent: max_ascent.into(),
|
||||
descent: max_descent.into(),
|
||||
len: text.len(),
|
||||
}
|
||||
}
|
||||
|
||||
@@ -618,7 +618,7 @@ impl MacWindow {
|
||||
}
|
||||
|
||||
let native_window: id = match kind {
|
||||
WindowKind::Normal => msg_send![WINDOW_CLASS, alloc],
|
||||
WindowKind::Normal | WindowKind::Floating => msg_send![WINDOW_CLASS, alloc],
|
||||
WindowKind::PopUp => {
|
||||
style_mask |= NSWindowStyleMaskNonactivatingPanel;
|
||||
msg_send![PANEL_CLASS, alloc]
|
||||
@@ -776,7 +776,7 @@ impl MacWindow {
|
||||
native_window.makeFirstResponder_(native_view);
|
||||
|
||||
match kind {
|
||||
WindowKind::Normal => {
|
||||
WindowKind::Normal | WindowKind::Floating => {
|
||||
native_window.setLevel_(NSNormalWindowLevel);
|
||||
native_window.setAcceptsMouseMovedEvents_(YES);
|
||||
|
||||
|
||||
@@ -3,7 +3,6 @@ use std::sync::LazyLock;
|
||||
use anyhow::Result;
|
||||
use collections::{FxHashMap, FxHashSet};
|
||||
use itertools::Itertools;
|
||||
use util::ResultExt;
|
||||
use windows::Win32::{
|
||||
Foundation::{HANDLE, HGLOBAL},
|
||||
System::{
|
||||
@@ -76,14 +75,18 @@ enum ClipboardFormatType {
|
||||
}
|
||||
|
||||
pub(crate) fn write_to_clipboard(item: ClipboardItem) {
|
||||
write_to_clipboard_inner(item).log_err();
|
||||
unsafe { CloseClipboard().log_err() };
|
||||
with_clipboard(|| write_to_clipboard_inner(item));
|
||||
}
|
||||
|
||||
pub(crate) fn read_from_clipboard() -> Option<ClipboardItem> {
|
||||
let result = read_from_clipboard_inner();
|
||||
unsafe { CloseClipboard().log_err() };
|
||||
result
|
||||
with_clipboard(|| {
|
||||
with_best_match_format(|item_format| match format_to_type(item_format) {
|
||||
ClipboardFormatType::Text => read_string_from_clipboard(),
|
||||
ClipboardFormatType::Image => read_image_from_clipboard(item_format),
|
||||
ClipboardFormatType::Files => read_files_from_clipboard(),
|
||||
})
|
||||
})
|
||||
.flatten()
|
||||
}
|
||||
|
||||
pub(crate) fn with_file_names<F>(hdrop: HDROP, mut f: F)
|
||||
@@ -96,11 +99,33 @@ where
|
||||
let mut buffer = vec![0u16; filename_length + 1];
|
||||
let ret = unsafe { DragQueryFileW(hdrop, file_index, Some(buffer.as_mut_slice())) };
|
||||
if ret == 0 {
|
||||
log::error!("unable to read file name");
|
||||
log::error!("unable to read file name of dragged file");
|
||||
continue;
|
||||
}
|
||||
if let Some(file_name) = String::from_utf16(&buffer[0..filename_length]).log_err() {
|
||||
f(file_name);
|
||||
match String::from_utf16(&buffer[0..filename_length]) {
|
||||
Ok(file_name) => f(file_name),
|
||||
Err(e) => {
|
||||
log::error!("dragged file name is not UTF-16: {}", e)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn with_clipboard<F, T>(f: F) -> Option<T>
|
||||
where
|
||||
F: FnOnce() -> T,
|
||||
{
|
||||
match unsafe { OpenClipboard(None) } {
|
||||
Ok(()) => {
|
||||
let result = f();
|
||||
if let Err(e) = unsafe { CloseClipboard() } {
|
||||
log::error!("Failed to close clipboard: {e}",);
|
||||
}
|
||||
Some(result)
|
||||
}
|
||||
Err(e) => {
|
||||
log::error!("Failed to open clipboard: {e}",);
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -124,7 +149,6 @@ fn format_to_type(item_format: u32) -> &'static ClipboardFormatType {
|
||||
// Currently, we only write the first item.
|
||||
fn write_to_clipboard_inner(item: ClipboardItem) -> Result<()> {
|
||||
unsafe {
|
||||
OpenClipboard(None)?;
|
||||
EmptyClipboard()?;
|
||||
}
|
||||
match item.entries().first() {
|
||||
@@ -215,15 +239,6 @@ fn convert_image_to_png_format(bytes: &[u8], image_format: ImageFormat) -> Resul
|
||||
Ok(output_buf)
|
||||
}
|
||||
|
||||
fn read_from_clipboard_inner() -> Option<ClipboardItem> {
|
||||
unsafe { OpenClipboard(None) }.log_err()?;
|
||||
with_best_match_format(|item_format| match format_to_type(item_format) {
|
||||
ClipboardFormatType::Text => read_string_from_clipboard(),
|
||||
ClipboardFormatType::Image => read_image_from_clipboard(item_format),
|
||||
ClipboardFormatType::Files => read_files_from_clipboard(),
|
||||
})
|
||||
}
|
||||
|
||||
// Here, we enumerate all formats on the clipboard and find the first one that we can process.
|
||||
// The reason we don't use `GetPriorityClipboardFormat` is that it sometimes returns the
|
||||
// wrong format.
|
||||
@@ -266,7 +281,7 @@ where
|
||||
}
|
||||
|
||||
fn read_string_from_clipboard() -> Option<ClipboardEntry> {
|
||||
let text = with_clipboard_data(CF_UNICODETEXT.0 as u32, |data_ptr| {
|
||||
let text = with_clipboard_data(CF_UNICODETEXT.0 as u32, |data_ptr, _| {
|
||||
let pcwstr = PCWSTR(data_ptr as *const u16);
|
||||
String::from_utf16_lossy(unsafe { pcwstr.as_wide() })
|
||||
})?;
|
||||
@@ -290,20 +305,22 @@ fn read_hash_from_clipboard() -> Option<u64> {
|
||||
if unsafe { IsClipboardFormatAvailable(*CLIPBOARD_HASH_FORMAT).is_err() } {
|
||||
return None;
|
||||
}
|
||||
with_clipboard_data(*CLIPBOARD_HASH_FORMAT, |data_ptr| {
|
||||
with_clipboard_data(*CLIPBOARD_HASH_FORMAT, |data_ptr, size| {
|
||||
if size < 8 {
|
||||
return None;
|
||||
}
|
||||
let hash_bytes: [u8; 8] = unsafe {
|
||||
std::slice::from_raw_parts(data_ptr.cast::<u8>(), 8)
|
||||
.to_vec()
|
||||
.try_into()
|
||||
.log_err()
|
||||
.ok()
|
||||
}?;
|
||||
Some(u64::from_ne_bytes(hash_bytes))
|
||||
})?
|
||||
}
|
||||
|
||||
fn read_metadata_from_clipboard() -> Option<String> {
|
||||
unsafe { IsClipboardFormatAvailable(*CLIPBOARD_METADATA_FORMAT).log_err()? };
|
||||
with_clipboard_data(*CLIPBOARD_METADATA_FORMAT, |data_ptr| {
|
||||
unsafe { IsClipboardFormatAvailable(*CLIPBOARD_METADATA_FORMAT).ok()? };
|
||||
with_clipboard_data(*CLIPBOARD_METADATA_FORMAT, |data_ptr, _size| {
|
||||
let pcwstr = PCWSTR(data_ptr as *const u16);
|
||||
String::from_utf16_lossy(unsafe { pcwstr.as_wide() })
|
||||
})
|
||||
@@ -320,7 +337,7 @@ fn format_number_to_image_format(format_number: u32) -> Option<&'static ImageFor
|
||||
}
|
||||
|
||||
fn read_image_for_type(format_number: u32, format: ImageFormat) -> Option<ClipboardEntry> {
|
||||
let (bytes, id) = with_clipboard_data_and_size(format_number, |data_ptr, size| {
|
||||
let (bytes, id) = with_clipboard_data(format_number, |data_ptr, size| {
|
||||
let bytes = unsafe { std::slice::from_raw_parts(data_ptr as *mut u8 as _, size).to_vec() };
|
||||
let id = hash(&bytes);
|
||||
(bytes, id)
|
||||
@@ -329,7 +346,7 @@ fn read_image_for_type(format_number: u32, format: ImageFormat) -> Option<Clipbo
|
||||
}
|
||||
|
||||
fn read_files_from_clipboard() -> Option<ClipboardEntry> {
|
||||
let text = with_clipboard_data(CF_HDROP.0 as u32, |data_ptr| {
|
||||
let text = with_clipboard_data(CF_HDROP.0 as u32, |data_ptr, _size| {
|
||||
let hdrop = HDROP(data_ptr);
|
||||
let mut filenames = String::new();
|
||||
with_file_names(hdrop, |file_name| {
|
||||
@@ -344,25 +361,14 @@ fn read_files_from_clipboard() -> Option<ClipboardEntry> {
|
||||
}
|
||||
|
||||
fn with_clipboard_data<F, R>(format: u32, f: F) -> Option<R>
|
||||
where
|
||||
F: FnOnce(*mut std::ffi::c_void) -> R,
|
||||
{
|
||||
let global = HGLOBAL(unsafe { GetClipboardData(format).log_err() }?.0);
|
||||
let data_ptr = unsafe { GlobalLock(global) };
|
||||
let result = f(data_ptr);
|
||||
unsafe { GlobalUnlock(global).log_err() };
|
||||
Some(result)
|
||||
}
|
||||
|
||||
fn with_clipboard_data_and_size<F, R>(format: u32, f: F) -> Option<R>
|
||||
where
|
||||
F: FnOnce(*mut std::ffi::c_void, usize) -> R,
|
||||
{
|
||||
let global = HGLOBAL(unsafe { GetClipboardData(format).log_err() }?.0);
|
||||
let global = HGLOBAL(unsafe { GetClipboardData(format).ok() }?.0);
|
||||
let size = unsafe { GlobalSize(global) };
|
||||
let data_ptr = unsafe { GlobalLock(global) };
|
||||
let result = f(data_ptr, size);
|
||||
unsafe { GlobalUnlock(global).log_err() };
|
||||
unsafe { GlobalUnlock(global).ok() };
|
||||
Some(result)
|
||||
}
|
||||
|
||||
|
||||
@@ -14,10 +14,11 @@ use windows::Win32::{
|
||||
},
|
||||
Dxgi::{
|
||||
CreateDXGIFactory2, DXGI_CREATE_FACTORY_DEBUG, DXGI_CREATE_FACTORY_FLAGS,
|
||||
DXGI_GPU_PREFERENCE_MINIMUM_POWER, IDXGIAdapter1, IDXGIFactory6,
|
||||
IDXGIAdapter1, IDXGIFactory6,
|
||||
},
|
||||
},
|
||||
};
|
||||
use windows::core::Interface;
|
||||
|
||||
pub(crate) fn try_to_recover_from_device_lost<T>(
|
||||
mut f: impl FnMut() -> Result<T>,
|
||||
@@ -121,10 +122,7 @@ fn get_dxgi_factory(debug_layer_available: bool) -> Result<IDXGIFactory6> {
|
||||
#[inline]
|
||||
fn get_adapter(dxgi_factory: &IDXGIFactory6, debug_layer_available: bool) -> Result<IDXGIAdapter1> {
|
||||
for adapter_index in 0.. {
|
||||
let adapter: IDXGIAdapter1 = unsafe {
|
||||
dxgi_factory
|
||||
.EnumAdapterByGpuPreference(adapter_index, DXGI_GPU_PREFERENCE_MINIMUM_POWER)
|
||||
}?;
|
||||
let adapter: IDXGIAdapter1 = unsafe { dxgi_factory.EnumAdapters(adapter_index)?.cast()? };
|
||||
if let Ok(desc) = unsafe { adapter.GetDesc1() } {
|
||||
let gpu_name = String::from_utf16_lossy(&desc.Description)
|
||||
.trim_matches(char::from(0))
|
||||
|
||||
@@ -1449,9 +1449,11 @@ fn is_virtual_key_pressed(vkey: VIRTUAL_KEY) -> bool {
|
||||
|
||||
#[inline]
|
||||
pub(crate) fn current_modifiers() -> Modifiers {
|
||||
let altgr = is_virtual_key_pressed(VK_RMENU) && is_virtual_key_pressed(VK_LCONTROL);
|
||||
|
||||
Modifiers {
|
||||
control: is_virtual_key_pressed(VK_CONTROL),
|
||||
alt: is_virtual_key_pressed(VK_MENU),
|
||||
control: is_virtual_key_pressed(VK_CONTROL) && !altgr,
|
||||
alt: is_virtual_key_pressed(VK_MENU) && !altgr,
|
||||
shift: is_virtual_key_pressed(VK_SHIFT),
|
||||
platform: is_virtual_key_pressed(VK_LWIN) || is_virtual_key_pressed(VK_RWIN),
|
||||
function: false,
|
||||
|
||||
@@ -107,6 +107,12 @@ float4 distance_from_clip_rect(float2 unit_vertex, Bounds bounds, Bounds clip_bo
|
||||
return distance_from_clip_rect_impl(position, clip_bounds);
|
||||
}
|
||||
|
||||
float4 distance_from_clip_rect_transformed(float2 unit_vertex, Bounds bounds, Bounds clip_bounds, TransformationMatrix transformation) {
|
||||
float2 position = unit_vertex * bounds.size + bounds.origin;
|
||||
float2 transformed = mul(position, transformation.rotation_scale) + transformation.translation;
|
||||
return distance_from_clip_rect_impl(transformed, clip_bounds);
|
||||
}
|
||||
|
||||
// Convert linear RGB to sRGB
|
||||
float3 linear_to_srgb(float3 color) {
|
||||
return pow(color, float3(2.2, 2.2, 2.2));
|
||||
@@ -654,7 +660,14 @@ float4 quad_fragment(QuadFragmentInput input): SV_Target {
|
||||
// out on each straight line, rather than around the whole
|
||||
// perimeter. This way each line starts and ends with a dash.
|
||||
bool is_horizontal = corner_center_to_point.x < corner_center_to_point.y;
|
||||
float border_width = is_horizontal ? border.x : border.y;
|
||||
// Choosing the right border width for dashed borders.
|
||||
// TODO: A better solution exists taking a look at the whole file.
|
||||
// this does not fix single dashed borders at the corners
|
||||
float2 dashed_border = float2(
|
||||
max(quad.border_widths.bottom, quad.border_widths.top),
|
||||
max(quad.border_widths.right, quad.border_widths.left)
|
||||
);
|
||||
float border_width = is_horizontal ? dashed_border.x : dashed_border.y;
|
||||
dash_velocity = dv_numerator / border_width;
|
||||
t = is_horizontal ? the_point.x : the_point.y;
|
||||
t *= dash_velocity;
|
||||
@@ -1088,7 +1101,7 @@ MonochromeSpriteVertexOutput monochrome_sprite_vertex(uint vertex_id: SV_VertexI
|
||||
MonochromeSprite sprite = mono_sprites[sprite_id];
|
||||
float4 device_position =
|
||||
to_device_position_transformed(unit_vertex, sprite.bounds, sprite.transformation);
|
||||
float4 clip_distance = distance_from_clip_rect(unit_vertex, sprite.bounds, sprite.content_mask);
|
||||
float4 clip_distance = distance_from_clip_rect_transformed(unit_vertex, sprite.bounds, sprite.content_mask, sprite.transformation);
|
||||
float2 tile_position = to_tile_position(unit_vertex, sprite.tile);
|
||||
float4 color = hsla_to_rgba(sprite.color);
|
||||
|
||||
|
||||
@@ -54,7 +54,10 @@ impl SvgRenderer {
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn render(&self, params: &RenderSvgParams) -> Result<Option<Vec<u8>>> {
|
||||
pub(crate) fn render(
|
||||
&self,
|
||||
params: &RenderSvgParams,
|
||||
) -> Result<Option<(Size<DevicePixels>, Vec<u8>)>> {
|
||||
anyhow::ensure!(!params.size.is_zero(), "can't render at a zero size");
|
||||
|
||||
// Load the tree.
|
||||
@@ -65,30 +68,33 @@ impl SvgRenderer {
|
||||
let pixmap = self.render_pixmap(&bytes, SvgSize::Size(params.size))?;
|
||||
|
||||
// Convert the pixmap's pixels into an alpha mask.
|
||||
let size = Size::new(
|
||||
DevicePixels(pixmap.width() as i32),
|
||||
DevicePixels(pixmap.height() as i32),
|
||||
);
|
||||
let alpha_mask = pixmap
|
||||
.pixels()
|
||||
.iter()
|
||||
.map(|p| p.alpha())
|
||||
.collect::<Vec<_>>();
|
||||
Ok(Some(alpha_mask))
|
||||
Ok(Some((size, alpha_mask)))
|
||||
}
|
||||
|
||||
pub fn render_pixmap(&self, bytes: &[u8], size: SvgSize) -> Result<Pixmap, usvg::Error> {
|
||||
let tree = usvg::Tree::from_data(bytes, &self.usvg_options)?;
|
||||
|
||||
let size = match size {
|
||||
SvgSize::Size(size) => size,
|
||||
SvgSize::ScaleFactor(scale) => crate::size(
|
||||
DevicePixels((tree.size().width() * scale) as i32),
|
||||
DevicePixels((tree.size().height() * scale) as i32),
|
||||
),
|
||||
let svg_size = tree.size();
|
||||
let scale = match size {
|
||||
SvgSize::Size(size) => size.width.0 as f32 / svg_size.width(),
|
||||
SvgSize::ScaleFactor(scale) => scale,
|
||||
};
|
||||
|
||||
// Render the SVG to a pixmap with the specified width and height.
|
||||
let mut pixmap = resvg::tiny_skia::Pixmap::new(size.width.into(), size.height.into())
|
||||
.ok_or(usvg::Error::InvalidSize)?;
|
||||
let mut pixmap = resvg::tiny_skia::Pixmap::new(
|
||||
(svg_size.width() * scale) as u32,
|
||||
(svg_size.height() * scale) as u32,
|
||||
)
|
||||
.ok_or(usvg::Error::InvalidSize)?;
|
||||
|
||||
let scale = size.width.0 as f32 / tree.size().width();
|
||||
let transform = resvg::tiny_skia::Transform::from_scale(scale, scale);
|
||||
|
||||
resvg::render(&tree, transform, &mut pixmap.as_mut());
|
||||
|
||||
@@ -120,7 +120,9 @@ impl TabStopMap {
|
||||
}
|
||||
};
|
||||
|
||||
let node = self.tab_node_for_focus_id(focused_id)?;
|
||||
let Some(node) = self.tab_node_for_focus_id(focused_id) else {
|
||||
return self.next(None);
|
||||
};
|
||||
let item = self.next_inner(node);
|
||||
|
||||
if let Some(item) = item {
|
||||
@@ -155,7 +157,9 @@ impl TabStopMap {
|
||||
}
|
||||
};
|
||||
|
||||
let node = self.tab_node_for_focus_id(focused_id)?;
|
||||
let Some(node) = self.tab_node_for_focus_id(focused_id) else {
|
||||
return self.prev(None);
|
||||
};
|
||||
let item = self.prev_inner(node);
|
||||
|
||||
if let Some(item) = item {
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user